hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c2da358466cdf6059f3a2649c3e957e2b02ec7a | 1,606 | py | Python | tests/asyncio/test_lifespan.py | justin0mcateer/hypercorn | c6df3becf73df7be03451d53f5685aaadd4bbd80 | [
"MIT"
] | null | null | null | tests/asyncio/test_lifespan.py | justin0mcateer/hypercorn | c6df3becf73df7be03451d53f5685aaadd4bbd80 | [
"MIT"
] | null | null | null | tests/asyncio/test_lifespan.py | justin0mcateer/hypercorn | c6df3becf73df7be03451d53f5685aaadd4bbd80 | [
"MIT"
] | null | null | null | import asyncio
from time import sleep
from typing import Callable
import pytest
from hypercorn.asyncio.lifespan import Lifespan
from hypercorn.config import Config
from hypercorn.utils import LifespanFailure, LifespanTimeout
from ..helpers import lifespan_failure, SlowLifespanFramework
async def no_lifespan_app(scope: dict, receive: Callable, send: Callable) -> None:
sleep(0.1) # Block purposefully
raise Exception()
@pytest.mark.asyncio
async def test_ensure_no_race_condition() -> None:
config = Config()
config.startup_timeout = 0.2
lifespan = Lifespan(no_lifespan_app, config)
asyncio.ensure_future(lifespan.handle_lifespan())
await lifespan.wait_for_startup() # Raises if there is a race condition
@pytest.mark.asyncio
async def test_startup_timeout_error() -> None:
config = Config()
config.startup_timeout = 0.01
lifespan = Lifespan(SlowLifespanFramework(0.02, asyncio.sleep), config) # type: ignore
asyncio.ensure_future(lifespan.handle_lifespan())
with pytest.raises(LifespanTimeout) as exc_info:
await lifespan.wait_for_startup()
assert str(exc_info.value).startswith("Timeout whilst awaiting startup")
@pytest.mark.asyncio
async def test_startup_failure() -> None:
lifespan = Lifespan(lifespan_failure, Config())
lifespan_task = asyncio.ensure_future(lifespan.handle_lifespan())
await lifespan.wait_for_startup()
assert lifespan_task.done()
exception = lifespan_task.exception()
assert isinstance(exception, LifespanFailure)
assert str(exception) == "Lifespan failure in startup. 'Failure'"
| 34.170213 | 91 | 0.764633 | import asyncio
from time import sleep
from typing import Callable
import pytest
from hypercorn.asyncio.lifespan import Lifespan
from hypercorn.config import Config
from hypercorn.utils import LifespanFailure, LifespanTimeout
from ..helpers import lifespan_failure, SlowLifespanFramework
async def no_lifespan_app(scope: dict, receive: Callable, send: Callable) -> None:
sleep(0.1)
raise Exception()
@pytest.mark.asyncio
async def test_ensure_no_race_condition() -> None:
config = Config()
config.startup_timeout = 0.2
lifespan = Lifespan(no_lifespan_app, config)
asyncio.ensure_future(lifespan.handle_lifespan())
await lifespan.wait_for_startup()
@pytest.mark.asyncio
async def test_startup_timeout_error() -> None:
config = Config()
config.startup_timeout = 0.01
lifespan = Lifespan(SlowLifespanFramework(0.02, asyncio.sleep), config)
asyncio.ensure_future(lifespan.handle_lifespan())
with pytest.raises(LifespanTimeout) as exc_info:
await lifespan.wait_for_startup()
assert str(exc_info.value).startswith("Timeout whilst awaiting startup")
@pytest.mark.asyncio
async def test_startup_failure() -> None:
lifespan = Lifespan(lifespan_failure, Config())
lifespan_task = asyncio.ensure_future(lifespan.handle_lifespan())
await lifespan.wait_for_startup()
assert lifespan_task.done()
exception = lifespan_task.exception()
assert isinstance(exception, LifespanFailure)
assert str(exception) == "Lifespan failure in startup. 'Failure'"
| true | true |
1c2da3c94d476fb4dd3196ed021713a45ff4e451 | 41,099 | py | Python | Instrument_Turi_Project/venv/lib/python3.6/site-packages/mxnet/symbol/gen_contrib.py | fozoglu/instrument-recognition | 8cc14a481c2736c4ba55f48f00794684271d82cd | [
"MIT"
] | null | null | null | Instrument_Turi_Project/venv/lib/python3.6/site-packages/mxnet/symbol/gen_contrib.py | fozoglu/instrument-recognition | 8cc14a481c2736c4ba55f48f00794684271d82cd | [
"MIT"
] | null | null | null | Instrument_Turi_Project/venv/lib/python3.6/site-packages/mxnet/symbol/gen_contrib.py | fozoglu/instrument-recognition | 8cc14a481c2736c4ba55f48f00794684271d82cd | [
"MIT"
] | null | null | null | # File content is auto-generated. Do not modify.
# pylint: skip-file
from ._internal import SymbolBase
from ..base import _Null
def CTCLoss(data=None, label=None, data_lengths=None, label_lengths=None, use_data_lengths=_Null, use_label_lengths=_Null, blank_label=_Null, name=None, attr=None, out=None, **kwargs):
r"""Connectionist Temporal Classification Loss.
The shapes of the inputs and outputs:
- **data**: `(sequence_length, batch_size, alphabet_size)`
- **label**: `(batch_size, label_sequence_length)`
- **out**: `(batch_size)`
The `data` tensor consists of sequences of activation vectors (without applying softmax),
with i-th channel in the last dimension corresponding to i-th label
for i between 0 and alphabet_size-1 (i.e always 0-indexed).
Alphabet size should include one additional value reserved for blank label.
When `blank_label` is ``"first"``, the ``0``-th channel is be reserved for
activation of blank label, or otherwise if it is "last", ``(alphabet_size-1)``-th channel should be
reserved for blank label.
``label`` is an index matrix of integers. When `blank_label` is ``"first"``,
the value 0 is then reserved for blank label, and should not be passed in this matrix. Otherwise,
when `blank_label` is ``"last"``, the value `(alphabet_size-1)` is reserved for blank label.
If a sequence of labels is shorter than *label_sequence_length*, use the special
padding value at the end of the sequence to conform it to the correct
length. The padding value is `0` when `blank_label` is ``"first"``, and `-1` otherwise.
For example, suppose the vocabulary is `[a, b, c]`, and in one batch we have three sequences
'ba', 'cbb', and 'abac'. When `blank_label` is ``"first"``, we can index the labels as
`{'a': 1, 'b': 2, 'c': 3}`, and we reserve the 0-th channel for blank label in data tensor.
The resulting `label` tensor should be padded to be::
[[2, 1, 0, 0], [3, 2, 2, 0], [1, 2, 1, 3]]
When `blank_label` is ``"last"``, we can index the labels as
`{'a': 0, 'b': 1, 'c': 2}`, and we reserve the channel index 3 for blank label in data tensor.
The resulting `label` tensor should be padded to be::
[[1, 0, -1, -1], [2, 1, 1, -1], [0, 1, 0, 2]]
``out`` is a list of CTC loss values, one per example in the batch.
See *Connectionist Temporal Classification: Labelling Unsegmented
Sequence Data with Recurrent Neural Networks*, A. Graves *et al*. for more
information on the definition and the algorithm.
Defined in src/operator/contrib/ctc_loss.cc:L115
Parameters
----------
data : Symbol
Input data to the ctc_loss op.
label : Symbol
Ground-truth labels for the loss.
data_lengths : Symbol
Lengths of data for each of the samples. Only required when use_data_lengths is true.
label_lengths : Symbol
Lengths of labels for each of the samples. Only required when use_label_lengths is true.
use_data_lengths : boolean, optional, default=0
Whether the data lenghts are decided by `data_lengths`. If false, the lengths are equal to the max sequence length.
use_label_lengths : boolean, optional, default=0
Whether the label lenghts are decided by `label_lengths`, or derived from `padding_mask`. If false, the lengths are derived from the first occurrence of the value of `padding_mask`. The value of `padding_mask` is ``0`` when first CTC label is reserved for blank, and ``-1`` when last label is reserved for blank. See `blank_label`.
blank_label : {'first', 'last'},optional, default='first'
Set the label that is reserved for blank label.If "first", 0-th label is reserved, and label values for tokens in the vocabulary are between ``1`` and ``alphabet_size-1``, and the padding mask is ``-1``. If "last", last label value ``alphabet_size-1`` is reserved for blank label instead, and label values for tokens in the vocabulary are between ``0`` and ``alphabet_size-2``, and the padding mask is ``0``.
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def DeformableConvolution(data=None, offset=None, weight=None, bias=None, kernel=_Null, stride=_Null, dilate=_Null, pad=_Null, num_filter=_Null, num_group=_Null, num_deformable_group=_Null, workspace=_Null, no_bias=_Null, layout=_Null, name=None, attr=None, out=None, **kwargs):
r"""Compute 2-D deformable convolution on 4-D input.
The deformable convolution operation is described in https://arxiv.org/abs/1703.06211
For 2-D deformable convolution, the shapes are
- **data**: *(batch_size, channel, height, width)*
- **offset**: *(batch_size, num_deformable_group * kernel[0] * kernel[1], height, width)*
- **weight**: *(num_filter, channel, kernel[0], kernel[1])*
- **bias**: *(num_filter,)*
- **out**: *(batch_size, num_filter, out_height, out_width)*.
Define::
f(x,k,p,s,d) = floor((x+2*p-d*(k-1)-1)/s)+1
then we have::
out_height=f(height, kernel[0], pad[0], stride[0], dilate[0])
out_width=f(width, kernel[1], pad[1], stride[1], dilate[1])
If ``no_bias`` is set to be true, then the ``bias`` term is ignored.
The default data ``layout`` is *NCHW*, namely *(batch_size, channle, height,
width)*.
If ``num_group`` is larger than 1, denoted by *g*, then split the input ``data``
evenly into *g* parts along the channel axis, and also evenly split ``weight``
along the first dimension. Next compute the convolution on the *i*-th part of
the data with the *i*-th weight part. The output is obtained by concating all
the *g* results.
If ``num_deformable_group`` is larger than 1, denoted by *dg*, then split the
input ``offset`` evenly into *dg* parts along the channel axis, and also evenly
split ``out`` evenly into *dg* parts along the channel axis. Next compute the
deformable convolution, apply the *i*-th part of the offset part on the *i*-th
out.
Both ``weight`` and ``bias`` are learnable parameters.
Defined in src/operator/contrib/deformable_convolution.cc:L100
Parameters
----------
data : Symbol
Input data to the DeformableConvolutionOp.
offset : Symbol
Input offset to the DeformableConvolutionOp.
weight : Symbol
Weight matrix.
bias : Symbol
Bias parameter.
kernel : Shape(tuple), required
Convolution kernel size: (h, w) or (d, h, w)
stride : Shape(tuple), optional, default=[]
Convolution stride: (h, w) or (d, h, w). Defaults to 1 for each dimension.
dilate : Shape(tuple), optional, default=[]
Convolution dilate: (h, w) or (d, h, w). Defaults to 1 for each dimension.
pad : Shape(tuple), optional, default=[]
Zero pad for convolution: (h, w) or (d, h, w). Defaults to no padding.
num_filter : int (non-negative), required
Convolution filter(channel) number
num_group : int (non-negative), optional, default=1
Number of group partitions.
num_deformable_group : int (non-negative), optional, default=1
Number of deformable group partitions.
workspace : long (non-negative), optional, default=1024
Maximum temperal workspace allowed for convolution (MB).
no_bias : boolean, optional, default=0
Whether to disable bias parameter.
layout : {None, 'NCDHW', 'NCHW', 'NCW'},optional, default='None'
Set layout for input, output and weight. Empty for
default layout: NCW for 1d, NCHW for 2d and NCDHW for 3d.
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def DeformablePSROIPooling(data=None, rois=None, trans=None, spatial_scale=_Null, output_dim=_Null, group_size=_Null, pooled_size=_Null, part_size=_Null, sample_per_part=_Null, trans_std=_Null, no_trans=_Null, name=None, attr=None, out=None, **kwargs):
r"""Performs deformable position-sensitive region-of-interest pooling on inputs.
The DeformablePSROIPooling operation is described in https://arxiv.org/abs/1703.06211 .batch_size will change to the number of region bounding boxes after DeformablePSROIPooling
Parameters
----------
data : Symbol
Input data to the pooling operator, a 4D Feature maps
rois : Symbol
Bounding box coordinates, a 2D array of [[batch_index, x1, y1, x2, y2]]. (x1, y1) and (x2, y2) are top left and down right corners of designated region of interest. batch_index indicates the index of corresponding image in the input data
trans : Symbol
transition parameter
spatial_scale : float, required
Ratio of input feature map height (or w) to raw image height (or w). Equals the reciprocal of total stride in convolutional layers
output_dim : int, required
fix output dim
group_size : int, required
fix group size
pooled_size : int, required
fix pooled size
part_size : int, optional, default='0'
fix part size
sample_per_part : int, optional, default='1'
fix samples per part
trans_std : float, optional, default=0
fix transition std
no_trans : boolean, optional, default=0
Whether to disable trans parameter.
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def MultiBoxDetection(cls_prob=None, loc_pred=None, anchor=None, clip=_Null, threshold=_Null, background_id=_Null, nms_threshold=_Null, force_suppress=_Null, variances=_Null, nms_topk=_Null, name=None, attr=None, out=None, **kwargs):
r"""Convert multibox detection predictions.
Parameters
----------
cls_prob : Symbol
Class probabilities.
loc_pred : Symbol
Location regression predictions.
anchor : Symbol
Multibox prior anchor boxes
clip : boolean, optional, default=1
Clip out-of-boundary boxes.
threshold : float, optional, default=0.01
Threshold to be a positive prediction.
background_id : int, optional, default='0'
Background id.
nms_threshold : float, optional, default=0.5
Non-maximum suppression threshold.
force_suppress : boolean, optional, default=0
Suppress all detections regardless of class_id.
variances : tuple of <float>, optional, default=[0.1,0.1,0.2,0.2]
Variances to be decoded from box regression output.
nms_topk : int, optional, default='-1'
Keep maximum top k detections before nms, -1 for no limit.
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def MultiBoxPrior(data=None, sizes=_Null, ratios=_Null, clip=_Null, steps=_Null, offsets=_Null, name=None, attr=None, out=None, **kwargs):
r"""Generate prior(anchor) boxes from data, sizes and ratios.
Parameters
----------
data : Symbol
Input data.
sizes : tuple of <float>, optional, default=[1]
List of sizes of generated MultiBoxPriores.
ratios : tuple of <float>, optional, default=[1]
List of aspect ratios of generated MultiBoxPriores.
clip : boolean, optional, default=0
Whether to clip out-of-boundary boxes.
steps : tuple of <float>, optional, default=[-1,-1]
Priorbox step across y and x, -1 for auto calculation.
offsets : tuple of <float>, optional, default=[0.5,0.5]
Priorbox center offsets, y and x respectively
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def MultiBoxTarget(anchor=None, label=None, cls_pred=None, overlap_threshold=_Null, ignore_label=_Null, negative_mining_ratio=_Null, negative_mining_thresh=_Null, minimum_negative_samples=_Null, variances=_Null, name=None, attr=None, out=None, **kwargs):
r"""Compute Multibox training targets
Parameters
----------
anchor : Symbol
Generated anchor boxes.
label : Symbol
Object detection labels.
cls_pred : Symbol
Class predictions.
overlap_threshold : float, optional, default=0.5
Anchor-GT overlap threshold to be regarded as a positive match.
ignore_label : float, optional, default=-1
Label for ignored anchors.
negative_mining_ratio : float, optional, default=-1
Max negative to positive samples ratio, use -1 to disable mining
negative_mining_thresh : float, optional, default=0.5
Threshold used for negative mining.
minimum_negative_samples : int, optional, default='0'
Minimum number of negative samples.
variances : tuple of <float>, optional, default=[0.1,0.1,0.2,0.2]
Variances to be encoded in box regression target.
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def MultiProposal(cls_score=None, bbox_pred=None, im_info=None, rpn_pre_nms_top_n=_Null, rpn_post_nms_top_n=_Null, threshold=_Null, rpn_min_size=_Null, scales=_Null, ratios=_Null, feature_stride=_Null, output_score=_Null, iou_loss=_Null, name=None, attr=None, out=None, **kwargs):
r"""Generate region proposals via RPN
Parameters
----------
cls_score : Symbol
Score of how likely proposal is object.
bbox_pred : Symbol
BBox Predicted deltas from anchors for proposals
im_info : Symbol
Image size and scale.
rpn_pre_nms_top_n : int, optional, default='6000'
Number of top scoring boxes to keep after applying NMS to RPN proposals
rpn_post_nms_top_n : int, optional, default='300'
Overlap threshold used for non-maximumsuppresion(suppress boxes with IoU >= this threshold
threshold : float, optional, default=0.7
NMS value, below which to suppress.
rpn_min_size : int, optional, default='16'
Minimum height or width in proposal
scales : tuple of <float>, optional, default=[4,8,16,32]
Used to generate anchor windows by enumerating scales
ratios : tuple of <float>, optional, default=[0.5,1,2]
Used to generate anchor windows by enumerating ratios
feature_stride : int, optional, default='16'
The size of the receptive field each unit in the convolution layer of the rpn,for example the product of all stride's prior to this layer.
output_score : boolean, optional, default=0
Add score to outputs
iou_loss : boolean, optional, default=0
Usage of IoU Loss
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def PSROIPooling(data=None, rois=None, spatial_scale=_Null, output_dim=_Null, pooled_size=_Null, group_size=_Null, name=None, attr=None, out=None, **kwargs):
r"""Performs region-of-interest pooling on inputs. Resize bounding box coordinates by spatial_scale and crop input feature maps accordingly. The cropped feature maps are pooled by max pooling to a fixed size output indicated by pooled_size. batch_size will change to the number of region bounding boxes after PSROIPooling
Parameters
----------
data : Symbol
Input data to the pooling operator, a 4D Feature maps
rois : Symbol
Bounding box coordinates, a 2D array of [[batch_index, x1, y1, x2, y2]]. (x1, y1) and (x2, y2) are top left and down right corners of designated region of interest. batch_index indicates the index of corresponding image in the input data
spatial_scale : float, required
Ratio of input feature map height (or w) to raw image height (or w). Equals the reciprocal of total stride in convolutional layers
output_dim : int, required
fix output dim
pooled_size : int, required
fix pooled size
group_size : int, optional, default='0'
fix group size
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def Proposal(cls_score=None, bbox_pred=None, im_info=None, rpn_pre_nms_top_n=_Null, rpn_post_nms_top_n=_Null, threshold=_Null, rpn_min_size=_Null, scales=_Null, ratios=_Null, feature_stride=_Null, output_score=_Null, iou_loss=_Null, name=None, attr=None, out=None, **kwargs):
r"""Generate region proposals via RPN
Parameters
----------
cls_score : Symbol
Score of how likely proposal is object.
bbox_pred : Symbol
BBox Predicted deltas from anchors for proposals
im_info : Symbol
Image size and scale.
rpn_pre_nms_top_n : int, optional, default='6000'
Number of top scoring boxes to keep after applying NMS to RPN proposals
rpn_post_nms_top_n : int, optional, default='300'
Overlap threshold used for non-maximumsuppresion(suppress boxes with IoU >= this threshold
threshold : float, optional, default=0.7
NMS value, below which to suppress.
rpn_min_size : int, optional, default='16'
Minimum height or width in proposal
scales : tuple of <float>, optional, default=[4,8,16,32]
Used to generate anchor windows by enumerating scales
ratios : tuple of <float>, optional, default=[0.5,1,2]
Used to generate anchor windows by enumerating ratios
feature_stride : int, optional, default='16'
The size of the receptive field each unit in the convolution layer of the rpn,for example the product of all stride's prior to this layer.
output_score : boolean, optional, default=0
Add score to outputs
iou_loss : boolean, optional, default=0
Usage of IoU Loss
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def SparseEmbedding(data=None, weight=None, input_dim=_Null, output_dim=_Null, dtype=_Null, name=None, attr=None, out=None, **kwargs):
r"""Maps integer indices to vector representations (embeddings).
This operator maps words to real-valued vectors in a high-dimensional space,
called word embeddings. These embeddings can capture semantic and syntactic properties of the words.
For example, it has been noted that in the learned embedding spaces, similar words tend
to be close to each other and dissimilar words far apart.
For an input array of shape (d1, ..., dK),
the shape of an output array is (d1, ..., dK, output_dim).
All the input values should be integers in the range [0, input_dim).
If the input_dim is ip0 and output_dim is op0, then shape of the embedding weight matrix must be
(ip0, op0).
The storage type of weight must be `row_sparse`, and the gradient of the weight will be of
`row_sparse` storage type, too.
.. Note::
`SparseEmbedding` is designed for the use case where `input_dim` is very large (e.g. 100k).
The operator is available on both CPU and GPU.
Examples::
input_dim = 4
output_dim = 5
// Each row in weight matrix y represents a word. So, y = (w0,w1,w2,w3)
y = [[ 0., 1., 2., 3., 4.],
[ 5., 6., 7., 8., 9.],
[ 10., 11., 12., 13., 14.],
[ 15., 16., 17., 18., 19.]]
// Input array x represents n-grams(2-gram). So, x = [(w1,w3), (w0,w2)]
x = [[ 1., 3.],
[ 0., 2.]]
// Mapped input x to its vector representation y.
SparseEmbedding(x, y, 4, 5) = [[[ 5., 6., 7., 8., 9.],
[ 15., 16., 17., 18., 19.]],
[[ 0., 1., 2., 3., 4.],
[ 10., 11., 12., 13., 14.]]]
Defined in src/operator/tensor/indexing_op.cc:L294
Parameters
----------
data : Symbol
The input array to the embedding operator.
weight : Symbol
The embedding weight matrix.
input_dim : int, required
Vocabulary size of the input indices.
output_dim : int, required
Dimension of the embedding vectors.
dtype : {'float16', 'float32', 'float64', 'int32', 'uint8'},optional, default='float32'
Data type of weight.
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def bipartite_matching(data=None, is_ascend=_Null, threshold=_Null, topk=_Null, name=None, attr=None, out=None, **kwargs):
r"""Compute bipartite matching.
The matching is performed on score matrix with shape [B, N, M]
- B: batch_size
- N: number of rows to match
- M: number of columns as reference to be matched against.
Returns:
x : matched column indices. -1 indicating non-matched elements in rows.
y : matched row indices.
Note::
Zero gradients are back-propagated in this op for now.
Example::
s = [[0.5, 0.6], [0.1, 0.2], [0.3, 0.4]]
x, y = bipartite_matching(x, threshold=1e-12, is_ascend=False)
x = [1, -1, 0]
y = [2, 0]
Defined in src/operator/contrib/bounding_box.cc:L169
Parameters
----------
data : Symbol
The input
is_ascend : boolean, optional, default=0
Use ascend order for scores instead of descending. Please set threshold accordingly.
threshold : float, required
Ignore matching when score < thresh, if is_ascend=false, or ignore score > thresh, if is_ascend=true.
topk : int, optional, default='-1'
Limit the number of matches to topk, set -1 for no limit
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def box_iou(lhs=None, rhs=None, format=_Null, name=None, attr=None, out=None, **kwargs):
r"""Bounding box overlap of two arrays.
The overlap is defined as Intersection-over-Union, aka, IOU.
- lhs: (a_1, a_2, ..., a_n, 4) array
- rhs: (b_1, b_2, ..., b_n, 4) array
- output: (a_1, a_2, ..., a_n, b_1, b_2, ..., b_n) array
Note::
Zero gradients are back-propagated in this op for now.
Example::
x = [[0.5, 0.5, 1.0, 1.0], [0.0, 0.0, 0.5, 0.5]]
y = [0.25, 0.25, 0.75, 0.75]
box_iou(x, y, format='corner') = [[0.1428], [0.1428]]
Defined in src/operator/contrib/bounding_box.cc:L123
Parameters
----------
lhs : Symbol
The first input
rhs : Symbol
The second input
format : {'center', 'corner'},optional, default='corner'
The box encoding type.
"corner" means boxes are encoded as [xmin, ymin, xmax, ymax], "center" means boxes are encodes as [x, y, width, height].
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def box_nms(data=None, overlap_thresh=_Null, topk=_Null, coord_start=_Null, score_index=_Null, id_index=_Null, force_suppress=_Null, in_format=_Null, out_format=_Null, name=None, attr=None, out=None, **kwargs):
r"""Apply non-maximum suppression to input.
The output will be sorted in descending order according to `score`. Boxes with
overlaps larger than `overlap_thresh` and smaller scores will be removed and
filled with -1, the corresponding position will be recorded for backward propogation.
During back-propagation, the gradient will be copied to the original
position according to the input index. For positions that have been suppressed,
the in_grad will be assigned 0.
In summary, gradients are sticked to its boxes, will either be moved or discarded
according to its original index in input.
Input requirements:
1. Input tensor have at least 2 dimensions, (n, k), any higher dims will be regarded
as batch, e.g. (a, b, c, d, n, k) == (a*b*c*d, n, k)
2. n is the number of boxes in each batch
3. k is the width of each box item.
By default, a box is [id, score, xmin, ymin, xmax, ymax, ...],
additional elements are allowed.
- `id_index`: optional, use -1 to ignore, useful if `force_suppress=False`, which means
we will skip highly overlapped boxes if one is `apple` while the other is `car`.
- `coord_start`: required, default=2, the starting index of the 4 coordinates.
Two formats are supported:
`corner`: [xmin, ymin, xmax, ymax]
`center`: [x, y, width, height]
- `score_index`: required, default=1, box score/confidence.
When two boxes overlap IOU > `overlap_thresh`, the one with smaller score will be suppressed.
- `in_format` and `out_format`: default='corner', specify in/out box formats.
Examples::
x = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.4, 0.1, 0.1, 0.2, 0.2],
[0, 0.3, 0.1, 0.1, 0.14, 0.14], [2, 0.6, 0.5, 0.5, 0.7, 0.8]]
box_nms(x, overlap_thresh=0.1, coord_start=2, score_index=1, id_index=0,
force_suppress=True, in_format='corner', out_typ='corner') =
[[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
[-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
out_grad = [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1], [0.2, 0.2, 0.2, 0.2, 0.2, 0.2],
[0.3, 0.3, 0.3, 0.3, 0.3, 0.3], [0.4, 0.4, 0.4, 0.4, 0.4, 0.4]]
# exe.backward
in_grad = [[0.2, 0.2, 0.2, 0.2, 0.2, 0.2], [0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0], [0.1, 0.1, 0.1, 0.1, 0.1, 0.1]]
Defined in src/operator/contrib/bounding_box.cc:L82
Parameters
----------
data : Symbol
The input
overlap_thresh : float, optional, default=0.5
Overlapping(IoU) threshold to suppress object with smaller score.
topk : int, optional, default='-1'
Apply nms to topk boxes with descending scores, -1 to no restriction.
coord_start : int, optional, default='2'
Start index of the consecutive 4 coordinates.
score_index : int, optional, default='1'
Index of the scores/confidence of boxes.
id_index : int, optional, default='-1'
Optional, index of the class categories, -1 to disable.
force_suppress : boolean, optional, default=0
Optional, if set false and id_index is provided, nms will only apply to boxes belongs to the same category
in_format : {'center', 'corner'},optional, default='corner'
The input box encoding type.
"corner" means boxes are encoded as [xmin, ymin, xmax, ymax], "center" means boxes are encodes as [x, y, width, height].
out_format : {'center', 'corner'},optional, default='corner'
The output box encoding type.
"corner" means boxes are encoded as [xmin, ymin, xmax, ymax], "center" means boxes are encodes as [x, y, width, height].
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def box_non_maximum_suppression(data=None, overlap_thresh=_Null, topk=_Null, coord_start=_Null, score_index=_Null, id_index=_Null, force_suppress=_Null, in_format=_Null, out_format=_Null, name=None, attr=None, out=None, **kwargs):
r"""Apply non-maximum suppression to input.
The output will be sorted in descending order according to `score`. Boxes with
overlaps larger than `overlap_thresh` and smaller scores will be removed and
filled with -1, the corresponding position will be recorded for backward propogation.
During back-propagation, the gradient will be copied to the original
position according to the input index. For positions that have been suppressed,
the in_grad will be assigned 0.
In summary, gradients are sticked to its boxes, will either be moved or discarded
according to its original index in input.
Input requirements:
1. Input tensor have at least 2 dimensions, (n, k), any higher dims will be regarded
as batch, e.g. (a, b, c, d, n, k) == (a*b*c*d, n, k)
2. n is the number of boxes in each batch
3. k is the width of each box item.
By default, a box is [id, score, xmin, ymin, xmax, ymax, ...],
additional elements are allowed.
- `id_index`: optional, use -1 to ignore, useful if `force_suppress=False`, which means
we will skip highly overlapped boxes if one is `apple` while the other is `car`.
- `coord_start`: required, default=2, the starting index of the 4 coordinates.
Two formats are supported:
`corner`: [xmin, ymin, xmax, ymax]
`center`: [x, y, width, height]
- `score_index`: required, default=1, box score/confidence.
When two boxes overlap IOU > `overlap_thresh`, the one with smaller score will be suppressed.
- `in_format` and `out_format`: default='corner', specify in/out box formats.
Examples::
x = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.4, 0.1, 0.1, 0.2, 0.2],
[0, 0.3, 0.1, 0.1, 0.14, 0.14], [2, 0.6, 0.5, 0.5, 0.7, 0.8]]
box_nms(x, overlap_thresh=0.1, coord_start=2, score_index=1, id_index=0,
force_suppress=True, in_format='corner', out_typ='corner') =
[[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
[-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
out_grad = [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1], [0.2, 0.2, 0.2, 0.2, 0.2, 0.2],
[0.3, 0.3, 0.3, 0.3, 0.3, 0.3], [0.4, 0.4, 0.4, 0.4, 0.4, 0.4]]
# exe.backward
in_grad = [[0.2, 0.2, 0.2, 0.2, 0.2, 0.2], [0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0], [0.1, 0.1, 0.1, 0.1, 0.1, 0.1]]
Defined in src/operator/contrib/bounding_box.cc:L82
Parameters
----------
data : Symbol
The input
overlap_thresh : float, optional, default=0.5
Overlapping(IoU) threshold to suppress object with smaller score.
topk : int, optional, default='-1'
Apply nms to topk boxes with descending scores, -1 to no restriction.
coord_start : int, optional, default='2'
Start index of the consecutive 4 coordinates.
score_index : int, optional, default='1'
Index of the scores/confidence of boxes.
id_index : int, optional, default='-1'
Optional, index of the class categories, -1 to disable.
force_suppress : boolean, optional, default=0
Optional, if set false and id_index is provided, nms will only apply to boxes belongs to the same category
in_format : {'center', 'corner'},optional, default='corner'
The input box encoding type.
"corner" means boxes are encoded as [xmin, ymin, xmax, ymax], "center" means boxes are encodes as [x, y, width, height].
out_format : {'center', 'corner'},optional, default='corner'
The output box encoding type.
"corner" means boxes are encoded as [xmin, ymin, xmax, ymax], "center" means boxes are encodes as [x, y, width, height].
name : string, optional.
Name of the resulting symbol.
Returns
-------
Symbol
The result symbol.
"""
return (0,)
def count_sketch(data=None, h=None, s=None, out_dim=_Null, processing_batch_size=_Null, name=None, attr=None, out=None, **kwargs):
    r"""Apply CountSketch to input: map a d-dimension data to k-dimension data.
    .. note:: `count_sketch` is only available on GPU.
    Assume input data has shape (N, d), sign hash table s has shape (N, d),
    index hash table h has shape (N, d) and mapping dimension out_dim = k,
    each element in s is either +1 or -1, each element in h is random integer from 0 to k-1.
    Then the operator computes:
    .. math::
       out[h[i]] += data[i] * s[i]
    Example::
       out_dim = 5
       x = [[1.2, 2.5, 3.4],[3.2, 5.7, 6.6]]
       h = [[0, 3, 4]]
       s = [[1, -1, 1]]
       mx.contrib.ndarray.count_sketch(data=x, h=h, s=s, out_dim = 5) = [[1.2, 0, 0, -2.5, 3.4],
                                                                        [3.2, 0, 0, -5.7, 6.6]]
    Defined in src/operator/contrib/count_sketch.cc:L67
    Parameters
    ----------
    data : Symbol
        Input data to the CountSketchOp.
    h : Symbol
        The index vector
    s : Symbol
        The sign vector
    out_dim : int, required
        The output dimension.
    processing_batch_size : int, optional, default='32'
        How many sketch vectors to process at one time.
    name : string, optional.
        Name of the resulting symbol.
    Returns
    -------
    Symbol
        The result symbol.
    """
    return (0,)
def ctc_loss(data=None, label=None, data_lengths=None, label_lengths=None, use_data_lengths=_Null, use_label_lengths=_Null, blank_label=_Null, name=None, attr=None, out=None, **kwargs):
    r"""Connectionist Temporal Classification Loss.
    The shapes of the inputs and outputs:
    - **data**: `(sequence_length, batch_size, alphabet_size)`
    - **label**: `(batch_size, label_sequence_length)`
    - **out**: `(batch_size)`
    The `data` tensor consists of sequences of activation vectors (without applying softmax),
    with i-th channel in the last dimension corresponding to i-th label
    for i between 0 and alphabet_size-1 (i.e always 0-indexed).
    Alphabet size should include one additional value reserved for blank label.
    When `blank_label` is ``"first"``, the ``0``-th channel is reserved for
    activation of blank label, or otherwise if it is "last", ``(alphabet_size-1)``-th channel should be
    reserved for blank label.
    ``label`` is an index matrix of integers. When `blank_label` is ``"first"``,
    the value 0 is then reserved for blank label, and should not be passed in this matrix. Otherwise,
    when `blank_label` is ``"last"``, the value `(alphabet_size-1)` is reserved for blank label.
    If a sequence of labels is shorter than *label_sequence_length*, use the special
    padding value at the end of the sequence to conform it to the correct
    length. The padding value is `0` when `blank_label` is ``"first"``, and `-1` otherwise.
    For example, suppose the vocabulary is `[a, b, c]`, and in one batch we have three sequences
    'ba', 'cbb', and 'abac'. When `blank_label` is ``"first"``, we can index the labels as
    `{'a': 1, 'b': 2, 'c': 3}`, and we reserve the 0-th channel for blank label in data tensor.
    The resulting `label` tensor should be padded to be::
    [[2, 1, 0, 0], [3, 2, 2, 0], [1, 2, 1, 3]]
    When `blank_label` is ``"last"``, we can index the labels as
    `{'a': 0, 'b': 1, 'c': 2}`, and we reserve the channel index 3 for blank label in data tensor.
    The resulting `label` tensor should be padded to be::
    [[1, 0, -1, -1], [2, 1, 1, -1], [0, 1, 0, 2]]
    ``out`` is a list of CTC loss values, one per example in the batch.
    See *Connectionist Temporal Classification: Labelling Unsegmented
    Sequence Data with Recurrent Neural Networks*, A. Graves *et al*. for more
    information on the definition and the algorithm.
    Defined in src/operator/contrib/ctc_loss.cc:L115
    Parameters
    ----------
    data : Symbol
        Input data to the ctc_loss op.
    label : Symbol
        Ground-truth labels for the loss.
    data_lengths : Symbol
        Lengths of data for each of the samples. Only required when use_data_lengths is true.
    label_lengths : Symbol
        Lengths of labels for each of the samples. Only required when use_label_lengths is true.
    use_data_lengths : boolean, optional, default=0
        Whether the data lengths are decided by `data_lengths`. If false, the lengths are equal to the max sequence length.
    use_label_lengths : boolean, optional, default=0
        Whether the label lengths are decided by `label_lengths`, or derived from `padding_mask`. If false, the lengths are derived from the first occurrence of the value of `padding_mask`. The value of `padding_mask` is ``0`` when first CTC label is reserved for blank, and ``-1`` when last label is reserved for blank. See `blank_label`.
    blank_label : {'first', 'last'},optional, default='first'
        Set the label that is reserved for blank label.If "first", 0-th label is reserved, and label values for tokens in the vocabulary are between ``1`` and ``alphabet_size-1``, and the padding mask is ``-1``. If "last", last label value ``alphabet_size-1`` is reserved for blank label instead, and label values for tokens in the vocabulary are between ``0`` and ``alphabet_size-2``, and the padding mask is ``0``.
    name : string, optional.
        Name of the resulting symbol.
    Returns
    -------
    Symbol
        The result symbol.
    """
    return (0,)
def dequantize(input=None, min_range=None, max_range=None, out_type=_Null, name=None, attr=None, out=None, **kwargs):
    r"""Dequantize the input tensor into a float tensor.
    [min_range, max_range] are scalar floats that specify the range for
    the output data.
    Each value of the tensor will undergo the following:
    `out[i] = min_range + (in[i] * (max_range - min_range) / range(INPUT_TYPE))`
    here `range(T) = numeric_limits<T>::max() - numeric_limits<T>::min()`
    Defined in src/operator/contrib/dequantize.cc:L41
    Parameters
    ----------
    input : Symbol
        A ndarray/symbol of type `uint8`
    min_range : Symbol
        The minimum scalar value possibly produced for the input
    max_range : Symbol
        The maximum scalar value possibly produced for the input
    out_type : {'float32'}, required
        Output data type.
    name : string, optional.
        Name of the resulting symbol.
    Returns
    -------
    Symbol
        The result symbol.
    """
    return (0,)
def fft(data=None, compute_size=_Null, name=None, attr=None, out=None, **kwargs):
    r"""Apply 1D FFT to input.
    .. note:: `fft` is only available on GPU.
    Currently accept 2 input data shapes: (N, d) or (N1, N2, N3, d), data can only be real numbers.
    The output data has shape: (N, 2*d) or (N1, N2, N3, 2*d). The format is: [real0, imag0, real1, imag1, ...].
    Example::
       data = np.random.normal(0,1,(3,4))
       out = mx.contrib.ndarray.fft(data = mx.nd.array(data,ctx = mx.gpu(0)))
    Defined in src/operator/contrib/fft.cc:L56
    Parameters
    ----------
    data : Symbol
        Input data to the FFTOp.
    compute_size : int, optional, default='128'
        Maximum size of sub-batch to be forwarded at one time
    name : string, optional.
        Name of the resulting symbol.
    Returns
    -------
    Symbol
        The result symbol.
    """
    return (0,)
def ifft(data=None, compute_size=_Null, name=None, attr=None, out=None, **kwargs):
    r"""Apply 1D ifft to input.
    .. note:: `ifft` is only available on GPU.
    Currently accept 2 input data shapes: (N, d) or (N1, N2, N3, d). Data is in format: [real0, imag0, real1, imag1, ...].
    Last dimension must be an even number.
    The output data has shape: (N, d/2) or (N1, N2, N3, d/2). It is only the real part of the result.
    Example::
       data = np.random.normal(0,1,(3,4))
       out = mx.contrib.ndarray.ifft(data = mx.nd.array(data,ctx = mx.gpu(0)))
    Defined in src/operator/contrib/ifft.cc:L58
    Parameters
    ----------
    data : Symbol
        Input data to the IFFTOp.
    compute_size : int, optional, default='128'
        Maximum size of sub-batch to be forwarded at one time
    name : string, optional.
        Name of the resulting symbol.
    Returns
    -------
    Symbol
        The result symbol.
    """
    return (0,)
def quantize(input=None, min_range=None, max_range=None, out_type=_Null, name=None, attr=None, out=None, **kwargs):
    r"""Quantize an input tensor from float to `out_type`,
    with user-specified `min_range` and `max_range`.
    [min_range, max_range] are scalar floats that specify the range for
    the input data. Each value of the tensor will undergo the following:
    `out[i] = (in[i] - min_range) * range(OUTPUT_TYPE) / (max_range - min_range)`
    here `range(T) = numeric_limits<T>::max() - numeric_limits<T>::min()`
    Defined in src/operator/contrib/quantize.cc:L41
    Parameters
    ----------
    input : Symbol
        A ndarray/symbol of type `float32`
    min_range : Symbol
        The minimum scalar value possibly produced for the input
    max_range : Symbol
        The maximum scalar value possibly produced for the input
    out_type : {'uint8'},optional, default='uint8'
        Output data type.
    name : string, optional.
        Name of the resulting symbol.
    Returns
    -------
    Symbol
        The result symbol.
    """
    return (0,)
__all__ = ['CTCLoss', 'DeformableConvolution', 'DeformablePSROIPooling', 'MultiBoxDetection', 'MultiBoxPrior', 'MultiBoxTarget', 'MultiProposal', 'PSROIPooling', 'Proposal', 'SparseEmbedding', 'bipartite_matching', 'box_iou', 'box_nms', 'box_non_maximum_suppression', 'count_sketch', 'ctc_loss', 'dequantize', 'fft', 'ifft', 'quantize'] | 41.016966 | 416 | 0.649918 |
from ._internal import SymbolBase
from ..base import _Null
def CTCLoss(data=None, label=None, data_lengths=None, label_lengths=None, use_data_lengths=_Null, use_label_lengths=_Null, blank_label=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def DeformableConvolution(data=None, offset=None, weight=None, bias=None, kernel=_Null, stride=_Null, dilate=_Null, pad=_Null, num_filter=_Null, num_group=_Null, num_deformable_group=_Null, workspace=_Null, no_bias=_Null, layout=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def DeformablePSROIPooling(data=None, rois=None, trans=None, spatial_scale=_Null, output_dim=_Null, group_size=_Null, pooled_size=_Null, part_size=_Null, sample_per_part=_Null, trans_std=_Null, no_trans=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def MultiBoxDetection(cls_prob=None, loc_pred=None, anchor=None, clip=_Null, threshold=_Null, background_id=_Null, nms_threshold=_Null, force_suppress=_Null, variances=_Null, nms_topk=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def MultiBoxPrior(data=None, sizes=_Null, ratios=_Null, clip=_Null, steps=_Null, offsets=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def MultiBoxTarget(anchor=None, label=None, cls_pred=None, overlap_threshold=_Null, ignore_label=_Null, negative_mining_ratio=_Null, negative_mining_thresh=_Null, minimum_negative_samples=_Null, variances=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def MultiProposal(cls_score=None, bbox_pred=None, im_info=None, rpn_pre_nms_top_n=_Null, rpn_post_nms_top_n=_Null, threshold=_Null, rpn_min_size=_Null, scales=_Null, ratios=_Null, feature_stride=_Null, output_score=_Null, iou_loss=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def PSROIPooling(data=None, rois=None, spatial_scale=_Null, output_dim=_Null, pooled_size=_Null, group_size=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def Proposal(cls_score=None, bbox_pred=None, im_info=None, rpn_pre_nms_top_n=_Null, rpn_post_nms_top_n=_Null, threshold=_Null, rpn_min_size=_Null, scales=_Null, ratios=_Null, feature_stride=_Null, output_score=_Null, iou_loss=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def SparseEmbedding(data=None, weight=None, input_dim=_Null, output_dim=_Null, dtype=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def bipartite_matching(data=None, is_ascend=_Null, threshold=_Null, topk=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def box_iou(lhs=None, rhs=None, format=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def box_nms(data=None, overlap_thresh=_Null, topk=_Null, coord_start=_Null, score_index=_Null, id_index=_Null, force_suppress=_Null, in_format=_Null, out_format=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def box_non_maximum_suppression(data=None, overlap_thresh=_Null, topk=_Null, coord_start=_Null, score_index=_Null, id_index=_Null, force_suppress=_Null, in_format=_Null, out_format=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def count_sketch(data=None, h=None, s=None, out_dim=_Null, processing_batch_size=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def ctc_loss(data=None, label=None, data_lengths=None, label_lengths=None, use_data_lengths=_Null, use_label_lengths=_Null, blank_label=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def dequantize(input=None, min_range=None, max_range=None, out_type=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def fft(data=None, compute_size=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def ifft(data=None, compute_size=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
def quantize(input=None, min_range=None, max_range=None, out_type=_Null, name=None, attr=None, out=None, **kwargs):
return (0,)
__all__ = ['CTCLoss', 'DeformableConvolution', 'DeformablePSROIPooling', 'MultiBoxDetection', 'MultiBoxPrior', 'MultiBoxTarget', 'MultiProposal', 'PSROIPooling', 'Proposal', 'SparseEmbedding', 'bipartite_matching', 'box_iou', 'box_nms', 'box_non_maximum_suppression', 'count_sketch', 'ctc_loss', 'dequantize', 'fft', 'ifft', 'quantize'] | true | true |
1c2da447b7adf5eb8f5afb463937b0be2ed115d1 | 190 | py | Python | indonesian_dot/agents/agent.py | Ra-Ni/Indonesian-Dot-Solver | 2baf507d23816b686f046f89d4c833728b25f2dc | [
"MIT"
] | null | null | null | indonesian_dot/agents/agent.py | Ra-Ni/Indonesian-Dot-Solver | 2baf507d23816b686f046f89d4c833728b25f2dc | [
"MIT"
] | null | null | null | indonesian_dot/agents/agent.py | Ra-Ni/Indonesian-Dot-Solver | 2baf507d23816b686f046f89d4c833728b25f2dc | [
"MIT"
] | 1 | 2020-03-18T15:23:24.000Z | 2020-03-18T15:23:24.000Z | class Agent:
def g(self, n) -> int:
raise NotImplementedError
def h(self, n) -> int:
raise NotImplementedError
def __str__(self) -> str:
return 'agent'
| 19 | 33 | 0.578947 | class Agent:
def g(self, n) -> int:
raise NotImplementedError
def h(self, n) -> int:
raise NotImplementedError
def __str__(self) -> str:
return 'agent'
| true | true |
1c2da47e791d320267d90c36afc0ffe1389121c7 | 4,938 | py | Python | pred/queries/predictionqueryparts.py | Duke-GCB/PredictionsDB | 066278425890288d9e430a46096a347453301b08 | [
"MIT"
] | null | null | null | pred/queries/predictionqueryparts.py | Duke-GCB/PredictionsDB | 066278425890288d9e430a46096a347453301b08 | [
"MIT"
] | 57 | 2016-09-16T15:23:49.000Z | 2021-09-07T15:20:22.000Z | pred/queries/predictionqueryparts.py | Duke-GCB/PredictionsDB | 066278425890288d9e430a46096a347453301b08 | [
"MIT"
] | 1 | 2016-09-09T20:03:48.000Z | 2016-09-09T20:03:48.000Z | from pred.queries.querybuilder import QueryPart
RANGE_OPERATOR = '@>' # contains range - excludes predictions not completely inside gene TSS range
def _query_part(sql):
    """Wrap a SQL fragment that needs no bound parameters in a QueryPart."""
    return QueryPart(sql, [])
def set_search_path(schema):
    """Build a statement pointing the Postgres search_path at `schema` (then public)."""
    return QueryPart("SET search_path TO %s,public;", [schema])
def custom_range_list_query(list_id, model_name):
    """Aggregate predictions for one model over each range in a custom range list.

    Uses a left outer join so ranges with no overlapping prediction still appear;
    RANGE_OPERATOR ('@>') keeps only predictions completely inside the range.

    :param list_id: custom_range_list.id selecting which ranges to query
    :param model_name: prediction model whose values are aggregated per range
    """
    return QueryPart("""select '' as name,
'range' || seq as common_name,
max(custom_range_list.chrom) as chrom,
'' as strand,
'' as gene_begin,
case WHEN max(value) > abs(min(value)) THEN
   round(max(value), 4)
ELSE
   round(min(value), 4)
end as max_value,
json_agg(json_build_object('value', round(value, 4), 'start', start_range, 'end', end_range)) as pred,
max(lower(custom_range_list.range)) as range_start,
max(upper(custom_range_list.range)) as range_end
from custom_range_list
left outer join prediction
on prediction.chrom = custom_range_list.chrom
and custom_range_list.range {} prediction.range
and model_name = %s
where
custom_range_list.id = %s
group by seq""".format(RANGE_OPERATOR), [model_name, list_id])
def select_prediction_values(table_name="gene_prediction", first_field="common_name"):
    """Build the SELECT head aggregating prediction rows per grouped gene/feature.

    :param table_name: table or view supplying the prediction rows
    :param first_field: leading selected/grouped column (e.g. common_name)
    """
    return _query_part("""select
{},
string_agg(name, '; ') as name,
case WHEN max(value) > abs(min(value)) THEN
   round(max(value), 4)
ELSE
   round(min(value), 4)
end as max_value,
max(chrom) as chrom,
max(strand) as strand,
max(gene_begin) as gene_begin,
json_agg(json_build_object('value', round(value, 4), 'start', start_range, 'end', end_range)) as pred
from {}""".format(first_field, table_name))
def alias_join_gene_prediction(comparison_fieldname):
    """Join predictions by gene name, also matching through known symbol aliases."""
    return _query_part("""left outer join gene_symbol_alias on upper(alias) = upper(gene_name)
 left outer join gene_prediction on upper({}) in (upper(symbol), upper(alias), upper(gene_name))""".format(comparison_fieldname))
def id_equals(id_value):
    """Filter rows by a single id value."""
    return QueryPart("""id = %s""", [id_value])
def gene_id_in_max_prediction_names():
    """Restrict to gene ids selected in the max_prediction_names CTE."""
    return _query_part("and gene_id in (select gene_id from max_prediction_names)")
def filter_gene_list(gene_list, model_name, upstream, downstream):
    """
    Build the filter keeping predictions fully inside a window around gene_begin.

    The window is strand-aware: on '+' it spans gene_begin-upstream..gene_begin+downstream,
    otherwise gene_begin-downstream..gene_begin+upstream (see the params.extend order).
    RANGE_OPERATOR ('@>') requires the prediction range to lie completely inside it.

    :param gene_list: gene list name to restrict to; empty or 'ALL' means no restriction
    :param model_name: prediction model to filter on
    :param upstream: bases upstream of the transcription start to include
    :param downstream: bases downstream of the transcription start to include
    """
    beginning_sql = ""
    params = []
    if gene_list and gene_list.upper() != 'ALL':
        beginning_sql = "gene_list = %s\nand\n"
        params.append(gene_list)
    params.extend([model_name, upstream, downstream, downstream, upstream])
    return QueryPart(beginning_sql + """model_name = %s
and
case strand when '+' then
int4range(gene_begin - %s, gene_begin + %s) {} int4range(start_range, end_range)
else
int4range(gene_begin - %s, gene_begin + %s) {} int4range(start_range, end_range)
end""".format(RANGE_OPERATOR, RANGE_OPERATOR), params)
def items_not_in_gene_list(list_id, gene_list_filter, custom_gene_name_type):
    """List names from a custom gene list that match no gene in the database.

    :param list_id: custom_gene_list.id whose entries are checked
    :param gene_list_filter: optional gene list restricting the genes searched ('ALL'/empty = none)
    :param custom_gene_name_type: truthy when the list stores common names instead of gene names
    """
    inner_filter = "upper(gene.name) = upper(custom_gene_list.gene_name)"
    if custom_gene_name_type:
        inner_filter = "upper(gene.common_name) = upper(custom_gene_list.gene_name)"
    sql = """select gene_name from custom_gene_list
where id = %s and not exists
 (select 1 from gene where ({})""".format(inner_filter)
    params = [list_id]
    if gene_list_filter and gene_list_filter.upper() != "ALL":
        sql += "and gene_list = %s"
        params.append(gene_list_filter)
    sql += ")"
    return QueryPart(sql, params)
def with_max_prediction_names():
    """Open a CTE of gene ids from gene_prediction; close it with end_with()."""
    return _query_part("""with max_prediction_names as (
select gene_id from gene_prediction""")
def end_with():
    """Close the parenthesis opened by with_max_prediction_names()."""
    return _query_part(")")
def where():
    """WHERE keyword."""
    return _query_part("where")
def value_greater_than(value):
    """Keep rows whose absolute prediction value exceeds `value`."""
    return QueryPart("and abs(value) > %s", [value])
def group_by_name():
    """GROUP BY name."""
    return _query_part("group by name")
def group_by_common_name_and_parts(first_field="common_name"):
    """Group by the leading field plus chrom, strand and gene_begin."""
    return _query_part("group by {}, chrom, strand, gene_begin".format(first_field))
def group_by_gene_id():
    """GROUP BY gene_id."""
    return _query_part("group by gene_id")
def order_by_gene_id():
    """ORDER BY gene_id."""
    return _query_part("order by gene_id")
def order_by_chrom_and_txstart():
    """Order by chromosome then transcription start."""
    return _query_part("order by chrom, gene_begin")
def order_by_name():
    """ORDER BY name."""
    return _query_part("order by name")
def order_by_gene_name():
    """Order by the aggregated gene name."""
    return _query_part("order by max(gene_name)")
def order_by_common_name_and_name():
    """Order by common_name then name."""
    return _query_part("order by common_name, name")
def order_by_seq():
    """Order custom ranges by their sequence number."""
    return _query_part("order by seq")
def order_by_max_value_desc():
    """Order by largest absolute prediction value, descending."""
    return _query_part("order by max(abs(value)) desc")
def order_by_max_value_desc_gene_id():
    """Order by largest absolute prediction value, descending, then gene_id."""
    return _query_part("order by max(abs(value)) desc, gene_id")
def limit_and_offset(limit, offset):
    """Paging clause: LIMIT/OFFSET."""
    return QueryPart("limit %s offset %s", [limit, offset])
def begin_count():
    """Open a wrapper counting rows of an inner query; close with end_count()."""
    return _query_part("select count(*) from (")
def end_count():
    """Close the wrapper opened by begin_count()."""
    return _query_part(") as foo")
def begin():
    """Start a transaction."""
    return _query_part("begin;")
def commit():
    """Commit the current transaction."""
    return _query_part(";commit;")
def and_sql():
    """AND keyword."""
    return _query_part("and")
| 27.131868 | 128 | 0.724382 | from pred.queries.querybuilder import QueryPart
RANGE_OPERATOR = '@>'
def _query_part(sql):
return QueryPart(sql, [])
def set_search_path(schema):
return QueryPart("SET search_path TO %s,public;", [schema])
def custom_range_list_query(list_id, model_name):
return QueryPart("""select '' as name,
'range' || seq as common_name,
max(custom_range_list.chrom) as chrom,
'' as strand,
'' as gene_begin,
case WHEN max(value) > abs(min(value)) THEN
round(max(value), 4)
ELSE
round(min(value), 4)
end as max_value,
json_agg(json_build_object('value', round(value, 4), 'start', start_range, 'end', end_range)) as pred,
max(lower(custom_range_list.range)) as range_start,
max(upper(custom_range_list.range)) as range_end
from custom_range_list
left outer join prediction
on prediction.chrom = custom_range_list.chrom
and custom_range_list.range {} prediction.range
and model_name = %s
where
custom_range_list.id = %s
group by seq""".format(RANGE_OPERATOR), [model_name, list_id])
def select_prediction_values(table_name="gene_prediction", first_field="common_name"):
return _query_part("""select
{},
string_agg(name, '; ') as name,
case WHEN max(value) > abs(min(value)) THEN
round(max(value), 4)
ELSE
round(min(value), 4)
end as max_value,
max(chrom) as chrom,
max(strand) as strand,
max(gene_begin) as gene_begin,
json_agg(json_build_object('value', round(value, 4), 'start', start_range, 'end', end_range)) as pred
from {}""".format(first_field, table_name))
def alias_join_gene_prediction(comparison_fieldname):
return _query_part("""left outer join gene_symbol_alias on upper(alias) = upper(gene_name)
left outer join gene_prediction on upper({}) in (upper(symbol), upper(alias), upper(gene_name))""".format(comparison_fieldname))
def id_equals(id_value):
return QueryPart("""id = %s""", [id_value])
def gene_id_in_max_prediction_names():
return _query_part("and gene_id in (select gene_id from max_prediction_names)")
def filter_gene_list(gene_list, model_name, upstream, downstream):
beginning_sql = ""
params = []
if gene_list and gene_list.upper() != 'ALL':
beginning_sql = "gene_list = %s\nand\n"
params.append(gene_list)
params.extend([model_name, upstream, downstream, downstream, upstream])
return QueryPart(beginning_sql + """model_name = %s
and
case strand when '+' then
int4range(gene_begin - %s, gene_begin + %s) {} int4range(start_range, end_range)
else
int4range(gene_begin - %s, gene_begin + %s) {} int4range(start_range, end_range)
end""".format(RANGE_OPERATOR, RANGE_OPERATOR), params)
def items_not_in_gene_list(list_id, gene_list_filter, custom_gene_name_type):
inner_filter = "upper(gene.name) = upper(custom_gene_list.gene_name)"
if custom_gene_name_type:
inner_filter = "upper(gene.common_name) = upper(custom_gene_list.gene_name)"
sql = """select gene_name from custom_gene_list
where id = %s and not exists
(select 1 from gene where ({})""".format(inner_filter)
params = [list_id]
if gene_list_filter and gene_list_filter.upper() != "ALL":
sql += "and gene_list = %s"
params.append(gene_list_filter)
sql += ")"
return QueryPart(sql, params)
def with_max_prediction_names():
return _query_part("""with max_prediction_names as (
select gene_id from gene_prediction""")
def end_with():
return _query_part(")")
def where():
return _query_part("where")
def value_greater_than(value):
return QueryPart("and abs(value) > %s", [value])
def group_by_name():
return _query_part("group by name")
def group_by_common_name_and_parts(first_field="common_name"):
return _query_part("group by {}, chrom, strand, gene_begin".format(first_field))
def group_by_gene_id():
return _query_part("group by gene_id")
def order_by_gene_id():
return _query_part("order by gene_id")
def order_by_chrom_and_txstart():
return _query_part("order by chrom, gene_begin")
def order_by_name():
return _query_part("order by name")
def order_by_gene_name():
return _query_part("order by max(gene_name)")
def order_by_common_name_and_name():
return _query_part("order by common_name, name")
def order_by_seq():
return _query_part("order by seq")
def order_by_max_value_desc():
return _query_part("order by max(abs(value)) desc")
def order_by_max_value_desc_gene_id():
return _query_part("order by max(abs(value)) desc, gene_id")
def limit_and_offset(limit, offset):
return QueryPart("limit %s offset %s", [limit, offset])
def begin_count():
return _query_part("select count(*) from (")
def end_count():
return _query_part(") as foo")
def begin():
return _query_part("begin;")
def commit():
return _query_part(";commit;")
def and_sql():
return _query_part("and")
| true | true |
1c2da523ab460b74b6fcc0dbaccbcf22925a498c | 232 | py | Python | tests/utils/http_utils.py | Sinkler/python-sdk-v2 | a1ad7cc9900f8adf967ca4dec0bb05d8eddc2999 | [
"MIT"
] | null | null | null | tests/utils/http_utils.py | Sinkler/python-sdk-v2 | a1ad7cc9900f8adf967ca4dec0bb05d8eddc2999 | [
"MIT"
] | null | null | null | tests/utils/http_utils.py | Sinkler/python-sdk-v2 | a1ad7cc9900f8adf967ca4dec0bb05d8eddc2999 | [
"MIT"
] | null | null | null | # coding: utf-8
import urllib3
from config import create_logger
logger = create_logger(__name__)
http = urllib3.PoolManager()
def do_get(url):
r = http.request('GET', url)
logger.info("%s %s", r.status, r._request_url)
| 16.571429 | 50 | 0.702586 |
import urllib3
from config import create_logger
logger = create_logger(__name__)
http = urllib3.PoolManager()
def do_get(url):
r = http.request('GET', url)
logger.info("%s %s", r.status, r._request_url)
| true | true |
1c2da67d0017c87f2d96b8ad92b048f1617fe228 | 6,814 | py | Python | kubernetes/client/models/v1_replication_controller_list.py | fooka03/python | 073cf4d89e532f92b57e8955b4efc3d5d5eb80cf | [
"Apache-2.0"
] | 2 | 2020-07-02T05:47:41.000Z | 2020-07-02T05:50:34.000Z | kubernetes/client/models/v1_replication_controller_list.py | fooka03/python | 073cf4d89e532f92b57e8955b4efc3d5d5eb80cf | [
"Apache-2.0"
] | 1 | 2021-03-25T23:44:49.000Z | 2021-03-25T23:44:49.000Z | k8sdeployment/k8sstat/python/kubernetes/client/models/v1_replication_controller_list.py | JeffYFHuang/gpuaccounting | afa934350ebbd0634beb60b9df4a147426ea0006 | [
"MIT"
] | 1 | 2021-10-13T17:45:37.000Z | 2021-10-13T17:45:37.000Z | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: v1.15.6
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class V1ReplicationControllerList(object):
    """Kubernetes ReplicationControllerList model (OpenAPI-generated schema).

    Holds a collection of V1ReplicationController items plus standard list
    metadata. The two class-level dictionaries mirror the OpenAPI spec and
    must stay in sync with it.
    """

    # Python attribute name -> declared OpenAPI type.
    openapi_types = {
        'api_version': 'str',
        'items': 'list[V1ReplicationController]',
        'kind': 'str',
        'metadata': 'V1ListMeta'
    }

    # Python attribute name -> JSON field name in the wire format.
    attribute_map = {
        'api_version': 'apiVersion',
        'items': 'items',
        'kind': 'kind',
        'metadata': 'metadata'
    }

    def __init__(self, api_version=None, items=None, kind=None, metadata=None):
        """Create the model; ``items`` is required, everything else optional."""
        self._api_version = None
        self._items = None
        self._kind = None
        self._metadata = None
        self.discriminator = None
        if api_version is not None:
            self.api_version = api_version
        self.items = items
        if kind is not None:
            self.kind = kind
        if metadata is not None:
            self.metadata = metadata

    @property
    def api_version(self):
        """str: versioned schema identifier of this representation."""
        return self._api_version

    @api_version.setter
    def api_version(self, api_version):
        """Set the apiVersion string."""
        self._api_version = api_version

    @property
    def items(self):
        """list[V1ReplicationController]: the replication controllers in this list."""
        return self._items

    @items.setter
    def items(self, items):
        """Set the item list; it is the only required field in the spec."""
        if items is None:
            raise ValueError("Invalid value for `items`, must not be `None`")
        self._items = items

    @property
    def kind(self):
        """str: REST resource kind represented by this object."""
        return self._kind

    @kind.setter
    def kind(self, kind):
        """Set the resource kind string."""
        self._kind = kind

    @property
    def metadata(self):
        """V1ListMeta: standard list metadata."""
        return self._metadata

    @metadata.setter
    def metadata(self, metadata):
        """Set the list metadata."""
        self._metadata = metadata

    def to_dict(self):
        """Return the model as a plain dict, serializing nested models recursively."""
        def _convert(value):
            # Recursively call to_dict() on anything that provides it.
            if isinstance(value, list):
                return [elem.to_dict() if hasattr(elem, "to_dict") else elem
                        for elem in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {key: val.to_dict() if hasattr(val, "to_dict") else val
                        for key, val in value.items()}
            return value

        return {attr: _convert(getattr(self, attr)) for attr in self.openapi_types}

    def to_str(self):
        """Return a pretty-printed string form of ``to_dict()``."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Equal when the other object is the same model class with equal attributes."""
        return (isinstance(other, V1ReplicationControllerList)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| 34.414141 | 295 | 0.632962 |
import pprint
import re
import six
class V1ReplicationControllerList(object):
openapi_types = {
'api_version': 'str',
'items': 'list[V1ReplicationController]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
def __init__(self, api_version=None, items=None, kind=None, metadata=None):
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.items = items
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
@property
def api_version(self):
return self._api_version
@api_version.setter
def api_version(self, api_version):
self._api_version = api_version
@property
def items(self):
return self._items
@items.setter
def items(self, items):
if items is None:
raise ValueError("Invalid value for `items`, must not be `None`")
self._items = items
@property
def kind(self):
return self._kind
@kind.setter
def kind(self, kind):
self._kind = kind
@property
def metadata(self):
return self._metadata
@metadata.setter
def metadata(self, metadata):
self._metadata = metadata
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, V1ReplicationControllerList):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
1c2da6f171cae7cb1045a0bd7c00a1fd34426c95 | 5,120 | py | Python | agents/DQN.py | manjavacas/drl-building | 6aaa117c0d02b0349af227939460adf31d8d40d9 | [
"MIT"
] | 4 | 2021-03-22T20:04:38.000Z | 2022-02-21T11:44:32.000Z | agents/DQN.py | manjavacas/drl-building | 6aaa117c0d02b0349af227939460adf31d8d40d9 | [
"MIT"
] | null | null | null | agents/DQN.py | manjavacas/drl-building | 6aaa117c0d02b0349af227939460adf31d8d40d9 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import gym
import energym
import argparse
import uuid
import mlflow
import numpy as np
from energym.utils.callbacks import LoggerCallback, LoggerEvalCallback
from energym.utils.wrappers import NormalizeObservation, LoggerWrapper
from stable_baselines3 import DQN
from stable_baselines3.common.callbacks import EvalCallback, BaseCallback, CallbackList
from stable_baselines3.common.vec_env import DummyVecEnv
parser = argparse.ArgumentParser()
parser.add_argument('--environment', '-env', type=str, default=None)
parser.add_argument('--episodes', '-ep', type=int, default=1)
parser.add_argument('--learning_rate', '-lr', type=float, default=0.0001)
parser.add_argument('--buffer_size', '-bf', type=int, default=1000000)
parser.add_argument('--learning_starts', '-ls', type=int, default=50000)
parser.add_argument('--batch_size', '-bs', type=int, default=32)
parser.add_argument('--tau', '-t', type=float, default=1.0)
parser.add_argument('--gamma', '-g', type=float, default=.99)
parser.add_argument('--train_freq', '-tf', type=int, default=4)
parser.add_argument('--gradient_steps', '-gs', type=int, default=1)
parser.add_argument('--target_update_interval', '-tu', type=int, default=10000)
parser.add_argument('--exploration_fraction', '-e', type=float, default=.1)
parser.add_argument('--exploration_initial_eps', '-ei', type=float, default=1.0)
parser.add_argument('--exploration_final_eps', '-ef', type=float, default=.05)
parser.add_argument('--max_grad_norm', '-m', type=float, default=10)
args = parser.parse_args()
# experiment ID
environment = args.environment
n_episodes = args.episodes
name = 'DQN-' + environment + '-' + str(n_episodes) + '-episodes'
with mlflow.start_run(run_name=name):
mlflow.log_param('env', environment)
mlflow.log_param('episodes', n_episodes)
mlflow.log_param('learning_rate', args.learning_rate)
mlflow.log_param('buffer_size', args.buffer_size)
mlflow.log_param('learning_starts', args.learning_starts)
mlflow.log_param('batch_size', args.batch_size)
mlflow.log_param('tau', args.tau)
mlflow.log_param('gamma', args.gamma)
mlflow.log_param('train_freq', args.train_freq)
mlflow.log_param('gradient_steps', args.gradient_steps)
mlflow.log_param('target_update_interval', args.target_update_interval)
mlflow.log_param('exploration_fraction', args.exploration_fraction)
mlflow.log_param('exploration_initial_eps', args.exploration_initial_eps)
mlflow.log_param('exploration_final_eps', args.exploration_final_eps)
mlflow.log_param('max_grad_norm', args.max_grad_norm)
env = gym.make(environment)
env = NormalizeObservation(LoggerWrapper(env))
#### TRAINING ####
# Build model
# model = DQN('MlpPolicy', env, verbose=1,
# learning_rate=args.learning_rate,
# buffer_size=args.buffer_size,
# learning_starts=args.learning_starts,
# batch_size=args.batch_size,
# tau=args.tau,
# gamma=args.gamma,
# train_freq=args.train_freq,
# gradient_steps=args.gradient_steps,
# target_update_interval=args.target_update_interval,
# exploration_fraction=args.exploration_fraction,
# exploration_initial_eps=args.exploration_initial_eps,
# exploration_final_eps=args.exploration_final_eps,
# max_grad_norm=args.max_grad_norm,
# tensorboard_log='./tensorboard_log/' + name)
# n_timesteps_episode = env.simulator._eplus_one_epi_len / \
# env.simulator._eplus_run_stepsize
# timesteps = n_episodes * n_timesteps_episode + 501
# env = DummyVecEnv([lambda: env])
# env.env_method('activate_logger')
# # Callbacks
# freq = 5 # evaluate every N episodes
# eval_callback = LoggerEvalCallback(env, best_model_save_path='./best_models/' + name + '/',
# log_path='./best_models/' + name + '/', eval_freq=n_timesteps_episode * freq,
# deterministic=True, render=False, n_eval_episodes=2)
# log_callback = LoggerCallback()
# callback = CallbackList([log_callback, eval_callback])
# # Training
# model.learn(total_timesteps=timesteps, callback=callback)
# model.save(name)
#### LOAD MODEL ####
model = DQN.load('best_models/' + name + '/best_model.zip')
for i in range(n_episodes - 1):
obs = env.reset()
rewards = []
done = False
current_month = 0
while not done:
a, _ = model.predict(obs)
obs, reward, done, info = env.step(a)
rewards.append(reward)
if info['month'] != current_month:
current_month = info['month']
print(info['month'], sum(rewards))
print('Episode ', i, 'Mean reward: ', np.mean(rewards), 'Cumulative reward: ', sum(rewards))
env.close()
mlflow.log_metric('mean_reward', np.mean(rewards))
mlflow.log_metric('cumulative_reward', sum(rewards))
mlflow.end_run() | 41.290323 | 118 | 0.67793 |
import gym
import energym
import argparse
import uuid
import mlflow
import numpy as np
from energym.utils.callbacks import LoggerCallback, LoggerEvalCallback
from energym.utils.wrappers import NormalizeObservation, LoggerWrapper
from stable_baselines3 import DQN
from stable_baselines3.common.callbacks import EvalCallback, BaseCallback, CallbackList
from stable_baselines3.common.vec_env import DummyVecEnv
parser = argparse.ArgumentParser()
parser.add_argument('--environment', '-env', type=str, default=None)
parser.add_argument('--episodes', '-ep', type=int, default=1)
parser.add_argument('--learning_rate', '-lr', type=float, default=0.0001)
parser.add_argument('--buffer_size', '-bf', type=int, default=1000000)
parser.add_argument('--learning_starts', '-ls', type=int, default=50000)
parser.add_argument('--batch_size', '-bs', type=int, default=32)
parser.add_argument('--tau', '-t', type=float, default=1.0)
parser.add_argument('--gamma', '-g', type=float, default=.99)
parser.add_argument('--train_freq', '-tf', type=int, default=4)
parser.add_argument('--gradient_steps', '-gs', type=int, default=1)
parser.add_argument('--target_update_interval', '-tu', type=int, default=10000)
parser.add_argument('--exploration_fraction', '-e', type=float, default=.1)
parser.add_argument('--exploration_initial_eps', '-ei', type=float, default=1.0)
parser.add_argument('--exploration_final_eps', '-ef', type=float, default=.05)
parser.add_argument('--max_grad_norm', '-m', type=float, default=10)
args = parser.parse_args()
environment = args.environment
n_episodes = args.episodes
name = 'DQN-' + environment + '-' + str(n_episodes) + '-episodes'
with mlflow.start_run(run_name=name):
mlflow.log_param('env', environment)
mlflow.log_param('episodes', n_episodes)
mlflow.log_param('learning_rate', args.learning_rate)
mlflow.log_param('buffer_size', args.buffer_size)
mlflow.log_param('learning_starts', args.learning_starts)
mlflow.log_param('batch_size', args.batch_size)
mlflow.log_param('tau', args.tau)
mlflow.log_param('gamma', args.gamma)
mlflow.log_param('train_freq', args.train_freq)
mlflow.log_param('gradient_steps', args.gradient_steps)
mlflow.log_param('target_update_interval', args.target_update_interval)
mlflow.log_param('exploration_fraction', args.exploration_fraction)
mlflow.log_param('exploration_initial_eps', args.exploration_initial_eps)
mlflow.log_param('exploration_final_eps', args.exploration_final_eps)
mlflow.log_param('max_grad_norm', args.max_grad_norm)
env = gym.make(environment)
env = NormalizeObservation(LoggerWrapper(env))
)
for i in range(n_episodes - 1):
obs = env.reset()
rewards = []
done = False
current_month = 0
while not done:
a, _ = model.predict(obs)
obs, reward, done, info = env.step(a)
rewards.append(reward)
if info['month'] != current_month:
current_month = info['month']
print(info['month'], sum(rewards))
print('Episode ', i, 'Mean reward: ', np.mean(rewards), 'Cumulative reward: ', sum(rewards))
env.close()
mlflow.log_metric('mean_reward', np.mean(rewards))
mlflow.log_metric('cumulative_reward', sum(rewards))
mlflow.end_run() | true | true |
1c2da91a4fcfe9df61ac196a1e59085d8e4a082e | 549 | py | Python | main.py | ivicel/steamkit-python | 0a3f250e432cf890965db5e7245841aa512bca22 | [
"Apache-2.0"
] | 5 | 2018-11-16T08:59:41.000Z | 2021-04-03T05:32:18.000Z | main.py | ivicel/steamkit-python | 0a3f250e432cf890965db5e7245841aa512bca22 | [
"Apache-2.0"
] | null | null | null | main.py | ivicel/steamkit-python | 0a3f250e432cf890965db5e7245841aa512bca22 | [
"Apache-2.0"
] | null | null | null |
import logging
from steam import SteamClient
from steam.base.msg.emsg import EMsg
logging.basicConfig(format="[%(levelname)s] %(asctime)s: %(name)s: %(message)s",
level=logging.DEBUG)
client = SteamClient()
@client.on(EMsg.ClientAccountInfo)
async def account_info(msg):
print(msg.body)
if __name__ == '__main__':
try:
client.login()
client.run_forever()
except KeyboardInterrupt:
logging.info('Waiting client to close')
client.close()
logging.info('Client closed')
| 18.931034 | 80 | 0.653916 |
import logging
from steam import SteamClient
from steam.base.msg.emsg import EMsg
logging.basicConfig(format="[%(levelname)s] %(asctime)s: %(name)s: %(message)s",
level=logging.DEBUG)
client = SteamClient()
@client.on(EMsg.ClientAccountInfo)
async def account_info(msg):
print(msg.body)
if __name__ == '__main__':
try:
client.login()
client.run_forever()
except KeyboardInterrupt:
logging.info('Waiting client to close')
client.close()
logging.info('Client closed')
| true | true |
1c2da93f0a175cd6cc180de3072f3bbc7b671a6f | 67,781 | py | Python | tests/components/cast/test_media_player.py | gregsheremeta/core | 8e39ba387d0fcbd8462fff76da4d64890bc4ec57 | [
"Apache-2.0"
] | null | null | null | tests/components/cast/test_media_player.py | gregsheremeta/core | 8e39ba387d0fcbd8462fff76da4d64890bc4ec57 | [
"Apache-2.0"
] | 4 | 2022-03-02T07:18:01.000Z | 2022-03-31T07:09:30.000Z | tests/components/cast/test_media_player.py | gregsheremeta/core | 8e39ba387d0fcbd8462fff76da4d64890bc4ec57 | [
"Apache-2.0"
] | null | null | null | """The tests for the Cast Media player platform."""
# pylint: disable=protected-access
from __future__ import annotations
import json
from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch
from uuid import UUID
import attr
import pychromecast
from pychromecast.const import CAST_TYPE_CHROMECAST, CAST_TYPE_GROUP
import pytest
import yarl
from homeassistant.components import media_player, tts
from homeassistant.components.cast import media_player as cast
from homeassistant.components.cast.media_player import ChromecastInfo
from homeassistant.components.media_player import BrowseMedia
from homeassistant.components.media_player.const import (
MEDIA_CLASS_APP,
MEDIA_CLASS_PLAYLIST,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SEEK,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.config import async_process_ha_core_config
from homeassistant.const import (
ATTR_ENTITY_ID,
CAST_APP_ID_HOMEASSISTANT_LOVELACE,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er, network
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry, assert_setup_component, mock_platform
from tests.components.media_player import common
# pylint: disable=invalid-name
FakeUUID = UUID("57355bce-9364-4aa6-ac1e-eb849dccf9e2")
FakeUUID2 = UUID("57355bce-9364-4aa6-ac1e-eb849dccf9e4")
FakeGroupUUID = UUID("57355bce-9364-4aa6-ac1e-eb849dccf9e3")
FAKE_HOST_SERVICE = pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_HOST, ("127.0.0.1", 8009)
)
FAKE_MDNS_SERVICE = pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_MDNS, "the-service"
)
def get_fake_chromecast(info: ChromecastInfo):
"""Generate a Fake Chromecast object with the specified arguments."""
mock = MagicMock(uuid=info.uuid)
mock.app_id = None
mock.media_controller.status = None
return mock
def get_fake_chromecast_info(
host="192.168.178.42", port=8009, service=None, uuid: UUID | None = FakeUUID
):
"""Generate a Fake ChromecastInfo with the specified arguments."""
if service is None:
service = pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_HOST, (host, port)
)
return ChromecastInfo(
cast_info=pychromecast.models.CastInfo(
services={service},
uuid=uuid,
model_name="Chromecast",
friendly_name="Speaker",
host=host,
port=port,
cast_type=CAST_TYPE_GROUP if port != 8009 else CAST_TYPE_CHROMECAST,
manufacturer="Nabu Casa",
)
)
def get_fake_zconf(host="192.168.178.42", port=8009):
"""Generate a Fake Zeroconf object with the specified arguments."""
parsed_addresses = MagicMock()
parsed_addresses.return_value = [host]
service_info = MagicMock(parsed_addresses=parsed_addresses, port=port)
zconf = MagicMock()
zconf.get_service_info.return_value = service_info
return zconf
async def async_setup_cast(hass, config=None):
"""Set up the cast platform."""
if config is None:
config = {}
data = {**{"ignore_cec": [], "known_hosts": [], "uuid": []}, **config}
with patch(
"homeassistant.helpers.entity_platform.EntityPlatform._async_schedule_add_entities"
) as add_entities:
entry = MockConfigEntry(data=data, domain="cast")
entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return add_entities
async def async_setup_cast_internal_discovery(hass, config=None):
"""Set up the cast platform and the discovery."""
browser = MagicMock(devices={}, zc={})
with patch(
"homeassistant.components.cast.discovery.pychromecast.discovery.CastBrowser",
return_value=browser,
) as cast_browser:
add_entities = await async_setup_cast(hass, config)
await hass.async_block_till_done()
await hass.async_block_till_done()
assert browser.start_discovery.call_count == 1
discovery_callback = cast_browser.call_args[0][0].add_cast
remove_callback = cast_browser.call_args[0][0].remove_cast
def discover_chromecast(
service: pychromecast.discovery.ServiceInfo, info: ChromecastInfo
) -> None:
"""Discover a chromecast device."""
browser.devices[info.uuid] = pychromecast.discovery.CastInfo(
{service},
info.uuid,
info.cast_info.model_name,
info.friendly_name,
info.cast_info.host,
info.cast_info.port,
info.cast_info.cast_type,
info.cast_info.manufacturer,
)
discovery_callback(info.uuid, "")
def remove_chromecast(service_name: str, info: ChromecastInfo) -> None:
"""Remove a chromecast device."""
remove_callback(
info.uuid,
service_name,
pychromecast.models.CastInfo(
set(),
info.uuid,
info.cast_info.model_name,
info.cast_info.friendly_name,
info.cast_info.host,
info.cast_info.port,
info.cast_info.cast_type,
info.cast_info.manufacturer,
),
)
return discover_chromecast, remove_chromecast, add_entities
async def async_setup_media_player_cast(hass: HomeAssistant, info: ChromecastInfo):
"""Set up the cast platform with async_setup_component."""
browser = MagicMock(devices={}, zc={})
chromecast = get_fake_chromecast(info)
zconf = get_fake_zconf(host=info.cast_info.host, port=info.cast_info.port)
with patch(
"homeassistant.components.cast.discovery.pychromecast.get_chromecast_from_cast_info",
return_value=chromecast,
) as get_chromecast, patch(
"homeassistant.components.cast.discovery.pychromecast.discovery.CastBrowser",
return_value=browser,
) as cast_browser, patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf,
):
await async_setup_component(
hass, "cast", {"cast": {"media_player": {"uuid": info.uuid}}}
)
await hass.async_block_till_done()
await hass.async_block_till_done()
discovery_callback = cast_browser.call_args[0][0].add_cast
browser.devices[info.uuid] = pychromecast.discovery.CastInfo(
{FAKE_MDNS_SERVICE},
info.uuid,
info.cast_info.model_name,
info.friendly_name,
info.cast_info.host,
info.cast_info.port,
info.cast_info.cast_type,
info.cast_info.manufacturer,
)
discovery_callback(info.uuid, FAKE_MDNS_SERVICE[1])
await hass.async_block_till_done()
await hass.async_block_till_done()
assert get_chromecast.call_count == 1
def discover_chromecast(service_name: str, info: ChromecastInfo) -> None:
"""Discover a chromecast device."""
browser.devices[info.uuid] = pychromecast.discovery.CastInfo(
{FAKE_MDNS_SERVICE},
info.uuid,
info.cast_info.model_name,
info.friendly_name,
info.cast_info.host,
info.cast_info.port,
info.cast_info.cast_type,
info.cast_info.manufacturer,
)
discovery_callback(info.uuid, FAKE_MDNS_SERVICE[1])
return chromecast, discover_chromecast
def get_status_callbacks(chromecast_mock, mz_mock=None):
"""Get registered status callbacks from the chromecast mock."""
status_listener = chromecast_mock.register_status_listener.call_args[0][0]
cast_status_cb = status_listener.new_cast_status
connection_listener = chromecast_mock.register_connection_listener.call_args[0][0]
conn_status_cb = connection_listener.new_connection_status
mc = chromecast_mock.socket_client.media_controller
media_status_cb = mc.register_status_listener.call_args[0][0].new_media_status
if not mz_mock:
return cast_status_cb, conn_status_cb, media_status_cb
mz_listener = mz_mock.register_listener.call_args[0][1]
group_media_status_cb = mz_listener.multizone_new_media_status
return cast_status_cb, conn_status_cb, media_status_cb, group_media_status_cb
async def test_start_discovery_called_once(hass, castbrowser_mock):
"""Test pychromecast.start_discovery called exactly once."""
await async_setup_cast(hass)
assert castbrowser_mock.return_value.start_discovery.call_count == 1
await async_setup_cast(hass)
assert castbrowser_mock.return_value.start_discovery.call_count == 1
async def test_internal_discovery_callback_fill_out_group_fail(
hass, get_multizone_status_mock
):
"""Test internal discovery automatically filling out information."""
discover_cast, _, _ = await async_setup_cast_internal_discovery(hass)
info = get_fake_chromecast_info(host="host1", port=12345, service=FAKE_MDNS_SERVICE)
zconf = get_fake_zconf(host="host1", port=12345)
full_info = attr.evolve(
info,
cast_info=pychromecast.discovery.CastInfo(
services=info.cast_info.services,
uuid=FakeUUID,
model_name="Chromecast",
friendly_name="Speaker",
host=info.cast_info.host,
port=info.cast_info.port,
cast_type=info.cast_info.cast_type,
manufacturer=info.cast_info.manufacturer,
),
is_dynamic_group=False,
)
get_multizone_status_mock.assert_not_called()
get_multizone_status_mock.return_value = None
with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf,
):
signal = MagicMock()
async_dispatcher_connect(hass, "cast_discovered", signal)
discover_cast(FAKE_MDNS_SERVICE, info)
await hass.async_block_till_done()
# when called with incomplete info, it should use HTTP to get missing
discover = signal.mock_calls[0][1][0]
assert discover == full_info
get_multizone_status_mock.assert_called_once()
async def test_internal_discovery_callback_fill_out_group(
hass, get_multizone_status_mock
):
"""Test internal discovery automatically filling out information."""
discover_cast, _, _ = await async_setup_cast_internal_discovery(hass)
info = get_fake_chromecast_info(host="host1", port=12345, service=FAKE_MDNS_SERVICE)
zconf = get_fake_zconf(host="host1", port=12345)
full_info = attr.evolve(
info,
cast_info=pychromecast.discovery.CastInfo(
services=info.cast_info.services,
uuid=FakeUUID,
model_name="Chromecast",
friendly_name="Speaker",
host=info.cast_info.host,
port=info.cast_info.port,
cast_type=info.cast_info.cast_type,
manufacturer=info.cast_info.manufacturer,
),
is_dynamic_group=False,
)
get_multizone_status_mock.assert_not_called()
get_multizone_status_mock.return_value = None
with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf,
):
signal = MagicMock()
async_dispatcher_connect(hass, "cast_discovered", signal)
discover_cast(FAKE_MDNS_SERVICE, info)
await hass.async_block_till_done()
# when called with incomplete info, it should use HTTP to get missing
discover = signal.mock_calls[0][1][0]
assert discover == full_info
get_multizone_status_mock.assert_called_once()
async def test_stop_discovery_called_on_stop(hass, castbrowser_mock):
"""Test pychromecast.stop_discovery called on shutdown."""
# start_discovery should be called with empty config
await async_setup_cast(hass, {})
assert castbrowser_mock.return_value.start_discovery.call_count == 1
# stop discovery should be called on shutdown
hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
await hass.async_block_till_done()
assert castbrowser_mock.return_value.stop_discovery.call_count == 1
async def test_create_cast_device_without_uuid(hass):
"""Test create a cast device with no UUId does not create an entity."""
info = get_fake_chromecast_info(uuid=None)
cast_device = cast._async_create_cast_device(hass, info)
assert cast_device is None
async def test_create_cast_device_with_uuid(hass):
"""Test create cast devices with UUID creates entities."""
added_casts = hass.data[cast.ADDED_CAST_DEVICES_KEY] = set()
info = get_fake_chromecast_info()
cast_device = cast._async_create_cast_device(hass, info)
assert cast_device is not None
assert info.uuid in added_casts
# Sending second time should not create new entity
cast_device = cast._async_create_cast_device(hass, info)
assert cast_device is None
async def test_manual_cast_chromecasts_uuid(hass):
"""Test only wanted casts are added for manual configuration."""
cast_1 = get_fake_chromecast_info(host="host_1", uuid=FakeUUID)
cast_2 = get_fake_chromecast_info(host="host_2", uuid=FakeUUID2)
zconf_1 = get_fake_zconf(host="host_1")
zconf_2 = get_fake_zconf(host="host_2")
# Manual configuration of media player with host "configured_host"
discover_cast, _, add_dev1 = await async_setup_cast_internal_discovery(
hass, config={"uuid": str(FakeUUID)}
)
with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf_2,
):
discover_cast(
pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_MDNS, "service2"
),
cast_2,
)
await hass.async_block_till_done()
await hass.async_block_till_done() # having tasks that add jobs
assert add_dev1.call_count == 0
with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf_1,
):
discover_cast(
pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_MDNS, "service1"
),
cast_1,
)
await hass.async_block_till_done()
await hass.async_block_till_done() # having tasks that add jobs
assert add_dev1.call_count == 1
async def test_auto_cast_chromecasts(hass):
"""Test all discovered casts are added for default configuration."""
cast_1 = get_fake_chromecast_info(host="some_host")
cast_2 = get_fake_chromecast_info(host="other_host", uuid=FakeUUID2)
zconf_1 = get_fake_zconf(host="some_host")
zconf_2 = get_fake_zconf(host="other_host")
# Manual configuration of media player with host "configured_host"
discover_cast, _, add_dev1 = await async_setup_cast_internal_discovery(hass)
with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf_1,
):
discover_cast(
pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_MDNS, "service2"
),
cast_2,
)
await hass.async_block_till_done()
await hass.async_block_till_done() # having tasks that add jobs
assert add_dev1.call_count == 1
with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf_2,
):
discover_cast(
pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_MDNS, "service1"
),
cast_1,
)
await hass.async_block_till_done()
await hass.async_block_till_done() # having tasks that add jobs
assert add_dev1.call_count == 2
async def test_discover_dynamic_group(
hass, get_multizone_status_mock, get_chromecast_mock, caplog
):
"""Test dynamic group does not create device or entity."""
cast_1 = get_fake_chromecast_info(host="host_1", port=23456, uuid=FakeUUID)
cast_2 = get_fake_chromecast_info(host="host_2", port=34567, uuid=FakeUUID2)
zconf_1 = get_fake_zconf(host="host_1", port=23456)
zconf_2 = get_fake_zconf(host="host_2", port=34567)
reg = er.async_get(hass)
# Fake dynamic group info
tmp1 = MagicMock()
tmp1.uuid = FakeUUID
tmp2 = MagicMock()
tmp2.uuid = FakeUUID2
get_multizone_status_mock.return_value.dynamic_groups = [tmp1, tmp2]
get_chromecast_mock.assert_not_called()
discover_cast, remove_cast, add_dev1 = await async_setup_cast_internal_discovery(
hass
)
# Discover cast service
with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf_1,
):
discover_cast(
pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_MDNS, "service"
),
cast_1,
)
await hass.async_block_till_done()
await hass.async_block_till_done() # having tasks that add jobs
get_chromecast_mock.assert_called()
get_chromecast_mock.reset_mock()
assert add_dev1.call_count == 0
assert reg.async_get_entity_id("media_player", "cast", cast_1.uuid) is None
# Discover other dynamic group cast service
with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf_2,
):
discover_cast(
pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_MDNS, "service"
),
cast_2,
)
await hass.async_block_till_done()
await hass.async_block_till_done() # having tasks that add jobs
get_chromecast_mock.assert_called()
get_chromecast_mock.reset_mock()
assert add_dev1.call_count == 0
assert reg.async_get_entity_id("media_player", "cast", cast_2.uuid) is None
# Get update for cast service
with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf_1,
):
discover_cast(
pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_MDNS, "service"
),
cast_1,
)
await hass.async_block_till_done()
await hass.async_block_till_done() # having tasks that add jobs
get_chromecast_mock.assert_not_called()
assert add_dev1.call_count == 0
assert reg.async_get_entity_id("media_player", "cast", cast_1.uuid) is None
# Remove cast service
assert "Disconnecting from chromecast" not in caplog.text
with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf_1,
):
remove_cast(
pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_MDNS, "service"
),
cast_1,
)
await hass.async_block_till_done()
await hass.async_block_till_done() # having tasks that add jobs
assert "Disconnecting from chromecast" in caplog.text
async def test_update_cast_chromecasts(hass):
"""Test discovery of same UUID twice only adds one cast."""
cast_1 = get_fake_chromecast_info(host="old_host")
cast_2 = get_fake_chromecast_info(host="new_host")
zconf_1 = get_fake_zconf(host="old_host")
zconf_2 = get_fake_zconf(host="new_host")
# Manual configuration of media player with host "configured_host"
discover_cast, _, add_dev1 = await async_setup_cast_internal_discovery(hass)
with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf_1,
):
discover_cast(
pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_MDNS, "service1"
),
cast_1,
)
await hass.async_block_till_done()
await hass.async_block_till_done() # having tasks that add jobs
assert add_dev1.call_count == 1
with patch(
"homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
return_value=zconf_2,
):
discover_cast(
pychromecast.discovery.ServiceInfo(
pychromecast.const.SERVICE_TYPE_MDNS, "service2"
),
cast_2,
)
await hass.async_block_till_done()
await hass.async_block_till_done() # having tasks that add jobs
assert add_dev1.call_count == 1
async def test_entity_availability(hass: HomeAssistant):
"""Test handling of connection status."""
entity_id = "media_player.speaker"
info = get_fake_chromecast_info()
chromecast, _ = await async_setup_media_player_cast(hass, info)
_, conn_status_cb, _ = get_status_callbacks(chromecast)
state = hass.states.get(entity_id)
assert state.state == "unavailable"
connection_status = MagicMock()
connection_status.status = "CONNECTED"
conn_status_cb(connection_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.state == "off"
connection_status = MagicMock()
connection_status.status = "DISCONNECTED"
conn_status_cb(connection_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.state == "unavailable"
@pytest.mark.parametrize("port,entry_type", ((8009, None),))
async def test_device_registry(hass: HomeAssistant, port, entry_type):
"""Test device registry integration."""
entity_id = "media_player.speaker"
reg = er.async_get(hass)
dev_reg = dr.async_get(hass)
info = get_fake_chromecast_info(port=port)
chromecast, _ = await async_setup_media_player_cast(hass, info)
chromecast.cast_type = pychromecast.const.CAST_TYPE_CHROMECAST
_, conn_status_cb, _ = get_status_callbacks(chromecast)
cast_entry = hass.config_entries.async_entries("cast")[0]
connection_status = MagicMock()
connection_status.status = "CONNECTED"
conn_status_cb(connection_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state is not None
assert state.name == "Speaker"
assert state.state == "off"
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
entity_entry = reg.async_get(entity_id)
assert entity_entry.device_id is not None
device_entry = dev_reg.async_get(entity_entry.device_id)
assert device_entry.entry_type == entry_type
# Check that the chromecast object is torn down when the device is removed
chromecast.disconnect.assert_not_called()
dev_reg.async_update_device(
device_entry.id, remove_config_entry_id=cast_entry.entry_id
)
await hass.async_block_till_done()
await hass.async_block_till_done()
chromecast.disconnect.assert_called_once()
async def test_entity_cast_status(hass: HomeAssistant):
"""Test handling of cast status."""
entity_id = "media_player.speaker"
reg = er.async_get(hass)
info = get_fake_chromecast_info()
chromecast, _ = await async_setup_media_player_cast(hass, info)
chromecast.cast_type = pychromecast.const.CAST_TYPE_CHROMECAST
cast_status_cb, conn_status_cb, _ = get_status_callbacks(chromecast)
connection_status = MagicMock()
connection_status.status = "CONNECTED"
conn_status_cb(connection_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state is not None
assert state.name == "Speaker"
assert state.state == "off"
assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
# No media status, pause, play, stop not supported
assert state.attributes.get("supported_features") == (
SUPPORT_PLAY_MEDIA
| SUPPORT_TURN_OFF
| SUPPORT_TURN_ON
| SUPPORT_VOLUME_MUTE
| SUPPORT_VOLUME_SET
)
cast_status = MagicMock()
cast_status.volume_level = 0.5
cast_status.volume_muted = False
cast_status_cb(cast_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
# Volume hidden if no app is active
assert state.attributes.get("volume_level") is None
assert not state.attributes.get("is_volume_muted")
chromecast.app_id = "1234"
cast_status = MagicMock()
cast_status.volume_level = 0.5
cast_status.volume_muted = False
cast_status_cb(cast_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.attributes.get("volume_level") == 0.5
assert not state.attributes.get("is_volume_muted")
cast_status = MagicMock()
cast_status.volume_level = 0.2
cast_status.volume_muted = True
cast_status_cb(cast_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.attributes.get("volume_level") == 0.2
assert state.attributes.get("is_volume_muted")
# Disable support for volume control
cast_status = MagicMock()
cast_status.volume_control_type = "fixed"
cast_status_cb(cast_status)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.attributes.get("supported_features") == (
SUPPORT_PLAY_MEDIA | SUPPORT_TURN_OFF | SUPPORT_TURN_ON
)
@pytest.mark.parametrize(
    "cast_type,supported_features,supported_features_no_media",
    [
        # Audio device: full media controls once a media status is received.
        (
            pychromecast.const.CAST_TYPE_AUDIO,
            SUPPORT_PAUSE
            | SUPPORT_PLAY
            | SUPPORT_PLAY_MEDIA
            | SUPPORT_STOP
            | SUPPORT_TURN_OFF
            | SUPPORT_TURN_ON
            | SUPPORT_VOLUME_MUTE
            | SUPPORT_VOLUME_SET,
            SUPPORT_PLAY_MEDIA
            | SUPPORT_TURN_OFF
            | SUPPORT_TURN_ON
            | SUPPORT_VOLUME_MUTE
            | SUPPORT_VOLUME_SET,
        ),
        # Video Chromecast: same feature sets as the audio device.
        (
            pychromecast.const.CAST_TYPE_CHROMECAST,
            SUPPORT_PAUSE
            | SUPPORT_PLAY
            | SUPPORT_PLAY_MEDIA
            | SUPPORT_STOP
            | SUPPORT_TURN_OFF
            | SUPPORT_TURN_ON
            | SUPPORT_VOLUME_MUTE
            | SUPPORT_VOLUME_SET,
            SUPPORT_PLAY_MEDIA
            | SUPPORT_TURN_OFF
            | SUPPORT_TURN_ON
            | SUPPORT_VOLUME_MUTE
            | SUPPORT_VOLUME_SET,
        ),
        # Speaker group: SUPPORT_TURN_ON is absent from both feature sets.
        (
            pychromecast.const.CAST_TYPE_GROUP,
            SUPPORT_PAUSE
            | SUPPORT_PLAY
            | SUPPORT_PLAY_MEDIA
            | SUPPORT_STOP
            | SUPPORT_TURN_OFF
            | SUPPORT_VOLUME_MUTE
            | SUPPORT_VOLUME_SET,
            SUPPORT_PLAY_MEDIA
            | SUPPORT_TURN_OFF
            | SUPPORT_VOLUME_MUTE
            | SUPPORT_VOLUME_SET,
        ),
    ],
)
async def test_supported_features(
    hass: HomeAssistant, cast_type, supported_features, supported_features_no_media
):
    """Test supported features for each cast type, with and without a media status."""
    entity_id = "media_player.speaker"
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    chromecast.cast_type = cast_type
    _, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    # Without a media status only the reduced feature set is advertised.
    assert state.attributes.get("supported_features") == supported_features_no_media
    media_status = MagicMock(images=None)
    media_status.supports_queue_next = False
    media_status.supports_seek = False
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    # Receiving a media status unlocks the full feature set for this cast type.
    assert state.attributes.get("supported_features") == supported_features
async def test_entity_browse_media(hass: HomeAssistant, hass_ws_client):
    """Test we can browse media; a video device is offered both video and audio."""
    await async_setup_component(hass, "media_source", {"media_source": {}})
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    # Browse the media-player root via the websocket API.
    client = await hass_ws_client()
    await client.send_json(
        {
            "id": 1,
            "type": "media_player/browse_media",
            "entity_id": "media_player.speaker",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    # The video item from the local media source is included.
    expected_child_1 = {
        "title": "Epic Sax Guy 10 Hours.mp4",
        "media_class": "video",
        "media_content_type": "video/mp4",
        "media_content_id": "media-source://media_source/local/Epic Sax Guy 10 Hours.mp4",
        "can_play": True,
        "can_expand": False,
        "children_media_class": None,
        "thumbnail": None,
    }
    assert expected_child_1 in response["result"]["children"]
    # The audio item is included as well.
    expected_child_2 = {
        "title": "test.mp3",
        "media_class": "music",
        "media_content_type": "audio/mpeg",
        "media_content_id": "media-source://media_source/local/test.mp3",
        "can_play": True,
        "can_expand": False,
        "children_media_class": None,
        "thumbnail": None,
    }
    assert expected_child_2 in response["result"]["children"]
@pytest.mark.parametrize(
    "cast_type",
    [pychromecast.const.CAST_TYPE_AUDIO, pychromecast.const.CAST_TYPE_GROUP],
)
async def test_entity_browse_media_audio_only(
    hass: HomeAssistant, hass_ws_client, cast_type
):
    """Test browsing media on an audio-only device filters out video items."""
    await async_setup_component(hass, "media_source", {"media_source": {}})
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    chromecast.cast_type = cast_type
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    # Browse the media-player root via the websocket API.
    client = await hass_ws_client()
    await client.send_json(
        {
            "id": 1,
            "type": "media_player/browse_media",
            "entity_id": "media_player.speaker",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    # Video content must NOT be offered to an audio-only device.
    expected_child_1 = {
        "title": "Epic Sax Guy 10 Hours.mp4",
        "media_class": "video",
        "media_content_type": "video/mp4",
        "media_content_id": "media-source://media_source/local/Epic Sax Guy 10 Hours.mp4",
        "can_play": True,
        "can_expand": False,
        "children_media_class": None,
        "thumbnail": None,
    }
    assert expected_child_1 not in response["result"]["children"]
    # Audio content is still offered.
    expected_child_2 = {
        "title": "test.mp3",
        "media_class": "music",
        "media_content_type": "audio/mpeg",
        "media_content_id": "media-source://media_source/local/test.mp3",
        "can_play": True,
        "can_expand": False,
        "children_media_class": None,
        "thumbnail": None,
    }
    assert expected_child_2 in response["result"]["children"]
async def test_entity_play_media(hass: HomeAssistant, quick_play_mock):
    """Test playing media routes through quick_play, not the media controller."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    # Play_media
    await hass.services.async_call(
        media_player.DOMAIN,
        media_player.SERVICE_PLAY_MEDIA,
        {
            ATTR_ENTITY_ID: entity_id,
            media_player.ATTR_MEDIA_CONTENT_TYPE: "audio",
            media_player.ATTR_MEDIA_CONTENT_ID: "best.mp3",
            media_player.ATTR_MEDIA_EXTRA: {"metadata": {"metadatatype": 3}},
        },
        blocking=True,
    )
    # The media controller is bypassed; quick_play is used with the extra metadata.
    chromecast.media_controller.play_media.assert_not_called()
    quick_play_mock.assert_called_once_with(
        chromecast,
        "default_media_receiver",
        {
            "media_id": "best.mp3",
            "media_type": "audio",
            "metadata": {"metadatatype": 3},
        },
    )
async def test_entity_play_media_cast(hass: HomeAssistant, quick_play_mock):
    """Test playing media with cast special features (app_id / app_name)."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    # Play_media - cast with app ID
    await common.async_play_media(hass, "cast", '{"app_id": "abc123"}', entity_id)
    chromecast.start_app.assert_called_once_with("abc123")
    # Play_media - cast with app name (quick play)
    await hass.services.async_call(
        media_player.DOMAIN,
        media_player.SERVICE_PLAY_MEDIA,
        {
            ATTR_ENTITY_ID: entity_id,
            media_player.ATTR_MEDIA_CONTENT_TYPE: "cast",
            media_player.ATTR_MEDIA_CONTENT_ID: '{"app_name":"youtube"}',
            media_player.ATTR_MEDIA_EXTRA: {"metadata": {"metadatatype": 3}},
        },
        blocking=True,
    )
    # extra metadata is forwarded to quick_play for the named app.
    quick_play_mock.assert_called_once_with(
        ANY, "youtube", {"metadata": {"metadatatype": 3}}
    )
async def test_entity_play_media_cast_invalid(hass, caplog, quick_play_mock):
    """Test playing media with invalid cast payloads (bad JSON, extra keys, unknown app)."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    # play_media - media_type cast with invalid JSON: raises and logs, nothing started.
    with pytest.raises(json.decoder.JSONDecodeError):
        await common.async_play_media(hass, "cast", '{"app_id": "abc123"', entity_id)
    assert "Invalid JSON in media_content_id" in caplog.text
    chromecast.start_app.assert_not_called()
    quick_play_mock.assert_not_called()
    # Play_media - media_type cast with extra keys: keys are ignored but the app starts.
    await common.async_play_media(
        hass, "cast", '{"app_id": "abc123", "extra": "data"}', entity_id
    )
    assert "Extra keys dict_keys(['extra']) were ignored" in caplog.text
    chromecast.start_app.assert_called_once_with("abc123")
    quick_play_mock.assert_not_called()
    # Play_media - media_type cast with unsupported app: quick_play raises internally.
    quick_play_mock.side_effect = NotImplementedError()
    await common.async_play_media(hass, "cast", '{"app_name": "unknown"}', entity_id)
    quick_play_mock.assert_called_once_with(ANY, "unknown", {})
    assert "App unknown not supported" in caplog.text
async def test_entity_play_media_sign_URL(hass: HomeAssistant, quick_play_mock):
    """Test relative media URLs are made absolute and signed before casting."""
    entity_id = "media_player.speaker"
    await async_process_ha_core_config(
        hass,
        {"internal_url": "http://example.com:8123"},
    )
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    # Play_media
    await common.async_play_media(hass, "audio", "/best.mp3", entity_id)
    quick_play_mock.assert_called_once_with(
        chromecast, "default_media_receiver", {"media_id": ANY, "media_type": "audio"}
    )
    # The relative path is expanded against internal_url and carries an auth signature.
    assert quick_play_mock.call_args[0][2]["media_id"].startswith(
        "http://example.com:8123/best.mp3?authSig="
    )
async def test_entity_media_content_type(hass: HomeAssistant):
    """Test media_content_type attribute for the various media status flags."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    # No flag set -> no media_content_type.
    media_status = MagicMock(images=None)
    media_status.media_is_movie = False
    media_status.media_is_musictrack = False
    media_status.media_is_tvshow = False
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("media_content_type") is None
    media_status.media_is_tvshow = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("media_content_type") == "tvshow"
    media_status.media_is_tvshow = False
    media_status.media_is_musictrack = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("media_content_type") == "music"
    # Both music and movie flags set: movie wins.
    media_status.media_is_musictrack = True
    media_status.media_is_movie = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("media_content_type") == "movie"
async def test_entity_control(hass: HomeAssistant):
    """Test various device and media controls.

    Queue prev/next and seek are only forwarded to the media controller after
    a media status advertises support for them.
    """
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    chromecast.cast_type = pychromecast.const.CAST_TYPE_CHROMECAST
    _, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)
    # Fake connection status
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    # Fake media status
    media_status = MagicMock(images=None)
    media_status.supports_queue_next = False
    media_status.supports_seek = False
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "playing"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    # Queue and seek support not advertised yet -> basic feature set only.
    assert state.attributes.get("supported_features") == (
        SUPPORT_PAUSE
        | SUPPORT_PLAY
        | SUPPORT_PLAY_MEDIA
        | SUPPORT_STOP
        | SUPPORT_TURN_OFF
        | SUPPORT_TURN_ON
        | SUPPORT_VOLUME_MUTE
        | SUPPORT_VOLUME_SET
    )
    # Turn on: casts the splash screen image.
    await common.async_turn_on(hass, entity_id)
    chromecast.play_media.assert_called_once_with(
        "https://www.home-assistant.io/images/cast/splash.png", "image/png"
    )
    chromecast.quit_app.reset_mock()
    # Turn off
    await common.async_turn_off(hass, entity_id)
    chromecast.quit_app.assert_called_once_with()
    # Mute
    await common.async_mute_volume(hass, True, entity_id)
    chromecast.set_volume_muted.assert_called_once_with(True)
    # Volume
    await common.async_set_volume_level(hass, 0.33, entity_id)
    chromecast.set_volume.assert_called_once_with(0.33)
    # Media play
    await common.async_media_play(hass, entity_id)
    chromecast.media_controller.play.assert_called_once_with()
    # Media pause
    await common.async_media_pause(hass, entity_id)
    chromecast.media_controller.pause.assert_called_once_with()
    # Media previous: ignored while queue support is not advertised.
    await common.async_media_previous_track(hass, entity_id)
    chromecast.media_controller.queue_prev.assert_not_called()
    # Media next: ignored while queue support is not advertised.
    await common.async_media_next_track(hass, entity_id)
    chromecast.media_controller.queue_next.assert_not_called()
    # Media seek: ignored while seek support is not advertised.
    await common.async_media_seek(hass, 123, entity_id)
    chromecast.media_controller.seek.assert_not_called()
    # Enable support for queue and seek
    media_status = MagicMock(images=None)
    media_status.supports_queue_next = True
    media_status.supports_seek = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("supported_features") == (
        SUPPORT_PAUSE
        | SUPPORT_PLAY
        | SUPPORT_PLAY_MEDIA
        | SUPPORT_STOP
        | SUPPORT_TURN_OFF
        | SUPPORT_TURN_ON
        | SUPPORT_PREVIOUS_TRACK
        | SUPPORT_NEXT_TRACK
        | SUPPORT_SEEK
        | SUPPORT_VOLUME_MUTE
        | SUPPORT_VOLUME_SET
    )
    # Media previous: now forwarded to the media controller.
    await common.async_media_previous_track(hass, entity_id)
    chromecast.media_controller.queue_prev.assert_called_once_with()
    # Media next
    await common.async_media_next_track(hass, entity_id)
    chromecast.media_controller.queue_next.assert_called_once_with()
    # Media seek
    await common.async_media_seek(hass, 123, entity_id)
    chromecast.media_controller.seek.assert_called_once_with(123)
# Some smart TV's with Google TV report "Netflix", not the Netflix app's ID
@pytest.mark.parametrize(
    "app_id, state_no_media",
    [(pychromecast.APP_YOUTUBE, "idle"), ("Netflix", "playing")],
)
async def test_entity_media_states(hass: HomeAssistant, app_id, state_no_media):
    """Test various entity media states as cast and media statuses change."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    cast_status_cb, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    # App id updated, but no media status
    chromecast.app_id = app_id
    cast_status = MagicMock()
    cast_status_cb(cast_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == state_no_media
    # Got media status
    media_status = MagicMock(images=None)
    media_status.player_is_playing = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
    media_status.player_is_playing = False
    media_status.player_is_paused = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "paused"
    media_status.player_is_paused = False
    media_status.player_is_idle = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "idle"
    # No media status, app is still running
    media_status_cb(None)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == state_no_media
    # App no longer running
    chromecast.app_id = pychromecast.IDLE_APP_ID
    cast_status = MagicMock()
    cast_status_cb(cast_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "off"
    # No cast status
    chromecast.is_idle = False
    cast_status_cb(None)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "unknown"
async def test_entity_media_states_lovelace_app(hass: HomeAssistant):
    """Test various entity media states when the lovelace app is active.

    While the Lovelace dashboard app is cast, the entity reports "playing"
    regardless of the underlying media status.
    """
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    cast_status_cb, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    chromecast.app_id = CAST_APP_ID_HOMEASSISTANT_LOVELACE
    cast_status = MagicMock()
    cast_status_cb(cast_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
    # Only a reduced feature set is advertised while Lovelace is active.
    assert state.attributes.get("supported_features") == (
        SUPPORT_PLAY_MEDIA | SUPPORT_TURN_OFF | SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_SET
    )
    media_status = MagicMock(images=None)
    media_status.player_is_playing = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
    # Paused media status is still reported as "playing".
    media_status.player_is_playing = False
    media_status.player_is_paused = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
    # Idle media status is still reported as "playing".
    media_status.player_is_paused = False
    media_status.player_is_idle = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
    # Device goes idle once the app is gone.
    chromecast.app_id = pychromecast.IDLE_APP_ID
    media_status.player_is_idle = False
    chromecast.is_idle = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "off"
    chromecast.is_idle = False
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "unknown"
async def test_group_media_states(hass, mz_mock):
    """Test media states are read from group if entity has no state."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    # The multizone mock adds a fourth callback for group media status updates.
    _, conn_status_cb, media_status_cb, group_media_status_cb = get_status_callbacks(
        chromecast, mz_mock
    )
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    group_media_status = MagicMock(images=None)
    player_media_status = MagicMock(images=None)
    # Player has no state, group is playing -> Should report 'playing'
    group_media_status.player_is_playing = True
    group_media_status_cb(str(FakeGroupUUID), group_media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
    # Player is paused, group is playing -> Should report 'paused'
    player_media_status.player_is_playing = False
    player_media_status.player_is_paused = True
    media_status_cb(player_media_status)
    # NOTE(review): block_till_done is called twice here, presumably to let a
    # second round of state propagation settle — confirm whether both are needed.
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "paused"
    # Player is in unknown state, group is playing -> Should report 'playing'
    player_media_status.player_state = "UNKNOWN"
    media_status_cb(player_media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
async def test_group_media_control(hass, mz_mock, quick_play_mock):
    """Test media controls are handled by group if entity has no state."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    # The multizone mock adds a fourth callback for group media status updates.
    _, conn_status_cb, media_status_cb, group_media_status_cb = get_status_callbacks(
        chromecast, mz_mock
    )
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    group_media_status = MagicMock(images=None)
    player_media_status = MagicMock(images=None)
    # Player has no state, group is playing -> Should forward calls to group
    group_media_status.player_is_playing = True
    group_media_status_cb(str(FakeGroupUUID), group_media_status)
    await common.async_media_play(hass, entity_id)
    grp_media = mz_mock.get_multizone_mediacontroller(str(FakeGroupUUID))
    assert grp_media.play.called
    assert not chromecast.media_controller.play.called
    # Player is paused, group is playing -> Should not forward
    player_media_status.player_is_playing = False
    player_media_status.player_is_paused = True
    media_status_cb(player_media_status)
    await common.async_media_pause(hass, entity_id)
    grp_media = mz_mock.get_multizone_mediacontroller(str(FakeGroupUUID))
    assert not grp_media.pause.called
    assert chromecast.media_controller.pause.called
    # Player is in unknown state, group is playing -> Should forward to group
    player_media_status.player_state = "UNKNOWN"
    media_status_cb(player_media_status)
    await common.async_media_stop(hass, entity_id)
    grp_media = mz_mock.get_multizone_mediacontroller(str(FakeGroupUUID))
    assert grp_media.stop.called
    assert not chromecast.media_controller.stop.called
    # Verify play_media is not forwarded; it always targets this device via quick_play.
    await common.async_play_media(hass, "music", "best.mp3", entity_id)
    assert not grp_media.play_media.called
    assert not chromecast.media_controller.play_media.called
    quick_play_mock.assert_called_once_with(
        chromecast,
        "default_media_receiver",
        {"media_id": "best.mp3", "media_type": "music"},
    )
async def test_failed_cast_on_idle(hass, caplog):
    """Test no warning is logged unless the player went idle with reason "ERROR"."""
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)
    # Not idle: no warning even though the idle reason is "ERROR".
    media_status = MagicMock(images=None)
    media_status.player_is_idle = False
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert "Failed to cast media" not in caplog.text
    # Idle for a reason other than "ERROR": still no warning.
    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "Other"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert "Failed to cast media" not in caplog.text
    # Idle with reason "ERROR": the failure is logged.
    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert "Failed to cast media http://example.com:8123/tts.mp3." in caplog.text
async def test_failed_cast_other_url(hass, caplog):
    """Test warning when casting from a URL not matching any configured base URL fails."""
    with assert_setup_component(1, tts.DOMAIN):
        assert await async_setup_component(
            hass,
            tts.DOMAIN,
            {tts.DOMAIN: {"platform": "demo", "base_url": "http://example.local:8123"}},
        )
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)
    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    # example.com does not match the configured tts base_url (example.local),
    # so the warning carries no "from ..." hint.
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert "Failed to cast media http://example.com:8123/tts.mp3." in caplog.text
async def test_failed_cast_internal_url(hass, caplog):
    """Test warning when casting from internal_url fails."""
    await async_process_ha_core_config(
        hass,
        {"internal_url": "http://example.local:8123"},
    )
    with assert_setup_component(1, tts.DOMAIN):
        assert await async_setup_component(
            hass, tts.DOMAIN, {tts.DOMAIN: {"platform": "demo"}}
        )
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)
    # Idle with reason "ERROR" triggers the failed-cast warning.
    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.local:8123/tts.mp3"
    media_status_cb(media_status)
    # The URL matches internal_url, so the warning names it as the source.
    assert (
        "Failed to cast media http://example.local:8123/tts.mp3 from internal_url"
        in caplog.text
    )
async def test_failed_cast_external_url(hass, caplog):
    """Test warning when casting from external_url fails."""
    await async_process_ha_core_config(
        hass,
        {"external_url": "http://example.com:8123"},
    )
    with assert_setup_component(1, tts.DOMAIN):
        assert await async_setup_component(
            hass,
            tts.DOMAIN,
            {tts.DOMAIN: {"platform": "demo", "base_url": "http://example.com:8123"}},
        )
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)
    # Idle with reason "ERROR" triggers the failed-cast warning.
    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    # The URL matches external_url, so the warning names it as the source.
    assert (
        "Failed to cast media http://example.com:8123/tts.mp3 from external_url"
        in caplog.text
    )
async def test_failed_cast_tts_base_url(hass, caplog):
    """Test warning when casting from tts.base_url fails."""
    with assert_setup_component(1, tts.DOMAIN):
        assert await async_setup_component(
            hass,
            tts.DOMAIN,
            {tts.DOMAIN: {"platform": "demo", "base_url": "http://example.local:8123"}},
        )
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)
    # Idle with reason "ERROR" triggers the failed-cast warning.
    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.local:8123/tts.mp3"
    media_status_cb(media_status)
    # The URL matches the configured tts base_url, so the warning names it.
    assert (
        "Failed to cast media http://example.local:8123/tts.mp3 from tts.base_url"
        in caplog.text
    )
async def test_disconnect_on_stop(hass: HomeAssistant):
    """The cast socket client disconnects when Home Assistant stops."""
    cast_info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, cast_info)
    # Firing the stop event must tear down the socket connection exactly once.
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
    await hass.async_block_till_done()
    assert chromecast.disconnect.call_count == 1
async def test_entry_setup_no_config(hass: HomeAssistant):
    """Setting up without any YAML config creates no config entry."""
    await async_setup_component(hass, "cast", {})
    await hass.async_block_till_done()
    entries = hass.config_entries.async_entries("cast")
    assert not entries
async def test_entry_setup_empty_config(hass: HomeAssistant):
    """A deprecated empty YAML config creates an entry with empty lists."""
    await async_setup_component(hass, "cast", {"cast": {}})
    await hass.async_block_till_done()
    entry = hass.config_entries.async_entries("cast")[0]
    # No media_player config -> both imported lists are empty.
    assert entry.data["uuid"] == []
    assert entry.data["ignore_cec"] == []
async def test_entry_setup_single_config(hass: HomeAssistant):
    """Deprecated YAML with a single media_player dict is imported correctly."""
    yaml_config = {"cast": {"media_player": {"uuid": "bla", "ignore_cec": "cast1"}}}
    await async_setup_component(hass, "cast", yaml_config)
    await hass.async_block_till_done()
    entry = hass.config_entries.async_entries("cast")[0]
    # Scalar YAML values are normalized to single-element lists.
    assert entry.data["uuid"] == ["bla"]
    assert entry.data["ignore_cec"] == ["cast1"]
    # The ignore_cec setting is also pushed down into pychromecast.
    assert pychromecast.IGNORE_CEC == ["cast1"]
async def test_entry_setup_list_config(hass: HomeAssistant):
    """Deprecated YAML with a list of media_players is merged into one entry."""
    yaml_config = {
        "cast": {
            "media_player": [
                {"uuid": "bla", "ignore_cec": "cast1"},
                {"uuid": "blu", "ignore_cec": ["cast2", "cast3"]},
            ]
        }
    }
    await async_setup_component(hass, "cast", yaml_config)
    await hass.async_block_till_done()
    entry = hass.config_entries.async_entries("cast")[0]
    # Values from both dicts are merged; ordering is not guaranteed.
    assert set(entry.data["uuid"]) == {"bla", "blu"}
    assert set(entry.data["ignore_cec"]) == {"cast1", "cast2", "cast3"}
    # The merged ignore_cec list is propagated to pychromecast as well.
    assert set(pychromecast.IGNORE_CEC) == {"cast1", "cast2", "cast3"}
async def test_invalid_cast_platform(hass: HomeAssistant, caplog):
    """A cast platform lacking the required methods is rejected with a log message."""
    bad_platform = Mock()
    # Strip the attributes a valid cast platform must provide.
    del bad_platform.async_get_media_browser_root_object
    del bad_platform.async_browse_media
    del bad_platform.async_play_media
    mock_platform(hass, "test.cast", bad_platform)
    await async_setup_component(hass, "test", {"test": {}})
    await hass.async_block_till_done()
    info = get_fake_chromecast_info()
    await async_setup_media_player_cast(hass, info)
    assert "Invalid cast platform <Mock id" in caplog.text
async def test_cast_platform_play_media(hass: HomeAssistant, quick_play_mock, caplog):
    """Test we can play media through a cast platform.

    The platform's async_play_media returns True when it handled the request;
    a False return falls back to the default quick_play path.
    """
    entity_id = "media_player.speaker"
    # Closure flag toggled later in the test to switch the platform's answer.
    _can_play = True

    def can_play(*args):
        return _can_play

    cast_platform_mock = Mock(
        async_get_media_browser_root_object=AsyncMock(return_value=[]),
        async_browse_media=AsyncMock(return_value=None),
        async_play_media=AsyncMock(side_effect=can_play),
    )
    mock_platform(hass, "test.cast", cast_platform_mock)
    await async_setup_component(hass, "test", {"test": {}})
    await hass.async_block_till_done()
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    assert "Invalid cast platform <Mock id" not in caplog.text
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Simulate the socket client connecting to the device.
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    # This will play using the cast platform
    await hass.services.async_call(
        media_player.DOMAIN,
        media_player.SERVICE_PLAY_MEDIA,
        {
            ATTR_ENTITY_ID: entity_id,
            media_player.ATTR_MEDIA_CONTENT_TYPE: "audio",
            media_player.ATTR_MEDIA_CONTENT_ID: "best.mp3",
            media_player.ATTR_MEDIA_EXTRA: {"metadata": {"metadatatype": 3}},
        },
        blocking=True,
    )
    # Assert the media player attempt to play media through the cast platform
    cast_platform_mock.async_play_media.assert_called_once_with(
        hass, entity_id, chromecast, "audio", "best.mp3"
    )
    # Assert pychromecast is not used to play media
    chromecast.media_controller.play_media.assert_not_called()
    quick_play_mock.assert_not_called()
    # This will not play using the cast platform
    _can_play = False
    cast_platform_mock.async_play_media.reset_mock()
    await hass.services.async_call(
        media_player.DOMAIN,
        media_player.SERVICE_PLAY_MEDIA,
        {
            ATTR_ENTITY_ID: entity_id,
            media_player.ATTR_MEDIA_CONTENT_TYPE: "audio",
            media_player.ATTR_MEDIA_CONTENT_ID: "best.mp3",
            media_player.ATTR_MEDIA_EXTRA: {"metadata": {"metadatatype": 3}},
        },
        blocking=True,
    )
    # Assert the media player attempt to play media through the cast platform
    cast_platform_mock.async_play_media.assert_called_once_with(
        hass, entity_id, chromecast, "audio", "best.mp3"
    )
    # Assert pychromecast is used to play media
    chromecast.media_controller.play_media.assert_not_called()
    quick_play_mock.assert_called()
async def test_cast_platform_browse_media(hass: HomeAssistant, hass_ws_client):
    """Test we can browse media through a cast platform."""
    cast_platform_mock = Mock(
        async_get_media_browser_root_object=AsyncMock(
            return_value=[
                BrowseMedia(
                    title="Spotify",
                    media_class=MEDIA_CLASS_APP,
                    media_content_id="",
                    media_content_type="spotify",
                    thumbnail="https://brands.home-assistant.io/_/spotify/logo.png",
                    can_play=False,
                    can_expand=True,
                )
            ]
        ),
        async_browse_media=AsyncMock(
            return_value=BrowseMedia(
                title="Spotify Favourites",
                media_class=MEDIA_CLASS_PLAYLIST,
                media_content_id="",
                media_content_type="spotify",
                can_play=True,
                can_expand=False,
            )
        ),
        async_play_media=AsyncMock(return_value=False),
    )
    mock_platform(hass, "test.cast", cast_platform_mock)
    await async_setup_component(hass, "test", {"test": {}})
    await async_setup_component(hass, "media_source", {"media_source": {}})
    await hass.async_block_till_done()
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Bring Chromecast online
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    # Browse the root: the platform's root object must be among the children
    client = await hass_ws_client()
    await client.send_json(
        {
            "id": 1,
            "type": "media_player/browse_media",
            "entity_id": "media_player.speaker",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    expected_child = {
        "title": "Spotify",
        "media_class": "app",
        "media_content_type": "spotify",
        "media_content_id": "",
        "can_play": False,
        "can_expand": True,
        "children_media_class": None,
        "thumbnail": "https://brands.home-assistant.io/_/spotify/logo.png",
    }
    assert expected_child in response["result"]["children"]
    # Browse into the platform item: served by the platform's async_browse_media
    client = await hass_ws_client()
    await client.send_json(
        {
            "id": 2,
            "type": "media_player/browse_media",
            "entity_id": "media_player.speaker",
            "media_content_id": "",
            "media_content_type": "spotify",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    expected_response = {
        "title": "Spotify Favourites",
        "media_class": "playlist",
        "media_content_type": "spotify",
        "media_content_id": "",
        "can_play": True,
        "can_expand": False,
        "children_media_class": None,
        "thumbnail": None,
        "children": [],
    }
    assert response["result"] == expected_response
async def test_cast_platform_play_media_local_media(
    hass: HomeAssistant, quick_play_mock, caplog
):
    """Test we process data when playing local media."""
    entity_id = "media_player.speaker"
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Bring Chromecast online
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    # Play a relative local URL; the integration must process it
    await hass.services.async_call(
        media_player.DOMAIN,
        media_player.SERVICE_PLAY_MEDIA,
        {
            ATTR_ENTITY_ID: entity_id,
            media_player.ATTR_MEDIA_CONTENT_TYPE: "application/vnd.apple.mpegurl",
            media_player.ATTR_MEDIA_CONTENT_ID: "/api/hls/bla/master_playlist.m3u8",
        },
        blocking=True,
    )
    await hass.async_block_till_done()
    # Assert we added extra play information
    quick_play_mock.assert_called()
    app_data = quick_play_mock.call_args[0][2]
    # The relative URL must be made absolute and signed with authSig
    assert not app_data["media_id"].startswith("/")
    assert "authSig" in yarl.URL(app_data["media_id"]).query
    assert app_data["media_type"] == "application/vnd.apple.mpegurl"
    assert app_data["stream_type"] == "LIVE"
    assert app_data["media_info"] == {
        "hlsVideoSegmentFormat": "fmp4",
    }
    quick_play_mock.reset_mock()
    # Test not appending if we have a signature
    await hass.services.async_call(
        media_player.DOMAIN,
        media_player.SERVICE_PLAY_MEDIA,
        {
            ATTR_ENTITY_ID: entity_id,
            media_player.ATTR_MEDIA_CONTENT_TYPE: "application/vnd.apple.mpegurl",
            media_player.ATTR_MEDIA_CONTENT_ID: f"{network.get_url(hass)}/api/hls/bla/master_playlist.m3u8?token=bla",
        },
        blocking=True,
    )
    await hass.async_block_till_done()
    # Assert we added extra play information
    quick_play_mock.assert_called()
    app_data = quick_play_mock.call_args[0][2]
    # No authSig appended
    assert (
        app_data["media_id"]
        == f"{network.get_url(hass)}/api/hls/bla/master_playlist.m3u8?token=bla"
    )
| 35.065184 | 118 | 0.695682 |
from __future__ import annotations
import json
from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch
from uuid import UUID
import attr
import pychromecast
from pychromecast.const import CAST_TYPE_CHROMECAST, CAST_TYPE_GROUP
import pytest
import yarl
from homeassistant.components import media_player, tts
from homeassistant.components.cast import media_player as cast
from homeassistant.components.cast.media_player import ChromecastInfo
from homeassistant.components.media_player import BrowseMedia
from homeassistant.components.media_player.const import (
MEDIA_CLASS_APP,
MEDIA_CLASS_PLAYLIST,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SEEK,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.config import async_process_ha_core_config
from homeassistant.const import (
ATTR_ENTITY_ID,
CAST_APP_ID_HOMEASSISTANT_LOVELACE,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er, network
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry, assert_setup_component, mock_platform
from tests.components.media_player import common
# Stable UUIDs so discovery and entity-registry assertions are deterministic.
FakeUUID = UUID("57355bce-9364-4aa6-ac1e-eb849dccf9e2")
FakeUUID2 = UUID("57355bce-9364-4aa6-ac1e-eb849dccf9e4")
FakeGroupUUID = UUID("57355bce-9364-4aa6-ac1e-eb849dccf9e3")
# Canned pychromecast discovery services shared by the tests below.
FAKE_HOST_SERVICE = pychromecast.discovery.ServiceInfo(
    pychromecast.const.SERVICE_TYPE_HOST, ("127.0.0.1", 8009)
)
FAKE_MDNS_SERVICE = pychromecast.discovery.ServiceInfo(
    pychromecast.const.SERVICE_TYPE_MDNS, "the-service"
)
def get_fake_chromecast(info: ChromecastInfo):
    """Generate a fake Chromecast mock for the given discovery info."""
    # No running app and no media status: the entity will report "off".
    cast_mock = MagicMock(uuid=info.uuid, app_id=None)
    cast_mock.media_controller.status = None
    return cast_mock
def get_fake_chromecast_info(
    host="192.168.178.42", port=8009, service=None, uuid: UUID | None = FakeUUID
):
    """Generate a fake ChromecastInfo with the specified arguments."""
    if service is None:
        # Default to a plain host-based discovery service.
        service = pychromecast.discovery.ServiceInfo(
            pychromecast.const.SERVICE_TYPE_HOST, (host, port)
        )
    # The default port (8009) marks a device; other ports mark a cast group.
    device_kind = CAST_TYPE_CHROMECAST if port == 8009 else CAST_TYPE_GROUP
    cast_info = pychromecast.models.CastInfo(
        services={service},
        uuid=uuid,
        model_name="Chromecast",
        friendly_name="Speaker",
        host=host,
        port=port,
        cast_type=device_kind,
        manufacturer="Nabu Casa",
    )
    return ChromecastInfo(cast_info=cast_info)
def get_fake_zconf(host="192.168.178.42", port=8009):
    """Generate a fake zeroconf instance resolving to the given host/port."""
    service_info = MagicMock(
        parsed_addresses=MagicMock(return_value=[host]),
        port=port,
    )
    zconf = MagicMock()
    zconf.get_service_info.return_value = service_info
    return zconf
async def async_setup_cast(hass, config=None):
    """Set up the cast integration from a mock config entry.

    Returns the patched add-entities mock so callers can assert which
    entities the platform scheduled.
    """
    entry_data = {"ignore_cec": [], "known_hosts": [], "uuid": [], **(config or {})}
    with patch(
        "homeassistant.helpers.entity_platform.EntityPlatform._async_schedule_add_entities"
    ) as add_entities:
        entry = MockConfigEntry(data=entry_data, domain="cast")
        entry.add_to_hass(hass)
        assert await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()
    return add_entities
async def async_setup_cast_internal_discovery(hass, config=None):
    """Set up the cast platform with patched pychromecast discovery.

    Returns ``(discover_chromecast, remove_chromecast, add_entities)``
    helpers which simulate the CastBrowser discovery callbacks.
    """
    browser = MagicMock(devices={}, zc={})
    with patch(
        "homeassistant.components.cast.discovery.pychromecast.discovery.CastBrowser",
        return_value=browser,
    ) as cast_browser:
        add_entities = await async_setup_cast(hass, config)
        await hass.async_block_till_done()
        await hass.async_block_till_done()
        # The integration must have started exactly one browser.
        assert browser.start_discovery.call_count == 1
        # Grab the listener callbacks the integration passed to CastBrowser.
        discovery_callback = cast_browser.call_args[0][0].add_cast
        remove_callback = cast_browser.call_args[0][0].remove_cast
    def discover_chromecast(
        service: pychromecast.discovery.ServiceInfo, info: ChromecastInfo
    ) -> None:
        """Simulate pychromecast discovering a chromecast device."""
        browser.devices[info.uuid] = pychromecast.discovery.CastInfo(
            {service},
            info.uuid,
            info.cast_info.model_name,
            info.friendly_name,
            info.cast_info.host,
            info.cast_info.port,
            info.cast_info.cast_type,
            info.cast_info.manufacturer,
        )
        discovery_callback(info.uuid, "")
    def remove_chromecast(service_name: str, info: ChromecastInfo) -> None:
        """Simulate pychromecast removing a chromecast device."""
        remove_callback(
            info.uuid,
            service_name,
            pychromecast.models.CastInfo(
                set(),
                info.uuid,
                info.cast_info.model_name,
                info.cast_info.friendly_name,
                info.cast_info.host,
                info.cast_info.port,
                info.cast_info.cast_type,
                info.cast_info.manufacturer,
            ),
        )
    return discover_chromecast, remove_chromecast, add_entities
async def async_setup_media_player_cast(hass: HomeAssistant, info: ChromecastInfo):
    """Set up a cast media player for the given fake cast device.

    Returns the fake chromecast mock and a helper to re-trigger discovery.
    """
    browser = MagicMock(devices={}, zc={})
    chromecast = get_fake_chromecast(info)
    zconf = get_fake_zconf(host=info.cast_info.host, port=info.cast_info.port)
    with patch(
        "homeassistant.components.cast.discovery.pychromecast.get_chromecast_from_cast_info",
        return_value=chromecast,
    ) as get_chromecast, patch(
        "homeassistant.components.cast.discovery.pychromecast.discovery.CastBrowser",
        return_value=browser,
    ) as cast_browser, patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf,
    ):
        await async_setup_component(
            hass, "cast", {"cast": {"media_player": {"uuid": info.uuid}}}
        )
        await hass.async_block_till_done()
        await hass.async_block_till_done()
        # Simulate pychromecast discovering the device.
        discovery_callback = cast_browser.call_args[0][0].add_cast
        browser.devices[info.uuid] = pychromecast.discovery.CastInfo(
            {FAKE_MDNS_SERVICE},
            info.uuid,
            info.cast_info.model_name,
            info.friendly_name,
            info.cast_info.host,
            info.cast_info.port,
            info.cast_info.cast_type,
            info.cast_info.manufacturer,
        )
        discovery_callback(info.uuid, FAKE_MDNS_SERVICE[1])
        await hass.async_block_till_done()
        await hass.async_block_till_done()
        # The integration must connect to the device exactly once.
        assert get_chromecast.call_count == 1
        def discover_chromecast(service_name: str, info: ChromecastInfo) -> None:
            """Re-trigger discovery of a chromecast device."""
            browser.devices[info.uuid] = pychromecast.discovery.CastInfo(
                {FAKE_MDNS_SERVICE},
                info.uuid,
                info.cast_info.model_name,
                info.friendly_name,
                info.cast_info.host,
                info.cast_info.port,
                info.cast_info.cast_type,
                info.cast_info.manufacturer,
            )
            discovery_callback(info.uuid, FAKE_MDNS_SERVICE[1])
        return chromecast, discover_chromecast
def get_status_callbacks(chromecast_mock, mz_mock=None):
    """Return the status callbacks registered on the chromecast mock.

    Returns ``(cast_status_cb, conn_status_cb, media_status_cb)`` and, when a
    multizone mock is supplied, the group media status callback as well.
    """
    status_listener = chromecast_mock.register_status_listener.call_args[0][0]
    connection_listener = chromecast_mock.register_connection_listener.call_args[0][0]
    media_controller = chromecast_mock.socket_client.media_controller
    media_listener = media_controller.register_status_listener.call_args[0][0]

    callbacks = (
        status_listener.new_cast_status,
        connection_listener.new_connection_status,
        media_listener.new_media_status,
    )
    if not mz_mock:
        return callbacks
    # Multizone managers register (uuid, listener); grab the listener.
    mz_listener = mz_mock.register_listener.call_args[0][1]
    return (*callbacks, mz_listener.multizone_new_media_status)
async def test_start_discovery_called_once(hass, castbrowser_mock):
    """Test discovery is started only once even after repeated setup."""
    for _ in range(2):
        await async_setup_cast(hass)
        assert castbrowser_mock.return_value.start_discovery.call_count == 1
async def test_internal_discovery_callback_fill_out_group_fail(
    hass, get_multizone_status_mock
):
    """Test discovered info is filled out when the multizone lookup fails."""
    discover_cast, _, _ = await async_setup_cast_internal_discovery(hass)
    info = get_fake_chromecast_info(host="host1", port=12345, service=FAKE_MDNS_SERVICE)
    zconf = get_fake_zconf(host="host1", port=12345)
    full_info = attr.evolve(
        info,
        cast_info=pychromecast.discovery.CastInfo(
            services=info.cast_info.services,
            uuid=FakeUUID,
            model_name="Chromecast",
            friendly_name="Speaker",
            host=info.cast_info.host,
            port=info.cast_info.port,
            cast_type=info.cast_info.cast_type,
            manufacturer=info.cast_info.manufacturer,
        ),
        is_dynamic_group=False,
    )
    get_multizone_status_mock.assert_not_called()
    # Simulate the multizone status lookup failing.
    get_multizone_status_mock.return_value = None
    with patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf,
    ):
        signal = MagicMock()
        async_dispatcher_connect(hass, "cast_discovered", signal)
        discover_cast(FAKE_MDNS_SERVICE, info)
        await hass.async_block_till_done()
        # The dispatched info must still be fully filled out.
        discover = signal.mock_calls[0][1][0]
        assert discover == full_info
        get_multizone_status_mock.assert_called_once()
async def test_internal_discovery_callback_fill_out_group(
    hass, get_multizone_status_mock
):
    """Test internal discovery automatically fills out group information.

    NOTE(review): this body is currently identical to
    test_internal_discovery_callback_fill_out_group_fail (multizone status is
    also forced to None here) — confirm whether a successful multizone status
    was meant to be mocked instead.
    """
    discover_cast, _, _ = await async_setup_cast_internal_discovery(hass)
    info = get_fake_chromecast_info(host="host1", port=12345, service=FAKE_MDNS_SERVICE)
    zconf = get_fake_zconf(host="host1", port=12345)
    full_info = attr.evolve(
        info,
        cast_info=pychromecast.discovery.CastInfo(
            services=info.cast_info.services,
            uuid=FakeUUID,
            model_name="Chromecast",
            friendly_name="Speaker",
            host=info.cast_info.host,
            port=info.cast_info.port,
            cast_type=info.cast_info.cast_type,
            manufacturer=info.cast_info.manufacturer,
        ),
        is_dynamic_group=False,
    )
    get_multizone_status_mock.assert_not_called()
    get_multizone_status_mock.return_value = None
    with patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf,
    ):
        signal = MagicMock()
        async_dispatcher_connect(hass, "cast_discovered", signal)
        discover_cast(FAKE_MDNS_SERVICE, info)
        await hass.async_block_till_done()
        # The dispatched discovery info must match the filled-out info.
        discover = signal.mock_calls[0][1][0]
        assert discover == full_info
        get_multizone_status_mock.assert_called_once()
async def test_stop_discovery_called_on_stop(hass, castbrowser_mock):
    """Test discovery is stopped when Home Assistant shuts down."""
    browser = castbrowser_mock.return_value
    await async_setup_cast(hass, {})
    assert browser.start_discovery.call_count == 1
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
    await hass.async_block_till_done()
    assert browser.stop_discovery.call_count == 1
async def test_create_cast_device_without_uuid(hass):
    """Test that discovery info without a UUID creates no entity."""
    info = get_fake_chromecast_info(uuid=None)
    assert cast._async_create_cast_device(hass, info) is None
async def test_create_cast_device_with_uuid(hass):
    """Test that a cast with a UUID is created only once."""
    added_casts = hass.data[cast.ADDED_CAST_DEVICES_KEY] = set()
    info = get_fake_chromecast_info()

    first_device = cast._async_create_cast_device(hass, info)
    assert first_device is not None
    assert info.uuid in added_casts

    # A second creation attempt for the same UUID must be a no-op.
    assert cast._async_create_cast_device(hass, info) is None
async def test_manual_cast_chromecasts_uuid(hass):
    """Test only the configured UUID is added for manual configuration."""
    cast_1 = get_fake_chromecast_info(host="host_1", uuid=FakeUUID)
    cast_2 = get_fake_chromecast_info(host="host_2", uuid=FakeUUID2)
    zconf_1 = get_fake_zconf(host="host_1")
    zconf_2 = get_fake_zconf(host="host_2")
    # Restrict the integration to FakeUUID only.
    discover_cast, _, add_dev1 = await async_setup_cast_internal_discovery(
        hass, config={"uuid": str(FakeUUID)}
    )
    with patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf_2,
    ):
        discover_cast(
            pychromecast.discovery.ServiceInfo(
                pychromecast.const.SERVICE_TYPE_MDNS, "service2"
            ),
            cast_2,
        )
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    # cast_2 has a different UUID and must be ignored.
    assert add_dev1.call_count == 0
    with patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf_1,
    ):
        discover_cast(
            pychromecast.discovery.ServiceInfo(
                pychromecast.const.SERVICE_TYPE_MDNS, "service1"
            ),
            cast_1,
        )
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    # cast_1 matches the configured UUID and must be added.
    assert add_dev1.call_count == 1
async def test_auto_cast_chromecasts(hass):
    """Test all discovered casts are added when no UUID filter is set."""
    cast_1 = get_fake_chromecast_info(host="some_host")
    cast_2 = get_fake_chromecast_info(host="other_host", uuid=FakeUUID2)
    zconf_1 = get_fake_zconf(host="some_host")
    zconf_2 = get_fake_zconf(host="other_host")
    discover_cast, _, add_dev1 = await async_setup_cast_internal_discovery(hass)
    # NOTE(review): zconf_1 (some_host) is patched while discovering cast_2
    # (other_host) and vice versa — looks swapped but does not affect the
    # call-count assertions; confirm intent.
    with patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf_1,
    ):
        discover_cast(
            pychromecast.discovery.ServiceInfo(
                pychromecast.const.SERVICE_TYPE_MDNS, "service2"
            ),
            cast_2,
        )
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    # First discovered cast is added.
    assert add_dev1.call_count == 1
    with patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf_2,
    ):
        discover_cast(
            pychromecast.discovery.ServiceInfo(
                pychromecast.const.SERVICE_TYPE_MDNS, "service1"
            ),
            cast_1,
        )
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    # Second cast with a different UUID is added as well.
    assert add_dev1.call_count == 2
async def test_discover_dynamic_group(
    hass, get_multizone_status_mock, get_chromecast_mock, caplog
):
    """Test a dynamic group is connected to but adds no media player entity."""
    cast_1 = get_fake_chromecast_info(host="host_1", port=23456, uuid=FakeUUID)
    cast_2 = get_fake_chromecast_info(host="host_2", port=34567, uuid=FakeUUID2)
    zconf_1 = get_fake_zconf(host="host_1", port=23456)
    zconf_2 = get_fake_zconf(host="host_2", port=34567)
    reg = er.async_get(hass)
    # Mark both UUIDs as dynamic groups in the multizone status.
    tmp1 = MagicMock()
    tmp1.uuid = FakeUUID
    tmp2 = MagicMock()
    tmp2.uuid = FakeUUID2
    get_multizone_status_mock.return_value.dynamic_groups = [tmp1, tmp2]
    get_chromecast_mock.assert_not_called()
    discover_cast, remove_cast, add_dev1 = await async_setup_cast_internal_discovery(
        hass
    )
    # Discovering a dynamic group connects but adds no entity.
    with patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf_1,
    ):
        discover_cast(
            pychromecast.discovery.ServiceInfo(
                pychromecast.const.SERVICE_TYPE_MDNS, "service"
            ),
            cast_1,
        )
        await hass.async_block_till_done()
        await hass.async_block_till_done()
    get_chromecast_mock.assert_called()
    get_chromecast_mock.reset_mock()
    assert add_dev1.call_count == 0
    assert reg.async_get_entity_id("media_player", "cast", cast_1.uuid) is None
    # The second dynamic group behaves the same.
    with patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf_2,
    ):
        discover_cast(
            pychromecast.discovery.ServiceInfo(
                pychromecast.const.SERVICE_TYPE_MDNS, "service"
            ),
            cast_2,
        )
        await hass.async_block_till_done()
        await hass.async_block_till_done()
    get_chromecast_mock.assert_called()
    get_chromecast_mock.reset_mock()
    assert add_dev1.call_count == 0
    assert reg.async_get_entity_id("media_player", "cast", cast_2.uuid) is None
    # Rediscovery of an already-known dynamic group must not reconnect.
    with patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf_1,
    ):
        discover_cast(
            pychromecast.discovery.ServiceInfo(
                pychromecast.const.SERVICE_TYPE_MDNS, "service"
            ),
            cast_1,
        )
        await hass.async_block_till_done()
        await hass.async_block_till_done()
    get_chromecast_mock.assert_not_called()
    assert add_dev1.call_count == 0
    assert reg.async_get_entity_id("media_player", "cast", cast_1.uuid) is None
    assert "Disconnecting from chromecast" not in caplog.text
    # Removal of the dynamic group triggers a disconnect.
    with patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf_1,
    ):
        remove_cast(
            pychromecast.discovery.ServiceInfo(
                pychromecast.const.SERVICE_TYPE_MDNS, "service"
            ),
            cast_1,
        )
        await hass.async_block_till_done()
        await hass.async_block_till_done()
    assert "Disconnecting from chromecast" in caplog.text
async def test_update_cast_chromecasts(hass):
    """Test a cast rediscovered on a new host only adds one device."""
    cast_1 = get_fake_chromecast_info(host="old_host")
    cast_2 = get_fake_chromecast_info(host="new_host")
    zconf_1 = get_fake_zconf(host="old_host")
    zconf_2 = get_fake_zconf(host="new_host")
    discover_cast, _, add_dev1 = await async_setup_cast_internal_discovery(hass)
    with patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf_1,
    ):
        discover_cast(
            pychromecast.discovery.ServiceInfo(
                pychromecast.const.SERVICE_TYPE_MDNS, "service1"
            ),
            cast_1,
        )
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert add_dev1.call_count == 1
    # Same UUID on a new host: the existing device is updated, not re-added.
    with patch(
        "homeassistant.components.cast.discovery.ChromeCastZeroconf.get_zeroconf",
        return_value=zconf_2,
    ):
        discover_cast(
            pychromecast.discovery.ServiceInfo(
                pychromecast.const.SERVICE_TYPE_MDNS, "service2"
            ),
            cast_2,
        )
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert add_dev1.call_count == 1
async def test_entity_availability(hass: HomeAssistant):
    """Test the entity tracks the Chromecast connection status."""
    entity_id = "media_player.speaker"
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)

    # Unavailable until the device connects.
    assert hass.states.get(entity_id).state == "unavailable"

    for status, expected_state in (
        ("CONNECTED", "off"),
        ("DISCONNECTED", "unavailable"),
    ):
        connection_status = MagicMock()
        connection_status.status = status
        conn_status_cb(connection_status)
        await hass.async_block_till_done()
        assert hass.states.get(entity_id).state == expected_state
@pytest.mark.parametrize("port,entry_type", ((8009, None),))
async def test_device_registry(hass: HomeAssistant, port, entry_type):
    """Test device registry integration and removal-triggered disconnect."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    dev_reg = dr.async_get(hass)
    info = get_fake_chromecast_info(port=port)
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    chromecast.cast_type = pychromecast.const.CAST_TYPE_CHROMECAST
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    cast_entry = hass.config_entries.async_entries("cast")[0]
    # Bring Chromecast online
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    entity_entry = reg.async_get(entity_id)
    assert entity_entry.device_id is not None
    device_entry = dev_reg.async_get(entity_entry.device_id)
    assert device_entry.entry_type == entry_type
    # Removing the config entry from the device must disconnect the cast.
    chromecast.disconnect.assert_not_called()
    dev_reg.async_update_device(
        device_entry.id, remove_config_entry_id=cast_entry.entry_id
    )
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    chromecast.disconnect.assert_called_once()
async def test_entity_cast_status(hass: HomeAssistant):
    """Test the entity exposes cast status and adapts supported features."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    chromecast.cast_type = pychromecast.const.CAST_TYPE_CHROMECAST
    cast_status_cb, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Bring Chromecast online
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    assert state.attributes.get("supported_features") == (
        SUPPORT_PLAY_MEDIA
        | SUPPORT_TURN_OFF
        | SUPPORT_TURN_ON
        | SUPPORT_VOLUME_MUTE
        | SUPPORT_VOLUME_SET
    )
    cast_status = MagicMock()
    cast_status.volume_level = 0.5
    cast_status.volume_muted = False
    cast_status_cb(cast_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    # Volume is not exposed while no app is running (app_id is None).
    assert state.attributes.get("volume_level") is None
    assert not state.attributes.get("is_volume_muted")
    # With an app running the volume status is exposed.
    chromecast.app_id = "1234"
    cast_status = MagicMock()
    cast_status.volume_level = 0.5
    cast_status.volume_muted = False
    cast_status_cb(cast_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("volume_level") == 0.5
    assert not state.attributes.get("is_volume_muted")
    cast_status = MagicMock()
    cast_status.volume_level = 0.2
    cast_status.volume_muted = True
    cast_status_cb(cast_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("volume_level") == 0.2
    assert state.attributes.get("is_volume_muted")
    # Fixed-volume devices lose the volume set/mute features.
    cast_status = MagicMock()
    cast_status.volume_control_type = "fixed"
    cast_status_cb(cast_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("supported_features") == (
        SUPPORT_PLAY_MEDIA | SUPPORT_TURN_OFF | SUPPORT_TURN_ON
    )
@pytest.mark.parametrize(
    "cast_type,supported_features,supported_features_no_media",
    [
        (
            pychromecast.const.CAST_TYPE_AUDIO,
            SUPPORT_PAUSE
            | SUPPORT_PLAY
            | SUPPORT_PLAY_MEDIA
            | SUPPORT_STOP
            | SUPPORT_TURN_OFF
            | SUPPORT_TURN_ON
            | SUPPORT_VOLUME_MUTE
            | SUPPORT_VOLUME_SET,
            SUPPORT_PLAY_MEDIA
            | SUPPORT_TURN_OFF
            | SUPPORT_TURN_ON
            | SUPPORT_VOLUME_MUTE
            | SUPPORT_VOLUME_SET,
        ),
        (
            pychromecast.const.CAST_TYPE_CHROMECAST,
            SUPPORT_PAUSE
            | SUPPORT_PLAY
            | SUPPORT_PLAY_MEDIA
            | SUPPORT_STOP
            | SUPPORT_TURN_OFF
            | SUPPORT_TURN_ON
            | SUPPORT_VOLUME_MUTE
            | SUPPORT_VOLUME_SET,
            SUPPORT_PLAY_MEDIA
            | SUPPORT_TURN_OFF
            | SUPPORT_TURN_ON
            | SUPPORT_VOLUME_MUTE
            | SUPPORT_VOLUME_SET,
        ),
        (
            pychromecast.const.CAST_TYPE_GROUP,
            SUPPORT_PAUSE
            | SUPPORT_PLAY
            | SUPPORT_PLAY_MEDIA
            | SUPPORT_STOP
            | SUPPORT_TURN_OFF
            | SUPPORT_VOLUME_MUTE
            | SUPPORT_VOLUME_SET,
            SUPPORT_PLAY_MEDIA
            | SUPPORT_TURN_OFF
            | SUPPORT_VOLUME_MUTE
            | SUPPORT_VOLUME_SET,
        ),
    ],
)
async def test_supported_features(
    hass: HomeAssistant, cast_type, supported_features, supported_features_no_media
):
    """Test the supported features reported with and without media status."""
    entity_id = "media_player.speaker"
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    chromecast.cast_type = cast_type
    _, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)
    # Bring Chromecast online
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert state.attributes.get("supported_features") == supported_features_no_media
    # Media status unlocks the playback-related features.
    media_status = MagicMock(images=None)
    media_status.supports_queue_next = False
    media_status.supports_seek = False
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("supported_features") == supported_features
async def test_entity_browse_media(hass: HomeAssistant, hass_ws_client):
    """Test we can browse media from a full Chromecast."""
    await async_setup_component(hass, "media_source", {"media_source": {}})
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Bring Chromecast online
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    client = await hass_ws_client()
    await client.send_json(
        {
            "id": 1,
            "type": "media_player/browse_media",
            "entity_id": "media_player.speaker",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    # Both video and audio items are offered to a full Chromecast.
    expected_child_1 = {
        "title": "Epic Sax Guy 10 Hours.mp4",
        "media_class": "video",
        "media_content_type": "video/mp4",
        "media_content_id": "media-source://media_source/local/Epic Sax Guy 10 Hours.mp4",
        "can_play": True,
        "can_expand": False,
        "children_media_class": None,
        "thumbnail": None,
    }
    assert expected_child_1 in response["result"]["children"]
    expected_child_2 = {
        "title": "test.mp3",
        "media_class": "music",
        "media_content_type": "audio/mpeg",
        "media_content_id": "media-source://media_source/local/test.mp3",
        "can_play": True,
        "can_expand": False,
        "children_media_class": None,
        "thumbnail": None,
    }
    assert expected_child_2 in response["result"]["children"]
@pytest.mark.parametrize(
    "cast_type",
    [pychromecast.const.CAST_TYPE_AUDIO, pychromecast.const.CAST_TYPE_GROUP],
)
async def test_entity_browse_media_audio_only(
    hass: HomeAssistant, hass_ws_client, cast_type
):
    """Test browsing media on an audio-only device filters out video."""
    await async_setup_component(hass, "media_source", {"media_source": {}})
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    chromecast.cast_type = cast_type
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Bring Chromecast online
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    client = await hass_ws_client()
    await client.send_json(
        {
            "id": 1,
            "type": "media_player/browse_media",
            "entity_id": "media_player.speaker",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    # Video items must NOT be offered to audio-only devices.
    expected_child_1 = {
        "title": "Epic Sax Guy 10 Hours.mp4",
        "media_class": "video",
        "media_content_type": "video/mp4",
        "media_content_id": "media-source://media_source/local/Epic Sax Guy 10 Hours.mp4",
        "can_play": True,
        "can_expand": False,
        "children_media_class": None,
        "thumbnail": None,
    }
    assert expected_child_1 not in response["result"]["children"]
    expected_child_2 = {
        "title": "test.mp3",
        "media_class": "music",
        "media_content_type": "audio/mpeg",
        "media_content_id": "media-source://media_source/local/test.mp3",
        "can_play": True,
        "can_expand": False,
        "children_media_class": None,
        "thumbnail": None,
    }
    assert expected_child_2 in response["result"]["children"]
async def test_entity_play_media(hass: HomeAssistant, quick_play_mock):
    """Test playing media routes through the default media receiver."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Bring Chromecast online
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    # Play a plain audio URL with extra metadata.
    await hass.services.async_call(
        media_player.DOMAIN,
        media_player.SERVICE_PLAY_MEDIA,
        {
            ATTR_ENTITY_ID: entity_id,
            media_player.ATTR_MEDIA_CONTENT_TYPE: "audio",
            media_player.ATTR_MEDIA_CONTENT_ID: "best.mp3",
            media_player.ATTR_MEDIA_EXTRA: {"metadata": {"metadatatype": 3}},
        },
        blocking=True,
    )
    # quick_play is used instead of the media controller directly.
    chromecast.media_controller.play_media.assert_not_called()
    quick_play_mock.assert_called_once_with(
        chromecast,
        "default_media_receiver",
        {
            "media_id": "best.mp3",
            "media_type": "audio",
            "metadata": {"metadatatype": 3},
        },
    )
async def test_entity_play_media_cast(hass: HomeAssistant, quick_play_mock):
    """Test playing media with a cast-specific JSON payload."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Bring Chromecast online
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    # An app_id payload starts the app directly.
    await common.async_play_media(hass, "cast", '{"app_id": "abc123"}', entity_id)
    chromecast.start_app.assert_called_once_with("abc123")
    # An app_name payload is quick-played together with any extra metadata.
    await hass.services.async_call(
        media_player.DOMAIN,
        media_player.SERVICE_PLAY_MEDIA,
        {
            ATTR_ENTITY_ID: entity_id,
            media_player.ATTR_MEDIA_CONTENT_TYPE: "cast",
            media_player.ATTR_MEDIA_CONTENT_ID: '{"app_name":"youtube"}',
            media_player.ATTR_MEDIA_EXTRA: {"metadata": {"metadatatype": 3}},
        },
        blocking=True,
    )
    quick_play_mock.assert_called_once_with(
        ANY, "youtube", {"metadata": {"metadatatype": 3}}
    )
async def test_entity_play_media_cast_invalid(hass, caplog, quick_play_mock):
    """Test playing media with invalid cast JSON payloads."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    # Bring Chromecast online
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    # Malformed JSON raises and starts nothing.
    with pytest.raises(json.decoder.JSONDecodeError):
        await common.async_play_media(hass, "cast", '{"app_id": "abc123"', entity_id)
    assert "Invalid JSON in media_content_id" in caplog.text
    chromecast.start_app.assert_not_called()
    quick_play_mock.assert_not_called()
    # Extra keys are ignored with a warning; the app still starts.
    await common.async_play_media(
        hass, "cast", '{"app_id": "abc123", "extra": "data"}', entity_id
    )
    assert "Extra keys dict_keys(['extra']) were ignored" in caplog.text
    chromecast.start_app.assert_called_once_with("abc123")
    quick_play_mock.assert_not_called()
    # An unsupported app_name surfaces as a logged error.
    quick_play_mock.side_effect = NotImplementedError()
    await common.async_play_media(hass, "cast", '{"app_name": "unknown"}', entity_id)
    quick_play_mock.assert_called_once_with(ANY, "unknown", {})
    assert "App unknown not supported" in caplog.text
async def test_entity_play_media_sign_URL(hass: HomeAssistant, quick_play_mock):
    """A relative media path is expanded to the internal URL and auth-signed."""
    entity_id = "media_player.speaker"
    await async_process_ha_core_config(
        hass,
        {"internal_url": "http://example.com:8123"},
    )
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    await common.async_play_media(hass, "audio", "/best.mp3", entity_id)
    quick_play_mock.assert_called_once_with(
        chromecast, "default_media_receiver", {"media_id": ANY, "media_type": "audio"}
    )
    # The relative path must be absolutized against internal_url and carry a signature.
    assert quick_play_mock.call_args[0][2]["media_id"].startswith(
        "http://example.com:8123/best.mp3?authSig="
    )
async def test_entity_media_content_type(hass: HomeAssistant):
    """media_content_type attribute tracks the media status content flags."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    # No flag set -> no content type reported.
    media_status = MagicMock(images=None)
    media_status.media_is_movie = False
    media_status.media_is_musictrack = False
    media_status.media_is_tvshow = False
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("media_content_type") is None
    media_status.media_is_tvshow = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("media_content_type") == "tvshow"
    media_status.media_is_tvshow = False
    media_status.media_is_musictrack = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("media_content_type") == "music"
    # With both musictrack and movie set, movie takes precedence.
    media_status.media_is_musictrack = True
    media_status.media_is_movie = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("media_content_type") == "movie"
async def test_entity_control(hass: HomeAssistant):
    """Supported features and control services track the media status flags."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    chromecast.cast_type = pychromecast.const.CAST_TYPE_CHROMECAST
    _, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    media_status = MagicMock(images=None)
    media_status.supports_queue_next = False
    media_status.supports_seek = False
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "playing"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    # Without queue/seek support only the basic feature set is advertised.
    assert state.attributes.get("supported_features") == (
        SUPPORT_PAUSE
        | SUPPORT_PLAY
        | SUPPORT_PLAY_MEDIA
        | SUPPORT_STOP
        | SUPPORT_TURN_OFF
        | SUPPORT_TURN_ON
        | SUPPORT_VOLUME_MUTE
        | SUPPORT_VOLUME_SET
    )
    await common.async_turn_on(hass, entity_id)
    chromecast.play_media.assert_called_once_with(
        "https://www.home-assistant.io/images/cast/splash.png", "image/png"
    )
    chromecast.quit_app.reset_mock()
    await common.async_turn_off(hass, entity_id)
    chromecast.quit_app.assert_called_once_with()
    await common.async_mute_volume(hass, True, entity_id)
    chromecast.set_volume_muted.assert_called_once_with(True)
    await common.async_set_volume_level(hass, 0.33, entity_id)
    chromecast.set_volume.assert_called_once_with(0.33)
    await common.async_media_play(hass, entity_id)
    chromecast.media_controller.play.assert_called_once_with()
    await common.async_media_pause(hass, entity_id)
    chromecast.media_controller.pause.assert_called_once_with()
    # Track skipping and seeking must be no-ops while unsupported.
    await common.async_media_previous_track(hass, entity_id)
    chromecast.media_controller.queue_prev.assert_not_called()
    await common.async_media_next_track(hass, entity_id)
    chromecast.media_controller.queue_next.assert_not_called()
    await common.async_media_seek(hass, 123, entity_id)
    chromecast.media_controller.seek.assert_not_called()
    # Advertise queue/seek support and verify the feature set grows.
    media_status = MagicMock(images=None)
    media_status.supports_queue_next = True
    media_status.supports_seek = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("supported_features") == (
        SUPPORT_PAUSE
        | SUPPORT_PLAY
        | SUPPORT_PLAY_MEDIA
        | SUPPORT_STOP
        | SUPPORT_TURN_OFF
        | SUPPORT_TURN_ON
        | SUPPORT_PREVIOUS_TRACK
        | SUPPORT_NEXT_TRACK
        | SUPPORT_SEEK
        | SUPPORT_VOLUME_MUTE
        | SUPPORT_VOLUME_SET
    )
    await common.async_media_previous_track(hass, entity_id)
    chromecast.media_controller.queue_prev.assert_called_once_with()
    await common.async_media_next_track(hass, entity_id)
    chromecast.media_controller.queue_next.assert_called_once_with()
    await common.async_media_seek(hass, 123, entity_id)
    chromecast.media_controller.seek.assert_called_once_with(123)
@pytest.mark.parametrize(
    "app_id, state_no_media",
    [(pychromecast.APP_YOUTUBE, "idle"), ("Netflix", "playing")],
)
async def test_entity_media_states(hass: HomeAssistant, app_id, state_no_media):
    """Entity state follows the media status, the app id and the idle flag."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    cast_status_cb, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    # An app with no media status yields the parametrized fallback state.
    chromecast.app_id = app_id
    cast_status = MagicMock()
    cast_status_cb(cast_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == state_no_media
    media_status = MagicMock(images=None)
    media_status.player_is_playing = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
    media_status.player_is_playing = False
    media_status.player_is_paused = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "paused"
    media_status.player_is_paused = False
    media_status.player_is_idle = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "idle"
    # Clearing the media status falls back to the app-dependent state again.
    media_status_cb(None)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == state_no_media
    chromecast.app_id = pychromecast.IDLE_APP_ID
    cast_status = MagicMock()
    cast_status_cb(cast_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "off"
    chromecast.is_idle = False
    cast_status_cb(None)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "unknown"
async def test_entity_media_states_lovelace_app(hass: HomeAssistant):
    """While the Lovelace app runs, state stays 'playing' whatever media says."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    cast_status_cb, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    chromecast.app_id = CAST_APP_ID_HOMEASSISTANT_LOVELACE
    cast_status = MagicMock()
    cast_status_cb(cast_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
    # The Lovelace app exposes only a reduced feature set.
    assert state.attributes.get("supported_features") == (
        SUPPORT_PLAY_MEDIA | SUPPORT_TURN_OFF | SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_SET
    )
    media_status = MagicMock(images=None)
    media_status.player_is_playing = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
    media_status.player_is_playing = False
    media_status.player_is_paused = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
    media_status.player_is_paused = False
    media_status.player_is_idle = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
    # Once the idle app takes over, the normal state machine applies again.
    chromecast.app_id = pychromecast.IDLE_APP_ID
    media_status.player_is_idle = False
    chromecast.is_idle = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "off"
    chromecast.is_idle = False
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "unknown"
async def test_group_media_states(hass, mz_mock):
    """Group media status fills in when the local player status is unknown."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, media_status_cb, group_media_status_cb = get_status_callbacks(
        chromecast, mz_mock
    )
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    group_media_status = MagicMock(images=None)
    player_media_status = MagicMock(images=None)
    # With no local status, the group's "playing" shows through.
    group_media_status.player_is_playing = True
    group_media_status_cb(str(FakeGroupUUID), group_media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
    # A concrete local status overrides the group status.
    player_media_status.player_is_playing = False
    player_media_status.player_is_paused = True
    media_status_cb(player_media_status)
    await hass.async_block_till_done()
    # NOTE(review): second drain pass — presumably needed for chained
    # events to settle; confirm before removing.
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "paused"
    # An UNKNOWN local state defers to the group status again.
    player_media_status.player_state = "UNKNOWN"
    media_status_cb(player_media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
async def test_group_media_control(hass, mz_mock, quick_play_mock):
    """Control services route to the group or local controller by known state."""
    entity_id = "media_player.speaker"
    reg = er.async_get(hass)
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, media_status_cb, group_media_status_cb = get_status_callbacks(
        chromecast, mz_mock
    )
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "off"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", str(info.uuid))
    group_media_status = MagicMock(images=None)
    player_media_status = MagicMock(images=None)
    # Only the group has a status -> commands go to the group controller.
    group_media_status.player_is_playing = True
    group_media_status_cb(str(FakeGroupUUID), group_media_status)
    await common.async_media_play(hass, entity_id)
    grp_media = mz_mock.get_multizone_mediacontroller(str(FakeGroupUUID))
    assert grp_media.play.called
    assert not chromecast.media_controller.play.called
    # Local status known -> commands go to the local controller.
    player_media_status.player_is_playing = False
    player_media_status.player_is_paused = True
    media_status_cb(player_media_status)
    await common.async_media_pause(hass, entity_id)
    grp_media = mz_mock.get_multizone_mediacontroller(str(FakeGroupUUID))
    assert not grp_media.pause.called
    assert chromecast.media_controller.pause.called
    # UNKNOWN local state -> back to the group controller.
    player_media_status.player_state = "UNKNOWN"
    media_status_cb(player_media_status)
    await common.async_media_stop(hass, entity_id)
    grp_media = mz_mock.get_multizone_mediacontroller(str(FakeGroupUUID))
    assert grp_media.stop.called
    assert not chromecast.media_controller.stop.called
    # play_media always targets the local cast via quick_play.
    await common.async_play_media(hass, "music", "best.mp3", entity_id)
    assert not grp_media.play_media.called
    assert not chromecast.media_controller.play_media.called
    quick_play_mock.assert_called_once_with(
        chromecast,
        "default_media_receiver",
        {"media_id": "best.mp3", "media_type": "music"},
    )
async def test_failed_cast_on_idle(hass, caplog):
    """Only an idle player with idle_reason ERROR logs a failed-cast message."""
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)
    # Not idle: no failure logged even with an ERROR reason.
    media_status = MagicMock(images=None)
    media_status.player_is_idle = False
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert "Failed to cast media" not in caplog.text
    # Idle but a non-ERROR reason: still no failure logged.
    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "Other"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert "Failed to cast media" not in caplog.text
    # Idle with ERROR: the failure is logged.
    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert "Failed to cast media http://example.com:8123/tts.mp3." in caplog.text
async def test_failed_cast_other_url(hass, caplog):
    """A failed cast of a URL not matching any known base is logged plainly."""
    with assert_setup_component(1, tts.DOMAIN):
        assert await async_setup_component(
            hass,
            tts.DOMAIN,
            {tts.DOMAIN: {"platform": "demo", "base_url": "http://example.local:8123"}},
        )
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)
    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    # example.com does not match the configured example.local base URL.
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert "Failed to cast media http://example.com:8123/tts.mp3." in caplog.text
async def test_failed_cast_internal_url(hass, caplog):
    """A failed cast of an internal_url media id names internal_url in the log."""
    await async_process_ha_core_config(
        hass,
        {"internal_url": "http://example.local:8123"},
    )
    with assert_setup_component(1, tts.DOMAIN):
        assert await async_setup_component(
            hass, tts.DOMAIN, {tts.DOMAIN: {"platform": "demo"}}
        )
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)
    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.local:8123/tts.mp3"
    media_status_cb(media_status)
    assert (
        "Failed to cast media http://example.local:8123/tts.mp3 from internal_url"
        in caplog.text
    )
async def test_failed_cast_external_url(hass, caplog):
    """A failed cast of an external_url media id names external_url in the log."""
    await async_process_ha_core_config(
        hass,
        {"external_url": "http://example.com:8123"},
    )
    with assert_setup_component(1, tts.DOMAIN):
        assert await async_setup_component(
            hass,
            tts.DOMAIN,
            {tts.DOMAIN: {"platform": "demo", "base_url": "http://example.com:8123"}},
        )
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)
    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert (
        "Failed to cast media http://example.com:8123/tts.mp3 from external_url"
        in caplog.text
    )
async def test_failed_cast_tts_base_url(hass, caplog):
    """A failed cast of a tts.base_url media id names tts.base_url in the log."""
    with assert_setup_component(1, tts.DOMAIN):
        assert await async_setup_component(
            hass,
            tts.DOMAIN,
            {tts.DOMAIN: {"platform": "demo", "base_url": "http://example.local:8123"}},
        )
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)
    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.local:8123/tts.mp3"
    media_status_cb(media_status)
    assert (
        "Failed to cast media http://example.local:8123/tts.mp3 from tts.base_url"
        in caplog.text
    )
async def test_disconnect_on_stop(hass: HomeAssistant):
    """Stopping Home Assistant disconnects the chromecast exactly once."""
    cast_info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, cast_info)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
    await hass.async_block_till_done()
    assert chromecast.disconnect.call_count == 1
async def test_entry_setup_no_config(hass: HomeAssistant):
    """Setting up cast without any YAML config creates no config entry."""
    await async_setup_component(hass, "cast", {})
    await hass.async_block_till_done()
    assert not hass.config_entries.async_entries("cast")
async def test_entry_setup_empty_config(hass: HomeAssistant):
    """An empty cast: section yields an entry with empty uuid/ignore_cec lists."""
    await async_setup_component(hass, "cast", {"cast": {}})
    await hass.async_block_till_done()
    entry = hass.config_entries.async_entries("cast")[0]
    assert entry.data["uuid"] == []
    assert entry.data["ignore_cec"] == []
async def test_entry_setup_single_config(hass: HomeAssistant):
    """A single media_player dict is normalized into list-valued entry data."""
    yaml_config = {
        "cast": {"media_player": {"uuid": "bla", "ignore_cec": "cast1"}}
    }
    await async_setup_component(hass, "cast", yaml_config)
    await hass.async_block_till_done()
    entry = hass.config_entries.async_entries("cast")[0]
    assert entry.data["uuid"] == ["bla"]
    assert entry.data["ignore_cec"] == ["cast1"]
    assert pychromecast.IGNORE_CEC == ["cast1"]
async def test_entry_setup_list_config(hass: HomeAssistant):
    """Multiple media_player dicts are merged into one config entry."""
    yaml_config = {
        "cast": {
            "media_player": [
                {"uuid": "bla", "ignore_cec": "cast1"},
                {"uuid": "blu", "ignore_cec": ["cast2", "cast3"]},
            ]
        }
    }
    await async_setup_component(hass, "cast", yaml_config)
    await hass.async_block_till_done()
    entry = hass.config_entries.async_entries("cast")[0]
    assert set(entry.data["uuid"]) == {"bla", "blu"}
    assert set(entry.data["ignore_cec"]) == {"cast1", "cast2", "cast3"}
    assert set(pychromecast.IGNORE_CEC) == {"cast1", "cast2", "cast3"}
async def test_invalid_cast_platform(hass: HomeAssistant, caplog):
    """A cast platform missing its required methods is rejected with a log line."""
    bad_platform = Mock()
    # Strip the three attributes a valid cast platform must provide.
    for required in (
        "async_get_media_browser_root_object",
        "async_browse_media",
        "async_play_media",
    ):
        delattr(bad_platform, required)
    mock_platform(hass, "test.cast", bad_platform)
    await async_setup_component(hass, "test", {"test": {}})
    await hass.async_block_till_done()
    await async_setup_media_player_cast(hass, get_fake_chromecast_info())
    assert "Invalid cast platform <Mock id" in caplog.text
async def test_cast_platform_play_media(hass: HomeAssistant, quick_play_mock, caplog):
    """A cast platform may claim play_media; otherwise the default path runs."""
    entity_id = "media_player.speaker"
    _can_play = True
    def can_play(*args):
        # Late-binding closure: reads _can_play at call time so the test can
        # flip the platform's answer below.
        return _can_play
    cast_platform_mock = Mock(
        async_get_media_browser_root_object=AsyncMock(return_value=[]),
        async_browse_media=AsyncMock(return_value=None),
        async_play_media=AsyncMock(side_effect=can_play),
    )
    mock_platform(hass, "test.cast", cast_platform_mock)
    await async_setup_component(hass, "test", {"test": {}})
    await hass.async_block_till_done()
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    assert "Invalid cast platform <Mock id" not in caplog.text
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    # Platform returns truthy -> it handles playback; quick_play is bypassed.
    await hass.services.async_call(
        media_player.DOMAIN,
        media_player.SERVICE_PLAY_MEDIA,
        {
            ATTR_ENTITY_ID: entity_id,
            media_player.ATTR_MEDIA_CONTENT_TYPE: "audio",
            media_player.ATTR_MEDIA_CONTENT_ID: "best.mp3",
            media_player.ATTR_MEDIA_EXTRA: {"metadata": {"metadatatype": 3}},
        },
        blocking=True,
    )
    cast_platform_mock.async_play_media.assert_called_once_with(
        hass, entity_id, chromecast, "audio", "best.mp3"
    )
    chromecast.media_controller.play_media.assert_not_called()
    quick_play_mock.assert_not_called()
    # Platform declines -> playback falls back to quick_play.
    _can_play = False
    cast_platform_mock.async_play_media.reset_mock()
    await hass.services.async_call(
        media_player.DOMAIN,
        media_player.SERVICE_PLAY_MEDIA,
        {
            ATTR_ENTITY_ID: entity_id,
            media_player.ATTR_MEDIA_CONTENT_TYPE: "audio",
            media_player.ATTR_MEDIA_CONTENT_ID: "best.mp3",
            media_player.ATTR_MEDIA_EXTRA: {"metadata": {"metadatatype": 3}},
        },
        blocking=True,
    )
    cast_platform_mock.async_play_media.assert_called_once_with(
        hass, entity_id, chromecast, "audio", "best.mp3"
    )
    chromecast.media_controller.play_media.assert_not_called()
    quick_play_mock.assert_called()
async def test_cast_platform_browse_media(hass: HomeAssistant, hass_ws_client):
    """Cast platform root objects and browse results appear over the WS API."""
    cast_platform_mock = Mock(
        async_get_media_browser_root_object=AsyncMock(
            return_value=[
                BrowseMedia(
                    title="Spotify",
                    media_class=MEDIA_CLASS_APP,
                    media_content_id="",
                    media_content_type="spotify",
                    thumbnail="https://brands.home-assistant.io/_/spotify/logo.png",
                    can_play=False,
                    can_expand=True,
                )
            ]
        ),
        async_browse_media=AsyncMock(
            return_value=BrowseMedia(
                title="Spotify Favourites",
                media_class=MEDIA_CLASS_PLAYLIST,
                media_content_id="",
                media_content_type="spotify",
                can_play=True,
                can_expand=False,
            )
        ),
        async_play_media=AsyncMock(return_value=False),
    )
    mock_platform(hass, "test.cast", cast_platform_mock)
    await async_setup_component(hass, "test", {"test": {}})
    await async_setup_component(hass, "media_source", {"media_source": {}})
    await hass.async_block_till_done()
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    # Browsing the root must include the platform's root object.
    client = await hass_ws_client()
    await client.send_json(
        {
            "id": 1,
            "type": "media_player/browse_media",
            "entity_id": "media_player.speaker",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    expected_child = {
        "title": "Spotify",
        "media_class": "app",
        "media_content_type": "spotify",
        "media_content_id": "",
        "can_play": False,
        "can_expand": True,
        "children_media_class": None,
        "thumbnail": "https://brands.home-assistant.io/_/spotify/logo.png",
    }
    assert expected_child in response["result"]["children"]
    # Browsing into the platform item returns its async_browse_media result.
    client = await hass_ws_client()
    await client.send_json(
        {
            "id": 2,
            "type": "media_player/browse_media",
            "entity_id": "media_player.speaker",
            "media_content_id": "",
            "media_content_type": "spotify",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    expected_response = {
        "title": "Spotify Favourites",
        "media_class": "playlist",
        "media_content_type": "spotify",
        "media_content_id": "",
        "can_play": True,
        "can_expand": False,
        "children_media_class": None,
        "thumbnail": None,
        "children": [],
    }
    assert response["result"] == expected_response
async def test_cast_platform_play_media_local_media(
    hass: HomeAssistant, quick_play_mock, caplog
):
    """Local HLS media ids are signed and annotated as live fmp4 streams."""
    entity_id = "media_player.speaker"
    info = get_fake_chromecast_info()
    chromecast, _ = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)
    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    # A relative /api/hls path gets absolutized and auth-signed.
    await hass.services.async_call(
        media_player.DOMAIN,
        media_player.SERVICE_PLAY_MEDIA,
        {
            ATTR_ENTITY_ID: entity_id,
            media_player.ATTR_MEDIA_CONTENT_TYPE: "application/vnd.apple.mpegurl",
            media_player.ATTR_MEDIA_CONTENT_ID: "/api/hls/bla/master_playlist.m3u8",
        },
        blocking=True,
    )
    await hass.async_block_till_done()
    quick_play_mock.assert_called()
    app_data = quick_play_mock.call_args[0][2]
    assert not app_data["media_id"].startswith("/")
    assert "authSig" in yarl.URL(app_data["media_id"]).query
    assert app_data["media_type"] == "application/vnd.apple.mpegurl"
    assert app_data["stream_type"] == "LIVE"
    assert app_data["media_info"] == {
        "hlsVideoSegmentFormat": "fmp4",
    }
    quick_play_mock.reset_mock()
    # An already-absolute HA URL is passed through unchanged.
    await hass.services.async_call(
        media_player.DOMAIN,
        media_player.SERVICE_PLAY_MEDIA,
        {
            ATTR_ENTITY_ID: entity_id,
            media_player.ATTR_MEDIA_CONTENT_TYPE: "application/vnd.apple.mpegurl",
            media_player.ATTR_MEDIA_CONTENT_ID: f"{network.get_url(hass)}/api/hls/bla/master_playlist.m3u8?token=bla",
        },
        blocking=True,
    )
    await hass.async_block_till_done()
    quick_play_mock.assert_called()
    app_data = quick_play_mock.call_args[0][2]
    assert (
        app_data["media_id"]
        == f"{network.get_url(hass)}/api/hls/bla/master_playlist.m3u8?token=bla"
    )
| true | true |
1c2dab50d42e019542081e038e89f7d1b6d275fd | 1,912 | py | Python | tests/conftest.py | jannikluhn/tlbc-monitor | 9d54d40bfed48db5542fd6714946ea27684a918e | [
"MIT"
] | null | null | null | tests/conftest.py | jannikluhn/tlbc-monitor | 9d54d40bfed48db5542fd6714946ea27684a918e | [
"MIT"
] | 61 | 2019-04-08T20:13:47.000Z | 2020-07-16T09:18:48.000Z | tests/conftest.py | jannikluhn/tlbc-monitor | 9d54d40bfed48db5542fd6714946ea27684a918e | [
"MIT"
] | 3 | 2019-02-22T14:15:27.000Z | 2019-10-23T04:20:47.000Z | import math
import pytest
from eth_tester import EthereumTester
from eth_keys import keys
from web3 import EthereumTesterProvider, Web3
from eth_utils import int_to_big_endian, to_checksum_address
from sqlalchemy import create_engine
from monitor.db import BlockDB
from monitor.validators import PrimaryOracle, Epoch
from tests.fake_aura_backend import (
FakeAuraBackend,
FakeAuraValidator,
FakeAuraNormalizer,
key_renaming_middleware,
)
@pytest.fixture
def eth_tester(address_to_private_key):
    """EthereumTester wired to the fake Aura backend, validator and normalizer.

    Any private key from ``address_to_private_key`` whose account is not
    already known to the tester is registered with it.
    """
    tester = EthereumTester(
        backend=FakeAuraBackend(),
        validator=FakeAuraValidator(),
        normalizer=FakeAuraNormalizer(),
    )
    known_accounts = set(tester.get_accounts())
    for account, key in address_to_private_key.items():
        if to_checksum_address(account) not in known_accounts:
            tester.add_account(key.to_hex())
    return tester
@pytest.fixture
def address_to_private_key():
    """Map canonical addresses to the deterministic private keys 1..9."""
    mapping = {}
    for seed in range(1, 10):
        key = keys.PrivateKey(int_to_big_endian(seed).rjust(32, b"\x00"))
        mapping[key.public_key.to_canonical_address()] = key
    return mapping
@pytest.fixture
def w3(eth_tester):
    """Web3 instance backed by the eth_tester fixture, with key renaming."""
    web3 = Web3(EthereumTesterProvider(eth_tester))
    web3.middleware_onion.add(key_renaming_middleware)
    return web3
@pytest.fixture
def engine():
    """Fresh in-memory SQLite engine for each test."""
    return create_engine("sqlite:///:memory:")
@pytest.fixture
def empty_db(engine):
    """BlockDB bound to the in-memory engine, containing no blocks."""
    return BlockDB(engine)
@pytest.fixture
def validators():
    """Three distinct 20-byte validator addresses."""
    return [byte * 20 for byte in (b"\x00", b"\x11", b"\x22")]
@pytest.fixture
def primary_oracle(validators):
    """PrimaryOracle with a single epoch from height 0 and no height cap."""
    oracle = PrimaryOracle()
    epoch = Epoch(
        start_height=0, validators=validators, validator_definition_index=0
    )
    oracle.add_epoch(epoch)
    oracle.max_height = math.inf
    return oracle
| 22.494118 | 86 | 0.736402 | import math
import pytest
from eth_tester import EthereumTester
from eth_keys import keys
from web3 import EthereumTesterProvider, Web3
from eth_utils import int_to_big_endian, to_checksum_address
from sqlalchemy import create_engine
from monitor.db import BlockDB
from monitor.validators import PrimaryOracle, Epoch
from tests.fake_aura_backend import (
FakeAuraBackend,
FakeAuraValidator,
FakeAuraNormalizer,
key_renaming_middleware,
)
@pytest.fixture
def eth_tester(address_to_private_key):
eth_tester = EthereumTester(
backend=FakeAuraBackend(),
validator=FakeAuraValidator(),
normalizer=FakeAuraNormalizer(),
)
existing_accounts = eth_tester.get_accounts()
for address, private_key in address_to_private_key.items():
if to_checksum_address(address) not in existing_accounts:
eth_tester.add_account(private_key.to_hex())
return eth_tester
@pytest.fixture
def address_to_private_key():
private_keys = [
keys.PrivateKey(int_to_big_endian(i).rjust(32, b"\x00")) for i in range(1, 10)
]
return {
private_key.public_key.to_canonical_address(): private_key
for private_key in private_keys
}
@pytest.fixture
def w3(eth_tester):
provider = EthereumTesterProvider(eth_tester)
w3 = Web3(provider)
w3.middleware_onion.add(key_renaming_middleware)
return w3
@pytest.fixture
def engine():
return create_engine("sqlite:///:memory:")
@pytest.fixture
def empty_db(engine):
return BlockDB(engine)
@pytest.fixture
def validators():
return [b"\x00" * 20, b"\x11" * 20, b"\x22" * 20]
@pytest.fixture
def primary_oracle(validators):
primary_oracle = PrimaryOracle()
primary_oracle.add_epoch(
Epoch(start_height=0, validators=validators, validator_definition_index=0)
)
primary_oracle.max_height = math.inf
return primary_oracle
| true | true |
1c2dac728e9bb035d33003ba95e387686a62fb5b | 17,495 | py | Python | neutron/tests/unit/_test_extension_portbindings.py | sajuptpm/notification_neutron | 45933f63c9eff0d2931a7209b040ff2dc69835c5 | [
"Apache-2.0"
] | 5 | 2015-10-20T07:56:53.000Z | 2017-12-31T22:39:15.000Z | neutron/tests/unit/_test_extension_portbindings.py | sajuptpm/notification_neutron | 45933f63c9eff0d2931a7209b040ff2dc69835c5 | [
"Apache-2.0"
] | null | null | null | neutron/tests/unit/_test_extension_portbindings.py | sajuptpm/notification_neutron | 45933f63c9eff0d2931a7209b040ff2dc69835c5 | [
"Apache-2.0"
] | 3 | 2015-05-08T22:36:28.000Z | 2015-10-24T21:25:35.000Z | # Copyright 2013 NEC Corporation
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import httplib
from oslo_config import cfg
from webob import exc
from neutron import context
from neutron.extensions import portbindings
from neutron import manager
from neutron.tests.unit.db import test_db_base_plugin_v2
class PortBindingsTestCase(test_db_base_plugin_v2.NeutronDbPluginV2TestCase):
    """Shared portbindings-extension tests; plugin test cases subclass this."""

    # VIF_TYPE must be overridden according to plugin vif_type
    VIF_TYPE = portbindings.VIF_TYPE_OTHER
    # VIF_DETAILS must be overridden according to plugin vif_details
    VIF_DETAILS = None
    def _check_response_portbindings(self, port):
        """Assert the port dict carries the expected binding attributes."""
        self.assertEqual(port[portbindings.VIF_TYPE], self.VIF_TYPE)
        # REVISIT(rkukura): Consider reworking tests to enable ML2 to bind
        if self.VIF_TYPE not in [portbindings.VIF_TYPE_UNBOUND,
                                 portbindings.VIF_TYPE_BINDING_FAILED]:
            # NOTE(r-mibu): The following six lines are just for backward
            # compatibility. In this class, HAS_PORT_FILTER has been replaced
            # by VIF_DETAILS which can be set expected vif_details to check,
            # but all replacement of HAS_PORT_FILTER in successor has not been
            # completed.
            if self.VIF_DETAILS is None:
                expected = getattr(self, 'HAS_PORT_FILTER', False)
                vif_details = port[portbindings.VIF_DETAILS]
                port_filter = vif_details[portbindings.CAP_PORT_FILTER]
                self.assertEqual(expected, port_filter)
                return
            self.assertEqual(self.VIF_DETAILS, port[portbindings.VIF_DETAILS])
def _check_response_no_portbindings(self, port):
self.assertIn('status', port)
self.assertNotIn(portbindings.VIF_TYPE, port)
self.assertNotIn(portbindings.VIF_DETAILS, port)
def _get_non_admin_context(self):
return context.Context(user_id=None,
tenant_id=self._tenant_id,
is_admin=False,
read_deleted="no")
    def test_port_vif_details(self):
        """Binding attributes are visible to admins but hidden from tenants."""
        with self.port(name='name') as port:
            port_id = port['port']['id']
            # Check a response of create_port
            self._check_response_portbindings(port['port'])
            # Check a response of get_port
            ctx = context.get_admin_context()
            port = self._show('ports', port_id, neutron_context=ctx)['port']
            self._check_response_portbindings(port)
            # By default user is admin - now test non admin user
            ctx = self._get_non_admin_context()
            non_admin_port = self._show(
                'ports', port_id, neutron_context=ctx)['port']
            self._check_response_no_portbindings(non_admin_port)
def test_ports_vif_details(self):
plugin = manager.NeutronManager.get_plugin()
cfg.CONF.set_default('allow_overlapping_ips', True)
with contextlib.nested(self.port(), self.port()):
ctx = context.get_admin_context()
ports = plugin.get_ports(ctx)
self.assertEqual(len(ports), 2)
for port in ports:
self._check_response_portbindings(port)
# By default user is admin - now test non admin user
ctx = self._get_non_admin_context()
ports = self._list('ports', neutron_context=ctx)['ports']
self.assertEqual(len(ports), 2)
for non_admin_port in ports:
self._check_response_no_portbindings(non_admin_port)
def _check_port_binding_profile(self, port, profile=None):
# For plugins which does not use binding:profile attr
# we just check an operation for the port succeed.
self.assertIn('id', port)
def _test_create_port_binding_profile(self, profile):
profile_arg = {portbindings.PROFILE: profile}
with self.port(arg_list=(portbindings.PROFILE,),
**profile_arg) as port:
port_id = port['port']['id']
self._check_port_binding_profile(port['port'], profile)
port = self._show('ports', port_id)
self._check_port_binding_profile(port['port'], profile)
def test_create_port_binding_profile_none(self):
self._test_create_port_binding_profile(None)
def test_create_port_binding_profile_with_empty_dict(self):
self._test_create_port_binding_profile({})
def _test_update_port_binding_profile(self, profile):
profile_arg = {portbindings.PROFILE: profile}
with self.port() as port:
self._check_port_binding_profile(port['port'])
port_id = port['port']['id']
ctx = context.get_admin_context()
port = self._update('ports', port_id, {'port': profile_arg},
neutron_context=ctx)['port']
self._check_port_binding_profile(port, profile)
port = self._show('ports', port_id)['port']
self._check_port_binding_profile(port, profile)
def test_update_port_binding_profile_none(self):
self._test_update_port_binding_profile(None)
def test_update_port_binding_profile_with_empty_dict(self):
self._test_update_port_binding_profile({})
def test_port_create_portinfo_non_admin(self):
profile_arg = {portbindings.PROFILE: {'dummy': 'dummy'}}
with self.network(set_context=True, tenant_id='test') as net1:
with self.subnet(network=net1) as subnet1:
# succeed without binding:profile
with self.port(subnet=subnet1,
set_context=True, tenant_id='test'):
pass
# fail with binding:profile
try:
with self.port(subnet=subnet1,
expected_res_status=403,
arg_list=(portbindings.PROFILE,),
set_context=True, tenant_id='test',
**profile_arg):
pass
except exc.HTTPClientError:
pass
def test_port_update_portinfo_non_admin(self):
profile_arg = {portbindings.PROFILE: {'dummy': 'dummy'}}
with self.network() as net1:
with self.subnet(network=net1) as subnet1:
with self.port(subnet=subnet1) as port:
# By default user is admin - now test non admin user
port_id = port['port']['id']
ctx = self._get_non_admin_context()
port = self._update('ports', port_id,
{'port': profile_arg},
expected_code=exc.HTTPForbidden.code,
neutron_context=ctx)
class PortBindingsHostTestCaseMixin(object):
fmt = 'json'
hostname = 'testhost'
def _check_response_portbindings_host(self, port):
self.assertEqual(port[portbindings.HOST_ID], self.hostname)
def _check_response_no_portbindings_host(self, port):
self.assertIn('status', port)
self.assertNotIn(portbindings.HOST_ID, port)
def test_port_vif_non_admin(self):
with self.network(set_context=True,
tenant_id='test') as net1:
with self.subnet(network=net1) as subnet1:
host_arg = {portbindings.HOST_ID: self.hostname}
try:
with self.port(subnet=subnet1,
expected_res_status=403,
arg_list=(portbindings.HOST_ID,),
set_context=True,
tenant_id='test',
**host_arg):
pass
except exc.HTTPClientError:
pass
def test_port_vif_host(self):
host_arg = {portbindings.HOST_ID: self.hostname}
with self.port(name='name', arg_list=(portbindings.HOST_ID,),
**host_arg) as port:
port_id = port['port']['id']
# Check a response of create_port
self._check_response_portbindings_host(port['port'])
# Check a response of get_port
ctx = context.get_admin_context()
port = self._show('ports', port_id, neutron_context=ctx)['port']
self._check_response_portbindings_host(port)
# By default user is admin - now test non admin user
ctx = context.Context(user_id=None,
tenant_id=self._tenant_id,
is_admin=False,
read_deleted="no")
non_admin_port = self._show(
'ports', port_id, neutron_context=ctx)['port']
self._check_response_no_portbindings_host(non_admin_port)
def test_ports_vif_host(self):
cfg.CONF.set_default('allow_overlapping_ips', True)
host_arg = {portbindings.HOST_ID: self.hostname}
with contextlib.nested(
self.port(name='name1',
arg_list=(portbindings.HOST_ID,),
**host_arg),
self.port(name='name2')):
ctx = context.get_admin_context()
ports = self._list('ports', neutron_context=ctx)['ports']
self.assertEqual(2, len(ports))
for port in ports:
if port['name'] == 'name1':
self._check_response_portbindings_host(port)
else:
self.assertFalse(port[portbindings.HOST_ID])
# By default user is admin - now test non admin user
ctx = context.Context(user_id=None,
tenant_id=self._tenant_id,
is_admin=False,
read_deleted="no")
ports = self._list('ports', neutron_context=ctx)['ports']
self.assertEqual(2, len(ports))
for non_admin_port in ports:
self._check_response_no_portbindings_host(non_admin_port)
def test_ports_vif_host_update(self):
cfg.CONF.set_default('allow_overlapping_ips', True)
host_arg = {portbindings.HOST_ID: self.hostname}
with contextlib.nested(
self.port(name='name1',
arg_list=(portbindings.HOST_ID,),
**host_arg),
self.port(name='name2')) as (port1, port2):
data = {'port': {portbindings.HOST_ID: 'testhosttemp'}}
req = self.new_update_request('ports', data, port1['port']['id'])
req.get_response(self.api)
req = self.new_update_request('ports', data, port2['port']['id'])
ctx = context.get_admin_context()
req.get_response(self.api)
ports = self._list('ports', neutron_context=ctx)['ports']
self.assertEqual(2, len(ports))
for port in ports:
self.assertEqual('testhosttemp', port[portbindings.HOST_ID])
def test_ports_vif_non_host_update(self):
host_arg = {portbindings.HOST_ID: self.hostname}
with self.port(name='name', arg_list=(portbindings.HOST_ID,),
**host_arg) as port:
data = {'port': {'admin_state_up': False}}
req = self.new_update_request('ports', data, port['port']['id'])
res = self.deserialize(self.fmt, req.get_response(self.api))
self.assertEqual(port['port'][portbindings.HOST_ID],
res['port'][portbindings.HOST_ID])
def test_ports_vif_non_host_update_when_host_null(self):
with self.port() as port:
data = {'port': {'admin_state_up': False}}
req = self.new_update_request('ports', data, port['port']['id'])
res = self.deserialize(self.fmt, req.get_response(self.api))
self.assertEqual(port['port'][portbindings.HOST_ID],
res['port'][portbindings.HOST_ID])
def test_ports_vif_host_list(self):
cfg.CONF.set_default('allow_overlapping_ips', True)
host_arg = {portbindings.HOST_ID: self.hostname}
with contextlib.nested(
self.port(name='name1',
arg_list=(portbindings.HOST_ID,),
**host_arg),
self.port(name='name2'),
self.port(name='name3',
arg_list=(portbindings.HOST_ID,),
**host_arg),) as (port1, _port2, port3):
self._test_list_resources(
'port', (port1, port3),
query_params='%s=%s' % (portbindings.HOST_ID, self.hostname))
class PortBindingsVnicTestCaseMixin(object):
fmt = 'json'
vnic_type = portbindings.VNIC_NORMAL
def _check_response_portbindings_vnic_type(self, port):
self.assertIn('status', port)
self.assertEqual(port[portbindings.VNIC_TYPE], self.vnic_type)
def test_port_vnic_type_non_admin(self):
with self.network(set_context=True,
tenant_id='test') as net1:
with self.subnet(network=net1) as subnet1:
vnic_arg = {portbindings.VNIC_TYPE: self.vnic_type}
with self.port(subnet=subnet1,
expected_res_status=httplib.CREATED,
arg_list=(portbindings.VNIC_TYPE,),
set_context=True,
tenant_id='test',
**vnic_arg) as port:
# Check a response of create_port
self._check_response_portbindings_vnic_type(port['port'])
def test_port_vnic_type(self):
vnic_arg = {portbindings.VNIC_TYPE: self.vnic_type}
with self.port(name='name', arg_list=(portbindings.VNIC_TYPE,),
**vnic_arg) as port:
port_id = port['port']['id']
# Check a response of create_port
self._check_response_portbindings_vnic_type(port['port'])
# Check a response of get_port
ctx = context.get_admin_context()
port = self._show('ports', port_id, neutron_context=ctx)['port']
self._check_response_portbindings_vnic_type(port)
# By default user is admin - now test non admin user
ctx = context.Context(user_id=None,
tenant_id=self._tenant_id,
is_admin=False,
read_deleted="no")
non_admin_port = self._show(
'ports', port_id, neutron_context=ctx)['port']
self._check_response_portbindings_vnic_type(non_admin_port)
def test_ports_vnic_type(self):
cfg.CONF.set_default('allow_overlapping_ips', True)
vnic_arg = {portbindings.VNIC_TYPE: self.vnic_type}
with contextlib.nested(
self.port(name='name1',
arg_list=(portbindings.VNIC_TYPE,),
**vnic_arg),
self.port(name='name2')):
ctx = context.get_admin_context()
ports = self._list('ports', neutron_context=ctx)['ports']
self.assertEqual(2, len(ports))
for port in ports:
if port['name'] == 'name1':
self._check_response_portbindings_vnic_type(port)
else:
self.assertEqual(portbindings.VNIC_NORMAL,
port[portbindings.VNIC_TYPE])
# By default user is admin - now test non admin user
ctx = context.Context(user_id=None,
tenant_id=self._tenant_id,
is_admin=False,
read_deleted="no")
ports = self._list('ports', neutron_context=ctx)['ports']
self.assertEqual(2, len(ports))
for non_admin_port in ports:
self._check_response_portbindings_vnic_type(non_admin_port)
def test_ports_vnic_type_list(self):
cfg.CONF.set_default('allow_overlapping_ips', True)
vnic_arg = {portbindings.VNIC_TYPE: self.vnic_type}
with contextlib.nested(
self.port(name='name1',
arg_list=(portbindings.VNIC_TYPE,),
**vnic_arg),
self.port(name='name2'),
self.port(name='name3',
arg_list=(portbindings.VNIC_TYPE,),
**vnic_arg),) as (port1, port2, port3):
self._test_list_resources(
'port', (port1, port2, port3),
query_params='%s=%s' % (portbindings.VNIC_TYPE,
self.vnic_type))
| 46.405836 | 78 | 0.582338 |
import contextlib
import httplib
from oslo_config import cfg
from webob import exc
from neutron import context
from neutron.extensions import portbindings
from neutron import manager
from neutron.tests.unit.db import test_db_base_plugin_v2
class PortBindingsTestCase(test_db_base_plugin_v2.NeutronDbPluginV2TestCase):
VIF_TYPE = portbindings.VIF_TYPE_OTHER
VIF_DETAILS = None
def _check_response_portbindings(self, port):
self.assertEqual(port[portbindings.VIF_TYPE], self.VIF_TYPE)
if self.VIF_TYPE not in [portbindings.VIF_TYPE_UNBOUND,
portbindings.VIF_TYPE_BINDING_FAILED]:
if self.VIF_DETAILS is None:
expected = getattr(self, 'HAS_PORT_FILTER', False)
vif_details = port[portbindings.VIF_DETAILS]
port_filter = vif_details[portbindings.CAP_PORT_FILTER]
self.assertEqual(expected, port_filter)
return
self.assertEqual(self.VIF_DETAILS, port[portbindings.VIF_DETAILS])
def _check_response_no_portbindings(self, port):
self.assertIn('status', port)
self.assertNotIn(portbindings.VIF_TYPE, port)
self.assertNotIn(portbindings.VIF_DETAILS, port)
def _get_non_admin_context(self):
return context.Context(user_id=None,
tenant_id=self._tenant_id,
is_admin=False,
read_deleted="no")
def test_port_vif_details(self):
with self.port(name='name') as port:
port_id = port['port']['id']
self._check_response_portbindings(port['port'])
ctx = context.get_admin_context()
port = self._show('ports', port_id, neutron_context=ctx)['port']
self._check_response_portbindings(port)
ctx = self._get_non_admin_context()
non_admin_port = self._show(
'ports', port_id, neutron_context=ctx)['port']
self._check_response_no_portbindings(non_admin_port)
def test_ports_vif_details(self):
plugin = manager.NeutronManager.get_plugin()
cfg.CONF.set_default('allow_overlapping_ips', True)
with contextlib.nested(self.port(), self.port()):
ctx = context.get_admin_context()
ports = plugin.get_ports(ctx)
self.assertEqual(len(ports), 2)
for port in ports:
self._check_response_portbindings(port)
ctx = self._get_non_admin_context()
ports = self._list('ports', neutron_context=ctx)['ports']
self.assertEqual(len(ports), 2)
for non_admin_port in ports:
self._check_response_no_portbindings(non_admin_port)
def _check_port_binding_profile(self, port, profile=None):
self.assertIn('id', port)
def _test_create_port_binding_profile(self, profile):
profile_arg = {portbindings.PROFILE: profile}
with self.port(arg_list=(portbindings.PROFILE,),
**profile_arg) as port:
port_id = port['port']['id']
self._check_port_binding_profile(port['port'], profile)
port = self._show('ports', port_id)
self._check_port_binding_profile(port['port'], profile)
def test_create_port_binding_profile_none(self):
self._test_create_port_binding_profile(None)
def test_create_port_binding_profile_with_empty_dict(self):
self._test_create_port_binding_profile({})
def _test_update_port_binding_profile(self, profile):
profile_arg = {portbindings.PROFILE: profile}
with self.port() as port:
self._check_port_binding_profile(port['port'])
port_id = port['port']['id']
ctx = context.get_admin_context()
port = self._update('ports', port_id, {'port': profile_arg},
neutron_context=ctx)['port']
self._check_port_binding_profile(port, profile)
port = self._show('ports', port_id)['port']
self._check_port_binding_profile(port, profile)
def test_update_port_binding_profile_none(self):
self._test_update_port_binding_profile(None)
def test_update_port_binding_profile_with_empty_dict(self):
self._test_update_port_binding_profile({})
def test_port_create_portinfo_non_admin(self):
profile_arg = {portbindings.PROFILE: {'dummy': 'dummy'}}
with self.network(set_context=True, tenant_id='test') as net1:
with self.subnet(network=net1) as subnet1:
with self.port(subnet=subnet1,
set_context=True, tenant_id='test'):
pass
try:
with self.port(subnet=subnet1,
expected_res_status=403,
arg_list=(portbindings.PROFILE,),
set_context=True, tenant_id='test',
**profile_arg):
pass
except exc.HTTPClientError:
pass
def test_port_update_portinfo_non_admin(self):
profile_arg = {portbindings.PROFILE: {'dummy': 'dummy'}}
with self.network() as net1:
with self.subnet(network=net1) as subnet1:
with self.port(subnet=subnet1) as port:
port_id = port['port']['id']
ctx = self._get_non_admin_context()
port = self._update('ports', port_id,
{'port': profile_arg},
expected_code=exc.HTTPForbidden.code,
neutron_context=ctx)
class PortBindingsHostTestCaseMixin(object):
fmt = 'json'
hostname = 'testhost'
def _check_response_portbindings_host(self, port):
self.assertEqual(port[portbindings.HOST_ID], self.hostname)
def _check_response_no_portbindings_host(self, port):
self.assertIn('status', port)
self.assertNotIn(portbindings.HOST_ID, port)
def test_port_vif_non_admin(self):
with self.network(set_context=True,
tenant_id='test') as net1:
with self.subnet(network=net1) as subnet1:
host_arg = {portbindings.HOST_ID: self.hostname}
try:
with self.port(subnet=subnet1,
expected_res_status=403,
arg_list=(portbindings.HOST_ID,),
set_context=True,
tenant_id='test',
**host_arg):
pass
except exc.HTTPClientError:
pass
def test_port_vif_host(self):
host_arg = {portbindings.HOST_ID: self.hostname}
with self.port(name='name', arg_list=(portbindings.HOST_ID,),
**host_arg) as port:
port_id = port['port']['id']
self._check_response_portbindings_host(port['port'])
ctx = context.get_admin_context()
port = self._show('ports', port_id, neutron_context=ctx)['port']
self._check_response_portbindings_host(port)
ctx = context.Context(user_id=None,
tenant_id=self._tenant_id,
is_admin=False,
read_deleted="no")
non_admin_port = self._show(
'ports', port_id, neutron_context=ctx)['port']
self._check_response_no_portbindings_host(non_admin_port)
def test_ports_vif_host(self):
cfg.CONF.set_default('allow_overlapping_ips', True)
host_arg = {portbindings.HOST_ID: self.hostname}
with contextlib.nested(
self.port(name='name1',
arg_list=(portbindings.HOST_ID,),
**host_arg),
self.port(name='name2')):
ctx = context.get_admin_context()
ports = self._list('ports', neutron_context=ctx)['ports']
self.assertEqual(2, len(ports))
for port in ports:
if port['name'] == 'name1':
self._check_response_portbindings_host(port)
else:
self.assertFalse(port[portbindings.HOST_ID])
ctx = context.Context(user_id=None,
tenant_id=self._tenant_id,
is_admin=False,
read_deleted="no")
ports = self._list('ports', neutron_context=ctx)['ports']
self.assertEqual(2, len(ports))
for non_admin_port in ports:
self._check_response_no_portbindings_host(non_admin_port)
def test_ports_vif_host_update(self):
cfg.CONF.set_default('allow_overlapping_ips', True)
host_arg = {portbindings.HOST_ID: self.hostname}
with contextlib.nested(
self.port(name='name1',
arg_list=(portbindings.HOST_ID,),
**host_arg),
self.port(name='name2')) as (port1, port2):
data = {'port': {portbindings.HOST_ID: 'testhosttemp'}}
req = self.new_update_request('ports', data, port1['port']['id'])
req.get_response(self.api)
req = self.new_update_request('ports', data, port2['port']['id'])
ctx = context.get_admin_context()
req.get_response(self.api)
ports = self._list('ports', neutron_context=ctx)['ports']
self.assertEqual(2, len(ports))
for port in ports:
self.assertEqual('testhosttemp', port[portbindings.HOST_ID])
def test_ports_vif_non_host_update(self):
host_arg = {portbindings.HOST_ID: self.hostname}
with self.port(name='name', arg_list=(portbindings.HOST_ID,),
**host_arg) as port:
data = {'port': {'admin_state_up': False}}
req = self.new_update_request('ports', data, port['port']['id'])
res = self.deserialize(self.fmt, req.get_response(self.api))
self.assertEqual(port['port'][portbindings.HOST_ID],
res['port'][portbindings.HOST_ID])
def test_ports_vif_non_host_update_when_host_null(self):
with self.port() as port:
data = {'port': {'admin_state_up': False}}
req = self.new_update_request('ports', data, port['port']['id'])
res = self.deserialize(self.fmt, req.get_response(self.api))
self.assertEqual(port['port'][portbindings.HOST_ID],
res['port'][portbindings.HOST_ID])
def test_ports_vif_host_list(self):
cfg.CONF.set_default('allow_overlapping_ips', True)
host_arg = {portbindings.HOST_ID: self.hostname}
with contextlib.nested(
self.port(name='name1',
arg_list=(portbindings.HOST_ID,),
**host_arg),
self.port(name='name2'),
self.port(name='name3',
arg_list=(portbindings.HOST_ID,),
**host_arg),) as (port1, _port2, port3):
self._test_list_resources(
'port', (port1, port3),
query_params='%s=%s' % (portbindings.HOST_ID, self.hostname))
class PortBindingsVnicTestCaseMixin(object):
fmt = 'json'
vnic_type = portbindings.VNIC_NORMAL
def _check_response_portbindings_vnic_type(self, port):
self.assertIn('status', port)
self.assertEqual(port[portbindings.VNIC_TYPE], self.vnic_type)
def test_port_vnic_type_non_admin(self):
with self.network(set_context=True,
tenant_id='test') as net1:
with self.subnet(network=net1) as subnet1:
vnic_arg = {portbindings.VNIC_TYPE: self.vnic_type}
with self.port(subnet=subnet1,
expected_res_status=httplib.CREATED,
arg_list=(portbindings.VNIC_TYPE,),
set_context=True,
tenant_id='test',
**vnic_arg) as port:
self._check_response_portbindings_vnic_type(port['port'])
def test_port_vnic_type(self):
vnic_arg = {portbindings.VNIC_TYPE: self.vnic_type}
with self.port(name='name', arg_list=(portbindings.VNIC_TYPE,),
**vnic_arg) as port:
port_id = port['port']['id']
self._check_response_portbindings_vnic_type(port['port'])
ctx = context.get_admin_context()
port = self._show('ports', port_id, neutron_context=ctx)['port']
self._check_response_portbindings_vnic_type(port)
ctx = context.Context(user_id=None,
tenant_id=self._tenant_id,
is_admin=False,
read_deleted="no")
non_admin_port = self._show(
'ports', port_id, neutron_context=ctx)['port']
self._check_response_portbindings_vnic_type(non_admin_port)
def test_ports_vnic_type(self):
cfg.CONF.set_default('allow_overlapping_ips', True)
vnic_arg = {portbindings.VNIC_TYPE: self.vnic_type}
with contextlib.nested(
self.port(name='name1',
arg_list=(portbindings.VNIC_TYPE,),
**vnic_arg),
self.port(name='name2')):
ctx = context.get_admin_context()
ports = self._list('ports', neutron_context=ctx)['ports']
self.assertEqual(2, len(ports))
for port in ports:
if port['name'] == 'name1':
self._check_response_portbindings_vnic_type(port)
else:
self.assertEqual(portbindings.VNIC_NORMAL,
port[portbindings.VNIC_TYPE])
ctx = context.Context(user_id=None,
tenant_id=self._tenant_id,
is_admin=False,
read_deleted="no")
ports = self._list('ports', neutron_context=ctx)['ports']
self.assertEqual(2, len(ports))
for non_admin_port in ports:
self._check_response_portbindings_vnic_type(non_admin_port)
def test_ports_vnic_type_list(self):
cfg.CONF.set_default('allow_overlapping_ips', True)
vnic_arg = {portbindings.VNIC_TYPE: self.vnic_type}
with contextlib.nested(
self.port(name='name1',
arg_list=(portbindings.VNIC_TYPE,),
**vnic_arg),
self.port(name='name2'),
self.port(name='name3',
arg_list=(portbindings.VNIC_TYPE,),
**vnic_arg),) as (port1, port2, port3):
self._test_list_resources(
'port', (port1, port2, port3),
query_params='%s=%s' % (portbindings.VNIC_TYPE,
self.vnic_type))
| true | true |
1c2dade172981cc31aa1caf156e345e1669c48d6 | 1,533 | py | Python | simulation/utils/machine_learning/data/rosbag_to_video.py | KITcar-Team/kitcar-gazebo-simulation | 8a9438b5a24c288721ae0302889fe55e26046310 | [
"MIT"
] | 13 | 2020-06-30T17:18:28.000Z | 2021-07-20T16:55:35.000Z | simulation/utils/machine_learning/data/rosbag_to_video.py | KITcar-Team/kitcar-gazebo-simulation | 8a9438b5a24c288721ae0302889fe55e26046310 | [
"MIT"
] | 1 | 2020-11-10T20:15:42.000Z | 2020-12-25T18:27:56.000Z | simulation/utils/machine_learning/data/rosbag_to_video.py | KITcar-Team/kitcar-gazebo-simulation | 8a9438b5a24c288721ae0302889fe55e26046310 | [
"MIT"
] | 3 | 2020-07-20T09:09:08.000Z | 2021-07-20T17:00:37.000Z | import argparse
import os
import shutil
from simulation.utils.machine_learning.data.images_to_video import images_to_video
from simulation.utils.machine_learning.data.rosbag_to_images import rosbag_to_images
def rosbag_to_video(rosbag_dir: str, output_dir: str, image_topic: str):
os.makedirs(output_dir, exist_ok=True)
for root, dirs, files in os.walk(rosbag_dir):
for name in files:
if not name.lower().endswith(".bag"):
continue
input_file_path = os.path.join(root, name)
rosbag_to_images(input_file_path, os.path.join(rosbag_dir, "tmp"), image_topic)
output_file_path = os.path.join(output_dir, name.replace(".bag", ".mp4"))
images_to_video(
os.path.abspath(os.path.join(rosbag_dir, "tmp/*.png")),
output_file_path,
use_glob=True,
)
shutil.rmtree(os.path.join(rosbag_dir, "tmp"))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Rosbags to Videos")
parser.add_argument(
"--rosbag_dir",
type=str,
required=True,
help="directory of all rosbags",
)
parser.add_argument(
"--output_dir",
type=str,
required=True,
help="the output directory for all videos",
)
parser.add_argument("--image_topic", default="/camera/image_raw", help="Image topic.")
args = parser.parse_args()
rosbag_to_video(args.rosbag_dir, args.output_dir, args.image_topic)
| 32.617021 | 91 | 0.64775 | import argparse
import os
import shutil
from simulation.utils.machine_learning.data.images_to_video import images_to_video
from simulation.utils.machine_learning.data.rosbag_to_images import rosbag_to_images
def rosbag_to_video(rosbag_dir: str, output_dir: str, image_topic: str):
os.makedirs(output_dir, exist_ok=True)
for root, dirs, files in os.walk(rosbag_dir):
for name in files:
if not name.lower().endswith(".bag"):
continue
input_file_path = os.path.join(root, name)
rosbag_to_images(input_file_path, os.path.join(rosbag_dir, "tmp"), image_topic)
output_file_path = os.path.join(output_dir, name.replace(".bag", ".mp4"))
images_to_video(
os.path.abspath(os.path.join(rosbag_dir, "tmp/*.png")),
output_file_path,
use_glob=True,
)
shutil.rmtree(os.path.join(rosbag_dir, "tmp"))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Rosbags to Videos")
parser.add_argument(
"--rosbag_dir",
type=str,
required=True,
help="directory of all rosbags",
)
parser.add_argument(
"--output_dir",
type=str,
required=True,
help="the output directory for all videos",
)
parser.add_argument("--image_topic", default="/camera/image_raw", help="Image topic.")
args = parser.parse_args()
rosbag_to_video(args.rosbag_dir, args.output_dir, args.image_topic)
| true | true |
1c2dae3786cbef4c0e87da202d359b38ed6a331b | 242 | py | Python | application/templatetags/makeurl.py | amarlearning/Footstep | 557beda097834a031fa2f114bad5de261c7daf95 | [
"MIT"
] | null | null | null | application/templatetags/makeurl.py | amarlearning/Footstep | 557beda097834a031fa2f114bad5de261c7daf95 | [
"MIT"
] | 2 | 2017-05-12T14:38:01.000Z | 2017-05-18T13:25:35.000Z | application/templatetags/makeurl.py | amarlearning/Footstep | 557beda097834a031fa2f114bad5de261c7daf95 | [
"MIT"
] | null | null | null | from django import template
register = template.Library()
@register.filter
def makeurl(value, args):
string = value.replace("api.","")
string = string.replace("repos/", "")
string = string + '/tree/' +str(args)
return string | 24.2 | 41 | 0.661157 | from django import template
register = template.Library()
@register.filter
def makeurl(value, args):
string = value.replace("api.","")
string = string.replace("repos/", "")
string = string + '/tree/' +str(args)
return string | true | true |
1c2daed7c883679b17d14291ec3c2b8b92f0c669 | 20,263 | py | Python | sympy/functions/special/zeta_functions.py | ianmasc/sympy | f089bdc70cfa1e2aa6ecfdb6d568f37bd937bd5e | [
"BSD-3-Clause"
] | 603 | 2020-12-23T13:49:32.000Z | 2022-03-31T23:38:03.000Z | sympy/functions/special/zeta_functions.py | ianmasc/sympy | f089bdc70cfa1e2aa6ecfdb6d568f37bd937bd5e | [
"BSD-3-Clause"
] | 387 | 2020-12-15T14:54:04.000Z | 2022-03-31T07:00:21.000Z | sympy/functions/special/zeta_functions.py | ianmasc/sympy | f089bdc70cfa1e2aa6ecfdb6d568f37bd937bd5e | [
"BSD-3-Clause"
] | 35 | 2021-03-26T03:12:04.000Z | 2022-03-23T10:15:10.000Z | """ Riemann zeta and related function. """
from sympy.core import Function, S, sympify, pi, I
from sympy.core.function import ArgumentIndexError
from sympy.functions.combinatorial.numbers import bernoulli, factorial, harmonic
from sympy.functions.elementary.exponential import log, exp_polar
from sympy.functions.elementary.miscellaneous import sqrt
###############################################################################
###################### LERCH TRANSCENDENT #####################################
###############################################################################
class lerchphi(Function):
r"""
Lerch transcendent (Lerch phi function).
Explanation
===========
For $\operatorname{Re}(a) > 0$, $|z| < 1$ and $s \in \mathbb{C}$, the
Lerch transcendent is defined as
.. math :: \Phi(z, s, a) = \sum_{n=0}^\infty \frac{z^n}{(n + a)^s},
where the standard branch of the argument is used for $n + a$,
and by analytic continuation for other values of the parameters.
A commonly used related function is the Lerch zeta function, defined by
.. math:: L(q, s, a) = \Phi(e^{2\pi i q}, s, a).
**Analytic Continuation and Branching Behavior**
It can be shown that
.. math:: \Phi(z, s, a) = z\Phi(z, s, a+1) + a^{-s}.
This provides the analytic continuation to $\operatorname{Re}(a) \le 0$.
Assume now $\operatorname{Re}(a) > 0$. The integral representation
.. math:: \Phi_0(z, s, a) = \int_0^\infty \frac{t^{s-1} e^{-at}}{1 - ze^{-t}}
\frac{\mathrm{d}t}{\Gamma(s)}
provides an analytic continuation to $\mathbb{C} - [1, \infty)$.
Finally, for $x \in (1, \infty)$ we find
.. math:: \lim_{\epsilon \to 0^+} \Phi_0(x + i\epsilon, s, a)
-\lim_{\epsilon \to 0^+} \Phi_0(x - i\epsilon, s, a)
= \frac{2\pi i \log^{s-1}{x}}{x^a \Gamma(s)},
using the standard branch for both $\log{x}$ and
$\log{\log{x}}$ (a branch of $\log{\log{x}}$ is needed to
evaluate $\log{x}^{s-1}$).
This concludes the analytic continuation. The Lerch transcendent is thus
branched at $z \in \{0, 1, \infty\}$ and
$a \in \mathbb{Z}_{\le 0}$. For fixed $z, a$ outside these
branch points, it is an entire function of $s$.
Examples
========
The Lerch transcendent is a fairly general function, for this reason it does
not automatically evaluate to simpler functions. Use ``expand_func()`` to
achieve this.
If $z=1$, the Lerch transcendent reduces to the Hurwitz zeta function:
>>> from sympy import lerchphi, expand_func
>>> from sympy.abc import z, s, a
>>> expand_func(lerchphi(1, s, a))
zeta(s, a)
More generally, if $z$ is a root of unity, the Lerch transcendent
reduces to a sum of Hurwitz zeta functions:
>>> expand_func(lerchphi(-1, s, a))
2**(-s)*zeta(s, a/2) - 2**(-s)*zeta(s, a/2 + 1/2)
If $a=1$, the Lerch transcendent reduces to the polylogarithm:
>>> expand_func(lerchphi(z, s, 1))
polylog(s, z)/z
More generally, if $a$ is rational, the Lerch transcendent reduces
to a sum of polylogarithms:
>>> from sympy import S
>>> expand_func(lerchphi(z, s, S(1)/2))
2**(s - 1)*(polylog(s, sqrt(z))/sqrt(z) -
polylog(s, sqrt(z)*exp_polar(I*pi))/sqrt(z))
>>> expand_func(lerchphi(z, s, S(3)/2))
-2**s/z + 2**(s - 1)*(polylog(s, sqrt(z))/sqrt(z) -
polylog(s, sqrt(z)*exp_polar(I*pi))/sqrt(z))/z
The derivatives with respect to $z$ and $a$ can be computed in
closed form:
>>> lerchphi(z, s, a).diff(z)
(-a*lerchphi(z, s, a) + lerchphi(z, s - 1, a))/z
>>> lerchphi(z, s, a).diff(a)
-s*lerchphi(z, s + 1, a)
See Also
========
polylog, zeta
References
==========
.. [1] Bateman, H.; Erdelyi, A. (1953), Higher Transcendental Functions,
Vol. I, New York: McGraw-Hill. Section 1.11.
.. [2] http://dlmf.nist.gov/25.14
.. [3] https://en.wikipedia.org/wiki/Lerch_transcendent
"""
def _eval_expand_func(self, **hints):
from sympy import exp, I, floor, Add, Poly, Dummy, exp_polar, unpolarify
z, s, a = self.args
if z == 1:
return zeta(s, a)
if s.is_Integer and s <= 0:
t = Dummy('t')
p = Poly((t + a)**(-s), t)
start = 1/(1 - t)
res = S.Zero
for c in reversed(p.all_coeffs()):
res += c*start
start = t*start.diff(t)
return res.subs(t, z)
if a.is_Rational:
# See section 18 of
# Kelly B. Roach. Hypergeometric Function Representations.
# In: Proceedings of the 1997 International Symposium on Symbolic and
# Algebraic Computation, pages 205-211, New York, 1997. ACM.
# TODO should something be polarified here?
add = S.Zero
mul = S.One
# First reduce a to the interaval (0, 1]
if a > 1:
n = floor(a)
if n == a:
n -= 1
a -= n
mul = z**(-n)
add = Add(*[-z**(k - n)/(a + k)**s for k in range(n)])
elif a <= 0:
n = floor(-a) + 1
a += n
mul = z**n
add = Add(*[z**(n - 1 - k)/(a - k - 1)**s for k in range(n)])
m, n = S([a.p, a.q])
zet = exp_polar(2*pi*I/n)
root = z**(1/n)
return add + mul*n**(s - 1)*Add(
*[polylog(s, zet**k*root)._eval_expand_func(**hints)
/ (unpolarify(zet)**k*root)**m for k in range(n)])
# TODO use minpoly instead of ad-hoc methods when issue 5888 is fixed
if isinstance(z, exp) and (z.args[0]/(pi*I)).is_Rational or z in [-1, I, -I]:
# TODO reference?
if z == -1:
p, q = S([1, 2])
elif z == I:
p, q = S([1, 4])
elif z == -I:
p, q = S([-1, 4])
else:
arg = z.args[0]/(2*pi*I)
p, q = S([arg.p, arg.q])
return Add(*[exp(2*pi*I*k*p/q)/q**s*zeta(s, (k + a)/q)
for k in range(q)])
return lerchphi(z, s, a)
def fdiff(self, argindex=1):
z, s, a = self.args
if argindex == 3:
return -s*lerchphi(z, s + 1, a)
elif argindex == 1:
return (lerchphi(z, s - 1, a) - a*lerchphi(z, s, a))/z
else:
raise ArgumentIndexError
def _eval_rewrite_helper(self, z, s, a, target):
res = self._eval_expand_func()
if res.has(target):
return res
else:
return self
def _eval_rewrite_as_zeta(self, z, s, a, **kwargs):
return self._eval_rewrite_helper(z, s, a, zeta)
def _eval_rewrite_as_polylog(self, z, s, a, **kwargs):
return self._eval_rewrite_helper(z, s, a, polylog)
###############################################################################
###################### POLYLOGARITHM ##########################################
###############################################################################
class polylog(Function):
    r"""
    Polylogarithm function.

    Explanation
    ===========

    For $|z| < 1$ and $s \in \mathbb{C}$, the polylogarithm is
    defined by

    .. math:: \operatorname{Li}_s(z) = \sum_{n=1}^\infty \frac{z^n}{n^s},

    where the standard branch of the argument is used for $n$. It admits
    an analytic continuation which is branched at $z=1$ (notably not on the
    sheet of initial definition), $z=0$ and $z=\infty$.

    The name polylogarithm comes from the fact that for $s=1$, the
    polylogarithm is related to the ordinary logarithm (see examples), and that

    .. math:: \operatorname{Li}_{s+1}(z) =
              \int_0^z \frac{\operatorname{Li}_s(t)}{t} \mathrm{d}t.

    The polylogarithm is a special case of the Lerch transcendent:

    .. math:: \operatorname{Li}_{s}(z) = z \Phi(z, s, 1).

    Examples
    ========

    For $z \in \{0, 1, -1\}$, the polylogarithm is automatically expressed
    using other functions:

    >>> from sympy import polylog
    >>> from sympy.abc import s
    >>> polylog(s, 0)
    0
    >>> polylog(s, 1)
    zeta(s)
    >>> polylog(s, -1)
    -dirichlet_eta(s)

    If $s$ is a negative integer, $0$ or $1$, the polylogarithm can be
    expressed using elementary functions. This can be done using
    ``expand_func()``:

    >>> from sympy import expand_func
    >>> from sympy.abc import z
    >>> expand_func(polylog(1, z))
    -log(1 - z)
    >>> expand_func(polylog(0, z))
    z/(1 - z)

    The derivative with respect to $z$ can be computed in closed form:

    >>> polylog(s, z).diff(z)
    polylog(s - 1, z)/z

    The polylogarithm can be expressed in terms of the lerch transcendent:

    >>> from sympy import lerchphi
    >>> polylog(s, z).rewrite(lerchphi)
    z*lerchphi(z, s, 1)

    See Also
    ========

    zeta, lerchphi
    """

    @classmethod
    def eval(cls, s, z):
        s, z = sympify((s, z))
        # Special points at which Li_s(z) reduces to other functions.
        if z is S.One:
            return zeta(s)
        elif z is S.NegativeOne:
            return -dirichlet_eta(s)
        elif z is S.Zero:
            return S.Zero
        elif s == 2:
            # Known closed forms of the dilogarithm, including the golden
            # ratio values.
            if z == S.Half:
                return pi**2/12 - log(2)**2/2
            elif z == 2:
                return pi**2/4 - I*pi*log(2)
            elif z == -(sqrt(5) - 1)/2:
                return -pi**2/15 + log((sqrt(5)-1)/2)**2/2
            elif z == -(sqrt(5) + 1)/2:
                return -pi**2/10 - log((sqrt(5)+1)/2)**2
            elif z == (3 - sqrt(5))/2:
                return pi**2/15 - log((sqrt(5)-1)/2)**2
            elif z == (sqrt(5) - 1)/2:
                return pi**2/10 - log((sqrt(5)-1)/2)**2

        # Catch zeros that are not the S.Zero singleton (e.g. symbolic zero).
        if z.is_zero:
            return S.Zero

        # Make an effort to determine if z is 1 to avoid replacing into
        # expression with singularity
        zone = z.equals(S.One)

        if zone:
            return zeta(s)
        elif zone is False:
            # For s = 0 or -1 use explicit formulas to evaluate, but
            # automatically expanding polylog(1, z) to -log(1-z) seems
            # undesirable for summation methods based on hypergeometric
            # functions
            if s is S.Zero:
                return z/(1 - z)
            elif s is S.NegativeOne:
                return z/(1 - z)**2
            if s.is_zero:
                return z/(1 - z)

        # polylog is branched, but not over the unit disk
        from sympy.functions.elementary.complexes import (Abs, unpolarify,
            polar_lift)
        if z.has(exp_polar, polar_lift) and (zone or (Abs(z) <= S.One) == True):
            return cls(s, unpolarify(z))

    def fdiff(self, argindex=1):
        """Derivative w.r.t. ``z``; no closed form exists w.r.t. ``s``."""
        s, z = self.args
        if argindex == 2:
            # d/dz Li_s(z) = Li_{s-1}(z)/z
            return polylog(s - 1, z)/z
        # Pass (self, argindex) so ArgumentIndexError can render its message;
        # raising the bare class left it with no args for __str__.
        raise ArgumentIndexError(self, argindex)

    def _eval_rewrite_as_lerchphi(self, s, z, **kwargs):
        # Li_s(z) = z * Phi(z, s, 1)
        return z*lerchphi(z, s, 1)

    def _eval_expand_func(self, **hints):
        from sympy import log, expand_mul, Dummy
        s, z = self.args
        if s == 1:
            return -log(1 - z)
        if s.is_Integer and s <= 0:
            # For non-positive integer s, Li_s(z) is rational in z and is
            # obtained by repeatedly applying z*d/dz to z/(1 - z).
            u = Dummy('u')
            start = u/(1 - u)
            for _ in range(-s):
                start = u*start.diff(u)
            return expand_mul(start).subs(u, z)
        return polylog(s, z)

    def _eval_is_zero(self):
        # Li_s(0) = 0 for every s.
        z = self.args[1]
        if z.is_zero:
            return True
###############################################################################
###################### HURWITZ GENERALIZED ZETA FUNCTION ######################
###############################################################################
class zeta(Function):
    r"""
    Hurwitz zeta function (or Riemann zeta function).

    Explanation
    ===========

    For $\operatorname{Re}(a) > 0$ and $\operatorname{Re}(s) > 1$, this
    function is defined as

    .. math:: \zeta(s, a) = \sum_{n=0}^\infty \frac{1}{(n + a)^s},

    where the standard choice of argument for $n + a$ is used. For fixed
    $a$ with $\operatorname{Re}(a) > 0$ the Hurwitz zeta function admits a
    meromorphic continuation to all of $\mathbb{C}$, it is an unbranched
    function with a simple pole at $s = 1$.

    Analytic continuation to other $a$ is possible under some circumstances,
    but this is not typically done.

    The Hurwitz zeta function is a special case of the Lerch transcendent:

    .. math:: \zeta(s, a) = \Phi(1, s, a).

    This formula defines an analytic continuation for all possible values of
    $s$ and $a$ (also $\operatorname{Re}(a) < 0$), see the documentation of
    :class:`lerchphi` for a description of the branching behavior.

    If no value is passed for $a$, this function assumes a default value
    of $a = 1$, yielding the Riemann zeta function.

    Examples
    ========

    For $a = 1$ the Hurwitz zeta function reduces to the famous Riemann
    zeta function:

    .. math:: \zeta(s, 1) = \zeta(s) = \sum_{n=1}^\infty \frac{1}{n^s}.

    >>> from sympy import zeta
    >>> from sympy.abc import s
    >>> zeta(s, 1)
    zeta(s)
    >>> zeta(s)
    zeta(s)

    The Riemann zeta function can also be expressed using the Dirichlet eta
    function:

    >>> from sympy import dirichlet_eta
    >>> zeta(s).rewrite(dirichlet_eta)
    dirichlet_eta(s)/(1 - 2**(1 - s))

    The Riemann zeta function at positive even integer and negative odd integer
    values is related to the Bernoulli numbers:

    >>> zeta(2)
    pi**2/6
    >>> zeta(4)
    pi**4/90
    >>> zeta(-1)
    -1/12

    The specific formulae are:

    .. math:: \zeta(2n) = (-1)^{n+1} \frac{B_{2n} (2\pi)^{2n}}{2(2n)!}
    .. math:: \zeta(-n) = -\frac{B_{n+1}}{n+1}

    At negative even integers the Riemann zeta function is zero:

    >>> zeta(-4)
    0

    No closed-form expressions are known at positive odd integers, but
    numerical evaluation is possible:

    >>> zeta(3).n()
    1.20205690315959

    The derivative of $\zeta(s, a)$ with respect to $a$ can be computed:

    >>> from sympy.abc import a
    >>> zeta(s, a).diff(a)
    -s*zeta(s + 1, a)

    However the derivative with respect to $s$ has no useful closed form
    expression:

    >>> zeta(s, a).diff(s)
    Derivative(zeta(s, a), s)

    The Hurwitz zeta function can be expressed in terms of the Lerch
    transcendent, :class:`~.lerchphi`:

    >>> from sympy import lerchphi
    >>> zeta(s, a).rewrite(lerchphi)
    lerchphi(1, s, a)

    See Also
    ========

    dirichlet_eta, lerchphi, polylog

    References
    ==========

    .. [1] http://dlmf.nist.gov/25.11
    .. [2] https://en.wikipedia.org/wiki/Hurwitz_zeta_function
    """

    @classmethod
    def eval(cls, z, a_=None):
        # ``a_`` keeps track of whether the caller supplied ``a`` explicitly
        # so that an explicit zeta(s, 1) canonicalizes to zeta(s).
        if a_ is None:
            z, a = list(map(sympify, (z, 1)))
        else:
            z, a = list(map(sympify, (z, a_)))

        if a.is_Number:
            if a is S.NaN:
                return S.NaN
            elif a is S.One and a_ is not None:
                return cls(z)
            # TODO Should a == 0 return S.NaN as well?

        if z.is_Number:
            if z is S.NaN:
                return S.NaN
            elif z is S.Infinity:
                return S.One
            elif z.is_zero:
                return S.Half - a
            elif z is S.One:
                # Simple pole of the (Hurwitz) zeta function.
                return S.ComplexInfinity

        if z.is_integer:
            if a.is_Integer:
                # Bernoulli-number formulas; local renamed from ``zeta`` so
                # it no longer shadows the class inside its own classmethod.
                if z.is_negative:
                    zeta_val = (-1)**z * bernoulli(-z + 1)/(-z + 1)
                elif z.is_even and z.is_positive:
                    B, F = bernoulli(z), factorial(z)
                    zeta_val = ((-1)**(z/2+1) * 2**(z - 1) * B * pi**z) / F
                else:
                    return
                # Shift between zeta(z, a) and zeta(z, 1) via harmonic numbers.
                if a.is_negative:
                    return zeta_val + harmonic(abs(a), z)
                else:
                    return zeta_val - harmonic(a - 1, z)

        # Symbolic-zero fallthrough (z.is_zero known without z being a Number).
        if z.is_zero:
            return S.Half - a

    def _eval_rewrite_as_dirichlet_eta(self, s, a=1, **kwargs):
        # Only the Riemann case (a = 1) has a Dirichlet-eta representation.
        if a != 1:
            return self
        s = self.args[0]
        return dirichlet_eta(s)/(1 - 2**(1 - s))

    def _eval_rewrite_as_lerchphi(self, s, a=1, **kwargs):
        # zeta(s, a) = Phi(1, s, a)
        return lerchphi(1, s, a)

    def _eval_is_finite(self):
        # Finite everywhere except at the simple pole s = 1.
        arg_is_one = (self.args[0] - 1).is_zero
        if arg_is_one is not None:
            return not arg_is_one

    def fdiff(self, argindex=1):
        """Derivative w.r.t. ``a``; no closed form exists w.r.t. ``s``."""
        if len(self.args) == 2:
            s, a = self.args
        else:
            s, a = self.args + (1,)
        if argindex == 2:
            # d/da zeta(s, a) = -s*zeta(s + 1, a)
            return -s*zeta(s + 1, a)
        else:
            # Pass (self, argindex) so the error message can be rendered;
            # raising the bare class breaks ArgumentIndexError.__str__.
            raise ArgumentIndexError(self, argindex)
class dirichlet_eta(Function):
    r"""
    Dirichlet eta function.

    Explanation
    ===========

    For $\operatorname{Re}(s) > 0$, this function is defined as

    .. math:: \eta(s) = \sum_{n=1}^\infty \frac{(-1)^{n-1}}{n^s}.

    It admits a unique analytic continuation to all of $\mathbb{C}$.
    It is an entire, unbranched function.

    Examples
    ========

    The Dirichlet eta function is closely related to the Riemann zeta function:

    >>> from sympy import dirichlet_eta, zeta
    >>> from sympy.abc import s
    >>> dirichlet_eta(s).rewrite(zeta)
    (1 - 2**(1 - s))*zeta(s)

    See Also
    ========

    zeta

    References
    ==========

    .. [1] https://en.wikipedia.org/wiki/Dirichlet_eta_function
    """

    @classmethod
    def eval(cls, s):
        # eta(1) = log(2) is the only point where zeta alone cannot help.
        if s == 1:
            return log(2)
        zeta_s = zeta(s)
        if zeta_s.has(zeta):
            # zeta(s) did not evaluate; stay unevaluated as well.
            return None
        return (1 - 2**(1 - s))*zeta_s

    def _eval_rewrite_as_zeta(self, s, **kwargs):
        # eta(s) = (1 - 2^(1-s)) * zeta(s)
        return zeta(s)*(1 - 2**(1 - s))
class riemann_xi(Function):
    r"""
    Riemann Xi function.

    Examples
    ========

    The Riemann Xi function is closely related to the Riemann zeta function.
    The zeros of Riemann Xi function are precisely the non-trivial zeros
    of the zeta function.

    >>> from sympy import riemann_xi, zeta
    >>> from sympy.abc import s
    >>> riemann_xi(s).rewrite(zeta)
    pi**(-s/2)*s*(s - 1)*gamma(s/2)*zeta(s)/2

    References
    ==========

    .. [1] https://en.wikipedia.org/wiki/Riemann_Xi_function
    """

    @classmethod
    def eval(cls, s):
        from sympy import gamma
        # xi(0) = xi(1) = 1/2 by the functional equation.
        if s is S.Zero or s is S.One:
            return S.Half
        zeta_s = zeta(s)
        if isinstance(zeta_s, zeta):
            # zeta(s) stayed symbolic; keep riemann_xi unevaluated too.
            return None
        return s*(s - 1)*gamma(s/2)*zeta_s/(2*pi**(s/2))

    def _eval_rewrite_as_zeta(self, s, **kwargs):
        from sympy import gamma
        # xi(s) = s(s-1)/2 * pi^(-s/2) * Gamma(s/2) * zeta(s)
        return s*(s - 1)*gamma(s/2)*zeta(s)/(2*pi**(s/2))
class stieltjes(Function):
    r"""
    Represents Stieltjes constants, $\gamma_{k}$ that occur in
    Laurent Series expansion of the Riemann zeta function.
    Examples
    ========
    >>> from sympy import stieltjes
    >>> from sympy.abc import n, m
    >>> stieltjes(n)
    stieltjes(n)
    The zero'th stieltjes constant:
    >>> stieltjes(0)
    EulerGamma
    >>> stieltjes(0, 1)
    EulerGamma
    For generalized stieltjes constants:
    >>> stieltjes(n, m)
    stieltjes(n, m)
    Constants are only defined for integers >= 0:
    >>> stieltjes(-1)
    zoo
    References
    ==========
    .. [1] https://en.wikipedia.org/wiki/Stieltjes_constants
    """
    @classmethod
    def eval(cls, n, a=None):
        # Evaluate the known special cases; returning None keeps the call
        # unevaluated.
        n = sympify(n)
        if a is not None:
            a = sympify(a)
            if a is S.NaN:
                return S.NaN
            # Generalized constants are undefined for a = 0, -1, -2, ...
            if a.is_Integer and a.is_nonpositive:
                return S.ComplexInfinity
        # Concrete numeric n: must be a non-negative integer.
        if n.is_Number:
            if n is S.NaN:
                return S.NaN
            elif n < 0:
                return S.ComplexInfinity
            elif not n.is_Integer:
                return S.ComplexInfinity
            elif n is S.Zero and a in [None, 1]:
                # gamma_0 is the Euler-Mascheroni constant.
                return S.EulerGamma
        # Symbolic n whose assumptions already pin down the answer.
        if n.is_extended_negative:
            return S.ComplexInfinity
        if n.is_zero and a in [None, 1]:
            return S.EulerGamma
        if n.is_integer == False:
            return S.ComplexInfinity
| 29.452035 | 85 | 0.520061 |
from sympy.core import Function, S, sympify, pi, I
from sympy.core.function import ArgumentIndexError
from sympy.functions.combinatorial.numbers import bernoulli, factorial, harmonic
from sympy.functions.elementary.exponential import log, exp_polar
from sympy.functions.elementary.miscellaneous import sqrt
| true | true |
1c2daff473a571d5bdb482be512b46226cd28954 | 13,338 | py | Python | assemblyline/common/backupmanager.py | spelcha/assemblyline-base | 835446128664084c6a45ad2734a636669eca5ad1 | [
"MIT"
] | 39 | 2020-05-06T02:10:25.000Z | 2022-02-22T00:33:52.000Z | assemblyline/common/backupmanager.py | spelcha/assemblyline-base | 835446128664084c6a45ad2734a636669eca5ad1 | [
"MIT"
] | 186 | 2020-04-17T10:38:47.000Z | 2022-03-30T13:20:52.000Z | assemblyline/common/backupmanager.py | spelcha/assemblyline-base | 835446128664084c6a45ad2734a636669eca5ad1 | [
"MIT"
] | 22 | 2020-04-22T16:00:38.000Z | 2022-02-09T03:06:55.000Z | from __future__ import annotations
import json
import os
import random
import time
import threading
import logging
from typing import Any
from multiprocessing import Process
from assemblyline.common import forge
from assemblyline.common.uid import get_random_id
from assemblyline.odm.models.error import ERROR_TYPES
from assemblyline.remote.datatypes.hash import Hash
from assemblyline.remote.datatypes.queues.named import NamedQueue
# noinspection PyBroadException
def backup_worker(worker_id: str, instance_id: str, working_dir: str):
    """Pull (bucket, key) jobs from the shared Redis work queue, dump each
    document as one JSON line into this worker's part file, and report a
    per-key success/missing/failure message on the done queue.

    A job with ``stop: True`` drains the pool: the first one seen flips this
    worker into stopping mode; any further stop markers are pushed back
    (after a short random sleep) so sibling workers receive their own.
    """
    datastore = forge.get_datastore(archive_access=True)
    worker_queue: NamedQueue[dict[str, Any]] = NamedQueue(f"r-worker-{instance_id}", ttl=1800)
    done_queue: NamedQueue[dict[str, Any]] = NamedQueue(f"r-done-{instance_id}", ttl=1800)
    hash_queue: Hash[str] = Hash(f"r-hash-{instance_id}")
    stopping = False
    with open(os.path.join(working_dir, "backup.part%s" % worker_id), "w+") as backup_file:
        while True:
            data = worker_queue.pop(timeout=1)
            if data is None:
                # Queue momentarily empty; exit only once a stop marker
                # has been received.
                if stopping:
                    break
                continue
            if data.get('stop', False):
                if not stopping:
                    stopping = True
                else:
                    # Already stopping: hand the marker to another worker,
                    # with a small random backoff to avoid a tight loop.
                    time.sleep(round(random.uniform(0.050, 0.250), 3))
                    worker_queue.push(data)
                continue
            missing = False
            success = True
            try:
                to_write = datastore.get_collection(data['bucket_name']).get(data['key'], as_obj=False)
                if to_write:
                    if data.get('follow_keys', False):
                        # Deep backup: enqueue every dependent document; the
                        # shared hash_queue de-duplicates across workers.
                        for bucket, bucket_key, getter in FOLLOW_KEYS.get(data['bucket_name'], []):
                            for key in getter(to_write.get(bucket_key, None)):
                                hash_key = "%s_%s" % (bucket, key)
                                if not hash_queue.exists(hash_key):
                                    hash_queue.add(hash_key, "True")
                                    worker_queue.push({"bucket_name": bucket, "key": key, "follow_keys": True})
                    backup_file.write(json.dumps((data['bucket_name'], data['key'], to_write)) + "\n")
                else:
                    missing = True
            except Exception:
                # Best-effort backup: record the failure and keep going.
                success = False
            done_queue.push({
                "success": success,
                "missing": missing,
                "bucket_name": data['bucket_name'],
                "key": data['key']
            })
    done_queue.push({"stopped": True})
# noinspection PyBroadException
def restore_worker(worker_id: str, instance_id: str, working_dir: str):
    """Replay this worker's backup part file into the datastore, reporting
    one success/failure message per document on the done queue and a final
    ``stopped`` marker when the file is exhausted."""
    datastore = forge.get_datastore(archive_access=True)
    done_queue: NamedQueue[dict[str, Any]] = NamedQueue(f"r-done-{instance_id}", ttl=1800)
    part_path = os.path.join(working_dir, "backup.part%s" % worker_id)
    with open(part_path, "rb") as part_file:
        for raw_line in part_file:
            bucket_name, key, document = json.loads(raw_line)
            restored = True
            try:
                datastore.get_collection(bucket_name).save(key, document)
            except Exception:
                # Best-effort restore: report the failure and keep going.
                restored = False
            done_queue.push({
                "success": restored,
                "missing": False,
                "bucket_name": bucket_name,
                "key": key})
    done_queue.push({"stopped": True})
class DistributedBackup(object):
    """Drive a pool of worker processes (or threads) that back up or restore
    datastore buckets through per-instance Redis queues.

    A reporter thread aggregates the per-key results pushed by the workers
    and logs a summary once every worker has signalled completion.
    """

    def __init__(self, working_dir: str, worker_count: int = 50, spawn_workers: bool = True,
                 use_threading: bool = False, logger: logging.Logger = None):
        self.working_dir = working_dir
        self.datastore = forge.get_datastore(archive_access=True)
        self.logger = logger
        self.plist: list[Process] = []
        self.use_threading = use_threading
        # Unique id so several backups/restores can run side by side.
        self.instance_id = get_random_id()
        self.worker_queue: NamedQueue[dict[str, Any]] = NamedQueue(f"r-worker-{self.instance_id}", ttl=1800)
        self.done_queue: NamedQueue[dict[str, Any]] = NamedQueue(f"r-done-{self.instance_id}", ttl=1800)
        self.hash_queue: Hash[str] = Hash(f"r-hash-{self.instance_id}")
        self.bucket_error: list[str] = []
        self.valid_buckets: list[str] = sorted(list(self.datastore.ds.get_models().keys()))
        self.worker_count = worker_count
        self.spawn_workers = spawn_workers
        # Progress counters maintained by done_thread().
        self.total_count = 0
        self.error_map_count: dict[str, int] = {}
        self.missing_map_count: dict[str, int] = {}
        self.map_count: dict[str, int] = {}
        self.last_time: float = 0
        self.last_count = 0
        self.error_count = 0

    def cleanup(self):
        """Delete the Redis queues and terminate any spawned processes."""
        self.worker_queue.delete()
        self.done_queue.delete()
        self.hash_queue.delete()
        for p in self.plist:
            p.terminate()

    def done_thread(self, title: str):
        """Consume worker result messages until every worker reported
        ``stopped``, then clean up and log a summary titled *title*."""
        t0 = time.time()
        self.last_time = t0

        running_threads = self.worker_count

        while running_threads > 0:
            msg = self.done_queue.pop(timeout=1)

            if msg is None:
                continue

            if "stopped" in msg:
                running_threads -= 1
                continue

            bucket_name = msg.get('bucket_name', 'unknown')

            if msg.get('success', False):
                self.total_count += 1

                if msg.get("missing", False):
                    self.missing_map_count[bucket_name] = self.missing_map_count.get(bucket_name, 0) + 1
                else:
                    self.map_count[bucket_name] = self.map_count.get(bucket_name, 0) + 1

                # Log a throughput line at most every 5 seconds.
                new_t = time.time()
                if (new_t - self.last_time) > 5:
                    if self.logger:
                        self.logger.info("%s (%s at %s keys/sec) ==> %s" %
                                         (self.total_count,
                                          new_t - self.last_time,
                                          int((self.total_count - self.last_count) / (new_t - self.last_time)),
                                          self.map_count))
                    self.last_count = self.total_count
                    self.last_time = new_t
            else:
                self.error_count += 1
                self.error_map_count[bucket_name] = self.error_map_count.get(bucket_name, 0) + 1

        # Cleanup
        self.cleanup()

        summary = ""
        summary += "\n########################\n"
        summary += "####### SUMMARY #######\n"
        summary += "########################\n"
        summary += "%s items - %s errors - %s secs\n\n" % \
                   (self.total_count, self.error_count, time.time() - t0)

        for k, v in self.map_count.items():
            summary += "\t%15s: %s\n" % (k.upper(), v)

        if self.missing_map_count:
            summary += "\n\nMissing data:\n\n"
            for k, v in self.missing_map_count.items():
                summary += "\t%15s: %s\n" % (k.upper(), v)

        if self.error_map_count:
            summary += "\n\nErrors:\n\n"
            for k, v in self.error_map_count.items():
                summary += "\t%15s: %s\n" % (k.upper(), v)

        if self.bucket_error:
            summary += f"\nThese buckets failed to {title.lower()} completely: {self.bucket_error}\n"

        if self.logger:
            self.logger.info(summary)

    # noinspection PyBroadException,PyProtectedMember
    def backup(self, bucket_list: list[str], follow_keys: bool = False, query: str = None):
        """Back up every document of *bucket_list* matching *query* (default
        everything), optionally following dependent keys (deep backup)."""
        if query is None:
            query = 'id:*'

        for bucket in bucket_list:
            if bucket not in self.valid_buckets:
                if self.logger:
                    # logging.warn() is a deprecated alias of warning().
                    self.logger.warning("\n%s is not a valid bucket.\n\n"
                                        "The list of valid buckets is the following:\n\n\t%s\n" %
                                        (bucket.upper(), "\n\t".join(self.valid_buckets)))
                return

        targets = ', '.join(bucket_list)
        try:
            if self.logger:
                self.logger.info("\n-----------------------")
                self.logger.info("----- Data Backup -----")
                self.logger.info("-----------------------")
                self.logger.info(f"              Deep: {follow_keys}")
                self.logger.info(f"           Buckets: {targets}")
                self.logger.info(f"           Workers: {self.worker_count}")
                self.logger.info(f"  Target directory: {self.working_dir}")
                self.logger.info(f"   Filtering query: {query}")

            # Start the workers
            for x in range(self.worker_count):
                if self.use_threading:
                    t = threading.Thread(target=backup_worker, args=(x, self.instance_id, self.working_dir))
                    # Thread.setDaemon() is deprecated; set the attribute.
                    t.daemon = True
                    t.start()
                else:
                    p = Process(target=backup_worker, args=(x, self.instance_id, self.working_dir))
                    p.start()
                    self.plist.append(p)

            # Start done thread
            dt = threading.Thread(target=self.done_thread, args=('Backup',), name="Done thread")
            dt.daemon = True
            dt.start()

            # Process data buckets
            for bucket_name in bucket_list:
                try:
                    collection = self.datastore.get_collection(bucket_name)
                    for item in collection.stream_search(query, fl="id", item_buffer_size=500, as_obj=False):
                        self.worker_queue.push({"bucket_name": bucket_name, "key": item['id'],
                                                "follow_keys": follow_keys})
                except Exception as e:
                    self.cleanup()
                    if self.logger:
                        self.logger.exception(e)
                        self.logger.error("Error occurred while processing bucket %s." % bucket_name)
                    self.bucket_error.append(bucket_name)

            # One stop marker per worker; workers hand extras to each other.
            for _ in range(self.worker_count):
                self.worker_queue.push({"stop": True})

            dt.join()
        except Exception as e:
            if self.logger:
                self.logger.exception(e)

    def restore(self):
        """Restore every backup part file found in the working directory."""
        try:
            if self.logger:
                self.logger.info("\n------------------------")
                self.logger.info("----- Data Restore -----")
                self.logger.info("------------------------")
                self.logger.info(f"           Workers: {self.worker_count}")
                self.logger.info(f"  Target directory: {self.working_dir}")

            for x in range(self.worker_count):
                if self.use_threading:
                    t = threading.Thread(target=restore_worker,
                                         args=(x, self.instance_id, self.working_dir))
                    t.daemon = True
                    t.start()
                else:
                    p = Process(target=restore_worker, args=(x, self.instance_id, self.working_dir))
                    p.start()
                    self.plist.append(p)

            # Start done thread
            dt = threading.Thread(target=self.done_thread, args=('Restore',), name="Done thread")
            dt.daemon = True
            dt.start()

            # Wait for workers to finish
            dt.join()
        except Exception as e:
            if self.logger:
                self.logger.exception(e)
def _string_getter(data) -> list[str]:
if data is not None:
return [data]
else:
return []
def _result_getter(data) -> list[str]:
if data is not None:
return [x for x in data if not x.endswith('.e')]
else:
return []
def _emptyresult_getter(data) -> list[str]:
if data is not None:
return [x for x in data if x.endswith('.e')]
else:
return []
def _error_getter(data) -> list[str]:
    """Keep only error keys whose type suffix is not a known ERROR_TYPES
    value (i.e. non-standard errors worth following)."""
    if data is None:
        return []
    known_types = ERROR_TYPES.values()
    return [key for key in data if key.rsplit('.e', 1)[1] not in known_types]
def _sha256_getter(data) -> list[str]:
if data is not None:
return [x[:64] for x in data]
else:
return []
def _file_getter(data) -> list[str]:
if data is not None:
return [x['sha256'] for x in data]
else:
return []
def _result_file_getter(data) -> list[str]:
    """Collect sha256s of a result's supplementary and extracted files."""
    if data is None:
        return []
    attachments = data.get("supplementary", []) + data.get("extracted", [])
    return _file_getter(attachments)
# Dependency map used by deep (follow_keys) backups: for each bucket, the
# list of (dependent bucket, source field, getter) triples describing which
# related documents must also be backed up.
FOLLOW_KEYS = {
    "alert": [
        ('submission', 'sid', _string_getter),
    ],
    "submission": [
        ('result', 'results', _result_getter),
        ('error', 'errors', _error_getter),
        ('file', 'results', _sha256_getter),
        ('file', 'files', _file_getter),
        ('file', 'errors', _sha256_getter),
    ],
    "results": [
        ('file', 'response', _result_file_getter),
    ]
}
| 36.442623 | 111 | 0.523992 | from __future__ import annotations
import json
import os
import random
import time
import threading
import logging
from typing import Any
from multiprocessing import Process
from assemblyline.common import forge
from assemblyline.common.uid import get_random_id
from assemblyline.odm.models.error import ERROR_TYPES
from assemblyline.remote.datatypes.hash import Hash
from assemblyline.remote.datatypes.queues.named import NamedQueue
def backup_worker(worker_id: str, instance_id: str, working_dir: str):
datastore = forge.get_datastore(archive_access=True)
worker_queue: NamedQueue[dict[str, Any]] = NamedQueue(f"r-worker-{instance_id}", ttl=1800)
done_queue: NamedQueue[dict[str, Any]] = NamedQueue(f"r-done-{instance_id}", ttl=1800)
hash_queue: Hash[str] = Hash(f"r-hash-{instance_id}")
stopping = False
with open(os.path.join(working_dir, "backup.part%s" % worker_id), "w+") as backup_file:
while True:
data = worker_queue.pop(timeout=1)
if data is None:
if stopping:
break
continue
if data.get('stop', False):
if not stopping:
stopping = True
else:
time.sleep(round(random.uniform(0.050, 0.250), 3))
worker_queue.push(data)
continue
missing = False
success = True
try:
to_write = datastore.get_collection(data['bucket_name']).get(data['key'], as_obj=False)
if to_write:
if data.get('follow_keys', False):
for bucket, bucket_key, getter in FOLLOW_KEYS.get(data['bucket_name'], []):
for key in getter(to_write.get(bucket_key, None)):
hash_key = "%s_%s" % (bucket, key)
if not hash_queue.exists(hash_key):
hash_queue.add(hash_key, "True")
worker_queue.push({"bucket_name": bucket, "key": key, "follow_keys": True})
backup_file.write(json.dumps((data['bucket_name'], data['key'], to_write)) + "\n")
else:
missing = True
except Exception:
success = False
done_queue.push({
"success": success,
"missing": missing,
"bucket_name": data['bucket_name'],
"key": data['key']
})
done_queue.push({"stopped": True})
def restore_worker(worker_id: str, instance_id: str, working_dir: str):
datastore = forge.get_datastore(archive_access=True)
done_queue: NamedQueue[dict[str, Any]] = NamedQueue(f"r-done-{instance_id}", ttl=1800)
with open(os.path.join(working_dir, "backup.part%s" % worker_id), "rb") as input_file:
for line in input_file:
bucket_name, key, data = json.loads(line)
success = True
try:
collection = datastore.get_collection(bucket_name)
collection.save(key, data)
except Exception:
success = False
done_queue.push({
"success": success,
"missing": False,
"bucket_name": bucket_name,
"key": key})
done_queue.push({"stopped": True})
class DistributedBackup(object):
def __init__(self, working_dir: str, worker_count: int = 50, spawn_workers: bool = True,
use_threading: bool = False, logger: logging.Logger = None):
self.working_dir = working_dir
self.datastore = forge.get_datastore(archive_access=True)
self.logger = logger
self.plist: list[Process] = []
self.use_threading = use_threading
self.instance_id = get_random_id()
self.worker_queue: NamedQueue[dict[str, Any]] = NamedQueue(f"r-worker-{self.instance_id}", ttl=1800)
self.done_queue: NamedQueue[dict[str, Any]] = NamedQueue(f"r-done-{self.instance_id}", ttl=1800)
self.hash_queue: Hash[str] = Hash(f"r-hash-{self.instance_id}")
self.bucket_error: list[str] = []
self.valid_buckets: list[str] = sorted(list(self.datastore.ds.get_models().keys()))
self.worker_count = worker_count
self.spawn_workers = spawn_workers
self.total_count = 0
self.error_map_count: dict[str, int] = {}
self.missing_map_count: dict[str, int] = {}
self.map_count: dict[str, int] = {}
self.last_time: float = 0
self.last_count = 0
self.error_count = 0
def cleanup(self):
self.worker_queue.delete()
self.done_queue.delete()
self.hash_queue.delete()
for p in self.plist:
p.terminate()
def done_thread(self, title: str):
t0 = time.time()
self.last_time = t0
running_threads = self.worker_count
while running_threads > 0:
msg = self.done_queue.pop(timeout=1)
if msg is None:
continue
if "stopped" in msg:
running_threads -= 1
continue
bucket_name = msg.get('bucket_name', 'unknown')
if msg.get('success', False):
self.total_count += 1
if msg.get("missing", False):
if bucket_name not in self.missing_map_count:
self.missing_map_count[bucket_name] = 0
self.missing_map_count[bucket_name] += 1
else:
if bucket_name not in self.map_count:
self.map_count[bucket_name] = 0
self.map_count[bucket_name] += 1
new_t = time.time()
if (new_t - self.last_time) > 5:
if self.logger:
self.logger.info("%s (%s at %s keys/sec) ==> %s" %
(self.total_count,
new_t - self.last_time,
int((self.total_count - self.last_count) / (new_t - self.last_time)),
self.map_count))
self.last_count = self.total_count
self.last_time = new_t
else:
self.error_count += 1
if bucket_name not in self.error_map_count:
self.error_map_count[bucket_name] = 0
self.error_map_count[bucket_name] += 1
self.cleanup()
summary = ""
summary += "\n########################\n"
summary += "####### SUMMARY #######\n"
summary += "########################\n"
summary += "%s items - %s errors - %s secs\n\n" % \
(self.total_count, self.error_count, time.time() - t0)
for k, v in self.map_count.items():
summary += "\t%15s: %s\n" % (k.upper(), v)
if len(self.missing_map_count.keys()) > 0:
summary += "\n\nMissing data:\n\n"
for k, v in self.missing_map_count.items():
summary += "\t%15s: %s\n" % (k.upper(), v)
if len(self.error_map_count.keys()) > 0:
summary += "\n\nErrors:\n\n"
for k, v in self.error_map_count.items():
summary += "\t%15s: %s\n" % (k.upper(), v)
if len(self.bucket_error) > 0:
summary += f"\nThese buckets failed to {title.lower()} completely: {self.bucket_error}\n"
if self.logger:
self.logger.info(summary)
def backup(self, bucket_list: list[str], follow_keys: bool = False, query: str = None):
if query is None:
query = 'id:*'
for bucket in bucket_list:
if bucket not in self.valid_buckets:
if self.logger:
self.logger.warn("\n%s is not a valid bucket.\n\n"
"The list of valid buckets is the following:\n\n\t%s\n" %
(bucket.upper(), "\n\t".join(self.valid_buckets)))
return
targets = ', '.join(bucket_list)
try:
if self.logger:
self.logger.info("\n-----------------------")
self.logger.info("----- Data Backup -----")
self.logger.info("-----------------------")
self.logger.info(f" Deep: {follow_keys}")
self.logger.info(f" Buckets: {targets}")
self.logger.info(f" Workers: {self.worker_count}")
self.logger.info(f" Target directory: {self.working_dir}")
self.logger.info(f" Filtering query: {query}")
for x in range(self.worker_count):
if self.use_threading:
t = threading.Thread(target=backup_worker, args=(x, self.instance_id, self.working_dir))
t.setDaemon(True)
t.start()
else:
p = Process(target=backup_worker, args=(x, self.instance_id, self.working_dir))
p.start()
self.plist.append(p)
dt = threading.Thread(target=self.done_thread, args=('Backup',), name="Done thread")
dt.setDaemon(True)
dt.start()
for bucket_name in bucket_list:
try:
collection = self.datastore.get_collection(bucket_name)
for item in collection.stream_search(query, fl="id", item_buffer_size=500, as_obj=False):
self.worker_queue.push({"bucket_name": bucket_name, "key": item['id'],
"follow_keys": follow_keys})
except Exception as e:
self.cleanup()
if self.logger:
self.logger.exception(e)
self.logger.error("Error occurred while processing bucket %s." % bucket_name)
self.bucket_error.append(bucket_name)
for _ in range(self.worker_count):
self.worker_queue.push({"stop": True})
dt.join()
except Exception as e:
if self.logger:
self.logger.exception(e)
def restore(self):
try:
if self.logger:
self.logger.info("\n------------------------")
self.logger.info("----- Data Restore -----")
self.logger.info("------------------------")
self.logger.info(f" Workers: {self.worker_count}")
self.logger.info(f" Target directory: {self.working_dir}")
for x in range(self.worker_count):
if self.use_threading:
t = threading.Thread(target=restore_worker,
args=(x, self.instance_id, self.working_dir))
t.setDaemon(True)
t.start()
else:
p = Process(target=restore_worker, args=(x, self.instance_id, self.working_dir))
p.start()
self.plist.append(p)
dt = threading.Thread(target=self.done_thread, args=('Restore',), name="Done thread")
dt.setDaemon(True)
dt.start()
dt.join()
except Exception as e:
if self.logger:
self.logger.exception(e)
def _string_getter(data) -> list[str]:
if data is not None:
return [data]
else:
return []
def _result_getter(data) -> list[str]:
if data is not None:
return [x for x in data if not x.endswith('.e')]
else:
return []
def _emptyresult_getter(data) -> list[str]:
if data is not None:
return [x for x in data if x.endswith('.e')]
else:
return []
def _error_getter(data) -> list[str]:
if data is not None:
return [x for x in data if x.rsplit('.e', 1)[1] not in ERROR_TYPES.values()]
else:
return []
def _sha256_getter(data) -> list[str]:
if data is not None:
return [x[:64] for x in data]
else:
return []
def _file_getter(data) -> list[str]:
if data is not None:
return [x['sha256'] for x in data]
else:
return []
def _result_file_getter(data) -> list[str]:
if data is not None:
supp = data.get("supplementary", []) + data.get("extracted", [])
return _file_getter(supp)
else:
return []
FOLLOW_KEYS = {
"alert": [
('submission', 'sid', _string_getter),
],
"submission": [
('result', 'results', _result_getter),
('error', 'errors', _error_getter),
('file', 'results', _sha256_getter),
('file', 'files', _file_getter),
('file', 'errors', _sha256_getter),
],
"results": [
('file', 'response', _result_file_getter),
]
}
| true | true |
1c2db06313afeebd3e8c41976a6c2e579de5ebdd | 10,035 | py | Python | tools_webrtc/libs/generate_licenses.py | wyshen2020/webrtc | b93e2240f1653b82e24553e092bbab84337774af | [
"BSD-3-Clause"
] | 2 | 2022-03-10T01:47:56.000Z | 2022-03-31T12:51:46.000Z | tools_webrtc/libs/generate_licenses.py | wyshen2020/webrtc | b93e2240f1653b82e24553e092bbab84337774af | [
"BSD-3-Clause"
] | null | null | null | tools_webrtc/libs/generate_licenses.py | wyshen2020/webrtc | b93e2240f1653b82e24553e092bbab84337774af | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env vpython3
# Copyright 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Generates license markdown for a prebuilt version of WebRTC.
Licenses are taken from dependent libraries which are determined by
GN desc command `gn desc` on all targets specified via `--target` argument.
One can see all dependencies by invoking this command:
$ gn.py desc --all --format=json <out_directory> <target> | \
vpython3 -m json.tool
(see "deps" subarray)
Libraries are mapped to licenses via LIB_TO_LICENSES_DICT dictionary.
"""
import sys
import argparse
import json
import logging
import os
import re
import subprocess
from html import escape
# Third_party library to licences mapping. Keys are names of the libraries
# (right after the `third_party/` prefix)
LIB_TO_LICENSES_DICT = {
'abseil-cpp': ['third_party/abseil-cpp/LICENSE'],
'android_ndk': ['third_party/android_ndk/NOTICE'],
'android_sdk': ['third_party/android_sdk/LICENSE'],
'auto': [
'third_party/android_deps/libs/'
'com_google_auto_service_auto_service/LICENSE'
],
'bazel': ['third_party/bazel/LICENSE'],
'boringssl': ['third_party/boringssl/src/LICENSE'],
'crc32c': ['third_party/crc32c/src/LICENSE'],
'errorprone': [
'third_party/android_deps/libs/'
'com_google_errorprone_error_prone_core/LICENSE'
],
'fiat': ['third_party/boringssl/src/third_party/fiat/LICENSE'],
'guava': ['third_party/android_deps/libs/com_google_guava_guava/LICENSE'],
'ijar': ['third_party/ijar/LICENSE'],
'jsoncpp': ['third_party/jsoncpp/LICENSE'],
'libaom': ['third_party/libaom/source/libaom/LICENSE'],
'libc++': ['buildtools/third_party/libc++/trunk/LICENSE.TXT'],
'libc++abi': ['buildtools/third_party/libc++abi/trunk/LICENSE.TXT'],
'libevent': ['base/third_party/libevent/LICENSE'],
'libjpeg_turbo': ['third_party/libjpeg_turbo/LICENSE.md'],
'libsrtp': ['third_party/libsrtp/LICENSE'],
'libunwind': ['buildtools/third_party/libunwind/trunk/LICENSE.TXT'],
'libvpx': ['third_party/libvpx/source/libvpx/LICENSE'],
'libyuv': ['third_party/libyuv/LICENSE'],
'nasm': ['third_party/nasm/LICENSE'],
'opus': ['third_party/opus/src/COPYING'],
'pffft': ['third_party/pffft/LICENSE'],
'protobuf': ['third_party/protobuf/LICENSE'],
'rnnoise': ['third_party/rnnoise/COPYING'],
'usrsctp': ['third_party/usrsctp/LICENSE'],
'webrtc': ['LICENSE'],
'zlib': ['third_party/zlib/LICENSE'],
'base64': ['rtc_base/third_party/base64/LICENSE'],
'sigslot': ['rtc_base/third_party/sigslot/LICENSE'],
'portaudio': ['modules/third_party/portaudio/LICENSE'],
'fft': ['modules/third_party/fft/LICENSE'],
'g711': ['modules/third_party/g711/LICENSE'],
'g722': ['modules/third_party/g722/LICENSE'],
'ooura': ['common_audio/third_party/ooura/LICENSE'],
'spl_sqrt_floor': ['common_audio/third_party/spl_sqrt_floor/LICENSE'],
# TODO(bugs.webrtc.org/1110): Remove this hack. This is not a lib.
# For some reason it is listed as so in _GetThirdPartyLibraries.
'android_deps': [],
# This is not a library but a collection of libraries.
'androidx': [],
# Compile time dependencies, no license needed:
'yasm': [],
'ow2_asm': [],
'jdk': [],
}
# Third_party library _regex_ to licences mapping. Keys are regular expression
# with names of the libraries (right after the `third_party/` prefix)
LIB_REGEX_TO_LICENSES_DICT = {
'android_deps:android_support_annotations.*': [
'third_party/android_deps/libs/' +
'com_android_support_support_annotations/LICENSE'
],
# Internal dependencies, licenses are already included by other dependencies
'android_deps:com_android_support_support_annotations.*': [],
}
def FindSrcDirPath():
    """Returns the abs path to the src/ dir of the project."""
    current = os.path.dirname(os.path.abspath(__file__))
    # Climb one directory at a time until the directory itself is named 'src'.
    while os.path.basename(current) != 'src':
        current = os.path.normpath(os.path.join(current, os.pardir))
    return current
# Directory containing this script (symlinks resolved via realpath).
SCRIPT_DIR = os.path.dirname(os.path.realpath(sys.argv[0]))
# tools_webrtc/libs/ -> two levels up is the WebRTC checkout root.
WEBRTC_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir))
SRC_DIR = FindSrcDirPath()
# Make Chromium's build/ helpers importable before importing find_depot_tools.
sys.path.append(os.path.join(SRC_DIR, 'build'))
import find_depot_tools
# Captures the library name component immediately after a third_party/ segment.
THIRD_PARTY_LIB_SIMPLE_NAME_REGEX = r'^.*/third_party/([\w\-+]+).*$'
# Template used to match a full dependency label against a regex key from
# LIB_REGEX_TO_LICENSES_DICT.
THIRD_PARTY_LIB_REGEX_TEMPLATE = r'^.*/third_party/%s$'
class LicenseBuilder:
    """Collects the third_party dependencies of GN targets and renders their
    license texts into a single LICENSE.md markdown file.

    Dependencies are discovered with `gn desc --all`; each discovered library
    must have an entry in the simple-name or regex license dictionaries.
    """

    def __init__(self,
                 buildfile_dirs,
                 targets,
                 lib_to_licenses_dict=None,
                 lib_regex_to_licenses_dict=None):
        if lib_to_licenses_dict is None:
            lib_to_licenses_dict = LIB_TO_LICENSES_DICT
        if lib_regex_to_licenses_dict is None:
            lib_regex_to_licenses_dict = LIB_REGEX_TO_LICENSES_DICT
        self.buildfile_dirs = buildfile_dirs
        self.targets = targets
        self.lib_to_licenses_dict = lib_to_licenses_dict
        self.lib_regex_to_licenses_dict = lib_regex_to_licenses_dict
        # Combined lookup table: simple names and regex names share one
        # namespace when resolving a parsed library to its license files.
        self.common_licenses_dict = self.lib_to_licenses_dict.copy()
        self.common_licenses_dict.update(self.lib_regex_to_licenses_dict)

    @staticmethod
    def _ParseLibraryName(dep):
        """Returns library name after third_party

        Input one of:
        //a/b/third_party/libname:c
        //a/b/third_party/libname:c(//d/e/f:g)
        //a/b/third_party/libname/c:d(//e/f/g:h)

        Outputs libname or None if this is not a third_party dependency.
        """
        groups = re.match(THIRD_PARTY_LIB_SIMPLE_NAME_REGEX, dep)
        return groups.group(1) if groups else None

    def _ParseLibrary(self, dep):
        """Returns library simple or regex name that matches `dep` after
        third_party.

        This method matches `dep` against the regular expression keys of
        LIB_REGEX_TO_LICENSES_DICT first, then falls back to the simple name.

        Outputs matched dict key or None if this is not a third_party
        dependency.
        """
        libname = LicenseBuilder._ParseLibraryName(dep)
        for lib_regex in self.lib_regex_to_licenses_dict:
            if re.match(THIRD_PARTY_LIB_REGEX_TEMPLATE % lib_regex, dep):
                return lib_regex
        return libname

    @staticmethod
    def _RunGN(buildfile_dir, target):
        """Runs `gn desc --all --format=json` and returns its raw JSON text."""
        cmd = [
            sys.executable,
            os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'),
            'desc',
            '--all',
            '--format=json',
            os.path.abspath(buildfile_dir),
            target,
        ]
        logging.debug('Running: %r', cmd)
        output_json = subprocess.check_output(cmd, cwd=WEBRTC_ROOT).decode('UTF-8')
        logging.debug('Output: %s', output_json)
        return output_json

    def _GetThirdPartyLibraries(self, buildfile_dir, target):
        """Returns the set of third_party library keys `target` depends on."""
        output = json.loads(LicenseBuilder._RunGN(buildfile_dir, target))
        libraries = set()
        for described_target in list(output.values()):
            third_party_libs = (self._ParseLibrary(dep)
                                for dep in described_target['deps'])
            libraries |= set(lib for lib in third_party_libs if lib)
        return libraries

    def GenerateLicenseText(self, output_dir):
        """Writes <output_dir>/LICENSE.md covering every discovered library.

        Raises if any discovered library has no known license mapping.
        """
        # Get a list of third_party libs from gn. For fat libraries we must
        # consider all architectures, hence the multiple buildfile directories.
        third_party_libs = set()
        for buildfile in self.buildfile_dirs:
            for target in self.targets:
                third_party_libs |= self._GetThirdPartyLibraries(buildfile, target)
        assert len(third_party_libs) > 0

        missing_licenses = third_party_libs - set(self.common_licenses_dict.keys())
        if missing_licenses:
            error_msg = 'Missing licenses for following third_party targets: %s' % \
                        ', '.join(sorted(missing_licenses))
            logging.error(error_msg)
            raise Exception(error_msg)

        # Put webrtc at the front of the list.
        license_libs = sorted(third_party_libs)
        license_libs.insert(0, 'webrtc')
        logging.info('List of licenses: %s', ', '.join(license_libs))

        # Generate markdown. A context manager guarantees the output file is
        # closed (and flushed) even if reading one of the license files raises;
        # the original left the handle open on error.
        with open(os.path.join(output_dir, 'LICENSE.md'), 'w+') as output_license_file:
            for license_lib in license_libs:
                if len(self.common_licenses_dict[license_lib]) == 0:
                    logging.info('Skipping compile time or internal dependency: %s',
                                 license_lib)
                    continue  # Compile time dependency
                output_license_file.write('# %s\n' % license_lib)
                output_license_file.write('```\n')
                for path in self.common_licenses_dict[license_lib]:
                    license_path = os.path.join(WEBRTC_ROOT, path)
                    with open(license_path, 'r') as license_file:
                        license_text = escape(license_file.read(), quote=True)
                        output_license_file.write(license_text)
                        output_license_file.write('\n')
                output_license_file.write('```\n\n')
def main():
    """Parses CLI arguments and writes LICENSE.md for the requested targets."""
    parser = argparse.ArgumentParser(description='Generate WebRTC LICENSE.md')
    parser.add_argument('--verbose',
                        action='store_true',
                        default=False,
                        help='Debug logging.')
    # --target may be repeated; each occurrence appends to the list.
    parser.add_argument('--target',
                        required=True,
                        action='append',
                        default=[],
                        help='Name of the GN target to generate a license for')
    parser.add_argument('output_dir', help='Directory to output LICENSE.md to.')
    parser.add_argument('buildfile_dirs',
                        nargs='+',
                        help='Directories containing gn generated ninja files')
    args = parser.parse_args()
    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
    builder = LicenseBuilder(args.buildfile_dirs, args.target)
    builder.GenerateLicenseText(args.output_dir)
if __name__ == '__main__':
    sys.exit(main())
| 37.58427 | 80 | 0.692875 |
import sys
import argparse
import json
import logging
import os
import re
import subprocess
from html import escape
LIB_TO_LICENSES_DICT = {
'abseil-cpp': ['third_party/abseil-cpp/LICENSE'],
'android_ndk': ['third_party/android_ndk/NOTICE'],
'android_sdk': ['third_party/android_sdk/LICENSE'],
'auto': [
'third_party/android_deps/libs/'
'com_google_auto_service_auto_service/LICENSE'
],
'bazel': ['third_party/bazel/LICENSE'],
'boringssl': ['third_party/boringssl/src/LICENSE'],
'crc32c': ['third_party/crc32c/src/LICENSE'],
'errorprone': [
'third_party/android_deps/libs/'
'com_google_errorprone_error_prone_core/LICENSE'
],
'fiat': ['third_party/boringssl/src/third_party/fiat/LICENSE'],
'guava': ['third_party/android_deps/libs/com_google_guava_guava/LICENSE'],
'ijar': ['third_party/ijar/LICENSE'],
'jsoncpp': ['third_party/jsoncpp/LICENSE'],
'libaom': ['third_party/libaom/source/libaom/LICENSE'],
'libc++': ['buildtools/third_party/libc++/trunk/LICENSE.TXT'],
'libc++abi': ['buildtools/third_party/libc++abi/trunk/LICENSE.TXT'],
'libevent': ['base/third_party/libevent/LICENSE'],
'libjpeg_turbo': ['third_party/libjpeg_turbo/LICENSE.md'],
'libsrtp': ['third_party/libsrtp/LICENSE'],
'libunwind': ['buildtools/third_party/libunwind/trunk/LICENSE.TXT'],
'libvpx': ['third_party/libvpx/source/libvpx/LICENSE'],
'libyuv': ['third_party/libyuv/LICENSE'],
'nasm': ['third_party/nasm/LICENSE'],
'opus': ['third_party/opus/src/COPYING'],
'pffft': ['third_party/pffft/LICENSE'],
'protobuf': ['third_party/protobuf/LICENSE'],
'rnnoise': ['third_party/rnnoise/COPYING'],
'usrsctp': ['third_party/usrsctp/LICENSE'],
'webrtc': ['LICENSE'],
'zlib': ['third_party/zlib/LICENSE'],
'base64': ['rtc_base/third_party/base64/LICENSE'],
'sigslot': ['rtc_base/third_party/sigslot/LICENSE'],
'portaudio': ['modules/third_party/portaudio/LICENSE'],
'fft': ['modules/third_party/fft/LICENSE'],
'g711': ['modules/third_party/g711/LICENSE'],
'g722': ['modules/third_party/g722/LICENSE'],
'ooura': ['common_audio/third_party/ooura/LICENSE'],
'spl_sqrt_floor': ['common_audio/third_party/spl_sqrt_floor/LICENSE'],
'android_deps': [],
'androidx': [],
'yasm': [],
'ow2_asm': [],
'jdk': [],
}
LIB_REGEX_TO_LICENSES_DICT = {
'android_deps:android_support_annotations.*': [
'third_party/android_deps/libs/' +
'com_android_support_support_annotations/LICENSE'
],
'android_deps:com_android_support_support_annotations.*': [],
}
def FindSrcDirPath():
src_dir = os.path.dirname(os.path.abspath(__file__))
while os.path.basename(src_dir) != 'src':
src_dir = os.path.normpath(os.path.join(src_dir, os.pardir))
return src_dir
SCRIPT_DIR = os.path.dirname(os.path.realpath(sys.argv[0]))
WEBRTC_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir))
SRC_DIR = FindSrcDirPath()
sys.path.append(os.path.join(SRC_DIR, 'build'))
import find_depot_tools
THIRD_PARTY_LIB_SIMPLE_NAME_REGEX = r'^.*/third_party/([\w\-+]+).*$'
THIRD_PARTY_LIB_REGEX_TEMPLATE = r'^.*/third_party/%s$'
class LicenseBuilder:
def __init__(self,
buildfile_dirs,
targets,
lib_to_licenses_dict=None,
lib_regex_to_licenses_dict=None):
if lib_to_licenses_dict is None:
lib_to_licenses_dict = LIB_TO_LICENSES_DICT
if lib_regex_to_licenses_dict is None:
lib_regex_to_licenses_dict = LIB_REGEX_TO_LICENSES_DICT
self.buildfile_dirs = buildfile_dirs
self.targets = targets
self.lib_to_licenses_dict = lib_to_licenses_dict
self.lib_regex_to_licenses_dict = lib_regex_to_licenses_dict
self.common_licenses_dict = self.lib_to_licenses_dict.copy()
self.common_licenses_dict.update(self.lib_regex_to_licenses_dict)
@staticmethod
def _ParseLibraryName(dep):
groups = re.match(THIRD_PARTY_LIB_SIMPLE_NAME_REGEX, dep)
return groups.group(1) if groups else None
def _ParseLibrary(self, dep):
libname = LicenseBuilder._ParseLibraryName(dep)
for lib_regex in self.lib_regex_to_licenses_dict:
if re.match(THIRD_PARTY_LIB_REGEX_TEMPLATE % lib_regex, dep):
return lib_regex
return libname
@staticmethod
def _RunGN(buildfile_dir, target):
cmd = [
sys.executable,
os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'),
'desc',
'--all',
'--format=json',
os.path.abspath(buildfile_dir),
target,
]
logging.debug('Running: %r', cmd)
output_json = subprocess.check_output(cmd, cwd=WEBRTC_ROOT).decode('UTF-8')
logging.debug('Output: %s', output_json)
return output_json
def _GetThirdPartyLibraries(self, buildfile_dir, target):
output = json.loads(LicenseBuilder._RunGN(buildfile_dir, target))
libraries = set()
for described_target in list(output.values()):
third_party_libs = (self._ParseLibrary(dep)
for dep in described_target['deps'])
libraries |= set(lib for lib in third_party_libs if lib)
return libraries
def GenerateLicenseText(self, output_dir):
third_party_libs = set()
for buildfile in self.buildfile_dirs:
for target in self.targets:
third_party_libs |= self._GetThirdPartyLibraries(buildfile, target)
assert len(third_party_libs) > 0
missing_licenses = third_party_libs - set(self.common_licenses_dict.keys())
if missing_licenses:
error_msg = 'Missing licenses for following third_party targets: %s' % \
', '.join(sorted(missing_licenses))
logging.error(error_msg)
raise Exception(error_msg)
license_libs = sorted(third_party_libs)
license_libs.insert(0, 'webrtc')
logging.info('List of licenses: %s', ', '.join(license_libs))
output_license_file = open(os.path.join(output_dir, 'LICENSE.md'), 'w+')
for license_lib in license_libs:
if len(self.common_licenses_dict[license_lib]) == 0:
logging.info('Skipping compile time or internal dependency: %s',
license_lib)
continue
output_license_file.write('# %s\n' % license_lib)
output_license_file.write('```\n')
for path in self.common_licenses_dict[license_lib]:
license_path = os.path.join(WEBRTC_ROOT, path)
with open(license_path, 'r') as license_file:
license_text = escape(license_file.read(), quote=True)
output_license_file.write(license_text)
output_license_file.write('\n')
output_license_file.write('```\n\n')
output_license_file.close()
def main():
parser = argparse.ArgumentParser(description='Generate WebRTC LICENSE.md')
parser.add_argument('--verbose',
action='store_true',
default=False,
help='Debug logging.')
parser.add_argument('--target',
required=True,
action='append',
default=[],
help='Name of the GN target to generate a license for')
parser.add_argument('output_dir', help='Directory to output LICENSE.md to.')
parser.add_argument('buildfile_dirs',
nargs='+',
help='Directories containing gn generated ninja files')
args = parser.parse_args()
logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
builder = LicenseBuilder(args.buildfile_dirs, args.target)
builder.GenerateLicenseText(args.output_dir)
if __name__ == '__main__':
sys.exit(main())
| true | true |
1c2db11e6425644ddfb4ca0850b6f9cd5ad986e3 | 6,991 | py | Python | A_pathfinding.py | Drake0306/a-_pathfinding | 46cbfbc44b3e563f9bdc2ec2d4c7742beed6c416 | [
"MIT"
] | null | null | null | A_pathfinding.py | Drake0306/a-_pathfinding | 46cbfbc44b3e563f9bdc2ec2d4c7742beed6c416 | [
"MIT"
] | null | null | null | A_pathfinding.py | Drake0306/a-_pathfinding | 46cbfbc44b3e563f9bdc2ec2d4c7742beed6c416 | [
"MIT"
] | null | null | null | import math
import pygame
from queue import PriorityQueue
WIDTH = 800
WIN = pygame.display.set_mode((WIDTH,WIDTH))
pygame.display.set_caption('A* Path Finding Algoritham')
# RGB color constants. The original used 225 where 255 was clearly intended
# (e.g. WHITE as (225, 225, 225)) and defined BLUE with green's channels;
# corrected to the standard values. Only the hues change, not the logic.
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
PURPLE = (128, 0, 128)
ORANGE = (255, 165, 0)
GREY = (128, 128, 128)
TURQUOISE = (64, 224, 208)
class Spot:
    """A single cell of the search grid.

    The cell's color doubles as its search state (start, end, barrier,
    open, closed, path); there is no separate state field.
    """
    def __init__(self, row, col, width, total_rows):
        self.row = row
        self.col = col
        # Top-left pixel coordinates of this cell.
        self.x = row * width
        self.y = col * width
        self.color = WHITE
        # Bug fix: this was `self.neighbor`, but update_neighbors() and the
        # A* loop both use `self.neighbors`, so a Spot whose
        # update_neighbors() had not run lacked the attribute entirely.
        self.neighbors = []
        self.width = width
        self.total_rows = total_rows
    # --- state queries (state is encoded purely in the color) ---
    def get_pos(self):
        return self.row, self.col
    def is_closed(self):
        return self.color == RED
    def is_open(self):
        return self.color == GREEN
    def is_barrier(self):
        return self.color == BLACK
    def is_start(self):
        return self.color == ORANGE
    def is_end(self):
        return self.color == TURQUOISE
    def reset(self):
        self.color = WHITE
    # --- state transitions ---
    def make_closed(self):
        self.color = RED
    def make_start(self):
        self.color = ORANGE
    def make_open(self):
        self.color = GREEN
    def make_barrier(self):
        self.color = BLACK
    def make_end(self):
        self.color = TURQUOISE
    def make_path(self):
        self.color = PURPLE
    def draw(self, win):
        """Paint this cell as a filled square at its pixel position."""
        pygame.draw.rect(win, self.color, (self.x, self.y, self.width, self.width))
    def update_neighbors(self, grid):
        """Recompute the list of passable 4-connected neighbour cells."""
        self.neighbors = []
        if self.row < self.total_rows - 1 and not grid[self.row + 1][self.col].is_barrier(): # DOWN
            self.neighbors.append(grid[self.row + 1][self.col])
        if self.row > 0 and not grid[self.row - 1][self.col].is_barrier(): # UP
            self.neighbors.append(grid[self.row - 1][self.col])
        if self.col < self.total_rows - 1 and not grid[self.row][self.col + 1].is_barrier(): # RIGHT
            self.neighbors.append(grid[self.row][self.col + 1])
        if self.col > 0 and not grid[self.row][self.col - 1].is_barrier(): # LEFT
            self.neighbors.append(grid[self.row][self.col - 1])
    def __lt__(self, other):
        # PriorityQueue entries carry an insertion counter that breaks ties,
        # so spots never need a meaningful ordering.
        return False
def h(p1, p2):
    """Heuristic: Manhattan distance between two grid points."""
    (x1, y1), (x2, y2) = p1, p2
    return abs(x2 - x1) + abs(y2 - y1)
def reconstruct_path(came_from, current, draw):
    """Walk came_from backwards from the end node, painting each predecessor
    as part of the final path and repainting the window after each step."""
    node = current
    while node in came_from:
        node = came_from[node]
        node.make_path()
        draw()
def algoritham(draw, grid, start, end):
    """A* search from start to end over the grid of Spot cells.

    `draw` is a zero-argument callback that repaints the window, so the
    search is animated as it runs. Returns True if a path was found (and
    painted), False when the open set empties without reaching `end`.
    """
    count = 0
    open_set = PriorityQueue()
    # Entries are (f_score, insertion count, spot); the count breaks ties so
    # Spot objects are never compared against each other directly.
    open_set.put((0, count, start))
    came_from = {}
    # Every edge costs 1, so g is the number of steps from the start.
    g_score = {spot: float("inf") for row in grid for spot in row}
    g_score[start] = 0
    f_score = {spot: float("inf") for row in grid for spot in row}
    f_score[start] = h(start.get_pos(), end.get_pos())
    # Mirrors open_set membership, since PriorityQueue has no `in` test.
    open_set_hash = {start}
    while not open_set.empty():
        # Pump the event queue so the OS window stays responsive and the
        # user can quit mid-search.
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
        current = open_set.get()[2]
        open_set_hash.remove(current)
        if current == end:
            reconstruct_path(came_from, end, draw)
            end.make_end()
            return True
        for neighbor in current.neighbors:
            temp_g_score = g_score[current] + 1
            if temp_g_score < g_score[neighbor]:
                # Found a strictly better route to this neighbor.
                came_from[neighbor] = current
                g_score[neighbor] = temp_g_score
                f_score[neighbor] = temp_g_score + h(neighbor.get_pos(), end.get_pos())
                if neighbor not in open_set_hash:
                    count += 1
                    open_set.put((f_score[neighbor], count, neighbor))
                    open_set_hash.add(neighbor)
                    neighbor.make_open()
        draw()
        if current != start:
            current.make_closed()
    return False
def make_grid(rows, width):
    """Build a rows x rows matrix of Spot cells covering a width-pixel board."""
    gap = width // rows  # integer pixel size of one cell
    return [[Spot(row, col, gap, rows) for col in range(rows)]
            for row in range(rows)]
def draw_grid(win, rows, width):
    """Overlay the grey grid lines on the window.

    The original nested the vertical-line loop inside the row loop, redrawing
    every vertical line once per row (rows * rows draw calls). Each line only
    needs drawing once, so both directions are handled in a single pass.
    """
    gap = width // rows  # pixel spacing between adjacent lines
    for i in range(rows):
        pygame.draw.line(win, GREY, (0, i * gap), (width, i * gap))
        pygame.draw.line(win, GREY, (i * gap, 0), (i * gap, width))
def draw(win, grid, rows, width):
    """Repaint the whole board: background fill, every cell, then grid lines."""
    win.fill(WHITE)
    for grid_row in grid:
        for cell in grid_row:
            cell.draw(win)
    draw_grid(win, rows, width)
    pygame.display.update()
def get_clicked_pos(pos, rows, width):
    """Convert a (y, x) pixel coordinate into (row, col) grid indices."""
    gap = width // rows
    y, x = pos
    return y // gap, x // gap
def main(win, width):
    """Interactive loop: left-click places start/end/barriers, right-click
    erases, SPACE runs A*, C clears the board, closing the window quits."""
    ROWS = 50
    grid = make_grid(ROWS, width)
    start = None
    end = None
    run = True
    # NOTE(review): `started` is never set to True anywhere, so the guard
    # below is currently dead code.
    started = False
    while run:
        draw(win, grid, ROWS, width)
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                run = False
            if started:
                continue
            if pygame.mouse.get_pressed()[0]: # LEFT MOUSE
                pos = pygame.mouse.get_pos()
                row, col = get_clicked_pos(pos, ROWS, width)
                spot = grid[row][col]
                # First click sets the start, second the end, the rest barriers.
                if not start and spot != end:
                    start = spot
                    start.make_start()
                elif not end and spot != start:
                    end = spot
                    end.make_end()
                elif spot != end and spot != start:
                    spot.make_barrier()
            elif pygame.mouse.get_pressed()[2]: # RIGHT MOUSE
                pos = pygame.mouse.get_pos()
                row, col = get_clicked_pos(pos, ROWS, width)
                spot = grid[row][col]
                spot.reset()
                if spot == start:
                    start = None
                elif spot == end:
                    end = None
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_SPACE and start and end:
                    # Neighbour lists must be rebuilt after barriers change.
                    for row in grid:
                        for spot in row:
                            spot.update_neighbors(grid)
                    algoritham(lambda: draw(win, grid, ROWS, width), grid, start, end)
                if event.key == pygame.K_c:
                    start = None
                    end = None
                    grid = make_grid(ROWS, width)
    pygame.quit()
# Run the app immediately on import/execution of this script.
main(WIN, WIDTH)
| 26.481061 | 101 | 0.51223 | import math
import pygame
from queue import PriorityQueue
WIDTH = 800
WIN = pygame.display.set_mode((WIDTH,WIDTH))
pygame.display.set_caption('A* Path Finding Algoritham')
RED = (255,0,0)
GREEN = (0,225,0)
BLUE = (0,225,0)
YELLOW = (225,225,0)
WHITE = (225,225,225)
BLACK = (0,0,0)
PURPLE = (128,0,128)
ORANGE = (255,165,0)
GREY = (128,128,128)
TURQUOISE = (64,224,208)
class Spot:
def __init__(self, row, col, width, total_rows):
self.row = row
self.col = col
self.x = row * width
self.y = col * width
self.color = WHITE
self.neighbor = []
self.width = width
self.total_rows = total_rows
def get_pos(self):
return self.row, self.col
def is_closed(self):
return self.color == RED
def is_open(self):
return self.color == GREEN
def is_barrier(self):
return self.color == BLACK
def is_start(self):
return self.color == ORANGE
def is_end(self):
return self.color == TURQUOISE
def reset(self):
self.color = WHITE
def make_closed(self):
self.color = RED
def make_start(self):
self.color = ORANGE
def make_open(self):
self.color = GREEN
def make_barrier(self):
self.color = BLACK
def make_end(self):
self.color = TURQUOISE
def make_path(self):
self.color = PURPLE
def draw(self, win):
pygame.draw.rect(win, self.color, (self.x, self.y, self.width, self.width))
def update_neighbors(self,grid):
self.neighbors = []
if self.row < self.total_rows - 1 and not grid[self.row + 1][self.col].is_barrier():
self.neighbors.append(grid[self.row + 1][self.col])
if self.row > 0 and not grid[self.row - 1][self.col].is_barrier():
self.neighbors.append(grid[self.row - 1][self.col])
if self.col < self.total_rows - 1 and not grid[self.row][self.col + 1].is_barrier():
self.neighbors.append(grid[self.row][self.col + 1])
if self.col > 0 and not grid[self.row][self.col - 1].is_barrier():
self.neighbors.append(grid[self.row][self.col - 1])
def __lt__(self,other):
return False
def h(p1,p2):
x1, y1 = p1
x2, y2 = p2
return abs(x1 - x2) + abs(y1 - y2)
def reconstruct_path(came_from, current, draw):
while current in came_from:
current = came_from[current]
current.make_path()
draw()
def algoritham(draw, grid, start, end):
count = 0
open_set = PriorityQueue()
open_set.put((0, count, start))
came_from = {}
g_score = {spot: float("inf") for row in grid for spot in row}
g_score[start] = 0
f_score = {spot: float("inf") for row in grid for spot in row}
f_score[start] = h(start.get_pos(), end.get_pos())
open_set_hash = {start}
while not open_set.empty():
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
current = open_set.get()[2]
open_set_hash.remove(current)
if current == end:
reconstruct_path(came_from, end, draw)
end.make_end()
return True
for neighbor in current.neighbors:
temp_g_score = g_score[current] + 1
if temp_g_score < g_score[neighbor]:
came_from[neighbor] = current
g_score[neighbor] = temp_g_score
f_score[neighbor] = temp_g_score + h(neighbor.get_pos(), end.get_pos())
if neighbor not in open_set_hash:
count += 1
open_set.put((f_score[neighbor], count, neighbor))
open_set_hash.add(neighbor)
neighbor.make_open()
draw()
if current != start:
current.make_closed()
return False
def make_grid(rows,width):
grid = []
gap = width // rows
for i in range(rows):
grid.append([])
for j in range(rows):
spot = Spot(i, j, gap, rows)
grid[i].append(spot)
return grid
def draw_grid(win, rows, width):
gap = width // rows
for i in range(rows):
pygame.draw.line(win, GREY, (0, i * gap), (width, i * gap))
for j in range(rows):
pygame.draw.line(win, GREY, (j * gap, 0), (j * gap, width))
def draw(win, grid, rows, width):
win.fill(WHITE)
for row in grid:
for spot in row:
spot.draw(win)
draw_grid(win, rows, width)
pygame.display.update()
def get_clicked_pos(pos, rows, width):
gap = width // rows
y, x = pos
row = y // gap
col = x // gap
return row, col
def main(win, width):
ROWS = 50
grid = make_grid(ROWS, width)
start = None
end = None
run = True
started = False
while run:
draw(win, grid, ROWS, width)
for event in pygame.event.get():
if event.type == pygame.QUIT:
run = False
if started:
continue
if pygame.mouse.get_pressed()[0]:
pos = pygame.mouse.get_pos()
row, col = get_clicked_pos(pos, ROWS, width)
spot = grid[row][col]
if not start and spot != end:
start = spot
start.make_start()
elif not end and spot != start:
end = spot
end.make_end()
elif spot != end and spot != start:
spot.make_barrier()
elif pygame.mouse.get_pressed()[2]:
pos = pygame.mouse.get_pos()
row, col = get_clicked_pos(pos, ROWS, width)
spot = grid[row][col]
spot.reset()
if spot == start:
start = None
elif spot == end:
end = None
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_SPACE and start and end:
for row in grid:
for spot in row:
spot.update_neighbors(grid)
algoritham(lambda: draw(win, grid, ROWS, width), grid, start, end)
if event.key == pygame.K_c:
start = None
end = None
grid = make_grid(ROWS, width)
pygame.quit()
main(WIN, WIDTH)
| true | true |
1c2db146a81095258082a5e01445b3cddf1eab20 | 8,037 | py | Python | users/models.py | moshthepitt/probsc | 9b8cab206bb1c41238e36bd77f5e0573df4d8e2d | [
"MIT"
] | null | null | null | users/models.py | moshthepitt/probsc | 9b8cab206bb1c41238e36bd77f5e0573df4d8e2d | [
"MIT"
] | null | null | null | users/models.py | moshthepitt/probsc | 9b8cab206bb1c41238e36bd77f5e0573df4d8e2d | [
"MIT"
] | null | null | null | from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from django.urls import reverse
from django_extensions.db.models import TimeStampedModel
from mptt.models import MPTTModel, TreeForeignKey
from .managers import UserProfileManager, DepartmentManager, PositionManager
User = settings.AUTH_USER_MODEL
class Department(MPTTModel, TimeStampedModel):
    """
    Departments in an organisation
    """
    name = models.CharField(_("Name"), max_length=255)
    description = models.TextField(_("Description"), blank=True, default="")
    # Self-referencing MPTT link: departments form a tree.
    parent = TreeForeignKey('self', verbose_name=_("Parent"), null=True,
                            blank=True, related_name='children', db_index=True,
                            on_delete=models.PROTECT,
                            help_text=_("The parent department"))
    customer = models.ForeignKey(
        'customers.Customer', verbose_name=_("Customer"),
        on_delete=models.PROTECT)
    # Optional: a department can exist without an assigned manager.
    manager = models.ForeignKey(
        User, verbose_name=_("Manager"), on_delete=models.PROTECT,
        blank=True, null=True)
    active = models.BooleanField(_("Active"), default=True)
    objects = DepartmentManager()
    class Meta:
        verbose_name = _("Department")
        verbose_name_plural = _("Departments")
        ordering = ['name']
    def get_absolute_url(self):
        # Placeholder: no detail view exists yet.
        return "#"
    def get_edit_url(self):
        return reverse('users:departments_edit', args=[self.pk])
    def get_delete_url(self):
        return reverse('users:departments_delete', args=[self.pk])
    def get_list_url(self):
        return reverse('users:departments_list')
    def __str__(self):
        return self.name
class Position(MPTTModel, TimeStampedModel):
    """
    Job positions in an organisation
    """
    name = models.CharField(_("Name"), max_length=255)
    description = models.TextField(_("Description"), blank=True, default="")
    department = models.ForeignKey(
        Department, verbose_name=_("Department"), on_delete=models.PROTECT)
    # Self-referencing MPTT link: the reporting hierarchy of positions.
    parent = TreeForeignKey('self', verbose_name=_("Reports To"), null=True,
                            blank=True, related_name='children', db_index=True,
                            on_delete=models.PROTECT,
                            help_text=_("The parent Job Position"))
    # Optional direct supervisor, independent of the position tree.
    supervisor = models.ForeignKey(
        User, verbose_name=_("Supervisor"), on_delete=models.PROTECT,
        blank=True, null=True)
    customer = models.ForeignKey(
        'customers.Customer', verbose_name=_("Customer"),
        on_delete=models.PROTECT)
    active = models.BooleanField(_("Active"), default=True)
    objects = PositionManager()
    class Meta:
        # NOTE(review): verbose_name reads "Job Positions" for a single
        # object — likely intended to be the singular "Job Position".
        verbose_name = _("Job Positions")
        verbose_name_plural = _("Job Positions")
        ordering = ['name']
    def get_absolute_url(self):
        # Placeholder: no detail view exists yet.
        return "#"
    def get_edit_url(self):
        return reverse('users:positions_edit', args=[self.pk])
    def get_delete_url(self):
        return reverse('users:positions_delete', args=[self.pk])
    def get_list_url(self):
        return reverse('users:positions_list')
    def __str__(self):
        return "{} - {}".format(self.department.name, self.name)
@python_2_unicode_compatible
class UserProfile(models.Model):
    """
    Model used to store more information on users
    """
    # Role values are stored as single-character strings.
    ADMIN = '1'
    MEMBER = '2'
    EDITOR = '3'
    MEMBER_ROLE_CHOICES = (
        (ADMIN, _('Admin')),
        (EDITOR, _('Editor')),
        (MEMBER, _('Member')),
    )
    created_on = models.DateTimeField(_("Created on"), auto_now_add=True)
    updated_on = models.DateTimeField(_("Updated on"), auto_now=True)
    user = models.OneToOneField(User, verbose_name=_("User"))
    position = models.ForeignKey(Position, verbose_name=_(
        "job Position"), on_delete=models.SET_NULL, blank=True, null=True,
        default=None)
    customer = models.ForeignKey('customers.Customer', verbose_name=_(
        "Customer"), on_delete=models.SET_NULL, blank=True, null=True,
        default=None)
    role = models.CharField(
        _("Role"), max_length=1, choices=MEMBER_ROLE_CHOICES, blank=False,
        default=MEMBER)
    active = models.BooleanField(
        _("Active"), default=True, help_text="Is the staff member actively "
        "employed?")
    objects = UserProfileManager()
    class Meta:
        verbose_name = _("Staff Member")
        verbose_name_plural = _("Staff Members")
        ordering = ['user__first_name', 'user__last_name', 'user__email']
    def get_name(self):
        """Best available display name: full name, then email, then username."""
        if self.user.get_full_name():
            return self.user.get_full_name()
        if self.user.email:
            return self.user.email
        return self.user.username
    def get_initials(self):
        """Initials from first/last name, falling back to the email's first char."""
        if self.user.first_name and self.user.last_name:
            return "{}{}".format(self.user.first_name[0],
                                 self.user.last_name[0])
        if self.user.first_name:
            return self.user.first_name[0]
        if self.user.last_name:
            return self.user.last_name[0]
        return self.user.email[0]
    def is_admin(self):
        return self.role == self.ADMIN
    def is_editor(self):
        return self.role == self.EDITOR
    def can_edit(self):
        """Editors and admins may edit."""
        return self.role == self.EDITOR or self.role == self.ADMIN
    def get_subordinates(self):
        """
        Returns a queryset of UserProfile objects which report to this
        userprofile
        """
        # Direct reports: supervised by this user, in a department this user
        # manages, or (when we hold a position) in a position reporting to it.
        if self.position:
            queryset = UserProfile.objects.active().exclude(
                id=self.id).filter(
                models.Q(
                    position__supervisor=self.user) | models.Q(
                    position__department__manager=self.user) | models.Q(
                    position__parent=self.position))
        else:
            queryset = UserProfile.objects.active().exclude(
                id=self.id).filter(
                models.Q(
                    position__supervisor=self.user) | models.Q(
                    position__department__manager=self.user))
        # get job positions of subs
        subordinate_positions = Position.objects.filter(
            userprofile__in=queryset)
        # get any position that may report to these positions
        # list of position ids of Positions that report to
        # subordinate_positions
        reporting_jp_ids = []
        for sub_p in subordinate_positions:
            reporting_jps = sub_p.get_descendants(include_self=False)
            if reporting_jps is not None:
                reporting_jp_ids = reporting_jp_ids + list(
                    reporting_jps.values_list('id', flat=True))
        reporting_jp_ids = list(set(reporting_jp_ids))
        # get user profiles with positions that report to subordinate_positions
        reporting_profiles = UserProfile.objects.active().filter(
            position__id__in=reporting_jp_ids)
        queryset = queryset.union(reporting_profiles)
        # unions result in weird filtering so we create a new queryset
        queryset_ids = list(set([x.id for x in queryset]))
        if queryset_ids:
            queryset = UserProfile.objects.filter(id__in=queryset_ids)
        else:
            queryset = UserProfile.objects.none()
        return queryset
    def has_subordinates(self):
        return self.get_subordinates().exists()
    def get_department(self):
        """Name of the department of this profile's position, or None."""
        if self.position is not None:
            return self.position.department.name
        return None
    def get_absolute_url(self):
        # Placeholder: no detail view exists yet.
        return "#"
    def get_edit_url(self):
        return reverse('users:userprofiles_edit', args=[self.pk])
    def get_delete_url(self):
        # Placeholder: deletion view not implemented.
        return "#"
    def get_list_url(self):
        return reverse('users:userprofiles_list')
    def __str__(self):
        return _("{user}").format(user=self.get_name())
| 33.911392 | 79 | 0.630459 | from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from django.urls import reverse
from django_extensions.db.models import TimeStampedModel
from mptt.models import MPTTModel, TreeForeignKey
from .managers import UserProfileManager, DepartmentManager, PositionManager
User = settings.AUTH_USER_MODEL
class Department(MPTTModel, TimeStampedModel):
name = models.CharField(_("Name"), max_length=255)
description = models.TextField(_("Description"), blank=True, default="")
parent = TreeForeignKey('self', verbose_name=_("Parent"), null=True,
blank=True, related_name='children', db_index=True,
on_delete=models.PROTECT,
help_text=_("The parent department"))
customer = models.ForeignKey(
'customers.Customer', verbose_name=_("Customer"),
on_delete=models.PROTECT)
manager = models.ForeignKey(
User, verbose_name=_("Manager"), on_delete=models.PROTECT,
blank=True, null=True)
active = models.BooleanField(_("Active"), default=True)
objects = DepartmentManager()
class Meta:
verbose_name = _("Department")
verbose_name_plural = _("Departments")
ordering = ['name']
def get_absolute_url(self):
return "#"
def get_edit_url(self):
return reverse('users:departments_edit', args=[self.pk])
def get_delete_url(self):
return reverse('users:departments_delete', args=[self.pk])
def get_list_url(self):
return reverse('users:departments_list')
def __str__(self):
return self.name
class Position(MPTTModel, TimeStampedModel):
name = models.CharField(_("Name"), max_length=255)
description = models.TextField(_("Description"), blank=True, default="")
department = models.ForeignKey(
Department, verbose_name=_("Department"), on_delete=models.PROTECT)
parent = TreeForeignKey('self', verbose_name=_("Reports To"), null=True,
blank=True, related_name='children', db_index=True,
on_delete=models.PROTECT,
help_text=_("The parent Job Position"))
supervisor = models.ForeignKey(
User, verbose_name=_("Supervisor"), on_delete=models.PROTECT,
blank=True, null=True)
customer = models.ForeignKey(
'customers.Customer', verbose_name=_("Customer"),
on_delete=models.PROTECT)
active = models.BooleanField(_("Active"), default=True)
objects = PositionManager()
class Meta:
verbose_name = _("Job Positions")
verbose_name_plural = _("Job Positions")
ordering = ['name']
def get_absolute_url(self):
return "#"
def get_edit_url(self):
return reverse('users:positions_edit', args=[self.pk])
def get_delete_url(self):
return reverse('users:positions_delete', args=[self.pk])
def get_list_url(self):
return reverse('users:positions_list')
def __str__(self):
return "{} - {}".format(self.department.name, self.name)
@python_2_unicode_compatible
class UserProfile(models.Model):
ADMIN = '1'
MEMBER = '2'
EDITOR = '3'
MEMBER_ROLE_CHOICES = (
(ADMIN, _('Admin')),
(EDITOR, _('Editor')),
(MEMBER, _('Member')),
)
created_on = models.DateTimeField(_("Created on"), auto_now_add=True)
updated_on = models.DateTimeField(_("Updated on"), auto_now=True)
user = models.OneToOneField(User, verbose_name=_("User"))
position = models.ForeignKey(Position, verbose_name=_(
"job Position"), on_delete=models.SET_NULL, blank=True, null=True,
default=None)
customer = models.ForeignKey('customers.Customer', verbose_name=_(
"Customer"), on_delete=models.SET_NULL, blank=True, null=True,
default=None)
role = models.CharField(
_("Role"), max_length=1, choices=MEMBER_ROLE_CHOICES, blank=False,
default=MEMBER)
active = models.BooleanField(
_("Active"), default=True, help_text="Is the staff member actively "
"employed?")
objects = UserProfileManager()
class Meta:
verbose_name = _("Staff Member")
verbose_name_plural = _("Staff Members")
ordering = ['user__first_name', 'user__last_name', 'user__email']
def get_name(self):
if self.user.get_full_name():
return self.user.get_full_name()
if self.user.email:
return self.user.email
return self.user.username
def get_initials(self):
if self.user.first_name and self.user.last_name:
return "{}{}".format(self.user.first_name[0],
self.user.last_name[0])
if self.user.first_name:
return self.user.first_name[0]
if self.user.last_name:
return self.user.last_name[0]
return self.user.email[0]
def is_admin(self):
return self.role == self.ADMIN
def is_editor(self):
return self.role == self.EDITOR
def can_edit(self):
return self.role == self.EDITOR or self.role == self.ADMIN
def get_subordinates(self):
if self.position:
queryset = UserProfile.objects.active().exclude(
id=self.id).filter(
models.Q(
position__supervisor=self.user) | models.Q(
position__department__manager=self.user) | models.Q(
position__parent=self.position))
else:
queryset = UserProfile.objects.active().exclude(
id=self.id).filter(
models.Q(
position__supervisor=self.user) | models.Q(
position__department__manager=self.user))
subordinate_positions = Position.objects.filter(
userprofile__in=queryset)
reporting_jp_ids = []
for sub_p in subordinate_positions:
reporting_jps = sub_p.get_descendants(include_self=False)
if reporting_jps is not None:
reporting_jp_ids = reporting_jp_ids + list(
reporting_jps.values_list('id', flat=True))
reporting_jp_ids = list(set(reporting_jp_ids))
reporting_profiles = UserProfile.objects.active().filter(
position__id__in=reporting_jp_ids)
queryset = queryset.union(reporting_profiles)
queryset_ids = list(set([x.id for x in queryset]))
if queryset_ids:
queryset = UserProfile.objects.filter(id__in=queryset_ids)
else:
queryset = UserProfile.objects.none()
return queryset
def has_subordinates(self):
return self.get_subordinates().exists()
def get_department(self):
if self.position is not None:
return self.position.department.name
return None
def get_absolute_url(self):
return "#"
def get_edit_url(self):
return reverse('users:userprofiles_edit', args=[self.pk])
def get_delete_url(self):
return "#"
def get_list_url(self):
return reverse('users:userprofiles_list')
def __str__(self):
return _("{user}").format(user=self.get_name())
| true | true |
1c2db15793a6bd45d52b1845770cbdfdfae549a1 | 5,001 | py | Python | cinder/tests/unit/api/contrib/test_volume_tenant_attribute.py | potsmaster/cinder | 275c2acdfb4282b0ec0314c9875b759958c093f8 | [
"Apache-2.0"
] | null | null | null | cinder/tests/unit/api/contrib/test_volume_tenant_attribute.py | potsmaster/cinder | 275c2acdfb4282b0ec0314c9875b759958c093f8 | [
"Apache-2.0"
] | null | null | null | cinder/tests/unit/api/contrib/test_volume_tenant_attribute.py | potsmaster/cinder | 275c2acdfb4282b0ec0314c9875b759958c093f8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import uuid
from lxml import etree
from oslo_utils import timeutils
import webob
from cinder import context
from cinder import test
from cinder.tests.unit.api import fakes
from cinder import volume
PROJECT_ID = '88fd1da4-f464-4a87-9ce5-26f2f40743b9'
def fake_volume_get(*args, **kwargs):
return {
'id': 'fake',
'host': 'host001',
'status': 'available',
'size': 5,
'availability_zone': 'somewhere',
'created_at': timeutils.utcnow(),
'attach_status': None,
'display_name': 'anothervolume',
'display_description': 'Just another volume!',
'volume_type_id': None,
'snapshot_id': None,
'project_id': PROJECT_ID,
'migration_status': None,
'_name_id': 'fake2',
}
def fake_volume_get_all(*args, **kwargs):
return [fake_volume_get()]
def app():
# no auth, just let environ['cinder.context'] pass through
api = fakes.router.APIRouter()
mapper = fakes.urlmap.URLMap()
mapper['/v2'] = api
return mapper
class VolumeTenantAttributeTest(test.TestCase):
def setUp(self):
super(VolumeTenantAttributeTest, self).setUp()
self.stubs.Set(volume.API, 'get', fake_volume_get)
self.stubs.Set(volume.API, 'get_all', fake_volume_get_all)
self.UUID = uuid.uuid4()
def test_get_volume_allowed(self):
ctx = context.RequestContext('admin', 'fake', True)
req = webob.Request.blank('/v2/fake/volumes/%s' % self.UUID)
req.method = 'GET'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = json.loads(res.body)['volume']
self.assertEqual(vol['os-vol-tenant-attr:tenant_id'], PROJECT_ID)
def test_get_volume_unallowed(self):
ctx = context.RequestContext('non-admin', 'fake', False)
req = webob.Request.blank('/v2/fake/volumes/%s' % self.UUID)
req.method = 'GET'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = json.loads(res.body)['volume']
self.assertNotIn('os-vol-tenant-attr:tenant_id', vol)
def test_list_detail_volumes_allowed(self):
ctx = context.RequestContext('admin', 'fake', True)
req = webob.Request.blank('/v2/fake/volumes/detail')
req.method = 'GET'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = json.loads(res.body)['volumes']
self.assertEqual(vol[0]['os-vol-tenant-attr:tenant_id'], PROJECT_ID)
def test_list_detail_volumes_unallowed(self):
ctx = context.RequestContext('non-admin', 'fake', False)
req = webob.Request.blank('/v2/fake/volumes/detail')
req.method = 'GET'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = json.loads(res.body)['volumes']
self.assertNotIn('os-vol-tenant-attr:tenant_id', vol[0])
def test_list_simple_volumes_no_tenant_id(self):
ctx = context.RequestContext('admin', 'fake', True)
req = webob.Request.blank('/v2/fake/volumes')
req.method = 'GET'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = json.loads(res.body)['volumes']
self.assertNotIn('os-vol-tenant-attr:tenant_id', vol[0])
def test_get_volume_xml(self):
ctx = context.RequestContext('admin', 'fake', True)
req = webob.Request.blank('/v2/fake/volumes/%s' % self.UUID)
req.method = 'GET'
req.accept = 'application/xml'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = etree.XML(res.body)
tenant_key = ('{http://docs.openstack.org/volume/ext/'
'volume_tenant_attribute/api/v2}tenant_id')
self.assertEqual(vol.get(tenant_key), PROJECT_ID)
def test_list_volumes_detail_xml(self):
ctx = context.RequestContext('admin', 'fake', True)
req = webob.Request.blank('/v2/fake/volumes/detail')
req.method = 'GET'
req.accept = 'application/xml'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = list(etree.XML(res.body))[0]
tenant_key = ('{http://docs.openstack.org/volume/ext/'
'volume_tenant_attribute/api/v2}tenant_id')
self.assertEqual(vol.get(tenant_key), PROJECT_ID)
| 36.23913 | 77 | 0.641072 |
import json
import uuid
from lxml import etree
from oslo_utils import timeutils
import webob
from cinder import context
from cinder import test
from cinder.tests.unit.api import fakes
from cinder import volume
PROJECT_ID = '88fd1da4-f464-4a87-9ce5-26f2f40743b9'
def fake_volume_get(*args, **kwargs):
return {
'id': 'fake',
'host': 'host001',
'status': 'available',
'size': 5,
'availability_zone': 'somewhere',
'created_at': timeutils.utcnow(),
'attach_status': None,
'display_name': 'anothervolume',
'display_description': 'Just another volume!',
'volume_type_id': None,
'snapshot_id': None,
'project_id': PROJECT_ID,
'migration_status': None,
'_name_id': 'fake2',
}
def fake_volume_get_all(*args, **kwargs):
return [fake_volume_get()]
def app():
api = fakes.router.APIRouter()
mapper = fakes.urlmap.URLMap()
mapper['/v2'] = api
return mapper
class VolumeTenantAttributeTest(test.TestCase):
def setUp(self):
super(VolumeTenantAttributeTest, self).setUp()
self.stubs.Set(volume.API, 'get', fake_volume_get)
self.stubs.Set(volume.API, 'get_all', fake_volume_get_all)
self.UUID = uuid.uuid4()
def test_get_volume_allowed(self):
ctx = context.RequestContext('admin', 'fake', True)
req = webob.Request.blank('/v2/fake/volumes/%s' % self.UUID)
req.method = 'GET'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = json.loads(res.body)['volume']
self.assertEqual(vol['os-vol-tenant-attr:tenant_id'], PROJECT_ID)
def test_get_volume_unallowed(self):
ctx = context.RequestContext('non-admin', 'fake', False)
req = webob.Request.blank('/v2/fake/volumes/%s' % self.UUID)
req.method = 'GET'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = json.loads(res.body)['volume']
self.assertNotIn('os-vol-tenant-attr:tenant_id', vol)
def test_list_detail_volumes_allowed(self):
ctx = context.RequestContext('admin', 'fake', True)
req = webob.Request.blank('/v2/fake/volumes/detail')
req.method = 'GET'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = json.loads(res.body)['volumes']
self.assertEqual(vol[0]['os-vol-tenant-attr:tenant_id'], PROJECT_ID)
def test_list_detail_volumes_unallowed(self):
ctx = context.RequestContext('non-admin', 'fake', False)
req = webob.Request.blank('/v2/fake/volumes/detail')
req.method = 'GET'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = json.loads(res.body)['volumes']
self.assertNotIn('os-vol-tenant-attr:tenant_id', vol[0])
def test_list_simple_volumes_no_tenant_id(self):
ctx = context.RequestContext('admin', 'fake', True)
req = webob.Request.blank('/v2/fake/volumes')
req.method = 'GET'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = json.loads(res.body)['volumes']
self.assertNotIn('os-vol-tenant-attr:tenant_id', vol[0])
def test_get_volume_xml(self):
ctx = context.RequestContext('admin', 'fake', True)
req = webob.Request.blank('/v2/fake/volumes/%s' % self.UUID)
req.method = 'GET'
req.accept = 'application/xml'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = etree.XML(res.body)
tenant_key = ('{http://docs.openstack.org/volume/ext/'
'volume_tenant_attribute/api/v2}tenant_id')
self.assertEqual(vol.get(tenant_key), PROJECT_ID)
def test_list_volumes_detail_xml(self):
ctx = context.RequestContext('admin', 'fake', True)
req = webob.Request.blank('/v2/fake/volumes/detail')
req.method = 'GET'
req.accept = 'application/xml'
req.environ['cinder.context'] = ctx
res = req.get_response(app())
vol = list(etree.XML(res.body))[0]
tenant_key = ('{http://docs.openstack.org/volume/ext/'
'volume_tenant_attribute/api/v2}tenant_id')
self.assertEqual(vol.get(tenant_key), PROJECT_ID)
| true | true |
1c2db1f67a2d09b7d486f3f1ad5c389b9885d986 | 878 | py | Python | mmtfPyspark/tests/datasets/test_polymerSequenceExtractor.py | sbliven/mmtf-pyspark | 3d444178bdc0d5128aafdb1326fec12b5d7634b5 | [
"Apache-2.0"
] | 59 | 2018-01-28T06:50:56.000Z | 2022-02-10T06:07:12.000Z | mmtfPyspark/tests/datasets/test_polymerSequenceExtractor.py | sbliven/mmtf-pyspark | 3d444178bdc0d5128aafdb1326fec12b5d7634b5 | [
"Apache-2.0"
] | 101 | 2018-02-01T20:51:10.000Z | 2022-01-24T00:50:29.000Z | mmtfPyspark/tests/datasets/test_polymerSequenceExtractor.py | sbliven/mmtf-pyspark | 3d444178bdc0d5128aafdb1326fec12b5d7634b5 | [
"Apache-2.0"
] | 29 | 2018-01-29T10:09:51.000Z | 2022-01-23T18:53:28.000Z | #!/usr/bin/env python
import unittest
from pyspark.sql import SparkSession
from mmtfPyspark.io.mmtfReader import download_mmtf_files
from mmtfPyspark.datasets import polymerSequenceExtractor
from mmtfPyspark.mappers import StructureToPolymerChains
class PolymerSequenceExtractorTest(unittest.TestCase):
def setUp(self):
self.spark = SparkSession.builder.master("local[*]") \
.appName("polymerSequenceExtractorTest") \
.getOrCreate()
pdbIds = ["1STP","4HHB"]
self.pdb = download_mmtf_files(pdbIds)
def test1(self):
pdb = self.pdb.flatMap(StructureToPolymerChains())
seq = polymerSequenceExtractor.get_dataset(pdb)
self.assertTrue(seq.count() == 5)
def tearDown(self):
self.spark.stop()
if __name__ == '__main__':
unittest.main()
| 25.823529 | 75 | 0.666287 |
import unittest
from pyspark.sql import SparkSession
from mmtfPyspark.io.mmtfReader import download_mmtf_files
from mmtfPyspark.datasets import polymerSequenceExtractor
from mmtfPyspark.mappers import StructureToPolymerChains
class PolymerSequenceExtractorTest(unittest.TestCase):
def setUp(self):
self.spark = SparkSession.builder.master("local[*]") \
.appName("polymerSequenceExtractorTest") \
.getOrCreate()
pdbIds = ["1STP","4HHB"]
self.pdb = download_mmtf_files(pdbIds)
def test1(self):
pdb = self.pdb.flatMap(StructureToPolymerChains())
seq = polymerSequenceExtractor.get_dataset(pdb)
self.assertTrue(seq.count() == 5)
def tearDown(self):
self.spark.stop()
if __name__ == '__main__':
unittest.main()
| true | true |
1c2db35a10b0968fdc22b3acdada71c16fa39a8d | 4,729 | py | Python | api.py | Salva5297/WidocoServer | 75e0170c2a644c4fbc3e1f673bd1c3ddc0d8fb73 | [
"Apache-2.0"
] | null | null | null | api.py | Salva5297/WidocoServer | 75e0170c2a644c4fbc3e1f673bd1c3ddc0d8fb73 | [
"Apache-2.0"
] | null | null | null | api.py | Salva5297/WidocoServer | 75e0170c2a644c4fbc3e1f673bd1c3ddc0d8fb73 | [
"Apache-2.0"
] | null | null | null | import os
from flask import Flask, render_template, request, send_file
from flask_restful import Api, Resource, reqparse
import tempfile
from werkzeug.utils import secure_filename
import zipfile
import json
app = Flask(__name__)
api = Api(app)
def zipdir(dirPath=None, zipFilePath=None, includeDirInZip=False):
if not zipFilePath:
zipFilePath = dirPath + ".zip"
if not os.path.isdir(dirPath):
raise OSError("dirPath argument must point to a directory. "
"'%s' does not." % dirPath)
parentDir, dirToZip = os.path.split(dirPath)
def trimPath(path):
archivePath = path.replace(parentDir, "", 1)
if parentDir:
archivePath = archivePath.replace(os.path.sep, "", 1)
if not includeDirInZip:
archivePath = archivePath.replace(dirToZip + os.path.sep, "", 1)
return os.path.normcase(archivePath)
outFile = zipfile.ZipFile(zipFilePath, "w",
compression=zipfile.ZIP_DEFLATED)
for (archiveDirPath, dirNames, fileNames) in os.walk(dirPath):
for fileName in fileNames:
filePath = os.path.join(archiveDirPath, fileName)
outFile.write(filePath, trimPath(filePath))
if not fileNames and not dirNames:
zipInfo = zipfile.ZipInfo(trimPath(archiveDirPath) + "/")
outFile.writestr(zipInfo, "")
outFile.close()
class Widoco(Resource):
def post(self):
os.system("rm -rf tmp")
os.system("mkdir tmp")
extend = "java -jar widoco.jar -outFolder tmp/WidocoDocs "
data = request.form.get("data")
data = json.loads(data)
# If we have the ontology file
if(request.files["ontoFile"]):
file = request.files["ontoFile"]
file.save(os.path.join("tmp/", secure_filename(file.filename)))
file_stats = os.stat("tmp/"+file.filename)
extend += "-ontFile tmp/" + file.filename + " "
# If we have the ontology uri
elif("ontoUri" in data):
extend += "-ontUri " + data["ontoUri"] + " "
# If we dont have anything
else:
return "Error no Ontology to make Documentation"
# If we have configFile
if("confFile" in data):
extend += "-confFile " + data["confFile"] + " "
# If we have getOntologyMetadata
elif("getOntologyMetadata" in data):
extend += "-getOntologyMetadata "
# If we have oops
if("oops" in data):
extend += "-oops "
# If we have rewriteAll
if("rewriteAll" in data):
extend += "-rewriteAll "
# If we have crossRef
if("crossRef" in data):
extend += "-crossRef "
# If we have saveConfig
if("saveConfig" in data):
extend += "-saveConfig " + data["saveConfig"] + " "
# If we have usecustomStyle
if("usecustomStyle" in data):
extend += "-useCustomStyle "
# If we have lang
if("lang" in data):
extend += "-lang " + data["lang"] + " "
# If we have includeImportedOntologies
if("includeImportedOntologies" in data):
extend += "-includeImportedOntologies "
# If we have htaccess
if("htaccess" in data):
extend += "-htaccess "
# If we have webVowl
if("webVowl" in data):
extend += "-webVowl "
# If we have licensius
if("licensius" in data):
extend += "-licensius "
# If we have ignoreIndividuals
if("ignoreIndividuals" in data):
extend += "-ignoreIndividuals "
# If we have analytics
if("analytics" in data):
extend += "-analytics " + data["analytics"] + " "
# If we have doNotDisplaySerializations
if("doNotDisplaySerializations" in data):
extend += "-doNotDisplaySerializations "
# If we have displayDirectImportsOnly
if("displayDirectImportsOnly" in data):
extend += "-displayDirectImportsOnly "
# If we have rewriteBase
if("rewriteBase" in data):
extend += "-rewriteBase " + data["rewriteBase"] + " "
# If we have excludeIntroduction
if("excludeIntroduction" in data):
extend += "-excludeIntroduction "
# If we have uniteSections
if("uniteSections" in data):
extend += "-uniteSections "
print(extend)
os.system(extend)
os.system(extend)
zipdir("tmp/WidocoDocs/","tmp/WidocoDocs.zip",True)
return send_file("tmp/WidocoDocs.zip", attachment_filename='WidocoDocs.zip')
api.add_resource(Widoco, "/")
app.run(host='0.0.0.0')
| 31.317881 | 84 | 0.58469 | import os
from flask import Flask, render_template, request, send_file
from flask_restful import Api, Resource, reqparse
import tempfile
from werkzeug.utils import secure_filename
import zipfile
import json
app = Flask(__name__)
api = Api(app)
def zipdir(dirPath=None, zipFilePath=None, includeDirInZip=False):
if not zipFilePath:
zipFilePath = dirPath + ".zip"
if not os.path.isdir(dirPath):
raise OSError("dirPath argument must point to a directory. "
"'%s' does not." % dirPath)
parentDir, dirToZip = os.path.split(dirPath)
def trimPath(path):
archivePath = path.replace(parentDir, "", 1)
if parentDir:
archivePath = archivePath.replace(os.path.sep, "", 1)
if not includeDirInZip:
archivePath = archivePath.replace(dirToZip + os.path.sep, "", 1)
return os.path.normcase(archivePath)
outFile = zipfile.ZipFile(zipFilePath, "w",
compression=zipfile.ZIP_DEFLATED)
for (archiveDirPath, dirNames, fileNames) in os.walk(dirPath):
for fileName in fileNames:
filePath = os.path.join(archiveDirPath, fileName)
outFile.write(filePath, trimPath(filePath))
if not fileNames and not dirNames:
zipInfo = zipfile.ZipInfo(trimPath(archiveDirPath) + "/")
outFile.writestr(zipInfo, "")
outFile.close()
class Widoco(Resource):
def post(self):
os.system("rm -rf tmp")
os.system("mkdir tmp")
extend = "java -jar widoco.jar -outFolder tmp/WidocoDocs "
data = request.form.get("data")
data = json.loads(data)
if(request.files["ontoFile"]):
file = request.files["ontoFile"]
file.save(os.path.join("tmp/", secure_filename(file.filename)))
file_stats = os.stat("tmp/"+file.filename)
extend += "-ontFile tmp/" + file.filename + " "
elif("ontoUri" in data):
extend += "-ontUri " + data["ontoUri"] + " "
else:
return "Error no Ontology to make Documentation"
if("confFile" in data):
extend += "-confFile " + data["confFile"] + " "
elif("getOntologyMetadata" in data):
extend += "-getOntologyMetadata "
if("oops" in data):
extend += "-oops "
if("rewriteAll" in data):
extend += "-rewriteAll "
if("crossRef" in data):
extend += "-crossRef "
if("saveConfig" in data):
extend += "-saveConfig " + data["saveConfig"] + " "
if("usecustomStyle" in data):
extend += "-useCustomStyle "
if("lang" in data):
extend += "-lang " + data["lang"] + " "
if("includeImportedOntologies" in data):
extend += "-includeImportedOntologies "
if("htaccess" in data):
extend += "-htaccess "
if("webVowl" in data):
extend += "-webVowl "
if("licensius" in data):
extend += "-licensius "
if("ignoreIndividuals" in data):
extend += "-ignoreIndividuals "
if("analytics" in data):
extend += "-analytics " + data["analytics"] + " "
if("doNotDisplaySerializations" in data):
extend += "-doNotDisplaySerializations "
if("displayDirectImportsOnly" in data):
extend += "-displayDirectImportsOnly "
if("rewriteBase" in data):
extend += "-rewriteBase " + data["rewriteBase"] + " "
if("excludeIntroduction" in data):
extend += "-excludeIntroduction "
if("uniteSections" in data):
extend += "-uniteSections "
print(extend)
os.system(extend)
os.system(extend)
zipdir("tmp/WidocoDocs/","tmp/WidocoDocs.zip",True)
return send_file("tmp/WidocoDocs.zip", attachment_filename='WidocoDocs.zip')
api.add_resource(Widoco, "/")
app.run(host='0.0.0.0')
| true | true |
1c2db3eb24f12d0a3f3016c599035d65b14f6ae1 | 51,413 | py | Python | mmtbx/cablam/cablam_training.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 155 | 2016-11-23T12:52:16.000Z | 2022-03-31T15:35:44.000Z | mmtbx/cablam/cablam_training.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 590 | 2016-12-10T11:31:18.000Z | 2022-03-30T23:10:09.000Z | mmtbx/cablam/cablam_training.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 115 | 2016-11-15T08:17:28.000Z | 2022-02-09T15:30:14.000Z | from __future__ import absolute_import, division, print_function
# (jEdit options) :folding=explicit:collapseFolds=1:
#This module contains the training/exploration components of cablam
#It can be run stand-alone with many commandline options
#It is intended for use in determining contours, motif fingerprints, etc. for
# the annotation portion
#It is probably not intended for direct use in phenix, but is included as a
# useful tool for understanding the cablam system
#The May 2012 update reflects a substantial change in method from the previous
# version. DSSP has been abandoned in favor of directly assessing hydrogen
# bonding patterns determined by Probe. Hydrogen bonding pattern definitions
# are stored in fingerprints.py, but will likely be assembled into a separate
# library in the future.
#2012-09-05 cablam_training can now run probe if precomputed probe files are not
# provided. Argument parsing has been updated to libtbx.phil for phenix
# compatibility. cablam=True now yields CA_d_in, CA_d_out, and (instead of
# CA_a) CO_d_in. usage() help message added.
#2012-12-04: Added cis_or_trans argument for selecting cis or non-cis peptides
# during printing. Default returns all residues.
#2013-09-17: Major fingerprints rewrite to use new fingerprints objects/methods
# /storage. See cablam_fingerprints.py and the fingerprints dir.
# add_probe_data now stores full 4-character pdb-style atom names
# New output: probe_mode=sequence will print amino acid sequence for motif
#2014_02_07: Updates to probe output methods to match changes in
# cablam_fingerprints. Motifs without continuous sequence now supported for all
# probe outputs
#Next: iotbx.file_reader incorporated to control input
#To do: Collect cis-peptides for analysis. Are they identifiable in cablamspace?
# Add clash filtering. 0.4 is sufficient clash to cull, mc-mc are the important
# contacts, at least for base cablam
import os, sys
from iotbx import pdb #contains the very useful hierarchy
from mmtbx.cablam import cablam_res #contains a data structure derived from
# hierarchy, but more suited to cablam's needs - specifically it can hold
# geometric and probe measures and can look forward and backward in sequence
from mmtbx.cablam import cablam_math #contains geometric measure calculators
#from mmtbx.cablam import fingerprints #contains motif definitions
from mmtbx.cablam import cablam_fingerprints
#import cablam_fingerprints
# Storage for motif definitions subject to change
from libtbx import easy_run
import libtbx.phil.command_line
from iotbx import file_reader
from libtbx import group_args
#{{{ phil
#-------------------------------------------------------------------------------
# Master PHIL scope defining the command-line interface for cablam_training.
# The triple-quoted string below is parsed by libtbx.phil at import time; each
# entry becomes a cablam_training.<name> parameter reachable from the command
# line.  NOTE: the string itself is runtime data, not a comment -- its content
# (including the '*' markers that select defaults for choice parameters such as
# probe_mode and cis_or_trans) determines program behavior and must not be
# reformatted casually.
master_phil = libtbx.phil.parse("""
cablam_training {
file_or_dir = None
.type = path
.help = '''input pdb file or dir thereof'''
separate_files = False
.type = bool
.help = '''Generate a separate, auto-named output file for each input file'''
give_kin = False
.type = bool
.help = '''Print output to screen in .kin format (default is comma-separated .csv format)'''
give_connections = False
.type = bool
.help = '''Add prevres and nextres columns to .csv output'''
debug = False
.type = bool
.help = '''Adds some text printed to stderr for debugging esp. for fingerprints'''
all_measures = False
.type = bool
.help = '''Does all measures'''
cad = False
.type = bool
.help = '''2 CA pseudo dihedrals'''
caa = False
.type = bool
.help = '''3 CA pseudo angles'''
cod = False
.type = bool
.help = '''2 CO pseudo dihedrals'''
rama = False
.type = bool
.help = '''2 Ramachandran dihedrals: phi, psi'''
exrama = False
.type = bool
.help = '''4 Ramachandran dihedrals: psi-1, phi, psi, phi+1'''
tau = False
.type = bool
.help = '''1 backbone angle: tau (defined by N-CA-C)'''
omega = False
.type = bool
.help = '''1 backbone dihedral: omega (defined by CA_1-C_1-N_2-CA_2)'''
cablam = False
.type = bool
.help = '''Shortcut for just cablam-relevant measures CA_d_in, CA_d_out, CO_in'''
probe_motifs = None
.type = strings
.help = '''Activates hydrogen bonding analysis, probe=motif_name1,motif_name2,... use --listmotifs to list available fingerprints'''
probe_path = None
.type = path
.help = '''Stores path to dir of probed files, probe will be called for each file if this is not provided'''
probe_mode = *kin annote instance sequence superpose
.type = choice
.help = '''=kin for dotlist kins (default) =annote for ball on model, =instance for vectorlist kins'''
list_motifs = False
.type = bool
.help = '''print motifs/fingerprints available to screen'''
b_max = None
.type = float
.help = '''Set a max b factor, residues containing a backbone atom with higher b will be pruned, recommended: -b=30'''
prune_alts = False
.type = bool
.help = '''Removes all residues with alternate conformations in relevant atoms'''
prune = None
.type = strings
.help = '''List of restypes to be pruned, separated by commas, no spaces eg PRO'''
skip_types = None
.type = strings
.help = '''List of restypes to be skipped during printing, separated by commas'''
include_types = None
.type = strings
.help = '''List of restypes to be printed, all others will be skipped'''
cis_or_trans = *both cis trans
.type = choice
.help = '''selects whether cis-peptides, trans-peptides, or both will be returned'''
fear = False
.type = bool
.help = '''turns on fear-to-tread analysis (this is temporary)'''
help = False
.type = bool
.help = '''print help text to screen'''
}
""", process_includes=True)
#-------------------------------------------------------------------------------
#}}}
#{{{ usage notes
#-------------------------------------------------------------------------------
def usage():
  """Print the cablam_training help text to stderr.

  Fixes relative to the earlier text: the cis/trans omega windows now match
  the actual test in fails_cis_check() (cis: -30 to +30; trans: |omega| >=
  150), tau is described as N-CA-C as in the phil help, and several typos
  are corrected.  The help content itself is otherwise unchanged.
  """
  sys.stderr.write("""
phenix.cablam_training or cablam_training.py is a program intended for the
exploration of protein structure datasets, the annotation of motifs of
interest, and the training of reference datasets. It was used in the
construction of the reference contours used by cablam_validate. It contains a
number of features and modes and is intended primarily as a development tool
rather than a utility for typical users. However, anyone interested in
exploring protein backbone geometry may find something of use here.
--------------------------------------------------------------------------------
file_or_dir=*path*
  Path to a pdb file or dir of pdb files to operate on, the only argument that
  doesn't need an explicit flag
--------------------------------------------------------------------------------
-----Basic Printing Options-----------------------------------------------------
separate_files=True/False
  Generate a separate, auto-named output file in the current dir for each input
  file, default output prints a single file to screen
give_kin=True/False
  Print output to screen in .kin format, may be combined with separate_files,
  default output prints comma-separated .csv format
give_connections=True/False
  If set to True, adds prevres and nextres columns to .csv output
skip_types=restype1,restype2
include_types=restype3,restype4
  Together, these control which residue types are printed to screen or file.
  Default prints all residues.
  Residue types and relationships given to skip_types are excluded from printing
  If only include_types is used, only the listed restypes will be printed
  If include_types and skip_types are both used, then the types given to
  include_types will override those skipped by skip_types.
  List restypes by their 3-letter code and separated by commas without spaces,
  e.g. GLY,PRO,ALA,TRP
  Sequence relationships may be represented with underscores, e.g. _PRO is
  pre-proline, and GLY__ (2 underscores) is post-post-glycine
  examples:
  skip_types=PRO would print every residue except proline
  include_types=PRO,GLY would print *only* glycines and prolines
  skip_types=_PRO include_types=GLY would skip pre-prolines unless they were
  also glycines
cis_or_trans='cis' 'trans' 'both'
  Selects printing for cis-peptides or trans-peptides exclusively. The default
  is 'both' which will print all residues. cis is defined as -30 to +30 degrees
  trans is defined as 150 to 180 and -150 to -180 degrees for the omega dihedral
  Note that selecting 'cis' or 'trans' will also stop printing for any residue
  for which omega cannot be calculated.
--------------------------------------------------------------------------------
-----Probe and Motif Search Options---------------------------------------------
This is an alternate mode which searches for hydrogen bonding patterns defined
  in fingerprints.
probe_motifs=motif_name1,motif_name2
  This flag activates hydrogen bonding pattern analysis, which will not run
  otherwise. The flag accepts a spaceless string of comma-separated motif names
  to search for. Use list_motifs=True to get a list of available motifs.
probe_path=*path*
  cablam_training can use precomputed probe results to speed up runs on large
  datasets. If a path to such prepared files is not provided, Reduce and Probe
  will be run on each pdb file, which may be time-consuming.
  Running:
phenix.probe -u -condense -self -mc -NOVDWOUT -NOCLASHOUT MC filename.pdb > filename.probe
  Should produce appropriately formatted and named files for this option
probe_mode=kin/annote/instance/sequence
  These are printing options for hydrogen bond pattern analysis, which overrides
  the Basic Printing Options above.
  Choose 1 of 3:
  =kin returns automatically-named kinemage files, one for each unique member
  residue in each motif. The kins are high-dimensional dotlists containing the
  measures specified in the commandline (see below for options). This is the
  default printing.
  =annote returns an automatically-named kinemage file for each pdb file. These
  kins are balllists that highlight the selected motifs of interest if appended
  to existing kinemages of the structures.
  =instance returns an automatically-named vectorlist kinemage file for each motif
  of interest. Each kin is a high-dimensional vectorlist that shows the path of
  a multi-residue motif through the measures specified in the commandline
  (see below for options)
  =sequence prints to screen the amino acid sequence of the motif of interest.
  Does not behave with multiple motifs. Uses single-letter amino acid codes, if
  a residue type is unrecognized, will print 'X' followed by the 3-letter code.
list_motifs=True/False
  Prints to screen a list of all the motifs/"fingerprints" currently available
  for hydrogen bond pattern search
--------------------------------------------------------------------------------
-----Geometric Measures---------------------------------------------------------
All of these default to False, and some output modes will not function unless at
  least one of these options is turned on. When in doubt, cablam=True and/or
  rama=True will provide relevant information.
cad=True/False
  For each residue, calculate the 2 C-alpha pseudo dihedrals
caa=True/False
  For each residue, calculate the 3 C-alpha pseudo angles
cod=True/False
  For each residue, calculate the 2 carbonyl oxygen pseudo dihedrals
rama=True/False
  For each residue, calculate Ramachandran dihedrals phi and psi
exrama=True/False
  For each residue, calculate Ramachandran dihedrals psi-1, phi, psi, phi+1
tau=True/False
  For each residue, calculate backbone angle tau, defined by N-CA-C
omega=True/False
  For each residue, calculate backbone peptide dihedral,
  defined by CA_1,C_1,N_2,CA_2
all_measures=True/False
  For each residue, calculate all of the above measures (may be overkill)
cablam=True/False
  Recommended, but not default behavior.
  For each residue calculate the measures most relevant to cablam analysis:
  CA_d_in, CA_d_out, CO_in
--------------------------------------------------------------------------------
-----Quality Control Options----------------------------------------------------
b_max=#.#
  Set a max b factor value. Residues containing a backbone atom with higher b
  will be pruned and excluded from all calculations. Note this may affect
  neighboring residues. Strongly Recommended: b_max=30.0
prune_alts=True/False
  Prune and excludes from calculations all residues with alternate conformations
  for backbone atoms. Note this may affect neighboring residues. Default is
  prune_alts=False, which results in only the first alternate position for each
  residue being reported on.
prune=restype1,restype2
  Prune and exclude from calculations the selected list of residue types. Note
  this may affect neighboring residues. Restypes should be given as 3-letter
  codes, e.g. GLY,PRO, but this option does not yet support the sequence
  relationship that skip_types= and include_types= do.
--------------------------------------------------------------------------------
-----Help Options---------------------------------------------------------------
help=True/False
  Displays this help message.
list_motifs=True/False
  Prints to screen a list of all the motifs/"fingerprints" currently available
  for hydrogen bond pattern search
debug=True/False
  Activates print-to-stderr debugging notes for hydrogen bond pattern search.
  This may be valuable when trying to define a new pattern correctly and with
  proper format.
--------------------------------------------------------------------------------
Examples:

phenix.cablam_training cad=True cod=True skip_types=GLY,PRO,_PRO,ILE,VAL b_max=30.0 kin=True file_or_dir=path/pdbfilename.pdb

phenix.cablam_training cablam=True b_max=30.0 prune=GLY probe_motifs=parallel_beta,antiparallel_beta_cwc,antiparallel_beta_wcw probe_mode=kin probe_path=path/database/probefiles file_or_dir=path/database/pdbfiles
""")
#-------------------------------------------------------------------------------
#}}}
#{{{ stripB function
#Deletes all residues containing any atom of interest with atom.b > bmax from
# a dictionary of residues, so that the uncertainty in these atoms cannot
# contaminate later calculations.
#Important for training, not for annotation
#Will need to make distinction between main- and side-chain eventually
#-------------------------------------------------------------------------------
def stripB(resdata, bmax):
  """Delete from resdata every residue containing any recorded atom with
  B-factor above bmax, so the uncertainty in those atoms cannot contaminate
  later calculations.

  Important for training, less so for annotation.  Mutates resdata in place;
  removelinks() is called on each pruned residue first so neighbors do not
  keep stale prev/next references.  (A main-chain/side-chain distinction may
  be needed here eventually.)
  """
  for resid in list(resdata.keys()):
    residue = resdata[resid]
    has_high_b = any(
      b_value > bmax
      for alt in residue.alts
      for b_value in residue.atomb[alt].values())
    if has_high_b:
      residue.removelinks()
      del resdata[resid]
#-------------------------------------------------------------------------------
#}}}
#{{{ prune alts function
#Deletes all residues that have alternate conformations at one or more atoms
# from a dictionary of residues, so that uncertainty in these atoms or in their
# relations with other atoms in the structure cannot contaminate later
# calculations.
#A function for robustly handling and choosing among alternates is eventually
# forthcoming, but will be separate.
#-------------------------------------------------------------------------------
def prune_alts(resdata):
  """Delete from resdata every residue that has more than one alternate
  conformation, so ambiguity in those atoms (or in their relations with other
  atoms) cannot contaminate later calculations.

  Mutates resdata in place; removelinks() is called on each pruned residue so
  neighbors do not keep stale prev/next references.  A routine for robustly
  choosing among alternates would supersede this.
  """
  multi_alt_ids = [resid for resid, residue in list(resdata.items())
    if len(residue.alts) > 1]
  for resid in multi_alt_ids:
    resdata[resid].removelinks()
    del resdata[resid]
#-------------------------------------------------------------------------------
#}}}
#{{{ skipcheck function
#Residue types can be skipped during output without pruning their influence
# entirely. This function handles checks for skipping, and returns boolean True
# if the residue should be skipped.
#Additional functionality is expected in this function over time. More complex
# sequence-sensitive selection is probable as I expand my training needs.
#As with pruning, important in training, less so in annotation.
#-------------------------------------------------------------------------------
def _resolve_restype_pattern(residue, pattern, flagname):
  """Resolve a restype pattern, possibly offset with underscores, against
  residue.

  Leading underscores step toward the C-terminus (next residue) and trailing
  underscores step toward the N-terminus: '_PRO' is pre-proline, 'GLY__'
  (two underscores) is post-post-glycine.  Underscores on both sides are an
  input error and exit the program; flagname names the offending parameter in
  the error message.

  Returns (resname_of_target_residue, bare_pattern), or None if the chain
  ends before the offset residue is reached or that residue has no CA atom —
  in which case inclusion cannot be determined and the caller should skip.
  """
  if pattern.startswith('_') and pattern.endswith('_'):
    sys.stderr.write('\nInvalid '+flagname+' argument: '+pattern+
      " has '_' on both sides\n\n")
    sys.exit()
  currentres = residue
  while pattern.startswith('_'):
    if not currentres.nextres:
      return None
    currentres = currentres.nextres
    pattern = pattern[1:]
  while pattern.endswith('_'):
    if not currentres.prevres:
      return None
    currentres = currentres.prevres
    pattern = pattern[:-1]
  firstalt = currentres.firstalt('CA')
  if firstalt is None:
    return None
  return currentres.alts[firstalt]['resname'], pattern

def skipcheck(residue, skiplist, inclist):
  """Return True if residue should be skipped during output printing.

  Residue types can be skipped during output without pruning their influence
  entirely; this handles the skip_types/include_types logic:
  - neither list given: include everything (return False)
  - skiplist given: include by default; skip residues matching skiplist
  - only inclist given: skip by default; include residues matching inclist
  - both given: inclist matches override skiplist matches
  Patterns matched case-insensitively by 3-letter code, with underscore
  offsets as described in _resolve_restype_pattern().  Residues whose
  relative pattern cannot be resolved are skipped (True).

  Fixes vs. the previous version: the duplicated skip/include loops now share
  _resolve_restype_pattern(), the error message for a bad include pattern no
  longer claims it came from '--skip' (messages now name the actual
  skip_types/include_types parameters), and a None list alongside a non-empty
  one no longer raises TypeError.
  """
  if skiplist:       #if there's anything to skip...
    doskip = False   #...the default state is include
  elif inclist:      #if there's nothing to skip but things to include...
    doskip = True    #...the default state is skip
  else:
    return False     #if skip and include are empty, return default 'include'
  for skip in (skiplist or []):
    resolved = _resolve_restype_pattern(residue, skip, 'skip_types')
    if resolved is None:
      return True    #cannot determine inclusion, so exclude
    resname, bare_skip = resolved
    if resname == bare_skip.upper():
      doskip = True
  for inc in (inclist or []):
    resolved = _resolve_restype_pattern(residue, inc, 'include_types')
    if resolved is None:
      return True    #cannot determine inclusion, so exclude
    resname, bare_inc = resolved
    if resname == bare_inc.upper():
      doskip = False
  return doskip
#-------------------------------------------------------------------------------
#}}}
#{{{ fails cis check function
#Allows cis or trans peptides to be skipped during printing. Passing
# cis_or_trans='both' will print all residues. Residues without an omega value
# will be skipped unless cis_or_trans=='both'.
#As with pruning, important in training, less so in annotation.
#-------------------------------------------------------------------------------
def fails_cis_check(residue,cis_or_trans):
  """Return True if residue should be excluded from printing for the given
  cis_or_trans selection.

  'both' accepts every residue.  Otherwise a residue passes only if its omega
  dihedral has been calculated and falls in the selected window: cis is
  -30 to +30 degrees, trans is 150 to 180 or -180 to -150 degrees.  Residues
  without a calculated omega fail unless cis_or_trans=='both'.
  """
  if cis_or_trans == 'both':
    return False
  if 'omega' not in residue.measures:
    return True
  omega = residue.measures['omega']
  if cis_or_trans == 'cis':
    return not (-30 <= omega <= 30)
  if cis_or_trans == 'trans':
    return not (omega >= 150 or omega <= -150)
  return True
#-------------------------------------------------------------------------------
#}}}
#{{{ make probe data function
#If a precomputed probe file has not been provided, this function calls probe to
# generate appropriate data for use in add_probe_data()
#-------------------------------------------------------------------------------
def make_probe_data(hierarchy):
  """Run Reduce (trim, then rebuild hydrogens) and Probe on the first model of
  hierarchy and return Probe's output lines for use by add_probe_data().

  Used when a precomputed .probe file has not been supplied.  Shells out to
  phenix.reduce and phenix.probe via easy_run, so this can be slow on large
  files; progress notes are written to stderr.
  """
  trim_command = "phenix.reduce -quiet -trim -"
  build_command = "phenix.reduce -oh -his -flip -pen9999 -keep -allalt -"
  #probe_command = "phenix.probe -u -condense -self -mc -NOVDWOUT -NOCLASHOUT MC -"
  probe_command = "phenix.probe -u -condense -self -mc -NOVDWOUT -NOCLASHOUT ALL -"
  #Probe output carries no model information and add_probe_data() does not
  # handle multiple models, so only the first model is used here
  # (multi-model compatibility coming soon?).
  for candidate_model in hierarchy.models():
    model = candidate_model
    break
  single_model_root = pdb.hierarchy.root()
  single_model_root.append_model(model.detached_copy())
  sys.stderr.write(' cleaning . . .\n')
  clean_out = easy_run.fully_buffered(trim_command,
    stdin_lines=single_model_root.as_pdb_string())
  sys.stderr.write(' reducing . . .\n')
  build_out = easy_run.fully_buffered(build_command,
    stdin_lines=clean_out.stdout_lines)
  sys.stderr.write(' probing . . .\n')
  #probe reads a single joined string on stdin here (reduce took line lists)
  probe_out = easy_run.fully_buffered(probe_command,
    stdin_lines='\n'.join(build_out.stdout_lines))
  return probe_out.stdout_lines
#-------------------------------------------------------------------------------
#}}}
#{{{ add probe data function
#Adds mainchain-mainchain hydrogen bonding information from 'unformatted' Probe
# output to a dictionary of residues.
#At the moment, reliant on precomputed .probe files; run-time Probe is planned.
#May gain other contact relationship info, but mc-mc H-bonds are most important
#-------------------------------------------------------------------------------
def add_probe_data(resdata, open_probe_file):
  """Add mainchain-mainchain hydrogen bonding records from condensed
  'unformatted' Probe output to a dictionary of residues.

  open_probe_file is any iterable of Probe condensed-unformatted output lines
  (an open .probe file, or stdout_lines from make_probe_data()).  For each
  h-bond whose source and target residues are both present in resdata, a
  record is stored as:

    srcResidue.probe[src_atom_name][targ_id_with_resname + targ_atom] = record

  where record bundles the target residue object, target atom name, dot
  count, min-gap, and sequence separation.  Keying first by source atom and
  then by target id+atom is unique enough to handle bifurcated h-bonds.
  Waters and non-h-bond contacts are skipped; h-bonds whose source or target
  residue is absent from resdata (hetgroups, pruned residues) are skipped.

  Cleanup vs. the previous version: membership tests use 'key in resdata'
  instead of 'key in list(resdata.keys())' (O(1) instead of an O(n) list scan
  per line), unused locals and large blocks of dead commented-out code were
  removed.  Behavior is unchanged.
  """
  for line in open_probe_file:
    #Probe condensed unformatted output column layout:
    #name:pat:type:srcAtom:targAtom:dotcount:min-gap:gap:spX:spY:spZ:spikeLen:score:stype:ttype:x:y:z:sBval:tBval
    ###'name' is set by the user on the command line
    ###'pat' is one of 1->1, 1->2, or 2->1; where 1 is src and 2 is targ.
    ###'type' is one of wc, cc, so, bo, hb (wide/close contact, small/bad overlap, h-bond).
    ###'srcAtom' and 'targAtom' follow the pattern CNNNNITTT AAAAL, where C is chain, N is number, I is insertion code, T is residue type, A is atom name, and L is alternate conformation flag.
    ###'dotcount' (condensed output only) is the number of dots in the contact
    ###'min-gap' is the distance of closest approach of the two atoms' vdW surfaces; negative values indicate overlap (clashes or H-bonds).
    ###'score' is this dot's contribution to the Probe score (already scaled)
    ###'stype' and 'ttype' are heavy-atom element names (C, N, O, etc)
    if not line.strip(): continue #averts an IndexError problem with empty lines
    fields = line.split(':')
    if fields[2] != 'hb': continue #skip non-h-bonds
    srcAtom = fields[3]
    srcChain = srcAtom[0:2].strip()
    srcNum = int(srcAtom[2:6].strip())
    srcIns = srcAtom[6:7]
    srcResname = srcAtom[7:10].strip()
    if srcResname == 'HOH': continue #skip waters
    srcAtomname = srcAtom[11:15]  #deliberately not stripped
    trgAtom = fields[4]
    trgChain = trgAtom[0:2].strip()
    trgNum = int(trgAtom[2:6].strip())
    trgIns = trgAtom[6:7]
    trgAtomname = trgAtom[11:15]  #deliberately not stripped
    dotcount = fields[5]  #dots per bond serve as a measure of bond strength
    mingap = fields[6]
    #Residue ids follow the cablam_res convention:
    # resid_string = ' '.join([modelid, chainid, '%04i' % resnum, icode]),
    # with modelid empty here (Probe output keeps no model information).
    src_key = ' '.join(['', srcChain, '%04i' % srcNum, srcIns])
    if src_key not in resdata: continue #src outside resdata: not interested
    srcResidue = resdata[src_key]
    targ_key = ' '.join(['', trgChain, '%04i' % trgNum, trgIns])
    if targ_key not in resdata:
      #likely a water or hetgroup target; no record is kept for those
      continue
    trgResidue = resdata[targ_key]
    recordkey = trgResidue.id_with_resname() + trgAtomname
    record = group_args(residue  = trgResidue,
                        atom     = trgAtomname,
                        dotcount = dotcount,
                        mingap   = mingap,
                        seqdist  = srcResidue.seq_dist(trgResidue))
    srcResidue.probe.setdefault(srcAtomname, {})[recordkey] = record
#-------------------------------------------------------------------------------
#}}}
#{{{ Output function collection
#A collection of headers, formatting, and printing functions used in output
#Default output is to stdout, but anything with a .write can be passed to the
# 'writeto=' argument of most functions. Functions that lack a 'writeto='
# generate or find uniquely named files in the working dir for their output.
#Print methods called by these functions are generally from cablam_res.py
#-------------------------------------------------------------------------------
#{{{ --- kin_frame ---
#-------------------------------------------------------------------------------
#kin_frame is a 3-dimensional frame for dihedral-space (-180 to 180) kinemages
def kin_frame(writeto=sys.stdout):
  """Write a yellow 3-dimensional reference frame for dihedral-space
  (-180 to 180 on each axis) kinemages to writeto (default: stdout).

  The frame is three square vectorlist outlines (xy, xz, yz planes) plus a
  dot at the origin, so plotted measures have a visual bounding box.
  """
  writeto.write("""
@group {Rama Frame}
@dotlist {center} color= yellow off
0 0 0
@vectorlist {frame_xy} color= yellow
P -180 -180 0
180 -180 0
180 180 0
-180 180 0
-180 -180 0
@vectorlist {frame_xz} color= yellow
P -180 0 -180
180 0 -180
180 0 180
-180 0 180
-180 0 -180
@vectorlist {frame_yz} color= yellow
P 0 -180 -180
0 180 -180
0 180 180
0 -180 180
0 -180 -180
""")
#-------------------------------------------------------------------------------
#}}}
#{{{ --- CSV printing ---
#-------------------------------------------------------------------------------
#csv_header writes column names for the top of a .csv
#It starts with a comma, leaving the first column blank — the original reason
# is not recorded; presumably to line up with the row format printtocsv emits
# (confirm against cablam_res.printtocsv before changing)
def csv_header(kinorder, doconnections=False, writeto=sys.stdout):
  """Write the column-name header row for .csv output.

  The row starts with a comma (leaving the first column blank) followed by
  the residue-id column and one column per requested measure in kinorder;
  doconnections appends prevres/nextres columns to match printtocsv output.
  """
  columns = ',pdb:model:chain:resnum:ins:resname,' + ','.join(kinorder)
  if doconnections:
    columns += ',prevres,nextres'
  writeto.write(columns + '\n')
#Prints residues in comma-separated format, suitable for contouring and other
# analysis
#This is currently the default behavior of cablam_training. This output format
# is used to generate percentile and probability contours for cablam_annote
# using the programs Silk and kin2Dcont/kin3Dcont from the Richardson Lab.
def csv_print(protein, kinorder, skiplist=[], inclist=[],
  doconnections=False, cis_or_trans='both', writeto=sys.stdout):
  """Print residues in comma-separated format, suitable for contouring and
  other analysis.

  This is the default output of cablam_training; the format is used to
  generate percentile and probability contours for cablam_annote (via the
  Richardson Lab's Silk and kin2Dcont/kin3Dcont).  Residues excluded by
  skipcheck() or fails_cis_check() are not printed; residues are emitted in
  sorted resid order.
  """
  for resid in sorted(protein.keys()):
    residue = protein[resid]
    if skipcheck(residue, skiplist, inclist):
      continue
    if fails_cis_check(residue, cis_or_trans):
      continue
    residue.printtocsv(kinorder, doconnections, writeto)
#-------------------------------------------------------------------------------
#}}}
#{{{ --- Generic KIN printing ---
#-------------------------------------------------------------------------------
#kin_header writes the start of a kinemage file
#@text provides self-documentation of the commandline used to generate the .kin
#@dimensions and @dimminmax allow the .kin to handle high-dimensional data
def kin_header(kinorder,kinranges, writeto=sys.stdout):
  """Write the start of a high-dimensional kinemage file.

  @text records the commandline used to generate the .kin (self-
  documentation); @dimensions and @dimminmax let the .kin handle
  high-dimensional data.  Exits via sys.exit() if no geometric measures were
  requested, since there would be nothing to plot.
  """
  if len(kinorder) == 0:
    sys.stderr.write('\nNo geometric measures (e.g. rama=True) specified')
    sys.stderr.write('\nExiting . . .\n')
    sys.exit()
  writeto.write('@text\n')
  writeto.write(''.join(arg + ' ' for arg in sys.argv))
  writeto.write('\n\n@kinemage\n')
  writeto.write('@dimensions {' + '} {'.join(kinorder)+'}\n')
  writeto.write('@dimminmax '+ ' '.join(kinranges)+'\n')
  kin_frame(writeto=writeto)
  writeto.write('@group {points}\n')
  writeto.write(
    '@dotlist {points} nobutton dimension='+str(len(kinorder))+'\n')
#prints residues in .kin format
#Uses skipcheck() to select residues to print (default includes all)
def kin_print(protein, kinorder, skiplist=[], inclist=[], cis_or_trans='both',
  writeto=sys.stdout):
  """Print residues in .kin format, one dotlist point per residue.

  Residues excluded by skipcheck() (default: include all) or by
  fails_cis_check() are not printed; residues are emitted in sorted resid
  order.  Exits via sys.exit() if no geometric measures were requested.
  """
  if len(kinorder) == 0:
    sys.stderr.write('\nNo geometric measures (e.g. rama=True) specified')
    sys.stderr.write('\nExiting . . .\n')
    sys.exit()
  for resid in sorted(protein.keys()):
    residue = protein[resid]
    if skipcheck(residue, skiplist, inclist):
      continue
    if fails_cis_check(residue, cis_or_trans):
      continue
    residue.printtokin(kinorder, writeto)
#-------------------------------------------------------------------------------
#}}}
#{{{ --- Default PROBE printing ---
#-------------------------------------------------------------------------------
#Creates files and prints headers in them for generic probe output
#One .kin for each unique label in each motif. This can produce a lot of files.
def kin_print_probe_header(full_label_list, kinorder, kinranges):
  """Create one .kin file per unique motif-member label and write a kinemage
  header into each; returns a dict of label -> open file handle.

  Files are opened for append in the current directory, so this can produce
  a lot of files.  Exits via sys.exit() if no geometric measures were
  requested.
  """
  if len(kinorder) == 0:
    sys.stderr.write('\nNo geometric measures (e.g. rama=True) specified')
    sys.stderr.write('\nExiting . . .\n')
    sys.exit()
  outfiles = {}
  dimension_count = str(len(kinorder))
  for label in full_label_list:
    outfile = open(label+'.kin','a')
    outfiles[label] = outfile
    outfile.write('\n@kinemage\n')
    outfile.write('@dimensions {' + '} {'.join(kinorder)+'}\n')
    outfile.write('@dimminmax '+ ' '.join(kinranges)+'\n')
    kin_frame(writeto=outfile)
    outfile.write(
      '@group {'+label+'} dominant animate\n@dotlist {'+label+
      '} dimension='+dimension_count+'\n')
  return outfiles
#For producing distributions of points in cablam space
#Generic output is one point (many dimensions) for each residue that matches a
# motif definition/fingerprint.
def kin_print_probe(motif_instances, kinorder, outfiles):
  """Write one high-dimensional kinemage point per member residue of each
  found motif instance, into the per-label files from
  kin_print_probe_header().

  Used to produce distributions of points in cablam space.  Instances
  missing any requested measure (usually due to b_max pruning) are reported
  to stderr and skipped.
  """
  for motif_name, instances in motif_instances.items():
    for instance in instances:
      if not instance.has_all_measures(kinorder):
        sys.stderr.write(
          ' '+motif_name+' has incomplete measures, probably due to b_max\n')
        continue
      for index, member_label in instance.names.items():
        instance.residues[index].printtokin(
          kinorder, writeto=outfiles[member_label])
#-------------------------------------------------------------------------------
#}}}
#{{{ --- PROBE ANNOTE printing ---
#-------------------------------------------------------------------------------
#For annotating an existing .kin file with balls at CA's participating in
# motifs of interest.
#Produces one .kin per input file.
#Does not require a header as such.
def kin_print_probe_annote(motif_instances, writeto=sys.stdout):
  """Write kinemage balllists that mark the CA of every residue participating
  in a motif of interest; append the output to an existing kinemage of the
  structure to highlight the motifs.

  One @group per motif with at least one instance, one @balllist per
  member-residue label (taken from the first instance), and one ball per
  instance in each list.  Produces one .kin worth of output per input file;
  no header is needed.
  """
  for motif_name in motif_instances:
    instances = motif_instances[motif_name]
    if not instances:
      continue
    writeto.write('@group {'+motif_name+'}\n')
    ref_instance = instances[0]
    for index in sorted(ref_instance.residues.keys()):
      writeto.write('@balllist {'+ref_instance.names[index]+'}\n')
      for instance in instances:
        residue = instance.residues[index]
        ca_xyz = residue.atomxyz[residue.firstalt('CA')]['CA']
        pointid = residue.pdbid+' '+ residue.chain +' '+ str(residue.resnum)+' '+ instance.names[index]
        writeto.write("{ "+pointid+" } "+str(ca_xyz[0])+" "+str(ca_xyz[1])+" "+str(ca_xyz[2])+"\n")
#{{{
def old_kin_print_probe_annote(resdata, motif_list, writeto=sys.stdout):
  """Legacy CA-ball annotation: walks the whole residue dictionary once per
  motif label (checking residue.motifs membership) instead of iterating found
  instances as kin_print_probe_annote() does.

  NOTE(review): appears superseded by kin_print_probe_annote above — confirm
  nothing still calls this before removing.
  """
  reskeys = list(resdata.keys())
  reskeys.sort()
  motifs = cablam_fingerprints.fetch_fingerprints(motif_list)
  for motif in motifs:
    writeto.write('@group {'+motif.motif_name+'}\n')
    for label in motif.residue_names.values():
      writeto.write('@balllist {'+label+'}\n')
      for resid in reskeys:
        residue = resdata[resid]
        if label in residue.motifs:
          firstalt = residue.firstalt('CA')
          #assumes a CA atom exists for this alt — KeyError otherwise
          CAxyz = residue.atomxyz[firstalt]['CA']
          pointid = residue.pdbid +' '+ residue.chain +' '+ str(residue.resnum)+' '+ label
          writeto.write("{ "+pointid+" } "+str(CAxyz[0])+" "+str(CAxyz[1])+" "+str(CAxyz[2])+"\n")
#}}}
#-------------------------------------------------------------------------------
#}}}
#{{{ --- PROBE BY INSTANCE printing ---
#-------------------------------------------------------------------------------
#Creates files and prints headers in them for instance output
#One .kin for each motif. This can produce several files.
def kin_print_by_instance_header(motif_list, kinorder, kinranges):
  """Create one '<motif>_instances.kin' file per motif in motif_list and
  write a kinemage header (commandline @text, dimensions, frame) into each;
  returns a dict of motif name -> open file handle.

  Files are created in the current directory, so this can produce several
  files.  Exits via sys.exit() if no geometric measures were requested.
  """
  if len(kinorder) == 0:
    sys.stderr.write('\nNo geometric measures (e.g. rama=True) specified')
    sys.stderr.write('\nExiting . . .\n')
    sys.exit()
  outfiles = {}
  for motif in cablam_fingerprints.fetch_fingerprints(motif_list):
    motif_name = motif.motif_name
    outfile = open(motif_name+'_instances.kin', 'w')
    outfiles[motif_name] = outfile
    outfile.write('@text\n')
    outfile.write(''.join(arg + ' ' for arg in sys.argv))
    outfile.write('\n@kinemage\n')
    outfile.write('@dimensions {' + '} {'.join(kinorder)+'}\n')
    outfile.write('@dimminmax '+ ' '.join(kinranges)+'\n')
    kin_frame(writeto=outfile)
  return outfiles
#What this means is: each instance of a full motif, printed as a vector list so
# the path through cablam space can be followed
def kin_print_by_instance(motif_instances, motif_list, kinorder, outfiles):
  """Print each complete motif instance as a high-dimensional vectorlist so
  the instance's path through cablam space can be followed.

  motif_instances: dict of motif_name -> list of found instance objects
  motif_list: unused; retained for interface compatibility with callers
  kinorder: ordered list of measure names to print as dimensions
  outfiles: dict of motif_name -> writable file, as produced by
    kin_print_by_instance_header()

  Instances missing any requested measure (usually due to b_max pruning) are
  reported to stderr and skipped.

  BUG FIX: the @group label previously used residue.pdbid.rstrip('.pdb'),
  but str.rstrip strips a *character set*, not a suffix, mangling ids such
  as 'abdp.pdb' down to 'a'.  The '.pdb' extension is now removed properly.
  """
  for motif_name in motif_instances:
    for instance in motif_instances[motif_name]:
      if not instance.has_all_measures(kinorder):
        sys.stderr.write(
          ' '+motif_name+' has incomplete measures, probably due to b_max\n')
        continue
      indices = sorted(instance.names.keys())
      first_residue = instance.residues[indices[0]]
      pdbid = first_residue.pdbid
      if pdbid.endswith('.pdb'):
        pdbid = pdbid[:-len('.pdb')]
      outfiles[motif_name].write(
        '@group {'+pdbid+' '+str(first_residue.resnum)+
        '} dominant animate\n@vectorlist {'+motif_name+
        '} dimension='+str(len(kinorder))+'\n')
      for index in indices:
        residue = instance.residues[index]
        name = instance.names[index]
        outline = ['{'+residue.id_with_resname()+'_'+name+'}']
        for order in kinorder:
          outline.append(str(residue.measures[order]))
        outfiles[motif_name].write(' '.join(outline)+'\n')
#print a string of 1-char resnames for each motif instance,
# for use with WebLogo and the like.
def res_seq_by_instance(motif_instances):
  """Print one line per motif instance giving its amino-acid sequence as
  1-letter codes, for use with WebLogo and the like.

  Residue types without a known 1-letter code are printed as 'X' followed by
  the 3-letter code, so they can be located by searching the output for 'X'.
  Output goes to stdout.
  """
  #Standard 3-char -> 1-char mappings, followed by an ever-growing list of
  # non-standard amino acids
  reshash = {'GLY':'G','ALA':'A','VAL':'V','ILE':'I','LEU':'L','PHE':'F',
    'TRP':'W','MET':'M','GLU':'E','GLN':'Q','ASP':'D','ASN':'N','SER':'S',
    'THR':'T','TYR':'Y','HIS':'H','LYS':'K','PRO':'P','CYS':'C','ARG':'R',
    'MSE':'M','SME':'M','CSO':'C','OCS':'C','CSX':'C','CME':'C','YCM':'C',
    'MLY':'K'}
  for motif_name in motif_instances:
    for instance in motif_instances[motif_name]:
      codes = []
      for index in sorted(instance.residues.keys()):
        resname = instance.residues[index].id_with_resname()[0:3]
        codes.append(reshash.get(resname, 'X'+resname))
      sys.stdout.write(''.join(codes) + '\n')
#-------------------------------------------------------------------------------
#}}}
#{{{ --- PROBE superposition ---
#-------------------------------------------------------------------------------
#First step: excise the relevant bits of each pdb file
def trim_motifs(motif_instances, filename, superpose_refs):
  """Excise each found motif instance into its own small .pdb file (one
  subdirectory per motif name) and superpose each instance onto the first
  instance found for that motif.

  motif_instances: dict of motif_name -> list of found instance objects
  filename: path to the source pdb file (handed to phenix.pdbtools)
  superpose_refs: dict of motif_name -> {"motif": instance, "filename": path}
    holding the fixed superposition reference per motif; updated in place
    with the first instance of any new motif, and returned so references
    persist across input files.

  Shells out to phenix.pdbtools and phenix.superpose_pdbs via easy_run.

  BUG FIX: os.chdir(pwd) previously ran only after the motif loop, so with
  more than one motif the second motif's directory was created *inside* the
  first's; we now return to the starting directory after each motif.

  NOTE(review): 'filename' is used in commands issued after chdir into the
  motif dir; a relative path would not resolve there — confirm callers pass
  an absolute path.
  """
  pwd = os.getcwd()
  for motif_name in motif_instances:
    if not os.path.isdir(motif_name):
      os.mkdir(motif_name)
    os.chdir(motif_name)
    instance_num = 0
    for instance in motif_instances[motif_name]:
      instance_num += 1
      outputfile = os.path.basename(filename) + "_" + str(instance_num) + ".pdb"
      resnums = [str(residue.resnum) for residue in instance.residues.values()]
      selection = "resseq "+ " or resseq ".join(resnums)
      command = 'phenix.pdbtools stop_for_unknowns=False modify.keep=\"'+selection+'\" '+filename + " output.file_name=" + outputfile
      easy_run.fully_buffered(command)
      if motif_name not in superpose_refs:
        #first instance seen becomes the fixed reference for superposition
        superpose_refs[motif_name] = {"motif":instance,"filename":outputfile}
      else:
        sys.stderr.write("trying to superpose\n")
        ref = superpose_refs[motif_name]
        #phenix.superpose_pdbs fixed.pdb moving.pdb selection_fixed="name CA" selection_moving="name CA"
        command = "phenix.superpose_pdbs "+ ref["filename"] + " " + outputfile + " selection_default_fixed="+ref["motif"].superpose_thus +" selection_default_moving="+instance.superpose_thus + " output.file_name=" + outputfile
        sys.stderr.write(command)
        sys.stderr.write("\n")
        easy_run.fully_buffered(command)
    os.chdir(pwd)
  return superpose_refs
#-------------------------------------------------------------------------------
#}}}
#-------------------------------------------------------------------------------
#}}}
#{{{ run
#The run function is currently rather messy. (Indeed, all of
# cablam_training is a bit messy, as it's really a development tool, not a
# general-use program.) Hopefully, everything needed for general use (structure
# annotation) has been packaged in other modules for easy access. Good luck.
def run(args):
  """Command-line entry point for cablam_training.

  Parses phil / command-line arguments, selects which geometric measures to
  compute, then loops over every input pdb file: builds the residue data,
  prunes unwanted residues, runs the requested calculations, optionally runs
  the probe-based motif search, and prints results in the requested format
  (csv, kin, or one of the probe output modes).

  :param args: list of command-line argument strings (paths and phil args)
  """
  #{{{ phil parsing
  #-----------------------------------------------------------------------------
  interpreter = libtbx.phil.command_line.argument_interpreter(master_phil=master_phil)
  sources = []
  for arg in args:
    if os.path.isfile(arg):
      input_file = file_reader.any_file(arg)
      if (input_file.file_type == "pdb"):
        sources.append(interpreter.process(arg="file_or_dir=\"%s\"" % arg))
      elif (input_file.file_type == "phil"):
        sources.append(input_file.file_object)
    elif os.path.isdir(arg):
      sources.append(interpreter.process(arg="file_or_dir=\"%s\"" % arg))
    else:
      arg_phil = interpreter.process(arg=arg)
      sources.append(arg_phil)
  work_phil = master_phil.fetch(sources=sources)
  work_params = work_phil.extract()
  params = work_params.cablam_training
  #catch missing file or dir later?
  #if not work_params.cablam_training.file_or_dir:
  #  usage()
  #  sys.exit()
  #-----------------------------------------------------------------------------
  #}}} end phil parsing
  if params.help:
    usage()
    sys.exit()
  if params.list_motifs:
    sys.stdout.write('\n')
    fileset = os.listdir(libtbx.env.find_in_repositories(
      "cctbx_project/mmtbx/cablam/fingerprints"))
    for filename in fileset:
      if filename.endswith(".pickle"):
        motifname = os.path.splitext(os.path.basename(filename))[0]
        sys.stdout.write(motifname + '\n')
    sys.exit()
  if not params.file_or_dir:
    usage()
    sys.exit()
  if os.path.isdir(params.file_or_dir):
    fileset = os.listdir(params.file_or_dir)
    dirpath = params.file_or_dir
  elif os.path.isfile(params.file_or_dir):
    fileset = [params.file_or_dir]
    dirpath = None
  else:
    sys.stderr.write("Could not identify valid target file or dir.\n")
    usage()
    sys.exit()
  #{{{ measurement selection
  #This section manages the user's orders for calculations
  #Note: The 'kin' in kinorder and kin ranges is a misnomer
  #-----------------------------------------------------------------------------
  if params.all_measures:
    params.cad = True
    params.caa = True
    params.cod = True
    params.exrama = True
    params.tau = True
    params.omega = True
  kinorder, kinranges = [],[]
  if params.cad:
    kinorder.append('CA_d_in'), kinranges.append('-180 180')
    kinorder.append('CA_d_out'), kinranges.append('-180 180')
  else:
    pass
  if params.cod:
    kinorder.append('CO_d_in'), kinranges.append('-180 180')
    kinorder.append('CO_d_out'), kinranges.append('-180 180')
  else:
    pass
  if params.caa:
    kinorder.append('CA_a_in'), kinranges.append('0 180')
    kinorder.append('CA_a'), kinranges.append('0 180')
    kinorder.append('CA_a_out'), kinranges.append('0 180')
  else:
    pass
  if params.cablam:
    if 'CA_d_in' not in kinorder:
      kinorder.append('CA_d_in'), kinranges.append('-180 180')
    if 'CA_d_out' not in kinorder:
      kinorder.append('CA_d_out'), kinranges.append('-180 180')
    if 'CO_d_in' not in kinorder:
      kinorder.append('CO_d_in'), kinranges.append('-180 180')
    if 'CA_a' not in kinorder:
      #was '0, 180'; the comma corrupted the space-joined @dimminmax header
      kinorder.append('CA_a'), kinranges.append('0 180')
  else:
    pass
  if params.rama or params.exrama:
    if params.exrama:
      kinorder.append('psi-1'), kinranges.append('-180 180')
      kinorder.append('phi'), kinranges.append('-180 180')
      kinorder.append('psi'), kinranges.append('-180 180')
      kinorder.append('phi+1'), kinranges.append('-180 180')
    else:
      kinorder.append('phi'), kinranges.append('-180 180')
      kinorder.append('psi'), kinranges.append('-180 180')
  else:
    pass
  if params.tau:
    kinorder.append('tau'), kinranges.append('0 180')
  else:
    pass
  if params.omega:
    kinorder.append('omega'), kinranges.append('-180 180')
  else:
    pass
  #The following lines record the order and values for kinorder and kinranges
  # for sake of reference
  #kinorder = ['CA_d_in', 'CA_d_out','CO_d_in', 'CO_d_out',
  #  'psi-1', 'phi', 'psi', 'phi+1', 'tau', 'omega']
  #kinranges = ['-180 180','-180 180','-180 180','-180 180',
  #  '-180 180','-180 180','-180 180','-180 180','0 180', '-180 180']
  #-----------------------------------------------------------------------------
  #}}}
  #{{{ setup
  #-----------------------------------------------------------------------------
  targetatoms = ["CA","O","C","N"]
  superpose_refs = {}
  outfiles = {}
  if params.probe_motifs:
    motif_list = params.probe_motifs[0].split(',')
    if params.probe_path:
      probefilelist = os.listdir(params.probe_path)
    if params.probe_mode == 'kin':# or params.probe_mode == None:
      outfiles = kin_print_probe_header(cablam_fingerprints.get_all_labels(motif_list),kinorder,kinranges)
    elif params.probe_mode == 'instance':
      outfiles = kin_print_by_instance_header(motif_list, kinorder, kinranges)
  prunelist = []
  if params.prune:
    prunelist = params.prune[0].split(',')
    prunelist = [res.upper() for res in prunelist] #Ha ha! List comprehension!
  skiplist = []
  inclist = []
  if params.skip_types:
    skiplist = params.skip_types[0].split(',')
  if params.include_types:
    inclist = params.include_types[0].split(',')
  if params.separate_files:
    pass
  else:
    if params.give_kin:
      kin_header(kinorder,kinranges)
    elif params.probe_motifs:
      pass
    else:
      csv_header(kinorder,params.give_connections)
  #-----------------------------------------------------------------------------
  #}}}
  #{{{ get file, start loop
  #-----------------------------------------------------------------------------
  for filename in fileset:
    #if not filename.endswith('.pdb'):
    #  continue
    if dirpath: #must add the path if using the listed contents of a dir
      filename = os.path.join(dirpath,filename)
    else:
      pass
    pdbid = os.path.basename(filename)
    if not os.path.isfile(filename): continue
    pdb_in = file_reader.any_file(filename)
    if pdb_in.file_type != "pdb":
      sys.stderr.write(filename +" not id'd as readable file\n")
      continue
    sys.stderr.write(pdbid+'\n')
    pdb_io = pdb.input(filename)
    hierarchy = pdb_io.construct_hierarchy()
    resdata = cablam_res.construct_linked_residues(hierarchy,targetatoms,pdbid)
    if not resdata: #skips further processing of files not readable by hierarchy
      continue
    #-----------------------------------------------------------------------------
    #}}}
    #{{{ preprocessing
    #---------------------------------------------------------------------------
    cablam_res.prunerestype(resdata, 'HOH')
    for restype in prunelist:
      cablam_res.prunerestype(resdata, restype)
    if params.b_max:
      stripB(resdata,params.b_max)
    if params.prune_alts:
      prune_alts(resdata)
    #---------------------------------------------------------------------------
    #}}}
    #{{{ calculation calls
    #---------------------------------------------------------------------------
    if params.cad and params.caa:
      cablam_math.CApseudos(resdata, dodihedrals = True, doangles = True)
    elif params.cad:
      cablam_math.CApseudos(resdata, dodihedrals = True, doangles = False)
    elif params.caa:
      cablam_math.CApseudos(resdata, dodihedrals = False, doangles = True)
    else: #no CA-based calculations
      pass
    if params.cod:
      cablam_math.COpseudodihedrals(resdata)
    else:
      pass
    if params.rama or params.exrama:
      cablam_math.phipsi(resdata)
    else:
      pass
    if params.tau:
      cablam_math.taucalc(resdata)
    else:
      pass
    if params.omega or params.cis_or_trans != 'both':
      cablam_math.omegacalc(resdata)
    else:
      pass
    if params.cablam:
      cablam_math.cablam_measures(resdata)
    else:
      pass
    #---------------------------------------------------------------------------
    #}}}
    #{{{ probe stuff
    #---------------------------------------------------------------------------
    #need the run phenix.probe
    if params.probe_motifs and params.probe_path:
      #str.rstrip('.pdb') strips any trailing '.', 'p', 'd' or 'b' characters
      # (mangling ids such as '2dbp'), so trim the extension explicitly.
      if pdbid.endswith('.pdb'):
        probefilename = pdbid[:-len('.pdb')] + '.probe'
      else:
        probefilename = pdbid + '.probe'
      if probefilename in probefilelist:
        probefilepath = os.path.join(params.probe_path,probefilename)
        open_probe_file = open(probefilepath)
        add_probe_data(resdata,open_probe_file)
        open_probe_file.close()
      else:
        continue
    elif params.probe_motifs:
      add_probe_data(resdata,make_probe_data(hierarchy))
    if params.probe_motifs:
      found_motifs = cablam_fingerprints.check_protein(resdata, motif_list)
      #found_motifs is a dictionary. The keys are motif names.
      # The values are lists of cablam_fingerprints.motif_instance objects.
    #---------------------------------------------------------------------------
    #}}}
    #{{{ output
    #---------------------------------------------------------------------------
    #--probemode=kin for dotlist kins, this is the default
    #--probemode=annote for balls drawn at CA positions on the model
    #--probemode=instance for kins where each veclist is one instance of motif
    if params.probe_motifs:# and args.probepath:
      if params.probe_mode == 'kin':# or params.probe_mode == None:
        kin_print_probe(found_motifs, kinorder, outfiles)
      elif params.probe_mode == 'annote':
        outfile = open(pdbid+'cablam_motifs.kin','w')
        #kin_print_probe_annote(resdata, motif_list, writeto=outfile)
        kin_print_probe_annote(found_motifs, writeto=outfile)
        outfile.close()
      elif params.probe_mode == 'instance':
        #kin_print_by_instance(resdata, motif_list, kinorder, outfiles)
        kin_print_by_instance(found_motifs, motif_list, kinorder, outfiles)
      elif params.probe_mode == 'sequence':
        res_seq_by_instance(found_motifs)
        #res_seq_by_instance(resdata, motif_list)
      elif params.probe_mode == 'superpose':
        #trim_motifs(resdata, filename, motif_list)
        superpose_refs = trim_motifs(found_motifs, filename,superpose_refs)
        #superpose_motifs(motif_list)
      else:
        sys.stderr.write('\n\nUnrecognized probemode request\n\n')
        sys.exit()
      #add if args.kin once things basically work
      outfile = sys.stdout
      #need printer from probe version
      #Not sure what the stray outfile=sys.stdout is doing here anymore
    #default printing, with no arguments, is to .csv, one line per residue
    #--separatefiles writes a separate file for each input file to working dir
    #--kin prints kinemage file, dotlist, one point per residue
    #--doconnections adds connectivity information to csv output
    else:
      if params.give_kin:
        if params.separate_files:
          outfile = open(pdbid+'_cablam.kin','w')
          kin_header(kinorder,kinranges,writeto=outfile)
          kin_print(resdata, kinorder, skiplist, inclist, params.cis_or_trans, writeto=outfile)
          outfile.close()
        else:
          kin_print(resdata,kinorder,skiplist,inclist,params.cis_or_trans)
      else:
        if params.separate_files:
          outfile = open(pdbid+'_cablam.csv','w')
          csv_header(kinorder,params.give_connections,writeto=outfile)
          csv_print(resdata, kinorder, skiplist, inclist, params.give_connections,params.cis_or_trans, writeto=outfile)
          outfile.close()
        else:
          csv_print(resdata,kinorder,skiplist,inclist,params.give_connections,params.cis_or_trans,)
  if outfiles:
    for filename in outfiles:
      outfiles[filename].close()
#---------------------------------------------------------------------------
#}}}
#-------------------------------------------------------------------------------
#}}}
| 41.562652 | 259 | 0.629549 | from __future__ import absolute_import, division, print_function
import os, sys
from iotbx import pdb
from mmtbx.cablam import cablam_res
# geometric and probe measures and can look forward and backward in sequence
from mmtbx.cablam import cablam_math #contains geometric measure calculators
#from mmtbx.cablam import fingerprints #contains motif definitions
from mmtbx.cablam import cablam_fingerprints
#import cablam_fingerprints
# Storage for motif definitions subject to change
from libtbx import easy_run
import libtbx.phil.command_line
from iotbx import file_reader
from libtbx import group_args
#{{{ phil
#-------------------------------------------------------------------------------
master_phil = libtbx.phil.parse("""
cablam_training {
file_or_dir = None
.type = path
.help = '''input pdb file or dir thereof'''
separate_files = False
.type = bool
.help = '''Generate a separate, auto-named output file for each input file'''
give_kin = False
.type = bool
.help = '''Print output to screen in .kin format (default is comma-separated .csv format)'''
give_connections = False
.type = bool
.help = '''Add prevres and nextres columns to .csv output'''
debug = False
.type = bool
.help = '''Adds some text printed to stderr for debugging esp. for fingerprints'''
all_measures = False
.type = bool
.help = '''Does all measures'''
cad = False
.type = bool
.help = '''2 CA pseudo dihedrals'''
caa = False
.type = bool
.help = '''3 CA pseudo angles'''
cod = False
.type = bool
.help = '''2 CO pseudo dihedrals'''
rama = False
.type = bool
.help = '''2 Ramachandran dihedrals: phi, psi'''
exrama = False
.type = bool
.help = '''4 Ramachandran dihedrals: psi-1, phi, psi, phi+1'''
tau = False
.type = bool
.help = '''1 backbone angle: tau (defined by N-CA-C)'''
omega = False
.type = bool
.help = '''1 backbone dihedral: omega (defined by CA_1-C_1-N_2-CA_2)'''
cablam = False
.type = bool
.help = '''Shortcut for just cablam-relevant measures CA_d_in, CA_d_out, CO_in'''
probe_motifs = None
.type = strings
.help = '''Activates hydrogen bonding analysis, probe=motif_name1,motif_name2,... use --listmotifs to list available fingerprints'''
probe_path = None
.type = path
.help = '''Stores path to dir of probed files, probe will be called for each file if this is not provided'''
probe_mode = *kin annote instance sequence superpose
.type = choice
.help = '''=kin for dotlist kins (default) =annote for ball on model, =instance for vectorlist kins'''
list_motifs = False
.type = bool
.help = '''print motifs/fingerprints available to screen'''
b_max = None
.type = float
.help = '''Set a max b factor, residues containing a backbone atom with higher b will be pruned, recommended: -b=30'''
prune_alts = False
.type = bool
.help = '''Removes all residues with alternate conformations in relevant atoms'''
prune = None
.type = strings
.help = '''List of restypes to be pruned, separated by commas, no spaces eg PRO'''
skip_types = None
.type = strings
.help = '''List of restypes to be skipped during printing, separated by commas'''
include_types = None
.type = strings
.help = '''List of restypes to be printed, all others will be skipped'''
cis_or_trans = *both cis trans
.type = choice
.help = '''selects whether cis-peptides, trans-peptides, or both will be returned'''
fear = False
.type = bool
.help = '''turns on fear-to-tread analysis (this is temporary)'''
help = False
.type = bool
.help = '''print help text to screen'''
}
""", process_includes=True)
#-------------------------------------------------------------------------------
#}}}
#{{{ usage notes
#-------------------------------------------------------------------------------
def usage():
  """Print the full command-line help text for cablam_training to stderr."""
  #Fixed typos (backboen/combinded/withoug/animo/Recommenced), the tau atom
  # definition (N-CA-C, matching the phil help and taucalc), and the cis/trans
  # ranges, which now match fails_cis_check (-30..+30 cis; >=150 or <=-150 trans).
  sys.stderr.write("""
phenix.cablam_training or cablam_training.py is a program intended for the
exploration of protein structure datasets, the annotation of motifs of
interest, and the training of reference datasets. It was used in the
construction of the reference contours used by cablam_validate. It contains a
number of features and modes and is intended primarily as a development tool
rather than a utility for typical users. However, anyone interested in
exploring protein backbone geometry may find something of use here.
--------------------------------------------------------------------------------
file_or_dir=*path*
  Path to a pdb file or dir of pdb files to operate on, the only argument that
  doesn't need an explicit flag
--------------------------------------------------------------------------------
-----Basic Printing Options-----------------------------------------------------
separate_files=True/False
  Generate a separate, auto-named output file in the current dir for each input
  file, default output prints a single file to screen
give_kin=True/False
  Print output to screen in .kin format, may be combined with separate_files,
  default output prints comma-separated .csv format
give_connections=True/False
  If set to True, adds prevres and nextres columns to .csv output
skip_types=restype1,restype2
include_types=restype3,restype4
  Together, these control which residue types are printed to screen or file.
  Default prints all residues.
  Residue types and relationships given to skip_types are excluded from printing
  If only include_types is used, only the listed restypes will be printed
  If include_types and skip_types are both used, then the types given to
    include_types will override those skipped by skip_types.
  List restypes by their 3-letter code and separated by commas without spaces,
    e.g. GLY,PRO,ALA,TRP
  Sequence relationships may be represented with underscores, e.g. _PRO is
    pre-proline, and GLY__ (2 underscores) is post-post-glycine
  examples:
  skip_types=PRO would print every residue except proline
  include_types=PRO,GLY would print *only* glycines and prolines
  skip_types=_PRO include_types=GLY would skip pre-prolines unless they were
    also glycines
cis_or_trans='cis' 'trans' 'both'
  Selects printing for cis-peptides or trans-peptides exclusively. The default
  is 'both' which will print all residues. cis is defined as -30 to +30 degrees
  trans is defined as 150 to 180 and -150 to -180 degrees for the omega dihedral
  Note that selecting 'cis' or 'trans' will also stop printing for any residue
    for which omega cannot be calculated.
--------------------------------------------------------------------------------
-----Probe and Motif Search Options---------------------------------------------
This is an alternate mode which searches for hydrogen bonding patterns defined
  in fingerprints.
probe_motifs=motif_name1,motif_name2
  This flag activates hydrogen bonding pattern analysis, which will not run
  otherwise. The flag accepts a spaceless string of comma-separated motif names
  to search for. Use list_motifs=True to get a list of available motifs.
probe_path=*path*
  cablam_training can use precomputed probe results to speed up runs on large
  datasets. If a path to such prepared files is not provided, Reduce and Probe
  will be run on each pdb file, which may be time-consuming.
  Running:
phenix.probe -u -condense -self -mc -NOVDWOUT -NOCLASHOUT MC filename.pdb > filename.probe
  Should produce appropriately formatted and named files for this option
probe_mode=kin/annote/instance/sequence
  These are printing options for hydrogen bond pattern analysis, which overrides
  the Basic Printing Options above.
  Choose 1 of 3:
  =kin returns automatically-named kinemage files, one for each unique member
  residue in each motif. The kins are high-dimensional dotlists containing the
  measures specified in the commandline (see below for options). This is the
  default printing.
  =annote returns an automatically-named kinemage file for each pdb file. These
  kins are balllists that highlight the selected motifs of interest if appended
  to existing kinemages of the structures.
  =instance returns an automatically-named vectorlist kinemage file for each motif
  of interest. Each kin is a high-dimensional vectorlist that shows the path of
  a multi-residue motif through the measures specified in the commandline
  (see below for options)
  =sequence prints to screen the amino acid sequence of the motif of interest.
  Does not behave with multiple motifs. Uses single-letter amino acid codes, if
  a residue type is unrecognized, will print 'X' followed by the 3-letter code.
list_motifs=True/False
  Prints to screen a list of all the motifs/"fingerprints" currently available
  for hydrogen bond pattern search
--------------------------------------------------------------------------------
-----Geometric Measures---------------------------------------------------------
All of these default to False, and some output modes will not function unless at
  least one of these options is turned on. When in doubt, cablam=True and/or
  rama=True will provide relevant information.
cad=True/False
  For each residue, calculate the 2 C-alpha pseudo dihedrals
caa=True/False
  For each residue, calculate the 3 C-alpha pseudo angles
cod=True/False
  For each residue, calculate the 2 carbonyl oxygen pseudo dihedrals
rama=True/False
  For each residue, calculate Ramachandran dihedrals phi and psi
exrama=True/False
  For each residue, calculate Ramachandran dihedrals psi-1, phi, psi, phi+1
tau=True/False
  For each residue, calculate backbone angle tau, defined by N-CA-C
omega=True/False
  For each residue, calculate backbone peptide dihedral,
    defined by CA_1,C_1,N_2,CA_2
all_measures=True/False
  For each residue, calculate all of the above measures (may be overkill)
cablam=True/False
  Recommended, but not default behavior.
  For each residue calculate the measures most relevant to cablam analysis:
    CA_d_in, CA_d_out, CO_in
--------------------------------------------------------------------------------
-----Quality Control Options----------------------------------------------------
b_max=#.#
  Set a max b factor value. Residues containing a backbone atom with higher b
  will be pruned and excluded from all calculations. Note this may affect
  neighboring residues. Strongly Recommended: b_max=30.0
prune_alts=True/False
  Prune and excludes from calculations all residues with alternate conformations
  for backbone atoms. Note this may affect neighboring residues. Default is
  prune_alts=False, which results in only the first alternate position for each
  residue being reported on.
prune=restype1,restype2
  Prune and exclude from calculations the selected list of residue types. Note
  this may affect neighboring residues. Restypes should be given as 3-letter
  codes, e.g. GLY,PRO, but this option does not yet support the sequence
  relationship that skip_types= and include_types= do.
--------------------------------------------------------------------------------
-----Help Options---------------------------------------------------------------
help=True/False
  Displays this help message.
list_motifs=True/False
  Prints to screen a list of all the motifs/"fingerprints" currently available
  for hydrogen bond pattern search
debug=True/False
  Activates print-to-stderr debugging notes for hydrogen bond pattern search.
  This may be valuable when trying to define a new pattern correctly and with
  proper format.
--------------------------------------------------------------------------------
Examples:
phenix.cablam_training cad=True cod=True skip_types=GLY,PRO,_PRO,ILE,VAL b_max=30.0 kin=True file_or_dir=path/pdbfilename.pdb
phenix.cablam_training cablam=True b_max=30.0 prune=GLY probe_motifs=parallel_beta,antiparallel_beta_cwc,antiparallel_beta_wcw probe_mode=kin probe_path=path/database/probefiles file_or_dir=path/database/pdbfiles
""")
def stripB(resdata, bmax):
  """Remove (in place) every residue containing an atom with B-factor > bmax.

  A pruned residue is unlinked from its sequence neighbors via removelinks()
  before being dropped from resdata, so adjacent residues lose measures that
  depended on it.
  """
  for resid in list(resdata.keys()):
    residue = resdata[resid]
    #any() short-circuits at the first over-limit atom, matching the original
    # break-out-of-both-loops flow
    too_hot = any(
      bval > bmax
      for alt in residue.alts
      for bval in residue.atomb[alt].values())
    if too_hot:
      residue.removelinks()
      del resdata[resid]
def prune_alts(resdata):
  """Remove (in place) residues that have more than one alternate conformation.

  Each pruned residue is unlinked from its neighbors first, so this may affect
  measures on adjacent residues.
  """
  multi_alt_ids = [resid for resid in resdata if len(resdata[resid].alts) > 1]
  for resid in multi_alt_ids:
    resdata[resid].removelinks()
    del resdata[resid]
def _seq_shifted_resname(residue, token, flag):
  """Resolve a skip/include token such as 'PRO', '_PRO' or 'GLY__'.

  Leading underscores step forward in sequence (one per underscore), trailing
  underscores step backward. Returns (resname, bare_token) for the reached
  residue, or None when a needed neighbor or its CA alternate is missing
  (callers treat None as "skip this residue"). A token with underscores on
  both sides is invalid and aborts the program.
  """
  if token.startswith('_') and token.endswith('_'):
    sys.stderr.write('\n\
Invalid '+flag+' flag argument: '+token+ ' has\'_\' on both sides\n\n')
    sys.exit()
  currentres = residue
  while token.startswith('_'):
    if not currentres.nextres:
      return None
    currentres = currentres.nextres
    token = token[1:]
  while token.endswith('_'):
    if not currentres.prevres:
      return None
    currentres = currentres.prevres
    token = token[:-1]
  firstalt = currentres.firstalt('CA')
  if firstalt is None:
    return None
  return currentres.alts[firstalt]['resname'], token

def skipcheck(residue, skiplist, inclist):
  """Return True if residue should be skipped during printing.

  skiplist entries mark residue types (with optional '_' sequence shifts) to
  exclude; inclist entries mark types to include, overriding skiplist. With
  both lists empty nothing is skipped. Any unresolvable token (missing
  neighbor or CA) causes the residue to be skipped.
  """
  if skiplist:
    doskip = False #if there's anything to skip, the default state is include
  elif inclist:  #if there's nothing to skip but things to include...
    doskip = True
  else:
    return False
  #The two passes previously duplicated the traversal logic; it now lives in
  # _seq_shifted_resname. The include-loop error message also wrongly said
  # '--skip'; it now reports '--include'.
  for skip in skiplist:
    resolved = _seq_shifted_resname(residue, skip, '--skip')
    if resolved is None:
      return True
    resname, bare = resolved
    if resname == bare.upper():
      doskip = True
  for inc in inclist:
    resolved = _seq_shifted_resname(residue, inc, '--include')
    if resolved is None:
      return True
    resname, bare = resolved
    if resname == bare.upper():
      doskip = False
  return doskip
def fails_cis_check(residue, cis_or_trans):
  """Return True when residue fails the requested cis/trans omega filter.

  'both' accepts everything. Otherwise a residue with no computed omega is
  rejected; 'cis' accepts omega in [-30, 30], 'trans' accepts omega >= 150
  or <= -150.
  """
  if cis_or_trans == 'both':
    return False
  if 'omega' not in residue.measures:
    return True  #omega could not be calculated for this residue
  omega = residue.measures['omega']
  if cis_or_trans == 'cis' and -30 <= omega <= 30:
    return False
  if cis_or_trans == 'trans' and (omega >= 150 or omega <= -150):
    return False
  return True
def make_probe_data(hierarchy):
  """Run phenix.reduce (trim, then rebuild hydrogens) and phenix.probe on the
  first model of hierarchy and return probe's output as a list of lines.

  All three tools read stdin and write stdout (the trailing '-' in each
  command), so no temporary files are created. Requires phenix.reduce and
  phenix.probe on the PATH.
  """
  trim_command = "phenix.reduce -quiet -trim -"
  build_command = "phenix.reduce -oh -his -flip -pen9999 -keep -allalt -"
  probe_command = "phenix.probe -u -condense -self -mc -NOVDWOUT -NOCLASHOUT ALL -"
  #Only the first model is used; the loop breaks immediately after binding it.
  for i,m in enumerate(hierarchy.models()):
    model = m
    break
  #Detach a copy into a fresh root so only that model is serialized to pdb text.
  r = pdb.hierarchy.root()
  mdc = model.detached_copy()
  r.append_model(mdc)
  sys.stderr.write(' cleaning . . .\n')
  clean_out = easy_run.fully_buffered(trim_command, stdin_lines=r.as_pdb_string())
  sys.stderr.write(' reducing . . .\n')
  build_out = easy_run.fully_buffered(build_command, stdin_lines=clean_out.stdout_lines)
  input_str = '\n'.join(build_out.stdout_lines)
  sys.stderr.write(' probing . . .\n')
  probe_out = easy_run.fully_buffered(probe_command, stdin_lines=input_str)
  return probe_out.stdout_lines
def add_probe_data(resdata, open_probe_file):
reskeys = list(resdata.keys())
for line in open_probe_file:
Residue = resdata[targ_key]
recordkey = trgResidue.id_with_resname() + trgAtomname
record = group_args(residue = trgResidue,
atom = trgAtomname,
dotcount = dotcount,
mingap = mingap,
seqdist = srcResidue.seq_dist(trgResidue))
if srcAtomname not in list(srcResidue.probe.keys()):
srcResidue.probe[srcAtomname] = {}
o geometric measures (e.g. rama=True) specified')
sys.stderr.write('\nExiting . . .\n')
sys.exit()
writeto.write('@text\n')
for arg in sys.argv:
writeto.write(arg + ' ')
writeto.write('\n\n@kinemage\n')
writeto.write('@dimensions {' + '} {'.join(kinorder)+'}\n')
writeto.write('@dimminmax '+ ' '.join(kinranges)+'\n')
kin_frame(writeto=writeto)
writeto.write('@group {points}\n')
writeto.write(
'@dotlist {points} nobutton dimension='+str(len(kinorder))+'\n')
#prints residues in .kin format
#Uses skipcheck() to select residues to print (default includes all)
def kin_print(protein, kinorder, skiplist=None, inclist=None, cis_or_trans='both',
              writeto=sys.stdout):
  """Print one kinemage dot per residue, in sorted residue-id order.

  Residues rejected by skipcheck() (restype filters) or fails_cis_check()
  (omega filter) are omitted. Exits with a message if no geometric measures
  were requested.

  The skiplist/inclist defaults were mutable ([]), shared across calls; they
  are now None-sentinels, which is behaviorally identical for all callers.
  """
  if skiplist is None:
    skiplist = []
  if inclist is None:
    inclist = []
  if len(kinorder) == 0:
    sys.stderr.write('\nNo geometric measures (e.g. rama=True) specified')
    sys.stderr.write('\nExiting . . .\n')
    sys.exit()
  for resid in sorted(protein.keys()):
    residue = protein[resid]
    if skipcheck(residue, skiplist, inclist):
      continue
    if fails_cis_check(residue, cis_or_trans):
      continue
    residue.printtokin(kinorder, writeto)
#-------------------------------------------------------------------------------
#}}}
#{{{ --- Default PROBE printing ---
#-------------------------------------------------------------------------------
#Creates files and prints headers in them for generic probe output
#One .kin for each unique label in each motif. This can produce a lot of files.
def kin_print_probe_header(full_label_list, kinorder, kinranges):
  """Open one appendable .kin file per unique motif label and write its
  kinemage header (@dimensions/@dimminmax/frame/@group/@dotlist).

  Returns a dict mapping each label to its open file handle; callers are
  responsible for closing them. Exits if no measures were requested.
  """
  if not kinorder:
    sys.stderr.write('\nNo geometric measures (e.g. rama=True) specified')
    sys.stderr.write('\nExiting . . .\n')
    sys.exit()
  outfiles = {}
  for label in full_label_list:
    #'a' (append) so repeated runs accumulate points in the same file
    handle = open(label+'.kin','a')
    outfiles[label] = handle
    handle.write('\n@kinemage\n')
    handle.write('@dimensions {' + '} {'.join(kinorder)+'}\n')
    handle.write('@dimminmax '+ ' '.join(kinranges)+'\n')
    kin_frame(writeto=handle)
    handle.write(
      '@group {'+label+'} dominant animate\n@dotlist {'+label+
      '} dimension='+str(len(kinorder))+'\n')
  return outfiles
#For producing distributions of points in cablam space
#Generic output is one point (many dimensions) for each residue that matches a
# motif definition/fingerprint.
def kin_print_probe(motif_instances, kinorder, outfiles):
  """Write one high-dimensional kinemage dot per motif-member residue.

  motif_instances maps motif names to lists of motif_instance objects; each
  residue is printed to the file registered for its member label in outfiles.
  Instances missing any requested measure (e.g. pruned by b_max) are reported
  to stderr and skipped.
  """
  for motif_name, instances in motif_instances.items():
    for instance in instances:
      if not instance.has_all_measures(kinorder):
        sys.stderr.write(
          ' '+motif_name+' has incomplete measures, probably due to b_max\n')
        continue
      for index, label in instance.names.items():
        instance.residues[index].printtokin(kinorder, writeto=outfiles[label])
#-------------------------------------------------------------------------------
#}}}
#{{{ --- PROBE ANNOTE printing ---
#-------------------------------------------------------------------------------
#For annotating an existing .kin file with balls at CA's participating in
def kin_print_probe_annote(motif_instances, writeto=sys.stdout):
  """Write @balllist annotations marking the CA position of every residue in
  every found motif instance, grouped by motif and member label.

  Intended to be appended to an existing kinemage of the structure. Motifs
  with no instances are omitted entirely.
  """
  for motif_name, instances in motif_instances.items():
    if not instances:
      continue
    writeto.write('@group {'+motif_name+'}\n')
    #The first instance supplies the member indices/labels for the balllists
    ref_instance = instances[0]
    for index in sorted(ref_instance.residues.keys()):
      writeto.write('@balllist {'+ref_instance.names[index]+'}\n')
      for instance in instances:
        residue = instance.residues[index]
        xyz = residue.atomxyz[residue.firstalt('CA')]['CA']
        pointid = (residue.pdbid+' '+ residue.chain +' '+ str(residue.resnum)
          +' '+ instance.names[index])
        writeto.write("{ "+pointid+" } "+str(xyz[0])+" "+str(xyz[1])+" "+str(xyz[2])+"\n")
def old_kin_print_probe_annote(resdata, motif_list, writeto=sys.stdout):
  """Legacy CA-annotation printer driven by residue.motifs tags.

  Superseded by kin_print_probe_annote (which works from found motif
  instances); kept for reference. Scans every residue for each motif label
  and emits a ball at its CA position.
  """
  sorted_ids = sorted(resdata.keys())
  for motif in cablam_fingerprints.fetch_fingerprints(motif_list):
    writeto.write('@group {'+motif.motif_name+'}\n')
    for label in motif.residue_names.values():
      writeto.write('@balllist {'+label+'}\n')
      for resid in sorted_ids:
        residue = resdata[resid]
        if label not in residue.motifs:
          continue
        firstalt = residue.firstalt('CA')
        xyz = residue.atomxyz[firstalt]['CA']
        pointid = residue.pdbid +' '+ residue.chain +' '+ str(residue.resnum)+' '+ label
        writeto.write("{ "+pointid+" } "+str(xyz[0])+" "+str(xyz[1])+" "+str(xyz[2])+"\n")
def kin_print_by_instance_header(motif_list, kinorder, kinranges):
  """Open one <motif>_instances.kin file per requested motif and write its
  kinemage header (command line echo, @dimensions, @dimminmax, frame).

  Returns a dict mapping motif names to open file handles; callers must close
  them. Exits if no geometric measures were requested.
  """
  if not kinorder:
    sys.stderr.write('\nNo geometric measures (e.g. rama=True) specified')
    sys.stderr.write('\nExiting . . .\n')
    sys.exit()
  outfiles = {}
  for motif in cablam_fingerprints.fetch_fingerprints(motif_list):
    handle = open(motif.motif_name+'_instances.kin', 'w')
    outfiles[motif.motif_name] = handle
    #Record the command line that produced this kin in its @text block
    handle.write('@text\n')
    for arg in sys.argv:
      handle.write(arg + ' ')
    handle.write('\n@kinemage\n')
    handle.write('@dimensions {' + '} {'.join(kinorder)+'}\n')
    handle.write('@dimminmax '+ ' '.join(kinranges)+'\n')
    kin_frame(writeto=handle)
  return outfiles
def kin_print_by_instance(motif_instances, motif_list, kinorder, outfiles):
  """Write each motif instance as one @vectorlist tracing its residues through
  the requested measures, into the per-motif file from outfiles.

  motif_list is accepted for interface compatibility but is not used here.
  Instances missing any requested measure are reported to stderr and skipped.
  """
  for motif_name, instances in motif_instances.items():
    for instance in instances:
      if not instance.has_all_measures(kinorder):
        sys.stderr.write(
          ' '+motif_name+' has incomplete measures, probably due to b_max\n')
        continue
      indices = sorted(instance.names.keys())
      first_res = instance.residues[indices[0]]
      handle = outfiles[motif_name]
      #NOTE(review): rstrip('.pdb') strips any trailing '.', 'p', 'd', 'b'
      # characters, not just the extension — preserved as-is.
      handle.write(
        '@group {'+first_res.pdbid.rstrip('.pdb')+' '+str(first_res.resnum)+
        '} dominant animate\n@vectorlist {'+motif_name+
        '} dimension='+str(len(kinorder))+'\n')
      for index in indices:
        residue = instance.residues[index]
        label = instance.names[index]
        outline = ['{'+residue.id_with_resname()+'_'+label+'}']
        outline.extend(str(residue.measures[measure]) for measure in kinorder)
        handle.write(' '.join(outline)+'\n')
def res_seq_by_instance(motif_instances):
  """Print to stdout the one-letter amino-acid sequence of each found motif
  instance, one instance per line.

  Unrecognized residue types are printed as 'X' followed by their 3-letter
  code. Modified residues (MSE, CSO, MLY, ...) map to their parent letter.
  """
  one_letter = {'GLY':'G','ALA':'A','VAL':'V','ILE':'I','LEU':'L','PHE':'F',
    'TRP':'W','MET':'M','GLU':'E','GLN':'Q','ASP':'D','ASN':'N','SER':'S',
    'THR':'T','TYR':'Y','HIS':'H','LYS':'K','PRO':'P','CYS':'C','ARG':'R',
    'MSE':'M','SME':'M','CSO':'C','OCS':'C','CSX':'C','CME':'C','YCM':'C',
    'MLY':'K'}
  for motif_name in motif_instances:
    for instance in motif_instances[motif_name]:
      codes = []
      for index in sorted(instance.residues.keys()):
        #id_with_resname() starts with the 3-letter residue code
        resname = instance.residues[index].id_with_resname()[0:3]
        codes.append(one_letter.get(resname, 'X' + resname))
      sys.stdout.write(''.join(codes) + '\n')
def trim_motifs(motif_instances, filename, superpose_refs):
  """For each found motif instance, write a trimmed pdb containing only the
  instance's residues (via phenix.pdbtools) into a per-motif subdirectory,
  then superpose it onto the first-seen instance of that motif
  (via phenix.superpose_pdbs).

  superpose_refs maps motif names to {'motif': instance, 'filename': path};
  the first instance of each motif becomes the reference and the (updated)
  dict is returned so references persist across input files.
  Requires phenix.pdbtools and phenix.superpose_pdbs on the PATH.
  """
  pwd = os.getcwd()
  for motif_name in motif_instances:
    if os.path.isdir(motif_name): pass
    else: os.mkdir(motif_name)
    #Work inside the per-motif directory; restored after each motif below.
    os.chdir(motif_name)
    instance_num = 0
    for instance in motif_instances[motif_name]:
      instance_num += 1
      outputfile = os.path.basename(filename) + "_" + str(instance_num) + ".pdb"
      resnums = []
      for residue in instance.residues.values():
        resnum = str(residue.resnum)
        resnums.append(resnum)
      #Selection keeps only the motif's residues, e.g. "resseq 5 or resseq 6"
      selection = "resseq "+ " or resseq ".join(resnums)
      command = 'phenix.pdbtools stop_for_unknowns=False modify.keep=\"'+selection+'\" '+filename + " output.file_name=" + outputfile
      runthis = easy_run.fully_buffered(command)
      if motif_name not in superpose_refs:
        #First instance seen becomes the superposition reference for this motif
        superpose_refs[motif_name] = {"motif":instance,"filename":outputfile}
      else:
        sys.stderr.write("trying to superpose\n")
        ref = superpose_refs[motif_name]
        #phenix.superpose_pdbs fixed.pdb moving.pdb selection_fixed="name CA" selection_moving="name CA"
        #NOTE(review): superpose_thus appears to be a selection string on the
        # motif_instance — confirm against cablam_fingerprints.
        command = "phenix.superpose_pdbs "+ ref["filename"] + " " + outputfile + " selection_default_fixed="+ref["motif"].superpose_thus +" selection_default_moving="+instance.superpose_thus + " output.file_name=" + outputfile
        sys.stderr.write(command)
        sys.stderr.write("\n")
        runthis = easy_run.fully_buffered(command)
    os.chdir(pwd)
  return superpose_refs
# general-use program.) Hopefully, everything needed for general use (structure
# annotation) has been packaged in other modules for easy access. Good luck.
def run(args):
  """Command-line entry point: compute cablam measures for input models.

  *args* are interpreted as phil parameters and/or paths to model files or
  directories.  For each readable PDB file the requested geometric measures
  are calculated and printed as csv (default), kinemage (give_kin), or
  motif/probe-based output (probe_motifs modes).

  Fixes relative to the previous revision:
  * the kinemage range for 'CA_a' under params.cablam was '0, 180'
    (stray comma) while every other angle range uses '0 180';
  * the probe filename was derived with pdbid.rstrip('.pdb'), which strips
    any trailing '.', 'p', 'd', 'b' characters (e.g. '3pdb.pdb' -> '3')
    rather than removing the '.pdb' suffix;
  * a duplicated `params = work_params.cablam_training` assignment and the
    no-op `else: pass` branches were removed.
  """
  #{{{ phil parsing
  #-----------------------------------------------------------------------------
  interpreter = libtbx.phil.command_line.argument_interpreter(master_phil=master_phil)
  sources = []
  for arg in args:
    if os.path.isfile(arg):
      input_file = file_reader.any_file(arg)
      if (input_file.file_type == "pdb"):
        sources.append(interpreter.process(arg="file_or_dir=\"%s\"" % arg))
      elif (input_file.file_type == "phil"):
        sources.append(input_file.file_object)
    elif os.path.isdir(arg):
      sources.append(interpreter.process(arg="file_or_dir=\"%s\"" % arg))
    else:
      # Anything that is not an existing path is treated as a phil argument.
      arg_phil = interpreter.process(arg=arg)
      sources.append(arg_phil)
  work_phil = master_phil.fetch(sources=sources)
  work_params = work_phil.extract()
  params = work_params.cablam_training
  #-----------------------------------------------------------------------------
  #}}} end phil parsing

  if params.help:
    usage()
    sys.exit()

  if params.list_motifs:
    # Print the name of every pickled fingerprint in the repository and quit.
    sys.stdout.write('\n')
    fileset = os.listdir(libtbx.env.find_in_repositories(
      "cctbx_project/mmtbx/cablam/fingerprints"))
    for filename in fileset:
      if filename.endswith(".pickle"):
        motifname = os.path.splitext(os.path.basename(filename))[0]
        sys.stdout.write(motifname + '\n')
    sys.exit()

  if not params.file_or_dir:
    usage()
    sys.exit()
  if os.path.isdir(params.file_or_dir):
    fileset = os.listdir(params.file_or_dir)
    dirpath = params.file_or_dir
  elif os.path.isfile(params.file_or_dir):
    fileset = [params.file_or_dir]
    dirpath = None
  else:
    sys.stderr.write("Could not identify valid target file or dir.\n")
    usage()
    sys.exit()

  #{{{ measurement selection
  #This section manages the user's orders for calculations
  if params.all_measures:
    params.cad = True
    params.caa = True
    params.cod = True
    params.exrama = True
    params.tau = True
    params.omega = True

  # kinorder lists the measures in output-column order; kinranges carries the
  # matching "<min> <max>" strings used by the kinemage headers.
  kinorder, kinranges = [], []

  def _add_measure(name, valrange):
    # Helper: register one output column and its kinemage axis range.
    kinorder.append(name)
    kinranges.append(valrange)

  if params.cad:
    _add_measure('CA_d_in', '-180 180')
    _add_measure('CA_d_out', '-180 180')
  if params.cod:
    _add_measure('CO_d_in', '-180 180')
    _add_measure('CO_d_out', '-180 180')
  if params.caa:
    _add_measure('CA_a_in', '0 180')
    _add_measure('CA_a', '0 180')
    _add_measure('CA_a_out', '0 180')
  if params.cablam:
    # cablam needs these four measures; add only the ones not already queued.
    if 'CA_d_in' not in kinorder:
      _add_measure('CA_d_in', '-180 180')
    if 'CA_d_out' not in kinorder:
      _add_measure('CA_d_out', '-180 180')
    if 'CO_d_in' not in kinorder:
      _add_measure('CO_d_in', '-180 180')
    if 'CA_a' not in kinorder:
      _add_measure('CA_a', '0 180')  # bugfix: was '0, 180'
  if params.rama or params.exrama:
    if params.exrama:
      _add_measure('psi-1', '-180 180')
      _add_measure('phi', '-180 180')
      _add_measure('psi', '-180 180')
      _add_measure('phi+1', '-180 180')
    else:
      _add_measure('phi', '-180 180')
      _add_measure('psi', '-180 180')
  if params.tau:
    _add_measure('tau', '0 180')
  if params.omega:
    _add_measure('omega', '-180 180')

  targetatoms = ["CA","O","C","N"]

  superpose_refs = {}
  outfiles = {}
  if params.probe_motifs:
    motif_list = params.probe_motifs[0].split(',')
    if params.probe_path:
      probefilelist = os.listdir(params.probe_path)
    if params.probe_mode == 'kin':
      outfiles = kin_print_probe_header(cablam_fingerprints.get_all_labels(motif_list),kinorder,kinranges)
    elif params.probe_mode == 'instance':
      outfiles = kin_print_by_instance_header(motif_list, kinorder, kinranges)

  prunelist = []
  if params.prune:
    prunelist = [res.upper() for res in params.prune[0].split(',')]
  skiplist = []
  inclist = []
  if params.skip_types:
    skiplist = params.skip_types[0].split(',')
  if params.include_types:
    inclist = params.include_types[0].split(',')

  # When writing to stdout (not separate files), emit the shared header once.
  if not params.separate_files:
    if params.give_kin:
      kin_header(kinorder,kinranges)
    elif params.probe_motifs:
      pass  # probe headers were already written above
    else:
      csv_header(kinorder,params.give_connections)

  for filename in fileset:
    if dirpath:
      filename = os.path.join(dirpath,filename)
    pdbid = os.path.basename(filename)
    if not os.path.isfile(filename): continue
    pdb_in = file_reader.any_file(filename)
    if pdb_in.file_type != "pdb":
      sys.stderr.write(filename +" not id'd as readable file\n")
      continue
    sys.stderr.write(pdbid+'\n')
    pdb_io = pdb.input(filename)
    hierarchy = pdb_io.construct_hierarchy()
    resdata = cablam_res.construct_linked_residues(hierarchy,targetatoms,pdbid)
    if not resdata: #skips further processing of files not readable by hierarchy
      continue

    #{{{ preprocessing
    #---------------------------------------------------------------------------
    cablam_res.prunerestype(resdata, 'HOH')
    for restype in prunelist:
      cablam_res.prunerestype(resdata, restype)
    if params.b_max:
      stripB(resdata,params.b_max)
    if params.prune_alts:
      prune_alts(resdata)
    #---------------------------------------------------------------------------
    #}}}

    #{{{ calculation calls
    #---------------------------------------------------------------------------
    if params.cad or params.caa:
      cablam_math.CApseudos(resdata, dodihedrals = bool(params.cad),
        doangles = bool(params.caa))
    if params.cod:
      cablam_math.COpseudodihedrals(resdata)
    if params.rama or params.exrama:
      cablam_math.phipsi(resdata)
    if params.tau:
      cablam_math.taucalc(resdata)
    if params.omega or params.cis_or_trans != 'both':
      cablam_math.omegacalc(resdata)
    if params.cablam:
      cablam_math.cablam_measures(resdata)
    #---------------------------------------------------------------------------
    #}}}

    #{{{ probe stuff
    #---------------------------------------------------------------------------
    if params.probe_motifs and params.probe_path:
      # Replace the '.pdb' suffix (if any) with '.probe'.
      if pdbid.endswith('.pdb'):
        probefilename = pdbid[:-len('.pdb')] + '.probe'
      else:
        probefilename = pdbid + '.probe'
      if probefilename in probefilelist:
        probefilepath = os.path.join(params.probe_path,probefilename)
        open_probe_file = open(probefilepath)
        add_probe_data(resdata,open_probe_file)
        open_probe_file.close()
      else:
        continue
    elif params.probe_motifs:
      add_probe_data(resdata,make_probe_data(hierarchy))
    if params.probe_motifs:
      #found_motifs is a dictionary: motif name -> list of motif_instance objects
      found_motifs = cablam_fingerprints.check_protein(resdata, motif_list)
    #---------------------------------------------------------------------------
    #}}}

    #{{{ output
    #---------------------------------------------------------------------------
    #--probemode=kin for dotlist kins, this is the default
    #--probemode=annote for balls drawn at CA positions on the model
    #--probemode=instance for kins where each veclist is one instance of motif
    if params.probe_motifs:
      if params.probe_mode == 'kin':
        kin_print_probe(found_motifs, kinorder, outfiles)
      elif params.probe_mode == 'annote':
        outfile = open(pdbid+'cablam_motifs.kin','w')
        kin_print_probe_annote(found_motifs, writeto=outfile)
        outfile.close()
      elif params.probe_mode == 'instance':
        kin_print_by_instance(found_motifs, motif_list, kinorder, outfiles)
      elif params.probe_mode == 'sequence':
        res_seq_by_instance(found_motifs)
      elif params.probe_mode == 'superpose':
        superpose_refs = trim_motifs(found_motifs, filename,superpose_refs)
      else:
        sys.stderr.write('\n\nUnrecognized probemode request\n\n')
        sys.exit()
      outfile = sys.stdout
    #default printing, with no arguments, is to .csv, one line per residue
    #--separatefiles writes a separate file for each input file to working dir
    #--kin prints kinemage file, dotlist, one point per residue
    #--doconnections adds connectivity information to csv output
    else:
      if params.give_kin:
        if params.separate_files:
          outfile = open(pdbid+'_cablam.kin','w')
          kin_header(kinorder,kinranges,writeto=outfile)
          kin_print(resdata, kinorder, skiplist, inclist, params.cis_or_trans, writeto=outfile)
          outfile.close()
        else:
          kin_print(resdata,kinorder,skiplist,inclist,params.cis_or_trans)
      else:
        if params.separate_files:
          outfile = open(pdbid+'_cablam.csv','w')
          csv_header(kinorder,params.give_connections,writeto=outfile)
          csv_print(resdata, kinorder, skiplist, inclist, params.give_connections,params.cis_or_trans, writeto=outfile)
          outfile.close()
        else:
          csv_print(resdata,kinorder,skiplist,inclist,params.give_connections,params.cis_or_trans)
    #---------------------------------------------------------------------------
    #}}}

  # Close any per-motif output files opened by the probe header printers.
  if outfiles:
    for filename in outfiles:
      outfiles[filename].close()
| true | true |
1c2db4051f1a13e119f743784e73c0796eecc5d3 | 15,892 | py | Python | kolibri/core/auth/api.py | khangmach/kolibri | f4b89b8262effe68a407edc032a735d5a1b0b71b | [
"MIT"
] | null | null | null | kolibri/core/auth/api.py | khangmach/kolibri | f4b89b8262effe68a407edc032a735d5a1b0b71b | [
"MIT"
] | null | null | null | kolibri/core/auth/api.py | khangmach/kolibri | f4b89b8262effe68a407edc032a735d5a1b0b71b | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import time
from django.contrib.auth import authenticate
from django.contrib.auth import get_user
from django.contrib.auth import login
from django.contrib.auth import logout
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.models import AnonymousUser
from django.db import transaction
from django.db.models import Q
from django.db.models.query import F
from django.utils.decorators import method_decorator
from django.utils.timezone import now
from django.views.decorators.csrf import ensure_csrf_cookie
from django_filters.rest_framework import CharFilter
from django_filters.rest_framework import DjangoFilterBackend
from django_filters.rest_framework import FilterSet
from django_filters.rest_framework import ModelChoiceFilter
from rest_framework import filters
from rest_framework import permissions
from rest_framework import status
from rest_framework import viewsets
from rest_framework.response import Response
from .constants import collection_kinds
from .constants import role_kinds
from .filters import HierarchyRelationsFilter
from .models import Classroom
from .models import Collection
from .models import Facility
from .models import FacilityDataset
from .models import FacilityUser
from .models import LearnerGroup
from .models import Membership
from .models import Role
from .serializers import ClassroomSerializer
from .serializers import FacilityDatasetSerializer
from .serializers import FacilitySerializer
from .serializers import FacilityUsernameSerializer
from .serializers import FacilityUserSerializer
from .serializers import LearnerGroupSerializer
from .serializers import MembershipSerializer
from .serializers import PublicFacilitySerializer
from .serializers import RoleSerializer
from kolibri.core import error_constants
from kolibri.core.decorators import signin_redirect_exempt
from kolibri.core.logger.models import UserSessionLog
from kolibri.core.mixins import BulkCreateMixin
from kolibri.core.mixins import BulkDeleteMixin
class KolibriAuthPermissionsFilter(filters.BaseFilterBackend):
    """
    A Django REST Framework filter backend that limits results to those where the
    requesting user has read object level permissions. This filtering is delegated
    to the ``filter_readable`` method on ``KolibriAbstractBaseUser``.
    """

    def filter_queryset(self, request, queryset, view):
        # Only list-style GET requests are narrowed here; detail requests keep
        # the full queryset so per-object permission checks yield the more
        # correct 403 rather than a 404.
        is_list_get = request.method == "GET" and request.resolver_match.url_name.endswith(
            "-list"
        )
        if not is_list_get:
            return queryset
        return request.user.filter_readable(queryset)
def _ensure_raw_dict(d):
if hasattr(d, "dict"):
d = d.dict()
return dict(d)
class KolibriAuthPermissions(permissions.BasePermission):
    """
    A Django REST Framework permissions class that defers to Kolibri's permissions
    system to determine object-level permissions.
    """

    def validator(self, request, view, datum):
        """Return True if the requesting user may create one instance from *datum*."""
        model = view.get_serializer_class().Meta.model
        validated_data = view.get_serializer().to_internal_value(
            _ensure_raw_dict(datum)
        )
        return request.user.can_create(model, validated_data)

    def has_permission(self, request, view):
        # as `has_object_permission` isn't called for POST/create, we need to check here
        if request.method == "POST" and request.data:
            # isinstance (rather than `type(...) is list`) so list subclasses
            # submitted by bulk-create endpoints are also treated as bulk data
            if isinstance(request.data, list):
                data = request.data
            else:
                data = [request.data]
            return all(self.validator(request, view, datum) for datum in data)
        # for other methods, we return True, as their permissions get checked below
        return True

    def has_object_permission(self, request, view, obj):
        # note that there is no entry for POST here, as creation is handled by `has_permission`, above
        if request.method in permissions.SAFE_METHODS:  # 'GET', 'OPTIONS' or 'HEAD'
            return request.user.can_read(obj)
        if request.method in ["PUT", "PATCH"]:
            return request.user.can_update(obj)
        if request.method == "DELETE":
            return request.user.can_delete(obj)
        return False
class FacilityDatasetViewSet(viewsets.ModelViewSet):
    """Endpoints for FacilityDataset objects, optionally scoped to one facility."""

    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter,)
    serializer_class = FacilityDatasetSerializer

    def get_queryset(self):
        # Only datasets whose collection is a facility are exposed here.
        datasets = FacilityDataset.objects.filter(
            collection__kind=collection_kinds.FACILITY
        )
        facility_id = self.request.query_params.get("facility_id")
        if facility_id is None:
            return datasets
        return datasets.filter(collection__id=facility_id)
class FacilityUserFilter(FilterSet):
    """FilterSet for FacilityUser supporting ?member_of=<collection id>."""

    # Restricts users to members of the given collection (hierarchy-aware).
    member_of = ModelChoiceFilter(
        method="filter_member_of", queryset=Collection.objects.all()
    )

    def filter_member_of(self, queryset, name, value):
        # Match users anywhere underneath *value* in the collection hierarchy,
        # not just direct members.
        return HierarchyRelationsFilter(queryset).filter_by_hierarchy(
            target_user=F("id"), ancestor_collection=value
        )

    class Meta:
        model = FacilityUser
        fields = ["member_of"]
class FacilityUserViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for FacilityUser with password hashing on write."""

    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
    queryset = FacilityUser.objects.all()
    serializer_class = FacilityUserSerializer
    filter_class = FacilityUserFilter

    def set_password_if_needed(self, instance, serializer):
        """Hash and store a new password when the request supplied one."""
        with transaction.atomic():
            new_password = serializer.validated_data.get("password", "")
            if new_password:
                instance.set_password(new_password)
                instance.save()
        return instance

    def perform_update(self, serializer):
        instance = serializer.save()
        self.set_password_if_needed(instance, serializer)
        # if the user is updating their own password, ensure they don't get logged out
        if self.request.user == instance:
            update_session_auth_hash(self.request, instance)

    def perform_create(self, serializer):
        created = serializer.save()
        self.set_password_if_needed(created, serializer)
@method_decorator(signin_redirect_exempt, name="dispatch")
class FacilityUsernameViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only username lookup for learners allowed to sign in without a password."""

    filter_backends = (DjangoFilterBackend, filters.SearchFilter)
    serializer_class = FacilityUsernameSerializer
    filter_fields = ("facility",)
    search_fields = ("^username",)

    def get_queryset(self):
        # Only role-less learners in facilities that allow passwordless login,
        # excluding superusers (whether flagged explicitly or lacking a
        # devicepermissions row entirely).
        not_superuser = Q(devicepermissions__is_superuser=False) | Q(
            devicepermissions__isnull=True
        )
        learners = FacilityUser.objects.filter(
            dataset__learner_can_login_with_no_password=True, roles=None
        )
        return learners.filter(not_superuser)
class MembershipFilter(FilterSet):
    """FilterSet for Membership; ?user_ids= accepts a comma-separated id list."""

    user_ids = CharFilter(method="filter_user_ids")

    def filter_user_ids(self, queryset, name, value):
        # *value* is a comma-separated string of user ids.
        return queryset.filter(user_id__in=value.split(","))

    class Meta:
        model = Membership
        fields = ["user", "collection", "user_ids"]
class MembershipViewSet(BulkDeleteMixin, BulkCreateMixin, viewsets.ModelViewSet):
    """CRUD endpoints for Membership, with bulk create and bulk delete support."""

    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
    queryset = Membership.objects.all()
    serializer_class = MembershipSerializer
    filter_class = MembershipFilter
    # NOTE(review): filter_fields looks redundant alongside filter_class —
    # confirm against the django-filter backend in use before removing.
    filter_fields = ["user", "collection", "user_ids"]
class RoleFilter(FilterSet):
    """FilterSet for Role; ?user_ids= accepts a comma-separated id list."""

    user_ids = CharFilter(method="filter_user_ids")

    def filter_user_ids(self, queryset, name, value):
        # *value* is a comma-separated string of user ids.
        return queryset.filter(user_id__in=value.split(","))

    class Meta:
        model = Role
        fields = ["user", "collection", "kind", "user_ids"]
class RoleViewSet(BulkDeleteMixin, BulkCreateMixin, viewsets.ModelViewSet):
    """CRUD endpoints for Role, with bulk create and bulk delete support."""

    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
    queryset = Role.objects.all()
    serializer_class = RoleSerializer
    filter_class = RoleFilter
    # NOTE(review): filter_fields looks redundant alongside filter_class —
    # confirm against the django-filter backend in use before removing.
    filter_fields = ["user", "collection", "kind", "user_ids"]
class FacilityViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Facility."""

    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter,)
    queryset = Facility.objects.all()
    serializer_class = FacilitySerializer

    def get_queryset(self, prefetch=True):
        facilities = Facility.objects.all()
        if not prefetch:
            return facilities
        # dataset is a default serializer field, so select_related avoids
        # n queries when n facilities are listed
        return facilities.select_related("dataset")
@method_decorator(signin_redirect_exempt, name="dispatch")
class PublicFacilityViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only Facility endpoints exposed without a sign-in redirect."""

    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter,)
    queryset = Facility.objects.all()
    serializer_class = PublicFacilitySerializer
class ClassroomFilter(FilterSet):
    """FilterSet for Classroom supporting ?role= and ?parent= lookups."""

    role = CharFilter(method="filter_has_role_for")
    parent = ModelChoiceFilter(queryset=Facility.objects.all())

    def filter_has_role_for(self, queryset, name, value):
        """Limit classrooms to those the requesting user has a role for.

        Superusers see everything; otherwise the result is the union of
        classrooms where the user is an admin and classrooms where the user
        holds the requested role kind (*value*).
        """
        requesting_user = get_user(self.request)
        if requesting_user.is_superuser:
            return queryset
        # filter queryset by admin role and coach role
        return HierarchyRelationsFilter(queryset).filter_by_hierarchy(
            source_user=requesting_user,
            role_kind=role_kinds.ADMIN,
            descendant_collection=F("id"),
        ) | HierarchyRelationsFilter(queryset).filter_by_hierarchy(
            source_user=requesting_user, role_kind=value, descendant_collection=F("id")
        )

    class Meta:
        model = Classroom
        fields = ["role", "parent"]
class ClassroomViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Classroom, filterable by role and parent facility."""

    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
    queryset = Classroom.objects.all()
    serializer_class = ClassroomSerializer
    filter_class = ClassroomFilter
class LearnerGroupViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for LearnerGroup, filterable by parent classroom."""

    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
    queryset = LearnerGroup.objects.all()
    serializer_class = LearnerGroupSerializer
    filter_fields = ("parent",)
@method_decorator(signin_redirect_exempt, name="dispatch")
class SignUpViewSet(viewsets.ViewSet):
    """Self-service account creation; signs the new user in on success."""

    serializer_class = FacilityUserSerializer

    def extract_request_data(self, request):
        """Collect the signup fields, targeting the default facility."""
        payload = request.data
        return {
            "username": payload.get("username", ""),
            "full_name": payload.get("full_name", ""),
            "password": payload.get("password", ""),
            "facility": Facility.get_default_facility().id,
        }

    def create(self, request):
        data = self.extract_request_data(request)
        # we validate the user's input, and if valid, login as user
        serialized_user = self.serializer_class(data=data)
        if serialized_user.is_valid(raise_exception=True):
            serialized_user.save()
            new_user = serialized_user.instance
            new_user.set_password(data["password"])
            new_user.save()
            authenticated_user = authenticate(
                username=data["username"],
                password=data["password"],
                facility=data["facility"],
            )
            login(request, authenticated_user)
        return Response(serialized_user.data, status=status.HTTP_201_CREATED)
@method_decorator(signin_redirect_exempt, name="dispatch")
@method_decorator(ensure_csrf_cookie, name="dispatch")
class SessionViewSet(viewsets.ViewSet):
    """Login (POST), logout (DELETE), and session-introspection (GET) endpoints."""

    def create(self, request):
        """Attempt login; respond with the session dict or an error payload."""
        username = request.data.get("username", "")
        password = request.data.get("password", "")
        facility_id = request.data.get("facility", None)
        user = authenticate(username=username, password=password, facility=facility_id)
        if user is not None and user.is_active:
            # Correct password, and the user is marked "active"
            login(request, user)
            # Success!
            # Is this the first time this user has logged in?
            # If so, they will not have any UserSessionLogs until we call get_session.
            request.session["first_login"] = not UserSessionLog.objects.filter(
                user=user
            ).exists()
            return Response(self.get_session(request))
        elif (
            not password
            and FacilityUser.objects.filter(
                username__iexact=username, facility=facility_id
            ).exists()
        ):
            # Password was missing, but username is valid, prompt to give password
            return Response(
                [
                    {
                        "id": error_constants.MISSING_PASSWORD,
                        "metadata": {
                            "field": "password",
                            "message": "Username is valid, but password is missing.",
                        },
                    }
                ],
                status=status.HTTP_400_BAD_REQUEST,
            )
        else:
            # Respond with error
            return Response(
                [{"id": error_constants.INVALID_CREDENTIALS, "metadata": {}}],
                status=status.HTTP_401_UNAUTHORIZED,
            )

    def destroy(self, request, pk=None):
        """Log the current user out."""
        logout(request)
        return Response([])

    def retrieve(self, request, pk=None):
        """Return the current session description."""
        return Response(self.get_session(request))

    def get_session(self, request):
        """Build the session dict for the requesting (possibly anonymous) user."""
        user = get_user(request)
        session_key = "current"
        server_time = now()
        if isinstance(user, AnonymousUser):
            # Anonymous sessions carry the default facility and no user data.
            return {
                "id": session_key,
                "username": "",
                "full_name": "",
                "user_id": None,
                "facility_id": getattr(Facility.get_default_facility(), "id", None),
                "kind": ["anonymous"],
                "error": "200",
                "server_time": server_time,
            }
        # Set last activity on session to the current time to prevent session timeout
        # Only do this for logged in users, as anonymous users cannot get logged out!
        request.session["last_session_request"] = int(time.time())
        # Default to active, only assume not active when explicitly set.
        active = True if request.GET.get("active", "true") == "true" else False
        session = {
            "id": session_key,
            "username": user.username,
            "full_name": user.full_name,
            "user_id": user.id,
            "can_manage_content": user.can_manage_content,
            "server_time": server_time,
        }
        # "kind" lists the user's distinct role kinds; defaults to learner.
        roles = list(
            Role.objects.filter(user_id=user.id)
            .values_list("kind", flat=True)
            .distinct()
        )
        if roles:
            session.update(
                {"facility_id": user.facility_id, "kind": roles, "error": "200"}
            )
        else:
            session.update(
                {"facility_id": user.facility_id, "kind": ["learner"], "error": "200"}
            )
        if user.is_superuser:
            session["kind"].insert(0, "superuser")
        if active:
            # Record activity for active sessions only.
            UserSessionLog.update_log(user)
        return session
| 36.87239 | 102 | 0.67921 | from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import time
from django.contrib.auth import authenticate
from django.contrib.auth import get_user
from django.contrib.auth import login
from django.contrib.auth import logout
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.models import AnonymousUser
from django.db import transaction
from django.db.models import Q
from django.db.models.query import F
from django.utils.decorators import method_decorator
from django.utils.timezone import now
from django.views.decorators.csrf import ensure_csrf_cookie
from django_filters.rest_framework import CharFilter
from django_filters.rest_framework import DjangoFilterBackend
from django_filters.rest_framework import FilterSet
from django_filters.rest_framework import ModelChoiceFilter
from rest_framework import filters
from rest_framework import permissions
from rest_framework import status
from rest_framework import viewsets
from rest_framework.response import Response
from .constants import collection_kinds
from .constants import role_kinds
from .filters import HierarchyRelationsFilter
from .models import Classroom
from .models import Collection
from .models import Facility
from .models import FacilityDataset
from .models import FacilityUser
from .models import LearnerGroup
from .models import Membership
from .models import Role
from .serializers import ClassroomSerializer
from .serializers import FacilityDatasetSerializer
from .serializers import FacilitySerializer
from .serializers import FacilityUsernameSerializer
from .serializers import FacilityUserSerializer
from .serializers import LearnerGroupSerializer
from .serializers import MembershipSerializer
from .serializers import PublicFacilitySerializer
from .serializers import RoleSerializer
from kolibri.core import error_constants
from kolibri.core.decorators import signin_redirect_exempt
from kolibri.core.logger.models import UserSessionLog
from kolibri.core.mixins import BulkCreateMixin
from kolibri.core.mixins import BulkDeleteMixin
class KolibriAuthPermissionsFilter(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
if request.method == "GET" and request.resolver_match.url_name.endswith(
"-list"
):
return request.user.filter_readable(queryset)
else:
return queryset
def _ensure_raw_dict(d):
if hasattr(d, "dict"):
d = d.dict()
return dict(d)
class KolibriAuthPermissions(permissions.BasePermission):
def validator(self, request, view, datum):
model = view.get_serializer_class().Meta.model
validated_data = view.get_serializer().to_internal_value(
_ensure_raw_dict(datum)
)
return request.user.can_create(model, validated_data)
def has_permission(self, request, view):
if request.method == "POST" and request.data:
if type(request.data) is list:
data = request.data
else:
data = [request.data]
return all(self.validator(request, view, datum) for datum in data)
# for other methods, we return True, as their permissions get checked below
return True
def has_object_permission(self, request, view, obj):
# note that there is no entry for POST here, as creation is handled by `has_permission`, above
if request.method in permissions.SAFE_METHODS: # 'GET', 'OPTIONS' or 'HEAD'
return request.user.can_read(obj)
elif request.method in ["PUT", "PATCH"]:
return request.user.can_update(obj)
elif request.method == "DELETE":
return request.user.can_delete(obj)
else:
return False
class FacilityDatasetViewSet(viewsets.ModelViewSet):
permission_classes = (KolibriAuthPermissions,)
filter_backends = (KolibriAuthPermissionsFilter,)
serializer_class = FacilityDatasetSerializer
def get_queryset(self):
queryset = FacilityDataset.objects.filter(
collection__kind=collection_kinds.FACILITY
)
facility_id = self.request.query_params.get("facility_id", None)
if facility_id is not None:
queryset = queryset.filter(collection__id=facility_id)
return queryset
class FacilityUserFilter(FilterSet):
member_of = ModelChoiceFilter(
method="filter_member_of", queryset=Collection.objects.all()
)
def filter_member_of(self, queryset, name, value):
return HierarchyRelationsFilter(queryset).filter_by_hierarchy(
target_user=F("id"), ancestor_collection=value
)
class Meta:
model = FacilityUser
fields = ["member_of"]
class FacilityUserViewSet(viewsets.ModelViewSet):
permission_classes = (KolibriAuthPermissions,)
filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
queryset = FacilityUser.objects.all()
serializer_class = FacilityUserSerializer
filter_class = FacilityUserFilter
def set_password_if_needed(self, instance, serializer):
with transaction.atomic():
if serializer.validated_data.get("password", ""):
instance.set_password(serializer.validated_data["password"])
instance.save()
return instance
def perform_update(self, serializer):
instance = serializer.save()
self.set_password_if_needed(instance, serializer)
# if the user is updating their own password, ensure they don't get logged out
if self.request.user == instance:
update_session_auth_hash(self.request, instance)
def perform_create(self, serializer):
instance = serializer.save()
self.set_password_if_needed(instance, serializer)
@method_decorator(signin_redirect_exempt, name="dispatch")
class FacilityUsernameViewSet(viewsets.ReadOnlyModelViewSet):
filter_backends = (DjangoFilterBackend, filters.SearchFilter)
serializer_class = FacilityUsernameSerializer
filter_fields = ("facility",)
search_fields = ("^username",)
def get_queryset(self):
return FacilityUser.objects.filter(
dataset__learner_can_login_with_no_password=True, roles=None
).filter(
Q(devicepermissions__is_superuser=False) | Q(devicepermissions__isnull=True)
)
class MembershipFilter(FilterSet):
user_ids = CharFilter(method="filter_user_ids")
def filter_user_ids(self, queryset, name, value):
return queryset.filter(user_id__in=value.split(","))
class Meta:
model = Membership
fields = ["user", "collection", "user_ids"]
class MembershipViewSet(BulkDeleteMixin, BulkCreateMixin, viewsets.ModelViewSet):
permission_classes = (KolibriAuthPermissions,)
filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
queryset = Membership.objects.all()
serializer_class = MembershipSerializer
filter_class = MembershipFilter
filter_fields = ["user", "collection", "user_ids"]
class RoleFilter(FilterSet):
user_ids = CharFilter(method="filter_user_ids")
def filter_user_ids(self, queryset, name, value):
return queryset.filter(user_id__in=value.split(","))
class Meta:
model = Role
fields = ["user", "collection", "kind", "user_ids"]
class RoleViewSet(BulkDeleteMixin, BulkCreateMixin, viewsets.ModelViewSet):
permission_classes = (KolibriAuthPermissions,)
filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
queryset = Role.objects.all()
serializer_class = RoleSerializer
filter_class = RoleFilter
filter_fields = ["user", "collection", "kind", "user_ids"]
class FacilityViewSet(viewsets.ModelViewSet):
permission_classes = (KolibriAuthPermissions,)
filter_backends = (KolibriAuthPermissionsFilter,)
queryset = Facility.objects.all()
serializer_class = FacilitySerializer
def get_queryset(self, prefetch=True):
queryset = Facility.objects.all()
if prefetch:
return queryset.select_related("dataset")
return queryset
@method_decorator(signin_redirect_exempt, name="dispatch")
class PublicFacilityViewSet(viewsets.ReadOnlyModelViewSet):
permission_classes = (KolibriAuthPermissions,)
filter_backends = (KolibriAuthPermissionsFilter,)
queryset = Facility.objects.all()
serializer_class = PublicFacilitySerializer
class ClassroomFilter(FilterSet):
role = CharFilter(method="filter_has_role_for")
parent = ModelChoiceFilter(queryset=Facility.objects.all())
def filter_has_role_for(self, queryset, name, value):
requesting_user = get_user(self.request)
if requesting_user.is_superuser:
return queryset
return HierarchyRelationsFilter(queryset).filter_by_hierarchy(
source_user=requesting_user,
role_kind=role_kinds.ADMIN,
descendant_collection=F("id"),
) | HierarchyRelationsFilter(queryset).filter_by_hierarchy(
source_user=requesting_user, role_kind=value, descendant_collection=F("id")
)
class Meta:
model = Classroom
fields = ["role", "parent"]
class ClassroomViewSet(viewsets.ModelViewSet):
permission_classes = (KolibriAuthPermissions,)
filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
queryset = Classroom.objects.all()
serializer_class = ClassroomSerializer
filter_class = ClassroomFilter
class LearnerGroupViewSet(viewsets.ModelViewSet):
permission_classes = (KolibriAuthPermissions,)
filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
queryset = LearnerGroup.objects.all()
serializer_class = LearnerGroupSerializer
filter_fields = ("parent",)
@method_decorator(signin_redirect_exempt, name="dispatch")
class SignUpViewSet(viewsets.ViewSet):
    """Self sign-up endpoint: creates a FacilityUser in the default facility
    and immediately logs the new user in."""
    serializer_class = FacilityUserSerializer
    def extract_request_data(self, request):
        # Only these fields are taken from the client; the facility is always
        # forced to the server's default facility.
        return {
            "username": request.data.get("username", ""),
            "full_name": request.data.get("full_name", ""),
            "password": request.data.get("password", ""),
            "facility": Facility.get_default_facility().id,
        }
    def create(self, request):
        """Validate, create, and sign in a new user; returns 201 with the serialized user."""
        data = self.extract_request_data(request)
        serialized_user = self.serializer_class(data=data)
        # raise_exception=True means invalid data short-circuits with a 400.
        if serialized_user.is_valid(raise_exception=True):
            serialized_user.save()
            # The password is (re)set and persisted after the initial save —
            # presumably the serializer stores it unhashed; TODO confirm.
            serialized_user.instance.set_password(data["password"])
            serialized_user.instance.save()
            # NOTE(review): authenticate() can return None if the backend rejects
            # the just-created credentials — confirm login() never receives None here.
            authenticated_user = authenticate(
                username=data["username"],
                password=data["password"],
                facility=data["facility"],
            )
            login(request, authenticated_user)
            return Response(serialized_user.data, status=status.HTTP_201_CREATED)
@method_decorator(signin_redirect_exempt, name="dispatch")
@method_decorator(ensure_csrf_cookie, name="dispatch")
class SessionViewSet(viewsets.ViewSet):
    """Session endpoint: ``create`` logs in, ``destroy`` logs out, and
    ``retrieve``/``get_session`` report the current session's user and roles."""
    def create(self, request):
        """Attempt a login with the supplied username/password/facility.

        Returns the session payload on success, 400 when the password is
        missing for a known username, and 401 otherwise.
        """
        username = request.data.get("username", "")
        password = request.data.get("password", "")
        facility_id = request.data.get("facility", None)
        user = authenticate(username=username, password=password, facility=facility_id)
        if user is not None and user.is_active:
            # Correct password, and the user is marked "active"
            login(request, user)
            # Is this the first time this user has logged in?
            # If so, they will not have any UserSessionLogs until we call get_session.
            request.session["first_login"] = not UserSessionLog.objects.filter(
                user=user
            ).exists()
            return Response(self.get_session(request))
        if (
            not password
            and FacilityUser.objects.filter(
                username__iexact=username, facility=facility_id
            ).exists()
        ):
            # Password was missing, but username is valid, prompt to give password
            return Response(
                [
                    {
                        "id": error_constants.MISSING_PASSWORD,
                        "metadata": {
                            "field": "password",
                            "message": "Username is valid, but password is missing.",
                        },
                    }
                ],
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Respond with error
        return Response(
            [{"id": error_constants.INVALID_CREDENTIALS, "metadata": {}}],
            status=status.HTTP_401_UNAUTHORIZED,
        )
    def destroy(self, request, pk=None):
        """Log the current user out."""
        logout(request)
        return Response([])
    def retrieve(self, request, pk=None):
        """Return details about the current session."""
        return Response(self.get_session(request))
    def get_session(self, request):
        """Build the session payload (identity, facility, role kinds) for the requester."""
        user = get_user(request)
        session_key = "current"
        server_time = now()
        if isinstance(user, AnonymousUser):
            return {
                "id": session_key,
                "username": "",
                "full_name": "",
                "user_id": None,
                "facility_id": getattr(Facility.get_default_facility(), "id", None),
                "kind": ["anonymous"],
                "error": "200",
                "server_time": server_time,
            }
        # Set last activity on session to the current time to prevent session timeout
        # Only do this for logged in users, as anonymous users cannot get logged out!
        request.session["last_session_request"] = int(time.time())
        # Default to active, only assume not active when explicitly set.
        active = request.GET.get("active", "true") == "true"
        session = {
            "id": session_key,
            "username": user.username,
            "full_name": user.full_name,
            "user_id": user.id,
            "can_manage_content": user.can_manage_content,
            "server_time": server_time,
        }
        roles = list(
            Role.objects.filter(user_id=user.id)
            .values_list("kind", flat=True)
            .distinct()
        )
        # Users with no explicit Role records are reported as learners.
        session.update(
            {
                "facility_id": user.facility_id,
                "kind": roles if roles else ["learner"],
                "error": "200",
            }
        )
        if user.is_superuser:
            session["kind"].insert(0, "superuser")
        if active:
            UserSessionLog.update_log(user)
        return session
| true | true |
1c2db46600b0b2fa6fff8480c70ee198eb0e8b1a | 237 | py | Python | bits_wilp/sumOfDigits.py | deepak5998/Py | 5ae3bd9e8dcf3104a8ca7512911a1607f6c9ae20 | [
"MIT"
] | 726 | 2019-06-04T04:46:06.000Z | 2022-03-31T17:54:00.000Z | bits_wilp/sumOfDigits.py | Ishajj/Python-Interview-Problems-for-Practice | 12ece68be497757e2aad8a07c29399856de782da | [
"MIT"
] | 12 | 2019-06-05T14:21:35.000Z | 2021-04-17T05:11:01.000Z | bits_wilp/sumOfDigits.py | Ishajj/Python-Interview-Problems-for-Practice | 12ece68be497757e2aad8a07c29399856de782da | [
"MIT"
] | 118 | 2019-06-04T10:25:12.000Z | 2022-02-04T22:31:12.000Z | def sumOfDigits(n):
sum = 0
while n > 0:
rem = n % 10
sum = sum + rem
n = n // 10
return sum
# Interactive driver: read an integer from stdin and print its digit sum.
print("Please enter a number: ")
num = int(input())
sod = sumOfDigits(num)
print("The sum of digits for", num, "is", sod)
def sumOfDigits(n):
    """Return the sum of the decimal digits of the non-negative integer ``n``.

    Returns 0 for n == 0. The accumulator is named ``total`` so the builtin
    ``sum`` is not shadowed (the original shadowed it).
    """
    total = 0
    while n > 0:
        # divmod yields (n // 10, n % 10) in one step.
        n, rem = divmod(n, 10)
        total += rem
    return total
# Interactive driver: read an integer from stdin and print its digit sum.
print("Please enter a number: ")
num = int(input())
sod = sumOfDigits(num)
print("The sum of digits for", num, "is", sod)
| true | true |
1c2db4c46ce12cac94c3d473662f4b32112e937b | 305 | py | Python | src/core/factory/mysql/customer_factory.py | lucassaporetti/car-rental | 6e37032df3a399b78ed3d7998a2cb31a2a84d033 | [
"MIT"
] | 1 | 2021-02-11T18:45:12.000Z | 2021-02-11T18:45:12.000Z | src/core/factory/mysql/customer_factory.py | lucassaporetti/car-rental | 6e37032df3a399b78ed3d7998a2cb31a2a84d033 | [
"MIT"
] | null | null | null | src/core/factory/mysql/customer_factory.py | lucassaporetti/car-rental | 6e37032df3a399b78ed3d7998a2cb31a2a84d033 | [
"MIT"
] | null | null | null |
from core.config.app_configs import AppConfigs
from src.core.factory.mysql.mysql_factory import MySqlFactory
class CustomerFactory(MySqlFactory):
    """MySQL-backed factory for Customer records, configured from the
    customer SQL template properties file."""
    # Properties file holding the SQL statement templates consumed by MySqlFactory.
    sql_template_file = "sql/mysql/ddl/customer_templates.properties"
    def __init__(self):
        super().__init__(CustomerFactory.sql_template_file)
| 25.416667 | 69 | 0.796721 |
from core.config.app_configs import AppConfigs
from src.core.factory.mysql.mysql_factory import MySqlFactory
class CustomerFactory(MySqlFactory):
    """MySQL-backed factory for Customer records, configured from the
    customer SQL template properties file."""
    # Properties file holding the SQL statement templates consumed by MySqlFactory.
    sql_template_file = "sql/mysql/ddl/customer_templates.properties"
    def __init__(self):
        super().__init__(CustomerFactory.sql_template_file)
| true | true |
1c2db5343cc59db58bf404f7bd56fa843789dea3 | 22,945 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_11_01/operations/_virtual_network_peerings_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 3 | 2020-06-23T02:25:27.000Z | 2021-09-07T18:48:11.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_11_01/operations/_virtual_network_peerings_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 510 | 2019-07-17T16:11:19.000Z | 2021-08-02T08:38:32.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_11_01/operations/_virtual_network_peerings_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 5 | 2019-09-04T12:51:37.000Z | 2020-09-16T07:28:40.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkPeeringsOperations(object):
    """VirtualNetworkPeeringsOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2020_11_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: Generated by AutoRest (see file header); prefer regenerating over hand-editing.
    models = _models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    # Initial DELETE request; long-running-operation polling is driven by begin_delete.
    def _delete_initial(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        virtual_network_peering_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> None
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        accept = "application/json"
        # Construct URL
        url = self._delete_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
    def begin_delete(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        virtual_network_peering_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified virtual network peering.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :param virtual_network_peering_name: The name of the virtual network peering.
        :type virtual_network_peering_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the ARMPolling polling method,
        False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                virtual_network_name=virtual_network_name,
                virtual_network_peering_name=virtual_network_peering_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # Delete completion is reported via the Location header ('final-state-via': 'location').
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
    def get(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        virtual_network_peering_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.VirtualNetworkPeering"
        """Gets the specified virtual network peering.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :param virtual_network_peering_name: The name of the virtual network peering.
        :type virtual_network_peering_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualNetworkPeering, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_11_01.models.VirtualNetworkPeering
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
    # Initial PUT request; long-running-operation polling is driven by begin_create_or_update.
    def _create_or_update_initial(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        virtual_network_peering_name, # type: str
        virtual_network_peering_parameters, # type: "_models.VirtualNetworkPeering"
        **kwargs # type: Any
    ):
        # type: (...) -> "_models.VirtualNetworkPeering"
        cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(virtual_network_peering_parameters, 'VirtualNetworkPeering')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 200 = updated an existing peering, 201 = created a new one; both deserialize the same model.
        if response.status_code == 200:
            deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        virtual_network_peering_name, # type: str
        virtual_network_peering_parameters, # type: "_models.VirtualNetworkPeering"
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller["_models.VirtualNetworkPeering"]
        """Creates or updates a peering in the specified virtual network.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :param virtual_network_peering_name: The name of the peering.
        :type virtual_network_peering_name: str
        :param virtual_network_peering_parameters: Parameters supplied to the create or update virtual
        network peering operation.
        :type virtual_network_peering_parameters: ~azure.mgmt.network.v2020_11_01.models.VirtualNetworkPeering
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the ARMPolling polling method,
        False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_11_01.models.VirtualNetworkPeering]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                virtual_network_name=virtual_network_name,
                virtual_network_peering_name=virtual_network_peering_name,
                virtual_network_peering_parameters=virtual_network_peering_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # Create/update completion is tracked via the Azure-AsyncOperation header.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
    def list(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["_models.VirtualNetworkPeeringListResult"]
        """Gets all virtual network peerings in a virtual network.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either VirtualNetworkPeeringListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_11_01.models.VirtualNetworkPeeringListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeeringListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-11-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url'] # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already carries the full URL, including query parameters.
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            deserialized = self._deserialize('VirtualNetworkPeeringListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings'} # type: ignore
| 51.911765 | 250 | 0.67993 |
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkPeeringsOperations(object):
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name,
virtual_network_name,
virtual_network_peering_name,
**kwargs
):
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01"
accept = "application/json"
url = self._delete_initial.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'}
def begin_delete(
self,
resource_group_name,
virtual_network_name,
virtual_network_peering_name,
**kwargs
):
polling = kwargs.pop('polling', True)
cls = kwargs.pop('cls', None)
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None)
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'}
def get(
self,
resource_group_name,
virtual_network_name,
virtual_network_peering_name,
**kwargs
):
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-11-01"
accept = "application/json"
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'}
def _create_or_update_initial(
    self,
    resource_group_name,  # type: str
    virtual_network_name,  # type: str
    virtual_network_peering_name,  # type: str
    virtual_network_peering_parameters,
    **kwargs
):
    """Issue the initial PUT of the create-or-update long-running operation.

    Serializes ``virtual_network_peering_parameters`` as the request body,
    sends it through the client pipeline, and deserializes a
    ``VirtualNetworkPeering`` from either a 200 (updated) or a 201
    (created) response. Called by :meth:`begin_create_or_update`.

    :raises HttpResponseError: For any status other than 200/201.
    """
    cls = kwargs.pop('cls', None)
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-11-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct the URL from the metadata template attached below.
    url = self._create_or_update_initial.metadata['url']
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
        'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query and header parameters.
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
    header_parameters = {}
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Serialize the peering model as the PUT body.
    body_content_kwargs = {}
    body_content = self._serialize.body(virtual_network_peering_parameters, 'VirtualNetworkPeering')
    body_content_kwargs['content'] = body_content
    request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200, 201]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    # 200 = existing peering updated, 201 = new peering created;
    # both carry the resulting resource in the body.
    if response.status_code == 200:
        deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)

    if response.status_code == 201:
        deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'}
def begin_create_or_update(
    self,
    resource_group_name,  # type: str
    virtual_network_name,  # type: str
    virtual_network_peering_name,  # type: str
    virtual_network_peering_parameters,
    **kwargs
):
    """Start a long-running create-or-update of a virtual network peering.

    Sends the initial PUT (unless resuming via ``continuation_token``)
    and returns an ``LROPoller`` that polls until the operation reaches a
    terminal state.

    :keyword polling: True (default) = ARMPolling, False = NoPolling, or a
        custom polling-method object.
    :keyword polling_interval: Seconds between polls (defaults to the
        client configuration's value).
    :keyword continuation_token: Token from a previous poller to resume it.
    :keyword cls: Optional callback applied to the final deserialized result.
    :return: ``LROPoller`` yielding ``VirtualNetworkPeering``.
    """
    polling = kwargs.pop('polling', True)
    cls = kwargs.pop('cls', None)
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)
    if cont_token is None:
        # Fresh operation: issue the initial PUT. cls=lambda keeps the raw
        # pipeline response so the poller can read the operation headers.
        raw_result = self._create_or_update_initial(
            resource_group_name=resource_group_name,
            virtual_network_name=virtual_network_name,
            virtual_network_peering_name=virtual_network_peering_name,
            virtual_network_peering_parameters=virtual_network_peering_parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These kwargs were only meaningful for the initial request.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the final polling response into the result model.
        deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
        'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }

    # Select the polling strategy. 'polling is True' (identity check)
    # picks the default ARM poller; any other truthy value is taken as a
    # user-supplied polling-method object.
    if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = NoPolling()
    else: polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'}
def list(
    self,
    resource_group_name,  # type: str
    virtual_network_name,  # type: str
    **kwargs
):
    """List all peerings of a virtual network.

    Returns a lazy ``ItemPaged`` iterator: each page is fetched on demand
    by following the server-provided ``next_link`` until it is exhausted.

    :param resource_group_name: Name of the resource group.
    :param virtual_network_name: Name of the virtual network.
    :keyword cls: Optional callback applied to each page's element list.
    :return: ``ItemPaged`` of ``VirtualNetworkPeering``.
    :raises HttpResponseError: For any non-200 page response.
    """
    cls = kwargs.pop('cls', None)
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-11-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # First page uses the templated URL; subsequent pages follow the
        # server-provided next_link verbatim (no extra query parameters).
        header_parameters = {}
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            url = self.list.metadata['url']
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Split one page response into (next_link, iterator of elements).
        deserialized = self._deserialize('VirtualNetworkPeeringListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch a single page, raising for any non-200 status.
        request = prepare_request(next_link)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings'}
| true | true |
1c2db539dea968d76fd87b98ec5526bd68909b9e | 48,324 | py | Python | catkin_ws/simulation/rviz_tools_py-master/src/rviz_tools_py/rviz_tools.py | fontysrobotics/Blackboard_based_distributed_fleet_manager | a6b44738fe67f4948a69f8d45da58d981c6724e0 | [
"BSD-3-Clause"
] | null | null | null | catkin_ws/simulation/rviz_tools_py-master/src/rviz_tools_py/rviz_tools.py | fontysrobotics/Blackboard_based_distributed_fleet_manager | a6b44738fe67f4948a69f8d45da58d981c6724e0 | [
"BSD-3-Clause"
] | null | null | null | catkin_ws/simulation/rviz_tools_py-master/src/rviz_tools_py/rviz_tools.py | fontysrobotics/Blackboard_based_distributed_fleet_manager | a6b44738fe67f4948a69f8d45da58d981c6724e0 | [
"BSD-3-Clause"
] | 2 | 2018-09-04T06:44:21.000Z | 2018-10-15T02:30:50.000Z | #!/usr/bin/env python
# Copyright (c) 2015, Carnegie Mellon University
# All rights reserved.
# Authors: David Butterworth <dbworth@cmu.edu>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# - Neither the name of Carnegie Mellon University nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Python includes
import numpy
import random # randint
# ROS includes
import roslib
import rospy
import tf # tf/transformations.py
from std_msgs.msg import Header, ColorRGBA
from geometry_msgs.msg import Transform
from geometry_msgs.msg import Pose
from geometry_msgs.msg import Point, Point32
from geometry_msgs.msg import Vector3
from geometry_msgs.msg import Quaternion
from geometry_msgs.msg import Polygon
from visualization_msgs.msg import Marker
class RvizMarkers(object):
"""
A class for publishing markers in Rviz
"""
def __init__(self, base_frame, marker_topic, wait_time=None):
    """
    :param base_frame: TF frame id that all published markers are stamped with (string)
    :param marker_topic: topic name the Marker publisher is created on (string)
    :param wait_time: if not None, block up to this many seconds for a
                      subscriber to connect (float)
    """
    self.base_frame = base_frame
    self.marker_topic = marker_topic
    # Set the default Marker parameters (must run before any publish call)
    self.setDefaultMarkerParams()
    # Create the Rviz Marker Publisher
    self.loadMarkerPublisher(wait_time)
def setDefaultMarkerParams(self):
    """
    Build the template Marker messages (one per shape) that the
    publish*() methods copy their defaults from, plus the shared
    settings (lifetime, mute flag, alpha).

    Previously each template was initialized with ~6 duplicated lines and
    the cylinder template was built twice; the boilerplate is now
    factored into _makeDefaultMarker() / _setIdentityPose(). The
    resulting attribute values are unchanged.
    """
    self.marker_lifetime = rospy.Duration(0.0)  # 0 = Marker never expires
    self.muted = False
    self.alpha = 1.0

    # Reset Marker: action 3 deletes all markers (used by deleteAllMarkers)
    self.reset_marker = Marker()
    self.reset_marker.header.frame_id = self.base_frame
    self.reset_marker.header.stamp = rospy.Time()
    self.reset_marker.action = 3

    # Simple shape templates; the 'ns' string gives each shape its own
    # unique-ID namespace in Rviz.
    self.arrow_marker = self._makeDefaultMarker("Arrow", Marker().ARROW)
    self.rectangle_marker = self._makeDefaultMarker("Rectangle", Marker().CUBE)
    self.line_marker = self._makeDefaultMarker("Line", Marker().LINE_STRIP)
    self.cube_marker = self._makeDefaultMarker("Block", Marker().CUBE)
    self.cylinder_marker = self._makeDefaultMarker("Cylinder", Marker().CYLINDER)
    self.mesh_marker = self._makeDefaultMarker("Mesh", Marker().MESH_RESOURCE)
    self.text_marker = self._makeDefaultMarker("Text", Marker().TEXT_VIEW_FACING)

    # List-based templates carry their geometry in 'points', so the
    # marker pose itself is pinned at the identity.
    self.path_marker = self._makeDefaultMarker("Path", Marker().LINE_LIST)
    self._setIdentityPose(self.path_marker)

    # Sphere Marker (a single sphere); renders a low-quality sphere.
    self.sphere_marker = self._makeDefaultMarker("Sphere", Marker().SPHERE)
    self._setIdentityPose(self.sphere_marker)

    # Sphere Marker #2: a SPHERE_LIST holding one sphere, which renders
    # a higher-quality sphere than the SPHERE type above.
    self.sphere_marker2 = self._makeDefaultMarker("Sphere", Marker().SPHERE_LIST)
    self._setIdentityPose(self.sphere_marker2)
    self.sphere_marker2.points.append(Point())
    self.sphere_marker2.colors.append(self.getColor('blue'))

    # Spheres List (multiple spheres).
    self.spheres_marker = self._makeDefaultMarker("Spheres", Marker().SPHERE_LIST)
    self._setIdentityPose(self.spheres_marker)

    # Cubes List (multiple cubes).
    self.cubes_marker = self._makeDefaultMarker("Cubes", Marker().CUBE_LIST)
    self._setIdentityPose(self.cubes_marker)

def _makeDefaultMarker(self, ns, marker_type):
    """Construct an ADD-action Marker with the shared frame/namespace/lifetime defaults."""
    marker = Marker()
    marker.header.frame_id = self.base_frame
    marker.ns = ns  # unique-ID namespace in Rviz
    marker.action = Marker().ADD
    marker.type = marker_type
    marker.lifetime = self.marker_lifetime
    return marker

def _setIdentityPose(self, marker):
    """Set a marker's pose to the identity (zero position, unit quaternion)."""
    marker.pose.position.x = 0.0
    marker.pose.position.y = 0.0
    marker.pose.position.z = 0.0
    marker.pose.orientation.x = 0.0
    marker.pose.orientation.y = 0.0
    marker.pose.orientation.z = 0.0
    marker.pose.orientation.w = 1.0
def loadMarkerPublisher(self, wait_time=None):
    """
    Create the Rviz Marker publisher (idempotent: a second call is a no-op).

    If wait_time is not None, block up to that many seconds for at least
    one subscriber to connect.
    """
    # Publisher already exists from an earlier call -- nothing to do.
    if hasattr(self, 'pub_rviz_marker'):
        return

    publisher = rospy.Publisher(self.marker_topic, Marker, queue_size=10)
    self.pub_rviz_marker = publisher
    rospy.logdebug("Publishing Rviz markers on topic '%s'", self.marker_topic)

    # Optionally wait (bounded) for a subscriber before returning.
    if wait_time is not None:
        self.waitForSubscriber(publisher, wait_time)
def waitForSubscriber(self, publisher, wait_time=1.0):
    """
    Block until `publisher` has at least one subscriber, or until
    `wait_time` seconds have elapsed.

    :param publisher: ROS Publisher to watch.
    :param wait_time: maximum number of seconds to wait (float).
    :return: True if a subscriber connected, False on timeout.
    """
    start_time = rospy.Time.now()
    max_time = start_time + rospy.Duration(wait_time)
    # Poll at ~100 Hz. Create the Rate object once instead of allocating a
    # fresh one on every loop iteration (the previous code did the latter).
    poll_rate = rospy.Rate(100)
    num_existing_subscribers = publisher.get_num_connections()
    while (num_existing_subscribers == 0):
        poll_rate.sleep()
        if (rospy.Time.now() > max_time):
            rospy.logerr("No subscribers connected to the '%s' topic after %f seconds", self.marker_topic, wait_time)
            return False
        num_existing_subscribers = publisher.get_num_connections()
    return True
def publishMarker(self, marker):
    """
    Send one Marker message on the marker topic.

    When the instance is muted nothing is published; True is returned
    either way.
    """
    if self.muted:
        return True
    self.pub_rviz_marker.publish(marker)
    return True
def deleteAllMarkers(self):
    """
    Publish the special reset marker, which tells Rviz to delete
    every marker currently displayed.
    """
    result = self.publishMarker(self.reset_marker)
    return result
def getColor(self, color):
    """
    Resolve a color specification to a ROS ColorRGBA.

    @param color: a color name (string), or an RGB / RGBA value
                  (tuple or list of 3 or 4 floats)
    @return color (ColorRGBA); alpha defaults to self.alpha unless the
            input supplies its own
    """
    # Named palette: 3-tuples use the default alpha, 4-tuples override it.
    named_colors = {
        'red': (0.8, 0.1, 0.1),
        'green': (0.1, 0.8, 0.1),
        'blue': (0.1, 0.1, 0.8),
        'grey': (0.9, 0.9, 0.9),
        'gray': (0.9, 0.9, 0.9),
        'white': (1.0, 1.0, 1.0),
        'orange': (1.0, 0.5, 0.0),
        'translucent_light': (0.1, 0.1, 0.1, 0.1),
        'translucent': (0.1, 0.1, 0.1, 0.25),
        'translucent_dark': (0.1, 0.1, 0.1, 0.5),
        'black': (0.0, 0.0, 0.0),
        'yellow': (1.0, 1.0, 0.0),
        'brown': (0.597, 0.296, 0.0),
        'pink': (1.0, 0.4, 1.0),
        'lime_green': (0.6, 1.0, 0.2),
        'clear': (1.0, 1.0, 1.0, 0.0),
        'purple': (0.597, 0.0, 0.597),
    }

    result = ColorRGBA()
    result.a = self.alpha

    if isinstance(color, (tuple, list)):
        # Explicit numeric color value.
        if len(color) == 3:
            result.r, result.g, result.b = color
        elif len(color) == 4:
            result.r, result.g, result.b, result.a = color
        else:
            raise ValueError('color must have 3 or 4 float values in getColor()')
    elif color == 'random':
        # Draw random channels, rejecting colors that are too dark
        # (channel sum <= 1.5; 0 = black, 3 = white).
        while True:
            result.r = random.random()
            result.g = random.random()
            result.b = random.random()
            if (result.r + result.g + result.b) > 1.5:
                break
    elif color in named_colors:
        channels = named_colors[color]
        result.r = channels[0]
        result.g = channels[1]
        result.b = channels[2]
        if len(channels) == 4:
            result.a = channels[3]
    else:
        rospy.logerr("getColor() called with unknown color name '%s', defaulting to 'blue'", color)
        result.r = 0.1
        result.g = 0.1
        result.b = 0.8

    return result
def getRandomColor(self):
    """
    Pick a random color name, suitable for passing to getColor().

    @return color name (string)

    Note: the previous docstring claimed a ColorRGBA return, but this
    method has always returned the name string.
    """
    # Names chosen to be visually distinct; very dark / invisible ones
    # ('black', 'clear', the translucents) are deliberately excluded.
    all_colors = [
        'red', 'green', 'blue', 'grey', 'white', 'orange',
        'yellow', 'brown', 'pink', 'lime_green', 'purple',
    ]
    return random.choice(all_colors)
def publishSphere(self, pose, color, scale, lifetime=None):
    """
    Publish a sphere Marker (renders a shaded, 3D-looking sphere).

    @param pose: numpy 4x4 matrix/ndarray, ROS Pose, or ROS Point
    @param color: color name (string) or RGB(A) value (tuple or list)
    @param scale: ROS Vector3, or float (uniform diameter)
    @param lifetime: seconds before the marker expires (None = never)
    @return True on success (also True when muted), False on bad input
    """
    if self.muted:
        return True

    # Normalize the pose argument to a ROS Pose message.
    if isinstance(pose, (numpy.matrix, numpy.ndarray)):
        target_pose = mat_to_pose(pose)
    elif isinstance(pose, Pose):
        target_pose = pose
    elif isinstance(pose, Point):
        target_pose = Pose()
        target_pose.position = pose
    else:
        rospy.logerr("Pose is unsupported type '%s' in publishSphere()", type(pose).__name__)
        return False

    # Normalize the scale argument to a Vector3.
    if isinstance(scale, Vector3):
        marker_scale = scale
    elif isinstance(scale, float):
        marker_scale = Vector3(scale, scale, scale)
    else:
        rospy.logerr("Scale is unsupported type '%s' in publishSphere()", type(scale).__name__)
        return False

    # Each publish gets a fresh ID so markers accumulate rather than replace.
    self.sphere_marker.id += 1
    marker = self.sphere_marker
    marker.lifetime = rospy.Duration(0.0 if lifetime is None else lifetime)  # 0 = never expires
    marker.header.stamp = rospy.Time.now()
    marker.scale = marker_scale
    marker.color = self.getColor(color)
    marker.pose = target_pose
    return self.publishMarker(marker)
def publishSphere2(self, pose, color, scale, lifetime=None):
    """
    Publish a sphere Marker via a one-element SPHERE_LIST, which renders
    a smoother, flatter-looking sphere than publishSphere().

    @param pose (numpy matrix, numpy ndarray, ROS Pose, ROS Point)
    @param color name (string) or RGB color value (tuple or list)
    @param scale (ROS Vector3, float)
    @param lifetime (float, None = never expire)
    @return True on success (also True when muted), False on bad input
    """
    if (self.muted == True):
        return True
    # Convert input pose to a ROS Pose Msg
    if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
        sphere_pose = mat_to_pose(pose)
    elif type(pose) == Pose:
        sphere_pose = pose
    elif type(pose) == Point:
        pose_msg = Pose()
        pose_msg.position = pose
        sphere_pose = pose_msg
    else:
        rospy.logerr("Pose is unsupported type '%s' in publishSphere2()", type(pose).__name__)
        return False
    # Convert input scale to a ROS Vector3 Msg
    if type(scale) == Vector3:
        sphere_scale = scale
    elif type(scale) == float:
        sphere_scale = Vector3(scale, scale, scale)
    else:
        rospy.logerr("Scale is unsupported type '%s' in publishSphere2()", type(scale).__name__)
        return False
    # BUG FIX: increment the ID of the SPHERE_LIST marker that is actually
    # published below. Previously self.sphere_marker.id was incremented
    # instead, so sphere_marker2's ID never changed and every sphere
    # published here overwrote the previous one in Rviz.
    self.sphere_marker2.id += 1
    # Get the default parameters (sphere_marker2 = SPHERE_LIST template)
    sphere_marker = self.sphere_marker2
    if lifetime == None:
        sphere_marker.lifetime = rospy.Duration(0.0)  # 0 = Marker never expires
    else:
        sphere_marker.lifetime = rospy.Duration(lifetime)  # in seconds
    # Set the timestamp
    sphere_marker.header.stamp = rospy.Time.now()
    # Set marker size
    sphere_marker.scale = sphere_scale
    # Set marker color
    sphere_marker.color = self.getColor(color)
    # Set the pose and color of the single sphere in the list
    sphere_marker.points[0] = sphere_pose.position
    sphere_marker.colors[0] = self.getColor(color)
    return self.publishMarker(sphere_marker)
def publishArrow(self, pose, color, scale, lifetime=None):
    """
    Publish an arrow Marker.

    @param pose: numpy 4x4 matrix/ndarray or ROS Pose
    @param color: color name (string) or RGB(A) value (tuple or list)
    @param scale: ROS Vector3, or a float arrow length (shaft/head widths
                  are then 0.1 * length)
    @param lifetime: seconds before the marker expires (None = never)
    @return True on success (also True when muted), False on bad input
    """
    if self.muted:
        return True

    # Normalize the pose argument to a ROS Pose message.
    if isinstance(pose, (numpy.matrix, numpy.ndarray)):
        target_pose = mat_to_pose(pose)
    elif isinstance(pose, Pose):
        target_pose = pose
    else:
        rospy.logerr("Pose is unsupported type '%s' in publishArrow()", type(pose).__name__)
        return False

    # Normalize the scale argument to a Vector3.
    if isinstance(scale, Vector3):
        marker_scale = scale
    elif isinstance(scale, float):
        marker_scale = Vector3(scale, 0.1 * scale, 0.1 * scale)
    else:
        rospy.logerr("Scale is unsupported type '%s' in publishArrow()", type(scale).__name__)
        return False

    # Fresh ID so arrows accumulate rather than replace each other.
    self.arrow_marker.id += 1
    marker = self.arrow_marker
    marker.lifetime = rospy.Duration(0.0 if lifetime is None else lifetime)  # 0 = never expires
    marker.header.stamp = rospy.Time.now()
    marker.pose = target_pose
    marker.scale = marker_scale
    marker.color = self.getColor(color)
    return self.publishMarker(marker)
def publishCube(self, pose, color, scale, lifetime=None):
    """
    Publish a cube Marker.

    @param pose: numpy 4x4 matrix/ndarray or ROS Pose
    @param color: color name (string) or RGB(A) value (tuple or list)
    @param scale: ROS Vector3, or float (uniform edge length)
    @param lifetime: seconds before the marker expires (None = never)
    @return True on success (also True when muted), False on bad input
    """
    if self.muted:
        return True

    # Normalize the pose argument to a ROS Pose message.
    if isinstance(pose, (numpy.matrix, numpy.ndarray)):
        target_pose = mat_to_pose(pose)
    elif isinstance(pose, Pose):
        target_pose = pose
    else:
        rospy.logerr("Pose is unsupported type '%s' in publishCube()", type(pose).__name__)
        return False

    # Normalize the scale argument to a Vector3.
    if isinstance(scale, Vector3):
        marker_scale = scale
    elif isinstance(scale, float):
        marker_scale = Vector3(scale, scale, scale)
    else:
        rospy.logerr("Scale is unsupported type '%s' in publishCube()", type(scale).__name__)
        return False

    # Fresh ID so cubes accumulate rather than replace each other.
    self.cube_marker.id += 1
    marker = self.cube_marker
    marker.lifetime = rospy.Duration(0.0 if lifetime is None else lifetime)  # 0 = never expires
    marker.header.stamp = rospy.Time.now()
    marker.pose = target_pose
    marker.scale = marker_scale
    marker.color = self.getColor(color)
    return self.publishMarker(marker)
def publishCubes(self, list_of_cubes, color, scale, lifetime=None):
    """
    Publish a list of cubes as a single CUBE_LIST Marker.
    @param list_of_cubes (list of numpy matrix, list of numpy ndarray, list of ROS Pose or ROS Point)
    @param color name (string) or RGB color value (tuple or list); applied to every cube
    @param scale (ROS Vector3, float) -- uniform edge length for all cubes
    @param lifetime (float, None = never expire)
    @return True on success (also True when muted), False on bad input

    NOTE(review): if an element of list_of_cubes has an unsupported type,
    this returns False after the shared cubes_marker has already been
    cleared/partially re-filled and its ID incremented -- confirm callers
    tolerate that partial mutation.
    """
    if (self.muted == True):
        return True
    # Check input
    if type(list_of_cubes) != list:
        rospy.logerr("list_of_cubes is unsupported type '%s' in publishCubes()", type(list_of_cubes).__name__)
        return False
    # Convert input scale to a ROS Vector3 Msg
    if type(scale) == Vector3:
        cubes_scale = scale
    elif type(scale) == float:
        cubes_scale = Vector3(scale, scale, scale)
    else:
        rospy.logerr("Scale is unsupported type '%s' in publishCubes()", type(scale).__name__)
        return False
    # Increment the ID number
    self.cubes_marker.id += 1
    # Get the default parameters
    cubes_marker = self.cubes_marker
    if lifetime == None:
        cubes_marker.lifetime = rospy.Duration(0.0)  # 0 = Marker never expires
    else:
        cubes_marker.lifetime = rospy.Duration(lifetime)  # in seconds
    # Set the timestamp
    cubes_marker.header.stamp = rospy.Time.now()
    # Set marker size
    cubes_marker.scale = cubes_scale
    # Set marker color (all cubes share the same color)
    cubes_marker.color = self.getColor(color)
    cubes_color = self.getColor(color)
    # Set the cubes positions and color (clear any points from a previous call)
    cubes_marker.points[:] = []  # clear
    cubes_marker.colors[:] = []
    for i in range(0, len(list_of_cubes)):
        # Each cube position needs to be a ROS Point Msg;
        # Pose and 4x4 matrix inputs contribute only their translation part.
        if type(list_of_cubes[i]) == Pose:
            cubes_marker.points.append(list_of_cubes[i].position)
            cubes_marker.colors.append(cubes_color)
        elif (type(list_of_cubes[i]) == numpy.matrix) or (type(list_of_cubes[i]) == numpy.ndarray):
            pose_i = mat_to_pose(list_of_cubes[i])
            cubes_marker.points.append(pose_i.position)
            cubes_marker.colors.append(cubes_color)
        elif type(list_of_cubes[i]) == Point:
            cubes_marker.points.append(list_of_cubes[i])
            cubes_marker.colors.append(cubes_color)
        else:
            rospy.logerr("list_of_cubes contains unsupported type '%s' in publishCubes()", type(list_of_cubes[i]).__name__)
            return False
    return self.publishMarker(cubes_marker)
def publishBlock(self, pose, color, scale, lifetime=None):
    """
    Publish a cube Marker (alias for publishCube).
    @param pose (numpy matrix, numpy ndarray, ROS Pose)
    @param color name (string) or RGB color value (tuple or list)
    @param scale (ROS Vector3, float)
    @param lifetime (float, None = never expire)
    """
    # BUG FIX: the lifetime argument was previously dropped, so blocks
    # published through this alias never expired regardless of the value.
    return self.publishCube(pose, color, scale, lifetime)
def publishCylinder(self, pose, color, height, radius, lifetime=None):
    """
    Publish a cylinder Marker.

    @param pose: numpy 4x4 matrix/ndarray or ROS Pose
    @param color: color name (string) or RGB(A) value (tuple or list)
    @param height: cylinder height (float)
    @param radius: cylinder radius (float)
    @param lifetime: seconds before the marker expires (None = never)
    @return True on success (also True when muted), False on bad input
    """
    if self.muted:
        return True

    # Normalize the pose argument to a ROS Pose message.
    if isinstance(pose, (numpy.matrix, numpy.ndarray)):
        target_pose = mat_to_pose(pose)
    elif isinstance(pose, Pose):
        target_pose = pose
    else:
        rospy.logerr("Pose is unsupported type '%s' in publishCylinder()", type(pose).__name__)
        return False

    # Fresh ID so cylinders accumulate rather than replace each other.
    self.cylinder_marker.id += 1
    marker = self.cylinder_marker
    marker.lifetime = rospy.Duration(0.0 if lifetime is None else lifetime)  # 0 = never expires
    marker.header.stamp = rospy.Time.now()
    marker.pose = target_pose
    # x/y scale come from the radius argument, z is the height.
    marker.scale.x = radius
    marker.scale.y = radius
    marker.scale.z = height
    marker.color = self.getColor(color)
    return self.publishMarker(marker)
def publishAxis(self, pose, length, radius, lifetime=None):
    """
    Publish an axis triad as three cylinders: x = red, y = green, z = blue.

    @param pose: numpy 4x4 matrix/ndarray or ROS Pose (origin of the triad)
    @param length: length of each axis cylinder (float)
    @param radius: radius of each axis cylinder (float)
    @param lifetime: seconds before the markers expire (None = never)
    @return True on success, False on bad input
    """
    # Normalize the pose argument to a 4x4 transformation matrix.
    if isinstance(pose, (numpy.matrix, numpy.ndarray)):
        axis_mat = pose
    elif isinstance(pose, Pose):
        axis_mat = pose_to_mat(pose)
    else:
        rospy.logerr("Pose is unsupported type '%s' in publishAxis()", type(pose).__name__)
        return False

    half = length / 2.0
    # (color, offset of the cylinder center along the axis,
    #  rotation aligning the cylinder's z-axis with that axis)
    axes = [
        ('red',   (half, 0.0, 0.0), tf.transformations.rotation_matrix(numpy.pi / 2.0, (0, 1, 0))),
        ('green', (0.0, half, 0.0), tf.transformations.rotation_matrix(numpy.pi / 2.0, (1, 0, 0))),
        ('blue',  (0.0, 0.0, half), tf.transformations.rotation_matrix(0.0, (0, 0, 1))),
    ]
    for axis_color, offset, rotation in axes:
        translation = tf.transformations.translation_matrix(offset)
        cylinder_mat = tf.transformations.concatenate_matrices(axis_mat, translation, rotation)
        self.publishCylinder(mat_to_pose(cylinder_mat), axis_color, length, radius, lifetime)
    return True
def publishMesh(self, pose, file_name, color, scale, lifetime=None):
    """
    Publish a mesh Marker. The mesh resource can be a binary STL or a
    collada DAE file.

    @param pose: numpy 4x4 matrix/ndarray or ROS Pose
    @param file_name: mesh resource URI (string)
    @param color: color name (string), RGB(A) value (tuple or list), or
                  None to use the mesh's embedded materials
    @param scale: ROS Vector3, or float (uniform scale)
    @param lifetime: seconds before the marker expires (None = never)
    @return True on success (also True when muted), False on bad input
    """
    if self.muted:
        return True

    # Normalize the pose argument to a ROS Pose message.
    if isinstance(pose, (numpy.matrix, numpy.ndarray)):
        target_pose = mat_to_pose(pose)
    elif isinstance(pose, Pose):
        target_pose = pose
    else:
        rospy.logerr("Pose is unsupported type '%s' in publishMesh()", type(pose).__name__)
        return False

    # Normalize the scale argument to a Vector3.
    if isinstance(scale, Vector3):
        marker_scale = scale
    elif isinstance(scale, float):
        marker_scale = Vector3(scale, scale, scale)
    else:
        rospy.logerr("Scale is unsupported type '%s' in publishMesh()", type(scale).__name__)
        return False

    # Fresh ID so meshes accumulate rather than replace each other.
    self.mesh_marker.id += 1
    marker = self.mesh_marker
    marker.lifetime = rospy.Duration(0.0 if lifetime is None else lifetime)  # 0 = never expires
    marker.header.stamp = rospy.Time.now()
    marker.scale = marker_scale
    # color=None leaves the ColorRGBA zeroed so the mesh's own
    # (embedded) materials are used instead.
    marker.color = ColorRGBA() if color is None else self.getColor(color)
    marker.pose = target_pose
    marker.mesh_resource = file_name
    marker.mesh_use_embedded_materials = True
    return self.publishMarker(marker)
def publishRectangle(self, point1, point2, color, lifetime=None):
"""
Publish a rectangle Marker between two points. If the z-values are not the same then this will result in a cuboid.
@param point1 (ROS Point)
@param point2 (ROS Point)
@param color name (string) or RGB color value (tuple or list)
@param lifetime (float, None = never expire)
"""
if (self.muted == True):
return True
# Convert input points to ROS Point Msgs
if type(point1) == Point:
rect_point1 = point1
else:
rospy.logerr("Point1 is unsupported type '%s' in publishRectangle()", type(point1).__name__)
return False
if type(point2) == Point:
rect_point2 = point2
else:
rospy.logerr("Point2 is unsupported type '%s' in publishRectangle()", type(point2).__name__)
return False
# Increment the ID number
self.rectangle_marker.id += 1
# Get the default parameters
rectangle_marker = self.rectangle_marker
if lifetime == None:
rectangle_marker.lifetime = rospy.Duration(0.0) # 0 = Marker never expires
else:
rectangle_marker.lifetime = rospy.Duration(lifetime) # in seconds
# Set the timestamp
rectangle_marker.header.stamp = rospy.Time.now()
# Set marker color
rectangle_marker.color = self.getColor(color)
# Calculate the center pose
rect_pose = Pose()
rect_pose.position.x = (rect_point1.x - rect_point2.x) / 2.0 + rect_point2.x
rect_pose.position.y = (rect_point1.y - rect_point2.y) / 2.0 + rect_point2.y
rect_pose.position.z = (rect_point1.z - rect_point2.z) / 2.0 + rect_point2.z
rectangle_marker.pose = rect_pose
# Calculate scale
rectangle_marker.scale.x = numpy.fabs(rect_point1.x - rect_point2.x)
rectangle_marker.scale.y = numpy.fabs(rect_point1.y - rect_point2.y)
rectangle_marker.scale.z = numpy.fabs(rect_point1.z - rect_point2.z)
return self.publishMarker(rectangle_marker)
def publishPlane(self, pose, depth, width, color, lifetime=None):
"""
Publish a plane Marker.
@param pose (numpy matrix, numpy ndarray, ROS Pose)
@param depth (float)
@param width (float)
@param color name (string) or RGB color value (tuple or list)
@param lifetime (float, None = never expire)
"""
if (self.muted == True):
return True
# Convert input pose to a ROS Pose Msg
if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
rect_pose = mat_to_pose(pose)
elif type(pose) == Pose:
rect_pose = pose
else:
rospy.logerr("Pose is unsupported type '%s' in publishRectangle()", type(pose).__name__)
return False
# Increment the ID number
self.rectangle_marker.id += 1
# Get the default parameters
rectangle_marker = self.rectangle_marker
if lifetime == None:
rectangle_marker.lifetime = rospy.Duration(0.0) # 0 = Marker never expires
else:
rectangle_marker.lifetime = rospy.Duration(lifetime) # in seconds
# Set the timestamp
rectangle_marker.header.stamp = rospy.Time.now()
# Set marker color
rectangle_marker.color = self.getColor(color)
# Set the pose
rectangle_marker.pose = rect_pose
# Set the scale
rectangle_marker.scale.x = depth
rectangle_marker.scale.y = width
rectangle_marker.scale.z = 0.0
return self.publishMarker(rectangle_marker)
def publishLine(self, point1, point2, color, width, lifetime=None):
"""
Publish a line Marker between two points.
@param point1 (ROS Point, ROS Pose, numpy matrix, numpy ndarray)
@param point2 (ROS Point, ROS Pose, numpy matrix, numpy ndarray)
@param color name (string) or RGB color value (tuple or list)
@param width (float)
@param lifetime (float, None = never expire)
"""
if (self.muted == True):
return True
# Convert input points to ROS Point Msgs
if type(point1) == Point:
line_point1 = point1
elif type(point1) == Pose:
position = point1.position
line_point1 = Point(position.x, position.y, position.z)
elif (type(point1) == numpy.matrix) or (type(point1) == numpy.ndarray):
pose = mat_to_pose(point1)
position = pose.position
line_point1 = Point(position.x, position.y, position.z)
else:
rospy.logerr("Point1 is unsupported type '%s' in publishLine()", type(point1).__name__)
return False
if type(point2) == Point:
line_point2 = point2
elif type(point2) == Pose:
position = point2.position
line_point2 = Point(position.x, position.y, position.z)
elif (type(point2) == numpy.matrix) or (type(point2) == numpy.ndarray):
pose = mat_to_pose(point2)
position = pose.position
line_point2 = Point(position.x, position.y, position.z)
else:
rospy.logerr("Point2 is unsupported type '%s' in publishLine()", type(point2).__name__)
return False
# Increment the ID number
self.line_marker.id += 1
# Get the default parameters
line_marker = self.line_marker
if lifetime == None:
line_marker.lifetime = rospy.Duration(0.0) # 0 = Marker never expires
else:
line_marker.lifetime = rospy.Duration(lifetime) # in seconds
# Set the timestamp
line_marker.header.stamp = rospy.Time.now()
# Set marker color
line_marker.color = self.getColor(color)
# Set the start and end points
line_marker.points[:] = [] # clear
line_marker.points.append(line_point1)
line_marker.points.append(line_point2)
# Set the line width
line_marker.scale.x = width
return self.publishMarker(line_marker)
def publishPath(self, path, color, width, lifetime=None):
"""
Publish a path Marker using a set of waypoints.
@param path (list of ROS Points)
@param color name (string) or RGB color value (tuple or list)
@param width (float)
@param lifetime (float, None = never expire)
"""
if (self.muted == True):
return True
# Check input
if type(path) == list:
path_path = path # :-)
else:
rospy.logerr("Path is unsupported type '%s' in publishPath()", type(path).__name__)
return False
# Increment the ID number
self.path_marker.id += 1
# Get the default parameters
path_marker = self.path_marker
if lifetime == None:
path_marker.lifetime = rospy.Duration(0.0) # 0 = Marker never expires
else:
path_marker.lifetime = rospy.Duration(lifetime) # in seconds
# Set the timestamp
path_marker.header.stamp = rospy.Time.now()
# Set the path width
path_marker.scale.x = width
path_color = self.getColor(color)
# Set the path points and color
path_marker.points[:] = [] # clear
path_marker.colors[:] = []
for i in range(1, len(path)):
# Each path waypoint needs to be a ROS Point Msg
if type(path[i]) == Point:
# Start of segment is previous point
path_marker.points.append(path[i-1])
path_marker.colors.append(path_color)
# End of segment is current point
path_marker.points.append(path[i])
path_marker.colors.append(path_color)
elif type(path[i]) == Pose:
# Start of segment is previous point
position = path[i-1].position
point = Point(position.x, position.y, position.z)
path_marker.points.append(point)
path_marker.colors.append(path_color)
# End of segment is current point
position = path[i].position
point = Point(position.x, position.y, position.z)
path_marker.points.append(point)
path_marker.colors.append(path_color)
elif (type(path[i]) == numpy.matrix) or (type(path[i]) == numpy.ndarray):
# Start of segment is previous point
pose = mat_to_pose(path[i-1])
position = pose.position
point = Point(position.x, position.y, position.z)
path_marker.points.append(point)
path_marker.colors.append(path_color)
# End of segment is current point
pose = mat_to_pose(path[i])
position = pose.position
point = Point(position.x, position.y, position.z)
path_marker.points.append(point)
path_marker.colors.append(path_color)
else:
rospy.logerr("path list contains unsupported type '%s' in publishPath()", type(path[i]).__name__)
return False
return self.publishMarker(path_marker)
def publishPolygon(self, polygon, color, width, lifetime=None):
"""
Publish a polygon Marker.
@param polygon (ROS Polygon)
@param color name (string) or RGB color value (tuple or list)
@param width line width (float)
@param lifetime (float, None = never expire)
a path with the start and end points connected
"""
if (self.muted == True):
return True
# Check input
if type(polygon) == Polygon:
polygon_msg = polygon
else:
rospy.logerr("Path is unsupported type '%s' in publishPolygon()", type(polygon).__name__)
return False
# Copy points from ROS Polygon Msg into a list
polygon_path = []
for i in range(0, len(polygon_msg.points)):
x = polygon_msg.points[i].x
y = polygon_msg.points[i].y
z = polygon_msg.points[i].z
polygon_path.append( Point(x,y,z) )
# Add the first point again
x = polygon_msg.points[0].x
y = polygon_msg.points[0].y
z = polygon_msg.points[0].z
polygon_path.append( Point(x,y,z) )
return self.publishPath(polygon_path, color, width, lifetime)
def publishSpheres(self, list_of_spheres, color, scale, lifetime=None):
"""
Publish a list of spheres. This renders smoother, flatter-looking spheres.
@param list_of_spheres (list of numpy matrix, list of numpy ndarray, list of ROS Pose)
@param color name (string) or RGB color value (tuple or list)
@param scale (ROS Vector3, float)
@param lifetime (float, None = never expire)
"""
if (self.muted == True):
return True
# Check input
if type(list_of_spheres) != list:
rospy.logerr("list_of_spheres is unsupported type '%s' in publishSpheres()", type(list_of_spheres).__name__)
return False
# Convert input scale to a ROS Vector3 Msg
if type(scale) == Vector3:
spheres_scale = scale
elif type(scale) == float:
spheres_scale = Vector3(scale, scale, scale)
else:
rospy.logerr("Scale is unsupported type '%s' in publishSpheres()", type(scale).__name__)
return False
# Increment the ID number
self.spheres_marker.id += 1
# Get the default parameters
spheres_marker = self.spheres_marker
if lifetime == None:
spheres_marker.lifetime = rospy.Duration(0.0) # 0 = Marker never expires
else:
spheres_marker.lifetime = rospy.Duration(lifetime) # in seconds
# Set the timestamp
spheres_marker.header.stamp = rospy.Time.now()
# Set marker size
spheres_marker.scale = spheres_scale
# Set marker color
spheres_marker.color = self.getColor(color)
spheres_color = self.getColor(color)
#spheres_marker.color = spheres_color
# Set the sphere positions and color
spheres_marker.points[:] = [] # clear
spheres_marker.colors[:] = []
for i in range(0, len(list_of_spheres)):
# Each sphere position needs to be a ROS Point Msg
if type(list_of_spheres[i]) == Pose:
spheres_marker.points.append( list_of_spheres[i].position )
spheres_marker.colors.append(spheres_color)
elif (type(list_of_spheres[i]) == numpy.matrix) or (type(list_of_spheres[i]) == numpy.ndarray):
pose_i = mat_to_pose(list_of_spheres[i])
spheres_marker.points.append( pose_i.position )
spheres_marker.colors.append(spheres_color)
elif type(list_of_spheres[i]) == Point:
spheres_marker.points.append(list_of_spheres[i])
spheres_marker.colors.append(spheres_color)
else:
rospy.logerr("list_of_sphere contains unsupported type '%s' in publishSphere()", type(list_of_spheres[i]).__name__)
return False
return self.publishMarker(spheres_marker)
def publishText(self, pose, text, color, scale, lifetime=None):
"""
Publish a text Marker
@param pose (numpy matrix, numpy ndarray, ROS Pose)
@param text (string)
@param color name (string) or RGB color value (tuple or list)
@param scale (ROS Vector3, float)
@param lifetime (float, None = never expire)
"""
if (self.muted == True):
return True
# Convert input pose to a ROS Pose Msg
if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
text_pose = mat_to_pose(pose)
elif type(pose) == Pose:
text_pose = pose
else:
rospy.logerr("Pose is unsupported type '%s' in publishText()", type(pose).__name__)
return False
# Convert input scale to a ROS Vector3 Msg
if type(scale) == Vector3:
text_scale = scale
elif type(scale) == float:
text_scale = Vector3(scale, scale, scale)
else:
rospy.logerr("Scale is unsupported type '%s' in publishText()", type(scale).__name__)
return False
# Increment the ID number
self.text_marker.id += 1
# Get the default parameters
text_marker = self.text_marker
if lifetime == None:
text_marker.lifetime = rospy.Duration(0.0) # 0 = Marker never expires
else:
text_marker.lifetime = rospy.Duration(lifetime) # in seconds
# Set the timestamp
text_marker.header.stamp = rospy.Time.now()
# Set the pose
text_marker.pose = text_pose
# Set marker size
text_marker.scale = text_scale
# Set marker color
text_marker.color = self.getColor(color)
text_marker.text = text
return self.publishMarker(text_marker)
#------------------------------------------------------------------------------#
def pose_to_mat(pose):
    """
    Convert a ROS Pose msg to a 4x4 homogeneous transform matrix.
    @param pose (ROS geometry_msgs.msg.Pose)
    @return mat 4x4 matrix (numpy.matrix)
    """
    orientation = pose.orientation
    # Start from the pure-rotation transform for the quaternion...
    rotation = tf.transformations.quaternion_matrix(
        [orientation.x, orientation.y, orientation.z, orientation.w])
    mat = numpy.matrix(rotation)
    # ...then write the translation into the last column
    mat[0:3, 3] = numpy.matrix([pose.position.x, pose.position.y, pose.position.z]).T
    return mat
def mat_to_pose(mat):
    """
    Convert a homogeneous transformation matrix to a ROS Pose msg.
    @param mat 4x4 homogenous transform (numpy.matrix or numpy.ndarray)
    @return pose (ROS geometry_msgs.msg.Pose)
    """
    pose = Pose()
    # The translation lives in the last column of the transform
    pose.position.x = mat[0, 3]
    pose.position.y = mat[1, 3]
    pose.position.z = mat[2, 3]
    # The rotation part becomes an (x, y, z, w) quaternion
    qx, qy, qz, qw = tf.transformations.quaternion_from_matrix(mat)
    pose.orientation.x = qx
    pose.orientation.y = qy
    pose.orientation.z = qz
    pose.orientation.w = qw
    return pose
| 34.890975 | 131 | 0.601813 |
import numpy
import random
import roslib
import rospy
import tf
from std_msgs.msg import Header, ColorRGBA
from geometry_msgs.msg import Transform
from geometry_msgs.msg import Pose
from geometry_msgs.msg import Point, Point32
from geometry_msgs.msg import Vector3
from geometry_msgs.msg import Quaternion
from geometry_msgs.msg import Polygon
from visualization_msgs.msg import Marker
class RvizMarkers(object):
def __init__(self, base_frame, marker_topic, wait_time=None):
self.base_frame = base_frame
self.marker_topic = marker_topic
self.setDefaultMarkerParams()
self.loadMarkerPublisher(wait_time)
def setDefaultMarkerParams(self):
self.marker_lifetime = rospy.Duration(0.0)
self.muted = False
self.alpha = 1.0
self.cylinder_marker = Marker()
self.cylinder_marker.header.frame_id = self.base_frame
self.cylinder_marker.ns = "Cylinder"
self.cylinder_marker.action = Marker().ADD
self.cylinder_marker.type = Marker().CYLINDER
self.cylinder_marker.lifetime = self.marker_lifetime
self.reset_marker = Marker()
self.reset_marker.header.frame_id = self.base_frame
self.reset_marker.header.stamp = rospy.Time()
self.reset_marker.action = 3
self.arrow_marker = Marker()
self.arrow_marker.header.frame_id = self.base_frame
self.arrow_marker.ns = "Arrow"
self.arrow_marker.action = Marker().ADD
self.arrow_marker.type = Marker().ARROW
self.arrow_marker.lifetime = self.marker_lifetime
self.rectangle_marker = Marker()
self.rectangle_marker.header.frame_id = self.base_frame
self.rectangle_marker.ns = "Rectangle"
self.rectangle_marker.action = Marker().ADD
self.rectangle_marker.type = Marker().CUBE
self.rectangle_marker.lifetime = self.marker_lifetime
self.line_marker = Marker()
self.line_marker.header.frame_id = self.base_frame
self.line_marker.ns = "Line"
self.line_marker.action = Marker().ADD
self.line_marker.type = Marker().LINE_STRIP
self.line_marker.lifetime = self.marker_lifetime
self.path_marker = Marker()
self.path_marker.header.frame_id = self.base_frame
self.path_marker.ns = "Path"
self.path_marker.action = Marker().ADD
self.path_marker.type = Marker().LINE_LIST
self.path_marker.lifetime = self.marker_lifetime
self.path_marker.pose.position.x = 0.0
self.path_marker.pose.position.y = 0.0
self.path_marker.pose.position.z = 0.0
self.path_marker.pose.orientation.x = 0.0
self.path_marker.pose.orientation.y = 0.0
self.path_marker.pose.orientation.z = 0.0
self.path_marker.pose.orientation.w = 1.0
self.sphere_marker = Marker()
self.sphere_marker.header.frame_id = self.base_frame
self.sphere_marker.ns = "Sphere"
self.sphere_marker.type = Marker().SPHERE
self.sphere_marker.action = Marker().ADD
self.sphere_marker.lifetime = self.marker_lifetime
self.sphere_marker.pose.position.x = 0
self.sphere_marker.pose.position.y = 0
self.sphere_marker.pose.position.z = 0
self.sphere_marker.pose.orientation.x = 0.0
self.sphere_marker.pose.orientation.y = 0.0
self.sphere_marker.pose.orientation.z = 0.0
self.sphere_marker.pose.orientation.w = 1.0
self.sphere_marker2 = Marker()
self.sphere_marker2.header.frame_id = self.base_frame
self.sphere_marker2.ns = "Sphere"
self.sphere_marker2.type = Marker().SPHERE_LIST
self.sphere_marker2.action = Marker().ADD
self.sphere_marker2.lifetime = self.marker_lifetime
self.sphere_marker2.pose.position.x = 0
self.sphere_marker2.pose.position.y = 0
self.sphere_marker2.pose.position.z = 0
self.sphere_marker2.pose.orientation.x = 0.0
self.sphere_marker2.pose.orientation.y = 0.0
self.sphere_marker2.pose.orientation.z = 0.0
self.sphere_marker2.pose.orientation.w = 1.0
point1 = Point()
self.sphere_marker2.points.append(point1)
self.sphere_marker2.colors.append(self.getColor('blue'))
self.spheres_marker = Marker()
self.spheres_marker.header.frame_id = self.base_frame
self.spheres_marker.ns = "Spheres"
self.spheres_marker.type = Marker().SPHERE_LIST
self.spheres_marker.action = Marker().ADD
self.spheres_marker.lifetime = self.marker_lifetime
self.spheres_marker.pose.position.x = 0.0
self.spheres_marker.pose.position.y = 0.0
self.spheres_marker.pose.position.z = 0.0
self.spheres_marker.pose.orientation.x = 0.0
self.spheres_marker.pose.orientation.y = 0.0
self.spheres_marker.pose.orientation.z = 0.0
self.spheres_marker.pose.orientation.w = 1.0
self.cube_marker = Marker()
self.cube_marker.header.frame_id = self.base_frame
self.cube_marker.ns = "Block"
self.cube_marker.action = Marker().ADD
self.cube_marker.type = Marker().CUBE
self.cube_marker.lifetime = self.marker_lifetime
self.cubes_marker = Marker()
self.cubes_marker.header.frame_id = self.base_frame
self.cubes_marker.ns = "Cubes"
self.cubes_marker.type = Marker().CUBE_LIST
self.cubes_marker.action = Marker().ADD
self.cubes_marker.lifetime = self.marker_lifetime
self.cubes_marker.pose.position.x = 0.0
self.cubes_marker.pose.position.y = 0.0
self.cubes_marker.pose.position.z = 0.0
self.cubes_marker.pose.orientation.x = 0.0
self.cubes_marker.pose.orientation.y = 0.0
self.cubes_marker.pose.orientation.z = 0.0
self.cubes_marker.pose.orientation.w = 1.0
self.cylinder_marker = Marker()
self.cylinder_marker.header.frame_id = self.base_frame
self.cylinder_marker.ns = "Cylinder"
self.cylinder_marker.action = Marker().ADD
self.cylinder_marker.type = Marker().CYLINDER
self.cylinder_marker.lifetime = self.marker_lifetime
self.mesh_marker = Marker()
self.mesh_marker.header.frame_id = self.base_frame
self.mesh_marker.ns = "Mesh"
self.mesh_marker.action = Marker().ADD
self.mesh_marker.type = Marker().MESH_RESOURCE
self.mesh_marker.lifetime = self.marker_lifetime
self.text_marker = Marker()
self.text_marker.header.frame_id = self.base_frame
self.text_marker.ns = "Text"
self.text_marker.action = Marker().ADD
self.text_marker.type = Marker().TEXT_VIEW_FACING
self.text_marker.lifetime = self.marker_lifetime
def loadMarkerPublisher(self, wait_time=None):
if hasattr(self, 'pub_rviz_marker'):
return
self.pub_rviz_marker = rospy.Publisher(self.marker_topic, Marker, queue_size=10)
rospy.logdebug("Publishing Rviz markers on topic '%s'", self.marker_topic)
if wait_time != None:
self.waitForSubscriber(self.pub_rviz_marker, wait_time)
def waitForSubscriber(self, publisher, wait_time=1.0):
start_time = rospy.Time.now()
max_time = start_time + rospy.Duration(wait_time)
num_existing_subscribers = publisher.get_num_connections()
while (num_existing_subscribers == 0):
rospy.Rate(100).sleep()
if (rospy.Time.now() > max_time):
rospy.logerr("No subscribers connected to the '%s' topic after %f seconds", self.marker_topic, wait_time)
return False
num_existing_subscribers = publisher.get_num_connections()
return True
def publishMarker(self, marker):
if (self.muted == True):
return True
lf.pub_rviz_marker.publish(marker)
return True
def deleteAllMarkers(self):
return self.publishMarker(self.reset_marker)
def getColor(self, color):
result = ColorRGBA()
result.a = self.alpha
if (type(color) == tuple) or (type(color) == list):
if len(color) == 3:
result.r = color[0]
result.g = color[1]
result.b = color[2]
elif len(color) == 4:
result.r = color[0]
result.g = color[1]
result.b = color[2]
result.a = color[3]
else:
raise ValueError('color must have 3 or 4 float values in getColor()')
elif (color == 'red'):
result.r = 0.8
result.g = 0.1
result.b = 0.1
elif (color == 'green'):
result.r = 0.1
result.g = 0.8
result.b = 0.1
elif (color == 'blue'):
result.r = 0.1
result.g = 0.1
result.b = 0.8
elif (color == 'grey') or (color == 'gray'):
result.r = 0.9
result.g = 0.9
result.b = 0.9
elif (color == 'white'):
result.r = 1.0
result.g = 1.0
result.b = 1.0
elif (color == 'orange'):
result.r = 1.0
result.g = 0.5
result.b = 0.0
elif (color == 'translucent_light'):
result.r = 0.1
result.g = 0.1
result.b = 0.1
result.a = 0.1
elif (color == 'translucent'):
result.r = 0.1
result.g = 0.1
result.b = 0.1
result.a = 0.25
elif (color == 'translucent_dark'):
result.r = 0.1
result.g = 0.1
result.b = 0.1
result.a = 0.5
elif (color == 'black'):
result.r = 0.0
result.g = 0.0
result.b = 0.0
elif (color == 'yellow'):
result.r = 1.0
result.g = 1.0
result.b = 0.0
elif (color == 'brown'):
result.r = 0.597
result.g = 0.296
result.b = 0.0
elif (color == 'pink'):
result.r = 1.0
result.g = 0.4
result.b = 1
elif (color == 'lime_green'):
result.r = 0.6
result.g = 1.0
result.b = 0.2
elif (color == 'clear'):
result.r=1.0
result.g=1.0
result.b=1.0
result.a=0.0
elif (color == 'purple'):
result.r = 0.597
result.g = 0.0
result.b = 0.597
elif (color == 'random'):
while True:
result.r = random.random()
result.g = random.random()
result.b = random.random()
if ((result.r + result.g + result.b) > 1.5):
break
else:
rospy.logerr("getColor() called with unknown color name '%s', defaulting to 'blue'", color)
result.r = 0.1
result.g = 0.1
result.b = 0.8
return result
def getRandomColor(self):
all_colors = []
all_colors.append('red')
all_colors.append('green')
all_colors.append('blue')
all_colors.append('grey')
all_colors.append('white')
all_colors.append('orange')
all_colors.append('yellow')
all_colors.append('brown')
all_colors.append('pink')
all_colors.append('lime_green')
all_colors.append('purple')
rand_num = random.randint(0, len(all_colors) - 1)
rand_color_name = all_colors[rand_num]
return rand_color_name
def publishSphere(self, pose, color, scale, lifetime=None):
if (self.muted == True):
return True
if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
sphere_pose = mat_to_pose(pose)
elif type(pose) == Pose:
sphere_pose = pose
elif type(pose) == Point:
pose_msg = Pose()
pose_msg.position = pose
sphere_pose = pose_msg
else:
rospy.logerr("Pose is unsupported type '%s' in publishSphere()", type(pose).__name__)
return False
if type(scale) == Vector3:
sphere_scale = scale
elif type(scale) == float:
sphere_scale = Vector3(scale, scale, scale)
else:
rospy.logerr("Scale is unsupported type '%s' in publishSphere()", type(scale).__name__)
return False
self.sphere_marker.id += 1
sphere_marker = self.sphere_marker
if lifetime == None:
sphere_marker.lifetime = rospy.Duration(0.0)
else:
sphere_marker.lifetime = rospy.Duration(lifetime)
sphere_marker.header.stamp = rospy.Time.now()
sphere_marker.scale = sphere_scale
sphere_marker.color = self.getColor(color)
sphere_marker.pose = sphere_pose
return self.publishMarker(sphere_marker)
def publishSphere2(self, pose, color, scale, lifetime=None):
if (self.muted == True):
return True
if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
sphere_pose = mat_to_pose(pose)
elif type(pose) == Pose:
sphere_pose = pose
elif type(pose) == Point:
pose_msg = Pose()
pose_msg.position = pose
sphere_pose = pose_msg
else:
rospy.logerr("Pose is unsupported type '%s' in publishSphere()", type(pose).__name__)
return False
if type(scale) == Vector3:
sphere_scale = scale
elif type(scale) == float:
sphere_scale = Vector3(scale, scale, scale)
else:
rospy.logerr("Scale is unsupported type '%s' in publishSphere()", type(scale).__name__)
return False
self.sphere_marker.id += 1
sphere_marker = self.sphere_marker2
if lifetime == None:
sphere_marker.lifetime = rospy.Duration(0.0)
else:
sphere_marker.lifetime = rospy.Duration(lifetime)
sphere_marker.header.stamp = rospy.Time.now()
sphere_marker.scale = sphere_scale
sphere_marker.color = self.getColor(color)
sphere_marker.points[0] = sphere_pose.position
sphere_marker.colors[0] = self.getColor(color)
return self.publishMarker(sphere_marker)
def publishArrow(self, pose, color, scale, lifetime=None):
if (self.muted == True):
return True
if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
arrow_pose = mat_to_pose(pose)
elif type(pose) == Pose:
arrow_pose = pose
else:
rospy.logerr("Pose is unsupported type '%s' in publishArrow()", type(pose).__name__)
return False
if type(scale) == Vector3:
arrow_scale = scale
elif type(scale) == float:
arrow_scale = Vector3(scale, 0.1*scale, 0.1*scale)
else:
rospy.logerr("Scale is unsupported type '%s' in publishArrow()", type(scale).__name__)
return False
self.arrow_marker.id += 1
arrow_marker = self.arrow_marker
if lifetime == None:
arrow_marker.lifetime = rospy.Duration(0.0)
else:
arrow_marker.lifetime = rospy.Duration(lifetime)
arrow_marker.header.stamp = rospy.Time.now()
arrow_marker.pose = arrow_pose
arrow_marker.scale = arrow_scale
arrow_marker.color = self.getColor(color)
return self.publishMarker(arrow_marker)
def publishCube(self, pose, color, scale, lifetime=None):
if (self.muted == True):
return True
if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
cube_pose = mat_to_pose(pose)
elif type(pose) == Pose:
cube_pose = pose
else:
rospy.logerr("Pose is unsupported type '%s' in publishCube()", type(pose).__name__)
return False
if type(scale) == Vector3:
cube_scale = scale
elif type(scale) == float:
cube_scale = Vector3(scale, scale, scale)
else:
rospy.logerr("Scale is unsupported type '%s' in publishCube()", type(scale).__name__)
return False
self.cube_marker.id += 1
cube_marker = self.cube_marker
if lifetime == None:
cube_marker.lifetime = rospy.Duration(0.0)
else:
cube_marker.lifetime = rospy.Duration(lifetime)
cube_marker.header.stamp = rospy.Time.now()
cube_marker.pose = cube_pose
cube_marker.scale = cube_scale
cube_marker.color = self.getColor(color)
return self.publishMarker(cube_marker)
def publishCubes(self, list_of_cubes, color, scale, lifetime=None):
if (self.muted == True):
return True
if type(list_of_cubes) != list:
rospy.logerr("list_of_cubes is unsupported type '%s' in publishCubes()", type(list_of_cubes).__name__)
return False
if type(scale) == Vector3:
cubes_scale = scale
elif type(scale) == float:
cubes_scale = Vector3(scale, scale, scale)
else:
rospy.logerr("Scale is unsupported type '%s' in publishCubes()", type(scale).__name__)
return False
self.cubes_marker.id += 1
cubes_marker = self.cubes_marker
if lifetime == None:
cubes_marker.lifetime = rospy.Duration(0.0)
else:
cubes_marker.lifetime = rospy.Duration(lifetime)
cubes_marker.header.stamp = rospy.Time.now()
cubes_marker.scale = cubes_scale
cubes_marker.color = self.getColor(color)
cubes_color = self.getColor(color)
cubes_marker.points[:] = []
cubes_marker.colors[:] = []
for i in range(0, len(list_of_cubes)):
if type(list_of_cubes[i]) == Pose:
cubes_marker.points.append(list_of_cubes[i].position)
cubes_marker.colors.append(cubes_color)
elif (type(list_of_cubes[i]) == numpy.matrix) or (type(list_of_cubes[i]) == numpy.ndarray):
pose_i = mat_to_pose(list_of_cubes[i])
cubes_marker.points.append(pose_i.position)
cubes_marker.colors.append(cubes_color)
elif type(list_of_cubes[i]) == Point:
cubes_marker.points.append(list_of_cubes[i])
cubes_marker.colors.append(cubes_color)
else:
rospy.logerr("list_of_cubes contains unsupported type '%s' in publishCubes()", type(list_of_cubes[i]).__name__)
return False
return self.publishMarker(cubes_marker)
def publishBlock(self, pose, color, scale, lifetime=None):
return self.publishCube(pose, color, scale)
def publishCylinder(self, pose, color, height, radius, lifetime=None):
if (self.muted == True):
return True
if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
cylinder_pose = mat_to_pose(pose)
elif type(pose) == Pose:
cylinder_pose = pose
else:
rospy.logerr("Pose is unsupported type '%s' in publishCylinder()", type(pose).__name__)
return False
self.cylinder_marker.id += 1
cylinder_marker = self.cylinder_marker
if lifetime == None:
cylinder_marker.lifetime = rospy.Duration(0.0)
else:
cylinder_marker.lifetime = rospy.Duration(lifetime)
cylinder_marker.header.stamp = rospy.Time.now()
cylinder_marker.pose = cylinder_pose
cylinder_marker.scale.x = radius
cylinder_marker.scale.y = radius
cylinder_marker.scale.z = height
cylinder_marker.color = self.getColor(color)
return self.publishMarker(cylinder_marker)
def publishAxis(self, pose, length, radius, lifetime=None):
if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
axis_pose = pose
elif type(pose) == Pose:
axis_pose = pose_to_mat(pose)
else:
rospy.logerr("Pose is unsupported type '%s' in publishAxis()", type(pose).__name__)
return False
t = tf.transformations.translation_matrix( (length/2.0, 0.0, 0.0) )
r = tf.transformations.rotation_matrix(numpy.pi/2.0, (0,1,0))
m = tf.transformations.concatenate_matrices(axis_pose, t, r)
x_pose = mat_to_pose(m)
self.publishCylinder(x_pose, 'red', length, radius, lifetime)
t = tf.transformations.translation_matrix( (0.0, length/2.0, 0.0) )
r = tf.transformations.rotation_matrix(numpy.pi/2.0, (1,0,0))
m = tf.transformations.concatenate_matrices(axis_pose, t, r)
y_pose = mat_to_pose(m)
self.publishCylinder(y_pose, 'green', length, radius, lifetime)
t = tf.transformations.translation_matrix( (0.0, 0.0, length/2.0) )
r = tf.transformations.rotation_matrix(0.0, (0,0,1))
m = tf.transformations.concatenate_matrices(axis_pose, t, r)
z_pose = mat_to_pose(m)
self.publishCylinder(z_pose, 'blue', length, radius, lifetime)
return True
def publishMesh(self, pose, file_name, color, scale, lifetime=None):
if (self.muted == True):
return True
if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
mesh_pose = mat_to_pose(pose)
elif type(pose) == Pose:
mesh_pose = pose
else:
rospy.logerr("Pose is unsupported type '%s' in publishMesh()", type(pose).__name__)
return False
if type(scale) == Vector3:
mesh_scale = scale
elif type(scale) == float:
mesh_scale = Vector3(scale, scale, scale)
else:
rospy.logerr("Scale is unsupported type '%s' in publishMesh()", type(scale).__name__)
return False
self.mesh_marker.id += 1
mesh_marker = self.mesh_marker
if lifetime == None:
mesh_marker.lifetime = rospy.Duration(0.0)
else:
mesh_marker.lifetime = rospy.Duration(lifetime)
mesh_marker.header.stamp = rospy.Time.now()
mesh_marker.scale = mesh_scale
if color == None:
mesh_marker.color = ColorRGBA()
else:
mesh_marker.color = self.getColor(color)
mesh_marker.pose = mesh_pose
mesh_marker.mesh_resource = file_name
mesh_marker.mesh_use_embedded_materials = True
return self.publishMarker(mesh_marker)
def publishRectangle(self, point1, point2, color, lifetime=None):
if (self.muted == True):
return True
if type(point1) == Point:
rect_point1 = point1
else:
rospy.logerr("Point1 is unsupported type '%s' in publishRectangle()", type(point1).__name__)
return False
if type(point2) == Point:
rect_point2 = point2
else:
rospy.logerr("Point2 is unsupported type '%s' in publishRectangle()", type(point2).__name__)
return False
self.rectangle_marker.id += 1
rectangle_marker = self.rectangle_marker
if lifetime == None:
rectangle_marker.lifetime = rospy.Duration(0.0)
else:
rectangle_marker.lifetime = rospy.Duration(lifetime)
rectangle_marker.header.stamp = rospy.Time.now()
rectangle_marker.color = self.getColor(color)
rect_pose = Pose()
rect_pose.position.x = (rect_point1.x - rect_point2.x) / 2.0 + rect_point2.x
rect_pose.position.y = (rect_point1.y - rect_point2.y) / 2.0 + rect_point2.y
rect_pose.position.z = (rect_point1.z - rect_point2.z) / 2.0 + rect_point2.z
rectangle_marker.pose = rect_pose
rectangle_marker.scale.x = numpy.fabs(rect_point1.x - rect_point2.x)
rectangle_marker.scale.y = numpy.fabs(rect_point1.y - rect_point2.y)
rectangle_marker.scale.z = numpy.fabs(rect_point1.z - rect_point2.z)
return self.publishMarker(rectangle_marker)
def publishPlane(self, pose, depth, width, color, lifetime=None):
if (self.muted == True):
return True
if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
rect_pose = mat_to_pose(pose)
elif type(pose) == Pose:
rect_pose = pose
else:
rospy.logerr("Pose is unsupported type '%s' in publishRectangle()", type(pose).__name__)
return False
self.rectangle_marker.id += 1
rectangle_marker = self.rectangle_marker
if lifetime == None:
rectangle_marker.lifetime = rospy.Duration(0.0)
else:
rectangle_marker.lifetime = rospy.Duration(lifetime)
rectangle_marker.header.stamp = rospy.Time.now()
rectangle_marker.color = self.getColor(color)
rectangle_marker.pose = rect_pose
rectangle_marker.scale.x = depth
rectangle_marker.scale.y = width
rectangle_marker.scale.z = 0.0
return self.publishMarker(rectangle_marker)
def publishLine(self, point1, point2, color, width, lifetime=None):
if (self.muted == True):
return True
if type(point1) == Point:
line_point1 = point1
elif type(point1) == Pose:
position = point1.position
line_point1 = Point(position.x, position.y, position.z)
elif (type(point1) == numpy.matrix) or (type(point1) == numpy.ndarray):
pose = mat_to_pose(point1)
position = pose.position
line_point1 = Point(position.x, position.y, position.z)
else:
rospy.logerr("Point1 is unsupported type '%s' in publishLine()", type(point1).__name__)
return False
if type(point2) == Point:
line_point2 = point2
elif type(point2) == Pose:
position = point2.position
line_point2 = Point(position.x, position.y, position.z)
elif (type(point2) == numpy.matrix) or (type(point2) == numpy.ndarray):
pose = mat_to_pose(point2)
position = pose.position
line_point2 = Point(position.x, position.y, position.z)
else:
rospy.logerr("Point2 is unsupported type '%s' in publishLine()", type(point2).__name__)
return False
self.line_marker.id += 1
line_marker = self.line_marker
if lifetime == None:
line_marker.lifetime = rospy.Duration(0.0)
else:
line_marker.lifetime = rospy.Duration(lifetime)
line_marker.header.stamp = rospy.Time.now()
line_marker.color = self.getColor(color)
line_marker.points[:] = []
line_marker.points.append(line_point1)
line_marker.points.append(line_point2)
line_marker.scale.x = width
return self.publishMarker(line_marker)
def publishPath(self, path, color, width, lifetime=None):
if (self.muted == True):
return True
if type(path) == list:
path_path = path
else:
rospy.logerr("Path is unsupported type '%s' in publishPath()", type(path).__name__)
return False
self.path_marker.id += 1
path_marker = self.path_marker
if lifetime == None:
path_marker.lifetime = rospy.Duration(0.0)
else:
path_marker.lifetime = rospy.Duration(lifetime)
path_marker.header.stamp = rospy.Time.now()
path_marker.scale.x = width
path_color = self.getColor(color)
path_marker.points[:] = []
path_marker.colors[:] = []
for i in range(1, len(path)):
if type(path[i]) == Point:
path_marker.points.append(path[i-1])
path_marker.colors.append(path_color)
path_marker.points.append(path[i])
path_marker.colors.append(path_color)
elif type(path[i]) == Pose:
position = path[i-1].position
point = Point(position.x, position.y, position.z)
path_marker.points.append(point)
path_marker.colors.append(path_color)
position = path[i].position
point = Point(position.x, position.y, position.z)
path_marker.points.append(point)
path_marker.colors.append(path_color)
elif (type(path[i]) == numpy.matrix) or (type(path[i]) == numpy.ndarray):
pose = mat_to_pose(path[i-1])
position = pose.position
point = Point(position.x, position.y, position.z)
path_marker.points.append(point)
path_marker.colors.append(path_color)
pose = mat_to_pose(path[i])
position = pose.position
point = Point(position.x, position.y, position.z)
path_marker.points.append(point)
path_marker.colors.append(path_color)
else:
rospy.logerr("path list contains unsupported type '%s' in publishPath()", type(path[i]).__name__)
return False
return self.publishMarker(path_marker)
def publishPolygon(self, polygon, color, width, lifetime=None):
if (self.muted == True):
return True
if type(polygon) == Polygon:
polygon_msg = polygon
else:
rospy.logerr("Path is unsupported type '%s' in publishPolygon()", type(polygon).__name__)
return False
polygon_path = []
for i in range(0, len(polygon_msg.points)):
x = polygon_msg.points[i].x
y = polygon_msg.points[i].y
z = polygon_msg.points[i].z
polygon_path.append( Point(x,y,z) )
x = polygon_msg.points[0].x
y = polygon_msg.points[0].y
z = polygon_msg.points[0].z
polygon_path.append( Point(x,y,z) )
return self.publishPath(polygon_path, color, width, lifetime)
def publishSpheres(self, list_of_spheres, color, scale, lifetime=None):
if (self.muted == True):
return True
if type(list_of_spheres) != list:
rospy.logerr("list_of_spheres is unsupported type '%s' in publishSpheres()", type(list_of_spheres).__name__)
return False
if type(scale) == Vector3:
spheres_scale = scale
elif type(scale) == float:
spheres_scale = Vector3(scale, scale, scale)
else:
rospy.logerr("Scale is unsupported type '%s' in publishSpheres()", type(scale).__name__)
return False
self.spheres_marker.id += 1
spheres_marker = self.spheres_marker
if lifetime == None:
spheres_marker.lifetime = rospy.Duration(0.0)
else:
spheres_marker.lifetime = rospy.Duration(lifetime)
spheres_marker.header.stamp = rospy.Time.now()
spheres_marker.scale = spheres_scale
spheres_marker.color = self.getColor(color)
spheres_color = self.getColor(color)
spheres_marker.points[:] = []
spheres_marker.colors[:] = []
for i in range(0, len(list_of_spheres)):
if type(list_of_spheres[i]) == Pose:
spheres_marker.points.append( list_of_spheres[i].position )
spheres_marker.colors.append(spheres_color)
elif (type(list_of_spheres[i]) == numpy.matrix) or (type(list_of_spheres[i]) == numpy.ndarray):
pose_i = mat_to_pose(list_of_spheres[i])
spheres_marker.points.append( pose_i.position )
spheres_marker.colors.append(spheres_color)
elif type(list_of_spheres[i]) == Point:
spheres_marker.points.append(list_of_spheres[i])
spheres_marker.colors.append(spheres_color)
else:
rospy.logerr("list_of_sphere contains unsupported type '%s' in publishSphere()", type(list_of_spheres[i]).__name__)
return False
return self.publishMarker(spheres_marker)
def publishText(self, pose, text, color, scale, lifetime=None):
if (self.muted == True):
return True
if (type(pose) == numpy.matrix) or (type(pose) == numpy.ndarray):
text_pose = mat_to_pose(pose)
elif type(pose) == Pose:
text_pose = pose
else:
rospy.logerr("Pose is unsupported type '%s' in publishText()", type(pose).__name__)
return False
if type(scale) == Vector3:
text_scale = scale
elif type(scale) == float:
text_scale = Vector3(scale, scale, scale)
else:
rospy.logerr("Scale is unsupported type '%s' in publishText()", type(scale).__name__)
return False
self.text_marker.id += 1
text_marker = self.text_marker
if lifetime == None:
text_marker.lifetime = rospy.Duration(0.0)
else:
text_marker.lifetime = rospy.Duration(lifetime)
text_marker.header.stamp = rospy.Time.now()
text_marker.pose = text_pose
text_marker.scale = text_scale
text_marker.color = self.getColor(color)
text_marker.text = text
return self.publishMarker(text_marker)
def pose_to_mat(pose):
quat = [pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w]
pos = numpy.matrix([pose.position.x, pose.position.y, pose.position.z]).T
mat = numpy.matrix(tf.transformations.quaternion_matrix(quat))
mat[0:3, 3] = pos
return mat
def mat_to_pose(mat):
pose = Pose()
pose.position.x = mat[0,3]
pose.position.y = mat[1,3]
pose.position.z = mat[2,3]
quat = tf.transformations.quaternion_from_matrix(mat)
pose.orientation.x = quat[0]
pose.orientation.y = quat[1]
pose.orientation.z = quat[2]
pose.orientation.w = quat[3]
return pose
| true | true |
1c2db61108d3ddcf50a9cebbf14fa96f71e52ba6 | 20,697 | py | Python | vendor/github.com/elastic/beats/libbeat/tests/system/beat/beat.py | opheelia/Blockchainbeat | cf2b2ab5778bbc88bb0346ce7624a3dda4438f74 | [
"Apache-2.0"
] | null | null | null | vendor/github.com/elastic/beats/libbeat/tests/system/beat/beat.py | opheelia/Blockchainbeat | cf2b2ab5778bbc88bb0346ce7624a3dda4438f74 | [
"Apache-2.0"
] | null | null | null | vendor/github.com/elastic/beats/libbeat/tests/system/beat/beat.py | opheelia/Blockchainbeat | cf2b2ab5778bbc88bb0346ce7624a3dda4438f74 | [
"Apache-2.0"
] | 1 | 2019-08-23T11:02:35.000Z | 2019-08-23T11:02:35.000Z | import subprocess
import jinja2
import unittest
import os
import shutil
import json
import signal
import sys
import time
import yaml
import hashlib
import re
from datetime import datetime, timedelta
from .compose import ComposeMixin
BEAT_REQUIRED_FIELDS = ["@timestamp",
"beat.name", "beat.hostname", "beat.version"]
INTEGRATION_TESTS = os.environ.get('INTEGRATION_TESTS', False)
yaml_cache = {}
REGEXP_TYPE = type(re.compile("t"))
class TimeoutError(Exception):
pass
class Proc(object):
"""
Slim wrapper on subprocess.Popen that redirects
both stdout and stderr to a file on disk and makes
sure to stop the process and close the output file when
the object gets collected.
"""
def __init__(self, args, outputfile, env={}):
self.args = args
self.output = open(outputfile, "ab")
self.stdin_read, self.stdin_write = os.pipe()
self.env = env
def start(self):
# ensure that the environment is inherited to the subprocess.
variables = os.environ.copy()
variables = variables.update(self.env)
if sys.platform.startswith("win"):
self.proc = subprocess.Popen(
self.args,
stdin=self.stdin_read,
stdout=self.output,
stderr=subprocess.STDOUT,
bufsize=0,
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP,
env=variables)
else:
self.proc = subprocess.Popen(
self.args,
stdin=self.stdin_read,
stdout=self.output,
stderr=subprocess.STDOUT,
bufsize=0,
env=variables)
# If a "No such file or directory" error points you here, run
# "make metricbeat.test" on metricbeat folder
return self.proc
def kill(self):
if sys.platform.startswith("win"):
# proc.terminate on Windows does not initiate a graceful shutdown
# through the processes signal handlers it just kills it hard. So
# this sends a SIGBREAK. You cannot sends a SIGINT (CTRL_C_EVENT)
# to a process group in Windows, otherwise Ctrl+C would be
# sent.
self.proc.send_signal(signal.CTRL_BREAK_EVENT)
else:
self.proc.terminate()
def wait(self):
try:
return self.proc.wait()
finally:
self.output.close()
def check_wait(self, exit_code=0):
actual_exit_code = self.wait()
assert actual_exit_code == exit_code, "Expected exit code to be %d, but it was %d" % (
exit_code, actual_exit_code)
return actual_exit_code
def kill_and_wait(self):
self.kill()
os.close(self.stdin_write)
return self.wait()
def check_kill_and_wait(self, exit_code=0):
self.kill()
os.close(self.stdin_write)
return self.check_wait(exit_code=exit_code)
def __del__(self):
# Ensure the process is stopped.
try:
self.proc.terminate()
self.proc.kill()
except:
pass
# Ensure the output is closed.
try:
self.output.close()
except:
pass
class TestCase(unittest.TestCase, ComposeMixin):
@classmethod
def setUpClass(self):
# Path to test binary
if not hasattr(self, 'beat_name'):
self.beat_name = "beat"
if not hasattr(self, 'beat_path'):
self.beat_path = "."
# Path to test binary
if not hasattr(self, 'test_binary'):
self.test_binary = os.path.abspath(self.beat_path + "/" + self.beat_name + ".test")
# Create build path
build_dir = self.beat_path + "/build"
self.build_path = build_dir + "/system-tests/"
# Start the containers needed to run these tests
self.compose_up()
@classmethod
def tearDownClass(self):
self.compose_down()
def run_beat(self,
cmd=None,
config=None,
output=None,
logging_args=["-e", "-v", "-d", "*"],
extra_args=[],
exit_code=None,
env={}):
"""
Executes beat.
Waits for the process to finish before returning to
the caller.
"""
proc = self.start_beat(cmd=cmd, config=config, output=output,
logging_args=logging_args,
extra_args=extra_args, env=env)
if exit_code != None:
return proc.check_wait(exit_code)
return proc.wait()
def start_beat(self,
cmd=None,
config=None,
output=None,
logging_args=["-e", "-v", "-d", "*"],
extra_args=[],
env={}):
"""
Starts beat and returns the process handle. The
caller is responsible for stopping / waiting for the
Proc instance.
"""
# Init defaults
if cmd is None:
cmd = self.test_binary
if config is None:
config = self.beat_name + ".yml"
if output is None:
output = self.beat_name + ".log"
args = [cmd,
"-systemTest",
"-test.coverprofile",
os.path.join(self.working_dir, "coverage.cov"),
"-path.home", os.path.normpath(self.working_dir),
"-c", os.path.join(self.working_dir, config),
]
if logging_args:
args.extend(logging_args)
if extra_args:
args.extend(extra_args)
proc = Proc(args, os.path.join(self.working_dir, output), env)
proc.start()
return proc
def render_config_template(self, template_name=None,
output=None, **kargs):
# Init defaults
if template_name is None:
template_name = self.beat_name
template_path = "./tests/system/config/" + template_name + ".yml.j2"
if output is None:
output = self.beat_name + ".yml"
template = self.template_env.get_template(template_path)
kargs["beat"] = self
output_str = template.render(**kargs)
output_path = os.path.join(self.working_dir, output)
with open(output_path, "wb") as f:
os.chmod(output_path, 0o600)
f.write(output_str.encode('utf8'))
# Returns output as JSON object with flattened fields (. notation)
def read_output(self,
output_file=None,
required_fields=None):
# Init defaults
if output_file is None:
output_file = "output/" + self.beat_name
jsons = []
with open(os.path.join(self.working_dir, output_file), "r") as f:
for line in f:
if len(line) == 0 or line[len(line) - 1] != "\n":
# hit EOF
break
try:
jsons.append(self.flatten_object(json.loads(
line, object_pairs_hook=self.json_raise_on_duplicates), []))
except:
print("Fail to load the json {}".format(line))
raise
self.all_have_fields(jsons, required_fields or BEAT_REQUIRED_FIELDS)
return jsons
# Returns output as JSON object
def read_output_json(self, output_file=None):
# Init defaults
if output_file is None:
output_file = "output/" + self.beat_name
jsons = []
with open(os.path.join(self.working_dir, output_file), "r") as f:
for line in f:
if len(line) == 0 or line[len(line) - 1] != "\n":
# hit EOF
break
event = json.loads(line, object_pairs_hook=self.json_raise_on_duplicates)
del event['@metadata']
jsons.append(event)
return jsons
def json_raise_on_duplicates(self, ordered_pairs):
"""Reject duplicate keys. To be used as a custom hook in JSON unmarshaling
to error out in case of any duplicates in the keys."""
d = {}
for k, v in ordered_pairs:
if k in d:
raise ValueError("duplicate key: %r" % (k,))
else:
d[k] = v
return d
def copy_files(self, files, source_dir="files/"):
for file_ in files:
shutil.copy(os.path.join(source_dir, file_),
self.working_dir)
def setUp(self):
self.template_env = jinja2.Environment(
loader=jinja2.FileSystemLoader([
self.beat_path,
os.path.abspath(os.path.join(self.beat_path, "../libbeat"))
])
)
# create working dir
self.working_dir = os.path.abspath(os.path.join(
self.build_path + "run", self.id()))
if os.path.exists(self.working_dir):
shutil.rmtree(self.working_dir)
os.makedirs(self.working_dir)
fields_yml = os.path.join(self.beat_path, "fields.yml")
# Only add it if it exists
if os.path.isfile(fields_yml):
shutil.copyfile(fields_yml, os.path.join(self.working_dir, "fields.yml"))
try:
# update the last_run link
if os.path.islink(self.build_path + "last_run"):
os.unlink(self.build_path + "last_run")
os.symlink(self.build_path + "run/{}".format(self.id()),
self.build_path + "last_run")
except:
# symlink is best effort and can fail when
# running tests in parallel
pass
def wait_until(self, cond, max_timeout=10, poll_interval=0.1, name="cond"):
"""
Waits until the cond function returns true,
or until the max_timeout is reached. Calls the cond
function every poll_interval seconds.
If the max_timeout is reached before cond() returns
true, an exception is raised.
"""
start = datetime.now()
while not cond():
if datetime.now() - start > timedelta(seconds=max_timeout):
raise TimeoutError("Timeout waiting for '{}' to be true. ".format(name) +
"Waited {} seconds.".format(max_timeout))
time.sleep(poll_interval)
def get_log(self, logfile=None):
"""
Returns the log as a string.
"""
if logfile is None:
logfile = self.beat_name + ".log"
with open(os.path.join(self.working_dir, logfile), 'r') as f:
data = f.read()
return data
def wait_log_contains(self, msg, logfile=None,
max_timeout=10, poll_interval=0.1,
name="log_contains",
ignore_case=False):
self.wait_until(
cond=lambda: self.log_contains(msg, logfile, ignore_case=ignore_case),
max_timeout=max_timeout,
poll_interval=poll_interval,
name=name)
def log_contains(self, msg, logfile=None, ignore_case=False):
"""
Returns true if the give logfile contains the given message.
Note that the msg must be present in a single line.
"""
return self.log_contains_count(msg, logfile, ignore_case=ignore_case) > 0
def log_contains_count(self, msg, logfile=None, ignore_case=False):
"""
Returns the number of appearances of the given string in the log file
"""
is_regexp = type(msg) == REGEXP_TYPE
counter = 0
if ignore_case:
msg = msg.lower()
# Init defaults
if logfile is None:
logfile = self.beat_name + ".log"
try:
with open(os.path.join(self.working_dir, logfile), "r") as f:
for line in f:
if is_regexp:
if msg.search(line) is not None:
counter = counter + 1
continue
if ignore_case:
line = line.lower()
if line.find(msg) >= 0:
counter = counter + 1
except IOError:
counter = -1
return counter
def output_lines(self, output_file=None):
""" Count number of lines in a file."""
if output_file is None:
output_file = "output/" + self.beat_name
try:
with open(os.path.join(self.working_dir, output_file), "r") as f:
return sum([1 for line in f])
except IOError:
return 0
def output_has(self, lines, output_file=None):
"""
Returns true if the output has a given number of lines.
"""
# Init defaults
if output_file is None:
output_file = "output/" + self.beat_name
try:
with open(os.path.join(self.working_dir, output_file), "r") as f:
return len([1 for line in f]) == lines
except IOError:
return False
def output_has_message(self, message, output_file=None):
"""
Returns true if the output has the given message field.
"""
try:
return any(line for line in self.read_output(output_file=output_file, required_fields=["message"])
if line.get("message") == message)
except (IOError, TypeError):
return False
def all_have_fields(self, objs, fields):
"""
Checks that the given list of output objects have
all the given fields.
Raises Exception if not true.
"""
for field in fields:
if not all([field in o for o in objs]):
raise Exception("Not all objects have a '{}' field"
.format(field))
def all_have_only_fields(self, objs, fields):
"""
Checks if the given list of output objects have all
and only the given fields.
Raises Exception if not true.
"""
self.all_have_fields(objs, fields)
self.all_fields_are_expected(objs, fields)
def all_fields_are_expected(self, objs, expected_fields,
dict_fields=[]):
"""
Checks that all fields in the objects are from the
given list of expected fields.
"""
for o in objs:
for key in o.keys():
known = key in dict_fields or key in expected_fields
ismeta = key.startswith('@metadata.')
if not(known or ismeta):
raise Exception("Unexpected key '{}' found"
.format(key))
def load_fields(self, fields_doc=None):
"""
Returns a list of fields to expect in the output dictionaries
and a second list that contains the fields that have a
dictionary type.
Reads these lists from the fields documentation.
"""
if fields_doc is None:
fields_doc = self.beat_path + "/fields.yml"
def extract_fields(doc_list, name):
fields = []
dictfields = []
if doc_list is None:
return fields, dictfields
for field in doc_list:
# Skip fields without name entry
if "name" not in field:
continue
# Chain together names
if name != "":
newName = name + "." + field["name"]
else:
newName = field["name"]
if field.get("type") == "group":
subfields, subdictfields = extract_fields(field["fields"], newName)
fields.extend(subfields)
dictfields.extend(subdictfields)
else:
fields.append(newName)
if field.get("type") in ["object", "geo_point"]:
dictfields.append(newName)
return fields, dictfields
global yaml_cache
# TODO: Make fields_doc path more generic to work with beat-generator. If it can't find file
# "fields.yml" you should run "make update" on metricbeat folder
with open(fields_doc, "r") as f:
path = os.path.abspath(os.path.dirname(__file__) + "../../../../fields.yml")
if not os.path.isfile(path):
path = os.path.abspath(os.path.dirname(__file__) + "../../../../_meta/fields.common.yml")
with open(path) as f2:
content = f2.read()
content += f.read()
hash = hashlib.md5(content).hexdigest()
doc = ""
if hash in yaml_cache:
doc = yaml_cache[hash]
else:
doc = yaml.safe_load(content)
yaml_cache[hash] = doc
fields = []
dictfields = []
for item in doc:
subfields, subdictfields = extract_fields(item["fields"], "")
fields.extend(subfields)
dictfields.extend(subdictfields)
return fields, dictfields
def flatten_object(self, obj, dict_fields, prefix=""):
result = {}
for key, value in obj.items():
if isinstance(value, dict) and prefix + key not in dict_fields:
new_prefix = prefix + key + "."
result.update(self.flatten_object(value, dict_fields,
new_prefix))
else:
result[prefix + key] = value
return result
def copy_files(self, files, source_dir="", target_dir=""):
if not source_dir:
source_dir = self.beat_path + "/tests/files/"
if target_dir:
target_dir = os.path.join(self.working_dir, target_dir)
else:
target_dir = self.working_dir
for file_ in files:
shutil.copy(os.path.join(source_dir, file_),
target_dir)
def output_count(self, pred, output_file=None):
"""
Returns true if the output line count predicate returns true
"""
# Init defaults
if output_file is None:
output_file = "output/" + self.beat_name
try:
with open(os.path.join(self.working_dir, output_file), "r") as f:
return pred(len([1 for line in f]))
except IOError:
return False
def get_elasticsearch_url(self):
"""
Returns an elasticsearch.Elasticsearch instance built from the
env variables like the integration tests.
"""
return "http://{host}:{port}".format(
host=os.getenv("ES_HOST", "localhost"),
port=os.getenv("ES_PORT", "9200"),
)
def get_kibana_url(self):
"""
Returns kibana host URL
"""
return "http://{host}:{port}".format(
host=os.getenv("KIBANA_HOST", "localhost"),
port=os.getenv("KIBANA_PORT", "5601"),
)
def assert_fields_are_documented(self, evt):
"""
Assert that all keys present in evt are documented in fields.yml.
This reads from the global fields.yml, means `make collect` has to be run before the check.
"""
expected_fields, dict_fields = self.load_fields()
flat = self.flatten_object(evt, dict_fields)
def field_pattern_match(pattern, key):
pattern_fields = pattern.split(".")
key_fields = key.split(".")
if len(pattern_fields) != len(key_fields):
return False
for i in range(len(pattern_fields)):
if pattern_fields[i] == "*":
continue
if pattern_fields[i] != key_fields[i]:
return False
return True
def is_documented(key):
if key in expected_fields:
return True
for pattern in (f for f in expected_fields if "*" in f):
if field_pattern_match(pattern, key):
return True
return False
for key in flat.keys():
metaKey = key.startswith('@metadata.')
if not(is_documented(key) or metaKey):
raise Exception("Key '{}' found in event is not documented!".format(key))
| 32.90461 | 110 | 0.54119 | import subprocess
import jinja2
import unittest
import os
import shutil
import json
import signal
import sys
import time
import yaml
import hashlib
import re
from datetime import datetime, timedelta
from .compose import ComposeMixin
BEAT_REQUIRED_FIELDS = ["@timestamp",
"beat.name", "beat.hostname", "beat.version"]
INTEGRATION_TESTS = os.environ.get('INTEGRATION_TESTS', False)
yaml_cache = {}
REGEXP_TYPE = type(re.compile("t"))
class TimeoutError(Exception):
pass
class Proc(object):
def __init__(self, args, outputfile, env={}):
self.args = args
self.output = open(outputfile, "ab")
self.stdin_read, self.stdin_write = os.pipe()
self.env = env
def start(self):
variables = os.environ.copy()
variables = variables.update(self.env)
if sys.platform.startswith("win"):
self.proc = subprocess.Popen(
self.args,
stdin=self.stdin_read,
stdout=self.output,
stderr=subprocess.STDOUT,
bufsize=0,
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP,
env=variables)
else:
self.proc = subprocess.Popen(
self.args,
stdin=self.stdin_read,
stdout=self.output,
stderr=subprocess.STDOUT,
bufsize=0,
env=variables)
return self.proc
def kill(self):
if sys.platform.startswith("win"):
self.proc.send_signal(signal.CTRL_BREAK_EVENT)
else:
self.proc.terminate()
def wait(self):
try:
return self.proc.wait()
finally:
self.output.close()
def check_wait(self, exit_code=0):
actual_exit_code = self.wait()
assert actual_exit_code == exit_code, "Expected exit code to be %d, but it was %d" % (
exit_code, actual_exit_code)
return actual_exit_code
def kill_and_wait(self):
self.kill()
os.close(self.stdin_write)
return self.wait()
def check_kill_and_wait(self, exit_code=0):
self.kill()
os.close(self.stdin_write)
return self.check_wait(exit_code=exit_code)
def __del__(self):
try:
self.proc.terminate()
self.proc.kill()
except:
pass
try:
self.output.close()
except:
pass
class TestCase(unittest.TestCase, ComposeMixin):
@classmethod
def setUpClass(self):
if not hasattr(self, 'beat_name'):
self.beat_name = "beat"
if not hasattr(self, 'beat_path'):
self.beat_path = "."
if not hasattr(self, 'test_binary'):
self.test_binary = os.path.abspath(self.beat_path + "/" + self.beat_name + ".test")
build_dir = self.beat_path + "/build"
self.build_path = build_dir + "/system-tests/"
self.compose_up()
@classmethod
def tearDownClass(self):
self.compose_down()
def run_beat(self,
cmd=None,
config=None,
output=None,
logging_args=["-e", "-v", "-d", "*"],
extra_args=[],
exit_code=None,
env={}):
proc = self.start_beat(cmd=cmd, config=config, output=output,
logging_args=logging_args,
extra_args=extra_args, env=env)
if exit_code != None:
return proc.check_wait(exit_code)
return proc.wait()
def start_beat(self,
cmd=None,
config=None,
output=None,
logging_args=["-e", "-v", "-d", "*"],
extra_args=[],
env={}):
if cmd is None:
cmd = self.test_binary
if config is None:
config = self.beat_name + ".yml"
if output is None:
output = self.beat_name + ".log"
args = [cmd,
"-systemTest",
"-test.coverprofile",
os.path.join(self.working_dir, "coverage.cov"),
"-path.home", os.path.normpath(self.working_dir),
"-c", os.path.join(self.working_dir, config),
]
if logging_args:
args.extend(logging_args)
if extra_args:
args.extend(extra_args)
proc = Proc(args, os.path.join(self.working_dir, output), env)
proc.start()
return proc
def render_config_template(self, template_name=None,
output=None, **kargs):
if template_name is None:
template_name = self.beat_name
template_path = "./tests/system/config/" + template_name + ".yml.j2"
if output is None:
output = self.beat_name + ".yml"
template = self.template_env.get_template(template_path)
kargs["beat"] = self
output_str = template.render(**kargs)
output_path = os.path.join(self.working_dir, output)
with open(output_path, "wb") as f:
os.chmod(output_path, 0o600)
f.write(output_str.encode('utf8'))
def read_output(self,
output_file=None,
required_fields=None):
if output_file is None:
output_file = "output/" + self.beat_name
jsons = []
with open(os.path.join(self.working_dir, output_file), "r") as f:
for line in f:
if len(line) == 0 or line[len(line) - 1] != "\n":
break
try:
jsons.append(self.flatten_object(json.loads(
line, object_pairs_hook=self.json_raise_on_duplicates), []))
except:
print("Fail to load the json {}".format(line))
raise
self.all_have_fields(jsons, required_fields or BEAT_REQUIRED_FIELDS)
return jsons
def read_output_json(self, output_file=None):
if output_file is None:
output_file = "output/" + self.beat_name
jsons = []
with open(os.path.join(self.working_dir, output_file), "r") as f:
for line in f:
if len(line) == 0 or line[len(line) - 1] != "\n":
break
event = json.loads(line, object_pairs_hook=self.json_raise_on_duplicates)
del event['@metadata']
jsons.append(event)
return jsons
def json_raise_on_duplicates(self, ordered_pairs):
d = {}
for k, v in ordered_pairs:
if k in d:
raise ValueError("duplicate key: %r" % (k,))
else:
d[k] = v
return d
def copy_files(self, files, source_dir="files/"):
for file_ in files:
shutil.copy(os.path.join(source_dir, file_),
self.working_dir)
def setUp(self):
self.template_env = jinja2.Environment(
loader=jinja2.FileSystemLoader([
self.beat_path,
os.path.abspath(os.path.join(self.beat_path, "../libbeat"))
])
)
self.working_dir = os.path.abspath(os.path.join(
self.build_path + "run", self.id()))
if os.path.exists(self.working_dir):
shutil.rmtree(self.working_dir)
os.makedirs(self.working_dir)
fields_yml = os.path.join(self.beat_path, "fields.yml")
if os.path.isfile(fields_yml):
shutil.copyfile(fields_yml, os.path.join(self.working_dir, "fields.yml"))
try:
if os.path.islink(self.build_path + "last_run"):
os.unlink(self.build_path + "last_run")
os.symlink(self.build_path + "run/{}".format(self.id()),
self.build_path + "last_run")
except:
pass
def wait_until(self, cond, max_timeout=10, poll_interval=0.1, name="cond"):
start = datetime.now()
while not cond():
if datetime.now() - start > timedelta(seconds=max_timeout):
raise TimeoutError("Timeout waiting for '{}' to be true. ".format(name) +
"Waited {} seconds.".format(max_timeout))
time.sleep(poll_interval)
def get_log(self, logfile=None):
if logfile is None:
logfile = self.beat_name + ".log"
with open(os.path.join(self.working_dir, logfile), 'r') as f:
data = f.read()
return data
def wait_log_contains(self, msg, logfile=None,
max_timeout=10, poll_interval=0.1,
name="log_contains",
ignore_case=False):
self.wait_until(
cond=lambda: self.log_contains(msg, logfile, ignore_case=ignore_case),
max_timeout=max_timeout,
poll_interval=poll_interval,
name=name)
def log_contains(self, msg, logfile=None, ignore_case=False):
return self.log_contains_count(msg, logfile, ignore_case=ignore_case) > 0
def log_contains_count(self, msg, logfile=None, ignore_case=False):
is_regexp = type(msg) == REGEXP_TYPE
counter = 0
if ignore_case:
msg = msg.lower()
if logfile is None:
logfile = self.beat_name + ".log"
try:
with open(os.path.join(self.working_dir, logfile), "r") as f:
for line in f:
if is_regexp:
if msg.search(line) is not None:
counter = counter + 1
continue
if ignore_case:
line = line.lower()
if line.find(msg) >= 0:
counter = counter + 1
except IOError:
counter = -1
return counter
def output_lines(self, output_file=None):
if output_file is None:
output_file = "output/" + self.beat_name
try:
with open(os.path.join(self.working_dir, output_file), "r") as f:
return sum([1 for line in f])
except IOError:
return 0
def output_has(self, lines, output_file=None):
if output_file is None:
output_file = "output/" + self.beat_name
try:
with open(os.path.join(self.working_dir, output_file), "r") as f:
return len([1 for line in f]) == lines
except IOError:
return False
    def output_has_message(self, message, output_file=None):
        """Return True when any output event's 'message' field equals *message*.

        Delegates to read_output(); a missing or malformed output file
        yields False rather than raising.
        """
        try:
            return any(line for line in self.read_output(output_file=output_file, required_fields=["message"])
                       if line.get("message") == message)
        except (IOError, TypeError):
            return False
def all_have_fields(self, objs, fields):
for field in fields:
if not all([field in o for o in objs]):
raise Exception("Not all objects have a '{}' field"
.format(field))
    def all_have_only_fields(self, objs, fields):
        """Raise unless every object has exactly the given fields (no extras)."""
        self.all_have_fields(objs, fields)
        self.all_fields_are_expected(objs, fields)
def all_fields_are_expected(self, objs, expected_fields,
dict_fields=[]):
for o in objs:
for key in o.keys():
known = key in dict_fields or key in expected_fields
ismeta = key.startswith('@metadata.')
if not(known or ismeta):
raise Exception("Unexpected key '{}' found"
.format(key))
    def load_fields(self, fields_doc=None):
        """Parse fields.yml and return (field names, dict-typed field names).

        The beat's fields.yml is concatenated with the repository-level
        common fields file, parsed as YAML (memoized in the module-level
        yaml_cache by md5 of the combined content), and flattened into
        dotted field names. Fields of type 'object'/'geo_point' are also
        reported separately so callers can treat them as opaque dicts.
        """
        if fields_doc is None:
            fields_doc = self.beat_path + "/fields.yml"
        def extract_fields(doc_list, name):
            # Recursively walk a 'fields' list, building dotted names under
            # *name*; 'group' entries recurse, leaf entries are collected.
            fields = []
            dictfields = []
            if doc_list is None:
                return fields, dictfields
            for field in doc_list:
                if "name" not in field:
                    continue
                if name != "":
                    newName = name + "." + field["name"]
                else:
                    newName = field["name"]
                if field.get("type") == "group":
                    subfields, subdictfields = extract_fields(field["fields"], newName)
                    fields.extend(subfields)
                    dictfields.extend(subdictfields)
                else:
                    fields.append(newName)
                    if field.get("type") in ["object", "geo_point"]:
                        dictfields.append(newName)
            return fields, dictfields
        global yaml_cache
        # If fields.yml is stale, run "make update" in the beat folder.
        with open(fields_doc, "r") as f:
            # Prefer the repo-root fields.yml; fall back to the legacy
            # _meta/fields.common.yml location.
            path = os.path.abspath(os.path.dirname(__file__) + "../../../../fields.yml")
            if not os.path.isfile(path):
                path = os.path.abspath(os.path.dirname(__file__) + "../../../../_meta/fields.common.yml")
            with open(path) as f2:
                content = f2.read()
            content += f.read()
            # NOTE(review): md5 over a str works on Python 2; Python 3 would
            # require bytes — confirm the supported interpreter version.
            hash = hashlib.md5(content).hexdigest()
            doc = ""
            if hash in yaml_cache:
                doc = yaml_cache[hash]
            else:
                doc = yaml.safe_load(content)
                yaml_cache[hash] = doc
        fields = []
        dictfields = []
        for item in doc:
            subfields, subdictfields = extract_fields(item["fields"], "")
            fields.extend(subfields)
            dictfields.extend(subdictfields)
        return fields, dictfields
def flatten_object(self, obj, dict_fields, prefix=""):
result = {}
for key, value in obj.items():
if isinstance(value, dict) and prefix + key not in dict_fields:
new_prefix = prefix + key + "."
result.update(self.flatten_object(value, dict_fields,
new_prefix))
else:
result[prefix + key] = value
return result
def copy_files(self, files, source_dir="", target_dir=""):
if not source_dir:
source_dir = self.beat_path + "/tests/files/"
if target_dir:
target_dir = os.path.join(self.working_dir, target_dir)
else:
target_dir = self.working_dir
for file_ in files:
shutil.copy(os.path.join(source_dir, file_),
target_dir)
def output_count(self, pred, output_file=None):
# Init defaults
if output_file is None:
output_file = "output/" + self.beat_name
try:
with open(os.path.join(self.working_dir, output_file), "r") as f:
return pred(len([1 for line in f]))
except IOError:
return False
def get_elasticsearch_url(self):
return "http://{host}:{port}".format(
host=os.getenv("ES_HOST", "localhost"),
port=os.getenv("ES_PORT", "9200"),
)
def get_kibana_url(self):
return "http://{host}:{port}".format(
host=os.getenv("KIBANA_HOST", "localhost"),
port=os.getenv("KIBANA_PORT", "5601"),
)
def assert_fields_are_documented(self, evt):
expected_fields, dict_fields = self.load_fields()
flat = self.flatten_object(evt, dict_fields)
def field_pattern_match(pattern, key):
pattern_fields = pattern.split(".")
key_fields = key.split(".")
if len(pattern_fields) != len(key_fields):
return False
for i in range(len(pattern_fields)):
if pattern_fields[i] == "*":
continue
if pattern_fields[i] != key_fields[i]:
return False
return True
def is_documented(key):
if key in expected_fields:
return True
for pattern in (f for f in expected_fields if "*" in f):
if field_pattern_match(pattern, key):
return True
return False
for key in flat.keys():
metaKey = key.startswith('@metadata.')
if not(is_documented(key) or metaKey):
raise Exception("Key '{}' found in event is not documented!".format(key))
| true | true |
1c2db69ea1868e18381c8304ddbb18cbc4d74acc | 3,381 | py | Python | ldaptor/protocols/ldap/proxy.py | tv42/ldaptor | 3f227602c8c021b9e943136a2dc8d7db44a11e50 | [
"MIT"
] | 1 | 2015-11-25T04:01:26.000Z | 2015-11-25T04:01:26.000Z | ldaptor/protocols/ldap/proxy.py | tv42/ldaptor | 3f227602c8c021b9e943136a2dc8d7db44a11e50 | [
"MIT"
] | null | null | null | ldaptor/protocols/ldap/proxy.py | tv42/ldaptor | 3f227602c8c021b9e943136a2dc8d7db44a11e50 | [
"MIT"
] | 2 | 2019-11-06T02:14:10.000Z | 2022-01-10T08:34:11.000Z | """LDAP protocol proxy server"""
from twisted.internet import reactor, defer
from ldaptor.protocols.ldap import ldapserver, ldapconnector, ldapclient
from ldaptor.protocols import pureldap
class Proxy(ldapserver.BaseLDAPServer):
    """LDAP proxy: forwards every incoming request to an upstream server.

    The upstream client connection is opened lazily in connectionMade();
    requests that arrive before it is ready are queued per connection and
    flushed once the client protocol is available.
    """
    protocol = ldapclient.LDAPClient

    client = None
    # Class-level default kept for backward compatibility; shadowed by a
    # per-instance list in __init__ (see below).
    waitingConnect = []
    unbound = False

    def __init__(self, config):
        """
        Initialize the object.

        @param config: The configuration.
        @type config: ldaptor.interfaces.ILDAPConfig
        """
        ldapserver.BaseLDAPServer.__init__(self)
        self.config = config
        # BUG FIX: the queue of (deferred, fn, args, kwargs) must be
        # per-instance. A class-level list is shared by every proxied
        # connection, cross-wiring queued requests between unrelated
        # clients.
        self.waitingConnect = []

    def _whenConnected(self, fn, *a, **kw):
        # Run fn now if the upstream client exists; otherwise queue it.
        if self.client is None:
            d = defer.Deferred()
            self.waitingConnect.append((d, fn, a, kw))
            return d
        else:
            return defer.maybeDeferred(fn, *a, **kw)

    def _cbConnectionMade(self, proto):
        # Upstream connection established: flush queued calls in FIFO order.
        self.client = proto
        while self.waitingConnect:
            d, fn, a, kw = self.waitingConnect.pop(0)
            d2 = defer.maybeDeferred(fn, *a, **kw)
            d2.chainDeferred(d)

    def _clientQueue(self, request, controls, reply):
        # TODO controls
        if request.needs_answer:
            d = self.client.send_multiResponse(request, self._gotResponse, reply)
            # TODO handle d errbacks
        else:
            self.client.send_noResponse(request)

    def _gotResponse(self, response, reply):
        reply(response)
        # TODO this is ugly
        # Returning True signals send_multiResponse that the exchange is
        # complete (search done / bind answered).
        return isinstance(response, (
            pureldap.LDAPSearchResultDone,
            pureldap.LDAPBindResponse,
        ))

    def _failConnection(self, reason):
        #TODO self.loseConnection()
        return reason  # TODO

    def connectionMade(self):
        # Open the upstream client connection as soon as a client connects.
        clientCreator = ldapconnector.LDAPClientCreator(
            reactor, self.protocol)
        d = clientCreator.connect(
            dn='',
            overrides=self.config.getServiceLocationOverrides())
        d.addCallback(self._cbConnectionMade)
        d.addErrback(self._failConnection)
        ldapserver.BaseLDAPServer.connectionMade(self)

    def connectionLost(self, reason):
        assert self.client is not None
        if self.client.connected:
            if not self.unbound:
                # Politely unbind the upstream session once.
                self.client.unbind()
                self.unbound = True
            else:
                self.client.transport.loseConnection()
        self.client = None
        ldapserver.BaseLDAPServer.connectionLost(self, reason)

    def _handleUnknown(self, request, controls, reply):
        self._whenConnected(self._clientQueue, request, controls, reply)
        return None

    def handleUnknown(self, request, controls, reply):
        # Default handler: forward any request type to the upstream server.
        d = defer.succeed(request)
        d.addCallback(self._handleUnknown, controls, reply)
        return d

    def handle_LDAPUnbindRequest(self, request, controls, reply):
        # Remember the client unbound so connectionLost doesn't unbind twice.
        self.unbound = True
        self.handleUnknown(request, controls, reply)
if __name__ == '__main__':
    """
    Demonstration LDAP proxy; passes all requests to localhost:389.
    """
    from twisted.internet import protocol
    from twisted.python import log
    import sys
    log.startLogging(sys.stderr)
    factory = protocol.ServerFactory()
    # NOTE(review): Proxy.__init__ takes a `config` object, but this demo
    # passes `overrides=` — the demo looks stale relative to the class
    # above and would raise TypeError if run; confirm before relying on it.
    factory.protocol = lambda : Proxy(overrides={
        '': ('localhost', 389),
    })
    reactor.listenTCP(10389, factory)
    reactor.run()
| 30.736364 | 81 | 0.630287 |
from twisted.internet import reactor, defer
from ldaptor.protocols.ldap import ldapserver, ldapconnector, ldapclient
from ldaptor.protocols import pureldap
class Proxy(ldapserver.BaseLDAPServer):
protocol = ldapclient.LDAPClient
client = None
waitingConnect = []
unbound = False
def __init__(self, config):
ldapserver.BaseLDAPServer.__init__(self)
self.config = config
def _whenConnected(self, fn, *a, **kw):
if self.client is None:
d = defer.Deferred()
self.waitingConnect.append((d, fn, a, kw))
return d
else:
return defer.maybeDeferred(fn, *a, **kw)
def _cbConnectionMade(self, proto):
self.client = proto
while self.waitingConnect:
d, fn, a, kw = self.waitingConnect.pop(0)
d2 = defer.maybeDeferred(fn, *a, **kw)
d2.chainDeferred(d)
def _clientQueue(self, request, controls, reply):
if request.needs_answer:
d = self.client.send_multiResponse(request, self._gotResponse, reply)
else:
self.client.send_noResponse(request)
def _gotResponse(self, response, reply):
reply(response)
return isinstance(response, (
pureldap.LDAPSearchResultDone,
pureldap.LDAPBindResponse,
))
def _failConnection(self, reason):
return reason
def connectionMade(self):
clientCreator = ldapconnector.LDAPClientCreator(
reactor, self.protocol)
d = clientCreator.connect(
dn='',
overrides=self.config.getServiceLocationOverrides())
d.addCallback(self._cbConnectionMade)
d.addErrback(self._failConnection)
ldapserver.BaseLDAPServer.connectionMade(self)
def connectionLost(self, reason):
assert self.client is not None
if self.client.connected:
if not self.unbound:
self.client.unbind()
self.unbound = True
else:
self.client.transport.loseConnection()
self.client = None
ldapserver.BaseLDAPServer.connectionLost(self, reason)
def _handleUnknown(self, request, controls, reply):
self._whenConnected(self._clientQueue, request, controls, reply)
return None
def handleUnknown(self, request, controls, reply):
d = defer.succeed(request)
d.addCallback(self._handleUnknown, controls, reply)
return d
def handle_LDAPUnbindRequest(self, request, controls, reply):
self.unbound = True
self.handleUnknown(request, controls, reply)
if __name__ == '__main__':
from twisted.internet import protocol
from twisted.python import log
import sys
log.startLogging(sys.stderr)
factory = protocol.ServerFactory()
factory.protocol = lambda : Proxy(overrides={
'': ('localhost', 389),
})
reactor.listenTCP(10389, factory)
reactor.run()
| true | true |
1c2db776e42abd808e2f2cbd7dd5ca11a3103424 | 7,930 | py | Python | nailgun/nailgun/db/sqlalchemy/fixman.py | Axam/nsx-web | 4f60d71c05e08740cbdf19b6c9bb0c4cb1e29ad5 | [
"Apache-2.0"
] | 1 | 2021-04-06T16:13:35.000Z | 2021-04-06T16:13:35.000Z | nailgun/nailgun/db/sqlalchemy/fixman.py | Axam/nsx-web | 4f60d71c05e08740cbdf19b6c9bb0c4cb1e29ad5 | [
"Apache-2.0"
] | null | null | null | nailgun/nailgun/db/sqlalchemy/fixman.py | Axam/nsx-web | 4f60d71c05e08740cbdf19b6c9bb0c4cb1e29ad5 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
import itertools
import jinja2
import os.path
import Queue
import StringIO
import sys
import yaml
from sqlalchemy import orm
import sqlalchemy.types
from nailgun.db import db
from nailgun.db.sqlalchemy import models
from nailgun.logger import logger
from nailgun import objects
from nailgun.openstack.common import jsonutils
from nailgun.settings import settings
from nailgun.utils import dict_merge
def capitalize_model_name(model_name):
    """Convert a snake_case model name to CamelCase ('net_group' -> 'NetGroup')."""
    return ''.join(part.capitalize() for part in model_name.split('_'))
def template_fixture(fileobj, **kwargs):
    """Render a fixture file as a Jinja2 template into a string buffer.

    'settings' defaults to the application settings when absent or falsy.
    """
    if not kwargs.get('settings'):
        kwargs["settings"] = settings
    rendered = jinja2.Template(fileobj.read()).render(**kwargs)
    return StringIO.StringIO(rendered)
def load_fixture(fileobj, loader=None):
    """Load a fixture file (json/yaml), render templates, resolve 'extend'.

    The loader is picked from the file extension unless given explicitly.
    Entries without a 'pk' are dropped; each remaining entry has its
    'extend' ancestor chain recursively merged in and removed.
    """
    if not loader:
        loaders = {'.json': jsonutils, '.yaml': yaml, '.yml': yaml}
        extension = os.path.splitext(fileobj.name)[1]
        if extension not in loaders:
            raise Exception("Unknown file extension '{0}'".format(extension))
        loader = loaders[extension]
    fixture = loader.load(template_fixture(fileobj))
    # Entries without a primary key cannot be uploaded.
    fixture = [obj for obj in fixture if obj.get('pk') is not None]

    def extend(obj):
        # Merge the (possibly chained) 'extend' ancestors into the object,
        # with the object's own keys taking precedence.
        if 'extend' in obj:
            obj['extend'] = extend(obj['extend'])
        return dict_merge(obj.get('extend', {}), obj)

    for i, obj in enumerate(fixture):
        fixture[i] = extend(obj)
        fixture[i].pop('extend', None)
    return fixture
def upload_fixture(fileobj, loader=None):
    """Insert fixture objects from *fileobj* into the database.

    Model names are resolved against nailgun.db.sqlalchemy.models, foreign
    keys are resolved via a retry queue (an object whose FK target has not
    been inserted yet is requeued once), and objects whose primary key
    already exists in the database are skipped.
    """
    fixture = load_fixture(fileobj, loader)
    queue = Queue.Queue()
    keys = {}
    for obj in fixture:
        pk = obj['pk']
        model_name = obj["model"].split(".")[1]
        try:
            # Validate that a model class with this name exists; raises
            # StopIteration when no candidate spelling matches.
            itertools.dropwhile(
                lambda m: not hasattr(models, m),
                [model_name.capitalize(),
                 "".join(map(lambda n: n.capitalize(), model_name.split("_")))]
            ).next()
        except StopIteration:
            raise Exception("Couldn't find model {0}".format(model_name))
        obj['model'] = getattr(models, capitalize_model_name(model_name))
        keys[obj['model'].__tablename__] = {}
        # Check if it's already uploaded
        obj_from_db = db().query(obj['model']).get(pk)
        if obj_from_db:
            logger.info("Fixture model '%s' with pk='%s' already"
                        " uploaded. Skipping", model_name, pk)
            continue
        queue.put(obj)
    pending_objects = []
    while True:
        try:
            obj = queue.get_nowait()
        except Exception:
            # Queue drained: all uploadable objects were processed.
            break
        new_obj = obj['model']()
        fk_fields = {}
        for field, value in obj["fields"].iteritems():
            f = getattr(obj['model'], field)
            impl = getattr(f, 'impl', None)
            fk_model = None
            try:
                # Derive the related model class from the SQLAlchemy
                # relationship's 'argument' (callable or class).
                if hasattr(f.comparator.prop, "argument"):
                    if hasattr(f.comparator.prop.argument, "__call__"):
                        fk_model = f.comparator.prop.argument()
                    else:
                        fk_model = f.comparator.prop.argument.class_
            except AttributeError:
                pass
            if fk_model:
                if value not in keys[fk_model.__tablename__]:
                    # FK target not inserted yet: requeue this object once;
                    # a second miss is reported as unresolvable.
                    if obj not in pending_objects:
                        queue.put(obj)
                        pending_objects.append(obj)
                        continue
                    else:
                        logger.error(
                            u"Can't resolve foreign key "
                            "'{0}' for object '{1}'".format(
                                field,
                                obj["model"]
                            )
                        )
                        break
                else:
                    value = keys[fk_model.__tablename__][value].id
            if isinstance(impl, orm.attributes.ScalarObjectAttributeImpl):
                if value:
                    fk_fields[field] = (value, fk_model)
            elif isinstance(impl, orm.attributes.CollectionAttributeImpl):
                if value:
                    fk_fields[field] = (value, fk_model)
            elif hasattr(f, 'property') and isinstance(
                f.property.columns[0].type, sqlalchemy.types.DateTime
            ):
                # DateTime columns accept "%d-%m-%Y %H:%M:%S" strings and
                # default to the current time when empty.
                if value:
                    setattr(
                        new_obj,
                        field,
                        datetime.strptime(value, "%d-%m-%Y %H:%M:%S")
                    )
                else:
                    setattr(
                        new_obj,
                        field,
                        datetime.now()
                    )
            else:
                setattr(new_obj, field, value)
        # Deferred relationship assignments: scalar FKs by id, collections
        # by appending each related object.
        for field, data in fk_fields.iteritems():
            if isinstance(data[0], int):
                setattr(new_obj, field, db().query(data[1]).get(data[0]))
            elif isinstance(data[0], list):
                for v in data[0]:
                    getattr(new_obj, field).append(
                        db().query(data[1]).get(v)
                    )
        db().add(new_obj)
        db().commit()
        keys[obj['model'].__tablename__][obj["pk"]] = new_obj
        # UGLY HACK for testing
        if new_obj.__class__.__name__ == 'Node':
            objects.Node.create_attributes(new_obj)
            objects.Node.update_volumes(new_obj)
            objects.Node.update_interfaces(new_obj)
    db().commit()
def upload_fixtures():
    """Upload every fixture listed in settings.FIXTURES_TO_UPLOAD.

    Relative fixture names are resolved against the known fixture
    directories; the first readable match wins. Unreadable fixtures are
    silently skipped.
    """
    search_dirs = [
        '/etc/nailgun/fixtures',
        os.path.join(os.path.dirname(__file__), '..', '..', 'fixtures')
    ]

    def resolve(name):
        # Absolute paths pass through; relative names are tried against
        # each search directory, keeping the last candidate even when
        # unreadable (readability is re-checked by the caller).
        if os.path.isabs(name):
            return name
        candidate = None
        for base in search_dirs:
            candidate = os.path.abspath(os.path.join(base, name))
            if os.access(candidate, os.R_OK):
                break
        return candidate

    for orig_path in settings.FIXTURES_TO_UPLOAD:
        path = resolve(orig_path)
        if os.access(path, os.R_OK):
            with open(path, "r") as fileobj:
                upload_fixture(fileobj)
            logger.info("Fixture has been uploaded from file: %s", path)
def dump_fixture(model_name):
    """Write all rows of the named model to stdout as a JSON fixture.

    Only plain column properties are exported; None values are omitted and
    values of non-JSON-serializable types are replaced with "".
    """
    dump = []
    app_name = 'nailgun'
    model = getattr(models, capitalize_model_name(model_name))
    for obj in db().query(model).all():
        obj_dump = {}
        obj_dump['pk'] = getattr(obj, model.__mapper__.primary_key[0].name)
        obj_dump['model'] = "%s.%s" % (app_name, model_name)
        obj_dump['fields'] = {}
        # Appended before 'fields' is filled; the dict is mutated in place.
        dump.append(obj_dump)
        for prop in model.__mapper__.iterate_properties:
            if isinstance(prop, sqlalchemy.orm.ColumnProperty):
                field = str(prop.key)
                value = getattr(obj, field)
                if value is None:
                    continue
                if not isinstance(value, (
                        list, dict, str, unicode, int, float, bool)):
                    value = ""
                obj_dump['fields'][field] = value
    sys.stdout.write(jsonutils.dumps(dump, indent=4))
| 34.181034 | 79 | 0.533291 |
from datetime import datetime
import itertools
import jinja2
import os.path
import Queue
import StringIO
import sys
import yaml
from sqlalchemy import orm
import sqlalchemy.types
from nailgun.db import db
from nailgun.db.sqlalchemy import models
from nailgun.logger import logger
from nailgun import objects
from nailgun.openstack.common import jsonutils
from nailgun.settings import settings
from nailgun.utils import dict_merge
def capitalize_model_name(model_name):
return ''.join(map(lambda s: s.capitalize(), model_name.split('_')))
def template_fixture(fileobj, **kwargs):
if not kwargs.get('settings'):
kwargs["settings"] = settings
t = jinja2.Template(fileobj.read())
return StringIO.StringIO(t.render(**kwargs))
def load_fixture(fileobj, loader=None):
if not loader:
loaders = {'.json': jsonutils, '.yaml': yaml, '.yml': yaml}
extension = os.path.splitext(fileobj.name)[1]
if extension not in loaders:
raise Exception("Unknown file extension '{0}'".format(extension))
loader = loaders[extension]
fixture = loader.load(
template_fixture(fileobj)
)
fixture = filter(lambda obj: obj.get('pk') is not None, fixture)
for i in range(0, len(fixture)):
def extend(obj):
if 'extend' in obj:
obj['extend'] = extend(obj['extend'])
return dict_merge(obj.get('extend', {}), obj)
fixture[i] = extend(fixture[i])
fixture[i].pop('extend', None)
return fixture
def upload_fixture(fileobj, loader=None):
fixture = load_fixture(fileobj, loader)
queue = Queue.Queue()
keys = {}
for obj in fixture:
pk = obj['pk']
model_name = obj["model"].split(".")[1]
try:
itertools.dropwhile(
lambda m: not hasattr(models, m),
[model_name.capitalize(),
"".join(map(lambda n: n.capitalize(), model_name.split("_")))]
).next()
except StopIteration:
raise Exception("Couldn't find model {0}".format(model_name))
obj['model'] = getattr(models, capitalize_model_name(model_name))
keys[obj['model'].__tablename__] = {}
# Check if it's already uploaded
obj_from_db = db().query(obj['model']).get(pk)
if obj_from_db:
logger.info("Fixture model '%s' with pk='%s' already"
" uploaded. Skipping", model_name, pk)
continue
queue.put(obj)
pending_objects = []
while True:
try:
obj = queue.get_nowait()
except Exception:
break
new_obj = obj['model']()
fk_fields = {}
for field, value in obj["fields"].iteritems():
f = getattr(obj['model'], field)
impl = getattr(f, 'impl', None)
fk_model = None
try:
if hasattr(f.comparator.prop, "argument"):
if hasattr(f.comparator.prop.argument, "__call__"):
fk_model = f.comparator.prop.argument()
else:
fk_model = f.comparator.prop.argument.class_
except AttributeError:
pass
if fk_model:
if value not in keys[fk_model.__tablename__]:
if obj not in pending_objects:
queue.put(obj)
pending_objects.append(obj)
continue
else:
logger.error(
u"Can't resolve foreign key "
"'{0}' for object '{1}'".format(
field,
obj["model"]
)
)
break
else:
value = keys[fk_model.__tablename__][value].id
if isinstance(impl, orm.attributes.ScalarObjectAttributeImpl):
if value:
fk_fields[field] = (value, fk_model)
elif isinstance(impl, orm.attributes.CollectionAttributeImpl):
if value:
fk_fields[field] = (value, fk_model)
elif hasattr(f, 'property') and isinstance(
f.property.columns[0].type, sqlalchemy.types.DateTime
):
if value:
setattr(
new_obj,
field,
datetime.strptime(value, "%d-%m-%Y %H:%M:%S")
)
else:
setattr(
new_obj,
field,
datetime.now()
)
else:
setattr(new_obj, field, value)
for field, data in fk_fields.iteritems():
if isinstance(data[0], int):
setattr(new_obj, field, db().query(data[1]).get(data[0]))
elif isinstance(data[0], list):
for v in data[0]:
getattr(new_obj, field).append(
db().query(data[1]).get(v)
)
db().add(new_obj)
db().commit()
keys[obj['model'].__tablename__][obj["pk"]] = new_obj
# UGLY HACK for testing
if new_obj.__class__.__name__ == 'Node':
objects.Node.create_attributes(new_obj)
objects.Node.update_volumes(new_obj)
objects.Node.update_interfaces(new_obj)
db().commit()
def upload_fixtures():
fixtures_paths = [
'/etc/nailgun/fixtures',
os.path.join(os.path.dirname(__file__), '..', '..', 'fixtures')
]
for orig_path in settings.FIXTURES_TO_UPLOAD:
if os.path.isabs(orig_path):
path = orig_path
else:
for fixtures_path in fixtures_paths:
path = os.path.abspath(
os.path.join(
fixtures_path,
orig_path
)
)
if os.access(path, os.R_OK):
break
if os.access(path, os.R_OK):
with open(path, "r") as fileobj:
upload_fixture(fileobj)
logger.info("Fixture has been uploaded from file: %s", path)
def dump_fixture(model_name):
dump = []
app_name = 'nailgun'
model = getattr(models, capitalize_model_name(model_name))
for obj in db().query(model).all():
obj_dump = {}
obj_dump['pk'] = getattr(obj, model.__mapper__.primary_key[0].name)
obj_dump['model'] = "%s.%s" % (app_name, model_name)
obj_dump['fields'] = {}
dump.append(obj_dump)
for prop in model.__mapper__.iterate_properties:
if isinstance(prop, sqlalchemy.orm.ColumnProperty):
field = str(prop.key)
value = getattr(obj, field)
if value is None:
continue
if not isinstance(value, (
list, dict, str, unicode, int, float, bool)):
value = ""
obj_dump['fields'][field] = value
sys.stdout.write(jsonutils.dumps(dump, indent=4))
| true | true |
1c2db84513bb862f7de4e8448a426a3edf339f82 | 9,217 | py | Python | community-content/tf_keras_text_classification_distributed_single_worker_gpus_with_gcloud_local_run_and_vertex_sdk/trainer/task.py | lclc19/vertex-ai-samples | 1844df54a6fc3d7afff1110a6758afaf13181b19 | [
"Apache-2.0"
] | null | null | null | community-content/tf_keras_text_classification_distributed_single_worker_gpus_with_gcloud_local_run_and_vertex_sdk/trainer/task.py | lclc19/vertex-ai-samples | 1844df54a6fc3d7afff1110a6758afaf13181b19 | [
"Apache-2.0"
] | null | null | null | community-content/tf_keras_text_classification_distributed_single_worker_gpus_with_gcloud_local_run_and_vertex_sdk/trainer/task.py | lclc19/vertex-ai-samples | 1844df54a6fc3d7afff1110a6758afaf13181b19 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
import tensorflow as tf
from tensorflow.keras import layers, losses
from tensorflow.keras.layers.experimental.preprocessing import TextVectorization
import distribution_utils
import utils
VOCAB_SIZE = 10000
MAX_SEQUENCE_LENGTH = 250
def parse_args():
    """Parse the training job's command-line arguments.

    Cloud Storage directories default to the AIP_* environment variables
    that Vertex AI injects into training containers.
    """
    parser = argparse.ArgumentParser()
    # Using environment variables for Cloud Storage directories
    # see more details in https://cloud.google.com/vertex-ai/docs/training/code-requirements
    parser.add_argument(
        '--model-dir', default=os.getenv('AIP_MODEL_DIR'), type=str,
        help='a Cloud Storage URI of a directory intended for saving model artifacts')
    parser.add_argument(
        '--tensorboard-log-dir', default=os.getenv('AIP_TENSORBOARD_LOG_DIR'), type=str,
        help='a Cloud Storage URI of a directory intended for saving TensorBoard')
    parser.add_argument(
        '--checkpoint-dir', default=os.getenv('AIP_CHECKPOINT_DIR'), type=str,
        help='a Cloud Storage URI of a directory intended for saving checkpoints')
    parser.add_argument(
        '--num-gpus', default=0, type=int, help='number of gpus')
    parser.add_argument(
        '--epochs', default=25, type=int, help='number of training epochs')
    parser.add_argument(
        '--batch-size', default=16, type=int, help='mini-batch size')
    parser.add_argument(
        '--model-version', default=1, type=int, help='model version')
    parser.add_argument(
        '--local-mode', action='store_true', help='use local mode when running on your local machine')
    args = parser.parse_args()
    return args
def download_data(data_dir):
    """Download and extract the Stack Overflow questions dataset.

    Returns the directory containing the extracted dataset files.
    """
    os.makedirs(data_dir, exist_ok=True)
    data_url = "https://storage.googleapis.com/download.tensorflow.org/data/stack_overflow_16k.tar.gz"
    archive_path = tf.keras.utils.get_file(
        fname="stack_overflow_16k.tar.gz",
        origin=data_url,
        untar=True,
        cache_dir=data_dir,
        cache_subdir="",
    )
    return os.path.join(os.path.dirname(archive_path))
def load_dataset(dataset_dir, batch_size, validation_split=0.2, seed=42):
    """Build train/validation/test text datasets from the extracted files.

    The train directory is split into training/validation subsets using
    the same seed so the subsets are disjoint. A sample batch and the
    class-name/index mapping are printed for inspection.
    """
    train_dir = os.path.join(dataset_dir, 'train')
    test_dir = os.path.join(dataset_dir, 'test')
    raw_train_ds = tf.keras.preprocessing.text_dataset_from_directory(
        train_dir,
        batch_size=batch_size,
        validation_split=validation_split,
        subset='training',
        seed=seed)
    raw_val_ds = tf.keras.preprocessing.text_dataset_from_directory(
        train_dir,
        batch_size=batch_size,
        validation_split=validation_split,
        subset='validation',
        seed=seed)
    raw_test_ds = tf.keras.preprocessing.text_dataset_from_directory(
        test_dir,
        batch_size=batch_size,
    )
    for text_batch, label_batch in raw_train_ds.take(1):
        for i in range(10):
            print("Question: ", text_batch.numpy()[i])
            print("Label:", label_batch.numpy()[i])
    for i, label in enumerate(raw_train_ds.class_names):
        print("Label", i, "corresponds to", label)
    return raw_train_ds, raw_val_ds, raw_test_ds
def build_model(num_classes, loss, optimizer, metrics):
    """Create and compile the 1-D convolutional text classifier.

    The embedding uses VOCAB_SIZE + 1 tokens because index 0 is reserved
    for padding.
    """
    model = tf.keras.Sequential()
    model.add(layers.Embedding(VOCAB_SIZE + 1, 64, mask_zero=True))
    model.add(layers.Conv1D(64, 5, padding="valid", activation="relu", strides=2))
    model.add(layers.GlobalMaxPooling1D())
    model.add(layers.Dense(num_classes))
    model.compile(loss=loss, optimizer=optimizer, metrics=metrics)
    return model
def train(model, train_dataset, validation_dataset, epochs, tensorboard_log_dir, checkpoint_dir):
    """Fit *model*, logging to TensorBoard and checkpointing weights.

    Prints the final training/validation accuracy and loss.
    """
    tensorboard_callback = tf.keras.callbacks.TensorBoard(
        log_dir=tensorboard_log_dir,
        update_freq=1
    )
    # Weights-only checkpoints every 100 epochs.
    # NOTE(review): `period` is a legacy ModelCheckpoint argument that newer
    # Keras releases removed in favor of `save_freq` — confirm TF version.
    checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
        filepath=os.path.join(checkpoint_dir, 'cp-{epoch:04d}.ckpt'),
        verbose=1,
        save_weights_only=True,
        save_freq="epoch",
        period=100
    )
    history = model.fit(
        train_dataset,
        epochs=epochs,
        validation_data=validation_dataset,
        callbacks=[tensorboard_callback, checkpoint_callback]
    )
    print('Training accuracy: {acc}, loss: {loss}'.format(
        acc=history.history['accuracy'][-1], loss=history.history['loss'][-1]))
    print('Validation accuracy: {acc}, loss: {loss}'.format(
        acc=history.history['val_accuracy'][-1], loss=history.history['val_loss'][-1]))
    return
def get_string_labels(predicted_scores_batch, class_names):
    """Map each row of per-class scores to its argmax class-name string."""
    best_indices = tf.argmax(predicted_scores_batch, axis=1)
    return tf.gather(class_names, best_indices)
def predict(export_model, class_names, inputs):
    """Run the exported model on *inputs* and return predicted label strings."""
    scores = export_model.predict(inputs)
    return get_string_labels(scores, class_names)
def main():
    """End-to-end training entry point.

    Downloads the dataset, adapts a TextVectorization layer, trains the
    classifier under a mirrored distribution strategy, evaluates it, and
    saves the model plus an export model that embeds vectorization.
    """
    args = parse_args()

    local_data_dir = './tmp/data'
    local_model_dir = './tmp/model'
    local_checkpoint_dir = './tmp/checkpoints'
    local_tensorboard_log_dir = './tmp/logs'

    #TODO: update when gcsfuse ready
    gcsfuse_ready = False
    model_dir = args.model_dir or local_model_dir
    # Checkpoints go to Cloud Storage only once gcsfuse support lands.
    checkpoint_dir = (gcsfuse_ready and
                      args.checkpoint_dir) or local_checkpoint_dir
    tensorboard_log_dir = args.tensorboard_log_dir or local_tensorboard_log_dir

    class_names = ['csharp', 'java', 'javascript', 'python']
    class_indices = dict(zip(class_names, range(len(class_names))))
    num_classes = len(class_names)
    print(f' class names: {class_names}')
    print(f' class indices: {class_indices}')
    print(f' num classes: {num_classes}')

    strategy = distribution_utils.get_distribution_mirrored_strategy(
        num_gpus=args.num_gpus)
    print('Number of devices: {}'.format(strategy.num_replicas_in_sync))

    global_batch_size = args.batch_size * strategy.num_replicas_in_sync
    print(f'Global batch size: {global_batch_size}')

    dataset_dir = download_data(local_data_dir)
    raw_train_ds, raw_val_ds, raw_test_ds = load_dataset(dataset_dir, global_batch_size)

    vectorize_layer = TextVectorization(
        max_tokens=VOCAB_SIZE,
        output_mode='int',
        output_sequence_length=MAX_SEQUENCE_LENGTH)

    # Fit the vocabulary on the raw training text only.
    train_text = raw_train_ds.map(lambda text, labels: text)
    vectorize_layer.adapt(train_text)
    print('The vectorize_layer is adapted')

    def vectorize_text(text, label):
        text = tf.expand_dims(text, -1)
        return vectorize_layer(text), label

    # Retrieve a batch (of 32 reviews and labels) from the dataset
    text_batch, label_batch = next(iter(raw_train_ds))
    first_question, first_label = text_batch[0], label_batch[0]
    print("Question", first_question)
    print("Label", first_label)
    print("Vectorized question:", vectorize_text(first_question, first_label)[0])

    train_ds = raw_train_ds.map(vectorize_text)
    val_ds = raw_val_ds.map(vectorize_text)
    test_ds = raw_test_ds.map(vectorize_text)

    AUTOTUNE = tf.data.AUTOTUNE

    def configure_dataset(dataset):
        return dataset.cache().prefetch(buffer_size=AUTOTUNE)

    train_ds = configure_dataset(train_ds)
    val_ds = configure_dataset(val_ds)
    test_ds = configure_dataset(test_ds)

    print('Build model')
    # BUG FIX: the original line ended with a trailing comma, which made
    # `loss` a one-element tuple instead of a loss object.
    loss = losses.SparseCategoricalCrossentropy(from_logits=True)
    optimizer = 'adam'
    metrics = ['accuracy']
    with strategy.scope():
        model = build_model(
            num_classes=num_classes,
            loss=loss,
            optimizer=optimizer,
            metrics=metrics,
        )
    train(
        model=model,
        train_dataset=train_ds,
        validation_dataset=val_ds,
        epochs=args.epochs,
        tensorboard_log_dir=tensorboard_log_dir,
        checkpoint_dir=checkpoint_dir
    )
    test_loss, test_accuracy = model.evaluate(test_ds)
    print("Int model accuracy: {:2.2%}".format(test_accuracy))

    # Export model bundles vectorization + softmax so it accepts raw text.
    with strategy.scope():
        export_model = tf.keras.Sequential(
            [vectorize_layer, model,
             layers.Activation('softmax')])
        export_model.compile(
            loss=losses.SparseCategoricalCrossentropy(from_logits=False),
            optimizer='adam',
            metrics=['accuracy'])
    loss, accuracy = export_model.evaluate(raw_test_ds)
    print("Accuracy: {:2.2%}".format(accuracy))

    model_path = os.path.join(model_dir, str(args.model_version))
    model.save(model_path)
    print(f'Model version {args.model_version} is saved to {model_dir}')
    print(f'Tensorboard logs are saved to: {tensorboard_log_dir}')
    print(f'Checkpoints are saved to: {checkpoint_dir}')
    utils.gcs_upload(
        dir=checkpoint_dir,
        local_dir=local_checkpoint_dir,
        gcs_dir=args.checkpoint_dir,
        gcsfuse_ready=gcsfuse_ready,
        local_mode=args.local_mode
    )
    return
if __name__ == '__main__':
main() | 31.565068 | 100 | 0.726701 |
import argparse
import os
import tensorflow as tf
from tensorflow.keras import layers, losses
from tensorflow.keras.layers.experimental.preprocessing import TextVectorization
import distribution_utils
import utils
VOCAB_SIZE = 10000
MAX_SEQUENCE_LENGTH = 250
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--model-dir', default=os.getenv('AIP_MODEL_DIR'), type=str,
help='a Cloud Storage URI of a directory intended for saving model artifacts')
parser.add_argument(
'--tensorboard-log-dir', default=os.getenv('AIP_TENSORBOARD_LOG_DIR'), type=str,
help='a Cloud Storage URI of a directory intended for saving TensorBoard')
parser.add_argument(
'--checkpoint-dir', default=os.getenv('AIP_CHECKPOINT_DIR'), type=str,
help='a Cloud Storage URI of a directory intended for saving checkpoints')
parser.add_argument(
'--num-gpus', default=0, type=int, help='number of gpus')
parser.add_argument(
'--epochs', default=25, type=int, help='number of training epochs')
parser.add_argument(
'--batch-size', default=16, type=int, help='mini-batch size')
parser.add_argument(
'--model-version', default=1, type=int, help='model version')
parser.add_argument(
'--local-mode', action='store_true', help='use local mode when running on your local machine')
args = parser.parse_args()
return args
def download_data(data_dir):
    """Download and extract the Stack Overflow questions dataset.

    Args:
        data_dir: Local directory used as the Keras download cache; it is
            created if it does not already exist.

    Returns:
        Path of the directory containing the extracted dataset.
    """
    # exist_ok avoids the check-then-create race of the original
    # os.path.exists()/os.makedirs() pair.
    os.makedirs(data_dir, exist_ok=True)
    data_url = "https://storage.googleapis.com/download.tensorflow.org/data/stack_overflow_16k.tar.gz"
    dataset = tf.keras.utils.get_file(
        fname="stack_overflow_16k.tar.gz",
        origin=data_url,
        untar=True,
        cache_dir=data_dir,
        cache_subdir="",
    )
    # The original wrapped this in a single-argument os.path.join(), which
    # is a no-op; dirname of the downloaded archive is the dataset root.
    return os.path.dirname(dataset)
def load_dataset(dataset_dir, batch_size, validation_split=0.2, seed=42):
    """Build raw train/validation/test text datasets from ``dataset_dir``.

    Expects ``dataset_dir`` to contain ``train/`` and ``test/`` subdirectories
    in the one-folder-per-class layout consumed by
    ``text_dataset_from_directory``.

    Args:
        dataset_dir: Root directory of the extracted dataset.
        batch_size: Batch size for all three datasets.
        validation_split: Fraction of the training directory held out for
            validation.
        seed: Shuffling seed; sharing it between the 'training' and
            'validation' subsets keeps them disjoint and reproducible.

    Returns:
        Tuple ``(raw_train_ds, raw_val_ds, raw_test_ds)`` of batched
        (text, integer-label) datasets.
    """
    train_dir = os.path.join(dataset_dir, 'train')
    test_dir = os.path.join(dataset_dir, 'test')
    # Training subset carved out of the train directory.
    raw_train_ds = tf.keras.preprocessing.text_dataset_from_directory(
        train_dir,
        batch_size=batch_size,
        validation_split=validation_split,
        subset='training',
        seed=seed)
    # Validation subset from the same directory and seed (complement split).
    raw_val_ds = tf.keras.preprocessing.text_dataset_from_directory(
        train_dir,
        batch_size=batch_size,
        validation_split=validation_split,
        subset='validation',
        seed=seed)
    # The test directory is used in full — no split.
    raw_test_ds = tf.keras.preprocessing.text_dataset_from_directory(
        test_dir,
        batch_size=batch_size,
    )
    # Debug output: print a few sample records and the label-index mapping.
    for text_batch, label_batch in raw_train_ds.take(1):
        for i in range(10):
            print("Question: ", text_batch.numpy()[i])
            print("Label:", label_batch.numpy()[i])
    for i, label in enumerate(raw_train_ds.class_names):
        print("Label", i, "corresponds to", label)
    return raw_train_ds, raw_val_ds, raw_test_ds
def build_model(num_classes, loss, optimizer, metrics):
    """Assemble and compile the 1-D convolutional text classifier.

    The network embeds integer token ids, applies a strided 1-D convolution,
    max-pools over the time axis, and projects to one logit per class.
    """
    classifier = tf.keras.Sequential()
    classifier.add(layers.Embedding(VOCAB_SIZE + 1, 64, mask_zero=True))
    classifier.add(layers.Conv1D(64, 5, padding="valid", activation="relu", strides=2))
    classifier.add(layers.GlobalMaxPooling1D())
    classifier.add(layers.Dense(num_classes))
    classifier.compile(loss=loss, optimizer=optimizer, metrics=metrics)
    return classifier
def train(model, train_dataset, validation_dataset, epochs, tensorboard_log_dir, checkpoint_dir):
    """Fit ``model`` and print its final train/validation metrics.

    Side effects: writes TensorBoard event files to ``tensorboard_log_dir``
    and weight-only checkpoints to ``checkpoint_dir``.
    """
    # Log to TensorBoard after every batch (update_freq=1).
    tensorboard_callback = tf.keras.callbacks.TensorBoard(
        log_dir=tensorboard_log_dir,
        update_freq=1
    )
    # Weight-only checkpoints named by epoch number.
    # NOTE(review): `period` is a legacy ModelCheckpoint argument superseded
    # by `save_freq`, and both are passed here — confirm which one takes
    # effect on the TF version in use.
    checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
        filepath=os.path.join(checkpoint_dir, 'cp-{epoch:04d}.ckpt'),
        verbose=1,
        save_weights_only=True,
        save_freq="epoch",
        period=100
    )
    history = model.fit(
        train_dataset,
        epochs=epochs,
        validation_data=validation_dataset,
        callbacks=[tensorboard_callback, checkpoint_callback]
    )
    # Report metrics of the final epoch only.
    print('Training accuracy: {acc}, loss: {loss}'.format(
        acc=history.history['accuracy'][-1], loss=history.history['loss'][-1]))
    print('Validation accuracy: {acc}, loss: {loss}'.format(
        acc=history.history['val_accuracy'][-1], loss=history.history['val_loss'][-1]))
    return
def get_string_labels(predicted_scores_batch, class_names):
    """Map each row of per-class scores to its winning class-name string."""
    winning_indices = tf.argmax(predicted_scores_batch, axis=1)
    return tf.gather(class_names, winning_indices)
def predict(export_model, class_names, inputs):
    """Score ``inputs`` with the export model and return string labels."""
    scores = export_model.predict(inputs)
    return get_string_labels(scores, class_names)
def main():
    """Train the Stack Overflow tag classifier end to end.

    Pipeline: parse flags -> download data -> adapt vocabulary -> train under
    a (possibly multi-GPU) distribution strategy -> evaluate -> wrap the model
    with the vectorizer for serving -> save artifacts and upload checkpoints.
    """
    args = parse_args()
    local_data_dir = './tmp/data'
    local_model_dir = './tmp/model'
    local_checkpoint_dir = './tmp/checkpoints'
    local_tensorboard_log_dir = './tmp/logs'
    gcsfuse_ready = False
    # Fall back to local paths when the AIP_* locations are not provided.
    model_dir = args.model_dir or local_model_dir
    checkpoint_dir = (gcsfuse_ready and
                      args.checkpoint_dir) or local_checkpoint_dir
    tensorboard_log_dir = args.tensorboard_log_dir or local_tensorboard_log_dir
    class_names = ['csharp', 'java', 'javascript', 'python']
    class_indices = dict(zip(class_names, range(len(class_names))))
    num_classes = len(class_names)
    print(f' class names: {class_names}')
    print(f' class indices: {class_indices}')
    print(f' num classes: {num_classes}')
    strategy = distribution_utils.get_distribution_mirrored_strategy(
        num_gpus=args.num_gpus)
    print('Number of devices: {}'.format(strategy.num_replicas_in_sync))
    # Scale the per-replica batch size by the number of replicas.
    global_batch_size = args.batch_size * strategy.num_replicas_in_sync
    print(f'Global batch size: {global_batch_size}')
    dataset_dir = download_data(local_data_dir)
    raw_train_ds, raw_val_ds, raw_test_ds = load_dataset(dataset_dir, global_batch_size)
    # Learn the vocabulary from the training text only.
    vectorize_layer = TextVectorization(
        max_tokens=VOCAB_SIZE,
        output_mode='int',
        output_sequence_length=MAX_SEQUENCE_LENGTH)
    train_text = raw_train_ds.map(lambda text, labels: text)
    vectorize_layer.adapt(train_text)
    print('The vectorize_layer is adapted')

    def vectorize_text(text, label):
        # Add a trailing dimension so the vectorizer sees one string per row.
        text = tf.expand_dims(text, -1)
        return vectorize_layer(text), label

    # Show one example before and after vectorization for sanity checking.
    text_batch, label_batch = next(iter(raw_train_ds))
    first_question, first_label = text_batch[0], label_batch[0]
    print("Question", first_question)
    print("Label", first_label)
    print("Vectorized question:", vectorize_text(first_question, first_label)[0])
    train_ds = raw_train_ds.map(vectorize_text)
    val_ds = raw_val_ds.map(vectorize_text)
    test_ds = raw_test_ds.map(vectorize_text)
    AUTOTUNE = tf.data.AUTOTUNE

    def configure_dataset(dataset):
        # Cache after the first pass and overlap preprocessing with training.
        return dataset.cache().prefetch(buffer_size=AUTOTUNE)

    train_ds = configure_dataset(train_ds)
    val_ds = configure_dataset(val_ds)
    test_ds = configure_dataset(test_ds)
    print('Build model')
    # BUG FIX: the original assignment ended with a trailing comma, which
    # made `loss` a 1-tuple instead of a loss object.
    loss = losses.SparseCategoricalCrossentropy(from_logits=True)
    optimizer = 'adam'
    metrics = ['accuracy']
    with strategy.scope():
        model = build_model(
            num_classes=num_classes,
            loss=loss,
            optimizer=optimizer,
            metrics=metrics,
        )
    train(
        model=model,
        train_dataset=train_ds,
        validation_dataset=val_ds,
        epochs=args.epochs,
        tensorboard_log_dir=tensorboard_log_dir,
        checkpoint_dir=checkpoint_dir
    )
    test_loss, test_accuracy = model.evaluate(test_ds)
    print("Int model accuracy: {:2.2%}".format(test_accuracy))
    # Serving wrapper: raw strings in, softmax probabilities out.
    with strategy.scope():
        export_model = tf.keras.Sequential(
            [vectorize_layer, model,
             layers.Activation('softmax')])
        export_model.compile(
            loss=losses.SparseCategoricalCrossentropy(from_logits=False),
            optimizer='adam',
            metrics=['accuracy'])
    loss, accuracy = export_model.evaluate(raw_test_ds)
    print("Accuracy: {:2.2%}".format(accuracy))
    model_path = os.path.join(model_dir, str(args.model_version))
    # NOTE(review): this saves the inner `model`, not `export_model`, even
    # though the export wrapper was just built and evaluated — confirm which
    # artifact serving expects before changing it.
    model.save(model_path)
    print(f'Model version {args.model_version} is saved to {model_dir}')
    print(f'Tensorboard logs are saved to: {tensorboard_log_dir}')
    print(f'Checkpoints are saved to: {checkpoint_dir}')
    utils.gcs_upload(
        dir=checkpoint_dir,
        local_dir=local_checkpoint_dir,
        gcs_dir=args.checkpoint_dir,
        gcsfuse_ready=gcsfuse_ready,
        local_mode=args.local_mode
    )
    return
# Script entry point.
if __name__ == '__main__':
    main()
1c2db91ba45746a7d95ab225a22bd3eb8692c5dc | 2,592 | py | Python | Prediction based on convolutional neural network/code/reference/mnist_train.py | Asurada2015/Test | 14d92c9cb88d293340d76b20d31ca937052addb6 | [
"Apache-2.0"
] | null | null | null | Prediction based on convolutional neural network/code/reference/mnist_train.py | Asurada2015/Test | 14d92c9cb88d293340d76b20d31ca937052addb6 | [
"Apache-2.0"
] | null | null | null | Prediction based on convolutional neural network/code/reference/mnist_train.py | Asurada2015/Test | 14d92c9cb88d293340d76b20d31ca937052addb6 | [
"Apache-2.0"
] | 1 | 2018-11-16T03:46:14.000Z | 2018-11-16T03:46:14.000Z | import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import mnist_inference
import os
# 配置神经网络参数
BATCH_SIZE = 100 # 批处理数据大小
LEARNING_RATE_BASE = 0.8 # 基础学习率
LEARNING_RATE_DECAY = 0.99 # 学习率衰减速度
REGULARIZATION_RATE = 0.0001 # 正则化项
TRAINING_STEPS = 30000 # 训练次数
MOVING_AVERAGE_DECAY = 0.99 # 平均滑动模型衰减参数
MODEL_SAVE_PATH = "MNIST_model/"
MODEL_NAME = "mnist_model"
def train(mnist):
    """Train the MNIST network (TF1 graph mode) and periodically checkpoint.

    Args:
        mnist: dataset object whose ``train`` split provides ``next_batch``
            and ``num_examples`` (as returned by ``input_data.read_data_sets``).
    """
    # Placeholders for input images and labels; layer sizes are taken from
    # the hyper-parameters declared in mnist_inference.
    x = tf.placeholder(tf.float32, [None, mnist_inference.INPUT_NODE], name='x-input')
    y_ = tf.placeholder(tf.float32, [None, mnist_inference.OUTPUT_NODE], name='y-input')
    # L2 regularizer, applied to the weights inside the inference graph.
    regularizer = tf.contrib.layers.l2_regularizer(REGULARIZATION_RATE)
    y = mnist_inference.inference(x, regularizer)
    global_step = tf.Variable(0, trainable=False)
    # Maintain an exponential moving average over all trainable variables.
    variable_averages = tf.train.ExponentialMovingAverage(MOVING_AVERAGE_DECAY, global_step)
    variables_averages_op = variable_averages.apply(tf.trainable_variables())
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=y, labels=tf.argmax(y_, 1))
    cross_entropy_mean = tf.reduce_mean(cross_entropy)
    # Total loss = cross entropy + L2 terms collected under 'losses'.
    loss = cross_entropy_mean + tf.add_n(tf.get_collection('losses'))
    # Learning rate decays once per epoch (num_examples / BATCH_SIZE steps).
    learning_rate = tf.train.exponential_decay(
        LEARNING_RATE_BASE,
        global_step,
        mnist.train.num_examples/BATCH_SIZE, LEARNING_RATE_DECAY,
        staircase=True)
    train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss, global_step=global_step)
    # Equivalent control-dependency formulation, kept for reference:
    # with tf.control_dependencies([train_step, variables_averages_op]):
    #     train_op = tf.no_op(name='train')
    train_op = tf.group(train_step, variables_averages_op)
    saver = tf.train.Saver()
    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        for i in range(TRAINING_STEPS):
            xs, ys = mnist.train.next_batch(BATCH_SIZE)
            _, loss_value, step = sess.run([train_op, loss, global_step], feed_dict={x: xs, y_: ys})
            # Every 1000 steps: log progress and save a checkpoint.
            if i%1000 == 0:
                # Only the loss on the current training batch is reported here;
                # it gives a rough picture of training progress. Validation
                # accuracy is produced by a separate evaluation program.
                print("After %d training step(s), loss on training batch is %g."%(step, loss_value))
                saver.save(sess, os.path.join(MODEL_SAVE_PATH, MODEL_NAME), global_step=global_step)
def main(argv=None):
    """Entry point for tf.app.run(): load the MNIST data and train."""
    mnist = input_data.read_data_sets("../../../datasets/MNIST_data", one_hot=True)
    train(mnist)
# tf.app.run() parses flags and then invokes main().
if __name__ == '__main__':
    tf.app.run()
| 40.5 | 116 | 0.710648 | import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import mnist_inference
import os
BATCH_SIZE = 100
LEARNING_RATE_BASE = 0.8
LEARNING_RATE_DECAY = 0.99
REGULARIZATION_RATE = 0.0001
TRAINING_STEPS = 30000
MOVING_AVERAGE_DECAY = 0.99
MODEL_SAVE_PATH = "MNIST_model/"
MODEL_NAME = "mnist_model"
def train(mnist):
x = tf.placeholder(tf.float32, [None, mnist_inference.INPUT_NODE], name='x-input')
y_ = tf.placeholder(tf.float32, [None, mnist_inference.OUTPUT_NODE], name='y-input')
regularizer = tf.contrib.layers.l2_regularizer(REGULARIZATION_RATE)
y = mnist_inference.inference(x, regularizer)
global_step = tf.Variable(0, trainable=False)
variable_averages = tf.train.ExponentialMovingAverage(MOVING_AVERAGE_DECAY, global_step)
variables_averages_op = variable_averages.apply(tf.trainable_variables())
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=y, labels=tf.argmax(y_, 1))
cross_entropy_mean = tf.reduce_mean(cross_entropy)
loss = cross_entropy_mean + tf.add_n(tf.get_collection('losses'))
learning_rate = tf.train.exponential_decay(
LEARNING_RATE_BASE,
global_step,
mnist.train.num_examples/BATCH_SIZE, LEARNING_RATE_DECAY,
staircase=True)
train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss, global_step=global_step)
train_op = tf.group(train_step, variables_averages_op)
saver = tf.train.Saver()
with tf.Session() as sess:
tf.global_variables_initializer().run()
for i in range(TRAINING_STEPS):
xs, ys = mnist.train.next_batch(BATCH_SIZE)
_, loss_value, step = sess.run([train_op, loss, global_step], feed_dict={x: xs, y_: ys})
if i%1000 == 0:
print("After %d training step(s), loss on training batch is %g."%(step, loss_value))
saver.save(sess, os.path.join(MODEL_SAVE_PATH, MODEL_NAME), global_step=global_step)
def main(argv=None):
mnist = input_data.read_data_sets("../../../datasets/MNIST_data", one_hot=True)
train(mnist)
if __name__ == '__main__':
tf.app.run()
| true | true |
1c2db9e2ec179c8bdde9e3dc44d50955fbe6c743 | 15,111 | py | Python | lib/jnpr/healthbot/swagger/models/rule_schema.py | minefuto/healthbot-py-client | bb81452c974456af44299aebf32a73abeda8a943 | [
"Apache-2.0"
] | null | null | null | lib/jnpr/healthbot/swagger/models/rule_schema.py | minefuto/healthbot-py-client | bb81452c974456af44299aebf32a73abeda8a943 | [
"Apache-2.0"
] | null | null | null | lib/jnpr/healthbot/swagger/models/rule_schema.py | minefuto/healthbot-py-client | bb81452c974456af44299aebf32a73abeda8a943 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Healthbot APIs
API interface for Healthbot application # noqa: E501
OpenAPI spec version: 1.0.0
Contact: healthbot-hackers@juniper.net
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class RuleSchema(object):
    """Swagger-generated model for a Healthbot rule.

    Backing storage is one ``_<attr>`` slot per entry in ``swagger_types``;
    public access goes through properties, some of which validate input.
    """

    # Attribute name -> declared swagger type.
    swagger_types = {
        'description': 'str',
        'field': 'list[RuleSchemaField]',
        'function': 'list[RuleSchemaFunction]',
        'keys': 'list[str]',
        'network_rule': 'list[ERRORUNKNOWN]',
        'rule_frequency': 'str',
        'rule_name': 'str',
        'sensor': 'list[RuleSchemaSensor1]',
        'synopsis': 'str',
        'field_aggregation_time_range': 'str',
        'trigger': 'list[RuleSchemaTrigger]',
        'variable': 'list[RuleSchemaVariable]',
        'vector': 'list[RuleSchemaVector]',
        'rule_properties': 'RuleSchemaRuleproperties'
    }

    # Attribute name -> JSON key in the API definition.
    attribute_map = {
        'description': 'description',
        'field': 'field',
        'function': 'function',
        'keys': 'keys',
        'network_rule': 'network-rule',
        'rule_frequency': 'rule-frequency',
        'rule_name': 'rule-name',
        'sensor': 'sensor',
        'synopsis': 'synopsis',
        'field_aggregation_time_range': 'field-aggregation-time-range',
        'trigger': 'trigger',
        'variable': 'variable',
        'vector': 'vector',
        'rule_properties': 'rule-properties'
    }

    def __init__(self, description=None, field=None, function=None, keys=None, network_rule=None, rule_frequency=None, rule_name=None, sensor=None, synopsis=None, field_aggregation_time_range=None, trigger=None, variable=None, vector=None, rule_properties=None):  # noqa: E501
        """Initialise the model; only ``rule_name`` is mandatory."""
        # Every backing field starts out unset.
        for attr in self.swagger_types:
            setattr(self, '_' + attr, None)
        self.discriminator = None
        # Route each supplied value through its property setter so the
        # validating setters below get a chance to reject bad input.
        if description is not None:
            self.description = description
        if field is not None:
            self.field = field
        if function is not None:
            self.function = function
        if keys is not None:
            self.keys = keys
        if network_rule is not None:
            self.network_rule = network_rule
        if rule_frequency is not None:
            self.rule_frequency = rule_frequency
        # rule-name is required: the setter raises ValueError on None.
        self.rule_name = rule_name
        if sensor is not None:
            self.sensor = sensor
        if synopsis is not None:
            self.synopsis = synopsis
        if field_aggregation_time_range is not None:
            self.field_aggregation_time_range = field_aggregation_time_range
        if trigger is not None:
            self.trigger = trigger
        if variable is not None:
            self.variable = variable
        if vector is not None:
            self.vector = vector
        if rule_properties is not None:
            self.rule_properties = rule_properties

    @property
    def description(self):
        """str: Description about the rule."""
        return self._description

    @description.setter
    def description(self, description):
        self._description = description

    @property
    def field(self):
        """list[RuleSchemaField]: Field definitions of the rule."""
        return self._field

    @field.setter
    def field(self, field):
        self._field = field

    @property
    def function(self):
        """list[RuleSchemaFunction]: Function definitions of the rule."""
        return self._function

    @function.setter
    def function(self, function):
        self._function = function

    @property
    def keys(self):
        """list[str]: Key fields of the rule."""
        return self._keys

    @keys.setter
    def keys(self, keys):
        self._keys = keys

    @property
    def network_rule(self):
        """list[ERRORUNKNOWN]: Flag to denote a network rule."""
        return self._network_rule

    @network_rule.setter
    def network_rule(self, network_rule):
        self._network_rule = network_rule

    @property
    def rule_frequency(self):
        """str: How often the rule's derived elements are computed, e.g. '2s'.

        Required only when the rule has no sensor defined.
        """
        return self._rule_frequency

    @rule_frequency.setter
    def rule_frequency(self, rule_frequency):
        # Positive integer followed by a unit letter: s/m/h/d/w/y.
        if rule_frequency is not None and not re.search(r'^[1-9][0-9]*[smhdwy]$', rule_frequency):
            raise ValueError(r"Invalid value for `rule_frequency`, must be a follow pattern or equal to `/^[1-9][0-9]*[smhdwy]$/`")  # noqa: E501
        self._rule_frequency = rule_frequency

    @property
    def rule_name(self):
        """str: Name of the rule; must match ``[a-z][a-z0-9_-]*``."""
        return self._rule_name

    @rule_name.setter
    def rule_name(self, rule_name):
        # Mandatory, at most 64 characters, lowercase identifier pattern.
        if rule_name is None:
            raise ValueError("Invalid value for `rule_name`, must not be `None`")  # noqa: E501
        if len(rule_name) > 64:
            raise ValueError("Invalid value for `rule_name`, length must be less than or equal to `64`")  # noqa: E501
        if not re.search(r'^[a-z][a-z0-9_-]*$', rule_name):
            raise ValueError(r"Invalid value for `rule_name`, must be a follow pattern or equal to `/^[a-z][a-z0-9_-]*$/`")  # noqa: E501
        self._rule_name = rule_name

    @property
    def sensor(self):
        """list[RuleSchemaSensor1]: Sensor definitions of the rule."""
        return self._sensor

    @sensor.setter
    def sensor(self, sensor):
        self._sensor = sensor

    @property
    def synopsis(self):
        """str: Synopsis about the rule."""
        return self._synopsis

    @synopsis.setter
    def synopsis(self, synopsis):
        self._synopsis = synopsis

    @property
    def field_aggregation_time_range(self):
        """str: Look-back window for field aggregation, e.g. '2s'."""
        return self._field_aggregation_time_range

    @field_aggregation_time_range.setter
    def field_aggregation_time_range(self, field_aggregation_time_range):
        # Same duration syntax as rule_frequency.
        if field_aggregation_time_range is not None and not re.search(r'^[1-9][0-9]*[smhdwy]$', field_aggregation_time_range):
            raise ValueError(r"Invalid value for `field_aggregation_time_range`, must be a follow pattern or equal to `/^[1-9][0-9]*[smhdwy]$/`")  # noqa: E501
        self._field_aggregation_time_range = field_aggregation_time_range

    @property
    def trigger(self):
        """list[RuleSchemaTrigger]: Trigger definitions of the rule."""
        return self._trigger

    @trigger.setter
    def trigger(self, trigger):
        self._trigger = trigger

    @property
    def variable(self):
        """list[RuleSchemaVariable]: Playbook variable configuration."""
        return self._variable

    @variable.setter
    def variable(self, variable):
        self._variable = variable

    @property
    def vector(self):
        """list[RuleSchemaVector]: Vector definitions of the rule."""
        return self._vector

    @vector.setter
    def vector(self, vector):
        self._vector = vector

    @property
    def rule_properties(self):
        """RuleSchemaRuleproperties: Additional rule properties."""
        return self._rule_properties

    @rule_properties.setter
    def rule_properties(self, rule_properties):
        self._rule_properties = rule_properties

    def to_dict(self):
        """Return the model's properties as a plain dict."""
        result = {}
        for name in self.swagger_types:
            value = getattr(self, name)
            if isinstance(value, list):
                # Recurse into model elements; pass primitives through.
                result[name] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[name] = value.to_dict()
            elif isinstance(value, dict):
                result[name] = {
                    k: (v.to_dict() if hasattr(v, "to_dict") else v)
                    for k, v in value.items()
                }
            else:
                result[name] = value
        if issubclass(RuleSchema, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True when both objects are RuleSchema with equal state."""
        if not isinstance(other, RuleSchema):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True when the objects are not equal."""
        return not self == other
| 31.679245 | 276 | 0.610615 |
import pprint
import re
import six
class RuleSchema(object):
swagger_types = {
'description': 'str',
'field': 'list[RuleSchemaField]',
'function': 'list[RuleSchemaFunction]',
'keys': 'list[str]',
'network_rule': 'list[ERRORUNKNOWN]',
'rule_frequency': 'str',
'rule_name': 'str',
'sensor': 'list[RuleSchemaSensor1]',
'synopsis': 'str',
'field_aggregation_time_range': 'str',
'trigger': 'list[RuleSchemaTrigger]',
'variable': 'list[RuleSchemaVariable]',
'vector': 'list[RuleSchemaVector]',
'rule_properties': 'RuleSchemaRuleproperties'
}
attribute_map = {
'description': 'description',
'field': 'field',
'function': 'function',
'keys': 'keys',
'network_rule': 'network-rule',
'rule_frequency': 'rule-frequency',
'rule_name': 'rule-name',
'sensor': 'sensor',
'synopsis': 'synopsis',
'field_aggregation_time_range': 'field-aggregation-time-range',
'trigger': 'trigger',
'variable': 'variable',
'vector': 'vector',
'rule_properties': 'rule-properties'
}
def __init__(self, description=None, field=None, function=None, keys=None, network_rule=None, rule_frequency=None, rule_name=None, sensor=None, synopsis=None, field_aggregation_time_range=None, trigger=None, variable=None, vector=None, rule_properties=None):
self._description = None
self._field = None
self._function = None
self._keys = None
self._network_rule = None
self._rule_frequency = None
self._rule_name = None
self._sensor = None
self._synopsis = None
self._field_aggregation_time_range = None
self._trigger = None
self._variable = None
self._vector = None
self._rule_properties = None
self.discriminator = None
if description is not None:
self.description = description
if field is not None:
self.field = field
if function is not None:
self.function = function
if keys is not None:
self.keys = keys
if network_rule is not None:
self.network_rule = network_rule
if rule_frequency is not None:
self.rule_frequency = rule_frequency
self.rule_name = rule_name
if sensor is not None:
self.sensor = sensor
if synopsis is not None:
self.synopsis = synopsis
if field_aggregation_time_range is not None:
self.field_aggregation_time_range = field_aggregation_time_range
if trigger is not None:
self.trigger = trigger
if variable is not None:
self.variable = variable
if vector is not None:
self.vector = vector
if rule_properties is not None:
self.rule_properties = rule_properties
@property
def description(self):
return self._description
@description.setter
def description(self, description):
self._description = description
@property
def field(self):
return self._field
@field.setter
def field(self, field):
self._field = field
@property
def function(self):
return self._function
@function.setter
def function(self, function):
self._function = function
@property
def keys(self):
return self._keys
@keys.setter
def keys(self, keys):
self._keys = keys
@property
def network_rule(self):
return self._network_rule
@network_rule.setter
def network_rule(self, network_rule):
self._network_rule = network_rule
@property
def rule_frequency(self):
return self._rule_frequency
@rule_frequency.setter
def rule_frequency(self, rule_frequency):
if rule_frequency is not None and not re.search(r'^[1-9][0-9]*[smhdwy]$', rule_frequency):
raise ValueError(r"Invalid value for `rule_frequency`, must be a follow pattern or equal to `/^[1-9][0-9]*[smhdwy]$/`")
self._rule_frequency = rule_frequency
@property
def rule_name(self):
return self._rule_name
@rule_name.setter
def rule_name(self, rule_name):
if rule_name is None:
raise ValueError("Invalid value for `rule_name`, must not be `None`")
if rule_name is not None and len(rule_name) > 64:
raise ValueError("Invalid value for `rule_name`, length must be less than or equal to `64`")
if rule_name is not None and not re.search(r'^[a-z][a-z0-9_-]*$', rule_name):
raise ValueError(r"Invalid value for `rule_name`, must be a follow pattern or equal to `/^[a-z][a-z0-9_-]*$/`")
self._rule_name = rule_name
@property
def sensor(self):
return self._sensor
@sensor.setter
def sensor(self, sensor):
self._sensor = sensor
@property
def synopsis(self):
return self._synopsis
@synopsis.setter
def synopsis(self, synopsis):
self._synopsis = synopsis
@property
def field_aggregation_time_range(self):
return self._field_aggregation_time_range
@field_aggregation_time_range.setter
def field_aggregation_time_range(self, field_aggregation_time_range):
if field_aggregation_time_range is not None and not re.search(r'^[1-9][0-9]*[smhdwy]$', field_aggregation_time_range):
raise ValueError(r"Invalid value for `field_aggregation_time_range`, must be a follow pattern or equal to `/^[1-9][0-9]*[smhdwy]$/`")
self._field_aggregation_time_range = field_aggregation_time_range
@property
def trigger(self):
return self._trigger
@trigger.setter
def trigger(self, trigger):
self._trigger = trigger
@property
def variable(self):
return self._variable
@variable.setter
def variable(self, variable):
self._variable = variable
@property
def vector(self):
return self._vector
@vector.setter
def vector(self, vector):
self._vector = vector
@property
def rule_properties(self):
return self._rule_properties
@rule_properties.setter
def rule_properties(self, rule_properties):
self._rule_properties = rule_properties
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(RuleSchema, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, RuleSchema):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
1c2dbabee38b958cf582568343d598c8386b4440 | 286 | py | Python | src/betterproto/compile/naming.py | DK99/python-betterproto | d213abca03c90d0690e5e4d23894d51751478098 | [
"MIT"
] | 708 | 2019-10-11T06:23:40.000Z | 2022-03-31T09:39:08.000Z | src/betterproto/compile/naming.py | DK99/python-betterproto | d213abca03c90d0690e5e4d23894d51751478098 | [
"MIT"
] | 302 | 2019-11-11T22:09:21.000Z | 2022-03-29T11:21:04.000Z | src/betterproto/compile/naming.py | DK99/python-betterproto | d213abca03c90d0690e5e4d23894d51751478098 | [
"MIT"
] | 122 | 2019-12-04T16:22:53.000Z | 2022-03-20T09:31:10.000Z | from betterproto import casing
def pythonize_class_name(name: str) -> str:
    """Return ``name`` converted to PascalCase, for generated class names."""
    return casing.pascal_case(name)
def pythonize_field_name(name: str) -> str:
    """Return ``name`` as a safe snake_case Python identifier, for fields."""
    return casing.safe_snake_case(name)
def pythonize_method_name(name: str) -> str:
    """Return ``name`` as a safe snake_case Python identifier, for methods."""
    return casing.safe_snake_case(name)
| 20.428571 | 44 | 0.755245 | from betterproto import casing
def pythonize_class_name(name: str) -> str:
return casing.pascal_case(name)
def pythonize_field_name(name: str) -> str:
return casing.safe_snake_case(name)
def pythonize_method_name(name: str) -> str:
return casing.safe_snake_case(name)
| true | true |
1c2dbac173571edf8c02d33825652024a416aa0a | 3,568 | py | Python | huaweicloud-sdk-elb/huaweicloudsdkelb/v3/model/show_member_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 64 | 2020-06-12T07:05:07.000Z | 2022-03-30T03:32:50.000Z | huaweicloud-sdk-elb/huaweicloudsdkelb/v3/model/show_member_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 11 | 2020-07-06T07:56:54.000Z | 2022-01-11T11:14:40.000Z | huaweicloud-sdk-elb/huaweicloudsdkelb/v3/model/show_member_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | [
"Apache-2.0"
] | 24 | 2020-06-08T11:42:13.000Z | 2022-03-04T06:44:08.000Z | # coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ShowMemberRequest:
    """Request model for querying a single backend server (auto-generated SDK code).

    Attributes:
        openapi_types (dict): The key is attribute name
                              and the value is attribute type.
        attribute_map (dict): The key is attribute name
                              and the value is json key in definition.
    """
    # Attribute names listed here are masked as "****" by to_dict().
    sensitive_list = []

    openapi_types = {
        'member_id': 'str',
        'pool_id': 'str'
    }

    attribute_map = {
        'member_id': 'member_id',
        'pool_id': 'pool_id'
    }

    def __init__(self, member_id=None, pool_id=None):
        """ShowMemberRequest - a model defined in huaweicloud sdk"""
        self._member_id = None
        self._pool_id = None
        self.discriminator = None
        # Assign through the property setters.
        self.member_id = member_id
        self.pool_id = pool_id

    @property
    def member_id(self):
        """Gets the member_id of this ShowMemberRequest.

        Backend server ID.

        :return: The member_id of this ShowMemberRequest.
        :rtype: str
        """
        return self._member_id

    @member_id.setter
    def member_id(self, member_id):
        """Sets the member_id of this ShowMemberRequest.

        Backend server ID.

        :param member_id: The member_id of this ShowMemberRequest.
        :type: str
        """
        self._member_id = member_id

    @property
    def pool_id(self):
        """Gets the pool_id of this ShowMemberRequest.

        Backend server group ID.

        :return: The pool_id of this ShowMemberRequest.
        :rtype: str
        """
        return self._pool_id

    @pool_id.setter
    def pool_id(self, pool_id):
        """Sets the pool_id of this ShowMemberRequest.

        Backend server group ID.

        :param pool_id: The pool_id of this ShowMemberRequest.
        :type: str
        """
        self._pool_id = pool_id

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize model objects nested in lists.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize model objects used as dict values.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Mask sensitive attributes instead of exposing their values.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            # Python 2 only: force UTF-8 as the default encoding.
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ShowMemberRequest):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 25.485714 | 79 | 0.549888 |
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ShowMemberRequest:
    """Request model for querying a single backend server (auto-generated SDK code).

    ``openapi_types`` maps attribute names to attribute types and
    ``attribute_map`` maps attribute names to their JSON keys.
    """
    # Attribute names listed here are masked as "****" by to_dict().
    sensitive_list = []

    openapi_types = {
        'member_id': 'str',
        'pool_id': 'str'
    }

    attribute_map = {
        'member_id': 'member_id',
        'pool_id': 'pool_id'
    }

    def __init__(self, member_id=None, pool_id=None):
        """Initialize the request model; values are set via the property setters."""
        self._member_id = None
        self._pool_id = None
        self.discriminator = None
        self.member_id = member_id
        self.pool_id = pool_id

    @property
    def member_id(self):
        """Gets the member_id (backend server ID) of this request.

        :rtype: str
        """
        return self._member_id

    @member_id.setter
    def member_id(self, member_id):
        """Sets the member_id (backend server ID) of this request.

        :type: str
        """
        self._member_id = member_id

    @property
    def pool_id(self):
        """Gets the pool_id (backend server group ID) of this request.

        :rtype: str
        """
        return self._pool_id

    @pool_id.setter
    def pool_id(self, pool_id):
        """Sets the pool_id (backend server group ID) of this request.

        :type: str
        """
        self._pool_id = pool_id

    def to_dict(self):
        """Returns the model properties as a dict."""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize model objects nested in lists.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize model objects used as dict values.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Mask sensitive attributes instead of exposing their values.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value

        return result

    def to_str(self):
        """Returns the JSON string representation of the model."""
        import simplejson as json
        if six.PY2:
            # Python 2 only: force UTF-8 as the default encoding.
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, ShowMemberRequest):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
| true | true |
1c2dbb45650af032de5cca54d07f7432cdd8c925 | 60,536 | py | Python | tests/test_xpath1_parser.py | linw1995/elementpath | 3a1105a51295a0dc4410a0ac1231ca8700a54db1 | [
"MIT"
] | null | null | null | tests/test_xpath1_parser.py | linw1995/elementpath | 3a1105a51295a0dc4410a0ac1231ca8700a54db1 | [
"MIT"
] | null | null | null | tests/test_xpath1_parser.py | linw1995/elementpath | 3a1105a51295a0dc4410a0ac1231ca8700a54db1 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c), 2018-2019, SISSA (International School for Advanced Studies).
# All rights reserved.
# This file is distributed under the terms of the MIT License.
# See the file 'LICENSE' in the root directory of the present
# distribution, or http://opensource.org/licenses/MIT.
#
# @author Davide Brunato <brunato@sissa.it>
#
#
# Note: Many tests are built using the examples of the XPath standards,
# published by W3C under the W3C Document License.
#
# References:
# http://www.w3.org/TR/1999/REC-xpath-19991116/
# http://www.w3.org/TR/2010/REC-xpath20-20101214/
# http://www.w3.org/TR/2010/REC-xpath-functions-20101214/
# https://www.w3.org/Consortium/Legal/2015/doc-license
# https://www.w3.org/TR/charmod-norm/
#
import unittest
import sys
import io
import math
import pickle
from decimal import Decimal
from collections import namedtuple
from xml.etree import ElementTree
try:
import lxml.etree as lxml_etree
except ImportError:
lxml_etree = None
from elementpath import *
from elementpath.namespaces import XML_NAMESPACE, XSD_NAMESPACE, XSI_NAMESPACE, XPATH_FUNCTIONS_NAMESPACE
XML_GENERIC_TEST = """
<root>
<a id="a_id">
<b>some content</b>
<c> space space \t .</c></a>
</root>"""
XML_DATA_TEST = """
<values>
<a>3.4</a>
<a>20</a>
<a>-10.1</a>
<b>alpha</b>
<c>true</c>
<d>44</d>
</values>"""
# noinspection PyPropertyAccess
class XPath1ParserTest(unittest.TestCase):
namespaces = {
'xml': XML_NAMESPACE,
'xs': XSD_NAMESPACE,
'xsi': XSI_NAMESPACE,
'fn': XPATH_FUNCTIONS_NAMESPACE,
'eg': 'http://www.example.com/ns/',
}
variables = {
'values': [10, 20, 5],
'myaddress': 'admin@example.com',
'word': 'alpha',
}
etree = ElementTree
    def setUp(self):
        # Fresh strict-mode parser plus a sample '(name)' literal token for each test.
        self.parser = XPath1Parser(namespaces=self.namespaces, variables=self.variables, strict=True)
        self.token = XPath1Parser.symbol_table['(name)'](self.parser, 'test')
#
# Helper methods
def check_tokenizer(self, path, expected):
"""
Checks the list of lexemes generated by the parser tokenizer.
:param path: the XPath expression to be checked.
:param expected: a list with lexemes generated by the tokenizer.
"""
self.assertEqual([
lit or op or ref or unexpected
for lit, op, ref, unexpected in self.parser.__class__.tokenizer.findall(path)
], expected)
def check_token(self, symbol, expected_label=None, expected_str=None, expected_repr=None, value=None):
"""
Checks a token class of an XPath parser class. The instance of the token is created
using the value argument and than is checked against other optional arguments.
:param symbol: the string that identifies the token class in the parser's symbol table.
:param expected_label: the expected label for the token instance.
:param expected_str: the expected string conversion of the token instance.
:param expected_repr: the expected string representation of the token instance.
:param value: the value used to create the token instance.
"""
token = self.parser.symbol_table[symbol](self.parser, value)
self.assertEqual(token.symbol, symbol)
if expected_label is not None:
self.assertEqual(token.label, expected_label)
if expected_str is not None:
self.assertEqual(str(token), expected_str)
if expected_repr is not None:
self.assertEqual(repr(token), expected_repr)
def check_tree(self, path, expected):
"""
Checks the tree string representation of a parsed path.
:param path: an XPath expression.
:param expected: the expected result string.
"""
self.assertEqual(self.parser.parse(path).tree, expected)
def check_source(self, path, expected):
"""
Checks the source representation of a parsed path.
:param path: an XPath expression.
:param expected: the expected result string.
"""
self.assertEqual(self.parser.parse(path).source, expected)
    def check_value(self, path, expected=None, context=None):
        """
        Checks the result of the *evaluate* method with an XPath expression. The evaluation
        is applied on the root token of the parsed XPath expression.

        :param path: an XPath expression.
        :param expected: the expected result. Can be a data instance to compare to the result, a type \
        to be used to check the type of the result, a function that accepts the result as argument and \
        returns a boolean value, an exception class that is raised by running the evaluate method.
        :param context: an optional `XPathContext` instance to be passed to evaluate method.
        """
        if context is not None:
            context = context.copy()  # don't consume/mutate the caller's context

        try:
            root_token = self.parser.parse(path)
        except ElementPathError as err:
            # An error at parse time also satisfies an expected exception class.
            if isinstance(expected, type) and isinstance(err, expected):
                return
            raise

        if isinstance(expected, type) and issubclass(expected, Exception):
            self.assertRaises(expected, root_token.evaluate, context)
        elif isinstance(expected, float) and math.isnan(expected):
            # NaN != NaN, so an equality comparison would always fail here.
            self.assertTrue(math.isnan(root_token.evaluate(context)))
        elif not callable(expected):
            self.assertEqual(root_token.evaluate(context), expected)
        elif isinstance(expected, type):
            # Types are callable, so type checks must come after the callable guard.
            value = root_token.evaluate(context)
            self.assertTrue(isinstance(value, expected), "%r is not a %r instance." % (value, expected))
        else:
            self.assertTrue(expected(root_token.evaluate(context)))
def check_select(self, path, expected, context=None):
"""
Checks the materialized result of the *select* method with an XPath expression.
The selection is applied on the root token of the parsed XPath expression.
:param path: an XPath expression.
:param expected: the expected result. Can be a data instance to compare to the result, \
a function that accepts the result as argument and returns a boolean value, an exception \
class that is raised by running the evaluate method.
:param context: an optional `XPathContext` instance to be passed to evaluate method. If no \
context is provided the method is called with a dummy context.
"""
if context is None:
context = XPathContext(root=self.etree.Element(u'dummy_root'))
else:
context = context.copy()
root_token = self.parser.parse(path)
if isinstance(expected, type) and issubclass(expected, Exception):
self.assertRaises(expected, root_token.select, context)
elif not callable(expected):
self.assertEqual(list(root_token.select(context)), expected)
else:
self.assertTrue(expected(list(root_token.parse(path).select(context))))
    def check_selector(self, path, root, expected, namespaces=None, **kwargs):
        """
        Checks using the selector API, namely the *select* function at package level.

        :param path: an XPath expression.
        :param root: an Element or an ElementTree instance.
        :param expected: the expected result. Can be a data instance to compare to the result, a type \
        to be used to check the type of the result, a function that accepts the result as argument and \
        returns a boolean value, an exception class that is raised by running the evaluate method.
        :param namespaces: an optional mapping from prefixes to namespace URIs.
        :param kwargs: other optional arguments for the parser class.
        """
        if isinstance(expected, type) and issubclass(expected, Exception):
            self.assertRaises(expected, select, root, path, namespaces, self.parser.__class__, **kwargs)
        else:
            results = select(root, path, namespaces, self.parser.__class__, **kwargs)
            if isinstance(expected, set):
                # A set expectation compares results ignoring node order.
                self.assertEqual(set(results), expected)
            elif isinstance(expected, float) and math.isnan(expected):
                # NaN != NaN, so an equality comparison would always fail here.
                self.assertTrue(math.isnan(results))
            elif not callable(expected):
                self.assertEqual(results, expected)
            elif isinstance(expected, type):
                # Types are callable, so type checks must come after the callable guard.
                self.assertTrue(isinstance(results, expected))
            else:
                self.assertTrue(expected(results))
# Wrong XPath expression checker shortcuts
def wrong_syntax(self, path):
self.assertRaises(SyntaxError, self.parser.parse, path)
def wrong_value(self, path):
self.assertRaises(ValueError, self.parser.parse, path)
def wrong_type(self, path):
self.assertRaises(TypeError, self.parser.parse, path)
def wrong_name(self, path):
self.assertRaises(NameError, self.parser.parse, path)
#
# Test methods
    @unittest.skipIf(sys.version_info < (3,), "Python 2 pickling is not supported.")
    def test_parser_pickling(self):
        """The parser and its symbol table survive a pickle round-trip unchanged."""
        if getattr(self.parser, 'schema', None) is None:
            obj = pickle.dumps(self.parser)
            parser = pickle.loads(obj)
            obj = pickle.dumps(self.parser.symbol_table)
            symbol_table = pickle.loads(obj)
            self.assertEqual(self.parser, parser)
            self.assertEqual(self.parser.symbol_table, symbol_table)
    def test_xpath_tokenizer(self):
        """Raw tokenization of XPath expressions from the W3C spec and other sources."""
        # tests from the XPath specification
        self.check_tokenizer("*", ['*'])
        self.check_tokenizer("text()", ['text', '(', ')'])
        self.check_tokenizer("@name", ['@', 'name'])
        self.check_tokenizer("@*", ['@', '*'])
        self.check_tokenizer("para[1]", ['para', '[', '1', ']'])
        self.check_tokenizer("para[last()]", ['para', '[', 'last', '(', ')', ']'])
        self.check_tokenizer("*/para", ['*', '/', 'para'])
        self.check_tokenizer("/doc/chapter[5]/section[2]",
                             ['/', 'doc', '/', 'chapter', '[', '5', ']', '/', 'section', '[', '2', ']'])
        self.check_tokenizer("chapter//para", ['chapter', '//', 'para'])
        self.check_tokenizer("//para", ['//', 'para'])
        self.check_tokenizer("//olist/item", ['//', 'olist', '/', 'item'])
        self.check_tokenizer(".", ['.'])
        self.check_tokenizer(".//para", ['.', '//', 'para'])
        self.check_tokenizer("..", ['..'])
        self.check_tokenizer("../@lang", ['..', '/', '@', 'lang'])
        self.check_tokenizer("chapter[title]", ['chapter', '[', 'title', ']'])
        self.check_tokenizer("employee[@secretary and @assistant]",
                             ['employee', '[', '@', 'secretary', '', 'and', '', '@', 'assistant', ']'])

        # additional tests from Python XML etree test cases
        self.check_tokenizer("{http://spam}egg", ['{', 'http', ':', '//', 'spam', '}', 'egg'])
        self.check_tokenizer("./spam.egg", ['.', '/', 'spam.egg'])
        self.check_tokenizer(".//spam:egg", ['.', '//', 'spam', ':', 'egg'])

        # additional tests
        self.check_tokenizer("substring-after()", ['substring-after', '(', ')'])
        self.check_tokenizer("contains('XML','XM')", ['contains', '(', "'XML'", ',', "'XM'", ')'])
        self.check_tokenizer("concat('XML', true(), 10)",
                             ['concat', '(', "'XML'", ',', '', 'true', '(', ')', ',', '', '10', ')'])
        self.check_tokenizer("concat('a', 'b', 'c')", ['concat', '(', "'a'", ',', '', "'b'", ',', '', "'c'", ')'])
        self.check_tokenizer("_last()", ['_last', '(', ')'])
        self.check_tokenizer("last ()", ['last', '', '(', ')'])
        self.check_tokenizer('child::text()', ['child', '::', 'text', '(', ')'])
        self.check_tokenizer('./ /.', ['.', '/', '', '/', '.'])
        self.check_tokenizer('tns :*', ['tns', '', ':', '*'])
    def test_tokens(self):
        """Token classes: label, str() and repr() of the main token kinds."""
        # Literals
        self.check_token('(string)', 'literal', "'hello' string",
                         "_string_literal_token(value='hello')", 'hello')
        self.check_token('(integer)', 'literal', "1999 integer",
                         "_integer_literal_token(value=1999)", 1999)
        self.check_token('(float)', 'literal', "3.1415 float",
                         "_float_literal_token(value=3.1415)", 3.1415)
        self.check_token('(decimal)', 'literal', "217.35 decimal",
                         "_decimal_literal_token(value=217.35)", 217.35)
        self.check_token('(name)', 'literal', "'schema' name",
                         "_name_literal_token(value='schema')", 'schema')

        # Variables
        self.check_token('$', 'operator', "$ variable reference",
                         "_DollarSign_operator_token()")

        # Axes
        self.check_token('self', 'axis', "'self' axis", "_self_axis_token()")
        self.check_token('child', 'axis', "'child' axis", "_child_axis_token()")
        self.check_token('parent', 'axis', "'parent' axis", "_parent_axis_token()")
        self.check_token('ancestor', 'axis', "'ancestor' axis", "_ancestor_axis_token()")
        self.check_token('preceding', 'axis', "'preceding' axis", "_preceding_axis_token()")
        self.check_token('descendant-or-self', 'axis', "'descendant-or-self' axis")
        self.check_token('following-sibling', 'axis', "'following-sibling' axis")
        self.check_token('preceding-sibling', 'axis', "'preceding-sibling' axis")
        self.check_token('ancestor-or-self', 'axis', "'ancestor-or-self' axis")
        self.check_token('descendant', 'axis', "'descendant' axis")
        if self.parser.version == '1.0':
            # These labels differ for XPath 2.0 parsers (run only on 1.0).
            self.check_token('attribute', 'axis', "'attribute' axis")
            self.check_token('following', 'axis', "'following' axis")
            self.check_token('namespace', 'axis', "'namespace' axis")

        # Functions
        self.check_token('position', 'function', "'position' function", "_position_function_token()")

        # Operators
        self.check_token('and', 'operator', "'and' operator", "_and_operator_token()")
        if self.parser.version == '1.0':
            self.check_token(',', 'symbol', "comma symbol", "_Comma_symbol_token()")
        else:
            self.check_token(',', 'operator', "comma operator", "_Comma_operator_token()")
    def test_token_tree(self):
        """Tree (s-expression) representation of parsed expressions."""
        self.check_tree('child::B1', '(child (B1))')
        self.check_tree('A/B//C/D', '(/ (// (/ (A) (B)) (C)) (D))')
        self.check_tree('child::*/child::B1', '(/ (child (*)) (child (B1)))')
        self.check_tree('attribute::name="Galileo"', "(= (attribute (name)) ('Galileo'))")
        self.check_tree('1 + 2 * 3', '(+ (1) (* (2) (3)))')
        self.check_tree('(1 + 2) * 3', '(* (+ (1) (2)) (3))')
        self.check_tree("false() and true()", '(and (false) (true))')
        self.check_tree("false() or true()", '(or (false) (true))')
        self.check_tree("./A/B[C][D]/E", '(/ (/ (/ (.) (A)) ([ ([ (B) (C)) (D))) (E))')
        self.check_tree("string(xml:lang)", '(string (: (xml) (lang)))')
    def test_token_source(self):
        """Normalized source reconstruction of parsed expressions."""
        self.check_source(' child ::B1', 'child::B1')
        self.check_source('false()', 'false()')
        self.check_source("concat('alpha', 'beta', 'gamma')", "concat('alpha', 'beta', 'gamma')")
        self.check_source('1 +2 * 3 ', '1 + 2 * 3')
        self.check_source('(1 + 2) * 3', '(1 + 2) * 3')
        self.check_source(' eg:example ', 'eg:example')
        self.check_source('attribute::name="Galileo"', "attribute::name = 'Galileo'")
        self.check_source(".//eg:a | .//eg:b", '. // eg:a | . // eg:b')
        self.check_source("/A/B[C]", '/ A / B[C]')

        try:
            # Braced URI literals are accepted only in non-strict mode.
            self.parser.strict = False
            self.check_source("{tns1}name", '{tns1}name')
        finally:
            self.parser.strict = True
    def test_wrong_syntax(self):
        """Expressions that must be rejected with a syntax error."""
        self.wrong_syntax('')
        self.wrong_syntax(" \n \n )")
        self.wrong_syntax('child::1')
        self.wrong_syntax("{}egg")
        self.wrong_syntax("./*:*")
        self.wrong_syntax('./ /.')
        self.wrong_syntax(' eg : example ')
    def test_wrong_nargs(self):
        """Function calls with a wrong number of arguments raise a type error."""
        self.wrong_type("boolean()")  # Too few arguments
        self.wrong_type("count(0, 1, 2)")  # Too many arguments
        self.wrong_type("round(2.5, 1.7)")
        self.wrong_type("contains('XPath', 'XP', 20)")
        self.wrong_type("boolean(1, 5)")
# XPath expression tests
    def test_node_selection(self):
        """An unknown tag name selects nothing."""
        self.check_value("mars", [])
    def test_references(self):
        """Prefixed and URI-qualified name references."""
        namespaces = {'tst': "http://xpath.test/ns"}
        root = self.etree.XML("""
        <A xmlns:tst="http://xpath.test/ns">
            <tst:B1 b1="beta1"/>
            <tst:B2/>
            <tst:B3 b2="tst:beta2" b3="beta3"/>
        </A>""")

        # Prefix references
        self.check_tree('eg:unknown', '(: (eg) (unknown))')
        self.check_tree('string(eg:unknown)', '(string (: (eg) (unknown)))')
        self.check_value("fn:true()", True)
        self.check_selector("./tst:B1", root, [root[0]], namespaces=namespaces)
        self.check_selector("./tst:*", root, root[:], namespaces=namespaces)

        # Namespace wildcard works only for XPath > 1.0
        if self.parser.version == '1.0':
            self.check_selector("./*:B2", root, Exception, namespaces=namespaces)
        else:
            self.check_selector("./*:B2", root, [root[1]], namespaces=namespaces)

        # QName URI references (braced notation requires non-strict mode)
        self.parser.strict = False
        self.check_tree('{%s}string' % XSD_NAMESPACE, "({ ('http://www.w3.org/2001/XMLSchema') (string))")
        self.check_tree('string({%s}unknown)' % XSD_NAMESPACE,
                        "(string ({ ('http://www.w3.org/2001/XMLSchema') (unknown)))")
        self.wrong_syntax("{%s" % XSD_NAMESPACE)
        self.check_value("{%s}true()" % XPATH_FUNCTIONS_NAMESPACE, True)
        self.parser.strict = True
        self.wrong_syntax('{%s}string' % XSD_NAMESPACE)

        if not hasattr(self.etree, 'LxmlError') or self.parser.version > '1.0':
            # Do not test with XPath 1.0 on lxml.
            self.check_selector("./{http://www.w3.org/2001/04/xmlenc#}EncryptedData", root, [], strict=False)
            self.check_selector("./{http://xpath.test/ns}B1", root, [root[0]], strict=False)
            self.check_selector("./{http://xpath.test/ns}*", root, root[:], strict=False)
    def test_node_types(self):
        """Node kind tests (node(), comment(), processing-instruction(), text())."""
        document = self.etree.parse(io.StringIO(u'<A/>'))
        element = self.etree.Element('schema')
        attribute = 'id', '0212349350'
        namespace = namedtuple('Namespace', 'prefix uri')('xs', 'http://www.w3.org/2001/XMLSchema')
        comment = self.etree.Comment('nothing important')
        pi = self.etree.ProcessingInstruction('action', 'nothing to do')
        text = u'aldebaran'
        context = XPathContext(element)
        self.check_select("node()", [document.getroot()], context=XPathContext(document))
        self.check_selector("node()", element, [])
        context.item = attribute
        self.check_select("self::node()", [attribute], context)
        context.item = namespace
        self.check_select("self::node()", [namespace], context)
        context.item = comment
        self.check_select("self::node()", [comment], context)
        self.check_select("self::comment()", [comment], context)
        context.item = pi
        self.check_select("self::node()", [pi], context)
        self.check_select("self::processing-instruction()", [pi], context)
        context.item = text
        self.check_select("self::node()", [text], context)
        self.check_select("text()", [], context)  # Selects the children
        self.check_selector("node()", self.etree.XML('<author>Dickens</author>'), ['Dickens'])
        self.check_selector("text()", self.etree.XML('<author>Dickens</author>'), ['Dickens'])

        root = self.etree.XML('<author>Dickens</author>')
        if self.etree is not lxml_etree:
            # Skip lxml test because lxml's XPath doesn't include document root
            self.check_selector("//self::node()", root, [root, root, 'Dickens'])
            self.check_selector("//self::text()", root, ['Dickens'])
    def test_node_set_id_function(self):
        """id() selects elements by their xml:id attribute."""
        # XPath 1.0 id() function: https://www.w3.org/TR/1999/REC-xpath-19991116/#function-id
        root = self.etree.XML('<A><B1 xml:id="foo"/><B2/><B3 xml:id="bar"/><B4 xml:id="baz"/></A>')
        self.check_selector('id("foo")', root, [root[0]])
    def test_node_set_functions(self):
        """position(), last(), name(), local-name() and namespace-uri()."""
        root = self.etree.XML('<A><B1><C1/><C2/></B1><B2/><B3><C3/><C4/><C5/></B3></A>')
        # context positioned on root[1] with size=3, position=3
        context = XPathContext(root, item=root[1], size=3, position=3)
        self.check_value("position()", 0)
        self.check_value("position()", 4, context=context)
        self.check_value("position()<=2", True)
        self.check_value("position()<=2", False, context=context)
        self.check_value("position()=4", True, context=context)
        self.check_value("position()=3", False, context=context)
        self.check_value("last()", 0)
        self.check_value("last()", 3, context=context)
        self.check_value("last()-1", 2, context=context)

        self.check_selector("name(.)", root, 'A')
        self.check_selector("name(A)", root, '')
        self.check_selector("local-name(A)", root, '')
        self.check_selector("namespace-uri(A)", root, '')
        self.check_selector("name(B2)", root, 'B2')
        self.check_selector("local-name(B2)", root, 'B2')
        self.check_selector("namespace-uri(B2)", root, '')
        if self.parser.version <= '1.0':
            self.check_selector("name(*)", root, 'B1')

        root = self.etree.XML('<tst:A xmlns:tst="http://xpath.test/ns"><tst:B1/></tst:A>')
        self.check_selector("name(.)", root, 'tst:A', namespaces={'tst': "http://xpath.test/ns"})
        self.check_selector("local-name(.)", root, 'A')
        self.check_selector("namespace-uri(.)", root, 'http://xpath.test/ns')
        self.check_selector("name(tst:B1)", root, 'tst:B1', namespaces={'tst': "http://xpath.test/ns"})
        self.check_selector("name(tst:B1)", root, 'tst:B1', namespaces={'tst': "http://xpath.test/ns", '': ''})
    def test_string_function(self):
        """string() constructor function; empty sequence allowed only in XPath 2.0."""
        self.check_value("string(10.0)", '10.0')
        if self.parser.version == '1.0':
            self.wrong_syntax("string(())")
        else:
            self.check_value("string(())", '')
    def test_string_length_function(self):
        """string-length() function, with XPath 1.0 vs 2.0 argument handling."""
        root = self.etree.XML(XML_GENERIC_TEST)

        self.check_value("string-length('hello world')", 11)
        self.check_value("string-length('')", 0)
        self.check_selector("a[string-length(@id) = 4]", root, [root[0]])
        self.check_selector("a[string-length(@id) = 3]", root, [])
        self.check_selector("//b[string-length(.) = 12]", root, [root[0][0]])
        self.check_selector("//b[string-length(.) = 10]", root, [])
        self.check_selector("//none[string-length(.) = 10]", root, [])
        self.check_value('fn:string-length("Harp not on that string, madam; that is past.")', 45)

        if self.parser.version == '1.0':
            self.wrong_syntax("string-length(())")
            self.check_value("string-length(12345)", 5)
        else:
            self.check_value("string-length(())", 0)
            self.check_value("string-length(('alpha'))", 5)
            self.check_value("string-length(('alpha'))", 5)
            self.wrong_type("string-length(12345)")
            self.wrong_type("string-length(('12345', 'abc'))")
            # XPath 1.0 compatibility mode restores the loose behavior.
            self.parser.compatibility_mode = True
            self.check_value("string-length(('12345', 'abc'))", 5)
            self.check_value("string-length(12345)", 5)
            self.parser.compatibility_mode = False
    def test_normalize_space_function(self):
        """normalize-space() function."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("normalize-space(' hello \t world ')", 'hello world')
        self.check_selector("//c[normalize-space(.) = 'space space .']", root, [root[0][1]])
        self.check_value('fn:normalize-space(" The wealthy curled darlings of our nation. ")',
                         'The wealthy curled darlings of our nation.')
        if self.parser.version == '1.0':
            self.wrong_syntax('fn:normalize-space(())')
            self.check_value("normalize-space(1000)", '1000')
            self.check_value("normalize-space(true())", 'True')
        else:
            self.check_value('fn:normalize-space(())', '')
            self.wrong_type("normalize-space(true())")
            self.wrong_type("normalize-space(('\ta b c ', 'other'))")
            # XPath 1.0 compatibility mode restores the loose behavior.
            self.parser.compatibility_mode = True
            self.check_value("normalize-space(true())", 'True')
            self.check_value("normalize-space(('\ta b\tc ', 'other'))", 'a b c')
            self.parser.compatibility_mode = False
    def test_translate_function(self):
        """translate() function."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("translate('hello world!', 'hw', 'HW')", 'Hello World!')
        self.check_value("translate('hello world!', 'hwx', 'HW')", 'Hello World!')
        self.check_value("translate('hello world!', 'hw!', 'HW')", 'Hello World')
        self.check_selector("a[translate(@id, 'id', 'no') = 'a_no']", root, [root[0]])
        self.check_selector("a[translate(@id, 'id', 'na') = 'a_no']", root, [])
        self.check_selector("//b[translate(., 'some', 'one2') = 'one2 cnnt2nt']", root, [root[0][0]])
        self.check_selector("//b[translate(., 'some', 'two2') = 'one2 cnnt2nt']", root, [])
        self.check_selector("//none[translate(., 'some', 'two2') = 'one2 cnnt2nt']", root, [])
        self.check_value('fn:translate("bar","abc","ABC")', 'BAr')
        self.check_value('fn:translate("--aaa--","abc-","ABC")', 'AAA')
        self.check_value('fn:translate("abcdabc", "abc", "AB")', "ABdAB")
        if self.parser.version > '1.0':
            self.check_value("translate((), 'hw', 'HW')", '')
    def test_variable_substitution(self):
        """Variable references resolved from selector-level variables."""
        root = self.etree.XML('<ups-units>'
                              '  <unit><power>40kW</power></unit>'
                              '  <unit><power>20kW</power></unit>'
                              '  <unit><power>30kW</power><model>XYZ</model></unit>'
                              '</ups-units>')
        variables = {'ups1': root[0], 'ups2': root[1], 'ups3': root[2]}
        self.check_selector('string($ups1/power)', root, '40kW', variables=variables)
    def test_substring_function(self):
        """substring() function, including XPath 1.0/2.0 NaN and infinity edge cases."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("substring('Preem Palver', 1)", 'Preem Palver')
        self.check_value("substring('Preem Palver', 2)", 'reem Palver')
        self.check_value("substring('Preem Palver', 7)", 'Palver')
        self.check_value("substring('Preem Palver', 1, 5)", 'Preem')
        self.wrong_type("substring('Preem Palver', 'c', 5)")
        self.wrong_type("substring('Preem Palver', 1, '5')")
        self.check_selector("a[substring(@id, 1) = 'a_id']", root, [root[0]])
        self.check_selector("a[substring(@id, 2) = '_id']", root, [root[0]])
        self.check_selector("a[substring(@id, 3) = '_id']", root, [])
        self.check_selector("//b[substring(., 1, 5) = 'some ']", root, [root[0][0]])
        self.check_selector("//b[substring(., 1, 6) = 'some ']", root, [])
        self.check_selector("//none[substring(., 1, 6) = 'some ']", root, [])
        self.check_value("substring('12345', 1.5, 2.6)", '234')
        self.check_value("substring('12345', 0, 3)", '12')
        if self.parser.version == '1.0':
            # 0 div 0 is NaN, 1 div 0 is +Inf in XPath 1.0 arithmetic.
            self.check_value("substring('12345', 0 div 0, 3)", '')
            self.check_value("substring('12345', 1, 0 div 0)", '')
            self.check_value("substring('12345', -42, 1 div 0)", '12345')
            self.check_value("substring('12345', -1 div 0, 1 div 0)", '')
        else:
            self.check_value('fn:substring("motor car", 6)', ' car')
            self.check_value('fn:substring("metadata", 4, 3)', 'ada')
            self.check_value('fn:substring("12345", 1.5, 2.6)', '234')
            self.check_value('fn:substring("12345", 0, 3)', '12')
            self.check_value('fn:substring("12345", 5, -3)', '')
            self.check_value('fn:substring("12345", -3, 5)', '1')
            self.check_value('fn:substring("12345", 0 div 0E0, 3)', '')
            self.check_value('fn:substring("12345", 1, 0 div 0E0)', '')
            self.check_value('fn:substring((), 1, 3)', '')
            self.check_value('fn:substring("12345", -42, 1 div 0E0)', '12345')
            self.check_value('fn:substring("12345", -1 div 0E0, 1 div 0E0)', '')
            self.check_value('fn:substring(("alpha"), 1, 3)', 'alp')
            self.check_value('fn:substring(("alpha"), (1), 3)', 'alp')
            self.check_value('fn:substring(("alpha"), 1, (3))', 'alp')
            self.wrong_type('fn:substring(("alpha"), (1, 2), 3)')
            self.wrong_type('fn:substring(("alpha", "beta"), 1, 3)')
            # XPath 1.0 compatibility mode takes the first item of a sequence.
            self.parser.compatibility_mode = True
            self.check_value('fn:substring(("alpha", "beta"), 1, 3)', 'alp')
            self.parser.compatibility_mode = False
    def test_starts_with_function(self):
        """starts-with() function."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("starts-with('Hello World', 'Hello')", True)
        self.check_value("starts-with('Hello World', 'hello')", False)
        self.check_selector("a[starts-with(@id, 'a_i')]", root, [root[0]])
        self.check_selector("a[starts-with(@id, 'a_b')]", root, [])
        self.check_selector("//b[starts-with(., 'some')]", root, [root[0][0]])
        self.check_selector("//b[starts-with(., 'none')]", root, [])
        self.check_selector("//none[starts-with(., 'none')]", root, [])
        self.check_selector("a[starts-with(@id, 'a_id')]", root, [root[0]])
        self.check_selector("a[starts-with(@id, 'a')]", root, [root[0]])
        self.check_selector("a[starts-with(@id, 'a!')]", root, [])
        self.check_selector("//b[starts-with(., 'some')]", root, [root[0][0]])
        self.check_selector("//b[starts-with(., 'a')]", root, [])
        self.check_value("starts-with('', '')", True)
        self.check_value('fn:starts-with("abracadabra", "abra")', True)
        self.check_value('fn:starts-with("abracadabra", "a")', True)
        self.check_value('fn:starts-with("abracadabra", "bra")', False)
        if self.parser.version == '1.0':
            self.wrong_syntax("starts-with((), ())")
            self.check_value("starts-with('1999', 19)", True)
        else:
            self.check_value('fn:starts-with("tattoo", "tat")', True)
            self.check_value('fn:starts-with ( "tattoo", "att")', False)
            self.check_value('fn:starts-with ((), ())', True)
            self.wrong_type("starts-with('1999', 19)")
            # XPath 1.0 compatibility mode restores implicit conversions.
            self.parser.compatibility_mode = True
            self.check_value("starts-with('1999', 19)", True)
            self.parser.compatibility_mode = False
    def test_concat_function(self):
        """concat() function."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("concat('alpha', 'beta', 'gamma')", 'alphabetagamma')
        self.check_value("concat('', '', '')", '')
        self.check_value("concat('alpha', 10, 'gamma')", 'alpha10gamma')
        self.check_value("concat('alpha', 'beta', 'gamma')", 'alphabetagamma')
        self.check_value("concat('alpha', 10, 'gamma')", 'alpha10gamma')
        self.check_value("concat('alpha', 'gamma')", 'alphagamma')
        self.check_selector("a[concat(@id, '_foo') = 'a_id_foo']", root, [root[0]])
        self.check_selector("a[concat(@id, '_fo') = 'a_id_foo']", root, [])
        self.check_selector("//b[concat(., '_foo') = 'some content_foo']", root, [root[0][0]])
        self.check_selector("//b[concat(., '_fo') = 'some content_foo']", root, [])
        self.check_selector("//none[concat(., '_fo') = 'some content_foo']", root, [])
        self.wrong_syntax("concat()")
        self.wrong_syntax("concat()")
        if self.parser.version == '1.0':
            self.wrong_syntax("concat((), (), ())")
        else:
            self.check_value("concat((), (), ())", '')
            self.check_value("concat(('a'), (), ('c'))", 'ac')
            self.wrong_type("concat(('a', 'b'), (), ('c'))")
            # XPath 1.0 compatibility mode takes the first item of a sequence.
            self.parser.compatibility_mode = True
            self.check_value("concat(('a', 'b'), (), ('c'))", 'ac')
            self.parser.compatibility_mode = False
    def test_contains_function(self):
        """Test the XPath 'contains' function, with version-specific sequence handling."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("contains('XPath','XP')", True)
        self.check_value("contains('XP','XPath')", False)
        self.check_value("contains('', '')", True)
        self.check_selector("a[contains(@id, '_i')]", root, [root[0]])
        self.check_selector("a[contains(@id, '_b')]", root, [])
        self.check_selector("//b[contains(., 'c')]", root, [root[0][0]])
        self.check_selector("//b[contains(., ' -con')]", root, [])
        self.check_selector("//none[contains(., ' -con')]", root, [])
        if self.parser.version == '1.0':
            # XPath 1.0: no sequence syntax; numeric args are implicitly converted
            self.wrong_syntax("contains((), ())")
            self.check_value("contains('XPath', 20)", False)
        else:
            self.check_value('fn:contains ( "tattoo", "t")', True)
            self.check_value('fn:contains ( "tattoo", "ttt")', False)
            self.check_value('fn:contains ( "", ())', True)
            self.wrong_type("contains('XPath', 20)")
            # 1.0 compatibility mode restores the implicit number-to-string cast
            self.parser.compatibility_mode = True
            self.check_value("contains('XPath', 20)", False)
            self.parser.compatibility_mode = False
    def test_substring_before_function(self):
        """Test the XPath 'substring-before' function, with version-specific branches."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("substring-before('Wolfgang Amadeus Mozart', 'Wolfgang')", '')
        self.check_value("substring-before('Wolfgang Amadeus Mozart', 'Amadeus')", 'Wolfgang ')
        self.check_value('substring-before("1999/04/01","/")', '1999')
        self.check_selector("a[substring-before(@id, 'a') = '']", root, [root[0]])
        self.check_selector("a[substring-before(@id, 'id') = 'a_']", root, [root[0]])
        self.check_selector("a[substring-before(@id, 'id') = '']", root, [])
        self.check_selector("//b[substring-before(., ' ') = 'some']", root, [root[0][0]])
        self.check_selector("//b[substring-before(., 'con') = 'some']", root, [])
        self.check_selector("//none[substring-before(., 'con') = 'some']", root, [])
        if self.parser.version == '1.0':
            # XPath 1.0 casts the numeric argument to a string
            self.check_value("substring-before('2017-10-27', 10)", '2017-')
            self.wrong_syntax("fn:substring-before((), ())")
        else:
            self.check_value('fn:substring-before ( "tattoo", "attoo")', 't')
            self.check_value('fn:substring-before ( "tattoo", "tatto")', '')
            self.check_value('fn:substring-before ((), ())', '')
            self.wrong_type("substring-before('2017-10-27', 10)")
            # compatibility mode restores the implicit cast
            self.parser.compatibility_mode = True
            self.check_value("substring-before('2017-10-27', 10)", '2017-')
            self.parser.compatibility_mode = False
def test_substring_after_function(self):
root = self.etree.XML(XML_GENERIC_TEST)
self.check_value("substring-after('Wolfgang Amadeus Mozart', 'Amadeus ')", 'Mozart')
self.check_value("substring-after('Wolfgang Amadeus Mozart', 'Mozart')", '')
self.check_value("substring-after('', '')", '')
self.check_value("substring-after('Mozart', '')", 'Mozart')
self.check_value('substring-after("1999/04/01","/")', '04/01')
self.check_value('substring-after("1999/04/01","19")', '99/04/01')
self.check_value("substring-after('Wolfgang Amadeus Mozart', 'Amadeus ')", 'Mozart')
self.check_value("substring-after('Wolfgang Amadeus Mozart', 'Mozart')", '')
self.check_selector("a[substring-after(@id, 'a') = '_id']", root, [root[0]])
self.check_selector("a[substring-after(@id, 'id') = '']", root, [root[0]])
self.check_selector("a[substring-after(@id, 'i') = '']", root, [])
self.check_selector("//b[substring-after(., ' ') = 'content']", root, [root[0][0]])
self.check_selector("//b[substring-after(., 'con') = 'content']", root, [])
self.check_selector("//none[substring-after(., 'con') = 'content']", root, [])
if self.parser.version == '1.0':
self.wrong_syntax("fn:substring-after((), ())")
else:
self.check_value('fn:substring-after("tattoo", "tat")', 'too')
self.check_value('fn:substring-after("tattoo", "tattoo")', '')
self.check_value("fn:substring-after((), ())", '')
self.wrong_type("substring-after('2017-10-27', 10)")
self.parser.compatibility_mode = True
self.check_value("substring-after('2017-10-27', 10)", '-27')
self.parser.compatibility_mode = False
    def test_boolean_functions(self):
        """Test true(), false(), not() and boolean() conversion rules."""
        self.check_value("true()", True)
        self.check_value("false()", False)
        self.check_value("not(false())", True)
        self.check_value("not(true())", False)
        self.check_value("boolean(0)", False)
        self.check_value("boolean(1)", True)
        self.check_value("boolean(-1)", True)
        self.check_value("boolean('hello!')", True)
        self.check_value("boolean(' ')", True)
        self.check_value("boolean('')", False)
        if self.parser.version == '1.0':
            # empty sequences are XPath 2.0 syntax
            self.wrong_syntax("boolean(())")
        else:
            self.check_value("boolean(())", False)
    def test_lang_function(self):
        """Test the lang() function against xml:lang attributes (inherited, case, subtags)."""
        # From https://www.w3.org/TR/1999/REC-xpath-19991116/#section-Boolean-Functions
        self.check_selector('lang("en")', self.etree.XML('<para xml:lang="en"/>'), True)
        self.check_selector('lang("en")', self.etree.XML('<div xml:lang="en"><para/></div>'), True)
        self.check_selector('lang("en")', self.etree.XML('<para xml:lang="EN"/>'), True)
        self.check_selector('lang("en")', self.etree.XML('<para xml:lang="en-us"/>'), True)
        self.check_selector('lang("en")', self.etree.XML('<para xml:lang="it"/>'), False)
    def test_logical_expressions(self):
        """Test 'and'/'or' operators with boolean, numeric and name operands."""
        self.check_value("false() and true()", False)
        self.check_value("false() or true()", True)
        self.check_value("true() or false()", True)
        self.check_value("true() and true()", True)
        self.check_value("1 and 0", False)
        self.check_value("1 and 1", True)
        self.check_value("1 and 'jupiter'", True)
        self.check_value("0 and 'mars'", False)
        # bare name 'mars' selects an empty node-set, which is falsy
        self.check_value("1 and mars", False)
    def test_comparison_operators(self):
        """Test =, !=, <, <=, >, >= on numbers, booleans and node-sets."""
        self.check_value("0.05 = 0.05", True)
        self.check_value("19.03 != 19.02999", True)
        self.check_value("-1.0 = 1.0", False)
        self.check_value("1 <= 2", True)
        self.check_value("5 >= 9", False)
        self.check_value("5 > 3", True)
        self.check_value("5 < 20.0", True)
        self.check_value("false() = 1", False)
        self.check_value("0 = false()", True)
        self.check_value("2 * 2 = 4", True)
        # node-set comparisons: true if ANY pair of selected values satisfies the test
        root = self.etree.XML('<table>'
                              '    <unit id="1"><cost>50</cost></unit>'
                              '    <unit id="2"><cost>30</cost></unit>'
                              '    <unit id="3"><cost>20</cost></unit>'
                              '    <unit id="2"><cost>40</cost></unit>'
                              '</table>')
        self.check_selector("/table/unit[2]/cost <= /table/unit[1]/cost", root, True)
        self.check_selector("/table/unit[2]/cost > /table/unit[position()!=2]/cost", root, True)
        self.check_selector("/table/unit[3]/cost > /table/unit[position()!=3]/cost", root, False)
        self.check_selector(". = 'Dickens'", self.etree.XML('<author>Dickens</author>'), True)
def test_numerical_expressions(self):
self.check_value("9", 9)
self.check_value("-3", -3)
self.check_value("7.1", Decimal('7.1'))
self.check_value("0.45e3", 0.45e3)
self.check_value(" 7+5 ", 12)
self.check_value("8 - 5", 3)
self.check_value("-8 - 5", -13)
self.check_value("5 div 2", 2.5)
self.check_value("-3 * 7", -21)
self.check_value("9 - 1 + 6", 14)
self.check_value("(5 * 7) + 9", 44)
self.check_value("-3 * 7", -21)
    def test_numerical_add_operator(self):
        """Test the '+' operator, including version-specific handling of non-numeric nodes."""
        self.check_value("3 + 8", 11)
        self.check_value("9 - 5.0", 4)
        root = self.etree.XML(XML_DATA_TEST)
        if self.parser.version == '1.0':
            # XPath 1.0 converts strings to numbers; non-numeric values yield NaN
            self.check_value("'9' + 5.0", 14)
            self.check_selector("/values/a + 2", root, 5.4)
            self.check_value("/values/b + 2", float('nan'), context=XPathContext(root))
        else:
            # XPath 2.0 raises a type error for multi-item or non-numeric operands
            self.check_selector("/values/a + 2", root, TypeError)
            self.check_value("/values/b + 2", TypeError, context=XPathContext(root))
            self.check_selector("/values/d + 3", root, 47)
    def test_numerical_mod_operator(self):
        """Test the 'mod' operator, including version-specific handling of non-numeric nodes."""
        self.check_value("11 mod 3", 2)
        self.check_value("4.5 mod 1.2", Decimal('0.9'))
        self.check_value("1.23E2 mod 0.6E1", 3.0E0)
        root = self.etree.XML(XML_DATA_TEST)
        if self.parser.version == '1.0':
            # XPath 1.0: non-numeric values yield NaN instead of raising
            self.check_selector("/values/a mod 2", root, 1.4)
            self.check_value("/values/b mod 2", float('nan'), context=XPathContext(root))
        else:
            self.check_selector("/values/a mod 2", root, TypeError)
            self.check_value("/values/b mod 2", TypeError, context=XPathContext(root))
            self.check_selector("/values/d mod 3", root, 2)
    def test_number_function(self):
        """Test the number() function on context items, literals and node-sets."""
        root = self.etree.XML('<root>15</root>')
        # number() without a context cannot be evaluated
        self.check_value("number()", MissingContextError)
        self.check_value("number()", 15, context=XPathContext(root))
        self.check_value("number()", 15, context=XPathContext(root, item=root.text))
        self.check_value("number(.)", 15, context=XPathContext(root))
        self.check_value("number(5.0)", 5.0)
        self.check_value("number('text')", math.isnan)
        self.check_value("number('-11')", -11)
        self.check_selector("number(9)", root, 9.0)
        if self.parser.version == '1.0':
            self.wrong_syntax("number(())")
        else:
            self.check_value("number(())", float('nan'), context=XPathContext(root))
            root = self.etree.XML(XML_DATA_TEST)
            # XPath 2.0 allows a function call as a path step
            self.check_selector("/values/a/number()", root, [3.4, 20.0, -10.1])
            results = select(root, "/values/*/number()", parser=self.parser.__class__)
            self.assertEqual(results[:3], [3.4, 20.0, -10.1])
            self.assertTrue(math.isnan(results[3]) and math.isnan(results[4]))
            self.check_selector("number(/values/d)", root, 44.0)
            self.check_selector("number(/values/a)", root, TypeError)
    def test_count_function(self):
        """Test the count() function on element and attribute node-sets and unions."""
        root = self.etree.XML('<A><B><C/><C/></B><B/><B><C/><C/><C/></B></A>')
        self.check_selector("count(B)", root, 3)
        self.check_selector("count(.//C)", root, 5)
        root = self.etree.XML('<value max="10" min="0">5</value>')
        self.check_selector("count(@avg)", root, 0)
        self.check_selector("count(@max)", root, 1)
        self.check_selector("count(@min)", root, 1)
        self.check_selector("count(@min | @max)", root, 2)
        self.check_selector("count(@min | @avg)", root, 1)
        self.check_selector("count(@top | @avg)", root, 0)
        self.check_selector("count(@min | @max) = 1", root, False)
        self.check_selector("count(@min | @max) = 2", root, True)
    def test_sum_function(self):
        """Test the sum() function on variables, sequences and node-sets."""
        root = self.etree.XML(XML_DATA_TEST)
        # $values comes from the class-level *variables* mapping: [10, 20, 5]
        self.check_value("sum($values)", 35)
        if self.parser.version == '1.0':
            self.wrong_syntax("sum(())")
        else:
            self.check_value("sum(())", 0)
            self.check_value("sum((), ())", [])
        self.check_selector("sum(/values/a)", root, 13.299999999999999)
        # summing a node-set containing a non-numeric value yields NaN
        self.check_selector("sum(/values/*)", root, float('nan'))
    def test_ceiling_function(self):
        """Test the ceiling() function, including XPath 2.0 sequence arguments."""
        root = self.etree.XML(XML_DATA_TEST)
        self.check_value("ceiling(10.5)", 11)
        self.check_value("ceiling(-10.5)", -10)
        self.check_selector("//a[ceiling(.) = 10]", root, [])
        self.check_selector("//a[ceiling(.) = -10]", root, [root[2]])
        if self.parser.version == '1.0':
            self.wrong_syntax("ceiling(())")
        else:
            self.check_value("ceiling(())", [])
            self.check_value("ceiling((10.5))", 11)
            # a sequence of more than one item is a type error
            self.wrong_type("ceiling((10.5, 17.3))")
    def test_floor_function(self):
        """Test the floor() function, including XPath 2.0 sequence arguments."""
        root = self.etree.XML(XML_DATA_TEST)
        self.check_value("floor(10.5)", 10)
        self.check_value("floor(-10.5)", -11)
        self.check_selector("//a[floor(.) = 10]", root, [])
        self.check_selector("//a[floor(.) = 20]", root, [root[1]])
        if self.parser.version == '1.0':
            self.wrong_syntax("floor(())")
            self.check_selector("//ab[floor(.) = 10]", root, [])
        else:
            self.check_value("floor(())", [])
            self.check_value("floor((10.5))", 10)
            # a sequence of more than one item is a type error
            self.wrong_type("floor((10.5, 17.3))")
    def test_round_function(self):
        """Test the round() function (round-half-toward-positive-infinity semantics)."""
        self.check_value("round(2.5)", 3)
        self.check_value("round(2.4999)", 2)
        # -2.5 rounds toward positive infinity, hence -2
        self.check_value("round(-2.5)", -2)
        if self.parser.version == '1.0':
            self.wrong_syntax("round(())")
        else:
            self.check_value("round(())", [])
            self.check_value("round((10.5))", 11)
            self.wrong_type("round((2.5, 12.2))")
    def test_context_variables(self):
        """Test variable references ($name) resolved from the dynamic context."""
        root = self.etree.XML('<A><B1><C/></B1><B2/><B3><C1/><C2/></B3></A>')
        context = XPathContext(root, variables={'alpha': 10, 'id': '19273222'})
        self.check_value("$alpha", None)  # Do not raise if the dynamic context is None
        self.check_value("$alpha", 10, context=context)
        # an undefined variable raises a name error when a context is provided
        self.check_value("$beta", NameError, context=context)
        self.check_value("$id", '19273222', context=context)
        # a variable reference cannot be called like a function
        self.wrong_syntax("$id()")
    def test_child_operator(self):
        """Test the '/' child step operator with names, wildcards and explicit axis."""
        root = self.etree.XML('<A><B1><C1/></B1><B2/><B3><C1/><C2/></B3></A>')
        self.check_selector('/', root, [])
        self.check_selector('/B1', root, [])
        self.check_selector('/A1', root, [])
        self.check_selector('/A', root, [root])
        self.check_selector('/A/B1', root, [root[0]])
        self.check_selector('/A/*', root, [root[0], root[1], root[2]])
        self.check_selector('/*/*', root, [root[0], root[1], root[2]])
        self.check_selector('/A/B1/C1', root, [root[0][0]])
        self.check_selector('/A/B1/*', root, [root[0][0]])
        self.check_selector('/A/B3/*', root, [root[2][0], root[2][1]])
        self.check_selector('child::*/child::C1', root, [root[0][0], root[2][0]])
        self.check_selector('/A/child::B3', root, [root[2]])
        self.check_selector('/A/child::C1', root, [])
    def test_context_item_expression(self):
        """Test the '.' context item expression in various path positions."""
        root = self.etree.XML('<A><B1><C/></B1><B2/><B3><C1/><C2/></B3></A>')
        self.check_selector('.', root, [root])
        self.check_selector('/././.', root, [])
        self.check_selector('/A/.', root, [root])
        self.check_selector('/A/B1/.', root, [root[0]])
        self.check_selector('/A/B1/././.', root, [root[0]])
        # a path step on a non-node item is a type error
        self.check_selector('1/.', root, TypeError)
    def test_self_axis(self):
        """Test the 'self' axis with node() and text() node tests."""
        root = self.etree.XML('<A>A text<B1>B1 text</B1><B2/><B3>B3 text</B3></A>')
        self.check_selector('self::node()', root, [root])
        # the context node is an element, so self::text() selects nothing
        self.check_selector('self::text()', root, [])
    def test_child_axis(self):
        """Test the explicit 'child' axis with names, node tests and namespaces."""
        root = self.etree.XML('<A>A text<B1>B1 text</B1><B2/><B3>B3 text</B3></A>')
        self.check_selector('child::B1', root, [root[0]])
        self.check_selector('child::A', root, [])
        self.check_selector('child::text()', root, ['A text'])
        self.check_selector('child::node()', root, ['A text'] + root[:])
        self.check_selector('child::*', root, root[:])
        root = self.etree.XML('<A xmlns:ns="http://www.example.com/ns/"><ns:B1/><B2/></A>')
        self.check_selector('child::eg:A', root, [], namespaces={'eg': 'http://www.example.com/ns/'})
        self.check_selector('child::eg:B1', root, [root[0]], namespaces={'eg': 'http://www.example.com/ns/'})
def test_descendant_axis(self):
root = self.etree.XML('<A><B1><C/></B1><B2/><B3><C1/><C2/></B3></A>')
self.check_selector('descendant::node()', root, [e for e in root.iter()][1:])
self.check_selector('/descendant::node()', root, [e for e in root.iter()])
def test_descendant_or_self_axis(self):
root = self.etree.XML('<A><B1><C/></B1><B2/><B3><C/><C1/></B3></A>')
self.check_selector('descendant-or-self::node()', root, [e for e in root.iter()])
self.check_selector('descendant-or-self::node()/.', root, [e for e in root.iter()])
    def test_double_slash_shortcut(self):
        """Test the '//' abbreviation for /descendant-or-self::node()/."""
        root = self.etree.XML('<A><B1><C/></B1><B2/><B3><C/><C1/></B3></A>')
        self.check_selector('//.', root, [e for e in root.iter()])
        self.check_selector('/A//.', root, [e for e in root.iter()])
        self.check_selector('/A//self::node()', root, [e for e in root.iter()])
        self.check_selector('//C1', root, [root[2][1]])
        self.check_selector('//B2', root, [root[1]])
        self.check_selector('//C', root, [root[0][0], root[2][0]])
        self.check_selector('//*', root, [e for e in root.iter()])
        # Issue #14: '//' after a nested element must not re-match the context node
        root = self.etree.XML("""
        <pm>
            <content>
                <pmEntry>
                    <pmEntry pmEntryType="pm001">
                    </pmEntry>
                </pmEntry>
            </content>
        </pm>""")
        self.check_selector('/pm/content/pmEntry/pmEntry//pmEntry[@pmEntryType]', root, [])
    def test_following_axis(self):
        """Test the 'following' axis (document order, excluding descendants/ancestors)."""
        root = self.etree.XML('<A><B1><C1/></B1><B2/><B3><C1/><C2/></B3><B4><C1><D1/></C1></B4></A>')
        self.check_selector('/A/B1/C1/following::*', root, [
            root[1], root[2], root[2][0], root[2][1], root[3], root[3][0], root[3][0][0]
        ])
        self.check_selector('/A/B1/following::C1', root, [root[2][0], root[3][0]])
    def test_following_sibling_axis(self):
        """Test the 'following-sibling' axis with wildcard and named node tests."""
        root = self.etree.XML('<A><B1><C1/><C2/><C3/></B1><B2><C1/><C2/><C3/><C4/></B2></A>')
        self.check_selector('/A/B1/C1/following-sibling::*', root, [root[0][1], root[0][2]])
        self.check_selector('/A/B2/C1/following-sibling::*', root, [root[1][1], root[1][2], root[1][3]])
        self.check_selector('/A/B1/C1/following-sibling::C3', root, [root[0][2]])
    def test_attribute_abbreviation_and_axis(self):
        """Test the 'attribute' axis and its '@' abbreviation."""
        root = self.etree.XML('<A id="1" a="alpha"><B1 b1="beta1"/><B2/><B3 b2="beta2" b3="beta3"/></A>')
        self.check_selector('/A/B1/attribute::*', root, ['beta1'])
        self.check_selector('/A/B1/@*', root, ['beta1'])
        # multiple attributes are checked as sets because their order is not defined
        self.check_selector('/A/B3/attribute::*', root, {'beta2', 'beta3'})
        self.check_selector('/A/attribute::*', root, {'1', 'alpha'})
        root = self.etree.XML('<value choice="int">10</value>')
        self.check_selector('@choice', root, ['int'])
        root = self.etree.XML('<ns:value xmlns:ns="ns" choice="int">10</ns:value>')
        self.check_selector('@choice', root, ['int'])
        self.check_selector('@choice="int"', root, True)
    def test_namespace_axis(self):
        """Test the 'namespace' axis: in-scope namespaces plus the parser defaults."""
        root = self.etree.XML('<A xmlns:tst="http://xpath.test/ns"><tst:B1/></A>')
        namespaces = list(self.parser.DEFAULT_NAMESPACES.items()) + [('tst', 'http://xpath.test/ns')]
        self.check_selector('/A/namespace::*', root, expected=set(namespaces), namespaces=namespaces[-1:])
    def test_parent_abbreviation_and_axis(self):
        """Test the 'parent' axis and its '..' abbreviation."""
        root = self.etree.XML('<A><B1><C1/></B1><B2/><B3><C1/><C2/></B3><B4><C3><D1/></C3></B4></A>')
        self.check_selector('/A/*/C2/..', root, [root[2]])
        self.check_selector('/A/*/*/..', root, [root[0], root[2], root[3]])
        self.check_selector('//C2/..', root, [root[2]])
        self.check_selector('/A/*/C2/parent::node()', root, [root[2]])
        self.check_selector('/A/*/*/parent::node()', root, [root[0], root[2], root[3]])
        self.check_selector('//C2/parent::node()', root, [root[2]])
    def test_ancestor_axes(self):
        """Test the 'ancestor' and 'ancestor-or-self' axes."""
        root = self.etree.XML('<A><B1><C1/></B1><B2><C1/><D2><E1/><E2/></D2><C2/></B2><B3><C1><D1/></C1></B3></A>')
        self.check_selector('/A/B3/C1/ancestor::*', root, [root, root[2]])
        self.check_selector('/A/B4/C1/ancestor::*', root, [])
        self.check_selector('/A/*/C1/ancestor::*', root, [root, root[0], root[1], root[2]])
        self.check_selector('/A/*/C1/ancestor::B3', root, [root[2]])
        self.check_selector('/A/B3/C1/ancestor-or-self::*', root, [root, root[2], root[2][0]])
        self.check_selector('/A/*/C1/ancestor-or-self::*', root, [
            root, root[0], root[0][0], root[1], root[1][0], root[2], root[2][0]
        ])
    def test_preceding_axis(self):
        """Test the 'preceding' axis (reverse document order, excluding ancestors)."""
        root = self.etree.XML('<A><B1><C1/><C2/><C3/></B1><B2><C1/><C2/><C3/><C4/></B2></A>')
        self.check_selector('/A/B1/C2/preceding::*', root, [root[0][0]])
        self.check_selector('/A/B2/C4/preceding::*', root, [
            root[0], root[0][0], root[0][1], root[0][2], root[1][0], root[1][1], root[1][2]
        ])
        root = self.etree.XML("<root><e><a><b/></a><a><b/></a></e><e><a/></e></root>")
        self.check_tree("/root/e/preceding::b", '(/ (/ (/ (root)) (e)) (preceding (b)))')
        self.check_selector('/root/e[2]/preceding::b', root, [root[0][0][0], root[0][1][0]])
    def test_preceding_sibling_axis(self):
        """Test the 'preceding-sibling' axis with wildcard and named node tests."""
        root = self.etree.XML('<A><B1><C1/><C2/><C3/></B1><B2><C1/><C2/><C3/><C4/></B2></A>')
        self.check_selector('/A/B1/C2/preceding-sibling::*', root, [root[0][0]])
        self.check_selector('/A/B2/C4/preceding-sibling::*', root, [root[1][0], root[1][1], root[1][2]])
        self.check_selector('/A/B1/C2/preceding-sibling::C3', root, [])
    def test_default_axis(self):
        """Tests about when child:: default axis is applied."""
        root = self.etree.XML('<root><a id="1">first<b/></a><a id="2">second</a></root>')
        self.check_selector('/root/a/*', root, [root[0][0]])
        self.check_selector('/root/a/node()', root, ['first', root[0][0], 'second'])
        self.check_selector('/root/a/text()', root, ['first', 'second'])
        self.check_selector('/root/a/attribute::*', root, ['1', '2'])
        if self.parser.version > '1.0':
            # Functions are not allowed after path step in XPath 1.0
            self.check_selector('/root/a/attribute()', root, ['1', '2'])
            self.check_selector('/root/a/element()', root, [root[0][0]])
            self.check_selector('/root/a/name()', root, ['a', 'a'])
            self.check_selector('/root/a/last()', root, [2, 2])
            self.check_selector('/root/a/position()', root, [1, 2])
    def test_unknown_axis(self):
        """Test that an undefined axis name raises a name error."""
        self.check_value('unknown::node()', NameError)
    def test_predicate(self):
        """Test '[...]' predicates: positional, node-set, string and nested forms."""
        root = self.etree.XML('<A><B1><C1/><C2/><C3/></B1><B2><C1/><C2/><C3/><C4/></B2></A>')
        self.check_selector('/A/B1[C2]', root, [root[0]])
        self.check_selector('/A/B1[1]', root, [root[0]])
        self.check_selector('/A/B1[2]', root, [])
        self.check_selector('/A/*[2]', root, [root[1]])
        self.check_selector('/A/*[position()<2]', root, [root[0]])
        self.check_selector('/A/*[last()-1]', root, [root[0]])
        self.check_selector('/A/B2/*[position()>=2]', root, root[1][1:])
        root = self.etree.XML("<bib><book><author>Asimov</author></book></bib>")
        self.check_selector("book/author[. = 'Asimov']", root, [root[0][0]])
        self.check_selector("book/author[. = 'Dickens']", root, [])
        self.check_selector("book/author[text()='Asimov']", root, [root[0][0]])
        root = self.etree.XML('<A><B1>hello</B1><B2/><B3> </B3></A>')
        # a non-empty string predicate is truthy, an empty string is falsy
        self.check_selector("/A/*[' ']", root, root[:])
        self.check_selector("/A/*['']", root, [])
        root = self.etree.XML("<root><a><b/></a><a><b/><c/></a><a><c/></a></root>")
        self.check_tree("child::a[b][c]", '([ ([ (child (a)) (b)) (c))')
        self.check_selector("child::a[b][c]", root, [root[1]])
        root = self.etree.XML("<root><e><a><b/></a><a><b/></a></e><e><a/></e></root>")
        self.check_tree("a[not(b)]", '([ (a) (not (b)))')
        self.check_value("a[not(b)]", [], context=XPathContext(root, item=root[0]))
        self.check_value("a[not(b)]", [root[1][0]], context=XPathContext(root, item=root[1]))
        self.check_tree("preceding::a[not(b)]", '([ (preceding (a)) (not (b)))')
        self.check_value("a[preceding::a[not(b)]]", [], context=XPathContext(root, item=root[0]))
        self.check_value("a[preceding::a[not(b)]]", [], context=XPathContext(root, item=root[1]))
    def test_union(self):
        """Test the '|' union operator on element and attribute node-sets."""
        root = self.etree.XML('<A min="1" max="10"><B1><C1/><C2/><C3/></B1><B2><C1/><C2/><C3/><C4/></B2><B3/></A>')
        self.check_selector('/A/B2 | /A/B1', root, root[:2])
        self.check_selector('/A/B2 | /A/*', root, root[:])
        self.check_selector('/A/B2 | /A/* | /A/B1', root, root[:])
        self.check_selector('/A/@min | /A/@max', root, {'1', '10'})
    def test_default_namespace(self):
        """Test default-namespace handling: XPath 1.0 ignores it, XPath 2.0 applies it."""
        root = self.etree.XML('<foo>bar</foo>')
        self.check_selector('/foo', root, [root])
        if self.parser.version == '1.0':
            # XPath 1.0 ignores the default namespace
            self.check_selector('/foo', root, [root], namespaces={'': 'ns'})  # foo --> foo
        else:
            self.check_selector('/foo', root, [], namespaces={'': 'ns'})  # foo --> {ns}foo
            self.check_selector('/*:foo', root, [root], namespaces={'': 'ns'})  # foo --> {ns}foo
        root = self.etree.XML('<foo xmlns="ns">bar</foo>')
        self.check_selector('/foo', root, [])
        if type(self.parser) is XPath1Parser:
            self.check_selector('/foo', root, [], namespaces={'': 'ns'})
        else:
            self.check_selector('/foo', root, [root], namespaces={'': 'ns'})
        root = self.etree.XML('<A xmlns="http://xpath.test/ns"><B1/></A>')
        if self.parser.version > '1.0' or not hasattr(root, 'nsmap'):
            # skipped for lxml + XPath 1.0 (lxml exposes nsmap with the default namespace)
            self.check_selector("name(tst:B1)", root, 'tst:B1', namespaces={'tst': "http://xpath.test/ns"})
        if self.parser.version > '1.0':
            self.check_selector("name(B1)", root, 'B1', namespaces={'': "http://xpath.test/ns"})
        else:
            # XPath 1.0 ignores the default namespace declarations
            self.check_selector("name(B1)", root, '', namespaces={'': "http://xpath.test/ns"})
@unittest.skipIf(lxml_etree is None, "The lxml library is not installed")
class LxmlXPath1ParserTest(XPath1ParserTest):
    """Repeats the XPath 1.0 parser tests on lxml.etree trees.

    Selector results are additionally cross-checked against lxml's own
    XPath engine via ``root.xpath()``.
    """
    etree = lxml_etree

    def check_selector(self, path, root, expected, namespaces=None, **kwargs):
        """Check using the selector API (the *select* function of the package)."""
        if isinstance(expected, type) and issubclass(expected, Exception):
            self.assertRaises(expected, select, root, path, namespaces, self.parser.__class__, **kwargs)
        else:
            results = select(root, path, namespaces, self.parser.__class__, **kwargs)
            variables = kwargs.get('variables', {})
            if namespaces and '' in namespaces:
                # lxml rejects an empty prefix in its namespace mapping
                namespaces = {k: v for k, v in namespaces.items() if k}
            if isinstance(expected, set):
                # cross-check the result against lxml's native XPath evaluation
                self.assertEqual(set(root.xpath(path, namespaces=namespaces, **variables)), expected)
                self.assertEqual(set(results), expected)
            elif not callable(expected):
                self.assertEqual(root.xpath(path, namespaces=namespaces, **variables), expected)
                self.assertEqual(results, expected)
            elif isinstance(expected, type):
                self.assertTrue(isinstance(results, expected))
            else:
                self.assertTrue(expected(results))
# Run the test suite when the module is executed as a script.
if __name__ == '__main__':
    unittest.main()
| 50.029752 | 115 | 0.573989 |
import unittest
import sys
import io
import math
import pickle
from decimal import Decimal
from collections import namedtuple
from xml.etree import ElementTree
try:
import lxml.etree as lxml_etree
except ImportError:
lxml_etree = None
from elementpath import *
from elementpath.namespaces import XML_NAMESPACE, XSD_NAMESPACE, XSI_NAMESPACE, XPATH_FUNCTIONS_NAMESPACE
XML_GENERIC_TEST = """
<root>
<a id="a_id">
<b>some content</b>
<c> space space \t .</c></a>
</root>"""
XML_DATA_TEST = """
<values>
<a>3.4</a>
<a>20</a>
<a>-10.1</a>
<b>alpha</b>
<c>true</c>
<d>44</d>
</values>"""
class XPath1ParserTest(unittest.TestCase):
    """Test case for the XPath 1.0 parser (re-run on lxml by a subclass)."""
    # Prefix-to-URI map passed to the parser for resolving prefixed names in paths.
    namespaces = {
        'xml': XML_NAMESPACE,
        'xs': XSD_NAMESPACE,
        'xsi': XSI_NAMESPACE,
        'fn': XPATH_FUNCTIONS_NAMESPACE,
        'eg': 'http://www.example.com/ns/',
    }
    # In-scope variables available to parsed expressions (e.g. $values, $word).
    variables = {
        'values': [10, 20, 5],
        'myaddress': 'admin@example.com',
        'word': 'alpha',
    }
    # ElementTree implementation used to build test trees; overridden with lxml in subclass.
    etree = ElementTree
    def setUp(self):
        """Create a fresh strict parser and a sample name token for each test."""
        self.parser = XPath1Parser(namespaces=self.namespaces, variables=self.variables, strict=True)
        self.token = XPath1Parser.symbol_table['(name)'](self.parser, 'test')
    def check_tokenizer(self, path, expected):
        """Check the raw tokenization of *path* against an *expected* list of tokens.

        Each regex match yields exactly one non-empty group (literal, operator,
        name reference or unexpected char), which is what gets compared.
        """
        self.assertEqual([
            lit or op or ref or unexpected
            for lit, op, ref, unexpected in self.parser.__class__.tokenizer.findall(path)
        ], expected)
    def check_token(self, symbol, expected_label=None, expected_str=None, expected_repr=None, value=None):
        """Instantiate the token class for *symbol* and check its label/str/repr.

        Each of the *expected_\\** checks is skipped when the argument is None.
        """
        token = self.parser.symbol_table[symbol](self.parser, value)
        self.assertEqual(token.symbol, symbol)
        if expected_label is not None:
            self.assertEqual(token.label, expected_label)
        if expected_str is not None:
            self.assertEqual(str(token), expected_str)
        if expected_repr is not None:
            self.assertEqual(repr(token), expected_repr)
def check_tree(self, path, expected):
self.assertEqual(self.parser.parse(path).tree, expected)
def check_source(self, path, expected):
self.assertEqual(self.parser.parse(path).source, expected)
    def check_value(self, path, expected=None, context=None):
        """Parse *path* and check its evaluated value against *expected*.

        *expected* may be a plain value, an exception class (expected either at
        parse time or at evaluation time), NaN, a type, or a predicate callable.
        The context is copied so tests don't mutate a shared one.
        """
        if context is not None:
            context = context.copy()
        try:
            root_token = self.parser.parse(path)
        except ElementPathError as err:
            # the expected exception may be raised already at parse time
            if isinstance(expected, type) and isinstance(err, expected):
                return
            raise
        if isinstance(expected, type) and issubclass(expected, Exception):
            self.assertRaises(expected, root_token.evaluate, context)
        elif isinstance(expected, float) and math.isnan(expected):
            # NaN != NaN, so it needs a dedicated check
            self.assertTrue(math.isnan(root_token.evaluate(context)))
        elif not callable(expected):
            self.assertEqual(root_token.evaluate(context), expected)
        elif isinstance(expected, type):
            value = root_token.evaluate(context)
            self.assertTrue(isinstance(value, expected), "%r is not a %r instance." % (value, expected))
        else:
            self.assertTrue(expected(root_token.evaluate(context)))
def check_select(self, path, expected, context=None):
if context is None:
context = XPathContext(root=self.etree.Element(u'dummy_root'))
else:
context = context.copy()
root_token = self.parser.parse(path)
if isinstance(expected, type) and issubclass(expected, Exception):
self.assertRaises(expected, root_token.select, context)
elif not callable(expected):
self.assertEqual(list(root_token.select(context)), expected)
else:
self.assertTrue(expected(list(root_token.parse(path).select(context))))
    def check_selector(self, path, root, expected, namespaces=None, **kwargs):
        """Check *path* on *root* through the package's *select* API.

        *expected* may be a plain value, a set (order-insensitive compare),
        NaN, an exception class, a type, or a predicate callable.
        """
        if isinstance(expected, type) and issubclass(expected, Exception):
            self.assertRaises(expected, select, root, path, namespaces, self.parser.__class__, **kwargs)
        else:
            results = select(root, path, namespaces, self.parser.__class__, **kwargs)
            if isinstance(expected, set):
                self.assertEqual(set(results), expected)
            elif isinstance(expected, float) and math.isnan(expected):
                # NaN != NaN, so it needs a dedicated check
                self.assertTrue(math.isnan(results))
            elif not callable(expected):
                self.assertEqual(results, expected)
            elif isinstance(expected, type):
                self.assertTrue(isinstance(results, expected))
            else:
                self.assertTrue(expected(results))
def wrong_syntax(self, path):
self.assertRaises(SyntaxError, self.parser.parse, path)
def wrong_value(self, path):
self.assertRaises(ValueError, self.parser.parse, path)
def wrong_type(self, path):
self.assertRaises(TypeError, self.parser.parse, path)
def wrong_name(self, path):
self.assertRaises(NameError, self.parser.parse, path)
    @unittest.skipIf(sys.version_info < (3,), "Python 2 pickling is not supported.")
    def test_parser_pickling(self):
        """Test that the parser and its symbol table survive a pickle round-trip."""
        # schema-bound parsers are not picklable, so skip those
        if getattr(self.parser, 'schema', None) is None:
            obj = pickle.dumps(self.parser)
            parser = pickle.loads(obj)
            obj = pickle.dumps(self.parser.symbol_table)
            symbol_table = pickle.loads(obj)
            self.assertEqual(self.parser, parser)
            self.assertEqual(self.parser.symbol_table, symbol_table)
    def test_xpath_tokenizer(self):
        """Test raw tokenization of representative XPath expressions.

        Empty strings in the expected lists mark positions where the
        tokenizer matched skipped whitespace between tokens.
        """
        self.check_tokenizer("*", ['*'])
        self.check_tokenizer("text()", ['text', '(', ')'])
        self.check_tokenizer("@name", ['@', 'name'])
        self.check_tokenizer("@*", ['@', '*'])
        self.check_tokenizer("para[1]", ['para', '[', '1', ']'])
        self.check_tokenizer("para[last()]", ['para', '[', 'last', '(', ')', ']'])
        self.check_tokenizer("*/para", ['*', '/', 'para'])
        self.check_tokenizer("/doc/chapter[5]/section[2]",
                             ['/', 'doc', '/', 'chapter', '[', '5', ']', '/', 'section', '[', '2', ']'])
        self.check_tokenizer("chapter//para", ['chapter', '//', 'para'])
        self.check_tokenizer("//para", ['//', 'para'])
        self.check_tokenizer("//olist/item", ['//', 'olist', '/', 'item'])
        self.check_tokenizer(".", ['.'])
        self.check_tokenizer(".//para", ['.', '//', 'para'])
        self.check_tokenizer("..", ['..'])
        self.check_tokenizer("../@lang", ['..', '/', '@', 'lang'])
        self.check_tokenizer("chapter[title]", ['chapter', '[', 'title', ']'])
        self.check_tokenizer("employee[@secretary and @assistant]",
                             ['employee', '[', '@', 'secretary', '', 'and', '', '@', 'assistant', ']'])
        self.check_tokenizer("{http://spam}egg", ['{', 'http', ':', '//', 'spam', '}', 'egg'])
        self.check_tokenizer("./spam.egg", ['.', '/', 'spam.egg'])
        self.check_tokenizer(".//spam:egg", ['.', '//', 'spam', ':', 'egg'])
        self.check_tokenizer("substring-after()", ['substring-after', '(', ')'])
        self.check_tokenizer("contains('XML','XM')", ['contains', '(', "'XML'", ',', "'XM'", ')'])
        self.check_tokenizer("concat('XML', true(), 10)",
                             ['concat', '(', "'XML'", ',', '', 'true', '(', ')', ',', '', '10', ')'])
        self.check_tokenizer("concat('a', 'b', 'c')", ['concat', '(', "'a'", ',', '', "'b'", ',', '', "'c'", ')'])
        self.check_tokenizer("_last()", ['_last', '(', ')'])
        self.check_tokenizer("last ()", ['last', '', '(', ')'])
        self.check_tokenizer('child::text()', ['child', '::', 'text', '(', ')'])
        self.check_tokenizer('./ /.', ['.', '/', '', '/', '.'])
        self.check_tokenizer('tns :*', ['tns', '', ':', '*'])
    def test_tokens(self):
        """Test symbol-table token classes: literals, operators, axes and functions."""
        # Literals
        self.check_token('(string)', 'literal', "'hello' string",
                         "_string_literal_token(value='hello')", 'hello')
        self.check_token('(integer)', 'literal', "1999 integer",
                         "_integer_literal_token(value=1999)", 1999)
        self.check_token('(float)', 'literal', "3.1415 float",
                         "_float_literal_token(value=3.1415)", 3.1415)
        self.check_token('(decimal)', 'literal', "217.35 decimal",
                         "_decimal_literal_token(value=217.35)", 217.35)
        self.check_token('(name)', 'literal', "'schema' name",
                         "_name_literal_token(value='schema')", 'schema')
        # Variables
        self.check_token('$', 'operator', "$ variable reference",
                         "_DollarSign_operator_token()")
        # Axes
        self.check_token('self', 'axis', "'self' axis", "_self_axis_token()")
        self.check_token('child', 'axis', "'child' axis", "_child_axis_token()")
        self.check_token('parent', 'axis', "'parent' axis", "_parent_axis_token()")
        self.check_token('ancestor', 'axis', "'ancestor' axis", "_ancestor_axis_token()")
        self.check_token('preceding', 'axis', "'preceding' axis", "_preceding_axis_token()")
        self.check_token('descendant-or-self', 'axis', "'descendant-or-self' axis")
        self.check_token('following-sibling', 'axis', "'following-sibling' axis")
        self.check_token('preceding-sibling', 'axis', "'preceding-sibling' axis")
        self.check_token('ancestor-or-self', 'axis', "'ancestor-or-self' axis")
        self.check_token('descendant', 'axis', "'descendant' axis")
        if self.parser.version == '1.0':
            self.check_token('attribute', 'axis', "'attribute' axis")
        self.check_token('following', 'axis', "'following' axis")
        self.check_token('namespace', 'axis', "'namespace' axis")
        # Functions and operators
        self.check_token('position', 'function', "'position' function", "_position_function_token()")
        self.check_token('and', 'operator', "'and' operator", "_and_operator_token()")
        # the comma is a plain symbol in XPath 1.0 but an operator in XPath 2.0
        if self.parser.version == '1.0':
            self.check_token(',', 'symbol', "comma symbol", "_Comma_symbol_token()")
        else:
            self.check_token(',', 'operator', "comma operator", "_Comma_operator_token()")
    def test_token_tree(self):
        """Check the s-expression representation of parsed expression trees."""
        self.check_tree('child::B1', '(child (B1))')
        self.check_tree('A/B//C/D', '(/ (// (/ (A) (B)) (C)) (D))')
        self.check_tree('child::*/child::B1', '(/ (child (*)) (child (B1)))')
        self.check_tree('attribute::name="Galileo"', "(= (attribute (name)) ('Galileo'))")
        self.check_tree('1 + 2 * 3', '(+ (1) (* (2) (3)))')
        self.check_tree('(1 + 2) * 3', '(* (+ (1) (2)) (3))')
        self.check_tree("false() and true()", '(and (false) (true))')
        self.check_tree("false() or true()", '(or (false) (true))')
        self.check_tree("./A/B[C][D]/E", '(/ (/ (/ (.) (A)) ([ ([ (B) (C)) (D))) (E))')
        self.check_tree("string(xml:lang)", '(string (: (xml) (lang)))')
    def test_token_source(self):
        """Check the normalized source reconstruction of parsed expressions."""
        self.check_source(' child ::B1', 'child::B1')
        self.check_source('false()', 'false()')
        self.check_source("concat('alpha', 'beta', 'gamma')", "concat('alpha', 'beta', 'gamma')")
        self.check_source('1 +2 *  3 ', '1 + 2 * 3')
        self.check_source('(1 + 2) * 3', '(1 + 2) * 3')
        self.check_source(' eg:example ', 'eg:example')
        self.check_source('attribute::name="Galileo"', "attribute::name = 'Galileo'")
        self.check_source(".//eg:a | .//eg:b", '. // eg:a | . // eg:b')
        self.check_source("/A/B[C]", '/ A / B[C]')
        # Braced URI literals are accepted only in non-strict mode; restore
        # strict mode even if the check fails.
        try:
            self.parser.strict = False
            self.check_source("{tns1}name", '{tns1}name')
        finally:
            self.parser.strict = True
    def test_wrong_syntax(self):
        """Check that malformed expressions raise a syntax error."""
        self.wrong_syntax('')
        self.wrong_syntax("     \n     \n    )")
        self.wrong_syntax('child::1')
        self.wrong_syntax("{}egg")
        self.wrong_syntax("./*:*")
        self.wrong_syntax('./ /.')
        self.wrong_syntax(' eg : example ')
    def test_wrong_nargs(self):
        """Check that calling functions with a wrong number of arguments fails."""
        self.wrong_type("boolean()")
        self.wrong_type("count(0, 1, 2)")
        self.wrong_type("round(2.5, 1.7)")
        self.wrong_type("contains('XPath', 'XP', 20)")
        self.wrong_type("boolean(1, 5)")
    def test_node_selection(self):
        """A name test with no matching node selects the empty node-set."""
        self.check_value("mars", [])
    def test_references(self):
        """Check prefixed names, wildcard prefixes and braced URI references."""
        namespaces = {'tst': "http://xpath.test/ns"}
        root = self.etree.XML("""
        <A xmlns:tst="http://xpath.test/ns">
            <tst:B1 b1="beta1"/>
            <tst:B2/>
            <tst:B3 b2="tst:beta2" b3="beta3"/>
        </A>""")
        self.check_tree('eg:unknown', '(: (eg) (unknown))')
        self.check_tree('string(eg:unknown)', '(string (: (eg) (unknown)))')
        self.check_value("fn:true()", True)
        self.check_selector("./tst:B1", root, [root[0]], namespaces=namespaces)
        self.check_selector("./tst:*", root, root[:], namespaces=namespaces)
        # Wildcard prefix '*:name' is an XPath 2.0 feature.
        if self.parser.version == '1.0':
            self.check_selector("./*:B2", root, Exception, namespaces=namespaces)
        else:
            self.check_selector("./*:B2", root, [root[1]], namespaces=namespaces)
        # Braced URI literals '{uri}name' are only parsed in non-strict mode.
        self.parser.strict = False
        self.check_tree('{%s}string' % XSD_NAMESPACE, "({ ('http://www.w3.org/2001/XMLSchema') (string))")
        self.check_tree('string({%s}unknown)' % XSD_NAMESPACE,
                        "(string ({ ('http://www.w3.org/2001/XMLSchema') (unknown)))")
        self.wrong_syntax("{%s" % XSD_NAMESPACE)
        self.check_value("{%s}true()" % XPATH_FUNCTIONS_NAMESPACE, True)
        self.parser.strict = True
        self.wrong_syntax('{%s}string' % XSD_NAMESPACE)
        if not hasattr(self.etree, 'LxmlError') or self.parser.version > '1.0':
            self.check_selector("./{http://www.w3.org/2001/04/xmlenc#}EncryptedData", root, [], strict=False)
            self.check_selector("./{http://xpath.test/ns}B1", root, [root[0]], strict=False)
            self.check_selector("./{http://xpath.test/ns}*", root, root[:], strict=False)
    def test_node_types(self):
        """Check node kind tests against document, element, attribute,
        namespace, comment, processing-instruction and text items."""
        document = self.etree.parse(io.StringIO(u'<A/>'))
        element = self.etree.Element('schema')
        # Attribute is represented as a (name, value) tuple item.
        attribute = 'id', '0212349350'
        namespace = namedtuple('Namespace', 'prefix uri')('xs', 'http://www.w3.org/2001/XMLSchema')
        comment = self.etree.Comment('nothing important')
        pi = self.etree.ProcessingInstruction('action', 'nothing to do')
        text = u'aldebaran'
        context = XPathContext(element)
        self.check_select("node()", [document.getroot()], context=XPathContext(document))
        self.check_selector("node()", element, [])
        context.item = attribute
        self.check_select("self::node()", [attribute], context)
        context.item = namespace
        self.check_select("self::node()", [namespace], context)
        context.item = comment
        self.check_select("self::node()", [comment], context)
        self.check_select("self::comment()", [comment], context)
        context.item = pi
        self.check_select("self::node()", [pi], context)
        self.check_select("self::processing-instruction()", [pi], context)
        context.item = text
        self.check_select("self::node()", [text], context)
        self.check_select("text()", [], context)
        self.check_selector("node()", self.etree.XML('<author>Dickens</author>'), ['Dickens'])
        self.check_selector("text()", self.etree.XML('<author>Dickens</author>'), ['Dickens'])
        root = self.etree.XML('<author>Dickens</author>')
        # lxml handles '//self::node()' differently, so skip it there.
        if self.etree is not lxml_etree:
            self.check_selector("//self::node()", root, [root, root, 'Dickens'])
        self.check_selector("//self::text()", root, ['Dickens'])
def test_node_set_id_function(self):
t = self.etree.XML('<A><B1 xml:id="foo"/><B2/><B3 xml:id="bar"/><B4 xml:id="baz"/></A>')
self.check_selector('id("foo")', root, [root[0]])
    def test_node_set_functions(self):
        """Check position(), last(), name(), local-name() and namespace-uri()."""
        root = self.etree.XML('<A><B1><C1/><C2/></B1><B2/><B3><C3/><C4/><C5/></B3></A>')
        context = XPathContext(root, item=root[1], size=3, position=3)
        self.check_value("position()", 0)
        self.check_value("position()", 4, context=context)
        self.check_value("position()<=2", True)
        self.check_value("position()<=2", False, context=context)
        self.check_value("position()=4", True, context=context)
        self.check_value("position()=3", False, context=context)
        self.check_value("last()", 0)
        self.check_value("last()", 3, context=context)
        self.check_value("last()-1", 2, context=context)
        self.check_selector("name(.)", root, 'A')
        self.check_selector("name(A)", root, '')
        self.check_selector("local-name(A)", root, '')
        self.check_selector("namespace-uri(A)", root, '')
        self.check_selector("name(B2)", root, 'B2')
        self.check_selector("local-name(B2)", root, 'B2')
        self.check_selector("namespace-uri(B2)", root, '')
        if self.parser.version <= '1.0':
            self.check_selector("name(*)", root, 'B1')
        root = self.etree.XML('<tst:A xmlns:tst="http://xpath.test/ns"><tst:B1/></tst:A>')
        self.check_selector("name(.)", root, 'tst:A', namespaces={'tst': "http://xpath.test/ns"})
        self.check_selector("local-name(.)", root, 'A')
        self.check_selector("namespace-uri(.)", root, 'http://xpath.test/ns')
        self.check_selector("name(tst:B1)", root, 'tst:B1', namespaces={'tst': "http://xpath.test/ns"})
        self.check_selector("name(tst:B1)", root, 'tst:B1', namespaces={'tst': "http://xpath.test/ns", '': ''})
    def test_string_function(self):
        """Check the string() conversion function."""
        self.check_value("string(10.0)", '10.0')
        # XPath 1.0 does not accept the empty sequence '()'.
        if self.parser.version == '1.0':
            self.wrong_syntax("string(())")
        else:
            self.check_value("string(())", '')
    def test_string_length_function(self):
        """Check the string-length() function, including 2.0 sequence and
        compatibility-mode behavior."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("string-length('hello world')", 11)
        self.check_value("string-length('')", 0)
        self.check_selector("a[string-length(@id) = 4]", root, [root[0]])
        self.check_selector("a[string-length(@id) = 3]", root, [])
        self.check_selector("//b[string-length(.) = 12]", root, [root[0][0]])
        self.check_selector("//b[string-length(.) = 10]", root, [])
        self.check_selector("//none[string-length(.) = 10]", root, [])
        self.check_value('fn:string-length("Harp not on that string, madam; that is past.")', 45)
        if self.parser.version == '1.0':
            self.wrong_syntax("string-length(())")
            self.check_value("string-length(12345)", 5)
        else:
            self.check_value("string-length(())", 0)
            self.check_value("string-length(('alpha'))", 5)
            self.check_value("string-length(('alpha'))", 5)
            # In 2.0 strict mode non-string arguments are type errors...
            self.wrong_type("string-length(12345)")
            self.wrong_type("string-length(('12345', 'abc'))")
            # ...but in 1.0 compatibility mode they are converted like in 1.0.
            self.parser.compatibility_mode = True
            self.check_value("string-length(('12345', 'abc'))", 5)
            self.check_value("string-length(12345)", 5)
            self.parser.compatibility_mode = False
    def test_normalize_space_function(self):
        """Check whitespace normalization with normalize-space()."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("normalize-space('  hello  \t world ')", 'hello world')
        self.check_selector("//c[normalize-space(.) = 'space  space   .']", root, [root[0][1]])
        self.check_value('fn:normalize-space(" The    wealthy curled darlings  of    our    nation. ")',
                         'The wealthy curled darlings of our nation.')
        if self.parser.version == '1.0':
            self.wrong_syntax('fn:normalize-space(())')
            self.check_value("normalize-space(1000)", '1000')
            self.check_value("normalize-space(true())", 'True')
        else:
            self.check_value('fn:normalize-space(())', '')
            self.wrong_type("normalize-space(true())")
            self.wrong_type("normalize-space(('\ta b\tc  ', 'other'))")
            # Compatibility mode accepts non-string arguments like XPath 1.0.
            self.parser.compatibility_mode = True
            self.check_value("normalize-space(true())", 'True')
            self.check_value("normalize-space(('\ta b\tc  ', 'other'))", 'a b c')
            self.parser.compatibility_mode = False
    def test_translate_function(self):
        """Check character mapping/removal with translate()."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("translate('hello world!', 'hw', 'HW')", 'Hello World!')
        # A map char without replacement simply has no effect ...
        self.check_value("translate('hello world!', 'hwx', 'HW')", 'Hello World!')
        # ... while a mapped char with no replacement char is removed.
        self.check_value("translate('hello world!', 'hw!', 'HW')", 'Hello World')
        self.check_selector("a[translate(@id, 'id', 'no') = 'a_no']", root, [root[0]])
        self.check_selector("a[translate(@id, 'id', 'na') = 'a_no']", root, [])
        self.check_selector("//b[translate(., 'some', 'one2') = 'one2 cnnt2nt']", root, [root[0][0]])
        self.check_selector("//b[translate(., 'some', 'two2') = 'one2 cnnt2nt']", root, [])
        self.check_selector("//none[translate(., 'some', 'two2') = 'one2 cnnt2nt']", root, [])
        self.check_value('fn:translate("bar","abc","ABC")', 'BAr')
        self.check_value('fn:translate("--aaa--","abc-","ABC")', 'AAA')
        self.check_value('fn:translate("abcdabc", "abc", "AB")', "ABdAB")
        if self.parser.version > '1.0':
            self.check_value("translate((), 'hw', 'HW')", '')
    def test_variable_substitution(self):
        """Check substitution of node-valued variables passed to the selector."""
        root = self.etree.XML('<ups-units>'
                              '   <unit><power>40kW</power></unit>'
                              '   <unit><power>20kW</power></unit>'
                              '   <unit><power>30kW</power><model>XYZ</model></unit>'
                              '</ups-units>')
        variables = {'ups1': root[0], 'ups2': root[1], 'ups3': root[2]}
        self.check_selector('string($ups1/power)', root, '40kW', variables=variables)
    def test_substring_function(self):
        """Check substring() with rounding, NaN/infinity and sequence args."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("substring('Preem Palver', 1)", 'Preem Palver')
        self.check_value("substring('Preem Palver', 2)", 'reem Palver')
        self.check_value("substring('Preem Palver', 7)", 'Palver')
        self.check_value("substring('Preem Palver', 1, 5)", 'Preem')
        self.wrong_type("substring('Preem Palver', 'c', 5)")
        self.wrong_type("substring('Preem Palver', 1, '5')")
        self.check_selector("a[substring(@id, 1) = 'a_id']", root, [root[0]])
        self.check_selector("a[substring(@id, 2) = '_id']", root, [root[0]])
        self.check_selector("a[substring(@id, 3) = '_id']", root, [])
        self.check_selector("//b[substring(., 1, 5) = 'some ']", root, [root[0][0]])
        self.check_selector("//b[substring(., 1, 6) = 'some ']", root, [])
        self.check_selector("//none[substring(., 1, 6) = 'some ']", root, [])
        # Positions are rounded per the XPath spec: round(1.5)=2, 2+round(2.6)=5.
        self.check_value("substring('12345', 1.5, 2.6)", '234')
        self.check_value("substring('12345', 0, 3)", '12')
        if self.parser.version == '1.0':
            # NaN/Inf arithmetic cases defined by the XPath 1.0 spec.
            self.check_value("substring('12345', 0 div 0, 3)", '')
            self.check_value("substring('12345', 1, 0 div 0)", '')
            self.check_value("substring('12345', -42, 1 div 0)", '12345')
            self.check_value("substring('12345', -1 div 0, 1 div 0)", '')
        else:
            self.check_value('fn:substring("motor car", 6)', ' car')
            self.check_value('fn:substring("metadata", 4, 3)', 'ada')
            self.check_value('fn:substring("12345", 1.5, 2.6)', '234')
            self.check_value('fn:substring("12345", 0, 3)', '12')
            self.check_value('fn:substring("12345", 5, -3)', '')
            self.check_value('fn:substring("12345", -3, 5)', '1')
            self.check_value('fn:substring("12345", 0 div 0E0, 3)', '')
            self.check_value('fn:substring("12345", 1, 0 div 0E0)', '')
            self.check_value('fn:substring((), 1, 3)', '')
            self.check_value('fn:substring("12345", -42, 1 div 0E0)', '12345')
            self.check_value('fn:substring("12345", -1 div 0E0, 1 div 0E0)', '')
            # Singleton sequences are accepted, longer ones are type errors
            # unless 1.0 compatibility mode is on (then the first item is used).
            self.check_value('fn:substring(("alpha"), 1, 3)', 'alp')
            self.check_value('fn:substring(("alpha"), (1), 3)', 'alp')
            self.check_value('fn:substring(("alpha"), 1, (3))', 'alp')
            self.wrong_type('fn:substring(("alpha"), (1, 2), 3)')
            self.wrong_type('fn:substring(("alpha", "beta"), 1, 3)')
            self.parser.compatibility_mode = True
            self.check_value('fn:substring(("alpha", "beta"), 1, 3)', 'alp')
            self.parser.compatibility_mode = False
    def test_starts_with_function(self):
        """Check prefix matching with starts-with()."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("starts-with('Hello World', 'Hello')", True)
        self.check_value("starts-with('Hello World', 'hello')", False)
        self.check_selector("a[starts-with(@id, 'a_i')]", root, [root[0]])
        self.check_selector("a[starts-with(@id, 'a_b')]", root, [])
        self.check_selector("//b[starts-with(., 'some')]", root, [root[0][0]])
        self.check_selector("//b[starts-with(., 'none')]", root, [])
        self.check_selector("//none[starts-with(., 'none')]", root, [])
        self.check_selector("a[starts-with(@id, 'a_id')]", root, [root[0]])
        self.check_selector("a[starts-with(@id, 'a')]", root, [root[0]])
        self.check_selector("a[starts-with(@id, 'a!')]", root, [])
        self.check_selector("//b[starts-with(., 'some')]", root, [root[0][0]])
        self.check_selector("//b[starts-with(., 'a')]", root, [])
        self.check_value("starts-with('', '')", True)
        self.check_value('fn:starts-with("abracadabra", "abra")', True)
        self.check_value('fn:starts-with("abracadabra", "a")', True)
        self.check_value('fn:starts-with("abracadabra", "bra")', False)
        if self.parser.version == '1.0':
            self.wrong_syntax("starts-with((), ())")
            # XPath 1.0 converts non-string arguments to strings.
            self.check_value("starts-with('1999', 19)", True)
        else:
            self.check_value('fn:starts-with("tattoo", "tat")', True)
            self.check_value('fn:starts-with ( "tattoo", "att")', False)
            self.check_value('fn:starts-with ((), ())', True)
            self.wrong_type("starts-with('1999', 19)")
            self.parser.compatibility_mode = True
            self.check_value("starts-with('1999', 19)", True)
            self.parser.compatibility_mode = False
    def test_concat_function(self):
        """Check string concatenation with concat()."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("concat('alpha', 'beta', 'gamma')", 'alphabetagamma')
        self.check_value("concat('', '', '')", '')
        self.check_value("concat('alpha', 10, 'gamma')", 'alpha10gamma')
        self.check_value("concat('alpha', 'beta', 'gamma')", 'alphabetagamma')
        self.check_value("concat('alpha', 10, 'gamma')", 'alpha10gamma')
        self.check_value("concat('alpha', 'gamma')", 'alphagamma')
        self.check_selector("a[concat(@id, '_foo') = 'a_id_foo']", root, [root[0]])
        self.check_selector("a[concat(@id, '_fo') = 'a_id_foo']", root, [])
        self.check_selector("//b[concat(., '_foo') = 'some content_foo']", root, [root[0][0]])
        self.check_selector("//b[concat(., '_fo') = 'some content_foo']", root, [])
        self.check_selector("//none[concat(., '_fo') = 'some content_foo']", root, [])
        self.wrong_syntax("concat()")
        # NOTE(review): duplicated assertion above — harmless but redundant.
        self.wrong_syntax("concat()")
        if self.parser.version == '1.0':
            self.wrong_syntax("concat((), (), ())")
        else:
            self.check_value("concat((), (), ())", '')
            self.check_value("concat(('a'), (), ('c'))", 'ac')
            self.wrong_type("concat(('a', 'b'), (), ('c'))")
            self.parser.compatibility_mode = True
            self.check_value("concat(('a', 'b'), (), ('c'))", 'ac')
            self.parser.compatibility_mode = False
    def test_contains_function(self):
        """Check substring containment with contains()."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("contains('XPath','XP')", True)
        self.check_value("contains('XP','XPath')", False)
        self.check_value("contains('', '')", True)
        self.check_selector("a[contains(@id, '_i')]", root, [root[0]])
        self.check_selector("a[contains(@id, '_b')]", root, [])
        self.check_selector("//b[contains(., 'c')]", root, [root[0][0]])
        self.check_selector("//b[contains(., ' -con')]", root, [])
        self.check_selector("//none[contains(., ' -con')]", root, [])
        if self.parser.version == '1.0':
            self.wrong_syntax("contains((), ())")
            self.check_value("contains('XPath', 20)", False)
        else:
            self.check_value('fn:contains ( "tattoo", "t")', True)
            self.check_value('fn:contains ( "tattoo", "ttt")', False)
            self.check_value('fn:contains ( "", ())', True)
            self.wrong_type("contains('XPath', 20)")
            self.parser.compatibility_mode = True
            self.check_value("contains('XPath', 20)", False)
            self.parser.compatibility_mode = False
    def test_substring_before_function(self):
        """Check the substring-before() function."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("substring-before('Wolfgang Amadeus Mozart', 'Wolfgang')", '')
        self.check_value("substring-before('Wolfgang Amadeus Mozart', 'Amadeus')", 'Wolfgang ')
        self.check_value('substring-before("1999/04/01","/")', '1999')
        self.check_selector("a[substring-before(@id, 'a') = '']", root, [root[0]])
        self.check_selector("a[substring-before(@id, 'id') = 'a_']", root, [root[0]])
        self.check_selector("a[substring-before(@id, 'id') = '']", root, [])
        self.check_selector("//b[substring-before(., ' ') = 'some']", root, [root[0][0]])
        self.check_selector("//b[substring-before(., 'con') = 'some']", root, [])
        self.check_selector("//none[substring-before(., 'con') = 'some']", root, [])
        if self.parser.version == '1.0':
            self.check_value("substring-before('2017-10-27', 10)", '2017-')
            self.wrong_syntax("fn:substring-before((), ())")
        else:
            self.check_value('fn:substring-before ( "tattoo", "attoo")', 't')
            self.check_value('fn:substring-before ( "tattoo", "tatto")', '')
            self.check_value('fn:substring-before ((), ())', '')
            self.wrong_type("substring-before('2017-10-27', 10)")
            self.parser.compatibility_mode = True
            self.check_value("substring-before('2017-10-27', 10)", '2017-')
            self.parser.compatibility_mode = False
    def test_substring_after_function(self):
        """Check the substring-after() function."""
        root = self.etree.XML(XML_GENERIC_TEST)
        self.check_value("substring-after('Wolfgang Amadeus Mozart', 'Amadeus ')", 'Mozart')
        self.check_value("substring-after('Wolfgang Amadeus Mozart', 'Mozart')", '')
        self.check_value("substring-after('', '')", '')
        self.check_value("substring-after('Mozart', '')", 'Mozart')
        self.check_value('substring-after("1999/04/01","/")', '04/01')
        self.check_value('substring-after("1999/04/01","19")', '99/04/01')
        self.check_value("substring-after('Wolfgang Amadeus Mozart', 'Amadeus ')", 'Mozart')
        self.check_value("substring-after('Wolfgang Amadeus Mozart', 'Mozart')", '')
        self.check_selector("a[substring-after(@id, 'a') = '_id']", root, [root[0]])
        self.check_selector("a[substring-after(@id, 'id') = '']", root, [root[0]])
        self.check_selector("a[substring-after(@id, 'i') = '']", root, [])
        self.check_selector("//b[substring-after(., ' ') = 'content']", root, [root[0][0]])
        self.check_selector("//b[substring-after(., 'con') = 'content']", root, [])
        self.check_selector("//none[substring-after(., 'con') = 'content']", root, [])
        if self.parser.version == '1.0':
            self.wrong_syntax("fn:substring-after((), ())")
        else:
            self.check_value('fn:substring-after("tattoo", "tat")', 'too')
            self.check_value('fn:substring-after("tattoo", "tattoo")', '')
            self.check_value("fn:substring-after((), ())", '')
            self.wrong_type("substring-after('2017-10-27', 10)")
            self.parser.compatibility_mode = True
            self.check_value("substring-after('2017-10-27', 10)", '-27')
            self.parser.compatibility_mode = False
    def test_boolean_functions(self):
        """Check true(), false(), not() and boolean() conversions."""
        self.check_value("true()", True)
        self.check_value("false()", False)
        self.check_value("not(false())", True)
        self.check_value("not(true())", False)
        self.check_value("boolean(0)", False)
        self.check_value("boolean(1)", True)
        self.check_value("boolean(-1)", True)
        self.check_value("boolean('hello!')", True)
        self.check_value("boolean('   ')", True)
        self.check_value("boolean('')", False)
        if self.parser.version == '1.0':
            self.wrong_syntax("boolean(())")
        else:
            self.check_value("boolean(())", False)
def test_lang_function(self):
or('lang("en")', self.etree.XML('<para xml:lang="en"/>'), True)
self.check_selector('lang("en")', self.etree.XML('<div xml:lang="en"><para/></div>'), True)
self.check_selector('lang("en")', self.etree.XML('<para xml:lang="EN"/>'), True)
self.check_selector('lang("en")', self.etree.XML('<para xml:lang="en-us"/>'), True)
self.check_selector('lang("en")', self.etree.XML('<para xml:lang="it"/>'), False)
    def test_logical_expressions(self):
        """Check 'and'/'or' operators with boolean and non-boolean operands."""
        self.check_value("false() and true()", False)
        self.check_value("false() or true()", True)
        self.check_value("true() or false()", True)
        self.check_value("true() and true()", True)
        self.check_value("1 and 0", False)
        self.check_value("1 and 1", True)
        self.check_value("1 and 'jupiter'", True)
        self.check_value("0 and 'mars'", False)
        # 'mars' is an empty node-set here, which is falsy.
        self.check_value("1 and mars", False)
    def test_comparison_operators(self):
        """Check =, !=, <, <=, >, >= on numbers, booleans and node-sets."""
        self.check_value("0.05 = 0.05", True)
        self.check_value("19.03 != 19.02999", True)
        self.check_value("-1.0 = 1.0", False)
        self.check_value("1 <= 2", True)
        self.check_value("5 >= 9", False)
        self.check_value("5 > 3", True)
        self.check_value("5 < 20.0", True)
        self.check_value("false() = 1", False)
        self.check_value("0 = false()", True)
        self.check_value("2 * 2 = 4", True)
        root = self.etree.XML('<table>'
                              '    <unit id="1"><cost>50</cost></unit>'
                              '    <unit id="2"><cost>30</cost></unit>'
                              '    <unit id="3"><cost>20</cost></unit>'
                              '    <unit id="2"><cost>40</cost></unit>'
                              '</table>')
        # Node-set comparisons are existential: true if any pair matches.
        self.check_selector("/table/unit[2]/cost <= /table/unit[1]/cost", root, True)
        self.check_selector("/table/unit[2]/cost > /table/unit[position()!=2]/cost", root, True)
        self.check_selector("/table/unit[3]/cost > /table/unit[position()!=3]/cost", root, False)
        self.check_selector(". = 'Dickens'", self.etree.XML('<author>Dickens</author>'), True)
    def test_numerical_expressions(self):
        """Check numeric literals and basic arithmetic operators."""
        self.check_value("9", 9)
        self.check_value("-3", -3)
        self.check_value("7.1", Decimal('7.1'))
        self.check_value("0.45e3", 0.45e3)
        self.check_value(" 7+5 ", 12)
        self.check_value("8 - 5", 3)
        self.check_value("-8 - 5", -13)
        self.check_value("5 div 2", 2.5)
        self.check_value("-3 * 7", -21)
        self.check_value("9 - 1 + 6", 14)
        self.check_value("(5 * 7) + 9", 44)
        self.check_value("-3 * 7", -21)
    def test_numerical_add_operator(self):
        """Check '+' with node operands: 1.0 coerces to number, 2.0 raises."""
        self.check_value("3 + 8", 11)
        self.check_value("9 - 5.0", 4)
        root = self.etree.XML(XML_DATA_TEST)
        if self.parser.version == '1.0':
            self.check_value("'9' + 5.0", 14)
            self.check_selector("/values/a + 2", root, 5.4)
            # Non-numeric node content becomes NaN in XPath 1.0.
            self.check_value("/values/b + 2", float('nan'), context=XPathContext(root))
        else:
            self.check_selector("/values/a + 2", root, TypeError)
            self.check_value("/values/b + 2", TypeError, context=XPathContext(root))
        self.check_selector("/values/d + 3", root, 47)
    def test_numerical_mod_operator(self):
        """Check the 'mod' operator on integers, decimals and floats."""
        self.check_value("11 mod 3", 2)
        self.check_value("4.5 mod 1.2", Decimal('0.9'))
        self.check_value("1.23E2 mod 0.6E1", 3.0E0)
        root = self.etree.XML(XML_DATA_TEST)
        if self.parser.version == '1.0':
            self.check_selector("/values/a mod 2", root, 1.4)
            self.check_value("/values/b mod 2", float('nan'), context=XPathContext(root))
        else:
            # XPath 2.0 is strict about operand types.
            self.check_selector("/values/a mod 2", root, TypeError)
            self.check_value("/values/b mod 2", TypeError, context=XPathContext(root))
        self.check_selector("/values/d mod 3", root, 2)
    def test_number_function(self):
        """Check the number() conversion function."""
        root = self.etree.XML('<root>15</root>')
        # number() with no argument requires a context item.
        self.check_value("number()", MissingContextError)
        self.check_value("number()", 15, context=XPathContext(root))
        self.check_value("number()", 15, context=XPathContext(root, item=root.text))
        self.check_value("number(.)", 15, context=XPathContext(root))
        self.check_value("number(5.0)", 5.0)
        self.check_value("number('text')", math.isnan)
        self.check_value("number('-11')", -11)
        self.check_selector("number(9)", root, 9.0)
        if self.parser.version == '1.0':
            self.wrong_syntax("number(())")
        else:
            self.check_value("number(())", float('nan'), context=XPathContext(root))
            root = self.etree.XML(XML_DATA_TEST)
            self.check_selector("/values/a/number()", root, [3.4, 20.0, -10.1])
            results = select(root, "/values/*/number()", parser=self.parser.__class__)
            self.assertEqual(results[:3], [3.4, 20.0, -10.1])
            self.assertTrue(math.isnan(results[3]) and math.isnan(results[4]))
            self.check_selector("number(/values/d)", root, 44.0)
            # number() on a multi-item sequence is a type error in 2.0.
            self.check_selector("number(/values/a)", root, TypeError)
    def test_count_function(self):
        """Check count() on element and attribute node-sets and unions."""
        root = self.etree.XML('<A><B><C/><C/></B><B/><B><C/><C/><C/></B></A>')
        self.check_selector("count(B)", root, 3)
        self.check_selector("count(.//C)", root, 5)
        root = self.etree.XML('<value max="10" min="0">5</value>')
        self.check_selector("count(@avg)", root, 0)
        self.check_selector("count(@max)", root, 1)
        self.check_selector("count(@min)", root, 1)
        self.check_selector("count(@min | @max)", root, 2)
        self.check_selector("count(@min | @avg)", root, 1)
        self.check_selector("count(@top | @avg)", root, 0)
        self.check_selector("count(@min | @max) = 1", root, False)
        self.check_selector("count(@min | @max) = 2", root, True)
    def test_sum_function(self):
        """Check the sum() aggregate function."""
        root = self.etree.XML(XML_DATA_TEST)
        # $values is expected to be provided by the check helper's variables.
        self.check_value("sum($values)", 35)
        if self.parser.version == '1.0':
            self.wrong_syntax("sum(())")
        else:
            self.check_value("sum(())", 0)
            self.check_value("sum((), ())", [])
        self.check_selector("sum(/values/a)", root, 13.299999999999999)
        # Summing non-numeric node content yields NaN.
        self.check_selector("sum(/values/*)", root, float('nan'))
    def test_ceiling_function(self):
        """Check the ceiling() rounding function."""
        root = self.etree.XML(XML_DATA_TEST)
        self.check_value("ceiling(10.5)", 11)
        self.check_value("ceiling(-10.5)", -10)
        self.check_selector("//a[ceiling(.) = 10]", root, [])
        self.check_selector("//a[ceiling(.) = -10]", root, [root[2]])
        if self.parser.version == '1.0':
            self.wrong_syntax("ceiling(())")
        else:
            self.check_value("ceiling(())", [])
            self.check_value("ceiling((10.5))", 11)
            self.wrong_type("ceiling((10.5, 17.3))")
    def test_floor_function(self):
        """Check the floor() rounding function."""
        root = self.etree.XML(XML_DATA_TEST)
        self.check_value("floor(10.5)", 10)
        self.check_value("floor(-10.5)", -11)
        self.check_selector("//a[floor(.) = 10]", root, [])
        self.check_selector("//a[floor(.) = 20]", root, [root[1]])
        if self.parser.version == '1.0':
            self.wrong_syntax("floor(())")
            self.check_selector("//ab[floor(.) = 10]", root, [])
        else:
            self.check_value("floor(())", [])
            self.check_value("floor((10.5))", 10)
            self.wrong_type("floor((10.5, 17.3))")
    def test_round_function(self):
        """Check round(): half-up rounding, including round(-2.5) == -2."""
        self.check_value("round(2.5)", 3)
        self.check_value("round(2.4999)", 2)
        self.check_value("round(-2.5)", -2)
        if self.parser.version == '1.0':
            self.wrong_syntax("round(())")
        else:
            self.check_value("round(())", [])
            self.check_value("round((10.5))", 11)
            self.wrong_type("round((2.5, 12.2))")
    def test_context_variables(self):
        """Check '$name' variable references resolved from the context."""
        root = self.etree.XML('<A><B1><C/></B1><B2/><B3><C1/><C2/></B3></A>')
        context = XPathContext(root, variables={'alpha': 10, 'id': '19273222'})
        # Without a context the variable evaluates to None.
        self.check_value("$alpha", None)
        self.check_value("$alpha", 10, context=context)
        # An undefined variable raises a name error.
        self.check_value("$beta", NameError, context=context)
        self.check_value("$id", '19273222', context=context)
        self.wrong_syntax("$id()")
    def test_child_operator(self):
        """Check '/' child steps and explicit 'child::' axis selections."""
        root = self.etree.XML('<A><B1><C1/></B1><B2/><B3><C1/><C2/></B3></A>')
        self.check_selector('/', root, [])
        self.check_selector('/B1', root, [])
        self.check_selector('/A1', root, [])
        self.check_selector('/A', root, [root])
        self.check_selector('/A/B1', root, [root[0]])
        self.check_selector('/A/*', root, [root[0], root[1], root[2]])
        self.check_selector('/*/*', root, [root[0], root[1], root[2]])
        self.check_selector('/A/B1/C1', root, [root[0][0]])
        self.check_selector('/A/B1/*', root, [root[0][0]])
        self.check_selector('/A/B3/*', root, [root[2][0], root[2][1]])
        self.check_selector('child::*/child::C1', root, [root[0][0], root[2][0]])
        self.check_selector('/A/child::B3', root, [root[2]])
        self.check_selector('/A/child::C1', root, [])
    def test_context_item_expression(self):
        """Check the '.' context item expression in path steps."""
        root = self.etree.XML('<A><B1><C/></B1><B2/><B3><C1/><C2/></B3></A>')
        self.check_selector('.', root, [root])
        self.check_selector('/././.', root, [])
        self.check_selector('/A/.', root, [root])
        self.check_selector('/A/B1/.', root, [root[0]])
        self.check_selector('/A/B1/././.', root, [root[0]])
        # A path step on a non-node item is a type error.
        self.check_selector('1/.', root, TypeError)
    def test_self_axis(self):
        """Check the 'self' axis with node() and text() kind tests."""
        root = self.etree.XML('<A>A text<B1>B1 text</B1><B2/><B3>B3 text</B3></A>')
        self.check_selector('self::node()', root, [root])
        self.check_selector('self::text()', root, [])
    def test_child_axis(self):
        """Check the 'child' axis with name, text(), node() and '*' tests."""
        root = self.etree.XML('<A>A text<B1>B1 text</B1><B2/><B3>B3 text</B3></A>')
        self.check_selector('child::B1', root, [root[0]])
        self.check_selector('child::A', root, [])
        self.check_selector('child::text()', root, ['A text'])
        self.check_selector('child::node()', root, ['A text'] + root[:])
        self.check_selector('child::*', root, root[:])
        root = self.etree.XML('<A xmlns:ns="http://www.example.com/ns/"><ns:B1/><B2/></A>')
        self.check_selector('child::eg:A', root, [], namespaces={'eg': 'http://www.example.com/ns/'})
        self.check_selector('child::eg:B1', root, [root[0]], namespaces={'eg': 'http://www.example.com/ns/'})
    def test_descendant_axis(self):
        """Check 'descendant': excludes self for a relative path, includes
        the root element for an absolute one."""
        root = self.etree.XML('<A><B1><C/></B1><B2/><B3><C1/><C2/></B3></A>')
        self.check_selector('descendant::node()', root, [e for e in root.iter()][1:])
        self.check_selector('/descendant::node()', root, [e for e in root.iter()])
    def test_descendant_or_self_axis(self):
        """Check the 'descendant-or-self' axis."""
        root = self.etree.XML('<A><B1><C/></B1><B2/><B3><C/><C1/></B3></A>')
        self.check_selector('descendant-or-self::node()', root, [e for e in root.iter()])
        self.check_selector('descendant-or-self::node()/.', root, [e for e in root.iter()])
    def test_double_slash_shortcut(self):
        """Check the '//' abbreviation for descendant-or-self::node()/."""
        root = self.etree.XML('<A><B1><C/></B1><B2/><B3><C/><C1/></B3></A>')
        self.check_selector('//.', root, [e for e in root.iter()])
        self.check_selector('/A//.', root, [e for e in root.iter()])
        self.check_selector('/A//self::node()', root, [e for e in root.iter()])
        self.check_selector('//C1', root, [root[2][1]])
        self.check_selector('//B2', root, [root[1]])
        self.check_selector('//C', root, [root[0][0], root[2][0]])
        self.check_selector('//*', root, [e for e in root.iter()])
        root = self.etree.XML("""
        <pm>
            <content>
                <pmEntry>
                    <pmEntry pmEntryType="pm001">
                    </pmEntry>
                </pmEntry>
            </content>
        </pm>""")
        self.check_selector('/pm/content/pmEntry/pmEntry//pmEntry[@pmEntryType]', root, [])
    def test_following_axis(self):
        """Check the 'following' axis (document order, after the context node)."""
        root = self.etree.XML('<A><B1><C1/></B1><B2/><B3><C1/><C2/></B3><B4><C1><D1/></C1></B4></A>')
        self.check_selector('/A/B1/C1/following::*', root, [
            root[1], root[2], root[2][0], root[2][1], root[3], root[3][0], root[3][0][0]
        ])
        self.check_selector('/A/B1/following::C1', root, [root[2][0], root[3][0]])
    def test_following_sibling_axis(self):
        """Check the 'following-sibling' axis."""
        root = self.etree.XML('<A><B1><C1/><C2/><C3/></B1><B2><C1/><C2/><C3/><C4/></B2></A>')
        self.check_selector('/A/B1/C1/following-sibling::*', root, [root[0][1], root[0][2]])
        self.check_selector('/A/B2/C1/following-sibling::*', root, [root[1][1], root[1][2], root[1][3]])
        self.check_selector('/A/B1/C1/following-sibling::C3', root, [root[0][2]])
    def test_attribute_abbreviation_and_axis(self):
        """Check the 'attribute' axis and its '@' abbreviation."""
        root = self.etree.XML('<A id="1" a="alpha"><B1 b1="beta1"/><B2/><B3 b2="beta2" b3="beta3"/></A>')
        self.check_selector('/A/B1/attribute::*', root, ['beta1'])
        self.check_selector('/A/B1/@*', root, ['beta1'])
        # Attribute order is unspecified, so multi-attribute results are sets.
        self.check_selector('/A/B3/attribute::*', root, {'beta2', 'beta3'})
        self.check_selector('/A/attribute::*', root, {'1', 'alpha'})
        root = self.etree.XML('<value choice="int">10</value>')
        self.check_selector('@choice', root, ['int'])
        root = self.etree.XML('<ns:value xmlns:ns="ns" choice="int">10</ns:value>')
        self.check_selector('@choice', root, ['int'])
        self.check_selector('@choice="int"', root, True)
    def test_namespace_axis(self):
        """Check the 'namespace' axis: includes defaults plus in-scope decls."""
        root = self.etree.XML('<A xmlns:tst="http://xpath.test/ns"><tst:B1/></A>')
        namespaces = list(self.parser.DEFAULT_NAMESPACES.items()) + [('tst', 'http://xpath.test/ns')]
        self.check_selector('/A/namespace::*', root, expected=set(namespaces), namespaces=namespaces[-1:])
    def test_parent_abbreviation_and_axis(self):
        """Check the 'parent' axis and its '..' abbreviation."""
        root = self.etree.XML('<A><B1><C1/></B1><B2/><B3><C1/><C2/></B3><B4><C3><D1/></C3></B4></A>')
        self.check_selector('/A/*/C2/..', root, [root[2]])
        self.check_selector('/A/*/*/..', root, [root[0], root[2], root[3]])
        self.check_selector('//C2/..', root, [root[2]])
        self.check_selector('/A/*/C2/parent::node()', root, [root[2]])
        self.check_selector('/A/*/*/parent::node()', root, [root[0], root[2], root[3]])
        self.check_selector('//C2/parent::node()', root, [root[2]])
    def test_ancestor_axes(self):
        """The ancestor and ancestor-or-self axes walk from the context node to the root."""
        root = self.etree.XML('<A><B1><C1/></B1><B2><C1/><D2><E1/><E2/></D2><C2/></B2><B3><C1><D1/></C1></B3></A>')
        self.check_selector('/A/B3/C1/ancestor::*', root, [root, root[2]])
        # A path that selects nothing has no ancestors either.
        self.check_selector('/A/B4/C1/ancestor::*', root, [])
        self.check_selector('/A/*/C1/ancestor::*', root, [root, root[0], root[1], root[2]])
        # A name test on the axis keeps only the matching ancestors.
        self.check_selector('/A/*/C1/ancestor::B3', root, [root[2]])
        # ancestor-or-self also includes the context node itself.
        self.check_selector('/A/B3/C1/ancestor-or-self::*', root, [root, root[2], root[2][0]])
        self.check_selector('/A/*/C1/ancestor-or-self::*', root, [
            root, root[0], root[0][0], root[1], root[1][0], root[2], root[2][0]
        ])
    def test_preceding_axis(self):
        """The preceding axis selects all nodes before the context node in
        document order, excluding its ancestors."""
        root = self.etree.XML('<A><B1><C1/><C2/><C3/></B1><B2><C1/><C2/><C3/><C4/></B2></A>')
        # B1 is an ancestor of C2, so only C1 precedes it on this axis.
        self.check_selector('/A/B1/C2/preceding::*', root, [root[0][0]])
        self.check_selector('/A/B2/C4/preceding::*', root, [
            root[0], root[0][0], root[0][1], root[0][2], root[1][0], root[1][1], root[1][2]
        ])
        root = self.etree.XML("<root><e><a><b/></a><a><b/></a></e><e><a/></e></root>")
        # Also verify the parsed token tree for a preceding:: step with a name test.
        self.check_tree("/root/e/preceding::b", '(/ (/ (/ (root)) (e)) (preceding (b)))')
        self.check_selector('/root/e[2]/preceding::b', root, [root[0][0][0], root[0][1][0]])
def test_preceding_sibling_axis(self):
root = self.etree.XML('<A><B1><C1/><C2/><C3/></B1><B2><C1/><C2/><C3/><C4/></B2></A>')
self.check_selector('/A/B1/C2/preceding-sibling::*', root, [root[0][0]])
self.check_selector('/A/B2/C4/preceding-sibling::*', root, [root[1][0], root[1][1], root[1][2]])
self.check_selector('/A/B1/C2/preceding-sibling::C3', root, [])
    def test_default_axis(self):
        """An unqualified step uses the child axis; node-kind tests refine it."""
        root = self.etree.XML('<root><a id="1">first<b/></a><a id="2">second</a></root>')
        # '*' matches element children only, node() matches text children too.
        self.check_selector('/root/a/*', root, [root[0][0]])
        self.check_selector('/root/a/node()', root, ['first', root[0][0], 'second'])
        self.check_selector('/root/a/text()', root, ['first', 'second'])
        self.check_selector('/root/a/attribute::*', root, ['1', '2'])
        if self.parser.version > '1.0':
            # XPath 2.0+ adds kind tests and postfix function steps.
            self.check_selector('/root/a/attribute()', root, ['1', '2'])
            self.check_selector('/root/a/element()', root, [root[0][0]])
            self.check_selector('/root/a/name()', root, ['a', 'a'])
            self.check_selector('/root/a/last()', root, [2, 2])
            self.check_selector('/root/a/position()', root, [1, 2])
    def test_unknown_axis(self):
        """An undeclared axis name must raise a NameError at parse time."""
        self.check_value('unknown::node()', NameError)
    def test_predicate(self):
        """Predicates filter a step's node set by position, content or truth value."""
        root = self.etree.XML('<A><B1><C1/><C2/><C3/></B1><B2><C1/><C2/><C3/><C4/></B2></A>')
        # Existence predicate: keep B1 only if it has a C2 child.
        self.check_selector('/A/B1[C2]', root, [root[0]])
        # Numeric predicates are positional (1-based).
        self.check_selector('/A/B1[1]', root, [root[0]])
        self.check_selector('/A/B1[2]', root, [])
        self.check_selector('/A/*[2]', root, [root[1]])
        self.check_selector('/A/*[position()<2]', root, [root[0]])
        self.check_selector('/A/*[last()-1]', root, [root[0]])
        self.check_selector('/A/B2/*[position()>=2]', root, root[1][1:])
        # Content comparison predicates on the context node and on text().
        root = self.etree.XML("<bib><book><author>Asimov</author></book></bib>")
        self.check_selector("book/author[. = 'Asimov']", root, [root[0][0]])
        self.check_selector("book/author[. = 'Dickens']", root, [])
        self.check_selector("book/author[text()='Asimov']", root, [root[0][0]])
        # A non-empty string literal is truthy, an empty one is falsy.
        root = self.etree.XML('<A><B1>hello</B1><B2/><B3> </B3></A>')
        self.check_selector("/A/*[' ']", root, root[:])
        self.check_selector("/A/*['']", root, [])
        # Chained predicates apply left to right.
        root = self.etree.XML("<root><a><b/></a><a><b/><c/></a><a><c/></a></root>")
        self.check_tree("child::a[b][c]", '([ ([ (child (a)) (b)) (c))')
        self.check_selector("child::a[b][c]", root, [root[1]])
        # Predicates with not() and nested axis steps, evaluated per context item.
        root = self.etree.XML("<root><e><a><b/></a><a><b/></a></e><e><a/></e></root>")
        self.check_tree("a[not(b)]", '([ (a) (not (b)))')
        self.check_value("a[not(b)]", [], context=XPathContext(root, item=root[0]))
        self.check_value("a[not(b)]", [root[1][0]], context=XPathContext(root, item=root[1]))
        self.check_tree("preceding::a[not(b)]", '([ (preceding (a)) (not (b)))')
        self.check_value("a[preceding::a[not(b)]]", [], context=XPathContext(root, item=root[0]))
        self.check_value("a[preceding::a[not(b)]]", [], context=XPathContext(root, item=root[1]))
    def test_union(self):
        """The '|' operator merges node sets, preserving document order without duplicates."""
        root = self.etree.XML('<A min="1" max="10"><B1><C1/><C2/><C3/></B1><B2><C1/><C2/><C3/><C4/></B2><B3/></A>')
        # Result is in document order regardless of operand order.
        self.check_selector('/A/B2 | /A/B1', root, root[:2])
        # Overlapping operands are deduplicated.
        self.check_selector('/A/B2 | /A/*', root, root[:])
        self.check_selector('/A/B2 | /A/* | /A/B1', root, root[:])
        # Attribute order is unspecified, so compare as a set.
        self.check_selector('/A/@min | /A/@max', root, {'1', '10'})
    def test_default_namespace(self):
        """Default-namespace handling differs between XPath 1.0 and 2.0.

        In XPath 1.0 an unprefixed name test always means "no namespace";
        in XPath 2.0 it uses the default element namespace, if one is given.
        """
        root = self.etree.XML('<foo>bar</foo>')
        self.check_selector('/foo', root, [root])
        if self.parser.version == '1.0':
            # XPath 1.0 ignores the default namespace for name tests.
            self.check_selector('/foo', root, [root], namespaces={'': 'ns'})
        else:
            # XPath 2.0 applies it, so the no-namespace element no longer matches.
            self.check_selector('/foo', root, [], namespaces={'': 'ns'})
        # The '*:' wildcard prefix matches any namespace, including none.
        self.check_selector('/*:foo', root, [root], namespaces={'': 'ns'})
        root = self.etree.XML('<foo xmlns="ns">bar</foo>')
        self.check_selector('/foo', root, [])
        if type(self.parser) is XPath1Parser:
            self.check_selector('/foo', root, [], namespaces={'': 'ns'})
        else:
            self.check_selector('/foo', root, [root], namespaces={'': 'ns'})
        root = self.etree.XML('<A xmlns="http://xpath.test/ns"><B1/></A>')
        if self.parser.version > '1.0' or not hasattr(root, 'nsmap'):
            # nsmap check skips lxml with XPath 1.0, which handles this differently.
            self.check_selector("name(tst:B1)", root, 'tst:B1', namespaces={'tst': "http://xpath.test/ns"})
            if self.parser.version > '1.0':
                self.check_selector("name(B1)", root, 'B1', namespaces={'': "http://xpath.test/ns"})
            else:
                self.check_selector("name(B1)", root, '', namespaces={'': "http://xpath.test/ns"})
@unittest.skipIf(lxml_etree is None, "The lxml library is not installed")
class LxmlXPath1ParserTest(XPath1ParserTest):
    """Re-runs the XPath 1.0 parser tests against lxml, cross-checking each
    selector's result with lxml's own native ``xpath()`` implementation."""
    etree = lxml_etree
    def check_selector(self, path, root, expected, namespaces=None, **kwargs):
        # NOTE: the branch order matters — exception classes are types (and
        # therefore callable), so they must be tested before the other cases.
        if isinstance(expected, type) and issubclass(expected, Exception):
            self.assertRaises(expected, select, root, path, namespaces, self.parser.__class__, **kwargs)
        else:
            results = select(root, path, namespaces, self.parser.__class__, **kwargs)
            variables = kwargs.get('variables', {})
            if namespaces and '' in namespaces:
                # lxml's xpath() rejects an empty-string prefix, so drop it
                # before cross-checking.
                namespaces = {k: v for k, v in namespaces.items() if k}
            if isinstance(expected, set):
                # Unordered comparison for results with unspecified order.
                self.assertEqual(set(root.xpath(path, namespaces=namespaces, **variables)), expected)
                self.assertEqual(set(results), expected)
            elif not callable(expected):
                # Exact comparison against both elementpath and lxml results.
                self.assertEqual(root.xpath(path, namespaces=namespaces, **variables), expected)
                self.assertEqual(results, expected)
            elif isinstance(expected, type):
                # A non-exception type: check only the result's type.
                self.assertTrue(isinstance(results, expected))
            else:
                # A predicate callable: it decides whether the result is valid.
                self.assertTrue(expected(results))
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| true | true |
1c2dbcfe1ac7e8d9f58c1afcd5a420bf678d47d8 | 22,118 | py | Python | src/virtual-wan/azext_vwan/vendored_sdks/v2021_03_01/v2021_03_01/operations/_virtual_hub_ip_configuration_operations.py | haroonf/azure-cli-extensions | 61c044d34c224372f186934fa7c9313f1cd3a525 | [
"MIT"
] | 207 | 2017-11-29T06:59:41.000Z | 2022-03-31T10:00:53.000Z | src/virtual-wan/azext_vwan/vendored_sdks/v2021_03_01/v2021_03_01/operations/_virtual_hub_ip_configuration_operations.py | haroonf/azure-cli-extensions | 61c044d34c224372f186934fa7c9313f1cd3a525 | [
"MIT"
] | 4,061 | 2017-10-27T23:19:56.000Z | 2022-03-31T23:18:30.000Z | src/virtual-wan/azext_vwan/vendored_sdks/v2021_03_01/v2021_03_01/operations/_virtual_hub_ip_configuration_operations.py | haroonf/azure-cli-extensions | 61c044d34c224372f186934fa7c9313f1cd3a525 | [
"MIT"
] | 802 | 2017-10-11T17:36:26.000Z | 2022-03-31T22:24:32.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualHubIpConfigurationOperations(object):
    """VirtualHubIpConfigurationOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2021_03_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def get(
        self,
        resource_group_name,  # type: str
        virtual_hub_name,  # type: str
        ip_config_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.HubIpConfiguration"
        """Retrieves the details of a Virtual Hub Ip configuration.
        :param resource_group_name: The resource group name of the VirtualHub.
        :type resource_group_name: str
        :param virtual_hub_name: The name of the VirtualHub.
        :type virtual_hub_name: str
        :param ip_config_name: The name of the ipconfig.
        :type ip_config_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: HubIpConfiguration, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2021_03_01.models.HubIpConfiguration
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.HubIpConfiguration"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
            'ipConfigName': self._serialize.url("ip_config_name", ip_config_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200 is the only success status for this GET.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('HubIpConfiguration', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/ipConfigurations/{ipConfigName}'}  # type: ignore
    def _create_or_update_initial(
        self,
        resource_group_name,  # type: str
        virtual_hub_name,  # type: str
        ip_config_name,  # type: str
        parameters,  # type: "_models.HubIpConfiguration"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.HubIpConfiguration"
        # Issues the initial PUT of the long-running create-or-update operation;
        # polling to completion is handled by begin_create_or_update.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.HubIpConfiguration"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
            'ipConfigName': self._serialize.url("ip_config_name", ip_config_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'HubIpConfiguration')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200 = updated existing resource, 201 = created a new one.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if response.status_code == 200:
            deserialized = self._deserialize('HubIpConfiguration', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('HubIpConfiguration', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/ipConfigurations/{ipConfigName}'}  # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        virtual_hub_name,  # type: str
        ip_config_name,  # type: str
        parameters,  # type: "_models.HubIpConfiguration"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.HubIpConfiguration"]
        """Creates a VirtualHubIpConfiguration resource if it doesn't exist else updates the existing
        VirtualHubIpConfiguration.
        :param resource_group_name: The resource group name of the VirtualHub.
        :type resource_group_name: str
        :param virtual_hub_name: The name of the VirtualHub.
        :type virtual_hub_name: str
        :param ip_config_name: The name of the ipconfig.
        :type ip_config_name: str
        :param parameters: Hub Ip Configuration parameters.
        :type parameters: ~azure.mgmt.network.v2021_03_01.models.HubIpConfiguration
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either HubIpConfiguration or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2021_03_01.models.HubIpConfiguration]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.HubIpConfiguration"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: send the initial PUT; cls=identity keeps the raw
            # pipeline response for the poller.
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                virtual_hub_name=virtual_hub_name,
                ip_config_name=ip_config_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('HubIpConfiguration', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
            'ipConfigName': self._serialize.url("ip_config_name", ip_config_name, 'str'),
        }
        # Final state is read from the Azure-AsyncOperation header for PUT LROs.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/ipConfigurations/{ipConfigName}'}  # type: ignore
    def _delete_initial(
        self,
        resource_group_name,  # type: str
        virtual_hub_name,  # type: str
        ip_config_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Issues the initial DELETE of the long-running delete operation;
        # polling to completion is handled by begin_delete.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"
        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
            'ipConfigName': self._serialize.url("ip_config_name", ip_config_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200/202 = accepted, 204 = resource already absent.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/ipConfigurations/{ipConfigName}'}  # type: ignore
    def begin_delete(
        self,
        resource_group_name,  # type: str
        virtual_hub_name,  # type: str
        ip_config_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes a VirtualHubIpConfiguration.
        :param resource_group_name: The resource group name of the VirtualHubBgpConnection.
        :type resource_group_name: str
        :param virtual_hub_name: The name of the VirtualHub.
        :type virtual_hub_name: str
        :param ip_config_name: The name of the ipconfig.
        :type ip_config_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                virtual_hub_name=virtual_hub_name,
                ip_config_name=ip_config_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
            'ipConfigName': self._serialize.url("ip_config_name", ip_config_name, 'str'),
        }
        # DELETE LROs resolve their final state via the Location header.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/ipConfigurations/{ipConfigName}'}  # type: ignore
    def list(
        self,
        resource_group_name,  # type: str
        virtual_hub_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.ListVirtualHubIpConfigurationResults"]
        """Retrieves the details of all VirtualHubIpConfigurations.
        :param resource_group_name: The resource group name of the VirtualHub.
        :type resource_group_name: str
        :param virtual_hub_name: The name of the VirtualHub.
        :type virtual_hub_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListVirtualHubIpConfigurationResults or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_03_01.models.ListVirtualHubIpConfigurationResults]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ListVirtualHubIpConfigurationResults"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Builds either the first-page request or a follow-up request from
            # the server-provided continuation link.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # The next_link already embeds its query string.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            deserialized = self._deserialize('ListVirtualHubIpConfigurationResults', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/ipConfigurations'}  # type: ignore
| 50.040724 | 223 | 0.668008 |
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualHubIpConfigurationOperations(object):
models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Wire up the shared pipeline client, its configuration and the
        # model (de)serializers supplied by the generated service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def get(
        self,
        resource_group_name,
        virtual_hub_name,
        ip_config_name,
        **kwargs
    ):
        """Retrieves the details of a Virtual Hub Ip configuration.

        :param resource_group_name: The resource group name of the VirtualHub.
        :param virtual_hub_name: The name of the VirtualHub.
        :param ip_config_name: The name of the ipconfig.
        :keyword callable cls: A custom type or function that will be passed the direct response.
        :return: HubIpConfiguration, or the result of cls(response).
        :raises ~azure.core.exceptions.HttpResponseError: on a non-200 response.
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"
        # Build the request URL from the metadata template and path arguments.
        url = self.get.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
            'ipConfigName': self._serialize.url("ip_config_name", ip_config_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Query string and headers.
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200 is the only success status for this GET.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('HubIpConfiguration', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/ipConfigurations/{ipConfigName}'}
    def _create_or_update_initial(
        self,
        resource_group_name,
        virtual_hub_name,
        ip_config_name,
        parameters,
        **kwargs
    ):
        # Issues the initial PUT of the long-running create-or-update
        # operation; polling to completion is handled by begin_create_or_update.
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Build the request URL from the metadata template and path arguments.
        url = self._create_or_update_initial.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
            'ipConfigName': self._serialize.url("ip_config_name", ip_config_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Query string, headers and serialized request body.
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}
        body_content = self._serialize.body(parameters, 'HubIpConfiguration')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200 = updated an existing resource, 201 = created a new one.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if response.status_code == 200:
            deserialized = self._deserialize('HubIpConfiguration', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('HubIpConfiguration', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/ipConfigurations/{ipConfigName}'}
    def begin_create_or_update(
        self,
        resource_group_name,
        virtual_hub_name,
        ip_config_name,
        parameters,
        **kwargs
    ):
        """Creates a VirtualHubIpConfiguration resource if it doesn't exist
        else updates the existing VirtualHubIpConfiguration.

        :param resource_group_name: The resource group name of the VirtualHub.
        :param virtual_hub_name: The name of the VirtualHub.
        :param ip_config_name: The name of the ipconfig.
        :param parameters: Hub Ip Configuration parameters.
        :keyword callable cls: A custom type or function that will be passed the direct response.
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True (default) for ARMPolling, False for no polling,
         or a custom PollingMethod instance.
        :keyword int polling_interval: Default waiting time between two polls
         when no Retry-After header is present.
        :return: An LROPoller that resolves to HubIpConfiguration or cls(response).
        :raises ~azure.core.exceptions.HttpResponseError: on a failed initial request.
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        if cont_token is None:
            # No saved state: send the initial PUT; cls=identity keeps the raw
            # pipeline response for the poller.
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                virtual_hub_name=virtual_hub_name,
                ip_config_name=ip_config_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserializes the terminal response of the LRO.
            deserialized = self._deserialize('HubIpConfiguration', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
            'ipConfigName': self._serialize.url("ip_config_name", ip_config_name, 'str'),
        }
        # Final state is read from the Azure-AsyncOperation header for PUT LROs.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/ipConfigurations/{ipConfigName}'}
    def _delete_initial(
        self,
        resource_group_name,
        virtual_hub_name,
        ip_config_name,
        **kwargs
    ):
        """Issue the initial DELETE request for a VirtualHubIpConfiguration.

        Returns None (or the result of the optional ``cls`` callback); the caller
        (``begin_delete``) wraps this in an LRO poller.
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"
        # Construct URL from the metadata template plus path arguments.
        url = self._delete_initial.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
            'ipConfigName': self._serialize.url("ip_config_name", ip_config_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200/202/204 are the accepted service responses for delete.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/ipConfigurations/{ipConfigName}'}
    def begin_delete(
        self,
        resource_group_name,
        virtual_hub_name,
        ip_config_name,
        **kwargs
    ):
        """Delete a VirtualHubIpConfiguration as a long-running operation.

        :param resource_group_name: Name of the resource group containing the virtual hub.
        :param virtual_hub_name: Name of the VirtualHub.
        :param ip_config_name: Name of the IP configuration to delete.
        :keyword polling: True (default, ARM polling), False (no polling) or a custom polling object.
        :keyword continuation_token: Token to rehydrate a poller from a previous call.
        :return: An LROPoller that resolves to None (or the result of ``cls`` if supplied).
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        if cont_token is None:
            # No continuation token: issue the initial DELETE request.
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                virtual_hub_name=virtual_hub_name,
                ip_config_name=ip_config_name,
                cls=lambda x,y,z: x,  # hand the raw pipeline response to the poller
                **kwargs
            )
        # Consumed by the initial call; must not be passed on to the polling method.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Delete has no body to deserialize; only invoke the optional callback.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
            'ipConfigName': self._serialize.url("ip_config_name", ip_config_name, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            # Rehydrate an in-flight operation from its continuation token.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/ipConfigurations/{ipConfigName}'}
    def list(
        self,
        resource_group_name,
        virtual_hub_name,
        **kwargs
    ):
        """List the IP configurations of a VirtualHub.

        :param resource_group_name: Name of the resource group containing the virtual hub.
        :param virtual_hub_name: Name of the VirtualHub.
        :return: An ItemPaged iterator over HubIpConfiguration instances
            (pages deserialized as ListVirtualHubIpConfigurationResults).
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build the GET request: the templated URL for the first page,
            # or the service-provided next_link for subsequent pages.
            header_parameters = {}
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                url = self.list.metadata['url']
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already embeds the query string.
                url = next_link
                query_parameters = {}
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        def extract_data(pipeline_response):
            # Split one page into (link to the next page, iterator of elements).
            deserialized = self._deserialize('ListVirtualHubIpConfigurationResults', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            # Fetch one page and raise on any non-200 response.
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/ipConfigurations'}
| true | true |
1c2dbcfec9ff34a834ed87b4ac2e2111e2ff2a7a | 2,348 | py | Python | app/modules/core/tests/fixtures.py | nickmoreton/nhsx-website | 2397d1308376c02b75323d30e6bc916af0daac9d | [
"MIT"
] | null | null | null | app/modules/core/tests/fixtures.py | nickmoreton/nhsx-website | 2397d1308376c02b75323d30e6bc916af0daac9d | [
"MIT"
] | null | null | null | app/modules/core/tests/fixtures.py | nickmoreton/nhsx-website | 2397d1308376c02b75323d30e6bc916af0daac9d | [
"MIT"
] | null | null | null | # 3rd party
import json
from typing import List
import pytest
from wagtail.core.models import Page
from modules.core.models import SectionPage, ArticlePage
from .blocks import RICHTEXT_BLOCK, PROMO, SMALL_PROMO
pytestmark = pytest.mark.django_db
def _create_section_page(title: str, parent: Page) -> SectionPage:
    """Create, attach and publish a ``SectionPage`` under ``parent``.

    Abstracting this allows more scenarios to be tested than just passing
    the fixture around.

    Args:
        title (str): The page title
        parent (Page): A page to attach our section page to

    Returns:
        SectionPage: the newly published section page
    """
    section = SectionPage(title=title)
    parent.add_child(instance=section)
    section.save_revision().publish()
    return section
def _create_article_page(title: str, parent: Page) -> ArticlePage:
    """Create, attach and publish an ``ArticlePage`` under ``parent``.

    Abstracting this allows us to test more scenarios than just passing the
    fixture around.

    Args:
        title (str): The page title
        parent (Page): A page to attach our article page to

    Returns:
        ArticlePage: the newly published article page
    """
    # NOTE: return annotation fixed — this factory builds an ArticlePage,
    # not a SectionPage.
    p = ArticlePage()
    p.title = title
    parent.add_child(instance=p)
    p.save_revision().publish()
    return p
@pytest.fixture(scope="function")
def section_page(home_page) -> SectionPage:
    """A single published SectionPage attached to the home page."""
    return _create_section_page("Test Section Page", home_page)
@pytest.fixture(scope="function")
def article_page(section_page) -> ArticlePage:
    """A single published ArticlePage attached to the section page fixture."""
    return _create_article_page("Test Article Page", section_page)
@pytest.fixture(scope="function")
def section_pages(section_page) -> List[SectionPage]:
    """Fixture providing 10 SectionPages attached to section_page."""
    return [
        _create_section_page(f"Test Section Page {idx}", section_page)
        for idx in range(10)
    ]
@pytest.fixture(scope="function")
def article_pages(section_page) -> List[ArticlePage]:
    """Fixture providing 10 ArticlePages attached to section_page."""
    return [
        _create_article_page(f"Test Article Page {idx}", section_page)
        for idx in range(10)
    ]
@pytest.fixture(scope="function")
def article_page_with_body(section_page) -> ArticlePage:
    """An ArticlePage whose streamfield body holds richtext plus two promos."""
    page = _create_article_page("Test Article Page", section_page)
    page.body = json.dumps([RICHTEXT_BLOCK, PROMO, SMALL_PROMO])
    page.save_revision().publish()
    return page
| 26.382022 | 78 | 0.691227 |
import json
from typing import List
import pytest
from wagtail.core.models import Page
from modules.core.models import SectionPage, ArticlePage
from .blocks import RICHTEXT_BLOCK, PROMO, SMALL_PROMO
pytestmark = pytest.mark.django_db
def _create_section_page(title: str, parent: Page) -> SectionPage:
p = SectionPage()
p.title = title
parent.add_child(instance=p)
p.save_revision().publish()
return p
def _create_article_page(title: str, parent: Page) -> SectionPage:
p = ArticlePage()
p.title = title
parent.add_child(instance=p)
p.save_revision().publish()
return p
@pytest.fixture(scope="function")
def section_page(home_page) -> SectionPage:
p = _create_section_page("Test Section Page", home_page)
return p
@pytest.fixture(scope="function")
def article_page(section_page) -> ArticlePage:
p = _create_article_page("Test Article Page", section_page)
return p
@pytest.fixture(scope="function")
def section_pages(section_page) -> List[SectionPage]:
rv = []
for _ in range(0, 10):
p = _create_section_page(f"Test Section Page {_}", section_page)
rv.append(p)
return rv
@pytest.fixture(scope="function")
def article_pages(section_page) -> List[ArticlePage]:
rv = []
for _ in range(0, 10):
p = _create_article_page(f"Test Article Page {_}", section_page)
rv.append(p)
return rv
@pytest.fixture(scope="function")
def article_page_with_body(section_page) -> ArticlePage:
p = _create_article_page("Test Article Page", section_page)
p.body = json.dumps([RICHTEXT_BLOCK, PROMO, SMALL_PROMO])
p.save_revision().publish()
return p
| true | true |
1c2dbe4a08bd41bf72da14d26c0d265c7ca59c69 | 231 | py | Python | 11799/horror_dash.py | sc458/uHunt-solutions | 37464e1db98c897995eab79caa6c70f379ad877a | [
"MIT"
] | null | null | null | 11799/horror_dash.py | sc458/uHunt-solutions | 37464e1db98c897995eab79caa6c70f379ad877a | [
"MIT"
] | null | null | null | 11799/horror_dash.py | sc458/uHunt-solutions | 37464e1db98c897995eab79caa6c70f379ad877a | [
"MIT"
] | null | null | null |
# uHunt 11799 "Horror Dash": for each test case read one line of
# space-separated creature speeds and print the fastest one.
num = int(input())
for case_idx in range(num):
  # One line of space-separated speeds per test case; report the maximum.
  speeds = [int(token) for token in input().split(' ')]
  print('Case ' + str(case_idx + 1) + ': ' + str(max(speeds)))
| 10.5 | 51 | 0.467532 |
num = int(input())
for i in range(0,num):
arr = input()
arr = arr.split(' ')
ints = []
for j in range(0,len(arr)):
ints.append(int(arr[j]))
print('Case ' + str(i+1) + ': ' + str(max(ints)))
| true | true |
1c2dbed81b714154b35521eda4ccf58ad3c299db | 496 | py | Python | python/5.py | dpetker/project-euler | d232367d5f21821871c53d6ecc43c8d6af801d2c | [
"MIT"
] | null | null | null | python/5.py | dpetker/project-euler | d232367d5f21821871c53d6ecc43c8d6af801d2c | [
"MIT"
] | null | null | null | python/5.py | dpetker/project-euler | d232367d5f21821871c53d6ecc43c8d6af801d2c | [
"MIT"
] | null | null | null | # Soultion for Project Euler Problem #5 - https://projecteuler.net/problem=5
# (c) 2016 dpetker
# Start with this as the problem states its the smallest value evenly divisible
# by 1-10
test_val = 2520
def test_divisors(n):
for i in range(1, 21):
if n % i != 0:
return False
return True
# Brute-force search: the answer must be a multiple of 20, so only
# candidates in steps of 20 above 2520 are tested.
while True:
  test_val += 20
  if test_divisors(test_val):
    break
print('The smallest positive number that is evenly divisible by all of the numbers from 1 to 20 is {}'.format(test_val))
| 23.619048 | 120 | 0.705645 | (n):
for i in range(1, 21):
if n % i != 0:
return False
return True
while True:
test_val += 20
if test_divisors(test_val):
break
print('The smallest positive number that is evenly divisible by all of the numbers from 1 to 20 is {}'.format(test_val))
| true | true |
1c2dbf0093f5b08eb9f80e323c62cbac7485263c | 875 | py | Python | tx_parse_xml/acl__prop_to_title.py | DazEB2/SimplePyScripts | 1dde0a42ba93fe89609855d6db8af1c63b1ab7cc | [
"CC-BY-4.0"
] | 117 | 2015-12-18T07:18:27.000Z | 2022-03-28T00:25:54.000Z | tx_parse_xml/acl__prop_to_title.py | DazEB2/SimplePyScripts | 1dde0a42ba93fe89609855d6db8af1c63b1ab7cc | [
"CC-BY-4.0"
] | 8 | 2018-10-03T09:38:46.000Z | 2021-12-13T19:51:09.000Z | tx_parse_xml/acl__prop_to_title.py | DazEB2/SimplePyScripts | 1dde0a42ba93fe89609855d6db8af1c63b1ab7cc | [
"CC-BY-4.0"
] | 28 | 2016-08-02T17:43:47.000Z | 2022-03-21T08:31:12.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
from pathlib import Path
from bs4 import BeautifulSoup
# Input ACL definition file and its English locale counterpart
# (locale file lives at <root>/locale/en/mlb<acl-name>).
FILE_NAME_ACL = Path(r'C:\<...>\ads\<...>\src\<...>.xml')
FILE_NAME_ACL_LOCALE = FILE_NAME_ACL.parent.parent / 'locale' / 'en' / ('mlb' + FILE_NAME_ACL.name)

# Parse both XML files with the lenient html.parser backend.
root_acl = BeautifulSoup(open(FILE_NAME_ACL, 'rb'), 'html.parser')
root_acl_locale = BeautifulSoup(open(FILE_NAME_ACL_LOCALE, 'rb'), 'html.parser')

# NOTE: <Group Id="cpg<...>" Name="<...>" Members="<PROP_IDS">
PROP_IDS = "prd<...> prd<...>".split()


def _lookup(prop_id):
    # Resolve one property id to its (name, localized title) pair.
    prop_el = root_acl.select_one('#' + prop_id)
    locale_el = root_acl_locale.select_one('#' + prop_el.presentation['titleid'])
    return prop_el['name'], locale_el.value.text


items = sorted(_lookup(prop_id) for prop_id in PROP_IDS)
for name, title in items:
    print(name, title, sep='\t')
| 25.735294 | 99 | 0.659429 |
__author__ = 'ipetrash'
from pathlib import Path
from bs4 import BeautifulSoup
FILE_NAME_ACL = Path(r'C:\<...>\ads\<...>\src\<...>.xml')
FILE_NAME_ACL_LOCALE = FILE_NAME_ACL.parent.parent / 'locale' / 'en' / ('mlb' + FILE_NAME_ACL.name)
root_acl = BeautifulSoup(open(FILE_NAME_ACL, 'rb'), 'html.parser')
root_acl_locale = BeautifulSoup(open(FILE_NAME_ACL_LOCALE, 'rb'), 'html.parser')
PROP_IDS = "prd<...> prd<...>".split()
items = []
for prop_id in PROP_IDS:
prop_el = root_acl.select_one('#' + prop_id)
name = prop_el['name']
title_id = prop_el.presentation['titleid']
title = root_acl_locale.select_one('#' + title_id).value.text
items.append((name, title))
items.sort()
for name, title in items:
print(name, title, sep='\t')
| true | true |
1c2dbf26a88b8fd1f7357b6ade26d3238a810da4 | 51,362 | py | Python | detect_trucks/TruckDetector.py | hfisser/s2_trucks | 323e73edb82e314e6695e8cf8d89c2de22f54b04 | [
"MIT"
] | 4 | 2021-06-17T07:44:16.000Z | 2021-10-15T22:32:12.000Z | detect_trucks/TruckDetector.py | hfisser/s2_trucks | 323e73edb82e314e6695e8cf8d89c2de22f54b04 | [
"MIT"
] | null | null | null | detect_trucks/TruckDetector.py | hfisser/s2_trucks | 323e73edb82e314e6695e8cf8d89c2de22f54b04 | [
"MIT"
] | null | null | null | ####################################################
# Author: Henrik Fisser, 2020
####################################################
from array_utils.plot import plot_img
import os, warnings
import pandas as pd
import numpy as np
import geopandas as gpd
import xarray as xr
from shapely.geometry import box
from scipy.stats import linregress, spearmanr
from datetime import datetime
from array_utils.math import normalized_ratio, rescale
from array_utils.geocoding import lat_from_meta, lon_from_meta, metadata_to_bbox_epsg4326
from osm_utils.utils import get_roads, rasterize_osm
from utils.ProgressBar import ProgressBar
warnings.filterwarnings("ignore")
# Calibration data derived from labelled truth samples: per-band thresholds
# and RGB cluster vectors (hard-coded Windows path — TODO: make configurable).
dir_ancillary = os.path.join("F:" + os.sep + "Masterarbeit", "DLR", "project", "1_truck_detection", "truth")
THRESHOLDS = pd.read_csv(os.path.join(dir_ancillary, "thresholds.csv"), index_col=0)
RGB_VECTORS = pd.read_csv(os.path.join(dir_ancillary, "rgb_vector_clusters.csv"), index_col=0)
# assume reflectance rescaled to [0., 1.]
# REFLECTANCE thresholds per band
MIN_RED = THRESHOLDS["red_low"][0]
MAX_RED = THRESHOLDS["red_high"][0]
#MAX_RED_BOX = THRESHOLDS["box_mean_red_high"][0]
MIN_GREEN = THRESHOLDS["green_low"][0]
MAX_GREEN = THRESHOLDS["green_high"][0]
#MAX_GREEN_BOX = THRESHOLDS["box_mean_green_high"][0]
MIN_BLUE = THRESHOLDS["blue_low"][0]
MAX_BLUE = THRESHOLDS["blue_high"][0]
#MAX_BLUE_BOX = THRESHOLDS["box_mean_blue_high"][0]
MIN_RGB_STD = THRESHOLDS["min_std"][0] / 3
# VEGETATION: mask pixels above mean NDVI + 3 sigma
MAX_NDVI = THRESHOLDS["ndvi_mean"][0] + THRESHOLDS["ndvi_std"][0] * 3
# RATIOS (currently all zero, i.e. effectively disabled)
MIN_BLUE_RED_RATIO = 0
MIN_BLUE_GREEN_RATIO = 0
MIN_GREEN_BLUE_RATIO = 0
MIN_RED_BLUE_RATIO = 0
# SPATIAL: allowed pixel distances of the green/red targets from the blue target
MEAN_MAX_DIST_GREEN = THRESHOLDS["mean_max_dist_green"][0]
MEAN_MAX_DIST_RED = THRESHOLDS["mean_max_dist_red"][0]
MAX_MAX_DIST_GREEN = THRESHOLDS["max_max_dist_green"][0]
MAX_MAX_DIST_RED = THRESHOLDS["max_max_dist_red"][0]
MAX_ANGLE_BR_BG = THRESHOLDS["mean_red_green_spatial_angle"][0] + THRESHOLDS["std_red_green_spatial_angle"][0] * 3
# SPECTRAL ANGLE
#MIN_R_SQUARED = THRESHOLDS["mean_rgb_rsquared"][0] - THRESHOLDS["std_rgb_rsquared"][0] * 3
DEFAULT_MIN_CORRELATION = 0.5
MAX_SLOPE = 10
MIN_SLOPE = 0.05
# Open Street Maps road buffer
OSM_BUFFER = 25
# Sensing offset between the B02 and B04 band acquisitions
SECONDS_OFFSET_B02_B04 = 1.01  # seconds
TRUCK_LENGTH = 18.75  # meters
HOME = os.path.dirname(__file__)
class Detector:
    def __init__(self, min_r_squared=None, min_blue_green_ratio=None, min_blue_red_ratio=None):
        """
        Detector class for detecting large moving vehicles on roads using Sentinel-2 data
        :param min_r_squared: float minimum correlation threshold
        :param min_blue_green_ratio: float minimum blue-green ratio for detection
        :param min_blue_red_ratio: float minimum blue-red ratio for detection
        """
        # NOTE(review): min_r_squared is currently overwritten with
        # DEFAULT_MIN_CORRELATION in pre_process — confirm whether the
        # constructor argument should still take precedence.
        self.min_r_squared = min_r_squared
        self.min_blue_green = min_blue_green_ratio
        self.min_blue_red = min_blue_red_ratio
        # state populated during pre-processing/detection
        self.min_score = None
        self.band_stack_np = None  # (4, H, W) reflectance stack [B04, B03, B02, B08]
        self.lat, self.lon = None, None  # per-pixel coordinate vectors
        self.trucks_np = None  # binary candidate mask produced by _detect
        self.crs = None
    def pre_process(self, band_dict, metadata, subset_box=None):
        """
        rescales data to 0-1 and calculates lat, lon coordinates, masks to OSM roads
        :param band_dict: dict holding 3 arrays with shape (height, width), keys are B02, B03, B04, B08
        :param metadata: dict metadata from rasterio IO
        :param subset_box: dict with int ymin, ymax, xmin, xmax
        :return: numpy ndarray of shape (4, H, W) with bands ordered [B04, B03, B02, B08],
            rescaled to [0, 1] and NaN outside the OSM road mask
        """
        # NOTE(review): the user-supplied threshold is force-overridden here
        # (the original conditional is commented out) — confirm intended.
        self.min_r_squared = DEFAULT_MIN_CORRELATION #if self.min_r_squared is None else self.min_r_squared
        if not isinstance(band_dict, dict):
            raise TypeError("'band_dict' must be a dictionary")
        try:
            test = band_dict["B02"], band_dict["B03"], band_dict["B04"], band_dict["B08"]
        except KeyError:
            raise KeyError("'band_dict' must contain 'B02', 'B03', 'B04', 'B08'")
        if not isinstance(metadata, dict):
            raise TypeError("'metadata' must be a dictionary")
        self.crs = metadata["crs"]
        # prefer pre-computed coordinates; fall back to deriving them from metadata
        try:
            self.lat, self.lon = metadata["lat"], metadata["lon"]
        except KeyError:
            try:
                self.lat, self.lon = lat_from_meta(metadata), lon_from_meta(metadata)
            except KeyError as e:
                raise e
        box_utm = [np.min(self.lat), np.max(self.lon), np.max(self.lat), np.min(self.lon)]
        box_epsg4326 = metadata_to_bbox_epsg4326(metadata)
        dir_ancil = os.path.join(HOME, "AUXILIARY")
        if not os.path.exists(dir_ancil):
            os.mkdir(dir_ancil)
        box_epsg4326 = list(np.flip(box_epsg4326))
        # rasterized OSM road mask (NaN off-road) restricted to the scene bbox
        osm_mask = self.get_osm_mask(box_epsg4326, metadata["crs"], band_dict["B02"],
                                     {"lat": self.lat, "lon": self.lon},
                                     dir_ancil)
        # band order fixed as [B04, B03, B02, B08] = [red, green, blue, nir]
        band_stack_np = np.array([band_dict["B04"], band_dict["B03"], band_dict["B02"], band_dict["B08"]])
        low_rgb_mask = self.calc_low_quantile_mask(band_stack_np[0:3], [0.2])  # mask out lowest 20 % reflectances
        #high_rgb_mask = self.calc_high_quantile_mask(band_stack_np[0:3], [0.98])  # mask out highest 1 % reflectances
        band_stack_np[:, np.isnan(low_rgb_mask)] = np.nan
        #band_stack_np[:, np.isnan(high_rgb_mask)] = np.nan
        band_stack_np *= osm_mask
        # optionally crop to the requested window (subset_box may be None)
        try:
            band_stack_np = band_stack_np[:, subset_box["ymin"]:subset_box["ymax"], subset_box["xmin"]:subset_box["xmax"]]
            self.lat = self.lat[subset_box["ymin"]:subset_box["ymax"] + 1]
            self.lon = self.lon[subset_box["xmin"]:subset_box["xmax"] + 1]
        except TypeError:  # subset_box is allowed to be None
            pass
        band_stack_np_rescaled = band_stack_np.copy()
        band_stack_np = None
        # temporarily encode NaN as 0 so rescale works, then restore NaN
        band_stack_np_rescaled[np.isnan(band_stack_np_rescaled)] = 0
        band_stack_np_rescaled = rescale(band_stack_np_rescaled, 0, 1)
        # band_stack_np_rescaled[:, band_stack_np_rescaled[0] > THRESHOLDS["red_high"][0]] = np.nan
        # band_stack_np_rescaled[:, band_stack_np_rescaled[1] > THRESHOLDS["green_high"][0]] = np.nan
        # band_stack_np_rescaled[:, band_stack_np_rescaled[2] > THRESHOLDS["blue_high"][0]] = np.nan
        band_stack_np_rescaled[band_stack_np_rescaled == 0] = np.nan
        return band_stack_np_rescaled
def detect_trucks(self, band_stack_np):
"""
Method for detecting large moving vehicles, calls ratio-based detection and object delineation
:param band_stack_np: numpy ndarray containing the pre-processed Sentinel-2 reflectance bands
:return: GeoDataframe containing the detected boxes
"""
t0 = datetime.now()
if not isinstance(band_stack_np, np.ndarray):
raise TypeError("'band_stack_np' must be of type numpy.ndarray")
self.band_stack_np = band_stack_np
self._detect()
detections = self._context_zoom() # zoom into context around potential detection
print("Duration: %s minutes" % ((datetime.now() - t0).total_seconds() / 60))
return detections
    def _detect(self):
        """
        Detect pixels of superior blue reflectance based on band ratios.
        Writes the binary candidate mask to self.trucks_np (1 = candidate).
        """
        # band order in band_stack_np is [B04, B03, B02, B08] (see pre_process)
        b02, b03, b04 = self.band_stack_np[2], self.band_stack_np[1], self.band_stack_np[0]
        min_quantile_blue, max_quantile_blue = np.nanquantile(b02, [0.5]), np.nanquantile(b02, [0.999])
        max_quantile_green, max_quantile_red = np.nanquantile(b03, [0.9]), np.nanquantile(b04, [0.9])
        bg_ratio, br_ratio = normalized_ratio(b02, b03), normalized_ratio(b02, b04)
        # blue must exceed green/red by at least the scene-wide mean difference
        bg = np.int8(bg_ratio > np.nanmean(b02) - np.nanmean(b03))
        br = np.int8(br_ratio > np.nanmean(b02) - np.nanmean(b04))
        blue_min = np.int8(b02 > min_quantile_blue)  # exclude low 50 % blue
        blue_max = np.int8(b02 < max_quantile_blue)
        green_max = np.int8(b03 < max_quantile_green)
        red_max = np.int8(b04 < max_quantile_red)
        # mask anomalous pixels before computing the RGB std criterion
        mask = self.expose_anomalous_pixels(self.band_stack_np)
        self.band_stack_np = self.band_stack_np * mask
        self.band_stack_np[self.band_stack_np == 0] = np.nan
        # ratios B02-B03 (blue-green) and B02-B04 (blue-red)
        std_min = np.int8(np.nanstd(self.band_stack_np[0:3], 0) * 10 >= THRESHOLDS["q1_std_at_max_blue"][0])
        # self.trucks_np = np.int8(bg * br * blue_min * blue_max * std_min * green_max * red_max)
        # candidate = all criteria fulfilled (logical AND via multiplication)
        self.trucks_np = np.int8(bg * br * blue_min * green_max * red_max * std_min)
        bg_ratio, br_ratio, blue_min, blue_max, green_max, red_max, std_min = None, None, None, None, None, None, None
    def _context_zoom(self):
        """
        Looks at the spatial context of each detected pixel and calls the method
        for delineating the potential object (_box_test) on a small window.
        :return: GeoDataframe containing the detected boxes
        """
        valid = np.where(self.trucks_np == 1)  # y, x indices
        # one list per output attribute (geometry plus 11 metric columns)
        boxes = [[], [], [], [], [], [], [], [], [], [], [], []]
        y_max, x_max = self.trucks_np.shape
        print("Context zoom\n%s" % (len(valid[0])))
        pb = ProgressBar(len(valid[0]), 50)
        for y, x, i in zip(valid[0], valid[1], range(len(valid[0]))):
            pb.update(i)
            if self.trucks_np[y, x] != 1:  # may be the case because previously eliminated
                continue
            # window half-size derived from the expected red-target distance
            radius_low = int(MEAN_MAX_DIST_RED) + 2
            radius_up = radius_low + 1
            # subset around potential detection, clipped to array bounds
            y_low, y_up = y - radius_low, y + radius_up
            y_low, y_up = 0 if y_low < 0 else y_low, y_max if y_up > y_max else y_up
            x_low, x_up = x - radius_low, x + radius_up
            x_low, x_up = 0 if x_low < 0 else x_low, x_max if x_up > x_max else x_up
            # suppress neighbouring detections of the same object
            self.trucks_np = self.eliminate_multi_detections(self.trucks_np, y, x)
            sub_stack = self.band_stack_np[:, y_low:y_up, x_low:x_up].copy()
            if np.count_nonzero(~np.isnan(sub_stack)) == 0:
                continue
            t0 = datetime.now()
            box_test_result = self._box_test(sub_stack)
            t1 = datetime.now()
            # print("Total: %s" % str((t1 - t0).total_seconds()))
            try:
                the_box = box_test_result["box"]
            except KeyError:
                # empty dict -> no object delineated in this window
                continue
            else:
                box_metrics = box_test_result["box_metrics"]
                bounding_box = [the_box["xmin"], the_box["ymin"], the_box["xmax"], the_box["ymax"]]
                # get box in full array (window offsets added back)
                box_full_array = [x_low + bounding_box[0], y_low + bounding_box[1],
                                  x_low + bounding_box[2], y_low + bounding_box[3]]
                box_full_array[2] = self.lon.shape[0] - 1 if box_full_array[2] >= self.lon.shape[0] else box_full_array[2]
                box_full_array[3] = self.lat.shape[0] - 1 if box_full_array[3] >= self.lat.shape[0] else box_full_array[3]
                ymax, xmax = box_full_array[3] + 1, box_full_array[2] + 1
                ymax = self.lat.shape[0] - 1 if ymax >= self.lat.shape[0] else ymax  # may happen
                xmax = self.lon.shape[0] - 1 if xmax >= self.lon.shape[0] else xmax
                # shapely box in geographic coordinates
                bounding_box = box(self.lon[box_full_array[0]],
                                   self.lat[box_full_array[1]],
                                   self.lon[xmax],
                                   self.lat[ymax])
                direction_degree = box_metrics["direction"]
                values = [bounding_box, box_metrics["spectral_angle"], box_metrics["slope"],
                          self.direction_degree_to_description(direction_degree), direction_degree,
                          box_test_result["quantile"], box_test_result["speed"], box_metrics["score"],
                          box_metrics["std"], box_metrics["red_mean"], box_metrics["green_mean"], box_metrics["blue_mean"]]
                for idx, value in enumerate(values):
                    boxes[idx].append(value)
        detections = gpd.GeoDataFrame({"rsquared": boxes[1],
                                       "slope": boxes[2],
                                       "direction_description": boxes[3],
                                       "direction_degree": boxes[4],
                                       "localization_quantile": boxes[5],
                                       "speed": boxes[6],
                                       "score": boxes[7],
                                       "std": boxes[8],
                                       "red_ratio": boxes[9],
                                       "green_ratio": boxes[10],
                                       "blue_ratio": boxes[11]},
                                      geometry=boxes[0],
                                      crs=self.crs)
        print("\nNumber of detections: %s" % (len(detections)))
        return detections
    def _box_test(self, subset):
        """
        Looks at a subset around a detection and localizes the object as a box.
        :param subset: numpy ndarray of shape (4, 9, 9) containing the reflectances of the subset
        :return: dict with resulting detection box ("box"), its metrics
            ("box_metrics"), the localization quantile and speed estimate;
            an empty dict if no valid box is found
        """
        t0 = datetime.now()
        subset_copy = subset.copy()
        # mask vegetation via the red/NIR (NDVI-like) ratio
        subset[:, normalized_ratio(subset[3], subset[0]) > MAX_NDVI] = np.nan
        detection_y, detection_x = int(subset.shape[1] / 2), int(subset.shape[2] / 2)  # index of detection (center)
        detection_yx = [detection_y, detection_x]
        if np.isnan(subset[0, detection_y, detection_x]):  # NDVI too high. Mask here, saves time
            return {}
        detection_stack = subset[:, detection_y, detection_x].copy()
        subset[:, detection_y, detection_x] = detection_stack.copy()
        if np.count_nonzero(~np.isnan(subset[0])) < 3:
            return {}
        # build the ratio stack: indices 0-5 are the six pairwise band ratios,
        # 6 is the RGB std, 7 is the std over all ratios (each scaled by 10)
        n_bands = subset.shape[0] - 1
        ratios = np.zeros((n_bands * 2 + 2, subset.shape[1], subset.shape[2]))
        ratio_counterparts = [[1, 2], [0, 2], [0, 1]]
        for i in range(n_bands):
            for j, k in enumerate(ratio_counterparts[i]):
                ratios[i + i + j] = normalized_ratio(subset[i], subset[k])
        ratios[6] = np.nanstd(subset[0:3], 0) * 10
        ratios[7] = np.nanstd(ratios, 0) * 10
        ratios[:, np.isnan(ratios[0])] = np.nan
        # localize potential box through high quantile; lower q until enough pixels pass
        q = np.float32([0.99])
        # print("Section 1 took: %s" % str((datetime.now() - t0).total_seconds()))
        t0 = datetime.now()
        qantiles_dummy = np.float32([1, 1])
        quantiles_sum = qantiles_dummy.copy()
        while np.count_nonzero(quantiles_sum) < 6 and q[0] > 0.5:
            quantiles_sum = self.quantile_filter(ratios, q)
            if quantiles_sum is None:
                quantiles_sum = qantiles_dummy.copy()
            q -= 0.01
        q += 0.01
        # print("Section 2 took: %s" % str((datetime.now() - t0).total_seconds()))
        t0 = datetime.now()
        # NOTE(review): this comparison only distinguishes "still the dummy"
        # via the raised TypeError; the computed value s is never inspected.
        try:
            s = all(quantiles_sum == qantiles_dummy)
        except TypeError:  # then it's alright
            pass
        else:
            return {}
        try:
            quantiles_sum[quantiles_sum > 0] = 1
        except TypeError:
            return {}
        # quantiles_sum = self.eliminate_single_nonzeros(quantiles_sum)
        if np.count_nonzero(quantiles_sum > 0) < 3:
            return {}
        # drop pixels spatially too far from the detection, per target color
        for j, k, t in zip([0, 2, 4], [1, 3, 5], [MAX_MAX_DIST_RED + 1, MAX_MAX_DIST_GREEN + 1, 2]):
            subset, ratios, quantiles_sum = self._eliminate_distant_pixels(subset, ratios, ratios[j] + ratios[k],
                                                                           quantiles_sum, detection_yx, t, q)
        # apply cluster exposing method twice in order to account for changes introduced by filter
        y_low, x_low, y_up, x_up = detection_y - 1, detection_x - 1, detection_y + 2, detection_x + 2
        quantiles_sum[y_low:y_up, x_low:x_up] = np.zeros((3, 3))  # temporary
        spatial_cluster = self._expose_cluster(quantiles_sum, subset[0:3], False)
        # if a cluster has high amount of values exclude corners, potentially divide large cluster
        boxes, boxes_metrics, scores, clusters = [], [], [], []
        # print("Section 3 took: %s" % str((datetime.now() - t0).total_seconds()))
        t0 = datetime.now()
        # evaluate each spatial cluster as a candidate box
        for cluster in np.unique(spatial_cluster[spatial_cluster != 0]):
            spatial_cluster[detection_y, detection_x] = cluster  # assign value of cluster to detection pixel
            ys, xs = np.where(spatial_cluster == cluster)
            try:
                a_box = [np.min(ys), np.min(xs), np.max(ys), np.max(xs)]
            except ValueError:
                continue
            box_arr = subset[0:3, a_box[0]:a_box[2]+1, a_box[1]:a_box[3]+1].copy()
            # if (np.nanmean(np.nanstd(box_arr, 0)) * 10) < MIN_RGB_STD * 0.5:  # be tolerant here
            #     continue
            cluster_sub = spatial_cluster[a_box[0]:a_box[2]+1, a_box[1]:a_box[3]+1].copy()
            cluster_sub[np.isnan(box_arr[0])] = 0
            ys, xs = np.where(spatial_cluster == cluster)
            if len(ys) < 2:
                continue
            # shrink the box after removing index outliers
            ys, xs = self.eliminate_outlier_indices(ys, xs)
            a_box = [np.min(ys), np.min(xs), np.max(ys), np.max(xs)]
            box_arr = subset[0:3, a_box[0]:a_box[2]+1, a_box[1]:a_box[3]+1].copy()
            if np.count_nonzero(~np.isnan(box_arr)) / 3 / (box_arr.shape[1] * box_arr.shape[2]) < 0.3:  # too few pixels
                continue
            box_ratios = ratios[:, a_box[0]:a_box[2]+1, a_box[1]:a_box[3]+1].copy()
            t0b = datetime.now()
            box_metrics = self._characterize_spatial_spectral(box_arr, box_ratios)
            # a_box = self._crop_box(a_box, ratios, box_metrics["direction"], detection_yx)
            #print("Section 4b took: %s" % str((datetime.now() - t0b).total_seconds()))
            # box_arr = subset[0:3, a_box[0]:a_box[2] + 1, a_box[1]:a_box[3] + 1].copy()
            if all([box_arr.shape[1] <= 2, box_arr.shape[2] <= 2]):
                continue
            box_metrics = self.calc_score(box_metrics, box_arr)
            if self._spatial_spectral_match(box_metrics):
                clusters.append(cluster)
                boxes.append(a_box)
                boxes_metrics.append(box_metrics)
                scores.append(box_metrics["score"])
        # print("Section 4 took: %s" % str((datetime.now() - t0).total_seconds()))
        t0 = datetime.now()
        # keep the highest-scoring candidate box
        scores = np.array(scores)
        try:
            max_score = np.max(scores)
            match = np.where(scores == max_score)[0][0]
        except ValueError:
            return {}
        box_metrics, selected_box = boxes_metrics[match], boxes[match]
        if np.std(selected_box) == 0:
            return {}
        # if the box exceeds the plausible object size, try to shrink it
        if any(self.box_too_large(selected_box, MAX_MAX_DIST_RED)):
            selected_box = self._subset_by_ratios(ratios, selected_box)  # subset box to high quantile ratios
        if any(self.box_too_large(selected_box, MAX_MAX_DIST_RED)):
            subset_dict = self._subset_by_boxes(subset, ratios, selected_box, [3, 4])  # try default sub boxes
            try:
                box_metrics = subset_dict["box_metrics"]
            except KeyError:
                pass
            else:
                a_box = subset_dict["selected_box"]
                box_arr = subset[0:3, a_box[0]:a_box[2] + 1, a_box[1]:a_box[3] + 1]
                box_metrics = self.calc_score(box_metrics, box_arr)
                if not self._spatial_spectral_match(box_metrics):
                    return {}
        box_too_small = all([(selected_box[2] - selected_box[0] + 1) <= 2, (selected_box[3] - selected_box[1] + 1) <= 2])
        if box_too_small or box_metrics["score"] < self.min_score:
            return {}
        the_box = {"ymin": selected_box[0], "xmin": selected_box[1],
                   "ymax": selected_box[2], "xmax": selected_box[3]}
        # print("Section 5 took: %s" % str((datetime.now() - t0).total_seconds()))
        return {"box": the_box,
                "box_metrics": box_metrics,
                "quantile": q[0],
                "speed": self.calc_speed(ratios[:, the_box["ymin"]:the_box["ymax"]+1, the_box["xmin"]:the_box["xmax"]+1])}
    def _characterize_spatial_spectral(self, sub_arr, sub_variables):
        """
        Takes a subset of the reflectance stack and corresponding variables (ratios, std) and returns metrics of
        correlation and spatial relationships between the brightest blue, green and red target pixels.
        :param sub_arr: numpy ndarray of shape (3, y, x) containing the reflectance bands (red, green, blue)
        :param sub_variables: numpy ndarray of shape (8, y, x) containing ratios of reflectance bands (channels 0-5),
        RGB std (channel 6) and ratios std (channel 7)
        :return: dict containing the box metrics; all values are np.nan when no valid targets are found
        """
        # pre-fill every metric with nan so early returns hand back a complete dict
        return_dict = {}
        keys = ["spectral_angle", "spatial_angle", "slope", "red_length", "green_length", "direction",
                "blue_mean", "green_mean", "red_mean", "red_ratio_max", "green_ratio_max", "blue_ratio_max"]
        for key in keys:
            return_dict[key] = np.nan
        return_dict_copy = return_dict.copy()
        # combine the two ratio channels per band with the (upscaled) reflectance to rank candidate target pixels
        blue_ratios = np.nansum(sub_variables[4:6], 0) + sub_arr[2] * 10  # sum of blue ratios
        green_ratios = np.nansum(sub_variables[2:4], 0) + sub_arr[1] * 10  # sum of green ratios
        red_ratios = np.nansum(sub_variables[0:2], 0) + sub_arr[0] * 10  # sum of red ratios
        try:
            try:
                blue_y, blue_x = self.crop_2d_indices(np.where(blue_ratios == np.nanmax(blue_ratios)))
            except ValueError:
                return return_dict
            else:
                green_ratios[blue_y, blue_x] = np.nan  # set to nan in order to avoid double target
                green_y, green_x = self.crop_2d_indices(np.where(green_ratios == np.nanmax(green_ratios)))
                red_ratios[blue_y, blue_x] = np.nan  # avoid double target
                red_ratios[green_y, green_x] = np.nan  # ""
                red_y, red_x = self.crop_2d_indices(np.where(red_ratios == np.nanmax(red_ratios)))
        except IndexError:
            return return_dict
        blue_indices = [blue_y, blue_x]
        blue_red_spatial_vector = self.calc_vector([red_y, red_x], blue_indices)  # spatial vector blue to red
        blue_green_spatial_vector = self.calc_vector([green_y, green_x], blue_indices)  # spatial vector blue to green
        return_dict = {"red_length": self.calc_vector_length(blue_red_spatial_vector),
                       "green_length": self.calc_vector_length(blue_green_spatial_vector),
                       "spatial_angle": self.calc_vector_angle_in_degrees(blue_red_spatial_vector,
                                                                          blue_green_spatial_vector)}
        if not self._spatial_spectral_match(return_dict):  # check that in order to reduce run time
            return return_dict_copy  # if spatial metrics do not satisfy thresholds return here already
        # 21-element vector of the target pixels' ratios, stds and reflectances, compared below against references
        given_vector = np.hstack([sub_variables[4:6, blue_y, blue_x],  # stack of variables and target pixels
                                  sub_variables[2:4, green_y, green_x],
                                  sub_variables[0:2, red_y, red_x],
                                  sub_variables[6, blue_y, blue_x],
                                  sub_variables[6, green_y, green_x],
                                  sub_variables[6, red_y, red_x],
                                  sub_variables[7, blue_y, blue_x],
                                  sub_variables[7, green_y, green_x],
                                  sub_variables[7, red_y, red_x],
                                  sub_arr[2, blue_y, blue_x],
                                  sub_arr[2, green_y, green_x],
                                  sub_arr[2, red_y, red_x],
                                  sub_arr[1, green_y, green_x],
                                  sub_arr[1, blue_y, blue_x],
                                  sub_arr[1, red_y, red_x],
                                  sub_arr[0, red_y, red_x],
                                  sub_arr[0, blue_y, blue_x],
                                  sub_arr[0, green_y, green_x]])
        col_names, spectral_angles, slopes, spearman = [], [], [], []
        for i in range(7):
            col_names = col_names + ["rgb_vector" + str(i) + str(j) for j in [0, 1, 2]]
        # calculate spearmanr correlations between given variables and all reference variables
        # (RGB_VECTORS is a module-level pandas DataFrame of reference target signatures)
        for row in RGB_VECTORS.iterrows():
            r = row[1]
            ref_vector = np.array([r[col_name] for col_name in col_names])
            regression = linregress(given_vector, ref_vector)
            spearman.append(spearmanr(given_vector, ref_vector)[0])
            #spectral_angles.append(regression.rvalue)
            slopes.append(regression.slope)
        # use mean of all spearmanr correlation coefficients as indicator for agreement with reference dataset
        return_dict["spectral_angle"] = np.nanmean(spearman)  # np.nanquantile(spectral_angles, [0.75])[0] - np.nanstd(spectral_angles)
        return_dict["slope"] = np.nanmean(slopes)
        # overall heading: direction of the mean of the blue-to-red and blue-to-green vectors
        return_dict["direction"] = self.calc_vector_direction_in_degree(np.mean(np.vstack([blue_red_spatial_vector,
                                                                                           blue_green_spatial_vector]),
                                                                                axis=0))
        return_dict["red_mean"] = np.nanmean(sub_arr[0])
        return_dict["green_mean"] = np.nanmean(sub_arr[1])
        return_dict["blue_mean"] = np.nanmean(sub_arr[2])
        return_dict["red_ratio_max"] = np.nanmax(np.nanmax(sub_variables[0:2]))
        return_dict["green_ratio_max"] = np.nanmax(np.nanmax(sub_variables[2:4]))
        return_dict["blue_ratio_max"] = np.nanmax(np.nanmax(sub_variables[4:6]))
        return return_dict
    def _subset_by_ratios(self, ratios, selected_box):
        """
        Shrinks a too-large box to the extent of its high-quantile ratio pixels, raising the quantile
        until the box is small enough or the quantile reaches 1.
        :param ratios: numpy ndarray of shape (channels, y, x) with band ratios
        :param selected_box: list of int [ymin, xmin, ymax, xmax] in 'ratios' coordinates
        :return: list of int [ymin, xmin, ymax, xmax], possibly shrunk
        """
        original_box = selected_box.copy()
        box_ratios = ratios[:, selected_box[0]:selected_box[2]+1, selected_box[1]:selected_box[3]+1]
        q = np.float32([0.2])
        too_large_y, too_large_x = True, True
        while any([too_large_y, too_large_x]) and q[0] < 1:
            too_large_y, too_large_x = self.box_too_large(selected_box, MAX_MAX_DIST_RED)
            if any([too_large_y, too_large_x]):
                quantiles_sum = self.quantile_filter(box_ratios, q)
                if quantiles_sum is not None:
                    ys, xs = np.where(quantiles_sum != 0)
                    try:
                        # box becomes relative to box_ratios here; remapped to 'ratios' coordinates below
                        selected_box = [min(ys), min(xs), max(ys), max(xs)]
                    except ValueError:
                        # no pixels above this quantile -> try the next quantile
                        q += 0.01
                        continue
            q += 0.01
        if selected_box != original_box:
            # translate the box (relative to the original crop) back into full-array coordinates
            selected_box[2] = original_box[0] + selected_box[2]
            selected_box[3] = original_box[1] + selected_box[3]
            selected_box[0] += original_box[0]
            selected_box[1] += original_box[1]
        return selected_box
    def _subset_by_boxes(self, subset, ratios, selected_box, window_sizes):
        """
        Slides square windows of the given sizes over the selected box and picks the sub-box with the best
        combined score (spectral correlation + max RGB sum - spatial angle). Mutates 'selected_box' in place.
        :param subset: numpy ndarray of shape (bands, y, x) reflectance stack
        :param ratios: numpy ndarray of shape (channels, y, x) with band ratios
        :param selected_box: list of int [ymin, xmin, ymax, xmax] in 'subset' coordinates
        :param window_sizes: list of int window edge lengths in pixels
        :return: dict with keys "box_metrics" and "selected_box", or {} when no window matches
        """
        box_arr = subset[0:3, selected_box[0]:selected_box[2] + 1, selected_box[1]:selected_box[3] + 1]
        box_ratios = ratios[:, selected_box[0]:selected_box[2] + 1, selected_box[1]:selected_box[3] + 1]
        boxes, boxes_metrics, boxes_rsquared, boxes_rgb_sums, boxes_spatial_angle = [], [], [], [], []
        for w in window_sizes:
            y_indices_low = np.arange(0, box_arr.shape[1] - w + 1, 1)
            x_indices_low = np.arange(0, box_arr.shape[2] - w + 1, 1)
            y_indices_up = [y + w for y in y_indices_low]
            x_indices_up = [x + w for x in x_indices_low]
            for y_low, y_up in zip(y_indices_low, y_indices_up):
                for x_low, x_up in zip(x_indices_low, x_indices_up):
                    sub_box_arr = box_arr[:, y_low:y_up, x_low:x_up]
                    sub_box_ratios = box_ratios[:, y_low:y_up, x_low:x_up]
                    box_metrics = self._characterize_spatial_spectral(sub_box_arr, sub_box_ratios)
                    if self._spatial_spectral_match(box_metrics):
                        max_values = [np.nanmax(sub_box_arr[i]) for i in range(sub_box_arr.shape[0])]
                        boxes.append([y_low, x_low, y_up - 1, x_up - 1])  # -1 due to indexing
                        boxes_metrics.append(box_metrics)
                        boxes_rsquared.append(box_metrics["spectral_angle"])
                        boxes_rgb_sums.append(np.sum(max_values))
                        boxes_spatial_angle.append(box_metrics["spatial_angle"])
        # higher correlation and brightness are good, a larger spatial angle is penalized
        combined = np.array(boxes_rsquared) + np.array(boxes_rgb_sums) - np.array(boxes_spatial_angle)
        try:
            max_combined = np.max(combined)
        except ValueError:
            return {}
        try:
            match = np.where(combined == max_combined)[0][0]
        except IndexError:
            return {}
        new_box = boxes[match]
        # remap the winning window (relative to box_arr) into 'subset' coordinates
        selected_box[2] = selected_box[0] + new_box[2]
        selected_box[3] = selected_box[1] + new_box[3]
        selected_box[0] += new_box[0]
        selected_box[1] += new_box[1]
        return {"box_metrics": boxes_metrics[match], "selected_box": selected_box}
def _eliminate_distant_pixels(self, sub_arr, ratios, band_ratios, quantiles_sum, center, threshold, quantile):
try:
ys, xs = np.where(band_ratios > np.nanquantile(band_ratios, quantile))
except ValueError:
return sub_arr
else:
for y, x in zip(ys, xs):
if self.calc_vector_length(self.calc_vector(center, [y, x])) > threshold:
sub_arr[:, y, x] = np.nan
ratios[:, y, x] = np.nan
quantiles_sum[y, x] = 0
return sub_arr, ratios, quantiles_sum
    def _expose_cluster(self, target_arr, band_stack, exclude_corners=True):
        """
        Labels connected non-zero pixels in 'target_arr' with increasing integer cluster ids, discarding
        pixels that are both too far from the array center and have an unexpected brightest band.
        :param target_arr: numpy 2d array, non-zeros are candidate pixels; nans are treated as zero
        :param band_stack: numpy ndarray of shape (>=3, y, x) reflectance stack used to find the brightest band
        :param exclude_corners: bool, passed through to the flood fill (skip 3x3 window corners)
        :return: numpy 2d array with integer cluster labels (0 = background)
        """
        target_arr[np.isnan(target_arr)] = 0
        if np.count_nonzero(target_arr) == 0:
            return target_arr
        try:
            center = [int(target_arr.shape[0] / 2), int(target_arr.shape[1] / 2)]
        except IndexError:
            return target_arr
        ys, xs = np.where(target_arr != 0)
        yet_seen, cluster_value, clusters = [], 0, target_arr.copy()
        for y, x in zip(ys, xs):
            distance_center = self.calc_vector_length(self.calc_vector([y, x], center)) - 1
            rgb_slice = band_stack[0:3, y, x]
            max_idx = np.where(rgb_slice == np.nanmax(rgb_slice))[0][0]
            # pairwise conditions: distance thresholds (red max, red mean, green mean) vs. which band is brightest;
            # the first pair always has condition_b True, so exceeding MAX_MAX_DIST_RED alone discards the pixel
            distance_wrong = [distance_center > t for t in [MAX_MAX_DIST_RED, MEAN_MAX_DIST_RED, MEAN_MAX_DIST_GREEN]]
            max_idx_wrong = [True, max_idx not in [0, 1], max_idx not in [0, 1, 2]]
            should_continue = False
            for condition_a, condition_b in zip(distance_wrong, max_idx_wrong):
                if condition_a and condition_b:
                    clusters[y, x], should_continue = 0, True
                    break
            if should_continue:
                continue
            if not [y, x] in yet_seen:
                # start a new cluster at every not-yet-visited pixel and flood fill its neighbors
                cluster_value += 1
                clusters, yet_seen = self._search_adjacent_non_zero(clusters, [y, x], cluster_value, yet_seen,
                                                                    exclude_corners)
        return clusters
    def _crop_box(self, given_box, ratios, direction, detection_yx):
        """
        Iteratively crops an axis-aligned, too-large box to its high-quantile ratio pixels, raising the
        quantile while the box area stays above MAX_MAX_DIST_RED * 2. Mutates 'given_box' in place.
        Only applies when the movement direction is within 45 degrees of an axis (0/90/180/270).
        :param given_box: list of int [ymin, xmin, ymax, xmax] in 'ratios' coordinates
        :param ratios: numpy ndarray of shape (channels, y, x) with band ratios
        :param direction: float direction in degrees
        :param detection_yx: list of int [y, x] of the original detection, always kept inside the box
        :return: list of int [ymin, xmin, ymax, xmax]
        """
        max_size = MAX_MAX_DIST_RED * 2
        box_size = (given_box[2] - given_box[0] + 1) * (given_box[3] - given_box[1] + 1)
        direction_match = any(np.abs([x - direction for x in [0, 90, 180, 270]]) < 45)
        q = [0.5]
        while direction_match and box_size >= max_size and q[0] < 1:
            box_ratios = ratios[:, given_box[0]:given_box[2] + 1, given_box[1]:given_box[3] + 1]
            quantiles = self.quantile_filter(box_ratios, q)
            if quantiles is not None:
                try:
                    # always retain value 1 at detection
                    quantiles[np.abs(detection_yx[0] - given_box[0]), np.abs(detection_yx[1] - given_box[1])] = 1
                except IndexError:
                    pass
                ys, xs = np.where(quantiles != 0)
                try:
                    # shrink the box to the extent of the surviving quantile pixels (full-array coordinates)
                    given_box[2] = int(given_box[0] + max(ys))
                    given_box[3] = int(given_box[1] + max(xs))
                    given_box[0] += min(ys)
                    given_box[1] += min(xs)
                except ValueError:
                    # nothing above this quantile -> retry with a higher one
                    q[0] += 0.1
                    continue
                else:
                    box_size = (given_box[2] - given_box[0] + 1) * (given_box[3] - given_box[1] + 1)
            if box_size >= max_size:
                q[0] += 0.1
        return given_box
    def _search_adjacent_non_zero(self, arr, point, new_value, yet_seen, exclude_corners):
        """
        looks for non zeros in 3x3 window around point in array and assigns a new value to these non-zeros
        (recursive flood fill; recursion depth grows with cluster size)
        :param arr: np array
        :param point: list of int y, x indices
        :param new_value: int value to assign
        :param yet_seen: list of lists, each list is a point with int y, x indices that has been seen before
        :param exclude_corners: bool, if True the corners of 3x3 window are excluded
        :return: tuple of np array and list
        """
        arr_modified = arr.copy()
        original_value = arr_modified[point[0], point[1]].copy()
        # temporarily zero the seed so it is not rediscovered as its own neighbor
        arr_modified[point[0], point[1]] = 0
        ymin, ymax = point[0]-1, point[0]+2
        xmin, xmax = point[1]-1, point[1]+2
        # clamp lower bounds; upper bounds beyond the array are handled by numpy slicing
        ymin, xmin = 0 if ymin < 0 else ymin, 0 if xmin < 0 else xmin
        window_3x3 = arr_modified[ymin:ymax, xmin:xmax].copy()
        if exclude_corners:
            for corner_y, corner_x in zip([0, 0, 2, 2], [0, 2, 0, 2]):
                try:
                    window_3x3[corner_y, corner_x] = 0
                except IndexError:
                    continue
        ys, xs = np.where(window_3x3 != 0)
        for y_local, x_local in zip(ys, xs):
            y, x = ymin + y_local, xmin + x_local
            if [y, x] not in yet_seen:
                # label the neighbor, then recurse from it before marking it as seen
                arr_modified[y, x] = new_value
                arr_modified, yet_seen = self._search_adjacent_non_zero(arr_modified, [y, x], new_value, yet_seen,
                                                                        exclude_corners)
                yet_seen.append([y, x])
        # restore the seed: keep its original value if it was already visited, otherwise relabel it
        value = original_value if point in yet_seen else new_value
        if point not in yet_seen:
            yet_seen.append(point)
        arr_modified[point[0], point[1]] = value
        return arr_modified, yet_seen
    def calc_speed(self, ratios):
        """
        Estimates the vehicle speed in km/h from the spatial extent of the ratio box and the fixed
        acquisition time offset between the B02 and B04 Sentinel-2 bands (module constant).
        :param ratios: numpy ndarray of shape (>=6, y, x) with band ratios
        :return: float speed in km/h, or 0 when the maxima cannot be computed
        """
        resolution = 10  # meters per pixel (Sentinel-2 10 m bands)
        blue_ratios = np.nansum(ratios[4:6], 0)
        red_ratios = np.nansum(ratios[0:2], 0)
        green_ratios = np.nansum(ratios[2:4], 0)
        try:
            # NOTE(review): max_red and max_green are computed but unused; also np.nanmax raises
            # ValueError (not IndexError) on empty input -- confirm the intended guard
            max_blue, max_red, max_green = np.nanmax(blue_ratios), np.nanmax(red_ratios), np.nanmax(green_ratios)
        except IndexError:
            return 0
        # travelled distance: box diagonal extent corrected by the blue ratio strength, in meters
        diameter = (np.max(ratios.shape[1:3]) - (1.5 - max_blue)) * resolution
        kilometers_hour = (diameter * (3600 / SECONDS_OFFSET_B02_B04)) / 1000
        return kilometers_hour
    def _spatial_spectral_match(self, metrics_dict):
        """
        Checks whether the available box metrics satisfy the spectral/spatial thresholds.
        Missing keys are tolerated: each absent metric decrements 'has_values'; when none of the
        three checked groups is present, the result is False.
        :param metrics_dict: dict of box metrics (may be partial)
        :return: bool-like (is_match is multiplied by comparisons, so it may be an int 0/1)
        """
        is_match = True
        has_values = 3
        # try:
        #     ratios_means = [metrics_dict["red_ratio_max"], metrics_dict["green_ratio_max"], metrics_dict["blue_ratio_max"]]
        # except KeyError:
        #     has_values -= 1
        # else:
        #     ratios_high = np.max(ratios_means) > 0.2
        #     ratios_high_all = all([mean_value > 0.05 for mean_value in ratios_means])
        #     ratios_high_all = ratios_high_all or sum([mean_value > 0.25 for mean_value in ratios_means]) >= 2
        #     ratios_high_two = sum([mean_value > 0.15 for mean_value in ratios_means]) > 1
        #     is_match *= ratios_high * ratios_high_all * ratios_high_two
        # try:
        #     is_match *= metrics_dict["std"] >= MIN_RGB_STD
        # except KeyError:
        #     has_values -= 1
        try:
            # correlation with the reference dataset must be high enough
            is_match *= metrics_dict["spectral_angle"] >= self.min_r_squared
        except KeyError:
            has_values -= 1
        try:
            is_match *= metrics_dict["score"] >= self.min_score
        except KeyError:
            has_values -= 1
        try:
            # blue-green distance must be shorter than blue-red, and both within their maxima
            green_length = metrics_dict["green_length"]
            red_length = metrics_dict["red_length"]
            is_match *= green_length < red_length
            is_match *= red_length < (MAX_MAX_DIST_RED + 0.5)
            is_match *= green_length < (MAX_MAX_DIST_GREEN + 0.5)
        except KeyError:
            has_values -= 1
        # try:
        #     is_match *= metrics_dict["slope"] < MAX_SLOPE
        #     is_match *= metrics_dict["slope"] > MIN_SLOPE
        # except KeyError:
        #     has_values -= 1
        # try:
        #     is_match *= metrics_dict["spatial_angle"] < MAX_ANGLE_BR_BG
        # except KeyError:
        #     has_values -= 1
        if has_values == 0:
            return False
        else:
            return is_match
@staticmethod
def calc_score(metrics_dict, sub_arr):
metrics_dict["std"] = np.nanmean(np.nanstd(sub_arr, 0)) * 10
reflectance_means_sum = (metrics_dict["red_mean"] + metrics_dict["blue_mean"] + metrics_dict[
"green_mean"]) * 10
ratio_means_sum = metrics_dict["red_ratio_max"] + metrics_dict["green_ratio_max"] \
+ metrics_dict["blue_ratio_max"]
metrics_dict["score"] = metrics_dict["spectral_angle"] + metrics_dict["std"] - np.abs(
1 - metrics_dict["slope"]) \
+ reflectance_means_sum + ratio_means_sum - metrics_dict["spatial_angle"] / 100
return metrics_dict
    @staticmethod
    def calc_primary_accuracy(detected_boxes, validation_boxes):
        """
        Compares detected boxes against validation boxes and returns hit percentages and mean
        intersection areas in both directions.
        :param detected_boxes: GeoDataFrame of detected box geometries
        :param validation_boxes: GeoDataFrame of validation box geometries
        :return: dict with percentage metrics; all values np.nan when either input is empty
        """
        out_keys = ["validation_percentage", "detection_percentage", "validation_intersection_percentage",
                    "detection_intersection_percentage"]
        out_dict = {}
        lens = [len(detected_boxes) == 0, len(validation_boxes) == 0]
        if lens[0]:
            print("No entries in 'detected_boxes'")
        if lens[1]:
            print("No entries in 'validation_boxes'")
        if any(lens):
            for key in out_keys:
                out_dict[key] = np.nan
            return out_dict
        intersections = {"validation": [], "detection": []}
        intersection_areas = {"validation": [], "detection": []}
        keys = ["validation", "detection"]
        # run the comparison in both directions: validation boxes against detections, and vice versa
        for boxes_a, boxes_b, key in zip([validation_boxes, detected_boxes], [detected_boxes, validation_boxes], keys):
            for detected_box in boxes_a.geometry:
                for i, validation_box in enumerate(boxes_b.geometry):
                    if detected_box.intersects(validation_box):
                        # NOTE(review): the same index i may be appended several times, which inflates
                        # the percentage numerators below -- confirm whether duplicates are intended
                        intersections[key].append(i)
                        detected_gpd = gpd.GeoDataFrame({"geometry": [detected_box]}).set_geometry("geometry")
                        validation_gpd = gpd.GeoDataFrame({"geometry": [validation_box]}).set_geometry("geometry")
                        detected_gpd.crs = detected_boxes.crs
                        validation_gpd.crs = detected_gpd.crs
                        intersected = gpd.overlay(detected_gpd, validation_gpd, how="intersection")
                        intersection_areas[key].append(intersected.area[0] / detected_gpd.area[0] * 100)
        out_values = [(len(intersections["validation"]) / len(validation_boxes)) * 100,
                      (len(intersections["detection"]) / len(detected_boxes)) * 100,
                      np.nanmean(np.array(intersection_areas["validation"])),
                      np.nanmean(np.array(intersection_areas["detection"]))]
        for key, value in zip(out_keys, out_values):
            out_dict[key] = value
        return out_dict
@staticmethod
def eliminate_single_nonzeros(arr):
for y in range(arr.shape[0]):
for x in range(arr.shape[1]):
window_3x3 = arr[y-1:y+2, x-1:x+2]
if np.count_nonzero(window_3x3[~np.isnan(window_3x3)]) < 2:
arr[y, x] = 0
return arr
    @staticmethod
    def eliminate_outlier_indices(ys, xs):
        """
        Removes spatial outliers from paired index arrays: when one coordinate value dominates
        (>50 % of points share it), points on rarely-used coordinates (<15 %) are dropped.
        :param ys: numpy 1d int array of row indices
        :param xs: numpy 1d int array of column indices (same length as ys)
        :return: tuple of numpy 1d arrays (ys, xs) with outliers removed, original dtypes restored
        """
        dtype_ys, dtype_xs = ys.dtype, xs.dtype
        # work in float so outliers can be flagged with nan before filtering
        ys, xs = ys.astype(np.float32), xs.astype(np.float32)
        unique_ys, unique_xs = np.unique(ys), np.unique(xs)
        n = len(ys)
        n_unique_ys, n_unique_xs = len(unique_ys), len(unique_xs)
        amount_unique_ys, amount_unique_xs = np.zeros(n_unique_ys), np.zeros(n_unique_xs)
        # percentage of points sharing each unique coordinate value
        for unique_idx, amount_unique, indices in zip([unique_ys, unique_xs],
                                                      [amount_unique_ys, amount_unique_xs],
                                                      [ys, xs]):
            for i, idx in enumerate(unique_idx):
                amount_unique[i] = len(np.where(indices == idx)[0]) / n * 100
        for amounts, uniques, indices in zip([amount_unique_ys, amount_unique_xs], [unique_ys, unique_xs], [ys, xs]):
            if (amounts > 50).any():  # there is a major y
                outlier_idxs = np.where(amounts < 15)
                if len(outlier_idxs[0]) > 0:
                    # NOTE(review): this iterates the np.where tuple, so outlier_idx is the whole index
                    # array, and 'indices == real_idx' relies on broadcasting -- with more than one
                    # outlier of mismatched length this may not behave as intended; confirm
                    for outlier_idx in outlier_idxs:
                        real_idx = uniques[outlier_idx]
                        to_nan = indices == real_idx
                        ys[to_nan] = np.nan  # eliminate y and x index
                        xs[to_nan] = np.nan
        ys, xs = ys[~np.isnan(ys)], xs[~np.isnan(xs)]
        return ys.astype(dtype_ys), xs.astype(dtype_xs)
    @staticmethod
    def quantile_filter(arr, quantile_value):
        """
        Targets values of specified quantile and eliminates isolated values: a hit in one band is kept
        only when at least two of the OTHER bands also have hits inside a per-band buffer window.
        :param arr: numpy ndarray of shape (bands, height, width); the 'buffers' list below supports
        up to 10 bands (callers pass 8: six ratios plus two std channels)
        :param quantile_value: list with float quantile in range of 0 and 1
        :return: numpy 2d array of shape (height, width) with the per-pixel count of surviving bands
        """
        quantiles = np.array([arr[i] >= np.nanquantile(arr[i], quantile_value) for i in range(arr.shape[0])],
                             dtype=np.int8)
        # quantiles_initial_sum = quantiles.sum(0)
        # if np.count_nonzero(np.int8(quantiles_initial_sum > 0) * np.int8(quantiles_initial_sum < 3)) == 0:
        # return None
        # NOTE(review): with the block above commented out this function never returns None,
        # although several callers still check for it
        shape = quantiles.shape
        s = shape[1]
        # per-band search radius; the std channels (index >= 6) may search the whole height
        buffers = [2, 2, 1, 1, 2, 2, s, s, s, s]
        for i in range(quantiles.shape[0]):
            for y in range(shape[1]):
                for x in range(shape[2]):
                    buffer = buffers[i]
                    y_low, y_up = y - buffer, y + buffer + 1
                    x_low, x_up = x - buffer, x + buffer + 1
                    y_low = 0 if y_low < 0 else y_low
                    x_low = 0 if x_low < 0 else x_low
                    # clamp to the array bounds, then widen the opposite edge so the window keeps its size
                    y_up, x_up = shape[1] if y_up > shape[1] else y_up, shape[2] if x_up > shape[2] else x_up
                    y_low = y_low - 1 if y_up == (shape[1] + 1) else y_low
                    x_low = x_low - 1 if x_up == (shape[2] + 1) else x_low
                    y_up, x_up = y_up + 1 if y_low == 0 else y_up, x_up + 1 if x_low == 0 else x_up
                    original_value = quantiles[i, y, x]
                    if original_value == 0:
                        continue
                    quantiles_sub = quantiles[:, y_low:y_up, x_low:x_up].copy()
                    quantiles_sub[i] = np.zeros_like(quantiles_sub[i])  # look only for matches in other bands
                    sums = [np.nansum(quantiles_sub[j]) for j in range(quantiles_sub.shape[0])]
                    quantiles[i, y, x] = 0 if np.count_nonzero(sums) < 2 else original_value
        return quantiles.sum(0)
@staticmethod
def box_too_large(the_box, max_size):
size_y, size_x = (the_box[2] - the_box[0] + 1), (the_box[3] - the_box[1] + 1)
too_large_y = size_y > max_size
too_large_x = size_x > max_size
return too_large_y, too_large_x
    # NOTE: the ratio-mask helper below appears unused by the current detection pipeline; kept for reference
@staticmethod
def calc_low_ratios_mask(ratios, min_values_ratios):
ratio_mask = np.zeros_like(ratios[0:3], dtype=np.int8)
only_false = np.zeros(3, dtype=np.bool)
# reflectance and ratio filter
for i in range(ratio_mask.shape[0]):
idx = 2 * i
ratio_mask[i] = np.int8((ratios[idx] + ratios[idx + 1]) > min_values_ratios[i])
only_false[i] = np.count_nonzero(ratio_mask) == 0
ratio_mask = ratio_mask.sum(0)
ratio_mask[ratio_mask > 2] = 0
ratio_mask[(2 >= ratio_mask) * (ratio_mask > 0)] = 1
return ratio_mask, only_false
@staticmethod
def calc_low_quantile_mask(reflectances, q):
low_quantile_red = np.int8(reflectances[0] > np.nanquantile(reflectances[0], q))
low_quantile_green = np.int8(reflectances[1] > np.nanquantile(reflectances[1], q))
low_quantile_blue = np.int8(reflectances[2] > np.nanquantile(reflectances[2], q))
low_quantile_mask = np.float32(low_quantile_red + low_quantile_green + low_quantile_blue)
low_quantile_mask[low_quantile_mask == 0] = np.nan
low_quantile_mask[low_quantile_mask > 0] = 1
return low_quantile_mask
@staticmethod
def calc_high_quantile_mask(reflectances, q):
high_quantile_red = np.int8(reflectances[0] < np.nanquantile(reflectances[0], q))
high_quantile_green = np.int8(reflectances[1] < np.nanquantile(reflectances[1], q))
high_quantile_blue = np.int8(reflectances[2] < np.nanquantile(reflectances[2], q))
high_quantile_mask = np.float32(high_quantile_red + high_quantile_green + high_quantile_blue)
high_quantile_mask[high_quantile_mask == 0] = np.nan
high_quantile_mask[high_quantile_mask > 0] = 1
return high_quantile_mask
    @staticmethod
    def expose_anomalous_pixels(band_stack_np):
        """
        Flags pixels that are noticeably brighter than their local (100x100-pixel tile) background.
        A per-tile median is computed per band; pixels whose reflectance exceeds half that median by
        more than the 0.6-quantile of the differences are marked.
        :param band_stack_np: numpy ndarray of shape (>=3, y, x) reflectance stack (red, green, blue first)
        :return: numpy 2d int8 array, 1 at anomalous pixels, 0 elsewhere
        """
        w = 100  # tile edge length in pixels
        y_bound, x_bound = band_stack_np.shape[1], band_stack_np.shape[2]
        roads = np.zeros((3, band_stack_np.shape[1], band_stack_np.shape[2]), dtype=np.float32)
        # fill 'roads' with the per-tile median of each RGB band (tiles overlap by w at the upper edge)
        for y in range(int(np.round(y_bound / w))):
            for x in range(int(np.round(x_bound / w))):
                y_idx, x_idx = np.clip((y + 1) * w, 0, y_bound), np.clip((x + 1) * w, 0, x_bound)
                y_low, x_low = int(np.clip(y_idx - w, 0, 1e+30)), int(np.clip(x_idx - w, 0, 1e+30))
                y_up, x_up = np.clip(y_idx + w + 1, 0, y_bound), np.clip(x_idx + w + 1, 0, x_bound)
                y_size, x_size = (y_up - y_low), (x_up - x_low)
                n = y_size * x_size
                subset = band_stack_np[:, y_low:y_up, x_low:x_up]
                roads[0, y_low:y_up, x_low:x_up] = np.repeat(np.nanmedian(subset[0]), n).reshape(y_size, x_size)
                roads[1, y_low:y_up, x_low:x_up] = np.repeat(np.nanmedian(subset[1]), n).reshape(y_size, x_size)
                roads[2, y_low:y_up, x_low:x_up] = np.repeat(np.nanmedian(subset[2]), n).reshape(y_size, x_size)
        #max_diff = np.nanmax(band_stack_np[0:3] - np.nanmin(roads, 0), 0)
        #mask = np.int8(max_diff > np.nanquantile(max_diff, [0.6]))
        # difference of each band to half its local background median
        diff_red = band_stack_np[0] - (roads[0] / 2)
        diff_green = band_stack_np[1] - (roads[1] / 2)
        diff_blue = band_stack_np[2] - (roads[2] / 2)
        diff_stack = np.array([diff_red, diff_green, diff_blue])
        mask = np.zeros_like(diff_stack[0])
        for i in range(diff_stack.shape[0]):
            mask += np.int8(diff_stack[i] > np.nanquantile(diff_stack[i], [0.6]))
        mask[mask != 0] = 1
        mask = np.int8(mask)
        return mask
    @staticmethod
    def get_osm_mask(bbox, crs, reference_arr, lat_lon_dict, dir_out):
        """
        Downloads major OSM roads for the bounding box, rasterizes them onto the reference grid and
        returns a mask that is 1 on roads and np.nan elsewhere (suitable as a multiplicative mask).
        :param bbox: bounding box passed to the OSM road query
        :param crs: coordinate reference system of the target raster
        :param reference_arr: numpy 2d array defining the output grid shape
        :param lat_lon_dict: dict with "lat" and "lon" coordinate arrays for the grid
        :param dir_out: str directory where the road vectors are cached
        :return: numpy 2d float32 array, 1 on road pixels, np.nan elsewhere
        """
        osm_file = get_roads(bbox, ["motorway", "trunk", "primary"], OSM_BUFFER,
                             dir_out, str(bbox).replace(", ", "_")[1:-1] + "_osm_roads", str(crs),
                             reference_arr)
        osm_vec = gpd.read_file(osm_file)
        ref_xr = xr.DataArray(data=reference_arr, coords=lat_lon_dict, dims=["lat", "lon"])
        osm_raster = rasterize_osm(osm_vec, ref_xr).astype(np.float32)
        osm_raster[osm_raster != 0] = 1
        osm_raster[osm_raster == 0] = np.nan
        return osm_raster
@staticmethod
def crop_2d_indices(indices):
"""
:param indices: tuple of np int64 indices as returned by np.where
:return: np int32 indices. Cropped if longer than 1
"""
return np.array([index_arr[0] for index_arr in indices]).astype(np.int32)
    @staticmethod
    def calc_vector_direction_in_degree(vector):
        """
        Maps a [y, x] image-space vector (y grows downwards) to a direction in degrees.
        Evaluating the arithmetic: [1,1] -> 135, [-1,1] -> 45, [-1,-1] -> 315, [1,-1] -> 225.
        :param vector: array-like of two numbers [y, x]
        :return: float direction in degrees
        """
        # quadrant offsets; NOTE(review): quadrant boundaries are not continuous (e.g. vector[0] == 0
        # always yields the bare offset sum) -- confirm this matches the intended compass convention
        y_offset = 90 if vector[0] > 0 else 0
        x_offset = 90 if vector[1] < 0 else 0
        offset = 180 if y_offset == 0 and x_offset == 90 else 0
        if vector[0] == 0:
            direction = 0.
        else:
            direction = np.degrees(np.arctan(np.abs(vector[1]) / np.abs(vector[0])))
        direction += offset + y_offset + x_offset
        return direction
@staticmethod
def direction_degree_to_description(direction_degree):
step = 22.5
bins = np.arange(0, 359, step, dtype=np.float32)
descriptions = np.array(["N", "NNE", "NE", "ENE",
"E", "ESE", "SE", "SEE",
"S", "SSW", "SW", "WSW",
"W", "WNW", "NW", "NNW"])
i, b = 0, -1
while b < direction_degree and i < len(bins):
b = bins[i]
i += 1
return descriptions[i - 1]
@staticmethod
def calc_vector_angle_in_degrees(a, b):
cos = np.dot(a, b) / np.linalg.norm(a) / np.linalg.norm(b)
if np.abs(cos) >= 1:
return 0
else:
return np.degrees(np.arccos(cos))
@staticmethod
def calc_vector(b, a):
"""
:param b: 1d np.float32 array or array-like
:param a: 1d np.float32 array or array-like
:return: 2d np.float32 array, a vector pointing to origin
"""
vector = []
for i in range(len(b)):
try:
vector.append(np.float32(b[i] - a[i]))
except IndexError:
raise IndexError("origin and target must be of equal length")
return np.array(vector).astype(np.float32)
@staticmethod
def calc_vector_length(vector):
"""
:param vector: np array vector
:return: np float32
"""
squared = np.float32([element ** 2 for element in vector])
return np.sqrt(squared.sum()).astype(np.float32)
@staticmethod
def get_smallest_deviation(arr, value):
dev = np.abs(arr - value)
return int(np.where(dev == dev.min())[0][0])
@staticmethod
def eliminate_multi_detections(arr, y, x):
y0 = y - 2 if (y - 2) >= 0 else y
x0 = x - 2 if (x - 2) >= 0 else x
y1 = y + 3 if (y + 3) <= arr.shape[0] else arr.shape[0]
x1 = x + 3 if (x + 3) <= arr.shape[1] else arr.shape[1]
arr[y0:y1, x0:x1] = np.zeros((y1 - y0, x1 - x0))
arr[y, x] = 1 # detection of interest remains
return arr
| 52.517382 | 134 | 0.580663 | (metadata, dict):
raise TypeError("'metadata' must be a dictionary")
self.crs = metadata["crs"]
try:
self.lat, self.lon = metadata["lat"], metadata["lon"]
except KeyError:
try:
self.lat, self.lon = lat_from_meta(metadata), lon_from_meta(metadata)
except KeyError as e:
raise e
box_utm = [np.min(self.lat), np.max(self.lon), np.max(self.lat), np.min(self.lon)]
box_epsg4326 = metadata_to_bbox_epsg4326(metadata)
dir_ancil = os.path.join(HOME, "AUXILIARY")
if not os.path.exists(dir_ancil):
os.mkdir(dir_ancil)
box_epsg4326 = list(np.flip(box_epsg4326))
osm_mask = self.get_osm_mask(box_epsg4326, metadata["crs"], band_dict["B02"],
{"lat": self.lat, "lon": self.lon},
dir_ancil)
band_stack_np = np.array([band_dict["B04"], band_dict["B03"], band_dict["B02"], band_dict["B08"]])
low_rgb_mask = self.calc_low_quantile_mask(band_stack_np[0:3], [0.2])
low_rgb_mask)] = np.nan
band_stack_np *= osm_mask
try:
band_stack_np = band_stack_np[:, subset_box["ymin"]:subset_box["ymax"], subset_box["xmin"]:subset_box["xmax"]]
self.lat = self.lat[subset_box["ymin"]:subset_box["ymax"] + 1]
self.lon = self.lon[subset_box["xmin"]:subset_box["xmax"] + 1]
except TypeError:
pass
band_stack_np_rescaled = band_stack_np.copy()
band_stack_np = None
band_stack_np_rescaled[np.isnan(band_stack_np_rescaled)] = 0
band_stack_np_rescaled = rescale(band_stack_np_rescaled, 0, 1)
band_stack_np_rescaled[band_stack_np_rescaled == 0] = np.nan
return band_stack_np_rescaled
def detect_trucks(self, band_stack_np):
t0 = datetime.now()
if not isinstance(band_stack_np, np.ndarray):
raise TypeError("'band_stack_np' must be of type numpy.ndarray")
self.band_stack_np = band_stack_np
self._detect()
detections = self._context_zoom()
print("Duration: %s minutes" % ((datetime.now() - t0).total_seconds() / 60))
return detections
def _detect(self):
b02, b03, b04 = self.band_stack_np[2], self.band_stack_np[1], self.band_stack_np[0]
min_quantile_blue, max_quantile_blue = np.nanquantile(b02, [0.5]), np.nanquantile(b02, [0.999])
max_quantile_green, max_quantile_red = np.nanquantile(b03, [0.9]), np.nanquantile(b04, [0.9])
bg_ratio, br_ratio = normalized_ratio(b02, b03), normalized_ratio(b02, b04)
bg = np.int8(bg_ratio > np.nanmean(b02) - np.nanmean(b03))
br = np.int8(br_ratio > np.nanmean(b02) - np.nanmean(b04))
blue_min = np.int8(b02 > min_quantile_blue)
blue_max = np.int8(b02 < max_quantile_blue)
green_max = np.int8(b03 < max_quantile_green)
red_max = np.int8(b04 < max_quantile_red)
mask = self.expose_anomalous_pixels(self.band_stack_np)
self.band_stack_np = self.band_stack_np * mask
self.band_stack_np[self.band_stack_np == 0] = np.nan
std_min = np.int8(np.nanstd(self.band_stack_np[0:3], 0) * 10 >= THRESHOLDS["q1_std_at_max_blue"][0])
self.trucks_np = np.int8(bg * br * blue_min * green_max * red_max * std_min)
bg_ratio, br_ratio, blue_min, blue_max, green_max, red_max, std_min = None, None, None, None, None, None, None
def _context_zoom(self):
valid = np.where(self.trucks_np == 1)
boxes = [[], [], [], [], [], [], [], [], [], [], [], []]
y_max, x_max = self.trucks_np.shape
print("Context zoom\n%s" % (len(valid[0])))
pb = ProgressBar(len(valid[0]), 50)
for y, x, i in zip(valid[0], valid[1], range(len(valid[0]))):
pb.update(i)
if self.trucks_np[y, x] != 1:
continue
radius_low = int(MEAN_MAX_DIST_RED) + 2
radius_up = radius_low + 1
y_low, y_up = y - radius_low, y + radius_up
y_low, y_up = 0 if y_low < 0 else y_low, y_max if y_up > y_max else y_up
x_low, x_up = x - radius_low, x + radius_up
x_low, x_up = 0 if x_low < 0 else x_low, x_max if x_up > x_max else x_up
self.trucks_np = self.eliminate_multi_detections(self.trucks_np, y, x)
sub_stack = self.band_stack_np[:, y_low:y_up, x_low:x_up].copy()
if np.count_nonzero(~np.isnan(sub_stack)) == 0:
continue
t0 = datetime.now()
box_test_result = self._box_test(sub_stack)
t1 = datetime.now()
try:
the_box = box_test_result["box"]
except KeyError:
continue
else:
box_metrics = box_test_result["box_metrics"]
bounding_box = [the_box["xmin"], the_box["ymin"], the_box["xmax"], the_box["ymax"]]
box_full_array = [x_low + bounding_box[0], y_low + bounding_box[1],
x_low + bounding_box[2], y_low + bounding_box[3]]
box_full_array[2] = self.lon.shape[0] - 1 if box_full_array[2] >= self.lon.shape[0] else box_full_array[2]
box_full_array[3] = self.lat.shape[0] - 1 if box_full_array[3] >= self.lat.shape[0] else box_full_array[3]
ymax, xmax = box_full_array[3] + 1, box_full_array[2] + 1
ymax = self.lat.shape[0] - 1 if ymax >= self.lat.shape[0] else ymax
xmax = self.lon.shape[0] - 1 if xmax >= self.lon.shape[0] else xmax
bounding_box = box(self.lon[box_full_array[0]],
self.lat[box_full_array[1]],
self.lon[xmax],
self.lat[ymax])
direction_degree = box_metrics["direction"]
values = [bounding_box, box_metrics["spectral_angle"], box_metrics["slope"],
self.direction_degree_to_description(direction_degree), direction_degree,
box_test_result["quantile"], box_test_result["speed"], box_metrics["score"],
box_metrics["std"], box_metrics["red_mean"], box_metrics["green_mean"], box_metrics["blue_mean"]]
for idx, value in enumerate(values):
boxes[idx].append(value)
detections = gpd.GeoDataFrame({"rsquared": boxes[1],
"slope": boxes[2],
"direction_description": boxes[3],
"direction_degree": boxes[4],
"localization_quantile": boxes[5],
"speed": boxes[6],
"score": boxes[7],
"std": boxes[8],
"red_ratio": boxes[9],
"green_ratio": boxes[10],
"blue_ratio": boxes[11]},
geometry=boxes[0],
crs=self.crs)
print("\nNumber of detections: %s" % (len(detections)))
return detections
def _box_test(self, subset):
t0 = datetime.now()
subset_copy = subset.copy()
subset[:, normalized_ratio(subset[3], subset[0]) > MAX_NDVI] = np.nan
detection_y, detection_x = int(subset.shape[1] / 2), int(subset.shape[2] / 2)
detection_yx = [detection_y, detection_x]
if np.isnan(subset[0, detection_y, detection_x]):
return {}
detection_stack = subset[:, detection_y, detection_x].copy()
subset[:, detection_y, detection_x] = detection_stack.copy()
if np.count_nonzero(~np.isnan(subset[0])) < 3:
return {}
n_bands = subset.shape[0] - 1
ratios = np.zeros((n_bands * 2 + 2, subset.shape[1], subset.shape[2]))
ratio_counterparts = [[1, 2], [0, 2], [0, 1]]
for i in range(n_bands):
for j, k in enumerate(ratio_counterparts[i]):
ratios[i + i + j] = normalized_ratio(subset[i], subset[k])
ratios[6] = np.nanstd(subset[0:3], 0) * 10
ratios[7] = np.nanstd(ratios, 0) * 10
ratios[:, np.isnan(ratios[0])] = np.nan
q = np.float32([0.99])
t0 = datetime.now()
qantiles_dummy = np.float32([1, 1])
quantiles_sum = qantiles_dummy.copy()
while np.count_nonzero(quantiles_sum) < 6 and q[0] > 0.5:
quantiles_sum = self.quantile_filter(ratios, q)
if quantiles_sum is None:
quantiles_sum = qantiles_dummy.copy()
q -= 0.01
q += 0.01
t0 = datetime.now()
try:
s = all(quantiles_sum == qantiles_dummy)
except TypeError:
pass
else:
return {}
try:
quantiles_sum[quantiles_sum > 0] = 1
except TypeError:
return {}
# quantiles_sum = self.eliminate_single_nonzeros(quantiles_sum)
if np.count_nonzero(quantiles_sum > 0) < 3:
return {}
for j, k, t in zip([0, 2, 4], [1, 3, 5], [MAX_MAX_DIST_RED + 1, MAX_MAX_DIST_GREEN + 1, 2]):
subset, ratios, quantiles_sum = self._eliminate_distant_pixels(subset, ratios, ratios[j] + ratios[k],
quantiles_sum, detection_yx, t, q)
# apply cluster exposing method twice in order to account for changes introduced by filter
y_low, x_low, y_up, x_up = detection_y - 1, detection_x - 1, detection_y + 2, detection_x + 2
quantiles_sum[y_low:y_up, x_low:x_up] = np.zeros((3, 3)) # temporary
spatial_cluster = self._expose_cluster(quantiles_sum, subset[0:3], False)
# if a cluster has high amount of values exclude corners, potentially divide large cluster
boxes, boxes_metrics, scores, clusters = [], [], [], []
# print("Section 3 took: %s" % str((datetime.now() - t0).total_seconds()))
t0 = datetime.now()
for cluster in np.unique(spatial_cluster[spatial_cluster != 0]):
spatial_cluster[detection_y, detection_x] = cluster # assign value of cluster to detection pixel
ys, xs = np.where(spatial_cluster == cluster)
try:
a_box = [np.min(ys), np.min(xs), np.max(ys), np.max(xs)]
except ValueError:
continue
box_arr = subset[0:3, a_box[0]:a_box[2]+1, a_box[1]:a_box[3]+1].copy()
# if (np.nanmean(np.nanstd(box_arr, 0)) * 10) < MIN_RGB_STD * 0.5: # be tolerant here
# continue
cluster_sub = spatial_cluster[a_box[0]:a_box[2]+1, a_box[1]:a_box[3]+1].copy()
cluster_sub[np.isnan(box_arr[0])] = 0
ys, xs = np.where(spatial_cluster == cluster)
if len(ys) < 2:
continue
ys, xs = self.eliminate_outlier_indices(ys, xs)
a_box = [np.min(ys), np.min(xs), np.max(ys), np.max(xs)]
box_arr = subset[0:3, a_box[0]:a_box[2]+1, a_box[1]:a_box[3]+1].copy()
if np.count_nonzero(~np.isnan(box_arr)) / 3 / (box_arr.shape[1] * box_arr.shape[2]) < 0.3: # too few pixels
continue
box_ratios = ratios[:, a_box[0]:a_box[2]+1, a_box[1]:a_box[3]+1].copy()
t0b = datetime.now()
box_metrics = self._characterize_spatial_spectral(box_arr, box_ratios)
# a_box = self._crop_box(a_box, ratios, box_metrics["direction"], detection_yx)
#print("Section 4b took: %s" % str((datetime.now() - t0b).total_seconds()))
# box_arr = subset[0:3, a_box[0]:a_box[2] + 1, a_box[1]:a_box[3] + 1].copy()
if all([box_arr.shape[1] <= 2, box_arr.shape[2] <= 2]):
continue
box_metrics = self.calc_score(box_metrics, box_arr)
if self._spatial_spectral_match(box_metrics):
clusters.append(cluster)
boxes.append(a_box)
boxes_metrics.append(box_metrics)
scores.append(box_metrics["score"])
# print("Section 4 took: %s" % str((datetime.now() - t0).total_seconds()))
t0 = datetime.now()
scores = np.array(scores)
try:
max_score = np.max(scores)
match = np.where(scores == max_score)[0][0]
except ValueError:
return {}
box_metrics, selected_box = boxes_metrics[match], boxes[match]
if np.std(selected_box) == 0:
return {}
if any(self.box_too_large(selected_box, MAX_MAX_DIST_RED)):
selected_box = self._subset_by_ratios(ratios, selected_box) # subset box to high quantile ratios
if any(self.box_too_large(selected_box, MAX_MAX_DIST_RED)):
subset_dict = self._subset_by_boxes(subset, ratios, selected_box, [3, 4]) # try default sub boxes
try:
box_metrics = subset_dict["box_metrics"]
except KeyError:
pass
else:
a_box = subset_dict["selected_box"]
box_arr = subset[0:3, a_box[0]:a_box[2] + 1, a_box[1]:a_box[3] + 1]
box_metrics = self.calc_score(box_metrics, box_arr)
if not self._spatial_spectral_match(box_metrics):
return {}
box_too_small = all([(selected_box[2] - selected_box[0] + 1) <= 2, (selected_box[3] - selected_box[1] + 1) <= 2])
if box_too_small or box_metrics["score"] < self.min_score:
return {}
the_box = {"ymin": selected_box[0], "xmin": selected_box[1],
"ymax": selected_box[2], "xmax": selected_box[3]}
# print("Section 5 took: %s" % str((datetime.now() - t0).total_seconds()))
return {"box": the_box,
"box_metrics": box_metrics,
"quantile": q[0],
"speed": self.calc_speed(ratios[:, the_box["ymin"]:the_box["ymax"]+1, the_box["xmin"]:the_box["xmax"]+1])}
def _characterize_spatial_spectral(self, sub_arr, sub_variables):
return_dict = {}
keys = ["spectral_angle", "spatial_angle", "slope", "red_length", "green_length", "direction",
"blue_mean", "green_mean", "red_mean", "red_ratio_max", "green_ratio_max", "blue_ratio_max"]
for key in keys:
return_dict[key] = np.nan
return_dict_copy = return_dict.copy()
blue_ratios = np.nansum(sub_variables[4:6], 0) + sub_arr[2] * 10 # sum of blue ratios
green_ratios = np.nansum(sub_variables[2:4], 0) + sub_arr[1] * 10 # sum of green ratios
red_ratios = np.nansum(sub_variables[0:2], 0) + sub_arr[0] * 10 # sum of red ratios
try:
try:
blue_y, blue_x = self.crop_2d_indices(np.where(blue_ratios == np.nanmax(blue_ratios)))
except ValueError:
return return_dict
else:
green_ratios[blue_y, blue_x] = np.nan # set to nan in order to avoid double target
green_y, green_x = self.crop_2d_indices(np.where(green_ratios == np.nanmax(green_ratios)))
red_ratios[blue_y, blue_x] = np.nan # avoid double target
red_ratios[green_y, green_x] = np.nan # ""
red_y, red_x = self.crop_2d_indices(np.where(red_ratios == np.nanmax(red_ratios)))
except IndexError:
return return_dict
blue_indices = [blue_y, blue_x]
blue_red_spatial_vector = self.calc_vector([red_y, red_x], blue_indices) # spatial vector blue to red
blue_green_spatial_vector = self.calc_vector([green_y, green_x], blue_indices) # spatial vector blue to green
return_dict = {"red_length": self.calc_vector_length(blue_red_spatial_vector),
"green_length": self.calc_vector_length(blue_green_spatial_vector),
"spatial_angle": self.calc_vector_angle_in_degrees(blue_red_spatial_vector,
blue_green_spatial_vector)}
if not self._spatial_spectral_match(return_dict): # check that in order to reduce run time
return return_dict_copy # if spatial metrics do not satisfy thresholds return here alread
given_vector = np.hstack([sub_variables[4:6, blue_y, blue_x], # stack of variables and target pixels
sub_variables[2:4, green_y, green_x],
sub_variables[0:2, red_y, red_x],
sub_variables[6, blue_y, blue_x],
sub_variables[6, green_y, green_x],
sub_variables[6, red_y, red_x],
sub_variables[7, blue_y, blue_x],
sub_variables[7, green_y, green_x],
sub_variables[7, red_y, red_x],
sub_arr[2, blue_y, blue_x],
sub_arr[2, green_y, green_x],
sub_arr[2, red_y, red_x],
sub_arr[1, green_y, green_x],
sub_arr[1, blue_y, blue_x],
sub_arr[1, red_y, red_x],
sub_arr[0, red_y, red_x],
sub_arr[0, blue_y, blue_x],
sub_arr[0, green_y, green_x]])
col_names, spectral_angles, slopes, spearman = [], [], [], []
for i in range(7):
col_names = col_names + ["rgb_vector" + str(i) + str(j) for j in [0, 1, 2]]
# calculate spearmanr correlations between given variables and all reference variables
for row in RGB_VECTORS.iterrows():
r = row[1]
ref_vector = np.array([r[col_name] for col_name in col_names])
regression = linregress(given_vector, ref_vector)
spearman.append(spearmanr(given_vector, ref_vector)[0])
#spectral_angles.append(regression.rvalue)
slopes.append(regression.slope)
# use mean of all spearmanr correlation coefficients as indicator for agreement with reference dataset
return_dict["spectral_angle"] = np.nanmean(spearman) #np.nanquantile(spectral_angles, [0.75])[0] - np.nanstd(spectral_angles)
return_dict["slope"] = np.nanmean(slopes)
return_dict["direction"] = self.calc_vector_direction_in_degree(np.mean(np.vstack([blue_red_spatial_vector,
blue_green_spatial_vector]),
axis=0))
return_dict["red_mean"] = np.nanmean(sub_arr[0])
return_dict["green_mean"] = np.nanmean(sub_arr[1])
return_dict["blue_mean"] = np.nanmean(sub_arr[2])
return_dict["red_ratio_max"] = np.nanmax(np.nanmax(sub_variables[0:2]))
return_dict["green_ratio_max"] = np.nanmax(np.nanmax(sub_variables[2:4]))
return_dict["blue_ratio_max"] = np.nanmax(np.nanmax(sub_variables[4:6]))
return return_dict
def _subset_by_ratios(self, ratios, selected_box):
original_box = selected_box.copy()
box_ratios = ratios[:, selected_box[0]:selected_box[2]+1, selected_box[1]:selected_box[3]+1]
q = np.float32([0.2])
too_large_y, too_large_x = True, True
while any([too_large_y, too_large_x]) and q[0] < 1:
too_large_y, too_large_x = self.box_too_large(selected_box, MAX_MAX_DIST_RED)
if any([too_large_y, too_large_x]):
quantiles_sum = self.quantile_filter(box_ratios, q)
if quantiles_sum is not None:
ys, xs = np.where(quantiles_sum != 0)
try:
selected_box = [min(ys), min(xs), max(ys), max(xs)]
except ValueError:
q += 0.01
continue
q += 0.01
if selected_box != original_box:
selected_box[2] = original_box[0] + selected_box[2]
selected_box[3] = original_box[1] + selected_box[3]
selected_box[0] += original_box[0]
selected_box[1] += original_box[1]
return selected_box
def _subset_by_boxes(self, subset, ratios, selected_box, window_sizes):
box_arr = subset[0:3, selected_box[0]:selected_box[2] + 1, selected_box[1]:selected_box[3] + 1]
box_ratios = ratios[:, selected_box[0]:selected_box[2] + 1, selected_box[1]:selected_box[3] + 1]
boxes, boxes_metrics, boxes_rsquared, boxes_rgb_sums, boxes_spatial_angle = [], [], [], [], []
for w in window_sizes:
y_indices_low = np.arange(0, box_arr.shape[1] - w + 1, 1)
x_indices_low = np.arange(0, box_arr.shape[2] - w + 1, 1)
y_indices_up = [y + w for y in y_indices_low]
x_indices_up = [x + w for x in x_indices_low]
for y_low, y_up in zip(y_indices_low, y_indices_up):
for x_low, x_up in zip(x_indices_low, x_indices_up):
sub_box_arr = box_arr[:, y_low:y_up, x_low:x_up]
sub_box_ratios = box_ratios[:, y_low:y_up, x_low:x_up]
box_metrics = self._characterize_spatial_spectral(sub_box_arr, sub_box_ratios)
if self._spatial_spectral_match(box_metrics):
max_values = [np.nanmax(sub_box_arr[i]) for i in range(sub_box_arr.shape[0])]
boxes.append([y_low, x_low, y_up - 1, x_up - 1]) # -1 due to indexing
boxes_metrics.append(box_metrics)
boxes_rsquared.append(box_metrics["spectral_angle"])
boxes_rgb_sums.append(np.sum(max_values))
boxes_spatial_angle.append(box_metrics["spatial_angle"])
combined = np.array(boxes_rsquared) + np.array(boxes_rgb_sums) - np.array(boxes_spatial_angle)
try:
max_combined = np.max(combined)
except ValueError:
return {}
try:
match = np.where(combined == max_combined)[0][0]
except IndexError:
return {}
new_box = boxes[match]
selected_box[2] = selected_box[0] + new_box[2]
selected_box[3] = selected_box[1] + new_box[3]
selected_box[0] += new_box[0]
selected_box[1] += new_box[1]
return {"box_metrics": boxes_metrics[match], "selected_box": selected_box}
def _eliminate_distant_pixels(self, sub_arr, ratios, band_ratios, quantiles_sum, center, threshold, quantile):
try:
ys, xs = np.where(band_ratios > np.nanquantile(band_ratios, quantile))
except ValueError:
return sub_arr
else:
for y, x in zip(ys, xs):
if self.calc_vector_length(self.calc_vector(center, [y, x])) > threshold:
sub_arr[:, y, x] = np.nan
ratios[:, y, x] = np.nan
quantiles_sum[y, x] = 0
return sub_arr, ratios, quantiles_sum
def _expose_cluster(self, target_arr, band_stack, exclude_corners=True):
target_arr[np.isnan(target_arr)] = 0
if np.count_nonzero(target_arr) == 0:
return target_arr
try:
center = [int(target_arr.shape[0] / 2), int(target_arr.shape[1] / 2)]
except IndexError:
return target_arr
ys, xs = np.where(target_arr != 0)
yet_seen, cluster_value, clusters = [], 0, target_arr.copy()
for y, x in zip(ys, xs):
distance_center = self.calc_vector_length(self.calc_vector([y, x], center)) - 1
rgb_slice = band_stack[0:3, y, x]
max_idx = np.where(rgb_slice == np.nanmax(rgb_slice))[0][0]
distance_wrong = [distance_center > t for t in [MAX_MAX_DIST_RED, MEAN_MAX_DIST_RED, MEAN_MAX_DIST_GREEN]]
max_idx_wrong = [True, max_idx not in [0, 1], max_idx not in [0, 1, 2]]
should_continue = False
for condition_a, condition_b in zip(distance_wrong, max_idx_wrong):
if condition_a and condition_b:
clusters[y, x], should_continue = 0, True
break
if should_continue:
continue
if not [y, x] in yet_seen:
cluster_value += 1
clusters, yet_seen = self._search_adjacent_non_zero(clusters, [y, x], cluster_value, yet_seen,
exclude_corners)
return clusters
def _crop_box(self, given_box, ratios, direction, detection_yx):
max_size = MAX_MAX_DIST_RED * 2
box_size = (given_box[2] - given_box[0] + 1) * (given_box[3] - given_box[1] + 1)
direction_match = any(np.abs([x - direction for x in [0, 90, 180, 270]]) < 45)
q = [0.5]
while direction_match and box_size >= max_size and q[0] < 1:
box_ratios = ratios[:, given_box[0]:given_box[2] + 1, given_box[1]:given_box[3] + 1]
quantiles = self.quantile_filter(box_ratios, q)
if quantiles is not None:
try:
# always retain value 1 at detection
quantiles[np.abs(detection_yx[0] - given_box[0]), np.abs(detection_yx[1] - given_box[1])] = 1
except IndexError:
pass
ys, xs = np.where(quantiles != 0)
try:
given_box[2] = int(given_box[0] + max(ys))
given_box[3] = int(given_box[1] + max(xs))
given_box[0] += min(ys)
given_box[1] += min(xs)
except ValueError:
q[0] += 0.1
continue
else:
box_size = (given_box[2] - given_box[0] + 1) * (given_box[3] - given_box[1] + 1)
if box_size >= max_size:
q[0] += 0.1
return given_box
def _search_adjacent_non_zero(self, arr, point, new_value, yet_seen, exclude_corners):
arr_modified = arr.copy()
original_value = arr_modified[point[0], point[1]].copy()
arr_modified[point[0], point[1]] = 0
ymin, ymax = point[0]-1, point[0]+2
xmin, xmax = point[1]-1, point[1]+2
ymin, xmin = 0 if ymin < 0 else ymin, 0 if xmin < 0 else xmin
window_3x3 = arr_modified[ymin:ymax, xmin:xmax].copy()
if exclude_corners:
for corner_y, corner_x in zip([0, 0, 2, 2], [0, 2, 0, 2]):
try:
window_3x3[corner_y, corner_x] = 0
except IndexError:
continue
ys, xs = np.where(window_3x3 != 0)
for y_local, x_local in zip(ys, xs):
y, x = ymin + y_local, xmin + x_local
if [y, x] not in yet_seen:
arr_modified[y, x] = new_value
arr_modified, yet_seen = self._search_adjacent_non_zero(arr_modified, [y, x], new_value, yet_seen,
exclude_corners)
yet_seen.append([y, x])
value = original_value if point in yet_seen else new_value
if point not in yet_seen:
yet_seen.append(point)
arr_modified[point[0], point[1]] = value
return arr_modified, yet_seen
def calc_speed(self, ratios):
resolution = 10 # meters
blue_ratios = np.nansum(ratios[4:6], 0)
red_ratios = np.nansum(ratios[0:2], 0)
green_ratios = np.nansum(ratios[2:4], 0)
try:
max_blue, max_red, max_green = np.nanmax(blue_ratios), np.nanmax(red_ratios), np.nanmax(green_ratios)
except IndexError:
return 0
diameter = (np.max(ratios.shape[1:3]) - (1.5 - max_blue)) * resolution
kilometers_hour = (diameter * (3600 / SECONDS_OFFSET_B02_B04)) / 1000
return kilometers_hour
def _spatial_spectral_match(self, metrics_dict):
is_match = True
has_values = 3
# try:
# ratios_means = [metrics_dict["red_ratio_max"], metrics_dict["green_ratio_max"], metrics_dict["blue_ratio_max"]]
# except KeyError:
# has_values -= 1
# else:
# ratios_high = np.max(ratios_means) > 0.2
# ratios_high_all = all([mean_value > 0.05 for mean_value in ratios_means])
# ratios_high_all = ratios_high_all or sum([mean_value > 0.25 for mean_value in ratios_means]) >= 2
# ratios_high_two = sum([mean_value > 0.15 for mean_value in ratios_means]) > 1
# is_match *= ratios_high * ratios_high_all * ratios_high_two
# try:
# is_match *= metrics_dict["std"] >= MIN_RGB_STD
# except KeyError:
# has_values -= 1
try:
is_match *= metrics_dict["spectral_angle"] >= self.min_r_squared
except KeyError:
has_values -= 1
try:
is_match *= metrics_dict["score"] >= self.min_score
except KeyError:
has_values -= 1
try:
green_length = metrics_dict["green_length"]
red_length = metrics_dict["red_length"]
is_match *= green_length < red_length
is_match *= red_length < (MAX_MAX_DIST_RED + 0.5)
is_match *= green_length < (MAX_MAX_DIST_GREEN + 0.5)
except KeyError:
has_values -= 1
# try:
# is_match *= metrics_dict["slope"] < MAX_SLOPE
# is_match *= metrics_dict["slope"] > MIN_SLOPE
# except KeyError:
# has_values -= 1
# try:
# is_match *= metrics_dict["spatial_angle"] < MAX_ANGLE_BR_BG
# except KeyError:
# has_values -= 1
if has_values == 0:
return False
else:
return is_match
@staticmethod
def calc_score(metrics_dict, sub_arr):
metrics_dict["std"] = np.nanmean(np.nanstd(sub_arr, 0)) * 10
reflectance_means_sum = (metrics_dict["red_mean"] + metrics_dict["blue_mean"] + metrics_dict[
"green_mean"]) * 10
ratio_means_sum = metrics_dict["red_ratio_max"] + metrics_dict["green_ratio_max"] \
+ metrics_dict["blue_ratio_max"]
metrics_dict["score"] = metrics_dict["spectral_angle"] + metrics_dict["std"] - np.abs(
1 - metrics_dict["slope"]) \
+ reflectance_means_sum + ratio_means_sum - metrics_dict["spatial_angle"] / 100
return metrics_dict
@staticmethod
def calc_primary_accuracy(detected_boxes, validation_boxes):
out_keys = ["validation_percentage", "detection_percentage", "validation_intersection_percentage",
"detection_intersection_percentage"]
out_dict = {}
lens = [len(detected_boxes) == 0, len(validation_boxes) == 0]
if lens[0]:
print("No entries in 'detected_boxes'")
if lens[1]:
print("No entries in 'validation_boxes'")
if any(lens):
for key in out_keys:
out_dict[key] = np.nan
return out_dict
intersections = {"validation": [], "detection": []}
intersection_areas = {"validation": [], "detection": []}
keys = ["validation", "detection"]
for boxes_a, boxes_b, key in zip([validation_boxes, detected_boxes], [detected_boxes, validation_boxes], keys):
for detected_box in boxes_a.geometry:
for i, validation_box in enumerate(boxes_b.geometry):
if detected_box.intersects(validation_box):
intersections[key].append(i)
detected_gpd = gpd.GeoDataFrame({"geometry": [detected_box]}).set_geometry("geometry")
validation_gpd = gpd.GeoDataFrame({"geometry": [validation_box]}).set_geometry("geometry")
detected_gpd.crs = detected_boxes.crs
validation_gpd.crs = detected_gpd.crs
intersected = gpd.overlay(detected_gpd, validation_gpd, how="intersection")
intersection_areas[key].append(intersected.area[0] / detected_gpd.area[0] * 100)
out_values = [(len(intersections["validation"]) / len(validation_boxes)) * 100,
(len(intersections["detection"]) / len(detected_boxes)) * 100,
np.nanmean(np.array(intersection_areas["validation"])),
np.nanmean(np.array(intersection_areas["detection"]))]
for key, value in zip(out_keys, out_values):
out_dict[key] = value
return out_dict
@staticmethod
def eliminate_single_nonzeros(arr):
for y in range(arr.shape[0]):
for x in range(arr.shape[1]):
window_3x3 = arr[y-1:y+2, x-1:x+2]
if np.count_nonzero(window_3x3[~np.isnan(window_3x3)]) < 2:
arr[y, x] = 0
return arr
@staticmethod
def eliminate_outlier_indices(ys, xs):
dtype_ys, dtype_xs = ys.dtype, xs.dtype
ys, xs = ys.astype(np.float32), xs.astype(np.float32)
unique_ys, unique_xs = np.unique(ys), np.unique(xs)
n = len(ys)
n_unique_ys, n_unique_xs = len(unique_ys), len(unique_xs)
amount_unique_ys, amount_unique_xs = np.zeros(n_unique_ys), np.zeros(n_unique_xs)
for unique_idx, amount_unique, indices in zip([unique_ys, unique_xs],
[amount_unique_ys, amount_unique_xs],
[ys, xs]):
for i, idx in enumerate(unique_idx):
amount_unique[i] = len(np.where(indices == idx)[0]) / n * 100
for amounts, uniques, indices in zip([amount_unique_ys, amount_unique_xs], [unique_ys, unique_xs], [ys, xs]):
if (amounts > 50).any(): # there is a major y
outlier_idxs = np.where(amounts < 15)
if len(outlier_idxs[0]) > 0:
for outlier_idx in outlier_idxs:
real_idx = uniques[outlier_idx]
to_nan = indices == real_idx
ys[to_nan] = np.nan # eliminate y and x index
xs[to_nan] = np.nan
ys, xs = ys[~np.isnan(ys)], xs[~np.isnan(xs)]
return ys.astype(dtype_ys), xs.astype(dtype_xs)
@staticmethod
def quantile_filter(arr, quantile_value):
quantiles = np.array([arr[i] >= np.nanquantile(arr[i], quantile_value) for i in range(arr.shape[0])],
dtype=np.int8)
# quantiles_initial_sum = quantiles.sum(0)
# if np.count_nonzero(np.int8(quantiles_initial_sum > 0) * np.int8(quantiles_initial_sum < 3)) == 0:
# return None
shape = quantiles.shape
s = shape[1]
buffers = [2, 2, 1, 1, 2, 2, s, s, s, s]
for i in range(quantiles.shape[0]):
for y in range(shape[1]):
for x in range(shape[2]):
buffer = buffers[i]
y_low, y_up = y - buffer, y + buffer + 1
x_low, x_up = x - buffer, x + buffer + 1
y_low = 0 if y_low < 0 else y_low
x_low = 0 if x_low < 0 else x_low
y_up, x_up = shape[1] if y_up > shape[1] else y_up, shape[2] if x_up > shape[2] else x_up
y_low = y_low - 1 if y_up == (shape[1] + 1) else y_low
x_low = x_low - 1 if x_up == (shape[2] + 1) else x_low
y_up, x_up = y_up + 1 if y_low == 0 else y_up, x_up + 1 if x_low == 0 else x_up
original_value = quantiles[i, y, x]
if original_value == 0:
continue
quantiles_sub = quantiles[:, y_low:y_up, x_low:x_up].copy()
quantiles_sub[i] = np.zeros_like(quantiles_sub[i]) # look only for matches in other bands
sums = [np.nansum(quantiles_sub[j]) for j in range(quantiles_sub.shape[0])]
quantiles[i, y, x] = 0 if np.count_nonzero(sums) < 2 else original_value
return quantiles.sum(0)
@staticmethod
def box_too_large(the_box, max_size):
size_y, size_x = (the_box[2] - the_box[0] + 1), (the_box[3] - the_box[1] + 1)
too_large_y = size_y > max_size
too_large_x = size_x > max_size
return too_large_y, too_large_x
# not really needed
@staticmethod
def calc_low_ratios_mask(ratios, min_values_ratios):
ratio_mask = np.zeros_like(ratios[0:3], dtype=np.int8)
only_false = np.zeros(3, dtype=np.bool)
# reflectance and ratio filter
for i in range(ratio_mask.shape[0]):
idx = 2 * i
ratio_mask[i] = np.int8((ratios[idx] + ratios[idx + 1]) > min_values_ratios[i])
only_false[i] = np.count_nonzero(ratio_mask) == 0
ratio_mask = ratio_mask.sum(0)
ratio_mask[ratio_mask > 2] = 0
ratio_mask[(2 >= ratio_mask) * (ratio_mask > 0)] = 1
return ratio_mask, only_false
@staticmethod
def calc_low_quantile_mask(reflectances, q):
low_quantile_red = np.int8(reflectances[0] > np.nanquantile(reflectances[0], q))
low_quantile_green = np.int8(reflectances[1] > np.nanquantile(reflectances[1], q))
low_quantile_blue = np.int8(reflectances[2] > np.nanquantile(reflectances[2], q))
low_quantile_mask = np.float32(low_quantile_red + low_quantile_green + low_quantile_blue)
low_quantile_mask[low_quantile_mask == 0] = np.nan
low_quantile_mask[low_quantile_mask > 0] = 1
return low_quantile_mask
@staticmethod
def calc_high_quantile_mask(reflectances, q):
high_quantile_red = np.int8(reflectances[0] < np.nanquantile(reflectances[0], q))
high_quantile_green = np.int8(reflectances[1] < np.nanquantile(reflectances[1], q))
high_quantile_blue = np.int8(reflectances[2] < np.nanquantile(reflectances[2], q))
high_quantile_mask = np.float32(high_quantile_red + high_quantile_green + high_quantile_blue)
high_quantile_mask[high_quantile_mask == 0] = np.nan
high_quantile_mask[high_quantile_mask > 0] = 1
return high_quantile_mask
@staticmethod
def expose_anomalous_pixels(band_stack_np):
w = 100
y_bound, x_bound = band_stack_np.shape[1], band_stack_np.shape[2]
roads = np.zeros((3, band_stack_np.shape[1], band_stack_np.shape[2]), dtype=np.float32)
for y in range(int(np.round(y_bound / w))):
for x in range(int(np.round(x_bound / w))):
y_idx, x_idx = np.clip((y + 1) * w, 0, y_bound), np.clip((x + 1) * w, 0, x_bound)
y_low, x_low = int(np.clip(y_idx - w, 0, 1e+30)), int(np.clip(x_idx - w, 0, 1e+30))
y_up, x_up = np.clip(y_idx + w + 1, 0, y_bound), np.clip(x_idx + w + 1, 0, x_bound)
y_size, x_size = (y_up - y_low), (x_up - x_low)
n = y_size * x_size
subset = band_stack_np[:, y_low:y_up, x_low:x_up]
roads[0, y_low:y_up, x_low:x_up] = np.repeat(np.nanmedian(subset[0]), n).reshape(y_size, x_size)
roads[1, y_low:y_up, x_low:x_up] = np.repeat(np.nanmedian(subset[1]), n).reshape(y_size, x_size)
roads[2, y_low:y_up, x_low:x_up] = np.repeat(np.nanmedian(subset[2]), n).reshape(y_size, x_size)
#max_diff = np.nanmax(band_stack_np[0:3] - np.nanmin(roads, 0), 0)
#mask = np.int8(max_diff > np.nanquantile(max_diff, [0.6]))
diff_red = band_stack_np[0] - (roads[0] / 2)
diff_green = band_stack_np[1] - (roads[1] / 2)
diff_blue = band_stack_np[2] - (roads[2] / 2)
diff_stack = np.array([diff_red, diff_green, diff_blue])
mask = np.zeros_like(diff_stack[0])
for i in range(diff_stack.shape[0]):
mask += np.int8(diff_stack[i] > np.nanquantile(diff_stack[i], [0.6]))
mask[mask != 0] = 1
mask = np.int8(mask)
return mask
@staticmethod
def get_osm_mask(bbox, crs, reference_arr, lat_lon_dict, dir_out):
osm_file = get_roads(bbox, ["motorway", "trunk", "primary"], OSM_BUFFER,
dir_out, str(bbox).replace(", ", "_")[1:-1] + "_osm_roads", str(crs),
reference_arr)
osm_vec = gpd.read_file(osm_file)
ref_xr = xr.DataArray(data=reference_arr, coords=lat_lon_dict, dims=["lat", "lon"])
osm_raster = rasterize_osm(osm_vec, ref_xr).astype(np.float32)
osm_raster[osm_raster != 0] = 1
osm_raster[osm_raster == 0] = np.nan
return osm_raster
@staticmethod
def crop_2d_indices(indices):
return np.array([index_arr[0] for index_arr in indices]).astype(np.int32)
@staticmethod
def calc_vector_direction_in_degree(vector):
# [1,1] -> 45°; [-1,1] -> 135°; [-1,-1] -> 225°; [1,-1] -> 315°
y_offset = 90 if vector[0] > 0 else 0
x_offset = 90 if vector[1] < 0 else 0
offset = 180 if y_offset == 0 and x_offset == 90 else 0
if vector[0] == 0:
direction = 0.
else:
direction = np.degrees(np.arctan(np.abs(vector[1]) / np.abs(vector[0])))
direction += offset + y_offset + x_offset
return direction
@staticmethod
def direction_degree_to_description(direction_degree):
step = 22.5
bins = np.arange(0, 359, step, dtype=np.float32)
descriptions = np.array(["N", "NNE", "NE", "ENE",
"E", "ESE", "SE", "SEE",
"S", "SSW", "SW", "WSW",
"W", "WNW", "NW", "NNW"])
i, b = 0, -1
while b < direction_degree and i < len(bins):
b = bins[i]
i += 1
return descriptions[i - 1]
@staticmethod
def calc_vector_angle_in_degrees(a, b):
cos = np.dot(a, b) / np.linalg.norm(a) / np.linalg.norm(b)
if np.abs(cos) >= 1:
return 0
else:
return np.degrees(np.arccos(cos))
@staticmethod
def calc_vector(b, a):
vector = []
for i in range(len(b)):
try:
vector.append(np.float32(b[i] - a[i]))
except IndexError:
raise IndexError("origin and target must be of equal length")
return np.array(vector).astype(np.float32)
@staticmethod
def calc_vector_length(vector):
squared = np.float32([element ** 2 for element in vector])
return np.sqrt(squared.sum()).astype(np.float32)
@staticmethod
def get_smallest_deviation(arr, value):
dev = np.abs(arr - value)
return int(np.where(dev == dev.min())[0][0])
@staticmethod
def eliminate_multi_detections(arr, y, x):
y0 = y - 2 if (y - 2) >= 0 else y
x0 = x - 2 if (x - 2) >= 0 else x
y1 = y + 3 if (y + 3) <= arr.shape[0] else arr.shape[0]
x1 = x + 3 if (x + 3) <= arr.shape[1] else arr.shape[1]
arr[y0:y1, x0:x1] = np.zeros((y1 - y0, x1 - x0))
arr[y, x] = 1 # detection of interest remains
return arr
| true | true |
1c2dbf27ffce954a671517058c5126dc69927784 | 668 | py | Python | __main__.py | cmgoffena13/Obelisk-Mint | 2715bcc214c8d72a6b15bd549c5fcd1caee65c9a | [
"MIT"
] | 1 | 2022-02-18T18:05:46.000Z | 2022-02-18T18:05:46.000Z | __main__.py | cmgoffena13/Obelisk-Mint | 2715bcc214c8d72a6b15bd549c5fcd1caee65c9a | [
"MIT"
] | null | null | null | __main__.py | cmgoffena13/Obelisk-Mint | 2715bcc214c8d72a6b15bd549c5fcd1caee65c9a | [
"MIT"
] | null | null | null | # Runs entire Obelisk ETL process
from Obelisk.mint.mint_extract import Mint_API
import os
file_path = os.path.abspath(os.path.dirname(__file__))
full_load = False
if __name__ == '__main__':
os.system(f"{file_path}\\venv\\Scripts\\activate")
print('Starting Obelisk ETL')
print('Starting Obelisk Extracts')
Mint = Mint_API(full_load=full_load)
Mint.extract()
print('Completed Obelisk Extracts')
print('Starting Obelisk Transforms')
print('Completed Obelisk Transforms')
print('Starting Obelisk Loads')
print('Completed Obelisk Loads')
print('Completed Obelisk ETL')
os.system("deactivate") | 26.72 | 55 | 0.693114 |
from Obelisk.mint.mint_extract import Mint_API
import os
file_path = os.path.abspath(os.path.dirname(__file__))
full_load = False
if __name__ == '__main__':
os.system(f"{file_path}\\venv\\Scripts\\activate")
print('Starting Obelisk ETL')
print('Starting Obelisk Extracts')
Mint = Mint_API(full_load=full_load)
Mint.extract()
print('Completed Obelisk Extracts')
print('Starting Obelisk Transforms')
print('Completed Obelisk Transforms')
print('Starting Obelisk Loads')
print('Completed Obelisk Loads')
print('Completed Obelisk ETL')
os.system("deactivate") | true | true |
1c2dbf6bdda6a28c5754429390f06bef5c7536aa | 394 | py | Python | 1_Basic/3_Operators/5_identity.py | hauntarl/real-python | 6ffb535648bf5c79c90e2ed7def842078bc7807f | [
"MIT"
] | 2 | 2020-12-15T18:11:00.000Z | 2021-03-01T11:43:16.000Z | 1_Basic/3_Operators/5_identity.py | hauntarl/real_python | 6ffb535648bf5c79c90e2ed7def842078bc7807f | [
"MIT"
] | null | null | null | 1_Basic/3_Operators/5_identity.py | hauntarl/real_python | 6ffb535648bf5c79c90e2ed7def842078bc7807f | [
"MIT"
] | null | null | null | # Python provides two operators, is and is not,
# that determine whether the given operands have the
# same identity i.e. refer to the same object.
# This is not the same thing as equality, which means
# the two operands refer to objects that contain the
# same data but are not necessarily the same object.
x = 1001
y = 1000
print('x == y + 1 ?', x == y + 1)
print('x is y + 1 ?', x is y + 1)
| 35.818182 | 53 | 0.692893 |
x = 1001
y = 1000
print('x == y + 1 ?', x == y + 1)
print('x is y + 1 ?', x is y + 1)
| true | true |
1c2dbfb659d1a96969b90a2470a0bb0aa4f4c5cc | 1,669 | py | Python | MNIST_NN_VS_SVM/plots.py | ahmednader10/Machine_Learning | fab0c7cd773b5e001b56c5349550085e34661e4d | [
"MIT"
] | null | null | null | MNIST_NN_VS_SVM/plots.py | ahmednader10/Machine_Learning | fab0c7cd773b5e001b56c5349550085e34661e4d | [
"MIT"
] | null | null | null | MNIST_NN_VS_SVM/plots.py | ahmednader10/Machine_Learning | fab0c7cd773b5e001b56c5349550085e34661e4d | [
"MIT"
] | null | null | null | import pandas as pd
import sklearn
from sklearn.neural_network import MLPClassifier, MLPRegressor
from sklearn.model_selection import train_test_split
from sklearn.svm import LinearSVC, SVC
from sklearn.multiclass import OneVsRestClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn import preprocessing
import numpy as np
import os, struct
from array import array as pyarray
from numpy import append, array, int8, uint8, zeros
from pylab import *
from numpy import *
from sklearn.model_selection import validation_curve
import matplotlib.pyplot as plt
param_range = [120,100,80,60,40,35,30,20]
train_scores_mean1 = [1,1,1,1,1] #1 => learning rates
train_scores_mean2 = [0.9706,0.9889,0.9962,0.9974,0.9987] #2 => C values
train_scores_mean3 = [1,1,1,1,1] #3 => Momentum values
train_scores_mean4 = [1,1,1,1,1] #4 => Batch size values
train_scores_mean5 = [1,1,1,1,1] #5 => hidden nodes size values
test_scores_mean1 = [0.9639,0.9667,0.9678,0.9681,0.9708]
test_scores_mean2 = [0.9302,0.9209,0.9137,0.9126,0.9111]
test_scores_mean3 = [0.965,0.9665,0.9679,0.9659,0.0964]
test_scores_mean4 = [0.9689,0.9704,0.9697,0.9656,0.9657]
test_scores_mean5 = [0.9509,0.9593,0.9618,0.9631,0.966]
pca_values = [0.9468, 0.9522, 0.9558, 0.9602, 0.9608, 0.962, 0.9622, 0.9616]
plt.title("Testing Curve for SVC using PCA")
plt.xlabel("Number of components")
plt.ylabel("Score")
plt.ylim(0.94, 0.975)
plt.plot(param_range, pca_values, label="Testing score",
color="navy")
#plt.plot(param_range, test_scores_mean4, label="Cross-validation score",
# color="navy")
plt.legend(loc="best")
plt.show() | 37.931818 | 77 | 0.729778 | import pandas as pd
import sklearn
from sklearn.neural_network import MLPClassifier, MLPRegressor
from sklearn.model_selection import train_test_split
from sklearn.svm import LinearSVC, SVC
from sklearn.multiclass import OneVsRestClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn import preprocessing
import numpy as np
import os, struct
from array import array as pyarray
from numpy import append, array, int8, uint8, zeros
from pylab import *
from numpy import *
from sklearn.model_selection import validation_curve
import matplotlib.pyplot as plt
param_range = [120,100,80,60,40,35,30,20]
train_scores_mean1 = [1,1,1,1,1]
train_scores_mean2 = [0.9706,0.9889,0.9962,0.9974,0.9987]
train_scores_mean3 = [1,1,1,1,1]
train_scores_mean4 = [1,1,1,1,1]
train_scores_mean5 = [1,1,1,1,1]
test_scores_mean1 = [0.9639,0.9667,0.9678,0.9681,0.9708]
test_scores_mean2 = [0.9302,0.9209,0.9137,0.9126,0.9111]
test_scores_mean3 = [0.965,0.9665,0.9679,0.9659,0.0964]
test_scores_mean4 = [0.9689,0.9704,0.9697,0.9656,0.9657]
test_scores_mean5 = [0.9509,0.9593,0.9618,0.9631,0.966]
pca_values = [0.9468, 0.9522, 0.9558, 0.9602, 0.9608, 0.962, 0.9622, 0.9616]
plt.title("Testing Curve for SVC using PCA")
plt.xlabel("Number of components")
plt.ylabel("Score")
plt.ylim(0.94, 0.975)
plt.plot(param_range, pca_values, label="Testing score",
color="navy")
plt.legend(loc="best")
plt.show() | true | true |
1c2dc2471b76dea8ec3d401a10cdefe4721804b6 | 795 | py | Python | employee/forms.py | FahadulShadhin/crudapp | cd82596a6261e15388c737e8399c3d20bb9c372a | [
"MIT"
] | null | null | null | employee/forms.py | FahadulShadhin/crudapp | cd82596a6261e15388c737e8399c3d20bb9c372a | [
"MIT"
] | 1 | 2022-01-03T06:37:17.000Z | 2022-01-03T13:09:11.000Z | employee/forms.py | FahadulShadhin/crudapp | cd82596a6261e15388c737e8399c3d20bb9c372a | [
"MIT"
] | 1 | 2022-03-23T17:02:22.000Z | 2022-03-23T17:02:22.000Z | from django import forms
from django.forms import ModelForm
from .models import Employee
class EmployeeForm(ModelForm):
class Meta:
model = Employee
fields = ('emp_name', 'emp_email', 'emp_contact', 'emp_role', 'emp_salary', 'image')
widgets = {
'emp_name': forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Name'}),
'emp_email': forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Email'}),
'emp_contact': forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Contact No.'}),
'emp_role': forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Role'}),
'emp_salary': forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Salary'}),
} | 46.764706 | 106 | 0.61761 | from django import forms
from django.forms import ModelForm
from .models import Employee
class EmployeeForm(ModelForm):
    """ModelForm for ``Employee`` with Bootstrap-styled text inputs."""

    class Meta:
        model = Employee
        fields = ('emp_name', 'emp_email', 'emp_contact', 'emp_role', 'emp_salary', 'image')
        # Each text field gets Bootstrap's ``form-control`` class plus a
        # placeholder; ``image`` keeps the model field's default widget.
        # NOTE(review): ``forms.EmailInput`` on emp_email would add HTML5
        # client-side validation -- confirm before changing markup.
        widgets = {
            'emp_name': forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Name'}),
            'emp_email': forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Email'}),
            'emp_contact': forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Contact No.'}),
            'emp_role': forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Role'}),
            'emp_salary': forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Salary'}),
        }
1c2dc38c10e4fa3324ca5f1d9f5ae10cfed1dc0b | 33,710 | py | Python | cartridge/shop/models.py | AlexHill/cartridge | cb8599d43600442a223a484dc75726bfbbec68a0 | [
"BSD-2-Clause"
] | null | null | null | cartridge/shop/models.py | AlexHill/cartridge | cb8599d43600442a223a484dc75726bfbbec68a0 | [
"BSD-2-Clause"
] | null | null | null | cartridge/shop/models.py | AlexHill/cartridge | cb8599d43600442a223a484dc75726bfbbec68a0 | [
"BSD-2-Clause"
] | null | null | null |
from __future__ import division, unicode_literals
from future.builtins import str, super
from future.utils import with_metaclass
from decimal import Decimal
from functools import reduce
from operator import iand, ior
from django.core.urlresolvers import reverse
from django.db import models, connection
from django.db.models.signals import m2m_changed
from django.db.models import CharField, Q
from django.db.models.base import ModelBase
from django.dispatch import receiver
from django.utils.timezone import now
from django.utils.translation import (ugettext, ugettext_lazy as _,
pgettext_lazy as __)
try:
from django.utils.encoding import force_text
except ImportError:
# Backward compatibility for Py2 and Django < 1.5
from django.utils.encoding import force_unicode as force_text
from mezzanine.conf import settings
from mezzanine.core.fields import FileField
from mezzanine.core.managers import DisplayableManager
from mezzanine.core.models import Displayable, RichText, Orderable, SiteRelated
from mezzanine.generic.fields import RatingField
from mezzanine.pages.models import Page
from mezzanine.utils.models import AdminThumbMixin, upload_to
from cartridge.shop import fields, managers
from cartridge.shop.utils import clear_session
class F(models.F):
    """
    Django 1.4's F objects don't support true division, which
    we need for Python 3.x. This should be removed when we
    drop support for Django 1.4.
    """
    def __truediv__(self, other):
        # Delegate to F._combine with the DIV connector, mirroring how
        # Django implements the other arithmetic operators.
        return self._combine(other, self.DIV, False)
class Priced(models.Model):
    """
    Abstract model with unit and sale price fields. Inherited by
    ``Product`` and ``ProductVariation`` models.
    """

    unit_price = fields.MoneyField(_("Unit price"))
    sale_id = models.IntegerField(null=True)
    sale_price = fields.MoneyField(_("Sale price"))
    sale_from = models.DateTimeField(_("Sale start"), blank=True, null=True)
    sale_to = models.DateTimeField(_("Sale end"), blank=True, null=True)
    sku = fields.SKUField(unique=True, blank=True, null=True)
    num_in_stock = models.IntegerField(_("Number in stock"), blank=True,
                                       null=True)

    class Meta:
        abstract = True

    def on_sale(self):
        """
        True when a sale price is set and the current time falls inside
        the (possibly open-ended) sale window.
        """
        if self.sale_price is None:
            return False
        moment = now()
        started = self.sale_from is None or self.sale_from < moment
        unfinished = self.sale_to is None or self.sale_to > moment
        return started and unfinished

    def has_price(self):
        """
        True when either an applicable sale price or a unit price exists.
        """
        return self.on_sale() or self.unit_price is not None

    def price(self):
        """
        The effective price: the sale price while on sale, otherwise the
        unit price, falling back to zero when neither is set.
        """
        if self.on_sale():
            return self.sale_price
        if self.unit_price is not None:
            return self.unit_price
        return Decimal("0")

    def copy_price_fields_to(self, obj_to):
        """
        Copy every concrete ``Priced`` field value onto ``obj_to`` and
        save it. Keeps the denormalised price fields on ``Product`` in
        sync with its default variation.
        """
        concrete = (f for f in Priced._meta.fields
                    if not isinstance(f, models.AutoField))
        for field in concrete:
            setattr(obj_to, field.name, getattr(self, field.name))
        obj_to.save()
class Product(Displayable, Priced, RichText, AdminThumbMixin):
    """
    Container model for a product that stores information common to
    all of its variations such as the product's title and description.

    The inherited ``Priced`` fields and ``image`` are denormalised
    copies of the default variation's values (see
    ``copy_default_variation``).
    """

    available = models.BooleanField(_("Available for purchase"),
                                    default=False)
    # Path of the default variation's image; used for admin thumbnails.
    image = CharField(_("Image"), max_length=100, blank=True, null=True)
    categories = models.ManyToManyField("Category", blank=True,
                                        verbose_name=_("Product categories"))
    date_added = models.DateTimeField(_("Date added"), auto_now_add=True,
                                      null=True)
    related_products = models.ManyToManyField("self",
                             verbose_name=_("Related products"), blank=True)
    upsell_products = models.ManyToManyField("self",
                             verbose_name=_("Upsell products"), blank=True)
    rating = RatingField(verbose_name=_("Rating"))

    objects = DisplayableManager()

    admin_thumb_field = "image"
    # Weight search matches on variation SKUs.
    search_fields = {"variations__sku": 100}

    class Meta:
        verbose_name = _("Product")
        verbose_name_plural = _("Products")

    def save(self, *args, **kwargs):
        """
        Copies the price fields to the default variation when
        ``SHOP_USE_VARIATIONS`` is False, and the product is
        updated via the admin change list.
        """
        # Only sync on update - on first save no variation exists yet.
        updating = self.id is not None
        super(Product, self).save(*args, **kwargs)
        if updating and not settings.SHOP_USE_VARIATIONS:
            default = self.variations.get(default=True)
            self.copy_price_fields_to(default)

    @models.permalink
    def get_absolute_url(self):
        return ("shop_product", (), {"slug": self.slug})

    def copy_default_variation(self):
        """
        Copies the price and image fields from the default variation
        when the product is updated via the change view.
        """
        default = self.variations.get(default=True)
        default.copy_price_fields_to(self)
        if default.image:
            self.image = default.image.file.name
        self.save()
class ProductImage(Orderable):
    """
    An image for a product - a relationship is also defined with the
    product's variations so that each variation can potentially have
    it own image, while the relationship between the ``Product`` and
    ``ProductImage`` models ensures there is a single set of images
    for the product.
    """

    file = models.ImageField(_("Image"),
        upload_to=upload_to("shop.ProductImage.file", "product"))
    description = CharField(_("Description"), blank=True, max_length=100)
    product = models.ForeignKey("Product", related_name="images")

    class Meta:
        verbose_name = _("Image")
        verbose_name_plural = _("Images")
        order_with_respect_to = "product"

    def __unicode__(self):
        # Prefer the human-readable description, fall back to the file
        # name, and finally to an empty string - the same falsy chain
        # the original spelled out with successive ``if not value``.
        return self.description or self.file.name or ""
class ProductOption(models.Model):
    """
    A selectable option for a product such as size or colour.
    """
    # One of the option types configured in the
    # ``SHOP_OPTION_TYPE_CHOICES`` setting.
    type = models.IntegerField(_("Type"),
        choices=settings.SHOP_OPTION_TYPE_CHOICES)
    name = fields.OptionField(_("Name"))

    objects = managers.ProductOptionManager()

    def __unicode__(self):
        # e.g. "Size: Large"
        return "%s: %s" % (self.get_type_display(), self.name)

    class Meta:
        verbose_name = _("Product option")
        verbose_name_plural = _("Product options")
class ProductVariationMetaclass(ModelBase):
    """
    Metaclass for the ``ProductVariation`` model that dynamically
    assigns an ``fields.OptionField`` for each option in the
    ``SHOP_OPTION_TYPE_CHOICES`` setting.
    """
    def __new__(cls, name, bases, attrs):
        # Only assign new attrs if not a proxy model.
        if not ("Meta" in attrs and getattr(attrs["Meta"], "proxy", False)):
            # One model field per configured option type, named
            # "option<type-id>" (e.g. "option1").
            for option in settings.SHOP_OPTION_TYPE_CHOICES:
                attrs["option%s" % option[0]] = fields.OptionField(option[1])
        args = (cls, name, bases, attrs)
        return super(ProductVariationMetaclass, cls).__new__(*args)
class ProductVariation(with_metaclass(ProductVariationMetaclass, Priced)):
    """
    A combination of selected options from
    ``SHOP_OPTION_TYPE_CHOICES`` for a ``Product`` instance.

    Option fields ("option1", "option2", ...) are added dynamically by
    ``ProductVariationMetaclass``.
    """

    product = models.ForeignKey("Product", related_name="variations")
    # The default variation's price/stock values are denormalised onto
    # the product (see Product.copy_default_variation).
    default = models.BooleanField(_("Default"), default=False)
    image = models.ForeignKey("ProductImage", verbose_name=_("Image"),
                              null=True, blank=True)

    objects = managers.ProductVariationManager()

    class Meta:
        ordering = ("-default",)

    def __unicode__(self):
        """
        Display the option names and values for the variation.
        """
        options = []
        for field in self.option_fields():
            name = getattr(self, field.name)
            if name is not None:
                option = u"%s: %s" % (field.verbose_name, name)
                options.append(option)
        result = u"%s %s" % (str(self.product), u", ".join(options))
        return result.strip()

    def save(self, *args, **kwargs):
        """
        Use the variation's ID as the SKU when the variation is first
        created.
        """
        super(ProductVariation, self).save(*args, **kwargs)
        if not self.sku:
            # A second save is required since the ID isn't known until
            # the first save assigns it.
            self.sku = self.id
            self.save()

    def get_absolute_url(self):
        # Variations have no page of their own; link to the product.
        return self.product.get_absolute_url()

    @classmethod
    def option_fields(cls):
        """
        Returns each of the model fields that are dynamically created
        from ``SHOP_OPTION_TYPE_CHOICES`` in
        ``ProductVariationMetaclass``.
        """
        all_fields = cls._meta.fields
        return [f for f in all_fields if isinstance(f, fields.OptionField)]

    def options(self):
        """
        Returns the field values of each of the model fields that are
        dynamically created from ``SHOP_OPTION_TYPE_CHOICES`` in
        ``ProductVariationMetaclass``.
        """
        return [getattr(self, field.name) for field in self.option_fields()]

    def live_num_in_stock(self):
        """
        Returns the live number in stock, which is
        ``self.num_in_stock - num in carts``. Also caches the value
        for subsequent lookups.
        """
        if self.num_in_stock is None:
            # Stock control disabled for this variation.
            return None
        if not hasattr(self, "_cached_num_in_stock"):
            num_in_stock = self.num_in_stock
            # Subtract quantities currently held in active carts.
            carts = Cart.objects.current()
            items = CartItem.objects.filter(sku=self.sku, cart__in=carts)
            aggregate = items.aggregate(quantity_sum=models.Sum("quantity"))
            num_in_carts = aggregate["quantity_sum"]
            if num_in_carts is not None:
                num_in_stock = num_in_stock - num_in_carts
            self._cached_num_in_stock = num_in_stock
        return self._cached_num_in_stock

    def has_stock(self, quantity=1):
        """
        Returns ``True`` if the given quantity is in stock, by checking
        against ``live_num_in_stock``. ``True`` is returned when
        ``num_in_stock`` is ``None`` which is how stock control is
        disabled.
        """
        live = self.live_num_in_stock()
        return live is None or quantity == 0 or live >= quantity

    def update_stock(self, quantity):
        """
        Update the stock amount - called when an order is complete.
        Also update the denormalised stock amount of the product if
        this is the default variation.
        """
        if self.num_in_stock is not None:
            # ``quantity`` is negative for purchases - see
            # ``Order.complete`` which passes ``item.quantity * -1``.
            self.num_in_stock += quantity
            self.save()
            if self.default:
                self.product.num_in_stock = self.num_in_stock
                self.product.save()
class Category(Page, RichText):
    """
    A category of products on the website.

    A category selects products either explicitly (``products``) or via
    filters built from ``options``, ``sale`` and the price range; see
    ``filters()``.
    """

    featured_image = FileField(verbose_name=_("Featured Image"),
        upload_to=upload_to("shop.Category.featured_image", "shop"),
        format="Image", max_length=255, null=True, blank=True)
    products = models.ManyToManyField("Product", blank=True,
                                     verbose_name=_("Products"),
                                     through=Product.categories.through)
    options = models.ManyToManyField("ProductOption", blank=True,
                                     verbose_name=_("Product options"),
                                     related_name="product_options")
    sale = models.ForeignKey("Sale", verbose_name=_("Sale"),
                             blank=True, null=True)
    price_min = fields.MoneyField(_("Minimum price"), blank=True, null=True)
    price_max = fields.MoneyField(_("Maximum price"), blank=True, null=True)
    combined = models.BooleanField(_("Combined"), default=True,
        help_text=_("If checked, "
            "products must match all specified filters, otherwise products "
            "can match any specified filter."))

    class Meta:
        verbose_name = _("Product category")
        verbose_name_plural = _("Product categories")

    def filters(self):
        """
        Returns product filters as a Q object for the category.
        """
        # Build a list of Q objects to filter variations by.
        filters = []
        # Build a lookup dict of selected options for variations.
        options = self.options.as_fields()
        if options:
            lookup = dict([("%s__in" % k, v) for k, v in options.items()])
            filters.append(Q(**lookup))
        # Q objects used against variations to ensure sale date is
        # valid when filtering by sale, or sale price.
        n = now()
        valid_sale_from = Q(sale_from__isnull=True) | Q(sale_from__lte=n)
        valid_sale_to = Q(sale_to__isnull=True) | Q(sale_to__gte=n)
        valid_sale_date = valid_sale_from & valid_sale_to
        # Filter by variations with the selected sale if the sale date
        # is valid.
        if self.sale_id:
            filters.append(Q(sale_id=self.sale_id) & valid_sale_date)
        # If a price range is specified, use either the unit price or
        # a sale price if the sale date is valid.
        if self.price_min or self.price_max:
            prices = []
            if self.price_min:
                sale = Q(sale_price__gte=self.price_min) & valid_sale_date
                prices.append(Q(unit_price__gte=self.price_min) | sale)
            if self.price_max:
                sale = Q(sale_price__lte=self.price_max) & valid_sale_date
                prices.append(Q(unit_price__lte=self.price_max) | sale)
            filters.append(reduce(iand, prices))
        # Turn the variation filters into a product filter, combined
        # with AND or OR depending on the ``combined`` flag.
        operator = iand if self.combined else ior
        products = Q(id__in=self.products.only("id"))
        if filters:
            filters = reduce(operator, filters)
            variations = ProductVariation.objects.filter(filters)
            filters = [Q(variations__in=variations)]
            # If filters exist, checking that products have been
            # selected is neccessary as combining the variations
            # with an empty ID list lookup and ``AND`` will always
            # result in an empty result.
            if self.products.count() > 0:
                filters.append(products)
            return reduce(operator, filters)
        return products
class Order(SiteRelated):
    """
    An order created at checkout: a snapshot of billing/shipping
    details and totals, plus a copy of the cart's items (``OrderItem``).
    """

    billing_detail_first_name = CharField(_("First name"), max_length=100)
    billing_detail_last_name = CharField(_("Last name"), max_length=100)
    billing_detail_street = CharField(_("Street"), max_length=100)
    billing_detail_city = CharField(_("City/Suburb"), max_length=100)
    billing_detail_state = CharField(_("State/Region"), max_length=100)
    billing_detail_postcode = CharField(_("Zip/Postcode"), max_length=10)
    billing_detail_country = CharField(_("Country"), max_length=100)
    billing_detail_phone = CharField(_("Phone"), max_length=20)
    billing_detail_email = models.EmailField(_("Email"))
    shipping_detail_first_name = CharField(_("First name"), max_length=100)
    shipping_detail_last_name = CharField(_("Last name"), max_length=100)
    shipping_detail_street = CharField(_("Street"), max_length=100)
    shipping_detail_city = CharField(_("City/Suburb"), max_length=100)
    shipping_detail_state = CharField(_("State/Region"), max_length=100)
    shipping_detail_postcode = CharField(_("Zip/Postcode"), max_length=10)
    shipping_detail_country = CharField(_("Country"), max_length=100)
    shipping_detail_phone = CharField(_("Phone"), max_length=20)
    additional_instructions = models.TextField(_("Additional instructions"),
                                               blank=True)
    time = models.DateTimeField(_("Time"), auto_now_add=True, null=True)
    # Session key of the checkout session that created the order.
    key = CharField(max_length=40)
    user_id = models.IntegerField(blank=True, null=True)
    shipping_type = CharField(_("Shipping type"), max_length=50, blank=True)
    shipping_total = fields.MoneyField(_("Shipping total"))
    tax_type = CharField(_("Tax type"), max_length=50, blank=True)
    tax_total = fields.MoneyField(_("Tax total"))
    item_total = fields.MoneyField(_("Item total"))
    discount_code = fields.DiscountCodeField(_("Discount code"), blank=True)
    discount_total = fields.MoneyField(_("Discount total"))
    total = fields.MoneyField(_("Order total"))
    transaction_id = CharField(_("Transaction ID"), max_length=255, null=True,
                               blank=True)

    status = models.IntegerField(_("Status"),
                            choices=settings.SHOP_ORDER_STATUS_CHOICES,
                            default=settings.SHOP_ORDER_STATUS_CHOICES[0][0])

    objects = managers.OrderManager()

    # These are fields that are stored in the session. They're copied to
    # the order in setup() and removed from the session in complete().
    session_fields = ("shipping_type", "shipping_total", "discount_total",
                      "discount_code", "tax_type", "tax_total")

    class Meta:
        verbose_name = __("commercial meaning", "Order")
        verbose_name_plural = __("commercial meaning", "Orders")
        ordering = ("-id",)

    def __unicode__(self):
        return "#%s %s %s" % (self.id, self.billing_name(), self.time)

    def billing_name(self):
        # Full name from the billing details, used in listings/__unicode__.
        return "%s %s" % (self.billing_detail_first_name,
                          self.billing_detail_last_name)

    def setup(self, request):
        """
        Set order fields that are stored in the session, item_total
        and total based on the given cart, and copy the cart items
        to the order. Called in the final step of the checkout process
        prior to the payment handler being called.
        """
        self.key = request.session.session_key
        self.user_id = request.user.id
        for field in self.session_fields:
            if field in request.session:
                setattr(self, field, request.session[field])
        self.total = self.item_total = request.cart.total_price()
        if self.shipping_total is not None:
            # str() first since the session value may be a float.
            self.shipping_total = Decimal(str(self.shipping_total))
            self.total += self.shipping_total
        if self.discount_total is not None:
            self.total -= Decimal(self.discount_total)
        if self.tax_total is not None:
            self.total += Decimal(self.tax_total)
        self.save()  # We need an ID before we can add related items.
        for item in request.cart:
            # Copy only the fields shared via SelectedProduct onto the
            # new OrderItem rows.
            product_fields = [f.name for f in SelectedProduct._meta.fields]
            item = dict([(f, getattr(item, f)) for f in product_fields])
            self.items.create(**item)

    def complete(self, request):
        """
        Remove order fields that are stored in the session, reduce the
        stock level for the items in the order, decrement the uses
        remaining count for discount code (if applicable) and then
        delete the cart.
        """
        self.save()  # Save the transaction ID.
        discount_code = request.session.get('discount_code')
        clear_session(request, "order", *self.session_fields)
        for item in request.cart:
            try:
                variation = ProductVariation.objects.get(sku=item.sku)
            except ProductVariation.DoesNotExist:
                # Variation removed since it was carted - nothing to
                # restock or record.
                pass
            else:
                # Negative quantity: stock decreases on purchase.
                variation.update_stock(item.quantity * -1)
                variation.product.actions.purchased()
        if discount_code:
            DiscountCode.objects.active().filter(code=discount_code).update(
                uses_remaining=models.F('uses_remaining') - 1)
        request.cart.delete()

    def details_as_dict(self):
        """
        Returns the billing_detail_* and shipping_detail_* fields
        as two name/value pairs of fields in a dict for each type.
        Used in template contexts for rendering each type as groups
        of names/values.
        """
        context = {}
        for fieldset in ("billing_detail", "shipping_detail"):
            fields = [(f.verbose_name, getattr(self, f.name)) for f in
                      self._meta.fields if f.name.startswith(fieldset)]
            context["order_%s_fields" % fieldset] = fields
        return context

    def invoice(self):
        """
        Returns the HTML for a link to the PDF invoice for use in the
        order listing view of the admin.
        """
        url = reverse("shop_invoice", args=(self.id,))
        text = ugettext("Download PDF invoice")
        return "<a href='%s?format=pdf'>%s</a>" % (url, text)
    invoice.allow_tags = True
    invoice.short_description = ""
class Cart(models.Model):
    """
    A shopping cart - a timestamped container for ``CartItem`` rows;
    totals and SKU lists are computed from the items.
    """

    # Used by ``CartManager`` to identify/expire current carts.
    last_updated = models.DateTimeField(_("Last updated"), null=True)

    objects = managers.CartManager()

    def __iter__(self):
        """
        Allow the cart to be iterated giving access to the cart's items,
        ensuring the items are only retrieved once and cached.
        """
        if not hasattr(self, "_cached_items"):
            self._cached_items = self.items.all()
        return iter(self._cached_items)

    def add_item(self, variation, quantity):
        """
        Increase quantity of existing item if SKU matches, otherwise create
        new.
        """
        # NOTE: unit_price is part of the get_or_create lookup, so if
        # the variation's price has changed since it was carted a new
        # line item is created rather than the existing one updated.
        kwargs = {"sku": variation.sku, "unit_price": variation.price()}
        item, created = self.items.get_or_create(**kwargs)
        if created:
            # Denormalise display data onto the item so the cart can be
            # rendered without re-querying variations.
            item.description = force_text(variation)
            item.unit_price = variation.price()
            item.url = variation.product.get_absolute_url()
            image = variation.image
            if image is not None:
                item.image = force_text(image.file)
            variation.product.actions.added_to_cart()
        item.quantity += quantity
        item.save()

    def has_items(self):
        """
        Template helper function - does the cart have items?
        """
        return len(list(self)) > 0

    def total_quantity(self):
        """
        Template helper function - sum of all item quantities.
        """
        return sum([item.quantity for item in self])

    def total_price(self):
        """
        Template helper function - sum of all costs of item quantities.
        """
        return sum([item.total_price for item in self])

    def skus(self):
        """
        Returns a list of skus for items in the cart. Used by
        ``upsell_products`` and ``calculate_discount``.
        """
        return [item.sku for item in self]

    def upsell_products(self):
        """
        Returns the upsell products for each of the items in the cart.
        """
        if not settings.SHOP_USE_UPSELL_PRODUCTS:
            return []
        cart = Product.objects.filter(variations__sku__in=self.skus())
        published_products = Product.objects.published()
        for_cart = published_products.filter(upsell_products__in=cart)
        # Exclude products already in the cart.
        with_cart_excluded = for_cart.exclude(variations__sku__in=self.skus())
        return list(with_cart_excluded.distinct())

    def calculate_discount(self, discount):
        """
        Calculates the discount based on the items in a cart, some
        might have the discount, others might not.
        """
        # Discount applies to cart total if not product specific.
        products = discount.all_products()
        if products.count() == 0:
            return discount.calculate(self.total_price())
        total = Decimal("0")
        # Create a list of skus in the cart that are applicable to
        # the discount, and total the discount for applicable items.
        lookup = {"product__in": products, "sku__in": self.skus()}
        discount_variations = ProductVariation.objects.filter(**lookup)
        discount_skus = discount_variations.values_list("sku", flat=True)
        for item in self:
            if item.sku in discount_skus:
                total += discount.calculate(item.unit_price) * item.quantity
        return total
class SelectedProduct(models.Model):
    """
    Abstract model representing a "selected" product in a cart or order.
    """

    sku = fields.SKUField()
    description = CharField(_("Description"), max_length=2000)
    quantity = models.IntegerField(_("Quantity"), default=0)
    unit_price = fields.MoneyField(_("Unit price"), default=Decimal("0"))
    total_price = fields.MoneyField(_("Total price"), default=Decimal("0"))

    class Meta:
        abstract = True

    def __unicode__(self):
        return ""

    def save(self, *args, **kwargs):
        """
        Recalculate the total price from the quantity before saving.
        A previously saved row whose quantity has dropped to zero (as
        can happen via the cart page) is deleted instead of saved.
        """
        removable = self.id and self.quantity <= 0
        if removable:
            self.delete()
        else:
            self.total_price = self.unit_price * self.quantity
            super(SelectedProduct, self).save(*args, **kwargs)
class CartItem(SelectedProduct):
    """
    A selected product in a cart, carrying denormalised URL and image
    values copied from the variation in ``Cart.add_item``.
    """
    cart = models.ForeignKey("Cart", related_name="items")
    url = CharField(max_length=2000)
    image = CharField(max_length=200, null=True)

    def get_absolute_url(self):
        return self.url
class OrderItem(SelectedProduct):
    """
    A selected product in a completed order.
    """
    # Rows are created from cart items in ``Order.setup``.
    order = models.ForeignKey("Order", related_name="items")
class ProductAction(models.Model):
    """
    Records an incremental value for an action against a product such
    as adding to cart or purchasing, for sales reporting and
    calculating popularity. Not yet used but will be used for product
    popularity and sales reporting.
    """

    product = models.ForeignKey("Product", related_name="actions")
    # Integer bucket the counts belong to - presumably a time period
    # assigned by ``ProductActionManager``; confirm against the manager.
    timestamp = models.IntegerField()
    total_cart = models.IntegerField(default=0)
    total_purchase = models.IntegerField(default=0)

    objects = managers.ProductActionManager()

    class Meta:
        # One row per product per bucket.
        unique_together = ("product", "timestamp")
class Discount(models.Model):
    """
    Abstract model representing one of several types of monetary
    reductions, as well as a date range they're applicable for, and
    the products and products in categories that the reduction is
    applicable for.

    Subclasses apply the first of the three ``discount_*`` fields that
    is set (see ``Sale.update_products`` and ``DiscountCode.calculate``).
    """

    title = CharField(_("Title"), max_length=100)
    active = models.BooleanField(_("Active"), default=False)
    products = models.ManyToManyField("Product", blank=True,
                                     verbose_name=_("Products"))
    categories = models.ManyToManyField("Category", blank=True,
                                        related_name="%(class)s_related",
                                        verbose_name=_("Categories"))
    discount_deduct = fields.MoneyField(_("Reduce by amount"))
    discount_percent = fields.PercentageField(_("Reduce by percent"),
                                              max_digits=5, decimal_places=2,
                                              blank=True, null=True)
    discount_exact = fields.MoneyField(_("Reduce to amount"))
    valid_from = models.DateTimeField(_("Valid from"), blank=True, null=True)
    valid_to = models.DateTimeField(_("Valid to"), blank=True, null=True)

    class Meta:
        abstract = True

    def __unicode__(self):
        return self.title

    def all_products(self):
        """
        Return the selected products as well as the products in the
        selected categories.
        """
        filters = [category.filters() for category in self.categories.all()]
        filters = reduce(ior, filters + [Q(id__in=self.products.only("id"))])
        return Product.objects.filter(filters).distinct()
class Sale(Discount):
    """
    Stores sales field values for price and date range which when saved
    are then applied across products and variations according to the
    selected categories and products for the sale.
    """

    class Meta:
        verbose_name = _("Sale")
        verbose_name_plural = _("Sales")

    def save(self, *args, **kwargs):
        # Re-apply the sale to products on every save.
        super(Sale, self).save(*args, **kwargs)
        self.update_products()

    def update_products(self):
        """
        Apply sales field value to products and variations according
        to the selected categories and products for the sale.
        """
        # Always clear previously applied values first, so deactivating
        # the sale removes it from products.
        self._clear()
        if self.active:
            extra_filter = {}
            if self.discount_deduct is not None:
                # Don't apply to prices that would be negative
                # after deduction.
                extra_filter["unit_price__gt"] = self.discount_deduct
                sale_price = models.F("unit_price") - self.discount_deduct
            elif self.discount_percent is not None:
                # Uses the local ``F`` subclass for true division on
                # Python 3 with Django 1.4.
                sale_price = models.F("unit_price") - (
                    F("unit_price") / "100.0" * self.discount_percent)
            elif self.discount_exact is not None:
                # Don't apply to prices that are cheaper than the sale
                # amount.
                extra_filter["unit_price__gt"] = self.discount_exact
                sale_price = self.discount_exact
            else:
                # No discount value set - nothing to apply.
                return
            products = self.all_products()
            variations = ProductVariation.objects.filter(product__in=products)
            for priced_objects in (products, variations):
                update = {"sale_id": self.id,
                          "sale_price": sale_price,
                          "sale_to": self.valid_to,
                          "sale_from": self.valid_from}
                using = priced_objects.db
                if "mysql" not in settings.DATABASES[using]["ENGINE"]:
                    priced_objects.filter(**extra_filter).update(**update)
                else:
                    # Work around for MySQL which does not allow update
                    # to operate on subquery where the FROM clause would
                    # have it operate on the same table, so we update
                    # each instance individually:
                    # http://dev.mysql.com/doc/refman/5.0/en/subquery-errors.html
                    # Also MySQL may raise a 'Data truncated' warning here
                    # when doing a calculation that exceeds the precision
                    # of the price column. In this case it's safe to ignore
                    # it and the calculation will still be applied, but
                    # we need to massage transaction management in order
                    # to continue successfully:
                    # https://groups.google.com/forum/#!topic/django-developers/ACLQRF-71s8
                    for priced in priced_objects.filter(**extra_filter):
                        for field, value in list(update.items()):
                            setattr(priced, field, value)
                        try:
                            priced.save()
                        except Warning:
                            connection.set_rollback(False)

    def delete(self, *args, **kwargs):
        """
        Clear this sale from products when deleting the sale.
        """
        self._clear()
        super(Sale, self).delete(*args, **kwargs)

    def _clear(self):
        """
        Clears previously applied sale field values from products prior
        to updating the sale, when deactivating it or deleting it.
        """
        update = {"sale_id": None, "sale_price": None,
                  "sale_from": None, "sale_to": None}
        for priced_model in (Product, ProductVariation):
            priced_model.objects.filter(sale_id=self.id).update(**update)
@receiver(m2m_changed, sender=Sale.products.through)
def sale_update_products(sender, instance, action, *args, **kwargs):
    """
    m2m_changed handler that re-applies the sale once its products have
    been assigned - needed since the M2M rows don't yet exist when the
    sale is first saved.
    """
    if action != "post_add":
        return
    instance.update_products()
class DiscountCode(Discount):
    """
    A code that can be entered at the checkout process to have a
    discount applied to the total purchase amount.
    """

    code = fields.DiscountCodeField(_("Code"), unique=True)
    min_purchase = fields.MoneyField(_("Minimum total purchase"))
    free_shipping = models.BooleanField(_("Free shipping"), default=False)
    uses_remaining = models.IntegerField(_("Uses remaining"), blank=True,
        null=True, help_text=_("If you wish to limit the number of times a "
        "code may be used, set this value. It will be decremented upon "
        "each use."))

    objects = managers.DiscountCodeManager()

    def calculate(self, amount):
        """
        Calculates the discount for the given amount.

        Returns the fixed deduction when one is set and doesn't exceed
        ``amount``, otherwise the configured percentage of ``amount``,
        otherwise zero. Always returns a ``Decimal`` so callers (e.g.
        ``Cart.calculate_discount``) can accumulate it with other money
        values without mixing types.
        """
        if self.discount_deduct is not None:
            # Don't apply to amounts that would be negative after
            # deduction.
            if self.discount_deduct <= amount:
                return self.discount_deduct
        elif self.discount_percent is not None:
            return amount / Decimal("100") * self.discount_percent
        # Fix: previously returned the int 0 here, inconsistent with the
        # Decimal returned by the other branches and by Priced.price().
        # Decimal("0") == 0, so callers are unaffected.
        return Decimal("0")

    class Meta:
        verbose_name = _("Discount code")
        verbose_name_plural = _("Discount codes")
| 39.061414 | 79 | 0.625037 |
from __future__ import division, unicode_literals
from future.builtins import str, super
from future.utils import with_metaclass
from decimal import Decimal
from functools import reduce
from operator import iand, ior
from django.core.urlresolvers import reverse
from django.db import models, connection
from django.db.models.signals import m2m_changed
from django.db.models import CharField, Q
from django.db.models.base import ModelBase
from django.dispatch import receiver
from django.utils.timezone import now
from django.utils.translation import (ugettext, ugettext_lazy as _,
pgettext_lazy as __)
try:
from django.utils.encoding import force_text
except ImportError:
from django.utils.encoding import force_unicode as force_text
from mezzanine.conf import settings
from mezzanine.core.fields import FileField
from mezzanine.core.managers import DisplayableManager
from mezzanine.core.models import Displayable, RichText, Orderable, SiteRelated
from mezzanine.generic.fields import RatingField
from mezzanine.pages.models import Page
from mezzanine.utils.models import AdminThumbMixin, upload_to
from cartridge.shop import fields, managers
from cartridge.shop.utils import clear_session
class F(models.F):
    # Duplicate (comment-stripped) copy of the F subclass above:
    # backports true division to Django 1.4's F expressions for Py3.
    def __truediv__(self, other):
        return self._combine(other, self.DIV, False)
class Priced(models.Model):
    """
    Abstract model with unit/sale price and stock fields shared by
    ``Product`` and ``ProductVariation``.
    """
    unit_price = fields.MoneyField(_("Unit price"))
    sale_id = models.IntegerField(null=True)
    sale_price = fields.MoneyField(_("Sale price"))
    sale_from = models.DateTimeField(_("Sale start"), blank=True, null=True)
    sale_to = models.DateTimeField(_("Sale end"), blank=True, null=True)
    sku = fields.SKUField(unique=True, blank=True, null=True)
    num_in_stock = models.IntegerField(_("Number in stock"), blank=True,
        null=True)
    class Meta:
        abstract = True
    def on_sale(self):
        # Sale price set and now() inside the (open-ended) sale window.
        n = now()
        valid_from = self.sale_from is None or self.sale_from < n
        valid_to = self.sale_to is None or self.sale_to > n
        return self.sale_price is not None and valid_from and valid_to
    def has_price(self):
        # True when a sale price applies or a unit price is set.
        return self.on_sale() or self.unit_price is not None
    def price(self):
        # Sale price when on sale, else unit price, else zero.
        if self.on_sale():
            return self.sale_price
        elif self.has_price():
            return self.unit_price
        return Decimal("0")
    def copy_price_fields_to(self, obj_to):
        # Copy all non-auto Priced fields onto obj_to and save it.
        for field in Priced._meta.fields:
            if not isinstance(field, models.AutoField):
                setattr(obj_to, field.name, getattr(self, field.name))
        obj_to.save()
class Product(Displayable, Priced, RichText, AdminThumbMixin):
    """
    Container model for a product; price/image fields are denormalised
    copies of the default variation's values.
    """
    available = models.BooleanField(_("Available for purchase"),
        default=False)
    image = CharField(_("Image"), max_length=100, blank=True, null=True)
    categories = models.ManyToManyField("Category", blank=True,
        verbose_name=_("Product categories"))
    date_added = models.DateTimeField(_("Date added"), auto_now_add=True,
        null=True)
    related_products = models.ManyToManyField("self",
        verbose_name=_("Related products"), blank=True)
    upsell_products = models.ManyToManyField("self",
        verbose_name=_("Upsell products"), blank=True)
    rating = RatingField(verbose_name=_("Rating"))
    objects = DisplayableManager()
    admin_thumb_field = "image"
    search_fields = {"variations__sku": 100}
    class Meta:
        verbose_name = _("Product")
        verbose_name_plural = _("Products")
    def save(self, *args, **kwargs):
        # On update with variations disabled, sync prices to the
        # default variation.
        updating = self.id is not None
        super(Product, self).save(*args, **kwargs)
        if updating and not settings.SHOP_USE_VARIATIONS:
            default = self.variations.get(default=True)
            self.copy_price_fields_to(default)
    @models.permalink
    def get_absolute_url(self):
        return ("shop_product", (), {"slug": self.slug})
    def copy_default_variation(self):
        # Pull prices (and image path) from the default variation.
        default = self.variations.get(default=True)
        default.copy_price_fields_to(self)
        if default.image:
            self.image = default.image.file.name
        self.save()
# An image attached to a product; orderable per product in the admin.
class ProductImage(Orderable):
file = models.ImageField(_("Image"),
upload_to=upload_to("shop.ProductImage.file", "product"))
description = CharField(_("Description"), blank=True, max_length=100)
product = models.ForeignKey("Product", related_name="images")
class Meta:
verbose_name = _("Image")
verbose_name_plural = _("Images")
order_with_respect_to = "product"
# Display the description, falling back to the file name, then "".
def __unicode__(self):
value = self.description
if not value:
value = self.file.name
if not value:
value = ""
return value
# A selectable product option value (e.g. a size or a colour); the option
# type is an index into the SHOP_OPTION_TYPE_CHOICES setting.
class ProductOption(models.Model):
type = models.IntegerField(_("Type"),
choices=settings.SHOP_OPTION_TYPE_CHOICES)
name = fields.OptionField(_("Name"))
objects = managers.ProductOptionManager()
def __unicode__(self):
return "%s: %s" % (self.get_type_display(), self.name)
class Meta:
verbose_name = _("Product option")
verbose_name_plural = _("Product options")
# Metaclass that injects one OptionField per configured option type
# (named option1, option2, ...) onto concrete ProductVariation models.
class ProductVariationMetaclass(ModelBase):
def __new__(cls, name, bases, attrs):
# Proxy models must not redefine fields, so skip injection for them.
if not ("Meta" in attrs and getattr(attrs["Meta"], "proxy", False)):
for option in settings.SHOP_OPTION_TYPE_CHOICES:
attrs["option%s" % option[0]] = fields.OptionField(option[1])
args = (cls, name, bases, attrs)
return super(ProductVariationMetaclass, cls).__new__(*args)
# A purchasable combination of options for a product.  Inherits pricing
# fields from Priced; option fields are injected by the metaclass.
class ProductVariation(with_metaclass(ProductVariationMetaclass, Priced)):
product = models.ForeignKey("Product", related_name="variations")
default = models.BooleanField(_("Default"), default=False)
image = models.ForeignKey("ProductImage", verbose_name=_("Image"),
null=True, blank=True)
objects = managers.ProductVariationManager()
class Meta:
# Default variation sorts first.
ordering = ("-default",)
# Render as "<product> option: value, option: value" for set options.
def __unicode__(self):
options = []
for field in self.option_fields():
name = getattr(self, field.name)
if name is not None:
option = u"%s: %s" % (field.verbose_name, name)
options.append(option)
result = u"%s %s" % (str(self.product), u", ".join(options))
return result.strip()
# Fall back to the primary key as SKU when none was given; requires a
# second save since the id only exists after the first one.
def save(self, *args, **kwargs):
super(ProductVariation, self).save(*args, **kwargs)
if not self.sku:
self.sku = self.id
self.save()
def get_absolute_url(self):
return self.product.get_absolute_url()
# All metaclass-injected option fields on this model.
@classmethod
def option_fields(cls):
all_fields = cls._meta.fields
return [f for f in all_fields if isinstance(f, fields.OptionField)]
# Current values of the option fields, in field order.
def options(self):
return [getattr(self, field.name) for field in self.option_fields()]
# Stock remaining once quantities sitting in current carts are deducted;
# None means stock is untracked.  Result is cached per instance.
def live_num_in_stock(self):
if self.num_in_stock is None:
return None
if not hasattr(self, "_cached_num_in_stock"):
num_in_stock = self.num_in_stock
carts = Cart.objects.current()
items = CartItem.objects.filter(sku=self.sku, cart__in=carts)
aggregate = items.aggregate(quantity_sum=models.Sum("quantity"))
num_in_carts = aggregate["quantity_sum"]
if num_in_carts is not None:
num_in_stock = num_in_stock - num_in_carts
self._cached_num_in_stock = num_in_stock
return self._cached_num_in_stock
# True when untracked, when nothing is requested, or when enough remains.
def has_stock(self, quantity=1):
live = self.live_num_in_stock()
return live is None or quantity == 0 or live >= quantity
# Adjust tracked stock by quantity (negative for purchases) and mirror
# the value onto the product when this is the default variation.
def update_stock(self, quantity):
if self.num_in_stock is not None:
self.num_in_stock += quantity
self.save()
if self.default:
self.product.num_in_stock = self.num_in_stock
self.product.save()
# A shop category page whose product membership can be driven either by an
# explicit product list or by filters (options, sale, price range).
class Category(Page, RichText):
featured_image = FileField(verbose_name=_("Featured Image"),
upload_to=upload_to("shop.Category.featured_image", "shop"),
format="Image", max_length=255, null=True, blank=True)
products = models.ManyToManyField("Product", blank=True,
verbose_name=_("Products"),
through=Product.categories.through)
options = models.ManyToManyField("ProductOption", blank=True,
verbose_name=_("Product options"),
related_name="product_options")
sale = models.ForeignKey("Sale", verbose_name=_("Sale"),
blank=True, null=True)
price_min = fields.MoneyField(_("Minimum price"), blank=True, null=True)
price_max = fields.MoneyField(_("Maximum price"), blank=True, null=True)
# Controls whether the filters below are ANDed or ORed together.
combined = models.BooleanField(_("Combined"), default=True,
help_text=_("If checked, "
"products must match all specified filters, otherwise products "
"can match any specified filter."))
class Meta:
verbose_name = _("Product category")
verbose_name_plural = _("Product categories")
# Build a Q object selecting this category's products.  Option, sale and
# price filters are evaluated against ProductVariation rows first, then
# lifted to products via variations__in; explicit products are ORed/ANDed
# in according to ``combined``.
def filters(self):
filters = []
# Option filters: each selected option type becomes an __in lookup.
options = self.options.as_fields()
if options:
lookup = dict([("%s__in" % k, v) for k, v in options.items()])
filters.append(Q(**lookup))
# A sale/price match only counts while the sale window is open.
n = now()
valid_sale_from = Q(sale_from__isnull=True) | Q(sale_from__lte=n)
valid_sale_to = Q(sale_to__isnull=True) | Q(sale_to__gte=n)
valid_sale_date = valid_sale_from & valid_sale_to
if self.sale_id:
filters.append(Q(sale_id=self.sale_id) & valid_sale_date)
# Price range: unit price, or sale price while on sale.
if self.price_min or self.price_max:
prices = []
if self.price_min:
sale = Q(sale_price__gte=self.price_min) & valid_sale_date
prices.append(Q(unit_price__gte=self.price_min) | sale)
if self.price_max:
sale = Q(sale_price__lte=self.price_max) & valid_sale_date
prices.append(Q(unit_price__lte=self.price_max) | sale)
filters.append(reduce(iand, prices))
operator = iand if self.combined else ior
products = Q(id__in=self.products.only("id"))
if filters:
filters = reduce(operator, filters)
variations = ProductVariation.objects.filter(filters)
filters = [Q(variations__in=variations)]
if self.products.count() > 0:
filters.append(products)
return reduce(operator, filters)
return products
# A completed (or in-progress) checkout: denormalised billing/shipping
# details plus monetary totals, tied to the session key and optional user.
class Order(SiteRelated):
billing_detail_first_name = CharField(_("First name"), max_length=100)
billing_detail_last_name = CharField(_("Last name"), max_length=100)
billing_detail_street = CharField(_("Street"), max_length=100)
billing_detail_city = CharField(_("City/Suburb"), max_length=100)
billing_detail_state = CharField(_("State/Region"), max_length=100)
billing_detail_postcode = CharField(_("Zip/Postcode"), max_length=10)
billing_detail_country = CharField(_("Country"), max_length=100)
billing_detail_phone = CharField(_("Phone"), max_length=20)
billing_detail_email = models.EmailField(_("Email"))
shipping_detail_first_name = CharField(_("First name"), max_length=100)
shipping_detail_last_name = CharField(_("Last name"), max_length=100)
shipping_detail_street = CharField(_("Street"), max_length=100)
shipping_detail_city = CharField(_("City/Suburb"), max_length=100)
shipping_detail_state = CharField(_("State/Region"), max_length=100)
shipping_detail_postcode = CharField(_("Zip/Postcode"), max_length=10)
shipping_detail_country = CharField(_("Country"), max_length=100)
shipping_detail_phone = CharField(_("Phone"), max_length=20)
additional_instructions = models.TextField(_("Additional instructions"),
blank=True)
time = models.DateTimeField(_("Time"), auto_now_add=True, null=True)
# Session key of the purchasing session (see setup()).
key = CharField(max_length=40)
user_id = models.IntegerField(blank=True, null=True)
shipping_type = CharField(_("Shipping type"), max_length=50, blank=True)
shipping_total = fields.MoneyField(_("Shipping total"))
tax_type = CharField(_("Tax type"), max_length=50, blank=True)
tax_total = fields.MoneyField(_("Tax total"))
item_total = fields.MoneyField(_("Item total"))
discount_code = fields.DiscountCodeField(_("Discount code"), blank=True)
discount_total = fields.MoneyField(_("Discount total"))
total = fields.MoneyField(_("Order total"))
transaction_id = CharField(_("Transaction ID"), max_length=255, null=True,
blank=True)
status = models.IntegerField(_("Status"),
choices=settings.SHOP_ORDER_STATUS_CHOICES,
default=settings.SHOP_ORDER_STATUS_CHOICES[0][0])
objects = managers.OrderManager()
# Checkout values stored in the session: they are copied onto
# the order in setup() and removed from the session in complete().
session_fields = ("shipping_type", "shipping_total", "discount_total",
"discount_code", "tax_type", "tax_total")
class Meta:
verbose_name = __("commercial meaning", "Order")
verbose_name_plural = __("commercial meaning", "Orders")
ordering = ("-id",)
def __unicode__(self):
return "#%s %s %s" % (self.id, self.billing_name(), self.time)
def billing_name(self):
return "%s %s" % (self.billing_detail_first_name,
self.billing_detail_last_name)
# Populate the order from the request: session/user info, session-held
# checkout values, totals derived from the cart, then snapshot each cart
# item into an OrderItem.
def setup(self, request):
self.key = request.session.session_key
self.user_id = request.user.id
for field in self.session_fields:
if field in request.session:
setattr(self, field, request.session[field])
self.total = self.item_total = request.cart.total_price()
if self.shipping_total is not None:
self.shipping_total = Decimal(str(self.shipping_total))
self.total += self.shipping_total
if self.discount_total is not None:
self.total -= Decimal(self.discount_total)
if self.tax_total is not None:
self.total += Decimal(self.tax_total)
self.save() # We need an ID before we can add related items.
for item in request.cart:
product_fields = [f.name for f in SelectedProduct._meta.fields]
item = dict([(f, getattr(item, f)) for f in product_fields])
self.items.create(**item)
# Finalise the order after payment: clear checkout session state, deduct
# stock and record purchases per variation, burn one discount-code use,
# and delete the cart.
def complete(self, request):
self.save() # Save the transaction ID.
discount_code = request.session.get('discount_code')
clear_session(request, "order", *self.session_fields)
for item in request.cart:
try:
variation = ProductVariation.objects.get(sku=item.sku)
except ProductVariation.DoesNotExist:
pass
else:
variation.update_stock(item.quantity * -1)
variation.product.actions.purchased()
if discount_code:
DiscountCode.objects.active().filter(code=discount_code).update(
uses_remaining=models.F('uses_remaining') - 1)
request.cart.delete()
# Group (verbose_name, value) pairs of the billing/shipping fields for
# template rendering.
def details_as_dict(self):
context = {}
for fieldset in ("billing_detail", "shipping_detail"):
fields = [(f.verbose_name, getattr(self, f.name)) for f in
self._meta.fields if f.name.startswith(fieldset)]
context["order_%s_fields" % fieldset] = fields
return context
# Admin list-column link to the PDF invoice view.
def invoice(self):
url = reverse("shop_invoice", args=(self.id,))
text = ugettext("Download PDF invoice")
return "<a href='%s?format=pdf'>%s</a>" % (url, text)
invoice.allow_tags = True
invoice.short_description = ""
# A shopping cart tied to a session; iterating yields its CartItems (the
# queryset is cached per instance for the lifetime of the request).
class Cart(models.Model):
last_updated = models.DateTimeField(_("Last updated"), null=True)
objects = managers.CartManager()
def __iter__(self):
if not hasattr(self, "_cached_items"):
self._cached_items = self.items.all()
return iter(self._cached_items)
# Add ``quantity`` of a variation, creating the cart item (with a
# snapshot of description/price/url/image) on first add.
def add_item(self, variation, quantity):
kwargs = {"sku": variation.sku, "unit_price": variation.price()}
item, created = self.items.get_or_create(**kwargs)
if created:
item.description = force_text(variation)
item.unit_price = variation.price()
item.url = variation.product.get_absolute_url()
image = variation.image
if image is not None:
item.image = force_text(image.file)
variation.product.actions.added_to_cart()
item.quantity += quantity
item.save()
def has_items(self):
return len(list(self)) > 0
def total_quantity(self):
return sum([item.quantity for item in self])
def total_price(self):
return sum([item.total_price for item in self])
def skus(self):
return [item.sku for item in self]
# Published products marked as upsells for anything in the cart,
# excluding products already present in the cart.
def upsell_products(self):
if not settings.SHOP_USE_UPSELL_PRODUCTS:
return []
cart = Product.objects.filter(variations__sku__in=self.skus())
published_products = Product.objects.published()
for_cart = published_products.filter(upsell_products__in=cart)
with_cart_excluded = for_cart.exclude(variations__sku__in=self.skus())
return list(with_cart_excluded.distinct())
# Total discount amount for the cart under the given Discount instance.
def calculate_discount(self, discount):
# Discount applies to cart total if not product specific.
products = discount.all_products()
if products.count() == 0:
return discount.calculate(self.total_price())
total = Decimal("0")
# Create a list of skus in the cart that are applicable to
# the discount, and total the discount for applicable items.
lookup = {"product__in": products, "sku__in": self.skus()}
discount_variations = ProductVariation.objects.filter(**lookup)
discount_skus = discount_variations.values_list("sku", flat=True)
for item in self:
if item.sku in discount_skus:
total += discount.calculate(item.unit_price) * item.quantity
return total
# Abstract base for an item in a cart or an order: a snapshot of a product
# variation (sku, description, unit price) plus a quantity.
class SelectedProduct(models.Model):
sku = fields.SKUField()
description = CharField(_("Description"), max_length=2000)
quantity = models.IntegerField(_("Quantity"), default=0)
unit_price = fields.MoneyField(_("Unit price"), default=Decimal("0"))
total_price = fields.MoneyField(_("Total price"), default=Decimal("0"))
class Meta:
abstract = True
def __unicode__(self):
return ""
# Keep total_price consistent with quantity; a persisted item whose
# quantity has dropped to zero or below is deleted instead of saved.
def save(self, *args, **kwargs):
if not self.id or self.quantity > 0:
self.total_price = self.unit_price * self.quantity
super(SelectedProduct, self).save(*args, **kwargs)
else:
self.delete()
# A SelectedProduct in a cart, with snapshotted url/image for display.
class CartItem(SelectedProduct):
cart = models.ForeignKey("Cart", related_name="items")
url = CharField(max_length=2000)
image = CharField(max_length=200, null=True)
def get_absolute_url(self):
return self.url
# A SelectedProduct belonging to a completed Order.
class OrderItem(SelectedProduct):
order = models.ForeignKey("Order", related_name="items")
# Per-product, per-time-bucket counters of cart additions and purchases;
# one row per (product, timestamp) pair.
class ProductAction(models.Model):
product = models.ForeignKey("Product", related_name="actions")
timestamp = models.IntegerField()
total_cart = models.IntegerField(default=0)
total_purchase = models.IntegerField(default=0)
objects = managers.ProductActionManager()
class Meta:
unique_together = ("product", "timestamp")
# Abstract base for Sale and DiscountCode: target products/categories plus
# one of three discount styles (deduct amount, percent, or exact price).
class Discount(models.Model):
title = CharField(_("Title"), max_length=100)
active = models.BooleanField(_("Active"), default=False)
products = models.ManyToManyField("Product", blank=True,
verbose_name=_("Products"))
categories = models.ManyToManyField("Category", blank=True,
related_name="%(class)s_related",
verbose_name=_("Categories"))
discount_deduct = fields.MoneyField(_("Reduce by amount"))
discount_percent = fields.PercentageField(_("Reduce by percent"),
max_digits=5, decimal_places=2,
blank=True, null=True)
discount_exact = fields.MoneyField(_("Reduce to amount"))
valid_from = models.DateTimeField(_("Valid from"), blank=True, null=True)
valid_to = models.DateTimeField(_("Valid to"), blank=True, null=True)
class Meta:
abstract = True
def __unicode__(self):
return self.title
# All products covered: the OR of every selected category's filters plus
# the explicitly selected products.
def all_products(self):
filters = [category.filters() for category in self.categories.all()]
filters = reduce(ior, filters + [Q(id__in=self.products.only("id"))])
return Product.objects.filter(filters).distinct()
class Sale(Discount):
    """Sale discount that, when saved, writes its sale price and date range
    onto every matching ``Product`` and ``ProductVariation`` row."""

    class Meta:
        verbose_name = _("Sale")
        verbose_name_plural = _("Sales")

    def save(self, *args, **kwargs):
        """Persist the sale, then push its values onto matching rows."""
        super(Sale, self).save(*args, **kwargs)
        self.update_products()

    def update_products(self):
        """Clear previously applied values for this sale, then apply the
        configured discount to every product/variation it covers."""
        self._clear()
        if self.active:
            extra_filter = {}
            if self.discount_deduct is not None:
                # Don't apply to prices that would be negative
                extra_filter["unit_price__gt"] = self.discount_deduct
                sale_price = models.F("unit_price") - self.discount_deduct
            elif self.discount_percent is not None:
                # Fix: the inner expression used a bare ``F`` which is not
                # imported anywhere in this module - use ``models.F``.
                # NOTE(review): dividing by the string "100.0" relies on the
                # ORM coercing it to a SQL literal - confirm before changing.
                sale_price = models.F("unit_price") - (
                    models.F("unit_price") / "100.0" * self.discount_percent)
            elif self.discount_exact is not None:
                # Don't apply to prices already cheaper than the exact sale
                # amount.
                extra_filter["unit_price__gt"] = self.discount_exact
                sale_price = self.discount_exact
            else:
                # No discount value configured - nothing to apply.
                return
            products = self.all_products()
            variations = ProductVariation.objects.filter(product__in=products)
            for priced_objects in (products, variations):
                update = {"sale_id": self.id,
                          "sale_price": sale_price,
                          "sale_to": self.valid_to,
                          "sale_from": self.valid_from}
                using = priced_objects.db
                if "mysql" not in settings.DATABASES[using]["ENGINE"]:
                    priced_objects.filter(**extra_filter).update(**update)
                else:
                    # Work around for MySQL which does not allow update
                    # to operate on subquery where the FROM clause would
                    # have it operate on the same table, so we update
                    # each instance individually:
                    # http://dev.mysql.com/doc/refman/5.0/en/subquery-errors.html
                    # Also MySQL may raise a 'Data truncated' warning here
                    # when doing a calculation that exceeds the precision
                    # of the price column. In this case it's safe to ignore
                    # it and continue.
                    # Fix: the loop header was garbled in the source
                    # ("riced_objects.filter(**extra_filter):") - restore
                    # the ``for priced in ...`` iteration it clearly needs.
                    for priced in priced_objects.filter(**extra_filter):
                        for field, value in list(update.items()):
                            setattr(priced, field, value)
                        try:
                            priced.save()
                        except Warning:
                            connection.set_rollback(False)

    def delete(self, *args, **kwargs):
        """Remove this sale's values from covered rows before deleting."""
        self._clear()
        super(Sale, self).delete(*args, **kwargs)

    def _clear(self):
        """Null out the sale fields on every row tagged with this sale's
        id, for both products and variations."""
        update = {"sale_id": None, "sale_price": None,
                  "sale_from": None, "sale_to": None}
        for priced_model in (Product, ProductVariation):
            priced_model.objects.filter(sale_id=self.id).update(**update)
# Re-apply a sale to its products whenever products are added to its M2M
# relation (runs after the rows are inserted, hence "post_add").
@receiver(m2m_changed, sender=Sale.products.through)
def sale_update_products(sender, instance, action, *args, **kwargs):
if action == "post_add":
instance.update_products()
# A user-entered discount code with optional minimum purchase, free
# shipping, and a decrementing usage limit (see Order.complete()).
class DiscountCode(Discount):
code = fields.DiscountCodeField(_("Code"), unique=True)
min_purchase = fields.MoneyField(_("Minimum total purchase"))
free_shipping = models.BooleanField(_("Free shipping"), default=False)
uses_remaining = models.IntegerField(_("Uses remaining"), blank=True,
null=True, help_text=_("If you wish to limit the number of times a "
"code may be used, set this value. It will be decremented upon "
"each use."))
objects = managers.DiscountCodeManager()
# Discount to subtract from ``amount``: a fixed deduction (only when it
# would not exceed the amount) or a percentage; otherwise zero.
def calculate(self, amount):
if self.discount_deduct is not None:
# Don't apply amounts that would be negative after
# deduction.
if self.discount_deduct <= amount:
return self.discount_deduct
elif self.discount_percent is not None:
return amount / Decimal("100") * self.discount_percent
return 0
class Meta:
verbose_name = _("Discount code")
verbose_name_plural = _("Discount codes")
| true | true |
1c2dc5337abd79b8f0b9228777e2f687113e494c | 3,068 | py | Python | TheDigger_src/lib/dns_handler.py | Jistrokz/TheDigger | d2831b0b8fdf75595c4049d885abb3e6a79b9a30 | [
"MIT"
] | 5 | 2021-06-20T16:49:06.000Z | 2022-03-03T07:21:42.000Z | TheDigger_src/lib/dns_handler.py | Jistrokz/TheDigger | d2831b0b8fdf75595c4049d885abb3e6a79b9a30 | [
"MIT"
] | null | null | null | TheDigger_src/lib/dns_handler.py | Jistrokz/TheDigger | d2831b0b8fdf75595c4049d885abb3e6a79b9a30 | [
"MIT"
] | null | null | null | from dns import resolver
from asyncio.subprocess import PIPE, create_subprocess_exec
from requests.exceptions import ConnectionError
from TheDigger_src.utils.help_utils import HelpUtilities
from TheDigger_src.utils.exceptions import TheDiggerException
from TheDigger_src.utils.logger import Logger
from TheDigger_src.utils.coloring import COLOR, COLORED_COMBOS
# noinspection PyUnboundLocalVariable
class DNS_Handler:
    """Handles Lookups and DNS queries"""
    resolver = resolver.Resolver()

    @classmethod
    def query_dns(cls, domains, records):
        """
        Query DNS records for host.
        :param domains: Iterable of domains to get DNS Records for
        :param records: Iterable of DNS records to get from domain.
        :return: dict mapping each record type to its non-empty set of
            answers; record types with no answers are omitted.
        """
        results = {record_type: set() for record_type in records}
        for record in records:
            for domain in domains:
                try:
                    answers = cls.resolver.query(domain, record)
                    for answer in answers:
                        # Add value to record type
                        results.get(record).add(answer)
                except (resolver.NoAnswer, resolver.NXDOMAIN, resolver.NoNameservers):
                    # Type of record doesn't fit domain or no answer from NameServer
                    continue
        return {k: v for k, v in results.items() if v}

    @classmethod
    async def grab_whois(cls, host):
        """Run the system ``whois`` binary for the host's naked domain and
        log the key/value lines of its output to <target>/whois.txt."""
        if not host.naked:
            return
        script = "whois {}".format(host.naked).split()
        log_file = HelpUtilities.get_output_path("{}/whois.txt".format(host.target))
        logger = Logger(log_file)
        process = await create_subprocess_exec(
            *script,
            stdout=PIPE,
            stderr=PIPE
        )
        result, err = await process.communicate()
        if process.returncode == 0:
            logger.info("{} {} WHOIS information has been retrieved".format(COLORED_COMBOS.GOOD, host))
            for line in result.decode().strip().split("\n"):
                if ":" in line:
                    logger.debug(line)
        else:
            # Fix: stderr was captured but previously ignored (the original
            # comment itself said "err has not been used, Please make sure
            # to implement the variable") - surface failed lookups.
            logger.info("{} WHOIS lookup failed for {}: {}".format(
                COLORED_COMBOS.BAD, host, err.decode().strip()))

    @classmethod
    async def generate_dns_dumpster_mapping(cls, host, sout_logger):
        """Fetch the DNS Dumpster visual map for the host and save it as
        <target>/dns_mapping.png, reporting progress via sout_logger."""
        sout_logger.info("{} DNS Dumpster is fetching data for {} ".format(
            COLORED_COMBOS.INFO, host))
        try:
            page = HelpUtilities.query_dns_dumpster(host=host)
            if page.status_code == 200:
                path = HelpUtilities.get_output_path("{}/dns_mapping.png".format(host.target))
                with open(path, "wb") as target_image:
                    target_image.write(page.content)
                sout_logger.info("{} DNS Mapping sucessfully fetched for {}".format(
                    COLORED_COMBOS.GOOD, host.target)
                )
            else:
                raise TheDiggerException
        # ``ConnectionError`` is imported at module level but was never
        # caught; the failure message below explicitly mentions a connection
        # error, so treat a failed HTTP request the same as a bad response.
        except (TheDiggerException, ConnectionError):
            sout_logger.info("{} DNS Mapping has Failed. There is a connection error.".format(
                COLORED_COMBOS.BAD))
| 39.844156 | 118 | 0.610821 | from dns import resolver
from asyncio.subprocess import PIPE, create_subprocess_exec
from requests.exceptions import ConnectionError
from TheDigger_src.utils.help_utils import HelpUtilities
from TheDigger_src.utils.exceptions import TheDiggerException
from TheDigger_src.utils.logger import Logger
from TheDigger_src.utils.coloring import COLOR, COLORED_COMBOS
# Handles lookups and DNS queries (comment-stripped duplicate of the
# class above; behavior is identical).
class DNS_Handler:
resolver = resolver.Resolver()
# Query each record type for each domain; returns a dict mapping record
# type -> non-empty set of answers (empty record types are dropped).
@classmethod
def query_dns(cls, domains, records):
results = {k: set() for k in records}
for record in records:
for domain in domains:
try:
answers = cls.resolver.query(domain, record)
for answer in answers:
results.get(record).add(answer)
# Record type doesn't fit the domain, or no answer from the
# name server - skip this (domain, record) pair.
except (resolver.NoAnswer, resolver.NXDOMAIN, resolver.NoNameservers):
continue
return {k: v for k, v in results.items() if v}
# Run the system "whois" binary for the host's naked domain and log the
# key/value lines of its output to <target>/whois.txt.
@classmethod
async def grab_whois(cls, host):
if not host.naked:
return
script = "whois {}".format(host.naked).split()
log_file = HelpUtilities.get_output_path("{}/whois.txt".format(host.target))
logger = Logger(log_file)
process = await create_subprocess_exec(
*script,
stdout=PIPE,
stderr=PIPE
)
result, err = await process.communicate() #err has not been used, Please make sure to implement the variable.
if process.returncode == 0:
logger.info("{} {} WHOIS information has been retrieved".format(COLORED_COMBOS.GOOD, host))
for line in result.decode().strip().split("\n"):
if ":" in line:
logger.debug(line)
# Fetch the DNS Dumpster visual map for the host and save it as
# <target>/dns_mapping.png, reporting progress via sout_logger.
@classmethod
async def generate_dns_dumpster_mapping(cls, host, sout_logger):
sout_logger.info("{} DNS Dumpster is fetching data for {} ".format(
COLORED_COMBOS.INFO, host))
try:
page = HelpUtilities.query_dns_dumpster(host=host)
if page.status_code == 200:
path = HelpUtilities.get_output_path("{}/dns_mapping.png".format(host.target))
with open(path, "wb") as target_image:
target_image.write(page.content)
sout_logger.info("{} DNS Mapping sucessfully fetched for {}".format(
COLORED_COMBOS.GOOD, host.target)
)
else:
# Non-200 responses are funnelled into the same failure path.
raise TheDiggerException
except TheDiggerException:
sout_logger.info("{} DNS Mapping has Failed. There is a connection error.".format(
COLORED_COMBOS.BAD))
| true | true |
1c2dc589b65e5e2a15c250e688b9e52057911300 | 760 | py | Python | dali/python/nvidia/dali/__init__.py | SamanthaFeidFischer/DALI | 1c57da0a4ea210dad4219db2b217d04c319c308e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | dali/python/nvidia/dali/__init__.py | SamanthaFeidFischer/DALI | 1c57da0a4ea210dad4219db2b217d04c319c308e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | dali/python/nvidia/dali/__init__.py | SamanthaFeidFischer/DALI | 1c57da0a4ea210dad4219db2b217d04c319c308e | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2020-07-03T00:34:07.000Z | 2020-07-03T00:34:07.000Z | # Copyright (c) 2017-2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from . import ops
from . import pipeline
from . import tensor
from . import tfrecord
from . import types
| 34.545455 | 74 | 0.767105 |
from __future__ import absolute_import
from . import ops
from . import pipeline
from . import tensor
from . import tfrecord
from . import types
| true | true |
1c2dc60dba7987b05d5eb2937f05b5bc2b7d68dd | 400 | py | Python | src/beanmachine/ppl/compiler/hint.py | rodrigodesalvobraz/beanmachine-1 | 1c0d5ffeb505167f581e518809ea1320861bdf18 | [
"MIT"
] | 1 | 2021-12-22T13:19:14.000Z | 2021-12-22T13:19:14.000Z | src/beanmachine/ppl/compiler/hint.py | rodrigodesalvobraz/beanmachine-1 | 1c0d5ffeb505167f581e518809ea1320861bdf18 | [
"MIT"
] | null | null | null | src/beanmachine/ppl/compiler/hint.py | rodrigodesalvobraz/beanmachine-1 | 1c0d5ffeb505167f581e518809ea1320861bdf18 | [
"MIT"
] | null | null | null | # Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""Operations that are intended as hints to the Beanstalk compiler"""
import math
import torch
def math_log1mexp(x):
    """Stably compute ``log(1 - exp(x))`` for ``x < 0``.

    The naive form ``math.log(1.0 - math.exp(x))`` loses all precision once
    ``exp(x)`` falls below the spacing of 1.0 (e.g. x = -50 yields 0.0
    instead of roughly ``-exp(-50)``).  Following the standard log1mexp
    recipe, use ``log1p(-exp(x))`` when exp(x) is small and
    ``log(-expm1(x))`` when exp(x) is close to 1.
    """
    # -log(2) is the crossover that balances the accuracy of both branches.
    if x < -math.log(2.0):
        return math.log1p(-math.exp(x))
    return math.log(-math.expm1(x))
def log1mexp(x):
    """Stably compute ``log(1 - exp(x))`` elementwise for ``x < 0``.

    Torch counterpart of ``math_log1mexp``: the naive
    ``torch.log(1.0 - torch.exp(x))`` returns 0.0 once ``exp(x)``
    underflows the spacing of 1.0.  Accepts tensors or anything
    ``torch.as_tensor`` can convert (a backward-compatible widening).
    """
    x = torch.as_tensor(x)
    # Same -log(2) crossover as the scalar version; torch.where evaluates
    # both branches but only the accurate one is selected per element.
    return torch.where(
        x < -math.log(2.0),
        torch.log1p(-torch.exp(x)),
        torch.log(-torch.expm1(x)),
    )
| 21.052632 | 69 | 0.72 |
import math
import torch
def math_log1mexp(x):
    """Stably compute ``log(1 - exp(x))`` for ``x < 0``.

    Replaces the naive ``math.log(1.0 - math.exp(x))``, which returns 0.0
    once ``exp(x)`` underflows the spacing of 1.0, with the standard
    log1mexp two-branch recipe.
    """
    # -log(2) is the crossover that balances the accuracy of both branches.
    if x < -math.log(2.0):
        return math.log1p(-math.exp(x))
    return math.log(-math.expm1(x))
def log1mexp(x):
    """Stably compute ``log(1 - exp(x))`` elementwise for ``x < 0``.

    Replaces the naive ``torch.log(1.0 - torch.exp(x))``, which returns
    0.0 once ``exp(x)`` underflows the spacing of 1.0.  Accepts tensors or
    anything ``torch.as_tensor`` can convert.
    """
    x = torch.as_tensor(x)
    # Same -log(2) crossover as the scalar version; torch.where evaluates
    # both branches but only the accurate one is selected per element.
    return torch.where(
        x < -math.log(2.0),
        torch.log1p(-torch.exp(x)),
        torch.log(-torch.expm1(x)),
    )
| true | true |
1c2dc82c5a5b3819257130d67fab3879c58e5685 | 10,090 | py | Python | integrations/test_lightning.py | gagan3012/metrics | 5a2388ccaa97cc3608b1fa28879f77436434a6d6 | [
"Apache-2.0"
] | 1 | 2021-09-14T23:34:48.000Z | 2021-09-14T23:34:48.000Z | integrations/test_lightning.py | gagan3012/metrics | 5a2388ccaa97cc3608b1fa28879f77436434a6d6 | [
"Apache-2.0"
] | 1 | 2021-10-16T05:02:56.000Z | 2021-12-15T07:02:17.000Z | integrations/test_lightning.py | gagan3012/metrics | 5a2388ccaa97cc3608b1fa28879f77436434a6d6 | [
"Apache-2.0"
] | 2 | 2021-10-16T05:02:43.000Z | 2022-02-10T16:01:52.000Z | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
import pytest
import torch
from pytorch_lightning import LightningModule, Trainer
from torch import tensor
from torch.utils.data import DataLoader
from integrations.lightning.boring_model import BoringModel, RandomDataset
from tests.helpers import _LIGHTNING_GREATER_EQUAL_1_3
from torchmetrics import Accuracy, AveragePrecision, Metric
# Minimal Metric that accumulates a running sum; state "x" is combined
# across processes with a sum reduction.
class SumMetric(Metric):
def __init__(self):
super().__init__()
self.add_state("x", tensor(0.0), dist_reduce_fx="sum")
def update(self, x):
# Fold the incoming value into the accumulated state.
self.x += x
def compute(self):
# Return the accumulated sum.
return self.x
# Minimal Metric that accumulates a running negated sum (subtracts each
# update); state "x" is combined across processes with a sum reduction.
class DiffMetric(Metric):
def __init__(self):
super().__init__()
self.add_state("x", tensor(0.0), dist_reduce_fx="sum")
def update(self, x):
# Subtract the incoming value from the accumulated state.
self.x -= x
def compute(self):
# Return the accumulated (negated) sum.
return self.x
# Verify that a Metric updated inside training_step agrees with a manually
# tracked sum at the end of every epoch, across two epochs.
def test_metric_lightning(tmpdir):
class TestModel(BoringModel):
def __init__(self):
super().__init__()
self.metric = SumMetric()
# Manually tracked reference value to compare against the metric.
self.sum = 0.0
def training_step(self, batch, batch_idx):
x = batch
self.metric(x.sum())
self.sum += x.sum()
return self.step(x)
def training_epoch_end(self, outs):
# The metric state must match the manual sum for the epoch.
if not torch.allclose(self.sum, self.metric.compute()):
raise ValueError("Sum and computed value must be equal")
# Reset both so the check is per-epoch.
self.sum = 0.0
self.metric.reset()
model = TestModel()
model.val_dataloader = None
trainer = Trainer(
default_root_dir=tmpdir,
limit_train_batches=2,
limit_val_batches=2,
max_epochs=2,
log_every_n_steps=1,
weights_summary=None,
)
trainer.fit(model)
@pytest.mark.skipif(not _LIGHTNING_GREATER_EQUAL_1_3, reason="test requires lightning v1.3 or higher")
def test_metrics_reset(tmpdir):
    """Tests that metrics are reset correctly after the end of the train/val/test epoch.

    Taken from:
    https://github.com/PyTorchLightning/pytorch-lightning/pull/7055
    """

    class TestModel(LightningModule):
        def __init__(self):
            super().__init__()
            self.layer = torch.nn.Linear(32, 1)
            # One mocked Accuracy/AveragePrecision pair per stage so we can
            # observe exactly when Lightning calls ``reset`` on them.
            for stage in ["train", "val", "test"]:
                acc = Accuracy()
                acc.reset = mock.Mock(side_effect=acc.reset)
                ap = AveragePrecision(num_classes=1, pos_label=1)
                ap.reset = mock.Mock(side_effect=ap.reset)
                self.add_module(f"acc_{stage}", acc)
                self.add_module(f"ap_{stage}", ap)

        def forward(self, x):
            return self.layer(x)

        def _step(self, stage, batch):
            labels = (batch.detach().sum(1) > 0).float()  # Fake some targets
            logits = self.forward(batch)
            loss = torch.nn.functional.binary_cross_entropy_with_logits(logits, labels.unsqueeze(1))
            probs = torch.sigmoid(logits.detach())
            self.log(f"loss/{stage}", loss)
            acc = self._modules[f"acc_{stage}"]
            ap = self._modules[f"ap_{stage}"]
            labels_int = labels.to(torch.long)
            acc(probs.flatten(), labels_int)
            ap(probs.flatten(), labels_int)
            # Metric.forward calls reset so reset the mocks here
            acc.reset.reset_mock()
            ap.reset.reset_mock()
            self.log(f"{stage}/accuracy", acc)
            self.log(f"{stage}/ap", ap)
            return loss

        def training_step(self, batch, batch_idx, *args, **kwargs):
            return self._step("train", batch)

        def validation_step(self, batch, batch_idx, *args, **kwargs):
            return self._step("val", batch)

        def test_step(self, batch, batch_idx, *args, **kwargs):
            return self._step("test", batch)

        def configure_optimizers(self):
            optimizer = torch.optim.SGD(self.layer.parameters(), lr=0.1)
            lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1)
            return [optimizer], [lr_scheduler]

        @staticmethod
        def train_dataloader():
            return DataLoader(RandomDataset(32, 64), batch_size=2)

        @staticmethod
        def val_dataloader():
            return DataLoader(RandomDataset(32, 64), batch_size=2)

        @staticmethod
        def test_dataloader():
            return DataLoader(RandomDataset(32, 64), batch_size=2)

        def _assert_epoch_end(self, stage):
            # Inside the epoch-end hook the metrics must NOT have been reset
            # yet - Lightning resets them after the epoch completes.
            acc = self._modules[f"acc_{stage}"]
            ap = self._modules[f"ap_{stage}"]
            # Fix: this previously read ``acc.reset.asset_not_called()`` - a
            # typo that silently created a child Mock (``asset`` escapes
            # mock's assert-typo guard) instead of asserting anything.
            acc.reset.assert_not_called()
            ap.reset.assert_not_called()

        # NOTE(review): the Lightning hook is named ``training_epoch_end``;
        # ``train_epoch_end`` is never invoked by the trainer - confirm
        # against the upstream PR before renaming.
        def train_epoch_end(self, outputs):
            self._assert_epoch_end("train")

        def validation_epoch_end(self, outputs):
            self._assert_epoch_end("val")

        def test_epoch_end(self, outputs):
            self._assert_epoch_end("test")

    def _assert_called(model, stage):
        # After each trainer phase, every metric for that stage must have
        # been reset exactly once; clear the mocks for the next phase.
        acc = model._modules[f"acc_{stage}"]
        ap = model._modules[f"ap_{stage}"]
        acc.reset.assert_called_once()
        acc.reset.reset_mock()
        ap.reset.assert_called_once()
        ap.reset.reset_mock()

    model = TestModel()
    trainer = Trainer(
        default_root_dir=tmpdir,
        limit_train_batches=2,
        limit_val_batches=2,
        limit_test_batches=2,
        max_epochs=1,
        progress_bar_refresh_rate=0,
    )
    trainer.fit(model)
    _assert_called(model, "train")
    _assert_called(model, "val")
    trainer.validate(model)
    _assert_called(model, "val")
    trainer.test(model)
    _assert_called(model, "test")
# todo: reconsider if it make sense to keep here
# def test_metric_lightning_log(tmpdir):
# """ Test logging a metric object and that the metric state gets reset after each epoch."""
# class TestModel(BoringModel):
# def __init__(self):
# super().__init__()
# self.metric_step = SumMetric()
# self.metric_epoch = SumMetric()
# self.sum = 0.0
#
# def on_epoch_start(self):
# self.sum = 0.0
#
# def training_step(self, batch, batch_idx):
# x = batch
# self.metric_step(x.sum())
# self.sum += x.sum()
# self.log("sum_step", self.metric_step, on_epoch=True, on_step=False)
# return {'loss': self.step(x), 'data': x}
#
# def training_epoch_end(self, outs):
# self.log("sum_epoch", self.metric_epoch(torch.stack([o['data'] for o in outs]).sum()))
#
# model = TestModel()
# model.val_dataloader = None
#
# trainer = Trainer(
# default_root_dir=tmpdir,
# limit_train_batches=2,
# limit_val_batches=2,
# max_epochs=2,
# log_every_n_steps=1,
# weights_summary=None,
# )
# trainer.fit(model)
#
# logged = trainer.logged_metrics
# assert torch.allclose(tensor(logged["sum_step"]), model.sum)
# assert torch.allclose(tensor(logged["sum_epoch"]), model.sum)
# todo: need to be fixed
# def test_scriptable(tmpdir):
# class TestModel(BoringModel):
# def __init__(self):
# super().__init__()
# # the metric is not used in the module's `forward`
# # so the module should be exportable to TorchScript
# self.metric = SumMetric()
# self.sum = 0.0
#
# def training_step(self, batch, batch_idx):
# x = batch
# self.metric(x.sum())
# self.sum += x.sum()
# self.log("sum", self.metric, on_epoch=True, on_step=False)
# return self.step(x)
#
# model = TestModel()
# trainer = Trainer(
# default_root_dir=tmpdir,
# limit_train_batches=2,
# limit_val_batches=2,
# max_epochs=1,
# log_every_n_steps=1,
# weights_summary=None,
# logger=False,
# checkpoint_callback=False,
# )
# trainer.fit(model)
# rand_input = torch.randn(10, 32)
#
# script_model = model.to_torchscript()
#
# # test that we can still do inference
# output = model(rand_input)
# script_output = script_model(rand_input)
# assert torch.allclose(output, script_output)
# def test_metric_collection_lightning_log(tmpdir):
#
# class TestModel(BoringModel):
#
# def __init__(self):
# super().__init__()
# self.metric = MetricCollection([SumMetric(), DiffMetric()])
# self.sum = 0.0
# self.diff = 0.0
#
# def training_step(self, batch, batch_idx):
# x = batch
# metric_vals = self.metric(x.sum())
# self.sum += x.sum()
# self.diff -= x.sum()
# self.log_dict({f'{k}_step': v for k, v in metric_vals.items()})
# return self.step(x)
#
# def training_epoch_end(self, outputs):
# metric_vals = self.metric.compute()
# self.log_dict({f'{k}_epoch': v for k, v in metric_vals.items()})
#
# model = TestModel()
# model.val_dataloader = None
#
# trainer = Trainer(
# default_root_dir=tmpdir,
# limit_train_batches=2,
# limit_val_batches=2,
# max_epochs=1,
# log_every_n_steps=1,
# weights_summary=None,
# )
# trainer.fit(model)
#
# logged = trainer.logged_metrics
# assert torch.allclose(tensor(logged["SumMetric_epoch"]), model.sum)
# assert torch.allclose(tensor(logged["DiffMetric_epoch"]), model.diff)
| 31.433022 | 102 | 0.603865 |
from unittest import mock
import pytest
import torch
from pytorch_lightning import LightningModule, Trainer
from torch import tensor
from torch.utils.data import DataLoader
from integrations.lightning.boring_model import BoringModel, RandomDataset
from tests.helpers import _LIGHTNING_GREATER_EQUAL_1_3
from torchmetrics import Accuracy, AveragePrecision, Metric
class SumMetric(Metric):
def __init__(self):
super().__init__()
self.add_state("x", tensor(0.0), dist_reduce_fx="sum")
def update(self, x):
self.x += x
def compute(self):
return self.x
class DiffMetric(Metric):
def __init__(self):
super().__init__()
self.add_state("x", tensor(0.0), dist_reduce_fx="sum")
def update(self, x):
self.x -= x
def compute(self):
return self.x
def test_metric_lightning(tmpdir):
class TestModel(BoringModel):
def __init__(self):
super().__init__()
self.metric = SumMetric()
self.sum = 0.0
def training_step(self, batch, batch_idx):
x = batch
self.metric(x.sum())
self.sum += x.sum()
return self.step(x)
def training_epoch_end(self, outs):
if not torch.allclose(self.sum, self.metric.compute()):
raise ValueError("Sum and computed value must be equal")
self.sum = 0.0
self.metric.reset()
model = TestModel()
model.val_dataloader = None
trainer = Trainer(
default_root_dir=tmpdir,
limit_train_batches=2,
limit_val_batches=2,
max_epochs=2,
log_every_n_steps=1,
weights_summary=None,
)
trainer.fit(model)
@pytest.mark.skipif(not _LIGHTNING_GREATER_EQUAL_1_3, reason="test requires lightning v1.3 or higher")
def test_metrics_reset(tmpdir):
class TestModel(LightningModule):
def __init__(self):
super().__init__()
self.layer = torch.nn.Linear(32, 1)
for stage in ["train", "val", "test"]:
acc = Accuracy()
acc.reset = mock.Mock(side_effect=acc.reset)
ap = AveragePrecision(num_classes=1, pos_label=1)
ap.reset = mock.Mock(side_effect=ap.reset)
self.add_module(f"acc_{stage}", acc)
self.add_module(f"ap_{stage}", ap)
def forward(self, x):
return self.layer(x)
def _step(self, stage, batch):
labels = (batch.detach().sum(1) > 0).float()
logits = self.forward(batch)
loss = torch.nn.functional.binary_cross_entropy_with_logits(logits, labels.unsqueeze(1))
probs = torch.sigmoid(logits.detach())
self.log(f"loss/{stage}", loss)
acc = self._modules[f"acc_{stage}"]
ap = self._modules[f"ap_{stage}"]
labels_int = labels.to(torch.long)
acc(probs.flatten(), labels_int)
ap(probs.flatten(), labels_int)
acc.reset.reset_mock()
ap.reset.reset_mock()
self.log(f"{stage}/accuracy", acc)
self.log(f"{stage}/ap", ap)
return loss
def training_step(self, batch, batch_idx, *args, **kwargs):
return self._step("train", batch)
def validation_step(self, batch, batch_idx, *args, **kwargs):
return self._step("val", batch)
def test_step(self, batch, batch_idx, *args, **kwargs):
return self._step("test", batch)
def configure_optimizers(self):
optimizer = torch.optim.SGD(self.layer.parameters(), lr=0.1)
lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1)
return [optimizer], [lr_scheduler]
@staticmethod
def train_dataloader():
return DataLoader(RandomDataset(32, 64), batch_size=2)
@staticmethod
def val_dataloader():
return DataLoader(RandomDataset(32, 64), batch_size=2)
@staticmethod
def test_dataloader():
return DataLoader(RandomDataset(32, 64), batch_size=2)
def _assert_epoch_end(self, stage):
acc = self._modules[f"acc_{stage}"]
ap = self._modules[f"ap_{stage}"]
acc.reset.asset_not_called()
ap.reset.assert_not_called()
def train_epoch_end(self, outputs):
self._assert_epoch_end("train")
def validation_epoch_end(self, outputs):
self._assert_epoch_end("val")
def test_epoch_end(self, outputs):
self._assert_epoch_end("test")
def _assert_called(model, stage):
acc = model._modules[f"acc_{stage}"]
ap = model._modules[f"ap_{stage}"]
acc.reset.assert_called_once()
acc.reset.reset_mock()
ap.reset.assert_called_once()
ap.reset.reset_mock()
model = TestModel()
trainer = Trainer(
default_root_dir=tmpdir,
limit_train_batches=2,
limit_val_batches=2,
limit_test_batches=2,
max_epochs=1,
progress_bar_refresh_rate=0,
)
trainer.fit(model)
_assert_called(model, "train")
_assert_called(model, "val")
trainer.validate(model)
_assert_called(model, "val")
trainer.test(model)
_assert_called(model, "test")
e to TorchScript
# self.metric = SumMetric()
# self.sum = 0.0
#
# def training_step(self, batch, batch_idx):
# x = batch
# self.metric(x.sum())
# self.sum += x.sum()
# self.log("sum", self.metric, on_epoch=True, on_step=False)
# return self.step(x)
#
# model = TestModel()
# trainer = Trainer(
# default_root_dir=tmpdir,
# limit_train_batches=2,
# limit_val_batches=2,
# max_epochs=1,
# log_every_n_steps=1,
# weights_summary=None,
# logger=False,
# checkpoint_callback=False,
# )
# trainer.fit(model)
# rand_input = torch.randn(10, 32)
#
# script_model = model.to_torchscript()
#
# # test that we can still do inference
# output = model(rand_input)
# script_output = script_model(rand_input)
# assert torch.allclose(output, script_output)
# def test_metric_collection_lightning_log(tmpdir):
#
# class TestModel(BoringModel):
#
# def __init__(self):
# super().__init__()
# self.metric = MetricCollection([SumMetric(), DiffMetric()])
# self.sum = 0.0
# self.diff = 0.0
#
# def training_step(self, batch, batch_idx):
# x = batch
# metric_vals = self.metric(x.sum())
# self.sum += x.sum()
# self.diff -= x.sum()
# self.log_dict({f'{k}_step': v for k, v in metric_vals.items()})
# return self.step(x)
#
# def training_epoch_end(self, outputs):
# metric_vals = self.metric.compute()
# self.log_dict({f'{k}_epoch': v for k, v in metric_vals.items()})
#
# model = TestModel()
# model.val_dataloader = None
#
# trainer = Trainer(
# default_root_dir=tmpdir,
# limit_train_batches=2,
# limit_val_batches=2,
# max_epochs=1,
# log_every_n_steps=1,
# weights_summary=None,
# )
# trainer.fit(model)
#
# logged = trainer.logged_metrics
# assert torch.allclose(tensor(logged["SumMetric_epoch"]), model.sum)
# assert torch.allclose(tensor(logged["DiffMetric_epoch"]), model.diff)
| true | true |
1c2dc85f5648ba0a8dd851c547d3b60121d6c46a | 897 | py | Python | flask/api/models/Event.py | mktung/tvgs-crm | be992a19b46f7d7eeaf90c9c9105a3630ff20292 | [
"MIT"
] | 1 | 2019-10-18T00:49:27.000Z | 2019-10-18T00:49:27.000Z | flask/api/models/Event.py | mktung/tvgs-crm | be992a19b46f7d7eeaf90c9c9105a3630ff20292 | [
"MIT"
] | null | null | null | flask/api/models/Event.py | mktung/tvgs-crm | be992a19b46f7d7eeaf90c9c9105a3630ff20292 | [
"MIT"
] | null | null | null | from api.core import Mixin
from .base import db
class Event(Mixin, db.Model):
"""Person Table."""
__tablename__ = "event"
id = db.Column(db.Integer, unique=True, primary_key=True)
title = db.Column(db.String, nullable=False)
date = db.Column(db.DATE, nullable=True)
name_of_volunteer = db.Column(db.String, nullable=True)
attendance = db.Column(db.Integer, nullable=False)
tags = db.Column(db.String, nullable=True)
sheet = db.Column(
db.Integer, db.ForeignKey("sheet.id", ondelete="SET NULL"), nullable=True
)
def __init__(self, title: str, date: str, name_of_volunteer: str, attendance: str, tags: str):
self.title = title
self.date = date
self.name_of_volunteer = name_of_volunteer
self.attendance = attendance
self.tags = tags
def __repr__(self):
return f"<Event {Event.title}>"
| 29.9 | 98 | 0.656633 | from api.core import Mixin
from .base import db
class Event(Mixin, db.Model):
__tablename__ = "event"
id = db.Column(db.Integer, unique=True, primary_key=True)
title = db.Column(db.String, nullable=False)
date = db.Column(db.DATE, nullable=True)
name_of_volunteer = db.Column(db.String, nullable=True)
attendance = db.Column(db.Integer, nullable=False)
tags = db.Column(db.String, nullable=True)
sheet = db.Column(
db.Integer, db.ForeignKey("sheet.id", ondelete="SET NULL"), nullable=True
)
def __init__(self, title: str, date: str, name_of_volunteer: str, attendance: str, tags: str):
self.title = title
self.date = date
self.name_of_volunteer = name_of_volunteer
self.attendance = attendance
self.tags = tags
def __repr__(self):
return f"<Event {Event.title}>"
| true | true |
1c2dc8c1fbbbba6ba513d82076b677947a48c85e | 7,325 | py | Python | ci/test-suite/universal.py | tundranerd/FEBio | fb0ca6d04af51f005d933029df232058a30f1f8f | [
"MIT"
] | null | null | null | ci/test-suite/universal.py | tundranerd/FEBio | fb0ca6d04af51f005d933029df232058a30f1f8f | [
"MIT"
] | null | null | null | ci/test-suite/universal.py | tundranerd/FEBio | fb0ca6d04af51f005d933029df232058a30f1f8f | [
"MIT"
] | null | null | null | REMOTE_RELEASE_DIR = "/root/update2/FEBioStudio/"
REMOTE_DEV_DIR = "/root/update2/FEBioStudioDev/"
exemptTests = ['ri02', 'hi01']
longTests = ['sh24', 'fl37', 'fl36']
dataField = {'bp04': '2.5',
'bi24': '1',
'bp05': '0.1',
'bp07': '2',
'bp08': '2',
'bp09': '2',
'bp10': '1',
'bp11': '1000',
'bp12': '1000',
'bp13': '1000',
'bp14': '1000',
'bp15': '0.1',
'bp16': '10000.1',
'bp17': '1000',
'bp18': '1000',
'bp19': '1',
'bp20': '2000',
'bp21': '1',
'bp22': '10000',
'bp23': '4000',
'bs01': '0.1',
'bs02': '180',
'bs03': '0.1',
'bs04': '7200',
'bs05': '3000',
'bs06': '0.1',
'bs07': '30000',
'bs08': '10000',
'cf01': '1',
'cf02': '1',
'cf03': '1',
'cf04': '1',
'cf05': '1',
'cf06': '1',
'cf07': '1',
'co01': '1',
'co02': '1',
'co04': '1.2',
'co07': '4',
'co08': '0.6',
'co09': '1',
'co10': '1',
'co11': '0.8',
'co13': '1',
'co12': '0.8',
'co15': '1',
'co16': '1',
'co17': '1',
'co18': '0.3',
'co19': '7.5',
'co20': '1',
'co21': '4',
'co22': '1',
'co25': '1',
'co26': '1',
'co27': '23',
'co28': '1',
'co29': '1',
'co30': '5',
'co31': '1.5',
'co32': '5000',
'co34': '1',
'co35': '1',
'co36': '1',
'co37': '1',
'co38': '1',
'co39': '1',
'co40': '1',
'co41': '1',
'co42': '0.26150041',
'co43': '4001',
'co44': '1',
'co45': '1',
'co46': '1',
'co47': '1',
'co48': '2.9',
'co49': '3',
'cr01': '40000000',
'cr02': '200',
'cr03': '6',
'cr04': '3636000',
'cr05': '3',
'di01': '1',
'di02': '1',
'di03': '1',
'di04': '1',
'dm01': '1',
'dm02': '1',
'dm03': '1',
'dm04': '1',
'dm05': '1',
'dm06': '1',
'dm07': '1',
'dm08': '1',
'dm09': '1',
'dm10': '1',
'dm11': '1',
'dm12': '1',
'dm13': '1',
'dm14': '1',
'dm15': '1',
'dm16': '1',
'dy01': '0.6',
'dy02': '16.8',
'dy03': '1.8',
'dy04': '1.9',
'dy05': '1',
'dy07': '1',
'dy09': '10',
'fi01': '1',
'fi02': '1',
'fi03': '1',
'fi04': '1',
'fi05': '1',
'fi06': '1',
'fi07': '1',
'fi08': '1',
'fi09': '1',
'fi10': '1',
'fi11': '1',
'fi12': '0.8',
'fi13': '1',
'fi14': '0.1',
'fi15': '0.4',
'fi16': '1',
'ho01': '1',
'ht01': '1',
'ma01': '1',
'ma02': '1',
'ma03': '1',
'ma05': '1',
'ma06': '1',
'ma07': '94.5',
'ma11': '1',
'ma12': '1',
'ma13': '1',
'ma14': '1',
'ma15': '1',
'ma16': '1',
'ma17': '450',
'ma18': '1',
'ma19': '1',
'ma20': '1',
'ma21': '1',
'ma22': '1',
'ma23': '1',
'ma24': '1',
'ma25': '1',
'ma26': '10',
'ma27': '10',
'ma28': '10',
'ma29': '10',
'ma30': '10',
'ma31': '10',
'mg01': '1',
'mg02': '1',
'mi01': '1',
'mi02': '1',
'mi03': '1',
'mi04': '1',
'mi05': '1',
'mi06': '1',
'mi09': '1',
'mi16': '1',
'mi17': '1',
'mi19': '1',
'mi24': '1',
'mi25': '1',
'mi26': '1',
'mi27': '1',
'mi28': '1',
'mi29': '1',
'mi30': '1',
'mi31': '1',
'mi32': '1',
'mi33': '10',
'mi34': '0.05',
'mp01': '1',
'mp02': '2002',
'mp03': '1',
'mp06': '1',
'mp07': '1',
'mp08': '7201',
'mp09': '2',
'ms01': '1',
'ms02': '1',
'ms03': '1',
'ms04': '2',
'ms05': '5',
'mu02': '1',
'mu03': '1',
'pi01': '1',
'pi02': '1',
'pi03': '23.5',
'pi04': '1',
'pi05': '1',
'pi06': '1',
'pi07': '1',
'ri01': '1',
'ri02': '1',
'ri03': '1',
'ri04': '1',
'ri05': '1',
'ri06': '1',
'ri07': '1',
'rj01': '0.8',
'rj02': '36',
'rj03': '36',
'rj04': '36',
'rj05': '72',
'rj06': '1',
'rj07': '10',
'rj08': '1',
'rj09': '1',
'rj10': '1',
'rj11': '1',
'rj12': '0.4',
'rj13': '36',
'rj14': '10',
'rj15': '7',
'sh01': '1',
'sh02': '1',
'sh03': '1',
'sh04': '1',
'sh05': '1',
'sh06': '1',
'sh07': '1',
'sh08': '1',
'sh09': '1',
'sh10': '1',
'sh11': '1',
'sh12': '1',
'sh13': '1',
'sh14': '1',
'sh15': '1',
'sh16': '1',
'sh17': '1',
'sh18': '1',
'sh19': '1',
'sh20': '1',
'sh21': '2',
'sh22': '2',
'sh23': '4',
'sh24': '4',
'sh25': '0.5',
'sh26': '1',
'sh27': '1',
'sh28': '1',
'sh29': '1',
'sh30': '0.5',
'sh31': '0.5',
'sh32': '1',
'sh33': '1',
'sh34': '20',
'sh35': '20',
'sh36': '1',
'sh37': '1',
'sh38': '1',
'sh39': '1',
'sh40': '1',
'sh41': '1',
'sh42': '1',
'sh43': '1',
'sh44': '1',
'sh45': '1',
'sh46': '1',
'sh47': '1',
'sh48': '1',
'sh49': '1',
'sh50': '1',
'sh51': '1',
'sh52': '1',
'sh53': '1',
'sh54': '1',
'sh55': '1',
'sh56': '1',
'sh57': '1',
'sh58': '10',
'sh59': '10',
'sh60': '1',
'te01': '1',
'te02': '1',
'te03': '1',
'te04': '1',
'te05': '1',
'tr01': '2001',
'tr02': '2002',
'tr03': '1',
'tr04': '1',
'tu01': '1',
'tu02': '1',
'tu03': '1',
'vc01': '1',
'vc02': '2'}
pluginTests = {'ht01': 'heat',
'ht02': 'heat',
'ht03': 'heat',
'ch01': 'chem',
'ch02': 'chem',
'ch03': 'chem',
'ch04': 'chem',
'ch05': 'chem',
'ch06': 'chem',
'ch07': 'chem',
'ch08': 'chem',
'ch09': 'chem',
'ch10': 'chem',
'ch11': 'chem',
'ch12': 'chem',
'ch13': 'chem',
'ch14': 'chem',
'ch15': 'chem',
'ch16': 'chem',
'ch17': 'chem',
'ch18': 'chem',
'ch19': 'chem',
'ch20': 'chem',
'ch21': 'chem',
'ch22': 'chem',
'ch23': 'chem',
'ch24': 'chem',
'ch25': 'chem',
'ch26': 'chem',
'ch27': 'chem',
'ch28': 'chem',
'ch29': 'chem',
'ch30': 'chem'}
| 21.800595 | 49 | 0.275495 | REMOTE_RELEASE_DIR = "/root/update2/FEBioStudio/"
REMOTE_DEV_DIR = "/root/update2/FEBioStudioDev/"
exemptTests = ['ri02', 'hi01']
longTests = ['sh24', 'fl37', 'fl36']
dataField = {'bp04': '2.5',
'bi24': '1',
'bp05': '0.1',
'bp07': '2',
'bp08': '2',
'bp09': '2',
'bp10': '1',
'bp11': '1000',
'bp12': '1000',
'bp13': '1000',
'bp14': '1000',
'bp15': '0.1',
'bp16': '10000.1',
'bp17': '1000',
'bp18': '1000',
'bp19': '1',
'bp20': '2000',
'bp21': '1',
'bp22': '10000',
'bp23': '4000',
'bs01': '0.1',
'bs02': '180',
'bs03': '0.1',
'bs04': '7200',
'bs05': '3000',
'bs06': '0.1',
'bs07': '30000',
'bs08': '10000',
'cf01': '1',
'cf02': '1',
'cf03': '1',
'cf04': '1',
'cf05': '1',
'cf06': '1',
'cf07': '1',
'co01': '1',
'co02': '1',
'co04': '1.2',
'co07': '4',
'co08': '0.6',
'co09': '1',
'co10': '1',
'co11': '0.8',
'co13': '1',
'co12': '0.8',
'co15': '1',
'co16': '1',
'co17': '1',
'co18': '0.3',
'co19': '7.5',
'co20': '1',
'co21': '4',
'co22': '1',
'co25': '1',
'co26': '1',
'co27': '23',
'co28': '1',
'co29': '1',
'co30': '5',
'co31': '1.5',
'co32': '5000',
'co34': '1',
'co35': '1',
'co36': '1',
'co37': '1',
'co38': '1',
'co39': '1',
'co40': '1',
'co41': '1',
'co42': '0.26150041',
'co43': '4001',
'co44': '1',
'co45': '1',
'co46': '1',
'co47': '1',
'co48': '2.9',
'co49': '3',
'cr01': '40000000',
'cr02': '200',
'cr03': '6',
'cr04': '3636000',
'cr05': '3',
'di01': '1',
'di02': '1',
'di03': '1',
'di04': '1',
'dm01': '1',
'dm02': '1',
'dm03': '1',
'dm04': '1',
'dm05': '1',
'dm06': '1',
'dm07': '1',
'dm08': '1',
'dm09': '1',
'dm10': '1',
'dm11': '1',
'dm12': '1',
'dm13': '1',
'dm14': '1',
'dm15': '1',
'dm16': '1',
'dy01': '0.6',
'dy02': '16.8',
'dy03': '1.8',
'dy04': '1.9',
'dy05': '1',
'dy07': '1',
'dy09': '10',
'fi01': '1',
'fi02': '1',
'fi03': '1',
'fi04': '1',
'fi05': '1',
'fi06': '1',
'fi07': '1',
'fi08': '1',
'fi09': '1',
'fi10': '1',
'fi11': '1',
'fi12': '0.8',
'fi13': '1',
'fi14': '0.1',
'fi15': '0.4',
'fi16': '1',
'ho01': '1',
'ht01': '1',
'ma01': '1',
'ma02': '1',
'ma03': '1',
'ma05': '1',
'ma06': '1',
'ma07': '94.5',
'ma11': '1',
'ma12': '1',
'ma13': '1',
'ma14': '1',
'ma15': '1',
'ma16': '1',
'ma17': '450',
'ma18': '1',
'ma19': '1',
'ma20': '1',
'ma21': '1',
'ma22': '1',
'ma23': '1',
'ma24': '1',
'ma25': '1',
'ma26': '10',
'ma27': '10',
'ma28': '10',
'ma29': '10',
'ma30': '10',
'ma31': '10',
'mg01': '1',
'mg02': '1',
'mi01': '1',
'mi02': '1',
'mi03': '1',
'mi04': '1',
'mi05': '1',
'mi06': '1',
'mi09': '1',
'mi16': '1',
'mi17': '1',
'mi19': '1',
'mi24': '1',
'mi25': '1',
'mi26': '1',
'mi27': '1',
'mi28': '1',
'mi29': '1',
'mi30': '1',
'mi31': '1',
'mi32': '1',
'mi33': '10',
'mi34': '0.05',
'mp01': '1',
'mp02': '2002',
'mp03': '1',
'mp06': '1',
'mp07': '1',
'mp08': '7201',
'mp09': '2',
'ms01': '1',
'ms02': '1',
'ms03': '1',
'ms04': '2',
'ms05': '5',
'mu02': '1',
'mu03': '1',
'pi01': '1',
'pi02': '1',
'pi03': '23.5',
'pi04': '1',
'pi05': '1',
'pi06': '1',
'pi07': '1',
'ri01': '1',
'ri02': '1',
'ri03': '1',
'ri04': '1',
'ri05': '1',
'ri06': '1',
'ri07': '1',
'rj01': '0.8',
'rj02': '36',
'rj03': '36',
'rj04': '36',
'rj05': '72',
'rj06': '1',
'rj07': '10',
'rj08': '1',
'rj09': '1',
'rj10': '1',
'rj11': '1',
'rj12': '0.4',
'rj13': '36',
'rj14': '10',
'rj15': '7',
'sh01': '1',
'sh02': '1',
'sh03': '1',
'sh04': '1',
'sh05': '1',
'sh06': '1',
'sh07': '1',
'sh08': '1',
'sh09': '1',
'sh10': '1',
'sh11': '1',
'sh12': '1',
'sh13': '1',
'sh14': '1',
'sh15': '1',
'sh16': '1',
'sh17': '1',
'sh18': '1',
'sh19': '1',
'sh20': '1',
'sh21': '2',
'sh22': '2',
'sh23': '4',
'sh24': '4',
'sh25': '0.5',
'sh26': '1',
'sh27': '1',
'sh28': '1',
'sh29': '1',
'sh30': '0.5',
'sh31': '0.5',
'sh32': '1',
'sh33': '1',
'sh34': '20',
'sh35': '20',
'sh36': '1',
'sh37': '1',
'sh38': '1',
'sh39': '1',
'sh40': '1',
'sh41': '1',
'sh42': '1',
'sh43': '1',
'sh44': '1',
'sh45': '1',
'sh46': '1',
'sh47': '1',
'sh48': '1',
'sh49': '1',
'sh50': '1',
'sh51': '1',
'sh52': '1',
'sh53': '1',
'sh54': '1',
'sh55': '1',
'sh56': '1',
'sh57': '1',
'sh58': '10',
'sh59': '10',
'sh60': '1',
'te01': '1',
'te02': '1',
'te03': '1',
'te04': '1',
'te05': '1',
'tr01': '2001',
'tr02': '2002',
'tr03': '1',
'tr04': '1',
'tu01': '1',
'tu02': '1',
'tu03': '1',
'vc01': '1',
'vc02': '2'}
pluginTests = {'ht01': 'heat',
'ht02': 'heat',
'ht03': 'heat',
'ch01': 'chem',
'ch02': 'chem',
'ch03': 'chem',
'ch04': 'chem',
'ch05': 'chem',
'ch06': 'chem',
'ch07': 'chem',
'ch08': 'chem',
'ch09': 'chem',
'ch10': 'chem',
'ch11': 'chem',
'ch12': 'chem',
'ch13': 'chem',
'ch14': 'chem',
'ch15': 'chem',
'ch16': 'chem',
'ch17': 'chem',
'ch18': 'chem',
'ch19': 'chem',
'ch20': 'chem',
'ch21': 'chem',
'ch22': 'chem',
'ch23': 'chem',
'ch24': 'chem',
'ch25': 'chem',
'ch26': 'chem',
'ch27': 'chem',
'ch28': 'chem',
'ch29': 'chem',
'ch30': 'chem'}
| true | true |
1c2dc95ab3a9dc14f9cb1d3e4fdf13d597524b8d | 7,093 | py | Python | mux_python/models/real_time_breakdown_value.py | moaazsidat/mux-python | 3f03b9dd0761fa1a0cd5bdbeac85ccf4f326508c | [
"MIT"
] | 36 | 2019-02-28T21:18:39.000Z | 2022-03-04T19:58:45.000Z | mux_python/models/real_time_breakdown_value.py | moaazsidat/mux-python | 3f03b9dd0761fa1a0cd5bdbeac85ccf4f326508c | [
"MIT"
] | 7 | 2019-04-01T14:48:34.000Z | 2022-03-04T16:31:34.000Z | mux_python/models/real_time_breakdown_value.py | moaazsidat/mux-python | 3f03b9dd0761fa1a0cd5bdbeac85ccf4f326508c | [
"MIT"
] | 9 | 2019-11-29T03:57:58.000Z | 2022-03-02T17:29:25.000Z | # coding: utf-8
"""
Mux API
Mux is how developers build online video. This API encompasses both Mux Video and Mux Data functionality to help you build your video-related projects better and faster than ever before. # noqa: E501
The version of the OpenAPI document: v1
Contact: devex@mux.com
Generated by: https://openapi-generator.tech
"""
import inspect
import pprint
import re # noqa: F401
import six
from mux_python.configuration import Configuration
class RealTimeBreakdownValue(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'value': 'str',
'negative_impact': 'int',
'metric_value': 'float',
'display_value': 'str',
'concurrent_viewers': 'int'
}
attribute_map = {
'value': 'value',
'negative_impact': 'negative_impact',
'metric_value': 'metric_value',
'display_value': 'display_value',
'concurrent_viewers': 'concurrent_viewers'
}
def __init__(self, value=None, negative_impact=None, metric_value=None, display_value=None, concurrent_viewers=None, local_vars_configuration=None): # noqa: E501
"""RealTimeBreakdownValue - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration.get_default_copy()
self.local_vars_configuration = local_vars_configuration
self._value = None
self._negative_impact = None
self._metric_value = None
self._display_value = None
self._concurrent_viewers = None
self.discriminator = None
if value is not None:
self.value = value
if negative_impact is not None:
self.negative_impact = negative_impact
if metric_value is not None:
self.metric_value = metric_value
if display_value is not None:
self.display_value = display_value
if concurrent_viewers is not None:
self.concurrent_viewers = concurrent_viewers
@property
def value(self):
"""Gets the value of this RealTimeBreakdownValue. # noqa: E501
:return: The value of this RealTimeBreakdownValue. # noqa: E501
:rtype: str
"""
return self._value
@value.setter
def value(self, value):
"""Sets the value of this RealTimeBreakdownValue.
:param value: The value of this RealTimeBreakdownValue. # noqa: E501
:type value: str
"""
self._value = value
@property
def negative_impact(self):
"""Gets the negative_impact of this RealTimeBreakdownValue. # noqa: E501
:return: The negative_impact of this RealTimeBreakdownValue. # noqa: E501
:rtype: int
"""
return self._negative_impact
@negative_impact.setter
def negative_impact(self, negative_impact):
"""Sets the negative_impact of this RealTimeBreakdownValue.
:param negative_impact: The negative_impact of this RealTimeBreakdownValue. # noqa: E501
:type negative_impact: int
"""
self._negative_impact = negative_impact
@property
def metric_value(self):
"""Gets the metric_value of this RealTimeBreakdownValue. # noqa: E501
:return: The metric_value of this RealTimeBreakdownValue. # noqa: E501
:rtype: float
"""
return self._metric_value
@metric_value.setter
def metric_value(self, metric_value):
"""Sets the metric_value of this RealTimeBreakdownValue.
:param metric_value: The metric_value of this RealTimeBreakdownValue. # noqa: E501
:type metric_value: float
"""
self._metric_value = metric_value
@property
def display_value(self):
"""Gets the display_value of this RealTimeBreakdownValue. # noqa: E501
:return: The display_value of this RealTimeBreakdownValue. # noqa: E501
:rtype: str
"""
return self._display_value
@display_value.setter
def display_value(self, display_value):
"""Sets the display_value of this RealTimeBreakdownValue.
:param display_value: The display_value of this RealTimeBreakdownValue. # noqa: E501
:type display_value: str
"""
self._display_value = display_value
@property
def concurrent_viewers(self):
"""Gets the concurrent_viewers of this RealTimeBreakdownValue. # noqa: E501
:return: The concurrent_viewers of this RealTimeBreakdownValue. # noqa: E501
:rtype: int
"""
return self._concurrent_viewers
@concurrent_viewers.setter
def concurrent_viewers(self, concurrent_viewers):
"""Sets the concurrent_viewers of this RealTimeBreakdownValue.
:param concurrent_viewers: The concurrent_viewers of this RealTimeBreakdownValue. # noqa: E501
:type concurrent_viewers: int
"""
self._concurrent_viewers = concurrent_viewers
def to_dict(self, serialize=False):
"""Returns the model properties as a dict"""
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = inspect.getargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, RealTimeBreakdownValue):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, RealTimeBreakdownValue):
return True
return self.to_dict() != other.to_dict()
| 30.311966 | 204 | 0.621176 |
import inspect
import pprint
import re
import six
from mux_python.configuration import Configuration
class RealTimeBreakdownValue(object):
openapi_types = {
'value': 'str',
'negative_impact': 'int',
'metric_value': 'float',
'display_value': 'str',
'concurrent_viewers': 'int'
}
attribute_map = {
'value': 'value',
'negative_impact': 'negative_impact',
'metric_value': 'metric_value',
'display_value': 'display_value',
'concurrent_viewers': 'concurrent_viewers'
}
def __init__(self, value=None, negative_impact=None, metric_value=None, display_value=None, concurrent_viewers=None, local_vars_configuration=None):
if local_vars_configuration is None:
local_vars_configuration = Configuration.get_default_copy()
self.local_vars_configuration = local_vars_configuration
self._value = None
self._negative_impact = None
self._metric_value = None
self._display_value = None
self._concurrent_viewers = None
self.discriminator = None
if value is not None:
self.value = value
if negative_impact is not None:
self.negative_impact = negative_impact
if metric_value is not None:
self.metric_value = metric_value
if display_value is not None:
self.display_value = display_value
if concurrent_viewers is not None:
self.concurrent_viewers = concurrent_viewers
@property
def value(self):
return self._value
@value.setter
def value(self, value):
self._value = value
@property
def negative_impact(self):
return self._negative_impact
@negative_impact.setter
def negative_impact(self, negative_impact):
self._negative_impact = negative_impact
@property
def metric_value(self):
return self._metric_value
@metric_value.setter
def metric_value(self, metric_value):
self._metric_value = metric_value
@property
def display_value(self):
return self._display_value
@display_value.setter
def display_value(self, display_value):
self._display_value = display_value
@property
def concurrent_viewers(self):
return self._concurrent_viewers
@concurrent_viewers.setter
def concurrent_viewers(self, concurrent_viewers):
self._concurrent_viewers = concurrent_viewers
def to_dict(self, serialize=False):
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = inspect.getargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, RealTimeBreakdownValue):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
if not isinstance(other, RealTimeBreakdownValue):
return True
return self.to_dict() != other.to_dict()
| true | true |
1c2dcb2cd8344904ed92b020972ac557bf1fb37a | 1,379 | py | Python | 03 AccessWebData/JSONdataInAPI.py | blueicy/Python-achieve | cbe7a0f898bef5f1d951d69cef0c305a62faaaf8 | [
"MIT"
] | null | null | null | 03 AccessWebData/JSONdataInAPI.py | blueicy/Python-achieve | cbe7a0f898bef5f1d951d69cef0c305a62faaaf8 | [
"MIT"
] | null | null | null | 03 AccessWebData/JSONdataInAPI.py | blueicy/Python-achieve | cbe7a0f898bef5f1d951d69cef0c305a62faaaf8 | [
"MIT"
] | null | null | null | import urllib.request, urllib.parse, urllib.error
import json
import ssl
api_key = False
#api_key = 'AIzaSy___IDByT70'
# https://developers.google.com/maps/documentation/geocoding/intro
if api_key is False:
api_key = 42
serviceurl = 'http://py4e-data.dr-chuck.net/json?'
else :
serviceurl = 'https://maps.googleapis.com/maps/api/geocode/json?'
# Ignore SSL certificate errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
#INPUT WEB ADDRESS
address = input('Enter location: ')
if len(address) < 1:
address = 'South Federal University'
# URL CHANGER : check geoxml.py
parms = dict()
parms['address'] = address
if api_key is not False:
parms['key'] = api_key
#CONCANATE URL with address
url = serviceurl + urllib.parse.urlencode(parms)
print('Retrieving', url)
uh = urllib.request.urlopen(url, context=ctx)
data = uh.read().decode()
print('Retrieved', len(data), 'characters')
#READ data by json
try:
js = json.loads(data)
except:
js = None
#FAIL SAFE
if not js or 'status' not in js or js['status'] != 'OK':
print('=== No JS ===')
#PRINT JSON PRETTY
#print(json.dumps(js, indent=4, sort_keys=True))
#print(len(js))
#FIND place_id in JS
for i in range(len(js)):
try:
placeid = js['results'][i]['place_id']
print('Place id', placeid)
except:
continue
| 20.279412 | 69 | 0.684554 | import urllib.request, urllib.parse, urllib.error
import json
import ssl
api_key = False
if api_key is False:
api_key = 42
serviceurl = 'http://py4e-data.dr-chuck.net/json?'
else :
serviceurl = 'https://maps.googleapis.com/maps/api/geocode/json?'
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
address = input('Enter location: ')
if len(address) < 1:
address = 'South Federal University'
parms = dict()
parms['address'] = address
if api_key is not False:
parms['key'] = api_key
url = serviceurl + urllib.parse.urlencode(parms)
print('Retrieving', url)
uh = urllib.request.urlopen(url, context=ctx)
data = uh.read().decode()
print('Retrieved', len(data), 'characters')
try:
js = json.loads(data)
except:
js = None
if not js or 'status' not in js or js['status'] != 'OK':
print('=== No JS ===')
for i in range(len(js)):
try:
placeid = js['results'][i]['place_id']
print('Place id', placeid)
except:
continue
| true | true |
1c2dccd0e95cc8419c1e9dad4a3fa248bfb5459c | 441 | py | Python | Get_pid.py | tokyohost/Download-Thz-Torrent | 4f90cf710aaa143cab2e07e7348c625d34f9ad7c | [
"MIT"
] | 4 | 2019-11-28T05:56:36.000Z | 2021-12-25T01:48:21.000Z | Get_pid.py | tokyohost/get-Thz-Torrent-and-info | 4f90cf710aaa143cab2e07e7348c625d34f9ad7c | [
"MIT"
] | null | null | null | Get_pid.py | tokyohost/get-Thz-Torrent-and-info | 4f90cf710aaa143cab2e07e7348c625d34f9ad7c | [
"MIT"
] | 2 | 2020-02-10T15:23:59.000Z | 2020-02-29T13:11:26.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
import re
def Get_pid(soup):
#返回当前页面的唯一pid
pid = soup.findAll('div', {'class': "pls"}) #匹配到指定容器
Get_pid = str(pid) # 转换成文本文档
p = re.compile(r'\d+') # 匹配数字也就是每个页面单独的 Pid
pidnumber = p.findall(Get_pid)
# print(pidnumber)
# print("页面唯一pid为:"+pidnumber[0])
#infoMsg = soup.select('#postmessage_' + pidnumber[0]) # 获取的数字数组中第一个才是当前页面唯一的pid
return pidnumber[0] #返回pid | 31.5 | 85 | 0.637188 |
import re
def Get_pid(soup):
pid = soup.findAll('div', {'class': "pls"})
Get_pid = str(pid)
p = re.compile(r'\d+')
pidnumber = p.findall(Get_pid)
| true | true |
1c2dcdc18755de16380093bc5feef68ea0db1a64 | 9,695 | py | Python | tests/test_buffer_io.py | Infinidat/infi.instruct | 69dfd8a35d17f8687581e838ea13e7554f7e5034 | [
"BSD-3-Clause"
] | 2 | 2015-01-12T21:16:06.000Z | 2019-12-12T05:59:56.000Z | tests/test_buffer_io.py | Infinidat/infi.instruct | 69dfd8a35d17f8687581e838ea13e7554f7e5034 | [
"BSD-3-Clause"
] | 4 | 2015-02-24T09:18:00.000Z | 2021-06-16T12:55:19.000Z | tests/test_buffer_io.py | Infinidat/infi.instruct | 69dfd8a35d17f8687581e838ea13e7554f7e5034 | [
"BSD-3-Clause"
] | 4 | 2015-01-07T12:37:54.000Z | 2018-02-08T15:07:17.000Z | import random
from bitarray import bitarray
from infi.instruct._compat import range, PY2
from infi.unittest import TestCase
from infi.instruct.buffer.io_buffer import BitView, BitAwareByteArray
random.seed(0)
class IOBufferTestCase(TestCase):
def test_getitem__byte(self):
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
self.assertEqual(1, buf[0])
self.assertEqual(2, buf[1])
self.assertEqual(1, buf[1.125])
self.assertEqual(4, buf[2])
self.assertEqual(2, buf[2.125])
self.assertEqual(1, buf[2.25])
self.assertEqual(0, buf[2.375])
buf = BitAwareByteArray(bytearray((128, 1)), 0, 2)
self.assertEqual(3, buf[1 - 0.125])
buf = BitAwareByteArray(bytearray((2, 4)), 0.125, 2)
self.assertEqual(1, buf[0])
self.assertEqual(2, buf[1])
self.assertEqual(1, buf[1.125])
def test_getitem__range(self):
buf = BitAwareByteArray(bytearray((1, 2, 4, 128, 1)), 0, 5)
self.assertEqual([1], list(buf[0:1]))
self.assertEqual([1, 2], list(buf[0:2]))
self.assertEqual([2, 4], list(buf[1:3]))
self.assertEqual([2, 4, 128, 1], list(buf[1:]))
self.assertEqual([1, 2], list(buf[:2]))
self.assertEqual([0, 1], list(buf[0.125:2.125]))
self.assertEqual([0, 1], list(buf[0.125:2.125]))
self.assertEqual([128, 1], list(buf[-2:]))
self.assertEqual([1], list(buf[-10:-4]))
self.assertEqual([], list(buf[-10:-5]))
def test_setitem__byte(self):
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0:1] = 3
self.assertEqual([3, 2, 4], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0.125:1.125] = 3
self.assertEqual([7, 2, 4], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0.125:1.125] = 0x83
self.assertEqual([7, 3, 4], list(buf))
def test_setitem__bits(self):
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0:0.125] = 0
self.assertEqual([0, 2, 4], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0.125:0.25] = 1
self.assertEqual([3, 2, 4], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[1.125:1.375] = 3
self.assertEqual([1, 6, 4], list(buf))
def test_setitem__insert_into_empty_range(self):
buf = BitAwareByteArray(bytearray((1, 2, 4)))
buf[0.125:0.125] = BitView((1,), 0, 0.125)
self.assertEqual([3, 4, 8, 0], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0:0] = BitView(bytearray((1,)), 0, 0.125)
self.assertEqual([3, 4, 8, 0], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0.25:0.25] = BitView(bytearray(1), 0, 0.125)
self.assertEqual([1, 4, 8, 0], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0.25:0.25] = BitView(bytearray((1,)), 0, 0.125)
self.assertEqual([5, 4, 8, 0], list(buf))
def test_setitem__smaller_val(self):
ba = bitarray('1001010111', endian='little')
bv = BitAwareByteArray(self._bitarray_to_bytes(ba), stop=float(ba.length()) / 8)
val = bitarray('10', endian='little')
ba[3:7] = val
bv[3.0 / 8:7.0 / 8] = BitView(self._bitarray_to_bytes(val), stop=2.0 / 8)
self.assertEqualBitArrayBitView(ba, bv)
def test_delitem__bits(self):
buf = BitAwareByteArray(bytearray((1, 2, 4)))
del buf[0:1]
self.assertEqual([2, 4], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)))
del buf[1:]
self.assertEqual([1], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)))
del buf[0:0.125]
self.assertEqual([0, 1, 2], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)))
del buf[0:1.125]
self.assertEqual([1, 2], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)))
del buf[0:2.25]
self.assertEqual([1], list(buf))
def test_insert__bytes(self):
buf = BitAwareByteArray(bytearray((1, 2, 4)))
buf.insert(3, bytearray((8, 16)))
self.assertEqual([1, 2, 4, 8, 16], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)))
# 100000000 01000000 00100000
buf.insert(1.25, bytearray((8, 16)))
# 100000000 01 00010000 00001000 000000 00100000
# = 100000000 01000100 00000010 00000000 00100000
# = 1 34 64 0 4
self.assertEqual([1, 34, 64, 0, 4], list(buf))
def test_extend(self):
buf = BitAwareByteArray(bytearray((1, 2, 3)))
buf.extend(bytearray((4, 5)))
self.assertEqual([1, 2, 3, 4, 5], list(buf))
def test_bitview_getitem__single_byte_bitslice(self):
for i in range(0, 256):
for j in range(0, 8):
bv = BitView(bytearray([i]))
self.assertEqual(list(bv[float(j) / 8:])[0], i >> j)
def test_bitview_getitem__single_byte_bitslice_with_bits(self):
for i in range(0, 256):
for j in range(0, 8):
bv = BitView(bytearray([i]))
bv_slice = bv[float(j) / 8:]
ba = bitarray(endian='little')
ba.frombytes(chr(i) if PY2 else bytes([i]))
ba_slice = ba[j:]
self.assertEqualBitArrayBitView(ba_slice, bv_slice)
def test_bitview__positive_slicing(self):
for i in range(0, 100):
ba = self._create_random_bit_array()
bv = BitView(self._bitarray_to_bytes(ba), stop=float(ba.length()) / 8)
self.assertEqualBitArrayBitView(ba, bv)
slice_start_in_bits = random.choice(range(0, ba.length() + 10))
slice_end_in_bits = random.choice(range(slice_start_in_bits, ba.length() + 10))
ba_slice = ba[slice_start_in_bits:slice_end_in_bits]
bv_slice = bv[float(slice_start_in_bits) / 8:float(slice_end_in_bits) / 8]
self.assertEqualBitArrayBitView(ba_slice, bv_slice)
def test_add(self):
ba1 = self._create_random_bit_array()
ba2 = self._create_random_bit_array()
ba = ba1 + ba2
bv1 = BitAwareByteArray(self._bitarray_to_bytes(ba1), stop=float(ba1.length()) / 8)
bv2 = BitAwareByteArray(self._bitarray_to_bytes(ba2), stop=float(ba2.length()) / 8)
bv = bv1 + bv2
self.assertEqualBitArrayBitView(ba, bv)
def test_radd(self):
ba1 = self._create_random_bit_array()
ba2 = self._create_random_bit_array()
ba = ba1 + ba2
bv1 = BitView(self._bitarray_to_bytes(ba1), stop=float(ba1.length()) / 8)
bv2 = BitAwareByteArray(self._bitarray_to_bytes(ba2), stop=float(ba2.length()) / 8)
bv = bv1 + bv2
self.assertEqualBitArrayBitView(ba, bv)
def test_iadd(self):
ba1 = self._create_random_bit_array()
ba2 = self._create_random_bit_array()
bv1 = BitAwareByteArray(self._bitarray_to_bytes(ba1), stop=float(ba1.length()) / 8)
bv2 = BitView(self._bitarray_to_bytes(ba2), stop=float(ba2.length()) / 8)
ba1 += ba2
bv1 += bv2
self.assertEqualBitArrayBitView(ba1, bv1)
def test_iadd_1(self):
a = bytearray(b'\xd3\x94Q`\xb1\x93\x17\xed\xb2W\xa5\x00')
b = bytearray(b'MK\xa3Li\xf9>\x039')
bv1 = BitAwareByteArray(bytearray(a), start=0, stop=11.125)
bv2 = BitView(bytearray(b), start=0, stop=8.75)
bv1 += bv2
a[-1] &= 0x01
a[-1] |= (b[0] & 0x7F) << 1
for i in range(len(b) - 1):
a.append((b[i] >> 7) + ((b[i + 1] & 0x7F) << 1))
self.assertEquals(list(bv1), list(a))
def test_insert_zeros(self):
bv = BitAwareByteArray(bytearray(1), 0, 0.5)
bv[0.5:1.5] = BitView((1,))
self.assertEqualBitArrayBitView(self._bitarray_from_bitstring('000000010000'), bv)
def test_insert_zeros_1(self):
bv = BitAwareByteArray(bytearray((0xFF, 0, 0, 0)))
bv[0:0] = BitView(bytearray((0,)), 0, 0.5)
self.assertEqualBitArrayBitView(self._bitarray_from_bitstring('000000000000000000000000111111110000'), bv)
def test_insert_zeros_2(self):
bv = BitAwareByteArray(bytearray())
bv.zfill(0.5)
bv[0.5:1.5] = BitView([0xFF])
bv.zfill(2.5)
bv[2.5:3.5] = BitView([0])
self.assertEqualBitArrayBitView(self._bitarray_from_bitstring('0000000000000000111111110000'), bv)
def test_bitview_fetch_small(self):
bv = BitView(b"\xFF\x00", 0, 6 * 0.125)
self.assertEquals(bv[0], 63)
def test_array_half_byte(self):
a = BitAwareByteArray(bytearray(b'\x02'), start=0, stop=0.5)
self.assertEquals(a[0], 2)
self.assertEquals(list(a), [2])
def assertEqualBitArrayBitView(self, ba, bv):
self.assertEqual(ba.length(), 8 * bv.length())
ba_bytes = self._bitarray_to_bytes(ba)
if PY2:
bv_bytes = str(bv)
else:
bv_bytes = bv.to_bytes()
self.assertEqual(ba_bytes, bv_bytes)
def _bitarray_from_bitstring(self, str):
return bitarray("".join(reversed(str)), endian='little')
def _create_random_bit_array(self):
length_in_bits = random.randint(0, 8 * 16)
return bitarray("".join(random.choice(('0', '1')) for i in range(length_in_bits)), endian='little')
def _bitarray_to_bytes(self, b):
copy = bitarray(b, endian='little')
copy.fill()
return bytearray(copy.tobytes())
| 38.78 | 114 | 0.588345 | import random
from bitarray import bitarray
from infi.instruct._compat import range, PY2
from infi.unittest import TestCase
from infi.instruct.buffer.io_buffer import BitView, BitAwareByteArray
random.seed(0)
class IOBufferTestCase(TestCase):
def test_getitem__byte(self):
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
self.assertEqual(1, buf[0])
self.assertEqual(2, buf[1])
self.assertEqual(1, buf[1.125])
self.assertEqual(4, buf[2])
self.assertEqual(2, buf[2.125])
self.assertEqual(1, buf[2.25])
self.assertEqual(0, buf[2.375])
buf = BitAwareByteArray(bytearray((128, 1)), 0, 2)
self.assertEqual(3, buf[1 - 0.125])
buf = BitAwareByteArray(bytearray((2, 4)), 0.125, 2)
self.assertEqual(1, buf[0])
self.assertEqual(2, buf[1])
self.assertEqual(1, buf[1.125])
def test_getitem__range(self):
buf = BitAwareByteArray(bytearray((1, 2, 4, 128, 1)), 0, 5)
self.assertEqual([1], list(buf[0:1]))
self.assertEqual([1, 2], list(buf[0:2]))
self.assertEqual([2, 4], list(buf[1:3]))
self.assertEqual([2, 4, 128, 1], list(buf[1:]))
self.assertEqual([1, 2], list(buf[:2]))
self.assertEqual([0, 1], list(buf[0.125:2.125]))
self.assertEqual([0, 1], list(buf[0.125:2.125]))
self.assertEqual([128, 1], list(buf[-2:]))
self.assertEqual([1], list(buf[-10:-4]))
self.assertEqual([], list(buf[-10:-5]))
def test_setitem__byte(self):
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0:1] = 3
self.assertEqual([3, 2, 4], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0.125:1.125] = 3
self.assertEqual([7, 2, 4], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0.125:1.125] = 0x83
self.assertEqual([7, 3, 4], list(buf))
def test_setitem__bits(self):
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0:0.125] = 0
self.assertEqual([0, 2, 4], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0.125:0.25] = 1
self.assertEqual([3, 2, 4], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[1.125:1.375] = 3
self.assertEqual([1, 6, 4], list(buf))
def test_setitem__insert_into_empty_range(self):
buf = BitAwareByteArray(bytearray((1, 2, 4)))
buf[0.125:0.125] = BitView((1,), 0, 0.125)
self.assertEqual([3, 4, 8, 0], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0:0] = BitView(bytearray((1,)), 0, 0.125)
self.assertEqual([3, 4, 8, 0], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0.25:0.25] = BitView(bytearray(1), 0, 0.125)
self.assertEqual([1, 4, 8, 0], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)), 0, 3)
buf[0.25:0.25] = BitView(bytearray((1,)), 0, 0.125)
self.assertEqual([5, 4, 8, 0], list(buf))
def test_setitem__smaller_val(self):
ba = bitarray('1001010111', endian='little')
bv = BitAwareByteArray(self._bitarray_to_bytes(ba), stop=float(ba.length()) / 8)
val = bitarray('10', endian='little')
ba[3:7] = val
bv[3.0 / 8:7.0 / 8] = BitView(self._bitarray_to_bytes(val), stop=2.0 / 8)
self.assertEqualBitArrayBitView(ba, bv)
def test_delitem__bits(self):
buf = BitAwareByteArray(bytearray((1, 2, 4)))
del buf[0:1]
self.assertEqual([2, 4], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)))
del buf[1:]
self.assertEqual([1], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)))
del buf[0:0.125]
self.assertEqual([0, 1, 2], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)))
del buf[0:1.125]
self.assertEqual([1, 2], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)))
del buf[0:2.25]
self.assertEqual([1], list(buf))
def test_insert__bytes(self):
buf = BitAwareByteArray(bytearray((1, 2, 4)))
buf.insert(3, bytearray((8, 16)))
self.assertEqual([1, 2, 4, 8, 16], list(buf))
buf = BitAwareByteArray(bytearray((1, 2, 4)))
buf.insert(1.25, bytearray((8, 16)))
self.assertEqual([1, 34, 64, 0, 4], list(buf))
def test_extend(self):
buf = BitAwareByteArray(bytearray((1, 2, 3)))
buf.extend(bytearray((4, 5)))
self.assertEqual([1, 2, 3, 4, 5], list(buf))
def test_bitview_getitem__single_byte_bitslice(self):
for i in range(0, 256):
for j in range(0, 8):
bv = BitView(bytearray([i]))
self.assertEqual(list(bv[float(j) / 8:])[0], i >> j)
def test_bitview_getitem__single_byte_bitslice_with_bits(self):
for i in range(0, 256):
for j in range(0, 8):
bv = BitView(bytearray([i]))
bv_slice = bv[float(j) / 8:]
ba = bitarray(endian='little')
ba.frombytes(chr(i) if PY2 else bytes([i]))
ba_slice = ba[j:]
self.assertEqualBitArrayBitView(ba_slice, bv_slice)
def test_bitview__positive_slicing(self):
for i in range(0, 100):
ba = self._create_random_bit_array()
bv = BitView(self._bitarray_to_bytes(ba), stop=float(ba.length()) / 8)
self.assertEqualBitArrayBitView(ba, bv)
slice_start_in_bits = random.choice(range(0, ba.length() + 10))
slice_end_in_bits = random.choice(range(slice_start_in_bits, ba.length() + 10))
ba_slice = ba[slice_start_in_bits:slice_end_in_bits]
bv_slice = bv[float(slice_start_in_bits) / 8:float(slice_end_in_bits) / 8]
self.assertEqualBitArrayBitView(ba_slice, bv_slice)
def test_add(self):
ba1 = self._create_random_bit_array()
ba2 = self._create_random_bit_array()
ba = ba1 + ba2
bv1 = BitAwareByteArray(self._bitarray_to_bytes(ba1), stop=float(ba1.length()) / 8)
bv2 = BitAwareByteArray(self._bitarray_to_bytes(ba2), stop=float(ba2.length()) / 8)
bv = bv1 + bv2
self.assertEqualBitArrayBitView(ba, bv)
def test_radd(self):
ba1 = self._create_random_bit_array()
ba2 = self._create_random_bit_array()
ba = ba1 + ba2
bv1 = BitView(self._bitarray_to_bytes(ba1), stop=float(ba1.length()) / 8)
bv2 = BitAwareByteArray(self._bitarray_to_bytes(ba2), stop=float(ba2.length()) / 8)
bv = bv1 + bv2
self.assertEqualBitArrayBitView(ba, bv)
def test_iadd(self):
ba1 = self._create_random_bit_array()
ba2 = self._create_random_bit_array()
bv1 = BitAwareByteArray(self._bitarray_to_bytes(ba1), stop=float(ba1.length()) / 8)
bv2 = BitView(self._bitarray_to_bytes(ba2), stop=float(ba2.length()) / 8)
ba1 += ba2
bv1 += bv2
self.assertEqualBitArrayBitView(ba1, bv1)
def test_iadd_1(self):
a = bytearray(b'\xd3\x94Q`\xb1\x93\x17\xed\xb2W\xa5\x00')
b = bytearray(b'MK\xa3Li\xf9>\x039')
bv1 = BitAwareByteArray(bytearray(a), start=0, stop=11.125)
bv2 = BitView(bytearray(b), start=0, stop=8.75)
bv1 += bv2
a[-1] &= 0x01
a[-1] |= (b[0] & 0x7F) << 1
for i in range(len(b) - 1):
a.append((b[i] >> 7) + ((b[i + 1] & 0x7F) << 1))
self.assertEquals(list(bv1), list(a))
def test_insert_zeros(self):
bv = BitAwareByteArray(bytearray(1), 0, 0.5)
bv[0.5:1.5] = BitView((1,))
self.assertEqualBitArrayBitView(self._bitarray_from_bitstring('000000010000'), bv)
def test_insert_zeros_1(self):
bv = BitAwareByteArray(bytearray((0xFF, 0, 0, 0)))
bv[0:0] = BitView(bytearray((0,)), 0, 0.5)
self.assertEqualBitArrayBitView(self._bitarray_from_bitstring('000000000000000000000000111111110000'), bv)
def test_insert_zeros_2(self):
bv = BitAwareByteArray(bytearray())
bv.zfill(0.5)
bv[0.5:1.5] = BitView([0xFF])
bv.zfill(2.5)
bv[2.5:3.5] = BitView([0])
self.assertEqualBitArrayBitView(self._bitarray_from_bitstring('0000000000000000111111110000'), bv)
def test_bitview_fetch_small(self):
bv = BitView(b"\xFF\x00", 0, 6 * 0.125)
self.assertEquals(bv[0], 63)
def test_array_half_byte(self):
a = BitAwareByteArray(bytearray(b'\x02'), start=0, stop=0.5)
self.assertEquals(a[0], 2)
self.assertEquals(list(a), [2])
def assertEqualBitArrayBitView(self, ba, bv):
self.assertEqual(ba.length(), 8 * bv.length())
ba_bytes = self._bitarray_to_bytes(ba)
if PY2:
bv_bytes = str(bv)
else:
bv_bytes = bv.to_bytes()
self.assertEqual(ba_bytes, bv_bytes)
def _bitarray_from_bitstring(self, str):
return bitarray("".join(reversed(str)), endian='little')
def _create_random_bit_array(self):
length_in_bits = random.randint(0, 8 * 16)
return bitarray("".join(random.choice(('0', '1')) for i in range(length_in_bits)), endian='little')
def _bitarray_to_bytes(self, b):
copy = bitarray(b, endian='little')
copy.fill()
return bytearray(copy.tobytes())
| true | true |
1c2dce5bfdbcf0694685a6a9b512196a77735640 | 59 | py | Python | hackerrank/contest/30-days-of-code/day-0.py | everyevery/programming_study | ff35e97e13953e4d7a26591f7cdb301d3e8e36c6 | [
"MIT"
] | null | null | null | hackerrank/contest/30-days-of-code/day-0.py | everyevery/programming_study | ff35e97e13953e4d7a26591f7cdb301d3e8e36c6 | [
"MIT"
] | null | null | null | hackerrank/contest/30-days-of-code/day-0.py | everyevery/programming_study | ff35e97e13953e4d7a26591f7cdb301d3e8e36c6 | [
"MIT"
] | 1 | 2017-04-01T21:34:23.000Z | 2017-04-01T21:34:23.000Z | print("Hello World.")
print("Welcome to 30 Days of Code.")
| 19.666667 | 36 | 0.694915 | print("Hello World.")
print("Welcome to 30 Days of Code.")
| true | true |
1c2dcf6bc1e94dd061c0dcbff3ca0362a7c8ab4f | 48,202 | bzl | Python | examples/checked_in_requirements_bzl/requirements.bzl | therc/rules_python | d2716fb59f8e60ccc2af347859ad162a067e6d13 | [
"Apache-2.0"
] | 1 | 2019-02-11T04:46:51.000Z | 2019-02-11T04:46:51.000Z | examples/checked_in_requirements_bzl/requirements.bzl | therc/rules_python | d2716fb59f8e60ccc2af347859ad162a067e6d13 | [
"Apache-2.0"
] | 2 | 2018-02-22T11:09:50.000Z | 2018-04-20T05:28:20.000Z | examples/checked_in_requirements_bzl/requirements.bzl | mirandaconrado/rules_python | 670a7b7357024fb4803022a66c977931f12bac6c | [
"Apache-2.0"
] | null | null | null | # Install pip requirements.
#
# Generated from /home/lpeltonen/go/src/github.com/bazelbuild/rules_python/examples/checked_in_requirements_bzl/requirements.txt
# Generated from /home/lpeltonen/go/src/github.com/bazelbuild/rules_python/examples/checked_in_requirements_bzl/requirements-2.txt
load("@examples_checked_in_requirements_bzl//python:whl.bzl", "whl_library")
_requirements = {
"atomicwrites": "@examples_checked_in_requirements_bzl__atomicwrites_1_2_1//:pkg",
"atomicwrites:dirty": "@examples_checked_in_requirements_bzl__atomicwrites_1_2_1_dirty//:pkg",
"attrs": "@examples_checked_in_requirements_bzl__attrs_18_2_0//:pkg",
"attrs:dirty": "@examples_checked_in_requirements_bzl__attrs_18_2_0_dirty//:pkg",
"attrs[dev]": "@examples_checked_in_requirements_bzl__attrs_18_2_0//:dev",
"attrs:dirty[dev]": "@examples_checked_in_requirements_bzl__attrs_18_2_0_dirty//:dev",
"attrs[docs]": "@examples_checked_in_requirements_bzl__attrs_18_2_0//:docs",
"attrs:dirty[docs]": "@examples_checked_in_requirements_bzl__attrs_18_2_0_dirty//:docs",
"attrs[tests]": "@examples_checked_in_requirements_bzl__attrs_18_2_0//:tests",
"attrs:dirty[tests]": "@examples_checked_in_requirements_bzl__attrs_18_2_0_dirty//:tests",
"backports.ssl-match-hostname": "@examples_checked_in_requirements_bzl__backports_ssl_match_hostname_3_5_0_1//:pkg",
"backports.ssl-match-hostname:dirty": "@examples_checked_in_requirements_bzl__backports_ssl_match_hostname_3_5_0_1_dirty//:pkg",
"botocore": "@examples_checked_in_requirements_bzl__botocore_1_12_5//:pkg",
"botocore:dirty": "@examples_checked_in_requirements_bzl__botocore_1_12_5_dirty//:pkg",
"cachetools": "@examples_checked_in_requirements_bzl__cachetools_2_1_0//:pkg",
"cachetools:dirty": "@examples_checked_in_requirements_bzl__cachetools_2_1_0_dirty//:pkg",
"certifi": "@examples_checked_in_requirements_bzl__certifi_2018_8_24//:pkg",
"certifi:dirty": "@examples_checked_in_requirements_bzl__certifi_2018_8_24_dirty//:pkg",
"chardet": "@examples_checked_in_requirements_bzl__chardet_3_0_4//:pkg",
"chardet:dirty": "@examples_checked_in_requirements_bzl__chardet_3_0_4_dirty//:pkg",
"dill": "@examples_checked_in_requirements_bzl__dill_0_2_8_2//:pkg",
"dill:dirty": "@examples_checked_in_requirements_bzl__dill_0_2_8_2_dirty//:pkg",
"docutils": "@examples_checked_in_requirements_bzl__docutils_0_14//:pkg",
"docutils:dirty": "@examples_checked_in_requirements_bzl__docutils_0_14_dirty//:pkg",
"enum34": "@examples_checked_in_requirements_bzl__enum34_1_1_6//:pkg",
"enum34:dirty": "@examples_checked_in_requirements_bzl__enum34_1_1_6_dirty//:pkg",
"funcsigs": "@examples_checked_in_requirements_bzl__funcsigs_1_0_2//:pkg",
"funcsigs:dirty": "@examples_checked_in_requirements_bzl__funcsigs_1_0_2_dirty//:pkg",
"future": "@examples_checked_in_requirements_bzl__future_0_16_0//:pkg",
"future:dirty": "@examples_checked_in_requirements_bzl__future_0_16_0_dirty//:pkg",
"futures": "@examples_checked_in_requirements_bzl__futures_3_2_0//:pkg",
"futures:dirty": "@examples_checked_in_requirements_bzl__futures_3_2_0_dirty//:pkg",
"gapic-google-cloud-datastore-v1": "@examples_checked_in_requirements_bzl__gapic_google_cloud_datastore_v1_0_15_3//:pkg",
"gapic-google-cloud-datastore-v1:dirty": "@examples_checked_in_requirements_bzl__gapic_google_cloud_datastore_v1_0_15_3_dirty//:pkg",
"gapic-google-cloud-error-reporting-v1beta1": "@examples_checked_in_requirements_bzl__gapic_google_cloud_error_reporting_v1beta1_0_15_3//:pkg",
"gapic-google-cloud-error-reporting-v1beta1:dirty": "@examples_checked_in_requirements_bzl__gapic_google_cloud_error_reporting_v1beta1_0_15_3_dirty//:pkg",
"gapic-google-cloud-logging-v2": "@examples_checked_in_requirements_bzl__gapic_google_cloud_logging_v2_0_91_3//:pkg",
"gapic-google-cloud-logging-v2:dirty": "@examples_checked_in_requirements_bzl__gapic_google_cloud_logging_v2_0_91_3_dirty//:pkg",
"google-api-core": "@examples_checked_in_requirements_bzl__google_api_core_0_1_4//:pkg",
"google-api-core:dirty": "@examples_checked_in_requirements_bzl__google_api_core_0_1_4_dirty//:pkg",
"google-api-core[grpc]": "@examples_checked_in_requirements_bzl__google_api_core_0_1_4//:grpc",
"google-api-core:dirty[grpc]": "@examples_checked_in_requirements_bzl__google_api_core_0_1_4_dirty//:grpc",
"google-auth": "@examples_checked_in_requirements_bzl__google_auth_1_5_1//:pkg",
"google-auth:dirty": "@examples_checked_in_requirements_bzl__google_auth_1_5_1_dirty//:pkg",
"google-cloud": "@examples_checked_in_requirements_bzl__google_cloud_0_29_0//:pkg",
"google-cloud:dirty": "@examples_checked_in_requirements_bzl__google_cloud_0_29_0_dirty//:pkg",
"google-cloud-bigquery": "@examples_checked_in_requirements_bzl__google_cloud_bigquery_0_28_0//:pkg",
"google-cloud-bigquery:dirty": "@examples_checked_in_requirements_bzl__google_cloud_bigquery_0_28_0_dirty//:pkg",
"google-cloud-bigtable": "@examples_checked_in_requirements_bzl__google_cloud_bigtable_0_28_1//:pkg",
"google-cloud-bigtable:dirty": "@examples_checked_in_requirements_bzl__google_cloud_bigtable_0_28_1_dirty//:pkg",
"google-cloud-core": "@examples_checked_in_requirements_bzl__google_cloud_core_0_28_1//:pkg",
"google-cloud-core:dirty": "@examples_checked_in_requirements_bzl__google_cloud_core_0_28_1_dirty//:pkg",
"google-cloud-core[grpc]": "@examples_checked_in_requirements_bzl__google_cloud_core_0_28_1//:grpc",
"google-cloud-core:dirty[grpc]": "@examples_checked_in_requirements_bzl__google_cloud_core_0_28_1_dirty//:grpc",
"google-cloud-datastore": "@examples_checked_in_requirements_bzl__google_cloud_datastore_1_4_0//:pkg",
"google-cloud-datastore:dirty": "@examples_checked_in_requirements_bzl__google_cloud_datastore_1_4_0_dirty//:pkg",
"google-cloud-dns": "@examples_checked_in_requirements_bzl__google_cloud_dns_0_28_0//:pkg",
"google-cloud-dns:dirty": "@examples_checked_in_requirements_bzl__google_cloud_dns_0_28_0_dirty//:pkg",
"google-cloud-error-reporting": "@examples_checked_in_requirements_bzl__google_cloud_error_reporting_0_28_0//:pkg",
"google-cloud-error-reporting:dirty": "@examples_checked_in_requirements_bzl__google_cloud_error_reporting_0_28_0_dirty//:pkg",
"google-cloud-firestore": "@examples_checked_in_requirements_bzl__google_cloud_firestore_0_28_0//:pkg",
"google-cloud-firestore:dirty": "@examples_checked_in_requirements_bzl__google_cloud_firestore_0_28_0_dirty//:pkg",
"google-cloud-language": "@examples_checked_in_requirements_bzl__google_cloud_language_0_31_0//:pkg",
"google-cloud-language:dirty": "@examples_checked_in_requirements_bzl__google_cloud_language_0_31_0_dirty//:pkg",
"google-cloud-logging": "@examples_checked_in_requirements_bzl__google_cloud_logging_1_4_0//:pkg",
"google-cloud-logging:dirty": "@examples_checked_in_requirements_bzl__google_cloud_logging_1_4_0_dirty//:pkg",
"google-cloud-monitoring": "@examples_checked_in_requirements_bzl__google_cloud_monitoring_0_28_1//:pkg",
"google-cloud-monitoring:dirty": "@examples_checked_in_requirements_bzl__google_cloud_monitoring_0_28_1_dirty//:pkg",
"google-cloud-pubsub": "@examples_checked_in_requirements_bzl__google_cloud_pubsub_0_29_4//:pkg",
"google-cloud-pubsub:dirty": "@examples_checked_in_requirements_bzl__google_cloud_pubsub_0_29_4_dirty//:pkg",
"google-cloud-resource-manager": "@examples_checked_in_requirements_bzl__google_cloud_resource_manager_0_28_1//:pkg",
"google-cloud-resource-manager:dirty": "@examples_checked_in_requirements_bzl__google_cloud_resource_manager_0_28_1_dirty//:pkg",
"google-cloud-runtimeconfig": "@examples_checked_in_requirements_bzl__google_cloud_runtimeconfig_0_28_1//:pkg",
"google-cloud-runtimeconfig:dirty": "@examples_checked_in_requirements_bzl__google_cloud_runtimeconfig_0_28_1_dirty//:pkg",
"google-cloud-spanner": "@examples_checked_in_requirements_bzl__google_cloud_spanner_0_29_0//:pkg",
"google-cloud-spanner:dirty": "@examples_checked_in_requirements_bzl__google_cloud_spanner_0_29_0_dirty//:pkg",
"google-cloud-speech": "@examples_checked_in_requirements_bzl__google_cloud_speech_0_30_0//:pkg",
"google-cloud-speech:dirty": "@examples_checked_in_requirements_bzl__google_cloud_speech_0_30_0_dirty//:pkg",
"google-cloud-storage": "@examples_checked_in_requirements_bzl__google_cloud_storage_1_6_0//:pkg",
"google-cloud-storage:dirty": "@examples_checked_in_requirements_bzl__google_cloud_storage_1_6_0_dirty//:pkg",
"google-cloud-trace": "@examples_checked_in_requirements_bzl__google_cloud_trace_0_16_0//:pkg",
"google-cloud-trace:dirty": "@examples_checked_in_requirements_bzl__google_cloud_trace_0_16_0_dirty//:pkg",
"google-cloud-translate": "@examples_checked_in_requirements_bzl__google_cloud_translate_1_3_1//:pkg",
"google-cloud-translate:dirty": "@examples_checked_in_requirements_bzl__google_cloud_translate_1_3_1_dirty//:pkg",
"google-cloud-videointelligence": "@examples_checked_in_requirements_bzl__google_cloud_videointelligence_0_28_0//:pkg",
"google-cloud-videointelligence:dirty": "@examples_checked_in_requirements_bzl__google_cloud_videointelligence_0_28_0_dirty//:pkg",
"google-cloud-vision": "@examples_checked_in_requirements_bzl__google_cloud_vision_0_28_0//:pkg",
"google-cloud-vision:dirty": "@examples_checked_in_requirements_bzl__google_cloud_vision_0_28_0_dirty//:pkg",
"google-gax": "@examples_checked_in_requirements_bzl__google_gax_0_15_16//:pkg",
"google-gax:dirty": "@examples_checked_in_requirements_bzl__google_gax_0_15_16_dirty//:pkg",
"google-resumable-media": "@examples_checked_in_requirements_bzl__google_resumable_media_0_3_1//:pkg",
"google-resumable-media:dirty": "@examples_checked_in_requirements_bzl__google_resumable_media_0_3_1_dirty//:pkg",
"google-resumable-media[requests]": "@examples_checked_in_requirements_bzl__google_resumable_media_0_3_1//:requests",
"google-resumable-media:dirty[requests]": "@examples_checked_in_requirements_bzl__google_resumable_media_0_3_1_dirty//:requests",
"googleapis-common-protos": "@examples_checked_in_requirements_bzl__googleapis_common_protos_1_5_3//:pkg",
"googleapis-common-protos:dirty": "@examples_checked_in_requirements_bzl__googleapis_common_protos_1_5_3_dirty//:pkg",
"googleapis-common-protos[grpc]": "@examples_checked_in_requirements_bzl__googleapis_common_protos_1_5_3//:grpc",
"googleapis-common-protos:dirty[grpc]": "@examples_checked_in_requirements_bzl__googleapis_common_protos_1_5_3_dirty//:grpc",
"grpc-google-iam-v1": "@examples_checked_in_requirements_bzl__grpc_google_iam_v1_0_11_4//:pkg",
"grpc-google-iam-v1:dirty": "@examples_checked_in_requirements_bzl__grpc_google_iam_v1_0_11_4_dirty//:pkg",
"grpcio": "@examples_checked_in_requirements_bzl__grpcio_1_15_0//:pkg",
"grpcio:dirty": "@examples_checked_in_requirements_bzl__grpcio_1_15_0_dirty//:pkg",
"h5py": "@examples_checked_in_requirements_bzl__h5py_2_8_0//:pkg",
"h5py:dirty": "@examples_checked_in_requirements_bzl__h5py_2_8_0_dirty//:pkg",
"httplib2": "@examples_checked_in_requirements_bzl__httplib2_0_11_3//:pkg",
"httplib2:dirty": "@examples_checked_in_requirements_bzl__httplib2_0_11_3_dirty//:pkg",
"idna": "@examples_checked_in_requirements_bzl__idna_2_7//:pkg",
"idna:dirty": "@examples_checked_in_requirements_bzl__idna_2_7_dirty//:pkg",
"jmespath": "@examples_checked_in_requirements_bzl__jmespath_0_9_3//:pkg",
"jmespath:dirty": "@examples_checked_in_requirements_bzl__jmespath_0_9_3_dirty//:pkg",
"keras": "@examples_checked_in_requirements_bzl__Keras_2_2_2//:pkg",
"keras:dirty": "@examples_checked_in_requirements_bzl__Keras_2_2_2_dirty//:pkg",
"keras[tests]": "@examples_checked_in_requirements_bzl__Keras_2_2_2//:tests",
"keras:dirty[tests]": "@examples_checked_in_requirements_bzl__Keras_2_2_2_dirty//:tests",
"keras[visualize]": "@examples_checked_in_requirements_bzl__Keras_2_2_2//:visualize",
"keras:dirty[visualize]": "@examples_checked_in_requirements_bzl__Keras_2_2_2_dirty//:visualize",
"keras-applications": "@examples_checked_in_requirements_bzl__Keras_Applications_1_0_4//:pkg",
"keras-applications:dirty": "@examples_checked_in_requirements_bzl__Keras_Applications_1_0_4_dirty//:pkg",
"keras-applications[tests]": "@examples_checked_in_requirements_bzl__Keras_Applications_1_0_4//:tests",
"keras-applications:dirty[tests]": "@examples_checked_in_requirements_bzl__Keras_Applications_1_0_4_dirty//:tests",
"keras-preprocessing": "@examples_checked_in_requirements_bzl__Keras_Preprocessing_1_0_2//:pkg",
"keras-preprocessing:dirty": "@examples_checked_in_requirements_bzl__Keras_Preprocessing_1_0_2_dirty//:pkg",
"keras-preprocessing[tests]": "@examples_checked_in_requirements_bzl__Keras_Preprocessing_1_0_2//:tests",
"keras-preprocessing:dirty[tests]": "@examples_checked_in_requirements_bzl__Keras_Preprocessing_1_0_2_dirty//:tests",
"mock": "@examples_checked_in_requirements_bzl__mock_2_0_0//:pkg",
"mock:dirty": "@examples_checked_in_requirements_bzl__mock_2_0_0_dirty//:pkg",
"more-itertools": "@examples_checked_in_requirements_bzl__more_itertools_4_3_0//:pkg",
"more-itertools:dirty": "@examples_checked_in_requirements_bzl__more_itertools_4_3_0_dirty//:pkg",
"numpy": "@examples_checked_in_requirements_bzl__numpy_1_14_0//:pkg",
"numpy:dirty": "@examples_checked_in_requirements_bzl__numpy_1_14_0_dirty//:pkg",
"oauth2client": "@examples_checked_in_requirements_bzl__oauth2client_3_0_0//:pkg",
"oauth2client:dirty": "@examples_checked_in_requirements_bzl__oauth2client_3_0_0_dirty//:pkg",
"pathlib2": "@examples_checked_in_requirements_bzl__pathlib2_2_3_2//:pkg",
"pathlib2:dirty": "@examples_checked_in_requirements_bzl__pathlib2_2_3_2_dirty//:pkg",
"pbr": "@examples_checked_in_requirements_bzl__pbr_4_2_0//:pkg",
"pbr:dirty": "@examples_checked_in_requirements_bzl__pbr_4_2_0_dirty//:pkg",
"pip": "@examples_checked_in_requirements_bzl__pip_9_0_0//:pkg",
"pip:dirty": "@examples_checked_in_requirements_bzl__pip_9_0_0_dirty//:pkg",
"pluggy": "@examples_checked_in_requirements_bzl__pluggy_0_7_1//:pkg",
"pluggy:dirty": "@examples_checked_in_requirements_bzl__pluggy_0_7_1_dirty//:pkg",
"ply": "@examples_checked_in_requirements_bzl__ply_3_8//:pkg",
"ply:dirty": "@examples_checked_in_requirements_bzl__ply_3_8_dirty//:pkg",
"proto-google-cloud-datastore-v1": "@examples_checked_in_requirements_bzl__proto_google_cloud_datastore_v1_0_90_4//:pkg",
"proto-google-cloud-datastore-v1:dirty": "@examples_checked_in_requirements_bzl__proto_google_cloud_datastore_v1_0_90_4_dirty//:pkg",
"proto-google-cloud-datastore-v1[grpc]": "@examples_checked_in_requirements_bzl__proto_google_cloud_datastore_v1_0_90_4//:grpc",
"proto-google-cloud-datastore-v1:dirty[grpc]": "@examples_checked_in_requirements_bzl__proto_google_cloud_datastore_v1_0_90_4_dirty//:grpc",
"proto-google-cloud-error-reporting-v1beta1": "@examples_checked_in_requirements_bzl__proto_google_cloud_error_reporting_v1beta1_0_15_3//:pkg",
"proto-google-cloud-error-reporting-v1beta1:dirty": "@examples_checked_in_requirements_bzl__proto_google_cloud_error_reporting_v1beta1_0_15_3_dirty//:pkg",
"proto-google-cloud-error-reporting-v1beta1[grpc]": "@examples_checked_in_requirements_bzl__proto_google_cloud_error_reporting_v1beta1_0_15_3//:grpc",
"proto-google-cloud-error-reporting-v1beta1:dirty[grpc]": "@examples_checked_in_requirements_bzl__proto_google_cloud_error_reporting_v1beta1_0_15_3_dirty//:grpc",
"proto-google-cloud-logging-v2": "@examples_checked_in_requirements_bzl__proto_google_cloud_logging_v2_0_91_3//:pkg",
"proto-google-cloud-logging-v2:dirty": "@examples_checked_in_requirements_bzl__proto_google_cloud_logging_v2_0_91_3_dirty//:pkg",
"proto-google-cloud-logging-v2[grpc]": "@examples_checked_in_requirements_bzl__proto_google_cloud_logging_v2_0_91_3//:grpc",
"proto-google-cloud-logging-v2:dirty[grpc]": "@examples_checked_in_requirements_bzl__proto_google_cloud_logging_v2_0_91_3_dirty//:grpc",
"protobuf": "@examples_checked_in_requirements_bzl__protobuf_3_6_1//:pkg",
"protobuf:dirty": "@examples_checked_in_requirements_bzl__protobuf_3_6_1_dirty//:pkg",
"psutil": "@examples_checked_in_requirements_bzl__psutil_5_4_7//:pkg",
"psutil:dirty": "@examples_checked_in_requirements_bzl__psutil_5_4_7_dirty//:pkg",
"psutil[enum]": "@examples_checked_in_requirements_bzl__psutil_5_4_7//:enum",
"psutil:dirty[enum]": "@examples_checked_in_requirements_bzl__psutil_5_4_7_dirty//:enum",
"py": "@examples_checked_in_requirements_bzl__py_1_6_0//:pkg",
"py:dirty": "@examples_checked_in_requirements_bzl__py_1_6_0_dirty//:pkg",
"pyasn1": "@examples_checked_in_requirements_bzl__pyasn1_0_4_4//:pkg",
"pyasn1:dirty": "@examples_checked_in_requirements_bzl__pyasn1_0_4_4_dirty//:pkg",
"pyasn1-modules": "@examples_checked_in_requirements_bzl__pyasn1_modules_0_2_2//:pkg",
"pyasn1-modules:dirty": "@examples_checked_in_requirements_bzl__pyasn1_modules_0_2_2_dirty//:pkg",
"pytest": "@examples_checked_in_requirements_bzl__pytest_3_8_0//:pkg",
"pytest:dirty": "@examples_checked_in_requirements_bzl__pytest_3_8_0_dirty//:pkg",
"pytest-mock": "@examples_checked_in_requirements_bzl__pytest_mock_1_6_2//:pkg",
"pytest-mock:dirty": "@examples_checked_in_requirements_bzl__pytest_mock_1_6_2_dirty//:pkg",
"python-dateutil": "@examples_checked_in_requirements_bzl__python_dateutil_2_7_3//:pkg",
"python-dateutil:dirty": "@examples_checked_in_requirements_bzl__python_dateutil_2_7_3_dirty//:pkg",
"pytz": "@examples_checked_in_requirements_bzl__pytz_2018_5//:pkg",
"pytz:dirty": "@examples_checked_in_requirements_bzl__pytz_2018_5_dirty//:pkg",
"pyyaml": "@examples_checked_in_requirements_bzl__PyYAML_3_13//:pkg",
"pyyaml:dirty": "@examples_checked_in_requirements_bzl__PyYAML_3_13_dirty//:pkg",
"requests": "@examples_checked_in_requirements_bzl__requests_2_19_1//:pkg",
"requests:dirty": "@examples_checked_in_requirements_bzl__requests_2_19_1_dirty//:pkg",
"rsa": "@examples_checked_in_requirements_bzl__rsa_4_0//:pkg",
"rsa:dirty": "@examples_checked_in_requirements_bzl__rsa_4_0_dirty//:pkg",
"scandir": "@examples_checked_in_requirements_bzl__scandir_1_9_0//:pkg",
"scandir:dirty": "@examples_checked_in_requirements_bzl__scandir_1_9_0_dirty//:pkg",
"scikit-learn": "@examples_checked_in_requirements_bzl__scikit_learn_0_17_1//:pkg",
"scikit-learn:dirty": "@examples_checked_in_requirements_bzl__scikit_learn_0_17_1_dirty//:pkg",
"scipy": "@examples_checked_in_requirements_bzl__scipy_0_17_1//:pkg",
"scipy:dirty": "@examples_checked_in_requirements_bzl__scipy_0_17_1_dirty//:pkg",
"setuptools": "@examples_checked_in_requirements_bzl__setuptools_40_4_0//:pkg",
"setuptools:dirty": "@examples_checked_in_requirements_bzl__setuptools_40_4_0_dirty//:pkg",
"setuptools[certs]": "@examples_checked_in_requirements_bzl__setuptools_40_4_0//:certs",
"setuptools:dirty[certs]": "@examples_checked_in_requirements_bzl__setuptools_40_4_0_dirty//:certs",
"setuptools[ssl]": "@examples_checked_in_requirements_bzl__setuptools_40_4_0//:ssl",
"setuptools:dirty[ssl]": "@examples_checked_in_requirements_bzl__setuptools_40_4_0_dirty//:ssl",
"setuptools-scm": "@examples_checked_in_requirements_bzl__setuptools_scm_2_0_0//:pkg",
"setuptools-scm:dirty": "@examples_checked_in_requirements_bzl__setuptools_scm_2_0_0_dirty//:pkg",
"six": "@examples_checked_in_requirements_bzl__six_1_11_0//:pkg",
"six:dirty": "@examples_checked_in_requirements_bzl__six_1_11_0_dirty//:pkg",
"urllib3": "@examples_checked_in_requirements_bzl__urllib3_1_23//:pkg",
"urllib3:dirty": "@examples_checked_in_requirements_bzl__urllib3_1_23_dirty//:pkg"
}
# Flat collection of every pip-provided Bazel label above, including the
# ":dirty" variants and extras entries — useful as a catch-all deps list.
all_requirements = _requirements.values()
# Public alias exposing the full name -> label mapping to loading BUILD files.
requirements_map = _requirements
def requirement_repo(name):
    """Return the repository prefix of *name*'s pip label (everything before the colon)."""
    label = requirement(name)
    return label.split(":")[0]
def requirement(name, binary=None):
    """Resolve a pip package name to its checked-in Bazel label.

    Lookup is case-insensitive. When *binary* is given, the returned label
    points at that entry-point target instead of the package target. Fails
    the build if the package was not pinned in this file.
    """
    normalized = name.lower()
    if normalized not in _requirements:
        fail("Could not find pip-provided dependency: '%s'" % name)
    label = _requirements[normalized]
    if not binary:
        return label
    # Rewrite "@repo//:pkg" into "@repo//:entrypoint_<binary>".
    return label.split(":")[0] + ":entrypoint_" + binary
def pip_install():
    """Instantiate one whl_library repository per pinned wheel.

    Generated code: the `all_libs` table below is checked-in output of the
    requirements-locking tool, not hand-maintained. Each entry maps a pip
    package name to the attributes for its `whl_library` repository rule:
    the Bazel repository name, the pinned version, the wheel file name,
    and (where present) declared `extras` and the package's
    `transitive_runtime_deps` closure.
    """
    # NOTE(review): keys here must stay in sync with the `_requirements`
    # label map defined earlier in this file — both are emitted together
    # by the same generator.
    all_libs = {
        "atomicwrites": {
            "name": "examples_checked_in_requirements_bzl__atomicwrites_1_2_1",
            "version": "1.2.1",
            "wheel_name": "atomicwrites-1.2.1-py2.py3-none-any.whl",
        },
        "attrs": {
            "name": "examples_checked_in_requirements_bzl__attrs_18_2_0",
            "version": "18.2.0",
            "wheel_name": "attrs-18.2.0-py2.py3-none-any.whl",
            "extras": ["dev", "docs", "tests"],
        },
        "backports.ssl-match-hostname": {
            "name": "examples_checked_in_requirements_bzl__backports_ssl_match_hostname_3_5_0_1",
            "version": "3.5.0.1",
            "wheel_name": "backports.ssl_match_hostname-3.5.0.1-py2-none-any.whl",
        },
        "botocore": {
            "name": "examples_checked_in_requirements_bzl__botocore_1_12_5",
            "version": "1.12.5",
            "wheel_name": "botocore-1.12.5-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["docutils", "jmespath", "python-dateutil", "six", "urllib3"],
        },
        "cachetools": {
            "name": "examples_checked_in_requirements_bzl__cachetools_2_1_0",
            "version": "2.1.0",
            "wheel_name": "cachetools-2.1.0-py2.py3-none-any.whl",
        },
        "certifi": {
            "name": "examples_checked_in_requirements_bzl__certifi_2018_8_24",
            "version": "2018.8.24",
            "wheel_name": "certifi-2018.8.24-py2.py3-none-any.whl",
        },
        "chardet": {
            "name": "examples_checked_in_requirements_bzl__chardet_3_0_4",
            "version": "3.0.4",
            "wheel_name": "chardet-3.0.4-py2.py3-none-any.whl",
        },
        "dill": {
            "name": "examples_checked_in_requirements_bzl__dill_0_2_8_2",
            "version": "0.2.8.2",
            "wheel_name": "dill-0.2.8.2-py2-none-any.whl",
        },
        "docutils": {
            "name": "examples_checked_in_requirements_bzl__docutils_0_14",
            "version": "0.14",
            "wheel_name": "docutils-0.14-py2-none-any.whl",
        },
        "enum34": {
            "name": "examples_checked_in_requirements_bzl__enum34_1_1_6",
            "version": "1.1.6",
            "wheel_name": "enum34-1.1.6-py2-none-any.whl",
        },
        "funcsigs": {
            "name": "examples_checked_in_requirements_bzl__funcsigs_1_0_2",
            "version": "1.0.2",
            "wheel_name": "funcsigs-1.0.2-py2.py3-none-any.whl",
        },
        "future": {
            "name": "examples_checked_in_requirements_bzl__future_0_16_0",
            "version": "0.16.0",
            "wheel_name": "future-0.16.0-py2-none-any.whl",
        },
        "futures": {
            "name": "examples_checked_in_requirements_bzl__futures_3_2_0",
            "version": "3.2.0",
            "wheel_name": "futures-3.2.0-py2-none-any.whl",
        },
        "gapic-google-cloud-datastore-v1": {
            "name": "examples_checked_in_requirements_bzl__gapic_google_cloud_datastore_v1_0_15_3",
            "version": "0.15.3",
            "wheel_name": "gapic_google_cloud_datastore_v1-0.15.3-py2-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-auth", "google-gax", "googleapis-common-protos", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-datastore-v1", "protobuf", "pyasn1", "pyasn1-modules", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "gapic-google-cloud-error-reporting-v1beta1": {
            "name": "examples_checked_in_requirements_bzl__gapic_google_cloud_error_reporting_v1beta1_0_15_3",
            "version": "0.15.3",
            "wheel_name": "gapic_google_cloud_error_reporting_v1beta1-0.15.3-py2-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-auth", "google-gax", "googleapis-common-protos", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-error-reporting-v1beta1", "protobuf", "pyasn1", "pyasn1-modules", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "gapic-google-cloud-logging-v2": {
            "name": "examples_checked_in_requirements_bzl__gapic_google_cloud_logging_v2_0_91_3",
            "version": "0.91.3",
            "wheel_name": "gapic_google_cloud_logging_v2-0.91.3-py2-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-auth", "google-gax", "googleapis-common-protos", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-logging-v2", "protobuf", "pyasn1", "pyasn1-modules", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-api-core": {
            "name": "examples_checked_in_requirements_bzl__google_api_core_0_1_4",
            "version": "0.1.4",
            "wheel_name": "google_api_core-0.1.4-py2.py3-none-any.whl",
            "extras": ["grpc"],
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-auth", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-auth": {
            "name": "examples_checked_in_requirements_bzl__google_auth_1_5_1",
            "version": "1.5.1",
            "wheel_name": "google_auth-1.5.1-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "pyasn1", "pyasn1-modules", "rsa", "six"],
        },
        "google-cloud": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_0_29_0",
            "version": "0.29.0",
            "wheel_name": "google_cloud-0.29.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "gapic-google-cloud-datastore-v1", "gapic-google-cloud-error-reporting-v1beta1", "gapic-google-cloud-logging-v2", "google-api-core", "google-auth", "google-cloud-bigquery", "google-cloud-bigtable", "google-cloud-core", "google-cloud-datastore", "google-cloud-dns", "google-cloud-error-reporting", "google-cloud-firestore", "google-cloud-language", "google-cloud-logging", "google-cloud-monitoring", "google-cloud-pubsub", "google-cloud-resource-manager", "google-cloud-runtimeconfig", "google-cloud-spanner", "google-cloud-speech", "google-cloud-storage", "google-cloud-trace", "google-cloud-translate", "google-cloud-videointelligence", "google-cloud-vision", "google-gax", "google-resumable-media", "googleapis-common-protos", "grpc-google-iam-v1", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-datastore-v1", "proto-google-cloud-error-reporting-v1beta1", "proto-google-cloud-logging-v2", "protobuf", "psutil", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-bigquery": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_bigquery_0_28_0",
            "version": "0.28.0",
            "wheel_name": "google_cloud_bigquery-0.28.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-resumable-media", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-bigtable": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_bigtable_0_28_1",
            "version": "0.28.1",
            "wheel_name": "google_cloud_bigtable-0.28.1-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-core": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_core_0_28_1",
            "version": "0.28.1",
            "wheel_name": "google_cloud_core-0.28.1-py2.py3-none-any.whl",
            "extras": ["grpc"],
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-datastore": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_datastore_1_4_0",
            "version": "1.4.0",
            "wheel_name": "google_cloud_datastore-1.4.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "gapic-google-cloud-datastore-v1", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-datastore-v1", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-dns": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_dns_0_28_0",
            "version": "0.28.0",
            "wheel_name": "google_cloud_dns-0.28.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-error-reporting": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_error_reporting_0_28_0",
            "version": "0.28.0",
            "wheel_name": "google_cloud_error_reporting-0.28.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "gapic-google-cloud-error-reporting-v1beta1", "gapic-google-cloud-logging-v2", "google-api-core", "google-auth", "google-cloud-core", "google-cloud-logging", "google-gax", "googleapis-common-protos", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-error-reporting-v1beta1", "proto-google-cloud-logging-v2", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-firestore": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_firestore_0_28_0",
            "version": "0.28.0",
            "wheel_name": "google_cloud_firestore-0.28.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-language": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_language_0_31_0",
            "version": "0.31.0",
            "wheel_name": "google_cloud_language-0.31.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "enum34", "futures", "google-api-core", "google-auth", "grpcio", "pyasn1", "pyasn1-modules", "rsa", "six"],
        },
        "google-cloud-logging": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_logging_1_4_0",
            "version": "1.4.0",
            "wheel_name": "google_cloud_logging-1.4.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "gapic-google-cloud-logging-v2", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-logging-v2", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-monitoring": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_monitoring_0_28_1",
            "version": "0.28.1",
            "wheel_name": "google_cloud_monitoring-0.28.1-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-pubsub": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_pubsub_0_29_4",
            "version": "0.29.4",
            "wheel_name": "google_cloud_pubsub-0.29.4-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "enum34", "futures", "google-api-core", "google-auth", "googleapis-common-protos", "grpc-google-iam-v1", "grpcio", "psutil", "pyasn1", "pyasn1-modules", "rsa", "six"],
        },
        "google-cloud-resource-manager": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_resource_manager_0_28_1",
            "version": "0.28.1",
            "wheel_name": "google_cloud_resource_manager-0.28.1-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-runtimeconfig": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_runtimeconfig_0_28_1",
            "version": "0.28.1",
            "wheel_name": "google_cloud_runtimeconfig-0.28.1-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-spanner": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_spanner_0_29_0",
            "version": "0.29.0",
            "wheel_name": "google_cloud_spanner-0.29.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpc-google-iam-v1", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-speech": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_speech_0_30_0",
            "version": "0.30.0",
            "wheel_name": "google_cloud_speech-0.30.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-storage": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_storage_1_6_0",
            "version": "1.6.0",
            "wheel_name": "google_cloud_storage-1.6.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-resumable-media", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-trace": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_trace_0_16_0",
            "version": "0.16.0",
            "wheel_name": "google_cloud_trace-0.16.0-py2-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-translate": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_translate_1_3_1",
            "version": "1.3.1",
            "wheel_name": "google_cloud_translate-1.3.1-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-videointelligence": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_videointelligence_0_28_0",
            "version": "0.28.0",
            "wheel_name": "google_cloud_videointelligence-0.28.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-auth", "google-gax", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-cloud-vision": {
            "name": "examples_checked_in_requirements_bzl__google_cloud_vision_0_28_0",
            "version": "0.28.0",
            "wheel_name": "google_cloud_vision-0.28.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-gax": {
            "name": "examples_checked_in_requirements_bzl__google_gax_0_15_16",
            "version": "0.15.16",
            "wheel_name": "google_gax-0.15.16-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-auth", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "requests", "rsa", "setuptools", "six", "urllib3"],
        },
        "google-resumable-media": {
            "name": "examples_checked_in_requirements_bzl__google_resumable_media_0_3_1",
            "version": "0.3.1",
            "wheel_name": "google_resumable_media-0.3.1-py2.py3-none-any.whl",
            "extras": ["requests"],
            "transitive_runtime_deps": ["six"],
        },
        "googleapis-common-protos": {
            "name": "examples_checked_in_requirements_bzl__googleapis_common_protos_1_5_3",
            "version": "1.5.3",
            "wheel_name": "googleapis_common_protos-1.5.3-py2-none-any.whl",
            "extras": ["grpc"],
            "transitive_runtime_deps": ["protobuf", "setuptools", "six"],
        },
        "grpc-google-iam-v1": {
            "name": "examples_checked_in_requirements_bzl__grpc_google_iam_v1_0_11_4",
            "version": "0.11.4",
            "wheel_name": "grpc_google_iam_v1-0.11.4-py2-none-any.whl",
            "transitive_runtime_deps": ["enum34", "futures", "googleapis-common-protos", "grpcio", "six"],
        },
        "grpcio": {
            "name": "examples_checked_in_requirements_bzl__grpcio_1_15_0",
            "version": "1.15.0",
            "wheel_name": "grpcio-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl",
            "transitive_runtime_deps": ["enum34", "futures", "six"],
        },
        "h5py": {
            "name": "examples_checked_in_requirements_bzl__h5py_2_8_0",
            "version": "2.8.0",
            "wheel_name": "h5py-2.8.0-cp27-cp27mu-manylinux1_x86_64.whl",
            "transitive_runtime_deps": ["numpy", "six"],
        },
        "httplib2": {
            "name": "examples_checked_in_requirements_bzl__httplib2_0_11_3",
            "version": "0.11.3",
            "wheel_name": "httplib2-0.11.3-py2-none-any.whl",
        },
        "idna": {
            "name": "examples_checked_in_requirements_bzl__idna_2_7",
            "version": "2.7",
            "wheel_name": "idna-2.7-py2.py3-none-any.whl",
        },
        "jmespath": {
            "name": "examples_checked_in_requirements_bzl__jmespath_0_9_3",
            "version": "0.9.3",
            "wheel_name": "jmespath-0.9.3-py2.py3-none-any.whl",
        },
        "keras": {
            "name": "examples_checked_in_requirements_bzl__Keras_2_2_2",
            "version": "2.2.2",
            "wheel_name": "Keras-2.2.2-py2.py3-none-any.whl",
            "extras": ["tests", "visualize"],
            "transitive_runtime_deps": ["h5py", "keras", "keras-applications", "keras-preprocessing", "numpy", "pyyaml", "scipy", "six"],
        },
        "keras-applications": {
            "name": "examples_checked_in_requirements_bzl__Keras_Applications_1_0_4",
            "version": "1.0.4",
            "wheel_name": "Keras_Applications-1.0.4-py2.py3-none-any.whl",
            "extras": ["tests"],
            "transitive_runtime_deps": ["h5py", "keras", "keras-applications", "keras-preprocessing", "numpy", "pyyaml", "scipy", "six"],
        },
        "keras-preprocessing": {
            "name": "examples_checked_in_requirements_bzl__Keras_Preprocessing_1_0_2",
            "version": "1.0.2",
            "wheel_name": "Keras_Preprocessing-1.0.2-py2.py3-none-any.whl",
            "extras": ["tests"],
            "transitive_runtime_deps": ["h5py", "keras", "keras-applications", "keras-preprocessing", "numpy", "pyyaml", "scipy", "six"],
        },
        "mock": {
            "name": "examples_checked_in_requirements_bzl__mock_2_0_0",
            "version": "2.0.0",
            "wheel_name": "mock-2.0.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["funcsigs", "pbr", "six"],
        },
        "more-itertools": {
            "name": "examples_checked_in_requirements_bzl__more_itertools_4_3_0",
            "version": "4.3.0",
            "wheel_name": "more_itertools-4.3.0-py2-none-any.whl",
            "transitive_runtime_deps": ["six"],
        },
        "numpy": {
            "name": "examples_checked_in_requirements_bzl__numpy_1_14_0",
            "version": "1.14.0",
            "wheel_name": "numpy-1.14.0-cp27-cp27mu-manylinux1_x86_64.whl",
        },
        "oauth2client": {
            "name": "examples_checked_in_requirements_bzl__oauth2client_3_0_0",
            "version": "3.0.0",
            "wheel_name": "oauth2client-3.0.0-py2-none-any.whl",
            "transitive_runtime_deps": ["httplib2", "pyasn1", "pyasn1-modules", "rsa", "six"],
        },
        "pathlib2": {
            "name": "examples_checked_in_requirements_bzl__pathlib2_2_3_2",
            "version": "2.3.2",
            "wheel_name": "pathlib2-2.3.2-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["scandir", "six"],
        },
        "pbr": {
            "name": "examples_checked_in_requirements_bzl__pbr_4_2_0",
            "version": "4.2.0",
            "wheel_name": "pbr-4.2.0-py2.py3-none-any.whl",
        },
        "pip": {
            "name": "examples_checked_in_requirements_bzl__pip_9_0_0",
            "version": "9.0.0",
            "wheel_name": "pip-9.0.0-py2.py3-none-any.whl",
        },
        "pluggy": {
            "name": "examples_checked_in_requirements_bzl__pluggy_0_7_1",
            "version": "0.7.1",
            "wheel_name": "pluggy-0.7.1-py2.py3-none-any.whl",
        },
        "ply": {
            "name": "examples_checked_in_requirements_bzl__ply_3_8",
            "version": "3.8",
            "wheel_name": "ply-3.8-py2.py3-none-any.whl",
        },
        "proto-google-cloud-datastore-v1": {
            "name": "examples_checked_in_requirements_bzl__proto_google_cloud_datastore_v1_0_90_4",
            "version": "0.90.4",
            "wheel_name": "proto_google_cloud_datastore_v1-0.90.4-py2-none-any.whl",
            "extras": ["grpc"],
            "transitive_runtime_deps": ["googleapis-common-protos", "httplib2", "oauth2client", "protobuf", "pyasn1", "pyasn1-modules", "rsa", "setuptools", "six"],
        },
        "proto-google-cloud-error-reporting-v1beta1": {
            "name": "examples_checked_in_requirements_bzl__proto_google_cloud_error_reporting_v1beta1_0_15_3",
            "version": "0.15.3",
            "wheel_name": "proto_google_cloud_error_reporting_v1beta1-0.15.3-py2-none-any.whl",
            "extras": ["grpc"],
            "transitive_runtime_deps": ["googleapis-common-protos", "httplib2", "oauth2client", "protobuf", "pyasn1", "pyasn1-modules", "rsa", "setuptools", "six"],
        },
        "proto-google-cloud-logging-v2": {
            "name": "examples_checked_in_requirements_bzl__proto_google_cloud_logging_v2_0_91_3",
            "version": "0.91.3",
            "wheel_name": "proto_google_cloud_logging_v2-0.91.3-py2-none-any.whl",
            "extras": ["grpc"],
            "transitive_runtime_deps": ["googleapis-common-protos", "httplib2", "oauth2client", "protobuf", "pyasn1", "pyasn1-modules", "rsa", "setuptools", "six"],
        },
        "protobuf": {
            "name": "examples_checked_in_requirements_bzl__protobuf_3_6_1",
            "version": "3.6.1",
            "wheel_name": "protobuf-3.6.1-cp27-cp27mu-manylinux1_x86_64.whl",
            "transitive_runtime_deps": ["setuptools", "six"],
        },
        "psutil": {
            "name": "examples_checked_in_requirements_bzl__psutil_5_4_7",
            "version": "5.4.7",
            "wheel_name": "psutil-5.4.7-cp27-cp27mu-linux_x86_64.whl",
            "extras": ["enum"],
        },
        "py": {
            "name": "examples_checked_in_requirements_bzl__py_1_6_0",
            "version": "1.6.0",
            "wheel_name": "py-1.6.0-py2.py3-none-any.whl",
        },
        "pyasn1": {
            "name": "examples_checked_in_requirements_bzl__pyasn1_0_4_4",
            "version": "0.4.4",
            "wheel_name": "pyasn1-0.4.4-py2.py3-none-any.whl",
        },
        "pyasn1-modules": {
            "name": "examples_checked_in_requirements_bzl__pyasn1_modules_0_2_2",
            "version": "0.2.2",
            "wheel_name": "pyasn1_modules-0.2.2-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["pyasn1"],
        },
        "pytest": {
            "name": "examples_checked_in_requirements_bzl__pytest_3_8_0",
            "version": "3.8.0",
            "wheel_name": "pytest-3.8.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["atomicwrites", "attrs", "funcsigs", "more-itertools", "pathlib2", "pluggy", "py", "scandir", "setuptools", "six"],
        },
        "pytest-mock": {
            "name": "examples_checked_in_requirements_bzl__pytest_mock_1_6_2",
            "version": "1.6.2",
            "wheel_name": "pytest_mock-1.6.2-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["atomicwrites", "attrs", "funcsigs", "mock", "more-itertools", "pathlib2", "pbr", "pluggy", "py", "pytest", "scandir", "setuptools", "six"],
        },
        "python-dateutil": {
            "name": "examples_checked_in_requirements_bzl__python_dateutil_2_7_3",
            "version": "2.7.3",
            "wheel_name": "python_dateutil-2.7.3-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["six"],
        },
        "pytz": {
            "name": "examples_checked_in_requirements_bzl__pytz_2018_5",
            "version": "2018.5",
            "wheel_name": "pytz-2018.5-py2.py3-none-any.whl",
        },
        "pyyaml": {
            "name": "examples_checked_in_requirements_bzl__PyYAML_3_13",
            "version": "3.13",
            "wheel_name": "PyYAML-3.13-cp27-cp27mu-linux_x86_64.whl",
        },
        "requests": {
            "name": "examples_checked_in_requirements_bzl__requests_2_19_1",
            "version": "2.19.1",
            "wheel_name": "requests-2.19.1-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["certifi", "chardet", "idna", "urllib3"],
        },
        "rsa": {
            "name": "examples_checked_in_requirements_bzl__rsa_4_0",
            "version": "4.0",
            "wheel_name": "rsa-4.0-py2.py3-none-any.whl",
            "transitive_runtime_deps": ["pyasn1"],
        },
        "scandir": {
            "name": "examples_checked_in_requirements_bzl__scandir_1_9_0",
            "version": "1.9.0",
            "wheel_name": "scandir-1.9.0-cp27-cp27mu-linux_x86_64.whl",
        },
        "scikit-learn": {
            "name": "examples_checked_in_requirements_bzl__scikit_learn_0_17_1",
            "version": "0.17.1",
            "wheel_name": "scikit_learn-0.17.1-cp27-cp27mu-manylinux1_x86_64.whl",
        },
        "scipy": {
            "name": "examples_checked_in_requirements_bzl__scipy_0_17_1",
            "version": "0.17.1",
            "wheel_name": "scipy-0.17.1-cp27-cp27mu-manylinux1_x86_64.whl",
        },
        "setuptools": {
            "name": "examples_checked_in_requirements_bzl__setuptools_40_4_0",
            "version": "40.4.0",
            "wheel_name": "setuptools-40.4.0-py2.py3-none-any.whl",
            "extras": ["certs", "ssl"],
        },
        "setuptools-scm": {
            "name": "examples_checked_in_requirements_bzl__setuptools_scm_2_0_0",
            "version": "2.0.0",
            "wheel_name": "setuptools_scm-2.0.0-py2.py3-none-any.whl",
        },
        "six": {
            "name": "examples_checked_in_requirements_bzl__six_1_11_0",
            "version": "1.11.0",
            "wheel_name": "six-1.11.0-py2.py3-none-any.whl",
        },
        "urllib3": {
            "name": "examples_checked_in_requirements_bzl__urllib3_1_23",
            "version": "1.23",
            "wheel_name": "urllib3-1.23-py2.py3-none-any.whl",
        },
    }
    # One repository rule per wheel. `all_libs` is passed through whole so
    # each repository can resolve its transitive_runtime_deps entries, and
    # the remaining per-package attributes are splatted as rule attrs.
    # NOTE(review): "@python2//:bin/python" pins these repos to a Python 2
    # interpreter — consistent with the cp27 wheels above.
    for key, attributes in all_libs.items():
        whl_library(
            key = key,
            all_libs = all_libs,
            python = "@python2//:bin/python",
            **attributes
        )
| 68.081921 | 1,136 | 0.72775 |
load("@examples_checked_in_requirements_bzl//python:whl.bzl", "whl_library")
# Generated map from a normalized requirement key to the Bazel label that
# provides it. Keys are the lowercased distribution name, optionally suffixed
# with ":dirty" (unpinned/dirty variant of the repo) and/or an "[extra]"
# selector; values are "@<whl_library repo>//:<target>" labels.
_requirements = {
    "atomicwrites": "@examples_checked_in_requirements_bzl__atomicwrites_1_2_1//:pkg",
    "atomicwrites:dirty": "@examples_checked_in_requirements_bzl__atomicwrites_1_2_1_dirty//:pkg",
    "attrs": "@examples_checked_in_requirements_bzl__attrs_18_2_0//:pkg",
    "attrs:dirty": "@examples_checked_in_requirements_bzl__attrs_18_2_0_dirty//:pkg",
    "attrs[dev]": "@examples_checked_in_requirements_bzl__attrs_18_2_0//:dev",
    "attrs:dirty[dev]": "@examples_checked_in_requirements_bzl__attrs_18_2_0_dirty//:dev",
    "attrs[docs]": "@examples_checked_in_requirements_bzl__attrs_18_2_0//:docs",
    "attrs:dirty[docs]": "@examples_checked_in_requirements_bzl__attrs_18_2_0_dirty//:docs",
    "attrs[tests]": "@examples_checked_in_requirements_bzl__attrs_18_2_0//:tests",
    "attrs:dirty[tests]": "@examples_checked_in_requirements_bzl__attrs_18_2_0_dirty//:tests",
    "backports.ssl-match-hostname": "@examples_checked_in_requirements_bzl__backports_ssl_match_hostname_3_5_0_1//:pkg",
    "backports.ssl-match-hostname:dirty": "@examples_checked_in_requirements_bzl__backports_ssl_match_hostname_3_5_0_1_dirty//:pkg",
    "botocore": "@examples_checked_in_requirements_bzl__botocore_1_12_5//:pkg",
    "botocore:dirty": "@examples_checked_in_requirements_bzl__botocore_1_12_5_dirty//:pkg",
    "cachetools": "@examples_checked_in_requirements_bzl__cachetools_2_1_0//:pkg",
    "cachetools:dirty": "@examples_checked_in_requirements_bzl__cachetools_2_1_0_dirty//:pkg",
    "certifi": "@examples_checked_in_requirements_bzl__certifi_2018_8_24//:pkg",
    "certifi:dirty": "@examples_checked_in_requirements_bzl__certifi_2018_8_24_dirty//:pkg",
    "chardet": "@examples_checked_in_requirements_bzl__chardet_3_0_4//:pkg",
    "chardet:dirty": "@examples_checked_in_requirements_bzl__chardet_3_0_4_dirty//:pkg",
    "dill": "@examples_checked_in_requirements_bzl__dill_0_2_8_2//:pkg",
    "dill:dirty": "@examples_checked_in_requirements_bzl__dill_0_2_8_2_dirty//:pkg",
    "docutils": "@examples_checked_in_requirements_bzl__docutils_0_14//:pkg",
    "docutils:dirty": "@examples_checked_in_requirements_bzl__docutils_0_14_dirty//:pkg",
    "enum34": "@examples_checked_in_requirements_bzl__enum34_1_1_6//:pkg",
    "enum34:dirty": "@examples_checked_in_requirements_bzl__enum34_1_1_6_dirty//:pkg",
    "funcsigs": "@examples_checked_in_requirements_bzl__funcsigs_1_0_2//:pkg",
    "funcsigs:dirty": "@examples_checked_in_requirements_bzl__funcsigs_1_0_2_dirty//:pkg",
    "future": "@examples_checked_in_requirements_bzl__future_0_16_0//:pkg",
    "future:dirty": "@examples_checked_in_requirements_bzl__future_0_16_0_dirty//:pkg",
    "futures": "@examples_checked_in_requirements_bzl__futures_3_2_0//:pkg",
    "futures:dirty": "@examples_checked_in_requirements_bzl__futures_3_2_0_dirty//:pkg",
    "gapic-google-cloud-datastore-v1": "@examples_checked_in_requirements_bzl__gapic_google_cloud_datastore_v1_0_15_3//:pkg",
    "gapic-google-cloud-datastore-v1:dirty": "@examples_checked_in_requirements_bzl__gapic_google_cloud_datastore_v1_0_15_3_dirty//:pkg",
    "gapic-google-cloud-error-reporting-v1beta1": "@examples_checked_in_requirements_bzl__gapic_google_cloud_error_reporting_v1beta1_0_15_3//:pkg",
    "gapic-google-cloud-error-reporting-v1beta1:dirty": "@examples_checked_in_requirements_bzl__gapic_google_cloud_error_reporting_v1beta1_0_15_3_dirty//:pkg",
    "gapic-google-cloud-logging-v2": "@examples_checked_in_requirements_bzl__gapic_google_cloud_logging_v2_0_91_3//:pkg",
    "gapic-google-cloud-logging-v2:dirty": "@examples_checked_in_requirements_bzl__gapic_google_cloud_logging_v2_0_91_3_dirty//:pkg",
    "google-api-core": "@examples_checked_in_requirements_bzl__google_api_core_0_1_4//:pkg",
    "google-api-core:dirty": "@examples_checked_in_requirements_bzl__google_api_core_0_1_4_dirty//:pkg",
    "google-api-core[grpc]": "@examples_checked_in_requirements_bzl__google_api_core_0_1_4//:grpc",
    "google-api-core:dirty[grpc]": "@examples_checked_in_requirements_bzl__google_api_core_0_1_4_dirty//:grpc",
    "google-auth": "@examples_checked_in_requirements_bzl__google_auth_1_5_1//:pkg",
    "google-auth:dirty": "@examples_checked_in_requirements_bzl__google_auth_1_5_1_dirty//:pkg",
    "google-cloud": "@examples_checked_in_requirements_bzl__google_cloud_0_29_0//:pkg",
    "google-cloud:dirty": "@examples_checked_in_requirements_bzl__google_cloud_0_29_0_dirty//:pkg",
    "google-cloud-bigquery": "@examples_checked_in_requirements_bzl__google_cloud_bigquery_0_28_0//:pkg",
    "google-cloud-bigquery:dirty": "@examples_checked_in_requirements_bzl__google_cloud_bigquery_0_28_0_dirty//:pkg",
    "google-cloud-bigtable": "@examples_checked_in_requirements_bzl__google_cloud_bigtable_0_28_1//:pkg",
    "google-cloud-bigtable:dirty": "@examples_checked_in_requirements_bzl__google_cloud_bigtable_0_28_1_dirty//:pkg",
    "google-cloud-core": "@examples_checked_in_requirements_bzl__google_cloud_core_0_28_1//:pkg",
    "google-cloud-core:dirty": "@examples_checked_in_requirements_bzl__google_cloud_core_0_28_1_dirty//:pkg",
    "google-cloud-core[grpc]": "@examples_checked_in_requirements_bzl__google_cloud_core_0_28_1//:grpc",
    "google-cloud-core:dirty[grpc]": "@examples_checked_in_requirements_bzl__google_cloud_core_0_28_1_dirty//:grpc",
    "google-cloud-datastore": "@examples_checked_in_requirements_bzl__google_cloud_datastore_1_4_0//:pkg",
    "google-cloud-datastore:dirty": "@examples_checked_in_requirements_bzl__google_cloud_datastore_1_4_0_dirty//:pkg",
    "google-cloud-dns": "@examples_checked_in_requirements_bzl__google_cloud_dns_0_28_0//:pkg",
    "google-cloud-dns:dirty": "@examples_checked_in_requirements_bzl__google_cloud_dns_0_28_0_dirty//:pkg",
    "google-cloud-error-reporting": "@examples_checked_in_requirements_bzl__google_cloud_error_reporting_0_28_0//:pkg",
    "google-cloud-error-reporting:dirty": "@examples_checked_in_requirements_bzl__google_cloud_error_reporting_0_28_0_dirty//:pkg",
    "google-cloud-firestore": "@examples_checked_in_requirements_bzl__google_cloud_firestore_0_28_0//:pkg",
    "google-cloud-firestore:dirty": "@examples_checked_in_requirements_bzl__google_cloud_firestore_0_28_0_dirty//:pkg",
    "google-cloud-language": "@examples_checked_in_requirements_bzl__google_cloud_language_0_31_0//:pkg",
    "google-cloud-language:dirty": "@examples_checked_in_requirements_bzl__google_cloud_language_0_31_0_dirty//:pkg",
    "google-cloud-logging": "@examples_checked_in_requirements_bzl__google_cloud_logging_1_4_0//:pkg",
    "google-cloud-logging:dirty": "@examples_checked_in_requirements_bzl__google_cloud_logging_1_4_0_dirty//:pkg",
    "google-cloud-monitoring": "@examples_checked_in_requirements_bzl__google_cloud_monitoring_0_28_1//:pkg",
    "google-cloud-monitoring:dirty": "@examples_checked_in_requirements_bzl__google_cloud_monitoring_0_28_1_dirty//:pkg",
    "google-cloud-pubsub": "@examples_checked_in_requirements_bzl__google_cloud_pubsub_0_29_4//:pkg",
    "google-cloud-pubsub:dirty": "@examples_checked_in_requirements_bzl__google_cloud_pubsub_0_29_4_dirty//:pkg",
    "google-cloud-resource-manager": "@examples_checked_in_requirements_bzl__google_cloud_resource_manager_0_28_1//:pkg",
    "google-cloud-resource-manager:dirty": "@examples_checked_in_requirements_bzl__google_cloud_resource_manager_0_28_1_dirty//:pkg",
    "google-cloud-runtimeconfig": "@examples_checked_in_requirements_bzl__google_cloud_runtimeconfig_0_28_1//:pkg",
    "google-cloud-runtimeconfig:dirty": "@examples_checked_in_requirements_bzl__google_cloud_runtimeconfig_0_28_1_dirty//:pkg",
    "google-cloud-spanner": "@examples_checked_in_requirements_bzl__google_cloud_spanner_0_29_0//:pkg",
    "google-cloud-spanner:dirty": "@examples_checked_in_requirements_bzl__google_cloud_spanner_0_29_0_dirty//:pkg",
    "google-cloud-speech": "@examples_checked_in_requirements_bzl__google_cloud_speech_0_30_0//:pkg",
    "google-cloud-speech:dirty": "@examples_checked_in_requirements_bzl__google_cloud_speech_0_30_0_dirty//:pkg",
    "google-cloud-storage": "@examples_checked_in_requirements_bzl__google_cloud_storage_1_6_0//:pkg",
    "google-cloud-storage:dirty": "@examples_checked_in_requirements_bzl__google_cloud_storage_1_6_0_dirty//:pkg",
    "google-cloud-trace": "@examples_checked_in_requirements_bzl__google_cloud_trace_0_16_0//:pkg",
    "google-cloud-trace:dirty": "@examples_checked_in_requirements_bzl__google_cloud_trace_0_16_0_dirty//:pkg",
    "google-cloud-translate": "@examples_checked_in_requirements_bzl__google_cloud_translate_1_3_1//:pkg",
    "google-cloud-translate:dirty": "@examples_checked_in_requirements_bzl__google_cloud_translate_1_3_1_dirty//:pkg",
    "google-cloud-videointelligence": "@examples_checked_in_requirements_bzl__google_cloud_videointelligence_0_28_0//:pkg",
    "google-cloud-videointelligence:dirty": "@examples_checked_in_requirements_bzl__google_cloud_videointelligence_0_28_0_dirty//:pkg",
    "google-cloud-vision": "@examples_checked_in_requirements_bzl__google_cloud_vision_0_28_0//:pkg",
    "google-cloud-vision:dirty": "@examples_checked_in_requirements_bzl__google_cloud_vision_0_28_0_dirty//:pkg",
    "google-gax": "@examples_checked_in_requirements_bzl__google_gax_0_15_16//:pkg",
    "google-gax:dirty": "@examples_checked_in_requirements_bzl__google_gax_0_15_16_dirty//:pkg",
    "google-resumable-media": "@examples_checked_in_requirements_bzl__google_resumable_media_0_3_1//:pkg",
    "google-resumable-media:dirty": "@examples_checked_in_requirements_bzl__google_resumable_media_0_3_1_dirty//:pkg",
    "google-resumable-media[requests]": "@examples_checked_in_requirements_bzl__google_resumable_media_0_3_1//:requests",
    "google-resumable-media:dirty[requests]": "@examples_checked_in_requirements_bzl__google_resumable_media_0_3_1_dirty//:requests",
    "googleapis-common-protos": "@examples_checked_in_requirements_bzl__googleapis_common_protos_1_5_3//:pkg",
    "googleapis-common-protos:dirty": "@examples_checked_in_requirements_bzl__googleapis_common_protos_1_5_3_dirty//:pkg",
    "googleapis-common-protos[grpc]": "@examples_checked_in_requirements_bzl__googleapis_common_protos_1_5_3//:grpc",
    "googleapis-common-protos:dirty[grpc]": "@examples_checked_in_requirements_bzl__googleapis_common_protos_1_5_3_dirty//:grpc",
    "grpc-google-iam-v1": "@examples_checked_in_requirements_bzl__grpc_google_iam_v1_0_11_4//:pkg",
    "grpc-google-iam-v1:dirty": "@examples_checked_in_requirements_bzl__grpc_google_iam_v1_0_11_4_dirty//:pkg",
    "grpcio": "@examples_checked_in_requirements_bzl__grpcio_1_15_0//:pkg",
    "grpcio:dirty": "@examples_checked_in_requirements_bzl__grpcio_1_15_0_dirty//:pkg",
    "h5py": "@examples_checked_in_requirements_bzl__h5py_2_8_0//:pkg",
    "h5py:dirty": "@examples_checked_in_requirements_bzl__h5py_2_8_0_dirty//:pkg",
    "httplib2": "@examples_checked_in_requirements_bzl__httplib2_0_11_3//:pkg",
    "httplib2:dirty": "@examples_checked_in_requirements_bzl__httplib2_0_11_3_dirty//:pkg",
    "idna": "@examples_checked_in_requirements_bzl__idna_2_7//:pkg",
    "idna:dirty": "@examples_checked_in_requirements_bzl__idna_2_7_dirty//:pkg",
    "jmespath": "@examples_checked_in_requirements_bzl__jmespath_0_9_3//:pkg",
    "jmespath:dirty": "@examples_checked_in_requirements_bzl__jmespath_0_9_3_dirty//:pkg",
    "keras": "@examples_checked_in_requirements_bzl__Keras_2_2_2//:pkg",
    "keras:dirty": "@examples_checked_in_requirements_bzl__Keras_2_2_2_dirty//:pkg",
    "keras[tests]": "@examples_checked_in_requirements_bzl__Keras_2_2_2//:tests",
    "keras:dirty[tests]": "@examples_checked_in_requirements_bzl__Keras_2_2_2_dirty//:tests",
    "keras[visualize]": "@examples_checked_in_requirements_bzl__Keras_2_2_2//:visualize",
    "keras:dirty[visualize]": "@examples_checked_in_requirements_bzl__Keras_2_2_2_dirty//:visualize",
    "keras-applications": "@examples_checked_in_requirements_bzl__Keras_Applications_1_0_4//:pkg",
    "keras-applications:dirty": "@examples_checked_in_requirements_bzl__Keras_Applications_1_0_4_dirty//:pkg",
    "keras-applications[tests]": "@examples_checked_in_requirements_bzl__Keras_Applications_1_0_4//:tests",
    "keras-applications:dirty[tests]": "@examples_checked_in_requirements_bzl__Keras_Applications_1_0_4_dirty//:tests",
    "keras-preprocessing": "@examples_checked_in_requirements_bzl__Keras_Preprocessing_1_0_2//:pkg",
    "keras-preprocessing:dirty": "@examples_checked_in_requirements_bzl__Keras_Preprocessing_1_0_2_dirty//:pkg",
    "keras-preprocessing[tests]": "@examples_checked_in_requirements_bzl__Keras_Preprocessing_1_0_2//:tests",
    "keras-preprocessing:dirty[tests]": "@examples_checked_in_requirements_bzl__Keras_Preprocessing_1_0_2_dirty//:tests",
    "mock": "@examples_checked_in_requirements_bzl__mock_2_0_0//:pkg",
    "mock:dirty": "@examples_checked_in_requirements_bzl__mock_2_0_0_dirty//:pkg",
    "more-itertools": "@examples_checked_in_requirements_bzl__more_itertools_4_3_0//:pkg",
    "more-itertools:dirty": "@examples_checked_in_requirements_bzl__more_itertools_4_3_0_dirty//:pkg",
    "numpy": "@examples_checked_in_requirements_bzl__numpy_1_14_0//:pkg",
    "numpy:dirty": "@examples_checked_in_requirements_bzl__numpy_1_14_0_dirty//:pkg",
    "oauth2client": "@examples_checked_in_requirements_bzl__oauth2client_3_0_0//:pkg",
    "oauth2client:dirty": "@examples_checked_in_requirements_bzl__oauth2client_3_0_0_dirty//:pkg",
    "pathlib2": "@examples_checked_in_requirements_bzl__pathlib2_2_3_2//:pkg",
    "pathlib2:dirty": "@examples_checked_in_requirements_bzl__pathlib2_2_3_2_dirty//:pkg",
    "pbr": "@examples_checked_in_requirements_bzl__pbr_4_2_0//:pkg",
    "pbr:dirty": "@examples_checked_in_requirements_bzl__pbr_4_2_0_dirty//:pkg",
    "pip": "@examples_checked_in_requirements_bzl__pip_9_0_0//:pkg",
    "pip:dirty": "@examples_checked_in_requirements_bzl__pip_9_0_0_dirty//:pkg",
    "pluggy": "@examples_checked_in_requirements_bzl__pluggy_0_7_1//:pkg",
    "pluggy:dirty": "@examples_checked_in_requirements_bzl__pluggy_0_7_1_dirty//:pkg",
    "ply": "@examples_checked_in_requirements_bzl__ply_3_8//:pkg",
    "ply:dirty": "@examples_checked_in_requirements_bzl__ply_3_8_dirty//:pkg",
    "proto-google-cloud-datastore-v1": "@examples_checked_in_requirements_bzl__proto_google_cloud_datastore_v1_0_90_4//:pkg",
    "proto-google-cloud-datastore-v1:dirty": "@examples_checked_in_requirements_bzl__proto_google_cloud_datastore_v1_0_90_4_dirty//:pkg",
    "proto-google-cloud-datastore-v1[grpc]": "@examples_checked_in_requirements_bzl__proto_google_cloud_datastore_v1_0_90_4//:grpc",
    "proto-google-cloud-datastore-v1:dirty[grpc]": "@examples_checked_in_requirements_bzl__proto_google_cloud_datastore_v1_0_90_4_dirty//:grpc",
    "proto-google-cloud-error-reporting-v1beta1": "@examples_checked_in_requirements_bzl__proto_google_cloud_error_reporting_v1beta1_0_15_3//:pkg",
    "proto-google-cloud-error-reporting-v1beta1:dirty": "@examples_checked_in_requirements_bzl__proto_google_cloud_error_reporting_v1beta1_0_15_3_dirty//:pkg",
    "proto-google-cloud-error-reporting-v1beta1[grpc]": "@examples_checked_in_requirements_bzl__proto_google_cloud_error_reporting_v1beta1_0_15_3//:grpc",
    "proto-google-cloud-error-reporting-v1beta1:dirty[grpc]": "@examples_checked_in_requirements_bzl__proto_google_cloud_error_reporting_v1beta1_0_15_3_dirty//:grpc",
    "proto-google-cloud-logging-v2": "@examples_checked_in_requirements_bzl__proto_google_cloud_logging_v2_0_91_3//:pkg",
    "proto-google-cloud-logging-v2:dirty": "@examples_checked_in_requirements_bzl__proto_google_cloud_logging_v2_0_91_3_dirty//:pkg",
    "proto-google-cloud-logging-v2[grpc]": "@examples_checked_in_requirements_bzl__proto_google_cloud_logging_v2_0_91_3//:grpc",
    "proto-google-cloud-logging-v2:dirty[grpc]": "@examples_checked_in_requirements_bzl__proto_google_cloud_logging_v2_0_91_3_dirty//:grpc",
    "protobuf": "@examples_checked_in_requirements_bzl__protobuf_3_6_1//:pkg",
    "protobuf:dirty": "@examples_checked_in_requirements_bzl__protobuf_3_6_1_dirty//:pkg",
    "psutil": "@examples_checked_in_requirements_bzl__psutil_5_4_7//:pkg",
    "psutil:dirty": "@examples_checked_in_requirements_bzl__psutil_5_4_7_dirty//:pkg",
    "psutil[enum]": "@examples_checked_in_requirements_bzl__psutil_5_4_7//:enum",
    "psutil:dirty[enum]": "@examples_checked_in_requirements_bzl__psutil_5_4_7_dirty//:enum",
    "py": "@examples_checked_in_requirements_bzl__py_1_6_0//:pkg",
    "py:dirty": "@examples_checked_in_requirements_bzl__py_1_6_0_dirty//:pkg",
    "pyasn1": "@examples_checked_in_requirements_bzl__pyasn1_0_4_4//:pkg",
    "pyasn1:dirty": "@examples_checked_in_requirements_bzl__pyasn1_0_4_4_dirty//:pkg",
    "pyasn1-modules": "@examples_checked_in_requirements_bzl__pyasn1_modules_0_2_2//:pkg",
    "pyasn1-modules:dirty": "@examples_checked_in_requirements_bzl__pyasn1_modules_0_2_2_dirty//:pkg",
    "pytest": "@examples_checked_in_requirements_bzl__pytest_3_8_0//:pkg",
    "pytest:dirty": "@examples_checked_in_requirements_bzl__pytest_3_8_0_dirty//:pkg",
    "pytest-mock": "@examples_checked_in_requirements_bzl__pytest_mock_1_6_2//:pkg",
    "pytest-mock:dirty": "@examples_checked_in_requirements_bzl__pytest_mock_1_6_2_dirty//:pkg",
    "python-dateutil": "@examples_checked_in_requirements_bzl__python_dateutil_2_7_3//:pkg",
    "python-dateutil:dirty": "@examples_checked_in_requirements_bzl__python_dateutil_2_7_3_dirty//:pkg",
    "pytz": "@examples_checked_in_requirements_bzl__pytz_2018_5//:pkg",
    "pytz:dirty": "@examples_checked_in_requirements_bzl__pytz_2018_5_dirty//:pkg",
    "pyyaml": "@examples_checked_in_requirements_bzl__PyYAML_3_13//:pkg",
    "pyyaml:dirty": "@examples_checked_in_requirements_bzl__PyYAML_3_13_dirty//:pkg",
    "requests": "@examples_checked_in_requirements_bzl__requests_2_19_1//:pkg",
    "requests:dirty": "@examples_checked_in_requirements_bzl__requests_2_19_1_dirty//:pkg",
    "rsa": "@examples_checked_in_requirements_bzl__rsa_4_0//:pkg",
    "rsa:dirty": "@examples_checked_in_requirements_bzl__rsa_4_0_dirty//:pkg",
    "scandir": "@examples_checked_in_requirements_bzl__scandir_1_9_0//:pkg",
    "scandir:dirty": "@examples_checked_in_requirements_bzl__scandir_1_9_0_dirty//:pkg",
    "scikit-learn": "@examples_checked_in_requirements_bzl__scikit_learn_0_17_1//:pkg",
    "scikit-learn:dirty": "@examples_checked_in_requirements_bzl__scikit_learn_0_17_1_dirty//:pkg",
    "scipy": "@examples_checked_in_requirements_bzl__scipy_0_17_1//:pkg",
    "scipy:dirty": "@examples_checked_in_requirements_bzl__scipy_0_17_1_dirty//:pkg",
    "setuptools": "@examples_checked_in_requirements_bzl__setuptools_40_4_0//:pkg",
    "setuptools:dirty": "@examples_checked_in_requirements_bzl__setuptools_40_4_0_dirty//:pkg",
    "setuptools[certs]": "@examples_checked_in_requirements_bzl__setuptools_40_4_0//:certs",
    "setuptools:dirty[certs]": "@examples_checked_in_requirements_bzl__setuptools_40_4_0_dirty//:certs",
    "setuptools[ssl]": "@examples_checked_in_requirements_bzl__setuptools_40_4_0//:ssl",
    "setuptools:dirty[ssl]": "@examples_checked_in_requirements_bzl__setuptools_40_4_0_dirty//:ssl",
    "setuptools-scm": "@examples_checked_in_requirements_bzl__setuptools_scm_2_0_0//:pkg",
    "setuptools-scm:dirty": "@examples_checked_in_requirements_bzl__setuptools_scm_2_0_0_dirty//:pkg",
    "six": "@examples_checked_in_requirements_bzl__six_1_11_0//:pkg",
    "six:dirty": "@examples_checked_in_requirements_bzl__six_1_11_0_dirty//:pkg",
    "urllib3": "@examples_checked_in_requirements_bzl__urllib3_1_23//:pkg",
    "urllib3:dirty": "@examples_checked_in_requirements_bzl__urllib3_1_23_dirty//:pkg"
}
# Labels of every pip-provided target, convenient for depending on everything.
all_requirements = _requirements.values()
# Public alias exposing the full key -> label map to consumers of this file.
requirements_map = _requirements
def requirement_repo(name):
    """Return the repository part of *name*'s label (everything before ':').

    Fails (via requirement()) if *name* is not a known pip dependency.
    """
    label = requirement(name)
    return label.split(":")[0]
def requirement(name, binary=None):
    """Look up the Bazel label for a pip-provided dependency.

    Args:
      name: requirement name; matched case-insensitively against the
        generated map, and may carry a ":dirty" and/or "[extra]" suffix.
      binary: optional console-script name; when given, the returned label
        points at that entrypoint target inside the requirement's repo
        instead of the package target.

    Fails with an error if the requirement is unknown.
    """
    key = name.lower()
    if key not in _requirements:
        fail("Could not find pip-provided dependency: '%s'" % name)
    label = _requirements[key]
    if not binary:
        return label
    # Rebase onto the same repository, but target the script entrypoint.
    return label.split(":")[0] + ":entrypoint_" + binary
def pip_install():
all_libs = {
"atomicwrites": {
"name": "examples_checked_in_requirements_bzl__atomicwrites_1_2_1",
"version": "1.2.1",
"wheel_name": "atomicwrites-1.2.1-py2.py3-none-any.whl",
},
"attrs": {
"name": "examples_checked_in_requirements_bzl__attrs_18_2_0",
"version": "18.2.0",
"wheel_name": "attrs-18.2.0-py2.py3-none-any.whl",
"extras": ["dev", "docs", "tests"],
},
"backports.ssl-match-hostname": {
"name": "examples_checked_in_requirements_bzl__backports_ssl_match_hostname_3_5_0_1",
"version": "3.5.0.1",
"wheel_name": "backports.ssl_match_hostname-3.5.0.1-py2-none-any.whl",
},
"botocore": {
"name": "examples_checked_in_requirements_bzl__botocore_1_12_5",
"version": "1.12.5",
"wheel_name": "botocore-1.12.5-py2.py3-none-any.whl",
"transitive_runtime_deps": ["docutils", "jmespath", "python-dateutil", "six", "urllib3"],
},
"cachetools": {
"name": "examples_checked_in_requirements_bzl__cachetools_2_1_0",
"version": "2.1.0",
"wheel_name": "cachetools-2.1.0-py2.py3-none-any.whl",
},
"certifi": {
"name": "examples_checked_in_requirements_bzl__certifi_2018_8_24",
"version": "2018.8.24",
"wheel_name": "certifi-2018.8.24-py2.py3-none-any.whl",
},
"chardet": {
"name": "examples_checked_in_requirements_bzl__chardet_3_0_4",
"version": "3.0.4",
"wheel_name": "chardet-3.0.4-py2.py3-none-any.whl",
},
"dill": {
"name": "examples_checked_in_requirements_bzl__dill_0_2_8_2",
"version": "0.2.8.2",
"wheel_name": "dill-0.2.8.2-py2-none-any.whl",
},
"docutils": {
"name": "examples_checked_in_requirements_bzl__docutils_0_14",
"version": "0.14",
"wheel_name": "docutils-0.14-py2-none-any.whl",
},
"enum34": {
"name": "examples_checked_in_requirements_bzl__enum34_1_1_6",
"version": "1.1.6",
"wheel_name": "enum34-1.1.6-py2-none-any.whl",
},
"funcsigs": {
"name": "examples_checked_in_requirements_bzl__funcsigs_1_0_2",
"version": "1.0.2",
"wheel_name": "funcsigs-1.0.2-py2.py3-none-any.whl",
},
"future": {
"name": "examples_checked_in_requirements_bzl__future_0_16_0",
"version": "0.16.0",
"wheel_name": "future-0.16.0-py2-none-any.whl",
},
"futures": {
"name": "examples_checked_in_requirements_bzl__futures_3_2_0",
"version": "3.2.0",
"wheel_name": "futures-3.2.0-py2-none-any.whl",
},
"gapic-google-cloud-datastore-v1": {
"name": "examples_checked_in_requirements_bzl__gapic_google_cloud_datastore_v1_0_15_3",
"version": "0.15.3",
"wheel_name": "gapic_google_cloud_datastore_v1-0.15.3-py2-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-auth", "google-gax", "googleapis-common-protos", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-datastore-v1", "protobuf", "pyasn1", "pyasn1-modules", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"gapic-google-cloud-error-reporting-v1beta1": {
"name": "examples_checked_in_requirements_bzl__gapic_google_cloud_error_reporting_v1beta1_0_15_3",
"version": "0.15.3",
"wheel_name": "gapic_google_cloud_error_reporting_v1beta1-0.15.3-py2-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-auth", "google-gax", "googleapis-common-protos", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-error-reporting-v1beta1", "protobuf", "pyasn1", "pyasn1-modules", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"gapic-google-cloud-logging-v2": {
"name": "examples_checked_in_requirements_bzl__gapic_google_cloud_logging_v2_0_91_3",
"version": "0.91.3",
"wheel_name": "gapic_google_cloud_logging_v2-0.91.3-py2-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-auth", "google-gax", "googleapis-common-protos", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-logging-v2", "protobuf", "pyasn1", "pyasn1-modules", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-api-core": {
"name": "examples_checked_in_requirements_bzl__google_api_core_0_1_4",
"version": "0.1.4",
"wheel_name": "google_api_core-0.1.4-py2.py3-none-any.whl",
"extras": ["grpc"],
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-auth", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-auth": {
"name": "examples_checked_in_requirements_bzl__google_auth_1_5_1",
"version": "1.5.1",
"wheel_name": "google_auth-1.5.1-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "pyasn1", "pyasn1-modules", "rsa", "six"],
},
"google-cloud": {
"name": "examples_checked_in_requirements_bzl__google_cloud_0_29_0",
"version": "0.29.0",
"wheel_name": "google_cloud-0.29.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "gapic-google-cloud-datastore-v1", "gapic-google-cloud-error-reporting-v1beta1", "gapic-google-cloud-logging-v2", "google-api-core", "google-auth", "google-cloud-bigquery", "google-cloud-bigtable", "google-cloud-core", "google-cloud-datastore", "google-cloud-dns", "google-cloud-error-reporting", "google-cloud-firestore", "google-cloud-language", "google-cloud-logging", "google-cloud-monitoring", "google-cloud-pubsub", "google-cloud-resource-manager", "google-cloud-runtimeconfig", "google-cloud-spanner", "google-cloud-speech", "google-cloud-storage", "google-cloud-trace", "google-cloud-translate", "google-cloud-videointelligence", "google-cloud-vision", "google-gax", "google-resumable-media", "googleapis-common-protos", "grpc-google-iam-v1", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-datastore-v1", "proto-google-cloud-error-reporting-v1beta1", "proto-google-cloud-logging-v2", "protobuf", "psutil", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-bigquery": {
"name": "examples_checked_in_requirements_bzl__google_cloud_bigquery_0_28_0",
"version": "0.28.0",
"wheel_name": "google_cloud_bigquery-0.28.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-resumable-media", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-bigtable": {
"name": "examples_checked_in_requirements_bzl__google_cloud_bigtable_0_28_1",
"version": "0.28.1",
"wheel_name": "google_cloud_bigtable-0.28.1-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-core": {
"name": "examples_checked_in_requirements_bzl__google_cloud_core_0_28_1",
"version": "0.28.1",
"wheel_name": "google_cloud_core-0.28.1-py2.py3-none-any.whl",
"extras": ["grpc"],
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-datastore": {
"name": "examples_checked_in_requirements_bzl__google_cloud_datastore_1_4_0",
"version": "1.4.0",
"wheel_name": "google_cloud_datastore-1.4.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "gapic-google-cloud-datastore-v1", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-datastore-v1", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-dns": {
"name": "examples_checked_in_requirements_bzl__google_cloud_dns_0_28_0",
"version": "0.28.0",
"wheel_name": "google_cloud_dns-0.28.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-error-reporting": {
"name": "examples_checked_in_requirements_bzl__google_cloud_error_reporting_0_28_0",
"version": "0.28.0",
"wheel_name": "google_cloud_error_reporting-0.28.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "gapic-google-cloud-error-reporting-v1beta1", "gapic-google-cloud-logging-v2", "google-api-core", "google-auth", "google-cloud-core", "google-cloud-logging", "google-gax", "googleapis-common-protos", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-error-reporting-v1beta1", "proto-google-cloud-logging-v2", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-firestore": {
"name": "examples_checked_in_requirements_bzl__google_cloud_firestore_0_28_0",
"version": "0.28.0",
"wheel_name": "google_cloud_firestore-0.28.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-language": {
"name": "examples_checked_in_requirements_bzl__google_cloud_language_0_31_0",
"version": "0.31.0",
"wheel_name": "google_cloud_language-0.31.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "enum34", "futures", "google-api-core", "google-auth", "grpcio", "pyasn1", "pyasn1-modules", "rsa", "six"],
},
"google-cloud-logging": {
"name": "examples_checked_in_requirements_bzl__google_cloud_logging_1_4_0",
"version": "1.4.0",
"wheel_name": "google_cloud_logging-1.4.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "gapic-google-cloud-logging-v2", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "httplib2", "idna", "oauth2client", "ply", "proto-google-cloud-logging-v2", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-monitoring": {
"name": "examples_checked_in_requirements_bzl__google_cloud_monitoring_0_28_1",
"version": "0.28.1",
"wheel_name": "google_cloud_monitoring-0.28.1-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-pubsub": {
"name": "examples_checked_in_requirements_bzl__google_cloud_pubsub_0_29_4",
"version": "0.29.4",
"wheel_name": "google_cloud_pubsub-0.29.4-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "enum34", "futures", "google-api-core", "google-auth", "googleapis-common-protos", "grpc-google-iam-v1", "grpcio", "psutil", "pyasn1", "pyasn1-modules", "rsa", "six"],
},
"google-cloud-resource-manager": {
"name": "examples_checked_in_requirements_bzl__google_cloud_resource_manager_0_28_1",
"version": "0.28.1",
"wheel_name": "google_cloud_resource_manager-0.28.1-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-runtimeconfig": {
"name": "examples_checked_in_requirements_bzl__google_cloud_runtimeconfig_0_28_1",
"version": "0.28.1",
"wheel_name": "google_cloud_runtimeconfig-0.28.1-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-spanner": {
"name": "examples_checked_in_requirements_bzl__google_cloud_spanner_0_29_0",
"version": "0.29.0",
"wheel_name": "google_cloud_spanner-0.29.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpc-google-iam-v1", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-speech": {
"name": "examples_checked_in_requirements_bzl__google_cloud_speech_0_30_0",
"version": "0.30.0",
"wheel_name": "google_cloud_speech-0.30.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-storage": {
"name": "examples_checked_in_requirements_bzl__google_cloud_storage_1_6_0",
"version": "1.6.0",
"wheel_name": "google_cloud_storage-1.6.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-resumable-media", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-trace": {
"name": "examples_checked_in_requirements_bzl__google_cloud_trace_0_16_0",
"version": "0.16.0",
"wheel_name": "google_cloud_trace-0.16.0-py2-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-translate": {
"name": "examples_checked_in_requirements_bzl__google_cloud_translate_1_3_1",
"version": "1.3.1",
"wheel_name": "google_cloud_translate-1.3.1-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "futures", "google-api-core", "google-auth", "google-cloud-core", "googleapis-common-protos", "idna", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-videointelligence": {
"name": "examples_checked_in_requirements_bzl__google_cloud_videointelligence_0_28_0",
"version": "0.28.0",
"wheel_name": "google_cloud_videointelligence-0.28.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-auth", "google-gax", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-cloud-vision": {
"name": "examples_checked_in_requirements_bzl__google_cloud_vision_0_28_0",
"version": "0.28.0",
"wheel_name": "google_cloud_vision-0.28.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-api-core", "google-auth", "google-cloud-core", "google-gax", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "pytz", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-gax": {
"name": "examples_checked_in_requirements_bzl__google_gax_0_15_16",
"version": "0.15.16",
"wheel_name": "google_gax-0.15.16-py2.py3-none-any.whl",
"transitive_runtime_deps": ["cachetools", "certifi", "chardet", "dill", "enum34", "future", "futures", "google-auth", "googleapis-common-protos", "grpcio", "idna", "ply", "protobuf", "pyasn1", "pyasn1-modules", "requests", "rsa", "setuptools", "six", "urllib3"],
},
"google-resumable-media": {
"name": "examples_checked_in_requirements_bzl__google_resumable_media_0_3_1",
"version": "0.3.1",
"wheel_name": "google_resumable_media-0.3.1-py2.py3-none-any.whl",
"extras": ["requests"],
"transitive_runtime_deps": ["six"],
},
"googleapis-common-protos": {
"name": "examples_checked_in_requirements_bzl__googleapis_common_protos_1_5_3",
"version": "1.5.3",
"wheel_name": "googleapis_common_protos-1.5.3-py2-none-any.whl",
"extras": ["grpc"],
"transitive_runtime_deps": ["protobuf", "setuptools", "six"],
},
"grpc-google-iam-v1": {
"name": "examples_checked_in_requirements_bzl__grpc_google_iam_v1_0_11_4",
"version": "0.11.4",
"wheel_name": "grpc_google_iam_v1-0.11.4-py2-none-any.whl",
"transitive_runtime_deps": ["enum34", "futures", "googleapis-common-protos", "grpcio", "six"],
},
"grpcio": {
"name": "examples_checked_in_requirements_bzl__grpcio_1_15_0",
"version": "1.15.0",
"wheel_name": "grpcio-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl",
"transitive_runtime_deps": ["enum34", "futures", "six"],
},
"h5py": {
"name": "examples_checked_in_requirements_bzl__h5py_2_8_0",
"version": "2.8.0",
"wheel_name": "h5py-2.8.0-cp27-cp27mu-manylinux1_x86_64.whl",
"transitive_runtime_deps": ["numpy", "six"],
},
"httplib2": {
"name": "examples_checked_in_requirements_bzl__httplib2_0_11_3",
"version": "0.11.3",
"wheel_name": "httplib2-0.11.3-py2-none-any.whl",
},
"idna": {
"name": "examples_checked_in_requirements_bzl__idna_2_7",
"version": "2.7",
"wheel_name": "idna-2.7-py2.py3-none-any.whl",
},
"jmespath": {
"name": "examples_checked_in_requirements_bzl__jmespath_0_9_3",
"version": "0.9.3",
"wheel_name": "jmespath-0.9.3-py2.py3-none-any.whl",
},
"keras": {
"name": "examples_checked_in_requirements_bzl__Keras_2_2_2",
"version": "2.2.2",
"wheel_name": "Keras-2.2.2-py2.py3-none-any.whl",
"extras": ["tests", "visualize"],
"transitive_runtime_deps": ["h5py", "keras", "keras-applications", "keras-preprocessing", "numpy", "pyyaml", "scipy", "six"],
},
"keras-applications": {
"name": "examples_checked_in_requirements_bzl__Keras_Applications_1_0_4",
"version": "1.0.4",
"wheel_name": "Keras_Applications-1.0.4-py2.py3-none-any.whl",
"extras": ["tests"],
"transitive_runtime_deps": ["h5py", "keras", "keras-applications", "keras-preprocessing", "numpy", "pyyaml", "scipy", "six"],
},
"keras-preprocessing": {
"name": "examples_checked_in_requirements_bzl__Keras_Preprocessing_1_0_2",
"version": "1.0.2",
"wheel_name": "Keras_Preprocessing-1.0.2-py2.py3-none-any.whl",
"extras": ["tests"],
"transitive_runtime_deps": ["h5py", "keras", "keras-applications", "keras-preprocessing", "numpy", "pyyaml", "scipy", "six"],
},
"mock": {
"name": "examples_checked_in_requirements_bzl__mock_2_0_0",
"version": "2.0.0",
"wheel_name": "mock-2.0.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["funcsigs", "pbr", "six"],
},
"more-itertools": {
"name": "examples_checked_in_requirements_bzl__more_itertools_4_3_0",
"version": "4.3.0",
"wheel_name": "more_itertools-4.3.0-py2-none-any.whl",
"transitive_runtime_deps": ["six"],
},
"numpy": {
"name": "examples_checked_in_requirements_bzl__numpy_1_14_0",
"version": "1.14.0",
"wheel_name": "numpy-1.14.0-cp27-cp27mu-manylinux1_x86_64.whl",
},
"oauth2client": {
"name": "examples_checked_in_requirements_bzl__oauth2client_3_0_0",
"version": "3.0.0",
"wheel_name": "oauth2client-3.0.0-py2-none-any.whl",
"transitive_runtime_deps": ["httplib2", "pyasn1", "pyasn1-modules", "rsa", "six"],
},
"pathlib2": {
"name": "examples_checked_in_requirements_bzl__pathlib2_2_3_2",
"version": "2.3.2",
"wheel_name": "pathlib2-2.3.2-py2.py3-none-any.whl",
"transitive_runtime_deps": ["scandir", "six"],
},
"pbr": {
"name": "examples_checked_in_requirements_bzl__pbr_4_2_0",
"version": "4.2.0",
"wheel_name": "pbr-4.2.0-py2.py3-none-any.whl",
},
"pip": {
"name": "examples_checked_in_requirements_bzl__pip_9_0_0",
"version": "9.0.0",
"wheel_name": "pip-9.0.0-py2.py3-none-any.whl",
},
"pluggy": {
"name": "examples_checked_in_requirements_bzl__pluggy_0_7_1",
"version": "0.7.1",
"wheel_name": "pluggy-0.7.1-py2.py3-none-any.whl",
},
"ply": {
"name": "examples_checked_in_requirements_bzl__ply_3_8",
"version": "3.8",
"wheel_name": "ply-3.8-py2.py3-none-any.whl",
},
"proto-google-cloud-datastore-v1": {
"name": "examples_checked_in_requirements_bzl__proto_google_cloud_datastore_v1_0_90_4",
"version": "0.90.4",
"wheel_name": "proto_google_cloud_datastore_v1-0.90.4-py2-none-any.whl",
"extras": ["grpc"],
"transitive_runtime_deps": ["googleapis-common-protos", "httplib2", "oauth2client", "protobuf", "pyasn1", "pyasn1-modules", "rsa", "setuptools", "six"],
},
"proto-google-cloud-error-reporting-v1beta1": {
"name": "examples_checked_in_requirements_bzl__proto_google_cloud_error_reporting_v1beta1_0_15_3",
"version": "0.15.3",
"wheel_name": "proto_google_cloud_error_reporting_v1beta1-0.15.3-py2-none-any.whl",
"extras": ["grpc"],
"transitive_runtime_deps": ["googleapis-common-protos", "httplib2", "oauth2client", "protobuf", "pyasn1", "pyasn1-modules", "rsa", "setuptools", "six"],
},
"proto-google-cloud-logging-v2": {
"name": "examples_checked_in_requirements_bzl__proto_google_cloud_logging_v2_0_91_3",
"version": "0.91.3",
"wheel_name": "proto_google_cloud_logging_v2-0.91.3-py2-none-any.whl",
"extras": ["grpc"],
"transitive_runtime_deps": ["googleapis-common-protos", "httplib2", "oauth2client", "protobuf", "pyasn1", "pyasn1-modules", "rsa", "setuptools", "six"],
},
"protobuf": {
"name": "examples_checked_in_requirements_bzl__protobuf_3_6_1",
"version": "3.6.1",
"wheel_name": "protobuf-3.6.1-cp27-cp27mu-manylinux1_x86_64.whl",
"transitive_runtime_deps": ["setuptools", "six"],
},
"psutil": {
"name": "examples_checked_in_requirements_bzl__psutil_5_4_7",
"version": "5.4.7",
"wheel_name": "psutil-5.4.7-cp27-cp27mu-linux_x86_64.whl",
"extras": ["enum"],
},
"py": {
"name": "examples_checked_in_requirements_bzl__py_1_6_0",
"version": "1.6.0",
"wheel_name": "py-1.6.0-py2.py3-none-any.whl",
},
"pyasn1": {
"name": "examples_checked_in_requirements_bzl__pyasn1_0_4_4",
"version": "0.4.4",
"wheel_name": "pyasn1-0.4.4-py2.py3-none-any.whl",
},
"pyasn1-modules": {
"name": "examples_checked_in_requirements_bzl__pyasn1_modules_0_2_2",
"version": "0.2.2",
"wheel_name": "pyasn1_modules-0.2.2-py2.py3-none-any.whl",
"transitive_runtime_deps": ["pyasn1"],
},
"pytest": {
"name": "examples_checked_in_requirements_bzl__pytest_3_8_0",
"version": "3.8.0",
"wheel_name": "pytest-3.8.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["atomicwrites", "attrs", "funcsigs", "more-itertools", "pathlib2", "pluggy", "py", "scandir", "setuptools", "six"],
},
"pytest-mock": {
"name": "examples_checked_in_requirements_bzl__pytest_mock_1_6_2",
"version": "1.6.2",
"wheel_name": "pytest_mock-1.6.2-py2.py3-none-any.whl",
"transitive_runtime_deps": ["atomicwrites", "attrs", "funcsigs", "mock", "more-itertools", "pathlib2", "pbr", "pluggy", "py", "pytest", "scandir", "setuptools", "six"],
},
"python-dateutil": {
"name": "examples_checked_in_requirements_bzl__python_dateutil_2_7_3",
"version": "2.7.3",
"wheel_name": "python_dateutil-2.7.3-py2.py3-none-any.whl",
"transitive_runtime_deps": ["six"],
},
"pytz": {
"name": "examples_checked_in_requirements_bzl__pytz_2018_5",
"version": "2018.5",
"wheel_name": "pytz-2018.5-py2.py3-none-any.whl",
},
"pyyaml": {
"name": "examples_checked_in_requirements_bzl__PyYAML_3_13",
"version": "3.13",
"wheel_name": "PyYAML-3.13-cp27-cp27mu-linux_x86_64.whl",
},
"requests": {
"name": "examples_checked_in_requirements_bzl__requests_2_19_1",
"version": "2.19.1",
"wheel_name": "requests-2.19.1-py2.py3-none-any.whl",
"transitive_runtime_deps": ["certifi", "chardet", "idna", "urllib3"],
},
"rsa": {
"name": "examples_checked_in_requirements_bzl__rsa_4_0",
"version": "4.0",
"wheel_name": "rsa-4.0-py2.py3-none-any.whl",
"transitive_runtime_deps": ["pyasn1"],
},
"scandir": {
"name": "examples_checked_in_requirements_bzl__scandir_1_9_0",
"version": "1.9.0",
"wheel_name": "scandir-1.9.0-cp27-cp27mu-linux_x86_64.whl",
},
"scikit-learn": {
"name": "examples_checked_in_requirements_bzl__scikit_learn_0_17_1",
"version": "0.17.1",
"wheel_name": "scikit_learn-0.17.1-cp27-cp27mu-manylinux1_x86_64.whl",
},
"scipy": {
"name": "examples_checked_in_requirements_bzl__scipy_0_17_1",
"version": "0.17.1",
"wheel_name": "scipy-0.17.1-cp27-cp27mu-manylinux1_x86_64.whl",
},
"setuptools": {
"name": "examples_checked_in_requirements_bzl__setuptools_40_4_0",
"version": "40.4.0",
"wheel_name": "setuptools-40.4.0-py2.py3-none-any.whl",
"extras": ["certs", "ssl"],
},
"setuptools-scm": {
"name": "examples_checked_in_requirements_bzl__setuptools_scm_2_0_0",
"version": "2.0.0",
"wheel_name": "setuptools_scm-2.0.0-py2.py3-none-any.whl",
},
"six": {
"name": "examples_checked_in_requirements_bzl__six_1_11_0",
"version": "1.11.0",
"wheel_name": "six-1.11.0-py2.py3-none-any.whl",
},
"urllib3": {
"name": "examples_checked_in_requirements_bzl__urllib3_1_23",
"version": "1.23",
"wheel_name": "urllib3-1.23-py2.py3-none-any.whl",
},
}
for key, attributes in all_libs.items():
whl_library(
key = key,
all_libs = all_libs,
python = "@python2//:bin/python",
**attributes
)
| true | true |
1c2dcf8e3706cc01b274591fa63e3b945e1e564e | 7,149 | py | Python | pyro/distributions/empirical.py | adam-coogan/pyro | 6395b0f5f0b4744d3c822a39526027fb3fdb8b04 | [
"MIT"
] | null | null | null | pyro/distributions/empirical.py | adam-coogan/pyro | 6395b0f5f0b4744d3c822a39526027fb3fdb8b04 | [
"MIT"
] | null | null | null | pyro/distributions/empirical.py | adam-coogan/pyro | 6395b0f5f0b4744d3c822a39526027fb3fdb8b04 | [
"MIT"
] | null | null | null | from __future__ import absolute_import, division, print_function
import math
import numbers
import torch
from torch.distributions import constraints
from pyro.distributions.torch import Categorical
from pyro.distributions.torch_distribution import TorchDistribution
from pyro.distributions.util import copy_docs_from, logsumexp
@copy_docs_from(TorchDistribution)
class Empirical(TorchDistribution):
    r"""
    Empirical distribution associated with the sampled data.

    Samples are accumulated lazily via :meth:`add`; tensor state is only
    (re)built on first access (see ``_finalize``).
    """

    arg_constraints = {}
    support = constraints.real
    has_enumerate_support = True

    def __init__(self, validate_args=None):
        self._samples = None
        self._log_weights = None
        self._categorical = None
        # New (sample, log-weight) pairs are buffered here and folded into
        # the tensors above on demand by _finalize().
        self._samples_buffer = []
        self._weights_buffer = []
        # NOTE(review): super(TorchDistribution, self) deliberately skips
        # TorchDistribution.__init__ and initializes the torch base class
        # directly -- preserved from the original source.
        super(TorchDistribution, self).__init__(batch_shape=torch.Size(),
                                                validate_args=validate_args)

    @staticmethod
    def _append_from_buffer(tensor, buffer):
        """
        Append values from the buffer to the finalized tensor, along the
        leftmost dimension.

        :param torch.Tensor tensor: tensor containing existing values.
        :param list buffer: list of new values.
        :return: tensor with new values appended at the bottom.
        """
        buffer_tensor = torch.stack(buffer, dim=0)
        return torch.cat([tensor, buffer_tensor], dim=0)

    def _finalize(self):
        """
        Appends values collected in the samples/weights buffers to their
        corresponding tensors.
        """
        if not self._samples_buffer:
            return
        self._samples = self._append_from_buffer(self._samples,
                                                 self._samples_buffer)
        self._log_weights = self._append_from_buffer(self._log_weights,
                                                     self._weights_buffer)
        self._categorical = Categorical(logits=self._log_weights)
        # Reset buffers.
        self._samples_buffer, self._weights_buffer = [], []

    @property
    def sample_size(self):
        """
        Number of samples that constitute the empirical distribution.

        :return int: number of samples collected.
        """
        self._finalize()
        if self._samples is None:
            return 0
        return self._samples.size(0)

    def add(self, value, weight=None, log_weight=None):
        """
        Adds a new data point to the sample. The values in successive calls to
        ``add`` must have the same tensor shape and size. Optionally, an
        importance weight can be specified via ``log_weight`` or ``weight``
        (default value of `1` is used if not specified).

        :param torch.Tensor value: tensor to add to the sample.
        :param torch.Tensor weight: weight (optional) corresponding
            to the sample.
        :param torch.Tensor log_weight: log weight (optional) corresponding
            to the sample.
        """
        if self._validate_args:
            if weight is not None and log_weight is not None:
                raise ValueError("Only one of ```weight`` or ``log_weight`` should be specified.")

        # Integer-valued samples still get floating-point weights.
        weight_type = value.new_empty(1).float().type() if value.dtype in (torch.int32, torch.int64) \
            else value.type()
        # Apply default weight of 1.0.
        if log_weight is None and weight is None:
            log_weight = torch.tensor(0.0).type(weight_type)
        elif weight is not None and log_weight is None:
            log_weight = math.log(weight)

        if isinstance(log_weight, numbers.Number):
            log_weight = torch.tensor(log_weight).type(weight_type)

        if self._validate_args and log_weight.dim() > 0:
            raise ValueError("``weight.dim() > 0``, but weight should be a scalar.")

        # Seed the container tensors with the correct tensor types
        if self._samples is None:
            self._samples = value.new_tensor([])
            self._log_weights = log_weight.new_tensor([])
        # Append to the buffer list
        self._samples_buffer.append(value)
        self._weights_buffer.append(log_weight)

    def sample(self, sample_shape=torch.Size()):
        self._finalize()
        idxs = self._categorical.sample(sample_shape=sample_shape)
        return self._samples[idxs]

    def log_prob(self, value):
        """
        Returns the log of the probability mass function evaluated at ``value``.
        Note that this currently only supports scoring values with empty
        ``sample_shape``, i.e. an arbitrary batched sample is not allowed.

        :param torch.Tensor value: scalar or tensor value to be scored.
        """
        if self._validate_args:
            if value.shape != self.event_shape:
                raise ValueError("``value.shape`` must be {}".format(self.event_shape))
        self._finalize()
        selection_mask = self._samples.eq(value).contiguous().view(self.sample_size, -1)
        # Return -Inf if value is outside the support.
        if not selection_mask.any():
            return self._log_weights.new_zeros(torch.Size()).log()
        idxs = torch.arange(self.sample_size)[selection_mask.min(dim=-1)[0]]
        log_probs = self._categorical.log_prob(idxs)
        return logsumexp(log_probs, dim=-1)

    def _weighted_mean(self, value, dim=0):
        # Normalize weights against their max for numerical stability.
        weights = self._log_weights.reshape([-1] + (value.dim() - 1) * [1])
        max_weight = weights.max(dim=dim)[0]
        relative_probs = (weights - max_weight).exp()
        return (value * relative_probs).sum(dim=dim) / relative_probs.sum(dim=dim)

    @property
    def event_shape(self):
        self._finalize()
        if self._samples is None:
            return None
        return self._samples.shape[1:]

    @property
    def mean(self):
        self._finalize()
        if self._samples.dtype in (torch.int32, torch.int64):
            raise ValueError("Mean for discrete empirical distribution undefined. " +
                             "Consider converting samples to ``torch.float32`` " +
                             "or ``torch.float64``. If these are samples from a " +
                             "`Categorical` distribution, consider converting to a " +
                             "`OneHotCategorical` distribution.")
        return self._weighted_mean(self._samples)

    @property
    def variance(self):
        self._finalize()
        if self._samples.dtype in (torch.int32, torch.int64):
            raise ValueError("Variance for discrete empirical distribution undefined. " +
                             "Consider converting samples to ``torch.float32`` " +
                             "or ``torch.float64``. If these are samples from a " +
                             "`Categorical` distribution, consider converting to a " +
                             "`OneHotCategorical` distribution.")
        deviation_squared = torch.pow(self._samples - self.mean, 2)
        return self._weighted_mean(deviation_squared)

    def get_samples_and_weights(self):
        self._finalize()
        return self._samples, self._log_weights

    def enumerate_support(self, expand=True):
        # Empirical does not support batching, so expanding is a no-op.
        self._finalize()
        return self._samples
| 40.619318 | 102 | 0.638131 | from __future__ import absolute_import, division, print_function
import math
import numbers
import torch
from torch.distributions import constraints
from pyro.distributions.torch import Categorical
from pyro.distributions.torch_distribution import TorchDistribution
from pyro.distributions.util import copy_docs_from, logsumexp
@copy_docs_from(TorchDistribution)
class Empirical(TorchDistribution):
    r"""
    Empirical distribution associated with the sampled data.

    Samples added via :meth:`add` are buffered and folded into tensor
    state lazily by ``_finalize``.
    """

    arg_constraints = {}
    support = constraints.real
    has_enumerate_support = True

    def __init__(self, validate_args=None):
        self._samples = None
        self._log_weights = None
        self._categorical = None
        # Pending (sample, log-weight) pairs awaiting _finalize().
        self._samples_buffer = []
        self._weights_buffer = []
        # NOTE(review): super(TorchDistribution, self) skips
        # TorchDistribution.__init__ on purpose -- preserved as-is.
        super(TorchDistribution, self).__init__(batch_shape=torch.Size(),
                                                validate_args=validate_args)

    @staticmethod
    def _append_from_buffer(tensor, buffer):
        """Append buffered values to ``tensor`` along the leftmost dim."""
        buffer_tensor = torch.stack(buffer, dim=0)
        return torch.cat([tensor, buffer_tensor], dim=0)

    def _finalize(self):
        """Fold the sample/weight buffers into their tensors, if non-empty."""
        if not self._samples_buffer:
            return
        self._samples = self._append_from_buffer(self._samples,
                                                 self._samples_buffer)
        self._log_weights = self._append_from_buffer(self._log_weights,
                                                     self._weights_buffer)
        self._categorical = Categorical(logits=self._log_weights)
        # Reset buffers.
        self._samples_buffer, self._weights_buffer = [], []

    @property
    def sample_size(self):
        """Number of samples collected so far (int)."""
        self._finalize()
        if self._samples is None:
            return 0
        return self._samples.size(0)

    def add(self, value, weight=None, log_weight=None):
        """
        Add a data point, with an optional importance ``weight`` or
        ``log_weight`` (at most one; defaults to weight 1).
        """
        if self._validate_args:
            if weight is not None and log_weight is not None:
                raise ValueError("Only one of ```weight`` or ``log_weight`` should be specified.")

        # Integer-valued samples still get floating-point weights.
        weight_type = value.new_empty(1).float().type() if value.dtype in (torch.int32, torch.int64) \
            else value.type()
        # Apply default weight of 1.0.
        if log_weight is None and weight is None:
            log_weight = torch.tensor(0.0).type(weight_type)
        elif weight is not None and log_weight is None:
            log_weight = math.log(weight)

        if isinstance(log_weight, numbers.Number):
            log_weight = torch.tensor(log_weight).type(weight_type)

        if self._validate_args and log_weight.dim() > 0:
            raise ValueError("``weight.dim() > 0``, but weight should be a scalar.")

        # Seed the container tensors with the correct tensor types.
        if self._samples is None:
            self._samples = value.new_tensor([])
            self._log_weights = log_weight.new_tensor([])
        # Append to the buffer lists.
        self._samples_buffer.append(value)
        self._weights_buffer.append(log_weight)

    def sample(self, sample_shape=torch.Size()):
        self._finalize()
        idxs = self._categorical.sample(sample_shape=sample_shape)
        return self._samples[idxs]

    def log_prob(self, value):
        """
        Log probability mass at ``value``; only supports an empty
        ``sample_shape`` (no batched scoring).
        """
        if self._validate_args:
            if value.shape != self.event_shape:
                raise ValueError("``value.shape`` must be {}".format(self.event_shape))
        self._finalize()
        selection_mask = self._samples.eq(value).contiguous().view(self.sample_size, -1)
        # Return -Inf if value is outside the support.
        if not selection_mask.any():
            return self._log_weights.new_zeros(torch.Size()).log()
        idxs = torch.arange(self.sample_size)[selection_mask.min(dim=-1)[0]]
        log_probs = self._categorical.log_prob(idxs)
        return logsumexp(log_probs, dim=-1)

    def _weighted_mean(self, value, dim=0):
        # Shift by the max log-weight for numerical stability.
        weights = self._log_weights.reshape([-1] + (value.dim() - 1) * [1])
        max_weight = weights.max(dim=dim)[0]
        relative_probs = (weights - max_weight).exp()
        return (value * relative_probs).sum(dim=dim) / relative_probs.sum(dim=dim)

    @property
    def event_shape(self):
        self._finalize()
        if self._samples is None:
            return None
        return self._samples.shape[1:]

    @property
    def mean(self):
        self._finalize()
        if self._samples.dtype in (torch.int32, torch.int64):
            raise ValueError("Mean for discrete empirical distribution undefined. " +
                             "Consider converting samples to ``torch.float32`` " +
                             "or ``torch.float64``. If these are samples from a " +
                             "`Categorical` distribution, consider converting to a " +
                             "`OneHotCategorical` distribution.")
        return self._weighted_mean(self._samples)

    @property
    def variance(self):
        self._finalize()
        if self._samples.dtype in (torch.int32, torch.int64):
            raise ValueError("Variance for discrete empirical distribution undefined. " +
                             "Consider converting samples to ``torch.float32`` " +
                             "or ``torch.float64``. If these are samples from a " +
                             "`Categorical` distribution, consider converting to a " +
                             "`OneHotCategorical` distribution.")
        deviation_squared = torch.pow(self._samples - self.mean, 2)
        return self._weighted_mean(deviation_squared)

    def get_samples_and_weights(self):
        self._finalize()
        return self._samples, self._log_weights

    def enumerate_support(self, expand=True):
        # Empirical does not support batching, so expanding is a no-op.
        self._finalize()
        return self._samples
| true | true |
1c2dcfe0288e9e98dc8564dd67eee1a75307cf99 | 165 | py | Python | tests/model_control/detailed/transf_Quantization/model_control_one_enabled_Quantization_ConstantTrend_BestCycle_LSTM.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | tests/model_control/detailed/transf_Quantization/model_control_one_enabled_Quantization_ConstantTrend_BestCycle_LSTM.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | 1 | 2019-11-30T23:39:38.000Z | 2019-12-01T04:34:35.000Z | tests/model_control/detailed/transf_Quantization/model_control_one_enabled_Quantization_ConstantTrend_BestCycle_LSTM.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | import pyaf.tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Quantization'] , ['ConstantTrend'] , ['BestCycle'] , ['LSTM'] ); | 41.25 | 87 | 0.757576 | import pyaf.tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Quantization'] , ['ConstantTrend'] , ['BestCycle'] , ['LSTM'] ); | true | true |
1c2dcfeff930260114a1a4a118ef0bf1cb482eaf | 16,703 | py | Python | openstack_dashboard/usage/quotas.py | ilay09/horizon | a362e4b767f7616d344545aa0f9205857d3900a4 | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/usage/quotas.py | ilay09/horizon | a362e4b767f7616d344545aa0f9205857d3900a4 | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/usage/quotas.py | ilay09/horizon | a362e4b767f7616d344545aa0f9205857d3900a4 | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import defaultdict
import itertools
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon.utils.memoized import memoized
from openstack_dashboard.api import base
from openstack_dashboard.api import cinder
from openstack_dashboard.api import network
from openstack_dashboard.api import neutron
from openstack_dashboard.api import nova
from openstack_dashboard.contrib.developer.profiler import api as profiler
LOG = logging.getLogger(__name__)
# Quota field names grouped by the service that owns them.  The NOVA_*
# split separates compute quotas from the legacy nova-network quotas,
# which are disabled when Neutron is in use (see get_disabled_quotas).
NOVA_COMPUTE_QUOTA_FIELDS = {
    "metadata_items",
    "cores",
    "instances",
    "injected_files",
    "injected_file_content_bytes",
    "injected_file_path_bytes",
    "ram",
    "key_pairs",
}

NOVA_NETWORK_QUOTA_FIELDS = {
    "floating_ips",
    "fixed_ips",
    "security_groups",
    "security_group_rules",
}

NOVA_QUOTA_FIELDS = NOVA_COMPUTE_QUOTA_FIELDS | NOVA_NETWORK_QUOTA_FIELDS

CINDER_QUOTA_FIELDS = {"volumes",
                       "snapshots",
                       "gigabytes"}

NEUTRON_QUOTA_FIELDS = {"network",
                        "subnet",
                        "port",
                        "router",
                        "floatingip",
                        "security_group",
                        "security_group_rule",
                        }

# Union of every quota field Horizon knows about.
QUOTA_FIELDS = NOVA_QUOTA_FIELDS | CINDER_QUOTA_FIELDS | NEUTRON_QUOTA_FIELDS
# Human-readable (translatable) labels for each quota field above.
QUOTA_NAMES = {
    "metadata_items": _('Metadata Items'),
    "cores": _('VCPUs'),
    "instances": _('Instances'),
    "injected_files": _('Injected Files'),
    "injected_file_content_bytes": _('Injected File Content Bytes'),
    "ram": _('RAM (MB)'),
    "floating_ips": _('Floating IPs'),
    "fixed_ips": _('Fixed IPs'),
    "security_groups": _('Security Groups'),
    "security_group_rules": _('Security Group Rules'),
    "key_pairs": _('Key Pairs'),
    "injected_file_path_bytes": _('Injected File Path Bytes'),
    "volumes": _('Volumes'),
    "snapshots": _('Volume Snapshots'),
    "gigabytes": _('Total Size of Volumes and Snapshots (GB)'),
    "network": _("Networks"),
    "subnet": _("Subnets"),
    "port": _("Ports"),
    "router": _("Routers"),
    "floatingip": _('Floating IPs'),
    "security_group": _("Security Groups"),
    "security_group_rule": _("Security Group Rules")
}
class QuotaUsage(dict):
    """Tracks quota limit, used, and available for a given set of quotas.

    Each entry maps a quota name to a dict with up to three keys:
    ``quota`` (the limit), ``used`` and ``available``.  Entries are
    populated via :meth:`add_quota` and :meth:`tally`; direct item
    assignment is intentionally disallowed.
    """

    def __init__(self):
        self.usages = defaultdict(dict)

    def __contains__(self, key):
        return key in self.usages

    def __getitem__(self, key):
        return self.usages[key]

    def __setitem__(self, key, value):
        raise NotImplementedError("Directly setting QuotaUsage values is not "
                                  "supported. Please use the add_quota and "
                                  "tally methods.")

    def __repr__(self):
        return repr(dict(self.usages))

    def get(self, key, default=None):
        return self.usages.get(key, default)

    def add_quota(self, quota):
        """Adds an internal tracking reference for the given quota.

        :param quota: object with ``name`` and ``limit`` attributes;
            a limit of ``None`` or ``-1`` means "unlimited".
        """
        if quota.limit is None or quota.limit == -1:
            # Handle "unlimited" quotas.
            self.usages[quota.name]['quota'] = float("inf")
            self.usages[quota.name]['available'] = float("inf")
        else:
            self.usages[quota.name]['quota'] = int(quota.limit)

    def tally(self, name, value):
        """Adds to the "used" metric for the given quota."""
        value = value or 0  # Protection against None.
        # Start at 0 if this is the first value.
        if 'used' not in self.usages[name]:
            self.usages[name]['used'] = 0
        # Increment our usage and update the "available" metric.
        self.usages[name]['used'] += int(value)  # Fail if can't coerce to int.
        self.update_available(name)

    def update_available(self, name):
        """Updates the "available" metric for the given quota."""
        # Quotas never tracked via add_quota count as unlimited.
        quota = self.usages.get(name, {}).get('quota', float('inf'))
        available = quota - self.usages[name]['used']
        if available < 0:
            available = 0
        self.usages[name]['available'] = available
def _get_quota_data(request, tenant_mode=True, disabled_quotas=None,
                    tenant_id=None):
    """Fetch Nova and Cinder quota limits as a single ``base.QuotaSet``.

    :param tenant_mode: when True read per-tenant quotas, otherwise the
        service defaults.
    :param disabled_quotas: quota names to omit; computed via
        ``get_disabled_quotas`` when not given.
    :param tenant_id: tenant to query; defaults to the request user's tenant.
    """
    quotasets = []
    if not tenant_id:
        tenant_id = request.user.tenant_id
    if disabled_quotas is None:
        disabled_quotas = get_disabled_quotas(request)

    qs = base.QuotaSet()

    if NOVA_QUOTA_FIELDS - disabled_quotas:
        if tenant_mode:
            quotasets.append(nova.tenant_quota_get(request, tenant_id))
        else:
            quotasets.append(nova.default_quota_get(request, tenant_id))

    if CINDER_QUOTA_FIELDS - disabled_quotas:
        try:
            if tenant_mode:
                quotasets.append(cinder.tenant_quota_get(request, tenant_id))
            else:
                quotasets.append(cinder.default_quota_get(request, tenant_id))
        except cinder.cinder_exception.ClientException:
            # Treat all Cinder quotas as disabled rather than failing the
            # whole page when the volume service errors out.
            disabled_quotas.update(CINDER_QUOTA_FIELDS)
            msg = _("Unable to retrieve volume limit information.")
            exceptions.handle(request, msg)

    for quota in itertools.chain(*quotasets):
        if quota.name not in disabled_quotas:
            qs[quota.name] = quota.limit
    return qs
@profiler.trace
def get_default_quota_data(request, disabled_quotas=None, tenant_id=None):
    """Return the default (non-tenant) quota limits as a QuotaSet."""
    return _get_quota_data(request,
                           tenant_mode=False,
                           disabled_quotas=disabled_quotas,
                           tenant_id=tenant_id)
@profiler.trace
def get_tenant_quota_data(request, disabled_quotas=None, tenant_id=None):
    """Return per-tenant quota limits, merging in Neutron quotas.

    Nova/Cinder limits come from ``_get_quota_data``; when Neutron is
    enabled the corresponding network quotas are fetched separately and
    renamed/substituted into the result.
    """
    qs = _get_quota_data(request,
                         tenant_mode=True,
                         disabled_quotas=disabled_quotas,
                         tenant_id=tenant_id)

    # TODO(jpichon): There is no API to get the default system quotas
    # in Neutron (cf. LP#1204956), so for now handle tenant quotas here.
    # This should be handled in _get_quota_data() eventually.

    # TODO(amotoki): Purge this tricky usage.
    # openstack_dashboard/dashboards/identity/projects/views.py
    # calls get_tenant_quota_data directly and it expects
    # neutron data is not returned.
    if not disabled_quotas:
        return qs

    # Check if neutron is enabled by looking for network
    if not (NEUTRON_QUOTA_FIELDS - disabled_quotas):
        return qs

    tenant_id = tenant_id or request.user.tenant_id
    neutron_quotas = neutron.tenant_quota_get(request, tenant_id)

    if 'floating_ips' in disabled_quotas:
        if 'floatingip' not in disabled_quotas:
            # Rename floatingip to floating_ips since that's how it's
            # expected in some places (e.g. Security & Access' Floating IPs)
            fips_quota = neutron_quotas.get('floatingip').limit
            qs.add(base.QuotaSet({'floating_ips': fips_quota}))

    if 'security_groups' in disabled_quotas:
        if 'security_group' not in disabled_quotas:
            # Rename security_group to security_groups since that's how it's
            # expected in some places (e.g. Security & Access' Security Groups)
            sec_quota = neutron_quotas.get('security_group').limit
            qs.add(base.QuotaSet({'security_groups': sec_quota}))

    if 'network' in disabled_quotas:
        for item in qs.items:
            if item.name == 'networks':
                qs.items.remove(item)
                break
    else:
        net_quota = neutron_quotas.get('network').limit
        qs.add(base.QuotaSet({'networks': net_quota}))

    if 'subnet' in disabled_quotas:
        for item in qs.items:
            if item.name == 'subnets':
                qs.items.remove(item)
                break
    else:
        net_quota = neutron_quotas.get('subnet').limit
        qs.add(base.QuotaSet({'subnets': net_quota}))

    if 'router' in disabled_quotas:
        for item in qs.items:
            if item.name == 'routers':
                qs.items.remove(item)
                break
    else:
        router_quota = neutron_quotas.get('router').limit
        qs.add(base.QuotaSet({'routers': router_quota}))

    return qs
@profiler.trace
def get_disabled_quotas(request):
    """Return the set of quota field names that do not apply to this cloud.

    Fields are disabled when their owning service (Cinder, Neutron, Nova)
    is unavailable, or when Neutron supersedes the legacy nova-network
    quotas.
    """
    disabled_quotas = set([])

    # Cinder
    if not cinder.is_volume_service_enabled(request):
        disabled_quotas.update(CINDER_QUOTA_FIELDS)

    # Neutron
    if not base.is_service_enabled(request, 'network'):
        disabled_quotas.update(NEUTRON_QUOTA_FIELDS)
    else:
        # Remove the nova network quotas
        disabled_quotas.update(['floating_ips', 'fixed_ips'])

        if neutron.is_extension_supported(request, 'security-group'):
            # If Neutron security group is supported, disable Nova quotas
            disabled_quotas.update(['security_groups', 'security_group_rules'])
        else:
            # If Nova security group is used, disable Neutron quotas
            disabled_quotas.update(['security_group', 'security_group_rule'])

        if not neutron.is_router_enabled(request):
            disabled_quotas.update(['router', 'floatingip'])

        try:
            if not neutron.is_quotas_extension_supported(request):
                disabled_quotas.update(NEUTRON_QUOTA_FIELDS)
        except Exception:
            LOG.exception("There was an error checking if the Neutron "
                          "quotas extension is enabled.")

    # Nova
    if not (base.is_service_enabled(request, 'compute') and
            nova.can_set_quotas()):
        disabled_quotas.update(NOVA_QUOTA_FIELDS)

    # There appear to be no glance quota fields currently
    return disabled_quotas
@profiler.trace
def _get_tenant_compute_usages(request, usages, disabled_quotas, tenant_id):
    """Tally instance/core/RAM usage for a tenant into ``usages``.

    :param usages: a ``QuotaUsage`` accumulator (mutated in place).
    :param disabled_quotas: quota names to skip entirely.
    :param tenant_id: tenant to inspect; falls back to the request's
        tenant when falsy.
    """
    enabled_compute_quotas = NOVA_COMPUTE_QUOTA_FIELDS - disabled_quotas
    if not enabled_compute_quotas:
        return

    # Unlike the other services it can be the case that nova is enabled but
    # doesn't support quotas, in which case we still want to get usage info,
    # so don't rely on '"instances" in disabled_quotas' as elsewhere
    if not base.is_service_enabled(request, 'compute'):
        return

    if tenant_id:
        instances, has_more = nova.server_list(
            request, search_opts={'tenant_id': tenant_id})
    else:
        instances, has_more = nova.server_list(request)

    # Fetch deleted flavors if necessary.
    flavors = dict([(f.id, f) for f in nova.flavor_list(request)])
    missing_flavors = [instance.flavor['id'] for instance in instances
                       if instance.flavor['id'] not in flavors]
    for missing in missing_flavors:
        if missing not in flavors:
            try:
                flavors[missing] = nova.flavor_get(request, missing)
            except Exception:
                # A flavor we can't resolve tallies as zero usage below.
                flavors[missing] = {}
                exceptions.handle(request, ignore=True)

    usages.tally('instances', len(instances))

    # Sum our usage based on the flavors of the instances.
    for flavor in [flavors[instance.flavor['id']] for instance in instances]:
        usages.tally('cores', getattr(flavor, 'vcpus', None))
        usages.tally('ram', getattr(flavor, 'ram', None))

    # Initialize the tally if no instances have been launched yet
    if len(instances) == 0:
        usages.tally('cores', 0)
        usages.tally('ram', 0)
@profiler.trace
def _get_tenant_network_usages(request, usages, disabled_quotas, tenant_id):
    """Tally floating IP, security group, network, subnet and router
    usage for a tenant into *usages* (a QuotaUsage, updated in place).
    """
    enabled_quotas = ((NOVA_NETWORK_QUOTA_FIELDS | NEUTRON_QUOTA_FIELDS)
                      - disabled_quotas)
    if not enabled_quotas:
        return

    # NOTE(amotoki): floatingip is Neutron quota and floating_ips is
    # Nova quota. We need to check both.
    if {'floatingip', 'floating_ips'} & enabled_quotas:
        floating_ips = []
        try:
            if network.floating_ip_supported(request):
                floating_ips = network.tenant_floating_ip_list(request)
        except Exception:
            # Deliberate best effort: an unreachable service just yields
            # a zero tally instead of breaking the page.
            pass
        usages.tally('floating_ips', len(floating_ips))

    if 'security_group' not in disabled_quotas:
        # (A dead "security_groups = []" pre-assignment was removed; the
        # list call below always rebinds the name.)
        security_groups = network.security_group_list(request)
        usages.tally('security_groups', len(security_groups))

    if 'network' not in disabled_quotas:
        networks = neutron.network_list(request, tenant_id=tenant_id)
        usages.tally('networks', len(networks))

    if 'subnet' not in disabled_quotas:
        subnets = neutron.subnet_list(request, tenant_id=tenant_id)
        usages.tally('subnets', len(subnets))

    if 'router' not in disabled_quotas:
        routers = neutron.router_list(request, tenant_id=tenant_id)
        usages.tally('routers', len(routers))
@profiler.trace
def _get_tenant_volume_usages(request, usages, disabled_quotas, tenant_id):
    """Tally volume count, snapshot count and total gigabytes consumed
    for a tenant into *usages* (a QuotaUsage, updated in place).
    """
    if not (CINDER_QUOTA_FIELDS - disabled_quotas):
        return
    try:
        if tenant_id:
            search_opts = {'all_tenants': 1, 'project_id': tenant_id}
            volumes = cinder.volume_list(request, search_opts)
            snapshots = cinder.volume_snapshot_list(request, search_opts)
        else:
            volumes = cinder.volume_list(request)
            snapshots = cinder.volume_snapshot_list(request)
        # 'gigabytes' covers both volumes and their snapshots.
        volume_gb = sum(int(volume.size) for volume in volumes)
        snapshot_gb = sum(int(snapshot.size) for snapshot in snapshots)
        usages.tally('gigabytes', snapshot_gb + volume_gb)
        usages.tally('volumes', len(volumes))
        usages.tally('snapshots', len(snapshots))
    except cinder.cinder_exception.ClientException:
        msg = _("Unable to retrieve volume limit information.")
        exceptions.handle(request, msg)
@profiler.trace
@memoized
def tenant_quota_usages(request, tenant_id=None):
    """Get our quotas and construct our usage object.

    If no tenant_id is provided, request.user.project_id is used.
    """
    if not tenant_id:
        tenant_id = request.user.project_id
    disabled_quotas = get_disabled_quotas(request)
    usages = QuotaUsage()
    # Record the limits first so update_available() has them.
    for quota in get_tenant_quota_data(request,
                                       disabled_quotas=disabled_quotas,
                                       tenant_id=tenant_id):
        usages.add_quota(quota)
    # Get our usages.
    _get_tenant_compute_usages(request, usages, disabled_quotas, tenant_id)
    _get_tenant_network_usages(request, usages, disabled_quotas, tenant_id)
    _get_tenant_volume_usages(request, usages, disabled_quotas, tenant_id)
    return usages
@profiler.trace
def tenant_limit_usages(request):
    """Return a dict combining Nova/Cinder absolute limits with usage
    counts derived from the volume and snapshot lists.
    """
    # TODO(licostan): This method shall be removed from Quota module.
    # ProjectUsage/BaseUsage maybe used instead on volume/image dashboards.
    limits = {}
    try:
        if base.is_service_enabled(request, 'compute'):
            limits.update(nova.tenant_absolute_limits(request, reserved=True))
    except Exception:
        msg = _("Unable to retrieve compute limit information.")
        exceptions.handle(request, msg)
    if cinder.is_volume_service_enabled(request):
        try:
            limits.update(cinder.tenant_absolute_limits(request))
            volumes = cinder.volume_list(request)
            snapshots = cinder.volume_snapshot_list(request)
            # gigabytesUsed should be a total of volumes and snapshots
            vol_size = sum([getattr(volume, 'size', 0) for volume
                            in volumes])
            snap_size = sum([getattr(snap, 'size', 0) for snap
                             in snapshots])
            limits['gigabytesUsed'] = vol_size + snap_size
            limits['volumesUsed'] = len(volumes)
            limits['snapshotsUsed'] = len(snapshots)
        except cinder.cinder_exception.ClientException:
            msg = _("Unable to retrieve volume limit information.")
            exceptions.handle(request, msg)
    return limits
def enabled_quotas(request):
    """Return the quota field names that are currently available.

    This is every known quota field minus those reported disabled by
    ``get_disabled_quotas``.
    """
    disabled = get_disabled_quotas(request)
    return QUOTA_FIELDS - disabled
| 36.469432 | 79 | 0.64695 |
from collections import defaultdict
import itertools
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon.utils.memoized import memoized
from openstack_dashboard.api import base
from openstack_dashboard.api import cinder
from openstack_dashboard.api import network
from openstack_dashboard.api import neutron
from openstack_dashboard.api import nova
from openstack_dashboard.contrib.developer.profiler import api as profiler
LOG = logging.getLogger(__name__)
# Nova quota fields covering compute resources.
NOVA_COMPUTE_QUOTA_FIELDS = {
    "metadata_items",
    "cores",
    "instances",
    "injected_files",
    "injected_file_content_bytes",
    "injected_file_path_bytes",
    "ram",
    "key_pairs",
}

# Nova quota fields for the legacy nova-network service.
NOVA_NETWORK_QUOTA_FIELDS = {
    "floating_ips",
    "fixed_ips",
    "security_groups",
    "security_group_rules",
}

NOVA_QUOTA_FIELDS = NOVA_COMPUTE_QUOTA_FIELDS | NOVA_NETWORK_QUOTA_FIELDS

# Cinder (block storage) quota fields.
CINDER_QUOTA_FIELDS = {"volumes",
                       "snapshots",
                       "gigabytes"}

# Neutron (networking) quota fields.
NEUTRON_QUOTA_FIELDS = {"network",
                        "subnet",
                        "port",
                        "router",
                        "floatingip",
                        "security_group",
                        "security_group_rule",
                        }

QUOTA_FIELDS = NOVA_QUOTA_FIELDS | CINDER_QUOTA_FIELDS | NEUTRON_QUOTA_FIELDS

# Maps each quota field name to its translatable display label.
QUOTA_NAMES = {
    "metadata_items": _('Metadata Items'),
    "cores": _('VCPUs'),
    "instances": _('Instances'),
    "injected_files": _('Injected Files'),
    "injected_file_content_bytes": _('Injected File Content Bytes'),
    "ram": _('RAM (MB)'),
    "floating_ips": _('Floating IPs'),
    "fixed_ips": _('Fixed IPs'),
    "security_groups": _('Security Groups'),
    "security_group_rules": _('Security Group Rules'),
    "key_pairs": _('Key Pairs'),
    "injected_file_path_bytes": _('Injected File Path Bytes'),
    "volumes": _('Volumes'),
    "snapshots": _('Volume Snapshots'),
    "gigabytes": _('Total Size of Volumes and Snapshots (GB)'),
    "network": _("Networks"),
    "subnet": _("Subnets"),
    "port": _("Ports"),
    "router": _("Routers"),
    "floatingip": _('Floating IPs'),
    "security_group": _("Security Groups"),
    "security_group_rule": _("Security Group Rules")
}
class QuotaUsage(dict):
    """Dictionary-like holder of per-resource quota, usage and availability.

    Entries have the shape ``{name: {'quota': n, 'used': n, 'available': n}}``
    and must be populated through :meth:`add_quota` and :meth:`tally`;
    direct item assignment is rejected.
    """

    def __init__(self):
        self.usages = defaultdict(dict)

    def __contains__(self, key):
        return key in self.usages

    def __getitem__(self, key):
        return self.usages[key]

    def __setitem__(self, key, value):
        raise NotImplementedError(
            "Directly setting QuotaUsage values is not supported. "
            "Please use the add_quota and tally methods.")

    def __repr__(self):
        return repr(dict(self.usages))

    def get(self, key, default=None):
        return self.usages.get(key, default)

    def add_quota(self, quota):
        """Record the limit of *quota* (an object with .name and .limit)."""
        entry = self.usages[quota.name]
        if quota.limit is None or quota.limit == -1:
            # None / -1 both mean "unlimited".
            entry['quota'] = float("inf")
            entry['available'] = float("inf")
        else:
            entry['quota'] = int(quota.limit)

    def tally(self, name, value):
        """Add *value* (None counts as 0) to the usage tally for *name*."""
        amount = int(value or 0)
        entry = self.usages[name]
        entry.setdefault('used', 0)
        entry['used'] += amount
        self.update_available(name)

    def update_available(self, name):
        """Recompute 'available' for *name*, clamping at zero."""
        quota = self.usages.get(name, {}).get('quota', float('inf'))
        remaining = quota - self.usages[name]['used']
        self.usages[name]['available'] = max(remaining, 0)
def _get_quota_data(request, tenant_mode=True, disabled_quotas=None,
                    tenant_id=None):
    """Collect Nova and Cinder quota limits into a single QuotaSet.

    :param tenant_mode: True fetches tenant-specific quotas, False the
        service defaults.
    :param disabled_quotas: quota names to omit; computed from
        get_disabled_quotas() when not given.
    :param tenant_id: tenant to query; defaults to the current tenant.
    """
    quotasets = []
    if not tenant_id:
        tenant_id = request.user.tenant_id
    if disabled_quotas is None:
        disabled_quotas = get_disabled_quotas(request)
    qs = base.QuotaSet()
    if NOVA_QUOTA_FIELDS - disabled_quotas:
        if tenant_mode:
            quotasets.append(nova.tenant_quota_get(request, tenant_id))
        else:
            quotasets.append(nova.default_quota_get(request, tenant_id))
    if CINDER_QUOTA_FIELDS - disabled_quotas:
        try:
            if tenant_mode:
                quotasets.append(cinder.tenant_quota_get(request, tenant_id))
            else:
                quotasets.append(cinder.default_quota_get(request, tenant_id))
        except cinder.cinder_exception.ClientException:
            # Cinder is unreachable: drop its fields instead of failing.
            disabled_quotas.update(CINDER_QUOTA_FIELDS)
            msg = _("Unable to retrieve volume limit information.")
            exceptions.handle(request, msg)
    # Flatten the per-service quota sets, skipping disabled fields.
    for quota in itertools.chain(*quotasets):
        if quota.name not in disabled_quotas:
            qs[quota.name] = quota.limit
    return qs
@profiler.trace
def get_default_quota_data(request, disabled_quotas=None, tenant_id=None):
    """Return the default (non tenant-specific) quota limits."""
    return _get_quota_data(request,
                           tenant_mode=False,
                           disabled_quotas=disabled_quotas,
                           tenant_id=tenant_id)
@profiler.trace
def get_tenant_quota_data(request, disabled_quotas=None, tenant_id=None):
    """Return tenant quota limits, merging in Neutron quotas when enabled."""
    qs = _get_quota_data(request,
                         tenant_mode=True,
                         disabled_quotas=disabled_quotas,
                         tenant_id=tenant_id)
    # TODO(jpichon): There is no API to get the default system quotas
    # in Neutron (cf. LP#1204956), so for now handle tenant quotas here.
    # This should be handled in _get_quota_data() eventually.
    # TODO(amotoki): Purge this tricky usage.
    # openstack_dashboard/dashboards/identity/projects/views.py
    # calls get_tenant_quota_data directly and it expects
    # neutron data is not returned.
    if not disabled_quotas:
        return qs
    # Check if neutron is enabled by looking for network
    if not (NEUTRON_QUOTA_FIELDS - disabled_quotas):
        return qs
    tenant_id = tenant_id or request.user.tenant_id
    neutron_quotas = neutron.tenant_quota_get(request, tenant_id)
    if 'floating_ips' in disabled_quotas:
        if 'floatingip' not in disabled_quotas:
            # Rename floatingip to floating_ips since that's how it's
            # expected in some places (e.g. Security & Access' Floating IPs)
            fips_quota = neutron_quotas.get('floatingip').limit
            qs.add(base.QuotaSet({'floating_ips': fips_quota}))
    if 'security_groups' in disabled_quotas:
        if 'security_group' not in disabled_quotas:
            # Rename security_group to security_groups for the same reason.
            sec_quota = neutron_quotas.get('security_group').limit
            qs.add(base.QuotaSet({'security_groups': sec_quota}))
    # For network/subnet/router, either drop the stale entry (quota
    # disabled) or merge the Neutron-reported limit into the result.
    if 'network' in disabled_quotas:
        for item in qs.items:
            if item.name == 'networks':
                qs.items.remove(item)
                break
    else:
        net_quota = neutron_quotas.get('network').limit
        qs.add(base.QuotaSet({'networks': net_quota}))
    if 'subnet' in disabled_quotas:
        for item in qs.items:
            if item.name == 'subnets':
                qs.items.remove(item)
                break
    else:
        net_quota = neutron_quotas.get('subnet').limit
        qs.add(base.QuotaSet({'subnets': net_quota}))
    if 'router' in disabled_quotas:
        for item in qs.items:
            if item.name == 'routers':
                qs.items.remove(item)
                break
    else:
        router_quota = neutron_quotas.get('router').limit
        qs.add(base.QuotaSet({'routers': router_quota}))
    return qs
@profiler.trace
def get_disabled_quotas(request):
    """Return the set of quota field names that should not be shown.

    A field is disabled when its backing service (Cinder, Neutron, Nova)
    is unavailable, when another service supersedes it, or when the
    service cannot manage quotas at all.
    """
    disabled_quotas = set([])
    # Cinder
    if not cinder.is_volume_service_enabled(request):
        disabled_quotas.update(CINDER_QUOTA_FIELDS)
    # Neutron
    if not base.is_service_enabled(request, 'network'):
        disabled_quotas.update(NEUTRON_QUOTA_FIELDS)
    else:
        # Remove the nova network quotas
        disabled_quotas.update(['floating_ips', 'fixed_ips'])
        if neutron.is_extension_supported(request, 'security-group'):
            # If Neutron security group is supported, disable Nova quotas
            disabled_quotas.update(['security_groups', 'security_group_rules'])
        else:
            # If Nova security group is used, disable Neutron quotas
            disabled_quotas.update(['security_group', 'security_group_rule'])
        if not neutron.is_router_enabled(request):
            disabled_quotas.update(['router', 'floatingip'])
        try:
            if not neutron.is_quotas_extension_supported(request):
                disabled_quotas.update(NEUTRON_QUOTA_FIELDS)
        except Exception:
            # Best effort: keep Neutron quotas enabled and log the failure.
            LOG.exception("There was an error checking if the Neutron "
                          "quotas extension is enabled.")
    # Nova
    if not (base.is_service_enabled(request, 'compute') and
            nova.can_set_quotas()):
        disabled_quotas.update(NOVA_QUOTA_FIELDS)
    # There appear to be no glance quota fields currently
    return disabled_quotas
@profiler.trace
def _get_tenant_compute_usages(request, usages, disabled_quotas, tenant_id):
    """Tally instance, core and RAM usage for a tenant into *usages*."""
    enabled_compute_quotas = NOVA_COMPUTE_QUOTA_FIELDS - disabled_quotas
    if not enabled_compute_quotas:
        return
    # Unlike the other services it can be the case that nova is enabled but
    # doesn't support quotas, in which case we still want to get usage info,
    # so don't rely on '"instances" in disabled_quotas' as elsewhere.
    if not base.is_service_enabled(request, 'compute'):
        return
    if tenant_id:
        instances, has_more = nova.server_list(
            request, search_opts={'tenant_id': tenant_id})
    else:
        instances, has_more = nova.server_list(request)
    # Fetch deleted flavors if necessary.
    flavors = dict([(f.id, f) for f in nova.flavor_list(request)])
    missing_flavors = [instance.flavor['id'] for instance in instances
                       if instance.flavor['id'] not in flavors]
    for missing in missing_flavors:
        # The re-check skips duplicates already fetched in this loop.
        if missing not in flavors:
            try:
                flavors[missing] = nova.flavor_get(request, missing)
            except Exception:
                # Best effort: an unresolvable flavor contributes no
                # cores/ram below (the getattr defaults kick in).
                flavors[missing] = {}
                exceptions.handle(request, ignore=True)
    usages.tally('instances', len(instances))
    # Sum our usage based on the flavors of the instances.
    for flavor in [flavors[instance.flavor['id']] for instance in instances]:
        usages.tally('cores', getattr(flavor, 'vcpus', None))
        usages.tally('ram', getattr(flavor, 'ram', None))
    # Initialize the tally if no instances have been launched yet
    if len(instances) == 0:
        usages.tally('cores', 0)
        usages.tally('ram', 0)
@profiler.trace
def _get_tenant_network_usages(request, usages, disabled_quotas, tenant_id):
    """Tally floating IP, security group, network, subnet and router
    usage for a tenant into *usages*.
    """
    enabled_quotas = ((NOVA_NETWORK_QUOTA_FIELDS | NEUTRON_QUOTA_FIELDS)
                      - disabled_quotas)
    if not enabled_quotas:
        return
    # NOTE(amotoki): floatingip is Neutron quota and floating_ips is
    # Nova quota. We need to check both.
    if {'floatingip', 'floating_ips'} & enabled_quotas:
        floating_ips = []
        try:
            if network.floating_ip_supported(request):
                floating_ips = network.tenant_floating_ip_list(request)
        except Exception:
            # Deliberate best effort: an unreachable service yields a
            # zero tally instead of breaking the page.
            pass
        usages.tally('floating_ips', len(floating_ips))
    if 'security_group' not in disabled_quotas:
        security_groups = []
        security_groups = network.security_group_list(request)
        usages.tally('security_groups', len(security_groups))
    if 'network' not in disabled_quotas:
        networks = neutron.network_list(request, tenant_id=tenant_id)
        usages.tally('networks', len(networks))
    if 'subnet' not in disabled_quotas:
        subnets = neutron.subnet_list(request, tenant_id=tenant_id)
        usages.tally('subnets', len(subnets))
    if 'router' not in disabled_quotas:
        routers = neutron.router_list(request, tenant_id=tenant_id)
        usages.tally('routers', len(routers))
@profiler.trace
def _get_tenant_volume_usages(request, usages, disabled_quotas, tenant_id):
    """Tally volume count, snapshot count and total gigabytes consumed
    for a tenant into *usages*.
    """
    if CINDER_QUOTA_FIELDS - disabled_quotas:
        try:
            if tenant_id:
                opts = {'all_tenants': 1, 'project_id': tenant_id}
                volumes = cinder.volume_list(request, opts)
                snapshots = cinder.volume_snapshot_list(request, opts)
            else:
                volumes = cinder.volume_list(request)
                snapshots = cinder.volume_snapshot_list(request)
            volume_usage = sum([int(v.size) for v in volumes])
            snapshot_usage = sum([int(s.size) for s in snapshots])
            # 'gigabytes' covers both volumes and their snapshots.
            usages.tally('gigabytes', (snapshot_usage + volume_usage))
            usages.tally('volumes', len(volumes))
            usages.tally('snapshots', len(snapshots))
        except cinder.cinder_exception.ClientException:
            msg = _("Unable to retrieve volume limit information.")
            exceptions.handle(request, msg)
@profiler.trace
@memoized
def tenant_quota_usages(request, tenant_id=None):
    """Get our quotas and construct our usage object.

    If no tenant_id is provided, request.user.project_id is used.
    """
    if not tenant_id:
        tenant_id = request.user.project_id
    disabled_quotas = get_disabled_quotas(request)
    usages = QuotaUsage()
    # Record the limits first so update_available() has them.
    for quota in get_tenant_quota_data(request,
                                       disabled_quotas=disabled_quotas,
                                       tenant_id=tenant_id):
        usages.add_quota(quota)
    # Get our usages.
    _get_tenant_compute_usages(request, usages, disabled_quotas, tenant_id)
    _get_tenant_network_usages(request, usages, disabled_quotas, tenant_id)
    _get_tenant_volume_usages(request, usages, disabled_quotas, tenant_id)
    return usages
@profiler.trace
def tenant_limit_usages(request):
    """Return a dict combining Nova/Cinder absolute limits with usage
    counts derived from the volume and snapshot lists.
    """
    # TODO(licostan): This method shall be removed from Quota module.
    # ProjectUsage/BaseUsage maybe used instead on volume/image dashboards.
    limits = {}
    try:
        if base.is_service_enabled(request, 'compute'):
            limits.update(nova.tenant_absolute_limits(request, reserved=True))
    except Exception:
        msg = _("Unable to retrieve compute limit information.")
        exceptions.handle(request, msg)
    if cinder.is_volume_service_enabled(request):
        try:
            limits.update(cinder.tenant_absolute_limits(request))
            volumes = cinder.volume_list(request)
            snapshots = cinder.volume_snapshot_list(request)
            # gigabytesUsed should be a total of volumes and snapshots
            vol_size = sum([getattr(volume, 'size', 0) for volume
                            in volumes])
            snap_size = sum([getattr(snap, 'size', 0) for snap
                             in snapshots])
            limits['gigabytesUsed'] = vol_size + snap_size
            limits['volumesUsed'] = len(volumes)
            limits['snapshotsUsed'] = len(snapshots)
        except cinder.cinder_exception.ClientException:
            msg = _("Unable to retrieve volume limit information.")
            exceptions.handle(request, msg)
    return limits
def enabled_quotas(request):
    """Returns the list of quotas available minus those that are disabled."""
    return QUOTA_FIELDS - get_disabled_quotas(request)
| true | true |
1c2dd03ed4752a62c2b24818fa3674ec47b4f620 | 11,122 | py | Python | wagtail/wagtailimages/tests/test_image_operations.py | isabella232/wagtail | 52bc8ae62719d3b955f1016efc9c691d4ac584e1 | [
"BSD-3-Clause"
] | 1 | 2021-09-21T00:06:52.000Z | 2021-09-21T00:06:52.000Z | wagtail/wagtailimages/tests/test_image_operations.py | revsys/wagtail | 52bc8ae62719d3b955f1016efc9c691d4ac584e1 | [
"BSD-3-Clause"
] | 1 | 2021-02-24T08:25:30.000Z | 2021-02-24T08:25:30.000Z | wagtail/wagtailimages/tests/test_image_operations.py | isabella232/wagtail | 52bc8ae62719d3b955f1016efc9c691d4ac584e1 | [
"BSD-3-Clause"
] | 1 | 2020-11-24T10:21:24.000Z | 2020-11-24T10:21:24.000Z | import unittest
from wagtail.wagtailimages import image_operations
from wagtail.wagtailimages.exceptions import InvalidFilterSpecError
from wagtail.wagtailimages.models import Image, Filter
class WillowOperationRecorder(object):
    """Stand-in for a Willow image that records operations instead of
    performing them.

    Any attribute access yields a callable that appends
    ``(name, args, kwargs)`` to ``ran_operations``, so tests can assert
    exactly which operations an image filter performed.
    """

    def __init__(self, start_size):
        self.ran_operations = []
        self.start_size = start_size

    def __getattr__(self, name):
        def record(*args, **kwargs):
            self.ran_operations.append((name, args, kwargs))
        return record

    def get_size(self):
        """Replay recorded resize/crop calls to derive the current size."""
        size = self.start_size
        for name, args, _kwargs in self.ran_operations:
            if name == 'resize':
                size = args[0]
            elif name == 'crop':
                left, top, right, bottom = args[0]
                size = (right - left, bottom - top)
        return size
class ImageOperationTestCase(unittest.TestCase):
    """Base class that generates test methods from declarative data.

    Subclasses set ``operation_class`` plus the three lists below, then
    call ``setup_test_methods()`` to attach one test method per entry.
    """
    operation_class = None
    # (filter_spec, {attr: expected_value}) pairs.
    filter_spec_tests = []
    # Filter specs that must raise InvalidFilterSpecError.
    filter_spec_error_tests = []
    # (filter_spec, source Image, expected recorded operations) triples.
    run_tests = []

    @classmethod
    def make_filter_spec_test(cls, filter_spec, expected_output):
        def test_filter_spec(self):
            operation = self.operation_class(*filter_spec.split('-'))
            # Check the attributes are set correctly
            for attr, value in expected_output.items():
                self.assertEqual(getattr(operation, attr), value)
        test_name = 'test_filter_%s' % filter_spec
        test_filter_spec.__name__ = test_name
        return test_filter_spec

    @classmethod
    def make_filter_spec_error_test(cls, filter_spec):
        def test_filter_spec_error(self):
            self.assertRaises(InvalidFilterSpecError, self.operation_class, *filter_spec.split('-'))
        test_name = 'test_filter_%s_raises_%s' % (filter_spec, InvalidFilterSpecError.__name__)
        test_filter_spec_error.__name__ = test_name
        return test_filter_spec_error

    @classmethod
    def make_run_test(cls, filter_spec, image, expected_output):
        def test_run(self):
            # Make operation
            operation = self.operation_class(*filter_spec.split('-'))
            # Make operation recorder
            operation_recorder = WillowOperationRecorder((image.width, image.height))
            # Run
            operation.run(operation_recorder, image)
            # Check
            self.assertEqual(operation_recorder.ran_operations, expected_output)
        test_name = 'test_run_%s' % filter_spec
        test_run.__name__ = test_name
        return test_run

    @classmethod
    def setup_test_methods(cls):
        # Abstract base: nothing to generate.
        if cls.operation_class is None:
            return
        # Filter spec tests
        for args in cls.filter_spec_tests:
            filter_spec_test = cls.make_filter_spec_test(*args)
            setattr(cls, filter_spec_test.__name__, filter_spec_test)
        # Filter spec error tests
        for filter_spec in cls.filter_spec_error_tests:
            filter_spec_error_test = cls.make_filter_spec_error_test(filter_spec)
            setattr(cls, filter_spec_error_test.__name__, filter_spec_error_test)
        # Running tests
        for args in cls.run_tests:
            run_test = cls.make_run_test(*args)
            setattr(cls, run_test.__name__, run_test)
class TestDoNothingOperation(ImageOperationTestCase):
    """The 'original' filter accepts any single token and performs no
    operations on the image.
    """
    operation_class = image_operations.DoNothingOperation

    filter_spec_tests = [
        ('original', dict()),
        ('blahblahblah', dict()),
        ('123456', dict()),
    ]

    filter_spec_error_tests = [
        'cannot-take-multiple-parameters',
    ]

    run_tests = [
        # No operations should be recorded.
        ('original', Image(width=1000, height=1000), []),
    ]

TestDoNothingOperation.setup_test_methods()
class TestFillOperation(ImageOperationTestCase):
    """Tests for the fill filter: crop to the target aspect ratio
    (biased towards any focal point), then resize.
    """
    operation_class = image_operations.FillOperation

    filter_spec_tests = [
        ('fill-800x600', dict(width=800, height=600, crop_closeness=0)),
        ('hello-800x600', dict(width=800, height=600, crop_closeness=0)),
        ('fill-800x600-c0', dict(width=800, height=600, crop_closeness=0)),
        ('fill-800x600-c100', dict(width=800, height=600, crop_closeness=1)),
        ('fill-800x600-c50', dict(width=800, height=600, crop_closeness=0.5)),
        ('fill-800x600-c1000', dict(width=800, height=600, crop_closeness=1)),
        ('fill-800000x100', dict(width=800000, height=100, crop_closeness=0)),
    ]

    filter_spec_error_tests = [
        'fill',
        'fill-800',
        'fill-abc',
        'fill-800xabc',
        'fill-800x600-',
        'fill-800x600x10',
        'fill-800x600-d100',
    ]

    run_tests = [
        # Basic usage
        ('fill-800x600', Image(width=1000, height=1000), [
            ('crop', ((0, 125, 1000, 875), ), {}),
            ('resize', ((800, 600), ), {}),
        ]),
        # Basic usage with an oddly-sized original image
        # This checks for a rounding precision issue (#968)
        ('fill-200x200', Image(width=539, height=720), [
            ('crop', ((0, 90, 539, 629), ), {}),
            ('resize', ((200, 200), ), {}),
        ]),
        # Closeness shouldn't have any effect when used without a focal point
        ('fill-800x600-c100', Image(width=1000, height=1000), [
            ('crop', ((0, 125, 1000, 875), ), {}),
            ('resize', ((800, 600), ), {}),
        ]),
        # Should always crop towards focal point. Even if no closeness is set
        ('fill-80x60', Image(
            width=1000,
            height=1000,
            focal_point_x=1000,
            focal_point_y=500,
            focal_point_width=0,
            focal_point_height=0,
        ), [
            # Crop the largest possible crop box towards the focal point
            ('crop', ((0, 125, 1000, 875), ), {}),
            # Resize it down to final size
            ('resize', ((80, 60), ), {}),
        ]),
        # Should crop as close as possible without upscaling
        ('fill-80x60-c100', Image(
            width=1000,
            height=1000,
            focal_point_x=1000,
            focal_point_y=500,
            focal_point_width=0,
            focal_point_height=0,
        ), [
            # Crop as close as possible to the focal point
            ('crop', ((920, 470, 1000, 530), ), {}),
            # No need to resize, crop should've created an 80x60 image
        ]),
        # Ditto with a wide image
        # Using a different filter so method name doesn't clash
        ('fill-100x60-c100', Image(
            width=2000,
            height=1000,
            focal_point_x=2000,
            focal_point_y=500,
            focal_point_width=0,
            focal_point_height=0,
        ), [
            # Crop to the right hand side
            ('crop', ((1900, 470, 2000, 530), ), {}),
        ]),
        # Make sure that the crop box never enters the focal point
        ('fill-50x50-c100', Image(
            width=2000,
            height=1000,
            focal_point_x=1000,
            focal_point_y=500,
            focal_point_width=100,
            focal_point_height=20,
        ), [
            # Crop a 100x100 box around the entire focal point
            ('crop', ((950, 450, 1050, 550), ), {}),
            # Resize it down to 50x50
            ('resize', ((50, 50), ), {}),
        ]),
        # Test that the image is never upscaled
        ('fill-1000x800', Image(width=100, height=100), [
            ('crop', ((0, 10, 100, 90), ), {}),
        ]),
        # Test that the crop closeness gets capped to prevent upscaling
        ('fill-1000x800-c100', Image(
            width=1500,
            height=1000,
            focal_point_x=750,
            focal_point_y=500,
            focal_point_width=0,
            focal_point_height=0,
        ), [
            # Crop a 1000x800 square out of the image as close to the
            # focal point as possible. Will not zoom too far in to
            # prevent upscaling
            ('crop', ((250, 100, 1250, 900), ), {}),
        ]),
        # Test for an issue where a ZeroDivisionError would occur when the
        # focal point size, image size and filter size match
        # See: #797
        ('fill-1500x1500-c100', Image(
            width=1500,
            height=1500,
            focal_point_x=750,
            focal_point_y=750,
            focal_point_width=1500,
            focal_point_height=1500,
        ), [
            # This operation could probably be optimised out
            ('crop', ((0, 0, 1500, 1500), ), {}),
        ])
    ]

TestFillOperation.setup_test_methods()
class TestMinMaxOperation(ImageOperationTestCase):
    """min/max filters resize so both (min) or neither (max) dimension
    exceeds the requested size, preserving aspect ratio.
    """
    operation_class = image_operations.MinMaxOperation

    filter_spec_tests = [
        ('min-800x600', dict(method='min', width=800, height=600)),
        ('max-800x600', dict(method='max', width=800, height=600)),
    ]

    filter_spec_error_tests = [
        'min',
        'min-800',
        'min-abc',
        'min-800xabc',
        'min-800x600-',
        'min-800x600-c100',
        'min-800x600x10',
    ]

    run_tests = [
        # Basic usage of min
        ('min-800x600', Image(width=1000, height=1000), [
            ('resize', ((800, 800), ), {}),
        ]),
        # Basic usage of max
        ('max-800x600', Image(width=1000, height=1000), [
            ('resize', ((600, 600), ), {}),
        ]),
    ]

TestMinMaxOperation.setup_test_methods()
class TestWidthHeightOperation(ImageOperationTestCase):
    """width/height filters resize to a single target dimension,
    preserving aspect ratio.
    """
    operation_class = image_operations.WidthHeightOperation

    filter_spec_tests = [
        ('width-800', dict(method='width', size=800)),
        ('height-600', dict(method='height', size=600)),
    ]

    filter_spec_error_tests = [
        'width',
        'width-800x600',
        'width-abc',
        'width-800-c100',
    ]

    run_tests = [
        # Basic usage of width
        ('width-400', Image(width=1000, height=500), [
            ('resize', ((400, 200), ), {}),
        ]),
        # Basic usage of height
        ('height-400', Image(width=1000, height=500), [
            ('resize', ((800, 400), ), {}),
        ]),
    ]

TestWidthHeightOperation.setup_test_methods()
class TestVaryKey(unittest.TestCase):
    """Filter.get_vary_key() behaviour for different filters/images.

    From these cases: non-fill filters yield an empty key, while fill
    filters yield a short hash that differs when a focal point is set.
    """

    def test_vary_key(self):
        # Non-fill filter: vary key is empty.
        image = Image(width=1000, height=1000)
        fil = Filter(spec='max-100x100')
        vary_key = fil.get_vary_key(image)
        self.assertEqual(vary_key, '')

    def test_vary_key_fill_filter(self):
        # Fill filter without a focal point: fixed hash value.
        image = Image(width=1000, height=1000)
        fil = Filter(spec='fill-100x100')
        vary_key = fil.get_vary_key(image)
        self.assertEqual(vary_key, '2e16d0ba')

    def test_vary_key_fill_filter_with_focal_point(self):
        # Fill filter with a focal point: the key changes.
        image = Image(
            width=1000,
            height=1000,
            focal_point_width=100,
            focal_point_height=100,
            focal_point_x=500,
            focal_point_y=500,
        )
        fil = Filter(spec='fill-100x100')
        vary_key = fil.get_vary_key(image)
        self.assertEqual(vary_key, '0bbe3b2f')
| 31.241573 | 100 | 0.577864 | import unittest
from wagtail.wagtailimages import image_operations
from wagtail.wagtailimages.exceptions import InvalidFilterSpecError
from wagtail.wagtailimages.models import Image, Filter
class WillowOperationRecorder(object):
    """Stand-in for a Willow image that records each operation call as a
    ``(name, args, kwargs)`` tuple instead of performing it.
    """

    def __init__(self, start_size):
        self.ran_operations = []
        self.start_size = start_size

    def __getattr__(self, attr):
        # Any unknown attribute becomes a recording callable.
        def operation(*args, **kwargs):
            self.ran_operations.append((attr, args, kwargs))
        return operation

    def get_size(self):
        # Replay recorded resize/crop calls to derive the current size.
        size = self.start_size
        for operation in self.ran_operations:
            if operation[0] == 'resize':
                size = operation[1][0]
            elif operation[0] == 'crop':
                crop = operation[1][0]
                size = crop[2] - crop[0], crop[3] - crop[1]
        return size
class ImageOperationTestCase(unittest.TestCase):
    """Base class that generates test methods from declarative data.

    Subclasses set ``operation_class`` plus the three lists below, then
    call ``setup_test_methods()`` to attach one test method per entry.
    """
    operation_class = None
    # (filter_spec, {attr: expected_value}) pairs.
    filter_spec_tests = []
    # Filter specs that must raise InvalidFilterSpecError.
    filter_spec_error_tests = []
    # (filter_spec, source Image, expected recorded operations) triples.
    run_tests = []

    @classmethod
    def make_filter_spec_test(cls, filter_spec, expected_output):
        def test_filter_spec(self):
            operation = self.operation_class(*filter_spec.split('-'))
            # Check the attributes are set correctly.
            for attr, value in expected_output.items():
                self.assertEqual(getattr(operation, attr), value)
        test_name = 'test_filter_%s' % filter_spec
        test_filter_spec.__name__ = test_name
        return test_filter_spec

    @classmethod
    def make_filter_spec_error_test(cls, filter_spec):
        def test_filter_spec_error(self):
            self.assertRaises(InvalidFilterSpecError, self.operation_class, *filter_spec.split('-'))
        test_name = 'test_filter_%s_raises_%s' % (filter_spec, InvalidFilterSpecError.__name__)
        test_filter_spec_error.__name__ = test_name
        return test_filter_spec_error

    @classmethod
    def make_run_test(cls, filter_spec, image, expected_output):
        def test_run(self):
            operation = self.operation_class(*filter_spec.split('-'))
            # Record the operations instead of running them on a real image.
            operation_recorder = WillowOperationRecorder((image.width, image.height))
            operation.run(operation_recorder, image)
            self.assertEqual(operation_recorder.ran_operations, expected_output)
        test_name = 'test_run_%s' % filter_spec
        test_run.__name__ = test_name
        return test_run

    @classmethod
    def setup_test_methods(cls):
        # Abstract base: nothing to generate.
        if cls.operation_class is None:
            return
        for args in cls.filter_spec_tests:
            filter_spec_test = cls.make_filter_spec_test(*args)
            setattr(cls, filter_spec_test.__name__, filter_spec_test)
        for filter_spec in cls.filter_spec_error_tests:
            filter_spec_error_test = cls.make_filter_spec_error_test(filter_spec)
            setattr(cls, filter_spec_error_test.__name__, filter_spec_error_test)
        for args in cls.run_tests:
            run_test = cls.make_run_test(*args)
            setattr(cls, run_test.__name__, run_test)
class TestDoNothingOperation(ImageOperationTestCase):
    """The 'original' filter accepts any single token and performs no
    operations on the image.
    """
    operation_class = image_operations.DoNothingOperation

    filter_spec_tests = [
        ('original', dict()),
        ('blahblahblah', dict()),
        ('123456', dict()),
    ]

    filter_spec_error_tests = [
        'cannot-take-multiple-parameters',
    ]

    run_tests = [
        # No operations should be recorded.
        ('original', Image(width=1000, height=1000), []),
    ]

TestDoNothingOperation.setup_test_methods()
class TestFillOperation(ImageOperationTestCase):
    """Tests for the fill filter: crop to the target aspect ratio
    (biased towards any focal point), then resize.
    """
    operation_class = image_operations.FillOperation

    filter_spec_tests = [
        ('fill-800x600', dict(width=800, height=600, crop_closeness=0)),
        ('hello-800x600', dict(width=800, height=600, crop_closeness=0)),
        ('fill-800x600-c0', dict(width=800, height=600, crop_closeness=0)),
        ('fill-800x600-c100', dict(width=800, height=600, crop_closeness=1)),
        ('fill-800x600-c50', dict(width=800, height=600, crop_closeness=0.5)),
        ('fill-800x600-c1000', dict(width=800, height=600, crop_closeness=1)),
        ('fill-800000x100', dict(width=800000, height=100, crop_closeness=0)),
    ]

    filter_spec_error_tests = [
        'fill',
        'fill-800',
        'fill-abc',
        'fill-800xabc',
        'fill-800x600-',
        'fill-800x600x10',
        'fill-800x600-d100',
    ]

    run_tests = [
        # Basic usage.
        ('fill-800x600', Image(width=1000, height=1000), [
            ('crop', ((0, 125, 1000, 875), ), {}),
            ('resize', ((800, 600), ), {}),
        ]),
        # Oddly-sized original image (rounding precision issue, #968).
        ('fill-200x200', Image(width=539, height=720), [
            ('crop', ((0, 90, 539, 629), ), {}),
            ('resize', ((200, 200), ), {}),
        ]),
        # Closeness has no effect without a focal point.
        ('fill-800x600-c100', Image(width=1000, height=1000), [
            ('crop', ((0, 125, 1000, 875), ), {}),
            ('resize', ((800, 600), ), {}),
        ]),
        # Should always crop towards focal point. Even if no closeness is set
        ('fill-80x60', Image(
            width=1000,
            height=1000,
            focal_point_x=1000,
            focal_point_y=500,
            focal_point_width=0,
            focal_point_height=0,
        ), [
            # Crop the largest possible crop box towards the focal point
            ('crop', ((0, 125, 1000, 875), ), {}),
            # Resize it down to final size
            ('resize', ((80, 60), ), {}),
        ]),
        # Should crop as close as possible without upscaling
        ('fill-80x60-c100', Image(
            width=1000,
            height=1000,
            focal_point_x=1000,
            focal_point_y=500,
            focal_point_width=0,
            focal_point_height=0,
        ), [
            # Crop as close as possible to the focal point
            ('crop', ((920, 470, 1000, 530), ), {}),
            # No need to resize, crop should've created an 80x60 image
        ]),
        # Ditto with a wide image; different filter spec so the generated
        # method name doesn't clash.
        ('fill-100x60-c100', Image(
            width=2000,
            height=1000,
            focal_point_x=2000,
            focal_point_y=500,
            focal_point_width=0,
            focal_point_height=0,
        ), [
            # Crop to the right hand side
            ('crop', ((1900, 470, 2000, 530), ), {}),
        ]),
        # Make sure that the crop box never enters the focal point
        ('fill-50x50-c100', Image(
            width=2000,
            height=1000,
            focal_point_x=1000,
            focal_point_y=500,
            focal_point_width=100,
            focal_point_height=20,
        ), [
            # Crop a 100x100 box around the entire focal point
            ('crop', ((950, 450, 1050, 550), ), {}),
            # Resize it down to 50x50
            ('resize', ((50, 50), ), {}),
        ]),
        # Test that the image is never upscaled
        ('fill-1000x800', Image(width=100, height=100), [
            ('crop', ((0, 10, 100, 90), ), {}),
        ]),
        # Test that the crop closeness gets capped to prevent upscaling
        ('fill-1000x800-c100', Image(
            width=1500,
            height=1000,
            focal_point_x=750,
            focal_point_y=500,
            focal_point_width=0,
            focal_point_height=0,
        ), [
            # Crop a 1000x800 square out of the image as close to the
            # focal point as possible. Will not zoom too far in to
            # prevent upscaling
            ('crop', ((250, 100, 1250, 900), ), {}),
        ]),
        # Test for an issue where a ZeroDivisionError would occur when the
        # focal point size, image size and filter size match
        # See: #797
        ('fill-1500x1500-c100', Image(
            width=1500,
            height=1500,
            focal_point_x=750,
            focal_point_y=750,
            focal_point_width=1500,
            focal_point_height=1500,
        ), [
            # This operation could probably be optimised out
            ('crop', ((0, 0, 1500, 1500), ), {}),
        ])
    ]

TestFillOperation.setup_test_methods()
class TestMinMaxOperation(ImageOperationTestCase):
    """Tests for the ``min-`` and ``max-`` resize filter specs."""
    operation_class = image_operations.MinMaxOperation

    # Valid specs and the attributes they should parse into.
    filter_spec_tests = [
        ('min-800x600', {'method': 'min', 'width': 800, 'height': 600}),
        ('max-800x600', {'method': 'max', 'width': 800, 'height': 600}),
    ]

    # Malformed specs that must be rejected by the parser.
    filter_spec_error_tests = [
        'min',
        'min-800',
        'min-abc',
        'min-800xabc',
        'min-800x600-',
        'min-800x600-c100',
        'min-800x600x10',
    ]

    run_tests = [
        # min: the smaller scale factor wins, so both dimensions end up
        # at least as large as requested.
        ('min-800x600', Image(width=1000, height=1000), [
            ('resize', ((800, 800),), {}),
        ]),
        # max: both dimensions end up no larger than requested.
        ('max-800x600', Image(width=1000, height=1000), [
            ('resize', ((600, 600),), {}),
        ]),
    ]


TestMinMaxOperation.setup_test_methods()
class TestWidthHeightOperation(ImageOperationTestCase):
    """Tests for the ``width-`` and ``height-`` resize filter specs."""
    operation_class = image_operations.WidthHeightOperation

    # Valid specs and the attributes they should parse into.
    filter_spec_tests = [
        ('width-800', {'method': 'width', 'size': 800}),
        ('height-600', {'method': 'height', 'size': 600}),
    ]

    # Malformed specs that must be rejected by the parser.
    filter_spec_error_tests = [
        'width',
        'width-800x600',
        'width-abc',
        'width-800-c100',
    ]

    run_tests = [
        # width: resize to the requested width, preserving aspect ratio
        ('width-400', Image(width=1000, height=500), [
            ('resize', ((400, 200),), {}),
        ]),
        # height: resize to the requested height, preserving aspect ratio
        ('height-400', Image(width=1000, height=500), [
            ('resize', ((800, 400),), {}),
        ]),
    ]


TestWidthHeightOperation.setup_test_methods()
class TestVaryKey(unittest.TestCase):
    """Checks Filter.get_vary_key: empty for filters whose output is
    unaffected by the focal point, an 8-character key otherwise."""
    def test_vary_key(self):
        # max- ignores the focal point, so the vary key is empty.
        img = Image(width=1000, height=1000)
        key = Filter(spec='max-100x100').get_vary_key(img)
        self.assertEqual(key, '')
    def test_vary_key_fill_filter(self):
        # fill- depends on the focal point, even when none has been set.
        img = Image(width=1000, height=1000)
        key = Filter(spec='fill-100x100').get_vary_key(img)
        self.assertEqual(key, '2e16d0ba')
    def test_vary_key_fill_filter_with_focal_point(self):
        # Setting a focal point changes the key for fill- filters.
        img = Image(
            width=1000,
            height=1000,
            focal_point_width=100,
            focal_point_height=100,
            focal_point_x=500,
            focal_point_y=500,
        )
        key = Filter(spec='fill-100x100').get_vary_key(img)
        self.assertEqual(key, '0bbe3b2f')
| true | true |
1c2dd1c1e204abef3b610274e85d4878859d378e | 77,002 | py | Python | Lib/unittest/mock.py | adamwen829/cpython | 0f1c7c760c6b2804f5d05cae9ca045d1fdf3d667 | [
"PSF-2.0"
] | 2 | 2017-05-05T02:07:59.000Z | 2017-08-18T09:24:48.000Z | Lib/unittest/mock.py | adamwen829/cpython | 0f1c7c760c6b2804f5d05cae9ca045d1fdf3d667 | [
"PSF-2.0"
] | null | null | null | Lib/unittest/mock.py | adamwen829/cpython | 0f1c7c760c6b2804f5d05cae9ca045d1fdf3d667 | [
"PSF-2.0"
] | 3 | 2016-04-21T07:58:27.000Z | 2016-05-06T21:34:44.000Z | # mock.py
# Test tools for mocking and patching.
# Maintained by Michael Foord
# Backport for other versions of Python available from
# http://pypi.python.org/pypi/mock
# Public API of this module; kept explicit so that star-imports expose
# only the supported names.
__all__ = (
    'Mock',
    'MagicMock',
    'patch',
    'sentinel',
    'DEFAULT',
    'ANY',
    'call',
    'create_autospec',
    'FILTER_DIR',
    'NonCallableMock',
    'NonCallableMagicMock',
    'mock_open',
    'PropertyMock',
)
__version__ = '1.0'
import inspect
import pprint
import sys
import builtins
from types import ModuleType
from functools import wraps, partial
# Names of all public builtins; used when patching module attributes that
# are really builtins (see _patch.get_original, which forces create=True).
_builtins = {name for name in dir(builtins) if not name.startswith('_')}
# Base classes that count as "exception" for side_effect handling.
BaseExceptions = (BaseException,)
if 'java' in sys.platform:
    # jython
    import java
    BaseExceptions = (BaseException, java.lang.Throwable)
# Global switch: when True, NonCallableMock.__dir__ filters its output
# down to useful members only.
FILTER_DIR = True
# Workaround for issue #12370
# Without this, the __class__ properties wouldn't be set correctly
_safe_super = super
def _is_instance_mock(obj):
    """Return True when *obj* is an instance of any Mock variant."""
    # isinstance() cannot be trusted on mocks because they spoof
    # __class__; check the real type against the root mock class instead.
    real_type = type(obj)
    return issubclass(real_type, NonCallableMock)
def _is_exception(obj):
    """Return True for exception instances and exception classes alike."""
    if isinstance(obj, BaseExceptions):
        return True
    return isinstance(obj, type) and issubclass(obj, BaseExceptions)
class _slotted(object):
    """Throwaway class whose only job is to expose a slot descriptor."""
    __slots__ = ['a']


# Attribute types that behave as descriptors when found on a spec object:
# slot descriptors (obtained from _slotted above) and properties.
DescriptorTypes = (type(_slotted.a), property)
def _get_signature_object(func, as_instance, eat_self):
    """
    Given an arbitrary, possibly callable object, try to create a suitable
    signature object.
    Return a (reduced func, signature) tuple, or None.
    """
    if isinstance(func, type) and not as_instance:
        # A class modelled as a class: the signature comes from __init__,
        # whose leading `self` must always be swallowed.
        try:
            func = func.__init__
        except AttributeError:
            return None
        eat_self = True
    elif not isinstance(func, FunctionTypes):
        # Something to be modelled as an instance: calling it goes through
        # __call__, so that is what gets introspected.
        try:
            func = func.__call__
        except AttributeError:
            return None
    # Binding a dummy first argument makes the signature ignore `self`.
    sig_func = partial(func, None) if eat_self else func
    try:
        return func, inspect.signature(sig_func)
    except ValueError:
        # inspect.signature() cannot handle some callables (e.g. many
        # builtins); treat those as having no introspectable signature.
        return None
def _check_signature(func, mock, skipfirst, instance=False):
    """Install a signature-checking stub on *mock*'s one-off class."""
    result = _get_signature_object(func, instance, skipfirst)
    if result is None:
        # No introspectable signature: leave the permissive default stub.
        return
    func, sig = result
    def checksig(_mock_self, *args, **kwargs):
        # Signature.bind raises TypeError on a mismatched call.
        sig.bind(*args, **kwargs)
    _copy_func_details(func, checksig)
    type(mock)._mock_check_sig = checksig
def _copy_func_details(func, funcopy):
    """Copy identifying metadata from *func* onto *funcopy*."""
    funcopy.__name__ = func.__name__
    funcopy.__doc__ = func.__doc__
    # func.__dict__ is deliberately NOT copied: that would expose real
    # attributes that are supposed to be mocked.
    for attribute in (
        '__text_signature__', '__module__', '__defaults__', '__kwdefaults__',
    ):
        try:
            setattr(funcopy, attribute, getattr(func, attribute))
        except AttributeError:
            pass
def _callable(obj):
    """Return True when *obj* can be called (classes always can)."""
    if isinstance(obj, type):
        return True
    return getattr(obj, '__call__', None) is not None
def _is_list(obj):
    """Exact type check for list or tuple (subclasses excluded).
    XXXX badly named -- tuples count too."""
    kind = type(obj)
    return kind is list or kind is tuple
def _instance_callable(obj):
    """Given an object, return True if the object is callable.
    For classes, return True if instances would be callable."""
    if not isinstance(obj, type):
        # Already an instance: callable iff it exposes __call__.
        return getattr(obj, '__call__', None) is not None
    # For classes, scan the MRO for a concrete __call__ entry.
    # (*Could* be broken by a metaclass overriding __mro__ or __dict__.)
    return any(base.__dict__.get('__call__') is not None
               for base in (obj,) + obj.__mro__)
def _set_signature(mock, original, instance=False):
    # creates a function with signature (*args, **kwargs) that delegates to a
    # mock. It still does signature checking by calling a lambda with the same
    # signature as the original.
    if not _callable(original):
        return
    skipfirst = isinstance(original, type)
    result = _get_signature_object(original, instance, skipfirst)
    if result is None:
        return
    func, sig = result
    def checksig(*args, **kwargs):
        # Raises TypeError when the call does not match the signature.
        sig.bind(*args, **kwargs)
    _copy_func_details(func, checksig)
    name = original.__name__
    if not name.isidentifier():
        # Fall back to a safe name when the original's cannot appear in
        # generated source (e.g. operator names).
        name = 'funcopy'
    # exec() a tiny wrapper so that the replacement function carries the
    # original's real name in tracebacks and introspection.
    context = {'_checksig_': checksig, 'mock': mock}
    src = """def %s(*args, **kwargs):
    _checksig_(*args, **kwargs)
    return mock(*args, **kwargs)""" % name
    exec (src, context)
    funcopy = context[name]
    _setup_func(funcopy, mock)
    return funcopy
def _setup_func(funcopy, mock):
    """Give *funcopy* the public mock API, delegating to *mock*."""
    funcopy.mock = mock
    # can't use isinstance with mocks
    if not _is_instance_mock(mock):
        return
    # Forwarding wrappers: assertions made on the function copy are
    # answered by the underlying mock.
    def assert_called_with(*args, **kwargs):
        return mock.assert_called_with(*args, **kwargs)
    def assert_called_once_with(*args, **kwargs):
        return mock.assert_called_once_with(*args, **kwargs)
    def assert_has_calls(*args, **kwargs):
        return mock.assert_has_calls(*args, **kwargs)
    def assert_any_call(*args, **kwargs):
        return mock.assert_any_call(*args, **kwargs)
    def reset_mock():
        funcopy.method_calls = _CallList()
        funcopy.mock_calls = _CallList()
        mock.reset_mock()
        ret = funcopy.return_value
        if _is_instance_mock(ret) and not ret is mock:
            ret.reset_mock()
    # Mirror the mock's call-recording attributes on the function copy.
    funcopy.called = False
    funcopy.call_count = 0
    funcopy.call_args = None
    funcopy.call_args_list = _CallList()
    funcopy.method_calls = _CallList()
    funcopy.mock_calls = _CallList()
    funcopy.return_value = mock.return_value
    funcopy.side_effect = mock.side_effect
    funcopy._mock_children = mock._mock_children
    funcopy.assert_called_with = assert_called_with
    funcopy.assert_called_once_with = assert_called_once_with
    funcopy.assert_has_calls = assert_has_calls
    funcopy.assert_any_call = assert_any_call
    funcopy.reset_mock = reset_mock
    # Installing the delegate makes the mock's delegated properties
    # (called, call_count, ...) read/write through funcopy.
    mock._mock_delegate = funcopy
def _is_magic(name):
    """Return True when *name* is exactly a dunder (e.g. '__init__')."""
    # Rebuilding the dunder form from the middle section and comparing
    # rejects strings that merely begin/end with underscores (e.g. '___').
    middle = name[2:-2]
    return name == '__%s__' % middle
class _SentinelObject(object):
    """A unique, named, sentinel object."""
    def __repr__(self):
        # Shown as the attribute access through which it is obtained.
        return 'sentinel.%s' % self.name
    def __init__(self, name):
        self.name = name
class _Sentinel(object):
    """Access attributes to return a named object, usable as a sentinel."""
    def __init__(self):
        self._sentinels = {}
    def __getattr__(self, name):
        if name == '__bases__':
            # Without this help(unittest.mock) raises an exception
            raise AttributeError
        # Each name maps to exactly one sentinel object, created lazily.
        if name not in self._sentinels:
            self._sentinels[name] = _SentinelObject(name)
        return self._sentinels[name]
# The single public sentinel factory.
sentinel = _Sentinel()
# DEFAULT distinguishes "argument not supplied" from None throughout.
DEFAULT = sentinel.DEFAULT
# Internal markers: attribute never set / attribute explicitly deleted.
_missing = sentinel.MISSING
_deleted = sentinel.DELETED
def _copy(value):
    """Shallow-copy plain containers; return anything else unchanged."""
    cls = type(value)
    if cls is dict or cls is list or cls is tuple or cls is set:
        return cls(value)
    return value
# Attribute names that NonCallableMock.__setattr__ always writes straight
# to the instance (bypassing spec checks); property setters rely on this.
_allowed_names = set(
    [
        'return_value', '_mock_return_value', 'side_effect',
        '_mock_side_effect', '_mock_parent', '_mock_new_parent',
        '_mock_name', '_mock_new_name'
    ]
)
def _delegating_property(name):
    """Build a property for *name* that reads/writes the delegate mock
    when one is installed, and the '_mock_<name>' slot otherwise."""
    _allowed_names.add(name)
    _the_name = '_mock_' + name
    # The two names are bound as argument defaults so each closure keeps
    # its own copy without a cell lookup.
    def _get(self, name=name, _the_name=_the_name):
        delegate = self._mock_delegate
        if delegate is None:
            return getattr(self, _the_name)
        return getattr(delegate, name)
    def _set(self, value, name=name, _the_name=_the_name):
        delegate = self._mock_delegate
        if delegate is None:
            self.__dict__[_the_name] = value
        else:
            setattr(delegate, name, value)
    return property(_get, _set)
class _CallList(list):
    """A list of calls where `sub in calls` also matches a contiguous
    sub-sequence, not just a single element."""
    def __contains__(self, value):
        # Non-list values use ordinary element membership.
        if not isinstance(value, list):
            return list.__contains__(self, value)
        needle_len = len(value)
        if needle_len > len(self):
            return False
        # Slide a window over self looking for an exact slice match.
        return any(self[start:start + needle_len] == value
                   for start in range(len(self) - needle_len + 1))
    def __repr__(self):
        return pprint.pformat(list(self))
def _check_and_set_parent(parent, value, name, new_name):
    """If *value* is a fresh, unattached mock, adopt it as a child of
    *parent* and return True; otherwise leave it alone and return False."""
    if not _is_instance_mock(value):
        return False
    # A mock that already has a name or a parent is left untouched.
    if ((value._mock_name or value._mock_new_name) or
        (value._mock_parent is not None) or
        (value._mock_new_parent is not None)):
        return False
    _parent = parent
    while _parent is not None:
        # setting a mock (value) as a child or return value of itself
        # should not modify the mock
        if _parent is value:
            return False
        _parent = _parent._mock_new_parent
    # new_name drives mock_calls recording; name drives method_calls.
    if new_name:
        value._mock_new_parent = parent
        value._mock_new_name = new_name
    if name:
        value._mock_parent = parent
        value._mock_name = name
    return True
# Internal class to identify if we wrapped an iterator object or not.
class _MockIter(object):
def __init__(self, obj):
self.obj = iter(obj)
def __iter__(self):
return self
def __next__(self):
return next(self.obj)
class Base(object):
    # Root of the mock class hierarchy: holds the two delegated defaults
    # and an __init__ that absorbs cooperative-super arguments.
    _mock_return_value = DEFAULT
    _mock_side_effect = None
    def __init__(self, *args, **kwargs):
        pass
class NonCallableMock(Base):
    """A non-callable version of `Mock`"""
    def __new__(cls, *args, **kw):
        # every instance has its own class
        # so we can create magic methods on the
        # class without stomping on other mocks
        new = type(cls.__name__, (cls,), {'__doc__': cls.__doc__})
        instance = object.__new__(new)
        return instance
    def __init__(
        self, spec=None, wraps=None, name=None, spec_set=None,
        parent=None, _spec_state=None, _new_name='', _new_parent=None,
        _spec_as_instance=False, _eat_self=None, unsafe=False, **kwargs
    ):
        if _new_parent is None:
            _new_parent = parent
        # Writing through __dict__ directly sidesteps this class's own
        # __setattr__ checks during construction.
        __dict__ = self.__dict__
        __dict__['_mock_parent'] = parent
        __dict__['_mock_name'] = name
        __dict__['_mock_new_name'] = _new_name
        __dict__['_mock_new_parent'] = _new_parent
        # spec_set doubles as the spec object itself plus a "strict" flag.
        if spec_set is not None:
            spec = spec_set
            spec_set = True
        if _eat_self is None:
            _eat_self = parent is not None
        self._mock_add_spec(spec, spec_set, _spec_as_instance, _eat_self)
        __dict__['_mock_children'] = {}
        __dict__['_mock_wraps'] = wraps
        __dict__['_mock_delegate'] = None
        __dict__['_mock_called'] = False
        __dict__['_mock_call_args'] = None
        __dict__['_mock_call_count'] = 0
        __dict__['_mock_call_args_list'] = _CallList()
        __dict__['_mock_mock_calls'] = _CallList()
        __dict__['method_calls'] = _CallList()
        __dict__['_mock_unsafe'] = unsafe
        if kwargs:
            self.configure_mock(**kwargs)
        _safe_super(NonCallableMock, self).__init__(
            spec, wraps, name, spec_set, parent,
            _spec_state
        )
    def attach_mock(self, mock, attribute):
        """
        Attach a mock as an attribute of this one, replacing its name and
        parent. Calls to the attached mock will be recorded in the
        `method_calls` and `mock_calls` attributes of this one."""
        # Clear any existing identity first so setattr re-parents it.
        mock._mock_parent = None
        mock._mock_new_parent = None
        mock._mock_name = ''
        mock._mock_new_name = None
        setattr(self, attribute, mock)
    def mock_add_spec(self, spec, spec_set=False):
        """Add a spec to a mock. `spec` can either be an object or a
        list of strings. Only attributes on the `spec` can be fetched as
        attributes from the mock.
        If `spec_set` is True then only attributes on the spec can be set."""
        self._mock_add_spec(spec, spec_set)
    def _mock_add_spec(self, spec, spec_set, _spec_as_instance=False,
                       _eat_self=False):
        _spec_class = None
        _spec_signature = None
        if spec is not None and not _is_list(spec):
            if isinstance(spec, type):
                _spec_class = spec
            else:
                _spec_class = _get_class(spec)
            # A non-list spec also yields a signature for call checking.
            res = _get_signature_object(spec,
                                        _spec_as_instance, _eat_self)
            _spec_signature = res and res[1]
            spec = dir(spec)
        __dict__ = self.__dict__
        __dict__['_spec_class'] = _spec_class
        __dict__['_spec_set'] = spec_set
        __dict__['_spec_signature'] = _spec_signature
        __dict__['_mock_methods'] = spec
    def __get_return_value(self):
        # Lazily create (and memoise) the return-value child mock.
        ret = self._mock_return_value
        if self._mock_delegate is not None:
            ret = self._mock_delegate.return_value
        if ret is DEFAULT:
            ret = self._get_child_mock(
                _new_parent=self, _new_name='()'
            )
            self.return_value = ret
        return ret
    def __set_return_value(self, value):
        if self._mock_delegate is not None:
            self._mock_delegate.return_value = value
        else:
            self._mock_return_value = value
            _check_and_set_parent(self, value, None, '()')
    __return_value_doc = "The value to be returned when the mock is called."
    return_value = property(__get_return_value, __set_return_value,
                            __return_value_doc)
    @property
    def __class__(self):
        # Reporting the spec's class lets mocks pass isinstance() checks.
        if self._spec_class is None:
            return type(self)
        return self._spec_class
    called = _delegating_property('called')
    call_count = _delegating_property('call_count')
    call_args = _delegating_property('call_args')
    call_args_list = _delegating_property('call_args_list')
    mock_calls = _delegating_property('mock_calls')
    def __get_side_effect(self):
        delegated = self._mock_delegate
        if delegated is None:
            return self._mock_side_effect
        sf = delegated.side_effect
        # Wrap plain iterables once in _MockIter so repeated access keeps
        # consuming the same iterator.
        if sf is not None and not callable(sf) and not isinstance(sf, _MockIter):
            sf = _MockIter(sf)
            delegated.side_effect = sf
        return sf
    def __set_side_effect(self, value):
        value = _try_iter(value)
        delegated = self._mock_delegate
        if delegated is None:
            self._mock_side_effect = value
        else:
            delegated.side_effect = value
    side_effect = property(__get_side_effect, __set_side_effect)
    def reset_mock(self):
        "Restore the mock object to its initial state."
        self.called = False
        self.call_args = None
        self.call_count = 0
        self.mock_calls = _CallList()
        self.call_args_list = _CallList()
        self.method_calls = _CallList()
        for child in self._mock_children.values():
            if isinstance(child, _SpecState):
                continue
            child.reset_mock()
        ret = self._mock_return_value
        if _is_instance_mock(ret) and ret is not self:
            ret.reset_mock()
    def configure_mock(self, **kwargs):
        """Set attributes on the mock through keyword arguments.
        Attributes plus return values and side effects can be set on child
        mocks using standard dot notation and unpacking a dictionary in the
        method call:
        >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}
        >>> mock.configure_mock(**attrs)"""
        for arg, val in sorted(kwargs.items(),
                               # we sort on the number of dots so that
                               # attributes are set before we set attributes on
                               # attributes
                               key=lambda entry: entry[0].count('.')):
            args = arg.split('.')
            final = args.pop()
            obj = self
            for entry in args:
                obj = getattr(obj, entry)
            setattr(obj, final, val)
    def __getattr__(self, name):
        if name in {'_mock_methods', '_mock_unsafe'}:
            raise AttributeError(name)
        elif self._mock_methods is not None:
            if name not in self._mock_methods or name in _all_magics:
                raise AttributeError("Mock object has no attribute %r" % name)
        elif _is_magic(name):
            raise AttributeError(name)
        if not self._mock_unsafe:
            # Refuse attributes that look like (possibly misspelled)
            # assertion methods, so typos fail loudly instead of silently
            # creating a child mock.
            if name.startswith(('assert', 'assret')):
                raise AttributeError(name)
        result = self._mock_children.get(name)
        if result is _deleted:
            raise AttributeError(name)
        elif result is None:
            wraps = None
            if self._mock_wraps is not None:
                # XXXX should we get the attribute without triggering code
                # execution?
                wraps = getattr(self._mock_wraps, name)
            result = self._get_child_mock(
                parent=self, name=name, wraps=wraps, _new_name=name,
                _new_parent=self
            )
            self._mock_children[name] = result
        elif isinstance(result, _SpecState):
            # Lazily realise autospec children the first time they are
            # fetched.
            result = create_autospec(
                result.spec, result.spec_set, result.instance,
                result.parent, result.name
            )
            self._mock_children[name] = result
        return result
    def __repr__(self):
        # Reconstruct the dotted path of this mock by walking the
        # _mock_new_parent chain back to the root.
        _name_list = [self._mock_new_name]
        _parent = self._mock_new_parent
        last = self
        dot = '.'
        if _name_list == ['()']:
            dot = ''
        seen = set()
        while _parent is not None:
            last = _parent
            _name_list.append(_parent._mock_new_name + dot)
            dot = '.'
            if _parent._mock_new_name == '()':
                dot = ''
            _parent = _parent._mock_new_parent
            # use ids here so as not to call __hash__ on the mocks
            if id(_parent) in seen:
                break
            seen.add(id(_parent))
        _name_list = list(reversed(_name_list))
        _first = last._mock_name or 'mock'
        if len(_name_list) > 1:
            if _name_list[1] not in ('()', '().'):
                _first += '.'
        _name_list[0] = _first
        name = ''.join(_name_list)
        name_string = ''
        if name not in ('mock', 'mock.'):
            name_string = ' name=%r' % name
        spec_string = ''
        if self._spec_class is not None:
            spec_string = ' spec=%r'
            if self._spec_set:
                spec_string = ' spec_set=%r'
            spec_string = spec_string % self._spec_class.__name__
        return "<%s%s%s id='%s'>" % (
            type(self).__name__,
            name_string,
            spec_string,
            id(self)
        )
    def __dir__(self):
        """Filter the output of `dir(mock)` to only useful members."""
        if not FILTER_DIR:
            return object.__dir__(self)
        extras = self._mock_methods or []
        from_type = dir(type(self))
        from_dict = list(self.__dict__)
        from_type = [e for e in from_type if not e.startswith('_')]
        from_dict = [e for e in from_dict if not e.startswith('_') or
                     _is_magic(e)]
        return sorted(set(extras + from_type + from_dict +
                          list(self._mock_children)))
    def __setattr__(self, name, value):
        if name in _allowed_names:
            # property setters go through here
            return object.__setattr__(self, name, value)
        elif (self._spec_set and self._mock_methods is not None and
            name not in self._mock_methods and
            name not in self.__dict__):
            raise AttributeError("Mock object has no attribute '%s'" % name)
        elif name in _unsupported_magics:
            msg = 'Attempting to set unsupported magic method %r.' % name
            raise AttributeError(msg)
        elif name in _all_magics:
            if self._mock_methods is not None and name not in self._mock_methods:
                raise AttributeError("Mock object has no attribute '%s'" % name)
            if not _is_instance_mock(value):
                # Magic methods must live on the class; wrap plain values
                # in a bound-style lambda.
                setattr(type(self), name, _get_method(name, value))
                original = value
                value = lambda *args, **kw: original(self, *args, **kw)
            else:
                # only set _new_name and not name so that mock_calls is tracked
                # but not method calls
                _check_and_set_parent(self, value, None, name)
                setattr(type(self), name, value)
                self._mock_children[name] = value
        elif name == '__class__':
            self._spec_class = value
            return
        else:
            if _check_and_set_parent(self, value, name, name):
                self._mock_children[name] = value
        return object.__setattr__(self, name, value)
    def __delattr__(self, name):
        if name in _all_magics and name in type(self).__dict__:
            delattr(type(self), name)
            if name not in self.__dict__:
                # for magic methods that are still MagicProxy objects and
                # not set on the instance itself
                return
        if name in self.__dict__:
            object.__delattr__(self, name)
        obj = self._mock_children.get(name, _missing)
        if obj is _deleted:
            raise AttributeError(name)
        if obj is not _missing:
            del self._mock_children[name]
        # Record the deletion so later getattr raises instead of creating
        # a fresh child mock.
        self._mock_children[name] = _deleted
    def _format_mock_call_signature(self, args, kwargs):
        name = self._mock_name or 'mock'
        return _format_call_signature(name, args, kwargs)
    def _format_mock_failure_message(self, args, kwargs):
        message = 'Expected call: %s\nActual call: %s'
        expected_string = self._format_mock_call_signature(args, kwargs)
        call_args = self.call_args
        # call_args may be a (name, args, kwargs) triple; drop the name.
        if len(call_args) == 3:
            call_args = call_args[1:]
        actual_string = self._format_mock_call_signature(*call_args)
        return message % (expected_string, actual_string)
    def _call_matcher(self, _call):
        """
        Given a call (or simply a (args, kwargs) tuple), return a
        comparison key suitable for matching with other calls.
        This is a best effort method which relies on the spec's signature,
        if available, or falls back on the arguments themselves.
        """
        sig = self._spec_signature
        if sig is not None:
            if len(_call) == 2:
                name = ''
                args, kwargs = _call
            else:
                name, args, kwargs = _call
            try:
                return name, sig.bind(*args, **kwargs)
            except TypeError as e:
                return e.with_traceback(None)
        else:
            return _call
    def assert_not_called(_mock_self):
        """assert that the mock was never called.
        """
        self = _mock_self
        if self.call_count != 0:
            msg = ("Expected '%s' to not have been called. Called %s times." %
                   (self._mock_name or 'mock', self.call_count))
            raise AssertionError(msg)
    def assert_called_with(_mock_self, *args, **kwargs):
        """assert that the mock was called with the specified arguments.
        Raises an AssertionError if the args and keyword args passed in are
        different to the last call to the mock."""
        self = _mock_self
        if self.call_args is None:
            expected = self._format_mock_call_signature(args, kwargs)
            raise AssertionError('Expected call: %s\nNot called' % (expected,))
        def _error_message():
            msg = self._format_mock_failure_message(args, kwargs)
            return msg
        expected = self._call_matcher((args, kwargs))
        actual = self._call_matcher(self.call_args)
        if expected != actual:
            cause = expected if isinstance(expected, Exception) else None
            raise AssertionError(_error_message()) from cause
    def assert_called_once_with(_mock_self, *args, **kwargs):
        """assert that the mock was called exactly once and with the specified
        arguments."""
        self = _mock_self
        if not self.call_count == 1:
            msg = ("Expected '%s' to be called once. Called %s times." %
                   (self._mock_name or 'mock', self.call_count))
            raise AssertionError(msg)
        return self.assert_called_with(*args, **kwargs)
    def assert_has_calls(self, calls, any_order=False):
        """assert the mock has been called with the specified calls.
        The `mock_calls` list is checked for the calls.
        If `any_order` is False (the default) then the calls must be
        sequential. There can be extra calls before or after the
        specified calls.
        If `any_order` is True then the calls can be in any order, but
        they must all appear in `mock_calls`."""
        expected = [self._call_matcher(c) for c in calls]
        cause = expected if isinstance(expected, Exception) else None
        all_calls = _CallList(self._call_matcher(c) for c in self.mock_calls)
        if not any_order:
            # _CallList.__contains__ does sub-sequence matching here.
            if expected not in all_calls:
                raise AssertionError(
                    'Calls not found.\nExpected: %r\n'
                    'Actual: %r' % (calls, self.mock_calls)
                ) from cause
            return
        all_calls = list(all_calls)
        not_found = []
        for kall in expected:
            try:
                all_calls.remove(kall)
            except ValueError:
                not_found.append(kall)
        if not_found:
            raise AssertionError(
                '%r not all found in call list' % (tuple(not_found),)
            ) from cause
    def assert_any_call(self, *args, **kwargs):
        """assert the mock has been called with the specified arguments.
        The assert passes if the mock has *ever* been called, unlike
        `assert_called_with` and `assert_called_once_with` that only pass if
        the call is the most recent one."""
        expected = self._call_matcher((args, kwargs))
        actual = [self._call_matcher(c) for c in self.call_args_list]
        if expected not in actual:
            cause = expected if isinstance(expected, Exception) else None
            expected_string = self._format_mock_call_signature(args, kwargs)
            raise AssertionError(
                '%s call not found' % expected_string
            ) from cause
    def _get_child_mock(self, **kw):
        """Create the child mocks for attributes and return value.
        By default child mocks will be the same type as the parent.
        Subclasses of Mock may want to override this to customize the way
        child mocks are made.
        For non-callable mocks the callable variant will be used (rather than
        any custom subclass)."""
        _type = type(self)
        if not issubclass(_type, CallableMixin):
            if issubclass(_type, NonCallableMagicMock):
                klass = MagicMock
            elif issubclass(_type, NonCallableMock) :
                klass = Mock
        else:
            # __mro__[1] is the "real" class under the per-instance class
            # created in __new__, so children match the parent's type.
            klass = _type.__mro__[1]
        return klass(**kw)
def _try_iter(obj):
    """Turn *obj* into an iterator for use as a side_effect sequence.
    None, exceptions and callables pass through untouched."""
    if obj is None or _is_exception(obj) or _callable(obj):
        return obj
    try:
        return iter(obj)
    except TypeError:
        # XXXX backwards compatibility: a non-iterable passes through and
        # will blow up on first call -- maybe this should fail early?
        return obj
class CallableMixin(Base):
    # Adds __call__ and all call-recording behaviour to a mock class.
    def __init__(self, spec=None, side_effect=None, return_value=DEFAULT,
                 wraps=None, name=None, spec_set=None, parent=None,
                 _spec_state=None, _new_name='', _new_parent=None, **kwargs):
        self.__dict__['_mock_return_value'] = return_value
        _safe_super(CallableMixin, self).__init__(
            spec, wraps, name, spec_set, parent,
            _spec_state, _new_name, _new_parent, **kwargs
        )
        self.side_effect = side_effect
    def _mock_check_sig(self, *args, **kwargs):
        # stub method that can be replaced with one with a specific signature
        pass
    def __call__(_mock_self, *args, **kwargs):
        # can't use self in-case a function / method we are mocking uses self
        # in the signature
        _mock_self._mock_check_sig(*args, **kwargs)
        return _mock_self._mock_call(*args, **kwargs)
    def _mock_call(_mock_self, *args, **kwargs):
        self = _mock_self
        # Record the call on this mock itself...
        self.called = True
        self.call_count += 1
        _new_name = self._mock_new_name
        _new_parent = self._mock_new_parent
        _call = _Call((args, kwargs), two=True)
        self.call_args = _call
        self.call_args_list.append(_call)
        self.mock_calls.append(_Call(('', args, kwargs)))
        # ...then propagate it up the parent chain, building the dotted
        # name as we go ('()' segments suppress the following dot).
        seen = set()
        skip_next_dot = _new_name == '()'
        do_method_calls = self._mock_parent is not None
        name = self._mock_name
        while _new_parent is not None:
            this_mock_call = _Call((_new_name, args, kwargs))
            if _new_parent._mock_new_name:
                dot = '.'
                if skip_next_dot:
                    dot = ''
                skip_next_dot = False
                if _new_parent._mock_new_name == '()':
                    skip_next_dot = True
                _new_name = _new_parent._mock_new_name + dot + _new_name
            if do_method_calls:
                if _new_name == name:
                    this_method_call = this_mock_call
                else:
                    this_method_call = _Call((name, args, kwargs))
                _new_parent.method_calls.append(this_method_call)
                do_method_calls = _new_parent._mock_parent is not None
                if do_method_calls:
                    name = _new_parent._mock_name + '.' + name
            _new_parent.mock_calls.append(this_mock_call)
            _new_parent = _new_parent._mock_new_parent
            # use ids here so as not to call __hash__ on the mocks
            _new_parent_id = id(_new_parent)
            if _new_parent_id in seen:
                break
            seen.add(_new_parent_id)
        # Compute the result: side_effect first, then wraps, then
        # return_value.
        ret_val = DEFAULT
        effect = self.side_effect
        if effect is not None:
            if _is_exception(effect):
                raise effect
            if not _callable(effect):
                # Iterable side_effect: each call yields the next value
                # (exceptions in the sequence are raised).
                result = next(effect)
                if _is_exception(result):
                    raise result
                if result is DEFAULT:
                    result = self.return_value
                return result
            ret_val = effect(*args, **kwargs)
        if (self._mock_wraps is not None and
             self._mock_return_value is DEFAULT):
            # No explicit return_value: pass the call to the wrapped object.
            return self._mock_wraps(*args, **kwargs)
        if ret_val is DEFAULT:
            ret_val = self.return_value
        return ret_val
# Mock combines CallableMixin (call recording, side_effect, return_value)
# with NonCallableMock (attribute, spec and child-mock machinery); the
# class body itself is documentation only.
class Mock(CallableMixin, NonCallableMock):
    """
    Create a new `Mock` object. `Mock` takes several optional arguments
    that specify the behaviour of the Mock object:
    * `spec`: This can be either a list of strings or an existing object (a
      class or instance) that acts as the specification for the mock object. If
      you pass in an object then a list of strings is formed by calling dir on
      the object (excluding unsupported magic attributes and methods). Accessing
      any attribute not in this list will raise an `AttributeError`.
      If `spec` is an object (rather than a list of strings) then
      `mock.__class__` returns the class of the spec object. This allows mocks
      to pass `isinstance` tests.
    * `spec_set`: A stricter variant of `spec`. If used, attempting to *set*
      or get an attribute on the mock that isn't on the object passed as
      `spec_set` will raise an `AttributeError`.
    * `side_effect`: A function to be called whenever the Mock is called. See
      the `side_effect` attribute. Useful for raising exceptions or
      dynamically changing return values. The function is called with the same
      arguments as the mock, and unless it returns `DEFAULT`, the return
      value of this function is used as the return value.
      If `side_effect` is an iterable then each call to the mock will return
      the next value from the iterable. If any of the members of the iterable
      are exceptions they will be raised instead of returned.
    * `return_value`: The value returned when the mock is called. By default
      this is a new Mock (created on first access). See the
      `return_value` attribute.
    * `wraps`: Item for the mock object to wrap. If `wraps` is not None then
      calling the Mock will pass the call through to the wrapped object
      (returning the real result). Attribute access on the mock will return a
      Mock object that wraps the corresponding attribute of the wrapped object
      (so attempting to access an attribute that doesn't exist will raise an
      `AttributeError`).
      If the mock has an explicit `return_value` set then calls are not passed
      to the wrapped object and the `return_value` is returned instead.
    * `name`: If the mock has a name then it will be used in the repr of the
      mock. This can be useful for debugging. The name is propagated to child
      mocks.
    Mocks can also be called with arbitrary keyword arguments. These will be
    used to set attributes on the mock after it is created.
    """
def _dot_lookup(thing, comp, import_path):
    """Fetch attribute *comp* from *thing*, importing *import_path* first
    when the attribute is a not-yet-imported submodule."""
    if not hasattr(thing, comp):
        # Submodules only appear as attributes of their package once they
        # have been imported; import and retry.
        __import__(import_path)
    return getattr(thing, comp)
def _importer(target):
    """Resolve a dotted *target* string to the object it names."""
    first, *rest = target.split('.')
    thing = __import__(first)
    path = first
    for part in rest:
        path = '%s.%s' % (path, part)
        thing = _dot_lookup(thing, part, path)
    return thing
def _is_started(patcher):
    # XXXX horrible
    # NOTE(review): relies on `is_local` being set when the patcher is
    # entered (presumably in _patch.__enter__, past this excerpt); the
    # attribute's mere presence is the "started" flag -- confirm.
    return hasattr(patcher, 'is_local')
class _patch(object):
attribute_name = None
_active_patches = []
def __init__(
self, getter, attribute, new, spec, create,
spec_set, autospec, new_callable, kwargs
):
if new_callable is not None:
if new is not DEFAULT:
raise ValueError(
"Cannot use 'new' and 'new_callable' together"
)
if autospec is not None:
raise ValueError(
"Cannot use 'autospec' and 'new_callable' together"
)
self.getter = getter
self.attribute = attribute
self.new = new
self.new_callable = new_callable
self.spec = spec
self.create = create
self.has_local = False
self.spec_set = spec_set
self.autospec = autospec
self.kwargs = kwargs
self.additional_patchers = []
def copy(self):
patcher = _patch(
self.getter, self.attribute, self.new, self.spec,
self.create, self.spec_set,
self.autospec, self.new_callable, self.kwargs
)
patcher.attribute_name = self.attribute_name
patcher.additional_patchers = [
p.copy() for p in self.additional_patchers
]
return patcher
def __call__(self, func):
if isinstance(func, type):
return self.decorate_class(func)
return self.decorate_callable(func)
def decorate_class(self, klass):
for attr in dir(klass):
if not attr.startswith(patch.TEST_PREFIX):
continue
attr_value = getattr(klass, attr)
if not hasattr(attr_value, "__call__"):
continue
patcher = self.copy()
setattr(klass, attr, patcher(attr_value))
return klass
def decorate_callable(self, func):
if hasattr(func, 'patchings'):
func.patchings.append(self)
return func
@wraps(func)
def patched(*args, **keywargs):
extra_args = []
entered_patchers = []
exc_info = tuple()
try:
for patching in patched.patchings:
arg = patching.__enter__()
entered_patchers.append(patching)
if patching.attribute_name is not None:
keywargs.update(arg)
elif patching.new is DEFAULT:
extra_args.append(arg)
args += tuple(extra_args)
return func(*args, **keywargs)
except:
if (patching not in entered_patchers and
_is_started(patching)):
# the patcher may have been started, but an exception
# raised whilst entering one of its additional_patchers
entered_patchers.append(patching)
# Pass the exception to __exit__
exc_info = sys.exc_info()
# re-raise the exception
raise
finally:
for patching in reversed(entered_patchers):
patching.__exit__(*exc_info)
patched.patchings = [self]
return patched
    def get_original(self):
        """Return ``(original_value, local)`` for the patched attribute.

        ``local`` is True when the attribute lives directly in the target's
        ``__dict__`` (as opposed to being inherited or dynamically provided);
        __exit__ uses this to decide between setattr-restore and delattr.
        Raises AttributeError when the attribute is missing and
        ``create`` is False.
        """
        target = self.getter()
        name = self.attribute
        original = DEFAULT
        local = False
        try:
            original = target.__dict__[name]
        except (AttributeError, KeyError):
            # no __dict__, or the name is not in it: fall back to a normal
            # (inheritance-following) lookup
            original = getattr(target, name, DEFAULT)
        else:
            local = True
        if name in _builtins and isinstance(target, ModuleType):
            # builtins are visible from modules without living in them, so
            # patching one on a module must behave like create=True
            self.create = True
        if not self.create and original is DEFAULT:
            raise AttributeError(
                "%s does not have the attribute %r" % (target, name)
            )
        return original, local
    def __enter__(self):
        """Perform the patch.

        Resolves the target, validates the spec/spec_set/autospec/new_callable
        combination, constructs the replacement object if one was not given,
        installs it with setattr, and enters any additional patchers (for
        patch.multiple). Returns the replacement mock, or a dict of created
        mocks when used via patch.multiple.
        """
        new, spec, spec_set = self.new, self.spec, self.spec_set
        autospec, kwargs = self.autospec, self.kwargs
        new_callable = self.new_callable
        self.target = self.getter()
        # normalise False to None
        if spec is False:
            spec = None
        if spec_set is False:
            spec_set = None
        if autospec is False:
            autospec = None
        if spec is not None and autospec is not None:
            raise TypeError("Can't specify spec and autospec")
        if ((spec is not None or autospec is not None) and
            spec_set not in (True, None)):
            raise TypeError("Can't provide explicit spec_set *and* spec or autospec")
        original, local = self.get_original()
        if new is DEFAULT and autospec is None:
            # no replacement supplied: build a MagicMock (or new_callable)
            inherit = False
            if spec is True:
                # set spec to the object we are replacing
                spec = original
                if spec_set is True:
                    spec_set = original
                    spec = None
            elif spec is not None:
                if spec_set is True:
                    spec_set = spec
                    spec = None
            elif spec_set is True:
                spec_set = original
            if spec is not None or spec_set is not None:
                if original is DEFAULT:
                    raise TypeError("Can't use 'spec' with create=True")
                if isinstance(original, type):
                    # If we're patching out a class and there is a spec
                    inherit = True
            Klass = MagicMock
            _kwargs = {}
            if new_callable is not None:
                Klass = new_callable
            elif spec is not None or spec_set is not None:
                this_spec = spec
                if spec_set is not None:
                    this_spec = spec_set
                if _is_list(this_spec):
                    not_callable = '__call__' not in this_spec
                else:
                    not_callable = not callable(this_spec)
                if not_callable:
                    Klass = NonCallableMagicMock
            if spec is not None:
                _kwargs['spec'] = spec
            if spec_set is not None:
                _kwargs['spec_set'] = spec_set
            # add a name to mocks
            if (isinstance(Klass, type) and
                issubclass(Klass, NonCallableMock) and self.attribute):
                _kwargs['name'] = self.attribute
            _kwargs.update(kwargs)
            new = Klass(**_kwargs)
            if inherit and _is_instance_mock(new):
                # we can only tell if the instance should be callable if the
                # spec is not a list
                this_spec = spec
                if spec_set is not None:
                    this_spec = spec_set
                if (not _is_list(this_spec) and not
                    _instance_callable(this_spec)):
                    Klass = NonCallableMagicMock
                # instances of the patched class share the class's spec
                _kwargs.pop('name')
                new.return_value = Klass(_new_parent=new, _new_name='()',
                                         **_kwargs)
        elif autospec is not None:
            # spec is ignored, new *must* be default, spec_set is treated
            # as a boolean. Should we check spec is not None and that spec_set
            # is a bool?
            if new is not DEFAULT:
                raise TypeError(
                    "autospec creates the mock for you. Can't specify "
                    "autospec and new."
                )
            if original is DEFAULT:
                raise TypeError("Can't use 'autospec' with create=True")
            spec_set = bool(spec_set)
            if autospec is True:
                autospec = original
            new = create_autospec(autospec, spec_set=spec_set,
                                  _name=self.attribute, **kwargs)
        elif kwargs:
            # can't set keyword args when we aren't creating the mock
            # XXXX If new is a Mock we could call new.configure_mock(**kwargs)
            raise TypeError("Can't pass kwargs to a mock we aren't creating")
        new_attr = new
        # record what to restore on __exit__, then install the replacement
        self.temp_original = original
        self.is_local = local
        setattr(self.target, self.attribute, new_attr)
        if self.attribute_name is not None:
            # patch.multiple: enter the chained patchers and collect the
            # created mocks keyed by attribute name
            extra_args = {}
            if self.new is DEFAULT:
                extra_args[self.attribute_name] = new
            for patching in self.additional_patchers:
                arg = patching.__enter__()
                if patching.new is DEFAULT:
                    extra_args.update(arg)
            return extra_args
        return new
    def __exit__(self, *exc_info):
        """Undo the patch.

        Restores the original attribute (or deletes the replacement when the
        attribute was inherited/created), drops the bookkeeping state, then
        exits any additional patchers in reverse order.
        """
        if not _is_started(self):
            raise RuntimeError('stop called on unstarted patcher')
        if self.is_local and self.temp_original is not DEFAULT:
            # attribute lived in the target's own __dict__: put it back
            setattr(self.target, self.attribute, self.temp_original)
        else:
            delattr(self.target, self.attribute)
            if not self.create and not hasattr(self.target, self.attribute):
                # needed for proxy objects like django settings
                setattr(self.target, self.attribute, self.temp_original)
        del self.temp_original
        del self.is_local
        del self.target
        for patcher in reversed(self.additional_patchers):
            if _is_started(patcher):
                patcher.__exit__(*exc_info)
    def start(self):
        """Activate a patch, returning any created mock."""
        result = self.__enter__()
        # record on the class-level registry so patch.stopall() can undo it
        self._active_patches.append(self)
        return result
    def stop(self):
        """Stop an active patch."""
        try:
            self._active_patches.remove(self)
        except ValueError:
            # If the patch hasn't been started this will fail
            pass
        return self.__exit__()
def _get_target(target):
try:
target, attribute = target.rsplit('.', 1)
except (TypeError, ValueError):
raise TypeError("Need a valid target to patch. You supplied: %r" %
(target,))
getter = lambda: _importer(target)
return getter, attribute
def _patch_object(
        target, attribute, new=DEFAULT, spec=None,
        create=False, spec_set=None, autospec=None,
        new_callable=None, **kwargs
    ):
    """
    patch the named member (`attribute`) on an object (`target`) with a mock
    object.

    `patch.object` can be used as a decorator, class decorator or a context
    manager. Arguments `new`, `spec`, `create`, `spec_set`, `autospec` and
    `new_callable` have the same meaning as for `patch`. Like `patch`,
    `patch.object` takes arbitrary keyword arguments for configuring the
    mock object it creates.

    When used as a class decorator `patch.object` honours `patch.TEST_PREFIX`
    for choosing which methods to wrap.
    """
    # the target is already an object (not a dotted path), so the getter
    # simply closes over it
    return _patch(
        lambda: target, attribute, new, spec, create,
        spec_set, autospec, new_callable, kwargs
    )
def _patch_multiple(target, spec=None, create=False, spec_set=None,
                    autospec=None, new_callable=None, **kwargs):
    """Perform multiple patches in a single call. It takes the object to be
    patched (either as an object or a string to fetch the object by importing)
    and keyword arguments for the patches::

        with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'):
            ...

    Use `DEFAULT` as the value if you want `patch.multiple` to create
    mocks for you. In this case the created mocks are passed into a decorated
    function by keyword, and a dictionary is returned when `patch.multiple` is
    used as a context manager.

    `patch.multiple` can be used as a decorator, class decorator or a context
    manager. The arguments `spec`, `spec_set`, `create`,
    `autospec` and `new_callable` have the same meaning as for `patch`. These
    arguments will be applied to *all* patches done by `patch.multiple`.

    When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX`
    for choosing which methods to wrap.
    """
    if type(target) is str:
        getter = lambda: _importer(target)
    else:
        getter = lambda: target
    if not kwargs:
        raise ValueError(
            'Must supply at least one keyword argument with patch.multiple'
        )
    # need to wrap in a list for python 3, where items is a view
    items = list(kwargs.items())
    attribute, new = items[0]
    # the first patcher owns the rest via additional_patchers so they are
    # entered/exited together
    patcher = _patch(
        getter, attribute, new, spec, create, spec_set,
        autospec, new_callable, {}
    )
    patcher.attribute_name = attribute
    for attribute, new in items[1:]:
        this_patcher = _patch(
            getter, attribute, new, spec, create, spec_set,
            autospec, new_callable, {}
        )
        this_patcher.attribute_name = attribute
        patcher.additional_patchers.append(this_patcher)
    return patcher
def patch(
        target, new=DEFAULT, spec=None, create=False,
        spec_set=None, autospec=None, new_callable=None, **kwargs
    ):
    """
    `patch` acts as a function decorator, class decorator or a context
    manager. Inside the body of the function or with statement, the `target`
    is patched with a `new` object. When the function/with statement exits
    the patch is undone.

    If `new` is omitted, then the target is replaced with a
    `MagicMock`. If `patch` is used as a decorator and `new` is
    omitted, the created mock is passed in as an extra argument to the
    decorated function. If `patch` is used as a context manager the created
    mock is returned by the context manager.

    `target` should be a string in the form `'package.module.ClassName'`. The
    `target` is imported and the specified object replaced with the `new`
    object, so the `target` must be importable from the environment you are
    calling `patch` from. The target is imported when the decorated function
    is executed, not at decoration time.

    The `spec` and `spec_set` keyword arguments are passed to the `MagicMock`
    if patch is creating one for you.

    In addition you can pass `spec=True` or `spec_set=True`, which causes
    patch to pass in the object being mocked as the spec/spec_set object.

    `new_callable` allows you to specify a different class, or callable object,
    that will be called to create the `new` object. By default `MagicMock` is
    used.

    A more powerful form of `spec` is `autospec`. If you set `autospec=True`
    then the mock with be created with a spec from the object being replaced.
    All attributes of the mock will also have the spec of the corresponding
    attribute of the object being replaced. Methods and functions being
    mocked will have their arguments checked and will raise a `TypeError` if
    they are called with the wrong signature. For mocks replacing a class,
    their return value (the 'instance') will have the same spec as the class.

    Instead of `autospec=True` you can pass `autospec=some_object` to use an
    arbitrary object as the spec instead of the one being replaced.

    By default `patch` will fail to replace attributes that don't exist. If
    you pass in `create=True`, and the attribute doesn't exist, patch will
    create the attribute for you when the patched function is called, and
    delete it again afterwards. This is useful for writing tests against
    attributes that your production code creates at runtime. It is off by
    default because it can be dangerous. With it switched on you can write
    passing tests against APIs that don't actually exist!

    Patch can be used as a `TestCase` class decorator. It works by
    decorating each test method in the class. This reduces the boilerplate
    code when your test methods share a common patchings set. `patch` finds
    tests by looking for method names that start with `patch.TEST_PREFIX`.
    By default this is `test`, which matches the way `unittest` finds tests.
    You can specify an alternative prefix by setting `patch.TEST_PREFIX`.

    Patch can be used as a context manager, with the with statement. Here the
    patching applies to the indented block after the with statement. If you
    use "as" then the patched object will be bound to the name after the
    "as"; very useful if `patch` is creating a mock object for you.

    `patch` takes arbitrary keyword arguments. These will be passed to
    the `Mock` (or `new_callable`) on construction.

    `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
    available for alternate use-cases.
    """
    # resolve the dotted path into a lazy getter so the import happens when
    # the patch is entered, not when the patcher is constructed
    getter, attribute = _get_target(target)
    return _patch(
        getter, attribute, new, spec, create,
        spec_set, autospec, new_callable, kwargs
    )
class _patch_dict(object):
    """
    Patch a dictionary, or dictionary like object, and restore the dictionary
    to its original state after the test.

    `in_dict` can be a dictionary or a mapping like container. If it is a
    mapping then it must at least support getting, setting and deleting items
    plus iterating over keys.

    `in_dict` can also be a string specifying the name of the dictionary, which
    will then be fetched by importing it.

    `values` can be a dictionary of values to set in the dictionary. `values`
    can also be an iterable of `(key, value)` pairs.

    If `clear` is True then the dictionary will be cleared before the new
    values are set.

    `patch.dict` can also be called with arbitrary keyword arguments to set
    values in the dictionary::

        with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()):
            ...

    `patch.dict` can be used as a context manager, decorator or class
    decorator. When used as a class decorator `patch.dict` honours
    `patch.TEST_PREFIX` for choosing which methods to wrap.
    """
    def __init__(self, in_dict, values=(), clear=False, **kwargs):
        if isinstance(in_dict, str):
            # dotted-path string: fetch the actual dict by importing it
            in_dict = _importer(in_dict)
        self.in_dict = in_dict
        # support any argument supported by dict(...) constructor
        self.values = dict(values)
        self.values.update(kwargs)
        self.clear = clear
        self._original = None
    def __call__(self, f):
        """Use as a decorator: wraps a callable, or decorates a class."""
        if isinstance(f, type):
            return self.decorate_class(f)
        @wraps(f)
        def _inner(*args, **kw):
            self._patch_dict()
            try:
                return f(*args, **kw)
            finally:
                self._unpatch_dict()
        return _inner
    def decorate_class(self, klass):
        # wrap every callable test method (patch.TEST_PREFIX) with a fresh
        # decorator instance so each test patches independently
        for attr in dir(klass):
            attr_value = getattr(klass, attr)
            if (attr.startswith(patch.TEST_PREFIX) and
                hasattr(attr_value, "__call__")):
                decorator = _patch_dict(self.in_dict, self.values, self.clear)
                decorated = decorator(attr_value)
                setattr(klass, attr, decorated)
        return klass
    def __enter__(self):
        """Patch the dict."""
        self._patch_dict()
    def _patch_dict(self):
        # snapshot the current contents, optionally clear, then apply the
        # new values; falls back to item-by-item operations for dict-likes
        values = self.values
        in_dict = self.in_dict
        clear = self.clear
        try:
            original = in_dict.copy()
        except AttributeError:
            # dict like object with no copy method
            # must support iteration over keys
            original = {}
            for key in in_dict:
                original[key] = in_dict[key]
        self._original = original
        if clear:
            _clear_dict(in_dict)
        try:
            in_dict.update(values)
        except AttributeError:
            # dict like object with no update method
            for key in values:
                in_dict[key] = values[key]
    def _unpatch_dict(self):
        # restore the snapshot taken by _patch_dict
        in_dict = self.in_dict
        original = self._original
        _clear_dict(in_dict)
        try:
            in_dict.update(original)
        except AttributeError:
            for key in original:
                in_dict[key] = original[key]
    def __exit__(self, *args):
        """Unpatch the dict."""
        self._unpatch_dict()
        return False
    # aliases so a _patch_dict can also be driven via start()/stop()
    start = __enter__
    stop = __exit__
def _clear_dict(in_dict):
try:
in_dict.clear()
except AttributeError:
keys = list(in_dict)
for key in keys:
del in_dict[key]
def _patch_stopall():
    """Stop all active patches. LIFO to unroll nested patches."""
    # NOTE: stop() removes each patcher from _active_patches while the
    # reversed iterator is walking it; reversed iteration from the tail
    # keeps this safe as entries only disappear behind the iterator
    for patch in reversed(_patch._active_patches):
        patch.stop()
# Expose the alternate patcher forms as attributes of `patch` itself, so the
# public API is patch(), patch.object(), patch.dict(), patch.multiple(),
# patch.stopall() and the configurable patch.TEST_PREFIX.
patch.object = _patch_object
patch.dict = _patch_dict
patch.multiple = _patch_multiple
patch.stopall = _patch_stopall
patch.TEST_PREFIX = 'test'
# Names (without the double underscores) of the magic methods MagicMock
# supports by default, grouped roughly by protocol.
magic_methods = (
    "lt le gt ge eq ne "
    "getitem setitem delitem "
    "len contains iter "
    "hash str sizeof "
    "enter exit "
    "divmod neg pos abs invert "
    "complex int float index "
    "trunc floor ceil "
    "bool next "
)
# Binary numeric operators; the in-place (i*) and reflected (r*) variants
# are derived from this list below.
numerics = (
    "add sub mul div floordiv mod lshift rshift and xor or pow truediv"
)
inplace = ' '.join('i%s' % n for n in numerics.split())
right = ' '.join('r%s' % n for n in numerics.split())
# not including __prepare__, __instancecheck__, __subclasscheck__
# (as they are metaclass methods)
# __del__ is not supported at all as it causes problems if it exists
# Magic methods that are supported but NOT configured by default.
_non_defaults = set('__%s__' % method for method in [
    'get', 'set', 'delete', 'reversed', 'missing', 'reduce', 'reduce_ex',
    'getinitargs', 'getnewargs', 'getstate', 'setstate', 'getformat',
    'setformat', 'repr', 'dir', 'subclasses', 'format',
])
def _get_method(name, func):
"Turns a callable object (like a mock) into a real function"
def method(self, *args, **kw):
return func(self, *args, **kw)
method.__name__ = name
return method
# The full dunder names of the magic methods configured by default.
_magics = set(
    '__%s__' % method for method in
    ' '.join([magic_methods, numerics, inplace, right]).split()
)
# Every magic method mock knows about: defaults plus the opt-in ones.
_all_magics = _magics | _non_defaults
_unsupported_magics = set([
'__getattr__', '__setattr__',
'__init__', '__new__', '__prepare__'
'__instancecheck__', '__subclasscheck__',
'__del__'
])
# Magic methods whose default return value is computed from the real object
# (so e.g. hash(mock) is stable and consistent with object identity).
_calculate_return_value = {
    '__hash__': lambda self: object.__hash__(self),
    '__str__': lambda self: object.__str__(self),
    '__sizeof__': lambda self: object.__sizeof__(self),
}
# Fixed, language-mandated default return values for magic methods.
_return_values = {
    '__lt__': NotImplemented,
    '__gt__': NotImplemented,
    '__le__': NotImplemented,
    '__ge__': NotImplemented,
    '__int__': 1,
    '__contains__': False,
    '__len__': 0,
    '__exit__': False,
    '__complex__': 1j,
    '__float__': 1.0,
    '__bool__': True,
    '__index__': 1,
}
def _get_eq(self):
    """Build the default `__eq__` side effect for mock `self`: honour a
    configured return value, else compare by identity."""
    def __eq__(other):
        ret_val = self.__eq__._mock_return_value
        if ret_val is not DEFAULT:
            return ret_val
        return self is other
    return __eq__
def _get_ne(self):
    """Build the default `__ne__` side effect for mock `self`: honour a
    configured return value, else compare by (negated) identity.

    Bug fix: mirror `_get_eq` and return the configured value itself.
    Previously this returned the DEFAULT sentinel instead of the value a
    user set via `mock.__ne__.return_value`, leaking the sentinel to
    callers (fixed the same way upstream in CPython's unittest.mock).
    """
    def __ne__(other):
        ret_val = self.__ne__._mock_return_value
        if ret_val is not DEFAULT:
            return ret_val
        return self is not other
    return __ne__
def _get_iter(self):
    """Build the default `__iter__` side effect: iterate over a configured
    return value, or yield nothing when none is set."""
    def __iter__():
        ret_val = self.__iter__._mock_return_value
        if ret_val is DEFAULT:
            return iter([])
        # if ret_val was already an iterator, then calling iter on it should
        # return the iterator unchanged
        return iter(ret_val)
    return __iter__
# Magic methods whose default behaviour is a computed side effect (closure
# over the mock) rather than a plain return value.
_side_effect_methods = {
    '__eq__': _get_eq,
    '__ne__': _get_ne,
    '__iter__': _get_iter,
}
def _set_return_value(mock, method, name):
    """Give a freshly created magic-method mock its default behaviour.

    Priority order: a fixed language-mandated value (`_return_values`),
    then a value computed from the mock itself (`_calculate_return_value`),
    then a side-effect closure (`_side_effect_methods`).
    """
    preset = _return_values.get(name, DEFAULT)
    if preset is not DEFAULT:
        method.return_value = preset
        return
    return_calculator = _calculate_return_value.get(name)
    if return_calculator is not None:
        try:
            computed = return_calculator(mock)
        except AttributeError:
            # XXXX why do we return AttributeError here?
            # set it as a side_effect instead?
            computed = AttributeError(name)
        method.return_value = computed
        return
    side_effector = _side_effect_methods.get(name)
    if side_effector is not None:
        method.side_effect = side_effector(mock)
class MagicMixin(object):
    """Mixin that installs lazy MagicProxy descriptors for the supported
    magic methods, respecting any spec the mock was created with."""
    def __init__(self, *args, **kw):
        _safe_super(MagicMixin, self).__init__(*args, **kw)
        self._mock_set_magics()
    def _mock_set_magics(self):
        these_magics = _magics
        if self._mock_methods is not None:
            # a spec restricts which magic methods are allowed
            these_magics = _magics.intersection(self._mock_methods)
            remove_magics = set()
            remove_magics = _magics - these_magics
            for entry in remove_magics:
                if entry in type(self).__dict__:
                    # remove unneeded magic methods
                    delattr(self, entry)
        # don't overwrite existing attributes if called a second time
        these_magics = these_magics - set(type(self).__dict__)
        _type = type(self)
        for entry in these_magics:
            # installed on the (per-instance) type because magic methods are
            # looked up on the type, not the instance
            setattr(_type, entry, MagicProxy(entry, self))
class NonCallableMagicMock(MagicMixin, NonCallableMock):
    """A version of `MagicMock` that isn't callable."""
    def mock_add_spec(self, spec, spec_set=False):
        """Add a spec to a mock. `spec` can either be an object or a
        list of strings. Only attributes on the `spec` can be fetched as
        attributes from the mock.

        If `spec_set` is True then only attributes on the spec can be set."""
        self._mock_add_spec(spec, spec_set)
        # re-sync the magic methods with the new spec
        self._mock_set_magics()
class MagicMock(MagicMixin, Mock):
    """
    MagicMock is a subclass of Mock with default implementations
    of most of the magic methods. You can use MagicMock without having to
    configure the magic methods yourself.

    If you use the `spec` or `spec_set` arguments then *only* magic
    methods that exist in the spec will be created.

    Attributes and the return value of a `MagicMock` will also be `MagicMocks`.
    """
    def mock_add_spec(self, spec, spec_set=False):
        """Add a spec to a mock. `spec` can either be an object or a
        list of strings. Only attributes on the `spec` can be fetched as
        attributes from the mock.

        If `spec_set` is True then only attributes on the spec can be set."""
        self._mock_add_spec(spec, spec_set)
        # re-sync the magic methods with the new spec
        self._mock_set_magics()
class MagicProxy(object):
    """Descriptor that lazily creates the child mock for a magic method the
    first time it is fetched or called, then caches it on the parent."""
    def __init__(self, name, parent):
        self.name = name
        self.parent = parent
    def create_mock(self):
        """Build the child mock, attach it to the parent under our name,
        and configure its default return value / side effect."""
        name = self.name
        parent = self.parent
        child = parent._get_child_mock(name=name, _new_name=name,
                                       _new_parent=parent)
        setattr(parent, name, child)
        _set_return_value(parent, child, name)
        return child
    def __call__(self, *args, **kwargs):
        return self.create_mock()(*args, **kwargs)
    def __get__(self, obj, _type=None):
        return self.create_mock()
class _ANY(object):
    """A helper object that compares equal to everything."""
    def __eq__(self, other):
        # unconditionally equal, whatever `other` is
        return True
    def __ne__(self, other):
        return False
    def __repr__(self):
        return '<ANY>'
# the public singleton: `assert mock.called_with(ANY, 3)` style usage
ANY = _ANY()
def _format_call_signature(name, args, kwargs):
message = '%s(%%s)' % name
formatted_args = ''
args_string = ', '.join([repr(arg) for arg in args])
kwargs_string = ', '.join([
'%s=%r' % (key, value) for key, value in sorted(kwargs.items())
])
if args_string:
formatted_args = args_string
if kwargs_string:
if formatted_args:
formatted_args += ', '
formatted_args += kwargs_string
return message % formatted_args
class _Call(tuple):
    """
    A tuple for holding the results of a call to a mock, either in the form
    `(args, kwargs)` or `(name, args, kwargs)`.

    If args or kwargs are empty then a call tuple will compare equal to
    a tuple without those values. This makes comparisons less verbose::

        _Call(('name', (), {})) == ('name',)
        _Call(('name', (1,), {})) == ('name', (1,))
        _Call(((), {'a': 'b'})) == ({'a': 'b'},)

    The `_Call` object provides a useful shortcut for comparing with call::

        _Call(((1, 2), {'a': 3})) == call(1, 2, a=3)
        _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3)

    If the _Call has no name then it will match any name.
    """
    def __new__(cls, value=(), name=None, parent=None, two=False,
                from_kall=True):
        # NOTE: the `name` parameter is deliberately ignored for the tuple
        # contents (it is stored by __init__); the tuple's name component is
        # derived from `value` alone
        name = ''
        args = ()
        kwargs = {}
        _len = len(value)
        if _len == 3:
            name, args, kwargs = value
        elif _len == 2:
            # two elements: (name, args), (name, kwargs) or (args, kwargs)
            first, second = value
            if isinstance(first, str):
                name = first
                if isinstance(second, tuple):
                    args = second
                else:
                    kwargs = second
            else:
                args, kwargs = first, second
        elif _len == 1:
            # one element: a name, an args tuple, or a kwargs dict
            value, = value
            if isinstance(value, str):
                name = value
            elif isinstance(value, tuple):
                args = value
            else:
                kwargs = value
        if two:
            return tuple.__new__(cls, (args, kwargs))
        return tuple.__new__(cls, (name, args, kwargs))
    def __init__(self, value=(), name=None, parent=None, two=False,
                 from_kall=True):
        # from_kall distinguishes calls created via the `call` helper from
        # intermediate attribute-access steps; repr uses it
        self.name = name
        self.parent = parent
        self.from_kall = from_kall
    def __eq__(self, other):
        """Compare flexibly against tuples of length 0-3; a missing name on
        either side matches any name."""
        if other is ANY:
            return True
        try:
            len_other = len(other)
        except TypeError:
            return False
        self_name = ''
        if len(self) == 2:
            self_args, self_kwargs = self
        else:
            self_name, self_args, self_kwargs = self
        other_name = ''
        if len_other == 0:
            other_args, other_kwargs = (), {}
        elif len_other == 3:
            other_name, other_args, other_kwargs = other
        elif len_other == 1:
            value, = other
            if isinstance(value, tuple):
                other_args = value
                other_kwargs = {}
            elif isinstance(value, str):
                other_name = value
                other_args, other_kwargs = (), {}
            else:
                other_args = ()
                other_kwargs = value
        else:
            # len 2
            # could be (name, args) or (name, kwargs) or (args, kwargs)
            first, second = other
            if isinstance(first, str):
                other_name = first
                if isinstance(second, tuple):
                    other_args, other_kwargs = second, {}
                else:
                    other_args, other_kwargs = (), second
            else:
                other_args, other_kwargs = first, second
        if self_name and other_name != self_name:
            return False
        # this order is important for ANY to work!
        return (other_args, other_kwargs) == (self_args, self_kwargs)
    def __ne__(self, other):
        return not self.__eq__(other)
    def __call__(self, *args, **kwargs):
        # calling a call object records the invocation as a child call
        if self.name is None:
            return _Call(('', args, kwargs), name='()')
        name = self.name + '()'
        return _Call((self.name, args, kwargs), name=name, parent=self)
    def __getattr__(self, attr):
        # attribute access builds up dotted call names, e.g. call.foo.bar
        if self.name is None:
            return _Call(name=attr, from_kall=False)
        name = '%s.%s' % (self.name, attr)
        return _Call(name=name, parent=self, from_kall=False)
    def count(self, *args, **kwargs):
        # shadow tuple.count so `call.count(...)` builds a call like any
        # other attribute access
        return self.__getattr__('count')(*args, **kwargs)
    def index(self, *args, **kwargs):
        # shadow tuple.index for the same reason as count above
        return self.__getattr__('index')(*args, **kwargs)
    def __repr__(self):
        if not self.from_kall:
            name = self.name or 'call'
            if name.startswith('()'):
                name = 'call%s' % name
            return name
        if len(self) == 2:
            name = 'call'
            args, kwargs = self
        else:
            name, args, kwargs = self
            if not name:
                name = 'call'
            elif not name.startswith('()'):
                name = 'call.%s' % name
            else:
                name = 'call%s' % name
        return _format_call_signature(name, args, kwargs)
    def call_list(self):
        """For a call object that represents multiple calls, `call_list`
        returns a list of all the intermediate calls as well as the
        final call."""
        vals = []
        thing = self
        while thing is not None:
            if thing.from_kall:
                vals.append(thing)
            thing = thing.parent
        return _CallList(reversed(vals))
# the public entry point for building expected calls: call(1), call.foo(2)...
call = _Call(from_kall=False)
def create_autospec(spec, spec_set=False, instance=False, _parent=None,
                    _name=None, **kwargs):
    """Create a mock object using another object as a spec. Attributes on the
    mock will use the corresponding attribute on the `spec` object as their
    spec.

    Functions or methods being mocked will have their arguments checked
    to check that they are called with the correct signature.

    If `spec_set` is True then attempting to set attributes that don't exist
    on the spec object will raise an `AttributeError`.

    If a class is used as a spec then the return value of the mock (the
    instance of the class) will have the same spec. You can use a class as the
    spec for an instance object by passing `instance=True`. The returned mock
    will only be callable if instances of the mock are callable.

    `create_autospec` also takes arbitrary keyword arguments that are passed to
    the constructor of the created mock."""
    if _is_list(spec):
        # can't pass a list instance to the mock constructor as it will be
        # interpreted as a list of strings
        spec = type(spec)
    is_type = isinstance(spec, type)
    _kwargs = {'spec': spec}
    if spec_set:
        _kwargs = {'spec_set': spec}
    elif spec is None:
        # None we mock with a normal mock without a spec
        _kwargs = {}
    if _kwargs and instance:
        _kwargs['_spec_as_instance'] = True
    _kwargs.update(kwargs)
    Klass = MagicMock
    if type(spec) in DescriptorTypes:
        # descriptors don't have a spec
        # because we don't know what type they return
        _kwargs = {}
    elif not _callable(spec):
        Klass = NonCallableMagicMock
    elif is_type and instance and not _instance_callable(spec):
        Klass = NonCallableMagicMock
    _name = _kwargs.pop('name', _name)
    _new_name = _name
    if _parent is None:
        # for a top level object no _new_name should be set
        _new_name = ''
    mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name,
                 name=_name, **_kwargs)
    if isinstance(spec, FunctionTypes):
        # should only happen at the top level because we don't
        # recurse for functions
        mock = _set_signature(mock, spec)
    else:
        _check_signature(spec, mock, is_type, instance)
    if _parent is not None and not instance:
        _parent._mock_children[_name] = mock
    if is_type and not instance and 'return_value' not in kwargs:
        # instances of a specced class share the class's spec
        mock.return_value = create_autospec(spec, spec_set, instance=True,
                                            _name='()', _parent=mock)
    for entry in dir(spec):
        if _is_magic(entry):
            # MagicMock already does the useful magic methods for us
            continue
        # XXXX do we need a better way of getting attributes without
        # triggering code execution (?) Probably not - we need the actual
        # object to mock it so we would rather trigger a property than mock
        # the property descriptor. Likewise we want to mock out dynamically
        # provided attributes.
        # XXXX what about attributes that raise exceptions other than
        # AttributeError on being fetched?
        # we could be resilient against it, or catch and propagate the
        # exception when the attribute is fetched from the mock
        try:
            original = getattr(spec, entry)
        except AttributeError:
            continue
        kwargs = {'spec': original}
        if spec_set:
            kwargs = {'spec_set': original}
        if not isinstance(original, FunctionTypes):
            # non-function attributes are autospecced lazily on first access
            new = _SpecState(original, spec_set, mock, entry, instance)
            mock._mock_children[entry] = new
        else:
            parent = mock
            if isinstance(spec, FunctionTypes):
                parent = mock.mock
            skipfirst = _must_skip(spec, entry, is_type)
            kwargs['_eat_self'] = skipfirst
            new = MagicMock(parent=parent, name=entry, _new_name=entry,
                            _new_parent=parent,
                            **kwargs)
            mock._mock_children[entry] = new
            _check_signature(original, new, skipfirst=skipfirst)
        # so functions created with _set_signature become instance attributes,
        # *plus* their underlying mock exists in _mock_children of the parent
        # mock. Adding to _mock_children may be unnecessary where we are also
        # setting as an instance attribute?
        if isinstance(new, FunctionTypes):
            setattr(mock, entry, new)
    return mock
def _must_skip(spec, entry, is_type):
    """
    Return whether we should skip the first argument on spec's `entry`
    attribute (i.e. whether `self` is implicit for signature checking).
    """
    if not isinstance(spec, type):
        if entry in getattr(spec, '__dict__', {}):
            # instance attribute - shouldn't skip
            return False
        spec = spec.__class__
    for klass in spec.__mro__:
        result = klass.__dict__.get(entry, DEFAULT)
        if result is DEFAULT:
            continue
        if isinstance(result, (staticmethod, classmethod)):
            # neither takes an implicit first argument
            return False
        elif isinstance(getattr(result, '__get__', None), MethodWrapperTypes):
            # Normal method => skip if looked up on type
            # (if looked up on instance, self is already skipped)
            return is_type
        else:
            return False
    # shouldn't get here unless function is a dynamically provided attribute
    # XXXX untested behaviour
    return is_type
def _get_class(obj):
try:
return obj.__class__
except AttributeError:
# it is possible for objects to have no __class__
return type(obj)
class _SpecState(object):
def __init__(self, spec, spec_set=False, parent=None,
name=None, ids=None, instance=False):
self.spec = spec
self.ids = ids
self.spec_set = spec_set
self.parent = parent
self.instance = instance
self.name = name
# Types treated as plain functions/methods for autospec purposes.
FunctionTypes = (
    # python function
    type(create_autospec),
    # instance method
    type(ANY.__eq__),
)
# Type of a bound method wrapper; _must_skip uses it to spot normal methods.
MethodWrapperTypes = (
    type(ANY.__eq__.__get__),
)
# Lazily populated by mock_open() with the attribute names of real file
# objects (TextIOWrapper/BytesIO).
file_spec = None
def _iterate_read_data(read_data):
# Helper for mock_open:
# Retrieve lines from read_data via a generator so that separate calls to
# readline, read, and readlines are properly interleaved
data_as_list = ['{}\n'.format(l) for l in read_data.split('\n')]
if data_as_list[-1] == '\n':
# If the last line ended in a newline, the list comprehension will have an
# extra entry that's just a newline. Remove this.
data_as_list = data_as_list[:-1]
else:
# If there wasn't an extra newline by itself, then the file being
# emulated doesn't have a newline to end the last line remove the
# newline that our naive format() added
data_as_list[-1] = data_as_list[-1][:-1]
for line in data_as_list:
yield line
def mock_open(mock=None, read_data=''):
    """
    A helper function to create a mock to replace the use of `open`. It works
    for `open` called directly or used as a context manager.

    The `mock` argument is the mock object to configure. If `None` (the
    default) then a `MagicMock` will be created for you, with the API limited
    to methods or attributes available on standard file handles.

    `read_data` is a string for the `read`, `readline`, and `readlines`
    methods of the file handle to return. This is an empty string by default.
    """
    # the three side effects below defer to an explicitly configured
    # return_value first, then consume the shared `_data` generator so
    # read/readline/readlines interleave correctly
    def _readlines_side_effect(*args, **kwargs):
        if handle.readlines.return_value is not None:
            return handle.readlines.return_value
        return list(_data)
    def _read_side_effect(*args, **kwargs):
        if handle.read.return_value is not None:
            return handle.read.return_value
        return ''.join(_data)
    def _readline_side_effect():
        if handle.readline.return_value is not None:
            while True:
                yield handle.readline.return_value
        for line in _data:
            yield line
    global file_spec
    if file_spec is None:
        # build the handle spec once, from the real io classes
        import _io
        file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
    if mock is None:
        mock = MagicMock(name='open', spec=open)
    handle = MagicMock(spec=file_spec)
    # `with open(...) as f:` yields the handle itself
    handle.__enter__.return_value = handle
    _data = _iterate_read_data(read_data)
    handle.write.return_value = None
    handle.read.return_value = None
    handle.readline.return_value = None
    handle.readlines.return_value = None
    handle.read.side_effect = _read_side_effect
    handle.readline.side_effect = _readline_side_effect()
    handle.readlines.side_effect = _readlines_side_effect
    mock.return_value = handle
    return mock
class PropertyMock(Mock):
    """
    A mock intended to be used as a property, or other descriptor, on a class.
    `PropertyMock` provides `__get__` and `__set__` methods so you can specify
    a return value when it is fetched.

    Fetching a `PropertyMock` instance from an object calls the mock, with
    no args. Setting it calls the mock with the value being set.
    """
    def _get_child_mock(self, **kwargs):
        # children are plain MagicMocks, not PropertyMocks
        return MagicMock(**kwargs)
    def __get__(self, obj, obj_type):
        return self()
    def __set__(self, obj, val):
        self(val)
| 32.683362 | 85 | 0.609504 |
# Public API re-exported by `from mock import *`.
__all__ = (
    'Mock',
    'MagicMock',
    'patch',
    'sentinel',
    'DEFAULT',
    'ANY',
    'call',
    'create_autospec',
    'FILTER_DIR',
    'NonCallableMock',
    'NonCallableMagicMock',
    'mock_open',
    'PropertyMock',
)
__version__ = '1.0'
import inspect
import pprint
import sys
import builtins
from types import ModuleType
from functools import wraps, partial
_builtins = {name for name in dir(builtins) if not name.startswith('_')}
BaseExceptions = (BaseException,)
if 'java' in sys.platform:
import java
BaseExceptions = (BaseException, java.lang.Throwable)
FILTER_DIR = True
e_super = super
def _is_instance_mock(obj):
    """Return True if *obj* is a NonCallableMock (or subclass) instance.

    Mocks override ``__class__``, so ``isinstance`` cannot be trusted here;
    inspect the real type instead.
    """
    real_type = type(obj)
    return issubclass(real_type, NonCallableMock)
def _is_exception(obj):
    """True for exception instances and for exception classes."""
    if isinstance(obj, BaseExceptions):
        return True
    return isinstance(obj, type) and issubclass(obj, BaseExceptions)
class _slotted(object):
    # Throwaway class used only to obtain the type of a slot descriptor
    # (see DescriptorTypes below).
    __slots__ = ['a']
# Types treated as data descriptors when autospec'ing class attributes.
DescriptorTypes = (
    type(_slotted.a),
    property,
)
def _get_signature_object(func, as_instance, eat_self):
    """Return ``(callable, inspect.Signature)`` for *func*, or None.

    For classes, inspects ``__init__`` (or ``__call__`` when *as_instance*
    is true); *eat_self* drops the implicit first argument by partially
    binding None to it.  Returns None when no usable signature exists.
    """
    if isinstance(func, type) and not as_instance:
        try:
            func = func.__init__
        except AttributeError:
            return None
        # Skip the `self` argument in __init__
        eat_self = True
    elif not isinstance(func, FunctionTypes):
        # If we really want to model an instance of the passed type,
        # __call__ should be looked up, not __init__.
        try:
            func = func.__call__
        except AttributeError:
            return None
    if eat_self:
        sig_func = partial(func, None)
    else:
        sig_func = func
    try:
        return func, inspect.signature(sig_func)
    except ValueError:
        # Certain callable types are not supported by inspect.signature()
        return None
def _check_signature(func, mock, skipfirst, instance=False):
    """Install a signature-checking hook on *mock*'s class.

    Builds a ``checksig`` function that binds arguments against *func*'s
    signature (raising TypeError on mismatch) and attaches it as
    ``_mock_check_sig`` so every call through CallableMixin.__call__ is
    validated first.  No-op when the signature cannot be determined.
    """
    sig = _get_signature_object(func, instance, skipfirst)
    if sig is None:
        return
    func, sig = sig
    def checksig(_mock_self, *args, **kwargs):
        sig.bind(*args, **kwargs)
    _copy_func_details(func, checksig)
    type(mock)._mock_check_sig = checksig
def _copy_func_details(func, funcopy):
funcopy.__name__ = func.__name__
funcopy.__doc__ = func.__doc__
try:
funcopy.__text_signature__ = func.__text_signature__
except AttributeError:
pass
# we explicitly don't copy func.__dict__ into this copy as it would
try:
funcopy.__module__ = func.__module__
except AttributeError:
pass
try:
funcopy.__defaults__ = func.__defaults__
except AttributeError:
pass
try:
funcopy.__kwdefaults__ = func.__kwdefaults__
except AttributeError:
pass
def _callable(obj):
if isinstance(obj, type):
return True
if getattr(obj, '__call__', None) is not None:
return True
return False
def _is_list(obj):
return type(obj) in (list, tuple)
def _instance_callable(obj):
if not isinstance(obj, type):
return getattr(obj, '__call__', None) is not None
for base in (obj,) + obj.__mro__:
if base.__dict__.get('__call__') is not None:
return True
return False
def _set_signature(mock, original, instance=False):
    """Return a function wrapping *mock* that enforces *original*'s signature.

    The wrapper is generated with exec() so that it carries the original's
    name; it validates arguments via Signature.bind and then delegates to
    *mock*.  Returns None when *original* is not callable or has no usable
    signature.
    """
    if not _callable(original):
        return
    skipfirst = isinstance(original, type)
    result = _get_signature_object(original, instance, skipfirst)
    if result is None:
        return
    func, sig = result
    def checksig(*args, **kwargs):
        sig.bind(*args, **kwargs)
    _copy_func_details(func, checksig)
    name = original.__name__
    if not name.isidentifier():
        # fall back to a safe identifier for odd callables (e.g. lambdas)
        name = 'funcopy'
    context = {'_checksig_': checksig, 'mock': mock}
    src = """def %s(*args, **kwargs):
    _checksig_(*args, **kwargs)
    return mock(*args, **kwargs)""" % name
    exec (src, context)
    funcopy = context[name]
    _setup_func(funcopy, mock)
    return funcopy
def _setup_func(funcopy, mock):
    """Wire a signature-checking wrapper up to behave like its mock.

    Mirrors the mock's assert_* helpers, call bookkeeping attributes and
    reset_mock onto *funcopy*, and registers *funcopy* as the mock's
    delegate so state reads/writes go through it.
    """
    funcopy.mock = mock
    if not _is_instance_mock(mock):
        return
    def assert_called_with(*args, **kwargs):
        return mock.assert_called_with(*args, **kwargs)
    def assert_called_once_with(*args, **kwargs):
        return mock.assert_called_once_with(*args, **kwargs)
    def assert_has_calls(*args, **kwargs):
        return mock.assert_has_calls(*args, **kwargs)
    def assert_any_call(*args, **kwargs):
        return mock.assert_any_call(*args, **kwargs)
    def reset_mock():
        funcopy.method_calls = _CallList()
        funcopy.mock_calls = _CallList()
        mock.reset_mock()
        ret = funcopy.return_value
        if _is_instance_mock(ret) and not ret is mock:
            ret.reset_mock()
    funcopy.called = False
    funcopy.call_count = 0
    funcopy.call_args = None
    funcopy.call_args_list = _CallList()
    funcopy.method_calls = _CallList()
    funcopy.mock_calls = _CallList()
    funcopy.return_value = mock.return_value
    funcopy.side_effect = mock.side_effect
    funcopy._mock_children = mock._mock_children
    funcopy.assert_called_with = assert_called_with
    funcopy.assert_called_once_with = assert_called_once_with
    funcopy.assert_has_calls = assert_has_calls
    funcopy.assert_any_call = assert_any_call
    funcopy.reset_mock = reset_mock
    # Route the mock's state (called, call_count, ...) through funcopy.
    mock._mock_delegate = funcopy
def _is_magic(name):
return '__%s__' % name[2:-2] == name
class _SentinelObject(object):
def __init__(self, name):
self.name = name
def __repr__(self):
return 'sentinel.%s' % self.name
class _Sentinel(object):
    """Namespace object: accessing any attribute lazily creates (and caches)
    a unique named sentinel."""
    def __init__(self):
        # name -> _SentinelObject cache; each name maps to one stable object.
        self._sentinels = {}
    def __getattr__(self, name):
        if name == '__bases__':
            # Without this help(unittest.mock) raises an exception
            raise AttributeError
        return self._sentinels.setdefault(name, _SentinelObject(name))
# The single public sentinel namespace, plus module-internal markers.
sentinel = _Sentinel()
DEFAULT = sentinel.DEFAULT      # "no value configured" marker
_missing = sentinel.MISSING     # "child never existed" marker
_deleted = sentinel.DELETED     # "child was delattr'd" marker
def _copy(value):
if type(value) in (dict, list, tuple, set):
return type(value)(value)
return value
# Attribute names NonCallableMock.__setattr__ passes straight through to
# object.__setattr__ (these back the mock's own properties).
_allowed_names = set(
    [
        'return_value', '_mock_return_value', 'side_effect',
        '_mock_side_effect', '_mock_parent', '_mock_new_parent',
        '_mock_name', '_mock_new_name'
    ]
)
def _delegating_property(name):
    """Build a property that reads/writes ``_mock_<name>``, deferring to the
    mock's ``_mock_delegate`` (see _setup_func) when one is installed."""
    _allowed_names.add(name)
    _the_name = '_mock_' + name
    def _get(self, name=name, _the_name=_the_name):
        sig = self._mock_delegate
        if sig is None:
            return getattr(self, _the_name)
        return getattr(sig, name)
    def _set(self, value, name=name, _the_name=_the_name):
        sig = self._mock_delegate
        if sig is None:
            self.__dict__[_the_name] = value
        else:
            setattr(sig, name, value)
    return property(_get, _set)
class _CallList(list):
def __contains__(self, value):
if not isinstance(value, list):
return list.__contains__(self, value)
len_value = len(value)
len_self = len(self)
if len_value > len_self:
return False
for i in range(0, len_self - len_value + 1):
sub_list = self[i:i+len_value]
if sub_list == value:
return True
return False
def __repr__(self):
return pprint.pformat(list(self))
def _check_and_set_parent(parent, value, name, new_name):
    """Adopt mock *value* as a child of *parent* for call recording.

    Returns True only when *value* is an unattached mock (no name/parent yet)
    and is not *parent* itself or one of its ancestors; in that case the
    parent/name links are set.  ``new_name`` feeds mock_calls tracking,
    ``name`` feeds method_calls tracking.
    """
    if not _is_instance_mock(value):
        return False
    if ((value._mock_name or value._mock_new_name) or
        (value._mock_parent is not None) or
        (value._mock_new_parent is not None)):
        return False
    _parent = parent
    while _parent is not None:
        # setting a mock (value) as a child or return value of itself
        # should not modify the mock
        if _parent is value:
            return False
        _parent = _parent._mock_new_parent
    if new_name:
        value._mock_new_parent = parent
        value._mock_new_name = new_name
    if name:
        value._mock_parent = parent
        value._mock_name = name
    return True
# Internal class to identify if we wrapped an iterator object or not.
class _MockIter(object):
def __init__(self, obj):
self.obj = iter(obj)
def __iter__(self):
return self
def __next__(self):
return next(self.obj)
class Base(object):
    """Root of the mock class hierarchy: shared state defaults and a
    constructor that terminates the cooperative __init__ chain."""
    # Class-level defaults; instances shadow these via _delegating_property
    # machinery and __dict__ writes.
    _mock_return_value = DEFAULT
    _mock_side_effect = None
    def __init__(self, *args, **kwargs):
        # Swallow any arguments so super().__init__ chains end cleanly here.
        pass
class NonCallableMock(Base):
    """Core mock implementation: auto-creating child attributes, recording
    calls, spec restriction and the assert_* helpers.  Not callable itself;
    CallableMixin adds __call__ (together they form Mock)."""
    def __new__(cls, *args, **kw):
        # every instance has its own class
        # so we can create magic methods on the
        # class without stomping on other mocks
        new = type(cls.__name__, (cls,), {'__doc__': cls.__doc__})
        instance = object.__new__(new)
        return instance
    def __init__(
        self, spec=None, wraps=None, name=None, spec_set=None,
        parent=None, _spec_state=None, _new_name='', _new_parent=None,
        _spec_as_instance=False, _eat_self=None, unsafe=False, **kwargs
    ):
        if _new_parent is None:
            _new_parent = parent
        # Write through __dict__ to avoid triggering our own __setattr__.
        __dict__ = self.__dict__
        __dict__['_mock_parent'] = parent
        __dict__['_mock_name'] = name
        __dict__['_mock_new_name'] = _new_name
        __dict__['_mock_new_parent'] = _new_parent
        if spec_set is not None:
            spec = spec_set
            spec_set = True
        if _eat_self is None:
            _eat_self = parent is not None
        self._mock_add_spec(spec, spec_set, _spec_as_instance, _eat_self)
        __dict__['_mock_children'] = {}
        __dict__['_mock_wraps'] = wraps
        __dict__['_mock_delegate'] = None
        __dict__['_mock_called'] = False
        __dict__['_mock_call_args'] = None
        __dict__['_mock_call_count'] = 0
        __dict__['_mock_call_args_list'] = _CallList()
        __dict__['_mock_mock_calls'] = _CallList()
        __dict__['method_calls'] = _CallList()
        __dict__['_mock_unsafe'] = unsafe
        if kwargs:
            self.configure_mock(**kwargs)
        _safe_super(NonCallableMock, self).__init__(
            spec, wraps, name, spec_set, parent,
            _spec_state
        )
    def attach_mock(self, mock, attribute):
        """Attach *mock* as *attribute* so its calls are recorded on self."""
        # Clear any previous lineage before re-parenting via setattr.
        mock._mock_parent = None
        mock._mock_new_parent = None
        mock._mock_name = ''
        mock._mock_new_name = None
        setattr(self, attribute, mock)
    def mock_add_spec(self, spec, spec_set=False):
        """Restrict this mock to the attributes of *spec* (class or instance)."""
        self._mock_add_spec(spec, spec_set)
    def _mock_add_spec(self, spec, spec_set, _spec_as_instance=False,
                       _eat_self=False):
        # Record the spec's class, signature and allowed attribute names.
        _spec_class = None
        _spec_signature = None
        if spec is not None and not _is_list(spec):
            if isinstance(spec, type):
                _spec_class = spec
            else:
                _spec_class = _get_class(spec)
            res = _get_signature_object(spec,
                                        _spec_as_instance, _eat_self)
            _spec_signature = res and res[1]
            spec = dir(spec)
        __dict__ = self.__dict__
        __dict__['_spec_class'] = _spec_class
        __dict__['_spec_set'] = spec_set
        __dict__['_spec_signature'] = _spec_signature
        __dict__['_mock_methods'] = spec
    def __get_return_value(self):
        ret = self._mock_return_value
        if self._mock_delegate is not None:
            ret = self._mock_delegate.return_value
        if ret is DEFAULT:
            # Lazily create the return value mock on first access.
            ret = self._get_child_mock(
                _new_parent=self, _new_name='()'
            )
            self.return_value = ret
        return ret
    def __set_return_value(self, value):
        if self._mock_delegate is not None:
            self._mock_delegate.return_value = value
        else:
            self._mock_return_value = value
            _check_and_set_parent(self, value, None, '()')
    __return_value_doc = "The value to be returned when the mock is called."
    return_value = property(__get_return_value, __set_return_value,
                            __return_value_doc)
    @property
    def __class__(self):
        # Lie about the class so isinstance() checks against the spec pass.
        if self._spec_class is None:
            return type(self)
        return self._spec_class
    called = _delegating_property('called')
    call_count = _delegating_property('call_count')
    call_args = _delegating_property('call_args')
    call_args_list = _delegating_property('call_args_list')
    mock_calls = _delegating_property('mock_calls')
    def __get_side_effect(self):
        delegated = self._mock_delegate
        if delegated is None:
            return self._mock_side_effect
        sf = delegated.side_effect
        # Wrap plain iterables once so successive calls consume one iterator.
        if sf is not None and not callable(sf) and not isinstance(sf, _MockIter):
            sf = _MockIter(sf)
            delegated.side_effect = sf
        return sf
    def __set_side_effect(self, value):
        value = _try_iter(value)
        delegated = self._mock_delegate
        if delegated is None:
            self._mock_side_effect = value
        else:
            delegated.side_effect = value
    side_effect = property(__get_side_effect, __set_side_effect)
    def reset_mock(self):
        "Restore the mock object to its initial state."
        self.called = False
        self.call_args = None
        self.call_count = 0
        self.mock_calls = _CallList()
        self.call_args_list = _CallList()
        self.method_calls = _CallList()
        for child in self._mock_children.values():
            if isinstance(child, _SpecState):
                continue
            child.reset_mock()
        ret = self._mock_return_value
        if _is_instance_mock(ret) and ret is not self:
            ret.reset_mock()
    def configure_mock(self, **kwargs):
        """Set attributes on the mock; dotted names configure children."""
        for arg, val in sorted(kwargs.items(),
                               # we sort on the number of dots so that
                               # attributes are set before we set attributes on
                               # attributes
                               key=lambda entry: entry[0].count('.')):
            args = arg.split('.')
            final = args.pop()
            obj = self
            for entry in args:
                obj = getattr(obj, entry)
            setattr(obj, final, val)
    def __getattr__(self, name):
        if name in {'_mock_methods', '_mock_unsafe'}:
            raise AttributeError(name)
        elif self._mock_methods is not None:
            if name not in self._mock_methods or name in _all_magics:
                raise AttributeError("Mock object has no attribute %r" % name)
        elif _is_magic(name):
            raise AttributeError(name)
        if not self._mock_unsafe:
            # Misspelled assertions (assret_*) would otherwise silently create
            # a child mock and never fail.
            if name.startswith(('assert', 'assret')):
                raise AttributeError(name)
        result = self._mock_children.get(name)
        if result is _deleted:
            raise AttributeError(name)
        elif result is None:
            wraps = None
            if self._mock_wraps is not None:
                # XXXX should we get the attribute without triggering code
                # execution?
                wraps = getattr(self._mock_wraps, name)
            result = self._get_child_mock(
                parent=self, name=name, wraps=wraps, _new_name=name,
                _new_parent=self
            )
            self._mock_children[name] = result
        elif isinstance(result, _SpecState):
            # Deferred autospec child: realise it on first access.
            result = create_autospec(
                result.spec, result.spec_set, result.instance,
                result.parent, result.name
            )
            self._mock_children[name] = result
        return result
    def __repr__(self):
        # Reconstruct the dotted name from the chain of _mock_new_parent links.
        _name_list = [self._mock_new_name]
        _parent = self._mock_new_parent
        last = self
        dot = '.'
        if _name_list == ['()']:
            dot = ''
        seen = set()
        while _parent is not None:
            last = _parent
            _name_list.append(_parent._mock_new_name + dot)
            dot = '.'
            if _parent._mock_new_name == '()':
                dot = ''
            _parent = _parent._mock_new_parent
            # use ids here so as not to call __hash__ on the mocks
            if id(_parent) in seen:
                break
            seen.add(id(_parent))
        _name_list = list(reversed(_name_list))
        _first = last._mock_name or 'mock'
        if len(_name_list) > 1:
            if _name_list[1] not in ('()', '().'):
                _first += '.'
        _name_list[0] = _first
        name = ''.join(_name_list)
        name_string = ''
        if name not in ('mock', 'mock.'):
            name_string = ' name=%r' % name
        spec_string = ''
        if self._spec_class is not None:
            spec_string = ' spec=%r'
            if self._spec_set:
                spec_string = ' spec_set=%r'
            spec_string = spec_string % self._spec_class.__name__
        return "<%s%s%s id='%s'>" % (
            type(self).__name__,
            name_string,
            spec_string,
            id(self)
        )
    def __dir__(self):
        """Filter the output of `dir(mock)` to only useful members."""
        if not FILTER_DIR:
            return object.__dir__(self)
        extras = self._mock_methods or []
        from_type = dir(type(self))
        from_dict = list(self.__dict__)
        from_type = [e for e in from_type if not e.startswith('_')]
        from_dict = [e for e in from_dict if not e.startswith('_') or
                     _is_magic(e)]
        return sorted(set(extras + from_type + from_dict +
                          list(self._mock_children)))
    def __setattr__(self, name, value):
        if name in _allowed_names:
            # property setters go through here
            return object.__setattr__(self, name, value)
        elif (self._spec_set and self._mock_methods is not None and
            name not in self._mock_methods and
            name not in self.__dict__):
            raise AttributeError("Mock object has no attribute '%s'" % name)
        elif name in _unsupported_magics:
            msg = 'Attempting to set unsupported magic method %r.' % name
            raise AttributeError(msg)
        elif name in _all_magics:
            if self._mock_methods is not None and name not in self._mock_methods:
                raise AttributeError("Mock object has no attribute '%s'" % name)
            if not _is_instance_mock(value):
                # Magic methods live on the per-instance class (see __new__).
                setattr(type(self), name, _get_method(name, value))
                original = value
                value = lambda *args, **kw: original(self, *args, **kw)
            else:
                # only set _new_name and not name so that mock_calls is tracked
                # but not method calls
                _check_and_set_parent(self, value, None, name)
                setattr(type(self), name, value)
                self._mock_children[name] = value
        elif name == '__class__':
            self._spec_class = value
            return
        else:
            if _check_and_set_parent(self, value, name, name):
                self._mock_children[name] = value
        return object.__setattr__(self, name, value)
    def __delattr__(self, name):
        if name in _all_magics and name in type(self).__dict__:
            delattr(type(self), name)
            if name not in self.__dict__:
                # for magic methods that are still MagicProxy objects and
                # not set on the instance itself
                return
        if name in self.__dict__:
            object.__delattr__(self, name)
        obj = self._mock_children.get(name, _missing)
        if obj is _deleted:
            raise AttributeError(name)
        if obj is not _missing:
            del self._mock_children[name]
        # Remember the deletion so later getattr raises AttributeError.
        self._mock_children[name] = _deleted
    def _format_mock_call_signature(self, args, kwargs):
        name = self._mock_name or 'mock'
        return _format_call_signature(name, args, kwargs)
    def _format_mock_failure_message(self, args, kwargs):
        message = 'Expected call: %s\nActual call: %s'
        expected_string = self._format_mock_call_signature(args, kwargs)
        call_args = self.call_args
        if len(call_args) == 3:
            call_args = call_args[1:]
        actual_string = self._format_mock_call_signature(*call_args)
        return message % (expected_string, actual_string)
    def _call_matcher(self, _call):
        """
        Given a call (or simply an (args, kwargs) tuple), return a
        comparison key suitable for matching with other calls.
        This is a best effort method which relies on the spec's signature,
        if available, or falls back on the arguments themselves.
        """
        sig = self._spec_signature
        if sig is not None:
            if len(_call) == 2:
                name = ''
                args, kwargs = _call
            else:
                name, args, kwargs = _call
            try:
                return name, sig.bind(*args, **kwargs)
            except TypeError as e:
                # Return (not raise) so the caller can use it as an
                # AssertionError cause.
                return e.with_traceback(None)
        else:
            return _call
    def assert_not_called(_mock_self):
        """assert that the mock was never called."""
        self = _mock_self
        if self.call_count != 0:
            msg = ("Expected '%s' to not have been called. Called %s times." %
                   (self._mock_name or 'mock', self.call_count))
            raise AssertionError(msg)
    def assert_called_with(_mock_self, *args, **kwargs):
        """assert that the mock was last called with the given arguments."""
        self = _mock_self
        if self.call_args is None:
            expected = self._format_mock_call_signature(args, kwargs)
            raise AssertionError('Expected call: %s\nNot called' % (expected,))
        def _error_message():
            msg = self._format_mock_failure_message(args, kwargs)
            return msg
        expected = self._call_matcher((args, kwargs))
        actual = self._call_matcher(self.call_args)
        if expected != actual:
            cause = expected if isinstance(expected, Exception) else None
            raise AssertionError(_error_message()) from cause
    def assert_called_once_with(_mock_self, *args, **kwargs):
        """assert the mock was called exactly once and with these arguments."""
        self = _mock_self
        if not self.call_count == 1:
            msg = ("Expected '%s' to be called once. Called %s times." %
                   (self._mock_name or 'mock', self.call_count))
            raise AssertionError(msg)
        return self.assert_called_with(*args, **kwargs)
    def assert_has_calls(self, calls, any_order=False):
        """assert the mock was called with these calls; *any_order* relaxes
        the requirement that they be sequential."""
        expected = [self._call_matcher(c) for c in calls]
        cause = expected if isinstance(expected, Exception) else None
        all_calls = _CallList(self._call_matcher(c) for c in self.mock_calls)
        if not any_order:
            # _CallList.__contains__ does contiguous sub-list matching here.
            if expected not in all_calls:
                raise AssertionError(
                    'Calls not found.\nExpected: %r\n'
                    'Actual: %r' % (calls, self.mock_calls)
                ) from cause
            return
        all_calls = list(all_calls)
        not_found = []
        for kall in expected:
            try:
                all_calls.remove(kall)
            except ValueError:
                not_found.append(kall)
        if not_found:
            raise AssertionError(
                '%r not all found in call list' % (tuple(not_found),)
            ) from cause
    def assert_any_call(self, *args, **kwargs):
        """assert that the mock was called at least once with these arguments."""
        expected = self._call_matcher((args, kwargs))
        actual = [self._call_matcher(c) for c in self.call_args_list]
        if expected not in actual:
            cause = expected if isinstance(expected, Exception) else None
            expected_string = self._format_mock_call_signature(args, kwargs)
            raise AssertionError(
                '%s call not found' % expected_string
            ) from cause
    def _get_child_mock(self, **kw):
        """Create the child mocks for attributes and return value.

        Walks the MRO so subclasses of Mock/MagicMock get the matching
        callable/non-callable child class.
        """
        _type = type(self)
        if not issubclass(_type, CallableMixin):
            if issubclass(_type, NonCallableMagicMock):
                klass = MagicMock
            elif issubclass(_type, NonCallableMock) :
                klass = Mock
        else:
            klass = _type.__mro__[1]
        return klass(**kw)
def _try_iter(obj):
    """Convert a side_effect value into an iterator where that makes sense.

    None, exceptions and callables pass through untouched; anything else is
    turned into an iterator, falling back to the object itself when it is
    not iterable.
    """
    if obj is None or _is_exception(obj) or _callable(obj):
        return obj
    try:
        return iter(obj)
    except TypeError:
        # XXXX backwards compatibility
        # but this will blow up on first call - so maybe we should fail early?
        return obj
class CallableMixin(Base):
    """Adds __call__ behaviour (call recording, side_effect, return_value)
    on top of NonCallableMock; Mock = CallableMixin + NonCallableMock."""
    def __init__(self, spec=None, side_effect=None, return_value=DEFAULT,
                 wraps=None, name=None, spec_set=None, parent=None,
                 _spec_state=None, _new_name='', _new_parent=None, **kwargs):
        # Store return_value directly; __setattr__ machinery is bypassed.
        self.__dict__['_mock_return_value'] = return_value
        _safe_super(CallableMixin, self).__init__(
            spec, wraps, name, spec_set, parent,
            _spec_state, _new_name, _new_parent, **kwargs
        )
        self.side_effect = side_effect
    def _mock_check_sig(self, *args, **kwargs):
        # stub method that can be replaced with one with a specific signature
        pass
    def __call__(_mock_self, *args, **kwargs):
        # can't use self in-case a function / method we are mocking uses self
        _mock_self._mock_check_sig(*args, **kwargs)
        return _mock_self._mock_call(*args, **kwargs)
    def _mock_call(_mock_self, *args, **kwargs):
        """Record the call on self and every ancestor, then produce the result
        from side_effect / wraps / return_value (in that order)."""
        self = _mock_self
        self.called = True
        self.call_count += 1
        _new_name = self._mock_new_name
        _new_parent = self._mock_new_parent
        _call = _Call((args, kwargs), two=True)
        self.call_args = _call
        self.call_args_list.append(_call)
        self.mock_calls.append(_Call(('', args, kwargs)))
        seen = set()
        skip_next_dot = _new_name == '()'
        do_method_calls = self._mock_parent is not None
        name = self._mock_name
        # Propagate the call up the parent chain, building dotted names.
        while _new_parent is not None:
            this_mock_call = _Call((_new_name, args, kwargs))
            if _new_parent._mock_new_name:
                dot = '.'
                if skip_next_dot:
                    dot = ''
                skip_next_dot = False
                if _new_parent._mock_new_name == '()':
                    skip_next_dot = True
                _new_name = _new_parent._mock_new_name + dot + _new_name
            if do_method_calls:
                if _new_name == name:
                    this_method_call = this_mock_call
                else:
                    this_method_call = _Call((name, args, kwargs))
                _new_parent.method_calls.append(this_method_call)
                do_method_calls = _new_parent._mock_parent is not None
                if do_method_calls:
                    name = _new_parent._mock_name + '.' + name
            _new_parent.mock_calls.append(this_mock_call)
            _new_parent = _new_parent._mock_new_parent
            # use ids so cyclic parent chains terminate without calling __hash__
            _new_parent_id = id(_new_parent)
            if _new_parent_id in seen:
                break
            seen.add(_new_parent_id)
        ret_val = DEFAULT
        effect = self.side_effect
        if effect is not None:
            if _is_exception(effect):
                raise effect
            if not _callable(effect):
                # iterator side_effect: next value wins (DEFAULT -> return_value)
                result = next(effect)
                if _is_exception(result):
                    raise result
                if result is DEFAULT:
                    result = self.return_value
                return result
            ret_val = effect(*args, **kwargs)
        if (self._mock_wraps is not None and
             self._mock_return_value is DEFAULT):
            return self._mock_wraps(*args, **kwargs)
        if ret_val is DEFAULT:
            ret_val = self.return_value
        return ret_val
class Mock(CallableMixin, NonCallableMock):
    """Callable mock object.

    Combines CallableMixin (call recording, return_value, side_effect) with
    NonCallableMock (auto-created attributes, spec support, assert_* helpers).
    """
def _dot_lookup(thing, comp, import_path):
    """getattr(thing, comp) with an import retry: the attribute may be a
    submodule that only appears after importing *import_path*."""
    try:
        return getattr(thing, comp)
    except AttributeError:
        __import__(import_path)
        return getattr(thing, comp)
def _importer(target):
components = target.split('.')
import_path = components.pop(0)
thing = __import__(import_path)
for comp in components:
import_path += ".%s" % comp
thing = _dot_lookup(thing, comp, import_path)
return thing
def _is_started(patcher):
return hasattr(patcher, 'is_local')
class _patch(object):
    """Implementation behind patch()/patch.object()/patch.multiple():
    a context manager / decorator that swaps an attribute for a mock (or a
    supplied replacement) and restores it afterwards."""
    attribute_name = None
    # Global LIFO of started patchers, used by patch.stopall().
    _active_patches = []
    def __init__(
        self, getter, attribute, new, spec, create,
        spec_set, autospec, new_callable, kwargs
    ):
        if new_callable is not None:
            if new is not DEFAULT:
                raise ValueError(
                    "Cannot use 'new' and 'new_callable' together"
                )
            if autospec is not None:
                raise ValueError(
                    "Cannot use 'autospec' and 'new_callable' together"
                )
        # getter is a zero-arg callable resolving the patch target lazily.
        self.getter = getter
        self.attribute = attribute
        self.new = new
        self.new_callable = new_callable
        self.spec = spec
        self.create = create
        self.has_local = False
        self.spec_set = spec_set
        self.autospec = autospec
        self.kwargs = kwargs
        self.additional_patchers = []
    def copy(self):
        # Fresh copy so one _patch can decorate several test methods safely.
        patcher = _patch(
            self.getter, self.attribute, self.new, self.spec,
            self.create, self.spec_set,
            self.autospec, self.new_callable, self.kwargs
        )
        patcher.attribute_name = self.attribute_name
        patcher.additional_patchers = [
            p.copy() for p in self.additional_patchers
        ]
        return patcher
    def __call__(self, func):
        if isinstance(func, type):
            return self.decorate_class(func)
        return self.decorate_callable(func)
    def decorate_class(self, klass):
        # Decorate every test method (patch.TEST_PREFIX) with a copy.
        for attr in dir(klass):
            if not attr.startswith(patch.TEST_PREFIX):
                continue
            attr_value = getattr(klass, attr)
            if not hasattr(attr_value, "__call__"):
                continue
            patcher = self.copy()
            setattr(klass, attr, patcher(attr_value))
        return klass
    def decorate_callable(self, func):
        # Stack onto an already-patched function instead of re-wrapping.
        if hasattr(func, 'patchings'):
            func.patchings.append(self)
            return func
        @wraps(func)
        def patched(*args, **keywargs):
            extra_args = []
            entered_patchers = []
            exc_info = tuple()
            try:
                for patching in patched.patchings:
                    arg = patching.__enter__()
                    entered_patchers.append(patching)
                    if patching.attribute_name is not None:
                        keywargs.update(arg)
                    elif patching.new is DEFAULT:
                        extra_args.append(arg)
                args += tuple(extra_args)
                return func(*args, **keywargs)
            except:
                # (bare except mirrors upstream: always re-raised below)
                if (patching not in entered_patchers and
                    _is_started(patching)):
                    # the patcher may have been started, but an exception
                    # raised whilst entering one of its additional_patchers
                    entered_patchers.append(patching)
                # Pass the exception to __exit__
                exc_info = sys.exc_info()
                # re-raise the exception
                raise
            finally:
                for patching in reversed(entered_patchers):
                    patching.__exit__(*exc_info)
        patched.patchings = [self]
        return patched
    def get_original(self):
        """Return (original value, was-it-defined-locally-on-target)."""
        target = self.getter()
        name = self.attribute
        original = DEFAULT
        local = False
        try:
            original = target.__dict__[name]
        except (AttributeError, KeyError):
            original = getattr(target, name, DEFAULT)
        else:
            local = True
        if name in _builtins and isinstance(target, ModuleType):
            # patching a builtin name on a module is always allowed
            self.create = True
        if not self.create and original is DEFAULT:
            raise AttributeError(
                "%s does not have the attribute %r" % (target, name)
            )
        return original, local
    def __enter__(self):
        """Perform the patch; returns the replacement object."""
        new, spec, spec_set = self.new, self.spec, self.spec_set
        autospec, kwargs = self.autospec, self.kwargs
        new_callable = self.new_callable
        self.target = self.getter()
        # normalise False to None so the option checks below are uniform
        if spec is False:
            spec = None
        if spec_set is False:
            spec_set = None
        if autospec is False:
            autospec = None
        if spec is not None and autospec is not None:
            raise TypeError("Can't specify spec and autospec")
        if ((spec is not None or autospec is not None) and
            spec_set not in (True, None)):
            raise TypeError("Can't provide explicit spec_set *and* spec or autospec")
        original, local = self.get_original()
        if new is DEFAULT and autospec is None:
            inherit = False
            if spec is True:
                # set spec to the original
                spec = original
                if spec_set is True:
                    spec_set = original
                    spec = None
            elif spec is not None:
                if spec_set is True:
                    spec_set = spec
                    spec = None
            elif spec_set is True:
                spec_set = original
            if spec is not None or spec_set is not None:
                if original is DEFAULT:
                    raise TypeError("Can't use 'spec' with create=True")
                if isinstance(original, type):
                    # If we're patching out a class and there is a spec
                    inherit = True
            Klass = MagicMock
            _kwargs = {}
            if new_callable is not None:
                Klass = new_callable
            elif spec is not None or spec_set is not None:
                this_spec = spec
                if spec_set is not None:
                    this_spec = spec_set
                if _is_list(this_spec):
                    not_callable = '__call__' not in this_spec
                else:
                    not_callable = not callable(this_spec)
                if not_callable:
                    Klass = NonCallableMagicMock
            if spec is not None:
                _kwargs['spec'] = spec
            if spec_set is not None:
                _kwargs['spec_set'] = spec_set
            # add a name to mocks
            if (isinstance(Klass, type) and
                issubclass(Klass, NonCallableMock) and self.attribute):
                _kwargs['name'] = self.attribute
            _kwargs.update(kwargs)
            new = Klass(**_kwargs)
            if inherit and _is_instance_mock(new):
                # we can only tell if the instance should be callable if the
                # spec is not a list
                this_spec = spec
                if spec_set is not None:
                    this_spec = spec_set
                if (not _is_list(this_spec) and not
                    _instance_callable(this_spec)):
                    Klass = NonCallableMagicMock
                _kwargs.pop('name')
                new.return_value = Klass(_new_parent=new, _new_name='()',
                                         **_kwargs)
        elif autospec is not None:
            # spec is ignored, new *must* be default, spec_set is treated
            # as a boolean. Should we check spec is not None as well?
            if new is not DEFAULT:
                raise TypeError(
                    "autospec creates the mock for you. Can't specify "
                    "autospec and new."
                )
            if original is DEFAULT:
                raise TypeError("Can't use 'autospec' with create=True")
            spec_set = bool(spec_set)
            if autospec is True:
                autospec = original
            new = create_autospec(autospec, spec_set=spec_set,
                                  _name=self.attribute, **kwargs)
        elif kwargs:
            # can't set keyword args when we aren't creating the mock
            raise TypeError("Can't pass kwargs to a mock we aren't creating")
        new_attr = new
        self.temp_original = original
        self.is_local = local
        setattr(self.target, self.attribute, new_attr)
        if self.attribute_name is not None:
            # patch.multiple: hand back {attribute_name: mock} mappings
            extra_args = {}
            if self.new is DEFAULT:
                extra_args[self.attribute_name] = new
            for patching in self.additional_patchers:
                arg = patching.__enter__()
                if patching.new is DEFAULT:
                    extra_args.update(arg)
            return extra_args
        return new
    def __exit__(self, *exc_info):
        """Undo the patch."""
        if not _is_started(self):
            raise RuntimeError('stop called on unstarted patcher')
        if self.is_local and self.temp_original is not DEFAULT:
            setattr(self.target, self.attribute, self.temp_original)
        else:
            delattr(self.target, self.attribute)
            if not self.create and not hasattr(self.target, self.attribute):
                # needed for proxy objects like django settings
                setattr(self.target, self.attribute, self.temp_original)
        del self.temp_original
        del self.is_local
        del self.target
        for patcher in reversed(self.additional_patchers):
            if _is_started(patcher):
                patcher.__exit__(*exc_info)
    def start(self):
        """Activate a patch, returning any created mock."""
        result = self.__enter__()
        self._active_patches.append(self)
        return result
    def stop(self):
        """Stop an active patch."""
        try:
            self._active_patches.remove(self)
        except ValueError:
            # If the patch hasn't been started this will fail
            pass
        return self.__exit__()
def _get_target(target):
try:
target, attribute = target.rsplit('.', 1)
except (TypeError, ValueError):
raise TypeError("Need a valid target to patch. You supplied: %r" %
(target,))
getter = lambda: _importer(target)
return getter, attribute
def _patch_object(
    target, attribute, new=DEFAULT, spec=None,
    create=False, spec_set=None, autospec=None,
    new_callable=None, **kwargs
):
    """patch.object: patch *attribute* on an already-resolved *target* object
    (rather than a dotted import path)."""
    getter = lambda: target
    return _patch(
        getter, attribute, new, spec, create,
        spec_set, autospec, new_callable, kwargs
    )
def _patch_multiple(target, spec=None, create=False, spec_set=None,
                    autospec=None, new_callable=None, **kwargs):
    """patch.multiple: patch several attributes of *target* in one go.

    Each keyword maps attribute name -> replacement (or DEFAULT to receive a
    created mock).  Implemented as one primary patcher carrying the rest as
    additional_patchers so they enter/exit together.
    """
    if type(target) is str:
        getter = lambda: _importer(target)
    else:
        getter = lambda: target
    if not kwargs:
        raise ValueError(
            'Must supply at least one keyword argument with patch.multiple'
        )
    # need to wrap in a list for python 3, where items is a view
    items = list(kwargs.items())
    attribute, new = items[0]
    patcher = _patch(
        getter, attribute, new, spec, create, spec_set,
        autospec, new_callable, {}
    )
    patcher.attribute_name = attribute
    for attribute, new in items[1:]:
        this_patcher = _patch(
            getter, attribute, new, spec, create, spec_set,
            autospec, new_callable, {}
        )
        this_patcher.attribute_name = attribute
        patcher.additional_patchers.append(this_patcher)
    return patcher
def patch(
        target, new=DEFAULT, spec=None, create=False,
        spec_set=None, autospec=None, new_callable=None, **kwargs
    ):
    """Patch dotted-path *target* with *new* (a MagicMock by default) for the
    duration of a with-block, decorated function/class, or start()/stop()."""
    getter, attribute = _get_target(target)
    return _patch(
        getter, attribute, new, spec, create,
        spec_set, autospec, new_callable, kwargs
    )
class _patch_dict(object):
    """patch.dict: temporarily update (or clear and replace) a dict, or
    dict-like object, restoring the original contents on exit."""
    def __init__(self, in_dict, values=(), clear=False, **kwargs):
        if isinstance(in_dict, str):
            # A dotted path names the dict to patch.
            in_dict = _importer(in_dict)
        self.in_dict = in_dict
        # support any argument supported by dict(...) constructor
        self.values = dict(values)
        self.values.update(kwargs)
        self.clear = clear
        self._original = None
    def __call__(self, f):
        if isinstance(f, type):
            return self.decorate_class(f)
        @wraps(f)
        def _inner(*args, **kw):
            self._patch_dict()
            try:
                return f(*args, **kw)
            finally:
                self._unpatch_dict()
        return _inner
    def decorate_class(self, klass):
        # Apply a fresh decorator to every test method on the class.
        for attr in dir(klass):
            attr_value = getattr(klass, attr)
            if (attr.startswith(patch.TEST_PREFIX) and
                 hasattr(attr_value, "__call__")):
                decorator = _patch_dict(self.in_dict, self.values, self.clear)
                decorated = decorator(attr_value)
                setattr(klass, attr, decorated)
        return klass
    def __enter__(self):
        """Patch the dict."""
        self._patch_dict()
    def _patch_dict(self):
        values = self.values
        in_dict = self.in_dict
        clear = self.clear
        try:
            original = in_dict.copy()
        except AttributeError:
            # dict like object with no copy method
            # must support iteration over keys
            original = {}
            for key in in_dict:
                original[key] = in_dict[key]
        self._original = original
        if clear:
            _clear_dict(in_dict)
        try:
            in_dict.update(values)
        except AttributeError:
            # dict like object with no update method
            for key in values:
                in_dict[key] = values[key]
    def _unpatch_dict(self):
        in_dict = self.in_dict
        original = self._original
        _clear_dict(in_dict)
        try:
            in_dict.update(original)
        except AttributeError:
            for key in original:
                in_dict[key] = original[key]
    def __exit__(self, *args):
        """Unpatch the dict."""
        self._unpatch_dict()
        return False
    start = __enter__
    stop = __exit__
def _clear_dict(in_dict):
try:
in_dict.clear()
except AttributeError:
keys = list(in_dict)
for key in keys:
del in_dict[key]
def _patch_stopall():
    """Stop all active patches started with ``start`` (LIFO order)."""
    for active_patch in reversed(_patch._active_patches):
        active_patch.stop()
# Expose the helper entry points as attributes of patch itself.
patch.object = _patch_object
patch.dict = _patch_dict
patch.multiple = _patch_multiple
patch.stopall = _patch_stopall
patch.TEST_PREFIX = 'test'
# Space-separated bare names (without dunder underscores) of the magic
# methods MagicMock configures by default.
magic_methods = (
    "lt le gt ge eq ne "
    "getitem setitem delitem "
    "len contains iter "
    "hash str sizeof "
    "enter exit "
    "divmod neg pos abs invert "
    "complex int float index "
    "trunc floor ceil "
    "bool next "
)
# Binary numeric operators; also expanded into in-place (i*) and
# reflected (r*) variants below.
numerics = (
    "add sub mul div floordiv mod lshift rshift and xor or pow truediv"
)
inplace = ' '.join('i%s' % n for n in numerics.split())
right = ' '.join('r%s' % n for n in numerics.split())
# not including __prepare__, __instancecheck__, __subclasscheck__
# (as they are metaclass methods)
# __del__ is not supported at all as it causes problems if it exists
# Magic methods that are supported when explicitly set, but are not
# configured by default on MagicMock.
_non_defaults = set('__%s__' % method for method in [
    'get', 'set', 'delete', 'reversed', 'missing', 'reduce', 'reduce_ex',
    'getinitargs', 'getnewargs', 'getstate', 'setstate', 'getformat',
    'setformat', 'repr', 'dir', 'subclasses', 'format',
])
def _get_method(name, func):
    "Turns a callable object (like a mock) into a real function"
    def method(self, *args, **kw):
        return func(self, *args, **kw)
    # Rename so tracebacks and introspection show the magic method's name.
    method.__name__ = name
    return method
# All default magic methods, fully dunder-ified.
_magics = set(
    '__%s__' % method for method in
    ' '.join([magic_methods, numerics, inplace, right]).split()
)

# Every magic method a mock can be configured with (defaults + opt-ins).
_all_magics = _magics | _non_defaults
_unsupported_magics = set([
'__getattr__', '__setattr__',
'__init__', '__new__', '__prepare__'
'__instancecheck__', '__subclasscheck__',
'__del__'
])
# Magic methods whose sensible default is computed from the mock itself
# by delegating to the plain object implementation.
_calculate_return_value = {
    '__hash__': lambda self: object.__hash__(self),
    '__str__': lambda self: object.__str__(self),
    '__sizeof__': lambda self: object.__sizeof__(self),
}
# Fixed default return values for magic methods on a MagicMock.
_return_values = {
    '__lt__': NotImplemented,
    '__gt__': NotImplemented,
    '__le__': NotImplemented,
    '__ge__': NotImplemented,
    '__int__': 1,
    '__contains__': False,
    '__len__': 0,
    '__exit__': False,
    '__complex__': 1j,
    '__float__': 1.0,
    '__bool__': True,
    '__index__': 1,
}
def _get_eq(self):
    """Build the default __eq__ side effect for *self* (a mock)."""
    def __eq__(other):
        configured = self.__eq__._mock_return_value
        if configured is not DEFAULT:
            # An explicit return value was set; honour it.
            return configured
        # Default: mocks compare equal only by identity.
        return self is other
    return __eq__
def _get_ne(self):
    """Build the default __ne__ side effect for *self* (a mock).

    Honours an explicitly configured return value; otherwise falls back
    to identity comparison.  BUGFIX: the original returned the DEFAULT
    sentinel itself instead of the configured value (CPython bpo-30618).
    """
    def __ne__(other):
        ret_val = self.__ne__._mock_return_value
        if ret_val is not DEFAULT:
            return ret_val
        return self is not other
    return __ne__
def _get_iter(self):
    """Build the default __iter__ side effect for *self* (a mock)."""
    def __iter__():
        configured = self.__iter__._mock_return_value
        if configured is DEFAULT:
            # Nothing configured: iterate as an empty sequence.
            return iter([])
        # iter() passes an existing iterator through unchanged, so this
        # works whether the configured value is an iterable or iterator.
        return iter(configured)
    return __iter__
# Magic methods whose defaults are implemented as side effects because
# they need access to the mock instance at call time.
_side_effect_methods = {
    '__eq__': _get_eq,
    '__ne__': _get_ne,
    '__iter__': _get_iter,
}
def _set_return_value(mock, method, name):
    """Give magic *method* (attached to *mock*) its default behaviour.

    Tries, in order: a fixed return value, a value computed from the
    mock, and finally a side-effect factory.
    """
    fixed = _return_values.get(name, DEFAULT)
    if fixed is not DEFAULT:
        method.return_value = fixed
        return

    calculator = _calculate_return_value.get(name)
    if calculator is not None:
        try:
            return_value = calculator(mock)
        except AttributeError:
            # XXXX why do we return AttributeError here?
            #      set it as a side_effect instead?
            return_value = AttributeError(name)
        method.return_value = return_value
        return

    side_effector = _side_effect_methods.get(name)
    if side_effector is not None:
        method.side_effect = side_effector(mock)
class MagicMixin(object):
    """Mixin that installs configurable magic-method support on mocks."""

    def __init__(self, *args, **kw):
        _safe_super(MagicMixin, self).__init__(*args, **kw)
        self._mock_set_magics()

    def _mock_set_magics(self):
        """Install MagicProxy descriptors for the allowed magic methods."""
        these_magics = _magics

        if self._mock_methods is not None:
            # A spec restricts which magic methods are permitted.
            these_magics = _magics.intersection(self._mock_methods)

            # NOTE(review): the set() on the next line is immediately
            # overwritten by the assignment after it.
            remove_magics = set()
            remove_magics = _magics - these_magics

            for entry in remove_magics:
                if entry in type(self).__dict__:
                    # remove unneeded magic methods
                    delattr(self, entry)

        # don't overwrite existing attributes if called a second time
        these_magics = these_magics - set(type(self).__dict__)

        _type = type(self)
        for entry in these_magics:
            setattr(_type, entry, MagicProxy(entry, self))
class NonCallableMagicMock(MagicMixin, NonCallableMock):
    """A version of MagicMock that isn't callable."""

    def mock_add_spec(self, spec, spec_set=False):
        """Add a spec to the mock and refresh its magic methods."""
        self._mock_add_spec(spec, spec_set)
        self._mock_set_magics()
class MagicMock(MagicMixin, Mock):
    """Mock subclass with most magic methods pre-configured by default."""

    def mock_add_spec(self, spec, spec_set=False):
        """Add a spec to the mock and refresh its magic methods."""
        self._mock_add_spec(spec, spec_set)
        self._mock_set_magics()
class MagicProxy(object):
    """Descriptor that lazily creates a magic-method mock on first use."""

    def __init__(self, name, parent):
        self.name = name
        self.parent = parent

    def __call__(self, *args, **kwargs):
        # Materialise the child mock, then forward the call to it.
        return self.create_mock()(*args, **kwargs)

    def create_mock(self):
        """Create the child mock, attach it to the parent and return it."""
        entry = self.name
        parent = self.parent
        child = parent._get_child_mock(name=entry, _new_name=entry,
                                       _new_parent=parent)
        # Replace this proxy with the concrete mock on the parent.
        setattr(parent, entry, child)
        _set_return_value(parent, child, entry)
        return child

    def __get__(self, obj, _type=None):
        return self.create_mock()
class _ANY(object):
def __eq__(self, other):
return True
def __ne__(self, other):
return False
def __repr__(self):
return '<ANY>'
ANY = _ANY()
def _format_call_signature(name, args, kwargs):
message = '%s(%%s)' % name
formatted_args = ''
args_string = ', '.join([repr(arg) for arg in args])
kwargs_string = ', '.join([
'%s=%r' % (key, value) for key, value in sorted(kwargs.items())
])
if args_string:
formatted_args = args_string
if kwargs_string:
if formatted_args:
formatted_args += ', '
formatted_args += kwargs_string
return message % formatted_args
class _Call(tuple):
    """A tuple subclass representing a call as (name, args, kwargs).

    Equality is deliberately lenient so expected calls can omit the
    name, the args or the kwargs.  Attribute access and calling build
    chained child calls; used for call_args / mock_calls comparisons.
    """

    def __new__(cls, value=(), name=None, parent=None, two=False,
                from_kall=True):
        # Normalise the accepted shapes of `value` into (name, args, kwargs).
        name = ''
        args = ()
        kwargs = {}
        _len = len(value)
        if _len == 3:
            name, args, kwargs = value
        elif _len == 2:
            first, second = value
            if isinstance(first, str):
                name = first
                if isinstance(second, tuple):
                    args = second
                else:
                    kwargs = second
            else:
                args, kwargs = first, second
        elif _len == 1:
            value, = value
            if isinstance(value, str):
                name = value
            elif isinstance(value, tuple):
                args = value
            else:
                kwargs = value

        if two:
            # Two-tuple form (args, kwargs), used for call_args.
            return tuple.__new__(cls, (args, kwargs))

        return tuple.__new__(cls, (name, args, kwargs))

    def __init__(self, value=(), name=None, parent=None, two=False,
                 from_kall=True):
        self.name = name
        self.parent = parent
        # from_kall marks calls created via the module-level `call` helper.
        self.from_kall = from_kall

    def __eq__(self, other):
        """Lenient comparison: *other* may be a 0-3 element tuple."""
        if other is ANY:
            return True
        try:
            len_other = len(other)
        except TypeError:
            return False

        self_name = ''
        if len(self) == 2:
            self_args, self_kwargs = self
        else:
            self_name, self_args, self_kwargs = self

        other_name = ''
        if len_other == 0:
            other_args, other_kwargs = (), {}
        elif len_other == 3:
            other_name, other_args, other_kwargs = other
        elif len_other == 1:
            value, = other
            if isinstance(value, tuple):
                other_args = value
                other_kwargs = {}
            elif isinstance(value, str):
                other_name = value
                other_args, other_kwargs = (), {}
            else:
                other_args = ()
                other_kwargs = value
        else:
            # Length 2: (name, args), (name, kwargs) or (args, kwargs).
            first, second = other
            if isinstance(first, str):
                other_name = first
                if isinstance(second, tuple):
                    other_args, other_kwargs = second, {}
                else:
                    other_args, other_kwargs = (), second
            else:
                other_args, other_kwargs = first, second

        if self_name and other_name != self_name:
            return False

        # Names matched (or were omitted); compare args and kwargs.
        return (other_args, other_kwargs) == (self_args, self_kwargs)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __call__(self, *args, **kwargs):
        """Return a child _Call representing calling this attribute."""
        if self.name is None:
            return _Call(('', args, kwargs), name='()')

        name = self.name + '()'
        return _Call((self.name, args, kwargs), name=name, parent=self)

    def __getattr__(self, attr):
        """Return a child _Call representing attribute access."""
        if self.name is None:
            return _Call(name=attr, from_kall=False)
        name = '%s.%s' % (self.name, attr)
        return _Call(name=name, parent=self, from_kall=False)

    def count(self, *args, **kwargs):
        # Route through __getattr__ so call.count(...) builds a child call
        # instead of hitting tuple.count.
        return self.__getattr__('count')(*args, **kwargs)

    def index(self, *args, **kwargs):
        # Same routing as count(), for tuple.index.
        return self.__getattr__('index')(*args, **kwargs)

    def __repr__(self):
        if not self.from_kall:
            name = self.name or 'call'
            if name.startswith('()'):
                name = 'call%s' % name
            return name

        if len(self) == 2:
            name = 'call'
            args, kwargs = self
        else:
            name, args, kwargs = self
            if not name:
                name = 'call'
            elif not name.startswith('()'):
                name = 'call.%s' % name
            else:
                name = 'call%s' % name
        return _format_call_signature(name, args, kwargs)

    def call_list(self):
        """Return a _CallList of this call plus all its ancestor calls."""
        vals = []
        thing = self
        while thing is not None:
            if thing.from_kall:
                vals.append(thing)
            thing = thing.parent
        return _CallList(reversed(vals))
# Public helper for building expected calls, e.g. call.foo(1).bar().
call = _Call(from_kall=False)
def create_autospec(spec, spec_set=False, instance=False, _parent=None,
                    _name=None, **kwargs):
    """Create a mock using *spec* as its specification.

    Attributes of the mock are speced against the corresponding
    attributes of *spec*; functions get signature checking.  `instance`
    mocks an instance of a class rather than the class itself; _parent
    and _name are internal recursion parameters.
    """
    if _is_list(spec):
        # Can't pass a list instance to the mock constructor: it would be
        # interpreted as a list of attribute-name strings.
        spec = type(spec)

    is_type = isinstance(spec, type)

    _kwargs = {'spec': spec}
    if spec_set:
        _kwargs = {'spec_set': spec}
    elif spec is None:
        # None we mock with a normal mock without a spec
        _kwargs = {}
    if _kwargs and instance:
        _kwargs['_spec_as_instance'] = True

    _kwargs.update(kwargs)

    Klass = MagicMock
    if type(spec) in DescriptorTypes:
        # descriptors don't have a spec
        _kwargs = {}
    elif not _callable(spec):
        Klass = NonCallableMagicMock
    elif is_type and instance and not _instance_callable(spec):
        Klass = NonCallableMagicMock

    _name = _kwargs.pop('name', _name)

    _new_name = _name
    if _parent is None:
        # for a top level object no _new_name should be set
        _new_name = ''

    mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name,
                 name=_name, **_kwargs)

    if isinstance(spec, FunctionTypes):
        # should only happen at the top level because we don't
        # recurse for functions
        mock = _set_signature(mock, spec)
    else:
        _check_signature(spec, mock, is_type, instance)

    if _parent is not None and not instance:
        _parent._mock_children[_name] = mock

    if is_type and not instance and 'return_value' not in kwargs:
        # Calling the class mock yields an autospeced instance mock.
        mock.return_value = create_autospec(spec, spec_set, instance=True,
                                            _name='()', _parent=mock)

    # Recurse over the (non-magic) attributes of the spec.
    for entry in dir(spec):
        if _is_magic(entry):
            continue
        try:
            original = getattr(spec, entry)
        except AttributeError:
            continue

        kwargs = {'spec': original}
        if spec_set:
            kwargs = {'spec_set': original}

        if not isinstance(original, FunctionTypes):
            # Non-function attributes are created lazily via _SpecState.
            new = _SpecState(original, spec_set, mock, entry, instance)
            mock._mock_children[entry] = new
        else:
            parent = mock
            if isinstance(spec, FunctionTypes):
                parent = mock.mock

            # Whether the implicit first ('self') argument must be eaten.
            skipfirst = _must_skip(spec, entry, is_type)
            kwargs['_eat_self'] = skipfirst
            new = MagicMock(parent=parent, name=entry, _new_name=entry,
                            _new_parent=parent,
                            **kwargs)
            mock._mock_children[entry] = new
            _check_signature(original, new, skipfirst=skipfirst)

        # Functions are attached as attributes so they are reachable
        # directly on the mock as well as via _mock_children.
        if isinstance(new, FunctionTypes):
            setattr(mock, entry, new)

    return mock
def _must_skip(spec, entry, is_type):
    """Decide whether the implicit first ('self') argument of attribute
    *entry* must be skipped when signature-checking against *spec*."""
    owner = spec
    if not isinstance(owner, type):
        if entry in getattr(owner, '__dict__', {}):
            # Set directly on the instance - not a descriptor, no self.
            return False
        owner = owner.__class__

    # Walk the MRO looking for the raw (non-descriptor-resolved) entry.
    for klass in owner.__mro__:
        candidate = klass.__dict__.get(entry, DEFAULT)
        if candidate is DEFAULT:
            continue
        if isinstance(candidate, (staticmethod, classmethod)):
            return False
        if isinstance(getattr(candidate, '__get__', None), MethodWrapperTypes):
            # Normal method => skip if looked up on type
            # (if looked up on instance, self is already skipped)
            return is_type
        return False

    # shouldn't get here unless function is a dynamically provided attribute
    return is_type
def _get_class(obj):
try:
return obj.__class__
except AttributeError:
return type(obj)
class _SpecState(object):
def __init__(self, spec, spec_set=False, parent=None,
name=None, ids=None, instance=False):
self.spec = spec
self.ids = ids
self.spec_set = spec_set
self.parent = parent
self.instance = instance
self.name = name
# Types produced by `def` and by bound methods - used by autospec to
# decide which attributes need signature checking.
FunctionTypes = (
    # plain python function
    type(create_autospec),
    # instance method
    type(ANY.__eq__),
)

# Type of a method's __get__ wrapper (used by _must_skip).
MethodWrapperTypes = (
    type(ANY.__eq__.__get__),
)


# Spec for file objects; populated lazily inside mock_open().
file_spec = None
def _iterate_read_data(read_data):
data_as_list = ['{}\n'.format(l) for l in read_data.split('\n')]
if data_as_list[-1] == '\n':
data_as_list = data_as_list[:-1]
else:
# If there wasn't an extra newline by itself, then the file being
# newline that our naive format() added
data_as_list[-1] = data_as_list[-1][:-1]
for line in data_as_list:
yield line
def mock_open(mock=None, read_data=''):
    """Create a mock suitable for replacing the use of `open`.

    Works for `open` called directly or used as a context manager.
    `mock` is the mock to configure (a MagicMock speced on file
    attributes by default); `read_data` is the string returned by the
    handle's read/readline/readlines methods.
    """
    def _readlines_side_effect(*args, **kwargs):
        # An explicitly configured return value wins over read_data.
        if handle.readlines.return_value is not None:
            return handle.readlines.return_value
        return list(_data)

    def _read_side_effect(*args, **kwargs):
        if handle.read.return_value is not None:
            return handle.read.return_value
        return ''.join(_data)

    def _readline_side_effect():
        if handle.readline.return_value is not None:
            # A fixed readline value repeats forever.
            while True:
                yield handle.readline.return_value
        for line in _data:
            yield line

    global file_spec
    if file_spec is None:
        # Build the file spec lazily from the concrete _io types.
        import _io
        file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))

    if mock is None:
        mock = MagicMock(name='open', spec=open)

    handle = MagicMock(spec=file_spec)
    # Entering the context manager yields the handle itself.
    handle.__enter__.return_value = handle

    # Shared line iterator consumed by the side effects above.
    _data = _iterate_read_data(read_data)

    handle.write.return_value = None
    handle.read.return_value = None
    handle.readline.return_value = None
    handle.readlines.return_value = None

    handle.read.side_effect = _read_side_effect
    handle.readline.side_effect = _readline_side_effect()
    handle.readlines.side_effect = _readlines_side_effect

    mock.return_value = handle
    return mock
class PropertyMock(Mock):
    """A mock intended to be used as a property (or other descriptor).

    Fetching it calls the mock with no args; setting it calls the mock
    with the assigned value.
    """
    def _get_child_mock(self, **kwargs):
        # Children of a PropertyMock are full MagicMocks.
        return MagicMock(**kwargs)

    def __get__(self, obj, obj_type):
        return self()
    def __set__(self, obj, val):
        self(val)
| true | true |
1c2dd28147cce5307dcfac0fa27383f178868eeb | 2,046 | py | Python | paddlespeech/server/restful/api.py | qingen/PaddleSpeech | 657c424f6c679873118c4e94bc24a3ff00b58ae1 | [
"Apache-2.0"
] | null | null | null | paddlespeech/server/restful/api.py | qingen/PaddleSpeech | 657c424f6c679873118c4e94bc24a3ff00b58ae1 | [
"Apache-2.0"
] | null | null | null | paddlespeech/server/restful/api.py | qingen/PaddleSpeech | 657c424f6c679873118c4e94bc24a3ff00b58ae1 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from typing import List
from fastapi import APIRouter
from paddlespeech.cli.log import logger
from paddlespeech.server.restful.asr_api import router as asr_router
from paddlespeech.server.restful.cls_api import router as cls_router
from paddlespeech.server.restful.text_api import router as text_router
from paddlespeech.server.restful.tts_api import router as tts_router
from paddlespeech.server.restful.vector_api import router as vec_router
from paddlespeech.server.restful.acs_api import router as acs_router
# Shared APIRouter collecting the routers of every enabled service.
_router = APIRouter()
def setup_router(api_list: List):
    """setup router for fastapi

    Args:
        api_list (List): service names, e.g. [asr, tts, cls, text, vector]
    Returns:
        APIRouter
    """
    # Map each supported service name to its router.
    available = {
        'asr': asr_router,
        'tts': tts_router,
        'cls': cls_router,
        'text': text_router,
        'vector': vec_router,
        'acs': acs_router,
    }
    for api_name in api_list:
        service_router = available.get(api_name.lower())
        if service_router is None:
            logger.error(
                f"PaddleSpeech has not support such service: {api_name}")
            sys.exit(-1)
        _router.include_router(service_router)
    return _router
| 35.894737 | 74 | 0.706745 |
import sys
from typing import List
from fastapi import APIRouter
from paddlespeech.cli.log import logger
from paddlespeech.server.restful.asr_api import router as asr_router
from paddlespeech.server.restful.cls_api import router as cls_router
from paddlespeech.server.restful.text_api import router as text_router
from paddlespeech.server.restful.tts_api import router as tts_router
from paddlespeech.server.restful.vector_api import router as vec_router
from paddlespeech.server.restful.acs_api import router as acs_router
# Shared APIRouter collecting the routers of every enabled service.
_router = APIRouter()
def setup_router(api_list: List):
    """Register the router of each requested service on _router.

    Args:
        api_list (List): service names, e.g. [asr, tts, cls, text, vector]
    Returns:
        APIRouter
    """
    for api_name in api_list:
        if api_name.lower() == 'asr':
            _router.include_router(asr_router)
        elif api_name.lower() == 'tts':
            _router.include_router(tts_router)
        elif api_name.lower() == 'cls':
            _router.include_router(cls_router)
        elif api_name.lower() == 'text':
            _router.include_router(text_router)
        elif api_name.lower() == 'vector':
            _router.include_router(vec_router)
        elif api_name.lower() == 'acs':
            _router.include_router(acs_router)
        else:
            # Unknown service name: log and abort startup.
            logger.error(
                f"PaddleSpeech has not support such service: {api_name}")
            sys.exit(-1)
    return _router
| true | true |
1c2dd2b635c7b8fbba9234c5737e118e5d548eff | 182 | py | Python | main.py | Iampato/Smart-Meter | 9531c860ca48a452212f5122a3c5d84965e4ce42 | [
"MIT"
] | 2 | 2020-11-20T12:29:42.000Z | 2020-11-24T07:28:32.000Z | main.py | Iampato/Smart-Meter | 9531c860ca48a452212f5122a3c5d84965e4ce42 | [
"MIT"
] | 1 | 2021-02-01T07:12:57.000Z | 2021-02-01T07:12:57.000Z | main.py | Iampato/Smart-Meter | 9531c860ca48a452212f5122a3c5d84965e4ce42 | [
"MIT"
] | null | null | null | from config.connect import SmartMeterConfig
def main():
    """Print the database user from the smart-meter configuration."""
    config = SmartMeterConfig()
    print(config.database_user)
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()
| 16.545455 | 43 | 0.736264 | from config.connect import SmartMeterConfig
def main():
    """Print the database user from the smart-meter configuration."""
    smartMeterConfig = SmartMeterConfig()
    print(smartMeterConfig.database_user)

# Script entry point guard.
if __name__ == "__main__":
    main()
| true | true |
1c2dd3309b544c4e3f22b3bb0f618dee13a80a3c | 54 | py | Python | codingbat.com/Warmup-1/missing_char.py | ahmedelq/PythonicAlgorithms | ce10dbb6e1fd0ea5c922a932b0f920236aa411bf | [
"MIT"
] | null | null | null | codingbat.com/Warmup-1/missing_char.py | ahmedelq/PythonicAlgorithms | ce10dbb6e1fd0ea5c922a932b0f920236aa411bf | [
"MIT"
] | null | null | null | codingbat.com/Warmup-1/missing_char.py | ahmedelq/PythonicAlgorithms | ce10dbb6e1fd0ea5c922a932b0f920236aa411bf | [
"MIT"
def missing_char(str, n):
    """Return *str* with the character at index *n* removed.

    NOTE: the parameter deliberately keeps its original name even though
    it shadows the builtin ``str``, to stay call-compatible.
    """
    # Everything before index n plus everything after it.
    return str[:n] + str[n+1:]
1c2dd48d13f791fc40091655352263415312c629 | 6,845 | py | Python | sim_results/results/read_results.py | maxrudolph1/robotarium_mpe | 025c182899c0092c95e1ed3c2a38117f257cbe25 | [
"MIT"
] | null | null | null | sim_results/results/read_results.py | maxrudolph1/robotarium_mpe | 025c182899c0092c95e1ed3c2a38117f257cbe25 | [
"MIT"
] | null | null | null | sim_results/results/read_results.py | maxrudolph1/robotarium_mpe | 025c182899c0092c95e1ed3c2a38117f257cbe25 | [
"MIT"
] | null | null | null | import numpy as np
from matplotlib import pyplot as plt
import pandas as pd
import seaborn as sb
import matplotlib.patches as mpatches
from scipy.stats import mannwhitneyu
# Global seaborn styling for all figures below.
sb.set_theme(style="darkgrid")

# NOTE(review): these four accumulators appear unused in the active code
# below (possibly left over for the disabled plotting section) - confirm.
bcfc_rew = []
unif_rew = []
mono_rew = []
rand_rew = []

# Task / method names; must match the result-directory layout on disk.
tasks = ['navigation','coverage','transport']
meths = ['expert', 'assigned', 'loc_based', 'uniform', 'combined']

# data_dict[method][task] -> per-run cumulative episodic reward array.
data_dict = {}
# diff_dict[method][task] -> reward advantage of 'assigned' over method.
diff_dict = {}
# Load each method/task result file and collapse the per-step rewards
# into one cumulative value per run (sum over axis 0).
for meth in meths:
    data_dict[meth] = {}
    for i,task in enumerate(tasks):
        if meth == 'combined':
            # 'combined' stores all tasks in one file, indexed by task.
            data_dict[meth][task] = np.sum(np.load('./'+meth+'/reward_' + 'combined' + '.npy')[i].squeeze(), axis=0)
        else:
            data_dict[meth][task] = np.sum(np.load('./'+meth+'/reward_' + task + '.npy').squeeze(), axis=0)
# Baseline methods compared against 'assigned' (the full pipeline).
other_meths = [ 'loc_based', 'uniform', 'combined']
for meth in other_meths:
    diff_dict[meth] = {}
    for i, task in enumerate(tasks):
        # Positive values: 'assigned' beat this baseline on that run.
        diff_dict[meth][task] = data_dict['assigned'][task] - data_dict[meth][task]

# Number of independent runs per (method, task) combination.
runs = data_dict['assigned']['navigation'].shape[0]
# Flatten the nested dicts into long-format columns for pandas/seaborn.
task_list = []
meth_list = []
val_list = []
diff_task_list = []
diff_meth_list = []
diff_val_list = []
for meth in meths:
    for task in tasks:
        for i in range(runs):
            task_list.append(task)
            meth_list.append(meth)
            val_list.append(data_dict[meth][task][i])
for meth in other_meths:
    for task in tasks:
        for i in range(runs):
            diff_task_list.append(task)
            diff_meth_list.append(meth)
            diff_val_list.append(diff_dict[meth][task][i])
diffs = np.array(diff_val_list)
# Fraction of runs where 'assigned' was at least as good as the baseline.
print(np.sum(diffs >= 0) / len(diff_val_list))
# Long-format DataFrames: raw rewards and reward differences.
df = pd.DataFrame({'task' : task_list, 'meth': meth_list, 'rew':val_list})
diff_df = pd.DataFrame({'task' : diff_task_list, 'meth': diff_meth_list, 'rew':diff_val_list})
# Mann-Whitney U test of each baseline vs 'assigned' per task.
# NOTE(review): U1, p, nx and ny are computed but never used or printed -
# presumably left over from interactive analysis; confirm before removing.
for task in tasks:
    for meth in other_meths:
        U1, p = mannwhitneyu(data_dict[meth][task],data_dict['assigned'][task])
        nx, ny = data_dict[meth][task].shape[0], data_dict['assigned'][task].shape[0]
count = 0
# One scatter subplot per task: baseline ('combined') reward on x versus
# 'assigned' reward on y; green dots are runs the pipeline won.
fig, axs = plt.subplots(1,3)
for i,task in enumerate(tasks):
    for j,meth in enumerate(['combined']):
        count += 1
        mask = data_dict['assigned'][task] > data_dict[meth][task]
        axs[i].plot(data_dict[meth][task][mask],data_dict['assigned'][task][mask], 'g.')
        axs[i].plot(data_dict[meth][task][np.logical_not(mask)],data_dict['assigned'][task][np.logical_not(mask)], 'r.')
        # Win rate of 'assigned', used as the subplot title.
        perf = str(np.sum(mask)/mask.shape[0])
        print(perf)
        yl = axs[i].get_ylim()
        xl = axs[i].get_xlim()
        # Use the wider of the two axis ranges so the plot stays square.
        lim = np.array(xl if xl[1] - xl[0] > yl[1] - yl[0] else yl)
        axs[i].set_xlim(lim[0], lim[1])
        axs[i].set_ylim(lim[0], lim[1])
        axs[i].set_title(perf)
        # Diagonal reference line: points above it favour 'assigned'.
        axs[i].plot(lim, lim, '-')
        axs[i].set_aspect('equal')
## Plotting Violins
'''
plt.figure()
for op,task in enumerate(tasks):
plt.subplot(1,3,op+1)
cur_data = df.query("task == '" + task + "'")
violin_width = 1 if task == 'navigation' else 1
ax = sb.violinplot(data=cur_data,
x='meth',
y='rew',
linewidth=0,
label='_nolegend_',
width=violin_width)
ax = sb.stripplot(data=cur_data,
x='meth',
y='rew',
size=1.5)
boxwidth= 0.075 if task == 'transport' else 0.075
sb.boxplot(data=cur_data,
x='meth',
y='rew',
width=boxwidth,
fliersize=0)
#patches = []
print(len(meths))
ax.set_xticklabels([''] * len(meths))
ax.set_xlabel('')
#patches = []
patches = []
# Collect colors of violin plots and make opaque
for col in [ax.collections[l] for l in [0,2,4,6,8]]: #[0,2,4,6] for 4 different plots
patches.append(col.get_facecolor())
print(col.get_facecolor())
col.set_alpha(.2)
for col in [ax.collections[l] for l in [9,10,11,12,13]]:#,12,13]]: # [8,9,10,11] for 4 different plots
col.set_alpha(.3)
print('----------')
patch0 = mpatches.Patch(color=patches[0], label='Expert')
patch1 = mpatches.Patch(color=patches[1], label='BCFC (Full Pipeline)')
patch2 = mpatches.Patch(color=patches[2], label='BCFC (Random Task Allocation)')
patch3 = mpatches.Patch(color=patches[3], label='BCFC (Uniform Task Allocation)')
patch4 = mpatches.Patch(color=patches[4], label='Monolithic')
if task == 'transport':
plt.legend(handles=[patch0, patch1, patch2, patch3, patch4])
plt.ylabel('Cumulative Episodic Reward')
else:
plt.ylabel('')
pass#ax.get_legend().remove()
lab = [task in e.lower() for e in tasks]
res = [i for i, val in enumerate(lab) if val]
plt.title(tasks[res[0]])
plt.figure()
for op,task in enumerate(tasks):
plt.subplot(1,3,op+1)
cur_data = diff_df.query("task == '" + task + "'")
violin_width = 1 if task == 'navigation' else 1
ax = sb.violinplot(data=cur_data,
x='meth',
y='rew',
linewidth=0,
label='_nolegend_',
width=violin_width)
ax = sb.stripplot(data=cur_data,
x='meth',
y='rew',
size=1.5)
boxwidth= 0.075 if task == 'transport' else 0.075
sb.boxplot(data=cur_data,
x='meth',
y='rew',
width=boxwidth,
fliersize=0)
#patches = []
print(len(meths))
ax.set_xticklabels([''] * len(other_meths))
ax.set_xlabel('')
#patches = []
patches = []
# Collect colors of violin plots and make opaque
for col in [ax.collections[l] for l in [0,2,4]]: #[0,2,4,6] for 4 different plots
patches.append(col.get_facecolor())
print(col.get_facecolor())
col.set_alpha(.2)
for col in [ax.collections[l] for l in [6,7,8]]:#,12,13]]: # [8,9,10,11] for 4 different plots
col.set_alpha(.3)
print('----------')
patch0 = mpatches.Patch(color=patches[0], label='BCFC (Random Task Allocation)')
patch1 = mpatches.Patch(color=patches[1], label='BCFC (Uniform Task Allocation)')
patch2 = mpatches.Patch(color=patches[2], label='Monolithic')
if task == 'transport':
plt.legend(handles=[patch0, patch1, patch2])
plt.ylabel('Cumulative Episodic Reward')
else:
plt.ylabel('')
pass#ax.get_legend().remove()
lab = [task in e.lower() for e in tasks]
res = [i for i, val in enumerate(lab) if val]
plt.title(tasks[res[0]])
'''
# Show figures without blocking, then wait for the user before closing.
plt.show(block=False)
plt.pause(0.001) # Pause for interval seconds.
input("hit[enter] to end.")
plt.close('all')
| 32.595238 | 120 | 0.57195 | import numpy as np
from matplotlib import pyplot as plt
import pandas as pd
import seaborn as sb
import matplotlib.patches as mpatches
from scipy.stats import mannwhitneyu
sb.set_theme(style="darkgrid")
bcfc_rew = []
unif_rew = []
mono_rew = []
rand_rew = []
tasks = ['navigation','coverage','transport']
meths = ['expert', 'assigned', 'loc_based', 'uniform', 'combined']
data_dict = {}
diff_dict = {}
for meth in meths:
data_dict[meth] = {}
for i,task in enumerate(tasks):
if meth == 'combined':
data_dict[meth][task] = np.sum(np.load('./'+meth+'/reward_' + 'combined' + '.npy')[i].squeeze(), axis=0)
else:
data_dict[meth][task] = np.sum(np.load('./'+meth+'/reward_' + task + '.npy').squeeze(), axis=0)
other_meths = [ 'loc_based', 'uniform', 'combined']
for meth in other_meths:
diff_dict[meth] = {}
for i, task in enumerate(tasks):
diff_dict[meth][task] = data_dict['assigned'][task] - data_dict[meth][task]
runs = data_dict['assigned']['navigation'].shape[0]
task_list = []
meth_list = []
val_list = []
diff_task_list = []
diff_meth_list = []
diff_val_list = []
for meth in meths:
for task in tasks:
for i in range(runs):
task_list.append(task)
meth_list.append(meth)
val_list.append(data_dict[meth][task][i])
for meth in other_meths:
for task in tasks:
for i in range(runs):
diff_task_list.append(task)
diff_meth_list.append(meth)
diff_val_list.append(diff_dict[meth][task][i])
diffs = np.array(diff_val_list)
print(np.sum(diffs >= 0) / len(diff_val_list))
df = pd.DataFrame({'task' : task_list, 'meth': meth_list, 'rew':val_list})
diff_df = pd.DataFrame({'task' : diff_task_list, 'meth': diff_meth_list, 'rew':diff_val_list})
for task in tasks:
for meth in other_meths:
U1, p = mannwhitneyu(data_dict[meth][task],data_dict['assigned'][task])
nx, ny = data_dict[meth][task].shape[0], data_dict['assigned'][task].shape[0]
count = 0
fig, axs = plt.subplots(1,3)
for i,task in enumerate(tasks):
for j,meth in enumerate(['combined']):
count += 1
mask = data_dict['assigned'][task] > data_dict[meth][task]
axs[i].plot(data_dict[meth][task][mask],data_dict['assigned'][task][mask], 'g.')
axs[i].plot(data_dict[meth][task][np.logical_not(mask)],data_dict['assigned'][task][np.logical_not(mask)], 'r.')
perf = str(np.sum(mask)/mask.shape[0])
print(perf)
yl = axs[i].get_ylim()
xl = axs[i].get_xlim()
lim = np.array(xl if xl[1] - xl[0] > yl[1] - yl[0] else yl)
axs[i].set_xlim(lim[0], lim[1])
axs[i].set_ylim(lim[0], lim[1])
axs[i].set_title(perf)
axs[i].plot(lim, lim, '-')
axs[i].set_aspect('equal')
lse)
plt.pause(0.001)
input("hit[enter] to end.")
plt.close('all')
| true | true |
1c2dd54f4917d6742cc724519b3825c068474f33 | 368 | py | Python | pages/tests.py | waseidel/django | 59b32cf9d0a9104976038015bcaea1243a8e48f9 | [
"MIT"
] | null | null | null | pages/tests.py | waseidel/django | 59b32cf9d0a9104976038015bcaea1243a8e48f9 | [
"MIT"
] | null | null | null | pages/tests.py | waseidel/django | 59b32cf9d0a9104976038015bcaea1243a8e48f9 | [
"MIT"
] | null | null | null | # pages/tests.py
from django.test import SimpleTestCase
class PagesTests(SimpleTestCase):
    """Smoke tests asserting the static pages return HTTP 200."""

    def test_home_page_status_code(self):
        """The home page responds successfully."""
        self.assertEqual(self.client.get('/').status_code, 200)

    def test_about_page_status_code(self):
        """The about page responds successfully."""
        self.assertEqual(self.client.get('/about/').status_code, 200)
| 28.307692 | 51 | 0.714674 |
from django.test import SimpleTestCase
class PagesTests(SimpleTestCase):
    """Smoke tests asserting the static pages return HTTP 200."""
    def test_home_page_status_code(self):
        """The home page responds successfully."""
        response = self.client.get('/')
        self.assertEqual(response.status_code, 200)
    def test_about_page_status_code(self):
        """The about page responds successfully."""
        response = self.client.get('/about/')
        self.assertEqual(response.status_code, 200)
| true | true |
1c2dd7bd6529ffac6d7aa07152e39823165fea30 | 52,799 | py | Python | openprocurement/auctions/rubble/tests/blanks/migration_blanks.py | openprocurement/openprocurement.auctions.rubble | 72369d411085fe50030f99320928636307b18426 | [
"Apache-2.0"
] | 1 | 2020-09-29T08:34:32.000Z | 2020-09-29T08:34:32.000Z | openprocurement/auctions/rubble/tests/blanks/migration_blanks.py | openprocurement/openprocurement.auctions.rubble | 72369d411085fe50030f99320928636307b18426 | [
"Apache-2.0"
] | 21 | 2018-06-06T12:45:49.000Z | 2022-03-21T22:16:26.000Z | openprocurement/auctions/rubble/tests/blanks/migration_blanks.py | openprocurement/openprocurement.auctions.rubble | 72369d411085fe50030f99320928636307b18426 | [
"Apache-2.0"
] | 8 | 2018-05-02T07:54:09.000Z | 2019-03-06T14:31:12.000Z | # -*- coding: utf-8 -*-
from copy import deepcopy
from datetime import timedelta
from uuid import uuid4
from openprocurement.auctions.core.utils import get_now
# MigrateTestFrom1To2Bids
def migrate_one_pending(self):
    """Migration blank: schema-1 auction seeded with one pending award.

    After migration the auction should expose a pending.payment award
    plus a second pending.waiting award; disqualifying the first via the
    API should then move the waiting award to pending.verification.
    """
    # Seed a pre-migration (old schema) pending award directly in the db.
    auction = self.db.get(self.auction_id)
    award = {
        'id': uuid4().hex,
        "date": get_now().isoformat(),
        "bid_id": auction['bids'][1]['id'],
        "status": "pending",
        "complaintPeriod": {
            "startDate": get_now().isoformat(),
        }
    }
    auction['awards'] = [award]
    auction.update(auction)
    self.db.save(auction)

    self.runner.migrate(self.steps)

    # Post-migration state: two awards, both bids still active.
    auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
    self.assertEqual(len(auction['awards']), 2)
    self.assertEqual(auction['awards'][0]['status'], 'pending.payment')
    self.assertIn('verificationPeriod', auction['awards'][0])
    self.assertIn('paymentPeriod', auction['awards'][0])
    self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
    self.assertEqual(auction['bids'][0]['status'], 'active')
    self.assertEqual(auction['bids'][1]['status'], 'active')
    self.assertEqual(auction['status'], 'active.qualification')

    response = self.app.get('/auctions/{}'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['status'], u'active.qualification')

    response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']), 2)
    self.assertEqual(response.json['data'][0]['status'], u'pending.payment')
    self.assertEqual(response.json['data'][1]['status'], u'pending.waiting')

    # Disqualify the first award through the API.
    pending_award = response.json['data'][0]
    response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
        self.auction_id, pending_award['id'], self.auction_token
    ), {"data": {"status": "unsuccessful"}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['status'], u'unsuccessful')

    response = self.app.get('/auctions/{}'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['status'], u'active.qualification')

    # The waiting award must have been promoted to verification.
    response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data'][1]['status'], u'pending.verification')
def migrate_one_active(self):
    """Migration blank: awarded schema-1 auction with one active award.

    Seeds an active award with its contract and back-dated periods, runs
    the migration and checks the award keeps its active status (gaining
    the new periods), a pending.waiting award is added, and disqualifying
    the active award rolls the auction back to qualification.
    """
    # Seed a pre-migration active award plus its pending contract.
    auction = self.db.get(self.auction_id)
    now = get_now()
    award = {
        'id': uuid4().hex,
        "date": now.isoformat(),
        "bid_id": auction['bids'][1]['id'],
        'suppliers': auction['bids'][1]['tenderers'],
        'value': auction['bids'][1]['value'],
        "status": "active",
        "complaintPeriod": {
            "startDate": now.isoformat(),
            "endDate": now.isoformat()
        }
    }
    auction['awards'] = [award]
    # Back-date the auction periods so it reads as already awarded.
    auction.update({
        "enquiryPeriod": {
            "startDate": (now - timedelta(days=15)).isoformat(),
            "endDate": (now - timedelta(days=7)).isoformat()
        },
        "tenderPeriod": {
            "startDate": (now - timedelta(days=15)).isoformat(),
            "endDate": (now - timedelta(days=1)).isoformat()
        },
        "auctionPeriod": {
            "startDate": (now - timedelta(days=1)).isoformat(),
            "endDate": (now).isoformat()
        },
        "awardPeriod": {
            "startDate": (now).isoformat(),
            "endDate": (now).isoformat()
        }
    })
    contract_id = uuid4().hex
    auction['contracts'] = [{
        'awardID': award['id'],
        'id': contract_id,
        'suppliers': award['suppliers'],
        'value': award['value'],
        'date': now.isoformat(),
        'items': auction['items'],
        'contractID': '{}-11'.format(auction['auctionID'])}]
    auction['status'] = 'active.awarded'
    auction.update(auction)
    self.db.save(auction)

    self.runner.migrate(self.steps)

    # Post-migration: active award keeps status but gains the new periods.
    auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
    self.assertEqual(len(auction['awards']), 2)
    self.assertEqual(auction['awards'][0]['status'], 'active')
    self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
    self.assertIn('verificationPeriod', auction['awards'][0])
    self.assertIn('paymentPeriod', auction['awards'][0])
    self.assertIn('signingPeriod', auction['awards'][0])
    self.assertEqual(auction['bids'][0]['status'], 'active')
    self.assertEqual(auction['bids'][1]['status'], 'active')
    self.assertEqual(auction['contracts'][0]['status'], 'pending')
    self.assertEqual(auction['status'], 'active.awarded')

    response = self.app.get('/auctions/{}'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['status'], u'active.awarded')

    response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']), 2)
    self.assertEqual(response.json['data'][0]['status'], u'active')
    self.assertEqual(response.json['data'][1]['status'], u'pending.waiting')

    # Disqualify the active award through the API.
    active_award = response.json['data'][0]
    response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
        self.auction_id, active_award['id'], self.auction_token
    ), {"data": {"status": "unsuccessful"}})
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['status'], u'unsuccessful')

    # Auction rolls back to qualification and the waiting award is
    # promoted to verification.
    response = self.app.get('/auctions/{}'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['status'], u'active.qualification')

    response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data'][1]['status'], u'pending.verification')
def migrate_unsuccessful_pending(self):
auction = self.db.get(self.auction_id)
pending_award = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][1]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
unsuccessful_award = deepcopy(pending_award)
unsuccessful_award['id'] = uuid4().hex
unsuccessful_award['status'] = 'unsuccessful'
unsuccessful_award['complaintPeriod']['endDate'] = get_now().isoformat()
pending_award['bid_id'] = auction['bids'][0]['id']
auction['awards'] = [unsuccessful_award, pending_award]
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
self.assertEqual(len(auction['awards']), 2)
self.assertEqual(auction['bids'][0]['status'], 'active')
self.assertEqual(auction['bids'][1]['status'], 'active')
self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
self.assertEqual(auction['awards'][1]['status'], 'pending.payment')
self.assertEqual(auction['status'], 'active.qualification')
    def migrate_unsuccessful_active(self):
        """Migrate an awarded auction that has an unsuccessful award followed
        by the active (winning) one: both award statuses, both bid statuses
        and the pending contract must survive the migration unchanged.
        """
        auction = self.db.get(self.auction_id)
        now = get_now()
        # Active award for the second bid with an already-closed
        # complaint period.
        active_award = {
            'id': uuid4().hex,
            "date": now.isoformat(),
            "bid_id": auction['bids'][1]['id'],
            'suppliers': auction['bids'][1]['tenderers'],
            'value': auction['bids'][1]['value'],
            "status": "active",
            "complaintPeriod": {
                "startDate": now.isoformat(),
                "endDate": now.isoformat()
            }
        }
        # Losing award derived from the same template; the active award is
        # then re-pointed at the first bid.
        unsuccessful_award = deepcopy(active_award)
        unsuccessful_award['id'] = uuid4().hex
        unsuccessful_award['status'] = 'unsuccessful'
        active_award['bid_id'] = auction['bids'][0]['id']
        auction['awards'] = [unsuccessful_award, active_award]
        # Shift every auction period into the past so the document reads as
        # a finished auction sitting in the award stage.
        auction.update({
            "enquiryPeriod": {
                "startDate": (now - timedelta(days=8)).isoformat(),
                "endDate": (now - timedelta(days=1)).isoformat()
            },
            "tenderPeriod": {
                "startDate": (now - timedelta(days=8)).isoformat(),
                "endDate": (now - timedelta(days=1)).isoformat()
            },
            "auctionPeriod": {
                "startDate": (now - timedelta(days=1)).isoformat(),
                "endDate": (now).isoformat()
            },
            "awardPeriod": {
                "startDate": (now).isoformat(),
                "endDate": (now).isoformat()
            }
        })
        # Contract attached to the active award (no explicit status: the
        # migrated document is expected to expose it as 'pending').
        auction['contracts'] = [{
            'awardID': active_award['id'],
            'suppliers': active_award['suppliers'],
            'value': active_award['value'],
            'date': now.isoformat(),
            'items': auction['items'],
            'contractID': '{}-11'.format(auction['auctionID'])}]
        auction['status'] = 'active.awarded'
        auction.update(auction)  # NOTE(review): updating a dict with itself is a no-op
        self.db.save(auction)
        self.runner.migrate(self.steps)
        # After migration: statuses preserved, contract still pending.
        auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
        self.assertEqual(len(auction['awards']), 2)
        self.assertEqual(auction['bids'][0]['status'], 'active')
        self.assertEqual(auction['bids'][1]['status'], 'active')
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][1]['status'], 'active')
        self.assertEqual(auction['contracts'][0]['status'], 'pending')
        self.assertEqual(auction['status'], 'active.awarded')
# MigrateTestFrom1To2WithTwoBids
def migrate_pending_to_unsuccesful(self):
auction = self.db.get(self.auction_id)
award = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][1]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
auction['awards'] = [award]
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
self.assertEqual(len(auction['awards']), 2)
self.assertEqual(auction['awards'][0]['status'], 'pending.payment')
self.assertIn('verificationPeriod', auction['awards'][0])
self.assertIn('paymentPeriod', auction['awards'][0])
self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active.qualification')
response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(len(response.json['data']), 2)
self.assertEqual(response.json['data'][0]['status'], u'pending.payment')
self.assertEqual(response.json['data'][1]['status'], u'pending.waiting')
pending_award = response.json['data'][0]
waiting_award = response.json['data'][1]
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, pending_award['id'], self.auction_token
), {"data": {"status": "unsuccessful"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'unsuccessful')
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, waiting_award['id'], self.auction_token
), {"data": {"status": "unsuccessful"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'unsuccessful')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'unsuccessful')
    def migrate_pending_to_complete(self):
        """Migrate a lone pending award into the two-award model, then walk
        the waiting award through protocol upload, payment and activation,
        and finally activate the contract to complete the auction.
        """
        auction = self.db.get(self.auction_id)
        # Single pending award for the second bid.
        award = {
            'id': uuid4().hex,
            "date": get_now().isoformat(),
            "bid_id": auction['bids'][1]['id'],
            "status": "pending",
            "complaintPeriod": {
                "startDate": get_now().isoformat(),
            }
        }
        auction['awards'] = [award]
        auction.update(auction)  # NOTE(review): updating a dict with itself is a no-op
        self.db.save(auction)
        self.runner.migrate(self.steps)
        # Migration splits the pending award into pending.payment (with the
        # new periods) plus a pending.waiting runner-up.
        auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
        self.assertEqual(len(auction['awards']), 2)
        self.assertEqual(auction['awards'][0]['status'], 'pending.payment')
        self.assertIn('verificationPeriod', auction['awards'][0])
        self.assertIn('paymentPeriod', auction['awards'][0])
        self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active.qualification')
        response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']), 2)
        self.assertEqual(response.json['data'][0]['status'], u'pending.payment')
        self.assertEqual(response.json['data'][1]['status'], u'pending.waiting')
        pending_award = response.json['data'][0]
        waiting_award = response.json['data'][1]
        # Fail the payment award so the waiting one takes over.
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, pending_award['id'], self.auction_token
        ), {"data": {"status": "unsuccessful"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        # Upload and tag the auction protocol — required before the waiting
        # award may move to pending.payment.
        response = self.app.post('/auctions/{}/awards/{}/documents?acc_token={}'.format(
            self.auction_id, waiting_award['id'], self.auction_token), upload_files=[('file', 'auction_protocol.pdf', 'content')])
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        doc_id = response.json["data"]['id']
        response = self.app.patch_json('/auctions/{}/awards/{}/documents/{}?acc_token={}'.format(self.auction_id, waiting_award['id'], doc_id, self.auction_token), {"data": {
            "description": "auction protocol",
            "documentType": 'auctionProtocol'
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json["data"]["documentType"], 'auctionProtocol')
        # Waiting award: pending.waiting -> pending.payment -> active.
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, waiting_award['id'], self.auction_token
        ), {"data": {"status": "pending.payment"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'pending.payment')
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, waiting_award['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        # Activating the award produces a contract; activating the contract
        # completes the auction.
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        contract = response.json['data']['contracts'][0]
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active.awarded')
        response = self.app.patch_json('/auctions/{}/contracts/{}?acc_token={}'.format(
            self.auction_id, contract['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'complete')
    def migrate_active_to_unsuccessful(self):
        """Migrate an awarded auction with a single active award and a
        pending contract, then fail both the active and the generated
        waiting award so the auction ends up 'unsuccessful'.
        """
        auction = self.db.get(self.auction_id)
        now = get_now()
        # Active award for the second bid with a closed complaint period.
        award = {
            'id': uuid4().hex,
            "date": now.isoformat(),
            "bid_id": auction['bids'][1]['id'],
            'suppliers': auction['bids'][1]['tenderers'],
            'value': auction['bids'][1]['value'],
            "status": "active",
            "complaintPeriod": {
                "startDate": now.isoformat(),
                "endDate": now.isoformat()
            }
        }
        auction['awards'] = [award]
        # Shift all auction periods into the past so the document reads as
        # a finished auction awaiting contract signature.
        auction.update({
            "enquiryPeriod": {
                "startDate": (now - timedelta(days=15)).isoformat(),
                "endDate": (now - timedelta(days=7)).isoformat()
            },
            "tenderPeriod": {
                "startDate": (now - timedelta(days=15)).isoformat(),
                "endDate": (now - timedelta(days=1)).isoformat()
            },
            "auctionPeriod": {
                "startDate": (now - timedelta(days=1)).isoformat(),
                "endDate": (now).isoformat()
            },
            "awardPeriod": {
                "startDate": (now).isoformat(),
                "endDate": (now).isoformat()
            }
        })
        contract_id = uuid4().hex
        # Contract attached to the active award.
        auction['contracts'] = [{
            'awardID': award['id'],
            'id': contract_id,
            'suppliers': award['suppliers'],
            'value': award['value'],
            'date': now.isoformat(),
            'items': auction['items'],
            'contractID': '{}-11'.format(auction['auctionID'])}]
        auction['status'] = 'active.awarded'
        auction.update(auction)  # NOTE(review): updating a dict with itself is a no-op
        self.db.save(auction)
        self.runner.migrate(self.steps)
        # Migration keeps the active award (adding the new periods) and
        # appends a pending.waiting award for the other bid.
        auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
        self.assertEqual(len(auction['awards']), 2)
        self.assertEqual(auction['awards'][0]['status'], 'active')
        self.assertIn('verificationPeriod', auction['awards'][0])
        self.assertIn('paymentPeriod', auction['awards'][0])
        self.assertIn('signingPeriod', auction['awards'][0])
        self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active.awarded')
        response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']), 2)
        self.assertEqual(response.json['data'][0]['status'], u'active')
        self.assertEqual(response.json['data'][1]['status'], u'pending.waiting')
        active_award = response.json['data'][0]
        waiting_award = response.json['data'][1]
        # Fail both awards in turn; the auction must become unsuccessful.
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, active_award['id'], self.auction_token
        ), {"data": {"status": "unsuccessful"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, waiting_award['id'], self.auction_token
        ), {"data": {"status": "unsuccessful"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
def migrate_active_to_complete(self):
auction = self.db.get(self.auction_id)
now = get_now()
award = {
'id': uuid4().hex,
"date": now.isoformat(),
"bid_id": auction['bids'][1]['id'],
'suppliers': auction['bids'][1]['tenderers'],
'value': auction['bids'][1]['value'],
"status": "active",
"complaintPeriod": {
"startDate": now.isoformat(),
"endDate": now.isoformat()
}
}
auction['awards'] = [award]
auction.update({
"enquiryPeriod": {
"startDate": (now - timedelta(days=15)).isoformat(),
"endDate": (now - timedelta(days=7)).isoformat()
},
"tenderPeriod": {
"startDate": (now - timedelta(days=15)).isoformat(),
"endDate": (now - timedelta(days=1)).isoformat()
},
"auctionPeriod": {
"startDate": (now - timedelta(days=1)).isoformat(),
"endDate": (now).isoformat()
},
"awardPeriod": {
"startDate": (now).isoformat(),
"endDate": (now).isoformat()
}
})
contract_id = uuid4().hex
auction['contracts'] = [{
'awardID': award['id'],
'id': contract_id,
'suppliers': award['suppliers'],
'value': award['value'],
'date': now.isoformat(),
'items': auction['items'],
'contractID': '{}-11'.format(auction['auctionID'])}]
auction['status'] = 'active.awarded'
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
response = self.app.get('/auctions/{}'.format(self.auction_id))
auction = response.json['data']
self.assertEqual(len(auction['awards']), 2)
self.assertEqual(auction['awards'][0]['status'], 'active')
self.assertIn('verificationPeriod', auction['awards'][0])
self.assertIn('paymentPeriod', auction['awards'][0])
self.assertIn('signingPeriod', auction['awards'][0])
self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(auction['status'], u'active.awarded')
response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(len(response.json['data']), 2)
self.assertEqual(response.json['data'][0]['status'], u'active')
self.assertEqual(response.json['data'][1]['status'], u'pending.waiting')
response = self.app.patch_json('/auctions/{}/contracts/{}?acc_token={}'.format(
self.auction_id, contract_id, self.auction_token
), {"data": {"status": "active"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'complete')
    def migrate_cancelled_pending_to_complete(self):
        """Migrate an auction whose history holds a cancelled award before a
        pending one, then drive the generated waiting award through protocol
        upload, payment and activation, and activate the contract so the
        auction completes.
        """
        auction = self.db.get(self.auction_id)
        # Pending award for the second bid.
        pending_award = {
            'id': uuid4().hex,
            "date": get_now().isoformat(),
            "bid_id": auction['bids'][1]['id'],
            "status": "pending",
            "complaintPeriod": {
                "startDate": get_now().isoformat(),
            }
        }
        # Cancelled award derived from the same template with a closed
        # complaint period.
        cancelled_award = deepcopy(pending_award)
        cancelled_award['id'] = uuid4().hex
        cancelled_award['status'] = 'cancelled'
        cancelled_award['complaintPeriod']['endDate'] = get_now().isoformat()
        auction['awards'] = [cancelled_award, pending_award]
        auction.update(auction)  # NOTE(review): updating a dict with itself is a no-op
        self.db.save(auction)
        self.runner.migrate(self.steps)
        # Migration keeps the cancelled award, upgrades the pending one to
        # pending.payment and appends a pending.waiting runner-up.
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        auction = response.json['data']
        self.assertEqual(auction['status'], u'active.qualification')
        self.assertEqual(len(auction['awards']), 3)
        self.assertEqual(auction['awards'][0]['status'], 'cancelled')
        self.assertEqual(auction['awards'][1]['status'], 'pending.payment')
        self.assertIn('verificationPeriod', auction['awards'][1])
        self.assertIn('paymentPeriod', auction['awards'][1])
        self.assertIn('signingPeriod', auction['awards'][1])
        self.assertEqual(auction['awards'][2]['status'], 'pending.waiting')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']), 3)
        self.assertEqual(response.json['data'][0]['status'], u'cancelled')
        self.assertEqual(response.json['data'][1]['status'], u'pending.payment')
        self.assertEqual(response.json['data'][2]['status'], u'pending.waiting')
        pending_award = response.json['data'][1]
        waiting_award = response.json['data'][2]
        # Fail the payment award so the waiting one takes over.
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, pending_award['id'], self.auction_token
        ), {"data": {"status": "unsuccessful"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        # Upload and tag the auction protocol — required before the waiting
        # award may move to pending.payment.
        response = self.app.post('/auctions/{}/awards/{}/documents?acc_token={}'.format(
            self.auction_id, waiting_award['id'], self.auction_token), upload_files=[('file', 'auction_protocol.pdf', 'content')])
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        doc_id = response.json["data"]['id']
        response = self.app.patch_json('/auctions/{}/awards/{}/documents/{}?acc_token={}'.format(self.auction_id, waiting_award['id'], doc_id, self.auction_token), {"data": {
            "description": "auction protocol",
            "documentType": 'auctionProtocol'
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json["data"]["documentType"], 'auctionProtocol')
        # Waiting award: pending.waiting -> pending.payment -> active.
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, waiting_award['id'], self.auction_token
        ), {"data": {"status": "pending.payment"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'pending.payment')
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, waiting_award['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        # Activating the award yields a contract; activating the contract
        # completes the auction.
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        contract = response.json['data']['contracts'][0]
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active.awarded')
        response = self.app.patch_json('/auctions/{}/contracts/{}?acc_token={}'.format(
            self.auction_id, contract['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'complete')
def migrate_unsuccessful_pending_to_complete(self):
auction = self.db.get(self.auction_id)
pending_award = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][0]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
unsuccessful_award = deepcopy(pending_award)
unsuccessful_award['id'] = uuid4().hex
unsuccessful_award['status'] = 'unsuccessful'
unsuccessful_award['complaintPeriod']['endDate'] = get_now().isoformat()
pending_award['bid_id'] = auction['bids'][1]['id']
auction['awards'] = [unsuccessful_award, pending_award]
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
response = self.app.get('/auctions/{}'.format(self.auction_id))
auction = response.json['data']
self.assertEqual(auction['status'], u'active.qualification')
self.assertEqual(len(auction['awards']), 2)
self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
self.assertEqual(auction['awards'][1]['status'], 'pending.payment')
self.assertIn('verificationPeriod', auction['awards'][1])
self.assertIn('paymentPeriod', auction['awards'][1])
self.assertIn('signingPeriod', auction['awards'][1])
response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(len(response.json['data']), 2)
self.assertEqual(response.json['data'][0]['status'], u'unsuccessful')
self.assertEqual(response.json['data'][1]['status'], u'pending.payment')
pending_award = response.json['data'][1]
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, pending_award['id'], self.auction_token
), {"data": {"status": "active"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active')
response = self.app.get('/auctions/{}'.format(self.auction_id))
contract = response.json['data']['contracts'][0]
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active.awarded')
response = self.app.patch_json('/auctions/{}/contracts/{}?acc_token={}'.format(
self.auction_id, contract['id'], self.auction_token
), {"data": {"status": "active"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'complete')
    def migrate_unsuccessful_active_to_complete(self):
        """Migrate an awarded auction holding an unsuccessful award and the
        active (winning) one: statuses survive migration, and activating the
        pending contract completes the auction.
        """
        auction = self.db.get(self.auction_id)
        now = get_now()
        # Active award for the first bid with a closed complaint period.
        active_award = {
            'id': uuid4().hex,
            "date": now.isoformat(),
            "bid_id": auction['bids'][0]['id'],
            'suppliers': auction['bids'][0]['tenderers'],
            'value': auction['bids'][0]['value'],
            "status": "active",
            "complaintPeriod": {
                "startDate": now.isoformat(),
                "endDate": now.isoformat()
            }
        }
        # Losing award derived from the same template; the active award is
        # then re-pointed at the second bid.
        unsuccessful_award = deepcopy(active_award)
        unsuccessful_award['id'] = uuid4().hex
        unsuccessful_award['status'] = 'unsuccessful'
        active_award['bid_id'] = auction['bids'][1]['id']
        auction['awards'] = [unsuccessful_award, active_award]
        # Shift all auction periods into the past so the document reads as
        # a finished auction awaiting contract signature.
        auction.update({
            "enquiryPeriod": {
                "startDate": (now - timedelta(days=15)).isoformat(),
                "endDate": (now - timedelta(days=9)).isoformat()
            },
            "tenderPeriod": {
                "startDate": (now - timedelta(days=15)).isoformat(),
                "endDate": (now - timedelta(days=1)).isoformat()
            },
            "auctionPeriod": {
                "startDate": (now - timedelta(days=1)).isoformat(),
                "endDate": (now).isoformat()
            },
            "awardPeriod": {
                "startDate": (now).isoformat(),
                "endDate": (now).isoformat()
            }
        })
        # Contract attached to the active award.
        auction['contracts'] = [{
            'awardID': active_award['id'],
            'suppliers': active_award['suppliers'],
            'value': active_award['value'],
            'date': now.isoformat(),
            'items': auction['items'],
            'contractID': '{}-11'.format(auction['auctionID'])}]
        auction['status'] = 'active.awarded'
        auction.update(auction)  # NOTE(review): updating a dict with itself is a no-op
        self.db.save(auction)
        self.runner.migrate(self.steps)
        # Migration keeps both award statuses and adds the new periods to
        # the active award.
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        auction = response.json['data']
        self.assertEqual(auction['status'], u'active.awarded')
        self.assertEqual(len(auction['awards']), 2)
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][1]['status'], 'active')
        self.assertIn('verificationPeriod', auction['awards'][1])
        self.assertIn('paymentPeriod', auction['awards'][1])
        self.assertIn('signingPeriod', auction['awards'][1])
        response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']), 2)
        self.assertEqual(response.json['data'][0]['status'], u'unsuccessful')
        self.assertEqual(response.json['data'][1]['status'], u'active')
        # Activating the contract completes the auction.
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        contract = response.json['data']['contracts'][0]
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active.awarded')
        response = self.app.patch_json('/auctions/{}/contracts/{}?acc_token={}'.format(
            self.auction_id, contract['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'complete')
def migrate_cancelled_unsuccessful_pending(self):
auction = self.db.get(self.auction_id)
pending_award = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][1]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
unsuccessful_award = deepcopy(pending_award)
unsuccessful_award['complaintPeriod']['endDate'] = get_now().isoformat()
unsuccessful_award['id'] = uuid4().hex
unsuccessful_award['status'] = 'unsuccessful'
cancelled_award = deepcopy(unsuccessful_award)
cancelled_award['id'] = uuid4().hex
cancelled_award['status'] = 'cancelled'
pending_award['bid_id'] = auction['bids'][0]['id']
auction['awards'] = [cancelled_award, unsuccessful_award, pending_award]
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
response = self.app.get('/auctions/{}'.format(self.auction_id))
auction = response.json['data']
self.assertEqual(auction['status'], u'active.qualification')
self.assertEqual(len(auction['awards']), 3)
self.assertEqual(auction['awards'][0]['status'], 'cancelled')
self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
self.assertEqual(auction['awards'][2]['status'], 'pending.payment')
self.assertIn('verificationPeriod', auction['awards'][2])
self.assertIn('paymentPeriod', auction['awards'][2])
response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(len(response.json['data']), 3)
self.assertEqual(response.json['data'][0]['status'], u'cancelled')
self.assertEqual(response.json['data'][1]['status'], u'unsuccessful')
self.assertEqual(response.json['data'][2]['status'], u'pending.payment')
pending_award = response.json['data'][2]
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, pending_award['id'], self.auction_token
), {"data": {"status": "active"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active')
response = self.app.get('/auctions/{}'.format(self.auction_id))
contract = response.json['data']['contracts'][0]
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active.awarded')
response = self.app.patch_json('/auctions/{}/contracts/{}?acc_token={}'.format(
self.auction_id, contract['id'], self.auction_token
), {"data": {"status": "active"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'complete')
    def migrate_cancelled_unsuccessful_cancelled_pending_to_unsuccessful(self):
        """Migrate an auction with cancelled/unsuccessful/cancelled/pending
        awards, activate the migrated pending award and then fail it: the
        whole auction must finish as 'unsuccessful'.
        """
        auction = self.db.get(self.auction_id)
        # Pending award template for the second bid.
        pending_award = {
            'id': uuid4().hex,
            "date": get_now().isoformat(),
            "bid_id": auction['bids'][1]['id'],
            "status": "pending",
            "complaintPeriod": {
                "startDate": get_now().isoformat(),
            }
        }
        # Closed unsuccessful award, a cancelled copy of it, and a second
        # cancelled award that shares the pending award's bid (the first bid).
        unsuccessful_award = deepcopy(pending_award)
        unsuccessful_award['complaintPeriod']['endDate'] = get_now().isoformat()
        unsuccessful_award['id'] = uuid4().hex
        unsuccessful_award['status'] = 'unsuccessful'
        cancelled_award = deepcopy(unsuccessful_award)
        cancelled_award['id'] = uuid4().hex
        cancelled_award['status'] = 'cancelled'
        cancelled_award2 = deepcopy(cancelled_award)
        cancelled_award2['bid_id'] = pending_award['bid_id'] = auction['bids'][0]['id']
        auction['awards'] = [cancelled_award, unsuccessful_award, cancelled_award2, pending_award]
        auction.update(auction)  # NOTE(review): updating a dict with itself is a no-op
        self.db.save(auction)
        self.runner.migrate(self.steps)
        # Migration keeps the history and upgrades only the pending award.
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        auction = response.json['data']
        self.assertEqual(auction['status'], u'active.qualification')
        self.assertEqual(len(auction['awards']), 4)
        self.assertEqual(auction['awards'][0]['status'], 'cancelled')
        self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][2]['status'], 'cancelled')
        self.assertEqual(auction['awards'][3]['status'], 'pending.payment')
        self.assertIn('verificationPeriod', auction['awards'][3])
        self.assertIn('paymentPeriod', auction['awards'][3])
        # Activate the migrated award (ids are preserved, so the seed dict's
        # id is still valid), then fail it.
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, pending_award['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active.awarded')
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, pending_award['id'], self.auction_token
        ), {"data": {"status": "unsuccessful"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        # No awards remain, so the auction ends unsuccessful.
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']['awards']), 4)
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']), 4)
        self.assertEqual(response.json['data'][0]['status'], u'cancelled')
        self.assertEqual(response.json['data'][1]['status'], u'unsuccessful')
        self.assertEqual(response.json['data'][2]['status'], u'cancelled')
        self.assertEqual(response.json['data'][3]['status'], u'unsuccessful')
    def migrate_cancelled_unsuccessful_cancelled_active_to_unsuccessful(self):
        """Migrate an auction whose award history is
        cancelled/unsuccessful/cancelled/active, then drive the remaining
        active award to 'unsuccessful' and verify the whole auction ends up
        'unsuccessful'.
        """
        auction = self.db.get(self.auction_id)
        now = get_now()
        # Active award for the second bidder; its complaint period is already
        # closed (both startDate and endDate set).
        active_award = {
            'id': uuid4().hex,
            "date": now.isoformat(),
            "bid_id": auction['bids'][1]['id'],
            'suppliers': auction['bids'][1]['tenderers'],
            'value': auction['bids'][1]['value'],
            "status": "active",
            "complaintPeriod": {
                "startDate": now.isoformat(),
                "endDate": now.isoformat()
            }
        }
        # Derive the other three historical awards from the active one.
        unsuccessful_award = deepcopy(active_award)
        unsuccessful_award['id'] = uuid4().hex
        unsuccessful_award['status'] = 'unsuccessful'
        cancelled_award = deepcopy(unsuccessful_award)
        cancelled_award['id'] = uuid4().hex
        cancelled_award['status'] = 'cancelled'
        cancelled_award2 = deepcopy(cancelled_award)
        # Re-point the second cancelled award AND the active award at bid 0
        # (chained assignment updates both dicts).
        cancelled_award2['bid_id'] = active_award['bid_id'] = auction['bids'][0]['id']
        auction['awards'] = [cancelled_award, unsuccessful_award, cancelled_award2, active_award]
        # Shift every period into the past so the auction looks finished and
        # already awarded at migration time.
        auction.update({
            "enquiryPeriod": {
                "startDate": (now - timedelta(days=15)).isoformat(),
                "endDate": (now - timedelta(days=7)).isoformat()
            },
            "tenderPeriod": {
                "startDate": (now - timedelta(days=15)).isoformat(),
                "endDate": (now - timedelta(days=1)).isoformat()
            },
            "auctionPeriod": {
                "startDate": (now - timedelta(days=1)).isoformat(),
                "endDate": (now).isoformat()
            },
            "awardPeriod": {
                "startDate": (now).isoformat(),
                "endDate": (now).isoformat()
            }
        })
        # A contract for the active award must exist in 'active.awarded'.
        auction['contracts'] = [{
            'awardID': active_award['id'],
            'suppliers': active_award['suppliers'],
            'value': active_award['value'],
            'date': now.isoformat(),
            'items': auction['items'],
            'contractID': '{}-11'.format(auction['auctionID'])}]
        auction['status'] = 'active.awarded'
        auction.update(auction)
        self.db.save(auction)
        self.runner.migrate(self.steps)
        # Migration must preserve the award count and statuses, and enrich the
        # active award with the new verification/payment/signing periods.
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        auction = response.json['data']
        self.assertEqual(auction['status'], u'active.awarded')
        self.assertEqual(len(auction['awards']), 4)
        self.assertEqual(auction['awards'][0]['status'], 'cancelled')
        self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][2]['status'], 'cancelled')
        self.assertEqual(auction['awards'][3]['status'], 'active')
        self.assertIn('verificationPeriod', auction['awards'][3])
        self.assertIn('paymentPeriod', auction['awards'][3])
        self.assertIn('signingPeriod', auction['awards'][3])
        # Disqualify the active award via the API ...
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, active_award['id'], self.auction_token
        ), {"data": {"status": "unsuccessful"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        # ... which, with no awards left, makes the whole auction unsuccessful.
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']['awards']), 4)
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']), 4)
        self.assertEqual(response.json['data'][0]['status'], u'cancelled')
        self.assertEqual(response.json['data'][1]['status'], u'unsuccessful')
        self.assertEqual(response.json['data'][2]['status'], u'cancelled')
        self.assertEqual(response.json['data'][3]['status'], u'unsuccessful')
def migrate_awards_number(self):
auction = self.db.get(self.auction_id)
award_1 = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][0]['id'],
"status": "active",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
award_2 = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][0]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
award_3 = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][1]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
auction['awards'] = [award_1, award_2, award_3]
auction.update(auction)
awards_num = len(auction['awards'])
self.db.save(auction)
self.runner.migrate(self.steps)
auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
migrated_awards_num = len(auction['awards'])
self.assertEqual(awards_num, migrated_awards_num)
    # The methods below exercise migration scenarios for the
    # MigrateTestFrom1To2WithThreeBids test case (three bids available).
def migrate_unsuccessful_unsuccessful_pending(self):
auction = self.db.get(self.auction_id)
pending_award = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][0]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
unsuccessful_award = deepcopy(pending_award)
unsuccessful_award['complaintPeriod']['endDate'] = get_now().isoformat()
unsuccessful_award['id'] = uuid4().hex
unsuccessful_award['status'] = 'unsuccessful'
unsuccessful_award2 = deepcopy(unsuccessful_award)
unsuccessful_award['bid_id'] = auction['bids'][2]['id']
unsuccessful_award2['bid_id'] = auction['bids'][1]['id']
auction['awards'] = [unsuccessful_award, unsuccessful_award2, pending_award]
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
response = self.app.get('/auctions/{}'.format(self.auction_id))
auction = response.json['data']
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(auction['status'], u'active.qualification')
self.assertEqual(len(auction['awards']), 3)
self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
self.assertEqual(auction['awards'][2]['status'], 'pending.payment')
def migrate_unsuccessful_unsuccessful_active(self):
auction = self.db.get(self.auction_id)
now = get_now()
active_award = {
'id': uuid4().hex,
"date": now.isoformat(),
"bid_id": auction['bids'][0]['id'],
'suppliers': auction['bids'][0]['tenderers'],
'value': auction['bids'][0]['value'],
"status": "active",
"complaintPeriod": {
"startDate": now.isoformat(),
"endDate": now.isoformat()
}
}
unsuccessful_award = deepcopy(active_award)
unsuccessful_award['id'] = uuid4().hex
unsuccessful_award['status'] = 'unsuccessful'
auction.update({
"enquiryPeriod": {
"startDate": (now - timedelta(days=8)).isoformat(),
"endDate": (now - timedelta(days=1)).isoformat()
},
"tenderPeriod": {
"startDate": (now - timedelta(days=8)).isoformat(),
"endDate": (now - timedelta(days=1)).isoformat()
},
"auctionPeriod": {
"startDate": (now - timedelta(days=1)).isoformat(),
"endDate": (now).isoformat()
},
"awardPeriod": {
"startDate": (now).isoformat(),
"endDate": (now).isoformat()
}
})
auction['contracts'] = [{
'awardID': active_award['id'],
'suppliers': active_award['suppliers'],
'value': active_award['value'],
'date': now.isoformat(),
'items': auction['items'],
'contractID': '{}-11'.format(auction['auctionID'])}]
auction['status'] = 'active.awarded'
unsuccessful_award = deepcopy(active_award)
unsuccessful_award['complaintPeriod']['endDate'] = get_now().isoformat()
unsuccessful_award['id'] = uuid4().hex
unsuccessful_award['status'] = 'unsuccessful'
unsuccessful_award2 = deepcopy(unsuccessful_award)
unsuccessful_award['bid_id'] = auction['bids'][2]['id']
unsuccessful_award2['bid_id'] = auction['bids'][1]['id']
auction['awards'] = [unsuccessful_award, unsuccessful_award2, active_award]
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
response = self.app.get('/auctions/{}'.format(self.auction_id))
auction = response.json['data']
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(auction['status'], u'active.awarded')
self.assertEqual(len(auction['awards']), 3)
self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
self.assertEqual(auction['awards'][2]['status'], 'active')
self.assertEqual(auction['contracts'][0]['status'], 'pending')
    def migrate_dgfId_to_lotIdentefier(self):
        """Check the migration renames the legacy 'dgfID' field to
        'lotIdentifier' while preserving its value.
        """
        auction = self.db.get(self.auction_id)
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        db_auction = response.json['data']
        self.assertEqual(db_auction['lotIdentifier'], auction['lotIdentifier'])
        # Simulate pre-migration data: move the value back under the old key.
        auction['dgfID'] = auction.pop('lotIdentifier')
        self.assertEqual(db_auction['lotIdentifier'], auction['dgfID'])
        self.db.save(auction)
        self.assertTrue('dgfID' in self.db.get(self.auction_id))
        self.runner.migrate(self.steps)
        # After migration the old key is gone and the new one is present.
        self.assertFalse('dgfID' in self.db.get(self.auction_id))
        self.assertTrue('lotIdentifier' in self.db.get(self.auction_id))
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        db_auction = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        lotIdentifier = db_auction.get('lotIdentifier', None)
        self.assertIsNotNone(lotIdentifier)
from copy import deepcopy
from datetime import timedelta
from uuid import uuid4
from openprocurement.auctions.core.utils import get_now
def migrate_one_pending(self):
auction = self.db.get(self.auction_id)
award = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][1]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
auction['awards'] = [award]
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
self.assertEqual(len(auction['awards']), 2)
self.assertEqual(auction['awards'][0]['status'], 'pending.payment')
self.assertIn('verificationPeriod', auction['awards'][0])
self.assertIn('paymentPeriod', auction['awards'][0])
self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
self.assertEqual(auction['bids'][0]['status'], 'active')
self.assertEqual(auction['bids'][1]['status'], 'active')
self.assertEqual(auction['status'], 'active.qualification')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active.qualification')
response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(len(response.json['data']), 2)
self.assertEqual(response.json['data'][0]['status'], u'pending.payment')
self.assertEqual(response.json['data'][1]['status'], u'pending.waiting')
pending_award = response.json['data'][0]
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, pending_award['id'], self.auction_token
), {"data": {"status": "unsuccessful"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'unsuccessful')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active.qualification')
response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data'][1]['status'], u'pending.verification')
def migrate_one_active(self):
auction = self.db.get(self.auction_id)
now = get_now()
award = {
'id': uuid4().hex,
"date": now.isoformat(),
"bid_id": auction['bids'][1]['id'],
'suppliers': auction['bids'][1]['tenderers'],
'value': auction['bids'][1]['value'],
"status": "active",
"complaintPeriod": {
"startDate": now.isoformat(),
"endDate": now.isoformat()
}
}
auction['awards'] = [award]
auction.update({
"enquiryPeriod": {
"startDate": (now - timedelta(days=15)).isoformat(),
"endDate": (now - timedelta(days=7)).isoformat()
},
"tenderPeriod": {
"startDate": (now - timedelta(days=15)).isoformat(),
"endDate": (now - timedelta(days=1)).isoformat()
},
"auctionPeriod": {
"startDate": (now - timedelta(days=1)).isoformat(),
"endDate": (now).isoformat()
},
"awardPeriod": {
"startDate": (now).isoformat(),
"endDate": (now).isoformat()
}
})
contract_id = uuid4().hex
auction['contracts'] = [{
'awardID': award['id'],
'id': contract_id,
'suppliers': award['suppliers'],
'value': award['value'],
'date': now.isoformat(),
'items': auction['items'],
'contractID': '{}-11'.format(auction['auctionID'])}]
auction['status'] = 'active.awarded'
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
self.assertEqual(len(auction['awards']), 2)
self.assertEqual(auction['awards'][0]['status'], 'active')
self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
self.assertIn('verificationPeriod', auction['awards'][0])
self.assertIn('paymentPeriod', auction['awards'][0])
self.assertIn('signingPeriod', auction['awards'][0])
self.assertEqual(auction['bids'][0]['status'], 'active')
self.assertEqual(auction['bids'][1]['status'], 'active')
self.assertEqual(auction['contracts'][0]['status'], 'pending')
self.assertEqual(auction['status'], 'active.awarded')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active.awarded')
response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(len(response.json['data']), 2)
self.assertEqual(response.json['data'][0]['status'], u'active')
self.assertEqual(response.json['data'][1]['status'], u'pending.waiting')
active_award = response.json['data'][0]
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, active_award['id'], self.auction_token
), {"data": {"status": "unsuccessful"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'unsuccessful')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active.qualification')
response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data'][1]['status'], u'pending.verification')
def migrate_unsuccessful_pending(self):
auction = self.db.get(self.auction_id)
pending_award = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][1]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
unsuccessful_award = deepcopy(pending_award)
unsuccessful_award['id'] = uuid4().hex
unsuccessful_award['status'] = 'unsuccessful'
unsuccessful_award['complaintPeriod']['endDate'] = get_now().isoformat()
pending_award['bid_id'] = auction['bids'][0]['id']
auction['awards'] = [unsuccessful_award, pending_award]
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
self.assertEqual(len(auction['awards']), 2)
self.assertEqual(auction['bids'][0]['status'], 'active')
self.assertEqual(auction['bids'][1]['status'], 'active')
self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
self.assertEqual(auction['awards'][1]['status'], 'pending.payment')
self.assertEqual(auction['status'], 'active.qualification')
def migrate_unsuccessful_active(self):
auction = self.db.get(self.auction_id)
now = get_now()
active_award = {
'id': uuid4().hex,
"date": now.isoformat(),
"bid_id": auction['bids'][1]['id'],
'suppliers': auction['bids'][1]['tenderers'],
'value': auction['bids'][1]['value'],
"status": "active",
"complaintPeriod": {
"startDate": now.isoformat(),
"endDate": now.isoformat()
}
}
unsuccessful_award = deepcopy(active_award)
unsuccessful_award['id'] = uuid4().hex
unsuccessful_award['status'] = 'unsuccessful'
active_award['bid_id'] = auction['bids'][0]['id']
auction['awards'] = [unsuccessful_award, active_award]
auction.update({
"enquiryPeriod": {
"startDate": (now - timedelta(days=8)).isoformat(),
"endDate": (now - timedelta(days=1)).isoformat()
},
"tenderPeriod": {
"startDate": (now - timedelta(days=8)).isoformat(),
"endDate": (now - timedelta(days=1)).isoformat()
},
"auctionPeriod": {
"startDate": (now - timedelta(days=1)).isoformat(),
"endDate": (now).isoformat()
},
"awardPeriod": {
"startDate": (now).isoformat(),
"endDate": (now).isoformat()
}
})
auction['contracts'] = [{
'awardID': active_award['id'],
'suppliers': active_award['suppliers'],
'value': active_award['value'],
'date': now.isoformat(),
'items': auction['items'],
'contractID': '{}-11'.format(auction['auctionID'])}]
auction['status'] = 'active.awarded'
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
self.assertEqual(len(auction['awards']), 2)
self.assertEqual(auction['bids'][0]['status'], 'active')
self.assertEqual(auction['bids'][1]['status'], 'active')
self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
self.assertEqual(auction['awards'][1]['status'], 'active')
self.assertEqual(auction['contracts'][0]['status'], 'pending')
self.assertEqual(auction['status'], 'active.awarded')
def migrate_pending_to_unsuccesful(self):
auction = self.db.get(self.auction_id)
award = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][1]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
auction['awards'] = [award]
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
self.assertEqual(len(auction['awards']), 2)
self.assertEqual(auction['awards'][0]['status'], 'pending.payment')
self.assertIn('verificationPeriod', auction['awards'][0])
self.assertIn('paymentPeriod', auction['awards'][0])
self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active.qualification')
response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(len(response.json['data']), 2)
self.assertEqual(response.json['data'][0]['status'], u'pending.payment')
self.assertEqual(response.json['data'][1]['status'], u'pending.waiting')
pending_award = response.json['data'][0]
waiting_award = response.json['data'][1]
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, pending_award['id'], self.auction_token
), {"data": {"status": "unsuccessful"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'unsuccessful')
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, waiting_award['id'], self.auction_token
), {"data": {"status": "unsuccessful"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'unsuccessful')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'unsuccessful')
def migrate_pending_to_complete(self):
auction = self.db.get(self.auction_id)
award = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][1]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
auction['awards'] = [award]
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
self.assertEqual(len(auction['awards']), 2)
self.assertEqual(auction['awards'][0]['status'], 'pending.payment')
self.assertIn('verificationPeriod', auction['awards'][0])
self.assertIn('paymentPeriod', auction['awards'][0])
self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active.qualification')
response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(len(response.json['data']), 2)
self.assertEqual(response.json['data'][0]['status'], u'pending.payment')
self.assertEqual(response.json['data'][1]['status'], u'pending.waiting')
pending_award = response.json['data'][0]
waiting_award = response.json['data'][1]
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, pending_award['id'], self.auction_token
), {"data": {"status": "unsuccessful"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'unsuccessful')
response = self.app.post('/auctions/{}/awards/{}/documents?acc_token={}'.format(
self.auction_id, waiting_award['id'], self.auction_token), upload_files=[('file', 'auction_protocol.pdf', 'content')])
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
doc_id = response.json["data"]['id']
response = self.app.patch_json('/auctions/{}/awards/{}/documents/{}?acc_token={}'.format(self.auction_id, waiting_award['id'], doc_id, self.auction_token), {"data": {
"description": "auction protocol",
"documentType": 'auctionProtocol'
}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json["data"]["documentType"], 'auctionProtocol')
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, waiting_award['id'], self.auction_token
), {"data": {"status": "pending.payment"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'pending.payment')
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, waiting_award['id'], self.auction_token
), {"data": {"status": "active"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active')
response = self.app.get('/auctions/{}'.format(self.auction_id))
contract = response.json['data']['contracts'][0]
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active.awarded')
response = self.app.patch_json('/auctions/{}/contracts/{}?acc_token={}'.format(
self.auction_id, contract['id'], self.auction_token
), {"data": {"status": "active"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'complete')
def migrate_active_to_unsuccessful(self):
auction = self.db.get(self.auction_id)
now = get_now()
award = {
'id': uuid4().hex,
"date": now.isoformat(),
"bid_id": auction['bids'][1]['id'],
'suppliers': auction['bids'][1]['tenderers'],
'value': auction['bids'][1]['value'],
"status": "active",
"complaintPeriod": {
"startDate": now.isoformat(),
"endDate": now.isoformat()
}
}
auction['awards'] = [award]
auction.update({
"enquiryPeriod": {
"startDate": (now - timedelta(days=15)).isoformat(),
"endDate": (now - timedelta(days=7)).isoformat()
},
"tenderPeriod": {
"startDate": (now - timedelta(days=15)).isoformat(),
"endDate": (now - timedelta(days=1)).isoformat()
},
"auctionPeriod": {
"startDate": (now - timedelta(days=1)).isoformat(),
"endDate": (now).isoformat()
},
"awardPeriod": {
"startDate": (now).isoformat(),
"endDate": (now).isoformat()
}
})
contract_id = uuid4().hex
auction['contracts'] = [{
'awardID': award['id'],
'id': contract_id,
'suppliers': award['suppliers'],
'value': award['value'],
'date': now.isoformat(),
'items': auction['items'],
'contractID': '{}-11'.format(auction['auctionID'])}]
auction['status'] = 'active.awarded'
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
self.assertEqual(len(auction['awards']), 2)
self.assertEqual(auction['awards'][0]['status'], 'active')
self.assertIn('verificationPeriod', auction['awards'][0])
self.assertIn('paymentPeriod', auction['awards'][0])
self.assertIn('signingPeriod', auction['awards'][0])
self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active.awarded')
response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(len(response.json['data']), 2)
self.assertEqual(response.json['data'][0]['status'], u'active')
self.assertEqual(response.json['data'][1]['status'], u'pending.waiting')
active_award = response.json['data'][0]
waiting_award = response.json['data'][1]
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, active_award['id'], self.auction_token
), {"data": {"status": "unsuccessful"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'unsuccessful')
response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
self.auction_id, waiting_award['id'], self.auction_token
), {"data": {"status": "unsuccessful"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'unsuccessful')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'unsuccessful')
def migrate_active_to_complete(self):
auction = self.db.get(self.auction_id)
now = get_now()
award = {
'id': uuid4().hex,
"date": now.isoformat(),
"bid_id": auction['bids'][1]['id'],
'suppliers': auction['bids'][1]['tenderers'],
'value': auction['bids'][1]['value'],
"status": "active",
"complaintPeriod": {
"startDate": now.isoformat(),
"endDate": now.isoformat()
}
}
auction['awards'] = [award]
auction.update({
"enquiryPeriod": {
"startDate": (now - timedelta(days=15)).isoformat(),
"endDate": (now - timedelta(days=7)).isoformat()
},
"tenderPeriod": {
"startDate": (now - timedelta(days=15)).isoformat(),
"endDate": (now - timedelta(days=1)).isoformat()
},
"auctionPeriod": {
"startDate": (now - timedelta(days=1)).isoformat(),
"endDate": (now).isoformat()
},
"awardPeriod": {
"startDate": (now).isoformat(),
"endDate": (now).isoformat()
}
})
contract_id = uuid4().hex
auction['contracts'] = [{
'awardID': award['id'],
'id': contract_id,
'suppliers': award['suppliers'],
'value': award['value'],
'date': now.isoformat(),
'items': auction['items'],
'contractID': '{}-11'.format(auction['auctionID'])}]
auction['status'] = 'active.awarded'
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
response = self.app.get('/auctions/{}'.format(self.auction_id))
auction = response.json['data']
self.assertEqual(len(auction['awards']), 2)
self.assertEqual(auction['awards'][0]['status'], 'active')
self.assertIn('verificationPeriod', auction['awards'][0])
self.assertIn('paymentPeriod', auction['awards'][0])
self.assertIn('signingPeriod', auction['awards'][0])
self.assertEqual(auction['awards'][1]['status'], 'pending.waiting')
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(auction['status'], u'active.awarded')
response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(len(response.json['data']), 2)
self.assertEqual(response.json['data'][0]['status'], u'active')
self.assertEqual(response.json['data'][1]['status'], u'pending.waiting')
response = self.app.patch_json('/auctions/{}/contracts/{}?acc_token={}'.format(
self.auction_id, contract_id, self.auction_token
), {"data": {"status": "active"}})
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'active')
response = self.app.get('/auctions/{}'.format(self.auction_id))
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['data']['status'], u'complete')
    def migrate_cancelled_pending_to_complete(self):
        """Migrate [cancelled, pending] awards and drive the auction to 'complete'.

        Seeds the auction with an old-style cancelled award plus a pending
        award, runs the migration, and verifies the pending award becomes
        'pending.payment' (with the new period fields) and a 'pending.waiting'
        award appears; then walks the waiting award through protocol upload,
        payment and activation until the contract is signed and the auction
        reaches 'complete'.
        """
        auction = self.db.get(self.auction_id)
        # Legacy-format 'pending' award for the second bidder.
        pending_award = {
            'id': uuid4().hex,
            "date": get_now().isoformat(),
            "bid_id": auction['bids'][1]['id'],
            "status": "pending",
            "complaintPeriod": {
                "startDate": get_now().isoformat(),
            }
        }
        cancelled_award = deepcopy(pending_award)
        cancelled_award['id'] = uuid4().hex
        cancelled_award['status'] = 'cancelled'
        cancelled_award['complaintPeriod']['endDate'] = get_now().isoformat()
        auction['awards'] = [cancelled_award, pending_award]
        auction.update(auction)
        self.db.save(auction)
        # Run the migration steps under test.
        self.runner.migrate(self.steps)
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        auction = response.json['data']
        self.assertEqual(auction['status'], u'active.qualification')
        self.assertEqual(len(auction['awards']), 3)
        self.assertEqual(auction['awards'][0]['status'], 'cancelled')
        self.assertEqual(auction['awards'][1]['status'], 'pending.payment')
        # New-style awards must carry the migrated period fields.
        self.assertIn('verificationPeriod', auction['awards'][1])
        self.assertIn('paymentPeriod', auction['awards'][1])
        self.assertIn('signingPeriod', auction['awards'][1])
        self.assertEqual(auction['awards'][2]['status'], 'pending.waiting')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']), 3)
        self.assertEqual(response.json['data'][0]['status'], u'cancelled')
        self.assertEqual(response.json['data'][1]['status'], u'pending.payment')
        self.assertEqual(response.json['data'][2]['status'], u'pending.waiting')
        pending_award = response.json['data'][1]
        waiting_award = response.json['data'][2]
        # Disqualify the migrated pending.payment award ...
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, pending_award['id'], self.auction_token
        ), {"data": {"status": "unsuccessful"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        # ... then qualify the waiting award: upload the auction protocol first.
        response = self.app.post('/auctions/{}/awards/{}/documents?acc_token={}'.format(
            self.auction_id, waiting_award['id'], self.auction_token), upload_files=[('file', 'auction_protocol.pdf', 'content')])
        self.assertEqual(response.status, '201 Created')
        self.assertEqual(response.content_type, 'application/json')
        doc_id = response.json["data"]['id']
        response = self.app.patch_json('/auctions/{}/awards/{}/documents/{}?acc_token={}'.format(self.auction_id, waiting_award['id'], doc_id, self.auction_token), {"data": {
            "description": "auction protocol",
            "documentType": 'auctionProtocol'
        }})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json["data"]["documentType"], 'auctionProtocol')
        # Waiting award goes pending.payment -> active.
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, waiting_award['id'], self.auction_token
        ), {"data": {"status": "pending.payment"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'pending.payment')
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, waiting_award['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        # Activating the award yields a contract; signing it completes the auction.
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        contract = response.json['data']['contracts'][0]
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active.awarded')
        response = self.app.patch_json('/auctions/{}/contracts/{}?acc_token={}'.format(
            self.auction_id, contract['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'complete')
    def migrate_unsuccessful_pending_to_complete(self):
        """Migrate [unsuccessful, pending] awards and drive the auction to 'complete'.

        The pending award must migrate to 'pending.payment'; activating it and
        then the resulting contract must complete the auction.
        """
        auction = self.db.get(self.auction_id)
        # Legacy-format 'pending' award (bid_id reassigned to bid[1] below).
        pending_award = {
            'id': uuid4().hex,
            "date": get_now().isoformat(),
            "bid_id": auction['bids'][0]['id'],
            "status": "pending",
            "complaintPeriod": {
                "startDate": get_now().isoformat(),
            }
        }
        unsuccessful_award = deepcopy(pending_award)
        unsuccessful_award['id'] = uuid4().hex
        unsuccessful_award['status'] = 'unsuccessful'
        unsuccessful_award['complaintPeriod']['endDate'] = get_now().isoformat()
        pending_award['bid_id'] = auction['bids'][1]['id']
        auction['awards'] = [unsuccessful_award, pending_award]
        auction.update(auction)
        self.db.save(auction)
        self.runner.migrate(self.steps)
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        auction = response.json['data']
        self.assertEqual(auction['status'], u'active.qualification')
        self.assertEqual(len(auction['awards']), 2)
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][1]['status'], 'pending.payment')
        # New-style awards must carry the migrated period fields.
        self.assertIn('verificationPeriod', auction['awards'][1])
        self.assertIn('paymentPeriod', auction['awards'][1])
        self.assertIn('signingPeriod', auction['awards'][1])
        response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']), 2)
        self.assertEqual(response.json['data'][0]['status'], u'unsuccessful')
        self.assertEqual(response.json['data'][1]['status'], u'pending.payment')
        pending_award = response.json['data'][1]
        # Activate the migrated award, then sign its contract.
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, pending_award['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        contract = response.json['data']['contracts'][0]
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active.awarded')
        response = self.app.patch_json('/auctions/{}/contracts/{}?acc_token={}'.format(
            self.auction_id, contract['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'complete')
    def migrate_unsuccessful_active_to_complete(self):
        """Migrate [unsuccessful, active] awards on an already-awarded auction.

        Seeds an awarded auction (periods shifted into the past, a contract
        already attached) and verifies the active award survives migration
        with the new period fields, after which signing the contract moves
        the auction to 'complete'.
        """
        auction = self.db.get(self.auction_id)
        now = get_now()
        # Legacy-format 'active' award (bid_id reassigned to bid[1] below).
        active_award = {
            'id': uuid4().hex,
            "date": now.isoformat(),
            "bid_id": auction['bids'][0]['id'],
            'suppliers': auction['bids'][0]['tenderers'],
            'value': auction['bids'][0]['value'],
            "status": "active",
            "complaintPeriod": {
                "startDate": now.isoformat(),
                "endDate": now.isoformat()
            }
        }
        unsuccessful_award = deepcopy(active_award)
        unsuccessful_award['id'] = uuid4().hex
        unsuccessful_award['status'] = 'unsuccessful'
        active_award['bid_id'] = auction['bids'][1]['id']
        auction['awards'] = [unsuccessful_award, active_award]
        # Shift the auction timeline into the past so the awarded state is
        # chronologically consistent.
        auction.update({
            "enquiryPeriod": {
                "startDate": (now - timedelta(days=15)).isoformat(),
                "endDate": (now - timedelta(days=9)).isoformat()
            },
            "tenderPeriod": {
                "startDate": (now - timedelta(days=15)).isoformat(),
                "endDate": (now - timedelta(days=1)).isoformat()
            },
            "auctionPeriod": {
                "startDate": (now - timedelta(days=1)).isoformat(),
                "endDate": (now).isoformat()
            },
            "awardPeriod": {
                "startDate": (now).isoformat(),
                "endDate": (now).isoformat()
            }
        })
        # Pre-existing contract for the active award.
        auction['contracts'] = [{
            'awardID': active_award['id'],
            'suppliers': active_award['suppliers'],
            'value': active_award['value'],
            'date': now.isoformat(),
            'items': auction['items'],
            'contractID': '{}-11'.format(auction['auctionID'])}]
        auction['status'] = 'active.awarded'
        auction.update(auction)
        self.db.save(auction)
        self.runner.migrate(self.steps)
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        auction = response.json['data']
        self.assertEqual(auction['status'], u'active.awarded')
        self.assertEqual(len(auction['awards']), 2)
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][1]['status'], 'active')
        # New-style awards must carry the migrated period fields.
        self.assertIn('verificationPeriod', auction['awards'][1])
        self.assertIn('paymentPeriod', auction['awards'][1])
        self.assertIn('signingPeriod', auction['awards'][1])
        response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']), 2)
        self.assertEqual(response.json['data'][0]['status'], u'unsuccessful')
        self.assertEqual(response.json['data'][1]['status'], u'active')
        # Sign the contract to complete the auction.
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        contract = response.json['data']['contracts'][0]
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active.awarded')
        response = self.app.patch_json('/auctions/{}/contracts/{}?acc_token={}'.format(
            self.auction_id, contract['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'complete')
    def migrate_cancelled_unsuccessful_pending(self):
        """Migrate [cancelled, unsuccessful, pending] awards.

        The pending award must migrate to 'pending.payment'; activating it
        and signing the resulting contract must complete the auction.
        """
        auction = self.db.get(self.auction_id)
        # Legacy-format 'pending' award (bid_id reassigned to bid[0] below).
        pending_award = {
            'id': uuid4().hex,
            "date": get_now().isoformat(),
            "bid_id": auction['bids'][1]['id'],
            "status": "pending",
            "complaintPeriod": {
                "startDate": get_now().isoformat(),
            }
        }
        unsuccessful_award = deepcopy(pending_award)
        unsuccessful_award['complaintPeriod']['endDate'] = get_now().isoformat()
        unsuccessful_award['id'] = uuid4().hex
        unsuccessful_award['status'] = 'unsuccessful'
        cancelled_award = deepcopy(unsuccessful_award)
        cancelled_award['id'] = uuid4().hex
        cancelled_award['status'] = 'cancelled'
        pending_award['bid_id'] = auction['bids'][0]['id']
        auction['awards'] = [cancelled_award, unsuccessful_award, pending_award]
        auction.update(auction)
        self.db.save(auction)
        self.runner.migrate(self.steps)
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        auction = response.json['data']
        self.assertEqual(auction['status'], u'active.qualification')
        self.assertEqual(len(auction['awards']), 3)
        self.assertEqual(auction['awards'][0]['status'], 'cancelled')
        self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][2]['status'], 'pending.payment')
        self.assertIn('verificationPeriod', auction['awards'][2])
        self.assertIn('paymentPeriod', auction['awards'][2])
        response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']), 3)
        self.assertEqual(response.json['data'][0]['status'], u'cancelled')
        self.assertEqual(response.json['data'][1]['status'], u'unsuccessful')
        self.assertEqual(response.json['data'][2]['status'], u'pending.payment')
        pending_award = response.json['data'][2]
        # Activate the migrated award, then sign its contract.
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, pending_award['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        contract = response.json['data']['contracts'][0]
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active.awarded')
        response = self.app.patch_json('/auctions/{}/contracts/{}?acc_token={}'.format(
            self.auction_id, contract['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'complete')
    def migrate_cancelled_unsuccessful_cancelled_pending_to_unsuccessful(self):
        """Migrate [cancelled, unsuccessful, cancelled, pending] awards.

        The pending award migrates to 'pending.payment'; after it is
        activated and then disqualified, the whole auction must end up
        'unsuccessful' (no more bids to award).
        """
        auction = self.db.get(self.auction_id)
        # Legacy-format 'pending' award (bid_id reassigned to bid[0] below).
        pending_award = {
            'id': uuid4().hex,
            "date": get_now().isoformat(),
            "bid_id": auction['bids'][1]['id'],
            "status": "pending",
            "complaintPeriod": {
                "startDate": get_now().isoformat(),
            }
        }
        unsuccessful_award = deepcopy(pending_award)
        unsuccessful_award['complaintPeriod']['endDate'] = get_now().isoformat()
        unsuccessful_award['id'] = uuid4().hex
        unsuccessful_award['status'] = 'unsuccessful'
        cancelled_award = deepcopy(unsuccessful_award)
        cancelled_award['id'] = uuid4().hex
        cancelled_award['status'] = 'cancelled'
        cancelled_award2 = deepcopy(cancelled_award)
        cancelled_award2['bid_id'] = pending_award['bid_id'] = auction['bids'][0]['id']
        auction['awards'] = [cancelled_award, unsuccessful_award, cancelled_award2, pending_award]
        auction.update(auction)
        self.db.save(auction)
        self.runner.migrate(self.steps)
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        auction = response.json['data']
        self.assertEqual(auction['status'], u'active.qualification')
        self.assertEqual(len(auction['awards']), 4)
        self.assertEqual(auction['awards'][0]['status'], 'cancelled')
        self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][2]['status'], 'cancelled')
        self.assertEqual(auction['awards'][3]['status'], 'pending.payment')
        self.assertIn('verificationPeriod', auction['awards'][3])
        self.assertIn('paymentPeriod', auction['awards'][3])
        # Activate the migrated award (uses the locally-built id, which the
        # migration preserved).
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, pending_award['id'], self.auction_token
        ), {"data": {"status": "active"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'active.awarded')
        # Disqualifying the only active award makes the auction unsuccessful.
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, pending_award['id'], self.auction_token
        ), {"data": {"status": "unsuccessful"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']['awards']), 4)
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']), 4)
        self.assertEqual(response.json['data'][0]['status'], u'cancelled')
        self.assertEqual(response.json['data'][1]['status'], u'unsuccessful')
        self.assertEqual(response.json['data'][2]['status'], u'cancelled')
        self.assertEqual(response.json['data'][3]['status'], u'unsuccessful')
    def migrate_cancelled_unsuccessful_cancelled_active_to_unsuccessful(self):
        """Migrate [cancelled, unsuccessful, cancelled, active] on an awarded auction.

        The active award must keep its status and gain the new period fields;
        disqualifying it afterwards must flip the whole auction to
        'unsuccessful'.
        """
        auction = self.db.get(self.auction_id)
        now = get_now()
        # Legacy-format 'active' award (bid_id reassigned to bid[0] below).
        active_award = {
            'id': uuid4().hex,
            "date": now.isoformat(),
            "bid_id": auction['bids'][1]['id'],
            'suppliers': auction['bids'][1]['tenderers'],
            'value': auction['bids'][1]['value'],
            "status": "active",
            "complaintPeriod": {
                "startDate": now.isoformat(),
                "endDate": now.isoformat()
            }
        }
        unsuccessful_award = deepcopy(active_award)
        unsuccessful_award['id'] = uuid4().hex
        unsuccessful_award['status'] = 'unsuccessful'
        cancelled_award = deepcopy(unsuccessful_award)
        cancelled_award['id'] = uuid4().hex
        cancelled_award['status'] = 'cancelled'
        cancelled_award2 = deepcopy(cancelled_award)
        cancelled_award2['bid_id'] = active_award['bid_id'] = auction['bids'][0]['id']
        auction['awards'] = [cancelled_award, unsuccessful_award, cancelled_award2, active_award]
        # Shift the auction timeline into the past so the awarded state is
        # chronologically consistent.
        auction.update({
            "enquiryPeriod": {
                "startDate": (now - timedelta(days=15)).isoformat(),
                "endDate": (now - timedelta(days=7)).isoformat()
            },
            "tenderPeriod": {
                "startDate": (now - timedelta(days=15)).isoformat(),
                "endDate": (now - timedelta(days=1)).isoformat()
            },
            "auctionPeriod": {
                "startDate": (now - timedelta(days=1)).isoformat(),
                "endDate": (now).isoformat()
            },
            "awardPeriod": {
                "startDate": (now).isoformat(),
                "endDate": (now).isoformat()
            }
        })
        # Pre-existing contract for the active award.
        auction['contracts'] = [{
            'awardID': active_award['id'],
            'suppliers': active_award['suppliers'],
            'value': active_award['value'],
            'date': now.isoformat(),
            'items': auction['items'],
            'contractID': '{}-11'.format(auction['auctionID'])}]
        auction['status'] = 'active.awarded'
        auction.update(auction)
        self.db.save(auction)
        self.runner.migrate(self.steps)
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        auction = response.json['data']
        self.assertEqual(auction['status'], u'active.awarded')
        self.assertEqual(len(auction['awards']), 4)
        self.assertEqual(auction['awards'][0]['status'], 'cancelled')
        self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][2]['status'], 'cancelled')
        self.assertEqual(auction['awards'][3]['status'], 'active')
        self.assertIn('verificationPeriod', auction['awards'][3])
        self.assertIn('paymentPeriod', auction['awards'][3])
        self.assertIn('signingPeriod', auction['awards'][3])
        # Disqualifying the only active award makes the auction unsuccessful.
        response = self.app.patch_json('/auctions/{}/awards/{}?acc_token={}'.format(
            self.auction_id, active_award['id'], self.auction_token
        ), {"data": {"status": "unsuccessful"}})
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']['awards']), 4)
        self.assertEqual(response.json['data']['status'], u'unsuccessful')
        response = self.app.get('/auctions/{}/awards'.format(self.auction_id))
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(len(response.json['data']), 4)
        self.assertEqual(response.json['data'][0]['status'], u'cancelled')
        self.assertEqual(response.json['data'][1]['status'], u'unsuccessful')
        self.assertEqual(response.json['data'][2]['status'], u'cancelled')
        self.assertEqual(response.json['data'][3]['status'], u'unsuccessful')
def migrate_awards_number(self):
auction = self.db.get(self.auction_id)
award_1 = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][0]['id'],
"status": "active",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
award_2 = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][0]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
award_3 = {
'id': uuid4().hex,
"date": get_now().isoformat(),
"bid_id": auction['bids'][1]['id'],
"status": "pending",
"complaintPeriod": {
"startDate": get_now().isoformat(),
}
}
auction['awards'] = [award_1, award_2, award_3]
auction.update(auction)
awards_num = len(auction['awards'])
self.db.save(auction)
self.runner.migrate(self.steps)
auction = self.app.get('/auctions/{}'.format(self.auction_id)).json['data']
migrated_awards_num = len(auction['awards'])
self.assertEqual(awards_num, migrated_awards_num)
    def migrate_unsuccessful_unsuccessful_pending(self):
        """Migrate [unsuccessful, unsuccessful, pending] awards.

        Only checks post-migration statuses: the pending award becomes
        'pending.payment' while both unsuccessful awards are untouched.
        Assumes the fixture auction has at least three bids.
        """
        auction = self.db.get(self.auction_id)
        # Legacy-format 'pending' award for the first bidder.
        pending_award = {
            'id': uuid4().hex,
            "date": get_now().isoformat(),
            "bid_id": auction['bids'][0]['id'],
            "status": "pending",
            "complaintPeriod": {
                "startDate": get_now().isoformat(),
            }
        }
        unsuccessful_award = deepcopy(pending_award)
        unsuccessful_award['complaintPeriod']['endDate'] = get_now().isoformat()
        unsuccessful_award['id'] = uuid4().hex
        unsuccessful_award['status'] = 'unsuccessful'
        unsuccessful_award2 = deepcopy(unsuccessful_award)
        # Point the two unsuccessful awards at the other bids.
        unsuccessful_award['bid_id'] = auction['bids'][2]['id']
        unsuccessful_award2['bid_id'] = auction['bids'][1]['id']
        auction['awards'] = [unsuccessful_award, unsuccessful_award2, pending_award]
        auction.update(auction)
        self.db.save(auction)
        self.runner.migrate(self.steps)
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        auction = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(auction['status'], u'active.qualification')
        self.assertEqual(len(auction['awards']), 3)
        self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
        self.assertEqual(auction['awards'][2]['status'], 'pending.payment')
def migrate_unsuccessful_unsuccessful_active(self):
auction = self.db.get(self.auction_id)
now = get_now()
active_award = {
'id': uuid4().hex,
"date": now.isoformat(),
"bid_id": auction['bids'][0]['id'],
'suppliers': auction['bids'][0]['tenderers'],
'value': auction['bids'][0]['value'],
"status": "active",
"complaintPeriod": {
"startDate": now.isoformat(),
"endDate": now.isoformat()
}
}
unsuccessful_award = deepcopy(active_award)
unsuccessful_award['id'] = uuid4().hex
unsuccessful_award['status'] = 'unsuccessful'
auction.update({
"enquiryPeriod": {
"startDate": (now - timedelta(days=8)).isoformat(),
"endDate": (now - timedelta(days=1)).isoformat()
},
"tenderPeriod": {
"startDate": (now - timedelta(days=8)).isoformat(),
"endDate": (now - timedelta(days=1)).isoformat()
},
"auctionPeriod": {
"startDate": (now - timedelta(days=1)).isoformat(),
"endDate": (now).isoformat()
},
"awardPeriod": {
"startDate": (now).isoformat(),
"endDate": (now).isoformat()
}
})
auction['contracts'] = [{
'awardID': active_award['id'],
'suppliers': active_award['suppliers'],
'value': active_award['value'],
'date': now.isoformat(),
'items': auction['items'],
'contractID': '{}-11'.format(auction['auctionID'])}]
auction['status'] = 'active.awarded'
unsuccessful_award = deepcopy(active_award)
unsuccessful_award['complaintPeriod']['endDate'] = get_now().isoformat()
unsuccessful_award['id'] = uuid4().hex
unsuccessful_award['status'] = 'unsuccessful'
unsuccessful_award2 = deepcopy(unsuccessful_award)
unsuccessful_award['bid_id'] = auction['bids'][2]['id']
unsuccessful_award2['bid_id'] = auction['bids'][1]['id']
auction['awards'] = [unsuccessful_award, unsuccessful_award2, active_award]
auction.update(auction)
self.db.save(auction)
self.runner.migrate(self.steps)
response = self.app.get('/auctions/{}'.format(self.auction_id))
auction = response.json['data']
self.assertEqual(response.status, '200 OK')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(auction['status'], u'active.awarded')
self.assertEqual(len(auction['awards']), 3)
self.assertEqual(auction['awards'][0]['status'], 'unsuccessful')
self.assertEqual(auction['awards'][1]['status'], 'unsuccessful')
self.assertEqual(auction['awards'][2]['status'], 'active')
self.assertEqual(auction['contracts'][0]['status'], 'pending')
    def migrate_dgfId_to_lotIdentefier(self):
        """Check the migration renames the legacy 'dgfID' field to 'lotIdentifier'.

        NOTE(review): 'Identefier' in the method name is a typo; renaming it
        could break external references, so it is left unchanged.
        """
        auction = self.db.get(self.auction_id)
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        db_auction = response.json['data']
        self.assertEqual(db_auction['lotIdentifier'], auction['lotIdentifier'])
        # Re-create the pre-migration document shape: value stored under 'dgfID'.
        auction['dgfID'] = auction.pop('lotIdentifier')
        self.assertEqual(db_auction['lotIdentifier'], auction['dgfID'])
        self.db.save(auction)
        self.assertTrue('dgfID' in self.db.get(self.auction_id))
        self.runner.migrate(self.steps)
        # After migration only the new field name must remain.
        self.assertFalse('dgfID' in self.db.get(self.auction_id))
        self.assertTrue('lotIdentifier' in self.db.get(self.auction_id))
        response = self.app.get('/auctions/{}'.format(self.auction_id))
        db_auction = response.json['data']
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.content_type, 'application/json')
        lotIdentifier = db_auction.get('lotIdentifier', None)
        self.assertIsNotNone(lotIdentifier)
1c2ddb1aa5471a29b28d3e92c036bb3198402c5f | 186 | py | Python | tests/test_loggers.py | DuinoDu/pl-extension | 1ed8f3dd95aa569ee3493fcc69634d3ab9322430 | [
"Apache-2.0"
] | null | null | null | tests/test_loggers.py | DuinoDu/pl-extension | 1ed8f3dd95aa569ee3493fcc69634d3ab9322430 | [
"Apache-2.0"
] | null | null | null | tests/test_loggers.py | DuinoDu/pl-extension | 1ed8f3dd95aa569ee3493fcc69634d3ab9322430 | [
"Apache-2.0"
] | null | null | null | from pl_extension.loggers import logging_logger
def test_logginglogger(tmpdir):
    """Smoke-test LoggingLogger: construct one and emit a single info record."""
    log = logging_logger.LoggingLogger(tmpdir, prefix="ple")
    log.info("hello, pl-extension!")
| 26.571429 | 63 | 0.774194 | from pl_extension.loggers import logging_logger
def test_logginglogger(tmpdir):
logger = logging_logger.LoggingLogger(tmpdir, prefix="ple")
logger.info("hello, pl-extension!")
| true | true |
1c2ddbf80da42ca685b9a2e8003bcc2cac058518 | 1,630 | py | Python | lagou/lagou/pipelines.py | githubsuzhou/ScrapyLagou | 1e505d584792046d47aea47d5b475fe3581cb51a | [
"Apache-2.0"
] | null | null | null | lagou/lagou/pipelines.py | githubsuzhou/ScrapyLagou | 1e505d584792046d47aea47d5b475fe3581cb51a | [
"Apache-2.0"
] | null | null | null | lagou/lagou/pipelines.py | githubsuzhou/ScrapyLagou | 1e505d584792046d47aea47d5b475fe3581cb51a | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql,json
from scrapy.conf import settings
class LagouPipeline(object):
    """Scrapy item pipeline that inserts scraped job items into MySQL.

    NOTE(review): a fresh connection is opened per item, which is wasteful;
    consider opening it once in ``open_spider`` and closing it in
    ``close_spider``. ``scrapy.conf.settings`` is also deprecated in modern
    Scrapy — prefer ``from_crawler``/``crawler.settings``.
    """

    def process_item(self, item, spider):
        # Connection parameters come from the project settings.
        char = settings['CHARSET']
        host = settings['MYSQL_HOST']
        psd = settings['MYSQL_PASSWD']
        db = settings['MYSQL_DBNAME']
        user = settings['MYSQL_USER']
        port = settings['MYSQL_PORT']
        con = pymysql.connect(host=host, user=user, passwd=psd, db=db,
                              charset=char, port=port)
        try:
            cue = con.cursor()
            try:
                # Parameterized insert — values are never interpolated into SQL.
                cue.execute(
                    "insert into jobs (Name,working_years,salary,education,"
                    "company,city,welfare,creat_time)"
                    "values(%s,%s,%s,%s,%s,%s,%s,%s)",
                    [item['Name'], item['working_years'], item['salary'],
                     item['education'], item['company'], item['city'],
                     item['welfare'], item['creat_time']])
            except Exception as e:
                # Keep the spider running on a failed insert, but undo the
                # partial transaction.
                print('Insert error:', e)
                con.rollback()
            else:
                con.commit()
        finally:
            # Fix: close the connection even if cursor creation or the
            # insert path raises unexpectedly (original could leak it).
            con.close()
        return item
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql,json
from scrapy.conf import settings
class LagouPipeline(object):
def process_item(self, item, spider):
char=settings['CHARSET']
host = settings['MYSQL_HOST']
psd = settings['MYSQL_PASSWD']
db = settings['MYSQL_DBNAME']
user= settings['MYSQL_USER']
port=settings['MYSQL_PORT']
#数据库连接
con=pymysql.connect(host=host,user=user,passwd=psd,db=db,charset=char,port=port)
#数据库游标
cue=con.cursor()
print("mysql connect succes")#测试语句,这在程序执行时非常有效的理解程序是否执行到这一步
try:
# cue.execute("insert into jobs (name,working_years,salary,education,company,city,welfare,create_time)values(%s,%s,%s,%s,%s,%s,%s,%s)",[ item['name'],item['working_years'],
# item['salary'],item['education'],item['company'] ,item['city'],item['welfare'],item['create_time']])
cue.execute("insert into jobs (Name,working_years,salary,education,company,city,welfare,creat_time)values(%s,%s,%s,%s,%s,%s,%s,%s)",[ item['Name'],item['working_years'],
item['salary'],item['education'],item['company'] ,item['city'],item['welfare'],item['creat_time'] ])
print("insert success")#测试语句
except Exception as e:
print('Insert error:',e)
con.rollback()
else:
con.commit()
con.close()
return item | true | true |
1c2ddc8f9a5d1ee3adbdbd7d0706246544cde6b7 | 5,748 | py | Python | cloudbutton_geospatial/datafetch_utils/sentinel.py | berkevaroll/geospatial-usecase | d3db18607be0976badde073b3ee7c8b9613372e1 | [
"Apache-2.0"
] | null | null | null | cloudbutton_geospatial/datafetch_utils/sentinel.py | berkevaroll/geospatial-usecase | d3db18607be0976badde073b3ee7c8b9613372e1 | [
"Apache-2.0"
] | null | null | null | cloudbutton_geospatial/datafetch_utils/sentinel.py | berkevaroll/geospatial-usecase | d3db18607be0976badde073b3ee7c8b9613372e1 | [
"Apache-2.0"
] | 4 | 2021-03-29T09:03:52.000Z | 2021-09-21T18:27:01.000Z | """
Este módulo contiene métodos de utilidad para la descarga de
imágenes del satélite Sentinel-2.
Los distintos tiles del sistema de coordenadas MGRS en que
se divide España se pueden encontrar
en esta web https://www.asturnatura.com/sinflac/utm-mgrs.php
La documentación de la librería sentinelsat se puede consultar
en la siguiente dirección:
https://sentinelsat.readthedocs.io/en/stable/api.html
"""
import collections
import os
import os.path
import zipfile
import sentinelsat
SENT_USER = 'vmoreno'
SENT_PASS = '12345678'
BANDS_DIR = 'bands'
ZIP_EXTENSION = ".zip"
GRANULE_DIR = 'GRANULE'
IMAGE_DATA_DIR = 'IMG_DATA'
SAFE_EXTENSION = '.SAFE'
JP2_EXTENSION = '.jp2'
def download_products(tiles, start_date, end_date, output_folder, show_progressbars=True):
    """Download all Sentinel-2 products (types S2MS2Ap and S2MSI1C) for *tiles*.

    :param tiles: MGRS tile ids used to filter the query
    :param start_date: earliest sensing date
    :param end_date: latest sensing date
    :param output_folder: directory the downloaded products are written to
    :param show_progressbars: whether download progress bars are shown
    """
    print('Downloading products')
    api = sentinelsat.SentinelAPI(user=SENT_USER,
                                  password=SENT_PASS,
                                  api_url='https://scihub.copernicus.eu/dhus',
                                  show_progressbars=show_progressbars)
    # Filters shared by every query; the tile id is added per iteration below.
    base_query = {
        'platformname': 'Sentinel-2',
        'producttype': ('S2MS2Ap', 'S2MSI1C'),
        'cloudcoverpercentage': (0, 15),
        'date': (start_date, end_date)
    }
    products = collections.OrderedDict()
    for tile_id in tiles:
        per_tile_query = dict(base_query, tileid=tile_id)
        products.update(api.query(**per_tile_query))
    api.download_all(products, output_folder)
# def extract_bands(sentinel_data_dir, sentinel_downloads_dir, sentinel_zip_filename, bands):
# """
# Recupera los ficheros correspondientes a las bandas *bands* contenidos en
# el fichero zip (*sentinel_zip_filename*) descargado como producto del satélite Sentinel-2.
# Las bandas las guarda en el directorio 'bands', en una carpeta con el nombre de producto.
#
# :param sentinel_data_dir: Directorio de datos para los scripts de SENTINEL
# :param sentinel_downloads_dir: Directorio en el que se encuentran los ficheros descargados
# :param sentinel_zip_filename: Nombre del fichero del que extraer las bandas
# :param bands: Nombre de las bandas a extraer
# """
#
# print('Extracting band ' + sentinel_zip_filename)
# # Unzip the file product
# sentinel_zip_file_path = os.path.abspath(os.path.join(sentinel_downloads_dir, sentinel_zip_filename))
# zip_ref = zipfile.ZipFile(sentinel_zip_file_path)
# zip_ref.extractall(sentinel_downloads_dir)
# zip_ref.close()
#
# # Create dir for product if doesn't exist
# full_product_name = sentinel_zip_filename.split('.')[0]
# bands_dir_path = os.path.join(sentinel_data_dir, BANDS_DIR, full_product_name)
# try:
# os.makedirs(bands_dir_path)
# except OSError as e:
# if e.errno != errno.EEXIST:
# raise
#
# # Extract the bands from product.SAFE dir to product bands dir
# product_safe_dir = full_product_name + SAFE_EXTENSION
# granule_dir_path = os.path.join(sentinel_downloads_dir, product_safe_dir, GRANULE_DIR)
# granule_dirs = [d for d in os.listdir(granule_dir_path) if
# (os.path.isdir(os.path.join(granule_dir_path, d))) and (d != '.') and (d != '..')]
# # There is only one folder
# granule_dir = granule_dirs[0]
# img_data_path = os.path.join(granule_dir_path, granule_dir, IMAGE_DATA_DIR)
# band_files = [bf for bf in os.listdir(img_data_path)]
# selected_bands = [band_file for band_file in band_files for band in bands if band_file.endswith(band + JP2_EXTENSION)]
# for band in selected_bands:
# band_path = os.path.join(img_data_path, band)
# print(f'Copying band from {band_path} to {bands_dir_path}')
# shutil.copy(band_path, bands_dir_path)
#
#
# def extract_bands_from_downloads(sentinel_data_dir, sentinel_downloads_dir, bands):
# print('Extracting bands')
# sentinel_file_names = [f for f in os.listdir(sentinel_downloads_dir) if
# (os.path.isfile(os.path.join(sentinel_downloads_dir, f))) and (f.endswith(ZIP_EXTENSION))]
# for sentinel_zip_filename in sentinel_file_names:
# extract_bands(sentinel_data_dir, sentinel_downloads_dir, sentinel_zip_filename, bands)
def unzip_bands_dirs(sentinel_downloads_dir):
    """Extract every downloaded product zip found in *sentinel_downloads_dir*.

    Each ``*.zip`` file directly inside the directory is extracted in place,
    next to the archive it came from.

    :param sentinel_downloads_dir: directory containing the downloaded zips
    """
    print('Unzipping bands')
    sentinel_file_names = [os.path.join(sentinel_downloads_dir, f) for f in os.listdir(sentinel_downloads_dir) if
                           (os.path.isfile(os.path.join(sentinel_downloads_dir, f))) and (f.endswith(ZIP_EXTENSION))]
    for sentinel_zip_filename in sentinel_file_names:
        print(f'Unzipping {sentinel_zip_filename}')
        # Context manager guarantees the archive handle is closed even when
        # extractall() raises; the original leaked the handle in that case.
        with zipfile.ZipFile(sentinel_zip_filename) as zip_ref:
            zip_ref.extractall(sentinel_downloads_dir)
def download_bands(tiles, start_date, end_date, sentinel_downloads_dir):
    """Download the Sentinel-2 products for *tiles* and unzip them in place."""
    print('Downloading bands from Sentinel')
    download_products(tiles,
                      start_date,
                      end_date,
                      sentinel_downloads_dir)
    unzip_bands_dirs(sentinel_downloads_dir)
    print('Downloading bands from Sentinel finished')
| 40.765957 | 124 | 0.705811 |
import collections
import os
import os.path
import zipfile
import sentinelsat
SENT_USER = 'vmoreno'
SENT_PASS = '12345678'
BANDS_DIR = 'bands'
ZIP_EXTENSION = ".zip"
GRANULE_DIR = 'GRANULE'
IMAGE_DATA_DIR = 'IMG_DATA'
SAFE_EXTENSION = '.SAFE'
JP2_EXTENSION = '.jp2'
def download_products(tiles, start_date, end_date, output_folder, show_progressbars=True):
print('Downloading products')
api = sentinelsat.SentinelAPI(user=SENT_USER,
password=SENT_PASS,
api_url='https://scihub.copernicus.eu/dhus',
show_progressbars=show_progressbars)
query_kwargs = {
'platformname': 'Sentinel-2',
'producttype': ('S2MS2Ap', 'S2MSI1C'),
'cloudcoverpercentage': (0, 15),
'date': (start_date, end_date)
}
products = collections.OrderedDict()
for tile in tiles:
kw = query_kwargs.copy()
kw['tileid'] = tile
pp = api.query(**kw)
products.update(pp)
api.download_all(products, output_folder)
# Recupera los ficheros correspondientes a las bandas *bands* contenidos en
# el fichero zip (*sentinel_zip_filename*) descargado como producto del satélite Sentinel-2.
# Las bandas las guarda en el directorio 'bands', en una carpeta con el nombre de producto.
#
# :param sentinel_data_dir: Directorio de datos para los scripts de SENTINEL
# :param sentinel_downloads_dir: Directorio en el que se encuentran los ficheros descargados
# :param sentinel_zip_filename: Nombre del fichero del que extraer las bandas
# :param bands: Nombre de las bandas a extraer
# """
[0]
# bands_dir_path = os.path.join(sentinel_data_dir, BANDS_DIR, full_product_name)
# try:
# os.makedirs(bands_dir_path)
# except OSError as e:
# if e.errno != errno.EEXIST:
# raise
#
# # Extract the bands from product.SAFE dir to product bands dir
# product_safe_dir = full_product_name + SAFE_EXTENSION
# granule_dir_path = os.path.join(sentinel_downloads_dir, product_safe_dir, GRANULE_DIR)
# granule_dirs = [d for d in os.listdir(granule_dir_path) if
# (os.path.isdir(os.path.join(granule_dir_path, d))) and (d != '.') and (d != '..')]
# # There is only one folder
# granule_dir = granule_dirs[0]
# img_data_path = os.path.join(granule_dir_path, granule_dir, IMAGE_DATA_DIR)
# band_files = [bf for bf in os.listdir(img_data_path)]
# selected_bands = [band_file for band_file in band_files for band in bands if band_file.endswith(band + JP2_EXTENSION)]
# for band in selected_bands:
# band_path = os.path.join(img_data_path, band)
# print(f'Copying band from {band_path} to {bands_dir_path}')
# shutil.copy(band_path, bands_dir_path)
#
#
# def extract_bands_from_downloads(sentinel_data_dir, sentinel_downloads_dir, bands):
# print('Extracting bands')
# sentinel_file_names = [f for f in os.listdir(sentinel_downloads_dir) if
# (os.path.isfile(os.path.join(sentinel_downloads_dir, f))) and (f.endswith(ZIP_EXTENSION))]
# for sentinel_zip_filename in sentinel_file_names:
# extract_bands(sentinel_data_dir, sentinel_downloads_dir, sentinel_zip_filename, bands)
def unzip_bands_dirs(sentinel_downloads_dir):
print('Unzipping bands')
sentinel_file_names = [os.path.join(sentinel_downloads_dir, f) for f in os.listdir(sentinel_downloads_dir) if
(os.path.isfile(os.path.join(sentinel_downloads_dir, f))) and (f.endswith(ZIP_EXTENSION))]
for sentinel_zip_filename in sentinel_file_names:
print(f'Unzipping {sentinel_zip_filename}')
zip_ref = zipfile.ZipFile(sentinel_zip_filename)
zip_ref.extractall(sentinel_downloads_dir)
zip_ref.close()
def download_bands(tiles, start_date, end_date, sentinel_downloads_dir):
print('Downloading bands from Sentinel')
download_products(tiles=tiles,
start_date=start_date,
end_date=end_date,
output_folder=sentinel_downloads_dir)
unzip_bands_dirs(sentinel_downloads_dir)
# extract_bands_from_downloads(sentinel_data_dir, sentinel_downloads_dir)
print('Downloading bands from Sentinel finished')
| true | true |
1c2ddde4c9264ef06c744be59061bedaa0f73663 | 926 | py | Python | 1.7.video_stream.py | enesonmez/opencv-learning | 3cbefc16a8793b5f1c24cc9080c005b4f58714cd | [
"MIT"
] | null | null | null | 1.7.video_stream.py | enesonmez/opencv-learning | 3cbefc16a8793b5f1c24cc9080c005b4f58714cd | [
"MIT"
] | null | null | null | 1.7.video_stream.py | enesonmez/opencv-learning | 3cbefc16a8793b5f1c24cc9080c005b4f58714cd | [
"MIT"
] | null | null | null | import cv2
cap = cv2.VideoCapture(0)  # 0 => built-in camera, 1/2/... => additional USB cameras

# Recording setup: a FourCC is a 4-byte code identifying the video codec.
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('img/output.avi', fourcc, 20.0, (640, 480))  # file, codec, fps, frame size

while True:
    ret, frame = cap.read()  # grab a frame; ret reports whether the read worked
    if not ret:
        # Bug fix: the original ignored `ret`, so a failed read passed a None
        # frame to out.write()/imshow() and crashed. Stop cleanly instead.
        break
    print(cap.get(cv2.CAP_PROP_FRAME_WIDTH))   # current frame width
    print(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))  # current frame height
    out.write(frame)
    cv2.imshow("camera", frame)
    if cv2.waitKey(30) & 0xFF == ord('q'):  # poll every 30 ms; quit on 'q'
        break

cap.release()  # free the camera
out.release()  # finalize the recording
cv2.destroyAllWindows()
cap = cv2.VideoCapture(0)
# video'yu kaydetmek için
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter('img/output.avi', fourcc, 20.0, (640,480)) # video adı, codec code, fps, video size
while True:
ret, frame = cap.read() # kameradan o anki görüntü okunuyor. ret => kamera çalışıp çalışmadığını döndürür.
print(cap.get(cv2.CAP_PROP_FRAME_WIDTH)) # frame genişliğini döndürür
print(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)) # frame yüksekliğini döndürür
out.write(frame)
cv2.imshow("camera",frame) # görüntü ekrana bastırılıyor.
if cv2.waitKey(30) & 0xFF == ord('q'): # 30 ms'de bir görüntü alınıyor ve q'ya basılırsa döngüden çıkılıyor.
break
cap.release() # kamera serbest bırakılıyor.
out.release() # kayıt çıktısı serbest bırakılıyor.
cv2.destroyAllWindows() | true | true |
1c2dde7de9058c62d67b5be58652d6aa606b8599 | 1,509 | py | Python | recursion-cellular/utils/logger.py | rebryk/kaggle | 0c656f64ce681dd313ca5145f0ff834a1a6d822e | [
"MIT"
] | 17 | 2019-01-11T01:57:29.000Z | 2020-08-25T04:52:28.000Z | recursion-cellular/utils/logger.py | rebryk/kaggle | 0c656f64ce681dd313ca5145f0ff834a1a6d822e | [
"MIT"
] | 10 | 2020-01-28T23:01:43.000Z | 2022-03-11T23:37:54.000Z | recursion-cellular/utils/logger.py | rebryk/kaggle | 0c656f64ce681dd313ca5145f0ff834a1a6d822e | [
"MIT"
] | 3 | 2019-01-11T03:12:04.000Z | 2019-01-28T14:41:14.000Z | import logging
import sys
from pathlib import Path
from typing import Union, Any
from torch.utils.tensorboard import SummaryWriter
class Logger(logging.Logger):
    """Logger that writes to the console and/or a file and mirrors scalars to TensorBoard.

    Args:
        name: logger name forwarded to :class:`logging.Logger`.
        level: minimum level for the attached handlers.
        path: optional log file; when given, a ``FileHandler`` is attached.
        pathx: optional base directory for TensorBoard event files; when
            ``None``, :meth:`scalar_summary` is a no-op.
        stream: when not ``None``, a stdout ``StreamHandler`` is attached.
            NOTE(review): the value itself is ignored — any non-None value
            selects ``sys.stdout``; confirm that is the intent.
    """

    LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    DATE_FORMAT = '%m/%d/%Y %I:%M:%S %p'

    def __init__(self,
                 name: str = 'logger',
                 level: int = logging.INFO,
                 path: Union[str, Path] = None,
                 pathx: Union[str, Path] = None,
                 stream: Any = None):
        super().__init__(name, level)
        self.pathx = pathx
        # One SummaryWriter per logger_tag, created lazily in scalar_summary.
        self.writers = dict()

        formatter = logging.Formatter(Logger.LOG_FORMAT, Logger.DATE_FORMAT)

        if path is not None:
            handler = logging.FileHandler(path)
            handler.setLevel(self.level)
            handler.setFormatter(formatter)
            self.addHandler(handler)

        if stream is not None:
            handler = logging.StreamHandler(sys.stdout)
            handler.setLevel(level)
            handler.setFormatter(formatter)
            self.addHandler(handler)

    def scalar_summary(self, logger_tag: str, tag: str, value: float, step: int):
        """Write *value* under *tag* to the TensorBoard run named *logger_tag*.

        Does nothing when no TensorBoard directory (``pathx``) is configured.
        """
        if self.pathx is None:
            # Bug fix: the original `pass` fell through and created a writer
            # under the literal directory "None/<logger_tag>".
            return
        if logger_tag not in self.writers:
            self.writers[logger_tag] = SummaryWriter(f'{self.pathx}/{logger_tag}')
        self.writers[logger_tag].add_scalar(tag, value, step)
| 32.106383 | 89 | 0.603711 | import logging
import sys
from pathlib import Path
from typing import Union, Any
from torch.utils.tensorboard import SummaryWriter
class Logger(logging.Logger):
LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
DATE_FORMAT = '%m/%d/%Y %I:%M:%S %p'
def __init__(self,
name: str = 'logger',
level: int = logging.INFO,
path: Union[str, Path] = None,
pathx: Union[str, Path] = None,
stream: Any = None):
super().__init__(name, level)
self.pathx = pathx
self.writers = dict()
formatter = logging.Formatter(Logger.LOG_FORMAT, Logger.DATE_FORMAT)
if path is not None:
handler = logging.FileHandler(path)
handler.setLevel(self.level)
handler.setFormatter(formatter)
self.addHandler(handler)
if stream is not None:
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(level)
handler.setFormatter(formatter)
self.addHandler(handler)
def scalar_summary(self, logger_tag: str, tag: str, value: float, step: int):
if self.pathx is None:
pass
if logger_tag not in self.writers:
self.writers[logger_tag] = SummaryWriter(f'{self.pathx}/{logger_tag}')
self.writers[logger_tag].add_scalar(tag, value, step)
| true | true |
1c2de051eef67fd3e5e73674a713b6ba775d6631 | 4,282 | py | Python | src/mbf_anysnake/util.py | IMTMarburg/mbf_anysnake | fab457a8058f74e5729fd6317393d126e0329f31 | [
"MIT"
] | null | null | null | src/mbf_anysnake/util.py | IMTMarburg/mbf_anysnake | fab457a8058f74e5729fd6317393d126e0329f31 | [
"MIT"
] | null | null | null | src/mbf_anysnake/util.py | IMTMarburg/mbf_anysnake | fab457a8058f74e5729fd6317393d126e0329f31 | [
"MIT"
] | 1 | 2021-04-15T06:44:15.000Z | 2021-04-15T06:44:15.000Z | # -*- coding: future_fstrings -*-
import re
import requests
import subprocess
import time
import shutil
import time
from pathlib import Path
re_github = r"[A-Za-z0-9-]+\/[A-Za-z0-9]+"
def combine_volumes(ro=[], rw=[]):
    """Merge read-only and read-write volume definitions into one mapping.

    *ro* and *rw* may each be a single dict or a list of dicts mapping
    container target paths to host source paths.  Returns
    ``{target: (absolute_source, mode)}`` with mode ``"ro"`` or ``"rw"``;
    a read-write entry wins when the same target appears in both.
    """
    # NOTE: the mutable defaults are never mutated here, so sharing them
    # across calls is safe.
    combined = dict()
    for mappings, mode in ((ro, "ro"), (rw, "rw")):
        if isinstance(mappings, dict):
            mappings = [mappings]
        for mapping in mappings:
            for target, source in mapping.items():
                if isinstance(target, (dict, tuple)):
                    raise ValueError("fix me")
                combined[target] = str(Path(source).absolute()), mode
    return combined
def find_storage_path_from_other_machine(anysnake, postfix, check_func=None):
    """Locate *postfix* in this machine's storage, or in another machine's.

    When the local ``storage / postfix`` does not exist and
    ``storage_per_hostname`` is set, sibling per-host directories are scanned
    for the same ``<docker image>/<postfix>`` layout and the first candidate
    accepted by *check_func* (default: "path exists") is returned.  Falls back
    to the local path otherwise.
    """
    if check_func is None:
        def check_func(candidate):
            return candidate.exists()

    local_result = anysnake.paths["storage"] / postfix
    if local_result.exists() or not anysnake.storage_per_hostname:
        return local_result

    hosts_root = anysnake.paths["storage"].parent.parent
    relative = Path(anysnake.paths["storage"].name) / postfix
    for host_dir in hosts_root.glob("*"):
        if host_dir.is_dir() and check_func(host_dir / relative):
            return host_dir / relative
    return local_result
def download_file(url, filename):
    """Download *url* to *filename* unless the target already exists.

    The payload is streamed to ``<filename>_temp`` first and only moved to
    *filename* once complete, so an interrupted download never leaves a
    half-written target behind.

    :raises ValueError: when the server answers with a non-200 status
    """
    if Path(filename).exists():
        return
    print("downloading", url, filename)
    r = requests.get(url, stream=True)
    try:
        if r.status_code != 200:
            raise ValueError(f"Error return on {url} {r.status_code}")
        start = time.time()
        count = 0
        with open(str(filename) + "_temp", "wb") as op:
            for block in r.iter_content(1024 * 1024):
                op.write(block)
                count += len(block)
    finally:
        # A streamed response keeps its connection open until released;
        # the original never closed it (leaked on success and on error).
        r.close()
    shutil.move(str(filename) + "_temp", str(filename))
    elapsed = time.time() - start
    # max() guards against a zero elapsed time on coarse-resolution clocks.
    print("Rate: %.2f MB/s" % (count / 1024 / 1024 / max(elapsed, 1e-9)))
def dict_to_toml(d):
    """Render a two-level dict as a tomlkit document (one table per top-level key)."""
    import tomlkit

    doc = tomlkit.document()
    # NOTE(review): the "Autogenertod" typo is preserved on purpose — fixing
    # it would change the bytes of every generated file.
    doc.add(tomlkit.comment("Autogenertod by anysnake"))
    for section_name, section in d.items():
        table = tomlkit.table()
        for option, value in section.items():
            table.add(option, value)
        doc.add(section_name, table)
    return doc
def get_next_free_port(start_at):
    """Return the first TCP port >= *start_at* that can be bound on localhost.

    Probes at most 101 ports (start_at .. start_at + 100), matching the
    original search range.

    :raises ValueError: when no free port is found within the range
    """
    import socket

    for port in range(start_at, start_at + 101):
        # `with` closes the socket on every path; the original leaked one
        # socket per occupied port it probed (bind raised before close()).
        with socket.socket() as s:
            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            try:
                s.bind(("localhost", port))
            except socket.error:
                continue
        return port
    raise ValueError("No empty port found within search range")
def clone_repo(url, name, target_path, log_file):
    """Clone repository *url* (git, hg, or GitHub shorthand) into *target_path*.

    Supported url forms: ``owner/repo`` (GitHub), ``git+<url>``, ``hg+<url>``,
    each optionally prefixed with ``@``.  stdout/stderr of the VCS command is
    redirected to *log_file*.  On failure any partial checkout is removed and
    the :class:`subprocess.CalledProcessError` is re-raised.

    :raises ValueError: when the url matches none of the supported forms
    """
    # NOTE(review): the leading "]" in this message looks accidental, but it
    # is runtime output — confirm before changing it.
    print(f"]\tCloning {name} to {target_path} from {url}")
    if url.startswith("@"):
        url = url[1:]
    if re.match(re_github, url):
        vcs = "git"
        url = "https://github.com/" + url
    elif url.startswith("git+"):
        vcs = "git"
        url = url[4:]
    elif url.startswith("hg+"):
        vcs = "hg"
        url = url[3:]
    else:
        raise ValueError(
            "Could not parse url / must be git+http(s) / hg+https, or github path"
        )
    try:
        subprocess.check_call(
            [vcs, "clone", url, str(target_path)], stdout=log_file, stderr=log_file
        )
    except subprocess.CalledProcessError:
        # Bug fix: the git path rmtree'd unconditionally; when the failed
        # clone never created the directory this raised FileNotFoundError and
        # masked the real error. Guard like the hg path already did.
        if Path(target_path).exists():
            shutil.rmtree(target_path)
        raise
| 30.368794 | 88 | 0.556049 |
import re
import requests
import subprocess
import time
import shutil
import time
from pathlib import Path
re_github = r"[A-Za-z0-9-]+\/[A-Za-z0-9]+"
def combine_volumes(ro=[], rw=[]):
d = dict()
for (what, mode) in [(ro, "ro"), (rw, "rw")]:
if isinstance(what, dict):
what = [what]
for dd in what:
for target, source in dd.items():
if isinstance(target, dict):
raise ValueError("fix me")
elif isinstance(target, tuple):
raise ValueError("fix me")
source = str(Path(source).absolute())
d[target] = source, mode
return d
def find_storage_path_from_other_machine(anysnake, postfix, check_func=None):
if check_func is None:
check_func = lambda x: x.exists()
search_path = anysnake.paths["storage"].parent.parent
docker_image = Path(anysnake.paths["storage"].name)
result = anysnake.paths["storage"] / postfix
postfix = docker_image / postfix
if not result.exists():
if anysnake.storage_per_hostname:
for d in search_path.glob("*"):
if d.is_dir():
if check_func(d / postfix):
result = d / postfix
break
return result
def download_file(url, filename):
if not Path(filename).exists():
print("downloading", url, filename)
r = requests.get(url, stream=True)
if r.status_code != 200:
raise ValueError(f"Error return on {url} {r.status_code}")
start = time.time()
count = 0
with open(str(filename) + "_temp", "wb") as op:
for block in r.iter_content(1024 * 1024):
op.write(block)
count += len(block)
shutil.move(str(filename) + "_temp", str(filename))
stop = time.time()
print("Rate: %.2f MB/s" % ((count / 1024 / 1024 / (stop - start))))
def dict_to_toml(d):
import tomlkit
toml = tomlkit.document()
toml.add(tomlkit.comment("Autogenertod by anysnake"))
for key, sub_d in d.items():
table = tomlkit.table()
for k, v in sub_d.items():
table.add(k, v)
toml.add(key, table)
return toml
def get_next_free_port(start_at):
import socket
try_next = True
port = start_at
while try_next:
try:
s = socket.socket()
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(("localhost", port))
s.close()
try_next = False
except socket.error:
port += 1
if port > start_at + 100:
raise ValueError("No empty port found within search range")
return port
def clone_repo(url, name, target_path, log_file):
print(f"]\tCloning {name} to {target_path} from {url}")
if url.startswith("@"):
url = url[1:]
if re.match(re_github, url):
method = "git"
url = "https://github.com/" + url
elif url.startswith("git+"):
method = "git"
url = url[4:]
elif url.startswith("hg+"):
method = "hg"
url = url[3:]
else:
raise ValueError(
"Could not parse url / must be git+http(s) / hg+https, or github path"
)
if method == "git":
try:
subprocess.check_call(
["git", "clone", url, str(target_path)],
stdout=log_file,
stderr=log_file,
)
except subprocess.CalledProcessError:
import shutil
shutil.rmtree(target_path)
raise
elif method == "hg":
try:
subprocess.check_call(
["hg", "clone", url, str(target_path)], stdout=log_file, stderr=log_file
)
except subprocess.CalledProcessError:
import shutil
if target_path.exists():
shutil.rmtree(target_path)
raise
| true | true |
1c2de0769c358be38996bcb87d548b7c10da2a40 | 70 | py | Python | test/test_cases/google_noqa.py | PFacheris/flake8-function-definition | 74e7ee29dd3bccb08e5636603e60aaa4b7e505af | [
"MIT"
] | null | null | null | test/test_cases/google_noqa.py | PFacheris/flake8-function-definition | 74e7ee29dd3bccb08e5636603e60aaa4b7e505af | [
"MIT"
] | 1 | 2016-09-19T14:01:13.000Z | 2016-09-20T00:36:41.000Z | test/test_cases/google_noqa.py | PFacheris/flake8-function-definition | 74e7ee29dd3bccb08e5636603e60aaa4b7e505af | [
"MIT"
] | null | null | null | def foo( # noqa
bar1, bar2, bar3,
bar4
): # noqa
return
| 11.666667 | 21 | 0.514286 | def foo(
bar1, bar2, bar3,
bar4
):
return
| true | true |
1c2de0ca36c4668ac520ad4379744e1b65453ae1 | 3,521 | py | Python | app/wallet/permissions.py | HenriqueLR/payments | f2f7316fe12b683705e9a78813a86e43c08a2cf6 | [
"MIT"
] | null | null | null | app/wallet/permissions.py | HenriqueLR/payments | f2f7316fe12b683705e9a78813a86e43c08a2cf6 | [
"MIT"
] | 9 | 2017-06-01T12:28:25.000Z | 2017-10-26T11:21:37.000Z | app/wallet/permissions.py | HenriqueLR/payments | f2f7316fe12b683705e9a78813a86e43c08a2cf6 | [
"MIT"
] | null | null | null | #encoding: utf-8
from django.db.models import Q
from django.contrib.auth.decorators import login_required, user_passes_test
from django.views.decorators.cache import never_cache
from django.utils.decorators import method_decorator
from django.http import Http404
from main.utils import apps_permissions, format_date
class PermissionsGeralMixin(object):
    """Base mixin for the wallet class-based views.

    Requires login, an active user and the view's ``required_permissions``;
    optionally swaps ``template_name`` for ``template_name_ajax`` on AJAX
    requests.  Subclasses must define ``required_permissions``.
    """
    # Template used instead of template_name when the request is AJAX.
    template_name_ajax = None

    @classmethod
    def as_view(cls):
        # Wrap the resolved view so anonymous users are redirected to login.
        return login_required(super(PermissionsGeralMixin, cls).as_view())

    @method_decorator(never_cache)
    @method_decorator(user_passes_test(lambda u: u.is_active,login_url='accounts:logout'))
    def dispatch(self, request, *args, **kwargs):
        # 404 (rather than 403) when permissions are missing, so the URL's
        # existence is not revealed to unauthorized users.
        if not self.request.user.has_perms(self.required_permissions):
            raise Http404
        if self.request.is_ajax() and self.template_name_ajax:
            self.template_name = self.template_name_ajax
        return super(PermissionsGeralMixin, self).dispatch(request, *args, **kwargs)
class PermissionsNoteMixin(PermissionsGeralMixin):
    """Adds Note-specific queryset filtering (date range, note/alert status).

    The ``date`` GET parameter is expected as ``<start>-<end>`` and parsed by
    :func:`format_date`; ``status_note``/``status_alert`` carry boolean
    literals ("True"/"False") or "all".
    """

    # Security fix: the original called eval() on request.GET values, which
    # executes arbitrary expressions supplied by the client. Map the expected
    # literals explicitly instead.
    _STATUS_VALUES = {'True': True, 'False': False, '1': True, '0': False}

    def get_queryset(self):
        qs = self.model.objects.list_notes(self.request.user).order_by('-date_note')
        date = self.request.GET.get('date', '')
        if date != '':
            range_date = date.split('-')
            date_start, date_end = format_date(range_date[0], range_date[1])
            qs = qs.filter(date_note__gte=date_start, date_note__lte=date_end)
        note = self.request.GET.get('status_note', '')
        if note != '' and note != 'all':
            qs = qs.filter(status_note=self._STATUS_VALUES[note])
        alert = self.request.GET.get('status_alert', '')
        if alert != '' and alert != 'all':
            qs = qs.filter(status_alert=self._STATUS_VALUES[alert])
        return qs

    def get_context_data(self, **kwargs):
        context = super(PermissionsNoteMixin, self).get_context_data(**kwargs)
        context.update({'object_name': 'Note', 'apps': apps_permissions(self.request),
                        'label_app': 'Wallet'})
        return context
class PermissionsDebitMixin(PermissionsGeralMixin):
    """Adds Debit-specific queryset filtering (optional release-date range)."""

    def get_queryset(self):
        queryset = self.model.objects.list_debits(self.request.user).order_by('-date_releases')
        raw_range = self.request.GET.get('date', '')
        if raw_range != '':
            # "date" arrives as "<start>-<end>"; format_date parses the halves.
            parts = raw_range.split('-')
            date_start, date_end = format_date(parts[0], parts[1])
            queryset = queryset.filter(date_releases__gte=date_start,
                                       date_releases__lte=date_end)
        return queryset

    def get_context_data(self, **kwargs):
        context = super(PermissionsDebitMixin, self).get_context_data(**kwargs)
        extra = {
            'object_name': 'Debit',
            'apps': apps_permissions(self.request),
            'label_app': 'Wallet',
        }
        context.update(extra)
        return context
class PermissionsDepositMixin(PermissionsGeralMixin):
    """Adds Deposit-specific queryset filtering (optional release-date range)."""

    def get_queryset(self):
        queryset = self.model.objects.list_deposits(self.request.user).order_by('-date_releases')
        raw_range = self.request.GET.get('date', '')
        if raw_range != '':
            # "date" arrives as "<start>-<end>"; format_date parses the halves.
            parts = raw_range.split('-')
            date_start, date_end = format_date(parts[0], parts[1])
            queryset = queryset.filter(date_releases__gte=date_start,
                                       date_releases__lte=date_end)
        return queryset

    def get_context_data(self, **kwargs):
        context = super(PermissionsDepositMixin, self).get_context_data(**kwargs)
        extra = {
            'object_name': 'Deposit',
            'apps': apps_permissions(self.request),
            'label_app': 'Wallet',
        }
        context.update(extra)
        return context
from django.db.models import Q
from django.contrib.auth.decorators import login_required, user_passes_test
from django.views.decorators.cache import never_cache
from django.utils.decorators import method_decorator
from django.http import Http404
from main.utils import apps_permissions, format_date
class PermissionsGeralMixin(object):
template_name_ajax = None
@classmethod
def as_view(cls):
return login_required(super(PermissionsGeralMixin, cls).as_view())
@method_decorator(never_cache)
@method_decorator(user_passes_test(lambda u: u.is_active,login_url='accounts:logout'))
def dispatch(self, request, *args, **kwargs):
if not self.request.user.has_perms(self.required_permissions):
raise Http404
if self.request.is_ajax() and self.template_name_ajax:
self.template_name = self.template_name_ajax
return super(PermissionsGeralMixin, self).dispatch(request, *args, **kwargs)
class PermissionsNoteMixin(PermissionsGeralMixin):
def get_queryset(self):
qs = self.model.objects.list_notes(self.request.user).order_by('-date_note')
date = self.request.GET.get('date', '')
if date != '':
range_date = date.split('-')
date_start, date_end = format_date(range_date[0], range_date[1])
qs = qs.filter(date_note__gte=date_start, date_note__lte=date_end)
note = self.request.GET.get('status_note', '')
if note != '' and note != 'all':
qs = qs.filter(status_note=eval(note))
alert = self.request.GET.get('status_alert', '')
if alert != '' and alert != 'all':
qs = qs.filter(status_alert=eval(alert))
return qs
def get_context_data(self, **kwargs):
context = super(PermissionsNoteMixin, self).get_context_data(**kwargs)
context.update({'object_name':'Note', 'apps':apps_permissions(self.request),
'label_app':'Wallet'})
return context
class PermissionsDebitMixin(PermissionsGeralMixin):
def get_queryset(self):
qs = self.model.objects.list_debits(self.request.user).order_by('-date_releases')
date = self.request.GET.get('date', '')
if date != '':
range_date = date.split('-')
date_start, date_end = format_date(range_date[0], range_date[1])
qs = qs.filter(date_releases__gte=date_start, date_releases__lte=date_end)
return qs
def get_context_data(self, **kwargs):
context = super(PermissionsDebitMixin, self).get_context_data(**kwargs)
context.update({'object_name':'Debit', 'apps':apps_permissions(self.request),
'label_app':'Wallet'})
return context
class PermissionsDepositMixin(PermissionsGeralMixin):
def get_queryset(self):
qs = self.model.objects.list_deposits(self.request.user).order_by('-date_releases')
date = self.request.GET.get('date', '')
if date != '':
range_date = date.split('-')
date_start, date_end = format_date(range_date[0], range_date[1])
qs = qs.filter(date_releases__gte=date_start, date_releases__lte=date_end)
return qs
def get_context_data(self, **kwargs):
context = super(PermissionsDepositMixin, self).get_context_data(**kwargs)
context.update({'object_name':'Deposit', 'apps':apps_permissions(self.request),
'label_app':'Wallet'})
return context | true | true |
1c2de10847f0ffbd5d32cfa1851e433c0275227d | 1,654 | py | Python | catalyst/utils/__init__.py | olgaiv39/catalyst | 005a123482b0340c599a58856f396355a76a7db5 | [
"Apache-2.0"
] | null | null | null | catalyst/utils/__init__.py | olgaiv39/catalyst | 005a123482b0340c599a58856f396355a76a7db5 | [
"Apache-2.0"
] | null | null | null | catalyst/utils/__init__.py | olgaiv39/catalyst | 005a123482b0340c599a58856f396355a76a7db5 | [
"Apache-2.0"
] | null | null | null | # flake8: noqa
from .argparse import args_are_not_none, boolean_flag
from .checkpoint import pack_checkpoint, unpack_checkpoint, \
save_checkpoint, load_checkpoint
from .compression import pack, pack_if_needed, unpack, unpack_if_needed
from .config import load_ordered_yaml, get_environment_vars, dump_environment, \
parse_config_args, parse_args_uargs
# from .dataset import *
from .ddp import is_wrapped_with_ddp, get_real_module
# from .frozen import *
from .hash import get_hash, get_short_hash
from .image import imread, imwrite, mimwrite_with_meta, \
tensor_from_rgb_image, tensor_to_ndimage, \
binary_mask_to_overlay_image
from .initialization import create_optimal_inner_init, outer_init, \
constant_init, uniform_init, normal_init, xavier_init, kaiming_init, \
bias_init_with_prob
from .misc import pairwise, make_tuple, merge_dicts, append_dict, is_exception
from .numpy import np_softmax, geometric_cumsum, structed2dict, dict2structed
# from .pandas import *
from .parallel import Pool, DumbPool, get_pool, \
parallel_imap, tqdm_parallel_imap
from .plotly import plot_tensorboard_log
# from .registry import *
from .seed import set_global_seed, Seeder
from .serialization import serialize, deserialize
# from .tensorboard import *
from .torch import ce_with_logits, log1p_exp, normal_sample, normal_logprob, \
soft_update, get_optimizable_params, \
get_optimizer_momentum, set_optimizer_momentum, assert_fp16_available, \
get_device, get_activation_fn, any2device, get_available_gpus, \
prepare_cudnn, process_model_params
from .visualization import plot_confusion_matrix, render_figure_to_tensor
| 47.257143 | 80 | 0.819831 |
from .argparse import args_are_not_none, boolean_flag
from .checkpoint import pack_checkpoint, unpack_checkpoint, \
save_checkpoint, load_checkpoint
from .compression import pack, pack_if_needed, unpack, unpack_if_needed
from .config import load_ordered_yaml, get_environment_vars, dump_environment, \
parse_config_args, parse_args_uargs
from .ddp import is_wrapped_with_ddp, get_real_module
from .hash import get_hash, get_short_hash
from .image import imread, imwrite, mimwrite_with_meta, \
tensor_from_rgb_image, tensor_to_ndimage, \
binary_mask_to_overlay_image
from .initialization import create_optimal_inner_init, outer_init, \
constant_init, uniform_init, normal_init, xavier_init, kaiming_init, \
bias_init_with_prob
from .misc import pairwise, make_tuple, merge_dicts, append_dict, is_exception
from .numpy import np_softmax, geometric_cumsum, structed2dict, dict2structed
from .parallel import Pool, DumbPool, get_pool, \
parallel_imap, tqdm_parallel_imap
from .plotly import plot_tensorboard_log
from .seed import set_global_seed, Seeder
from .serialization import serialize, deserialize
from .torch import ce_with_logits, log1p_exp, normal_sample, normal_logprob, \
soft_update, get_optimizable_params, \
get_optimizer_momentum, set_optimizer_momentum, assert_fp16_available, \
get_device, get_activation_fn, any2device, get_available_gpus, \
prepare_cudnn, process_model_params
from .visualization import plot_confusion_matrix, render_figure_to_tensor
| true | true |
1c2de16d36015f768686e1bdd57367d8a669bf1e | 956 | py | Python | setup.py | p-w/block-parser | 180866de25f8133b412b15a022bd2dcad2ddef00 | [
"Apache-2.0"
] | 77 | 2016-02-23T04:42:53.000Z | 2022-03-17T20:29:49.000Z | setup.py | p-w/block-parser | 180866de25f8133b412b15a022bd2dcad2ddef00 | [
"Apache-2.0"
] | 6 | 2016-03-02T08:31:38.000Z | 2020-02-28T13:06:53.000Z | setup.py | p-w/block-parser | 180866de25f8133b412b15a022bd2dcad2ddef00 | [
"Apache-2.0"
] | 13 | 2016-02-24T11:32:04.000Z | 2021-08-11T09:39:12.000Z | from setuptools import setup, find_packages
from codecs import open
from os import path
# Package metadata for block-parser; the long description is read from the
# README that sits next to this setup.py.
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
setup(
    name='block-parser',
    version='1.0.0',
    description='A tool for parsing Windows PowerShell script block logging events',
    long_description=long_description,
    url='https://github.com/matthewdunwoody/block-parser',
    author='Matthew Dunwoody',
    license='Apache Software License',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Information Technology',
        'Topic :: Security',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 2.7'
    ],
    packages=find_packages(),
    install_requires=['python-evtx', 'lxml'],
    # Installed as a command-line script; the path is resolved relative to
    # this setup.py so builds work from any working directory.
    scripts=[path.join(here, 'block-parser', 'block-parser.py')],
)
| 31.866667 | 84 | 0.677824 | from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='block-parser',
version='1.0.0',
description='A tool for parsing Windows PowerShell script block logging events',
long_description=long_description,
url='https://github.com/matthewdunwoody/block-parser',
author='Matthew Dunwoody',
license='Apache Software License',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Information Technology',
'Topic :: Security',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7'
],
packages=find_packages(),
install_requires=['python-evtx', 'lxml'],
scripts=[path.join(here, 'block-parser', 'block-parser.py')],
)
| true | true |
1c2de1ba4743e8b4b37bf5153ada3c6e67fdb3b9 | 688 | py | Python | setup.py | vertiond/verthash-pospace | 442c51b877f2c99327f6a2f8a946a210cb09f789 | [
"MIT"
] | 1 | 2021-05-23T23:55:43.000Z | 2021-05-23T23:55:43.000Z | setup.py | vertiond/verthash-pospace | 442c51b877f2c99327f6a2f8a946a210cb09f789 | [
"MIT"
] | null | null | null | setup.py | vertiond/verthash-pospace | 442c51b877f2c99327f6a2f8a946a210cb09f789 | [
"MIT"
] | 8 | 2020-09-05T02:48:13.000Z | 2022-03-26T22:56:57.000Z | from setuptools import setup, Extension
# C sources and include paths for the native Verthash extension module.
verthashsources = [
    'h2.c',
    'tiny_sha3/sha3.c'
]
verthashincludes = [
    '.',
    './tiny_sha3'
]
# Build the 'verthash' C extension (C99 required by the sources).
verthash_module = Extension('verthash',
                sources=verthashsources+['verthashmodule.c'],
                extra_compile_args=['-std=c99'],
                include_dirs=verthashincludes)
setup(name = 'verthash',
    version = '0.0.1',
    author_email = 'jameslovejoy1@gmail.com',
    author = 'James Lovejoy',
    url = 'https://github.com/metalicjames/verthash-pospace',
    description = 'Python bindings for Verthash proof of work function',
    ext_modules = [verthash_module])
| 27.52 | 74 | 0.603198 | from setuptools import setup, Extension
verthashsources = [
'h2.c',
'tiny_sha3/sha3.c'
]
verthashincludes = [
'.',
'./tiny_sha3'
]
verthash_module = Extension('verthash',
sources=verthashsources+['verthashmodule.c'],
extra_compile_args=['-std=c99'],
include_dirs=verthashincludes)
setup(name = 'verthash',
version = '0.0.1',
author_email = 'jameslovejoy1@gmail.com',
author = 'James Lovejoy',
url = 'https://github.com/metalicjames/verthash-pospace',
description = 'Python bindings for Verthash proof of work function',
ext_modules = [verthash_module])
| true | true |
1c2de21bd4edb67adc137df05213d1b1725e17c7 | 7,265 | py | Python | slot/__init__.py | naqintosh/dl | 3615b78b5fd7a7808a49bb0b9124b41842100a2e | [
"Apache-2.0"
] | null | null | null | slot/__init__.py | naqintosh/dl | 3615b78b5fd7a7808a49bb0b9124b41842100a2e | [
"Apache-2.0"
] | null | null | null | slot/__init__.py | naqintosh/dl | 3615b78b5fd7a7808a49bb0b9124b41842100a2e | [
"Apache-2.0"
] | null | null | null | import copy
from core import Conf
from ability import Ability, ability_dict
from itertools import islice
class Slot(object):
    """Base class for an equipment slot.

    Class attributes act as defaults; __init__ replaces the falsy mutable
    ones with fresh per-instance containers so subclasses do not alias a
    shared class-level list/Conf.
    """
    att = 0          # attack stat contributed by this slot
    ele = 'none'     # element of the slotted item
    wt = 'none'      # required weapon type ('none' = no restriction)
    stype = 'slot'   # short key used to name modifiers registered on adv
    onele = 0        # set to 1 by setup() when the item matches the character's element
    a = None         # ability tuples, e.g. ('a', 0.60)
    mod = None       # modifier spec: a tuple, a list of tuples, or a dict of tuples
    conf = None      # Conf overrides merged into the adventurer's config
    def __init__(self):
        if not self.mod:
            self.mod = []
        if not self.conf:
            self.conf = Conf()
        if not self.a:
            self.a = []
        self.name = type(self).__name__
    def setup(self, c):
        """Bind this slot to character *c*.

        Flags an element match on onele and raises ValueError when the slot
        requires a weapon type different from the character's.
        """
        if c.ele == self.ele :
            self.onele = 1
        if self.wt != 'none' and c.wt != self.wt:
            raise ValueError('Wrong weapon type, expected {} but got {}'.format(self.wt, c.wt))
    def oninit(self, adv):
        """Apply this slot's config and register its modifiers on *adv*.

        self.mod may be a single tuple, a list of tuples, or a dict of
        tuples; each entry is unpacked into adv.Modifier under a name
        derived from stype (and the dict key, when present).
        """
        adv.conf(self.conf)
        i = self.stype
        j = self.mod
        if type(j) == tuple:
            adv.Modifier(i,*j)
        elif type(j) == list:
            idx = 0
            for k in j:
                adv.Modifier(i+'_%d'%idx,*k)
                idx += 1
        elif type(j) == dict:
            idx = 0
            for k in j:
                # dict entries get the key baked into the modifier name
                adv.Modifier(i+k+'_%d'%idx,*j[k])
                idx += 1
class CharacterBase(Slot):
    """A character slot: base stats plus selected co-abilities."""

    name = 'null'
    stars = 5
    max_coab = 4  # at most this many co-abilities are kept

    def __init__(self):
        super().__init__()
        self.coabs = {}

    def setup(self):
        return

    def oninit(self, adv):
        """Register abilities, keeping only the first max_coab co-abilities.

        Each kept co-ability contributes its chain part individually; ex
        parts are de-duplicated before being added.
        """
        Slot.oninit(self, adv)
        kept = list(islice(self.coabs.items(), self.max_coab))
        self.coabs = dict(kept)
        unique_ex = set()
        added = 0
        for _, (chain, ex) in kept:
            if ex:
                unique_ex.add(('ex', ex))
            if chain:
                self.a.append(tuple(chain))
                added += 1
        self.a.extend(unique_ex)

    def has_ex(self, ex):
        """Return True when any selected co-ability provides ex part *ex*."""
        return any(ex == coab[1] for coab in self.coabs.values())
class WeaponBase(Slot):
    """Base class for a weapon slot."""
    stype = 'w'
    wt = 'none'
    s3 = Conf()  # skill-3 config granted by the weapon
    ele = []  # elements this weapon is boosted on (subclasses may also use a string)
    def setup(self, c, adv):
        """Bind to character *c*: elemental att bonus, s3 grant, base crit.

        On an element match att is multiplied by 1.5 and the weapon's s3 is
        installed; an 'all'-element weapon grants its s3 without the att
        bonus.  The s3 is only installed when the adventurer's s3 slot is
        not already owned.
        """
        super(WeaponBase, self).setup(c)
        if type(self.ele) == list:
            for i in self.ele:
                if c.ele == i :
                    self.onele = 1
                    break
            if self.onele:
                self.att *= 1.5
                if adv is not None and adv.s3.owner is None:
                    self.conf.s3 = Conf(self.s3)
            elif 'all' in self.ele:
                if adv is not None and adv.s3.owner is None:
                    self.conf.s3 = Conf(self.s3)
        # Base crit-chance modifier from weapon type; axes get a higher rate.
        if self.wt == 'axe':
            self.mod.append(('crit','chance',0.04))
        else :
            self.mod.append(('crit','chance',0.02))
    def s3_proc(self, adv, e):
        # Hook for weapon-specific s3 behavior; default does nothing.
        pass
class DragonBase(Slot):
    """Base class for a dragon slot; wires up the adventurer's DragonForm."""
    stype = 'd'
    a = [('a', 0.60)]  # passive att boost; dropped off-element (see setup)
    # Timings are in seconds (frame counts / 60 fps).
    default_dragonform = {
        'duration': 600 / 60, # 10s dragon time
        'dracolith': 0.40, # base dragon damage
        'exhilaration': 0, # psiren aura
        'skill_use': 1, # number of skill usage
        'gauge_iv': 15, # gauge interval
        'gauge_val': 10, # gauge regen value
        'latency': 0, # amount of delay for cancel
        'act': 'end',
        'dshift.startup': 96 / 60, # shift 102 -> 96 + 6
        'dshift.recovery': 0 / 60, # assumed cancel
        'dshift.dmg': 2.00,
        'dshift.hit': 1,
        'dx1.recovery': 0,
        'dx2.recovery': 0,
        'dx3.recovery': 0,
        'dx4.recovery': 0,
        'dx5.recovery': 0,
        'ds.startup': 0,
        'dodge.startup': 40 / 60, # dodge frames
        'dodge.recovery': 0,
        'dodge.hit': 0,
        'end.startup': 0, # amount of time needed to kys, 0 default
        'end.recovery': 0
    }
    dragonform = {}  # per-dragon overrides merged over default_dragonform
    def setup(self, c):
        """Apply the elemental bonus; off-element dragons lose the passive."""
        Slot.setup(self, c)
        if self.onele:
            self.att *= 1.5
        else:
            self.a = []
    def ds_proc(self):
        """Deal the dragon-skill damage; falls back to 0 when unavailable."""
        try:
            return self.adv.dmg_make('ds',self.adv.dragonform.conf.ds.dmg,'s')
        # NOTE(review): bare except swallows every error, not just a missing
        # ds config - consider narrowing to the expected exception.
        except:
            return 0
    def oninit(self, adv):
        """Build the DragonForm for *adv*, preferring adventurer overrides."""
        super().oninit(adv)
        # Gauge interval scales with fight duration, capped at the default 15
        # (presumably seconds - TODO confirm against DragonForm).
        gauge_iv = min(int(adv.duration/12), 15)
        from core.dragonform import DragonForm
        self.adv = adv
        if 'dragonform' in adv.conf:
            # The adventurer defines its own dragonform and supplies ds_proc.
            name = type(adv).__name__
            dconf = Conf(self.default_dragonform)
            dconf += adv.conf.dragonform
            dconf.gauge_iv = gauge_iv
            self.adv.dragonform = DragonForm(name, dconf, adv, adv.ds_proc)
        else:
            name = type(self).__name__
            dconf = Conf({**self.default_dragonform, **self.dragonform})
            dconf.gauge_iv = gauge_iv
            self.adv.dragonform = DragonForm(name, dconf, adv, self.ds_proc)
class Amuletempty(object):
    """Placeholder for an unequipped second wyrmprint; every hook is a no-op."""

    stype = 'a2'

    def oninit(self, adv):
        """No-op: nothing to register."""
        return None

    def setup(self, c, adv):
        """No-op: nothing to bind."""
        return None
class AmuletBase(Slot):
    """Base class for a wyrmprint slot; '+' pairs a second print onto it."""

    ae = Amuletempty()
    stype = 'a'
    a2 = None

    def __add__(self, another):
        """Attach *another* wyrmprint as the secondary (a2) print."""
        if type(self) is type(another):
            raise ValueError('Cannot equip two of the same wyrmprint')
        another.stype = 'a2'
        self.a2 = another
        return self

    def oninit(self, adv):
        """Initialize this print and, when present, the paired secondary."""
        Slot.oninit(self, adv)
        secondary = self.a2
        if secondary:
            secondary.a2 = None
            secondary.oninit(adv)
class Slots(object):
    """Container for the equipment slots (character, weapon, dragon, prints).

    Slot setup mutates the slot objects, so oninit/att work on a deepcopy
    (kept on self.tmp) and leave the originals pristine.
    """

    def __str__(self):
        r = str(self.c) + '\n'
        r += str(self.d) + '\n'
        r += str(self.w) + '\n'
        r += str(self.a) + '\n'
        r += str(self.a.a2) + '\n'
        return r

    def __init__(self):
        # Only the character slot has a usable default; the rest must be
        # assigned by the caller before setup.
        self.c = CharacterBase()
        self.w = None
        self.d = None
        self.a = None

    def __setup(self, adv):
        # Bind each slot to the character (the weapon also needs the adventurer).
        self.c.setup()
        self.w.setup(self.c, adv)
        self.d.setup(self.c)
        self.a.setup(self.c)

    def oninit(self, adv):
        """Set up a working copy of the slots and register their abilities."""
        tmp = copy.deepcopy(self)
        self.tmp = tmp
        tmp.__setup(adv)
        tmp.c.oninit(adv)
        tmp.w.oninit(adv)
        tmp.d.oninit(adv)
        tmp.a.oninit(adv)
        # Instantiate ability objects per slot, keyed by ability name; the
        # ability class is looked up by the name's prefix before '_'.
        self.abilities = {'c': {}, 'w': {}, 'd': {}, 'a': {}}
        for afrom, alist in [('c', tmp.c.a), ('w', tmp.w.a), ('d', tmp.d.a), ('a', tmp.a.a)]:
            for ab in alist:
                name = ab[0]
                if '_' in name:
                    acat = name.split('_')[0]
                else:
                    acat = name
                self.abilities[afrom][name] = ability_dict[acat](*ab)
                self.abilities[afrom][name].oninit(adv, afrom)

    def att(self, forte=None):
        """Total attack across slots; *forte* applies augment multipliers."""
        tmp = copy.deepcopy(self)
        self.tmp = tmp
        tmp.__setup(None)
        if not forte:
            return tmp.c.att + tmp.d.att + tmp.w.att + tmp.a.att
        # +100 / +200 are flat augment bonuses on character / wyrmprint att.
        return (tmp.c.att+100)*forte.c(tmp.c.ele,tmp.c.wt) + tmp.d.att*forte.d(tmp.d.ele) + tmp.w.att + (tmp.a.att+200)
import slot.d as d
import slot.w as w
import slot.a as a
| 26.709559 | 119 | 0.494701 | import copy
from core import Conf
from ability import Ability, ability_dict
from itertools import islice
class Slot(object):
att = 0
ele = 'none'
wt = 'none'
stype = 'slot'
onele = 0
a = None
mod = None
conf = None
def __init__(self):
if not self.mod:
self.mod = []
if not self.conf:
self.conf = Conf()
if not self.a:
self.a = []
self.name = type(self).__name__
def setup(self, c):
if c.ele == self.ele :
self.onele = 1
if self.wt != 'none' and c.wt != self.wt:
raise ValueError('Wrong weapon type, expected {} but got {}'.format(self.wt, c.wt))
def oninit(self, adv):
adv.conf(self.conf)
i = self.stype
j = self.mod
if type(j) == tuple:
adv.Modifier(i,*j)
elif type(j) == list:
idx = 0
for k in j:
adv.Modifier(i+'_%d'%idx,*k)
idx += 1
elif type(j) == dict:
idx = 0
for k in j:
adv.Modifier(i+k+'_%d'%idx,*j[k])
idx += 1
class CharacterBase(Slot):
name = 'null'
stars = 5
max_coab = 4
def __init__(self):
super().__init__()
self.coabs = {}
def setup(self):
return
def oninit(self, adv):
Slot.oninit(self, adv)
count = 0
ex_set = set()
coabs = list(islice(self.coabs.items(), self.max_coab))
self.coabs = {}
for key, coab in coabs:
self.coabs[key] = coab
chain, ex = coab
if ex:
ex_set.add(('ex', ex))
if chain:
self.a.append(tuple(chain))
count += 1
self.a.extend(ex_set)
def has_ex(self, ex):
for _, coab in self.coabs.items():
if ex == coab[1]:
return True
return False
class WeaponBase(Slot):
stype = 'w'
wt = 'none'
s3 = Conf()
ele = []
def setup(self, c, adv):
super(WeaponBase, self).setup(c)
if type(self.ele) == list:
for i in self.ele:
if c.ele == i :
self.onele = 1
break
if self.onele:
self.att *= 1.5
if adv is not None and adv.s3.owner is None:
self.conf.s3 = Conf(self.s3)
elif 'all' in self.ele:
if adv is not None and adv.s3.owner is None:
self.conf.s3 = Conf(self.s3)
if self.wt == 'axe':
self.mod.append(('crit','chance',0.04))
else :
self.mod.append(('crit','chance',0.02))
def s3_proc(self, adv, e):
pass
class DragonBase(Slot):
stype = 'd'
a = [('a', 0.60)]
default_dragonform = {
'duration': 600 / 60,
'dracolith': 0.40,
'exhilaration': 0,
'skill_use': 1,
'gauge_iv': 15,
'gauge_val': 10,
'latency': 0,
'act': 'end',
'dshift.startup': 96 / 60,
'dshift.recovery': 0 / 60,
'dshift.dmg': 2.00,
'dshift.hit': 1,
'dx1.recovery': 0,
'dx2.recovery': 0,
'dx3.recovery': 0,
'dx4.recovery': 0,
'dx5.recovery': 0,
'ds.startup': 0,
'dodge.startup': 40 / 60,
'dodge.recovery': 0,
'dodge.hit': 0,
'end.startup': 0,
'end.recovery': 0
}
dragonform = {}
def setup(self, c):
Slot.setup(self, c)
if self.onele:
self.att *= 1.5
else:
self.a = []
def ds_proc(self):
try:
return self.adv.dmg_make('ds',self.adv.dragonform.conf.ds.dmg,'s')
except:
return 0
def oninit(self, adv):
super().oninit(adv)
gauge_iv = min(int(adv.duration/12), 15)
from core.dragonform import DragonForm
self.adv = adv
if 'dragonform' in adv.conf:
name = type(adv).__name__
dconf = Conf(self.default_dragonform)
dconf += adv.conf.dragonform
dconf.gauge_iv = gauge_iv
self.adv.dragonform = DragonForm(name, dconf, adv, adv.ds_proc)
else:
name = type(self).__name__
dconf = Conf({**self.default_dragonform, **self.dragonform})
dconf.gauge_iv = gauge_iv
self.adv.dragonform = DragonForm(name, dconf, adv, self.ds_proc)
class Amuletempty(object):
stype = 'a2'
def oninit(self,adv):
return
def setup(self, c, adv):
return
class AmuletBase(Slot):
ae = Amuletempty()
stype = 'a'
a2 = None
def __add__(self, another):
if type(self) is type(another):
raise ValueError('Cannot equip two of the same wyrmprint')
self.a2 = another
self.a2.stype = 'a2'
return self
def oninit(self, adv):
Slot.oninit(self, adv)
if self.a2:
self.a2.a2 = None
self.a2.oninit(adv)
class Slots(object):
def __str__(self):
r = str(self.c) + '\n'
r += str(self.d) + '\n'
r += str(self.w) + '\n'
r += str(self.a) + '\n'
r += str(self.a.a2) + '\n'
return r
def __init__(self):
self.c = CharacterBase()
self.w = None
self.d = None
self.a = None
def __setup(self, adv):
self.c.setup()
self.w.setup(self.c, adv)
self.d.setup(self.c)
self.a.setup(self.c)
def oninit(self, adv):
tmp = copy.deepcopy(self)
self.tmp = tmp
tmp.__setup(adv)
tmp.c.oninit(adv)
tmp.w.oninit(adv)
tmp.d.oninit(adv)
tmp.a.oninit(adv)
self.abilities = {'c':{}, 'w':{}, 'd':{}, 'a':{}}
for afrom, alist in [('c', tmp.c.a), ('w', tmp.w.a), ('d', tmp.d.a), ('a', tmp.a.a)]:
for ab in alist:
name = ab[0]
if '_' in name:
acat = name.split('_')[0]
else:
acat = name
self.abilities[afrom][name] = ability_dict[acat](*ab)
self.abilities[afrom][name].oninit(adv, afrom)
def att(self, forte=None):
tmp = copy.deepcopy(self)
self.tmp = tmp
tmp.__setup(None)
if not forte:
return tmp.c.att + tmp.d.att + tmp.w.att + tmp.a.att
return (tmp.c.att+100)*forte.c(tmp.c.ele,tmp.c.wt) + tmp.d.att*forte.d(tmp.d.ele) + tmp.w.att + (tmp.a.att+200)
import slot.d as d
import slot.w as w
import slot.a as a
| true | true |
1c2de30774589ca44009bc61cb6c73fe5ddc8883 | 2,464 | py | Python | src/directional_clustering/transformations/smooth.py | arpastrana/directional_clustering | 78fd39fe4ad207b2a639deddf4ba12d5580df5c6 | [
"MIT"
] | 6 | 2020-08-04T15:24:22.000Z | 2022-02-02T21:34:33.000Z | src/directional_clustering/transformations/smooth.py | arpastrana/apc524_directional_clustering | 9a53312c2ff983778253185f0f2946cd74e2bbd2 | [
"MIT"
] | 30 | 2020-11-12T17:13:30.000Z | 2020-12-15T16:45:33.000Z | src/directional_clustering/transformations/smooth.py | arpastrana/directional_clustering | 78fd39fe4ad207b2a639deddf4ba12d5580df5c6 | [
"MIT"
] | 3 | 2020-11-06T14:25:47.000Z | 2020-11-07T15:03:05.000Z | from compas.geometry import add_vectors
from compas.geometry import subtract_vectors
from compas.geometry import scale_vector
__all__ = ["smoothen_vector_field",
"adjacent_vectors",
"mean_vector",
"smoothed_vector"]
def smoothen_vector_field(vector_field, adjacency, iters, damping=0.5):
    """
    Apply Laplacian smoothing to a vector field, in place.

    Parameters
    ----------
    vector_field : `directional_clustering.clustering.VectorField`
        A vector field.
    adjacency : `dict`
        Maps every field key to the keys neighboring it.
    iters : `int`
        Number of smoothing rounds to run.
    damping : `float`, optional.
        Coefficient between 0.0 and 1.0 that damps each update;
        0.0 moves a vector fully onto its neighborhood mean, while
        1.0 leaves it unchanged.
        Defaults to 0.5.

    Notes
    -----
    Modifies vector field in place.
    """
    assert vector_field.size() == len(adjacency)

    for _ in range(iters):
        # Compute the whole round first so updates do not feed into each other.
        updated = {}
        for key in vector_field.keys():
            current = vector_field.vector(key)
            nbrs = adjacency[key]
            if not nbrs:
                updated[key] = current
            else:
                neighborhood = mean_vector(adjacent_vectors(vector_field, nbrs))
                updated[key] = smoothed_vector(current, neighborhood, damping)

        # Write the round back into the field.
        for key, vector in updated.items():
            vector_field.add_vector(key, vector)
def adjacent_vectors(vector_field, neighbors):
    """
    Collect the vectors stored at the given neighbor keys.
    """
    vectors = []
    for key in neighbors:
        vectors.append(vector_field.vector(key))
    return vectors
def mean_vector(vectors):
    """
    Average a non-empty sequence of vectors.

    Raises ValueError when the sequence is empty.
    """
    if not vectors:
        raise ValueError("Sequence of vectors is empty")
    total = [0.0, 0.0, 0.0]
    for vector in vectors:
        total = add_vectors(vector, total)
    return scale_vector(total, 1.0 / len(vectors))
def smoothed_vector(vector, s_vector, damping):
    """
    Apply one Laplacian smoothing step, blending *vector* toward *s_vector*.

    damping must lie in [0.0, 1.0]: 0.0 moves fully onto s_vector, while
    1.0 returns the input vector unchanged.
    """
    # Validate explicitly instead of with assert, which is stripped under -O.
    if not 0.0 <= damping <= 1.0:
        raise ValueError("damping must be between 0.0 and 1.0")
    difference = subtract_vectors(s_vector, vector)
    step = scale_vector(difference, 1.0 - damping)
    return add_vectors(vector, step)
if __name__ == "__main__":
pass
| 25.936842 | 80 | 0.648945 | from compas.geometry import add_vectors
from compas.geometry import subtract_vectors
from compas.geometry import scale_vector
__all__ = ["smoothen_vector_field",
"adjacent_vectors",
"mean_vector",
"smoothed_vector"]
def smoothen_vector_field(vector_field, adjacency, iters, damping=0.5):
assert vector_field.size() == len(adjacency)
for _ in range(iters):
smoothed_vectors = {}
for key in vector_field.keys():
vector = vector_field.vector(key)
neighbors = adjacency[key]
if not neighbors:
smoothed_vectors[key] = vector
continue
adj_vector = mean_vector(adjacent_vectors(vector_field, neighbors))
smoothed_vectors[key] = smoothed_vector(vector, adj_vector, damping)
for key in vector_field.keys():
vector_field.add_vector(key, smoothed_vectors[key])
def adjacent_vectors(vector_field, neighbors):
return [vector_field.vector(key) for key in neighbors]
def mean_vector(vectors):
if not vectors:
raise ValueError("Sequence of vectors is empty")
m_vector = [0.0, 0.0, 0.0]
for vector in vectors:
m_vector = add_vectors(vector, m_vector)
return scale_vector(m_vector, 1.0 / len(vectors))
def smoothed_vector(vector, s_vector, damping):
assert damping <= 1.0
assert damping >= 0.0
difference = subtract_vectors(s_vector, vector)
s_vector = scale_vector(difference, 1.0 - damping)
return add_vectors(vector, s_vector)
if __name__ == "__main__":
pass
| true | true |
1c2de324ee2d6f537bdcbce01218360cf5959274 | 8,551 | py | Python | spytest/apis/system/logging.py | shubav/sonic-mgmt | 0ff71b907a55489bb4ed7d17b1682380fd459bf2 | [
"Apache-2.0"
] | 132 | 2016-10-19T12:34:44.000Z | 2022-03-16T09:00:39.000Z | spytest/apis/system/logging.py | shubav/sonic-mgmt | 0ff71b907a55489bb4ed7d17b1682380fd459bf2 | [
"Apache-2.0"
] | 3,152 | 2016-09-21T23:05:58.000Z | 2022-03-31T23:29:08.000Z | spytest/apis/system/logging.py | shubav/sonic-mgmt | 0ff71b907a55489bb4ed7d17b1682380fd459bf2 | [
"Apache-2.0"
] | 563 | 2016-09-20T01:00:15.000Z | 2022-03-31T22:43:54.000Z | # This file contains the list of API's which performs logging / Syslog operations.
# Author : Prudvi Mangadu (prudvi.mangadu@broadcom.com)
import re
import json
from spytest import st, putils
import apis.system.connection as conf_obj
import apis.system.switch_configuration as sc_obj
import utilities.utils as utils
from utilities.common import make_list
log_files = [r'/var/log/syslog', r'/var/log/syslog.1']
def show_logging(dut, severity=None, filter_list=None, lines=None, cli_type=""):
    """
    To get logs from DUT.
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    :param dut:
    :param severity: string grep'd against the log output
    :param filter_list: string or list of strings, each grep'd in turn
    :param lines: limit on the number of log lines
    :param cli_type:
    :return: list of matching log lines
    """
    # NOTE: this docstring used to sit after the first statement, where it
    # was a no-op string expression rather than a real docstring.
    cli_type = st.get_ui_type(dut, cli_type=cli_type)
    if filter_list is None:
        filter_list = []
    filter_list = list(filter_list) if isinstance(filter_list, list) else [filter_list]
    # rest/klish are folded into click here, so the klish branches below are
    # currently unreachable; kept for when klish is handled natively.
    cli_type = 'click' if cli_type in ['rest-patch', 'rest-put', 'klish'] else cli_type
    command = "show logging"
    if lines:
        if cli_type == 'click':
            command += " -l {}".format(lines)
        elif cli_type == 'klish':
            command += " lines {}".format(lines)  # was missing the leading space
    if severity:
        command += " | grep '{}'".format(severity)
    for each_filter in filter_list:
        if cli_type == 'click':
            command += " | grep -i '{}'".format(each_filter)
        elif cli_type == 'klish':
            command += " | grep '{}'".format(each_filter)
    output = st.show(dut, command, skip_tmpl=True, skip_error_check=True, faster_cli=False, max_time=1200)
    # Drop the trailing line (presumably a prompt artifact) and any stray
    # quote-only entries left by the CLI capture.
    out_list = output.strip().split('\n')[:-1]
    for _ in range(out_list.count("'")):
        out_list.remove("'")
    return out_list
def get_logging_count(dut, severity=None, filter_list=None):
    """
    Return the number of log lines, optionally filtered.
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    :param dut:
    :param severity:
    :param filter_list:
    :return: int count of matching log lines
    """
    if severity or filter_list:
        return len(show_logging(dut, severity, filter_list, lines=None))
    # Unfiltered: count the raw syslog files directly with wc, which is cheaper.
    command = "sudo wc -l {} | grep total".format(' '.join(log_files))
    wc_output = st.config(dut, command)
    counts = re.findall(r'\d+', wc_output)
    return int(counts[0]) if counts else 0
def set_logging_severity(dut, **kwargs):
    """
    Set the swss log level, globally or per component.
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    :param dut:
    :param severity: mandatory keyword argument
    :param comp: optional component name or list of component names
    :return: False when severity is missing, True otherwise
    """
    if 'severity' not in kwargs:
        st.log("API: Mandatory parameter 'severity' is not provied.")
        return False
    level = kwargs['severity'].upper()
    if 'comp' in kwargs:
        comps = kwargs['comp'] if isinstance(kwargs['comp'], list) else [kwargs['comp']]
        command = ''.join("swssloglevel -l {} -c {}\n".format(level, comp) for comp in comps)
    else:
        command = "swssloglevel -l {} -a".format(level)
    st.config(dut, command)
    return True
def clear_logging(dut, thread=True):
    """
    Truncate all syslog files on one or more DUTs.
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    :param dut: single DUT or list of DUTs
    :param thread: run the per-DUT work in parallel when True
    :return: True when every DUT succeeded
    """
    def _truncate_logs(device):
        for log_file in log_files:
            st.config(device, "sudo truncate -s 0 {}".format(log_file))
        return True

    duts = utils.make_list(dut)
    out, _ = putils.exec_foreach(thread, duts, _truncate_logs)
    return False not in out
def write_logging(dut, message):
    """
    Write a message into the syslog via the logger utility.
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    :param dut:
    :param message:
    :return: True
    """
    st.config(dut, "logger {}".format(message))
    return True
def check_unwanted_logs_in_logging(dut, user_filter=None):
    """
    Check that no unwanted hardware-error logs are present.
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    :param dut:
    :param user_filter: extra string or list of strings to scan for
    :return: True when no unwanted log was found
    """
    result = True
    static_filter = ['i2c', 'fan', 'power']
    # Parenthesized for clarity; 'pattern' avoids shadowing the builtin 'filter'.
    over_all_filter = (static_filter + make_list(user_filter)) if user_filter else static_filter
    for pattern in over_all_filter:
        temp_count = get_logging_count(dut, filter_list=pattern)
        st.debug("{} - logs found on the error string '{}'".format(temp_count, pattern))
        if not temp_count:
            continue
        if pattern == 'fan':
            # 'fan' hits are tolerated only when every matching line is the
            # known benign system-monitor memory entry for fand.
            allowed = ["INFO system#monitor: MEM :: Name:fand"]
            for log in show_logging(dut, filter_list=pattern):
                if not any(fil.lower() in log.lower() for fil in allowed):
                    result = False
        else:
            result = False
    return result
def config_syslog_server(dut, ipaddress_list):
    """
    Configure one or more syslog servers via config-DB JSON.
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    :param dut:
    :param ipaddress_list: single address or list of addresses
    :return: True
    """
    addresses = list(ipaddress_list) if isinstance(ipaddress_list, list) else [ipaddress_list]
    st.log("Adding syslog server(s)")
    # SYSLOG_SERVER entries carry no attributes, only the address keys.
    payload = {'SYSLOG_SERVER': {address: {} for address in addresses}}
    st.apply_json(dut, json.dumps(payload))
    return True
def get_syslog_server(dut):
    """
    Fetch the configured syslog servers from the running config.
    Author: Prudvi Mangadu (prudvi.mangadu@broadcom.com)
    :param dut:
    :return: running-config entries under SYSLOG_SERVER
    """
    return sc_obj.get_running_config(dut, 'SYSLOG_SERVER')
def clear_syslog_from_remote_server(dut):
    """
    Clear the logs from the syslog server
    Author: Chaitanya Lohith Bollapragada (chaitanyalohith.bollapragada@broadcom.com)
    :param dut: used only to look up the syslog service parameters
    :return: True
    """
    # Connection details for the remote syslog host come from service params.
    syslog_ip = utils.ensure_service_params(dut, "syslog", "ip")
    syslog_port = utils.ensure_service_params(dut, "syslog", "port")
    syslog_username = utils.ensure_service_params(dut, "syslog", "username")
    syslog_password = utils.ensure_service_params(dut, "syslog", "password")
    syslog_path = utils.ensure_service_params(dut, "syslog", "path")
    # Truncate the remote log file in place rather than deleting it.
    command = "sudo truncate -s 0 {}".format(syslog_path)
    syslog_con_obj = conf_obj.connect_to_device(syslog_ip, syslog_username, syslog_password, port=syslog_port)
    conf_obj.execute_command(syslog_con_obj, command)
    return True
def get_syslog_from_remote_server(dut, severity=None, filter_list=None, lines=None):
    """
    Get the logs from the syslog server
    Author: Chaitanya Lohith Bollapragada (chaitanyalohith.bollapragada@broadcom.com)
    :param dut: used only to look up the syslog service parameters
    :param severity: string grep'd against the remote log file
    :param filter_list: string or list of strings, each grep'd in turn
    :param lines: when set, only the last N matching lines are returned
    :return: raw command output from the remote host
    """
    # Connection details for the remote syslog host come from service params.
    syslog_ip = utils.ensure_service_params(dut, "syslog", "ip")
    syslog_port = utils.ensure_service_params(dut, "syslog", "port")
    syslog_username = utils.ensure_service_params(dut, "syslog", "username")
    syslog_password = utils.ensure_service_params(dut, "syslog", "password")
    syslog_path = utils.ensure_service_params(dut, "syslog", "path")
    if filter_list is None:
        filter_list = []
    filter_list = list(filter_list) if isinstance(filter_list, list) else [filter_list]
    # Build a cat | grep ... | tail pipeline executed on the remote host.
    command = "cat {}".format(syslog_path)
    if severity:
        command += " | grep '{}'".format(severity)
    for each_filter in filter_list:
        command += " | grep '{}'".format(each_filter)
    if lines:
        command += "| tail -n {} ".format(lines)
    syslog_con_obj = conf_obj.connect_to_device(syslog_ip, syslog_username, syslog_password, port=syslog_port)
    syslog_file_contents = conf_obj.execute_command(syslog_con_obj, command)
    return syslog_file_contents
def sonic_clear(dut, skip_error_check=True):
    """Run 'sonic-clear logging' when the platform supports that command."""
    if not st.is_feature_supported("sonic-clear-logging-command", dut):
        return
    st.config(dut, "sonic-clear logging", skip_error_check=skip_error_check)
def check_for_logs_after_reboot(dut, severity=None, log_severity=[], except_logs=[]):
    # Scan the DUT log for entries whose parsed severity token is not in
    # log_severity; such entries are only tolerated when allowed by except_logs.
    # NOTE(review): mutable default arguments are shared across calls - safe
    # only while callers never mutate them; consider None defaults.
    output = show_logging(dut, severity)
    for log in output:
        # Pull the token following 'sonic' out of each log line.
        results = re.findall(r".*.*sonic\s*(\S+)\s*.*", log)
        retval = [result in log_severity for result in results]
        if not all(retval):
            # NOTE(review): with an empty except_logs this inner loop never
            # runs, so an unexpected log is silently accepted; and the first
            # non-matching except_log returns False without checking the
            # rest - confirm this is the intended semantics.
            for except_log in except_logs:
                if not except_log.lower() in log.lower():
                    st.error('Unexpected log: {}'.format(log))
                    return False
                else:
                    continue
    return True
| 32.637405 | 110 | 0.652672 |
# Author : Prudvi Mangadu (prudvi.mangadu@broadcom.com)
import re
import json
from spytest import st, putils
import apis.system.connection as conf_obj
import apis.system.switch_configuration as sc_obj
import utilities.utils as utils
from utilities.common import make_list
log_files = [r'/var/log/syslog', r'/var/log/syslog.1']
def show_logging(dut, severity=None, filter_list=None, lines=None, cli_type=""):
cli_type = st.get_ui_type(dut, cli_type=cli_type)
if filter_list is None:
filter_list = []
filter_list = list(filter_list) if isinstance(filter_list, list) else [filter_list]
cli_type = 'click' if cli_type in ['rest-patch', 'rest-put', 'klish'] else cli_type
command = "show logging"
if lines:
if cli_type == 'click':
command += " -l {}".format(lines)
elif cli_type == 'klish':
command += "lines {}".format(lines)
if severity:
command += " | grep '{}'".format(severity)
for each_filter in filter_list:
if cli_type == 'click':
command += " | grep -i '{}'".format(each_filter)
elif cli_type == 'klish':
command += " | grep '{}'".format(each_filter)
output = st.show(dut, command, skip_tmpl=True, skip_error_check=True, faster_cli=False, max_time=1200)
out_list = output.strip().split('\n')[:-1]
for _ in range(out_list.count("'")):
out_list.remove("'")
return out_list
def get_logging_count(dut, severity=None, filter_list=None):
if not severity and not filter_list:
command = "sudo wc -l {} | grep total".format(' '.join(log_files))
output = st.config(dut, command)
output2 = re.findall(r'\d+', output)
return int(output2[0]) if output2 else 0
else:
return len(show_logging(dut, severity, filter_list, lines=None))
def set_logging_severity(dut, **kwargs):
if 'severity' not in kwargs:
st.log("API: Mandatory parameter 'severity' is not provied.")
return False
command = "swssloglevel -l {} -a".format(kwargs['severity'].upper())
if 'comp' in kwargs:
command = ''
comp_li = list( kwargs['comp']) if isinstance(kwargs['comp'], list) else [kwargs['comp']]
for each_comp in comp_li:
command += "swssloglevel -l {} -c {}\n".format(kwargs['severity'].upper(),each_comp)
st.config(dut, command)
return True
def clear_logging(dut, thread=True):
def _clear_logging(dut):
for each_log in log_files:
command = "sudo truncate -s 0 {}".format(each_log)
st.config(dut, command)
return True
dut_li = utils.make_list(dut)
[out, _] = putils.exec_foreach(thread, dut_li, _clear_logging)
return False if False in out else True
def write_logging(dut, message):
command = "logger {}".format(message)
st.config(dut, command)
return True
def check_unwanted_logs_in_logging(dut, user_filter=None):
result = True
static_filter = ['i2c', 'fan', 'power']
over_all_filter = static_filter + make_list(user_filter) if user_filter else static_filter
for filter in over_all_filter:
temp_count = get_logging_count(dut, filter_list=filter)
st.debug("{} - logs found on the error string '{}'".format(temp_count, filter))
if temp_count:
if filter == 'fan':
filters = ["INFO system#monitor: MEM :: Name:fand"]
logs = show_logging(dut, filter_list=filter)
for log in logs:
if not any(fil.lower() in log.lower() for fil in filters):
result = False
else:
result = False
return result
def config_syslog_server(dut, ipaddress_list):
ipaddress_li = list(ipaddress_list) if isinstance(ipaddress_list, list) else [ipaddress_list]
st.log("Adding syslog server(s)")
temp_local_data = {}
syslog_local_final = {}
for each_address in ipaddress_li:
temp_local_data[each_address] = {}
syslog_local_final['SYSLOG_SERVER'] = temp_local_data
syslog_local_final_json = json.dumps(syslog_local_final)
st.apply_json(dut, syslog_local_final_json)
return True
def get_syslog_server(dut):
output = sc_obj.get_running_config(dut, 'SYSLOG_SERVER')
return output
def clear_syslog_from_remote_server(dut):
syslog_ip = utils.ensure_service_params(dut, "syslog", "ip")
syslog_port = utils.ensure_service_params(dut, "syslog", "port")
syslog_username = utils.ensure_service_params(dut, "syslog", "username")
syslog_password = utils.ensure_service_params(dut, "syslog", "password")
syslog_path = utils.ensure_service_params(dut, "syslog", "path")
command = "sudo truncate -s 0 {}".format(syslog_path)
syslog_con_obj = conf_obj.connect_to_device(syslog_ip, syslog_username, syslog_password, port=syslog_port)
conf_obj.execute_command(syslog_con_obj, command)
return True
def get_syslog_from_remote_server(dut, severity=None, filter_list=None, lines=None):
syslog_ip = utils.ensure_service_params(dut, "syslog", "ip")
syslog_port = utils.ensure_service_params(dut, "syslog", "port")
syslog_username = utils.ensure_service_params(dut, "syslog", "username")
syslog_password = utils.ensure_service_params(dut, "syslog", "password")
syslog_path = utils.ensure_service_params(dut, "syslog", "path")
if filter_list is None:
filter_list = []
filter_list = list(filter_list) if isinstance(filter_list, list) else [filter_list]
command = "cat {}".format(syslog_path)
if severity:
command += " | grep '{}'".format(severity)
for each_filter in filter_list:
command += " | grep '{}'".format(each_filter)
if lines:
command += "| tail -n {} ".format(lines)
syslog_con_obj = conf_obj.connect_to_device(syslog_ip, syslog_username, syslog_password, port=syslog_port)
syslog_file_contents = conf_obj.execute_command(syslog_con_obj, command)
return syslog_file_contents
def sonic_clear(dut, skip_error_check=True):
if st.is_feature_supported("sonic-clear-logging-command", dut):
st.config(dut, "sonic-clear logging", skip_error_check=skip_error_check)
def check_for_logs_after_reboot(dut, severity=None, log_severity=[], except_logs=[]):
output = show_logging(dut, severity)
for log in output:
results = re.findall(r".*.*sonic\s*(\S+)\s*.*", log)
retval = [result in log_severity for result in results]
if not all(retval):
for except_log in except_logs:
if not except_log.lower() in log.lower():
st.error('Unexpected log: {}'.format(log))
return False
else:
continue
return True
| true | true |
1c2de8c8c223f883eb5c6e24df71e2795c55607e | 45,417 | py | Python | env/lib/python3.8/site-packages/pandas/tests/groupby/test_categorical.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 1,738 | 2017-09-21T10:59:12.000Z | 2022-03-31T21:05:46.000Z | env/lib/python3.8/site-packages/pandas/tests/groupby/test_categorical.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 427 | 2017-09-29T22:54:36.000Z | 2022-02-15T19:26:50.000Z | env/lib/python3.8/site-packages/pandas/tests/groupby/test_categorical.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 671 | 2017-09-21T08:04:01.000Z | 2022-03-29T14:30:07.000Z | from datetime import datetime
import numpy as np
import pytest
from pandas.compat import PY37
import pandas as pd
from pandas import (
Categorical,
CategoricalIndex,
DataFrame,
Index,
MultiIndex,
Series,
qcut,
)
import pandas._testing as tm
def cartesian_product_for_groupers(result, args, names):
    """Reindex ``result`` to the full cartesian product of the groupers,
    preserving the Categorical nature of each grouper."""

    def _expand(grouper):
        # For a categorical grouper, build one code per category so that
        # unobserved categories are part of the product as well.
        if not isinstance(grouper, (CategoricalIndex, Categorical)):
            return grouper
        cats = grouper.categories
        return Categorical.from_codes(
            np.arange(len(cats)), categories=cats, ordered=grouper.ordered
        )

    full_index = MultiIndex.from_product([_expand(a) for a in args], names=names)
    return result.reindex(full_index).sort_index()
def test_apply_use_categorical_name(df):
    """Grouping by a ``qcut`` result names the index after column "C"."""
    cats = qcut(df.C, 4)

    def get_stats(group):
        # Per-group summary dict, used only to exercise apply().
        return {
            "min": group.min(),
            "max": group.max(),
            "count": group.count(),
            "mean": group.mean(),
        }

    result = df.groupby(cats, observed=False).D.apply(get_stats)
    assert result.index.names[0] == "C"
def test_basic():
    """Smoke tests for groupby over Categorical keys: aggregation, transform,
    filter, apply, and describe keep (and expand) the categorical index
    correctly (GH 8623, 9921, 9603, 10460)."""
    cats = Categorical(
        ["a", "a", "a", "b", "b", "b", "c", "c", "c"],
        categories=["a", "b", "c", "d"],
        ordered=True,
    )
    data = DataFrame({"a": [1, 1, 1, 2, 2, 2, 3, 4, 5], "b": cats})

    # unobserved category "d" shows up as NaN in the mean
    exp_index = CategoricalIndex(list("abcd"), name="b", ordered=True)
    expected = DataFrame({"a": [1, 2, 4, np.nan]}, index=exp_index)
    result = data.groupby("b", observed=False).mean()
    tm.assert_frame_equal(result, expected)

    cat1 = Categorical(["a", "a", "b", "b"], categories=["a", "b", "z"], ordered=True)
    cat2 = Categorical(["c", "d", "c", "d"], categories=["c", "d", "y"], ordered=True)
    df = DataFrame({"A": cat1, "B": cat2, "values": [1, 2, 3, 4]})

    # single grouper
    gb = df.groupby("A", observed=False)
    exp_idx = CategoricalIndex(["a", "b", "z"], name="A", ordered=True)
    expected = DataFrame({"values": Series([3, 7, 0], index=exp_idx)})
    result = gb.sum()
    tm.assert_frame_equal(result, expected)

    # GH 8623
    x = DataFrame(
        [[1, "John P. Doe"], [2, "Jane Dove"], [1, "John P. Doe"]],
        columns=["person_id", "person_name"],
    )
    x["person_name"] = Categorical(x.person_name)

    g = x.groupby(["person_id"], observed=False)
    result = g.transform(lambda x: x)
    tm.assert_frame_equal(result, x[["person_name"]])

    result = x.drop_duplicates("person_name")
    expected = x.iloc[[0, 1]]
    tm.assert_frame_equal(result, expected)

    def f(x):
        return x.drop_duplicates("person_name").iloc[0]

    result = g.apply(f)
    expected = x.iloc[[0, 1]].copy()
    expected.index = Index([1, 2], name="person_id")
    expected["person_name"] = expected["person_name"].astype("object")
    tm.assert_frame_equal(result, expected)

    # GH 9921
    # Monotonic
    df = DataFrame({"a": [5, 15, 25]})
    c = pd.cut(df.a, bins=[0, 10, 20, 30, 40])

    result = df.a.groupby(c, observed=False).transform(sum)
    tm.assert_series_equal(result, df["a"])

    tm.assert_series_equal(
        df.a.groupby(c, observed=False).transform(lambda xs: np.sum(xs)), df["a"]
    )
    tm.assert_frame_equal(df.groupby(c, observed=False).transform(sum), df[["a"]])
    tm.assert_frame_equal(
        df.groupby(c, observed=False).transform(lambda xs: np.max(xs)), df[["a"]]
    )

    # Filter
    tm.assert_series_equal(df.a.groupby(c, observed=False).filter(np.all), df["a"])
    tm.assert_frame_equal(df.groupby(c, observed=False).filter(np.all), df)

    # Non-monotonic
    df = DataFrame({"a": [5, 15, 25, -5]})
    c = pd.cut(df.a, bins=[-10, 0, 10, 20, 30, 40])

    result = df.a.groupby(c, observed=False).transform(sum)
    tm.assert_series_equal(result, df["a"])

    tm.assert_series_equal(
        df.a.groupby(c, observed=False).transform(lambda xs: np.sum(xs)), df["a"]
    )
    tm.assert_frame_equal(df.groupby(c, observed=False).transform(sum), df[["a"]])
    tm.assert_frame_equal(
        df.groupby(c, observed=False).transform(lambda xs: np.sum(xs)), df[["a"]]
    )

    # GH 9603
    df = DataFrame({"a": [1, 0, 0, 0]})
    c = pd.cut(df.a, [0, 1, 2, 3, 4], labels=Categorical(list("abcd")))
    result = df.groupby(c, observed=False).apply(len)

    exp_index = CategoricalIndex(c.values.categories, ordered=c.values.ordered)
    expected = Series([1, 0, 0, 0], index=exp_index)
    expected.index.name = "a"
    tm.assert_series_equal(result, expected)

    # more basic
    levels = ["foo", "bar", "baz", "qux"]
    codes = np.random.randint(0, 4, size=100)

    cats = Categorical.from_codes(codes, levels, ordered=True)

    data = DataFrame(np.random.randn(100, 4))

    result = data.groupby(cats, observed=False).mean()

    expected = data.groupby(np.asarray(cats), observed=False).mean()
    exp_idx = CategoricalIndex(levels, categories=cats.categories, ordered=True)
    expected = expected.reindex(exp_idx)

    tm.assert_frame_equal(result, expected)

    grouped = data.groupby(cats, observed=False)
    desc_result = grouped.describe()

    idx = cats.codes.argsort()
    ord_labels = np.asarray(cats).take(idx)
    ord_data = data.take(idx)

    exp_cats = Categorical(
        ord_labels, ordered=True, categories=["foo", "bar", "baz", "qux"]
    )
    expected = ord_data.groupby(exp_cats, sort=False, observed=False).describe()
    tm.assert_frame_equal(desc_result, expected)

    # GH 10460
    expc = Categorical.from_codes(np.arange(4).repeat(8), levels, ordered=True)
    exp = CategoricalIndex(expc)
    tm.assert_index_equal((desc_result.stack().index.get_level_values(0)), exp)
    exp = Index(["count", "mean", "std", "min", "25%", "50%", "75%", "max"] * 4)
    tm.assert_index_equal((desc_result.stack().index.get_level_values(1)), exp)
def test_level_get_group(observed):
    """get_group on a CategoricalIndex level slices the frame correctly
    (GH15155, GH15166)."""
    # GH15155
    df = DataFrame(
        data=np.arange(2, 22, 2),
        index=MultiIndex(
            levels=[CategoricalIndex(["a", "b"]), range(10)],
            codes=[[0] * 5 + [1] * 5, range(10)],
            names=["Index1", "Index2"],
        ),
    )
    g = df.groupby(level=["Index1"], observed=observed)

    # expected should equal test.loc[["a"]]
    # GH15166
    expected = DataFrame(
        data=np.arange(2, 12, 2),
        index=MultiIndex(
            levels=[CategoricalIndex(["a", "b"]), range(5)],
            codes=[[0] * 5, range(5)],
            names=["Index1", "Index2"],
        ),
    )
    result = g.get_group("a")

    tm.assert_frame_equal(result, expected)
# GH#21636 flaky on py37; may be related to older numpy, see discussion
# https://github.com/MacPython/pandas-wheels/pull/64
@pytest.mark.xfail(PY37, reason="Flaky, GH-27902", strict=False)
@pytest.mark.parametrize("ordered", [True, False])
def test_apply(ordered):
    """apply/agg/mean on an observed=True groupby over Categorical keys
    (GH 10138)."""
    # GH 10138

    dense = Categorical(list("abc"), ordered=ordered)

    # 'b' is in the categories but not in the list
    missing = Categorical(list("aaa"), categories=["a", "b"], ordered=ordered)
    values = np.arange(len(dense))
    df = DataFrame({"missing": missing, "dense": dense, "values": values})
    grouped = df.groupby(["missing", "dense"], observed=True)

    # missing category 'b' should still exist in the output index
    idx = MultiIndex.from_arrays([missing, dense], names=["missing", "dense"])
    expected = DataFrame([0, 1, 2.0], index=idx, columns=["values"])

    # GH#21636 tracking down the xfail, in some builds np.mean(df.loc[[0]])
    # is coming back as Series([0., 1., 0.], index=["missing", "dense", "values"])
    # when we expect Series(0., index=["values"])
    result = grouped.apply(lambda x: np.mean(x))
    tm.assert_frame_equal(result, expected)

    # we coerce back to ints
    expected = expected.astype("int")
    result = grouped.mean()
    tm.assert_frame_equal(result, expected)

    result = grouped.agg(np.mean)
    tm.assert_frame_equal(result, expected)

    # but for transform we should still get back the original index
    idx = MultiIndex.from_arrays([missing, dense], names=["missing", "dense"])
    expected = Series(1, index=idx)
    result = grouped.apply(lambda x: 1)
    tm.assert_series_equal(result, expected)
def test_observed(observed):
    """Multiple groupers must not re-expand the grouper's output space when
    observed=True; observed=False expands to the cartesian product
    (gh-14942, gh-10132, gh-8138, gh-8869)."""
    # multiple groupers, don't re-expand the output space
    # of the grouper
    # gh-14942 (implement)
    # gh-10132 (back-compat)
    # gh-8138 (back-compat)
    # gh-8869

    cat1 = Categorical(["a", "a", "b", "b"], categories=["a", "b", "z"], ordered=True)
    cat2 = Categorical(["c", "d", "c", "d"], categories=["c", "d", "y"], ordered=True)
    df = DataFrame({"A": cat1, "B": cat2, "values": [1, 2, 3, 4]})
    df["C"] = ["foo", "bar"] * 2

    # multiple groupers with a non-cat
    gb = df.groupby(["A", "B", "C"], observed=observed)
    exp_index = MultiIndex.from_arrays(
        [cat1, cat2, ["foo", "bar"] * 2], names=["A", "B", "C"]
    )
    expected = DataFrame({"values": Series([1, 2, 3, 4], index=exp_index)}).sort_index()
    result = gb.sum()
    if not observed:
        expected = cartesian_product_for_groupers(
            expected, [cat1, cat2, ["foo", "bar"]], list("ABC")
        )
    tm.assert_frame_equal(result, expected)

    gb = df.groupby(["A", "B"], observed=observed)
    exp_index = MultiIndex.from_arrays([cat1, cat2], names=["A", "B"])
    expected = DataFrame({"values": [1, 2, 3, 4]}, index=exp_index)
    result = gb.sum()
    if not observed:
        expected = cartesian_product_for_groupers(expected, [cat1, cat2], list("AB"))
    tm.assert_frame_equal(result, expected)

    # https://github.com/pandas-dev/pandas/issues/8138
    d = {
        "cat": Categorical(
            ["a", "b", "a", "b"], categories=["a", "b", "c"], ordered=True
        ),
        "ints": [1, 1, 2, 2],
        "val": [10, 20, 30, 40],
    }
    df = DataFrame(d)

    # Grouping on a single column
    groups_single_key = df.groupby("cat", observed=observed)
    result = groups_single_key.mean()

    exp_index = CategoricalIndex(
        list("ab"), name="cat", categories=list("abc"), ordered=True
    )
    expected = DataFrame({"ints": [1.5, 1.5], "val": [20.0, 30]}, index=exp_index)
    if not observed:
        index = CategoricalIndex(
            list("abc"), name="cat", categories=list("abc"), ordered=True
        )
        expected = expected.reindex(index)

    tm.assert_frame_equal(result, expected)

    # Grouping on two columns
    groups_double_key = df.groupby(["cat", "ints"], observed=observed)
    result = groups_double_key.agg("mean")
    expected = DataFrame(
        {
            "val": [10, 30, 20, 40],
            "cat": Categorical(
                ["a", "a", "b", "b"], categories=["a", "b", "c"], ordered=True
            ),
            "ints": [1, 2, 1, 2],
        }
    ).set_index(["cat", "ints"])
    if not observed:
        expected = cartesian_product_for_groupers(
            expected, [df.cat.values, [1, 2]], ["cat", "ints"]
        )

    tm.assert_frame_equal(result, expected)

    # GH 10132
    for key in [("a", 1), ("b", 2), ("b", 1), ("a", 2)]:
        c, i = key
        result = groups_double_key.get_group(key)
        expected = df[(df.cat == c) & (df.ints == i)]
        tm.assert_frame_equal(result, expected)

    # gh-8869
    # with as_index
    d = {
        "foo": [10, 8, 4, 8, 4, 1, 1],
        "bar": [10, 20, 30, 40, 50, 60, 70],
        "baz": ["d", "c", "e", "a", "a", "d", "c"],
    }
    df = DataFrame(d)
    cat = pd.cut(df["foo"], np.linspace(0, 10, 3))
    df["range"] = cat
    groups = df.groupby(["range", "baz"], as_index=False, observed=observed)
    result = groups.agg("mean")

    groups2 = df.groupby(["range", "baz"], as_index=True, observed=observed)
    expected = groups2.agg("mean").reset_index()
    tm.assert_frame_equal(result, expected)
def test_observed_codes_remap(observed):
    """Grouping by a cut() Series plus a column remaps category codes;
    observed=False expands the index to the full cartesian product."""
    d = {"C1": [3, 3, 4, 5], "C2": [1, 2, 3, 4], "C3": [10, 100, 200, 34]}
    df = DataFrame(d)
    values = pd.cut(df["C1"], [1, 2, 3, 6])
    values.name = "cat"
    groups_double_key = df.groupby([values, "C2"], observed=observed)

    idx = MultiIndex.from_arrays([values, [1, 2, 3, 4]], names=["cat", "C2"])
    expected = DataFrame({"C1": [3, 3, 4, 5], "C3": [10, 100, 200, 34]}, index=idx)
    if not observed:
        expected = cartesian_product_for_groupers(
            expected, [values.values, [1, 2, 3, 4]], ["cat", "C2"]
        )

    result = groups_double_key.agg("mean")
    tm.assert_frame_equal(result, expected)
def test_observed_perf():
    # gh-14942: with observed=True no cartesian product is formed, so a
    # groupby on high-cardinality categoricals stays performant.
    n = 30000
    frame = DataFrame(
        {
            "cat": np.random.randint(0, 255, size=n),
            "int_id": np.random.randint(0, 255, size=n),
            "other_id": np.random.randint(0, 10000, size=n),
            "foo": 0,
        }
    )
    frame["cat"] = frame.cat.astype(str).astype("category")

    counted = frame.groupby(["cat", "int_id", "other_id"], observed=True).count()

    # Each result index level contains exactly the values actually observed.
    for level, col in enumerate(["cat", "int_id", "other_id"]):
        assert counted.index.levels[level].nunique() == frame[col].nunique()
def test_observed_groups(observed):
# gh-20583
# test that we have the appropriate groups
cat = Categorical(["a", "c", "a"], categories=["a", "b", "c"])
df = DataFrame({"cat": cat, "vals": [1, 2, 3]})
g = df.groupby("cat", observed=observed)
result = g.groups
if observed:
expected = {"a": Index([0, 2], dtype="int64"), "c": Index([1], dtype="int64")}
else:
expected = {
"a": Index([0, 2], dtype="int64"),
"b": Index([], dtype="int64"),
"c": Index([1], dtype="int64"),
}
tm.assert_dict_equal(result, expected)
def test_observed_groups_with_nan(observed):
# GH 24740
df = DataFrame(
{
"cat": Categorical(["a", np.nan, "a"], categories=["a", "b", "d"]),
"vals": [1, 2, 3],
}
)
g = df.groupby("cat", observed=observed)
result = g.groups
if observed:
expected = {"a": Index([0, 2], dtype="int64")}
else:
expected = {
"a": Index([0, 2], dtype="int64"),
"b": Index([], dtype="int64"),
"d": Index([], dtype="int64"),
}
tm.assert_dict_equal(result, expected)
def test_observed_nth():
# GH 26385
cat = pd.Categorical(["a", np.nan, np.nan], categories=["a", "b", "c"])
ser = pd.Series([1, 2, 3])
df = pd.DataFrame({"cat": cat, "ser": ser})
result = df.groupby("cat", observed=False)["ser"].nth(0)
index = pd.Categorical(["a", "b", "c"], categories=["a", "b", "c"])
expected = pd.Series([1, np.nan, np.nan], index=index, name="ser")
expected.index.name = "cat"
tm.assert_series_equal(result, expected)
def test_dataframe_categorical_with_nan(observed):
# GH 21151
s1 = Categorical([np.nan, "a", np.nan, "a"], categories=["a", "b", "c"])
s2 = Series([1, 2, 3, 4])
df = DataFrame({"s1": s1, "s2": s2})
result = df.groupby("s1", observed=observed).first().reset_index()
if observed:
expected = DataFrame(
{"s1": Categorical(["a"], categories=["a", "b", "c"]), "s2": [2]}
)
else:
expected = DataFrame(
{
"s1": Categorical(["a", "b", "c"], categories=["a", "b", "c"]),
"s2": [2, np.nan, np.nan],
}
)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("ordered", [True, False])
@pytest.mark.parametrize("observed", [True, False])
@pytest.mark.parametrize("sort", [True, False])
def test_dataframe_categorical_ordered_observed_sort(ordered, observed, sort):
    """Groupby index labels agree with the aggregated values for every
    combination of ordered/observed/sort (GH 25871, GH 25167)."""
    # GH 25871: Fix groupby sorting on ordered Categoricals
    # GH 25167: Groupby with observed=True doesn't sort

    # Build a dataframe with cat having one unobserved category ('missing'),
    # and a Series with identical values
    label = Categorical(
        ["d", "a", "b", "a", "d", "b"],
        categories=["a", "b", "missing", "d"],
        ordered=ordered,
    )
    val = Series(["d", "a", "b", "a", "d", "b"])
    df = DataFrame({"label": label, "val": val})

    # aggregate on the Categorical
    result = df.groupby("label", observed=observed, sort=sort)["val"].aggregate("first")

    # If ordering works, we expect index labels equal to aggregation results,
    # except for 'observed=False': label 'missing' has aggregation None
    label = Series(result.index.array, dtype="object")
    aggr = Series(result.array)
    if not observed:
        aggr[aggr.isna()] = "missing"
    if not all(label == aggr):
        # BUG FIX: the continuation strings lacked the f-prefix, so the
        # placeholders were emitted literally instead of interpolated.
        msg = (
            "Labels and aggregation results not consistently sorted\n"
            f"for (ordered={ordered}, observed={observed}, sort={sort})\n"
            f"Result:\n{result}"
        )
        assert False, msg
def test_datetime():
    """Groupby over a Categorical of datetimes behaves like grouping over
    the raw values (GH9049 back-compat, GH 10460)."""
    # GH9049: ensure backward compatibility
    levels = pd.date_range("2014-01-01", periods=4)
    codes = np.random.randint(0, 4, size=100)

    cats = Categorical.from_codes(codes, levels, ordered=True)

    data = DataFrame(np.random.randn(100, 4))
    result = data.groupby(cats, observed=False).mean()

    expected = data.groupby(np.asarray(cats), observed=False).mean()
    expected = expected.reindex(levels)
    expected.index = CategoricalIndex(
        expected.index, categories=expected.index, ordered=True
    )

    tm.assert_frame_equal(result, expected)

    grouped = data.groupby(cats, observed=False)
    desc_result = grouped.describe()

    idx = cats.codes.argsort()
    ord_labels = cats.take(idx)
    ord_data = data.take(idx)
    expected = ord_data.groupby(ord_labels, observed=False).describe()
    tm.assert_frame_equal(desc_result, expected)
    tm.assert_index_equal(desc_result.index, expected.index)
    tm.assert_index_equal(
        desc_result.index.get_level_values(0), expected.index.get_level_values(0)
    )

    # GH 10460
    expc = Categorical.from_codes(np.arange(4).repeat(8), levels, ordered=True)
    exp = CategoricalIndex(expc)
    tm.assert_index_equal((desc_result.stack().index.get_level_values(0)), exp)
    exp = Index(["count", "mean", "std", "min", "25%", "50%", "75%", "max"] * 4)
    tm.assert_index_equal((desc_result.stack().index.get_level_values(1)), exp)
def test_categorical_index():
    """Grouping on a CategoricalIndex (or a categorical column) produces a
    CategoricalIndex in the result."""
    s = np.random.RandomState(12345)
    levels = ["foo", "bar", "baz", "qux"]
    codes = s.randint(0, 4, size=20)
    cats = Categorical.from_codes(codes, levels, ordered=True)
    df = DataFrame(np.repeat(np.arange(20), 4).reshape(-1, 4), columns=list("abcd"))
    df["cats"] = cats

    # with a cat index
    result = df.set_index("cats").groupby(level=0, observed=False).sum()
    expected = df[list("abcd")].groupby(cats.codes, observed=False).sum()
    expected.index = CategoricalIndex(
        Categorical.from_codes([0, 1, 2, 3], levels, ordered=True), name="cats"
    )
    tm.assert_frame_equal(result, expected)

    # with a cat column, should produce a cat index
    result = df.groupby("cats", observed=False).sum()
    expected = df[list("abcd")].groupby(cats.codes, observed=False).sum()
    expected.index = CategoricalIndex(
        Categorical.from_codes([0, 1, 2, 3], levels, ordered=True), name="cats"
    )
    tm.assert_frame_equal(result, expected)
def test_describe_categorical_columns():
    # GH 11558: describe() keeps a CategoricalIndex on the columns.
    cats = CategoricalIndex(
        ["qux", "foo", "baz", "bar"],
        categories=["foo", "bar", "baz", "qux"],
        ordered=True,
    )
    frame = DataFrame(np.random.randn(20, 4), columns=cats)

    described = frame.groupby([1, 2, 3, 4] * 5).describe()
    stacked = described.stack()

    tm.assert_index_equal(stacked.columns, cats)
    tm.assert_categorical_equal(stacked.columns.values, cats.values)
def test_unstack_categorical():
    # GH11558 (example is taken from the original issue)
    frame = DataFrame(
        {"a": range(10), "medium": ["A", "B"] * 5, "artist": list("XYXXY") * 2}
    )
    frame["medium"] = frame["medium"].astype("category")

    counts = frame.groupby(["artist", "medium"], observed=False)["a"].count().unstack()

    # describe() must carry the categorical column index through.
    described = counts.describe()
    expected_cols = CategoricalIndex(["A", "B"], ordered=False, name="medium")
    tm.assert_index_equal(described.columns, expected_cols)
    tm.assert_categorical_equal(described.columns.values, expected_cols.values)

    # Column arithmetic on the unstacked frame keeps the artist index.
    summed = counts["A"] + counts["B"]
    expected = Series([6, 4], index=Index(["X", "Y"], name="artist"))
    tm.assert_series_equal(summed, expected)
def test_bins_unequal_len():
    # GH3011: grouping by bins computed on dropna'd values must raise,
    # because len(bins) != len(series).
    ser = Series([np.nan, np.nan, 1, 1, 2, 2, 3, 3, 4, 4])
    binned = pd.cut(ser.dropna().values, 4)

    with pytest.raises(ValueError):
        ser.groupby(binned).mean()
def test_as_index():
    """as_index=False with a categorical grouper keeps grouping columns as
    regular columns, for column, function and Series groupers (GH13204)."""
    # GH13204
    df = DataFrame(
        {
            "cat": Categorical([1, 2, 2], [1, 2, 3]),
            "A": [10, 11, 11],
            "B": [101, 102, 103],
        }
    )
    result = df.groupby(["cat", "A"], as_index=False, observed=True).sum()
    expected = DataFrame(
        {
            "cat": Categorical([1, 2], categories=df.cat.cat.categories),
            "A": [10, 11],
            "B": [101, 205],
        },
        columns=["cat", "A", "B"],
    )
    tm.assert_frame_equal(result, expected)

    # function grouper
    f = lambda r: df.loc[r, "A"]
    result = df.groupby(["cat", f], as_index=False, observed=True).sum()
    expected = DataFrame(
        {
            "cat": Categorical([1, 2], categories=df.cat.cat.categories),
            "A": [10, 22],
            "B": [101, 205],
        },
        columns=["cat", "A", "B"],
    )
    tm.assert_frame_equal(result, expected)

    # another not in-axis grouper (conflicting names in index)
    s = Series(["a", "b", "b"], name="cat")
    result = df.groupby(["cat", s], as_index=False, observed=True).sum()
    tm.assert_frame_equal(result, expected)

    # is original index dropped?
    group_columns = ["cat", "A"]
    expected = DataFrame(
        {
            "cat": Categorical([1, 2], categories=df.cat.cat.categories),
            "A": [10, 11],
            "B": [101, 205],
        },
        columns=["cat", "A", "B"],
    )

    for name in [None, "X", "B"]:
        df.index = Index(list("abc"), name=name)
        result = df.groupby(group_columns, as_index=False, observed=True).sum()
        tm.assert_frame_equal(result, expected)
def test_preserve_categories():
# GH-13179
categories = list("abc")
# ordered=True
df = DataFrame({"A": Categorical(list("ba"), categories=categories, ordered=True)})
index = CategoricalIndex(categories, categories, ordered=True, name="A")
tm.assert_index_equal(
df.groupby("A", sort=True, observed=False).first().index, index
)
tm.assert_index_equal(
df.groupby("A", sort=False, observed=False).first().index, index
)
# ordered=False
df = DataFrame({"A": Categorical(list("ba"), categories=categories, ordered=False)})
sort_index = CategoricalIndex(categories, categories, ordered=False, name="A")
nosort_index = CategoricalIndex(list("bac"), list("bac"), ordered=False, name="A")
tm.assert_index_equal(
df.groupby("A", sort=True, observed=False).first().index, sort_index
)
tm.assert_index_equal(
df.groupby("A", sort=False, observed=False).first().index, nosort_index
)
def test_preserve_categorical_dtype():
    """The categorical dtype (order and categories) of the grouping column
    survives mean() with both as_index settings (GH13743, GH13854)."""
    # GH13743, GH13854
    df = DataFrame(
        {
            "A": [1, 2, 1, 1, 2],
            "B": [10, 16, 22, 28, 34],
            "C1": Categorical(list("abaab"), categories=list("bac"), ordered=False),
            "C2": Categorical(list("abaab"), categories=list("bac"), ordered=True),
        }
    )
    # single grouper
    exp_full = DataFrame(
        {
            "A": [2.0, 1.0, np.nan],
            "B": [25.0, 20.0, np.nan],
            "C1": Categorical(list("bac"), categories=list("bac"), ordered=False),
            "C2": Categorical(list("bac"), categories=list("bac"), ordered=True),
        }
    )
    for col in ["C1", "C2"]:
        result1 = df.groupby(by=col, as_index=False, observed=False).mean()
        result2 = df.groupby(by=col, as_index=True, observed=False).mean().reset_index()
        expected = exp_full.reindex(columns=result1.columns)
        tm.assert_frame_equal(result1, expected)
        tm.assert_frame_equal(result2, expected)
@pytest.mark.parametrize(
    "func, values",
    [
        ("first", ["second", "first"]),
        ("last", ["fourth", "third"]),
        ("min", ["fourth", "first"]),
        ("max", ["second", "third"]),
    ],
)
def test_preserve_on_ordered_ops(func, values):
    """Ordered categorical dtype survives first/last/min/max aggregations
    (gh-18502)."""
    # gh-18502
    # preserve the categoricals on ops
    c = pd.Categorical(["first", "second", "third", "fourth"], ordered=True)
    df = pd.DataFrame({"payload": [-1, -2, -1, -2], "col": c})
    g = df.groupby("payload")
    result = getattr(g, func)()
    expected = pd.DataFrame(
        {"payload": [-2, -1], "col": pd.Series(values, dtype=c.dtype)}
    ).set_index("payload")
    tm.assert_frame_equal(result, expected)
def test_categorical_no_compress():
    """Grouping by a Categorical matches grouping by the raw codes, with the
    result reindexed onto the full category set."""
    data = Series(np.random.randn(9))

    codes = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])
    cats = Categorical.from_codes(codes, [0, 1, 2], ordered=True)

    result = data.groupby(cats, observed=False).mean()
    exp = data.groupby(codes, observed=False).mean()

    exp.index = CategoricalIndex(
        exp.index, categories=cats.categories, ordered=cats.ordered
    )
    tm.assert_series_equal(result, exp)

    # category 2 is unobserved here; it must still appear in the result
    codes = np.array([0, 0, 0, 1, 1, 1, 3, 3, 3])
    cats = Categorical.from_codes(codes, [0, 1, 2, 3], ordered=True)

    result = data.groupby(cats, observed=False).mean()
    exp = data.groupby(codes, observed=False).mean().reindex(cats.categories)
    exp.index = CategoricalIndex(
        exp.index, categories=cats.categories, ordered=cats.ordered
    )
    tm.assert_series_equal(result, exp)

    cats = Categorical(
        ["a", "a", "a", "b", "b", "b", "c", "c", "c"],
        categories=["a", "b", "c", "d"],
        ordered=True,
    )
    data = DataFrame({"a": [1, 1, 1, 2, 2, 2, 3, 4, 5], "b": cats})

    result = data.groupby("b", observed=False).mean()
    result = result["a"].values
    exp = np.array([1, 2, 4, np.nan])
    tm.assert_numpy_array_equal(result, exp)
def test_groupby_empty_with_category():
    # GH-9614: grouping by an all-None key must not coerce a categorical
    # column to float.
    frame = pd.DataFrame(
        {"A": [None] * 3, "B": pd.Categorical(["train", "train", "test"])}
    )

    result = frame.groupby("A").first()["B"]

    expected = pd.Series(
        pd.Categorical([], categories=["test", "train"]),
        index=pd.Series([], dtype="object", name="A"),
        name="B",
    )
    tm.assert_series_equal(result, expected)
def test_sort():
    """Counting groups of cut() labels yields a result sorted by the bin's
    numeric lower bound, suitable for plotting a sorted x axis."""
    # https://stackoverflow.com/questions/23814368/sorting-pandas-
    # categorical-labels-after-groupby
    # This should result in a properly sorted Series so that the plot
    # has a sorted x axis
    # self.cat.groupby(['value_group'])['value_group'].count().plot(kind='bar')

    df = DataFrame({"value": np.random.randint(0, 10000, 100)})
    labels = [f"{i} - {i+499}" for i in range(0, 10000, 500)]
    cat_labels = Categorical(labels, labels)

    df = df.sort_values(by=["value"], ascending=True)
    df["value_group"] = pd.cut(
        df.value, range(0, 10500, 500), right=False, labels=cat_labels
    )

    res = df.groupby(["value_group"], observed=False)["value_group"].count()
    # reorder by the numeric lower bound of each label ("0 - 499" -> 0.0)
    exp = res[sorted(res.index, key=lambda x: float(x.split()[0]))]
    exp.index = CategoricalIndex(exp.index, name=exp.index.name)
    tm.assert_series_equal(res, exp)
def test_sort2():
    """groupby honors sort= for categorical keys; with ordered categories the
    groups follow category order even when sort=False (GH 8868)."""
    # dataframe groupby sort was being ignored # GH 8868
    df = DataFrame(
        [
            ["(7.5, 10]", 10, 10],
            ["(7.5, 10]", 8, 20],
            ["(2.5, 5]", 5, 30],
            ["(5, 7.5]", 6, 40],
            ["(2.5, 5]", 4, 50],
            ["(0, 2.5]", 1, 60],
            ["(5, 7.5]", 7, 70],
        ],
        columns=["range", "foo", "bar"],
    )
    df["range"] = Categorical(df["range"], ordered=True)
    index = CategoricalIndex(
        ["(0, 2.5]", "(2.5, 5]", "(5, 7.5]", "(7.5, 10]"], name="range", ordered=True
    )
    expected_sort = DataFrame(
        [[1, 60], [5, 30], [6, 40], [10, 10]], columns=["foo", "bar"], index=index
    )

    col = "range"
    result_sort = df.groupby(col, sort=True, observed=False).first()
    tm.assert_frame_equal(result_sort, expected_sort)

    # when categories is ordered, group is ordered by category's order
    expected_sort = result_sort
    result_sort = df.groupby(col, sort=False, observed=False).first()
    tm.assert_frame_equal(result_sort, expected_sort)

    df["range"] = Categorical(df["range"], ordered=False)
    index = CategoricalIndex(
        ["(0, 2.5]", "(2.5, 5]", "(5, 7.5]", "(7.5, 10]"], name="range"
    )
    expected_sort = DataFrame(
        [[1, 60], [5, 30], [6, 40], [10, 10]], columns=["foo", "bar"], index=index
    )

    index = CategoricalIndex(
        ["(7.5, 10]", "(2.5, 5]", "(5, 7.5]", "(0, 2.5]"],
        categories=["(7.5, 10]", "(2.5, 5]", "(5, 7.5]", "(0, 2.5]"],
        name="range",
    )
    expected_nosort = DataFrame(
        [[10, 10], [5, 30], [6, 40], [1, 60]], index=index, columns=["foo", "bar"]
    )

    col = "range"

    # this is an unordered categorical, but we allow this ####
    result_sort = df.groupby(col, sort=True, observed=False).first()
    tm.assert_frame_equal(result_sort, expected_sort)

    result_nosort = df.groupby(col, sort=False, observed=False).first()
    tm.assert_frame_equal(result_nosort, expected_nosort)
def test_sort_datetimelike():
    """sort= handling for categorical datetime keys, ordered and unordered
    (GH10505)."""
    # GH10505

    # use same data as test_groupby_sort_categorical, which category is
    # corresponding to datetime.month
    df = DataFrame(
        {
            "dt": [
                datetime(2011, 7, 1),
                datetime(2011, 7, 1),
                datetime(2011, 2, 1),
                datetime(2011, 5, 1),
                datetime(2011, 2, 1),
                datetime(2011, 1, 1),
                datetime(2011, 5, 1),
            ],
            "foo": [10, 8, 5, 6, 4, 1, 7],
            "bar": [10, 20, 30, 40, 50, 60, 70],
        },
        columns=["dt", "foo", "bar"],
    )

    # ordered=True
    df["dt"] = Categorical(df["dt"], ordered=True)
    index = [
        datetime(2011, 1, 1),
        datetime(2011, 2, 1),
        datetime(2011, 5, 1),
        datetime(2011, 7, 1),
    ]
    result_sort = DataFrame(
        [[1, 60], [5, 30], [6, 40], [10, 10]], columns=["foo", "bar"]
    )
    result_sort.index = CategoricalIndex(index, name="dt", ordered=True)

    index = [
        datetime(2011, 7, 1),
        datetime(2011, 2, 1),
        datetime(2011, 5, 1),
        datetime(2011, 1, 1),
    ]
    result_nosort = DataFrame(
        [[10, 10], [5, 30], [6, 40], [1, 60]], columns=["foo", "bar"]
    )
    result_nosort.index = CategoricalIndex(
        index, categories=index, name="dt", ordered=True
    )

    col = "dt"
    tm.assert_frame_equal(
        result_sort, df.groupby(col, sort=True, observed=False).first()
    )

    # when categories is ordered, group is ordered by category's order
    tm.assert_frame_equal(
        result_sort, df.groupby(col, sort=False, observed=False).first()
    )

    # ordered = False
    df["dt"] = Categorical(df["dt"], ordered=False)
    index = [
        datetime(2011, 1, 1),
        datetime(2011, 2, 1),
        datetime(2011, 5, 1),
        datetime(2011, 7, 1),
    ]
    result_sort = DataFrame(
        [[1, 60], [5, 30], [6, 40], [10, 10]], columns=["foo", "bar"]
    )
    result_sort.index = CategoricalIndex(index, name="dt")

    index = [
        datetime(2011, 7, 1),
        datetime(2011, 2, 1),
        datetime(2011, 5, 1),
        datetime(2011, 1, 1),
    ]
    result_nosort = DataFrame(
        [[10, 10], [5, 30], [6, 40], [1, 60]], columns=["foo", "bar"]
    )
    result_nosort.index = CategoricalIndex(index, categories=index, name="dt")

    col = "dt"
    tm.assert_frame_equal(
        result_sort, df.groupby(col, sort=True, observed=False).first()
    )
    tm.assert_frame_equal(
        result_nosort, df.groupby(col, sort=False, observed=False).first()
    )
def test_empty_sum():
    # https://github.com/pandas-dev/pandas/issues/18678
    # sum() over an unobserved category yields 0 unless min_count forces NaN.
    df = DataFrame(
        {"A": Categorical(["a", "a", "b"], categories=["a", "b", "c"]), "B": [1, 2, 1]}
    )
    idx = CategoricalIndex(["a", "b", "c"], name="A")
    grouped = df.groupby("A", observed=False).B

    # default (min_count implicitly 0): empty group sums to 0
    tm.assert_series_equal(grouped.sum(), Series([3, 1, 0], idx, name="B"))
    tm.assert_series_equal(grouped.sum(min_count=0), Series([3, 1, 0], idx, name="B"))
    # min_count=1: the empty group becomes NaN
    tm.assert_series_equal(
        grouped.sum(min_count=1), Series([3, 1, np.nan], idx, name="B")
    )
    # min_count>1: single-value groups become NaN as well
    tm.assert_series_equal(
        grouped.sum(min_count=2), Series([3, np.nan, np.nan], idx, name="B")
    )
def test_empty_prod():
    # https://github.com/pandas-dev/pandas/issues/18678
    # prod() over an unobserved category yields 1 unless min_count forces NaN.
    df = DataFrame(
        {"A": Categorical(["a", "a", "b"], categories=["a", "b", "c"]), "B": [1, 2, 1]}
    )
    idx = CategoricalIndex(["a", "b", "c"], name="A")
    grouped = df.groupby("A", observed=False).B

    # default (min_count implicitly 0): empty group multiplies to 1
    tm.assert_series_equal(grouped.prod(), Series([2, 1, 1], idx, name="B"))
    tm.assert_series_equal(grouped.prod(min_count=0), Series([2, 1, 1], idx, name="B"))
    # min_count=1: the empty group becomes NaN
    tm.assert_series_equal(
        grouped.prod(min_count=1), Series([2, 1, np.nan], idx, name="B")
    )
def test_groupby_multiindex_categorical_datetime():
    # https://github.com/pandas-dev/pandas/issues/21390
    times = pd.date_range("2018-06-01 00", freq="1T", periods=3)
    df = DataFrame(
        {
            "key1": Categorical(list("abcbabcba")),
            "key2": Categorical(list(times) * 3),
            "values": np.arange(9),
        }
    )

    result = df.groupby(["key1", "key2"]).mean()

    idx = MultiIndex.from_product(
        [Categorical(["a", "b", "c"]), Categorical(times)],
        names=["key1", "key2"],
    )
    expected = DataFrame({"values": [0, 4, 8, 3, 4, 5, 6, np.nan, 2]}, index=idx)
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
    "as_index, expected",
    [
        (
            True,
            Series(
                index=MultiIndex.from_arrays(
                    [Series([1, 1, 2], dtype="category"), [1, 2, 2]], names=["a", "b"]
                ),
                data=[1, 2, 3],
                name="x",
            ),
        ),
        (
            False,
            DataFrame(
                {
                    "a": Series([1, 1, 2], dtype="category"),
                    "b": [1, 2, 2],
                    "x": [1, 2, 3],
                }
            ),
        ),
    ],
)
def test_groupby_agg_observed_true_single_column(as_index, expected):
    """Single-column sum with observed=True respects as_index (GH-23970)."""
    # GH-23970
    df = DataFrame(
        {"a": Series([1, 1, 2], dtype="category"), "b": [1, 2, 2], "x": [1, 2, 3]}
    )

    result = df.groupby(["a", "b"], as_index=as_index, observed=True)["x"].sum()

    tm.assert_equal(result, expected)
@pytest.mark.parametrize("fill_value", [None, np.nan, pd.NaT])
def test_shift(fill_value):
    # Shifting a Categorical by one pushes a missing value in front,
    # whichever null is supplied as fill_value.
    original = Categorical(
        ["a", "b", "c", "d"], categories=["a", "b", "c", "d"], ordered=False
    )
    shifted = original.shift(1, fill_value=fill_value)

    expected = Categorical(
        [None, "a", "b", "c"], categories=["a", "b", "c", "d"], ordered=False
    )
    tm.assert_equal(shifted, expected)
@pytest.fixture
def df_cat(df):
    """
    DataFrame with multiple categorical columns and a column of integers.
    Shortened so as not to contain all possible combinations of categories.
    Useful for testing `observed` kwarg functionality on GroupBy objects.

    Parameters
    ----------
    df: DataFrame
        Non-categorical, longer DataFrame from another fixture, used to derive
        this one

    Returns
    -------
    df_cat: DataFrame
    """
    df_cat = df.copy()[:4]  # leave out some groups
    # A and B become categoricals with some categories unobserved in the slice
    df_cat["A"] = df_cat["A"].astype("category")
    df_cat["B"] = df_cat["B"].astype("category")
    df_cat["C"] = Series([1, 2, 3, 4])
    df_cat = df_cat.drop(["D"], axis=1)
    return df_cat
@pytest.mark.parametrize(
    "operation, kwargs", [("agg", dict(dtype="category")), ("apply", dict())]
)
def test_seriesgroupby_observed_true(df_cat, operation, kwargs):
    """agg/apply on a SeriesGroupBy with observed=True keeps only observed
    category combinations in the index (GH 24880)."""
    # GH 24880
    index = MultiIndex.from_frame(
        DataFrame(
            {"A": ["foo", "foo", "bar", "bar"], "B": ["one", "two", "one", "three"]},
            **kwargs,
        )
    )
    expected = Series(data=[1, 3, 2, 4], index=index, name="C")
    grouped = df_cat.groupby(["A", "B"], observed=True)["C"]
    result = getattr(grouped, operation)(sum)
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("operation", ["agg", "apply"])
@pytest.mark.parametrize("observed", [False, None])
def test_seriesgroupby_observed_false_or_none(df_cat, observed, operation):
    # GH 24880
    # observed=False (and the default None) must expand the result to the full
    # cartesian product of the categories, with NaN for unobserved combinations.
    index, _ = MultiIndex.from_product(
        [
            CategoricalIndex(["bar", "foo"], ordered=False),
            CategoricalIndex(["one", "three", "two"], ordered=False),
        ],
        names=["A", "B"],
    ).sortlevel()
    expected = Series(data=[2, 4, np.nan, 1, np.nan, 3], index=index, name="C")
    grouped = df_cat.groupby(["A", "B"], observed=observed)["C"]
    result = getattr(grouped, operation)(sum)
    tm.assert_series_equal(result, expected)
# Each case pins the index/data produced when apply returns a dict per group:
# observed=True keeps only seen (A, B) pairs; False/None expand to the full
# cartesian product with NaN for unobserved combinations.
@pytest.mark.parametrize(
    "observed, index, data",
    [
        (
            True,
            MultiIndex.from_tuples(
                [
                    ("foo", "one", "min"),
                    ("foo", "one", "max"),
                    ("foo", "two", "min"),
                    ("foo", "two", "max"),
                    ("bar", "one", "min"),
                    ("bar", "one", "max"),
                    ("bar", "three", "min"),
                    ("bar", "three", "max"),
                ],
                names=["A", "B", None],
            ),
            [1, 1, 3, 3, 2, 2, 4, 4],
        ),
        (
            False,
            MultiIndex.from_product(
                [
                    CategoricalIndex(["bar", "foo"], ordered=False),
                    CategoricalIndex(["one", "three", "two"], ordered=False),
                    Index(["min", "max"]),
                ],
                names=["A", "B", None],
            ),
            [2, 2, 4, 4, np.nan, np.nan, 1, 1, np.nan, np.nan, 3, 3],
        ),
        (
            None,
            MultiIndex.from_product(
                [
                    CategoricalIndex(["bar", "foo"], ordered=False),
                    CategoricalIndex(["one", "three", "two"], ordered=False),
                    Index(["min", "max"]),
                ],
                names=["A", "B", None],
            ),
            [2, 2, 4, 4, np.nan, np.nan, 1, 1, np.nan, np.nan, 3, 3],
        ),
    ],
)
def test_seriesgroupby_observed_apply_dict(df_cat, observed, index, data):
    # GH 24880
    # Returning a dict from apply expands into an extra (unnamed) index level.
    expected = Series(data=data, index=index, name="C")
    result = df_cat.groupby(["A", "B"], observed=observed)["C"].apply(
        lambda x: {"min": x.min(), "max": x.max()}
    )
    tm.assert_series_equal(result, expected)
def test_groupby_categorical_series_dataframe_consistent(df_cat):
    # GH 20416
    # Selecting the column before grouping (SeriesGroupBy) and after the
    # frame-level aggregation must produce the same Series.
    expected = df_cat.groupby(["A", "B"])["C"].mean()
    result = df_cat.groupby(["A", "B"]).mean()["C"]
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("code", [([1, 0, 0]), ([0, 0, 0])])
def test_groupby_categorical_axis_1(code):
    # GH 13420
    # Grouping columns (axis=1) by a Categorical must match the equivalent
    # transpose-group-transpose round trip on axis=0.
    df = DataFrame({"a": [1, 2, 3, 4], "b": [-1, -2, -3, -4], "c": [5, 6, 7, 8]})
    cat = pd.Categorical.from_codes(code, categories=list("abc"))
    result = df.groupby(cat, axis=1).mean()
    expected = df.T.groupby(cat, axis=0).mean().T
    tm.assert_frame_equal(result, expected)
def test_groupby_cat_preserves_structure(observed, ordered_fixture):
    # GH 28787
    # Aggregating with a plain callable (DataFrame.sum) must not destroy the
    # categorical dtype of the grouping column after reset_index.
    df = DataFrame(
        {"Name": Categorical(["Bob", "Greg"], ordered=ordered_fixture), "Item": [1, 2]},
        columns=["Name", "Item"],
    )
    expected = df.copy()
    result = (
        df.groupby("Name", observed=observed)
        .agg(pd.DataFrame.sum, skipna=True)
        .reset_index()
    )
    tm.assert_frame_equal(result, expected)
def test_get_nonexistent_category():
    # Accessing a column ("vau") that does not exist inside a groupby-apply
    # must surface the original KeyError instead of swallowing it.
    frame = pd.DataFrame({"var": ["a", "a", "b", "b"], "val": range(4)})
    with pytest.raises(KeyError, match="'vau'"):
        frame.groupby("var").apply(
            lambda grp: pd.DataFrame(
                {"var": [grp.iloc[-1]["var"]], "val": [grp.iloc[-1]["vau"]]}
            )
        )
def test_series_groupby_on_2_categoricals_unobserved(
    reduction_func: str, observed: bool
):
    # GH 17605
    # Every SeriesGroupBy reduction must respect `observed`: 4 groups when
    # only observed combinations are kept, 4 * 4 = 16 for the full product.
    if reduction_func == "ngroup":
        pytest.skip("ngroup is not truly a reduction")
    df = pd.DataFrame(
        {
            "cat_1": pd.Categorical(list("AABB"), categories=list("ABCD")),
            "cat_2": pd.Categorical(list("AB") * 2, categories=list("ABCD")),
            "value": [0.1] * 4,
        }
    )
    # `nth` is the only reduction here that needs a positional argument.
    args = {"nth": [0]}.get(reduction_func, [])
    expected_length = 4 if observed else 16
    series_groupby = df.groupby(["cat_1", "cat_2"], observed=observed)["value"]
    agg = getattr(series_groupby, reduction_func)
    result = agg(*args)
    assert len(result) == expected_length
# For each reduction, the fill value expected in unobserved category
# combinations: counting-style reductions yield 0, the rest NaN.
@pytest.mark.parametrize(
    "func, zero_or_nan",
    [
        ("all", np.NaN),
        ("any", np.NaN),
        ("count", 0),
        ("first", np.NaN),
        ("idxmax", np.NaN),
        ("idxmin", np.NaN),
        ("last", np.NaN),
        ("mad", np.NaN),
        ("max", np.NaN),
        ("mean", np.NaN),
        ("median", np.NaN),
        ("min", np.NaN),
        ("nth", np.NaN),
        ("nunique", 0),
        ("prod", np.NaN),
        ("quantile", np.NaN),
        ("sem", np.NaN),
        ("size", 0),
        ("skew", np.NaN),
        ("std", np.NaN),
        ("sum", np.NaN),
        ("var", np.NaN),
    ],
)
def test_series_groupby_on_2_categoricals_unobserved_zeroes_or_nans(func, zero_or_nan):
    # GH 17605
    # Tests whether the unobserved categories in the result contain 0 or NaN
    df = pd.DataFrame(
        {
            "cat_1": pd.Categorical(list("AABB"), categories=list("ABC")),
            "cat_2": pd.Categorical(list("AB") * 2, categories=list("ABC")),
            "value": [0.1] * 4,
        }
    )
    # All index tuples involving the never-seen category "C".
    unobserved = [tuple("AC"), tuple("BC"), tuple("CA"), tuple("CB"), tuple("CC")]
    # `nth` needs a positional argument; other reductions take none.
    args = {"nth": [0]}.get(func, [])
    series_groupby = df.groupby(["cat_1", "cat_2"], observed=False)["value"]
    agg = getattr(series_groupby, func)
    result = agg(*args)
    for idx in unobserved:
        val = result.loc[idx]
        assert (pd.isna(zero_or_nan) and pd.isna(val)) or (val == zero_or_nan)
    # If we expect unobserved values to be zero, we also expect the dtype to be int
    if zero_or_nan == 0:
        assert np.issubdtype(result.dtype, np.integer)
def test_series_groupby_categorical_aggregation_getitem():
    # GH 8870
    # Column selection before aggregation must agree with selecting from the
    # aggregated frame when grouping on a cut-produced categorical.
    d = {"foo": [10, 8, 4, 1], "bar": [10, 20, 30, 40], "baz": ["d", "c", "d", "c"]}
    df = pd.DataFrame(d)
    cat = pd.cut(df["foo"], np.linspace(0, 20, 5))
    df["range"] = cat
    groups = df.groupby(["range", "baz"], as_index=True, sort=True)
    result = groups["foo"].agg("mean")
    expected = groups.agg("mean")["foo"]
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
    "func, expected_values",
    [(pd.Series.nunique, [1, 1, 2]), (pd.Series.count, [1, 2, 2])],
)
def test_groupby_agg_categorical_columns(func, expected_values):
    # 31256
    # Aggregating a categorical value column with a Series method must not
    # raise and must return plain integer results per group.
    df = pd.DataFrame(
        {
            "id": [0, 1, 2, 3, 4],
            "groups": [0, 1, 1, 2, 2],
            "value": pd.Categorical([0, 0, 0, 0, 1]),
        }
    ).set_index("id")
    result = df.groupby("groups").agg(func)
    expected = pd.DataFrame(
        {"value": expected_values}, index=pd.Index([0, 1, 2], name="groups"),
    )
    tm.assert_frame_equal(result, expected)
def test_groupby_agg_non_numeric():
    # nunique on a categorical (non-numeric) column must work both via
    # agg(pd.Series.nunique) and the dedicated .nunique() method.
    df = pd.DataFrame(
        {"A": pd.Categorical(["a", "a", "b"], categories=["a", "b", "c"])}
    )
    expected = pd.DataFrame({"A": [2, 1]}, index=[1, 2])
    result = df.groupby([1, 2, 1]).agg(pd.Series.nunique)
    tm.assert_frame_equal(result, expected)
    result = df.groupby([1, 2, 1]).nunique()
    tm.assert_frame_equal(result, expected)
from datetime import datetime
import numpy as np
import pytest
from pandas.compat import PY37
import pandas as pd
from pandas import (
Categorical,
CategoricalIndex,
DataFrame,
Index,
MultiIndex,
Series,
qcut,
)
import pandas._testing as tm
def cartesian_product_for_groupers(result, args, names):
    """Reindex to a cartesian product of the groupers,
    preserving the nature (Categorical) of each grouper.

    Each Categorical/CategoricalIndex grouper is expanded to one entry per
    category (in category order); non-categorical groupers are used as-is.
    """

    def f(a):
        # Expand a categorical grouper to cover every category, not just
        # the observed values.
        if isinstance(a, (CategoricalIndex, Categorical)):
            categories = a.categories
            a = Categorical.from_codes(
                np.arange(len(categories)), categories=categories, ordered=a.ordered
            )
        return a

    index = MultiIndex.from_product(map(f, args), names=names)
    return result.reindex(index).sort_index()
def test_apply_use_categorical_name(df):
    # The result index of an apply over qcut-produced bins must carry the
    # name of the binned column ("C").
    cats = qcut(df.C, 4)

    def get_stats(group):
        # Dict return -> extra index level with these keys per bin.
        return {
            "min": group.min(),
            "max": group.max(),
            "count": group.count(),
            "mean": group.mean(),
        }

    result = df.groupby(cats, observed=False).D.apply(get_stats)
    assert result.index.names[0] == "C"
def test_basic():
    # Smoke tests for basic groupby behavior with categorical groupers:
    # unobserved categories appear with NaN/0, transform/filter round-trip,
    # and describe() matches the equivalent non-categorical grouping.
    cats = Categorical(
        ["a", "a", "a", "b", "b", "b", "c", "c", "c"],
        categories=["a", "b", "c", "d"],
        ordered=True,
    )
    data = DataFrame({"a": [1, 1, 1, 2, 2, 2, 3, 4, 5], "b": cats})
    # Unobserved category "d" shows up with NaN mean.
    exp_index = CategoricalIndex(list("abcd"), name="b", ordered=True)
    expected = DataFrame({"a": [1, 2, 4, np.nan]}, index=exp_index)
    result = data.groupby("b", observed=False).mean()
    tm.assert_frame_equal(result, expected)
    # Single categorical grouper: unobserved "z" sums to 0.
    cat1 = Categorical(["a", "a", "b", "b"], categories=["a", "b", "z"], ordered=True)
    cat2 = Categorical(["c", "d", "c", "d"], categories=["c", "d", "y"], ordered=True)
    df = DataFrame({"A": cat1, "B": cat2, "values": [1, 2, 3, 4]})
    gb = df.groupby("A", observed=False)
    exp_idx = CategoricalIndex(["a", "b", "z"], name="A", ordered=True)
    expected = DataFrame({"values": Series([3, 7, 0], index=exp_idx)})
    result = gb.sum()
    tm.assert_frame_equal(result, expected)
    # transform on a categorical column must be an identity round-trip.
    x = DataFrame(
        [[1, "John P. Doe"], [2, "Jane Dove"], [1, "John P. Doe"]],
        columns=["person_id", "person_name"],
    )
    x["person_name"] = Categorical(x.person_name)
    g = x.groupby(["person_id"], observed=False)
    result = g.transform(lambda x: x)
    tm.assert_frame_equal(result, x[["person_name"]])
    result = x.drop_duplicates("person_name")
    expected = x.iloc[[0, 1]]
    tm.assert_frame_equal(result, expected)

    def f(x):
        return x.drop_duplicates("person_name").iloc[0]

    result = g.apply(f)
    expected = x.iloc[[0, 1]].copy()
    expected.index = Index([1, 2], name="person_id")
    expected["person_name"] = expected["person_name"].astype("object")
    tm.assert_frame_equal(result, expected)
    # transform/filter with a cut-produced grouper must preserve the input.
    df = DataFrame({"a": [5, 15, 25]})
    c = pd.cut(df.a, bins=[0, 10, 20, 30, 40])
    result = df.a.groupby(c, observed=False).transform(sum)
    tm.assert_series_equal(result, df["a"])
    tm.assert_series_equal(
        df.a.groupby(c, observed=False).transform(lambda xs: np.sum(xs)), df["a"]
    )
    tm.assert_frame_equal(df.groupby(c, observed=False).transform(sum), df[["a"]])
    tm.assert_frame_equal(
        df.groupby(c, observed=False).transform(lambda xs: np.max(xs)), df[["a"]]
    )
    tm.assert_series_equal(df.a.groupby(c, observed=False).filter(np.all), df["a"])
    tm.assert_frame_equal(df.groupby(c, observed=False).filter(np.all), df)
    # Same, with a bin edge below zero.
    df = DataFrame({"a": [5, 15, 25, -5]})
    c = pd.cut(df.a, bins=[-10, 0, 10, 20, 30, 40])
    result = df.a.groupby(c, observed=False).transform(sum)
    tm.assert_series_equal(result, df["a"])
    tm.assert_series_equal(
        df.a.groupby(c, observed=False).transform(lambda xs: np.sum(xs)), df["a"]
    )
    tm.assert_frame_equal(df.groupby(c, observed=False).transform(sum), df[["a"]])
    tm.assert_frame_equal(
        df.groupby(c, observed=False).transform(lambda xs: np.sum(xs)), df[["a"]]
    )
    # apply(len) over labeled bins: unobserved bins yield 0.
    df = DataFrame({"a": [1, 0, 0, 0]})
    c = pd.cut(df.a, [0, 1, 2, 3, 4], labels=Categorical(list("abcd")))
    result = df.groupby(c, observed=False).apply(len)
    exp_index = CategoricalIndex(c.values.categories, ordered=c.values.ordered)
    expected = Series([1, 0, 0, 0], index=exp_index)
    expected.index.name = "a"
    tm.assert_series_equal(result, expected)
    # Grouping by a Categorical vs its ndarray values must agree (after
    # reindexing to the categorical index).
    levels = ["foo", "bar", "baz", "qux"]
    codes = np.random.randint(0, 4, size=100)
    cats = Categorical.from_codes(codes, levels, ordered=True)
    data = DataFrame(np.random.randn(100, 4))
    result = data.groupby(cats, observed=False).mean()
    expected = data.groupby(np.asarray(cats), observed=False).mean()
    exp_idx = CategoricalIndex(levels, categories=cats.categories, ordered=True)
    expected = expected.reindex(exp_idx)
    tm.assert_frame_equal(result, expected)
    # describe() must match the same data grouped in category-sorted order.
    grouped = data.groupby(cats, observed=False)
    desc_result = grouped.describe()
    idx = cats.codes.argsort()
    ord_labels = np.asarray(cats).take(idx)
    ord_data = data.take(idx)
    exp_cats = Categorical(
        ord_labels, ordered=True, categories=["foo", "bar", "baz", "qux"]
    )
    expected = ord_data.groupby(exp_cats, sort=False, observed=False).describe()
    tm.assert_frame_equal(desc_result, expected)
    # The stacked describe index carries the categories and the 8 stat names.
    expc = Categorical.from_codes(np.arange(4).repeat(8), levels, ordered=True)
    exp = CategoricalIndex(expc)
    tm.assert_index_equal((desc_result.stack().index.get_level_values(0)), exp)
    exp = Index(["count", "mean", "std", "min", "25%", "50%", "75%", "max"] * 4)
    tm.assert_index_equal((desc_result.stack().index.get_level_values(1)), exp)
def test_level_get_group(observed):
    # GH 15155: get_group on a level of a MultiIndex with a CategoricalIndex
    # level must return exactly the rows under that level value.
    df = DataFrame(
        data=np.arange(2, 22, 2),
        index=MultiIndex(
            levels=[CategoricalIndex(["a", "b"]), range(10)],
            codes=[[0] * 5 + [1] * 5, range(10)],
            names=["Index1", "Index2"],
        ),
    )
    g = df.groupby(level=["Index1"], observed=observed)
    # expected: the first five rows, i.e. everything under Index1 == "a".
    expected = DataFrame(
        data=np.arange(2, 12, 2),
        index=MultiIndex(
            levels=[CategoricalIndex(["a", "b"]), range(5)],
            codes=[[0] * 5, range(5)],
            names=["Index1", "Index2"],
        ),
    )
    result = g.get_group("a")
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("ordered", [True, False])
def test_apply(ordered):
    # Grouping on two categoricals with observed=True: only the observed
    # (missing, dense) combinations may appear in the result index.
    dense = Categorical(list("abc"), ordered=ordered)
    # "b" is in the categories of `missing` but never observed in the data.
    missing = Categorical(list("aaa"), categories=["a", "b"], ordered=ordered)
    values = np.arange(len(dense))
    df = DataFrame({"missing": missing, "dense": dense, "values": values})
    grouped = df.groupby(["missing", "dense"], observed=True)
    idx = MultiIndex.from_arrays([missing, dense], names=["missing", "dense"])
    expected = DataFrame([0, 1, 2.0], index=idx, columns=["values"])
    # BUG FIX: `result` was used before assignment and the bare
    # `assert_frame_equal` name is undefined in this module -- compute the
    # apply() result first and compare through the `tm` namespace.
    result = grouped.apply(lambda x: np.mean(x))
    tm.assert_frame_equal(result, expected)
    # we coerce back to ints for the direct reductions
    expected = expected.astype("int")
    result = grouped.mean()
    tm.assert_frame_equal(result, expected)
    result = grouped.agg(np.mean)
    tm.assert_frame_equal(result, expected)
    # for apply returning a scalar we get a Series over the observed groups
    idx = MultiIndex.from_arrays([missing, dense], names=["missing", "dense"])
    expected = Series(1, index=idx)
    result = grouped.apply(lambda x: 1)
    tm.assert_series_equal(result, expected)
def test_observed(observed):
    # multiple groupers: don't re-expand the output space of the grouper
    # gh-14942 (implement)
    # gh-10132 (back-compat)
    # gh-8138 (back-compat)
    # gh-8869
    cat1 = Categorical(["a", "a", "b", "b"], categories=["a", "b", "z"], ordered=True)
    cat2 = Categorical(["c", "d", "c", "d"], categories=["c", "d", "y"], ordered=True)
    df = DataFrame({"A": cat1, "B": cat2, "values": [1, 2, 3, 4]})
    df["C"] = ["foo", "bar"] * 2
    # multiple groupers with a non-cat
    gb = df.groupby(["A", "B", "C"], observed=observed)
    exp_index = MultiIndex.from_arrays(
        [cat1, cat2, ["foo", "bar"] * 2], names=["A", "B", "C"]
    )
    expected = DataFrame({"values": Series([1, 2, 3, 4], index=exp_index)}).sort_index()
    result = gb.sum()
    if not observed:
        # observed=False expands to the full cartesian product of categories.
        expected = cartesian_product_for_groupers(
            expected, [cat1, cat2, ["foo", "bar"]], list("ABC")
        )
    tm.assert_frame_equal(result, expected)
    # two categorical groupers, no plain column
    gb = df.groupby(["A", "B"], observed=observed)
    exp_index = MultiIndex.from_arrays([cat1, cat2], names=["A", "B"])
    expected = DataFrame({"values": [1, 2, 3, 4]}, index=exp_index)
    result = gb.sum()
    if not observed:
        expected = cartesian_product_for_groupers(expected, [cat1, cat2], list("AB"))
    tm.assert_frame_equal(result, expected)
    # https://github.com/pandas-dev/pandas/issues/8138
    d = {
        "cat": Categorical(
            ["a", "b", "a", "b"], categories=["a", "b", "c"], ordered=True
        ),
        "ints": [1, 1, 2, 2],
        "val": [10, 20, 30, 40],
    }
    df = DataFrame(d)
    # Grouping on a single column
    groups_single_key = df.groupby("cat", observed=observed)
    result = groups_single_key.mean()
    exp_index = CategoricalIndex(
        list("ab"), name="cat", categories=list("abc"), ordered=True
    )
    expected = DataFrame({"ints": [1.5, 1.5], "val": [20.0, 30]}, index=exp_index)
    if not observed:
        # unobserved category "c" appears with NaN rows
        index = CategoricalIndex(
            list("abc"), name="cat", categories=list("abc"), ordered=True
        )
        expected = expected.reindex(index)
    tm.assert_frame_equal(result, expected)
    # Grouping on two columns
    groups_double_key = df.groupby(["cat", "ints"], observed=observed)
    result = groups_double_key.agg("mean")
    expected = DataFrame(
        {
            "val": [10, 30, 20, 40],
            "cat": Categorical(
                ["a", "a", "b", "b"], categories=["a", "b", "c"], ordered=True
            ),
            "ints": [1, 2, 1, 2],
        }
    ).set_index(["cat", "ints"])
    if not observed:
        expected = cartesian_product_for_groupers(
            expected, [df.cat.values, [1, 2]], ["cat", "ints"]
        )
    tm.assert_frame_equal(result, expected)
    # GH 10132
    # get_group on tuple keys must slice the original frame correctly.
    for key in [("a", 1), ("b", 2), ("b", 1), ("a", 2)]:
        c, i = key
        result = groups_double_key.get_group(key)
        expected = df[(df.cat == c) & (df.ints == i)]
        tm.assert_frame_equal(result, expected)
    # gh-8869
    # with as_index
    d = {
        "foo": [10, 8, 4, 8, 4, 1, 1],
        "bar": [10, 20, 30, 40, 50, 60, 70],
        "baz": ["d", "c", "e", "a", "a", "d", "c"],
    }
    df = DataFrame(d)
    cat = pd.cut(df["foo"], np.linspace(0, 10, 3))
    df["range"] = cat
    groups = df.groupby(["range", "baz"], as_index=False, observed=observed)
    result = groups.agg("mean")
    # as_index=False must be equivalent to as_index=True + reset_index.
    groups2 = df.groupby(["range", "baz"], as_index=True, observed=observed)
    expected = groups2.agg("mean").reset_index()
    tm.assert_frame_equal(result, expected)
def test_observed_codes_remap(observed):
    # Grouping by a cut-produced Categorical plus a plain column: codes must
    # be remapped consistently whether or not unobserved bins are expanded.
    d = {"C1": [3, 3, 4, 5], "C2": [1, 2, 3, 4], "C3": [10, 100, 200, 34]}
    df = DataFrame(d)
    values = pd.cut(df["C1"], [1, 2, 3, 6])
    values.name = "cat"
    groups_double_key = df.groupby([values, "C2"], observed=observed)
    idx = MultiIndex.from_arrays([values, [1, 2, 3, 4]], names=["cat", "C2"])
    expected = DataFrame({"C1": [3, 3, 4, 5], "C3": [10, 100, 200, 34]}, index=idx)
    if not observed:
        # expand to the full (bin, C2) product
        expected = cartesian_product_for_groupers(
            expected, [values.values, [1, 2, 3, 4]], ["cat", "C2"]
        )
    result = groups_double_key.agg("mean")
    tm.assert_frame_equal(result, expected)
def test_observed_perf():
    # we create a cartesian product, so this is
    # non-performant if we don't use observed values
    df = DataFrame(
        {
            "cat": np.random.randint(0, 255, size=30000),
            "int_id": np.random.randint(0, 255, size=30000),
            "other_id": np.random.randint(0, 10000, size=30000),
            "foo": 0,
        }
    )
    df["cat"] = df.cat.astype(str).astype("category")
    grouped = df.groupby(["cat", "int_id", "other_id"], observed=True)
    result = grouped.count()
    # With observed=True the result levels keep only the values actually
    # present in each column.
    assert result.index.levels[0].nunique() == df.cat.nunique()
    assert result.index.levels[1].nunique() == df.int_id.nunique()
    assert result.index.levels[2].nunique() == df.other_id.nunique()
def test_observed_groups(observed):
    # gh-20583: .groups must include unobserved categories (with empty
    # indexers) only when observed=False.
    cat = Categorical(["a", "c", "a"], categories=["a", "b", "c"])
    df = DataFrame({"cat": cat, "vals": [1, 2, 3]})
    g = df.groupby("cat", observed=observed)
    result = g.groups
    if observed:
        expected = {"a": Index([0, 2], dtype="int64"), "c": Index([1], dtype="int64")}
    else:
        # unobserved "b" maps to an empty index
        expected = {
            "a": Index([0, 2], dtype="int64"),
            "b": Index([], dtype="int64"),
            "c": Index([1], dtype="int64"),
        }
    tm.assert_dict_equal(result, expected)
def test_observed_groups_with_nan(observed):
    # NaN group keys are always dropped; unobserved categories ("b", "d")
    # appear with empty indexers only when observed=False.
    df = DataFrame(
        {
            "cat": Categorical(["a", np.nan, "a"], categories=["a", "b", "d"]),
            "vals": [1, 2, 3],
        }
    )
    g = df.groupby("cat", observed=observed)
    result = g.groups
    if observed:
        expected = {"a": Index([0, 2], dtype="int64")}
    else:
        expected = {
            "a": Index([0, 2], dtype="int64"),
            "b": Index([], dtype="int64"),
            "d": Index([], dtype="int64"),
        }
    tm.assert_dict_equal(result, expected)
def test_observed_nth():
    # nth() with observed=False must reindex over all categories, yielding
    # NaN for categories with no rows (NaN keys are dropped).
    cat = pd.Categorical(["a", np.nan, np.nan], categories=["a", "b", "c"])
    ser = pd.Series([1, 2, 3])
    df = pd.DataFrame({"cat": cat, "ser": ser})
    result = df.groupby("cat", observed=False)["ser"].nth(0)
    index = pd.Categorical(["a", "b", "c"], categories=["a", "b", "c"])
    expected = pd.Series([1, np.nan, np.nan], index=index, name="ser")
    expected.index.name = "cat"
    tm.assert_series_equal(result, expected)
def test_dataframe_categorical_with_nan(observed):
    # GH 21151: NaN keys are dropped; with observed=False the unobserved
    # categories still appear with NaN aggregation results.
    s1 = Categorical([np.nan, "a", np.nan, "a"], categories=["a", "b", "c"])
    s2 = Series([1, 2, 3, 4])
    df = DataFrame({"s1": s1, "s2": s2})
    result = df.groupby("s1", observed=observed).first().reset_index()
    if observed:
        expected = DataFrame(
            {"s1": Categorical(["a"], categories=["a", "b", "c"]), "s2": [2]}
        )
    else:
        expected = DataFrame(
            {
                "s1": Categorical(["a", "b", "c"], categories=["a", "b", "c"]),
                "s2": [2, np.nan, np.nan],
            }
        )
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("ordered", [True, False])
@pytest.mark.parametrize("observed", [True, False])
@pytest.mark.parametrize("sort", [True, False])
def test_dataframe_categorical_ordered_observed_sort(ordered, observed, sort):
    # GH 25871: Fix groupby sorting on ordered Categoricals
    # Build a dataframe with cat having one unobserved category ('missing'),
    # and a Series with identical values
    label = Categorical(
        ["d", "a", "b", "a", "d", "b"],
        categories=["a", "b", "missing", "d"],
        ordered=ordered,
    )
    val = Series(["d", "a", "b", "a", "d", "b"])
    df = DataFrame({"label": label, "val": val})
    # aggregate on the Categorical
    result = df.groupby("label", observed=observed, sort=sort)["val"].aggregate("first")
    # If ordering works, we expect index labels equal to aggregation results,
    # except for 'observed=False': label 'missing' has aggregation None
    label = Series(result.index.array, dtype="object")
    aggr = Series(result.array)
    if not observed:
        aggr[aggr.isna()] = "missing"
    if not all(label == aggr):
        # BUG FIX: only the first fragment carried the f-prefix, so the
        # {ordered}/{observed}/{sort}/{result} placeholders were emitted
        # literally instead of being interpolated.  Every fragment that
        # contains a placeholder must be an f-string.
        msg = (
            "Labels and aggregation results not consistently sorted\n"
            f"for (ordered={ordered}, observed={observed}, sort={sort})\n"
            f"Result:\n{result}"
        )
        assert False, msg
def test_datetime():
    # GH9049: ensure backward compatibility
    # Grouping by a Categorical built from datetime levels must behave like
    # grouping by the raw values, reindexed to the categorical levels.
    levels = pd.date_range("2014-01-01", periods=4)
    codes = np.random.randint(0, 4, size=100)
    cats = Categorical.from_codes(codes, levels, ordered=True)
    data = DataFrame(np.random.randn(100, 4))
    result = data.groupby(cats, observed=False).mean()
    expected = data.groupby(np.asarray(cats), observed=False).mean()
    expected = expected.reindex(levels)
    expected.index = CategoricalIndex(
        expected.index, categories=expected.index, ordered=True
    )
    tm.assert_frame_equal(result, expected)
    # describe() must match the same data grouped in category-sorted order.
    grouped = data.groupby(cats, observed=False)
    desc_result = grouped.describe()
    idx = cats.codes.argsort()
    ord_labels = cats.take(idx)
    ord_data = data.take(idx)
    expected = ord_data.groupby(ord_labels, observed=False).describe()
    tm.assert_frame_equal(desc_result, expected)
    tm.assert_index_equal(desc_result.index, expected.index)
    tm.assert_index_equal(
        desc_result.index.get_level_values(0), expected.index.get_level_values(0)
    )
    # GH 10460
    # Stacked describe index carries the 4 datetime categories x 8 stats.
    expc = Categorical.from_codes(np.arange(4).repeat(8), levels, ordered=True)
    exp = CategoricalIndex(expc)
    tm.assert_index_equal((desc_result.stack().index.get_level_values(0)), exp)
    exp = Index(["count", "mean", "std", "min", "25%", "50%", "75%", "max"] * 4)
    tm.assert_index_equal((desc_result.stack().index.get_level_values(1)), exp)
def test_categorical_index():
    # Grouping via a categorical index level or a categorical column must
    # both produce a CategoricalIndex covering all categories.
    s = np.random.RandomState(12345)
    levels = ["foo", "bar", "baz", "qux"]
    codes = s.randint(0, 4, size=20)
    cats = Categorical.from_codes(codes, levels, ordered=True)
    df = DataFrame(np.repeat(np.arange(20), 4).reshape(-1, 4), columns=list("abcd"))
    df["cats"] = cats
    # with a cat index
    result = df.set_index("cats").groupby(level=0, observed=False).sum()
    expected = df[list("abcd")].groupby(cats.codes, observed=False).sum()
    expected.index = CategoricalIndex(
        Categorical.from_codes([0, 1, 2, 3], levels, ordered=True), name="cats"
    )
    tm.assert_frame_equal(result, expected)
    # with a cat column, should produce a cat index
    result = df.groupby("cats", observed=False).sum()
    expected = df[list("abcd")].groupby(cats.codes, observed=False).sum()
    expected.index = CategoricalIndex(
        Categorical.from_codes([0, 1, 2, 3], levels, ordered=True), name="cats"
    )
    tm.assert_frame_equal(result, expected)
def test_describe_categorical_columns():
    # GH 11558
    # describe() on a frame with a CategoricalIndex for columns must keep the
    # categorical column index (dtype and order) intact.
    cats = CategoricalIndex(
        ["qux", "foo", "baz", "bar"],
        categories=["foo", "bar", "baz", "qux"],
        ordered=True,
    )
    df = DataFrame(np.random.randn(20, 4), columns=cats)
    result = df.groupby([1, 2, 3, 4] * 5).describe()
    tm.assert_index_equal(result.stack().columns, cats)
    tm.assert_categorical_equal(result.stack().columns.values, cats.values)
def test_unstack_categorical():
    # GH11558 (example is taken from the original issue)
    df = DataFrame(
        {"a": range(10), "medium": ["A", "B"] * 5, "artist": list("XYXXY") * 2}
    )
    df["medium"] = df["medium"].astype("category")
    gcat = df.groupby(["artist", "medium"], observed=False)["a"].count().unstack()
    # describe() of the unstacked frame must keep the categorical columns.
    result = gcat.describe()
    exp_columns = CategoricalIndex(["A", "B"], ordered=False, name="medium")
    tm.assert_index_equal(result.columns, exp_columns)
    tm.assert_categorical_equal(result.columns.values, exp_columns.values)
    # Arithmetic between categorical-labeled columns must align correctly.
    result = gcat["A"] + gcat["B"]
    expected = Series([6, 4], index=Index(["X", "Y"], name="artist"))
    tm.assert_series_equal(result, expected)
def test_bins_unequal_len():
    # GH3011
    data = Series([np.nan, np.nan, 1, 1, 2, 2, 3, 3, 4, 4])
    # dropna() shortens the values before cutting, so the resulting bins
    # grouper is shorter than the series being grouped -- this must raise.
    binned = pd.cut(data.dropna().values, 4)
    with pytest.raises(ValueError):
        data.groupby(binned).mean()
def test_as_index():
    # GH13204
    # as_index=False with a categorical grouper must keep the grouping
    # columns as regular columns (categorical dtype preserved).
    df = DataFrame(
        {
            "cat": Categorical([1, 2, 2], [1, 2, 3]),
            "A": [10, 11, 11],
            "B": [101, 102, 103],
        }
    )
    result = df.groupby(["cat", "A"], as_index=False, observed=True).sum()
    expected = DataFrame(
        {
            "cat": Categorical([1, 2], categories=df.cat.cat.categories),
            "A": [10, 11],
            "B": [101, 205],
        },
        columns=["cat", "A", "B"],
    )
    tm.assert_frame_equal(result, expected)
    # function grouper
    f = lambda r: df.loc[r, "A"]
    result = df.groupby(["cat", f], as_index=False, observed=True).sum()
    expected = DataFrame(
        {
            "cat": Categorical([1, 2], categories=df.cat.cat.categories),
            "A": [10, 22],
            "B": [101, 205],
        },
        columns=["cat", "A", "B"],
    )
    tm.assert_frame_equal(result, expected)
    # another not in-axis grouper (conflicting names in index)
    s = Series(["a", "b", "b"], name="cat")
    result = df.groupby(["cat", s], as_index=False, observed=True).sum()
    tm.assert_frame_equal(result, expected)
    # is original index dropped?
    group_columns = ["cat", "A"]
    expected = DataFrame(
        {
            "cat": Categorical([1, 2], categories=df.cat.cat.categories),
            "A": [10, 11],
            "B": [101, 205],
        },
        columns=["cat", "A", "B"],
    )
    # the incoming index (unnamed, named "X", or clashing with column "B")
    # must never leak into the as_index=False result
    for name in [None, "X", "B"]:
        df.index = Index(list("abc"), name=name)
        result = df.groupby(group_columns, as_index=False, observed=True).sum()
        tm.assert_frame_equal(result, expected)
def test_preserve_categories():
    # GH-13179
    categories = list("abc")
    # ordered=True
    # ordered categoricals always come back in category order, regardless
    # of sort.
    df = DataFrame({"A": Categorical(list("ba"), categories=categories, ordered=True)})
    index = CategoricalIndex(categories, categories, ordered=True, name="A")
    tm.assert_index_equal(
        df.groupby("A", sort=True, observed=False).first().index, index
    )
    tm.assert_index_equal(
        df.groupby("A", sort=False, observed=False).first().index, index
    )
    # ordered=False
    # unordered: sort=True yields category order, sort=False yields
    # appearance order.
    df = DataFrame({"A": Categorical(list("ba"), categories=categories, ordered=False)})
    sort_index = CategoricalIndex(categories, categories, ordered=False, name="A")
    nosort_index = CategoricalIndex(list("bac"), list("bac"), ordered=False, name="A")
    tm.assert_index_equal(
        df.groupby("A", sort=True, observed=False).first().index, sort_index
    )
    tm.assert_index_equal(
        df.groupby("A", sort=False, observed=False).first().index, nosort_index
    )
def test_preserve_categorical_dtype():
    # GH13743, GH13854
    # Grouping by a categorical column must keep that column categorical in
    # the result, for both ordered and unordered groupers.
    df = DataFrame(
        {
            "A": [1, 2, 1, 1, 2],
            "B": [10, 16, 22, 28, 34],
            "C1": Categorical(list("abaab"), categories=list("bac"), ordered=False),
            "C2": Categorical(list("abaab"), categories=list("bac"), ordered=True),
        }
    )
    # single grouper
    exp_full = DataFrame(
        {
            "A": [2.0, 1.0, np.nan],
            "B": [25.0, 20.0, np.nan],
            "C1": Categorical(list("bac"), categories=list("bac"), ordered=False),
            "C2": Categorical(list("bac"), categories=list("bac"), ordered=True),
        }
    )
    for col in ["C1", "C2"]:
        # as_index=False must match as_index=True + reset_index
        result1 = df.groupby(by=col, as_index=False, observed=False).mean()
        result2 = df.groupby(by=col, as_index=True, observed=False).mean().reset_index()
        expected = exp_full.reindex(columns=result1.columns)
        tm.assert_frame_equal(result1, expected)
        tm.assert_frame_equal(result2, expected)
# For each reduction, the expected per-group values when reducing an
# ordered categorical column (order is the categorical order, not lexical).
@pytest.mark.parametrize(
    "func, values",
    [
        ("first", ["second", "first"]),
        ("last", ["fourth", "third"]),
        ("min", ["fourth", "first"]),
        ("max", ["second", "third"]),
    ],
)
def test_preserve_on_ordered_ops(func, values):
    # gh-18502
    # preserve the categoricals on ops
    c = pd.Categorical(["first", "second", "third", "fourth"], ordered=True)
    df = pd.DataFrame({"payload": [-1, -2, -1, -2], "col": c})
    g = df.groupby("payload")
    result = getattr(g, func)()
    expected = pd.DataFrame(
        {"payload": [-2, -1], "col": pd.Series(values, dtype=c.dtype)}
    ).set_index("payload")
    tm.assert_frame_equal(result, expected)
def test_categorical_no_compress():
    # Grouping by a categorical must match grouping by its codes, keeping
    # a CategoricalIndex (including unobserved categories) in the result.
    data = Series(np.random.randn(9))
    codes = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2])
    cats = Categorical.from_codes(codes, [0, 1, 2], ordered=True)
    result = data.groupby(cats, observed=False).mean()
    exp = data.groupby(codes, observed=False).mean()
    exp.index = CategoricalIndex(
        exp.index, categories=cats.categories, ordered=cats.ordered
    )
    tm.assert_series_equal(result, exp)
    # same, with an unobserved category (code 2 never used)
    codes = np.array([0, 0, 0, 1, 1, 1, 3, 3, 3])
    cats = Categorical.from_codes(codes, [0, 1, 2, 3], ordered=True)
    result = data.groupby(cats, observed=False).mean()
    exp = data.groupby(codes, observed=False).mean().reindex(cats.categories)
    exp.index = CategoricalIndex(
        exp.index, categories=cats.categories, ordered=cats.ordered
    )
    tm.assert_series_equal(result, exp)
    # unobserved category "d" yields NaN mean
    cats = Categorical(
        ["a", "a", "a", "b", "b", "b", "c", "c", "c"],
        categories=["a", "b", "c", "d"],
        ordered=True,
    )
    data = DataFrame({"a": [1, 1, 1, 2, 2, 2, 3, 4, 5], "b": cats})
    result = data.groupby("b", observed=False).mean()
    result = result["a"].values
    exp = np.array([1, 2, 4, np.nan])
    tm.assert_numpy_array_equal(result, exp)
def test_groupby_empty_with_category():
    # GH-9614
    # test fix for when group by on None resulted in
    # coercion of dtype categorical -> float
    df = pd.DataFrame(
        {"A": [None] * 3, "B": pd.Categorical(["train", "train", "test"])}
    )
    # all keys are None, so every row is dropped and the result is empty --
    # but column B must stay categorical, not be coerced to float
    result = df.groupby("A").first()["B"]
    expected = pd.Series(
        pd.Categorical([], categories=["test", "train"]),
        index=pd.Series([], dtype="object", name="A"),
        name="B",
    )
    tm.assert_series_equal(result, expected)
def test_sort():
    # https://stackoverflow.com/questions/23814368/sorting-pandas-
    # categorical-labels-after-groupby
    # This should result in a properly sorted Series so that the plot
    # has a sorted x axis
    # self.cat.groupby(['value_group'])['value_group'].count().plot(kind='bar')
    df = DataFrame({"value": np.random.randint(0, 10000, 100)})
    # labels like "0 - 499", "500 - 999", ... keyed to 500-wide bins
    labels = [f"{i} - {i+499}" for i in range(0, 10000, 500)]
    cat_labels = Categorical(labels, labels)
    df = df.sort_values(by=["value"], ascending=True)
    df["value_group"] = pd.cut(
        df.value, range(0, 10500, 500), right=False, labels=cat_labels
    )
    res = df.groupby(["value_group"], observed=False)["value_group"].count()
    # expected order: numeric order of the bin's lower bound
    exp = res[sorted(res.index, key=lambda x: float(x.split()[0]))]
    exp.index = CategoricalIndex(exp.index, name=exp.index.name)
    tm.assert_series_equal(res, exp)
def test_sort2():
# dataframe groupby sort was being ignored # GH 8868
df = DataFrame(
[
["(7.5, 10]", 10, 10],
["(7.5, 10]", 8, 20],
["(2.5, 5]", 5, 30],
["(5, 7.5]", 6, 40],
["(2.5, 5]", 4, 50],
["(0, 2.5]", 1, 60],
["(5, 7.5]", 7, 70],
],
columns=["range", "foo", "bar"],
)
df["range"] = Categorical(df["range"], ordered=True)
index = CategoricalIndex(
["(0, 2.5]", "(2.5, 5]", "(5, 7.5]", "(7.5, 10]"], name="range", ordered=True
)
expected_sort = DataFrame(
[[1, 60], [5, 30], [6, 40], [10, 10]], columns=["foo", "bar"], index=index
)
col = "range"
result_sort = df.groupby(col, sort=True, observed=False).first()
tm.assert_frame_equal(result_sort, expected_sort)
# when categories is ordered, group is ordered by category's order
expected_sort = result_sort
result_sort = df.groupby(col, sort=False, observed=False).first()
tm.assert_frame_equal(result_sort, expected_sort)
df["range"] = Categorical(df["range"], ordered=False)
index = CategoricalIndex(
["(0, 2.5]", "(2.5, 5]", "(5, 7.5]", "(7.5, 10]"], name="range"
)
expected_sort = DataFrame(
[[1, 60], [5, 30], [6, 40], [10, 10]], columns=["foo", "bar"], index=index
)
index = CategoricalIndex(
["(7.5, 10]", "(2.5, 5]", "(5, 7.5]", "(0, 2.5]"],
categories=["(7.5, 10]", "(2.5, 5]", "(5, 7.5]", "(0, 2.5]"],
name="range",
)
expected_nosort = DataFrame(
[[10, 10], [5, 30], [6, 40], [1, 60]], index=index, columns=["foo", "bar"]
)
col = "range"
t_sort = df.groupby(col, sort=True, observed=False).first()
tm.assert_frame_equal(result_sort, expected_sort)
result_nosort = df.groupby(col, sort=False, observed=False).first()
tm.assert_frame_equal(result_nosort, expected_nosort)
def test_sort_datetimelike():
    # Grouping by a categorical of datetimes: ordered categoricals sort by
    # category order regardless of `sort`; unordered ones honor `sort`.
    df = DataFrame(
        {
            "dt": [
                datetime(2011, 7, 1),
                datetime(2011, 7, 1),
                datetime(2011, 2, 1),
                datetime(2011, 5, 1),
                datetime(2011, 2, 1),
                datetime(2011, 1, 1),
                datetime(2011, 5, 1),
            ],
            "foo": [10, 8, 5, 6, 4, 1, 7],
            "bar": [10, 20, 30, 40, 50, 60, 70],
        },
        columns=["dt", "foo", "bar"],
    )
    # ordered = True
    df["dt"] = Categorical(df["dt"], ordered=True)
    index = [
        datetime(2011, 1, 1),
        datetime(2011, 2, 1),
        datetime(2011, 5, 1),
        datetime(2011, 7, 1),
    ]
    result_sort = DataFrame(
        [[1, 60], [5, 30], [6, 40], [10, 10]], columns=["foo", "bar"]
    )
    result_sort.index = CategoricalIndex(index, name="dt", ordered=True)
    index = [
        datetime(2011, 7, 1),
        datetime(2011, 2, 1),
        datetime(2011, 5, 1),
        datetime(2011, 1, 1),
    ]
    result_nosort = DataFrame(
        [[10, 10], [5, 30], [6, 40], [1, 60]], columns=["foo", "bar"]
    )
    result_nosort.index = CategoricalIndex(
        index, categories=index, name="dt", ordered=True
    )
    col = "dt"
    tm.assert_frame_equal(
        result_sort, df.groupby(col, sort=True, observed=False).first()
    )
    # when categories is ordered, group is ordered by category's order
    tm.assert_frame_equal(
        result_sort, df.groupby(col, sort=False, observed=False).first()
    )
    # ordered = False
    df["dt"] = Categorical(df["dt"], ordered=False)
    index = [
        datetime(2011, 1, 1),
        datetime(2011, 2, 1),
        datetime(2011, 5, 1),
        datetime(2011, 7, 1),
    ]
    result_sort = DataFrame(
        [[1, 60], [5, 30], [6, 40], [10, 10]], columns=["foo", "bar"]
    )
    result_sort.index = CategoricalIndex(index, name="dt")
    index = [
        datetime(2011, 7, 1),
        datetime(2011, 2, 1),
        datetime(2011, 5, 1),
        datetime(2011, 1, 1),
    ]
    result_nosort = DataFrame(
        [[10, 10], [5, 30], [6, 40], [1, 60]], columns=["foo", "bar"]
    )
    result_nosort.index = CategoricalIndex(index, categories=index, name="dt")
    col = "dt"
    tm.assert_frame_equal(
        result_sort, df.groupby(col, sort=True, observed=False).first()
    )
    tm.assert_frame_equal(
        result_nosort, df.groupby(col, sort=False, observed=False).first()
    )
def test_empty_sum():
    """Sum over unobserved categories honours ``min_count``.

    See https://github.com/pandas-dev/pandas/issues/18678
    """
    df = DataFrame(
        {"A": Categorical(["a", "a", "b"], categories=["a", "b", "c"]), "B": [1, 2, 1]}
    )
    expected_idx = CategoricalIndex(["a", "b", "c"], name="A")
    grouped = df.groupby("A", observed=False).B
    # default (no min_count): the empty "c" group sums to the identity 0
    tm.assert_series_equal(
        grouped.sum(), Series([3, 1, 0], expected_idx, name="B")
    )
    # explicit thresholds: groups with fewer than min_count values become NaN
    cases = [
        (0, [3, 1, 0]),
        (1, [3, 1, np.nan]),
        (2, [3, np.nan, np.nan]),
    ]
    for min_count, values in cases:
        result = grouped.sum(min_count=min_count)
        tm.assert_series_equal(result, Series(values, expected_idx, name="B"))
def test_empty_prod():
    """Product over unobserved categories honours ``min_count``.

    See https://github.com/pandas-dev/pandas/issues/18678
    """
    df = DataFrame(
        {"A": Categorical(["a", "a", "b"], categories=["a", "b", "c"]), "B": [1, 2, 1]}
    )
    expected_idx = CategoricalIndex(["a", "b", "c"], name="A")
    grouped = df.groupby("A", observed=False).B
    # default (no min_count): empty groups yield the multiplicative identity 1
    tm.assert_series_equal(
        grouped.prod(), Series([2, 1, 1], expected_idx, name="B")
    )
    # explicit thresholds: groups with fewer than min_count values become NaN
    for min_count, values in [(0, [2, 1, 1]), (1, [2, 1, np.nan])]:
        result = grouped.prod(min_count=min_count)
        tm.assert_series_equal(result, Series(values, expected_idx, name="B"))
def test_groupby_multiindex_categorical_datetime():
    """Group by two categorical keys, one of them datetime-valued.

    The result index must be the full cartesian product of both categorical
    key sets, with NaN for unobserved combinations.
    """
    # https://github.com/pandas-dev/pandas/issues/21390
    # NOTE(review): the "1T" (minute) frequency alias is deprecated in newer
    # pandas in favour of "1min" -- confirm against the supported versions.
    df = DataFrame(
        {
            "key1": Categorical(list("abcbabcba")),
            "key2": Categorical(
                list(pd.date_range("2018-06-01 00", freq="1T", periods=3)) * 3
            ),
            "values": np.arange(9),
        }
    )
    result = df.groupby(["key1", "key2"]).mean()
    idx = MultiIndex.from_product(
        [
            Categorical(["a", "b", "c"]),
            Categorical(pd.date_range("2018-06-01 00", freq="1T", periods=3)),
        ],
        names=["key1", "key2"],
    )
    expected = DataFrame({"values": [0, 4, 8, 3, 4, 5, 6, np.nan, 2]}, index=idx)
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
    "as_index, expected",
    [
        (
            True,
            Series(
                index=MultiIndex.from_arrays(
                    [Series([1, 1, 2], dtype="category"), [1, 2, 2]], names=["a", "b"]
                ),
                data=[1, 2, 3],
                name="x",
            ),
        ),
        (
            False,
            DataFrame(
                {
                    "a": Series([1, 1, 2], dtype="category"),
                    "b": [1, 2, 2],
                    "x": [1, 2, 3],
                }
            ),
        ),
    ],
)
def test_groupby_agg_observed_true_single_column(as_index, expected):
    """Single-column sum with ``observed=True``: only observed (a, b) pairs
    appear, both as a MultiIndexed Series (as_index=True) and as a flat
    DataFrame (as_index=False)."""
    # GH-23970
    df = DataFrame(
        {"a": Series([1, 1, 2], dtype="category"), "b": [1, 2, 2], "x": [1, 2, 3]}
    )
    result = df.groupby(["a", "b"], as_index=as_index, observed=True)["x"].sum()
    tm.assert_equal(result, expected)
@pytest.mark.parametrize("fill_value", [None, np.nan, pd.NaT])
def test_shift(fill_value):
    """Shifting a Categorical by one treats every null-like fill_value alike."""
    cats = ["a", "b", "c", "d"]
    ct = Categorical(cats, categories=cats, ordered=False)
    # the new leading slot is missing; the last element falls off the end
    expected = Categorical([None] + cats[:-1], categories=cats, ordered=False)
    res = ct.shift(1, fill_value=fill_value)
    tm.assert_equal(res, expected)
@pytest.fixture
def df_cat(df):
    """First four rows of the ``df`` fixture with "A"/"B" as category dtype.

    "C" is replaced by the integers 1..4 and "D" is dropped; taking only four
    rows intentionally leaves some (A, B) category combinations unobserved.
    """
    df_cat = df.copy()[:4]  # leave out some groups
    df_cat["A"] = df_cat["A"].astype("category")
    df_cat["B"] = df_cat["B"].astype("category")
    df_cat["C"] = Series([1, 2, 3, 4])
    df_cat = df_cat.drop(["D"], axis=1)
    return df_cat
@pytest.mark.parametrize(
    "operation, kwargs", [("agg", dict(dtype="category")), ("apply", dict())]
)
def test_seriesgroupby_observed_true(df_cat, operation, kwargs):
    """With ``observed=True`` only the (A, B) combinations present in
    ``df_cat`` appear in the agg/apply result."""
    # GH 24880
    index = MultiIndex.from_frame(
        DataFrame(
            {"A": ["foo", "foo", "bar", "bar"], "B": ["one", "two", "one", "three"]},
            **kwargs,
        )
    )
    expected = Series(data=[1, 3, 2, 4], index=index, name="C")
    grouped = df_cat.groupby(["A", "B"], observed=True)["C"]
    result = getattr(grouped, operation)(sum)
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("operation", ["agg", "apply"])
@pytest.mark.parametrize("observed", [False, None])
def test_seriesgroupby_observed_false_or_none(df_cat, observed, operation):
    """With ``observed=False`` (or the ``None`` default) the result covers the
    full cartesian product of categories, NaN for unobserved combinations."""
    # GH 24880
    index, _ = MultiIndex.from_product(
        [
            CategoricalIndex(["bar", "foo"], ordered=False),
            CategoricalIndex(["one", "three", "two"], ordered=False),
        ],
        names=["A", "B"],
    ).sortlevel()
    expected = Series(data=[2, 4, np.nan, 1, np.nan, 3], index=index, name="C")
    grouped = df_cat.groupby(["A", "B"], observed=observed)["C"]
    result = getattr(grouped, operation)(sum)
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
    "observed, index, data",
    [
        (
            True,
            MultiIndex.from_tuples(
                [
                    ("foo", "one", "min"),
                    ("foo", "one", "max"),
                    ("foo", "two", "min"),
                    ("foo", "two", "max"),
                    ("bar", "one", "min"),
                    ("bar", "one", "max"),
                    ("bar", "three", "min"),
                    ("bar", "three", "max"),
                ],
                names=["A", "B", None],
            ),
            [1, 1, 3, 3, 2, 2, 4, 4],
        ),
        (
            False,
            MultiIndex.from_product(
                [
                    CategoricalIndex(["bar", "foo"], ordered=False),
                    CategoricalIndex(["one", "three", "two"], ordered=False),
                    Index(["min", "max"]),
                ],
                names=["A", "B", None],
            ),
            [2, 2, 4, 4, np.nan, np.nan, 1, 1, np.nan, np.nan, 3, 3],
        ),
        (
            None,
            MultiIndex.from_product(
                [
                    CategoricalIndex(["bar", "foo"], ordered=False),
                    CategoricalIndex(["one", "three", "two"], ordered=False),
                    Index(["min", "max"]),
                ],
                names=["A", "B", None],
            ),
            [2, 2, 4, 4, np.nan, np.nan, 1, 1, np.nan, np.nan, 3, 3],
        ),
    ],
)
def test_seriesgroupby_observed_apply_dict(df_cat, observed, index, data):
    """``apply`` returning a dict expands into an extra index level; the
    observed/unobserved expectations mirror the non-dict cases above."""
    # GH 24880
    expected = Series(data=data, index=index, name="C")
    result = df_cat.groupby(["A", "B"], observed=observed)["C"].apply(
        lambda x: {"min": x.min(), "max": x.max()}
    )
    tm.assert_series_equal(result, expected)
def test_groupby_categorical_series_dataframe_consistent(df_cat):
# GH 20416
expected = df_cat.groupby(["A", "B"])["C"].mean()
result = df_cat.groupby(["A", "B"]).mean()["C"]
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("code", [([1, 0, 0]), ([0, 0, 0])])
def test_groupby_categorical_axis_1(code):
    """Grouping columns (axis=1) by a Categorical matches transposing and
    grouping rows (axis=0)."""
    # GH 13420
    # NOTE(review): groupby(..., axis=1) was deprecated/removed in newer
    # pandas -- confirm against the supported versions.
    df = DataFrame({"a": [1, 2, 3, 4], "b": [-1, -2, -3, -4], "c": [5, 6, 7, 8]})
    cat = pd.Categorical.from_codes(code, categories=list("abc"))
    result = df.groupby(cat, axis=1).mean()
    expected = df.T.groupby(cat, axis=0).mean().T
    tm.assert_frame_equal(result, expected)
def test_groupby_cat_preserves_structure(observed, ordered_fixture):
    """Aggregating with ``pd.DataFrame.sum`` must round-trip the frame
    unchanged: one row per name, categorical key preserved."""
    # GH 28787
    df = DataFrame(
        {"Name": Categorical(["Bob", "Greg"], ordered=ordered_fixture), "Item": [1, 2]},
        columns=["Name", "Item"],
    )
    expected = df.copy()
    result = (
        df.groupby("Name", observed=observed)
        .agg(pd.DataFrame.sum, skipna=True)
        .reset_index()
    )
    tm.assert_frame_equal(result, expected)
def test_get_nonexistent_category():
    """Accessing a missing column inside ``apply`` surfaces as a KeyError."""
    df = pd.DataFrame({"var": ["a", "a", "b", "b"], "val": range(4)})

    def last_row_frame(rows):
        last = rows.iloc[-1]
        # "vau" is a typo'd column name and must raise, not silently pass
        return pd.DataFrame({"var": [last["var"]], "val": [last["vau"]]})

    with pytest.raises(KeyError, match="'vau'"):
        df.groupby("var").apply(last_row_frame)
def test_series_groupby_on_2_categoricals_unobserved(
    reduction_func: str, observed: bool
):
    """Every reduction returns 4 groups when observed, 4x4=16 otherwise."""
    # GH 17605
    if reduction_func == "ngroup":
        pytest.skip("ngroup is not truly a reduction")
    df = pd.DataFrame(
        {
            "cat_1": pd.Categorical(list("AABB"), categories=list("ABCD")),
            "cat_2": pd.Categorical(list("AB") * 2, categories=list("ABCD")),
            "value": [0.1] * 4,
        }
    )
    # "nth" is the only reduction that needs a positional argument
    args = {"nth": [0]}.get(reduction_func, [])
    expected_length = 4 if observed else 16
    series_groupby = df.groupby(["cat_1", "cat_2"], observed=observed)["value"]
    agg = getattr(series_groupby, reduction_func)
    result = agg(*args)
    assert len(result) == expected_length
@pytest.mark.parametrize(
    "func, zero_or_nan",
    [
        ("all", np.NaN),
        ("any", np.NaN),
        ("count", 0),
        ("first", np.NaN),
        ("idxmax", np.NaN),
        ("idxmin", np.NaN),
        ("last", np.NaN),
        ("mad", np.NaN),
        ("max", np.NaN),
        ("mean", np.NaN),
        ("median", np.NaN),
        ("min", np.NaN),
        ("nth", np.NaN),
        ("nunique", 0),
        ("prod", np.NaN),
        ("quantile", np.NaN),
        ("sem", np.NaN),
        ("size", 0),
        ("skew", np.NaN),
        ("std", np.NaN),
        ("sum", np.NaN),
        ("var", np.NaN),
    ],
)
def test_series_groupby_on_2_categoricals_unobserved_zeroes_or_nans(func, zero_or_nan):
    """Unobserved category combinations yield 0 (counting reductions, which
    then stay integer dtype) or NaN (all other reductions)."""
    # GH 17605
    # Tests whether the unobserved categories in the result contain 0 or NaN
    df = pd.DataFrame(
        {
            "cat_1": pd.Categorical(list("AABB"), categories=list("ABC")),
            "cat_2": pd.Categorical(list("AB") * 2, categories=list("ABC")),
            "value": [0.1] * 4,
        }
    )
    # the five (cat_1, cat_2) pairs involving the never-seen category "C"
    unobserved = [tuple("AC"), tuple("BC"), tuple("CA"), tuple("CB"), tuple("CC")]
    args = {"nth": [0]}.get(func, [])
    series_groupby = df.groupby(["cat_1", "cat_2"], observed=False)["value"]
    agg = getattr(series_groupby, func)
    result = agg(*args)
    for idx in unobserved:
        val = result.loc[idx]
        assert (pd.isna(zero_or_nan) and pd.isna(val)) or (val == zero_or_nan)
    # If we expect unobserved values to be zero, we also expect the dtype to be int
    if zero_or_nan == 0:
        assert np.issubdtype(result.dtype, np.integer)
def test_series_groupby_categorical_aggregation_getitem():
    """Column selection before vs. after aggregation must agree (GH 8870)."""
    df = pd.DataFrame(
        {"foo": [10, 8, 4, 1], "bar": [10, 20, 30, 40], "baz": ["d", "c", "d", "c"]}
    )
    # bin "foo" into four equal-width intervals over [0, 20] -> categorical key
    df["range"] = pd.cut(df["foo"], np.linspace(0, 20, 5))
    groups = df.groupby(["range", "baz"], as_index=True, sort=True)
    selected_then_agg = groups["foo"].agg("mean")
    agg_then_selected = groups.agg("mean")["foo"]
    tm.assert_series_equal(selected_then_agg, agg_then_selected)
@pytest.mark.parametrize(
    "func, expected_values",
    [(pd.Series.nunique, [1, 1, 2]), (pd.Series.count, [1, 2, 2])],
)
def test_groupby_agg_categorical_columns(func, expected_values):
    """``agg`` with Series reducers works on a categorical value column."""
    # 31256
    df = pd.DataFrame(
        {
            "id": [0, 1, 2, 3, 4],
            "groups": [0, 1, 1, 2, 2],
            "value": pd.Categorical([0, 0, 0, 0, 1]),
        }
    ).set_index("id")
    result = df.groupby("groups").agg(func)
    expected = pd.DataFrame(
        {"value": expected_values}, index=pd.Index([0, 1, 2], name="groups"),
    )
    tm.assert_frame_equal(result, expected)
def test_groupby_agg_non_numeric():
    """``agg(Series.nunique)`` and ``nunique()`` agree on a categorical column."""
    df = pd.DataFrame(
        {"A": pd.Categorical(["a", "a", "b"], categories=["a", "b", "c"])}
    )
    keys = [1, 2, 1]
    expected = pd.DataFrame({"A": [2, 1]}, index=[1, 2])
    # generic aggregation path
    tm.assert_frame_equal(df.groupby(keys).agg(pd.Series.nunique), expected)
    # dedicated nunique code path
    tm.assert_frame_equal(df.groupby(keys).nunique(), expected)
| true | true |
1c2dea0484ffeef999767ba4ebabb9f092c5771c | 10,585 | py | Python | desktop/core/ext-py/eventlet-0.21.0/eventlet/tpool.py | HSunboy/hue | caccd8c058eabb8f5899006a6566be46e3af871b | [
"Apache-2.0"
] | 1 | 2021-06-06T04:10:44.000Z | 2021-06-06T04:10:44.000Z | desktop/core/ext-py/eventlet-0.21.0/eventlet/tpool.py | HSunboy/hue | caccd8c058eabb8f5899006a6566be46e3af871b | [
"Apache-2.0"
] | null | null | null | desktop/core/ext-py/eventlet-0.21.0/eventlet/tpool.py | HSunboy/hue | caccd8c058eabb8f5899006a6566be46e3af871b | [
"Apache-2.0"
] | 2 | 2019-06-17T11:51:56.000Z | 2020-07-25T08:29:56.000Z | # Copyright (c) 2007-2009, Linden Research, Inc.
# Copyright (c) 2007, IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import atexit
import imp
import os
import sys
import traceback
import eventlet
from eventlet import event, greenio, greenthread, patcher, timeout
from eventlet.support import six
__all__ = ['execute', 'Proxy', 'killall', 'set_num_threads']
# Exceptions caught in worker threads and shipped back (as exc_info) to the
# calling greenthread; SYS_EXCS propagate and kill the worker instead.
EXC_CLASSES = (Exception, timeout.Timeout)
SYS_EXCS = (GeneratorExit, KeyboardInterrupt, SystemExit)
# When True, execute() re-raises worker exceptions without first printing
# the traceback and current stack.
QUIET = True
# Use the original (unpatched) modules: the pool runs real OS threads and
# must block on real sockets/queues regardless of eventlet monkeypatching.
socket = patcher.original('socket')
threading = patcher.original('threading')
if six.PY2:
    Queue_module = patcher.original('Queue')
if six.PY3:
    Queue_module = patcher.original('queue')
Empty = Queue_module.Empty
Queue = Queue_module.Queue
# Lazily initialized pool state (see setup()/killall()):
_bytetosend = b' '  # single-byte wakeup token written by workers
_coro = None  # greenthread running tpool_trampoline()
_nthreads = int(os.environ.get('EVENTLET_THREADPOOL_SIZE', 20))
_reqq = _rspq = None  # request / response queues
_rsock = _wsock = None  # socket pair: hub-side reader / worker-side writer
_setup_already = False
_threads = []  # live worker Thread objects
def tpool_trampoline():
    """Hub-side pump: deliver worker results to waiting greenthreads.

    Runs as a greenthread (spawned by setup()).  Each completed request is
    signalled by one byte on ``_rsock``; after every wakeup the response
    queue is drained and each result is sent to the Event that execute()
    is waiting on.
    """
    global _rspq
    while True:
        try:
            _c = _rsock.recv(1)
            assert _c
            # FIXME: this is probably redundant since using sockets instead of pipe now
        except ValueError:
            break  # will be raised when pipe is closed
        while not _rspq.empty():
            try:
                (e, rv) = _rspq.get(block=False)
                e.send(rv)
                e = rv = None  # drop references promptly
            except Empty:
                pass
def tworker():
    """Main loop of one native worker thread.

    Pulls ``(event, meth, args, kwargs)`` tuples off the request queue, runs
    the call, pushes the result (or ``sys.exc_info()`` on failure) onto the
    response queue, then writes one byte to ``_wsock`` to wake the trampoline
    greenthread.  A ``None`` message is the shutdown sentinel from killall().
    """
    global _rspq
    while True:
        try:
            msg = _reqq.get()
        except AttributeError:
            return  # can't get anything off of a dud queue
        if msg is None:
            return  # shutdown sentinel
        (e, meth, args, kwargs) = msg
        rv = None
        try:
            rv = meth(*args, **kwargs)
        except SYS_EXCS:
            raise
        except EXC_CLASSES:
            rv = sys.exc_info()
            # test_leakage_from_tracebacks verifies that the use of
            # exc_info does not lead to memory leaks
        _rspq.put((e, rv))
        msg = meth = args = kwargs = e = rv = None  # drop references promptly
        _wsock.sendall(_bytetosend)
def execute(meth, *args, **kwargs):
    """
    Execute *meth* in a Python thread, blocking the current coroutine/
    greenthread until the method completes.

    The primary use case for this is to wrap an object or module that is not
    amenable to monkeypatching or any of the other tricks that Eventlet uses
    to achieve cooperative yielding.  With tpool, you can force such objects to
    cooperate with green threads by sticking them in native threads, at the cost
    of some overhead.
    """
    setup()
    # if already in tpool, don't recurse into the tpool
    # also, call functions directly if we're inside an import lock, because
    # if meth does any importing (sadly common), it will hang
    # NOTE(review): the ``imp`` module (imp.lock_held) was removed in
    # Python 3.12 -- confirm a replacement for supported interpreters.
    my_thread = threading.currentThread()
    if my_thread in _threads or imp.lock_held() or _nthreads == 0:
        return meth(*args, **kwargs)
    e = event.Event()
    _reqq.put((e, meth, args, kwargs))
    rv = e.wait()
    # a (class, exc, tb) triple means the worker caught an exception;
    # re-raise it here in the calling greenthread
    if isinstance(rv, tuple) \
            and len(rv) == 3 \
            and isinstance(rv[1], EXC_CLASSES):
        (c, e, tb) = rv
        if not QUIET:
            traceback.print_exception(c, e, tb)
            traceback.print_stack()
        six.reraise(c, e, tb)
    return rv
def proxy_call(autowrap, f, *args, **kwargs):
    """Invoke *f* and wrap the result in a :class:`Proxy` when its type is
    listed in the *autowrap* collection.

    By default *f* runs in the native thread pool via :func:`execute`; pass
    the keyword argument ``nonblocking=True`` to call it directly instead.
    That is useful for methods that need no separate thread but whose return
    values should still be Proxy-wrapped.
    """
    run_directly = kwargs.pop('nonblocking', False)
    result = f(*args, **kwargs) if run_directly else execute(f, *args, **kwargs)
    return Proxy(result, autowrap) if isinstance(result, autowrap) else result
class Proxy(object):
    """
    a simple proxy-wrapper of any object that comes with a
    methods-only interface, in order to forward every method
    invocation onto a thread in the native-thread pool. A key
    restriction is that the object's methods should not switch
    greenlets or use Eventlet primitives, since they are in a
    different thread from the main hub, and therefore might behave
    unexpectedly. This is for running native-threaded code
    only.

    It's common to want to have some of the attributes or return
    values also wrapped in Proxy objects (for example, database
    connection objects produce cursor objects which also should be
    wrapped in Proxy objects to remain nonblocking). *autowrap*, if
    supplied, is a collection of types; if an attribute or return
    value matches one of those types (via isinstance), it will be
    wrapped in a Proxy. *autowrap_names* is a collection
    of strings, which represent the names of attributes that should be
    wrapped in Proxy objects when accessed.
    """
    def __init__(self, obj, autowrap=(), autowrap_names=()):
        self._obj = obj
        self._autowrap = autowrap
        self._autowrap_names = autowrap_names
    def __getattr__(self, attr_name):
        # Non-callable attributes are returned as-is (or Proxy-wrapped if
        # requested); callables are wrapped so they run through proxy_call.
        f = getattr(self._obj, attr_name)
        if not hasattr(f, '__call__'):
            if isinstance(f, self._autowrap) or attr_name in self._autowrap_names:
                return Proxy(f, self._autowrap)
            return f
        def doit(*args, **kwargs):
            # forward the call into the thread pool; wrap named results
            result = proxy_call(self._autowrap, f, *args, **kwargs)
            if attr_name in self._autowrap_names and not isinstance(result, Proxy):
                return Proxy(result)
            return result
        return doit
    # the following are a buncha methods that the python interpeter
    # doesn't use getattr to retrieve and therefore have to be defined
    # explicitly
    def __getitem__(self, key):
        return proxy_call(self._autowrap, self._obj.__getitem__, key)
    def __setitem__(self, key, value):
        return proxy_call(self._autowrap, self._obj.__setitem__, key, value)
    def __deepcopy__(self, memo=None):
        return proxy_call(self._autowrap, self._obj.__deepcopy__, memo)
    def __copy__(self, memo=None):
        return proxy_call(self._autowrap, self._obj.__copy__, memo)
    def __call__(self, *a, **kw):
        if '__call__' in self._autowrap_names:
            return Proxy(proxy_call(self._autowrap, self._obj, *a, **kw))
        else:
            return proxy_call(self._autowrap, self._obj, *a, **kw)
    def __enter__(self):
        return proxy_call(self._autowrap, self._obj.__enter__)
    def __exit__(self, *exc):
        return proxy_call(self._autowrap, self._obj.__exit__, *exc)
    # these don't go through a proxy call, because they're likely to
    # be called often, and are unlikely to be implemented on the
    # wrapped object in such a way that they would block
    def __eq__(self, rhs):
        return self._obj == rhs
    def __hash__(self):
        return self._obj.__hash__()
    def __repr__(self):
        return self._obj.__repr__()
    def __str__(self):
        return self._obj.__str__()
    def __len__(self):
        return len(self._obj)
    def __nonzero__(self):
        return bool(self._obj)
    # Python3
    __bool__ = __nonzero__
    def __iter__(self):
        # if iter() returns the object itself, iterate via our own next();
        # otherwise wrap the fresh iterator so its next() also proxies
        it = iter(self._obj)
        if it == self._obj:
            return self
        else:
            return Proxy(it)
    def next(self):
        return proxy_call(self._autowrap, next, self._obj)
    # Python3
    __next__ = next
def setup():
    """One-time initialization of the thread pool (idempotent).

    Creates the request/response queues, a connected loopback socket pair
    used to wake the hub (worker side is a real socket, hub side a green
    socket), starts ``_nthreads`` daemon worker threads, and spawns the
    trampoline greenthread.
    """
    global _rsock, _wsock, _coro, _setup_already, _rspq, _reqq
    if _setup_already:
        return
    else:
        _setup_already = True
    assert _nthreads >= 0, "Can't specify negative number of threads"
    if _nthreads == 0:
        import warnings
        warnings.warn("Zero threads in tpool.  All tpool.execute calls will\
    execute in main thread.  Check the value of the environment \
    variable EVENTLET_THREADPOOL_SIZE.", RuntimeWarning)
    _reqq = Queue(maxsize=-1)
    _rspq = Queue(maxsize=-1)
    # connected socket pair
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.bind(('127.0.0.1', 0))
    sock.listen(1)
    csock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    csock.connect(sock.getsockname())
    csock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)
    _wsock, _addr = sock.accept()
    _wsock.settimeout(None)
    _wsock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)
    sock.close()  # listener no longer needed once the pair is connected
    _rsock = greenio.GreenSocket(csock)
    _rsock.settimeout(None)
    for i in six.moves.range(_nthreads):
        t = threading.Thread(target=tworker,
                             name="tpool_thread_%s" % i)
        t.setDaemon(True)
        t.start()
        _threads.append(t)
    _coro = greenthread.spawn_n(tpool_trampoline)
    # This yield fixes subtle error with GreenSocket.__del__
    eventlet.sleep(0)
# Avoid ResourceWarning unclosed socket on Python3.2+
@atexit.register
def killall():
    """Shut the pool down and reset module state.

    Sends one shutdown sentinel per worker, joins them, delivers any pending
    results, kills the trampoline greenthread and closes both sockets.
    Registered with atexit; may also be called directly, after which setup()
    can re-create the pool.
    """
    global _setup_already, _rspq, _rsock, _wsock
    if not _setup_already:
        return
    # This yield fixes freeze in some scenarios
    eventlet.sleep(0)
    for thr in _threads:
        _reqq.put(None)  # one shutdown sentinel per worker
    for thr in _threads:
        thr.join()
    del _threads[:]
    # return any remaining results
    while (_rspq is not None) and not _rspq.empty():
        try:
            (e, rv) = _rspq.get(block=False)
            e.send(rv)
            e = rv = None
        except Empty:
            pass
    if _coro is not None:
        greenthread.kill(_coro)
    if _rsock is not None:
        _rsock.close()
        _rsock = None
    if _wsock is not None:
        _wsock.close()
        _wsock = None
    _rspq = None
    _setup_already = False
def set_num_threads(nthreads):
    """Set the size of the native thread pool used by :func:`execute`.

    Worker threads are spawned once in :func:`setup`, so calling this after
    the pool has been initialized has no effect until :func:`killall` resets
    the pool.

    :param nthreads: desired number of worker threads; must be non-negative.
        0 makes execute() run every call directly in the calling thread.
    :raises ValueError: if *nthreads* is negative (fail fast here instead of
        deferring to the assert inside setup()).
    """
    global _nthreads
    # coerce to int so setup()'s range(_nthreads) cannot fail on a float
    nthreads = int(nthreads)
    if nthreads < 0:
        raise ValueError("Can't specify negative number of threads")
    _nthreads = nthreads
| 31.409496 | 83 | 0.649032 |
import atexit
import imp
import os
import sys
import traceback
import eventlet
from eventlet import event, greenio, greenthread, patcher, timeout
from eventlet.support import six
__all__ = ['execute', 'Proxy', 'killall', 'set_num_threads']
# Exceptions caught in workers and re-raised in the caller vs. those that
# propagate and kill the worker thread.
EXC_CLASSES = (Exception, timeout.Timeout)
SYS_EXCS = (GeneratorExit, KeyboardInterrupt, SystemExit)
# When True, execute() re-raises worker exceptions without printing them.
QUIET = True
# Original (unpatched) modules: the pool needs real OS threads/sockets.
socket = patcher.original('socket')
threading = patcher.original('threading')
if six.PY2:
    Queue_module = patcher.original('Queue')
if six.PY3:
    Queue_module = patcher.original('queue')
Empty = Queue_module.Empty
Queue = Queue_module.Queue
# Lazily initialized pool state (see setup()/killall()):
_bytetosend = b' '  # single-byte wakeup token written by workers
_coro = None  # greenthread running tpool_trampoline()
_nthreads = int(os.environ.get('EVENTLET_THREADPOOL_SIZE', 20))
_reqq = _rspq = None  # request / response queues
_rsock = _wsock = None  # hub-side reader / worker-side writer sockets
_setup_already = False
_threads = []  # live worker Thread objects
def tpool_trampoline():
    """Hub-side pump: read one wakeup byte per completed request from
    ``_rsock``, then drain ``_rspq`` and deliver each result to the Event
    its caller is waiting on.  Exits when the socket is closed."""
    global _rspq
    while True:
        try:
            _c = _rsock.recv(1)
            assert _c
        except ValueError:
            break  # raised when the socket is closed (killall)
        while not _rspq.empty():
            try:
                (e, rv) = _rspq.get(block=False)
                e.send(rv)
                e = rv = None
            except Empty:
                pass
def tworker():
    """Worker-thread loop: run ``(event, meth, args, kwargs)`` requests from
    ``_reqq``, push the result (or ``sys.exc_info()``) onto ``_rspq`` and
    write one byte to ``_wsock`` to wake the trampoline.  A ``None`` message
    is the shutdown sentinel."""
    global _rspq
    while True:
        try:
            msg = _reqq.get()
        except AttributeError:
            return  # queue was torn down
        if msg is None:
            return  # shutdown sentinel
        (e, meth, args, kwargs) = msg
        rv = None
        try:
            rv = meth(*args, **kwargs)
        except SYS_EXCS:
            raise
        except EXC_CLASSES:
            rv = sys.exc_info()
        # test_leakage_from_tracebacks verifies that the use of
        # exc_info does not lead to memory leaks
        _rspq.put((e, rv))
        msg = meth = args = kwargs = e = rv = None
        _wsock.sendall(_bytetosend)
def execute(meth, *args, **kwargs):
    """Run *meth* in a native pool thread, blocking the current greenthread
    until it completes; worker exceptions are re-raised here."""
    setup()
    # if already in tpool, don't recurse into the tpool
    # also run directly while the import lock is held, because
    # if meth does any importing (sadly common), it will hang
    my_thread = threading.currentThread()
    if my_thread in _threads or imp.lock_held() or _nthreads == 0:
        return meth(*args, **kwargs)
    e = event.Event()
    _reqq.put((e, meth, args, kwargs))
    rv = e.wait()
    # a (class, exc, tb) triple means the worker caught an exception
    if isinstance(rv, tuple) \
            and len(rv) == 3 \
            and isinstance(rv[1], EXC_CLASSES):
        (c, e, tb) = rv
        if not QUIET:
            traceback.print_exception(c, e, tb)
            traceback.print_stack()
        six.reraise(c, e, tb)
    return rv
def proxy_call(autowrap, f, *args, **kwargs):
    """Call *f* (via execute(), or directly when ``nonblocking=True`` is
    passed) and Proxy-wrap the result if its type is in *autowrap*."""
    if kwargs.pop('nonblocking', False):
        rv = f(*args, **kwargs)
    else:
        rv = execute(f, *args, **kwargs)
    if isinstance(rv, autowrap):
        return Proxy(rv, autowrap)
    else:
        return rv
class Proxy(object):
    """Proxy-wrapper that forwards every method invocation of the wrapped
    object onto a native pool thread (via proxy_call/execute).

    *autowrap* is a collection of types whose instances are themselves
    Proxy-wrapped when returned or accessed; *autowrap_names* lists attribute
    names whose values are always wrapped.
    """
    def __init__(self, obj, autowrap=(), autowrap_names=()):
        self._obj = obj
        self._autowrap = autowrap
        self._autowrap_names = autowrap_names
    def __getattr__(self, attr_name):
        # non-callables are returned (possibly wrapped); callables are
        # deferred through proxy_call via the doit closure
        f = getattr(self._obj, attr_name)
        if not hasattr(f, '__call__'):
            if isinstance(f, self._autowrap) or attr_name in self._autowrap_names:
                return Proxy(f, self._autowrap)
            return f
        def doit(*args, **kwargs):
            result = proxy_call(self._autowrap, f, *args, **kwargs)
            if attr_name in self._autowrap_names and not isinstance(result, Proxy):
                return Proxy(result)
            return result
        return doit
    # the following are a buncha methods that the python interpeter
    # doesn't use getattr to retrieve and therefore have to be defined
    # explicitly
    def __getitem__(self, key):
        return proxy_call(self._autowrap, self._obj.__getitem__, key)
    def __setitem__(self, key, value):
        return proxy_call(self._autowrap, self._obj.__setitem__, key, value)
    def __deepcopy__(self, memo=None):
        return proxy_call(self._autowrap, self._obj.__deepcopy__, memo)
    def __copy__(self, memo=None):
        return proxy_call(self._autowrap, self._obj.__copy__, memo)
    def __call__(self, *a, **kw):
        if '__call__' in self._autowrap_names:
            return Proxy(proxy_call(self._autowrap, self._obj, *a, **kw))
        else:
            return proxy_call(self._autowrap, self._obj, *a, **kw)
    def __enter__(self):
        return proxy_call(self._autowrap, self._obj.__enter__)
    def __exit__(self, *exc):
        return proxy_call(self._autowrap, self._obj.__exit__, *exc)
    # cheap dunders below bypass the pool: they're called often and are
    # unlikely to block
    def __eq__(self, rhs):
        return self._obj == rhs
    def __hash__(self):
        return self._obj.__hash__()
    def __repr__(self):
        return self._obj.__repr__()
    def __str__(self):
        return self._obj.__str__()
    def __len__(self):
        return len(self._obj)
    def __nonzero__(self):
        return bool(self._obj)
    # Python3
    __bool__ = __nonzero__
    def __iter__(self):
        it = iter(self._obj)
        if it == self._obj:
            return self
        else:
            return Proxy(it)
    def next(self):
        return proxy_call(self._autowrap, next, self._obj)
    # Python3
    __next__ = next
def setup():
    """One-time, idempotent pool initialization: queues, a connected
    loopback socket pair (worker-side real socket, hub-side green socket),
    ``_nthreads`` daemon workers and the trampoline greenthread."""
    global _rsock, _wsock, _coro, _setup_already, _rspq, _reqq
    if _setup_already:
        return
    else:
        _setup_already = True
    assert _nthreads >= 0, "Can't specify negative number of threads"
    if _nthreads == 0:
        import warnings
        warnings.warn("Zero threads in tpool.  All tpool.execute calls will\
    execute in main thread.  Check the value of the environment \
    variable EVENTLET_THREADPOOL_SIZE.", RuntimeWarning)
    _reqq = Queue(maxsize=-1)
    _rspq = Queue(maxsize=-1)
    # connected socket pair
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.bind(('127.0.0.1', 0))
    sock.listen(1)
    csock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    csock.connect(sock.getsockname())
    csock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)
    _wsock, _addr = sock.accept()
    _wsock.settimeout(None)
    _wsock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)
    sock.close()  # listener no longer needed once the pair is connected
    _rsock = greenio.GreenSocket(csock)
    _rsock.settimeout(None)
    for i in six.moves.range(_nthreads):
        t = threading.Thread(target=tworker,
                             name="tpool_thread_%s" % i)
        t.setDaemon(True)
        t.start()
        _threads.append(t)
    _coro = greenthread.spawn_n(tpool_trampoline)
    # This yield fixes subtle error with GreenSocket.__del__
    eventlet.sleep(0)
# Avoid ResourceWarning unclosed socket on Python3.2+
@atexit.register
def killall():
    """Shut the pool down (sentinels + join), deliver pending results, kill
    the trampoline, close both sockets and reset state so setup() can run
    again.  Registered with atexit; callable directly too."""
    global _setup_already, _rspq, _rsock, _wsock
    if not _setup_already:
        return
    # This yield fixes freeze in some scenarios
    eventlet.sleep(0)
    for thr in _threads:
        _reqq.put(None)  # one shutdown sentinel per worker
    for thr in _threads:
        thr.join()
    del _threads[:]
    # return any remaining results
    while (_rspq is not None) and not _rspq.empty():
        try:
            (e, rv) = _rspq.get(block=False)
            e.send(rv)
            e = rv = None
        except Empty:
            pass
    if _coro is not None:
        greenthread.kill(_coro)
    if _rsock is not None:
        _rsock.close()
        _rsock = None
    if _wsock is not None:
        _wsock.close()
        _wsock = None
    _rspq = None
    _setup_already = False
def set_num_threads(nthreads):
    """Set the pool size used by execute(); workers are spawned in setup(),
    so this only takes effect before first use or after killall()."""
    global _nthreads
    _nthreads = nthreads
| true | true |
1c2deb37c0ac62169875bd44d9a996130cb99911 | 1,205 | py | Python | torch2trt_dynamic/converters/squeeze.py | jinfagang/pilgrim_torch2trt | 27a8e6a195cbc3a83b16483ec4c0930da4aa77e6 | [
"MIT"
] | 20 | 2020-10-10T06:14:50.000Z | 2021-09-22T08:50:16.000Z | torch2trt_dynamic/converters/squeeze.py | jinfagang/pilgrim_torch2trt | 27a8e6a195cbc3a83b16483ec4c0930da4aa77e6 | [
"MIT"
] | 2 | 2020-11-02T11:45:24.000Z | 2021-02-17T15:20:04.000Z | torch2trt_dynamic/converters/squeeze.py | jinfagang/pilgrim_torch2trt | 27a8e6a195cbc3a83b16483ec4c0930da4aa77e6 | [
"MIT"
] | 4 | 2020-10-10T05:14:18.000Z | 2020-10-27T01:47:30.000Z | from torch2trt_dynamic.torch2trt_dynamic import *
from torch2trt_dynamic.module_test import add_module_test
from .identity import *
@tensorrt_converter('torch.Tensor.squeeze')
@tensorrt_converter('torch.squeeze')
def convert_squeeze(ctx):
    """Convert ``torch.squeeze`` / ``Tensor.squeeze`` to TensorRT layers.

    The squeezed shape is built at runtime: the sizes of the surviving
    dimensions are gathered from the network's shape tensor and fed into a
    shuffle (reshape) layer via its second input.
    """
    input = ctx.method_args[0]
    dim = get_arg(ctx, 'dim', pos=1, default=None)
    if dim is None:
        # no dim given: squeeze every axis whose static size is 1
        dim = list(filter(lambda x:input.shape[x]==1, range(len(input.shape))))
    else:
        if input.shape[dim]!=1:
            # squeezing a non-singleton axis is a no-op; emit identity
            ctx.method_args = [input]
            convert_identity(ctx)
            return
        if dim <0:
            dim = len(input.shape)+dim
        dim = [dim]
    input_trt = trt_(ctx.network, input)
    shape_trt = ctx.network.add_shape(input_trt).get_output(0)
    output = ctx.method_return
    # indices of the dimensions that survive the squeeze
    reverse_dim = list(filter(lambda x: x not in dim, range(len(input.shape))))
    reverse_dim_trt = trt_(ctx.network, torch.tensor(reverse_dim,dtype=torch.int32).to(input.device))
    new_shape_trt = ctx.network.add_gather(shape_trt, reverse_dim_trt, 0).get_output(0)
    layer = ctx.network.add_shuffle(input_trt)
    layer.set_input(1, new_shape_trt)
    output._trt = layer.get_output(0) | 36.515152 | 102 | 0.663071 | from torch2trt_dynamic.torch2trt_dynamic import *
from torch2trt_dynamic.module_test import add_module_test
from .identity import *
@tensorrt_converter('torch.Tensor.squeeze')
@tensorrt_converter('torch.squeeze')
def convert_squeeze(ctx):
    """Convert ``torch.squeeze`` to a TensorRT gather + shuffle (reshape),
    building the output shape from the runtime shape tensor."""
    input = ctx.method_args[0]
    dim = get_arg(ctx, 'dim', pos=1, default=None)
    if dim is None:
        # squeeze every axis whose static size is 1
        dim = list(filter(lambda x:input.shape[x]==1, range(len(input.shape))))
    else:
        if input.shape[dim]!=1:
            # non-singleton axis: squeeze is a no-op, forward as identity
            ctx.method_args = [input]
            convert_identity(ctx)
            return
        if dim <0:
            dim = len(input.shape)+dim
        dim = [dim]
    input_trt = trt_(ctx.network, input)
    shape_trt = ctx.network.add_shape(input_trt).get_output(0)
    output = ctx.method_return
    # indices of the dimensions that survive the squeeze
    reverse_dim = list(filter(lambda x: x not in dim, range(len(input.shape))))
    reverse_dim_trt = trt_(ctx.network, torch.tensor(reverse_dim,dtype=torch.int32).to(input.device))
    new_shape_trt = ctx.network.add_gather(shape_trt, reverse_dim_trt, 0).get_output(0)
    layer = ctx.network.add_shuffle(input_trt)
    layer.set_input(1, new_shape_trt)
    output._trt = layer.get_output(0) | true | true
1c2deb6095ef7a4cf2ee20f56169182f5e2efe48 | 35,086 | py | Python | plugins/modules/dellemc_unity_smbshare.py | fobrice/ansible-unity | ad7271cf285ee07a18abdbb06e4490c091c936cb | [
"Apache-2.0"
] | null | null | null | plugins/modules/dellemc_unity_smbshare.py | fobrice/ansible-unity | ad7271cf285ee07a18abdbb06e4490c091c936cb | [
"Apache-2.0"
] | null | null | null | plugins/modules/dellemc_unity_smbshare.py | fobrice/ansible-unity | ad7271cf285ee07a18abdbb06e4490c091c936cb | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright: (c) 2020, DellEMC
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: dellemc_unity_smbshare
version_added: '1.1.0'
short_description: Manage SMB shares on Unity storage system.
extends_documentation_fragment:
- dellemc.unity.dellemc_unity.unity
author:
- P Srinivas Rao (@srinivas-rao5) <ansible.team@dell.com>
description:
- Managing SMB Shares on Unity storage system includes create, get,
modify, and delete the smb shares.
options:
share_name:
description:
- Name of the SMB share.
- Required during creation of the SMB share.
- For all other operations either share_name or share_id is required.
type: str
share_id:
description:
- ID of the SMB share.
- Should not be specified during creation. Id is auto generated.
- For all other operations either share_name or share_id is required.
- If share_id is used then no need to pass nas_server/filesystem/snapshot/path.
type: str
path:
description:
- Local path to the file system/Snapshot or any existing sub-folder of
the file system/Snapshot that is shared over the network.
- Path is relative to the root of the filesystem.
- Required for creation of the SMB share.
type: str
filesystem_id:
description:
- The ID of the File System.
- Either filesystem_name or filesystem_id is required for creation of the SMB share for filesystem.
- If filesystem name is specified, then nas_server_name/nas_server_id is required to
uniquely identify the filesystem.
- filesystem_name and filesystem_id are mutually exclusive parameters.
type: str
snapshot_id:
description:
- The ID of the Filesystem Snapshot.
- Either snapshot_name or snapshot_id is required for creation of the SMB share for a snapshot.
- If snapshot name is specified, then nas_server_name/nas_server_id is required to
uniquely identify the snapshot.
- snapshot_name and snapshot_id are mutually exclusive parameters.
type: str
nas_server_id:
description:
- The ID of the NAS Server.
- It is not required if share_id is used.
type: str
filesystem_name:
description:
- The Name of the File System.
- Either filesystem_name or filesystem_id is required for creation of the SMB share for filesystem.
- If filesystem name is specified, then nas_server_name/nas_server_id is required to
uniquely identify the filesystem.
        - filesystem_name and filesystem_id are mutually exclusive parameters.
type: str
snapshot_name:
description:
- The Name of the Filesystem Snapshot.
- Either snapshot_name or snapshot_id is required for creation of the SMB share for a snapshot.
- If snapshot name is specified, then nas_server_name/nas_server_id is required to
uniquely identify the snapshot.
- snapshot_name and snapshot_id are mutually exclusive parameters.
type: str
nas_server_name:
description:
- The Name of the NAS Server.
- It is not required if share_id is used.
- nas_server_name and nas_server_id are mutually exclusive parameters.
type: str
description:
description:
- Description for the SMB share.
- Optional parameter when creating a share.
- To modify, pass the new value in description field.
type: str
is_abe_enabled:
description:
- Indicates whether Access-based Enumeration (ABE) for SMB share is enabled.
- During creation, if not mentioned then default is False.
type: bool
is_branch_cache_enabled:
description:
- Indicates whether Branch Cache optimization for SMB share is enabled.
- During creation, if not mentioned then default is False.
type: bool
is_continuous_availability_enabled:
description:
- Indicates whether continuous availability for SMB 3.0 is enabled.
- During creation, if not mentioned then default is False.
type: bool
is_encryption_enabled:
description:
- Indicates whether encryption for SMB 3.0 is enabled at the shared folder level.
- During creation, if not mentioned then default is False.
type: bool
offline_availability:
description:
- Defines valid states of Offline Availability.
- MANUAL- Only specified files will be available offline.
- DOCUMENTS- All files that users open will be available offline.
- PROGRAMS- Program will preferably run from the offline cache even when
connected to the network. All files that users open will be available offline.
- NONE- Prevents clients from storing documents and programs in offline cache.
type: str
choices: ["MANUAL","DOCUMENTS","PROGRAMS","NONE"]
umask:
description:
- The default UNIX umask for new files created on the SMB Share.
type: str
state:
description:
- Define whether the SMB share should exist or not.
- present indicates that the share should exist on the system.
- absent indicates that the share should not exist on the system.
type: str
required: true
choices: ['absent', 'present']
notes:
- When ID/Name of the filesystem/snapshot is passed then nas_server is not required.
If passed, then filesystem/snapshot should exist for the mentioned nas_server,
else the task will fail.
'''
EXAMPLES = r'''
- name: Create SMB share for a filesystem
dellemc_unity_smbshare:
unispherehost: "{{unispherehost}}"
username: "{{username}}"
password: "{{password}}"
verifycert: "{{verifycert}}"
share_name: "sample_smb_share"
filesystem_name: "sample_fs"
nas_server_id: "NAS_11"
path: "/sample_fs"
description: "Sample SMB share created"
is_abe_enabled: True
is_branch_cache_enabled: True
offline_availability: "DOCUMENTS"
is_continuous_availability_enabled: True
is_encryption_enabled: True
umask: "777"
state: "present"
- name: Modify Attributes of SMB share for a filesystem
dellemc_unity_smbshare:
unispherehost: "{{unispherehost}}"
username: "{{username}}"
password: "{{password}}"
verifycert: "{{verifycert}}"
share_name: "sample_smb_share"
nas_server_name: "sample_nas_server"
description: "Sample SMB share attributes updated"
is_abe_enabled: False
is_branch_cache_enabled: False
offline_availability: "MANUAL"
is_continuous_availability_enabled: "False"
is_encryption_enabled: "False"
umask: "022"
state: "present"
- name: Create SMB share for a snapshot
dellemc_unity_smbshare:
unispherehost: "{{unispherehost}}"
username: "{{username}}"
password: "{{password}}"
verifycert: "{{verifycert}}"
share_name: "sample_snap_smb_share"
snapshot_name: "sample_snapshot"
nas_server_id: "NAS_11"
path: "/sample_snapshot"
description: "Sample SMB share created for snapshot"
is_abe_enabled: True
is_branch_cache_enabled: True
is_continuous_availability_enabled: True
is_encryption_enabled: True
umask: "777"
state: "present"
- name: Modify Attributes of SMB share for a snapshot
dellemc_unity_smbshare:
unispherehost: "{{unispherehost}}"
username: "{{username}}"
password: "{{password}}"
verifycert: "{{verifycert}}"
share_name: "sample_snap_smb_share"
snapshot_name: "sample_snapshot"
description: "Sample SMB share attributes updated for snapshot"
is_abe_enabled: False
is_branch_cache_enabled: False
offline_availability: "MANUAL"
is_continuous_availability_enabled: "False"
is_encryption_enabled: "False"
umask: "022"
state: "present"
- name: Get details of SMB share
dellemc_unity_smbshare:
unispherehost: "{{unispherehost}}"
username: "{{username}}"
password: "{{password}}"
verifycert: "{{verifycert}}"
share_id: "{{smb_share_id}}"
state: "present"
- name: Delete SMB share
dellemc_unity_smbshare:
unispherehost: "{{unispherehost}}"
username: "{{username}}"
password: "{{password}}"
verifycert: "{{verifycert}}"
share_id: "{{smb_share_id}}"
state: "absent"
'''
RETURN = r'''
changed:
description: Whether or not the resource has changed
returned: always
type: bool
sample: True
smb_share_details:
description: The SMB share details.
type: complex
returned: When share exists.
contains:
id:
description: The ID of the SMB share.
type: str
name:
description: Name of the SMB share.
type: str
sample: "sample_smb_share"
filesystem_id:
description: The ID of the Filesystem.
type: str
filesystem_name:
description: The Name of the filesystem
type: str
snapshot_id:
description: The ID of the Snapshot.
type: str
snapshot_name:
description: The Name of the Snapshot.
type: str
nas_server_id:
description: The ID of the nas_server.
type: str
nas_server_name:
description: The Name of the nas_server.
type: str
description:
description: Additional information about the share.
type: str
sample: "This share is created for demo purpose only."
is_abe_enabled:
description: Whether Access Based enumeration is enforced or not
type: bool
sample: false
is_branch_cache_enabled:
description: Whether branch cache is enabled or not.
type: bool
sample: false
is_continuous_availability_enabled:
description: Whether the share will be available continuously or not
type: bool
sample: false
is_encryption_enabled:
description: Whether encryption is enabled or not.
type: bool
sample: false
umask:
description: Unix mask for the SMB share
type: str
'''
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.dellemc.unity.plugins.module_utils.storage.dell \
import dellemc_ansible_unity_utils as utils
LOG = utils.get_logger('dellemc_unity_smbshare')
HAS_UNITY_SDK = utils.get_unity_sdk()
UNITY_SDK_VERSION_CHECK = utils.storops_version_check()
application_type = "Ansible/1.2.0"
class UnitySMBShare(object):
"""Class with SMB Share operations"""
    def __init__(self):
        """ Define all parameters required by this module"""
        # Start from the common Unisphere host/credential options and add
        # the SMB-share specific ones.
        self.module_params = utils.get_unity_management_host_parameters()
        self.module_params.update(get_unity_smb_share_parameters())
        # initialize the ansible module
        # share_id alone identifies a share, so it is exclusive with every
        # resource locator; name/id pairs of one resource are also exclusive.
        mut_ex_args = [['share_name', 'share_id'],
                       ['nas_server_name', 'nas_server_id'],
                       ['filesystem_name', 'snapshot_name',
                        'filesystem_id', 'snapshot_id'],
                       ['share_id', 'nas_server_name'],
                       ['share_id', 'nas_server_id'],
                       ['share_id', 'filesystem_name'],
                       ['share_id', 'filesystem_id'],
                       ['share_id', 'path'],
                       ['share_id', 'snapshot_name'],
                       ['share_id', 'snapshot_id']]
        required_one_of = [['share_id', 'share_name']]
        self.module = AnsibleModule(
            argument_spec=self.module_params,
            supports_check_mode=False,
            mutually_exclusive=mut_ex_args,
            required_one_of=required_one_of
        )
        # result is a dictionary that contains changed status and
        # snapshot details
        self.result = {"changed": False,
                       'smb_share_details': None}
        # Abort early when the storops SDK is missing or too old.
        if not HAS_UNITY_SDK:
            self.module.fail_json(msg="Ansible modules for Unity require the"
                                      " Unity python library to be"
                                      " installed. Please install the "
                                      "library before using these modules.")
        if UNITY_SDK_VERSION_CHECK and \
                not UNITY_SDK_VERSION_CHECK['supported_version']:
            err_msg = UNITY_SDK_VERSION_CHECK['unsupported_version_message']
            LOG.error(err_msg)
            self.module.fail_json(msg=err_msg)
        # Open the Unisphere connection and bind a CIFS-share helper to it.
        self.unity_conn = utils.get_unity_unisphere_connection(
            self.module.params, application_type)
        self.smb_share_conn_obj = utils.cifs_share.UnityCifsShare(
            self.unity_conn)
        LOG.info('Connection established with the Unity Array')
def get_offline_availability_enum(self, offline_availability):
"""
Get the enum of the Offline Availability parameter.
:param offline_availability: The offline_availability string
:return: offline_availability enum
"""
if offline_availability in \
utils.CifsShareOfflineAvailabilityEnum.__members__:
return utils.CifsShareOfflineAvailabilityEnum[
offline_availability]
else:
error_msg = "Invalid value {0} for offline availability" \
" provided".format(offline_availability)
LOG.error(error_msg)
self.module.fail_json(msg=error_msg)
    def get_smb_share_obj(self, share_id=None, share_name=None,
                          filesystem_obj=None, snap_obj=None, nas_obj=None):
        """Get SMB share details.

        :param share_id: ID of the share; sufficient on its own.
        :param share_name: Name of the share; must be combined with one of
            the resource objects below to be resolved uniquely.
        :param filesystem_obj: filesystem object to scope a name lookup.
        :param snap_obj: snapshot object to scope a name lookup.
        :param nas_obj: NAS server object to disambiguate same-named shares.
        :return: SMB share object, or None when the share is not found.
        """
        msg = "Failed to get details of SMB Share {0} with error {1} "
        smb_share = share_name if share_name else share_id
        try:
            if share_id:
                obj_smb = self.unity_conn.get_cifs_share(_id=share_id)
                if obj_smb and obj_smb.existed:
                    LOG.info("Successfully got the SMB share "
                             "object %s ", obj_smb)
                    return obj_smb
                # ID given but share not existed: fall through, return None.
            elif share_name is not None and filesystem_obj:
                # There might be a case where SMB share with same name exists
                # for different nas server. Hence, filesystem_obj is passed
                # along with share name to get a unique resource.
                return self.unity_conn.get_cifs_share(
                    name=share_name, filesystem=filesystem_obj)
            elif share_name is not None and snap_obj:
                # There might be a case where SMB share with same name exists
                # for different nas server. Hence, snap_obj is passed
                # along with share name to get a unique resource.
                return self.unity_conn.get_cifs_share(
                    name=share_name, snap=snap_obj)
            # This elif is addressing scenario where nas server details is
            # passed and neither filesystem nor snapshot details are passed.
            elif share_name is not None and nas_obj:
                # Multiple smb shares can be received, as only name is passed
                smb_share_obj = self.unity_conn.get_cifs_share(
                    name=share_name)
                # Checking if instance or list of instance is returned.
                if isinstance(smb_share_obj,
                              utils.cifs_share.UnityCifsShareList):
                    LOG.info("Multiple SMB share with same name found.")
                    smb_share_obj_list = smb_share_obj
                    for smb_share in smb_share_obj_list:
                        if smb_share.filesystem.nas_server == nas_obj:
                            return smb_share
                    # NOTE(review): this reassigned msg is never logged or
                    # reported before returning — possibly a missing LOG call.
                    msg = "No SMB share found with the given NAS Server." \
                          " Please provide correct share name and" \
                          " nas server details."
                    return None
                # Below statements will execute when there is only single
                # smb share returned.
                if smb_share_obj.filesystem.nas_server == nas_obj:
                    return smb_share_obj
                # NOTE(review): same unused msg reassignment as above.
                msg = "No SMB share found with the given NAS Server." \
                      " Please provide correct share name and" \
                      " nas server details."
                return None
            else:
                self.module.fail_json(
                    msg="Share Name is Passed. Please enter Filesystem/"
                        "Snapshot/NAS Server Resource along with share_name"
                        " to get the details of the SMB share")
        except utils.HttpError as e:
            if e.http_status == 401:
                cred_err = "Incorrect username or password , {0}".format(
                    e.message)
                self.module.fail_json(msg=cred_err)
            else:
                err_msg = msg.format(smb_share, str(e))
                LOG.error(err_msg)
                self.module.fail_json(msg=err_msg)
        except utils.UnityResourceNotFoundError as e:
            # A missing share is not fatal here; the caller decides whether
            # to create it.
            err_msg = msg.format(smb_share, str(e))
            LOG.error(err_msg)
            return None
        except Exception as e:
            err_msg = msg.format(smb_share, str(e))
            LOG.error(err_msg)
            self.module.fail_json(msg=err_msg)
def create_smb_share(self, share_name, path, filesystem_obj=None,
snapshot_obj=None, description=None,
is_abe_enabled=None, is_branch_cache_enabled=None,
is_continuous_availability_enabled=None,
is_encryption_enabled=None,
offline_availability=None, umask=None):
"""
Create SMB Share
:return: SMB Share Object if successful, else error.
"""
if path is None or path == "":
self.module.fail_json(msg="Please enter a valid path."
" Empty string or None provided.")
if not filesystem_obj and not snapshot_obj:
self.module.fail_json(msg="Either Filesystem or Snapshot "
"Resource's Name/ID is required to"
" Create a SMB share")
try:
if filesystem_obj:
return self.smb_share_conn_obj.create(
cli=self.unity_conn._cli, name=share_name,
fs=filesystem_obj, path=path,
is_encryption_enabled=is_encryption_enabled,
is_con_avail_enabled=is_continuous_availability_enabled,
is_abe_enabled=is_abe_enabled,
is_branch_cache_enabled=is_branch_cache_enabled,
umask=umask, description=description,
offline_availability=offline_availability)
else:
return self.smb_share_conn_obj.create_from_snap(
cli=self.unity_conn._cli, name=share_name,
snap=snapshot_obj, path=path,
is_encryption_enabled=is_encryption_enabled,
is_con_avail_enabled=is_continuous_availability_enabled,
is_abe_enabled=is_abe_enabled,
is_branch_cache_enabled=is_branch_cache_enabled,
umask=umask, description=description,
offline_availability=offline_availability)
except Exception as e:
error_msg = "Failed to create SMB share" \
" %s with error %s" % (share_name, str(e))
LOG.error(error_msg)
self.module.fail_json(msg=error_msg)
def get_filesystem(self, filesystem_id=None, filesystem_name=None,
nas_server_obj=None):
"""
Get the Filesystem Object.
:param filesystem_id: ID of the Filesystem.
:param filesystem_name: Name of the filesystem.
:param nas_server_obj: NAS Server object.
:return: Object of the filesystem.
"""
try:
if filesystem_id:
obj_fs = self.unity_conn.get_filesystem(_id=filesystem_id)
if obj_fs and obj_fs.existed:
LOG.info("Successfully got the filesystem "
"object %s ", obj_fs)
return obj_fs
else:
return self.unity_conn.get_filesystem(
name=filesystem_name, nas_server=nas_server_obj)
return None
except Exception as e:
filesystem = filesystem_name if filesystem_name \
else filesystem_id
err_msg = "Failed to get filesystem details {0} with" \
" error {1}".format(filesystem, str(e))
LOG.error(err_msg)
self.module.fail_json(msg=err_msg)
def get_snapshot(self, snapshot_name, snapshot_id):
"""
Get the Snapshot Object.
:param snapshot_id: ID of the Snapshot.
:param snapshot_name: Name of the Snapshot
:return: Object of the filesystem.
"""
try:
obj_snap = self.unity_conn.get_snap(_id=snapshot_id,
name=snapshot_name)
if snapshot_id and obj_snap and not obj_snap.existed:
LOG.info("Snapshot object does not exist %s ", obj_snap)
return None
return obj_snap
except Exception as e:
snapshot = snapshot_name if snapshot_name else snapshot_id
err_msg = "Failed to get filesystem snapshots details {0} with" \
" error {1}".format(snapshot, str(e))
LOG.error(err_msg)
self.module.fail_json(msg=err_msg)
def get_nas_server(self, nas_server_name, nas_server_id):
"""
Get the NAS Server Object using NAME/ID of the NAS Server.
:param nas_server_name: Name of the NAS Server
:param nas_server_id: ID of the NAS Server
:return: NAS Server object.
"""
nas_server = nas_server_name if nas_server_name else nas_server_id
try:
obj_nas = self.unity_conn.get_nas_server(_id=nas_server_id,
name=nas_server_name)
if nas_server_id and obj_nas and not obj_nas.existed:
LOG.info("NAS Server object does not exist %s ", obj_nas)
return None
return obj_nas
except utils.HttpError as e:
if e.http_status == 401:
cred_err = "Incorrect username or password , {0}".format(
e.message)
self.module.fail_json(msg=cred_err)
else:
err_msg = "Failed to get details of NAS Server" \
" {0} with error {1}".format(nas_server, str(e))
LOG.error(err_msg)
self.module.fail_json(msg=err_msg)
except Exception as e:
nas_server = nas_server_name if nas_server_name \
else nas_server_id
err_msg = "Failed to get nas server details {0} with" \
" error {1}".format(nas_server, str(e))
LOG.error(err_msg)
self.module.fail_json(msg=err_msg)
def delete_smb_share(self, smb_share_obj):
"""
Delete SMB share if exists, else thrown error.
"""
try:
smb_share_obj.delete()
except Exception as e:
error_msg = "Failed to Delete SMB share" \
" %s with error %s" % (smb_share_obj.name, str(e))
LOG.error(error_msg)
self.module.fail_json(msg=error_msg)
def to_update(self, smb_share_obj):
LOG.info("Checking Whether the parameters are modified or not.")
offline_availability = self.module.params['offline_availability']
# Get the enum for the corresponding offline_availability
if offline_availability:
offline_availability = \
self.get_offline_availability_enum(offline_availability)
if offline_availability is not None and \
offline_availability != smb_share_obj.offline_availability:
return True
smb_share_dict = smb_share_obj._get_properties()
params_list = ['is_abe_enabled', 'is_branch_cache_enabled',
'is_continuous_availability_enabled',
'is_encryption_enabled', 'description', 'umask']
for param in params_list:
if self.module.params[param] is not None and \
self.module.params[param] != smb_share_dict[param]:
return True
return False
def update_smb_share(self, smb_share_obj, is_encryption_enabled=None,
is_continuous_availability_enabled=None,
is_abe_enabled=None,
is_branch_cache_enabled=None,
umask=None, description=None,
offline_availability=None):
"""
The Details of the SMB share will be updated in the function.
"""
try:
smb_share_obj.modify(
is_encryption_enabled=is_encryption_enabled,
is_con_avail_enabled=is_continuous_availability_enabled,
is_abe_enabled=is_abe_enabled,
is_branch_cache_enabled=is_branch_cache_enabled,
umask=umask, description=description,
offline_availability=offline_availability)
except Exception as e:
error_msg = "Failed to Update parameters of SMB share" \
" %s with error %s" % (smb_share_obj.name, str(e))
LOG.error(error_msg)
self.module.fail_json(msg=error_msg)
    def perform_module_operation(self):
        """
        Perform different actions on SMB share based on user parameters
        chosen in playbook.

        Flow: validate inputs, resolve NAS server / filesystem / snapshot
        resources, fetch the share, then create, modify or delete it as
        required, and finally exit the module with the result dict.
        """
        state = self.module.params['state']
        share_name = self.module.params['share_name']
        filesystem_name = self.module.params['filesystem_name']
        snapshot_name = self.module.params['snapshot_name']
        nas_server_name = self.module.params['nas_server_name']
        share_id = self.module.params['share_id']
        filesystem_id = self.module.params['filesystem_id']
        snapshot_id = self.module.params['snapshot_id']
        nas_server_id = self.module.params['nas_server_id']
        path = self.module.params['path']
        description = self.module.params['description']
        is_branch_cache_enabled = \
            self.module.params['is_branch_cache_enabled']
        is_continuous_availability_enabled = \
            self.module.params['is_continuous_availability_enabled']
        is_encryption_enabled = self.module.params['is_encryption_enabled']
        is_abe_enabled = self.module.params['is_abe_enabled']
        umask = self.module.params['umask']
        offline_availability = self.module.params['offline_availability']
        # Get the enum for the corresponding offline_availability
        if offline_availability:
            offline_availability = \
                self.get_offline_availability_enum(offline_availability)
        changed = False
        '''
        Validate parameters.
        '''
        # A whitespace-only share_id is as invalid as an empty one.
        if share_id is not None and \
                (share_id == "" or len(share_id.split()) == 0):
            self.module.fail_json(msg="Invalid share id provided."
                                      " Please enter a valid share ID.")
        '''
        Get details of NAS Server, if entered.
        '''
        nas_server_obj = None
        if nas_server_name or nas_server_id:
            nas_server_obj = self.get_nas_server(nas_server_name,
                                                 nas_server_id)
            if nas_server_obj:
                msg = "NAS Server Object:" \
                      " {0}".format(nas_server_obj._get_properties())
                LOG.info(msg)
            else:
                msg = "NAS Server Resource not fetched."
                LOG.info(msg)
        '''
        Get details of Filesystem, if entered.
        '''
        filesystem_obj = None
        if filesystem_id:
            filesystem_obj = self.get_filesystem(filesystem_id)
        if filesystem_name:
            # nas_server_obj is required to uniquely identify filesystem
            # resource. If neither nas_server_name nor nas_server_id
            # is passed along with filesystem_name then error is thrown.
            if not nas_server_obj:
                self.module.fail_json(msg="nas_server_id/nas_server_name is "
                                          "required when filesystem_name is "
                                          "passed")
            filesystem_obj = self.get_filesystem(
                None, filesystem_name, nas_server_obj)
        if filesystem_obj:
            msg = "Filesystem Object:" \
                  " {0}".format(filesystem_obj._get_properties())
            LOG.info(msg)
        # Checking if filesystem supports SMB protocol or not.
        if filesystem_obj and \
                filesystem_obj.supported_protocols.name == "NFS":
            self.module.fail_json(msg="Cannot perform SMB share operations "
                                      "as file system supports only NFS "
                                      "protocol. Please enter a valid "
                                      "Filesystem having supported protocol"
                                      " as SMB or Multiprotocol.")
        '''
        Get details of Snapshot, if entered.
        '''
        snapshot_obj = None
        if snapshot_id or snapshot_name:
            # Snapshot Name and Snapshot ID both are unique across array.
            # Hence no need to mention nas server details
            snapshot_obj = self.get_snapshot(snapshot_name, snapshot_id)
            if snapshot_obj:
                msg = "Snapshot Object:" \
                      " {0}".format(snapshot_obj._get_properties())
                LOG.info(msg)
            else:
                msg = "Snapshot Resource not fetched."
                LOG.info(msg)
        '''
        Get the Details of the SMB Share
        '''
        smb_share_obj = self.get_smb_share_obj(
            share_id, share_name, filesystem_obj, snapshot_obj,
            nas_server_obj)
        if smb_share_obj:
            msg = "SMB Share Object:" \
                  " {0}".format(smb_share_obj._get_properties())
            LOG.info(msg)
        elif state == 'present' and share_id:
            # An explicit share_id that resolves to nothing is a user error,
            # not a create request.
            msg = "Unable to fetch SMB Share Resource. " \
                  "Incorrect SMB share id provided. " \
                  "Please enter a correct share id."
            LOG.error(msg)
            self.module.fail_json(msg=msg)
        '''
        Creation of SMB Share
        '''
        if state == "present" and not smb_share_obj:
            smb_share_obj = self.create_smb_share(
                share_name, path, filesystem_obj, snapshot_obj, description,
                is_abe_enabled, is_branch_cache_enabled,
                is_continuous_availability_enabled, is_encryption_enabled,
                offline_availability, umask)
            changed = True
        '''
        Update the SMB share details
        '''
        # NOTE: a freshly created share also flows through this update check,
        # reconciling any attributes the create call did not apply.
        if state == "present" and smb_share_obj:
            LOG.info("Modify the details of the SMB share.")
            update_flag = self.to_update(smb_share_obj)
            msg = "Update Flag: {0}".format(str(update_flag))
            LOG.info(msg)
            if update_flag:
                self.update_smb_share(smb_share_obj, is_encryption_enabled,
                                      is_continuous_availability_enabled,
                                      is_abe_enabled,
                                      is_branch_cache_enabled,
                                      umask, description,
                                      offline_availability)
                changed = True
        '''
        Delete the SMB share details
        '''
        if state == "absent" and smb_share_obj:
            self.delete_smb_share(smb_share_obj)
            changed = True
        '''
        Update the changed state and SMB share details
        '''
        self.result["changed"] = changed
        smb_details = self.prepare_output_dict(state, share_id, share_name,
                                               filesystem_obj, snapshot_obj,
                                               nas_server_obj)
        self.result["smb_share_details"] = smb_details
        # exit_json ends the module run and reports the result to Ansible.
        self.module.exit_json(**self.result)
def prepare_output_dict(self, state, share_id, share_name,
filesystem_obj, snapshot_obj, nas_server_obj):
smb_share_details = None
smb_share_obj = None
if state == 'present':
smb_share_obj = self.get_smb_share_obj(
share_id, share_name, filesystem_obj,
snapshot_obj, nas_server_obj)
smb_share_details = smb_share_obj._get_properties()
if smb_share_details:
# Get Snapshot NAME and ID if SMB share exists for Snapshot
if smb_share_obj.type.name == "CIFS_SNAPSHOT":
smb_share_details['snapshot_name'] = smb_share_obj.snap.name
smb_share_details['snapshot_id'] = smb_share_obj.snap.id
# Get Filesystem NAME and ID
smb_share_details['filesystem_name'] = \
smb_share_obj.filesystem.name
smb_share_details['filesystem_id'] = smb_share_obj.filesystem.id
# Get NAS server NAME and ID
smb_share_details['nas_server_name'] = \
smb_share_obj.filesystem.nas_server.name
smb_share_details['nas_server_id'] = \
smb_share_obj.filesystem.nas_server.id
return smb_share_details
def get_unity_smb_share_parameters():
    """Return the Ansible argument spec for the Unity SMB share module."""
    spec = {
        'share_name': {}, 'share_id': {},
        'filesystem_name': {}, 'filesystem_id': {},
        'snapshot_name': {}, 'snapshot_id': {},
        'nas_server_name': {}, 'nas_server_id': {},
        'path': {}, 'umask': {}, 'description': {},
        'offline_availability': {
            'choices': ["MANUAL", "DOCUMENTS", "PROGRAMS", "NONE"]},
        'is_abe_enabled': {'type': 'bool'},
        'is_branch_cache_enabled': {'type': 'bool'},
        'is_continuous_availability_enabled': {'type': 'bool'},
        'is_encryption_enabled': {'type': 'bool'},
        'state': {'required': True, 'choices': ['present', 'absent'],
                  'type': 'str'},
    }
    return spec
def main():
    """Module entry point: build the SMB share manager and run it."""
    UnitySMBShare().perform_module_operation()


if __name__ == '__main__':
    main()
| 40.94049 | 103 | 0.60745 |
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: dellemc_unity_smbshare
version_added: '1.1.0'
short_description: Manage SMB shares on Unity storage system.
extends_documentation_fragment:
- dellemc.unity.dellemc_unity.unity
author:
- P Srinivas Rao (@srinivas-rao5) <ansible.team@dell.com>
description:
- Managing SMB Shares on Unity storage system includes create, get,
modify, and delete the smb shares.
options:
share_name:
description:
- Name of the SMB share.
- Required during creation of the SMB share.
- For all other operations either share_name or share_id is required.
type: str
share_id:
description:
- ID of the SMB share.
- Should not be specified during creation. Id is auto generated.
- For all other operations either share_name or share_id is required.
- If share_id is used then no need to pass nas_server/filesystem/snapshot/path.
type: str
path:
description:
- Local path to the file system/Snapshot or any existing sub-folder of
the file system/Snapshot that is shared over the network.
- Path is relative to the root of the filesystem.
- Required for creation of the SMB share.
type: str
filesystem_id:
description:
- The ID of the File System.
- Either filesystem_name or filesystem_id is required for creation of the SMB share for filesystem.
- If filesystem name is specified, then nas_server_name/nas_server_id is required to
uniquely identify the filesystem.
- filesystem_name and filesystem_id are mutually exclusive parameters.
type: str
snapshot_id:
description:
- The ID of the Filesystem Snapshot.
- Either snapshot_name or snapshot_id is required for creation of the SMB share for a snapshot.
- If snapshot name is specified, then nas_server_name/nas_server_id is required to
uniquely identify the snapshot.
- snapshot_name and snapshot_id are mutually exclusive parameters.
type: str
nas_server_id:
description:
- The ID of the NAS Server.
- It is not required if share_id is used.
type: str
filesystem_name:
description:
- The Name of the File System.
- Either filesystem_name or filesystem_id is required for creation of the SMB share for filesystem.
- If filesystem name is specified, then nas_server_name/nas_server_id is required to
uniquely identify the filesystem.
- filesystem_name and filesytem_id are mutually exclusive parameters.
type: str
snapshot_name:
description:
- The Name of the Filesystem Snapshot.
- Either snapshot_name or snapshot_id is required for creation of the SMB share for a snapshot.
- If snapshot name is specified, then nas_server_name/nas_server_id is required to
uniquely identify the snapshot.
- snapshot_name and snapshot_id are mutually exclusive parameters.
type: str
nas_server_name:
description:
- The Name of the NAS Server.
- It is not required if share_id is used.
- nas_server_name and nas_server_id are mutually exclusive parameters.
type: str
description:
description:
- Description for the SMB share.
- Optional parameter when creating a share.
- To modify, pass the new value in description field.
type: str
is_abe_enabled:
description:
- Indicates whether Access-based Enumeration (ABE) for SMB share is enabled.
- During creation, if not mentioned then default is False.
type: bool
is_branch_cache_enabled:
description:
- Indicates whether Branch Cache optimization for SMB share is enabled.
- During creation, if not mentioned then default is False.
type: bool
is_continuous_availability_enabled:
description:
- Indicates whether continuous availability for SMB 3.0 is enabled.
- During creation, if not mentioned then default is False.
type: bool
is_encryption_enabled:
description:
- Indicates whether encryption for SMB 3.0 is enabled at the shared folder level.
- During creation, if not mentioned then default is False.
type: bool
offline_availability:
description:
- Defines valid states of Offline Availability.
- MANUAL- Only specified files will be available offline.
- DOCUMENTS- All files that users open will be available offline.
- PROGRAMS- Program will preferably run from the offline cache even when
connected to the network. All files that users open will be available offline.
- NONE- Prevents clients from storing documents and programs in offline cache.
type: str
choices: ["MANUAL","DOCUMENTS","PROGRAMS","NONE"]
umask:
description:
- The default UNIX umask for new files created on the SMB Share.
type: str
state:
description:
- Define whether the SMB share should exist or not.
- present indicates that the share should exist on the system.
- absent indicates that the share should not exist on the system.
type: str
required: true
choices: ['absent', 'present']
notes:
- When ID/Name of the filesystem/snapshot is passed then nas_server is not required.
If passed, then filesystem/snapshot should exist for the mentioned nas_server,
else the task will fail.
'''
EXAMPLES = r'''
- name: Create SMB share for a filesystem
dellemc_unity_smbshare:
unispherehost: "{{unispherehost}}"
username: "{{username}}"
password: "{{password}}"
verifycert: "{{verifycert}}"
share_name: "sample_smb_share"
filesystem_name: "sample_fs"
nas_server_id: "NAS_11"
path: "/sample_fs"
description: "Sample SMB share created"
is_abe_enabled: True
is_branch_cache_enabled: True
offline_availability: "DOCUMENTS"
is_continuous_availability_enabled: True
is_encryption_enabled: True
umask: "777"
state: "present"
- name: Modify Attributes of SMB share for a filesystem
dellemc_unity_smbshare:
unispherehost: "{{unispherehost}}"
username: "{{username}}"
password: "{{password}}"
verifycert: "{{verifycert}}"
share_name: "sample_smb_share"
nas_server_name: "sample_nas_server"
description: "Sample SMB share attributes updated"
is_abe_enabled: False
is_branch_cache_enabled: False
offline_availability: "MANUAL"
is_continuous_availability_enabled: "False"
is_encryption_enabled: "False"
umask: "022"
state: "present"
- name: Create SMB share for a snapshot
dellemc_unity_smbshare:
unispherehost: "{{unispherehost}}"
username: "{{username}}"
password: "{{password}}"
verifycert: "{{verifycert}}"
share_name: "sample_snap_smb_share"
snapshot_name: "sample_snapshot"
nas_server_id: "NAS_11"
path: "/sample_snapshot"
description: "Sample SMB share created for snapshot"
is_abe_enabled: True
is_branch_cache_enabled: True
is_continuous_availability_enabled: True
is_encryption_enabled: True
umask: "777"
state: "present"
- name: Modify Attributes of SMB share for a snapshot
dellemc_unity_smbshare:
unispherehost: "{{unispherehost}}"
username: "{{username}}"
password: "{{password}}"
verifycert: "{{verifycert}}"
share_name: "sample_snap_smb_share"
snapshot_name: "sample_snapshot"
description: "Sample SMB share attributes updated for snapshot"
is_abe_enabled: False
is_branch_cache_enabled: False
offline_availability: "MANUAL"
is_continuous_availability_enabled: "False"
is_encryption_enabled: "False"
umask: "022"
state: "present"
- name: Get details of SMB share
dellemc_unity_smbshare:
unispherehost: "{{unispherehost}}"
username: "{{username}}"
password: "{{password}}"
verifycert: "{{verifycert}}"
share_id: "{{smb_share_id}}"
state: "present"
- name: Delete SMB share
dellemc_unity_smbshare:
unispherehost: "{{unispherehost}}"
username: "{{username}}"
password: "{{password}}"
verifycert: "{{verifycert}}"
share_id: "{{smb_share_id}}"
state: "absent"
'''
RETURN = r'''
changed:
description: Whether or not the resource has changed
returned: always
type: bool
sample: True
smb_share_details:
description: The SMB share details.
type: complex
returned: When share exists.
contains:
id:
description: The ID of the SMB share.
type: str
name:
description: Name of the SMB share.
type: str
sample: "sample_smb_share"
filesystem_id:
description: The ID of the Filesystem.
type: str
filesystem_name:
description: The Name of the filesystem
type: str
snapshot_id:
description: The ID of the Snapshot.
type: str
snapshot_name:
description: The Name of the Snapshot.
type: str
nas_server_id:
description: The ID of the nas_server.
type: str
nas_server_name:
description: The Name of the nas_server.
type: str
description:
description: Additional information about the share.
type: str
sample: "This share is created for demo purpose only."
is_abe_enabled:
description: Whether Access Based enumeration is enforced or not
type: bool
sample: false
is_branch_cache_enabled:
description: Whether branch cache is enabled or not.
type: bool
sample: false
is_continuous_availability_enabled:
description: Whether the share will be available continuously or not
type: bool
sample: false
is_encryption_enabled:
description: Whether encryption is enabled or not.
type: bool
sample: false
umask:
description: Unix mask for the SMB share
type: str
'''
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.dellemc.unity.plugins.module_utils.storage.dell \
import dellemc_ansible_unity_utils as utils
# Module-level logger for this Ansible module.
LOG = utils.get_logger('dellemc_unity_smbshare')
# Truthy when the required Unity python library (storops SDK) is importable;
# __init__ fails the module run when this is falsy.
HAS_UNITY_SDK = utils.get_unity_sdk()
# Result of the storops version check; carries 'supported_version' and
# 'unsupported_version_message' keys consumed in __init__.
UNITY_SDK_VERSION_CHECK = utils.storops_version_check()
# Client identifier sent with the Unisphere connection.
application_type = "Ansible/1.2.0"
class UnitySMBShare(object):
    """Ansible module class for managing SMB shares on Dell EMC Unity."""

    def __init__(self):
        """Build the argument spec, validate the SDK, and connect.

        Fails the module run (fail_json) when the storops SDK is missing
        or too old; otherwise opens a Unisphere connection and a CIFS
        share handle used by the other methods.
        """
        self.module_params = utils.get_unity_management_host_parameters()
        self.module_params.update(get_unity_smb_share_parameters())
        # A share is addressed either by id or by (name + one parent
        # resource); any other combination is contradictory.
        mut_ex_args = [['share_name', 'share_id'],
                       ['nas_server_name', 'nas_server_id'],
                       ['filesystem_name', 'snapshot_name',
                        'filesystem_id', 'snapshot_id'],
                       ['share_id', 'nas_server_name'],
                       ['share_id', 'nas_server_id'],
                       ['share_id', 'filesystem_name'],
                       ['share_id', 'filesystem_id'],
                       ['share_id', 'path'],
                       ['share_id', 'snapshot_name'],
                       ['share_id', 'snapshot_id']]
        required_one_of = [['share_id', 'share_name']]
        self.module = AnsibleModule(
            argument_spec=self.module_params,
            supports_check_mode=False,
            mutually_exclusive=mut_ex_args,
            required_one_of=required_one_of
        )
        # Result skeleton returned by exit_json at the end of a run.
        self.result = {"changed": False,
                       'smb_share_details': None}
        if not HAS_UNITY_SDK:
            self.module.fail_json(msg="Ansible modules for Unity require the"
                                      " Unity python library to be"
                                      " installed. Please install the "
                                      "library before using these modules.")
        if UNITY_SDK_VERSION_CHECK and \
                not UNITY_SDK_VERSION_CHECK['supported_version']:
            err_msg = UNITY_SDK_VERSION_CHECK['unsupported_version_message']
            LOG.error(err_msg)
            self.module.fail_json(msg=err_msg)
        self.unity_conn = utils.get_unity_unisphere_connection(
            self.module.params, application_type)
        self.smb_share_conn_obj = utils.cifs_share.UnityCifsShare(
            self.unity_conn)
        LOG.info('Connection established with the Unity Array')
def get_offline_availability_enum(self, offline_availability):
if offline_availability in \
utils.CifsShareOfflineAvailabilityEnum.__members__:
return utils.CifsShareOfflineAvailabilityEnum[
offline_availability]
else:
error_msg = "Invalid value {0} for offline availability" \
" provided".format(offline_availability)
LOG.error(error_msg)
self.module.fail_json(msg=error_msg)
    def get_smb_share_obj(self, share_id=None, share_name=None,
                          filesystem_obj=None, snap_obj=None, nas_obj=None):
        """Fetch an SMB share by id, or by name scoped to a parent resource.

        Lookup precedence: share_id; then share_name combined with a
        filesystem, a snapshot, or a NAS server. A name without any parent
        resource fails the run. Returns the storops share object, or None
        when the share cannot be found (including resource-not-found
        errors); other errors terminate the run via fail_json.
        """
        # ``msg`` doubles as the error-format template for the except
        # clauses below and may be re-assigned to a literal message in the
        # NAS-server branch (str.format on a placeholder-free string is a
        # no-op, so the except clauses still work).
        msg = "Failed to get details of SMB Share {0} with error {1} "
        smb_share = share_name if share_name else share_id
        try:
            if share_id:
                obj_smb = self.unity_conn.get_cifs_share(_id=share_id)
                # Only return the object when it actually exists on the
                # array; a non-existent id falls through and returns None.
                if obj_smb and obj_smb.existed:
                    LOG.info("Successfully got the SMB share "
                             "object %s ", obj_smb)
                    return obj_smb
            elif share_name is not None and filesystem_obj:
                return self.unity_conn.get_cifs_share(
                    name=share_name, filesystem=filesystem_obj)
            elif share_name is not None and snap_obj:
                return self.unity_conn.get_cifs_share(
                    name=share_name, snap=snap_obj)
            elif share_name is not None and nas_obj:
                smb_share_obj = self.unity_conn.get_cifs_share(
                    name=share_name)
                # Share names are not unique across NAS servers, so a
                # name-only lookup may yield a list; disambiguate by
                # matching the filesystem's NAS server.
                if isinstance(smb_share_obj,
                              utils.cifs_share.UnityCifsShareList):
                    LOG.info("Multiple SMB share with same name found.")
                    smb_share_obj_list = smb_share_obj
                    # NOTE(review): the loop variable shadows the outer
                    # ``smb_share`` used by the error formatting below.
                    for smb_share in smb_share_obj_list:
                        if smb_share.filesystem.nas_server == nas_obj:
                            return smb_share
                    msg = "No SMB share found with the given NAS Server." \
                          " Please provide correct share name and" \
                          " nas server details."
                    return None
                # Single match: still verify it belongs to the requested
                # NAS server before returning it.
                if smb_share_obj.filesystem.nas_server == nas_obj:
                    return smb_share_obj
                msg = "No SMB share found with the given NAS Server." \
                      " Please provide correct share name and" \
                      " nas server details."
                return None
            else:
                self.module.fail_json(
                    msg="Share Name is Passed. Please enter Filesystem/"
                        "Snapshot/NAS Server Resource along with share_name"
                        " to get the details of the SMB share")
        except utils.HttpError as e:
            # 401 means bad credentials; report that specifically.
            if e.http_status == 401:
                cred_err = "Incorrect username or password , {0}".format(
                    e.message)
                self.module.fail_json(msg=cred_err)
            else:
                err_msg = msg.format(smb_share, str(e))
                LOG.error(err_msg)
                self.module.fail_json(msg=err_msg)
        except utils.UnityResourceNotFoundError as e:
            # Missing resource is not fatal here: callers treat None as
            # "share does not exist" (e.g. before creating it).
            err_msg = msg.format(smb_share, str(e))
            LOG.error(err_msg)
            return None
        except Exception as e:
            err_msg = msg.format(smb_share, str(e))
            LOG.error(err_msg)
            self.module.fail_json(msg=err_msg)
    def create_smb_share(self, share_name, path, filesystem_obj=None,
                         snapshot_obj=None, description=None,
                         is_abe_enabled=None, is_branch_cache_enabled=None,
                         is_continuous_availability_enabled=None,
                         is_encryption_enabled=None,
                         offline_availability=None, umask=None):
        """Create an SMB share on a filesystem or on a snapshot.

        Requires a non-empty export ``path`` and exactly one parent
        resource (filesystem_obj or snapshot_obj). Returns the created
        storops share object; any error terminates the run via fail_json.
        """
        if path is None or path == "":
            self.module.fail_json(msg="Please enter a valid path."
                                      " Empty string or None provided.")
        if not filesystem_obj and not snapshot_obj:
            self.module.fail_json(msg="Either Filesystem or Snapshot "
                                      "Resource's Name/ID is required to"
                                      " Create a SMB share")
        try:
            # Filesystem-backed and snapshot-backed shares use different
            # storops factory methods but the same attribute set.
            if filesystem_obj:
                return self.smb_share_conn_obj.create(
                    cli=self.unity_conn._cli, name=share_name,
                    fs=filesystem_obj, path=path,
                    is_encryption_enabled=is_encryption_enabled,
                    is_con_avail_enabled=is_continuous_availability_enabled,
                    is_abe_enabled=is_abe_enabled,
                    is_branch_cache_enabled=is_branch_cache_enabled,
                    umask=umask, description=description,
                    offline_availability=offline_availability)
            else:
                return self.smb_share_conn_obj.create_from_snap(
                    cli=self.unity_conn._cli, name=share_name,
                    snap=snapshot_obj, path=path,
                    is_encryption_enabled=is_encryption_enabled,
                    is_con_avail_enabled=is_continuous_availability_enabled,
                    is_abe_enabled=is_abe_enabled,
                    is_branch_cache_enabled=is_branch_cache_enabled,
                    umask=umask, description=description,
                    offline_availability=offline_availability)
        except Exception as e:
            error_msg = "Failed to create SMB share" \
                        " %s with error %s" % (share_name, str(e))
            LOG.error(error_msg)
            self.module.fail_json(msg=error_msg)
    def get_filesystem(self, filesystem_id=None, filesystem_name=None,
                       nas_server_obj=None):
        """Fetch a filesystem by id, or by name within a NAS server.

        Returns the storops filesystem object; returns None when an id
        lookup finds no existing filesystem. Errors terminate the module
        run via fail_json.
        """
        try:
            if filesystem_id:
                obj_fs = self.unity_conn.get_filesystem(_id=filesystem_id)
                if obj_fs and obj_fs.existed:
                    LOG.info("Successfully got the filesystem "
                             "object %s ", obj_fs)
                    return obj_fs
            else:
                # Names are only unique per NAS server, hence the scope.
                return self.unity_conn.get_filesystem(
                    name=filesystem_name, nas_server=nas_server_obj)
            # Reached only when an id was given but the object does not
            # exist on the array.
            return None
        except Exception as e:
            filesystem = filesystem_name if filesystem_name \
                else filesystem_id
            err_msg = "Failed to get filesystem details {0} with" \
                      " error {1}".format(filesystem, str(e))
            LOG.error(err_msg)
            self.module.fail_json(msg=err_msg)
def get_snapshot(self, snapshot_name, snapshot_id):
try:
obj_snap = self.unity_conn.get_snap(_id=snapshot_id,
name=snapshot_name)
if snapshot_id and obj_snap and not obj_snap.existed:
LOG.info("Snapshot object does not exist %s ", obj_snap)
return None
return obj_snap
except Exception as e:
snapshot = snapshot_name if snapshot_name else snapshot_id
err_msg = "Failed to get filesystem snapshots details {0} with" \
" error {1}".format(snapshot, str(e))
LOG.error(err_msg)
self.module.fail_json(msg=err_msg)
def get_nas_server(self, nas_server_name, nas_server_id):
nas_server = nas_server_name if nas_server_name else nas_server_id
try:
obj_nas = self.unity_conn.get_nas_server(_id=nas_server_id,
name=nas_server_name)
if nas_server_id and obj_nas and not obj_nas.existed:
LOG.info("NAS Server object does not exist %s ", obj_nas)
return None
return obj_nas
except utils.HttpError as e:
if e.http_status == 401:
cred_err = "Incorrect username or password , {0}".format(
e.message)
self.module.fail_json(msg=cred_err)
else:
err_msg = "Failed to get details of NAS Server" \
" {0} with error {1}".format(nas_server, str(e))
LOG.error(err_msg)
self.module.fail_json(msg=err_msg)
except Exception as e:
nas_server = nas_server_name if nas_server_name \
else nas_server_id
err_msg = "Failed to get nas server details {0} with" \
" error {1}".format(nas_server, str(e))
LOG.error(err_msg)
self.module.fail_json(msg=err_msg)
def delete_smb_share(self, smb_share_obj):
try:
smb_share_obj.delete()
except Exception as e:
error_msg = "Failed to Delete SMB share" \
" %s with error %s" % (smb_share_obj.name, str(e))
LOG.error(error_msg)
self.module.fail_json(msg=error_msg)
    def to_update(self, smb_share_obj):
        """Return True when any requested attribute differs from the share.

        Compares the module parameters against the share's current
        properties; only parameters the user actually supplied (non-None)
        are considered.
        """
        LOG.info("Checking Whether the parameters are modified or not.")
        offline_availability = self.module.params['offline_availability']
        # Get the enum for the corresponding offline_availability
        if offline_availability:
            offline_availability = \
                self.get_offline_availability_enum(offline_availability)
        # The enum needs a dedicated comparison because the property dict
        # below stores the attribute in a different representation.
        if offline_availability is not None and \
                offline_availability != smb_share_obj.offline_availability:
            return True
        smb_share_dict = smb_share_obj._get_properties()
        # Remaining attributes compare 1:1 against the property dict keys.
        params_list = ['is_abe_enabled', 'is_branch_cache_enabled',
                       'is_continuous_availability_enabled',
                       'is_encryption_enabled', 'description', 'umask']
        for param in params_list:
            if self.module.params[param] is not None and \
                    self.module.params[param] != smb_share_dict[param]:
                return True
        return False
def update_smb_share(self, smb_share_obj, is_encryption_enabled=None,
is_continuous_availability_enabled=None,
is_abe_enabled=None,
is_branch_cache_enabled=None,
umask=None, description=None,
offline_availability=None):
try:
smb_share_obj.modify(
is_encryption_enabled=is_encryption_enabled,
is_con_avail_enabled=is_continuous_availability_enabled,
is_abe_enabled=is_abe_enabled,
is_branch_cache_enabled=is_branch_cache_enabled,
umask=umask, description=description,
offline_availability=offline_availability)
except Exception as e:
error_msg = "Failed to Update parameters of SMB share" \
" %s with error %s" % (smb_share_obj.name, str(e))
LOG.error(error_msg)
self.module.fail_json(msg=error_msg)
    def perform_module_operation(self):
        """Drive the whole module run: fetch, create, update or delete.

        Resolves the parent resources (NAS server, filesystem, snapshot),
        looks up the share, then applies the requested ``state`` and exits
        via exit_json with ``changed`` and the final share details.
        """
        state = self.module.params['state']
        share_name = self.module.params['share_name']
        filesystem_name = self.module.params['filesystem_name']
        snapshot_name = self.module.params['snapshot_name']
        nas_server_name = self.module.params['nas_server_name']
        share_id = self.module.params['share_id']
        filesystem_id = self.module.params['filesystem_id']
        snapshot_id = self.module.params['snapshot_id']
        nas_server_id = self.module.params['nas_server_id']
        path = self.module.params['path']
        description = self.module.params['description']
        is_branch_cache_enabled = \
            self.module.params['is_branch_cache_enabled']
        is_continuous_availability_enabled = \
            self.module.params['is_continuous_availability_enabled']
        is_encryption_enabled = self.module.params['is_encryption_enabled']
        is_abe_enabled = self.module.params['is_abe_enabled']
        umask = self.module.params['umask']
        offline_availability = self.module.params['offline_availability']
        # Get the enum for the corresponding offline_availability
        if offline_availability:
            offline_availability = \
                self.get_offline_availability_enum(offline_availability)
        changed = False
        # Reject ids that are empty or whitespace-only.
        if share_id is not None and \
                (share_id == "" or len(share_id.split()) == 0):
            self.module.fail_json(msg="Invalid share id provided."
                                      " Please enter a valid share ID.")
        nas_server_obj = None
        if nas_server_name or nas_server_id:
            nas_server_obj = self.get_nas_server(nas_server_name,
                                                 nas_server_id)
        if nas_server_obj:
            msg = "NAS Server Object:" \
                  " {0}".format(nas_server_obj._get_properties())
            LOG.info(msg)
        else:
            msg = "NAS Server Resource not fetched."
            LOG.info(msg)
        filesystem_obj = None
        if filesystem_id:
            filesystem_obj = self.get_filesystem(filesystem_id)
        if filesystem_name:
            # nas_server_obj is required to uniquely identify filesystem
            # resource. If neither nas_server_name nor nas_server_id
            # is passed along with filesystem_name then error is thrown.
            if not nas_server_obj:
                self.module.fail_json(msg="nas_server_id/nas_server_name is "
                                          "required when filesystem_name is "
                                          "passed")
            filesystem_obj = self.get_filesystem(
                None, filesystem_name, nas_server_obj)
        if filesystem_obj:
            msg = "Filesystem Object:" \
                  " {0}".format(filesystem_obj._get_properties())
            LOG.info(msg)
        # Checking if filesystem supports SMB protocol or not.
        if filesystem_obj and \
                filesystem_obj.supported_protocols.name == "NFS":
            self.module.fail_json(msg="Cannot perform SMB share operations "
                                      "as file system supports only NFS "
                                      "protocol. Please enter a valid "
                                      "Filesystem having supported protocol"
                                      " as SMB or Multiprotocol.")
        snapshot_obj = None
        if snapshot_id or snapshot_name:
            # Snapshot Name and Snapshot ID both are unique across array.
            # Hence no need to mention nas server details
            snapshot_obj = self.get_snapshot(snapshot_name, snapshot_id)
        if snapshot_obj:
            msg = "Snapshot Object:" \
                  " {0}".format(snapshot_obj._get_properties())
            LOG.info(msg)
        else:
            msg = "Snapshot Resource not fetched."
            LOG.info(msg)
        smb_share_obj = self.get_smb_share_obj(
            share_id, share_name, filesystem_obj, snapshot_obj,
            nas_server_obj)
        if smb_share_obj:
            msg = "SMB Share Object:" \
                  " {0}".format(smb_share_obj._get_properties())
            LOG.info(msg)
        elif state == 'present' and share_id:
            # A lookup by id that finds nothing is a user error; a missing
            # share looked up by name is handled by the create path below.
            msg = "Unable to fetch SMB Share Resource. " \
                  "Incorrect SMB share id provided. " \
                  "Please enter a correct share id."
            LOG.error(msg)
            self.module.fail_json(msg=msg)
        if state == "present" and not smb_share_obj:
            smb_share_obj = self.create_smb_share(
                share_name, path, filesystem_obj, snapshot_obj, description,
                is_abe_enabled, is_branch_cache_enabled,
                is_continuous_availability_enabled, is_encryption_enabled,
                offline_availability, umask)
            changed = True
        # Note: this also runs right after a create; to_update() then
        # normally reports no difference.
        if state == "present" and smb_share_obj:
            LOG.info("Modify the details of the SMB share.")
            update_flag = self.to_update(smb_share_obj)
            msg = "Update Flag: {0}".format(str(update_flag))
            LOG.info(msg)
            if update_flag:
                self.update_smb_share(smb_share_obj, is_encryption_enabled,
                                      is_continuous_availability_enabled,
                                      is_abe_enabled,
                                      is_branch_cache_enabled,
                                      umask, description,
                                      offline_availability)
                changed = True
        if state == "absent" and smb_share_obj:
            self.delete_smb_share(smb_share_obj)
            changed = True
        self.result["changed"] = changed
        smb_details = self.prepare_output_dict(state, share_id, share_name,
                                               filesystem_obj, snapshot_obj,
                                               nas_server_obj)
        self.result["smb_share_details"] = smb_details
        self.module.exit_json(**self.result)
    def prepare_output_dict(self, state, share_id, share_name,
                            filesystem_obj, snapshot_obj, nas_server_obj):
        """Build the share-details dict returned to the user.

        Re-fetches the share (state 'present' only) so the output reflects
        any create/update performed earlier, then enriches the raw
        properties with resolved snapshot, filesystem and NAS server
        names/ids.
        """
        smb_share_details = None
        smb_share_obj = None
        if state == 'present':
            smb_share_obj = self.get_smb_share_obj(
                share_id, share_name, filesystem_obj,
                snapshot_obj, nas_server_obj)
            # NOTE(review): if the re-fetch ever returns None this raises
            # AttributeError rather than a clean module failure — confirm
            # the share is always resolvable at this point.
            smb_share_details = smb_share_obj._get_properties()
        if smb_share_details:
            # Get Snapshot NAME and ID if SMB share exists for Snapshot
            if smb_share_obj.type.name == "CIFS_SNAPSHOT":
                smb_share_details['snapshot_name'] = smb_share_obj.snap.name
                smb_share_details['snapshot_id'] = smb_share_obj.snap.id
            # Get Filesystem NAME and ID
            smb_share_details['filesystem_name'] = \
                smb_share_obj.filesystem.name
            smb_share_details['filesystem_id'] = smb_share_obj.filesystem.id
            # Get NAS server NAME and ID
            smb_share_details['nas_server_name'] = \
                smb_share_obj.filesystem.nas_server.name
            smb_share_details['nas_server_id'] = \
                smb_share_obj.filesystem.nas_server.id
        return smb_share_details
def get_unity_smb_share_parameters():
    """Return the argument spec specific to the SMB share module."""
    spec = {
        'share_name': {},
        'share_id': {},
        'filesystem_name': {},
        'filesystem_id': {},
        'snapshot_name': {},
        'snapshot_id': {},
        'nas_server_name': {},
        'nas_server_id': {},
        'path': {},
        'umask': {},
        'description': {},
        'offline_availability': dict(
            choices=["MANUAL", "DOCUMENTS", "PROGRAMS", "NONE"]),
        'is_abe_enabled': dict(type='bool'),
        'is_branch_cache_enabled': dict(type='bool'),
        'is_continuous_availability_enabled': dict(type='bool'),
        'is_encryption_enabled': dict(type='bool'),
        'state': dict(required=True, choices=['present', 'absent'],
                      type='str'),
    }
    return spec
def main():
    """Module entry point: instantiate the SMB share object and run it."""
    smb_share_module = UnitySMBShare()
    smb_share_module.perform_module_operation()
if __name__ == '__main__':
main()
| true | true |
1c2deb96868b2115a7d83bdec3c3e111743d28b1 | 13,072 | py | Python | neutron/plugins/ml2/db.py | krissterckx/neutron | 396deed808dc9b69d4641ffe16bcbe6655bc6cd5 | [
"Apache-2.0"
] | null | null | null | neutron/plugins/ml2/db.py | krissterckx/neutron | 396deed808dc9b69d4641ffe16bcbe6655bc6cd5 | [
"Apache-2.0"
] | null | null | null | neutron/plugins/ml2/db.py | krissterckx/neutron | 396deed808dc9b69d4641ffe16bcbe6655bc6cd5 | [
"Apache-2.0"
] | 1 | 2020-02-29T18:29:59.000Z | 2020-02-29T18:29:59.000Z | # Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import portbindings
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib import constants as n_const
from neutron_lib.db import api as db_api
from neutron_lib.plugins import directory
from oslo_db import exception as db_exc
from oslo_log import log
from oslo_utils import uuidutils
import six
from sqlalchemy import or_
from sqlalchemy.orm import exc
from neutron._i18n import _
from neutron.db.models import securitygroup as sg_models
from neutron.db import models_v2
from neutron.objects import base as objects_base
from neutron.objects import ports as port_obj
from neutron.plugins.ml2 import models
from neutron.services.segments import exceptions as seg_exc
LOG = log.getLogger(__name__)
# limit the number of port OR LIKE statements in one query
MAX_PORTS_PER_QUERY = 500
@db_api.CONTEXT_WRITER
def add_port_binding(context, port_id):
    """Create and persist an unbound PortBinding row for *port_id*."""
    binding = models.PortBinding(port_id=port_id,
                                 vif_type=portbindings.VIF_TYPE_UNBOUND)
    context.session.add(binding)
    return binding
@db_api.CONTEXT_WRITER
def set_binding_levels(context, levels):
    """Persist the given PortBindingLevel objects.

    All levels are expected to belong to the same (port, host) pair; the
    first entry is used for logging.
    """
    if not levels:
        LOG.debug("Attempted to set empty binding levels")
        return
    for level in levels:
        level.create()
    first = levels[0]
    LOG.debug("For port %(port_id)s, host %(host)s, "
              "set binding levels %(levels)s",
              {'port_id': first.port_id,
               'host': first.host,
               'levels': levels})
@db_api.CONTEXT_READER
def get_binding_level_objs(context, port_id, host):
    """Fetch the binding levels for (port_id, host), ordered by level.

    Returns None when no host is given.
    """
    if not host:
        return None
    pager = objects_base.Pager(sorts=[('level', True)])
    levels = port_obj.PortBindingLevel.get_objects(
        context, _pager=pager, port_id=port_id, host=host)
    LOG.debug("For port %(port_id)s, host %(host)s, "
              "got binding levels %(levels)s",
              {'port_id': port_id,
               'host': host,
               'levels': levels})
    return levels
@db_api.CONTEXT_WRITER
def clear_binding_levels(context, port_id, host):
    """Delete all binding levels for (port_id, host); no-op without host."""
    if not host:
        return
    port_obj.PortBindingLevel.delete_objects(
        context, port_id=port_id, host=host)
    LOG.debug("For port %(port_id)s, host %(host)s, "
              "cleared binding levels",
              {'port_id': port_id,
               'host': host})
def ensure_distributed_port_binding(context, port_id, host, router_id=None):
    """Return the distributed binding for (port_id, host), creating it
    if necessary.

    Uses a read-then-insert pattern: a concurrent insert of the same row
    is tolerated by catching DBDuplicateEntry and re-reading the winner.
    """
    with db_api.CONTEXT_READER.using(context):
        record = (context.session.query(models.DistributedPortBinding).
                  filter_by(port_id=port_id, host=host).first())
    if record:
        return record
    try:
        with db_api.CONTEXT_WRITER.using(context):
            # New bindings start unbound/DOWN until a mechanism driver
            # binds them.
            record = models.DistributedPortBinding(
                port_id=port_id,
                host=host,
                router_id=router_id,
                vif_type=portbindings.VIF_TYPE_UNBOUND,
                vnic_type=portbindings.VNIC_NORMAL,
                status=n_const.PORT_STATUS_DOWN)
            context.session.add(record)
            return record
    except db_exc.DBDuplicateEntry:
        # Another worker inserted the same (port_id, host) row first;
        # fetch and return that row instead.
        LOG.debug("Distributed Port %s already bound", port_id)
        with db_api.CONTEXT_READER.using(context):
            return (context.session.query(models.DistributedPortBinding).
                    filter_by(port_id=port_id, host=host).one())
def delete_distributed_port_binding_if_stale(context, binding):
    """Delete a distributed binding that serves no router and is DOWN."""
    stale = (not binding.router_id and
             binding.status == n_const.PORT_STATUS_DOWN)
    if not stale:
        return
    with db_api.CONTEXT_WRITER.using(context):
        LOG.debug("Distributed port: Deleting binding %s", binding)
        context.session.delete(binding)
def get_port(context, port_id):
    """Get port record for update within transaction.

    Accepts a full or partial (prefix) port id; returns None when no
    port matches or when the prefix is ambiguous.
    """
    with db_api.CONTEXT_READER.using(context):
        # enable_eagerloads(True) keeps lazy loading usable later.
        query = (context.session.query(models_v2.Port).
                 enable_eagerloads(True).
                 filter(models_v2.Port.id.startswith(port_id)))
        try:
            return query.one()
        except exc.NoResultFound:
            return None
        except exc.MultipleResultsFound:
            LOG.error("Multiple ports have port_id starting with %s",
                      port_id)
            return None
@db_api.CONTEXT_READER
def get_port_from_device_mac(context, device_mac):
    """Return one port object with the given MAC address, or None."""
    LOG.debug("get_port_from_device_mac() called for mac %s", device_mac)
    matches = port_obj.Port.get_objects(context, mac_address=device_mac)
    if not matches:
        return None
    return matches.pop()
def get_ports_and_sgs(context, port_ids):
    """Get ports from database with security group info.

    Oversized id lists are recursively split into chunks of
    MAX_PORTS_PER_QUERY so the generated SQL stays bounded. Returns one
    plugin-format port dict (with security group data) per matched port.
    """
    # break large queries into smaller parts
    if len(port_ids) > MAX_PORTS_PER_QUERY:
        LOG.debug("Number of ports %(pcount)s exceeds the maximum per "
                  "query %(maxp)s. Partitioning queries.",
                  {'pcount': len(port_ids), 'maxp': MAX_PORTS_PER_QUERY})
        return (get_ports_and_sgs(context, port_ids[:MAX_PORTS_PER_QUERY]) +
                get_ports_and_sgs(context, port_ids[MAX_PORTS_PER_QUERY:]))
    LOG.debug("get_ports_and_sgs() called for port_ids %s", port_ids)
    if not port_ids:
        # if port_ids is empty, avoid querying to DB to ask it for nothing
        return []
    ports_to_sg_ids = get_sg_ids_grouped_by_port(context, port_ids)
    # dict.items() iterates fine on both py2 and py3; six.iteritems is
    # unnecessary here.
    return [make_port_dict_with_security_groups(port, sec_groups)
            for port, sec_groups in ports_to_sg_ids.items()]
def get_sg_ids_grouped_by_port(context, port_ids):
    """Map each matching Port object to the list of its security group ids.

    Accepts both full UUIDs and UUID prefixes in ``port_ids``. Ports with
    no security groups map to an empty list.
    """
    sg_ids_grouped_by_port = {}
    sg_binding_port = sg_models.SecurityGroupPortBinding.port_id
    with db_api.CONTEXT_READER.using(context):
        # partial UUIDs must be individually matched with startswith.
        # full UUIDs may be matched directly in an IN statement
        partial_uuids = set(port_id for port_id in port_ids
                            if not uuidutils.is_uuid_like(port_id))
        full_uuids = set(port_ids) - partial_uuids
        or_criteria = [models_v2.Port.id.startswith(port_id)
                       for port_id in partial_uuids]
        if full_uuids:
            or_criteria.append(models_v2.Port.id.in_(full_uuids))
        query = context.session.query(
            models_v2.Port,
            sg_models.SecurityGroupPortBinding.security_group_id)
        # Outer join so ports without any SG binding are still returned
        # (their sg_id comes back as None and is filtered below).
        query = query.outerjoin(sg_models.SecurityGroupPortBinding,
                                models_v2.Port.id == sg_binding_port)
        query = query.filter(or_(*or_criteria))
        for port, sg_id in query:
            if port not in sg_ids_grouped_by_port:
                sg_ids_grouped_by_port[port] = []
            if sg_id:
                sg_ids_grouped_by_port[port].append(sg_id)
        return sg_ids_grouped_by_port
def make_port_dict_with_security_groups(port, sec_groups):
    """Convert a port model into plugin dict format, attaching SG info
    and flattening fixed_ips to bare IP addresses."""
    core_plugin = directory.get_plugin()
    port_dict = core_plugin._make_port_dict(port)
    port_dict['security_groups'] = sec_groups
    port_dict['security_group_rules'] = []
    port_dict['security_group_source_groups'] = []
    port_dict['fixed_ips'] = [fixed_ip['ip_address']
                              for fixed_ip in port['fixed_ips']]
    return port_dict
def get_port_binding_host(context, port_id):
    """Return the host of the single ACTIVE binding for *port_id*.

    Accepts a full or partial (prefix) port id; returns None when there
    is no active binding or the prefix is ambiguous.
    """
    try:
        with db_api.CONTEXT_READER.using(context):
            row = (context.session.query(models.PortBinding.host).
                   filter(models.PortBinding.port_id.startswith(port_id)).
                   filter(models.PortBinding.status == n_const.ACTIVE).
                   one())
    except exc.NoResultFound:
        LOG.debug("No active binding found for port %(port_id)s",
                  {'port_id': port_id})
        return
    except exc.MultipleResultsFound:
        LOG.error("Multiple ports have port_id starting with %s",
                  port_id)
        return
    return row.host
@db_api.CONTEXT_READER
def generate_distributed_port_status(context, port_id):
    """Combine the statuses of all distributed bindings of a port.

    ACTIVE on any host wins immediately; otherwise DOWN on any host beats
    the default BUILD.
    """
    final_status = n_const.PORT_STATUS_BUILD
    rows = context.session.query(models.DistributedPortBinding.status).filter(
        models.DistributedPortBinding.port_id == port_id)
    for row in rows:
        if row.status == n_const.PORT_STATUS_ACTIVE:
            return row.status
        if row.status == n_const.PORT_STATUS_DOWN:
            final_status = row.status
    return final_status
def get_distributed_port_binding_by_host(context, port_id, host):
    """Return the distributed binding of *port_id* on *host*, or None.

    The port id may be a prefix; a miss is logged at debug level.
    """
    with db_api.CONTEXT_READER.using(context):
        query = context.session.query(models.DistributedPortBinding)
        binding = query.filter(
            models.DistributedPortBinding.port_id.startswith(port_id),
            models.DistributedPortBinding.host == host).first()
    if not binding:
        LOG.debug("No binding for distributed port %(port_id)s with host "
                  "%(host)s", {'port_id': port_id, 'host': host})
    return binding
def get_distributed_port_bindings(context, port_id):
    """Return every distributed binding whose port id starts with
    *port_id*; an empty result is logged at debug level."""
    with db_api.CONTEXT_READER.using(context):
        query = context.session.query(models.DistributedPortBinding)
        bindings = query.filter(
            models.DistributedPortBinding.port_id.startswith(port_id)).all()
    if not bindings:
        LOG.debug("No bindings for distributed port %s", port_id)
    return bindings
@db_api.CONTEXT_READER
def partial_port_ids_to_full_ids(context, partial_ids):
    """Takes a list of the start of port IDs and returns full IDs.

    Returns dictionary of partial IDs to full IDs if a single match
    is found.
    """
    result = {}
    criteria = [models_v2.Port.id.startswith(p) for p in partial_ids]
    rows = (context.session.query(models_v2.Port.id).
            filter(or_(*criteria)))
    candidates = [row[0] for row in rows]
    for partial_id in partial_ids:
        matching = [c for c in candidates if c.startswith(partial_id)]
        if len(matching) == 1:
            result[partial_id] = matching[0]
        elif not matching:
            LOG.info("No ports have port_id starting with %s", partial_id)
        else:
            # More than one candidate: ambiguous prefix, omit from result.
            LOG.error("Multiple ports have port_id starting with %s",
                      partial_id)
    return result
@db_api.CONTEXT_READER
def get_port_db_objects(context, port_ids):
    """Takes a list of port_ids and returns matching port db objects.

    return format is a dictionary keyed by passed in IDs with db objects
    for values or None if the port was not present.
    """
    result = dict.fromkeys(port_ids)
    query = (context.session.query(models_v2.Port).
             filter(models_v2.Port.id.in_(port_ids)))
    for port in query:
        result[port.id] = port
    return result
@db_api.CONTEXT_READER
def is_dhcp_active_on_any_subnet(context, subnet_ids):
    """Return True when DHCP is enabled on at least one of the subnets."""
    if not subnet_ids:
        return False
    count = (context.session.query(models_v2.Subnet.id).
             enable_eagerloads(False).
             filter_by(enable_dhcp=True).
             filter(models_v2.Subnet.id.in_(subnet_ids)).
             count())
    return bool(count)
def _prevent_segment_delete_with_port_bound(resource, event, trigger,
                                            payload=None):
    """Raise exception if there are any ports bound with segment_id.

    Registered as a BEFORE_DELETE callback for SEGMENT resources; raises
    SegmentInUse to veto the delete while ports remain on the segment.
    """
    if payload.metadata.get('for_net_delete'):
        # don't check for network deletes
        return
    with db_api.CONTEXT_READER.using(payload.context):
        port_ids = port_obj.Port.get_port_ids_filter_by_segment_id(
            payload.context, segment_id=payload.resource_id)
    # There are still some ports in the segment, segment should not be deleted
    # TODO(xiaohhui): Should we delete the dhcp port automatically here?
    if port_ids:
        reason = _("The segment is still bound with port(s) "
                   "%s") % ", ".join(port_ids)
        raise seg_exc.SegmentInUse(segment_id=payload.resource_id,
                                   reason=reason)
def subscribe():
    """Register the segment-delete guard with the callback registry.

    Invoked once at module import (see the call following this def).
    """
    registry.subscribe(_prevent_segment_delete_with_port_bound,
                       resources.SEGMENT,
                       events.BEFORE_DELETE)
subscribe()
| 38.110787 | 79 | 0.662255 |
from neutron_lib.api.definitions import portbindings
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib import constants as n_const
from neutron_lib.db import api as db_api
from neutron_lib.plugins import directory
from oslo_db import exception as db_exc
from oslo_log import log
from oslo_utils import uuidutils
import six
from sqlalchemy import or_
from sqlalchemy.orm import exc
from neutron._i18n import _
from neutron.db.models import securitygroup as sg_models
from neutron.db import models_v2
from neutron.objects import base as objects_base
from neutron.objects import ports as port_obj
from neutron.plugins.ml2 import models
from neutron.services.segments import exceptions as seg_exc
LOG = log.getLogger(__name__)
MAX_PORTS_PER_QUERY = 500
@db_api.CONTEXT_WRITER
def add_port_binding(context, port_id):
record = models.PortBinding(
port_id=port_id,
vif_type=portbindings.VIF_TYPE_UNBOUND)
context.session.add(record)
return record
@db_api.CONTEXT_WRITER
def set_binding_levels(context, levels):
if levels:
for level in levels:
level.create()
LOG.debug("For port %(port_id)s, host %(host)s, "
"set binding levels %(levels)s",
{'port_id': levels[0].port_id,
'host': levels[0].host,
'levels': levels})
else:
LOG.debug("Attempted to set empty binding levels")
@db_api.CONTEXT_READER
def get_binding_level_objs(context, port_id, host):
if host:
pager = objects_base.Pager(sorts=[('level', True)])
port_bl_objs = port_obj.PortBindingLevel.get_objects(
context, _pager=pager, port_id=port_id, host=host)
LOG.debug("For port %(port_id)s, host %(host)s, "
"got binding levels %(levels)s",
{'port_id': port_id,
'host': host,
'levels': port_bl_objs})
return port_bl_objs
@db_api.CONTEXT_WRITER
def clear_binding_levels(context, port_id, host):
if host:
port_obj.PortBindingLevel.delete_objects(
context, port_id=port_id, host=host)
LOG.debug("For port %(port_id)s, host %(host)s, "
"cleared binding levels",
{'port_id': port_id,
'host': host})
def ensure_distributed_port_binding(context, port_id, host, router_id=None):
with db_api.CONTEXT_READER.using(context):
record = (context.session.query(models.DistributedPortBinding).
filter_by(port_id=port_id, host=host).first())
if record:
return record
try:
with db_api.CONTEXT_WRITER.using(context):
record = models.DistributedPortBinding(
port_id=port_id,
host=host,
router_id=router_id,
vif_type=portbindings.VIF_TYPE_UNBOUND,
vnic_type=portbindings.VNIC_NORMAL,
status=n_const.PORT_STATUS_DOWN)
context.session.add(record)
return record
except db_exc.DBDuplicateEntry:
LOG.debug("Distributed Port %s already bound", port_id)
with db_api.CONTEXT_READER.using(context):
return (context.session.query(models.DistributedPortBinding).
filter_by(port_id=port_id, host=host).one())
def delete_distributed_port_binding_if_stale(context, binding):
if not binding.router_id and binding.status == n_const.PORT_STATUS_DOWN:
with db_api.CONTEXT_WRITER.using(context):
LOG.debug("Distributed port: Deleting binding %s", binding)
context.session.delete(binding)
def get_port(context, port_id):
with db_api.CONTEXT_READER.using(context):
try:
record = (context.session.query(models_v2.Port).
enable_eagerloads(True).
filter(models_v2.Port.id.startswith(port_id)).
one())
return record
except exc.NoResultFound:
return
except exc.MultipleResultsFound:
LOG.error("Multiple ports have port_id starting with %s",
port_id)
return
@db_api.CONTEXT_READER
def get_port_from_device_mac(context, device_mac):
LOG.debug("get_port_from_device_mac() called for mac %s", device_mac)
ports = port_obj.Port.get_objects(context, mac_address=device_mac)
return ports.pop() if ports else None
def get_ports_and_sgs(context, port_ids):
if len(port_ids) > MAX_PORTS_PER_QUERY:
LOG.debug("Number of ports %(pcount)s exceeds the maximum per "
"query %(maxp)s. Partitioning queries.",
{'pcount': len(port_ids), 'maxp': MAX_PORTS_PER_QUERY})
return (get_ports_and_sgs(context, port_ids[:MAX_PORTS_PER_QUERY]) +
get_ports_and_sgs(context, port_ids[MAX_PORTS_PER_QUERY:]))
LOG.debug("get_ports_and_sgs() called for port_ids %s", port_ids)
if not port_ids:
return []
ports_to_sg_ids = get_sg_ids_grouped_by_port(context, port_ids)
return [make_port_dict_with_security_groups(port, sec_groups)
for port, sec_groups in six.iteritems(ports_to_sg_ids)]
def get_sg_ids_grouped_by_port(context, port_ids):
sg_ids_grouped_by_port = {}
sg_binding_port = sg_models.SecurityGroupPortBinding.port_id
with db_api.CONTEXT_READER.using(context):
partial_uuids = set(port_id for port_id in port_ids
if not uuidutils.is_uuid_like(port_id))
full_uuids = set(port_ids) - partial_uuids
or_criteria = [models_v2.Port.id.startswith(port_id)
for port_id in partial_uuids]
if full_uuids:
or_criteria.append(models_v2.Port.id.in_(full_uuids))
query = context.session.query(
models_v2.Port,
sg_models.SecurityGroupPortBinding.security_group_id)
query = query.outerjoin(sg_models.SecurityGroupPortBinding,
models_v2.Port.id == sg_binding_port)
query = query.filter(or_(*or_criteria))
for port, sg_id in query:
if port not in sg_ids_grouped_by_port:
sg_ids_grouped_by_port[port] = []
if sg_id:
sg_ids_grouped_by_port[port].append(sg_id)
return sg_ids_grouped_by_port
def make_port_dict_with_security_groups(port, sec_groups):
    """Build a port dict from *port* and attach its security-group ids.

    ``fixed_ips`` is flattened to a bare list of IP address strings.
    """
    plugin = directory.get_plugin()
    port_dict = plugin._make_port_dict(port)
    port_dict.update(
        security_groups=sec_groups,
        security_group_rules=[],
        security_group_source_groups=[],
        fixed_ips=[fixed_ip['ip_address'] for fixed_ip in port['fixed_ips']],
    )
    return port_dict
def get_port_binding_host(context, port_id):
    """Return the host of the single ACTIVE binding for *port_id*.

    *port_id* may be a UUID prefix.  Returns None (after logging) when no
    active binding exists or when the prefix matches several ports.
    """
    try:
        with db_api.CONTEXT_READER.using(context):
            query = (context.session.query(models.PortBinding.host).
                     filter(models.PortBinding.port_id.startswith(port_id)))
            # .one() raises unless exactly one active binding matches.
            query = query.filter(
                models.PortBinding.status == n_const.ACTIVE).one()
    except exc.NoResultFound:
        LOG.debug("No active binding found for port %(port_id)s",
                  {'port_id': port_id})
        return
    except exc.MultipleResultsFound:
        LOG.error("Multiple ports have port_id starting with %s",
                  port_id)
        return
    # `query` is now the single result row; expose its host column.
    return query.host
@db_api.CONTEXT_READER
def generate_distributed_port_status(context, port_id):
    """Aggregate a status for a distributed port across all its bindings.

    ACTIVE wins over everything, then DOWN, otherwise BUILD.
    """
    query = context.session.query(models.DistributedPortBinding.status)
    query = query.filter(models.DistributedPortBinding.port_id == port_id)
    saw_down = False
    for binding in query:
        if binding.status == n_const.PORT_STATUS_ACTIVE:
            # Any active binding makes the whole port active.
            return binding.status
        if binding.status == n_const.PORT_STATUS_DOWN:
            saw_down = True
    return n_const.PORT_STATUS_DOWN if saw_down else n_const.PORT_STATUS_BUILD
def get_distributed_port_binding_by_host(context, port_id, host):
    """Return the distributed binding of *port_id* on *host*, or None.

    *port_id* may be a UUID prefix.
    """
    with db_api.CONTEXT_READER.using(context):
        query = context.session.query(models.DistributedPortBinding)
        query = query.filter(
            models.DistributedPortBinding.port_id.startswith(port_id),
            models.DistributedPortBinding.host == host)
        binding = query.first()
    if not binding:
        LOG.debug("No binding for distributed port %(port_id)s with host "
                  "%(host)s", {'port_id': port_id, 'host': host})
    return binding
def get_distributed_port_bindings(context, port_id):
    """Return all distributed bindings whose port id starts with *port_id*."""
    with db_api.CONTEXT_READER.using(context):
        query = context.session.query(models.DistributedPortBinding)
        query = query.filter(
            models.DistributedPortBinding.port_id.startswith(port_id))
        bindings = query.all()
    if not bindings:
        LOG.debug("No bindings for distributed port %s", port_id)
    return bindings
@db_api.CONTEXT_READER
def partial_port_ids_to_full_ids(context, partial_ids):
    """Resolve UUID prefixes to full port UUIDs.

    Returns a dict mapping each prefix that matched exactly one port to
    that port's full id; ambiguous or unmatched prefixes are logged and
    omitted.
    """
    query = (context.session.query(models_v2.Port.id).
             filter(or_(*[models_v2.Port.id.startswith(p)
                          for p in partial_ids])))
    candidates = [row[0] for row in query]
    result = {}
    for partial_id in partial_ids:
        hits = [c for c in candidates if c.startswith(partial_id)]
        if len(hits) == 1:
            result[partial_id] = hits[0]
        elif not hits:
            LOG.info("No ports have port_id starting with %s", partial_id)
        else:
            # more than one match: ambiguous prefix
            LOG.error("Multiple ports have port_id starting with %s",
                      partial_id)
    return result
@db_api.CONTEXT_READER
def get_port_db_objects(context, port_ids):
    """Return {port_id: Port DB object or None} for every id in *port_ids*."""
    # Pre-seed with None so missing ports are still present in the result.
    result = dict.fromkeys(port_ids)
    query = (context.session.query(models_v2.Port).
             filter(models_v2.Port.id.in_(port_ids)))
    for port_db in query:
        result[port_db.id] = port_db
    return result
@db_api.CONTEXT_READER
def is_dhcp_active_on_any_subnet(context, subnet_ids):
    """Return True when at least one subnet in *subnet_ids* has DHCP enabled."""
    if not subnet_ids:
        return False
    query = (context.session.query(models_v2.Subnet.id).
             enable_eagerloads(False).
             filter_by(enable_dhcp=True).
             filter(models_v2.Subnet.id.in_(subnet_ids)))
    return query.count() > 0
def _prevent_segment_delete_with_port_bound(resource, event, trigger,
                                            payload=None):
    """Abort segment deletion while ports are still bound to the segment.

    Registered as a BEFORE_DELETE callback for SEGMENT resources;
    *payload* carries the request context and the segment id.
    Raises SegmentInUse when bound ports remain.
    """
    if payload.metadata.get('for_net_delete'):
        # don't check for network deletes
        return
    with db_api.CONTEXT_READER.using(payload.context):
        port_ids = port_obj.Port.get_port_ids_filter_by_segment_id(
            payload.context, segment_id=payload.resource_id)
    # A segment that still has bound ports must not be removed.
    if port_ids:
        reason = _("The segment is still bound with port(s) "
                   "%s") % ", ".join(port_ids)
        raise seg_exc.SegmentInUse(segment_id=payload.resource_id,
                                   reason=reason)
def subscribe():
    """Register the segment-delete guard with the callback registry."""
    registry.subscribe(_prevent_segment_delete_with_port_bound,
                       resources.SEGMENT,
                       events.BEFORE_DELETE)
# Register the callback as soon as this module is imported.
subscribe()
| true | true |
1c2deba39066dfdf0e1f993d8003d0900604f9f3 | 1,011 | py | Python | tempest/services/volume/base/base_availability_zone_client.py | Hybrid-Cloud/hybrid-tempest | 319e90c6fa6e46925b495c93cd5258f088a30ec0 | [
"Apache-2.0"
] | 3 | 2016-07-15T12:27:23.000Z | 2021-04-23T04:41:10.000Z | tempest/services/volume/base/base_availability_zone_client.py | LIS/lis-tempest | 8e6403b2d6de81c5d18ed867b4977385c8278b75 | [
"Apache-2.0"
] | null | null | null | tempest/services/volume/base/base_availability_zone_client.py | LIS/lis-tempest | 8e6403b2d6de81c5d18ed867b4977385c8278b75 | [
"Apache-2.0"
] | 12 | 2016-07-14T18:13:05.000Z | 2017-07-08T18:45:42.000Z | # Copyright 2014 NEC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from tempest.lib.common import rest_client
class BaseAvailabilityZoneClient(rest_client.RestClient):
    """Base client for the volume ``os-availability-zone`` API."""
    def list_availability_zones(self):
        """GET os-availability-zone and return the parsed response body."""
        resp, body = self.get('os-availability-zone')
        body = json.loads(body)
        # A 200 is the only expected status for this call.
        self.expected_success(200, resp.status)
        return rest_client.ResponseBody(resp, body)
| 36.107143 | 78 | 0.733927 |
from oslo_serialization import jsonutils as json
from tempest.lib.common import rest_client
# NOTE: comment-stripped duplicate copy (dataset column) of the class above.
class BaseAvailabilityZoneClient(rest_client.RestClient):
    """Base client for the volume ``os-availability-zone`` API."""
    def list_availability_zones(self):
        """GET os-availability-zone and return the parsed response body."""
        resp, body = self.get('os-availability-zone')
        body = json.loads(body)
        self.expected_success(200, resp.status)
        return rest_client.ResponseBody(resp, body)
| true | true |
1c2dec8c24007e60bf500f4a92983eb7e68a347f | 262 | py | Python | charactertype.py | pawankumarsharm/Pythoncoding | f0e5f6c1d22b101e109088529640326dd5405a6a | [
"bzip2-1.0.6"
] | null | null | null | charactertype.py | pawankumarsharm/Pythoncoding | f0e5f6c1d22b101e109088529640326dd5405a6a | [
"bzip2-1.0.6"
] | null | null | null | charactertype.py | pawankumarsharm/Pythoncoding | f0e5f6c1d22b101e109088529640326dd5405a6a | [
"bzip2-1.0.6"
] | null | null | null | print('Durga786'.isalnum()) #True
print('durga786'.isalpha())#False
print('durga'.isalpha()) #True
print('durga'.islower()) #True
print('durga'.isupper()) #False
print('Durga Software Solutions'.istitle()) #True
print('Durga Software Solutions'.isspace()) #True
| 32.75 | 49 | 0.721374 | print('Durga786'.isalnum())
print('durga786'.isalpha())
print('durga'.isalpha())
print('durga'.islower())
print('durga'.isupper())
print('Durga Software Solutions'.istitle())
print('Durga Software Solutions'.isspace())
| true | true |
1c2decf1c1c20da6ef2ec52b0d97bfc386723926 | 1,374 | py | Python | face_recognition.py | Vargha-Kh/Face-Recognition | b0990be8b145e5529e138b31ba988710397fca6d | [
"CC-BY-3.0",
"MIT"
] | 2 | 2022-03-16T11:40:16.000Z | 2022-03-18T12:38:16.000Z | face_recognition.py | Vargha-Kh/Face-Recognition | b0990be8b145e5529e138b31ba988710397fca6d | [
"CC-BY-3.0",
"MIT"
] | null | null | null | face_recognition.py | Vargha-Kh/Face-Recognition | b0990be8b145e5529e138b31ba988710397fca6d | [
"CC-BY-3.0",
"MIT"
] | null | null | null | from add_face import AddingFace
from deepface import DeepFace
import sys
if __name__ == '__main__':
while True:
print(" ************ main menu ************")
print('1. Add a new face')
print('2. Recognize a face')
print('3. Exit')
database_path = '/home/vargha/Desktop/database'
try:
menu_item = int(input('Choose the menu item: '))
if menu_item == 1:
full_name = input("Enter the full name: ")
database_path = input("Enter the database path: ") or database_path
new_face = AddingFace(database_path, full_name)
new_face.capturing_image()
elif menu_item == 2:
DeepFace.stream(db_path=database_path, detector_backend='ssd', model_name='Facenet512',
distance_metric="cosine", enable_face_analysis=False, time_threshold=2,
frame_threshold=2, source=0)
# if output:
# print("Face detected, Welcome")
elif menu_item == 3:
print("Exiting...")
sys.exit()
else:
raise ValueError
except ValueError:
print("Invalid input. Please enter a number.")
else:
print("Invalid input. Please enter a number.")
| 36.157895 | 103 | 0.52984 | from add_face import AddingFace
from deepface import DeepFace
import sys
if __name__ == '__main__':
while True:
print(" ************ main menu ************")
print('1. Add a new face')
print('2. Recognize a face')
print('3. Exit')
database_path = '/home/vargha/Desktop/database'
try:
menu_item = int(input('Choose the menu item: '))
if menu_item == 1:
full_name = input("Enter the full name: ")
database_path = input("Enter the database path: ") or database_path
new_face = AddingFace(database_path, full_name)
new_face.capturing_image()
elif menu_item == 2:
DeepFace.stream(db_path=database_path, detector_backend='ssd', model_name='Facenet512',
distance_metric="cosine", enable_face_analysis=False, time_threshold=2,
frame_threshold=2, source=0)
elif menu_item == 3:
print("Exiting...")
sys.exit()
else:
raise ValueError
except ValueError:
print("Invalid input. Please enter a number.")
else:
print("Invalid input. Please enter a number.")
| true | true |
1c2def4561651ae473540f97572120d8637b7556 | 431 | py | Python | u3dunpack/file/serialized/SerializedFileHeader.py | smalls0098/u3d-studio | b5fb9875afdebaf457ee75c3ab42e4e828a88680 | [
"MIT"
] | 1 | 2020-07-27T03:43:47.000Z | 2020-07-27T03:43:47.000Z | u3dunpack/file/serialized/SerializedFileHeader.py | smalls0098/u3d-assets-tools | b5fb9875afdebaf457ee75c3ab42e4e828a88680 | [
"MIT"
] | null | null | null | u3dunpack/file/serialized/SerializedFileHeader.py | smalls0098/u3d-assets-tools | b5fb9875afdebaf457ee75c3ab42e4e828a88680 | [
"MIT"
] | 1 | 2021-10-03T11:23:14.000Z | 2021-10-03T11:23:14.000Z | from ...streams import EndianBinaryReader
class SerializedFileHeader:
metadataSize: int
fileSize: int
version: int
dataOffset: int
endian: bytes
reserved: bytes
def __init__(self, reader: EndianBinaryReader):
self.metadataSize = reader.readUInt()
self.fileSize = reader.readUInt()
self.version = reader.readUInt()
self.dataOffset = reader.readUInt()
| 23.944444 | 52 | 0.651972 | from ...streams import EndianBinaryReader
class SerializedFileHeader:
metadataSize: int
fileSize: int
version: int
dataOffset: int
endian: bytes
reserved: bytes
def __init__(self, reader: EndianBinaryReader):
self.metadataSize = reader.readUInt()
self.fileSize = reader.readUInt()
self.version = reader.readUInt()
self.dataOffset = reader.readUInt()
| true | true |
1c2defcad6eb30907b1195b71d0eb2c40d5b42ff | 4,390 | py | Python | analyses/utils.py | lukassnoek/MVCA | dd194140a5babb4605b9248d34508b9d9e4f799c | [
"MIT"
] | 11 | 2018-03-29T09:39:28.000Z | 2021-09-09T15:49:53.000Z | analyses/suppl_simulations/utils.py | lukassnoek/MVCA | dd194140a5babb4605b9248d34508b9d9e4f799c | [
"MIT"
] | 2 | 2021-02-04T11:10:34.000Z | 2022-03-07T14:41:54.000Z | analyses/suppl_simulations/utils.py | lukassnoek/MVCA | dd194140a5babb4605b9248d34508b9d9e4f799c | [
"MIT"
] | 3 | 2018-04-12T09:11:31.000Z | 2018-11-30T10:17:54.000Z | import numpy as np
from scipy.special import hyp2f1, gammaln
def get_r2(iv, dv, stack_intercept=True):
    """ Regress dv onto iv and return r-squared.

    Parameters
    ----------
    iv : numpy array
        Array of shape N (samples) x K (features)
    dv : numpy array
        Array of shape N (samples) x 1
    stack_intercept : bool
        Whether to stack an intercept (vector with ones of length N).

    Returns
    -------
    r2 : float
        R-squared model fit.
    """
    if iv.ndim == 1:
        # Add axis if shape is (N,)
        iv = iv[:, np.newaxis]
    if stack_intercept:
        iv = np.hstack((np.ones((iv.shape[0], 1)), iv))
    # rcond=None uses numpy's recommended cutoff and matches the lstsq
    # calls in vectorized_partial_corr (also silences the FutureWarning
    # about the legacy default).
    beta = np.linalg.lstsq(iv, dv, rcond=None)[0]
    dv_hat = iv.dot(beta).squeeze()
    # Coefficient of determination: 1 - SS_res / SS_tot.
    r2 = 1 - (((dv - dv_hat) ** 2).sum() / ((dv - dv.mean()) ** 2).sum())
    return r2
def vectorized_corr(arr, arr_2D):
    """Pearson correlation between *arr* and every column of *arr_2D*.

    Parameters
    ----------
    arr : numpy array
        Array of shape (N,)
    arr_2D : numpy array
        Array of shape (N, P); each column is a variable to correlate
        with *arr*.

    Returns
    -------
    numpy array of shape (P,) with the per-column correlations.
    """
    if arr.ndim == 1:
        arr = arr[:, np.newaxis]
    # Mean-center both inputs, then apply the Pearson formula column-wise.
    centered = arr - arr.mean()
    centered_2D = arr_2D - arr_2D.mean(axis=0)
    numerator = (centered * centered_2D).sum(axis=0)
    denominator = np.sqrt((centered ** 2).sum(axis=0) *
                          (centered_2D ** 2).sum(axis=0))
    return numerator / denominator
def vectorized_partial_corr(arr, c, arr_2D, stack_intercept=True):
    """Partial correlation between *arr* and each column of *arr_2D*,
    controlling for *c* on both sides.

    Parameters
    ----------
    arr : numpy array
        Array of shape (N,)
    c : numpy array
        Array of shape (N,) to be partialled out of both arr and arr_2D
    arr_2D : numpy array
        Array of shape (N, P)
    stack_intercept : bool
        Whether to prepend a column of ones to *c*.

    Returns
    -------
    numpy array of shape (P,) with the partial correlations.
    """
    if arr.ndim == 1:
        arr = arr[:, np.newaxis]
    if c.ndim == 1:
        c = c[:, np.newaxis]
    if stack_intercept:
        c = np.hstack((np.ones((c.shape[0], 1)), c))

    def _residualize(y):
        # OLS residuals of y after regressing on the covariates c.
        beta = np.linalg.lstsq(c, y, rcond=None)[0]
        return y - c.dot(beta)

    return vectorized_corr(_residualize(arr), _residualize(arr_2D))
def vectorized_semipartial_corr(arr, c, arr_2D, which='2D', stack_intercept=True):
    """ Computes the semipartial correlation between an array and each column
    in a 2D array (each column represents a variable) in a vectorized
    way.

    Parameters
    ----------
    arr : numpy array
        Array of shape (N,)
    c : numpy array
        Array of shape (N,) that should be partialled out of arr_2D or arr
    arr_2D : numpy array
        Array of shape (N, P), with P indicating different variables that
        will be correlated with arr
    which : str
        '2D' partials *c* out of arr_2D only; any other value partials
        *c* out of arr only.
    stack_intercept : bool
        Whether to prepend a column of ones to *c*.

    Returns
    -------
    corrs : numpy array
        Array of shape (P,) with all correlations between arr and columns in arr_2D
    """
    if arr.ndim == 1:
        arr = arr[:, np.newaxis]
    if c.ndim == 1:
        # Add axis if shape is (N,)
        c = c[:, np.newaxis]
    if stack_intercept:
        c = np.hstack((np.ones((c.shape[0], 1)), c))
    if which == '2D':
        arr_2D_resid = arr_2D - c.dot(np.linalg.lstsq(c, arr_2D, rcond=None)[0])
        return vectorized_corr(arr, arr_2D_resid)
    else:
        # Consistency fix: pass rcond=None like the branch above (the
        # original omitted it here, triggering numpy's legacy-default
        # FutureWarning and a potentially different rank cutoff).
        arr_resid = arr - c.dot(np.linalg.lstsq(c, arr, rcond=None)[0])
        return vectorized_corr(arr_resid, arr_2D)
def rpdf(rho, n, rs):
    """Sampling density of the Pearson correlation *rs* for sample size *n*.

    rho = population correlation coefficient.
    """
    # Work in log space for numerical stability, then exponentiate.
    log_num = (np.log(n - 2) + gammaln(n - 1)
               + np.log((1 - rho ** 2) ** (.5 * (n - 1)))
               + np.log((1 - rs ** 2) ** (.5 * (n - 4))))
    log_den = (np.log(np.sqrt(2 * np.pi)) + gammaln(n - .5)
               + np.log((1 - rho * rs) ** (n - 3 / 2)))
    # Gaussian hypergeometric factor of the exact density.
    hyp = hyp2f1(.5, .5, (2 * n - 1) / 2, (rho * rs + 1) / 2)
    return np.exp(log_num - log_den) * hyp
| 29.463087 | 102 | 0.580182 | import numpy as np
from scipy.special import hyp2f1, gammaln
# NOTE: comment-stripped duplicate copies (dataset column) of the
# functions documented above.
def get_r2(iv, dv, stack_intercept=True):
    """Regress dv onto iv and return the R-squared fit."""
    if iv.ndim == 1:
        iv = iv[:, np.newaxis]
    if stack_intercept:
        iv = np.hstack((np.ones((iv.shape[0], 1)), iv))
    beta = np.linalg.lstsq(iv, dv)[0]
    dv_hat = iv.dot(beta).squeeze()
    r2 = 1 - (((dv - dv_hat) ** 2).sum() / ((dv - dv.mean()) ** 2).sum())
    return r2
def vectorized_corr(arr, arr_2D):
    """Pearson correlation between arr and every column of arr_2D."""
    if arr.ndim == 1:
        arr = arr[:, np.newaxis]
    arr_c, arr_2D_c = arr - arr.mean(), arr_2D - arr_2D.mean(axis=0)
    r_num = np.sum(arr_c * arr_2D_c, axis=0)
    r_den = np.sqrt(np.sum(arr_c ** 2, axis=0) * np.sum(arr_2D_c ** 2, axis=0))
    corrs = r_num / r_den
    return corrs
def vectorized_partial_corr(arr, c, arr_2D, stack_intercept=True):
    """Partial correlation, controlling for c on both sides."""
    if arr.ndim == 1:
        arr = arr[:, np.newaxis]
    if c.ndim == 1:
        c = c[:, np.newaxis]
    if stack_intercept:
        c = np.hstack((np.ones((c.shape[0], 1)), c))
    arr_resid = arr - c.dot(np.linalg.lstsq(c, arr, rcond=None)[0])
    arr_2d_resid = arr_2D - c.dot(np.linalg.lstsq(c, arr_2D, rcond=None)[0])
    return vectorized_corr(arr_resid, arr_2d_resid)
def vectorized_semipartial_corr(arr, c, arr_2D, which='2D', stack_intercept=True):
    """Semipartial correlation; c is partialled out of one side only."""
    if arr.ndim == 1:
        arr = arr[:, np.newaxis]
    if c.ndim == 1:
        c = c[:, np.newaxis]
    if stack_intercept:
        c = np.hstack((np.ones((c.shape[0], 1)), c))
    if which == '2D':
        arr_2D_resid = arr_2D - c.dot(np.linalg.lstsq(c, arr_2D, rcond=None)[0])
        return vectorized_corr(arr, arr_2D_resid)
    else:
        arr_resid = arr - c.dot(np.linalg.lstsq(c, arr)[0])
        return vectorized_corr(arr_resid, arr_2D)
def rpdf(rho, n, rs):
    """Sampling density of correlation rs; rho = population correlation."""
    lnum = np.log(n-2) + gammaln(n-1) + np.log((1-rho**2)**(.5*(n-1))) + np.log((1-rs**2)**(.5*(n-4)))
    lden = np.log(np.sqrt(2*np.pi)) + gammaln(n-.5) + np.log((1-rho*rs)**(n-3/2))
    fac = lnum - lden
    hyp = hyp2f1(.5, .5, (2*n-1)/2, (rho*rs+1)/2)
    return np.exp(fac) * hyp
| true | true |
1c2df018e40f65e521e67b3ef8b247d491b3a2e4 | 1,628 | py | Python | Tetris/events.py | Yatsuuw/Jeux-Python | 3c9bce9f41b537897ea88ef1fe329be6820ab7aa | [
"MIT"
] | null | null | null | Tetris/events.py | Yatsuuw/Jeux-Python | 3c9bce9f41b537897ea88ef1fe329be6820ab7aa | [
"MIT"
] | null | null | null | Tetris/events.py | Yatsuuw/Jeux-Python | 3c9bce9f41b537897ea88ef1fe329be6820ab7aa | [
"MIT"
] | null | null | null | import pygame
from sys import exit
# écouter chaque événement et réagir
def check_events(sqs, status, AI):
for event in pygame.event.get():
if event.type == pygame.QUIT:
exit()
if event.type == pygame.KEYDOWN:
key_down(sqs, event.key, status)
if event.type == pygame.KEYUP:
key_up(event.key, status)
if status.is_AI():
AI.control(sqs, status)
# traiter les touches qui sont enfoncées
def key_down(sqs, key, status):
if status.is_game_new():
status.game_status = status.ACTIVE
elif status.is_game_over():
status.game_status = status.RENEW
status.new_AI = False
if key == pygame.K_q: # q stands for quit
exit()
if key == pygame.K_DOWN:
status.down = True
elif key == pygame.K_LEFT:
status.left = True
sqs.clock.update_left_down()
elif key == pygame.K_RIGHT:
status.right = True
sqs.clock.update_right_down()
elif key == pygame.K_UP:
status.rotate = True
elif key == pygame.K_SPACE:
status.straight_drop = True
if key == pygame.K_a:
status.AI = True
status.new_AI = True
sqs.st.adjust_for_AI()
# traiter les clés qui sont libérées
def key_up(key, status):
if key == pygame.K_q:
exit()
if key == pygame.K_DOWN:
status.down = False
elif key == pygame.K_LEFT:
status.left = False
elif key == pygame.K_RIGHT:
status.right = False
elif key == pygame.K_UP:
status.rotate = False
elif key == pygame.K_SPACE:
status.straight_drop = False
| 29.071429 | 47 | 0.60688 | import pygame
from sys import exit
def check_events(sqs, status, AI):
for event in pygame.event.get():
if event.type == pygame.QUIT:
exit()
if event.type == pygame.KEYDOWN:
key_down(sqs, event.key, status)
if event.type == pygame.KEYUP:
key_up(event.key, status)
if status.is_AI():
AI.control(sqs, status)
def key_down(sqs, key, status):
if status.is_game_new():
status.game_status = status.ACTIVE
elif status.is_game_over():
status.game_status = status.RENEW
status.new_AI = False
if key == pygame.K_q:
exit()
if key == pygame.K_DOWN:
status.down = True
elif key == pygame.K_LEFT:
status.left = True
sqs.clock.update_left_down()
elif key == pygame.K_RIGHT:
status.right = True
sqs.clock.update_right_down()
elif key == pygame.K_UP:
status.rotate = True
elif key == pygame.K_SPACE:
status.straight_drop = True
if key == pygame.K_a:
status.AI = True
status.new_AI = True
sqs.st.adjust_for_AI()
def key_up(key, status):
if key == pygame.K_q:
exit()
if key == pygame.K_DOWN:
status.down = False
elif key == pygame.K_LEFT:
status.left = False
elif key == pygame.K_RIGHT:
status.right = False
elif key == pygame.K_UP:
status.rotate = False
elif key == pygame.K_SPACE:
status.straight_drop = False
| true | true |
1c2df1ce4dc3ae3a428f9f69916080e0009f64ea | 29,376 | py | Python | sphinxcontrib/matlab.py | ilent2/matlabdomain | 73776457ca0f81266de9ada227354e4322a92bbe | [
"BSD-2-Clause"
] | null | null | null | sphinxcontrib/matlab.py | ilent2/matlabdomain | 73776457ca0f81266de9ada227354e4322a92bbe | [
"BSD-2-Clause"
] | null | null | null | sphinxcontrib/matlab.py | ilent2/matlabdomain | 73776457ca0f81266de9ada227354e4322a92bbe | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
sphinxcontrib.matlab
~~~~~~~~~~~~~~~~~~~~
The MATLAB domain.
:copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import absolute_import, unicode_literals
from . import mat_documenters as doc
from . import mat_directives
import re
from docutils import nodes
from docutils.parsers.rst import directives, Directive
from sphinx import addnodes
from sphinx.roles import XRefRole
from sphinx.locale import _
from sphinx.domains import Domain, ObjType, Index
from sphinx.directives import ObjectDescription
from sphinx.util.nodes import make_refnode
from sphinx.util.docfields import Field, GroupedField, TypedField
# REs for MATLAB signatures
mat_sig_re = re.compile(
r'''^ ([+@]?[+@\w.]*\.)? # class name(s)
([+@]?\w+) \s* # thing name
(?: \((.*)\) # optional: arguments
(?:\s* -> \s* (.*))? # return annotation
)? $ # and nothing more
''', re.VERBOSE)
def _pseudo_parse_arglist(signode, arglist):
""""Parse" a list of arguments separated by commas.
Arguments can have "optional" annotations given by enclosing them in
brackets. Currently, this will split at any comma, even if it's inside a
string literal (e.g. default argument value).
"""
paramlist = addnodes.desc_parameterlist()
stack = [paramlist]
try:
for argument in arglist.split(','):
argument = argument.strip()
ends_open = ends_close = 0
while argument.startswith('['):
stack.append(addnodes.desc_optional())
stack[-2] += stack[-1]
argument = argument[1:].strip()
while argument.startswith(']'):
stack.pop()
argument = argument[1:].strip()
while argument.endswith(']'):
ends_close += 1
argument = argument[:-1].strip()
while argument.endswith('['):
ends_open += 1
argument = argument[:-1].strip()
if argument:
stack[-1] += addnodes.desc_parameter(argument, argument)
while ends_open:
stack.append(addnodes.desc_optional())
stack[-2] += stack[-1]
ends_open -= 1
while ends_close:
stack.pop()
ends_close -= 1
if len(stack) != 1:
raise IndexError
except IndexError:
# if there are too few or too many elements on the stack, just give up
# and treat the whole argument list as one argument, discarding the
# already partially populated paramlist node
signode += addnodes.desc_parameterlist()
signode[-1] += addnodes.desc_parameter(arglist, arglist)
else:
signode += paramlist
class MatObject(ObjectDescription):
    """
    Description of a general MATLAB object.
    """
    # Options accepted by every MATLAB object directive.
    option_spec = {
        'noindex': directives.flag,
        'module': directives.unchanged,
        'annotation': directives.unchanged,
    }
    # Info-field-list fields (":param x:", ":returns:", ...) recognised
    # inside the directive body.
    doc_field_types = [
        TypedField('parameter', label=_('Parameters'),
                   names=('param', 'parameter', 'arg', 'argument',
                          'keyword', 'kwarg', 'kwparam'),
                   typerolename='obj', typenames=('paramtype', 'type'),
                   can_collapse=True),
        TypedField('variable', label=_('Variables'), rolename='obj',
                   names=('var', 'ivar', 'cvar'),
                   typerolename='obj', typenames=('vartype',),
                   can_collapse=True),
        GroupedField('exceptions', label=_('Raises'), rolename='exc',
                     names=('raises', 'raise', 'exception', 'except'),
                     can_collapse=True),
        Field('returnvalue', label=_('Returns'), has_arg=False,
              names=('returns', 'return')),
        Field('returntype', label=_('Return type'), has_arg=False,
              names=('rtype',)),
    ]
    def get_signature_prefix(self, sig):
        """May return a prefix to put before the object name in the
        signature.
        """
        return ''
    def needs_arglist(self):
        """May return true if an empty argument list is to be generated even if
        the document contains none.
        """
        return False
    def handle_signature(self, sig, signode):
        """Transform a MATLAB signature into RST nodes.
        Return (fully qualified name of the thing, classname if any).
        If inside a class, the current class name is handled intelligently:
        * it is stripped from the displayed name if present
        * it is added to the full name (return value) if not present
        """
        m = mat_sig_re.match(sig)
        if m is None:
            raise ValueError
        name_prefix, name, arglist, retann = m.groups()
        # determine module and class name (if applicable), as well as full name
        modname = self.options.get(
            'module', self.env.temp_data.get('mat:module'))
        classname = self.env.temp_data.get('mat:class')
        if classname:
            add_module = False
            if name_prefix and name_prefix.startswith(classname):
                fullname = name_prefix + name
                # class name is given again in the signature
                name_prefix = name_prefix[len(classname):].lstrip('.')
            elif name_prefix:
                # class name is given in the signature, but different
                # (shouldn't happen)
                fullname = classname + '.' + name_prefix + name
            else:
                # class name is not given in the signature
                fullname = classname + '.' + name
        else:
            add_module = True
            if name_prefix:
                classname = name_prefix.rstrip('.')
                fullname = name_prefix + name
            else:
                classname = ''
                fullname = name
        signode['module'] = modname
        signode['class'] = classname
        signode['fullname'] = fullname
        # e.g. "class " for class-like objects, "static " for methods
        sig_prefix = self.get_signature_prefix(sig)
        if sig_prefix:
            signode += addnodes.desc_annotation(sig_prefix, sig_prefix)
        if name_prefix:
            signode += addnodes.desc_addname(name_prefix, name_prefix)
        # exceptions are a special case, since they are documented in the
        # 'exceptions' module.
        elif add_module and self.env.config.add_module_names:
            modname = self.options.get(
                'module', self.env.temp_data.get('mat:module'))
            if modname and modname != 'exceptions':
                nodetext = modname + '.'
                signode += addnodes.desc_addname(nodetext, nodetext)
        anno = self.options.get('annotation')
        signode += addnodes.desc_name(name, name)
        if not arglist:
            if self.needs_arglist():
                # for callables, add an empty parameter list
                signode += addnodes.desc_parameterlist()
            if retann:
                signode += addnodes.desc_returns(retann, retann)
            if anno:
                signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
            return fullname, name_prefix
        _pseudo_parse_arglist(signode, arglist)
        if retann:
            signode += addnodes.desc_returns(retann, retann)
        if anno:
            signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
        return fullname, name_prefix
    def get_index_text(self, modname, name):
        """Return the text for the index entry of the object."""
        raise NotImplementedError('must be implemented in subclasses')
    def add_target_and_index(self, name_cls, sig, signode):
        """Register *signode* as a link target and add an index entry."""
        modname = self.options.get(
            'module', self.env.temp_data.get('mat:module'))
        fullname = (modname and modname + '.' or '') + name_cls[0]
        # note target
        if fullname not in self.state.document.ids:
            signode['names'].append(fullname)
            signode['ids'].append(fullname)
            signode['first'] = (not self.names)
            self.state.document.note_explicit_target(signode)
            objects = self.env.domaindata['mat']['objects']
            if fullname in objects:
                # same fullname documented twice in the project
                self.state_machine.reporter.warning(
                    'duplicate object description of %s, ' % fullname +
                    'other instance in ' +
                    self.env.doc2path(objects[fullname][0]) +
                    ', use :noindex: for one of them',
                    line=self.lineno)
            objects[fullname] = (self.env.docname, self.objtype)
        indextext = self.get_index_text(modname, name_cls)
        if indextext:
            entry = ('single', indextext, fullname, '', None)
            self.indexnode['entries'].append(entry)
    def before_content(self):
        # needed for automatic qualification of members (reset in subclasses)
        self.clsname_set = False
    def after_content(self):
        # leaving the directive body: forget the current class name
        if self.clsname_set:
            self.env.temp_data['mat:class'] = None
class MatModulelevel(MatObject):
    """
    Description of an object on module level (functions, data).
    """
    def needs_arglist(self):
        # Only functions get an (empty) parameter list appended.
        return self.objtype == 'function'
    def get_index_text(self, modname, name_cls):
        """Build the general-index entry for a module-level object."""
        name = name_cls[0]
        if self.objtype == 'function':
            if modname:
                return _('%s() (in module %s)') % (name, modname)
            return _('%s() (built-in function)') % name
        if self.objtype == 'data':
            if modname:
                return _('%s (in module %s)') % (name, modname)
            return _('%s (built-in variable)') % name
        return ''
class MatClasslike(MatObject):
    """
    Description of a class-like object (classes, interfaces, exceptions).
    """
    def get_signature_prefix(self, sig):
        # e.g. "class " or "exception " before the object name
        return self.objtype + ' '
    def get_index_text(self, modname, name_cls):
        """Build the general-index entry for a class-like object."""
        name = name_cls[0]
        if self.objtype == 'class':
            if modname:
                return _('%s (class in %s)') % (name, modname)
            return _('%s (built-in class)') % name
        if self.objtype == 'exception':
            return name
        return ''
    def before_content(self):
        MatObject.before_content(self)
        if self.names:
            # remember the class name so members can be auto-qualified
            self.env.temp_data['mat:class'] = self.names[0][0]
            self.clsname_set = True
class MatClassmember(MatObject):
    """
    Description of a class member (methods, attributes).
    """
    def needs_arglist(self):
        # Only method-like members ('method', 'staticmethod',
        # 'classmethod') take an argument list.
        return self.objtype.endswith('method')
    def get_signature_prefix(self, sig):
        """Return 'static '/'classmethod ' for the respective member kinds."""
        if self.objtype == 'staticmethod':
            return 'static '
        elif self.objtype == 'classmethod':
            return 'classmethod '
        return ''
    def get_index_text(self, modname, name_cls):
        """Build the general-index entry for this member.

        ``name_cls`` is the (fullname, classname) pair produced by
        ``handle_signature``.
        """
        name, cls = name_cls
        add_modules = self.env.config.add_module_names
        if self.objtype == 'method':
            try:
                clsname, methname = name.rsplit('.', 1)
            except ValueError:
                # no class part in the name
                if modname:
                    return _('%s() (in module %s)') % (name, modname)
                else:
                    return '%s()' % name
            if modname and add_modules:
                return _('%s() (%s.%s method)') % (methname, modname, clsname)
            else:
                return _('%s() (%s method)') % (methname, clsname)
        elif self.objtype == 'staticmethod':
            try:
                clsname, methname = name.rsplit('.', 1)
            except ValueError:
                if modname:
                    return _('%s() (in module %s)') % (name, modname)
                else:
                    return '%s()' % name
            if modname and add_modules:
                return _('%s() (%s.%s static method)') % (methname, modname,
                                                          clsname)
            else:
                return _('%s() (%s static method)') % (methname, clsname)
        elif self.objtype == 'classmethod':
            try:
                clsname, methname = name.rsplit('.', 1)
            except ValueError:
                if modname:
                    return _('%s() (in module %s)') % (name, modname)
                else:
                    return '%s()' % name
            # Consistency fix: honour add_module_names like the 'method'
            # and 'staticmethod' branches above (was: `if modname:`).
            if modname and add_modules:
                return _('%s() (%s.%s class method)') % (methname, modname,
                                                         clsname)
            else:
                return _('%s() (%s class method)') % (methname, clsname)
        elif self.objtype == 'attribute':
            try:
                clsname, attrname = name.rsplit('.', 1)
            except ValueError:
                if modname:
                    return _('%s (in module %s)') % (name, modname)
                else:
                    return name
            if modname and add_modules:
                return _('%s (%s.%s attribute)') % (attrname, modname, clsname)
            else:
                return _('%s (%s attribute)') % (attrname, clsname)
        else:
            return ''
    def before_content(self):
        """Qualify references in the member body against the owner class."""
        MatObject.before_content(self)
        lastname = self.names and self.names[-1][1]
        if lastname and not self.env.temp_data.get('mat:class'):
            self.env.temp_data['mat:class'] = lastname.strip('.')
            self.clsname_set = True
class MatDecoratorMixin(object):
    """
    Mixin for decorator directives.
    """
    def handle_signature(self, sig, signode):
        """Prepend an ``@`` marker to the rendered signature."""
        ret = super(MatDecoratorMixin, self).handle_signature(sig, signode)
        signode.insert(0, addnodes.desc_addname('@', '@'))
        return ret
    def needs_arglist(self):
        # decorators are displayed without an argument list
        return False
class MatDecoratorFunction(MatDecoratorMixin, MatModulelevel):
    """
    Directive to mark functions meant to be used as decorators.
    """
    def run(self):
        """Run as a function directive under the ``mat:function`` name."""
        # a decorator function is a function after all
        self.name = 'mat:function'
        return MatModulelevel.run(self)
class MatDecoratorMethod(MatDecoratorMixin, MatClassmember):
    """
    Directive to mark methods meant to be used as decorators.
    """
    def run(self):
        """Run as a method directive under the ``mat:method`` name."""
        self.name = 'mat:method'
        return MatClassmember.run(self)
class MatModule(Directive):
    """
    Directive to mark description of a new module.
    """
    has_content = False
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = False
    option_spec = {
        'platform': lambda x: x,
        'synopsis': lambda x: x,
        'noindex': directives.flag,
        'deprecated': directives.flag,
    }
    def run(self):
        """Record the module, creating its link target and index entry."""
        env = self.state.document.settings.env
        modname = self.arguments[0].strip()
        noindex = 'noindex' in self.options
        # make this the "current" module for following object directives
        env.temp_data['mat:module'] = modname
        ret = []
        if not noindex:
            env.domaindata['mat']['modules'][modname] = \
                (env.docname, self.options.get('synopsis', ''),
                 self.options.get('platform', ''), 'deprecated' in self.options)
            # make a duplicate entry in 'objects' to facilitate searching for
            # the module in MATLABDomain.find_obj()
            env.domaindata['mat']['objects'][modname] = (env.docname, 'module')
            targetnode = nodes.target('', '', ids=['module-' + modname],
                                      ismod=True)
            self.state.document.note_explicit_target(targetnode)
            # the platform and synopsis aren't printed; in fact, they are only
            # used in the modindex currently
            ret.append(targetnode)
            indextext = _('%s (module)') % modname
            entry = ('single', indextext, 'module-' + modname, '', None)
            inode = addnodes.index(entries=[entry])
            ret.append(inode)
        return ret
class MatCurrentModule(Directive):
    """
    This directive is just to tell Sphinx that we're documenting
    stuff in module foo, but links to module foo won't lead here.

    Passing the literal argument ``None`` clears the current module.
    """

    has_content = False
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = False
    option_spec = {}

    def run(self):
        env = self.state.document.settings.env
        modname = self.arguments[0].strip()
        # The string 'None' resets the module context instead of naming one.
        env.temp_data['mat:module'] = None if modname == 'None' else modname
        return []
class MatXRefRole(XRefRole):
    """Cross-reference role that records the module/class context so the
    domain can qualify the target when resolving."""

    def process_link(self, env, refnode, has_explicit_title, title, target):
        # Stash the current context on the node for resolve_xref().
        refnode['mat:module'] = env.temp_data.get('mat:module')
        refnode['mat:class'] = env.temp_data.get('mat:class')
        if not has_explicit_title:
            title = title.lstrip('.')    # a leading dot only matters for the target
            target = target.lstrip('~')  # a leading tilde only matters for the title
            # A tilde prefix hides the module/class part of the shown title.
            if title.startswith('~'):
                _unused, _sep, short = title[1:].rpartition('.')
                title = short
        # A dot prefix means: search the most specific namespace first
        # instead of starting with the builtins.
        if target.startswith('.'):
            refnode['refspecific'] = True
            target = target[1:]
        return title, target
class MATLABModuleIndex(Index):
    """
    Index subclass to provide the MATLAB module index.
    """
    name = 'modindex'
    localname = _('MATLAB Module Index')
    shortname = _('matlab index')
    def generate(self, docnames=None):
        """Build the content of the module index page.

        Returns ``(content, collapse)``: *content* is a sorted list of
        ``(first_letter, [entry, ...])`` pairs, *collapse* tells the
        template whether submodule groups should start collapsed.
        If *docnames* is given, only modules from those documents appear.
        """
        content = {}
        # list of prefixes to ignore
        ignores = self.domain.env.config['modindex_common_prefix']
        ignores = sorted(ignores, key=len, reverse=True)
        # list of all modules, sorted by module name
        modules = sorted(iter(self.domain.data['modules'].items()),
                         key=lambda x: x[0].lower())
        # sort out collapsable modules
        prev_modname = ''
        num_toplevels = 0
        for modname, (docname, synopsis, platforms, deprecated) in modules:
            if docnames and docname not in docnames:
                continue
            # strip the longest matching configured common prefix, if any
            for ignore in ignores:
                if modname.startswith(ignore):
                    modname = modname[len(ignore):]
                    stripped = ignore
                    break
            else:
                stripped = ''
            # we stripped the whole module name?
            if not modname:
                modname, stripped = stripped, ''
            entries = content.setdefault(modname[0].lower(), [])
            package = modname.split('.')[0]
            if package != modname:
                # it's a submodule
                if prev_modname == package:
                    # first submodule - make parent a group head
                    if entries:
                        entries[-1][1] = 1
                elif not prev_modname.startswith(package):
                    # submodule without parent in list, add dummy entry
                    entries.append([stripped + package, 1, '', '', '', '', ''])
                subtype = 2
            else:
                num_toplevels += 1
                subtype = 0
            qualifier = deprecated and _('Deprecated') or ''
            entries.append([stripped + modname, subtype, docname,
                            'module-' + stripped + modname, platforms,
                            qualifier, synopsis])
            prev_modname = modname
        # apply heuristics when to collapse modindex at page load:
        # only collapse if number of toplevel modules is larger than
        # number of submodules
        collapse = len(modules) - num_toplevels < num_toplevels
        # sort by first letter
        content = sorted(content.items())
        return content, collapse
class MATLABDomain(Domain):
    """MATLAB language domain."""
    name = 'mat'
    label = 'MATLAB'
    object_types = {
        'function': ObjType(_('function'), 'func', 'obj'),
        'data': ObjType(_('data'), 'data', 'obj'),
        'class': ObjType(_('class'), 'class', 'obj'),
        'exception': ObjType(_('exception'), 'exc', 'obj'),
        'method': ObjType(_('method'), 'meth', 'obj'),
        'classmethod': ObjType(_('class method'), 'meth', 'obj'),
        'staticmethod': ObjType(_('static method'), 'meth', 'obj'),
        'attribute': ObjType(_('attribute'), 'attr', 'obj'),
        'module': ObjType(_('module'), 'mod', 'obj'),
        'script': ObjType(_('script'), 'scpt', 'obj'),
    }
    directives = {
        'function': MatModulelevel,
        'data': MatModulelevel,
        'class': MatClasslike,
        'exception': MatClasslike,
        'method': MatClassmember,
        'classmethod': MatClassmember,
        'staticmethod': MatClassmember,
        'attribute': MatClassmember,
        'module': MatModule,
        'currentmodule': MatCurrentModule,
        'decorator': MatDecoratorFunction,
        'decoratormethod': MatDecoratorMethod,
        'script': MatModulelevel,
    }
    roles = {
        'data': MatXRefRole(),
        'exc': MatXRefRole(),
        'func': MatXRefRole(fix_parens=True),
        'class': MatXRefRole(),
        'const': MatXRefRole(),
        'attr': MatXRefRole(),
        'meth': MatXRefRole(fix_parens=True),
        'mod': MatXRefRole(),
        'obj': MatXRefRole(),
        'scpt': MatXRefRole(),
    }
    initial_data = {
        'objects': {},  # fullname -> docname, objtype
        'modules': {},  # modname -> docname, synopsis, platform, deprecated
    }
    indices = [
        MATLABModuleIndex,
    ]
    def clear_doc(self, docname):
        """Drop every object and module that was recorded for *docname*."""
        for fullname, (fn, _) in list(self.data['objects'].items()): # noqa: 401
            if fn == docname:
                del self.data['objects'][fullname]
        for modname, (fn, _, _, _) in list(self.data['modules'].items()):
            if fn == docname:
                del self.data['modules'][modname]
    def find_obj(self, env, modname, classname, name, type, searchmode=0):
        """Find a MATLAB object for "name", perhaps using the given module
        and/or classname. Returns a list of (name, object entry) tuples.

        ``searchmode == 1`` (set for dot-prefixed targets) tries
        progressively less specific namespaces and finally a fuzzy
        suffix match; ``searchmode == 0`` tries exact names only.
        """
        # skip parens
        if name[-2:] == '()':
            name = name[:-2]
        if not name:
            return []
        objects = self.data['objects']
        matches = []
        newname = None
        if searchmode == 1:
            objtypes = self.objtypes_for_role(type)
            if objtypes is not None:
                if modname and classname:
                    fullname = modname + '.' + classname + '.' + name
                    if fullname in objects and objects[fullname][1] in objtypes:
                        newname = fullname
                if not newname:
                    if modname and modname + '.' + name in objects and \
                       objects[modname + '.' + name][1] in objtypes:
                        newname = modname + '.' + name
                    elif name in objects and objects[name][1] in objtypes:
                        newname = name
                    else:
                        # "fuzzy" searching mode
                        searchname = '.' + name
                        matches = [(oname, objects[oname]) for oname in objects
                                   if oname.endswith(searchname)
                                   and objects[oname][1] in objtypes]
        else:
            # NOTE: searching for exact match, object type is not considered
            if name in objects:
                newname = name
            elif type == 'mod':
                # only exact matches allowed for modules
                return []
            elif classname and classname + '.' + name in objects:
                newname = classname + '.' + name
            elif modname and modname + '.' + name in objects:
                newname = modname + '.' + name
            elif modname and classname and \
                 modname + '.' + classname + '.' + name in objects:
                newname = modname + '.' + classname + '.' + name
            # special case: builtin exceptions have module "exceptions" set
            elif type == 'exc' and '.' not in name and \
                 'exceptions.' + name in objects:
                newname = 'exceptions.' + name
            # special case: object methods
            elif type in ('func', 'meth') and '.' not in name and \
                 'object.' + name in objects:
                newname = 'object.' + name
        if newname is not None:
            matches.append((newname, objects[newname]))
        return matches
    def resolve_xref(self, env, fromdocname, builder,
                     type, target, node, contnode):
        """Resolve a cross-reference; warns on ambiguity and uses the
        first match. Module targets get an enriched link title."""
        modname = node.get('mat:module')
        clsname = node.get('mat:class')
        searchmode = node.hasattr('refspecific') and 1 or 0
        matches = self.find_obj(env, modname, clsname, target,
                                type, searchmode)
        if not matches:
            return None
        elif len(matches) > 1:
            env.warn_node(
                'more than one target found for cross-reference '
                '%r: %s' % (target, ', '.join(match[0] for match in matches)),
                node)
        name, obj = matches[0]
        if obj[1] == 'module':
            # get additional info for modules
            docname, synopsis, platform, deprecated = self.data['modules'][name]
            assert docname == obj[0]
            title = name
            if synopsis:
                title += ': ' + synopsis
            if deprecated:
                title += _(' (deprecated)')
            if platform:
                title += ' (' + platform + ')'
            return make_refnode(builder, fromdocname, docname,
                                'module-' + name, contnode, title)
        else:
            return make_refnode(builder, fromdocname, obj[0], name,
                                contnode, name)
    def get_objects(self):
        """Yield (name, dispname, type, docname, anchor, priority) for the
        global object inventory; modules first, then all other objects."""
        for modname, info in self.data['modules'].items():
            yield (modname, modname, 'module', info[0], 'module-' + modname, 0)
        for refname, (docname, type) in self.data['objects'].items():
            yield (refname, refname, type, docname, refname, 1)
def setup(app):
    """Sphinx entry point: register the MATLAB domain, its configuration
    values, and one autodoc documenter + ``auto*`` directive per object
    type, in the same order as before."""
    app.add_domain(MATLABDomain)
    # autodoc
    app.add_config_value('matlab_src_dir', None, 'env')
    app.add_config_value('matlab_src_encoding', None, 'env')
    # (objtype, auto-directive name, documenter class) triples.
    registrations = [
        ('mat:module', 'automodule', doc.MatModuleDocumenter),
        ('mat:function', 'autofunction', doc.MatFunctionDocumenter),
        ('mat:class', 'autoclass', doc.MatClassDocumenter),
        ('mat:method', 'automethod', doc.MatMethodDocumenter),
        ('mat:script', 'autoscript', doc.MatScriptDocumenter),
        ('mat:exception', 'autoexception', doc.MatExceptionDocumenter),
        ('mat:attribute', 'autoattribute', doc.MatAttributeDocumenter),
        ('mat:data', 'autodata', doc.MatDataDocumenter),
        ('mat:instanceattribute', 'autoinstanceattribute',
         doc.MatInstanceAttributeDocumenter),
    ]
    for objtype, directive_name, documenter in registrations:
        app.registry.add_documenter(objtype, documenter)
        app.add_directive_to_domain('mat',
                                    directive_name,
                                    mat_directives.MatlabAutodocDirective)
    app.add_autodoc_attrgetter(doc.MatModule, doc.MatModule.getter)
    app.add_autodoc_attrgetter(doc.MatClass, doc.MatClass.getter)
| 38.150649 | 92 | 0.542382 |
from __future__ import absolute_import, unicode_literals
from . import mat_documenters as doc
from . import mat_directives
import re
from docutils import nodes
from docutils.parsers.rst import directives, Directive
from sphinx import addnodes
from sphinx.roles import XRefRole
from sphinx.locale import _
from sphinx.domains import Domain, ObjType, Index
from sphinx.directives import ObjectDescription
from sphinx.util.nodes import make_refnode
from sphinx.util.docfields import Field, GroupedField, TypedField
mat_sig_re = re.compile(
r'''^ ([+@]?[+@\w.]*\.)? # class name(s)
([+@]?\w+) \s* # thing name
(?: \((.*)\) # optional: arguments
(?:\s* -> \s* (.*))? # return annotation
)? $ # and nothing more
''', re.VERBOSE)
def _pseudo_parse_arglist(signode, arglist):
paramlist = addnodes.desc_parameterlist()
stack = [paramlist]
try:
for argument in arglist.split(','):
argument = argument.strip()
ends_open = ends_close = 0
while argument.startswith('['):
stack.append(addnodes.desc_optional())
stack[-2] += stack[-1]
argument = argument[1:].strip()
while argument.startswith(']'):
stack.pop()
argument = argument[1:].strip()
while argument.endswith(']'):
ends_close += 1
argument = argument[:-1].strip()
while argument.endswith('['):
ends_open += 1
argument = argument[:-1].strip()
if argument:
stack[-1] += addnodes.desc_parameter(argument, argument)
while ends_open:
stack.append(addnodes.desc_optional())
stack[-2] += stack[-1]
ends_open -= 1
while ends_close:
stack.pop()
ends_close -= 1
if len(stack) != 1:
raise IndexError
except IndexError:
signode += addnodes.desc_parameterlist()
signode[-1] += addnodes.desc_parameter(arglist, arglist)
else:
signode += paramlist
class MatObject(ObjectDescription):
option_spec = {
'noindex': directives.flag,
'module': directives.unchanged,
'annotation': directives.unchanged,
}
doc_field_types = [
TypedField('parameter', label=_('Parameters'),
names=('param', 'parameter', 'arg', 'argument',
'keyword', 'kwarg', 'kwparam'),
typerolename='obj', typenames=('paramtype', 'type'),
can_collapse=True),
TypedField('variable', label=_('Variables'), rolename='obj',
names=('var', 'ivar', 'cvar'),
typerolename='obj', typenames=('vartype',),
can_collapse=True),
GroupedField('exceptions', label=_('Raises'), rolename='exc',
names=('raises', 'raise', 'exception', 'except'),
can_collapse=True),
Field('returnvalue', label=_('Returns'), has_arg=False,
names=('returns', 'return')),
Field('returntype', label=_('Return type'), has_arg=False,
names=('rtype',)),
]
def get_signature_prefix(self, sig):
return ''
def needs_arglist(self):
return False
def handle_signature(self, sig, signode):
m = mat_sig_re.match(sig)
if m is None:
raise ValueError
name_prefix, name, arglist, retann = m.groups()
modname = self.options.get(
'module', self.env.temp_data.get('mat:module'))
classname = self.env.temp_data.get('mat:class')
if classname:
add_module = False
if name_prefix and name_prefix.startswith(classname):
fullname = name_prefix + name
name_prefix = name_prefix[len(classname):].lstrip('.')
elif name_prefix:
fullname = classname + '.' + name_prefix + name
else:
# class name is not given in the signature
fullname = classname + '.' + name
else:
add_module = True
if name_prefix:
classname = name_prefix.rstrip('.')
fullname = name_prefix + name
else:
classname = ''
fullname = name
signode['module'] = modname
signode['class'] = classname
signode['fullname'] = fullname
sig_prefix = self.get_signature_prefix(sig)
if sig_prefix:
signode += addnodes.desc_annotation(sig_prefix, sig_prefix)
if name_prefix:
signode += addnodes.desc_addname(name_prefix, name_prefix)
# exceptions are a special case, since they are documented in the
# 'exceptions' module.
elif add_module and self.env.config.add_module_names:
modname = self.options.get(
'module', self.env.temp_data.get('mat:module'))
if modname and modname != 'exceptions':
nodetext = modname + '.'
signode += addnodes.desc_addname(nodetext, nodetext)
anno = self.options.get('annotation')
signode += addnodes.desc_name(name, name)
if not arglist:
if self.needs_arglist():
# for callables, add an empty parameter list
signode += addnodes.desc_parameterlist()
if retann:
signode += addnodes.desc_returns(retann, retann)
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
return fullname, name_prefix
_pseudo_parse_arglist(signode, arglist)
if retann:
signode += addnodes.desc_returns(retann, retann)
if anno:
signode += addnodes.desc_annotation(' ' + anno, ' ' + anno)
return fullname, name_prefix
def get_index_text(self, modname, name):
raise NotImplementedError('must be implemented in subclasses')
def add_target_and_index(self, name_cls, sig, signode):
modname = self.options.get(
'module', self.env.temp_data.get('mat:module'))
fullname = (modname and modname + '.' or '') + name_cls[0]
# note target
if fullname not in self.state.document.ids:
signode['names'].append(fullname)
signode['ids'].append(fullname)
signode['first'] = (not self.names)
self.state.document.note_explicit_target(signode)
objects = self.env.domaindata['mat']['objects']
if fullname in objects:
self.state_machine.reporter.warning(
'duplicate object description of %s, ' % fullname +
'other instance in ' +
self.env.doc2path(objects[fullname][0]) +
', use :noindex: for one of them',
line=self.lineno)
objects[fullname] = (self.env.docname, self.objtype)
indextext = self.get_index_text(modname, name_cls)
if indextext:
entry = ('single', indextext, fullname, '', None)
self.indexnode['entries'].append(entry)
def before_content(self):
# needed for automatic qualification of members (reset in subclasses)
self.clsname_set = False
def after_content(self):
if self.clsname_set:
self.env.temp_data['mat:class'] = None
class MatModulelevel(MatObject):
def needs_arglist(self):
return self.objtype == 'function'
def get_index_text(self, modname, name_cls):
if self.objtype == 'function':
if not modname:
return _('%s() (built-in function)') % name_cls[0]
return _('%s() (in module %s)') % (name_cls[0], modname)
elif self.objtype == 'data':
if not modname:
return _('%s (built-in variable)') % name_cls[0]
return _('%s (in module %s)') % (name_cls[0], modname)
else:
return ''
class MatClasslike(MatObject):
def get_signature_prefix(self, sig):
return self.objtype + ' '
def get_index_text(self, modname, name_cls):
if self.objtype == 'class':
if not modname:
return _('%s (built-in class)') % name_cls[0]
return _('%s (class in %s)') % (name_cls[0], modname)
elif self.objtype == 'exception':
return name_cls[0]
else:
return ''
def before_content(self):
MatObject.before_content(self)
if self.names:
self.env.temp_data['mat:class'] = self.names[0][0]
self.clsname_set = True
class MatClassmember(MatObject):
def needs_arglist(self):
return self.objtype.endswith('method')
def get_signature_prefix(self, sig):
if self.objtype == 'staticmethod':
return 'static '
elif self.objtype == 'classmethod':
return 'classmethod '
return ''
def get_index_text(self, modname, name_cls):
name, cls = name_cls
add_modules = self.env.config.add_module_names
if self.objtype == 'method':
try:
clsname, methname = name.rsplit('.', 1)
except ValueError:
if modname:
return _('%s() (in module %s)') % (name, modname)
else:
return '%s()' % name
if modname and add_modules:
return _('%s() (%s.%s method)') % (methname, modname, clsname)
else:
return _('%s() (%s method)') % (methname, clsname)
elif self.objtype == 'staticmethod':
try:
clsname, methname = name.rsplit('.', 1)
except ValueError:
if modname:
return _('%s() (in module %s)') % (name, modname)
else:
return '%s()' % name
if modname and add_modules:
return _('%s() (%s.%s static method)') % (methname, modname,
clsname)
else:
return _('%s() (%s static method)') % (methname, clsname)
elif self.objtype == 'classmethod':
try:
clsname, methname = name.rsplit('.', 1)
except ValueError:
if modname:
return _('%s() (in module %s)') % (name, modname)
else:
return '%s()' % name
if modname:
return _('%s() (%s.%s class method)') % (methname, modname,
clsname)
else:
return _('%s() (%s class method)') % (methname, clsname)
elif self.objtype == 'attribute':
try:
clsname, attrname = name.rsplit('.', 1)
except ValueError:
if modname:
return _('%s (in module %s)') % (name, modname)
else:
return name
if modname and add_modules:
return _('%s (%s.%s attribute)') % (attrname, modname, clsname)
else:
return _('%s (%s attribute)') % (attrname, clsname)
else:
return ''
def before_content(self):
MatObject.before_content(self)
lastname = self.names and self.names[-1][1]
if lastname and not self.env.temp_data.get('mat:class'):
self.env.temp_data['mat:class'] = lastname.strip('.')
self.clsname_set = True
class MatDecoratorMixin(object):
def handle_signature(self, sig, signode):
ret = super(MatDecoratorMixin, self).handle_signature(sig, signode)
signode.insert(0, addnodes.desc_addname('@', '@'))
return ret
def needs_arglist(self):
return False
class MatDecoratorFunction(MatDecoratorMixin, MatModulelevel):
def run(self):
# a decorator function is a function after all
self.name = 'mat:function'
return MatModulelevel.run(self)
class MatDecoratorMethod(MatDecoratorMixin, MatClassmember):
def run(self):
self.name = 'mat:method'
return MatClassmember.run(self)
class MatModule(Directive):
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = False
option_spec = {
'platform': lambda x: x,
'synopsis': lambda x: x,
'noindex': directives.flag,
'deprecated': directives.flag,
}
def run(self):
env = self.state.document.settings.env
modname = self.arguments[0].strip()
noindex = 'noindex' in self.options
env.temp_data['mat:module'] = modname
ret = []
if not noindex:
env.domaindata['mat']['modules'][modname] = \
(env.docname, self.options.get('synopsis', ''),
self.options.get('platform', ''), 'deprecated' in self.options)
# make a duplicate entry in 'objects' to facilitate searching for
# the module in MATLABDomain.find_obj()
env.domaindata['mat']['objects'][modname] = (env.docname, 'module')
targetnode = nodes.target('', '', ids=['module-' + modname],
ismod=True)
self.state.document.note_explicit_target(targetnode)
# the platform and synopsis aren't printed; in fact, they are only
ret.append(targetnode)
indextext = _('%s (module)') % modname
entry = ('single', indextext, 'module-' + modname, '', None)
inode = addnodes.index(entries=[entry])
ret.append(inode)
return ret
class MatCurrentModule(Directive):
has_content = False
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = False
option_spec = {}
def run(self):
env = self.state.document.settings.env
modname = self.arguments[0].strip()
if modname == 'None':
env.temp_data['mat:module'] = None
else:
env.temp_data['mat:module'] = modname
return []
class MatXRefRole(XRefRole):
def process_link(self, env, refnode, has_explicit_title, title, target):
refnode['mat:module'] = env.temp_data.get('mat:module')
refnode['mat:class'] = env.temp_data.get('mat:class')
if not has_explicit_title:
title = title.lstrip('.')
target = target.lstrip('~')
# parts of the contents
if title[0:1] == '~':
title = title[1:]
dot = title.rfind('.')
if dot != -1:
title = title[dot+1:]
# if the first character is a dot, search more specific namespaces first
# else search builtins first
if target[0:1] == '.':
target = target[1:]
refnode['refspecific'] = True
return title, target
class MATLABModuleIndex(Index):
name = 'modindex'
localname = _('MATLAB Module Index')
shortname = _('matlab index')
def generate(self, docnames=None):
content = {}
# list of prefixes to ignore
ignores = self.domain.env.config['modindex_common_prefix']
ignores = sorted(ignores, key=len, reverse=True)
# list of all modules, sorted by module name
modules = sorted(iter(self.domain.data['modules'].items()),
key=lambda x: x[0].lower())
# sort out collapsable modules
prev_modname = ''
num_toplevels = 0
for modname, (docname, synopsis, platforms, deprecated) in modules:
if docnames and docname not in docnames:
continue
for ignore in ignores:
if modname.startswith(ignore):
modname = modname[len(ignore):]
stripped = ignore
break
else:
stripped = ''
# we stripped the whole module name?
if not modname:
modname, stripped = stripped, ''
entries = content.setdefault(modname[0].lower(), [])
package = modname.split('.')[0]
if package != modname:
# it's a submodule
if prev_modname == package:
if entries:
entries[-1][1] = 1
elif not prev_modname.startswith(package):
entries.append([stripped + package, 1, '', '', '', '', ''])
subtype = 2
else:
num_toplevels += 1
subtype = 0
qualifier = deprecated and _('Deprecated') or ''
entries.append([stripped + modname, subtype, docname,
'module-' + stripped + modname, platforms,
qualifier, synopsis])
prev_modname = modname
collapse = len(modules) - num_toplevels < num_toplevels
content = sorted(content.items())
return content, collapse
class MATLABDomain(Domain):
name = 'mat'
label = 'MATLAB'
object_types = {
'function': ObjType(_('function'), 'func', 'obj'),
'data': ObjType(_('data'), 'data', 'obj'),
'class': ObjType(_('class'), 'class', 'obj'),
'exception': ObjType(_('exception'), 'exc', 'obj'),
'method': ObjType(_('method'), 'meth', 'obj'),
'classmethod': ObjType(_('class method'), 'meth', 'obj'),
'staticmethod': ObjType(_('static method'), 'meth', 'obj'),
'attribute': ObjType(_('attribute'), 'attr', 'obj'),
'module': ObjType(_('module'), 'mod', 'obj'),
'script': ObjType(_('script'), 'scpt', 'obj'),
}
directives = {
'function': MatModulelevel,
'data': MatModulelevel,
'class': MatClasslike,
'exception': MatClasslike,
'method': MatClassmember,
'classmethod': MatClassmember,
'staticmethod': MatClassmember,
'attribute': MatClassmember,
'module': MatModule,
'currentmodule': MatCurrentModule,
'decorator': MatDecoratorFunction,
'decoratormethod': MatDecoratorMethod,
'script': MatModulelevel,
}
roles = {
'data': MatXRefRole(),
'exc': MatXRefRole(),
'func': MatXRefRole(fix_parens=True),
'class': MatXRefRole(),
'const': MatXRefRole(),
'attr': MatXRefRole(),
'meth': MatXRefRole(fix_parens=True),
'mod': MatXRefRole(),
'obj': MatXRefRole(),
'scpt': MatXRefRole(),
}
initial_data = {
'objects': {},
'modules': {},
}
indices = [
MATLABModuleIndex,
]
def clear_doc(self, docname):
for fullname, (fn, _) in list(self.data['objects'].items()):
if fn == docname:
del self.data['objects'][fullname]
for modname, (fn, _, _, _) in list(self.data['modules'].items()):
if fn == docname:
del self.data['modules'][modname]
def find_obj(self, env, modname, classname, name, type, searchmode=0):
if name[-2:] == '()':
name = name[:-2]
if not name:
return []
objects = self.data['objects']
matches = []
newname = None
if searchmode == 1:
objtypes = self.objtypes_for_role(type)
if objtypes is not None:
if modname and classname:
fullname = modname + '.' + classname + '.' + name
if fullname in objects and objects[fullname][1] in objtypes:
newname = fullname
if not newname:
if modname and modname + '.' + name in objects and \
objects[modname + '.' + name][1] in objtypes:
newname = modname + '.' + name
elif name in objects and objects[name][1] in objtypes:
newname = name
else:
searchname = '.' + name
matches = [(oname, objects[oname]) for oname in objects
if oname.endswith(searchname)
and objects[oname][1] in objtypes]
else:
if name in objects:
newname = name
elif type == 'mod':
return []
elif classname and classname + '.' + name in objects:
newname = classname + '.' + name
elif modname and modname + '.' + name in objects:
newname = modname + '.' + name
elif modname and classname and \
modname + '.' + classname + '.' + name in objects:
newname = modname + '.' + classname + '.' + name
elif type == 'exc' and '.' not in name and \
'exceptions.' + name in objects:
newname = 'exceptions.' + name
elif type in ('func', 'meth') and '.' not in name and \
'object.' + name in objects:
newname = 'object.' + name
if newname is not None:
matches.append((newname, objects[newname]))
return matches
def resolve_xref(self, env, fromdocname, builder,
type, target, node, contnode):
modname = node.get('mat:module')
clsname = node.get('mat:class')
searchmode = node.hasattr('refspecific') and 1 or 0
matches = self.find_obj(env, modname, clsname, target,
type, searchmode)
if not matches:
return None
elif len(matches) > 1:
env.warn_node(
'more than one target found for cross-reference '
'%r: %s' % (target, ', '.join(match[0] for match in matches)),
node)
name, obj = matches[0]
if obj[1] == 'module':
docname, synopsis, platform, deprecated = self.data['modules'][name]
assert docname == obj[0]
title = name
if synopsis:
title += ': ' + synopsis
if deprecated:
title += _(' (deprecated)')
if platform:
title += ' (' + platform + ')'
return make_refnode(builder, fromdocname, docname,
'module-' + name, contnode, title)
else:
return make_refnode(builder, fromdocname, obj[0], name,
contnode, name)
def get_objects(self):
for modname, info in self.data['modules'].items():
yield (modname, modname, 'module', info[0], 'module-' + modname, 0)
for refname, (docname, type) in self.data['objects'].items():
yield (refname, refname, type, docname, refname, 1)
def setup(app):
app.add_domain(MATLABDomain)
app.add_config_value('matlab_src_dir', None, 'env')
app.add_config_value('matlab_src_encoding', None, 'env')
app.registry.add_documenter('mat:module', doc.MatModuleDocumenter)
app.add_directive_to_domain('mat',
'automodule',
mat_directives.MatlabAutodocDirective)
app.registry.add_documenter('mat:function', doc.MatFunctionDocumenter)
app.add_directive_to_domain('mat',
'autofunction',
mat_directives.MatlabAutodocDirective)
app.registry.add_documenter('mat:class', doc.MatClassDocumenter)
app.add_directive_to_domain('mat',
'autoclass',
mat_directives.MatlabAutodocDirective)
app.registry.add_documenter('mat:method', doc.MatMethodDocumenter)
app.add_directive_to_domain('mat',
'automethod',
mat_directives.MatlabAutodocDirective)
app.registry.add_documenter('mat:script', doc.MatScriptDocumenter)
app.add_directive_to_domain('mat',
'autoscript',
mat_directives.MatlabAutodocDirective)
app.registry.add_documenter('mat:exception', doc.MatExceptionDocumenter)
app.add_directive_to_domain('mat',
'autoexception',
mat_directives.MatlabAutodocDirective)
app.registry.add_documenter('mat:attribute', doc.MatAttributeDocumenter)
app.add_directive_to_domain('mat',
'autoattribute',
mat_directives.MatlabAutodocDirective)
app.registry.add_documenter('mat:data', doc.MatDataDocumenter)
app.add_directive_to_domain('mat',
'autodata',
mat_directives.MatlabAutodocDirective)
app.registry.add_documenter('mat:instanceattribute', doc.MatInstanceAttributeDocumenter)
app.add_directive_to_domain('mat',
'autoinstanceattribute',
mat_directives.MatlabAutodocDirective)
app.add_autodoc_attrgetter(doc.MatModule, doc.MatModule.getter)
app.add_autodoc_attrgetter(doc.MatClass, doc.MatClass.getter)
| true | true |
1c2df23cd27c41834a1e60dfbcd69a296561cd37 | 19,531 | py | Python | boxUpdate/boxUpdate.py | bobofei/Mohou_Box-master | 3d1c320a6258422406e2ba2f96ec7986beba1330 | [
"Apache-2.0"
] | null | null | null | boxUpdate/boxUpdate.py | bobofei/Mohou_Box-master | 3d1c320a6258422406e2ba2f96ec7986beba1330 | [
"Apache-2.0"
] | null | null | null | boxUpdate/boxUpdate.py | bobofei/Mohou_Box-master | 3d1c320a6258422406e2ba2f96ec7986beba1330 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import sys
reload(sys)
sys.setdefaultencoding('utf8')
#sys.path.append("/home/pi/oprint/lib/python2.7/site-packages/tornado-4.0.1-py2.7-linux-armv7l.egg/")
#sys.path.append("/home/pi/oprint/lib/python2.7/site-packages/backports.ssl_match_hostname-3.4.0.2-py2.7.egg/")
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import uuid
import hashlib
import time
import logging
import os
import urllib
import httplib
import json
import md5
from tornado.httpclient import HTTPClient
from tornado.escape import json_decode
from tornado.options import define, options
from common import Application
from network_api import get_allwifi_info, get_network_info, get_dns_info, set_wifi, set_network, machine_is_online, get_serial_number, set_serial_number
from user_api import md5, get_user_info, set_user_info, bind_box_api, unbind_box_api, init_box_config_info
from update_api import getLatestVer, getCurrentVer, getUpdateMeta, netUpdate, initUpdateInfo, clearUpdateInfoBegin, getUpdatePkgDesc
import settings as WebConfig
from machine_api import update_machine_config, update_setting_gcode, update_preferences_file_info, get_current_activity_print_machine, get_active_machine_print_info, \
get_default_machine_print_info, write_print_info, restart_web_service
# Tornado command-line options for the listening address.
define("host", default="*", help="run on the given host")
define("port", default=8092, help="run on the given port", type=int)
app = Application()
WebConfig.settings(True)
# BUG FIX: the original passed the literal string "__name__", which creates
# a logger literally named "__name__" instead of this module's logger.
logger = logging.getLogger(__name__)
# User-visible status messages, indexed by the handlers' integer result
# codes (Chinese, UTF-8 encoded): success / failed-please-retry /
# config-file data missing / cannot reach the authentication server.
bind_messages = ["绑定成功".encode("utf8"),
                 "绑定失败,请重试".encode("utf8"),
                 "数据读取失败,配置文件丢失".encode("utf8"),
                 "连接认证服务器网络失败".encode("utf8")]
unbind_messages = ["解除绑定成功".encode("utf8"),
                   "解除绑定失败,请重试".encode("utf8"),
                   "数据读取失败,配置文件丢失".encode("utf8"),
                   "连接认证服务器网络失败".encode("utf8")]
# Messages for machine configuration endpoints: success / failure.
machine_config_messages = ["设定成功".encode("utf8"),
                           "设定失败".encode("utf8")]
@app.route(r"/bind")
class bind(tornado.web.RequestHandler):
    """Bind this box to a Mohou cloud account.

    Result codes (indices into ``bind_messages``):
    0 = bound, 1 = auth API refused the bind, 2 = local config has no
    device id, 3 = no network connection to the auth server.
    """
    def post(self):
        username = self.get_argument("username")
        # The password is forwarded to the auth API as an MD5 digest.
        password = md5(self.get_argument("password"))
        result = None
        is_on_line = machine_is_online()
        if is_on_line:
            user_info = get_user_info()
            if user_info["device_id"]:
                response = bind_box_api(username, password, user_info["device_id"], user_info["box_name"])
                # codes 1 and 81 are both treated as success -- presumably
                # "newly bound" vs "already bound"; TODO confirm with API docs
                if response and response["code"] in [1, 81]:
                    # Persist the credentials and login state locally.
                    user_info["username"] = username
                    user_info["password"] = password
                    user_info["user_token"] = response["data"]["token"]
                    user_info["remember_information"] = 1
                    user_info["binding_mohou"] = 1
                    user_info["is_login"] = 1
                    set_user_info(user_info);
                    result = 0
                else:
                    result = 1
            else:
                result = 2
        else:
            result = 3
        return self.write({"result" : result, "msg" : bind_messages[result]})
@app.route(r"/unbind")
class unbind(tornado.web.RequestHandler):
    """Detach this box from its Mohou cloud account.

    Result codes (indices into ``unbind_messages``):
    0 = unbound, 1 = auth API refused, 2 = local config lacks token or
    device id, 3 = no network connection to the auth server.
    """
    def post(self):
        result = None
        is_on_line = machine_is_online()
        if is_on_line:
            user_info = get_user_info()
            if user_info and user_info["user_token"] and user_info["device_id"]:
                response = unbind_box_api(user_info["user_token"], user_info["device_id"])
                if response and response["code"] == 1:
                    # Wipe the stored credentials and login flags.
                    user_info_default = {
                        "username" : "",
                        "password" : "",
                        "user_token" : "",
                        "remember_information" : 0,
                        "binding_mohou" : 0,
                        "is_login" : 0
                    }
                    set_user_info(user_info_default);
                    result = 0
                else:
                    result = 1
            else:
                result = 2
        else:
            result = 3
        return self.write({"result" : result, "msg" : unbind_messages[result]})
@app.route(r"/update")
class update(tornado.web.RequestHandler):
def get(self):
clearUpdateInfoBegin()
initUpdateInfo()
return self.render(
"update.jinja2",
update_mode=self.get_argument("mode"),
latest_ver=getLatestVer(),
current_ver=getCurrentVer(),
update_desc=getUpdatePkgDesc(),
update_meta=getUpdateMeta()
)
@app.route(r"/pre_update")
class pre_update(tornado.web.RequestHandler):
def get(self):
result = "0"
clearUpdateInfoBegin()
initUpdateInfo()
return self.write(result)
@app.route(r"/netupdate_ajax")
class netupdate_ajax(tornado.web.RequestHandler):
def post(self):
result = "0"
clearUpdateInfoBegin()
initUpdateInfo()
netUpdate()
return self.write(result)
def get(self):
type = self.get_argument("type", default="meta")
retContent = {}
if type == "meta":
retContent=getUpdateMeta()
elif type == "cur_ver":
retContent = {"current_ver" : getCurrentVer()}
#retContent = {"current_ver" : "1.1"}
else:
pass
return self.write(retContent)
@app.route(r"/")
class moWifi(tornado.web.RequestHandler):
def get(self):
wifi_info = get_network_info("wlan0")
wire_info = get_network_info("eth0")
dns_info = get_dns_info()
serial_number = get_serial_number()
#user_info = get_user_info()
#print_info = get_active_machine_print_info()
return self.render(
"mowifi.jinja2",
wifi_info = wifi_info,
wire_info = wire_info,
dns_info = dns_info,
sn=serial_number
#user_info = user_info,
#print_info = print_info
)
@app.route(r"/setserialnumber")
class SerialNumber(tornado.web.RequestHandler):
def post(self):
serial_number = self.get_argument("sn", None)
if serial_number:
if set_serial_number(serial_number) == 0:
return self.write("0")
return self.write("1")
@app.route(r"/wifi")
class WifiSetting(tornado.web.RequestHandler):
def get(self):
wifissid = self.get_argument("ssid", None)
wifi_list = get_allwifi_info()
if wifissid:
wifi_list = filter(lambda x: x[0]==wifissid and x or False , wifi_list)
if wifi_list:
return self.write({'code': 0, 'msg': 'Success', 'data': {'ssid': wifi_list[0][0], 'state': wifi_list[0][1], 'lock': wifi_list[0][2], 'signal': wifi_list[0][3]}})
else:
return self.write({'code': 1, 'msg': 'SSID error.', 'data': {'wifi_list': []}})
else:
return self.write({'code': 0, 'msg': 'Success', 'data': {'wifi_list': wifi_list}})
def post(self):
wifissid = self.get_argument("ssid")
wifipwd = self.get_argument("pwd")
set_wifi(wifissid, wifipwd)
return self.write({'code': 0, 'msg': 'Success', 'data': {}})
@app.route(r"/isaccesscloud")
class AccessCloud(tornado.web.RequestHandler):
def get(self):
is_on_line = machine_is_online()
cur_client = HTTPClient()
response = cur_client.fetch("http://127.0.0.1:5000/status", request_timeout=10)
if response.error:
logger.warn("Failed to get current box info. error=%s", response.error)
is_on_line = False
res = json_decode(response.body)
if res["code"] != 0:
logger.warn("Failed to get current box info. ret_value=%d", res["ret_value"])
is_on_line = False
if is_on_line:
boxid = res["data"]["boxid"]
params=urllib.urlencode({
"token": "box_setting",
"boxid": boxid,
"progress": 2
})
headers = {"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", "Connection": "Keep-Alive"}
conn = httplib.HTTPConnection("yun.mohou.com")
conn.request(method="POST", url="/api/box/init-setting", body=params, headers=headers)
response = conn.getresponse()
response_json = response.read()
conn.close()
logger.info("Box setting result: " + str(response_json))
is_access_cloud = True
else:
is_access_cloud = False
return self.write({'code': 0, 'msg': 'Success', 'data': {'is_access_cloud': is_access_cloud}})
@app.route(r"/mowifiinfoajax")
class moWifiAjax(tornado.web.RequestHandler):
def get(self):
return self.render(
"wifiinfo.jinja2",
wifi_list=get_allwifi_info()
)
def post(self):
result = "0"
type = int(self.get_argument("type"))
if type == 1:
#connect wifi
wifissid = self.get_argument("wifissid")
wifipwd = self.get_argument("wifipwd")
set_wifi(wifissid, wifipwd)
return self.write(result)
elif (type == 2) or (type == 3):
#set ip address
if type == 2:
iface_name = "wlan0"
else:
iface_name = "eth0"
result = "0"
iface_info = {}
dns_info = {}
iface_info["dhcp"] = self.get_argument("dhcp")
iface_info["ip"] = ""
iface_info["netmask"] = ""
iface_info["gateway"] = ""
dns_info["dns"] = ""
if iface_info["dhcp"] == "0":
iface_info["ip"] = self.get_argument("ip")
iface_info["netmask"] = self.get_argument("mask")
iface_info["gateway"] = self.get_argument("gateway")
dns_info["dns"] = self.get_argument("dns")
set_network(iface_name, iface_info, dns_info)
return self.write(result)
else:
#Log incorrect type
pass
@app.route(r"/settings/machines")
class MachineDefaultConfig(tornado.web.RequestHandler):
def post(self):
json_strings = self.request.body
data = json.loads(json_strings)
alter_machine_info = get_default_machine_print_info(data["machine_name"], data["machine_type"])
return self.write({"result" : 0, "msg" : machine_config_messages[0],"data": alter_machine_info})
@app.route(r"/settings/machines/edit")
class MachineConfig(tornado.web.RequestHandler):
def post(self):
json_strings = self.request.body
data = json.loads(json_strings)
set_user_info({ "box_name": data["add_machine_data"]["box_name"] })
del data["add_machine_data"]["box_name"]
if data["machine_type_changed"] == "1":
write_print_info(data["add_machine_data"]["machine_name"], data["add_machine_data"]["machine_type"])
web_config = WebConfig.settings()
#保存打印机信息和切片参数
write_result_update=update_machine_config(data["machine_type_name"],data)
if write_result_update == 0:
return self.write({"result" : 1, "msg" : machine_config_messages[1]})
#如果是活动打印机的话还得更新CuraConfig.ini中的信息
current_activity_print_machine = get_current_activity_print_machine()
if current_activity_print_machine:
if data["machine_type_name"]:
if current_activity_print_machine==data["machine_type_name"]:
#如果是激活的打印机则更新CuraConfig
update_setting_gcode(current_activity_print_machine)
#更新preferences.ini中的machine_n节点信息
write_results=update_preferences_file_info(data["add_machine_data"])
if write_results==0:
return self.write({"result" : 1, "msg" : machine_config_messages[1]})
#
# if "api" in data.keys():
# if "enabled" in data["api"].keys(): web_config.set(["api", "enabled"], data["api"]["enabled"])
# if "key" in data["api"].keys(): web_config.set(["api", "key"], data["api"]["key"], True)
#
# if "appearance" in data.keys():
# if "name" in data["appearance"].keys(): web_config.set(["appearance", "name"], data["appearance"]["name"])
# if "color" in data["appearance"].keys(): web_config.set(["appearance", "color"], data["appearance"]["color"])
#
# if "printer" in data.keys():
# if "movementSpeedX" in data["printer"].keys(): web_config.setInt(["printerParameters", "movementSpeed", "x"], data["printer"]["movementSpeedX"])
# if "movementSpeedY" in data["printer"].keys(): web_config.setInt(["printerParameters", "movementSpeed", "y"], data["printer"]["movementSpeedY"])
# if "movementSpeedZ" in data["printer"].keys(): web_config.setInt(["printerParameters", "movementSpeed", "z"], data["printer"]["movementSpeedZ"])
# if "movementSpeedE" in data["printer"].keys(): web_config.setInt(["printerParameters", "movementSpeed", "e"], data["printer"]["movementSpeedE"])
# if "invertAxes" in data["printer"].keys(): web_config.set(["printerParameters", "invertAxes"], data["printer"]["invertAxes"])
#
# if "webcam" in data.keys():
# if "streamUrl" in data["webcam"].keys(): web_config.set(["webcam", "stream"], data["webcam"]["streamUrl"])
# if "snapshotUrl" in data["webcam"].keys(): web_config.set(["webcam", "snapshot"], data["webcam"]["snapshotUrl"])
# if "ffmpegPath" in data["webcam"].keys(): web_config.set(["webcam", "ffmpeg"], data["webcam"]["ffmpegPath"])
# if "bitrate" in data["webcam"].keys(): web_config.set(["webcam", "bitrate"], data["webcam"]["bitrate"])
# if "watermark" in data["webcam"].keys(): web_config.setBoolean(["webcam", "watermark"], data["webcam"]["watermark"])
# if "flipH" in data["webcam"].keys(): web_config.setBoolean(["webcam", "flipH"], data["webcam"]["flipH"])
# if "flipV" in data["webcam"].keys(): web_config.setBoolean(["webcam", "flipV"], data["webcam"]["flipV"])
#
# if "feature" in data.keys():
# if "gcodeViewer" in data["feature"].keys(): web_config.setBoolean(["feature", "gCodeVisualizer"], data["feature"]["gcodeViewer"])
# if "temperatureGraph" in data["feature"].keys(): web_config.setBoolean(["feature", "temperatureGraph"], data["feature"]["temperatureGraph"])
# if "waitForStart" in data["feature"].keys(): web_config.setBoolean(["feature", "waitForStartOnConnect"], data["feature"]["waitForStart"])
# if "alwaysSendChecksum" in data["feature"].keys(): web_config.setBoolean(["feature", "alwaysSendChecksum"], data["feature"]["alwaysSendChecksum"])
# if "sdSupport" in data["feature"].keys(): web_config.setBoolean(["feature", "sdSupport"], data["feature"]["sdSupport"])
# if "sdAlwaysAvailable" in data["feature"].keys(): web_config.setBoolean(["feature", "sdAlwaysAvailable"], data["feature"]["sdAlwaysAvailable"])
# if "swallowOkAfterResend" in data["feature"].keys(): web_config.setBoolean(["feature", "swallowOkAfterResend"], data["feature"]["swallowOkAfterResend"])
if "serial" in data.keys():
# if "autoconnect" in data["serial"].keys(): web_config.setBoolean(["serial", "autoconnect"], data["serial"]["autoconnect"])
if "port" in data["serial"].keys(): web_config.set(["serial", "port"], data["serial"]["port"])
if "baudrate" in data["serial"].keys():
if data["serial"]["baudrate"] == "AUTO":
web_config.set(["serial", "baudrate"], "AUTO")
else:
web_config.setInt(["serial", "baudrate"], data["serial"]["baudrate"])
else:
web_config.set(["serial", "baudrate"], "AUTO")
# if "timeoutConnection" in data["serial"].keys(): web_config.setFloat(["serial", "timeout", "connection"], data["serial"]["timeoutConnection"])
# if "timeoutDetection" in data["serial"].keys(): web_config.setFloat(["serial", "timeout", "detection"], data["serial"]["timeoutDetection"])
# if "timeoutCommunication" in data["serial"].keys(): web_config.setFloat(["serial", "timeout", "communication"], data["serial"]["timeoutCommunication"])
#
# oldLog = web_config.getBoolean(["serial", "log"])
# if "log" in data["serial"].keys(): web_config.setBoolean(["serial", "log"], data["serial"]["log"])
# if oldLog and not web_config.getBoolean(["serial", "log"]):
# # disable debug logging to serial.log
# logging.getLogger("SERIAL").debug("Disabling serial logging")
# logging.getLogger("SERIAL").setLevel(logging.CRITICAL)
# elif not oldLog and web_config.getBoolean(["serial", "log"]):
# # enable debug logging to serial.log
# logging.getLogger("SERIAL").setLevel(logging.DEBUG)
# logging.getLogger("SERIAL").debug("Enabling serial logging")
# if "folder" in data.keys():
# if "uploads" in data["folder"].keys(): web_config.setBaseFolder("uploads", data["folder"]["uploads"])
# if "timelapse" in data["folder"].keys(): web_config.setBaseFolder("timelapse", data["folder"]["timelapse"])
# if "timelapseTmp" in data["folder"].keys(): web_config.setBaseFolder("timelapse_tmp", data["folder"]["timelapseTmp"])
# if "logs" in data["folder"].keys(): web_config.setBaseFolder("logs", data["folder"]["logs"])
#
# if "temperature" in data.keys():
# if "profiles" in data["temperature"].keys(): web_config.set(["temperature", "profiles"], data["temperature"]["profiles"])
#
# if "terminalFilters" in data.keys():
# web_config.set(["terminalFilters"], data["terminalFilters"])
# cura = data.get("cura", None)
# if cura:
# path = cura.get("path")
# if path:
# web_config.set(["cura", "path"], path)
#
# config = cura.get("config")
# if config:
# web_config.set(["cura", "config"], config)
#
# # Enabled is a boolean so we cannot check that we have a result
# enabled = cura.get("enabled")
# web_config.setBoolean(["cura", "enabled"], enabled)
web_config.save()
restart_web_service()
return self.write({"result" : 0, "msg" : machine_config_messages[0]})
#~~ startup code
if __name__ == "__main__":
pid = os.fork()
if pid > 0:
sys.exit(0)
os.chdir("/")
os.setsid()
os.umask(0)
pid = os.fork()
if pid > 0:
sys.exit(0)
tornado.options.parse_command_line()
logger.info("Box management server start.")
app = app.instance()
server = tornado.httpserver.HTTPServer(app)
server.listen(options.port, options.host)
tornado.ioloop.IOLoop.instance().start() # start the tornado ioloop to
| 45.526807 | 176 | 0.584711 |
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import uuid
import hashlib
import time
import logging
import os
import urllib
import httplib
import json
import md5
from tornado.httpclient import HTTPClient
from tornado.escape import json_decode
from tornado.options import define, options
from common import Application
from network_api import get_allwifi_info, get_network_info, get_dns_info, set_wifi, set_network, machine_is_online, get_serial_number, set_serial_number
from user_api import md5, get_user_info, set_user_info, bind_box_api, unbind_box_api, init_box_config_info
from update_api import getLatestVer, getCurrentVer, getUpdateMeta, netUpdate, initUpdateInfo, clearUpdateInfoBegin, getUpdatePkgDesc
import settings as WebConfig
from machine_api import update_machine_config, update_setting_gcode, update_preferences_file_info, get_current_activity_print_machine, get_active_machine_print_info, \
get_default_machine_print_info, write_print_info, restart_web_service
define("host", default="*", help="run on the given host")
define("port", default=8092, help="run on the given port", type=int)
app = Application()
WebConfig.settings(True);
logger = logging.getLogger("__name__")
bind_messages = ["绑定成功".encode("utf8"),
"绑定失败,请重试".encode("utf8"),
"数据读取失败,配置文件丢失".encode("utf8"),
"连接认证服务器网络失败".encode("utf8")]
unbind_messages = ["解除绑定成功".encode("utf8"),
"解除绑定失败,请重试".encode("utf8"),
"数据读取失败,配置文件丢失".encode("utf8"),
"连接认证服务器网络失败".encode("utf8")]
machine_config_messages = ["设定成功".encode("utf8"),
"设定失败".encode("utf8")]
@app.route(r"/bind")
class bind(tornado.web.RequestHandler):
def post(self):
username = self.get_argument("username")
password = md5(self.get_argument("password"))
result = None
is_on_line = machine_is_online()
if is_on_line:
user_info = get_user_info()
if user_info["device_id"]:
response = bind_box_api(username, password, user_info["device_id"], user_info["box_name"])
if response and response["code"] in [1, 81]:
user_info["username"] = username
user_info["password"] = password
user_info["user_token"] = response["data"]["token"]
user_info["remember_information"] = 1
user_info["binding_mohou"] = 1
user_info["is_login"] = 1
set_user_info(user_info);
result = 0
else:
result = 1
else:
result = 2
else:
result = 3
return self.write({"result" : result, "msg" : bind_messages[result]})
@app.route(r"/unbind")
class unbind(tornado.web.RequestHandler):
def post(self):
result = None
is_on_line = machine_is_online()
if is_on_line:
user_info = get_user_info()
if user_info and user_info["user_token"] and user_info["device_id"]:
response = unbind_box_api(user_info["user_token"], user_info["device_id"])
if response and response["code"] == 1:
user_info_default = {
"username" : "",
"password" : "",
"user_token" : "",
"remember_information" : 0,
"binding_mohou" : 0,
"is_login" : 0
}
set_user_info(user_info_default);
result = 0
else:
result = 1
else:
result = 2
else:
result = 3
return self.write({"result" : result, "msg" : unbind_messages[result]})
@app.route(r"/update")
class update(tornado.web.RequestHandler):
def get(self):
clearUpdateInfoBegin()
initUpdateInfo()
return self.render(
"update.jinja2",
update_mode=self.get_argument("mode"),
latest_ver=getLatestVer(),
current_ver=getCurrentVer(),
update_desc=getUpdatePkgDesc(),
update_meta=getUpdateMeta()
)
@app.route(r"/pre_update")
class pre_update(tornado.web.RequestHandler):
def get(self):
result = "0"
clearUpdateInfoBegin()
initUpdateInfo()
return self.write(result)
@app.route(r"/netupdate_ajax")
class netupdate_ajax(tornado.web.RequestHandler):
def post(self):
result = "0"
clearUpdateInfoBegin()
initUpdateInfo()
netUpdate()
return self.write(result)
def get(self):
type = self.get_argument("type", default="meta")
retContent = {}
if type == "meta":
retContent=getUpdateMeta()
elif type == "cur_ver":
retContent = {"current_ver" : getCurrentVer()}
else:
pass
return self.write(retContent)
@app.route(r"/")
class moWifi(tornado.web.RequestHandler):
def get(self):
wifi_info = get_network_info("wlan0")
wire_info = get_network_info("eth0")
dns_info = get_dns_info()
serial_number = get_serial_number()
return self.render(
"mowifi.jinja2",
wifi_info = wifi_info,
wire_info = wire_info,
dns_info = dns_info,
sn=serial_number
)
@app.route(r"/setserialnumber")
class SerialNumber(tornado.web.RequestHandler):
def post(self):
serial_number = self.get_argument("sn", None)
if serial_number:
if set_serial_number(serial_number) == 0:
return self.write("0")
return self.write("1")
@app.route(r"/wifi")
class WifiSetting(tornado.web.RequestHandler):
def get(self):
wifissid = self.get_argument("ssid", None)
wifi_list = get_allwifi_info()
if wifissid:
wifi_list = filter(lambda x: x[0]==wifissid and x or False , wifi_list)
if wifi_list:
return self.write({'code': 0, 'msg': 'Success', 'data': {'ssid': wifi_list[0][0], 'state': wifi_list[0][1], 'lock': wifi_list[0][2], 'signal': wifi_list[0][3]}})
else:
return self.write({'code': 1, 'msg': 'SSID error.', 'data': {'wifi_list': []}})
else:
return self.write({'code': 0, 'msg': 'Success', 'data': {'wifi_list': wifi_list}})
def post(self):
wifissid = self.get_argument("ssid")
wifipwd = self.get_argument("pwd")
set_wifi(wifissid, wifipwd)
return self.write({'code': 0, 'msg': 'Success', 'data': {}})
@app.route(r"/isaccesscloud")
class AccessCloud(tornado.web.RequestHandler):
def get(self):
is_on_line = machine_is_online()
cur_client = HTTPClient()
response = cur_client.fetch("http://127.0.0.1:5000/status", request_timeout=10)
if response.error:
logger.warn("Failed to get current box info. error=%s", response.error)
is_on_line = False
res = json_decode(response.body)
if res["code"] != 0:
logger.warn("Failed to get current box info. ret_value=%d", res["ret_value"])
is_on_line = False
if is_on_line:
boxid = res["data"]["boxid"]
params=urllib.urlencode({
"token": "box_setting",
"boxid": boxid,
"progress": 2
})
headers = {"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", "Connection": "Keep-Alive"}
conn = httplib.HTTPConnection("yun.mohou.com")
conn.request(method="POST", url="/api/box/init-setting", body=params, headers=headers)
response = conn.getresponse()
response_json = response.read()
conn.close()
logger.info("Box setting result: " + str(response_json))
is_access_cloud = True
else:
is_access_cloud = False
return self.write({'code': 0, 'msg': 'Success', 'data': {'is_access_cloud': is_access_cloud}})
@app.route(r"/mowifiinfoajax")
class moWifiAjax(tornado.web.RequestHandler):
def get(self):
return self.render(
"wifiinfo.jinja2",
wifi_list=get_allwifi_info()
)
def post(self):
result = "0"
type = int(self.get_argument("type"))
if type == 1:
wifissid = self.get_argument("wifissid")
wifipwd = self.get_argument("wifipwd")
set_wifi(wifissid, wifipwd)
return self.write(result)
elif (type == 2) or (type == 3):
if type == 2:
iface_name = "wlan0"
else:
iface_name = "eth0"
result = "0"
iface_info = {}
dns_info = {}
iface_info["dhcp"] = self.get_argument("dhcp")
iface_info["ip"] = ""
iface_info["netmask"] = ""
iface_info["gateway"] = ""
dns_info["dns"] = ""
if iface_info["dhcp"] == "0":
iface_info["ip"] = self.get_argument("ip")
iface_info["netmask"] = self.get_argument("mask")
iface_info["gateway"] = self.get_argument("gateway")
dns_info["dns"] = self.get_argument("dns")
set_network(iface_name, iface_info, dns_info)
return self.write(result)
else:
pass
@app.route(r"/settings/machines")
class MachineDefaultConfig(tornado.web.RequestHandler):
def post(self):
json_strings = self.request.body
data = json.loads(json_strings)
alter_machine_info = get_default_machine_print_info(data["machine_name"], data["machine_type"])
return self.write({"result" : 0, "msg" : machine_config_messages[0],"data": alter_machine_info})
@app.route(r"/settings/machines/edit")
class MachineConfig(tornado.web.RequestHandler):
def post(self):
json_strings = self.request.body
data = json.loads(json_strings)
set_user_info({ "box_name": data["add_machine_data"]["box_name"] })
del data["add_machine_data"]["box_name"]
if data["machine_type_changed"] == "1":
write_print_info(data["add_machine_data"]["machine_name"], data["add_machine_data"]["machine_type"])
web_config = WebConfig.settings()
write_result_update=update_machine_config(data["machine_type_name"],data)
if write_result_update == 0:
return self.write({"result" : 1, "msg" : machine_config_messages[1]})
current_activity_print_machine = get_current_activity_print_machine()
if current_activity_print_machine:
if data["machine_type_name"]:
if current_activity_print_machine==data["machine_type_name"]:
update_setting_gcode(current_activity_print_machine)
write_results=update_preferences_file_info(data["add_machine_data"])
if write_results==0:
return self.write({"result" : 1, "msg" : machine_config_messages[1]})
if "serial" in data.keys():
if "port" in data["serial"].keys(): web_config.set(["serial", "port"], data["serial"]["port"])
if "baudrate" in data["serial"].keys():
if data["serial"]["baudrate"] == "AUTO":
web_config.set(["serial", "baudrate"], "AUTO")
else:
web_config.setInt(["serial", "baudrate"], data["serial"]["baudrate"])
else:
web_config.set(["serial", "baudrate"], "AUTO")
f.write({"result" : 0, "msg" : machine_config_messages[0]})
if __name__ == "__main__":
pid = os.fork()
if pid > 0:
sys.exit(0)
os.chdir("/")
os.setsid()
os.umask(0)
pid = os.fork()
if pid > 0:
sys.exit(0)
tornado.options.parse_command_line()
logger.info("Box management server start.")
app = app.instance()
server = tornado.httpserver.HTTPServer(app)
server.listen(options.port, options.host)
tornado.ioloop.IOLoop.instance().start()
| true | true |
1c2df3dddda4b02ec9b4bef2f12046d3516e3362 | 358 | py | Python | sboapp/migrations/0002_auto_20180504_0704.py | tmaunier/sboucru | c01c34f909fb89b9eb35c476ff4ac595116ad024 | [
"MIT"
] | 3 | 2020-11-18T10:11:40.000Z | 2021-11-08T08:48:05.000Z | sboapp/migrations/0002_auto_20180504_0704.py | tmaunier/sboucru | c01c34f909fb89b9eb35c476ff4ac595116ad024 | [
"MIT"
] | 3 | 2020-06-05T18:42:49.000Z | 2021-06-10T20:42:06.000Z | sboapp/migrations/0002_auto_20180504_0704.py | tmaunier/sboucru | c01c34f909fb89b9eb35c476ff4ac595116ad024 | [
"MIT"
] | null | null | null | # Generated by Django 2.0.3 on 2018-05-04 07:04
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('sboapp', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='serum',
old_name='birth_date',
new_name='birth_year',
),
]
| 18.842105 | 47 | 0.578212 |
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('sboapp', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='serum',
old_name='birth_date',
new_name='birth_year',
),
]
| true | true |
1c2df4f095a2a8b079e34af579926a63403363c4 | 5,467 | py | Python | tg/request_local.py | devilicecream/tg2 | 0aadc0d1595e7326b187deb7b0198de1715225b0 | [
"MIT"
] | null | null | null | tg/request_local.py | devilicecream/tg2 | 0aadc0d1595e7326b187deb7b0198de1715225b0 | [
"MIT"
] | null | null | null | tg/request_local.py | devilicecream/tg2 | 0aadc0d1595e7326b187deb7b0198de1715225b0 | [
"MIT"
] | null | null | null | import hmac, base64, binascii, re
from tg.support.objectproxy import TurboGearsObjectProxy
from tg.support.registry import StackedObjectProxy, DispatchingConfig
from tg.caching import cached_property
try:
import cPickle as pickle
except ImportError: #pragma: no cover
import pickle
try:
from hashlib import sha1
except ImportError: #pragma: no cover
import sha as sha1
from webob import Request as WebObRequest
from webob import Response as WebObResponse
from webob.request import PATH_SAFE
from webob.compat import url_quote as webob_url_quote, bytes_ as webob_bytes_
class Request(WebObRequest):
"""WebOb Request subclass
The WebOb :class:`webob.Request` has no charset, or other defaults. This subclass
adds defaults, along with several methods for backwards
compatibility with paste.wsgiwrappers.WSGIRequest.
"""
def languages_best_match(self, fallback=None):
al = self.accept_language
try:
items = [i for i, q in sorted(al._parsed, key=lambda iq: -iq[1])]
except AttributeError:
#NilAccept has no _parsed, here for test units
items = []
if fallback:
for index, item in enumerate(items):
if al._match(item, fallback):
items[index:] = [fallback]
break
else:
items.append(fallback)
return items
@cached_property
def controller_state(self):
return self._controller_state
@cached_property
def controller_url(self):
state = self._controller_state
return '/'.join(state.path[:-len(state.remainder)])
@cached_property
def plain_languages(self):
return self.languages_best_match()
@property
def languages(self):
return self.languages_best_match(self._language)
@property
def language(self):
return self._language
@language.setter
def language(self, value):
self._language = value
@property
def response_type(self):
return self._response_type
@property
def response_ext(self):
return self._response_ext
def match_accept(self, mimetypes):
return self.accept.best_match(mimetypes)
def signed_cookie(self, name, secret):
"""Extract a signed cookie of ``name`` from the request
The cookie is expected to have been created with
``Response.signed_cookie``, and the ``secret`` should be the
same as the one used to sign it.
Any failure in the signature of the data will result in None
being returned.
"""
cookie = self.cookies.get(name)
if not cookie:
return
secret = secret.encode('ascii')
try:
sig, pickled = cookie[:40], base64.decodestring(cookie[40:].encode('ascii'))
except binascii.Error: #pragma: no cover
# Badly formed data can make base64 die
return
if hmac.new(secret, pickled, sha1).hexdigest() == sig:
return pickle.loads(pickled)
@cached_property
def args_params(self):
# This was: dict(((str(n), v) for n,v in self.params.mixed().items()))
# so that keys were all strings making possible to use them as arguments.
# Now it seems that all keys are always strings, did WebOb change behavior?
return self.params.mixed()
@cached_property
def quoted_path_info(self):
bpath = webob_bytes_(self.path_info, self.url_encoding)
return webob_url_quote(bpath, PATH_SAFE)
def _fast_setattr(self, name, value):
object.__setattr__(self, name, value)
class Response(WebObResponse):
"""WebOb Response subclass"""
content = WebObResponse.body
def wsgi_response(self):
return self.status, self.headers, self.body
def signed_cookie(self, name, data, secret, **kwargs):
"""Save a signed cookie with ``secret`` signature
Saves a signed cookie of the pickled data. All other keyword
arguments that ``WebOb.set_cookie`` accepts are usable and
passed to the WebOb set_cookie method after creating the signed
cookie value.
"""
secret = secret.encode('ascii')
pickled = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
sig = hmac.new(secret, pickled, sha1).hexdigest().encode('ascii')
cookie_value = sig + base64.encodestring(pickled)
self.set_cookie(name, cookie_value, **kwargs)
config = DispatchingConfig()
context = StackedObjectProxy(name="context")
class TurboGearsContextMember(TurboGearsObjectProxy):
"""Member of the TurboGears request context.
Provides access to turbogears context members
like request, response, template context and so on
"""
def __init__(self, name):
self.__dict__['name'] = name
def _current_obj(self):
return getattr(context, self.name)
request = TurboGearsContextMember(name="request")
app_globals = TurboGearsContextMember(name="app_globals")
cache = TurboGearsContextMember(name="cache")
response = TurboGearsContextMember(name="response")
session = TurboGearsContextMember(name="session")
tmpl_context = TurboGearsContextMember(name="tmpl_context")
url = TurboGearsContextMember(name="url")
translator = TurboGearsContextMember(name="translator")
__all__ = ['app_globals', 'request', 'response', 'tmpl_context', 'session', 'cache', 'translator', 'url', 'config'] | 31.601156 | 115 | 0.674227 | import hmac, base64, binascii, re
from tg.support.objectproxy import TurboGearsObjectProxy
from tg.support.registry import StackedObjectProxy, DispatchingConfig
from tg.caching import cached_property
try:
import cPickle as pickle
except ImportError:
import pickle
try:
from hashlib import sha1
except ImportError:
import sha as sha1
from webob import Request as WebObRequest
from webob import Response as WebObResponse
from webob.request import PATH_SAFE
from webob.compat import url_quote as webob_url_quote, bytes_ as webob_bytes_
class Request(WebObRequest):
def languages_best_match(self, fallback=None):
al = self.accept_language
try:
items = [i for i, q in sorted(al._parsed, key=lambda iq: -iq[1])]
except AttributeError:
items = []
if fallback:
for index, item in enumerate(items):
if al._match(item, fallback):
items[index:] = [fallback]
break
else:
items.append(fallback)
return items
@cached_property
def controller_state(self):
return self._controller_state
@cached_property
def controller_url(self):
state = self._controller_state
return '/'.join(state.path[:-len(state.remainder)])
@cached_property
def plain_languages(self):
return self.languages_best_match()
@property
def languages(self):
return self.languages_best_match(self._language)
@property
def language(self):
return self._language
@language.setter
def language(self, value):
self._language = value
@property
def response_type(self):
return self._response_type
@property
def response_ext(self):
return self._response_ext
def match_accept(self, mimetypes):
return self.accept.best_match(mimetypes)
def signed_cookie(self, name, secret):
cookie = self.cookies.get(name)
if not cookie:
return
secret = secret.encode('ascii')
try:
sig, pickled = cookie[:40], base64.decodestring(cookie[40:].encode('ascii'))
except binascii.Error:
return
if hmac.new(secret, pickled, sha1).hexdigest() == sig:
return pickle.loads(pickled)
@cached_property
def args_params(self):
return self.params.mixed()
@cached_property
def quoted_path_info(self):
bpath = webob_bytes_(self.path_info, self.url_encoding)
return webob_url_quote(bpath, PATH_SAFE)
def _fast_setattr(self, name, value):
object.__setattr__(self, name, value)
class Response(WebObResponse):
content = WebObResponse.body
def wsgi_response(self):
return self.status, self.headers, self.body
def signed_cookie(self, name, data, secret, **kwargs):
secret = secret.encode('ascii')
pickled = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
sig = hmac.new(secret, pickled, sha1).hexdigest().encode('ascii')
cookie_value = sig + base64.encodestring(pickled)
self.set_cookie(name, cookie_value, **kwargs)
config = DispatchingConfig()
context = StackedObjectProxy(name="context")
class TurboGearsContextMember(TurboGearsObjectProxy):
def __init__(self, name):
self.__dict__['name'] = name
def _current_obj(self):
return getattr(context, self.name)
request = TurboGearsContextMember(name="request")
app_globals = TurboGearsContextMember(name="app_globals")
cache = TurboGearsContextMember(name="cache")
response = TurboGearsContextMember(name="response")
session = TurboGearsContextMember(name="session")
tmpl_context = TurboGearsContextMember(name="tmpl_context")
url = TurboGearsContextMember(name="url")
translator = TurboGearsContextMember(name="translator")
__all__ = ['app_globals', 'request', 'response', 'tmpl_context', 'session', 'cache', 'translator', 'url', 'config'] | true | true |
1c2df54ecc53525de3e6db310a0b230620103196 | 14,415 | py | Python | tests/storage/test_client_ips.py | rhetenor/synapse | 5154afc00d841c7685a97700be3cd1398e633e05 | [
"Apache-2.0"
] | 7 | 2020-07-03T13:51:31.000Z | 2022-03-10T01:26:18.000Z | tests/storage/test_client_ips.py | rhetenor/synapse | 5154afc00d841c7685a97700be3cd1398e633e05 | [
"Apache-2.0"
] | 69 | 2019-09-09T13:54:30.000Z | 2022-03-23T10:45:15.000Z | tests/storage/test_client_ips.py | rhetenor/synapse | 5154afc00d841c7685a97700be3cd1398e633e05 | [
"Apache-2.0"
] | 7 | 2020-04-24T17:04:40.000Z | 2021-07-29T23:06:25.000Z | # Copyright 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import Mock
import synapse.rest.admin
from synapse.http.site import XForwardedForRequest
from synapse.rest.client import login
from tests import unittest
from tests.server import make_request
from tests.test_utils import make_awaitable
from tests.unittest import override_config
class ClientIpStoreTestCase(unittest.HomeserverTestCase):
    """Tests for the client-IP storage layer: batching of inserts into the
    ``user_ips`` table, interaction with monthly-active-user (MAU) tracking,
    the ``devices_last_seen`` background update, and pruning of old rows.
    """

    def make_homeserver(self, reactor, clock):
        hs = self.setup_test_homeserver()
        return hs

    def prepare(self, hs, reactor, clock):
        self.store = self.hs.get_datastore()

    def _assert_dict_subset(self, expected, actual):
        """Assert every key/value pair in `expected` is present in `actual`.

        Replacement for ``assertDictContainsSubset``, which has been
        deprecated since Python 3.2; dict views support set-style subset
        comparison directly.
        """
        self.assertLessEqual(expected.items(), actual.items())

    def test_insert_new_client_ip(self):
        # Pin the reactor clock so last_seen is deterministic (ms = s * 1000).
        self.reactor.advance(12345678)

        user_id = "@user:id"
        device_id = "MY_DEVICE"

        # Insert a user IP against a known device
        self.get_success(
            self.store.store_device(
                user_id,
                device_id,
                "display name",
            )
        )
        self.get_success(
            self.store.insert_client_ip(
                user_id, "access_token", "ip", "user_agent", device_id
            )
        )
        # Trigger the batched-write loop so the insert is flushed to the DB
        self.reactor.advance(10)

        result = self.get_success(
            self.store.get_last_client_ip_by_device(user_id, device_id)
        )

        r = result[(user_id, device_id)]
        self._assert_dict_subset(
            {
                "user_id": user_id,
                "device_id": device_id,
                "ip": "ip",
                "user_agent": "user_agent",
                "last_seen": 12345678000,
            },
            r,
        )

    def test_insert_new_client_ip_none_device_id(self):
        """
        An insert with a device ID of NULL will not create a new entry, but
        update an existing entry in the user_ips table.
        """
        self.reactor.advance(12345678)

        user_id = "@user:id"

        # Add & trigger the storage loop
        self.get_success(
            self.store.insert_client_ip(
                user_id, "access_token", "ip", "user_agent", None
            )
        )
        self.reactor.advance(200)
        self.pump(0)

        result = self.get_success(
            self.store.db_pool.simple_select_list(
                table="user_ips",
                keyvalues={"user_id": user_id},
                retcols=["access_token", "ip", "user_agent", "device_id", "last_seen"],
                desc="get_user_ip_and_agents",
            )
        )

        self.assertEqual(
            result,
            [
                {
                    "access_token": "access_token",
                    "ip": "ip",
                    "user_agent": "user_agent",
                    "device_id": None,
                    "last_seen": 12345678000,
                }
            ],
        )

        # Add another & trigger the storage loop
        self.get_success(
            self.store.insert_client_ip(
                user_id, "access_token", "ip", "user_agent", None
            )
        )
        self.reactor.advance(10)
        self.pump(0)

        result = self.get_success(
            self.store.db_pool.simple_select_list(
                table="user_ips",
                keyvalues={"user_id": user_id},
                retcols=["access_token", "ip", "user_agent", "device_id", "last_seen"],
                desc="get_user_ip_and_agents",
            )
        )
        # Only one result, has been upserted (last_seen advanced by 200s).
        self.assertEqual(
            result,
            [
                {
                    "access_token": "access_token",
                    "ip": "ip",
                    "user_agent": "user_agent",
                    "device_id": None,
                    "last_seen": 12345878000,
                }
            ],
        )

    @override_config({"limit_usage_by_mau": False, "max_mau_value": 50})
    def test_disabled_monthly_active_user(self):
        # With MAU limiting disabled, inserting a client IP must not mark
        # the user as monthly active.
        user_id = "@user:server"
        self.get_success(
            self.store.insert_client_ip(
                user_id, "access_token", "ip", "user_agent", "device_id"
            )
        )
        active = self.get_success(self.store.user_last_seen_monthly_active(user_id))
        self.assertFalse(active)

    @override_config({"limit_usage_by_mau": True, "max_mau_value": 50})
    def test_adding_monthly_active_user_when_full(self):
        # When the MAU cap is already exceeded, a new user must not be
        # added to the monthly-active set.
        lots_of_users = 100
        user_id = "@user:server"

        self.store.get_monthly_active_count = Mock(
            return_value=make_awaitable(lots_of_users)
        )
        self.get_success(
            self.store.insert_client_ip(
                user_id, "access_token", "ip", "user_agent", "device_id"
            )
        )
        active = self.get_success(self.store.user_last_seen_monthly_active(user_id))
        self.assertFalse(active)

    @override_config({"limit_usage_by_mau": True, "max_mau_value": 50})
    def test_adding_monthly_active_user_when_space(self):
        # With room under the MAU cap, inserting a client IP marks the
        # user as monthly active.
        user_id = "@user:server"
        active = self.get_success(self.store.user_last_seen_monthly_active(user_id))
        self.assertFalse(active)

        # Trigger the saving loop
        self.reactor.advance(10)

        self.get_success(
            self.store.insert_client_ip(
                user_id, "access_token", "ip", "user_agent", "device_id"
            )
        )
        active = self.get_success(self.store.user_last_seen_monthly_active(user_id))
        self.assertTrue(active)

    @override_config({"limit_usage_by_mau": True, "max_mau_value": 50})
    def test_updating_monthly_active_user_when_space(self):
        # Same as above, but for a user that is already registered.
        user_id = "@user:server"
        self.get_success(self.store.register_user(user_id=user_id, password_hash=None))

        active = self.get_success(self.store.user_last_seen_monthly_active(user_id))
        self.assertFalse(active)

        # Trigger the saving loop
        self.reactor.advance(10)

        self.get_success(
            self.store.insert_client_ip(
                user_id, "access_token", "ip", "user_agent", "device_id"
            )
        )
        active = self.get_success(self.store.user_last_seen_monthly_active(user_id))
        self.assertTrue(active)

    def test_devices_last_seen_bg_update(self):
        # First make sure we have completed all updates.
        while not self.get_success(
            self.store.db_pool.updates.has_completed_background_updates()
        ):
            self.get_success(
                self.store.db_pool.updates.do_next_background_update(100), by=0.1
            )

        user_id = "@user:id"
        device_id = "MY_DEVICE"

        # Insert a user IP
        self.get_success(
            self.store.store_device(
                user_id,
                device_id,
                "display name",
            )
        )
        self.get_success(
            self.store.insert_client_ip(
                user_id, "access_token", "ip", "user_agent", device_id
            )
        )

        # Force persisting to disk
        self.reactor.advance(200)

        # But clear the associated entry in devices table
        self.get_success(
            self.store.db_pool.simple_update(
                table="devices",
                keyvalues={"user_id": user_id, "device_id": device_id},
                updatevalues={"last_seen": None, "ip": None, "user_agent": None},
                desc="test_devices_last_seen_bg_update",
            )
        )

        # We should now get nulls when querying
        result = self.get_success(
            self.store.get_last_client_ip_by_device(user_id, device_id)
        )

        r = result[(user_id, device_id)]
        self._assert_dict_subset(
            {
                "user_id": user_id,
                "device_id": device_id,
                "ip": None,
                "user_agent": None,
                "last_seen": None,
            },
            r,
        )

        # Register the background update to run again.
        self.get_success(
            self.store.db_pool.simple_insert(
                table="background_updates",
                values={
                    "update_name": "devices_last_seen",
                    "progress_json": "{}",
                    "depends_on": None,
                },
            )
        )

        # ... and tell the DataStore that it hasn't finished all updates yet
        self.store.db_pool.updates._all_done = False

        # Now let's actually drive the updates to completion
        while not self.get_success(
            self.store.db_pool.updates.has_completed_background_updates()
        ):
            self.get_success(
                self.store.db_pool.updates.do_next_background_update(100), by=0.1
            )

        # We should now get the correct result again, repopulated from
        # user_ips by the background update.
        result = self.get_success(
            self.store.get_last_client_ip_by_device(user_id, device_id)
        )

        r = result[(user_id, device_id)]
        self._assert_dict_subset(
            {
                "user_id": user_id,
                "device_id": device_id,
                "ip": "ip",
                "user_agent": "user_agent",
                "last_seen": 0,
            },
            r,
        )

    def test_old_user_ips_pruned(self):
        # First make sure we have completed all updates.
        while not self.get_success(
            self.store.db_pool.updates.has_completed_background_updates()
        ):
            self.get_success(
                self.store.db_pool.updates.do_next_background_update(100), by=0.1
            )

        user_id = "@user:id"
        device_id = "MY_DEVICE"

        # Insert a user IP
        self.get_success(
            self.store.store_device(
                user_id,
                device_id,
                "display name",
            )
        )
        self.get_success(
            self.store.insert_client_ip(
                user_id, "access_token", "ip", "user_agent", device_id
            )
        )

        # Force persisting to disk
        self.reactor.advance(200)

        # We should see that in the DB
        result = self.get_success(
            self.store.db_pool.simple_select_list(
                table="user_ips",
                keyvalues={"user_id": user_id},
                retcols=["access_token", "ip", "user_agent", "device_id", "last_seen"],
                desc="get_user_ip_and_agents",
            )
        )

        self.assertEqual(
            result,
            [
                {
                    "access_token": "access_token",
                    "ip": "ip",
                    "user_agent": "user_agent",
                    "device_id": device_id,
                    "last_seen": 0,
                }
            ],
        )

        # Now advance by a couple of months so the pruning loop runs
        self.reactor.advance(60 * 24 * 60 * 60)

        # We should get no results.
        result = self.get_success(
            self.store.db_pool.simple_select_list(
                table="user_ips",
                keyvalues={"user_id": user_id},
                retcols=["access_token", "ip", "user_agent", "device_id", "last_seen"],
                desc="get_user_ip_and_agents",
            )
        )

        self.assertEqual(result, [])

        # But we should still get the correct values for the device, since
        # they are persisted on the devices table independently.
        result = self.get_success(
            self.store.get_last_client_ip_by_device(user_id, device_id)
        )

        r = result[(user_id, device_id)]
        self._assert_dict_subset(
            {
                "user_id": user_id,
                "device_id": device_id,
                "ip": "ip",
                "user_agent": "user_agent",
                "last_seen": 0,
            },
            r,
        )
class ClientIpAuthTestCase(unittest.HomeserverTestCase):
    """End-to-end tests that an authenticated HTTP request records the
    caller's IP address (taken from the transport, or from the
    ``X-Forwarded-For`` header when proxying) in the client-IPs store.
    """

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
    ]

    def make_homeserver(self, reactor, clock):
        hs = self.setup_test_homeserver()
        return hs

    def prepare(self, hs, reactor, clock):
        self.store = self.hs.get_datastore()
        self.user_id = self.register_user("bob", "abc123", True)

    def test_request_with_xforwarded(self):
        """
        The IP in X-Forwarded-For is entered into the client IPs table.
        """
        self._runtest(
            {b"X-Forwarded-For": b"127.9.0.1"},
            "127.9.0.1",
            {"request": XForwardedForRequest},
        )

    def test_request_from_getPeer(self):
        """
        The IP returned by getPeer is entered into the client IPs table, if
        there's no X-Forwarded-For header.
        """
        self._runtest({}, "127.0.0.1", {})

    def _runtest(self, headers, expected_ip, make_request_args):
        """Make an authenticated request carrying `headers` and assert that
        `expected_ip` is recorded against the login device.

        Args:
            headers: extra HTTP headers to send (bytes keys/values).
            expected_ip: the IP we expect to be stored.
            make_request_args: extra keyword args for ``make_request``
                (e.g. a custom request class).
        """
        device_id = "bleb"
        access_token = self.login("bob", "abc123", device_id=device_id)

        # Advance to a known time so the recorded last_seen is deterministic.
        self.reactor.advance(123456 - self.reactor.seconds())

        headers1 = {b"User-Agent": b"Mozzila pizza"}
        headers1.update(headers)

        make_request(
            self.reactor,
            self.site,
            "GET",
            "/_synapse/admin/v2/users/" + self.user_id,
            access_token=access_token,
            custom_headers=headers1.items(),
            **make_request_args,
        )

        # Advance so the batched save loop flushes to the database.
        self.reactor.advance(100)

        result = self.get_success(
            self.store.get_last_client_ip_by_device(self.user_id, device_id)
        )
        r = result[(self.user_id, device_id)]
        # Subset check via dict views; assertDictContainsSubset is deprecated.
        self.assertLessEqual(
            {
                "user_id": self.user_id,
                "device_id": device_id,
                "ip": expected_ip,
                "user_agent": "Mozzila pizza",
                "last_seen": 123456100,
            }.items(),
            r.items(),
        )
| 31.405229 | 87 | 0.549636 |
from unittest.mock import Mock
import synapse.rest.admin
from synapse.http.site import XForwardedForRequest
from synapse.rest.client import login
from tests import unittest
from tests.server import make_request
from tests.test_utils import make_awaitable
from tests.unittest import override_config
class ClientIpStoreTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver()
return hs
def prepare(self, hs, reactor, clock):
self.store = self.hs.get_datastore()
def test_insert_new_client_ip(self):
self.reactor.advance(12345678)
user_id = "@user:id"
device_id = "MY_DEVICE"
self.get_success(
self.store.store_device(
user_id,
device_id,
"display name",
)
)
self.get_success(
self.store.insert_client_ip(
user_id, "access_token", "ip", "user_agent", device_id
)
)
self.reactor.advance(10)
result = self.get_success(
self.store.get_last_client_ip_by_device(user_id, device_id)
)
r = result[(user_id, device_id)]
self.assertDictContainsSubset(
{
"user_id": user_id,
"device_id": device_id,
"ip": "ip",
"user_agent": "user_agent",
"last_seen": 12345678000,
},
r,
)
def test_insert_new_client_ip_none_device_id(self):
self.reactor.advance(12345678)
user_id = "@user:id"
self.get_success(
self.store.insert_client_ip(
user_id, "access_token", "ip", "user_agent", None
)
)
self.reactor.advance(200)
self.pump(0)
result = self.get_success(
self.store.db_pool.simple_select_list(
table="user_ips",
keyvalues={"user_id": user_id},
retcols=["access_token", "ip", "user_agent", "device_id", "last_seen"],
desc="get_user_ip_and_agents",
)
)
self.assertEqual(
result,
[
{
"access_token": "access_token",
"ip": "ip",
"user_agent": "user_agent",
"device_id": None,
"last_seen": 12345678000,
}
],
)
self.get_success(
self.store.insert_client_ip(
user_id, "access_token", "ip", "user_agent", None
)
)
self.reactor.advance(10)
self.pump(0)
result = self.get_success(
self.store.db_pool.simple_select_list(
table="user_ips",
keyvalues={"user_id": user_id},
retcols=["access_token", "ip", "user_agent", "device_id", "last_seen"],
desc="get_user_ip_and_agents",
)
)
self.assertEqual(
result,
[
{
"access_token": "access_token",
"ip": "ip",
"user_agent": "user_agent",
"device_id": None,
"last_seen": 12345878000,
}
],
)
@override_config({"limit_usage_by_mau": False, "max_mau_value": 50})
def test_disabled_monthly_active_user(self):
user_id = "@user:server"
self.get_success(
self.store.insert_client_ip(
user_id, "access_token", "ip", "user_agent", "device_id"
)
)
active = self.get_success(self.store.user_last_seen_monthly_active(user_id))
self.assertFalse(active)
@override_config({"limit_usage_by_mau": True, "max_mau_value": 50})
def test_adding_monthly_active_user_when_full(self):
lots_of_users = 100
user_id = "@user:server"
self.store.get_monthly_active_count = Mock(
return_value=make_awaitable(lots_of_users)
)
self.get_success(
self.store.insert_client_ip(
user_id, "access_token", "ip", "user_agent", "device_id"
)
)
active = self.get_success(self.store.user_last_seen_monthly_active(user_id))
self.assertFalse(active)
@override_config({"limit_usage_by_mau": True, "max_mau_value": 50})
def test_adding_monthly_active_user_when_space(self):
user_id = "@user:server"
active = self.get_success(self.store.user_last_seen_monthly_active(user_id))
self.assertFalse(active)
self.reactor.advance(10)
self.get_success(
self.store.insert_client_ip(
user_id, "access_token", "ip", "user_agent", "device_id"
)
)
active = self.get_success(self.store.user_last_seen_monthly_active(user_id))
self.assertTrue(active)
@override_config({"limit_usage_by_mau": True, "max_mau_value": 50})
def test_updating_monthly_active_user_when_space(self):
user_id = "@user:server"
self.get_success(self.store.register_user(user_id=user_id, password_hash=None))
active = self.get_success(self.store.user_last_seen_monthly_active(user_id))
self.assertFalse(active)
self.reactor.advance(10)
self.get_success(
self.store.insert_client_ip(
user_id, "access_token", "ip", "user_agent", "device_id"
)
)
active = self.get_success(self.store.user_last_seen_monthly_active(user_id))
self.assertTrue(active)
def test_devices_last_seen_bg_update(self):
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
user_id = "@user:id"
device_id = "MY_DEVICE"
self.get_success(
self.store.store_device(
user_id,
device_id,
"display name",
)
)
self.get_success(
self.store.insert_client_ip(
user_id, "access_token", "ip", "user_agent", device_id
)
)
self.reactor.advance(200)
self.get_success(
self.store.db_pool.simple_update(
table="devices",
keyvalues={"user_id": user_id, "device_id": device_id},
updatevalues={"last_seen": None, "ip": None, "user_agent": None},
desc="test_devices_last_seen_bg_update",
)
)
result = self.get_success(
self.store.get_last_client_ip_by_device(user_id, device_id)
)
r = result[(user_id, device_id)]
self.assertDictContainsSubset(
{
"user_id": user_id,
"device_id": device_id,
"ip": None,
"user_agent": None,
"last_seen": None,
},
r,
)
self.get_success(
self.store.db_pool.simple_insert(
table="background_updates",
values={
"update_name": "devices_last_seen",
"progress_json": "{}",
"depends_on": None,
},
)
)
self.store.db_pool.updates._all_done = False
# Now let's actually drive the updates to completion
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
result = self.get_success(
self.store.get_last_client_ip_by_device(user_id, device_id)
)
r = result[(user_id, device_id)]
self.assertDictContainsSubset(
{
"user_id": user_id,
"device_id": device_id,
"ip": "ip",
"user_agent": "user_agent",
"last_seen": 0,
},
r,
)
def test_old_user_ips_pruned(self):
while not self.get_success(
self.store.db_pool.updates.has_completed_background_updates()
):
self.get_success(
self.store.db_pool.updates.do_next_background_update(100), by=0.1
)
user_id = "@user:id"
device_id = "MY_DEVICE"
self.get_success(
self.store.store_device(
user_id,
device_id,
"display name",
)
)
self.get_success(
self.store.insert_client_ip(
user_id, "access_token", "ip", "user_agent", device_id
)
)
self.reactor.advance(200)
result = self.get_success(
self.store.db_pool.simple_select_list(
table="user_ips",
keyvalues={"user_id": user_id},
retcols=["access_token", "ip", "user_agent", "device_id", "last_seen"],
desc="get_user_ip_and_agents",
)
)
self.assertEqual(
result,
[
{
"access_token": "access_token",
"ip": "ip",
"user_agent": "user_agent",
"device_id": device_id,
"last_seen": 0,
}
],
)
self.reactor.advance(60 * 24 * 60 * 60)
result = self.get_success(
self.store.db_pool.simple_select_list(
table="user_ips",
keyvalues={"user_id": user_id},
retcols=["access_token", "ip", "user_agent", "device_id", "last_seen"],
desc="get_user_ip_and_agents",
)
)
self.assertEqual(result, [])
result = self.get_success(
self.store.get_last_client_ip_by_device(user_id, device_id)
)
r = result[(user_id, device_id)]
self.assertDictContainsSubset(
{
"user_id": user_id,
"device_id": device_id,
"ip": "ip",
"user_agent": "user_agent",
"last_seen": 0,
},
r,
)
class ClientIpAuthTestCase(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
]
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver()
return hs
def prepare(self, hs, reactor, clock):
self.store = self.hs.get_datastore()
self.user_id = self.register_user("bob", "abc123", True)
def test_request_with_xforwarded(self):
self._runtest(
{b"X-Forwarded-For": b"127.9.0.1"},
"127.9.0.1",
{"request": XForwardedForRequest},
)
def test_request_from_getPeer(self):
self._runtest({}, "127.0.0.1", {})
def _runtest(self, headers, expected_ip, make_request_args):
device_id = "bleb"
access_token = self.login("bob", "abc123", device_id=device_id)
self.reactor.advance(123456 - self.reactor.seconds())
headers1 = {b"User-Agent": b"Mozzila pizza"}
headers1.update(headers)
make_request(
self.reactor,
self.site,
"GET",
"/_synapse/admin/v2/users/" + self.user_id,
access_token=access_token,
custom_headers=headers1.items(),
**make_request_args,
)
self.reactor.advance(100)
result = self.get_success(
self.store.get_last_client_ip_by_device(self.user_id, device_id)
)
r = result[(self.user_id, device_id)]
self.assertDictContainsSubset(
{
"user_id": self.user_id,
"device_id": device_id,
"ip": expected_ip,
"user_agent": "Mozzila pizza",
"last_seen": 123456100,
},
r,
)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.