hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7f7afd9fa60e22f8e3a82ea3ec80f8b51d72af3 | 6,659 | py | Python | mega_core/modeling/roi_heads/box_head/inference.py | hanranCode/mega.pytorch | 28c8a184372aa57a942576a944b3526590bc1ace | [
"BSD-2-Clause"
] | 521 | 2020-03-23T13:08:44.000Z | 2022-03-31T08:50:01.000Z | mega_core/modeling/roi_heads/box_head/inference.py | hanranCode/mega.pytorch | 28c8a184372aa57a942576a944b3526590bc1ace | [
"BSD-2-Clause"
] | 108 | 2020-03-27T07:20:12.000Z | 2022-03-22T03:30:04.000Z | mega_core/modeling/roi_heads/box_head/inference.py | hanranCode/mega.pytorch | 28c8a184372aa57a942576a944b3526590bc1ace | [
"BSD-2-Clause"
] | 115 | 2020-03-27T06:40:57.000Z | 2022-02-28T07:27:27.000Z | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
import torch.nn.functional as F
from torch import nn
from mega_core.structures.bounding_box import BoxList
from mega_core.structures.boxlist_ops import boxlist_nms
from mega_core.structures.boxlist_ops import cat_boxlist
from mega_core.modeling.box_coder import BoxCoder
class PostProcessor(nn.Module):
    """
    From a set of classification scores, box regression and proposals,
    computes the post-processed boxes, and applies NMS to obtain the
    final results
    """

    def __init__(
        self,
        score_thresh=0.05,
        nms=0.5,
        detections_per_img=100,
        box_coder=None,
        cls_agnostic_bbox_reg=False,
        bbox_aug_enabled=False
    ):
        """
        Arguments:
            score_thresh (float): minimum class probability for a detection
                to be kept
            nms (float): IoU threshold used by per-class non-maximum
                suppression
            detections_per_img (int): cap on the number of detections kept
                per image over all classes (<= 0 disables the cap)
            box_coder (BoxCoder): decodes regression deltas into boxes;
                defaults to the standard (10, 10, 5, 5) weights
            cls_agnostic_bbox_reg (bool): if True the model predicts a single
                box per proposal instead of one box per class
            bbox_aug_enabled (bool): if True, thresholding/NMS is deferred to
                the bbox augmentation code that runs later
        """
        super(PostProcessor, self).__init__()
        self.score_thresh = score_thresh
        self.nms = nms
        self.detections_per_img = detections_per_img
        if box_coder is None:
            box_coder = BoxCoder(weights=(10., 10., 5., 5.))
        self.box_coder = box_coder
        self.cls_agnostic_bbox_reg = cls_agnostic_bbox_reg
        self.bbox_aug_enabled = bbox_aug_enabled

    def forward(self, x, boxes):
        """
        Arguments:
            x (tuple[tensor, tensor]): x contains the class logits
                and the box_regression from the model.
            boxes (list[BoxList]): bounding boxes that are used as
                reference, one for each image

        Returns:
            results (list[BoxList]): one BoxList for each image, containing
                the extra fields labels and scores
        """
        class_logits, box_regression = x
        class_prob = F.softmax(class_logits, -1)

        # TODO think about a representation of batch of boxes
        image_shapes = [box.size for box in boxes]
        boxes_per_image = [len(box) for box in boxes]
        concat_boxes = torch.cat([a.bbox for a in boxes], dim=0)

        if self.cls_agnostic_bbox_reg:
            # Only the last 4 regression values carry the (single) box.
            box_regression = box_regression[:, -4:]
        proposals = self.box_coder.decode(
            box_regression.view(sum(boxes_per_image), -1), concat_boxes
        )
        if self.cls_agnostic_bbox_reg:
            # Tile the single decoded box across classes so downstream code
            # can keep indexing one box per class.
            proposals = proposals.repeat(1, class_prob.shape[1])

        num_classes = class_prob.shape[1]

        # Split the flat, batch-concatenated tensors back into one chunk
        # per image.
        proposals = proposals.split(boxes_per_image, dim=0)
        class_prob = class_prob.split(boxes_per_image, dim=0)

        results = []
        for prob, boxes_per_img, image_shape in zip(
            class_prob, proposals, image_shapes
        ):
            boxlist = self.prepare_boxlist(boxes_per_img, prob, image_shape)
            boxlist = boxlist.clip_to_image(remove_empty=False)
            if not self.bbox_aug_enabled:  # If bbox aug is enabled, we will do it later
                boxlist = self.filter_results(boxlist, num_classes)
            results.append(boxlist)
        return results

    def prepare_boxlist(self, boxes, scores, image_shape):
        """
        Returns BoxList from `boxes` and adds probability scores information
        as an extra field
        `boxes` has shape (#detections, 4 * #classes), where each row represents
        a list of predicted bounding boxes for each of the object classes in the
        dataset (including the background class). The detections in each row
        originate from the same object proposal.
        `scores` has shape (#detection, #classes), where each row represents a list
        of object detection confidence scores for each of the object classes in the
        dataset (including the background class). `scores[i, j]`` corresponds to the
        box at `boxes[i, j * 4:(j + 1) * 4]`.
        """
        # Flatten to one (box, score) pair per class per proposal.
        boxes = boxes.reshape(-1, 4)
        scores = scores.reshape(-1)
        boxlist = BoxList(boxes, image_shape, mode="xyxy")
        boxlist.add_field("scores", scores)
        return boxlist

    def filter_results(self, boxlist, num_classes):
        """Returns bounding-box detection results by thresholding on scores and
        applying non-maximum suppression (NMS).
        """
        # unwrap the boxlist to avoid additional overhead.
        # if we had multi-class NMS, we could perform this directly on the boxlist
        boxes = boxlist.bbox.reshape(-1, num_classes * 4)
        scores = boxlist.get_field("scores").reshape(-1, num_classes)

        device = scores.device
        result = []
        # Apply threshold on detection probabilities and apply NMS
        # Skip j = 0, because it's the background class
        inds_all = scores > self.score_thresh
        for j in range(1, num_classes):
            inds = inds_all[:, j].nonzero().squeeze(1)
            scores_j = scores[inds, j]
            boxes_j = boxes[inds, j * 4 : (j + 1) * 4]
            boxlist_for_class = BoxList(boxes_j, boxlist.size, mode="xyxy")
            boxlist_for_class.add_field("scores", scores_j)
            # Per-class NMS: boxes of different classes never suppress
            # each other.
            boxlist_for_class = boxlist_nms(
                boxlist_for_class, self.nms
            )
            num_labels = len(boxlist_for_class)
            boxlist_for_class.add_field(
                "labels", torch.full((num_labels,), j, dtype=torch.int64, device=device)
            )
            result.append(boxlist_for_class)

        result = cat_boxlist(result)
        number_of_detections = len(result)

        # Limit to max_per_image detections **over all classes**
        if number_of_detections > self.detections_per_img > 0:
            cls_scores = result.get_field("scores")
            # kthvalue yields the score of the weakest detection we keep.
            image_thresh, _ = torch.kthvalue(
                cls_scores.cpu(), number_of_detections - self.detections_per_img + 1
            )
            keep = cls_scores >= image_thresh.item()
            keep = torch.nonzero(keep).squeeze(1)
            result = result[keep]
        return result
def make_roi_box_post_processor(cfg):
    """Build the RoI box head :class:`PostProcessor` from the global config.

    Arguments:
        cfg: a yacs-style configuration node providing the
            ``MODEL.ROI_HEADS.*``, ``MODEL.CLS_AGNOSTIC_BBOX_REG`` and
            ``TEST.BBOX_AUG.ENABLED`` settings.

    Returns:
        PostProcessor: the configured post-processing module.
    """
    # NOTE: cfg.MODEL.ROI_HEADS.USE_FPN was previously read into an unused
    # local here; the post-processor does not depend on it, so the dead
    # read was dropped.
    bbox_reg_weights = cfg.MODEL.ROI_HEADS.BBOX_REG_WEIGHTS
    box_coder = BoxCoder(weights=bbox_reg_weights)

    score_thresh = cfg.MODEL.ROI_HEADS.SCORE_THRESH
    nms_thresh = cfg.MODEL.ROI_HEADS.NMS
    detections_per_img = cfg.MODEL.ROI_HEADS.DETECTIONS_PER_IMG
    cls_agnostic_bbox_reg = cfg.MODEL.CLS_AGNOSTIC_BBOX_REG
    bbox_aug_enabled = cfg.TEST.BBOX_AUG.ENABLED

    postprocessor = PostProcessor(
        score_thresh,
        nms_thresh,
        detections_per_img,
        box_coder,
        cls_agnostic_bbox_reg,
        bbox_aug_enabled
    )
    return postprocessor
| 38.491329 | 88 | 0.640036 |
import torch
import torch.nn.functional as F
from torch import nn
from mega_core.structures.bounding_box import BoxList
from mega_core.structures.boxlist_ops import boxlist_nms
from mega_core.structures.boxlist_ops import cat_boxlist
from mega_core.modeling.box_coder import BoxCoder
class PostProcessor(nn.Module):
    """Turn raw box-head outputs (class logits + box regression deltas)
    into final per-image detections via score thresholding and NMS."""

    def __init__(
        self,
        score_thresh=0.05,
        nms=0.5,
        detections_per_img=100,
        box_coder=None,
        cls_agnostic_bbox_reg=False,
        bbox_aug_enabled=False
    ):
        """
        score_thresh: minimum class probability to keep a detection.
        nms: IoU threshold for per-class non-maximum suppression.
        detections_per_img: cap on detections per image (<= 0 disables it).
        box_coder: decoder for regression deltas (default weights 10,10,5,5).
        cls_agnostic_bbox_reg: model predicts one box per proposal.
        bbox_aug_enabled: defer filtering/NMS to the bbox-aug stage.
        """
        super(PostProcessor, self).__init__()
        self.score_thresh = score_thresh
        self.nms = nms
        self.detections_per_img = detections_per_img
        if box_coder is None:
            box_coder = BoxCoder(weights=(10., 10., 5., 5.))
        self.box_coder = box_coder
        self.cls_agnostic_bbox_reg = cls_agnostic_bbox_reg
        self.bbox_aug_enabled = bbox_aug_enabled

    def forward(self, x, boxes):
        """x: (class_logits, box_regression) tensors; boxes: list[BoxList],
        one per image. Returns list[BoxList] carrying a 'scores' field
        (and, after filtering, 'labels')."""
        class_logits, box_regression = x
        class_prob = F.softmax(class_logits, -1)
        image_shapes = [box.size for box in boxes]
        boxes_per_image = [len(box) for box in boxes]
        concat_boxes = torch.cat([a.bbox for a in boxes], dim=0)
        if self.cls_agnostic_bbox_reg:
            # Only the last 4 regression values carry the (single) box.
            box_regression = box_regression[:, -4:]
        proposals = self.box_coder.decode(
            box_regression.view(sum(boxes_per_image), -1), concat_boxes
        )
        if self.cls_agnostic_bbox_reg:
            # Tile the single box across classes for uniform indexing.
            proposals = proposals.repeat(1, class_prob.shape[1])
        num_classes = class_prob.shape[1]
        # Split the batch-concatenated tensors back per image.
        proposals = proposals.split(boxes_per_image, dim=0)
        class_prob = class_prob.split(boxes_per_image, dim=0)
        results = []
        for prob, boxes_per_img, image_shape in zip(
            class_prob, proposals, image_shapes
        ):
            boxlist = self.prepare_boxlist(boxes_per_img, prob, image_shape)
            boxlist = boxlist.clip_to_image(remove_empty=False)
            if not self.bbox_aug_enabled:  # bbox-aug performs filtering later
                boxlist = self.filter_results(boxlist, num_classes)
            results.append(boxlist)
        return results

    def prepare_boxlist(self, boxes, scores, image_shape):
        """Flatten per-class boxes/scores into one BoxList with a 'scores'
        field; boxes is (#det, 4 * #classes), scores is (#det, #classes)."""
        boxes = boxes.reshape(-1, 4)
        scores = scores.reshape(-1)
        boxlist = BoxList(boxes, image_shape, mode="xyxy")
        boxlist.add_field("scores", scores)
        return boxlist

    def filter_results(self, boxlist, num_classes):
        """Threshold on scores, run per-class NMS (class 0, the background,
        is skipped), then cap total detections per image."""
        boxes = boxlist.bbox.reshape(-1, num_classes * 4)
        scores = boxlist.get_field("scores").reshape(-1, num_classes)
        device = scores.device
        result = []
        inds_all = scores > self.score_thresh
        for j in range(1, num_classes):
            inds = inds_all[:, j].nonzero().squeeze(1)
            scores_j = scores[inds, j]
            boxes_j = boxes[inds, j * 4 : (j + 1) * 4]
            boxlist_for_class = BoxList(boxes_j, boxlist.size, mode="xyxy")
            boxlist_for_class.add_field("scores", scores_j)
            boxlist_for_class = boxlist_nms(
                boxlist_for_class, self.nms
            )
            num_labels = len(boxlist_for_class)
            boxlist_for_class.add_field(
                "labels", torch.full((num_labels,), j, dtype=torch.int64, device=device)
            )
            result.append(boxlist_for_class)
        result = cat_boxlist(result)
        number_of_detections = len(result)
        # Limit to max_per_image detections **over all classes**
        if number_of_detections > self.detections_per_img > 0:
            cls_scores = result.get_field("scores")
            # kthvalue yields the lowest score among the kept detections.
            image_thresh, _ = torch.kthvalue(
                cls_scores.cpu(), number_of_detections - self.detections_per_img + 1
            )
            keep = cls_scores >= image_thresh.item()
            keep = torch.nonzero(keep).squeeze(1)
            result = result[keep]
        return result
def make_roi_box_post_processor(cfg):
    """Build a PostProcessor for the RoI box head from the global config.

    NOTE(review): `use_fpn` is read from the config but never used.
    """
    use_fpn = cfg.MODEL.ROI_HEADS.USE_FPN
    bbox_reg_weights = cfg.MODEL.ROI_HEADS.BBOX_REG_WEIGHTS
    box_coder = BoxCoder(weights=bbox_reg_weights)
    score_thresh = cfg.MODEL.ROI_HEADS.SCORE_THRESH
    nms_thresh = cfg.MODEL.ROI_HEADS.NMS
    detections_per_img = cfg.MODEL.ROI_HEADS.DETECTIONS_PER_IMG
    cls_agnostic_bbox_reg = cfg.MODEL.CLS_AGNOSTIC_BBOX_REG
    bbox_aug_enabled = cfg.TEST.BBOX_AUG.ENABLED
    postprocessor = PostProcessor(
        score_thresh,
        nms_thresh,
        detections_per_img,
        box_coder,
        cls_agnostic_bbox_reg,
        bbox_aug_enabled
    )
    return postprocessor
| true | true |
f7f7affade8d8a542114890e32421fd8e3b829ba | 3,839 | py | Python | hubspot/crm/tickets/models/batch_input_simple_public_object_batch_input.py | fakepop/hubspot-api-python | f04103a09f93f5c26c99991b25fa76801074f3d3 | [
"Apache-2.0"
] | 1 | 2020-11-12T08:46:32.000Z | 2020-11-12T08:46:32.000Z | hubspot/crm/tickets/models/batch_input_simple_public_object_batch_input.py | fakepop/hubspot-api-python | f04103a09f93f5c26c99991b25fa76801074f3d3 | [
"Apache-2.0"
] | null | null | null | hubspot/crm/tickets/models/batch_input_simple_public_object_batch_input.py | fakepop/hubspot-api-python | f04103a09f93f5c26c99991b25fa76801074f3d3 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Tickets
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from hubspot.crm.tickets.configuration import Configuration
class BatchInputSimplePublicObjectBatchInput(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {"inputs": "list[SimplePublicObjectBatchInput]"}

    attribute_map = {"inputs": "inputs"}

    def __init__(self, inputs=None, local_vars_configuration=None):  # noqa: E501
        """BatchInputSimplePublicObjectBatchInput - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._inputs = None
        self.discriminator = None

        # Assign through the property so the setter's validation runs.
        self.inputs = inputs

    @property
    def inputs(self):
        """Gets the inputs of this BatchInputSimplePublicObjectBatchInput.  # noqa: E501


        :return: The inputs of this BatchInputSimplePublicObjectBatchInput.  # noqa: E501
        :rtype: list[SimplePublicObjectBatchInput]
        """
        return self._inputs

    @inputs.setter
    def inputs(self, inputs):
        """Sets the inputs of this BatchInputSimplePublicObjectBatchInput.


        :param inputs: The inputs of this BatchInputSimplePublicObjectBatchInput.  # noqa: E501
        :type: list[SimplePublicObjectBatchInput]
        """
        # `inputs` is required: reject None when client-side validation is on.
        if (
            self.local_vars_configuration.client_side_validation and inputs is None
        ):  # noqa: E501
            raise ValueError(
                "Invalid value for `inputs`, must not be `None`"
            )  # noqa: E501

        self._inputs = inputs

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize nested models inside lists.
                result[attr] = list(
                    map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
                )
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize nested models inside dict values.
                result[attr] = dict(
                    map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict")
                        else item,
                        value.items(),
                    )
                )
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, BatchInputSimplePublicObjectBatchInput):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, BatchInputSimplePublicObjectBatchInput):
            return True

        return self.to_dict() != other.to_dict()
| 30.959677 | 124 | 0.598333 |
import pprint
import re
import six
from hubspot.crm.tickets.configuration import Configuration
class BatchInputSimplePublicObjectBatchInput(object):
    """Request model wrapping a batch of SimplePublicObjectBatchInput items.

    Auto-generated OpenAPI model; `openapi_types` and `attribute_map`
    drive the generic (de)serialization below.
    """
    openapi_types = {"inputs": "list[SimplePublicObjectBatchInput]"}
    attribute_map = {"inputs": "inputs"}

    def __init__(self, inputs=None, local_vars_configuration=None):
        """Create the model; `inputs` is validated by the property setter."""
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        self._inputs = None
        self.discriminator = None
        # Assign through the property so the setter's validation runs.
        self.inputs = inputs

    @property
    def inputs(self):
        """The batch payload (list[SimplePublicObjectBatchInput])."""
        return self._inputs

    @inputs.setter
    def inputs(self, inputs):
        """Set the batch payload; raises ValueError on None when
        client-side validation is enabled."""
        if (
            self.local_vars_configuration.client_side_validation and inputs is None
        ):
            raise ValueError(
                "Invalid value for `inputs`, must not be `None`"
            )
        self._inputs = inputs

    def to_dict(self):
        """Return the model properties as a dict, recursing into nested
        models (anything exposing to_dict) inside lists and dict values."""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(
                    map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
                )
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(
                    map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict")
                        else item,
                        value.items(),
                    )
                )
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string form of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True when `other` is the same model type with equal fields."""
        if not isinstance(other, BatchInputSimplePublicObjectBatchInput):
            return False
        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Inverse of __eq__."""
        if not isinstance(other, BatchInputSimplePublicObjectBatchInput):
            return True
        return self.to_dict() != other.to_dict()
| true | true |
f7f7b1147798ce46ece79acebd6afb2506af7e3b | 4,106 | py | Python | snakescale/formatters.py | clintval/snakeskin | 0d282ae8331756ca5b93ffe7146af1ba6491d579 | [
"MIT"
] | 6 | 2018-01-24T04:40:45.000Z | 2018-08-10T18:03:35.000Z | snakescale/formatters.py | clintval/snakeskin | 0d282ae8331756ca5b93ffe7146af1ba6491d579 | [
"MIT"
] | 9 | 2018-12-27T18:13:15.000Z | 2019-01-01T23:06:22.000Z | snakescale/formatters.py | clintval/snakeskin | 0d282ae8331756ca5b93ffe7146af1ba6491d579 | [
"MIT"
] | null | null | null | from types import GeneratorType
from typing import List, Mapping, Union
# Names re-exported by `from snakescale.formatters import *`.
# `format_bed_key` is omitted and therefore module-private.
__all__ = [
    'clean_picard_style_value',
    'snakecase_to_kebab_case',
    'clean_picard_style_key',
    'format_bedtools_params',
    'format_bwa_params',
    'format_dwgsim_params',
    'format_fgbio_params',
    'format_kraken_params',
    'format_picard_params',
]
def clean_picard_style_value(value: Union[List[str], str]) -> Union[List[str], str]:
    """Render a Python value using Picard's command-line conventions.

    ``None``/``True``/``False`` become the strings ``'null'``/``'true'``/
    ``'false'``; lists, tuples and generators are cleaned element-wise
    into a list; any other value is returned unchanged.
    """
    if isinstance(value, (list, tuple, GeneratorType)):
        return [clean_picard_style_value(item) for item in value]  # type: ignore
    if value is None:
        return 'null'
    if value is True:
        return 'true'
    if value is False:
        return 'false'
    return value
def format_bed_key(key: str) -> str:
    """Convert a snake_case option name into a bedtools flag.

    Underscores are stripped (not replaced) and a single dash is
    prepended, e.g. ``'a_b'`` -> ``'-ab'``.
    """
    return '-{}'.format(key.replace('_', ''))
def snakecase_to_kebab_case(key: str) -> str:
    """Convert a snake_case name into a long GNU-style flag.

    The input is lowercased and underscores become dashes, with a
    leading ``--`` prepended, e.g. ``'min_reads'`` -> ``'--min-reads'``.
    """
    return '--' + key.lower().replace('_', '-')
def clean_picard_style_key(key: str) -> str:
    """Clean a Picard parameter key.

    Picard options are written UPPER_SNAKE_CASE on the command line, so
    the key is simply upper-cased.
    """
    return key.upper()
def format_bedtools_params(params: Mapping) -> str:
    """Render *params* as a bedtools command-line option string.

    The ``extra`` key is skipped, ``True`` values become bare flags,
    ``False`` values drop the option entirely, and every other value is
    emitted as ``-flag value``.
    """
    pieces = []
    for name, value in params.items():
        if name == 'extra' or value is False:
            continue
        flag = format_bed_key(name)
        if value is True:
            pieces.append(f' {flag}')
        else:
            pieces.append(f' {flag} {value}')
    return ''.join(pieces)
def format_bwa_params(params: Mapping) -> str:
    """Render *params* as a bwa command-line option string.

    Skips the ``extra`` key, turns ``True`` into a bare ``-flag``, drops
    ``False``-valued options, and renders everything else as
    ``-flag value``.
    """
    rendered = []
    for name, value in params.items():
        if name == 'extra' or value is False:
            continue
        if value is True:
            rendered.append(' -' + name)
        else:
            rendered.append(f' -{name} {value}')
    return ''.join(rendered)
def format_dwgsim_params(params: Mapping) -> str:
    """Render *params* as a dwgsim command-line option string.

    Skips the ``extra`` and ``output_prefix`` keys, maps the ``r1``/``r2``
    read keys onto dwgsim's numeric ``-1``/``-2`` flags, turns ``True``
    into a bare flag, drops ``False`` values, and renders everything else
    as ``-flag value``.
    """
    read_flags = {'r1': '1', 'r2': '2'}
    rendered = []
    for name, value in params.items():
        if name in ('extra', 'output_prefix') or value is False:
            continue
        flag = read_flags.get(name, name)
        if value is True:
            rendered.append(' -' + flag)
        else:
            rendered.append(f' -{flag} {value}')
    return ''.join(rendered)
def format_fgbio_params(params: Mapping) -> str:
    """Clean a dictionary of fgbio key-value pairs.

    Renders each entry as ``--kebab-case-key=value``; list values are
    repeated once per element, and values are normalized with
    :func:`clean_picard_style_value`. The ``extra`` key is skipped.
    """
    formatted_params = ''
    for key, value in params.items():
        # Skip the passthrough key *before* kebab-casing it; the previous
        # code compared after prefixing, so 'extra' was never skipped.
        if key == 'extra':
            continue
        key = snakecase_to_kebab_case(key)
        value = clean_picard_style_value(value)
        if isinstance(value, list):
            # `key` already carries the leading '--' from
            # snakecase_to_kebab_case, so no extra dashes are added here
            # (the previous f' --{key}=' emitted four dashes).
            formatted_params += ''.join(f' {key}={v}' for v in value)
        else:
            formatted_params += f' {key}={value}'
    return formatted_params
def format_kraken_params(params: Mapping) -> str:
    """Clean a dictionary of kraken key-value pairs.

    ``True`` values become bare ``--kebab-case`` flags, ``False`` values
    drop the option, and everything else renders as ``--flag value``.
    The ``extra`` key is skipped.
    """
    formatted_params = ''
    for key, value in params.items():
        # Compare before kebab-casing: the previous code prefixed '--'
        # first, so the 'extra' check could never match.
        if key == 'extra' or value is False:
            continue
        key = snakecase_to_kebab_case(key)
        if value is True:
            # snakecase_to_kebab_case already prepends '--'; emitting
            # ' --{key}' here (as before) printed four dashes.
            formatted_params += f' {key}'
        else:
            formatted_params += f' {key} {value}'
    return formatted_params
def format_picard_params(params: Mapping) -> str:
    """Clean a dictionary of picard key-value pairs.

    Renders each entry as ``KEY=value``; list values are repeated once
    per element, and values are normalized with
    :func:`clean_picard_style_value`. The ``extra`` key is skipped.
    """
    formatted_params = ''
    for key, value in params.items():
        # Skip before upper-casing: the previous code compared the
        # upper-cased key ('EXTRA') to 'extra', which never matched, so
        # the extra key leaked into the output.
        if key == 'extra':
            continue
        key = clean_picard_style_key(key)
        value = clean_picard_style_value(value)
        if isinstance(value, list):
            formatted_params += ''.join(f' {key}={v}' for v in value)
        else:
            formatted_params += f' {key}={value}'
    return formatted_params
| 27.013158 | 84 | 0.595226 | from types import GeneratorType
from typing import List, Mapping, Union
# Names re-exported by `from ... import *`; format_bed_key is omitted
# and therefore module-private.
__all__ = [
    'clean_picard_style_value',
    'snakecase_to_kebab_case',
    'clean_picard_style_key',
    'format_bedtools_params',
    'format_bwa_params',
    'format_dwgsim_params',
    'format_fgbio_params',
    'format_kraken_params',
    'format_picard_params',
]
def clean_picard_style_value(value: Union[List[str], str]) -> Union[List[str], str]:
    """Render a value using Picard's command-line conventions.

    None/True/False become 'null'/'true'/'false'; lists, tuples and
    generators are cleaned element-wise into a list; anything else is
    returned unchanged.
    """
    if isinstance(value, (list, tuple, GeneratorType)):
        return list(map(clean_picard_style_value, value))
    elif value is None:
        return 'null'
    elif value is True:
        return 'true'
    elif value is False:
        return 'false'
    else:
        return value
def format_bed_key(key: str) -> str:
    """Strip underscores from *key* and prepend a single dash."""
    return '-' + key.replace('_', '')
def snakecase_to_kebab_case(key: str) -> str:
    """Lowercase *key* and turn it into a '--kebab-case' long flag."""
    return f'--{key.lower().replace("_", "-")}'
def clean_picard_style_key(key: str) -> str:
    """Upper-case *key* per Picard's UPPER_SNAKE_CASE option style."""
    return key.upper()
def format_bedtools_params(params: Mapping) -> str:
    """Render *params* as a bedtools option string.

    Skips the 'extra' key, emits True values as bare flags, drops False
    values, and renders everything else as '-flag value'.
    """
    formatted_params = ''
    for key, value in params.items():
        if key == 'extra':
            continue
        key = format_bed_key(key)
        if value is True:
            formatted_params += f' {key}'
        elif value is False:
            continue
        else:
            formatted_params += f' {key} {value}'
    return formatted_params
def format_bwa_params(params: Mapping) -> str:
    """Render *params* as a bwa option string.

    Skips the 'extra' key, emits True values as bare '-flag', drops
    False values, and renders everything else as '-flag value'.
    """
    formatted_params = ''
    for key, value in params.items():
        if key == 'extra':
            continue
        elif value is True:
            formatted_params += f' -{key}'
        elif value is False:
            continue
        else:
            formatted_params += f' -{key} {value}'
    return formatted_params
def format_dwgsim_params(params: Mapping) -> str:
    """Render *params* as a dwgsim option string.

    Skips 'extra' and 'output_prefix', maps r1/r2 onto the numeric
    -1/-2 flags, emits True as a bare flag, drops False, and renders
    everything else as '-flag value'.
    """
    formatted_params = ''
    for key, value in params.items():
        if key in ('extra', 'output_prefix'):
            continue
        key = '1' if key == 'r1' else key
        key = '2' if key == 'r2' else key
        if value is True:
            formatted_params += f' -{key}'
        elif value is False:
            continue
        else:
            formatted_params += f' -{key} {value}'
    return formatted_params
def format_fgbio_params(params: Mapping) -> str:
    """Render *params* as fgbio '--flag=value' options.

    NOTE(review): snakecase_to_kebab_case already prepends '--', so the
    f' --{key}=' templates below emit four dashes, and the 'extra'
    comparison runs after prefixing so it can never match — both look
    like bugs; confirm against fgbio's expected option syntax.
    """
    formatted_params = ''
    for key, value in params.items():
        key = snakecase_to_kebab_case(key)
        value = clean_picard_style_value(value)
        if key == 'extra':
            continue
        elif isinstance(value, list):
            formatted_params += ''.join(f' --{key}={v}' for v in value)
        else:
            formatted_params += f' --{key}={value}'
    return formatted_params
def format_kraken_params(params: Mapping) -> str:
    """Render *params* as kraken '--flag value' options.

    NOTE(review): snakecase_to_kebab_case already prepends '--', so the
    f' --{key}' templates below emit four dashes, and the 'extra'
    comparison runs after prefixing so it can never match — both look
    like bugs to confirm upstream.
    """
    formatted_params = ''
    for key, value in params.items():
        key = snakecase_to_kebab_case(key)
        if key == 'extra':
            continue
        elif value is True:
            formatted_params += f' --{key}'
        elif value is False:
            continue
        else:
            formatted_params += f' --{key} {value}'
    return formatted_params
def format_picard_params(params: Mapping) -> str:
    """Render *params* as Picard 'KEY=value' options.

    NOTE(review): the key is upper-cased before the 'extra' comparison,
    so 'extra' ('EXTRA' by then) can never be skipped here — likely a
    bug to confirm upstream.
    """
    formatted_params = ''
    for key, value in params.items():
        key = clean_picard_style_key(key)
        value = clean_picard_style_value(value)
        if key == 'extra':
            continue
        elif isinstance(value, list):
            formatted_params += ''.join(f' {key}={v}' for v in value)
        else:
            formatted_params += f' {key}={value}'
    return formatted_params
| true | true |
f7f7b1197246e351dfa64cedb61e7d02f4b2b224 | 5,756 | py | Python | server_benchmark.py | ljishen/server | b641df7de19afb67df28d70c9b64b4faa3c56b23 | [
"Apache-2.0"
] | null | null | null | server_benchmark.py | ljishen/server | b641df7de19afb67df28d70c9b64b4faa3c56b23 | [
"Apache-2.0"
] | null | null | null | server_benchmark.py | ljishen/server | b641df7de19afb67df28d70c9b64b4faa3c56b23 | [
"Apache-2.0"
] | null | null | null | """
Stand-alone benchmark for the GA4GH reference implementation.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import cProfile
import pstats
import re
import time
import timeit

import guppy

import ga4gh.backend as backend
import ga4gh.protocol as protocol
import ga4gh.datarepo as datarepo
class HeapProfilerBackend(backend.Backend):
    """Backend variant that prints a guppy heap snapshot per profile window.

    startProfile/endProfile are presumably hooks invoked by the base
    Backend around request handling — confirm in ga4gh.backend.
    """
    def __init__(self, dataDir):
        # dataDir: path to the file-system data repository to serve.
        dataRepository = datarepo.FileSystemDataRepository(dataDir)
        super(HeapProfilerBackend, self).__init__(dataRepository)
        self.profiler = guppy.hpy()

    def startProfile(self):
        # Make subsequent heap measurements relative to this point.
        self.profiler.setrelheap()

    def endProfile(self):
        # Print the heap delta accumulated since startProfile().
        print(self.profiler.heap())
class CpuProfilerBackend(backend.Backend):
    """Backend variant that collects cProfile CPU stats per profile window.

    startProfile/endProfile are presumably hooks invoked by the base
    Backend around request handling — confirm in ga4gh.backend. The
    accumulated profile is read out of `self.profiler` by the caller
    (see the __main__ block, which prints pstats from it).
    """
    def __init__(self, dataDir):
        # dataDir: path to the file-system data repository to serve.
        dataRepository = datarepo.FileSystemDataRepository(dataDir)
        super(CpuProfilerBackend, self).__init__(dataRepository)
        self.profiler = cProfile.Profile()

    def startProfile(self):
        self.profiler.enable()

    def endProfile(self):
        self.profiler.disable()
def _heavyQuery(variantSetId, callSetIds):
    """
    Very heavy query: calls for the specified list of callSetIds
    on chromosome 2 (11 pages, 90 seconds to fetch the entire thing
    on a high-end desktop machine)

    variantSetId: the variant set to search.
    callSetIds: list of call set ID strings, or None for all call sets
        (see the '*' handling in the __main__ block).
    """
    request = protocol.GASearchVariantsRequest()
    request.referenceName = '2'
    request.variantSetIds = [variantSetId]
    request.callSetIds = callSetIds
    request.pageSize = 100
    request.end = 100000
    return request
def timeOneSearch(queryString):
    """
    Returns (search result as JSON string, time elapsed during search)

    Uses timeit.default_timer rather than time.clock(): time.clock() was
    deprecated and removed in Python 3.8, and on Unix it measured CPU
    time rather than the elapsed wall time this benchmark reports.
    """
    startTime = timeit.default_timer()
    resultString = backend.searchVariants(queryString)
    endTime = timeit.default_timer()
    elapsedTime = endTime - startTime
    return resultString, elapsedTime
def extractNextPageToken(resultString):
    """
    Pull the ``nextPageToken`` value out of a raw JSON response string.

    Calling GASearchVariantsResponse.fromJsonString() can be slower
    than doing the variant search in the first place; a regexp is used
    instead to extract the next page token.

    Returns the token string, or None when there are no further pages
    (token absent, or present as JSON null).
    """
    match = re.search(
        '(?<=nextPageToken": )(?:")?([0-9]*?:[0-9]*)|null', resultString)
    return None if match is None else match.group(1)
def benchmarkOneQuery(request, repeatLimit=3, pageLimit=3):
    """
    Repeat the query several times; perhaps don't go through *all* the
    pages. Returns minimum time to run backend.searchVariants() to execute
    the query (as far as pageLimit allows), *not* including JSON
    processing to prepare queries or parse responses.

    request: the GASearchVariantsRequest to benchmark.
    repeatLimit: number of times to repeat the whole query.
    pageLimit: maximum number of result pages fetched per repeat.
    """
    times = []
    # Captured before the loop, so every repeat starts from page one even
    # though request.pageToken is mutated below (pageRequest aliases
    # request).
    queryString = request.toJsonString()
    for i in range(0, repeatLimit):
        resultString, elapsedTime = timeOneSearch(queryString)
        accruedTime = elapsedTime
        pageCount = 1
        token = extractNextPageToken(resultString)
        # Iterate to go beyond the first page of results.
        while token is not None and pageCount < pageLimit:
            pageRequest = request
            pageRequest.pageToken = token
            pageRequestString = pageRequest.toJsonString()
            resultString, elapsedTime = timeOneSearch(pageRequestString)
            accruedTime += elapsedTime
            pageCount = pageCount + 1
            token = extractNextPageToken(resultString)
        times.append(accruedTime)

    # TODO: more sophisticated statistics. Sometimes we want min(),
    # sometimes mean = sum() / len(), sometimes other measures,
    # perhaps exclude outliers...

    # If we compute average we should throw out at least the first one.
    # return sum(times[2:])/len(times[2:])
    return min(times)
if __name__ == '__main__':
    # Command-line entry point: parse arguments, build a backend
    # (optionally wrapped in a heap/CPU profiler), run the heavy query,
    # and print the minimum observed time.
    parser = argparse.ArgumentParser(
        description="GA4GH reference server benchmark")
    parser.add_argument(
        'variantSetId',
        help="The variant set ID to run the query against")
    parser.add_argument(
        '--profile', default='none',
        choices=['none', 'heap', 'cpu'],
        help='"heap" runs a heap profiler once inside the backend, '
        '"cpu" runs a cpu profiler.')
    parser.add_argument(
        '--repeatLimit', type=int, default=3, metavar='N',
        help='how many times to run each test case (default: %(default)s)')
    parser.add_argument(
        '--pageLimit', type=int, default=3, metavar='N',
        help='how many pages (max) to load '
        'from each test case (default: %(default)s)')
    parser.add_argument(
        "--callSetIds", "-c", default=[],
        help="""Return variant calls which belong to call sets
            with these IDs. Pass in IDs as a comma separated list (no spaces),
            or '*' (with the single quotes!) to indicate 'all call sets'.
            Omit this option to indicate 'no call sets'.
            """)
    args = parser.parse_args()
    dataDir = "ga4gh-example-data"
    # NOTE(review): this rebinds the imported ga4gh.backend module name to
    # a Backend *instance*; later calls like backend.searchVariants() in
    # timeOneSearch() go through this instance.
    backend = backend.Backend(datarepo.FileSystemDataRepository(dataDir))
    if args.profile == 'heap':
        backendClass = HeapProfilerBackend
        backend = backendClass(dataDir)
        # Heap profiling is heavy; a single repeat/page is enough.
        args.repeatLimit = 1
        args.pageLimit = 1
    elif args.profile == 'cpu':
        backendClass = CpuProfilerBackend
        backend = backendClass(dataDir)
    # Get our list of callSetids
    callSetIds = args.callSetIds
    # If the option was supplied (a string, which compares != [] to the
    # default), translate it: '*' means all call sets (None); otherwise
    # split the comma-separated list.
    if callSetIds != []:
        callSetIds = None
        if args.callSetIds != "*":
            callSetIds = args.callSetIds.split(",")
    minTime = benchmarkOneQuery(
        _heavyQuery(args.variantSetId, callSetIds), args.repeatLimit,
        args.pageLimit)
    print(minTime)
    if args.profile == 'cpu':
        # Dump the profile collected by CpuProfilerBackend, sorted by
        # internal time; .25 prints only the top quarter of entries.
        stats = pstats.Stats(backend.profiler)
        stats.sort_stats('time')
        stats.print_stats(.25)
| 33.271676 | 78 | 0.66435 | from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import re
import time
import pstats
import argparse
import cProfile
import ga4gh.backend as backend
import ga4gh.protocol as protocol
import ga4gh.datarepo as datarepo
import guppy
class HeapProfilerBackend(backend.Backend):
    """Backend that prints guppy heap deltas per profile window
    (startProfile/endProfile presumably called by the base class —
    confirm in ga4gh.backend)."""
    def __init__(self, dataDir):
        # dataDir: path to the file-system data repository to serve.
        dataRepository = datarepo.FileSystemDataRepository(dataDir)
        super(HeapProfilerBackend, self).__init__(dataRepository)
        self.profiler = guppy.hpy()

    def startProfile(self):
        # Subsequent heap reports are relative to this point.
        self.profiler.setrelheap()

    def endProfile(self):
        print(self.profiler.heap())
class CpuProfilerBackend(backend.Backend):
    """Backend that accumulates cProfile CPU stats per profile window
    (startProfile/endProfile presumably called by the base class —
    confirm in ga4gh.backend)."""
    def __init__(self, dataDir):
        # dataDir: path to the file-system data repository to serve.
        dataRepository = datarepo.FileSystemDataRepository(dataDir)
        super(CpuProfilerBackend, self).__init__(dataRepository)
        self.profiler = cProfile.Profile()

    def startProfile(self):
        self.profiler.enable()

    def endProfile(self):
        self.profiler.disable()
def _heavyQuery(variantSetId, callSetIds):
    """Build a deliberately heavy variants search: chromosome 2, page
    size 100, end position 100000, over the given variant set and call
    set IDs (None means all call sets)."""
    request = protocol.GASearchVariantsRequest()
    request.referenceName = '2'
    request.variantSetIds = [variantSetId]
    request.callSetIds = callSetIds
    request.pageSize = 100
    request.end = 100000
    return request
def timeOneSearch(queryString):
    """Run one variants search; return (result JSON string, seconds).

    NOTE(review): time.clock() is deprecated (removed in Python 3.8) and
    on Unix measures CPU time rather than wall time — confirm intent.
    """
    startTime = time.clock()
    resultString = backend.searchVariants(queryString)
    endTime = time.clock()
    elapsedTime = endTime - startTime
    return resultString, elapsedTime
def extractNextPageToken(resultString):
    """Pull nextPageToken out of raw response JSON with a regexp
    (cheaper than parsing the whole response).

    Returns the token string, or None when the token is absent or null.
    """
    m = re.search('(?<=nextPageToken": )(?:")?([0-9]*?:[0-9]*)|null',
                  resultString)
    if m is not None:
        return m.group(1)
    return None
def benchmarkOneQuery(request, repeatLimit=3, pageLimit=3):
    """Time *request* end-to-end and return the fastest observed run.

    Each of the ``repeatLimit`` runs issues the initial query and then
    follows up to ``pageLimit`` pages via nextPageToken, summing the
    elapsed time of every page fetch. The minimum across runs is returned
    to damp out scheduling noise.
    """
    queryString = request.toJsonString()
    runTimes = []
    for _ in range(repeatLimit):
        resultString, elapsed = timeOneSearch(queryString)
        accrued = elapsed
        pagesFetched = 1
        token = extractNextPageToken(resultString)
        while token is not None and pagesFetched < pageLimit:
            # Reuse the request object for follow-up pages, only the
            # pageToken changes between fetches.
            request.pageToken = token
            resultString, elapsed = timeOneSearch(request.toJsonString())
            accrued += elapsed
            pagesFetched += 1
            token = extractNextPageToken(resultString)
        runTimes.append(accrued)
    return min(runTimes)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="GA4GH reference server benchmark")
    parser.add_argument(
        'variantSetId',
        help="The variant set ID to run the query against")
    parser.add_argument(
        '--profile', default='none',
        choices=['none', 'heap', 'cpu'],
        help='"heap" runs a heap profiler once inside the backend, '
        '"cpu" runs a cpu profiler.')
    parser.add_argument(
        '--repeatLimit', type=int, default=3, metavar='N',
        help='how many times to run each test case (default: %(default)s)')
    parser.add_argument(
        '--pageLimit', type=int, default=3, metavar='N',
        help='how many pages (max) to load '
        'from each test case (default: %(default)s)')
    parser.add_argument(
        "--callSetIds", "-c", default=[],
        help="""Return variant calls which belong to call sets
            with these IDs. Pass in IDs as a comma separated list (no spaces),
            or '*' (with the single quotes!) to indicate 'all call sets'.
            Omit this option to indicate 'no call sets'.
            """)
    args = parser.parse_args()
    dataDir = "ga4gh-example-data"
    # NOTE: this deliberately rebinds the module name `backend` to a Backend
    # *instance* -- timeOneSearch() reads the global `backend`, so it will
    # hit whichever instance is bound here (plain or profiling).
    backend = backend.Backend(datarepo.FileSystemDataRepository(dataDir))
    if args.profile == 'heap':
        backendClass = HeapProfilerBackend
        backend = backendClass(dataDir)
        # Heap profiling once is enough; force a single page of one run.
        args.repeatLimit = 1
        args.pageLimit = 1
    elif args.profile == 'cpu':
        backendClass = CpuProfilerBackend
        backend = backendClass(dataDir)
    # Translate --callSetIds into the protocol value: [] (default) means
    # "no call sets", '*' maps to None ("all call sets"), anything else is
    # a comma-separated list of IDs.
    callSetIds = args.callSetIds
    if callSetIds != []:
        callSetIds = None
        if args.callSetIds != "*":
            callSetIds = args.callSetIds.split(",")
    minTime = benchmarkOneQuery(
        _heavyQuery(args.variantSetId, callSetIds), args.repeatLimit,
        args.pageLimit)
    print(minTime)
    if args.profile == 'cpu':
        # Show the top 25% most time-consuming functions.
        stats = pstats.Stats(backend.profiler)
        stats.sort_stats('time')
        stats.print_stats(.25)
| true | true |
f7f7b18f35a9a358c26fa33887077fdb3fe6def0 | 3,712 | py | Python | lmnet/lmnet/datasets/lm_things_on_a_table.py | toohsk/blueoil | 596922caa939db9c5ecbac3286fbf6f703865ee6 | [
"Apache-2.0"
] | 1 | 2019-10-09T04:41:02.000Z | 2019-10-09T04:41:02.000Z | lmnet/lmnet/datasets/lm_things_on_a_table.py | toohsk/blueoil | 596922caa939db9c5ecbac3286fbf6f703865ee6 | [
"Apache-2.0"
] | 1 | 2018-11-21T07:06:17.000Z | 2018-11-21T07:06:17.000Z | lmnet/lmnet/datasets/lm_things_on_a_table.py | toohsk/blueoil | 596922caa939db9c5ecbac3286fbf6f703865ee6 | [
"Apache-2.0"
] | 2 | 2019-02-08T10:03:34.000Z | 2019-03-20T06:25:55.000Z | # -*- coding: utf-8 -*-
# Copyright 2018 The Blueoil Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import functools
import os.path
from lmnet.datasets.delta_mark import ObjectDetectionBase
from lmnet.utils.random import shuffle, train_test_split
class LmThingsOnATable(ObjectDetectionBase):
    """LeapMind "things on a table" dataset for object detection.

    images: numpy array of images, shape [batch_size, height, width].
    labels: numpy array of ground-truth boxes, shape
        [batch_size, num_max_boxes, 5(x, y, w, h, class_id)].
    """
    classes = ["hand", "salad", "steak", "whiskey", "book"]
    num_classes = len(classes)
    available_subsets = ["train", "validation"]
    extend_dir = "lm_things_on_a_table"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # The dataset ships two annotation sets: images with a single
        # labelled object and images with several.
        self.single = {
            "json": os.path.join(self.data_dir, "json/single_label_bound.json"),
            "dir": os.path.join(self.data_dir, "Data_single"),
        }
        self.multi = {
            "json": os.path.join(self.data_dir, "json/multi_label_bound_all.json"),
            "dir": os.path.join(self.data_dir, "Data_multi"),
        }

    def _single_files_and_annotations(self):
        """(files, labels) from the single-object annotation set."""
        return self._files_and_annotations(self.single["json"],
                                           self.single["dir"])

    def _multi_files_and_annotations(self):
        """(files, labels) from the multi-object annotation set."""
        return self._files_and_annotations(self.multi["json"],
                                           self.multi["dir"])

    @property
    @functools.lru_cache(maxsize=None)
    def files_and_annotations(self):
        """Return all (files, labels) for the current subset.

        Each annotation set is split 90/10 into train/validation with a
        fixed seed, the two parts are concatenated (multi first, then
        single) and shuffled deterministically.
        """
        held_out_fraction = 0.1

        single_files, single_labels = self._single_files_and_annotations()
        multi_files, multi_labels = self._multi_files_and_annotations()

        # train_test_split returns
        # (train_files, test_files, train_labels, test_labels).
        single_split = train_test_split(single_files,
                                        single_labels,
                                        test_size=held_out_fraction,
                                        seed=1)
        multi_split = train_test_split(multi_files,
                                       multi_labels,
                                       test_size=held_out_fraction,
                                       seed=1)

        if self.subset == "train":
            files = multi_split[0] + single_split[0]
            labels = multi_split[2] + single_split[2]
        else:
            files = multi_split[1] + single_split[1]
            labels = multi_split[3] + single_split[3]

        files, labels = shuffle(files, labels, seed=1)
        print("files and annotations are ready")
        return files, labels

    @property
    def num_max_boxes(self):
        # Precomputed with cls.count_max_boxes().
        return 6
| 35.018868 | 95 | 0.625269 |
import functools
import os.path
from lmnet.datasets.delta_mark import ObjectDetectionBase
from lmnet.utils.random import shuffle, train_test_split
class LmThingsOnATable(ObjectDetectionBase):
classes = ["hand", "salad", "steak", "whiskey", "book"]
num_classes = len(classes)
available_subsets = ["train", "validation"]
extend_dir = "lm_things_on_a_table"
def __init__(
self,
*args,
**kwargs
):
super().__init__(
*args,
**kwargs,
)
self.single = {
"json": os.path.join(self.data_dir, "json/single_label_bound.json"),
"dir": os.path.join(self.data_dir, "Data_single")
}
self.multi = {
"json": os.path.join(self.data_dir, "json/multi_label_bound_all.json"),
"dir": os.path.join(self.data_dir, "Data_multi")
}
def _single_files_and_annotations(self):
json_file = self.single["json"]
image_dir = self.single["dir"]
files, labels = self._files_and_annotations(json_file, image_dir)
return files, labels
def _multi_files_and_annotations(self):
json_file = self.multi["json"]
image_dir = self.multi["dir"]
files, labels = self._files_and_annotations(json_file, image_dir)
return files, labels
@property
@functools.lru_cache(maxsize=None)
def files_and_annotations(self):
single_split_rate = 0.1
multi_split_rate = 0.1
single_files, single_labels = self._single_files_and_annotations()
multi_files, multi_labels = self._multi_files_and_annotations()
train_single_files, test_single_files, train_single_labels, test_single_labels =\
train_test_split(single_files,
single_labels,
test_size=single_split_rate,
seed=1)
train_multi_files, test_multi_files, train_multi_labels, test_multi_labels =\
train_test_split(multi_files,
multi_labels,
test_size=multi_split_rate,
seed=1)
if self.subset == "train":
files = train_multi_files + train_single_files
labels = train_multi_labels + train_single_labels
else:
files = test_multi_files + test_single_files
labels = test_multi_labels + test_single_labels
files, labels = shuffle(files, labels, seed=1)
print("files and annotations are ready")
return files, labels
@property
def num_max_boxes(self):
return 6
| true | true |
f7f7b1f561e066a4d18e2b4947e76d5429e1b13e | 2,651 | py | Python | Code/Plugins/Replacement_Plugin.py | jschneidewind/pyH2A | 66f3943916ebdf5a4bae1706b74e5cef6131460e | [
"CC-BY-4.0",
"MIT"
] | 4 | 2022-03-24T01:04:55.000Z | 2022-03-30T03:06:41.000Z | Code/Plugins/Replacement_Plugin.py | jschneidewind/pyH2A | 66f3943916ebdf5a4bae1706b74e5cef6131460e | [
"CC-BY-4.0",
"MIT"
] | null | null | null | Code/Plugins/Replacement_Plugin.py | jschneidewind/pyH2A | 66f3943916ebdf5a4bae1706b74e5cef6131460e | [
"CC-BY-4.0",
"MIT"
] | null | null | null | from input_modification import insert, process_input, sum_all_tables
import numpy as np
import find_nearest as fn
class Replacement_Plugin:
    '''Compute yearly replacement costs and insert the inflated total.

    Parameter convention (plugin style used throughout this file): the
    first positional parameter ``replace`` is this plugin instance, while
    ``self`` is the calling analysis object, which provides ``inp``,
    ``inflation_correction``, ``inflation_factor`` etc.

    ______________
    Required Input
    ______________
    # Planned Replacement
    Name | Frequency (years) | Cost ($) | Path (optional)
    --- | --- | ---
    str | num | num
    process_input() is used, meaning that if a path is specified, the corresponding value at that path is retrieved and
    multiplied by "Cost ($)" to obtain the actual replacement cost.
    # Unplanned Replacement [...]
    Name | Value
    --- | ---
    str | num
    sum_all_tables() processed
    ______________
    Output
    ______________
    Insertion of "Summed Total" for each sum_all_tables() processed table
    Replacement > Total > Value
    '''
    def __init__(replace, self, print_info):
        # Build the per-year cost array first, then write the
        # inflation-adjusted total back into the analysis object.
        replace.initialize_yearly_costs(self)
        replace.calculate_planned_replacement(self)
        replace.unplanned_replacement(self, print_info)
        # Correct to the reference year and apply per-year inflation.
        yearly_inflated = replace.yearly * self.inflation_correction * self.inflation_factor
        insert(self, 'Replacement', 'Total', 'Value', yearly_inflated, __name__, print_info = print_info)
    def initialize_yearly_costs(replace, self):
        # One cost slot per analysis year (same length as inflation_factor).
        replace.yearly = np.zeros(len(self.inflation_factor))
    def calculate_planned_replacement(replace, self):
        # Accumulate each planned item's cost into the years it is billed.
        for key in self.inp['Planned Replacement']:
            planned_replacement = Planned_Replacement(self.inp['Planned Replacement'][key], key, self)
            replace.yearly[planned_replacement.years_idx] += planned_replacement.cost
    def unplanned_replacement(replace, self, print_info):
        # Unplanned costs are summed across all matching tables and added
        # uniformly to every year.
        replace.unplanned = sum_all_tables(self.inp, 'Unplanned Replacement', 'Value', insert_total = True, class_object = self, print_info = print_info)
        replace.yearly += replace.unplanned
class Planned_Replacement:
    '''Yearly cost contribution of a single planned replacement item.

    Replacement costs are billed annually; replacements which are performed
    at a non-integer rate (e.g. every 2.5 years) are billed on whole-year
    boundaries and scaled up by a non-integer correction factor so the
    annualized total matches.
    '''
    def __init__(planned, dictionary, key, self):
        # ``planned`` is this instance; ``self`` is the analysis object
        # (provides inp, plant_years, combined_inflator).
        planned.calculate_yearly_cost(dictionary, key, self)
    def calculate_yearly_cost(planned, dictionary, key, self):
        # Bill every ceil(frequency) years; scale the cost by
        # ceil(f)/f (>= 1) to compensate for the less frequent billing.
        replacement_frequency = int(np.ceil(dictionary['Frequency (years)']))
        non_integer_correction = replacement_frequency / dictionary['Frequency (years)']
        # 'Cost ($)' may reference another table entry via an optional
        # path; process_input resolves and multiplies it.
        raw_replacement_cost = process_input(self.inp, 'Planned Replacement', key, 'Cost ($)')
        initial_replacement_year_idx = fn.find_nearest(self.plant_years, replacement_frequency)[0]
        planned.cost = raw_replacement_cost * non_integer_correction * self.combined_inflator
        # Every replacement_frequency-th plant year, starting with the
        # first replacement year.
        planned.years = self.plant_years[initial_replacement_year_idx:][0::replacement_frequency]
        planned.years_idx = fn.find_nearest(self.plant_years, planned.years)
import numpy as np
import find_nearest as fn
class Replacement_Plugin:
def __init__(replace, self, print_info):
replace.initialize_yearly_costs(self)
replace.calculate_planned_replacement(self)
replace.unplanned_replacement(self, print_info)
yearly_inflated = replace.yearly * self.inflation_correction * self.inflation_factor
insert(self, 'Replacement', 'Total', 'Value', yearly_inflated, __name__, print_info = print_info)
def initialize_yearly_costs(replace, self):
replace.yearly = np.zeros(len(self.inflation_factor))
def calculate_planned_replacement(replace, self):
for key in self.inp['Planned Replacement']:
planned_replacement = Planned_Replacement(self.inp['Planned Replacement'][key], key, self)
replace.yearly[planned_replacement.years_idx] += planned_replacement.cost
def unplanned_replacement(replace, self, print_info):
replace.unplanned = sum_all_tables(self.inp, 'Unplanned Replacement', 'Value', insert_total = True, class_object = self, print_info = print_info)
replace.yearly += replace.unplanned
class Planned_Replacement:
def __init__(planned, dictionary, key, self):
planned.calculate_yearly_cost(dictionary, key, self)
def calculate_yearly_cost(planned, dictionary, key, self):
replacement_frequency = int(np.ceil(dictionary['Frequency (years)']))
non_integer_correction = replacement_frequency / dictionary['Frequency (years)']
raw_replacement_cost = process_input(self.inp, 'Planned Replacement', key, 'Cost ($)')
initial_replacement_year_idx = fn.find_nearest(self.plant_years, replacement_frequency)[0]
planned.cost = raw_replacement_cost * non_integer_correction * self.combined_inflator
planned.years = self.plant_years[initial_replacement_year_idx:][0::replacement_frequency]
planned.years_idx = fn.find_nearest(self.plant_years, planned.years) | true | true |
f7f7b36c568a98a3ec5c55eae9e612330e472879 | 275 | py | Python | tests/mocks.py | ashu-tosh-kumar/Calendar-Python | 603d3bed028bfd0e159f2ea86a8743cd3dae346c | [
"MIT"
] | null | null | null | tests/mocks.py | ashu-tosh-kumar/Calendar-Python | 603d3bed028bfd0e159f2ea86a8743cd3dae346c | [
"MIT"
] | null | null | null | tests/mocks.py | ashu-tosh-kumar/Calendar-Python | 603d3bed028bfd0e159f2ea86a8743cd3dae346c | [
"MIT"
] | null | null | null | class FakeResponse:
    def __init__(self, data=None, status_code=None):
        """Store the canned payload and status code for read-only access."""
        self._data = data
        self._status_code = status_code
    @property
    def data(self):
        """Canned response payload (whatever was passed to the constructor)."""
        return self._data
    @property
    def status_code(self):
        """Canned HTTP status code (None unless supplied)."""
        return self._status_code
| 21.153846 | 52 | 0.643636 | class FakeResponse:
def __init__(self, data=None, status_code=None):
self._data = data
self._status_code = status_code
@property
def data(self):
return self._data
@property
def status_code(self):
return self._status_code
| true | true |
f7f7b3d679284fa1433a02cc9391e9b1b29dbaa2 | 8,072 | py | Python | sptm/run_classifier.py | ishine/qa_match | f1ede11a3e799edfb5e90d5b4396b304d2365778 | [
"Apache-2.0"
] | 320 | 2020-03-09T03:49:52.000Z | 2022-03-18T10:59:54.000Z | sptm/run_classifier.py | ishine/qa_match | f1ede11a3e799edfb5e90d5b4396b304d2365778 | [
"Apache-2.0"
] | 13 | 2020-03-12T02:37:24.000Z | 2021-05-19T03:34:52.000Z | sptm/run_classifier.py | ishine/qa_match | f1ede11a3e799edfb5e90d5b4396b304d2365778 | [
"Apache-2.0"
] | 81 | 2020-03-11T10:05:09.000Z | 2022-03-01T14:08:00.000Z | # -*- coding: utf-8 -*-
"""
finetune on pretrained model with trainset and devset
"""
import sys
import os
import tensorflow as tf
import numpy as np
import argparse
import models
import utils
def evaluate(sess, full_tensors, args, model):
    """Run the classifier over the dev set; log and return accuracy.

    Iterates the dev tensors in fixed order (no shuffle), argmaxes the
    softmax output per example, and compares against the gold labels.
    """
    seen = 0
    correct = 0
    for batch in utils.gen_batchs(full_tensors, args.batch_size, is_shuffle=False):
        # batch layout: (tokens, labels, lengths, input_mask).
        probs = sess.run(model.softmax_op,
                         feed_dict={model.ph_dropout_rate: 0,
                                    model.ph_tokens: batch[0],
                                    model.ph_labels: batch[1],
                                    model.ph_length: batch[2],
                                    model.ph_input_mask: batch[3]})
        predictions = np.argmax(probs, axis=1)
        seen += len(predictions)
        correct += np.sum(predictions == batch[1])
    # Epsilon guards against an empty dev set.
    acc = 1.0 * correct / (seen + 1e-5)
    tf.logging.info("dev total num: " + str(seen) + ", right num: " + str(correct) + ", acc: " + str(acc))
    return acc
def main(_):
    """Fine-tune a pretrained model as a sentence classifier.

    Loads train/dev data, builds vocab and label maps (writing the
    id->label map to disk), creates the fine-tuning graph, then trains
    for --epoch epochs, evaluating on dev every print_step steps and
    checkpointing whenever dev accuracy improves.
    """
    tf.logging.set_verbosity(tf.logging.INFO)
    parser = argparse.ArgumentParser()
    parser.add_argument("--train_file", type=str, default="", help="Input train file.")
    parser.add_argument("--dev_file", type=str, default="", help="Input dev file.")
    parser.add_argument("--vocab_file", type=str, default="", help="Input vocab file.")
    parser.add_argument("--output_id2label_file", type=str, default="./id2label",
                        help="File containing (id, class label) map.")
    parser.add_argument("--model_save_dir", type=str, default="",
                        help="Specified the directory in which the model should stored.")
    parser.add_argument("--lstm_dim", type=int, default=500, help="Dimension of LSTM cell.")
    parser.add_argument("--embedding_dim", type=int, default=1000, help="Dimension of word embedding.")
    parser.add_argument("--opt_type", type=str, default='adam', help="Type of optimizer.")
    parser.add_argument("--batch_size", type=int, default=32, help="Batch size.")
    parser.add_argument("--epoch", type=int, default=20, help="Epoch.")
    parser.add_argument("--learning_rate", type=float, default=1e-4, help="Learning rate.")
    parser.add_argument("--dropout_rate", type=float, default=0.1, help="Dropout rate")
    parser.add_argument("--seed", type=int, default=1, help="Random seed value.")
    parser.add_argument("--print_step", type=int, default=1000, help="Print log every x step.")
    parser.add_argument("--init_checkpoint", type=str, default='',
                        help="Initial checkpoint (usually from a pre-trained model).")
    parser.add_argument("--max_len", type=int, default=100, help="Max seqence length.")
    parser.add_argument("--layer_num", type=int, default=2, help="LSTM layer num.")
    parser.add_argument("--representation_type", type=str, default="lstm",
                        help="representation type include:lstm, transformer")
    # transformer args (only used when --representation_type=transformer)
    parser.add_argument("--initializer_range", type=float, default="0.02", help="Embedding initialization range")
    parser.add_argument("--max_position_embeddings", type=int, default=512, help="max position num")
    parser.add_argument("--hidden_size", type=int, default=768, help="hidden size")
    parser.add_argument("--num_hidden_layers", type=int, default=12, help="num hidden layer")
    parser.add_argument("--num_attention_heads", type=int, default=12, help="num attention heads")
    parser.add_argument("--intermediate_size", type=int, default=3072, help="intermediate_size")
    args = parser.parse_args()
    # Fix seeds for reproducibility across numpy and the TF graph.
    np.random.seed(args.seed)
    tf.set_random_seed(args.seed)
    tf.logging.info(str(args))
    if not os.path.exists(args.model_save_dir):
        os.mkdir(args.model_save_dir)
    tf.logging.info("load training sens")
    train_sens = utils.load_training_data(args.train_file, skip_invalid=True)
    tf.logging.info("\nload dev sens")
    dev_sens = utils.load_training_data(args.dev_file, skip_invalid=True)
    # Vocab/label maps are built over train+dev so dev tokens are covered.
    word2id, id2word, label2id, id2label = utils.load_vocab(train_sens + dev_sens, args.vocab_file)
    # Persist the id->label map for later inference.
    fw = open(args.output_id2label_file, 'w+')
    for k, v in id2label.items():
        fw.write(str(k) + "\t" + v + "\n")
    fw.close()
    # Convert sentences to id sequences (in place), then pack into tensors.
    utils.gen_ids(train_sens, word2id, label2id, args.max_len)
    utils.gen_ids(dev_sens, word2id, label2id, args.max_len)
    train_full_tensors = utils.make_full_tensors(train_sens)
    dev_full_tensors = utils.make_full_tensors(dev_sens)
    other_arg_dict = {}
    other_arg_dict['token_num'] = len(word2id)
    other_arg_dict['label_num'] = len(label2id)
    model = models.create_finetune_classification_training_op(args, other_arg_dict)
    steps_in_epoch = int(len(train_sens) // args.batch_size)
    tf.logging.info("batch size: " + str(args.batch_size) + ", training sample num : " + str(
        len(train_sens)) + ", print step : " + str(args.print_step))
    tf.logging.info(
        "steps_in_epoch : " + str(steps_in_epoch) + ", epoch num :" + str(args.epoch) + ", total steps : " + str(
            args.epoch * steps_in_epoch))
    # Evaluate at least once per epoch even if print_step is larger.
    print_step = min(args.print_step, steps_in_epoch)
    tf.logging.info("eval dev every {} step".format(print_step))
    # Optimizer slot variables are excluded here; save_vars is only logged,
    # the Saver below still saves everything.
    save_vars = [v for v in tf.global_variables() if
                 v.name.find('adam') < 0 and v.name.find('Adam') < 0 and v.name.find('ADAM') < 0]
    tf.logging.info(str(save_vars))
    tf.logging.info(str(tf.all_variables()))
    saver = tf.train.Saver(max_to_keep=2)
    config = tf.ConfigProto(allow_soft_placement=True)
    config.gpu_options.allow_growth = True
    with tf.Session(config=config) as sess:
        sess.run(tf.global_variables_initializer())
        total_loss = 0
        dev_best_so_far = 0
        for epoch in range(1, args.epoch + 1):
            tf.logging.info("\n" + "*" * 20 + "epoch num :" + str(epoch) + "*" * 20)
            for batch_data in utils.gen_batchs(train_full_tensors, args.batch_size, is_shuffle=True):
                _, global_step, loss = sess.run([model.train_op, model.global_step_op, model.loss_op],
                                                feed_dict={model.ph_dropout_rate: args.dropout_rate,
                                                           model.ph_tokens: batch_data[0],
                                                           model.ph_labels: batch_data[1],
                                                           model.ph_length: batch_data[2],
                                                           model.ph_input_mask: batch_data[3]})
                total_loss += loss
                if global_step % print_step == 0:
                    tf.logging.info(
                        "\nglobal step : " + str(global_step) + ", avg loss so far : " + str(total_loss / global_step))
                    tf.logging.info("begin to eval dev set: ")
                    acc = evaluate(sess, dev_full_tensors, args, model)
                    if acc > dev_best_so_far:
                        # Checkpoint only on dev-accuracy improvement.
                        dev_best_so_far = acc
                        tf.logging.info("!" * 20 + "best got : " + str(acc))
                        saver.save(sess, args.model_save_dir + '/finetune.ckpt', global_step=global_step)
            # End-of-epoch evaluation (in addition to the periodic one above).
            tf.logging.info("\n----------------------eval after one epoch: ")
            tf.logging.info(
                "global step : " + str(global_step) + ", avg loss so far : " + str(total_loss / global_step))
            tf.logging.info("begin to eval dev set: ")
            sys.stdout.flush()
            acc = evaluate(sess, dev_full_tensors, args, model)
            if acc > dev_best_so_far:
                dev_best_so_far = acc
                tf.logging.info("!" * 20 + "best got : " + str(acc))
                saver.save(sess, args.model_save_dir + '/finetune.ckpt', global_step=global_step)


if __name__ == "__main__":
    # tf.app.run() parses TF flags and invokes main(_).
    tf.app.run()
| 52.077419 | 119 | 0.613974 |
import sys
import os
import tensorflow as tf
import numpy as np
import argparse
import models
import utils
def evaluate(sess, full_tensors, args, model):
total_num = 0
right_num = 0
for batch_data in utils.gen_batchs(full_tensors, args.batch_size, is_shuffle=False):
softmax_re = sess.run(model.softmax_op,
feed_dict={model.ph_dropout_rate: 0,
model.ph_tokens: batch_data[0],
model.ph_labels: batch_data[1],
model.ph_length: batch_data[2],
model.ph_input_mask: batch_data[3]})
pred_re = np.argmax(softmax_re, axis=1)
total_num += len(pred_re)
right_num += np.sum(pred_re == batch_data[1])
acc = 1.0 * right_num / (total_num + 1e-5)
tf.logging.info("dev total num: " + str(total_num) + ", right num: " + str(right_num) + ", acc: " + str(acc))
return acc
def main(_):
tf.logging.set_verbosity(tf.logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument("--train_file", type=str, default="", help="Input train file.")
parser.add_argument("--dev_file", type=str, default="", help="Input dev file.")
parser.add_argument("--vocab_file", type=str, default="", help="Input vocab file.")
parser.add_argument("--output_id2label_file", type=str, default="./id2label",
help="File containing (id, class label) map.")
parser.add_argument("--model_save_dir", type=str, default="",
help="Specified the directory in which the model should stored.")
parser.add_argument("--lstm_dim", type=int, default=500, help="Dimension of LSTM cell.")
parser.add_argument("--embedding_dim", type=int, default=1000, help="Dimension of word embedding.")
parser.add_argument("--opt_type", type=str, default='adam', help="Type of optimizer.")
parser.add_argument("--batch_size", type=int, default=32, help="Batch size.")
parser.add_argument("--epoch", type=int, default=20, help="Epoch.")
parser.add_argument("--learning_rate", type=float, default=1e-4, help="Learning rate.")
parser.add_argument("--dropout_rate", type=float, default=0.1, help="Dropout rate")
parser.add_argument("--seed", type=int, default=1, help="Random seed value.")
parser.add_argument("--print_step", type=int, default=1000, help="Print log every x step.")
parser.add_argument("--init_checkpoint", type=str, default='',
help="Initial checkpoint (usually from a pre-trained model).")
parser.add_argument("--max_len", type=int, default=100, help="Max seqence length.")
parser.add_argument("--layer_num", type=int, default=2, help="LSTM layer num.")
parser.add_argument("--representation_type", type=str, default="lstm",
help="representation type include:lstm, transformer")
parser.add_argument("--initializer_range", type=float, default="0.02", help="Embedding initialization range")
parser.add_argument("--max_position_embeddings", type=int, default=512, help="max position num")
parser.add_argument("--hidden_size", type=int, default=768, help="hidden size")
parser.add_argument("--num_hidden_layers", type=int, default=12, help="num hidden layer")
parser.add_argument("--num_attention_heads", type=int, default=12, help="num attention heads")
parser.add_argument("--intermediate_size", type=int, default=3072, help="intermediate_size")
args = parser.parse_args()
np.random.seed(args.seed)
tf.set_random_seed(args.seed)
tf.logging.info(str(args))
if not os.path.exists(args.model_save_dir):
os.mkdir(args.model_save_dir)
tf.logging.info("load training sens")
train_sens = utils.load_training_data(args.train_file, skip_invalid=True)
tf.logging.info("\nload dev sens")
dev_sens = utils.load_training_data(args.dev_file, skip_invalid=True)
word2id, id2word, label2id, id2label = utils.load_vocab(train_sens + dev_sens, args.vocab_file)
fw = open(args.output_id2label_file, 'w+')
for k, v in id2label.items():
fw.write(str(k) + "\t" + v + "\n")
fw.close()
utils.gen_ids(train_sens, word2id, label2id, args.max_len)
utils.gen_ids(dev_sens, word2id, label2id, args.max_len)
train_full_tensors = utils.make_full_tensors(train_sens)
dev_full_tensors = utils.make_full_tensors(dev_sens)
other_arg_dict = {}
other_arg_dict['token_num'] = len(word2id)
other_arg_dict['label_num'] = len(label2id)
model = models.create_finetune_classification_training_op(args, other_arg_dict)
steps_in_epoch = int(len(train_sens) // args.batch_size)
tf.logging.info("batch size: " + str(args.batch_size) + ", training sample num : " + str(
len(train_sens)) + ", print step : " + str(args.print_step))
tf.logging.info(
"steps_in_epoch : " + str(steps_in_epoch) + ", epoch num :" + str(args.epoch) + ", total steps : " + str(
args.epoch * steps_in_epoch))
print_step = min(args.print_step, steps_in_epoch)
tf.logging.info("eval dev every {} step".format(print_step))
save_vars = [v for v in tf.global_variables() if
v.name.find('adam') < 0 and v.name.find('Adam') < 0 and v.name.find('ADAM') < 0]
tf.logging.info(str(save_vars))
tf.logging.info(str(tf.all_variables()))
saver = tf.train.Saver(max_to_keep=2)
config = tf.ConfigProto(allow_soft_placement=True)
config.gpu_options.allow_growth = True
with tf.Session(config=config) as sess:
sess.run(tf.global_variables_initializer())
total_loss = 0
dev_best_so_far = 0
for epoch in range(1, args.epoch + 1):
tf.logging.info("\n" + "*" * 20 + "epoch num :" + str(epoch) + "*" * 20)
for batch_data in utils.gen_batchs(train_full_tensors, args.batch_size, is_shuffle=True):
_, global_step, loss = sess.run([model.train_op, model.global_step_op, model.loss_op],
feed_dict={model.ph_dropout_rate: args.dropout_rate,
model.ph_tokens: batch_data[0],
model.ph_labels: batch_data[1],
model.ph_length: batch_data[2],
model.ph_input_mask: batch_data[3]})
total_loss += loss
if global_step % print_step == 0:
tf.logging.info(
"\nglobal step : " + str(global_step) + ", avg loss so far : " + str(total_loss / global_step))
tf.logging.info("begin to eval dev set: ")
acc = evaluate(sess, dev_full_tensors, args, model)
if acc > dev_best_so_far:
dev_best_so_far = acc
tf.logging.info("!" * 20 + "best got : " + str(acc))
saver.save(sess, args.model_save_dir + '/finetune.ckpt', global_step=global_step)
tf.logging.info("\n----------------------eval after one epoch: ")
tf.logging.info(
"global step : " + str(global_step) + ", avg loss so far : " + str(total_loss / global_step))
tf.logging.info("begin to eval dev set: ")
sys.stdout.flush()
acc = evaluate(sess, dev_full_tensors, args, model)
if acc > dev_best_so_far:
dev_best_so_far = acc
tf.logging.info("!" * 20 + "best got : " + str(acc))
saver.save(sess, args.model_save_dir + '/finetune.ckpt', global_step=global_step)
if __name__ == "__main__":
tf.app.run()
| true | true |
f7f7b4b2cfe5f6f19386ce669780320a304fc4c9 | 4,476 | py | Python | test/modules/md/test_730_static.py | tititiou36/httpd | 1348607c00ba58ce371f2f8ecb08abf610227043 | [
"Apache-2.0"
] | 2,529 | 2015-01-02T11:52:53.000Z | 2022-03-30T19:54:27.000Z | test/modules/md/test_730_static.py | tititiou36/httpd | 1348607c00ba58ce371f2f8ecb08abf610227043 | [
"Apache-2.0"
] | 133 | 2015-04-21T05:50:45.000Z | 2022-03-30T14:23:40.000Z | test/modules/md/test_730_static.py | tititiou36/httpd | 1348607c00ba58ce371f2f8ecb08abf610227043 | [
"Apache-2.0"
] | 1,113 | 2015-01-01T14:47:02.000Z | 2022-03-29T16:47:18.000Z | import os
import pytest
from .md_conf import MDConf
from .md_env import MDTestEnv
@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
                    reason="no ACME test server configured")
class TestStatic:
    """Tests for MDs configured with static certificate/key files."""

    @pytest.fixture(autouse=True, scope='class')
    def _class_scope(self, env, acme):
        env.APACHE_CONF_SRC = "data/test_auto"
        acme.start(config='default')
        env.check_acme()
        env.clear_store()
        MDConf(env).install()
        assert env.apache_restart() == 0

    @pytest.fixture(autouse=True, scope='function')
    def _method_scope(self, env, request):
        env.clear_store()
        self.test_domain = env.get_request_domain(request)

    def _static_cert_files(self, env, domains):
        """Create a self-signed cert for *domains* that is only valid for
        10 more days and return its (cert_file, pkey_file) paths.

        Extracted from the three tests below, which previously repeated
        this setup verbatim.
        """
        # NOTE(review): the gen-dir name 'test_920_001' looks copy-pasted
        # from another test module; kept unchanged to preserve behavior.
        testpath = os.path.join(env.gen_dir, 'test_920_001')
        env.create_self_signed_cert(domains, {"notBefore": -80, "notAfter": 10},
                                    serial=730001, path=testpath)
        cert_file = os.path.join(testpath, 'pubcert.pem')
        pkey_file = os.path.join(testpath, 'privkey.pem')
        assert os.path.exists(cert_file)
        assert os.path.exists(pkey_file)
        return cert_file, pkey_file

    def test_md_730_001(self, env):
        # MD with static cert files, will not be driven
        domain = self.test_domain
        domains = [domain, 'www.%s' % domain]
        cert_file, pkey_file = self._static_cert_files(env, domains)
        conf = MDConf(env)
        conf.start_md(domains)
        conf.add(f"MDCertificateFile {cert_file}")
        conf.add(f"MDCertificateKeyFile {pkey_file}")
        conf.end_md()
        conf.add_vhost(domain)
        conf.install()
        assert env.apache_restart() == 0
        # check if the domain uses it, it appears in our stats and renewal is off
        cert = env.get_cert(domain)
        assert cert.same_serial_as(730001)
        stat = env.get_md_status(domain)
        assert stat
        assert 'cert' in stat
        assert stat['renew'] is True
        assert 'renewal' not in stat

    def test_md_730_002(self, env):
        # MD with static cert files, force driving
        domain = self.test_domain
        domains = [domain, 'www.%s' % domain]
        cert_file, pkey_file = self._static_cert_files(env, domains)
        conf = MDConf(env)
        conf.start_md(domains)
        # (dropped a needless f-prefix: the directive below has no placeholder)
        conf.add("MDPrivateKeys secp384r1 rsa3072")
        conf.add(f"MDCertificateFile {cert_file}")
        conf.add(f"MDCertificateKeyFile {pkey_file}")
        conf.add("MDRenewMode always")
        conf.end_md()
        conf.add_vhost(domain)
        conf.install()
        assert env.apache_restart() == 0
        # this should enforce a renewal
        stat = env.get_md_status(domain)
        assert stat['renew'] is True, stat
        assert env.await_completion(domains, restart=False)
        # and show the newly created certificates
        stat = env.get_md_status(domain)
        assert 'renewal' in stat
        assert 'cert' in stat['renewal']
        assert 'secp384r1' in stat['renewal']['cert']
        assert 'rsa' in stat['renewal']['cert']

    def test_md_730_003(self, env):
        # just configuring one file will not work
        domain = self.test_domain
        domains = [domain, 'www.%s' % domain]
        cert_file, pkey_file = self._static_cert_files(env, domains)
        conf = MDConf(env)
        conf.start_md(domains)
        conf.add(f"MDCertificateFile {cert_file}")
        conf.end_md()
        conf.add_vhost(domain)
        conf.install()
        assert env.apache_fail() == 0
        conf = MDConf(env)
        conf.start_md(domains)
        conf.add(f"MDCertificateKeyFile {pkey_file}")
        conf.end_md()
        conf.add_vhost(domain)
        conf.install()
        assert env.apache_fail() == 0
import pytest
from .md_conf import MDConf
from .md_env import MDTestEnv
@pytest.mark.skipif(condition=not MDTestEnv.has_acme_server(),
reason="no ACME test server configured")
class TestStatic:
@pytest.fixture(autouse=True, scope='class')
def _class_scope(self, env, acme):
env.APACHE_CONF_SRC = "data/test_auto"
acme.start(config='default')
env.check_acme()
env.clear_store()
MDConf(env).install()
assert env.apache_restart() == 0
@pytest.fixture(autouse=True, scope='function')
def _method_scope(self, env, request):
env.clear_store()
self.test_domain = env.get_request_domain(request)
def test_md_730_001(self, env):
domain = self.test_domain
domains = [domain, 'www.%s' % domain]
testpath = os.path.join(env.gen_dir, 'test_920_001')
env.create_self_signed_cert(domains, {"notBefore": -80, "notAfter": 10},
serial=730001, path=testpath)
cert_file = os.path.join(testpath, 'pubcert.pem')
pkey_file = os.path.join(testpath, 'privkey.pem')
assert os.path.exists(cert_file)
assert os.path.exists(pkey_file)
conf = MDConf(env)
conf.start_md(domains)
conf.add(f"MDCertificateFile {cert_file}")
conf.add(f"MDCertificateKeyFile {pkey_file}")
conf.end_md()
conf.add_vhost(domain)
conf.install()
assert env.apache_restart() == 0
cert = env.get_cert(domain)
assert cert.same_serial_as(730001)
stat = env.get_md_status(domain)
assert stat
assert 'cert' in stat
assert stat['renew'] is True
assert 'renewal' not in stat
def test_md_730_002(self, env):
domain = self.test_domain
domains = [domain, 'www.%s' % domain]
testpath = os.path.join(env.gen_dir, 'test_920_001')
env.create_self_signed_cert(domains, {"notBefore": -80, "notAfter": 10},
serial=730001, path=testpath)
cert_file = os.path.join(testpath, 'pubcert.pem')
pkey_file = os.path.join(testpath, 'privkey.pem')
assert os.path.exists(cert_file)
assert os.path.exists(pkey_file)
conf = MDConf(env)
conf.start_md(domains)
conf.add(f"MDPrivateKeys secp384r1 rsa3072")
conf.add(f"MDCertificateFile {cert_file}")
conf.add(f"MDCertificateKeyFile {pkey_file}")
conf.add("MDRenewMode always")
conf.end_md()
conf.add_vhost(domain)
conf.install()
assert env.apache_restart() == 0
stat = env.get_md_status(domain)
assert stat['renew'] is True, stat
assert env.await_completion(domains, restart=False)
stat = env.get_md_status(domain)
assert 'renewal' in stat
assert 'cert' in stat['renewal']
assert 'secp384r1' in stat['renewal']['cert']
assert 'rsa' in stat['renewal']['cert']
def test_md_730_003(self, env):
domain = self.test_domain
domains = [domain, 'www.%s' % domain]
testpath = os.path.join(env.gen_dir, 'test_920_001')
env.create_self_signed_cert(domains, {"notBefore": -80, "notAfter": 10},
serial=730001, path=testpath)
cert_file = os.path.join(testpath, 'pubcert.pem')
pkey_file = os.path.join(testpath, 'privkey.pem')
assert os.path.exists(cert_file)
assert os.path.exists(pkey_file)
conf = MDConf(env)
conf.start_md(domains)
conf.add(f"MDCertificateFile {cert_file}")
conf.end_md()
conf.add_vhost(domain)
conf.install()
assert env.apache_fail() == 0
conf = MDConf(env)
conf.start_md(domains)
conf.add(f"MDCertificateKeyFile {pkey_file}")
conf.end_md()
conf.add_vhost(domain)
conf.install()
assert env.apache_fail() == 0
| true | true |
f7f7b55d09b158a9f03da48d9d8cd31b1a9a5ac7 | 9,095 | py | Python | tests/contrib/sensors/test_hdfs_sensor.py | InigoSJ/airflow | 8b97a387dc30d8c88390d500ec99333798c20f1c | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2019-09-06T09:55:18.000Z | 2019-09-06T09:55:18.000Z | tests/contrib/sensors/test_hdfs_sensor.py | InigoSJ/airflow | 8b97a387dc30d8c88390d500ec99333798c20f1c | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2017-05-11T22:57:49.000Z | 2017-05-11T22:57:49.000Z | tests/contrib/sensors/test_hdfs_sensor.py | InigoSJ/airflow | 8b97a387dc30d8c88390d500ec99333798c20f1c | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2019-12-09T08:41:32.000Z | 2019-12-09T08:41:32.000Z | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import unittest
import re
from datetime import timedelta
from airflow.contrib.sensors.hdfs_sensor import HdfsSensorFolder, HdfsSensorRegex
from airflow.exceptions import AirflowSensorTimeout
class TestHdfsSensorFolder(unittest.TestCase):
def setUp(self):
from tests.core import FakeHDFSHook
self.hook = FakeHDFSHook
self.log = logging.getLogger()
self.log.setLevel(logging.DEBUG)
def test_should_be_empty_directory(self):
"""
test the empty directory behaviour
:return:
"""
# Given
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
task = HdfsSensorFolder(task_id='Should_be_empty_directory',
filepath='/datadirectory/empty_directory',
be_empty=True,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
# When
task.execute(None)
# Then
# Nothing happens, nothing is raised exec is ok
def test_should_be_empty_directory_fail(self):
"""
test the empty directory behaviour
:return:
"""
# Given
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
task = HdfsSensorFolder(task_id='Should_be_empty_directory_fail',
filepath='/datadirectory/not_empty_directory',
be_empty=True,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
# When
# Then
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
def test_should_be_a_non_empty_directory(self):
"""
test the empty directory behaviour
:return:
"""
# Given
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
task = HdfsSensorFolder(task_id='Should_be_non_empty_directory',
filepath='/datadirectory/not_empty_directory',
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
# When
task.execute(None)
# Then
# Nothing happens, nothing is raised exec is ok
def test_should_be_non_empty_directory_fail(self):
"""
test the empty directory behaviour
:return:
"""
# Given
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
task = HdfsSensorFolder(task_id='Should_be_empty_directory_fail',
filepath='/datadirectory/empty_directory',
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
# When
# Then
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
class TestHdfsSensorRegex(unittest.TestCase):
def setUp(self):
from tests.core import FakeHDFSHook
self.hook = FakeHDFSHook
self.log = logging.getLogger()
self.log.setLevel(logging.DEBUG)
def test_should_match_regex(self):
"""
test the empty directory behaviour
:return:
"""
# Given
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
compiled_regex = re.compile("test[1-2]file")
task = HdfsSensorRegex(task_id='Should_match_the_regex',
filepath='/datadirectory/regex_dir',
regex=compiled_regex,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
# When
task.execute(None)
# Then
# Nothing happens, nothing is raised exec is ok
def test_should_not_match_regex(self):
"""
test the empty directory behaviour
:return:
"""
# Given
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
compiled_regex = re.compile("^IDoNotExist")
task = HdfsSensorRegex(task_id='Should_not_match_the_regex',
filepath='/datadirectory/regex_dir',
regex=compiled_regex,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
# When
# Then
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
def test_should_match_regex_and_filesize(self):
"""
test the file size behaviour with regex
:return:
"""
# Given
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
compiled_regex = re.compile("test[1-2]file")
task = HdfsSensorRegex(task_id='Should_match_the_regex_and_filesize',
filepath='/datadirectory/regex_dir',
regex=compiled_regex,
ignore_copying=True,
ignored_ext=['_COPYING_', 'sftp'],
file_size=10,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
# When
task.execute(None)
# Then
# Nothing happens, nothing is raised exec is ok
def test_should_match_regex_but_filesize(self):
"""
test the file size behaviour with regex
:return:
"""
# Given
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
compiled_regex = re.compile("test[1-2]file")
task = HdfsSensorRegex(task_id='Should_match_the_regex_but_filesize',
filepath='/datadirectory/regex_dir',
regex=compiled_regex,
file_size=20,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
# When
# Then
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
def test_should_match_regex_but_copyingext(self):
"""
test the file size behaviour with regex
:return:
"""
# Given
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
compiled_regex = re.compile(r"copying_file_\d+.txt")
task = HdfsSensorRegex(task_id='Should_match_the_regex_but_filesize',
filepath='/datadirectory/regex_dir',
regex=compiled_regex,
ignored_ext=['_COPYING_', 'sftp'],
file_size=20,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
# When
# Then
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
| 35.807087 | 81 | 0.523584 |
import logging
import unittest
import re
from datetime import timedelta
from airflow.contrib.sensors.hdfs_sensor import HdfsSensorFolder, HdfsSensorRegex
from airflow.exceptions import AirflowSensorTimeout
class TestHdfsSensorFolder(unittest.TestCase):
def setUp(self):
from tests.core import FakeHDFSHook
self.hook = FakeHDFSHook
self.log = logging.getLogger()
self.log.setLevel(logging.DEBUG)
def test_should_be_empty_directory(self):
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
task = HdfsSensorFolder(task_id='Should_be_empty_directory',
filepath='/datadirectory/empty_directory',
be_empty=True,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
task.execute(None)
def test_should_be_empty_directory_fail(self):
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
task = HdfsSensorFolder(task_id='Should_be_empty_directory_fail',
filepath='/datadirectory/not_empty_directory',
be_empty=True,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
def test_should_be_a_non_empty_directory(self):
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
task = HdfsSensorFolder(task_id='Should_be_non_empty_directory',
filepath='/datadirectory/not_empty_directory',
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
task.execute(None)
def test_should_be_non_empty_directory_fail(self):
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
task = HdfsSensorFolder(task_id='Should_be_empty_directory_fail',
filepath='/datadirectory/empty_directory',
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
class TestHdfsSensorRegex(unittest.TestCase):
def setUp(self):
from tests.core import FakeHDFSHook
self.hook = FakeHDFSHook
self.log = logging.getLogger()
self.log.setLevel(logging.DEBUG)
def test_should_match_regex(self):
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
compiled_regex = re.compile("test[1-2]file")
task = HdfsSensorRegex(task_id='Should_match_the_regex',
filepath='/datadirectory/regex_dir',
regex=compiled_regex,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
task.execute(None)
def test_should_not_match_regex(self):
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
compiled_regex = re.compile("^IDoNotExist")
task = HdfsSensorRegex(task_id='Should_not_match_the_regex',
filepath='/datadirectory/regex_dir',
regex=compiled_regex,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
def test_should_match_regex_and_filesize(self):
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
compiled_regex = re.compile("test[1-2]file")
task = HdfsSensorRegex(task_id='Should_match_the_regex_and_filesize',
filepath='/datadirectory/regex_dir',
regex=compiled_regex,
ignore_copying=True,
ignored_ext=['_COPYING_', 'sftp'],
file_size=10,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
task.execute(None)
def test_should_match_regex_but_filesize(self):
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
compiled_regex = re.compile("test[1-2]file")
task = HdfsSensorRegex(task_id='Should_match_the_regex_but_filesize',
filepath='/datadirectory/regex_dir',
regex=compiled_regex,
file_size=20,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
def test_should_match_regex_but_copyingext(self):
self.log.debug('#' * 10)
self.log.debug('Running %s', self._testMethodName)
self.log.debug('#' * 10)
compiled_regex = re.compile(r"copying_file_\d+.txt")
task = HdfsSensorRegex(task_id='Should_match_the_regex_but_filesize',
filepath='/datadirectory/regex_dir',
regex=compiled_regex,
ignored_ext=['_COPYING_', 'sftp'],
file_size=20,
timeout=1,
retry_delay=timedelta(seconds=1),
poke_interval=1,
hook=self.hook)
with self.assertRaises(AirflowSensorTimeout):
task.execute(None)
| true | true |
f7f7b596546ff95f2ec719176c8bc9ab81995aa9 | 884 | py | Python | app/core/tests/test_commands.py | enpifa-travelperk/recipe-app-api | d735e923b26ac07498e8c30a77f73d5b9456ef86 | [
"MIT"
] | null | null | null | app/core/tests/test_commands.py | enpifa-travelperk/recipe-app-api | d735e923b26ac07498e8c30a77f73d5b9456ef86 | [
"MIT"
] | null | null | null | app/core/tests/test_commands.py | enpifa-travelperk/recipe-app-api | d735e923b26ac07498e8c30a77f73d5b9456ef86 | [
"MIT"
] | null | null | null | from unittest.mock import patch
from django.core.management import call_command
from django.db.utils import OperationalError
from django.test import TestCase
class CommandTests(TestCase):
# we will use this function to mock the behavior of getitem
def test_wait_for_db_ready(self):
"""Test waiting for db when db is available"""
with patch('django.db.utils.ConnectionHandler.__getitem__') as gi:
gi.return_value = True
call_command('wait_for_db')
self.assertEqual(gi.call_count, 1)
@patch('time.sleep', return_value=True)
def test_wait_for_db(self, ts):
"""Test waiting for db"""
with patch('django.db.utils.ConnectionHandler.__getitem__') as gi:
gi.side_effect = [OperationalError] * 5 + [True]
call_command('wait_for_db')
self.assertEqual(gi.call_count, 6)
| 35.36 | 74 | 0.682127 | from unittest.mock import patch
from django.core.management import call_command
from django.db.utils import OperationalError
from django.test import TestCase
class CommandTests(TestCase):
def test_wait_for_db_ready(self):
with patch('django.db.utils.ConnectionHandler.__getitem__') as gi:
gi.return_value = True
call_command('wait_for_db')
self.assertEqual(gi.call_count, 1)
@patch('time.sleep', return_value=True)
def test_wait_for_db(self, ts):
with patch('django.db.utils.ConnectionHandler.__getitem__') as gi:
gi.side_effect = [OperationalError] * 5 + [True]
call_command('wait_for_db')
self.assertEqual(gi.call_count, 6)
| true | true |
f7f7b59785907eb7f204b3a4477927efd9f84331 | 7,206 | py | Python | coredis/exceptions.py | alisaifee/coredis | e72f5d7c665b53e6a1d41e1a7fb9e400858a8b19 | [
"MIT"
] | 9 | 2022-01-07T07:42:08.000Z | 2022-03-21T15:54:09.000Z | coredis/exceptions.py | alisaifee/coredis | e72f5d7c665b53e6a1d41e1a7fb9e400858a8b19 | [
"MIT"
] | 30 | 2022-01-15T23:33:36.000Z | 2022-03-30T22:39:53.000Z | coredis/exceptions.py | alisaifee/coredis | e72f5d7c665b53e6a1d41e1a7fb9e400858a8b19 | [
"MIT"
] | 3 | 2022-01-13T06:11:13.000Z | 2022-02-21T11:19:33.000Z | from __future__ import annotations
import re
from typing import Any
from coredis.typing import Mapping, Optional, Set, Tuple, ValueT
class RedisError(Exception):
"""
Base exception from which all other exceptions in coredis
derive from.
"""
class CommandSyntaxError(RedisError):
"""
Raised when a redis command is called with an invalid syntax
"""
def __init__(self, arguments: Set[str], message: str) -> None:
self.arguments: Set[str] = arguments
super().__init__(message)
class CommandNotSupportedError(RedisError):
"""
Raised when the target server doesn't support a command due to
version mismatch
"""
def __init__(self, cmd: str, current_version: str) -> None:
super().__init__(
self, f"{cmd} is not supported on server version {current_version}"
)
class ConnectionError(RedisError):
pass
class ProtocolError(ConnectionError):
"""
Raised on errors related to ser/deser protocol parsing
"""
class TimeoutError(RedisError):
pass
class BusyLoadingError(ConnectionError):
pass
class InvalidResponse(RedisError):
pass
class ResponseError(RedisError):
pass
class DataError(RedisError):
pass
class NoKeyError(RedisError):
"""
Raised when a key provided in the command is missing
"""
class WrongTypeError(ResponseError):
"""
Raised when an operation is performed on a key
containing a datatype that doesn't support the operation
"""
class PubSubError(RedisError):
pass
class WatchError(RedisError):
pass
class NoScriptError(ResponseError):
pass
class ExecAbortError(ResponseError):
pass
class ReadOnlyError(ResponseError):
pass
class LockError(RedisError, ValueError):
"""Errors acquiring or releasing a lock"""
# NOTE: For backwards compatability, this class derives from ValueError.
# This was originally chosen to behave like threading.Lock.
class RedisClusterException(Exception):
"""Base exception for the RedisCluster client"""
class ClusterError(RedisError):
"""
Cluster errors occurred multiple times, resulting in an exhaustion of the
command execution ``TTL``
"""
class ClusterCrossSlotError(ResponseError):
"""Raised when keys in request don't hash to the same slot"""
def __init__(
self,
message: Optional[str] = None,
command: Optional[bytes] = None,
keys: Optional[Tuple[ValueT, ...]] = None,
) -> None:
super().__init__(message or "Keys in request don't hash to the same slot")
self.command = command
self.keys = keys
class ClusterRoutingError(RedisClusterException):
"""Raised when keys in request can't be routed to destination nodes"""
class ClusterDownError(ClusterError, ResponseError):
"""
Error indicated ``CLUSTERDOWN`` error received from cluster.
By default Redis Cluster nodes stop accepting queries if they detect there
is at least a hash slot uncovered (no available node is serving it).
This way if the cluster is partially down (for example a range of hash
slots are no longer covered) the entire cluster eventually becomes
unavailable. It automatically returns available as soon as all the slots
are covered again.
"""
def __init__(self, resp: str) -> None:
self.args = (resp,)
self.message = resp
class ClusterTransactionError(ClusterError):
def __init__(self, msg: str) -> None:
self.msg = msg
class ClusterResponseError(ClusterError):
"""
Raised when application logic to combine multi node
cluster responses has errors.
"""
def __init__(self, message: str, responses: Mapping[str, Any]) -> None:
super().__init__(message)
self.responses = responses
class AskError(ResponseError):
"""
Error indicated ``ASK`` error received from cluster.
When a slot is set as ``MIGRATING``, the node will accept all queries that
pertain to this hash slot, but only if the key in question exists,
otherwise the query is forwarded using a -ASK redirection to the node that
is target of the migration.
src node: ``MIGRATING`` to dst node
get > ``ASK`` error
ask dst node > ``ASKING`` command
dst node: ``IMPORTING`` from src node
asking command only affects next command
any op will be allowed after asking command
"""
def __init__(self, resp: str) -> None:
self.args = (resp,)
self.message = resp
slot_id, new_node = resp.split(" ")
host, port = new_node.rsplit(":", 1)
self.slot_id = int(slot_id)
self.node_addr = self.host, self.port = host, int(port)
class TryAgainError(ResponseError):
"""
Error indicated ``TRYAGAIN`` error received from cluster.
Operations on keys that don't exist or are - during resharding - split
between the source and destination nodes, will generate a -``TRYAGAIN`` error.
"""
class MovedError(AskError):
"""
Error indicated ``MOVED`` error received from cluster.
A request sent to a node that doesn't serve this key will be replayed with
a ``MOVED`` error that points to the correct node.
"""
class AuthenticationError(ResponseError):
"""
Base class for authentication errors
"""
class AuthenticationFailureError(AuthenticationError):
"""
Raised when authentication parameters were provided
but were invalid
"""
class AuthenticationRequiredError(AuthenticationError):
"""
Raised when authentication parameters are required
but not provided
"""
class AuthorizationError(RedisError):
"""
Base class for authorization errors
"""
class FunctionError(RedisError):
"""
Raised for errors relating to redis functions
"""
class SentinelConnectionError(ConnectionError):
pass
class PrimaryNotFoundError(SentinelConnectionError):
"""
Raised when a primary cannot be located in a
sentinel managed redis
"""
class ReplicaNotFoundError(SentinelConnectionError):
"""
Raised when a replica cannot be located in a
sentinel managed redis
"""
class UnknownCommandError(ResponseError):
"""
Raised when the server returns an error response relating
to an unknown command.
"""
ERROR_REGEX = re.compile("unknown command `(.*?)`")
#: Name of command requested
command: str
def __init__(self, message: str) -> None:
self.command = self.ERROR_REGEX.findall(message).pop()
super().__init__(self, message)
class StreamConsumerError(RedisError):
"""
Base exception for stream consumer related errors
"""
class StreamConsumerGroupError(StreamConsumerError):
"""
Base exception for consumer group related errors
"""
class StreamDuplicateConsumerGroupError(StreamConsumerGroupError):
"""
Raised when and attempt to create a stream consumer
group fails because it already exists
"""
class StreamConsumerInitializationError(StreamConsumerError):
"""
Raised when a stream consumer could not be initialized
based on the configuration provided
"""
| 23.940199 | 82 | 0.687899 | from __future__ import annotations
import re
from typing import Any
from coredis.typing import Mapping, Optional, Set, Tuple, ValueT
class RedisError(Exception):
class CommandSyntaxError(RedisError):
def __init__(self, arguments: Set[str], message: str) -> None:
self.arguments: Set[str] = arguments
super().__init__(message)
class CommandNotSupportedError(RedisError):
def __init__(self, cmd: str, current_version: str) -> None:
super().__init__(
self, f"{cmd} is not supported on server version {current_version}"
)
class ConnectionError(RedisError):
pass
class ProtocolError(ConnectionError):
class TimeoutError(RedisError):
pass
class BusyLoadingError(ConnectionError):
pass
class InvalidResponse(RedisError):
pass
class ResponseError(RedisError):
pass
class DataError(RedisError):
pass
class NoKeyError(RedisError):
class WrongTypeError(ResponseError):
class PubSubError(RedisError):
pass
class WatchError(RedisError):
pass
class NoScriptError(ResponseError):
pass
class ExecAbortError(ResponseError):
pass
class ReadOnlyError(ResponseError):
pass
class LockError(RedisError, ValueError):
class RedisClusterException(Exception):
class ClusterError(RedisError):
class ClusterCrossSlotError(ResponseError):
def __init__(
self,
message: Optional[str] = None,
command: Optional[bytes] = None,
keys: Optional[Tuple[ValueT, ...]] = None,
) -> None:
super().__init__(message or "Keys in request don't hash to the same slot")
self.command = command
self.keys = keys
class ClusterRoutingError(RedisClusterException):
class ClusterDownError(ClusterError, ResponseError):
def __init__(self, resp: str) -> None:
self.args = (resp,)
self.message = resp
class ClusterTransactionError(ClusterError):
def __init__(self, msg: str) -> None:
self.msg = msg
class ClusterResponseError(ClusterError):
def __init__(self, message: str, responses: Mapping[str, Any]) -> None:
super().__init__(message)
self.responses = responses
class AskError(ResponseError):
def __init__(self, resp: str) -> None:
self.args = (resp,)
self.message = resp
slot_id, new_node = resp.split(" ")
host, port = new_node.rsplit(":", 1)
self.slot_id = int(slot_id)
self.node_addr = self.host, self.port = host, int(port)
class TryAgainError(ResponseError):
class MovedError(AskError):
class AuthenticationError(ResponseError):
class AuthenticationFailureError(AuthenticationError):
class AuthenticationRequiredError(AuthenticationError):
class AuthorizationError(RedisError):
class FunctionError(RedisError):
class SentinelConnectionError(ConnectionError):
pass
class PrimaryNotFoundError(SentinelConnectionError):
class ReplicaNotFoundError(SentinelConnectionError):
class UnknownCommandError(ResponseError):
ERROR_REGEX = re.compile("unknown command `(.*?)`")
#: Name of command requested
command: str
def __init__(self, message: str) -> None:
self.command = self.ERROR_REGEX.findall(message).pop()
super().__init__(self, message)
class StreamConsumerError(RedisError):
class StreamConsumerGroupError(StreamConsumerError):
class StreamDuplicateConsumerGroupError(StreamConsumerGroupError):
class StreamConsumerInitializationError(StreamConsumerError):
| true | true |
f7f7b5c174e46a6f09811dfba7cacc5d8b7ae1bd | 208 | py | Python | Modulo III/aula021-funcaoParImparRetorno.py | ascaniopy/python | 6d8892b7b9ff803b7422a61e68a383ec6ac7d62d | [
"MIT"
] | null | null | null | Modulo III/aula021-funcaoParImparRetorno.py | ascaniopy/python | 6d8892b7b9ff803b7422a61e68a383ec6ac7d62d | [
"MIT"
] | null | null | null | Modulo III/aula021-funcaoParImparRetorno.py | ascaniopy/python | 6d8892b7b9ff803b7422a61e68a383ec6ac7d62d | [
"MIT"
] | null | null | null | def par(n=0):
if n % 2 == 0:
return True
else:
return False
# Programa principal
num = int(input('Digite um número: '))
if par(num):
print('É PAR.')
else:
print('NÃO É PAR.')
| 16 | 38 | 0.543269 | def par(n=0):
if n % 2 == 0:
return True
else:
return False
num = int(input('Digite um número: '))
if par(num):
print('É PAR.')
else:
print('NÃO É PAR.')
| true | true |
f7f7b5cbbc6c83984aa449d5d994dd9a545f8ecf | 1,591 | py | Python | app/run.py | tnkarthik/stock_analysis | 1b91469908bc7cfe9f5ed838540f5e5c67566f37 | [
"MIT"
] | null | null | null | app/run.py | tnkarthik/stock_analysis | 1b91469908bc7cfe9f5ed838540f5e5c67566f37 | [
"MIT"
] | null | null | null | app/run.py | tnkarthik/stock_analysis | 1b91469908bc7cfe9f5ed838540f5e5c67566f37 | [
"MIT"
] | null | null | null | import json
import plotly
import pandas as pd
from flask import Flask
from flask import render_template, request, jsonify
from plotly.graph_objs import Bar, Histogram, Scatter, Table
import plotly.graph_objs as go
import yfinance as yf
import sys
import re
import datetime as dt
import sys
sys.path.append("..")
from src.stock import stock
from src.plotting import basic_plots
app = Flask(__name__)
# index webpage displays cool visuals and receives user input text for model
@app.route('/')
@app.route('/index')
def index():
ticker = 'AAPL'
period = '1y'
graphs = basic_plots(ticker, period)
# encode plotly graphs in JSON
ids = ["graph-{}".format(i) for i, _ in enumerate(graphs)]
graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
# render web page with plotly graphs
return render_template('master.html', ids=ids, graphJSON=graphJSON)
# web page that handles user query and displays model results
@app.route('/go')
def go():
# save user input in query
ticker = request.args.get('query', '').upper()
period = request.args.get('timeperiod', '1y')
graphs = basic_plots(ticker, period)
# encode plotly graphs in JSON
ids = ["graph-{}".format(i) for i, _ in enumerate(graphs)]
graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
# This will render the go.html Please see that file.
return render_template(
'go.html',
query=ticker, ids=ids, graphJSON=graphJSON)
def main():
app.run(host='127.0.0.1', port=3001, debug=True)
if __name__ == '__main__':
main()
| 24.859375 | 76 | 0.703331 | import json
import plotly
import pandas as pd
from flask import Flask
from flask import render_template, request, jsonify
from plotly.graph_objs import Bar, Histogram, Scatter, Table
import plotly.graph_objs as go
import yfinance as yf
import sys
import re
import datetime as dt
import sys
sys.path.append("..")
from src.stock import stock
from src.plotting import basic_plots
app = Flask(__name__)
@app.route('/')
@app.route('/index')
def index():
ticker = 'AAPL'
period = '1y'
graphs = basic_plots(ticker, period)
ids = ["graph-{}".format(i) for i, _ in enumerate(graphs)]
graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
return render_template('master.html', ids=ids, graphJSON=graphJSON)
@app.route('/go')
def go():
ticker = request.args.get('query', '').upper()
period = request.args.get('timeperiod', '1y')
graphs = basic_plots(ticker, period)
ids = ["graph-{}".format(i) for i, _ in enumerate(graphs)]
graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
return render_template(
'go.html',
query=ticker, ids=ids, graphJSON=graphJSON)
def main():
app.run(host='127.0.0.1', port=3001, debug=True)
if __name__ == '__main__':
main()
| true | true |
f7f7b74e8b48c23e613392d850029ec6621cee77 | 14,231 | py | Python | Lib/site-packages/pandas_ml/snsaccessors/test/test_sns.py | jsturtz/env | d523b0be3345f883a727679d58ff29efb4389d16 | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/pandas_ml/snsaccessors/test/test_sns.py | jsturtz/env | d523b0be3345f883a727679d58ff29efb4389d16 | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/pandas_ml/snsaccessors/test/test_sns.py | jsturtz/env | d523b0be3345f883a727679d58ff29efb4389d16 | [
"bzip2-1.0.6"
] | null | null | null | #!/usr/bin/env python
import pytest
import matplotlib
matplotlib.use('Agg')
import numpy as np # noqa
import sklearn.datasets as datasets # noqa
import pandas_ml as pdml # noqa
import pandas_ml.util.testing as tm # noqa
try:
import seaborn as sns # noqa
except ImportError:
pass
class SeabornCase(tm.PlottingTestCase):
def setup_method(self):
try:
import matplotlib.pyplot # noqa
except ImportError:
import nose
# matplotlib.use doesn't work on Travis
# PYTHON=3.4 PANDAS=0.17.1 SKLEARN=0.16.1
raise nose.SkipTest()
self.iris = pdml.ModelFrame(datasets.load_iris())
self.diabetes = pdml.ModelFrame(datasets.load_diabetes())
# convert columns to str
self.diabetes.columns = ['col{0}'.format(c) if isinstance(c, int)
else c for c in self.diabetes.columns]
class TestSeabornAttrs(SeabornCase):
    """``df.sns`` must expose seaborn's module-level helpers unchanged."""

    def test_objectmapper(self):
        df = pdml.ModelFrame([])
        # Every accessor below must be the very same object as the
        # corresponding seaborn attribute (identity, not mere equality).
        passthrough = [
            'palplot', 'set', 'axes_style', 'plotting_context',
            'set_context', 'set_color_codes', 'reset_defaults',
            'reset_orig', 'set_palette', 'color_palette', 'husl_palette',
            'hls_palette', 'cubehelix_palette', 'dark_palette',
            'light_palette', 'diverging_palette', 'blend_palette',
            'xkcd_palette', 'crayon_palette', 'mpl_palette',
            'choose_colorbrewer_palette', 'choose_cubehelix_palette',
            'choose_light_palette', 'choose_dark_palette',
            'choose_diverging_palette', 'despine', 'desaturate',
            'saturate', 'set_hls_values',
        ]
        for attr in passthrough:
            self.assertIs(getattr(df.sns, attr), getattr(sns, attr))
        # Not mapped (kept from the original for reference):
        # self.assertIs(df.sns.ci_to_errsize, sns.ci_to_errsize)
        # self.assertIs(df.sns.axlabel, sns.axlabel)
class TestSeabornDistribution(SeabornCase):
    """Distribution plots (jointplot/pairplot/distplot/kdeplot/rugplot)
    drawn through the ``df.sns`` accessor.

    Convention under test: when a column is omitted, the ModelFrame's
    target column (labelled ``'.target'``) fills the missing axis.
    """

    def test_jointplot(self):
        df = self.iris

        # Single column: plotted against the implicit target column.
        jg = df.sns.jointplot(df.columns[1])
        self.assertIsInstance(jg, sns.JointGrid)
        self.assertEqual(jg.ax_joint.get_xlabel(), df.columns[1])
        self.assertEqual(jg.ax_joint.get_ylabel(), '.target')
        tm.close()

        # Two columns: both axes are taken verbatim.
        jg = df.sns.jointplot(df.columns[2], df.columns[3])
        self.assertIsInstance(jg, sns.JointGrid)
        self.assertEqual(jg.ax_joint.get_xlabel(), df.columns[2])
        self.assertEqual(jg.ax_joint.get_ylabel(), df.columns[3])

    def test_pairplot(self):
        df = self.iris
        pg = df.sns.pairplot()
        # Full grid over all 5 columns -> 5x5 = 25 axes.
        self._check_axes_shape(pg.axes, axes_num=25,
                               layout=(5, 5), figsize=None)
        for i in range(5):
            self.assertEqual(pg.axes[i][0].get_ylabel(), df.columns[i])
            self.assertEqual(pg.axes[-1][i].get_xlabel(), df.columns[i])
        tm.close()

    def test_distplot(self):
        # Previously disabled with a bare ``return`` (so it silently
        # "passed"); report the known CI failure as an explicit skip.
        pytest.skip('only fails on Travis')

        df = self.iris
        ax = df.sns.distplot()
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), '.target')
        tm.close()

        # pass scalar (str)
        ax = df.sns.distplot(df.columns[1])
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), df.columns[1])
        tm.close()

        # pass Series
        ax = df.sns.distplot(df[df.columns[2]])
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), df.columns[2])

    def test_dist_error(self):
        df = pdml.ModelFrame(np.random.randn(100, 5), columns=list('abcde'))

        # No target column -> the column to plot must be given explicitly.
        # ("ommitted" is sic: it must match the library's actual message.)
        msg = "a can't be ommitted when ModelFrame doesn't have target column"
        with pytest.raises(ValueError, match=msg):
            df.sns.distplot()

        # Multiple target columns are ambiguous as well.
        df.target = df[['a', 'b']]
        self.assertTrue(df.has_multi_targets())
        msg = "a can't be ommitted when ModelFrame has multiple target columns"
        with pytest.raises(ValueError, match=msg):
            df.sns.distplot()

    def test_kdeplot(self):
        df = pdml.ModelFrame(np.random.randn(100, 5), columns=list('abcde'))
        df.target = df['a']

        # No data argument: kdeplot leaves both axis labels empty.
        ax = df.sns.kdeplot()
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), '')
        self.assertEqual(ax.get_ylabel(), '')
        tm.close()

        # Columns may be passed by name ...
        ax = df.sns.kdeplot(data='b', data2='c')
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), 'b')
        self.assertEqual(ax.get_ylabel(), 'c')
        tm.close()

        # ... or as Series; the labels come out the same.
        ax = df.sns.kdeplot(data=df['b'], data2=df['c'])
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), 'b')
        self.assertEqual(ax.get_ylabel(), 'c')

    def test_rugplot(self):
        df = self.iris
        ax = df.sns.rugplot()
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        # rugplot does not add labels, so only the return type is checked.

        # pass scalar (str)
        ax = df.sns.rugplot(df.columns[1])
        self.assertIsInstance(ax, matplotlib.axes.Axes)

        # pass Series
        ax = df.sns.rugplot(df[df.columns[2]])
        self.assertIsInstance(ax, matplotlib.axes.Axes)

    def test_kde_rug_mix(self):
        import matplotlib.pyplot as plt
        df = pdml.ModelFrame(np.random.randn(100, 5), columns=list('abcde'))
        df.target = df['a']

        f, ax = plt.subplots(figsize=(6, 6))
        ax = df.sns.kdeplot('b', 'c', ax=ax)
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), 'b')
        self.assertEqual(ax.get_ylabel(), 'c')

        # The plot continues on the same axes; do not reset via tm.close().
        ax = df.sns.rugplot('b', color="g", ax=ax)
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), 'b')
        self.assertEqual(ax.get_ylabel(), 'c')

        ax = df.sns.rugplot('c', vertical=True, ax=ax)
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), 'b')
        self.assertEqual(ax.get_ylabel(), 'c')
class TestSeabornRegression(SeabornCase):
    """Regression plots drawn through the ``df.sns`` accessor."""

    def test_lmplot(self):
        frame = self.diabetes

        # One column: it is plotted against the implicit '.target' column.
        grid = frame.sns.lmplot(frame.columns[1])
        self.assertIsInstance(grid, sns.FacetGrid)
        self.assertEqual(grid.ax.get_xlabel(), frame.columns[1])
        self.assertEqual(grid.ax.get_ylabel(), '.target')
        tm.close()

        # Two columns: x and y are taken verbatim.
        grid = frame.sns.lmplot(frame.columns[1], frame.columns[2])
        self.assertIsInstance(grid, sns.FacetGrid)
        self.assertEqual(grid.ax.get_xlabel(), frame.columns[1])
        self.assertEqual(grid.ax.get_ylabel(), frame.columns[2])

    def test_regression_plot(self):
        frame = self.diabetes

        # Both axes-level regression plots share the same label contract.
        for plot_name in ('regplot', 'residplot'):
            draw = getattr(frame.sns, plot_name)

            axis = draw(frame.columns[1])
            self.assertIsInstance(axis, matplotlib.axes.Axes)
            self.assertEqual(axis.get_xlabel(), frame.columns[1])
            self.assertEqual(axis.get_ylabel(), '.target')
            tm.close()

            axis = draw(frame.columns[1], frame.columns[2])
            self.assertIsInstance(axis, matplotlib.axes.Axes)
            self.assertEqual(axis.get_xlabel(), frame.columns[1])
            self.assertEqual(axis.get_ylabel(), frame.columns[2])
            tm.close()

    def test_interactplot(self):
        # Placeholder kept from the original suite.
        pass

    def test_coefplot(self):
        # Placeholder kept from the original suite.
        pass
class TestSeabornCategorical(SeabornCase):
    """Categorical plots (factorplot, box/violin/strip, point/bar, count)
    plus the Facet/Pair/Joint grid wrappers, via the ``df.sns`` accessor.

    Convention under test: when ``x``/``y`` is omitted, the ModelFrame's
    target column (labelled ``'.target'``) fills the missing axis.
    """

    def test_factorplots(self):
        df = self.iris

        # Positional argument becomes y; x falls back to the target column.
        fg = df.sns.factorplot(df.columns[1])
        self.assertIsInstance(fg, sns.FacetGrid)
        self.assertEqual(fg.ax.get_xlabel(), '.target')
        self.assertEqual(fg.ax.get_ylabel(), df.columns[1])
        tm.close()

        # Explicit x only: y falls back to the target column.
        fg = df.sns.factorplot(x=df.columns[1])
        self.assertIsInstance(fg, sns.FacetGrid)
        self.assertEqual(fg.ax.get_xlabel(), df.columns[1])
        self.assertEqual(fg.ax.get_ylabel(), '.target')
        tm.close()

        # Both given explicitly: used verbatim.
        fg = df.sns.factorplot(x=df.columns[1], y=df.columns[2])
        self.assertIsInstance(fg, sns.FacetGrid)
        self.assertEqual(fg.ax.get_xlabel(), df.columns[1])
        self.assertEqual(fg.ax.get_ylabel(), df.columns[2])

    def test_categoricalplots(self):
        df = self.iris

        # The axis-defaulting contract holds for all three plot kinds.
        plots = ['boxplot', 'violinplot', 'stripplot']
        for plot in plots:
            func = getattr(df.sns, plot)

            # Positional argument -> y; target supplies x.
            ax = func(df.columns[1])
            self.assertIsInstance(ax, matplotlib.axes.Axes)
            self.assertEqual(ax.get_xlabel(), '.target')
            self.assertEqual(ax.get_ylabel(), df.columns[1])
            tm.close()

            # Keyword y only: same as positional.
            ax = func(y=df.columns[1])
            self.assertIsInstance(ax, matplotlib.axes.Axes)
            self.assertEqual(ax.get_xlabel(), '.target')
            self.assertEqual(ax.get_ylabel(), df.columns[1])
            tm.close()

            # Keyword x only: target supplies y.
            ax = func(x=df.columns[1])
            self.assertIsInstance(ax, matplotlib.axes.Axes)
            self.assertEqual(ax.get_xlabel(), df.columns[1])
            self.assertEqual(ax.get_ylabel(), '.target')
            tm.close()

            # Both explicit: used verbatim.
            ax = func(x=df.columns[1], y=df.columns[2])
            self.assertIsInstance(ax, matplotlib.axes.Axes)
            self.assertEqual(ax.get_xlabel(), df.columns[1])
            self.assertEqual(ax.get_ylabel(), df.columns[2])
            tm.close()

    def test_categorical_mean_plots(self):
        df = self.iris

        # Same contract for the mean/aggregation plots.
        plots = ['pointplot', 'barplot']
        for plot in plots:
            func = getattr(df.sns, plot)

            ax = func(df.columns[1])
            self.assertIsInstance(ax, matplotlib.axes.Axes)
            self.assertEqual(ax.get_xlabel(), '.target')
            self.assertEqual(ax.get_ylabel(), '{0}'.format(df.columns[1]))
            tm.close()

            ax = func(y=df.columns[1])
            self.assertIsInstance(ax, matplotlib.axes.Axes)
            self.assertEqual(ax.get_xlabel(), '.target')
            self.assertEqual(ax.get_ylabel(), '{0}'.format(df.columns[1]))
            tm.close()

            ax = func(x=df.columns[1])
            self.assertIsInstance(ax, matplotlib.axes.Axes)
            self.assertEqual(ax.get_xlabel(), df.columns[1])
            self.assertEqual(ax.get_ylabel(), '{0}'.format('.target'))
            tm.close()

            ax = func(x=df.columns[1], y=df.columns[2])
            self.assertIsInstance(ax, matplotlib.axes.Axes)
            self.assertEqual(ax.get_xlabel(), df.columns[1])
            self.assertEqual(ax.get_ylabel(), '{0}'.format(df.columns[2]))
            tm.close()

    def test_count_plots(self):
        df = self.iris

        # With no arguments, countplot counts the target column.
        ax = df.sns.countplot()
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), '.target')
        self.assertEqual(ax.get_ylabel(), 'count')
        tm.close()
        # NOTE(review): everything below is disabled by this early return
        # after a partial run; consider pytest.skip/xfail so the gap shows
        # up in test reports instead of silently passing.
        return  # ToDo: only fails on Travis

        ax = df.sns.countplot(df.columns[1])
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), df.columns[1])
        self.assertEqual(ax.get_ylabel(), 'count')
        tm.close()

        ax = df.sns.countplot(x=df.columns[1])
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), df.columns[1])
        self.assertEqual(ax.get_ylabel(), 'count')
        tm.close()

        ax = df.sns.countplot(y=df.columns[1])
        self.assertIsInstance(ax, matplotlib.axes.Axes)
        self.assertEqual(ax.get_xlabel(), 'count')
        self.assertEqual(ax.get_ylabel(), df.columns[1])
        tm.close()

        with tm.assertRaises(TypeError):
            # can't pass both x and y
            df.sns.countplot(x=df.columns[1], y=df.columns[2])

    # Matrix
    def test_heatmap(self):
        pass

    def test_clustermap(self):
        pass

    # Timeseries
    def test_tsplot(self):
        pass

    # AxisGrid
    def test_facetgrid(self):
        df = self.iris

        # Positional / row / col faceting over the 3-class target column.
        fg = df.sns.FacetGrid(df.columns[0])
        self.assertIsInstance(fg, sns.FacetGrid)
        self._check_axes_shape(fg.axes, axes_num=3, layout=(3, 1), figsize=None)
        tm.close()

        fg = df.sns.FacetGrid(row=df.columns[0])
        self.assertIsInstance(fg, sns.FacetGrid)
        self._check_axes_shape(fg.axes, axes_num=3, layout=(3, 1), figsize=None)
        tm.close()

        fg = df.sns.FacetGrid(col=df.columns[0])
        self.assertIsInstance(fg, sns.FacetGrid)
        self._check_axes_shape(fg.axes, axes_num=3, layout=(1, 3), figsize=None)
        tm.close()

    def test_pairgrid(self):
        df = self.iris
        pg = df.sns.PairGrid()
        self.assertIsInstance(pg, sns.PairGrid)
        self._check_axes_shape(pg.axes, axes_num=25, layout=(5, 5), figsize=None)

    def test_jointgrid(self):
        df = self.iris
        jg = df.sns.JointGrid(x=df.columns[1], y=df.columns[1])
        self.assertIsInstance(jg, sns.JointGrid)
| 34.794621 | 81 | 0.610217 |
import pytest
import matplotlib
matplotlib.use('Agg')
import numpy as np
import sklearn.datasets as datasets
import pandas_ml as pdml
import pandas_ml.util.testing as tm
try:
import seaborn as sns
except ImportError:
pass
class SeabornCase(tm.PlottingTestCase):
def setup_method(self):
try:
import matplotlib.pyplot
except ImportError:
import nose
# PYTHON=3.4 PANDAS=0.17.1 SKLEARN=0.16.1
raise nose.SkipTest()
self.iris = pdml.ModelFrame(datasets.load_iris())
self.diabetes = pdml.ModelFrame(datasets.load_diabetes())
# convert columns to str
self.diabetes.columns = ['col{0}'.format(c) if isinstance(c, int)
else c for c in self.diabetes.columns]
class TestSeabornAttrs(SeabornCase):
def test_objectmapper(self):
df = pdml.ModelFrame([])
self.assertIs(df.sns.palplot, sns.palplot)
self.assertIs(df.sns.set, sns.set)
self.assertIs(df.sns.axes_style, sns.axes_style)
self.assertIs(df.sns.plotting_context, sns.plotting_context)
self.assertIs(df.sns.set_context, sns.set_context)
self.assertIs(df.sns.set_color_codes, sns.set_color_codes)
self.assertIs(df.sns.reset_defaults, sns.reset_defaults)
self.assertIs(df.sns.reset_orig, sns.reset_orig)
self.assertIs(df.sns.set_palette, sns.set_palette)
self.assertIs(df.sns.color_palette, sns.color_palette)
self.assertIs(df.sns.husl_palette, sns.husl_palette)
self.assertIs(df.sns.hls_palette, sns.hls_palette)
self.assertIs(df.sns.cubehelix_palette, sns.cubehelix_palette)
self.assertIs(df.sns.dark_palette, sns.dark_palette)
self.assertIs(df.sns.light_palette, sns.light_palette)
self.assertIs(df.sns.diverging_palette, sns.diverging_palette)
self.assertIs(df.sns.blend_palette, sns.blend_palette)
self.assertIs(df.sns.xkcd_palette, sns.xkcd_palette)
self.assertIs(df.sns.crayon_palette, sns.crayon_palette)
self.assertIs(df.sns.mpl_palette, sns.mpl_palette)
self.assertIs(df.sns.choose_colorbrewer_palette,
sns.choose_colorbrewer_palette)
self.assertIs(df.sns.choose_cubehelix_palette,
sns.choose_cubehelix_palette)
self.assertIs(df.sns.choose_light_palette,
sns.choose_light_palette)
self.assertIs(df.sns.choose_dark_palette, sns.choose_dark_palette)
self.assertIs(df.sns.choose_diverging_palette,
sns.choose_diverging_palette)
self.assertIs(df.sns.despine, sns.despine)
self.assertIs(df.sns.desaturate, sns.desaturate)
self.assertIs(df.sns.saturate, sns.saturate)
self.assertIs(df.sns.set_hls_values, sns.set_hls_values)
# self.assertIs(df.sns.ci_to_errsize, sns.ci_to_errsize)
# self.assertIs(df.sns.axlabel, sns.axlabel)
class TestSeabornDistribution(SeabornCase):
def test_jointplot(self):
df = self.iris
jg = df.sns.jointplot(df.columns[1])
self.assertIsInstance(jg, sns.JointGrid)
self.assertEqual(jg.ax_joint.get_xlabel(), df.columns[1])
self.assertEqual(jg.ax_joint.get_ylabel(), '.target')
tm.close()
jg = df.sns.jointplot(df.columns[2], df.columns[3])
self.assertIsInstance(jg, sns.JointGrid)
self.assertEqual(jg.ax_joint.get_xlabel(), df.columns[2])
self.assertEqual(jg.ax_joint.get_ylabel(), df.columns[3])
def test_pairplot(self):
df = self.iris
pg = df.sns.pairplot()
self._check_axes_shape(pg.axes, axes_num=25,
layout=(5, 5), figsize=None)
for i in range(5):
self.assertEqual(pg.axes[i][0].get_ylabel(), df.columns[i])
self.assertEqual(pg.axes[-1][i].get_xlabel(), df.columns[i])
tm.close()
def test_distplot(self):
return # ToDo: only fails on Travis
df = self.iris
ax = df.sns.distplot()
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), '.target')
tm.close()
# pass scalar (str)
ax = df.sns.distplot(df.columns[1])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), df.columns[1])
tm.close()
# pass Series
ax = df.sns.distplot(df[df.columns[2]])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), df.columns[2])
def test_dist_error(self):
df = pdml.ModelFrame(np.random.randn(100, 5), columns=list('abcde'))
msg = "a can't be ommitted when ModelFrame doesn't have target column"
with pytest.raises(ValueError, match=msg):
df.sns.distplot()
df.target = df[['a', 'b']]
self.assertTrue(df.has_multi_targets())
msg = "a can't be ommitted when ModelFrame has multiple target columns"
with pytest.raises(ValueError, match=msg):
df.sns.distplot()
def test_kdeplot(self):
df = pdml.ModelFrame(np.random.randn(100, 5), columns=list('abcde'))
df.target = df['a']
ax = df.sns.kdeplot()
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), '')
self.assertEqual(ax.get_ylabel(), '')
tm.close()
ax = df.sns.kdeplot(data='b', data2='c')
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), 'b')
self.assertEqual(ax.get_ylabel(), 'c')
tm.close()
ax = df.sns.kdeplot(data=df['b'], data2=df['c'])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), 'b')
self.assertEqual(ax.get_ylabel(), 'c')
def test_rugplot(self):
df = self.iris
ax = df.sns.rugplot()
self.assertIsInstance(ax, matplotlib.axes.Axes)
ax = df.sns.rugplot(df.columns[1])
self.assertIsInstance(ax, matplotlib.axes.Axes)
ax = df.sns.rugplot(df[df.columns[2]])
self.assertIsInstance(ax, matplotlib.axes.Axes)
def test_kde_rug_mix(self):
import matplotlib.pyplot as plt
df = pdml.ModelFrame(np.random.randn(100, 5), columns=list('abcde'))
df.target = df['a']
f, ax = plt.subplots(figsize=(6, 6))
ax = df.sns.kdeplot('b', 'c', ax=ax)
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), 'b')
self.assertEqual(ax.get_ylabel(), 'c')
ax = df.sns.rugplot('b', color="g", ax=ax)
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), 'b')
self.assertEqual(ax.get_ylabel(), 'c')
ax = df.sns.rugplot('c', vertical=True, ax=ax)
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), 'b')
self.assertEqual(ax.get_ylabel(), 'c')
class TestSeabornRegression(SeabornCase):
def test_lmplot(self):
df = self.diabetes
fg = df.sns.lmplot(df.columns[1])
self.assertIsInstance(fg, sns.FacetGrid)
self.assertEqual(fg.ax.get_xlabel(), df.columns[1])
self.assertEqual(fg.ax.get_ylabel(), '.target')
tm.close()
fg = df.sns.lmplot(df.columns[1], df.columns[2])
self.assertIsInstance(fg, sns.FacetGrid)
self.assertEqual(fg.ax.get_xlabel(), df.columns[1])
self.assertEqual(fg.ax.get_ylabel(), df.columns[2])
def test_regression_plot(self):
df = self.diabetes
plots = ['regplot', 'residplot']
for plot in plots:
func = getattr(df.sns, plot)
ax = func(df.columns[1])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), df.columns[1])
self.assertEqual(ax.get_ylabel(), '.target')
tm.close()
ax = func(df.columns[1], df.columns[2])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), df.columns[1])
self.assertEqual(ax.get_ylabel(), df.columns[2])
tm.close()
def test_interactplot(self):
pass
def test_coefplot(self):
pass
class TestSeabornCategorical(SeabornCase):
def test_factorplots(self):
df = self.iris
fg = df.sns.factorplot(df.columns[1])
self.assertIsInstance(fg, sns.FacetGrid)
self.assertEqual(fg.ax.get_xlabel(), '.target')
self.assertEqual(fg.ax.get_ylabel(), df.columns[1])
tm.close()
fg = df.sns.factorplot(x=df.columns[1])
self.assertIsInstance(fg, sns.FacetGrid)
self.assertEqual(fg.ax.get_xlabel(), df.columns[1])
self.assertEqual(fg.ax.get_ylabel(), '.target')
tm.close()
fg = df.sns.factorplot(x=df.columns[1], y=df.columns[2])
self.assertIsInstance(fg, sns.FacetGrid)
self.assertEqual(fg.ax.get_xlabel(), df.columns[1])
self.assertEqual(fg.ax.get_ylabel(), df.columns[2])
def test_categoricalplots(self):
df = self.iris
plots = ['boxplot', 'violinplot', 'stripplot']
for plot in plots:
func = getattr(df.sns, plot)
ax = func(df.columns[1])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), '.target')
self.assertEqual(ax.get_ylabel(), df.columns[1])
tm.close()
ax = func(y=df.columns[1])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), '.target')
self.assertEqual(ax.get_ylabel(), df.columns[1])
tm.close()
ax = func(x=df.columns[1])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), df.columns[1])
self.assertEqual(ax.get_ylabel(), '.target')
tm.close()
ax = func(x=df.columns[1], y=df.columns[2])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), df.columns[1])
self.assertEqual(ax.get_ylabel(), df.columns[2])
tm.close()
def test_categorical_mean_plots(self):
df = self.iris
plots = ['pointplot', 'barplot']
for plot in plots:
func = getattr(df.sns, plot)
ax = func(df.columns[1])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), '.target')
self.assertEqual(ax.get_ylabel(), '{0}'.format(df.columns[1]))
tm.close()
ax = func(y=df.columns[1])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), '.target')
self.assertEqual(ax.get_ylabel(), '{0}'.format(df.columns[1]))
tm.close()
ax = func(x=df.columns[1])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), df.columns[1])
self.assertEqual(ax.get_ylabel(), '{0}'.format('.target'))
tm.close()
ax = func(x=df.columns[1], y=df.columns[2])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), df.columns[1])
self.assertEqual(ax.get_ylabel(), '{0}'.format(df.columns[2]))
tm.close()
def test_count_plots(self):
df = self.iris
ax = df.sns.countplot()
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), '.target')
self.assertEqual(ax.get_ylabel(), 'count')
tm.close()
return
ax = df.sns.countplot(df.columns[1])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), df.columns[1])
self.assertEqual(ax.get_ylabel(), 'count')
tm.close()
ax = df.sns.countplot(x=df.columns[1])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), df.columns[1])
self.assertEqual(ax.get_ylabel(), 'count')
tm.close()
ax = df.sns.countplot(y=df.columns[1])
self.assertIsInstance(ax, matplotlib.axes.Axes)
self.assertEqual(ax.get_xlabel(), 'count')
self.assertEqual(ax.get_ylabel(), df.columns[1])
tm.close()
with tm.assertRaises(TypeError):
df.sns.countplot(x=df.columns[1], y=df.columns[2])
# Matrix
def test_heatmap(self):
pass
def test_clustermap(self):
pass
# Timeseries
def test_tsplot(self):
pass
# AxisGrid
def test_facetgrid(self):
df = self.iris
fg = df.sns.FacetGrid(df.columns[0])
self.assertIsInstance(fg, sns.FacetGrid)
self._check_axes_shape(fg.axes, axes_num=3, layout=(3, 1), figsize=None)
tm.close()
fg = df.sns.FacetGrid(row=df.columns[0])
self.assertIsInstance(fg, sns.FacetGrid)
self._check_axes_shape(fg.axes, axes_num=3, layout=(3, 1), figsize=None)
tm.close()
fg = df.sns.FacetGrid(col=df.columns[0])
self.assertIsInstance(fg, sns.FacetGrid)
self._check_axes_shape(fg.axes, axes_num=3, layout=(1, 3), figsize=None)
tm.close()
def test_pairgrid(self):
df = self.iris
pg = df.sns.PairGrid()
self.assertIsInstance(pg, sns.PairGrid)
self._check_axes_shape(pg.axes, axes_num=25, layout=(5, 5), figsize=None)
def test_jointgrid(self):
df = self.iris
jg = df.sns.JointGrid(x=df.columns[1], y=df.columns[1])
self.assertIsInstance(jg, sns.JointGrid)
| true | true |
f7f7b9b7d2e8b252ba130788c5f4992643f1e895 | 4,804 | py | Python | UserInterface/AC_step_PC.py | PNNL-CompBio/ion-mob-ms | 5465e1ebc5282db5654c90a09be1c6fe84d8c196 | [
"BSD-2-Clause"
] | null | null | null | UserInterface/AC_step_PC.py | PNNL-CompBio/ion-mob-ms | 5465e1ebc5282db5654c90a09be1c6fe84d8c196 | [
"BSD-2-Clause"
] | 3 | 2022-03-28T21:55:49.000Z | 2022-03-28T22:00:14.000Z | UserInterface/AC_step_PC.py | PNNL-CompBio/ion-mob-ms | 5465e1ebc5282db5654c90a09be1c6fe84d8c196 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python3.9
import sys
import docker
import os
import tarfile
import time
def copy_a_file(client, src, dst):
    """Copy a single local file into a running docker container.

    Args:
        client: docker client object (``docker.from_env()``-style).
        src: local path of the file; any literal double quotes are stripped.
        dst: ``"<container_name>:<path/in/container>"``; only the directory
            part of the in-container path is used by ``put_archive``.

    Side effects: changes the process CWD to ``dirname(src)`` and creates
    (then removes) a temporary ``<src>.tar`` next to the source file.
    """
    name, dst = dst.split(':')
    container = client.containers.get(name)
    # Paths may arrive wrapped in literal double quotes (Windows shell).
    src = src.replace('"', "")
    srcname = os.path.basename(src)
    # tarfile.add() resolves member names relative to the CWD, so chdir to
    # the source directory to store only the basename inside the archive.
    os.chdir(os.path.dirname(src))
    tar_path = src + '.tar'
    try:
        # Context managers close both handles even on error (the original
        # leaked the read handle and left the tar behind on failure).
        with tarfile.open(tar_path, mode='w') as tar:
            tar.add(srcname)
        with open(tar_path, 'rb') as fh:
            data = fh.read()
        container.put_archive(os.path.dirname(dst), data)
    finally:
        if os.path.exists(tar_path):
            os.remove(tar_path)
def copy_some_files(client, src_list, dst):
    """Copy each non-empty path in ``src_list`` into a container.

    Args:
        client: docker client object.
        src_list: iterable of local file paths; empty strings are skipped
            (``dir /b`` output ends with an empty entry).
        dst: ``"<container>:<prefix>"`` string; each file's basename is
            appended to the (unchanged) prefix for its copy.
    """
    for src in src_list:
        if src != "":
            # Build the per-file destination from the original prefix.
            # The original code did ``dst = dst + srcname`` inside the loop,
            # accumulating every previous basename into ``dst`` and
            # corrupting all destinations after the first file.
            copy_a_file(client, src, dst + os.path.basename(src))
def run_container(exp,version,calibrant_file,framemeta_files, feature_files, target_list_file,raw_file_metadata):
    """Stage inputs into a local ./tmp tree, start the AutoCCS docker
    container with that tree mounted at /tmp, copy the inputs in, and run
    autoCCS inside the container.

    Args:
        exp: experiment type, "single" or "step".
        version: "standard" or "enhanced" (enhanced adds frame-meta files).
        calibrant_file: calibrant CSV path (single mode).
        framemeta_files: Windows glob-like pattern of frame-meta .txt files,
            expanded via ``dir /b`` — presumably ends in a 5-char suffix
            such as ``*.txt`` (the ``[:-5]`` slice assumes this; confirm).
        feature_files: Windows glob-like pattern of feature .csv files.
        target_list_file: target list path (step mode).
        raw_file_metadata: sample metadata path (single mode).

    NOTE(review): Windows-only (``dir/b`` via os.popen, backslash paths)
    and contains hard-coded per-user config-file paths.  For
    exp == "step" with version != "enhanced", ``command_list`` is never
    assigned and ``exec_run`` would raise NameError — confirm which
    exp/version combinations callers actually use.
    """
    cur_dir = os.path.dirname(__file__)
    os.chdir(cur_dir)
    # Single-field CCS from feature files + sample metadata + calibrants.
    if exp == "single" and version == "standard":
        command_list = ["python3.8","/AutoCCS/autoCCS.py", "--config_file", "/tmp/CF/autoCCS_single_config.xml", "--feature_files", '/tmp/FF/*.csv',
        "--sample_meta", ("/tmp/MD/" + os.path.basename(raw_file_metadata)), "--calibrant_file", ("/tmp/CBF/" + os.path.basename(calibrant_file)), "--output_dir", "/tmp/IV_Results", "--mode", "single",
        "--colname_for_filename", "RawFileName", "--tunemix_sample_type", "AgTune", "--colname_for_sample_type", "SampleType", "--single_mode", "batch"]
    if version == "enhanced":
        # Enumerate the frame-meta files with the Windows shell (dir /b)
        # and rebuild each bare name as a quoted absolute path.
        framemeta_files_quote = '"' + framemeta_files + '"'
        cmd1 = "dir/b " + framemeta_files_quote
        test1 = os.popen(cmd1).read()
        test1 = test1.split("\n")
        counter = 0
        for item in test1[:-1]:
            # [:-5] strips the glob suffix from the pattern — assumes a
            # 5-character suffix like "*.txt"; TODO confirm.
            test1[counter] = '"'+ framemeta_files[:-5] + item +'"'
            counter +=1
        if exp == "single":
            # Single mode with frame-meta support.
            command_list = ["python3.8","/AutoCCS/autoCCS.py", "--config_file", "/tmp/CF/autoCCS_single_config.xml", "--framemeta_files", '/tmp/FMF/*.txt', "--sample_meta",
            ("/tmp/MD/" + os.path.basename(raw_file_metadata)), "--calibrant_file", ("/tmp/CBF/" + os.path.basename(calibrant_file)), "--feature_files", '/tmp/FF/*.csv', "--output_dir", "/tmp/IV_Results", "--mode",
            "single", "--colname_for_filename", "RawFileName", "--tunemix_sample_type", "AgTune", "--colname_for_sample_type", "SampleType", "--single_mode", "batch"]
        elif exp == "step":
            # Stepped-field mode: needs a target list instead of metadata.
            command_list = ["python3.8","/AutoCCS/autoCCS.py", "--config_file", "/tmp/CF/autoCCS_step_config.xml", "--framemeta_files",
            '/tmp/FMF/*.txt', "--feature_files", '/tmp/FF/*.csv', "--output_dir", "/tmp/IV_Results", "--target_list_file", ("/tmp/TLF/" + os.path.basename(target_list_file)), "--mode", "multi"]
    # Same dir /b expansion for the feature-file pattern.
    feature_files_quote = '"' + feature_files + '"'
    cmd2 = "dir/b " + feature_files_quote
    test2 = os.popen(cmd2).read()
    test2 = test2.split("\n")
    counter = 0
    for item in test2[:-1]:
        test2[counter] = '"'+ feature_files[:-5] + item +'"'
        counter +=1
    image = "anubhav0fnu/autoccs"
    # Host-side staging tree; mounted read-write at /tmp in the container.
    local_mem = os.getcwd() + "\\tmp"
    os.makedirs(".\\tmp\\CF", exist_ok=True)
    os.makedirs(".\\tmp\\TLF", exist_ok=True)
    os.makedirs(".\\tmp\\FF", exist_ok=True)
    os.makedirs(".\\tmp\\FMF", exist_ok=True)
    os.makedirs(".\\tmp\\IV_Results", exist_ok=True)
    os.makedirs(".\\tmp\\MD", exist_ok=True)
    os.makedirs(".\\tmp\\CBF", exist_ok=True)
    time.sleep(5)
    # Single-letter prints below are progress breadcrumbs for debugging.
    print("Z\n")
    client = docker.from_env()
    print("Y\n")
    client.containers.run(image,name="AC_container",volumes={local_mem: {'bind': '/tmp', 'mode': 'rw'}}, detach=True, tty=True)
    print("A\n")
    if exp == "single":
        # NOTE(review): hard-coded per-user path; should come from config.
        config_file = "\\Users\\jaco059\\OneDrive - PNNL\\Desktop\\IonMobility_Desktop_App_Front_End\\docker_test_area\\AC_python_area\\autoCCS_single_config.xml"
        copy_a_file(client, raw_file_metadata, 'AC_container:/tmp/MD/meta_data')
        copy_a_file(client, calibrant_file, 'AC_container:/tmp/CBF/calibrant_file')
        print("B\n")
    if version == "enhanced":
        copy_some_files(client, test1, 'AC_container:/tmp/FMF/framemeta_files')
        print("C\n")
    copy_some_files(client, test2, 'AC_container:/tmp/FF/feature_files')
    print("D\n")
    if exp == "step":
        # NOTE(review): hard-coded per-user path; should come from config.
        config_file = "\\Users\\jaco059\\OneDrive - PNNL\\Desktop\\IonMobility_Desktop_App_Front_End\\docker_test_area\\AC_python_area\\autoCCS_step_config.xml"
        copy_a_file(client, target_list_file, 'AC_container:/tmp/TLF/target_list_file')
        print("E\n")
    copy_a_file(client, config_file, 'AC_container:/tmp/CF/config_file')
    AC_Container = client.containers.get('AC_container')
    time.sleep(5)
    print("F\n")
    AC_Container.exec_run(cmd=command_list)
    print("G\n")
| 45.320755 | 215 | 0.6301 |
import sys
import docker
import os
import tarfile
import time
def copy_a_file(client, src,dst):
name, dst = dst.split(':')
container = client.containers.get(name)
srcname = os.path.basename(src).replace('"',"")
src = src.replace('"', "")
os.chdir(os.path.dirname(src))
tar = tarfile.open(src + '.tar', mode='w')
tar.add(srcname)
tar.close()
data = open(src + '.tar', 'rb').read()
container.put_archive(os.path.dirname(dst), data)
os.remove((src + '.tar'))
def copy_some_files(client, src_list,dst):
for src in src_list:
if src != "":
srcname = os.path.basename(src)
dst = dst + srcname
copy_a_file(client, src,dst)
def run_container(exp,version,calibrant_file,framemeta_files, feature_files, target_list_file,raw_file_metadata):
cur_dir = os.path.dirname(__file__)
os.chdir(cur_dir)
if exp == "single" and version == "standard":
command_list = ["python3.8","/AutoCCS/autoCCS.py", "--config_file", "/tmp/CF/autoCCS_single_config.xml", "--feature_files", '/tmp/FF/*.csv',
"--sample_meta", ("/tmp/MD/" + os.path.basename(raw_file_metadata)), "--calibrant_file", ("/tmp/CBF/" + os.path.basename(calibrant_file)), "--output_dir", "/tmp/IV_Results", "--mode", "single",
"--colname_for_filename", "RawFileName", "--tunemix_sample_type", "AgTune", "--colname_for_sample_type", "SampleType", "--single_mode", "batch"]
if version == "enhanced":
framemeta_files_quote = '"' + framemeta_files + '"'
cmd1 = "dir/b " + framemeta_files_quote
test1 = os.popen(cmd1).read()
test1 = test1.split("\n")
counter = 0
for item in test1[:-1]:
test1[counter] = '"'+ framemeta_files[:-5] + item +'"'
counter +=1
if exp == "single":
command_list = ["python3.8","/AutoCCS/autoCCS.py", "--config_file", "/tmp/CF/autoCCS_single_config.xml", "--framemeta_files", '/tmp/FMF/*.txt', "--sample_meta",
("/tmp/MD/" + os.path.basename(raw_file_metadata)), "--calibrant_file", ("/tmp/CBF/" + os.path.basename(calibrant_file)), "--feature_files", '/tmp/FF/*.csv', "--output_dir", "/tmp/IV_Results", "--mode",
"single", "--colname_for_filename", "RawFileName", "--tunemix_sample_type", "AgTune", "--colname_for_sample_type", "SampleType", "--single_mode", "batch"]
elif exp == "step":
command_list = ["python3.8","/AutoCCS/autoCCS.py", "--config_file", "/tmp/CF/autoCCS_step_config.xml", "--framemeta_files",
'/tmp/FMF/*.txt', "--feature_files", '/tmp/FF/*.csv', "--output_dir", "/tmp/IV_Results", "--target_list_file", ("/tmp/TLF/" + os.path.basename(target_list_file)), "--mode", "multi"]
feature_files_quote = '"' + feature_files + '"'
cmd2 = "dir/b " + feature_files_quote
test2 = os.popen(cmd2).read()
test2 = test2.split("\n")
counter = 0
for item in test2[:-1]:
test2[counter] = '"'+ feature_files[:-5] + item +'"'
counter +=1
image = "anubhav0fnu/autoccs"
local_mem = os.getcwd() + "\\tmp"
os.makedirs(".\\tmp\\CF", exist_ok=True)
os.makedirs(".\\tmp\\TLF", exist_ok=True)
os.makedirs(".\\tmp\\FF", exist_ok=True)
os.makedirs(".\\tmp\\FMF", exist_ok=True)
os.makedirs(".\\tmp\\IV_Results", exist_ok=True)
os.makedirs(".\\tmp\\MD", exist_ok=True)
os.makedirs(".\\tmp\\CBF", exist_ok=True)
time.sleep(5)
print("Z\n")
client = docker.from_env()
print("Y\n")
client.containers.run(image,name="AC_container",volumes={local_mem: {'bind': '/tmp', 'mode': 'rw'}}, detach=True, tty=True)
print("A\n")
if exp == "single":
config_file = "\\Users\\jaco059\\OneDrive - PNNL\\Desktop\\IonMobility_Desktop_App_Front_End\\docker_test_area\\AC_python_area\\autoCCS_single_config.xml"
copy_a_file(client, raw_file_metadata, 'AC_container:/tmp/MD/meta_data')
copy_a_file(client, calibrant_file, 'AC_container:/tmp/CBF/calibrant_file')
print("B\n")
if version == "enhanced":
copy_some_files(client, test1, 'AC_container:/tmp/FMF/framemeta_files')
print("C\n")
copy_some_files(client, test2, 'AC_container:/tmp/FF/feature_files')
print("D\n")
if exp == "step":
config_file = "\\Users\\jaco059\\OneDrive - PNNL\\Desktop\\IonMobility_Desktop_App_Front_End\\docker_test_area\\AC_python_area\\autoCCS_step_config.xml"
copy_a_file(client, target_list_file, 'AC_container:/tmp/TLF/target_list_file')
print("E\n")
copy_a_file(client, config_file, 'AC_container:/tmp/CF/config_file')
AC_Container = client.containers.get('AC_container')
time.sleep(5)
print("F\n")
AC_Container.exec_run(cmd=command_list)
print("G\n")
| true | true |
f7f7ba79deb010fbfad298ecb6f92180be0264c5 | 544 | py | Python | api/enums.py | BerniWittmann/beachanmeldung | 9014dea5c31ea9e26f18d753d8d836741865c38e | [
"Unlicense",
"MIT"
] | null | null | null | api/enums.py | BerniWittmann/beachanmeldung | 9014dea5c31ea9e26f18d753d8d836741865c38e | [
"Unlicense",
"MIT"
] | 5 | 2020-06-05T17:31:08.000Z | 2022-03-11T23:16:12.000Z | api/enums.py | BerniWittmann/beachanmeldung | 9014dea5c31ea9e26f18d753d8d836741865c38e | [
"Unlicense",
"MIT"
] | null | null | null | from django.utils.translation import gettext_lazy as _
from djchoices import DjangoChoices, ChoiceItem
class TournamentGenderTypes(DjangoChoices):
female = ChoiceItem('female', _('female'))
male = ChoiceItem('male', _('male'))
mixed = ChoiceItem('mixed', _('mixed'))
class TeamStateTypes(DjangoChoices):
waiting = ChoiceItem('waiting', _('waiting'))
signed_up = ChoiceItem('signed up', _('signed up'))
needs_approval = ChoiceItem('needs approval', _('needs approval'))
denied = ChoiceItem('denied', _('denied'))
| 34 | 70 | 0.709559 | from django.utils.translation import gettext_lazy as _
from djchoices import DjangoChoices, ChoiceItem
class TournamentGenderTypes(DjangoChoices):
female = ChoiceItem('female', _('female'))
male = ChoiceItem('male', _('male'))
mixed = ChoiceItem('mixed', _('mixed'))
class TeamStateTypes(DjangoChoices):
waiting = ChoiceItem('waiting', _('waiting'))
signed_up = ChoiceItem('signed up', _('signed up'))
needs_approval = ChoiceItem('needs approval', _('needs approval'))
denied = ChoiceItem('denied', _('denied'))
| true | true |
f7f7ba91c7df393b76cb02103f077a5f7e325da7 | 11,152 | py | Python | orchestra/contrib/accounts/actions.py | udm88/django-orchestra | 49c84f13a8f92427b01231615136549fb5be3a78 | [
"Unlicense"
] | 68 | 2015-02-09T10:28:44.000Z | 2022-03-12T11:08:36.000Z | orchestra/contrib/accounts/actions.py | ferminhg/django-orchestra | 49c84f13a8f92427b01231615136549fb5be3a78 | [
"Unlicense"
] | 17 | 2015-05-01T18:10:03.000Z | 2021-03-19T21:52:55.000Z | orchestra/contrib/accounts/actions.py | ferminhg/django-orchestra | 49c84f13a8f92427b01231615136549fb5be3a78 | [
"Unlicense"
] | 29 | 2015-03-31T04:51:03.000Z | 2022-02-17T02:58:50.000Z | from functools import partial, wraps
from django.contrib import messages
from django.contrib.admin import helpers
from django.contrib.admin.utils import NestedObjects, quote
from django.contrib.auth import get_permission_codename
from django.core.urlresolvers import reverse, NoReverseMatch
from django.db import router
from django.shortcuts import redirect, render
from django.template.response import TemplateResponse
from django.utils import timezone
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.text import capfirst
from django.utils.translation import ungettext, ugettext_lazy as _
from orchestra.core import services
from . import settings
def list_contacts(modeladmin, request, queryset):
    """Admin action: jump to the contacts changelist filtered down to the
    contacts belonging to the selected accounts."""
    account_ids = queryset.order_by().values_list('id', flat=True).distinct()
    if not account_ids:
        messages.warning(request, "Select at least one account.")
        return
    id_csv = ','.join(str(pk) for pk in account_ids)
    changelist = reverse('admin:contacts_contact_changelist')
    return redirect('%s?account__in=%s' % (changelist, id_csv))
list_contacts.short_description = _("List contacts")
def list_accounts(modeladmin, request, queryset):
    """Admin action: jump to the contacts changelist restricted to the
    accounts that own the selected instances."""
    owner_ids = queryset.order_by().values_list('account_id', flat=True).distinct()
    if not owner_ids:
        messages.warning(request, "Select at least one instance.")
        return
    id_csv = ','.join(str(pk) for pk in owner_ids)
    changelist = reverse('admin:contacts_contact_changelist')
    return redirect('%s?id__in=%s' % (changelist, id_csv))
list_accounts.short_description = _("List accounts")
def service_report(modeladmin, request, queryset):
    """Render a per-account report of every registered service instance.

    For each selected account, collect the reverse related managers whose
    model is registered in ``orchestra.core.services`` (excluding the
    account model itself) and render them with the template configured in
    ``settings.ACCOUNTS_SERVICE_REPORT_TEMPLATE``.
    """
    # TODO resources
    accounts = []
    fields = []
    registered_services = services.get()
    # First we get related manager names to fire a prefetch related
    for name, field in queryset.model._meta.fields_map.items():
        model = field.related_model
        if model in registered_services and model != queryset.model:
            fields.append((model, name))
    # Order accessor names alphabetically by the service's verbose name,
    # then keep only the accessor names for prefetching/iteration.
    fields = sorted(fields, key=lambda f: f[0]._meta.verbose_name_plural.lower())
    fields = [field for model, field in fields]
    for account in queryset.prefetch_related(*fields):
        items = []
        for field in fields:
            related_manager = getattr(account, field)
            # (model meta, queryset of instances) pairs consumed by the template
            items.append((related_manager.model._meta, related_manager.all()))
        accounts.append((account, items))
    context = {
        'accounts': accounts,
        'date': timezone.now().today()
    }
    return render(request, settings.ACCOUNTS_SERVICE_REPORT_TEMPLATE, context)
def delete_related_services(modeladmin, request, queryset):
    """Admin action: delete every registered service owned by the selected
    accounts and disable the accounts themselves, after confirmation.

    Related objects are gathered with ``NestedObjects`` (the same collector
    the admin delete view uses); only models registered in
    ``orchestra.core.services`` are deleted.  The account's main system
    user is explicitly preserved because deleting it would cascade to the
    account itself.
    """
    opts = modeladmin.model._meta
    app_label = opts.app_label
    using = router.db_for_write(modeladmin.model)
    collector = NestedObjects(using=using)
    collector.collect(queryset)
    registered_services = services.get()
    related_services = []
    to_delete = []
    admin_site = modeladmin.admin_site
    def format(obj, account=False):
        # Render one collected object, linked to its admin change page
        # when such a page exists.
        has_admin = obj.__class__ in admin_site._registry
        opts = obj._meta
        no_edit_link = '%s: %s' % (capfirst(opts.verbose_name), force_text(obj))
        if has_admin:
            try:
                admin_url = reverse(
                    'admin:%s_%s_change' % (opts.app_label, opts.model_name),
                    None, (quote(obj._get_pk_val()),)
                )
            except NoReverseMatch:
                # Change url doesn't exist -- don't display link to edit
                return no_edit_link
            # Display a link to the admin page.
            context = (capfirst(opts.verbose_name), admin_url, obj)
            if account:
                context += (_("services to delete:"),)
                return format_html('{} <a href="{}">{}</a> {}', *context)
            return format_html('{}: <a href="{}">{}</a>', *context)
        else:
            # Don't display link to edit, because it either has no
            # admin or is edited inline.
            return no_edit_link
    def format_nested(objs, result):
        # Recursively mirror the collector's nested list structure,
        # replacing each object with its rendered representation.
        if isinstance(objs, list):
            current = []
            for obj in objs:
                format_nested(obj, current)
            result.append(current)
        else:
            result.append(format(objs))
    # NOTE(review): ``main_systemuser`` is bound when the account node is
    # visited below; this relies on collector.nested() yielding the root
    # (account) object before its nested lists -- confirm, otherwise the
    # comparison would raise NameError.
    for nested in collector.nested():
        if isinstance(nested, list):
            # Is lists of objects
            current = []
            is_service = False
            for service in nested:
                if type(service) in registered_services:
                    if service == main_systemuser:
                        continue
                    current.append(format(service))
                    to_delete.append(service)
                    is_service = True
                elif is_service and isinstance(service, list):
                    nested = []
                    format_nested(service, nested)
                    current.append(nested[0])
                    is_service = False
                else:
                    is_service = False
            related_services.append(current)
        elif isinstance(nested, modeladmin.model):
            # Is account
            # Prevent the deletion of the main system user, which will delete the account
            main_systemuser = nested.main_systemuser
            related_services.append(format(nested, account=True))
    # The user has already confirmed the deletion.
    # Do the deletion and return a None to display the change list view again.
    if request.POST.get('post'):
        accounts = len(queryset)
        msg = _("Related services deleted and account disabled.")
        for account in queryset:
            account.is_active = False
            account.save(update_fields=('is_active',))
            modeladmin.log_change(request, account, msg)
        if accounts:
            relateds = len(to_delete)
            for obj in to_delete:
                obj_display = force_text(obj)
                modeladmin.log_deletion(request, obj, obj_display)
                obj.delete()
            context = {
                'accounts': accounts,
                'relateds': relateds,
            }
            msg = _("Successfully disabled %(accounts)d account and deleted %(relateds)d related services.") % context
            modeladmin.message_user(request, msg, messages.SUCCESS)
        # Return None to display the change list page again.
        return None
    if len(queryset) == 1:
        objects_name = force_text(opts.verbose_name)
    else:
        objects_name = force_text(opts.verbose_name_plural)
    # Summarize how many objects of each service type will be deleted.
    model_count = {}
    for model, objs in collector.model_objs.items():
        count = 0
        # discount main systemuser
        if model is modeladmin.model.main_systemuser.field.rel.to:
            count = len(objs) - 1
        # Discount account
        elif model is not modeladmin.model and model in registered_services:
            count = len(objs)
        if count:
            model_count[model._meta.verbose_name_plural] = count
    if not model_count:
        modeladmin.message_user(request, _("Nothing to delete"), messages.WARNING)
        return None
    context = dict(
        admin_site.each_context(request),
        title=_("Are you sure?"),
        objects_name=objects_name,
        deletable_objects=[related_services],
        model_count=dict(model_count).items(),
        queryset=queryset,
        opts=opts,
        action_checkbox_name=helpers.ACTION_CHECKBOX_NAME,
    )
    request.current_app = admin_site.name
    # Display the confirmation page
    template = 'admin/%s/%s/delete_related_services_confirmation.html' % (app_label, opts.model_name)
    return TemplateResponse(request, template, context)
delete_related_services.short_description = _("Delete related services")
def disable_selected(modeladmin, request, queryset, disable=True):
    """Admin action that disables (or enables, when ``disable=False``) the
    selected accounts after an explicit confirmation step.

    The first request renders a confirmation page listing, per account,
    the related services that will be affected; the confirmed POST (with
    a ``post`` key) performs the state change and logs it.
    """
    opts = modeladmin.model._meta
    app_label = opts.app_label
    verbose_action_name = _("disabled") if disable else _("enabled")
    # The user has already confirmed the deletion.
    # Do the disable and return a None to display the change list view again.
    if request.POST.get('post'):
        n = 0
        for account in queryset:
            account.disable() if disable else account.enable()
            modeladmin.log_change(request, account, verbose_action_name.capitalize())
            n += 1
        modeladmin.message_user(request, ungettext(
            _("One account has been successfully %s.") % verbose_action_name,
            _("%i accounts have been successfully %s.") % (n, verbose_action_name),
            n)
        )
        return None
    user = request.user
    admin_site = modeladmin.admin_site
    # Bugfix: ``perms_needed`` was referenced inside the formatter without
    # ever being defined, raising NameError whenever the user lacked a
    # delete permission on a related object.  It now starts as an empty
    # set, mirroring django's get_deleted_objects().
    perms_needed = set()
    def format_obj(obj):
        # Render one object, linked to its admin change page when it has one,
        # recording any missing delete permissions along the way.
        has_admin = obj.__class__ in admin_site._registry
        opts = obj._meta
        no_edit_link = '%s: %s' % (capfirst(opts.verbose_name), force_text(obj))
        if has_admin:
            try:
                admin_url = reverse(
                    'admin:%s_%s_change' % (opts.app_label, opts.model_name),
                    None,
                    (quote(obj._get_pk_val()),)
                )
            except NoReverseMatch:
                # Change url doesn't exist -- don't display link to edit
                return no_edit_link
            p = '%s.%s' % (opts.app_label, get_permission_codename('delete', opts))
            if not user.has_perm(p):
                perms_needed.add(opts.verbose_name)
            # Display a link to the admin page.
            context = (capfirst(opts.verbose_name), admin_url, obj)
            return format_html('{}: <a href="{}">{}</a>', *context)
        else:
            # Don't display link to edit, because it either has no
            # admin or is edited inline.
            return no_edit_link
    # [rendered account, [rendered related services]] pairs for the template.
    display = []
    for account in queryset:
        current = []
        for related in account.get_services_to_disable():
            current.append(format_obj(related))
        display.append([format_obj(account), current])
    if len(queryset) == 1:
        objects_name = force_text(opts.verbose_name)
    else:
        objects_name = force_text(opts.verbose_name_plural)
    context = dict(
        admin_site.each_context(request),
        action_name='disable_selected' if disable else 'enable_selected',
        disable=disable,
        title=_("Are you sure?"),
        objects_name=objects_name,
        deletable_objects=display,
        queryset=queryset,
        opts=opts,
        action_checkbox_name=helpers.ACTION_CHECKBOX_NAME,
    )
    request.current_app = admin_site.name
    template = 'admin/%s/%s/disable_selected_confirmation.html' % (app_label, opts.model_name)
    return TemplateResponse(request, template, context)
disable_selected.short_description = _("Disable selected accounts")
disable_selected.url_name = 'disable'
disable_selected.tool_description = _("Disable")
enable_selected = partial(disable_selected, disable=False)
enable_selected.__name__ = 'enable_selected'
enable_selected.url_name = 'enable'
enable_selected.tool_description = _("Enable")
| 38.722222 | 118 | 0.627242 | from functools import partial, wraps
from django.contrib import messages
from django.contrib.admin import helpers
from django.contrib.admin.utils import NestedObjects, quote
from django.contrib.auth import get_permission_codename
from django.core.urlresolvers import reverse, NoReverseMatch
from django.db import router
from django.shortcuts import redirect, render
from django.template.response import TemplateResponse
from django.utils import timezone
from django.utils.encoding import force_text
from django.utils.html import format_html
from django.utils.text import capfirst
from django.utils.translation import ungettext, ugettext_lazy as _
from orchestra.core import services
from . import settings
def list_contacts(modeladmin, request, queryset):
ids = queryset.order_by().values_list('id', flat=True).distinct()
if not ids:
messages.warning(request, "Select at least one account.")
return
url = reverse('admin:contacts_contact_changelist')
url += '?account__in=%s' % ','.join(map(str, ids))
return redirect(url)
list_contacts.short_description = _("List contacts")
def list_accounts(modeladmin, request, queryset):
accounts = queryset.order_by().values_list('account_id', flat=True).distinct()
if not accounts:
messages.warning(request, "Select at least one instance.")
return
url = reverse('admin:contacts_contact_changelist')
url += '?id__in=%s' % ','.join(map(str, accounts))
return redirect(url)
list_accounts.short_description = _("List accounts")
def service_report(modeladmin, request, queryset):
accounts = []
fields = []
registered_services = services.get()
for name, field in queryset.model._meta.fields_map.items():
model = field.related_model
if model in registered_services and model != queryset.model:
fields.append((model, name))
fields = sorted(fields, key=lambda f: f[0]._meta.verbose_name_plural.lower())
fields = [field for model, field in fields]
for account in queryset.prefetch_related(*fields):
items = []
for field in fields:
related_manager = getattr(account, field)
items.append((related_manager.model._meta, related_manager.all()))
accounts.append((account, items))
context = {
'accounts': accounts,
'date': timezone.now().today()
}
return render(request, settings.ACCOUNTS_SERVICE_REPORT_TEMPLATE, context)
def delete_related_services(modeladmin, request, queryset):
opts = modeladmin.model._meta
app_label = opts.app_label
using = router.db_for_write(modeladmin.model)
collector = NestedObjects(using=using)
collector.collect(queryset)
registered_services = services.get()
related_services = []
to_delete = []
admin_site = modeladmin.admin_site
def format(obj, account=False):
has_admin = obj.__class__ in admin_site._registry
opts = obj._meta
no_edit_link = '%s: %s' % (capfirst(opts.verbose_name), force_text(obj))
if has_admin:
try:
admin_url = reverse(
'admin:%s_%s_change' % (opts.app_label, opts.model_name),
None, (quote(obj._get_pk_val()),)
)
except NoReverseMatch:
return no_edit_link
context = (capfirst(opts.verbose_name), admin_url, obj)
if account:
context += (_("services to delete:"),)
return format_html('{} <a href="{}">{}</a> {}', *context)
return format_html('{}: <a href="{}">{}</a>', *context)
else:
# admin or is edited inline.
return no_edit_link
def format_nested(objs, result):
if isinstance(objs, list):
current = []
for obj in objs:
format_nested(obj, current)
result.append(current)
else:
result.append(format(objs))
for nested in collector.nested():
if isinstance(nested, list):
# Is lists of objects
current = []
is_service = False
for service in nested:
if type(service) in registered_services:
if service == main_systemuser:
continue
current.append(format(service))
to_delete.append(service)
is_service = True
elif is_service and isinstance(service, list):
nested = []
format_nested(service, nested)
current.append(nested[0])
is_service = False
else:
is_service = False
related_services.append(current)
elif isinstance(nested, modeladmin.model):
# Is account
# Prevent the deletion of the main system user, which will delete the account
main_systemuser = nested.main_systemuser
related_services.append(format(nested, account=True))
# The user has already confirmed the deletion.
# Do the deletion and return a None to display the change list view again.
if request.POST.get('post'):
accounts = len(queryset)
msg = _("Related services deleted and account disabled.")
for account in queryset:
account.is_active = False
account.save(update_fields=('is_active',))
modeladmin.log_change(request, account, msg)
if accounts:
relateds = len(to_delete)
for obj in to_delete:
obj_display = force_text(obj)
modeladmin.log_deletion(request, obj, obj_display)
obj.delete()
context = {
'accounts': accounts,
'relateds': relateds,
}
msg = _("Successfully disabled %(accounts)d account and deleted %(relateds)d related services.") % context
modeladmin.message_user(request, msg, messages.SUCCESS)
# Return None to display the change list page again.
return None
if len(queryset) == 1:
objects_name = force_text(opts.verbose_name)
else:
objects_name = force_text(opts.verbose_name_plural)
model_count = {}
for model, objs in collector.model_objs.items():
count = 0
# discount main systemuser
if model is modeladmin.model.main_systemuser.field.rel.to:
count = len(objs) - 1
# Discount account
elif model is not modeladmin.model and model in registered_services:
count = len(objs)
if count:
model_count[model._meta.verbose_name_plural] = count
if not model_count:
modeladmin.message_user(request, _("Nothing to delete"), messages.WARNING)
return None
context = dict(
admin_site.each_context(request),
title=_("Are you sure?"),
objects_name=objects_name,
deletable_objects=[related_services],
model_count=dict(model_count).items(),
queryset=queryset,
opts=opts,
action_checkbox_name=helpers.ACTION_CHECKBOX_NAME,
)
request.current_app = admin_site.name
# Display the confirmation page
template = 'admin/%s/%s/delete_related_services_confirmation.html' % (app_label, opts.model_name)
return TemplateResponse(request, template, context)
delete_related_services.short_description = _("Delete related services")
def disable_selected(modeladmin, request, queryset, disable=True):
opts = modeladmin.model._meta
app_label = opts.app_label
verbose_action_name = _("disabled") if disable else _("enabled")
# The user has already confirmed the deletion.
# Do the disable and return a None to display the change list view again.
if request.POST.get('post'):
n = 0
for account in queryset:
account.disable() if disable else account.enable()
modeladmin.log_change(request, account, verbose_action_name.capitalize())
n += 1
modeladmin.message_user(request, ungettext(
_("One account has been successfully %s.") % verbose_action_name,
_("%i accounts have been successfully %s.") % (n, verbose_action_name),
n)
)
return None
user = request.user
admin_site = modeladmin.admin_site
def format(obj):
has_admin = obj.__class__ in admin_site._registry
opts = obj._meta
no_edit_link = '%s: %s' % (capfirst(opts.verbose_name), force_text(obj))
if has_admin:
try:
admin_url = reverse(
'admin:%s_%s_change' % (opts.app_label, opts.model_name),
None,
(quote(obj._get_pk_val()),)
)
except NoReverseMatch:
# Change url doesn't exist -- don't display link to edit
return no_edit_link
p = '%s.%s' % (opts.app_label, get_permission_codename('delete', opts))
if not user.has_perm(p):
perms_needed.add(opts.verbose_name)
# Display a link to the admin page.
context = (capfirst(opts.verbose_name), admin_url, obj)
return format_html('{}: <a href="{}">{}</a>', *context)
else:
# Don't display link to edit, because it either has no
return no_edit_link
display = []
for account in queryset:
current = []
for related in account.get_services_to_disable():
current.append(format(related))
display.append([format(account), current])
if len(queryset) == 1:
objects_name = force_text(opts.verbose_name)
else:
objects_name = force_text(opts.verbose_name_plural)
context = dict(
admin_site.each_context(request),
action_name='disable_selected' if disable else 'enable_selected',
disable=disable,
title=_("Are you sure?"),
objects_name=objects_name,
deletable_objects=display,
queryset=queryset,
opts=opts,
action_checkbox_name=helpers.ACTION_CHECKBOX_NAME,
)
request.current_app = admin_site.name
template = 'admin/%s/%s/disable_selected_confirmation.html' % (app_label, opts.model_name)
return TemplateResponse(request, template, context)
disable_selected.short_description = _("Disable selected accounts")
disable_selected.url_name = 'disable'
disable_selected.tool_description = _("Disable")
enable_selected = partial(disable_selected, disable=False)
enable_selected.__name__ = 'enable_selected'
enable_selected.url_name = 'enable'
enable_selected.tool_description = _("Enable")
| true | true |
f7f7bc1c2027d7ad761ccdb4868af0a55ecf784a | 92 | py | Python | Codes/07. Example 03.py | mrfoxie/Python-for-beginners | 4aaff536cf4c311fd4fbd6cb913a546c4db66c50 | [
"MIT"
] | null | null | null | Codes/07. Example 03.py | mrfoxie/Python-for-beginners | 4aaff536cf4c311fd4fbd6cb913a546c4db66c50 | [
"MIT"
] | null | null | null | Codes/07. Example 03.py | mrfoxie/Python-for-beginners | 4aaff536cf4c311fd4fbd6cb913a546c4db66c50 | [
"MIT"
] | null | null | null | # Ask a user their weight (in pounds), convert it to kilograms and print on it on terminal.
| 46 | 91 | 0.75 | true | true | |
f7f7bcaff3181a9533a78329e9cde90510846141 | 15,020 | py | Python | library/f5bigip_gtm_pool.py | erjac77/ansible-role-f5 | c45b5d9d5f34a8ac6d19ded836d0a6b7ee7f8056 | [
"Apache-2.0"
] | 1 | 2020-02-21T06:48:14.000Z | 2020-02-21T06:48:14.000Z | library/f5bigip_gtm_pool.py | erjac77/ansible-role-f5 | c45b5d9d5f34a8ac6d19ded836d0a6b7ee7f8056 | [
"Apache-2.0"
] | null | null | null | library/f5bigip_gtm_pool.py | erjac77/ansible-role-f5 | c45b5d9d5f34a8ac6d19ded836d0a6b7ee7f8056 | [
"Apache-2.0"
] | 1 | 2021-03-29T03:55:34.000Z | 2021-03-29T03:55:34.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2016 Eric Jacob <erjac77@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Standard Ansible metadata block declaring the module's support status.
ANSIBLE_METADATA = {
    "metadata_version": "1.1",
    "status": ["preview"],
    "supported_by": "community",
}
DOCUMENTATION = """
---
module: f5bigip_gtm_pool
short_description: BIG-IP gtm pool module
description:
- Configures load balancing pools for the Global Traffic Manager.
version_added: "1.0.0" # of erjac77.f5 role
author:
- "Eric Jacob (@erjac77)"
options:
alternate_mode:
description:
- Specifies the load balancing mode that the system uses to load balance name resolution requests among the
members of this pool, if the preferred method is unsuccessful in picking a pool.
default: round-robin
choices: [
'drop-packet', 'fallback-ip', 'global-availability', 'none', 'packet-rate', 'ratio', 'return-to-dns',
'round-robin', 'static-persistence', 'topology', 'virtual-server-capacity', 'virtual-server-score'
]
canonical_name:
description:
- Specifies the canonical name of the zone.
disabled:
description:
- Specifies whether the data center and its resources are available for load balancing.
default: false
dynamic_ratio:
description:
- Enables or disables a dynamic ratio load balancing algorithm for this pool.
default: disabled
choices: ['disabled', 'enabled']
enabled:
description:
- Specifies whether the data center and its resources are available for load balancing.
default: true
fallback_ipv4:
description:
- Specifies the IPv4 address of the server to which the system directs requests in the event that the load
balancing methods configured for this pool fail to return a valid virtual server.
default: '::'
fallback_ipv6:
description:
- Specifies the IPv6 address of the server to which the system directs requests in the event that the load
balancing methods configured for this pool fail to return a valid virtual server.
default: '::'
fallback_mode:
description:
- Specifies the load balancing mode that the system uses to load balance name resolution requests among the
members of this pool, if the preferred and alternate modes are unsuccessful in picking a pool.
default: return-to-dns
choices: [
'completion-rate', 'cpu', 'drop-packet', 'fallback-ip', 'fewest-hops', 'global-availability',
'kilobytes-per-second', 'least-connections', 'lowest-round-trip-time', 'none', 'packet-rate',
'quality-of-service', 'ratio', 'return-to-dns', 'round-robin', 'static-persistence', 'topology',
'virtual-server-capacity', 'virtual-server-score'
]
limit_max_bps:
description:
- Specifies the maximum allowable data throughput rate, in bits per second, for the virtual servers in the
pool.
default: 0
limit_max_bps_status:
description:
- Enables or disables the limit-max-bps option for this pool.
default: disabled
choices: ['disabled', 'enabled']
limit_max_connections:
description:
- Specifies the number of current connections allowed for the virtual servers in the pool.
default: 0
limit_max_connections_status:
description:
- Enables or disables the limit-max-connections option for this pool.
default: disabled
choices: ['disabled', 'enabled']
limit_max_pps:
description:
- Specifies the maximum allowable data transfer rate, in packets per second, for the virtual servers in the
pool.
default: 0
limit_max_pps_status:
description:
- Enables or disables the limit-max-pps option for this pool.
default: disabled
choices: ['disabled', 'enabled']
load_balancing_mode:
description:
- Specifies the preferred load balancing mode that the system uses to load balance name resolution requests
among the members of this pool.
default: round-robin
choices: [
'completion-rate', 'cpu', 'drop-packet', 'fallback-ip', 'fewest-hops', 'global-availability',
'kilobytes-per-second', 'least-connections', 'lowest-round-trip-time', 'packet-rate',
'quality-of-service', 'ratio', 'return-to-dns', 'round-robin', 'static-persistence', 'topology',
'virtual-server-capacity', 'virtual-server-score'
]
manual_resume:
description:
- Enables or disables the manual resume function for this pool.
default: disabled
choices: ['disabled', 'enabled']
max_addresses_returned:
description:
- Specifies the maximum number of available virtual servers that the system lists in an A record response.
default: 1
members:
description:
- Specifies the vs-name of the pool members.
monitor:
description:
- Specifies the health monitors that the system uses to determine whether it can use this pool for load
balancing.
qos_hit_ratio:
description:
- Assigns a weight to the Hit Ratio performance factor for the Quality of Service dynamic load balancing
mode.
default: 5
qos_hops:
description:
- Assigns a weight to the Hops performance factor when the value of the either the load-balancing-mode or
fallback-mode options is quality-of-service.
default: 0
qos_kilobytes_second:
description:
- Assigns a weight to the Kilobytes per Second performance factor when the value of the either the
load-balancing-mode or fallback-mode options is quality-of-service.
default: 3
qos_lcs:
description:
- Assigns a weight to the Link Capacity performance factor when the value of the either the
load-balancing-mode or fallback-mode options is quality-of-service.
default: 30
qos_packet_rate:
description:
- Assigns a weight to the Packet Rate performance factor when the value of the either the
load-balancing-mode or fallback-mode options is quality-of-service.
default: 1
qos_rtt:
description:
- Assigns a weight to the Round Trip Time performance factor when the value of the either the
load-balancing-mode or fallback-mode options is quality-of-service.
default: 50
qos_topology:
description:
- Assigns a weight to the Topology performance factor when the value of the either the load-balancing-mode
or fallback-mode options is quality-of-service.
default: 0
qos_vs_capacity:
description:
- Assigns a weight to the Virtual Server performance factor when the value of the either the
load-balancing-mode or fallback-mode options is quality-of-service.
default: 0
qos_vs_score:
description:
- Assigns a weight to the Virtual Server Score performance factor when the value of the either the
load-balancing-mode or fallback-mode options is quality-of-service.
default: 0
ttl:
description:
- Specifies the number of seconds that the IP address, once found, is valid.
default: 30
verify_member_availability:
description:
- Specifies that the system verifies the availability of the members before sending a connection to those
resources.
default: enabled
choices: ['disabled', 'enabled']
extends_documentation_fragment:
- f5_common
- f5_app_service
- f5_description
- f5_name
- f5_partition
- f5_state
"""
EXAMPLES = """
- name: Create GTM Pool
f5bigip_gtm_pool:
provider:
server: "{{ ansible_host }}"
server_port: "{{ http_port | default(443) }}"
user: "{{ http_user }}"
password: "{{ http_pass }}"
validate_certs: false
name: my_pool
partition: Common
description: My pool
load_balancing_mode: global-availability
members:
- my_server:my_vs1
- my_server:my_vs2
state: present
delegate_to: localhost
"""
RETURN = """ # """
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.erjac77.network.f5.common import F5_ACTIVATION_CHOICES
from ansible.module_utils.erjac77.network.f5.common import F5_NAMED_OBJ_ARGS
from ansible.module_utils.erjac77.network.f5.common import F5_PROVIDER_ARGS
from ansible.module_utils.erjac77.network.f5.bigip import F5BigIpNamedObject
from f5.bigip.resource import OrganizingCollection
class ModuleParams(object):
    """Argument specification and AnsibleModule options for the GTM pool
    module."""

    @property
    def argument_spec(self):
        """Return the Ansible argument spec for a GTM pool.

        Bugfix: the activation-style options previously used
        ``choices=[F5_ACTIVATION_CHOICES]``, wrapping the choices list in
        another list, so Ansible compared each supplied value against a
        nested list and rejected every legal value ('enabled'/'disabled').
        They now pass ``F5_ACTIVATION_CHOICES`` directly.
        """
        argument_spec = dict(
            alternate_mode=dict(
                type="str",
                choices=[
                    "drop-packet",
                    "fallback-ip",
                    "global-availability",
                    "none",
                    "packet-rate",
                    "ratio",
                    "return-to-dns",
                    "round-robin",
                    "static-persistence",
                    "topology",
                    "virtual-server-capacity",
                    "virtual-server-score",
                ],
            ),
            app_service=dict(type="str"),
            canonical_name=dict(type="str"),
            description=dict(type="str"),
            disabled=dict(type="bool"),
            enabled=dict(type="bool"),
            dynamic_ratio=dict(type="str", choices=F5_ACTIVATION_CHOICES),
            fallback_ipv4=dict(type="str"),
            fallback_ipv6=dict(type="str"),
            fallback_mode=dict(
                type="str",
                choices=[
                    "completion-rate",
                    "cpu",
                    "drop-packet",
                    "fallback-ip",
                    "fewest-hops",
                    "global-availability",
                    "kilobytes-per-second",
                    "least-connections",
                    "lowest-round-trip-time",
                    "none",
                    "packet-rate",
                    "quality-of-service",
                    "ratio",
                    "return-to-dns",
                    "round-robin",
                    "static-persistence",
                    "topology",
                    "virtual-server-capacity",
                    "virtual-server-score",
                ],
            ),
            limit_max_bps=dict(type="int"),
            limit_max_bps_status=dict(type="str", choices=F5_ACTIVATION_CHOICES),
            limit_max_connections=dict(type="int"),
            limit_max_connections_status=dict(
                type="str", choices=F5_ACTIVATION_CHOICES
            ),
            limit_max_pps=dict(type="int"),
            limit_max_pps_status=dict(type="str", choices=F5_ACTIVATION_CHOICES),
            load_balancing_mode=dict(
                type="str",
                choices=[
                    "completion-rate",
                    "cpu",
                    "drop-packet",
                    "fallback-ip",
                    "fewest-hops",
                    "global-availability",
                    "kilobytes-per-second",
                    "least-connections",
                    "lowest-round-trip-time",
                    "packet-rate",
                    "quality-of-service",
                    "ratio",
                    "return-to-dns",
                    "round-robin",
                    "static-persistence",
                    "topology",
                    "virtual-server-capacity",
                    "virtual-server-score",
                ],
            ),
            manual_resume=dict(type="str", choices=F5_ACTIVATION_CHOICES),
            max_addresses_returned=dict(type="int"),
            members=dict(type="list"),
            # metadata=dict(type="list"),
            monitor=dict(type="str"),
            qos_hit_ratio=dict(type="int"),
            qos_hops=dict(type="int"),
            qos_kilobytes_second=dict(type="int"),
            qos_lcs=dict(type="int"),
            qos_packet_rate=dict(type="int"),
            qos_rtt=dict(type="int"),
            qos_topology=dict(type="int"),
            qos_vs_capacity=dict(type="int"),
            qos_vs_score=dict(type="int"),
            ttl=dict(type="int"),
            verify_member_availability=dict(
                type="str", choices=F5_ACTIVATION_CHOICES
            ),
        )
        argument_spec.update(F5_PROVIDER_ARGS)
        argument_spec.update(F5_NAMED_OBJ_ARGS)
        return argument_spec

    @property
    def supports_check_mode(self):
        """The module supports Ansible check mode."""
        return True

    @property
    def mutually_exclusive(self):
        """'disabled' and 'enabled' cannot both be supplied."""
        return [["disabled", "enabled"]]
class F5BigIpGtmPool(F5BigIpNamedObject):
    """CRUD wrapper around the BIG-IP GTM pool REST resource."""

    def _set_crud_methods(self):
        # On versions where ``tm.gtm.pools`` is an OrganizingCollection the
        # pools are split per record type; only A-record pools (a_s.a) are
        # wired here.  NOTE(review): other record types (AAAA, CNAME, ...)
        # are not handled -- confirm that is intentional.
        if isinstance(self._api.tm.gtm.pools, OrganizingCollection):
            self._methods = {
                "create": self._api.tm.gtm.pools.a_s.a.create,
                "read": self._api.tm.gtm.pools.a_s.a.load,
                "update": self._api.tm.gtm.pools.a_s.a.update,
                "delete": self._api.tm.gtm.pools.a_s.a.delete,
                "exists": self._api.tm.gtm.pools.a_s.a.exists,
            }
        else:
            # Older API: a single flat pool collection.
            self._methods = {
                "create": self._api.tm.gtm.pools.pool.create,
                "read": self._api.tm.gtm.pools.pool.load,
                "update": self._api.tm.gtm.pools.pool.update,
                "delete": self._api.tm.gtm.pools.pool.delete,
                "exists": self._api.tm.gtm.pools.pool.exists,
            }
def main():
    """Module entry point: build the AnsibleModule, apply the desired pool
    state and report the result."""
    spec = ModuleParams()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
        mutually_exclusive=spec.mutually_exclusive,
    )
    try:
        pool = F5BigIpGtmPool(check_mode=module.check_mode, **module.params)
        module.exit_json(**pool.flush())
    except Exception as exc:
        module.fail_json(msg=str(exc))


if __name__ == "__main__":
    main()
| 39.114583 | 119 | 0.593342 |
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: f5bigip_gtm_pool
short_description: BIG-IP gtm pool module
description:
- Configures load balancing pools for the Global Traffic Manager.
version_added: "1.0.0" # of erjac77.f5 role
author:
- "Eric Jacob (@erjac77)"
options:
alternate_mode:
description:
- Specifies the load balancing mode that the system uses to load balance name resolution requests among the
members of this pool, if the preferred method is unsuccessful in picking a pool.
default: round-robin
choices: [
'drop-packet', 'fallback-ip', 'global-availability', 'none', 'packet-rate', 'ratio', 'return-to-dns',
'round-robin', 'static-persistence', 'topology', 'virtual-server-capacity', 'virtual-server-score'
]
canonical_name:
description:
- Specifies the canonical name of the zone.
disabled:
description:
- Specifies whether the data center and its resources are available for load balancing.
default: false
dynamic_ratio:
description:
- Enables or disables a dynamic ratio load balancing algorithm for this pool.
default: disabled
choices: ['disabled', 'enabled']
enabled:
description:
- Specifies whether the data center and its resources are available for load balancing.
default: true
fallback_ipv4:
description:
- Specifies the IPv4 address of the server to which the system directs requests in the event that the load
balancing methods configured for this pool fail to return a valid virtual server.
default: '::'
fallback_ipv6:
description:
- Specifies the IPv6 address of the server to which the system directs requests in the event that the load
balancing methods configured for this pool fail to return a valid virtual server.
default: '::'
fallback_mode:
description:
- Specifies the load balancing mode that the system uses to load balance name resolution requests among the
members of this pool, if the preferred and alternate modes are unsuccessful in picking a pool.
default: return-to-dns
choices: [
'completion-rate', 'cpu', 'drop-packet', 'fallback-ip', 'fewest-hops', 'global-availability',
'kilobytes-per-second', 'least-connections', 'lowest-round-trip-time', 'none', 'packet-rate',
'quality-of-service', 'ratio', 'return-to-dns', 'round-robin', 'static-persistence', 'topology',
'virtual-server-capacity', 'virtual-server-score'
]
limit_max_bps:
description:
- Specifies the maximum allowable data throughput rate, in bits per second, for the virtual servers in the
pool.
default: 0
limit_max_bps_status:
description:
- Enables or disables the limit-max-bps option for this pool.
default: disabled
choices: ['disabled', 'enabled']
limit_max_connections:
description:
- Specifies the number of current connections allowed for the virtual servers in the pool.
default: 0
limit_max_connections_status:
description:
- Enables or disables the limit-max-connections option for this pool.
default: disabled
choices: ['disabled', 'enabled']
limit_max_pps:
description:
- Specifies the maximum allowable data transfer rate, in packets per second, for the virtual servers in the
pool.
default: 0
limit_max_pps_status:
description:
- Enables or disables the limit-max-pps option for this pool.
default: disabled
choices: ['disabled', 'enabled']
load_balancing_mode:
description:
- Specifies the preferred load balancing mode that the system uses to load balance name resolution requests
among the members of this pool.
default: round-robin
choices: [
'completion-rate', 'cpu', 'drop-packet', 'fallback-ip', 'fewest-hops', 'global-availability',
'kilobytes-per-second', 'least-connections', 'lowest-round-trip-time', 'packet-rate',
'quality-of-service', 'ratio', 'return-to-dns', 'round-robin', 'static-persistence', 'topology',
'virtual-server-capacity', 'virtual-server-score'
]
manual_resume:
description:
- Enables or disables the manual resume function for this pool.
default: disabled
choices: ['disabled', 'enabled']
max_addresses_returned:
description:
- Specifies the maximum number of available virtual servers that the system lists in an A record response.
default: 1
members:
description:
- Specifies the vs-name of the pool members.
monitor:
description:
- Specifies the health monitors that the system uses to determine whether it can use this pool for load
balancing.
qos_hit_ratio:
description:
- Assigns a weight to the Hit Ratio performance factor for the Quality of Service dynamic load balancing
mode.
default: 5
qos_hops:
description:
- Assigns a weight to the Hops performance factor when the value of the either the load-balancing-mode or
fallback-mode options is quality-of-service.
default: 0
qos_kilobytes_second:
description:
- Assigns a weight to the Kilobytes per Second performance factor when the value of the either the
load-balancing-mode or fallback-mode options is quality-of-service.
default: 3
qos_lcs:
description:
- Assigns a weight to the Link Capacity performance factor when the value of the either the
load-balancing-mode or fallback-mode options is quality-of-service.
default: 30
qos_packet_rate:
description:
- Assigns a weight to the Packet Rate performance factor when the value of the either the
load-balancing-mode or fallback-mode options is quality-of-service.
default: 1
qos_rtt:
description:
- Assigns a weight to the Round Trip Time performance factor when the value of the either the
load-balancing-mode or fallback-mode options is quality-of-service.
default: 50
qos_topology:
description:
- Assigns a weight to the Topology performance factor when the value of the either the load-balancing-mode
or fallback-mode options is quality-of-service.
default: 0
qos_vs_capacity:
description:
- Assigns a weight to the Virtual Server performance factor when the value of the either the
load-balancing-mode or fallback-mode options is quality-of-service.
default: 0
qos_vs_score:
description:
- Assigns a weight to the Virtual Server Score performance factor when the value of the either the
load-balancing-mode or fallback-mode options is quality-of-service.
default: 0
ttl:
description:
- Specifies the number of seconds that the IP address, once found, is valid.
default: 30
verify_member_availability:
description:
- Specifies that the system verifies the availability of the members before sending a connection to those
resources.
default: enabled
choices: ['disabled', 'enabled']
extends_documentation_fragment:
- f5_common
- f5_app_service
- f5_description
- f5_name
- f5_partition
- f5_state
"""
EXAMPLES = """
- name: Create GTM Pool
f5bigip_gtm_pool:
provider:
server: "{{ ansible_host }}"
server_port: "{{ http_port | default(443) }}"
user: "{{ http_user }}"
password: "{{ http_pass }}"
validate_certs: false
name: my_pool
partition: Common
description: My pool
load_balancing_mode: global-availability
members:
- my_server:my_vs1
- my_server:my_vs2
state: present
delegate_to: localhost
"""
RETURN = """ # """
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.erjac77.network.f5.common import F5_ACTIVATION_CHOICES
from ansible.module_utils.erjac77.network.f5.common import F5_NAMED_OBJ_ARGS
from ansible.module_utils.erjac77.network.f5.common import F5_PROVIDER_ARGS
from ansible.module_utils.erjac77.network.f5.bigip import F5BigIpNamedObject
from f5.bigip.resource import OrganizingCollection
class ModuleParams(object):
@property
def argument_spec(self):
argument_spec = dict(
alternate_mode=dict(
type="str",
choices=[
"drop-packet",
"fallback-ip",
"global-availability",
"none",
"packet-rate",
"ratio",
"return-to-dns",
"round-robin",
"static-persistence",
"topology",
"virtual-server-capacity",
"virtual-server-score",
],
),
app_service=dict(type="str"),
canonical_name=dict(type="str"),
description=dict(type="str"),
disabled=dict(type="bool"),
enabled=dict(type="bool"),
dynamic_ratio=dict(type="str", choices=[F5_ACTIVATION_CHOICES]),
fallback_ipv4=dict(type="str"),
fallback_ipv6=dict(type="str"),
fallback_mode=dict(
type="str",
choices=[
"completion-rate",
"cpu",
"drop-packet",
"fallback-ip",
"fewest-hops",
"global-availability",
"kilobytes-per-second",
"least-connections",
"lowest-round-trip-time",
"none",
"packet-rate",
"quality-of-service",
"ratio",
"return-to-dns",
"round-robin",
"static-persistence",
"topology",
"virtual-server-capacity",
"virtual-server-score",
],
),
limit_max_bps=dict(type="int"),
limit_max_bps_status=dict(type="str", choices=[F5_ACTIVATION_CHOICES]),
limit_max_connections=dict(type="int"),
limit_max_connections_status=dict(
type="str", choices=[F5_ACTIVATION_CHOICES]
),
limit_max_pps=dict(type="int"),
limit_max_pps_status=dict(type="str", choices=[F5_ACTIVATION_CHOICES]),
load_balancing_mode=dict(
type="str",
choices=[
"completion-rate",
"cpu",
"drop-packet",
"fallback-ip",
"fewest-hops",
"global-availability",
"kilobytes-per-second",
"least-connections",
"lowest-round-trip-time",
"packet-rate",
"quality-of-service",
"ratio",
"return-to-dns",
"round-robin",
"static-persistence",
"topology",
"virtual-server-capacity",
"virtual-server-score",
],
),
manual_resume=dict(type="str", choices=[F5_ACTIVATION_CHOICES]),
max_addresses_returned=dict(type="int"),
members=dict(type="list"),
monitor=dict(type="str"),
qos_hit_ratio=dict(type="int"),
qos_hops=dict(type="int"),
qos_kilobytes_second=dict(type="int"),
qos_lcs=dict(type="int"),
qos_packet_rate=dict(type="int"),
qos_rtt=dict(type="int"),
qos_topology=dict(type="int"),
qos_vs_capacity=dict(type="int"),
qos_vs_score=dict(type="int"),
ttl=dict(type="int"),
verify_member_availability=dict(
type="str", choices=[F5_ACTIVATION_CHOICES]
),
)
argument_spec.update(F5_PROVIDER_ARGS)
argument_spec.update(F5_NAMED_OBJ_ARGS)
return argument_spec
@property
def supports_check_mode(self):
return True
@property
def mutually_exclusive(self):
return [["disabled", "enabled"]]
class F5BigIpGtmPool(F5BigIpNamedObject):
def _set_crud_methods(self):
if isinstance(self._api.tm.gtm.pools, OrganizingCollection):
self._methods = {
"create": self._api.tm.gtm.pools.a_s.a.create,
"read": self._api.tm.gtm.pools.a_s.a.load,
"update": self._api.tm.gtm.pools.a_s.a.update,
"delete": self._api.tm.gtm.pools.a_s.a.delete,
"exists": self._api.tm.gtm.pools.a_s.a.exists,
}
else:
self._methods = {
"create": self._api.tm.gtm.pools.pool.create,
"read": self._api.tm.gtm.pools.pool.load,
"update": self._api.tm.gtm.pools.pool.update,
"delete": self._api.tm.gtm.pools.pool.delete,
"exists": self._api.tm.gtm.pools.pool.exists,
}
def main():
params = ModuleParams()
module = AnsibleModule(
argument_spec=params.argument_spec,
supports_check_mode=params.supports_check_mode,
mutually_exclusive=params.mutually_exclusive,
)
try:
obj = F5BigIpGtmPool(check_mode=module.check_mode, **module.params)
result = obj.flush()
module.exit_json(**result)
except Exception as exc:
module.fail_json(msg=str(exc))
if __name__ == "__main__":
main()
| true | true |
f7f7bced4604789f25a4e7e82c825d4fb6b982a9 | 1,724 | py | Python | facedistance/main.py | imakin/ProsesKiller | 2575a3516b4cfa245e1053876e61f0f222f214b2 | [
"MIT"
] | null | null | null | facedistance/main.py | imakin/ProsesKiller | 2575a3516b4cfa245e1053876e61f0f222f214b2 | [
"MIT"
] | null | null | null | facedistance/main.py | imakin/ProsesKiller | 2575a3516b4cfa245e1053876e61f0f222f214b2 | [
"MIT"
] | null | null | null |
import time
import dlib
import cv2
from interface_dummy import Connection
class Main(object):
def __init__(self):
self.camera = cv2.VideoCapture(0)
self.camera.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
self.camera.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)
self.face_detector_dlib = dlib.get_frontal_face_detector()
self.face_detector_cascade = cv2.CascadeClassifier("haarcascade_frontalface_alt2.xml")
self.connection = Connection()
self.connection.start()
def run(self):
while True:
s, image = self.camera.read()
if not s:continue
s = time.clock()
dets_dlib = self.face_detector_dlib(image, 1)
time_dlib = time.clock()-s
max_width = 0
for rectangle in dets_dlib:
if rectangle.width()>max_width:
max_width = rectangle.width()
if max_width>0:
sisi_depan = max_width/2
print(max_width)
self.connection.send("distance", str(max_width))
#~ s = time.clock()
#~ gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
#~ dets_cv2 = self.face_detector_cascade.detectMultiScale(
#~ gray,
#~ scaleFactor=1.1,
#~ minNeighbors=40,
#~ minSize=(30, 30),
#~ flags=cv2.CASCADE_SCALE_IMAGE,
#~ )
#~ time_cv2 = time.clock()-s
#~ if len(dets_cv2)>0 or len(dets_dlib):
#~ print(dets_cv2, time_cv2, " | ", dets_dlib, time_dlib)
app = Main()
app.run()
| 30.245614 | 94 | 0.531903 |
import time
import dlib
import cv2
from interface_dummy import Connection
class Main(object):
def __init__(self):
self.camera = cv2.VideoCapture(0)
self.camera.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
self.camera.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)
self.face_detector_dlib = dlib.get_frontal_face_detector()
self.face_detector_cascade = cv2.CascadeClassifier("haarcascade_frontalface_alt2.xml")
self.connection = Connection()
self.connection.start()
def run(self):
while True:
s, image = self.camera.read()
if not s:continue
s = time.clock()
dets_dlib = self.face_detector_dlib(image, 1)
time_dlib = time.clock()-s
max_width = 0
for rectangle in dets_dlib:
if rectangle.width()>max_width:
max_width = rectangle.width()
if max_width>0:
sisi_depan = max_width/2
print(max_width)
self.connection.send("distance", str(max_width))
app = Main()
app.run()
| true | true |
f7f7bcf0cda6e348bf9539ecba2327376a5fac62 | 3,818 | py | Python | PLC/Methods/GenerateNodeConfFile.py | dreibh/planetlab-lxc-plcapi | 065dfc54a2b668e99eab343d113f1a31fb154b13 | [
"BSD-3-Clause"
] | null | null | null | PLC/Methods/GenerateNodeConfFile.py | dreibh/planetlab-lxc-plcapi | 065dfc54a2b668e99eab343d113f1a31fb154b13 | [
"BSD-3-Clause"
] | null | null | null | PLC/Methods/GenerateNodeConfFile.py | dreibh/planetlab-lxc-plcapi | 065dfc54a2b668e99eab343d113f1a31fb154b13 | [
"BSD-3-Clause"
] | null | null | null | import random
import base64
from PLC.Faults import *
from PLC.Method import Method
from PLC.Parameter import Parameter, Mixed
from PLC.Nodes import Node, Nodes
from PLC.Interfaces import Interface, Interfaces
from PLC.Auth import Auth
class GenerateNodeConfFile(Method):
"""
Creates a new node configuration file if all network settings are
present. This function will generate a new node key for the
specified node, effectively invalidating any old configuration
files.
Non-admins can only generate files for nodes at their sites.
Returns the contents of the file if successful, faults otherwise.
"""
roles = ['admin', 'pi', 'tech']
accepts = [
Auth(),
Mixed(Node.fields['node_id'],
Node.fields['hostname']),
Parameter(bool, "True if you want to regenerate node key")
]
returns = Parameter(str, "Node configuration file")
def call(self, auth, node_id_or_hostname, regenerate_node_key = True):
# Get node information
nodes = Nodes(self.api, [node_id_or_hostname])
if not nodes:
raise PLCInvalidArgument("No such node")
node = nodes[0]
if node['peer_id'] is not None:
raise PLCInvalidArgument("Not a local node")
# If we are not an admin, make sure that the caller is a
# member of the site at which the node is located.
if 'admin' not in self.caller['roles']:
if node['site_id'] not in self.caller['site_ids']:
raise PLCPermissionDenied("Not allowed to generate a configuration file for that node")
# Get interfaces for this node
primary = None
interfaces = Interfaces(self.api, node['interface_ids'])
for interface in interfaces:
if interface['is_primary']:
primary = interface
break
if primary is None:
raise PLCInvalidArgument("No primary network configured")
# Split hostname into host and domain parts
parts = node['hostname'].split(".", 1)
if len(parts) < 2:
raise PLCInvalidArgument("Node hostname is invalid")
host = parts[0]
domain = parts[1]
if regenerate_node_key:
# Generate 32 random bytes
int8s = random.sample(range(0, 256), 32)
# Base64 encode their string representation
node['key'] = base64.b64encode(bytes(int8s)).decode()
# XXX Boot Manager cannot handle = in the key
node['key'] = node['key'].replace("=", "")
# Save it
node.sync()
# Generate node configuration file suitable for BootCD
file = ""
file += 'NODE_ID="%d"\n' % node['node_id']
file += 'NODE_KEY="%s"\n' % node['key']
if primary['mac']:
file += 'NET_DEVICE="%s"\n' % primary['mac'].lower()
file += 'IP_METHOD="%s"\n' % primary['method']
if primary['method'] == 'static':
file += 'IP_ADDRESS="%s"\n' % primary['ip']
file += 'IP_GATEWAY="%s"\n' % primary['gateway']
file += 'IP_NETMASK="%s"\n' % primary['netmask']
file += 'IP_NETADDR="%s"\n' % primary['network']
file += 'IP_BROADCASTADDR="%s"\n' % primary['broadcast']
file += 'IP_DNS1="%s"\n' % primary['dns1']
file += 'IP_DNS2="%s"\n' % (primary['dns2'] or "")
file += 'HOST_NAME="%s"\n' % host
file += 'DOMAIN_NAME="%s"\n' % domain
for interface in interfaces:
if interface['method'] == 'ipmi':
file += 'IPMI_ADDRESS="%s"\n' % interface['ip']
if interface['mac']:
file += 'IPMI_MAC="%s"\n' % interface['mac'].lower()
break
return file
| 35.351852 | 103 | 0.577266 | import random
import base64
from PLC.Faults import *
from PLC.Method import Method
from PLC.Parameter import Parameter, Mixed
from PLC.Nodes import Node, Nodes
from PLC.Interfaces import Interface, Interfaces
from PLC.Auth import Auth
class GenerateNodeConfFile(Method):
roles = ['admin', 'pi', 'tech']
accepts = [
Auth(),
Mixed(Node.fields['node_id'],
Node.fields['hostname']),
Parameter(bool, "True if you want to regenerate node key")
]
returns = Parameter(str, "Node configuration file")
def call(self, auth, node_id_or_hostname, regenerate_node_key = True):
nodes = Nodes(self.api, [node_id_or_hostname])
if not nodes:
raise PLCInvalidArgument("No such node")
node = nodes[0]
if node['peer_id'] is not None:
raise PLCInvalidArgument("Not a local node")
if 'admin' not in self.caller['roles']:
if node['site_id'] not in self.caller['site_ids']:
raise PLCPermissionDenied("Not allowed to generate a configuration file for that node")
primary = None
interfaces = Interfaces(self.api, node['interface_ids'])
for interface in interfaces:
if interface['is_primary']:
primary = interface
break
if primary is None:
raise PLCInvalidArgument("No primary network configured")
parts = node['hostname'].split(".", 1)
if len(parts) < 2:
raise PLCInvalidArgument("Node hostname is invalid")
host = parts[0]
domain = parts[1]
if regenerate_node_key:
int8s = random.sample(range(0, 256), 32)
node['key'] = base64.b64encode(bytes(int8s)).decode()
node['key'] = node['key'].replace("=", "")
node.sync()
file = ""
file += 'NODE_ID="%d"\n' % node['node_id']
file += 'NODE_KEY="%s"\n' % node['key']
if primary['mac']:
file += 'NET_DEVICE="%s"\n' % primary['mac'].lower()
file += 'IP_METHOD="%s"\n' % primary['method']
if primary['method'] == 'static':
file += 'IP_ADDRESS="%s"\n' % primary['ip']
file += 'IP_GATEWAY="%s"\n' % primary['gateway']
file += 'IP_NETMASK="%s"\n' % primary['netmask']
file += 'IP_NETADDR="%s"\n' % primary['network']
file += 'IP_BROADCASTADDR="%s"\n' % primary['broadcast']
file += 'IP_DNS1="%s"\n' % primary['dns1']
file += 'IP_DNS2="%s"\n' % (primary['dns2'] or "")
file += 'HOST_NAME="%s"\n' % host
file += 'DOMAIN_NAME="%s"\n' % domain
for interface in interfaces:
if interface['method'] == 'ipmi':
file += 'IPMI_ADDRESS="%s"\n' % interface['ip']
if interface['mac']:
file += 'IPMI_MAC="%s"\n' % interface['mac'].lower()
break
return file
| true | true |
f7f7bd00eb4e1a11bbc96dfc9abc1fc7012a5fbb | 14 | py | Python | pypi_test/module2/__init__.py | a524631266/pypi_test | 8be0cfe200457c152612a38c3d2ca0167c667fe0 | [
"MIT"
] | null | null | null | pypi_test/module2/__init__.py | a524631266/pypi_test | 8be0cfe200457c152612a38c3d2ca0167c667fe0 | [
"MIT"
] | null | null | null | pypi_test/module2/__init__.py | a524631266/pypi_test | 8be0cfe200457c152612a38c3d2ca0167c667fe0 | [
"MIT"
] | null | null | null | name2 = "mode" | 14 | 14 | 0.642857 | name2 = "mode" | true | true |
f7f7bdb27dac197e1416af3086c7324dfbcaf0b9 | 1,342 | py | Python | src/factory/evaluate/metrics.py | i-pan/kaggle-melanoma | caaec0d7e9cafc7b405eb86e7fdf00107d89e1d9 | [
"MIT"
] | 68 | 2020-08-26T00:50:45.000Z | 2022-03-04T05:31:44.000Z | src/factory/evaluate/metrics.py | FurkanThePythoneer/SkinCancerClassification | def5cfbbb7cbf80560b41ef93fd2d6ee6e9e7b5a | [
"MIT"
] | 2 | 2021-03-05T07:56:30.000Z | 2021-07-02T08:23:07.000Z | src/factory/evaluate/metrics.py | FurkanThePythoneer/SkinCancerClassification | def5cfbbb7cbf80560b41ef93fd2d6ee6e9e7b5a | [
"MIT"
] | 19 | 2020-08-26T11:36:42.000Z | 2021-11-04T02:02:28.000Z | import numpy as np
from sklearn import metrics
def auc(y_true, y_pred, **kwargs):
# y_pred.shape = (N, C)
# AUC for melanoma (class 0)
return {'auc': metrics.roc_auc_score((y_true==0).astype('float'), y_pred[:,0])}
def mel_auc(y_true, y_pred, **kwargs):
# y_pred.shape = (N, C)
# AUC for melanoma + nevi (class 0+1)
return {'mel_auc': metrics.roc_auc_score((y_true<=1).astype('float'), y_pred[:,0]+y_pred[:,1])}
def mel_f1(y_true, y_pred, **kwargs):
# y_pred.shape = (N, C)
# AUC for melanoma + nevi (class 0+1)
t = (y_true <= 1).astype('float')
p = (y_pred[:,0] + y_pred[:,1]) >= 0.5
p = p.astype('float')
return {'mel_f1': metrics.f1_score(t, p)}
def accuracy(y_true, y_pred, **kwargs):
return {'accuracy': np.mean(y_true == np.argmax(y_pred, axis=1))}
def auc2(y_true, y_pred, **kwargs):
# y_pred.shape = (N, 2)
# AUC for melanoma (class 1)
return {'auc2': metrics.roc_auc_score(y_true, y_pred)}
def arc_auc(y_true, y_pred, **kwargs):
# y_pred.shape = (N, 2)
# AUC for melanoma (class 1)
return {'arc_auc': metrics.roc_auc_score(y_true, y_pred)}
def auc3(y_true, y_pred, **kwargs):
# y_pred.shape = (N, 3) - includes prediction for nevus
t = (y_true == 2).astype('float')
p = y_pred[:,2]
return {'auc3': metrics.roc_auc_score(t, p)} | 28.553191 | 99 | 0.616244 | import numpy as np
from sklearn import metrics
def auc(y_true, y_pred, **kwargs):
return {'auc': metrics.roc_auc_score((y_true==0).astype('float'), y_pred[:,0])}
def mel_auc(y_true, y_pred, **kwargs):
return {'mel_auc': metrics.roc_auc_score((y_true<=1).astype('float'), y_pred[:,0]+y_pred[:,1])}
def mel_f1(y_true, y_pred, **kwargs):
t = (y_true <= 1).astype('float')
p = (y_pred[:,0] + y_pred[:,1]) >= 0.5
p = p.astype('float')
return {'mel_f1': metrics.f1_score(t, p)}
def accuracy(y_true, y_pred, **kwargs):
return {'accuracy': np.mean(y_true == np.argmax(y_pred, axis=1))}
def auc2(y_true, y_pred, **kwargs):
return {'auc2': metrics.roc_auc_score(y_true, y_pred)}
def arc_auc(y_true, y_pred, **kwargs):
return {'arc_auc': metrics.roc_auc_score(y_true, y_pred)}
def auc3(y_true, y_pred, **kwargs):
t = (y_true == 2).astype('float')
p = y_pred[:,2]
return {'auc3': metrics.roc_auc_score(t, p)} | true | true |
f7f7bdffe6e7d03547f245739dffe47f5b63f0a2 | 26,472 | py | Python | sapp/ui/tests/issues_test.py | facebook/sapp | 4b85d10a791d8e9c8ae83d1f62fbded24845f053 | [
"MIT"
] | 74 | 2020-12-18T20:04:30.000Z | 2022-03-22T22:26:02.000Z | sapp/ui/tests/issues_test.py | facebook/sapp | 4b85d10a791d8e9c8ae83d1f62fbded24845f053 | [
"MIT"
] | 61 | 2020-12-21T21:33:05.000Z | 2022-01-27T21:22:20.000Z | sapp/ui/tests/issues_test.py | facebook/sapp | 4b85d10a791d8e9c8ae83d1f62fbded24845f053 | [
"MIT"
] | 20 | 2021-04-08T01:28:53.000Z | 2022-03-22T22:26:05.000Z | # Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from unittest import TestCase
from ... import queries
from ...db import DB, DBType
from ...models import IssueInstanceSharedTextAssoc
from ...models import create as create_models
from ...tests.fake_object_generator import FakeObjectGenerator
from ..issues import Instance
class QueryTest(TestCase):
    def setUp(self) -> None:
        """Populate an in-memory DB with one run and four issue instances.

        The fixture the query tests rely on:
          - instance for code 6016: callable module.sub.function1,
            file module/sub.py, trace lengths 1/1, status "do_not_care"
          - instance for code 6017: callable module.sub.function2,
            file module/sub.py, trace lengths 2/2, status "valid_bug"
          - instance for code 6018: callable module.function3,
            file module/__init__.py, trace lengths 3/3, status "bad_practice"
          - instance for code 6019: callable module.function3,
            file module/__init__.py, trace lengths 0/0, default status

        NOTE(review): the membership assertions in the test methods assume
        these instances receive sequential ids 1-4; each instance is flushed
        via ``save_all`` before the next is created, presumably to guarantee
        that ordering — confirm against FakeObjectGenerator.
        """
        self.db = DB(DBType.MEMORY)
        create_models(self.db)
        self.fakes = FakeObjectGenerator()
        run = self.fakes.run()
        # Instance 1: code 6016, trace lengths 1/1.
        issue1 = self.fakes.issue(code=6016, status="do_not_care")
        self.fakes.instance(
            issue_id=issue1.id,
            callable="module.sub.function1",
            filename="module/sub.py",
            min_trace_length_to_sources=1,
            min_trace_length_to_sinks=1,
        )
        # Flush before creating the next instance.
        self.fakes.save_all(self.db)
        # Instance 2: code 6017, trace lengths 2/2.
        issue2 = self.fakes.issue(code=6017, status="valid_bug")
        self.fakes.instance(
            issue_id=issue2.id,
            callable="module.sub.function2",
            filename="module/sub.py",
            min_trace_length_to_sources=2,
            min_trace_length_to_sinks=2,
        )
        self.fakes.save_all(self.db)
        # Instance 3: code 6018, trace lengths 3/3.
        issue3 = self.fakes.issue(code=6018, status="bad_practice")
        self.fakes.instance(
            issue_id=issue3.id,
            callable="module.function3",
            filename="module/__init__.py",
            min_trace_length_to_sources=3,
            min_trace_length_to_sinks=3,
        )
        self.fakes.save_all(self.db)
        # Instance 4: code 6019, trace lengths 0/0 (no explicit status).
        issue4 = self.fakes.issue(code=6019)
        self.fakes.instance(
            issue_id=issue4.id,
            callable="module.function3",
            filename="module/__init__.py",
            min_trace_length_to_sources=0,
            min_trace_length_to_sinks=0,
        )
        self.fakes.save_all(self.db)
        # Persist the run so latest_run_id(...) finds it.
        with self.db.make_session() as session:
            session.add(run)
            session.commit()
def testWhereCode(self) -> None:
with self.db.make_session() as session:
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_codes_is_any_of([6016]).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_codes_is_any_of([6017, 6018]).get()
}
self.assertNotIn(1, issue_ids)
self.assertIn(2, issue_ids)
self.assertIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_codes_is_any_of([1234]).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_codes_is_any_of([6017])
.where_codes_is_any_of([6018])
.get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
def testWhereStatus(self) -> None:
with self.db.make_session() as session:
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_status_is_any_of(["do_not_care"]).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
def testWhereCallables(self) -> None:
with self.db.make_session() as session:
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_callables_matches(".*sub.*").get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_callables_is_any_of(["1234"]).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_callables_matches(".*function3").get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_callables_is_any_of(["%function3"])
.where_callables_is_any_of(["%sub%"])
.get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
def testWhereFileNames(self) -> None:
with self.db.make_session() as session:
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_path_is_any_of(["1234"]).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_path_is_any_of(["module/s%"]).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_path_is_any_of(["%__init__.py"]).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertIn(3, issue_ids)
def testWhereTraceLength(self) -> None:
with self.db.make_session() as session:
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sinks(1, 1).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sources(1, 1).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sources(0, 1).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sinks(0, 1).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sinks(0, 2).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sinks(0, 2).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sources(0, 1)
.where_trace_length_to_sinks(0, 1)
.get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sources(0, 1)
.where_trace_length_to_sinks(0, 2)
.get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sources(0, 0).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
self.assertIn(4, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sinks(0, 0).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
self.assertIn(4, issue_ids)
def testWhereSourceName(self) -> None:
self.fakes.instance()
source_name_1 = self.fakes.source_detail("source_name_1")
source_name_2 = self.fakes.source_detail("source_name_2")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=source_name_1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=source_name_2.id, issue_instance_id=2
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_name_is_any_of(
["source_name_1"]
).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_name_matches("source_name_1").get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_name_is_any_of(
["source_name_1", "source_name_2"]
).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_name_matches("source_name").get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
def testWhereSourceKind(self) -> None:
self.fakes.instance()
source_kind_1 = self.fakes.source("source_kind_1")
source_kind_2 = self.fakes.source("source_kind_2")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=source_kind_1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=source_kind_2.id, issue_instance_id=2
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_kind_is_any_of(
["source_kind_1"]
).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_kind_matches("source_kind_1").get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_kind_is_any_of(
["source_kind_1", "source_kind_2"]
).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_kind_matches("source_kind").get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
def testWhereSinkName(self) -> None:
self.fakes.instance()
sink_name_1 = self.fakes.sink_detail("sink_name_1")
sink_name_2 = self.fakes.sink_detail("sink_name_2")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=sink_name_1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=sink_name_2.id, issue_instance_id=2
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_name_is_any_of(["sink_name_1"]).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_name_matches("sink_name_1").get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_name_is_any_of(
["sink_name_1", "sink_name_2"]
).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_name_matches("sink_name").get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
def testWhereSinkKind(self) -> None:
self.fakes.instance()
sink_kind_1 = self.fakes.sink("sink_kind_1")
sink_kind_2 = self.fakes.sink("sink_kind_2")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=sink_kind_1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=sink_kind_2.id, issue_instance_id=2
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_kind_is_any_of(["sink_kind_1"]).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_kind_matches("sink_kind_1").get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_kind_is_any_of(
["sink_kind_1", "sink_kind_2"]
).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_kind_matches("sink_kind").get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
def testWhereAnyFeatures(self) -> None:
self.fakes.instance()
feature1 = self.fakes.feature("via:feature1")
feature2 = self.fakes.feature("via:feature2")
self.fakes.feature("via:feature3")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature2.id, issue_instance_id=1
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_any_features(["via:feature1"]).get()
}
self.assertIn(1, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_any_features(
["via:feature1", "via:feature2"]
).get()
}
self.assertIn(1, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_any_features(
["via:feature1", "via:feature3"]
).get()
}
self.assertIn(1, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_any_features(["via:feature3"]).get()
}
self.assertNotIn(1, issue_ids)
def testAssertAllFeatures(self) -> None:
self.fakes.instance()
feature1 = self.fakes.feature("via:feature1")
feature2 = self.fakes.feature("via:feature2")
self.fakes.feature("via:feature3")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature2.id, issue_instance_id=1
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_all_features(["via:feature1"]).get()
}
self.assertIn(1, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_all_features(
["via:feature1", "via:feature2"]
).get()
}
self.assertIn(1, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_all_features(["via:feature3"]).get()
}
self.assertNotIn(1, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_all_features(
["via:feature1", "via:feature3"]
).get()
}
self.assertNotIn(1, issue_ids)
def testAssertExcludeFeatures(self) -> None:
feature1 = self.fakes.feature("via:feature1")
feature2 = self.fakes.feature("via:feature2")
self.fakes.feature("via:feature3")
feature4 = self.fakes.feature("via:feature4")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature2.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature1.id, issue_instance_id=2
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature4.id, issue_instance_id=2
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features([]).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(["via:feature1"]).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(["via:feature2"]).get()
}
self.assertNotIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(["via:feature3"]).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(
["via:feature1", "via:feature2"]
).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(
["via:feature1", "via:feature4"]
).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(
["via:feature2", "via:feature4"]
).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(
["via:feature1", "via:feature3"]
).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
| 36.817803 | 85 | 0.549448 |
from unittest import TestCase
from ... import queries
from ...db import DB, DBType
from ...models import IssueInstanceSharedTextAssoc
from ...models import create as create_models
from ...tests.fake_object_generator import FakeObjectGenerator
from ..issues import Instance
class QueryTest(TestCase):
    def setUp(self) -> None:
        """Create an in-memory DB with one run and four fake issue instances.

        Issues 1-3 (codes 6016-6018) get increasing minimum trace lengths
        (1, 2, 3) to both sources and sinks; issue 4 (code 6019) gets
        zero-length traces.  Each instance is flushed via save_all before
        the next one is created.
        """
        self.db = DB(DBType.MEMORY)
        create_models(self.db)
        self.fakes = FakeObjectGenerator()
        run = self.fakes.run()
        # Issue 1: callable under module.sub, trace lengths 1/1.
        issue1 = self.fakes.issue(code=6016, status="do_not_care")
        self.fakes.instance(
            issue_id=issue1.id,
            callable="module.sub.function1",
            filename="module/sub.py",
            min_trace_length_to_sources=1,
            min_trace_length_to_sinks=1,
        )
        self.fakes.save_all(self.db)
        # Issue 2: callable under module.sub, trace lengths 2/2.
        issue2 = self.fakes.issue(code=6017, status="valid_bug")
        self.fakes.instance(
            issue_id=issue2.id,
            callable="module.sub.function2",
            filename="module/sub.py",
            min_trace_length_to_sources=2,
            min_trace_length_to_sinks=2,
        )
        self.fakes.save_all(self.db)
        # Issue 3: top-level module callable, trace lengths 3/3.
        issue3 = self.fakes.issue(code=6018, status="bad_practice")
        self.fakes.instance(
            issue_id=issue3.id,
            callable="module.function3",
            filename="module/__init__.py",
            min_trace_length_to_sources=3,
            min_trace_length_to_sinks=3,
        )
        self.fakes.save_all(self.db)
        # Issue 4: same callable as issue 3 but zero-length traces.
        issue4 = self.fakes.issue(code=6019)
        self.fakes.instance(
            issue_id=issue4.id,
            callable="module.function3",
            filename="module/__init__.py",
            min_trace_length_to_sources=0,
            min_trace_length_to_sinks=0,
        )
        self.fakes.save_all(self.db)
        # Persist the run record itself in its own session.
        with self.db.make_session() as session:
            session.add(run)
            session.commit()
def testWhereCode(self) -> None:
with self.db.make_session() as session:
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_codes_is_any_of([6016]).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_codes_is_any_of([6017, 6018]).get()
}
self.assertNotIn(1, issue_ids)
self.assertIn(2, issue_ids)
self.assertIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_codes_is_any_of([1234]).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_codes_is_any_of([6017])
.where_codes_is_any_of([6018])
.get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
def testWhereStatus(self) -> None:
with self.db.make_session() as session:
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_status_is_any_of(["do_not_care"]).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
def testWhereCallables(self) -> None:
with self.db.make_session() as session:
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_callables_matches(".*sub.*").get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_callables_is_any_of(["1234"]).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_callables_matches(".*function3").get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_callables_is_any_of(["%function3"])
.where_callables_is_any_of(["%sub%"])
.get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
def testWhereFileNames(self) -> None:
with self.db.make_session() as session:
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_path_is_any_of(["1234"]).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_path_is_any_of(["module/s%"]).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_path_is_any_of(["%__init__.py"]).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertIn(3, issue_ids)
def testWhereTraceLength(self) -> None:
with self.db.make_session() as session:
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sinks(1, 1).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sources(1, 1).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sources(0, 1).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sinks(0, 1).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sinks(0, 2).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sinks(0, 2).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sources(0, 1)
.where_trace_length_to_sinks(0, 1)
.get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sources(0, 1)
.where_trace_length_to_sinks(0, 2)
.get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sources(0, 0).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
self.assertIn(4, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_trace_length_to_sinks(0, 0).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
self.assertNotIn(3, issue_ids)
self.assertIn(4, issue_ids)
def testWhereSourceName(self) -> None:
self.fakes.instance()
source_name_1 = self.fakes.source_detail("source_name_1")
source_name_2 = self.fakes.source_detail("source_name_2")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=source_name_1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=source_name_2.id, issue_instance_id=2
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_name_is_any_of(
["source_name_1"]
).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_name_matches("source_name_1").get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_name_is_any_of(
["source_name_1", "source_name_2"]
).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_name_matches("source_name").get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
def testWhereSourceKind(self) -> None:
self.fakes.instance()
source_kind_1 = self.fakes.source("source_kind_1")
source_kind_2 = self.fakes.source("source_kind_2")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=source_kind_1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=source_kind_2.id, issue_instance_id=2
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_kind_is_any_of(
["source_kind_1"]
).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_kind_matches("source_kind_1").get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_kind_is_any_of(
["source_kind_1", "source_kind_2"]
).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_source_kind_matches("source_kind").get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
def testWhereSinkName(self) -> None:
self.fakes.instance()
sink_name_1 = self.fakes.sink_detail("sink_name_1")
sink_name_2 = self.fakes.sink_detail("sink_name_2")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=sink_name_1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=sink_name_2.id, issue_instance_id=2
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_name_is_any_of(["sink_name_1"]).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_name_matches("sink_name_1").get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_name_is_any_of(
["sink_name_1", "sink_name_2"]
).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_name_matches("sink_name").get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
def testWhereSinkKind(self) -> None:
self.fakes.instance()
sink_kind_1 = self.fakes.sink("sink_kind_1")
sink_kind_2 = self.fakes.sink("sink_kind_2")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=sink_kind_1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=sink_kind_2.id, issue_instance_id=2
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_kind_is_any_of(["sink_kind_1"]).get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_kind_matches("sink_kind_1").get()
}
self.assertIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_kind_is_any_of(
["sink_kind_1", "sink_kind_2"]
).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_sink_kind_matches("sink_kind").get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
def testWhereAnyFeatures(self) -> None:
self.fakes.instance()
feature1 = self.fakes.feature("via:feature1")
feature2 = self.fakes.feature("via:feature2")
self.fakes.feature("via:feature3")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature2.id, issue_instance_id=1
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_any_features(["via:feature1"]).get()
}
self.assertIn(1, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_any_features(
["via:feature1", "via:feature2"]
).get()
}
self.assertIn(1, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_any_features(
["via:feature1", "via:feature3"]
).get()
}
self.assertIn(1, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_any_features(["via:feature3"]).get()
}
self.assertNotIn(1, issue_ids)
def testAssertAllFeatures(self) -> None:
self.fakes.instance()
feature1 = self.fakes.feature("via:feature1")
feature2 = self.fakes.feature("via:feature2")
self.fakes.feature("via:feature3")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature2.id, issue_instance_id=1
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_all_features(["via:feature1"]).get()
}
self.assertIn(1, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_all_features(
["via:feature1", "via:feature2"]
).get()
}
self.assertIn(1, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_all_features(["via:feature3"]).get()
}
self.assertNotIn(1, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_all_features(
["via:feature1", "via:feature3"]
).get()
}
self.assertNotIn(1, issue_ids)
def testAssertExcludeFeatures(self) -> None:
feature1 = self.fakes.feature("via:feature1")
feature2 = self.fakes.feature("via:feature2")
self.fakes.feature("via:feature3")
feature4 = self.fakes.feature("via:feature4")
self.fakes.save_all(self.db)
with self.db.make_session() as session:
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature1.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature2.id, issue_instance_id=1
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature1.id, issue_instance_id=2
)
)
session.add(
IssueInstanceSharedTextAssoc(
shared_text_id=feature4.id, issue_instance_id=2
)
)
session.commit()
latest_run_id = queries.latest_run_id(session)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features([]).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(["via:feature1"]).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(["via:feature2"]).get()
}
self.assertNotIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(["via:feature3"]).get()
}
self.assertIn(1, issue_ids)
self.assertIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(
["via:feature1", "via:feature2"]
).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(
["via:feature1", "via:feature4"]
).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(
["via:feature2", "via:feature4"]
).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
builder = Instance(session, latest_run_id)
issue_ids = {
int(issue.issue_instance_id)
for issue in builder.where_exclude_features(
["via:feature1", "via:feature3"]
).get()
}
self.assertNotIn(1, issue_ids)
self.assertNotIn(2, issue_ids)
| true | true |
f7f7bf7403549acd5a44118e3f532538e22423d5 | 2,198 | py | Python | scripts/baseline-identify.py | pdfinn/macos_security | 33b58df094bcec6e4d5c22c15978422ba2b3399c | [
"CC-BY-4.0"
] | 1 | 2021-11-19T04:56:57.000Z | 2021-11-19T04:56:57.000Z | scripts/baseline-identify.py | pdfinn/macos_security | 33b58df094bcec6e4d5c22c15978422ba2b3399c | [
"CC-BY-4.0"
] | null | null | null | scripts/baseline-identify.py | pdfinn/macos_security | 33b58df094bcec6e4d5c22c15978422ba2b3399c | [
"CC-BY-4.0"
] | null | null | null | #!/usr/bin/env python3
# filename: baseline_identify.py
# description: Identify which rules fall on specific baselines.
import argparse
import io
import yaml
import os
from string import Template
from itertools import groupby
import glob
# File path setup
file_dir = os.path.dirname(os.path.abspath(__file__))
parent_dir = os.path.dirname(file_dir)
# import profile_manifests.plist
baselines_file = os.path.join(parent_dir, 'includes', '800-53_baselines.yaml')
with open(baselines_file) as r:
baselines = yaml.load(r, Loader=yaml.SafeLoader)
low_rules = []
mod_rules = []
high_rules = []
# Create sections and rules
for rule in sorted(glob.glob(parent_dir + '/rules/*/*.yaml')):
with open(rule) as r:
rule_yaml = yaml.load(r, Loader=yaml.SafeLoader)
try:
rule_yaml['references']['800-53r4']
except KeyError:
nist_80053r4 = 'N/A'
else:
#nist_80053r4 = ulify(rule_yaml['references']['800-53r4'])
nist_80053r4 = rule_yaml['references']['800-53r4']
for control in nist_80053r4:
if control in baselines['low']:
#print("rule: {} contains: {} which falls on low baseline".format(rule_yaml['id'], control))
if rule_yaml['id'] not in low_rules:
low_rules.append(rule_yaml['id'])
if control in baselines['moderate']:
#print("rule: {} contains: {} which falls on moderate baseline".format(rule_yaml['id'], control))
if rule_yaml['id'] not in mod_rules:
mod_rules.append(rule_yaml['id'])
if control in baselines['high']:
#print("rule: {} contains: {} which falls on high baseline".format(rule_yaml['id'], control))
if rule_yaml['id'] not in high_rules:
high_rules.append(rule_yaml['id'])
print("{} Rules belong on LOW baseline".format(len(low_rules)))
for rule in low_rules:
print(" - {}".format(rule))
print("\n {} Rules that belong on MODERATE baseline".format(len(mod_rules)))
for rule in mod_rules:
print(" - {}".format(rule))
print("\n {} Rules that belong on HIGH baseline".format(len(high_rules)))
for rule in high_rules:
print(" - {}".format(rule)) | 34.34375 | 109 | 0.653321 |
import argparse
import io
import yaml
import os
from string import Template
from itertools import groupby
import glob
file_dir = os.path.dirname(os.path.abspath(__file__))
parent_dir = os.path.dirname(file_dir)
baselines_file = os.path.join(parent_dir, 'includes', '800-53_baselines.yaml')
with open(baselines_file) as r:
baselines = yaml.load(r, Loader=yaml.SafeLoader)
low_rules = []
mod_rules = []
high_rules = []
for rule in sorted(glob.glob(parent_dir + '/rules/*/*.yaml')):
with open(rule) as r:
rule_yaml = yaml.load(r, Loader=yaml.SafeLoader)
try:
rule_yaml['references']['800-53r4']
except KeyError:
nist_80053r4 = 'N/A'
else:
nist_80053r4 = rule_yaml['references']['800-53r4']
for control in nist_80053r4:
if control in baselines['low']:
if rule_yaml['id'] not in low_rules:
low_rules.append(rule_yaml['id'])
if control in baselines['moderate']:
if rule_yaml['id'] not in mod_rules:
mod_rules.append(rule_yaml['id'])
if control in baselines['high']:
if rule_yaml['id'] not in high_rules:
high_rules.append(rule_yaml['id'])
print("{} Rules belong on LOW baseline".format(len(low_rules)))
for rule in low_rules:
print(" - {}".format(rule))
print("\n {} Rules that belong on MODERATE baseline".format(len(mod_rules)))
for rule in mod_rules:
print(" - {}".format(rule))
print("\n {} Rules that belong on HIGH baseline".format(len(high_rules)))
for rule in high_rules:
print(" - {}".format(rule)) | true | true |
f7f7bfb177c589624808a5f6f119160b11116252 | 6,316 | py | Python | ceph/ceph_admin/osd.py | pdhiran/cephci | ba06523ad4a10eb70dc5295439abbe2a29723798 | [
"MIT"
] | null | null | null | ceph/ceph_admin/osd.py | pdhiran/cephci | ba06523ad4a10eb70dc5295439abbe2a29723798 | [
"MIT"
] | null | null | null | ceph/ceph_admin/osd.py | pdhiran/cephci | ba06523ad4a10eb70dc5295439abbe2a29723798 | [
"MIT"
] | null | null | null | """Manage OSD service via cephadm CLI."""
import json
import logging
from time import sleep
from typing import Dict
from .apply import ApplyMixin
from .common import config_dict_to_string
from .orch import Orch
LOG = logging.getLogger()
class OSDServiceFailure(Exception):
pass
class DevicesNotFound(Exception):
pass
class OSD(ApplyMixin, Orch):
"""Interface to ceph orch osd."""
SERVICE_NAME = "osd"
def apply(self, config: Dict) -> None:
"""
Deploy the ODS service using the provided configuration.
Args:
config (Dict): Key/value pairs provided by the test case to create the service.
Example::
config:
command: apply
service: osd
base_cmd_args: # arguments to ceph orch
concise: true
verbose: true
args:
all-available-devices: true
dry-run: true
unmanaged: true
"""
cmd = ["ceph orch device ls -f json"]
self.shell(args=["ceph orch device ls --refresh"])
logging.info("Sleeping for 60 seconds for disks to be discovered")
sleep(60)
out, _ = self.shell(args=cmd)
node_device_dict = dict()
for node in json.loads(out):
if not node.get("devices"):
continue
devices = {"available": [], "unavailable": []}
for device in node.get("devices"):
# avoid considering devices which is less than 5GB
if "Insufficient space (<5GB)" not in device.get(
"rejected_reasons", []
):
if device["available"]:
devices["available"].append(device["path"])
continue
devices["unavailable"].append(device["path"])
if devices["available"]:
node_device_dict.update({node["addr"]: devices})
if not node_device_dict:
raise DevicesNotFound("No devices available to create OSD(s)")
if not config.get("args", {}).get("all-available-devices"):
config["args"]["all-available-devices"] = True
# print out discovered device list
out, _ = self.shell(args=["ceph orch device ls -f yaml"])
logging.info(f"Node device list : {out}")
super().apply(config)
# validate of osd(s)
interval = 5
timeout = self.TIMEOUT
checks = timeout / interval
while checks:
checks -= 1
out, _ = self.shell(
args=["ceph", "orch", "ps", "-f", "json-pretty"],
)
out = json.loads(out)
daemons = [i for i in out if i.get("daemon_type") == "osd"]
deployed = 0
for node, devices in node_device_dict.items():
count = 0
for dmn in daemons:
if dmn["hostname"] == node and dmn["status_desc"] == "running":
count += 1
count = count - len(devices["unavailable"])
LOG.info(
"%s %s/%s osd daemon(s) up... Retries: %s"
% (node, count, len(devices["available"]), checks)
)
if count == len(devices["available"]):
deployed += 1
if deployed == len(node_device_dict):
return
sleep(interval)
raise OSDServiceFailure("OSDs are not up and running in hosts")
def rm_status(self, config: Dict):
"""
Execute the command ceph orch osd rm status.
Args:
config (Dict): OSD Remove status configuration parameters
Returns:
output, error returned by the command.
Example::
config:
command: rm status
base_cmd_args:
verbose: true
args:
format: json-pretty
"""
base_cmd = ["ceph", "orch", "osd", "rm", "status"]
if config.get("base_cmd_args"):
base_cmd.append(config_dict_to_string(config["base_cmd_args"]))
if config and config.get("args"):
args = config.get("args")
base_cmd.append(config_dict_to_string(args))
return self.shell(args=base_cmd)
def rm(self, config: Dict):
"""
Execute the command ceph orch osd rm <OSD ID> .
Args:
config (Dict): OSD Remove configuration parameters
Returns:
output, error returned by the command.
Example::
config:
command: rm
base_cmd_args:
verbose: true
pos_args:
- 1
"""
base_cmd = ["ceph", "orch", "osd"]
if config.get("base_cmd_args"):
base_cmd.append(config_dict_to_string(config["base_cmd_args"]))
base_cmd.append("rm")
osd_id = config["pos_args"][0]
base_cmd.append(str(osd_id))
self.shell(args=base_cmd)
check_osd_id_dict = {
"args": {"format": "json"},
}
while True:
# "ceph orch osd rm status -f json"
# condition
# continue loop if OSD_ID present
# if not exit the loop
out, _ = self.rm_status(check_osd_id_dict)
try:
status = json.loads(out)
for osd_id_ in status:
if osd_id_["osd_id"] == osd_id:
LOG.info(f"OSDs removal in progress: {osd_id_}")
break
else:
break
sleep(2)
except json.decoder.JSONDecodeError:
break
# validate OSD removal
out, verify = self.shell(
args=["ceph", "osd", "tree", "-f", "json"],
)
out = json.loads(out)
for id_ in out["nodes"]:
if id_["id"] == osd_id:
LOG.error("OSD Removed ID found")
raise AssertionError("fail, OSD is present still after removing")
LOG.info(f" OSD {osd_id} Removal is successfully")
| 29.376744 | 91 | 0.504908 | import json
import logging
from time import sleep
from typing import Dict
from .apply import ApplyMixin
from .common import config_dict_to_string
from .orch import Orch
LOG = logging.getLogger()
class OSDServiceFailure(Exception):
pass
class DevicesNotFound(Exception):
pass
class OSD(ApplyMixin, Orch):
SERVICE_NAME = "osd"
def apply(self, config: Dict) -> None:
cmd = ["ceph orch device ls -f json"]
self.shell(args=["ceph orch device ls --refresh"])
logging.info("Sleeping for 60 seconds for disks to be discovered")
sleep(60)
out, _ = self.shell(args=cmd)
node_device_dict = dict()
for node in json.loads(out):
if not node.get("devices"):
continue
devices = {"available": [], "unavailable": []}
for device in node.get("devices"):
if "Insufficient space (<5GB)" not in device.get(
"rejected_reasons", []
):
if device["available"]:
devices["available"].append(device["path"])
continue
devices["unavailable"].append(device["path"])
if devices["available"]:
node_device_dict.update({node["addr"]: devices})
if not node_device_dict:
raise DevicesNotFound("No devices available to create OSD(s)")
if not config.get("args", {}).get("all-available-devices"):
config["args"]["all-available-devices"] = True
out, _ = self.shell(args=["ceph orch device ls -f yaml"])
logging.info(f"Node device list : {out}")
super().apply(config)
interval = 5
timeout = self.TIMEOUT
checks = timeout / interval
while checks:
checks -= 1
out, _ = self.shell(
args=["ceph", "orch", "ps", "-f", "json-pretty"],
)
out = json.loads(out)
daemons = [i for i in out if i.get("daemon_type") == "osd"]
deployed = 0
for node, devices in node_device_dict.items():
count = 0
for dmn in daemons:
if dmn["hostname"] == node and dmn["status_desc"] == "running":
count += 1
count = count - len(devices["unavailable"])
LOG.info(
"%s %s/%s osd daemon(s) up... Retries: %s"
% (node, count, len(devices["available"]), checks)
)
if count == len(devices["available"]):
deployed += 1
if deployed == len(node_device_dict):
return
sleep(interval)
raise OSDServiceFailure("OSDs are not up and running in hosts")
def rm_status(self, config: Dict):
base_cmd = ["ceph", "orch", "osd", "rm", "status"]
if config.get("base_cmd_args"):
base_cmd.append(config_dict_to_string(config["base_cmd_args"]))
if config and config.get("args"):
args = config.get("args")
base_cmd.append(config_dict_to_string(args))
return self.shell(args=base_cmd)
def rm(self, config: Dict):
base_cmd = ["ceph", "orch", "osd"]
if config.get("base_cmd_args"):
base_cmd.append(config_dict_to_string(config["base_cmd_args"]))
base_cmd.append("rm")
osd_id = config["pos_args"][0]
base_cmd.append(str(osd_id))
self.shell(args=base_cmd)
check_osd_id_dict = {
"args": {"format": "json"},
}
while True:
out, _ = self.rm_status(check_osd_id_dict)
try:
status = json.loads(out)
for osd_id_ in status:
if osd_id_["osd_id"] == osd_id:
LOG.info(f"OSDs removal in progress: {osd_id_}")
break
else:
break
sleep(2)
except json.decoder.JSONDecodeError:
break
out, verify = self.shell(
args=["ceph", "osd", "tree", "-f", "json"],
)
out = json.loads(out)
for id_ in out["nodes"]:
if id_["id"] == osd_id:
LOG.error("OSD Removed ID found")
raise AssertionError("fail, OSD is present still after removing")
LOG.info(f" OSD {osd_id} Removal is successfully")
| true | true |
f7f7c0ebf01c78f6753900a3b7a292ba1dd316d9 | 5,020 | py | Python | google-cloud-sdk/platform/bq/third_party/pyasn1/codec/cer/encoder.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | 1 | 2017-11-29T18:52:27.000Z | 2017-11-29T18:52:27.000Z | google-cloud-sdk/platform/bq/third_party/pyasn1/codec/cer/encoder.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | null | null | null | google-cloud-sdk/platform/bq/third_party/pyasn1/codec/cer/encoder.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | 3 | 2017-07-27T18:44:13.000Z | 2020-07-25T17:48:53.000Z | #!/usr/bin/env python
# CER encoder
from pyasn1.type import univ
from pyasn1.type import useful
from pyasn1.codec.ber import encoder
from pyasn1.compat.octets import int2oct, str2octs, null
from pyasn1 import error
class BooleanEncoder(encoder.IntegerEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
if client == 0:
substrate = int2oct(0)
else:
substrate = int2oct(255)
return substrate, 0
class BitStringEncoder(encoder.BitStringEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
return encoder.BitStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class OctetStringEncoder(encoder.OctetStringEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class RealEncoder(encoder.RealEncoder):
def _chooseEncBase(self, value):
m, b, e = value
return self._dropFloatingPoint(m, b, e)
# specialized GeneralStringEncoder here
class GeneralizedTimeEncoder(OctetStringEncoder):
zchar = str2octs('Z')
pluschar = str2octs('+')
minuschar = str2octs('-')
zero = str2octs('0')
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
octets = client.asOctets()
# This breaks too many existing data items
# if '.' not in octets:
# raise error.PyAsn1Error('Format must include fraction of second: %r' % octets)
if len(octets) < 15:
raise error.PyAsn1Error('Bad UTC time length: %r' % octets)
if self.pluschar in octets or self.minuschar in octets:
raise error.PyAsn1Error('Must be UTC time: %r' % octets)
if octets[-1] != self.zchar[0]:
raise error.PyAsn1Error('Missing timezone specifier: %r' % octets)
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class UTCTimeEncoder(encoder.OctetStringEncoder):
zchar = str2octs('Z')
pluschar = str2octs('+')
minuschar = str2octs('-')
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
octets = client.asOctets()
if self.pluschar in octets or self.minuschar in octets:
raise error.PyAsn1Error('Must be UTC time: %r' % octets)
if octets and octets[-1] != self.zchar[0]:
client = client.clone(octets + self.zchar)
if len(client) != 13:
raise error.PyAsn1Error('Bad UTC time length: %r' % client)
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class SetOfEncoder(encoder.SequenceOfEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
if isinstance(client, univ.SequenceAndSetBase):
client.setDefaultComponents()
client.verifySizeSpec()
substrate = null; idx = len(client)
# This is certainly a hack but how else do I distinguish SetOf
# from Set if they have the same tags&constraints?
if isinstance(client, univ.SequenceAndSetBase):
# Set
comps = []
while idx > 0:
idx = idx - 1
if client[idx] is None: # Optional component
continue
if client.getDefaultComponentByPosition(idx) == client[idx]:
continue
comps.append(client[idx])
comps.sort(key=lambda x: isinstance(x, univ.Choice) and \
x.getMinTagSet() or x.getTagSet())
for c in comps:
substrate += encodeFun(c, defMode, maxChunkSize)
else:
# SetOf
compSubs = []
while idx > 0:
idx = idx - 1
compSubs.append(
encodeFun(client[idx], defMode, maxChunkSize)
)
compSubs.sort() # perhaps padding's not needed
substrate = null
for compSub in compSubs:
substrate += compSub
return substrate, 1
tagMap = encoder.tagMap.copy()
tagMap.update({
univ.Boolean.tagSet: BooleanEncoder(),
univ.BitString.tagSet: BitStringEncoder(),
univ.OctetString.tagSet: OctetStringEncoder(),
univ.Real.tagSet: RealEncoder(),
useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(),
useful.UTCTime.tagSet: UTCTimeEncoder(),
univ.SetOf().tagSet: SetOfEncoder() # conflcts with Set
})
typeMap = encoder.typeMap.copy()
typeMap.update({
univ.Set.typeId: SetOfEncoder(),
univ.SetOf.typeId: SetOfEncoder()
})
class Encoder(encoder.Encoder):
def __call__(self, client, defMode=False, maxChunkSize=0):
return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
encode = Encoder(tagMap, typeMap)
# EncoderFactory queries class instance and builds a map of tags -> encoders
| 38.030303 | 91 | 0.634263 |
from pyasn1.type import univ
from pyasn1.type import useful
from pyasn1.codec.ber import encoder
from pyasn1.compat.octets import int2oct, str2octs, null
from pyasn1 import error
class BooleanEncoder(encoder.IntegerEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
if client == 0:
substrate = int2oct(0)
else:
substrate = int2oct(255)
return substrate, 0
class BitStringEncoder(encoder.BitStringEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
return encoder.BitStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class OctetStringEncoder(encoder.OctetStringEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class RealEncoder(encoder.RealEncoder):
def _chooseEncBase(self, value):
m, b, e = value
return self._dropFloatingPoint(m, b, e)
class GeneralizedTimeEncoder(OctetStringEncoder):
zchar = str2octs('Z')
pluschar = str2octs('+')
minuschar = str2octs('-')
zero = str2octs('0')
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
octets = client.asOctets()
if len(octets) < 15:
raise error.PyAsn1Error('Bad UTC time length: %r' % octets)
if self.pluschar in octets or self.minuschar in octets:
raise error.PyAsn1Error('Must be UTC time: %r' % octets)
if octets[-1] != self.zchar[0]:
raise error.PyAsn1Error('Missing timezone specifier: %r' % octets)
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class UTCTimeEncoder(encoder.OctetStringEncoder):
zchar = str2octs('Z')
pluschar = str2octs('+')
minuschar = str2octs('-')
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
octets = client.asOctets()
if self.pluschar in octets or self.minuschar in octets:
raise error.PyAsn1Error('Must be UTC time: %r' % octets)
if octets and octets[-1] != self.zchar[0]:
client = client.clone(octets + self.zchar)
if len(client) != 13:
raise error.PyAsn1Error('Bad UTC time length: %r' % client)
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
)
class SetOfEncoder(encoder.SequenceOfEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
if isinstance(client, univ.SequenceAndSetBase):
client.setDefaultComponents()
client.verifySizeSpec()
substrate = null; idx = len(client)
if isinstance(client, univ.SequenceAndSetBase):
comps = []
while idx > 0:
idx = idx - 1
if client[idx] is None:
continue
if client.getDefaultComponentByPosition(idx) == client[idx]:
continue
comps.append(client[idx])
comps.sort(key=lambda x: isinstance(x, univ.Choice) and \
x.getMinTagSet() or x.getTagSet())
for c in comps:
substrate += encodeFun(c, defMode, maxChunkSize)
else:
compSubs = []
while idx > 0:
idx = idx - 1
compSubs.append(
encodeFun(client[idx], defMode, maxChunkSize)
)
compSubs.sort()
substrate = null
for compSub in compSubs:
substrate += compSub
return substrate, 1
tagMap = encoder.tagMap.copy()
tagMap.update({
univ.Boolean.tagSet: BooleanEncoder(),
univ.BitString.tagSet: BitStringEncoder(),
univ.OctetString.tagSet: OctetStringEncoder(),
univ.Real.tagSet: RealEncoder(),
useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(),
useful.UTCTime.tagSet: UTCTimeEncoder(),
univ.SetOf().tagSet: SetOfEncoder() # conflcts with Set
})
typeMap = encoder.typeMap.copy()
typeMap.update({
univ.Set.typeId: SetOfEncoder(),
univ.SetOf.typeId: SetOfEncoder()
})
class Encoder(encoder.Encoder):
def __call__(self, client, defMode=False, maxChunkSize=0):
return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
encode = Encoder(tagMap, typeMap)
# EncoderFactory queries class instance and builds a map of tags -> encoders
| true | true |
f7f7c1e6fd84f3ef761be973c859c21750883d78 | 8,738 | py | Python | HW 3/ps3a.py | minotaur487/MIT-OCW-Problem-Sets | 40ca55009f1543a1249cca4304357ff39d72e3e6 | [
"MIT"
] | null | null | null | HW 3/ps3a.py | minotaur487/MIT-OCW-Problem-Sets | 40ca55009f1543a1249cca4304357ff39d72e3e6 | [
"MIT"
] | null | null | null | HW 3/ps3a.py | minotaur487/MIT-OCW-Problem-Sets | 40ca55009f1543a1249cca4304357ff39d72e3e6 | [
"MIT"
] | null | null | null | # 6.00 Problem Set 3A Solutions
#
# The 6.00 Word Game
# Created by: Kevin Luu <luuk> and Jenna Wiens <jwiens>
#
#
import random
import string
VOWELS = 'aeiou'
CONSONANTS = 'bcdfghjklmnpqrstvwxyz'
HAND_SIZE = 7
SCRABBLE_LETTER_VALUES = {
'a': 1, 'b': 3, 'c': 3, 'd': 2, 'e': 1, 'f': 4, 'g': 2, 'h': 4, 'i': 1, 'j': 8, 'k': 5, 'l': 1, 'm': 3, 'n': 1, 'o': 1, 'p': 3, 'q': 10, 'r': 1, 's': 1, 't': 1, 'u': 1, 'v': 4, 'w': 4, 'x': 8, 'y': 4, 'z': 10
}
# -----------------------------------
# Helper code
# (you don't need to understand this helper code)
WORDLIST_FILENAME = "words.txt"
def load_words():
"""
Returns a list of valid words. Words are strings of lowercase letters.
Depending on the size of the word list, this function may
take a while to finish.
"""
print("Loading word list from file...")
# inFile: file
inFile = open(WORDLIST_FILENAME, 'r')
# wordlist: list of strings
wordlist = []
for line in inFile:
wordlist.append(line.strip().lower())
print(" ", len(wordlist), "words loaded.")
return wordlist
def get_frequency_dict(sequence):
"""
Returns a dictionary where the keys are elements of the sequence
and the values are integer counts, for the number of times that
an element is repeated in the sequence.
sequence: string or list
return: dictionary
"""
# freqs: dictionary (element_type -> int)
freq = {}
for x in sequence:
freq[x] = freq.get(x, 0) + 1
return freq
# (end of helper code)
# -----------------------------------
#
# Problem #1: Scoring a word
#
def get_word_score(word, n):
"""
Returns the score for a word. Assumes the word is a
valid word.
The score for a word is the sum of the points for letters
in the word multiplied by the length of the word, plus 50
points if all n letters are used on the first go.
Letters are scored as in Scrabble; A is worth 1, B is
worth 3, C is worth 3, D is worth 2, E is worth 1, and so on.
word: string (lowercase letters)
returns: int >= 0
"""
# TO DO...
score = 0
for x in word:
if x in SCRABBLE_LETTER_VALUES:
score += SCRABBLE_LETTER_VALUES[x]
score *= len(word)
if len(word) == n:
score += 50
return score
#
# Make sure you understand how this function works and what it does!
#
def display_hand(hand):
"""
Displays the letters currently in the hand.
For example:
display_hand({'a':1, 'x':2, 'l':3, 'e':1})
Should print out something like:
a x x l l l e
The order of the letters is unimportant.
hand: dictionary (string -> int)
"""
for letter in hand.keys():
for j in range(hand[letter]):
print(letter, end = ' ') # print all on the same line
print() # print an empty line
#
# Make sure you understand how this function works and what it does!
#
def deal_hand(n):
"""
Returns a random hand containing n lowercase letters.
At least n/3 the letters in the hand should be VOWELS.
Hands are represented as dictionaries. The keys are
letters and the values are the number of times the
particular letter is repeated in that hand.
n: int >= 0
returns: dictionary (string -> int)
"""
hand = {}
num_vowels = int(n / 3)
for i in range(num_vowels):
x = VOWELS[random.randrange(0, len(VOWELS))]
hand[x] = hand.get(x, 0) + 1
for i in range(num_vowels, n):
x = CONSONANTS[random.randrange(0, len(CONSONANTS))]
hand[x] = hand.get(x, 0) + 1
return hand
#
# Problem #2: Update a hand by removing letters
#
def update_hand(hand, word):
"""
Assumes that 'hand' has all the letters in word.
In other words, this assumes that however many times
a letter appears in 'word', 'hand' has at least as
many of that letter in it.
Updates the hand: uses up the letters in the given word
and returns the new hand, without those letters in it.
Has no side effects: does not modify hand.
word: string
hand: dictionary (string -> int)
returns: dictionary (string -> int)
"""
# TO DO ...
new_hand = hand.copy()
for x in word:
if x in hand and new_hand[x] == 1:
new_hand.pop(x)
else:
new_hand[x] -= 1
return new_hand
#
# Problem #3: Test word validity
#
def is_valid_word(word, hand, word_list):
"""
Returns True if word is in the word_list and is entirely
composed of letters in the hand. Otherwise, returns False.
Does not mutate hand or word_list.
word: string
hand: dictionary (string -> int)
word_list: list of lowercase strings
"""
# TO DO...
hand_copy = hand.copy()
if word in word_list:
pass
else:
return False
for x in word:
hand_copy[x] = hand_copy.get(x, 0) - 1
if hand_copy[x] < 0:
return False
return True
# def multi():
# wordlist = load_words()
# hand = {'y': 1, 'a': 3, 'b': 2, 'e': 1, 't': 1, 'p':1}
# for x in range(500):
# y = is_valid_word('abate', hand, wordlist)
# if y is False:
# print(x, "false")
# return
# return print("SUCCESS")
# multi()
def calculate_handlen(hand):
handlen = 0
for v in hand.values():
handlen += v
return handlen
#
# Problem #4: Playing a hand
#
def play_hand(hand, word_list):
score = 0
print('Hand: ', end='')
display_hand(hand)
response = input(f'Input a word based on the hand given. If you want, you can quit by inputting a \'.\'')
u_hand = hand.copy()
if response == '.':
print(f'Hand Score: {score}')
return score
while hand != {}:
while is_valid_word(response, hand, word_list) is False:
response = input(f'That word is invalid. Please input another word. Or quit by entering \'.\'')
if response == '.':
print(f'Hand Score: {score}')
return score
u_hand = update_hand(u_hand, response)
score += get_word_score(response, len(hand))
print(f'Word Score: {get_word_score(response, len(hand))} \n'
f'Hand:', end=' ')
display_hand(u_hand)
response = input(f'Input another word or quit by entering \'.\':')
if response == '.':
print(f'Hand Score: {score}')
return score
print(f'Hand Score: {score}')
"""
Allows the user to play the given hand, as follows:
* The hand is displayed.
* The user may input a word.
* An invalid word is rejected, and a message is displayed asking
the user to choose another word.
* When a valid word is entered, it uses up letters from the hand.
* After every valid word: the score for that word is displayed,
the remaining letters in the hand are displayed, and the user
is asked to input another word.
* The sum of the word scores is displayed when the hand finishes.
* The hand finishes when there are no more unused letters.
The user can also finish playing the hand by inputing a single
period (the string '.') instead of a word.
hand: dictionary (string -> int)
word_list: list of lowercase strings
"""
# TO DO ...
#
# Problem #5: Playing a game
# Make sure you understand how this code works!
#
def play_game(word_list):
"""
Allow the user to play an arbitrary number of hands.
* Asks the user to input 'n' or 'r' or 'e'.
* If the user inputs 'n', let the user play a new (random) hand.
When done playing the hand, ask the 'n' or 'e' question again.
* If the user inputs 'r', let the user play the last hand again.
* If the user inputs 'e', exit the game.
* If the user inputs anything else, ask them again.
"""
# TO DO...
score = 0
decision = 'start'
hand = {}
while decision == 'n' or decision == 'r' or decision == 'start':
while decision != 'e' and decision != 'n' and decision != 'r':
decision = input(f'Input \'n\' for a new hand, \'r\' to play the last hand again, and \'e\' to exit the game')
if decision == 'e':
print(f'Final Score: {score}')
return
elif decision == 'n':
hand = deal_hand(HAND_SIZE)
score += play_hand(hand, word_list)
# return play_game(word_list, score, hand)
elif decision == 'r':
score += play_hand(hand, word_list)
# return play_game(word_list, score)
decision = 'start'
#
# Build data structures used for entire session and play game
#
if __name__ == '__main__':
word_list = load_words()
play_game(word_list)
| 27.739683 | 212 | 0.597391 |
import random
import string
VOWELS = 'aeiou'
CONSONANTS = 'bcdfghjklmnpqrstvwxyz'
HAND_SIZE = 7
SCRABBLE_LETTER_VALUES = {
'a': 1, 'b': 3, 'c': 3, 'd': 2, 'e': 1, 'f': 4, 'g': 2, 'h': 4, 'i': 1, 'j': 8, 'k': 5, 'l': 1, 'm': 3, 'n': 1, 'o': 1, 'p': 3, 'q': 10, 'r': 1, 's': 1, 't': 1, 'u': 1, 'v': 4, 'w': 4, 'x': 8, 'y': 4, 'z': 10
}
WORDLIST_FILENAME = "words.txt"
def load_words():
print("Loading word list from file...")
# inFile: file
inFile = open(WORDLIST_FILENAME, 'r')
# wordlist: list of strings
wordlist = []
for line in inFile:
wordlist.append(line.strip().lower())
print(" ", len(wordlist), "words loaded.")
return wordlist
def get_frequency_dict(sequence):
# freqs: dictionary (element_type -> int)
freq = {}
for x in sequence:
freq[x] = freq.get(x, 0) + 1
return freq
# (end of helper code)
# -----------------------------------
#
# Problem #1: Scoring a word
#
def get_word_score(word, n):
# TO DO...
score = 0
for x in word:
if x in SCRABBLE_LETTER_VALUES:
score += SCRABBLE_LETTER_VALUES[x]
score *= len(word)
if len(word) == n:
score += 50
return score
#
# Make sure you understand how this function works and what it does!
#
def display_hand(hand):
for letter in hand.keys():
for j in range(hand[letter]):
print(letter, end = ' ') # print all on the same line
print() # print an empty line
#
# Make sure you understand how this function works and what it does!
#
def deal_hand(n):
hand = {}
num_vowels = int(n / 3)
for i in range(num_vowels):
x = VOWELS[random.randrange(0, len(VOWELS))]
hand[x] = hand.get(x, 0) + 1
for i in range(num_vowels, n):
x = CONSONANTS[random.randrange(0, len(CONSONANTS))]
hand[x] = hand.get(x, 0) + 1
return hand
#
# Problem #2: Update a hand by removing letters
#
def update_hand(hand, word):
# TO DO ...
new_hand = hand.copy()
for x in word:
if x in hand and new_hand[x] == 1:
new_hand.pop(x)
else:
new_hand[x] -= 1
return new_hand
#
# Problem #3: Test word validity
#
def is_valid_word(word, hand, word_list):
# TO DO...
hand_copy = hand.copy()
if word in word_list:
pass
else:
return False
for x in word:
hand_copy[x] = hand_copy.get(x, 0) - 1
if hand_copy[x] < 0:
return False
return True
# def multi():
# wordlist = load_words()
# hand = {'y': 1, 'a': 3, 'b': 2, 'e': 1, 't': 1, 'p':1}
# for x in range(500):
# y = is_valid_word('abate', hand, wordlist)
# if y is False:
# print(x, "false")
# return
# return print("SUCCESS")
# multi()
def calculate_handlen(hand):
handlen = 0
for v in hand.values():
handlen += v
return handlen
#
# Problem #4: Playing a hand
#
def play_hand(hand, word_list):
score = 0
print('Hand: ', end='')
display_hand(hand)
response = input(f'Input a word based on the hand given. If you want, you can quit by inputting a \'.\'')
u_hand = hand.copy()
if response == '.':
print(f'Hand Score: {score}')
return score
while hand != {}:
while is_valid_word(response, hand, word_list) is False:
response = input(f'That word is invalid. Please input another word. Or quit by entering \'.\'')
if response == '.':
print(f'Hand Score: {score}')
return score
u_hand = update_hand(u_hand, response)
score += get_word_score(response, len(hand))
print(f'Word Score: {get_word_score(response, len(hand))} \n'
f'Hand:', end=' ')
display_hand(u_hand)
response = input(f'Input another word or quit by entering \'.\':')
if response == '.':
print(f'Hand Score: {score}')
return score
print(f'Hand Score: {score}')
# TO DO ...
#
# Problem #5: Playing a game
# Make sure you understand how this code works!
#
def play_game(word_list):
# TO DO...
score = 0
decision = 'start'
hand = {}
while decision == 'n' or decision == 'r' or decision == 'start':
while decision != 'e' and decision != 'n' and decision != 'r':
decision = input(f'Input \'n\' for a new hand, \'r\' to play the last hand again, and \'e\' to exit the game')
if decision == 'e':
print(f'Final Score: {score}')
return
elif decision == 'n':
hand = deal_hand(HAND_SIZE)
score += play_hand(hand, word_list)
# return play_game(word_list, score, hand)
elif decision == 'r':
score += play_hand(hand, word_list)
# return play_game(word_list, score)
decision = 'start'
#
# Build data structures used for entire session and play game
#
if __name__ == '__main__':
word_list = load_words()
play_game(word_list)
| true | true |
f7f7c35e8faa2a683324e9ae002588f00364ea7e | 3,517 | py | Python | code/babymapping_1219/Models/pggan_tf_official/mapping_4.py | zhaoyuzhi/ChildPredictor | ba36d9851a37522ec5a0de4eab3b973f872d885e | [
"MIT"
] | 6 | 2022-01-22T06:26:36.000Z | 2022-03-31T14:37:49.000Z | code/babymapping_1219/Models/pggan_tf_official/mapping_4.py | zhaoyuzhi/ChildPredictor | ba36d9851a37522ec5a0de4eab3b973f872d885e | [
"MIT"
] | null | null | null | code/babymapping_1219/Models/pggan_tf_official/mapping_4.py | zhaoyuzhi/ChildPredictor | ba36d9851a37522ec5a0de4eab3b973f872d885e | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import torch.autograd as autograd
from torchvision import models
import torch.nn.functional as F
import numpy as np
import math
import torch
import torch.nn as nn
"""
Mapping network
Input: two tensor of size (batchsize, 512, 4, 4)
Output: a tensor of size (batchsize, 480)
how to combine two tensor into one tensor is a challenge.
"""
class MappingResBlock(nn.Module):
    """Residual block used inside the mapping network.

    Applies conv -> LeakyReLU -> conv and adds the scaled result back onto
    the input: ``out = res_weight * conv2d(x) + x``.

    Args:
        in_channels (int): number of input (and output) channels.
        ksize (int): convolution kernel size. NOTE(review): the residual
            addition requires the convs to preserve spatial size; this file
            always constructs the block with ksize=1, padding=0. The
            defaults ksize=3 / padding=0 would shrink the feature map and
            break the skip connection -- kept unchanged for interface
            compatibility, but confirm before relying on them.
        padding (int): convolution padding.
        stride (int): convolution stride (must be 1 to keep spatial size).
        res_weight (float): scale applied to the residual branch before the
            skip addition (small value keeps the block near-identity).
    """

    def __init__(self, in_channels, ksize=3, padding=0, stride=1, res_weight=0.1):
        super().__init__()
        self.res_weight = res_weight
        # Two-conv residual branch; no normalization layers.
        self.conv2d = nn.Sequential(
            nn.Conv2d(in_channels, in_channels, ksize, padding=padding, stride=stride),
            nn.LeakyReLU(0.2, inplace=False),
            nn.Conv2d(in_channels, in_channels, ksize, padding=padding, stride=stride)
        )

    def forward(self, x):
        # Scaled residual connection: x + res_weight * branch(x).
        return self.res_weight * self.conv2d(x) + x
class MappingNet(nn.Module):
    """Maps two parent feature tensors to a single child latent code.

    The two inputs (each of shape ``(batch, 2 * in_channels, 4, 4)``) are
    concatenated along the channel axis, compressed to
    ``(batch, in_channels, 1, 1)`` by the head convolutions, refined by a
    stack of five 1x1 residual blocks, and projected to ``out_channels``
    by a final linear layer.

    Args:
        in_channels (int): channel width of the bottleneck (e.g. 512).
        out_channels (int): size of the output code (e.g. 480).
        input_norm (bool): if True, per-sample mean/var normalization is
            applied to each input tensor before concatenation.
        output_norm (bool): if True, per-sample mean/var normalization is
            applied to the output code.
    """

    def __init__(self, in_channels, out_channels, input_norm=False, output_norm=True):
        super().__init__()
        self.input_norm = input_norm
        self.output_norm = output_norm
        # Head: (batch, 4*in_channels, 4, 4) -> (batch, in_channels, 1, 1).
        self.head = nn.Sequential(
            nn.Conv2d(in_channels * 4, in_channels * 2, 3, stride=2, padding=1),  # 4x4 -> 2x2
            nn.LeakyReLU(0.2, inplace=False),
            nn.Conv2d(in_channels * 2, in_channels, 2, stride=1, padding=0)       # 2x2 -> 1x1
        )
        # Bottleneck: five 1x1 residual blocks (spatial size is already 1x1,
        # so ksize=1 / padding=0 keeps shapes compatible with the skip add).
        self.bottle = nn.Sequential(
            MappingResBlock(in_channels, 1, 0, 1),
            MappingResBlock(in_channels, 1, 0, 1),
            MappingResBlock(in_channels, 1, 0, 1),
            MappingResBlock(in_channels, 1, 0, 1),
            MappingResBlock(in_channels, 1, 0, 1),
        )
        # Final projection to the output code, e.g. 512 -> 480.
        self.final = nn.Linear(in_channels, out_channels)

    def forward(self, x_father, x_mother):
        # Fixed format-string bug: the original had only one {} placeholder
        # for two .format() arguments, silently dropping the mother's shape.
        assert x_father.shape == x_mother.shape, \
            'shape of x_father and x_mother is different, x_father:{} x_mother:{}'.format(
                x_father.shape, x_mother.shape)
        if self.input_norm:
            # Per-sample normalization over the (C, H, W) axes.
            # NOTE(review): this divides by the variance, not the standard
            # deviation -- kept as-is to preserve trained behavior, but
            # confirm this is intentional.
            x_father = (x_father - x_father.mean(dim=[1, 2, 3]).reshape(x_father.shape[0], 1, 1, 1)) / x_father.var(dim=[1, 2, 3]).reshape(x_father.shape[0], 1, 1, 1)
            x_mother = (x_mother - x_mother.mean(dim=[1, 2, 3]).reshape(x_mother.shape[0], 1, 1, 1)) / x_mother.var(dim=[1, 2, 3]).reshape(x_mother.shape[0], 1, 1, 1)
        # Concatenate along channels -> (batch, 4*in_channels, 4, 4).
        x = torch.cat((x_father, x_mother), dim=1)
        # Head block: compress to (batch, in_channels, 1, 1).
        out = self.head(x)
        # Bottleneck residual stack.
        out = self.bottle(out)
        # Flatten (batch, in_channels, 1, 1) -> (batch, in_channels).
        out = out.reshape(out.shape[0], out.shape[1])
        out = self.final(out)
        if self.output_norm:
            # Per-sample normalization of the code (variance, see note above).
            out = (out - out.mean(dim=1).reshape(out.shape[0], 1)) / out.var(dim=1).reshape(out.shape[0], 1)
        return out  # (batch, out_channels)
if __name__ == '__main__':
    # GPU smoke test: feed two random parent codes through the mapper and
    # print the shape of the resulting child code.
    father_feat = torch.randn((1, 1024, 4, 4)).cuda()
    mother_feat = torch.randn((1, 1024, 4, 4)).cuda()
    mapper = MappingNet(512, 480).cuda()
    print(mapper(father_feat, mother_feat).shape)
| 39.516854 | 156 | 0.626386 | import torch
import torch.nn as nn
import torch.autograd as autograd
from torchvision import models
import torch.nn.functional as F
import numpy as np
import math
import torch
import torch.nn as nn
class MappingResBlock(nn.Module):
    """Residual conv block: ``out = x + res_weight * conv2(lrelu(conv1(x)))``.

    NOTE(review): with the default ksize=3 / padding=0 the conv branch output
    is spatially smaller than the input, so the residual add would fail;
    callers in this file construct it with ksize=1.
    """
    def __init__(self, in_channels, ksize=3, padding=0, stride=1, res_weight=0.1):
        super(MappingResBlock, self).__init__()
        # Weight of the conv branch relative to the identity path.
        self.res_weight = res_weight
        # Conv -> LeakyReLU -> Conv; channel count is preserved.
        self.conv2d = nn.Sequential(
            nn.Conv2d(in_channels, in_channels, ksize, padding=padding, stride=stride),
            nn.LeakyReLU(0.2, inplace = False),
            nn.Conv2d(in_channels, in_channels, ksize, padding=padding, stride=stride)
        )
    def forward(self, x):
        # Scaled residual connection.
        residual = x
        out = self.conv2d(x)
        out = self.res_weight * out + residual
        return out
class MappingNet(nn.Module):
    """Fuse two parent feature maps (father/mother) into one child latent code.

    Takes two tensors of identical shape (batch, 2*in_channels, 4, 4),
    concatenates them on the channel axis, and returns (batch, out_channels).
    """
    def __init__(self, in_channels, out_channels, input_norm=False, output_norm=True):
        super(MappingNet, self).__init__()
        self.input_norm = input_norm
        self.output_norm = output_norm
        # Head: (4C, 4, 4) -> (2C, 2, 2) -> (C, 1, 1).
        self.head = nn.Sequential(
            nn.Conv2d(in_channels * 4, in_channels*2, 3, stride=2, padding=1),
            nn.LeakyReLU(0.2, inplace = False),
            nn.Conv2d(in_channels*2, in_channels, 2, stride=1, padding=0)
        )
        # Bottleneck: five 1x1 residual blocks on the fused feature.
        self.bottle = nn.Sequential(
            MappingResBlock(in_channels, 1, 0, 1),
            MappingResBlock(in_channels, 1, 0, 1),
            MappingResBlock(in_channels, 1, 0, 1),
            MappingResBlock(in_channels, 1, 0, 1),
            MappingResBlock(in_channels, 1, 0, 1),
        )
        # Projection from the fused feature to the child code.
        self.final = nn.Linear(in_channels, out_channels)
    def forward(self, x_father, x_mother):
        # NOTE(review): the message below has one '{}' for two .format()
        # arguments, so the mother's shape is dropped from the error text.
        assert x_father.shape==x_mother.shape, 'shape of x_father and x_mother is different, x_father:{} x_mother'.format(x_father.shape, x_mother.shape)
        if self.input_norm:
            # NOTE(review): divides by var(), not std() -- confirm intent.
            x_father = (x_father - x_father.mean(dim=[1,2,3]).reshape(x_father.shape[0],1,1,1)) / x_father.var(dim=[1,2,3]).reshape(x_father.shape[0],1,1,1)
            x_mother = (x_mother - x_mother.mean(dim=[1,2,3]).reshape(x_mother.shape[0],1,1,1)) / x_mother.var(dim=[1,2,3]).reshape(x_mother.shape[0],1,1,1)
        # Merge the parents along the channel axis -> (batch, 4C, 4, 4).
        x = torch.cat((x_father, x_mother), dim=1)
        out = self.head(x)
        out = self.bottle(out)
        # Drop the 1x1 spatial dims before the linear projection.
        out = out.reshape(out.shape[0], out.shape[1])
        out = self.final(out)
        if self.output_norm:
            out = (out - out.mean(dim=1).reshape(out.shape[0], 1)) / out.var(dim=1).reshape(out.shape[0], 1)
        return out
if __name__ == '__main__':
    # Smoke test: requires a CUDA device.
    x_father = torch.randn((1,1024,4,4)).cuda()
    x_mother = torch.randn((1,1024,4,4)).cuda()
    net = MappingNet(512, 480).cuda()
    code_of_child = net(x_father, x_mother)
    print(code_of_child.shape)
| true | true |
f7f7c4799f40587aa8e69a92f4b09b39052bf0da | 30,953 | py | Python | mailchimp_marketing/api/templates_api.py | OlegBugaichuk/mailchimp-marketing-python | c00c09dcbe0e9680e8956be97eca3c963f92b3dd | [
"Apache-2.0"
] | null | null | null | mailchimp_marketing/api/templates_api.py | OlegBugaichuk/mailchimp-marketing-python | c00c09dcbe0e9680e8956be97eca3c963f92b3dd | [
"Apache-2.0"
] | null | null | null | mailchimp_marketing/api/templates_api.py | OlegBugaichuk/mailchimp-marketing-python | c00c09dcbe0e9680e8956be97eca3c963f92b3dd | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Mailchimp Marketing API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 3.0.22
Contact: apihelp@mailchimp.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from mailchimp_marketing.api_client import ApiClient
class TemplatesApi(object):
    """Client for the Mailchimp Marketing ``/templates`` endpoints.

    Originally emitted by swagger-codegen; refactored so that the per-endpoint
    boilerplate (kwarg validation, parameter splitting, header/auth setup) is
    shared in :meth:`_call_endpoint`.  Public method signatures, raised error
    messages, and the requests issued through ``api_client.call_api`` are
    unchanged.
    """

    # Keyword arguments accepted by every endpoint in addition to its own
    # documented parameters (async/serialization controls).
    _CONTROL_PARAMS = ('async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout')
    # Query parameters serialized as comma-separated lists.
    _CSV_PARAMS = frozenset(('fields', 'exclude_fields'))
    # Ordered parameter names accepted by the `list` endpoint.
    _LIST_PARAMS = ('fields', 'exclude_fields', 'count', 'offset',
                    'created_by', 'since_created_at', 'before_created_at',
                    'type', 'category', 'folder_id', 'sort_field')

    def __init__(self, api_client):
        self.api_client = api_client

    def _call_endpoint(self, method_name, resource_path, http_method, kwargs,
                       own_params, required=(), path_names=(), body_param=None,
                       response_type=None, max_count=None):
        """Validate *kwargs* and issue one REST call through the API client.

        :param method_name: public method name, used in error messages.
        :param resource_path: URL template, e.g. ``/templates/{template_id}``.
        :param http_method: HTTP verb.
        :param kwargs: all caller-supplied parameters (required positional
            ones are folded in by the calling method).
        :param own_params: ordered parameter names this endpoint accepts.
        :param required: names that must be present and non-None.
        :param path_names: names substituted into *resource_path*.
        :param body_param: name of the request-body parameter, if any.
        :param response_type: deserialization target for the response.
        :param max_count: upper bound for the ``count`` query parameter.
        :return: whatever ``api_client.call_api`` returns (data or thread).
        """
        allowed = set(own_params) | set(self._CONTROL_PARAMS)
        params = {}
        for key, val in kwargs.items():
            if key not in allowed:
                # Same message as the generated code produced.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name))
            params[key] = val
        for name in required:
            # `.get(name) is None` covers both "absent" and "explicitly None",
            # matching the generated check.  The empty backticks reproduce the
            # original (generated) message verbatim.
            if params.get(name) is None:
                raise ValueError(
                    "Missing the required parameter `%s` when calling ``"
                    % name)
        if (max_count is not None and 'count' in params
                and params['count'] > max_count):
            raise ValueError(
                "Invalid value for parameter `count` when calling ``, must "
                "be a value less than or equal to `%d`" % max_count)

        # Split the validated parameters into path / query / body, preserving
        # the declaration order of `own_params` for the query string.
        collection_formats = {}
        path_params = {}
        query_params = []
        body_params = None
        for name in own_params:
            if name not in params:
                continue
            if name in path_names:
                path_params[name] = params[name]
            elif name == body_param:
                body_params = params[name]
            else:
                query_params.append((name, params[name]))
                if name in self._CSV_PARAMS:
                    collection_formats[name] = 'csv'

        header_params = {
            'Accept': self.api_client.select_header_accept(
                ['application/json', 'application/problem+json']),
            'Content-Type': self.api_client.select_header_content_type(
                ['application/json']),
        }
        return self.api_client.call_api(
            resource_path, http_method,
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=['basicAuth'],
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def delete_template(self, template_id, **kwargs):
        """Delete a specific template.

        Pass ``async_req=True`` to get a request thread instead of data.

        :param str template_id: The unique id for the template. (required)
        :return: None
        """
        kwargs['_return_http_data_only'] = True
        return self.delete_template_with_http_info(template_id, **kwargs)

    def delete_template_with_http_info(self, template_id, **kwargs):
        """Delete a specific template, returning full HTTP response info.

        :param str template_id: The unique id for the template. (required)
        :return: None
        """
        kwargs['template_id'] = template_id
        return self._call_endpoint(
            'delete_template', '/templates/{template_id}', 'DELETE', kwargs,
            own_params=('template_id',),
            required=('template_id',), path_names=('template_id',))

    def list(self, **kwargs):
        """Get a list of an account's available templates.

        Pass ``async_req=True`` to get a request thread instead of data.

        :param list[str] fields: Comma-separated fields to return.
        :param list[str] exclude_fields: Comma-separated fields to exclude.
        :param int count: Number of records to return (default 10, max 1000).
        :param int offset: Number of records to skip (default 0).
        :param str created_by: Account user who created the template.
        :param str since_created_at: Only templates created after this date
            (ISO 8601).
        :param str before_created_at: Only templates created before this date
            (ISO 8601).
        :param str type: Limit results based on template type.
        :param str category: Limit results based on category.
        :param str folder_id: The unique folder id.
        :param str sort_field: Sort user templates by the specified field.
        :return: Templates
        """
        kwargs['_return_http_data_only'] = True
        return self.list_with_http_info(**kwargs)

    def list_with_http_info(self, **kwargs):
        """List templates, returning full HTTP response info.

        Accepts the same keyword arguments as :meth:`list`.

        :return: Templates
        """
        return self._call_endpoint(
            'list', '/templates', 'GET', kwargs,
            own_params=self._LIST_PARAMS,
            response_type='Templates', max_count=1000)

    def get_template(self, template_id, **kwargs):
        """Get information about a specific template.

        Pass ``async_req=True`` to get a request thread instead of data.

        :param str template_id: The unique id for the template. (required)
        :param list[str] fields: Comma-separated fields to return.
        :param list[str] exclude_fields: Comma-separated fields to exclude.
        :return: TemplateInstance
        """
        kwargs['_return_http_data_only'] = True
        return self.get_template_with_http_info(template_id, **kwargs)

    def get_template_with_http_info(self, template_id, **kwargs):
        """Get template info, returning full HTTP response info.

        :param str template_id: The unique id for the template. (required)
        :param list[str] fields: Comma-separated fields to return.
        :param list[str] exclude_fields: Comma-separated fields to exclude.
        :return: TemplateInstance
        """
        kwargs['template_id'] = template_id
        return self._call_endpoint(
            'get_template', '/templates/{template_id}', 'GET', kwargs,
            own_params=('template_id', 'fields', 'exclude_fields'),
            required=('template_id',), path_names=('template_id',),
            response_type='TemplateInstance')

    def get_default_content_for_template(self, template_id, **kwargs):
        """Get the editable sections of a template with their default content.

        Pass ``async_req=True`` to get a request thread instead of data.

        :param str template_id: The unique id for the template. (required)
        :param list[str] fields: Comma-separated fields to return.
        :param list[str] exclude_fields: Comma-separated fields to exclude.
        :return: TemplateDefaultContent
        """
        kwargs['_return_http_data_only'] = True
        return self.get_default_content_for_template_with_http_info(
            template_id, **kwargs)

    def get_default_content_for_template_with_http_info(self, template_id, **kwargs):
        """View default content, returning full HTTP response info.

        :param str template_id: The unique id for the template. (required)
        :param list[str] fields: Comma-separated fields to return.
        :param list[str] exclude_fields: Comma-separated fields to exclude.
        :return: TemplateDefaultContent
        """
        kwargs['template_id'] = template_id
        return self._call_endpoint(
            'get_default_content_for_template',
            '/templates/{template_id}/default-content', 'GET', kwargs,
            own_params=('template_id', 'fields', 'exclude_fields'),
            required=('template_id',), path_names=('template_id',),
            response_type='TemplateDefaultContent')

    def update_template(self, template_id, body, **kwargs):
        """Update the name, HTML, or ``folder_id`` of an existing template.

        Pass ``async_req=True`` to get a request thread instead of data.

        :param str template_id: The unique id for the template. (required)
        :param TemplateInstance2 body: (required)
        :return: TemplateInstance
        """
        kwargs['_return_http_data_only'] = True
        return self.update_template_with_http_info(template_id, body, **kwargs)

    def update_template_with_http_info(self, template_id, body, **kwargs):
        """Update a template, returning full HTTP response info.

        :param str template_id: The unique id for the template. (required)
        :param TemplateInstance2 body: (required)
        :return: TemplateInstance
        """
        kwargs['template_id'] = template_id
        kwargs['body'] = body
        return self._call_endpoint(
            'update_template', '/templates/{template_id}', 'PATCH', kwargs,
            own_params=('template_id', 'body'),
            required=('template_id', 'body'), path_names=('template_id',),
            body_param='body', response_type='TemplateInstance')

    def create(self, body, **kwargs):
        """Create a new template. Only Classic templates are supported.

        Pass ``async_req=True`` to get a request thread instead of data.

        :param TemplateInstance1 body: (required)
        :return: TemplateInstance
        """
        kwargs['_return_http_data_only'] = True
        return self.create_with_http_info(body, **kwargs)

    def create_with_http_info(self, body, **kwargs):
        """Add a template, returning full HTTP response info.

        :param TemplateInstance1 body: (required)
        :return: TemplateInstance
        """
        kwargs['body'] = body
        return self._call_endpoint(
            'create', '/templates', 'POST', kwargs,
            own_params=('body',),
            required=('body',), body_param='body',
            response_type='TemplateInstance')
| 44.536691 | 241 | 0.627661 |
from __future__ import absolute_import
import re
import six
from mailchimp_marketing.api_client import ApiClient
class TemplatesApi(object):
    """Client for the Mailchimp Marketing ``/templates`` endpoints.

    Every endpoint comes in two forms: the short method (e.g.
    ``delete_template``) returns the response data directly, while the
    matching ``*_with_http_info`` method builds and dispatches the HTTP
    request through the shared ``ApiClient``.  Passing ``async_req=True``
    to any method returns the underlying request thread instead of data.
    """

    # Transport-level keyword options accepted by every endpoint method.
    # They steer ApiClient.call_api and are never sent to the API itself.
    _request_options = (
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    )

    # Optional query parameters understood by `list`, in the order they are
    # serialized onto the query string.
    _list_query_params = (
        'fields', 'exclude_fields', 'count', 'offset', 'created_by',
        'since_created_at', 'before_created_at', 'type', 'category',
        'folder_id', 'sort_field',
    )

    def __init__(self, api_client):
        # ApiClient holds credentials/configuration and performs HTTP I/O.
        self.api_client = api_client

    # ------------------------------------------------------------------
    # internal helpers
    # ------------------------------------------------------------------

    @staticmethod
    def _apply_kwargs(method_name, params, all_params):
        """Validate ``params['kwargs']`` against *all_params* and merge them.

        :param str method_name: public method name, used in error messages.
        :param dict params: the calling method's ``locals()`` snapshot.
        :param list all_params: every keyword the method accepts.
        :raises TypeError: if an unexpected keyword argument was supplied.
        """
        # Py3 idiom: dict.items() replaces the old six.iteritems() call.
        for key, val in params['kwargs'].items():
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )
            params[key] = val
        del params['kwargs']

    @staticmethod
    def _require(params, name, method_name):
        """Raise ValueError if required parameter *name* is missing or None.

        Bug fix: the generated messages used to read ``when calling ` ```
        with an empty method name; the method name is now included.
        """
        if params.get(name) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling `%s`"
                % (name, method_name))

    def _json_headers(self):
        """Accept/Content-Type headers shared by all templates endpoints."""
        return {
            'Accept': self.api_client.select_header_accept(
                ['application/json', 'application/problem+json']),
            'Content-Type': self.api_client.select_header_content_type(
                ['application/json']),
        }

    def _call(self, resource_path, http_method, path_params, query_params,
              body, response_type, params, collection_formats):
        """Dispatch one request through ApiClient with the shared defaults."""
        return self.api_client.call_api(
            resource_path, http_method,
            path_params,
            query_params,
            self._json_headers(),
            body=body,
            post_params=[],
            files={},
            response_type=response_type,
            auth_settings=['basicAuth'],
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    # ------------------------------------------------------------------
    # endpoints
    # ------------------------------------------------------------------

    def delete_template(self, template_id, **kwargs):
        """Delete a template.

        :param str template_id: The unique id for the template (required).
        :return: None
        """
        kwargs['_return_http_data_only'] = True
        # Sync data and async request thread both come straight from the
        # *_with_http_info form, so a single return covers both branches.
        return self.delete_template_with_http_info(template_id, **kwargs)

    def delete_template_with_http_info(self, template_id, **kwargs):
        """DELETE /templates/{template_id}.

        :raises ValueError: if ``template_id`` is None.
        :raises TypeError: on unexpected keyword arguments.
        """
        all_params = ['template_id'] + list(self._request_options)
        params = locals()
        self._apply_kwargs('delete_template', params, all_params)
        self._require(params, 'template_id', 'delete_template')
        return self._call(
            '/templates/{template_id}', 'DELETE',
            {'template_id': params['template_id']},
            [],
            None,
            None,
            params,
            {})

    def list(self, **kwargs):
        """List the templates available to the account.

        Optional keyword filters: fields, exclude_fields, count (<= 1000),
        offset, created_by, since_created_at, before_created_at, type,
        category, folder_id, sort_field.

        :return: Templates
        """
        kwargs['_return_http_data_only'] = True
        return self.list_with_http_info(**kwargs)

    def list_with_http_info(self, **kwargs):
        """GET /templates.

        :raises ValueError: if ``count`` exceeds 1000.
        :raises TypeError: on unexpected keyword arguments.
        """
        all_params = list(self._list_query_params) + list(self._request_options)
        params = locals()
        self._apply_kwargs('list', params, all_params)
        if 'count' in params and params['count'] > 1000:
            # Bug fix: the message now names the method being called.
            raise ValueError(
                "Invalid value for parameter `count` when calling `list`, "
                "must be a value less than or equal to `1000`")
        collection_formats = {}
        query_params = []
        for name in self._list_query_params:
            if name in params:
                query_params.append((name, params[name]))
                if name in ('fields', 'exclude_fields'):
                    # List-valued params are serialized comma-separated.
                    collection_formats[name] = 'csv'
        return self._call(
            '/templates', 'GET',
            {},
            query_params,
            None,
            'Templates',
            params,
            collection_formats)

    def get_template(self, template_id, **kwargs):
        """Get information about a specific template.

        :param str template_id: The unique id for the template (required).
        :return: TemplateInstance
        """
        kwargs['_return_http_data_only'] = True
        return self.get_template_with_http_info(template_id, **kwargs)

    def get_template_with_http_info(self, template_id, **kwargs):
        """GET /templates/{template_id} (accepts fields/exclude_fields)."""
        all_params = (['template_id', 'fields', 'exclude_fields']
                      + list(self._request_options))
        params = locals()
        self._apply_kwargs('get_template', params, all_params)
        return self._get_template_resource(
            'get_template', '/templates/{template_id}',
            'TemplateInstance', params)

    def get_default_content_for_template(self, template_id, **kwargs):
        """Get the default content of a specific template.

        :param str template_id: The unique id for the template (required).
        :return: TemplateDefaultContent
        """
        kwargs['_return_http_data_only'] = True
        return self.get_default_content_for_template_with_http_info(
            template_id, **kwargs)

    def get_default_content_for_template_with_http_info(self, template_id, **kwargs):
        """GET /templates/{template_id}/default-content."""
        all_params = (['template_id', 'fields', 'exclude_fields']
                      + list(self._request_options))
        params = locals()
        self._apply_kwargs('get_default_content_for_template', params, all_params)
        return self._get_template_resource(
            'get_default_content_for_template',
            '/templates/{template_id}/default-content',
            'TemplateDefaultContent', params)

    def _get_template_resource(self, method_name, resource_path,
                               response_type, params):
        """Shared plumbing for the template-scoped GET endpoints."""
        self._require(params, 'template_id', method_name)
        collection_formats = {}
        query_params = []
        for name in ('fields', 'exclude_fields'):
            if name in params:
                query_params.append((name, params[name]))
                collection_formats[name] = 'csv'
        return self._call(
            resource_path, 'GET',
            {'template_id': params['template_id']},
            query_params,
            None,
            response_type,
            params,
            collection_formats)

    def update_template(self, template_id, body, **kwargs):
        """Update an existing template.

        :param str template_id: The unique id for the template (required).
        :param body: the updated template payload (required).
        :return: TemplateInstance
        """
        kwargs['_return_http_data_only'] = True
        return self.update_template_with_http_info(template_id, body, **kwargs)

    def update_template_with_http_info(self, template_id, body, **kwargs):
        """PATCH /templates/{template_id}.

        :raises ValueError: if ``template_id`` or ``body`` is None.
        """
        all_params = ['template_id', 'body'] + list(self._request_options)
        params = locals()
        self._apply_kwargs('update_template', params, all_params)
        self._require(params, 'template_id', 'update_template')
        self._require(params, 'body', 'update_template')
        return self._call(
            '/templates/{template_id}', 'PATCH',
            {'template_id': params['template_id']},
            [],
            params['body'],
            'TemplateInstance',
            params,
            {})

    def create(self, body, **kwargs):
        """Create a new template for the account.

        :param body: the template payload to create (required).
        :return: TemplateInstance
        """
        kwargs['_return_http_data_only'] = True
        return self.create_with_http_info(body, **kwargs)

    def create_with_http_info(self, body, **kwargs):
        """POST /templates.

        :raises ValueError: if ``body`` is None.
        """
        all_params = ['body'] + list(self._request_options)
        params = locals()
        self._apply_kwargs('create', params, all_params)
        self._require(params, 'body', 'create')
        return self._call(
            '/templates', 'POST',
            {},
            [],
            params['body'],
            'TemplateInstance',
            params,
            {})
| true | true |
f7f7c49dabbf26ff1f99f5903ed816341f297fc0 | 5,361 | py | Python | ggly/ggly.py | rzinurov/ggly | 9b9a9983d87e416b6bd03149c47b410dcfe66724 | [
"MIT"
] | 1 | 2020-03-31T20:17:33.000Z | 2020-03-31T20:17:33.000Z | ggly/ggly.py | rzinurov/ggly | 9b9a9983d87e416b6bd03149c47b410dcfe66724 | [
"MIT"
] | 1 | 2021-10-12T22:59:58.000Z | 2021-10-12T22:59:58.000Z | ggly/ggly.py | rzinurov/ggly | 9b9a9983d87e416b6bd03149c47b410dcfe66724 | [
"MIT"
] | null | null | null | import logging
import os
import random
import cv2
from numpy.core.records import ndarray
from ggly import img_utils
data_dir = os.path.dirname(os.path.realpath(__file__)) + '/data/'
class Rect(object):
    """Axis-aligned rectangle built from an (x, y, width, height) sequence."""

    def __init__(self, dimensions):
        x, y, w, h = dimensions
        self.x = x
        self.y = y
        self.width = w
        self.height = h

    @property
    def center(self):
        """Integer ``(cx, cy)`` midpoint of the rectangle."""
        cx = int(self.x + self.width / 2)
        cy = int(self.y + self.height / 2)
        return cx, cy
class Ggly(object):
    """Finds faces in an image and pastes "googly eyes" over them.

    The Haar-cascade classifiers and the eye overlay image are loaded once
    at class-definition time, so importing this module requires the bundled
    data files to be present on disk.
    """

    # Input images larger than this are downscaled before detection.
    img_max_size = (2048, 2048)
    # Eye overlay loaded with its alpha channel (-1 == IMREAD_UNCHANGED).
    eye_img = cv2.imread(data_dir + '/img/googly_eye.png', -1)
    face_cascade = cv2.CascadeClassifier(data_dir + '/haarcascades/haarcascade_frontalface_default.xml')
    right_eye_cascade = cv2.CascadeClassifier(data_dir + '/haarcascades/haarcascade_righteye_2splits.xml')
    left_eye_cascade = cv2.CascadeClassifier(data_dir + '/haarcascades/haarcascade_lefteye_2splits.xml')
    # When True, detection rectangles/lines are drawn on the output image.
    debug = False

    def __init__(self, debug: bool = False):
        super().__init__()
        self.debug = debug

    def go(self, img: ndarray):
        """Return ``(result_image, face_count)`` with googly eyes drawn.

        The input is converted with COLOR_BGR2GRAY, so ``img`` is assumed
        to be a 3-channel BGR image as produced by ``cv2.imread``.  The
        caller's array is not modified; a resized copy is drawn on.
        """
        img = img_utils.resize_to_fit(img.copy(), self.img_max_size[0], self.img_max_size[1])
        img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        height, width, _ = img.shape  # NOTE(review): height/width are unused
        faces = self.face_cascade.detectMultiScale(
            img_gray,
            scaleFactor=1.05,
            minNeighbors=2,
            minSize=(30, 30),
            flags=cv2.CASCADE_SCALE_IMAGE
        )
        face_count = 0
        for face in [Rect(x) for x in faces]:
            logging.debug("Found face at %s:%s" % (face.x, face.y))
            # Views restricted to this face; drawing on face_img writes
            # through to img because slicing produces a view.
            face_img_gray = img_gray[face.y:face.y + face.height, face.x:face.x + face.width]
            face_img = img[face.y:face.y + face.height, face.x:face.x + face.width]
            r_eye = self.__search_right_eye(face_img_gray, face)
            if r_eye:
                l_eye = self.__search_left_eye(face_img_gray, face)
                if not l_eye:
                    # Mirror the right eye when no left eye was detected.
                    l_eye = self.__fake_left_eye(r_eye, face)
                self.__draw_eye(r_eye, face, face_img)
                self.__draw_eye(l_eye, face, face_img)
                face_count += 1
                if self.debug:
                    img_utils.draw_rect(face_img, l_eye, (0, 255, 0))
                    img_utils.draw_center_line(face_img, r_eye, l_eye, (0, 255, 0))
                    img_utils.draw_rect(face_img, r_eye, (255, 255, 0))
            if self.debug:
                img_utils.draw_rect(img, face, (255, 0, 255))
        return img, face_count

    def __search_right_eye(self, face_img_gray: ndarray, face: Rect):
        """Return the first plausible right-eye ``Rect`` in the face crop,
        or None.  Tries minNeighbors from 3 down to 0, i.e. starts strict
        and relaxes until a valid candidate appears."""
        right_eyes = []
        for i in range(3, -1, -1):
            candidates = self.right_eye_cascade.detectMultiScale(face_img_gray, minNeighbors=i)
            right_eyes = [x for x in candidates if self.__is_right_eye_valid(Rect(x), face)]
            if len(right_eyes) > 0:
                logging.debug("Found right eye with accuracy %s" % i)
                break
        return Rect(right_eyes[0]) if right_eyes else None

    def __search_left_eye(self, face_img_gray: ndarray, face: Rect):
        """Left-eye counterpart of ``__search_right_eye`` (same relaxing
        minNeighbors strategy)."""
        left_eyes = []
        for i in range(3, -1, -1):
            candidates = self.left_eye_cascade.detectMultiScale(face_img_gray, minNeighbors=i)
            left_eyes = [x for x in candidates if self.__is_left_eye_valid(Rect(x), face)]
            if len(left_eyes) > 0:
                logging.debug("Found left eye with accuracy %s" % i)
                break
        return Rect(left_eyes[0]) if left_eyes else None

    @staticmethod
    def __fake_left_eye(r_eye: Rect, face: Rect):
        """Synthesize a left-eye Rect by mirroring ``r_eye`` across the
        vertical midline of the face (same size, same vertical position)."""
        logging.debug("Made fake left eye")
        return Rect([r_eye.x + int(2 * (face.width / 2 - r_eye.center[0])), r_eye.y,
                     r_eye.width, r_eye.height])

    @staticmethod
    def __is_right_eye_valid(eye: Rect, face: Rect):
        """Accept only candidates centered in the upper-left quadrant of
        the face (eye coordinates are face-relative)."""
        if eye.center[0] > face.width / 2:  # left eye recognized as right eye
            return False
        if eye.center[1] > face.height / 2:  # nose recognized as right eye
            return False
        return True

    @staticmethod
    def __is_left_eye_valid(eye: Rect, face: Rect):
        """Accept only candidates centered in the upper-right quadrant of
        the face (eye coordinates are face-relative)."""
        if eye.center[0] < face.width / 2:  # right eye recognized as left eye
            return False
        if eye.center[1] > face.height / 2:  # noses recognized as left eye
            return False
        return True

    def __draw_eye(self, eye_rect: Rect, face: Rect, face_img: ndarray):
        """Alpha-blend a randomly rotated googly eye onto ``face_img``,
        centered on ``eye_rect``, scaled up by ``scale_factor`` and clamped
        to the face bounds."""
        scale_factor = 1.5
        eye_img = self.eye_img.copy()
        eye_img = img_utils.rotate(eye_img, random.randint(0, 360))
        eyes_width_scaled = int(eye_rect.width * scale_factor)
        eyes_height_scaled = int(eye_rect.height * scale_factor)
        # Clamp the overlay rectangle so it never leaves the face crop.
        x1 = max(eye_rect.center[0] - int(eyes_width_scaled / 2), 0)
        x2 = min(x1 + eyes_width_scaled, face.width)
        y1 = max(eye_rect.center[1] - int(eyes_height_scaled / 2), 0)
        y2 = min(y1 + eyes_height_scaled, face.height)
        eye_img = cv2.resize(eye_img, (x2 - x1, y2 - y1), interpolation=cv2.INTER_AREA)
        # The overlay's alpha channel serves as the blend mask.
        mask = eye_img[:, :, 3]
        mask_inv = cv2.bitwise_not(mask)
        eye_img = eye_img[:, :, 0:3]  # convert to BGR
        roi = face_img[y1:y2, x1:x2]
        roi_bg = cv2.bitwise_and(roi, roi, mask=mask_inv)
        roi_fg = cv2.bitwise_and(eye_img, eye_img, mask=mask)
        dst = cv2.add(roi_bg, roi_fg)
        face_img[y1:y2, x1:x2] = dst
| 41.55814 | 106 | 0.612945 | import logging
import os
import random
import cv2
from numpy.core.records import ndarray
from ggly import img_utils
data_dir = os.path.dirname(os.path.realpath(__file__)) + '/data/'
class Rect(object):
    """Axis-aligned rectangle built from an (x, y, width, height) sequence."""

    def __init__(self, dimensions):
        x, y, w, h = dimensions
        self.x = x
        self.y = y
        self.width = w
        self.height = h

    @property
    def center(self):
        """Integer ``(cx, cy)`` midpoint of the rectangle."""
        cx = int(self.x + self.width / 2)
        cy = int(self.y + self.height / 2)
        return cx, cy
class Ggly(object):
    """Finds faces in an image and pastes "googly eyes" over them.

    The Haar-cascade classifiers and the eye overlay image are loaded once
    at class-definition time, so importing this module requires the bundled
    data files to be present on disk.
    """

    # Input images larger than this are downscaled before detection.
    img_max_size = (2048, 2048)
    # Eye overlay loaded with its alpha channel (-1 == IMREAD_UNCHANGED).
    eye_img = cv2.imread(data_dir + '/img/googly_eye.png', -1)
    face_cascade = cv2.CascadeClassifier(data_dir + '/haarcascades/haarcascade_frontalface_default.xml')
    right_eye_cascade = cv2.CascadeClassifier(data_dir + '/haarcascades/haarcascade_righteye_2splits.xml')
    left_eye_cascade = cv2.CascadeClassifier(data_dir + '/haarcascades/haarcascade_lefteye_2splits.xml')
    # When True, detection rectangles/lines are drawn on the output image.
    debug = False

    def __init__(self, debug: bool = False):
        super().__init__()
        self.debug = debug

    def go(self, img: ndarray):
        """Return ``(result_image, face_count)`` with googly eyes drawn.

        The input is converted with COLOR_BGR2GRAY, so ``img`` is assumed
        to be a 3-channel BGR image as produced by ``cv2.imread``.  The
        caller's array is not modified; a resized copy is drawn on.
        """
        img = img_utils.resize_to_fit(img.copy(), self.img_max_size[0], self.img_max_size[1])
        img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        height, width, _ = img.shape  # NOTE(review): height/width are unused
        faces = self.face_cascade.detectMultiScale(
            img_gray,
            scaleFactor=1.05,
            minNeighbors=2,
            minSize=(30, 30),
            flags=cv2.CASCADE_SCALE_IMAGE
        )
        face_count = 0
        for face in [Rect(x) for x in faces]:
            logging.debug("Found face at %s:%s" % (face.x, face.y))
            # Views restricted to this face; drawing on face_img writes
            # through to img because slicing produces a view.
            face_img_gray = img_gray[face.y:face.y + face.height, face.x:face.x + face.width]
            face_img = img[face.y:face.y + face.height, face.x:face.x + face.width]
            r_eye = self.__search_right_eye(face_img_gray, face)
            if r_eye:
                l_eye = self.__search_left_eye(face_img_gray, face)
                if not l_eye:
                    # Mirror the right eye when no left eye was detected.
                    l_eye = self.__fake_left_eye(r_eye, face)
                self.__draw_eye(r_eye, face, face_img)
                self.__draw_eye(l_eye, face, face_img)
                face_count += 1
                if self.debug:
                    img_utils.draw_rect(face_img, l_eye, (0, 255, 0))
                    img_utils.draw_center_line(face_img, r_eye, l_eye, (0, 255, 0))
                    img_utils.draw_rect(face_img, r_eye, (255, 255, 0))
            if self.debug:
                img_utils.draw_rect(img, face, (255, 0, 255))
        return img, face_count

    def __search_right_eye(self, face_img_gray: ndarray, face: Rect):
        """Return the first plausible right-eye ``Rect`` in the face crop,
        or None.  Tries minNeighbors from 3 down to 0, i.e. starts strict
        and relaxes until a valid candidate appears."""
        right_eyes = []
        for i in range(3, -1, -1):
            candidates = self.right_eye_cascade.detectMultiScale(face_img_gray, minNeighbors=i)
            right_eyes = [x for x in candidates if self.__is_right_eye_valid(Rect(x), face)]
            if len(right_eyes) > 0:
                logging.debug("Found right eye with accuracy %s" % i)
                break
        return Rect(right_eyes[0]) if right_eyes else None

    def __search_left_eye(self, face_img_gray: ndarray, face: Rect):
        """Left-eye counterpart of ``__search_right_eye`` (same relaxing
        minNeighbors strategy)."""
        left_eyes = []
        for i in range(3, -1, -1):
            candidates = self.left_eye_cascade.detectMultiScale(face_img_gray, minNeighbors=i)
            left_eyes = [x for x in candidates if self.__is_left_eye_valid(Rect(x), face)]
            if len(left_eyes) > 0:
                logging.debug("Found left eye with accuracy %s" % i)
                break
        return Rect(left_eyes[0]) if left_eyes else None

    @staticmethod
    def __fake_left_eye(r_eye: Rect, face: Rect):
        """Synthesize a left-eye Rect by mirroring ``r_eye`` across the
        vertical midline of the face (same size, same vertical position)."""
        logging.debug("Made fake left eye")
        return Rect([r_eye.x + int(2 * (face.width / 2 - r_eye.center[0])), r_eye.y,
                     r_eye.width, r_eye.height])

    @staticmethod
    def __is_right_eye_valid(eye: Rect, face: Rect):
        """Accept only candidates centered in the upper-left quadrant of
        the face (eye coordinates are face-relative)."""
        if eye.center[0] > face.width / 2:  # left eye recognized as right eye
            return False
        if eye.center[1] > face.height / 2:  # nose recognized as right eye
            return False
        return True

    @staticmethod
    def __is_left_eye_valid(eye: Rect, face: Rect):
        """Accept only candidates centered in the upper-right quadrant of
        the face (eye coordinates are face-relative)."""
        if eye.center[0] < face.width / 2:  # right eye recognized as left eye
            return False
        if eye.center[1] > face.height / 2:  # nose recognized as left eye
            return False
        return True

    def __draw_eye(self, eye_rect: Rect, face: Rect, face_img: ndarray):
        """Alpha-blend a randomly rotated googly eye onto ``face_img``,
        centered on ``eye_rect``, scaled up by ``scale_factor`` and clamped
        to the face bounds."""
        scale_factor = 1.5
        eye_img = self.eye_img.copy()
        eye_img = img_utils.rotate(eye_img, random.randint(0, 360))
        eyes_width_scaled = int(eye_rect.width * scale_factor)
        eyes_height_scaled = int(eye_rect.height * scale_factor)
        # Clamp the overlay rectangle so it never leaves the face crop.
        x1 = max(eye_rect.center[0] - int(eyes_width_scaled / 2), 0)
        x2 = min(x1 + eyes_width_scaled, face.width)
        y1 = max(eye_rect.center[1] - int(eyes_height_scaled / 2), 0)
        y2 = min(y1 + eyes_height_scaled, face.height)
        eye_img = cv2.resize(eye_img, (x2 - x1, y2 - y1), interpolation=cv2.INTER_AREA)
        # The overlay's alpha channel serves as the blend mask.
        mask = eye_img[:, :, 3]
        mask_inv = cv2.bitwise_not(mask)
        eye_img = eye_img[:, :, 0:3]  # convert to BGR
        roi = face_img[y1:y2, x1:x2]
        roi_bg = cv2.bitwise_and(roi, roi, mask=mask_inv)
        roi_fg = cv2.bitwise_and(eye_img, eye_img, mask=mask)
        dst = cv2.add(roi_bg, roi_fg)
        face_img[y1:y2, x1:x2] = dst
| true | true |
f7f7c5e7fb12af1889e18a374044563a934bca5b | 4,753 | py | Python | stack/load_balancer.py | engineervix/aws-web-stacks | 8bb4c0b358853b8cc6feb11a6d67523ecd607503 | [
"MIT"
] | null | null | null | stack/load_balancer.py | engineervix/aws-web-stacks | 8bb4c0b358853b8cc6feb11a6d67523ecd607503 | [
"MIT"
] | null | null | null | stack/load_balancer.py | engineervix/aws-web-stacks | 8bb4c0b358853b8cc6feb11a6d67523ecd607503 | [
"MIT"
] | null | null | null | from troposphere import elasticloadbalancing as elb
from troposphere import GetAtt, If, Join, Output, Ref
from . import USE_ECS, USE_GOVCLOUD
from .security_groups import load_balancer_security_group
from .template import template
from .utils import ParameterWithDefaults as Parameter
from .vpc import public_subnet_a, public_subnet_b
# Web worker
# Instance port the load balancer forwards traffic to: ECS containers
# default to 8000; EC2 / Elastic Beanstalk stacks default to 80.
if USE_ECS:
    web_worker_port = Ref(
        template.add_parameter(
            Parameter(
                "WebWorkerPort",
                Description="Web worker container exposed port",
                Type="Number",
                Default="8000",
            ),
            group="Load Balancer",
            label="Web Worker Port",
        )
    )
else:
    # default to port 80 for EC2 and Elastic Beanstalk options
    web_worker_port = Ref(
        template.add_parameter(
            Parameter(
                "WebWorkerPort",
                Description="Default web worker exposed port (non-HTTPS)",
                Type="Number",
                Default="80",
            ),
            group="Load Balancer",
            label="Web Worker Port",
        )
    )
# Protocol spoken between the load balancer and the worker instances.
web_worker_protocol = Ref(
    template.add_parameter(
        Parameter(
            "WebWorkerProtocol",
            Description="Web worker instance protocol",
            Type="String",
            Default="HTTP",
            AllowedValues=["HTTP", "HTTPS"],
        ),
        group="Load Balancer",
        label="Web Worker Protocol",
    )
)
# Web worker health check
web_worker_health_check_protocol = Ref(
    template.add_parameter(
        Parameter(
            "WebWorkerHealthCheckProtocol",
            Description="Web worker health check protocol",
            Type="String",
            Default="TCP",
            AllowedValues=["TCP", "HTTP", "HTTPS"],
        ),
        group="Load Balancer",
        label="Health Check: Protocol",
    )
)
# web_worker_health_check_port = Ref(template.add_parameter(
#     Parameter(
#         "WebWorkerHealthCheckPort",
#         Description="Web worker health check port",
#         Type="Number",
#         Default="80",
#     ),
#     group="Load Balancer",
#     label="Health Check: Port",
# ))
web_worker_health_check = Ref(
    template.add_parameter(
        Parameter(
            "WebWorkerHealthCheck",
            Description='Web worker health check URL path, e.g., "/health-check"; '
            "required unless WebWorkerHealthCheckProtocol is TCP",
            Type="String",
            Default="",
        ),
        group="Load Balancer",
        label="Health Check: URL",
    )
)
# Web load balancer
# Plain HTTP on port 80 always forwards to the configured worker port.
listeners = [
    elb.Listener(
        LoadBalancerPort=80,
        InstanceProtocol=web_worker_protocol,
        InstancePort=web_worker_port,
        Protocol="HTTP",
    )
]
if USE_GOVCLOUD:
    # configure the default HTTPS listener to pass TCP traffic directly,
    # since GovCloud doesn't support the Certificate Manager (this can be
    # modified to enable SSL termination at the load balancer via the AWS
    # console, if needed)
    listeners.append(
        elb.Listener(
            LoadBalancerPort=443,
            InstanceProtocol="TCP",
            InstancePort=443,
            Protocol="TCP",
        )
    )
else:
    from .certificates import application as application_certificate
    from .certificates import cert_condition
    # HTTPS listener terminating TLS with the ACM certificate; only emitted
    # into the template when a certificate was configured (cert_condition).
    listeners.append(
        If(
            cert_condition,
            elb.Listener(
                LoadBalancerPort=443,
                InstanceProtocol=web_worker_protocol,
                InstancePort=web_worker_port,
                Protocol="HTTPS",
                SSLCertificateId=application_certificate,
            ),
            Ref("AWS::NoValue"),
        )
    )
load_balancer = elb.LoadBalancer(
    "LoadBalancer",
    template=template,
    Subnets=[
        Ref(public_subnet_a),
        Ref(public_subnet_b),
    ],
    SecurityGroups=[Ref(load_balancer_security_group)],
    Listeners=listeners,
    HealthCheck=elb.HealthCheck(
        # NOTE(review): with the default TCP protocol and empty URL this
        # Target renders as "TCP:" with no port (the port component is
        # commented out below) -- confirm the generated template is valid.
        Target=Join(
            "",
            [
                web_worker_health_check_protocol,
                ":",
                # web_worker_health_check_port,
                web_worker_health_check,
            ],
        ),
        HealthyThreshold="2",
        UnhealthyThreshold="2",
        Interval="100",
        Timeout="10",
    ),
    CrossZone=True,
)
# Exported so external tooling / DNS records can point at the ELB.
template.add_output(
    Output(
        "LoadBalancerDNSName",
        Description="Loadbalancer DNS",
        Value=GetAtt(load_balancer, "DNSName"),
    )
)
template.add_output(
    Output(
        "LoadBalancerHostedZoneID",
        Description="Loadbalancer hosted zone",
        Value=GetAtt(load_balancer, "CanonicalHostedZoneNameID"),
    )
)
| 26.553073 | 83 | 0.584894 | from troposphere import elasticloadbalancing as elb
from troposphere import GetAtt, If, Join, Output, Ref
from . import USE_ECS, USE_GOVCLOUD
from .security_groups import load_balancer_security_group
from .template import template
from .utils import ParameterWithDefaults as Parameter
from .vpc import public_subnet_a, public_subnet_b
# Web worker: instance port the load balancer forwards traffic to.
# ECS containers default to 8000; EC2 / Elastic Beanstalk default to 80.
if USE_ECS:
    web_worker_port = Ref(
        template.add_parameter(
            Parameter(
                "WebWorkerPort",
                Description="Web worker container exposed port",
                Type="Number",
                Default="8000",
            ),
            group="Load Balancer",
            label="Web Worker Port",
        )
    )
else:
    # Default to port 80 for the EC2 and Elastic Beanstalk options.
    web_worker_port = Ref(
        template.add_parameter(
            Parameter(
                "WebWorkerPort",
                Description="Default web worker exposed port (non-HTTPS)",
                Type="Number",
                Default="80",
            ),
            group="Load Balancer",
            label="Web Worker Port",
        )
    )
# Protocol spoken between the load balancer and the worker instances.
web_worker_protocol = Ref(
    template.add_parameter(
        Parameter(
            "WebWorkerProtocol",
            Description="Web worker instance protocol",
            Type="String",
            Default="HTTP",
            AllowedValues=["HTTP", "HTTPS"],
        ),
        group="Load Balancer",
        label="Web Worker Protocol",
    )
)
# Web worker health check configuration.
web_worker_health_check_protocol = Ref(
    template.add_parameter(
        Parameter(
            "WebWorkerHealthCheckProtocol",
            Description="Web worker health check protocol",
            Type="String",
            Default="TCP",
            AllowedValues=["TCP", "HTTP", "HTTPS"],
        ),
        group="Load Balancer",
        label="Health Check: Protocol",
    )
)
web_worker_health_check = Ref(
    template.add_parameter(
        Parameter(
            "WebWorkerHealthCheck",
            Description='Web worker health check URL path, e.g., "/health-check"; '
            "required unless WebWorkerHealthCheckProtocol is TCP",
            Type="String",
            Default="",
        ),
        group="Load Balancer",
        label="Health Check: URL",
    )
)
# Web load balancer: plain HTTP on port 80 always forwards to the worker.
listeners = [
    elb.Listener(
        LoadBalancerPort=80,
        InstanceProtocol=web_worker_protocol,
        InstancePort=web_worker_port,
        Protocol="HTTP",
    )
]
if USE_GOVCLOUD:
    # GovCloud doesn't support Certificate Manager, so the HTTPS listener
    # passes TCP traffic straight through on 443 (this can later be
    # modified to enable SSL termination at the load balancer via the AWS
    # console, if needed)
    listeners.append(
        elb.Listener(
            LoadBalancerPort=443,
            InstanceProtocol="TCP",
            InstancePort=443,
            Protocol="TCP",
        )
    )
else:
    from .certificates import application as application_certificate
    from .certificates import cert_condition
    # HTTPS listener terminating TLS with the ACM certificate; only emitted
    # into the template when a certificate was configured (cert_condition).
    listeners.append(
        If(
            cert_condition,
            elb.Listener(
                LoadBalancerPort=443,
                InstanceProtocol=web_worker_protocol,
                InstancePort=web_worker_port,
                Protocol="HTTPS",
                SSLCertificateId=application_certificate,
            ),
            Ref("AWS::NoValue"),
        )
    )
load_balancer = elb.LoadBalancer(
    "LoadBalancer",
    template=template,
    Subnets=[
        Ref(public_subnet_a),
        Ref(public_subnet_b),
    ],
    SecurityGroups=[Ref(load_balancer_security_group)],
    Listeners=listeners,
    HealthCheck=elb.HealthCheck(
        # NOTE(review): with the default TCP protocol and empty URL this
        # Target renders as "TCP:" with no port (the port component is
        # commented out below) -- confirm the generated template is valid.
        Target=Join(
            "",
            [
                web_worker_health_check_protocol,
                ":",
                # web_worker_health_check_port,
                web_worker_health_check,
            ],
        ),
        HealthyThreshold="2",
        UnhealthyThreshold="2",
        Interval="100",
        Timeout="10",
    ),
    CrossZone=True,
)
# Exported so external tooling / DNS records can point at the ELB.
template.add_output(
    Output(
        "LoadBalancerDNSName",
        Description="Loadbalancer DNS",
        Value=GetAtt(load_balancer, "DNSName"),
    )
)
template.add_output(
    Output(
        "LoadBalancerHostedZoneID",
        Description="Loadbalancer hosted zone",
        Value=GetAtt(load_balancer, "CanonicalHostedZoneNameID"),
    )
)
| true | true |
f7f7c6b33bcc0b84f3f6bbf33cb3f97b2016fd5f | 7,228 | py | Python | openmetrics/datadog_checks/openmetrics/config_models/defaults.py | flowcommerce/integrations-core | c562b0d423ec1a5dd4073b703d6a8d3a9ab23c72 | [
"BSD-3-Clause"
] | null | null | null | openmetrics/datadog_checks/openmetrics/config_models/defaults.py | flowcommerce/integrations-core | c562b0d423ec1a5dd4073b703d6a8d3a9ab23c72 | [
"BSD-3-Clause"
] | null | null | null | openmetrics/datadog_checks/openmetrics/config_models/defaults.py | flowcommerce/integrations-core | c562b0d423ec1a5dd4073b703d6a8d3a9ab23c72 | [
"BSD-3-Clause"
] | null | null | null | # (C) Datadog, Inc. 2021-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
# This file is autogenerated.
# To change this file you should edit assets/configuration/spec.yaml and then run the following commands:
# ddev -x validate config -s <INTEGRATION_NAME>
# ddev -x validate models -s <INTEGRATION_NAME>
from datadog_checks.base.utils.models.fields import get_default_field_value
def shared_proxy(field, value):
    # No static default; defer to the generic default for this field.
    return get_default_field_value(field, value)
def shared_service(field, value):
    # No static default; defer to the generic default for this field.
    return get_default_field_value(field, value)
def shared_skip_proxy(field, value):
    """Default for ``skip_proxy``: configured proxy settings are honored."""
    skip = False
    return skip
def shared_timeout(field, value):
    """Default request timeout for the shared config section."""
    default_timeout = 10
    return default_timeout
def instance_allow_redirects(field, value):
return True
def instance_auth_token(field, value):
return get_default_field_value(field, value)
def instance_auth_type(field, value):
    """Default authentication scheme: HTTP Basic."""
    default_scheme = 'basic'
    return default_scheme
def instance_aws_host(field, value):
return get_default_field_value(field, value)
def instance_aws_region(field, value):
return get_default_field_value(field, value)
def instance_aws_service(field, value):
return get_default_field_value(field, value)
def instance_bearer_token_auth(field, value):
return get_default_field_value(field, value)
def instance_bearer_token_path(field, value):
return get_default_field_value(field, value)
def instance_cache_metric_wildcards(field, value):
return True
def instance_cache_shared_labels(field, value):
return True
def instance_collect_counters_with_distributions(field, value):
return False
def instance_collect_histogram_buckets(field, value):
return True
def instance_connect_timeout(field, value):
return get_default_field_value(field, value)
def instance_disable_generic_tags(field, value):
return False
def instance_empty_default_hostname(field, value):
return False
def instance_enable_health_service_check(field, value):
return True
def instance_exclude_labels(field, value):
return get_default_field_value(field, value)
def instance_exclude_metrics(field, value):
return get_default_field_value(field, value)
def instance_exclude_metrics_by_labels(field, value):
return get_default_field_value(field, value)
def instance_extra_headers(field, value):
return get_default_field_value(field, value)
def instance_extra_metrics(field, value):
return get_default_field_value(field, value)
def instance_headers(field, value):
return get_default_field_value(field, value)
def instance_health_service_check(field, value):
return True
def instance_histogram_buckets_as_distributions(field, value):
return False
def instance_hostname_format(field, value):
return get_default_field_value(field, value)
def instance_hostname_label(field, value):
return get_default_field_value(field, value)
def instance_ignore_metrics(field, value):
return get_default_field_value(field, value)
def instance_ignore_metrics_by_labels(field, value):
return get_default_field_value(field, value)
def instance_ignore_tags(field, value):
return get_default_field_value(field, value)
def instance_include_labels(field, value):
return get_default_field_value(field, value)
def instance_kerberos_auth(field, value):
    """Kerberos authentication is off unless explicitly configured."""
    default_mode = 'disabled'
    return default_mode
def instance_kerberos_cache(field, value):
return get_default_field_value(field, value)
def instance_kerberos_delegate(field, value):
return False
def instance_kerberos_force_initiate(field, value):
return False
def instance_kerberos_hostname(field, value):
return get_default_field_value(field, value)
def instance_kerberos_keytab(field, value):
return get_default_field_value(field, value)
def instance_kerberos_principal(field, value):
return get_default_field_value(field, value)
def instance_label_joins(field, value):
return get_default_field_value(field, value)
def instance_label_to_hostname(field, value):
return get_default_field_value(field, value)
def instance_labels_mapper(field, value):
return get_default_field_value(field, value)
def instance_log_requests(field, value):
return False
def instance_min_collection_interval(field, value):
    """Default minimum collection interval for the check."""
    default_interval = 15
    return default_interval
def instance_namespace(field, value):
return get_default_field_value(field, value)
def instance_non_cumulative_histogram_buckets(field, value):
return False
def instance_ntlm_domain(field, value):
return get_default_field_value(field, value)
def instance_openmetrics_endpoint(field, value):
return get_default_field_value(field, value)
def instance_password(field, value):
return get_default_field_value(field, value)
def instance_persist_connections(field, value):
return False
def instance_prometheus_metrics_prefix(field, value):
return get_default_field_value(field, value)
def instance_prometheus_url(field, value):
return get_default_field_value(field, value)
def instance_proxy(field, value):
return get_default_field_value(field, value)
def instance_raw_line_filters(field, value):
return get_default_field_value(field, value)
def instance_raw_metric_prefix(field, value):
return get_default_field_value(field, value)
def instance_read_timeout(field, value):
return get_default_field_value(field, value)
def instance_rename_labels(field, value):
return get_default_field_value(field, value)
def instance_request_size(field, value):
return 16
def instance_send_distribution_buckets(field, value):
return False
def instance_send_distribution_counts_as_monotonic(field, value):
return False
def instance_send_distribution_sums_as_monotonic(field, value):
return False
def instance_send_histograms_buckets(field, value):
return True
def instance_send_monotonic_counter(field, value):
return True
def instance_send_monotonic_with_gauge(field, value):
return False
def instance_service(field, value):
return get_default_field_value(field, value)
def instance_share_labels(field, value):
return get_default_field_value(field, value)
def instance_skip_proxy(field, value):
return False
def instance_tags(field, value):
return get_default_field_value(field, value)
def instance_telemetry(field, value):
return False
def instance_timeout(field, value):
return 10
def instance_tls_ca_cert(field, value):
return get_default_field_value(field, value)
def instance_tls_cert(field, value):
return get_default_field_value(field, value)
def instance_tls_ignore_warning(field, value):
return False
def instance_tls_private_key(field, value):
return get_default_field_value(field, value)
def instance_tls_use_host_header(field, value):
return False
def instance_tls_verify(field, value):
return True
def instance_type_overrides(field, value):
return get_default_field_value(field, value)
def instance_use_latest_spec(field, value):
return False
def instance_use_legacy_auth_encoding(field, value):
return True
def instance_use_process_start_time(field, value):
return False
def instance_username(field, value):
return get_default_field_value(field, value)
| 21.072886 | 105 | 0.787493 |
# --- Generated config-model default providers --------------------------------
# `shared_*` functions provide defaults for init_config (shared) options;
# `instance_*` functions provide defaults for per-instance options. A value is
# either a hard-coded literal or `get_default_field_value(field, value)`,
# which defers to the model's declared default. This module looks
# auto-generated; prefer regenerating over hand-editing.
from datadog_checks.base.utils.models.fields import get_default_field_value
def shared_proxy(field, value):
    return get_default_field_value(field, value)
def shared_service(field, value):
    return get_default_field_value(field, value)
def shared_skip_proxy(field, value):
    return False
def shared_timeout(field, value):
    return 10
def instance_allow_redirects(field, value):
    return True
def instance_auth_token(field, value):
    return get_default_field_value(field, value)
def instance_auth_type(field, value):
    return 'basic'
def instance_aws_host(field, value):
    return get_default_field_value(field, value)
def instance_aws_region(field, value):
    return get_default_field_value(field, value)
def instance_aws_service(field, value):
    return get_default_field_value(field, value)
def instance_bearer_token_auth(field, value):
    return get_default_field_value(field, value)
def instance_bearer_token_path(field, value):
    return get_default_field_value(field, value)
def instance_cache_metric_wildcards(field, value):
    return True
def instance_cache_shared_labels(field, value):
    return True
def instance_collect_counters_with_distributions(field, value):
    return False
def instance_collect_histogram_buckets(field, value):
    return True
def instance_connect_timeout(field, value):
    return get_default_field_value(field, value)
def instance_disable_generic_tags(field, value):
    return False
def instance_empty_default_hostname(field, value):
    return False
def instance_enable_health_service_check(field, value):
    return True
def instance_exclude_labels(field, value):
    return get_default_field_value(field, value)
def instance_exclude_metrics(field, value):
    return get_default_field_value(field, value)
def instance_exclude_metrics_by_labels(field, value):
    return get_default_field_value(field, value)
def instance_extra_headers(field, value):
    return get_default_field_value(field, value)
def instance_extra_metrics(field, value):
    return get_default_field_value(field, value)
def instance_headers(field, value):
    return get_default_field_value(field, value)
def instance_health_service_check(field, value):
    return True
def instance_histogram_buckets_as_distributions(field, value):
    return False
def instance_hostname_format(field, value):
    return get_default_field_value(field, value)
def instance_hostname_label(field, value):
    return get_default_field_value(field, value)
def instance_ignore_metrics(field, value):
    return get_default_field_value(field, value)
def instance_ignore_metrics_by_labels(field, value):
    return get_default_field_value(field, value)
def instance_ignore_tags(field, value):
    return get_default_field_value(field, value)
def instance_include_labels(field, value):
    return get_default_field_value(field, value)
# Kerberos authentication is off unless explicitly enabled.
def instance_kerberos_auth(field, value):
    return 'disabled'
def instance_kerberos_cache(field, value):
    return get_default_field_value(field, value)
def instance_kerberos_delegate(field, value):
    return False
def instance_kerberos_force_initiate(field, value):
    return False
def instance_kerberos_hostname(field, value):
    return get_default_field_value(field, value)
def instance_kerberos_keytab(field, value):
    return get_default_field_value(field, value)
def instance_kerberos_principal(field, value):
    return get_default_field_value(field, value)
def instance_label_joins(field, value):
    return get_default_field_value(field, value)
def instance_label_to_hostname(field, value):
    return get_default_field_value(field, value)
def instance_labels_mapper(field, value):
    return get_default_field_value(field, value)
def instance_log_requests(field, value):
    return False
# Seconds between collections (agent-wide convention).
def instance_min_collection_interval(field, value):
    return 15
def instance_namespace(field, value):
    return get_default_field_value(field, value)
def instance_non_cumulative_histogram_buckets(field, value):
    return False
def instance_ntlm_domain(field, value):
    return get_default_field_value(field, value)
def instance_openmetrics_endpoint(field, value):
    return get_default_field_value(field, value)
def instance_password(field, value):
    return get_default_field_value(field, value)
def instance_persist_connections(field, value):
    return False
def instance_prometheus_metrics_prefix(field, value):
    return get_default_field_value(field, value)
def instance_prometheus_url(field, value):
    return get_default_field_value(field, value)
def instance_proxy(field, value):
    return get_default_field_value(field, value)
def instance_raw_line_filters(field, value):
    return get_default_field_value(field, value)
def instance_raw_metric_prefix(field, value):
    return get_default_field_value(field, value)
def instance_read_timeout(field, value):
    return get_default_field_value(field, value)
def instance_rename_labels(field, value):
    return get_default_field_value(field, value)
def instance_request_size(field, value):
    return 16
def instance_send_distribution_buckets(field, value):
    return False
def instance_send_distribution_counts_as_monotonic(field, value):
    return False
def instance_send_distribution_sums_as_monotonic(field, value):
    return False
def instance_send_histograms_buckets(field, value):
    return True
def instance_send_monotonic_counter(field, value):
    return True
def instance_send_monotonic_with_gauge(field, value):
    return False
def instance_service(field, value):
    return get_default_field_value(field, value)
def instance_share_labels(field, value):
    return get_default_field_value(field, value)
def instance_skip_proxy(field, value):
    return False
def instance_tags(field, value):
    return get_default_field_value(field, value)
def instance_telemetry(field, value):
    return False
def instance_timeout(field, value):
    return 10
def instance_tls_ca_cert(field, value):
    return get_default_field_value(field, value)
def instance_tls_cert(field, value):
    return get_default_field_value(field, value)
def instance_tls_ignore_warning(field, value):
    return False
def instance_tls_private_key(field, value):
    return get_default_field_value(field, value)
def instance_tls_use_host_header(field, value):
    return False
def instance_tls_verify(field, value):
    return True
def instance_type_overrides(field, value):
    return get_default_field_value(field, value)
def instance_use_latest_spec(field, value):
    return False
def instance_use_legacy_auth_encoding(field, value):
    return True
def instance_use_process_start_time(field, value):
    return False
def instance_username(field, value):
    return get_default_field_value(field, value)
| true | true |
f7f7c814c5912a4c37d5370d4c8b68c55cb27de1 | 91,511 | py | Python | spyder/app/mainwindow.py | TimenoLong/spyder | c4a71b75dd3229b2bebd606e073cf2db536f5c13 | [
"MIT"
] | 1 | 2021-06-29T02:20:12.000Z | 2021-06-29T02:20:12.000Z | spyder/app/mainwindow.py | TimenoLong/spyder | c4a71b75dd3229b2bebd606e073cf2db536f5c13 | [
"MIT"
] | null | null | null | spyder/app/mainwindow.py | TimenoLong/spyder | c4a71b75dd3229b2bebd606e073cf2db536f5c13 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""
Spyder, the Scientific Python Development Environment
=====================================================
Developed and maintained by the Spyder Project
Contributors
Copyright © Spyder Project Contributors
Licensed under the terms of the MIT License
(see spyder/__init__.py for details)
"""
# =============================================================================
# Stdlib imports
# =============================================================================
from __future__ import print_function
from collections import OrderedDict
from enum import Enum
import errno
import gc
import logging
import os
import os.path as osp
import shutil
import signal
import socket
import glob
import sys
import threading
import traceback
#==============================================================================
# Check requirements before proceeding
#==============================================================================
from spyder import requirements
requirements.check_path()
requirements.check_qt()
requirements.check_spyder_kernels()
#==============================================================================
# Third-party imports
#==============================================================================
from qtpy.compat import from_qvariant
from qtpy.QtCore import (QCoreApplication, Qt, QTimer, Signal, Slot,
qInstallMessageHandler)
from qtpy.QtGui import QColor, QIcon, QKeySequence
from qtpy.QtWidgets import (QAction, QApplication, QMainWindow, QMenu,
QMessageBox, QShortcut, QStyleFactory, QCheckBox)
# Avoid a "Cannot mix incompatible Qt library" error on Windows platforms
from qtpy import QtSvg # analysis:ignore
# Avoid a bug in Qt: https://bugreports.qt.io/browse/QTBUG-46720
from qtpy import QtWebEngineWidgets # analysis:ignore
from qtawesome.iconic_font import FontError
#==============================================================================
# Local imports
# NOTE: Move (if possible) import's of widgets and plugins exactly where they
# are needed in MainWindow to speed up perceived startup time (i.e. the time
# from clicking the Spyder icon to showing the splash screen).
#==============================================================================
from spyder import __version__
from spyder import dependencies
from spyder.app import tour
from spyder.app.utils import (create_splash_screen, delete_lsp_log_files,
qt_message_handler, set_links_color,
setup_logging, set_opengl_implementation, Spy)
from spyder.config.base import (_, DEV, get_conf_path, get_debug_level,
get_home_dir, get_module_source_path,
get_safe_mode, is_pynsist, running_in_mac_app,
running_under_pytest, STDERR)
from spyder.utils.image_path_manager import get_image_path
from spyder.config.gui import is_dark_font_color
from spyder.config.main import OPEN_FILES_PORT
from spyder.config.manager import CONF
from spyder.config.utils import IMPORT_EXT, is_gtk_desktop
from spyder.otherplugins import get_spyderplugins_mods
from spyder.py3compat import configparser as cp, PY3, to_text_string
from spyder.utils import encoding, programs
from spyder.utils.icon_manager import ima
from spyder.utils.misc import (select_port, getcwd_or_home,
get_python_executable)
from spyder.utils.palette import QStylePalette
from spyder.utils.qthelpers import (create_action, add_actions, file_uri,
qapplication, start_file)
from spyder.utils.stylesheet import APP_STYLESHEET
from spyder.app.solver import (
find_external_plugins, find_internal_plugins, solve_plugin_dependencies)
# Spyder API Imports
from spyder.api.exceptions import SpyderAPIError
from spyder.api.plugins import Plugins, SpyderPluginV2, SpyderDockablePlugin
#==============================================================================
# Windows only local imports
#==============================================================================
# Fallbacks so these names always exist; real implementations are only
# available on Windows. NOTE(review): `is_attached_console_visible` is never
# re-bound by the Windows import below — confirm whether that is intended.
set_attached_console_visible = None
is_attached_console_visible = None
set_windows_appusermodelid = None
if os.name == 'nt':
    from spyder.utils.windows import (set_attached_console_visible,
                                      set_windows_appusermodelid)
#==============================================================================
# Constants
#==============================================================================
# Module logger
logger = logging.getLogger(__name__)
# Keeping a reference to the original sys.exit before patching it
ORIGINAL_SYS_EXIT = sys.exit
# Get the cwd before initializing WorkingDirectory, which sets it to the one
# used in the last session
CWD = getcwd_or_home()
# Index of the default interactive tour
DEFAULT_TOUR = 0
#==============================================================================
# Install Qt message handler
#==============================================================================
qInstallMessageHandler(qt_message_handler)
#==============================================================================
# Main Window
#==============================================================================
class MainWindow(QMainWindow):
    """Spyder main window"""
    # Qt dock-widget behaviour flags applied to the main window
    DOCKOPTIONS = (
        QMainWindow.AllowTabbedDocks | QMainWindow.AllowNestedDocks |
        QMainWindow.AnimatedDocks
    )
    # On-disk locations of the user's Spyder PYTHONPATH configuration
    SPYDER_PATH = get_conf_path('path')
    SPYDER_NOT_ACTIVE_PATH = get_conf_path('not_active_path')
    # Number of default layouts shipped with Spyder
    DEFAULT_LAYOUTS = 4

    # Signals
    restore_scrollbar_position = Signal()
    sig_setup_finished = Signal()
    all_actions_defined = Signal()
    # Emitted with the old and new Spyder path dictionaries.
    # type: (OrderedDict, OrderedDict)
    sig_pythonpath_changed = Signal(object, object)
    sig_main_interpreter_changed = Signal()
    sig_open_external_file = Signal(str)
    sig_resized = Signal("QResizeEvent")  # Related to interactive tour
    sig_moved = Signal("QMoveEvent")  # Related to interactive tour
    sig_layout_setup_ready = Signal(object)  # Related to default layouts

    # --- Plugin handling methods
    # ------------------------------------------------------------------------
def get_plugin(self, plugin_name, error=True):
    """
    Return the registered plugin instance named `plugin_name`.

    Parameters
    ----------
    plugin_name: str
        Name (the plugin's `NAME`) used as the key in `self._PLUGINS`.
    error: bool, optional
        If True (default), raise SpyderAPIError when no such plugin is
        registered; otherwise return None.
    """
    # `_PLUGINS` is a dict, so look the name up directly instead of
    # scanning every item (same result, O(1) instead of O(n)).
    if plugin_name in self._PLUGINS:
        return self._PLUGINS[plugin_name]
    if error:
        raise SpyderAPIError('Plugin "{}" not found!'.format(plugin_name))
    return None
def show_status_message(self, message, timeout):
    """
    Display `message` in the main window status bar for `timeout` ms.

    The message is silently dropped when the status bar is hidden.
    """
    bar = self.statusBar()
    if not bar.isVisible():
        return
    bar.showMessage(message, timeout)
def show_plugin_compatibility_message(self, message):
    """
    Pop up a non-modal message box reporting a plugin compatibility
    problem; the box deletes itself when closed.
    """
    box = QMessageBox(self)
    box.setWindowTitle(_('Compatibility Check'))
    box.setText(message)
    box.setStandardButtons(QMessageBox.Ok)
    box.setWindowModality(Qt.NonModal)
    box.setAttribute(Qt.WA_DeleteOnClose)
    box.show()
def add_plugin(self, plugin, external=False):
    """
    Record `plugin` in the main window's plugin registries.

    The plugin is always indexed in `_PLUGINS` and additionally in the
    external or internal registry depending on `external`.
    """
    name = plugin.NAME
    self._PLUGINS[name] = plugin
    registry = self._EXTERNAL_PLUGINS if external else self._INTERNAL_PLUGINS
    registry[name] = plugin
def register_plugin(self, plugin, external=False, omit_conf=False):
    """
    Register a plugin in the Spyder Main Window.

    Wires the plugin's signals to main-window slots, calls its
    registration hooks, adds its dockwidget (for dockable plugins) and
    registers its shortcuts.

    Parameters
    ----------
    plugin: SpyderPluginV2 or SpyderDockablePlugin
        The plugin instance to register.
    external: bool, optional
        Whether the plugin comes from outside Spyder (third-party).
    omit_conf: bool, optional
        Passed through to `plugin._register` to skip config registration.
    """
    self.set_splash(_("Loading {}...").format(plugin.get_name()))
    logger.info("Loading {}...".format(plugin.NAME))
    # Check plugin compatibility
    is_compatible, message = plugin.check_compatibility()
    plugin.is_compatible = is_compatible
    plugin.get_description()
    if not is_compatible:
        # BUG FIX: this previously called self.show_compatibility_message,
        # which does not exist; the method defined on this class is
        # show_plugin_compatibility_message.
        self.show_plugin_compatibility_message(message)
        return
    # Signals
    plugin.sig_exception_occurred.connect(self.handle_exception)
    plugin.sig_free_memory_requested.connect(self.free_memory)
    plugin.sig_quit_requested.connect(self.close)
    plugin.sig_restart_requested.connect(self.restart)
    plugin.sig_redirect_stdio_requested.connect(
        self.redirect_internalshell_stdio)
    plugin.sig_status_message_requested.connect(self.show_status_message)
    if isinstance(plugin, SpyderDockablePlugin):
        plugin.sig_focus_changed.connect(self.plugin_focus_changed)
        plugin.sig_switch_to_plugin_requested.connect(
            self.switch_to_plugin)
        plugin.sig_update_ancestor_requested.connect(
            lambda: plugin.set_ancestor(self))
    # Register plugin
    plugin._register(omit_conf=omit_conf)
    plugin.register()
    if isinstance(plugin, SpyderDockablePlugin):
        # Add dockwidget
        self.add_dockwidget(plugin)
        # Update margins
        margin = 0
        if CONF.get('main', 'use_custom_margin'):
            margin = CONF.get('main', 'custom_margin')
        plugin.update_margins(margin)
    self.add_plugin(plugin, external=external)
    logger.info("Registering shortcuts for {}...".format(plugin.NAME))
    for action_name, action in plugin.get_actions().items():
        context = (getattr(action, 'shortcut_context', plugin.NAME)
                   or plugin.NAME)
        if getattr(action, 'register_shortcut', True):
            if isinstance(action_name, Enum):
                action_name = action_name.value
            self.register_shortcut(action, context, action_name)
    if isinstance(plugin, SpyderDockablePlugin):
        try:
            context = '_'
            name = 'switch to {}'.format(plugin.CONF_SECTION)
            shortcut = CONF.get_shortcut(context, name,
                                         plugin_name=plugin.CONF_SECTION)
        except (cp.NoSectionError, cp.NoOptionError):
            shortcut = None
        # The "switch to plugin" shortcut: created empty here; the actual
        # key sequence is applied through the shortcut registry.
        sc = QShortcut(QKeySequence(), self,
                       lambda: self.switch_to_plugin(plugin))
        sc.setContext(Qt.ApplicationShortcut)
        plugin._shortcut = sc
        self.register_shortcut(sc, context, name)
        self.register_shortcut(plugin.toggle_view_action, context, name)
def unregister_plugin(self, plugin):
    """
    Unregister a plugin from the Spyder Main Window.

    Disconnects main-window slots from the plugin's signals, removes the
    plugin's shortcuts and dockwidget, then calls the plugin's
    unregistration hooks.
    """
    logger.info("Unloading {}...".format(plugin.NAME))
    # Disconnect all slots
    signals = [
        plugin.sig_quit_requested,
        plugin.sig_redirect_stdio_requested,
        plugin.sig_status_message_requested,
    ]
    for sig in signals:
        try:
            # disconnect() with no slot raises TypeError when nothing is
            # connected; treat that as "already disconnected".
            sig.disconnect()
        except TypeError:
            pass
    # Unregister shortcuts for actions
    logger.info("Unregistering shortcuts for {}...".format(plugin.NAME))
    for action_name, action in plugin.get_actions().items():
        context = (getattr(action, 'shortcut_context', plugin.NAME)
                   or plugin.NAME)
        self.shortcuts.unregister_shortcut(action, context, action_name)
    # Unregister switch to shortcut
    shortcut = None
    try:
        context = '_'
        name = 'switch to {}'.format(plugin.CONF_SECTION)
        shortcut = CONF.get_shortcut(context, name,
                                     plugin_name=plugin.CONF_SECTION)
    except Exception:
        pass
    if shortcut is not None:
        # NOTE(review): registration (register_plugin) uses the lowercase
        # name 'switch to {}', but this passes capitalized "Switch to {}" —
        # confirm whether unregister_shortcut is case-insensitive.
        self.shortcuts.unregister_shortcut(
            plugin._shortcut,
            context,
            "Switch to {}".format(plugin.CONF_SECTION),
        )
    # Remove dockwidget
    logger.info("Removing {} dockwidget...".format(plugin.NAME))
    self.remove_dockwidget(plugin)
    plugin.unregister()
    plugin._unregister()
def create_plugin_conf_widget(self, plugin):
    """
    Build the preferences-dialog page widget for `plugin`.

    Returns None when the plugin declares no CONF_WIDGET_CLASS or no
    preferences dialog instance currently exists.
    """
    dialog = self.prefs_dialog_instance
    if plugin.CONF_WIDGET_CLASS is None or dialog is None:
        return None
    widget = plugin.CONF_WIDGET_CLASS(plugin, dialog)
    widget.initialize()
    return widget
@property
def last_plugin(self):
    """
    Get last plugin with focus if it is a dockable widget.

    If a non-dockable plugin has the focus this will return by default
    the Editor plugin.
    """
    # Needed to prevent errors with the old API at
    # spyder/plugins/base::_switch_to_plugin; the Layout plugin owns the
    # actual bookkeeping.
    return self.layouts.get_last_plugin()
def maximize_dockwidget(self, restore=False):
    """
    Maximize (or restore) the current dockwidget.

    Thin wrapper kept to prevent errors with the old API at
    spyder/plugins/base::_switch_to_plugin.
    See spyder-ide/spyder#15164

    Parameters
    ----------
    restore : bool, optional
        If the current dockwidget needs to be restored to its unmaximized
        state. The default is False.
    """
    # Delegates entirely to the Layout plugin.
    self.layouts.maximize_dockwidget(restore=restore)
def switch_to_plugin(self, plugin, force_focus=None):
    """
    Switch to this plugin.

    Notes
    -----
    This operation unmaximizes the current plugin (if any), raises
    this plugin to view (if it's hidden) and gives it focus (if
    possible).
    """
    last_plugin = self.last_plugin
    try:
        # New API: maximization state lives on the plugin's main widget.
        if (last_plugin is not None
                and last_plugin.get_widget().is_maximized
                and last_plugin is not plugin):
            self.layouts.maximize_dockwidget()
    except AttributeError:
        # Old API: fall back to the legacy `_ismaximized` flag.
        if (last_plugin is not None and self.last_plugin._ismaximized
                and last_plugin is not plugin):
            self.layouts.maximize_dockwidget()
    try:
        # New API: ensure the toggle-view action reflects a visible plugin.
        if not plugin.toggle_view_action.isChecked():
            plugin.toggle_view_action.setChecked(True)
            plugin.get_widget().is_visible = False
    except AttributeError:
        # Old API equivalent of the block above.
        if not plugin._toggle_view_action.isChecked():
            plugin._toggle_view_action.setChecked(True)
            plugin._widget._is_visible = False
    plugin.change_visibility(True, force_focus=force_focus)
def remove_dockwidget(self, plugin):
    """
    Detach `plugin`'s QDockWidget from the main window and drop the
    plugin from the tracked widget list (no-op if it was never tracked).
    """
    self.removeDockWidget(plugin.dockwidget)
    if plugin in self.widgetlist:
        self.widgetlist.remove(plugin)
def tabify_plugins(self, first, second):
    """Stack `second`'s dockwidget as a tab on top of `first`'s."""
    first_dock = first.dockwidget
    second_dock = second.dockwidget
    self.tabifyDockWidget(first_dock, second_dock)
def tabify_plugin(self, plugin, default=None):
    """
    Tabify the plugin using the list of possible TABIFY options.

    Only do this if the dockwidget does not have more dockwidgets
    in the same position and if the plugin is using the New API.

    Returns True when a tabify target list was available, False when the
    plugin opted out (TABIFY of [None] or []).
    """
    def tabify_helper(plugin, next_to_plugins):
        # Try the candidates in order; the first that succeeds wins.
        for next_to_plugin in next_to_plugins:
            try:
                self.tabify_plugins(next_to_plugin, plugin)
                break
            except SpyderAPIError as err:
                logger.error(err)
    # If TABIFY not defined use the [default]
    tabify = getattr(plugin, 'TABIFY', [default])
    if not isinstance(tabify, list):
        next_to_plugins = [tabify]
    else:
        next_to_plugins = tabify
    # Check if TABIFY is not a list with None as unique value or a default
    # list
    if tabify in [[None], []]:
        return False
    # Get the actual plugins from the names
    next_to_plugins = [self.get_plugin(p) for p in next_to_plugins]
    # First time plugin starts
    if plugin.get_conf('first_time', True):
        if (isinstance(plugin, SpyderDockablePlugin)
                and plugin.NAME != Plugins.Console):
            logger.info(
                "Tabify {} dockwidget for the first time...".format(
                    plugin.NAME))
            tabify_helper(plugin, next_to_plugins)
        plugin.set_conf('enable', True)
        plugin.set_conf('first_time', False)
    else:
        # This is needed to ensure new plugins are placed correctly
        # without the need for a layout reset.
        logger.info("Tabify {} dockwidget...".format(plugin.NAME))
        # Check if plugin has no other dockwidgets in the same position
        if not bool(self.tabifiedDockWidgets(plugin.dockwidget)):
            tabify_helper(plugin, next_to_plugins)
    return True
def handle_exception(self, error_data):
    """
    Forward an error report to the Console plugin, when one is available.

    Exposed on the plugin API as a convenience signal target so plugins
    never have to talk to the Console plugin directly.

    Parameters
    ----------
    error_data: dict
        The error report. Expected keys:

        >>> error_data = {
            "text": str,
            "is_traceback": bool,
            "repo": str,
            "title": str,
            "label": str,
            "steps": str,
        }

    Notes
    -----
    `is_traceback` indicates whether `text` is plain text or a Python
    traceback. `title` and `repo` customize the report dialog and the
    Github error submission; `label` and `steps` customize the error
    dialog's content.
    """
    console = self.console
    if console:
        console.handle_exception(error_data)
def __init__(self, splash=None, options=None):
    """
    Build the main window's initial state.

    Parameters
    ----------
    splash: optional
        Splash screen instance shown during startup (stored as-is).
    options: namespace
        Parsed command-line options; this constructor reads
        working_directory, profile, multithreaded, new_instance, project
        and window_title from it.
    """
    QMainWindow.__init__(self)
    qapp = QApplication.instance()
    if running_under_pytest():
        self._proxy_style = None
    else:
        from spyder.utils.qthelpers import SpyderProxyStyle
        # None is needed, see: https://bugreports.qt.io/browse/PYSIDE-922
        self._proxy_style = SpyderProxyStyle(None)
    # Enabling scaling for high dpi
    qapp.setAttribute(Qt.AA_UseHighDpiPixmaps)
    self.default_style = str(qapp.style().objectName())
    # Options coming from the command line
    self.init_workdir = options.working_directory
    self.profile = options.profile
    self.multithreaded = options.multithreaded
    self.new_instance = options.new_instance
    if options.project is not None and not running_in_mac_app():
        self.open_project = osp.normpath(osp.join(CWD, options.project))
    else:
        self.open_project = None
    self.window_title = options.window_title
    logger.info("Start of MainWindow constructor")
    def signal_handler(signum, frame=None):
        """Handler for signals."""
        sys.stdout.write('Handling signal: %s\n' % signum)
        sys.stdout.flush()
        QApplication.quit()
    if os.name == "nt":
        try:
            import win32api
            win32api.SetConsoleCtrlHandler(signal_handler, True)
        except ImportError:
            pass
    else:
        signal.signal(signal.SIGTERM, signal_handler)
        if not DEV:
            # Make spyder quit when pressing ctrl+C in the console.
            # In DEV Ctrl+C doesn't quit, because it helps to
            # capture the traceback when spyder freezes
            signal.signal(signal.SIGINT, signal_handler)
    # Use a custom Qt stylesheet
    if sys.platform == 'darwin':
        spy_path = get_module_source_path('spyder')
        img_path = osp.join(spy_path, 'images')
        mac_style = open(osp.join(spy_path, 'app', 'mac_stylesheet.qss')).read()
        mac_style = mac_style.replace('$IMAGE_PATH', img_path)
        self.setStyleSheet(mac_style)
    # Shortcut management data
    self.shortcut_data = []
    # Handle Spyder path
    self.path = ()
    self.not_active_path = ()
    self.project_path = ()
    # New API
    self._APPLICATION_TOOLBARS = OrderedDict()
    self._STATUS_WIDGETS = OrderedDict()
    self._PLUGINS = OrderedDict()
    self._EXTERNAL_PLUGINS = OrderedDict()
    self._INTERNAL_PLUGINS = OrderedDict()
    # Mapping of new plugin identifiers vs old attribute
    # names given for plugins, or to prevent collisions with other
    # attributes, i.e layout (Qt) vs layout (SpyderPluginV2)
    self._INTERNAL_PLUGINS_MAPPING = {
        'console': Plugins.Console,
        'maininterpreter': Plugins.MainInterpreter,
        'outlineexplorer': Plugins.OutlineExplorer,
        'variableexplorer': Plugins.VariableExplorer,
        'ipyconsole': Plugins.IPythonConsole,
        'workingdirectory': Plugins.WorkingDirectory,
        'projects': Plugins.Projects,
        'findinfiles': Plugins.Find,
        'layouts': Plugins.Layout,
    }
    self.thirdparty_plugins = []
    # Tour
    # TODO: Should be a plugin
    self.tour = None
    self.tours_available = None
    self.tour_dialog = None
    # File switcher
    self.switcher = None
    # Preferences
    self.prefs_dialog_size = None
    self.prefs_dialog_instance = None
    # Actions (populated later during setup)
    self.undo_action = None
    self.redo_action = None
    self.copy_action = None
    self.cut_action = None
    self.paste_action = None
    self.selectall_action = None
    # Menu bars
    self.edit_menu = None
    self.edit_menu_actions = []
    self.search_menu = None
    self.search_menu_actions = []
    self.source_menu = None
    self.source_menu_actions = []
    self.run_menu = None
    self.run_menu_actions = []
    self.debug_menu = None
    self.debug_menu_actions = []
    # TODO: Move to corresponding Plugins
    self.main_toolbar = None
    self.main_toolbar_actions = []
    self.file_toolbar = None
    self.file_toolbar_actions = []
    self.run_toolbar = None
    self.run_toolbar_actions = []
    self.debug_toolbar = None
    self.debug_toolbar_actions = []
    self.menus = []
    if running_under_pytest():
        # Show errors in internal console when testing.
        CONF.set('main', 'show_internal_errors', False)
    self.CURSORBLINK_OSDEFAULT = QApplication.cursorFlashTime()
    if set_windows_appusermodelid != None:
        res = set_windows_appusermodelid()
        logger.info("appusermodelid: %s", res)
    # Setting QTimer if running in travis
    test_app = os.environ.get('TEST_CI_APP')
    if test_app is not None:
        app = qapplication()
        timer_shutdown_time = 30000
        self.timer_shutdown = QTimer(self)
        self.timer_shutdown.timeout.connect(app.quit)
        self.timer_shutdown.start(timer_shutdown_time)
    # Showing splash screen
    self.splash = splash
    if CONF.get('main', 'current_version', '') != __version__:
        CONF.set('main', 'current_version', __version__)
        # Execute here the actions to be performed only once after
        # each update (there is nothing there for now, but it could
        # be useful some day...)
    # List of satellite widgets (registered in add_dockwidget):
    self.widgetlist = []
    # Flags used if closing() is called by the exit() shell command
    self.already_closed = False
    self.is_starting_up = True
    self.is_setting_up = True
    self.floating_dockwidgets = []
    self.window_size = None
    self.window_position = None
    # To keep track of the last focused widget
    self.last_focused_widget = None
    self.previous_focused_widget = None
    # Keep track of dpi message
    self.show_dpi_message = True
    # Server to open external files on a single instance
    # This is needed in order to handle socket creation problems.
    # See spyder-ide/spyder#4132.
    if os.name == 'nt':
        try:
            self.open_files_server = socket.socket(socket.AF_INET,
                                                   socket.SOCK_STREAM,
                                                   socket.IPPROTO_TCP)
        except OSError:
            self.open_files_server = None
            QMessageBox.warning(None, "Spyder",
                     _("An error occurred while creating a socket needed "
                       "by Spyder. Please, try to run as an Administrator "
                       "from cmd.exe the following command and then "
                       "restart your computer: <br><br><span "
                       "style=\'color: {color}\'><b>netsh winsock reset "
                       "</b></span><br>").format(
                           color=QStylePalette.COLOR_BACKGROUND_4))
    else:
        self.open_files_server = socket.socket(socket.AF_INET,
                                               socket.SOCK_STREAM,
                                               socket.IPPROTO_TCP)
    # To show the message about starting the tour
    self.sig_setup_finished.connect(self.show_tour_message)
    # Apply main window settings
    self.apply_settings()
    # To set all dockwidgets tabs to be on top (in case we want to do it
    # in the future)
    # self.setTabPosition(Qt.AllDockWidgetAreas, QTabWidget.North)
    logger.info("End of MainWindow constructor")
# --- Window setup
def _update_shortcuts_in_panes_menu(self, show=True):
    """
    Display the shortcut for the "Switch to plugin..." on the toggle view
    action of the plugins displayed in the Help/Panes menu.

    Notes
    -----
    SpyderDockablePlugins provide two actions that function as a single
    action. The `Switch to Plugin...` action has an assignable shortcut
    via the shortcut preferences. The `Plugin toggle View` in the `View`
    application menu, uses a custom `Toggle view action` that displays the
    shortcut assigned to the `Switch to Plugin...` action, but is not
    triggered by that shortcut.
    """
    for plugin_id, plugin in self._PLUGINS.items():
        if isinstance(plugin, SpyderDockablePlugin):
            try:
                # New API
                action = plugin.toggle_view_action
            except AttributeError:
                # Old API
                action = plugin._toggle_view_action
            if show:
                section = plugin.CONF_SECTION
                try:
                    context = '_'
                    name = 'switch to {}'.format(section)
                    shortcut = CONF.get_shortcut(
                        context, name, plugin_name=section)
                except (cp.NoSectionError, cp.NoOptionError):
                    # No configured shortcut: show an empty sequence.
                    shortcut = QKeySequence()
            else:
                shortcut = QKeySequence()
            action.setShortcut(shortcut)
    def setup(self):
        """
        Setup main window.

        Applies the UI theme, creates the status bar and the switcher,
        instantiates and registers all internal/external plugins, and
        builds the menus, toolbars and interactive-tour entries. The
        order of the sections below matters: plugins must exist before
        menus/toolbars that reference their actions are filled.
        """
        # TODO: Remove circular dependency between help and ipython console
        # and remove this import. Help plugin should take care of it
        from spyder.plugins.help.utils.sphinxify import CSS_PATH, DARK_CSS_PATH
        logger.info("*** Start of MainWindow setup ***")
        logger.info("Updating PYTHONPATH")
        path_dict = self.get_spyder_pythonpath_dict()
        self.update_python_path(path_dict)
        logger.info("Applying theme configuration...")
        ui_theme = CONF.get('appearance', 'ui_theme')
        color_scheme = CONF.get('appearance', 'selected')
        # Pick the stylesheet and Help CSS for the configured interface
        # theme ('dark', 'light' or 'automatic' based on the editor scheme).
        if ui_theme == 'dark':
            if not running_under_pytest():
                # Set style proxy to fix combobox popup on mac and qdark
                qapp = QApplication.instance()
                qapp.setStyle(self._proxy_style)
            dark_qss = str(APP_STYLESHEET)
            self.setStyleSheet(dark_qss)
            self.statusBar().setStyleSheet(dark_qss)
            css_path = DARK_CSS_PATH
        elif ui_theme == 'light':
            if not running_under_pytest():
                # Set style proxy to fix combobox popup on mac and qdark
                qapp = QApplication.instance()
                qapp.setStyle(self._proxy_style)
            light_qss = str(APP_STYLESHEET)
            self.setStyleSheet(light_qss)
            self.statusBar().setStyleSheet(light_qss)
            css_path = CSS_PATH
        elif ui_theme == 'automatic':
            if not is_dark_font_color(color_scheme):
                if not running_under_pytest():
                    # Set style proxy to fix combobox popup on mac and qdark
                    qapp = QApplication.instance()
                    qapp.setStyle(self._proxy_style)
                dark_qss = str(APP_STYLESHEET)
                self.setStyleSheet(dark_qss)
                self.statusBar().setStyleSheet(dark_qss)
                css_path = DARK_CSS_PATH
            else:
                light_qss = str(APP_STYLESHEET)
                self.setStyleSheet(light_qss)
                self.statusBar().setStyleSheet(light_qss)
                css_path = CSS_PATH
        # Set css_path as a configuration to be used by the plugins
        CONF.set('appearance', 'css_path', css_path)
        # Status bar
        status = self.statusBar()
        status.setObjectName("StatusBar")
        status.showMessage(_("Welcome to Spyder!"), 5000)
        # Switcher instance
        logger.info("Loading switcher...")
        self.create_switcher()
        # Banner shown by the Internal Console (read from config there).
        message = _(
            "Spyder Internal Console\n\n"
            "This console is used to report application\n"
            "internal errors and to inspect Spyder\n"
            "internals with the following commands:\n"
            "  spy.app, spy.window, dir(spy)\n\n"
            "Please don't use it to run your code\n\n"
        )
        CONF.set('internal_console', 'message', message)
        CONF.set('internal_console', 'multithreaded', self.multithreaded)
        CONF.set('internal_console', 'profile', self.profile)
        CONF.set('internal_console', 'commands', [])
        CONF.set('internal_console', 'namespace', {})
        CONF.set('internal_console', 'show_internal_errors', True)
        # Working directory initialization
        CONF.set('workingdir', 'init_workdir', self.init_workdir)
        # Load and register internal and external plugins
        external_plugins = find_external_plugins()
        internal_plugins = find_internal_plugins()
        all_plugins = external_plugins.copy()
        all_plugins.update(internal_plugins.copy())
        # Determine 'enable' config for the plugins that have it
        enabled_plugins = {}
        for plugin in all_plugins.values():
            plugin_name = plugin.NAME
            plugin_main_attribute_name = (
                self._INTERNAL_PLUGINS_MAPPING[plugin_name]
                if plugin_name in self._INTERNAL_PLUGINS_MAPPING
                else plugin_name)
            try:
                if CONF.get(plugin_main_attribute_name, "enable"):
                    enabled_plugins[plugin_name] = plugin
            except (cp.NoOptionError, cp.NoSectionError):
                # Plugins without an 'enable' option are always enabled.
                enabled_plugins[plugin_name] = plugin
        # Get ordered list of plugins classes and instantiate them
        plugin_deps = solve_plugin_dependencies(list(enabled_plugins.values()))
        for plugin_class in plugin_deps:
            plugin_name = plugin_class.NAME
            # Non-migrated plugins
            if plugin_name in [
                    Plugins.Editor,
                    Plugins.IPythonConsole]:
                if plugin_name == Plugins.IPythonConsole:
                    plugin_instance = plugin_class(self)
                    plugin_instance.sig_exception_occurred.connect(
                        self.handle_exception)
                else:
                    plugin_instance = plugin_class(self)
                plugin_instance.register_plugin()
                self.add_plugin(plugin_instance)
                self.preferences.register_plugin_preferences(
                    plugin_instance)
            # Migrated or new plugins
            elif plugin_name in [
                    Plugins.MainMenu,
                    Plugins.OnlineHelp,
                    Plugins.Toolbar,
                    Plugins.Preferences,
                    Plugins.Appearance,
                    Plugins.Run,
                    Plugins.Shortcuts,
                    Plugins.StatusBar,
                    Plugins.Completions,
                    Plugins.OutlineExplorer,
                    Plugins.Console,
                    Plugins.MainInterpreter,
                    Plugins.Breakpoints,
                    Plugins.History,
                    Plugins.Profiler,
                    Plugins.Explorer,
                    Plugins.Help,
                    Plugins.Plots,
                    Plugins.VariableExplorer,
                    Plugins.Application,
                    Plugins.Find,
                    Plugins.Pylint,
                    Plugins.WorkingDirectory,
                    Plugins.Projects,
                    Plugins.Layout]:
                plugin_instance = plugin_class(self, configuration=CONF)
                self.register_plugin(plugin_instance)
                # TODO: Check thirdparty attribute usage
                # For now append plugins to the thirdparty attribute as was
                # being done
                if plugin_name in [
                        Plugins.Breakpoints,
                        Plugins.Profiler,
                        Plugins.Pylint]:
                    self.thirdparty_plugins.append(plugin_instance)
            # Load external_plugins adding their dependencies
            elif (issubclass(plugin_class, SpyderPluginV2) and
                    plugin_class.NAME in external_plugins):
                try:
                    if plugin_class.CONF_FILE:
                        CONF.register_plugin(plugin_class)
                    plugin_instance = plugin_class(
                        self,
                        configuration=CONF,
                    )
                    self.register_plugin(plugin_instance, external=True,
                                         omit_conf=plugin_class.CONF_FILE)
                    # These attributes come from spyder.app.solver to add
                    # plugins to the dependencies dialog
                    if not running_under_pytest():
                        module = plugin_class._spyder_module_name
                        package_name = plugin_class._spyder_package_name
                        version = plugin_class._spyder_version
                        description = plugin_instance.get_description()
                        dependencies.add(
                            module, package_name, description, version, None,
                            kind=dependencies.PLUGIN)
                except Exception as error:
                    # A broken external plugin must not abort startup.
                    print("%s: %s" % (plugin_class, str(error)), file=STDERR)
                    traceback.print_exc(file=STDERR)
        self.set_splash(_("Loading old third-party plugins..."))
        for mod in get_spyderplugins_mods():
            try:
                plugin = mod.PLUGIN_CLASS(self)
                if plugin.check_compatibility()[0]:
                    if hasattr(plugin, 'CONFIGWIDGET_CLASS'):
                        self.preferences.register_plugin_preferences(plugin)
                    if hasattr(plugin, 'COMPLETION_PROVIDER_NAME'):
                        self.completions.register_completion_plugin(plugin)
                    else:
                        self.thirdparty_plugins.append(plugin)
                        plugin.register_plugin()
                    # Add to dependencies dialog
                    module = mod.__name__
                    name = module.replace('_', '-')
                    if plugin.DESCRIPTION:
                        description = plugin.DESCRIPTION
                    else:
                        description = plugin.get_plugin_title()
                    dependencies.add(module, name, description,
                                     '', None, kind=dependencies.PLUGIN)
            except TypeError:
                # Fixes spyder-ide/spyder#13977
                pass
            except Exception as error:
                print("%s: %s" % (mod, str(error)), file=STDERR)
                traceback.print_exc(file=STDERR)
        # Set window title
        self.set_window_title()
        # Menus
        # TODO: Remove when all menus are migrated to use the Main Menu Plugin
        logger.info("Creating Menus...")
        from spyder.api.widgets.menus import SpyderMenu
        from spyder.plugins.mainmenu.api import (
            ApplicationMenus, HelpMenuSections, ToolsMenuSections,
            FileMenuSections)
        mainmenu = self.mainmenu
        self.edit_menu = mainmenu.get_application_menu("edit_menu")
        self.search_menu = mainmenu.get_application_menu("search_menu")
        self.source_menu = mainmenu.get_application_menu("source_menu")
        self.source_menu.aboutToShow.connect(self.update_source_menu)
        self.run_menu = mainmenu.get_application_menu("run_menu")
        self.debug_menu = mainmenu.get_application_menu("debug_menu")
        # Switcher shortcuts
        self.file_switcher_action = create_action(
                                    self,
                                    _('File switcher...'),
                                    icon=ima.icon('filelist'),
                                    tip=_('Fast switch between files'),
                                    triggered=self.open_switcher,
                                    context=Qt.ApplicationShortcut)
        self.register_shortcut(self.file_switcher_action, context="_",
                               name="File switcher")
        self.symbol_finder_action = create_action(
                                    self, _('Symbol finder...'),
                                    icon=ima.icon('symbol_find'),
                                    tip=_('Fast symbol search in file'),
                                    triggered=self.open_symbolfinder,
                                    context=Qt.ApplicationShortcut)
        self.register_shortcut(self.symbol_finder_action, context="_",
                               name="symbol finder", add_shortcut_to_tip=True)
        def create_edit_action(text, tr_text, icon):
            # Helper: build an edit action whose triggered slot dispatches,
            # via global_callback, to the method named after `text` on the
            # focused widget (e.g. 'Select All' -> 'selectAll').
            textseq = text.split(' ')
            method_name = textseq[0].lower()+"".join(textseq[1:])
            action = create_action(self, tr_text,
                                   icon=icon,
                                   triggered=self.global_callback,
                                   data=method_name,
                                   context=Qt.WidgetShortcut)
            self.register_shortcut(action, "Editor", text)
            return action
        self.undo_action = create_edit_action('Undo', _('Undo'),
                                              ima.icon('undo'))
        self.redo_action = create_edit_action('Redo', _('Redo'),
                                              ima.icon('redo'))
        self.copy_action = create_edit_action('Copy', _('Copy'),
                                              ima.icon('editcopy'))
        self.cut_action = create_edit_action('Cut', _('Cut'),
                                             ima.icon('editcut'))
        self.paste_action = create_edit_action('Paste', _('Paste'),
                                               ima.icon('editpaste'))
        self.selectall_action = create_edit_action("Select All",
                                                   _("Select All"),
                                                   ima.icon('selectall'))
        self.edit_menu_actions += [self.undo_action, self.redo_action,
                                   None, self.cut_action, self.copy_action,
                                   self.paste_action, self.selectall_action,
                                   None] + self.editor.edit_menu_actions
        switcher_actions = [
            self.file_switcher_action,
            self.symbol_finder_action
        ]
        for switcher_action in switcher_actions:
            mainmenu.add_item_to_application_menu(
                    switcher_action,
                    menu_id=ApplicationMenus.File,
                    section=FileMenuSections.Switcher,
                    before_section=FileMenuSections.Restart)
        self.set_splash("")
        # Toolbars
        # TODO: Remove after finishing the migration
        logger.info("Creating toolbars...")
        toolbar = self.toolbar
        self.file_toolbar = toolbar.get_application_toolbar("file_toolbar")
        self.run_toolbar = toolbar.get_application_toolbar("run_toolbar")
        self.debug_toolbar = toolbar.get_application_toolbar("debug_toolbar")
        self.main_toolbar = toolbar.get_application_toolbar("main_toolbar")
        # Tools + External Tools (some of this depends on the Application
        # plugin)
        logger.info("Creating Tools menu...")
        spyder_path_action = create_action(
            self,
            _("PYTHONPATH manager"),
            None, icon=ima.icon('pythonpath'),
            triggered=self.show_path_manager,
            tip=_("PYTHONPATH manager"),
            menurole=QAction.ApplicationSpecificRole)
        from spyder.plugins.application.plugin import (
            ApplicationActions, WinUserEnvDialog)
        winenv_action = None
        if WinUserEnvDialog:
            winenv_action = self.application.get_action(
                ApplicationActions.SpyderWindowsEnvVariables)
        mainmenu.add_item_to_application_menu(
            spyder_path_action,
            menu_id=ApplicationMenus.Tools,
            section=ToolsMenuSections.Tools,
            before=winenv_action
        )
        # LSP logs menu is only shown with a high debug level.
        if get_debug_level() >= 3:
            self.menu_lsp_logs = QMenu(_("LSP logs"))
            self.menu_lsp_logs.aboutToShow.connect(self.update_lsp_logs)
            mainmenu.add_item_to_application_menu(
                self.menu_lsp_logs,
                menu_id=ApplicationMenus.Tools)
        # Main toolbar
        from spyder.plugins.toolbar.api import (
            ApplicationToolbars, MainToolbarSections)
        self.toolbar.add_item_to_application_toolbar(
            spyder_path_action,
            toolbar_id=ApplicationToolbars.Main,
            section=MainToolbarSections.ApplicationSection
        )
        self.set_splash(_("Setting up main window..."))
        #----- Tours
        # TODO: Move tours to a plugin structure
        self.tour = tour.AnimatedTour(self)
        # self.tours_menu = QMenu(_("Interactive tours"), self)
        # self.tour_menu_actions = []
        # # TODO: Only show intro tour for now. When we are close to finish
        # # 3.0, we will finish and show the other tour
        self.tours_available = tour.get_tours(DEFAULT_TOUR)
        for i, tour_available in enumerate(self.tours_available):
            self.tours_available[i]['last'] = 0
            tour_name = tour_available['name']
        #     def trigger(i=i, self=self):  # closure needed!
        #         return lambda: self.show_tour(i)
        #     temp_action = create_action(self, tour_name, tip="",
        #                                 triggered=trigger())
        #     self.tour_menu_actions += [temp_action]
        # self.tours_menu.addActions(self.tour_menu_actions)
        self.tour_action = create_action(
            self,
            self.tours_available[DEFAULT_TOUR]['name'],
            tip=_("Interactive tour introducing Spyder's panes and features"),
            triggered=lambda: self.show_tour(DEFAULT_TOUR))
        mainmenu.add_item_to_application_menu(
            self.tour_action,
            menu_id=ApplicationMenus.Help,
            section=HelpMenuSections.Documentation)
        # TODO: Migrate to use the MainMenu Plugin instead of list of actions
        # Filling out menu/toolbar entries:
        add_actions(self.edit_menu, self.edit_menu_actions)
        add_actions(self.search_menu, self.search_menu_actions)
        add_actions(self.source_menu, self.source_menu_actions)
        add_actions(self.run_menu, self.run_menu_actions)
        add_actions(self.debug_menu, self.debug_menu_actions)
        # Emitting the signal notifying plugins that main window menu and
        # toolbar actions are all defined:
        self.all_actions_defined.emit()
def __getattr__(self, attr):
"""
Redefinition of __getattr__ to enable access to plugins.
Loaded plugins can be accessed as attributes of the mainwindow
as before, e.g self.console or self.main.console, preserving the
same accessor as before.
"""
# Mapping of new plugin identifiers vs old attributtes
# names given for plugins
if attr in self._INTERNAL_PLUGINS_MAPPING.keys():
return self.get_plugin(self._INTERNAL_PLUGINS_MAPPING[attr])
try:
return self.get_plugin(attr)
except SpyderAPIError:
pass
return super().__getattr__(attr)
def update_lsp_logs(self):
"""Create an action for each lsp log file."""
self.menu_lsp_logs.clear()
lsp_logs = []
files = glob.glob(osp.join(get_conf_path('lsp_logs'), '*.log'))
for f in files:
action = create_action(self, f, triggered=self.editor.load)
action.setData(f)
lsp_logs.append(action)
add_actions(self.menu_lsp_logs, lsp_logs)
    def pre_visible_setup(self):
        """
        Actions to be performed before the main window is visible.

        The actions here are related with setting up the main window.
        """
        logger.info("Setting up window...")
        # Create external plugins before loading the layout to include them in
        # the window restore state after restarts.
        for plugin, plugin_instance in self._EXTERNAL_PLUGINS.items():
            self.tabify_plugin(plugin_instance, Plugins.Console)
            if isinstance(plugin_instance, SpyderDockablePlugin):
                # External dockable plugins start hidden.
                plugin_instance.get_widget().toggle_view(False)
        # Let every plugin run its pre-show hook, if it implements one.
        for plugin_id, plugin_instance in self._PLUGINS.items():
            try:
                plugin_instance.before_mainwindow_visible()
            except AttributeError:
                # The plugin doesn't define the hook; nothing to do.
                pass
        if self.splash is not None:
            self.splash.hide()
        # Menu about to show
        for child in self.menuBar().children():
            if isinstance(child, QMenu):
                try:
                    child.aboutToShow.connect(self.update_edit_menu)
                    child.aboutToShow.connect(self.update_search_menu)
                except TypeError:
                    pass
        # Register custom layouts declared by plugins (CUSTOM_LAYOUTS must
        # be a list of layout classes).
        for plugin, plugin_instance in self._PLUGINS.items():
            if hasattr(plugin_instance, 'CUSTOM_LAYOUTS'):
                if isinstance(plugin_instance.CUSTOM_LAYOUTS, list):
                    for custom_layout in plugin_instance.CUSTOM_LAYOUTS:
                        self.layouts.register_layout(
                            self, custom_layout)
                else:
                    logger.info(
                        'Unable to load custom layouts for {}. '
                        'Expecting a list of layout classes but got {}'
                        .format(plugin, plugin_instance.CUSTOM_LAYOUTS)
                    )
        self.layouts.update_layout_menu_actions()
        logger.info("*** End of MainWindow setup ***")
        self.is_starting_up = False
def post_visible_setup(self):
"""Actions to be performed only after the main window's `show` method
was triggered"""
for __, plugin in self._PLUGINS.items():
try:
plugin.on_mainwindow_visible()
except AttributeError:
pass
self.restore_scrollbar_position.emit()
logger.info('Deleting previous Spyder instance LSP logs...')
delete_lsp_log_files()
# Workaround for spyder-ide/spyder#880.
# QDockWidget objects are not painted if restored as floating
# windows, so we must dock them before showing the mainwindow,
# then set them again as floating windows here.
for widget in self.floating_dockwidgets:
widget.setFloating(True)
# Server to maintain just one Spyder instance and open files in it if
# the user tries to start other instances with
# $ spyder foo.py
if (CONF.get('main', 'single_instance') and not self.new_instance
and self.open_files_server):
t = threading.Thread(target=self.start_open_files_server)
t.setDaemon(True)
t.start()
# Connect the window to the signal emitted by the previous server
# when it gets a client connected to it
self.sig_open_external_file.connect(self.open_external_file)
# Hide Internal Console so that people don't use it instead of
# the External or IPython ones
if self.console.dockwidget.isVisible() and DEV is None:
self.console.toggle_view_action.setChecked(False)
self.console.dockwidget.hide()
# Show Help and Consoles by default
plugins_to_show = [self.ipyconsole]
if self.help is not None:
plugins_to_show.append(self.help)
for plugin in plugins_to_show:
if plugin.dockwidget.isVisible():
plugin.dockwidget.raise_()
# Update plugins toggle actions to show the "Switch to" plugin shortcut
self._update_shortcuts_in_panes_menu()
# Process pending events and hide splash before loading the
# previous session.
QApplication.processEvents()
if self.splash is not None:
self.splash.hide()
# TODO: Remove this reference to projects once we can send the command
# line options to the plugins.
if self.open_project:
if not running_in_mac_app():
self.projects.open_project(
self.open_project, workdir=self.init_workdir
)
else:
# Load last project if a project was active when Spyder
# was closed
self.projects.reopen_last_project()
# If no project is active, load last session
if self.projects.get_active_project() is None:
self.editor.setup_open_files(close_previous_files=False)
# Raise the menuBar to the top of the main window widget's stack
# Fixes spyder-ide/spyder#3887.
self.menuBar().raise_()
# Handle DPI scale and window changes to show a restart message.
# Don't activate this functionality on macOS because it's being
# triggered in the wrong situations.
# See spyder-ide/spyder#11846
if not sys.platform == 'darwin':
window = self.window().windowHandle()
window.screenChanged.connect(self.handle_new_screen)
screen = self.window().windowHandle().screen()
self.current_dpi = screen.logicalDotsPerInch()
screen.logicalDotsPerInchChanged.connect(
self.show_dpi_change_message)
# Notify that the setup of the mainwindow was finished
self.is_setting_up = False
self.sig_setup_finished.emit()
def handle_new_screen(self, new_screen):
"""Connect DPI signals for new screen."""
if new_screen is not None:
new_screen_dpi = new_screen.logicalDotsPerInch()
if self.current_dpi != new_screen_dpi:
self.show_dpi_change_message(new_screen_dpi)
else:
new_screen.logicalDotsPerInchChanged.connect(
self.show_dpi_change_message)
def handle_dpi_change_response(self, result, dpi):
"""Handle dpi change message dialog result."""
if self.dpi_change_dismiss_box.isChecked():
self.show_dpi_message = False
self.dpi_change_dismiss_box = None
if result == 0: # Restart button was clicked
# Activate HDPI auto-scaling option since is needed for a
# proper display when using OS scaling
CONF.set('main', 'normal_screen_resolution', False)
CONF.set('main', 'high_dpi_scaling', True)
CONF.set('main', 'high_dpi_custom_scale_factor', False)
self.restart()
else:
# Update current dpi for future checks
self.current_dpi = dpi
    def show_dpi_change_message(self, dpi):
        """Show message to restart Spyder since the DPI scale changed."""
        # Respect a previous "don't show again" choice for this session.
        if not self.show_dpi_message:
            return
        if self.current_dpi != dpi:
            # Check the window state to not show the message if the window
            # is in fullscreen mode.
            window = self.window().windowHandle()
            if (window.windowState() == Qt.WindowFullScreen and
                    sys.platform == 'darwin'):
                return
            # Checkbox letting the user silence this message until restart.
            self.dpi_change_dismiss_box = QCheckBox(
                _("Hide this message during the current session"),
                self
            )
            msgbox = QMessageBox(self)
            msgbox.setIcon(QMessageBox.Warning)
            msgbox.setText(
                _
                ("A monitor scale change was detected. <br><br>"
                 "We recommend restarting Spyder to ensure that it's properly "
                 "displayed. If you don't want to do that, please be sure to "
                 "activate the option<br><br><tt>Enable auto high DPI scaling"
                 "</tt><br><br>in <tt>Preferences > Application > "
                 "Interface</tt>, in case Spyder is not displayed "
                 "correctly.<br><br>"
                 "Do you want to restart Spyder?"))
            msgbox.addButton(_('Restart now'), QMessageBox.NoRole)
            dismiss_button = msgbox.addButton(
                _('Dismiss'), QMessageBox.NoRole)
            msgbox.setCheckBox(self.dpi_change_dismiss_box)
            msgbox.setDefaultButton(dismiss_button)
            # The result (button index) and the new dpi are handled
            # asynchronously by handle_dpi_change_response.
            msgbox.finished.connect(
                lambda result: self.handle_dpi_change_response(result, dpi))
            msgbox.open()
def set_window_title(self):
"""Set window title."""
if DEV is not None:
title = u"Spyder %s (Python %s.%s)" % (__version__,
sys.version_info[0],
sys.version_info[1])
elif running_in_mac_app() or is_pynsist():
title = "Spyder"
else:
title = u"Spyder (Python %s.%s)" % (sys.version_info[0],
sys.version_info[1])
if get_debug_level():
title += u" [DEBUG MODE %d]" % get_debug_level()
if self.window_title is not None:
title += u' -- ' + to_text_string(self.window_title)
# TODO: Remove self.projects reference once there's an API for setting
# window title.
if self.projects is not None:
path = self.projects.get_active_project_path()
if path:
path = path.replace(get_home_dir(), u'~')
title = u'{0} - {1}'.format(path, title)
self.base_title = title
self.setWindowTitle(self.base_title)
    # TODO: To be removed after all actions are moved to their corresponding
    # plugins
    def register_shortcut(self, qaction_or_qshortcut, context, name,
                          add_shortcut_to_tip=True, plugin_name=None):
        """
        Register a shortcut by delegating to the Shortcuts plugin.

        Parameters
        ----------
        qaction_or_qshortcut: QAction or QShortcut
            Object to assign the configured key sequence to.
        context: str
            Shortcut context (e.g. "_" for global, or a plugin section).
        name: str
            Shortcut name as stored in the shortcut preferences.
        add_shortcut_to_tip: bool, optional
            Whether to also display the shortcut in the action's tooltip.
        plugin_name: str, optional
            Name of the plugin the shortcut belongs to, if any.
        """
        self.shortcuts.register_shortcut(
            qaction_or_qshortcut,
            context,
            name,
            add_shortcut_to_tip=add_shortcut_to_tip,
            plugin_name=plugin_name,
        )
# --- Other
    def update_source_menu(self):
        """Update source menu options that vary dynamically."""
        # This is necessary to avoid an error at startup.
        # Fixes spyder-ide/spyder#14901
        try:
            self.editor.refresh_formatter_name()
        except AttributeError:
            # Editor not fully available yet (e.g. during startup).
            pass
    def free_memory(self):
        """Free memory after event."""
        # Force a garbage collection pass to release unreferenced objects.
        gc.collect()
    def plugin_focus_changed(self):
        """Focus has changed from one plugin to another"""
        # Edit/Search menu entries depend on which widget has focus.
        self.update_edit_menu()
        self.update_search_menu()
def show_shortcuts(self, menu):
"""Show action shortcuts in menu."""
menu_actions = menu.actions()
for action in menu_actions:
if getattr(action, '_shown_shortcut', False):
# This is a SpyderAction
if action._shown_shortcut is not None:
action.setShortcut(action._shown_shortcut)
elif action.menu() is not None:
# This is submenu, so we need to call this again
self.show_shortcuts(action.menu())
else:
# We don't need to do anything for other elements
continue
def hide_shortcuts(self, menu):
"""Hide action shortcuts in menu."""
menu_actions = menu.actions()
for action in menu_actions:
if getattr(action, '_shown_shortcut', False):
# This is a SpyderAction
if action._shown_shortcut is not None:
action.setShortcut(QKeySequence())
elif action.menu() is not None:
# This is submenu, so we need to call this again
self.hide_shortcuts(action.menu())
else:
# We don't need to do anything for other elements
continue
    def hide_options_menus(self):
        """Hide options menu when menubar is pressed in macOS."""
        for plugin in self.widgetlist + self.thirdparty_plugins:
            if plugin.CONF_SECTION == 'editor':
                # The editor's options menu lives in its current editorstack.
                editorstack = self.editor.get_current_editorstack()
                editorstack.menu.hide()
            else:
                try:
                    # New API
                    plugin.options_menu.hide()
                except AttributeError:
                    # Old API
                    plugin._options_menu.hide()
    def get_focus_widget_properties(self):
        """
        Get properties of focus widget.

        Returns tuple (widget, properties) where properties is a tuple of
        booleans: (is_console, not_readonly, readwrite_editor); properties
        is None when the focused widget is not an editor or console control.
        """
        from spyder.plugins.editor.widgets.base import TextEditBaseWidget
        from spyder.plugins.ipythonconsole.widgets import ControlWidget
        widget = QApplication.focusWidget()
        textedit_properties = None
        if isinstance(widget, (TextEditBaseWidget, ControlWidget)):
            # ControlWidget is the IPython console's input widget.
            console = isinstance(widget, ControlWidget)
            not_readonly = not widget.isReadOnly()
            readwrite_editor = not_readonly and not console
            textedit_properties = (console, not_readonly, readwrite_editor)
        return widget, textedit_properties
    def update_edit_menu(self):
        """Update edit menu"""
        widget, textedit_properties = self.get_focus_widget_properties()
        if textedit_properties is None: # widget is not an editor/console
            return
        # !!! Below this line, widget is expected to be a QPlainTextEdit
        #     instance
        console, not_readonly, readwrite_editor = textedit_properties
        # Editor has focus and there is no file opened in it
        if (not console and not_readonly and self.editor
                and not self.editor.is_file_opened()):
            return
        # Disabling all actions to begin with
        for child in self.edit_menu.actions():
            child.setEnabled(False)
        # Select All is always available on an editor/console widget.
        self.selectall_action.setEnabled(True)
        # Undo, redo
        self.undo_action.setEnabled( readwrite_editor \
                                     and widget.document().isUndoAvailable() )
        self.redo_action.setEnabled( readwrite_editor \
                                     and widget.document().isRedoAvailable() )
        # Copy, cut, paste, delete
        has_selection = widget.has_selected_text()
        self.copy_action.setEnabled(has_selection)
        self.cut_action.setEnabled(has_selection and not_readonly)
        self.paste_action.setEnabled(not_readonly)
        # Comment, uncomment, indent, unindent...
        if not console and not_readonly:
            # This is the editor and current file is writable
            if self.editor:
                for action in self.editor.edit_menu_actions:
                    action.setEnabled(True)
    def update_search_menu(self):
        """Update search menu"""
        # Disabling all actions except the last one
        # (which is Find in files) to begin with
        for child in self.search_menu.actions()[:-1]:
            child.setEnabled(False)
        widget, textedit_properties = self.get_focus_widget_properties()
        if textedit_properties is None: # widget is not an editor/console
            return
        # !!! Below this line, widget is expected to be a QPlainTextEdit
        #     instance
        console, not_readonly, readwrite_editor = textedit_properties
        # Find actions only trigger an effect in the Editor
        if not console:
            for action in self.search_menu.actions():
                try:
                    action.setEnabled(True)
                except RuntimeError:
                    # The underlying Qt object may already be deleted.
                    pass
        # Disable the replace action for read-only files
        # NOTE(review): assumes the action at index 3 is Replace — verify
        # if the search menu actions list changes.
        if len(self.search_menu_actions) > 3:
            self.search_menu_actions[3].setEnabled(readwrite_editor)
    def createPopupMenu(self):
        """Reimplement Qt method to use the Application plugin's menu."""
        return self.application.get_application_context_menu(parent=self)
    def set_splash(self, message):
        """Set splash message"""
        # No splash screen (e.g. started with --no-splash): nothing to do.
        if self.splash is None:
            return
        if message:
            logger.info(message)
        self.splash.show()
        self.splash.showMessage(message,
                                int(Qt.AlignBottom | Qt.AlignCenter |
                                    Qt.AlignAbsolute),
                                QColor(Qt.white))
        # Let Qt repaint the splash right away.
        QApplication.processEvents()
def closeEvent(self, event):
"""closeEvent reimplementation"""
if self.closing(True):
event.accept()
else:
event.ignore()
    def resizeEvent(self, event):
        """Reimplement Qt method"""
        # Only remember a size that can be restored later, i.e. neither
        # maximized nor fullscreen.
        if not self.isMaximized() and not self.layouts.get_fullscreen_flag():
            self.window_size = self.size()
        QMainWindow.resizeEvent(self, event)
        # To be used by the tour to be able to resize
        self.sig_resized.emit(event)
    def moveEvent(self, event):
        """Reimplement Qt method"""
        # Only remember a position that can be restored later, i.e. neither
        # maximized nor fullscreen.
        if not self.isMaximized() and not self.layouts.get_fullscreen_flag():
            self.window_position = self.pos()
        QMainWindow.moveEvent(self, event)
        # To be used by the tour to be able to move
        self.sig_moved.emit(event)
    def hideEvent(self, event):
        """Reimplement Qt method"""
        try:
            # Mark as visible the plugin that contains the last focused
            # widget before hiding.
            for plugin in (self.widgetlist + self.thirdparty_plugins):
                # TODO: Remove old API
                try:
                    # New API
                    if plugin.get_widget().isAncestorOf(
                            self.last_focused_widget):
                        plugin.change_visibility(True)
                except AttributeError:
                    # Old API
                    if plugin.isAncestorOf(self.last_focused_widget):
                        plugin._visibility_changed(True)
            QMainWindow.hideEvent(self, event)
        except RuntimeError:
            # NOTE(review): presumably raised when underlying Qt objects
            # were already deleted (e.g. during shutdown) — confirm. The
            # event is still forwarded to Qt in that case.
            QMainWindow.hideEvent(self, event)
def change_last_focused_widget(self, old, now):
"""To keep track of to the last focused widget"""
if (now is None and QApplication.activeWindow() is not None):
QApplication.activeWindow().setFocus()
self.last_focused_widget = QApplication.focusWidget()
elif now is not None:
self.last_focused_widget = now
self.previous_focused_widget = old
    def closing(self, cancelable=False):
        """
        Exit tasks.

        Returns True when the window may close; False when the user or a
        plugin cancelled the process (only possible when `cancelable`).
        """
        if self.already_closed or self.is_starting_up:
            return True
        if cancelable and CONF.get('main', 'prompt_on_exit'):
            reply = QMessageBox.critical(self, 'Spyder',
                                         'Do you really want to exit?',
                                         QMessageBox.Yes, QMessageBox.No)
            if reply == QMessageBox.No:
                return False
        # Stop the single-instance open-files server, if it was started.
        if CONF.get('main', 'single_instance') and self.open_files_server:
            self.open_files_server.close()
        # Internal plugins
        for plugin in (self.widgetlist + self.thirdparty_plugins):
            # New API
            try:
                if isinstance(plugin, SpyderDockablePlugin):
                    plugin.close_window()
                if not plugin.on_close(cancelable):
                    return False
            except AttributeError:
                pass
            # Old API
            try:
                plugin._close_window()
                if not plugin.closing_plugin(cancelable):
                    return False
            except AttributeError:
                pass
        # New API: External plugins
        for plugin_name, plugin in self._EXTERNAL_PLUGINS.items():
            try:
                if isinstance(plugin, SpyderDockablePlugin):
                    plugin.close_window()
                if not plugin.on_close(cancelable):
                    return False
            except AttributeError as e:
                logger.error(str(e))
        # Save window settings *after* closing all plugin windows, in order
        # to show them in their previous locations in the next session.
        # Fixes spyder-ide/spyder#12139
        prefix = 'window' + '/'
        self.layouts.save_current_window_settings(prefix)
        self.already_closed = True
        return True
    def add_dockwidget(self, plugin):
        """
        Add a plugin QDockWidget to the main window.

        The plugin is also appended to `self.widgetlist` so it takes part
        in window-wide operations (closing, hiding, etc.).
        """
        try:
            # New API
            if plugin.is_compatible:
                dockwidget, location = plugin.create_dockwidget(self)
                self.addDockWidget(location, dockwidget)
                self.widgetlist.append(plugin)
        except AttributeError:
            # Old API
            if plugin._is_compatible:
                dockwidget, location = plugin._create_dockwidget()
                self.addDockWidget(location, dockwidget)
                self.widgetlist.append(plugin)
    @Slot()
    def global_callback(self):
        """Global callback"""
        widget = QApplication.focusWidget()
        action = self.sender()
        # The triggering action stores the name of the method to invoke
        # in its data() (see create_edit_action in setup).
        callback = from_qvariant(action.data(), to_text_string)
        from spyder.plugins.editor.widgets.base import TextEditBaseWidget
        from spyder.plugins.ipythonconsole.widgets import ControlWidget
        if isinstance(widget, (TextEditBaseWidget, ControlWidget)):
            # Dispatch the action (e.g. undo, copy) to the focused widget.
            getattr(widget, callback)()
        else:
            # Focus is not on an editor/console widget: do nothing.
            return
def redirect_internalshell_stdio(self, state):
if state:
self.console.redirect_stds()
else:
self.console.restore_stds()
    def open_external_console(self, fname, wdir, args, interact, debug, python,
                              python_args, systerm, post_mortem=False):
        """
        Open external console.

        Only the `systerm` (system terminal) mode is handled here; the
        `python` and `post_mortem` parameters are currently unused by
        this method.
        """
        if systerm:
            # Running script in an external system terminal
            try:
                # Use the configured interpreter, or the default Python.
                if CONF.get('main_interpreter', 'default'):
                    executable = get_python_executable()
                else:
                    executable = CONF.get('main_interpreter', 'executable')
                programs.run_python_script_in_terminal(
                        fname, wdir, args, interact, debug, python_args,
                        executable)
            except NotImplementedError:
                QMessageBox.critical(self, _("Run"),
                                     _("Running an external system terminal "
                                       "is not supported on platform %s."
                                       ) % os.name)
    def open_file(self, fname, external=False):
        """
        Open filename with the appropriate application
        Redirect to the right widget (txt -> editor, spydata -> workspace, ...)
        or open file outside Spyder (if extension is not supported)
        """
        fname = to_text_string(fname)
        ext = osp.splitext(fname)[1]
        if encoding.is_text_file(fname):
            self.editor.load(fname)
        elif self.variableexplorer is not None and ext in IMPORT_EXT:
            # Importable data files go to the Variable Explorer.
            self.variableexplorer.import_data(fname)
        elif not external:
            # Unsupported extension: delegate to the OS default application.
            fname = file_uri(fname)
            start_file(fname)
    def open_external_file(self, fname):
        """
        Open external files that can be handled either by the Editor or the
        variable explorer inside Spyder.
        """
        # Check that file exists, resolving relative names against CWD.
        fname = encoding.to_unicode_from_fs(fname)
        if osp.exists(osp.join(CWD, fname)):
            fpath = osp.join(CWD, fname)
        elif osp.exists(fname):
            fpath = fname
        else:
            return
        # Don't open script that starts Spyder at startup.
        # Fixes issue spyder-ide/spyder#14483
        if sys.platform == 'darwin' and 'bin/spyder' in fname:
            return
        if osp.isfile(fpath):
            self.open_file(fpath, external=True)
        elif osp.isdir(fpath):
            # Directories can only be opened as projects, not as files.
            QMessageBox.warning(
                self, _("Error"),
                _('To open <code>{fpath}</code> as a project with Spyder, '
                  'please use <code>spyder -p "{fname}"</code>.')
                .format(fpath=osp.normpath(fpath), fname=fname)
            )
# --- Path Manager
# ------------------------------------------------------------------------
def load_python_path(self):
"""Load path stored in Spyder configuration folder."""
if osp.isfile(self.SPYDER_PATH):
with open(self.SPYDER_PATH, 'r', encoding='utf-8') as f:
path = f.read().splitlines()
self.path = tuple(name for name in path if osp.isdir(name))
if osp.isfile(self.SPYDER_NOT_ACTIVE_PATH):
with open(self.SPYDER_NOT_ACTIVE_PATH, 'r',
encoding='utf-8') as f:
not_active_path = f.read().splitlines()
self.not_active_path = tuple(name for name in not_active_path
if osp.isdir(name))
    def save_python_path(self, new_path_dict):
        """
        Save path in Spyder configuration folder.

        `new_path_dict` is an OrderedDict that has the new paths as keys and
        the state as values. The state is `True` for active and `False` for
        inactive.

        Write errors are logged, not raised; the CONF entry is updated in
        either case.
        """
        path = [p for p in new_path_dict]
        not_active_path = [p for p in new_path_dict if not new_path_dict[p]]
        try:
            encoding.writelines(path, self.SPYDER_PATH)
            encoding.writelines(not_active_path, self.SPYDER_NOT_ACTIVE_PATH)
        except EnvironmentError as e:
            logger.error(str(e))
        CONF.set('main', 'spyder_pythonpath', self.get_spyder_pythonpath())
def get_spyder_pythonpath_dict(self):
"""
Return Spyder PYTHONPATH.
The returned ordered dictionary has the paths as keys and the state
as values. The state is `True` for active and `False` for inactive.
Example:
OrderedDict([('/some/path, True), ('/some/other/path, False)])
"""
self.load_python_path()
path_dict = OrderedDict()
for path in self.path:
path_dict[path] = path not in self.not_active_path
for path in self.project_path:
path_dict[path] = True
return path_dict
def get_spyder_pythonpath(self):
"""
Return Spyder PYTHONPATH.
"""
path_dict = self.get_spyder_pythonpath_dict()
path = [k for k, v in path_dict.items() if v]
return path
    def update_python_path(self, new_path_dict):
        """Update python path on Spyder interpreter and kernels.

        `new_path_dict` maps paths to their active state (bool). The dict is
        persisted only when it differs from the current configuration; the
        in-process `sys.path` is always rebuilt.
        """
        # Load previous path
        path_dict = self.get_spyder_pythonpath_dict()
        # Save path
        if path_dict != new_path_dict:
            # It doesn't include the project_path
            self.save_python_path(new_path_dict)
        # Load new path
        new_path_dict_p = self.get_spyder_pythonpath_dict()  # Includes project
        # Update Spyder interpreter
        for path in path_dict:
            while path in sys.path:
                sys.path.remove(path)
        # Reversed + insert(1, ...) preserves the dict's ordering in sys.path
        # while keeping sys.path[0] (the script dir / '') untouched.
        for path, active in reversed(new_path_dict_p.items()):
            if active:
                sys.path.insert(1, path)
        # Any plugin that needs to do some work based on this signal should
        # connect to it on plugin registration
        self.sig_pythonpath_changed.emit(path_dict, new_path_dict_p)
    @Slot()
    def show_path_manager(self):
        """Show path manager dialog."""
        from spyder.widgets.pathmanager import PathManager
        read_only_path = tuple(self.projects.get_pythonpath())
        dialog = PathManager(self, self.path, read_only_path,
                             self.not_active_path, sync=True)
        # Keep a reference so the non-modal dialog isn't garbage collected.
        self._path_manager = dialog
        dialog.sig_path_changed.connect(self.update_python_path)
        dialog.redirect_stdio.connect(self.redirect_internalshell_stdio)
        dialog.show()
    def pythonpath_changed(self):
        """Project's PYTHONPATH contribution has changed.

        Re-reads the project paths and reapplies the whole PYTHONPATH so the
        interpreter and kernels pick up the change.
        """
        self.project_path = tuple(self.projects.get_pythonpath())
        path_dict = self.get_spyder_pythonpath_dict()
        self.update_python_path(path_dict)
#---- Preferences
def apply_settings(self):
"""Apply main window settings."""
qapp = QApplication.instance()
# Set 'gtk+' as the default theme in Gtk-based desktops
# Fixes spyder-ide/spyder#2036.
if is_gtk_desktop() and ('GTK+' in QStyleFactory.keys()):
try:
qapp.setStyle('gtk+')
except:
pass
default = self.DOCKOPTIONS
if CONF.get('main', 'vertical_tabs'):
default = default|QMainWindow.VerticalTabs
self.setDockOptions(default)
self.apply_panes_settings()
if CONF.get('main', 'use_custom_cursor_blinking'):
qapp.setCursorFlashTime(
CONF.get('main', 'custom_cursor_blinking'))
else:
qapp.setCursorFlashTime(self.CURSORBLINK_OSDEFAULT)
    def apply_panes_settings(self):
        """Update dockwidgets features settings.

        Reapplies dock features and margins for every registered plugin,
        supporting both the new and old plugin APIs.
        """
        for plugin in (self.widgetlist + self.thirdparty_plugins):
            features = plugin.dockwidget.FEATURES
            plugin.dockwidget.setFeatures(features)
            try:
                # New API
                margin = 0
                if CONF.get('main', 'use_custom_margin'):
                    margin = CONF.get('main', 'custom_margin')
                plugin.update_margins(margin)
            except AttributeError:
                # Old API
                plugin._update_margins()
    @Slot()
    def show_preferences(self):
        """Edit Spyder preferences."""
        # The Preferences plugin restores the last saved dialog size.
        self.preferences.open_dialog(self.prefs_dialog_size)
    def set_prefs_size(self, size):
        """Save preferences dialog size.

        `size` is a QSize; it is reused the next time the dialog opens.
        """
        self.prefs_dialog_size = size
# -- Open files server
def start_open_files_server(self):
self.open_files_server.setsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
port = select_port(default_port=OPEN_FILES_PORT)
CONF.set('main', 'open_files_port', port)
self.open_files_server.bind(('127.0.0.1', port))
self.open_files_server.listen(20)
while 1: # 1 is faster than True
try:
req, dummy = self.open_files_server.accept()
except socket.error as e:
# See spyder-ide/spyder#1275 for details on why errno EINTR is
# silently ignored here.
eintr = errno.WSAEINTR if os.name == 'nt' else errno.EINTR
# To avoid a traceback after closing on Windows
if e.args[0] == eintr:
continue
# handle a connection abort on close error
enotsock = (errno.WSAENOTSOCK if os.name == 'nt'
else errno.ENOTSOCK)
if e.args[0] in [errno.ECONNABORTED, enotsock]:
return
raise
fname = req.recv(1024)
fname = fname.decode('utf-8')
self.sig_open_external_file.emit(fname)
req.sendall(b' ')
    # ---- Quit and restart, and reset spyder defaults
    @Slot()
    def reset_spyder(self):
        """
        Quit and reset Spyder and then Restart application.

        Asks for confirmation first; on acceptance, settings are wiped and
        the application restarts.
        """
        answer = QMessageBox.warning(self, _("Warning"),
             _("Spyder will restart and reset to default settings: <br><br>"
               "Do you want to continue?"),
             QMessageBox.Yes | QMessageBox.No)
        if answer == QMessageBox.Yes:
            self.restart(reset=True)
    @Slot()
    def restart(self, reset=False):
        """Wrapper to handle plugins request to restart Spyder.

        With `reset=True` the configuration is reset to defaults before
        restarting.
        """
        self.application.restart(reset=reset)
    # ---- Interactive Tours
    def show_tour(self, index):
        """Show interactive tour.

        `index` selects a tour from `self.tours_available`. Any maximized
        pane is restored first so the tour can highlight the full layout.
        """
        self.layouts.maximize_dockwidget(restore=True)
        frames = self.tours_available[index]
        self.tour.set_tour(index, frames, self)
        self.tour.start_tour()
    # ---- Global Switcher
    def open_switcher(self, symbol=False):
        """Open switcher dialog box.

        Toggles the dialog: if it is already visible it is cleared and
        hidden. With `symbol=True` the search starts in symbol mode ('@').
        """
        if self.switcher is not None and self.switcher.isVisible():
            self.switcher.clear()
            self.switcher.hide()
            return
        if symbol:
            self.switcher.set_search_text('@')
        else:
            self.switcher.set_search_text('')
        self.switcher.setup()
        self.switcher.show()
        # Note: The +6 pixel on the top makes it look better
        # FIXME: Why is this using the toolbars menu? A: To not be on top of
        # the toolbars.
        # Probably toolbars should be taken into account for this 'delta' only
        # when are visible
        delta_top = (self.toolbar.toolbars_menu.geometry().height() +
                     self.menuBar().geometry().height() + 6)
        self.switcher.set_position(delta_top)
    def open_symbolfinder(self):
        """Open symbol list management dialog box.

        Thin wrapper over `open_switcher` in symbol ('@') mode.
        """
        self.open_switcher(symbol=True)
    def create_switcher(self):
        """Create switcher dialog instance.

        Lazily instantiates the global Switcher on first call and returns
        the cached instance afterwards.
        """
        if self.switcher is None:
            from spyder.widgets.switcher import Switcher
            self.switcher = Switcher(self)
        return self.switcher
    @Slot()
    def show_tour_message(self, force=False):
        """
        Show message about starting the tour the first time Spyder starts.

        Shown at most once (the CONF flag is cleared immediately), and never
        under pytest or safe mode unless `force=True`.
        """
        should_show_tour = CONF.get('main', 'show_tour_message')
        if force or (should_show_tour and not running_under_pytest()
                     and not get_safe_mode()):
            CONF.set('main', 'show_tour_message', False)
            self.tour_dialog = tour.OpenTourDialog(
                self, lambda: self.show_tour(DEFAULT_TOUR))
            self.tour_dialog.show()
# --- For OpenGL
def _test_setting_opengl(self, option):
"""Get the current OpenGL implementation in use"""
if option == 'software':
return QCoreApplication.testAttribute(Qt.AA_UseSoftwareOpenGL)
elif option == 'desktop':
return QCoreApplication.testAttribute(Qt.AA_UseDesktopOpenGL)
elif option == 'gles':
return QCoreApplication.testAttribute(Qt.AA_UseOpenGLES)
#==============================================================================
# Utilities for the 'main' function below
#==============================================================================
def create_application():
    """Create application and patch sys.exit.

    Returns the singleton QApplication. Deliberately monkey-patches
    QtWidgets.QApplication, sys.exit and sys.excepthook so that code run
    inside Spyder cannot start a second event loop or kill the process.
    """
    # Our QApplication
    app = qapplication()
    # --- Set application icon
    app_icon = QIcon(get_image_path("spyder"))
    app.setWindowIcon(app_icon)
    # Required for correct icon on GNOME/Wayland:
    if hasattr(app, 'setDesktopFileName'):
        app.setDesktopFileName('spyder')
    #----Monkey patching QApplication
    class FakeQApplication(QApplication):
        """Spyder's fake QApplication"""
        def __init__(self, args):
            self = app  # analysis:ignore
        @staticmethod
        def exec_():
            """Do nothing because the Qt mainloop is already running"""
            pass
    from qtpy import QtWidgets
    QtWidgets.QApplication = FakeQApplication
    # ----Monkey patching sys.exit
    def fake_sys_exit(arg=[]):
        pass
    sys.exit = fake_sys_exit
    # ----Monkey patching sys.excepthook to avoid crashes in PyQt 5.5+
    def spy_excepthook(type_, value, tback):
        sys.__excepthook__(type_, value, tback)
    sys.excepthook = spy_excepthook
    # Removing arguments from sys.argv as in standard Python interpreter
    sys.argv = ['']
    return app
def create_window(app, splash, options, args):
    """
    Create and show Spyder's main window and start QApplication event loop.

    Returns the MainWindow instance after the event loop finishes (or
    immediately under pytest, where the loop is not started).
    """
    # Main window
    main = MainWindow(splash, options)
    try:
        main.setup()
    except BaseException:
        # Shut the internal console's interpreter down before re-raising,
        # so its thread doesn't keep the process alive.
        if main.console is not None:
            try:
                main.console.exit_interpreter()
            except BaseException:
                pass
        raise
    main.pre_visible_setup()
    main.show()
    main.post_visible_setup()
    if main.console:
        namespace = CONF.get('internal_console', 'namespace', {})
        main.console.start_interpreter(namespace)
        main.console.set_namespace_item('spy', Spy(app=app, window=main))
    # Propagate current configurations to all configuration observers
    CONF.notify_all_observers()
    # Don't show icons in menus for Mac
    if sys.platform == 'darwin':
        QCoreApplication.setAttribute(Qt.AA_DontShowIconsInMenus, True)
    # Open external files with our Mac app
    if running_in_mac_app():
        app.sig_open_external_file.connect(main.open_external_file)
        app._has_started = True
        if hasattr(app, '_pending_file_open'):
            if args:
                args = app._pending_file_open + args
            else:
                args = app._pending_file_open
    # Open external files passed as args
    if args:
        for a in args:
            main.open_external_file(a)
    # To give focus again to the last focused widget after restoring
    # the window
    app.focusChanged.connect(main.change_last_focused_widget)
    if not running_under_pytest():
        app.exec_()
    return main
#==============================================================================
# Main
#==============================================================================
def main(options, args):
    """Main function.

    `options` is the parsed CLI options object and `args` the leftover
    file arguments. Drives the whole startup sequence: OpenGL/DPI setup,
    application and splash creation, then the main window.
    """
    # **** For Pytest ****
    if running_under_pytest():
        if CONF.get('main', 'opengl') != 'automatic':
            option = CONF.get('main', 'opengl')
            set_opengl_implementation(option)
        app = create_application()
        window = create_window(app, None, options, None)
        return window
    # **** Handle hide_console option ****
    if options.show_console:
        print("(Deprecated) --show console does nothing, now the default "
              " behavior is to show the console, use --hide-console if you "
              "want to hide it")
    if set_attached_console_visible is not None:
        set_attached_console_visible(not options.hide_console
                                     or options.reset_config_files
                                     or options.reset_to_defaults
                                     or options.optimize
                                     or bool(get_debug_level()))
    # **** Set OpenGL implementation to use ****
    # This attribute must be set before creating the application.
    # See spyder-ide/spyder#11227
    if options.opengl_implementation:
        option = options.opengl_implementation
        set_opengl_implementation(option)
    else:
        if CONF.get('main', 'opengl') != 'automatic':
            option = CONF.get('main', 'opengl')
            set_opengl_implementation(option)
    # **** Set high DPI scaling ****
    # This attribute must be set before creating the application.
    if hasattr(Qt, 'AA_EnableHighDpiScaling'):
        QCoreApplication.setAttribute(Qt.AA_EnableHighDpiScaling,
                                      CONF.get('main', 'high_dpi_scaling'))
    # **** Set debugging info ****
    setup_logging(options)
    # **** Create the application ****
    app = create_application()
    # **** Create splash screen ****
    splash = create_splash_screen()
    if splash is not None:
        splash.show()
        splash.showMessage(
            _("Initializing..."),
            int(Qt.AlignBottom | Qt.AlignCenter | Qt.AlignAbsolute),
            QColor(Qt.white)
        )
        QApplication.processEvents()
    if options.reset_to_defaults:
        # Reset Spyder settings to defaults
        CONF.reset_to_defaults()
        return
    elif options.optimize:
        # Optimize the whole Spyder's source code directory
        import spyder
        programs.run_python_script(module="compileall",
                                   args=[spyder.__path__[0]], p_args=['-O'])
        return
    # **** Read faulthandler log file ****
    faulthandler_file = get_conf_path('faulthandler.log')
    previous_crash = ''
    if osp.exists(faulthandler_file):
        with open(faulthandler_file, 'r') as f:
            previous_crash = f.read()
        # Remove file to not pick it up for next time.
        try:
            dst = get_conf_path('faulthandler.log.old')
            shutil.move(faulthandler_file, dst)
        except Exception:
            pass
    CONF.set('main', 'previous_crash', previous_crash)
    # **** Set color for links ****
    set_links_color(app)
    # **** Create main window ****
    mainwindow = None
    try:
        if PY3 and options.report_segfault:
            import faulthandler
            with open(faulthandler_file, 'w') as f:
                faulthandler.enable(file=f)
                mainwindow = create_window(app, splash, options, args)
        else:
            mainwindow = create_window(app, splash, options, args)
    except FontError:
        QMessageBox.information(None, "Spyder",
                                "Spyder was unable to load the <i>Spyder 3</i> "
                                "icon theme. That's why it's going to fallback to the "
                                "theme used in Spyder 2.<br><br>"
                                "For that, please close this window and start Spyder again.")
        CONF.set('appearance', 'icon_theme', 'spyder 2')
    if mainwindow is None:
        # An exception occurred
        if splash is not None:
            splash.hide()
        return
    # Use the real sys.exit (create_application patched sys.exit to a no-op).
    ORIGINAL_SYS_EXIT()
if __name__ == "__main__":
    # Bug fix: `main` requires the parsed CLI options and file arguments that
    # Spyder's launcher supplies, so the previous bare `main()` call always
    # raised TypeError. Fail with an actionable message instead.
    sys.exit("This module is not meant to be run directly. "
             "Start Spyder with the 'spyder' command instead.")
| 40.171642 | 85 | 0.566019 |
from __future__ import print_function
from collections import OrderedDict
from enum import Enum
import errno
import gc
import logging
import os
import os.path as osp
import shutil
import signal
import socket
import glob
import sys
import threading
import traceback
from spyder import requirements
requirements.check_path()
requirements.check_qt()
requirements.check_spyder_kernels()
from qtpy.compat import from_qvariant
from qtpy.QtCore import (QCoreApplication, Qt, QTimer, Signal, Slot,
qInstallMessageHandler)
from qtpy.QtGui import QColor, QIcon, QKeySequence
from qtpy.QtWidgets import (QAction, QApplication, QMainWindow, QMenu,
QMessageBox, QShortcut, QStyleFactory, QCheckBox)
from qtpy import QtSvg
from qtpy import QtWebEngineWidgets
from qtawesome.iconic_font import FontError
# are needed in MainWindow to speed up perceived startup time (i.e. the time
# from clicking the Spyder icon to showing the splash screen).
#==============================================================================
from spyder import __version__
from spyder import dependencies
from spyder.app import tour
from spyder.app.utils import (create_splash_screen, delete_lsp_log_files,
qt_message_handler, set_links_color,
setup_logging, set_opengl_implementation, Spy)
from spyder.config.base import (_, DEV, get_conf_path, get_debug_level,
get_home_dir, get_module_source_path,
get_safe_mode, is_pynsist, running_in_mac_app,
running_under_pytest, STDERR)
from spyder.utils.image_path_manager import get_image_path
from spyder.config.gui import is_dark_font_color
from spyder.config.main import OPEN_FILES_PORT
from spyder.config.manager import CONF
from spyder.config.utils import IMPORT_EXT, is_gtk_desktop
from spyder.otherplugins import get_spyderplugins_mods
from spyder.py3compat import configparser as cp, PY3, to_text_string
from spyder.utils import encoding, programs
from spyder.utils.icon_manager import ima
from spyder.utils.misc import (select_port, getcwd_or_home,
get_python_executable)
from spyder.utils.palette import QStylePalette
from spyder.utils.qthelpers import (create_action, add_actions, file_uri,
qapplication, start_file)
from spyder.utils.stylesheet import APP_STYLESHEET
from spyder.app.solver import (
find_external_plugins, find_internal_plugins, solve_plugin_dependencies)
# Spyder API Imports
from spyder.api.exceptions import SpyderAPIError
from spyder.api.plugins import Plugins, SpyderPluginV2, SpyderDockablePlugin
#==============================================================================
# Windows only local imports
#==============================================================================
# Windows-only helpers; remain None on other platforms.
set_attached_console_visible = None
is_attached_console_visible = None
set_windows_appusermodelid = None
if os.name == 'nt':
    from spyder.utils.windows import (set_attached_console_visible,
                                      set_windows_appusermodelid)
#==============================================================================
# Constants
#==============================================================================
# Module logger
logger = logging.getLogger(__name__)
# Keeping a reference to the original sys.exit before patching it
ORIGINAL_SYS_EXIT = sys.exit
# Get the cwd before initializing WorkingDirectory, which sets it to the one
# used in the last session
CWD = getcwd_or_home()
# Set the index for the default tour
DEFAULT_TOUR = 0
#==============================================================================
# Install Qt message handler
#==============================================================================
qInstallMessageHandler(qt_message_handler)
#==============================================================================
# Main Window
#==============================================================================
class MainWindow(QMainWindow):
    """Spyder main window."""
    # Default dock behavior for all panes.
    DOCKOPTIONS = (
        QMainWindow.AllowTabbedDocks | QMainWindow.AllowNestedDocks |
        QMainWindow.AnimatedDocks
    )
    # Config-folder files persisting the user's PYTHONPATH entries.
    SPYDER_PATH = get_conf_path('path')
    SPYDER_NOT_ACTIVE_PATH = get_conf_path('not_active_path')
    # Number of built-in window layouts.
    DEFAULT_LAYOUTS = 4
    # Signals
    restore_scrollbar_position = Signal()
    sig_setup_finished = Signal()
    all_actions_defined = Signal()
    # type: (OrderedDict, OrderedDict) -- (old path dict, new path dict)
    sig_pythonpath_changed = Signal(object, object)
    sig_main_interpreter_changed = Signal()
    sig_open_external_file = Signal(str)
    sig_resized = Signal("QResizeEvent")     # Related to interactive tour
    sig_moved = Signal("QMoveEvent")         # Related to interactive tour
    sig_layout_setup_ready = Signal(object)  # Related to default layouts
# --- Plugin handling methods
# ------------------------------------------------------------------------
def get_plugin(self, plugin_name, error=True):
for name, plugin in self._PLUGINS.items():
if plugin_name == name:
return plugin
else:
if error:
raise SpyderAPIError(
'Plugin "{}" not found!'.format(plugin_name))
else:
return None
def show_status_message(self, message, timeout):
status_bar = self.statusBar()
if status_bar.isVisible():
status_bar.showMessage(message, timeout)
    def show_plugin_compatibility_message(self, message):
        """Show a non-modal dialog with a plugin-compatibility *message*.

        WA_DeleteOnClose lets Qt dispose of the dialog when dismissed.
        """
        messageBox = QMessageBox(self)
        messageBox.setWindowModality(Qt.NonModal)
        messageBox.setAttribute(Qt.WA_DeleteOnClose)
        messageBox.setWindowTitle(_('Compatibility Check'))
        messageBox.setText(message)
        messageBox.setStandardButtons(QMessageBox.Ok)
        messageBox.show()
def add_plugin(self, plugin, external=False):
self._PLUGINS[plugin.NAME] = plugin
if external:
self._EXTERNAL_PLUGINS[plugin.NAME] = plugin
else:
self._INTERNAL_PLUGINS[plugin.NAME] = plugin
def register_plugin(self, plugin, external=False, omit_conf=False):
self.set_splash(_("Loading {}...").format(plugin.get_name()))
logger.info("Loading {}...".format(plugin.NAME))
# Check plugin compatibility
is_compatible, message = plugin.check_compatibility()
plugin.is_compatible = is_compatible
plugin.get_description()
if not is_compatible:
self.show_compatibility_message(message)
return
# Signals
plugin.sig_exception_occurred.connect(self.handle_exception)
plugin.sig_free_memory_requested.connect(self.free_memory)
plugin.sig_quit_requested.connect(self.close)
plugin.sig_restart_requested.connect(self.restart)
plugin.sig_redirect_stdio_requested.connect(
self.redirect_internalshell_stdio)
plugin.sig_status_message_requested.connect(self.show_status_message)
if isinstance(plugin, SpyderDockablePlugin):
plugin.sig_focus_changed.connect(self.plugin_focus_changed)
plugin.sig_switch_to_plugin_requested.connect(
self.switch_to_plugin)
plugin.sig_update_ancestor_requested.connect(
lambda: plugin.set_ancestor(self))
# Register plugin
plugin._register(omit_conf=omit_conf)
plugin.register()
if isinstance(plugin, SpyderDockablePlugin):
# Add dockwidget
self.add_dockwidget(plugin)
# Update margins
margin = 0
if CONF.get('main', 'use_custom_margin'):
margin = CONF.get('main', 'custom_margin')
plugin.update_margins(margin)
self.add_plugin(plugin, external=external)
logger.info("Registering shortcuts for {}...".format(plugin.NAME))
for action_name, action in plugin.get_actions().items():
context = (getattr(action, 'shortcut_context', plugin.NAME)
or plugin.NAME)
if getattr(action, 'register_shortcut', True):
if isinstance(action_name, Enum):
action_name = action_name.value
self.register_shortcut(action, context, action_name)
if isinstance(plugin, SpyderDockablePlugin):
try:
context = '_'
name = 'switch to {}'.format(plugin.CONF_SECTION)
shortcut = CONF.get_shortcut(context, name,
plugin_name=plugin.CONF_SECTION)
except (cp.NoSectionError, cp.NoOptionError):
shortcut = None
sc = QShortcut(QKeySequence(), self,
lambda: self.switch_to_plugin(plugin))
sc.setContext(Qt.ApplicationShortcut)
plugin._shortcut = sc
self.register_shortcut(sc, context, name)
self.register_shortcut(plugin.toggle_view_action, context, name)
    def unregister_plugin(self, plugin):
        """Remove *plugin* from the main window: disconnect its signals,
        unregister its shortcuts and detach its dockwidget."""
        logger.info("Unloading {}...".format(plugin.NAME))
        # Disconnect all slots
        signals = [
            plugin.sig_quit_requested,
            plugin.sig_redirect_stdio_requested,
            plugin.sig_status_message_requested,
        ]
        for sig in signals:
            try:
                sig.disconnect()
            except TypeError:
                # Qt raises TypeError when a signal has no connections.
                pass
        # Unregister shortcuts for actions
        logger.info("Unregistering shortcuts for {}...".format(plugin.NAME))
        for action_name, action in plugin.get_actions().items():
            context = (getattr(action, 'shortcut_context', plugin.NAME)
                       or plugin.NAME)
            self.shortcuts.unregister_shortcut(action, context, action_name)
        # Unregister switch to shortcut
        shortcut = None
        try:
            context = '_'
            name = 'switch to {}'.format(plugin.CONF_SECTION)
            shortcut = CONF.get_shortcut(context, name,
                                         plugin_name=plugin.CONF_SECTION)
        except Exception:
            pass
        if shortcut is not None:
            self.shortcuts.unregister_shortcut(
                plugin._shortcut,
                context,
                "Switch to {}".format(plugin.CONF_SECTION),
            )
        # Remove dockwidget
        logger.info("Removing {} dockwidget...".format(plugin.NAME))
        self.remove_dockwidget(plugin)
        plugin.unregister()
        plugin._unregister()
    def create_plugin_conf_widget(self, plugin):
        """Create and return a config widget for *plugin* bound to the
        Preferences dialog, or None when the plugin has no config widget
        class or no dialog instance exists."""
        config_dialog = self.prefs_dialog_instance
        if plugin.CONF_WIDGET_CLASS is not None and config_dialog is not None:
            conf_widget = plugin.CONF_WIDGET_CLASS(plugin, config_dialog)
            conf_widget.initialize()
            return conf_widget
    @property
    def last_plugin(self):
        """Last focused dockable plugin, as tracked by the Layouts plugin."""
        # Needed to prevent errors with the old API at
        # spyder/plugins/base::_switch_to_plugin
        return self.layouts.get_last_plugin()
    def maximize_dockwidget(self, restore=False):
        """Delegate pane (un)maximization to the Layouts plugin."""
        self.layouts.maximize_dockwidget(restore=restore)
    def switch_to_plugin(self, plugin, force_focus=None):
        """Give visibility and (optionally) focus to *plugin*.

        Un-maximizes any other maximized pane first. Each step tries the
        new plugin API and falls back to the old one on AttributeError.
        """
        last_plugin = self.last_plugin
        try:
            # New API
            if (last_plugin is not None
                    and last_plugin.get_widget().is_maximized
                    and last_plugin is not plugin):
                self.layouts.maximize_dockwidget()
        except AttributeError:
            # Old API
            if (last_plugin is not None and self.last_plugin._ismaximized
                    and last_plugin is not plugin):
                self.layouts.maximize_dockwidget()
        try:
            # New API
            if not plugin.toggle_view_action.isChecked():
                plugin.toggle_view_action.setChecked(True)
                plugin.get_widget().is_visible = False
        except AttributeError:
            # Old API
            if not plugin._toggle_view_action.isChecked():
                plugin._toggle_view_action.setChecked(True)
                plugin._widget._is_visible = False
        plugin.change_visibility(True, force_focus=force_focus)
def remove_dockwidget(self, plugin):
self.removeDockWidget(plugin.dockwidget)
try:
self.widgetlist.remove(plugin)
except ValueError:
pass
    def tabify_plugins(self, first, second):
        """Stack *second*'s dockwidget on top of *first*'s as tabs."""
        self.tabifyDockWidget(first.dockwidget, second.dockwidget)
    def tabify_plugin(self, plugin, default=None):
        """Tabify *plugin* next to one of its TABIFY targets (or *default*).

        Returns False when the plugin explicitly opts out of tabifying,
        True otherwise.
        """
        def tabify_helper(plugin, next_to_plugins):
            # Try each candidate in order; stop at the first that works.
            for next_to_plugin in next_to_plugins:
                try:
                    self.tabify_plugins(next_to_plugin, plugin)
                    break
                except SpyderAPIError as err:
                    logger.error(err)
        # If TABIFY not defined use the [default]
        tabify = getattr(plugin, 'TABIFY', [default])
        if not isinstance(tabify, list):
            next_to_plugins = [tabify]
        else:
            next_to_plugins = tabify
        # Check if TABIFY is not a list with None as unique value or a default
        # list
        if tabify in [[None], []]:
            return False
        # Get the actual plugins from the names
        next_to_plugins = [self.get_plugin(p) for p in next_to_plugins]
        # First time plugin starts
        if plugin.get_conf('first_time', True):
            if (isinstance(plugin, SpyderDockablePlugin)
                    and plugin.NAME != Plugins.Console):
                logger.info(
                    "Tabify {} dockwidget for the first time...".format(
                        plugin.NAME))
                tabify_helper(plugin, next_to_plugins)
            plugin.set_conf('enable', True)
            plugin.set_conf('first_time', False)
        else:
            # This is needed to ensure new plugins are placed correctly
            # without the need for a layout reset.
            logger.info("Tabify {} dockwidget...".format(plugin.NAME))
            # Check if plugin has no other dockwidgets in the same position
            if not bool(self.tabifiedDockWidgets(plugin.dockwidget)):
                tabify_helper(plugin, next_to_plugins)
        return True
    def handle_exception(self, error_data):
        """Forward a plugin's exception report to the internal console."""
        if self.console:
            self.console.handle_exception(error_data)
    def __init__(self, splash=None, options=None):
        """Initialize the main window.

        Parameters
        ----------
        splash: QSplashScreen or None
            Splash screen shown during startup.
        options: object
            Parsed CLI options (working_directory, profile, multithreaded,
            new_instance, project, window_title, ...).
        """
        QMainWindow.__init__(self)
        qapp = QApplication.instance()
        if running_under_pytest():
            self._proxy_style = None
        else:
            from spyder.utils.qthelpers import SpyderProxyStyle
            # None is needed, see: https://bugreports.qt.io/browse/PYSIDE-922
            self._proxy_style = SpyderProxyStyle(None)
        # Enabling scaling for high dpi
        qapp.setAttribute(Qt.AA_UseHighDpiPixmaps)
        self.default_style = str(qapp.style().objectName())
        self.init_workdir = options.working_directory
        self.profile = options.profile
        self.multithreaded = options.multithreaded
        self.new_instance = options.new_instance
        if options.project is not None and not running_in_mac_app():
            self.open_project = osp.normpath(osp.join(CWD, options.project))
        else:
            self.open_project = None
        self.window_title = options.window_title
        logger.info("Start of MainWindow constructor")
        # Quit the Qt event loop cleanly on SIGTERM (and SIGINT outside DEV).
        def signal_handler(signum, frame=None):
            sys.stdout.write('Handling signal: %s\n' % signum)
            sys.stdout.flush()
            QApplication.quit()
        if os.name == "nt":
            try:
                import win32api
                win32api.SetConsoleCtrlHandler(signal_handler, True)
            except ImportError:
                pass
        else:
            signal.signal(signal.SIGTERM, signal_handler)
            if not DEV:
                # Make spyder quit when presing ctrl+C in the console
                # In DEV Ctrl+C doesn't quit, because it helps to
                signal.signal(signal.SIGINT, signal_handler)
        # macOS-specific stylesheet.
        if sys.platform == 'darwin':
            spy_path = get_module_source_path('spyder')
            img_path = osp.join(spy_path, 'images')
            mac_style = open(osp.join(spy_path, 'app', 'mac_stylesheet.qss')).read()
            mac_style = mac_style.replace('$IMAGE_PATH', img_path)
            self.setStyleSheet(mac_style)
        # Shortcut and PYTHONPATH bookkeeping.
        self.shortcut_data = []
        self.path = ()
        self.not_active_path = ()
        self.project_path = ()
        # Plugin registries, keyed by plugin NAME.
        self._APPLICATION_TOOLBARS = OrderedDict()
        self._STATUS_WIDGETS = OrderedDict()
        self._PLUGINS = OrderedDict()
        self._EXTERNAL_PLUGINS = OrderedDict()
        self._INTERNAL_PLUGINS = OrderedDict()
        self._INTERNAL_PLUGINS_MAPPING = {
            'console': Plugins.Console,
            'maininterpreter': Plugins.MainInterpreter,
            'outlineexplorer': Plugins.OutlineExplorer,
            'variableexplorer': Plugins.VariableExplorer,
            'ipyconsole': Plugins.IPythonConsole,
            'workingdirectory': Plugins.WorkingDirectory,
            'projects': Plugins.Projects,
            'findinfiles': Plugins.Find,
            'layouts': Plugins.Layout,
        }
        self.thirdparty_plugins = []
        # Tour, switcher and preferences-dialog state.
        self.tour = None
        self.tours_available = None
        self.tour_dialog = None
        self.switcher = None
        self.prefs_dialog_size = None
        self.prefs_dialog_instance = None
        # Actions, menus and toolbars filled in during setup().
        self.undo_action = None
        self.redo_action = None
        self.copy_action = None
        self.cut_action = None
        self.paste_action = None
        self.selectall_action = None
        self.edit_menu = None
        self.edit_menu_actions = []
        self.search_menu = None
        self.search_menu_actions = []
        self.source_menu = None
        self.source_menu_actions = []
        self.run_menu = None
        self.run_menu_actions = []
        self.debug_menu = None
        self.debug_menu_actions = []
        self.main_toolbar = None
        self.main_toolbar_actions = []
        self.file_toolbar = None
        self.file_toolbar_actions = []
        self.run_toolbar = None
        self.run_toolbar_actions = []
        self.debug_toolbar = None
        self.debug_toolbar_actions = []
        self.menus = []
        if running_under_pytest():
            CONF.set('main', 'show_internal_errors', False)
        self.CURSORBLINK_OSDEFAULT = QApplication.cursorFlashTime()
        if set_windows_appusermodelid != None:
            res = set_windows_appusermodelid()
            logger.info("appusermodelid: %s", res)
        # CI smoke test: auto-quit after 30 seconds.
        test_app = os.environ.get('TEST_CI_APP')
        if test_app is not None:
            app = qapplication()
            timer_shutdown_time = 30000
            self.timer_shutdown = QTimer(self)
            self.timer_shutdown.timeout.connect(app.quit)
            self.timer_shutdown.start(timer_shutdown_time)
        self.splash = splash
        if CONF.get('main', 'current_version', '') != __version__:
            CONF.set('main', 'current_version', __version__)
        # Window/session state.
        self.widgetlist = []
        self.already_closed = False
        self.is_starting_up = True
        self.is_setting_up = True
        self.floating_dockwidgets = []
        self.window_size = None
        self.window_position = None
        self.last_focused_widget = None
        self.previous_focused_widget = None
        self.show_dpi_message = True
        # Socket used by the single-instance "open files" server; creating it
        # can fail on Windows with a broken winsock configuration.
        if os.name == 'nt':
            try:
                self.open_files_server = socket.socket(socket.AF_INET,
                                                       socket.SOCK_STREAM,
                                                       socket.IPPROTO_TCP)
            except OSError:
                self.open_files_server = None
                QMessageBox.warning(None, "Spyder",
                         _("An error occurred while creating a socket needed "
                           "by Spyder. Please, try to run as an Administrator "
                           "from cmd.exe the following command and then "
                           "restart your computer: <br><br><span "
                           "style=\'color: {color}\'><b>netsh winsock reset "
                           "</b></span><br>").format(
                               color=QStylePalette.COLOR_BACKGROUND_4))
        else:
            self.open_files_server = socket.socket(socket.AF_INET,
                                                   socket.SOCK_STREAM,
                                                   socket.IPPROTO_TCP)
        self.sig_setup_finished.connect(self.show_tour_message)
        self.apply_settings()
        logger.info("End of MainWindow constructor")
    def _update_shortcuts_in_panes_menu(self, show=True):
        """Assign (show=True) or clear (show=False) the "switch to" shortcut
        on every dockable plugin's toggle-view action, so the Panes menu
        displays the shortcuts only while it is open."""
        for plugin_id, plugin in self._PLUGINS.items():
            if isinstance(plugin, SpyderDockablePlugin):
                try:
                    # New API
                    action = plugin.toggle_view_action
                except AttributeError:
                    # Old API
                    action = plugin._toggle_view_action
                if show:
                    section = plugin.CONF_SECTION
                    try:
                        context = '_'
                        name = 'switch to {}'.format(section)
                        shortcut = CONF.get_shortcut(
                            context, name, plugin_name=section)
                    except (cp.NoSectionError, cp.NoOptionError):
                        shortcut = QKeySequence()
                else:
                    shortcut = QKeySequence()
                action.setShortcut(shortcut)
    def setup(self):
        """
        Set up the main window: apply theme, register all plugins (new API,
        non-migrated and old third-party ones), and build menus and toolbars.

        Called once, before the window is shown.
        """
        # Delayed import to avoid pulling sphinx machinery at module load.
        from spyder.plugins.help.utils.sphinxify import CSS_PATH, DARK_CSS_PATH
        logger.info("*** Start of MainWindow setup ***")
        logger.info("Updating PYTHONPATH")
        path_dict = self.get_spyder_pythonpath_dict()
        self.update_python_path(path_dict)
        # --- Theme: pick stylesheet and Help CSS based on the UI theme.
        logger.info("Applying theme configuration...")
        ui_theme = CONF.get('appearance', 'ui_theme')
        color_scheme = CONF.get('appearance', 'selected')
        if ui_theme == 'dark':
            if not running_under_pytest():
                qapp = QApplication.instance()
                qapp.setStyle(self._proxy_style)
            dark_qss = str(APP_STYLESHEET)
            self.setStyleSheet(dark_qss)
            self.statusBar().setStyleSheet(dark_qss)
            css_path = DARK_CSS_PATH
        elif ui_theme == 'light':
            if not running_under_pytest():
                qapp = QApplication.instance()
                qapp.setStyle(self._proxy_style)
            light_qss = str(APP_STYLESHEET)
            self.setStyleSheet(light_qss)
            self.statusBar().setStyleSheet(light_qss)
            css_path = CSS_PATH
        elif ui_theme == 'automatic':
            # Infer dark/light from the selected editor color scheme.
            if not is_dark_font_color(color_scheme):
                if not running_under_pytest():
                    qapp = QApplication.instance()
                    qapp.setStyle(self._proxy_style)
                dark_qss = str(APP_STYLESHEET)
                self.setStyleSheet(dark_qss)
                self.statusBar().setStyleSheet(dark_qss)
                css_path = DARK_CSS_PATH
            else:
                light_qss = str(APP_STYLESHEET)
                self.setStyleSheet(light_qss)
                self.statusBar().setStyleSheet(light_qss)
                css_path = CSS_PATH
        CONF.set('appearance', 'css_path', css_path)
        # --- Status bar
        status = self.statusBar()
        status.setObjectName("StatusBar")
        status.showMessage(_("Welcome to Spyder!"), 5000)
        logger.info("Loading switcher...")
        self.create_switcher()
        # --- Internal Console configuration (read by its plugin on startup).
        message = _(
            "Spyder Internal Console\n\n"
            "This console is used to report application\n"
            "internal errors and to inspect Spyder\n"
            "internals with the following commands:\n"
            "  spy.app, spy.window, dir(spy)\n\n"
            "Please don't use it to run your code\n\n"
        )
        CONF.set('internal_console', 'message', message)
        CONF.set('internal_console', 'multithreaded', self.multithreaded)
        CONF.set('internal_console', 'profile', self.profile)
        CONF.set('internal_console', 'commands', [])
        CONF.set('internal_console', 'namespace', {})
        CONF.set('internal_console', 'show_internal_errors', True)
        # Working directory initialization
        CONF.set('workingdir', 'init_workdir', self.init_workdir)
        # Load and register internal and external plugins
        external_plugins = find_external_plugins()
        internal_plugins = find_internal_plugins()
        all_plugins = external_plugins.copy()
        all_plugins.update(internal_plugins.copy())
        # Determine 'enable' config for the plugins that have it
        enabled_plugins = {}
        for plugin in all_plugins.values():
            plugin_name = plugin.NAME
            # Internal plugins use a mapped config section name.
            plugin_main_attribute_name = (
                self._INTERNAL_PLUGINS_MAPPING[plugin_name]
                if plugin_name in self._INTERNAL_PLUGINS_MAPPING
                else plugin_name)
            try:
                if CONF.get(plugin_main_attribute_name, "enable"):
                    enabled_plugins[plugin_name] = plugin
            except (cp.NoOptionError, cp.NoSectionError):
                # No 'enable' option: the plugin is always enabled.
                enabled_plugins[plugin_name] = plugin
        # Get ordered list of plugins classes and instantiate them
        plugin_deps = solve_plugin_dependencies(list(enabled_plugins.values()))
        for plugin_class in plugin_deps:
            plugin_name = plugin_class.NAME
            # Non-migrated plugins
            if plugin_name in [
                    Plugins.Editor,
                    Plugins.IPythonConsole]:
                if plugin_name == Plugins.IPythonConsole:
                    plugin_instance = plugin_class(self)
                    plugin_instance.sig_exception_occurred.connect(
                        self.handle_exception)
                else:
                    plugin_instance = plugin_class(self)
                plugin_instance.register_plugin()
                self.add_plugin(plugin_instance)
                self.preferences.register_plugin_preferences(
                    plugin_instance)
            # Migrated or new plugins
            elif plugin_name in [
                    Plugins.MainMenu,
                    Plugins.OnlineHelp,
                    Plugins.Toolbar,
                    Plugins.Preferences,
                    Plugins.Appearance,
                    Plugins.Run,
                    Plugins.Shortcuts,
                    Plugins.StatusBar,
                    Plugins.Completions,
                    Plugins.OutlineExplorer,
                    Plugins.Console,
                    Plugins.MainInterpreter,
                    Plugins.Breakpoints,
                    Plugins.History,
                    Plugins.Profiler,
                    Plugins.Explorer,
                    Plugins.Help,
                    Plugins.Plots,
                    Plugins.VariableExplorer,
                    Plugins.Application,
                    Plugins.Find,
                    Plugins.Pylint,
                    Plugins.WorkingDirectory,
                    Plugins.Projects,
                    Plugins.Layout]:
                plugin_instance = plugin_class(self, configuration=CONF)
                self.register_plugin(plugin_instance)
                # TODO: Check thirdparty attribute usage
                # For now append plugins to the thirdparty attribute as was
                # being done
                if plugin_name in [
                        Plugins.Breakpoints,
                        Plugins.Profiler,
                        Plugins.Pylint]:
                    self.thirdparty_plugins.append(plugin_instance)
            # Load external_plugins adding their dependencies
            elif (issubclass(plugin_class, SpyderPluginV2) and
                  plugin_class.NAME in external_plugins):
                try:
                    if plugin_class.CONF_FILE:
                        CONF.register_plugin(plugin_class)
                    plugin_instance = plugin_class(
                        self,
                        configuration=CONF,
                    )
                    self.register_plugin(plugin_instance, external=True,
                                         omit_conf=plugin_class.CONF_FILE)
                    # These attributes come from spyder.app.solver to add
                    # plugins to the dependencies dialog
                    if not running_under_pytest():
                        module = plugin_class._spyder_module_name
                        package_name = plugin_class._spyder_package_name
                        version = plugin_class._spyder_version
                        description = plugin_instance.get_description()
                        dependencies.add(
                            module, package_name, description, version, None,
                            kind=dependencies.PLUGIN)
                except Exception as error:
                    # A broken external plugin must not abort startup.
                    print("%s: %s" % (plugin_class, str(error)), file=STDERR)
                    traceback.print_exc(file=STDERR)
        # --- Old (pre-new-API) third-party plugins.
        self.set_splash(_("Loading old third-party plugins..."))
        for mod in get_spyderplugins_mods():
            try:
                plugin = mod.PLUGIN_CLASS(self)
                if plugin.check_compatibility()[0]:
                    if hasattr(plugin, 'CONFIGWIDGET_CLASS'):
                        self.preferences.register_plugin_preferences(plugin)
                    if hasattr(plugin, 'COMPLETION_PROVIDER_NAME'):
                        self.completions.register_completion_plugin(plugin)
                    else:
                        self.thirdparty_plugins.append(plugin)
                        plugin.register_plugin()
                    # Add to dependencies dialog
                    module = mod.__name__
                    name = module.replace('_', '-')
                    if plugin.DESCRIPTION:
                        description = plugin.DESCRIPTION
                    else:
                        description = plugin.get_plugin_title()
                    dependencies.add(module, name, description,
                                     '', None, kind=dependencies.PLUGIN)
            except TypeError:
                # Fixes spyder-ide/spyder#13977
                pass
            except Exception as error:
                print("%s: %s" % (mod, str(error)), file=STDERR)
                traceback.print_exc(file=STDERR)
        # Set window title
        self.set_window_title()
        # Menus
        # TODO: Remove when all menus are migrated to use the Main Menu Plugin
        logger.info("Creating Menus...")
        from spyder.api.widgets.menus import SpyderMenu
        from spyder.plugins.mainmenu.api import (
            ApplicationMenus, HelpMenuSections, ToolsMenuSections,
            FileMenuSections)
        mainmenu = self.mainmenu
        self.edit_menu = mainmenu.get_application_menu("edit_menu")
        self.search_menu = mainmenu.get_application_menu("search_menu")
        self.source_menu = mainmenu.get_application_menu("source_menu")
        self.source_menu.aboutToShow.connect(self.update_source_menu)
        self.run_menu = mainmenu.get_application_menu("run_menu")
        self.debug_menu = mainmenu.get_application_menu("debug_menu")
        # Switcher shortcuts
        self.file_switcher_action = create_action(
            self,
            _('File switcher...'),
            icon=ima.icon('filelist'),
            tip=_('Fast switch between files'),
            triggered=self.open_switcher,
            context=Qt.ApplicationShortcut)
        self.register_shortcut(self.file_switcher_action, context="_",
                               name="File switcher")
        self.symbol_finder_action = create_action(
            self, _('Symbol finder...'),
            icon=ima.icon('symbol_find'),
            tip=_('Fast symbol search in file'),
            triggered=self.open_symbolfinder,
            context=Qt.ApplicationShortcut)
        self.register_shortcut(self.symbol_finder_action, context="_",
                               name="symbol finder", add_shortcut_to_tip=True)
        def create_edit_action(text, tr_text, icon):
            # Helper: build an Editor edit action whose callback name is
            # derived from `text` (e.g. 'Select All' -> 'selectAll').
            textseq = text.split(' ')
            method_name = textseq[0].lower()+"".join(textseq[1:])
            action = create_action(self, tr_text,
                                   icon=icon,
                                   triggered=self.global_callback,
                                   data=method_name,
                                   context=Qt.WidgetShortcut)
            self.register_shortcut(action, "Editor", text)
            return action
        self.undo_action = create_edit_action('Undo', _('Undo'),
                                              ima.icon('undo'))
        self.redo_action = create_edit_action('Redo', _('Redo'),
                                              ima.icon('redo'))
        self.copy_action = create_edit_action('Copy', _('Copy'),
                                              ima.icon('editcopy'))
        self.cut_action = create_edit_action('Cut', _('Cut'),
                                             ima.icon('editcut'))
        self.paste_action = create_edit_action('Paste', _('Paste'),
                                               ima.icon('editpaste'))
        self.selectall_action = create_edit_action("Select All",
                                                   _("Select All"),
                                                   ima.icon('selectall'))
        self.edit_menu_actions += [self.undo_action, self.redo_action,
                                   None, self.cut_action, self.copy_action,
                                   self.paste_action, self.selectall_action,
                                   None] + self.editor.edit_menu_actions
        switcher_actions = [
            self.file_switcher_action,
            self.symbol_finder_action
        ]
        for switcher_action in switcher_actions:
            mainmenu.add_item_to_application_menu(
                switcher_action,
                menu_id=ApplicationMenus.File,
                section=FileMenuSections.Switcher,
                before_section=FileMenuSections.Restart)
        self.set_splash("")
        # Toolbars
        # TODO: Remove after finishing the migration
        logger.info("Creating toolbars...")
        toolbar = self.toolbar
        self.file_toolbar = toolbar.get_application_toolbar("file_toolbar")
        self.run_toolbar = toolbar.get_application_toolbar("run_toolbar")
        self.debug_toolbar = toolbar.get_application_toolbar("debug_toolbar")
        self.main_toolbar = toolbar.get_application_toolbar("main_toolbar")
        # Tools + External Tools (some of this depends on the Application
        # plugin)
        logger.info("Creating Tools menu...")
        spyder_path_action = create_action(
            self,
            _("PYTHONPATH manager"),
            None, icon=ima.icon('pythonpath'),
            triggered=self.show_path_manager,
            tip=_("PYTHONPATH manager"),
            menurole=QAction.ApplicationSpecificRole)
        from spyder.plugins.application.plugin import (
            ApplicationActions, WinUserEnvDialog)
        winenv_action = None
        if WinUserEnvDialog:
            winenv_action = self.application.get_action(
                ApplicationActions.SpyderWindowsEnvVariables)
        mainmenu.add_item_to_application_menu(
            spyder_path_action,
            menu_id=ApplicationMenus.Tools,
            section=ToolsMenuSections.Tools,
            before=winenv_action
        )
        # LSP logs menu is only useful for debugging, so only in debug mode.
        if get_debug_level() >= 3:
            self.menu_lsp_logs = QMenu(_("LSP logs"))
            self.menu_lsp_logs.aboutToShow.connect(self.update_lsp_logs)
            mainmenu.add_item_to_application_menu(
                self.menu_lsp_logs,
                menu_id=ApplicationMenus.Tools)
        # Main toolbar
        from spyder.plugins.toolbar.api import (
            ApplicationToolbars, MainToolbarSections)
        self.toolbar.add_item_to_application_toolbar(
            spyder_path_action,
            toolbar_id=ApplicationToolbars.Main,
            section=MainToolbarSections.ApplicationSection
        )
        self.set_splash(_("Setting up main window..."))
        #----- Tours
        # TODO: Move tours to a plugin structure
        self.tour = tour.AnimatedTour(self)
        # self.tours_menu = QMenu(_("Interactive tours"), self)
        # self.tour_menu_actions = []
        # # TODO: Only show intro tour for now. When we are close to finish
        # # 3.0, we will finish and show the other tour
        self.tours_available = tour.get_tours(DEFAULT_TOUR)
        for i, tour_available in enumerate(self.tours_available):
            self.tours_available[i]['last'] = 0
            # NOTE: tour_name is currently unused; kept for the commented-out
            # per-tour menu code below.
            tour_name = tour_available['name']
            # def trigger(i=i, self=self):  # closure needed!
            #     return lambda: self.show_tour(i)
            # temp_action = create_action(self, tour_name, tip="",
            #                             triggered=trigger())
            # self.tour_menu_actions += [temp_action]
        # self.tours_menu.addActions(self.tour_menu_actions)
        self.tour_action = create_action(
            self,
            self.tours_available[DEFAULT_TOUR]['name'],
            tip=_("Interactive tour introducing Spyder's panes and features"),
            triggered=lambda: self.show_tour(DEFAULT_TOUR))
        mainmenu.add_item_to_application_menu(
            self.tour_action,
            menu_id=ApplicationMenus.Help,
            section=HelpMenuSections.Documentation)
        # Populate the menus kept by the main window (not yet migrated).
        add_actions(self.edit_menu, self.edit_menu_actions)
        add_actions(self.search_menu, self.search_menu_actions)
        add_actions(self.source_menu, self.source_menu_actions)
        add_actions(self.run_menu, self.run_menu_actions)
        add_actions(self.debug_menu, self.debug_menu_actions)
        self.all_actions_defined.emit()
def __getattr__(self, attr):
if attr in self._INTERNAL_PLUGINS_MAPPING.keys():
return self.get_plugin(self._INTERNAL_PLUGINS_MAPPING[attr])
try:
return self.get_plugin(attr)
except SpyderAPIError:
pass
return super().__getattr__(attr)
def update_lsp_logs(self):
self.menu_lsp_logs.clear()
lsp_logs = []
files = glob.glob(osp.join(get_conf_path('lsp_logs'), '*.log'))
for f in files:
action = create_action(self, f, triggered=self.editor.load)
action.setData(f)
lsp_logs.append(action)
add_actions(self.menu_lsp_logs, lsp_logs)
    def pre_visible_setup(self):
        """
        Actions to be performed with the plugins before the main window is
        shown: tabify/hide external plugins, notify plugins, wire menu
        refresh slots and register plugin-provided layouts.
        """
        logger.info("Setting up window...")
        # Tabify external plugins next to the Console and hide them.
        for plugin, plugin_instance in self._EXTERNAL_PLUGINS.items():
            self.tabify_plugin(plugin_instance, Plugins.Console)
            if isinstance(plugin_instance, SpyderDockablePlugin):
                plugin_instance.get_widget().toggle_view(False)
        # Let every plugin do pre-show work (best-effort; not all implement
        # before_mainwindow_visible).
        for plugin_id, plugin_instance in self._PLUGINS.items():
            try:
                plugin_instance.before_mainwindow_visible()
            except AttributeError:
                pass
        if self.splash is not None:
            self.splash.hide()
        # Keep Edit/Search menu enabled-state fresh whenever a menu opens.
        for child in self.menuBar().children():
            if isinstance(child, QMenu):
                try:
                    child.aboutToShow.connect(self.update_edit_menu)
                    child.aboutToShow.connect(self.update_search_menu)
                except TypeError:
                    pass
        # Register custom window layouts declared by plugins.
        for plugin, plugin_instance in self._PLUGINS.items():
            if hasattr(plugin_instance, 'CUSTOM_LAYOUTS'):
                if isinstance(plugin_instance.CUSTOM_LAYOUTS, list):
                    for custom_layout in plugin_instance.CUSTOM_LAYOUTS:
                        self.layouts.register_layout(
                            self, custom_layout)
                else:
                    logger.info(
                        'Unable to load custom layouts for {}. '
                        'Expecting a list of layout classes but got {}'
                        .format(plugin, plugin_instance.CUSTOM_LAYOUTS)
                    )
        self.layouts.update_layout_menu_actions()
        logger.info("*** End of MainWindow setup ***")
        self.is_starting_up = False
def post_visible_setup(self):
for __, plugin in self._PLUGINS.items():
try:
plugin.on_mainwindow_visible()
except AttributeError:
pass
self.restore_scrollbar_position.emit()
logger.info('Deleting previous Spyder instance LSP logs...')
delete_lsp_log_files()
for widget in self.floating_dockwidgets:
widget.setFloating(True)
if (CONF.get('main', 'single_instance') and not self.new_instance
and self.open_files_server):
t = threading.Thread(target=self.start_open_files_server)
t.setDaemon(True)
t.start()
self.sig_open_external_file.connect(self.open_external_file)
# the External or IPython ones
if self.console.dockwidget.isVisible() and DEV is None:
self.console.toggle_view_action.setChecked(False)
self.console.dockwidget.hide()
# Show Help and Consoles by default
plugins_to_show = [self.ipyconsole]
if self.help is not None:
plugins_to_show.append(self.help)
for plugin in plugins_to_show:
if plugin.dockwidget.isVisible():
plugin.dockwidget.raise_()
# Update plugins toggle actions to show the "Switch to" plugin shortcut
self._update_shortcuts_in_panes_menu()
# Process pending events and hide splash before loading the
# previous session.
QApplication.processEvents()
if self.splash is not None:
self.splash.hide()
# TODO: Remove this reference to projects once we can send the command
# line options to the plugins.
if self.open_project:
if not running_in_mac_app():
self.projects.open_project(
self.open_project, workdir=self.init_workdir
)
else:
# Load last project if a project was active when Spyder
# was closed
self.projects.reopen_last_project()
# If no project is active, load last session
if self.projects.get_active_project() is None:
self.editor.setup_open_files(close_previous_files=False)
# Raise the menuBar to the top of the main window widget's stack
self.menuBar().raise_()
if not sys.platform == 'darwin':
window = self.window().windowHandle()
window.screenChanged.connect(self.handle_new_screen)
screen = self.window().windowHandle().screen()
self.current_dpi = screen.logicalDotsPerInch()
screen.logicalDotsPerInchChanged.connect(
self.show_dpi_change_message)
self.is_setting_up = False
self.sig_setup_finished.emit()
def handle_new_screen(self, new_screen):
if new_screen is not None:
new_screen_dpi = new_screen.logicalDotsPerInch()
if self.current_dpi != new_screen_dpi:
self.show_dpi_change_message(new_screen_dpi)
else:
new_screen.logicalDotsPerInchChanged.connect(
self.show_dpi_change_message)
def handle_dpi_change_response(self, result, dpi):
if self.dpi_change_dismiss_box.isChecked():
self.show_dpi_message = False
self.dpi_change_dismiss_box = None
if result == 0:
CONF.set('main', 'normal_screen_resolution', False)
CONF.set('main', 'high_dpi_scaling', True)
CONF.set('main', 'high_dpi_custom_scale_factor', False)
self.restart()
else:
self.current_dpi = dpi
    def show_dpi_change_message(self, dpi):
        """
        Show a non-modal message box suggesting a restart when the monitor
        scale (logical DPI) changes, unless suppressed for this session.
        """
        if not self.show_dpi_message:
            return
        if self.current_dpi != dpi:
            # Check the window state to not show the message if the window
            # is in fullscreen mode on macOS.
            window = self.window().windowHandle()
            if (window.windowState() == Qt.WindowFullScreen and
                    sys.platform == 'darwin'):
                return
            self.dpi_change_dismiss_box = QCheckBox(
                _("Hide this message during the current session"),
                self
            )
            msgbox = QMessageBox(self)
            msgbox.setIcon(QMessageBox.Warning)
            # NOTE: the lone `_` below continues onto the next line — this is
            # the translation call `_("...")` split across lines.
            msgbox.setText(
                _
                ("A monitor scale change was detected. <br><br>"
                 "We recommend restarting Spyder to ensure that it's properly "
                 "displayed. If you don't want to do that, please be sure to "
                 "activate the option<br><br><tt>Enable auto high DPI scaling"
                 "</tt><br><br>in <tt>Preferences > Application > "
                 "Interface</tt>, in case Spyder is not displayed "
                 "correctly.<br><br>"
                 "Do you want to restart Spyder?"))
            # First button added gets result 0 ("Restart now").
            msgbox.addButton(_('Restart now'), QMessageBox.NoRole)
            dismiss_button = msgbox.addButton(
                _('Dismiss'), QMessageBox.NoRole)
            msgbox.setCheckBox(self.dpi_change_dismiss_box)
            msgbox.setDefaultButton(dismiss_button)
            msgbox.finished.connect(
                lambda result: self.handle_dpi_change_response(result, dpi))
            # Open non-modally so the UI stays responsive.
            msgbox.open()
def set_window_title(self):
if DEV is not None:
title = u"Spyder %s (Python %s.%s)" % (__version__,
sys.version_info[0],
sys.version_info[1])
elif running_in_mac_app() or is_pynsist():
title = "Spyder"
else:
title = u"Spyder (Python %s.%s)" % (sys.version_info[0],
sys.version_info[1])
if get_debug_level():
title += u" [DEBUG MODE %d]" % get_debug_level()
if self.window_title is not None:
title += u' -- ' + to_text_string(self.window_title)
# window title.
if self.projects is not None:
path = self.projects.get_active_project_path()
if path:
path = path.replace(get_home_dir(), u'~')
title = u'{0} - {1}'.format(path, title)
self.base_title = title
self.setWindowTitle(self.base_title)
# TODO: To be removed after all actions are moved to their corresponding
# plugins
    def register_shortcut(self, qaction_or_qshortcut, context, name,
                          add_shortcut_to_tip=True, plugin_name=None):
        """
        Register a QAction or QShortcut with the Shortcuts plugin so it can
        be configured by the user (thin delegation kept for compatibility).
        """
        self.shortcuts.register_shortcut(
            qaction_or_qshortcut,
            context,
            name,
            add_shortcut_to_tip=add_shortcut_to_tip,
            plugin_name=plugin_name,
        )
# --- Other
    def update_source_menu(self):
        """Refresh the formatter name shown in the Source menu."""
        # This is necessary to avoid an error at startup.
        # Fixes spyder-ide/spyder#14901
        try:
            self.editor.refresh_formatter_name()
        except AttributeError:
            pass
def free_memory(self):
gc.collect()
    def plugin_focus_changed(self):
        """Refresh Edit and Search menus after a plugin focus change."""
        self.update_edit_menu()
        self.update_search_menu()
def show_shortcuts(self, menu):
menu_actions = menu.actions()
for action in menu_actions:
if getattr(action, '_shown_shortcut', False):
# This is a SpyderAction
if action._shown_shortcut is not None:
action.setShortcut(action._shown_shortcut)
elif action.menu() is not None:
# This is submenu, so we need to call this again
self.show_shortcuts(action.menu())
else:
# We don't need to do anything for other elements
continue
def hide_shortcuts(self, menu):
menu_actions = menu.actions()
for action in menu_actions:
if getattr(action, '_shown_shortcut', False):
if action._shown_shortcut is not None:
action.setShortcut(QKeySequence())
elif action.menu() is not None:
self.hide_shortcuts(action.menu())
else:
continue
    def hide_options_menus(self):
        """Hide the options menu of every visible plugin pane."""
        for plugin in self.widgetlist + self.thirdparty_plugins:
            if plugin.CONF_SECTION == 'editor':
                # The Editor keeps its menu on the current editor stack.
                editorstack = self.editor.get_current_editorstack()
                editorstack.menu.hide()
            else:
                try:
                    # New API
                    plugin.options_menu.hide()
                except AttributeError:
                    # Old API
                    plugin._options_menu.hide()
    def get_focus_widget_properties(self):
        """
        Return (widget, properties) for the currently focused widget.

        ``properties`` is the tuple ``(console, not_readonly,
        readwrite_editor)`` when the focus widget is a text editor or an
        IPython control widget, and None otherwise.
        """
        # Local imports to avoid a circular import at module load time.
        from spyder.plugins.editor.widgets.base import TextEditBaseWidget
        from spyder.plugins.ipythonconsole.widgets import ControlWidget
        widget = QApplication.focusWidget()
        textedit_properties = None
        if isinstance(widget, (TextEditBaseWidget, ControlWidget)):
            console = isinstance(widget, ControlWidget)
            not_readonly = not widget.isReadOnly()
            readwrite_editor = not_readonly and not console
            textedit_properties = (console, not_readonly, readwrite_editor)
        return widget, textedit_properties
    def update_edit_menu(self):
        """Enable/disable Edit menu actions based on the focused widget."""
        widget, textedit_properties = self.get_focus_widget_properties()
        if textedit_properties is None: # widget is not an editor/console
            return
        # !!! Below this line, widget is expected to be a QPlainTextEdit
        #     instance
        console, not_readonly, readwrite_editor = textedit_properties
        # Editor has focus and there is no file opened in it
        if (not console and not_readonly and self.editor
                and not self.editor.is_file_opened()):
            return
        # Disabling all actions to begin with
        for child in self.edit_menu.actions():
            child.setEnabled(False)
        self.selectall_action.setEnabled(True)
        # Undo, redo
        self.undo_action.setEnabled( readwrite_editor \
                                     and widget.document().isUndoAvailable() )
        self.redo_action.setEnabled( readwrite_editor \
                                     and widget.document().isRedoAvailable() )
        # Copy, cut, paste, delete
        has_selection = widget.has_selected_text()
        self.copy_action.setEnabled(has_selection)
        self.cut_action.setEnabled(has_selection and not_readonly)
        self.paste_action.setEnabled(not_readonly)
        # Comment, uncomment, indent, unindent...
        if not console and not_readonly:
            # This is the editor and current file is writable
            if self.editor:
                for action in self.editor.edit_menu_actions:
                    action.setEnabled(True)
    def update_search_menu(self):
        """Enable/disable Search menu actions based on the focused widget."""
        # Disabling all actions except the last one
        # (which is Find in files) to begin with
        for child in self.search_menu.actions()[:-1]:
            child.setEnabled(False)
        widget, textedit_properties = self.get_focus_widget_properties()
        if textedit_properties is None: # widget is not an editor/console
            return
        # !!! Below this line, widget is expected to be a QPlainTextEdit
        #     instance
        console, not_readonly, readwrite_editor = textedit_properties
        # Find actions only trigger an effect in the Editor
        if not console:
            for action in self.search_menu.actions():
                try:
                    action.setEnabled(True)
                except RuntimeError:
                    # The underlying C++ action may already be deleted.
                    pass
        # Disable the replace action for read-only files
        if len(self.search_menu_actions) > 3:
            self.search_menu_actions[3].setEnabled(readwrite_editor)
    def createPopupMenu(self):
        """Qt override: use the Application plugin's context menu."""
        return self.application.get_application_context_menu(parent=self)
    def set_splash(self, message):
        """Show *message* on the splash screen (no-op without a splash)."""
        if self.splash is None:
            return
        if message:
            logger.info(message)
        self.splash.show()
        self.splash.showMessage(message,
                                int(Qt.AlignBottom | Qt.AlignCenter |
                                    Qt.AlignAbsolute),
                                QColor(Qt.white))
        # Force a repaint so the message is visible during blocking setup.
        QApplication.processEvents()
    def closeEvent(self, event):
        """Qt override: run exit tasks; veto the close if they fail."""
        if self.closing(True):
            event.accept()
        else:
            event.ignore()
    def resizeEvent(self, event):
        """Qt override: remember the normal-state window size."""
        if not self.isMaximized() and not self.layouts.get_fullscreen_flag():
            self.window_size = self.size()
        QMainWindow.resizeEvent(self, event)
        # To be used by the tour to be able to resize
        self.sig_resized.emit(event)
    def moveEvent(self, event):
        """Qt override: remember the normal-state window position."""
        if not self.isMaximized() and not self.layouts.get_fullscreen_flag():
            self.window_position = self.pos()
        QMainWindow.moveEvent(self, event)
        # To be used by the tour to be able to move
        self.sig_moved.emit(event)
    def hideEvent(self, event):
        """
        Qt override: mark the plugin that owned the focus as visible before
        the window is hidden, so it can be restored later.
        """
        try:
            for plugin in (self.widgetlist + self.thirdparty_plugins):
                # TODO: Remove old API
                try:
                    # New API
                    if plugin.get_widget().isAncestorOf(
                            self.last_focused_widget):
                        plugin.change_visibility(True)
                except AttributeError:
                    # Old API
                    if plugin.isAncestorOf(self.last_focused_widget):
                        plugin._visibility_changed(True)
            QMainWindow.hideEvent(self, event)
        except RuntimeError:
            # Underlying C++ objects may already be deleted during shutdown.
            QMainWindow.hideEvent(self, event)
    def change_last_focused_widget(self, old, now):
        """
        Track the last/previous focused widgets (connected to the
        application-wide focusChanged signal).
        """
        if (now is None and QApplication.activeWindow() is not None):
            # Focus left every widget: give it back to the active window.
            QApplication.activeWindow().setFocus()
            self.last_focused_widget = QApplication.focusWidget()
        elif now is not None:
            self.last_focused_widget = now
        self.previous_focused_widget = old
    def closing(self, cancelable=False):
        """
        Perform exit tasks (close plugins, save window settings).

        Return False to cancel the close when *cancelable* and the user (or
        a plugin) vetoes it; True otherwise.
        """
        if self.already_closed or self.is_starting_up:
            return True
        if cancelable and CONF.get('main', 'prompt_on_exit'):
            reply = QMessageBox.critical(self, 'Spyder',
                                         'Do you really want to exit?',
                                         QMessageBox.Yes, QMessageBox.No)
            if reply == QMessageBox.No:
                return False
        if CONF.get('main', 'single_instance') and self.open_files_server:
            self.open_files_server.close()
        # Internal plugins
        for plugin in (self.widgetlist + self.thirdparty_plugins):
            # New API
            try:
                if isinstance(plugin, SpyderDockablePlugin):
                    plugin.close_window()
                if not plugin.on_close(cancelable):
                    return False
            except AttributeError:
                pass
            # Old API
            try:
                plugin._close_window()
                if not plugin.closing_plugin(cancelable):
                    return False
            except AttributeError:
                pass
        # New API: External plugins
        for plugin_name, plugin in self._EXTERNAL_PLUGINS.items():
            try:
                if isinstance(plugin, SpyderDockablePlugin):
                    plugin.close_window()
                if not plugin.on_close(cancelable):
                    return False
            except AttributeError as e:
                logger.error(str(e))
        # Save window settings *after* closing all plugin windows, in order
        # to show them in their previous locations in the next session.
        # Fixes spyder-ide/spyder#12139
        prefix = 'window' + '/'
        self.layouts.save_current_window_settings(prefix)
        self.already_closed = True
        return True
    def add_dockwidget(self, plugin):
        """Add a compatible plugin's dockwidget to the main window."""
        try:
            # New API
            if plugin.is_compatible:
                dockwidget, location = plugin.create_dockwidget(self)
                self.addDockWidget(location, dockwidget)
                self.widgetlist.append(plugin)
        except AttributeError:
            # Old API
            if plugin._is_compatible:
                dockwidget, location = plugin._create_dockwidget()
                self.addDockWidget(location, dockwidget)
                self.widgetlist.append(plugin)
    @Slot()
    def global_callback(self):
        """
        Global callback for Edit actions: invoke, on the focused editor or
        console widget, the method named in the triggering action's data
        (e.g. 'copy', 'selectAll').
        """
        widget = QApplication.focusWidget()
        action = self.sender()
        callback = from_qvariant(action.data(), to_text_string)
        # Local imports to avoid a circular import at module load time.
        from spyder.plugins.editor.widgets.base import TextEditBaseWidget
        from spyder.plugins.ipythonconsole.widgets import ControlWidget
        if isinstance(widget, (TextEditBaseWidget, ControlWidget)):
            getattr(widget, callback)()
        else:
            # Focus is elsewhere: nothing to act on.
            return
def redirect_internalshell_stdio(self, state):
if state:
self.console.redirect_stds()
else:
self.console.restore_stds()
    def open_external_console(self, fname, wdir, args, interact, debug, python,
                              python_args, systerm, post_mortem=False):
        """
        Run *fname* in an external system terminal when *systerm* is True
        (the `python`/`post_mortem` arguments are currently unused here).
        """
        if systerm:
            # Running script in an external system terminal
            try:
                if CONF.get('main_interpreter', 'default'):
                    executable = get_python_executable()
                else:
                    executable = CONF.get('main_interpreter', 'executable')
                programs.run_python_script_in_terminal(
                        fname, wdir, args, interact, debug, python_args,
                        executable)
            except NotImplementedError:
                QMessageBox.critical(self, _("Run"),
                                     _("Running an external system terminal "
                                       "is not supported on platform %s."
                                       ) % os.name)
    def open_file(self, fname, external=False):
        """
        Open *fname* in the most appropriate place: text files in the
        Editor, importable data in the Variable Explorer, anything else with
        the OS default application (unless *external* is True).
        """
        fname = to_text_string(fname)
        ext = osp.splitext(fname)[1]
        if encoding.is_text_file(fname):
            self.editor.load(fname)
        elif self.variableexplorer is not None and ext in IMPORT_EXT:
            self.variableexplorer.import_data(fname)
        elif not external:
            fname = file_uri(fname)
            start_file(fname)
    def open_external_file(self, fname):
        """
        Open a file path received from another Spyder instance (or the
        command line), resolving it against the startup CWD first.
        """
        # Check that file exists
        fname = encoding.to_unicode_from_fs(fname)
        if osp.exists(osp.join(CWD, fname)):
            fpath = osp.join(CWD, fname)
        elif osp.exists(fname):
            fpath = fname
        else:
            return
        # Don't open script that starts Spyder at startup.
        if sys.platform == 'darwin' and 'bin/spyder' in fname:
            return
        if osp.isfile(fpath):
            self.open_file(fpath, external=True)
        elif osp.isdir(fpath):
            # Directories must be opened as projects, not files.
            QMessageBox.warning(
                self, _("Error"),
                _('To open <code>{fpath}</code> as a project with Spyder, '
                  'please use <code>spyder -p "{fname}"</code>.')
                .format(fpath=osp.normpath(fpath), fname=fname)
            )
    def load_python_path(self):
        """
        Load the user's PYTHONPATH (and its inactive subset) from the files
        in Spyder's configuration folder, keeping only existing directories.
        """
        if osp.isfile(self.SPYDER_PATH):
            with open(self.SPYDER_PATH, 'r', encoding='utf-8') as f:
                path = f.read().splitlines()
            self.path = tuple(name for name in path if osp.isdir(name))
        if osp.isfile(self.SPYDER_NOT_ACTIVE_PATH):
            with open(self.SPYDER_NOT_ACTIVE_PATH, 'r',
                      encoding='utf-8') as f:
                not_active_path = f.read().splitlines()
            self.not_active_path = tuple(name for name in not_active_path
                                         if osp.isdir(name))
def save_python_path(self, new_path_dict):
path = [p for p in new_path_dict]
not_active_path = [p for p in new_path_dict if not new_path_dict[p]]
try:
encoding.writelines(path, self.SPYDER_PATH)
encoding.writelines(not_active_path, self.SPYDER_NOT_ACTIVE_PATH)
except EnvironmentError as e:
logger.error(str(e))
CONF.set('main', 'spyder_pythonpath', self.get_spyder_pythonpath())
def get_spyder_pythonpath_dict(self):
self.load_python_path()
path_dict = OrderedDict()
for path in self.path:
path_dict[path] = path not in self.not_active_path
for path in self.project_path:
path_dict[path] = True
return path_dict
def get_spyder_pythonpath(self):
path_dict = self.get_spyder_pythonpath_dict()
path = [k for k, v in path_dict.items() if v]
return path
    def update_python_path(self, new_path_dict):
        """
        Persist a changed PYTHONPATH, rebuild sys.path from it and notify
        plugins through ``sig_pythonpath_changed``.
        """
        path_dict = self.get_spyder_pythonpath_dict()
        if path_dict != new_path_dict:
            # Save path
            self.save_python_path(new_path_dict)
        # Load new path
        new_path_dict_p = self.get_spyder_pythonpath_dict()  # Includes project
        # Update Spyder interpreter: first drop all previous entries...
        for path in path_dict:
            while path in sys.path:
                sys.path.remove(path)
        # ...then insert the active ones after sys.path[0] (reversed so the
        # mapping's order is preserved in sys.path).
        for path, active in reversed(new_path_dict_p.items()):
            if active:
                sys.path.insert(1, path)
        # Any plugin that needs to do some work based on this signal should
        # connect to it on plugin registration
        self.sig_pythonpath_changed.emit(path_dict, new_path_dict_p)
    @Slot()
    def show_path_manager(self):
        """Show the PYTHONPATH manager dialog."""
        # Local import to avoid loading the widget at startup.
        from spyder.widgets.pathmanager import PathManager
        # Project paths are shown but cannot be edited from the dialog.
        read_only_path = tuple(self.projects.get_pythonpath())
        dialog = PathManager(self, self.path, read_only_path,
                             self.not_active_path, sync=True)
        # Keep a reference so the non-modal dialog is not garbage-collected.
        self._path_manager = dialog
        dialog.sig_path_changed.connect(self.update_python_path)
        dialog.redirect_stdio.connect(self.redirect_internalshell_stdio)
        dialog.show()
def pythonpath_changed(self):
self.project_path = tuple(self.projects.get_pythonpath())
path_dict = self.get_spyder_pythonpath_dict()
self.update_python_path(path_dict)
#---- Preferences
def apply_settings(self):
qapp = QApplication.instance()
# Set 'gtk+' as the default theme in Gtk-based desktops
# Fixes spyder-ide/spyder#2036.
if is_gtk_desktop() and ('GTK+' in QStyleFactory.keys()):
try:
qapp.setStyle('gtk+')
except:
pass
default = self.DOCKOPTIONS
if CONF.get('main', 'vertical_tabs'):
default = default|QMainWindow.VerticalTabs
self.setDockOptions(default)
self.apply_panes_settings()
if CONF.get('main', 'use_custom_cursor_blinking'):
qapp.setCursorFlashTime(
CONF.get('main', 'custom_cursor_blinking'))
else:
qapp.setCursorFlashTime(self.CURSORBLINK_OSDEFAULT)
    def apply_panes_settings(self):
        """Re-apply dockwidget features and margins to every plugin pane."""
        for plugin in (self.widgetlist + self.thirdparty_plugins):
            features = plugin.dockwidget.FEATURES
            plugin.dockwidget.setFeatures(features)
            try:
                # New API
                margin = 0
                if CONF.get('main', 'use_custom_margin'):
                    margin = CONF.get('main', 'custom_margin')
                plugin.update_margins(margin)
            except AttributeError:
                # Old API
                plugin._update_margins()
    @Slot()
    def show_preferences(self):
        """Show the Preferences dialog (at its last remembered size)."""
        self.preferences.open_dialog(self.prefs_dialog_size)
    def set_prefs_size(self, size):
        """Remember the Preferences dialog size for the next time it opens."""
        self.prefs_dialog_size = size
# -- Open files server
    def start_open_files_server(self):
        """
        Accept filenames sent by other Spyder instances over a local TCP
        socket and forward them through ``sig_open_external_file``.

        Runs forever in a daemon thread; returns when the socket is closed.
        """
        self.open_files_server.setsockopt(socket.SOL_SOCKET,
                                          socket.SO_REUSEADDR, 1)
        port = select_port(default_port=OPEN_FILES_PORT)
        # Publish the chosen port so new instances know where to connect.
        CONF.set('main', 'open_files_port', port)
        self.open_files_server.bind(('127.0.0.1', port))
        self.open_files_server.listen(20)
        while 1:  # Loop until the server socket is closed at shutdown
            try:
                req, dummy = self.open_files_server.accept()
            except socket.error as e:
                # See spyder-ide/spyder#1275 for details on why errno EINTR is
                # silently ignored here.
                eintr = errno.WSAEINTR if os.name == 'nt' else errno.EINTR
                # To avoid a traceback after closing on Windows
                if e.args[0] == eintr:
                    continue
                # handle a connection abort on close error
                enotsock = (errno.WSAENOTSOCK if os.name == 'nt'
                            else errno.ENOTSOCK)
                if e.args[0] in [errno.ECONNABORTED, enotsock]:
                    return
                raise
            fname = req.recv(1024)
            fname = fname.decode('utf-8')
            self.sig_open_external_file.emit(fname)
            # Acknowledge the sender so it can exit.
            req.sendall(b' ')
# ---- Quit and restart, and reset spyder defaults
    @Slot()
    def reset_spyder(self):
        """
        Quit and reset Spyder and then Restart the application (after user
        confirmation).
        """
        answer = QMessageBox.warning(self, _("Warning"),
             _("Spyder will restart and reset to default settings: <br><br>"
               "Do you want to continue?"),
             QMessageBox.Yes | QMessageBox.No)
        if answer == QMessageBox.Yes:
            self.restart(reset=True)
    @Slot()
    def restart(self, reset=False):
        """Quit and restart Spyder; with reset=True, also reset its settings."""
        self.application.restart(reset=reset)
# ---- Interactive Tours
    def show_tour(self, index):
        """Un-maximize any maximized pane, then start the tour at *index*."""
        self.layouts.maximize_dockwidget(restore=True)
        frames = self.tours_available[index]
        self.tour.set_tour(index, frames, self)
        self.tour.start_tour()
# ---- Global Switcher
    def open_switcher(self, symbol=False):
        """Toggle the switcher dialog; with symbol=True, start in symbol mode."""
        # Invoking the shortcut while visible acts as a toggle-off.
        if self.switcher is not None and self.switcher.isVisible():
            self.switcher.clear()
            self.switcher.hide()
            return
        # '@' is the prefix that puts the switcher in symbol-search mode.
        if symbol:
            self.switcher.set_search_text('@')
        else:
            self.switcher.set_search_text('')
        self.switcher.setup()
        self.switcher.show()

        # Note: The +6 pixel on the top makes it look better
        # FIXME: Why is this using the toolbars menu? A: To not be on top of
        # the toolbars.
        # Probably toolbars should be taken into account for this 'delta' only
        # when are visible
        delta_top = (self.toolbar.toolbars_menu.geometry().height() +
                     self.menuBar().geometry().height() + 6)
        self.switcher.set_position(delta_top)
    def open_symbolfinder(self):
        """Open the switcher directly in symbol-search mode."""
        self.open_switcher(symbol=True)
    def create_switcher(self):
        """Create the Switcher instance on first use and return it."""
        if self.switcher is None:
            # Imported lazily, only when the switcher is first needed.
            from spyder.widgets.switcher import Switcher
            self.switcher = Switcher(self)

        return self.switcher
    @Slot()
    def show_tour_message(self, force=False):
        """Offer the interactive tour dialog, at most once, unless *force*.

        Skipped while running under pytest or in safe mode.
        """
        should_show_tour = CONF.get('main', 'show_tour_message')
        if force or (should_show_tour and not running_under_pytest()
                     and not get_safe_mode()):
            # Flip the flag first so the dialog is only ever offered once.
            CONF.set('main', 'show_tour_message', False)
            self.tour_dialog = tour.OpenTourDialog(
                self, lambda: self.show_tour(DEFAULT_TOUR))
            self.tour_dialog.show()
# --- For OpenGL
def _test_setting_opengl(self, option):
if option == 'software':
return QCoreApplication.testAttribute(Qt.AA_UseSoftwareOpenGL)
elif option == 'desktop':
return QCoreApplication.testAttribute(Qt.AA_UseDesktopOpenGL)
elif option == 'gles':
return QCoreApplication.testAttribute(Qt.AA_UseOpenGLES)
#==============================================================================
# Utilities for the 'main' function below
#==============================================================================
def create_application():
    """Create and configure the QApplication instance used by Spyder.

    Besides setting the window icon and desktop file name, this replaces
    QApplication, sys.exit and sys.excepthook with safe stand-ins so code
    run inside Spyder cannot instantiate a second QApplication, terminate
    the interpreter, or crash the app through an unhandled exception.
    """
    # Our QApplication
    app = qapplication()

    # --- Set application icon
    app_icon = QIcon(get_image_path("spyder"))
    app.setWindowIcon(app_icon)

    # Required for correct icon on GNOME/Wayland:
    if hasattr(app, 'setDesktopFileName'):
        app.setDesktopFileName('spyder')

    # ----Monkey patching QApplication
    # Constructing a "new" QApplication is a no-op and exec_() does nothing.
    class FakeQApplication(QApplication):
        def __init__(self, args):
            self = app  # analysis:ignore
        @staticmethod
        def exec_():
            pass
    from qtpy import QtWidgets
    QtWidgets.QApplication = FakeQApplication

    # ----Monkey patching sys.exit
    def fake_sys_exit(arg=[]):
        pass
    sys.exit = fake_sys_exit

    # ----Monkey patching sys.excepthook to avoid crashes in PyQt 5.5+
    def spy_excepthook(type_, value, tback):
        sys.__excepthook__(type_, value, tback)
    sys.excepthook = spy_excepthook

    # Removing arguments from sys.argv as in standard Python interpreter
    sys.argv = ['']
    return app
def create_window(app, splash, options, args):
    """Build, show and run Spyder's main window.

    Returns the MainWindow instance. The Qt event loop is started here
    (and only skipped under pytest), so outside of tests this returns
    after the application exits.
    """
    # Main window
    main = MainWindow(splash, options)
    try:
        main.setup()
    except BaseException:
        # Stop the internal console interpreter before re-raising.
        if main.console is not None:
            try:
                main.console.exit_interpreter()
            except BaseException:
                pass
        raise

    main.pre_visible_setup()
    main.show()
    main.post_visible_setup()

    if main.console:
        namespace = CONF.get('internal_console', 'namespace', {})
        main.console.start_interpreter(namespace)
        main.console.set_namespace_item('spy', Spy(app=app, window=main))

    # Propagate current configurations to all configuration observers
    CONF.notify_all_observers()

    # Don't show icons in menus for Mac
    if sys.platform == 'darwin':
        QCoreApplication.setAttribute(Qt.AA_DontShowIconsInMenus, True)

    # On macOS, also pick up files queued while the app was still starting.
    if running_in_mac_app():
        app.sig_open_external_file.connect(main.open_external_file)
        app._has_started = True
        if hasattr(app, '_pending_file_open'):
            if args:
                args = app._pending_file_open + args
            else:
                args = app._pending_file_open

    # Open any files passed on the command line.
    if args:
        for a in args:
            main.open_external_file(a)

    app.focusChanged.connect(main.change_last_focused_widget)

    if not running_under_pytest():
        app.exec_()
    return main
def main(options, args):
    """Main entry point: configure Qt/OpenGL/logging and launch Spyder.

    *options* and *args* come from the command-line parser. Returns the
    MainWindow under pytest; otherwise runs until the app exits and then
    calls the original sys.exit.
    """
    # **** For pytest ****
    # Keep startup minimal: no console handling, splash or CLI options.
    if running_under_pytest():
        if CONF.get('main', 'opengl') != 'automatic':
            option = CONF.get('main', 'opengl')
            set_opengl_implementation(option)

        app = create_application()
        window = create_window(app, None, options, None)
        return window

    # **** Handle hide_console option ****
    if options.show_console:
        print("(Deprecated) --show console does nothing, now the default "
              " behavior is to show the console, use --hide-console if you "
              "want to hide it")

    if set_attached_console_visible is not None:
        set_attached_console_visible(not options.hide_console
                                     or options.reset_config_files
                                     or options.reset_to_defaults
                                     or options.optimize
                                     or bool(get_debug_level()))

    # **** Set OpenGL implementation to use ****
    # The command-line option takes precedence over the stored preference.
    # FIX: the 'if' keyword was missing here, which made this line a syntax
    # error and left the following 'else:' dangling.
    if options.opengl_implementation:
        option = options.opengl_implementation
        set_opengl_implementation(option)
    else:
        if CONF.get('main', 'opengl') != 'automatic':
            option = CONF.get('main', 'opengl')
            set_opengl_implementation(option)

    # **** Set high DPI scaling ****
    if hasattr(Qt, 'AA_EnableHighDpiScaling'):
        QCoreApplication.setAttribute(Qt.AA_EnableHighDpiScaling,
                                      CONF.get('main', 'high_dpi_scaling'))

    setup_logging(options)

    app = create_application()

    # **** Create splash screen ****
    splash = create_splash_screen()
    if splash is not None:
        splash.show()
        splash.showMessage(
            _("Initializing..."),
            int(Qt.AlignBottom | Qt.AlignCenter | Qt.AlignAbsolute),
            QColor(Qt.white)
        )
        QApplication.processEvents()

    # Options that perform one action and exit instead of starting the GUI.
    if options.reset_to_defaults:
        CONF.reset_to_defaults()
        return
    elif options.optimize:
        # Byte-compile Spyder's own sources with -O.
        import spyder
        programs.run_python_script(module="compileall",
                                   args=[spyder.__path__[0]], p_args=['-O'])
        return

    # **** Read faulthandler log file ****
    # A non-empty file means the previous session crashed hard.
    faulthandler_file = get_conf_path('faulthandler.log')
    previous_crash = ''
    if osp.exists(faulthandler_file):
        with open(faulthandler_file, 'r') as f:
            previous_crash = f.read()

        # Remove file to not pick it up for next time.
        try:
            dst = get_conf_path('faulthandler.log.old')
            shutil.move(faulthandler_file, dst)
        except Exception:
            pass
    CONF.set('main', 'previous_crash', previous_crash)

    # **** Set color for links ****
    set_links_color(app)

    # **** Create main window ****
    mainwindow = None
    try:
        if PY3 and options.report_segfault:
            # Enable faulthandler so a segfault leaves a traceback behind.
            import faulthandler
            with open(faulthandler_file, 'w') as f:
                faulthandler.enable(file=f)
                mainwindow = create_window(app, splash, options, args)
        else:
            mainwindow = create_window(app, splash, options, args)
    except FontError:
        QMessageBox.information(None, "Spyder",
                                "Spyder was unable to load the <i>Spyder 3</i> "
                                "icon theme. That's why it's going to fallback to the "
                                "theme used in Spyder 2.<br><br>"
                                "For that, please close this window and start Spyder again.")
        CONF.set('appearance', 'icon_theme', 'spyder 2')
    if mainwindow is None:
        # An exception occurred
        if splash is not None:
            splash.hide()
        return

    ORIGINAL_SYS_EXIT()
if __name__ == "__main__":
    # NOTE(review): main() is defined as main(options, args) above, so calling
    # it with no arguments raises TypeError — confirm the intended script
    # entry point (normally Spyder is started through its launcher).
    main()
| true | true |
f7f7c9d55070dd6dfe5199e363700b99c8c56360 | 4,439 | py | Python | apps/user/migrations/0001_initial.py | sushant96702/ecommerce_website_development | 7cd1b37614e7403e53be2ba1a390cf13eee1310c | [
"BSD-2-Clause"
] | 2 | 2018-09-13T10:39:40.000Z | 2018-09-13T10:40:01.000Z | apps/user/migrations/0001_initial.py | sushant96702/ecommerce_website_development | 7cd1b37614e7403e53be2ba1a390cf13eee1310c | [
"BSD-2-Clause"
] | 7 | 2020-06-05T20:06:36.000Z | 2022-03-11T23:43:03.000Z | apps/user/migrations/0001_initial.py | sushant96702/ecommerce_website_development | 7cd1b37614e7403e53be2ba1a390cf13eee1310c | [
"BSD-2-Clause"
] | 1 | 2018-11-11T11:31:45.000Z | 2018-11-11T11:31:45.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import django.utils.timezone
import django.contrib.auth.models
import django.core.validators
class Migration(migrations.Migration):
    """Initial migration: create the custom User model and the Address model."""

    dependencies = [
        ('auth', '0006_require_contenttypes_0002'),
    ]

    operations = [
        # Custom auth user, stored in table 'df_user'.
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
                ('password', models.CharField(verbose_name='password', max_length=128)),
                ('last_login', models.DateTimeField(verbose_name='last login', blank=True, null=True)),
                ('is_superuser', models.BooleanField(verbose_name='superuser status', default=False, help_text='Designates that this user has all permissions without explicitly assigning them.')),
                ('username', models.CharField(verbose_name='username', max_length=30, unique=True, help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.', 'invalid')], error_messages={'unique': 'A user with that username already exists.'})),
                ('first_name', models.CharField(verbose_name='first name', max_length=30, blank=True)),
                ('last_name', models.CharField(verbose_name='last name', max_length=30, blank=True)),
                ('email', models.EmailField(verbose_name='email address', max_length=254, blank=True)),
                ('is_staff', models.BooleanField(verbose_name='staff status', default=False, help_text='Designates whether the user can log into this admin site.')),
                ('is_active', models.BooleanField(verbose_name='active', default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.')),
                ('date_joined', models.DateTimeField(verbose_name='date joined', default=django.utils.timezone.now)),
                ('create_time', models.DateTimeField(verbose_name='创建时间', auto_now_add=True)),
                ('update_time', models.DateTimeField(verbose_name='更新时间', auto_now=True)),
                ('is_delete', models.BooleanField(verbose_name='删除标记', default=False)),
                ('groups', models.ManyToManyField(verbose_name='groups', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group')),
                ('user_permissions', models.ManyToManyField(verbose_name='user permissions', blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission')),
            ],
            options={
                'verbose_name': '用户',
                'verbose_name_plural': '用户',
                'db_table': 'df_user',
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
        # Shipping address linked to a user, stored in table 'df_address'.
        migrations.CreateModel(
            name='Address',
            fields=[
                ('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
                ('create_time', models.DateTimeField(verbose_name='创建时间', auto_now_add=True)),
                ('update_time', models.DateTimeField(verbose_name='更新时间', auto_now=True)),
                ('is_delete', models.BooleanField(verbose_name='删除标记', default=False)),
                ('receiver', models.CharField(verbose_name='收件人', max_length=20)),
                ('addr', models.CharField(verbose_name='收件地址', max_length=256)),
                ('zip_code', models.CharField(verbose_name='邮政编码', max_length=6, null=True)),
                ('phone', models.CharField(verbose_name='联系电话', max_length=11)),
                ('is_default', models.BooleanField(verbose_name='是否默认', default=False)),
                ('user', models.ForeignKey(verbose_name='所属账户', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': '地址',
                'verbose_name_plural': '地址',
                'db_table': 'df_address',
            },
        ),
    ]
| 65.279412 | 432 | 0.634828 |
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import django.utils.timezone
import django.contrib.auth.models
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('auth', '0006_require_contenttypes_0002'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
('password', models.CharField(verbose_name='password', max_length=128)),
('last_login', models.DateTimeField(verbose_name='last login', blank=True, null=True)),
('is_superuser', models.BooleanField(verbose_name='superuser status', default=False, help_text='Designates that this user has all permissions without explicitly assigning them.')),
('username', models.CharField(verbose_name='username', max_length=30, unique=True, help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.', 'invalid')], error_messages={'unique': 'A user with that username already exists.'})),
('first_name', models.CharField(verbose_name='first name', max_length=30, blank=True)),
('last_name', models.CharField(verbose_name='last name', max_length=30, blank=True)),
('email', models.EmailField(verbose_name='email address', max_length=254, blank=True)),
('is_staff', models.BooleanField(verbose_name='staff status', default=False, help_text='Designates whether the user can log into this admin site.')),
('is_active', models.BooleanField(verbose_name='active', default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.')),
('date_joined', models.DateTimeField(verbose_name='date joined', default=django.utils.timezone.now)),
('create_time', models.DateTimeField(verbose_name='创建时间', auto_now_add=True)),
('update_time', models.DateTimeField(verbose_name='更新时间', auto_now=True)),
('is_delete', models.BooleanField(verbose_name='删除标记', default=False)),
('groups', models.ManyToManyField(verbose_name='groups', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group')),
('user_permissions', models.ManyToManyField(verbose_name='user permissions', blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission')),
],
options={
'verbose_name': '用户',
'verbose_name_plural': '用户',
'db_table': 'df_user',
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.CreateModel(
name='Address',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
('create_time', models.DateTimeField(verbose_name='创建时间', auto_now_add=True)),
('update_time', models.DateTimeField(verbose_name='更新时间', auto_now=True)),
('is_delete', models.BooleanField(verbose_name='删除标记', default=False)),
('receiver', models.CharField(verbose_name='收件人', max_length=20)),
('addr', models.CharField(verbose_name='收件地址', max_length=256)),
('zip_code', models.CharField(verbose_name='邮政编码', max_length=6, null=True)),
('phone', models.CharField(verbose_name='联系电话', max_length=11)),
('is_default', models.BooleanField(verbose_name='是否默认', default=False)),
('user', models.ForeignKey(verbose_name='所属账户', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': '地址',
'verbose_name_plural': '地址',
'db_table': 'df_address',
},
),
]
| true | true |
f7f7ca390d44c253bf050cce13ed05190953cfaf | 828 | py | Python | django_demos/polls_admnistration/mysite/urls.py | guilhermetco/CS97---WestWoodWalks | 4e252eadd8cd20e0127fa7b87491337e33786ac4 | [
"MIT"
] | null | null | null | django_demos/polls_admnistration/mysite/urls.py | guilhermetco/CS97---WestWoodWalks | 4e252eadd8cd20e0127fa7b87491337e33786ac4 | [
"MIT"
] | null | null | null | django_demos/polls_admnistration/mysite/urls.py | guilhermetco/CS97---WestWoodWalks | 4e252eadd8cd20e0127fa7b87491337e33786ac4 | [
"MIT"
] | null | null | null | """mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf.urls import include
urlpatterns = [
    # Delegate all /polls/ URLs to the polls app's URLconf.
    path('polls/', include('polls.urls')),
    # Django admin site.
    path('admin/', admin.site.urls),
]
| 34.5 | 77 | 0.707729 | from django.contrib import admin
from django.urls import path
from django.conf.urls import include
urlpatterns = [
path('polls/', include('polls.urls')),
path('admin/', admin.site.urls),
]
| true | true |
f7f7ca621560cc5b7a66f98bc234dddbac60c0fc | 3,791 | py | Python | 2018_02_15_cryptocurrencies_trading/algorithms/shared/shannon's_demon-1500960177478.py | jiricejchan/AnonymniAnalytici | e4e96f943d0b2232d9099c6e7bb690a3d25ea422 | [
"MIT"
] | 10 | 2017-03-28T06:52:22.000Z | 2017-11-21T17:41:11.000Z | 2018_02_15_cryptocurrencies_trading/algorithms/shared/shannon's_demon-1500960177478.py | jiricejchan/AnonymniAnalytici | e4e96f943d0b2232d9099c6e7bb690a3d25ea422 | [
"MIT"
] | 1 | 2017-07-21T08:27:01.000Z | 2017-07-21T08:27:01.000Z | 2018_02_15_cryptocurrencies_trading/algorithms/shared/shannon's_demon-1500960177478.py | jiricejchan/AnonymniAnalytici | e4e96f943d0b2232d9099c6e7bb690a3d25ea422 | [
"MIT"
] | 8 | 2017-03-05T17:21:40.000Z | 2019-12-01T18:46:39.000Z | from catalyst.api import order_target_percent, record, symbol, set_benchmark
def initialize(context):
    """Catalyst setup hook: select the traded pair and use it as benchmark."""
    context.ASSET_NAME = 'USDT_REP'
    context.asset = symbol(context.ASSET_NAME)
    set_benchmark(context.asset)
    # Marks the first handle_data() call, where the initial 50% buy happens.
    context.is_first_time = True

    # For all trading pairs in the poloniex bundle, the default denomination
    # currently supported by Catalyst is 1/1000th of a full coin. Use this
    # constant to scale the price of up to that of a full coin if desired.
    context.TICK_SIZE = 1.0
def handle_data(context, data):
    """Per-bar hook implementing Shannon's demon rebalancing.

    On the first bar, invest 50% of the portfolio in the asset and remember
    the entry price. Afterwards, whenever the price doubles or halves
    relative to the last rebalance price, rebalance back to a 50%
    investment ratio and reset the base price.
    """
    # Retrieve current asset price from pricing data (constant within a bar,
    # so reading it once up front matches the original behavior).
    price = data[context.asset].price

    # Define base price and make initial trades to achieve target investment
    # ratio of 0.5
    if context.is_first_time:
        order_target_percent(
            context.asset,
            0.5,
        )
        context.base_price = price
        context.first_price = price
        context.is_first_time = False

    REP_cumulative_return = (price/context.first_price-1)*100
    Portfolio_cumulative_return = (context.portfolio.portfolio_value/
        context.portfolio.starting_cash-1)*100

    # Trading logic: rebalance to a 0.5 investment ratio every time the price
    # of the asset doubles or decreases to half the base price. The two
    # previously duplicated branches are merged into one condition.
    if price > context.base_price*2 or price < context.base_price/2:
        order_target_percent(
            context.asset,
            0.5,
        )
        context.base_price = price

    # Save values for later inspection
    record(price=price,
           base_price=context.base_price,
           cash=context.portfolio.cash,
           leverage=context.account.leverage,
           Portfolio_cumulative_return=Portfolio_cumulative_return,
           REP_cumulative_return=REP_cumulative_return,
           )
def analyze(context=None, results=None):
    """Plot backtest results (returns, price/trades, leverage, cash) and
    save the full results frame to '<script name>.csv'."""
    import matplotlib.pyplot as plt
    import pandas as pd
    import sys
    import os
    from os.path import basename

    # Plot the portfolio and asset data.
    ax1 = plt.subplot(221)
    results[[
        'Portfolio_cumulative_return',
        'REP_cumulative_return',
    ]].plot(ax=ax1)
    ax1.set_ylabel('Percent Return (%)')

    ax2 = plt.subplot(222, sharex=ax1)
    ax2.set_ylabel('{asset} (USD)'.format(asset=context.ASSET_NAME))
    (context.TICK_SIZE * results[[
        'price',
        'base_price',
    ]]).plot(ax=ax2)

    # NOTE(review): DataFrame.ix was removed in pandas 1.0; this code requires
    # an old pandas — confirm the pinned version or port to .loc/.iloc.
    trans = results.ix[[t != [] for t in results.transactions]]
    # Mark buys with green up-triangles and sells with red down-triangles.
    buys = trans.ix[
        [t[0]['amount'] > 0 for t in trans.transactions]
    ]
    sells = trans.ix[
        [t[0]['amount'] < 0 for t in trans.transactions]
    ]
    ax2.plot(
        buys.index,
        context.TICK_SIZE * results.price[buys.index],
        '^',
        markersize=10,
        color='g',
    )
    ax2.plot(
        sells.index,
        context.TICK_SIZE * results.price[sells.index],
        'v',
        markersize=10,
        color='r',
    )

    ax3 = plt.subplot(223, sharex=ax1)
    results[['leverage']].plot(ax=ax3)
    ax3.set_ylabel('Leverage ')

    ax4 = plt.subplot(224, sharex=ax1)
    results[['cash']].plot(ax=ax4)
    ax4.set_ylabel('Cash (USD)')

    plt.legend(loc=3)

    # Show the plot.
    plt.gcf().set_size_inches(16, 8)
    plt.show()

    # Save results in CSV file named after the script (sys.argv[3]).
    filename = os.path.splitext(basename(sys.argv[3]))[0]
    results.to_csv(filename + '.csv')
def initialize(context):
context.ASSET_NAME = 'USDT_REP'
context.asset = symbol(context.ASSET_NAME)
set_benchmark(context.asset)
context.is_first_time = True
context.TICK_SIZE = 1.0
def handle_data(context, data):
if context.is_first_time:
order_target_percent(
context.asset,
0.5,
)
context.base_price = data[context.asset].price
context.first_price = data[context.asset].price
context.is_first_time = False
price = data[context.asset].price
REP_cumulative_return = (price/context.first_price-1)*100
Portfolio_cumulative_return = (context.portfolio.portfolio_value/
context.portfolio.starting_cash-1)*100
if price > context.base_price*2:
order_target_percent(
context.asset,
0.5,
)
context.base_price = data[context.asset].price
elif price < context.base_price/2:
order_target_percent(
context.asset,
0.5,
)
context.base_price = data[context.asset].price
price = data[context.asset].price
record(price=price,
base_price=context.base_price,
cash=context.portfolio.cash,
leverage=context.account.leverage,
Portfolio_cumulative_return=Portfolio_cumulative_return,
REP_cumulative_return=REP_cumulative_return,
)
def analyze(context=None, results=None):
import matplotlib.pyplot as plt
import pandas as pd
import sys
import os
from os.path import basename
ax1 = plt.subplot(221)
results[[
'Portfolio_cumulative_return',
'REP_cumulative_return',
]].plot(ax=ax1)
ax1.set_ylabel('Percent Return (%)')
ax2 = plt.subplot(222, sharex=ax1)
ax2.set_ylabel('{asset} (USD)'.format(asset=context.ASSET_NAME))
(context.TICK_SIZE * results[[
'price',
'base_price',
]]).plot(ax=ax2)
trans = results.ix[[t != [] for t in results.transactions]]
buys = trans.ix[
[t[0]['amount'] > 0 for t in trans.transactions]
]
sells = trans.ix[
[t[0]['amount'] < 0 for t in trans.transactions]
]
ax2.plot(
buys.index,
context.TICK_SIZE * results.price[buys.index],
'^',
markersize=10,
color='g',
)
ax2.plot(
sells.index,
context.TICK_SIZE * results.price[sells.index],
'v',
markersize=10,
color='r',
)
ax3 = plt.subplot(223, sharex=ax1)
results[['leverage']].plot(ax=ax3)
ax3.set_ylabel('Leverage ')
ax4 = plt.subplot(224, sharex=ax1)
results[['cash']].plot(ax=ax4)
ax4.set_ylabel('Cash (USD)')
plt.legend(loc=3)
plt.gcf().set_size_inches(16, 8)
plt.show()
filename = os.path.splitext(basename(sys.argv[3]))[0]
results.to_csv(filename + '.csv') | true | true |
f7f7ca63a630fb4004cf1204f79d61f42852e59c | 596 | py | Python | email_service/src/email_sender.py | JoaoManoel/final-paper | e8ab6261ab06d4b057513ca22169e2a98f66c64d | [
"MIT"
] | null | null | null | email_service/src/email_sender.py | JoaoManoel/final-paper | e8ab6261ab06d4b057513ca22169e2a98f66c64d | [
"MIT"
] | null | null | null | email_service/src/email_sender.py | JoaoManoel/final-paper | e8ab6261ab06d4b057513ca22169e2a98f66c64d | [
"MIT"
] | null | null | null | import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
gmail_user = ''
gmail_password = ''
def sendmail(From, to, subject, body):
    """Send an HTML email through Gmail's SMTP server.

    Uses the module-level gmail_user / gmail_password credentials (which
    must be filled in for login to succeed). Returns True on success and
    False if any SMTP error occurs.
    """
    msg = MIMEMultipart()
    msg['From'] = From
    msg['To'] = to
    msg['Subject'] = subject
    msg.attach(MIMEText(body, 'html'))
    try:
        # The context manager guarantees the connection is closed even when
        # login or sendmail raises (the original leaked it on exceptions,
        # since server.quit() was only reached on the success path).
        with smtplib.SMTP('smtp.gmail.com', 587) as server:
            server.ehlo()
            server.starttls()
            server.login(gmail_user, gmail_password)
            server.sendmail(From, to, msg.as_string())
        return True
    except smtplib.SMTPException as e:
        print(e)
        return False
return False | 22.923077 | 48 | 0.67953 | import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
gmail_user = ''
gmail_password = ''
def sendmail(From, to, subject, body):
msg = MIMEMultipart()
msg['From'] = From
msg['To'] = to
msg['Subject'] = subject
msg.attach(MIMEText(body, 'html'))
try:
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.login(gmail_user, gmail_password)
server.sendmail(From, to, msg.as_string())
server.quit()
return True
except smtplib.SMTPException as e:
print(e)
return False | true | true |
f7f7cc9c1b4fc41846aff3fb6bf0b60a335e5ec1 | 808 | py | Python | pyinterpolate/semivariance/__init__.py | NarayanAdithya/pyinterpolate | 3a8166ad0ca884f29be1afe789eff4ccfc891f71 | [
"BSD-3-Clause"
] | 61 | 2018-08-15T17:16:12.000Z | 2021-09-06T09:25:49.000Z | pyinterpolate/semivariance/__init__.py | NarayanAdithya/pyinterpolate | 3a8166ad0ca884f29be1afe789eff4ccfc891f71 | [
"BSD-3-Clause"
] | 70 | 2018-06-16T19:12:49.000Z | 2021-09-16T16:28:48.000Z | pyinterpolate/semivariance/__init__.py | NarayanAdithya/pyinterpolate | 3a8166ad0ca884f29be1afe789eff4ccfc891f71 | [
"BSD-3-Clause"
] | 7 | 2019-10-16T05:48:12.000Z | 2021-07-06T14:09:29.000Z | from .areal_semivariance.areal_semivariance import ArealSemivariance
from .semivariogram_deconvolution.regularize_semivariogram import RegularizedSemivariogram
from .semivariogram_estimation.calculate_covariance import calculate_covariance
from .semivariogram_estimation.calculate_semivariance import calculate_semivariance, calculate_weighted_semivariance
from .semivariogram_estimation.calculate_semivariance import calculate_directional_semivariogram
from .semivariogram_estimation.calculate_semivariance import build_variogram_point_cloud, show_variogram_cloud
from .semivariogram_estimation.calculate_semivariance import calc_semivariance_from_pt_cloud
from .semivariogram_estimation.calculate_semivariance import remove_outliers
from .semivariogram_fit.fit_semivariance import TheoreticalSemivariogram
| 80.8 | 116 | 0.928218 | from .areal_semivariance.areal_semivariance import ArealSemivariance
from .semivariogram_deconvolution.regularize_semivariogram import RegularizedSemivariogram
from .semivariogram_estimation.calculate_covariance import calculate_covariance
from .semivariogram_estimation.calculate_semivariance import calculate_semivariance, calculate_weighted_semivariance
from .semivariogram_estimation.calculate_semivariance import calculate_directional_semivariogram
from .semivariogram_estimation.calculate_semivariance import build_variogram_point_cloud, show_variogram_cloud
from .semivariogram_estimation.calculate_semivariance import calc_semivariance_from_pt_cloud
from .semivariogram_estimation.calculate_semivariance import remove_outliers
from .semivariogram_fit.fit_semivariance import TheoreticalSemivariogram
| true | true |
f7f7cdf9364dff18133dd3ff2b5ba10a52757e63 | 2,277 | py | Python | py_file_carving/libary/worker/worker_maximal_size_terminating.py | wahlflo/pyFileCarving | 7bbbbedccb551273fd4b22614c86f51bc876bd78 | [
"MIT"
] | null | null | null | py_file_carving/libary/worker/worker_maximal_size_terminating.py | wahlflo/pyFileCarving | 7bbbbedccb551273fd4b22614c86f51bc876bd78 | [
"MIT"
] | null | null | null | py_file_carving/libary/worker/worker_maximal_size_terminating.py | wahlflo/pyFileCarving | 7bbbbedccb551273fd4b22614c86f51bc876bd78 | [
"MIT"
] | null | null | null | import re
from .abstract_worker import AbstractWorker
class WorkerMaximalSizeTerminating(AbstractWorker):
    """Carves files that end with a footer byte sequence, bounded by a size cap.

    Data is fed in chunks via update(); whenever the configured footer
    pattern is found, everything from the start of the stream up to (and
    including) the footer is submitted as a carved file. The worker stops
    once the accumulated data exceeds maximal_size_in_bytes.
    """

    def __init__(self, file_extension: str, first_chunk: bytes, footer_sequence: bytes, maximal_size_in_bytes: int,
                 make_corruption_checks: bool, flush_if_maximum_file_size_is_reached=False, minimum_bytes=None,
                 corruption_check=None):
        super().__init__(first_chunk=first_chunk, make_corruption_checks=make_corruption_checks,
                         flush_if_maximum_file_size_is_reached=flush_if_maximum_file_size_is_reached)
        # Compiled once here and reused on every update() call.
        self.footer_sequence = re.compile(footer_sequence)
        self.file_extension = file_extension
        self.maximal_size_in_bytes = maximal_size_in_bytes
        # NOTE(review): minimum_bytes is stored but not enforced in update().
        self.minimum_bytes = minimum_bytes
        self.corruption_check = corruption_check
        # End offsets already submitted, so the same file isn't carved twice.
        self.already_carved = set()
        self.last_chunk = first_chunk

    def update(self, new_data_chunk: bytes) -> bool:
        """Consume the next chunk; returns True if object continues living otherwise False."""
        # Search across the previous/current chunk boundary so footers split
        # between two chunks are still found.
        last_two_chunks = self.last_chunk + new_data_chunk
        index_offset = len(self.data) - len(self.last_chunk)
        self.data += new_data_chunk
        self.last_chunk = new_data_chunk

        # FIX: use the footer pattern configured in __init__ instead of the
        # hard-coded, PDF-specific b'%EOF\r\n' the original recompiled here,
        # which silently ignored the footer_sequence parameter.
        for m in self.footer_sequence.finditer(last_two_chunks):
            end_index = index_offset + m.end(0)
            if end_index not in self.already_carved:
                self.already_carved.add(end_index)
                if self.__check_on_corruption(content=self.data[:end_index]):
                    self.file_writer.submit_carved_file(content=self.data[:end_index],
                                                        file_extension=self.file_extension)

        if self.maximal_size_in_bytes is not None and len(self.data) > self.maximal_size_in_bytes:
            # Optionally flush whatever was accumulated before terminating.
            if self.flush_if_maximum_file_size_is_reached and self.__check_on_corruption(content=self.data):
                self.file_writer.submit_carved_file(content=self.data, file_extension=self.file_extension)
            return False
        return True

    def __check_on_corruption(self, content: bytes):
        """Run the optional corruption check; True means the content looks valid."""
        if self.make_corruption_checks and self.corruption_check is not None:
            return self.corruption_check(content)
        return True
| 47.4375 | 115 | 0.6917 | import re
from .abstract_worker import AbstractWorker
class WorkerMaximalSizeTerminating(AbstractWorker):
def __init__(self, file_extension: str, first_chunk: bytes, footer_sequence: bytes, maximal_size_in_bytes: int,
make_corruption_checks: bool, flush_if_maximum_file_size_is_reached=False, minimum_bytes=None,
corruption_check=None):
super().__init__(first_chunk=first_chunk, make_corruption_checks=make_corruption_checks,
flush_if_maximum_file_size_is_reached=flush_if_maximum_file_size_is_reached)
self.footer_sequence = re.compile(footer_sequence)
self.file_extension = file_extension
self.maximal_size_in_bytes = maximal_size_in_bytes
self.minimum_bytes = minimum_bytes
self.corruption_check = corruption_check
self.already_carved = set()
self.last_chunk = first_chunk
def update(self, new_data_chunk: bytes) -> bool:
last_two_chunks = self.last_chunk + new_data_chunk
index_offset = len(self.data) - len(self.last_chunk)
self.data += new_data_chunk
self.last_chunk = new_data_chunk
for m in re.finditer(re.compile(b'%EOF\r\n'), last_two_chunks):
end_index = index_offset + m.end(0)
if end_index not in self.already_carved:
self.already_carved.add(end_index)
if self.__check_on_corruption(content=self.data[:end_index]):
self.file_writer.submit_carved_file(content=self.data[:end_index],
file_extension=self.file_extension)
if self.maximal_size_in_bytes is not None and len(self.data) > self.maximal_size_in_bytes:
if self.flush_if_maximum_file_size_is_reached and self.__check_on_corruption(content=self.data):
self.file_writer.submit_carved_file(content=self.data, file_extension=self.file_extension)
return False
return True
def __check_on_corruption(self, content: bytes):
if self.make_corruption_checks and self.corruption_check is not None:
return self.corruption_check(content)
return True
| true | true |
f7f7ce0f75dd43daebb219e2c3f12bf492c63354 | 5,824 | py | Python | tests/test_scripts/output/genpython/inheritedid.py | deepakunni3/linkml | a335227b05b0290c21ebae50bb99e16eca57c8eb | [
"CC0-1.0"
] | null | null | null | tests/test_scripts/output/genpython/inheritedid.py | deepakunni3/linkml | a335227b05b0290c21ebae50bb99e16eca57c8eb | [
"CC0-1.0"
] | null | null | null | tests/test_scripts/output/genpython/inheritedid.py | deepakunni3/linkml | a335227b05b0290c21ebae50bb99e16eca57c8eb | [
"CC0-1.0"
] | null | null | null | # Auto generated from inheritedid.yaml by pythongen.py version: 0.9.0
# Generation date: 2021-12-28T22:05:46
# Schema: test_inherited_id
#
# id: https://example.org/inheritedid
# description: Test
# license: https://creativecommons.org/publicdomain/zero/1.0/
import dataclasses
import sys
import re
from jsonasobj2 import JsonObj, as_dict
from typing import Optional, List, Union, Dict, ClassVar, Any
from dataclasses import dataclass
from linkml_runtime.linkml_model.meta import EnumDefinition, PermissibleValue, PvFormulaOptions
from linkml_runtime.utils.slot import Slot
from linkml_runtime.utils.metamodelcore import empty_list, empty_dict, bnode
from linkml_runtime.utils.yamlutils import YAMLRoot, extended_str, extended_float, extended_int
from linkml_runtime.utils.dataclass_extensions_376 import dataclasses_init_fn_with_kwargs
from linkml_runtime.utils.formatutils import camelcase, underscore, sfx
from linkml_runtime.utils.enumerations import EnumDefinitionImpl
from rdflib import Namespace, URIRef
from linkml_runtime.utils.curienamespace import CurieNamespace
from linkml_runtime.utils.metamodelcore import URI
metamodel_version = "1.7.0"
# Overwrite dataclasses _init_fn to add **kwargs in __init__
dataclasses._init_fn = dataclasses_init_fn_with_kwargs
# Namespaces
LINKML = CurieNamespace('linkml', 'https://w3id.org/linkml/')
XSD = CurieNamespace('xsd', 'http://www.w3.org/2001/XMLSchema#')
DEFAULT_ = LINKML
# Types
class String(str):
type_class_uri = XSD.string
type_class_curie = "xsd:string"
type_name = "string"
type_model_uri = LINKML.String
class Uri(URI):
""" a complete URI """
type_class_uri = XSD.anyURI
type_class_curie = "xsd:anyURI"
type_name = "uri"
type_model_uri = LINKML.Uri
class IdentifierType(String):
""" A string that is intended to uniquely identify a thing May be URI in full or compact (CURIE) form """
type_class_uri = XSD.string
type_class_curie = "xsd:string"
type_name = "identifier type"
type_model_uri = LINKML.IdentifierType
class LabelType(String):
""" A string that provides a human-readable name for a thing """
type_class_uri = XSD.string
type_class_curie = "xsd:string"
type_name = "label type"
type_model_uri = LINKML.LabelType
# Class references
class NamedThingId(IdentifierType):
pass
class AttributeId(IdentifierType):
pass
class BiologicalSexId(AttributeId):
pass
class OntologyClassId(NamedThingId):
pass
@dataclass
class NamedThing(YAMLRoot):
    """
    a databased entity or concept/class
    """
    # Generated by pythongen.py -- slots inherited by subclasses (none here).
    _inherited_slots: ClassVar[List[str]] = []

    class_class_uri: ClassVar[URIRef] = LINKML.NamedThing
    class_class_curie: ClassVar[str] = "linkml:NamedThing"
    class_name: ClassVar[str] = "named thing"
    class_model_uri: ClassVar[URIRef] = LINKML.NamedThing

    # Required identifier; plain strings are coerced to NamedThingId below.
    id: Union[str, NamedThingId] = None
    # Optional human-readable label; coerced to LabelType below.
    name: Optional[Union[str, LabelType]] = None

    def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
        # id is mandatory: report it as missing when empty, then normalise
        # a plain string to the NamedThingId reference type.
        if self._is_empty(self.id):
            self.MissingRequiredField("id")
        if not isinstance(self.id, NamedThingId):
            self.id = NamedThingId(self.id)

        # name is optional; normalise only when supplied.
        if self.name is not None and not isinstance(self.name, LabelType):
            self.name = LabelType(self.name)

        super().__post_init__(**kwargs)
@dataclass
class Attribute(YAMLRoot):
    """
    A property or characteristic of an entity
    """
    # Generated by pythongen.py -- slots inherited by subclasses (none here).
    _inherited_slots: ClassVar[List[str]] = []

    class_class_uri: ClassVar[URIRef] = LINKML.Attribute
    class_class_curie: ClassVar[str] = "linkml:Attribute"
    class_name: ClassVar[str] = "attribute"
    class_model_uri: ClassVar[URIRef] = LINKML.Attribute

    # Required identifier; plain strings are coerced to AttributeId below.
    id: Union[str, AttributeId] = None

    def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
        # id is mandatory: report it as missing when empty, then normalise
        # a plain string to the AttributeId reference type.
        if self._is_empty(self.id):
            self.MissingRequiredField("id")
        if not isinstance(self.id, AttributeId):
            self.id = AttributeId(self.id)

        super().__post_init__(**kwargs)
@dataclass
class BiologicalSex(Attribute):
_inherited_slots: ClassVar[List[str]] = []
class_class_uri: ClassVar[URIRef] = LINKML.BiologicalSex
class_class_curie: ClassVar[str] = "linkml:BiologicalSex"
class_name: ClassVar[str] = "biological sex"
class_model_uri: ClassVar[URIRef] = LINKML.BiologicalSex
id: Union[str, BiologicalSexId] = None
def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
if self._is_empty(self.id):
self.MissingRequiredField("id")
if not isinstance(self.id, BiologicalSexId):
self.id = BiologicalSexId(self.id)
super().__post_init__(**kwargs)
@dataclass
class OntologyClass(NamedThing):
"""
a concept or class in an ontology, vocabulary or thesaurus
"""
_inherited_slots: ClassVar[List[str]] = []
class_class_uri: ClassVar[URIRef] = LINKML.OntologyClass
class_class_curie: ClassVar[str] = "linkml:OntologyClass"
class_name: ClassVar[str] = "ontology class"
class_model_uri: ClassVar[URIRef] = LINKML.OntologyClass
id: Union[str, OntologyClassId] = None
def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
if self._is_empty(self.id):
self.MissingRequiredField("id")
if not isinstance(self.id, OntologyClassId):
self.id = OntologyClassId(self.id)
super().__post_init__(**kwargs)
# Enumerations
# Slots
class slots:
pass
slots.id = Slot(uri=LINKML.id, name="id", curie=LINKML.curie('id'),
model_uri=LINKML.id, domain=NamedThing, range=Union[str, NamedThingId])
slots.name = Slot(uri=LINKML.name, name="name", curie=LINKML.curie('name'),
model_uri=LINKML.name, domain=NamedThing, range=Optional[Union[str, LabelType]])
| 30.333333 | 109 | 0.712397 |
import dataclasses
import sys
import re
from jsonasobj2 import JsonObj, as_dict
from typing import Optional, List, Union, Dict, ClassVar, Any
from dataclasses import dataclass
from linkml_runtime.linkml_model.meta import EnumDefinition, PermissibleValue, PvFormulaOptions
from linkml_runtime.utils.slot import Slot
from linkml_runtime.utils.metamodelcore import empty_list, empty_dict, bnode
from linkml_runtime.utils.yamlutils import YAMLRoot, extended_str, extended_float, extended_int
from linkml_runtime.utils.dataclass_extensions_376 import dataclasses_init_fn_with_kwargs
from linkml_runtime.utils.formatutils import camelcase, underscore, sfx
from linkml_runtime.utils.enumerations import EnumDefinitionImpl
from rdflib import Namespace, URIRef
from linkml_runtime.utils.curienamespace import CurieNamespace
from linkml_runtime.utils.metamodelcore import URI
metamodel_version = "1.7.0"
dataclasses._init_fn = dataclasses_init_fn_with_kwargs
LINKML = CurieNamespace('linkml', 'https://w3id.org/linkml/')
XSD = CurieNamespace('xsd', 'http://www.w3.org/2001/XMLSchema#')
DEFAULT_ = LINKML
class String(str):
type_class_uri = XSD.string
type_class_curie = "xsd:string"
type_name = "string"
type_model_uri = LINKML.String
class Uri(URI):
type_class_uri = XSD.anyURI
type_class_curie = "xsd:anyURI"
type_name = "uri"
type_model_uri = LINKML.Uri
class IdentifierType(String):
type_class_uri = XSD.string
type_class_curie = "xsd:string"
type_name = "identifier type"
type_model_uri = LINKML.IdentifierType
class LabelType(String):
type_class_uri = XSD.string
type_class_curie = "xsd:string"
type_name = "label type"
type_model_uri = LINKML.LabelType
class NamedThingId(IdentifierType):
pass
class AttributeId(IdentifierType):
pass
class BiologicalSexId(AttributeId):
pass
class OntologyClassId(NamedThingId):
pass
@dataclass
class NamedThing(YAMLRoot):
_inherited_slots: ClassVar[List[str]] = []
class_class_uri: ClassVar[URIRef] = LINKML.NamedThing
class_class_curie: ClassVar[str] = "linkml:NamedThing"
class_name: ClassVar[str] = "named thing"
class_model_uri: ClassVar[URIRef] = LINKML.NamedThing
id: Union[str, NamedThingId] = None
name: Optional[Union[str, LabelType]] = None
def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
if self._is_empty(self.id):
self.MissingRequiredField("id")
if not isinstance(self.id, NamedThingId):
self.id = NamedThingId(self.id)
if self.name is not None and not isinstance(self.name, LabelType):
self.name = LabelType(self.name)
super().__post_init__(**kwargs)
@dataclass
class Attribute(YAMLRoot):
_inherited_slots: ClassVar[List[str]] = []
class_class_uri: ClassVar[URIRef] = LINKML.Attribute
class_class_curie: ClassVar[str] = "linkml:Attribute"
class_name: ClassVar[str] = "attribute"
class_model_uri: ClassVar[URIRef] = LINKML.Attribute
id: Union[str, AttributeId] = None
def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
if self._is_empty(self.id):
self.MissingRequiredField("id")
if not isinstance(self.id, AttributeId):
self.id = AttributeId(self.id)
super().__post_init__(**kwargs)
@dataclass
class BiologicalSex(Attribute):
_inherited_slots: ClassVar[List[str]] = []
class_class_uri: ClassVar[URIRef] = LINKML.BiologicalSex
class_class_curie: ClassVar[str] = "linkml:BiologicalSex"
class_name: ClassVar[str] = "biological sex"
class_model_uri: ClassVar[URIRef] = LINKML.BiologicalSex
id: Union[str, BiologicalSexId] = None
def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
if self._is_empty(self.id):
self.MissingRequiredField("id")
if not isinstance(self.id, BiologicalSexId):
self.id = BiologicalSexId(self.id)
super().__post_init__(**kwargs)
@dataclass
class OntologyClass(NamedThing):
_inherited_slots: ClassVar[List[str]] = []
class_class_uri: ClassVar[URIRef] = LINKML.OntologyClass
class_class_curie: ClassVar[str] = "linkml:OntologyClass"
class_name: ClassVar[str] = "ontology class"
class_model_uri: ClassVar[URIRef] = LINKML.OntologyClass
id: Union[str, OntologyClassId] = None
def __post_init__(self, *_: List[str], **kwargs: Dict[str, Any]):
if self._is_empty(self.id):
self.MissingRequiredField("id")
if not isinstance(self.id, OntologyClassId):
self.id = OntologyClassId(self.id)
super().__post_init__(**kwargs)
class slots:
pass
slots.id = Slot(uri=LINKML.id, name="id", curie=LINKML.curie('id'),
model_uri=LINKML.id, domain=NamedThing, range=Union[str, NamedThingId])
slots.name = Slot(uri=LINKML.name, name="name", curie=LINKML.curie('name'),
model_uri=LINKML.name, domain=NamedThing, range=Optional[Union[str, LabelType]])
| true | true |
f7f7cedf38496f5182e702d5126fb479d2e4a14c | 37,870 | py | Python | nova/tests/unit/scheduler/test_filter_scheduler.py | hashsos/hashcloudos-nova | 8350f20167f256f740c5bb94f9222c95fa3e32ad | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/scheduler/test_filter_scheduler.py | hashsos/hashcloudos-nova | 8350f20167f256f740c5bb94f9222c95fa3e32ad | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/scheduler/test_filter_scheduler.py | hashsos/hashcloudos-nova | 8350f20167f256f740c5bb94f9222c95fa3e32ad | [
"Apache-2.0"
] | 1 | 2020-07-22T21:17:41.000Z | 2020-07-22T21:17:41.000Z | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Filter Scheduler.
"""
import mock
from nova import exception
from nova import objects
from nova.scheduler import client
from nova.scheduler.client import report
from nova.scheduler import filter_scheduler
from nova.scheduler import host_manager
from nova.scheduler import utils as scheduler_utils
from nova.scheduler import weights
from nova import test # noqa
from nova.tests.unit.scheduler import test_scheduler
from nova.tests import uuidsentinel as uuids
class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase):
"""Test case for Filter Scheduler."""
driver_cls = filter_scheduler.FilterScheduler
    @mock.patch('nova.scheduler.client.SchedulerClient')
    def setUp(self, mock_client):
        # Wire a spec'd SchedulerClient mock into the driver before the
        # parent setUp() constructs it: its reportclient (the Placement API
        # wrapper) is kept on self.placement_client so individual tests can
        # assert on placement calls made by the driver under test.
        pc_client = mock.Mock(spec=report.SchedulerReportClient)
        sched_client = mock.Mock(spec=client.SchedulerClient)
        sched_client.reportclient = pc_client
        mock_client.return_value = sched_client
        self.placement_client = pc_client
        super(FilterSchedulerTestCase, self).setUp()
    @mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
                '_claim_resources')
    @mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
                '_get_all_host_states')
    @mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
                '_get_sorted_hosts')
    def test_schedule_placement_bad_comms(self, mock_get_hosts,
            mock_get_all_states, mock_claim):
        """If there was a problem communicating with the Placement service,
        alloc_reqs_by_rp_uuid will be None and we need to avoid trying to claim
        in the Placement API.
        """
        spec_obj = objects.RequestSpec(
            num_instances=1,
            flavor=objects.Flavor(memory_mb=512,
                                  root_gb=512,
                                  ephemeral_gb=0,
                                  swap=0,
                                  vcpus=1),
            project_id=uuids.project_id,
            instance_group=None)

        host_state = mock.Mock(spec=host_manager.HostState,
            host=mock.sentinel.host, uuid=uuids.cn1, cell_uuid=uuids.cell)
        all_host_states = [host_state]
        mock_get_all_states.return_value = all_host_states
        mock_get_hosts.return_value = all_host_states

        instance_uuids = None
        ctx = mock.Mock()
        # Passing None for alloc_reqs_by_rp_uuid simulates the Placement
        # communication failure described in the docstring.
        selected_hosts = self.driver._schedule(ctx, spec_obj,
            instance_uuids, None, mock.sentinel.provider_summaries)

        mock_get_all_states.assert_called_once_with(
            ctx.elevated.return_value, spec_obj,
            mock.sentinel.provider_summaries)
        mock_get_hosts.assert_called_once_with(spec_obj, all_host_states, 0)

        self.assertEqual(len(selected_hosts), 1)
        self.assertEqual([host_state], selected_hosts)

        # Ensure that we have consumed the resources on the chosen host states
        host_state.consume_from_request.assert_called_once_with(spec_obj)

        # And ensure we never called _claim_resources()
        self.assertFalse(mock_claim.called)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_claim_resources')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_all_host_states')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_sorted_hosts')
def test_schedule_old_conductor(self, mock_get_hosts,
mock_get_all_states, mock_claim):
"""Old conductor can call scheduler without the instance_uuids
parameter. When this happens, we need to ensure we do not attempt to
claim resources in the placement API since obviously we need instance
UUIDs to perform those claims.
"""
spec_obj = objects.RequestSpec(
num_instances=1,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=None)
host_state = mock.Mock(spec=host_manager.HostState,
host=mock.sentinel.host, uuid=uuids.cn1)
all_host_states = [host_state]
mock_get_all_states.return_value = all_host_states
mock_get_hosts.return_value = all_host_states
instance_uuids = None
ctx = mock.Mock()
selected_hosts = self.driver._schedule(ctx, spec_obj,
instance_uuids, mock.sentinel.alloc_reqs_by_rp_uuid,
mock.sentinel.provider_summaries)
mock_get_all_states.assert_called_once_with(
ctx.elevated.return_value, spec_obj,
mock.sentinel.provider_summaries)
mock_get_hosts.assert_called_once_with(spec_obj, all_host_states, 0)
self.assertEqual(len(selected_hosts), 1)
self.assertEqual([host_state], selected_hosts)
# Ensure that we have consumed the resources on the chosen host states
host_state.consume_from_request.assert_called_once_with(spec_obj)
# And ensure we never called _claim_resources()
self.assertFalse(mock_claim.called)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_claim_resources')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_all_host_states')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_sorted_hosts')
def _test_schedule_successful_claim(self, mock_get_hosts,
mock_get_all_states, mock_claim, num_instances=1):
spec_obj = objects.RequestSpec(
num_instances=num_instances,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=None)
host_state = mock.Mock(spec=host_manager.HostState,
host=mock.sentinel.host, uuid=uuids.cn1, cell_uuid=uuids.cell1)
all_host_states = [host_state]
mock_get_all_states.return_value = all_host_states
mock_get_hosts.return_value = all_host_states
mock_claim.return_value = True
instance_uuids = [uuids.instance]
alloc_reqs_by_rp_uuid = {
uuids.cn1: [mock.sentinel.alloc_req],
}
ctx = mock.Mock()
selected_hosts = self.driver._schedule(ctx, spec_obj, instance_uuids,
alloc_reqs_by_rp_uuid, mock.sentinel.provider_summaries)
mock_get_all_states.assert_called_once_with(
ctx.elevated.return_value, spec_obj,
mock.sentinel.provider_summaries)
mock_get_hosts.assert_called()
mock_claim.assert_called_once_with(ctx.elevated.return_value, spec_obj,
uuids.instance, [mock.sentinel.alloc_req])
self.assertEqual(len(selected_hosts), 1)
self.assertEqual([[host_state]], selected_hosts)
# Ensure that we have consumed the resources on the chosen host states
host_state.consume_from_request.assert_called_once_with(spec_obj)
    def test_schedule_successful_claim(self):
        # Single-instance happy path; the shared helper performs the claim
        # and host-consumption assertions.
        self._test_schedule_successful_claim()
    def test_schedule_old_reqspec_and_move_operation(self):
        """This test is for verifying that in case of a move operation with an
        original RequestSpec created for 3 concurrent instances, we only verify
        the instance that is moved.
        """
        # The helper passes a single instance UUID regardless of
        # num_instances, mirroring how a move operation reuses the spec.
        self._test_schedule_successful_claim(num_instances=3)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_cleanup_allocations')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_claim_resources')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_all_host_states')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_sorted_hosts')
def test_schedule_unsuccessful_claim(self, mock_get_hosts,
mock_get_all_states, mock_claim, mock_cleanup):
"""Tests that we return an empty list if we are unable to successfully
claim resources for the instance
"""
spec_obj = objects.RequestSpec(
num_instances=1,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=None)
host_state = mock.Mock(spec=host_manager.HostState,
host=mock.sentinel.host, uuid=uuids.cn1, cell_uuid=uuids.cell1)
all_host_states = [host_state]
mock_get_all_states.return_value = all_host_states
mock_get_hosts.return_value = all_host_states
mock_claim.return_value = False
instance_uuids = [uuids.instance]
alloc_reqs_by_rp_uuid = {
uuids.cn1: [mock.sentinel.alloc_req],
}
ctx = mock.Mock()
selected_hosts = self.driver._schedule(ctx, spec_obj,
instance_uuids, alloc_reqs_by_rp_uuid,
mock.sentinel.provider_summaries)
mock_get_all_states.assert_called_once_with(
ctx.elevated.return_value, spec_obj,
mock.sentinel.provider_summaries)
mock_get_hosts.assert_called_once_with(spec_obj, all_host_states, 0)
mock_claim.assert_called_once_with(ctx.elevated.return_value, spec_obj,
uuids.instance, [mock.sentinel.alloc_req])
self.assertEqual([], selected_hosts)
mock_cleanup.assert_called_once_with([])
# Ensure that we have consumed the resources on the chosen host states
self.assertFalse(host_state.consume_from_request.called)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_cleanup_allocations')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_claim_resources')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_all_host_states')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_sorted_hosts')
def test_schedule_not_all_instance_clean_claimed(self, mock_get_hosts,
mock_get_all_states, mock_claim, mock_cleanup):
"""Tests that we clean up previously-allocated instances if not all
instances could be scheduled
"""
spec_obj = objects.RequestSpec(
num_instances=2,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=None)
host_state = mock.Mock(spec=host_manager.HostState,
host=mock.sentinel.host, uuid=uuids.cn1, cell_uuid=uuids.cell1)
all_host_states = [host_state]
mock_get_all_states.return_value = all_host_states
mock_get_hosts.side_effect = [
all_host_states, # first instance: return all the hosts (only one)
[], # second: act as if no more hosts that meet criteria
all_host_states, # the final call when creating alternates
]
mock_claim.return_value = True
instance_uuids = [uuids.instance1, uuids.instance2]
alloc_reqs_by_rp_uuid = {
uuids.cn1: [mock.sentinel.alloc_req],
}
ctx = mock.Mock()
self.driver._schedule(ctx, spec_obj, instance_uuids,
alloc_reqs_by_rp_uuid, mock.sentinel.provider_summaries)
# Ensure we cleaned up the first successfully-claimed instance
mock_cleanup.assert_called_once_with([uuids.instance1])
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_claim_resources')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_all_host_states')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_sorted_hosts')
def test_schedule_instance_group(self, mock_get_hosts,
mock_get_all_states, mock_claim):
"""Test that since the request spec object contains an instance group
object, that upon choosing a host in the primary schedule loop,
that we update the request spec's instance group information
"""
num_instances = 2
ig = objects.InstanceGroup(hosts=[])
spec_obj = objects.RequestSpec(
num_instances=num_instances,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=ig)
hs1 = mock.Mock(spec=host_manager.HostState, host='host1',
uuid=uuids.cn1, cell_uuid=uuids.cell1)
hs2 = mock.Mock(spec=host_manager.HostState, host='host2',
uuid=uuids.cn2, cell_uuid=uuids.cell2)
all_host_states = [hs1, hs2]
mock_get_all_states.return_value = all_host_states
mock_claim.return_value = True
alloc_reqs_by_rp_uuid = {
uuids.cn1: [mock.sentinel.alloc_req_cn1],
uuids.cn2: [mock.sentinel.alloc_req_cn2],
}
# Simulate host 1 and host 2 being randomly returned first by
# _get_sorted_hosts() in the two iterations for each instance in
# num_instances
mock_get_hosts.side_effect = ([hs2, hs1], [hs1, hs2],
[hs2, hs1], [hs1, hs2])
instance_uuids = [
getattr(uuids, 'instance%d' % x) for x in range(num_instances)
]
ctx = mock.Mock()
self.driver._schedule(ctx, spec_obj, instance_uuids,
alloc_reqs_by_rp_uuid, mock.sentinel.provider_summaries)
# Check that we called _claim_resources() for both the first and second
# host state
claim_calls = [
mock.call(ctx.elevated.return_value, spec_obj,
uuids.instance0, [mock.sentinel.alloc_req_cn2]),
mock.call(ctx.elevated.return_value, spec_obj,
uuids.instance1, [mock.sentinel.alloc_req_cn1]),
]
mock_claim.assert_has_calls(claim_calls)
# Check that _get_sorted_hosts() is called twice and that the
# second time, we pass it the hosts that were returned from
# _get_sorted_hosts() the first time
sorted_host_calls = [
mock.call(spec_obj, all_host_states, 0),
mock.call(spec_obj, [hs2, hs1], 1),
]
mock_get_hosts.assert_has_calls(sorted_host_calls)
# The instance group object should have both host1 and host2 in its
# instance group hosts list and there should not be any "changes" to
# save in the instance group object
self.assertEqual(['host2', 'host1'], ig.hosts)
self.assertEqual({}, ig.obj_get_changes())
    @mock.patch('random.choice', side_effect=lambda x: x[1])
    @mock.patch('nova.scheduler.host_manager.HostManager.get_weighed_hosts')
    @mock.patch('nova.scheduler.host_manager.HostManager.get_filtered_hosts')
    def test_get_sorted_hosts(self, mock_filt, mock_weighed, mock_rand):
        """Tests the call that returns a sorted list of hosts by calling the
        host manager's filtering and weighing routines
        """
        # host_subset_size=2 makes both weighed hosts candidates for the
        # patched random.choice() above, which always picks the second one.
        self.flags(host_subset_size=2, group='filter_scheduler')
        hs1 = mock.Mock(spec=host_manager.HostState, host='host1',
            cell_uuid=uuids.cell1)
        hs2 = mock.Mock(spec=host_manager.HostState, host='host2',
            cell_uuid=uuids.cell2)
        all_host_states = [hs1, hs2]

        mock_weighed.return_value = [
            weights.WeighedHost(hs1, 1.0), weights.WeighedHost(hs2, 1.0),
        ]

        results = self.driver._get_sorted_hosts(mock.sentinel.spec,
            all_host_states, mock.sentinel.index)

        mock_filt.assert_called_once_with(all_host_states, mock.sentinel.spec,
            mock.sentinel.index)

        mock_weighed.assert_called_once_with(mock_filt.return_value,
            mock.sentinel.spec)

        # We override random.choice() to pick the **second** element of the
        # returned weighed hosts list, which is the host state #2. This tests
        # the code path that combines the randomly-chosen host with the
        # remaining list of weighed host state objects
        self.assertEqual([hs2, hs1], results)
@mock.patch('random.choice', side_effect=lambda x: x[0])
@mock.patch('nova.scheduler.host_manager.HostManager.get_weighed_hosts')
@mock.patch('nova.scheduler.host_manager.HostManager.get_filtered_hosts')
def test_get_sorted_hosts_subset_less_than_num_weighed(self, mock_filt,
mock_weighed, mock_rand):
"""Tests that when we have >1 weighed hosts but a host subset size of
1, that we always pick the first host in the weighed host
"""
self.flags(host_subset_size=1, group='filter_scheduler')
hs1 = mock.Mock(spec=host_manager.HostState, host='host1',
cell_uuid=uuids.cell1)
hs2 = mock.Mock(spec=host_manager.HostState, host='host2',
cell_uuid=uuids.cell2)
all_host_states = [hs1, hs2]
mock_weighed.return_value = [
weights.WeighedHost(hs1, 1.0), weights.WeighedHost(hs2, 1.0),
]
results = self.driver._get_sorted_hosts(mock.sentinel.spec,
all_host_states, mock.sentinel.index)
mock_filt.assert_called_once_with(all_host_states, mock.sentinel.spec,
mock.sentinel.index)
mock_weighed.assert_called_once_with(mock_filt.return_value,
mock.sentinel.spec)
# We should be randomly selecting only from a list of one host state
mock_rand.assert_called_once_with([hs1])
self.assertEqual([hs1, hs2], results)
@mock.patch('random.choice', side_effect=lambda x: x[0])
@mock.patch('nova.scheduler.host_manager.HostManager.get_weighed_hosts')
@mock.patch('nova.scheduler.host_manager.HostManager.get_filtered_hosts')
def test_get_sorted_hosts_subset_greater_than_num_weighed(self, mock_filt,
mock_weighed, mock_rand):
"""Hosts should still be chosen if host subset size is larger than
number of weighed hosts.
"""
self.flags(host_subset_size=20, group='filter_scheduler')
hs1 = mock.Mock(spec=host_manager.HostState, host='host1',
cell_uuid=uuids.cell1)
hs2 = mock.Mock(spec=host_manager.HostState, host='host2',
cell_uuid=uuids.cell2)
all_host_states = [hs1, hs2]
mock_weighed.return_value = [
weights.WeighedHost(hs1, 1.0), weights.WeighedHost(hs2, 1.0),
]
results = self.driver._get_sorted_hosts(mock.sentinel.spec,
all_host_states, mock.sentinel.index)
mock_filt.assert_called_once_with(all_host_states, mock.sentinel.spec,
mock.sentinel.index)
mock_weighed.assert_called_once_with(mock_filt.return_value,
mock.sentinel.spec)
# We overrode random.choice() to return the first element in the list,
# so even though we had a host_subset_size greater than the number of
# weighed hosts (2), we just random.choice() on the entire set of
# weighed hosts and thus return [hs1, hs2]
self.assertEqual([hs1, hs2], results)
def test_cleanup_allocations(self):
instance_uuids = []
# Check we don't do anything if there's no instance UUIDs to cleanup
# allocations for
pc = self.placement_client
self.driver._cleanup_allocations(instance_uuids)
self.assertFalse(pc.delete_allocation_for_instance.called)
instance_uuids = [uuids.instance1, uuids.instance2]
self.driver._cleanup_allocations(instance_uuids)
exp_calls = [mock.call(uuids.instance1), mock.call(uuids.instance2)]
pc.delete_allocation_for_instance.assert_has_calls(exp_calls)
def test_claim_resources(self):
"""Tests that when _schedule() calls _claim_resources(), that we
appropriately call the placement client to claim resources for the
instance.
"""
ctx = mock.Mock(user_id=uuids.user_id)
spec_obj = mock.Mock(project_id=uuids.project_id)
instance_uuid = uuids.instance
alloc_reqs = [mock.sentinel.alloc_req]
res = self.driver._claim_resources(ctx, spec_obj, instance_uuid,
alloc_reqs)
pc = self.placement_client
pc.claim_resources.return_value = True
pc.claim_resources.assert_called_once_with(uuids.instance,
mock.sentinel.alloc_req, uuids.project_id, uuids.user_id)
self.assertTrue(res)
def test_add_retry_host(self):
retry = dict(num_attempts=1, hosts=[])
filter_properties = dict(retry=retry)
host = "fakehost"
node = "fakenode"
scheduler_utils._add_retry_host(filter_properties, host, node)
hosts = filter_properties['retry']['hosts']
self.assertEqual(1, len(hosts))
self.assertEqual([host, node], hosts[0])
    def test_post_select_populate(self):
        # Test addition of certain filter props after a node is selected.
        retry = {'hosts': [], 'num_attempts': 1}
        filter_properties = {'retry': retry}

        host_state = host_manager.HostState('host', 'node', uuids.cell)
        host_state.limits['vcpu'] = 5
        scheduler_utils.populate_filter_properties(filter_properties,
                host_state)

        # The selected (host, node) pair is recorded for retry bookkeeping...
        self.assertEqual(['host', 'node'],
                         filter_properties['retry']['hosts'][0])
        # ...and the host state's own limits are unchanged by the call.
        self.assertEqual({'vcpu': 5}, host_state.limits)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_schedule')
def test_select_destinations_match_num_instances(self, mock_schedule):
"""Tests that the select_destinations() method returns the list of
hosts from the _schedule() method when the number of returned hosts
equals the number of instance UUIDs passed in.
"""
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
num_instances=1)
mock_schedule.return_value = [[mock.sentinel.hs1]]
dests = self.driver.select_destinations(self.context, spec_obj,
[mock.sentinel.instance_uuid], mock.sentinel.alloc_reqs_by_rp_uuid,
mock.sentinel.p_sums)
mock_schedule.assert_called_once_with(self.context, spec_obj,
[mock.sentinel.instance_uuid], mock.sentinel.alloc_reqs_by_rp_uuid,
mock.sentinel.p_sums)
self.assertEqual([mock.sentinel.hs1], dests)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_schedule')
def test_select_destinations_for_move_ops(self, mock_schedule):
"""Tests that the select_destinations() method verifies the number of
hosts returned from the _schedule() method against the number of
instance UUIDs passed as a parameter and not against the RequestSpec
num_instances field since the latter could be wrong in case of a move
operation.
"""
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
num_instances=2)
host_state = mock.Mock(spec=host_manager.HostState,
cell_uuid=uuids.cell)
mock_schedule.return_value = [[host_state]]
dests = self.driver.select_destinations(self.context, spec_obj,
[mock.sentinel.instance_uuid], mock.sentinel.alloc_reqs_by_rp_uuid,
mock.sentinel.p_sums)
mock_schedule.assert_called_once_with(self.context, spec_obj,
[mock.sentinel.instance_uuid], mock.sentinel.alloc_reqs_by_rp_uuid,
mock.sentinel.p_sums)
self.assertEqual([host_state], dests)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_schedule')
def test_select_destinations_fewer_num_instances(self, mock_schedule):
"""Tests that the select_destinations() method properly handles
resetting host state objects and raising NoValidHost when the
_schedule() method returns no host matches.
"""
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
num_instances=2)
host_state = mock.Mock(spec=host_manager.HostState,
cell_uuid=uuids.cell)
mock_schedule.return_value = [[host_state]]
self.assertRaises(exception.NoValidHost,
self.driver.select_destinations, self.context, spec_obj,
[mock.sentinel.instance_uuid1, mock.sentinel.instance_uuid2],
mock.sentinel.alloc_reqs_by_rp_uuid, mock.sentinel.p_sums)
# Verify that the host state object has been marked as not updated so
# it's picked up in the next pull from the DB for compute node objects
self.assertIsNone(host_state.updated)
    @mock.patch("nova.scheduler.host_manager.HostState.consume_from_request")
    @mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
                "_claim_resources")
    @mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
                "_get_sorted_hosts")
    @mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
                "_get_all_host_states")
    def _test_alternates_returned(self, mock_get_all_hosts, mock_sorted,
            mock_claim, mock_consume, num_instances=2, num_alternates=2):
        """Common scenario: schedule ``num_instances`` instances against ten
        hosts with max_attempts set to ``num_alternates + 1`` and verify the
        size and content of each returned destination list.
        """
        # Build ten hosts, each with an (empty) allocation request keyed by
        # the host's resource provider uuid.
        all_host_states = []
        alloc_reqs = {}
        for num in range(10):
            host_name = "host%s" % num
            hs = host_manager.HostState(host_name, "node%s" % num,
                    uuids.cell)
            hs.uuid = getattr(uuids, host_name)
            all_host_states.append(hs)
            alloc_reqs[hs.uuid] = {}
        mock_get_all_hosts.return_value = all_host_states
        mock_sorted.return_value = all_host_states
        # Placement claims always succeed in this scenario.
        mock_claim.return_value = True
        # max_attempts governs how many hosts (1 selected + alternates) are
        # returned per instance.
        total_returned = num_alternates + 1
        self.flags(max_attempts=total_returned, group="scheduler")
        instance_uuids = [getattr(uuids, "inst%s" % num)
                          for num in range(num_instances)]
        spec_obj = objects.RequestSpec(
            num_instances=num_instances,
            flavor=objects.Flavor(memory_mb=512,
                                  root_gb=512,
                                  ephemeral_gb=0,
                                  swap=0,
                                  vcpus=1),
            project_id=uuids.project_id,
            instance_group=None)
        dests = self.driver._schedule(self.context, spec_obj,
                instance_uuids, alloc_reqs, None)
        self.assertEqual(num_instances, len(dests))
        # Filtering and weighing hosts should be called num_instances + 1 times
        # unless num_instances == 1.
        self.assertEqual(num_instances + 1 if num_instances > 1 else 1,
                         mock_sorted.call_count,
                         'Unexpected number of calls to filter hosts for %s '
                         'instances.' % num_instances)
        selected_hosts = [dest[0] for dest in dests]
        for dest in dests:
            self.assertEqual(total_returned, len(dest))
            # Verify that there are no duplicates among a destination
            self.assertEqual(len(dest), len(set(dest)))
            # Verify that none of the selected hosts appear in the alternates.
            for alt in dest[1:]:
                self.assertNotIn(alt, selected_hosts)
def test_alternates_returned(self):
self._test_alternates_returned(num_instances=1, num_alternates=1)
self._test_alternates_returned(num_instances=3, num_alternates=0)
self._test_alternates_returned(num_instances=1, num_alternates=4)
self._test_alternates_returned(num_instances=2, num_alternates=3)
self._test_alternates_returned(num_instances=8, num_alternates=8)
    @mock.patch("nova.scheduler.host_manager.HostState.consume_from_request")
    @mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
                "_claim_resources")
    @mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
                "_get_sorted_hosts")
    @mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
                "_get_all_host_states")
    def test_alternates_same_cell(self, mock_get_all_hosts, mock_sorted,
            mock_claim, mock_consume):
        """Tests getting alternates plus claims where the hosts are spread
        across two cells.
        """
        all_host_states = []
        alloc_reqs = {}
        for num in range(10):
            host_name = "host%s" % num
            # Odd-numbered hosts land in cell1, even-numbered in cell2.
            cell_uuid = uuids.cell1 if num % 2 else uuids.cell2
            hs = host_manager.HostState(host_name, "node%s" % num,
                    cell_uuid)
            hs.uuid = getattr(uuids, host_name)
            all_host_states.append(hs)
            alloc_reqs[hs.uuid] = {}
        mock_get_all_hosts.return_value = all_host_states
        # There are two instances so _get_sorted_hosts is called once per
        # instance and then once again before picking alternates.
        # The reversed ordering for the second instance forces the two
        # selected hosts into different cells.
        mock_sorted.side_effect = [all_host_states,
                                   list(reversed(all_host_states)),
                                   all_host_states]
        mock_claim.return_value = True
        total_returned = 3
        self.flags(max_attempts=total_returned, group="scheduler")
        instance_uuids = [uuids.inst1, uuids.inst2]
        num_instances = len(instance_uuids)
        spec_obj = objects.RequestSpec(
            num_instances=num_instances,
            flavor=objects.Flavor(memory_mb=512,
                                  root_gb=512,
                                  ephemeral_gb=0,
                                  swap=0,
                                  vcpus=1),
            project_id=uuids.project_id,
            instance_group=None)
        dests = self.driver._schedule(self.context, spec_obj,
                instance_uuids, alloc_reqs, None)
        # There should be max_attempts hosts per instance (1 selected, 2 alts)
        self.assertEqual(total_returned, len(dests[0]))
        self.assertEqual(total_returned, len(dests[1]))
        # Verify that the two selected hosts are not in the same cell.
        self.assertNotEqual(dests[0][0].cell_uuid, dests[1][0].cell_uuid)
        # Every alternate must live in the same cell as its selected host.
        for dest in dests:
            selected_host = dest[0]
            selected_cell_uuid = selected_host.cell_uuid
            for alternate in dest[1:]:
                self.assertEqual(alternate.cell_uuid, selected_cell_uuid)
    @mock.patch("nova.scheduler.host_manager.HostState.consume_from_request")
    @mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
                "_claim_resources")
    @mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
                "_get_sorted_hosts")
    @mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
                "_get_all_host_states")
    def _test_not_enough_alternates(self, mock_get_all_hosts, mock_sorted,
            mock_claim, mock_consume, num_hosts, max_attempts):
        """Common scenario: when fewer hosts are available than the
        max_attempts setting allows, each destination list must be capped
        at the number of available hosts.
        """
        all_host_states = []
        alloc_reqs = {}
        for num in range(num_hosts):
            host_name = "host%s" % num
            hs = host_manager.HostState(host_name, "node%s" % num,
                    uuids.cell)
            hs.uuid = getattr(uuids, host_name)
            all_host_states.append(hs)
            alloc_reqs[hs.uuid] = {}
        mock_get_all_hosts.return_value = all_host_states
        mock_sorted.return_value = all_host_states
        mock_claim.return_value = True
        # max_attempts may exceed the number of available hosts.
        self.flags(max_attempts=max_attempts, group="scheduler")
        instance_uuids = [uuids.inst1, uuids.inst2]
        num_instances = len(instance_uuids)
        spec_obj = objects.RequestSpec(
            num_instances=num_instances,
            flavor=objects.Flavor(memory_mb=512,
                                  root_gb=512,
                                  ephemeral_gb=0,
                                  swap=0,
                                  vcpus=1),
            project_id=uuids.project_id,
            instance_group=None)
        dests = self.driver._schedule(self.context, spec_obj,
                instance_uuids, alloc_reqs, None)
        self.assertEqual(num_instances, len(dests))
        selected_hosts = [dest[0] for dest in dests]
        # The number returned for each destination should be the lesser of
        # the number of available hosts and the max_attempts setting.
        expected_number = min(num_hosts, max_attempts)
        for dest in dests:
            self.assertEqual(expected_number, len(dest))
            # Verify that there are no duplicates among a destination
            self.assertEqual(len(dest), len(set(dest)))
            # Verify that none of the selected hosts appear in the alternates.
            for alt in dest[1:]:
                self.assertNotIn(alt, selected_hosts)
def test_not_enough_alternates(self):
self._test_not_enough_alternates(num_hosts=100, max_attempts=5)
self._test_not_enough_alternates(num_hosts=5, max_attempts=5)
self._test_not_enough_alternates(num_hosts=3, max_attempts=5)
self._test_not_enough_alternates(num_hosts=20, max_attempts=5)
@mock.patch.object(filter_scheduler.FilterScheduler, '_schedule')
def test_select_destinations_notifications(self, mock_schedule):
mock_schedule.return_value = [[mock.Mock()]]
with mock.patch.object(self.driver.notifier, 'info') as mock_info:
expected = {'num_instances': 1,
'instance_properties': {'uuid': uuids.instance},
'instance_type': {},
'image': {}}
spec_obj = objects.RequestSpec(num_instances=1,
instance_uuid=uuids.instance)
self.driver.select_destinations(self.context, spec_obj,
[uuids.instance], {}, None)
expected = [
mock.call(self.context, 'scheduler.select_destinations.start',
dict(request_spec=expected)),
mock.call(self.context, 'scheduler.select_destinations.end',
dict(request_spec=expected))]
self.assertEqual(expected, mock_info.call_args_list)
def test_get_all_host_states_provider_summaries_is_none(self):
"""Tests that HostManager.get_host_states_by_uuids is called with
compute_uuids being None when the incoming provider_summaries is None.
"""
with mock.patch.object(self.driver.host_manager,
'get_host_states_by_uuids') as get_host_states:
self.driver._get_all_host_states(
mock.sentinel.ctxt, mock.sentinel.spec_obj, None)
# Make sure get_host_states_by_uuids was called with
# compute_uuids being None.
get_host_states.assert_called_once_with(
mock.sentinel.ctxt, None, mock.sentinel.spec_obj)
def test_get_all_host_states_provider_summaries_is_empty(self):
"""Tests that HostManager.get_host_states_by_uuids is called with
compute_uuids being [] when the incoming provider_summaries is {}.
"""
with mock.patch.object(self.driver.host_manager,
'get_host_states_by_uuids') as get_host_states:
self.driver._get_all_host_states(
mock.sentinel.ctxt, mock.sentinel.spec_obj, {})
# Make sure get_host_states_by_uuids was called with
# compute_uuids being [].
get_host_states.assert_called_once_with(
mock.sentinel.ctxt, [], mock.sentinel.spec_obj)
| 44.763593 | 79 | 0.638078 |
import mock
from nova import exception
from nova import objects
from nova.scheduler import client
from nova.scheduler.client import report
from nova.scheduler import filter_scheduler
from nova.scheduler import host_manager
from nova.scheduler import utils as scheduler_utils
from nova.scheduler import weights
from nova import test
from nova.tests.unit.scheduler import test_scheduler
from nova.tests import uuidsentinel as uuids
class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase):
driver_cls = filter_scheduler.FilterScheduler
@mock.patch('nova.scheduler.client.SchedulerClient')
def setUp(self, mock_client):
pc_client = mock.Mock(spec=report.SchedulerReportClient)
sched_client = mock.Mock(spec=client.SchedulerClient)
sched_client.reportclient = pc_client
mock_client.return_value = sched_client
self.placement_client = pc_client
super(FilterSchedulerTestCase, self).setUp()
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_claim_resources')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_all_host_states')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_sorted_hosts')
def test_schedule_placement_bad_comms(self, mock_get_hosts,
mock_get_all_states, mock_claim):
spec_obj = objects.RequestSpec(
num_instances=1,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=None)
host_state = mock.Mock(spec=host_manager.HostState,
host=mock.sentinel.host, uuid=uuids.cn1, cell_uuid=uuids.cell)
all_host_states = [host_state]
mock_get_all_states.return_value = all_host_states
mock_get_hosts.return_value = all_host_states
instance_uuids = None
ctx = mock.Mock()
selected_hosts = self.driver._schedule(ctx, spec_obj,
instance_uuids, None, mock.sentinel.provider_summaries)
mock_get_all_states.assert_called_once_with(
ctx.elevated.return_value, spec_obj,
mock.sentinel.provider_summaries)
mock_get_hosts.assert_called_once_with(spec_obj, all_host_states, 0)
self.assertEqual(len(selected_hosts), 1)
self.assertEqual([host_state], selected_hosts)
host_state.consume_from_request.assert_called_once_with(spec_obj)
self.assertFalse(mock_claim.called)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_claim_resources')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_all_host_states')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_sorted_hosts')
def test_schedule_old_conductor(self, mock_get_hosts,
mock_get_all_states, mock_claim):
spec_obj = objects.RequestSpec(
num_instances=1,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=None)
host_state = mock.Mock(spec=host_manager.HostState,
host=mock.sentinel.host, uuid=uuids.cn1)
all_host_states = [host_state]
mock_get_all_states.return_value = all_host_states
mock_get_hosts.return_value = all_host_states
instance_uuids = None
ctx = mock.Mock()
selected_hosts = self.driver._schedule(ctx, spec_obj,
instance_uuids, mock.sentinel.alloc_reqs_by_rp_uuid,
mock.sentinel.provider_summaries)
mock_get_all_states.assert_called_once_with(
ctx.elevated.return_value, spec_obj,
mock.sentinel.provider_summaries)
mock_get_hosts.assert_called_once_with(spec_obj, all_host_states, 0)
self.assertEqual(len(selected_hosts), 1)
self.assertEqual([host_state], selected_hosts)
host_state.consume_from_request.assert_called_once_with(spec_obj)
self.assertFalse(mock_claim.called)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_claim_resources')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_all_host_states')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_sorted_hosts')
def _test_schedule_successful_claim(self, mock_get_hosts,
mock_get_all_states, mock_claim, num_instances=1):
spec_obj = objects.RequestSpec(
num_instances=num_instances,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=None)
host_state = mock.Mock(spec=host_manager.HostState,
host=mock.sentinel.host, uuid=uuids.cn1, cell_uuid=uuids.cell1)
all_host_states = [host_state]
mock_get_all_states.return_value = all_host_states
mock_get_hosts.return_value = all_host_states
mock_claim.return_value = True
instance_uuids = [uuids.instance]
alloc_reqs_by_rp_uuid = {
uuids.cn1: [mock.sentinel.alloc_req],
}
ctx = mock.Mock()
selected_hosts = self.driver._schedule(ctx, spec_obj, instance_uuids,
alloc_reqs_by_rp_uuid, mock.sentinel.provider_summaries)
mock_get_all_states.assert_called_once_with(
ctx.elevated.return_value, spec_obj,
mock.sentinel.provider_summaries)
mock_get_hosts.assert_called()
mock_claim.assert_called_once_with(ctx.elevated.return_value, spec_obj,
uuids.instance, [mock.sentinel.alloc_req])
self.assertEqual(len(selected_hosts), 1)
self.assertEqual([[host_state]], selected_hosts)
host_state.consume_from_request.assert_called_once_with(spec_obj)
def test_schedule_successful_claim(self):
self._test_schedule_successful_claim()
def test_schedule_old_reqspec_and_move_operation(self):
self._test_schedule_successful_claim(num_instances=3)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_cleanup_allocations')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_claim_resources')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_all_host_states')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_sorted_hosts')
def test_schedule_unsuccessful_claim(self, mock_get_hosts,
mock_get_all_states, mock_claim, mock_cleanup):
spec_obj = objects.RequestSpec(
num_instances=1,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=None)
host_state = mock.Mock(spec=host_manager.HostState,
host=mock.sentinel.host, uuid=uuids.cn1, cell_uuid=uuids.cell1)
all_host_states = [host_state]
mock_get_all_states.return_value = all_host_states
mock_get_hosts.return_value = all_host_states
mock_claim.return_value = False
instance_uuids = [uuids.instance]
alloc_reqs_by_rp_uuid = {
uuids.cn1: [mock.sentinel.alloc_req],
}
ctx = mock.Mock()
selected_hosts = self.driver._schedule(ctx, spec_obj,
instance_uuids, alloc_reqs_by_rp_uuid,
mock.sentinel.provider_summaries)
mock_get_all_states.assert_called_once_with(
ctx.elevated.return_value, spec_obj,
mock.sentinel.provider_summaries)
mock_get_hosts.assert_called_once_with(spec_obj, all_host_states, 0)
mock_claim.assert_called_once_with(ctx.elevated.return_value, spec_obj,
uuids.instance, [mock.sentinel.alloc_req])
self.assertEqual([], selected_hosts)
mock_cleanup.assert_called_once_with([])
self.assertFalse(host_state.consume_from_request.called)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_cleanup_allocations')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_claim_resources')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_all_host_states')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_sorted_hosts')
def test_schedule_not_all_instance_clean_claimed(self, mock_get_hosts,
mock_get_all_states, mock_claim, mock_cleanup):
spec_obj = objects.RequestSpec(
num_instances=2,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=None)
host_state = mock.Mock(spec=host_manager.HostState,
host=mock.sentinel.host, uuid=uuids.cn1, cell_uuid=uuids.cell1)
all_host_states = [host_state]
mock_get_all_states.return_value = all_host_states
mock_get_hosts.side_effect = [
all_host_states,
[],
all_host_states,
]
mock_claim.return_value = True
instance_uuids = [uuids.instance1, uuids.instance2]
alloc_reqs_by_rp_uuid = {
uuids.cn1: [mock.sentinel.alloc_req],
}
ctx = mock.Mock()
self.driver._schedule(ctx, spec_obj, instance_uuids,
alloc_reqs_by_rp_uuid, mock.sentinel.provider_summaries)
mock_cleanup.assert_called_once_with([uuids.instance1])
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_claim_resources')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_all_host_states')
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_get_sorted_hosts')
def test_schedule_instance_group(self, mock_get_hosts,
mock_get_all_states, mock_claim):
num_instances = 2
ig = objects.InstanceGroup(hosts=[])
spec_obj = objects.RequestSpec(
num_instances=num_instances,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=ig)
hs1 = mock.Mock(spec=host_manager.HostState, host='host1',
uuid=uuids.cn1, cell_uuid=uuids.cell1)
hs2 = mock.Mock(spec=host_manager.HostState, host='host2',
uuid=uuids.cn2, cell_uuid=uuids.cell2)
all_host_states = [hs1, hs2]
mock_get_all_states.return_value = all_host_states
mock_claim.return_value = True
alloc_reqs_by_rp_uuid = {
uuids.cn1: [mock.sentinel.alloc_req_cn1],
uuids.cn2: [mock.sentinel.alloc_req_cn2],
}
mock_get_hosts.side_effect = ([hs2, hs1], [hs1, hs2],
[hs2, hs1], [hs1, hs2])
instance_uuids = [
getattr(uuids, 'instance%d' % x) for x in range(num_instances)
]
ctx = mock.Mock()
self.driver._schedule(ctx, spec_obj, instance_uuids,
alloc_reqs_by_rp_uuid, mock.sentinel.provider_summaries)
claim_calls = [
mock.call(ctx.elevated.return_value, spec_obj,
uuids.instance0, [mock.sentinel.alloc_req_cn2]),
mock.call(ctx.elevated.return_value, spec_obj,
uuids.instance1, [mock.sentinel.alloc_req_cn1]),
]
mock_claim.assert_has_calls(claim_calls)
sorted_host_calls = [
mock.call(spec_obj, all_host_states, 0),
mock.call(spec_obj, [hs2, hs1], 1),
]
mock_get_hosts.assert_has_calls(sorted_host_calls)
self.assertEqual(['host2', 'host1'], ig.hosts)
self.assertEqual({}, ig.obj_get_changes())
@mock.patch('random.choice', side_effect=lambda x: x[1])
@mock.patch('nova.scheduler.host_manager.HostManager.get_weighed_hosts')
@mock.patch('nova.scheduler.host_manager.HostManager.get_filtered_hosts')
def test_get_sorted_hosts(self, mock_filt, mock_weighed, mock_rand):
self.flags(host_subset_size=2, group='filter_scheduler')
hs1 = mock.Mock(spec=host_manager.HostState, host='host1',
cell_uuid=uuids.cell1)
hs2 = mock.Mock(spec=host_manager.HostState, host='host2',
cell_uuid=uuids.cell2)
all_host_states = [hs1, hs2]
mock_weighed.return_value = [
weights.WeighedHost(hs1, 1.0), weights.WeighedHost(hs2, 1.0),
]
results = self.driver._get_sorted_hosts(mock.sentinel.spec,
all_host_states, mock.sentinel.index)
mock_filt.assert_called_once_with(all_host_states, mock.sentinel.spec,
mock.sentinel.index)
mock_weighed.assert_called_once_with(mock_filt.return_value,
mock.sentinel.spec)
self.assertEqual([hs2, hs1], results)
@mock.patch('random.choice', side_effect=lambda x: x[0])
@mock.patch('nova.scheduler.host_manager.HostManager.get_weighed_hosts')
@mock.patch('nova.scheduler.host_manager.HostManager.get_filtered_hosts')
def test_get_sorted_hosts_subset_less_than_num_weighed(self, mock_filt,
mock_weighed, mock_rand):
self.flags(host_subset_size=1, group='filter_scheduler')
hs1 = mock.Mock(spec=host_manager.HostState, host='host1',
cell_uuid=uuids.cell1)
hs2 = mock.Mock(spec=host_manager.HostState, host='host2',
cell_uuid=uuids.cell2)
all_host_states = [hs1, hs2]
mock_weighed.return_value = [
weights.WeighedHost(hs1, 1.0), weights.WeighedHost(hs2, 1.0),
]
results = self.driver._get_sorted_hosts(mock.sentinel.spec,
all_host_states, mock.sentinel.index)
mock_filt.assert_called_once_with(all_host_states, mock.sentinel.spec,
mock.sentinel.index)
mock_weighed.assert_called_once_with(mock_filt.return_value,
mock.sentinel.spec)
mock_rand.assert_called_once_with([hs1])
self.assertEqual([hs1, hs2], results)
@mock.patch('random.choice', side_effect=lambda x: x[0])
@mock.patch('nova.scheduler.host_manager.HostManager.get_weighed_hosts')
@mock.patch('nova.scheduler.host_manager.HostManager.get_filtered_hosts')
def test_get_sorted_hosts_subset_greater_than_num_weighed(self, mock_filt,
mock_weighed, mock_rand):
self.flags(host_subset_size=20, group='filter_scheduler')
hs1 = mock.Mock(spec=host_manager.HostState, host='host1',
cell_uuid=uuids.cell1)
hs2 = mock.Mock(spec=host_manager.HostState, host='host2',
cell_uuid=uuids.cell2)
all_host_states = [hs1, hs2]
mock_weighed.return_value = [
weights.WeighedHost(hs1, 1.0), weights.WeighedHost(hs2, 1.0),
]
results = self.driver._get_sorted_hosts(mock.sentinel.spec,
all_host_states, mock.sentinel.index)
mock_filt.assert_called_once_with(all_host_states, mock.sentinel.spec,
mock.sentinel.index)
mock_weighed.assert_called_once_with(mock_filt.return_value,
mock.sentinel.spec)
self.assertEqual([hs1, hs2], results)
def test_cleanup_allocations(self):
instance_uuids = []
pc = self.placement_client
self.driver._cleanup_allocations(instance_uuids)
self.assertFalse(pc.delete_allocation_for_instance.called)
instance_uuids = [uuids.instance1, uuids.instance2]
self.driver._cleanup_allocations(instance_uuids)
exp_calls = [mock.call(uuids.instance1), mock.call(uuids.instance2)]
pc.delete_allocation_for_instance.assert_has_calls(exp_calls)
def test_claim_resources(self):
ctx = mock.Mock(user_id=uuids.user_id)
spec_obj = mock.Mock(project_id=uuids.project_id)
instance_uuid = uuids.instance
alloc_reqs = [mock.sentinel.alloc_req]
res = self.driver._claim_resources(ctx, spec_obj, instance_uuid,
alloc_reqs)
pc = self.placement_client
pc.claim_resources.return_value = True
pc.claim_resources.assert_called_once_with(uuids.instance,
mock.sentinel.alloc_req, uuids.project_id, uuids.user_id)
self.assertTrue(res)
def test_add_retry_host(self):
retry = dict(num_attempts=1, hosts=[])
filter_properties = dict(retry=retry)
host = "fakehost"
node = "fakenode"
scheduler_utils._add_retry_host(filter_properties, host, node)
hosts = filter_properties['retry']['hosts']
self.assertEqual(1, len(hosts))
self.assertEqual([host, node], hosts[0])
def test_post_select_populate(self):
retry = {'hosts': [], 'num_attempts': 1}
filter_properties = {'retry': retry}
host_state = host_manager.HostState('host', 'node', uuids.cell)
host_state.limits['vcpu'] = 5
scheduler_utils.populate_filter_properties(filter_properties,
host_state)
self.assertEqual(['host', 'node'],
filter_properties['retry']['hosts'][0])
self.assertEqual({'vcpu': 5}, host_state.limits)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_schedule')
def test_select_destinations_match_num_instances(self, mock_schedule):
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
num_instances=1)
mock_schedule.return_value = [[mock.sentinel.hs1]]
dests = self.driver.select_destinations(self.context, spec_obj,
[mock.sentinel.instance_uuid], mock.sentinel.alloc_reqs_by_rp_uuid,
mock.sentinel.p_sums)
mock_schedule.assert_called_once_with(self.context, spec_obj,
[mock.sentinel.instance_uuid], mock.sentinel.alloc_reqs_by_rp_uuid,
mock.sentinel.p_sums)
self.assertEqual([mock.sentinel.hs1], dests)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_schedule')
def test_select_destinations_for_move_ops(self, mock_schedule):
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
num_instances=2)
host_state = mock.Mock(spec=host_manager.HostState,
cell_uuid=uuids.cell)
mock_schedule.return_value = [[host_state]]
dests = self.driver.select_destinations(self.context, spec_obj,
[mock.sentinel.instance_uuid], mock.sentinel.alloc_reqs_by_rp_uuid,
mock.sentinel.p_sums)
mock_schedule.assert_called_once_with(self.context, spec_obj,
[mock.sentinel.instance_uuid], mock.sentinel.alloc_reqs_by_rp_uuid,
mock.sentinel.p_sums)
self.assertEqual([host_state], dests)
@mock.patch('nova.scheduler.filter_scheduler.FilterScheduler.'
'_schedule')
def test_select_destinations_fewer_num_instances(self, mock_schedule):
spec_obj = objects.RequestSpec(
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
num_instances=2)
host_state = mock.Mock(spec=host_manager.HostState,
cell_uuid=uuids.cell)
mock_schedule.return_value = [[host_state]]
self.assertRaises(exception.NoValidHost,
self.driver.select_destinations, self.context, spec_obj,
[mock.sentinel.instance_uuid1, mock.sentinel.instance_uuid2],
mock.sentinel.alloc_reqs_by_rp_uuid, mock.sentinel.p_sums)
self.assertIsNone(host_state.updated)
@mock.patch("nova.scheduler.host_manager.HostState.consume_from_request")
@mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
"_claim_resources")
@mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
"_get_sorted_hosts")
@mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
"_get_all_host_states")
def _test_alternates_returned(self, mock_get_all_hosts, mock_sorted,
mock_claim, mock_consume, num_instances=2, num_alternates=2):
all_host_states = []
alloc_reqs = {}
for num in range(10):
host_name = "host%s" % num
hs = host_manager.HostState(host_name, "node%s" % num,
uuids.cell)
hs.uuid = getattr(uuids, host_name)
all_host_states.append(hs)
alloc_reqs[hs.uuid] = {}
mock_get_all_hosts.return_value = all_host_states
mock_sorted.return_value = all_host_states
mock_claim.return_value = True
total_returned = num_alternates + 1
self.flags(max_attempts=total_returned, group="scheduler")
instance_uuids = [getattr(uuids, "inst%s" % num)
for num in range(num_instances)]
spec_obj = objects.RequestSpec(
num_instances=num_instances,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=None)
dests = self.driver._schedule(self.context, spec_obj,
instance_uuids, alloc_reqs, None)
self.assertEqual(num_instances, len(dests))
# Filtering and weighing hosts should be called num_instances + 1 times
# unless num_instances == 1.
self.assertEqual(num_instances + 1 if num_instances > 1 else 1,
mock_sorted.call_count,
'Unexpected number of calls to filter hosts for %s '
'instances.' % num_instances)
selected_hosts = [dest[0] for dest in dests]
for dest in dests:
self.assertEqual(total_returned, len(dest))
# Verify that there are no duplicates among a destination
self.assertEqual(len(dest), len(set(dest)))
# Verify that none of the selected hosts appear in the alternates.
for alt in dest[1:]:
self.assertNotIn(alt, selected_hosts)
def test_alternates_returned(self):
self._test_alternates_returned(num_instances=1, num_alternates=1)
self._test_alternates_returned(num_instances=3, num_alternates=0)
self._test_alternates_returned(num_instances=1, num_alternates=4)
self._test_alternates_returned(num_instances=2, num_alternates=3)
self._test_alternates_returned(num_instances=8, num_alternates=8)
@mock.patch("nova.scheduler.host_manager.HostState.consume_from_request")
@mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
"_claim_resources")
@mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
"_get_sorted_hosts")
@mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
"_get_all_host_states")
def test_alternates_same_cell(self, mock_get_all_hosts, mock_sorted,
mock_claim, mock_consume):
all_host_states = []
alloc_reqs = {}
for num in range(10):
host_name = "host%s" % num
cell_uuid = uuids.cell1 if num % 2 else uuids.cell2
hs = host_manager.HostState(host_name, "node%s" % num,
cell_uuid)
hs.uuid = getattr(uuids, host_name)
all_host_states.append(hs)
alloc_reqs[hs.uuid] = {}
mock_get_all_hosts.return_value = all_host_states
# There are two instances so _get_sorted_hosts is called once per
# instance and then once again before picking alternates.
mock_sorted.side_effect = [all_host_states,
list(reversed(all_host_states)),
all_host_states]
mock_claim.return_value = True
total_returned = 3
self.flags(max_attempts=total_returned, group="scheduler")
instance_uuids = [uuids.inst1, uuids.inst2]
num_instances = len(instance_uuids)
spec_obj = objects.RequestSpec(
num_instances=num_instances,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=None)
dests = self.driver._schedule(self.context, spec_obj,
instance_uuids, alloc_reqs, None)
# There should be max_attempts hosts per instance (1 selected, 2 alts)
self.assertEqual(total_returned, len(dests[0]))
self.assertEqual(total_returned, len(dests[1]))
# Verify that the two selected hosts are not in the same cell.
self.assertNotEqual(dests[0][0].cell_uuid, dests[1][0].cell_uuid)
for dest in dests:
selected_host = dest[0]
selected_cell_uuid = selected_host.cell_uuid
for alternate in dest[1:]:
self.assertEqual(alternate.cell_uuid, selected_cell_uuid)
@mock.patch("nova.scheduler.host_manager.HostState.consume_from_request")
@mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
"_claim_resources")
@mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
"_get_sorted_hosts")
@mock.patch("nova.scheduler.filter_scheduler.FilterScheduler."
"_get_all_host_states")
def _test_not_enough_alternates(self, mock_get_all_hosts, mock_sorted,
mock_claim, mock_consume, num_hosts, max_attempts):
all_host_states = []
alloc_reqs = {}
for num in range(num_hosts):
host_name = "host%s" % num
hs = host_manager.HostState(host_name, "node%s" % num,
uuids.cell)
hs.uuid = getattr(uuids, host_name)
all_host_states.append(hs)
alloc_reqs[hs.uuid] = {}
mock_get_all_hosts.return_value = all_host_states
mock_sorted.return_value = all_host_states
mock_claim.return_value = True
# Set the total returned to more than the number of available hosts
self.flags(max_attempts=max_attempts, group="scheduler")
instance_uuids = [uuids.inst1, uuids.inst2]
num_instances = len(instance_uuids)
spec_obj = objects.RequestSpec(
num_instances=num_instances,
flavor=objects.Flavor(memory_mb=512,
root_gb=512,
ephemeral_gb=0,
swap=0,
vcpus=1),
project_id=uuids.project_id,
instance_group=None)
dests = self.driver._schedule(self.context, spec_obj,
instance_uuids, alloc_reqs, None)
self.assertEqual(num_instances, len(dests))
selected_hosts = [dest[0] for dest in dests]
# The number returned for each destination should be the less of the
# number of available host and the max_attempts setting.
expected_number = min(num_hosts, max_attempts)
for dest in dests:
self.assertEqual(expected_number, len(dest))
# Verify that there are no duplicates among a destination
self.assertEqual(len(dest), len(set(dest)))
# Verify that none of the selected hosts appear in the alternates.
for alt in dest[1:]:
self.assertNotIn(alt, selected_hosts)
def test_not_enough_alternates(self):
self._test_not_enough_alternates(num_hosts=100, max_attempts=5)
self._test_not_enough_alternates(num_hosts=5, max_attempts=5)
self._test_not_enough_alternates(num_hosts=3, max_attempts=5)
self._test_not_enough_alternates(num_hosts=20, max_attempts=5)
@mock.patch.object(filter_scheduler.FilterScheduler, '_schedule')
def test_select_destinations_notifications(self, mock_schedule):
mock_schedule.return_value = [[mock.Mock()]]
with mock.patch.object(self.driver.notifier, 'info') as mock_info:
expected = {'num_instances': 1,
'instance_properties': {'uuid': uuids.instance},
'instance_type': {},
'image': {}}
spec_obj = objects.RequestSpec(num_instances=1,
instance_uuid=uuids.instance)
self.driver.select_destinations(self.context, spec_obj,
[uuids.instance], {}, None)
expected = [
mock.call(self.context, 'scheduler.select_destinations.start',
dict(request_spec=expected)),
mock.call(self.context, 'scheduler.select_destinations.end',
dict(request_spec=expected))]
self.assertEqual(expected, mock_info.call_args_list)
def test_get_all_host_states_provider_summaries_is_none(self):
with mock.patch.object(self.driver.host_manager,
'get_host_states_by_uuids') as get_host_states:
self.driver._get_all_host_states(
mock.sentinel.ctxt, mock.sentinel.spec_obj, None)
# Make sure get_host_states_by_uuids was called with
# compute_uuids being None.
get_host_states.assert_called_once_with(
mock.sentinel.ctxt, None, mock.sentinel.spec_obj)
def test_get_all_host_states_provider_summaries_is_empty(self):
    """An empty provider-summaries dict must yield an empty uuid list."""
    ctxt = mock.sentinel.ctxt
    spec_obj = mock.sentinel.spec_obj
    patcher = mock.patch.object(self.driver.host_manager,
                                'get_host_states_by_uuids')
    with patcher as mock_get_states:
        self.driver._get_all_host_states(ctxt, spec_obj, {})
        # The host manager should be queried with compute_uuids=[].
        mock_get_states.assert_called_once_with(ctxt, [], spec_obj)
| true | true |
f7f7cf61d708313e869c30369c09071980729b67 | 425 | py | Python | gedder/transforms/unmark.py | Taapeli/stk-gedcom | 8cd1a73452d8474968f591c715f7f6b21a1b8192 | [
"Unlicense"
] | null | null | null | gedder/transforms/unmark.py | Taapeli/stk-gedcom | 8cd1a73452d8474968f591c715f7f6b21a1b8192 | [
"Unlicense"
] | null | null | null | gedder/transforms/unmark.py | Taapeli/stk-gedcom | 8cd1a73452d8474968f591c715f7f6b21a1b8192 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
"""
Restores marked tags: <tag>-X -> <tag>
"""
_VERSION = "1.0"
#from transforms.model.gedcom_line import GedcomLine
def add_args(parser):
    """Register transform-specific command line options (this transform has none)."""
    pass
def initialize(run_args):
    """Per-run setup hook; nothing to initialize for this transform."""
    pass
def phase3(run_args, gedline, f):
    """Restore a marked tag (``<tag>-X`` -> ``<tag>``) and emit the line."""
    tag = gedline.tag
    if tag.endswith("-X"):
        # Drop the two-character "-X" marker suffix in place.
        gedline.tag = tag[:-2]
    f.emit(gedline.get_line())
| 21.25 | 75 | 0.644706 |
_VERSION = "1.0"
def add_args(parser):
pass
def initialize(run_args):
pass
def phase3(run_args, gedline, f):
if gedline.tag.endswith("-X"):
gedline.tag = gedline.tag[:-2]
f.emit(gedline.get_line())
| true | true |
f7f7d07e749ceda9e28ecbd7eafbac9bf42ce4ab | 5,161 | py | Python | problem_11/test_grid_product.py | plilja/project-euler | 646d1989cf15e903ef7e3c6e487284847d522ec9 | [
"Apache-2.0"
] | null | null | null | problem_11/test_grid_product.py | plilja/project-euler | 646d1989cf15e903ef7e3c6e487284847d522ec9 | [
"Apache-2.0"
] | null | null | null | problem_11/test_grid_product.py | plilja/project-euler | 646d1989cf15e903ef7e3c6e487284847d522ec9 | [
"Apache-2.0"
] | null | null | null | import unittest
from grid_product import *
from common.matrix import Matrix
def to_matrix(matrix_as_lists):
    """Build a Matrix from a list of equal-length row lists.

    Fix: uses range() instead of the Python-2-only xrange(), so the
    helper runs under both Python 2 and Python 3 (range is valid in
    both; the extra list allocation on Python 2 is negligible here).
    """
    result = Matrix(len(matrix_as_lists), len(matrix_as_lists[0]))
    for i in range(result.num_rows()):
        for j in range(result.num_columns()):
            result[i][j] = matrix_as_lists[i][j]
    return result
class TestLargestGridProduct(unittest.TestCase):
    """Tests for largest_grid_product (Project Euler problem 11).

    Fix: the original test data used leading-zero integer literals such
    as 02 and 00, which Python 2 parses as octal and Python 3 rejects as
    a SyntaxError.  Every such literal used only digits 0-7, so the
    octal and decimal values coincide; the numbers below are the same
    values written without leading zeros, making the module valid under
    both Python versions.
    """

    def test_single_cell_grid(self):
        """A 1x1 grid's best length-1 product is its only cell."""
        self.assertEqual(largest_grid_product(to_matrix([[8]]), 1), 8)
        self.assertEqual(largest_grid_product(to_matrix([[10]]), 1), 10)

    def test_2_by_2_grid_length_1(self):
        """Length-1 products reduce to the grid maximum."""
        grid1 = [[2, 2],
                 [2, 2]]
        self.assertEqual(largest_grid_product(to_matrix(grid1), 1), 2)
        grid2 = [[1, 2],
                 [3, 4]]
        self.assertEqual(largest_grid_product(to_matrix(grid2), 1), 4)

    def test_2_by_2_grid_length_2(self):
        """Length-2 products over rows, columns and both diagonals."""
        grid1 = [[2, 2],
                 [2, 2]]
        self.assertEqual(largest_grid_product(to_matrix(grid1), 2), 4)
        grid2 = [[3, 2],
                 [2, 2]]
        self.assertEqual(largest_grid_product(to_matrix(grid2), 2), 6)
        grid3 = [[2, 2],
                 [3, 2]]
        self.assertEqual(largest_grid_product(to_matrix(grid3), 2), 6)
        grid4 = [[4, 5],
                 [4, 4]]
        self.assertEqual(largest_grid_product(to_matrix(grid4), 2), 20)
        grid5 = [[1, 5],
                 [1, 4]]
        self.assertEqual(largest_grid_product(to_matrix(grid5), 2), 20)
        grid6 = [[1, 1],
                 [5, 4]]
        self.assertEqual(largest_grid_product(to_matrix(grid6), 2), 20)
        grid7 = [[5, 1],
                 [1, 4]]
        self.assertEqual(largest_grid_product(to_matrix(grid7), 2), 20)
        grid8 = [[1, 5],
                 [4, 1]]
        self.assertEqual(largest_grid_product(to_matrix(grid8), 2), 20)

    def test_3_by_3_grid_length_2(self):
        """Length-2 maxima in 3x3 grids, including diagonal runs."""
        grid1 = [[2, 2, 2],
                 [2, 4, 2],
                 [4, 2, 2]]
        self.assertEqual(largest_grid_product(to_matrix(grid1), 2), 16)
        grid2 = [[2, 2, 2],
                 [3, 2, 2],
                 [2, 3, 2]]
        self.assertEqual(largest_grid_product(to_matrix(grid2), 2), 9)
        grid3 = [[2, 2, 9],
                 [2, 2, 9],
                 [2, 2, 2]]
        self.assertEqual(largest_grid_product(to_matrix(grid3), 2), 81)
        grid4 = [[2, 9, 2],
                 [2, 2, 9],
                 [2, 2, 2]]
        self.assertEqual(largest_grid_product(to_matrix(grid4), 2), 81)

    def test_3_by_3_grid_length_3(self):
        """Full-length runs through 3x3 grids."""
        grid1 = [[3, 4, 2],
                 [2, 8, 6],
                 [4, 2, 8]]
        self.assertEqual(largest_grid_product(to_matrix(grid1), 3), 192)
        grid2 = [[3, 4, 2],
                 [2, 8, 6],
                 [1, 8, 8]]
        self.assertEqual(largest_grid_product(to_matrix(grid2), 3), 256)
        grid3 = [[3, 2, 2],
                 [4, 8, 6],
                 [4, 8, 8]]
        self.assertEqual(largest_grid_product(to_matrix(grid3), 3), 256)

    def test_project_euler_input(self):
        """The official Project Euler #11 grid; known answer 70600674."""
        grid = [[8, 2, 22, 97, 38, 15, 0, 40, 0, 75, 4, 5, 7, 78, 52, 12, 50, 77, 91, 8],
                [49, 49, 99, 40, 17, 81, 18, 57, 60, 87, 17, 40, 98, 43, 69, 48, 4, 56, 62, 0],
                [81, 49, 31, 73, 55, 79, 14, 29, 93, 71, 40, 67, 53, 88, 30, 3, 49, 13, 36, 65],
                [52, 70, 95, 23, 4, 60, 11, 42, 69, 24, 68, 56, 1, 32, 56, 71, 37, 2, 36, 91],
                [22, 31, 16, 71, 51, 67, 63, 89, 41, 92, 36, 54, 22, 40, 40, 28, 66, 33, 13, 80],
                [24, 47, 32, 60, 99, 3, 45, 2, 44, 75, 33, 53, 78, 36, 84, 20, 35, 17, 12, 50],
                [32, 98, 81, 28, 64, 23, 67, 10, 26, 38, 40, 67, 59, 54, 70, 66, 18, 38, 64, 70],
                [67, 26, 20, 68, 2, 62, 12, 20, 95, 63, 94, 39, 63, 8, 40, 91, 66, 49, 94, 21],
                [24, 55, 58, 5, 66, 73, 99, 26, 97, 17, 78, 78, 96, 83, 14, 88, 34, 89, 63, 72],
                [21, 36, 23, 9, 75, 0, 76, 44, 20, 45, 35, 14, 0, 61, 33, 97, 34, 31, 33, 95],
                [78, 17, 53, 28, 22, 75, 31, 67, 15, 94, 3, 80, 4, 62, 16, 14, 9, 53, 56, 92],
                [16, 39, 5, 42, 96, 35, 31, 47, 55, 58, 88, 24, 0, 17, 54, 24, 36, 29, 85, 57],
                [86, 56, 0, 48, 35, 71, 89, 7, 5, 44, 44, 37, 44, 60, 21, 58, 51, 54, 17, 58],
                [19, 80, 81, 68, 5, 94, 47, 69, 28, 73, 92, 13, 86, 52, 17, 77, 4, 89, 55, 40],
                [4, 52, 8, 83, 97, 35, 99, 16, 7, 97, 57, 32, 16, 26, 26, 79, 33, 27, 98, 66],
                [88, 36, 68, 87, 57, 62, 20, 72, 3, 46, 33, 67, 46, 55, 12, 32, 63, 93, 53, 69],
                [4, 42, 16, 73, 38, 25, 39, 11, 24, 94, 72, 18, 8, 46, 29, 32, 40, 62, 76, 36],
                [20, 69, 36, 41, 72, 30, 23, 88, 34, 62, 99, 69, 82, 67, 59, 85, 74, 4, 36, 16],
                [20, 73, 35, 29, 78, 31, 90, 1, 74, 31, 49, 71, 48, 86, 81, 16, 23, 57, 5, 54],
                [1, 70, 54, 71, 83, 51, 54, 69, 16, 92, 33, 48, 61, 43, 52, 1, 89, 19, 67, 48]]
        self.assertEqual(largest_grid_product(to_matrix(grid), 4), 70600674)
if __name__ == '__main__':
unittest.main()
| 45.672566 | 97 | 0.48634 | import unittest
from grid_product import *
from common.matrix import Matrix
def to_matrix(matrix_as_lists):
result = Matrix(len(matrix_as_lists), len(matrix_as_lists[0]))
for i in xrange(result.num_rows()):
for j in xrange(result.num_columns()):
result[i][j] = matrix_as_lists[i][j]
return result
class TestLargestGridProduct(unittest.TestCase):
def test_single_cell_grid(self):
self.assertEqual(largest_grid_product(to_matrix([[8]]), 1), 8)
self.assertEqual(largest_grid_product(to_matrix([[10]]), 1), 10)
def test_2_by_2_grid_length_1(self):
grid1 = [[2, 2],
[2, 2]]
self.assertEqual(largest_grid_product(to_matrix(grid1), 1), 2)
grid2 = [[1, 2],
[3, 4]]
self.assertEqual(largest_grid_product(to_matrix(grid2), 1), 4)
def test_2_by_2_grid_length_2(self):
grid1 = [[2, 2],
[2, 2]]
self.assertEqual(largest_grid_product(to_matrix(grid1), 2), 4)
grid2 = [[3, 2],
[2, 2]]
self.assertEqual(largest_grid_product(to_matrix(grid2), 2), 6)
grid3 = [[2, 2],
[3, 2]]
self.assertEqual(largest_grid_product(to_matrix(grid3), 2), 6)
grid4 = [[4, 5],
[4, 4]]
self.assertEqual(largest_grid_product(to_matrix(grid4), 2), 20)
grid5 = [[1, 5],
[1, 4]]
self.assertEqual(largest_grid_product(to_matrix(grid5), 2), 20)
grid6 = [[1, 1],
[5, 4]]
self.assertEqual(largest_grid_product(to_matrix(grid6), 2), 20)
grid7 = [[5, 1],
[1, 4]]
self.assertEqual(largest_grid_product(to_matrix(grid7), 2), 20)
grid8 = [[1, 5],
[4, 1]]
self.assertEqual(largest_grid_product(to_matrix(grid8), 2), 20)
def test_3_by_3_grid_length_2(self):
grid1 = [[2, 2, 2],
[2, 4, 2],
[4, 2, 2]]
self.assertEqual(largest_grid_product(to_matrix(grid1), 2), 16)
grid2 = [[2, 2, 2],
[3, 2, 2],
[2, 3, 2]]
self.assertEqual(largest_grid_product(to_matrix(grid2), 2), 9)
grid3 = [[2, 2, 9],
[2, 2, 9],
[2, 2, 2]]
self.assertEqual(largest_grid_product(to_matrix(grid3), 2), 81)
grid4 = [[2, 9, 2],
[2, 2, 9],
[2, 2, 2]]
self.assertEqual(largest_grid_product(to_matrix(grid4), 2), 81)
def test_3_by_3_grid_length_3(self):
grid1 = [[3, 4, 2],
[2, 8, 6],
[4, 2, 8]]
self.assertEqual(largest_grid_product(to_matrix(grid1), 3), 192)
grid2 = [[3, 4, 2],
[2, 8, 6],
[1, 8, 8]]
self.assertEqual(largest_grid_product(to_matrix(grid2), 3), 256)
grid3 = [[3, 2, 2],
[4, 8, 6],
[4, 8, 8]]
self.assertEqual(largest_grid_product(to_matrix(grid3), 3), 256)
def test_project_euler_input(self):
grid = [[8, 02, 22, 97, 38, 15, 00, 40, 00, 75, 04, 05, 07, 78, 52, 12, 50, 77, 91, 8],
[49, 49, 99, 40, 17, 81, 18, 57, 60, 87, 17, 40, 98, 43, 69, 48, 04, 56, 62, 00],
[81, 49, 31, 73, 55, 79, 14, 29, 93, 71, 40, 67, 53, 88, 30, 03, 49, 13, 36, 65],
[52, 70, 95, 23, 04, 60, 11, 42, 69, 24, 68, 56, 01, 32, 56, 71, 37, 02, 36, 91],
[22, 31, 16, 71, 51, 67, 63, 89, 41, 92, 36, 54, 22, 40, 40, 28, 66, 33, 13, 80],
[24, 47, 32, 60, 99, 03, 45, 02, 44, 75, 33, 53, 78, 36, 84, 20, 35, 17, 12, 50],
[32, 98, 81, 28, 64, 23, 67, 10, 26, 38, 40, 67, 59, 54, 70, 66, 18, 38, 64, 70],
[67, 26, 20, 68, 02, 62, 12, 20, 95, 63, 94, 39, 63, 8, 40, 91, 66, 49, 94, 21],
[24, 55, 58, 05, 66, 73, 99, 26, 97, 17, 78, 78, 96, 83, 14, 88, 34, 89, 63, 72],
[21, 36, 23, 9, 75, 00, 76, 44, 20, 45, 35, 14, 0, 61, 33, 97, 34, 31, 33, 95],
[78, 17, 53, 28, 22, 75, 31, 67, 15, 94, 03, 80, 04, 62, 16, 14, 9, 53, 56, 92],
[16, 39, 05, 42, 96, 35, 31, 47, 55, 58, 88, 24, 00, 17, 54, 24, 36, 29, 85, 57],
[86, 56, 00, 48, 35, 71, 89, 07, 05, 44, 44, 37, 44, 60, 21, 58, 51, 54, 17, 58],
[19, 80, 81, 68, 05, 94, 47, 69, 28, 73, 92, 13, 86, 52, 17, 77, 04, 89, 55, 40],
[04, 52, 8, 83, 97, 35, 99, 16, 07, 97, 57, 32, 16, 26, 26, 79, 33, 27, 98, 66],
[88, 36, 68, 87, 57, 62, 20, 72, 03, 46, 33, 67, 46, 55, 12, 32, 63, 93, 53, 69],
[04, 42, 16, 73, 38, 25, 39, 11, 24, 94, 72, 18, 8, 46, 29, 32, 40, 62, 76, 36],
[20, 69, 36, 41, 72, 30, 23, 88, 34, 62, 99, 69, 82, 67, 59, 85, 74, 04, 36, 16],
[20, 73, 35, 29, 78, 31, 90, 01, 74, 31, 49, 71, 48, 86, 81, 16, 23, 57, 05, 54],
[01, 70, 54, 71, 83, 51, 54, 69, 16, 92, 33, 48, 61, 43, 52, 01, 89, 19, 67, 48]]
self.assertEqual(largest_grid_product(to_matrix(grid), 4), 70600674)
if __name__ == '__main__':
unittest.main()
| false | true |
f7f7d102523ded9ffc78ba807746901a032398de | 1,894 | py | Python | modules/dbnd/test_dbnd/task/basics/test_task_parameter_namespace.py | ipattarapong/dbnd | 7bd65621c46c73e078eb628f994127ad4c7dbd1a | [
"Apache-2.0"
] | null | null | null | modules/dbnd/test_dbnd/task/basics/test_task_parameter_namespace.py | ipattarapong/dbnd | 7bd65621c46c73e078eb628f994127ad4c7dbd1a | [
"Apache-2.0"
] | null | null | null | modules/dbnd/test_dbnd/task/basics/test_task_parameter_namespace.py | ipattarapong/dbnd | 7bd65621c46c73e078eb628f994127ad4c7dbd1a | [
"Apache-2.0"
] | null | null | null | from typing import List, Tuple
from dbnd import config, dbnd_run_cmd, parameter
from dbnd_test_scenarios.test_common.task.factories import TTask
class TestParameterNamespaceTask(object):
    """Verify that ``task_namespace`` scopes dbnd parameter overrides.

    A namespaced task's parameters are addressed as
    "<namespace>.<ClassName>" both in config sections and on the CLI.
    """

    def testWithNamespaceConfig(self):
        """A config section keyed by the namespaced name feeds the task."""
        class A(TTask):
            task_namespace = "mynamespace"
            p = parameter[int]

        # The string "999" from config is expected to be coerced to the
        # declared int type.
        with config({"mynamespace.A": {"p": "999"}}):
            assert 999 == A().p

    def testWithNamespaceCli(self):
        """CLI overrides with -r reach int parameters of a namespaced task."""
        class A(TTask):
            task_namespace = "mynamespace"
            p1 = parameter.value(100)
            expected = parameter[int]

            # complete() doubles as the assertion: the run fails unless
            # p1 ended up equal to expected.
            def complete(self):
                if self.p1 != self.expected:
                    raise ValueError
                return True

        assert dbnd_run_cmd("mynamespace.A -r expected=100")
        assert dbnd_run_cmd("mynamespace.A -r p1=200 -r expected=200")

    def testListWithNamespaceCli(self):
        """CLI overrides parse list-valued parameters like [1,2,3]."""
        class A(TTask):
            task_namespace = "mynamespace"
            l_param = parameter.value([1, 2, 3])
            expected = parameter[List[int]]

            def complete(self):
                if self.l_param != self.expected:
                    raise ValueError
                return True

        assert dbnd_run_cmd("mynamespace.A -r expected=[1,2,3]")
        assert dbnd_run_cmd("mynamespace.A -r l_param=[1,2,3] -r expected=[1,2,3]")

    def testTupleWithNamespaceCli(self):
        """CLI overrides parse nested tuple parameters like ((1,2),(3,4))."""
        class A(TTask):
            task_namespace = "mynamespace"
            t = parameter.value(((1, 2), (3, 4)))
            expected = parameter[Tuple]

            def complete(self):
                if self.t != self.expected:
                    raise ValueError
                return True

        assert dbnd_run_cmd("mynamespace.A -r expected=((1,2),(3,4))")
        assert dbnd_run_cmd(
            "mynamespace.A -r t=((1,2),(3,4)) -r expected=((1,2),(3,4))"
        )
| 32.101695 | 83 | 0.561774 | from typing import List, Tuple
from dbnd import config, dbnd_run_cmd, parameter
from dbnd_test_scenarios.test_common.task.factories import TTask
class TestParameterNamespaceTask(object):
def testWithNamespaceConfig(self):
class A(TTask):
task_namespace = "mynamespace"
p = parameter[int]
with config({"mynamespace.A": {"p": "999"}}):
assert 999 == A().p
def testWithNamespaceCli(self):
class A(TTask):
task_namespace = "mynamespace"
p1 = parameter.value(100)
expected = parameter[int]
def complete(self):
if self.p1 != self.expected:
raise ValueError
return True
assert dbnd_run_cmd("mynamespace.A -r expected=100")
assert dbnd_run_cmd("mynamespace.A -r p1=200 -r expected=200")
def testListWithNamespaceCli(self):
class A(TTask):
task_namespace = "mynamespace"
l_param = parameter.value([1, 2, 3])
expected = parameter[List[int]]
def complete(self):
if self.l_param != self.expected:
raise ValueError
return True
assert dbnd_run_cmd("mynamespace.A -r expected=[1,2,3]")
assert dbnd_run_cmd("mynamespace.A -r l_param=[1,2,3] -r expected=[1,2,3]")
def testTupleWithNamespaceCli(self):
class A(TTask):
task_namespace = "mynamespace"
t = parameter.value(((1, 2), (3, 4)))
expected = parameter[Tuple]
def complete(self):
if self.t != self.expected:
raise ValueError
return True
assert dbnd_run_cmd("mynamespace.A -r expected=((1,2),(3,4))")
assert dbnd_run_cmd(
"mynamespace.A -r t=((1,2),(3,4)) -r expected=((1,2),(3,4))"
)
| true | true |
f7f7d12084910febfa9bfe72586243ba16d66099 | 329 | py | Python | Chapter02/blocks.py | JeffreyAsuncion/PythonEssentialTraining | adf9164ac01db35f2f657e58ec60d9bcc197dcda | [
"MIT"
] | null | null | null | Chapter02/blocks.py | JeffreyAsuncion/PythonEssentialTraining | adf9164ac01db35f2f657e58ec60d9bcc197dcda | [
"MIT"
] | null | null | null | Chapter02/blocks.py | JeffreyAsuncion/PythonEssentialTraining | adf9164ac01db35f2f657e58ec60d9bcc197dcda | [
"MIT"
] | null | null | null | #!/home/jepoy/anaconda3/bin/python
## at terminal which python
x = 42
y = 73
# Python has no switch/case statement for this kind of value dispatch;
# an if / elif / else chain covers it.
if x > y:
    # Bug fix: this branch runs when x is GREATER than y, but the
    # original printed "x < y" here — the message now matches the test.
    print('x > y: x is {} and y is {}'.format(x, y))
elif x < y:
    print('x < y: x is {} and y is {}'.format(x, y))
else:
    print('do something else')
| 19.352941 | 52 | 0.598784 |
> y:
print('x < y: x is {} and y is {}'.format(x, y))
elif x < y:
print('x < y: x is {} and y is {}'.format(x, y))
else:
print('do something else')
| true | true |
f7f7d1817f4059106fc1ec05748d9b67eb5f4663 | 243 | py | Python | loopchain/blockchain/transactions/v3/__init__.py | windies21/loopchain | 6e96c8a7e006747af04187155678f2fae59e1389 | [
"Apache-2.0"
] | 105 | 2018-04-03T05:29:08.000Z | 2022-01-28T17:33:20.000Z | loopchain/blockchain/transactions/v3/__init__.py | laurenceyoon/loopchain | e87032779be4715c135c2c91d2757d9c63bf4e31 | [
"Apache-2.0"
] | 135 | 2018-09-04T07:11:02.000Z | 2021-12-15T06:25:47.000Z | loopchain/blockchain/transactions/v3/__init__.py | laurenceyoon/loopchain | e87032779be4715c135c2c91d2757d9c63bf4e31 | [
"Apache-2.0"
] | 46 | 2018-05-07T09:12:07.000Z | 2022-02-23T09:58:37.000Z | from .transaction import Transaction, HASH_SALT
from .transaction_builder import TransactionBuilder
from .transaction_serializer import TransactionSerializer
from .transaction_verifier import TransactionVerifier
version = Transaction.version
| 34.714286 | 57 | 0.884774 | from .transaction import Transaction, HASH_SALT
from .transaction_builder import TransactionBuilder
from .transaction_serializer import TransactionSerializer
from .transaction_verifier import TransactionVerifier
version = Transaction.version
| true | true |
f7f7d3837c542f18a2a63c5615d45f19cdbbdd50 | 2,705 | py | Python | Python/Telemetry2U.py | telemetry2u/public | cda938d5806c034e5cb3277a6c98fb1acbc0f528 | [
"MIT"
] | 1 | 2022-03-17T09:03:50.000Z | 2022-03-17T09:03:50.000Z | Python/Telemetry2U.py | telemetry2u/public | cda938d5806c034e5cb3277a6c98fb1acbc0f528 | [
"MIT"
] | null | null | null | Python/Telemetry2U.py | telemetry2u/public | cda938d5806c034e5cb3277a6c98fb1acbc0f528 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# Simple Python script demonstrating use of the Telemetry2U APi
# to retrieve data from LoRaWAN nodes.
# MIT License
# Copyright (c) 2021 Telemetry2U Pty Lrd
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from http.client import HTTPSConnection
import json
import pandas as pd
import matplotlib.pyplot as plt
from datetime import datetime, timedelta
def do_request(request):
    """Perform an authenticated GET against the Telemetry2U API.

    Parameters
    ----------
    request : str
        Path part of the URL, e.g. "/api/nodes".

    Returns
    -------
    pandas.DataFrame
        The JSON response flattened with pandas.json_normalize.
    """
    connection = HTTPSConnection("telemetry2u.com")
    # Your API key and authorization string may be generated under the
    # Account / Api Keys section of your Telemetry2U account.
    # The following authorization details are for the demo account and
    # may be used for experimentation.
    authorization = "Basic ZGVtb0BleGFtcGxlLm9yZzpQOXg2ZGgrSXpZYVV1NS9mUHpjL1JZZkh3VzFuL0gyNStsMVNlYi9TY3oxUQ=="
    headers = {"Authorization": authorization}
    try:
        connection.request("GET", request, headers=headers)
        response = connection.getresponse()
        data = json.loads(response.read())
    finally:
        # Fix: the original never closed the connection (resource leak).
        connection.close()
    return pd.json_normalize(data)
def main():
    """Demo entry point: list nodes, pick the LHT65 demo node and plot a
    week of its internal temperature and humidity readings."""
    # Retrieve and print the list of node ids / descriptions.
    nodes = do_request("/api/nodes")
    print(nodes[['nodeId', 'description']])
    # Find the nodeId for "LHT65 Fridge/Freezer Demo".
    nodeId = nodes.query("description=='LHT65 Fridge/Freezer Demo'")["nodeId"].values[0]
    # Call the api/data endpoint to retrieve data for the past week.
    startDate = (datetime.now() - timedelta(days=7)).strftime('%Y-%m-%d')
    # Use a far-future end date so the most recent readings are included.
    endDate = "9999-12-31"
    data = do_request(f"/api/data/{nodeId}/{startDate}/{endDate}")
    data['Int. Temperature'].plot()
    data['Int. Humidity'].plot()
    plt.show()
if __name__ == '__main__':
main()
| 57.553191 | 462 | 0.754529 |
from http.client import HTTPSConnection
import json
import pandas as pd
import matplotlib.pyplot as plt
from datetime import datetime, timedelta
def do_request(request):
connection = HTTPSConnection("telemetry2u.com")
authorization = "Basic ZGVtb0BleGFtcGxlLm9yZzpQOXg2ZGgrSXpZYVV1NS9mUHpjL1JZZkh3VzFuL0gyNStsMVNlYi9TY3oxUQ=="
headers = { "Authorization" : authorization}
connection.request("GET", request, headers=headers)
response = connection.getresponse()
data = json.loads(response.read())
return pd.json_normalize(data)
def main():
nodes = do_request("/api/nodes")
print(nodes[['nodeId', 'description']])
nodeId = nodes.query("description=='LHT65 Fridge/Freezer Demo'")["nodeId"].values[0]
startDate = (datetime.now() - timedelta(days=7)).strftime('%Y-%m-%d')
endDate = "9999-12-31";
data = do_request(f"/api/data/{nodeId}/{startDate}/{endDate}")
data['Int. Temperature'].plot()
data['Int. Humidity'].plot()
plt.show()
if __name__ == '__main__':
main()
| true | true |
f7f7d3b1e916426ac4b1c88235f7a839530182d9 | 879 | py | Python | py/test/selenium/webdriver/marionette/mn_launcher_tests.py | davidgonzalezbarbe/Selenium | 55e370c99a289d36a6ecc41978f7fe2d3813b21c | [
"Apache-2.0"
] | null | null | null | py/test/selenium/webdriver/marionette/mn_launcher_tests.py | davidgonzalezbarbe/Selenium | 55e370c99a289d36a6ecc41978f7fe2d3813b21c | [
"Apache-2.0"
] | null | null | null | py/test/selenium/webdriver/marionette/mn_launcher_tests.py | davidgonzalezbarbe/Selenium | 55e370c99a289d36a6ecc41978f7fe2d3813b21c | [
"Apache-2.0"
] | 1 | 2021-08-11T15:39:13.000Z | 2021-08-11T15:39:13.000Z | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
def test_launch_and_close_browser(driver):
assert 'appBuildId' in driver.capabilities
| 41.857143 | 62 | 0.777019 |
def test_launch_and_close_browser(driver):
assert 'appBuildId' in driver.capabilities
| true | true |
f7f7d4f8e15ddacb4563e60ff86dbb0051705ebb | 2,075 | py | Python | source/component/3rd-party/btstack/raw/tool/sm_random_check.py | liangyongxiang/vsf-all-in-one | 942676bd201bb4fa6a3262f77150ab2d7766ec88 | [
"Apache-2.0"
] | 1 | 2022-01-04T08:06:04.000Z | 2022-01-04T08:06:04.000Z | source/component/3rd-party/btstack/raw/tool/sm_random_check.py | liangyongxiang/vsf-all-in-one | 942676bd201bb4fa6a3262f77150ab2d7766ec88 | [
"Apache-2.0"
] | null | null | null | source/component/3rd-party/btstack/raw/tool/sm_random_check.py | liangyongxiang/vsf-all-in-one | 942676bd201bb4fa6a3262f77150ab2d7766ec88 | [
"Apache-2.0"
] | 1 | 2021-08-23T10:11:47.000Z | 2021-08-23T10:11:47.000Z | #!/usr/bin/env python3
# BlueKitchen GmbH (c) 2014
# Report SM Pairing Random packets with value zero
import re
import sys
import time
import datetime
packet_types = [ "CMD =>", "EVT <=", "ACL =>", "ACL <="]
def read_net_32(f):
    """Read a big-endian 32-bit unsigned integer from *f*.

    Returns -1 if the stream ends before four bytes could be read.
    """
    value = 0
    for _ in range(4):
        byte = f.read(1)
        if byte == '':
            # Premature end of stream.
            return -1
        value = (value << 8) | ord(byte)
    return value
def as_hex(data):
    """Format each byte of *data* as two hex digits followed by a space."""
    return ''.join("{0:02x} ".format(ord(ch)) for ch in data)
def check_file(infile):
    """Scan a PacketLogger (.pklg) capture and report SM Pairing Random
    packets whose 16-byte random value is all zeroes.

    Record layout: 4-byte big-endian length, 4-byte seconds, 4-byte
    microseconds, 1-byte packet type, then the packet payload.

    Fix: the locals previously named ``len``, ``type`` and ``time``
    shadowed the builtins / the imported ``time`` module; they are
    renamed (behavior unchanged).
    """
    with open(infile, 'rb') as fin:
        pos = 0
        warning = True
        try:
            while True:
                length = read_net_32(fin)
                if length < 0:
                    break
                ts_sec = read_net_32(fin)
                ts_usec = read_net_32(fin)
                pkt_type = ord(fin.read(1))
                # 'length' includes the 9 header bytes after the length field.
                packet_len = length - 9
                if packet_len > 66000:
                    print("Error parsing pklg at offset %u (%x)." % (pos, pos))
                    break
                packet = fin.read(packet_len)
                pos = pos + 4 + length
                timestamp = "[%s.%03u]" % (
                    datetime.datetime.fromtimestamp(ts_sec).strftime("%Y-%m-%d %H:%M:%S"),
                    ts_usec / 1000)
                # Only ACL data packets (types 0x02/0x03) can carry SMP traffic.
                if pkt_type not in [0x02, 0x03]:
                    continue
                packet_boundary_flags = (ord(packet[1]) >> 4) & 3
                if packet_boundary_flags not in [0x00, 0x02]:
                    continue
                # L2CAP channel 0x06 is the Security Manager protocol.
                channel = ord(packet[6]) | (ord(packet[7]) << 8)
                if channel != 0x06:
                    continue
                smp_command = ord(packet[8])
                if smp_command != 4:  # SM Pairing Random
                    continue
                random = [ord(i) for i in packet[9:25]]
                num_zeros = random.count(0)
                if num_zeros != 16:
                    continue
                if warning:
                    print("%s contains SM Pairing Random command with Zeroes:" % infile)
                    warning = False
                print(timestamp, packet_types[pkt_type], as_hex(packet))
            if not warning:
                print("")
        except TypeError:
            print("Error parsing pklg at offset %u (%x)." % (pos, pos))
# Command line handling: require at least one capture file, then check
# every file given on the command line.
if len(sys.argv) == 1:
    print('Usage: ' + sys.argv[0] + ' hci_dump.pklg')
    exit(0)

# Bug fix: the original iterated sys.argv[2:], silently skipping the
# first capture file, and left a dead trailing `infile = sys.argv[1]`.
for infile in sys.argv[1:]:
    check_file(infile)
| 23.314607 | 115 | 0.577831 |
import re
import sys
import time
import datetime
packet_types = [ "CMD =>", "EVT <=", "ACL =>", "ACL <="]
def read_net_32(f):
a = f.read(1)
if a == '':
return -1
b = f.read(1)
if b == '':
return -1
c = f.read(1)
if c == '':
return -1
d = f.read(1)
if d == '':
return -1
return ord(a) << 24 | ord(b) << 16 | ord(c) << 8 | ord(d)
def as_hex(data):
str_list = []
for byte in data:
str_list.append("{0:02x} ".format(ord(byte)))
return ''.join(str_list)
def check_file(infile):
with open (infile, 'rb') as fin:
pos = 0
warning = True
try:
while True:
len = read_net_32(fin)
if len < 0:
break
ts_sec = read_net_32(fin)
ts_usec = read_net_32(fin)
type = ord(fin.read(1))
packet_len = len - 9;
if (packet_len > 66000):
print ("Error parsing pklg at offset %u (%x)." % (pos, pos))
break
packet = fin.read(packet_len)
pos = pos + 4 + len
time = "[%s.%03u]" % (datetime.datetime.fromtimestamp(ts_sec).strftime("%Y-%m-%d %H:%M:%S"), ts_usec / 1000)
if type not in [0x02, 0x03]:
continue
packet_boundary_flags = (ord(packet[1]) >> 4) & 3
if packet_boundary_flags not in [0x00, 0x02]:
continue
channel = ord(packet[6]) | (ord(packet[7]) << 8)
if channel != 0x06:
continue
smp_command = ord(packet[8])
if smp_command != 4:
continue
random = [ ord(i) for i in packet[9:25] ]
num_zeros = random.count(0)
if num_zeros != 16:
continue
if warning:
print("%s contains SM Pairing Random command with Zeroes:" % infile)
warning = False
print (time, packet_types[type], as_hex(packet))
if not warning:
print("")
except TypeError:
print ("Error parsing pklg at offset %u (%x)." % (pos, pos))
if len(sys.argv) == 1:
print ('Usage: ' + sys.argv[0] + ' hci_dump.pklg')
exit(0)
for infile in sys.argv[2:]:
check_file(infile)
infile = sys.argv[1]
| true | true |
f7f7d55f73ddac875cdb4cce2d68544d8db2d01e | 1,758 | py | Python | InstaF_Python/RGB_manipulation.py | lyyu0413/InstaF_Python | 949f5bd1c6d4d534409e36c977fd3b12b555f3d3 | [
"MIT"
] | null | null | null | InstaF_Python/RGB_manipulation.py | lyyu0413/InstaF_Python | 949f5bd1c6d4d534409e36c977fd3b12b555f3d3 | [
"MIT"
] | null | null | null | InstaF_Python/RGB_manipulation.py | lyyu0413/InstaF_Python | 949f5bd1c6d4d534409e36c977fd3b12b555f3d3 | [
"MIT"
] | null | null | null | # Copyright 2019 Betty Zhou
import numpy as np
import skimage.io
import pytest
def RGB_manipulation(input_path, output_path, R=2, G=2, B=2):
    '''
    Manipulates the RGB intensity of an image

    Inputs
    ------
    input_path: string, path for an image file in .png format
    output_path: string, path for the output image in .png format
    R: int, the weight to adjust intensity for red channel, default 2
    G: int, the weight to adjust intensity for green channel, default 2
    B: int, the weight to adjust intensity for blue channel, default 2

    Returns
    -------
    .png format image at the output path
    '''
    # Read in .png as np.array and exception handling
    try:
        img = skimage.io.imread(input_path)[:, :, :3]
    except AttributeError:
        print("Please provide the input path as a string")
        raise
    # Bug fix: FileNotFoundError is a subclass of OSError, so it must
    # be listed first, otherwise this handler was unreachable.
    except FileNotFoundError:
        print("The input path does not exist")
        raise
    except OSError:
        print("The input is not a .png file")
        raise
    except Exception as error:
        # Bug fix: the original called the undefined name `paste`,
        # which raised a NameError and masked the real error.
        print("Error: ", error)
        raise

    # Per-channel weights broadcast against the (H, W, 3) image array.
    # (renamed from `filter`, which shadowed the builtin)
    weights = np.array([[[R, G, B]]])
    # uint8 * Python int promotes to a wider integer dtype, so the
    # multiplication does not wrap before clipping.
    output = img * weights

    # Clip intensities above 255 and ensure the output is uint8.
    output[output > 255] = 255
    output_img = output.astype(np.uint8)

    # Write the manipulated image to output_path.
    try:
        skimage.io.imsave(output_path, output_img)
    except AttributeError:
        print("Please provide the output path as a string")
        raise
    except Exception as error:
        print("Error: ", error)
        raise
| 29.79661 | 80 | 0.652446 |
import numpy as np
import skimage.io
import pytest
def RGB_manipulation(input_path, output_path, R = 2, G = 2, B = 2):
try:
img = skimage.io.imread(input_path)[:,:,:3]
except AttributeError:
print("Please provide the input path as a string")
raise
except OSError:
print("The input is not a .png file")
raise
except FileNotFoundError:
print("The input path does not exist")
raise
except Exception as error:
paste("Error: ", error)
raise
filter = np.array([[[R, G, B]]])
output = img * filter
output[output > 255] = 255
output_img = output.astype(np.uint8)
try:
skimage.io.imsave(output_path, output_img)
except AttributeError:
print("Please provide the output path as a string")
raise
except Exception as error:
paste("Error: ", error)
raise
| true | true |
f7f7d5afd71925510e65c3bd434f1f938101a701 | 19,676 | py | Python | lib/kb_DRAM/kb_DRAMImpl.py | shafferm/kb_DRAM | f414190b56e12aef939e405413b930b96ccb781a | [
"MIT"
] | null | null | null | lib/kb_DRAM/kb_DRAMImpl.py | shafferm/kb_DRAM | f414190b56e12aef939e405413b930b96ccb781a | [
"MIT"
] | 1 | 2021-07-23T15:43:21.000Z | 2021-07-25T19:23:13.000Z | lib/kb_DRAM/kb_DRAMImpl.py | shafferm/kb_DRAM | f414190b56e12aef939e405413b930b96ccb781a | [
"MIT"
] | 1 | 2021-11-28T16:32:51.000Z | 2021-11-28T16:32:51.000Z | # -*- coding: utf-8 -*-
#BEGIN_HEADER
import logging
import os
import pandas as pd
import yaml
import warnings
from mag_annotator import __version__ as dram_version
from mag_annotator.database_processing import import_config, set_database_paths, print_database_locations
from mag_annotator.annotate_bins import annotate_bins, annotate_called_genes
from mag_annotator.summarize_genomes import summarize_genomes
from mag_annotator.annotate_vgfs import annotate_vgfs, remove_bad_chars
from mag_annotator.summarize_vgfs import summarize_vgfs
from mag_annotator.utils import remove_suffix
from installed_clients.WorkspaceClient import Workspace as workspaceService
from installed_clients.AssemblyUtilClient import AssemblyUtil
from installed_clients.GenomeFileUtilClient import GenomeFileUtil
from installed_clients.annotation_ontology_apiServiceClient import annotation_ontology_api
from installed_clients.KBaseDataObjectToFileUtilsClient import KBaseDataObjectToFileUtils
from installed_clients.DataFileUtilClient import DataFileUtil
from .utils.dram_util import get_annotation_files, get_distill_files, generate_genomes, add_ontology_terms,\
get_viral_distill_files
from .utils.kbase_util import generate_product_report
THREADS = 30
# TODO: Fix no pfam annotations bug
#END_HEADER
class kb_DRAM:
'''
Module Name:
kb_DRAM
Module Description:
A KBase module: kb_DRAM
'''
######## WARNING FOR GEVENT USERS ####### noqa
# Since asynchronous IO can lead to methods - even the same method -
# interrupting each other, you must be *very* careful when using global
# state. A method could easily clobber the state set by another while
# the latter method is running.
######################################### noqa
VERSION = "0.0.2"
GIT_URL = "https://github.com/shafferm/kb_DRAM.git"
GIT_COMMIT_HASH = "6c91eb1cdbd74eec6efd105477c89b76f34cabd9"
#BEGIN_CLASS_HEADER
#END_CLASS_HEADER
# config contains contents of config file in a hash or None if it couldn't
# be found
def __init__(self, config):
    #BEGIN_CONSTRUCTOR
    # SDK callback server URL (injected by the KBase job runner); used
    # by the installed clients (AssemblyUtil, GenomeFileUtil, ...).
    self.callback_url = os.environ['SDK_CALLBACK_URL']
    # Workspace service endpoint from the deployment config.
    self.workspaceURL = config['workspace-url']
    # Node-local scratch directory for intermediate DRAM output.
    self.shared_folder = config['scratch']
    logging.basicConfig(format='%(created)s %(levelname)s: %(message)s',
                        level=logging.INFO)
    #END_CONSTRUCTOR
    pass
def run_kb_dram_annotate(self, ctx, params):
"""
This example function accepts any number of parameters and returns results in a KBaseReport
:param params: instance of mapping from String to unspecified object
:returns: instance of type "ReportResults" -> structure: parameter
"report_name" of String, parameter "report_ref" of String
"""
# ctx is the context object
# return variables are: output
#BEGIN run_kb_dram_annotate
# validate inputs
if not isinstance(params['assembly_input_ref'], str) or not len(params['assembly_input_ref']):
raise ValueError('Pass in a valid assembly reference string')
if not isinstance(params['desc'], str) or not len(params['desc']):
raise ValueError('Pass in a valid genomeSet description')
if not isinstance(params['output_name'], str) or not len(params['output_name']):
raise ValueError('Pass in a valid genomeSet output name')
if not isinstance(params['min_contig_size'], int) or (params['min_contig_size'] < 0):
raise ValueError('Min contig size must be a non-negative integer')
# setup params
with open("/kb/module/kbase.yml", 'r') as stream:
data_loaded = yaml.load(stream)
version = str(data_loaded['module-version'])
is_metagenome = params['is_metagenome']
min_contig_size = params['min_contig_size']
trans_table = str(params['trans_table'])
bitscore = params['bitscore']
rbh_bitscore = params['rbh_bitscore']
output_dir = os.path.join(self.shared_folder, 'DRAM_annos')
output_objects = []
# create Util objects
wsClient = workspaceService(self.workspaceURL, token=ctx['token'])
assembly_util = AssemblyUtil(self.callback_url)
genome_util = GenomeFileUtil(self.callback_url)
# set DRAM database locations
print('DRAM version: %s' % dram_version)
import_config('/data/DRAM_databases/CONFIG')
# This is a hack to get around a bug in my database setup
set_database_paths(description_db_loc='/data/DRAM_databases/description_db.sqlite')
print_database_locations()
# get files
assemblies = assembly_util.get_fastas({'ref_lst': [params['assembly_input_ref']]})
# would paths ever have more than one thing?
fasta_locs = [assembly_data['paths'][0] for assembly_ref, assembly_data in assemblies.items()]
# get assembly refs from dram assigned genome names
assembly_ref_dict = {os.path.splitext(os.path.basename(remove_suffix(assembly_data['paths'][0], '.gz')))[0]:
assembly_ref for assembly_ref, assembly_data in assemblies.items()}
# annotate and distill with DRAM
annotate_bins(fasta_locs, output_dir, min_contig_size, trans_table=trans_table, bit_score_threshold=bitscore,
rbh_bit_score_threshold=rbh_bitscore, low_mem_mode=True, rename_bins=False, keep_tmp_dir=False,
threads=THREADS, verbose=False)
output_files = get_annotation_files(output_dir)
distill_output_dir = os.path.join(output_dir, 'distilled')
summarize_genomes(output_files['annotations']['path'], output_files['trnas']['path'],
output_files['rrnas']['path'], output_dir=distill_output_dir, groupby_column='fasta')
output_files = get_distill_files(distill_output_dir, output_files)
if is_metagenome:
pass # TODO: make annotated metagenome object
else:
# generate genome files
annotations = pd.read_csv(output_files['annotations']['path'], sep='\t', index_col=0)
genome_objects = generate_genomes(annotations, output_files['genes_fna']['path'],
output_files['genes_faa']['path'], assembly_ref_dict, assemblies,
params["workspace_name"], ctx.provenance())
genome_ref_dict = dict()
genome_set_elements = dict()
for genome_object in genome_objects:
info = genome_util.save_one_genome(genome_object)["info"]
genome_ref = '%s/%s/%s' % (info[6], info[0], info[4])
genome_set_elements[genome_object["name"]] = {'ref': genome_ref}
output_objects.append({"ref": genome_ref,
"description": 'Annotated Genome'})
genome_ref_dict[genome_object["name"]] = genome_ref
# add ontology terms
anno_api = annotation_ontology_api(service_ver="beta")
ontology_events = add_ontology_terms(annotations, params['desc'], version, params['workspace_name'],
self.workspaceURL, genome_ref_dict)
[anno_api.add_annotation_ontology_events(i) for i in ontology_events]
# make genome set
# TODO: only make genome set if there is more than one genome
if 'provenance' in ctx:
provenance = ctx['provenance']
else:
provenance = [{}]
# add additional info to provenance here, in this case the input data object reference
provenance[0]['input_ws_objects'] = list(genome_ref_dict.values())
provenance[0]['service'] = 'kb_SetUtilities'
provenance[0]['method'] = 'KButil_Batch_Create_GenomeSet'
output_genomeSet_obj = {'description': params['desc'],
'elements': genome_set_elements}
output_genomeSet_name = params['output_name']
new_obj_info = wsClient.save_objects({'workspace': params['workspace_name'],
'objects': [{'type': 'KBaseSearch.GenomeSet',
'data': output_genomeSet_obj,
'name': output_genomeSet_name,
'meta': {},
'provenance': provenance
}]
})[0]
genome_set_ref = '%s/%s/%s' % (new_obj_info[6], new_obj_info[0], new_obj_info[4])
output_objects.append({"ref": genome_set_ref,
"description": params['desc']})
# generate report
product_html_loc = os.path.join(distill_output_dir, 'product.html')
report = generate_product_report(self.callback_url, params['workspace_name'], output_dir, product_html_loc,
output_files, output_objects)
output = {
'report_name': report['name'],
'report_ref': report['ref'],
}
#END run_kb_dram_annotate
# At some point might do deeper type checking...
if not isinstance(output, dict):
raise ValueError('Method run_kb_dram_annotate return value ' +
'output is not type dict as required.')
# return the results
return [output]
def run_kb_dram_annotate_genome(self, ctx, params):
"""
:param params: instance of mapping from String to unspecified object
:returns: instance of type "ReportResults" -> structure: parameter
"report_name" of String, parameter "report_ref" of String
"""
# ctx is the context object
# return variables are: output
#BEGIN run_kb_dram_annotate_genome
# validate inputs
if not isinstance(params['genome_input_ref'], str) or not len(params['genome_input_ref']):
raise ValueError('Pass in a valid genome reference string')
# setup
with open("/kb/module/kbase.yml", 'r') as stream:
data_loaded = yaml.load(stream)
version = str(data_loaded['module-version'])
genome_input_ref = params['genome_input_ref']
bitscore = params['bitscore']
rbh_bitscore = params['rbh_bitscore']
# create Util objects
wsClient = workspaceService(self.workspaceURL, token=ctx['token'])
object_to_file_utils = KBaseDataObjectToFileUtils(self.callback_url, token=ctx['token'])
# set DRAM database locations
print('DRAM version: %s' % dram_version)
import_config('/data/DRAM_databases/CONFIG')
# This is a hack to get around a bug in my database setup
set_database_paths(description_db_loc='/data/DRAM_databases/description_db.sqlite')
print_database_locations()
# get genomes
genome_dir = os.path.join(self.shared_folder, 'genomes')
os.mkdir(genome_dir)
genome_info = wsClient.get_object_info_new({'objects': [{'ref': genome_input_ref}]})[0]
genome_input_type = genome_info[2]
faa_locs = list()
genome_ref_dict = {}
if 'GenomeSet' in genome_input_type:
genomeSet_object = wsClient.get_objects2({'objects': [{'ref': genome_input_ref}]})['data'][0]['data']
for ref_dict in genomeSet_object['elements'].values():
genome_ref = ref_dict['ref']
name = wsClient.get_object_info_new({'objects': [{'ref': genome_ref}]})[0][1]
genome_ref_dict[name] = genome_ref
else:
genome_ref_dict[genome_info[1]] = genome_input_ref
for genome_name, genome_ref in genome_ref_dict.items():
# this makes the names match if you are doing a genome or genomeSet
faa_file = '%s.faa' % genome_name
faa_object = object_to_file_utils.GenomeToFASTA({"genome_ref": genome_ref,
"file": faa_file,
"dir": genome_dir,
"console": [],
"invalid_msgs": [],
"residue_type": 'P',
"feature_type": None,
"record_id_pattern": None,
"record_desc_pattern": None,
"case": None,
"linewrap": None})
faa_locs.append(faa_object['fasta_file_path'])
# annotate and distill with DRAM
output_dir = os.path.join(self.shared_folder, 'DRAM_annos')
annotate_called_genes(faa_locs, output_dir, bit_score_threshold=bitscore, rbh_bit_score_threshold=rbh_bitscore,
low_mem_mode=True, rename_genes=False, keep_tmp_dir=False, threads=THREADS, verbose=False)
output_files = get_annotation_files(output_dir)
distill_output_dir = os.path.join(output_dir, 'distilled')
summarize_genomes(output_files['annotations']['path'], output_files['trnas']['path'],
output_files['rrnas']['path'], output_dir=distill_output_dir, groupby_column='fasta')
output_files = get_distill_files(distill_output_dir, output_files)
# add ontology terms
annotations = pd.read_csv(output_files['annotations']['path'], sep='\t', index_col=0, dtype={'fasta': str})
anno_api = annotation_ontology_api(service_ver="beta")
ontology_events = add_ontology_terms(annotations, "DRAM genome annotated", version, params['workspace_name'],
self.workspaceURL, genome_ref_dict)
annotation_events = [anno_api.add_annotation_ontology_events(i) for i in ontology_events]
# generate report
product_html_loc = os.path.join(distill_output_dir, 'product.html')
report = generate_product_report(self.callback_url, params['workspace_name'], output_dir, product_html_loc,
output_files)
output = {
'report_name': report['name'],
'report_ref': report['ref'],
}
#END run_kb_dram_annotate_genome
# At some point might do deeper type checking...
if not isinstance(output, dict):
raise ValueError('Method run_kb_dram_annotate_genome return value ' +
'output is not type dict as required.')
# return the results
return [output]
def run_kb_dramv_annotate(self, ctx, params):
"""
:param params: instance of mapping from String to unspecified object
:returns: instance of type "ReportResults" -> structure: parameter
"report_name" of String, parameter "report_ref" of String
"""
# ctx is the context object
# return variables are: output
#BEGIN run_kb_dramv_annotate
warnings.filterwarnings("ignore")
# setup
affi_contigs_shock_ids = params['affi_contigs_shock_id']
min_contig_size = params['min_contig_size']
trans_table = str(params['trans_table'])
bitscore = params['bitscore']
rbh_bitscore = params['rbh_bitscore']
assembly_util = AssemblyUtil(self.callback_url)
datafile_util = DataFileUtil(self.callback_url)
# get contigs and merge
assemblies = assembly_util.get_fastas({'ref_lst': [params['assembly_input_ref']]})
fasta = os.path.join(self.shared_folder, 'merged_contigs.fasta')
with open(fasta, 'w') as f:
for assembly_ref, assembly_data in assemblies.items():
fasta_path = assembly_data['paths'][0]
for line in open(fasta_path):
f.write(line)
# get affi contigs, read all and merge
affi_contigs_path = os.path.join(self.shared_folder, 'VIRSorter_affi-contigs.tab')
with open(affi_contigs_path, 'w') as f:
for affi_contigs_shock_id in affi_contigs_shock_ids:
temp_affi_contigs_path = os.path.join(self.shared_folder, 'temp_VIRSorter_affi-contigs.tab')
temp_affi_contigs = datafile_util.shock_to_file({
'shock_id': affi_contigs_shock_id,
'file_path': temp_affi_contigs_path,
'unpack': 'unpack'
})['file_path']
for line in open(temp_affi_contigs):
f.write(line)
os.remove(temp_affi_contigs)
# set DRAM database locations
print('DRAM version: %s' % dram_version)
import_config('/data/DRAM_databases/CONFIG')
# This is a hack to get around a bug in my database setup
set_database_paths(description_db_loc='/data/DRAM_databases/description_db.sqlite')
print_database_locations()
# clean affi contigs file
cleaned_fasta = os.path.join(self.shared_folder, '%s.cleaned.fasta' % os.path.basename(fasta))
remove_bad_chars(input_fasta=fasta, output=cleaned_fasta)
cleaned_affi_contigs = os.path.join(self.shared_folder, 'VIRSorter_affi-contigs.cleaned.tab')
remove_bad_chars(input_virsorter_affi_contigs=affi_contigs_path, output=cleaned_affi_contigs)
# annotate and distill
output_dir = os.path.join(self.shared_folder, 'DRAM_annos')
annotate_vgfs(cleaned_fasta, cleaned_affi_contigs, output_dir, min_contig_size, trans_table=trans_table,
bit_score_threshold=bitscore, rbh_bit_score_threshold=rbh_bitscore, low_mem_mode=True,
keep_tmp_dir=False, threads=THREADS, verbose=False)
output_files = get_annotation_files(output_dir)
distill_output_dir = os.path.join(output_dir, 'distilled')
summarize_vgfs(output_files['annotations']['path'], distill_output_dir, groupby_column='scaffold')
output_files = get_viral_distill_files(distill_output_dir, output_files)
# generate report
product_html_loc = os.path.join(distill_output_dir, 'product.html')
report = generate_product_report(self.callback_url, params['workspace_name'], output_dir,
product_html_loc, output_files)
output = {
'report_name': report['name'],
'report_ref': report['ref'],
}
#END run_kb_dramv_annotate
# At some point might do deeper type checking...
if not isinstance(output, dict):
raise ValueError('Method run_kb_dramv_annotate return value ' +
'output is not type dict as required.')
# return the results
return [output]
def status(self, ctx):
#BEGIN_STATUS
returnVal = {'state': "OK",
'message': "",
'version': self.VERSION,
'git_url': self.GIT_URL,
'git_commit_hash': self.GIT_COMMIT_HASH}
#END_STATUS
return [returnVal]
| 50.71134 | 120 | 0.616233 |
import logging
import os
import pandas as pd
import yaml
import warnings
from mag_annotator import __version__ as dram_version
from mag_annotator.database_processing import import_config, set_database_paths, print_database_locations
from mag_annotator.annotate_bins import annotate_bins, annotate_called_genes
from mag_annotator.summarize_genomes import summarize_genomes
from mag_annotator.annotate_vgfs import annotate_vgfs, remove_bad_chars
from mag_annotator.summarize_vgfs import summarize_vgfs
from mag_annotator.utils import remove_suffix
from installed_clients.WorkspaceClient import Workspace as workspaceService
from installed_clients.AssemblyUtilClient import AssemblyUtil
from installed_clients.GenomeFileUtilClient import GenomeFileUtil
from installed_clients.annotation_ontology_apiServiceClient import annotation_ontology_api
from installed_clients.KBaseDataObjectToFileUtilsClient import KBaseDataObjectToFileUtils
from installed_clients.DataFileUtilClient import DataFileUtil
from .utils.dram_util import get_annotation_files, get_distill_files, generate_genomes, add_ontology_terms,\
get_viral_distill_files
from .utils.kbase_util import generate_product_report
THREADS = 30
class kb_DRAM:
contig size must be a non-negative integer')
# setup params
with open("/kb/module/kbase.yml", 'r') as stream:
data_loaded = yaml.load(stream)
version = str(data_loaded['module-version'])
is_metagenome = params['is_metagenome']
min_contig_size = params['min_contig_size']
trans_table = str(params['trans_table'])
bitscore = params['bitscore']
rbh_bitscore = params['rbh_bitscore']
output_dir = os.path.join(self.shared_folder, 'DRAM_annos')
output_objects = []
# create Util objects
wsClient = workspaceService(self.workspaceURL, token=ctx['token'])
assembly_util = AssemblyUtil(self.callback_url)
genome_util = GenomeFileUtil(self.callback_url)
# set DRAM database locations
print('DRAM version: %s' % dram_version)
import_config('/data/DRAM_databases/CONFIG')
# This is a hack to get around a bug in my database setup
set_database_paths(description_db_loc='/data/DRAM_databases/description_db.sqlite')
print_database_locations()
# get files
assemblies = assembly_util.get_fastas({'ref_lst': [params['assembly_input_ref']]})
# would paths ever have more than one thing?
fasta_locs = [assembly_data['paths'][0] for assembly_ref, assembly_data in assemblies.items()]
# get assembly refs from dram assigned genome names
assembly_ref_dict = {os.path.splitext(os.path.basename(remove_suffix(assembly_data['paths'][0], '.gz')))[0]:
assembly_ref for assembly_ref, assembly_data in assemblies.items()}
# annotate and distill with DRAM
annotate_bins(fasta_locs, output_dir, min_contig_size, trans_table=trans_table, bit_score_threshold=bitscore,
rbh_bit_score_threshold=rbh_bitscore, low_mem_mode=True, rename_bins=False, keep_tmp_dir=False,
threads=THREADS, verbose=False)
output_files = get_annotation_files(output_dir)
distill_output_dir = os.path.join(output_dir, 'distilled')
summarize_genomes(output_files['annotations']['path'], output_files['trnas']['path'],
output_files['rrnas']['path'], output_dir=distill_output_dir, groupby_column='fasta')
output_files = get_distill_files(distill_output_dir, output_files)
if is_metagenome:
pass # TODO: make annotated metagenome object
else:
# generate genome files
annotations = pd.read_csv(output_files['annotations']['path'], sep='\t', index_col=0)
genome_objects = generate_genomes(annotations, output_files['genes_fna']['path'],
output_files['genes_faa']['path'], assembly_ref_dict, assemblies,
params["workspace_name"], ctx.provenance())
genome_ref_dict = dict()
genome_set_elements = dict()
for genome_object in genome_objects:
info = genome_util.save_one_genome(genome_object)["info"]
genome_ref = '%s/%s/%s' % (info[6], info[0], info[4])
genome_set_elements[genome_object["name"]] = {'ref': genome_ref}
output_objects.append({"ref": genome_ref,
"description": 'Annotated Genome'})
genome_ref_dict[genome_object["name"]] = genome_ref
# add ontology terms
anno_api = annotation_ontology_api(service_ver="beta")
ontology_events = add_ontology_terms(annotations, params['desc'], version, params['workspace_name'],
self.workspaceURL, genome_ref_dict)
[anno_api.add_annotation_ontology_events(i) for i in ontology_events]
# make genome set
# TODO: only make genome set if there is more than one genome
if 'provenance' in ctx:
provenance = ctx['provenance']
else:
provenance = [{}]
# add additional info to provenance here, in this case the input data object reference
provenance[0]['input_ws_objects'] = list(genome_ref_dict.values())
provenance[0]['service'] = 'kb_SetUtilities'
provenance[0]['method'] = 'KButil_Batch_Create_GenomeSet'
output_genomeSet_obj = {'description': params['desc'],
'elements': genome_set_elements}
output_genomeSet_name = params['output_name']
new_obj_info = wsClient.save_objects({'workspace': params['workspace_name'],
'objects': [{'type': 'KBaseSearch.GenomeSet',
'data': output_genomeSet_obj,
'name': output_genomeSet_name,
'meta': {},
'provenance': provenance
}]
})[0]
genome_set_ref = '%s/%s/%s' % (new_obj_info[6], new_obj_info[0], new_obj_info[4])
output_objects.append({"ref": genome_set_ref,
"description": params['desc']})
# generate report
product_html_loc = os.path.join(distill_output_dir, 'product.html')
report = generate_product_report(self.callback_url, params['workspace_name'], output_dir, product_html_loc,
output_files, output_objects)
output = {
'report_name': report['name'],
'report_ref': report['ref'],
}
#END run_kb_dram_annotate
# At some point might do deeper type checking...
if not isinstance(output, dict):
raise ValueError('Method run_kb_dram_annotate return value ' +
'output is not type dict as required.')
# return the results
return [output]
def run_kb_dram_annotate_genome(self, ctx, params):
# ctx is the context object
# return variables are: output
#BEGIN run_kb_dram_annotate_genome
# validate inputs
if not isinstance(params['genome_input_ref'], str) or not len(params['genome_input_ref']):
raise ValueError('Pass in a valid genome reference string')
# setup
with open("/kb/module/kbase.yml", 'r') as stream:
data_loaded = yaml.load(stream)
version = str(data_loaded['module-version'])
genome_input_ref = params['genome_input_ref']
bitscore = params['bitscore']
rbh_bitscore = params['rbh_bitscore']
# create Util objects
wsClient = workspaceService(self.workspaceURL, token=ctx['token'])
object_to_file_utils = KBaseDataObjectToFileUtils(self.callback_url, token=ctx['token'])
# set DRAM database locations
print('DRAM version: %s' % dram_version)
import_config('/data/DRAM_databases/CONFIG')
# This is a hack to get around a bug in my database setup
set_database_paths(description_db_loc='/data/DRAM_databases/description_db.sqlite')
print_database_locations()
# get genomes
genome_dir = os.path.join(self.shared_folder, 'genomes')
os.mkdir(genome_dir)
genome_info = wsClient.get_object_info_new({'objects': [{'ref': genome_input_ref}]})[0]
genome_input_type = genome_info[2]
faa_locs = list()
genome_ref_dict = {}
if 'GenomeSet' in genome_input_type:
genomeSet_object = wsClient.get_objects2({'objects': [{'ref': genome_input_ref}]})['data'][0]['data']
for ref_dict in genomeSet_object['elements'].values():
genome_ref = ref_dict['ref']
name = wsClient.get_object_info_new({'objects': [{'ref': genome_ref}]})[0][1]
genome_ref_dict[name] = genome_ref
else:
genome_ref_dict[genome_info[1]] = genome_input_ref
for genome_name, genome_ref in genome_ref_dict.items():
# this makes the names match if you are doing a genome or genomeSet
faa_file = '%s.faa' % genome_name
faa_object = object_to_file_utils.GenomeToFASTA({"genome_ref": genome_ref,
"file": faa_file,
"dir": genome_dir,
"console": [],
"invalid_msgs": [],
"residue_type": 'P',
"feature_type": None,
"record_id_pattern": None,
"record_desc_pattern": None,
"case": None,
"linewrap": None})
faa_locs.append(faa_object['fasta_file_path'])
# annotate and distill with DRAM
output_dir = os.path.join(self.shared_folder, 'DRAM_annos')
annotate_called_genes(faa_locs, output_dir, bit_score_threshold=bitscore, rbh_bit_score_threshold=rbh_bitscore,
low_mem_mode=True, rename_genes=False, keep_tmp_dir=False, threads=THREADS, verbose=False)
output_files = get_annotation_files(output_dir)
distill_output_dir = os.path.join(output_dir, 'distilled')
summarize_genomes(output_files['annotations']['path'], output_files['trnas']['path'],
output_files['rrnas']['path'], output_dir=distill_output_dir, groupby_column='fasta')
output_files = get_distill_files(distill_output_dir, output_files)
# add ontology terms
annotations = pd.read_csv(output_files['annotations']['path'], sep='\t', index_col=0, dtype={'fasta': str})
anno_api = annotation_ontology_api(service_ver="beta")
ontology_events = add_ontology_terms(annotations, "DRAM genome annotated", version, params['workspace_name'],
self.workspaceURL, genome_ref_dict)
annotation_events = [anno_api.add_annotation_ontology_events(i) for i in ontology_events]
# generate report
product_html_loc = os.path.join(distill_output_dir, 'product.html')
report = generate_product_report(self.callback_url, params['workspace_name'], output_dir, product_html_loc,
output_files)
output = {
'report_name': report['name'],
'report_ref': report['ref'],
}
#END run_kb_dram_annotate_genome
# At some point might do deeper type checking...
if not isinstance(output, dict):
raise ValueError('Method run_kb_dram_annotate_genome return value ' +
'output is not type dict as required.')
# return the results
return [output]
def run_kb_dramv_annotate(self, ctx, params):
# ctx is the context object
# return variables are: output
#BEGIN run_kb_dramv_annotate
warnings.filterwarnings("ignore")
# setup
affi_contigs_shock_ids = params['affi_contigs_shock_id']
min_contig_size = params['min_contig_size']
trans_table = str(params['trans_table'])
bitscore = params['bitscore']
rbh_bitscore = params['rbh_bitscore']
assembly_util = AssemblyUtil(self.callback_url)
datafile_util = DataFileUtil(self.callback_url)
# get contigs and merge
assemblies = assembly_util.get_fastas({'ref_lst': [params['assembly_input_ref']]})
fasta = os.path.join(self.shared_folder, 'merged_contigs.fasta')
with open(fasta, 'w') as f:
for assembly_ref, assembly_data in assemblies.items():
fasta_path = assembly_data['paths'][0]
for line in open(fasta_path):
f.write(line)
# get affi contigs, read all and merge
affi_contigs_path = os.path.join(self.shared_folder, 'VIRSorter_affi-contigs.tab')
with open(affi_contigs_path, 'w') as f:
for affi_contigs_shock_id in affi_contigs_shock_ids:
temp_affi_contigs_path = os.path.join(self.shared_folder, 'temp_VIRSorter_affi-contigs.tab')
temp_affi_contigs = datafile_util.shock_to_file({
'shock_id': affi_contigs_shock_id,
'file_path': temp_affi_contigs_path,
'unpack': 'unpack'
})['file_path']
for line in open(temp_affi_contigs):
f.write(line)
os.remove(temp_affi_contigs)
# set DRAM database locations
print('DRAM version: %s' % dram_version)
import_config('/data/DRAM_databases/CONFIG')
# This is a hack to get around a bug in my database setup
set_database_paths(description_db_loc='/data/DRAM_databases/description_db.sqlite')
print_database_locations()
# clean affi contigs file
cleaned_fasta = os.path.join(self.shared_folder, '%s.cleaned.fasta' % os.path.basename(fasta))
remove_bad_chars(input_fasta=fasta, output=cleaned_fasta)
cleaned_affi_contigs = os.path.join(self.shared_folder, 'VIRSorter_affi-contigs.cleaned.tab')
remove_bad_chars(input_virsorter_affi_contigs=affi_contigs_path, output=cleaned_affi_contigs)
# annotate and distill
output_dir = os.path.join(self.shared_folder, 'DRAM_annos')
annotate_vgfs(cleaned_fasta, cleaned_affi_contigs, output_dir, min_contig_size, trans_table=trans_table,
bit_score_threshold=bitscore, rbh_bit_score_threshold=rbh_bitscore, low_mem_mode=True,
keep_tmp_dir=False, threads=THREADS, verbose=False)
output_files = get_annotation_files(output_dir)
distill_output_dir = os.path.join(output_dir, 'distilled')
summarize_vgfs(output_files['annotations']['path'], distill_output_dir, groupby_column='scaffold')
output_files = get_viral_distill_files(distill_output_dir, output_files)
# generate report
product_html_loc = os.path.join(distill_output_dir, 'product.html')
report = generate_product_report(self.callback_url, params['workspace_name'], output_dir,
product_html_loc, output_files)
output = {
'report_name': report['name'],
'report_ref': report['ref'],
}
#END run_kb_dramv_annotate
# At some point might do deeper type checking...
if not isinstance(output, dict):
raise ValueError('Method run_kb_dramv_annotate return value ' +
'output is not type dict as required.')
# return the results
return [output]
def status(self, ctx):
#BEGIN_STATUS
returnVal = {'state': "OK",
'message': "",
'version': self.VERSION,
'git_url': self.GIT_URL,
'git_commit_hash': self.GIT_COMMIT_HASH}
#END_STATUS
return [returnVal]
| true | true |
f7f7d6777c100497eec646d3e43e5c24e733fa6f | 1,185 | py | Python | src/ufdl/json/object_detection/_Polygon.py | waikato-ufdl/ufdl-json-messages | 408901bdf79aa9ae7cff1af165deee83e62f6088 | [
"Apache-2.0"
] | null | null | null | src/ufdl/json/object_detection/_Polygon.py | waikato-ufdl/ufdl-json-messages | 408901bdf79aa9ae7cff1af165deee83e62f6088 | [
"Apache-2.0"
] | null | null | null | src/ufdl/json/object_detection/_Polygon.py | waikato-ufdl/ufdl-json-messages | 408901bdf79aa9ae7cff1af165deee83e62f6088 | [
"Apache-2.0"
] | null | null | null | from typing import List
from wai.common.geometry import Polygon as GeometricPolygon, Point
from wai.json.object import StrictJSONObject
from wai.json.object.property import ArrayProperty, NumberProperty
class Polygon(StrictJSONObject['Polygon']):
    """
    A polygon mask attached to an object-detection annotation.

    The mask is stored as a list of [x, y] integer vertex pairs; at least
    three vertices are required to describe a valid polygon.
    """
    # Vertices of the polygon as [x, y] integer pairs
    points: List[List[int]] = ArrayProperty(
        element_property=ArrayProperty(
            element_property=NumberProperty(integer_only=True),
            min_elements=2, max_elements=2
        ),
        min_elements=3
    )

    def to_geometric_polygon(self) -> GeometricPolygon:
        """
        Converts this record into a geometric polygon.

        :return: The geometric polygon.
        """
        vertices = (Point(x, y) for x, y in self.points)
        return GeometricPolygon(*vertices)

    @staticmethod
    def from_geometric_polygon(polygon: GeometricPolygon) -> 'Polygon':
        """
        Builds a Polygon record from a geometric polygon.

        :param polygon: The geometric polygon.
        :return: The Polygon record.
        """
        vertex_pairs = [[vertex.x, vertex.y] for vertex in polygon.points]
        return Polygon(points=vertex_pairs)
| 30.384615 | 71 | 0.657384 | from typing import List
from wai.common.geometry import Polygon as GeometricPolygon, Point
from wai.json.object import StrictJSONObject
from wai.json.object.property import ArrayProperty, NumberProperty
class Polygon(StrictJSONObject['Polygon']):
points: List[List[int]] = ArrayProperty(
element_property=ArrayProperty(
element_property=NumberProperty(integer_only=True),
min_elements=2, max_elements=2
),
min_elements=3
)
def to_geometric_polygon(self) -> GeometricPolygon:
return GeometricPolygon(*(Point(x, y) for x, y in self.points))
@staticmethod
def from_geometric_polygon(polygon: GeometricPolygon) -> 'Polygon':
return Polygon(points=list([p.x, p.y] for p in polygon.points))
| true | true |
f7f7d72d14b3c09f97209cdf0e3c209ff236c0fb | 1,390 | py | Python | face_recognition/face_recogniser.py | jagannath-sahoo/face-recognition- | e19fbe1a4fd853d4c79f69c42fd055b6463f5fab | [
"BSD-3-Clause"
] | 103 | 2019-11-14T15:53:43.000Z | 2022-03-30T18:14:45.000Z | face_recognition/face_recogniser.py | jagannath-sahoo/face-recognition- | e19fbe1a4fd853d4c79f69c42fd055b6463f5fab | [
"BSD-3-Clause"
] | 20 | 2020-02-05T08:17:12.000Z | 2021-05-19T10:38:01.000Z | face_recognition/face_recogniser.py | jagannath-sahoo/face-recognition- | e19fbe1a4fd853d4c79f69c42fd055b6463f5fab | [
"BSD-3-Clause"
] | 41 | 2020-01-08T10:07:48.000Z | 2022-03-15T14:57:36.000Z | from collections import namedtuple
# Result records shared across the module.
Prediction = namedtuple('Prediction', ['label', 'confidence'])
Face = namedtuple('Face', ['top_prediction', 'bb', 'all_predictions'])
BoundingBox = namedtuple('BoundingBox', ['left', 'top', 'right', 'bottom'])


def top_prediction(idx_to_class, probs):
    """Return the single most confident Prediction from a probability vector."""
    best = probs.argmax()
    return Prediction(idx_to_class[best], probs[best])


def to_predictions(idx_to_class, probs):
    """Return one Prediction per class, in class-index order."""
    return [Prediction(idx_to_class[index], prob)
            for index, prob in enumerate(probs)]


class FaceRecogniser:
    """Pipeline that detects faces in an image and classifies each one.

    Combines a feature extractor (face detection + embeddings) with a
    classifier that maps embeddings to class probabilities.
    """

    def __init__(self, feature_extractor, classifier, idx_to_class):
        self.feature_extractor = feature_extractor
        self.classifier = classifier
        self.idx_to_class = idx_to_class

    def recognise_faces(self, img):
        """Detect and classify every face in *img*; return a list of Face records."""
        bbs, embeddings = self.feature_extractor(img)
        if bbs is None:
            # no faces were detected
            return []
        probabilities = self.classifier.predict_proba(embeddings)
        faces = []
        for bb, probs in zip(bbs, probabilities):
            box = BoundingBox(left=bb[0], top=bb[1], right=bb[2], bottom=bb[3])
            faces.append(Face(
                top_prediction=top_prediction(self.idx_to_class, probs),
                bb=box,
                all_predictions=to_predictions(self.idx_to_class, probs),
            ))
        return faces

    def __call__(self, img):
        return self.recognise_faces(img)
| 33.095238 | 95 | 0.67482 | from collections import namedtuple
Prediction = namedtuple('Prediction', 'label confidence')
Face = namedtuple('Face', 'top_prediction bb all_predictions')
BoundingBox = namedtuple('BoundingBox', 'left top right bottom')
def top_prediction(idx_to_class, probs):
top_label = probs.argmax()
return Prediction(label=idx_to_class[top_label], confidence=probs[top_label])
def to_predictions(idx_to_class, probs):
return [Prediction(label=idx_to_class[i], confidence=prob) for i, prob in enumerate(probs)]
class FaceRecogniser:
def __init__(self, feature_extractor, classifier, idx_to_class):
self.feature_extractor = feature_extractor
self.classifier = classifier
self.idx_to_class = idx_to_class
def recognise_faces(self, img):
bbs, embeddings = self.feature_extractor(img)
if bbs is None:
return []
predictions = self.classifier.predict_proba(embeddings)
return [
Face(
top_prediction=top_prediction(self.idx_to_class, probs),
bb=BoundingBox(left=bb[0], top=bb[1], right=bb[2], bottom=bb[3]),
all_predictions=to_predictions(self.idx_to_class, probs)
)
for bb, probs in zip(bbs, predictions)
]
def __call__(self, img):
return self.recognise_faces(img)
| true | true |
f7f7d7ce0fda930b46fa0108ba3de629fdbd4704 | 2,267 | py | Python | 常用分割损失函数和指标/WCCE.py | 1044197988/TF.Keras-Commonly-used-models | b37276bcee454b2c39b8fcc60e87b72ec8a6a5d4 | [
"Apache-2.0"
] | 160 | 2019-09-19T14:13:23.000Z | 2022-03-25T03:14:20.000Z | 常用分割损失函数和指标/WCCE.py | johonnyyang/TF.Keras-Commonly-used-models | b37276bcee454b2c39b8fcc60e87b72ec8a6a5d4 | [
"Apache-2.0"
] | 1 | 2020-11-11T08:37:02.000Z | 2020-11-11T08:37:58.000Z | 常用分割损失函数和指标/WCCE.py | johonnyyang/TF.Keras-Commonly-used-models | b37276bcee454b2c39b8fcc60e87b72ec8a6a5d4 | [
"Apache-2.0"
] | 70 | 2019-09-24T03:05:09.000Z | 2022-03-25T03:14:21.000Z | from tensorflow.python import keras
from itertools import product
import numpy as np
from tensorflow.python.keras.utils import losses_utils
# weights -> 2-D array of shape (num_classes, num_classes); entry [t, p] is the
# penalty multiplier applied when true class t is predicted as class p.
class WeightedCategoricalCrossentropy(keras.losses.CategoricalCrossentropy):
    """Categorical cross-entropy scaled per sample by a (true, predicted) class weight matrix."""
    def __init__(
        self,
        weights,
        from_logits=False,
        label_smoothing=0,
        reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
        name='categorical_crossentropy',
    ):
        # NOTE(review): base-class args are passed positionally; this assumes
        # CategoricalCrossentropy(from_logits, label_smoothing, reduction, ...)
        # ordering -- confirm against the installed TensorFlow version.
        super().__init__(
            from_logits, label_smoothing, reduction, name=f"weighted_{name}"
        )
        self.weights = weights
    def call(self, y_true, y_pred):
        """Return the base cross-entropy scaled by weights[true_class, argmax_pred_class]."""
        weights = self.weights
        nb_cl = len(weights)
        final_mask = keras.backend.zeros_like(y_pred[:, 0])
        # build a 0/1 matrix marking each sample's max-probability (predicted) class
        y_pred_max = keras.backend.max(y_pred, axis=1)
        y_pred_max = keras.backend.reshape(
            y_pred_max, (keras.backend.shape(y_pred)[0], 1))
        y_pred_max_mat = keras.backend.cast(
            keras.backend.equal(y_pred, y_pred_max), keras.backend.floatx())
        # accumulate weights[t, p] over all (predicted p, true t) pairs; for
        # one-hot y_true and an untied argmax, one term per sample is non-zero
        for c_p, c_t in product(range(nb_cl), range(nb_cl)):
            final_mask += (
                weights[c_t, c_p] * y_pred_max_mat[:, c_p] * y_true[:, c_t])
        return super().call(y_true, y_pred) * final_mask
from tensorflow.keras import backend as K
def weighted_categorical_crossentropy(weights):
"""
A weighted version of keras.objectives.categorical_crossentropy
Variables:
weights: numpy array of shape (C,) where C is the number of classes
Usage:
weights = np.array([0.5,2,10]) # Class one at 0.5, class 2 twice the normal weights, class 3 10x.
loss = weighted_categorical_crossentropy(weights)
model.compile(loss=loss,optimizer='adam')
"""
weights = K.variable(weights)
def loss(y_true, y_pred):
# scale predictions so that the class probas of each sample sum to 1
y_pred /= K.sum(y_pred, axis=-1, keepdims=True)
# clip to prevent NaN's and Inf's
y_pred = K.clip(y_pred, K.epsilon(), 1 - K.epsilon())
# calc
loss = y_true * K.log(y_pred) * weights
loss = -K.sum(loss, -1)
return loss
return loss
| 35.984127 | 106 | 0.622408 | from tensorflow.python import keras
from itertools import product
import numpy as np
from tensorflow.python.keras.utils import losses_utils
class WeightedCategoricalCrossentropy(keras.losses.CategoricalCrossentropy):
def __init__(
self,
weights,
from_logits=False,
label_smoothing=0,
reduction=losses_utils.ReductionV2.SUM_OVER_BATCH_SIZE,
name='categorical_crossentropy',
):
super().__init__(
from_logits, label_smoothing, reduction, name=f"weighted_{name}"
)
self.weights = weights
def call(self, y_true, y_pred):
weights = self.weights
nb_cl = len(weights)
final_mask = keras.backend.zeros_like(y_pred[:, 0])
y_pred_max = keras.backend.max(y_pred, axis=1)
y_pred_max = keras.backend.reshape(
y_pred_max, (keras.backend.shape(y_pred)[0], 1))
y_pred_max_mat = keras.backend.cast(
keras.backend.equal(y_pred, y_pred_max), keras.backend.floatx())
for c_p, c_t in product(range(nb_cl), range(nb_cl)):
final_mask += (
weights[c_t, c_p] * y_pred_max_mat[:, c_p] * y_true[:, c_t])
return super().call(y_true, y_pred) * final_mask
from tensorflow.keras import backend as K
def weighted_categorical_crossentropy(weights):
weights = K.variable(weights)
def loss(y_true, y_pred):
y_pred /= K.sum(y_pred, axis=-1, keepdims=True)
y_pred = K.clip(y_pred, K.epsilon(), 1 - K.epsilon())
loss = y_true * K.log(y_pred) * weights
loss = -K.sum(loss, -1)
return loss
return loss
| true | true |
f7f7d90b35bda949d6319d2a1f2c991a52ce0192 | 5,750 | py | Python | django_unicorn/management/commands/startunicorn.py | nerdoc/django-unicorn | e512b8f64f5c276a78127db9a05d9d5c042232d5 | [
"MIT"
] | null | null | null | django_unicorn/management/commands/startunicorn.py | nerdoc/django-unicorn | e512b8f64f5c276a78127db9a05d9d5c042232d5 | [
"MIT"
] | null | null | null | django_unicorn/management/commands/startunicorn.py | nerdoc/django-unicorn | e512b8f64f5c276a78127db9a05d9d5c042232d5 | [
"MIT"
] | null | null | null | import os
import webbrowser
from pathlib import Path
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django_unicorn.components.unicorn_view import (
convert_to_pascal_case,
convert_to_snake_case,
)
COMPONENT_FILE_CONTENT = """from django_unicorn.components import UnicornView
class {pascal_case_component_name}View(UnicornView):
pass
"""
TEMPLATE_FILE_CONTENT = """<div>
<!-- put component code here -->
</div>
"""
class Command(BaseCommand):
help = "Creates a new component for `django-unicorn`"
def add_arguments(self, parser):
parser.add_argument("app_name", type=str)
parser.add_argument(
"component_names", nargs="+", type=str, help="Names of components"
)
def handle(self, *args, **options):
# Default from `django-cookiecutter`
base_path = getattr(settings, "APPS_DIR", None)
if not base_path:
# Default from new Django project
base_path = getattr(settings, "BASE_DIR", None)
if not base_path:
# Fallback to the current directory
base_path = os.getcwd()
base_path = Path(base_path)
if "app_name" not in options:
raise CommandError("An application name is required.")
if "component_names" not in options:
raise CommandError("At least one component name is required.")
app_name = options["app_name"]
app_directory = base_path / app_name
is_app_directory_correct = input(
f"\nUse '{app_directory}' for the app directory? [Y/n] "
)
if is_app_directory_correct.strip().lower() in ("n", "no"):
return
is_new_app = False
is_first_component = False
if not app_directory.exists():
is_new_app = True
app_directory.mkdir()
(app_directory / "__init__.py").touch(exist_ok=True)
# Create component
component_base_path = app_directory / "components"
if not component_base_path.exists():
component_base_path.mkdir()
self.stdout.write(
self.style.SUCCESS(f"Created your first component in {app_name}! ✨\n")
)
is_first_component = True
(component_base_path / "__init__.py").touch(exist_ok=True)
for component_name in options["component_names"]:
snake_case_component_name = convert_to_snake_case(component_name)
pascal_case_component_name = convert_to_pascal_case(component_name)
component_path = component_base_path / f"{snake_case_component_name}.py"
if component_path.exists():
self.stdout.write(
self.style.ERROR(
f"Skipping creating {snake_case_component_name}.py because it already exists."
)
)
else:
component_path.write_text(
COMPONENT_FILE_CONTENT.format(
**{"pascal_case_component_name": pascal_case_component_name}
)
)
self.stdout.write(self.style.SUCCESS(f"Created {component_path}."))
# Create template
template_base_path = app_directory / "templates" / "unicorn"
if not template_base_path.exists():
if not (app_directory / "templates").exists():
(app_directory / "templates").mkdir()
template_base_path.mkdir()
template_path = template_base_path / f"{component_name}.html"
if template_path.exists():
self.stdout.write(
self.style.ERROR(
f"Skipping creating {component_name}.html because it already exists."
)
)
else:
template_path.write_text(TEMPLATE_FILE_CONTENT)
self.stdout.write(self.style.SUCCESS(f"Created {template_path}."))
if is_first_component:
will_star_repo = input(
"\nStarring the GitHub repo helps other Django users find Unicorn. Can you star it for me? [y/N] "
)
if will_star_repo.strip().lower() in ("y", "yes"):
self.stdout.write(
self.style.SUCCESS(
"Thank you for helping spread the word about Unicorn!"
)
)
self.stdout.write(
"""
,/
//
,//
__ /| |//
`__/\\_ --(/|___/-/
\\|\\_-\\___ __-_`- /-/ \\.
|\\_-___,-\\_____--/_)' ) \\
\\ -_ / __ \\( `( __`\\|
`\\__| |\\) (/|
',--//-| \\ | ' /
/,---| \\ /
`_/ _,' | |
__/'/ | |
___/ \\ ( ) /
\\____/\\
\\
"""
)
webbrowser.open(
"https://github.com/adamghill/django-unicorn", new=2
)
else:
self.stdout.write(
self.style.ERROR(
"That's a bummer, but I understand. I hope you will star it for me later!"
)
)
if is_new_app:
self.stdout.write(
self.style.WARNING(
f'\nMake sure to add `"{app_name}",` to your INSTALLED_APPS list in your settings file if necessary.'
)
)
| 32.122905 | 125 | 0.518261 | import os
import webbrowser
from pathlib import Path
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django_unicorn.components.unicorn_view import (
convert_to_pascal_case,
convert_to_snake_case,
)
COMPONENT_FILE_CONTENT = """from django_unicorn.components import UnicornView
class {pascal_case_component_name}View(UnicornView):
pass
"""
TEMPLATE_FILE_CONTENT = """<div>
<!-- put component code here -->
</div>
"""
class Command(BaseCommand):
help = "Creates a new component for `django-unicorn`"
def add_arguments(self, parser):
parser.add_argument("app_name", type=str)
parser.add_argument(
"component_names", nargs="+", type=str, help="Names of components"
)
def handle(self, *args, **options):
base_path = getattr(settings, "APPS_DIR", None)
if not base_path:
base_path = getattr(settings, "BASE_DIR", None)
if not base_path:
base_path = os.getcwd()
base_path = Path(base_path)
if "app_name" not in options:
raise CommandError("An application name is required.")
if "component_names" not in options:
raise CommandError("At least one component name is required.")
app_name = options["app_name"]
app_directory = base_path / app_name
is_app_directory_correct = input(
f"\nUse '{app_directory}' for the app directory? [Y/n] "
)
if is_app_directory_correct.strip().lower() in ("n", "no"):
return
is_new_app = False
is_first_component = False
if not app_directory.exists():
is_new_app = True
app_directory.mkdir()
(app_directory / "__init__.py").touch(exist_ok=True)
component_base_path = app_directory / "components"
if not component_base_path.exists():
component_base_path.mkdir()
self.stdout.write(
self.style.SUCCESS(f"Created your first component in {app_name}! ✨\n")
)
is_first_component = True
(component_base_path / "__init__.py").touch(exist_ok=True)
for component_name in options["component_names"]:
snake_case_component_name = convert_to_snake_case(component_name)
pascal_case_component_name = convert_to_pascal_case(component_name)
component_path = component_base_path / f"{snake_case_component_name}.py"
if component_path.exists():
self.stdout.write(
self.style.ERROR(
f"Skipping creating {snake_case_component_name}.py because it already exists."
)
)
else:
component_path.write_text(
COMPONENT_FILE_CONTENT.format(
**{"pascal_case_component_name": pascal_case_component_name}
)
)
self.stdout.write(self.style.SUCCESS(f"Created {component_path}."))
template_base_path = app_directory / "templates" / "unicorn"
if not template_base_path.exists():
if not (app_directory / "templates").exists():
(app_directory / "templates").mkdir()
template_base_path.mkdir()
template_path = template_base_path / f"{component_name}.html"
if template_path.exists():
self.stdout.write(
self.style.ERROR(
f"Skipping creating {component_name}.html because it already exists."
)
)
else:
template_path.write_text(TEMPLATE_FILE_CONTENT)
self.stdout.write(self.style.SUCCESS(f"Created {template_path}."))
if is_first_component:
will_star_repo = input(
"\nStarring the GitHub repo helps other Django users find Unicorn. Can you star it for me? [y/N] "
)
if will_star_repo.strip().lower() in ("y", "yes"):
self.stdout.write(
self.style.SUCCESS(
"Thank you for helping spread the word about Unicorn!"
)
)
self.stdout.write(
"""
,/
//
,//
__ /| |//
`__/\\_ --(/|___/-/
\\|\\_-\\___ __-_`- /-/ \\.
|\\_-___,-\\_____--/_)' ) \\
\\ -_ / __ \\( `( __`\\|
`\\__| |\\) (/|
',--//-| \\ | ' /
/,---| \\ /
`_/ _,' | |
__/'/ | |
___/ \\ ( ) /
\\____/\\
\\
"""
)
webbrowser.open(
"https://github.com/adamghill/django-unicorn", new=2
)
else:
self.stdout.write(
self.style.ERROR(
"That's a bummer, but I understand. I hope you will star it for me later!"
)
)
if is_new_app:
self.stdout.write(
self.style.WARNING(
f'\nMake sure to add `"{app_name}",` to your INSTALLED_APPS list in your settings file if necessary.'
)
)
| true | true |
f7f7db319ae61f6acf2466c9778b837d2fe0fedd | 2,230 | py | Python | examples/ad_manager/v202105/line_item_creative_association_service/update_licas.py | timgates42/googleads-python-lib | 86c3d0558fcc30135ff44700a499678c7e69e0c0 | [
"Apache-2.0"
] | 601 | 2015-01-07T09:53:49.000Z | 2022-03-31T17:18:11.000Z | examples/ad_manager/v202105/line_item_creative_association_service/update_licas.py | timgates42/googleads-python-lib | 86c3d0558fcc30135ff44700a499678c7e69e0c0 | [
"Apache-2.0"
] | 443 | 2015-01-07T12:16:57.000Z | 2022-03-14T14:46:56.000Z | examples/ad_manager/v202105/line_item_creative_association_service/update_licas.py | timgates42/googleads-python-lib | 86c3d0558fcc30135ff44700a499678c7e69e0c0 | [
"Apache-2.0"
] | 1,135 | 2015-01-07T15:51:55.000Z | 2022-03-31T17:18:13.000Z | #!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example updates the destination URL of all line item creative
associations (LICA).
To determine which LICAs exist, run get_all_licas.py.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
def main(client):
# Initialize appropriate service.
lica_service = client.GetService(
'LineItemCreativeAssociationService', version='v202105')
# Create statement object to get all LICAs.
statement = ad_manager.StatementBuilder(version='v202105')
while True:
# Get LICAs by statement.
response = lica_service.getLineItemCreativeAssociationsByStatement(
statement.ToStatement())
if 'results' in response and len(response['results']):
# Update each local LICA object by changing its destination URL.
updated_licas = []
for lica in response['results']:
lica['destinationUrl'] = 'http://news.google.com'
updated_licas.append(lica)
# Update LICAs remotely.
licas = lica_service.updateLineItemCreativeAssociations(updated_licas)
# Display results.
for lica in licas:
print('LICA with line item id "%s", creative id "%s", and status '
'"%s" was updated.' % (lica['lineItemId'], lica['creativeId'],
lica['status']))
statement.offset += statement.limit
else:
break
if response['totalResultSetSize'] == 0:
print('No LICAs found to update.')
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client)
| 33.283582 | 76 | 0.70583 |
from googleads import ad_manager
def main(client):
lica_service = client.GetService(
'LineItemCreativeAssociationService', version='v202105')
statement = ad_manager.StatementBuilder(version='v202105')
while True:
response = lica_service.getLineItemCreativeAssociationsByStatement(
statement.ToStatement())
if 'results' in response and len(response['results']):
updated_licas = []
for lica in response['results']:
lica['destinationUrl'] = 'http://news.google.com'
updated_licas.append(lica)
licas = lica_service.updateLineItemCreativeAssociations(updated_licas)
for lica in licas:
print('LICA with line item id "%s", creative id "%s", and status '
'"%s" was updated.' % (lica['lineItemId'], lica['creativeId'],
lica['status']))
statement.offset += statement.limit
else:
break
if response['totalResultSetSize'] == 0:
print('No LICAs found to update.')
if __name__ == '__main__':
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client)
| true | true |
f7f7db3fda5972c16bbcd55607e45786d8fc39b2 | 1,935 | py | Python | elastalert/alerters/chatwork.py | vbisserie/elastalert2 | 982115f0de055595fa452c425b6a15bedc3161cf | [
"Apache-2.0"
] | null | null | null | elastalert/alerters/chatwork.py | vbisserie/elastalert2 | 982115f0de055595fa452c425b6a15bedc3161cf | [
"Apache-2.0"
] | null | null | null | elastalert/alerters/chatwork.py | vbisserie/elastalert2 | 982115f0de055595fa452c425b6a15bedc3161cf | [
"Apache-2.0"
] | null | null | null | import warnings
import requests
from requests import RequestException
from requests.auth import HTTPProxyAuth
from elastalert.alerts import Alerter
from elastalert.util import EAException, elastalert_logger
class ChatworkAlerter(Alerter):
""" Creates a Chatwork room message for each alert """
required_options = frozenset(['chatwork_apikey', 'chatwork_room_id'])
def __init__(self, rule):
super(ChatworkAlerter, self).__init__(rule)
self.chatwork_apikey = self.rule.get('chatwork_apikey')
self.chatwork_room_id = self.rule.get('chatwork_room_id')
self.url = 'https://api.chatwork.com/v2/rooms/%s/messages' % (self.chatwork_room_id)
self.chatwork_proxy = self.rule.get('chatwork_proxy', None)
self.chatwork_proxy_login = self.rule.get('chatwork_proxy_login', None)
self.chatwork_proxy_pass = self.rule.get('chatwork_proxy_pass', None)
def alert(self, matches):
body = self.create_alert_body(matches)
headers = {'X-ChatWorkToken': self.chatwork_apikey}
# set https proxy, if it was provided
proxies = {'https': self.chatwork_proxy} if self.chatwork_proxy else None
auth = HTTPProxyAuth(self.chatwork_proxy_login, self.chatwork_proxy_pass) if self.chatwork_proxy_login else None
params = {'body': body}
try:
response = requests.post(self.url, params=params, headers=headers, proxies=proxies, auth=auth)
warnings.resetwarnings()
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to Chattwork: %s. Details: %s" % (e, "" if e.response is None else e.response.text))
elastalert_logger.info(
"Alert sent to Chatwork room %s" % self.chatwork_room_id)
def get_info(self):
return {
"type": "chatwork",
"chatwork_room_id": self.chatwork_room_id
}
| 40.3125 | 129 | 0.687339 | import warnings
import requests
from requests import RequestException
from requests.auth import HTTPProxyAuth
from elastalert.alerts import Alerter
from elastalert.util import EAException, elastalert_logger
class ChatworkAlerter(Alerter):
required_options = frozenset(['chatwork_apikey', 'chatwork_room_id'])
def __init__(self, rule):
super(ChatworkAlerter, self).__init__(rule)
self.chatwork_apikey = self.rule.get('chatwork_apikey')
self.chatwork_room_id = self.rule.get('chatwork_room_id')
self.url = 'https://api.chatwork.com/v2/rooms/%s/messages' % (self.chatwork_room_id)
self.chatwork_proxy = self.rule.get('chatwork_proxy', None)
self.chatwork_proxy_login = self.rule.get('chatwork_proxy_login', None)
self.chatwork_proxy_pass = self.rule.get('chatwork_proxy_pass', None)
def alert(self, matches):
body = self.create_alert_body(matches)
headers = {'X-ChatWorkToken': self.chatwork_apikey}
proxies = {'https': self.chatwork_proxy} if self.chatwork_proxy else None
auth = HTTPProxyAuth(self.chatwork_proxy_login, self.chatwork_proxy_pass) if self.chatwork_proxy_login else None
params = {'body': body}
try:
response = requests.post(self.url, params=params, headers=headers, proxies=proxies, auth=auth)
warnings.resetwarnings()
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to Chattwork: %s. Details: %s" % (e, "" if e.response is None else e.response.text))
elastalert_logger.info(
"Alert sent to Chatwork room %s" % self.chatwork_room_id)
def get_info(self):
return {
"type": "chatwork",
"chatwork_room_id": self.chatwork_room_id
}
| true | true |
f7f7dbd3bf4f39a55cf023a5116bf3da3d2eb1bd | 5,651 | py | Python | cloudwatch_metric2csv.py | aidenkihyunkim/cloudwatch-metric-to-csv | dbcffbf37f9a4ff230983c00fe2a3612107d2b42 | [
"MIT"
] | null | null | null | cloudwatch_metric2csv.py | aidenkihyunkim/cloudwatch-metric-to-csv | dbcffbf37f9a4ff230983c00fe2a3612107d2b42 | [
"MIT"
] | null | null | null | cloudwatch_metric2csv.py | aidenkihyunkim/cloudwatch-metric-to-csv | dbcffbf37f9a4ff230983c00fe2a3612107d2b42 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
##############################################
# Export CloudWatch metric data to csv file
# Author: Aiden Kim
# 2020-11-19
##############################################
from datetime import timedelta, timezone, datetime
import argparse
import dateutil
import boto3
import re
import csv
import config
# parse argument
parser = argparse.ArgumentParser()
parser.add_argument("--name", help="""
The Name tag of EC2 or name of RDS or name of ALB.
This name allow multiple inputs separated by a comma.""")
parser.add_argument("--metric", help="""
The name of the CloudWatch metric.""")
parser.add_argument("--period", help="""
The granularity, in seconds, of the returned data points.
A period can be as short as one minute (60 seconds) and must be a multiple of 60.
The default value is 3600.""")
parser.add_argument("--start", help="""
The timestamp that determines the first data point to return.
This timestamp must be in ISO 8601 UTC format.
Default is 24 hours ago.""")
parser.add_argument("--end", help="""
The timestamp that determines the last data point to return.
This timestamp must be in ISO 8601 UTC format.
Default is now.""")
parser.add_argument("--statistics", help="""
The metric statistics.
Default is Average.""")
parser.add_argument("--file", help="""
The name of the output csv file.
Default is '[METRIC].csv'.""")
args = parser.parse_args()
# check if parameter is valid
metrics = dict((k.lower(), {'metric': k, 'namespace': v}) for k,v in config.METRICS.items())
allowed_statistics = ['SampleCount', 'Average', 'Sum', 'Minimum', 'Maximum']
if (not args.metric) or (args.metric.lower() not in metrics.keys()):
print('Invalid --metric {} provided. Valid metrics: {}'.format(args.metric, list(metrics.keys())))
exit(-1)
if (not args.name):
print('Invalid --name {} provided.'.format(args.name))
exit(-2)
if (args.statistics) and (args.statistics not in allowed_statistics):
print('Invalid --statistics {} provided. Valid statistics: {}'.format(args.statistics, allowed_statistics))
exit(-3)
# extract parameters
names = args.name.split(',')
metric = args.metric.lower()
period = int(args.period) if args.period else 3600
start = dateutil.parser.parse(args.start).astimezone(timezone.utc) if args.start else datetime.utcnow() - timedelta(days=1)
end = dateutil.parser.parse(args.end).astimezone(timezone.utc) if args.end else datetime.utcnow()
statistics = args.statistics if args.statistics else 'Average'
file = args.file if args.file else '{}.csv'.format(metrics[metric]['metric'])
# get metric datas
datas = []
cw = boto3.client('cloudwatch')
if ('AWS/EC2' in metrics[metric]['namespace']):
ec2 = boto3.client('ec2')
for name in names:
ec2_res = ec2.describe_instances(Filters=[{'Name': 'tag:Name', 'Values': [name]}])
if ec2_res and ('Reservations' in ec2_res) and (len(ec2_res['Reservations'])>0) and ('Instances' in ec2_res['Reservations'][0]) and (len(ec2_res['Reservations'][0]['Instances'])>0):
for instance in ec2_res['Reservations'][0]['Instances']:
cw_stats = cw.get_metric_statistics(
StartTime=start, EndTime=end, Period=period, Statistics=[statistics],
Namespace='AWS/EC2', MetricName=metrics[metric]['metric'],
Dimensions=[{'Name': 'InstanceId', 'Value': instance['InstanceId']}],
)
if ('Label' in cw_stats) and ('Datapoints' in cw_stats) and (len(cw_stats['Datapoints'])>0):
cw_stats['Label'] = '{}@{}'.format(cw_stats['Label'], name)
datas.append(cw_stats)
if ('AWS/RDS' in metrics[metric]['namespace']):
for name in names:
cw_stats = cw.get_metric_statistics(
StartTime=start, EndTime=end, Period=period, Statistics=[statistics],
Namespace='AWS/RDS', MetricName=metrics[metric]['metric'],
Dimensions=[{'Name': 'DBInstanceIdentifier', 'Value': name}],
)
if ('Label' in cw_stats) and ('Datapoints' in cw_stats) and (len(cw_stats['Datapoints'])>0):
cw_stats['Label'] = '{}@{}'.format(cw_stats['Label'], name)
datas.append(cw_stats)
if ('AWS/ApplicationELB' in metrics[metric]['namespace']):
elb2 = boto3.client('elbv2')
for name in names:
elb2_res = elb2.describe_load_balancers(Names=[name])
if elb2_res and ('LoadBalancers' in elb2_res) and (len(elb2_res['LoadBalancers'])>0):
for elb in elb2_res['LoadBalancers']:
cw_stats = cw.get_metric_statistics(
StartTime=start, EndTime=end, Period=period, Statistics=[statistics],
Namespace='AWS/ApplicationELB', MetricName=metrics[metric]['metric'],
Dimensions=[{'Name': 'LoadBalancer', 'Value': re.sub('arn:.+:loadbalancer/', '', elb['LoadBalancerArn'])}],
)
if ('Label' in cw_stats) and ('Datapoints' in cw_stats) and (len(cw_stats['Datapoints'])>0):
cw_stats['Label'] = '{}@{}'.format(cw_stats['Label'], name)
datas.append(cw_stats)
# merge datas to one sheet
sheet = {}
for data in datas:
for item in data['Datapoints']:
timestr = item['Timestamp'].strftime('%Y-%m-%d %H:%M:%SZ')
label = '{} ({})'.format(data['Label'], item['Unit'])
if (timestr in sheet):
sheet[timestr][label] = item[statistics]
else:
sheet[timestr] = { label: item[statistics] }
#print(sorted(sheet.items()))
if (len(sheet.keys()) == 0):
print('No metric data found.')
exit(-9)
# write csv file
with open(file, 'w', newline='') as csvfile:
csvwriter = csv.writer(
csvfile,
delimiter=',',
quotechar='"',
quoting=csv.QUOTE_MINIMAL)
for i, (k, v) in enumerate(sorted(sheet.items())):
if (i == 0) :
csvwriter.writerow( ['Time'] + sorted(v.keys()) )
csvwriter.writerow([k] + [v[key] for key in sorted(v.keys())])
print('{} rows were written on the file {}'.format(len(sheet.keys()), file))
exit(0);
| 40.949275 | 183 | 0.679703 |
util.parser.parse(args.end).astimezone(timezone.utc) if args.end else datetime.utcnow()
statistics = args.statistics if args.statistics else 'Average'
file = args.file if args.file else '{}.csv'.format(metrics[metric]['metric'])
datas = []
cw = boto3.client('cloudwatch')
if ('AWS/EC2' in metrics[metric]['namespace']):
ec2 = boto3.client('ec2')
for name in names:
ec2_res = ec2.describe_instances(Filters=[{'Name': 'tag:Name', 'Values': [name]}])
if ec2_res and ('Reservations' in ec2_res) and (len(ec2_res['Reservations'])>0) and ('Instances' in ec2_res['Reservations'][0]) and (len(ec2_res['Reservations'][0]['Instances'])>0):
for instance in ec2_res['Reservations'][0]['Instances']:
cw_stats = cw.get_metric_statistics(
StartTime=start, EndTime=end, Period=period, Statistics=[statistics],
Namespace='AWS/EC2', MetricName=metrics[metric]['metric'],
Dimensions=[{'Name': 'InstanceId', 'Value': instance['InstanceId']}],
)
if ('Label' in cw_stats) and ('Datapoints' in cw_stats) and (len(cw_stats['Datapoints'])>0):
cw_stats['Label'] = '{}@{}'.format(cw_stats['Label'], name)
datas.append(cw_stats)
if ('AWS/RDS' in metrics[metric]['namespace']):
for name in names:
cw_stats = cw.get_metric_statistics(
StartTime=start, EndTime=end, Period=period, Statistics=[statistics],
Namespace='AWS/RDS', MetricName=metrics[metric]['metric'],
Dimensions=[{'Name': 'DBInstanceIdentifier', 'Value': name}],
)
if ('Label' in cw_stats) and ('Datapoints' in cw_stats) and (len(cw_stats['Datapoints'])>0):
cw_stats['Label'] = '{}@{}'.format(cw_stats['Label'], name)
datas.append(cw_stats)
if ('AWS/ApplicationELB' in metrics[metric]['namespace']):
elb2 = boto3.client('elbv2')
for name in names:
elb2_res = elb2.describe_load_balancers(Names=[name])
if elb2_res and ('LoadBalancers' in elb2_res) and (len(elb2_res['LoadBalancers'])>0):
for elb in elb2_res['LoadBalancers']:
cw_stats = cw.get_metric_statistics(
StartTime=start, EndTime=end, Period=period, Statistics=[statistics],
Namespace='AWS/ApplicationELB', MetricName=metrics[metric]['metric'],
Dimensions=[{'Name': 'LoadBalancer', 'Value': re.sub('arn:.+:loadbalancer/', '', elb['LoadBalancerArn'])}],
)
if ('Label' in cw_stats) and ('Datapoints' in cw_stats) and (len(cw_stats['Datapoints'])>0):
cw_stats['Label'] = '{}@{}'.format(cw_stats['Label'], name)
datas.append(cw_stats)
sheet = {}
for data in datas:
for item in data['Datapoints']:
timestr = item['Timestamp'].strftime('%Y-%m-%d %H:%M:%SZ')
label = '{} ({})'.format(data['Label'], item['Unit'])
if (timestr in sheet):
sheet[timestr][label] = item[statistics]
else:
sheet[timestr] = { label: item[statistics] }
if (len(sheet.keys()) == 0):
print('No metric data found.')
exit(-9)
with open(file, 'w', newline='') as csvfile:
csvwriter = csv.writer(
csvfile,
delimiter=',',
quotechar='"',
quoting=csv.QUOTE_MINIMAL)
for i, (k, v) in enumerate(sorted(sheet.items())):
if (i == 0) :
csvwriter.writerow( ['Time'] + sorted(v.keys()) )
csvwriter.writerow([k] + [v[key] for key in sorted(v.keys())])
print('{} rows were written on the file {}'.format(len(sheet.keys()), file))
exit(0);
| true | true |
f7f7deb7a85c7ce164392a8f0cf063fd4f5fa9dd | 123,818 | py | Python | fissix/tests/test_fixers.py | orsinium/fissix-py35 | 48914fcb69842c9fe3c97652870c7610a2cc639b | [
"PSF-2.0"
] | null | null | null | fissix/tests/test_fixers.py | orsinium/fissix-py35 | 48914fcb69842c9fe3c97652870c7610a2cc639b | [
"PSF-2.0"
] | null | null | null | fissix/tests/test_fixers.py | orsinium/fissix-py35 | 48914fcb69842c9fe3c97652870c7610a2cc639b | [
"PSF-2.0"
] | null | null | null | """ Test suite for the fixer modules """
# Python imports
import os
from itertools import chain
from operator import itemgetter
# Local imports
from fissix import pygram, fixer_util
from fissix.tests import support
class FixerTestCase(support.TestCase):
# Other test cases can subclass this class and replace "fixer_pkg" with
# their own.
def setUp(self, fix_list=None, fixer_pkg="fissix", options=None):
if fix_list is None:
fix_list = [self.fixer]
self.refactor = support.get_refactorer(fixer_pkg, fix_list, options)
self.fixer_log = []
self.filename = "<string>"
for fixer in chain(self.refactor.pre_order, self.refactor.post_order):
fixer.log = self.fixer_log
def _check(self, before, after):
before = support.reformat(before)
after = support.reformat(after)
tree = self.refactor.refactor_string(before, self.filename)
self.assertEqual(after, str(tree))
return tree
def check(self, before, after, ignore_warnings=False):
tree = self._check(before, after)
self.assertTrue(tree.was_changed)
if not ignore_warnings:
self.assertEqual(self.fixer_log, [])
def warns(self, before, after, message, unchanged=False):
tree = self._check(before, after)
self.assertIn(message, "".join(self.fixer_log))
if not unchanged:
self.assertTrue(tree.was_changed)
def warns_unchanged(self, before, message):
self.warns(before, before, message, unchanged=True)
def unchanged(self, before, ignore_warnings=False):
self._check(before, before)
if not ignore_warnings:
self.assertEqual(self.fixer_log, [])
def assert_runs_after(self, *names):
fixes = [self.fixer]
fixes.extend(names)
r = support.get_refactorer("fissix", fixes)
(pre, post) = r.get_fixers()
n = "fix_" + self.fixer
if post and post[-1].__class__.__module__.endswith(n):
# We're the last fixer to run
return
if pre and pre[-1].__class__.__module__.endswith(n) and not post:
# We're the last in pre and post is empty
return
self.fail(
"Fixer run order (%s) is incorrect; %s should be last."
% (", ".join([x.__class__.__module__ for x in (pre + post)]), n)
)
class Test_ne(FixerTestCase):
fixer = "ne"
def test_basic(self):
b = """if x <> y:
pass"""
a = """if x != y:
pass"""
self.check(b, a)
def test_no_spaces(self):
b = """if x<>y:
pass"""
a = """if x!=y:
pass"""
self.check(b, a)
def test_chained(self):
b = """if x<>y<>z:
pass"""
a = """if x!=y!=z:
pass"""
self.check(b, a)
class Test_has_key(FixerTestCase):
fixer = "has_key"
def test_1(self):
b = """x = d.has_key("x") or d.has_key("y")"""
a = """x = "x" in d or "y" in d"""
self.check(b, a)
def test_2(self):
b = """x = a.b.c.d.has_key("x") ** 3"""
a = """x = ("x" in a.b.c.d) ** 3"""
self.check(b, a)
def test_3(self):
b = """x = a.b.has_key(1 + 2).__repr__()"""
a = """x = (1 + 2 in a.b).__repr__()"""
self.check(b, a)
def test_4(self):
b = """x = a.b.has_key(1 + 2).__repr__() ** -3 ** 4"""
a = """x = (1 + 2 in a.b).__repr__() ** -3 ** 4"""
self.check(b, a)
def test_5(self):
b = """x = a.has_key(f or g)"""
a = """x = (f or g) in a"""
self.check(b, a)
def test_6(self):
b = """x = a + b.has_key(c)"""
a = """x = a + (c in b)"""
self.check(b, a)
def test_7(self):
b = """x = a.has_key(lambda: 12)"""
a = """x = (lambda: 12) in a"""
self.check(b, a)
def test_8(self):
b = """x = a.has_key(a for a in b)"""
a = """x = (a for a in b) in a"""
self.check(b, a)
def test_9(self):
b = """if not a.has_key(b): pass"""
a = """if b not in a: pass"""
self.check(b, a)
def test_10(self):
b = """if not a.has_key(b).__repr__(): pass"""
a = """if not (b in a).__repr__(): pass"""
self.check(b, a)
def test_11(self):
b = """if not a.has_key(b) ** 2: pass"""
a = """if not (b in a) ** 2: pass"""
self.check(b, a)
class Test_apply(FixerTestCase):
    """Tests for fix_apply: ``apply(f, args, kwds)`` becomes ``f(*args, **kwds)``.

    Throughout this file the convention is ``b`` = before (input source) and
    ``a`` = after (expected output); the spacing tests at the bottom had the
    two names swapped, which is fixed here (pure rename — same check calls).
    """
    fixer = "apply"
    def test_1(self):
        b = """x = apply(f, g + h)"""
        a = """x = f(*g + h)"""
        self.check(b, a)
    def test_2(self):
        b = """y = apply(f, g, h)"""
        a = """y = f(*g, **h)"""
        self.check(b, a)
    def test_3(self):
        b = """z = apply(fs[0], g or h, h or g)"""
        a = """z = fs[0](*g or h, **h or g)"""
        self.check(b, a)
    def test_4(self):
        b = """apply(f, (x, y) + t)"""
        a = """f(*(x, y) + t)"""
        self.check(b, a)
    def test_5(self):
        # Trailing comma in the call is dropped.
        b = """apply(f, args,)"""
        a = """f(*args)"""
        self.check(b, a)
    def test_6(self):
        b = """apply(f, args, kwds,)"""
        a = """f(*args, **kwds)"""
        self.check(b, a)
    # Test that complex functions are parenthesized
    def test_complex_1(self):
        b = """x = apply(f+g, args)"""
        a = """x = (f+g)(*args)"""
        self.check(b, a)
    def test_complex_2(self):
        b = """x = apply(f*g, args)"""
        a = """x = (f*g)(*args)"""
        self.check(b, a)
    def test_complex_3(self):
        b = """x = apply(f**g, args)"""
        a = """x = (f**g)(*args)"""
        self.check(b, a)
    # But dotted names etc. not
    def test_dotted_name(self):
        b = """x = apply(f.g, args)"""
        a = """x = f.g(*args)"""
        self.check(b, a)
    def test_subscript(self):
        b = """x = apply(f[x], args)"""
        a = """x = f[x](*args)"""
        self.check(b, a)
    def test_call(self):
        b = """x = apply(f(), args)"""
        a = """x = f()(*args)"""
        self.check(b, a)
    # Extreme case
    def test_extreme(self):
        b = """x = apply(a.b.c.d.e.f, args, kwds)"""
        a = """x = a.b.c.d.e.f(*args, **kwds)"""
        self.check(b, a)
    # XXX Comments in weird places still get lost
    def test_weird_comments(self):
        b = """apply(   # foo
          f, # bar
          args)"""
        a = """f(*args)"""
        self.check(b, a)
    # These should *not* be touched
    def test_unchanged_1(self):
        s = """apply()"""
        self.unchanged(s)
    def test_unchanged_2(self):
        s = """apply(f)"""
        self.unchanged(s)
    def test_unchanged_3(self):
        s = """apply(f,)"""
        self.unchanged(s)
    def test_unchanged_4(self):
        s = """apply(f, args, kwds, extras)"""
        self.unchanged(s)
    def test_unchanged_5(self):
        s = """apply(f, *args, **kwds)"""
        self.unchanged(s)
    def test_unchanged_6(self):
        s = """apply(f, *args)"""
        self.unchanged(s)
    def test_unchanged_6b(self):
        s = """apply(f, **kwds)"""
        self.unchanged(s)
    def test_unchanged_7(self):
        s = """apply(func=f, args=args, kwds=kwds)"""
        self.unchanged(s)
    def test_unchanged_8(self):
        s = """apply(f, args=args, kwds=kwds)"""
        self.unchanged(s)
    def test_unchanged_9(self):
        s = """apply(f, args, kwds=kwds)"""
        self.unchanged(s)
    def test_space_1(self):
        # Extra whitespace inside the apply() call is normalized away.
        b = """apply(  f,  args,   kwds)"""
        a = """f(*args, **kwds)"""
        self.check(b, a)
    def test_space_2(self):
        b = """apply(  f  ,args,kwds )"""
        a = """f(*args, **kwds)"""
        self.check(b, a)
class Test_reload(FixerTestCase):
    """Tests for fix_reload: ``reload(m)`` becomes ``importlib.reload(m)``
    with an ``import importlib`` prepended.
    """
    fixer = "reload"
    def test(self):
        b = """reload(a)"""
        a = """import importlib\nimportlib.reload(a)"""
        self.check(b, a)
    def test_comment(self):
        b = """reload( a ) # comment"""
        a = """import importlib\nimportlib.reload( a ) # comment"""
        self.check(b, a)
        # PEP 8 comments: two spaces before an inline comment must be
        # preserved by the fixer.  (Previously this second check was
        # byte-identical to the first, so it tested nothing new.)
        b = """reload( a )  # comment"""
        a = """import importlib\nimportlib.reload( a )  # comment"""
        self.check(b, a)
    def test_space(self):
        # Whitespace inside the parentheses is preserved verbatim.
        b = """reload( a )"""
        a = """import importlib\nimportlib.reload( a )"""
        self.check(b, a)
        b = """reload( a)"""
        a = """import importlib\nimportlib.reload( a)"""
        self.check(b, a)
        b = """reload(a )"""
        a = """import importlib\nimportlib.reload(a )"""
        self.check(b, a)
    def test_unchanged(self):
        # Anything other than a single positional argument is left alone.
        s = """reload(a=1)"""
        self.unchanged(s)
        s = """reload(f, g)"""
        self.unchanged(s)
        s = """reload(f, *h)"""
        self.unchanged(s)
        s = """reload(f, *h, **i)"""
        self.unchanged(s)
        s = """reload(f, **i)"""
        self.unchanged(s)
        s = """reload(*h, **i)"""
        self.unchanged(s)
        s = """reload(*h)"""
        self.unchanged(s)
        s = """reload(**i)"""
        self.unchanged(s)
        s = """reload()"""
        self.unchanged(s)
class Test_intern(FixerTestCase):
    """Tests for fix_intern: ``intern(s)`` becomes ``sys.intern(s)`` with an
    ``import sys`` prepended.
    """
    fixer = "intern"
    def test_prefix_preservation(self):
        # Whitespace and comments inside the call are preserved verbatim.
        b = """x = intern( a )"""
        a = """import sys\nx = sys.intern( a )"""
        self.check(b, a)
        b = """y = intern("b" # test
              )"""
        a = """import sys\ny = sys.intern("b" # test
              )"""
        self.check(b, a)
        b = """z = intern(a+b+c.d, )"""
        a = """import sys\nz = sys.intern(a+b+c.d, )"""
        self.check(b, a)
    def test(self):
        b = """x = intern(a)"""
        a = """import sys\nx = sys.intern(a)"""
        self.check(b, a)
        b = """z = intern(a+b+c.d,)"""
        a = """import sys\nz = sys.intern(a+b+c.d,)"""
        self.check(b, a)
        b = """intern("y%s" % 5).replace("y", "")"""
        a = """import sys\nsys.intern("y%s" % 5).replace("y", "")"""
        self.check(b, a)
    # These should not be refactored
    def test_unchanged(self):
        # Only a single positional argument triggers the rewrite.
        s = """intern(a=1)"""
        self.unchanged(s)
        s = """intern(f, g)"""
        self.unchanged(s)
        s = """intern(*h)"""
        self.unchanged(s)
        s = """intern(**i)"""
        self.unchanged(s)
        s = """intern()"""
        self.unchanged(s)
class Test_reduce(FixerTestCase):
    """Tests for fix_reduce: a bare ``reduce(...)`` call gains a
    ``from functools import reduce`` import.
    """
    fixer = "reduce"
    def test_simple_call(self):
        b = "reduce(a, b, c)"
        a = "from functools import reduce\nreduce(a, b, c)"
        self.check(b, a)
    def test_bug_7253(self):
        # fix_tuple_params was being bad and orphaning nodes in the tree.
        b = "def x(arg): reduce(sum, [])"
        a = "from functools import reduce\ndef x(arg): reduce(sum, [])"
        self.check(b, a)
    def test_call_with_lambda(self):
        b = "reduce(lambda x, y: x + y, seq)"
        a = "from functools import reduce\nreduce(lambda x, y: x + y, seq)"
        self.check(b, a)
    def test_unchanged(self):
        # Wrong arity or keyword/star arguments: leave the call alone.
        s = "reduce(a)"
        self.unchanged(s)
        s = "reduce(a, b=42)"
        self.unchanged(s)
        s = "reduce(a, b, c, d)"
        self.unchanged(s)
        s = "reduce(**c)"
        self.unchanged(s)
        s = "reduce()"
        self.unchanged(s)
class Test_print(FixerTestCase):
    """Tests for fix_print: print statements become ``print()`` calls,
    mapping trailing commas to ``end=' '`` and ``>>f`` to ``file=f``.
    """
    fixer = "print"
    def test_prefix_preservation(self):
        b = """print 1, 1+1, 1+1+1"""
        a = """print(1, 1+1, 1+1+1)"""
        self.check(b, a)
    def test_idempotency(self):
        # Already-converted calls are left alone.
        s = """print()"""
        self.unchanged(s)
        s = """print('')"""
        self.unchanged(s)
    def test_idempotency_print_as_function(self):
        # Switch the parser grammar so `print` is a function, then verify
        # function-style calls are untouched.
        self.refactor.driver.grammar = pygram.python_grammar_no_print_statement
        s = """print(1, 1+1, 1+1+1)"""
        self.unchanged(s)
        s = """print()"""
        self.unchanged(s)
        s = """print('')"""
        self.unchanged(s)
    def test_1(self):
        b = """print 1, 1+1, 1+1+1"""
        a = """print(1, 1+1, 1+1+1)"""
        self.check(b, a)
    def test_2(self):
        b = """print 1, 2"""
        a = """print(1, 2)"""
        self.check(b, a)
    def test_3(self):
        b = """print"""
        a = """print()"""
        self.check(b, a)
    def test_4(self):
        # from bug 3000
        b = """print whatever; print"""
        a = """print(whatever); print()"""
        self.check(b, a)
    def test_5(self):
        b = """print; print whatever;"""
        a = """print(); print(whatever);"""
        self.check(b, a)
    def test_tuple(self):
        # A parenthesized tuple stays a single argument.
        b = """print (a, b, c)"""
        a = """print((a, b, c))"""
        self.check(b, a)
    # trailing commas
    def test_trailing_comma_1(self):
        b = """print 1, 2, 3,"""
        a = """print(1, 2, 3, end=' ')"""
        self.check(b, a)
    def test_trailing_comma_2(self):
        b = """print 1, 2,"""
        a = """print(1, 2, end=' ')"""
        self.check(b, a)
    def test_trailing_comma_3(self):
        b = """print 1,"""
        a = """print(1, end=' ')"""
        self.check(b, a)
    # >> stuff
    def test_vargs_without_trailing_comma(self):
        b = """print >>sys.stderr, 1, 2, 3"""
        a = """print(1, 2, 3, file=sys.stderr)"""
        self.check(b, a)
    def test_with_trailing_comma(self):
        b = """print >>sys.stderr, 1, 2,"""
        a = """print(1, 2, end=' ', file=sys.stderr)"""
        self.check(b, a)
    def test_no_trailing_comma(self):
        b = """print >>sys.stderr, 1+1"""
        a = """print(1+1, file=sys.stderr)"""
        self.check(b, a)
    def test_spaces_before_file(self):
        b = """print >> sys.stderr"""
        a = """print(file=sys.stderr)"""
        self.check(b, a)
    def test_with_future_print_function(self):
        # With the __future__ import active, print calls are untouched.
        s = "from __future__ import print_function\n" "print('Hai!', end=' ')"
        self.unchanged(s)
        b = "print 'Hello, world!'"
        a = "print('Hello, world!')"
        self.check(b, a)
class Test_exec(FixerTestCase):
    """Tests for fix_exec: ``exec code in g, l`` becomes ``exec(code, g, l)``."""
    fixer = "exec"
    def test_prefix_preservation(self):
        b = """  exec code in ns1, ns2"""
        a = """  exec(code, ns1, ns2)"""
        self.check(b, a)
    def test_basic(self):
        b = """exec code"""
        a = """exec(code)"""
        self.check(b, a)
    def test_with_globals(self):
        b = """exec code in ns"""
        a = """exec(code, ns)"""
        self.check(b, a)
    def test_with_globals_locals(self):
        b = """exec code in ns1, ns2"""
        a = """exec(code, ns1, ns2)"""
        self.check(b, a)
    def test_complex_1(self):
        b = """exec (a.b()) in ns"""
        a = """exec((a.b()), ns)"""
        self.check(b, a)
    def test_complex_2(self):
        b = """exec a.b() + c in ns"""
        a = """exec(a.b() + c, ns)"""
        self.check(b, a)
    # These should not be touched
    def test_unchanged_1(self):
        # Already function-call syntax.
        s = """exec(code)"""
        self.unchanged(s)
    def test_unchanged_2(self):
        s = """exec (code)"""
        self.unchanged(s)
    def test_unchanged_3(self):
        s = """exec(code, ns)"""
        self.unchanged(s)
    def test_unchanged_4(self):
        s = """exec(code, ns1, ns2)"""
        self.unchanged(s)
class Test_repr(FixerTestCase):
    """Tests for fix_repr: backtick expressions become ``repr(...)`` calls."""
    fixer = "repr"
    def test_prefix_preservation(self):
        b = """x = `1 + 2`"""
        a = """x = repr(1 + 2)"""
        self.check(b, a)
    def test_simple_1(self):
        b = """x = `1 + 2`"""
        a = """x = repr(1 + 2)"""
        self.check(b, a)
    def test_simple_2(self):
        b = """y = `x`"""
        a = """y = repr(x)"""
        self.check(b, a)
    def test_complex(self):
        b = """z = `y`.__repr__()"""
        a = """z = repr(y).__repr__()"""
        self.check(b, a)
    def test_tuple(self):
        # A bare tuple inside backticks needs extra parentheses.
        b = """x = `1, 2, 3`"""
        a = """x = repr((1, 2, 3))"""
        self.check(b, a)
    def test_nested(self):
        b = """x = `1 + `2``"""
        a = """x = repr(1 + repr(2))"""
        self.check(b, a)
    def test_nested_tuples(self):
        b = """x = `1, 2 + `3, 4``"""
        a = """x = repr((1, 2 + repr((3, 4))))"""
        self.check(b, a)
class Test_except(FixerTestCase):
    """Tests for fix_except: ``except E, target`` becomes ``except E as target``.

    When the target is not a simple name (tuple, list, subscript, attribute),
    the fixer binds to a placeholder name ``xxx_todo_changeme`` and inserts an
    assignment as the first statement of the handler body.
    """
    fixer = "except"
    def test_prefix_preservation(self):
        b = """
            try:
                pass
            except (RuntimeError, ImportError), e:
                pass"""
        a = """
            try:
                pass
            except (RuntimeError, ImportError) as e:
                pass"""
        self.check(b, a)
    def test_simple(self):
        b = """
            try:
                pass
            except Foo, e:
                pass"""
        a = """
            try:
                pass
            except Foo as e:
                pass"""
        self.check(b, a)
    def test_simple_no_space_before_target(self):
        b = """
            try:
                pass
            except Foo,e:
                pass"""
        a = """
            try:
                pass
            except Foo as e:
                pass"""
        self.check(b, a)
    def test_tuple_unpack(self):
        # Tuple target: unpack from .args via the placeholder name.
        b = """
            def foo():
                try:
                    pass
                except Exception, (f, e):
                    pass
                except ImportError, e:
                    pass"""
        a = """
            def foo():
                try:
                    pass
                except Exception as xxx_todo_changeme:
                    (f, e) = xxx_todo_changeme.args
                    pass
                except ImportError as e:
                    pass"""
        self.check(b, a)
    def test_multi_class(self):
        b = """
            try:
                pass
            except (RuntimeError, ImportError), e:
                pass"""
        a = """
            try:
                pass
            except (RuntimeError, ImportError) as e:
                pass"""
        self.check(b, a)
    def test_list_unpack(self):
        b = """
            try:
                pass
            except Exception, [a, b]:
                pass"""
        a = """
            try:
                pass
            except Exception as xxx_todo_changeme:
                [a, b] = xxx_todo_changeme.args
                pass"""
        self.check(b, a)
    def test_weird_target_1(self):
        # Subscript target: assign the exception object itself.
        b = """
            try:
                pass
            except Exception, d[5]:
                pass"""
        a = """
            try:
                pass
            except Exception as xxx_todo_changeme:
                d[5] = xxx_todo_changeme
                pass"""
        self.check(b, a)
    def test_weird_target_2(self):
        b = """
            try:
                pass
            except Exception, a.foo:
                pass"""
        a = """
            try:
                pass
            except Exception as xxx_todo_changeme:
                a.foo = xxx_todo_changeme
                pass"""
        self.check(b, a)
    def test_weird_target_3(self):
        b = """
            try:
                pass
            except Exception, a().foo:
                pass"""
        a = """
            try:
                pass
            except Exception as xxx_todo_changeme:
                a().foo = xxx_todo_changeme
                pass"""
        self.check(b, a)
    def test_bare_except(self):
        # A trailing bare except clause is preserved untouched.
        b = """
            try:
                pass
            except Exception, a:
                pass
            except:
                pass"""
        a = """
            try:
                pass
            except Exception as a:
                pass
            except:
                pass"""
        self.check(b, a)
    def test_bare_except_and_else_finally(self):
        b = """
            try:
                pass
            except Exception, a:
                pass
            except:
                pass
            else:
                pass
            finally:
                pass"""
        a = """
            try:
                pass
            except Exception as a:
                pass
            except:
                pass
            else:
                pass
            finally:
                pass"""
        self.check(b, a)
    def test_multi_fixed_excepts_before_bare_except(self):
        b = """
            try:
                pass
            except TypeError, b:
                pass
            except Exception, a:
                pass
            except:
                pass"""
        a = """
            try:
                pass
            except TypeError as b:
                pass
            except Exception as a:
                pass
            except:
                pass"""
        self.check(b, a)
    def test_one_line_suites(self):
        # Single-line try/except suites in every combination.
        b = """
            try: raise TypeError
            except TypeError, e:
                pass
            """
        a = """
            try: raise TypeError
            except TypeError as e:
                pass
            """
        self.check(b, a)
        b = """
            try:
                raise TypeError
            except TypeError, e: pass
            """
        a = """
            try:
                raise TypeError
            except TypeError as e: pass
            """
        self.check(b, a)
        b = """
            try: raise TypeError
            except TypeError, e: pass
            """
        a = """
            try: raise TypeError
            except TypeError as e: pass
            """
        self.check(b, a)
        b = """
            try: raise TypeError
            except TypeError, e: pass
            else: function()
            finally: done()
            """
        a = """
            try: raise TypeError
            except TypeError as e: pass
            else: function()
            finally: done()
            """
        self.check(b, a)
    # These should not be touched:
    def test_unchanged_1(self):
        s = """
            try:
                pass
            except:
                pass"""
        self.unchanged(s)
    def test_unchanged_2(self):
        s = """
            try:
                pass
            except Exception:
                pass"""
        self.unchanged(s)
    def test_unchanged_3(self):
        s = """
            try:
                pass
            except (Exception, SystemExit):
                pass"""
        self.unchanged(s)
class Test_raise(FixerTestCase):
    """Tests for fix_raise: ``raise E, v, tb`` becomes
    ``raise E(v).with_traceback(tb)``; ``raise E, v`` becomes ``raise E(v)``.
    String exceptions only warn, and a tuple exception degrades to its first
    leftmost non-tuple element.
    """
    fixer = "raise"
    def test_basic(self):
        b = """raise Exception, 5"""
        a = """raise Exception(5)"""
        self.check(b, a)
    def test_prefix_preservation(self):
        b = """raise Exception,5"""
        a = """raise Exception(5)"""
        self.check(b, a)
        b = """raise Exception, 5"""
        a = """raise Exception(5)"""
        self.check(b, a)
    def test_with_comments(self):
        b = """raise Exception, 5 # foo"""
        a = """raise Exception(5) # foo"""
        self.check(b, a)
        b = """raise E, (5, 6) % (a, b) # foo"""
        a = """raise E((5, 6) % (a, b)) # foo"""
        self.check(b, a)
        b = """def foo():
    raise Exception, 5, 6 # foo"""
        a = """def foo():
    raise Exception(5).with_traceback(6) # foo"""
        self.check(b, a)
    def test_None_value(self):
        # A None value is dropped; the traceback is still attached.
        b = """raise Exception(5), None, tb"""
        a = """raise Exception(5).with_traceback(tb)"""
        self.check(b, a)
    def test_tuple_value(self):
        # A tuple value becomes the constructor's argument list.
        b = """raise Exception, (5, 6, 7)"""
        a = """raise Exception(5, 6, 7)"""
        self.check(b, a)
    def test_tuple_detection(self):
        b = """raise E, (5, 6) % (a, b)"""
        a = """raise E((5, 6) % (a, b))"""
        self.check(b, a)
    def test_tuple_exc_1(self):
        b = """raise (((E1, E2), E3), E4), V"""
        a = """raise E1(V)"""
        self.check(b, a)
    def test_tuple_exc_2(self):
        b = """raise (E1, (E2, E3), E4), V"""
        a = """raise E1(V)"""
        self.check(b, a)
    # These should produce a warning
    def test_string_exc(self):
        s = """raise 'foo'"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")
    def test_string_exc_val(self):
        s = """raise "foo", 5"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")
    def test_string_exc_val_tb(self):
        s = """raise "foo", 5, 6"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")
    # These should result in traceback-assignment
    def test_tb_1(self):
        b = """def foo():
    raise Exception, 5, 6"""
        a = """def foo():
    raise Exception(5).with_traceback(6)"""
        self.check(b, a)
    def test_tb_2(self):
        b = """def foo():
    a = 5
    raise Exception, 5, 6
    b = 6"""
        a = """def foo():
    a = 5
    raise Exception(5).with_traceback(6)
    b = 6"""
        self.check(b, a)
    def test_tb_3(self):
        b = """def foo():
    raise Exception,5,6"""
        a = """def foo():
    raise Exception(5).with_traceback(6)"""
        self.check(b, a)
    def test_tb_4(self):
        b = """def foo():
    a = 5
    raise Exception,5,6
    b = 6"""
        a = """def foo():
    a = 5
    raise Exception(5).with_traceback(6)
    b = 6"""
        self.check(b, a)
    def test_tb_5(self):
        b = """def foo():
    raise Exception, (5, 6, 7), 6"""
        a = """def foo():
    raise Exception(5, 6, 7).with_traceback(6)"""
        self.check(b, a)
    def test_tb_6(self):
        b = """def foo():
    a = 5
    raise Exception, (5, 6, 7), 6
    b = 6"""
        a = """def foo():
    a = 5
    raise Exception(5, 6, 7).with_traceback(6)
    b = 6"""
        self.check(b, a)
class Test_throw(FixerTestCase):
    """Tests for fix_throw: the raise-style rewrite applied to
    ``generator.throw(E, v, tb)`` → ``generator.throw(E(v).with_traceback(tb))``.
    """
    fixer = "throw"
    def test_1(self):
        b = """g.throw(Exception, 5)"""
        a = """g.throw(Exception(5))"""
        self.check(b, a)
    def test_2(self):
        b = """g.throw(Exception,5)"""
        a = """g.throw(Exception(5))"""
        self.check(b, a)
    def test_3(self):
        b = """g.throw(Exception, (5, 6, 7))"""
        a = """g.throw(Exception(5, 6, 7))"""
        self.check(b, a)
    def test_4(self):
        b = """5 + g.throw(Exception, 5)"""
        a = """5 + g.throw(Exception(5))"""
        self.check(b, a)
    # These should produce warnings
    def test_warn_1(self):
        s = """g.throw("foo")"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")
    def test_warn_2(self):
        s = """g.throw("foo", 5)"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")
    def test_warn_3(self):
        s = """g.throw("foo", 5, 6)"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")
    # These should not be touched
    def test_untouched_1(self):
        s = """g.throw(Exception)"""
        self.unchanged(s)
    def test_untouched_2(self):
        s = """g.throw(Exception(5, 6))"""
        self.unchanged(s)
    def test_untouched_3(self):
        s = """5 + g.throw(Exception(5, 6))"""
        self.unchanged(s)
    # These should result in traceback-assignment
    def test_tb_1(self):
        b = """def foo():
    g.throw(Exception, 5, 6)"""
        a = """def foo():
    g.throw(Exception(5).with_traceback(6))"""
        self.check(b, a)
    def test_tb_2(self):
        b = """def foo():
    a = 5
    g.throw(Exception, 5, 6)
    b = 6"""
        a = """def foo():
    a = 5
    g.throw(Exception(5).with_traceback(6))
    b = 6"""
        self.check(b, a)
    def test_tb_3(self):
        b = """def foo():
    g.throw(Exception,5,6)"""
        a = """def foo():
    g.throw(Exception(5).with_traceback(6))"""
        self.check(b, a)
    def test_tb_4(self):
        b = """def foo():
    a = 5
    g.throw(Exception,5,6)
    b = 6"""
        a = """def foo():
    a = 5
    g.throw(Exception(5).with_traceback(6))
    b = 6"""
        self.check(b, a)
    def test_tb_5(self):
        b = """def foo():
    g.throw(Exception, (5, 6, 7), 6)"""
        a = """def foo():
    g.throw(Exception(5, 6, 7).with_traceback(6))"""
        self.check(b, a)
    def test_tb_6(self):
        b = """def foo():
    a = 5
    g.throw(Exception, (5, 6, 7), 6)
    b = 6"""
        a = """def foo():
    a = 5
    g.throw(Exception(5, 6, 7).with_traceback(6))
    b = 6"""
        self.check(b, a)
    def test_tb_7(self):
        b = """def foo():
    a + g.throw(Exception, 5, 6)"""
        a = """def foo():
    a + g.throw(Exception(5).with_traceback(6))"""
        self.check(b, a)
    def test_tb_8(self):
        b = """def foo():
    a = 5
    a + g.throw(Exception, 5, 6)
    b = 6"""
        a = """def foo():
    a = 5
    a + g.throw(Exception(5).with_traceback(6))
    b = 6"""
        self.check(b, a)
class Test_long(FixerTestCase):
    """Tests for fix_long: the ``long`` builtin name is renamed to ``int``."""
    fixer = "long"
    def test_1(self):
        b = """x = long(x)"""
        a = """x = int(x)"""
        self.check(b, a)
    def test_2(self):
        b = """y = isinstance(x, long)"""
        a = """y = isinstance(x, int)"""
        self.check(b, a)
    def test_3(self):
        b = """z = type(x) in (int, long)"""
        a = """z = type(x) in (int, int)"""
        self.check(b, a)
    def test_unchanged(self):
        # `long` used as a binding name (assignment, def, class, parameter)
        # is a different symbol and must not be renamed.
        s = """long = True"""
        self.unchanged(s)
        s = """s.long = True"""
        self.unchanged(s)
        s = """def long(): pass"""
        self.unchanged(s)
        s = """class long(): pass"""
        self.unchanged(s)
        s = """def f(long): pass"""
        self.unchanged(s)
        s = """def f(g, long): pass"""
        self.unchanged(s)
        s = """def f(x, long=True): pass"""
        self.unchanged(s)
    def test_prefix_preservation(self):
        b = """x = long( x )"""
        a = """x = int( x )"""
        self.check(b, a)
class Test_execfile(FixerTestCase):
    """Tests for fix_execfile: ``execfile(fn, ...)`` becomes
    ``exec(compile(open(fn, "rb").read(), fn, 'exec'), ...)``.
    """
    fixer = "execfile"
    def test_conversion(self):
        b = """execfile("fn")"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'))"""
        self.check(b, a)
        b = """execfile("fn", glob)"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), glob)"""
        self.check(b, a)
        b = """execfile("fn", glob, loc)"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), glob, loc)"""
        self.check(b, a)
        b = """execfile("fn", globals=glob)"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), globals=glob)"""
        self.check(b, a)
        b = """execfile("fn", locals=loc)"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), locals=loc)"""
        self.check(b, a)
        b = """execfile("fn", globals=glob, locals=loc)"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), globals=glob, locals=loc)"""
        self.check(b, a)
    def test_spacing(self):
        # Whitespace around arguments is carried into the rewritten call.
        b = """execfile( "fn" )"""
        a = """exec(compile(open( "fn", "rb" ).read(), "fn", 'exec'))"""
        self.check(b, a)
        b = """execfile("fn", globals = glob)"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), globals = glob)"""
        self.check(b, a)
class Test_isinstance(FixerTestCase):
    """Tests for fix_isinstance: duplicate entries in an ``isinstance`` type
    tuple (typically left behind by fix_long turning ``long`` into ``int``)
    are removed; a single remaining type loses its parentheses.
    """
    fixer = "isinstance"
    def test_remove_multiple_items(self):
        b = """isinstance(x, (int, int, int))"""
        a = """isinstance(x, int)"""
        self.check(b, a)
        b = """isinstance(x, (int, float, int, int, float))"""
        a = """isinstance(x, (int, float))"""
        self.check(b, a)
        b = """isinstance(x, (int, float, int, int, float, str))"""
        a = """isinstance(x, (int, float, str))"""
        self.check(b, a)
        b = """isinstance(foo() + bar(), (x(), y(), x(), int, int))"""
        a = """isinstance(foo() + bar(), (x(), y(), x(), int))"""
        self.check(b, a)
    def test_prefix_preservation(self):
        b = """if isinstance(  foo(), (  bar, bar, baz )) : pass"""
        a = """if isinstance(  foo(), (  bar, baz )) : pass"""
        self.check(b, a)
    def test_unchanged(self):
        self.unchanged("isinstance(x, (str, int))")
class Test_dict(FixerTestCase):
    """Tests for fix_dict: ``keys/items/values`` get wrapped in ``list(...)``
    (unless in an iterating or consuming context), ``iter*`` methods become
    ``iter(...)`` wrappers, and ``view*`` methods map to the plain methods.
    """
    fixer = "dict"
    def test_prefix_preservation(self):
        b = "if   d. keys  (  )  : pass"
        a = "if   list(d. keys  (  ))  : pass"
        self.check(b, a)
        b = "if   d. items  (  )  : pass"
        a = "if   list(d. items  (  ))  : pass"
        self.check(b, a)
        b = "if   d. iterkeys  ( )  : pass"
        a = "if   iter(d. keys  ( ))  : pass"
        self.check(b, a)
        b = "[i for i in    d.  iterkeys(  )  ]"
        a = "[i for i in    d.  keys(  )  ]"
        self.check(b, a)
        b = "if   d. viewkeys  ( )  : pass"
        a = "if   d. keys  ( )  : pass"
        self.check(b, a)
        b = "[i for i in    d.  viewkeys(  )  ]"
        a = "[i for i in    d.  keys(  )  ]"
        self.check(b, a)
    def test_trailing_comment(self):
        b = "d.keys() # foo"
        a = "list(d.keys()) # foo"
        self.check(b, a)
        b = "d.items()  # foo"
        a = "list(d.items())  # foo"
        self.check(b, a)
        b = "d.iterkeys()  # foo"
        a = "iter(d.keys())  # foo"
        self.check(b, a)
        b = """[i for i in d.iterkeys() # foo
               ]"""
        a = """[i for i in d.keys() # foo
               ]"""
        self.check(b, a)
        # NOTE(review): the pair below duplicates the previous check; the
        # duplication exists upstream as well, so it is kept as-is.
        b = """[i for i in d.iterkeys() # foo
               ]"""
        a = """[i for i in d.keys() # foo
               ]"""
        self.check(b, a)
        b = "d.viewitems()  # foo"
        a = "d.items()  # foo"
        self.check(b, a)
    def test_unchanged(self):
        # Inside a consuming call (list, sorted, ...) no extra wrapper is added.
        for wrapper in fixer_util.consuming_calls:
            s = "s = %s(d.keys())" % wrapper
            self.unchanged(s)
            s = "s = %s(d.values())" % wrapper
            self.unchanged(s)
            s = "s = %s(d.items())" % wrapper
            self.unchanged(s)
    def test_01(self):
        b = "d.keys()"
        a = "list(d.keys())"
        self.check(b, a)
        b = "a[0].foo().keys()"
        a = "list(a[0].foo().keys())"
        self.check(b, a)
    def test_02(self):
        b = "d.items()"
        a = "list(d.items())"
        self.check(b, a)
    def test_03(self):
        b = "d.values()"
        a = "list(d.values())"
        self.check(b, a)
    def test_04(self):
        b = "d.iterkeys()"
        a = "iter(d.keys())"
        self.check(b, a)
    def test_05(self):
        b = "d.iteritems()"
        a = "iter(d.items())"
        self.check(b, a)
    def test_06(self):
        b = "d.itervalues()"
        a = "iter(d.values())"
        self.check(b, a)
    def test_07(self):
        s = "list(d.keys())"
        self.unchanged(s)
    def test_08(self):
        s = "sorted(d.keys())"
        self.unchanged(s)
    def test_09(self):
        b = "iter(d.keys())"
        a = "iter(list(d.keys()))"
        self.check(b, a)
    def test_10(self):
        b = "foo(d.keys())"
        a = "foo(list(d.keys()))"
        self.check(b, a)
    def test_11(self):
        b = "for i in d.keys(): print i"
        a = "for i in list(d.keys()): print i"
        self.check(b, a)
    def test_12(self):
        b = "for i in d.iterkeys(): print i"
        a = "for i in d.keys(): print i"
        self.check(b, a)
    def test_13(self):
        b = "[i for i in d.keys()]"
        a = "[i for i in list(d.keys())]"
        self.check(b, a)
    def test_14(self):
        b = "[i for i in d.iterkeys()]"
        a = "[i for i in d.keys()]"
        self.check(b, a)
    def test_15(self):
        b = "(i for i in d.keys())"
        a = "(i for i in list(d.keys()))"
        self.check(b, a)
    def test_16(self):
        b = "(i for i in d.iterkeys())"
        a = "(i for i in d.keys())"
        self.check(b, a)
    def test_17(self):
        b = "iter(d.iterkeys())"
        a = "iter(d.keys())"
        self.check(b, a)
    def test_18(self):
        b = "list(d.iterkeys())"
        a = "list(d.keys())"
        self.check(b, a)
    def test_19(self):
        b = "sorted(d.iterkeys())"
        a = "sorted(d.keys())"
        self.check(b, a)
    def test_20(self):
        b = "foo(d.iterkeys())"
        a = "foo(iter(d.keys()))"
        self.check(b, a)
    def test_21(self):
        b = "print h.iterkeys().next()"
        a = "print iter(h.keys()).next()"
        self.check(b, a)
    def test_22(self):
        b = "print h.keys()[0]"
        a = "print list(h.keys())[0]"
        self.check(b, a)
    def test_23(self):
        b = "print list(h.iterkeys().next())"
        a = "print list(iter(h.keys()).next())"
        self.check(b, a)
    def test_24(self):
        b = "for x in h.keys()[0]: print x"
        a = "for x in list(h.keys())[0]: print x"
        self.check(b, a)
    def test_25(self):
        b = "d.viewkeys()"
        a = "d.keys()"
        self.check(b, a)
    def test_26(self):
        b = "d.viewitems()"
        a = "d.items()"
        self.check(b, a)
    def test_27(self):
        b = "d.viewvalues()"
        a = "d.values()"
        self.check(b, a)
    def test_28(self):
        b = "[i for i in d.viewkeys()]"
        a = "[i for i in d.keys()]"
        self.check(b, a)
    def test_29(self):
        b = "(i for i in d.viewkeys())"
        a = "(i for i in d.keys())"
        self.check(b, a)
    def test_30(self):
        b = "iter(d.viewkeys())"
        a = "iter(d.keys())"
        self.check(b, a)
    def test_31(self):
        b = "list(d.viewkeys())"
        a = "list(d.keys())"
        self.check(b, a)
    def test_32(self):
        b = "sorted(d.viewkeys())"
        a = "sorted(d.keys())"
        self.check(b, a)
class Test_xrange(FixerTestCase):
    """Tests for fix_xrange: ``xrange`` becomes ``range``, and an existing
    ``range`` call is wrapped in ``list(...)`` outside iterating/consuming
    contexts.
    """
    fixer = "xrange"
    def test_prefix_preservation(self):
        b = """x =    xrange(  10  )"""
        a = """x =    range(  10  )"""
        self.check(b, a)
        b = """x = xrange(  1  ,  10   )"""
        a = """x = range(  1  ,  10   )"""
        self.check(b, a)
        b = """x = xrange(  0  ,  10 ,  2 )"""
        a = """x = range(  0  ,  10 ,  2 )"""
        self.check(b, a)
    def test_single_arg(self):
        b = """x = xrange(10)"""
        a = """x = range(10)"""
        self.check(b, a)
    def test_two_args(self):
        b = """x = xrange(1, 10)"""
        a = """x = range(1, 10)"""
        self.check(b, a)
    def test_three_args(self):
        b = """x = xrange(0, 10, 2)"""
        a = """x = range(0, 10, 2)"""
        self.check(b, a)
    def test_wrap_in_list(self):
        # Plain `range` calls get list()-wrapped when used as a value.
        b = """x = range(10, 3, 9)"""
        a = """x = list(range(10, 3, 9))"""
        self.check(b, a)
        b = """x = foo(range(10, 3, 9))"""
        a = """x = foo(list(range(10, 3, 9)))"""
        self.check(b, a)
        b = """x = range(10, 3, 9) + [4]"""
        a = """x = list(range(10, 3, 9)) + [4]"""
        self.check(b, a)
        b = """x = range(10)[::-1]"""
        a = """x = list(range(10))[::-1]"""
        self.check(b, a)
        b = """x = range(10)  [3]"""
        a = """x = list(range(10))  [3]"""
        self.check(b, a)
    def test_xrange_in_for(self):
        b = """for i in xrange(10):\n    j=i"""
        a = """for i in range(10):\n    j=i"""
        self.check(b, a)
        b = """[i for i in xrange(10)]"""
        a = """[i for i in range(10)]"""
        self.check(b, a)
    def test_range_in_for(self):
        # Iterating contexts do not need the list() wrapper.
        self.unchanged("for i in range(10): pass")
        self.unchanged("[i for i in range(10)]")
    def test_in_contains_test(self):
        self.unchanged("x in range(10, 3, 9)")
    def test_in_consuming_context(self):
        for call in fixer_util.consuming_calls:
            self.unchanged("a = %s(range(10))" % call)
class Test_xrange_with_reduce(FixerTestCase):
    """Runs the xrange and reduce fixers together on one input."""
    def setUp(self):
        # Activate both fixers instead of a single `fixer` attribute.
        super(Test_xrange_with_reduce, self).setUp(["xrange", "reduce"])
    def test_double_transform(self):
        before = """reduce(x, xrange(5))"""
        after = """from functools import reduce
reduce(x, range(5))"""
        self.check(before, after)
class Test_raw_input(FixerTestCase):
    """Tests for fix_raw_input: ``raw_input`` is renamed to ``input``."""
    fixer = "raw_input"
    def test_prefix_preservation(self):
        b = """x =    raw_input(   )"""
        a = """x =    input(   )"""
        self.check(b, a)
        b = """x = raw_input(   ''   )"""
        a = """x = input(   ''   )"""
        self.check(b, a)
    def test_1(self):
        b = """x = raw_input()"""
        a = """x = input()"""
        self.check(b, a)
    def test_2(self):
        b = """x = raw_input('')"""
        a = """x = input('')"""
        self.check(b, a)
    def test_3(self):
        b = """x = raw_input('prompt')"""
        a = """x = input('prompt')"""
        self.check(b, a)
    def test_4(self):
        b = """x = raw_input(foo(a) + 6)"""
        a = """x = input(foo(a) + 6)"""
        self.check(b, a)
    def test_5(self):
        b = """x = raw_input(invite).split()"""
        a = """x = input(invite).split()"""
        self.check(b, a)
    def test_6(self):
        b = """x = raw_input(invite) . split ()"""
        a = """x = input(invite) . split ()"""
        self.check(b, a)
    def test_8(self):
        b = "x = int(raw_input())"
        a = "x = int(input())"
        self.check(b, a)
class Test_funcattrs(FixerTestCase):
    """Tests for fix_funcattrs: ``f.func_X`` attributes become ``f.__X__``."""
    fixer = "funcattrs"
    attrs = ["closure", "doc", "name", "defaults", "code", "globals", "dict"]
    def test(self):
        # Each legacy attribute is rewritten, both at the end of and in the
        # middle of an attribute chain.
        for name in self.attrs:
            self.check("a.func_%s" % name, "a.__%s__" % name)
            self.check(
                "self.foo.func_%s.foo_bar" % name,
                "self.foo.__%s__.foo_bar" % name,
            )
    def test_unchanged(self):
        # Bare names and already-dunder attributes are left alone.
        templates = ("foo(func_%s + 5)", "f(foo.__%s__)", "f(foo.__%s__.foo)")
        for name in self.attrs:
            for template in templates:
                self.unchanged(template % name)
class Test_xreadlines(FixerTestCase):
    """Tests for fix_xreadlines: ``for x in f.xreadlines():`` iterates ``f``
    directly; a bare ``.xreadlines`` attribute maps to ``.__iter__``.
    """
    fixer = "xreadlines"
    def test_call(self):
        b = "for x in f.xreadlines(): pass"
        a = "for x in f: pass"
        self.check(b, a)
        b = "for x in foo().xreadlines(): pass"
        a = "for x in foo(): pass"
        self.check(b, a)
        b = "for x in (5 + foo()).xreadlines(): pass"
        a = "for x in (5 + foo()): pass"
        self.check(b, a)
    def test_attr_ref(self):
        b = "foo(f.xreadlines + 5)"
        a = "foo(f.__iter__ + 5)"
        self.check(b, a)
        b = "foo(f().xreadlines + 5)"
        a = "foo(f().__iter__ + 5)"
        self.check(b, a)
        b = "foo((5 + f()).xreadlines + 5)"
        a = "foo((5 + f()).__iter__ + 5)"
        self.check(b, a)
    def test_unchanged(self):
        # xreadlines with arguments, or as a free name, is not the method.
        s = "for x in f.xreadlines(5): pass"
        self.unchanged(s)
        s = "for x in f.xreadlines(k=5): pass"
        self.unchanged(s)
        s = "for x in f.xreadlines(*k, **v): pass"
        self.unchanged(s)
        s = "foo(xreadlines)"
        self.unchanged(s)
class ImportsFixerTests:
    """Shared test mixin for module-rename fixers.

    Concrete subclasses supply ``self.modules`` (old-name → new-name mapping)
    and the FixerTestCase machinery; these tests exercise every import form
    plus usages of the imported module name.
    """
    def test_import_module(self):
        for old, new in self.modules.items():
            b = "import %s" % old
            a = "import %s" % new
            self.check(b, a)
            b = "import foo, %s, bar" % old
            a = "import foo, %s, bar" % new
            self.check(b, a)
    def test_import_from(self):
        for old, new in self.modules.items():
            b = "from %s import foo" % old
            a = "from %s import foo" % new
            self.check(b, a)
            b = "from %s import foo, bar" % old
            a = "from %s import foo, bar" % new
            self.check(b, a)
            b = "from %s import (yes, no)" % old
            a = "from %s import (yes, no)" % new
            self.check(b, a)
    def test_import_module_as(self):
        for old, new in self.modules.items():
            b = "import %s as foo_bar" % old
            a = "import %s as foo_bar" % new
            self.check(b, a)
            b = "import %s as foo_bar" % old
            a = "import %s as foo_bar" % new
            self.check(b, a)
    def test_import_from_as(self):
        for old, new in self.modules.items():
            b = "from %s import foo as bar" % old
            a = "from %s import foo as bar" % new
            self.check(b, a)
    def test_star(self):
        for old, new in self.modules.items():
            b = "from %s import *" % old
            a = "from %s import *" % new
            self.check(b, a)
    def test_import_module_usage(self):
        for old, new in self.modules.items():
            b = """
                import %s
                foo(%s.bar)
                """ % (
                old,
                old,
            )
            a = """
                import %s
                foo(%s.bar)
                """ % (
                new,
                new,
            )
            self.check(b, a)
            # After `from old import x`, a bare name equal to the old module
            # name is a local binding — it must NOT be renamed.
            b = """
                from %s import x
                %s = 23
                """ % (
                old,
                old,
            )
            a = """
                from %s import x
                %s = 23
                """ % (
                new,
                old,
            )
            self.check(b, a)
            s = """
                def f():
                    %s.method()
                """ % (
                old,
            )
            self.unchanged(s)
            # test nested usage
            b = """
                import %s
                %s.bar(%s.foo)
                """ % (
                old,
                old,
                old,
            )
            a = """
                import %s
                %s.bar(%s.foo)
                """ % (
                new,
                new,
                new,
            )
            self.check(b, a)
            # Attribute access spelled like the module name is untouched.
            b = """
                import %s
                x.%s
                """ % (
                old,
                old,
            )
            a = """
                import %s
                x.%s
                """ % (
                new,
                old,
            )
            self.check(b, a)
class Test_imports(FixerTestCase, ImportsFixerTests):
    """Tests for fix_imports, driven by its stdlib rename MAPPING."""
    fixer = "imports"
    from ..fixes.fix_imports import MAPPING as modules
    def test_multiple_imports(self):
        b = """import urlparse, cStringIO"""
        a = """import urllib.parse, io"""
        self.check(b, a)
    def test_multiple_imports_as(self):
        # Aliased imports keep their aliases; un-aliased renamed modules
        # have their usages rewritten too.
        b = """
            import copy_reg as bar, HTMLParser as foo, urlparse
            s = urlparse.spam(bar.foo())
            """
        a = """
            import copyreg as bar, html.parser as foo, urllib.parse
            s = urllib.parse.spam(bar.foo())
            """
        self.check(b, a)
class Test_imports2(FixerTestCase, ImportsFixerTests):
    """Tests for fix_imports2, driven by its rename MAPPING."""
    fixer = "imports2"
    from ..fixes.fix_imports2 import MAPPING as modules
class Test_imports_fixer_order(FixerTestCase, ImportsFixerTests):
    """Runs fix_imports and fix_imports2 together; also checks that both run
    after the local `import` fixer.
    """
    def setUp(self):
        super(Test_imports_fixer_order, self).setUp(["imports", "imports2"])
        # Merge both MAPPINGs; for the dbm-family modules fix_imports wins.
        from ..fixes.fix_imports2 import MAPPING as mapping2
        self.modules = mapping2.copy()
        from ..fixes.fix_imports import MAPPING as mapping1
        for key in ("dbhash", "dumbdbm", "dbm", "gdbm"):
            self.modules[key] = mapping1[key]
    def test_after_local_imports_refactoring(self):
        for fix in ("imports", "imports2"):
            self.fixer = fix
            self.assert_runs_after("import")
class Test_urllib(FixerTestCase):
    """Tests for fix_urllib: one old module maps to SEVERAL new modules, so
    ``self.modules`` maps old-name → [(new-name, [members]), ...] and a single
    ``from`` import may be split into multiple import statements.
    """
    fixer = "urllib"
    from ..fixes.fix_urllib import MAPPING as modules
    def test_import_module(self):
        for old, changes in self.modules.items():
            b = "import %s" % old
            a = "import %s" % ", ".join(map(itemgetter(0), changes))
            self.check(b, a)
    def test_import_from(self):
        for old, changes in self.modules.items():
            all_members = []
            for new, members in changes:
                for member in members:
                    all_members.append(member)
                    b = "from %s import %s" % (old, member)
                    a = "from %s import %s" % (new, member)
                    self.check(b, a)
                    s = "from foo import %s" % member
                    self.unchanged(s)
                b = "from %s import %s" % (old, ", ".join(members))
                a = "from %s import %s" % (new, ", ".join(members))
                self.check(b, a)
                s = "from foo import %s" % ", ".join(members)
                self.unchanged(s)
            # test the breaking of a module into multiple replacements
            b = "from %s import %s" % (old, ", ".join(all_members))
            a = "\n".join(
                [
                    "from %s import %s" % (new, ", ".join(members))
                    for (new, members) in changes
                ]
            )
            self.check(b, a)
    def test_import_module_as(self):
        # `import urllib as foo` cannot be split: warn instead.
        for old in self.modules:
            s = "import %s as foo" % old
            self.warns_unchanged(s, "This module is now multiple modules")
    def test_import_from_as(self):
        for old, changes in self.modules.items():
            for new, members in changes:
                for member in members:
                    b = "from %s import %s as foo_bar" % (old, member)
                    a = "from %s import %s as foo_bar" % (new, member)
                    self.check(b, a)
                    b = "from %s import %s as blah, %s" % (old, member, member)
                    a = "from %s import %s as blah, %s" % (new, member, member)
                    self.check(b, a)
    def test_star(self):
        for old in self.modules:
            s = "from %s import *" % old
            self.warns_unchanged(s, "Cannot handle star imports")
    def test_indented(self):
        # Split imports keep the indentation of the original statement.
        b = """
def foo():
    from urllib import urlencode, urlopen
"""
        a = """
def foo():
    from urllib.parse import urlencode
    from urllib.request import urlopen
"""
        self.check(b, a)
        b = """
def foo():
    other()
    from urllib import urlencode, urlopen
"""
        a = """
def foo():
    other()
    from urllib.parse import urlencode
    from urllib.request import urlopen
"""
        self.check(b, a)
    def test_import_module_usage(self):
        for old, changes in self.modules.items():
            for new, members in changes:
                for member in members:
                    new_import = ", ".join([n for (n, mems) in self.modules[old]])
                    b = """
                        import %s
                        foo(%s.%s)
                        """ % (
                        old,
                        old,
                        member,
                    )
                    a = """
                        import %s
                        foo(%s.%s)
                        """ % (
                        new_import,
                        new,
                        member,
                    )
                    self.check(b, a)
                    b = """
                        import %s
                        %s.%s(%s.%s)
                        """ % (
                        old,
                        old,
                        member,
                        old,
                        member,
                    )
                    a = """
                        import %s
                        %s.%s(%s.%s)
                        """ % (
                        new_import,
                        new,
                        member,
                        new,
                        member,
                    )
                    self.check(b, a)
class Test_input(FixerTestCase):
    """Tests for the 'input' fixer: input(...) becomes eval(input(...))."""

    fixer = "input"

    def test_prefix_preservation(self):
        # Whitespace inside the call must survive the transformation.
        for before, after in (
            ("""x = input(   )""", """x = eval(input(   ))"""),
            ("""x = input(   ''   )""", """x = eval(input(   ''   ))"""),
        ):
            self.check(before, after)

    def test_trailing_comment(self):
        self.check("""x = input()  #  foo""", """x = eval(input())  #  foo""")

    def test_idempotency(self):
        # Already-converted code must not be wrapped a second time.
        for src in (
            """x = eval(input())""",
            """x = eval(input(''))""",
            """x = eval(input(foo(5) + 9))""",
        ):
            self.unchanged(src)

    def test_1(self):
        self.check("""x = input()""", """x = eval(input())""")

    def test_2(self):
        self.check("""x = input('')""", """x = eval(input(''))""")

    def test_3(self):
        self.check("""x = input('prompt')""", """x = eval(input('prompt'))""")

    def test_4(self):
        self.check("""x = input(foo(5) + 9)""", """x = eval(input(foo(5) + 9))""")
class Test_tuple_params(FixerTestCase):
    """Tests for the 'tuple_params' fixer.

    Python 2 allowed tuple unpacking in parameter lists; the fixer replaces
    each tuple parameter with a ``xxx_todo_changeme`` placeholder and an
    unpacking assignment at the top of the body.  Lambdas are rewritten to
    index into the single argument instead.
    """
    fixer = "tuple_params"
    def test_unchanged_1(self):
        s = """def foo(): pass"""
        self.unchanged(s)
    def test_unchanged_2(self):
        s = """def foo(a, b, c): pass"""
        self.unchanged(s)
    def test_unchanged_3(self):
        s = """def foo(a=3, b=4, c=5): pass"""
        self.unchanged(s)
    def test_1(self):
        b = """
            def foo(((a, b), c)):
                x = 5"""
        a = """
            def foo(xxx_todo_changeme):
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)
    def test_2(self):
        b = """
            def foo(((a, b), c), d):
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, d):
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)
    def test_3(self):
        b = """
            def foo(((a, b), c), d) -> e:
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, d) -> e:
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)
    def test_semicolon(self):
        # One-line (semicolon-joined) bodies get the unpacking inline.
        b = """
            def foo(((a, b), c)): x = 5; y = 7"""
        a = """
            def foo(xxx_todo_changeme): ((a, b), c) = xxx_todo_changeme; x = 5; y = 7"""
        self.check(b, a)
    def test_keywords(self):
        b = """
            def foo(((a, b), c), d, e=5) -> z:
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, d, e=5) -> z:
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)
    def test_varargs(self):
        b = """
            def foo(((a, b), c), d, *vargs, **kwargs) -> z:
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, d, *vargs, **kwargs) -> z:
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)
    def test_multi_1(self):
        # Multiple tuple params get numbered placeholder names.
        b = """
            def foo(((a, b), c), (d, e, f)) -> z:
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, xxx_todo_changeme1) -> z:
                ((a, b), c) = xxx_todo_changeme
                (d, e, f) = xxx_todo_changeme1
                x = 5"""
        self.check(b, a)
    def test_multi_2(self):
        b = """
            def foo(x, ((a, b), c), d, (e, f, g), y) -> z:
                x = 5"""
        a = """
            def foo(x, xxx_todo_changeme, d, xxx_todo_changeme1, y) -> z:
                ((a, b), c) = xxx_todo_changeme
                (e, f, g) = xxx_todo_changeme1
                x = 5"""
        self.check(b, a)
    def test_docstring(self):
        # The unpacking assignments go after the docstring, not before it.
        b = """
            def foo(((a, b), c), (d, e, f)) -> z:
                "foo foo foo foo"
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, xxx_todo_changeme1) -> z:
                "foo foo foo foo"
                ((a, b), c) = xxx_todo_changeme
                (d, e, f) = xxx_todo_changeme1
                x = 5"""
        self.check(b, a)
    def test_lambda_no_change(self):
        s = """lambda x: x + 5"""
        self.unchanged(s)
    def test_lambda_parens_single_arg(self):
        # Redundant parens around a single lambda arg are simply dropped.
        b = """lambda (x): x + 5"""
        a = """lambda x: x + 5"""
        self.check(b, a)
        b = """lambda(x): x + 5"""
        a = """lambda x: x + 5"""
        self.check(b, a)
        b = """lambda ((((x)))): x + 5"""
        a = """lambda x: x + 5"""
        self.check(b, a)
        b = """lambda((((x)))): x + 5"""
        a = """lambda x: x + 5"""
        self.check(b, a)
    def test_lambda_simple(self):
        # Tuple lambda args become a single arg indexed in the body.
        b = """lambda (x, y): x + f(y)"""
        a = """lambda x_y: x_y[0] + f(x_y[1])"""
        self.check(b, a)
        b = """lambda(x, y): x + f(y)"""
        a = """lambda x_y: x_y[0] + f(x_y[1])"""
        self.check(b, a)
        b = """lambda (((x, y))): x + f(y)"""
        a = """lambda x_y: x_y[0] + f(x_y[1])"""
        self.check(b, a)
        b = """lambda(((x, y))): x + f(y)"""
        a = """lambda x_y: x_y[0] + f(x_y[1])"""
        self.check(b, a)
    def test_lambda_one_tuple(self):
        b = """lambda (x,): x + f(x)"""
        a = """lambda x1: x1[0] + f(x1[0])"""
        self.check(b, a)
        b = """lambda (((x,))): x + f(x)"""
        a = """lambda x1: x1[0] + f(x1[0])"""
        self.check(b, a)
    def test_lambda_simple_multi_use(self):
        b = """lambda (x, y): x + x + f(x) + x"""
        a = """lambda x_y: x_y[0] + x_y[0] + f(x_y[0]) + x_y[0]"""
        self.check(b, a)
    def test_lambda_simple_reverse(self):
        b = """lambda (x, y): y + x"""
        a = """lambda x_y: x_y[1] + x_y[0]"""
        self.check(b, a)
    def test_lambda_nested(self):
        b = """lambda (x, (y, z)): x + y + z"""
        a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + x_y_z[1][1]"""
        self.check(b, a)
        b = """lambda (((x, (y, z)))): x + y + z"""
        a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + x_y_z[1][1]"""
        self.check(b, a)
    def test_lambda_nested_multi_use(self):
        b = """lambda (x, (y, z)): x + y + f(y)"""
        a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + f(x_y_z[1][0])"""
        self.check(b, a)
class Test_methodattrs(FixerTestCase):
    """Tests for the 'methodattrs' fixer: im_func/im_self/im_class become
    __func__/__self__/__self__.__class__."""

    fixer = "methodattrs"
    attrs = ["func", "self", "class"]

    def test(self):
        for name in self.attrs:
            # im_class expands to two attribute hops; the others map 1:1.
            replacement = (
                "__self__.__class__" if name == "class" else "__%s__" % name
            )
            self.check("a.im_%s" % name, "a.%s" % replacement)
            self.check(
                "self.foo.im_%s.foo_bar" % name,
                "self.foo.%s.foo_bar" % replacement,
            )

    def test_unchanged(self):
        for name in self.attrs:
            # Bare names and already-converted dunder forms are left alone.
            self.unchanged("foo(im_%s + 5)" % name)
            self.unchanged("f(foo.__%s__)" % name)
            self.unchanged("f(foo.__%s__.foo)" % name)
class Test_next(FixerTestCase):
    """Tests for the 'next' fixer.

    The fixer rewrites ``it.next()`` calls to ``next(it)`` and renames
    ``def next(self)`` methods to ``__next__``.  If the name ``next`` is
    bound anywhere in the file (assignment, import, for-target, global
    declaration, function definition), calling the builtin would hit the
    shadowing binding, so the fixer falls back to the ``it.__next__()``
    form and emits a "possibly shadowed" warning.
    """
    fixer = "next"
    def test_1(self):
        b = """it.next()"""
        a = """next(it)"""
        self.check(b, a)
    def test_2(self):
        b = """a.b.c.d.next()"""
        a = """next(a.b.c.d)"""
        self.check(b, a)
    def test_3(self):
        b = """(a + b).next()"""
        a = """next((a + b))"""
        self.check(b, a)
    def test_4(self):
        b = """a().next()"""
        a = """next(a())"""
        self.check(b, a)
    def test_5(self):
        b = """a().next() + b"""
        a = """next(a()) + b"""
        self.check(b, a)
    def test_6(self):
        b = """c(      a().next() + b)"""
        a = """c(      next(a()) + b)"""
        self.check(b, a)
    def test_prefix_preservation_1(self):
        b = """
            for a in b:
                foo(a)
                a.next()
            """
        a = """
            for a in b:
                foo(a)
                next(a)
            """
        self.check(b, a)
    def test_prefix_preservation_2(self):
        # Comments immediately before the call must be kept in place.
        b = """
            for a in b:
                foo(a) # abc
                # def
                a.next()
            """
        a = """
            for a in b:
                foo(a) # abc
                # def
                next(a)
            """
        self.check(b, a)
    def test_prefix_preservation_3(self):
        # ``next`` is rebound above, so the __next__ method form is used.
        b = """
            next = 5
            for a in b:
                foo(a)
                a.next()
            """
        a = """
            next = 5
            for a in b:
                foo(a)
                a.__next__()
            """
        self.check(b, a, ignore_warnings=True)
    def test_prefix_preservation_4(self):
        b = """
            next = 5
            for a in b:
                foo(a) # abc
                # def
                a.next()
            """
        a = """
            next = 5
            for a in b:
                foo(a) # abc
                # def
                a.__next__()
            """
        self.check(b, a, ignore_warnings=True)
    def test_prefix_preservation_5(self):
        b = """
            next = 5
            for a in b:
                foo(foo(a), # abc
                    a.next())
            """
        a = """
            next = 5
            for a in b:
                foo(foo(a), # abc
                    a.__next__())
            """
        self.check(b, a, ignore_warnings=True)
    def test_prefix_preservation_6(self):
        b = """
            for a in b:
                foo(foo(a), # abc
                    next(a))
            """
        a = """
            for a in b:
                foo(foo(a), # abc
                    next(a))
            """
        self.check(b, a)
    def test_method_1(self):
        b = """
            class A:
                def next(self):
                    pass
            """
        a = """
            class A:
                def __next__(self):
                    pass
            """
        self.check(b, a)
    def test_method_2(self):
        b = """
            class A(object):
                def next(self):
                    pass
            """
        a = """
            class A(object):
                def __next__(self):
                    pass
            """
        self.check(b, a)
    def test_method_3(self):
        # The self parameter may have any name.
        b = """
            class A:
                def next(x):
                    pass
            """
        a = """
            class A:
                def __next__(x):
                    pass
            """
        self.check(b, a)
    def test_method_4(self):
        b = """
            class A:
                def __init__(self, foo):
                    self.foo = foo
                def next(self):
                    pass
                def __iter__(self):
                    return self
            """
        a = """
            class A:
                def __init__(self, foo):
                    self.foo = foo
                def __next__(self):
                    pass
                def __iter__(self):
                    return self
            """
        self.check(b, a)
    def test_method_unchanged(self):
        # Wrong arity: not the iterator protocol method, so leave it alone.
        s = """
            class A:
                def next(self, a, b):
                    pass
            """
        self.unchanged(s)
    def test_shadowing_assign_simple(self):
        s = """
            next = foo

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_assign_tuple_1(self):
        s = """
            (next, a) = foo

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_assign_tuple_2(self):
        s = """
            (a, (b, (next, c)), a) = foo

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_assign_list_1(self):
        s = """
            [next, a] = foo

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_assign_list_2(self):
        s = """
            [a, [b, [next, c]], a] = foo

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_builtin_assign(self):
        s = """
            def foo():
                __builtin__.next = foo

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_builtin_assign_in_tuple(self):
        s = """
            def foo():
                (a, __builtin__.next) = foo

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_builtin_assign_in_list(self):
        s = """
            def foo():
                [a, __builtin__.next] = foo

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_assign_to_next(self):
        # Assigning to an *attribute* named next does not shadow the builtin.
        s = """
            def foo():
                A.next = foo

            class A:
                def next(self, a, b):
                    pass
            """
        self.unchanged(s)
    def test_assign_to_next_in_tuple(self):
        s = """
            def foo():
                (a, A.next) = foo

            class A:
                def next(self, a, b):
                    pass
            """
        self.unchanged(s)
    def test_assign_to_next_in_list(self):
        s = """
            def foo():
                [a, A.next] = foo

            class A:
                def next(self, a, b):
                    pass
            """
        self.unchanged(s)
    def test_shadowing_import_1(self):
        s = """
            import foo.bar as next

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_import_2(self):
        s = """
            import bar, bar.foo as next

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_import_3(self):
        s = """
            import bar, bar.foo as next, baz

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_import_from_1(self):
        s = """
            from x import next

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_import_from_2(self):
        s = """
            from x.a import next

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_import_from_3(self):
        s = """
            from x import a, next, b

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_import_from_4(self):
        s = """
            from x.a import a, next, b

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_funcdef_1(self):
        s = """
            def next(a):
                pass

            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_funcdef_2(self):
        # Shadowed, so the call is rewritten to the method form with a warning.
        b = """
            def next(a):
                pass

            class A:
                def next(self):
                    pass

            it.next()
            """
        a = """
            def next(a):
                pass

            class A:
                def __next__(self):
                    pass

            it.__next__()
            """
        self.warns(b, a, "Calls to builtin next() possibly shadowed")
    def test_shadowing_global_1(self):
        s = """
            def f():
                global next
                next = 5
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_global_2(self):
        s = """
            def f():
                global a, next, b
                next = 5
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_for_simple(self):
        s = """
            for next in it():
                pass

            b = 5
            c = 6
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_for_tuple_1(self):
        s = """
            for next, b in it():
                pass

            b = 5
            c = 6
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_for_tuple_2(self):
        s = """
            for a, (next, c), b in it():
                pass

            b = 5
            c = 6
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_noncall_access_1(self):
        # Bare attribute access (no call) is renamed to __next__.
        b = """gnext = g.next"""
        a = """gnext = g.__next__"""
        self.check(b, a)
    def test_noncall_access_2(self):
        b = """f(g.next + 5)"""
        a = """f(g.__next__ + 5)"""
        self.check(b, a)
    def test_noncall_access_3(self):
        b = """f(g().next + 5)"""
        a = """f(g().__next__ + 5)"""
        self.check(b, a)
class Test_nonzero(FixerTestCase):
    """Tests for the 'nonzero' fixer: __nonzero__ methods become __bool__."""

    fixer = "nonzero"

    def test_1(self):
        before = """
            class A:
                def __nonzero__(self):
                    pass
            """
        after = """
            class A:
                def __bool__(self):
                    pass
            """
        self.check(before, after)

    def test_2(self):
        before = """
            class A(object):
                def __nonzero__(self):
                    pass
            """
        after = """
            class A(object):
                def __bool__(self):
                    pass
            """
        self.check(before, after)

    def test_unchanged_1(self):
        # Already using __bool__: nothing to do.
        self.unchanged(
            """
            class A(object):
                def __bool__(self):
                    pass
            """
        )

    def test_unchanged_2(self):
        # Wrong arity: not the special method, so leave it alone.
        self.unchanged(
            """
            class A(object):
                def __nonzero__(self, a):
                    pass
            """
        )

    def test_unchanged_func(self):
        # A free function named __nonzero__ is not a method; unchanged.
        self.unchanged(
            """
            def __nonzero__(self):
                pass
            """
        )
class Test_numliterals(FixerTestCase):
    """Tests for the 'numliterals' fixer: old-style octal (0755) and long
    integer (12L) literals are rewritten for Python 3."""

    fixer = "numliterals"

    def test_octal_1(self):
        self.check("""0755""", """0o755""")

    def test_long_int_1(self):
        self.check("""a = 12L""", """a = 12""")

    def test_long_int_2(self):
        self.check("""a = 12l""", """a = 12""")

    def test_long_hex(self):
        self.check("""b = 0x12l""", """b = 0x12""")

    def test_comments_and_spacing(self):
        self.check("""b = 0x12L""", """b = 0x12""")
        self.check("""b = 0755 # spam""", """b = 0o755 # spam""")

    def test_unchanged_int(self):
        self.unchanged("""5""")

    def test_unchanged_float(self):
        self.unchanged("""5.0""")

    def test_unchanged_octal(self):
        self.unchanged("""0o755""")

    def test_unchanged_hex(self):
        self.unchanged("""0xABC""")

    def test_unchanged_exp(self):
        self.unchanged("""5.0e10""")

    def test_unchanged_complex_int(self):
        self.unchanged("""5 + 4j""")

    def test_unchanged_complex_float(self):
        self.unchanged("""5.4 + 4.9j""")

    def test_unchanged_complex_bare(self):
        self.unchanged("""4j""")
        self.unchanged("""4.4j""")
class Test_renames(FixerTestCase):
    """Tests for the 'renames' fixer (module attribute renames such as
    sys.maxint -> sys.maxsize)."""
    fixer = "renames"
    # Maps module name -> (old attribute name, new attribute name).
    modules = {"sys": ("maxint", "maxsize")}
    def test_import_from(self):
        for mod, (old, new) in list(self.modules.items()):
            b = "from %s import %s" % (mod, old)
            a = "from %s import %s" % (mod, new)
            self.check(b, a)
            # Same attribute from an unrelated module: untouched.
            s = "from foo import %s" % old
            self.unchanged(s)
    def test_import_from_as(self):
        for mod, (old, new) in list(self.modules.items()):
            b = "from %s import %s as foo_bar" % (mod, old)
            a = "from %s import %s as foo_bar" % (mod, new)
            self.check(b, a)
    def test_import_module_usage(self):
        for mod, (old, new) in list(self.modules.items()):
            b = """
                import %s
                foo(%s, %s.%s)
                """ % (
                mod,
                mod,
                mod,
                old,
            )
            a = """
                import %s
                foo(%s, %s.%s)
                """ % (
                mod,
                mod,
                mod,
                new,
            )
            self.check(b, a)
    # Disabled test: the XXX_ prefix keeps unittest from collecting it.
    def XXX_test_from_import_usage(self):
        # not implemented yet
        for mod, (old, new) in list(self.modules.items()):
            b = """
                from %s import %s
                foo(%s, %s)
                """ % (
                mod,
                old,
                mod,
                old,
            )
            a = """
                from %s import %s
                foo(%s, %s)
                """ % (
                mod,
                new,
                mod,
                new,
            )
            self.check(b, a)
class Test_unicode(FixerTestCase):
    """Tests for the 'unicode' fixer: unicode()/unichr() calls and the
    u/ur string-literal prefixes are rewritten for Python 3."""
    fixer = "unicode"
    def test_whitespace(self):
        # Whitespace around the call and literal must be preserved.
        b = """unicode( x)"""
        a = """str( x)"""
        self.check(b, a)
        b = """ unicode(x )"""
        a = """ str(x )"""
        self.check(b, a)
        b = """ u'h'"""
        a = """ 'h'"""
        self.check(b, a)
    def test_unicode_call(self):
        b = """unicode(x, y, z)"""
        a = """str(x, y, z)"""
        self.check(b, a)
    def test_unichr(self):
        b = """unichr(u'h')"""
        a = """chr('h')"""
        self.check(b, a)
    def test_unicode_literal_1(self):
        b = '''u"x"'''
        a = '''"x"'''
        self.check(b, a)
    def test_unicode_literal_2(self):
        b = """ur'x'"""
        a = """r'x'"""
        self.check(b, a)
    def test_unicode_literal_3(self):
        b = """UR'''x''' """
        a = """R'''x''' """
        self.check(b, a)
    def test_native_literal_escape_u(self):
        # In a native (unprefixed, non-raw) literal the \u escapes must be
        # doubled so the Python 3 string keeps the same value.
        b = r"""'\\\u20ac\U0001d121\\u20ac'"""
        a = r"""'\\\\u20ac\\U0001d121\\u20ac'"""
        self.check(b, a)
        b = r"""r'\\\u20ac\U0001d121\\u20ac'"""
        a = r"""r'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
    def test_bytes_literal_escape_u(self):
        # \u has no meaning in bytes literals, so nothing changes.
        b = r"""b'\\\u20ac\U0001d121\\u20ac'"""
        a = r"""b'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
        b = r"""br'\\\u20ac\U0001d121\\u20ac'"""
        a = r"""br'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
    def test_unicode_literal_escape_u(self):
        # u-prefixed literals keep their escapes; only the prefix is dropped.
        b = r"""u'\\\u20ac\U0001d121\\u20ac'"""
        a = r"""'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
        b = r"""ur'\\\u20ac\U0001d121\\u20ac'"""
        a = r"""r'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
    def test_native_unicode_literal_escape_u(self):
        # With unicode_literals in effect, native literals are already
        # unicode, so their escapes stay as-is.
        f = "from __future__ import unicode_literals\n"
        b = f + r"""'\\\u20ac\U0001d121\\u20ac'"""
        a = f + r"""'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
        b = f + r"""r'\\\u20ac\U0001d121\\u20ac'"""
        a = f + r"""r'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
class Test_filter(FixerTestCase):
    """Tests for the 'filter' fixer: wrap filter(...) in list(...), or
    rewrite it as a comprehension where that is safe."""

    fixer = "filter"

    def test_prefix_preservation(self):
        self.check(
            """x = filter(    foo, 'abc'   )""",
            """x = list(filter(    foo, 'abc'   ))""",
        )
        self.check(
            """x = filter(  None , 'abc'  )""",
            """x = [_f for _f in 'abc' if _f]""",
        )

    def test_filter_basic(self):
        self.check(
            """x = filter(None, 'abc')""",
            """x = [_f for _f in 'abc' if _f]""",
        )
        self.check(
            """x = len(filter(f, 'abc'))""",
            """x = len(list(filter(f, 'abc')))""",
        )
        self.check(
            """x = filter(lambda x: x%2 == 0, range(10))""",
            """x = [x for x in range(10) if x%2 == 0]""",
        )
        # A parenthesized lambda parameter is handled too.
        self.check(
            """x = filter(lambda (x): x%2 == 0, range(10))""",
            """x = [x for x in range(10) if x%2 == 0]""",
        )

    def test_filter_trailers(self):
        # Trailing subscripts after the call survive the rewrite.
        self.check(
            """x = filter(None, 'abc')[0]""",
            """x = [_f for _f in 'abc' if _f][0]""",
        )
        self.check(
            """x = len(filter(f, 'abc')[0])""",
            """x = len(list(filter(f, 'abc'))[0])""",
        )
        self.check(
            """x = filter(lambda x: x%2 == 0, range(10))[0]""",
            """x = [x for x in range(10) if x%2 == 0][0]""",
        )
        # A parenthesized lambda parameter is handled too.
        self.check(
            """x = filter(lambda (x): x%2 == 0, range(10))[0]""",
            """x = [x for x in range(10) if x%2 == 0][0]""",
        )

    def test_filter_nochange(self):
        # Contexts that already consume an iterator: no list() wrapper needed.
        for src in (
            """b.join(filter(f, 'abc'))""",
            """(a + foo(5)).join(filter(f, 'abc'))""",
            """iter(filter(f, 'abc'))""",
            """list(filter(f, 'abc'))""",
            """list(filter(f, 'abc'))[0]""",
            """set(filter(f, 'abc'))""",
            """set(filter(f, 'abc')).pop()""",
            """tuple(filter(f, 'abc'))""",
            """any(filter(f, 'abc'))""",
            """all(filter(f, 'abc'))""",
            """sum(filter(f, 'abc'))""",
            """sorted(filter(f, 'abc'))""",
            """sorted(filter(f, 'abc'), key=blah)""",
            """sorted(filter(f, 'abc'), key=blah)[0]""",
            """enumerate(filter(f, 'abc'))""",
            """enumerate(filter(f, 'abc'), start=1)""",
            """for i in filter(f, 'abc'): pass""",
            """[x for x in filter(f, 'abc')]""",
            """(x for x in filter(f, 'abc'))""",
        ):
            self.unchanged(src)

    def test_future_builtins(self):
        # Importing filter from future_builtins disables the fixer.
        self.unchanged("from future_builtins import spam, filter; filter(f, 'ham')")
        self.check(
            """from future_builtins import spam; x = filter(f, 'abc')""",
            """from future_builtins import spam; x = list(filter(f, 'abc'))""",
        )
        self.unchanged("from future_builtins import *; filter(f, 'ham')")
class Test_map(FixerTestCase):
    """Tests for the 'map' fixer: wrap map(...) in list(...), or rewrite it
    as a comprehension / plain list() call where that is safe."""

    fixer = "map"

    def check(self, b, a):
        # A future_builtins import of map must disable the fixer entirely.
        self.unchanged("from future_builtins import map; " + b, a)
        super(Test_map, self).check(b, a)

    def test_prefix_preservation(self):
        self.check(
            """x =    map(   f,    'abc'   )""",
            """x =    list(map(   f,    'abc'   ))""",
        )

    def test_map_trailers(self):
        # Trailing subscripts after the call survive the rewrite.
        for before, after in (
            ("""x = map(f, 'abc')[0]""", """x = list(map(f, 'abc'))[0]"""),
            ("""x = map(None, l)[0]""", """x = list(l)[0]"""),
            ("""x = map(lambda x:x, l)[0]""", """x = [x for x in l][0]"""),
            ("""x = map(f, 'abc')[0][1]""", """x = list(map(f, 'abc'))[0][1]"""),
        ):
            self.check(before, after)

    def test_trailing_comment(self):
        self.check(
            """x = map(f, 'abc')   #   foo""",
            """x = list(map(f, 'abc'))   #   foo""",
        )

    def test_None_with_multiple_arguments(self):
        # map(None, a, b, c) has no direct Python 3 equivalent.
        self.warns_unchanged(
            """x = map(None, a, b, c)""",
            "cannot convert map(None, ...) with multiple arguments",
        )

    def test_map_basic(self):
        for before, after in (
            ("""x = map(f, 'abc')""", """x = list(map(f, 'abc'))"""),
            (
                """x = len(map(f, 'abc', 'def'))""",
                """x = len(list(map(f, 'abc', 'def')))""",
            ),
            ("""x = map(None, 'abc')""", """x = list('abc')"""),
            (
                """x = map(lambda x: x+1, range(4))""",
                """x = [x+1 for x in range(4)]""",
            ),
            # A parenthesized lambda parameter is handled too.
            (
                """x = map(lambda (x): x+1, range(4))""",
                """x = [x+1 for x in range(4)]""",
            ),
        ):
            self.check(before, after)
        # A bare map() used as a statement only warrants a warning.
        before = """
            foo()
            # foo
            map(f, x)
            """
        after = """
            foo()
            # foo
            list(map(f, x))
            """
        self.warns(before, after, "You should use a for loop here")

    def test_map_nochange(self):
        # Contexts that already consume an iterator: no list() wrapper needed.
        for src in (
            """b.join(map(f, 'abc'))""",
            """(a + foo(5)).join(map(f, 'abc'))""",
            """iter(map(f, 'abc'))""",
            """list(map(f, 'abc'))""",
            """list(map(f, 'abc'))[0]""",
            """set(map(f, 'abc'))""",
            """set(map(f, 'abc')).pop()""",
            """tuple(map(f, 'abc'))""",
            """any(map(f, 'abc'))""",
            """all(map(f, 'abc'))""",
            """sum(map(f, 'abc'))""",
            """sorted(map(f, 'abc'))""",
            """sorted(map(f, 'abc'), key=blah)""",
            """sorted(map(f, 'abc'), key=blah)[0]""",
            """enumerate(map(f, 'abc'))""",
            """enumerate(map(f, 'abc'), start=1)""",
            """for i in map(f, 'abc'): pass""",
            """[x for x in map(f, 'abc')]""",
            """(x for x in map(f, 'abc'))""",
        ):
            self.unchanged(src)

    def test_future_builtins(self):
        self.unchanged("from future_builtins import spam, map, eggs; map(f, 'ham')")
        self.check(
            """from future_builtins import spam, eggs; x = map(f, 'abc')""",
            """from future_builtins import spam, eggs; x = list(map(f, 'abc'))""",
        )
        self.unchanged("from future_builtins import *; map(f, 'ham')")
class Test_zip(FixerTestCase):
    """Tests for the 'zip' fixer: wrap zip(...) in list(...) where needed."""

    fixer = "zip"

    def check(self, b, a):
        # A future_builtins import of zip must disable the fixer entirely.
        self.unchanged("from future_builtins import zip; " + b, a)
        super(Test_zip, self).check(b, a)

    def test_zip_basic(self):
        for before, after in (
            ("""x = zip()""", """x = list(zip())"""),
            ("""x = zip(a, b, c)""", """x = list(zip(a, b, c))"""),
            ("""x = len(zip(a, b))""", """x = len(list(zip(a, b)))"""),
        ):
            self.check(before, after)

    def test_zip_trailers(self):
        # Trailing subscripts after the call survive the rewrite.
        for before, after in (
            ("""x = zip(a, b, c)[0]""", """x = list(zip(a, b, c))[0]"""),
            ("""x = zip(a, b, c)[0][1]""", """x = list(zip(a, b, c))[0][1]"""),
        ):
            self.check(before, after)

    def test_zip_nochange(self):
        # Contexts that already consume an iterator: no list() wrapper needed.
        for src in (
            """b.join(zip(a, b))""",
            """(a + foo(5)).join(zip(a, b))""",
            """iter(zip(a, b))""",
            """list(zip(a, b))""",
            """list(zip(a, b))[0]""",
            """set(zip(a, b))""",
            """set(zip(a, b)).pop()""",
            """tuple(zip(a, b))""",
            """any(zip(a, b))""",
            """all(zip(a, b))""",
            """sum(zip(a, b))""",
            """sorted(zip(a, b))""",
            """sorted(zip(a, b), key=blah)""",
            """sorted(zip(a, b), key=blah)[0]""",
            """enumerate(zip(a, b))""",
            """enumerate(zip(a, b), start=1)""",
            """for i in zip(a, b): pass""",
            """[x for x in zip(a, b)]""",
            """(x for x in zip(a, b))""",
        ):
            self.unchanged(src)

    def test_future_builtins(self):
        self.unchanged("from future_builtins import spam, zip, eggs; zip(a, b)")
        self.check(
            """from future_builtins import spam, eggs; x = zip(a, b)""",
            """from future_builtins import spam, eggs; x = list(zip(a, b))""",
        )
        self.unchanged("from future_builtins import *; zip(a, b)")
class Test_standarderror(FixerTestCase):
    """Tests for the 'standarderror' fixer: StandardError -> Exception."""

    fixer = "standarderror"

    def test(self):
        for before, after in (
            ("""x = StandardError()""", """x = Exception()"""),
            ("""x = StandardError(a, b, c)""", """x = Exception(a, b, c)"""),
            ("""f(2 + StandardError(a, b, c))""", """f(2 + Exception(a, b, c))"""),
        ):
            self.check(before, after)
class Test_types(FixerTestCase):
    """Tests for the 'types' fixer: types.XxxType names become builtins."""

    fixer = "types"

    def test_basic_types_convert(self):
        for before, after in (
            ("""types.StringType""", """bytes"""),
            ("""types.DictType""", """dict"""),
            # Whitespace around the dot is tolerated.
            ("""types . IntType""", """int"""),
            ("""types.ListType""", """list"""),
            ("""types.LongType""", """int"""),
            ("""types.NoneType""", """type(None)"""),
            ("types.StringTypes", "(str,)"),
        ):
            self.check(before, after)
class Test_idioms(FixerTestCase):
    """Tests for the 'idioms' fixer.

    Covers three rewrites: ``while 1`` -> ``while True``, ``type(x) == T``
    (and is/is not/!= variants) -> ``isinstance(x, T)``, and the
    list-then-sort pattern -> a single ``sorted(...)`` call.
    """
    fixer = "idioms"
    def test_while(self):
        b = """while 1: foo()"""
        a = """while True: foo()"""
        self.check(b, a)
        b = """while 1: foo()"""
        a = """while True: foo()"""
        self.check(b, a)
        b = """
            while 1:
                foo()
            """
        a = """
            while True:
                foo()
            """
        self.check(b, a)
    def test_while_unchanged(self):
        # Only the literal 1 is rewritten; other truthy conditions stay.
        s = """while 11: foo()"""
        self.unchanged(s)
        s = """while 0: foo()"""
        self.unchanged(s)
        s = """while foo(): foo()"""
        self.unchanged(s)
        s = """while []: foo()"""
        self.unchanged(s)
    def test_eq_simple(self):
        b = """type(x) == T"""
        a = """isinstance(x, T)"""
        self.check(b, a)
        b = """if type(x) == T: pass"""
        a = """if isinstance(x, T): pass"""
        self.check(b, a)
    def test_eq_reverse(self):
        b = """T == type(x)"""
        a = """isinstance(x, T)"""
        self.check(b, a)
        b = """if T == type(x): pass"""
        a = """if isinstance(x, T): pass"""
        self.check(b, a)
    def test_eq_expression(self):
        b = """type(x+y) == d.get('T')"""
        a = """isinstance(x+y, d.get('T'))"""
        self.check(b, a)
        b = """type(   x  +  y) == d.get('T')"""
        a = """isinstance(x  +  y, d.get('T'))"""
        self.check(b, a)
    def test_is_simple(self):
        b = """type(x) is T"""
        a = """isinstance(x, T)"""
        self.check(b, a)
        b = """if type(x) is T: pass"""
        a = """if isinstance(x, T): pass"""
        self.check(b, a)
    def test_is_reverse(self):
        b = """T is type(x)"""
        a = """isinstance(x, T)"""
        self.check(b, a)
        b = """if T is type(x): pass"""
        a = """if isinstance(x, T): pass"""
        self.check(b, a)
    def test_is_expression(self):
        b = """type(x+y) is d.get('T')"""
        a = """isinstance(x+y, d.get('T'))"""
        self.check(b, a)
        b = """type(   x  +  y) is d.get('T')"""
        a = """isinstance(x  +  y, d.get('T'))"""
        self.check(b, a)
    def test_is_not_simple(self):
        b = """type(x) is not T"""
        a = """not isinstance(x, T)"""
        self.check(b, a)
        b = """if type(x) is not T: pass"""
        a = """if not isinstance(x, T): pass"""
        self.check(b, a)
    def test_is_not_reverse(self):
        b = """T is not type(x)"""
        a = """not isinstance(x, T)"""
        self.check(b, a)
        b = """if T is not type(x): pass"""
        a = """if not isinstance(x, T): pass"""
        self.check(b, a)
    def test_is_not_expression(self):
        b = """type(x+y) is not d.get('T')"""
        a = """not isinstance(x+y, d.get('T'))"""
        self.check(b, a)
        b = """type(   x  +  y) is not d.get('T')"""
        a = """not isinstance(x  +  y, d.get('T'))"""
        self.check(b, a)
    def test_ne_simple(self):
        b = """type(x) != T"""
        a = """not isinstance(x, T)"""
        self.check(b, a)
        b = """if type(x) != T: pass"""
        a = """if not isinstance(x, T): pass"""
        self.check(b, a)
    def test_ne_reverse(self):
        b = """T != type(x)"""
        a = """not isinstance(x, T)"""
        self.check(b, a)
        b = """if T != type(x): pass"""
        a = """if not isinstance(x, T): pass"""
        self.check(b, a)
    def test_ne_expression(self):
        b = """type(x+y) != d.get('T')"""
        a = """not isinstance(x+y, d.get('T'))"""
        self.check(b, a)
        b = """type(   x  +  y) != d.get('T')"""
        a = """not isinstance(x  +  y, d.get('T'))"""
        self.check(b, a)
    def test_type_unchanged(self):
        # type(x) used for something other than a comparison is left alone.
        a = """type(x).__name__"""
        self.unchanged(a)
    def test_sort_list_call(self):
        # ``v = list(t); v.sort()`` collapses into ``v = sorted(t)``.
        b = """
            v = list(t)
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(t)
            foo(v)
            """
        self.check(b, a)
        b = """
            v = list(foo(b) + d)
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(foo(b) + d)
            foo(v)
            """
        self.check(b, a)
        b = """
            while x:
                v = list(t)
                v.sort()
                foo(v)
            """
        a = """
            while x:
                v = sorted(t)
                foo(v)
            """
        self.check(b, a)
        b = """
            v = list(t)
            # foo
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(t)
            # foo
            foo(v)
            """
        self.check(b, a)
        b = r"""
            v = list(   t)
            v.sort()
            foo(v)
            """
        a = r"""
            v = sorted(   t)
            foo(v)
            """
        self.check(b, a)
        b = r"""
            try:
                m = list(s)
                m.sort()
            except: pass
            """
        a = r"""
            try:
                m = sorted(s)
            except: pass
            """
        self.check(b, a)
        b = r"""
            try:
                m = list(s)
                # foo
                m.sort()
            except: pass
            """
        a = r"""
            try:
                m = sorted(s)
                # foo
            except: pass
            """
        self.check(b, a)
        b = r"""
            m = list(s)
            # more comments
            m.sort()"""
        a = r"""
            m = sorted(s)
            # more comments"""
        self.check(b, a)
    def test_sort_simple_expr(self):
        # ``v = expr; v.sort()`` collapses into ``v = sorted(expr)``.
        b = """
            v = t
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(t)
            foo(v)
            """
        self.check(b, a)
        b = """
            v = foo(b)
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(foo(b))
            foo(v)
            """
        self.check(b, a)
        b = """
            v = b.keys()
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(b.keys())
            foo(v)
            """
        self.check(b, a)
        b = """
            v = foo(b) + d
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(foo(b) + d)
            foo(v)
            """
        self.check(b, a)
        b = """
            while x:
                v = t
                v.sort()
                foo(v)
            """
        a = """
            while x:
                v = sorted(t)
                foo(v)
            """
        self.check(b, a)
        b = """
            v = t
            # foo
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(t)
            # foo
            foo(v)
            """
        self.check(b, a)
        b = r"""
            v = t
            v.sort()
            foo(v)
            """
        a = r"""
            v = sorted(t)
            foo(v)
            """
        self.check(b, a)
    def test_sort_unchanged(self):
        # Different variable, or sort() with arguments: leave untouched.
        s = """
            v = list(t)
            w.sort()
            foo(w)
            """
        self.unchanged(s)
        s = """
            v = list(t)
            v.sort(u)
            foo(v)
            """
        self.unchanged(s)
class Test_basestring(FixerTestCase):
    """Tests for the 'basestring' fixer: basestring becomes str."""

    fixer = "basestring"

    def test_basestring(self):
        self.check("""isinstance(x, basestring)""", """isinstance(x, str)""")
class Test_buffer(FixerTestCase):
    """Tests for the 'buffer' fixer: buffer(...) becomes memoryview(...)."""

    fixer = "buffer"

    def test_buffer(self):
        self.check("""x = buffer(y)""", """x = memoryview(y)""")

    def test_slicing(self):
        self.check("""buffer(y)[4:5]""", """memoryview(y)[4:5]""")
class Test_future(FixerTestCase):
    """Tests for the 'future' fixer, which removes __future__ imports."""

    fixer = "future"

    def test_future(self):
        # The import line vanishes; surrounding comments are preserved.
        for before, after in (
            ("""from __future__ import braces""", ""),
            ("""# comment\nfrom __future__ import braces""", """# comment\n"""),
            ("""from __future__ import braces\n# comment""", """\n# comment"""),
        ):
            self.check(before, after)

    def test_run_order(self):
        self.assert_runs_after("print")
class Test_itertools(FixerTestCase):
    """Tests for the ``itertools`` fixer: ``imap``/``ifilter``/``izip`` (bare
    or ``itertools.``-qualified) become the builtins ``map``/``filter``/``zip``;
    ``ifilterfalse``/``izip_longest`` simply lose their ``i`` prefix."""
    fixer = "itertools"
    def checkall(self, before, after):
        """Run ``check`` for every (prefix, function) combination, filling the
        ``%s`` placeholders in *before*/*after*."""
        # Because we need to check with and without the itertools prefix
        # and on each of the three functions, these loops make it all
        # much easier
        for i in ("itertools.", ""):
            for f in ("map", "filter", "zip"):
                b = before % (i + "i" + f)
                a = after % (f)
                self.check(b, a)
    def test_0(self):
        # A simple example -- test_1 covers exactly the same thing,
        # but it's not quite as clear.
        b = "itertools.izip(a, b)"
        a = "zip(a, b)"
        self.check(b, a)
    def test_1(self):
        b = """%s(f, a)"""
        a = """%s(f, a)"""
        self.checkall(b, a)
    def test_qualified(self):
        b = """itertools.ifilterfalse(a, b)"""
        a = """itertools.filterfalse(a, b)"""
        self.check(b, a)
        b = """itertools.izip_longest(a, b)"""
        a = """itertools.zip_longest(a, b)"""
        self.check(b, a)
    def test_2(self):
        b = """ifilterfalse(a, b)"""
        a = """filterfalse(a, b)"""
        self.check(b, a)
        b = """izip_longest(a, b)"""
        a = """zip_longest(a, b)"""
        self.check(b, a)
    def test_space_1(self):
        # Leading whitespace before the call is preserved.
        b = """ %s(f, a)"""
        a = """ %s(f, a)"""
        self.checkall(b, a)
    def test_space_2(self):
        b = """ itertools.ifilterfalse(a, b)"""
        a = """ itertools.filterfalse(a, b)"""
        self.check(b, a)
        b = """ itertools.izip_longest(a, b)"""
        a = """ itertools.zip_longest(a, b)"""
        self.check(b, a)
    def test_run_order(self):
        # ``itertools`` must run after the map/zip/filter fixers.
        self.assert_runs_after("map", "zip", "filter")
class Test_itertools_imports(FixerTestCase):
    """Tests for the ``itertools_imports`` fixer, which drops
    ``imap``/``izip`` from ``from itertools import ...`` statements, renames
    ``ifilterfalse``/``izip_longest``, and removes the statement entirely when
    nothing importable remains."""
    fixer = "itertools_imports"
    def test_reduced(self):
        b = "from itertools import imap, izip, foo"
        a = "from itertools import foo"
        self.check(b, a)
        b = "from itertools import bar, imap, izip, foo"
        a = "from itertools import bar, foo"
        self.check(b, a)
        b = "from itertools import chain, imap, izip"
        a = "from itertools import chain"
        self.check(b, a)
    def test_comments(self):
        b = "#foo\nfrom itertools import imap, izip"
        a = "#foo\n"
        self.check(b, a)
    def test_none(self):
        # When every imported name disappears, the whole statement goes.
        b = "from itertools import imap, izip"
        a = ""
        self.check(b, a)
        b = "from itertools import izip"
        a = ""
        self.check(b, a)
    def test_import_as(self):
        b = "from itertools import izip, bar as bang, imap"
        a = "from itertools import bar as bang"
        self.check(b, a)
        b = "from itertools import izip as _zip, imap, bar"
        a = "from itertools import bar"
        self.check(b, a)
        b = "from itertools import imap as _map"
        a = ""
        self.check(b, a)
        b = "from itertools import imap as _map, izip as _zip"
        a = ""
        self.check(b, a)
        s = "from itertools import bar as bang"
        self.unchanged(s)
    def test_ifilter_and_zip_longest(self):
        for name in "filterfalse", "zip_longest":
            b = "from itertools import i%s" % (name,)
            a = "from itertools import %s" % (name,)
            self.check(b, a)
            b = "from itertools import imap, i%s, foo" % (name,)
            a = "from itertools import %s, foo" % (name,)
            self.check(b, a)
            b = "from itertools import bar, i%s, foo" % (name,)
            a = "from itertools import bar, %s, foo" % (name,)
            self.check(b, a)
    def test_import_star(self):
        s = "from itertools import *"
        self.unchanged(s)
    def test_unchanged(self):
        s = "from itertools import foo"
        self.unchanged(s)
class Test_import(FixerTestCase):
    """Tests for the ``import`` fixer, which rewrites implicit relative
    imports inside a package as explicit ``from . import ...`` imports.

    ``setUp`` monkey-patches ``fix_import.exists`` so each test controls
    which files the fixer believes exist on disk."""
    fixer = "import"
    def setUp(self):
        """Install a fake ``exists`` that records every path checked and
        answers from ``self.always_exists`` / ``self.present_files``."""
        super(Test_import, self).setUp()
        # Need to replace fix_import's exists method
        # so we can check that it's doing the right thing
        self.files_checked = []
        self.present_files = set()
        self.always_exists = True
        def fake_exists(name):
            self.files_checked.append(name)
            return self.always_exists or (name in self.present_files)
        from fissix.fixes import fix_import
        fix_import.exists = fake_exists
    def tearDown(self):
        # Restore the real filesystem check for subsequent test classes.
        from fissix.fixes import fix_import
        fix_import.exists = os.path.exists
    def check_both(self, b, a):
        """Assert the transform fires when the module exists locally and does
        not fire when it doesn't."""
        self.always_exists = True
        super(Test_import, self).check(b, a)
        self.always_exists = False
        super(Test_import, self).unchanged(b)
    def test_files_checked(self):
        def p(path):
            # Takes a unix path and returns a path with correct separators
            # NOTE(review): os.path.pathsep is the PATH-list separator
            # (':'/';'), not the directory separator (os.sep) — this looks
            # suspicious; confirm against upstream intent before changing.
            return os.path.pathsep.join(path.split("/"))
        self.always_exists = False
        self.present_files = set(["__init__.py"])
        expected_extensions = (".py", os.path.sep, ".pyc", ".so", ".sl", ".pyd")
        names_to_test = (p("/spam/eggs.py"), "ni.py", p("../../shrubbery.py"))
        for name in names_to_test:
            self.files_checked = []
            self.filename = name
            self.unchanged("import jam")
            if os.path.dirname(name):
                name = os.path.dirname(name) + "/jam"
            else:
                name = "jam"
            expected_checks = set(name + ext for ext in expected_extensions)
            expected_checks.add("__init__.py")
            self.assertEqual(set(self.files_checked), expected_checks)
    def test_not_in_package(self):
        s = "import bar"
        self.always_exists = False
        self.present_files = set(["bar.py"])
        self.unchanged(s)
    def test_with_absolute_import_enabled(self):
        # An explicit absolute_import future-import disables the rewrite.
        s = "from __future__ import absolute_import\nimport bar"
        self.always_exists = False
        self.present_files = set(["__init__.py", "bar.py"])
        self.unchanged(s)
    def test_in_package(self):
        b = "import bar"
        a = "from . import bar"
        self.always_exists = False
        self.present_files = set(["__init__.py", "bar.py"])
        self.check(b, a)
    def test_import_from_package(self):
        b = "import bar"
        a = "from . import bar"
        self.always_exists = False
        self.present_files = set(["__init__.py", "bar" + os.path.sep])
        self.check(b, a)
    def test_already_relative_import(self):
        s = "from . import bar"
        self.unchanged(s)
    def test_comments_and_indent(self):
        b = "import bar # Foo"
        a = "from . import bar # Foo"
        self.check(b, a)
    def test_from(self):
        b = "from foo import bar, baz"
        a = "from .foo import bar, baz"
        self.check_both(b, a)
        b = "from foo import bar"
        a = "from .foo import bar"
        self.check_both(b, a)
        b = "from foo import (bar, baz)"
        a = "from .foo import (bar, baz)"
        self.check_both(b, a)
    def test_dotted_from(self):
        b = "from green.eggs import ham"
        a = "from .green.eggs import ham"
        self.check_both(b, a)
    def test_from_as(self):
        b = "from green.eggs import ham as spam"
        a = "from .green.eggs import ham as spam"
        self.check_both(b, a)
    def test_import(self):
        b = "import foo"
        a = "from . import foo"
        self.check_both(b, a)
        b = "import foo, bar"
        a = "from . import foo, bar"
        self.check_both(b, a)
        b = "import foo, bar, x"
        a = "from . import foo, bar, x"
        self.check_both(b, a)
        b = "import x, y, z"
        a = "from . import x, y, z"
        self.check_both(b, a)
    def test_import_as(self):
        b = "import foo as x"
        a = "from . import foo as x"
        self.check_both(b, a)
        b = "import a as b, b as c, c as d"
        a = "from . import a as b, b as c, c as d"
        self.check_both(b, a)
    def test_local_and_absolute(self):
        # Mixing a local module and an absolute one in a single statement
        # cannot be rewritten safely; the fixer warns and leaves it alone.
        self.always_exists = False
        self.present_files = set(["foo.py", "__init__.py"])
        s = "import foo, bar"
        self.warns_unchanged(s, "absolute and local imports together")
    def test_dotted_import(self):
        b = "import foo.bar"
        a = "from . import foo.bar"
        self.check_both(b, a)
    def test_dotted_import_as(self):
        b = "import foo.bar as bang"
        a = "from . import foo.bar as bang"
        self.check_both(b, a)
    def test_prefix(self):
        b = """
        # prefix
        import foo.bar
        """
        a = """
        # prefix
        from . import foo.bar
        """
        self.check_both(b, a)
class Test_set_literal(FixerTestCase):
    """Tests for the optional ``set_literal`` fixer: ``set([...])`` and
    ``set((...))`` calls with literal contents become ``{...}`` set literals,
    and ``set([... for ...])`` becomes a set comprehension."""
    fixer = "set_literal"
    def test_basic(self):
        b = """set([1, 2, 3])"""
        a = """{1, 2, 3}"""
        self.check(b, a)
        b = """set((1, 2, 3))"""
        a = """{1, 2, 3}"""
        self.check(b, a)
        b = """set((1,))"""
        a = """{1}"""
        self.check(b, a)
        b = """set([1])"""
        self.check(b, a)
        b = """set((a, b))"""
        a = """{a, b}"""
        self.check(b, a)
        b = """set([a, b])"""
        self.check(b, a)
        b = """set((a*234, f(args=23)))"""
        a = """{a*234, f(args=23)}"""
        self.check(b, a)
        b = """set([a*23, f(23)])"""
        a = """{a*23, f(23)}"""
        self.check(b, a)
        b = """set([a-234**23])"""
        a = """{a-234**23}"""
        self.check(b, a)
    def test_listcomps(self):
        b = """set([x for x in y])"""
        a = """{x for x in y}"""
        self.check(b, a)
        b = """set([x for x in y if x == m])"""
        a = """{x for x in y if x == m}"""
        self.check(b, a)
        b = """set([x for x in y for a in b])"""
        a = """{x for x in y for a in b}"""
        self.check(b, a)
        b = """set([f(x) - 23 for x in y])"""
        a = """{f(x) - 23 for x in y}"""
        self.check(b, a)
    def test_whitespace(self):
        # Interior whitespace of the original elements is preserved.
        b = """set( [1, 2])"""
        a = """{1, 2}"""
        self.check(b, a)
        b = """set([1 , 2])"""
        a = """{1 , 2}"""
        self.check(b, a)
        b = """set([ 1 ])"""
        a = """{ 1 }"""
        self.check(b, a)
        b = """set( [1] )"""
        a = """{1}"""
        self.check(b, a)
        b = """set([ 1, 2 ])"""
        a = """{ 1, 2 }"""
        self.check(b, a)
        b = """set([x for x in y ])"""
        a = """{x for x in y }"""
        self.check(b, a)
        b = """set(
                   [1, 2]
               )
               """
        a = """{1, 2}\n"""
        self.check(b, a)
    def test_comments(self):
        b = """set((1, 2)) # Hi"""
        a = """{1, 2} # Hi"""
        self.check(b, a)
        # This isn't optimal behavior, but the fixer is optional.
        b = """
            # Foo
            set( # Bar
               (1, 2)
            )
            """
        a = """
            # Foo
            {1, 2}
            """
        self.check(b, a)
    def test_unchanged(self):
        s = """set()"""
        self.unchanged(s)
        s = """set(a)"""
        self.unchanged(s)
        s = """set(a, b, c)"""
        self.unchanged(s)
        # Don't transform generators because they might have to be lazy.
        s = """set(x for x in y)"""
        self.unchanged(s)
        s = """set(x for x in y if z)"""
        self.unchanged(s)
        s = """set(a*823-23**2 + f(23))"""
        self.unchanged(s)
class Test_sys_exc(FixerTestCase):
    """Tests for the ``sys_exc`` fixer, which rewrites the removed
    ``sys.exc_type`` / ``sys.exc_value`` / ``sys.exc_traceback`` attributes
    as the matching ``sys.exc_info()`` subscript."""

    fixer = "sys_exc"

    def test_0(self):
        before = "sys.exc_type"
        after = "sys.exc_info()[0]"
        self.check(before, after)

    def test_1(self):
        before = "sys.exc_value"
        after = "sys.exc_info()[1]"
        self.check(before, after)

    def test_2(self):
        before = "sys.exc_traceback"
        after = "sys.exc_info()[2]"
        self.check(before, after)

    def test_3(self):
        # A trailing comment must survive the rewrite.
        before = "sys.exc_type # Foo"
        after = "sys.exc_info()[0] # Foo"
        self.check(before, after)

    def test_4(self):
        # Whitespace after the dot is preserved.
        before = "sys. exc_type"
        after = "sys. exc_info()[0]"
        self.check(before, after)

    def test_5(self):
        # Whitespace before the dot is preserved.
        before = "sys .exc_type"
        after = "sys .exc_info()[0]"
        self.check(before, after)
class Test_paren(FixerTestCase):
    """Tests for the ``paren`` fixer, which adds the parentheses required in
    Python 3 around unparenthesized tuples in comprehension ``for`` clauses."""
    fixer = "paren"
    def test_0(self):
        b = """[i for i in 1, 2 ]"""
        a = """[i for i in (1, 2) ]"""
        self.check(b, a)
    def test_1(self):
        b = """[i for i in 1, 2, ]"""
        a = """[i for i in (1, 2,) ]"""
        self.check(b, a)
    def test_2(self):
        b = """[i for i in 1, 2 ]"""
        a = """[i for i in (1, 2) ]"""
        self.check(b, a)
    def test_3(self):
        b = """[i for i in 1, 2 if i]"""
        a = """[i for i in (1, 2) if i]"""
        self.check(b, a)
    def test_4(self):
        b = """[i for i in 1, 2 ]"""
        a = """[i for i in (1, 2) ]"""
        self.check(b, a)
    def test_5(self):
        # Generator expressions get the same treatment.
        b = """(i for i in 1, 2)"""
        a = """(i for i in (1, 2))"""
        self.check(b, a)
    def test_6(self):
        b = """(i for i in 1 ,2 if i)"""
        a = """(i for i in (1 ,2) if i)"""
        self.check(b, a)
    def test_unchanged_0(self):
        s = """[i for i in (1, 2)]"""
        self.unchanged(s)
    def test_unchanged_1(self):
        s = """[i for i in foo()]"""
        self.unchanged(s)
    def test_unchanged_2(self):
        s = """[i for i in (1, 2) if nothing]"""
        self.unchanged(s)
    def test_unchanged_3(self):
        s = """(i for i in (1, 2))"""
        self.unchanged(s)
    def test_unchanged_4(self):
        s = """[i for i in m]"""
        self.unchanged(s)
class Test_metaclass(FixerTestCase):
    """Tests for the ``metaclass`` fixer, which moves a class-body
    ``__metaclass__ = Meta`` assignment into a ``metaclass=Meta`` keyword in
    the class header, inserting ``pass`` if the body would become empty."""
    fixer = "metaclass"
    def test_unchanged(self):
        self.unchanged("class X(): pass")
        self.unchanged("class X(object): pass")
        self.unchanged("class X(object1, object2): pass")
        self.unchanged("class X(object1, object2, object3): pass")
        self.unchanged("class X(metaclass=Meta): pass")
        self.unchanged("class X(b, arg=23, metclass=Meta): pass")
        self.unchanged("class X(b, arg=23, metaclass=Meta, other=42): pass")
        s = """
        class X:
            def __metaclass__(self): pass
        """
        self.unchanged(s)
        s = """
        class X:
            a[23] = 74
        """
        self.unchanged(s)
    def test_comments(self):
        b = """
        class X:
            # hi
            __metaclass__ = AppleMeta
        """
        a = """
        class X(metaclass=AppleMeta):
            # hi
            pass
        """
        self.check(b, a)
        b = """
        class X:
            __metaclass__ = Meta
            # Bedtime!
        """
        a = """
        class X(metaclass=Meta):
            pass
            # Bedtime!
        """
        self.check(b, a)
    def test_meta(self):
        # no-parent class, odd body
        b = """
        class X():
            __metaclass__ = Q
            pass
        """
        a = """
        class X(metaclass=Q):
            pass
        """
        self.check(b, a)
        # one parent class, no body
        b = """class X(object): __metaclass__ = Q"""
        a = """class X(object, metaclass=Q): pass"""
        self.check(b, a)
        # one parent, simple body
        b = """
        class X(object):
            __metaclass__ = Meta
            bar = 7
        """
        a = """
        class X(object, metaclass=Meta):
            bar = 7
        """
        self.check(b, a)
        b = """
        class X:
            __metaclass__ = Meta; x = 4; g = 23
        """
        a = """
        class X(metaclass=Meta):
            x = 4; g = 23
        """
        self.check(b, a)
        # one parent, simple body, __metaclass__ last
        b = """
        class X(object):
            bar = 7
            __metaclass__ = Meta
        """
        a = """
        class X(object, metaclass=Meta):
            bar = 7
        """
        self.check(b, a)
        # redefining __metaclass__
        b = """
        class X():
            __metaclass__ = A
            __metaclass__ = B
            bar = 7
        """
        a = """
        class X(metaclass=B):
            bar = 7
        """
        self.check(b, a)
        # multiple inheritance, simple body
        b = """
        class X(clsA, clsB):
            __metaclass__ = Meta
            bar = 7
        """
        a = """
        class X(clsA, clsB, metaclass=Meta):
            bar = 7
        """
        self.check(b, a)
        # keywords in the class statement
        b = """class m(a, arg=23): __metaclass__ = Meta"""
        a = """class m(a, arg=23, metaclass=Meta): pass"""
        self.check(b, a)
        b = """
        class X(expression(2 + 4)):
            __metaclass__ = Meta
        """
        a = """
        class X(expression(2 + 4), metaclass=Meta):
            pass
        """
        self.check(b, a)
        b = """
        class X(expression(2 + 4), x**4):
            __metaclass__ = Meta
        """
        a = """
        class X(expression(2 + 4), x**4, metaclass=Meta):
            pass
        """
        self.check(b, a)
        b = """
        class X:
            __metaclass__ = Meta
            save.py = 23
        """
        a = """
        class X(metaclass=Meta):
            save.py = 23
        """
        self.check(b, a)
class Test_getcwdu(FixerTestCase):
    """Tests for the ``getcwdu`` fixer (``os.getcwdu`` -> ``os.getcwd``)."""
    fixer = "getcwdu"
    def test_basic(self):
        b = """os.getcwdu"""
        a = """os.getcwd"""
        self.check(b, a)
        b = """os.getcwdu()"""
        a = """os.getcwd()"""
        self.check(b, a)
        b = """meth = os.getcwdu"""
        a = """meth = os.getcwd"""
        self.check(b, a)
        b = """os.getcwdu(args)"""
        a = """os.getcwd(args)"""
        self.check(b, a)
    def test_comment(self):
        b = """os.getcwdu() # Foo"""
        a = """os.getcwd() # Foo"""
        self.check(b, a)
    def test_unchanged(self):
        s = """os.getcwd()"""
        self.unchanged(s)
        s = """getcwdu()"""
        self.unchanged(s)
        s = """os.getcwdb()"""
        self.unchanged(s)
    def test_indentation(self):
        b = """
            if 1:
                os.getcwdu()
            """
        a = """
            if 1:
                os.getcwd()
            """
        self.check(b, a)
    def test_multilation(self):
        # NOTE(review): method name is a long-standing typo for "mutilation";
        # kept as-is so test discovery/reporting is unchanged.
        # Oddly-spaced attribute access must be preserved.
        b = """os .getcwdu()"""
        a = """os .getcwd()"""
        self.check(b, a)
        b = """os. getcwdu"""
        a = """os. getcwd"""
        self.check(b, a)
        b = """os.getcwdu ( )"""
        a = """os.getcwd ( )"""
        self.check(b, a)
class Test_operator(FixerTestCase):
    """Tests for the ``operator`` fixer. Qualified calls are rewritten to
    their Python 3 equivalents (``callable``, ``operator.contains``,
    ``isinstance`` checks, ``operator.mul``/``imul``); bare calls only emit a
    "You should use ..." warning because the fixer cannot rename them safely."""
    fixer = "operator"
    def test_operator_isCallable(self):
        b = "operator.isCallable(x)"
        a = "callable(x)"
        self.check(b, a)
    def test_operator_sequenceIncludes(self):
        b = "operator.sequenceIncludes(x, y)"
        a = "operator.contains(x, y)"
        self.check(b, a)
        b = "operator .sequenceIncludes(x, y)"
        a = "operator .contains(x, y)"
        self.check(b, a)
        b = "operator. sequenceIncludes(x, y)"
        a = "operator. contains(x, y)"
        self.check(b, a)
    def test_operator_isSequenceType(self):
        b = "operator.isSequenceType(x)"
        a = "import collections.abc\nisinstance(x, collections.abc.Sequence)"
        self.check(b, a)
    def test_operator_isMappingType(self):
        b = "operator.isMappingType(x)"
        a = "import collections.abc\nisinstance(x, collections.abc.Mapping)"
        self.check(b, a)
    def test_operator_isNumberType(self):
        b = "operator.isNumberType(x)"
        a = "import numbers\nisinstance(x, numbers.Number)"
        self.check(b, a)
    def test_operator_repeat(self):
        b = "operator.repeat(x, n)"
        a = "operator.mul(x, n)"
        self.check(b, a)
        b = "operator .repeat(x, n)"
        a = "operator .mul(x, n)"
        self.check(b, a)
        b = "operator. repeat(x, n)"
        a = "operator. mul(x, n)"
        self.check(b, a)
    def test_operator_irepeat(self):
        b = "operator.irepeat(x, n)"
        a = "operator.imul(x, n)"
        self.check(b, a)
        b = "operator .irepeat(x, n)"
        a = "operator .imul(x, n)"
        self.check(b, a)
        b = "operator. irepeat(x, n)"
        a = "operator. imul(x, n)"
        self.check(b, a)
    def test_bare_isCallable(self):
        s = "isCallable(x)"
        t = "You should use 'callable(x)' here."
        self.warns_unchanged(s, t)
    def test_bare_sequenceIncludes(self):
        s = "sequenceIncludes(x, y)"
        t = "You should use 'operator.contains(x, y)' here."
        self.warns_unchanged(s, t)
    def test_bare_operator_isSequenceType(self):
        s = "isSequenceType(z)"
        t = "You should use 'isinstance(z, collections.abc.Sequence)' here."
        self.warns_unchanged(s, t)
    def test_bare_operator_isMappingType(self):
        s = "isMappingType(x)"
        t = "You should use 'isinstance(x, collections.abc.Mapping)' here."
        self.warns_unchanged(s, t)
    def test_bare_operator_isNumberType(self):
        s = "isNumberType(y)"
        t = "You should use 'isinstance(y, numbers.Number)' here."
        self.warns_unchanged(s, t)
    def test_bare_operator_repeat(self):
        s = "repeat(x, n)"
        t = "You should use 'operator.mul(x, n)' here."
        self.warns_unchanged(s, t)
    def test_bare_operator_irepeat(self):
        s = "irepeat(y, 187)"
        t = "You should use 'operator.imul(y, 187)' here."
        self.warns_unchanged(s, t)
class Test_exitfunc(FixerTestCase):
    """Tests for the ``exitfunc`` fixer: ``sys.exitfunc = f`` becomes
    ``atexit.register(f)``, with an ``atexit`` import added next to the
    existing ``sys`` import (or a warning when no ``sys`` import is found)."""
    fixer = "exitfunc"
    def test_simple(self):
        b = """
            import sys
            sys.exitfunc = my_atexit
            """
        a = """
            import sys
            import atexit
            atexit.register(my_atexit)
            """
        self.check(b, a)
    def test_names_import(self):
        # atexit is appended to an existing multi-name import statement.
        b = """
            import sys, crumbs
            sys.exitfunc = my_func
            """
        a = """
            import sys, crumbs, atexit
            atexit.register(my_func)
            """
        self.check(b, a)
    def test_complex_expression(self):
        b = """
            import sys
            sys.exitfunc = do(d)/a()+complex(f=23, g=23)*expression
            """
        a = """
            import sys
            import atexit
            atexit.register(do(d)/a()+complex(f=23, g=23)*expression)
            """
        self.check(b, a)
    def test_comments(self):
        b = """
            import sys # Foo
            sys.exitfunc = f # Blah
            """
        a = """
            import sys
            import atexit # Foo
            atexit.register(f) # Blah
            """
        self.check(b, a)
        b = """
            import apples, sys, crumbs, larry # Pleasant comments
            sys.exitfunc = func
            """
        a = """
            import apples, sys, crumbs, larry, atexit # Pleasant comments
            atexit.register(func)
            """
        self.check(b, a)
    def test_in_a_function(self):
        # The import is added at module level even when the assignment
        # happens inside a function body.
        b = """
            import sys
            def f():
                sys.exitfunc = func
            """
        a = """
            import sys
            import atexit
            def f():
                atexit.register(func)
            """
        self.check(b, a)
    def test_no_sys_import(self):
        b = """sys.exitfunc = f"""
        a = """atexit.register(f)"""
        msg = (
            "Can't find sys import; Please add an atexit import at the "
            "top of your file."
        )
        self.warns(b, a, msg)
    def test_unchanged(self):
        s = """f(sys.exitfunc)"""
        self.unchanged(s)
class Test_asserts(FixerTestCase):
    """Tests for the ``asserts`` fixer, which renames deprecated unittest
    assertion aliases (``assertEquals``, ``failUnless``, ...) to their
    canonical spellings."""
    fixer = "asserts"
    def test_deprecated_names(self):
        # (deprecated form, canonical form) pairs.
        tests = [
            ("self.assert_(True)", "self.assertTrue(True)"),
            ("self.assertEquals(2, 2)", "self.assertEqual(2, 2)"),
            ("self.assertNotEquals(2, 3)", "self.assertNotEqual(2, 3)"),
            ("self.assertAlmostEquals(2, 3)", "self.assertAlmostEqual(2, 3)"),
            ("self.assertNotAlmostEquals(2, 8)", "self.assertNotAlmostEqual(2, 8)"),
            ("self.failUnlessEqual(2, 2)", "self.assertEqual(2, 2)"),
            ("self.failIfEqual(2, 3)", "self.assertNotEqual(2, 3)"),
            ("self.failUnlessAlmostEqual(2, 3)", "self.assertAlmostEqual(2, 3)"),
            ("self.failIfAlmostEqual(2, 8)", "self.assertNotAlmostEqual(2, 8)"),
            ("self.failUnless(True)", "self.assertTrue(True)"),
            ("self.failUnlessRaises(foo)", "self.assertRaises(foo)"),
            ("self.failIf(False)", "self.assertFalse(False)"),
        ]
        for b, a in tests:
            self.check(b, a)
    def test_variants(self):
        # The rename also applies to bound references, keyword args,
        # comments, odd spacing, indentation, and with-statements.
        b = "eq = self.assertEquals"
        a = "eq = self.assertEqual"
        self.check(b, a)
        b = 'self.assertEquals(2, 3, msg="fail")'
        a = 'self.assertEqual(2, 3, msg="fail")'
        self.check(b, a)
        b = 'self.assertEquals(2, 3, msg="fail") # foo'
        a = 'self.assertEqual(2, 3, msg="fail") # foo'
        self.check(b, a)
        b = "self.assertEquals (2, 3)"
        a = "self.assertEqual (2, 3)"
        self.check(b, a)
        b = " self.assertEquals (2, 3)"
        a = " self.assertEqual (2, 3)"
        self.check(b, a)
        b = "with self.failUnlessRaises(Explosion): explode()"
        a = "with self.assertRaises(Explosion): explode()"
        self.check(b, a)
        b = "with self.failUnlessRaises(Explosion) as cm: explode()"
        a = "with self.assertRaises(Explosion) as cm: explode()"
        self.check(b, a)
    def test_unchanged(self):
        # Names that merely start with a deprecated alias are not touched.
        self.unchanged("self.assertEqualsOnSaturday")
        self.unchanged("self.assertEqualsOnSaturday(3, 5)")
| 26.088917 | 96 | 0.433951 |
import os
from itertools import chain
from operator import itemgetter
from fissix import pygram, fixer_util
from fissix.tests import support
class FixerTestCase(support.TestCase):
    """Base class for fixer tests.

    Subclasses set ``fixer`` to a fixer name; the helpers below run the
    refactoring engine over small source snippets and compare the result
    against the expected output."""
    def setUp(self, fix_list=None, fixer_pkg="fissix", options=None):
        """Build a refactorer for *fix_list* (default: just ``self.fixer``)
        and route every fixer's warnings into ``self.fixer_log``."""
        if fix_list is None:
            fix_list = [self.fixer]
        self.refactor = support.get_refactorer(fixer_pkg, fix_list, options)
        self.fixer_log = []
        self.filename = "<string>"
        for fixer in chain(self.refactor.pre_order, self.refactor.post_order):
            fixer.log = self.fixer_log
    def _check(self, before, after):
        """Refactor *before* and assert the result equals *after*; returns
        the refactored tree for further inspection."""
        before = support.reformat(before)
        after = support.reformat(after)
        tree = self.refactor.refactor_string(before, self.filename)
        self.assertEqual(after, str(tree))
        return tree
    def check(self, before, after, ignore_warnings=False):
        """Assert *before* is transformed into *after* (and, unless
        *ignore_warnings*, that no fixer warnings were emitted)."""
        tree = self._check(before, after)
        self.assertTrue(tree.was_changed)
        if not ignore_warnings:
            self.assertEqual(self.fixer_log, [])
    def warns(self, before, after, message, unchanged=False):
        """Assert the transform produces *after* and logs *message*."""
        tree = self._check(before, after)
        self.assertIn(message, "".join(self.fixer_log))
        if not unchanged:
            self.assertTrue(tree.was_changed)
    def warns_unchanged(self, before, message):
        """Assert *before* is left alone but *message* is warned."""
        self.warns(before, before, message, unchanged=True)
    def unchanged(self, before, ignore_warnings=False):
        """Assert the fixer leaves *before* untouched."""
        self._check(before, before)
        if not ignore_warnings:
            self.assertEqual(self.fixer_log, [])
    def assert_runs_after(self, *names):
        """Assert ``self.fixer`` is ordered after every fixer in *names*
        (i.e. it comes last in the computed pre/post run order)."""
        fixes = [self.fixer]
        fixes.extend(names)
        r = support.get_refactorer("fissix", fixes)
        (pre, post) = r.get_fixers()
        n = "fix_" + self.fixer
        if post and post[-1].__class__.__module__.endswith(n):
            return
        if pre and pre[-1].__class__.__module__.endswith(n) and not post:
            # We're the last in pre and post is empty
            return
        self.fail(
            "Fixer run order (%s) is incorrect; %s should be last."
            % (", ".join([x.__class__.__module__ for x in (pre + post)]), n)
        )
class Test_ne(FixerTestCase):
    """Tests for the ``ne`` fixer, which rewrites the removed ``<>``
    inequality operator as ``!=``."""

    fixer = "ne"

    def test_basic(self):
        before = """if x <> y:
            pass"""
        after = """if x != y:
            pass"""
        self.check(before, after)

    def test_no_spaces(self):
        before = """if x<>y:
            pass"""
        after = """if x!=y:
            pass"""
        self.check(before, after)

    def test_chained(self):
        before = """if x<>y<>z:
            pass"""
        after = """if x!=y!=z:
            pass"""
        self.check(before, after)
class Test_has_key(FixerTestCase):
    """Tests for the ``has_key`` fixer (``d.has_key(k)`` -> ``k in d``),
    including the parenthesization needed when the result feeds into another
    expression (attribute access, ``**``, ``not``, ...)."""
    fixer = "has_key"
    def test_1(self):
        b = """x = d.has_key("x") or d.has_key("y")"""
        a = """x = "x" in d or "y" in d"""
        self.check(b, a)
    def test_2(self):
        b = """x = a.b.c.d.has_key("x") ** 3"""
        a = """x = ("x" in a.b.c.d) ** 3"""
        self.check(b, a)
    def test_3(self):
        b = """x = a.b.has_key(1 + 2).__repr__()"""
        a = """x = (1 + 2 in a.b).__repr__()"""
        self.check(b, a)
    def test_4(self):
        b = """x = a.b.has_key(1 + 2).__repr__() ** -3 ** 4"""
        a = """x = (1 + 2 in a.b).__repr__() ** -3 ** 4"""
        self.check(b, a)
    def test_5(self):
        b = """x = a.has_key(f or g)"""
        a = """x = (f or g) in a"""
        self.check(b, a)
    def test_6(self):
        b = """x = a + b.has_key(c)"""
        a = """x = a + (c in b)"""
        self.check(b, a)
    def test_7(self):
        b = """x = a.has_key(lambda: 12)"""
        a = """x = (lambda: 12) in a"""
        self.check(b, a)
    def test_8(self):
        b = """x = a.has_key(a for a in b)"""
        a = """x = (a for a in b) in a"""
        self.check(b, a)
    def test_9(self):
        # ``not ... in`` collapses to the idiomatic ``not in``.
        b = """if not a.has_key(b): pass"""
        a = """if b not in a: pass"""
        self.check(b, a)
    def test_10(self):
        b = """if not a.has_key(b).__repr__(): pass"""
        a = """if not (b in a).__repr__(): pass"""
        self.check(b, a)
    def test_11(self):
        b = """if not a.has_key(b) ** 2: pass"""
        a = """if not (b in a) ** 2: pass"""
        self.check(b, a)
class Test_apply(FixerTestCase):
    """Tests for the ``apply`` fixer (``apply(f, args, kwds)`` ->
    ``f(*args, **kwds)``). Calls that don't match the two/three positional
    argument shape are left unchanged."""
    fixer = "apply"
    def test_1(self):
        b = """x = apply(f, g + h)"""
        a = """x = f(*g + h)"""
        self.check(b, a)
    def test_2(self):
        b = """y = apply(f, g, h)"""
        a = """y = f(*g, **h)"""
        self.check(b, a)
    def test_3(self):
        b = """z = apply(fs[0], g or h, h or g)"""
        a = """z = fs[0](*g or h, **h or g)"""
        self.check(b, a)
    def test_4(self):
        b = """apply(f, (x, y) + t)"""
        a = """f(*(x, y) + t)"""
        self.check(b, a)
    def test_5(self):
        b = """apply(f, args,)"""
        a = """f(*args)"""
        self.check(b, a)
    def test_6(self):
        b = """apply(f, args, kwds,)"""
        a = """f(*args, **kwds)"""
        self.check(b, a)
    def test_complex_1(self):
        # A non-atomic callee must be parenthesized before the call.
        b = """x = apply(f+g, args)"""
        a = """x = (f+g)(*args)"""
        self.check(b, a)
    def test_complex_2(self):
        b = """x = apply(f*g, args)"""
        a = """x = (f*g)(*args)"""
        self.check(b, a)
    def test_complex_3(self):
        b = """x = apply(f**g, args)"""
        a = """x = (f**g)(*args)"""
        self.check(b, a)
    def test_dotted_name(self):
        b = """x = apply(f.g, args)"""
        a = """x = f.g(*args)"""
        self.check(b, a)
    def test_subscript(self):
        b = """x = apply(f[x], args)"""
        a = """x = f[x](*args)"""
        self.check(b, a)
    def test_call(self):
        b = """x = apply(f(), args)"""
        a = """x = f()(*args)"""
        self.check(b, a)
    def test_extreme(self):
        b = """x = apply(a.b.c.d.e.f, args, kwds)"""
        a = """x = a.b.c.d.e.f(*args, **kwds)"""
        self.check(b, a)
    def test_weird_comments(self):
        b = """apply( # foo
          f, # bar
          args)"""
        a = """f(*args)"""
        self.check(b, a)
    def test_unchanged_1(self):
        s = """apply()"""
        self.unchanged(s)
    def test_unchanged_2(self):
        s = """apply(f)"""
        self.unchanged(s)
    def test_unchanged_3(self):
        s = """apply(f,)"""
        self.unchanged(s)
    def test_unchanged_4(self):
        s = """apply(f, args, kwds, extras)"""
        self.unchanged(s)
    def test_unchanged_5(self):
        s = """apply(f, *args, **kwds)"""
        self.unchanged(s)
    def test_unchanged_6(self):
        s = """apply(f, *args)"""
        self.unchanged(s)
    def test_unchanged_6b(self):
        s = """apply(f, **kwds)"""
        self.unchanged(s)
    def test_unchanged_7(self):
        s = """apply(func=f, args=args, kwds=kwds)"""
        self.unchanged(s)
    def test_unchanged_8(self):
        s = """apply(f, args=args, kwds=kwds)"""
        self.unchanged(s)
    def test_unchanged_9(self):
        s = """apply(f, args, kwds=kwds)"""
        self.unchanged(s)
    def test_space_1(self):
        # NOTE(review): in these two tests "a" holds the input and "b" the
        # expected output — reversed naming relative to the rest of the
        # class; self.check(a, b) is still called as (before, after).
        a = """apply( f, args, kwds)"""
        b = """f(*args, **kwds)"""
        self.check(a, b)
    def test_space_2(self):
        a = """apply( f ,args,kwds )"""
        b = """f(*args, **kwds)"""
        self.check(a, b)
class Test_reload(FixerTestCase):
    """Tests for the ``reload`` fixer: the builtin ``reload(m)`` becomes
    ``importlib.reload(m)`` with ``import importlib`` prepended; calls that
    don't match the one-positional-argument shape are left alone."""
    fixer = "reload"
    def test(self):
        b = """reload(a)"""
        a = """import importlib\nimportlib.reload(a)"""
        self.check(b, a)
    def test_comment(self):
        b = """reload( a ) # comment"""
        a = """import importlib\nimportlib.reload( a ) # comment"""
        self.check(b, a)
        b = """reload( a ) # comment"""
        a = """import importlib\nimportlib.reload( a ) # comment"""
        self.check(b, a)
    def test_space(self):
        # Whitespace inside the call parentheses is preserved.
        b = """reload( a )"""
        a = """import importlib\nimportlib.reload( a )"""
        self.check(b, a)
        b = """reload( a)"""
        a = """import importlib\nimportlib.reload( a)"""
        self.check(b, a)
        b = """reload(a )"""
        a = """import importlib\nimportlib.reload(a )"""
        self.check(b, a)
    def test_unchanged(self):
        s = """reload(a=1)"""
        self.unchanged(s)
        s = """reload(f, g)"""
        self.unchanged(s)
        s = """reload(f, *h)"""
        self.unchanged(s)
        s = """reload(f, *h, **i)"""
        self.unchanged(s)
        s = """reload(f, **i)"""
        self.unchanged(s)
        s = """reload(*h, **i)"""
        self.unchanged(s)
        s = """reload(*h)"""
        self.unchanged(s)
        s = """reload(**i)"""
        self.unchanged(s)
        s = """reload()"""
        self.unchanged(s)
class Test_intern(FixerTestCase):
    """Tests for the ``intern`` fixer: the builtin ``intern(s)`` becomes
    ``sys.intern(s)`` with ``import sys`` prepended; other call shapes are
    left unchanged."""
    fixer = "intern"
    def test_prefix_preservation(self):
        b = """x = intern( a )"""
        a = """import sys\nx = sys.intern( a )"""
        self.check(b, a)
        b = """y = intern("b" # test
            )"""
        a = """import sys\ny = sys.intern("b" # test
            )"""
        self.check(b, a)
        b = """z = intern(a+b+c.d, )"""
        a = """import sys\nz = sys.intern(a+b+c.d, )"""
        self.check(b, a)
    def test(self):
        b = """x = intern(a)"""
        a = """import sys\nx = sys.intern(a)"""
        self.check(b, a)
        b = """z = intern(a+b+c.d,)"""
        a = """import sys\nz = sys.intern(a+b+c.d,)"""
        self.check(b, a)
        b = """intern("y%s" % 5).replace("y", "")"""
        a = """import sys\nsys.intern("y%s" % 5).replace("y", "")"""
        self.check(b, a)
    def test_unchanged(self):
        s = """intern(a=1)"""
        self.unchanged(s)
        s = """intern(f, g)"""
        self.unchanged(s)
        s = """intern(*h)"""
        self.unchanged(s)
        s = """intern(**i)"""
        self.unchanged(s)
        s = """intern()"""
        self.unchanged(s)
class Test_reduce(FixerTestCase):
    """Tests for the ``reduce`` fixer, which prepends
    ``from functools import reduce`` when the builtin is called."""

    fixer = "reduce"

    def test_simple_call(self):
        before = "reduce(a, b, c)"
        after = "from functools import reduce\nreduce(a, b, c)"
        self.check(before, after)

    def test_bug_7253(self):
        # Named after the tracker issue it reproduces: the import must be
        # added even when reduce() appears inside a function body.
        before = "def x(arg): reduce(sum, [])"
        after = "from functools import reduce\ndef x(arg): reduce(sum, [])"
        self.check(before, after)

    def test_call_with_lambda(self):
        before = "reduce(lambda x, y: x + y, seq)"
        after = "from functools import reduce\nreduce(lambda x, y: x + y, seq)"
        self.check(before, after)

    def test_unchanged(self):
        # Keyword, star, wrong-arity, and empty calls are not the builtin's
        # signature, so the fixer leaves them alone.
        for snippet in (
            "reduce(a)",
            "reduce(a, b=42)",
            "reduce(a, b, c, d)",
            "reduce(**c)",
            "reduce()",
        ):
            self.unchanged(snippet)
class Test_print(FixerTestCase):
    """Tests for the ``print`` fixer: the Python 2 print statement becomes a
    ``print(...)`` call, a trailing comma becomes ``end=' '``, and
    ``print >>stream`` becomes ``file=stream``."""
    fixer = "print"
    def test_prefix_preservation(self):
        b = """print 1, 1+1, 1+1+1"""
        a = """print(1, 1+1, 1+1+1)"""
        self.check(b, a)
    def test_idempotency(self):
        s = """print()"""
        self.unchanged(s)
        s = """print('')"""
        self.unchanged(s)
    def test_idempotency_print_as_function(self):
        # With the no-print-statement grammar, print() calls stay untouched.
        self.refactor.driver.grammar = pygram.python_grammar_no_print_statement
        s = """print(1, 1+1, 1+1+1)"""
        self.unchanged(s)
        s = """print()"""
        self.unchanged(s)
        s = """print('')"""
        self.unchanged(s)
    def test_1(self):
        b = """print 1, 1+1, 1+1+1"""
        a = """print(1, 1+1, 1+1+1)"""
        self.check(b, a)
    def test_2(self):
        b = """print 1, 2"""
        a = """print(1, 2)"""
        self.check(b, a)
    def test_3(self):
        b = """print"""
        a = """print()"""
        self.check(b, a)
    def test_4(self):
        b = """print whatever; print"""
        a = """print(whatever); print()"""
        self.check(b, a)
    def test_5(self):
        b = """print; print whatever;"""
        a = """print(); print(whatever);"""
        self.check(b, a)
    def test_tuple(self):
        # A parenthesized tuple operand must stay a single argument.
        b = """print (a, b, c)"""
        a = """print((a, b, c))"""
        self.check(b, a)
    def test_trailing_comma_1(self):
        b = """print 1, 2, 3,"""
        a = """print(1, 2, 3, end=' ')"""
        self.check(b, a)
    def test_trailing_comma_2(self):
        b = """print 1, 2,"""
        a = """print(1, 2, end=' ')"""
        self.check(b, a)
    def test_trailing_comma_3(self):
        b = """print 1,"""
        a = """print(1, end=' ')"""
        self.check(b, a)
    def test_vargs_without_trailing_comma(self):
        b = """print >>sys.stderr, 1, 2, 3"""
        a = """print(1, 2, 3, file=sys.stderr)"""
        self.check(b, a)
    def test_with_trailing_comma(self):
        b = """print >>sys.stderr, 1, 2,"""
        a = """print(1, 2, end=' ', file=sys.stderr)"""
        self.check(b, a)
    def test_no_trailing_comma(self):
        b = """print >>sys.stderr, 1+1"""
        a = """print(1+1, file=sys.stderr)"""
        self.check(b, a)
    def test_spaces_before_file(self):
        b = """print >> sys.stderr"""
        a = """print(file=sys.stderr)"""
        self.check(b, a)
    def test_with_future_print_function(self):
        # A print_function future-import disables the statement rewrite for
        # calls, but a bare print statement is still converted.
        s = "from __future__ import print_function\n" "print('Hai!', end=' ')"
        self.unchanged(s)
        b = "print 'Hello, world!'"
        a = "print('Hello, world!')"
        self.check(b, a)
class Test_exec(FixerTestCase):
    """Tests for the ``exec`` fixer: the Python 2 ``exec ... in g, l``
    statement becomes the ``exec(..., g, l)`` call."""
    fixer = "exec"
    def test_prefix_preservation(self):
        b = """ exec code in ns1, ns2"""
        a = """ exec(code, ns1, ns2)"""
        self.check(b, a)
    def test_basic(self):
        b = """exec code"""
        a = """exec(code)"""
        self.check(b, a)
    def test_with_globals(self):
        b = """exec code in ns"""
        a = """exec(code, ns)"""
        self.check(b, a)
    def test_with_globals_locals(self):
        b = """exec code in ns1, ns2"""
        a = """exec(code, ns1, ns2)"""
        self.check(b, a)
    def test_complex_1(self):
        b = """exec (a.b()) in ns"""
        a = """exec((a.b()), ns)"""
        self.check(b, a)
    def test_complex_2(self):
        b = """exec a.b() + c in ns"""
        a = """exec(a.b() + c, ns)"""
        self.check(b, a)
    def test_unchanged_1(self):
        s = """exec(code)"""
        self.unchanged(s)
    def test_unchanged_2(self):
        s = """exec (code)"""
        self.unchanged(s)
    def test_unchanged_3(self):
        s = """exec(code, ns)"""
        self.unchanged(s)
    def test_unchanged_4(self):
        s = """exec(code, ns1, ns2)"""
        self.unchanged(s)
class Test_repr(FixerTestCase):
    """Tests for the ``repr`` fixer, which converts backtick expressions
    (`` `x` ``) into ``repr(...)`` calls, parenthesizing tuples and handling
    nested backticks."""

    fixer = "repr"

    def test_prefix_preservation(self):
        before = """x = `1 + 2`"""
        after = """x = repr(1 + 2)"""
        self.check(before, after)

    def test_simple_1(self):
        before = """x = `1 + 2`"""
        after = """x = repr(1 + 2)"""
        self.check(before, after)

    def test_simple_2(self):
        before = """y = `x`"""
        after = """y = repr(x)"""
        self.check(before, after)

    def test_complex(self):
        before = """z = `y`.__repr__()"""
        after = """z = repr(y).__repr__()"""
        self.check(before, after)

    def test_tuple(self):
        before = """x = `1, 2, 3`"""
        after = """x = repr((1, 2, 3))"""
        self.check(before, after)

    def test_nested(self):
        before = """x = `1 + `2``"""
        after = """x = repr(1 + repr(2))"""
        self.check(before, after)

    def test_nested_tuples(self):
        before = """x = `1, 2 + `3, 4``"""
        after = """x = repr((1, 2 + repr((3, 4))))"""
        self.check(before, after)
class Test_except(FixerTestCase):
    """Tests for the ``except`` fixer: ``except E, e:`` -> ``except E as e:``.

    Non-name targets (tuples, lists, subscripts, attributes) cannot appear
    after ``as``, so the fixer binds to a placeholder name
    (``xxx_todo_changeme``) and inserts an unpacking assignment into the body.
    """

    fixer = "except"

    def test_prefix_preservation(self):
        b = """
            try:
                pass
            except (RuntimeError, ImportError), e:
                pass"""
        a = """
            try:
                pass
            except (RuntimeError, ImportError) as e:
                pass"""
        self.check(b, a)

    def test_simple(self):
        b = """
            try:
                pass
            except Foo, e:
                pass"""
        a = """
            try:
                pass
            except Foo as e:
                pass"""
        self.check(b, a)

    def test_simple_no_space_before_target(self):
        b = """
            try:
                pass
            except Foo,e:
                pass"""
        a = """
            try:
                pass
            except Foo as e:
                pass"""
        self.check(b, a)

    def test_tuple_unpack(self):
        # Tuple target: fixer introduces a placeholder plus `.args` unpacking.
        b = """
            def foo():
                try:
                    pass
                except Exception, (f, e):
                    pass
                except ImportError, e:
                    pass"""
        a = """
            def foo():
                try:
                    pass
                except Exception as xxx_todo_changeme:
                    (f, e) = xxx_todo_changeme.args
                    pass
                except ImportError as e:
                    pass"""
        self.check(b, a)

    def test_multi_class(self):
        b = """
            try:
                pass
            except (RuntimeError, ImportError), e:
                pass"""
        a = """
            try:
                pass
            except (RuntimeError, ImportError) as e:
                pass"""
        self.check(b, a)

    def test_list_unpack(self):
        b = """
            try:
                pass
            except Exception, [a, b]:
                pass"""
        a = """
            try:
                pass
            except Exception as xxx_todo_changeme:
                [a, b] = xxx_todo_changeme.args
                pass"""
        self.check(b, a)

    def test_weird_target_1(self):
        # Subscript target: assign the caught exception object itself.
        b = """
            try:
                pass
            except Exception, d[5]:
                pass"""
        a = """
            try:
                pass
            except Exception as xxx_todo_changeme:
                d[5] = xxx_todo_changeme
                pass"""
        self.check(b, a)

    def test_weird_target_2(self):
        b = """
            try:
                pass
            except Exception, a.foo:
                pass"""
        a = """
            try:
                pass
            except Exception as xxx_todo_changeme:
                a.foo = xxx_todo_changeme
                pass"""
        self.check(b, a)

    def test_weird_target_3(self):
        b = """
            try:
                pass
            except Exception, a().foo:
                pass"""
        a = """
            try:
                pass
            except Exception as xxx_todo_changeme:
                a().foo = xxx_todo_changeme
                pass"""
        self.check(b, a)

    def test_bare_except(self):
        # A trailing bare `except:` clause must be left untouched.
        b = """
            try:
                pass
            except Exception, a:
                pass
            except:
                pass"""
        a = """
            try:
                pass
            except Exception as a:
                pass
            except:
                pass"""
        self.check(b, a)

    def test_bare_except_and_else_finally(self):
        b = """
            try:
                pass
            except Exception, a:
                pass
            except:
                pass
            else:
                pass
            finally:
                pass"""
        a = """
            try:
                pass
            except Exception as a:
                pass
            except:
                pass
            else:
                pass
            finally:
                pass"""
        self.check(b, a)

    def test_multi_fixed_excepts_before_bare_except(self):
        b = """
            try:
                pass
            except TypeError, b:
                pass
            except Exception, a:
                pass
            except:
                pass"""
        a = """
            try:
                pass
            except TypeError as b:
                pass
            except Exception as a:
                pass
            except:
                pass"""
        self.check(b, a)

    def test_one_line_suites(self):
        # Single-line try/except/else/finally suites in all combinations.
        b = """
            try: raise TypeError
            except TypeError, e:
                pass
            """
        a = """
            try: raise TypeError
            except TypeError as e:
                pass
            """
        self.check(b, a)
        b = """
            try:
                raise TypeError
            except TypeError, e: pass
            """
        a = """
            try:
                raise TypeError
            except TypeError as e: pass
            """
        self.check(b, a)
        b = """
            try: raise TypeError
            except TypeError, e: pass
            """
        a = """
            try: raise TypeError
            except TypeError as e: pass
            """
        self.check(b, a)
        b = """
            try: raise TypeError
            except TypeError, e: pass
            else: function()
            finally: done()
            """
        a = """
            try: raise TypeError
            except TypeError as e: pass
            else: function()
            finally: done()
            """
        self.check(b, a)

    def test_unchanged_1(self):
        s = """
            try:
                pass
            except:
                pass"""
        self.unchanged(s)

    def test_unchanged_2(self):
        s = """
            try:
                pass
            except Exception:
                pass"""
        self.unchanged(s)

    def test_unchanged_3(self):
        s = """
            try:
                pass
            except (Exception, SystemExit):
                pass"""
        self.unchanged(s)
class Test_raise(FixerTestCase):
    """Tests for the ``raise`` fixer: 3-argument/2-argument raise forms.

    ``raise E, v`` becomes ``raise E(v)``; ``raise E, v, tb`` becomes
    ``raise E(v).with_traceback(tb)``; string exceptions only warn.
    """

    fixer = "raise"

    def test_basic(self):
        b = """raise Exception, 5"""
        a = """raise Exception(5)"""
        self.check(b, a)

    def test_prefix_preservation(self):
        b = """raise Exception,5"""
        a = """raise Exception(5)"""
        self.check(b, a)
        b = """raise   Exception,    5"""
        a = """raise   Exception(5)"""
        self.check(b, a)

    def test_with_comments(self):
        # Trailing comments in the fixture source must be preserved.
        b = """raise Exception, 5 # foo"""
        a = """raise Exception(5) # foo"""
        self.check(b, a)
        b = """raise E, (5, 6) % (a, b) # foo"""
        a = """raise E((5, 6) % (a, b)) # foo"""
        self.check(b, a)
        b = """def foo():
                    raise Exception, 5, 6 # foo"""
        a = """def foo():
                    raise Exception(5).with_traceback(6) # foo"""
        self.check(b, a)

    def test_None_value(self):
        # An explicit None value is dropped; only the traceback is chained.
        b = """raise Exception(5), None, tb"""
        a = """raise Exception(5).with_traceback(tb)"""
        self.check(b, a)

    def test_tuple_value(self):
        # A tuple value expands to positional constructor arguments.
        b = """raise Exception, (5, 6, 7)"""
        a = """raise Exception(5, 6, 7)"""
        self.check(b, a)

    def test_tuple_detection(self):
        b = """raise E, (5, 6) % (a, b)"""
        a = """raise E((5, 6) % (a, b))"""
        self.check(b, a)

    def test_tuple_exc_1(self):
        # Raising a tuple of exceptions raises its first (leftmost) leaf.
        b = """raise (((E1, E2), E3), E4), V"""
        a = """raise E1(V)"""
        self.check(b, a)

    def test_tuple_exc_2(self):
        b = """raise (E1, (E2, E3), E4), V"""
        a = """raise E1(V)"""
        self.check(b, a)

    def test_string_exc(self):
        s = """raise 'foo'"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")

    def test_string_exc_val(self):
        s = """raise "foo", 5"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")

    def test_string_exc_val_tb(self):
        s = """raise "foo", 5, 6"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")

    def test_tb_1(self):
        b = """def foo():
                    raise Exception, 5, 6"""
        a = """def foo():
                    raise Exception(5).with_traceback(6)"""
        self.check(b, a)

    def test_tb_2(self):
        b = """def foo():
                    a = 5
                    raise Exception, 5, 6
                    b = 6"""
        a = """def foo():
                    a = 5
                    raise Exception(5).with_traceback(6)
                    b = 6"""
        self.check(b, a)

    def test_tb_3(self):
        b = """def foo():
                    raise Exception,5,6"""
        a = """def foo():
                    raise Exception(5).with_traceback(6)"""
        self.check(b, a)

    def test_tb_4(self):
        b = """def foo():
                    a = 5
                    raise Exception,5,6
                    b = 6"""
        a = """def foo():
                    a = 5
                    raise Exception(5).with_traceback(6)
                    b = 6"""
        self.check(b, a)

    def test_tb_5(self):
        b = """def foo():
                    raise Exception, (5, 6, 7), 6"""
        a = """def foo():
                    raise Exception(5, 6, 7).with_traceback(6)"""
        self.check(b, a)

    def test_tb_6(self):
        b = """def foo():
                    a = 5
                    raise Exception, (5, 6, 7), 6
                    b = 6"""
        a = """def foo():
                    a = 5
                    raise Exception(5, 6, 7).with_traceback(6)
                    b = 6"""
        self.check(b, a)
class Test_throw(FixerTestCase):
    """Tests for the ``throw`` fixer: generator ``g.throw(E, v, tb)`` forms.

    Mirrors the ``raise`` fixer: value folds into the constructor call and a
    traceback argument becomes ``.with_traceback(tb)``.
    """

    fixer = "throw"

    def test_1(self):
        b = """g.throw(Exception, 5)"""
        a = """g.throw(Exception(5))"""
        self.check(b, a)

    def test_2(self):
        b = """g.throw(Exception,5)"""
        a = """g.throw(Exception(5))"""
        self.check(b, a)

    def test_3(self):
        b = """g.throw(Exception, (5, 6, 7))"""
        a = """g.throw(Exception(5, 6, 7))"""
        self.check(b, a)

    def test_4(self):
        b = """5 + g.throw(Exception, 5)"""
        a = """5 + g.throw(Exception(5))"""
        self.check(b, a)

    def test_warn_1(self):
        # String exceptions cannot be translated; the fixer only warns.
        s = """g.throw("foo")"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")

    def test_warn_2(self):
        s = """g.throw("foo", 5)"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")

    def test_warn_3(self):
        s = """g.throw("foo", 5, 6)"""
        self.warns_unchanged(s, "Python 3 does not support string exceptions")

    def test_untouched_1(self):
        s = """g.throw(Exception)"""
        self.unchanged(s)

    def test_untouched_2(self):
        s = """g.throw(Exception(5, 6))"""
        self.unchanged(s)

    def test_untouched_3(self):
        s = """5 + g.throw(Exception(5, 6))"""
        self.unchanged(s)

    def test_tb_1(self):
        b = """def foo():
                    g.throw(Exception, 5, 6)"""
        a = """def foo():
                    g.throw(Exception(5).with_traceback(6))"""
        self.check(b, a)

    def test_tb_2(self):
        b = """def foo():
                    a = 5
                    g.throw(Exception, 5, 6)
                    b = 6"""
        a = """def foo():
                    a = 5
                    g.throw(Exception(5).with_traceback(6))
                    b = 6"""
        self.check(b, a)

    def test_tb_3(self):
        b = """def foo():
                    g.throw(Exception,5,6)"""
        a = """def foo():
                    g.throw(Exception(5).with_traceback(6))"""
        self.check(b, a)

    def test_tb_4(self):
        b = """def foo():
                    a = 5
                    g.throw(Exception,5,6)
                    b = 6"""
        a = """def foo():
                    a = 5
                    g.throw(Exception(5).with_traceback(6))
                    b = 6"""
        self.check(b, a)

    def test_tb_5(self):
        b = """def foo():
                    g.throw(Exception, (5, 6, 7), 6)"""
        a = """def foo():
                    g.throw(Exception(5, 6, 7).with_traceback(6))"""
        self.check(b, a)

    def test_tb_6(self):
        b = """def foo():
                    a = 5
                    g.throw(Exception, (5, 6, 7), 6)
                    b = 6"""
        a = """def foo():
                    a = 5
                    g.throw(Exception(5, 6, 7).with_traceback(6))
                    b = 6"""
        self.check(b, a)

    def test_tb_7(self):
        b = """def foo():
                    a + g.throw(Exception, 5, 6)"""
        a = """def foo():
                    a + g.throw(Exception(5).with_traceback(6))"""
        self.check(b, a)

    def test_tb_8(self):
        b = """def foo():
                    a = 5
                    a + g.throw(Exception, 5, 6)
                    b = 6"""
        a = """def foo():
                    a = 5
                    a + g.throw(Exception(5).with_traceback(6))
                    b = 6"""
        self.check(b, a)
class Test_long(FixerTestCase):
    """Tests for the ``long`` fixer: the name ``long`` -> ``int``."""

    fixer = "long"

    def test_1(self):
        b = """x = long(x)"""
        a = """x = int(x)"""
        self.check(b, a)

    def test_2(self):
        b = """y = isinstance(x, long)"""
        a = """y = isinstance(x, int)"""
        self.check(b, a)

    def test_3(self):
        # Each occurrence is renamed independently, even if that yields
        # a redundant tuple; deduplication is the isinstance fixer's job.
        b = """z = type(x) in (int, long)"""
        a = """z = type(x) in (int, int)"""
        self.check(b, a)

    def test_unchanged(self):
        # `long` used as a binding name (assignment, def, class, parameter)
        # is a different symbol and must not be renamed.
        s = """long = True"""
        self.unchanged(s)
        s = """s.long = True"""
        self.unchanged(s)
        s = """def long(): pass"""
        self.unchanged(s)
        s = """class long(): pass"""
        self.unchanged(s)
        s = """def f(long): pass"""
        self.unchanged(s)
        s = """def f(g, long): pass"""
        self.unchanged(s)
        s = """def f(x, long=True): pass"""
        self.unchanged(s)

    def test_prefix_preservation(self):
        b = """x =   long(  x  )"""
        a = """x =   int(  x  )"""
        self.check(b, a)
class Test_execfile(FixerTestCase):
    """Tests for the ``execfile`` fixer.

    ``execfile(fn, ...)`` becomes
    ``exec(compile(open(fn, "rb").read(), fn, 'exec'), ...)``.
    """

    fixer = "execfile"

    def test_conversion(self):
        b = """execfile("fn")"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'))"""
        self.check(b, a)
        b = """execfile("fn", glob)"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), glob)"""
        self.check(b, a)
        b = """execfile("fn", glob, loc)"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), glob, loc)"""
        self.check(b, a)
        b = """execfile("fn", globals=glob)"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), globals=glob)"""
        self.check(b, a)
        b = """execfile("fn", locals=loc)"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), locals=loc)"""
        self.check(b, a)
        b = """execfile("fn", globals=glob, locals=loc)"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'), globals=glob, locals=loc)"""
        self.check(b, a)

    def test_spacing(self):
        # Whitespace inside the original call carries over into open(...).
        b = """execfile( "fn" )"""
        a = """exec(compile(open( "fn", "rb" ).read(), "fn", 'exec'))"""
        self.check(b, a)
        b = """execfile("fn",  globals = glob)"""
        a = """exec(compile(open("fn", "rb").read(), "fn", 'exec'),  globals = glob)"""
        self.check(b, a)
class Test_isinstance(FixerTestCase):
    """Tests for the ``isinstance`` fixer.

    Removes duplicate entries from the type tuple (typically produced by the
    ``long``/``unicode`` fixers), collapsing a single survivor to a bare name.
    """

    fixer = "isinstance"

    def test_remove_multiple_items(self):
        b = """isinstance(x, (int, int, int))"""
        a = """isinstance(x, int)"""
        self.check(b, a)
        b = """isinstance(x, (int, float, int, int, float))"""
        a = """isinstance(x, (int, float))"""
        self.check(b, a)
        b = """isinstance(x, (int, float, int, int, float, str))"""
        a = """isinstance(x, (int, float, str))"""
        self.check(b, a)
        # Call expressions are not assumed pure, so x() repeats are kept.
        b = """isinstance(foo() + bar(), (x(), y(), x(), int, int))"""
        a = """isinstance(foo() + bar(), (x(), y(), x(), int))"""
        self.check(b, a)

    def test_prefix_preservation(self):
        b = """if    isinstance(  foo(), (  bar, bar, baz )) : pass"""
        a = """if    isinstance(  foo(), (  bar, baz )) : pass"""
        self.check(b, a)

    def test_unchanged(self):
        self.unchanged("isinstance(x, (str, int))")
class Test_dict(FixerTestCase):
    """Tests for the ``dict`` fixer.

    ``d.keys()/items()/values()`` get wrapped in ``list(...)`` (unless in an
    iterating or consuming context), ``iter*`` methods become ``iter(...)``
    wrappers or plain view calls, and ``view*`` methods drop the prefix.
    """

    fixer = "dict"

    def test_prefix_preservation(self):
        b = "if   d. keys  (  )  : pass"
        a = "if   list(d. keys  (  ))  : pass"
        self.check(b, a)
        b = "if   d. items  (  )  : pass"
        a = "if   list(d. items  (  ))  : pass"
        self.check(b, a)
        b = "if   d. iterkeys  ( )  : pass"
        a = "if   iter(d. keys  ( ))  : pass"
        self.check(b, a)
        b = "[i for i in    d. iterkeys(  ) ]"
        a = "[i for i in    d. keys(  ) ]"
        self.check(b, a)
        b = "if   d. viewkeys  ( )  : pass"
        a = "if   d. keys  ( )  : pass"
        self.check(b, a)
        b = "[i for i in    d. viewkeys(  ) ]"
        a = "[i for i in    d. keys(  ) ]"
        self.check(b, a)

    def test_trailing_comment(self):
        b = "d.keys() # foo"
        a = "list(d.keys()) # foo"
        self.check(b, a)
        b = "d.items()  # foo"
        a = "list(d.items())  # foo"
        self.check(b, a)
        b = "d.iterkeys()  # foo"
        a = "iter(d.keys())  # foo"
        self.check(b, a)
        b = """[i for i in d.iterkeys() # foo
               ]"""
        a = """[i for i in d.keys() # foo
               ]"""
        self.check(b, a)
        b = """[i for i in d.iterkeys() # foo
               ]"""
        a = """[i for i in d.keys() # foo
               ]"""
        self.check(b, a)
        b = "d.viewitems()  # foo"
        a = "d.items()  # foo"
        self.check(b, a)

    def test_unchanged(self):
        # Calls already wrapped in a consuming builtin need no extra list().
        for wrapper in fixer_util.consuming_calls:
            s = "s = %s(d.keys())" % wrapper
            self.unchanged(s)
            s = "s = %s(d.values())" % wrapper
            self.unchanged(s)
            s = "s = %s(d.items())" % wrapper
            self.unchanged(s)

    def test_01(self):
        b = "d.keys()"
        a = "list(d.keys())"
        self.check(b, a)
        b = "a[0].foo().keys()"
        a = "list(a[0].foo().keys())"
        self.check(b, a)

    def test_02(self):
        b = "d.items()"
        a = "list(d.items())"
        self.check(b, a)

    def test_03(self):
        b = "d.values()"
        a = "list(d.values())"
        self.check(b, a)

    def test_04(self):
        b = "d.iterkeys()"
        a = "iter(d.keys())"
        self.check(b, a)

    def test_05(self):
        b = "d.iteritems()"
        a = "iter(d.items())"
        self.check(b, a)

    def test_06(self):
        b = "d.itervalues()"
        a = "iter(d.values())"
        self.check(b, a)

    def test_07(self):
        s = "list(d.keys())"
        self.unchanged(s)

    def test_08(self):
        s = "sorted(d.keys())"
        self.unchanged(s)

    def test_09(self):
        b = "iter(d.keys())"
        a = "iter(list(d.keys()))"
        self.check(b, a)

    def test_10(self):
        b = "foo(d.keys())"
        a = "foo(list(d.keys()))"
        self.check(b, a)

    def test_11(self):
        b = "for i in d.keys(): print i"
        a = "for i in list(d.keys()): print i"
        self.check(b, a)

    def test_12(self):
        b = "for i in d.iterkeys(): print i"
        a = "for i in d.keys(): print i"
        self.check(b, a)

    def test_13(self):
        b = "[i for i in d.keys()]"
        a = "[i for i in list(d.keys())]"
        self.check(b, a)

    def test_14(self):
        b = "[i for i in d.iterkeys()]"
        a = "[i for i in d.keys()]"
        self.check(b, a)

    def test_15(self):
        b = "(i for i in d.keys())"
        a = "(i for i in list(d.keys()))"
        self.check(b, a)

    def test_16(self):
        b = "(i for i in d.iterkeys())"
        a = "(i for i in d.keys())"
        self.check(b, a)

    def test_17(self):
        b = "iter(d.iterkeys())"
        a = "iter(d.keys())"
        self.check(b, a)

    def test_18(self):
        b = "list(d.iterkeys())"
        a = "list(d.keys())"
        self.check(b, a)

    def test_19(self):
        b = "sorted(d.iterkeys())"
        a = "sorted(d.keys())"
        self.check(b, a)

    def test_20(self):
        b = "foo(d.iterkeys())"
        a = "foo(iter(d.keys()))"
        self.check(b, a)

    def test_21(self):
        b = "print h.iterkeys().next()"
        a = "print iter(h.keys()).next()"
        self.check(b, a)

    def test_22(self):
        b = "print h.keys()[0]"
        a = "print list(h.keys())[0]"
        self.check(b, a)

    def test_23(self):
        b = "print list(h.iterkeys().next())"
        a = "print list(iter(h.keys()).next())"
        self.check(b, a)

    def test_24(self):
        b = "for x in h.keys()[0]: print x"
        a = "for x in list(h.keys())[0]: print x"
        self.check(b, a)

    def test_25(self):
        b = "d.viewkeys()"
        a = "d.keys()"
        self.check(b, a)

    def test_26(self):
        b = "d.viewitems()"
        a = "d.items()"
        self.check(b, a)

    def test_27(self):
        b = "d.viewvalues()"
        a = "d.values()"
        self.check(b, a)

    def test_28(self):
        b = "[i for i in d.viewkeys()]"
        a = "[i for i in d.keys()]"
        self.check(b, a)

    def test_29(self):
        b = "(i for i in d.viewkeys())"
        a = "(i for i in d.keys())"
        self.check(b, a)

    def test_30(self):
        b = "iter(d.viewkeys())"
        a = "iter(d.keys())"
        self.check(b, a)

    def test_31(self):
        b = "list(d.viewkeys())"
        a = "list(d.keys())"
        self.check(b, a)

    def test_32(self):
        b = "sorted(d.viewkeys())"
        a = "sorted(d.keys())"
        self.check(b, a)
class Test_xrange(FixerTestCase):
    """Tests for the ``xrange`` fixer.

    ``xrange`` is renamed to ``range``; a pre-existing ``range`` call that is
    used outside an iterating/consuming context is wrapped in ``list(...)``.
    """

    fixer = "xrange"

    def test_prefix_preservation(self):
        b = """x =    xrange(  10  )"""
        a = """x =    range(  10  )"""
        self.check(b, a)
        b = """x = xrange(  1  ,  10   )"""
        a = """x = range(  1  ,  10   )"""
        self.check(b, a)
        b = """x = xrange(  0  ,  10 ,  2 )"""
        a = """x = range(  0  ,  10 ,  2 )"""
        self.check(b, a)

    def test_single_arg(self):
        b = """x = xrange(10)"""
        a = """x = range(10)"""
        self.check(b, a)

    def test_two_args(self):
        b = """x = xrange(1, 10)"""
        a = """x = range(1, 10)"""
        self.check(b, a)

    def test_three_args(self):
        b = """x = xrange(0, 10, 2)"""
        a = """x = range(0, 10, 2)"""
        self.check(b, a)

    def test_wrap_in_list(self):
        # Plain range() used as a list (assigned, sliced, concatenated)
        # must be materialized with list().
        b = """x = range(10, 3, 9)"""
        a = """x = list(range(10, 3, 9))"""
        self.check(b, a)
        b = """x = foo(range(10, 3, 9))"""
        a = """x = foo(list(range(10, 3, 9)))"""
        self.check(b, a)
        b = """x = range(10, 3, 9) + [4]"""
        a = """x = list(range(10, 3, 9)) + [4]"""
        self.check(b, a)
        b = """x = range(10)[::-1]"""
        a = """x = list(range(10))[::-1]"""
        self.check(b, a)
        b = """x = range(10)  [3]"""
        a = """x = list(range(10))  [3]"""
        self.check(b, a)

    def test_xrange_in_for(self):
        b = """for i in xrange(10):\n    j=i"""
        a = """for i in range(10):\n    j=i"""
        self.check(b, a)
        b = """[i for i in xrange(10)]"""
        a = """[i for i in range(10)]"""
        self.check(b, a)

    def test_range_in_for(self):
        # Iterating contexts never need the list() wrapper.
        self.unchanged("for i in range(10): pass")
        self.unchanged("[i for i in range(10)]")

    def test_in_contains_test(self):
        self.unchanged("x in range(10, 3, 9)")

    def test_in_consuming_context(self):
        for call in fixer_util.consuming_calls:
            self.unchanged("a = %s(range(10))" % call)
class Test_xrange_with_reduce(FixerTestCase):
    """Interaction test: the ``xrange`` and ``reduce`` fixers applied together."""

    def setUp(self):
        # No class-level `fixer` attribute: run both fixers explicitly.
        super(Test_xrange_with_reduce, self).setUp(["xrange", "reduce"])

    def test_double_transform(self):
        # Both rewrites apply: the functools import is added and xrange renamed.
        b = """reduce(x, xrange(5))"""
        a = """from functools import reduce
reduce(x, range(5))"""
        self.check(b, a)
class Test_raw_input(FixerTestCase):
    """Tests for the ``raw_input`` fixer: ``raw_input`` -> ``input``."""

    fixer = "raw_input"

    def test_prefix_preservation(self):
        b = """x =    raw_input(   )"""
        a = """x =    input(   )"""
        self.check(b, a)
        b = """x = raw_input(   ''   )"""
        a = """x = input(   ''   )"""
        self.check(b, a)

    def test_1(self):
        b = """x = raw_input()"""
        a = """x = input()"""
        self.check(b, a)

    def test_2(self):
        b = """x = raw_input('')"""
        a = """x = input('')"""
        self.check(b, a)

    def test_3(self):
        b = """x = raw_input('prompt')"""
        a = """x = input('prompt')"""
        self.check(b, a)

    def test_4(self):
        b = """x = raw_input(foo(a) + 6)"""
        a = """x = input(foo(a) + 6)"""
        self.check(b, a)

    def test_5(self):
        b = """x = raw_input(invite).split()"""
        a = """x = input(invite).split()"""
        self.check(b, a)

    def test_6(self):
        b = """x = raw_input(invite) . split ()"""
        a = """x = input(invite) . split ()"""
        self.check(b, a)

    def test_8(self):
        b = "x = int(raw_input())"
        a = "x = int(input())"
        self.check(b, a)
class Test_funcattrs(FixerTestCase):
    """Tests for the ``funcattrs`` fixer: ``f.func_X`` -> ``f.__X__``."""

    fixer = "funcattrs"

    # The legacy func_* attribute suffixes the fixer renames.
    attrs = ["closure", "doc", "name", "defaults", "code", "globals", "dict"]

    def test(self):
        for attr in self.attrs:
            b = "a.func_%s" % attr
            a = "a.__%s__" % attr
            self.check(b, a)
            b = "self.foo.func_%s.foo_bar" % attr
            a = "self.foo.__%s__.foo_bar" % attr
            self.check(b, a)

    def test_unchanged(self):
        for attr in self.attrs:
            # Bare names and already-dundered attributes are left alone.
            s = "foo(func_%s + 5)" % attr
            self.unchanged(s)
            s = "f(foo.__%s__)" % attr
            self.unchanged(s)
            s = "f(foo.__%s__.foo)" % attr
            self.unchanged(s)
class Test_xreadlines(FixerTestCase):
    """Tests for the ``xreadlines`` fixer.

    ``for x in f.xreadlines():`` iterates the file directly; a bare
    ``.xreadlines`` attribute reference maps to ``.__iter__``.
    """

    fixer = "xreadlines"

    def test_call(self):
        b = "for x in f.xreadlines(): pass"
        a = "for x in f: pass"
        self.check(b, a)
        b = "for x in foo().xreadlines(): pass"
        a = "for x in foo(): pass"
        self.check(b, a)
        b = "for x in (5 + foo()).xreadlines(): pass"
        a = "for x in (5 + foo()): pass"
        self.check(b, a)

    def test_attr_ref(self):
        b = "foo(f.xreadlines + 5)"
        a = "foo(f.__iter__ + 5)"
        self.check(b, a)
        b = "foo(f().xreadlines + 5)"
        a = "foo(f().__iter__ + 5)"
        self.check(b, a)
        b = "foo((5 + f()).xreadlines + 5)"
        a = "foo((5 + f()).__iter__ + 5)"
        self.check(b, a)

    def test_unchanged(self):
        # Calls with arguments or the bare name don't match the pattern.
        s = "for x in f.xreadlines(5): pass"
        self.unchanged(s)
        s = "for x in f.xreadlines(k=5): pass"
        self.unchanged(s)
        s = "for x in f.xreadlines(*k, **v): pass"
        self.unchanged(s)
        s = "foo(xreadlines)"
        self.unchanged(s)
class ImportsFixerTests:
    """Shared test mixin for module-renaming fixers (imports, imports2, ...).

    Subclasses must provide ``self.modules``, a mapping of old module name to
    new module name; each test iterates that mapping.
    """

    def test_import_module(self):
        for old, new in self.modules.items():
            b = "import %s" % old
            a = "import %s" % new
            self.check(b, a)
            b = "import foo, %s, bar" % old
            a = "import foo, %s, bar" % new
            self.check(b, a)

    def test_import_from(self):
        for old, new in self.modules.items():
            b = "from %s import foo" % old
            a = "from %s import foo" % new
            self.check(b, a)
            b = "from %s import foo, bar" % old
            a = "from %s import foo, bar" % new
            self.check(b, a)
            b = "from %s import (yes, no)" % old
            a = "from %s import (yes, no)" % new
            self.check(b, a)

    def test_import_module_as(self):
        for old, new in self.modules.items():
            b = "import %s as foo_bar" % old
            a = "import %s as foo_bar" % new
            self.check(b, a)
            b = "import %s as foo_bar" % old
            a = "import %s as foo_bar" % new
            self.check(b, a)

    def test_import_from_as(self):
        for old, new in self.modules.items():
            b = "from %s import foo as bar" % old
            a = "from %s import foo as bar" % new
            self.check(b, a)

    def test_star(self):
        for old, new in self.modules.items():
            b = "from %s import *" % old
            a = "from %s import *" % new
            self.check(b, a)

    def test_import_module_usage(self):
        for old, new in self.modules.items():
            # Plain `import old` + attribute use: both occurrences renamed.
            b = """
                import %s
                foo(%s.bar)
                """ % (old, old)
            a = """
                import %s
                foo(%s.bar)
                """ % (new, new)
            self.check(b, a)
            # `from old import x` leaves later bare uses of the old name
            # alone -- they refer to a local binding, not the module.
            b = """
                from %s import x
                %s = 23
                """ % (old, old)
            a = """
                from %s import x
                %s = 23
                """ % (new, old)
            self.check(b, a)
            # Attribute use without an import in scope: nothing to rename.
            s = """
                def f():
                    %s.method()
                """ % (old,)
            self.unchanged(s)
            # Multiple attribute uses after the import are all renamed.
            b = """
                import %s
                %s.bar(%s.foo)
                """ % (old, old, old)
            a = """
                import %s
                %s.bar(%s.foo)
                """ % (new, new, new)
            self.check(b, a)
            # `x.old` is an attribute of x, not the module: only the
            # import line changes.
            b = """
                import %s
                x.%s
                """ % (old, old)
            a = """
                import %s
                x.%s
                """ % (new, old)
            self.check(b, a)
class Test_imports(FixerTestCase, ImportsFixerTests):
    """Tests for the ``imports`` fixer, driven by its own rename MAPPING."""

    fixer = "imports"
    from ..fixes.fix_imports import MAPPING as modules

    def test_multiple_imports(self):
        b = """import urlparse, cStringIO"""
        a = """import urllib.parse, io"""
        self.check(b, a)

    def test_multiple_imports_as(self):
        b = """
            import copy_reg as bar, HTMLParser as foo, urlparse
            s = urlparse.spam(bar.foo())
            """
        a = """
            import copyreg as bar, html.parser as foo, urllib.parse
            s = urllib.parse.spam(bar.foo())
            """
        self.check(b, a)
class Test_imports2(FixerTestCase, ImportsFixerTests):
    """Tests for the ``imports2`` fixer; all cases come from the shared mixin."""

    fixer = "imports2"
    from ..fixes.fix_imports2 import MAPPING as modules
class Test_imports_fixer_order(FixerTestCase, ImportsFixerTests):
    """Tests running the ``imports`` and ``imports2`` fixers together.

    Builds a combined rename mapping, with the imports MAPPING winning for
    the dbm-family modules both fixers know about.
    """

    def setUp(self):
        super(Test_imports_fixer_order, self).setUp(["imports", "imports2"])
        from ..fixes.fix_imports2 import MAPPING as mapping2

        self.modules = mapping2.copy()
        from ..fixes.fix_imports import MAPPING as mapping1

        # These keys exist in both mappings; fix_imports takes precedence.
        for key in ("dbhash", "dumbdbm", "dbm", "gdbm"):
            self.modules[key] = mapping1[key]

    def test_after_local_imports_refactoring(self):
        # Both module fixers must be ordered after the `import` fixer.
        for fix in ("imports", "imports2"):
            self.fixer = fix
            self.assert_runs_after("import")
class Test_urllib(FixerTestCase):
    """Tests for the ``urllib`` fixer.

    Unlike simple renames, one old module maps to SEVERAL new modules, so
    ``self.modules`` values are lists of ``(new_module, members)`` pairs and
    imports may be split across multiple new ``from`` statements.
    """

    fixer = "urllib"
    from ..fixes.fix_urllib import MAPPING as modules

    def test_import_module(self):
        for old, changes in self.modules.items():
            b = "import %s" % old
            # A plain import pulls in every replacement module.
            a = "import %s" % ", ".join(map(itemgetter(0), changes))
            self.check(b, a)

    def test_import_from(self):
        for old, changes in self.modules.items():
            all_members = []
            for new, members in changes:
                for member in members:
                    all_members.append(member)
                    b = "from %s import %s" % (old, member)
                    a = "from %s import %s" % (new, member)
                    self.check(b, a)
                    # Same member from an unrelated module: untouched.
                    s = "from foo import %s" % member
                    self.unchanged(s)
                b = "from %s import %s" % (old, ", ".join(members))
                a = "from %s import %s" % (new, ", ".join(members))
                self.check(b, a)
                s = "from foo import %s" % ", ".join(members)
                self.unchanged(s)
            # Members spanning several new modules: one `from` line each.
            b = "from %s import %s" % (old, ", ".join(all_members))
            a = "\n".join(
                [
                    "from %s import %s" % (new, ", ".join(members))
                    for (new, members) in changes
                ]
            )
            self.check(b, a)

    def test_import_module_as(self):
        # `import urllib as foo` cannot be split meaningfully: warn only.
        for old in self.modules:
            s = "import %s as foo" % old
            self.warns_unchanged(s, "This module is now multiple modules")

    def test_import_from_as(self):
        for old, changes in self.modules.items():
            for new, members in changes:
                for member in members:
                    b = "from %s import %s as foo_bar" % (old, member)
                    a = "from %s import %s as foo_bar" % (new, member)
                    self.check(b, a)
                    b = "from %s import %s as blah, %s" % (old, member, member)
                    a = "from %s import %s as blah, %s" % (new, member, member)
                    self.check(b, a)

    def test_star(self):
        for old in self.modules:
            s = "from %s import *" % old
            self.warns_unchanged(s, "Cannot handle star imports")

    def test_indented(self):
        # Split imports inside a function body keep the indentation level.
        b = """
def foo():
    from urllib import urlencode, urlopen
"""
        a = """
def foo():
    from urllib.parse import urlencode
    from urllib.request import urlopen
"""
        self.check(b, a)

        b = """
def foo():
    other()
    from urllib import urlencode, urlopen
"""
        a = """
def foo():
    other()
    from urllib.parse import urlencode
    from urllib.request import urlopen
"""
        self.check(b, a)

    def test_import_module_usage(self):
        for old, changes in self.modules.items():
            for new, members in changes:
                for member in members:
                    # All replacement modules get imported; attribute
                    # accesses are retargeted at the module owning them.
                    new_import = ", ".join([n for (n, mems) in self.modules[old]])
                    b = """
                        import %s
                        foo(%s.%s)
                        """ % (old, old, member)
                    a = """
                        import %s
                        foo(%s.%s)
                        """ % (new_import, new, member)
                    self.check(b, a)
                    b = """
                        import %s
                        %s.%s(%s.%s)
                        """ % (old, old, member, old, member)
                    a = """
                        import %s
                        %s.%s(%s.%s)
                        """ % (new_import, new, member, new, member)
                    self.check(b, a)
class Test_input(FixerTestCase):
    """Tests for the ``input`` fixer: ``input(...)`` -> ``eval(input(...))``."""

    fixer = "input"

    def test_prefix_preservation(self):
        b = """x =   input(   )"""
        a = """x =   eval(input(   ))"""
        self.check(b, a)
        b = """x = input(   ''   )"""
        a = """x = eval(input(   ''   ))"""
        self.check(b, a)

    def test_trailing_comment(self):
        b = """x = input()  #  foo"""
        a = """x = eval(input())  #  foo"""
        self.check(b, a)

    def test_idempotency(self):
        # An input() already wrapped in eval() must not be wrapped again.
        s = """x = eval(input())"""
        self.unchanged(s)
        s = """x = eval(input(''))"""
        self.unchanged(s)
        s = """x = eval(input(foo(5) + 9))"""
        self.unchanged(s)

    def test_1(self):
        b = """x = input()"""
        a = """x = eval(input())"""
        self.check(b, a)

    def test_2(self):
        b = """x = input('')"""
        a = """x = eval(input(''))"""
        self.check(b, a)

    def test_3(self):
        b = """x = input('prompt')"""
        a = """x = eval(input('prompt'))"""
        self.check(b, a)

    def test_4(self):
        b = """x = input(foo(5) + 9)"""
        a = """x = eval(input(foo(5) + 9))"""
        self.check(b, a)
class Test_tuple_params(FixerTestCase):
    """Tests for the ``tuple_params`` fixer (tuple parameter unpacking).

    In ``def``s, a tuple parameter is replaced by a ``xxx_todo_changeme``
    placeholder plus an unpacking assignment at the top of the body (after
    any docstring). In lambdas, element accesses are rewritten as subscripts
    of a synthesized flat parameter name (e.g. ``x_y``).
    """

    fixer = "tuple_params"

    def test_unchanged_1(self):
        s = """def foo(): pass"""
        self.unchanged(s)

    def test_unchanged_2(self):
        s = """def foo(a, b, c): pass"""
        self.unchanged(s)

    def test_unchanged_3(self):
        s = """def foo(a=3, b=4, c=5): pass"""
        self.unchanged(s)

    def test_1(self):
        b = """
            def foo(((a, b), c)):
                x = 5"""
        a = """
            def foo(xxx_todo_changeme):
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)

    def test_2(self):
        b = """
            def foo(((a, b), c), d):
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, d):
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)

    def test_3(self):
        b = """
            def foo(((a, b), c), d) -> e:
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, d) -> e:
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)

    def test_semicolon(self):
        # One-line body: the unpacking joins the statement list.
        b = """
            def foo(((a, b), c)): x = 5; y = 7"""
        a = """
            def foo(xxx_todo_changeme): ((a, b), c) = xxx_todo_changeme; x = 5; y = 7"""
        self.check(b, a)

    def test_keywords(self):
        b = """
            def foo(((a, b), c), d, e=5) -> z:
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, d, e=5) -> z:
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)

    def test_varargs(self):
        b = """
            def foo(((a, b), c), d, *vargs, **kwargs) -> z:
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, d, *vargs, **kwargs) -> z:
                ((a, b), c) = xxx_todo_changeme
                x = 5"""
        self.check(b, a)

    def test_multi_1(self):
        # Several tuple params get numbered placeholder names.
        b = """
            def foo(((a, b), c), (d, e, f)) -> z:
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, xxx_todo_changeme1) -> z:
                ((a, b), c) = xxx_todo_changeme
                (d, e, f) = xxx_todo_changeme1
                x = 5"""
        self.check(b, a)

    def test_multi_2(self):
        b = """
            def foo(x, ((a, b), c), d, (e, f, g), y) -> z:
                x = 5"""
        a = """
            def foo(x, xxx_todo_changeme, d, xxx_todo_changeme1, y) -> z:
                ((a, b), c) = xxx_todo_changeme
                (e, f, g) = xxx_todo_changeme1
                x = 5"""
        self.check(b, a)

    def test_docstring(self):
        # Unpacking assignments are inserted AFTER the docstring.
        b = """
            def foo(((a, b), c), (d, e, f)) -> z:
                "foo foo foo foo"
                x = 5"""
        a = """
            def foo(xxx_todo_changeme, xxx_todo_changeme1) -> z:
                "foo foo foo foo"
                ((a, b), c) = xxx_todo_changeme
                (d, e, f) = xxx_todo_changeme1
                x = 5"""
        self.check(b, a)

    def test_lambda_no_change(self):
        s = """lambda x: x + 5"""
        self.unchanged(s)

    def test_lambda_parens_single_arg(self):
        # Redundant parens around a single lambda arg are simply dropped.
        b = """lambda (x): x + 5"""
        a = """lambda x: x + 5"""
        self.check(b, a)
        b = """lambda(x): x + 5"""
        a = """lambda x: x + 5"""
        self.check(b, a)
        b = """lambda ((((x)))): x + 5"""
        a = """lambda x: x + 5"""
        self.check(b, a)
        b = """lambda((((x)))): x + 5"""
        a = """lambda x: x + 5"""
        self.check(b, a)

    def test_lambda_simple(self):
        b = """lambda (x, y): x + f(y)"""
        a = """lambda x_y: x_y[0] + f(x_y[1])"""
        self.check(b, a)
        b = """lambda(x, y): x + f(y)"""
        a = """lambda x_y: x_y[0] + f(x_y[1])"""
        self.check(b, a)
        b = """lambda (((x, y))): x + f(y)"""
        a = """lambda x_y: x_y[0] + f(x_y[1])"""
        self.check(b, a)
        b = """lambda(((x, y))): x + f(y)"""
        a = """lambda x_y: x_y[0] + f(x_y[1])"""
        self.check(b, a)

    def test_lambda_one_tuple(self):
        # Single-element tuple gets a numeric-suffixed name (x1).
        b = """lambda (x,): x + f(x)"""
        a = """lambda x1: x1[0] + f(x1[0])"""
        self.check(b, a)
        b = """lambda (((x,))): x + f(x)"""
        a = """lambda x1: x1[0] + f(x1[0])"""
        self.check(b, a)

    def test_lambda_simple_multi_use(self):
        b = """lambda (x, y): x + x + f(x) + x"""
        a = """lambda x_y: x_y[0] + x_y[0] + f(x_y[0]) + x_y[0]"""
        self.check(b, a)

    def test_lambda_simple_reverse(self):
        b = """lambda (x, y): y + x"""
        a = """lambda x_y: x_y[1] + x_y[0]"""
        self.check(b, a)

    def test_lambda_nested(self):
        b = """lambda (x, (y, z)): x + y + z"""
        a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + x_y_z[1][1]"""
        self.check(b, a)
        b = """lambda (((x, (y, z)))): x + y + z"""
        a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + x_y_z[1][1]"""
        self.check(b, a)

    def test_lambda_nested_multi_use(self):
        b = """lambda (x, (y, z)): x + y + f(y)"""
        a = """lambda x_y_z: x_y_z[0] + x_y_z[1][0] + f(x_y_z[1][0])"""
        self.check(b, a)
class Test_methodattrs(FixerTestCase):
    """Tests for the ``methodattrs`` fixer: bound-method ``im_*`` attributes.

    ``im_func`` -> ``__func__``, ``im_self`` -> ``__self__``, and
    ``im_class`` -> ``__self__.__class__`` (no direct ``__class__`` attr
    on Python 3 methods).
    """

    fixer = "methodattrs"

    # Legacy im_* attribute suffixes handled by the fixer.
    attrs = ["func", "self", "class"]

    def test(self):
        for attr in self.attrs:
            b = "a.im_%s" % attr
            if attr == "class":
                a = "a.__self__.__class__"
            else:
                a = "a.__%s__" % attr
            self.check(b, a)

            b = "self.foo.im_%s.foo_bar" % attr
            if attr == "class":
                a = "self.foo.__self__.__class__.foo_bar"
            else:
                a = "self.foo.__%s__.foo_bar" % attr
            self.check(b, a)

    def test_unchanged(self):
        for attr in self.attrs:
            s = "foo(im_%s + 5)" % attr
            self.unchanged(s)
            s = "f(foo.__%s__)" % attr
            self.unchanged(s)
            s = "f(foo.__%s__.foo)" % attr
            self.unchanged(s)
class Test_next(FixerTestCase):
fixer = "next"
def test_1(self):
b = """it.next()"""
a = """next(it)"""
self.check(b, a)
def test_2(self):
b = """a.b.c.d.next()"""
a = """next(a.b.c.d)"""
self.check(b, a)
def test_3(self):
b = """(a + b).next()"""
a = """next((a + b))"""
self.check(b, a)
def test_4(self):
b = """a().next()"""
a = """next(a())"""
self.check(b, a)
def test_5(self):
b = """a().next() + b"""
a = """next(a()) + b"""
self.check(b, a)
def test_6(self):
b = """c( a().next() + b)"""
a = """c( next(a()) + b)"""
self.check(b, a)
def test_prefix_preservation_1(self):
b = """
for a in b:
foo(a)
a.next()
"""
a = """
for a in b:
foo(a)
next(a)
"""
self.check(b, a)
def test_prefix_preservation_2(self):
b = """
for a in b:
foo(a) # abc
# def
a.next()
"""
a = """
for a in b:
foo(a) # abc
# def
next(a)
"""
self.check(b, a)
def test_prefix_preservation_3(self):
b = """
next = 5
for a in b:
foo(a)
a.next()
"""
a = """
next = 5
for a in b:
foo(a)
a.__next__()
"""
self.check(b, a, ignore_warnings=True)
def test_prefix_preservation_4(self):
b = """
next = 5
for a in b:
foo(a) # abc
# def
a.next()
"""
a = """
next = 5
for a in b:
foo(a) # abc
# def
a.__next__()
"""
self.check(b, a, ignore_warnings=True)
def test_prefix_preservation_5(self):
b = """
next = 5
for a in b:
foo(foo(a), # abc
a.next())
"""
a = """
next = 5
for a in b:
foo(foo(a), # abc
a.__next__())
"""
self.check(b, a, ignore_warnings=True)
def test_prefix_preservation_6(self):
b = """
for a in b:
foo(foo(a), # abc
a.next())
"""
a = """
for a in b:
foo(foo(a), # abc
next(a))
"""
self.check(b, a)
def test_method_1(self):
b = """
class A:
def next(self):
pass
"""
a = """
class A:
def __next__(self):
pass
"""
self.check(b, a)
def test_method_2(self):
b = """
class A(object):
def next(self):
pass
"""
a = """
class A(object):
def __next__(self):
pass
"""
self.check(b, a)
def test_method_3(self):
b = """
class A:
def next(x):
pass
"""
a = """
class A:
def __next__(x):
pass
"""
self.check(b, a)
def test_method_4(self):
b = """
class A:
def __init__(self, foo):
self.foo = foo
def next(self):
pass
def __iter__(self):
return self
"""
a = """
class A:
def __init__(self, foo):
self.foo = foo
def __next__(self):
pass
def __iter__(self):
return self
"""
self.check(b, a)
def test_method_unchanged(self):
s = """
class A:
def next(self, a, b):
pass
"""
self.unchanged(s)
def test_shadowing_assign_simple(self):
s = """
next = foo
class A:
def next(self, a, b):
pass
"""
self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_assign_tuple_1(self):
        """Binding next via tuple unpacking also counts as shadowing."""
        s = """
            (next, a) = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_assign_tuple_2(self):
        """Shadowing is detected even in deeply nested tuple targets."""
        s = """
            (a, (b, (next, c)), a) = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_assign_list_1(self):
        """Binding next via list unpacking also counts as shadowing."""
        s = """
            [next, a] = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_assign_list_2(self):
        """Shadowing is detected in nested list targets as well."""
        s = """
            [a, [b, [next, c]], a] = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_builtin_assign(self):
        """Assigning to __builtin__.next shadows the builtin; warn, no rewrite."""
        s = """
            def foo():
                __builtin__.next = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_builtin_assign_in_tuple(self):
        """__builtin__.next bound inside a tuple target still warns."""
        s = """
            def foo():
                (a, __builtin__.next) = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_builtin_assign_in_list(self):
        """__builtin__.next bound inside a list target still warns."""
        s = """
            def foo():
                [a, __builtin__.next] = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_assign_to_next(self):
        """Assigning to an attribute named next is not shadowing; unchanged."""
        s = """
            def foo():
                A.next = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.unchanged(s)
    def test_assign_to_next_in_tuple(self):
        """An attribute target inside a tuple is not shadowing either."""
        s = """
            def foo():
                (a, A.next) = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.unchanged(s)
    def test_assign_to_next_in_list(self):
        """An attribute target inside a list is not shadowing either."""
        s = """
            def foo():
                [a, A.next] = foo
            class A:
                def next(self, a, b):
                    pass
            """
        self.unchanged(s)
    def test_shadowing_import_1(self):
        """import ... as next shadows the builtin; warn, no rewrite."""
        s = """
            import foo.bar as next
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_import_2(self):
        """An as-next alias in a multi-import statement still warns."""
        s = """
            import bar, bar.foo as next
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_import_3(self):
        """The as-next alias is found even mid-list in a multi-import."""
        s = """
            import bar, bar.foo as next, baz
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_import_from_1(self):
        """from-import of the name next shadows the builtin; warn."""
        s = """
            from x import next
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_import_from_2(self):
        """from-import from a dotted module also shadows; warn."""
        s = """
            from x.a import next
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_import_from_3(self):
        """next is spotted among several imported names; warn."""
        s = """
            from x import a, next, b
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_import_from_4(self):
        """next among several names from a dotted module; warn."""
        s = """
            from x.a import a, next, b
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_funcdef_1(self):
        """A top-level def next(...) shadows the builtin; warn, no rewrite."""
        s = """
            def next(a):
                pass
            class A:
                def next(self, a, b):
                    pass
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_funcdef_2(self):
        """With next shadowed, method and call still get __next__ plus a warning."""
        b = """
            def next(a):
                pass
            class A:
                def next(self):
                    pass
            it.next()
            """
        a = """
            def next(a):
                pass
            class A:
                def __next__(self):
                    pass
            it.__next__()
            """
        self.warns(b, a, "Calls to builtin next() possibly shadowed")
    def test_shadowing_global_1(self):
        """global next followed by assignment shadows the builtin; warn."""
        s = """
            def f():
                global next
                next = 5
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_global_2(self):
        """next is spotted in a multi-name global statement; warn."""
        s = """
            def f():
                global a, next, b
                next = 5
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_for_simple(self):
        """A for-loop target named next shadows the builtin; warn."""
        s = """
            for next in it():
                pass
            b = 5
            c = 6
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_for_tuple_1(self):
        """next in a tuple for-loop target shadows the builtin; warn."""
        s = """
            for next, b in it():
                pass
            b = 5
            c = 6
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_shadowing_for_tuple_2(self):
        """next nested inside a tuple for-loop target still warns."""
        s = """
            for a, (next, c), b in it():
                pass
            b = 5
            c = 6
            """
        self.warns_unchanged(s, "Calls to builtin next() possibly shadowed")
    def test_noncall_access_1(self):
        """Bare attribute access g.next (no call) is renamed to g.__next__."""
        b = """gnext = g.next"""
        a = """gnext = g.__next__"""
        self.check(b, a)
    def test_noncall_access_2(self):
        """Non-call access inside an expression is renamed too."""
        b = """f(g.next + 5)"""
        a = """f(g.__next__ + 5)"""
        self.check(b, a)
    def test_noncall_access_3(self):
        """Non-call access on a call result is renamed as well."""
        b = """f(g().next + 5)"""
        a = """f(g().__next__ + 5)"""
        self.check(b, a)
class Test_nonzero(FixerTestCase):
    """Tests for the 'nonzero' fixer: __nonzero__ methods become __bool__."""
    fixer = "nonzero"
    def test_1(self):
        """__nonzero__ on an old-style class is renamed."""
        b = """
            class A:
                def __nonzero__(self):
                    pass
            """
        a = """
            class A:
                def __bool__(self):
                    pass
            """
        self.check(b, a)
    def test_2(self):
        """__nonzero__ on a new-style class is renamed."""
        b = """
            class A(object):
                def __nonzero__(self):
                    pass
            """
        a = """
            class A(object):
                def __bool__(self):
                    pass
            """
        self.check(b, a)
    def test_unchanged_1(self):
        """An existing __bool__ method is left alone."""
        s = """
            class A(object):
                def __bool__(self):
                    pass
            """
        self.unchanged(s)
    def test_unchanged_2(self):
        """__nonzero__ with extra parameters is not the protocol method."""
        s = """
            class A(object):
                def __nonzero__(self, a):
                    pass
            """
        self.unchanged(s)
    def test_unchanged_func(self):
        """A module-level __nonzero__ function is not renamed."""
        s = """
            def __nonzero__(self):
                pass
            """
        self.unchanged(s)
class Test_numliterals(FixerTestCase):
    """Tests for the 'numliterals' fixer: 0NNN octal and L-suffix literals."""
    fixer = "numliterals"
    def test_octal_1(self):
        """Legacy 0755 octal notation becomes 0o755."""
        b = """0755"""
        a = """0o755"""
        self.check(b, a)
    def test_long_int_1(self):
        """The uppercase L long suffix is dropped."""
        b = """a = 12L"""
        a = """a = 12"""
        self.check(b, a)
    def test_long_int_2(self):
        """The lowercase l long suffix is dropped."""
        b = """a = 12l"""
        a = """a = 12"""
        self.check(b, a)
    def test_long_hex(self):
        """The long suffix is dropped from hex literals too."""
        b = """b = 0x12l"""
        a = """b = 0x12"""
        self.check(b, a)
    def test_comments_and_spacing(self):
        """Trailing comments survive the literal rewrite."""
        b = """b = 0x12L"""
        a = """b = 0x12"""
        self.check(b, a)
        b = """b = 0755 # spam"""
        a = """b = 0o755 # spam"""
        self.check(b, a)
    def test_unchanged_int(self):
        s = """5"""
        self.unchanged(s)
    def test_unchanged_float(self):
        s = """5.0"""
        self.unchanged(s)
    def test_unchanged_octal(self):
        s = """0o755"""
        self.unchanged(s)
    def test_unchanged_hex(self):
        s = """0xABC"""
        self.unchanged(s)
    def test_unchanged_exp(self):
        s = """5.0e10"""
        self.unchanged(s)
    def test_unchanged_complex_int(self):
        s = """5 + 4j"""
        self.unchanged(s)
    def test_unchanged_complex_float(self):
        s = """5.4 + 4.9j"""
        self.unchanged(s)
    def test_unchanged_complex_bare(self):
        s = """4j"""
        self.unchanged(s)
        s = """4.4j"""
        self.unchanged(s)
class Test_renames(FixerTestCase):
    """Tests for the 'renames' fixer, driven by a module->(old, new) table."""
    fixer = "renames"
    # Attribute renames exercised below: sys.maxint -> sys.maxsize.
    modules = {"sys": ("maxint", "maxsize")}
    def test_import_from(self):
        """from-imports of the old name are renamed; other modules untouched."""
        for mod, (old, new) in list(self.modules.items()):
            b = "from %s import %s" % (mod, old)
            a = "from %s import %s" % (mod, new)
            self.check(b, a)
            s = "from foo import %s" % old
            self.unchanged(s)
    def test_import_from_as(self):
        """The rename also applies under an as-alias."""
        for mod, (old, new) in list(self.modules.items()):
            b = "from %s import %s as foo_bar" % (mod, old)
            a = "from %s import %s as foo_bar" % (mod, new)
            self.check(b, a)
    def test_import_module_usage(self):
        """Attribute usage mod.old is renamed at the use site."""
        for mod, (old, new) in list(self.modules.items()):
            b = """
                import %s
                foo(%s, %s.%s)
                """ % (
                mod,
                mod,
                mod,
                old,
            )
            a = """
                import %s
                foo(%s, %s.%s)
                """ % (
                mod,
                mod,
                mod,
                new,
            )
            self.check(b, a)
    # NOTE: the XXX_ prefix keeps this out of unittest discovery — the case
    # (renaming bare uses after a from-import) is disabled upstream.
    def XXX_test_from_import_usage(self):
        for mod, (old, new) in list(self.modules.items()):
            b = """
                from %s import %s
                foo(%s, %s)
                """ % (
                mod,
                old,
                mod,
                old,
            )
            a = """
                from %s import %s
                foo(%s, %s)
                """ % (
                mod,
                new,
                mod,
                new,
            )
            self.check(b, a)
class Test_unicode(FixerTestCase):
    """Tests for the 'unicode' fixer: unicode()/unichr()/u-prefixed literals."""
    fixer = "unicode"
    def test_whitespace(self):
        """Whitespace inside and around the call is preserved."""
        b = """unicode( x)"""
        a = """str( x)"""
        self.check(b, a)
        b = """ unicode(x )"""
        a = """ str(x )"""
        self.check(b, a)
        b = """ u'h'"""
        a = """ 'h'"""
        self.check(b, a)
    def test_unicode_call(self):
        """unicode(...) calls become str(...) with arguments intact."""
        b = """unicode(x, y, z)"""
        a = """str(x, y, z)"""
        self.check(b, a)
    def test_unichr(self):
        """unichr becomes chr, and the u prefix inside is dropped too."""
        b = """unichr(u'h')"""
        a = """chr('h')"""
        self.check(b, a)
    def test_unicode_literal_1(self):
        b = '''u"x"'''
        a = '''"x"'''
        self.check(b, a)
    def test_unicode_literal_2(self):
        b = """ur'x'"""
        a = """r'x'"""
        self.check(b, a)
    def test_unicode_literal_3(self):
        """Prefix stripping is case-insensitive (UR -> R)."""
        b = """UR'''x''' """
        a = """R'''x''' """
        self.check(b, a)
    def test_native_literal_escape_u(self):
        """In native (bytes-in-py2) literals, \\u escapes must be doubled."""
        b = r"""'\\\u20ac\U0001d121\\u20ac'"""
        a = r"""'\\\\u20ac\\U0001d121\\u20ac'"""
        self.check(b, a)
        b = r"""r'\\\u20ac\U0001d121\\u20ac'"""
        a = r"""r'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
    def test_bytes_literal_escape_u(self):
        """Bytes literals keep their escapes untouched."""
        b = r"""b'\\\u20ac\U0001d121\\u20ac'"""
        a = r"""b'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
        b = r"""br'\\\u20ac\U0001d121\\u20ac'"""
        a = r"""br'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
    def test_unicode_literal_escape_u(self):
        """u-prefixed literals drop the prefix and keep escapes as-is."""
        b = r"""u'\\\u20ac\U0001d121\\u20ac'"""
        a = r"""'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
        b = r"""ur'\\\u20ac\U0001d121\\u20ac'"""
        a = r"""r'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
    def test_native_unicode_literal_escape_u(self):
        """Under unicode_literals, native literals need no escape doubling."""
        f = "from __future__ import unicode_literals\n"
        b = f + r"""'\\\u20ac\U0001d121\\u20ac'"""
        a = f + r"""'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
        b = f + r"""r'\\\u20ac\U0001d121\\u20ac'"""
        a = f + r"""r'\\\u20ac\U0001d121\\u20ac'"""
        self.check(b, a)
class Test_filter(FixerTestCase):
    """Tests for the 'filter' fixer: wrap in list() or rewrite to a listcomp."""
    fixer = "filter"
    def test_prefix_preservation(self):
        """Whitespace inside the call survives both rewrite forms."""
        b = """x = filter( foo, 'abc' )"""
        a = """x = list(filter( foo, 'abc' ))"""
        self.check(b, a)
        b = """x = filter( None , 'abc' )"""
        a = """x = [_f for _f in 'abc' if _f]"""
        self.check(b, a)
    def test_filter_basic(self):
        """filter(None, ...) and lambda filters become comprehensions."""
        b = """x = filter(None, 'abc')"""
        a = """x = [_f for _f in 'abc' if _f]"""
        self.check(b, a)
        b = """x = len(filter(f, 'abc'))"""
        a = """x = len(list(filter(f, 'abc')))"""
        self.check(b, a)
        b = """x = filter(lambda x: x%2 == 0, range(10))"""
        a = """x = [x for x in range(10) if x%2 == 0]"""
        self.check(b, a)
        # Py2-only parenthesized lambda parameter is handled too.
        b = """x = filter(lambda (x): x%2 == 0, range(10))"""
        a = """x = [x for x in range(10) if x%2 == 0]"""
        self.check(b, a)
    def test_filter_trailers(self):
        """Trailing subscripts after the call are preserved."""
        b = """x = filter(None, 'abc')[0]"""
        a = """x = [_f for _f in 'abc' if _f][0]"""
        self.check(b, a)
        b = """x = len(filter(f, 'abc')[0])"""
        a = """x = len(list(filter(f, 'abc'))[0])"""
        self.check(b, a)
        b = """x = filter(lambda x: x%2 == 0, range(10))[0]"""
        a = """x = [x for x in range(10) if x%2 == 0][0]"""
        self.check(b, a)
        b = """x = filter(lambda (x): x%2 == 0, range(10))[0]"""
        a = """x = [x for x in range(10) if x%2 == 0][0]"""
        self.check(b, a)
    def test_filter_nochange(self):
        """Contexts that already consume an iterator need no wrapping."""
        a = """b.join(filter(f, 'abc'))"""
        self.unchanged(a)
        a = """(a + foo(5)).join(filter(f, 'abc'))"""
        self.unchanged(a)
        a = """iter(filter(f, 'abc'))"""
        self.unchanged(a)
        a = """list(filter(f, 'abc'))"""
        self.unchanged(a)
        a = """list(filter(f, 'abc'))[0]"""
        self.unchanged(a)
        a = """set(filter(f, 'abc'))"""
        self.unchanged(a)
        a = """set(filter(f, 'abc')).pop()"""
        self.unchanged(a)
        a = """tuple(filter(f, 'abc'))"""
        self.unchanged(a)
        a = """any(filter(f, 'abc'))"""
        self.unchanged(a)
        a = """all(filter(f, 'abc'))"""
        self.unchanged(a)
        a = """sum(filter(f, 'abc'))"""
        self.unchanged(a)
        a = """sorted(filter(f, 'abc'))"""
        self.unchanged(a)
        a = """sorted(filter(f, 'abc'), key=blah)"""
        self.unchanged(a)
        a = """sorted(filter(f, 'abc'), key=blah)[0]"""
        self.unchanged(a)
        a = """enumerate(filter(f, 'abc'))"""
        self.unchanged(a)
        a = """enumerate(filter(f, 'abc'), start=1)"""
        self.unchanged(a)
        a = """for i in filter(f, 'abc'): pass"""
        self.unchanged(a)
        a = """[x for x in filter(f, 'abc')]"""
        self.unchanged(a)
        a = """(x for x in filter(f, 'abc'))"""
        self.unchanged(a)
    def test_future_builtins(self):
        """A future_builtins import of filter suppresses the rewrite."""
        a = "from future_builtins import spam, filter; filter(f, 'ham')"
        self.unchanged(a)
        b = """from future_builtins import spam; x = filter(f, 'abc')"""
        a = """from future_builtins import spam; x = list(filter(f, 'abc'))"""
        self.check(b, a)
        a = "from future_builtins import *; filter(f, 'ham')"
        self.unchanged(a)
class Test_map(FixerTestCase):
    """Tests for the 'map' fixer: wrap in list() or rewrite to a listcomp."""
    fixer = "map"
    def check(self, b, a):
        """Also verify the input is untouched when map comes from future_builtins.

        NOTE(review): the second positional argument to unchanged() here is the
        *after* string `a`, which lands in its ignore_warnings slot (truthy) —
        presumably intentional to silence warnings; confirm against FixerTestCase.
        """
        self.unchanged("from future_builtins import map; " + b, a)
        super(Test_map, self).check(b, a)
    def test_prefix_preservation(self):
        b = """x = map( f, 'abc' )"""
        a = """x = list(map( f, 'abc' ))"""
        self.check(b, a)
    def test_map_trailers(self):
        """Trailing subscripts after the call are preserved."""
        b = """x = map(f, 'abc')[0]"""
        a = """x = list(map(f, 'abc'))[0]"""
        self.check(b, a)
        b = """x = map(None, l)[0]"""
        a = """x = list(l)[0]"""
        self.check(b, a)
        b = """x = map(lambda x:x, l)[0]"""
        a = """x = [x for x in l][0]"""
        self.check(b, a)
        b = """x = map(f, 'abc')[0][1]"""
        a = """x = list(map(f, 'abc'))[0][1]"""
        self.check(b, a)
    def test_trailing_comment(self):
        b = """x = map(f, 'abc') # foo"""
        a = """x = list(map(f, 'abc')) # foo"""
        self.check(b, a)
    def test_None_with_multiple_arguments(self):
        """map(None, a, b, ...) has no direct equivalent; warn instead."""
        s = """x = map(None, a, b, c)"""
        self.warns_unchanged(
            s, "cannot convert map(None, ...) with " "multiple arguments"
        )
    def test_map_basic(self):
        """The three rewrite forms: list(map(...)), list(...), listcomp."""
        b = """x = map(f, 'abc')"""
        a = """x = list(map(f, 'abc'))"""
        self.check(b, a)
        b = """x = len(map(f, 'abc', 'def'))"""
        a = """x = len(list(map(f, 'abc', 'def')))"""
        self.check(b, a)
        b = """x = map(None, 'abc')"""
        a = """x = list('abc')"""
        self.check(b, a)
        b = """x = map(lambda x: x+1, range(4))"""
        a = """x = [x+1 for x in range(4)]"""
        self.check(b, a)
        # Py2-only parenthesized lambda parameter.
        b = """x = map(lambda (x): x+1, range(4))"""
        a = """x = [x+1 for x in range(4)]"""
        self.check(b, a)
        b = """
            foo()
            # foo
            map(f, x)
            """
        a = """
            foo()
            # foo
            list(map(f, x))
            """
        self.warns(b, a, "You should use a for loop here")
    def test_map_nochange(self):
        """Contexts that already consume an iterator need no wrapping."""
        a = """b.join(map(f, 'abc'))"""
        self.unchanged(a)
        a = """(a + foo(5)).join(map(f, 'abc'))"""
        self.unchanged(a)
        a = """iter(map(f, 'abc'))"""
        self.unchanged(a)
        a = """list(map(f, 'abc'))"""
        self.unchanged(a)
        a = """list(map(f, 'abc'))[0]"""
        self.unchanged(a)
        a = """set(map(f, 'abc'))"""
        self.unchanged(a)
        a = """set(map(f, 'abc')).pop()"""
        self.unchanged(a)
        a = """tuple(map(f, 'abc'))"""
        self.unchanged(a)
        a = """any(map(f, 'abc'))"""
        self.unchanged(a)
        a = """all(map(f, 'abc'))"""
        self.unchanged(a)
        a = """sum(map(f, 'abc'))"""
        self.unchanged(a)
        a = """sorted(map(f, 'abc'))"""
        self.unchanged(a)
        a = """sorted(map(f, 'abc'), key=blah)"""
        self.unchanged(a)
        a = """sorted(map(f, 'abc'), key=blah)[0]"""
        self.unchanged(a)
        a = """enumerate(map(f, 'abc'))"""
        self.unchanged(a)
        a = """enumerate(map(f, 'abc'), start=1)"""
        self.unchanged(a)
        a = """for i in map(f, 'abc'): pass"""
        self.unchanged(a)
        a = """[x for x in map(f, 'abc')]"""
        self.unchanged(a)
        a = """(x for x in map(f, 'abc'))"""
        self.unchanged(a)
    def test_future_builtins(self):
        """A future_builtins import of map suppresses the rewrite."""
        a = "from future_builtins import spam, map, eggs; map(f, 'ham')"
        self.unchanged(a)
        b = """from future_builtins import spam, eggs; x = map(f, 'abc')"""
        a = """from future_builtins import spam, eggs; x = list(map(f, 'abc'))"""
        self.check(b, a)
        a = "from future_builtins import *; map(f, 'ham')"
        self.unchanged(a)
class Test_zip(FixerTestCase):
    """Tests for the 'zip' fixer: wrap zip(...) in list() where needed."""
    fixer = "zip"
    def check(self, b, a):
        """Also verify the input is untouched when zip comes from future_builtins.

        NOTE(review): as in Test_map.check, `a` lands in unchanged()'s second
        positional (ignore_warnings) slot — confirm this is intentional.
        """
        self.unchanged("from future_builtins import zip; " + b, a)
        super(Test_zip, self).check(b, a)
    def test_zip_basic(self):
        b = """x = zip()"""
        a = """x = list(zip())"""
        self.check(b, a)
        b = """x = zip(a, b, c)"""
        a = """x = list(zip(a, b, c))"""
        self.check(b, a)
        b = """x = len(zip(a, b))"""
        a = """x = len(list(zip(a, b)))"""
        self.check(b, a)
    def test_zip_trailers(self):
        """Trailing subscripts after the call are preserved."""
        b = """x = zip(a, b, c)[0]"""
        a = """x = list(zip(a, b, c))[0]"""
        self.check(b, a)
        b = """x = zip(a, b, c)[0][1]"""
        a = """x = list(zip(a, b, c))[0][1]"""
        self.check(b, a)
    def test_zip_nochange(self):
        """Contexts that already consume an iterator need no wrapping."""
        a = """b.join(zip(a, b))"""
        self.unchanged(a)
        a = """(a + foo(5)).join(zip(a, b))"""
        self.unchanged(a)
        a = """iter(zip(a, b))"""
        self.unchanged(a)
        a = """list(zip(a, b))"""
        self.unchanged(a)
        a = """list(zip(a, b))[0]"""
        self.unchanged(a)
        a = """set(zip(a, b))"""
        self.unchanged(a)
        a = """set(zip(a, b)).pop()"""
        self.unchanged(a)
        a = """tuple(zip(a, b))"""
        self.unchanged(a)
        a = """any(zip(a, b))"""
        self.unchanged(a)
        a = """all(zip(a, b))"""
        self.unchanged(a)
        a = """sum(zip(a, b))"""
        self.unchanged(a)
        a = """sorted(zip(a, b))"""
        self.unchanged(a)
        a = """sorted(zip(a, b), key=blah)"""
        self.unchanged(a)
        a = """sorted(zip(a, b), key=blah)[0]"""
        self.unchanged(a)
        a = """enumerate(zip(a, b))"""
        self.unchanged(a)
        a = """enumerate(zip(a, b), start=1)"""
        self.unchanged(a)
        a = """for i in zip(a, b): pass"""
        self.unchanged(a)
        a = """[x for x in zip(a, b)]"""
        self.unchanged(a)
        a = """(x for x in zip(a, b))"""
        self.unchanged(a)
    def test_future_builtins(self):
        """A future_builtins import of zip suppresses the rewrite."""
        a = "from future_builtins import spam, zip, eggs; zip(a, b)"
        self.unchanged(a)
        b = """from future_builtins import spam, eggs; x = zip(a, b)"""
        a = """from future_builtins import spam, eggs; x = list(zip(a, b))"""
        self.check(b, a)
        a = "from future_builtins import *; zip(a, b)"
        self.unchanged(a)
class Test_standarderror(FixerTestCase):
    """Tests for the 'standarderror' fixer: StandardError -> Exception."""
    fixer = "standarderror"
    def test(self):
        """StandardError is renamed in several call contexts."""
        cases = [
            ("""x = StandardError()""", """x = Exception()"""),
            ("""x = StandardError(a, b, c)""", """x = Exception(a, b, c)"""),
            ("""f(2 + StandardError(a, b, c))""", """f(2 + Exception(a, b, c))"""),
        ]
        for before, after in cases:
            self.check(before, after)
class Test_types(FixerTestCase):
    """Tests for the 'types' fixer: types.XxxType aliases -> builtins."""
    fixer = "types"
    def test_basic_types_convert(self):
        """Each types-module alias maps to the matching builtin spelling."""
        conversions = [
            ("""types.StringType""", """bytes"""),
            ("""types.DictType""", """dict"""),
            ("""types . IntType""", """int"""),
            ("""types.ListType""", """list"""),
            ("""types.LongType""", """int"""),
            ("""types.NoneType""", """type(None)"""),
            ("types.StringTypes", "(str,)"),
        ]
        for before, after in conversions:
            self.check(before, after)
class Test_idioms(FixerTestCase):
    """Tests for the 'idioms' fixer: while 1, type() comparisons, sort idiom."""
    fixer = "idioms"
    def test_while(self):
        """while 1 becomes while True."""
        b = """while 1: foo()"""
        a = """while True: foo()"""
        self.check(b, a)
        b = """while 1: foo()"""
        a = """while True: foo()"""
        self.check(b, a)
        b = """
            while 1:
                foo()
            """
        a = """
            while True:
                foo()
            """
        self.check(b, a)
    def test_while_unchanged(self):
        """Only the literal 1 is rewritten; other conditions are left alone."""
        s = """while 11: foo()"""
        self.unchanged(s)
        s = """while 0: foo()"""
        self.unchanged(s)
        s = """while foo(): foo()"""
        self.unchanged(s)
        s = """while []: foo()"""
        self.unchanged(s)
    def test_eq_simple(self):
        """type(x) == T becomes isinstance(x, T)."""
        b = """type(x) == T"""
        a = """isinstance(x, T)"""
        self.check(b, a)
        b = """if type(x) == T: pass"""
        a = """if isinstance(x, T): pass"""
        self.check(b, a)
    def test_eq_reverse(self):
        """The comparison is recognized with operands swapped."""
        b = """T == type(x)"""
        a = """isinstance(x, T)"""
        self.check(b, a)
        b = """if T == type(x): pass"""
        a = """if isinstance(x, T): pass"""
        self.check(b, a)
    def test_eq_expression(self):
        """Both operands may be arbitrary expressions."""
        b = """type(x+y) == d.get('T')"""
        a = """isinstance(x+y, d.get('T'))"""
        self.check(b, a)
        b = """type( x + y) == d.get('T')"""
        a = """isinstance(x + y, d.get('T'))"""
        self.check(b, a)
    def test_is_simple(self):
        """type(x) is T becomes isinstance(x, T)."""
        b = """type(x) is T"""
        a = """isinstance(x, T)"""
        self.check(b, a)
        b = """if type(x) is T: pass"""
        a = """if isinstance(x, T): pass"""
        self.check(b, a)
    def test_is_reverse(self):
        b = """T is type(x)"""
        a = """isinstance(x, T)"""
        self.check(b, a)
        b = """if T is type(x): pass"""
        a = """if isinstance(x, T): pass"""
        self.check(b, a)
    def test_is_expression(self):
        b = """type(x+y) is d.get('T')"""
        a = """isinstance(x+y, d.get('T'))"""
        self.check(b, a)
        b = """type( x + y) is d.get('T')"""
        a = """isinstance(x + y, d.get('T'))"""
        self.check(b, a)
    def test_is_not_simple(self):
        """type(x) is not T becomes not isinstance(x, T)."""
        b = """type(x) is not T"""
        a = """not isinstance(x, T)"""
        self.check(b, a)
        b = """if type(x) is not T: pass"""
        a = """if not isinstance(x, T): pass"""
        self.check(b, a)
    def test_is_not_reverse(self):
        b = """T is not type(x)"""
        a = """not isinstance(x, T)"""
        self.check(b, a)
        b = """if T is not type(x): pass"""
        a = """if not isinstance(x, T): pass"""
        self.check(b, a)
    def test_is_not_expression(self):
        b = """type(x+y) is not d.get('T')"""
        a = """not isinstance(x+y, d.get('T'))"""
        self.check(b, a)
        b = """type( x + y) is not d.get('T')"""
        a = """not isinstance(x + y, d.get('T'))"""
        self.check(b, a)
    def test_ne_simple(self):
        """type(x) != T becomes not isinstance(x, T)."""
        b = """type(x) != T"""
        a = """not isinstance(x, T)"""
        self.check(b, a)
        b = """if type(x) != T: pass"""
        a = """if not isinstance(x, T): pass"""
        self.check(b, a)
    def test_ne_reverse(self):
        b = """T != type(x)"""
        a = """not isinstance(x, T)"""
        self.check(b, a)
        b = """if T != type(x): pass"""
        a = """if not isinstance(x, T): pass"""
        self.check(b, a)
    def test_ne_expression(self):
        b = """type(x+y) != d.get('T')"""
        a = """not isinstance(x+y, d.get('T'))"""
        self.check(b, a)
        b = """type( x + y) != d.get('T')"""
        a = """not isinstance(x + y, d.get('T'))"""
        self.check(b, a)
    def test_type_unchanged(self):
        """type() used for something other than a comparison is untouched."""
        a = """type(x).__name__"""
        self.unchanged(a)
    def test_sort_list_call(self):
        """v = list(t); v.sort() collapses to v = sorted(t)."""
        b = """
            v = list(t)
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(t)
            foo(v)
            """
        self.check(b, a)
        b = """
            v = list(foo(b) + d)
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(foo(b) + d)
            foo(v)
            """
        self.check(b, a)
        b = """
            while x:
                v = list(t)
                v.sort()
                foo(v)
            """
        a = """
            while x:
                v = sorted(t)
                foo(v)
            """
        self.check(b, a)
        b = """
            v = list(t)
            # foo
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(t)
            # foo
            foo(v)
            """
        self.check(b, a)
        b = r"""
            v = list( t)
            v.sort()
            foo(v)
            """
        a = r"""
            v = sorted( t)
            foo(v)
            """
        self.check(b, a)
        b = r"""
            try:
                m = list(s)
                m.sort()
            except: pass
            """
        a = r"""
            try:
                m = sorted(s)
            except: pass
            """
        self.check(b, a)
        b = r"""
            try:
                m = list(s)
                # foo
                m.sort()
            except: pass
            """
        a = r"""
            try:
                m = sorted(s)
                # foo
            except: pass
            """
        self.check(b, a)
        b = r"""
            m = list(s)
            # more comments
            m.sort()"""
        a = r"""
            m = sorted(s)
            # more comments"""
        self.check(b, a)
    def test_sort_simple_expr(self):
        """v = expr; v.sort() collapses to v = sorted(expr)."""
        b = """
            v = t
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(t)
            foo(v)
            """
        self.check(b, a)
        b = """
            v = foo(b)
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(foo(b))
            foo(v)
            """
        self.check(b, a)
        b = """
            v = b.keys()
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(b.keys())
            foo(v)
            """
        self.check(b, a)
        b = """
            v = foo(b) + d
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(foo(b) + d)
            foo(v)
            """
        self.check(b, a)
        b = """
            while x:
                v = t
                v.sort()
                foo(v)
            """
        a = """
            while x:
                v = sorted(t)
                foo(v)
            """
        self.check(b, a)
        b = """
            v = t
            # foo
            v.sort()
            foo(v)
            """
        a = """
            v = sorted(t)
            # foo
            foo(v)
            """
        self.check(b, a)
        b = r"""
            v = t
            v.sort()
            foo(v)
            """
        a = r"""
            v = sorted(t)
            foo(v)
            """
        self.check(b, a)
    def test_sort_unchanged(self):
        """The idiom requires the same variable and an argument-free sort()."""
        s = """
            v = list(t)
            w.sort()
            foo(w)
            """
        self.unchanged(s)
        s = """
            v = list(t)
            v.sort(u)
            foo(v)
            """
        self.unchanged(s)
class Test_basestring(FixerTestCase):
    """Tests for the 'basestring' fixer: basestring -> str."""
    fixer = "basestring"
    def test_basestring(self):
        """A basestring reference is rewritten to str."""
        self.check("""isinstance(x, basestring)""", """isinstance(x, str)""")
class Test_buffer(FixerTestCase):
    """Tests for the 'buffer' fixer: buffer() -> memoryview()."""
    fixer = "buffer"
    def test_buffer(self):
        """A plain buffer() call becomes memoryview()."""
        self.check("""x = buffer(y)""", """x = memoryview(y)""")
    def test_slicing(self):
        """Trailing slices on the rewritten call are preserved."""
        self.check("""buffer(y)[4:5]""", """memoryview(y)[4:5]""")
class Test_future(FixerTestCase):
    """Tests for the 'future' fixer: remove from __future__ import braces."""
    fixer = "future"
    def test_future(self):
        """The import line is removed while surrounding comments are kept."""
        b = """from __future__ import braces"""
        a = """"""
        self.check(b, a)
        b = """# comment\nfrom __future__ import braces"""
        a = """# comment\n"""
        self.check(b, a)
        b = """from __future__ import braces\n# comment"""
        a = """\n# comment"""
        self.check(b, a)
    def test_run_order(self):
        """This fixer must run after the 'print' fixer."""
        self.assert_runs_after("print")
class Test_itertools(FixerTestCase):
    """Tests for the 'itertools' fixer: imap/ifilter/izip and friends."""
    fixer = "itertools"
    def checkall(self, before, after):
        """Run one %s-templated case for i{map,filter,zip}, bare and qualified."""
        # Expects a %s-formatted string; runs the check on all the types of functions
        for i in ("itertools.", ""):
            for f in ("map", "filter", "zip"):
                b = before % (i + "i" + f)
                a = after % (f)
                self.check(b, a)
    def test_0(self):
        b = "itertools.izip(a, b)"
        a = "zip(a, b)"
        self.check(b, a)
    def test_1(self):
        b = """%s(f, a)"""
        a = """%s(f, a)"""
        self.checkall(b, a)
    def test_qualified(self):
        """ifilterfalse/izip_longest keep their itertools qualification."""
        b = """itertools.ifilterfalse(a, b)"""
        a = """itertools.filterfalse(a, b)"""
        self.check(b, a)
        b = """itertools.izip_longest(a, b)"""
        a = """itertools.zip_longest(a, b)"""
        self.check(b, a)
    def test_2(self):
        b = """ifilterfalse(a, b)"""
        a = """filterfalse(a, b)"""
        self.check(b, a)
        b = """izip_longest(a, b)"""
        a = """zip_longest(a, b)"""
        self.check(b, a)
    def test_space_1(self):
        """Leading whitespace before the call is preserved."""
        b = """ %s(f, a)"""
        a = """ %s(f, a)"""
        self.checkall(b, a)
    def test_space_2(self):
        b = """ itertools.ifilterfalse(a, b)"""
        a = """ itertools.filterfalse(a, b)"""
        self.check(b, a)
        b = """ itertools.izip_longest(a, b)"""
        a = """ itertools.zip_longest(a, b)"""
        self.check(b, a)
    def test_run_order(self):
        """This fixer must run after the map/zip/filter fixers."""
        self.assert_runs_after("map", "zip", "filter")
class Test_itertools_imports(FixerTestCase):
    """Tests for the 'itertools_imports' fixer: prune imap/izip/... imports."""
    fixer = "itertools_imports"
    def test_reduced(self):
        """Removed names are pruned while the remaining list is kept."""
        b = "from itertools import imap, izip, foo"
        a = "from itertools import foo"
        self.check(b, a)
        b = "from itertools import bar, imap, izip, foo"
        a = "from itertools import bar, foo"
        self.check(b, a)
        b = "from itertools import chain, imap, izip"
        a = "from itertools import chain"
        self.check(b, a)
    def test_comments(self):
        """A preceding comment line survives removal of the whole import."""
        b = "#foo\nfrom itertools import imap, izip"
        a = "#foo\n"
        self.check(b, a)
    def test_none(self):
        """An import consisting only of removed names disappears entirely."""
        b = "from itertools import imap, izip"
        a = ""
        self.check(b, a)
        b = "from itertools import izip"
        a = ""
        self.check(b, a)
    def test_import_as(self):
        """Aliased removed names are dropped; other aliases are kept."""
        b = "from itertools import izip, bar as bang, imap"
        a = "from itertools import bar as bang"
        self.check(b, a)
        b = "from itertools import izip as _zip, imap, bar"
        a = "from itertools import bar"
        self.check(b, a)
        b = "from itertools import imap as _map"
        a = ""
        self.check(b, a)
        b = "from itertools import imap as _map, izip as _zip"
        a = ""
        self.check(b, a)
        s = "from itertools import bar as bang"
        self.unchanged(s)
    def test_ifilter_and_zip_longest(self):
        """ifilterfalse/izip_longest are renamed (i prefix dropped) in place."""
        for name in "filterfalse", "zip_longest":
            b = "from itertools import i%s" % (name,)
            a = "from itertools import %s" % (name,)
            self.check(b, a)
            b = "from itertools import imap, i%s, foo" % (name,)
            a = "from itertools import %s, foo" % (name,)
            self.check(b, a)
            b = "from itertools import bar, i%s, foo" % (name,)
            a = "from itertools import bar, %s, foo" % (name,)
            self.check(b, a)
    def test_import_star(self):
        s = "from itertools import *"
        self.unchanged(s)
    def test_unchanged(self):
        s = "from itertools import foo"
        self.unchanged(s)
class Test_import(FixerTestCase):
    """Tests for the 'import' fixer: implicit relative imports -> explicit."""
    fixer = "import"
    def setUp(self):
        super(Test_import, self).setUp()
        # Need to replace fix_import's exists method with a recording fake so
        # tests can control which sibling files appear to exist on disk.
        self.files_checked = []
        self.present_files = set()
        self.always_exists = True
        def fake_exists(name):
            self.files_checked.append(name)
            return self.always_exists or (name in self.present_files)
        from fissix.fixes import fix_import
        fix_import.exists = fake_exists
    def tearDown(self):
        # Restore the real os.path.exists so other tests are unaffected.
        from fissix.fixes import fix_import
        fix_import.exists = os.path.exists
    def check_both(self, b, a):
        """Assert the rewrite happens when files exist and not otherwise."""
        self.always_exists = True
        super(Test_import, self).check(b, a)
        self.always_exists = False
        super(Test_import, self).unchanged(b)
    def test_files_checked(self):
        """The fixer probes the expected candidate paths for a local module."""
        def p(path):
            # Takes a unix path and returns a path with correct separators
            # NOTE(review): os.path.pathsep is the PATH-*list* separator
            # (":" / ";"), not the directory separator os.path.sep, so on
            # POSIX p("/spam/eggs.py") becomes ":spam:eggs.py" (dirname "")
            # and every name effectively tests the current-directory case.
            # The assertions below depend on this; confirm before "fixing".
            return os.path.pathsep.join(path.split("/"))
        self.always_exists = False
        self.present_files = set(["__init__.py"])
        expected_extensions = (".py", os.path.sep, ".pyc", ".so", ".sl", ".pyd")
        names_to_test = (p("/spam/eggs.py"), "ni.py", p("../../shrubbery.py"))
        for name in names_to_test:
            self.files_checked = []
            self.filename = name
            self.unchanged("import jam")
            if os.path.dirname(name):
                name = os.path.dirname(name) + "/jam"
            else:
                name = "jam"
            expected_checks = set(name + ext for ext in expected_extensions)
            expected_checks.add("__init__.py")
            self.assertEqual(set(self.files_checked), expected_checks)
    def test_not_in_package(self):
        """Without __init__.py there is no package, so no rewrite."""
        s = "import bar"
        self.always_exists = False
        self.present_files = set(["bar.py"])
        self.unchanged(s)
    def test_with_absolute_import_enabled(self):
        """absolute_import in effect means the import is already absolute."""
        s = "from __future__ import absolute_import\nimport bar"
        self.always_exists = False
        self.present_files = set(["__init__.py", "bar.py"])
        self.unchanged(s)
    def test_in_package(self):
        """A sibling module inside a package becomes a relative import."""
        b = "import bar"
        a = "from . import bar"
        self.always_exists = False
        self.present_files = set(["__init__.py", "bar.py"])
        self.check(b, a)
    def test_import_from_package(self):
        """A sibling sub*package* (directory) is rewritten the same way."""
        b = "import bar"
        a = "from . import bar"
        self.always_exists = False
        self.present_files = set(["__init__.py", "bar" + os.path.sep])
        self.check(b, a)
    def test_already_relative_import(self):
        s = "from . import bar"
        self.unchanged(s)
    def test_comments_and_indent(self):
        b = "import bar # Foo"
        a = "from . import bar # Foo"
        self.check(b, a)
    def test_from(self):
        b = "from foo import bar, baz"
        a = "from .foo import bar, baz"
        self.check_both(b, a)
        b = "from foo import bar"
        a = "from .foo import bar"
        self.check_both(b, a)
        b = "from foo import (bar, baz)"
        a = "from .foo import (bar, baz)"
        self.check_both(b, a)
    def test_dotted_from(self):
        b = "from green.eggs import ham"
        a = "from .green.eggs import ham"
        self.check_both(b, a)
    def test_from_as(self):
        b = "from green.eggs import ham as spam"
        a = "from .green.eggs import ham as spam"
        self.check_both(b, a)
    def test_import(self):
        b = "import foo"
        a = "from . import foo"
        self.check_both(b, a)
        b = "import foo, bar"
        a = "from . import foo, bar"
        self.check_both(b, a)
        b = "import foo, bar, x"
        a = "from . import foo, bar, x"
        self.check_both(b, a)
        b = "import x, y, z"
        a = "from . import x, y, z"
        self.check_both(b, a)
    def test_import_as(self):
        b = "import foo as x"
        a = "from . import foo as x"
        self.check_both(b, a)
        b = "import a as b, b as c, c as d"
        a = "from . import a as b, b as c, c as d"
        self.check_both(b, a)
    def test_local_and_absolute(self):
        """A statement mixing local and absolute imports only warns."""
        self.always_exists = False
        self.present_files = set(["foo.py", "__init__.py"])
        s = "import foo, bar"
        self.warns_unchanged(s, "absolute and local imports together")
    def test_dotted_import(self):
        b = "import foo.bar"
        a = "from . import foo.bar"
        self.check_both(b, a)
    def test_dotted_import_as(self):
        b = "import foo.bar as bang"
        a = "from . import foo.bar as bang"
        self.check_both(b, a)
    def test_prefix(self):
        """A comment before the import is preserved."""
        b = """
        # prefix
        import foo.bar
        """
        a = """
        # prefix
        from . import foo.bar
        """
        self.check_both(b, a)
class Test_set_literal(FixerTestCase):
    """Tests for the optional 'set_literal' fixer: set([...]) -> {...}."""
    fixer = "set_literal"
    def test_basic(self):
        """set() over a list or tuple literal becomes a set display."""
        b = """set([1, 2, 3])"""
        a = """{1, 2, 3}"""
        self.check(b, a)
        b = """set((1, 2, 3))"""
        a = """{1, 2, 3}"""
        self.check(b, a)
        b = """set((1,))"""
        a = """{1}"""
        self.check(b, a)
        b = """set([1])"""
        self.check(b, a)
        b = """set((a, b))"""
        a = """{a, b}"""
        self.check(b, a)
        b = """set([a, b])"""
        self.check(b, a)
        b = """set((a*234, f(args=23)))"""
        a = """{a*234, f(args=23)}"""
        self.check(b, a)
        b = """set([a*23, f(23)])"""
        a = """{a*23, f(23)}"""
        self.check(b, a)
        b = """set([a-234**23])"""
        a = """{a-234**23}"""
        self.check(b, a)
    def test_listcomps(self):
        """set() over a list comprehension becomes a set comprehension."""
        b = """set([x for x in y])"""
        a = """{x for x in y}"""
        self.check(b, a)
        b = """set([x for x in y if x == m])"""
        a = """{x for x in y if x == m}"""
        self.check(b, a)
        b = """set([x for x in y for a in b])"""
        a = """{x for x in y for a in b}"""
        self.check(b, a)
        b = """set([f(x) - 23 for x in y])"""
        a = """{f(x) - 23 for x in y}"""
        self.check(b, a)
    def test_whitespace(self):
        """Interior spacing is kept; spacing around the call itself is not."""
        b = """set( [1, 2])"""
        a = """{1, 2}"""
        self.check(b, a)
        b = """set([1 , 2])"""
        a = """{1 , 2}"""
        self.check(b, a)
        b = """set([ 1 ])"""
        a = """{ 1 }"""
        self.check(b, a)
        b = """set( [1] )"""
        a = """{1}"""
        self.check(b, a)
        b = """set([ 1, 2 ])"""
        a = """{ 1, 2 }"""
        self.check(b, a)
        b = """set([x for x in y ])"""
        a = """{x for x in y }"""
        self.check(b, a)
        b = """set(
            [1, 2]
            )
            """
        a = """{1, 2}\n"""
        self.check(b, a)
    def test_comments(self):
        b = """set((1, 2)) # Hi"""
        a = """{1, 2} # Hi"""
        self.check(b, a)
        # This isn't optimal behavior, but the fixer is optional.
        b = """
            # Foo
            set( # Bar
                (1, 2)
            )
            """
        a = """
            # Foo
            {1, 2}
            """
        self.check(b, a)
    def test_unchanged(self):
        """Non-literal arguments and generator arguments are not rewritten."""
        s = """set()"""
        self.unchanged(s)
        s = """set(a)"""
        self.unchanged(s)
        s = """set(a, b, c)"""
        self.unchanged(s)
        s = """set(x for x in y)"""
        self.unchanged(s)
        s = """set(x for x in y if z)"""
        self.unchanged(s)
        s = """set(a*823-23**2 + f(23))"""
        self.unchanged(s)
class Test_sys_exc(FixerTestCase):
    """Tests for the 'sys_exc' fixer: sys.exc_* -> sys.exc_info()[i]."""
    fixer = "sys_exc"
    def test_0(self):
        """sys.exc_type maps to exc_info() index 0."""
        self.check("sys.exc_type", "sys.exc_info()[0]")
    def test_1(self):
        """sys.exc_value maps to exc_info() index 1."""
        self.check("sys.exc_value", "sys.exc_info()[1]")
    def test_2(self):
        """sys.exc_traceback maps to exc_info() index 2."""
        self.check("sys.exc_traceback", "sys.exc_info()[2]")
    def test_3(self):
        """A trailing comment after the attribute is preserved."""
        self.check("sys.exc_type # Foo", "sys.exc_info()[0] # Foo")
    def test_4(self):
        """Whitespace after the dot is preserved."""
        self.check("sys. exc_type", "sys. exc_info()[0]")
    def test_5(self):
        """Whitespace before the dot is preserved."""
        self.check("sys .exc_type", "sys .exc_info()[0]")
class Test_paren(FixerTestCase):
    """Tests for the 'paren' fixer: parenthesize bare tuples in comprehensions."""
    fixer = "paren"
    def test_0(self):
        b = """[i for i in 1, 2 ]"""
        a = """[i for i in (1, 2) ]"""
        self.check(b, a)
    def test_1(self):
        """A trailing comma inside the tuple is kept."""
        b = """[i for i in 1, 2, ]"""
        a = """[i for i in (1, 2,) ]"""
        self.check(b, a)
    def test_2(self):
        b = """[i for i in 1, 2 ]"""
        a = """[i for i in (1, 2) ]"""
        self.check(b, a)
    def test_3(self):
        """A following if-clause does not confuse the fixer."""
        b = """[i for i in 1, 2 if i]"""
        a = """[i for i in (1, 2) if i]"""
        self.check(b, a)
    def test_4(self):
        b = """[i for i in 1, 2 ]"""
        a = """[i for i in (1, 2) ]"""
        self.check(b, a)
    def test_5(self):
        """Generator expressions are handled the same way."""
        b = """(i for i in 1, 2)"""
        a = """(i for i in (1, 2))"""
        self.check(b, a)
    def test_6(self):
        b = """(i for i in 1 ,2 if i)"""
        a = """(i for i in (1 ,2) if i)"""
        self.check(b, a)
    def test_unchanged_0(self):
        s = """[i for i in (1, 2)]"""
        self.unchanged(s)
    def test_unchanged_1(self):
        s = """[i for i in foo()]"""
        self.unchanged(s)
    def test_unchanged_2(self):
        s = """[i for i in (1, 2) if nothing]"""
        self.unchanged(s)
    def test_unchanged_3(self):
        s = """(i for i in (1, 2))"""
        self.unchanged(s)
    def test_unchanged_4(self):
        s = """[i for i in m]"""
        self.unchanged(s)
class Test_metaclass(FixerTestCase):
    """Tests for the fixer that rewrites a ``__metaclass__ = M`` class body
    assignment into a ``metaclass=M`` keyword on the class statement."""
    fixer = "metaclass"
    def test_unchanged(self):
        """Classes without a real ``__metaclass__`` assignment are untouched."""
        self.unchanged("class X(): pass")
        self.unchanged("class X(object): pass")
        self.unchanged("class X(object1, object2): pass")
        self.unchanged("class X(object1, object2, object3): pass")
        self.unchanged("class X(metaclass=Meta): pass")
        # "metclass" below is a deliberate misspelling: it must NOT be
        # mistaken for the metaclass keyword.
        self.unchanged("class X(b, arg=23, metclass=Meta): pass")
        self.unchanged("class X(b, arg=23, metaclass=Meta, other=42): pass")
        # Defining __metaclass__ as a method is not the rewritable form.
        s = """
        class X:
            def __metaclass__(self): pass
        """
        self.unchanged(s)
        # A subscript assignment is not a plain name binding either.
        s = """
        class X:
            a[23] = 74
        """
        self.unchanged(s)
    def test_comments(self):
        """Comments adjacent to the removed assignment are kept in place."""
        b = """
        class X:
            # hi
            __metaclass__ = AppleMeta
        """
        a = """
        class X(metaclass=AppleMeta):
            # hi
            pass
        """
        self.check(b, a)
        b = """
        class X:
            __metaclass__ = Meta
            # Bedtime!
        """
        a = """
        class X(metaclass=Meta):
            pass
            # Bedtime!
        """
        self.check(b, a)
    def test_meta(self):
        """Core rewriting cases: bases, bodies, and keyword arguments."""
        # no-parent class, odd body
        b = """
        class X():
            __metaclass__ = Q
            pass
        """
        a = """
        class X(metaclass=Q):
            pass
        """
        self.check(b, a)
        # one parent class, no body
        b = """class X(object): __metaclass__ = Q"""
        a = """class X(object, metaclass=Q): pass"""
        self.check(b, a)
        # one parent, simple body
        b = """
        class X(object):
            __metaclass__ = Meta
            bar = 7
        """
        a = """
        class X(object, metaclass=Meta):
            bar = 7
        """
        self.check(b, a)
        b = """
        class X:
            __metaclass__ = Meta; x = 4; g = 23
        """
        a = """
        class X(metaclass=Meta):
            x = 4; g = 23
        """
        self.check(b, a)
        # one parent, simple body, __metaclass__ last
        b = """
        class X(object):
            bar = 7
            __metaclass__ = Meta
        """
        a = """
        class X(object, metaclass=Meta):
            bar = 7
        """
        self.check(b, a)
        # redefining __metaclass__ -- the last assignment wins
        b = """
        class X():
            __metaclass__ = A
            __metaclass__ = B
            bar = 7
        """
        a = """
        class X(metaclass=B):
            bar = 7
        """
        self.check(b, a)
        # multiple inheritance, simple body
        b = """
        class X(clsA, clsB):
            __metaclass__ = Meta
            bar = 7
        """
        a = """
        class X(clsA, clsB, metaclass=Meta):
            bar = 7
        """
        self.check(b, a)
        # keywords in the class statement
        b = """class m(a, arg=23): __metaclass__ = Meta"""
        a = """class m(a, arg=23, metaclass=Meta): pass"""
        self.check(b, a)
        b = """
        class X(expression(2 + 4)):
            __metaclass__ = Meta
        """
        a = """
        class X(expression(2 + 4), metaclass=Meta):
            pass
        """
        self.check(b, a)
        b = """
        class X(expression(2 + 4), x**4):
            __metaclass__ = Meta
        """
        a = """
        class X(expression(2 + 4), x**4, metaclass=Meta):
            pass
        """
        self.check(b, a)
        b = """
        class X:
            __metaclass__ = Meta
            save.py = 23
        """
        a = """
        class X(metaclass=Meta):
            save.py = 23
        """
        self.check(b, a)
class Test_getcwdu(FixerTestCase):
    """Checks renaming of os.getcwdu to os.getcwd."""
    fixer = "getcwdu"
    def test_basic(self):
        # Bare attribute, call, bound reference, and call with arguments.
        self.check("""os.getcwdu""", """os.getcwd""")
        self.check("""os.getcwdu()""", """os.getcwd()""")
        self.check("""meth = os.getcwdu""", """meth = os.getcwd""")
        self.check("""os.getcwdu(args)""", """os.getcwd(args)""")
    def test_comment(self):
        self.check("""os.getcwdu() # Foo""", """os.getcwd() # Foo""")
    def test_unchanged(self):
        # Already-correct names and similarly spelled names are left alone.
        for src in ("""os.getcwd()""", """getcwdu()""", """os.getcwdb()"""):
            self.unchanged(src)
    def test_indentation(self):
        # The rename must not disturb surrounding indentation.
        self.check(
            """
            if 1:
                os.getcwdu()
            """,
            """
            if 1:
                os.getcwd()
            """,
        )
    def test_multilation(self):
        # Odd whitespace around the dot or the call parentheses survives.
        self.check("""os .getcwdu()""", """os .getcwd()""")
        self.check("""os. getcwdu""", """os. getcwd""")
        self.check("""os.getcwdu ( )""", """os.getcwd ( )""")
class Test_operator(FixerTestCase):
    """Checks rewriting of removed operator-module helpers."""
    fixer = "operator"
    def test_operator_isCallable(self):
        self.check("operator.isCallable(x)", "callable(x)")
    def test_operator_sequenceIncludes(self):
        # Whitespace on either side of the dot must be preserved.
        for before, after in (
            ("operator.sequenceIncludes(x, y)", "operator.contains(x, y)"),
            ("operator .sequenceIncludes(x, y)", "operator .contains(x, y)"),
            ("operator. sequenceIncludes(x, y)", "operator. contains(x, y)"),
        ):
            self.check(before, after)
    def test_operator_isSequenceType(self):
        self.check(
            "operator.isSequenceType(x)",
            "import collections.abc\nisinstance(x, collections.abc.Sequence)",
        )
    def test_operator_isMappingType(self):
        self.check(
            "operator.isMappingType(x)",
            "import collections.abc\nisinstance(x, collections.abc.Mapping)",
        )
    def test_operator_isNumberType(self):
        self.check(
            "operator.isNumberType(x)",
            "import numbers\nisinstance(x, numbers.Number)",
        )
    def test_operator_repeat(self):
        for before, after in (
            ("operator.repeat(x, n)", "operator.mul(x, n)"),
            ("operator .repeat(x, n)", "operator .mul(x, n)"),
            ("operator. repeat(x, n)", "operator. mul(x, n)"),
        ):
            self.check(before, after)
    def test_operator_irepeat(self):
        for before, after in (
            ("operator.irepeat(x, n)", "operator.imul(x, n)"),
            ("operator .irepeat(x, n)", "operator .imul(x, n)"),
            ("operator. irepeat(x, n)", "operator. imul(x, n)"),
        ):
            self.check(before, after)
    def test_bare_isCallable(self):
        # Unqualified names only produce a warning, never a rewrite.
        self.warns_unchanged(
            "isCallable(x)", "You should use 'callable(x)' here."
        )
    def test_bare_sequenceIncludes(self):
        self.warns_unchanged(
            "sequenceIncludes(x, y)",
            "You should use 'operator.contains(x, y)' here.",
        )
    def test_bare_operator_isSequenceType(self):
        self.warns_unchanged(
            "isSequenceType(z)",
            "You should use 'isinstance(z, collections.abc.Sequence)' here.",
        )
    def test_bare_operator_isMappingType(self):
        self.warns_unchanged(
            "isMappingType(x)",
            "You should use 'isinstance(x, collections.abc.Mapping)' here.",
        )
    def test_bare_operator_isNumberType(self):
        self.warns_unchanged(
            "isNumberType(y)",
            "You should use 'isinstance(y, numbers.Number)' here.",
        )
    def test_bare_operator_repeat(self):
        self.warns_unchanged(
            "repeat(x, n)", "You should use 'operator.mul(x, n)' here."
        )
    def test_bare_operator_irepeat(self):
        self.warns_unchanged(
            "irepeat(y, 187)", "You should use 'operator.imul(y, 187)' here."
        )
class Test_exitfunc(FixerTestCase):
    """Tests for the fixer that replaces ``sys.exitfunc = f`` assignments
    with ``atexit.register(f)``, adding an atexit import when possible."""
    fixer = "exitfunc"
    def test_simple(self):
        """A plain assignment gains an atexit import and a register call."""
        b = """
            import sys
            sys.exitfunc = my_atexit
            """
        a = """
            import sys
            import atexit
            atexit.register(my_atexit)
            """
        self.check(b, a)
    def test_names_import(self):
        """atexit is appended to an existing multi-name import line."""
        b = """
            import sys, crumbs
            sys.exitfunc = my_func
            """
        a = """
            import sys, crumbs, atexit
            atexit.register(my_func)
            """
        self.check(b, a)
    def test_complex_expression(self):
        """The full right-hand-side expression becomes the register argument."""
        b = """
            import sys
            sys.exitfunc = do(d)/a()+complex(f=23, g=23)*expression
            """
        a = """
            import sys
            import atexit
            atexit.register(do(d)/a()+complex(f=23, g=23)*expression)
            """
        self.check(b, a)
    def test_comments(self):
        """Comments on the import and assignment lines are preserved."""
        b = """
            import sys # Foo
            sys.exitfunc = f # Blah
            """
        a = """
            import sys
            import atexit # Foo
            atexit.register(f) # Blah
            """
        self.check(b, a)
        b = """
            import apples, sys, crumbs, larry # Pleasant comments
            sys.exitfunc = func
            """
        a = """
            import apples, sys, crumbs, larry, atexit # Pleasant comments
            atexit.register(func)
            """
        self.check(b, a)
    def test_in_a_function(self):
        """The atexit import is added at module level, not inside the function."""
        b = """
            import sys
            def f():
                sys.exitfunc = func
            """
        a = """
            import sys
            import atexit
            def f():
                atexit.register(func)
            """
        self.check(b, a)
    def test_no_sys_import(self):
        """Without a sys import there is nowhere to hang the atexit import,
        so the fixer rewrites the call but emits a warning."""
        b = """sys.exitfunc = f"""
        a = """atexit.register(f)"""
        msg = (
            "Can't find sys import; Please add an atexit import at the "
            "top of your file."
        )
        self.warns(b, a, msg)
    def test_unchanged(self):
        """Reading sys.exitfunc (rather than assigning it) is left alone."""
        s = """f(sys.exitfunc)"""
        self.unchanged(s)
class Test_asserts(FixerTestCase):
    """Checks renaming of deprecated unittest assert-method aliases."""
    fixer = "asserts"
    def test_deprecated_names(self):
        renamings = (
            ("self.assert_(True)", "self.assertTrue(True)"),
            ("self.assertEquals(2, 2)", "self.assertEqual(2, 2)"),
            ("self.assertNotEquals(2, 3)", "self.assertNotEqual(2, 3)"),
            ("self.assertAlmostEquals(2, 3)", "self.assertAlmostEqual(2, 3)"),
            ("self.assertNotAlmostEquals(2, 8)", "self.assertNotAlmostEqual(2, 8)"),
            ("self.failUnlessEqual(2, 2)", "self.assertEqual(2, 2)"),
            ("self.failIfEqual(2, 3)", "self.assertNotEqual(2, 3)"),
            ("self.failUnlessAlmostEqual(2, 3)", "self.assertAlmostEqual(2, 3)"),
            ("self.failIfAlmostEqual(2, 8)", "self.assertNotAlmostEqual(2, 8)"),
            ("self.failUnless(True)", "self.assertTrue(True)"),
            ("self.failUnlessRaises(foo)", "self.assertRaises(foo)"),
            ("self.failIf(False)", "self.assertFalse(False)"),
        )
        for deprecated, modern in renamings:
            self.check(deprecated, modern)
    def test_variants(self):
        # Bound references, keyword arguments, comments, odd spacing and
        # with-statements must all survive the rename.
        self.check("eq = self.assertEquals", "eq = self.assertEqual")
        self.check(
            'self.assertEquals(2, 3, msg="fail")',
            'self.assertEqual(2, 3, msg="fail")',
        )
        self.check(
            'self.assertEquals(2, 3, msg="fail") # foo',
            'self.assertEqual(2, 3, msg="fail") # foo',
        )
        self.check("self.assertEquals (2, 3)", "self.assertEqual (2, 3)")
        self.check(" self.assertEquals (2, 3)", " self.assertEqual (2, 3)")
        self.check(
            "with self.failUnlessRaises(Explosion): explode()",
            "with self.assertRaises(Explosion): explode()",
        )
        self.check(
            "with self.failUnlessRaises(Explosion) as cm: explode()",
            "with self.assertRaises(Explosion) as cm: explode()",
        )
    def test_unchanged(self):
        # Names that merely start with a deprecated alias are not touched.
        self.unchanged("self.assertEqualsOnSaturday")
        self.unchanged("self.assertEqualsOnSaturday(3, 5)")
| true | true |
f7f7df480840dbd6f615e87db228b886d5bae05f | 1,053 | py | Python | plot_results.py | Niky1/size-image-annotator | fce57e97ac61601aa8c67cf69438f6dc6dbac946 | [
"MIT"
] | null | null | null | plot_results.py | Niky1/size-image-annotator | fce57e97ac61601aa8c67cf69438f6dc6dbac946 | [
"MIT"
] | null | null | null | plot_results.py | Niky1/size-image-annotator | fce57e97ac61601aa8c67cf69438f6dc6dbac946 | [
"MIT"
] | null | null | null | import json
import os.path
import sys
from os import path
import matplotlib.pyplot as plt
import numpy as np
from imageio import imread
from matplotlib import gridspec
# Load the annotation results produced by the annotator tool.
with open(sys.argv[1]) as f:
    data = json.load(f)
# Every datapoint references two images (left/right); flatten them in order.
picture_indices = [i for datapoint in data for i in (
    datapoint["left"], datapoint["right"])]
picture_paths = [
    f"pics/ILSVRC2012_test_{i:08d}.zoom00.JPEG" for i in picture_indices]
# Use a distinct loop name so the imported ``os.path``/``path`` names are
# not shadowed inside the comprehension.
pics = [imread(pic_path) for pic_path in picture_paths]
# One one-hot label per image: label 0 marks the left image as chosen.
labels = []
for datapoint in data:
    labels.extend([1, 0] if datapoint["label"] == 0 else [0, 1])
nrows = 10
ncols = 10
gs = gridspec.GridSpec(nrows, ncols, hspace=.25)
fig = plt.figure(figsize=(16, 16))
# Fix: the original loop iterated ``range(ncols)`` as the row axis (correct
# only because the grid is square) and raised IndexError when fewer than
# nrows*ncols images were annotated. Walk cells row-major and stop early.
n_cells = min(nrows * ncols, len(pics))
for cur_index in range(n_cells):
    row, col = divmod(cur_index, ncols)
    ax = fig.add_subplot(gs[row, col])
    ax.set_title("label: " + str(labels[cur_index]), fontdict={"fontsize": 8}, pad=4)
    ax.set_xticklabels([])
    ax.set_yticklabels([])
    plt.imshow(pics[cur_index])
plt.savefig("fig.png", bbox_inches="tight")
| 27.710526 | 89 | 0.673314 | import json
import os.path
import sys
from os import path
import matplotlib.pyplot as plt
import numpy as np
from imageio import imread
from matplotlib import gridspec
with open(sys.argv[1]) as f:
data = json.load(f)
picture_indices = [i for datapoint in data for i in (
datapoint["left"], datapoint["right"])]
picture_paths = [
f"pics/ILSVRC2012_test_{i:08d}.zoom00.JPEG" for i in picture_indices]
pics = [imread(path) for path in picture_paths]
labels = []
for datapoint in data:
cur_labels = [1, 0] if datapoint["label"] == 0 else [0, 1]
labels.extend(cur_labels)
ncols = 10
nrows = 10
gs = gridspec.GridSpec(nrows, ncols, hspace=.25)
fig = plt.figure(figsize=(16, 16))
for i in range(ncols):
for j in range(nrows):
cur_index = i * ncols + j
ax = fig.add_subplot(gs[i, j])
ax.set_title("label: " + str(labels[cur_index]), fontdict={"fontsize": 8}, pad=4)
ax.set_xticklabels([])
ax.set_yticklabels([])
plt.imshow(pics[cur_index])
plt.savefig("fig.png", bbox_inches="tight")
| true | true |
f7f7e0401b7970df07707bce7d7c76621fa25e06 | 1,637 | py | Python | pandas/rolling_mean.py | minister19/Python_snippets | 69accc4278443271aefc7e354161eac7df2fa283 | [
"MIT"
] | null | null | null | pandas/rolling_mean.py | minister19/Python_snippets | 69accc4278443271aefc7e354161eac7df2fa283 | [
"MIT"
] | null | null | null | pandas/rolling_mean.py | minister19/Python_snippets | 69accc4278443271aefc7e354161eac7df2fa283 | [
"MIT"
] | null | null | null | import matplotlib
import matplotlib.pyplot as plt
import pandas as pd
from numpy import NaN
# Detect whether figures are drawn into a notebook ("inline" backend); if so,
# they must be refreshed through IPython.display instead of plt.pause().
is_ipython = 'inline' in matplotlib.get_backend()
if is_ipython:
    from IPython import display
def plot_single_with_mean(config):
    '''
    Plot (or refresh) one data series together with its rolling mean on the
    figure identified by config['id'], and return that figure's axes list.

    config: {
        'id': unique identifier,
        'title': '',
        'xlabel': '',
        'ylabel': '',
        'x_data': [],
        'y_data': [],
        'm': int  # rolling-window size; <= 0 disables the mean line
    }
    '''
    fig = plt.figure(config['id'])
    axes = fig.get_axes()
    series = config['y_data']
    window = config['m']
    # A rolling mean needs more points than the window; otherwise plot NaNs
    # so the mean line stays invisible but the two-line layout is kept.
    if window > 0 and len(series) > window:
        # Fix: dropped the leftover debug print of the series lengths here.
        means = pd.Series(series).rolling(window).mean()
    else:
        means = [NaN] * len(series)
    if len(axes) == 0:
        # First call for this figure id: create both lines.
        plt.title(config['title'])
        plt.xlabel(config['xlabel'])
        plt.plot(config['x_data'], config['y_data'], label=config['ylabel'])
        plt.plot(config['x_data'], means, label=config['ylabel'] + '_mean')
    else:
        # Subsequent calls update the two existing lines in place.
        ax = axes[0]
        line, meanline = ax.get_lines()
        line.set_xdata(config['x_data'])
        line.set_ydata(config['y_data'])
        meanline.set_xdata(config['x_data'])
        meanline.set_ydata(means)
        ax.relim()
        ax.autoscale_view(True, True, True)
    if is_ipython:
        display.clear_output(wait=True)
        display.display(fig)
    else:
        plt.pause(0.2)  # pause a bit so that plots are updated
    return axes
# Demo: plot a short 5-point series with a 3-point rolling mean and block
# until the window is closed.
config = {
    'id': 2,
    'title': 'single_with_mean',
    'xlabel': 't',
    'ylabel': 'l1',
    'x_data': range(5),
    'y_data': [1, 3, 6, 7, 9],
    "m": 3
}
plot_single_with_mean(config)
plt.show(block=True)
| 25.184615 | 76 | 0.563836 | import matplotlib
import matplotlib.pyplot as plt
import pandas as pd
from numpy import NaN
is_ipython = 'inline' in matplotlib.get_backend()
if is_ipython:
from IPython import display
def plot_single_with_mean(config):
fig = plt.figure(config['id'])
axes = fig.get_axes()
_data = config['y_data']
m = config['m']
if m > 0 and len(_data) > m:
means = pd.Series(_data).rolling(m).mean()
print(len(_data), len(means))
else:
means = [NaN] * len(_data)
if len(axes) == 0:
plt.title(config['title'])
plt.xlabel(config['xlabel'])
plt.plot(config['x_data'], config['y_data'], label=config['ylabel'])
plt.plot(config['x_data'], means, label=config['ylabel'] + '_mean')
else:
ax = axes[0]
line, meanline = ax.get_lines()
line.set_xdata(config['x_data'])
line.set_ydata(config['y_data'])
meanline.set_xdata(config['x_data'])
meanline.set_ydata(means)
ax.relim()
ax.autoscale_view(True, True, True)
if is_ipython:
display.clear_output(wait=True)
display.display(fig)
else:
plt.pause(0.2)
return axes
config = {
'id': 2,
'title': 'single_with_mean',
'xlabel': 't',
'ylabel': 'l1',
'x_data': range(5),
'y_data': [1, 3, 6, 7, 9],
"m": 3
}
plot_single_with_mean(config)
plt.show(block=True)
| true | true |
f7f7e17a90104bb97f9edf831f6cea34066ce1c4 | 5,399 | py | Python | console.py | calypsobronte/AirBnB_clone | 25c6519b85db8f826a77f6ec76045b3358aa79df | [
"MIT"
] | null | null | null | console.py | calypsobronte/AirBnB_clone | 25c6519b85db8f826a77f6ec76045b3358aa79df | [
"MIT"
] | null | null | null | console.py | calypsobronte/AirBnB_clone | 25c6519b85db8f826a77f6ec76045b3358aa79df | [
"MIT"
] | 1 | 2020-02-28T16:01:00.000Z | 2020-02-28T16:01:00.000Z | #!/usr/bin/python3
# description of the function
import cmd
import models
import re
from models.base_model import BaseModel
from models.user import User
from models.state import State
from models.city import City
from models.amenity import Amenity
from models.place import Place
from models.review import Review
class HBNBCommand(cmd.Cmd):
    # Command interpreter for the AirBnB clone.  Each do_* method implements
    # one console command; ``default`` adds the ``<Class>.<command>(...)``
    # alternative syntax for ``all`` and ``count``.
    # intro = 'Welcome to the hbnb shell. Type help or ? to list commands.\n'
    prompt = '(hbnb) '
    # Maps the class names accepted on the command line to model classes.
    __class_name = {"BaseModel": BaseModel,
                    "User": User,
                    "Amenity": Amenity,
                    "State": State,
                    "City": City,
                    "Place": Place,
                    "Review": Review}
    # ----- basic hbnb commands -----
    def emptyline(self):
        """ an empty line + ENTER """
        # Overridden so an empty line does NOT repeat the previous command
        # (cmd.Cmd's default behavior).
        pass
    def do_EOF(self, line):
        """ Exit command to exit the program """
        return True
    def do_quit(self, line):
        """ Quit command to exit the program """
        # print('Thank you for using hbnb')
        return True
    def do_create(self, line):
        """ Creates a new instance of BaseModel """
        if line is None or line == "":
            print("** class name missing **")
        elif line not in self.__class_name:
            print("** class doesn't exist **")
        else:
            new_model = self.__class_name[line]()
            new_model.save()
            print(new_model.id)
            # NOTE(review): new_model.save() presumably already persists the
            # object; this extra storage.save() looks redundant -- confirm.
            models.storage.save()
    def do_show(self, line):
        """ Prints the string representation of an
        instance based on the class name and id """
        # Expected input: "<class name> <id>".
        if line is None or line == "":
            print("** class name missing **")
        elif line.split(' ')[0] not in self.__class_name:
            print("** class doesn't exist **")
        elif len(line.split(' ')) < 2:
            print("** instance id missing **")
        else:
            # Storage keys have the form "<class name>.<id>".
            key = "{}.{}".format(line.split(' ')[0], line.split(' ')[1])
            objects = models.storage.all()
            if key not in objects:
                print("** no instance found **")
            else:
                print(objects[key])
    def do_destroy(self, line):
        """ Deletes an instance based on the class name and id """
        if line is None or line == "":
            print("** class name missing **")
        elif line.split(' ')[0] not in self.__class_name:
            print("** class doesn't exist **")
        elif len(line.split(' ')) < 2:
            print("** instance id missing **")
        else:
            key = "{}.{}".format(line.split(' ')[0], line.split(' ')[1])
            objects = models.storage.all()
            if key not in objects:
                print("** no instance found **")
            else:
                # Remove from the in-memory store, then persist the change.
                del objects[key]
                models.storage.save()
    def do_all(self, line):
        """ Prints all string representation of all
        instances based or not on the class name """
        # With no argument: dump every stored instance.
        if (len(line.split()) == 0):
            print([str(value) for value in models.storage.all().values()])
            return
        if (line.split()[0] not in self.__class_name):
            print("** class doesn't exist **")
            return
        # Otherwise only instances whose key prefix matches the class name.
        print([str(value) for key, value in models.storage.all().items()
               if key.split(".")[0] == line])
    def do_update(self, line):
        """ Updates an instance based on the class name
        and id by adding or updating attribute
        (save the change into the JSON file """
        # Expected input: "<class name> <id> <attribute name> <value>".
        if line is None or line == "":
            print("** class name missing **")
        elif line.split(' ')[0] not in self.__class_name:
            print("** class doesn't exist **")
        elif len(line.split(' ')) < 2:
            print("** instance id missing **")
        elif len(line.split(' ')) < 3:
            print("** attribute name missing **")
        elif len(line.split(' ')) < 4:
            print("** value missing **")
        else:
            key = "{}.{}".format(line.split(' ')[0], line.split(' ')[1])
            if key not in models.storage.all():
                print("** no instance found **")
            else:
                line_1 = line.split()[2]
                value = line.split()[3]
                # NOTE: the value is stored as the raw token string; no type
                # casting is performed here.
                setattr(models.storage.all()[key], line_1, value)
                models.storage.save()
    def do_count(self, line):
        """Count command counts the instances of a class"""
        # Count storage keys whose "<class name>" prefix matches the argument.
        count = 0
        for key, value in models.storage.all().items():
            if key.split(".")[0] == line:
                count += 1
        print(count)
    def default(self, line):
        # Recognize the "<Class>.<command>(<args>)" syntax; anything that
        # does not match the pattern falls back to cmd.Cmd's default
        # (which prints an unknown-syntax error).
        if (re.match(r"\w+\.\w+(\(\)|\(\"[^\"]*\"(?:, (\"[^\"]*\"|{.*}))*\))",
                     line) is None):
            super().default(line)
            return
        # Only <Class>.all() and <Class>.count() are dispatched here.
        if (line.split(".")[1].split("(")[0] == "all"):
            if (line.split(".")[0] not in self.__class_name):
                print("** class doesn't exist **")
                return
            self.do_all(line.split(".")[0])
        elif (line.split(".")[1].split("(")[0] == "count"):
            if (line.split(".")[0] not in self.__class_name):
                print("** class doesn't exist **")
                return
            self.do_count(line.split(".")[0])
        else:
            super().default(line)
if __name__ == '__main__':
    # Start the interactive console only when executed as a script.
    HBNBCommand().cmdloop()
| 34.832258 | 79 | 0.50213 |
import cmd
import models
import re
from models.base_model import BaseModel
from models.user import User
from models.state import State
from models.city import City
from models.amenity import Amenity
from models.place import Place
from models.review import Review
class HBNBCommand(cmd.Cmd):
prompt = '(hbnb) '
__class_name = {"BaseModel": BaseModel,
"User": User,
"Amenity": Amenity,
"State": State,
"City": City,
"Place": Place,
"Review": Review}
def emptyline(self):
pass
def do_EOF(self, line):
return True
def do_quit(self, line):
return True
def do_create(self, line):
if line is None or line == "":
print("** class name missing **")
elif line not in self.__class_name:
print("** class doesn't exist **")
else:
new_model = self.__class_name[line]()
new_model.save()
print(new_model.id)
models.storage.save()
def do_show(self, line):
if line is None or line == "":
print("** class name missing **")
elif line.split(' ')[0] not in self.__class_name:
print("** class doesn't exist **")
elif len(line.split(' ')) < 2:
print("** instance id missing **")
else:
key = "{}.{}".format(line.split(' ')[0], line.split(' ')[1])
objects = models.storage.all()
if key not in objects:
print("** no instance found **")
else:
print(objects[key])
def do_destroy(self, line):
if line is None or line == "":
print("** class name missing **")
elif line.split(' ')[0] not in self.__class_name:
print("** class doesn't exist **")
elif len(line.split(' ')) < 2:
print("** instance id missing **")
else:
key = "{}.{}".format(line.split(' ')[0], line.split(' ')[1])
objects = models.storage.all()
if key not in objects:
print("** no instance found **")
else:
del objects[key]
models.storage.save()
def do_all(self, line):
if (len(line.split()) == 0):
print([str(value) for value in models.storage.all().values()])
return
if (line.split()[0] not in self.__class_name):
print("** class doesn't exist **")
return
print([str(value) for key, value in models.storage.all().items()
if key.split(".")[0] == line])
def do_update(self, line):
if line is None or line == "":
print("** class name missing **")
elif line.split(' ')[0] not in self.__class_name:
print("** class doesn't exist **")
elif len(line.split(' ')) < 2:
print("** instance id missing **")
elif len(line.split(' ')) < 3:
print("** attribute name missing **")
elif len(line.split(' ')) < 4:
print("** value missing **")
else:
key = "{}.{}".format(line.split(' ')[0], line.split(' ')[1])
if key not in models.storage.all():
print("** no instance found **")
else:
line_1 = line.split()[2]
value = line.split()[3]
setattr(models.storage.all()[key], line_1, value)
models.storage.save()
def do_count(self, line):
count = 0
for key, value in models.storage.all().items():
if key.split(".")[0] == line:
count += 1
print(count)
def default(self, line):
if (re.match(r"\w+\.\w+(\(\)|\(\"[^\"]*\"(?:, (\"[^\"]*\"|{.*}))*\))",
line) is None):
super().default(line)
return
if (line.split(".")[1].split("(")[0] == "all"):
if (line.split(".")[0] not in self.__class_name):
print("** class doesn't exist **")
return
self.do_all(line.split(".")[0])
elif (line.split(".")[1].split("(")[0] == "count"):
if (line.split(".")[0] not in self.__class_name):
print("** class doesn't exist **")
return
self.do_count(line.split(".")[0])
else:
super().default(line)
if __name__ == '__main__':
HBNBCommand().cmdloop()
| true | true |
f7f7e2795d1b53c361ffc3c5cd1e81684a6f064d | 84 | py | Python | handle/admin.py | acdh-oeaw/acdh-django-handle | 008b1e5264fb1f76d7ad4d1034e1ac0713b60498 | [
"MIT"
] | null | null | null | handle/admin.py | acdh-oeaw/acdh-django-handle | 008b1e5264fb1f76d7ad4d1034e1ac0713b60498 | [
"MIT"
] | null | null | null | handle/admin.py | acdh-oeaw/acdh-django-handle | 008b1e5264fb1f76d7ad4d1034e1ac0713b60498 | [
"MIT"
] | null | null | null | from django.contrib import admin
from . models import Pid
# Expose the Pid model in the Django admin with the default ModelAdmin.
admin.site.register(Pid)
| 16.8 | 32 | 0.797619 | from django.contrib import admin
from . models import Pid
admin.site.register(Pid)
| true | true |
f7f7e30adfcc111e4349028b9bd650611d1d8fba | 27,482 | py | Python | tests/components/accuweather/test_sensor.py | GuyKh/core | 859bcb6eb4dbb9a8b87b6e4e888e074502db5df1 | [
"Apache-2.0"
] | 2 | 2021-04-15T06:11:12.000Z | 2021-12-13T21:17:29.000Z | tests/components/accuweather/test_sensor.py | GuyKh/core | 859bcb6eb4dbb9a8b87b6e4e888e074502db5df1 | [
"Apache-2.0"
] | 84 | 2020-07-14T17:08:58.000Z | 2022-03-31T06:01:46.000Z | tests/components/accuweather/test_sensor.py | GuyKh/core | 859bcb6eb4dbb9a8b87b6e4e888e074502db5df1 | [
"Apache-2.0"
] | 1 | 2021-12-09T11:44:52.000Z | 2021-12-09T11:44:52.000Z | """Test sensor of AccuWeather integration."""
from datetime import timedelta
import json
from unittest.mock import PropertyMock, patch
from homeassistant.components.accuweather.const import ATTRIBUTION, DOMAIN
from homeassistant.components.sensor import (
ATTR_STATE_CLASS,
DOMAIN as SENSOR_DOMAIN,
STATE_CLASS_MEASUREMENT,
)
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_ICON,
ATTR_UNIT_OF_MEASUREMENT,
CONCENTRATION_PARTS_PER_CUBIC_METER,
DEVICE_CLASS_TEMPERATURE,
LENGTH_FEET,
LENGTH_METERS,
LENGTH_MILLIMETERS,
PERCENTAGE,
SPEED_KILOMETERS_PER_HOUR,
STATE_UNAVAILABLE,
TEMP_CELSIUS,
TIME_HOURS,
UV_INDEX,
)
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
from homeassistant.util.unit_system import IMPERIAL_SYSTEM
from tests.common import async_fire_time_changed, load_fixture
from tests.components.accuweather import init_integration
async def test_sensor_without_forecast(hass):
    """Test states of the sensor without forecast."""
    await init_integration(hass)
    registry = er.async_get(hass)
    # Cloud ceiling sensor.
    state = hass.states.get("sensor.home_cloud_ceiling")
    assert state
    assert state.state == "3200"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-fog"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == LENGTH_METERS
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_cloud_ceiling")
    assert entry
    assert entry.unique_id == "0123456-ceiling"
    # Precipitation sensor (no precipitation type in the fixture).
    state = hass.states.get("sensor.home_precipitation")
    assert state
    assert state.state == "0.0"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == LENGTH_MILLIMETERS
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-rainy"
    assert state.attributes.get("type") is None
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_precipitation")
    assert entry
    assert entry.unique_id == "0123456-precipitation"
    # Pressure tendency sensor (enum-like, so no state class).
    state = hass.states.get("sensor.home_pressure_tendency")
    assert state
    assert state.state == "falling"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_ICON) == "mdi:gauge"
    assert state.attributes.get(ATTR_DEVICE_CLASS) == "accuweather__pressure_tendency"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_pressure_tendency")
    assert entry
    assert entry.unique_id == "0123456-pressuretendency"
    # RealFeel temperature sensor.
    state = hass.states.get("sensor.home_realfeel_temperature")
    assert state
    assert state.state == "25.1"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_realfeel_temperature")
    assert entry
    assert entry.unique_id == "0123456-realfeeltemperature"
    # UV index sensor (carries an extra "level" attribute).
    state = hass.states.get("sensor.home_uv_index")
    assert state
    assert state.state == "6"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UV_INDEX
    assert state.attributes.get("level") == "High"
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_uv_index")
    assert entry
    assert entry.unique_id == "0123456-uvindex"
async def test_sensor_with_forecast(hass):
    """Test states of the sensor with forecast."""
    await init_integration(hass, forecast=True)
    registry = er.async_get(hass)
    # Hours-of-sun forecast sensor.
    state = hass.states.get("sensor.home_hours_of_sun_0d")
    assert state
    assert state.state == "7.2"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-partly-cloudy"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TIME_HOURS
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_hours_of_sun_0d")
    assert entry
    assert entry.unique_id == "0123456-hoursofsun-0"
    # RealFeel max forecast sensor.
    state = hass.states.get("sensor.home_realfeel_temperature_max_0d")
    assert state
    assert state.state == "29.8"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_realfeel_temperature_max_0d")
    assert entry
    # Fix: the unique_id assertion was missing for this entity; every other
    # sensor block in this test verifies it (cf. the min sensor below).
    assert entry.unique_id == "0123456-realfeeltemperaturemax-0"
    # RealFeel min forecast sensor.
    state = hass.states.get("sensor.home_realfeel_temperature_min_0d")
    assert state
    assert state.state == "15.1"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_realfeel_temperature_min_0d")
    assert entry
    assert entry.unique_id == "0123456-realfeeltemperaturemin-0"
    # Thunderstorm probability (day) forecast sensor.
    state = hass.states.get("sensor.home_thunderstorm_probability_day_0d")
    assert state
    assert state.state == "40"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-lightning"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_thunderstorm_probability_day_0d")
    assert entry
    assert entry.unique_id == "0123456-thunderstormprobabilityday-0"
    # Thunderstorm probability (night) forecast sensor.
    state = hass.states.get("sensor.home_thunderstorm_probability_night_0d")
    assert state
    assert state.state == "40"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-lightning"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_thunderstorm_probability_night_0d")
    assert entry
    assert entry.unique_id == "0123456-thunderstormprobabilitynight-0"
    # UV index forecast sensor.
    state = hass.states.get("sensor.home_uv_index_0d")
    assert state
    assert state.state == "5"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-sunny"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UV_INDEX
    assert state.attributes.get("level") == "Moderate"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_uv_index_0d")
    assert entry
    assert entry.unique_id == "0123456-uvindex-0"
async def test_sensor_disabled(hass):
    """Test sensor disabled by default."""
    await init_integration(hass)
    registry = er.async_get(hass)
    # The apparent-temperature entity is registered but disabled by the
    # integration itself.
    entry = registry.async_get("sensor.home_apparent_temperature")
    assert entry
    assert entry.unique_id == "0123456-apparenttemperature"
    assert entry.disabled
    assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION
    # Test enabling entity
    updated_entry = registry.async_update_entity(
        entry.entity_id, **{"disabled_by": None}
    )
    assert updated_entry != entry
    assert updated_entry.disabled is False
async def test_sensor_enabled_without_forecast(hass):
    """Test enabling an advanced sensor.

    Every default-disabled sensor is pre-enabled in the entity registry so it
    is created during setup, then its state, attributes and registry entry are
    verified.
    """
    registry = er.async_get(hass)

    # (unique_id, suggested_object_id) pairs; the loop replaces 21 identical
    # async_get_or_create() calls from the original copy-pasted version.
    for unique_id, object_id in (
        ("0123456-apparenttemperature", "home_apparent_temperature"),
        ("0123456-cloudcover", "home_cloud_cover"),
        ("0123456-dewpoint", "home_dew_point"),
        ("0123456-realfeeltemperatureshade", "home_realfeel_temperature_shade"),
        ("0123456-wetbulbtemperature", "home_wet_bulb_temperature"),
        ("0123456-wind", "home_wind"),
        ("0123456-windchilltemperature", "home_wind_chill_temperature"),
        ("0123456-windgust", "home_wind_gust"),
        ("0123456-cloudcoverday-0", "home_cloud_cover_day_0d"),
        ("0123456-cloudcovernight-0", "home_cloud_cover_night_0d"),
        ("0123456-grass-0", "home_grass_pollen_0d"),
        ("0123456-mold-0", "home_mold_pollen_0d"),
        ("0123456-ozone-0", "home_ozone_0d"),
        ("0123456-ragweed-0", "home_ragweed_pollen_0d"),
        (
            "0123456-realfeeltemperatureshademax-0",
            "home_realfeel_temperature_shade_max_0d",
        ),
        (
            "0123456-realfeeltemperatureshademin-0",
            "home_realfeel_temperature_shade_min_0d",
        ),
        ("0123456-tree-0", "home_tree_pollen_0d"),
        ("0123456-windgustday-0", "home_wind_gust_day_0d"),
        ("0123456-windgustnight-0", "home_wind_gust_night_0d"),
        ("0123456-windday-0", "home_wind_day_0d"),
        ("0123456-windnight-0", "home_wind_night_0d"),
    ):
        registry.async_get_or_create(
            SENSOR_DOMAIN,
            DOMAIN,
            unique_id,
            suggested_object_id=object_id,
            disabled_by=None,
        )

    await init_integration(hass, forecast=True)

    state = hass.states.get("sensor.home_apparent_temperature")
    assert state
    assert state.state == "22.8"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_apparent_temperature")
    assert entry
    assert entry.unique_id == "0123456-apparenttemperature"

    state = hass.states.get("sensor.home_cloud_cover")
    assert state
    assert state.state == "10"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-cloudy"
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_cloud_cover")
    assert entry
    assert entry.unique_id == "0123456-cloudcover"

    state = hass.states.get("sensor.home_dew_point")
    assert state
    assert state.state == "16.2"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_dew_point")
    assert entry
    assert entry.unique_id == "0123456-dewpoint"

    state = hass.states.get("sensor.home_realfeel_temperature_shade")
    assert state
    assert state.state == "21.1"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_realfeel_temperature_shade")
    assert entry
    assert entry.unique_id == "0123456-realfeeltemperatureshade"

    state = hass.states.get("sensor.home_wet_bulb_temperature")
    assert state
    assert state.state == "18.6"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_wet_bulb_temperature")
    assert entry
    assert entry.unique_id == "0123456-wetbulbtemperature"

    state = hass.states.get("sensor.home_wind_chill_temperature")
    assert state
    assert state.state == "22.8"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_wind_chill_temperature")
    assert entry
    assert entry.unique_id == "0123456-windchilltemperature"

    state = hass.states.get("sensor.home_wind_gust")
    assert state
    assert state.state == "20.3"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SPEED_KILOMETERS_PER_HOUR
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-windy"
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_wind_gust")
    assert entry
    assert entry.unique_id == "0123456-windgust"

    state = hass.states.get("sensor.home_wind")
    assert state
    assert state.state == "14.5"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SPEED_KILOMETERS_PER_HOUR
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-windy"
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_wind")
    assert entry
    assert entry.unique_id == "0123456-wind"

    state = hass.states.get("sensor.home_cloud_cover_day_0d")
    assert state
    assert state.state == "58"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-cloudy"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_cloud_cover_day_0d")
    assert entry
    assert entry.unique_id == "0123456-cloudcoverday-0"

    state = hass.states.get("sensor.home_cloud_cover_night_0d")
    assert state
    assert state.state == "65"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-cloudy"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_cloud_cover_night_0d")
    assert entry
    # Was missing: verify the unique_id like every other registry check here.
    assert entry.unique_id == "0123456-cloudcovernight-0"

    state = hass.states.get("sensor.home_grass_pollen_0d")
    assert state
    assert state.state == "0"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert (
        state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        == CONCENTRATION_PARTS_PER_CUBIC_METER
    )
    assert state.attributes.get("level") == "Low"
    assert state.attributes.get(ATTR_ICON) == "mdi:grass"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_grass_pollen_0d")
    assert entry
    assert entry.unique_id == "0123456-grass-0"

    state = hass.states.get("sensor.home_mold_pollen_0d")
    assert state
    assert state.state == "0"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert (
        state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        == CONCENTRATION_PARTS_PER_CUBIC_METER
    )
    assert state.attributes.get("level") == "Low"
    assert state.attributes.get(ATTR_ICON) == "mdi:blur"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_mold_pollen_0d")
    assert entry
    assert entry.unique_id == "0123456-mold-0"

    state = hass.states.get("sensor.home_ozone_0d")
    assert state
    assert state.state == "32"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get("level") == "Good"
    assert state.attributes.get(ATTR_ICON) == "mdi:vector-triangle"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_ozone_0d")
    assert entry
    assert entry.unique_id == "0123456-ozone-0"

    state = hass.states.get("sensor.home_ragweed_pollen_0d")
    assert state
    assert state.state == "0"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert (
        state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        == CONCENTRATION_PARTS_PER_CUBIC_METER
    )
    assert state.attributes.get("level") == "Low"
    assert state.attributes.get(ATTR_ICON) == "mdi:sprout"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_ragweed_pollen_0d")
    assert entry
    assert entry.unique_id == "0123456-ragweed-0"

    state = hass.states.get("sensor.home_realfeel_temperature_shade_max_0d")
    assert state
    assert state.state == "28.0"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_realfeel_temperature_shade_max_0d")
    assert entry
    assert entry.unique_id == "0123456-realfeeltemperatureshademax-0"

    state = hass.states.get("sensor.home_realfeel_temperature_shade_min_0d")
    assert state
    assert state.state == "15.1"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_realfeel_temperature_shade_min_0d")
    assert entry
    assert entry.unique_id == "0123456-realfeeltemperatureshademin-0"

    state = hass.states.get("sensor.home_tree_pollen_0d")
    assert state
    assert state.state == "0"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert (
        state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        == CONCENTRATION_PARTS_PER_CUBIC_METER
    )
    assert state.attributes.get("level") == "Low"
    assert state.attributes.get(ATTR_ICON) == "mdi:tree-outline"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_tree_pollen_0d")
    assert entry
    assert entry.unique_id == "0123456-tree-0"

    state = hass.states.get("sensor.home_wind_day_0d")
    assert state
    assert state.state == "13.0"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SPEED_KILOMETERS_PER_HOUR
    assert state.attributes.get("direction") == "SSE"
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-windy"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_wind_day_0d")
    assert entry
    assert entry.unique_id == "0123456-windday-0"

    state = hass.states.get("sensor.home_wind_night_0d")
    assert state
    assert state.state == "7.4"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SPEED_KILOMETERS_PER_HOUR
    assert state.attributes.get("direction") == "WNW"
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-windy"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_wind_night_0d")
    assert entry
    assert entry.unique_id == "0123456-windnight-0"

    state = hass.states.get("sensor.home_wind_gust_day_0d")
    assert state
    assert state.state == "29.6"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SPEED_KILOMETERS_PER_HOUR
    assert state.attributes.get("direction") == "S"
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-windy"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_wind_gust_day_0d")
    assert entry
    assert entry.unique_id == "0123456-windgustday-0"

    state = hass.states.get("sensor.home_wind_gust_night_0d")
    assert state
    assert state.state == "18.5"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SPEED_KILOMETERS_PER_HOUR
    assert state.attributes.get("direction") == "WSW"
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-windy"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_wind_gust_night_0d")
    assert entry
    assert entry.unique_id == "0123456-windgustnight-0"
async def test_availability(hass):
    """Ensure that we mark the entities unavailable correctly when service is offline."""
    await init_integration(hass)

    entity_id = "sensor.home_cloud_ceiling"

    state = hass.states.get(entity_id)
    assert state
    assert state.state != STATE_UNAVAILABLE
    assert state.state == "3200"

    # A failed update should flip the entity to unavailable.
    with patch(
        "homeassistant.components.accuweather.AccuWeather.async_get_current_conditions",
        side_effect=ConnectionError(),
    ):
        async_fire_time_changed(hass, utcnow() + timedelta(minutes=60))
        await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state
    assert state.state == STATE_UNAVAILABLE

    # A subsequent successful update should restore availability.
    with patch(
        "homeassistant.components.accuweather.AccuWeather.async_get_current_conditions",
        return_value=json.loads(
            load_fixture("accuweather/current_conditions_data.json")
        ),
    ), patch(
        "homeassistant.components.accuweather.AccuWeather.requests_remaining",
        new_callable=PropertyMock,
        return_value=10,
    ):
        async_fire_time_changed(hass, utcnow() + timedelta(minutes=120))
        await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state
    assert state.state != STATE_UNAVAILABLE
    assert state.state == "3200"
async def test_manual_update_entity(hass):
    """Test manual update entity via service homeassistant/update_entity."""
    # NOTE: fixed docstring typo ("homeasasistant" -> "homeassistant").
    await init_integration(hass, forecast=True)

    await async_setup_component(hass, "homeassistant", {})

    current = json.loads(load_fixture("accuweather/current_conditions_data.json"))
    forecast = json.loads(load_fixture("accuweather/forecast_data.json"))

    with patch(
        "homeassistant.components.accuweather.AccuWeather.async_get_current_conditions",
        return_value=current,
    ) as mock_current, patch(
        "homeassistant.components.accuweather.AccuWeather.async_get_forecast",
        return_value=forecast,
    ) as mock_forecast, patch(
        "homeassistant.components.accuweather.AccuWeather.requests_remaining",
        new_callable=PropertyMock,
        return_value=10,
    ):
        await hass.services.async_call(
            "homeassistant",
            "update_entity",
            {ATTR_ENTITY_ID: ["sensor.home_cloud_ceiling"]},
            blocking=True,
        )

    # The service call should trigger exactly one refresh of each endpoint.
    assert mock_current.call_count == 1
    assert mock_forecast.call_count == 1
async def test_sensor_imperial_units(hass):
    """Test states of the sensor without forecast."""
    hass.config.units = IMPERIAL_SYSTEM
    await init_integration(hass)

    # With the imperial unit system the cloud ceiling is reported in feet.
    state = hass.states.get("sensor.home_cloud_ceiling")
    assert state
    assert state.state == "10500"
    attributes = state.attributes
    assert attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert attributes.get(ATTR_ICON) == "mdi:weather-fog"
    assert attributes.get(ATTR_UNIT_OF_MEASUREMENT) == LENGTH_FEET
async def test_state_update(hass):
    """Ensure the sensor state changes after updating the data."""
    await init_integration(hass)

    state = hass.states.get("sensor.home_cloud_ceiling")
    assert state
    assert state.state != STATE_UNAVAILABLE
    assert state.state == "3200"

    # Feed a modified ceiling value through the next coordinator refresh.
    updated_conditions = json.loads(
        load_fixture("accuweather/current_conditions_data.json")
    )
    updated_conditions["Ceiling"]["Metric"]["Value"] = 3300

    with patch(
        "homeassistant.components.accuweather.AccuWeather.async_get_current_conditions",
        return_value=updated_conditions,
    ), patch(
        "homeassistant.components.accuweather.AccuWeather.requests_remaining",
        new_callable=PropertyMock,
        return_value=10,
    ):
        async_fire_time_changed(hass, utcnow() + timedelta(minutes=60))
        await hass.async_block_till_done()

    state = hass.states.get("sensor.home_cloud_ceiling")
    assert state
    assert state.state != STATE_UNAVAILABLE
    assert state.state == "3300"
| 37.087719 | 89 | 0.725602 | from datetime import timedelta
import json
from unittest.mock import PropertyMock, patch
from homeassistant.components.accuweather.const import ATTRIBUTION, DOMAIN
from homeassistant.components.sensor import (
ATTR_STATE_CLASS,
DOMAIN as SENSOR_DOMAIN,
STATE_CLASS_MEASUREMENT,
)
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_ICON,
ATTR_UNIT_OF_MEASUREMENT,
CONCENTRATION_PARTS_PER_CUBIC_METER,
DEVICE_CLASS_TEMPERATURE,
LENGTH_FEET,
LENGTH_METERS,
LENGTH_MILLIMETERS,
PERCENTAGE,
SPEED_KILOMETERS_PER_HOUR,
STATE_UNAVAILABLE,
TEMP_CELSIUS,
TIME_HOURS,
UV_INDEX,
)
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import async_setup_component
from homeassistant.util.dt import utcnow
from homeassistant.util.unit_system import IMPERIAL_SYSTEM
from tests.common import async_fire_time_changed, load_fixture
from tests.components.accuweather import init_integration
async def test_sensor_without_forecast(hass):
    """Test states of the sensor without forecast."""
    await init_integration(hass)
    registry = er.async_get(hass)

    # Cloud ceiling sensor.
    state = hass.states.get("sensor.home_cloud_ceiling")
    assert state
    assert state.state == "3200"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-fog"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == LENGTH_METERS
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_cloud_ceiling")
    assert entry
    assert entry.unique_id == "0123456-ceiling"

    # Precipitation sensor.
    state = hass.states.get("sensor.home_precipitation")
    assert state
    assert state.state == "0.0"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == LENGTH_MILLIMETERS
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-rainy"
    assert state.attributes.get("type") is None
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_precipitation")
    assert entry
    assert entry.unique_id == "0123456-precipitation"

    # Pressure tendency sensor (string state, custom device class, no state class).
    state = hass.states.get("sensor.home_pressure_tendency")
    assert state
    assert state.state == "falling"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_ICON) == "mdi:gauge"
    assert state.attributes.get(ATTR_DEVICE_CLASS) == "accuweather__pressure_tendency"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_pressure_tendency")
    assert entry
    assert entry.unique_id == "0123456-pressuretendency"

    # RealFeel temperature sensor.
    state = hass.states.get("sensor.home_realfeel_temperature")
    assert state
    assert state.state == "25.1"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_realfeel_temperature")
    assert entry
    assert entry.unique_id == "0123456-realfeeltemperature"

    # UV index sensor.
    state = hass.states.get("sensor.home_uv_index")
    assert state
    assert state.state == "6"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UV_INDEX
    assert state.attributes.get("level") == "High"
    assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
    entry = registry.async_get("sensor.home_uv_index")
    assert entry
    assert entry.unique_id == "0123456-uvindex"
async def test_sensor_with_forecast(hass):
    """Test states of the sensor with forecast."""
    await init_integration(hass, forecast=True)
    registry = er.async_get(hass)

    state = hass.states.get("sensor.home_hours_of_sun_0d")
    assert state
    assert state.state == "7.2"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-partly-cloudy"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TIME_HOURS
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_hours_of_sun_0d")
    assert entry
    assert entry.unique_id == "0123456-hoursofsun-0"

    state = hass.states.get("sensor.home_realfeel_temperature_max_0d")
    assert state
    assert state.state == "29.8"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_realfeel_temperature_max_0d")
    assert entry
    # Was missing: verify the unique_id like the other registry checks.
    assert entry.unique_id == "0123456-realfeeltemperaturemax-0"

    state = hass.states.get("sensor.home_realfeel_temperature_min_0d")
    assert state
    assert state.state == "15.1"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
    assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_realfeel_temperature_min_0d")
    assert entry
    assert entry.unique_id == "0123456-realfeeltemperaturemin-0"

    state = hass.states.get("sensor.home_thunderstorm_probability_day_0d")
    assert state
    assert state.state == "40"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-lightning"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_thunderstorm_probability_day_0d")
    assert entry
    assert entry.unique_id == "0123456-thunderstormprobabilityday-0"

    state = hass.states.get("sensor.home_thunderstorm_probability_night_0d")
    assert state
    assert state.state == "40"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-lightning"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_thunderstorm_probability_night_0d")
    assert entry
    assert entry.unique_id == "0123456-thunderstormprobabilitynight-0"

    state = hass.states.get("sensor.home_uv_index_0d")
    assert state
    assert state.state == "5"
    assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert state.attributes.get(ATTR_ICON) == "mdi:weather-sunny"
    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UV_INDEX
    assert state.attributes.get("level") == "Moderate"
    assert state.attributes.get(ATTR_STATE_CLASS) is None
    entry = registry.async_get("sensor.home_uv_index_0d")
    assert entry
    assert entry.unique_id == "0123456-uvindex-0"
async def test_sensor_disabled(hass):
    """Test sensor disabled by default."""
    await init_integration(hass)
    registry = er.async_get(hass)

    # The apparent-temperature sensor is registered disabled by the integration.
    entry = registry.async_get("sensor.home_apparent_temperature")
    assert entry
    assert entry.unique_id == "0123456-apparenttemperature"
    assert entry.disabled
    assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION

    # Test enabling entity: clearing disabled_by yields a new, enabled entry.
    updated_entry = registry.async_update_entity(
        entry.entity_id, **{"disabled_by": None}
    )
    assert updated_entry != entry
    assert updated_entry.disabled is False
async def test_sensor_enabled_without_forecast(hass):
registry = er.async_get(hass)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-apparenttemperature",
suggested_object_id="home_apparent_temperature",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-cloudcover",
suggested_object_id="home_cloud_cover",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-dewpoint",
suggested_object_id="home_dew_point",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-realfeeltemperatureshade",
suggested_object_id="home_realfeel_temperature_shade",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-wetbulbtemperature",
suggested_object_id="home_wet_bulb_temperature",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-wind",
suggested_object_id="home_wind",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-windchilltemperature",
suggested_object_id="home_wind_chill_temperature",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-windgust",
suggested_object_id="home_wind_gust",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-cloudcoverday-0",
suggested_object_id="home_cloud_cover_day_0d",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-cloudcovernight-0",
suggested_object_id="home_cloud_cover_night_0d",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-grass-0",
suggested_object_id="home_grass_pollen_0d",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-mold-0",
suggested_object_id="home_mold_pollen_0d",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-ozone-0",
suggested_object_id="home_ozone_0d",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-ragweed-0",
suggested_object_id="home_ragweed_pollen_0d",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-realfeeltemperatureshademax-0",
suggested_object_id="home_realfeel_temperature_shade_max_0d",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-realfeeltemperatureshademin-0",
suggested_object_id="home_realfeel_temperature_shade_min_0d",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-tree-0",
suggested_object_id="home_tree_pollen_0d",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-windgustday-0",
suggested_object_id="home_wind_gust_day_0d",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-windgustnight-0",
suggested_object_id="home_wind_gust_night_0d",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-windday-0",
suggested_object_id="home_wind_day_0d",
disabled_by=None,
)
registry.async_get_or_create(
SENSOR_DOMAIN,
DOMAIN,
"0123456-windnight-0",
suggested_object_id="home_wind_night_0d",
disabled_by=None,
)
await init_integration(hass, forecast=True)
state = hass.states.get("sensor.home_apparent_temperature")
assert state
assert state.state == "22.8"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
entry = registry.async_get("sensor.home_apparent_temperature")
assert entry
assert entry.unique_id == "0123456-apparenttemperature"
state = hass.states.get("sensor.home_cloud_cover")
assert state
assert state.state == "10"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.attributes.get(ATTR_ICON) == "mdi:weather-cloudy"
assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
entry = registry.async_get("sensor.home_cloud_cover")
assert entry
assert entry.unique_id == "0123456-cloudcover"
state = hass.states.get("sensor.home_dew_point")
assert state
assert state.state == "16.2"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
entry = registry.async_get("sensor.home_dew_point")
assert entry
assert entry.unique_id == "0123456-dewpoint"
state = hass.states.get("sensor.home_realfeel_temperature_shade")
assert state
assert state.state == "21.1"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
entry = registry.async_get("sensor.home_realfeel_temperature_shade")
assert entry
assert entry.unique_id == "0123456-realfeeltemperatureshade"
state = hass.states.get("sensor.home_wet_bulb_temperature")
assert state
assert state.state == "18.6"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
entry = registry.async_get("sensor.home_wet_bulb_temperature")
assert entry
assert entry.unique_id == "0123456-wetbulbtemperature"
state = hass.states.get("sensor.home_wind_chill_temperature")
assert state
assert state.state == "22.8"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
entry = registry.async_get("sensor.home_wind_chill_temperature")
assert entry
assert entry.unique_id == "0123456-windchilltemperature"
state = hass.states.get("sensor.home_wind_gust")
assert state
assert state.state == "20.3"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SPEED_KILOMETERS_PER_HOUR
assert state.attributes.get(ATTR_ICON) == "mdi:weather-windy"
assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
entry = registry.async_get("sensor.home_wind_gust")
assert entry
assert entry.unique_id == "0123456-windgust"
state = hass.states.get("sensor.home_wind")
assert state
assert state.state == "14.5"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SPEED_KILOMETERS_PER_HOUR
assert state.attributes.get(ATTR_ICON) == "mdi:weather-windy"
assert state.attributes.get(ATTR_STATE_CLASS) == STATE_CLASS_MEASUREMENT
entry = registry.async_get("sensor.home_wind")
assert entry
assert entry.unique_id == "0123456-wind"
state = hass.states.get("sensor.home_cloud_cover_day_0d")
assert state
assert state.state == "58"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.attributes.get(ATTR_ICON) == "mdi:weather-cloudy"
assert state.attributes.get(ATTR_STATE_CLASS) is None
entry = registry.async_get("sensor.home_cloud_cover_day_0d")
assert entry
assert entry.unique_id == "0123456-cloudcoverday-0"
state = hass.states.get("sensor.home_cloud_cover_night_0d")
assert state
assert state.state == "65"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE
assert state.attributes.get(ATTR_ICON) == "mdi:weather-cloudy"
assert state.attributes.get(ATTR_STATE_CLASS) is None
entry = registry.async_get("sensor.home_cloud_cover_night_0d")
assert entry
state = hass.states.get("sensor.home_grass_pollen_0d")
assert state
assert state.state == "0"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
== CONCENTRATION_PARTS_PER_CUBIC_METER
)
assert state.attributes.get("level") == "Low"
assert state.attributes.get(ATTR_ICON) == "mdi:grass"
assert state.attributes.get(ATTR_STATE_CLASS) is None
entry = registry.async_get("sensor.home_grass_pollen_0d")
assert entry
assert entry.unique_id == "0123456-grass-0"
state = hass.states.get("sensor.home_mold_pollen_0d")
assert state
assert state.state == "0"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
== CONCENTRATION_PARTS_PER_CUBIC_METER
)
assert state.attributes.get("level") == "Low"
assert state.attributes.get(ATTR_ICON) == "mdi:blur"
entry = registry.async_get("sensor.home_mold_pollen_0d")
assert entry
assert entry.unique_id == "0123456-mold-0"
state = hass.states.get("sensor.home_ozone_0d")
assert state
assert state.state == "32"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get("level") == "Good"
assert state.attributes.get(ATTR_ICON) == "mdi:vector-triangle"
assert state.attributes.get(ATTR_STATE_CLASS) is None
entry = registry.async_get("sensor.home_ozone_0d")
assert entry
assert entry.unique_id == "0123456-ozone-0"
state = hass.states.get("sensor.home_ragweed_pollen_0d")
assert state
assert state.state == "0"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
== CONCENTRATION_PARTS_PER_CUBIC_METER
)
assert state.attributes.get("level") == "Low"
assert state.attributes.get(ATTR_ICON) == "mdi:sprout"
entry = registry.async_get("sensor.home_ragweed_pollen_0d")
assert entry
assert entry.unique_id == "0123456-ragweed-0"
state = hass.states.get("sensor.home_realfeel_temperature_shade_max_0d")
assert state
assert state.state == "28.0"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
assert state.attributes.get(ATTR_STATE_CLASS) is None
entry = registry.async_get("sensor.home_realfeel_temperature_shade_max_0d")
assert entry
assert entry.unique_id == "0123456-realfeeltemperatureshademax-0"
state = hass.states.get("sensor.home_realfeel_temperature_shade_min_0d")
assert state
assert state.state == "15.1"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == TEMP_CELSIUS
assert state.attributes.get(ATTR_DEVICE_CLASS) == DEVICE_CLASS_TEMPERATURE
entry = registry.async_get("sensor.home_realfeel_temperature_shade_min_0d")
assert entry
assert entry.unique_id == "0123456-realfeeltemperatureshademin-0"
state = hass.states.get("sensor.home_tree_pollen_0d")
assert state
assert state.state == "0"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert (
state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
== CONCENTRATION_PARTS_PER_CUBIC_METER
)
assert state.attributes.get("level") == "Low"
assert state.attributes.get(ATTR_ICON) == "mdi:tree-outline"
assert state.attributes.get(ATTR_STATE_CLASS) is None
entry = registry.async_get("sensor.home_tree_pollen_0d")
assert entry
assert entry.unique_id == "0123456-tree-0"
state = hass.states.get("sensor.home_wind_day_0d")
assert state
assert state.state == "13.0"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SPEED_KILOMETERS_PER_HOUR
assert state.attributes.get("direction") == "SSE"
assert state.attributes.get(ATTR_ICON) == "mdi:weather-windy"
entry = registry.async_get("sensor.home_wind_day_0d")
assert entry
assert entry.unique_id == "0123456-windday-0"
state = hass.states.get("sensor.home_wind_night_0d")
assert state
assert state.state == "7.4"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SPEED_KILOMETERS_PER_HOUR
assert state.attributes.get("direction") == "WNW"
assert state.attributes.get(ATTR_ICON) == "mdi:weather-windy"
assert state.attributes.get(ATTR_STATE_CLASS) is None
entry = registry.async_get("sensor.home_wind_night_0d")
assert entry
assert entry.unique_id == "0123456-windnight-0"
state = hass.states.get("sensor.home_wind_gust_day_0d")
assert state
assert state.state == "29.6"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SPEED_KILOMETERS_PER_HOUR
assert state.attributes.get("direction") == "S"
assert state.attributes.get(ATTR_ICON) == "mdi:weather-windy"
assert state.attributes.get(ATTR_STATE_CLASS) is None
entry = registry.async_get("sensor.home_wind_gust_day_0d")
assert entry
assert entry.unique_id == "0123456-windgustday-0"
state = hass.states.get("sensor.home_wind_gust_night_0d")
assert state
assert state.state == "18.5"
assert state.attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SPEED_KILOMETERS_PER_HOUR
assert state.attributes.get("direction") == "WSW"
assert state.attributes.get(ATTR_ICON) == "mdi:weather-windy"
assert state.attributes.get(ATTR_STATE_CLASS) is None
entry = registry.async_get("sensor.home_wind_gust_night_0d")
assert entry
assert entry.unique_id == "0123456-windgustnight-0"
async def test_availability(hass):
    """Sensor goes unavailable while the AccuWeather API errors, then recovers."""
    await init_integration(hass)
    # Baseline: the entity is available with the fixture's ceiling value.
    state = hass.states.get("sensor.home_cloud_ceiling")
    assert state
    assert state.state != STATE_UNAVAILABLE
    assert state.state == "3200"
    # First scheduled refresh: the API call raises, so the entity must go unavailable.
    future = utcnow() + timedelta(minutes=60)
    with patch(
        "homeassistant.components.accuweather.AccuWeather.async_get_current_conditions",
        side_effect=ConnectionError(),
    ):
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()
        state = hass.states.get("sensor.home_cloud_ceiling")
        assert state
        assert state.state == STATE_UNAVAILABLE
    # Second refresh: the API succeeds again, so the original state must be restored.
    future = utcnow() + timedelta(minutes=120)
    with patch(
        "homeassistant.components.accuweather.AccuWeather.async_get_current_conditions",
        return_value=json.loads(
            load_fixture("accuweather/current_conditions_data.json")
        ),
    ), patch(
        "homeassistant.components.accuweather.AccuWeather.requests_remaining",
        new_callable=PropertyMock,
        return_value=10,
    ):
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()
        state = hass.states.get("sensor.home_cloud_ceiling")
        assert state
        assert state.state != STATE_UNAVAILABLE
        assert state.state == "3200"
async def test_manual_update_entity(hass):
    """homeassistant.update_entity triggers exactly one refresh of each endpoint."""
    await init_integration(hass, forecast=True)
    await async_setup_component(hass, "homeassistant", {})
    current = json.loads(load_fixture("accuweather/current_conditions_data.json"))
    forecast = json.loads(load_fixture("accuweather/forecast_data.json"))
    with patch(
        "homeassistant.components.accuweather.AccuWeather.async_get_current_conditions",
        return_value=current,
    ) as mock_current, patch(
        "homeassistant.components.accuweather.AccuWeather.async_get_forecast",
        return_value=forecast,
    ) as mock_forecast, patch(
        "homeassistant.components.accuweather.AccuWeather.requests_remaining",
        new_callable=PropertyMock,
        return_value=10,
    ):
        # Requesting one entity should refresh the shared coordinator once.
        await hass.services.async_call(
            "homeassistant",
            "update_entity",
            {ATTR_ENTITY_ID: ["sensor.home_cloud_ceiling"]},
            blocking=True,
        )
        assert mock_current.call_count == 1
        assert mock_forecast.call_count == 1
async def test_sensor_imperial_units(hass):
    """Cloud-ceiling sensor reports feet when the imperial unit system is active."""
    hass.config.units = IMPERIAL_SYSTEM
    await init_integration(hass)
    state = hass.states.get("sensor.home_cloud_ceiling")
    assert state
    assert state.state == "10500"
    attributes = state.attributes
    assert attributes.get(ATTR_ATTRIBUTION) == ATTRIBUTION
    assert attributes.get(ATTR_ICON) == "mdi:weather-fog"
    assert attributes.get(ATTR_UNIT_OF_MEASUREMENT) == LENGTH_FEET
async def test_state_update(hass):
    """A changed API payload is reflected in the sensor state on the next refresh."""
    await init_integration(hass)
    state = hass.states.get("sensor.home_cloud_ceiling")
    assert state
    assert state.state != STATE_UNAVAILABLE
    assert state.state == "3200"
    future = utcnow() + timedelta(minutes=60)
    # Mutate the fixture so the ceiling value differs from the initial 3200.
    current_condition = json.loads(
        load_fixture("accuweather/current_conditions_data.json")
    )
    current_condition["Ceiling"]["Metric"]["Value"] = 3300
    with patch(
        "homeassistant.components.accuweather.AccuWeather.async_get_current_conditions",
        return_value=current_condition,
    ), patch(
        "homeassistant.components.accuweather.AccuWeather.requests_remaining",
        new_callable=PropertyMock,
        return_value=10,
    ):
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()
        state = hass.states.get("sensor.home_cloud_ceiling")
        assert state
        assert state.state != STATE_UNAVAILABLE
        assert state.state == "3300"
| true | true |
f7f7e34bca1b3f88ff1403c8427d3696b3f1604d | 802 | py | Python | elichika/tests/node/ndarray/Shape.py | disktnk/chainer-compiler | 5cfd027b40ea6e4abf73eb42be70b4fba74d1cde | [
"MIT"
] | null | null | null | elichika/tests/node/ndarray/Shape.py | disktnk/chainer-compiler | 5cfd027b40ea6e4abf73eb42be70b4fba74d1cde | [
"MIT"
] | null | null | null | elichika/tests/node/ndarray/Shape.py | disktnk/chainer-compiler | 5cfd027b40ea6e4abf73eb42be70b4fba74d1cde | [
"MIT"
] | null | null | null | # coding: utf-8
import chainer
import chainer.functions as F
class Shape(chainer.Chain):
    """Network returning the input's shape as a plain Python list."""

    def forward(self, x):
        return [dim for dim in x.shape]
class ShapeConcat(chainer.Chain):
    """Network appending the constant 42 to the input's shape, as an ndarray."""

    def forward(self, x):
        # `np` resolves at call time to the module-level numpy import below.
        shape = x.shape
        return np.array(shape + (42,))
class ShapeIndex(chainer.Chain):
    """Network returning the size of the input's first dimension."""

    def forward(self, x):
        dims = x.shape
        return dims[0]
# ======================================
import testtools
import numpy as np
def main():
    """Generate elichika test cases for the three shape-accessing networks."""
    # numpy is already imported at module level as `np`; the previous
    # redundant function-local `import numpy as np` has been removed.
    np.random.seed(314)
    x = np.random.rand(12, 6, 4).astype(np.float32)
    testtools.generate_testcase(Shape(), [x])
    testtools.generate_testcase(ShapeConcat(), [x], subname='concat')
    testtools.generate_testcase(ShapeIndex(), [x], subname='index')
if __name__ == '__main__':
main()
| 19.095238 | 69 | 0.604738 |
import chainer
import chainer.functions as F
class Shape(chainer.Chain):
def forward(self, x):
y1 = x.shape
return list(y1)
class ShapeConcat(chainer.Chain):
def forward(self, x):
y1 = x.shape
return np.array(y1 + (42,))
class ShapeIndex(chainer.Chain):
def forward(self, x):
y1 = x.shape
return y1[0]
import testtools
import numpy as np
def main():
import numpy as np
np.random.seed(314)
x = np.random.rand(12, 6, 4).astype(np.float32)
testtools.generate_testcase(Shape(), [x])
testtools.generate_testcase(ShapeConcat(), [x], subname='concat')
testtools.generate_testcase(ShapeIndex(), [x], subname='index')
if __name__ == '__main__':
main()
| true | true |
f7f7e3d1863b55115a131a6f14c0094e353bcd81 | 11,152 | py | Python | test/IECoreScene/CurvesPrimitiveTest.py | ericmehl/cortex | 054839cc709ce153d1bcaaefe7f340ebe641ec82 | [
"BSD-3-Clause"
] | 386 | 2015-01-02T11:10:43.000Z | 2022-03-10T15:12:20.000Z | test/IECoreScene/CurvesPrimitiveTest.py | ericmehl/cortex | 054839cc709ce153d1bcaaefe7f340ebe641ec82 | [
"BSD-3-Clause"
] | 484 | 2015-01-09T18:28:06.000Z | 2022-03-31T16:02:04.000Z | test/IECoreScene/CurvesPrimitiveTest.py | ericmehl/cortex | 054839cc709ce153d1bcaaefe7f340ebe641ec82 | [
"BSD-3-Clause"
] | 99 | 2015-01-28T23:18:04.000Z | 2022-03-27T00:59:39.000Z | ##########################################################################
#
# Copyright (c) 2008-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import os.path
import math
import unittest
import imath
import IECore
import IECoreScene
class CurvesPrimitiveTest( unittest.TestCase ) :
	"""Tests for IECoreScene.CurvesPrimitive : construction, copying, serialisation,
	primitive variable sizes, topology changes and hashing."""

	def testConstructors( self ) :
		"""Each constructor overload yields the expected topology, basis, periodicity and "P"."""

		c = IECoreScene.CurvesPrimitive()
		self.assertEqual( c.verticesPerCurve(), IECore.IntVectorData() )
		self.assertEqual( c.basis(), IECore.CubicBasisf.linear() )
		self.assertEqual( c.periodic(), False )
		self.assertEqual( c.keys(), [] )
		self.assertEqual( c.numCurves(), 0 )

		c = IECoreScene.CurvesPrimitive( IECore.IntVectorData( [ 2 ] ) )
		self.assertEqual( c.verticesPerCurve(), IECore.IntVectorData( [ 2 ] ) )
		self.assertEqual( c.basis(), IECore.CubicBasisf.linear() )
		self.assertEqual( c.periodic(), False )
		self.assertEqual( c.keys(), [] )
		self.assertEqual( c.numCurves(), 1 )

		c = IECoreScene.CurvesPrimitive( IECore.IntVectorData( [ 4 ] ), IECore.CubicBasisf.bSpline() )
		self.assertEqual( c.verticesPerCurve(), IECore.IntVectorData( [ 4 ] ) )
		self.assertEqual( c.basis(), IECore.CubicBasisf.bSpline() )
		self.assertEqual( c.periodic(), False )
		self.assertEqual( c.keys(), [] )
		self.assertEqual( c.numCurves(), 1 )

		c = IECoreScene.CurvesPrimitive( IECore.IntVectorData( [ 4 ] ), IECore.CubicBasisf.bSpline(), True )
		self.assertEqual( c.verticesPerCurve(), IECore.IntVectorData( [ 4 ] ) )
		self.assertEqual( c.basis(), IECore.CubicBasisf.bSpline() )
		self.assertEqual( c.periodic(), True )
		self.assertEqual( c.keys(), [] )
		self.assertEqual( c.numCurves(), 1 )

		i = IECore.IntVectorData( [ 4 ] )
		p = IECore.V3fVectorData( [ imath.V3f( 0 ), imath.V3f( 1 ), imath.V3f( 2 ), imath.V3f( 3 ) ] )
		c = IECoreScene.CurvesPrimitive( i, IECore.CubicBasisf.bSpline(), True, p )
		self.assertEqual( c.verticesPerCurve(), IECore.IntVectorData( [ 4 ] ) )
		self.assertEqual( c.basis(), IECore.CubicBasisf.bSpline() )
		self.assertEqual( c.periodic(), True )
		self.assertEqual( c.keys(), [ "P" ] )
		# The constructor copies the data and promotes its geometric
		# interpretation to Point, so it compares unequal to the raw input.
		self.assertNotEqual( c["P"].data, p )
		self.assertEqual( c["P"].data.getInterpretation(), IECore.GeometricData.Interpretation.Point )
		pp = p.copy()
		pp.setInterpretation( IECore.GeometricData.Interpretation.Point )
		self.assertEqual( c["P"].data, pp )
		self.assertFalse( c["P"].data.isSame( p ) )
		self.assertFalse( c["P"].data.isSame( pp ) )
		self.assertFalse( c.verticesPerCurve().isSame( i ) )

	def testConstructorValidation( self ) :
		"""Vertex counts incompatible with the chosen basis must be rejected."""

		self.assertRaises( Exception, IECoreScene.CurvesPrimitive, IECore.IntVectorData( [ 1 ] ) )
		self.assertRaises( Exception, IECoreScene.CurvesPrimitive, IECore.IntVectorData( [ 3 ] ), IECore.CubicBasisf.bSpline() )
		self.assertRaises( Exception, IECoreScene.CurvesPrimitive, IECore.IntVectorData( [ 5 ] ), IECore.CubicBasisf.bezier() )

	def testConstructorKeywords( self ) :
		"""Keyword arguments map to the same parameters as the positional form."""

		c = IECoreScene.CurvesPrimitive(
			verticesPerCurve = IECore.IntVectorData( [ 3 ] ),
			periodic = True,
			p = IECore.V3fVectorData( [ imath.V3f( x ) for x in range( 0, 3 ) ] )
		)
		self.assertEqual( c.verticesPerCurve(), IECore.IntVectorData( [ 3 ] ) )
		self.assertEqual( c.periodic(), True )
		self.assertEqual( c.basis(), IECore.CubicBasisf.linear() )
		self.assertEqual( c["P"].data, IECore.V3fVectorData( [ imath.V3f( x ) for x in range( 0, 3 ) ], IECore.GeometricData.Interpretation.Point ) )

	def testCopy( self ) :
		"""copy() must produce an equal primitive."""

		i = IECore.IntVectorData( [ 4 ] )
		p = IECore.V3fVectorData( [ imath.V3f( 0 ), imath.V3f( 1 ), imath.V3f( 2 ), imath.V3f( 3 ) ] )
		c = IECoreScene.CurvesPrimitive( i, IECore.CubicBasisf.bSpline(), True, p )
		cc = c.copy()
		self.assertEqual( c, cc )

	def testIO( self ) :
		"""Round-trip through a .cob file preserves the primitive, and legacy files still load."""

		i = IECore.IntVectorData( [ 4 ] )
		p = IECore.V3fVectorData( [ imath.V3f( 0 ), imath.V3f( 1 ), imath.V3f( 2 ), imath.V3f( 3 ) ] )
		c = IECoreScene.CurvesPrimitive( i, IECore.CubicBasisf.bSpline(), True, p )
		IECore.Writer.create( c, os.path.join( "test", "IECore", "data", "curves.cob" ) ).write()
		cc = IECore.Reader.create( os.path.join( "test", "IECore", "data", "curves.cob" ) ).read()
		self.assertEqual( cc, c )
		# Just check that a pre-existing legacy file loads without raising.
		c = IECore.Reader.create( os.path.join( "test", "IECore", "data", "cobFiles", "torusCurves.cob" ) ).read()

	def testVariableSize( self ) :
		"""variableSize() depends on interpolation, periodicity and the queried curve."""

		c = IECoreScene.CurvesPrimitive( IECore.IntVectorData( [ 4 ] ), IECore.CubicBasisf.bSpline(), True )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Constant ), 1 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Uniform ), 1 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Vertex ), 4 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Varying ), 4 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.FaceVarying ), 4 )
		# asking for the constant size of a single curve makes no sense
		self.assertRaises( Exception, c.variableSize, IECoreScene.PrimitiveVariable.Interpolation.Constant, 0 )
		# as does asking for the size of a nonexistent curve
		self.assertRaises( Exception, c.variableSize, IECoreScene.PrimitiveVariable.Interpolation.Vertex, 1 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Uniform, 0 ), 1 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Vertex, 0 ), 4 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Varying, 0 ), 4 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.FaceVarying, 0 ), 4 )
		self.assertEqual( c.numSegments( 0 ), 4 )

		# A non-periodic cubic curve has fewer varying values and segments.
		c = IECoreScene.CurvesPrimitive( IECore.IntVectorData( [ 4 ] ), IECore.CubicBasisf.bSpline(), False )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Constant ), 1 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Uniform ), 1 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Vertex ), 4 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Varying ), 2 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.FaceVarying ), 2 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Uniform, 0 ), 1 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Vertex, 0 ), 4 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Varying, 0 ), 2 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.FaceVarying, 0 ), 2 )
		self.assertEqual( c.numSegments( 0 ), 1 )

	def testSetTopology( self ) :
		"""setTopology() replaces counts, basis and periodicity, and copies its input."""

		c = IECoreScene.CurvesPrimitive( IECore.IntVectorData( [ 4 ] ), IECore.CubicBasisf.bSpline(), True )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Constant ), 1 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Uniform ), 1 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Vertex ), 4 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Varying ), 4 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.FaceVarying ), 4 )

		newVertsPerCurve = IECore.IntVectorData( [ 4, 4 ] )
		c.setTopology( newVertsPerCurve, IECore.CubicBasisf.bezier(), False )
		self.assertEqual( c.verticesPerCurve(), newVertsPerCurve )
		self.assertEqual( c.basis(), IECore.CubicBasisf.bezier() )
		self.assertEqual( c.periodic(), False )
		self.assertEqual( c.numCurves(), 2 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Constant ), 1 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Uniform ), 2 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Vertex ), 8 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Varying ), 4 )
		self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.FaceVarying ), 4 )

		# Mutating the argument afterwards must not affect the primitive,
		# proving setTopology() took a copy.
		newVertsPerCurve.append( 10 )
		self.assertEqual( c.verticesPerCurve(), IECore.IntVectorData( [ 4, 4 ] ) )

	def testHash( self ) :
		"""hash() changes with any difference; topologyHash() tracks topology only.

		Fixed : the assertions after each setTopology() call previously compared
		topologyHash() against the stored full hash `h` rather than the stored
		topology hash `t` (which was assigned but never used), making them vacuous."""

		c = IECoreScene.CurvesPrimitive( IECore.IntVectorData( [ 4 ] ), IECore.CubicBasisf.bSpline(), True )
		h = c.hash()
		t = c.topologyHash()

		c2 = c.copy()
		self.assertEqual( c2.hash(), h )
		self.assertEqual( c2.topologyHash(), t )

		# Changing the vertex counts changes both hashes.
		c.setTopology( IECore.IntVectorData( [ 5 ] ), IECore.CubicBasisf.bSpline(), True )
		self.assertNotEqual( c.hash(), h )
		self.assertNotEqual( c.topologyHash(), t )
		h = c.hash()
		t = c.topologyHash()

		# Changing the basis changes both hashes.
		c.setTopology( IECore.IntVectorData( [ 5 ] ), IECore.CubicBasisf.catmullRom(), True )
		self.assertNotEqual( c.hash(), h )
		self.assertNotEqual( c.topologyHash(), t )
		h = c.hash()
		t = c.topologyHash()

		# Changing the periodicity changes both hashes.
		c.setTopology( IECore.IntVectorData( [ 5 ] ), IECore.CubicBasisf.catmullRom(), False )
		self.assertNotEqual( c.hash(), h )
		self.assertNotEqual( c.topologyHash(), t )
		h = c.hash()
		t = c.topologyHash()

		# Adding a primitive variable changes the full hash but not the topology hash.
		c["primVar"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Constant, IECore.IntData( 10 ) )
		self.assertNotEqual( c.hash(), h )
		self.assertEqual( c.topologyHash(), t )

	def tearDown( self ) :
		"""Remove the file written by testIO()."""

		if os.path.isfile( os.path.join( "test", "IECore", "data", "curves.cob" ) ) :
			os.remove( os.path.join( "test", "IECore", "data", "curves.cob" ) )
if __name__ == "__main__":
unittest.main()
| 47.455319 | 143 | 0.717898 | rpolation.FaceVarying, 0 ), 4 )
self.assertEqual( c.numSegments( 0 ), 4 )
c = IECoreScene.CurvesPrimitive( IECore.IntVectorData( [ 4 ] ), IECore.CubicBasisf.bSpline(), False )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Constant ), 1 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Uniform ), 1 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Vertex ), 4 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Varying ), 2 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.FaceVarying ), 2 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Uniform, 0 ), 1 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Vertex, 0 ), 4 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Varying, 0 ), 2 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.FaceVarying, 0 ), 2 )
self.assertEqual( c.numSegments( 0 ), 1 )
def testSetTopology( self ) :
c = IECoreScene.CurvesPrimitive( IECore.IntVectorData( [ 4 ] ), IECore.CubicBasisf.bSpline(), True )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Constant ), 1 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Uniform ), 1 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Vertex ), 4 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Varying ), 4 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.FaceVarying ), 4 )
newVertsPerCurve = IECore.IntVectorData( [ 4, 4 ] )
c.setTopology( newVertsPerCurve, IECore.CubicBasisf.bezier(), False )
self.assertEqual( c.verticesPerCurve(), newVertsPerCurve )
self.assertEqual( c.basis(), IECore.CubicBasisf.bezier() )
self.assertEqual( c.periodic(), False )
self.assertEqual( c.numCurves(), 2 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Constant ), 1 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Uniform ), 2 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Vertex ), 8 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.Varying ), 4 )
self.assertEqual( c.variableSize( IECoreScene.PrimitiveVariable.Interpolation.FaceVarying ), 4 )
newVertsPerCurve.append( 10 )
self.assertEqual( c.verticesPerCurve(), IECore.IntVectorData( [ 4, 4 ] ) )
def testHash( self ) :
c = IECoreScene.CurvesPrimitive( IECore.IntVectorData( [ 4 ] ), IECore.CubicBasisf.bSpline(), True )
h = c.hash()
t = c.topologyHash()
c2 = c.copy()
self.assertEqual( c2.hash(), h )
self.assertEqual( c2.topologyHash(), t )
c.setTopology( IECore.IntVectorData( [ 5 ] ), IECore.CubicBasisf.bSpline(), True )
self.assertNotEqual( c.hash(), h )
self.assertNotEqual( c.topologyHash(), h )
h = c.hash()
t = c.topologyHash()
c.setTopology( IECore.IntVectorData( [ 5 ] ), IECore.CubicBasisf.catmullRom(), True )
self.assertNotEqual( c.hash(), h )
self.assertNotEqual( c.topologyHash(), h )
h = c.hash()
t = c.topologyHash()
c.setTopology( IECore.IntVectorData( [ 5 ] ), IECore.CubicBasisf.catmullRom(), False )
self.assertNotEqual( c.hash(), h )
self.assertNotEqual( c.topologyHash(), h )
h = c.hash()
t = c.topologyHash()
c["primVar"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Constant, IECore.IntData( 10 ) )
self.assertNotEqual( c.hash(), h )
self.assertEqual( c.topologyHash(), t )
def tearDown( self ) :
if os.path.isfile( os.path.join( "test", "IECore", "data", "curves.cob" ) ) :
os.remove( os.path.join( "test", "IECore", "data", "curves.cob" ) )
if __name__ == "__main__":
unittest.main()
| true | true |
f7f7e4c585816ab09634d61d4487a11a5d9f6f9d | 390 | py | Python | test/nose_integration_tests/dummy_first_level_pkg_two_tests/dummy_test_f.py | Points/teamcity-python | 58dd0e9d83fea92f9212f2cadaaaeaefd1deb68e | [
"Apache-2.0"
] | null | null | null | test/nose_integration_tests/dummy_first_level_pkg_two_tests/dummy_test_f.py | Points/teamcity-python | 58dd0e9d83fea92f9212f2cadaaaeaefd1deb68e | [
"Apache-2.0"
] | null | null | null | test/nose_integration_tests/dummy_first_level_pkg_two_tests/dummy_test_f.py | Points/teamcity-python | 58dd0e9d83fea92f9212f2cadaaaeaefd1deb68e | [
"Apache-2.0"
] | null | null | null | from nose.plugins.attrib import attr
@attr('demo_smoke', 'smoke', 'known_bad')
def test_dummy_known_bad_with_assertion_error():
assert False
@attr('demo_smoke', 'smoke', 'known_bad')
def test_dummy_known_bad_with_assertion_pass():
assert True
@attr('demo_smoke', 'smoke', 'known_bad')
def test_dummy_known_bad_with_language_error():
non_existent_method_123()
assert True | 26 | 48 | 0.766667 | from nose.plugins.attrib import attr
@attr('demo_smoke', 'smoke', 'known_bad')
def test_dummy_known_bad_with_assertion_error():
assert False
@attr('demo_smoke', 'smoke', 'known_bad')
def test_dummy_known_bad_with_assertion_pass():
assert True
@attr('demo_smoke', 'smoke', 'known_bad')
def test_dummy_known_bad_with_language_error():
non_existent_method_123()
assert True | true | true |
f7f7e4e4ed733a16a3c4643f001ca2f4d405ddbe | 5,768 | py | Python | MachineLearning/Classification/KernelSupportVectorMachine(SVM)Classification/kernel_support_vector_machine_(svm)_classification.py | norbertosanchezdichi/TIL | 2e9719ddd288022f53b094a42679e849bdbcc625 | [
"MIT"
] | null | null | null | MachineLearning/Classification/KernelSupportVectorMachine(SVM)Classification/kernel_support_vector_machine_(svm)_classification.py | norbertosanchezdichi/TIL | 2e9719ddd288022f53b094a42679e849bdbcc625 | [
"MIT"
] | null | null | null | MachineLearning/Classification/KernelSupportVectorMachine(SVM)Classification/kernel_support_vector_machine_(svm)_classification.py | norbertosanchezdichi/TIL | 2e9719ddd288022f53b094a42679e849bdbcc625 | [
"MIT"
] | null | null | null | # Import libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Import dataset
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, :-1].values
Y = dataset.iloc[:, -1].values
print(f"X = {X}")
print(f"Y = {Y}")
print()
# Split Dataset: Training Set and Test Set
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size = 0.25, random_state = 0)
print(f"X_train = {X_train}")
print(f"X_test = {X_test}")
print(f"Y_train = {Y_train}")
print(f"Y_test = {Y_test}")
print()
# Feature Scaling (done after splitting to avoid information leakage.)
from sklearn.preprocessing import StandardScaler
standardScaler = StandardScaler()
X_train_scaled = standardScaler.fit_transform(X_train)
X_test_scaled = standardScaler.transform(X_test)
print(f"X_train_scaled = {X_train_scaled}")
print(f"X_test_scaled = {X_test_scaled}")
print()
# Kernel Support Vector Machine (SVM) Classifier
## Effective for data sets that are non-linearly separable, by mapping them to a higher dimension.
## The data set becomes separable by a line, a hyperplane, or another structure of dimension lower than the mapped space.
## Mapping to a higher-dimensional space can become computationally expensive.
## The Kernel Trick using the Gaussian Radial-Basis Function (RBF)
### It's a function of a vector and a landmark, the landmark being the center of the function's peak.
#### Using Euler's number, the function is three-dimensional and uses σ to adjust the radius of the base of the peak.
### It is used to produce a decision boundary for a non-linearly separable dataset.
### By choosing the optimal place for the landmark in the non-linear dataset and by tuning σ, the dataset is easily separated into two categories.
### Multiple kernel functions can be summed so that several landmarks, each with its own base radius, linearly separate the dataset in 3-D. This allows a more complex decision boundary to be created.
## Types of Kernel Functions
### Gaussian Radial-Basis Function (RBF) Kernel
### Sigmoid Kernel
### Polynomial Kernel
### mlkernels.readthedocs.io
### When evaluating which kernel to use, evaluate on new observations (K-Fold Cross Validation) and use different metrics (Accuracy, F1 Score, etc.)
## Non-Linear Support Vector Regression (SVR)
### Results in a non-linear separation between the two categories.
### For example, the intersection of three hyperplanes and the Gaussian RBF function is done in such a way that a non-linear solution projected to the 2-D space results in an accurate separation between the two categories.
# Create and train Kernel Support Vector Machine (SVM) model
## Use Gaussian Radial-Basis Function (RBF) kernel
from sklearn.svm import SVC
classifier = SVC(kernel = 'rbf', random_state = 0)
classifier.fit(X_train_scaled, Y_train)
# Predict if-purchase for 30 year old customer earning $87,000
## The raw feature vector must be transformed with the same fitted scaler.
Y_predict = classifier.predict(standardScaler.transform([[30, 87000]]))
# Output the purchase prediction for the 30 year old customer earning $87,000
print(f"Purchase possible from 30 year old earning $87,000? = {Y_predict}.")
print()
# Predict using Kernel Support Vector Machine (SVM) model
Y_predict = classifier.predict(X_test_scaled)
print(f"[Y_predict Y_test] = {np.concatenate((Y_predict.reshape(len(Y_predict), 1), Y_test.reshape(len(Y_test), 1)), axis = 1)}")
print()
# Create Confusion Matrix
## Not the optimal method to evaluate the performance of the model - K-Fold Cross Validation is preferred and it involves using validation tests.
from sklearn.metrics import confusion_matrix
print(f"Confusion Matrix = {confusion_matrix(Y_test, Y_predict)}")
print()
# Generate Accuracy Score
from sklearn.metrics import accuracy_score
print(f"Accuracy Score = {accuracy_score(Y_test, Y_predict)}")
# Output Training Set Results
from matplotlib.colors import ListedColormap
# Plot in the original (unscaled) feature space so the axes read as age / salary.
X_set, Y_set = standardScaler.inverse_transform(X_train_scaled), Y_train
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 10, stop = X_set[:, 0].max() + 10, step = 1),
                     np.arange(start = X_set[:, 1].min() - 1000, stop = X_set[:, 1].max() + 1000, step = 1))
# Colour the decision regions by classifying every grid point (scaled back for the model).
plt.contourf(X1, X2, classifier.predict(standardScaler.transform(np.array([X1.ravel(), X2.ravel()]).T)).reshape(X1.shape),
             alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
# Use Y_set consistently (it aliases Y_train) instead of mixing both names.
for i, j in enumerate(np.unique(Y_set)):
    plt.scatter(X_set[Y_set == j, 0], X_set[Y_set == j, 1], c = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Kernel Support Vector Machine (SVM) (Training Set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.savefig('Kernel_Support_Vector_Machine_Training_Set_Results.png')
plt.clf()
# Output Test Set Results
from matplotlib.colors import ListedColormap
X_set, Y_set = standardScaler.inverse_transform(X_test_scaled), Y_test
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 10, stop = X_set[:, 0].max() + 10, step = 1),
np.arange(start = X_set[:, 1].min() - 1000, stop = X_set[:, 1].max() + 1000, step = 1))
plt.contourf(X1, X2, classifier.predict(standardScaler.transform(np.array([X1.ravel(), X2.ravel()]).T)).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(Y_test)):
plt.scatter(X_set[Y_test == j, 0], X_set[Y_test == j, 1], c = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Kernel Support Vector Machine (SVM) (Test Set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.savefig('Kernel_Support_Vector_Machine_Test_Set_Results.png')
plt.clf() | 49.299145 | 224 | 0.73613 |
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, :-1].values
Y = dataset.iloc[:, -1].values
print(f"X = {X}")
print(f"Y = {Y}")
print()
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size = 0.25, random_state = 0)
print(f"X_train = {X_train}")
print(f"X_test = {X_test}")
print(f"Y_train = {Y_train}")
print(f"Y_test = {Y_test}")
print()
from sklearn.preprocessing import StandardScaler
standardScaler = StandardScaler()
X_train_scaled = standardScaler.fit_transform(X_train)
X_test_scaled = standardScaler.transform(X_test)
print(f"X_train_scaled = {X_train_scaled}")
print(f"X_test_scaled = {X_test_scaled}")
print()
n of three hyperplanes and the Gaussian RBF function is done in such a way that a non-linear solution projected to the 2-D space results in an accurate separation between the two categories.
# Create and train Kernel Support Vector Machine (SVM) model
## Use Gaussian Radial-Basis Function (RBF) kernel
from sklearn.svm import SVC
classifier = SVC(kernel = 'rbf', random_state = 0)
classifier.fit(X_train_scaled, Y_train)
# Predict if-purchase for 30 year old customer earning $87,000
Y_predict = classifier.predict(standardScaler.transform([[30, 87000]]))
# Output prediction salary for a position 6
print(f"Purchase possible from 30 year old earning $87,000? = {Y_predict}.")
print()
# Predict using Kernel Support Vector Machine (SVM) model
Y_predict = classifier.predict(X_test_scaled)
print(f"[Y_predict Y_test] = {np.concatenate((Y_predict.reshape(len(Y_predict), 1), Y_test.reshape(len(Y_test), 1)), axis = 1)}")
print()
# Create Confusion Matrix
## Not the optimal method to evaluate the performance of the model - K-Fold Cross Validation is preferred and it involves using validation tests.
from sklearn.metrics import confusion_matrix
print(f"Confusion Matrix = {confusion_matrix(Y_test, Y_predict)}")
print()
# Generate Accuracy Score
from sklearn.metrics import accuracy_score
print(f"Accuracy Score = {accuracy_score(Y_test, Y_predict)}")
# Output Training Set Results
from matplotlib.colors import ListedColormap
X_set, Y_set = standardScaler.inverse_transform(X_train_scaled), Y_train
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 10, stop = X_set[:, 0].max() + 10, step = 1),
np.arange(start = X_set[:, 1].min() - 1000, stop = X_set[:, 1].max() + 1000, step = 1))
plt.contourf(X1, X2, classifier.predict(standardScaler.transform(np.array([X1.ravel(), X2.ravel()]).T)).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(Y_train)):
plt.scatter(X_set[Y_train == j, 0], X_set[Y_train == j, 1], c = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Kernel Support Vector Machine (SVM) (Training Set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.savefig('Kernel_Support_Vector_Machine_Training_Set_Results.png')
plt.clf()
# Output Test Set Results
from matplotlib.colors import ListedColormap
X_set, Y_set = standardScaler.inverse_transform(X_test_scaled), Y_test
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 10, stop = X_set[:, 0].max() + 10, step = 1),
np.arange(start = X_set[:, 1].min() - 1000, stop = X_set[:, 1].max() + 1000, step = 1))
plt.contourf(X1, X2, classifier.predict(standardScaler.transform(np.array([X1.ravel(), X2.ravel()]).T)).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(Y_test)):
plt.scatter(X_set[Y_test == j, 0], X_set[Y_test == j, 1], c = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Kernel Support Vector Machine (SVM) (Test Set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.savefig('Kernel_Support_Vector_Machine_Test_Set_Results.png')
plt.clf() | true | true |
f7f7e66a8966c0a484a7742228442b179fa012db | 10,093 | py | Python | bootstrap.py | qoda/python-searchengine | d514ddbc0be9755ed48143b38dd4591d0123b1bc | [
"BSD-3-Clause"
] | 19 | 2015-01-21T14:46:20.000Z | 2021-04-28T22:46:31.000Z | bootstrap.py | gourav245/python-searchengine | d514ddbc0be9755ed48143b38dd4591d0123b1bc | [
"BSD-3-Clause"
] | 1 | 2020-03-07T08:41:20.000Z | 2020-03-11T09:16:59.000Z | bootstrap.py | gourav245/python-searchengine | d514ddbc0be9755ed48143b38dd4591d0123b1bc | [
"BSD-3-Clause"
] | 15 | 2015-03-28T07:44:52.000Z | 2021-01-28T23:45:20.000Z | ##############################################################################
#
# Copyright (c) 2006 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Bootstrap a buildout-based project
Simply run this script in a directory containing a buildout.cfg.
The script accepts buildout command-line options, so you can
use the -c option to specify an alternate configuration file.
"""
import os, shutil, sys, tempfile, textwrap, urllib, urllib2, subprocess
from optparse import OptionParser
from encodings import ascii
if sys.platform == 'win32':
def quote(c):
if ' ' in c:
return '"%s"' % c # work around spawn lamosity on windows
else:
return c
else:
quote = str
# See zc.buildout.easy_install._has_broken_dash_S for motivation and comments.
stdout, stderr = subprocess.Popen(
[sys.executable, '-Sc',
'try:\n'
' import ConfigParser\n'
'except ImportError:\n'
' print 1\n'
'else:\n'
' print 0\n'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
has_broken_dash_S = bool(int(stdout.strip()))
# In order to be more robust in the face of system Pythons, we want to
# run without site-packages loaded. This is somewhat tricky, in
# particular because Python 2.6's distutils imports site, so starting
# with the -S flag is not sufficient. However, we'll start with that:
if not has_broken_dash_S and 'site' in sys.modules:
# We will restart with python -S.
args = sys.argv[:]
args[0:0] = [sys.executable, '-S']
args = map(quote, args)
os.execv(sys.executable, args)
# Now we are running with -S. We'll get the clean sys.path, import site
# because distutils will do it later, and then reset the path and clean
# out any namespace packages from site-packages that might have been
# loaded by .pth files.
clean_path = sys.path[:]
import site
sys.path[:] = clean_path
for k, v in sys.modules.items():
if k in ('setuptools', 'pkg_resources') or (
hasattr(v, '__path__') and
len(v.__path__)==1 and
not os.path.exists(os.path.join(v.__path__[0],'__init__.py'))):
# This is a namespace package. Remove it.
sys.modules.pop(k)
is_jython = sys.platform.startswith('java')
setuptools_source = 'http://peak.telecommunity.com/dist/ez_setup.py'
distribute_source = 'http://python-distribute.org/distribute_setup.py'
# parsing arguments
def normalize_to_url(option, opt_str, value, parser):
if value:
if '://' not in value: # It doesn't smell like a URL.
value = 'file://%s' % (
urllib.pathname2url(
os.path.abspath(os.path.expanduser(value))),)
if opt_str == '--download-base' and not value.endswith('/'):
# Download base needs a trailing slash to make the world happy.
value += '/'
else:
value = None
name = opt_str[2:].replace('-', '_')
setattr(parser.values, name, value)
usage = '''\
[DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options]
Bootstraps a buildout-based project.
Simply run this script in a directory containing a buildout.cfg, using the
Python that you want bin/buildout to use.
Note that by using --setup-source and --download-base to point to
local resources, you can keep this script from going over the network.
'''
parser = OptionParser(usage=usage)
parser.add_option("-v", "--version", dest="version",
help="use a specific zc.buildout version")
parser.add_option("-d", "--distribute",
action="store_true", dest="use_distribute", default=False,
help="Use Distribute rather than Setuptools.")
parser.add_option("--setup-source", action="callback", dest="setup_source",
callback=normalize_to_url, nargs=1, type="string",
help=("Specify a URL or file location for the setup file. "
"If you use Setuptools, this will default to " +
setuptools_source + "; if you use Distribute, this "
"will default to " + distribute_source +"."))
parser.add_option("--download-base", action="callback", dest="download_base",
callback=normalize_to_url, nargs=1, type="string",
help=("Specify a URL or directory for downloading "
"zc.buildout and either Setuptools or Distribute. "
"Defaults to PyPI."))
parser.add_option("--eggs",
help=("Specify a directory for storing eggs. Defaults to "
"a temporary directory that is deleted when the "
"bootstrap script completes."))
parser.add_option("-t", "--accept-buildout-test-releases",
dest='accept_buildout_test_releases',
action="store_true", default=False,
help=("Normally, if you do not specify a --version, the "
"bootstrap script and buildout gets the newest "
"*final* versions of zc.buildout and its recipes and "
"extensions for you. If you use this flag, "
"bootstrap and buildout will get the newest releases "
"even if they are alphas or betas."))
parser.add_option("-c", None, action="store", dest="config_file",
help=("Specify the path to the buildout configuration "
"file to be used."))
options, args = parser.parse_args()
# if -c was provided, we push it back into args for buildout's main function
if options.config_file is not None:
args += ['-c', options.config_file]
if options.eggs:
eggs_dir = os.path.abspath(os.path.expanduser(options.eggs))
else:
eggs_dir = tempfile.mkdtemp()
if options.setup_source is None:
if options.use_distribute:
options.setup_source = distribute_source
else:
options.setup_source = setuptools_source
if options.accept_buildout_test_releases:
args.append('buildout:accept-buildout-test-releases=true')
args.append('bootstrap')
try:
import pkg_resources
import setuptools # A flag. Sometimes pkg_resources is installed alone.
if not hasattr(pkg_resources, '_distribute'):
raise ImportError
except ImportError:
ez_code = urllib2.urlopen(
options.setup_source).read().replace('\r\n', '\n')
ez = {}
exec ez_code in ez
setup_args = dict(to_dir=eggs_dir, download_delay=0)
if options.download_base:
setup_args['download_base'] = options.download_base
if options.use_distribute:
setup_args['no_fake'] = True
ez['use_setuptools'](**setup_args)
if 'pkg_resources' in sys.modules:
reload(sys.modules['pkg_resources'])
import pkg_resources
# This does not (always?) update the default working set. We will
# do it.
for path in sys.path:
if path not in pkg_resources.working_set.entries:
pkg_resources.working_set.add_entry(path)
cmd = [quote(sys.executable),
'-c',
quote('from setuptools.command.easy_install import main; main()'),
'-mqNxd',
quote(eggs_dir)]
if not has_broken_dash_S:
cmd.insert(1, '-S')
find_links = options.download_base
if not find_links:
find_links = os.environ.get('bootstrap-testing-find-links')
if find_links:
cmd.extend(['-f', quote(find_links)])
if options.use_distribute:
setup_requirement = 'distribute'
else:
setup_requirement = 'setuptools'
ws = pkg_resources.working_set
setup_requirement_path = ws.find(
pkg_resources.Requirement.parse(setup_requirement)).location
env = dict(
os.environ,
PYTHONPATH=setup_requirement_path)
requirement = 'zc.buildout'
version = options.version
if version is None and not options.accept_buildout_test_releases:
# Figure out the most recent final version of zc.buildout.
import setuptools.package_index
_final_parts = '*final-', '*final'
def _final_version(parsed_version):
for part in parsed_version:
if (part[:1] == '*') and (part not in _final_parts):
return False
return True
index = setuptools.package_index.PackageIndex(
search_path=[setup_requirement_path])
if find_links:
index.add_find_links((find_links,))
req = pkg_resources.Requirement.parse(requirement)
if index.obtain(req) is not None:
best = []
bestv = None
for dist in index[req.project_name]:
distv = dist.parsed_version
if _final_version(distv):
if bestv is None or distv > bestv:
best = [dist]
bestv = distv
elif distv == bestv:
best.append(dist)
if best:
best.sort()
version = best[-1].version
if version:
requirement = '=='.join((requirement, version))
cmd.append(requirement)
if is_jython:
import subprocess
exitcode = subprocess.Popen(cmd, env=env).wait()
else: # Windows prefers this, apparently; otherwise we would prefer subprocess
exitcode = os.spawnle(*([os.P_WAIT, sys.executable] + cmd + [env]))
if exitcode != 0:
sys.stdout.flush()
sys.stderr.flush()
print ("An error occurred when trying to install zc.buildout. "
"Look above this message for any errors that "
"were output by easy_install.")
sys.exit(exitcode)
ws.add_entry(eggs_dir)
ws.require(requirement)
import zc.buildout.buildout
zc.buildout.buildout.main(args)
if not options.eggs: # clean up temporary egg directory
shutil.rmtree(eggs_dir) | 38.670498 | 78 | 0.643416 | s:
reload(sys.modules['pkg_resources'])
import pkg_resources
# This does not (always?) update the default working set. We will
# do it.
for path in sys.path:
if path not in pkg_resources.working_set.entries:
pkg_resources.working_set.add_entry(path)
cmd = [quote(sys.executable),
'-c',
quote('from setuptools.command.easy_install import main; main()'),
'-mqNxd',
quote(eggs_dir)]
if not has_broken_dash_S:
cmd.insert(1, '-S')
find_links = options.download_base
if not find_links:
find_links = os.environ.get('bootstrap-testing-find-links')
if find_links:
cmd.extend(['-f', quote(find_links)])
if options.use_distribute:
setup_requirement = 'distribute'
else:
setup_requirement = 'setuptools'
ws = pkg_resources.working_set
setup_requirement_path = ws.find(
pkg_resources.Requirement.parse(setup_requirement)).location
env = dict(
os.environ,
PYTHONPATH=setup_requirement_path)
requirement = 'zc.buildout'
version = options.version
if version is None and not options.accept_buildout_test_releases:
# Figure out the most recent final version of zc.buildout.
import setuptools.package_index
_final_parts = '*final-', '*final'
def _final_version(parsed_version):
for part in parsed_version:
if (part[:1] == '*') and (part not in _final_parts):
return False
return True
index = setuptools.package_index.PackageIndex(
search_path=[setup_requirement_path])
if find_links:
index.add_find_links((find_links,))
req = pkg_resources.Requirement.parse(requirement)
if index.obtain(req) is not None:
best = []
bestv = None
for dist in index[req.project_name]:
distv = dist.parsed_version
if _final_version(distv):
if bestv is None or distv > bestv:
best = [dist]
bestv = distv
elif distv == bestv:
best.append(dist)
if best:
best.sort()
version = best[-1].version
if version:
requirement = '=='.join((requirement, version))
cmd.append(requirement)
if is_jython:
import subprocess
exitcode = subprocess.Popen(cmd, env=env).wait()
else: # Windows prefers this, apparently; otherwise we would prefer subprocess
exitcode = os.spawnle(*([os.P_WAIT, sys.executable] + cmd + [env]))
if exitcode != 0:
sys.stdout.flush()
sys.stderr.flush()
print ("An error occurred when trying to install zc.buildout. "
"Look above this message for any errors that "
"were output by easy_install.")
sys.exit(exitcode)
ws.add_entry(eggs_dir)
ws.require(requirement)
import zc.buildout.buildout
zc.buildout.buildout.main(args)
if not options.eggs: # clean up temporary egg directory
shutil.rmtree(eggs_dir) | false | true |
f7f7e6eed95dcac0d0e9b5fccb8759e13725db67 | 7,216 | py | Python | tunnel/tunnel_node.py | peterpolidoro/tunnel_ros | a204ddc8ed7ab01a7755d822318dc456ec37c919 | [
"BSD-3-Clause"
] | null | null | null | tunnel/tunnel_node.py | peterpolidoro/tunnel_ros | a204ddc8ed7ab01a7755d822318dc456ec37c919 | [
"BSD-3-Clause"
] | null | null | null | tunnel/tunnel_node.py | peterpolidoro/tunnel_ros | a204ddc8ed7ab01a7755d822318dc456ec37c919 | [
"BSD-3-Clause"
] | 1 | 2019-12-17T21:27:57.000Z | 2019-12-17T21:27:57.000Z | # Copyright (c) 2020, Howard Hughes Medical Institute
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import rclpy
from rclpy.node import Node
from smart_cage_msgs.msg import TunnelState
from .tunnel import Tunnel, TunnelInfo
import time
import datetime
import math
class TunnelNode(Node):
def __init__(self):
super().__init__('tunnel')
self.tunnel_info = TunnelInfo()
self.name = 'tunnel'
self.logger = self.get_logger()
self._tunnel_state_publisher = self.create_publisher(TunnelState, 'tunnel_state', 10)
# self._joint_target_subscription = self.create_subscription(
# TunnelState,
# 'tunnel_joint_target',
# self._joint_target_callback,
# 10)
# self._joint_target_subscription # prevent unused variable warning
self._attached_timer_period = 1
self._attached_timer = None
self._latched_timer_period = 5
self._latched_timer = None
self.tunnel = Tunnel(self.tunnel_info, self.name, self.logger)
self.tunnel.set_on_attach_handler(self._on_attach_handler)
self.logger.info('opening tunnel phidgets...')
self.tunnel.open()
def _on_attach_handler(self, handle):
self.tunnel._on_attach_handler(handle)
if self._attached_timer is None:
self._attached_timer = self.create_timer(self._attached_timer_period, self._attached_timer_callback)
def _attached_timer_callback(self):
self._attached_timer.cancel()
self._attached_timer = None
if self.tunnel.is_attached():
self.logger.info('tunnel is attached!')
self.tunnel.set_stepper_on_change_handlers_to_disabled()
self.tunnel.set_stepper_on_homed_handlers(self._homed_handler)
self.tunnel.set_limit_switch_handlers(self._publish_tunnel_state_handler)
self.tunnel.voltage_ratio_input.set_on_voltage_ratio_change_handler(self._publish_tunnel_state_handler)
self.tunnel.home_latches()
def _publish_tunnel_state_handler(self, handle, value):
if not self.tunnel.all_latches_homed:
return
tunnel_state = TunnelState()
tunnel_state.datetime = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
now_frac, now_whole = math.modf(time.time())
tunnel_state.nanosec = int(now_frac * 1e9)
tunnel_state.load_cell_voltage_ratio = self.tunnel.voltage_ratio_input.get_voltage_ratio()
tunnel_state.right_head_bar_sensor_active = self.tunnel.latches['right'].stepper_joint.limit_switch.is_active()
tunnel_state.left_head_bar_sensor_active = self.tunnel.latches['left'].stepper_joint.limit_switch.is_active()
tunnel_state.right_latch_position = self.tunnel.latches['right'].stepper_joint.stepper.get_position()
tunnel_state.left_latch_position = self.tunnel.latches['left'].stepper_joint.stepper.get_position()
self._tunnel_state_publisher.publish(tunnel_state)
def _homed_handler(self, handle):
for name, latch in self.tunnel.latches.items():
if not latch.stepper_joint.homed:
return
self.tunnel.set_stepper_on_change_handlers(self._publish_tunnel_state_handler)
self.tunnel.set_stepper_on_stopped_handlers_to_disabled()
self.tunnel.set_limit_switch_handlers(self._latch_handler)
def _latch_handler(self, handle, state):
for name, latch in self.tunnel.latches.items():
self._publish_tunnel_state_handler(handle, state)
if not latch.stepper_joint.limit_switch.is_active():
return
if not latch.stepper_joint.stepper.in_step_control_mode():
return
self.tunnel.set_stepper_on_stopped_handlers(self._latched_handler)
self.tunnel.set_limit_switch_handlers(self._publish_tunnel_state_handler)
self.tunnel.latch_all()
def _latched_handler(self, handle):
for name, latch in self.tunnel.latches.items():
if latch.stepper_joint.stepper.is_moving():
return
self._latched_timer = self.create_timer(self._latched_timer_period, self._latched_timer_callback)
def _latched_timer_callback(self):
self._latched_timer.cancel()
self._latched_timer = None
self.tunnel.set_stepper_on_stopped_handlers(self._unlatched_handler)
self.tunnel.unlatch_all()
def _unlatched_handler(self, handle):
for name, latch in self.tunnel.latches.items():
if latch.stepper_joint.stepper.is_moving():
return
self.tunnel.set_stepper_on_stopped_handlers_to_disabled()
self.tunnel.set_limit_switch_handlers(self._latch_handler)
# def _joint_target_callback(self, msg):
# if len(msg.name) == len(msg.velocity) == len(msg.position):
# targets = zip(msg.name, msg.velocity, msg.position)
# for name, velocity, position in targets:
# try:
# self.joints[name].stepper.set_velocity_limit(velocity)
# self.joints[name].stepper.set_target_position(position)
# except KeyError:
# pass
# elif len(msg.name) == len(msg.position):
# targets = zip(msg.name, msg.position)
# for name, position in targets:
# try:
# self.joints[name].stepper.set_target_position(position)
# except KeyError:
# pass
def main(args=None):
rclpy.init(args=args)
tunnel_node = TunnelNode()
rclpy.spin(tunnel_node)
tunnel_node.destroy_node()
rclpy.shutdown()
if __name__ == '__main__':
main()
| 43.733333 | 119 | 0.698725 |
import rclpy
from rclpy.node import Node
from smart_cage_msgs.msg import TunnelState
from .tunnel import Tunnel, TunnelInfo
import time
import datetime
import math
class TunnelNode(Node):
def __init__(self):
super().__init__('tunnel')
self.tunnel_info = TunnelInfo()
self.name = 'tunnel'
self.logger = self.get_logger()
self._tunnel_state_publisher = self.create_publisher(TunnelState, 'tunnel_state', 10)
riod = 1
self._attached_timer = None
self._latched_timer_period = 5
self._latched_timer = None
self.tunnel = Tunnel(self.tunnel_info, self.name, self.logger)
self.tunnel.set_on_attach_handler(self._on_attach_handler)
self.logger.info('opening tunnel phidgets...')
self.tunnel.open()
def _on_attach_handler(self, handle):
self.tunnel._on_attach_handler(handle)
if self._attached_timer is None:
self._attached_timer = self.create_timer(self._attached_timer_period, self._attached_timer_callback)
def _attached_timer_callback(self):
self._attached_timer.cancel()
self._attached_timer = None
if self.tunnel.is_attached():
self.logger.info('tunnel is attached!')
self.tunnel.set_stepper_on_change_handlers_to_disabled()
self.tunnel.set_stepper_on_homed_handlers(self._homed_handler)
self.tunnel.set_limit_switch_handlers(self._publish_tunnel_state_handler)
self.tunnel.voltage_ratio_input.set_on_voltage_ratio_change_handler(self._publish_tunnel_state_handler)
self.tunnel.home_latches()
def _publish_tunnel_state_handler(self, handle, value):
if not self.tunnel.all_latches_homed:
return
tunnel_state = TunnelState()
tunnel_state.datetime = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
now_frac, now_whole = math.modf(time.time())
tunnel_state.nanosec = int(now_frac * 1e9)
tunnel_state.load_cell_voltage_ratio = self.tunnel.voltage_ratio_input.get_voltage_ratio()
tunnel_state.right_head_bar_sensor_active = self.tunnel.latches['right'].stepper_joint.limit_switch.is_active()
tunnel_state.left_head_bar_sensor_active = self.tunnel.latches['left'].stepper_joint.limit_switch.is_active()
tunnel_state.right_latch_position = self.tunnel.latches['right'].stepper_joint.stepper.get_position()
tunnel_state.left_latch_position = self.tunnel.latches['left'].stepper_joint.stepper.get_position()
self._tunnel_state_publisher.publish(tunnel_state)
def _homed_handler(self, handle):
for name, latch in self.tunnel.latches.items():
if not latch.stepper_joint.homed:
return
self.tunnel.set_stepper_on_change_handlers(self._publish_tunnel_state_handler)
self.tunnel.set_stepper_on_stopped_handlers_to_disabled()
self.tunnel.set_limit_switch_handlers(self._latch_handler)
def _latch_handler(self, handle, state):
for name, latch in self.tunnel.latches.items():
self._publish_tunnel_state_handler(handle, state)
if not latch.stepper_joint.limit_switch.is_active():
return
if not latch.stepper_joint.stepper.in_step_control_mode():
return
self.tunnel.set_stepper_on_stopped_handlers(self._latched_handler)
self.tunnel.set_limit_switch_handlers(self._publish_tunnel_state_handler)
self.tunnel.latch_all()
def _latched_handler(self, handle):
for name, latch in self.tunnel.latches.items():
if latch.stepper_joint.stepper.is_moving():
return
self._latched_timer = self.create_timer(self._latched_timer_period, self._latched_timer_callback)
def _latched_timer_callback(self):
self._latched_timer.cancel()
self._latched_timer = None
self.tunnel.set_stepper_on_stopped_handlers(self._unlatched_handler)
self.tunnel.unlatch_all()
def _unlatched_handler(self, handle):
for name, latch in self.tunnel.latches.items():
if latch.stepper_joint.stepper.is_moving():
return
self.tunnel.set_stepper_on_stopped_handlers_to_disabled()
self.tunnel.set_limit_switch_handlers(self._latch_handler)
def main(args=None):
rclpy.init(args=args)
tunnel_node = TunnelNode()
rclpy.spin(tunnel_node)
tunnel_node.destroy_node()
rclpy.shutdown()
if __name__ == '__main__':
main()
| true | true |
f7f7e72f676688ff107e55ebb04e68bb70cad3ea | 156 | py | Python | setup.py | TheRender/nba-data | 0f92e7ec2e756b4880931beee528abdca719f555 | [
"MIT"
] | null | null | null | setup.py | TheRender/nba-data | 0f92e7ec2e756b4880931beee528abdca719f555 | [
"MIT"
] | null | null | null | setup.py | TheRender/nba-data | 0f92e7ec2e756b4880931beee528abdca719f555 | [
"MIT"
] | null | null | null | from setuptools import setup
setup(
name='nba-data',
version='0.1',
py_modules=['nba-data'],
install_requires=[
'requests'
]
)
| 14.181818 | 28 | 0.583333 | from setuptools import setup
setup(
name='nba-data',
version='0.1',
py_modules=['nba-data'],
install_requires=[
'requests'
]
)
| true | true |
f7f7e730b28e1f50be4dd6a2ac83290c2da521bc | 9,397 | py | Python | tests/snippets/strings.py | sanxiyn/RustPython | 6308be3dfeb8e1b334136c81a4175514bb8561c3 | [
"MIT"
] | 3 | 2019-08-14T02:05:49.000Z | 2020-01-03T08:39:56.000Z | tests/snippets/strings.py | Vicfred/RustPython | d42d422566f64f48311cc7e7efb4d0ffcefb0297 | [
"MIT"
] | null | null | null | tests/snippets/strings.py | Vicfred/RustPython | d42d422566f64f48311cc7e7efb4d0ffcefb0297 | [
"MIT"
] | 1 | 2022-03-14T13:03:29.000Z | 2022-03-14T13:03:29.000Z | from testutils import assert_raises
assert "".__eq__(1) == NotImplemented
assert "a" == 'a'
assert """a""" == "a"
assert len(""" " "" " "" """) == 11
assert "\"" == '"'
assert "\"" == """\""""
assert "\n" == """
"""
assert len(""" " \" """) == 5
# len() counts Unicode code points (not bytes, not grapheme clusters):
# the first 'é' is precomposed U+00E9, the second is 'e' + a combining accent.
assert len("é") == 1
assert len("é") == 2
assert len("あ") == 1
# Literal types and str() conversions.
assert type("") is str
assert type(b"") is bytes
assert str(1) == "1"
assert str(2.1) == "2.1"
assert str() == ""
assert str("abc") == "abc"
# repr() adds quotes, switching quote style to minimise escaping.
assert repr("a") == "'a'"
assert repr("can't") == '"can\'t"'
assert repr('"won\'t"') == "'\"won\\'t\"'"
assert repr('\n\t') == "'\\n\\t'"
assert str(["a", "b", "can't"]) == "['a', 'b', \"can't\"]"
# Repetition accepts the int on either side; counts <= 0 yield "".
assert "xy" * 3 == "xyxyxy"
assert "x" * 0 == ""
assert "x" * -1 == ""
assert 3 * "xy" == "xyxyxy"
assert 0 * "x" == ""
assert -1 * "x" == ""
# A repetition whose length cannot be represented raises OverflowError.
assert_raises(OverflowError, lambda: 'xy' * 234234234234234234234234234234)
# Case conversion, affix checks (including tuple arguments), zfill,
# and character-class predicates on a title-cased sample.
a = 'Hallo'
assert a.lower() == 'hallo'
assert a.upper() == 'HALLO'
assert a.startswith('H')
# Tuple elements are tried in order; the non-str 1 is never reached
# because 'H' already matches (otherwise this would raise).
assert a.startswith(('H', 1))
assert a.startswith(('A', 'H'))
assert not a.startswith('f')
assert not a.startswith(('A', 'f'))
assert a.endswith('llo')
assert a.endswith(('lo', 1))
assert a.endswith(('A', 'lo'))
assert not a.endswith('on')
assert not a.endswith(('A', 'll'))
assert a.zfill(8) == '000Hallo'
assert a.isalnum()
assert not a.isdigit()
assert not a.isdecimal()
assert not a.isnumeric()
assert a.istitle()
assert a.isalpha()
s = '1 2 3'
assert s.split(' ', 1) == ['1', '2 3']
assert s.rsplit(' ', 1) == ['1 2', '3']
b = ' hallo '
assert b.strip() == 'hallo'
assert b.lstrip() == 'hallo '
assert b.rstrip() == ' hallo'
s = '^*RustPython*^'
assert s.strip('^*') == 'RustPython'
assert s.lstrip('^*') == 'RustPython*^'
assert s.rstrip('^*') == '^*RustPython'
s = 'RustPython'
assert s.ljust(8) == 'RustPython'
assert s.rjust(8) == 'RustPython'
assert s.ljust(12) == 'RustPython '
assert s.rjust(12) == ' RustPython'
assert s.ljust(12, '_') == 'RustPython__'
assert s.rjust(12, '_') == '__RustPython'
# The fill character must be exactly one character long
assert_raises(TypeError, lambda: s.ljust(12, '__'))
assert_raises(TypeError, lambda: s.rjust(12, '__'))
# capitalize() and center(); the two comprehension checks pin down, for
# every width from no-op up to full padding, exactly which side receives
# the odd fill character.
c = 'hallo'
assert c.capitalize() == 'Hallo'
assert c.center(11, '-') == '---hallo---'
assert ["koki".center(i, "|") for i in range(3, 10)] == [
    "koki",
    "koki",
    "|koki",
    "|koki|",
    "||koki|",
    "||koki||",
    "|||koki||",
]
assert ["kok".center(i, "|") for i in range(2, 10)] == [
    "kok",
    "kok",
    "kok|",
    "|kok|",
    "|kok||",
    "||kok||",
    "||kok|||",
    "|||kok|||",
]
# requires CPython 3.7, and the CI currently runs with 3.6
# assert c.isascii()
# index/rindex raise on a miss; find/rfind return -1 instead.
assert c.index('a') == 1
assert c.rindex('l') == 3
assert c.find('h') == 0
assert c.rfind('x') == -1
assert c.islower()
assert c.title() == 'Hallo'
# count() honours the optional [start[, end]] slice bounds; an empty
# (or inverted) window counts zero occurrences.
assert c.count('l') == 2
assert 'aaa'.count('a') == 3
assert 'aaa'.count('a', 1) == 2
assert 'aaa'.count('a', 1, 2) == 1
assert 'aaa'.count('a', 2, 2) == 0
assert 'aaa'.count('a', 2, 1) == 0
# find() with a start index only; negative starts count from the end,
# and a start past the match (or the string) yields -1.
assert '___a__'.find('a') == 3
assert '___a__'.find('a', -10) == 3
assert '___a__'.find('a', -3) == 3
assert '___a__'.find('a', -2) == -1
assert '___a__'.find('a', -1) == -1
assert '___a__'.find('a', 0) == 3
assert '___a__'.find('a', 3) == 3
assert '___a__'.find('a', 4) == -1
assert '___a__'.find('a', 10) == -1
assert '___a__'.rfind('a', 3) == 3
assert '___a__'.index('a', 3) == 3
# find() with both start and end; the end bound is exclusive and may be
# negative, and an empty window never matches.
assert '___a__'.find('a', 0, -10) == -1
assert '___a__'.find('a', 0, -3) == -1
assert '___a__'.find('a', 0, -2) == 3
assert '___a__'.find('a', 0, -1) == 3
assert '___a__'.find('a', 0, 0) == -1
assert '___a__'.find('a', 0, 3) == -1
assert '___a__'.find('a', 0, 4) == 3
assert '___a__'.find('a', 0, 10) == 3
assert '___a__'.find('a', 3, 3) == -1
assert '___a__'.find('a', 3, 4) == 3
assert '___a__'.find('a', 4, 3) == -1
assert 'abcd'.startswith('b', 1)
assert 'abcd'.startswith(('b', 'z'), 1)
assert not 'abcd'.startswith('b', -4)
assert 'abcd'.startswith('b', -3)
assert not 'abcd'.startswith('b', 3, 3)
assert 'abcd'.startswith('', 3, 3)
assert not 'abcd'.startswith('', 4, 3)
assert ' '.isspace()
assert 'hello\nhallo\nHallo'.splitlines() == ['hello', 'hallo', 'Hallo']
assert 'abc\t12345\txyz'.expandtabs() == 'abc 12345 xyz'
assert '-'.join(['1', '2', '3']) == '1-2-3'
assert 'HALLO'.isupper()
assert "hello, my name is".partition("my ") == ('hello, ', 'my ', 'name is')
assert "hello".partition("is") == ('hello', '', '')
assert "hello, my name is".rpartition("is") == ('hello, my name ', 'is', '')
assert "hello".rpartition("is") == ('', '', 'hello')
assert not ''.isdecimal()
assert '123'.isdecimal()
assert not '\u00B2'.isdecimal()
assert not ''.isidentifier()
assert 'python'.isidentifier()
assert '_'.isidentifier()
assert '유니코드'.isidentifier()
assert not '😂'.isidentifier()
assert not '123'.isidentifier()
# String Formatting
# str.format: automatic and explicit field numbering, a fill+align spec
# ('s' as fill, '>' align, width 4), keyword fields, and the !r / !s
# conversions.
assert "{} {}".format(1, 2) == "1 2"
assert "{0} {1}".format(2, 3) == "2 3"
assert "--{:s>4}--".format(1) == "--sss1--"
assert "{keyword} {0}".format(1, keyword=2) == "2 1"
assert "repr() shows quotes: {!r}; str() doesn't: {!s}".format(
    'test1', 'test2'
) == "repr() shows quotes: 'test1'; str() doesn't: test2", 'Output: {!r}, {!s}'.format('test1', 'test2')
class Foo:
    """Helper whose str() and repr() results differ, so the !s/!r/!a
    format conversions can be told apart in the assertions below."""

    def __repr__(self):
        # Used by repr(), the !r conversion, and (via ascii()) the !a conversion.
        return 'repr(Foo)'

    def __str__(self):
        # Used by str(), plain {} formatting, and the !s conversion.
        return 'str(Foo)'
f = Foo()
assert "{} {!s} {!r} {!a}".format(f, f, f, f) == 'str(Foo) str(Foo) repr(Foo) repr(Foo)'
assert "{foo} {foo!s} {foo!r} {foo!a}".format(foo=f) == 'str(Foo) str(Foo) repr(Foo) repr(Foo)'
# assert '{} {!r} {:10} {!r:10} {foo!r:10} {foo!r} {foo}'.format('txt1', 'txt2', 'txt3', 'txt4', 'txt5', foo='bar')
# Printf-style String formatting
assert "%d %d" % (1, 2) == "1 2"
assert "%*c " % (3, '❤') == " ❤ "
assert "%(first)s %(second)s" % {'second': 'World!', 'first': "Hello,"} == "Hello, World!"
assert "%(key())s" % {'key()': 'aaa'}
assert "%s %a %r" % (f, f, f) == "str(Foo) repr(Foo) repr(Foo)"
assert "repr() shows quotes: %r; str() doesn't: %s" % ("test1", "test2") == "repr() shows quotes: 'test1'; str() doesn't: test2"
assert "%f" % (1.2345) == "1.234500"
assert "%+f" % (1.2345) == "+1.234500"
assert "% f" % (1.2345) == " 1.234500"
assert "%f" % (-1.2345) == "-1.234500"
assert "%f" % (1.23456789012) == "1.234568"
assert "%f" % (123) == "123.000000"
assert "%f" % (-123) == "-123.000000"
assert_raises(TypeError, lambda: "My name is %s and I'm %(age)d years old" % ("Foo", 25), _msg='format requires a mapping')
assert_raises(TypeError, lambda: "My name is %(name)s" % "Foo", _msg='format requires a mapping')
assert_raises(ValueError, lambda: "This %(food}s is great!" % {"food": "cookie"}, _msg='incomplete format key')
assert_raises(ValueError, lambda: "My name is %" % "Foo", _msg='incomplete format')
assert 'a' < 'b'
assert 'a' <= 'b'
assert 'a' <= 'a'
assert 'z' > 'b'
assert 'z' >= 'b'
assert 'a' >= 'a'
# str.translate
assert "abc".translate({97: '🎅', 98: None, 99: "xd"}) == "🎅xd"
# str.maketrans
assert str.maketrans({"a": "abc", "b": None, "c": 33}) == {97: "abc", 98: None, 99: 33}
assert str.maketrans("hello", "world", "rust") == {104: 119, 101: 111, 108: 108, 111: 100, 114: None, 117: None, 115: None, 116: None}
def try_mutate_str():
    """Attempt in-place mutation of a str; always raises TypeError."""
    letters = "word"
    letters[0] = 'x'  # str does not support item assignment
assert_raises(TypeError, try_mutate_str)
ss = ['Hello', '안녕', '👋']
bs = [b'Hello', b'\xec\x95\x88\xeb\x85\x95', b'\xf0\x9f\x91\x8b']
for s, b in zip(ss, bs):
assert s.encode() == b
for s, b, e in zip(ss, bs, ['u8', 'U8', 'utf-8', 'UTF-8', 'utf_8']):
assert s.encode(e) == b
# assert s.encode(encoding=e) == b
# str.isisprintable
assert "".isprintable()
assert " ".isprintable()
assert "abcdefg".isprintable()
assert not "abcdefg\n".isprintable()
assert "ʹ".isprintable()
# test unicode literals
assert "\xac" == "¬"
assert "\u0037" == "7"
assert "\u0040" == "@"
assert "\u0041" == "A"
assert "\u00BE" == "¾"
assert "\u9487" == "钇"
assert "\U0001F609" == "😉"
# test str iter (each next() is asserted explicitly on purpose: str iteration
# itself is under test here, so no helper loop is used)
iterable_str = "123456789"
str_iter = iter(iterable_str)
assert next(str_iter) == "1"
assert next(str_iter) == "2"
assert next(str_iter) == "3"
assert next(str_iter) == "4"
assert next(str_iter) == "5"
assert next(str_iter) == "6"
assert next(str_iter) == "7"
assert next(str_iter) == "8"
assert next(str_iter) == "9"
# Exhausted iterator returns the supplied default; compare with `is`
# (PEP 8: comparisons to singletons like None use identity, not ==).
assert next(str_iter, None) is None
assert_raises(StopIteration, next, str_iter)
str_iter_reversed = reversed(iterable_str)
assert next(str_iter_reversed) == "9"
assert next(str_iter_reversed) == "8"
assert next(str_iter_reversed) == "7"
assert next(str_iter_reversed) == "6"
assert next(str_iter_reversed) == "5"
assert next(str_iter_reversed) == "4"
assert next(str_iter_reversed) == "3"
assert next(str_iter_reversed) == "2"
assert next(str_iter_reversed) == "1"
assert next(str_iter_reversed, None) is None
assert_raises(StopIteration, next, str_iter_reversed)
assert str.__rmod__('%i', 30) == NotImplemented
assert_raises(TypeError, lambda: str.__rmod__(30, '%i'))
# test str index
index_str = 'Rust Python'
assert index_str[0] == 'R'
assert index_str[-1] == 'n'
assert_raises(TypeError, lambda: index_str['a'])
assert chr(9).__repr__() == "'\\t'"
assert chr(99).__repr__() == "'c'"
assert chr(999).__repr__() == "'ϧ'"
assert chr(9999).__repr__() == "'✏'"
assert chr(99999).__repr__() == "'𘚟'"
assert chr(999999).__repr__() == "'\\U000f423f'"
assert "a".__ne__("b")
assert not "a".__ne__("a")
assert not "".__ne__("")
assert "".__ne__(1) == NotImplemented
| 28.737003 | 134 | 0.587634 | from testutils import assert_raises
assert "".__eq__(1) == NotImplemented
assert "a" == 'a'
assert """a""" == "a"
assert len(""" " "" " "" """) == 11
assert "\"" == '"'
assert "\"" == """\""""
assert "\n" == """
"""
assert len(""" " \" """) == 5
assert len("é") == 1
assert len("é") == 2
assert len("あ") == 1
assert type("") is str
assert type(b"") is bytes
assert str(1) == "1"
assert str(2.1) == "2.1"
assert str() == ""
assert str("abc") == "abc"
assert repr("a") == "'a'"
assert repr("can't") == '"can\'t"'
assert repr('"won\'t"') == "'\"won\\'t\"'"
assert repr('\n\t') == "'\\n\\t'"
assert str(["a", "b", "can't"]) == "['a', 'b', \"can't\"]"
assert "xy" * 3 == "xyxyxy"
assert "x" * 0 == ""
assert "x" * -1 == ""
assert 3 * "xy" == "xyxyxy"
assert 0 * "x" == ""
assert -1 * "x" == ""
assert_raises(OverflowError, lambda: 'xy' * 234234234234234234234234234234)
a = 'Hallo'
assert a.lower() == 'hallo'
assert a.upper() == 'HALLO'
assert a.startswith('H')
assert a.startswith(('H', 1))
assert a.startswith(('A', 'H'))
assert not a.startswith('f')
assert not a.startswith(('A', 'f'))
assert a.endswith('llo')
assert a.endswith(('lo', 1))
assert a.endswith(('A', 'lo'))
assert not a.endswith('on')
assert not a.endswith(('A', 'll'))
assert a.zfill(8) == '000Hallo'
assert a.isalnum()
assert not a.isdigit()
assert not a.isdecimal()
assert not a.isnumeric()
assert a.istitle()
assert a.isalpha()
s = '1 2 3'
assert s.split(' ', 1) == ['1', '2 3']
assert s.rsplit(' ', 1) == ['1 2', '3']
b = ' hallo '
assert b.strip() == 'hallo'
assert b.lstrip() == 'hallo '
assert b.rstrip() == ' hallo'
s = '^*RustPython*^'
assert s.strip('^*') == 'RustPython'
assert s.lstrip('^*') == 'RustPython*^'
assert s.rstrip('^*') == '^*RustPython'
s = 'RustPython'
assert s.ljust(8) == 'RustPython'
assert s.rjust(8) == 'RustPython'
assert s.ljust(12) == 'RustPython '
assert s.rjust(12) == ' RustPython'
assert s.ljust(12, '_') == 'RustPython__'
assert s.rjust(12, '_') == '__RustPython'
assert_raises(TypeError, lambda: s.ljust(12, '__'))
assert_raises(TypeError, lambda: s.rjust(12, '__'))
c = 'hallo'
assert c.capitalize() == 'Hallo'
assert c.center(11, '-') == '---hallo---'
assert ["koki".center(i, "|") for i in range(3, 10)] == [
"koki",
"koki",
"|koki",
"|koki|",
"||koki|",
"||koki||",
"|||koki||",
]
assert ["kok".center(i, "|") for i in range(2, 10)] == [
"kok",
"kok",
"kok|",
"|kok|",
"|kok||",
"||kok||",
"||kok|||",
"|||kok|||",
]
assert c.index('a') == 1
assert c.rindex('l') == 3
assert c.find('h') == 0
assert c.rfind('x') == -1
assert c.islower()
assert c.title() == 'Hallo'
assert c.count('l') == 2
assert 'aaa'.count('a') == 3
assert 'aaa'.count('a', 1) == 2
assert 'aaa'.count('a', 1, 2) == 1
assert 'aaa'.count('a', 2, 2) == 0
assert 'aaa'.count('a', 2, 1) == 0
assert '___a__'.find('a') == 3
assert '___a__'.find('a', -10) == 3
assert '___a__'.find('a', -3) == 3
assert '___a__'.find('a', -2) == -1
assert '___a__'.find('a', -1) == -1
assert '___a__'.find('a', 0) == 3
assert '___a__'.find('a', 3) == 3
assert '___a__'.find('a', 4) == -1
assert '___a__'.find('a', 10) == -1
assert '___a__'.rfind('a', 3) == 3
assert '___a__'.index('a', 3) == 3
assert '___a__'.find('a', 0, -10) == -1
assert '___a__'.find('a', 0, -3) == -1
assert '___a__'.find('a', 0, -2) == 3
assert '___a__'.find('a', 0, -1) == 3
assert '___a__'.find('a', 0, 0) == -1
assert '___a__'.find('a', 0, 3) == -1
assert '___a__'.find('a', 0, 4) == 3
assert '___a__'.find('a', 0, 10) == 3
assert '___a__'.find('a', 3, 3) == -1
assert '___a__'.find('a', 3, 4) == 3
assert '___a__'.find('a', 4, 3) == -1
assert 'abcd'.startswith('b', 1)
assert 'abcd'.startswith(('b', 'z'), 1)
assert not 'abcd'.startswith('b', -4)
assert 'abcd'.startswith('b', -3)
assert not 'abcd'.startswith('b', 3, 3)
assert 'abcd'.startswith('', 3, 3)
assert not 'abcd'.startswith('', 4, 3)
assert ' '.isspace()
assert 'hello\nhallo\nHallo'.splitlines() == ['hello', 'hallo', 'Hallo']
assert 'abc\t12345\txyz'.expandtabs() == 'abc 12345 xyz'
assert '-'.join(['1', '2', '3']) == '1-2-3'
assert 'HALLO'.isupper()
assert "hello, my name is".partition("my ") == ('hello, ', 'my ', 'name is')
assert "hello".partition("is") == ('hello', '', '')
assert "hello, my name is".rpartition("is") == ('hello, my name ', 'is', '')
assert "hello".rpartition("is") == ('', '', 'hello')
assert not ''.isdecimal()
assert '123'.isdecimal()
assert not '\u00B2'.isdecimal()
assert not ''.isidentifier()
assert 'python'.isidentifier()
assert '_'.isidentifier()
assert '유니코드'.isidentifier()
assert not '😂'.isidentifier()
assert not '123'.isidentifier()
assert "{} {}".format(1, 2) == "1 2"
assert "{0} {1}".format(2, 3) == "2 3"
assert "--{:s>4}--".format(1) == "--sss1--"
assert "{keyword} {0}".format(1, keyword=2) == "2 1"
assert "repr() shows quotes: {!r}; str() doesn't: {!s}".format(
'test1', 'test2'
) == "repr() shows quotes: 'test1'; str() doesn't: test2", 'Output: {!r}, {!s}'.format('test1', 'test2')
class Foo:
def __str__(self):
return 'str(Foo)'
def __repr__(self):
return 'repr(Foo)'
f = Foo()
assert "{} {!s} {!r} {!a}".format(f, f, f, f) == 'str(Foo) str(Foo) repr(Foo) repr(Foo)'
assert "{foo} {foo!s} {foo!r} {foo!a}".format(foo=f) == 'str(Foo) str(Foo) repr(Foo) repr(Foo)'
assert "%d %d" % (1, 2) == "1 2"
assert "%*c " % (3, '❤') == " ❤ "
assert "%(first)s %(second)s" % {'second': 'World!', 'first': "Hello,"} == "Hello, World!"
assert "%(key())s" % {'key()': 'aaa'}
assert "%s %a %r" % (f, f, f) == "str(Foo) repr(Foo) repr(Foo)"
assert "repr() shows quotes: %r; str() doesn't: %s" % ("test1", "test2") == "repr() shows quotes: 'test1'; str() doesn't: test2"
assert "%f" % (1.2345) == "1.234500"
assert "%+f" % (1.2345) == "+1.234500"
assert "% f" % (1.2345) == " 1.234500"
assert "%f" % (-1.2345) == "-1.234500"
assert "%f" % (1.23456789012) == "1.234568"
assert "%f" % (123) == "123.000000"
assert "%f" % (-123) == "-123.000000"
assert_raises(TypeError, lambda: "My name is %s and I'm %(age)d years old" % ("Foo", 25), _msg='format requires a mapping')
assert_raises(TypeError, lambda: "My name is %(name)s" % "Foo", _msg='format requires a mapping')
assert_raises(ValueError, lambda: "This %(food}s is great!" % {"food": "cookie"}, _msg='incomplete format key')
assert_raises(ValueError, lambda: "My name is %" % "Foo", _msg='incomplete format')
assert 'a' < 'b'
assert 'a' <= 'b'
assert 'a' <= 'a'
assert 'z' > 'b'
assert 'z' >= 'b'
assert 'a' >= 'a'
# str.translate
assert "abc".translate({97: '🎅', 98: None, 99: "xd"}) == "🎅xd"
# str.maketrans
assert str.maketrans({"a": "abc", "b": None, "c": 33}) == {97: "abc", 98: None, 99: 33}
assert str.maketrans("hello", "world", "rust") == {104: 119, 101: 111, 108: 108, 111: 100, 114: None, 117: None, 115: None, 116: None}
def try_mutate_str():
word = "word"
word[0] = 'x'
assert_raises(TypeError, try_mutate_str)
ss = ['Hello', '안녕', '👋']
bs = [b'Hello', b'\xec\x95\x88\xeb\x85\x95', b'\xf0\x9f\x91\x8b']
for s, b in zip(ss, bs):
assert s.encode() == b
for s, b, e in zip(ss, bs, ['u8', 'U8', 'utf-8', 'UTF-8', 'utf_8']):
assert s.encode(e) == b
# assert s.encode(encoding=e) == b
# str.isisprintable
assert "".isprintable()
assert " ".isprintable()
assert "abcdefg".isprintable()
assert not "abcdefg\n".isprintable()
assert "ʹ".isprintable()
# test unicode literals
assert "\xac" == "¬"
assert "\u0037" == "7"
assert "\u0040" == "@"
assert "\u0041" == "A"
assert "\u00BE" == "¾"
assert "\u9487" == "钇"
assert "\U0001F609" == "😉"
# test str iter
iterable_str = "123456789"
str_iter = iter(iterable_str)
assert next(str_iter) == "1"
assert next(str_iter) == "2"
assert next(str_iter) == "3"
assert next(str_iter) == "4"
assert next(str_iter) == "5"
assert next(str_iter) == "6"
assert next(str_iter) == "7"
assert next(str_iter) == "8"
assert next(str_iter) == "9"
assert next(str_iter, None) == None
assert_raises(StopIteration, next, str_iter)
str_iter_reversed = reversed(iterable_str)
assert next(str_iter_reversed) == "9"
assert next(str_iter_reversed) == "8"
assert next(str_iter_reversed) == "7"
assert next(str_iter_reversed) == "6"
assert next(str_iter_reversed) == "5"
assert next(str_iter_reversed) == "4"
assert next(str_iter_reversed) == "3"
assert next(str_iter_reversed) == "2"
assert next(str_iter_reversed) == "1"
assert next(str_iter_reversed, None) == None
assert_raises(StopIteration, next, str_iter_reversed)
assert str.__rmod__('%i', 30) == NotImplemented
assert_raises(TypeError, lambda: str.__rmod__(30, '%i'))
# test str index
index_str = 'Rust Python'
assert index_str[0] == 'R'
assert index_str[-1] == 'n'
assert_raises(TypeError, lambda: index_str['a'])
assert chr(9).__repr__() == "'\\t'"
assert chr(99).__repr__() == "'c'"
assert chr(999).__repr__() == "'ϧ'"
assert chr(9999).__repr__() == "'✏'"
assert chr(99999).__repr__() == "'𘚟'"
assert chr(999999).__repr__() == "'\\U000f423f'"
assert "a".__ne__("b")
assert not "a".__ne__("a")
assert not "".__ne__("")
assert "".__ne__(1) == NotImplemented
| true | true |
f7f7e7464b431923c9e478ef93cf76c0f0300b2b | 209 | py | Python | reseller_cashback/core/serializers/base.py | cesarbruschetta/reseller-cashback | 2e7fcbee15c4c446bcd1a163c5360b11e6bef6fc | [
"MIT"
] | 1 | 2022-01-12T14:26:46.000Z | 2022-01-12T14:26:46.000Z | reseller_cashback/core/serializers/base.py | cesarbruschetta/reseller-cashback | 2e7fcbee15c4c446bcd1a163c5360b11e6bef6fc | [
"MIT"
] | null | null | null | reseller_cashback/core/serializers/base.py | cesarbruschetta/reseller-cashback | 2e7fcbee15c4c446bcd1a163c5360b11e6bef6fc | [
"MIT"
] | null | null | null | from rest_framework import serializers
from ..models import BaseModel
class BaseSerializer(serializers.ModelSerializer):
    """DRF ModelSerializer bound to ``BaseModel`` with every field exposed.

    NOTE(review): presumably meant to be subclassed with a concrete
    ``Meta.model`` by the app's real serializers — confirm against callers.
    """
    class Meta:
        model = BaseModel
        # '__all__' serializes every model field without an explicit list.
        fields = '__all__'
| 20.9 | 66 | 0.722488 | from rest_framework import serializers
from ..models import BaseModel
class BaseSerializer(serializers.ModelSerializer):
class Meta:
model = BaseModel
fields = '__all__'
| true | true |
f7f7e7744d659793ffbf22a7957247b46d82e28d | 23 | py | Python | user_interface/__init__.py | pablomodernell/lorawan_conformance_testing | 3e6b9028ee7a6a614e52bac684e396ecd04fd10c | [
"MIT"
] | 1 | 2020-09-10T14:12:07.000Z | 2020-09-10T14:12:07.000Z | user_interface/__init__.py | pablomodernell/lorawan_conformance_testing | 3e6b9028ee7a6a614e52bac684e396ecd04fd10c | [
"MIT"
] | null | null | null | user_interface/__init__.py | pablomodernell/lorawan_conformance_testing | 3e6b9028ee7a6a614e52bac684e396ecd04fd10c | [
"MIT"
] | null | null | null | API_VERSION = "1.0.15"
# Version string exposed by the user_interface package.
API_VERSION = "1.0.15"
| true | true |
f7f7e9f37f322d1bf969bfa0dcb88a6aad61503b | 2,076 | py | Python | var/spack/repos/builtin/packages/openkim-models/package.py | kkauder/spack | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2020-09-10T22:50:08.000Z | 2021-01-12T22:18:54.000Z | var/spack/repos/builtin/packages/openkim-models/package.py | kkauder/spack | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 14 | 2021-07-20T01:04:53.000Z | 2022-03-02T01:08:36.000Z | var/spack/repos/builtin/packages/openkim-models/package.py | kkauder/spack | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1 | 2021-05-06T00:17:46.000Z | 2021-05-06T00:17:46.000Z | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class OpenkimModels(CMakePackage):
    """OpenKIM is an online framework for making molecular simulations
    reliable, reproducible, and portable. Computer implementations of
    inter-atomic models are archived in OpenKIM, verified for coding
    integrity, and tested by computing their predictions for a variety
    of material properties. Models conforming to the KIM application
    programming interface (API) work seamlessly with major simulation
    codes that have adopted the KIM API standard.
    This package provides all models archived at openkim.org that are
    compatible with the kim-api package.
    """
    homepage = "https://openkim.org/"
    url = "https://s3.openkim.org/archives/collection/openkim-models-2019-07-25.txz"
    maintainers = ['ellio167']
    # Installs its artifacts into the kim-api installation tree.
    extends('kim-api')
    # Collection/API pairing: the 2019-07-25 collection requires kim-api >= 2.1.0,
    # older collections require kim-api <= 2.0.2.
    depends_on('kim-api@2.1.0:', when='@2019-07-25:')
    depends_on('kim-api@:2.0.2', when='@:2019-03-29')
    version(
        '2019-07-25',
        sha256='50338084ece92ec0fb13b0bbdf357b5d7450e26068ba501f23c315f814befc26')
    version(
        '2019-03-29',
        sha256='053dda2023fe4bb6d7c1d66530c758c4e633bbf1f1be17b6b075b276fe8874f6')
    def cmake_args(self):
        """Return CMake defines that place drivers/models under lib/kim-api/."""
        args = []
        args.append(('-DKIM_API_MODEL_DRIVER_INSTALL_PREFIX={0}'
                     + '/lib/kim-api/model-drivers').format(prefix))
        # Newer collections (>= 2019-07-25) use the PORTABLE_MODEL variable
        # name and destination; older ones use MODEL_INSTALL_PREFIX.
        if self.spec.satisfies('@2019-07-25:'):
            args.append(('-DKIM_API_PORTABLE_MODEL_INSTALL_PREFIX={0}'
                         + '/lib/kim-api/portable-models').format(prefix))
        else:
            args.append(('-DKIM_API_MODEL_INSTALL_PREFIX={0}'
                         + '/lib/kim-api/models').format(prefix))
        args.append(('-DKIM_API_SIMULATOR_MODEL_INSTALL_PREFIX={0}'
                     + '/lib/kim-api/simulator-models').format(prefix))
        return args
| 39.923077 | 89 | 0.665222 |
from spack import *
class OpenkimModels(CMakePackage):
homepage = "https://openkim.org/"
url = "https://s3.openkim.org/archives/collection/openkim-models-2019-07-25.txz"
maintainers = ['ellio167']
extends('kim-api')
depends_on('kim-api@2.1.0:', when='@2019-07-25:')
depends_on('kim-api@:2.0.2', when='@:2019-03-29')
version(
'2019-07-25',
sha256='50338084ece92ec0fb13b0bbdf357b5d7450e26068ba501f23c315f814befc26')
version(
'2019-03-29',
sha256='053dda2023fe4bb6d7c1d66530c758c4e633bbf1f1be17b6b075b276fe8874f6')
def cmake_args(self):
args = []
args.append(('-DKIM_API_MODEL_DRIVER_INSTALL_PREFIX={0}'
+ '/lib/kim-api/model-drivers').format(prefix))
if self.spec.satisfies('@2019-07-25:'):
args.append(('-DKIM_API_PORTABLE_MODEL_INSTALL_PREFIX={0}'
+ '/lib/kim-api/portable-models').format(prefix))
else:
args.append(('-DKIM_API_MODEL_INSTALL_PREFIX={0}'
+ '/lib/kim-api/models').format(prefix))
args.append(('-DKIM_API_SIMULATOR_MODEL_INSTALL_PREFIX={0}'
+ '/lib/kim-api/simulator-models').format(prefix))
return args
| true | true |
f7f7eacf047c1833e8d4d6076067b8cce1660859 | 3,215 | py | Python | siga/siga/settings.py | JenniferAmaral/DjangoSpike | 768237bb0f3cffe7bbdbcab38a8bae6faa78e495 | [
"Apache-2.0"
] | null | null | null | siga/siga/settings.py | JenniferAmaral/DjangoSpike | 768237bb0f3cffe7bbdbcab38a8bae6faa78e495 | [
"Apache-2.0"
] | 2 | 2021-03-19T03:21:17.000Z | 2021-03-30T13:24:07.000Z | siga/siga/settings.py | JenniferAmaral/DjangoSpike | 768237bb0f3cffe7bbdbcab38a8bae6faa78e495 | [
"Apache-2.0"
] | null | null | null | """
Django settings for siga project.
Generated by 'django-admin startproject' using Django 3.0.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '-6yk-0ry&n=6a!rcgw(aa^4gn*$$gx3ixt3y4amg#7*kv^1*t1'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'widget_tweaks',
'core',
'sensibilizacao',
'prospeccao'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'siga.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'siga.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
LOGIN_REDIRECT_URL = '/'
| 25.314961 | 91 | 0.692691 |
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = '-6yk-0ry&n=6a!rcgw(aa^4gn*$$gx3ixt3y4amg#7*kv^1*t1'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'widget_tweaks',
'core',
'sensibilizacao',
'prospeccao'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'siga.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'siga.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
LOGIN_REDIRECT_URL = '/'
| true | true |
f7f7eb1fd5997f8121a2d808cd4ea3eb3505e636 | 4,630 | py | Python | tasks/views.py | igorxcardoso/todo-list-django | e8745e3370ab50a83612fc5a268cad04ac84f465 | [
"MIT"
] | null | null | null | tasks/views.py | igorxcardoso/todo-list-django | e8745e3370ab50a83612fc5a268cad04ac84f465 | [
"MIT"
] | null | null | null | tasks/views.py | igorxcardoso/todo-list-django | e8745e3370ab50a83612fc5a268cad04ac84f465 | [
"MIT"
] | null | null | null | from django.shortcuts import render, get_object_or_404, redirect
from django.contrib.auth.decorators import login_required # Esse docoreto vai impedir usuários não autorizados acessem rotas protegidas
from django.http import HttpResponse
from django.contrib import messages
from django.core.paginator import Paginator # Paginação
from django.contrib.auth.views import (PasswordChangeView, PasswordResetView,
PasswordResetDoneView, PasswordResetConfirmView,
PasswordResetCompleteView)
from .forms import TaskForm
from .models import Task
import datetime
# Funções relacionadas as URL's do arquivo urls.py
@login_required
def taskList(request):
    """List the authenticated user's tasks, with optional search/status filter.

    Query parameters:
        search -- substring match against the task title.
        filter -- task state ('feito' = done, 'fazendo' = in progress).
    Only the unfiltered listing is paginated (10 per page); filtered results
    are returned as a plain queryset.
    """
    search = request.GET.get('search')  # value of the search input on list.html
    filter = request.GET.get('filter')  # status filter; NOTE: shadows the builtin `filter`
    # Dashboard counters: tasks finished in the last 30 days, plus totals per state.
    dias_30 = datetime.datetime.now()-datetime.timedelta(days=30)
    taskFeitaRecente = Task.objects.filter(done='feito', update_at__gt=dias_30, user=request.user).count()
    taskFeito = Task.objects.filter(done='feito', user=request.user).count()
    taskFazendo = Task.objects.filter(done='fazendo', user=request.user).count()
    if search:  # filter by title, restricted to the requesting user
        tasks = Task.objects.filter(title__icontains=search, user=request.user)
    elif filter:  # filter by state ('feito' / 'fazendo') and by user
        tasks = Task.objects.filter(done=filter, user=request.user)
    else:
        # No filter: all of the user's tasks, newest first, paginated.
        tasks_list = Task.objects.all().order_by('-created_at').filter(user=request.user)
        paginacao = Paginator(tasks_list, 10)  # (object list, items per page)
        page = request.GET.get('page')
        tasks = paginacao.get_page(page)  # get_page clamps invalid page numbers
    return render(request, 'tasks/list.html', {'tasks': tasks, 'taskFeitaRecente': taskFeitaRecente,
                                               'taskFeito': taskFeito, 'taskFazendo': taskFazendo})
@login_required
def perfil(request):
return render(request, 'user/perfil.html', {'name': request.user})
@login_required
def taskView(request, id):
task = get_object_or_404(Task, pk=id)
return render(request, 'tasks/task.html', {'task':task})
# '-> Argumento enviado para p front-end
@login_required
def newTask(request):
    """Render the new-task form (GET) or create a task from it (POST)."""
    if request.method == 'POST':
        form = TaskForm(request.POST)
        if form.is_valid():
            # commit=False delays the DB save so extra fields can be set first.
            task = form.save(commit=False)
            task.done = 'fazendo'  # new tasks always start in the "doing" state
            task.user = request.user  # owner is the authenticated user
            task.save()
            messages.info(request, 'Tarefa adicionada com sucesso!')  # flash message for the front end
            return redirect('/')  # back to the home listing
    else:
        form = TaskForm()  # empty form for the GET request
    return render(request, 'tasks/addtask.html', {'form':form})
@login_required
def editTasks(request, id):
    """Render the edit form for task `id` (GET) or apply the edit (POST).

    `id` comes from the URL pattern; NOTE: it shadows the builtin `id`.
    """
    task = get_object_or_404(Task, pk=id)  # 404 if the task does not exist
    # instance=task pre-populates the form with the task's current values.
    form = TaskForm(instance=task)
    if request.method == 'POST':
        # Re-bind with POST data against the same instance being edited.
        form = TaskForm(request.POST, instance=task)
        if form.is_valid():
            task.save()
            messages.info(request, 'Tarefa editada com sucesso!')  # flash message for the front end
            return redirect('/')
        else:
            # Validation failed: redisplay the form with errors.
            return render(request, 'tasks/edittask.html', {'form':form, 'task':task})
    else:
        return render(request, 'tasks/edittask.html', {'form':form, 'task':task})
@login_required
def deleteTasks(request, id):
task = get_object_or_404(Task, pk=id)
task.delete()
messages.info(request, 'Tarefa deletada com sucesso!') # Mensagem enviada para o front-end
return redirect('/')
@login_required
def changestatus(request, id):
task = get_object_or_404(Task, pk=id)
if task.done == 'fazendo':
task.done = 'feito'
else:
task.done = 'fazendo'
task.save()
return redirect('/')
def helloWorld(request):
return HttpResponse('Hello World')
def yourName(request, name):
return render(request, 'tasks/yourname.html', {'name':name})
| 37.33871 | 135 | 0.737149 | from django.shortcuts import render, get_object_or_404, redirect
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.contrib import messages
from django.core.paginator import Paginator
from django.contrib.auth.views import (PasswordChangeView, PasswordResetView,
PasswordResetDoneView, PasswordResetConfirmView,
PasswordResetCompleteView)
from .forms import TaskForm
from .models import Task
import datetime
@login_required
def taskList(request): # order_by: ordena por data de criação do mais novo para mais antigo
search = request.GET.get('search') # reseach é name do input de busca que está no html list.html
filter = request.GET.get('filter') # Filtro
dias_30 = datetime.datetime.now()-datetime.timedelta(days=30)
taskFeitaRecente = Task.objects.filter(done='feito', update_at__gt=dias_30, user=request.user).count()
taskFeito = Task.objects.filter(done='feito', user=request.user).count()
taskFazendo = Task.objects.filter(done='fazendo', user=request.user).count()
if search: # Filtrar pelo nome
# Vai buscar uma lista de tasks e vai filtrar pelo request.user (o usuário da requisição)
tasks = Task.objects.filter(title__icontains=search, user=request.user)#, user=request.user)
elif filter: # Filtrar pelo estado (feito ou fazendo)
tasks = Task.objects.filter(done=filter, user=request.user) # Vai filtrar pelo user e pelo done
else:
# Vou pegar todos as tasks e filtrar pelo request.user
tasks_list = Task.objects.all().order_by('-created_at').filter(user=request.user)
# Paginação
paginacao = Paginator(tasks_list, 10) # (lista, num de páginas)
page = request.GET.get('page')
tasks = paginacao.get_page(page) # Vai exibir o numero correto na página que está
# render: "renderiza" a página
return render(request, 'tasks/list.html', {'tasks': tasks, 'taskFeitaRecente': taskFeitaRecente,
'taskFeito': taskFeito, 'taskFazendo': taskFazendo})
@login_required
def perfil(request):
return render(request, 'user/perfil.html', {'name': request.user})
@login_required
def taskView(request, id):
task = get_object_or_404(Task, pk=id)
return render(request, 'tasks/task.html', {'task':task})
# '-> Argumento enviado para p front-end
@login_required
def newTask(request):
if request.method == 'POST':
form = TaskForm(request.POST)
if form.is_valid():
task = form.save(commit=False)
task.done = 'fazendo'
task.user = request.user
task.save()
messages.info(request, 'Tarefa adicionada com sucesso!')
return redirect('/')
else:
form = TaskForm()
return render(request, 'tasks/addtask.html', {'form':form})
@login_required
def editTasks(request, id):
task = get_object_or_404(Task, pk=id)
form = TaskForm(instance=task)
if request.method == 'POST':
form = TaskForm(request.POST, instance=task)
if form.is_valid():
task.save()
messages.info(request, 'Tarefa editada com sucesso!')
return redirect('/')
else:
return render(request, 'tasks/edittask.html', {'form':form, 'task':task})
else:
return render(request, 'tasks/edittask.html', {'form':form, 'task':task})
@login_required
def deleteTasks(request, id):
task = get_object_or_404(Task, pk=id)
task.delete()
messages.info(request, 'Tarefa deletada com sucesso!')
return redirect('/')
@login_required
def changestatus(request, id):
    """Toggle a task between the 'fazendo' and 'feito' states."""
    task = get_object_or_404(Task, pk=id)
    task.done = 'feito' if task.done == 'fazendo' else 'fazendo'
    task.save()
    return redirect('/')
def helloWorld(request):
    """Plain-text sanity-check endpoint."""
    greeting = 'Hello World'
    return HttpResponse(greeting)
def yourName(request, name):
    """Echo the URL-captured ``name`` back through the yourname template."""
    context = {'name': name}
    return render(request, 'tasks/yourname.html', context)
| true | true |
f7f7eb5f7e3d44c78e8b12465d9b24340f2d06e5 | 1,155 | py | Python | Tools/scripts/ptags.py | marcosptf/cpython-2.0.1 | 73c739a764e8b1dc84640e73b880bc66e1916bca | [
"PSF-2.0"
] | 5 | 2022-03-26T21:53:36.000Z | 2022-03-30T21:47:20.000Z | Tools/scripts/ptags.py | marcosptf/cpython-2.0.1 | 73c739a764e8b1dc84640e73b880bc66e1916bca | [
"PSF-2.0"
] | 6 | 2020-11-18T15:48:14.000Z | 2021-05-03T21:20:50.000Z | Tools/scripts/ptags.py | marcosptf/cpython-2.0.1 | 73c739a764e8b1dc84640e73b880bc66e1916bca | [
"PSF-2.0"
] | 2 | 2015-07-16T08:14:13.000Z | 2022-03-27T01:55:17.000Z | #! /usr/bin/env python
# ptags
#
# Create a tags file for Python programs, usable with vi.
# Tagged are:
# - functions (even inside other defs or classes)
# - classes
# - filenames
# Warns about files it cannot open.
# No warnings about duplicate tags.
import sys, re, os
tags = [] # Modified global variable!
def main():
    """Build a vi-style ``tags`` file from the Python sources on argv.

    Every argument is scanned by ``treat_file``, which appends entries to
    the module-level ``tags`` list; when anything was collected the
    entries are sorted and written to a file named ``tags``.
    """
    for filename in sys.argv[1:]:
        treat_file(filename)
    if tags:
        # use a context manager so the output file is always closed
        # (the original leaked the handle)
        with open('tags', 'w') as fp:
            tags.sort()
            fp.writelines(tags)
# Regex matching (possibly nested) "def"/"class" statements; group 2
# captures the definition's name.
expr = '^[ \t]*(def|class)[ \t]+([a-zA-Z0-9_]+)[ \t]*[:\(]'
matcher = re.compile(expr)
def treat_file(file):
    """Scan one Python source file and append its tag entries to ``tags``.

    Adds one tag for the module itself (pointing at line 1) plus one tag
    per ``def``/``class`` matched by the module-level ``matcher``.
    Unreadable files produce a warning on stderr and are skipped.
    """
    try:
        fp = open(file, 'r')
    except OSError:
        # narrowed from the original bare except: only I/O failures are expected
        sys.stderr.write('Cannot open %s\n' % file)
        return
    with fp:
        base = os.path.basename(file)
        if base[-3:] == '.py':
            base = base[:-3]
        # module tag: jump to line 1 of the file
        tags.append(base + '\t' + file + '\t' + '1\n')
        # iterate the file directly instead of the readline()/while-1 loop;
        # the with-block also closes the handle (the original leaked it)
        for line in fp:
            m = matcher.match(line)
            if m:
                content = m.group(0)
                name = m.group(2)
                # definition tag: a vi search pattern for the matched line
                tags.append(name + '\t' + file + '\t/^' + content + '/\n')
main()
| 22.211538 | 61 | 0.520346 |
import sys, re, os
tags = []
def main():
    # Build a vi-style tags file from the Python sources named on argv.
    args = sys.argv[1:]
    for file in args: treat_file(file)
    if tags:
        # NOTE(review): the output handle is never closed explicitly
        fp = open('tags', 'w')
        tags.sort()
        for s in tags: fp.write(s)
# Regex matching "def"/"class" statements; group 2 is the definition name.
expr = '^[ \t]*(def|class)[ \t]+([a-zA-Z0-9_]+)[ \t]*[:\(]'
matcher = re.compile(expr)
def treat_file(file):
    # Scan one source file and append vi tag lines to the global ``tags``.
    try:
        fp = open(file, 'r')
    except:
        # unreadable file: warn on stderr and skip it
        sys.stderr.write('Cannot open %s\n' % file)
        return
    base = os.path.basename(file)
    if base[-3:] == '.py':
        base = base[:-3]
    # module tag pointing at line 1 of the file
    s = base + '\t' + file + '\t' + '1\n'
    tags.append(s)
    while 1:
        line = fp.readline()
        if not line:
            break
        m = matcher.match(line)
        if m:
            content = m.group(0)
            name = m.group(2)
            # definition tag: a vi search pattern for the matched line
            s = name + '\t' + file + '\t/^' + content + '/\n'
            tags.append(s)
main()
| true | true |
f7f7eef97bf70dfbf3caf85f97c5d3de6e77513d | 11,128 | py | Python | rosleapmotion/scripts/leap_interface.py | Chikurtev/leap_joy | 2bc094baa3634890ebbf86bc582ea3e4a2c3fc6a | [
"BSD-3-Clause"
] | 7 | 2022-02-17T23:29:45.000Z | 2022-03-12T09:23:54.000Z | rosleapmotion/scripts/leap_interface.py | Chikurtev/leap_joy | 2bc094baa3634890ebbf86bc582ea3e4a2c3fc6a | [
"BSD-3-Clause"
] | 1 | 2019-02-21T13:40:48.000Z | 2019-02-21T13:47:37.000Z | rosleapmotion/scripts/leap_interface.py | Chikurtev/leap_joy | 2bc094baa3634890ebbf86bc582ea3e4a2c3fc6a | [
"BSD-3-Clause"
] | 1 | 2019-02-21T13:25:36.000Z | 2019-02-21T13:25:36.000Z | #################################################################################
# Copyright (C) 2012-2013 Leap Motion, Inc. All rights reserved. #
# Leap Motion proprietary and confidential. Not for distribution. #
# Use subject to the terms of the Leap Motion SDK Agreement available at #
# https://developer.leapmotion.com/sdk_agreement, or another agreement #
# between Leap Motion and you, your company or other organization. #
#################################################################################
#################################################################################
# Altered LEAP example by Florian Lier, you need to have the LEAP SDK installed #
# for this to work properly ;) #
# This interface provides access to the LEAP MOTION hardware; you will need to #
# have the official LEAP MOTION SDK installed in order to load the shared      #
# libraries provided with the SDK.                                             #
#################################################################################
""" For backwards compatibility with the old driver files
Will be DELETED in the future """
import sys
import time
# Set (append) your PYTHONPATH properly, or just fill in the location of your LEAP
# SDK folder, e.g., $HOME/LeapSDK/lib where the Leap.py lives and /LeapSDK/lib/x64 or
# x86 where the *.so files reside.
# Below, you can see the "dirty" version - NOT RECOMMENDED!
# sys.path.append("/home/YOUR_NAME/path/to/Leap_Developer/LeapSDK/lib")
# sys.path.append("/home/YOUR_NAME/path/to/Leap_Developer/Leap_Developer/LeapSDK/lib/x64")
import threading
import Leap
from Leap import CircleGesture, KeyTapGesture, ScreenTapGesture, SwipeGesture
class LeapFinger():
    """Joint positions of a single finger.

    One ``[x, y, z]`` list is exposed per bone attribute ('metacarpal',
    'proximal', 'intermediate', 'distal') plus ``tip``; everything starts
    at the origin until ``importFinger`` copies data from a Leap finger.
    """

    def __init__(self, finger=None):
        self.boneNames = ['metacarpal',
                          'proximal',
                          'intermediate',
                          'distal']
        # default every bone (and the tip) to the origin
        for attr in self.boneNames:
            setattr(self, attr, [0.0, 0.0, 0.0])
        self.tip = [0.0, 0.0, 0.0]
        self.leapBoneNames = [Leap.Bone.TYPE_METACARPAL,
                              Leap.Bone.TYPE_PROXIMAL,
                              Leap.Bone.TYPE_INTERMEDIATE,
                              Leap.Bone.TYPE_DISTAL]
        if finger is not None:
            self.importFinger(finger)

    def importFinger(self, finger):
        """Copy the joint coordinates out of a ``Leap.Finger`` object."""
        for attr in self.boneNames:
            # the base of each bone is its previous joint
            bone = finger.bone(getattr(Leap.Bone, 'TYPE_%s' % attr.upper()))
            setattr(self, attr, bone.prev_joint.to_float_array())
        # the fingertip is the far end of the distal bone
        self.tip = finger.bone(Leap.Bone.TYPE_DISTAL).next_joint.to_float_array()
class LeapInterface(Leap.Listener):
    # Leap SDK listener that caches the latest hand/finger state.
    # The Leap Controller invokes the on_* callbacks from its own thread;
    # the get_* accessors below return the most recently cached values.
    def on_init(self, controller):
        # These variables are probably not thread safe
        # TODO: Make thread safe ;)
        self.hand = [0,0,0]
        self.right_hand = False
        self.left_hand = False
        self.hand_direction = [0,0,0]
        self.hand_normal = [0,0,0]
        self.hand_palm_pos = [0,0,0]
        self.hand_pitch = 0.0
        self.hand_yaw = 0.0
        self.hand_roll = 0.0
        # one LeapFinger attribute per finger name (self.thumb, self.index, ...)
        self.fingerNames = ['thumb', 'index', 'middle', 'ring', 'pinky']
        for fingerName in self.fingerNames:
            setattr(self, fingerName, LeapFinger())
        print "Initialized Leap Motion Device"
    def on_connect(self, controller):
        print "Connected to Leap Motion Controller"
        # Enable gestures
        controller.enable_gesture(Leap.Gesture.TYPE_CIRCLE);
        controller.enable_gesture(Leap.Gesture.TYPE_KEY_TAP);
        controller.enable_gesture(Leap.Gesture.TYPE_SCREEN_TAP);
        controller.enable_gesture(Leap.Gesture.TYPE_SWIPE);
    def on_disconnect(self, controller):
        # Note: not dispatched when running in a debugger.
        print "Disconnected Leap Motion"
    def on_exit(self, controller):
        print "Exited Leap Motion Controller"
    def on_frame(self, controller):
        # Called once per tracking frame; refreshes all cached state.
        # Get the most recent frame and report some basic information
        frame = controller.frame()
        print "Frame id: %d, timestamp: %d, hands: %d, fingers: %d, tools: %d, gestures: %d" % (
              frame.id, frame.timestamp, len(frame.hands), len(frame.fingers), len(frame.tools), len(frame.gestures()))
        if not frame.hands.is_empty: #recently changed in API
            # Get the first hand
            #we are seeking one left and one right hands
            there_is_right_hand=False
            there_is_left_hand=False
            for hand in frame.hands:
                if hand.is_right:
                    there_is_right_hand=True
                    self.right_hand=hand
                elif hand.is_left:
                    there_is_left_hand=True
                    self.left_hand=hand
            # a hand not seen this frame is reset to False
            if not there_is_right_hand:
                self.right_hand=False
            if not there_is_left_hand:
                self.left_hand=False
            self.hand = frame.hands[0] #old way
            # Check if the hand has any fingers
            fingers = self.hand.fingers
            if not fingers.is_empty:
                for fingerName in self.fingerNames:
                    #finger = fingers.finger_type(Leap.Finger.TYPE_THUMB)[0]
                    #self.thumb.importFinger(finger)
                    finger = fingers.finger_type(getattr(Leap.Finger, 'TYPE_%s' % fingerName.upper()))[0]
                    getattr(self, fingerName).importFinger(finger)
            # Get the hand's sphere radius and palm position
            # print "Hand sphere radius: %f mm, palm position: %s" % (self.hand.sphere_radius, hand.palm_position)
            # Get the hand's normal vector and direction
            normal = self.hand.palm_normal
            direction = self.hand.direction
            pos = self.hand.palm_position
            self.hand_direction[0] = direction.x
            self.hand_direction[1] = direction.y
            self.hand_direction[2] = direction.z
            self.hand_normal[0] = normal.x
            self.hand_normal[1] = normal.y
            self.hand_normal[2] = normal.z
            self.hand_palm_pos[0] = pos.x
            self.hand_palm_pos[1] = pos.y
            self.hand_palm_pos[2] = pos.z
            # orientation angles are converted from radians to degrees here
            self.hand_pitch = direction.pitch * Leap.RAD_TO_DEG
            self.hand_yaw = normal.yaw * Leap.RAD_TO_DEG
            self.hand_roll = direction.roll * Leap.RAD_TO_DEG
            # Calculate the hand's pitch, roll, and yaw angles
            print "Hand pitch: %f degrees, roll: %f degrees, yaw: %f degrees" % (self.hand_pitch, self.hand_roll, self.hand_yaw)
        # Disabled gesture-handling code, kept for reference only.
        '''
        # Gestures
        for gesture in frame.gestures():
            if gesture.type == Leap.Gesture.TYPE_CIRCLE:
                circle = CircleGesture(gesture)
                # Determine clock direction using the angle between the pointable and the circle normal
                if circle.pointable.direction.angle_to(circle.normal) <= Leap.PI/4:
                    clockwiseness = "clockwise"
                else:
                    clockwiseness = "counterclockwise"
                # Calculate the angle swept since the last frame
                swept_angle = 0
                if circle.state != Leap.Gesture.STATE_START:
                    previous_update = CircleGesture(controller.frame(1).gesture(circle.id))
                    swept_angle = (circle.progress - previous_update.progress) * 2 * Leap.PI
                print "Circle id: %d, %s, progress: %f, radius: %f, angle: %f degrees, %s" % (
                        gesture.id, self.state_string(gesture.state),
                        circle.progress, circle.radius, swept_angle * Leap.RAD_TO_DEG, clockwiseness)
            if gesture.type == Leap.Gesture.TYPE_SWIPE:
                swipe = SwipeGesture(gesture)
                print "Swipe id: %d, state: %s, position: %s, direction: %s, speed: %f" % (
                        gesture.id, self.state_string(gesture.state),
                        swipe.position, swipe.direction, swipe.speed)
            if gesture.type == Leap.Gesture.TYPE_KEY_TAP:
                keytap = KeyTapGesture(gesture)
                print "Key Tap id: %d, %s, position: %s, direction: %s" % (
                        gesture.id, self.state_string(gesture.state),
                        keytap.position, keytap.direction )
            if gesture.type == Leap.Gesture.TYPE_SCREEN_TAP:
                screentap = ScreenTapGesture(gesture)
                print "Screen Tap id: %d, %s, position: %s, direction: %s" % (
                        gesture.id, self.state_string(gesture.state),
                        screentap.position, screentap.direction )
        if not (frame.hands.empty and frame.gestures().empty):
            print ""
    def state_string(self, state):
        if state == Leap.Gesture.STATE_START:
            return "STATE_START"
        if state == Leap.Gesture.STATE_UPDATE:
            return "STATE_UPDATE"
        if state == Leap.Gesture.STATE_STOP:
            return "STATE_STOP"
        if state == Leap.Gesture.STATE_INVALID:
            return "STATE_INVALID"
        '''
    # --- accessors for the most recently cached frame state ---
    def get_hand_direction(self):
        return self.hand_direction
    def get_hand_normal(self):
        return self.hand_normal
    def get_hand_palmpos(self):
        return self.hand_palm_pos
    def get_hand_yaw(self):
        return self.hand_yaw
    def get_hand_pitch(self):
        return self.hand_pitch
    def get_hand_roll(self):
        return self.hand_roll
    def get_finger_point(self, fingerName, fingerPointName):
        # e.g. get_finger_point('index', 'tip') -> the index fingertip [x, y, z]
        return getattr(getattr(self, fingerName), fingerPointName)
class Runner(threading.Thread):
    """Background thread owning the Leap controller/listener pair.

    The listener is updated from the Leap SDK's own callback thread, so
    ``run`` only idles; every ``get_*`` method simply forwards to the
    cached state on the listener.
    """

    def __init__(self, arg=None):
        threading.Thread.__init__(self)
        self.arg = arg
        # wire a fresh listener into a fresh controller
        self.listener = LeapInterface()
        self.controller = Leap.Controller()
        self.controller.add_listener(self.listener)

    def __del__(self):
        # detach the listener when the runner is garbage collected
        self.controller.remove_listener(self.listener)

    def get_hand_direction(self):
        """Latest palm direction vector [x, y, z]."""
        return self.listener.get_hand_direction()

    def get_hand_normal(self):
        """Latest palm normal vector [x, y, z]."""
        return self.listener.get_hand_normal()

    def get_hand_palmpos(self):
        """Latest palm position [x, y, z]."""
        return self.listener.get_hand_palmpos()

    def get_hand_roll(self):
        """Latest hand roll angle in degrees."""
        return self.listener.get_hand_roll()

    def get_hand_pitch(self):
        """Latest hand pitch angle in degrees."""
        return self.listener.get_hand_pitch()

    def get_hand_yaw(self):
        """Latest hand yaw angle in degrees."""
        return self.listener.get_hand_yaw()

    def get_finger_point(self, fingerName, fingerPointName):
        """Return one [x, y, z] point of one finger, e.g. ('index', 'tip')."""
        return self.listener.get_finger_point(fingerName, fingerPointName)

    def run(self):
        # nothing active to do here; sleep so we do not burn CPU
        while True:
            time.sleep(0.001)
| 39.885305 | 128 | 0.573688 | false | true | |
f7f7f0031e041042a7f6b934af86ea828c3f74c4 | 1,827 | py | Python | util3d/cameratex.py | aolkin/text-physics | fa74e20c2461968bd9510a5f5f8ebf92f67c8186 | [
"MIT"
] | null | null | null | util3d/cameratex.py | aolkin/text-physics | fa74e20c2461968bd9510a5f5f8ebf92f67c8186 | [
"MIT"
] | null | null | null | util3d/cameratex.py | aolkin/text-physics | fa74e20c2461968bd9510a5f5f8ebf92f67c8186 | [
"MIT"
] | null | null | null | import cv2
import io
import os
import time
import sys
from redis import Redis
from panda3d.core import CardMaker, Texture, PNMImage
from direct.stdpy import threading, thread
PIPE_FILE = "panda_camera_pipe.pnm"
class CameraReader:
    """Grabs frames from an OpenCV camera and publishes them via a pipe file.

    Writes are guarded by a Redis lock named "camera-lock" so a reader
    process never sees a half-written image.
    """

    def __init__(self, index=1, pipe=PIPE_FILE):
        self.cap = cv2.VideoCapture(index)
        self.pipe = pipe
        self.redis = Redis()
        # clear any stale lock left over from a previous run
        self.redis.delete("camera-lock")

    def getFrame(self):
        """Capture one frame and write it to the pipe file under the lock.

        Returns the cv2.imwrite result on success, or the failed capture
        flag when no frame could be read.
        """
        ok, frame = self.cap.read()
        if not ok:
            return ok
        with self.redis.lock("camera-lock"):
            return cv2.imwrite(self.pipe, frame)
class CameraTexture:
    """Panda3D texture continuously refreshed from the camera pipe file.

    A background thread polls the image written by CameraReader (under
    the shared "camera-lock" Redis lock); ``update`` uploads the newest
    image into the texture.
    """

    def __init__(self, pipe=PIPE_FILE):
        self.tex = Texture("CameraTexture")
        self.pipe = pipe
        self.redis = Redis()
        # clear any stale lock left over from a previous run
        self.redis.delete("camera-lock")
        self.image = None
        # start polling the pipe file in the background
        self.thread = thread.start_new_thread(self.readImage, ())

    def readImage(self):
        """Background loop: reload the pipe file under the lock, then sleep."""
        while True:
            with self.redis.lock("camera-lock"):
                latest = PNMImage(self.pipe)
            self.image = latest
            time.sleep(0.06)

    def update(self):
        """Upload the most recently read image (if any) into the texture."""
        if self.image:
            self.tex.load(self.image)

    def getTexture(self):
        """The Panda3D Texture object callers should bind."""
        return self.tex
class CameraCard:
    """A textured card node that displays the live camera feed."""

    def __init__(self, parent):
        self.tex = CameraTexture()
        maker = CardMaker("CameraCard")
        self.cardNp = parent.attachNewNode(maker.generate())
        self.cardNp.setTexture(self.tex.getTexture())

    def __getattr__(self, name):
        # delegate unknown attribute access to the underlying node path
        return getattr(self.cardNp, name)

    def update(self):
        """Refresh the card's texture with the latest camera image."""
        return self.tex.update()
if __name__ == "__main__":
    # Standalone capture loop: camera index comes from argv (default 1).
    cam_index = int(sys.argv[1]) if len(sys.argv) > 1 else 1
    camera = CameraReader(cam_index)
    while True:
        ret = camera.getFrame()
        print(time.time())
        time.sleep(0.05)
| 25.375 | 71 | 0.611932 | import cv2
import io
import os
import time
import sys
from redis import Redis
from panda3d.core import CardMaker, Texture, PNMImage
from direct.stdpy import threading, thread
# Shared scratch file through which frames are exchanged between processes.
PIPE_FILE = "panda_camera_pipe.pnm"
class CameraReader:
    # Captures frames with OpenCV and writes them to the pipe file,
    # serialised by the "camera-lock" Redis lock.
    def __init__(self, index=1, pipe=PIPE_FILE):
        self.cap = cv2.VideoCapture(index)
        self.pipe = pipe
        self.redis = Redis()
        self.redis.delete("camera-lock")
    def getFrame(self):
        # Write the captured frame under the lock; returns the failed
        # capture flag when no frame could be read.
        ret, frame = self.cap.read()
        if ret:
            with self.redis.lock("camera-lock"):
                return cv2.imwrite(self.pipe, frame)
        return ret
class CameraTexture:
    # Panda3D texture refreshed in the background from the pipe file.
    def __init__(self, pipe=PIPE_FILE):
        self.tex = Texture("CameraTexture")
        self.pipe = pipe
        self.redis = Redis()
        self.redis.delete("camera-lock")
        self.image = None
        # poll the pipe file on a background thread
        self.thread = thread.start_new_thread(self.readImage, ())
    def readImage(self):
        # Reload the pipe file under the shared lock, then sleep 60 ms.
        while True:
            with self.redis.lock("camera-lock"):
                newImage = PNMImage(self.pipe)
            self.image = newImage
            time.sleep(0.06)
    def update(self):
        # Upload the newest image (if one has been read) into the texture.
        if self.image:
            self.tex.load(self.image)
    def getTexture(self):
        return self.tex
class CameraCard:
    # Card node showing the live camera texture; unknown attributes are
    # delegated to the wrapped NodePath.
    def __init__(self, parent):
        self.tex = CameraTexture()
        cm = CardMaker("CameraCard")
        self.cardNp = parent.attachNewNode(cm.generate())
        self.cardNp.setTexture(self.tex.getTexture())
    def __getattr__(self, name):
        return getattr(self.cardNp, name)
    def update(self):
        return self.tex.update()
if __name__ == "__main__":
    # Standalone capture loop; camera index from argv (default 1).
    camera = CameraReader(int(sys.argv[1]) if len(sys.argv) > 1 else 1)
    while True:
        ret = camera.getFrame()
        print(time.time())
        time.sleep(0.05)
| true | true |
f7f7f0f9faf5b94069127c7a3a753e5fd6a9cd8a | 1,066 | py | Python | EHTask/utils/FileSystem.py | CraneHzm/EHTask | 1d36e326bee8b8d5b9d20761fc24bd2b3fa22aaa | [
"MIT"
] | 15 | 2020-02-26T14:09:48.000Z | 2022-02-24T09:45:11.000Z | DGaze/utils/FileSystem.py | yangjing628/DGaze | 1f547256724f0c0ff7c68baac384b510176222a0 | [
"MIT"
] | null | null | null | DGaze/utils/FileSystem.py | yangjing628/DGaze | 1f547256724f0c0ff7c68baac384b510176222a0 | [
"MIT"
] | 7 | 2020-06-11T02:56:08.000Z | 2021-08-20T05:12:46.000Z | # Copyright (c) 2019/7/13 Hu Zhiming jimmyhu@pku.edu.cn All Rights Reserved.
# process files and directories.
#################### Libs ####################
import os
import shutil
import time
# remove a directory
def RemoveDir(dirName):
    """Delete directory ``dirName`` and all of its contents.

    Prints a warning instead of raising when the path does not exist.
    """
    if not os.path.exists(dirName):
        print("Invalid Directory Path!")
        return
    shutil.rmtree(dirName)
# remake a directory
def RemakeDir(dirName):
    """Create ``dirName`` as a fresh, empty directory.

    Any existing directory (and its contents) is removed first, so the
    caller always ends up with an empty directory.
    """
    if os.path.exists(dirName):
        shutil.rmtree(dirName)
    # (re)create in either case -- this call was duplicated in both
    # branches of the original if/else
    os.makedirs(dirName)
# calculate the number of lines in a file
def FileLines(fileName):
    """Return the number of lines in ``fileName``.

    Prints a warning and returns 0 when the path does not exist.
    """
    if not os.path.exists(fileName):
        print("Invalid File Path!")
        return 0
    # count lazily instead of materialising every line in memory
    with open(fileName, 'r') as fr:
        return sum(1 for _ in fr)
# make a directory if it does not exist.
def MakeDir(dirName):
    """Create ``dirName`` unless it already exists (then just report it)."""
    if os.path.exists(dirName):
        print("Directory "+ dirName + " already exists.")
        return
    os.makedirs(dirName)
if __name__ == "__main__":
    # Small smoke test: recreate, re-make (prints a notice), delete,
    # then create again.
    demo = "test"
    RemakeDir(demo)
    time.sleep(3)
    MakeDir(demo)
    RemoveDir(demo)
    time.sleep(3)
    MakeDir(demo)
#print(FileLines('233.txt')) | 19.740741 | 76 | 0.687617 | true | true | |
f7f7f1e1bd5413c2b1680792ede2d1b882897001 | 228 | py | Python | demos/datakit_fut/runDT.py | NoTravel/wtpy | bd48db23ed6eb3157fc97d298b47279c0733d197 | [
"MIT"
] | 164 | 2020-06-18T01:47:31.000Z | 2022-03-30T09:19:42.000Z | demos/datakit_fut/runDT.py | NoTravel/wtpy | bd48db23ed6eb3157fc97d298b47279c0733d197 | [
"MIT"
] | 20 | 2020-12-02T02:57:55.000Z | 2022-03-30T05:25:14.000Z | demos/datakit_fut/runDT.py | NoTravel/wtpy | bd48db23ed6eb3157fc97d298b47279c0733d197 | [
"MIT"
] | 66 | 2020-09-08T03:21:24.000Z | 2022-03-29T08:39:55.000Z | from wtpy import WtDtEngine
if __name__ == "__main__":
    # Create a runtime environment and add strategies
    engine = WtDtEngine()
    # load the engine configuration and the logging configuration
    engine.initialize("dtcfg.json", "logcfgdt.json")
    engine.run()
kw = input('press any key to exit\n') | 22.8 | 53 | 0.622807 | from wtpy import WtDtEngine
if __name__ == "__main__":
    # Build the data-kit engine, load its two JSON configs, then run it.
    engine = WtDtEngine()
    engine.initialize("dtcfg.json", "logcfgdt.json")
    engine.run()
kw = input('press any key to exit\n') | true | true |
f7f7f335cdb6d5299819c87c1d07b0c979102fd1 | 27,493 | py | Python | examples/beginner/plot_hdf_utils_read.py | sulaymandesai/pyUSID | fa4d152856e4717c92b1fbe34222eb2e1c042707 | [
"MIT"
] | null | null | null | examples/beginner/plot_hdf_utils_read.py | sulaymandesai/pyUSID | fa4d152856e4717c92b1fbe34222eb2e1c042707 | [
"MIT"
] | null | null | null | examples/beginner/plot_hdf_utils_read.py | sulaymandesai/pyUSID | fa4d152856e4717c92b1fbe34222eb2e1c042707 | [
"MIT"
] | null | null | null | """
================================================================================
04. Utilities for reading h5USID files
================================================================================
**Suhas Somnath**
4/18/2018
**This document illustrates the many handy functions in pyUSID.hdf_utils that significantly simplify reading data
and metadata in Universal Spectroscopic and Imaging Data (USID) HDF5 files (h5USID files)**
"""
########################################################################################################################
# Introduction
# -------------
# The USID model uses a data-centric approach to data analysis and processing meaning that results from all data analysis
# and processing are written to the same h5 file that contains the recorded measurements. **Hierarchical Data Format
# (HDF5)** files allow data, whether it is raw measured data or results of analysis, to be stored in multiple datasets within
# the same file in a tree-like manner. Certain rules and considerations have been made in pyUSID to ensure
# consistent and easy access to any data.
#
# The h5py python package provides great functions to create, read, and manage data in HDF5 files. In
# ``pyUSID.hdf_utils``, we have added functions that facilitate scientifically relevant, or USID specific
# functionality such as checking if a dataset is a Main dataset, reshaping to / from the original N dimensional form of
# the data, etc. Due to the wide breadth of the functions in ``hdf_utils``, the guide for hdf_utils will be split in two
# parts - one that focuses on functions that facilitate reading and one that facilitate writing of data. The following
# guide provides examples of how, and more importantly when, to use functions in ``pyUSID.hdf_utils`` for various
# scenarios.
#
# Recommended pre-requisite reading
# ---------------------------------
# * `Universal Spectroscopic and Imaging Data (USID) model </../../../USID/usid_model.html>`_
# * `Crash course on HDF5 and h5py <./plot_h5py.html>`_
#
# .. tip::
# You can download and run this document as a Jupyter notebook using the link at the bottom of this page.
#
# Import all necessary packages
# -------------------------------
#
# Before we begin demonstrating the numerous functions in ``pyUSID.hdf_utils``, we need to import the necessary
# packages. Here are a list of packages besides pyUSID that will be used in this example:
#
# * ``h5py`` - to open and close the file
# * ``wget`` - to download the example data file
# * ``numpy`` - for numerical operations on arrays in memory
# * ``matplotlib`` - basic visualization of data
from __future__ import print_function, division, unicode_literals
import os
# Warning package in case something goes wrong
from warnings import warn
import subprocess
import sys
def install(package):
    """Install *package* into the current interpreter's environment via pip."""
    cmd = [sys.executable, "-m", "pip", "install", package]
    subprocess.call(cmd)
# Package for downloading online files:
try:
    # This package is not part of anaconda and may need to be installed.
    import wget
except ImportError:
    warn('wget not found. Will install with pip.')
    import pip
    # BUG FIX: the original called install(wget), but the name ``wget`` is
    # unbound here (its import just failed) and would raise NameError.
    # The package name must be passed as a string.
    install('wget')
    import wget
import h5py
import numpy as np
import matplotlib.pyplot as plt
# import sidpy - supporting package for pyUSID:
try:
    import sidpy
except ImportError:
    # install on demand, then retry the import
    warn('sidpy not found. Will install with pip.')
    import pip
    install('sidpy')
    import sidpy
# Finally import pyUSID.
try:
    import pyUSID as usid
except ImportError:
    # install on demand, then retry the import
    warn('pyUSID not found. Will install with pip.')
    import pip
    install('pyUSID')
    import pyUSID as usid
########################################################################################################################
# In order to demonstrate the many functions in hdf_utils, we will be using a h5USID file containing real
# experimental data along with results from analyses on the measurement data
#
# This scientific dataset
# -----------------------
#
# For this example, we will be working with a **Band Excitation Polarization Switching (BEPS)** dataset acquired from
# advanced atomic force microscopes. In the much simpler **Band Excitation (BE)** imaging datasets, a single spectrum is
# acquired at each location in a two dimensional grid of spatial locations. Thus, BE imaging datasets have two
# position dimensions (``X``, ``Y``) and one spectroscopic dimension (``Frequency`` - against which the spectrum is recorded).
# The BEPS dataset used in this example has a spectrum for **each combination of** three other parameters (``DC offset``,
# ``Field``, and ``Cycle``). Thus, this dataset has three new spectral dimensions in addition to ``Frequency``. Hence,
# this dataset becomes a 2+4 = **6 dimensional dataset**
#
# Load the dataset
# ------------------
# First, let us download this file from the pyUSID Github project:
url = 'https://raw.githubusercontent.com/pycroscopy/pyUSID/master/data/BEPS_small.h5'
h5_path = 'temp.h5'
_ = wget.download(url, h5_path, bar=None)
print('Working on:\n' + h5_path)
########################################################################################################################
# Next, lets open this HDF5 file in read-only mode. Note that opening the file does not cause the contents to be
# automatically loaded to memory. Instead, we are presented with objects that refer to specific HDF5 datasets,
# attributes or groups in the file
# ``h5_path`` was already assigned above when the file was downloaded,
# so the redundant reassignment is removed.  Open in read-only mode;
# contents are loaded lazily, not into memory.
h5_f = h5py.File(h5_path, mode='r')
########################################################################################################################
# Here, ``h5_f`` is an active handle to the open file
#
# Inspect HDF5 contents
# ======================
#
# The file contents are stored in a tree structure, just like files on a contemporary computer. The file contains
# groups (similar to file folders) and datasets (similar to spreadsheets).
# There are several datasets in the file and these store:
#
# * The actual measurement collected from the experiment
# * Spatial location on the sample where each measurement was collected
# * Information to support and explain the spectral data collected at each location
# * Since the USID model stores results from processing and analyses performed on the data in the same h5USID file,
# these datasets and groups are present as well
# * Any other relevant ancillary information
#
# print_tree()
# ------------
# Soon after opening any file, it is often of interest to list the contents of the file. While one can use the open
# source software HDFViewer developed by the HDF organization, ``pyUSID.hdf_utils`` also has a very handy function -
# ``print_tree()`` to quickly visualize all the datasets and groups within the file within python.
print('Contents of the H5 file:')
sidpy.hdf_utils.print_tree(h5_f)
########################################################################################################################
# By default, ``print_tree()`` presents a clean tree view of the contents of the group. In this mode, only the group names
# are underlined. Alternatively, it can print the full paths of each dataset and group, with respect to the group / file
# of interest, by setting the ``rel_paths``
# keyword argument. ``print_tree()`` could also be used to display the contents of an HDF5 group instead of a complete HDF5
# file as we have done above. Lets configure it to print the relative paths of all objects within the ``Channel_000``
# group:
sidpy.hdf_utils.print_tree(h5_f['/Measurement_000/Channel_000/'], rel_paths=True)
########################################################################################################################
# Finally, ``print_tree()`` can also be configured to only print USID Main datasets besides Group objects using the
# ``main_dsets_only`` option
sidpy.hdf_utils.print_tree(h5_f, main_dsets_only=True)
########################################################################################################################
# Accessing Attributes
# ==================================
#
# HDF5 datasets and groups can also store metadata such as experimental parameters. These metadata can be text,
# numbers, small lists of numbers or text etc. These metadata can be very important for understanding the datasets
# and guide the analysis routines.
#
# While one could use the basic ``h5py`` functionality to access attributes, one would encounter a lot of problems when
# attempting to decode attributes whose values were strings or lists of strings due to some issues in ``h5py``. This problem
# has been demonstrated in our `primer to HDF5 and h5py <./plot_h5py.html>`_. Instead of using the basic functionality of ``h5py``, we recommend always
# using the functions in pyUSID that reliably and consistently work for any kind of attribute for any version of
# python:
#
# get_attributes()
# ----------------
#
# ``get_attributes()`` is a very handy function that returns all or a specified set of attributes in an HDF5 object. If no
# attributes are explicitly requested, all attributes in the object are returned:
for key, val in sidpy.hdf_utils.get_attributes(h5_f).items():
print('{} : {}'.format(key, val))
########################################################################################################################
# ``get_attributes()`` is also great for only getting selected attributes. For example, if we only cared about the user
# and project related attributes, we could manually request for any that we wanted:
proj_attrs = sidpy.hdf_utils.get_attributes(h5_f, ['project_name', 'project_id', 'user_name'])
for key, val in proj_attrs.items():
print('{} : {}'.format(key, val))
########################################################################################################################
# get_attr()
# ----------
#
# If we are sure that we only wanted a specific attribute, we could instead use ``get_attr()`` as:
print(sidpy.hdf_utils.get_attr(h5_f, 'user_name'))
########################################################################################################################
# check_for_matching_attrs()
# --------------------------
# Consider the scenario where we have several HDF5 files or Groups or datasets and we wanted to check each one to
# see if they have certain metadata / attributes. ``check_for_matching_attrs()`` is one very handy function that
# simplifies the comparison operation.
#
# For example, let us check if this file was authored by ``John Doe``:
print(usid.hdf_utils.check_for_matching_attrs(h5_f, new_parms={'user_name': 'John Doe'}))
########################################################################################################################
# Finding datasets and groups
# ============================
#
# There are numerous ways to search for and access datasets and groups in H5 files using the basic functionalities
# of h5py. pyUSID.hdf_utils contains several functions that simplify common searching / lookup operations as part of
# scientific workflows.
#
# find_dataset()
# ----------------
#
# The ``find_dataset()`` function will return all datasets whose names contain the provided string. In this case, we
# are looking for any datasets containing the string ``UDVS`` in their names. If you look above, there are two datasets
# (UDVS and UDVS_Indices) that match this condition:
udvs_dsets_2 = usid.hdf_utils.find_dataset(h5_f, 'UDVS')
for item in udvs_dsets_2:
print(item)
########################################################################################################################
# As you might know by now, h5USID files contain three kinds of datasets:
#
# * ``Main`` datasets that contain data recorded / computed at multiple spatial locations.
# * ``Ancillary`` datasets that support a main dataset
# * Other datasets
#
# For more information, please refer to the documentation on the USID model.
#
# check_if_main()
# ---------------
# ``check_if_main()`` is a very handy function that helps distinguish between ``Main`` datasets and other objects
# (``Ancillary`` datasets, other datasets, Groups etc.). Lets apply this function to see which of the objects within the
# ``Channel_000`` Group are ``Main`` datasets:
h5_chan_group = h5_f['Measurement_000/Channel_000']
# We will prepare two lists - one of objects that are ``main`` and one of objects that are not
non_main_objs = []
main_objs = []
for key, val in h5_chan_group.items():
if usid.hdf_utils.check_if_main(val):
main_objs.append(key)
else:
non_main_objs.append(key)
# Now we simply print the names of the items in each list
print('Main Datasets:')
print('----------------')
for item in main_objs:
print(item)
print('\nObjects that were not Main datasets:')
print('--------------------------------------')
for item in non_main_objs:
print(item)
########################################################################################################################
# The above script allowed us to distinguish Main datasets from all other objects only within the Group named
# ``Channel_000``.
#
# get_all_main()
# --------------
# What if we want to quickly find all ``Main`` datasets even within the sub-Groups of ``Channel_000``? To do this, we have a
# very handy function called - ``get_all_main()``:
main_dsets = usid.hdf_utils.get_all_main(h5_chan_group)
for dset in main_dsets:
print(dset)
print('--------------------------------------------------------------------')
########################################################################################################################
# The datasets above show that the file contains three main datasets. Two of these datasets are contained in a HDF5
# Group called ``Raw_Data-SHO_Fit_000`` meaning that they are results of an operation called ``SHO_Fit`` performed on the
# ``Main`` dataset - ``Raw_Data``. The first of the three main datasets is indeed the ``Raw_Data`` dataset from which the
# latter two datasets (``Fit`` and ``Guess``) were derived.
#
# The USID model allows the same operation, such as ``SHO_Fit``, to be performed on the same dataset (``Raw_Data``),
# multiple
# times. Each time the operation is performed, a new HDF5 Group is created to hold the new results. Often, we may
# want to perform a few operations such as:
#
# * Find the (source / main) dataset from which certain results were derived
# * Check if a particular operation was performed on a main dataset
# * Find all groups corresponding to a particular operation (e.g. - ``SHO_Fit``) being applied to a Main dataset
#
# ``hdf_utils`` has a few handy functions for many of these use cases.
#
# find_results_groups()
# ----------------------
# First, lets show that ``find_results_groups()`` finds all Groups containing the results of a ``SHO_Fit`` operation applied
# to ``Raw_Data``:
# First get the dataset corresponding to Raw_Data
h5_raw = h5_chan_group['Raw_Data']
operation = 'SHO_Fit'
print('Instances of operation "{}" applied to dataset named "{}":'.format(operation, h5_raw.name))
h5_sho_group_list = usid.hdf_utils.find_results_groups(h5_raw, operation)
print(h5_sho_group_list)
########################################################################################################################
# As expected, the ``SHO_Fit`` operation was performed on ``Raw_Data`` dataset only once, which is why
# ``find_results_groups()`` returned only one HDF5 Group - ``SHO_Fit_000``.
#
# check_for_old()
# -----------------
#
# Often one may want to check if a certain operation was performed on a dataset with the very same parameters to
# avoid recomputing the results. ``hdf_utils.check_for_old()`` is a very handy function that compares parameters (a
# dictionary) for a new / potential operation against the metadata (attributes) stored in each existing results group
# (HDF5 groups whose name starts with ``Raw_Data-SHO_Fit`` in this case). Before we demonstrate ``check_for_old()``, lets
# take a look at the attributes stored in the existing results groups:
print('Parameters already used for computing SHO_Fit on Raw_Data in the file:')
for key, val in sidpy.hdf_utils.get_attributes(h5_chan_group['Raw_Data-SHO_Fit_000']).items():
print('{} : {}'.format(key, val))
########################################################################################################################
# Now, let us check for existing results where the ``SHO_fit_method`` attribute matches an existing value and a new value:
print('Checking to see if SHO Fits have been computed on the raw dataset:')
print('\nUsing "pycroscopy BESHO":')
print(usid.hdf_utils.check_for_old(h5_raw, 'SHO_Fit',
new_parms={'SHO_fit_method': 'pycroscopy BESHO'}))
print('\nUsing "alternate technique"')
print(usid.hdf_utils.check_for_old(h5_raw, 'SHO_Fit',
new_parms={'SHO_fit_method': 'alternate technique'}))
########################################################################################################################
# Clearly, while find_results_groups() returned any and all groups corresponding to ``SHO_Fit`` being applied to
# ``Raw_Data``, ``check_for_old()`` only returned the group(s) where the operation was performed using the same specified
# parameters (``sho_fit_method`` in this case).
#
# Note that ``check_for_old()`` performs two operations - search for all groups with the matching nomenclature and then
# compare the attributes. ``check_for_matching_attrs()`` is the handy function, that enables the latter operation of
# comparing a given dictionary of parameters against attributes in a given object.
#
# get_source_dataset()
# ---------------------
# ``hdf_utils.get_source_dataset()`` is a very handy function for the inverse scenario where we are interested in finding
# the source dataset from which the known result was derived:
h5_sho_group = h5_sho_group_list[0]
print('Datagroup containing the SHO fits:')
print(h5_sho_group)
print('\nDataset on which the SHO Fit was computed:')
h5_source_dset = usid.hdf_utils.get_source_dataset(h5_sho_group)
print(h5_source_dset)
########################################################################################################################
# Since the source dataset is always a ``Main`` dataset, ``get_source_dataset()`` returns a ``USIDataset`` object instead of
# a regular ``HDF5 Dataset`` object.
#
# Note that ``hdf_utils.get_source_dataset()`` and ``find_results_groups()`` rely on the USID rule that results of an
# operation be stored in a Group named ``Source_Dataset_Name-Operation_Name_00x``.
# get_auxiliary_datasets()
# -------------------------
# The association of datasets and groups with one another provides a powerful mechanism for conveying (richer)
# information. One way to associate objects with each other is to store the reference of an object as an attribute of
# another. This is precisely the capability that is leveraged to turn Central datasets into USID Main Datasets or
# ``USIDatasets``. USIDatasets need to have four attributes that are references to the ``Position`` and ``Spectroscopic``
# ``ancillary`` datasets. Note, that USID does not restrict or preclude the storage of other relevant datasets as
# attributes of another dataset.
#
# For example, the ``Raw_Data`` dataset appears to contain several attributes whose keys / names match the names of
# datasets we see above and values all appear to be HDF5 object references:
for key, val in sidpy.hdf_utils.get_attributes(h5_raw).items():
print('{} : {}'.format(key, val))
########################################################################################################################
# As the name suggests, these HDF5 object references are references or addresses to datasets located elsewhere in the
# file. Conventionally, one would need to apply this reference to the file handle to get the actual HDF5 Dataset / Group
# object.
#
# ``get_auxiliary_datasets()`` simplifies this process by directly retrieving the actual Dataset / Group associated with
# the attribute. Thus, we would be able to get a reference to the ``Bin_Frequencies`` Dataset via:
h5_obj = sidpy.hdf_utils.get_auxiliary_datasets(h5_raw, 'Bin_Frequencies')[0]
print(h5_obj)
# Lets prove that this object is the same as the 'Bin_Frequencies' object that can be directly addressed:
print(h5_obj == h5_f['/Measurement_000/Channel_000/Bin_Frequencies'])
########################################################################################################################
# Accessing Ancillary Datasets
# =============================
# One of the major benefits of h5USID is its ability to handle large multidimensional datasets at ease. ``Ancillary``
# datasets serve as the keys or legends for explaining the dimensionality, reshape-ability, etc. of a dataset. There are
# several functions in hdf_utils that simplify many common operations on ancillary datasets.
#
# Before we demonstrate the several useful functions in hdf_utils, lets access the position and spectroscopic ancillary
# datasets using the ``get_auxiliary_datasets()`` function we used above:
dset_list = sidpy.hdf_utils.get_auxiliary_datasets(h5_raw, ['Position_Indices', 'Position_Values',
'Spectroscopic_Indices', 'Spectroscopic_Values'])
h5_pos_inds, h5_pos_vals, h5_spec_inds, h5_spec_vals = dset_list
########################################################################################################################
# As mentioned above, this is indeed a six dimensional dataset with two position dimensions and four spectroscopic
# dimensions. The ``Field`` and ``Cycle`` dimensions do not have any units since they are dimensionless unlike the other
# dimensions.
#
# get_dimensionality()
# ---------------------
# Now lets find out the number of steps in each of those dimensions using another handy function called
# ``get_dimensionality()``:
pos_dim_sizes = usid.hdf_utils.get_dimensionality(h5_pos_inds)
spec_dim_sizes = usid.hdf_utils.get_dimensionality(h5_spec_inds)
pos_dim_names = sidpy.hdf_utils.get_attr(h5_pos_inds, 'labels')
spec_dim_names = sidpy.hdf_utils.get_attr(h5_spec_inds, 'labels')
print('Size of each Position dimension:')
for name, length in zip(pos_dim_names, pos_dim_sizes):
print('{} : {}'.format(name, length))
print('\nSize of each Spectroscopic dimension:')
for name, length in zip(spec_dim_names, spec_dim_sizes):
print('{} : {}'.format(name, length))
########################################################################################################################
# get_sort_order()
# ----------------
#
# In a few (rare) cases, the spectroscopic / position dimensions are not arranged in descending order of rate of change.
# In other words, the dimensions in these ancillary matrices are not arranged from fastest-varying to slowest.
# To account for such discrepancies, ``hdf_utils`` has a very handy function that goes through each of the columns or
# rows in the ancillary indices matrices and finds the order in which these dimensions vary.
#
# Below we illustrate an example of sorting the names of the spectroscopic dimensions from fastest to slowest in
# the BEPS data file:
spec_sort_order = usid.hdf_utils.get_sort_order(h5_spec_inds)
print('Rate of change of spectroscopic dimensions: {}'.format(spec_sort_order))
print('\nSpectroscopic dimensions arranged as is:')
print(spec_dim_names)
sorted_spec_labels = np.array(spec_dim_names)[np.array(spec_sort_order)]
print('\nSpectroscopic dimensions arranged from fastest to slowest')
print(sorted_spec_labels)
########################################################################################################################
# get_unit_values()
# -----------------
#
# When visualizing the data it is essential to plot the data against appropriate values on the X, Y, or Z axes.
# Recall that by definition that the values over which each dimension is varied, are repeated and tiled over the entire
# position or spectroscopic dimension of the dataset. Thus, if we had just the bias waveform repeated over two cycles,
# spectroscopic values would contain the bias waveform tiled twice and the cycle numbers repeated as many times as the
# number of points in the bias waveform. Therefore, extracting the bias waveform or the cycle numbers from the ancillary
# datasets is not trivial. This problem is especially challenging for multidimensional datasets such as the one under
# consideration. Fortunately, ``hdf_utils`` has a very handy function for this as well:
pos_unit_values = usid.hdf_utils.get_unit_values(h5_pos_inds, h5_pos_vals)
print('Position unit values:')
for key, val in pos_unit_values.items():
print('{} : {}'.format(key, val))
spec_unit_values = usid.hdf_utils.get_unit_values(h5_spec_inds, h5_spec_vals)
########################################################################################################################
# Since the spectroscopic dimensions are quite complicated, lets visualize the results from ``get_unit_values()``:
fig, axes = plt.subplots(ncols=2, nrows=2, figsize=(6.5, 6))
for axis, name in zip(axes.flat, spec_dim_names):
axis.set_title(name)
axis.plot(spec_unit_values[name], 'o-')
fig.suptitle('Spectroscopic Dimensions', fontsize=16, y=1.05)
fig.tight_layout()
########################################################################################################################
# Reshaping Data
# ==============
#
# reshape_to_n_dims()
# -------------------
#
# The USID model stores N dimensional datasets in a flattened 2D form of position x spectral values. It can become
# challenging to retrieve the data in its original N-dimensional form, especially for multidimensional datasets such as
# the one we are working on. Fortunately, all the information regarding the dimensionality of the dataset are contained
# in the spectral and position ancillary datasets. ``reshape_to_n_dims()`` is a very useful function that can help
# retrieve the N-dimensional form of the data using a simple function call:
ndim_form, success, labels = usid.hdf_utils.reshape_to_n_dims(h5_raw, get_labels=True)
if success:
print('Succeeded in reshaping flattened 2D dataset to N dimensions')
print('Shape of the data in its original 2D form')
print(h5_raw.shape)
print('Shape of the N dimensional form of the dataset:')
print(ndim_form.shape)
print('And these are the dimensions')
print(labels)
else:
print('Failed in reshaping the dataset')
########################################################################################################################
# reshape_from_n_dims()
# -----------------------
# The inverse problem of reshaping an N dimensional dataset back to a 2D dataset (let's say for the purposes of
# multivariate analysis or storing into h5USID files) is also easily solved using another handy
# function - ``reshape_from_n_dims()``:
two_dim_form, success = usid.hdf_utils.reshape_from_n_dims(ndim_form, h5_pos=h5_pos_inds, h5_spec=h5_spec_inds)
if success:
print('Shape of flattened two dimensional form')
print(two_dim_form.shape)
else:
print('Failed in flattening the N dimensional dataset')
########################################################################################################################
# Close and delete the h5_file
h5_f.close()
os.remove(h5_path)
| 51.873585 | 151 | 0.645473 | true | true | |
f7f7f415fcb48472a8eda2cf5ca8444622115d93 | 213 | py | Python | actions/get_cluster_configs.py | cognifloyd/stackstorm-opscenter | aabbbc5f236dc48ebc3c72f788f191c8782a5b86 | [
"Apache-2.0"
] | 164 | 2015-01-17T16:08:33.000Z | 2021-08-03T02:34:07.000Z | actions/get_cluster_configs.py | cognifloyd/stackstorm-opscenter | aabbbc5f236dc48ebc3c72f788f191c8782a5b86 | [
"Apache-2.0"
] | 442 | 2015-01-01T11:19:01.000Z | 2017-09-06T23:26:17.000Z | actions/get_cluster_configs.py | cognifloyd/stackstorm-opscenter | aabbbc5f236dc48ebc3c72f788f191c8782a5b86 | [
"Apache-2.0"
] | 202 | 2015-01-13T00:37:40.000Z | 2020-11-07T11:30:10.000Z | import requests
from lib.base import OpscenterAction
class GetClustersAction(OpscenterAction):
    """Fetch the list of cluster configurations from OpsCenter."""

    def run(self):
        # Build the REST endpoint URL and return the decoded JSON body.
        endpoint = self._get_full_url(['cluster-configs'])
        response = requests.get(endpoint)
        return response.json()
| 17.75 | 53 | 0.70892 | import requests
from lib.base import OpscenterAction
class GetClustersAction(OpscenterAction):
def run(self):
url = self._get_full_url(['cluster-configs'])
return requests.get(url).json()
| true | true |
f7f7f417ba92326d6c01218e170dafcd0b4ad126 | 1,926 | py | Python | ochrona/parser/tox.py | ttw225/ochrona-cli | 974973265091e0e9ff03c8cabad4c3e6ad20ea07 | [
"MIT"
] | null | null | null | ochrona/parser/tox.py | ttw225/ochrona-cli | 974973265091e0e9ff03c8cabad4c3e6ad20ea07 | [
"MIT"
] | null | null | null | ochrona/parser/tox.py | ttw225/ochrona-cli | 974973265091e0e9ff03c8cabad4c3e6ad20ea07 | [
"MIT"
] | null | null | null | from configparser import ConfigParser, NoOptionError
from io import StringIO
from typing import List
from ochrona.const import TOX_LINKED_REQUIREMENTS, INVALID_TOX_LINES, TOX_INI
from ochrona.parser.requirements import RequirementsFile
class ToxFile:
    """Parser that extracts dependency specifiers from a ``tox.ini`` file."""

    @staticmethod
    def parse(file_path: str) -> List[str]:
        """
        Parses a tox.ini into a list of requirements.

        If a ``deps`` entry links to a requirements file (``-r`` style),
        that file is parsed instead and its dependencies are returned.

        :param file_path: tox.ini path
        :return: list<str> list of dependencies ['dependency==semvar']
        """
        dependencies: List[str] = []
        with open(file_path) as tox:
            parser = ConfigParser()
            parser.read_file(tox)
            for section in parser.sections():
                try:
                    deps = parser.get(section=section, option="deps")
                except NoOptionError:
                    # Section has no ``deps`` option; nothing to collect.
                    continue
                for line in deps.splitlines():
                    if line.startswith(TOX_LINKED_REQUIREMENTS):
                        # Delegate to the linked requirements file.
                        path = ToxFile._tox_path(file_path)
                        req_file_name = line.replace(TOX_LINKED_REQUIREMENTS, "")
                        return RequirementsFile.parse(f"{path}{req_file_name}")
                    if any(line.startswith(prefix) for prefix in INVALID_TOX_LINES):
                        # Line is tox syntax noise we deliberately skip.
                        continue
                    if ":" in line:
                        # Requirement is prefixed with an environment name.
                        dependencies.append(line.split(":")[-1].strip())
                    elif line:
                        # Plain requirement line (blank lines skipped).
                        dependencies.append(line)
        return dependencies

    @staticmethod
    def _tox_path(tox_file_path: str) -> str:
        """Return *tox_file_path* with the trailing 'tox.ini' removed."""
        return tox_file_path.replace(TOX_INI, "")
| 40.978723 | 86 | 0.526999 | from configparser import ConfigParser, NoOptionError
from io import StringIO
from typing import List
from ochrona.const import TOX_LINKED_REQUIREMENTS, INVALID_TOX_LINES, TOX_INI
from ochrona.parser.requirements import RequirementsFile
class ToxFile:
@staticmethod
def parse(file_path: str) -> List[str]:
dependencies = []
with open(file_path) as tox:
parser = ConfigParser()
parser.read_file(tox)
for section in parser.sections():
try:
deps = parser.get(section=section, option="deps")
for _, line in enumerate(deps.splitlines()):
if line.startswith(TOX_LINKED_REQUIREMENTS):
path = ToxFile._tox_path(file_path)
req_file_name = line.replace(TOX_LINKED_REQUIREMENTS, "")
return RequirementsFile.parse(f"{path}{req_file_name}")
elif not any([line.startswith(i) for i in INVALID_TOX_LINES]):
if ":" in line:
dependencies.append(line.split(":")[-1].strip())
else:
if line != "":
dependencies.append(line)
else:
pass
except NoOptionError:
pass
return dependencies
@staticmethod
def _tox_path(tox_file_path):
return tox_file_path.replace(TOX_INI, "")
| true | true |
f7f7f6697947b8ed87e964328d3534d73892a439 | 1,480 | py | Python | mgs/v1.2/logger.py | vt-rocksat-2017/dashboard | e99a71edc74dd8b7f3eec023c381524561a7b6e4 | [
"MIT"
] | 1 | 2017-08-09T19:57:38.000Z | 2017-08-09T19:57:38.000Z | vtgs/v1.1/logger.py | vt-rocksat-2017/dashboard | e99a71edc74dd8b7f3eec023c381524561a7b6e4 | [
"MIT"
] | null | null | null | vtgs/v1.1/logger.py | vt-rocksat-2017/dashboard | e99a71edc74dd8b7f3eec023c381524561a7b6e4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
# Logger utilities
import math, sys, os, time, struct, traceback, binascii, logging
import datetime as dt
class MyFormatter(logging.Formatter):
    """Log formatter that renders record timestamps in UTC.

    Overrides :meth:`logging.Formatter.formatTime` so ``record.created``
    (a POSIX timestamp) is interpreted as UTC rather than local time.
    """

    # Convert record.created to a UTC datetime instead of local time.
    converter = dt.datetime.utcfromtimestamp

    def formatTime(self, record, datefmt=None):
        moment = self.converter(record.created)
        if datefmt is None:
            # Default format mirrors logging's own: "...,mmm" milliseconds.
            base = moment.strftime("%Y-%m-%d %H:%M:%S")
            return "%s,%03d" % (base, record.msecs)
        return moment.strftime(datefmt)
def setup_logger(rx_id, log_name, startup_ts, level=logging.INFO):
    """Configure a file-backed logger and return its file handler.

    The log file is written under ``/captures/rocksat/`` and named from
    the receiver id, logger name, and startup timestamp.  Commented-out
    stream-handler code from the original has been removed.

    :param rx_id: receiver identifier, upper-cased into the file name
    :param log_name: logger name (also upper-cased into the file name)
    :param startup_ts: timestamp string identifying this run
    :param level: logging level for the logger, defaults to logging.INFO
    :return: the ``logging.FileHandler`` attached to the logger
    """
    logger = logging.getLogger(log_name)
    log_file = "{:s}_{:s}_{:s}.log".format(rx_id.upper(), log_name.upper(), startup_ts)
    # NOTE(review): capture directory is hard-coded; confirm it exists on target.
    log_path = '/captures/rocksat/' + log_file
    formatter = MyFormatter(fmt='%(asctime)s UTC,%(message)s',
                            datefmt='%Y-%m-%d %H:%M:%S.%f')
    file_handler = logging.FileHandler(log_path)
    file_handler.setFormatter(formatter)
    logger.setLevel(level)
    logger.addHandler(file_handler)
    logger.info('Logger Initialized')
    return file_handler
def get_uptime():
    """Return the system uptime in seconds (Linux only).

    Reads the first field of ``/proc/uptime``, which the kernel exposes
    as seconds since boot.
    """
    with open('/proc/uptime', 'r') as proc_file:
        first_field = proc_file.readline().split()[0]
    return float(first_field)
| 34.418605 | 93 | 0.662838 |
import math, sys, os, time, struct, traceback, binascii, logging
import datetime as dt
class MyFormatter(logging.Formatter):
converter=dt.datetime.utcfromtimestamp
def formatTime(self, record, datefmt=None):
ct = self.converter(record.created)
if datefmt:
s = ct.strftime(datefmt)
else:
t = ct.strftime("%Y-%m-%d %H:%M:%S")
s = "%s,%03d" % (t, record.msecs)
return s
def setup_logger(rx_id, log_name, startup_ts, level=logging.INFO):
l = logging.getLogger(log_name)
log_file = "{:s}_{:s}_{:s}.log".format(rx_id.upper(), log_name.upper(), startup_ts)
log_path = '/captures/rocksat/' + log_file
formatter = MyFormatter(fmt='%(asctime)s UTC,%(message)s',datefmt='%Y-%m-%d %H:%M:%S.%f')
fileHandler = logging.FileHandler(log_path)
fileHandler.setFormatter(formatter)
l.setLevel(level)
l.addHandler(fileHandler)
l.info('Logger Initialized')
return fileHandler
def get_uptime():
with open('/proc/uptime', 'r') as f:
uptime_seconds = float(f.readline().split()[0])
return uptime_seconds
| true | true |
f7f7f698fa9513a1d995e2de5d4c0e189995a51c | 623 | py | Python | kYPython/FluentPython/BasicLearn/OOP/Dynamic.py | kyaing/KDYSample | 6a09ef3f7dab18a71187cd81f7da2dd13cf7a4a5 | [
"MIT"
] | 10 | 2017-02-23T07:42:20.000Z | 2017-02-23T07:42:25.000Z | kYPython/FluentPython/BasicLearn/OOP/Dynamic.py | kaideyi/KDYSample | 6a09ef3f7dab18a71187cd81f7da2dd13cf7a4a5 | [
"MIT"
] | null | null | null | kYPython/FluentPython/BasicLearn/OOP/Dynamic.py | kaideyi/KDYSample | 6a09ef3f7dab18a71187cd81f7da2dd13cf7a4a5 | [
"MIT"
] | null | null | null | # coding: utf-8
import types
class Person(object):
# 用 __slots__ 限制了动态添加属性,只能调用定义的属性
# __slots__ = {'name', 'age'}
def __init__(self, name, age):
self.name = name
self.age = age
def eat(self):
print('---eating---')
def run(self):
print('---running---')
@staticmethod
def test():
print('---static method---')
@classmethod
def test2(cls):
print('---class method---')
p1 = Person("David", 18)
p1.addr = 'beijing'  # dynamically add an attribute to the instance
print(p1.addr)
p1.eat()


# Bug fix: ``run``, ``test`` and ``test2`` were never defined at module
# level, so the original demo raised NameError.  Define them here before
# binding them dynamically.
def run(self):
    print('---running---')


p1.run = types.MethodType(run, p1)  # dynamically add an instance method
p1.run()


def test():
    print('---static method---')


Person.test = staticmethod(test)  # dynamically add a static method
Person.test()


def test2(cls):
    print('---class method---')


# classmethod() wrapping is required so Python passes ``cls`` on the call.
Person.test2 = classmethod(test2)  # dynamically add a class method
Person.test2()
import types
class Person(object):
def __init__(self, name, age):
self.name = name
self.age = age
def eat(self):
print('---eating---')
def run(self):
print('---running---')
@staticmethod
def test():
print('---static method---')
@classmethod
def test2(cls):
print('---class method---')
p1 = Person("David", 18)
p1.addr = 'beijing'
print(p1.addr)
p1.eat()
p1.run = types.MethodType(run, p1)
p1.run()
Person.test = test
Person.test()
Person.test2 = test2
Person.test2() | true | true |
f7f7f89e24cf78c4be7d8140c66f6a0f01a90792 | 8,136 | py | Python | tests/test_layout.py | goodmami/penman | a6030a412cb40ae6f1ab5ccffb2010b36f3169f3 | [
"MIT"
] | 97 | 2016-11-17T16:48:44.000Z | 2022-03-28T09:57:39.000Z | tests/test_layout.py | goodmami/penman | a6030a412cb40ae6f1ab5ccffb2010b36f3169f3 | [
"MIT"
] | 94 | 2016-12-17T01:08:31.000Z | 2022-02-07T11:41:45.000Z | tests/test_layout.py | goodmami/penman | a6030a412cb40ae6f1ab5ccffb2010b36f3169f3 | [
"MIT"
] | 20 | 2017-02-19T15:51:57.000Z | 2021-11-12T19:24:57.000Z |
import random
import logging
import pytest
from penman.exceptions import LayoutError
from penman.model import Model
from penman.tree import Tree
from penman.graph import Graph
from penman.codec import PENMANCodec
from penman import layout
from penman.layout import (
interpret,
rearrange,
configure,
reconfigure,
get_pushed_variable,
appears_inverted,
node_contexts,
)
# Default codec and model shared by all tests in this module.
codec = PENMANCodec()
model = Model()
@pytest.fixture(scope='module')
def amr_model(mini_amr):
    """Build an AMR-like Model from the ``mini_amr`` fixture dictionary.

    ``mini_amr`` is presumably provided by a conftest.py fixture -- confirm.
    """
    return Model.from_dict(mini_amr)
def test_interpret(amr_model):
    """interpret() de-inverts -of roles according to the active model."""
    t = codec.parse('(a / A)')
    assert interpret(t) == Graph([('a', ':instance', 'A')], top='a')
    t = codec.parse('(a / A :consist-of (b / B))')
    # Default model: :consist-of is treated as the inversion of :consist,
    # so the stored triple is de-inverted to (b, :consist, a).
    assert interpret(t) == Graph(
        [('a', ':instance', 'A'),
         ('b', ':consist', 'a'),
         ('b', ':instance', 'B')],
        top='a')
    # AMR model: :consist-of is a normal role, kept as-is on 'a'.
    assert interpret(t, model=amr_model) == Graph(
        [('a', ':instance', 'A'),
         ('a', ':consist-of', 'b'),
         ('b', ':instance', 'B')],
        top='a')
def test_rearrange():
    """rearrange() reorders branches per the given sort key, in place."""
    # Seed the RNG so the random_order assertion below is deterministic.
    random.seed(1)
    t = codec.parse('''
        (a / alpha
            :ARG0 (b / beta
                      :ARG0 (g / gamma)
                      :ARG1 (d / delta))
            :ARG0-of d
            :ARG1 (e / epsilon))''')
    # original_order should be a no-op with respect to branch order.
    rearrange(t, model.original_order)
    assert codec.format(t) == (
        '(a / alpha\n'
        '   :ARG0 (b / beta\n'
        '            :ARG0 (g / gamma)\n'
        '            :ARG1 (d / delta))\n'
        '   :ARG0-of d\n'
        '   :ARG1 (e / epsilon))')
    rearrange(t, model.random_order)
    assert codec.format(t) == (
        '(a / alpha\n'
        '   :ARG0-of d\n'
        '   :ARG1 (e / epsilon)\n'
        '   :ARG0 (b / beta\n'
        '            :ARG0 (g / gamma)\n'
        '            :ARG1 (d / delta)))')
    # canonical_order restores role-based ordering (inversions last).
    rearrange(t, model.canonical_order)
    assert codec.format(t) == (
        '(a / alpha\n'
        '   :ARG0 (b / beta\n'
        '            :ARG0 (g / gamma)\n'
        '            :ARG1 (d / delta))\n'
        '   :ARG1 (e / epsilon)\n'
        '   :ARG0-of d)')
def test_configure(amr_model):
    """configure() builds a tree from a graph, honoring top and model."""
    g = codec.decode('(a / A)')
    assert configure(g) == Tree(('a', [('/', 'A')]))
    # 'A' is a concept, not a variable, so it cannot be the top.
    with pytest.raises(LayoutError):
        configure(g, top='A')
    g = codec.decode('(a / A :consist-of (b / B))')
    assert configure(g) == Tree(
        ('a', [('/', 'A'),
               (':consist-of', ('b', [('/', 'B')]))]))
    # Re-topping at 'b' re-inverts the role under the default model.
    assert configure(g, top='b') == Tree(
        ('b', [('/', 'B'),
               (':consist', ('a', [('/', 'A')]))]))
    amr_codec = PENMANCodec(model=amr_model)
    g = amr_codec.decode('(a / A :consist-of (b / B))')
    assert configure(g, model=amr_model) == Tree(
        ('a', [('/', 'A'),
               (':consist-of', ('b', [('/', 'B')]))]))
    # Under the AMR model :consist-of is a plain role, so inverting it
    # for the new top appends another -of.
    assert configure(g, top='b', model=amr_model) == Tree(
        ('b', [('/', 'B'),
               (':consist-of-of', ('a', [('/', 'A')]))]))
def test_issue_34():
    """Repeated attributes (duplicate :polarity -) must all be kept."""
    # https://github.com/goodmami/penman/issues/34
    g = codec.decode('''
        # ::snt I think you failed to not not act.
        (t / think
           :ARG0 (i / i)
           :ARG1 (f / fail
                    :ARG0 (y / you)
                    :ARG1 (a / act
                             :polarity -
                             :polarity -)))''')
    assert configure(g) == Tree(
        ('t', [('/', 'think'),
               (':ARG0', ('i', [('/', 'i')])),
               (':ARG1', ('f', [('/', 'fail'),
                                (':ARG0', ('y', [('/', 'you')])),
                                (':ARG1', ('a', [('/', 'act'),
                                                 (':polarity', '-'),
                                                 (':polarity', '-')]))]))]))
def test_issue_85(monkeypatch, caplog):
    """Encoding must not warn about ignored POP markers after POP is rebound."""
    # https://github.com/goodmami/penman/issues/85
    # Emulate multiprocessing by reassigning POP
    with monkeypatch.context() as m:
        m.setattr(layout, 'POP', layout.Pop())
        g = codec.decode('(a / alpha :ARG0 (b / beta))')
        caplog.set_level(logging.WARNING)
        codec.encode(g, indent=None)
    assert 'epigraphical marker ignored: POP' not in caplog.text
def test_reconfigure():
    """reconfigure() rebuilds the tree ignoring the original layout markers."""
    g = codec.decode('''
        (a / alpha
            :ARG0 b
            :ARG1 (g / gamma
                     :ARG0-of (b / beta)))''')
    # original order reconfiguration puts node definitions at first
    # appearance of a variable
    assert reconfigure(g) == Tree(
        ('a', [('/', 'alpha'),
               (':ARG0', ('b', [('/', 'beta')])),
               (':ARG1', ('g', [('/', 'gamma'),
                                (':ARG0-of', 'b')]))]))
    # canonical order reconfiguration can also shift things like
    # inverted arguments
    assert reconfigure(g, key=model.canonical_order) == Tree(
        ('a', [('/', 'alpha'),
               (':ARG0', ('b', [('/', 'beta'),
                                (':ARG0', ('g', [('/', 'gamma')]))])),
               (':ARG1', 'g')]))
def test_issue_90():
    """A concept that collides with a variable name ('i') must not confuse
    reconfiguration."""
    # https://github.com/goodmami/penman/issues/90
    g = Graph([('i', ':instance', 'iota'),
               ('i2', ':instance', 'i'),
               ('i', ':ARG0', 'i2')],
              top='i')
    assert reconfigure(g) == Tree(
        ('i', [('/', 'iota'),
               (':ARG0', ('i2', [('/', 'i')]))]))
def test_get_pushed_variable():
    """get_pushed_variable() returns the variable pushed by a triple, or None."""
    g = codec.decode('''
        (a / alpha
            :ARG0 (b / beta)
            :ARG1-of (g / gamma))''')
    # Instance triples never push a variable.
    assert get_pushed_variable(g, ('a', ':instance', 'alpha')) is None
    assert get_pushed_variable(g, ('a', ':ARG0', 'b')) == 'b'
    # For an inverted branch the pushed variable is the triple's source.
    assert get_pushed_variable(g, ('g', ':ARG1', 'a')) == 'g'
def test_appears_inverted():
    """appears_inverted() reflects how a triple was serialized, not its role."""
    g = codec.decode('''
        (a / alpha
            :ARG0 (b / beta)
            :ARG1-of (g / gamma))''')
    assert not appears_inverted(g, ('a', ':instance', 'alpha'))
    assert not appears_inverted(g, ('a', ':ARG0', 'b'))
    # :ARG1-of in the surface string means the pure triple is inverted.
    assert appears_inverted(g, ('g', ':ARG1', 'a'))
def test_issue_47():
    """Inversion detection must work for both node and variable targets."""
    # https://github.com/goodmami/penman/issues/47
    g = codec.decode('''
        (a / alpha
            :ARG0 (b / beta)
            :ARG1 (g / gamma
                     :ARG0 (d / delta)
                     :ARG1-of (e / epsilon)
                     :ARG1-of b))''')
    assert not appears_inverted(g, ('a', ':ARG0', 'b'))
    assert not appears_inverted(g, ('g', ':ARG0', 'd'))
    # Inverted with a node target...
    assert appears_inverted(g, ('e', ':ARG1', 'g'))
    # ...and inverted with a bare variable target.
    assert appears_inverted(g, ('b', ':ARG1', 'g'))
def test_issue_87():
    """Graphs with duplicate triples must not crash inversion checks/encoding."""
    # https://github.com/goodmami/penman/issues/87
    # The duplicate triple (i, :ARG0, c) below means the graph is bad
    # so the output is not guaranteed. Just check for errors.
    g = codec.decode('(c / company :ARG0-of (i / insure-02 :ARG0 c))')
    appears_inverted(g, ('i', ':ARG0', 'c'))
    codec.encode(g)
    g = codec.decode('(c / company :ARG0-of i :ARG0-of (i / insure-02))')
    appears_inverted(g, ('i', ':ARG0', 'c'))
    codec.encode(g)
def test_node_contexts():
    """node_contexts() yields the containing node variable for each triple."""
    g = codec.decode('(a / alpha)')
    assert node_contexts(g) == ['a']
    # note here and below: the first 'a' is for ('a', ':instance', None)
    g = codec.decode('(a :ARG0 (b / beta))')
    assert node_contexts(g) == ['a', 'a', 'b']
    g = codec.decode('(a :ARG0-of (b / beta))')
    assert node_contexts(g) == ['a', 'a', 'b']
    # also ('b', ':instance', None) here
    g = codec.decode('(a :ARG0 (b) :ARG1 (g / gamma))')
    assert node_contexts(g) == ['a', 'a', 'b', 'a', 'g']
def test_issue_92():
    """Surface alignments (~e.N) must survive configuration and re-topping."""
    # https://github.com/goodmami/penman/issues/92
    g = codec.decode('(a / alpha :ARG0~e.0 (b / beta))')
    assert configure(g) == Tree(
        ('a', [('/', 'alpha'),
               (':ARG0~e.0', ('b', [('/', 'beta')]))]))
    # The alignment stays attached to the role even after inversion.
    assert configure(g, top='b') == Tree(
        ('b', [('/', 'beta'),
               (':ARG0-of~e.0', ('a', [('/', 'alpha')]))]))
def test_issue_93():
    """A triple appended after decoding must be placed at the variable's node."""
    # https://github.com/goodmami/penman/issues/93
    g = codec.decode('(a / alpha :ARG0 b~1)')
    g.triples.append(('b', ':instance', 'beta'))
    assert configure(g) == Tree(
        ('a', [('/', 'alpha'),
               (':ARG0', ('b', [('/', 'beta')]))]))
| 31.292308 | 76 | 0.468781 |
import random
import logging
import pytest
from penman.exceptions import LayoutError
from penman.model import Model
from penman.tree import Tree
from penman.graph import Graph
from penman.codec import PENMANCodec
from penman import layout
from penman.layout import (
interpret,
rearrange,
configure,
reconfigure,
get_pushed_variable,
appears_inverted,
node_contexts,
)
codec = PENMANCodec()
model = Model()
@pytest.fixture(scope='module')
def amr_model(mini_amr):
return Model.from_dict(mini_amr)
def test_interpret(amr_model):
t = codec.parse('(a / A)')
assert interpret(t) == Graph([('a', ':instance', 'A')], top='a')
t = codec.parse('(a / A :consist-of (b / B))')
assert interpret(t) == Graph(
[('a', ':instance', 'A'),
('b', ':consist', 'a'),
('b', ':instance', 'B')],
top='a')
assert interpret(t, model=amr_model) == Graph(
[('a', ':instance', 'A'),
('a', ':consist-of', 'b'),
('b', ':instance', 'B')],
top='a')
def test_rearrange():
random.seed(1)
t = codec.parse('''
(a / alpha
:ARG0 (b / beta
:ARG0 (g / gamma)
:ARG1 (d / delta))
:ARG0-of d
:ARG1 (e / epsilon))''')
rearrange(t, model.original_order)
assert codec.format(t) == (
'(a / alpha\n'
' :ARG0 (b / beta\n'
' :ARG0 (g / gamma)\n'
' :ARG1 (d / delta))\n'
' :ARG0-of d\n'
' :ARG1 (e / epsilon))')
rearrange(t, model.random_order)
assert codec.format(t) == (
'(a / alpha\n'
' :ARG0-of d\n'
' :ARG1 (e / epsilon)\n'
' :ARG0 (b / beta\n'
' :ARG0 (g / gamma)\n'
' :ARG1 (d / delta)))')
rearrange(t, model.canonical_order)
assert codec.format(t) == (
'(a / alpha\n'
' :ARG0 (b / beta\n'
' :ARG0 (g / gamma)\n'
' :ARG1 (d / delta))\n'
' :ARG1 (e / epsilon)\n'
' :ARG0-of d)')
def test_configure(amr_model):
g = codec.decode('(a / A)')
assert configure(g) == Tree(('a', [('/', 'A')]))
with pytest.raises(LayoutError):
configure(g, top='A')
g = codec.decode('(a / A :consist-of (b / B))')
assert configure(g) == Tree(
('a', [('/', 'A'),
(':consist-of', ('b', [('/', 'B')]))]))
assert configure(g, top='b') == Tree(
('b', [('/', 'B'),
(':consist', ('a', [('/', 'A')]))]))
amr_codec = PENMANCodec(model=amr_model)
g = amr_codec.decode('(a / A :consist-of (b / B))')
assert configure(g, model=amr_model) == Tree(
('a', [('/', 'A'),
(':consist-of', ('b', [('/', 'B')]))]))
assert configure(g, top='b', model=amr_model) == Tree(
('b', [('/', 'B'),
(':consist-of-of', ('a', [('/', 'A')]))]))
def test_issue_34():
g = codec.decode('''
# ::snt I think you failed to not not act.
(t / think
:ARG0 (i / i)
:ARG1 (f / fail
:ARG0 (y / you)
:ARG1 (a / act
:polarity -
:polarity -)))''')
assert configure(g) == Tree(
('t', [('/', 'think'),
(':ARG0', ('i', [('/', 'i')])),
(':ARG1', ('f', [('/', 'fail'),
(':ARG0', ('y', [('/', 'you')])),
(':ARG1', ('a', [('/', 'act'),
(':polarity', '-'),
(':polarity', '-')]))]))]))
def test_issue_85(monkeypatch, caplog):
with monkeypatch.context() as m:
m.setattr(layout, 'POP', layout.Pop())
g = codec.decode('(a / alpha :ARG0 (b / beta))')
caplog.set_level(logging.WARNING)
codec.encode(g, indent=None)
assert 'epigraphical marker ignored: POP' not in caplog.text
def test_reconfigure():
g = codec.decode('''
(a / alpha
:ARG0 b
:ARG1 (g / gamma
:ARG0-of (b / beta)))''')
assert reconfigure(g) == Tree(
('a', [('/', 'alpha'),
(':ARG0', ('b', [('/', 'beta')])),
(':ARG1', ('g', [('/', 'gamma'),
(':ARG0-of', 'b')]))]))
assert reconfigure(g, key=model.canonical_order) == Tree(
('a', [('/', 'alpha'),
(':ARG0', ('b', [('/', 'beta'),
(':ARG0', ('g', [('/', 'gamma')]))])),
(':ARG1', 'g')]))
def test_issue_90():
g = Graph([('i', ':instance', 'iota'),
('i2', ':instance', 'i'),
('i', ':ARG0', 'i2')],
top='i')
assert reconfigure(g) == Tree(
('i', [('/', 'iota'),
(':ARG0', ('i2', [('/', 'i')]))]))
def test_get_pushed_variable():
g = codec.decode('''
(a / alpha
:ARG0 (b / beta)
:ARG1-of (g / gamma))''')
assert get_pushed_variable(g, ('a', ':instance', 'alpha')) is None
assert get_pushed_variable(g, ('a', ':ARG0', 'b')) == 'b'
assert get_pushed_variable(g, ('g', ':ARG1', 'a')) == 'g'
def test_appears_inverted():
g = codec.decode('''
(a / alpha
:ARG0 (b / beta)
:ARG1-of (g / gamma))''')
assert not appears_inverted(g, ('a', ':instance', 'alpha'))
assert not appears_inverted(g, ('a', ':ARG0', 'b'))
assert appears_inverted(g, ('g', ':ARG1', 'a'))
def test_issue_47():
g = codec.decode('''
(a / alpha
:ARG0 (b / beta)
:ARG1 (g / gamma
:ARG0 (d / delta)
:ARG1-of (e / epsilon)
:ARG1-of b))''')
assert not appears_inverted(g, ('a', ':ARG0', 'b'))
assert not appears_inverted(g, ('g', ':ARG0', 'd'))
assert appears_inverted(g, ('e', ':ARG1', 'g'))
assert appears_inverted(g, ('b', ':ARG1', 'g'))
def test_issue_87():
g = codec.decode('(c / company :ARG0-of (i / insure-02 :ARG0 c))')
appears_inverted(g, ('i', ':ARG0', 'c'))
codec.encode(g)
g = codec.decode('(c / company :ARG0-of i :ARG0-of (i / insure-02))')
appears_inverted(g, ('i', ':ARG0', 'c'))
codec.encode(g)
def test_node_contexts():
g = codec.decode('(a / alpha)')
assert node_contexts(g) == ['a']
g = codec.decode('(a :ARG0 (b / beta))')
assert node_contexts(g) == ['a', 'a', 'b']
g = codec.decode('(a :ARG0-of (b / beta))')
assert node_contexts(g) == ['a', 'a', 'b']
g = codec.decode('(a :ARG0 (b) :ARG1 (g / gamma))')
assert node_contexts(g) == ['a', 'a', 'b', 'a', 'g']
def test_issue_92():
g = codec.decode('(a / alpha :ARG0~e.0 (b / beta))')
assert configure(g) == Tree(
('a', [('/', 'alpha'),
(':ARG0~e.0', ('b', [('/', 'beta')]))]))
assert configure(g, top='b') == Tree(
('b', [('/', 'beta'),
(':ARG0-of~e.0', ('a', [('/', 'alpha')]))]))
def test_issue_93():
g = codec.decode('(a / alpha :ARG0 b~1)')
g.triples.append(('b', ':instance', 'beta'))
assert configure(g) == Tree(
('a', [('/', 'alpha'),
(':ARG0', ('b', [('/', 'beta')]))]))
| true | true |
f7f7f98a217d70a3deb957c5277285a94b3f466d | 60 | py | Python | test_xixi/__init__.py | jhfwb/test_x | 7db77408b2921698a48ea83742d4dc6e4d460611 | [
"MIT"
] | null | null | null | test_xixi/__init__.py | jhfwb/test_x | 7db77408b2921698a48ea83742d4dc6e4d460611 | [
"MIT"
] | null | null | null | test_xixi/__init__.py | jhfwb/test_x | 7db77408b2921698a48ea83742d4dc6e4d460611 | [
"MIT"
] | null | null | null | from test_xixi.main import xixi_class
__all__=['xixi_class'] | 30 | 37 | 0.833333 | from test_xixi.main import xixi_class
__all__=['xixi_class'] | true | true |
f7f7fb49a9bb6447630ed7b0b8cfa3a1d27ecdcb | 13,412 | py | Python | videointelligence/google/cloud/videointelligence_v1p1beta1/gapic/video_intelligence_service_client.py | deryrahman/google-cloud-python | b55058c4b2328fde32f29bfd8ea04708fcc578e0 | [
"Apache-2.0"
] | null | null | null | videointelligence/google/cloud/videointelligence_v1p1beta1/gapic/video_intelligence_service_client.py | deryrahman/google-cloud-python | b55058c4b2328fde32f29bfd8ea04708fcc578e0 | [
"Apache-2.0"
] | null | null | null | videointelligence/google/cloud/videointelligence_v1p1beta1/gapic/video_intelligence_service_client.py | deryrahman/google-cloud-python | b55058c4b2328fde32f29bfd8ea04708fcc578e0 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.cloud.videointelligence.v1p1beta1 VideoIntelligenceService API."""
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import google.api_core.operation
import google.api_core.operations_v1
import grpc
from google.cloud.videointelligence_v1p1beta1.gapic import enums
from google.cloud.videointelligence_v1p1beta1.gapic import video_intelligence_service_client_config
from google.cloud.videointelligence_v1p1beta1.gapic.transports import video_intelligence_service_grpc_transport
from google.cloud.videointelligence_v1p1beta1.proto import video_intelligence_pb2
from google.cloud.videointelligence_v1p1beta1.proto import video_intelligence_pb2_grpc
from google.longrunning import operations_pb2
# Version string reported in the user-agent for telemetry.
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
    'google-cloud-videointelligence', ).version
class VideoIntelligenceServiceClient(object):
    """Service that implements Google Cloud Video Intelligence API."""
    SERVICE_ADDRESS = 'videointelligence.googleapis.com:443'
    """The default address of the service."""
    # The name of the interface for this client. This is the key used to
    # find the method configuration in the client_config dictionary.
    _INTERFACE_NAME = 'google.cloud.videointelligence.v1p1beta1.VideoIntelligenceService'
    @classmethod
    def from_service_account_file(cls, filename, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.
        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.
        Returns:
            VideoIntelligenceServiceClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(
            filename)
        kwargs['credentials'] = credentials
        return cls(*args, **kwargs)
    # Backwards-compatible alias: service account keys are JSON files.
    from_service_account_json = from_service_account_file
    def __init__(self,
                 transport=None,
                 channel=None,
                 credentials=None,
                 client_config=video_intelligence_service_client_config.config,
                 client_info=None):
        """Constructor.
        Args:
            transport (Union[~.VideoIntelligenceServiceGrpcTransport,
                    Callable[[~.Credentials, type], ~.VideoIntelligenceServiceGrpcTransport]): A transport
                instance, responsible for actually making the API calls.
                The default transport uses the gRPC protocol.
                This argument may also be a callable which returns a
                transport instance. Callables will be sent the credentials
                as the first argument and the default transport class as
                the second argument.
            channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
                through which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is mutually exclusive with providing a
                transport instance to ``transport``; doing so will raise
                an exception.
            client_config (dict): DEPRECATED. A dictionary of call options for
                each method. If not specified, the default configuration is used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        """
        # Raise deprecation warnings for things we want to go away.
        if client_config:
            warnings.warn('The `client_config` argument is deprecated.',
                          PendingDeprecationWarning)
        if channel:
            warnings.warn(
                'The `channel` argument is deprecated; use '
                '`transport` instead.', PendingDeprecationWarning)
        # Instantiate the transport.
        # The transport is responsible for handling serialization and
        # deserialization and actually sending data to the service.
        if transport:
            if callable(transport):
                self.transport = transport(
                    credentials=credentials,
                    default_class=video_intelligence_service_grpc_transport.
                    VideoIntelligenceServiceGrpcTransport,
                )
            else:
                if credentials:
                    raise ValueError(
                        'Received both a transport instance and '
                        'credentials; these are mutually exclusive.')
                self.transport = transport
        else:
            self.transport = video_intelligence_service_grpc_transport.VideoIntelligenceServiceGrpcTransport(
                address=self.SERVICE_ADDRESS,
                channel=channel,
                credentials=credentials,
            )
        if client_info is None:
            client_info = (
                google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO)
        client_info.gapic_version = _GAPIC_LIBRARY_VERSION
        self._client_info = client_info
        # Parse out the default settings for retry and timeout for each RPC
        # from the client configuration.
        # (Ordinarily, these are the defaults specified in the `*_config.py`
        # file next to this one.)
        self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
            client_config['interfaces'][self._INTERFACE_NAME], )
        # Save a dictionary of cached API call functions.
        # These are the actual callables which invoke the proper
        # transport methods, wrapped with `wrap_method` to add retry,
        # timeout, and the like.
        self._inner_api_calls = {}
    # Service calls
    def annotate_video(self,
                       input_uri=None,
                       input_content=None,
                       features=None,
                       video_context=None,
                       output_uri=None,
                       location_id=None,
                       retry=google.api_core.gapic_v1.method.DEFAULT,
                       timeout=google.api_core.gapic_v1.method.DEFAULT,
                       metadata=None):
        """
        Performs asynchronous video annotation. Progress and results can be
        retrieved through the ``google.longrunning.Operations`` interface.
        ``Operation.metadata`` contains ``AnnotateVideoProgress`` (progress).
        ``Operation.response`` contains ``AnnotateVideoResponse`` (results).
        Example:
            >>> from google.cloud import videointelligence_v1p1beta1
            >>> from google.cloud.videointelligence_v1p1beta1 import enums
            >>>
            >>> client = videointelligence_v1p1beta1.VideoIntelligenceServiceClient()
            >>>
            >>> input_uri = 'gs://demomaker/cat.mp4'
            >>> features_element = enums.Feature.LABEL_DETECTION
            >>> features = [features_element]
            >>>
            >>> response = client.annotate_video(input_uri=input_uri, features=features)
            >>>
            >>> def callback(operation_future):
            ...     # Handle result.
            ...     result = operation_future.result()
            >>>
            >>> response.add_done_callback(callback)
            >>>
            >>> # Handle metadata.
            >>> metadata = response.metadata()
        Args:
            input_uri (str): Input video location. Currently, only
                `Google Cloud Storage <https://cloud.google.com/storage/>`_ URIs are
                supported, which must be specified in the following format:
                ``gs://bucket-id/object-id`` (other URI formats return
                ``google.rpc.Code.INVALID_ARGUMENT``). For more information, see
                `Request URIs <https://cloud.google.com/storage/docs/reference-uris>`_.
                A video URI may include wildcards in ``object-id``, and thus identify
                multiple videos. Supported wildcards: '*' to match 0 or more characters;
                '?' to match 1 character. If unset, the input video should be embedded
                in the request as ``input_content``. If set, ``input_content`` should be unset.
            input_content (bytes): The video data bytes.
                If unset, the input video(s) should be specified via ``input_uri``.
                If set, ``input_uri`` should be unset.
            features (list[~google.cloud.videointelligence_v1p1beta1.types.Feature]): Requested video annotation features.
            video_context (Union[dict, ~google.cloud.videointelligence_v1p1beta1.types.VideoContext]): Additional video context and/or feature-specific parameters.
                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.videointelligence_v1p1beta1.types.VideoContext`
            output_uri (str): Optional location where the output (in JSON format) should be stored.
                Currently, only `Google Cloud Storage <https://cloud.google.com/storage/>`_
                URIs are supported, which must be specified in the following format:
                ``gs://bucket-id/object-id`` (other URI formats return
                ``google.rpc.Code.INVALID_ARGUMENT``). For more information, see
                `Request URIs <https://cloud.google.com/storage/docs/reference-uris>`_.
            location_id (str): Optional cloud region where annotation should take place. Supported cloud
                regions: ``us-east1``, ``us-west1``, ``europe-west1``, ``asia-east1``. If no region
                is specified, a region will be determined based on video file location.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will not
                be retried.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.
        Returns:
            A :class:`~google.cloud.videointelligence_v1p1beta1.types._OperationFuture` instance.
        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if 'annotate_video' not in self._inner_api_calls:
            self._inner_api_calls[
                'annotate_video'] = google.api_core.gapic_v1.method.wrap_method(
                    self.transport.annotate_video,
                    default_retry=self._method_configs['AnnotateVideo'].retry,
                    default_timeout=self._method_configs['AnnotateVideo'].
                    timeout,
                    client_info=self._client_info,
                )
        request = video_intelligence_pb2.AnnotateVideoRequest(
            input_uri=input_uri,
            input_content=input_content,
            features=features,
            video_context=video_context,
            output_uri=output_uri,
            location_id=location_id,
        )
        # Issue the RPC; the raw response is a long-running operation message.
        operation = self._inner_api_calls['annotate_video'](
            request, retry=retry, timeout=timeout, metadata=metadata)
        # Wrap it in a future that knows how to deserialize the final
        # response and the progress metadata.
        return google.api_core.operation.from_gapic(
            operation,
            self.transport._operations_client,
            video_intelligence_pb2.AnnotateVideoResponse,
            metadata_type=video_intelligence_pb2.AnnotateVideoProgress,
        )
| 49.674074 | 163 | 0.642261 |
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import google.api_core.operation
import google.api_core.operations_v1
import grpc
from google.cloud.videointelligence_v1p1beta1.gapic import enums
from google.cloud.videointelligence_v1p1beta1.gapic import video_intelligence_service_client_config
from google.cloud.videointelligence_v1p1beta1.gapic.transports import video_intelligence_service_grpc_transport
from google.cloud.videointelligence_v1p1beta1.proto import video_intelligence_pb2
from google.cloud.videointelligence_v1p1beta1.proto import video_intelligence_pb2_grpc
from google.longrunning import operations_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
'google-cloud-videointelligence', ).version
class VideoIntelligenceServiceClient(object):
SERVICE_ADDRESS = 'videointelligence.googleapis.com:443'
_INTERFACE_NAME = 'google.cloud.videointelligence.v1p1beta1.VideoIntelligenceService'
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
credentials = service_account.Credentials.from_service_account_file(
filename)
kwargs['credentials'] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
def __init__(self,
transport=None,
channel=None,
credentials=None,
client_config=video_intelligence_service_client_config.config,
client_info=None):
if client_config:
warnings.warn('The `client_config` argument is deprecated.',
PendingDeprecationWarning)
if channel:
warnings.warn(
'The `channel` argument is deprecated; use '
'`transport` instead.', PendingDeprecationWarning)
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=video_intelligence_service_grpc_transport.
VideoIntelligenceServiceGrpcTransport,
)
else:
if credentials:
raise ValueError(
'Received both a transport instance and '
'credentials; these are mutually exclusive.')
self.transport = transport
else:
self.transport = video_intelligence_service_grpc_transport.VideoIntelligenceServiceGrpcTransport(
address=self.SERVICE_ADDRESS,
channel=channel,
credentials=credentials,
)
if client_info is None:
client_info = (
google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO)
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config['interfaces'][self._INTERFACE_NAME], )
self._inner_api_calls = {}
def annotate_video(self,
input_uri=None,
input_content=None,
features=None,
video_context=None,
output_uri=None,
location_id=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
if 'annotate_video' not in self._inner_api_calls:
self._inner_api_calls[
'annotate_video'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.annotate_video,
default_retry=self._method_configs['AnnotateVideo'].retry,
default_timeout=self._method_configs['AnnotateVideo'].
timeout,
client_info=self._client_info,
)
request = video_intelligence_pb2.AnnotateVideoRequest(
input_uri=input_uri,
input_content=input_content,
features=features,
video_context=video_context,
output_uri=output_uri,
location_id=location_id,
)
operation = self._inner_api_calls['annotate_video'](
request, retry=retry, timeout=timeout, metadata=metadata)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
video_intelligence_pb2.AnnotateVideoResponse,
metadata_type=video_intelligence_pb2.AnnotateVideoProgress,
)
| true | true |
f7f7fd30ae8456a6a259690f14028a754355799b | 2,981 | py | Python | tests/integration/test_stdout_exit_codes.py | simaishi/ansible-navigator | 901bc7be11c44ed1aa61340a42916a7b572302d9 | [
"Apache-2.0",
"MIT"
] | null | null | null | tests/integration/test_stdout_exit_codes.py | simaishi/ansible-navigator | 901bc7be11c44ed1aa61340a42916a7b572302d9 | [
"Apache-2.0",
"MIT"
] | null | null | null | tests/integration/test_stdout_exit_codes.py | simaishi/ansible-navigator | 901bc7be11c44ed1aa61340a42916a7b572302d9 | [
"Apache-2.0",
"MIT"
] | null | null | null | """ check return codes from mode stdout
"""
import os
from typing import NamedTuple
from typing import Tuple
import pytest
from ..defaults import DEFAULT_CONTAINER_IMAGE
from ..defaults import FIXTURES_DIR
# Known-good playbook used by the successful ``run`` test case below.
PLAYBOOK = os.path.join(FIXTURES_DIR, "integration", "stdout_exit_codes", "site.yml")
@pytest.fixture(name="params")
def fixture_params(request):
    """Build the keyword arguments shared by every action run.

    ``request.param`` carries the ``execution_environment`` flag supplied
    indirectly by the test parametrization at the bottom of this module.
    """
    return {
        "execution_environment": request.param,
        "execution_environment_image": DEFAULT_CONTAINER_IMAGE,
    }
def id_ee(value):
    """Return the pytest id string for an ``execution_environment`` value.

    :param value: The (boolean) execution-environment flag being tested.
    :returns: A readable id of the form ``execution_environment=<value>``.
    """
    return "execution_environment={}".format(value)
def id_test_data(value):
    """Return the pytest id string for one stdout test case.

    :param value: A record exposing ``action_name`` and ``return_code``
        attributes (a :class:`StdoutTest` in practice).
    :returns: A readable id of the form ``action=<name> return=<code>``.
    """
    return "action={} return={}".format(value.action_name, value.return_code)
class StdoutTest(NamedTuple):
    """One stdout-mode test case: an action, its params, and expectations."""
    # Name of the navigator action (subcommand) under test.
    action_name: str
    # (name, value) pairs converted to keyword arguments for the action run.
    action_params: Tuple[Tuple, ...]
    # Substring expected in the output stream for this case.
    message: str
    # Expected process return code.
    return_code: int
# Test matrix: for each action, one succeeding invocation (return code 0)
# and one failing invocation (return code 1), each with a distinctive
# message expected in the output.
test_datas = (
    StdoutTest(
        action_name="config",
        action_params=(("cmdline", ["--help"]),),
        message="usage: ansible-config",
        return_code=0,
    ),
    StdoutTest(
        action_name="config",
        action_params=(("cmdline", ["foo"]),),
        message="invalid choice: 'foo'",
        return_code=1,
    ),
    StdoutTest(
        action_name="doc",
        action_params=(("cmdline", ["--help"]),),
        message="usage: ansible-doc",
        return_code=0,
    ),
    StdoutTest(
        action_name="doc",
        action_params=(("cmdline", ["--json"]),),
        message="Incorrect options passed",
        return_code=1,
    ),
    StdoutTest(
        action_name="inventory",
        action_params=(("cmdline", ["--help"]),),
        message="usage: ansible-inventory",
        return_code=0,
    ),
    StdoutTest(
        action_name="inventory",
        action_params=(("cmdline", ["foo"]),),
        message="No action selected",
        return_code=1,
    ),
    # The run action uses a real fixture playbook for the success case...
    StdoutTest(
        action_name="run",
        action_params=(("playbook", PLAYBOOK), ("playbook_artifact_enable", False)),
        message="success",
        return_code=0,
    ),
    # ...and a nonexistent playbook path for the failure case.
    StdoutTest(
        action_name="run",
        action_params=(("playbook", "foo"), ("playbook_artifact_enable", False)),
        message="foo could not be found",
        return_code=1,
    ),
)
@pytest.mark.parametrize("params", [True, False], indirect=["params"], ids=id_ee)
@pytest.mark.parametrize("test_data", test_datas, ids=id_test_data)
def test(action_run_stdout, params, test_data):
    """Run one stdout-mode action and check its return code and message."""
    actionruntest = action_run_stdout(action_name=test_data.action_name, **params)
    ret, out, err = actionruntest.run_action_stdout(**dict(test_data.action_params))
    assert ret == test_data.return_code
    if test_data.return_code == 0:
        assert test_data.message in out, (test_data.message, out, err)
    else:
        # NOTE(review): on failure, the message is searched in stdout when
        # running inside an execution environment but in stderr otherwise --
        # presumably the EE runner merges the streams; confirm.
        std_stream = out if params["execution_environment"] else err
        assert test_data.message in std_stream, (test_data.message, out, err)
| 27.601852 | 85 | 0.635693 | import os
from typing import NamedTuple
from typing import Tuple
import pytest
from ..defaults import DEFAULT_CONTAINER_IMAGE
from ..defaults import FIXTURES_DIR
PLAYBOOK = os.path.join(FIXTURES_DIR, "integration", "stdout_exit_codes", "site.yml")
@pytest.fixture(name="params")
def fixture_params(request):
return {
"execution_environment": request.param,
"execution_environment_image": DEFAULT_CONTAINER_IMAGE,
}
def id_ee(value):
return f"execution_environment={value}"
def id_test_data(value):
return f"action={value.action_name} return={value.return_code}"
class StdoutTest(NamedTuple):
action_name: str
action_params: Tuple[Tuple, ...]
message: str
return_code: int
test_datas = (
StdoutTest(
action_name="config",
action_params=(("cmdline", ["--help"]),),
message="usage: ansible-config",
return_code=0,
),
StdoutTest(
action_name="config",
action_params=(("cmdline", ["foo"]),),
message="invalid choice: 'foo'",
return_code=1,
),
StdoutTest(
action_name="doc",
action_params=(("cmdline", ["--help"]),),
message="usage: ansible-doc",
return_code=0,
),
StdoutTest(
action_name="doc",
action_params=(("cmdline", ["--json"]),),
message="Incorrect options passed",
return_code=1,
),
StdoutTest(
action_name="inventory",
action_params=(("cmdline", ["--help"]),),
message="usage: ansible-inventory",
return_code=0,
),
StdoutTest(
action_name="inventory",
action_params=(("cmdline", ["foo"]),),
message="No action selected",
return_code=1,
),
StdoutTest(
action_name="run",
action_params=(("playbook", PLAYBOOK), ("playbook_artifact_enable", False)),
message="success",
return_code=0,
),
StdoutTest(
action_name="run",
action_params=(("playbook", "foo"), ("playbook_artifact_enable", False)),
message="foo could not be found",
return_code=1,
),
)
@pytest.mark.parametrize("params", [True, False], indirect=["params"], ids=id_ee)
@pytest.mark.parametrize("test_data", test_datas, ids=id_test_data)
def test(action_run_stdout, params, test_data):
actionruntest = action_run_stdout(action_name=test_data.action_name, **params)
ret, out, err = actionruntest.run_action_stdout(**dict(test_data.action_params))
assert ret == test_data.return_code
if test_data.return_code == 0:
assert test_data.message in out, (test_data.message, out, err)
else:
std_stream = out if params["execution_environment"] else err
assert test_data.message in std_stream, (test_data.message, out, err)
| true | true |
f7f7fd71343031751c75ef71fcf0aeed78e5c18c | 6,298 | py | Python | fuzzer/modules/py_parser.py | FChikh/REST-API-Fuzzer | cbbb03dfa1ac16d42b7372db35ba1804e879e6f6 | [
"MIT"
] | null | null | null | fuzzer/modules/py_parser.py | FChikh/REST-API-Fuzzer | cbbb03dfa1ac16d42b7372db35ba1804e879e6f6 | [
"MIT"
] | null | null | null | fuzzer/modules/py_parser.py | FChikh/REST-API-Fuzzer | cbbb03dfa1ac16d42b7372db35ba1804e879e6f6 | [
"MIT"
] | null | null | null | """
This module is a parser of our fuzzer
It converts from RAML specs to JSON format
"""
import json
import os
from subprocess import PIPE, Popen
def parse(parsed_page, page, data):
    """
    Recursively convert one RAML resource node (already serialized to JSON by
    parser.js) into the fuzzer's internal dictionary format.

    Works on RAML v1.0 parser output.  A missing key is treated throughout as
    "this section is absent", which is why the body relies on broad
    ``try/except KeyError`` blocks instead of membership checks.

    :param parsed_page: output dictionary, filled in place for the current page
    :type parsed_page: dict
    :param page: input dictionary for the current page, as produced by parser.js
    :type page: dict
    :param data: the whole parsed document (used to resolve named body types)
    :type data: dict
    :return: None -- results are written into ``parsed_page``
    """
    # Root node: only the document root carries 'baseUri'.  Child resources
    # raise KeyError here and fall through to the relative/absolute URI branch.
    try:
        parsed_page['baseUri'] = page['baseUri']
        parsed_page['is_changeable'] = False
        parsed_page['type'] = None
    except KeyError:
        try:
            parsed_page['relativeUri'] = page['relativeUri']
            parsed_page['uri'] = page['absoluteUri']
            # URIs ending in '}' are templated (e.g. /users/{id}); the template
            # parameter type is guessed from the parameter-name suffix.
            if parsed_page['uri'][-1] == '}':
                parsed_page['is_changeable'] = True
                if parsed_page['uri'][-3:-1] == 'id':
                    parsed_page['type'] = 'integer'
                else:
                    parsed_page['type'] = 'string'
        except KeyError:
            pass
    try:
        parsed_page['protocols'] = page['protocols']
    except KeyError:
        parsed_page['protocols'] = []
    parsed_page['methods'] = []
    try:
        for method in page['methods']:
            tmp_method = {}
            try:
                tmp_method['method'] = method['method']
            except KeyError:
                pass
            # --- query parameters ------------------------------------------
            tmp_method['queryParameters'] = []
            try:
                for queryParameter in method['queryParameters']:
                    parameter = method['queryParameters'][queryParameter]
                    tmp_dict = {'name': parameter['name'],
                                'type': parameter['type'][0],
                                'required': parameter['required']}
                    if tmp_dict['type'] == 'array':
                        tmp_dict['items'] = parameter['items']
                    if tmp_dict['type'] == 'object':
                        # Flatten nested object properties; only two levels of
                        # nesting are handled by this loop.
                        tmp_dict['properties'] = {}
                        for tmp_property in parameter['properties']:
                            tmp = parameter['properties'][tmp_property]
                            tmp_dict['properties'][tmp['name']] = tmp['type'][0]
                            if tmp['type'][0] == 'object':
                                tmp_dict['properties'][tmp['name']] = {}
                                for property_tmp in tmp['properties']:
                                    tmp_dict['properties'][tmp['name']][property_tmp] = tmp['properties'][property_tmp]['type'][0]
                    tmp_method['queryParameters'].append(tmp_dict)
            except KeyError:
                pass
            # --- request body ----------------------------------------------
            try:
                tmp_method['body'] = {'name': method['body']['application/json']['type'][0],
                                      'properties': []}
                try:
                    # Resolve the named body type against the document-level
                    # 'types' declarations.
                    # NOTE(review): the loop variable shadows the builtin
                    # `type` for the duration of this loop.
                    for type in data['types']:
                        tmp = type[list(type.keys())[0]]
                        if tmp['name'] == tmp_method['body']['name']:
                            for tmp_property in tmp['properties']:
                                parameter = tmp['properties'][tmp_property]
                                tmp_dict = {'name': parameter['name'],
                                            'type': parameter['type'][0],
                                            'required': parameter['required']}
                                if tmp_dict['type'] == 'array':
                                    tmp_dict['items'] = parameter['items']
                                if tmp_dict['type'] == 'object':
                                    tmp_dict['properties'] = {}
                                    for property_tmp in parameter['properties']:
                                        tmp_parameter = parameter['properties'][property_tmp]
                                        tmp_dict['properties'][tmp_parameter['name']] = tmp_parameter['type'][0]
                                        if tmp_parameter['type'][0] == 'object':
                                            tmp_dict['properties'][tmp_parameter['name']] = {}
                                            for new_property in tmp_parameter['properties']:
                                                tmp_dict['properties'][tmp_parameter['name']][new_property] = \
                                                    tmp['properties'][new_property]['type'][0]
                                tmp_method['body']['properties'].append(tmp_dict)
                            # Named types are unique, so stop at first match.
                            break
                except KeyError:
                    pass
            except KeyError:
                # No JSON body declared for this method.
                tmp_method['body'] = {}
            # --- responses -------------------------------------------------
            tmp_method['responses'] = []
            try:
                for response in method['responses']:
                    tmp_dict = {'code': method['responses'][response]['code'],
                                'type': method['responses'][response]['body']['application/json']['type'][0]}
                    tmp_method['responses'].append(tmp_dict)
            except KeyError:
                pass
            parsed_page['methods'].append(tmp_method)
    except KeyError:
        pass
    # --- child resources: recurse, inheriting this page's template info ----
    parsed_page['pages'] = []
    try:
        for resource in page['resources']:
            parsed_page['pages'].append({'type': parsed_page['type'],
                                         'is_changeable': parsed_page['is_changeable']});
            parse(parsed_page['pages'][-1], resource, data)
    except KeyError:
        pass
def fetch_parsed_data(path):
    """
    Run parser.js to convert a RAML spec to JSON, then load and parse it.

    :param path: full path to the RAML file
    :type path: str
    :return: tuple of (parsed data dictionary, the finished parser subprocess)
    :rtype: (dict, subprocess.Popen)
    """
    sensor = Popen(['node', 'modules/parser.js', path], stdout=PIPE)
    # Bug fix: parser.js is what produces modules/parsed.json, so we must wait
    # for it to exit before opening the file.  Previously the file was read
    # immediately after Popen(), racing against the writer (stale or partial
    # JSON could be loaded).
    # NOTE(review): assumes parser.js writes little to stdout; if its stdout
    # could be large, switch to sensor.communicate() to avoid a full PIPE
    # buffer stalling the child.
    sensor.wait()
    with open('modules/parsed.json', 'r') as json_file:
        data = json.load(json_file)
    parsed_data = {}
    parse(parsed_data, data, data)
    return parsed_data, sensor
| 44.352113 | 130 | 0.48174 |
import json
import os
from subprocess import PIPE, Popen
def parse(parsed_page, page, data):
try:
parsed_page['baseUri'] = page['baseUri']
parsed_page['is_changeable'] = False
parsed_page['type'] = None
except KeyError:
try:
parsed_page['relativeUri'] = page['relativeUri']
parsed_page['uri'] = page['absoluteUri']
if parsed_page['uri'][-1] == '}':
parsed_page['is_changeable'] = True
if parsed_page['uri'][-3:-1] == 'id':
parsed_page['type'] = 'integer'
else:
parsed_page['type'] = 'string'
except KeyError:
pass
try:
parsed_page['protocols'] = page['protocols']
except KeyError:
parsed_page['protocols'] = []
parsed_page['methods'] = []
try:
for method in page['methods']:
tmp_method = {}
try:
tmp_method['method'] = method['method']
except KeyError:
pass
tmp_method['queryParameters'] = []
try:
for queryParameter in method['queryParameters']:
parameter = method['queryParameters'][queryParameter]
tmp_dict = {'name': parameter['name'],
'type': parameter['type'][0],
'required': parameter['required']}
if tmp_dict['type'] == 'array':
tmp_dict['items'] = parameter['items']
if tmp_dict['type'] == 'object':
tmp_dict['properties'] = {}
for tmp_property in parameter['properties']:
tmp = parameter['properties'][tmp_property]
tmp_dict['properties'][tmp['name']] = tmp['type'][0]
if tmp['type'][0] == 'object':
tmp_dict['properties'][tmp['name']] = {}
for property_tmp in tmp['properties']:
tmp_dict['properties'][tmp['name']][property_tmp] = tmp['properties'][property_tmp]['type'][0]
tmp_method['queryParameters'].append(tmp_dict)
except KeyError:
pass
try:
tmp_method['body'] = {'name': method['body']['application/json']['type'][0],
'properties': []}
try:
for type in data['types']:
tmp = type[list(type.keys())[0]]
if tmp['name'] == tmp_method['body']['name']:
for tmp_property in tmp['properties']:
parameter = tmp['properties'][tmp_property]
tmp_dict = {'name': parameter['name'],
'type': parameter['type'][0],
'required': parameter['required']}
if tmp_dict['type'] == 'array':
tmp_dict['items'] = parameter['items']
if tmp_dict['type'] == 'object':
tmp_dict['properties'] = {}
for property_tmp in parameter['properties']:
tmp_parameter = parameter['properties'][property_tmp]
tmp_dict['properties'][tmp_parameter['name']] = tmp_parameter['type'][0]
if tmp_parameter['type'][0] == 'object':
tmp_dict['properties'][tmp_parameter['name']] = {}
for new_property in tmp_parameter['properties']:
tmp_dict['properties'][tmp_parameter['name']][new_property] = \
tmp['properties'][new_property]['type'][0]
tmp_method['body']['properties'].append(tmp_dict)
break
except KeyError:
pass
except KeyError:
tmp_method['body'] = {}
tmp_method['responses'] = []
try:
for response in method['responses']:
tmp_dict = {'code': method['responses'][response]['code'],
'type': method['responses'][response]['body']['application/json']['type'][0]}
tmp_method['responses'].append(tmp_dict)
except KeyError:
pass
parsed_page['methods'].append(tmp_method)
except KeyError:
pass
parsed_page['pages'] = []
try:
for resource in page['resources']:
parsed_page['pages'].append({'type': parsed_page['type'],
'is_changeable': parsed_page['is_changeable']});
parse(parsed_page['pages'][-1], resource, data)
except KeyError:
pass
def fetch_parsed_data(path):
sensor = Popen(['node', 'modules/parser.js', path], stdout=PIPE)
with open('modules/parsed.json', 'r') as json_file:
data = json.load(json_file)
parsed_data = {}
parse(parsed_data, data, data)
return parsed_data, sensor
| true | true |
f7f7fdf776adc9d06b10b84abe16fecb2b89d9e5 | 9,488 | py | Python | docs/gallery/general/linking_data.py | t-b/pynwb | b58e7b003247485120380360bb112bc6b22c7e60 | [
"BSD-3-Clause-LBNL"
] | 1 | 2021-04-13T20:47:36.000Z | 2021-04-13T20:47:36.000Z | docs/gallery/general/linking_data.py | t-b/pynwb | b58e7b003247485120380360bb112bc6b22c7e60 | [
"BSD-3-Clause-LBNL"
] | null | null | null | docs/gallery/general/linking_data.py | t-b/pynwb | b58e7b003247485120380360bb112bc6b22c7e60 | [
"BSD-3-Clause-LBNL"
] | null | null | null | '''
Modular Data Storage using External Files
===========================================
PyNWB supports linking between files using external links.
'''
####################
# Example Use Case: Integrating data from multiple files
# ---------------------------------------------------------
#
# NWBContainer classes (e.g., :py:meth:`~pynwb.base.TimeSeries`) support the integration of data stored in external
# HDF5 files with NWB data files via external links. To make things more concrete, let's look at the following use
# case. We want to simultaneously record multiple data steams during data acquisition. Using the concept of external
# links allows us to save each data stream to an external HDF5 files during data acquisition and to
# afterwards link the data into a single NWB:N file. In this case, each recording becomes represented by a
# separate file-system object that can be set as read-only once the experiment is done. In the following
# we are using :py:meth:`~pynwb.base.TimeSeries` as an example, but the same approach works for other
# NWBContainers as well.
#
#
####################
# .. tip::
#
# The same strategies we use here for creating External Links also apply to Soft Links.
# The main difference between soft and external links is that soft links point to other
# objects within the same file while external links point to objects in external files.
#
####################
# .. tip::
#
# In the case of :py:meth:`~pynwb.base.TimeSeries`, the uncorrected time stamps generated by the acquisition
# system can be stored (or linked) in the *sync* group. In the NWB:N format, hardware-recorded time data
# must then be corrected to a common time base (e.g., timestamps from all hardware sources aligned) before
# it can be included in the *timestamps* of the *TimeSeries* This means, in the case
# of :py:meth:`~pynwb.base.TimeSeries` we need to be careful that we are not including data with incompatible
# timestamps in the same file when using external links.
#
####################
# .. warning::
#
# External links can become stale/break. Since external links are pointing to data in other files
# external links may become invalid any time files are modified on the file system, e.g., renamed,
# moved or access permissions are changed.
#
####################
# Creating test data
# ---------------------------
#
# In the following we are creating 2 TimeSeries each written to a separate file. In the following we
# then show how we can integrate these files into a single NWBFile.
from datetime import datetime
from dateutil.tz import tzlocal
from pynwb import NWBFile
from pynwb import TimeSeries
from pynwb import NWBHDF5IO
import numpy as np
# Create the base data
start_time = datetime(2017, 4, 3, 11, tzinfo=tzlocal())
create_date = datetime(2017, 4, 15, 12, tzinfo=tzlocal())
data = np.arange(1000).reshape((100, 10))
timestamps = np.arange(100)
filename1 = 'external1_example.nwb'
filename2 = 'external2_example.nwb'
filename3 = 'external_linkcontainer_example.nwb'
filename4 = 'external_linkdataset_example.nwb'
# Create the first file
nwbfile1 = NWBFile(session_description='demonstrate external files',
identifier='NWBE1',
session_start_time=start_time,
file_create_date=create_date)
# Create the second file
test_ts1 = TimeSeries(name='test_timeseries1',
data=data,
unit='SIunit',
timestamps=timestamps)
nwbfile1.add_acquisition(test_ts1)
# Write the first file
io = NWBHDF5IO(filename1, 'w')
io.write(nwbfile1)
io.close()
# Create the second file
nwbfile2 = NWBFile(session_description='demonstrate external files',
identifier='NWBE2',
session_start_time=start_time,
file_create_date=create_date)
# Create the second file
test_ts2 = TimeSeries(name='test_timeseries2',
data=data,
unit='SIunit',
timestamps=timestamps)
nwbfile2.add_acquisition(test_ts2)
# Write the second file
io = NWBHDF5IO(filename2, 'w')
io.write(nwbfile2)
io.close()
#####################
# Linking to select datasets
# --------------------------
#
####################
# Step 1: Create the new NWBFile
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# Create the first file
nwbfile4 = NWBFile(session_description='demonstrate external files',
identifier='NWBE4',
session_start_time=start_time,
file_create_date=create_date)
####################
# Step 2: Get the dataset you want to link to
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# Now let's open our test files and retrieve our timeseries.
#
# Get the first timeseries
io1 = NWBHDF5IO(filename1)
nwbfile1 = io1.read()
timeseries_1 = nwbfile1.get_acquisition('test_timeseries1')
timeseries_1_data = timeseries_1.data
####################
# Step 3: Create the object you want to link to the data
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# To link to the dataset we can simply assign the data object (here `` timeseries_1.data``) to a new ``TimeSeries``
# Create a new timeseries that links to our data
test_ts4 = TimeSeries(name='test_timeseries4',
data=timeseries_1_data, # <-------
unit='SIunit',
timestamps=timestamps)
nwbfile4.add_acquisition(test_ts4)
####################
# In the above case we did not make it explicit how we want to handle the data from
# our TimeSeries, this means that :py:class:`~pynwb.NWBHDF5IO` will need to
# determine on write how to treat the dataset. We can make this explicit and customize this
# behavior on a per-dataset basis by wrapping our dataset using
# :py:meth:`~pynwb.form.backends.hdf5.h5_utils.H5DataIO`
from pynwb.form.backends.hdf5.h5_utils import H5DataIO
# Create another timeseries that links to the same data
test_ts5 = TimeSeries(name='test_timeseries5',
data=H5DataIO(data=timeseries_1_data, # <-------
link_data=True), # <-------
unit='SIunit',
timestamps=timestamps)
nwbfile4.add_acquisition(test_ts5)
####################
# Step 4: Write the data
# ^^^^^^^^^^^^^^^^^^^^^^^
#
from pynwb import NWBHDF5IO
io4 = NWBHDF5IO(filename4, 'w')
io4.write(nwbfile4,
link_data=True) # <-------- Specify default behavior to link rather than copy data
io4.close()
#####################
# .. note::
#
# In the case of TimeSeries one advantage of linking to just the main dataset is that we can now
# use our own timestamps in case the timestamps in the original file are not aligned with the
# clock of the NWBFile we are creating. In this way we can use the linking to "re-align" different
# TimeSeries without having to copy the main data.
####################
# Linking to whole Containers
# ---------------------------
#
# Appending to files and linking is made possible by passing around the same
# :py:class:`~pynwb.form.build.map.BuildManager`. You can get a manager to pass around
# using the :py:meth:`~pynwb.get_manager` function.
#
from pynwb import get_manager
manager = get_manager()
####################
# .. tip::
#
# You can pass in extensions to :py:meth:`~pynwb.get_manager` using the *extensions* argument.
####################
# Step 1: Get the container object you want to link to
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# Now let's open our test files and retrieve our timeseries.
#
# Get the first timeseries
io1 = NWBHDF5IO(filename1, manager=manager)
nwbfile1 = io1.read()
timeseries_1 = nwbfile1.get_acquisition('test_timeseries1')
# Get the second timeseries
io2 = NWBHDF5IO(filename2, manager=manager)
nwbfile2 = io2.read()
timeseries_2 = nwbfile2.get_acquisition('test_timeseries2')
####################
# Step 2: Add the container to another NWBFile
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# To integrate both :py:meth:`~pynwb.base.TimeSeries` into a single file we simply create a new
# :py:meth:`~pynwb.file.NWBFile` and add our existing :py:meth:`~pynwb.base.TimeSeries` objects to it. PyNWB's
# :py:meth:`~pynwb.NWBHDF5IO` backend then automatically detects that the TimeSeries have already
# been written to another file and will create external links for us.
#
# Create a new NWBFile that links to the external timeseries
nwbfile3 = NWBFile(session_description='demonstrate external files',
identifier='NWBE3',
session_start_time=start_time,
file_create_date=create_date)
nwbfile3.add_acquisition(timeseries_1) # <--------
nwbfile3.add_acquisition(timeseries_2) # <--------
# Write our third file that includes our two timeseries as external links
io3 = NWBHDF5IO(filename3, 'w', manager=manager)
io3.write(nwbfile3)
io3.close()
####################
# Creating a single file for sharing
# -----------------------------------
#
# External links are convenient but to share data we may want to hand a single file with all the
# data to our collaborator rather than having to collect all relevant files. To do this,
# :py:class:`~pynwb.form.backends.hdf5.h5tools.HDF5IO` (and in turn :py:class:`~pynwb.NWBHDF5IO`)
# provide the convenience function :py:func:`~pynwb.form.backends.hdf5.h5tools.HDF5IO.copy_file`
| 37.952 | 116 | 0.645974 |
s during data acquisition and to
# afterwards link the data into a single NWB:N file. In this case, each recording becomes represented by a
# separate file-system object that can be set as read-only once the experiment is done. In the following
# we are using :py:meth:`~pynwb.base.TimeSeries` as an example, but the same approach works for other
# NWBContainers as well.
#
#
####################
# .. tip::
#
# The same strategies we use here for creating External Links also apply to Soft Links.
# The main difference between soft and external links is that soft links point to other
# objects within the same file while external links point to objects in external files.
#
####################
# .. tip::
#
# In the case of :py:meth:`~pynwb.base.TimeSeries`, the uncorrected time stamps generated by the acquisition
# system can be stored (or linked) in the *sync* group. In the NWB:N format, hardware-recorded time data
# must then be corrected to a common time base (e.g., timestamps from all hardware sources aligned) before
# it can be included in the *timestamps* of the *TimeSeries* This means, in the case
# of :py:meth:`~pynwb.base.TimeSeries` we need to be careful that we are not including data with incompatible
# timestamps in the same file when using external links.
#
####################
# .. warning::
#
# External links can become stale/break. Since external links are pointing to data in other files
# external links may become invalid any time files are modified on the file system, e.g., renamed,
# moved or access permissions are changed.
#
####################
# Creating test data
# ---------------------------
#
# In the following we are creating 2 TimeSeries each written to a separate file. In the following we
# then show how we can integrate these files into a single NWBFile.
from datetime import datetime
from dateutil.tz import tzlocal
from pynwb import NWBFile
from pynwb import TimeSeries
from pynwb import NWBHDF5IO
import numpy as np
# Create the base data
start_time = datetime(2017, 4, 3, 11, tzinfo=tzlocal())
create_date = datetime(2017, 4, 15, 12, tzinfo=tzlocal())
data = np.arange(1000).reshape((100, 10))
timestamps = np.arange(100)
filename1 = 'external1_example.nwb'
filename2 = 'external2_example.nwb'
filename3 = 'external_linkcontainer_example.nwb'
filename4 = 'external_linkdataset_example.nwb'
# Create the first file
nwbfile1 = NWBFile(session_description='demonstrate external files',
identifier='NWBE1',
session_start_time=start_time,
file_create_date=create_date)
# Create the second file
test_ts1 = TimeSeries(name='test_timeseries1',
data=data,
unit='SIunit',
timestamps=timestamps)
nwbfile1.add_acquisition(test_ts1)
# Write the first file
io = NWBHDF5IO(filename1, 'w')
io.write(nwbfile1)
io.close()
# Create the second file
nwbfile2 = NWBFile(session_description='demonstrate external files',
identifier='NWBE2',
session_start_time=start_time,
file_create_date=create_date)
# Create the second file
test_ts2 = TimeSeries(name='test_timeseries2',
data=data,
unit='SIunit',
timestamps=timestamps)
nwbfile2.add_acquisition(test_ts2)
# Write the second file
io = NWBHDF5IO(filename2, 'w')
io.write(nwbfile2)
io.close()
#####################
# Linking to select datasets
# --------------------------
#
####################
# Step 1: Create the new NWBFile
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# Create the first file
nwbfile4 = NWBFile(session_description='demonstrate external files',
identifier='NWBE4',
session_start_time=start_time,
file_create_date=create_date)
####################
# Step 2: Get the dataset you want to link to
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# Now let's open our test files and retrieve our timeseries.
io1 = NWBHDF5IO(filename1)
nwbfile1 = io1.read()
timeseries_1 = nwbfile1.get_acquisition('test_timeseries1')
timeseries_1_data = timeseries_1.data
le4.add_acquisition(test_ts4)
link_data=True),
unit='SIunit',
timestamps=timestamps)
nwbfile4.add_acquisition(test_ts5)
session_start_time=start_time,
file_create_date=create_date)
nwbfile3.add_acquisition(timeseries_1)
nwbfile3.add_acquisition(timeseries_2)
io3 = NWBHDF5IO(filename3, 'w', manager=manager)
io3.write(nwbfile3)
io3.close()
| true | true |
f7f7fdfdf0fc7ceafca9c8c6a3a8f53f11b3c44d | 733 | py | Python | seq_match_corrector/__init__.py | slntopp/seq_match_corrector | 8f22bc81630b3c90ef43f9f9e275be70b16a96e3 | [
"Apache-2.0"
] | null | null | null | seq_match_corrector/__init__.py | slntopp/seq_match_corrector | 8f22bc81630b3c90ef43f9f9e275be70b16a96e3 | [
"Apache-2.0"
] | null | null | null | seq_match_corrector/__init__.py | slntopp/seq_match_corrector | 8f22bc81630b3c90ef43f9f9e275be70b16a96e3 | [
"Apache-2.0"
] | null | null | null | from difflib import SequenceMatcher
def ratios(target: list, dictionary: list) -> list:
    """For each word in *target*, list candidate corrections from *dictionary*.

    A candidate is any dictionary word whose length differs from the target
    word's length by at most 2 and whose SequenceMatcher ratio exceeds 0.5.
    Each candidate is a ``[word, ratio]`` pair; candidates are sorted
    best-ratio-first.
    """
    def candidates(word: str) -> list:
        length = len(word)
        # Score only dictionary entries of comparable length.
        scored = [
            [entry, SequenceMatcher(None, entry, word).ratio()]
            for entry in dictionary
            if length - 2 <= len(entry) <= length + 2
        ]
        good = [pair for pair in scored if pair[1] > 0.5]
        good.sort(key=lambda pair: pair[1], reverse=True)
        return good

    return [candidates(word) for word in target]
def correct(target: list, dictionary: list) -> dict:
    """Map each word in *target* to its best dictionary candidate, or itself."""
    r = ratios(target, dictionary)
    result = {}
    for i in range(len(target)):
        if r[i]:
            # r[i] is sorted best-first, so [0][0] is the top candidate word.
            result[target[i]] = r[i][0][0]
        else:
            # No candidate cleared the similarity threshold: keep the word.
            result[target[i]] = target[i]
    return result | 30.541667 | 135 | 0.53206 | from difflib import SequenceMatcher
def ratios(target: list, dictionary: list) -> list:
def ratio(word: str):
l = len(word)
return sorted(
filter(
lambda x: x[1] > 0.5,
map(lambda x: [x, SequenceMatcher(None, x, word).ratio()], filter(lambda x: len(x) in range(l - 2, l + 3), dictionary))
), key=lambda x: x[1], reverse=True
)
return list(
map(ratio, target)
)
def correct(target: list, dictionary: list) -> dict:
r = ratios(target, dictionary)
result = {}
for i in range(len(target)):
if r[i]:
result[target[i]] = r[i][0][0]
else:
result[target[i]] = target[i]
return result | true | true |
f7f7ff4685247bc5bf7e07414362153a88f9b9b9 | 7,736 | py | Python | tests/test_models.py | simkimsia/django-organizations | 720129f8897efd498504af78c208f29b1d823d4c | [
"BSD-2-Clause"
] | 855 | 2015-01-06T21:08:34.000Z | 2022-03-31T04:24:49.000Z | tests/test_models.py | simkimsia/django-organizations | 720129f8897efd498504af78c208f29b1d823d4c | [
"BSD-2-Clause"
] | 156 | 2015-02-09T01:51:40.000Z | 2022-03-29T22:23:01.000Z | tests/test_models.py | simkimsia/django-organizations | 720129f8897efd498504af78c208f29b1d823d4c | [
"BSD-2-Clause"
] | 186 | 2015-01-21T06:21:59.000Z | 2022-03-29T12:44:24.000Z | # -*- coding: utf-8 -*-
from functools import partial
from django.contrib.auth.models import User
from django.db import IntegrityError
from django.test import TestCase
from django.test.utils import override_settings
from organizations.models import Organization
from organizations.models import OrganizationInvitation
from organizations.models import OrganizationOwner
from organizations.models import OrganizationUser
from organizations.utils import create_organization
from test_abstract.models import CustomOrganization
from test_accounts.models import Account
from test_accounts.models import AccountInvitation
from test_custom.models import Team
@override_settings(USE_TZ=True)
class ActiveManagerTests(TestCase):
    """Compare the default manager with the ``active`` manager's filtering."""

    fixtures = ["users.json", "orgs.json"]

    def test_active(self):
        # The fixtures hold three organizations, one of which is inactive.
        self.assertEqual(Organization.objects.all().count(), 3)
        self.assertEqual(Organization.active.all().count(), 2)

    def test_by_user(self):
        dave = User.objects.get(username="dave")
        self.assertEqual(Organization.objects.get_for_user(dave).count(), 3)
        self.assertEqual(Organization.active.get_for_user(dave).count(), 2)
@override_settings(USE_TZ=True)
class OrgModelTests(TestCase):
    """Exercise membership, ownership and representation logic on Organization."""

    fixtures = ["users.json", "orgs.json"]

    def setUp(self):
        # Fixture users and the two organizations they belong to.
        self.kurt = User.objects.get(username="kurt")
        self.dave = User.objects.get(username="dave")
        self.krist = User.objects.get(username="krist")
        self.duder = User.objects.get(username="duder")
        self.nirvana = Organization.objects.get(name="Nirvana")
        self.foo = Organization.objects.get(name="Foo Fighters")

    def test_invitation_model(self):
        assert Organization.invitation_model == OrganizationInvitation

    def test_org_string_representation(self):
        """Ensure the models' string representations are error free"""
        # Non-ASCII name guards against encoding issues in __str__.
        self.foo.name = "Föö Fíghterß"
        self.assertTrue("{0}".format(self.foo))
        self.assertTrue("{0}".format(self.foo.owner))
        self.assertTrue("{0}".format(self.foo.owner.organization_user))

    def test_relation_name(self):
        """Ensure user-related name is accessible from common attribute"""
        self.assertEqual(self.foo.user_relation_name, "organizations_organization")

    def test_duplicate_members(self):
        """Ensure that a User can only have one OrganizationUser object"""
        # dave is already a member of Nirvana per the fixtures.
        self.assertRaises(IntegrityError, self.nirvana.add_user, self.dave)

    def test_is_member(self):
        self.assertTrue(self.nirvana.is_member(self.kurt))
        self.assertTrue(self.nirvana.is_member(self.dave))
        self.assertTrue(self.foo.is_member(self.dave))
        self.assertFalse(self.foo.is_member(self.kurt))

    def test_is_admin(self):
        self.assertTrue(self.nirvana.is_admin(self.kurt))
        self.assertTrue(self.nirvana.is_admin(self.krist))
        self.assertFalse(self.nirvana.is_admin(self.dave))
        self.assertTrue(self.foo.is_admin(self.dave))

    def test_is_owner(self):
        self.assertTrue(self.nirvana.is_owner(self.kurt))
        self.assertTrue(self.foo.is_owner(self.dave))
        self.assertFalse(self.nirvana.is_owner(self.dave))
        self.assertFalse(self.nirvana.is_owner(self.krist))

    def test_add_user(self):
        new_guy = self.foo.add_user(self.krist)
        self.assertTrue(isinstance(new_guy, OrganizationUser))
        self.assertEqual(new_guy.organization, self.foo)

    def test_remove_user(self):
        self.foo.add_user(self.krist)
        self.foo.remove_user(self.krist)
        self.assertFalse(self.foo.users.filter(pk=self.krist.pk).exists())

    def test_get_or_add_user(self):
        """Ensure `get_or_add_user` adds the user only if not already a member"""
        new_guy, created = self.foo.get_or_add_user(self.duder)
        self.assertTrue(isinstance(new_guy, OrganizationUser))
        self.assertEqual(new_guy.organization, self.foo)
        self.assertTrue(created)
        # Second call for an existing member must report created == False.
        new_guy, created = self.foo.get_or_add_user(self.dave)
        self.assertTrue(isinstance(new_guy, OrganizationUser))
        self.assertFalse(created)

    def test_delete_owner(self):
        """Deleting the owner's membership must be refused."""
        from organizations.exceptions import OwnershipRequired
        owner = self.nirvana.owner.organization_user
        self.assertRaises(OwnershipRequired, owner.delete)

    def test_change_owner(self):
        admin = self.nirvana.organization_users.get(user__username="krist")
        self.nirvana.change_owner(admin)
        owner = self.nirvana.owner.organization_user
        self.assertEqual(owner, admin)

    def test_delete_missing_owner(self):
        """Ensure an org user can be deleted when there is no owner"""
        org = Organization.objects.create(name="Some test", slug="some-test")
        # Avoid the Organization.add_user method which would make an owner
        org_user = OrganizationUser.objects.create(user=self.kurt, organization=org)
        # Just make sure it doesn't raise an error
        org_user.delete()

    def test_nonmember_owner(self):
        """Assigning another org's owner object must fail validation on save."""
        from organizations.exceptions import OrganizationMismatch
        foo_user = self.foo.owner
        self.nirvana.owner = foo_user
        self.assertRaises(OrganizationMismatch, self.nirvana.owner.save)
@override_settings(USE_TZ=True)
class OrgDeleteTests(TestCase):
    """Verify deletion cascades never remove the underlying User rows."""

    fixtures = ["users.json", "orgs.json"]

    def test_delete_account(self):
        """Deleting an organization must not delete its member Users."""
        self.assertEqual(OrganizationOwner.objects.all().count(), 3)
        self.assertEqual(User.objects.all().count(), 4)
        Organization.objects.get(name="Scream").delete()
        # The owner record cascades away; the users remain untouched.
        self.assertEqual(OrganizationOwner.objects.all().count(), 2)
        self.assertEqual(User.objects.all().count(), 4)

    def test_delete_orguser(self):
        """Deleting a membership must leave the underlying User intact."""
        krist = User.objects.get(username="krist")
        membership = OrganizationUser.objects.filter(
            organization__name="Nirvana", user=krist
        )
        membership.delete()
        self.assertTrue(krist.pk)
class CustomModelTests(TestCase):
    """Checks against custom organization models (Account, Team, abstract)."""

    # Load the world as we know it.
    fixtures = ["users.json", "orgs.json"]

    def setUp(self):
        self.kurt = User.objects.get(username="kurt")
        self.dave = User.objects.get(username="dave")
        self.krist = User.objects.get(username="krist")
        self.duder = User.objects.get(username="duder")
        # A custom-model organization created outside the fixtures.
        self.red_account = Account.objects.create(
            name="Red Account", monthly_subscription=1200
        )

    def test_invitation_model(self):
        assert Account.invitation_model == AccountInvitation

    def test_org_string(self):
        self.assertEqual(self.red_account.__str__(), "Red Account")

    def test_relation_name(self):
        """Ensure user-related name is accessible from common attribute"""
        self.assertEqual(self.red_account.user_relation_name, "test_accounts_account")

    def test_change_user(self):
        """Ensure custom organizations validate in owner change"""
        create_team = partial(create_organization, model=Team)
        hometeam = create_team(self.dave, "Hometeam")
        duder_org_user = hometeam.add_user(self.duder)
        # Reassigning the owner record to another member must save cleanly.
        hometeam.owner.organization_user = duder_org_user
        hometeam.owner.save()

    def test_abstract_change_user(self):
        """
        Ensure custom organizations inheriting abstract model
        validate in owner change
        """
        create_org = partial(create_organization, model=CustomOrganization)
        org1 = create_org(self.dave, "Org1")
        duder_org_user = org1.add_user(self.duder)
        org1.owner.organization_user = duder_org_user
        org1.owner.save()
| 38.874372 | 86 | 0.702042 |
from functools import partial
from django.contrib.auth.models import User
from django.db import IntegrityError
from django.test import TestCase
from django.test.utils import override_settings
from organizations.models import Organization
from organizations.models import OrganizationInvitation
from organizations.models import OrganizationOwner
from organizations.models import OrganizationUser
from organizations.utils import create_organization
from test_abstract.models import CustomOrganization
from test_accounts.models import Account
from test_accounts.models import AccountInvitation
from test_custom.models import Team
@override_settings(USE_TZ=True)
class ActiveManagerTests(TestCase):
fixtures = ["users.json", "orgs.json"]
def test_active(self):
self.assertEqual(3, Organization.objects.all().count())
self.assertEqual(2, Organization.active.all().count())
def test_by_user(self):
user = User.objects.get(username="dave")
self.assertEqual(3, Organization.objects.get_for_user(user).count())
self.assertEqual(2, Organization.active.get_for_user(user).count())
@override_settings(USE_TZ=True)
class OrgModelTests(TestCase):
fixtures = ["users.json", "orgs.json"]
def setUp(self):
self.kurt = User.objects.get(username="kurt")
self.dave = User.objects.get(username="dave")
self.krist = User.objects.get(username="krist")
self.duder = User.objects.get(username="duder")
self.nirvana = Organization.objects.get(name="Nirvana")
self.foo = Organization.objects.get(name="Foo Fighters")
def test_invitation_model(self):
assert Organization.invitation_model == OrganizationInvitation
def test_org_string_representation(self):
self.foo.name = "Föö Fíghterß"
self.assertTrue("{0}".format(self.foo))
self.assertTrue("{0}".format(self.foo.owner))
self.assertTrue("{0}".format(self.foo.owner.organization_user))
def test_relation_name(self):
self.assertEqual(self.foo.user_relation_name, "organizations_organization")
def test_duplicate_members(self):
self.assertRaises(IntegrityError, self.nirvana.add_user, self.dave)
def test_is_member(self):
self.assertTrue(self.nirvana.is_member(self.kurt))
self.assertTrue(self.nirvana.is_member(self.dave))
self.assertTrue(self.foo.is_member(self.dave))
self.assertFalse(self.foo.is_member(self.kurt))
def test_is_admin(self):
self.assertTrue(self.nirvana.is_admin(self.kurt))
self.assertTrue(self.nirvana.is_admin(self.krist))
self.assertFalse(self.nirvana.is_admin(self.dave))
self.assertTrue(self.foo.is_admin(self.dave))
def test_is_owner(self):
self.assertTrue(self.nirvana.is_owner(self.kurt))
self.assertTrue(self.foo.is_owner(self.dave))
self.assertFalse(self.nirvana.is_owner(self.dave))
self.assertFalse(self.nirvana.is_owner(self.krist))
def test_add_user(self):
new_guy = self.foo.add_user(self.krist)
self.assertTrue(isinstance(new_guy, OrganizationUser))
self.assertEqual(new_guy.organization, self.foo)
def test_remove_user(self):
self.foo.add_user(self.krist)
self.foo.remove_user(self.krist)
self.assertFalse(self.foo.users.filter(pk=self.krist.pk).exists())
def test_get_or_add_user(self):
new_guy, created = self.foo.get_or_add_user(self.duder)
self.assertTrue(isinstance(new_guy, OrganizationUser))
self.assertEqual(new_guy.organization, self.foo)
self.assertTrue(created)
new_guy, created = self.foo.get_or_add_user(self.dave)
self.assertTrue(isinstance(new_guy, OrganizationUser))
self.assertFalse(created)
def test_delete_owner(self):
from organizations.exceptions import OwnershipRequired
owner = self.nirvana.owner.organization_user
self.assertRaises(OwnershipRequired, owner.delete)
def test_change_owner(self):
admin = self.nirvana.organization_users.get(user__username="krist")
self.nirvana.change_owner(admin)
owner = self.nirvana.owner.organization_user
self.assertEqual(owner, admin)
def test_delete_missing_owner(self):
org = Organization.objects.create(name="Some test", slug="some-test")
org_user = OrganizationUser.objects.create(user=self.kurt, organization=org)
org_user.delete()
def test_nonmember_owner(self):
from organizations.exceptions import OrganizationMismatch
foo_user = self.foo.owner
self.nirvana.owner = foo_user
self.assertRaises(OrganizationMismatch, self.nirvana.owner.save)
@override_settings(USE_TZ=True)
class OrgDeleteTests(TestCase):
fixtures = ["users.json", "orgs.json"]
def test_delete_account(self):
self.assertEqual(3, OrganizationOwner.objects.all().count())
self.assertEqual(4, User.objects.all().count())
scream = Organization.objects.get(name="Scream")
scream.delete()
self.assertEqual(2, OrganizationOwner.objects.all().count())
self.assertEqual(4, User.objects.all().count())
def test_delete_orguser(self):
krist = User.objects.get(username="krist")
org_user = OrganizationUser.objects.filter(
organization__name="Nirvana", user=krist
)
org_user.delete()
self.assertTrue(krist.pk)
class CustomModelTests(TestCase):
# Load the world as we know it.
fixtures = ["users.json", "orgs.json"]
def setUp(self):
self.kurt = User.objects.get(username="kurt")
self.dave = User.objects.get(username="dave")
self.krist = User.objects.get(username="krist")
self.duder = User.objects.get(username="duder")
self.red_account = Account.objects.create(
name="Red Account", monthly_subscription=1200
)
def test_invitation_model(self):
assert Account.invitation_model == AccountInvitation
def test_org_string(self):
self.assertEqual(self.red_account.__str__(), "Red Account")
def test_relation_name(self):
self.assertEqual(self.red_account.user_relation_name, "test_accounts_account")
def test_change_user(self):
create_team = partial(create_organization, model=Team)
hometeam = create_team(self.dave, "Hometeam")
duder_org_user = hometeam.add_user(self.duder)
hometeam.owner.organization_user = duder_org_user
hometeam.owner.save()
def test_abstract_change_user(self):
create_org = partial(create_organization, model=CustomOrganization)
org1 = create_org(self.dave, "Org1")
duder_org_user = org1.add_user(self.duder)
org1.owner.organization_user = duder_org_user
org1.owner.save()
| true | true |
f7f7ff860b78f4a08e3ca5831ae247f83c0cd317 | 8,312 | py | Python | src/the_tale/the_tale/game/abilities/deck/help.py | devapromix/the-tale | 2a10efd3270734f8cf482b4cfbc5353ef8f0494c | [
"BSD-3-Clause"
] | 1 | 2020-04-02T11:51:20.000Z | 2020-04-02T11:51:20.000Z | src/the_tale/the_tale/game/abilities/deck/help.py | devapromix/the-tale | 2a10efd3270734f8cf482b4cfbc5353ef8f0494c | [
"BSD-3-Clause"
] | null | null | null | src/the_tale/the_tale/game/abilities/deck/help.py | devapromix/the-tale | 2a10efd3270734f8cf482b4cfbc5353ef8f0494c | [
"BSD-3-Clause"
] | null | null | null |
import smart_imports
smart_imports.all()
class Help(prototypes.AbilityPrototype):
TYPE = relations.ABILITY_TYPE.HELP
def use_heal(self, task, action, hero, critical):
if critical:
heal_amount = int(hero.heal(hero.max_health * random.uniform(*c.ANGEL_HELP_CRIT_HEAL_FRACTION)))
hero.add_message('angel_ability_healhero_crit', hero=hero, health=heal_amount, energy=self.TYPE.cost)
else:
heal_amount = int(hero.heal(hero.max_health * random.uniform(*c.ANGEL_HELP_HEAL_FRACTION)))
hero.add_message('angel_ability_healhero', hero=hero, health=heal_amount, energy=self.TYPE.cost)
action.on_heal()
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_start_quest(self, task, action, hero, critical): # pylint: disable=W0613
hero.add_message('angel_ability_stimulate', hero=hero, energy=self.TYPE.cost)
action.init_quest()
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_money(self, task, action, hero, critical): # pylint: disable=W0613
coins = int(math.ceil(f.normal_loot_cost_at_lvl(hero.level) * random.uniform(*c.ANGEL_HELP_CRIT_MONEY_FRACTION)))
if critical:
coins *= c.ANGEL_HELP_CRIT_MONEY_MULTIPLIER
hero.change_money(heroes_relations.MONEY_SOURCE.EARNED_FROM_HELP, coins)
hero.add_message('angel_ability_money_crit', hero=hero, coins=coins, energy=self.TYPE.cost)
else:
hero.change_money(heroes_relations.MONEY_SOURCE.EARNED_FROM_HELP, coins)
hero.add_message('angel_ability_money', hero=hero, coins=coins, energy=self.TYPE.cost)
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_teleport(self, task, action, hero, critical):
if critical:
hero.add_message('angel_ability_shortteleport_crit', hero=hero, energy=self.TYPE.cost)
distance = c.ANGEL_HELP_CRIT_TELEPORT_DISTANCE
else:
hero.add_message('angel_ability_shortteleport', hero=hero, energy=self.TYPE.cost)
distance = c.ANGEL_HELP_TELEPORT_DISTANCE
action.teleport(distance, create_inplace_action=True)
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_lightning(self, task, action, hero, critical):
if critical:
damage_percents = random.uniform(*c.ANGEL_HELP_CRIT_LIGHTING_FRACTION)
else:
damage_percents = random.uniform(*c.ANGEL_HELP_LIGHTING_FRACTION)
damage = action.mob_damage_percents_to_health(damage_percents)
if critical:
hero.add_message('angel_ability_lightning_crit', hero=hero, mob=action.mob, damage=damage, energy=self.TYPE.cost)
else:
hero.add_message('angel_ability_lightning', hero=hero, mob=action.mob, damage=damage, energy=self.TYPE.cost)
action.bit_mob(damage)
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_resurrect(self, task, action, hero, critical): # pylint: disable=W0613
if hero.is_alive:
return (game_postponed_tasks.ComplexChangeTask.RESULT.IGNORE, game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS, ())
hero.add_message('angel_ability_resurrect', hero=hero, energy=self.TYPE.cost)
action.fast_resurrect()
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_experience(self, task, action, hero, critical): # pylint: disable=W0613
if critical:
experience = int(c.ANGEL_HELP_CRIT_EXPERIENCE * (1 + random.uniform(-c.ANGEL_HELP_EXPERIENCE_DELTA, c.ANGEL_HELP_EXPERIENCE_DELTA)) + 1)
real_experience = hero.add_experience(experience)
hero.add_message('angel_ability_experience_crit', hero=hero, experience=real_experience, energy=self.TYPE.cost)
else:
experience = int(c.ANGEL_HELP_EXPERIENCE * (1 + random.uniform(-c.ANGEL_HELP_EXPERIENCE_DELTA, c.ANGEL_HELP_EXPERIENCE_DELTA)) + 1)
real_experience = hero.add_experience(experience)
hero.add_message('angel_ability_experience', hero=hero, experience=real_experience, energy=self.TYPE.cost)
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_heal_companion(self, task, action, hero, critical): # pylint: disable=W0613
if hero.companion is None:
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.ERROR)
if hero.companion.health == hero.companion.max_health:
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.ERROR)
if critical:
health = hero.companion.heal(c.COMPANIONS_HEAL_CRIT_AMOUNT)
hero.add_message('angel_ability_heal_companion_crit', hero=hero, companion=hero.companion, health=health, energy=self.TYPE.cost)
else:
health = hero.companion.heal(c.COMPANIONS_HEAL_AMOUNT)
hero.add_message('angel_ability_heal_companion', hero=hero, companion=hero.companion, health=health, energy=self.TYPE.cost)
action.on_heal_companion()
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def _use(self, task, choice, action, hero, critical):
if choice.is_HEAL:
return self.use_heal(task, action, hero, critical)
elif choice.is_START_QUEST:
return self.use_start_quest(task, action, hero, critical)
elif choice.is_MONEY:
return self.use_money(task, action, hero, critical)
elif choice.is_TELEPORT:
return self.use_teleport(task, action, hero, critical)
elif choice.is_LIGHTING:
return self.use_lightning(task, action, hero, critical)
elif choice.is_RESURRECT:
return self.use_resurrect(task, action, hero, critical)
elif choice.is_EXPERIENCE:
return self.use_experience(task, action, hero, critical)
elif choice.is_HEAL_COMPANION:
return self.use_heal_companion(task, action, hero, critical)
def use(self, task, storage, **kwargs): # pylint: disable=R0911
battle = pvp_prototypes.Battle1x1Prototype.get_by_account_id(task.hero.account_id)
if battle and not battle.state.is_WAITING:
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.ERROR)
if not task.hero.can_be_helped():
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.ERROR)
task.hero.on_help()
action = task.hero.actions.current_action
choice = action.get_help_choice()
if choice is None:
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.ERROR)
if action.HABIT_MODE.is_AGGRESSIVE:
task.hero.update_habits(heroes_relations.HABIT_CHANGE_SOURCE.HELP_AGGRESSIVE)
elif action.HABIT_MODE.is_PEACEFUL:
task.hero.update_habits(heroes_relations.HABIT_CHANGE_SOURCE.HELP_UNAGGRESSIVE)
elif action.HABIT_MODE.is_COMPANION:
if task.hero.companion:
for habit_source in task.hero.companion.modify_attribute(heroes_relations.MODIFIERS.HABITS_SOURCES, set()):
task.hero.update_habits(habit_source, multuplier=task.hero.companion_habits_multiplier)
else:
raise exceptions.UnknownHabitModeError(mode=action.HABIT_MODE)
critical = random.uniform(0, 1) < task.hero.might_crit_chance
result = self._use(task, choice, action, task.hero, critical)
if result[0].is_SUCCESSED:
task.hero.statistics.change_help_count(1)
if task.hero.actions.current_action.state == task.hero.actions.current_action.STATE.PROCESSED:
storage.process_turn__single_hero(hero=task.hero,
logger=None,
continue_steps_if_needed=True)
task.hero.process_removed_artifacts()
return result
| 46.696629 | 148 | 0.705486 |
import smart_imports
smart_imports.all()
class Help(prototypes.AbilityPrototype):
TYPE = relations.ABILITY_TYPE.HELP
def use_heal(self, task, action, hero, critical):
if critical:
heal_amount = int(hero.heal(hero.max_health * random.uniform(*c.ANGEL_HELP_CRIT_HEAL_FRACTION)))
hero.add_message('angel_ability_healhero_crit', hero=hero, health=heal_amount, energy=self.TYPE.cost)
else:
heal_amount = int(hero.heal(hero.max_health * random.uniform(*c.ANGEL_HELP_HEAL_FRACTION)))
hero.add_message('angel_ability_healhero', hero=hero, health=heal_amount, energy=self.TYPE.cost)
action.on_heal()
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_start_quest(self, task, action, hero, critical):
hero.add_message('angel_ability_stimulate', hero=hero, energy=self.TYPE.cost)
action.init_quest()
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_money(self, task, action, hero, critical):
coins = int(math.ceil(f.normal_loot_cost_at_lvl(hero.level) * random.uniform(*c.ANGEL_HELP_CRIT_MONEY_FRACTION)))
if critical:
coins *= c.ANGEL_HELP_CRIT_MONEY_MULTIPLIER
hero.change_money(heroes_relations.MONEY_SOURCE.EARNED_FROM_HELP, coins)
hero.add_message('angel_ability_money_crit', hero=hero, coins=coins, energy=self.TYPE.cost)
else:
hero.change_money(heroes_relations.MONEY_SOURCE.EARNED_FROM_HELP, coins)
hero.add_message('angel_ability_money', hero=hero, coins=coins, energy=self.TYPE.cost)
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_teleport(self, task, action, hero, critical):
if critical:
hero.add_message('angel_ability_shortteleport_crit', hero=hero, energy=self.TYPE.cost)
distance = c.ANGEL_HELP_CRIT_TELEPORT_DISTANCE
else:
hero.add_message('angel_ability_shortteleport', hero=hero, energy=self.TYPE.cost)
distance = c.ANGEL_HELP_TELEPORT_DISTANCE
action.teleport(distance, create_inplace_action=True)
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_lightning(self, task, action, hero, critical):
if critical:
damage_percents = random.uniform(*c.ANGEL_HELP_CRIT_LIGHTING_FRACTION)
else:
damage_percents = random.uniform(*c.ANGEL_HELP_LIGHTING_FRACTION)
damage = action.mob_damage_percents_to_health(damage_percents)
if critical:
hero.add_message('angel_ability_lightning_crit', hero=hero, mob=action.mob, damage=damage, energy=self.TYPE.cost)
else:
hero.add_message('angel_ability_lightning', hero=hero, mob=action.mob, damage=damage, energy=self.TYPE.cost)
action.bit_mob(damage)
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_resurrect(self, task, action, hero, critical):
if hero.is_alive:
return (game_postponed_tasks.ComplexChangeTask.RESULT.IGNORE, game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS, ())
hero.add_message('angel_ability_resurrect', hero=hero, energy=self.TYPE.cost)
action.fast_resurrect()
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_experience(self, task, action, hero, critical):
if critical:
experience = int(c.ANGEL_HELP_CRIT_EXPERIENCE * (1 + random.uniform(-c.ANGEL_HELP_EXPERIENCE_DELTA, c.ANGEL_HELP_EXPERIENCE_DELTA)) + 1)
real_experience = hero.add_experience(experience)
hero.add_message('angel_ability_experience_crit', hero=hero, experience=real_experience, energy=self.TYPE.cost)
else:
experience = int(c.ANGEL_HELP_EXPERIENCE * (1 + random.uniform(-c.ANGEL_HELP_EXPERIENCE_DELTA, c.ANGEL_HELP_EXPERIENCE_DELTA)) + 1)
real_experience = hero.add_experience(experience)
hero.add_message('angel_ability_experience', hero=hero, experience=real_experience, energy=self.TYPE.cost)
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def use_heal_companion(self, task, action, hero, critical):
if hero.companion is None:
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.ERROR)
if hero.companion.health == hero.companion.max_health:
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.ERROR)
if critical:
health = hero.companion.heal(c.COMPANIONS_HEAL_CRIT_AMOUNT)
hero.add_message('angel_ability_heal_companion_crit', hero=hero, companion=hero.companion, health=health, energy=self.TYPE.cost)
else:
health = hero.companion.heal(c.COMPANIONS_HEAL_AMOUNT)
hero.add_message('angel_ability_heal_companion', hero=hero, companion=hero.companion, health=health, energy=self.TYPE.cost)
action.on_heal_companion()
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.SUCCESS)
def _use(self, task, choice, action, hero, critical):
if choice.is_HEAL:
return self.use_heal(task, action, hero, critical)
elif choice.is_START_QUEST:
return self.use_start_quest(task, action, hero, critical)
elif choice.is_MONEY:
return self.use_money(task, action, hero, critical)
elif choice.is_TELEPORT:
return self.use_teleport(task, action, hero, critical)
elif choice.is_LIGHTING:
return self.use_lightning(task, action, hero, critical)
elif choice.is_RESURRECT:
return self.use_resurrect(task, action, hero, critical)
elif choice.is_EXPERIENCE:
return self.use_experience(task, action, hero, critical)
elif choice.is_HEAL_COMPANION:
return self.use_heal_companion(task, action, hero, critical)
def use(self, task, storage, **kwargs):
battle = pvp_prototypes.Battle1x1Prototype.get_by_account_id(task.hero.account_id)
if battle and not battle.state.is_WAITING:
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.ERROR)
if not task.hero.can_be_helped():
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.ERROR)
task.hero.on_help()
action = task.hero.actions.current_action
choice = action.get_help_choice()
if choice is None:
return task.logic_result(next_step=game_postponed_tasks.ComplexChangeTask.STEP.ERROR)
if action.HABIT_MODE.is_AGGRESSIVE:
task.hero.update_habits(heroes_relations.HABIT_CHANGE_SOURCE.HELP_AGGRESSIVE)
elif action.HABIT_MODE.is_PEACEFUL:
task.hero.update_habits(heroes_relations.HABIT_CHANGE_SOURCE.HELP_UNAGGRESSIVE)
elif action.HABIT_MODE.is_COMPANION:
if task.hero.companion:
for habit_source in task.hero.companion.modify_attribute(heroes_relations.MODIFIERS.HABITS_SOURCES, set()):
task.hero.update_habits(habit_source, multuplier=task.hero.companion_habits_multiplier)
else:
raise exceptions.UnknownHabitModeError(mode=action.HABIT_MODE)
critical = random.uniform(0, 1) < task.hero.might_crit_chance
result = self._use(task, choice, action, task.hero, critical)
if result[0].is_SUCCESSED:
task.hero.statistics.change_help_count(1)
if task.hero.actions.current_action.state == task.hero.actions.current_action.STATE.PROCESSED:
storage.process_turn__single_hero(hero=task.hero,
logger=None,
continue_steps_if_needed=True)
task.hero.process_removed_artifacts()
return result
| true | true |
f7f7ff981455442fbf6680876525a85ec3257852 | 4,579 | py | Python | qa/rpc-tests/multi_rpc.py | ronnpaulcoin/ronpaulcoin | 07c357663483ca7781d2a4fc02746629c4eccfbe | [
"MIT"
] | null | null | null | qa/rpc-tests/multi_rpc.py | ronnpaulcoin/ronpaulcoin | 07c357663483ca7781d2a4fc02746629c4eccfbe | [
"MIT"
] | null | null | null | qa/rpc-tests/multi_rpc.py | ronnpaulcoin/ronpaulcoin | 07c357663483ca7781d2a4fc02746629c4eccfbe | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test multiple rpc user config option rpcauth
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import str_to_b64str, assert_equal
import os
import http.client
import urllib.parse
class HTTPBasicsTest (BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = False
self.num_nodes = 1
def setup_chain(self):
super().setup_chain()
#Append rpcauth to bitcoin.conf before initialization
rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
with open(os.path.join(self.options.tmpdir+"/node0", "ronpaulcoin.conf"), 'a', encoding='utf8') as f:
f.write(rpcauth+"\n")
f.write(rpcauth2+"\n")
def setup_network(self):
self.nodes = self.setup_nodes()
def run_test(self):
##################################################
# Check correctness of the rpcauth config option #
##################################################
url = urllib.parse.urlparse(self.nodes[0].url)
#Old authpair
authpair = url.username + ':' + url.password
#New authpair generated via share/rpcuser tool
rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
#Second authpair with different username
rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="
authpairnew = "rt:"+password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, False)
conn.close()
#Use new authpair to confirm both work
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, False)
conn.close()
#Wrong login name with rt's password
authpairnew = "rtwrong:"+password
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, True)
conn.close()
#Wrong password for rt
authpairnew = "rt:"+password+"wrong"
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, True)
conn.close()
#Correct for rt2
authpairnew = "rt2:"+password2
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, False)
conn.close()
#Wrong password for rt2
authpairnew = "rt2:"+password2+"wrong"
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, True)
conn.close()
if __name__ == '__main__':
HTTPBasicsTest ().main ()
| 37.842975 | 129 | 0.645556 |
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import str_to_b64str, assert_equal
import os
import http.client
import urllib.parse
class HTTPBasicsTest (BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = False
self.num_nodes = 1
def setup_chain(self):
super().setup_chain()
rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
with open(os.path.join(self.options.tmpdir+"/node0", "ronpaulcoin.conf"), 'a', encoding='utf8') as f:
f.write(rpcauth+"\n")
f.write(rpcauth2+"\n")
def setup_network(self):
self.nodes = self.setup_nodes()
def run_test(self):
ssert_equal(resp.status==401, False)
conn.close()
#Wrong password for rt2
authpairnew = "rt2:"+password2+"wrong"
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
resp = conn.getresponse()
assert_equal(resp.status==401, True)
conn.close()
if __name__ == '__main__':
HTTPBasicsTest ().main ()
| true | true |
f7f7ff9ca775addcc2e9c24e3c8fa60a23bc9d70 | 9,779 | py | Python | toontown/battle/Fanfare.py | MasterLoopyBM/Toontown | ebed7fc3f2ef06a529cf02eda7ab46361aceef9d | [
"MIT"
] | 1 | 2020-02-07T18:15:12.000Z | 2020-02-07T18:15:12.000Z | toontown/battle/Fanfare.py | TrueBlueDogemon/Toontown | ebed7fc3f2ef06a529cf02eda7ab46361aceef9d | [
"MIT"
] | null | null | null | toontown/battle/Fanfare.py | TrueBlueDogemon/Toontown | ebed7fc3f2ef06a529cf02eda7ab46361aceef9d | [
"MIT"
] | 2 | 2020-11-08T03:38:35.000Z | 2021-09-02T07:03:47.000Z | from direct.interval.IntervalGlobal import *
from BattleBase import *
from BattleProps import *
from BattleSounds import *
from toontown.toon.ToonDNA import *
from toontown.suit.SuitDNA import *
from direct.particles.ParticleEffect import *
from direct.gui.DirectGui import *
from pandac.PandaModules import *
import MovieUtil
import MovieCamera
from direct.directnotify import DirectNotifyGlobal
import BattleParticles
from toontown.toonbase import ToontownGlobals
import RewardPanel
notify = DirectNotifyGlobal.directNotify.newCategory('Fanfare')
def makePanel(toon, showToonName):
    """Create the dialog-style frame used as the backdrop for a fanfare.

    The panel is created hidden; doFanfare() reveals it at the right moment.

    Args:
        toon: the Toon the fanfare is for (its name may be shown on the panel).
        showToonName: truthy to add a label with the toon's name.

    Returns:
        The hidden DirectFrame, optionally carrying an avNameLabel child.
    """
    panel = DirectFrame(relief=None, geom=DGG.getDefaultDialogGeom(), geom_color=ToontownGlobals.GlobalDialogColor, geom_scale=(1.75, 1, 0.75), pos=(0, 0, 0.587))
    panel.initialiseoptions(RewardPanel)
    panel.setTransparency(1)
    panel.hide()
    # Bug fix: the original tested "showToonName is 1", an identity comparison
    # against an int literal.  That only works by accident of CPython's small-int
    # caching; a plain truthiness test expresses the intent reliably.
    if showToonName:
        panel.avNameLabel = DirectLabel(parent=panel, relief=None, pos=Vec3(0, 0, 0.3), text=toon.getName(), text_scale=0.08)
    return panel
def makeMessageBox(panel, message, messagePos, messageScale, wordwrap = 100):
    """Attach a text frame (itemFrame) to *panel* showing *message*.

    Args:
        panel: the DirectFrame created by makePanel().
        message: text to display (may be '' when only an image is wanted).
        messagePos: text_pos for the label.
        messageScale: text_scale for the label.
        wordwrap: text_wordwrap width; the default of 100 effectively
            disables wrapping.
    """
    # The dead bare "return" that trailed this function (a decompiler
    # artifact) has been removed; the function still returns None.
    panel.itemFrame = DirectFrame(parent=panel, relief=None, text=message, text_pos=messagePos, text_scale=messageScale, text_wordwrap=wordwrap)
def makeImageBox(frame, image, imagePos, imageScale):
    """Copy *image* under *frame*, position/scale it, and store it as
    frame.imageIcon so callers can find it later."""
    icon = image.copyTo(frame)
    icon.setPos(imagePos)
    icon.setScale(imageScale)
    frame.imageIcon = icon
def makeFanfare(delay, toon):
    """Build a fanfare for *toon* with no info panel attached."""
    # Panel-less variant: delegate to doFanfare with panel=None.
    panel = None
    return doFanfare(delay, toon, panel)
def makeFanfareWithMessage(delay, toon, showToonName, message, messagePos, messageScale, wordwrap = 100):
    """Build a fanfare for *toon* whose panel shows a text message."""
    infoPanel = makePanel(toon, showToonName)
    makeMessageBox(infoPanel, message, messagePos, messageScale, wordwrap)
    return doFanfare(delay, toon, infoPanel)
def makeFanfareWithImage(delay, toon, showToonName, image, imagePos, imageScale, wordwrap = 100):
    """Build a fanfare for *toon* whose panel shows an image only."""
    infoPanel = makePanel(toon, showToonName)
    # An empty message box still supplies the itemFrame the image hangs from.
    makeMessageBox(infoPanel, '', Vec3(0, 0, 0), 1, wordwrap)
    makeImageBox(infoPanel.itemFrame, image, imagePos, imageScale)
    return doFanfare(delay, toon, infoPanel)
def makeFanfareWithMessageImage(delay, toon, showToonName, message, messagePos, messageScale, image, imagePos, imageScale, wordwrap = 100):
    """Build a fanfare for *toon* whose panel shows both a message and an image."""
    infoPanel = makePanel(toon, showToonName)
    makeMessageBox(infoPanel, message, messagePos, messageScale, wordwrap)
    makeImageBox(infoPanel.itemFrame, image, imagePos, imageScale)
    return doFanfare(delay, toon, infoPanel)
def doFanfare(delay, toon, panel):
fanfareNode = toon.attachNewNode('fanfareNode')
partyBall = fanfareNode.attachNewNode('partyBall')
headparts = toon.getHeadParts()
pos = headparts[2].getPos(fanfareNode)
partyBallLeft = globalPropPool.getProp('partyBall')
partyBallLeft.reparentTo(partyBall)
partyBallLeft.setScale(0.8)
partyBallLeft.setH(90)
partyBallLeft.setColorScale(1, 0, 0, 0)
partyBallRight = globalPropPool.getProp('partyBall')
partyBallRight.reparentTo(partyBall)
partyBallRight.setScale(0.8)
partyBallRight.setH(-90)
partyBallRight.setColorScale(1, 1, 0, 0)
partyBall.setZ(pos.getZ() + 3.2)
ballShake1 = Sequence(Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, 0, 0), hpr=Vec3(90, 10, 0), blendType='easeInOut'), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, 0, 0), hpr=Vec3(-90, -10, 0), blendType='easeInOut')), Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, 10, 0), hpr=Vec3(90, -10, 0), blendType='easeInOut'), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, -10, 0), hpr=Vec3(-90, 10, 0), blendType='easeInOut')), Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, -10, 0), hpr=Vec3(90, 0, 0), blendType='easeInOut'), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, 10, 0), hpr=Vec3(-90, 0, 0), blendType='easeInOut')))
ballShake2 = Sequence(Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, 0, 0), hpr=Vec3(90, -10, 0), blendType='easeInOut'), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, 0, 0), hpr=Vec3(-90, 10, 0), blendType='easeInOut')), Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, -10, 0), hpr=Vec3(90, 10, 0), blendType='easeInOut'), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, 10, 0), hpr=Vec3(-90, -10, 0), blendType='easeInOut')), Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, 10, 0), hpr=Vec3(90, 0, 0), blendType='easeInOut'), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, -10, 0), hpr=Vec3(-90, 0, 0), blendType='easeInOut')))
openBall = Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, 0, 0), hpr=Vec3(90, 30, 0)), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, 0, 0), hpr=Vec3(-90, 30, 0)))
confettiNode = fanfareNode.attachNewNode('confetti')
confettiNode.setScale(3)
confettiNode.setZ(pos.getZ() + 2.5)
def longshake(models, num, duration):
inShake = getScaleBlendIntervals(models, duration=duration, startScale=0.23, endScale=0.2, blendType='easeInOut')
outShake = getScaleBlendIntervals(models, duration=duration, startScale=0.2, endScale=0.23, blendType='easeInOut')
i = 1
seq = Sequence()
while i < num:
if i % 2 == 0:
seq.append(inShake)
else:
seq.append(outShake)
i += 1
return seq
def getScaleBlendIntervals(props, duration, startScale, endScale, blendType):
tracks = Parallel()
for prop in props:
tracks.append(LerpScaleInterval(prop, duration, endScale, startScale=startScale, blendType=blendType))
return tracks
trumpetNode = fanfareNode.attachNewNode('trumpetNode')
trumpet1 = globalPropPool.getProp('bugle')
trumpet2 = MovieUtil.copyProp(trumpet1)
trumpet1.reparentTo(trumpetNode)
trumpet1.setScale(0.2)
trumpet1.setPos(2, 2, 1)
trumpet1.setHpr(120, 65, 0)
trumpet2.reparentTo(trumpetNode)
trumpet2.setScale(0.2)
trumpet2.setPos(-2, 2, 1)
trumpet2.setHpr(-120, 65, 0)
trumpetNode.setTransparency(1)
trumpetNode.setColor(1, 1, 1, 0)
trumpturn1 = LerpHprInterval(trumpet1, duration=4, startHpr=Vec3(80, 15, 0), hpr=Vec3(150, 40, 0))
trumpturn2 = LerpHprInterval(trumpet2, duration=4, startHpr=Vec3(-80, 15, 0), hpr=Vec3(-150, 40, 0))
trumpetTurn = Parallel(trumpturn1, trumpturn2)
BattleParticles.loadParticles()
confettiBlue = BattleParticles.createParticleEffect('Confetti')
confettiBlue.reparentTo(confettiNode)
blue_p0 = confettiBlue.getParticlesNamed('particles-1')
blue_p0.renderer.getColorInterpolationManager().addConstant(0.0, 1.0, Vec4(0.0, 0.0, 1.0, 1.0), 1)
confettiYellow = BattleParticles.createParticleEffect('Confetti')
confettiYellow.reparentTo(confettiNode)
yellow_p0 = confettiYellow.getParticlesNamed('particles-1')
yellow_p0.renderer.getColorInterpolationManager().addConstant(0.0, 1.0, Vec4(1.0, 1.0, 0.0, 1.0), 1)
confettiRed = BattleParticles.createParticleEffect('Confetti')
confettiRed.reparentTo(confettiNode)
red_p0 = confettiRed.getParticlesNamed('particles-1')
red_p0.renderer.getColorInterpolationManager().addConstant(0.0, 1.0, Vec4(1.0, 0.0, 0.0, 1.0), 1)
trumpetsAppear = LerpColorInterval(trumpetNode, 0.3, startColor=Vec4(1, 1, 0, 0), color=Vec4(1, 1, 0, 1))
trumpetsVanish = LerpColorInterval(trumpetNode, 0.3, startColor=Vec4(1, 1, 0, 1), color=Vec4(1, 1, 0, 0))
crabHorn = globalBattleSoundCache.getSound('King_Crab.ogg')
drumroll = globalBattleSoundCache.getSound('SZ_MM_drumroll.ogg')
fanfare = globalBattleSoundCache.getSound('SZ_MM_fanfare.ogg')
crabHorn.setTime(1.5)
partyBall.setTransparency(1)
partyBall.setColorScale(1, 1, 1, 1)
ballAppear = Parallel(LerpColorScaleInterval(partyBallLeft, 0.3, startColorScale=Vec4(1, 0, 0, 0), colorScale=Vec4(1, 0, 0, 1)), LerpColorScaleInterval(partyBallRight, 0.3, startColorScale=Vec4(1, 1, 0, 0), colorScale=Vec4(1, 1, 0, 1)))
ballVanish = Parallel(LerpColorScaleInterval(partyBallLeft, 0.3, startColorScale=Vec4(1, 0, 0, 1), colorScale=Vec4(1, 0, 0, 0)), LerpColorScaleInterval(partyBallRight, 0.3, startColorScale=Vec4(1, 1, 0, 1), colorScale=Vec4(1, 1, 0, 0)))
play = Parallel(SoundInterval(crabHorn, startTime=1.5, duration=4.0, node=toon), Sequence(Wait(0.25), longshake([trumpet1, trumpet2], 3, 0.2), Wait(0.5), longshake([trumpet1, trumpet2], 3, 0.2), Wait(0.5), longshake([trumpet1, trumpet2], 9, 0.1), longshake([trumpet1, trumpet2], 3, 0.2)))
killParticles = Parallel(Func(blue_p0.setLitterSize, 0), Func(red_p0.setLitterSize, 0), Func(yellow_p0.setLitterSize, 0))
p = Parallel(ParticleInterval(confettiBlue, confettiNode, worldRelative=0, duration=3, cleanup=True), ParticleInterval(confettiRed, confettiNode, worldRelative=0, duration=3, cleanup=True), ParticleInterval(confettiYellow, confettiNode, worldRelative=0, duration=3, cleanup=True))
pOff = Parallel(Func(confettiBlue.remove), Func(confettiRed.remove), Func(confettiYellow.remove))
partInterval = Parallel(p, Sequence(Wait(1.7), killParticles, Wait(1.3), pOff, Func(p.finish)), Sequence(Wait(3), Parallel(ballVanish)))
seq1 = Parallel(Sequence(Wait(delay + 4.1), SoundInterval(drumroll, node=toon), Wait(0.25), SoundInterval(fanfare, node=toon)), Sequence(Wait(delay), trumpetsAppear, Wait(3), ballAppear, Wait(0.5), ballShake1, Wait(0.1), ballShake2, Wait(0.2), Wait(0.1), Parallel(openBall, partInterval), Func(fanfareNode.remove)))
seq = Parallel(seq1, Sequence(Wait(delay), Parallel(trumpetTurn, Sequence(Wait(0.5), play)), Wait(0.5), trumpetsVanish))
if panel != None:
return (seq, panel)
return (seq, None)
from direct.interval.IntervalGlobal import *
from BattleBase import *
from BattleProps import *
from BattleSounds import *
from toontown.toon.ToonDNA import *
from toontown.suit.SuitDNA import *
from direct.particles.ParticleEffect import *
from direct.gui.DirectGui import *
from pandac.PandaModules import *
import MovieUtil
import MovieCamera
from direct.directnotify import DirectNotifyGlobal
import BattleParticles
from toontown.toonbase import ToontownGlobals
import RewardPanel
# Module-level DirectNotify channel used for this movie's debug/log output.
notify = DirectNotifyGlobal.directNotify.newCategory('Fanfare')
def makePanel(toon, showToonName):
    """Create the (initially hidden) reward-dialog frame for a fanfare.

    Args:
        toon: the toon being celebrated; its name is shown when requested.
        showToonName: 1 to add a label with ``toon.getName()`` at the top
            of the panel, any other value to omit it.

    Returns:
        The configured, hidden DirectFrame panel.
    """
    panel = DirectFrame(relief=None, geom=DGG.getDefaultDialogGeom(), geom_color=ToontownGlobals.GlobalDialogColor, geom_scale=(1.75, 1, 0.75), pos=(0, 0, 0.587))
    # NOTE(review): initialiseoptions is passed the RewardPanel *module*
    # (imported via `import RewardPanel`), not a class -- looks suspicious
    # but is preserved as-is; confirm against DirectGui expectations.
    panel.initialiseoptions(RewardPanel)
    panel.setTransparency(1)
    panel.hide()
    # Fix: was `showToonName is 1` -- identity comparison with an int
    # literal relies on CPython small-int interning and emits a
    # SyntaxWarning on Python 3.8+. Value equality is the intended check.
    if showToonName == 1:
        panel.avNameLabel = DirectLabel(parent=panel, relief=None, pos=Vec3(0, 0, 0.3), text=toon.getName(), text_scale=0.08)
    return panel
def makeMessageBox(panel, message, messagePos, messageScale, wordwrap = 100):
    """Attach a text-only DirectFrame to *panel* as ``panel.itemFrame``.

    The frame is reliefless and simply renders *message* at the given
    position/scale, wrapped at *wordwrap* characters.
    """
    frameOptions = dict(parent=panel, relief=None, text=message, text_pos=messagePos, text_scale=messageScale, text_wordwrap=wordwrap)
    panel.itemFrame = DirectFrame(**frameOptions)
def makeImageBox(frame, image, imagePos, imageScale):
    """Copy *image* under *frame*, place and scale it, and expose the
    copy as ``frame.imageIcon``."""
    icon = image.copyTo(frame)
    icon.setPos(imagePos)
    icon.setScale(imageScale)
    frame.imageIcon = icon
def makeFanfare(delay, toon):
    """Build a plain fanfare (no reward panel) over *toon* after *delay*."""
    return doFanfare(delay, toon, None)
def makeFanfareWithMessage(delay, toon, showToonName, message, messagePos, messageScale, wordwrap = 100):
    """Fanfare whose reward panel displays a text message."""
    rewardPanel = makePanel(toon, showToonName)
    makeMessageBox(rewardPanel, message, messagePos, messageScale, wordwrap)
    return doFanfare(delay, toon, rewardPanel)
def makeFanfareWithImage(delay, toon, showToonName, image, imagePos, imageScale, wordwrap = 100):
    """Fanfare whose reward panel displays an image (no text)."""
    rewardPanel = makePanel(toon, showToonName)
    # An empty message box supplies the itemFrame that hosts the image.
    makeMessageBox(rewardPanel, '', Vec3(0, 0, 0), 1, wordwrap)
    makeImageBox(rewardPanel.itemFrame, image, imagePos, imageScale)
    return doFanfare(delay, toon, rewardPanel)
def makeFanfareWithMessageImage(delay, toon, showToonName, message, messagePos, messageScale, image, imagePos, imageScale, wordwrap = 100):
    """Fanfare whose reward panel displays both a message and an image."""
    rewardPanel = makePanel(toon, showToonName)
    makeMessageBox(rewardPanel, message, messagePos, messageScale, wordwrap)
    makeImageBox(rewardPanel.itemFrame, image, imagePos, imageScale)
    return doFanfare(delay, toon, rewardPanel)
def doFanfare(delay, toon, panel):
    """Build the full fanfare movie played over *toon*.

    Assembles a scene graph parented to the toon -- two half "party ball"
    props above the head, a pair of bugles, and three recolored confetti
    particle effects -- and choreographs them with sound into one interval.

    Args:
        delay: seconds to wait before the fanfare begins.
        toon: the toon the effect is attached to.
        panel: optional reward panel (from makePanel); passed back to the
            caller unchanged.

    Returns:
        (interval, panel) when *panel* is not None, else (interval, None).
        The interval removes ``fanfareNode`` itself when it finishes.
    """
    fanfareNode = toon.attachNewNode('fanfareNode')
    partyBall = fanfareNode.attachNewNode('partyBall')
    headparts = toon.getHeadParts()
    # Height reference for positioning the ball/confetti above the head.
    # NOTE(review): assumes headparts[2] is a usable head node -- confirm.
    pos = headparts[2].getPos(fanfareNode)
    # Two mirrored halves of the party ball; tinted red/yellow and started
    # fully transparent (alpha 0) so ballAppear can fade them in later.
    partyBallLeft = globalPropPool.getProp('partyBall')
    partyBallLeft.reparentTo(partyBall)
    partyBallLeft.setScale(0.8)
    partyBallLeft.setH(90)
    partyBallLeft.setColorScale(1, 0, 0, 0)
    partyBallRight = globalPropPool.getProp('partyBall')
    partyBallRight.reparentTo(partyBall)
    partyBallRight.setScale(0.8)
    partyBallRight.setH(-90)
    partyBallRight.setColorScale(1, 1, 0, 0)
    partyBall.setZ(pos.getZ() + 3.2)
    # Two wobble sequences (opposite pitch directions) and the final
    # "open" motion that swings both halves up by 30 degrees.
    ballShake1 = Sequence(Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, 0, 0), hpr=Vec3(90, 10, 0), blendType='easeInOut'), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, 0, 0), hpr=Vec3(-90, -10, 0), blendType='easeInOut')), Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, 10, 0), hpr=Vec3(90, -10, 0), blendType='easeInOut'), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, -10, 0), hpr=Vec3(-90, 10, 0), blendType='easeInOut')), Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, -10, 0), hpr=Vec3(90, 0, 0), blendType='easeInOut'), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, 10, 0), hpr=Vec3(-90, 0, 0), blendType='easeInOut')))
    ballShake2 = Sequence(Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, 0, 0), hpr=Vec3(90, -10, 0), blendType='easeInOut'), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, 0, 0), hpr=Vec3(-90, 10, 0), blendType='easeInOut')), Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, -10, 0), hpr=Vec3(90, 10, 0), blendType='easeInOut'), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, 10, 0), hpr=Vec3(-90, -10, 0), blendType='easeInOut')), Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, 10, 0), hpr=Vec3(90, 0, 0), blendType='easeInOut'), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, -10, 0), hpr=Vec3(-90, 0, 0), blendType='easeInOut')))
    openBall = Parallel(LerpHprInterval(partyBallLeft, duration=0.2, startHpr=Vec3(90, 0, 0), hpr=Vec3(90, 30, 0)), LerpHprInterval(partyBallRight, duration=0.2, startHpr=Vec3(-90, 0, 0), hpr=Vec3(-90, 30, 0)))
    # Confetti emitter node, slightly below the ball.
    confettiNode = fanfareNode.attachNewNode('confetti')
    confettiNode.setScale(3)
    confettiNode.setZ(pos.getZ() + 2.5)

    def longshake(models, num, duration):
        # Alternate shrink (0.23 -> 0.2) and grow (0.2 -> 0.23) pulses,
        # num-1 steps total.  NOTE(review): the same inShake/outShake
        # interval objects are appended repeatedly rather than copied --
        # preserved as-is; confirm Sequence tolerates shared children.
        inShake = getScaleBlendIntervals(models, duration=duration, startScale=0.23, endScale=0.2, blendType='easeInOut')
        outShake = getScaleBlendIntervals(models, duration=duration, startScale=0.2, endScale=0.23, blendType='easeInOut')
        i = 1
        seq = Sequence()
        while i < num:
            if i % 2 == 0:
                seq.append(inShake)
            else:
                seq.append(outShake)
            i += 1
        return seq

    def getScaleBlendIntervals(props, duration, startScale, endScale, blendType):
        # One parallel scale lerp per prop, all with the same timing.
        tracks = Parallel()
        for prop in props:
            tracks.append(LerpScaleInterval(prop, duration, endScale, startScale=startScale, blendType=blendType))
        return tracks

    # Two bugles flanking the toon; the node starts fully transparent so
    # trumpetsAppear can fade them in.
    trumpetNode = fanfareNode.attachNewNode('trumpetNode')
    trumpet1 = globalPropPool.getProp('bugle')
    trumpet2 = MovieUtil.copyProp(trumpet1)
    trumpet1.reparentTo(trumpetNode)
    trumpet1.setScale(0.2)
    trumpet1.setPos(2, 2, 1)
    trumpet1.setHpr(120, 65, 0)
    trumpet2.reparentTo(trumpetNode)
    trumpet2.setScale(0.2)
    trumpet2.setPos(-2, 2, 1)
    trumpet2.setHpr(-120, 65, 0)
    trumpetNode.setTransparency(1)
    trumpetNode.setColor(1, 1, 1, 0)
    trumpturn1 = LerpHprInterval(trumpet1, duration=4, startHpr=Vec3(80, 15, 0), hpr=Vec3(150, 40, 0))
    trumpturn2 = LerpHprInterval(trumpet2, duration=4, startHpr=Vec3(-80, 15, 0), hpr=Vec3(-150, 40, 0))
    trumpetTurn = Parallel(trumpturn1, trumpturn2)
    # Three copies of the 'Confetti' effect, recolored blue/yellow/red via
    # a constant color-interpolation segment on their 'particles-1' system.
    BattleParticles.loadParticles()
    confettiBlue = BattleParticles.createParticleEffect('Confetti')
    confettiBlue.reparentTo(confettiNode)
    blue_p0 = confettiBlue.getParticlesNamed('particles-1')
    blue_p0.renderer.getColorInterpolationManager().addConstant(0.0, 1.0, Vec4(0.0, 0.0, 1.0, 1.0), 1)
    confettiYellow = BattleParticles.createParticleEffect('Confetti')
    confettiYellow.reparentTo(confettiNode)
    yellow_p0 = confettiYellow.getParticlesNamed('particles-1')
    yellow_p0.renderer.getColorInterpolationManager().addConstant(0.0, 1.0, Vec4(1.0, 1.0, 0.0, 1.0), 1)
    confettiRed = BattleParticles.createParticleEffect('Confetti')
    confettiRed.reparentTo(confettiNode)
    red_p0 = confettiRed.getParticlesNamed('particles-1')
    red_p0.renderer.getColorInterpolationManager().addConstant(0.0, 1.0, Vec4(1.0, 0.0, 0.0, 1.0), 1)
    trumpetsAppear = LerpColorInterval(trumpetNode, 0.3, startColor=Vec4(1, 1, 0, 0), color=Vec4(1, 1, 0, 1))
    trumpetsVanish = LerpColorInterval(trumpetNode, 0.3, startColor=Vec4(1, 1, 0, 1), color=Vec4(1, 1, 0, 0))
    crabHorn = globalBattleSoundCache.getSound('King_Crab.ogg')
    drumroll = globalBattleSoundCache.getSound('SZ_MM_drumroll.ogg')
    fanfare = globalBattleSoundCache.getSound('SZ_MM_fanfare.ogg')
    # Skip the first 1.5s of the horn clip (matches startTime below).
    crabHorn.setTime(1.5)
    partyBall.setTransparency(1)
    partyBall.setColorScale(1, 1, 1, 1)
    # Fade the ball halves in/out via their per-half color scales.
    ballAppear = Parallel(LerpColorScaleInterval(partyBallLeft, 0.3, startColorScale=Vec4(1, 0, 0, 0), colorScale=Vec4(1, 0, 0, 1)), LerpColorScaleInterval(partyBallRight, 0.3, startColorScale=Vec4(1, 1, 0, 0), colorScale=Vec4(1, 1, 0, 1)))
    ballVanish = Parallel(LerpColorScaleInterval(partyBallLeft, 0.3, startColorScale=Vec4(1, 0, 0, 1), colorScale=Vec4(1, 0, 0, 0)), LerpColorScaleInterval(partyBallRight, 0.3, startColorScale=Vec4(1, 1, 0, 1), colorScale=Vec4(1, 1, 0, 0)))
    # Horn sound synced with pulsing trumpet scale shakes.
    play = Parallel(SoundInterval(crabHorn, startTime=1.5, duration=4.0, node=toon), Sequence(Wait(0.25), longshake([trumpet1, trumpet2], 3, 0.2), Wait(0.5), longshake([trumpet1, trumpet2], 3, 0.2), Wait(0.5), longshake([trumpet1, trumpet2], 9, 0.1), longshake([trumpet1, trumpet2], 3, 0.2)))
    # Stop emission (litter size 0) at 1.7s, then remove the effects at 3s.
    killParticles = Parallel(Func(blue_p0.setLitterSize, 0), Func(red_p0.setLitterSize, 0), Func(yellow_p0.setLitterSize, 0))
    p = Parallel(ParticleInterval(confettiBlue, confettiNode, worldRelative=0, duration=3, cleanup=True), ParticleInterval(confettiRed, confettiNode, worldRelative=0, duration=3, cleanup=True), ParticleInterval(confettiYellow, confettiNode, worldRelative=0, duration=3, cleanup=True))
    pOff = Parallel(Func(confettiBlue.remove), Func(confettiRed.remove), Func(confettiYellow.remove))
    partInterval = Parallel(p, Sequence(Wait(1.7), killParticles, Wait(1.3), pOff, Func(p.finish)), Sequence(Wait(3), Parallel(ballVanish)))
    # Main timeline: drumroll+fanfare audio track alongside the visual
    # track (trumpets appear, ball appears/shakes/opens with confetti),
    # ending by removing the whole fanfareNode.
    seq1 = Parallel(Sequence(Wait(delay + 4.1), SoundInterval(drumroll, node=toon), Wait(0.25), SoundInterval(fanfare, node=toon)), Sequence(Wait(delay), trumpetsAppear, Wait(3), ballAppear, Wait(0.5), ballShake1, Wait(0.1), ballShake2, Wait(0.2), Wait(0.1), Parallel(openBall, partInterval), Func(fanfareNode.remove)))
    seq = Parallel(seq1, Sequence(Wait(delay), Parallel(trumpetTurn, Sequence(Wait(0.5), play)), Wait(0.5), trumpetsVanish))
    if panel != None:
        return (seq, panel)
    return (seq, None)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.