blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 4
721
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 5
91
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 321
values | visit_date
timestamp[ns]date 2016-08-12 09:31:09
2023-09-06 10:45:07
| revision_date
timestamp[ns]date 2010-09-28 14:01:40
2023-09-06 06:22:19
| committer_date
timestamp[ns]date 2010-09-28 14:01:40
2023-09-06 06:22:19
| github_id
int64 426
681M
| star_events_count
int64 101
243k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[ns]date 2012-06-28 18:51:49
2023-09-14 21:59:16
⌀ | gha_created_at
timestamp[ns]date 2008-02-11 22:55:26
2023-08-10 11:14:58
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 26
values | language
stringclasses 2
values | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 6
10.2M
| extension
stringclasses 115
values | filename
stringlengths 3
113
| content
stringlengths 6
10.2M
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2f9a0bbd346d1a7b402fc4a2e66d7c59d629810a
|
df87814cb32990ad8c27d0b13a821aabce012819
|
/kolibri/plugins/policies/kolibri_plugin.py
|
61cb7c4ac1d910d35d86c65a30c1c376baf9941f
|
[
"MIT"
] |
permissive
|
learningequality/kolibri
|
26812d4ae771f3b389d3317a586bc032fc84866b
|
cc9da2a6acd139acac3cd71c4cb05c15d4465712
|
refs/heads/release-v0.16.x
| 2023-09-01T18:07:29.720772
| 2023-08-31T15:43:47
| 2023-08-31T15:43:47
| 49,976,939
| 689
| 682
|
MIT
| 2023-09-14T20:02:29
| 2016-01-19T19:22:07
|
Python
|
UTF-8
|
Python
| false
| false
| 688
|
py
|
kolibri_plugin.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins import KolibriPluginBase
from kolibri.plugins.hooks import register_hook
from kolibri.utils import translation
from kolibri.utils.translation import ugettext as _
class Policies(KolibriPluginBase):
    """Kolibri plugin exposing the policies pages."""

    # Module name (relative to the plugin) holding the translated views.
    translated_view_urls = "urls"

    @property
    def url_slug(self):
        """URL prefix under which this plugin's views are mounted."""
        return "policies"

    def name(self, lang):
        """Return the plugin's display name, translated into ``lang``."""
        with translation.override(lang):
            translated_name = _("Policies")
        return translated_name
@register_hook
class PoliciesAsset(webpack_hooks.WebpackBundleHook):
    """Webpack frontend bundle registered for the policies plugin."""

    # Identifier of the webpack bundle to serve for this plugin.
    bundle_id = "app"
|
c9a9bef0022846df258d794d130fa4ae32f3293c
|
73a0f661f1423d63e86489d4b2673f0103698aab
|
/python/oneflow/test/modules/test_rand.py
|
bcf9cd2c45a505014922bc845e44748d0b81ad99
|
[
"Apache-2.0"
] |
permissive
|
Oneflow-Inc/oneflow
|
4fc3e081e45db0242a465c4330d8bcc8b21ee924
|
0aab78ea24d4b1c784c30c57d33ec69fe5605e4a
|
refs/heads/master
| 2023-08-25T16:58:30.576596
| 2023-08-22T14:15:46
| 2023-08-22T14:15:46
| 81,634,683
| 5,495
| 786
|
Apache-2.0
| 2023-09-14T09:44:31
| 2017-02-11T06:09:53
|
C++
|
UTF-8
|
Python
| false
| false
| 4,725
|
py
|
test_rand.py
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from collections import OrderedDict
import numpy as np
import oneflow as flow
import oneflow.unittest
from oneflow.test_utils.automated_test_util import *
from oneflow.test_utils.test_util import GenArgList
def _test_rand(test_case, device, shape):
    """Both calling conventions (*shape and size=) yield fresh samples of the right shape."""
    dev = flow.device(device)
    first = flow.rand(*shape, device=dev)
    second = flow.rand(size=shape, device=dev)
    # Two independent draws should (virtually) never be identical.
    test_case.assertFalse(np.array_equal(first.numpy(), second.numpy()))
    test_case.assertEqual(shape, first.shape)
    test_case.assertEqual(shape, second.shape)
def _test_rand_tuple_shape(test_case, device, shape):
    """rand accepts the shape as a single tuple argument."""
    dev = flow.device(device)
    a = flow.rand(shape, device=dev)
    b = flow.rand(shape, device=dev)
    test_case.assertFalse(np.array_equal(a.numpy(), b.numpy()))
    test_case.assertEqual(shape, a.shape)
def _test_0d_rand(test_case, device, shape):
    """Shapes containing a zero dimension produce (equal) empty tensors."""
    dev = flow.device(device)
    a = flow.rand(*shape, device=dev)
    b = flow.rand(*shape, device=dev)
    # Both tensors are empty, so they trivially compare close: [] and []
    test_case.assertTrue(
        np.allclose(a.numpy(), b.numpy(), atol=1e-4, rtol=1e-4)
    )
    test_case.assertEqual(shape, a.shape)
def _test_different_dtype(test_case, device, shape):
    """Float dtypes are supported; integer dtypes raise NotImplementedError."""
    dev = flow.device(device)
    a = flow.rand(*shape, dtype=flow.float32, device=dev)
    b = flow.rand(*shape, dtype=flow.float64, device=dev)
    test_case.assertFalse(np.array_equal(a.numpy(), b.numpy()))
    test_case.assertEqual(shape, a.shape)
    # rand is only defined for floating-point dtypes.
    with test_case.assertRaises(NotImplementedError):
        flow.rand(*shape, dtype=flow.int32, device=dev)
def _test_backward(test_case, device, shape):
    """The gradient of sum(rand(...)) w.r.t. the tensor is all ones."""
    x = flow.rand(*shape, device=flow.device(device), requires_grad=True)
    x.sum().backward()
    test_case.assertTrue(np.array_equal(np.ones(shape), x.grad.numpy()))
def _test_with_generator(test_case, device, shape):
    """Re-seeding the same generator reproduces the same sample."""
    gen = flow.Generator()
    dev = flow.device(device)
    draws = []
    for _ in range(2):
        gen.manual_seed(0)
        draws.append(
            flow.rand(*shape, dtype=flow.float32, device=dev, generator=gen)
        )
    test_case.assertTrue(
        np.allclose(draws[0].numpy(), draws[1].numpy(), atol=1e-4, rtol=1e-4)
    )
def _test_rand_with_flow_size(test_case, device, shape):
    """rand accepts a flow.Size object as the shape argument."""
    dev = flow.device(device)
    a = flow.rand(flow.Size(shape), device=dev)
    b = flow.rand(flow.Size(shape), device=dev)
    test_case.assertFalse(np.array_equal(a.numpy(), b.numpy()))
    test_case.assertEqual(shape, a.shape)
@flow.unittest.skip_unless_1n1d()
class TestRandModule(flow.unittest.TestCase):
    """Drive the _test_* helpers over a grid of devices and shapes."""

    def test_0d_randint(test_case):
        # Shapes with a zero-sized dimension exercise the empty-tensor path.
        arg_dict = OrderedDict()
        arg_dict["test_fun"] = [_test_0d_rand]
        arg_dict["device"] = ["cpu", "cuda"]
        arg_dict["shape"] = [(2, 0, 4), (2, 0, 2)]
        for arg in GenArgList(arg_dict):
            arg[0](test_case, *arg[1:])

    def test_cases(test_case):
        # Cartesian product: every helper x every device x every shape.
        arg_dict = OrderedDict()
        arg_dict["test_fun"] = [
            _test_rand,
            _test_rand_tuple_shape,
            _test_different_dtype,
            _test_backward,
            _test_with_generator,
            _test_rand_with_flow_size,
        ]
        arg_dict["device"] = ["cpu", "cuda"]
        arg_dict["shape"] = [(2, 3), (2, 3, 4), (2, 3, 4, 5), (2, 4)]
        for arg in GenArgList(arg_dict):
            arg[0](test_case, *arg[1:])

    # NOTE(review): `os` has no explicit import in this file; it is
    # presumably re-exported by the star import above — verify.
    @unittest.skipIf(os.getenv("ONEFLOW_TEST_CPU_ONLY"), "only test cpu cases")
    def test_half_rand(test_case):
        # float16 sampling is checked on both backends.
        for device in ["cuda", "cpu"]:
            x = flow.rand(2, 3, dtype=flow.float16, device=flow.device(device))
            test_case.assertTrue(x.dtype == flow.float16)
            test_case.assertTrue(x.shape == flow.Size((2, 3)))
# NOTE(review): `os` has no explicit import in this file; it is
# presumably re-exported by the star import above — verify.
@unittest.skipIf(os.getenv("ONEFLOW_TEST_CPU_ONLY"), "only test cpu cases")
@flow.unittest.skip_unless_1n2d()
class TestRandOnNonDefaultDevice(flow.unittest.TestCase):
    """Sampling on an explicit, non-default CUDA device (needs 2 devices)."""

    def test_non_default_device(test_case):
        x = flow.rand(2, 3, device="cuda:1")
        test_case.assertEqual(x.device, flow.device("cuda:1"))
# Run the test suite when executed as a script.
if __name__ == "__main__":
    unittest.main()
|
eb918554171d1f49ead1bde7991962e92d6c19c6
|
d8233a120f46fd07e604cfcf0fd05506e8b4a1ec
|
/lesion_detector_3DCE/rcnn/symbol/proposal.py
|
878cfe7e9a65b991b7085fed2ea139e3ef08b6a7
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
rsummers11/CADLab
|
aeb4e8b66563c33d2fae9a73a3f35da647c1b2eb
|
78766a3609a16d6ac8e1d22344f6bebef509aef7
|
refs/heads/master
| 2023-07-04T23:30:49.791094
| 2023-06-22T16:47:28
| 2023-06-22T16:47:28
| 23,888,481
| 441
| 222
| null | 2022-02-19T15:42:24
| 2014-09-10T19:16:19
|
C++
|
UTF-8
|
Python
| false
| false
| 10,529
|
py
|
proposal.py
|
"""
Proposal Operator transform anchor coordinates into ROI coordinates with prediction results on
classification probability and bounding box prediction results, and image size and scale information.
"""
import mxnet as mx
import numpy as np
import numpy.random as npr
from distutils.util import strtobool
from rcnn.logger import logger
from rcnn.processing.bbox_transform import bbox_pred, clip_boxes
from rcnn.processing.generate_anchor import generate_anchors
from rcnn.processing.nms import py_nms_wrapper, cpu_nms_wrapper, gpu_nms_wrapper
from rcnn.config import config
class ProposalOperator(mx.operator.CustomOp):
    """Turn RPN class probabilities and bbox deltas into ROI proposals.

    Anchors are tiled over the feature map, adjusted by the predicted
    deltas, clipped to the image, filtered by minimum size and reduced
    with NMS.  For 3DCE only the key slice of every ``num_image``-slice
    group in the batch produces proposals.
    """

    def __init__(self, feat_stride, scales, ratios, output_score,
                 rpn_pre_nms_top_n, rpn_post_nms_top_n, threshold, rpn_min_size):
        super(ProposalOperator, self).__init__()
        self._feat_stride = feat_stride
        # scales/ratios arrive as stringified tuples like '(8, 16, 32)';
        # strip the parentheses and parse the comma-separated values.
        self._scales = np.fromstring(scales[1:-1], dtype=float, sep=',')
        self._ratios = np.fromstring(ratios[1:-1], dtype=float, sep=',')
        self._anchors = generate_anchors(base_size=self._feat_stride, scales=self._scales, ratios=self._ratios)
        self._num_anchors = self._anchors.shape[0]
        self._output_score = output_score
        self._rpn_pre_nms_top_n = rpn_pre_nms_top_n
        self._rpn_post_nms_top_n = rpn_post_nms_top_n
        self._threshold = threshold
        self._rpn_min_size = rpn_min_size
        logger.debug('feat_stride: %s' % self._feat_stride)
        logger.debug('anchors:\n%s' % self._anchors)

    def forward(self, is_train, req, in_data, out_data, aux):
        """Compute ROIs for every key slice in the batch.

        in_data:  [cls_prob (N, 2A, H, W), bbox_pred (N, 4A, H, W), im_info (N, 3)]
        out_data: [rois (n, 5)] plus, when output_score is set, [scores (n, 1)]
        """
        nms = gpu_nms_wrapper(self._threshold, in_data[0].context.device_id)
        batch_size = in_data[0].shape[0]
        # for each (H, W) location i
        #   generate A anchor boxes centered on cell i
        #   apply predicted bbox deltas at cell i to each of the A anchors
        # clip predicted boxes to image
        # remove predicted boxes with either height or width < threshold
        # sort all (proposal, score) pairs by score from highest to lowest
        # take top pre_nms_topN proposals before NMS
        # apply NMS with threshold 0.7 to remaining proposals
        # take after_nms_topN proposals after NMS
        # return the top proposals (-> RoIs top, scores top)
        pre_nms_topN = self._rpn_pre_nms_top_n
        post_nms_topN = self._rpn_post_nms_top_n
        min_size = self._rpn_min_size
        blob_all = np.empty((0, 5))
        scores_all = np.empty((0, 1))
        num_image = config.NUM_IMAGES_3DCE
        # BUGFIX: use floor division — plain '/' yields a float under
        # Python 3, and range() below would then raise TypeError.
        key_idx = (num_image - 1) // 2
        for i in range(key_idx, batch_size, num_image):
            # the first set of anchors are background probabilities;
            # keep the second (foreground) part
            scores = in_data[0].asnumpy()[i:i+1, self._num_anchors:, :, :]
            bbox_deltas = in_data[1].asnumpy()[i:i+1, :, :, :]
            im_info = in_data[2].asnumpy()[i, :]
            logger.debug('im_info: %s' % im_info)
            # 1. Generate proposals from bbox_deltas and shifted anchors
            # use real image size instead of padded feature map sizes
            height, width = int(im_info[0] / self._feat_stride), int(im_info[1] / self._feat_stride)
            logger.debug('score map size: (%d, %d)' % (scores.shape[2], scores.shape[3]))
            logger.debug('residual: (%d, %d)' % (scores.shape[2] - height, scores.shape[3] - width))
            # Enumerate all shifts
            shift_x = np.arange(0, width) * self._feat_stride
            shift_y = np.arange(0, height) * self._feat_stride
            shift_x, shift_y = np.meshgrid(shift_x, shift_y)
            shifts = np.vstack((shift_x.ravel(), shift_y.ravel(), shift_x.ravel(), shift_y.ravel())).transpose()
            # Enumerate all shifted anchors:
            # add A anchors (1, A, 4) to K shifts (K, 1, 4) to get
            # shifted anchors (K, A, 4), then reshape to (K*A, 4)
            A = self._num_anchors
            K = shifts.shape[0]
            anchors = self._anchors.reshape((1, A, 4)) + shifts.reshape((1, K, 4)).transpose((1, 0, 2))
            anchors = anchors.reshape((K * A, 4))
            # Transpose and reshape predicted bbox transformations to get them
            # into the same order as the anchors:
            # bbox deltas are (1, 4*A, H, W); transpose to (1, H, W, 4*A) and
            # reshape to (H*W*A, 4) so rows are ordered by (h, w, a)
            bbox_deltas = self._clip_pad(bbox_deltas, (height, width))
            bbox_deltas = bbox_deltas.transpose((0, 2, 3, 1)).reshape((-1, 4))
            # Same story for the scores: (1, A, H, W) -> (H*W*A, 1)
            scores = self._clip_pad(scores, (height, width))
            scores = scores.transpose((0, 2, 3, 1)).reshape((-1, 1))
            # Convert anchors into proposals via bbox transformations
            proposals = bbox_pred(anchors, bbox_deltas)
            # 2. clip predicted boxes to image
            proposals = clip_boxes(proposals, im_info[:2])
            # 3. remove predicted boxes with either height or width < threshold
            # (NOTE: convert min_size to input image scale stored in im_info[2])
            keep = self._filter_boxes(proposals, min_size * im_info[2])
            proposals = proposals[keep, :]
            scores = scores[keep]
            # 4. sort all (proposal, score) pairs by score from highest to lowest
            # 5. take top pre_nms_topN (e.g. 6000)
            order = scores.ravel().argsort()[::-1]
            if pre_nms_topN > 0:
                order = order[:pre_nms_topN]
            proposals = proposals[order, :]
            scores = scores[order]
            # 6. apply nms (e.g. threshold = 0.7)
            # 7. take after_nms_topN (e.g. 300)
            # 8. return the top proposals (-> RoIs top)
            det = np.hstack((proposals, scores)).astype(np.float32)
            keep = nms(det)
            if post_nms_topN > 0:
                keep = keep[:post_nms_topN]
                # pad to ensure output size remains unchanged
                if len(keep) < post_nms_topN:
                    pad = npr.choice(keep, size=post_nms_topN - len(keep))
                    keep = np.hstack((keep, pad))
            proposals = proposals[keep, :]
            scores = scores[keep]
            # Output rois array; batch index is per 3DCE group.
            # BUGFIX: floor division keeps batch_inds integral in Python 3.
            batch_inds = (i - key_idx) // num_image
            batch_inds = np.zeros((proposals.shape[0], 1), dtype=np.float32) + batch_inds
            blob = np.hstack((batch_inds, proposals.astype(np.float32, copy=False)))
            blob_all = np.vstack([blob_all, blob])
            scores_all = np.vstack([scores_all, scores])
        self.assign(out_data[0], req[0], blob_all)
        if self._output_score:
            self.assign(out_data[1], req[1], scores_all.astype(np.float32, copy=False))

    def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
        """Proposal generation is non-differentiable; propagate zero grads."""
        self.assign(in_grad[0], req[0], 0)
        self.assign(in_grad[1], req[1], 0)
        self.assign(in_grad[2], req[2], 0)

    @staticmethod
    def _filter_boxes(boxes, min_size):
        """Remove all boxes with any side smaller than min_size."""
        ws = boxes[:, 2] - boxes[:, 0] + 1
        hs = boxes[:, 3] - boxes[:, 1] + 1
        keep = np.where((ws >= min_size) & (hs >= min_size))[0]
        return keep

    @staticmethod
    def _clip_pad(tensor, pad_shape):
        """
        Clip boxes of the pad area.
        :param tensor: [n, c, H, W]
        :param pad_shape: [h, w]
        :return: [n, c, h, w]
        """
        H, W = tensor.shape[2:]
        h, w = pad_shape
        if h < H or w < W:
            tensor = tensor[:, :, :h, :w].copy()
        return tensor
@mx.operator.register("proposal")
class ProposalProp(mx.operator.CustomOpProp):
    """Symbol-level properties for the custom 'proposal' operator.

    All constructor arguments arrive as strings (mxnet passes custom-op
    kwargs as text) and are parsed here.
    """

    def __init__(self, feat_stride='16', scales='(8, 16, 32)', ratios='(0.5, 1, 2)', output_score='False',
                 rpn_pre_nms_top_n='6000', rpn_post_nms_top_n='300', threshold='0.3', rpn_min_size='16'):
        super(ProposalProp, self).__init__(need_top_grad=False)
        self._feat_stride = int(feat_stride)
        self._scales = scales
        self._ratios = ratios
        self._output_score = strtobool(output_score)
        self._rpn_pre_nms_top_n = int(rpn_pre_nms_top_n)
        self._rpn_post_nms_top_n = int(rpn_post_nms_top_n)
        self._threshold = float(threshold)
        self._rpn_min_size = int(rpn_min_size)

    def list_arguments(self):
        return ['cls_prob', 'bbox_pred', 'im_info']

    def list_outputs(self):
        if self._output_score:
            return ['output', 'score']
        else:
            return ['output']

    def infer_shape(self, in_shape):
        """Derive output shapes; one 3DCE group yields one set of proposals."""
        cls_prob_shape = in_shape[0]
        bbox_pred_shape = in_shape[1]
        assert cls_prob_shape[0] == bbox_pred_shape[0], 'ROI number does not equal in cls and reg'
        num_image = config.NUM_IMAGES_3DCE
        batch_size = cls_prob_shape[0]
        im_info_shape = (batch_size, 3)
        # BUGFIX: floor division keeps the shape entries integral under
        # Python 3 ('/' would produce floats and break shape inference).
        output_shape = (self._rpn_post_nms_top_n * batch_size // num_image, 5)
        score_shape = (self._rpn_post_nms_top_n * batch_size // num_image, 1)
        if self._output_score:
            return [cls_prob_shape, bbox_pred_shape, im_info_shape], [output_shape, score_shape]
        else:
            return [cls_prob_shape, bbox_pred_shape, im_info_shape], [output_shape]

    def create_operator(self, ctx, shapes, dtypes):
        return ProposalOperator(self._feat_stride, self._scales, self._ratios, self._output_score,
                                self._rpn_pre_nms_top_n, self._rpn_post_nms_top_n, self._threshold, self._rpn_min_size)

    def declare_backward_dependency(self, out_grad, in_data, out_data):
        # Proposals need no gradient from above.
        return []
|
1bf13ad8e883e41a08e61a4a20bba6b9f2692883
|
5f69a6549b8d5e417553d910622e6855b2ae679b
|
/src/opendr/perception/skeleton_based_action_recognition/algorithm/graphs/nturgbd.py
|
c17a8aae7f31c0772b042cee82d87826a63dce24
|
[
"Apache-2.0"
] |
permissive
|
opendr-eu/opendr
|
822219f709613d77c5eb62c5d02808d344239835
|
b3d6ce670cdf63469fc5766630eb295d67b3d788
|
refs/heads/master
| 2023-08-31T07:02:36.375231
| 2023-08-29T06:39:51
| 2023-08-29T06:39:51
| 293,755,225
| 535
| 82
|
Apache-2.0
| 2023-09-13T16:53:34
| 2020-09-08T08:55:04
|
Python
|
UTF-8
|
Python
| false
| false
| 1,765
|
py
|
nturgbd.py
|
"""
Modified based on: https://github.com/open-mmlab/mmskeleton
"""
import numpy as np
# Number of joints (nodes) in the NTU RGB+D skeleton.
num_node = 25
# One self-loop per joint.
self_link = [(i, i) for i in range(num_node)]
# Bone connections between joints, given with 1-based joint indices.
in_edge_ori_index = [(1, 2), (2, 21), (3, 21), (4, 3), (5, 21), (6, 5), (7, 6),
                     (8, 7), (9, 21), (10, 9), (11, 10), (12, 11), (13, 1),
                     (14, 13), (15, 14), (16, 15), (17, 1), (18, 17), (19, 18),
                     (20, 19), (22, 23), (23, 8), (24, 25), (25, 12)]
# Same edges converted to 0-based indices.
in_edge = [(i - 1, j - 1) for (i, j) in in_edge_ori_index]
# Reversed edges.
out_edge = [(j, i) for (i, j) in in_edge]
# Full (undirected) neighborhood: both edge directions.
neighbor = in_edge + out_edge
def get_hop(link, num_node):
    """Build a (num_node, num_node) adjacency matrix with A[j, i] = 1 for each (i, j) in link."""
    adjacency = np.zeros((num_node, num_node))
    for src, dst in link:
        adjacency[dst, src] = 1
    return adjacency
def normalize_digraph(A):
    """Column-normalize A: divide each column by its sum (zero columns stay zero)."""
    col_sums = np.sum(A, 0)
    _, width = A.shape
    inv_degree = np.zeros((width, width))
    for col in range(width):
        if col_sums[col] > 0:
            inv_degree[col, col] = col_sums[col] ** (-1)
    return np.dot(A, inv_degree)
def get_spatial_graph(num_node, self_link, in_edge, out_edge):
    """Stack identity, normalized-inward and normalized-outward adjacency matrices into (3, V, V)."""
    identity = get_hop(self_link, num_node)
    inward = normalize_digraph(get_hop(in_edge, num_node))
    outward = normalize_digraph(get_hop(out_edge, num_node))
    return np.stack((identity, inward, outward))
class NTUGraph:
    """Spatial adjacency matrices for the 25-joint NTU RGB+D skeleton."""

    def __init__(self, labeling_mode='spatial'):
        # NOTE: passing labeling_mode=None here would dereference self.A
        # before it exists; only call with a real mode at construction.
        self.A = self.get_adjacency_matrix(labeling_mode)
        self.num_node = num_node
        self.self_link = self_link
        self.in_edge = in_edge
        self.out_edge = out_edge
        self.neighbor = neighbor

    def get_adjacency_matrix(self, labeling_mode=None):
        """Return the stacked (3, V, V) adjacency tensor.

        With labeling_mode=None the previously computed matrix is
        returned; 'spatial' recomputes it. Other modes raise ValueError.
        """
        if labeling_mode is None:
            return self.A
        if labeling_mode == 'spatial':
            A = get_spatial_graph(num_node, self_link, in_edge, out_edge)
        else:
            # BUGFIX: include the offending mode in the error message
            # instead of raising a bare ValueError().
            raise ValueError("unsupported labeling mode: %r" % labeling_mode)
        return A
|
d113ca1a83e27d164320cfc517568ba38adbf1f4
|
69d8d91954f6623f3674d52d734d589f72383628
|
/openstack_dashboard/dashboards/identity/users/tabs.py
|
4bfca875283a28d7581502a78c3d970388a296f8
|
[
"Apache-2.0"
] |
permissive
|
openstack/horizon
|
d031cebe126c06ad9717bbc52790b3d890e8661e
|
7896fd8c77a6766a1156a520946efaf792b76ca5
|
refs/heads/master
| 2023-09-04T06:57:58.069907
| 2023-09-01T20:17:10
| 2023-09-01T20:17:10
| 2,665,166
| 1,060
| 1,175
|
Apache-2.0
| 2023-08-07T02:33:44
| 2011-10-28T13:12:05
|
Python
|
UTF-8
|
Python
| false
| false
| 5,435
|
py
|
tabs.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from horizon import exceptions
from horizon import tabs
from openstack_dashboard import api
from openstack_dashboard.dashboards.identity.users.groups \
import tables as groups_tables
from openstack_dashboard.dashboards.identity.users.role_assignments \
import tables as role_assignments_tables
from openstack_dashboard import policy
LOG = logging.getLogger(__name__)
class OverviewTab(tabs.Tab):
    """Overview of the user.

    Global user information such as user name, domain ID, email...
    """
    name = _("Overview")
    slug = "overview"
    template_name = 'identity/users/_detail_overview.html'

    def _get_domain_name(self, user):
        """Return the name of the user's domain, or '' when unavailable.

        Falls back to the default domain when the caller is not allowed
        to fetch the user's domain via the identity:get_domain policy.
        """
        domain_name = ''
        try:
            if policy.check((("identity", "identity:get_domain"),),
                            self.request):
                domain = api.keystone.domain_get(
                    self.request, user.domain_id)
                domain_name = domain.name
            else:
                domain = api.keystone.get_default_domain(self.request)
                domain_name = domain.get('name')
        except Exception:
            exceptions.handle(self.request,
                              _('Unable to retrieve user domain.'))
        return domain_name

    def _get_project_name(self, user):
        """Return the name of the user's project, or None if unset/unfetchable."""
        project_id = user.project_id
        if not project_id:
            return
        try:
            tenant = api.keystone.tenant_get(self.request, project_id)
            return tenant.name
        except Exception as e:
            # Log rather than raise: the overview still renders without it.
            LOG.error('Failed to get tenant %(project_id)s: %(reason)s',
                      {'project_id': project_id, 'reason': e})

    def _get_extras(self, user):
        """Map configured extra-info display labels to the user's attribute values."""
        extra_info = settings.USER_TABLE_EXTRA_INFO
        return dict((display_key, getattr(user, key, ''))
                    for key, display_key in extra_info.items())

    def get_context_data(self, request):
        """Assemble the template context for the overview tab."""
        user = self.tab_group.kwargs['user']
        options = getattr(user, 'options', {})
        return {
            "user": user,
            "domain_name": self._get_domain_name(user),
            'extras': self._get_extras(user),
            'project_name': self._get_project_name(user),
            'lock_password': options.get('lock_password', False),
        }
class RoleAssignmentsTab(tabs.TableTab):
    """Role assignment of the user to domain/project."""
    table_classes = (role_assignments_tables.RoleAssignmentsTable,)
    name = _("Role assignments")
    slug = "roleassignments"
    template_name = "horizon/common/_detail_table.html"
    preload = False
    policy_rules = (("identity", "identity:list_role_assignments"),)

    def get_roleassignmentstable_data(self):
        """Return the user's direct role assignments plus those inherited
        through group membership (best effort on failure)."""
        user = self.tab_group.kwargs['user']
        role_assignments = []
        try:
            # Get all the roles of the user
            role_assignments = api.keystone.role_assignments_list(
                self.request, user=user, include_subtree=False,
                include_names=True)
        except Exception:
            exceptions.handle(
                self.request,
                _("Unable to display the role assignments of this user."))
        else:
            # Only look up group assignments when the direct lookup worked.
            # Find all the role assignments through the groups of the user
            try:
                user_groups = api.keystone.group_list(
                    self.request, user=user.id)
                # Get the role for each group of the user:
                for group in user_groups:
                    group_role_assignments = api.keystone. \
                        role_assignments_list(
                            self.request, group=group, include_subtree=False,
                            include_names=True)
                    role_assignments.extend(group_role_assignments)
            except Exception:
                exceptions.handle(
                    self.request,
                    _("Unable to display role assignment through groups."))
        return role_assignments
class GroupsTab(tabs.TableTab):
    """Groups of the user."""
    table_classes = (groups_tables.GroupsTable,)
    name = _("Groups")
    slug = "groups"
    template_name = "horizon/common/_detail_table.html"
    preload = False
    policy_rules = (("identity", "identity:list_groups"),)

    def get_groupstable_data(self):
        """Return the keystone groups this user belongs to ([] on error)."""
        user = self.tab_group.kwargs['user']
        try:
            return api.keystone.group_list(self.request, user=user.id)
        except Exception:
            exceptions.handle(self.request,
                              _("Unable to display the groups of this user."))
            return []
class UserDetailTabs(tabs.DetailTabsGroup):
    """Tab group rendered on the user detail page."""
    slug = "user_details"
    tabs = (OverviewTab, RoleAssignmentsTab, GroupsTab,)
|
87284f15bc6b6e289eaf0d2ed770f324bcfe9392
|
c117e905ac5f1938da3c8e23845ad52cc922923a
|
/src/orion/executor/dask_backend.py
|
6d90053f0eebac50b233b7adb1f0be9072615328
|
[
"BSD-3-Clause"
] |
permissive
|
Epistimio/orion
|
2850983dd7ac0a417d451d39b2dc7a652f1920c8
|
2944875eff03b86138d6780df4b1dd6dc8158ccb
|
refs/heads/develop
| 2023-09-01T20:36:59.279966
| 2023-08-21T13:25:43
| 2023-08-21T13:25:43
| 102,697,867
| 218
| 41
|
NOASSERTION
| 2023-08-21T12:51:55
| 2017-09-07T06:05:21
|
Python
|
UTF-8
|
Python
| false
| false
| 3,823
|
py
|
dask_backend.py
|
import traceback
from orion.executor.base import (
AsyncException,
AsyncResult,
BaseExecutor,
ExecutorClosed,
Future,
)
try:
import dask.distributed
from dask.distributed import Client, get_client, get_worker, rejoin, secede
HAS_DASK = True
except ImportError:
HAS_DASK = False
class _Future(Future):
    """Wraps a Dask Future"""

    def __init__(self, future):
        self.future = future
        # Exception captured by wait(); re-raised by a subsequent get().
        self.exception = None

    def get(self, timeout=None):
        """Return the task's result.

        Re-raises an exception previously captured by wait(); raises the
        builtin TimeoutError when the result is not ready in `timeout`.
        """
        if self.exception:
            raise self.exception
        try:
            return self.future.result(timeout)
        except dask.distributed.TimeoutError as e:
            raise TimeoutError() from e

    def wait(self, timeout=None):
        """Block until the task finishes or `timeout` elapses.

        A timeout is silently ignored; a task failure is stored (not
        raised) so that get() reports it later.
        """
        try:
            self.future.result(timeout)
        except dask.distributed.TimeoutError:
            pass
        except Exception as e:
            self.exception = e

    def ready(self):
        """Return True once the task has finished (successfully or not)."""
        return self.future.done()

    def successful(self):
        """Return True if the task finished without raising.

        Raises ValueError when the task has not completed yet.
        """
        if not self.future.done():
            raise ValueError()
        return self.future.exception() is None
class Dask(BaseExecutor):
    """Wrapper around the dask client.

    .. warning::

       The Dask executor can be pickled and used inside a subprocess,
       the pickled client will use the main client that was spawned in the main process,
       but you cannot spawn clients inside a subprocess.

    """

    def __init__(self, n_workers=-1, client=None, **config):
        super().__init__(n_workers=n_workers)
        if not HAS_DASK:
            raise ImportError("Dask must be installed to use Dask executor.")
        self.config = config
        # Spawn a fresh client only when one was not injected by the caller.
        if client is None:
            client = Client(**self.config)
        self.client = client

    def __getstate__(self):
        # Pickle only the scheduler address; the client object itself
        # cannot cross process boundaries.
        state = super().__getstate__()
        state["address"] = self.client.cluster.scheduler_address
        return state

    def __setstate__(self, state):
        # Reattach to the already-running scheduler rather than spawning
        # a new client (see the class warning above).
        super().__setstate__(state)
        self.client = get_client(address=state["address"])

    @property
    def in_worker(self):
        """True when this code is executing inside a dask worker."""
        try:
            get_worker()
            return True
        except ValueError:
            # get_worker raises ValueError outside a worker thread.
            return False

    def wait(self, futures):
        """Gather all futures and return their unwrapped results.

        Secedes from the worker thread pool while waiting (and rejoins
        after) so nested waits do not starve the cluster.
        """
        if self.in_worker:
            secede()
        results = self.client.gather(list(futures))
        if self.in_worker:
            rejoin()
        return [r.get() for r in results]

    def async_get(self, futures, timeout=0.01):
        """Collect results for futures that are ready, removing them from
        `futures` in place; waits up to `timeout` on the first one only."""
        results = []
        tobe_deleted = []
        for i, future in enumerate(futures):
            if timeout and i == 0:
                future.wait(timeout)
            if future.ready():
                try:
                    results.append(AsyncResult(future, future.get()))
                except Exception as err:
                    results.append(AsyncException(future, err, traceback.format_exc()))
                tobe_deleted.append(future)
        for future in tobe_deleted:
            futures.remove(future)
        return results

    def submit(self, function, *args, **kwargs):
        """Submit `function` for execution and return a wrapped future.

        Raises ExecutorClosed when the underlying client was closed.
        """
        try:
            # pure=False: repeated calls must re-execute, not hit dask's cache.
            return _Future(self.client.submit(function, *args, **kwargs, pure=False))
        except Exception as e:
            # Dask signals a closed client via the message text; translate
            # it into our own exception type.
            if str(e).startswith(
                "Tried sending message after closing. Status: closed"
            ):
                raise ExecutorClosed() from e
            raise

    def __del__(self):
        # This is necessary because if the factory constructor fails
        # __del__ is executed right away but client might not be set
        if hasattr(self, "client"):
            self.client.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.client.close()
        super().__exit__(exc_type, exc_value, traceback)
|
bcb017a8239ac0c0a0a0b2e105ce5f18c2e0b556
|
e48198ffea7b0b80669253fb970fdcc1d2f4c518
|
/src/testdir/pyxfile/py3_shebang.py
|
ec05808ca4f25d6505ab8e065b8a211008d7477f
|
[
"GPL-1.0-or-later",
"Vim",
"GPL-2.0-only"
] |
permissive
|
vim/vim
|
f9ea5913ff884c87bc11f7826b1fc277fba8a2b5
|
816fbcc262687b81fc46f82f7bbeb1453addfe0c
|
refs/heads/master
| 2023-09-01T16:01:56.964678
| 2023-08-31T21:52:30
| 2023-08-31T21:52:30
| 40,997,482
| 37,589
| 7,920
|
Vim
| 2023-09-14T20:57:43
| 2015-08-18T21:03:56
|
Vim Script
|
UTF-8
|
Python
| false
| false
| 50
|
py
|
py3_shebang.py
|
#!/usr/bin/python3
# Minimal fixture: print the running interpreter's version string.
import sys
print(sys.version)
|
3251373c1eaf9e7bcf937479cf51f1a8e218c954
|
e7efae2b83216d9621bd93390959d652de779c3d
|
/cert_manager/tests/common.py
|
90fb24c813898f1e8a1d95369ee5ddc9bab0352f
|
[
"BSD-3-Clause",
"MIT",
"BSD-3-Clause-Modification",
"Unlicense",
"Apache-2.0",
"LGPL-3.0-only",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"CC0-1.0"
] |
permissive
|
DataDog/integrations-core
|
ee1886cc7655972b2791e6ab8a1c62ab35afdb47
|
406072e4294edff5b46b513f0cdf7c2c00fac9d2
|
refs/heads/master
| 2023-08-31T04:08:06.243593
| 2023-08-30T18:22:10
| 2023-08-30T18:22:10
| 47,203,045
| 852
| 1,548
|
BSD-3-Clause
| 2023-09-14T16:39:54
| 2015-12-01T16:41:45
|
Python
|
UTF-8
|
Python
| false
| false
| 901
|
py
|
common.py
|
# (C) Datadog, Inc. 2019-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from datadog_checks.base.stubs import aggregator
# Expected metric-name -> metric-type mappings used by the tests.
# Per-certificate gauges.
CERT_METRICS = {
    'cert_manager.certificate.ready_status': aggregator.GAUGE,
    'cert_manager.certificate.expiration_timestamp': aggregator.GAUGE,
}
# ACME HTTP client request metrics.
ACME_METRICS = {
    'cert_manager.http_acme_client.request.count': aggregator.MONOTONIC_COUNT,
    'cert_manager.http_acme_client.request.duration.sum': aggregator.MONOTONIC_COUNT,
    'cert_manager.http_acme_client.request.duration.count': aggregator.MONOTONIC_COUNT,
    'cert_manager.http_acme_client.request.duration.quantile': aggregator.GAUGE,
}
# Controller-level metrics.
CONTROLLER_METRICS = {
    'cert_manager.clock_time': aggregator.GAUGE,
    'cert_manager.controller.sync_call.count': aggregator.MONOTONIC_COUNT,
}
# Minimal check instance configuration used by the tests.
MOCK_INSTANCE = {
    'openmetrics_endpoint': 'http://fake.tld/prometheus',
}
|
8ce89ef09c80baa3402f002954beceedc89e631f
|
ca593f5a272ce0478ba6f52d2670cb9dd8564b00
|
/mycroft/client/speech/mic.py
|
e65c912ee10eea27803bfccf057d077716d22c96
|
[
"Apache-2.0"
] |
permissive
|
MycroftAI/mycroft-core
|
d41ce0fccfe4c29d8d802dcc6bcf583dc356d9ce
|
8051e4e1f89d5ed1f63f06db5d3570371ae92e5d
|
refs/heads/master
| 2023-08-23T17:45:10.569985
| 2021-12-10T04:51:59
| 2021-12-10T04:51:59
| 59,299,524
| 6,838
| 1,719
|
Apache-2.0
| 2023-08-15T10:25:32
| 2016-05-20T14:11:07
|
Python
|
UTF-8
|
Python
| false
| false
| 28,731
|
py
|
mic.py
|
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import audioop
from time import sleep, time as get_time
from collections import deque, namedtuple
import datetime
import json
import os
from os.path import isdir, join
import pyaudio
import requests
import speech_recognition
from hashlib import md5
from io import BytesIO, StringIO
from speech_recognition import (
Microphone,
AudioSource,
AudioData
)
from tempfile import gettempdir
from threading import Thread, Lock
from mycroft.api import DeviceApi
from mycroft.configuration import Configuration
from mycroft.session import SessionManager
from mycroft.util import (
check_for_signal,
get_ipc_directory,
resolve_resource_file,
play_wav
)
from mycroft.util.log import LOG
from .data_structures import RollingMean, CyclicAudioBuffer
WakeWordData = namedtuple('WakeWordData',
['audio', 'found', 'stopped', 'end_audio'])
class MutableStream:
    """Wrapper around a pyaudio stream that can be muted while open."""

    def __init__(self, wrapped_stream, format, muted=False):
        assert wrapped_stream is not None
        self.wrapped_stream = wrapped_stream
        # Bytes per sample for the given pyaudio sample format.
        self.SAMPLE_WIDTH = pyaudio.get_sample_size(format)
        # Returned by read() while muted: a single zeroed sample
        # (note: this is one sample, not `size` bytes).
        self.muted_buffer = b''.join([b'\x00' * self.SAMPLE_WIDTH])
        self.read_lock = Lock()
        self.muted = muted
        if muted:
            self.mute()

    def mute(self):
        """Stop the stream and set the muted flag."""
        with self.read_lock:
            self.muted = True
            self.wrapped_stream.stop_stream()

    def unmute(self):
        """Start the stream and clear the muted flag."""
        with self.read_lock:
            self.muted = False
            self.wrapped_stream.start_stream()

    def read(self, size, of_exc=False):
        """Read data from stream.

        Args:
            size (int): Number of bytes to read
            of_exc (bool): flag determining if the audio producer thread
                           should throw IOError at overflows.

        Returns:
            (bytes) Data read from device
        """
        frames = deque()
        remaining = size
        with self.read_lock:
            while remaining > 0:
                # If muted during read return empty buffer. This ensures no
                # reads occur while the stream is stopped
                if self.muted:
                    return self.muted_buffer
                to_read = min(self.wrapped_stream.get_read_available(),
                              remaining)
                if to_read <= 0:
                    # No data available yet; back off briefly.
                    sleep(.01)
                    continue
                result = self.wrapped_stream.read(to_read,
                                                  exception_on_overflow=of_exc)
                frames.append(result)
                remaining -= to_read
        input_latency = self.wrapped_stream.get_input_latency()
        if input_latency > 0.2:
            LOG.warning("High input latency: %f" % input_latency)
        audio = b"".join(list(frames))
        return audio

    def close(self):
        """Close the underlying stream and drop the reference to it."""
        self.wrapped_stream.close()
        self.wrapped_stream = None

    def is_stopped(self):
        """Return whether the stream is stopped (True if the query fails)."""
        try:
            return self.wrapped_stream.is_stopped()
        except Exception as e:
            LOG.error(repr(e))
            return True  # Assume the stream has been closed and thusly stopped

    def stop_stream(self):
        """Stop the underlying pyaudio stream."""
        return self.wrapped_stream.stop_stream()
class MutableMicrophone(Microphone):
    """Microphone source whose stream can be muted, unmuted and restarted."""
    def __init__(self, device_index=None, sample_rate=16000, chunk_size=1024,
                 mute=False):
        Microphone.__init__(self, device_index=device_index,
                            sample_rate=sample_rate, chunk_size=chunk_size)
        self.muted = False
        if mute:
            self.mute()
    def __enter__(self):
        return self._start()
    def _start(self):
        """Open the selected device and setup the stream."""
        assert self.stream is None, \
            "This audio source is already inside a context manager"
        self.audio = pyaudio.PyAudio()
        # Wrap the raw pyaudio stream so it can be muted; carry over the
        # current muted state across restarts.
        self.stream = MutableStream(self.audio.open(
            input_device_index=self.device_index, channels=1,
            format=self.format, rate=self.SAMPLE_RATE,
            frames_per_buffer=self.CHUNK,
            input=True,  # stream is an input stream
        ), self.format, self.muted)
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        return self._stop()
    def _stop(self):
        """Stop and close an open stream."""
        try:
            if not self.stream.is_stopped():
                self.stream.stop_stream()
            self.stream.close()
        except Exception:
            LOG.exception('Failed to stop mic input stream')
            # Let's pretend nothing is wrong...
        self.stream = None
        self.audio.terminate()
    def restart(self):
        """Shutdown input device and restart."""
        self._stop()
        self._start()
    def mute(self):
        """Set the muted flag and mute the stream if one is open."""
        self.muted = True
        if self.stream:
            self.stream.mute()
    def unmute(self):
        """Clear the muted flag and unmute the stream if one is open."""
        self.muted = False
        if self.stream:
            self.stream.unmute()
    def is_muted(self):
        """Return the current muted state."""
        return self.muted
    def duration_to_bytes(self, sec):
        """Converts a duration in seconds to number of recorded bytes.
        Args:
            sec: number of seconds
        Returns:
            (int) equivalent number of bytes recorded by this Mic
        """
        return int(sec * self.SAMPLE_RATE) * self.SAMPLE_WIDTH
def get_silence(num_bytes):
    """Return ``num_bytes`` of zeroed (silent) raw audio data."""
    return b"\x00" * num_bytes
class NoiseTracker:
    """Decide whether a spoken utterance has been fully captured.

    The tracker accumulates a "noise level" from a stream of chunk
    classifications (loud / quiet). A recording counts as complete once
    enough loud chunks have been heard (accumulated, not necessarily in a
    row) followed by a short stretch of continuous quiet, or once so much
    silence has passed that no sentence can be expected anymore.

    Args:
        minimum (int): level at or below which audio counts as "quiet"
        maximum (int): ceiling the level may not grow past
        sec_per_buffer (float): length in seconds of each chunk fed to
            update()
        loud_time_limit (float): accumulated loud time (seconds) required
            before the sentence counts as started
        silence_time_limit (float): seconds of silence after which the
            recording is aborted
        silence_after_loud_time (float): seconds of quiet that finalize a
            sentence once it was loud enough. Defaults to 0.25.
    """

    def __init__(self, minimum, maximum, sec_per_buffer, loud_time_limit,
                 silence_time_limit, silence_after_loud_time=0.25):
        self.min_level = minimum
        self.max_level = maximum
        self.sec_per_buffer = sec_per_buffer

        self.num_loud_chunks = 0
        self.level = 0

        # Loud chunks (accumulated) needed before the phrase counts as
        # started.
        self.min_loud_chunks = int(loud_time_limit / sec_per_buffer)

        self.max_silence_duration = silence_time_limit
        self.silence_duration = 0

        # Quiet time required after sufficiently loud audio to consider
        # the sentence done.
        self.silence_after_loud = silence_after_loud_time

        # Level change per second of loud / quiet audio.
        self.increase_multiplier = 200
        self.decrease_multiplier = 100

    def _increase_noise(self):
        """Raise the level (scaled by chunk length) while under the cap."""
        if self.level < self.max_level:
            self.level += self.increase_multiplier * self.sec_per_buffer

    def _decrease_noise(self):
        """Lower the level (scaled by chunk length) while over the floor."""
        if self.level > self.min_level:
            self.level -= self.decrease_multiplier * self.sec_per_buffer

    def update(self, is_loud):
        """Register one chunk classification.

        Args:
            is_loud (bool): True for a loud chunk, False for a quiet one.
        """
        if is_loud:
            self.num_loud_chunks += 1
            self._increase_noise()
            return
        self._decrease_noise()
        # Track how long the level has stayed down at the quiet floor;
        # any louder reading resets the count.
        if self._quiet_enough():
            self.silence_duration += self.sec_per_buffer
        else:
            self.silence_duration = 0

    def _loud_enough(self):
        """True once enough loud chunks have accumulated overall."""
        return self.num_loud_chunks > self.min_loud_chunks

    def _quiet_enough(self):
        """True whenever the level sits at or below the quiet floor."""
        return self.level <= self.min_level

    def recording_complete(self):
        """Check whether the end-of-recording criteria have been met.

        Either the audio dropped from a loud phrase back to quiet for long
        enough, or nothing but silence was recorded for too long.
        """
        too_much_silence = self.silence_duration > self.max_silence_duration
        if too_much_silence:
            LOG.debug('Too much silence recorded without start of sentence '
                      'detected')
        quiet_long_enough = (self._quiet_enough() and
                             self.silence_duration > self.silence_after_loud)
        return quiet_long_enough and (self._loud_enough() or too_much_silence)
class ResponsiveRecognizer(speech_recognition.Recognizer):
    """Recognizer that waits for a wake word and then records a phrase.

    Extends speech_recognition.Recognizer with wake-word monitoring,
    dynamic energy-threshold tracking, mic-level reporting over an IPC
    file, and optional saving of wake words and utterances to disk.
    """
    # Padding of silence when feeding to pocketsphinx
    SILENCE_SEC = 0.01
    # The minimum seconds of noise before a
    # phrase can be considered complete
    MIN_LOUD_SEC_PER_PHRASE = 0.5
    # The minimum seconds of silence required at the end
    # before a phrase will be considered complete
    MIN_SILENCE_AT_END = 0.25
    # Time between pocketsphinx checks for the wake word
    SEC_BETWEEN_WW_CHECKS = 0.2
    def __init__(self, wake_word_recognizer, watchdog=None):
        # wake_word_recognizer: hotword engine; this class uses its
        # key_phrase, expected_duration, update() and found_wake_word().
        # watchdog: optional callable invoked periodically while listening.
        self._watchdog = watchdog or (lambda: None)  # Default to dummy func
        self.config = Configuration.get()
        listener_config = self.config.get('listener')
        self.upload_url = listener_config['wake_word_upload']['url']
        self.upload_disabled = listener_config['wake_word_upload']['disable']
        self.wake_word_name = wake_word_recognizer.key_phrase
        # Whether input-buffer overflows should raise in the producer thread
        self.overflow_exc = listener_config.get('overflow_exception', False)
        super().__init__()
        self.wake_word_recognizer = wake_word_recognizer
        self.audio = pyaudio.PyAudio()
        self.multiplier = listener_config.get('multiplier')
        self.energy_ratio = listener_config.get('energy_ratio')
        # Check the config for the flag to save wake words, utterances
        # and for a path under which to save them
        self.save_utterances = listener_config.get('save_utterances', False)
        self.save_wake_words = listener_config.get('record_wake_words', False)
        self.save_path = listener_config.get('save_path', gettempdir())
        self.saved_wake_words_dir = join(self.save_path, 'mycroft_wake_words')
        if self.save_wake_words and not isdir(self.saved_wake_words_dir):
            os.mkdir(self.saved_wake_words_dir)
        self.saved_utterances_dir = join(self.save_path, 'mycroft_utterances')
        if self.save_utterances and not isdir(self.saved_utterances_dir):
            os.mkdir(self.saved_utterances_dir)
        # IPC file polled by other processes to visualize mic input levels
        self.mic_level_file = os.path.join(get_ipc_directory(), "mic_level")
        # Signal statuses
        self._stop_signaled = False
        self._listen_triggered = False
        self._account_id = None
        # The maximum seconds a phrase can be recorded,
        # provided there is noise the entire time
        self.recording_timeout = listener_config.get('recording_timeout',
                                                     10.0)
        # The maximum time it will continue to record silence
        # when not enough noise has been detected
        self.recording_timeout_with_silence = listener_config.get(
            'recording_timeout_with_silence', 3.0)
    @property
    def account_id(self):
        """Fetch account from backend when needed.
        If an error occurs it's handled and a temporary value is returned.
        When a value is received it will be cached until next start.
        """
        if not self._account_id:
            try:
                self._account_id = DeviceApi().get()['user']['uuid']
            except (requests.RequestException, AttributeError):
                pass  # These are expected and won't be reported
            except Exception as e:
                LOG.debug('Unhandled exception while determining device_id, '
                          'Error: {}'.format(repr(e)))
        return self._account_id or '0'
    def record_sound_chunk(self, source):
        """Read one CHUNK-sized block of raw audio from the source."""
        return source.stream.read(source.CHUNK, self.overflow_exc)
    @staticmethod
    def calc_energy(sound_chunk, sample_width):
        """Return the RMS energy (loudness) of a raw audio chunk."""
        return audioop.rms(sound_chunk, sample_width)
    def _record_phrase(
        self,
        source,
        sec_per_buffer,
        stream=None,
        ww_frames=None
    ):
        """Record an entire spoken phrase.
        Essentially, this code waits for a period of silence and then returns
        the audio. If silence isn't detected, it will terminate and return
        a buffer of self.recording_timeout duration.
        Args:
            source (AudioSource): Source producing the audio chunks
            sec_per_buffer (float): Fractional number of seconds in each chunk
            stream (AudioStreamHandler): Stream target that will receive chunks
                                         of the utterance audio while it is
                                         being recorded.
            ww_frames (deque): Frames of audio data from the last part of wake
                               word detection.
        Returns:
            bytearray: complete audio buffer recorded, including any
                       silence at the end of the user's utterance
        """
        noise_tracker = NoiseTracker(0, 25, sec_per_buffer,
                                     self.MIN_LOUD_SEC_PER_PHRASE,
                                     self.recording_timeout_with_silence)
        # Maximum number of chunks to record before timing out
        max_chunks = int(self.recording_timeout / sec_per_buffer)
        num_chunks = 0
        # bytearray to store audio in, initialized with a single sample of
        # silence.
        byte_data = get_silence(source.SAMPLE_WIDTH)
        if stream:
            stream.stream_start()
        phrase_complete = False
        while num_chunks < max_chunks and not phrase_complete:
            # Drain frames carried over from wake-word detection first
            if ww_frames:
                chunk = ww_frames.popleft()
            else:
                chunk = self.record_sound_chunk(source)
            byte_data += chunk
            num_chunks += 1
            if stream:
                stream.stream_chunk(chunk)
            energy = self.calc_energy(chunk, source.SAMPLE_WIDTH)
            test_threshold = self.energy_threshold * self.multiplier
            is_loud = energy > test_threshold
            noise_tracker.update(is_loud)
            if not is_loud:
                self._adjust_threshold(energy, sec_per_buffer)
            # The phrase is complete if the noise_tracker end of sentence
            # criteria is met or if the top-button is pressed
            phrase_complete = (noise_tracker.recording_complete() or
                               check_for_signal('buttonPress'))
            # Periodically write the energy level to the mic level file.
            if num_chunks % 10 == 0:
                self._watchdog()
                self.write_mic_level(energy, source)
        return byte_data
    def write_mic_level(self, energy, source):
        """Write current energy, threshold and mute state to the IPC file."""
        with open(self.mic_level_file, 'w') as f:
            f.write('Energy: cur={} thresh={:.3f} muted={}'.format(
                energy,
                self.energy_threshold,
                int(source.muted)
            )
            )
    def _skip_wake_word(self):
        """Check if told programatically to skip the wake word
        For example when we are in a dialog with the user.
        """
        if self._listen_triggered:
            return True
        # Pressing the Mark 1 button can start recording (unless
        # it is being used to mean 'stop' instead)
        if check_for_signal('buttonPress', 1):
            # give other processes time to consume this signal if
            # it was meant to be a 'stop'
            sleep(0.25)
            if check_for_signal('buttonPress'):
                # Signal is still here, assume it was intended to
                # begin recording
                LOG.debug("Button Pressed, wakeword not needed")
                return True
        return False
    def stop(self):
        """Signal stop and exit waiting state."""
        self._stop_signaled = True
    def _compile_metadata(self):
        """Build metadata describing the captured wake-word sample.
        A model hash is only computed for Precise models; other engines get
        the placeholder '0'.
        """
        ww_module = self.wake_word_recognizer.__class__.__name__
        if ww_module == 'PreciseHotword':
            model_path = self.wake_word_recognizer.precise_model
            with open(model_path, 'rb') as f:
                model_hash = md5(f.read()).hexdigest()
        else:
            model_hash = '0'
        return {
            'name': self.wake_word_name.replace(' ', '-'),
            'engine': md5(ww_module.encode('utf-8')).hexdigest(),
            'time': str(int(1000 * get_time())),
            'sessionId': SessionManager.get().session_id,
            'accountId': self.account_id,
            'model': str(model_hash)
        }
    def trigger_listen(self):
        """Externally trigger listening."""
        LOG.debug('Listen triggered from external source.')
        self._listen_triggered = True
    def _upload_wakeword(self, audio, metadata):
        """Upload the wakeword in a background thread."""
        # Upload is deprecated; only a debug notice is emitted. The old
        # implementation is kept below for reference.
        LOG.debug(
            "Wakeword uploading has been disabled. The API endpoint used in "
            "Mycroft-core v20.2 and below has been deprecated. To contribute "
            "new wakeword samples please upgrade to v20.8 or above."
        )
        # def upload(audio, metadata):
        #     requests.post(self.upload_url,
        #                   files={'audio': BytesIO(audio.get_wav_data()),
        #                          'metadata': StringIO(json.dumps(metadata))})
        # Thread(target=upload, daemon=True, args=(audio, metadata)).start()
    def _send_wakeword_info(self, emitter):
        """Send messagebus message indicating that a wakeword was received.
        Args:
            emitter: bus emitter to send information on.
        """
        SessionManager.touch()
        payload = {'utterance': self.wake_word_name,
                   'session': SessionManager.get().session_id}
        emitter.emit("recognizer_loop:wakeword", payload)
    def _write_wakeword_to_disk(self, audio, metadata):
        """Write wakeword to disk.
        Args:
            audio: Audio data to write
            metadata: List of metadata about the captured wakeword
        """
        filename = join(self.saved_wake_words_dir,
                        '_'.join(str(metadata[k]) for k in sorted(metadata)) +
                        '.wav')
        with open(filename, 'wb') as f:
            f.write(audio.get_wav_data())
    def _handle_wakeword_found(self, audio_data, source):
        """Perform actions to be triggered after a wakeword is found.
        This includes: emit event on messagebus that a wakeword is heard,
        store wakeword to disk if configured and sending the wakeword data
        to the cloud in case the user has opted into the data sharing.
        """
        # Save and upload positive wake words as appropriate
        upload_allowed = (self.config['opt_in'] and not self.upload_disabled)
        if (self.save_wake_words or upload_allowed):
            audio = self._create_audio_data(audio_data, source)
            metadata = self._compile_metadata()
            if self.save_wake_words:
                # Save wake word locally
                self._write_wakeword_to_disk(audio, metadata)
            # Upload wake word for opt_in people
            if upload_allowed:
                self._upload_wakeword(audio, metadata)
    def _wait_until_wake_word(self, source, sec_per_buffer):
        """Listen continuously on source until a wake word is spoken
        Args:
            source (AudioSource): Source producing the audio chunks
            sec_per_buffer (float): Fractional number of seconds in each chunk
        """
        # The maximum audio in seconds to keep for transcribing a phrase
        # The wake word must fit in this time
        ww_duration = self.wake_word_recognizer.expected_duration
        ww_test_duration = max(3, ww_duration)
        mic_write_counter = 0
        num_silent_bytes = int(self.SILENCE_SEC * source.SAMPLE_RATE *
                               source.SAMPLE_WIDTH)
        silence = get_silence(num_silent_bytes)
        # Max bytes for byte_data before audio is removed from the front
        max_size = source.duration_to_bytes(ww_duration)
        test_size = source.duration_to_bytes(ww_test_duration)
        audio_buffer = CyclicAudioBuffer(max_size, silence)
        buffers_per_check = self.SEC_BETWEEN_WW_CHECKS / sec_per_buffer
        buffers_since_check = 0.0
        # Rolling buffer to track the audio energy (loudness) heard on
        # the source recently. An average audio energy is maintained
        # based on these levels.
        average_samples = int(5 / sec_per_buffer)  # average over last 5 secs
        audio_mean = RollingMean(average_samples)
        # These are frames immediately after wake word is detected
        # that we want to keep to send to STT
        ww_frames = deque(maxlen=7)
        said_wake_word = False
        audio_data = None
        while (not said_wake_word and not self._stop_signaled and
               not self._skip_wake_word()):
            chunk = self.record_sound_chunk(source)
            audio_buffer.append(chunk)
            ww_frames.append(chunk)
            energy = self.calc_energy(chunk, source.SAMPLE_WIDTH)
            audio_mean.append_sample(energy)
            if energy < self.energy_threshold * self.multiplier:
                self._adjust_threshold(energy, sec_per_buffer)
            # maintain the threshold using average
            if self.energy_threshold < energy < audio_mean.value * 1.5:
                # bump the threshold to just above this value
                self.energy_threshold = energy * 1.2
            # Periodically output energy level stats. This can be used to
            # visualize the microphone input, e.g. a needle on a meter.
            # NOTE(review): "% 3" is truthy on 2 of every 3 chunks; the
            # sibling loop in _record_phrase uses "% 10 == 0", so possibly
            # "% 3 == 0" was intended here — confirm before changing.
            if mic_write_counter % 3:
                self._watchdog()
                self.write_mic_level(energy, source)
            mic_write_counter += 1
            buffers_since_check += 1.0
            # Send chunk to wake_word_recognizer
            self.wake_word_recognizer.update(chunk)
            if buffers_since_check > buffers_per_check:
                buffers_since_check -= buffers_per_check
                audio_data = audio_buffer.get_last(test_size) + silence
                said_wake_word = \
                    self.wake_word_recognizer.found_wake_word(audio_data)
        self._listen_triggered = False
        return WakeWordData(audio_data, said_wake_word,
                            self._stop_signaled, ww_frames)
    @staticmethod
    def _create_audio_data(raw_data, source):
        """
        Constructs an AudioData instance with the same parameters
        as the source and the specified frame_data
        """
        return AudioData(raw_data, source.SAMPLE_RATE, source.SAMPLE_WIDTH)
    def mute_and_confirm_listening(self, source):
        """Play the configured listening sound with the mic muted.
        Returns:
            bool: True if a sound was played, otherwise False.
        """
        audio_file = resolve_resource_file(
            self.config.get('sounds').get('start_listening'))
        if audio_file:
            source.mute()
            play_wav(audio_file).wait()
            source.unmute()
            return True
        else:
            return False
    def listen(self, source, emitter, stream=None):
        """Listens for chunks of audio that Mycroft should perform STT on.
        This will listen continuously for a wake-up-word, then return the
        audio chunk containing the spoken phrase that comes immediately
        afterwards.
        Args:
            source (AudioSource): Source producing the audio chunks
            emitter (EventEmitter): Emitter for notifications of when recording
                                    begins and ends.
            stream (AudioStreamHandler): Stream target that will receive chunks
                                         of the utterance audio while it is
                                         being recorded
        Returns:
            AudioData: audio with the user's utterance, minus the wake-up-word
        """
        assert isinstance(source, AudioSource), "Source must be an AudioSource"
        # bytes_per_sec = source.SAMPLE_RATE * source.SAMPLE_WIDTH
        sec_per_buffer = float(source.CHUNK) / source.SAMPLE_RATE
        # Every time a new 'listen()' request begins, reset the threshold
        # used for silence detection. This is as good of a reset point as
        # any, as we expect the user and Mycroft to not be talking.
        # NOTE: adjust_for_ambient_noise() doc claims it will stop early if
        # speech is detected, but there is no code to actually do that.
        self.adjust_for_ambient_noise(source, 1.0)
        LOG.debug("Waiting for wake word...")
        ww_data = self._wait_until_wake_word(source, sec_per_buffer)
        ww_frames = None
        if ww_data.found:
            # If the wakeword was heard send it
            self._send_wakeword_info(emitter)
            self._handle_wakeword_found(ww_data.audio, source)
            ww_frames = ww_data.end_audio
        if ww_data.stopped:
            # If the waiting returned from a stop signal
            return
        LOG.debug("Recording...")
        # If enabled, play a wave file with a short sound to audibly
        # indicate recording has begun.
        if self.config.get('confirm_listening'):
            if self.mute_and_confirm_listening(source):
                # Clear frames from wakeword detctions since they're
                # irrelevant after mute - play wav - unmute sequence
                ww_frames = None
        # Notify system of recording start
        emitter.emit("recognizer_loop:record_begin")
        frame_data = self._record_phrase(
            source,
            sec_per_buffer,
            stream,
            ww_frames
        )
        audio_data = self._create_audio_data(frame_data, source)
        emitter.emit("recognizer_loop:record_end")
        if self.save_utterances:
            LOG.info("Recording utterance")
            stamp = str(datetime.datetime.now())
            # NOTE(review): the leading "/" yields a double slash when
            # saved_utterances_dir is absolute; harmless on POSIX but
            # worth tidying with os.path.join.
            filename = "/{}/{}.wav".format(
                self.saved_utterances_dir,
                stamp
            )
            with open(filename, 'wb') as filea:
                filea.write(audio_data.get_wav_data())
            LOG.debug("Thinking...")
        return audio_data
    def _adjust_threshold(self, energy, seconds_per_buffer):
        """Drift the energy threshold toward the current ambient energy."""
        if self.dynamic_energy_threshold and energy > 0:
            # account for different chunk sizes and rates
            damping = (
                self.dynamic_energy_adjustment_damping ** seconds_per_buffer)
            target_energy = energy * self.energy_ratio
            self.energy_threshold = (
                self.energy_threshold * damping +
                target_energy * (1 - damping))
|
1447c2d23ed97fecba5be2651e2af21d48f94a97
|
f54290f045fd150f9be640bf8ab4a91f6b9ae3e3
|
/evennia/typeclasses/models.py
|
5fe225dabe2971fcf4b7e7a24cb754246e4d3056
|
[
"BSD-3-Clause"
] |
permissive
|
evennia/evennia
|
54d075093f0ff125be40e17a7bc4e1e0e22cf77b
|
b3ca58b5c1325a3bf57051dfe23560a08d2947b7
|
refs/heads/main
| 2023-09-02T05:29:35.678676
| 2023-09-01T19:06:05
| 2023-09-01T19:06:05
| 16,120,959
| 1,781
| 1,004
|
BSD-3-Clause
| 2023-09-12T18:37:23
| 2014-01-21T22:22:28
|
Python
|
UTF-8
|
Python
| false
| false
| 37,156
|
py
|
models.py
|
"""
This is the *abstract* django models for many of the database objects
in Evennia. A Django abstract (note: not the same as a Python metaclass!) is
a model which is not actually created in the database, but which only exists
for other models to inherit from, to avoid code duplication. Any model can
import and inherit from these classes.
Attributes are database objects stored on other objects. The implementing
class needs to supply a ForeignKey field attr_object pointing to the kind
of object being mapped. Attributes storing iterables actually store special
types of iterables named PackedList/PackedDict respectively. These make
sure to save changes to them to the database - this is critical in order to
allow for obj.db.mylist[2] = data. Also, all dbobjects are saved as
dbrefs but are also aggressively cached.
TypedObjects are objects 'decorated' with a typeclass - that is, the typeclass
(which is a normal Python class implementing some special tricks with its
get/set attribute methods, allows for the creation of all sorts of different
objects all with the same database object underneath. Usually attributes are
used to permanently store things not hard-coded as field on the database object.
The admin should usually not have to deal directly with the database object
layer.
This module also contains the Managers for the respective models; inherit from
these to create custom managers.
"""
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.db.models import signals
from django.db.models.base import ModelBase
from django.urls import reverse
from django.utils.encoding import smart_str
from django.utils.text import slugify
from evennia.locks.lockhandler import LockHandler
from evennia.server.signals import SIGNAL_TYPED_OBJECT_POST_RENAME
from evennia.typeclasses import managers
from evennia.typeclasses.attributes import (
Attribute,
AttributeHandler,
AttributeProperty,
DbHolder,
InMemoryAttributeBackend,
ModelAttributeBackend,
)
from evennia.typeclasses.tags import (
AliasHandler,
PermissionHandler,
Tag,
TagCategoryProperty,
TagHandler,
TagProperty,
)
from evennia.utils.idmapper.models import SharedMemoryModel, SharedMemoryModelBase
from evennia.utils.logger import log_trace
from evennia.utils.utils import class_from_module, inherits_from, is_iter, lazy_property
__all__ = ("TypedObject",)
TICKER_HANDLER = None
_PERMISSION_HIERARCHY = [p.lower() for p in settings.PERMISSION_HIERARCHY]
_TYPECLASS_AGGRESSIVE_CACHE = settings.TYPECLASS_AGGRESSIVE_CACHE
_GA = object.__getattribute__
_SA = object.__setattr__
# signal receivers. Connected in __new__
def call_at_first_save(sender, instance, created, **kwargs):
    """
    post_save signal receiver: run the `at_first_save` hook exactly once,
    on the save that actually created the object.
    """
    if not created:
        return
    instance.at_first_save()
def remove_attributes_on_delete(sender, instance, **kwargs):
    """
    pre_delete signal receiver: wipe all Attributes attached to the object
    being deleted so they don't linger in the database.
    """
    attr_queryset = instance.db_attributes.all()
    attr_queryset.delete()
# ------------------------------------------------------------
#
# Typed Objects
#
# ------------------------------------------------------------
#
# Meta class for typeclasses
#
class TypeclassBase(SharedMemoryModelBase):
    """
    Metaclass which should be set for the root of model proxies
    that don't define any new fields, like Object, Script etc. This
    is the basis for the typeclassing system.
    """
    def __new__(cls, name, bases, attrs):
        """
        We must define our Typeclasses as proxies. We also store the
        path directly on the class, this is required by managers.
        """
        # storage of stats
        attrs["typename"] = name
        attrs["path"] = "%s.%s" % (attrs["__module__"], name)
        def _get_dbmodel(bases):
            """Recursively get the dbmodel"""
            if not hasattr(bases, "__iter__"):
                bases = [bases]
            for base in bases:
                try:
                    # proxies/abstracts are skipped over in favor of
                    # their concrete parents
                    if base._meta.proxy or base._meta.abstract:
                        for kls in base._meta.parents:
                            return _get_dbmodel(kls)
                except AttributeError:
                    # this happens if trying to parse a non-typeclass mixin parent,
                    # without a _meta
                    continue
                else:
                    # a concrete model - this is the dbmodel we want
                    return base
            return None
        dbmodel = _get_dbmodel(bases)
        if not dbmodel:
            raise TypeError(f"{name} does not appear to inherit from a database model.")
        # typeclass proxy setup
        # first check explicit __applabel__ on the typeclass, then figure
        # it out from the dbmodel
        if "__applabel__" not in attrs:
            # find the app-label in one of the bases, usually the dbmodel
            attrs["__applabel__"] = dbmodel._meta.app_label
        if "Meta" not in attrs:
            class Meta:
                proxy = True
                app_label = attrs.get("__applabel__", "typeclasses")
            attrs["Meta"] = Meta
        # force proxy=True even for an explicitly supplied Meta
        attrs["Meta"].proxy = True
        new_class = ModelBase.__new__(cls, name, bases, attrs)
        # django doesn't support inheriting proxy models so we hack support for
        # it here by injecting `proxy_for_model` to the actual dbmodel.
        # Unfortunately we cannot also set the correct model_name, because this
        # would block multiple-inheritance of typeclasses (Django doesn't allow
        # multiple bases of the same model).
        if dbmodel:
            new_class._meta.proxy_for_model = dbmodel
            # Maybe Django will eventually handle this in the future:
            # new_class._meta.model_name = dbmodel._meta.model_name
        # attach signals
        signals.post_save.connect(call_at_first_save, sender=new_class)
        signals.pre_delete.connect(remove_attributes_on_delete, sender=new_class)
        return new_class
#
# Main TypedObject abstraction
#
class TypedObject(SharedMemoryModel):
    """
    Abstract Django model.
    This is the basis for a typed object. It also contains all the
    mechanics for managing connected attributes.
    The TypedObject has the following properties:
      - key - main name
      - name - alias for key
      - typeclass_path - the path to the decorating typeclass
      - typeclass - auto-linked typeclass
      - date_created - time stamp of object creation
      - permissions - perm strings
      - dbref - #id of object
      - db - persistent attribute storage
      - ndb - non-persistent attribute storage
    """
    #
    # TypedObject Database Model setup
    #
    #
    # These database fields are all accessed and set using their corresponding
    # properties, named same as the field, but without the db_* prefix
    # (no separate save() call is needed)
    # Main identifier of the object, for searching. Is accessed with self.key
    # or self.name
    db_key = models.CharField("key", max_length=255, db_index=True)
    # This is the python path to the type class this object is tied to. The
    # typeclass is what defines what kind of Object this is)
    db_typeclass_path = models.CharField(
        "typeclass",
        max_length=255,
        null=True,
        help_text=(
            "this defines what 'type' of entity this is. This variable holds "
            "a Python path to a module with a valid Evennia Typeclass."
        ),
        db_index=True,
    )
    # Creation date. This is not changed once the object is created.
    db_date_created = models.DateTimeField("creation date", editable=False, auto_now_add=True)
    # Lock storage
    db_lock_storage = models.TextField(
        "locks",
        blank=True,
        help_text=(
            "locks limit access to an entity. A lock is defined as a 'lock string' "
            "on the form 'type:lockfunctions', defining what functionality is locked and "
            "how to determine access. Not defining a lock means no access is granted."
        ),
    )
    # many2many relationships
    db_attributes = models.ManyToManyField(
        Attribute,
        help_text=(
            "attributes on this object. An attribute can hold any pickle-able "
            "python object (see docs for special cases)."
        ),
    )
    db_tags = models.ManyToManyField(
        Tag,
        help_text=(
            "tags on this object. Tags are simple string markers to identify, "
            "group and alias objects."
        ),
    )
    # Database manager
    objects = managers.TypedObjectManager()
    # quick on-object typeclass cache for speed
    _cached_typeclass = None
def set_class_from_typeclass(self, typeclass_path=None):
if typeclass_path:
try:
self.__class__ = class_from_module(
typeclass_path, defaultpaths=settings.TYPECLASS_PATHS
)
except Exception:
log_trace()
try:
self.__class__ = class_from_module(self.__settingsclasspath__)
except Exception:
log_trace()
try:
self.__class__ = class_from_module(self.__defaultclasspath__)
except Exception:
log_trace()
self.__class__ = self._meta.concrete_model or self.__class__
finally:
self.db_typeclass_path = typeclass_path
elif self.db_typeclass_path:
try:
self.__class__ = class_from_module(self.db_typeclass_path)
except Exception:
log_trace()
try:
self.__class__ = class_from_module(self.__defaultclasspath__)
except Exception:
log_trace()
self.__dbclass__ = self._meta.concrete_model or self.__class__
else:
self.db_typeclass_path = "%s.%s" % (self.__module__, self.__class__.__name__)
# important to put this at the end since _meta is based on the set __class__
try:
self.__dbclass__ = self._meta.concrete_model or self.__class__
except AttributeError:
err_class = repr(self.__class__)
self.__class__ = class_from_module("evennia.objects.objects.DefaultObject")
self.__dbclass__ = class_from_module("evennia.objects.models.ObjectDB")
self.db_typeclass_path = "evennia.objects.objects.DefaultObject"
log_trace(
"Critical: Class %s of %s is not a valid typeclass!\nTemporarily falling back"
" to %s." % (err_class, self, self.__class__)
)
def __init__(self, *args, **kwargs):
"""
The `__init__` method of typeclasses is the core operational
code of the typeclass system, where it dynamically re-applies
a class based on the db_typeclass_path database field rather
than use the one in the model.
Args:
Passed through to parent.
Keyword Args:
Passed through to parent.
Notes:
The loading mechanism will attempt the following steps:
1. Attempt to load typeclass given on command line
2. Attempt to load typeclass stored in db_typeclass_path
3. Attempt to load `__settingsclasspath__`, which is by the
default classes defined to be the respective user-set
base typeclass settings, like `BASE_OBJECT_TYPECLASS`.
4. Attempt to load `__defaultclasspath__`, which is the
base classes in the library, like DefaultObject etc.
5. If everything else fails, use the database model.
Normal operation is to load successfully at either step 1
or 2 depending on how the class was called. Tracebacks
will be logged for every step the loader must take beyond
2.
"""
typeclass_path = kwargs.pop("typeclass", None)
super().__init__(*args, **kwargs)
self.set_class_from_typeclass(typeclass_path=typeclass_path)
def init_evennia_properties(self):
"""
Called by creation methods; makes sure to initialize Attribute/TagProperties
by fetching them once.
"""
for propkey, prop in self.__class__.__dict__.items():
if isinstance(prop, (AttributeProperty, TagProperty, TagCategoryProperty)):
try:
getattr(self, propkey)
except Exception:
log_trace()
    # initialize all handlers in a lazy fashion
    @lazy_property
    def attributes(self):
        # persistent Attributes, backed by the database
        return AttributeHandler(self, ModelAttributeBackend)
    @lazy_property
    def locks(self):
        # access-lock definitions ('type:lockfuncs' strings)
        return LockHandler(self)
    @lazy_property
    def tags(self):
        # plain string markers used for grouping/identification
        return TagHandler(self)
    @lazy_property
    def aliases(self):
        # alternative-name handler
        return AliasHandler(self)
    @lazy_property
    def permissions(self):
        # permission-string handler
        return PermissionHandler(self)
    @lazy_property
    def nattributes(self):
        # non-persistent Attributes, kept in memory only
        return AttributeHandler(self, InMemoryAttributeBackend)
    class Meta:
        """
        Django setup info.
        """
        # abstract: this model creates no table of its own; subclasses do
        abstract = True
        verbose_name = "Evennia Database Object"
        ordering = ["-db_date_created", "id", "db_typeclass_path", "db_key"]
# wrapper
# Wrapper properties to easily set database fields. These are
# @property decorators that allows to access these fields using
# normal python operations (without having to remember to save()
# etc). So e.g. a property 'attr' has a get/set/del decorator
# defined that allows the user to do self.attr = value,
# value = self.attr and del self.attr respectively (where self
# is the object in question).
# name property (alias to self.key)
def __name_get(self):
return self.key
def __name_set(self, value):
self.key = value
def __name_del(self):
raise Exception("Cannot delete name")
name = property(__name_get, __name_set, __name_del)
# key property (overrides's the idmapper's db_key for the at_rename hook)
    @property
    def key(self):
        """The main identifier of the object (backed by db_key)."""
        return self.db_key
    @key.setter
    def key(self, value):
        # Capture the old name first so the rename hook/signal can report it.
        oldname = str(self.db_key)
        self.db_key = value
        self.save(update_fields=["db_key"])
        # Fire the rename hook, then notify any signal listeners.
        self.at_rename(oldname, value)
        SIGNAL_TYPED_OBJECT_POST_RENAME.send(sender=self, old_key=oldname, new_key=value)
#
#
# TypedObject main class methods and properties
#
#
    def __eq__(self, other):
        """Two typed objects are equal if they share db-class and database id."""
        try:
            return self.__dbclass__ == other.__dbclass__ and self.dbid == other.dbid
        except AttributeError:
            # `other` lacks __dbclass__/dbid (not a typed object) - never equal
            return False
    def __hash__(self):
        # Defining __eq__ would otherwise disable inherited hashing; restore
        # the parent's hash so these objects stay usable in sets/dict keys.
        return super().__hash__()
    def __str__(self):
        # smart_str handles non-ascii db_key values safely
        return smart_str("%s" % self.db_key)
    def __repr__(self):
        # repr intentionally mirrors the plain key (no class/dbref info)
        return "%s" % self.db_key
    # dbid: read-only alias for the database primary key
    def __dbid_get(self):
        """
        Caches and returns the unique id of the object.
        Use this instead of self.id, which is not cached.
        """
        return self.id
    def __dbid_set(self, value):
        raise Exception("dbid cannot be set!")
    def __dbid_del(self):
        raise Exception("dbid cannot be deleted!")
    dbid = property(__dbid_get, __dbid_set, __dbid_del)
# @property
def __dbref_get(self):
"""
Returns the object's dbref on the form #NN.
"""
return "#%s" % self.id
def __dbref_set(self):
raise Exception("dbref cannot be set!")
def __dbref_del(self):
raise Exception("dbref cannot be deleted!")
dbref = property(__dbref_get, __dbref_set, __dbref_del)
    def at_idmapper_flush(self):
        """
        This is called when the idmapper cache is flushed and
        allows customized actions when this happens.

        Returns:
            do_flush (bool): If True, flush this object as normal. If
                False, don't flush and expect this object to handle
                the flushing on its own.

        Notes:
            The default implementation relies on being able to clear
            Django's Foreignkey cache on objects not affected by the
            flush (notably objects with an NAttribute stored). We rely
            on this cache being stored on the format "_<fieldname>_cache".
            If Django were to change this name internally, we need to
            update here (unlikely, but marking just in case).
        """
        if self.nattributes.all():
            # we can't flush this object if we have non-persistent
            # attributes stored - those would get lost! Nevertheless
            # we try to flush as many references as we can.
            self.attributes.reset_cache()
            self.tags.reset_cache()
            # flush caches for all related fields
            for field in self._meta.fields:
                name = "_%s_cache" % field.name
                if field.is_relation and name in self.__dict__:
                    # a foreignkey - remove its cache
                    del self.__dict__[name]
            # refuse the flush; we cleared what we safely could above
            return False
        # a normal flush
        return True
#
# Object manipulation methods
#
def at_init(self):
"""
Called when this object is loaded into cache. This is more reliable
than to override `__init__`.
"""
pass
    @classmethod
    def search(cls, query, **kwargs):
        """
        Overridden by class children. This implements a common API.

        Args:
            query (str): A search query - either a #dbref or a key.
            **kwargs: Other search parameters (unused here).

        Returns:
            list: A list of 0, 1 or more matches, only of this typeclass.
        """
        if cls.objects.dbref(query):
            # query is a valid #dbref - resolve it directly by id
            return [cls.objects.get_id(query)]
        # NOTE(review): 'db_key__lower' is not a standard Django lookup -
        # presumably a custom 'lower' lookup registered on the field for
        # case-insensitive matching; confirm.
        return list(cls.objects.filter(db_key__lower=query))
def is_typeclass(self, typeclass, exact=False):
"""
Returns true if this object has this type OR has a typeclass
which is an subclass of the given typeclass. This operates on
the actually loaded typeclass (this is important since a
failing typeclass may instead have its default currently
loaded) typeclass - can be a class object or the python path
to such an object to match against.
Args:
typeclass (str or class): A class or the full python path
to the class to check.
exact (bool, optional): Returns true only if the object's
type is exactly this typeclass, ignoring parents.
Returns:
is_typeclass (bool): If this typeclass matches the given
typeclass.
"""
if isinstance(typeclass, str):
typeclass = [typeclass] + [
"%s.%s" % (prefix, typeclass) for prefix in settings.TYPECLASS_PATHS
]
else:
typeclass = [typeclass.path]
selfpath = self.path
if exact:
# check only exact match
return selfpath in typeclass
else:
# check parent chain
return any(
hasattr(cls, "path") and cls.path in typeclass for cls in self.__class__.mro()
)
def swap_typeclass(
self,
new_typeclass,
clean_attributes=False,
run_start_hooks="all",
no_default=True,
clean_cmdsets=False,
):
"""
This performs an in-situ swap of the typeclass. This means
that in-game, this object will suddenly be something else.
Account will not be affected. To 'move' an account to a different
object entirely (while retaining this object's type), use
self.account.swap_object().
Note that this might be an error prone operation if the
old/new typeclass was heavily customized - your code
might expect one and not the other, so be careful to
bug test your code if using this feature! Often its easiest
to create a new object and just swap the account over to
that one instead.
Args:
new_typeclass (str or classobj): Type to switch to.
clean_attributes (bool or list, optional): Will delete all
attributes stored on this object (but not any of the
database fields such as name or location). You can't get
attributes back, but this is often the safest bet to make
sure nothing in the new typeclass clashes with the old
one. If you supply a list, only those named attributes
will be cleared.
run_start_hooks (str or None, optional): This is either None,
to not run any hooks, "all" to run all hooks defined by
at_first_start, or a string with space-separated hook-names to run
(for example 'at_object_creation'). This will
always be called without arguments.
no_default (bool, optiona): If set, the swapper will not
allow for swapping to a default typeclass in case the
given one fails for some reason. Instead the old one will
be preserved.
clean_cmdsets (bool, optional): Delete all cmdsets on the object.
"""
if not callable(new_typeclass):
# this is an actual class object - build the path
new_typeclass = class_from_module(new_typeclass, defaultpaths=settings.TYPECLASS_PATHS)
# if we get to this point, the class is ok.
if inherits_from(self, "evennia.scripts.models.ScriptDB"):
if self.interval > 0:
raise RuntimeError(
"Cannot use swap_typeclass on time-dependent "
"Script '%s'.\nStop and start a new Script of the "
"right type instead."
% self.key
)
self.typeclass_path = new_typeclass.path
self.__class__ = new_typeclass
if clean_attributes:
# Clean out old attributes
if is_iter(clean_attributes):
for attr in clean_attributes:
self.attributes.remove(attr)
for nattr in clean_attributes:
if hasattr(self.ndb, nattr):
self.nattributes.remove(nattr)
else:
self.attributes.clear()
self.nattributes.clear()
if clean_cmdsets:
# purge all cmdsets
self.cmdset.clear()
self.cmdset.remove_default()
if run_start_hooks == "all":
# fake this call to mimic the first save
self.at_first_save()
elif run_start_hooks:
# a custom hook-name to call.
for start_hook in str(run_start_hooks).split():
getattr(self, run_start_hooks)()
#
# Lock / permission methods
#
    def access(
        self, accessing_obj, access_type="read", default=False, no_superuser_bypass=False, **kwargs
    ):
        """
        Determines if another object has permission to access this one.

        Args:
            accessing_obj (object): Object trying to access this one.
            access_type (str, optional): Type of access sought.
            default (bool, optional): What to return if no lock of
                access_type was found.
            no_superuser_bypass (bool, optional): Turn off the
                superuser lock bypass (be careful with this one).

        Keyword Args:
            kwargs (any): Ignored here, but kept so the api is consistent
                with the object-typeclass access method, which feeds them
                to its hook methods.
        """
        # delegate the actual check to the lock handler
        return self.locks.check(
            accessing_obj,
            access_type=access_type,
            default=default,
            no_superuser_bypass=no_superuser_bypass,
        )
    def check_permstring(self, permstring):
        """
        This explicitly checks if we hold particular permission
        without involving any locks.

        Args:
            permstring (str): The permission string to check against.

        Returns:
            result (bool): If the permstring is passed or not.
        """
        # superusers pass any permission check unless they are quelling;
        # puppeted objects defer to their account's superuser status
        if hasattr(self, "account"):
            if (
                self.account
                and self.account.is_superuser
                and not self.account.attributes.get("_quell")
            ):
                return True
        else:
            if self.is_superuser and not self.attributes.get("_quell"):
                return True
        if not permstring:
            return False
        # permission comparisons are case-insensitive
        perm = permstring.lower()
        perms = [p.lower() for p in self.permissions.all()]
        if perm in perms:
            # simplest case - we have a direct match
            return True
        if perm in _PERMISSION_HIERARCHY:
            # hierarchical permission: holding any *higher* entry in
            # _PERMISSION_HIERARCHY also grants this one
            ppos = _PERMISSION_HIERARCHY.index(perm)
            return any(
                True
                for hpos, hperm in enumerate(_PERMISSION_HIERARCHY)
                if hperm in perms and hpos > ppos
            )
        # we ignore pluralization (english only): retry with singular form
        if perm.endswith("s"):
            return self.check_permstring(perm[:-1])
        return False
#
# Deletion methods
#
    def _deleted(self, *args, **kwargs):
        """
        Scrambling method for already deleted objects
        """
        # delete() rebinds itself to this method after a successful delete,
        # so a second delete attempt fails loudly instead of corrupting state
        raise ObjectDoesNotExist("This object was already deleted!")
    def delete(self):
        """
        Cleaning up handlers on the typeclass level before the database
        row itself is removed.
        """
        # NOTE(review): TICKER_HANDLER is declared global but never used in
        # this body - presumably needed by overriding subclasses; confirm.
        global TICKER_HANDLER
        self.permissions.clear()
        self.attributes.clear()
        self.aliases.clear()
        if hasattr(self, "nicks"):
            self.nicks.clear()
        # scrambling properties: any further delete() call now raises
        self.delete = self._deleted
        super().delete()
#
# Attribute storage
#
    @property
    def db(self):
        """
        Attribute handler wrapper. Allows for the syntax

        ```python
        obj.db.attrname = value
        # and
        value = obj.db.attrname
        # and
        del obj.db.attrname
        # and
        all_attr = obj.db.all()
        # (unless there is an attribute
        # named 'all', in which case that will be returned instead).
        ```
        """
        try:
            return self._db_holder
        except AttributeError:
            # first access - create and cache the holder on the instance
            self._db_holder = DbHolder(self, "attributes")
            return self._db_holder
    # guard rails: the `db` holder itself must never be replaced or deleted
    @db.setter
    def db(self, value):
        "Stop accidentally replacing the db object"
        string = "Cannot assign directly to db object! "
        string += "Use db.attr=value instead."
        raise Exception(string)
    @db.deleter
    def db(self):
        "Stop accidental deletion."
        raise Exception("Cannot delete the db object!")
#
# Non-persistent (ndb) storage
#
    @property
    def ndb(self):
        """
        A non-attr_obj store (ndb: NonDataBase). Everything stored
        to this is guaranteed to be cleared when a server is shutdown.
        Syntax is same as for the _get_db_holder() method and
        property, e.g. obj.ndb.attr = value etc.
        """
        try:
            return self._ndb_holder
        except AttributeError:
            # first access - create and cache the in-memory holder
            self._ndb_holder = DbHolder(self, "nattrhandler", manager_name="nattributes")
            return self._ndb_holder
    # guard rails: the `ndb` holder itself must never be replaced or deleted
    @ndb.setter
    def ndb(self, value):
        "Stop accidentally replacing the ndb object"
        string = "Cannot assign directly to ndb object! "
        string += "Use ndb.attr=value instead."
        raise Exception(string)
    @ndb.deleter
    def ndb(self):
        "Stop accidental deletion."
        raise Exception("Cannot delete the ndb object!")
def get_display_name(self, looker, **kwargs):
"""
Displays the name of the object in a viewer-aware manner.
Args:
looker (TypedObject, optional): The object or account that is looking
at/getting inforamtion for this object. If not given, some
'safe' minimum level should be returned.
Returns:
name (str): A string containing the name of the object,
including the DBREF if this user is privileged to control
said object.
Notes:
This function could be extended to change how object names
appear to users in character, but be wary. This function
does not change an object's keys or aliases when
searching, and is expected to produce something useful for
builders.
"""
if self.access(looker, access_type="controls"):
return "{}(#{})".format(self.name, self.id)
return self.name
def get_extra_info(self, looker, **kwargs):
"""
Used when an object is in a list of ambiguous objects as an
additional information tag.
For instance, if you had potions which could have varying
levels of liquid left in them, you might want to display how
many drinks are left in each when selecting which to drop, but
not in your normal inventory listing.
Args:
looker (TypedObject): The object or account that is looking
at/getting information for this object.
Returns:
info (str): A string with disambiguating information,
conventionally with a leading space.
"""
if self.location == looker:
return " (carried)"
return ""
def at_rename(self, oldname, newname):
"""
This Hook is called by @name on a successful rename.
Args:
oldname (str): The instance's original name.
newname (str): The new name for the instance.
"""
pass
#
# Web/Django methods
#
    def web_get_admin_url(self):
        """
        Returns the URI path for the Django Admin page for this object.
        ex. Account#1 = '/admin/accounts/accountdb/1/change/'

        Returns:
            path (str): URI path to Django Admin page for object.
        """
        # resolve the admin change-view route for this concrete model class
        content_type = ContentType.objects.get_for_model(self.__class__)
        return reverse(
            "admin:%s_%s_change" % (content_type.app_label, content_type.model), args=(self.id,)
        )
@classmethod
def web_get_create_url(cls):
"""
Returns the URI path for a View that allows users to create new
instances of this object.
ex. Chargen = '/characters/create/'
For this to work, the developer must have defined a named view somewhere
in urls.py that follows the format 'modelname-action', so in this case
a named view of 'character-create' would be referenced by this method.
ex.
url(r'characters/create/', ChargenView.as_view(), name='character-create')
If no View has been created and defined in urls.py, returns an
HTML anchor.
This method is naive and simply returns a path. Securing access to
the actual view and limiting who can create new objects is the
developer's responsibility.
Returns:
path (str): URI path to object creation page, if defined.
"""
try:
return reverse("%s-create" % slugify(cls._meta.verbose_name))
except Exception:
return "#"
def web_get_detail_url(self):
"""
Returns the URI path for a View that allows users to view details for
this object.
Returns:
path (str): URI path to object detail page, if defined.
Examples:
```python
Oscar (Character) = '/characters/oscar/1/'
```
For this to work, the developer must have defined a named view somewhere
in urls.py that follows the format 'modelname-action', so in this case
a named view of 'character-detail' would be referenced by this method.
```python
url(r'characters/(?P<slug>[\w\d\-]+)/(?P<pk>[0-9]+)/$',
CharDetailView.as_view(), name='character-detail')
```
If no View has been created and defined in urls.py, returns an
HTML anchor.
This method is naive and simply returns a path. Securing access to
the actual view and limiting who can view this object is the
developer's responsibility.
"""
try:
return reverse(
"%s-detail" % slugify(self._meta.verbose_name),
kwargs={"pk": self.pk, "slug": slugify(self.name)},
)
except Exception:
return "#"
def web_get_puppet_url(self):
"""
Returns the URI path for a View that allows users to puppet a specific
object.
Returns:
str: URI path to object puppet page, if defined.
Examples:
::
Oscar (Character) = '/characters/oscar/1/puppet/'
For this to work, the developer must have defined a named view somewhere
in urls.py that follows the format 'modelname-action', so in this case
a named view of 'character-puppet' would be referenced by this method.
::
url(r'characters/(?P<slug>[\w\d\-]+)/(?P<pk>[0-9]+)/puppet/$',
CharPuppetView.as_view(), name='character-puppet')
If no View has been created and defined in urls.py, returns an
HTML anchor.
This method is naive and simply returns a path. Securing access to
the actual view and limiting who can view this object is the developer's
responsibility.
"""
try:
return reverse(
"%s-puppet" % slugify(self._meta.verbose_name),
kwargs={"pk": self.pk, "slug": slugify(self.name)},
)
except Exception:
return "#"
def web_get_update_url(self):
"""
Returns the URI path for a View that allows users to update this
object.
Returns:
str: URI path to object update page, if defined.
Examples:
```python
Oscar (Character) = '/characters/oscar/1/change/'
```
For this to work, the developer must have defined a named view somewhere
in urls.py that follows the format 'modelname-action', so in this case
a named view of 'character-update' would be referenced by this method.
::
url(r'characters/(?P<slug>[\w\d\-]+)/(?P<pk>[0-9]+)/change/$',
CharUpdateView.as_view(), name='character-update')
If no View has been created and defined in urls.py, returns an
HTML anchor.
This method is naive and simply returns a path. Securing access to
the actual view and limiting who can modify objects is the developer's
responsibility.
"""
try:
return reverse(
"%s-update" % slugify(self._meta.verbose_name),
kwargs={"pk": self.pk, "slug": slugify(self.name)},
)
except Exception:
return "#"
def web_get_delete_url(self):
"""
Returns the URI path for a View that allows users to delete this object.
Returns:
path (str): URI path to object deletion page, if defined.
Examples:
```python
Oscar (Character) = '/characters/oscar/1/delete/'
```
For this to work, the developer must have defined a named view
somewhere in urls.py that follows the format 'modelname-action', so
in this case a named view of 'character-detail' would be referenced
by this method.
::
url(r'characters/(?P<slug>[\w\d\-]+)/(?P<pk>[0-9]+)/delete/$',
CharDeleteView.as_view(), name='character-delete')
If no View has been created and defined in urls.py, returns an HTML
anchor.
This method is naive and simply returns a path. Securing access to
the actual view and limiting who can delete this object is the
developer's responsibility.
"""
try:
return reverse(
"%s-delete" % slugify(self._meta.verbose_name),
kwargs={"pk": self.pk, "slug": slugify(self.name)},
)
except Exception:
return "#"
# Used by Django Sites/Admin
get_absolute_url = web_get_detail_url
|
a7a9c01e7242cc5726588c87ffb4d2b9f977ea97
|
d7fd46dfd8aab520c4958fa065367e168b6bfee7
|
/compiler_gym/envs/llvm/datasets/jotaibench.py
|
1fed72e8603b5f9d7fe81325dfec9470c41576b7
|
[
"MIT"
] |
permissive
|
facebookresearch/CompilerGym
|
f04a79fbfdbaf8afd6920ec205db6f1b6003d073
|
9e0c0beb12da1e1ea82ae6ce920713ee28dda4c9
|
refs/heads/development
| 2023-08-31T09:17:48.967970
| 2023-03-10T19:29:56
| 2023-03-10T19:29:56
| 312,059,069
| 787
| 126
|
MIT
| 2023-03-10T19:29:58
| 2020-11-11T18:44:35
|
Python
|
UTF-8
|
Python
| false
| false
| 8,225
|
py
|
jotaibench.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import subprocess
from concurrent.futures import as_completed
from pathlib import Path
from compiler_gym.datasets import Benchmark, TarDataset, TarDatasetWithManifest
from compiler_gym.datasets.benchmark import BenchmarkWithSource
from compiler_gym.datasets.uri import BenchmarkUri
from compiler_gym.envs.llvm.llvm_benchmark import (
ClangInvocation,
get_system_library_flags,
)
from compiler_gym.service.proto import BenchmarkDynamicConfig, Command
from compiler_gym.util import thread_pool
from compiler_gym.util.filesystem import atomic_file_write
class JotaiBenchDataset(TarDatasetWithManifest):
    """A dataset of C programs curated from GitHub source code.

    The dataset is from:

        da Silva, Anderson Faustino, Bruno Conde Kind, José Wesley de Souza
        Magalhaes, Jerônimo Nunes Rocha, Breno Campos Ferreira Guimaraes, and
        Fernando Magno Quinão Pereira. "ANGHABENCH: A Suite with One Million
        Compilable C Benchmarks for Code-Size Reduction." In 2021 IEEE/ACM
        International Symposium on Code Generation and Optimization (CGO),
        pp. 378-390. IEEE, 2021.

    And is available at:

        http://cuda.dcc.ufmg.br/Jotai/src/

    Installation
    ------------

    The JotaiBench dataset consists of C functions that are compiled to LLVM-IR
    on-demand and cached. The first time each benchmark is used there is an
    overhead of compiling it from C to bitcode. This is a one-off cost.
    """

    def __init__(
        self,
        site_data_base: Path,
    ):
        """
        :param site_data_base: Directory under which dataset files are stored.
        """
        # NOTE(review): references/Homepage point at AnghaBench ("angha") -
        # presumably inherited from the AnghaBench dataset definition; verify.
        super().__init__(
            name="benchmark://jotaibench-v0",
            description="Compile-only C/C++ functions extracted from GitHub",
            references={
                "Paper": "https://homepages.dcc.ufmg.br/~fernando/publications/papers/FaustinoCGO21.pdf",
                "Homepage": "http://cuda.dcc.ufmg.br/angha/",
            },
            license="GNU General Public License v3.0 (GPLv3)",
            site_data_base=site_data_base,
            manifest_urls=[
                "https://dl.fbaipublicfiles.com/compiler_gym/llvm_bitcodes-10.0.0-jotaibench-v0.bz2"
            ],
            manifest_sha256="ac4ee456e52073964d472d3e3969058b2f3052f8a4b402719013a3c603eb4b62",
            tar_urls=[
                "https://github.com/ChrisCummins/jotai-benchmarks/raw/ca26ccd27afecf38919c1e101c64e3cc17e39631/benchmarks/jotaibench.bz2"
            ],
            tar_sha256="b5a51af3d4e2f77a66001635ec64ed321e0ece19873c4a888040859af7556401",
            strip_prefix="jotaibench/jotaibench-v0",
            tar_compression="bz2",
            benchmark_file_suffix=".c",
            sort_order=0,
        )

    def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark:
        """Materialize a benchmark from a parsed URI, compiling the C source
        to bitcode on first use (the result is cached on disk).

        :param uri: The parsed benchmark URI.
        :return: A benchmark with the original C source attached.
        :raises LookupError: If the URI names no benchmark, or the C file is
            missing.
        """
        self.install()
        benchmark_name = uri.path[1:]
        if not benchmark_name:
            raise LookupError(f"No benchmark specified: {uri}")
        # The absolute path of the file, without an extension.
        path_stem = self.dataset_root / benchmark_name
        bitcode_abspath = Path(f"{path_stem}.bc")
        c_file_abspath = Path(f"{path_stem}.c")
        # If the file does not exist, compile it on-demand.
        if not bitcode_abspath.is_file():
            if not c_file_abspath.is_file():
                raise LookupError(
                    f"Benchmark not found: {uri} (file not found: {c_file_abspath})"
                )
            # atomic_file_write prevents a half-written bitcode file from
            # being picked up by a concurrent or interrupted run.
            with atomic_file_write(bitcode_abspath) as tmp_path:
                compile_cmd = ClangInvocation.from_c_file(
                    c_file_abspath,
                    copt=[
                        "-ferror-limit=1",  # Stop on first error.
                        "-w",  # No warnings.
                    ],
                ).command(outpath=tmp_path)
                subprocess.check_call(compile_cmd, timeout=300)
        return BenchmarkWithSource.create(
            uri, bitcode_abspath, "function.c", c_file_abspath
        )

    def compile_all(self):
        """Eagerly compile every benchmark in the dataset, in parallel,
        printing a one-line progress indicator to stdout."""
        n = self.size
        executor = thread_pool.get_thread_pool_executor()
        # Since the dataset is lazily compiled, simply iterating over the full
        # set of URIs will compile everything. Do this in parallel.
        futures = (
            executor.submit(self.benchmark, uri) for uri in self.benchmark_uris()
        )
        for i, future in enumerate(as_completed(futures), start=1):
            future.result()  # re-raise any compilation error
            print(
                f"\r\033[KCompiled {i} of {n} programs ({i/n:.1%} complete)",
                flush=True,
                end="",
            )
class JotaiBenchRunnableDataset(TarDataset):
    """The JotaiBench functions packaged as *runnable* benchmarks: each one
    carries a build/run dynamic config so it can be compiled and executed.
    """

    def __init__(
        self,
        site_data_base: Path,
    ):
        """
        :param site_data_base: Directory under which dataset files are stored.
        """
        super().__init__(
            name="benchmark://jotai-runnable-v0",
            description="Runnable C/C++ functions extracted from GitHub",
            references={
                "Paper": "https://homepages.dcc.ufmg.br/~fernando/publications/papers/FaustinoCGO21.pdf",
                "Homepage": "http://cuda.dcc.ufmg.br/angha/",
            },
            license="GNU General Public License v3.0 (GPLv3)",
            site_data_base=site_data_base,
            tar_urls=[
                "https://github.com/lac-dcc/jotai-benchmarks/blob/main/benchmarks/jotaibench.bz2?raw=true"
            ],
            tar_sha256="b5a51af3d4e2f77a66001635ec64ed321e0ece19873c4a888040859af7556401",
            strip_prefix="jotaibench-v0",
            tar_compression="bz2",
            benchmark_file_suffix=".c",
        )

    def benchmark_from_parsed_uri(self, uri: BenchmarkUri) -> Benchmark:
        """Materialize a runnable benchmark from a parsed URI, compiling the
        C source to bitcode on first use and attaching build/run commands.

        :param uri: The parsed benchmark URI.
        :return: A benchmark whose dynamic config makes it runnable.
        :raises LookupError: If the URI names no benchmark, or the C file is
            missing.
        """
        self.install()
        benchmark_name = uri.path[1:]
        if not benchmark_name:
            raise LookupError(f"No benchmark specified: {uri}")
        # The absolute path of the file, without an extension.
        path_stem = self.dataset_root / benchmark_name
        bitcode_abspath = Path(f"{path_stem}.bc")
        c_file_abspath = Path(f"{path_stem}.c")
        # If the file does not exist, compile it to a bitcode file on-demand.
        if not bitcode_abspath.is_file():
            if not c_file_abspath.is_file():
                raise LookupError(
                    f"Benchmark not found: {uri} (file not found: {c_file_abspath})"
                )
            with atomic_file_write(bitcode_abspath) as tmp_path:
                compile_cmd = ClangInvocation.from_c_file(
                    c_file_abspath,
                    copt=[
                        "-ferror-limit=1",  # Stop on first error.
                        "-w",  # No warnings.
                    ],
                ).command(outpath=tmp_path)
                subprocess.check_call(compile_cmd, timeout=300)
        benchmark = BenchmarkWithSource.create(
            uri, bitcode_abspath, "function.c", c_file_abspath
        )
        # This is what makes a benchmark "runnable".
        benchmark.proto.dynamic_config.MergeFrom(
            BenchmarkDynamicConfig(
                build_cmd=Command(
                    argument=["$CC", "$IN"] + get_system_library_flags(),
                    timeout_seconds=30,
                    outfile=["a.out"],
                ),
                run_cmd=Command(
                    # NOTE(review): single argv entry containing a space -
                    # presumably the service shell-splits this; confirm.
                    argument=["./a.out 0"],
                    timeout_seconds=30,
                    infile=[],
                    outfile=[],
                ),
            )
        )
        return benchmark

    def compile_all(self):
        """Eagerly compile every benchmark in the dataset, in parallel,
        printing a one-line progress indicator to stdout."""
        n = self.size
        executor = thread_pool.get_thread_pool_executor()
        # Since the dataset is lazily compiled, simply iterating over the full
        # set of URIs will compile everything. Do this in parallel.
        futures = (
            executor.submit(self.benchmark, uri) for uri in self.benchmark_uris()
        )
        for i, future in enumerate(as_completed(futures), start=1):
            future.result()  # re-raise any compilation error
            print(
                f"\r\033[KCompiled {i} of {n} programs ({i/n:.1%} complete)",
                flush=True,
                end="",
            )
|
5c8aaff98dce24c0e709734f62843a980c971fdc
|
e3bb1df7fa4c51900dec7e9ddf5295e1a80938bd
|
/hummingbot/connector/exchange/vertex/vertex_exchange.py
|
4250eab3c5965ae7c025b5ef3f78f0697d6d9728
|
[
"Apache-2.0"
] |
permissive
|
CoinAlpha/hummingbot
|
0d1e2bd94de1280748647108c7d7800a09546eb8
|
c3f101759ab7e7a2165cd23a3a3e94c90c642a9b
|
refs/heads/development
| 2023-09-01T11:24:43.322137
| 2023-08-31T03:08:06
| 2023-08-31T03:08:06
| 439,330,952
| 135
| 98
|
Apache-2.0
| 2023-08-30T13:55:08
| 2021-12-17T12:50:42
|
Python
|
UTF-8
|
Python
| false
| false
| 40,073
|
py
|
vertex_exchange.py
|
import asyncio
import time
from decimal import Decimal
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
from bidict import bidict
from hummingbot.connector.constants import s_decimal_0, s_decimal_NaN
from hummingbot.connector.exchange.vertex import (
vertex_constants as CONSTANTS,
vertex_eip712_structs as vertex_eip712_structs,
vertex_utils as utils,
vertex_web_utils as web_utils,
)
from hummingbot.connector.exchange.vertex.vertex_api_order_book_data_source import VertexAPIOrderBookDataSource
from hummingbot.connector.exchange.vertex.vertex_api_user_stream_data_source import VertexAPIUserStreamDataSource
from hummingbot.connector.exchange.vertex.vertex_auth import VertexAuth
from hummingbot.connector.exchange_py_base import ExchangePyBase
from hummingbot.connector.trading_rule import TradingRule
from hummingbot.connector.utils import combine_to_hb_trading_pair
from hummingbot.core.data_type.common import OrderType, TradeType
from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState, OrderUpdate, TradeUpdate
from hummingbot.core.data_type.order_book_tracker_data_source import OrderBookTrackerDataSource
from hummingbot.core.data_type.trade_fee import AddedToCostTradeFee, TokenAmount, TradeFeeBase
from hummingbot.core.data_type.user_stream_tracker_data_source import UserStreamTrackerDataSource
from hummingbot.core.utils.estimate_fee import build_trade_fee
from hummingbot.core.web_assistant.connections.data_types import RESTMethod
from hummingbot.core.web_assistant.web_assistants_factory import WebAssistantsFactory
if TYPE_CHECKING:
from hummingbot.client.config.config_helpers import ClientConfigAdapter
class VertexExchange(ExchangePyBase):
web_utils = web_utils
    def __init__(
        self,
        client_config_map: "ClientConfigAdapter",
        vertex_arbitrum_address: str,
        vertex_arbitrum_private_key: str,
        trading_pairs: Optional[List[str]] = None,
        trading_required: bool = True,
        domain: str = CONSTANTS.DEFAULT_DOMAIN,
    ):
        """
        Initialize the Vertex exchange connector.

        :param client_config_map: Global Hummingbot client configuration.
        :param vertex_arbitrum_address: Arbitrum wallet address; converted
            to Vertex's "sender" address form.
        :param vertex_arbitrum_private_key: Private key used to sign orders.
        :param trading_pairs: Trading pairs to track.
        :param trading_required: Whether trading (vs. market-data only) is
            enabled.
        :param domain: Exchange domain; also selects the EIP-712 chain id.
        """
        self.sender_address = utils.convert_address_to_sender(vertex_arbitrum_address)
        self.private_key = vertex_arbitrum_private_key
        self._use_spot_leverage = False
        # NOTE: Vertex doesn't submit all balance updates, instead it only updates the product on position change (not cancel)
        self.real_time_balance_update = False
        self._domain = domain
        self._trading_required = trading_required
        self._trading_pairs = trading_pairs
        # per-domain cache of product/market metadata, filled at start_network
        self._exchange_market_info = {self._domain: {}}
        self._symbols = {}
        self._contracts = {}
        self._chain_id = CONSTANTS.CHAIN_IDS[self.domain]
        super().__init__(client_config_map)
    @staticmethod
    def vertex_order_type(order_type: OrderType) -> str:
        """Map a Hummingbot OrderType to Vertex's order-type string."""
        return order_type.name.upper()
    @staticmethod
    def to_hb_order_type(vertex_type: str) -> OrderType:
        """Map a Vertex order-type string back to a Hummingbot OrderType."""
        return OrderType[vertex_type]
    @property
    def authenticator(self):
        """Auth helper that signs requests with the configured key."""
        return VertexAuth(vertex_arbitrum_address=self.sender_address, vertex_arbitrum_private_key=self.private_key)
    @property
    def name(self) -> str:
        """Connector name; here simply the configured domain."""
        return self._domain
    @property
    def rate_limits_rules(self):
        """Static REST/WS rate-limit rules for Vertex."""
        return CONSTANTS.RATE_LIMITS
    @property
    def domain(self):
        """The configured exchange domain."""
        return self._domain
    @property
    def client_order_id_max_length(self):
        """Maximum length of client order ids on Vertex."""
        return CONSTANTS.MAX_ORDER_ID_LEN
    @property
    def client_order_id_prefix(self):
        """Prefix identifying orders placed by Hummingbot."""
        return CONSTANTS.HBOT_BROKER_ID
    @property
    def trading_rules_request_path(self):
        """Query path used to fetch trading rules (all products)."""
        return CONSTANTS.QUERY_PATH_URL + "?type=" + CONSTANTS.ALL_PRODUCTS_REQUEST_TYPE
    @property
    def trading_pairs_request_path(self):
        """Query path used to fetch the exchange's product list."""
        return CONSTANTS.QUERY_PATH_URL + "?type=" + CONSTANTS.ALL_PRODUCTS_REQUEST_TYPE
    @property
    def check_network_request_path(self):
        """Query path used for connectivity/status checks."""
        return CONSTANTS.QUERY_PATH_URL + "?type=" + CONSTANTS.STATUS_REQUEST_TYPE
    @property
    def trading_pairs(self):
        """The trading pairs this connector tracks."""
        return self._trading_pairs
    @property
    def is_cancel_request_in_exchange_synchronous(self) -> bool:
        """Cancellations are confirmed synchronously by the exchange."""
        return True
    @property
    def is_trading_required(self) -> bool:
        """Whether trading (vs. market-data-only) mode is enabled."""
        return self._trading_required
    async def start_network(self):
        """Build the product/market map before starting base networking."""
        await self.build_exchange_market_info()
        await super().start_network()
    def supported_order_types(self):
        """Order types this connector can place."""
        return [OrderType.MARKET, OrderType.LIMIT, OrderType.LIMIT_MAKER]
    # The three predicates below are deliberate stubs returning False.
    def _is_request_exception_related_to_time_synchronizer(self, request_exception: Exception) -> bool:
        """Return True if the error stems from clock skew (stub: always False)."""
        # TODO: implement this method correctly for the connector
        # The default implementation was added when the functionality to detect not found orders was introduced in the
        # ExchangePyBase class. Also fix the unit test test_lost_order_removed_if_not_found_during_order_status_update
        # when replacing the dummy implementation
        return False
    def _is_order_not_found_during_status_update_error(self, status_update_exception: Exception) -> bool:
        """Return True if the error means 'order not found' on status poll (stub: always False)."""
        # TODO: implement this method correctly for the connector
        # The default implementation was added when the functionality to detect not found orders was introduced in the
        # ExchangePyBase class. Also fix the unit test test_lost_order_removed_if_not_found_during_order_status_update
        # when replacing the dummy implementation
        return False
    def _is_order_not_found_during_cancelation_error(self, cancelation_exception: Exception) -> bool:
        """Return True if the error means 'order not found' on cancel (stub: always False)."""
        # TODO: implement this method correctly for the connector
        # The default implementation was added when the functionality to detect not found orders was introduced in the
        # ExchangePyBase class. Also fix the unit test test_lost_order_removed_if_not_found_during_order_status_update
        # when replacing the dummy implementation
        return False
    def _create_web_assistants_factory(self) -> WebAssistantsFactory:
        """Build the REST/WS assistants factory, wired with throttler and auth."""
        return web_utils.build_api_factory(throttler=self._throttler, auth=self._auth)
    def _create_order_book_data_source(self) -> OrderBookTrackerDataSource:
        """Create the order book data source for the tracked pairs."""
        return VertexAPIOrderBookDataSource(
            trading_pairs=self._trading_pairs,
            connector=self,
            domain=self.domain,
            api_factory=self._web_assistants_factory,
        )
    def _create_user_stream_data_source(self) -> UserStreamTrackerDataSource:
        """Create the authenticated user-stream data source."""
        return VertexAPIUserStreamDataSource(
            auth=self._auth,
            trading_pairs=self._trading_pairs,
            api_factory=self._web_assistants_factory,
            connector=self,
            domain=self.domain,
        )
def _get_fee(
self,
base_currency: str,
quote_currency: str,
order_type: OrderType,
order_side: TradeType,
amount: Decimal,
price: Decimal = s_decimal_NaN,
is_maker: Optional[bool] = None,
) -> TradeFeeBase:
trading_pair = f"{base_currency}-{quote_currency}"
is_maker = is_maker or False
if trading_pair not in self._trading_fees:
fee = build_trade_fee(
exchange=self.name,
is_maker=is_maker,
order_side=order_side,
order_type=order_type,
amount=amount,
price=price,
base_currency=base_currency,
quote_currency=quote_currency,
)
else:
fee_data = self._trading_fees[trading_pair]
if is_maker:
fee_value = fee_data["maker"]
else:
fee_value = fee_data["taker"]
fee = AddedToCostTradeFee(percent=fee_value)
return fee
    async def _place_order(
        self,
        order_id: str,
        trading_pair: str,
        amount: Decimal,
        trade_type: TradeType,
        order_type: OrderType,
        price: Decimal,
        **kwargs,
    ) -> Tuple[str, float]:
        """Sign an EIP-712 order payload and submit it to the Vertex sequencer.

        Returns a tuple of (exchange order id, transaction time in seconds).
        The exchange order id is the EIP-712 digest of the signed order.
        Raises on an explicit "failure" status from the exchange.
        """
        # NOTE: A positive amount indicates a buy, and a negative amount indicates a sell.
        if trade_type == TradeType.SELL:
            amount = -amount
        trading_rules = self.trading_rules[trading_pair]
        # Vertex uses x18 fixed-point strings for prices and amounts.
        amount_str = utils.convert_to_x18(amount, trading_rules.min_base_amount_increment)
        price_str = utils.convert_to_x18(price, trading_rules.min_price_increment)
        # LIMIT_MAKER maps to post-only; every other order type is good-til-canceled.
        if order_type and order_type == OrderType.LIMIT_MAKER:
            _order_type = CONSTANTS.TIME_IN_FORCE_POSTONLY
        else:
            _order_type = CONSTANTS.TIME_IN_FORCE_GTC
        expiration = utils.generate_expiration(time.time(), order_type=_order_type)
        product_id = utils.trading_pair_to_product_id(trading_pair, self._exchange_market_info[self._domain])
        nonce = utils.generate_nonce(time.time())
        # Orders are signed against the product's book contract.
        contract = self._exchange_market_info[self._domain][product_id]["contract"]
        sender = utils.hex_to_bytes32(self.sender_address)
        order = vertex_eip712_structs.Order(
            sender=sender, priceX18=int(price_str), amount=int(amount_str), expiration=int(expiration), nonce=nonce
        )
        signature, digest = self.authenticator.sign_payload(order, contract, self._chain_id)
        place_order = {
            "place_order": {
                "product_id": product_id,
                "order": {
                    "sender": self.sender_address,
                    "priceX18": price_str,
                    "amount": amount_str,
                    "expiration": expiration,
                    "nonce": str(nonce),
                },
                "signature": signature,
                "spot_leverage": self._use_spot_leverage,
            }
        }
        try:
            # NOTE: There are two different rate limits depending on the use of leverage.
            limit_id = CONSTANTS.PLACE_ORDER_METHOD_NO_LEVERAGE
            if self._use_spot_leverage:
                limit_id = CONSTANTS.PLACE_ORDER_METHOD
            order_result = await self._api_post(path_url=CONSTANTS.POST_PATH_URL, data=place_order, limit_id=limit_id)
            if order_result.get("status") == "failure":
                raise Exception(f"Failed to create order {order_result}")
        except IOError:
            raise
        # The digest doubles as the exchange-side order id.
        o_id = digest
        transact_time = int(time.time())
        await self._update_balances()
        return o_id, transact_time
    async def _place_cancel(self, order_id: str, tracked_order: InFlightOrder):
        """Sign and submit a cancellation for *tracked_order*.

        Returns True when the cancellation succeeds, or when the exchange
        reports it holds no record of the order (error_code 2020, in which case
        the order is marked canceled locally). Returns False otherwise.
        """
        sender = utils.hex_to_bytes32(self.sender_address)
        product_id = utils.trading_pair_to_product_id(
            tracked_order.trading_pair, self._exchange_market_info[self._domain]
        )
        nonce = utils.generate_nonce(time.time())
        # NOTE: Dynamically adjust this
        endpoint_contract = CONSTANTS.CONTRACTS[self.domain]
        # Prefer the exchange order id (the order's EIP-712 digest) when known.
        if tracked_order.exchange_order_id:
            order_id = tracked_order.exchange_order_id
        else:
            order_id = tracked_order.client_order_id
        order_id_bytes = utils.hex_to_bytes32(order_id)
        cancel = vertex_eip712_structs.Cancellation(
            sender=sender, productIds=[int(product_id)], digests=[order_id_bytes], nonce=nonce
        )
        signature, digest = self.authenticator.sign_payload(cancel, endpoint_contract, self._chain_id)
        cancel_orders = {
            "cancel_orders": {
                "tx": {
                    "sender": self.sender_address,
                    "productIds": [product_id],
                    "digests": [order_id],
                    "nonce": str(nonce),
                },
                "signature": signature,
            }
        }
        cancel_result = await self._api_post(
            path_url=CONSTANTS.POST_PATH_URL, data=cancel_orders, limit_id=CONSTANTS.CANCEL_ORDERS_METHOD
        )
        await self._update_balances()
        if cancel_result.get("status") == "failure":
            # Error 2020: the exchange holds no record of the order.
            if cancel_result.get("error_code") and cancel_result["error_code"] == 2020:
                # NOTE: This is the most elegant handling outside of passing through restrictive lost order limit to 0
                self._order_tracker._trigger_cancelled_event(tracked_order)
                self._order_tracker._trigger_order_completion(tracked_order)
                self.logger().warning(f"Marked order canceled as the exchange holds no record: {order_id}")
                return True
        if isinstance(cancel_result, dict) and cancel_result["status"] == "success":
            return True
        return False
async def _format_trading_rules(self, exchange_info_dict: Dict[int, Any]) -> List[TradingRule]:
"""
Example:
"spot_products": [
{
"product_id": 1,
"oracle_price_x18": "25741837349502615455138",
"risk": {
"long_weight_initial_x18": "900000000000000000",
"short_weight_initial_x18": "1100000000000000000",
"long_weight_maintenance_x18": "950000000000000000",
"short_weight_maintenance_x18": "1050000000000000000",
"large_position_penalty_x18": "0"
},
"config": {
"token": "0x5cc7c91690b2cbaee19a513473d73403e13fb431",
"interest_inflection_util_x18": "800000000000000000",
"interest_floor_x18": "10000000000000000",
"interest_small_cap_x18": "40000000000000000",
"interest_large_cap_x18": "1000000000000000000"
},
"state": {
"cumulative_deposits_multiplier_x18": "1001477610660740732",
"cumulative_borrows_multiplier_x18": "1005360996332066877",
"total_deposits_normalized": "336131479261252096179100",
"total_borrows_normalized": "106663044719707335242158"
},
"lp_state": {
"supply": "62623749006749305149587800",
"quote": {
"amount": "90948379767723832838627925",
"last_cumulative_multiplier_x18": "1000000008171891309"
},
"base": {
"amount": "3549779755052134826620",
"last_cumulative_multiplier_x18": "1001477610660740732"
}
},
"book_info": {
"size_increment": "1000000000000000",
"price_increment_x18": "1000000000000000000",
"min_size": "10000000000000000",
"collected_fees": "41050488980466524595135",
"lp_spread_x18": "3000000000000000"
}
},
]
"""
retval = []
for rule in exchange_info_dict:
try:
if rule == 0:
# NOTE: USDC product doesn't have a market
continue
trading_pair = utils.market_to_trading_pair(self._exchange_market_info[self._domain][rule]["market"])
rule_set: Dict[str, Any] = exchange_info_dict[rule]["book_info"]
min_order_size = utils.convert_from_x18(rule_set.get("min_size"))
min_price_increment = utils.convert_from_x18(rule_set.get("price_increment_x18"))
min_base_amount_increment = utils.convert_from_x18(rule_set.get("size_increment"))
retval.append(
TradingRule(
trading_pair,
min_order_size=Decimal(min_order_size),
min_price_increment=Decimal(min_price_increment),
min_base_amount_increment=Decimal(min_base_amount_increment),
min_notional_size=Decimal("0.01"), # NOTE: added to ensure proper functioning with strategies.
)
)
except Exception:
self.logger().exception(f"Error parsing the trading pair rule {rule.get('name')}. Skipping.")
return retval
    async def _update_trading_fees(self):
        """
        Update fees information from the exchange.

        Example fee-rates payload (arrays are indexed by product id)::

            {
                "status": "success",
                "data": {
                    "taker_fee_rates_x18": ["0", "300000000000000", "200000000000000", ...],
                    "maker_fee_rates_x18": ["0", "0", "0", ...],
                    "liquidation_sequencer_fee": "250000000000000000",
                    "health_check_sequencer_fee": "100000000000000000",
                    "taker_sequencer_fee": "25000000000000000",
                    "withdraw_sequencer_fees": ["10000000000000000", ...]
                }
            }

        On any failure the connector falls back to the default Vertex fees.
        """
        try:
            fee_rates = await self._get_fee_rates()
            taker_fees = {idx: fee_rate for idx, fee_rate in enumerate(fee_rates["taker_fee_rates_x18"])}
            maker_fees = {idx: fee_rate for idx, fee_rate in enumerate(fee_rates["maker_fee_rates_x18"])}
            # NOTE: This builds our fee rates based on indexed product_id
            for trading_pair in self._trading_pairs:
                product_id = utils.trading_pair_to_product_id(
                    trading_pair=trading_pair, exchange_market_info=self._exchange_market_info[self._domain]
                )
                self._trading_fees[trading_pair] = {
                    "maker": Decimal(utils.convert_from_x18(maker_fees[product_id])),
                    "taker": Decimal(utils.convert_from_x18(taker_fees[product_id])),
                }
        except Exception:
            # NOTE: If failure to fetch, build default fees
            for trading_pair in self._trading_pairs:
                self._trading_fees[trading_pair] = {
                    "maker": utils.DEFAULT_FEES.maker_percent_fee_decimal,
                    "taker": utils.DEFAULT_FEES.taker_percent_fee_decimal,
                }
    async def _user_stream_event_listener(self):
        """
        This functions runs in background continuously processing the events received from the exchange by the user
        stream data source. It keeps reading events from the queue until the task is interrupted.
        The events received are fill and position change events.
        """
        async for event_message in self._iter_user_event_queue():
            try:
                event_type = event_message.get("type")
                if event_type == CONSTANTS.FILL_EVENT_TYPE:
                    exchange_order_id = event_message.get("order_digest")
                    # Any remaining quantity means this fill is partial.
                    execution_type = (
                        OrderState.PARTIALLY_FILLED
                        if Decimal(utils.convert_from_x18(event_message["remaining_qty"])) > Decimal("0.0")
                        else OrderState.FILLED
                    )
                    tracked_order = self._order_tracker.fetch_order(exchange_order_id=exchange_order_id)
                    if tracked_order is not None:
                        if execution_type in [OrderState.PARTIALLY_FILLED, OrderState.FILLED]:
                            amount = abs(Decimal(utils.convert_from_x18(event_message["filled_qty"])))
                            price = Decimal(utils.convert_from_x18(event_message["price"]))
                            # Pick maker or taker rate depending on the fill side.
                            fee_rate = self._trading_fees[tracked_order.trading_pair]["maker"]
                            if event_message["is_taker"]:
                                fee_rate = self._trading_fees[tracked_order.trading_pair]["taker"]
                            fee = TradeFeeBase.new_spot_fee(
                                fee_schema=self.trade_fee_schema(),
                                trade_type=tracked_order.trade_type,
                                percent=fee_rate,
                                percent_token="USDC",  # NOTE: All fees are denominated in USDC
                            )
                            trade_update = TradeUpdate(
                                trade_id=str(event_message["timestamp"]),
                                client_order_id=tracked_order.client_order_id,
                                exchange_order_id=str(exchange_order_id),
                                trading_pair=tracked_order.trading_pair,
                                fee=fee,
                                fill_base_amount=amount,
                                fill_quote_amount=amount * price,
                                fill_price=price,
                                # Event timestamps are nanoseconds; convert to seconds.
                                fill_timestamp=int(event_message["timestamp"]) * 1e-9,
                            )
                            self._order_tracker.process_trade_update(trade_update)
                        order_update = OrderUpdate(
                            trading_pair=tracked_order.trading_pair,
                            update_timestamp=int(event_message["timestamp"]) * 1e-9,
                            new_state=execution_type,
                            client_order_id=tracked_order.client_order_id,
                            exchange_order_id=str(exchange_order_id),
                        )
                        self._order_tracker.process_order_update(order_update=order_update)
                elif event_type == CONSTANTS.POSITION_CHANGE_EVENT_TYPE:
                    # Position changes shift balances; refresh them.
                    await self._update_balances()
            except asyncio.CancelledError:
                self.logger().error(
                    f"An Asyncio.CancelledError occurs when process message: {event_message}.", exc_info=True
                )
                raise
            except Exception:
                self.logger().error("Unexpected error in user stream listener loop.", exc_info=True)
                await self._sleep(5.0)
    async def _all_trade_updates_for_order(self, order: InFlightOrder) -> List[TradeUpdate]:
        """Fetch all trade (fill) updates for *order* from the Vertex indexer.

        The indexer returns every match for the subaccount/product, so results
        are filtered down to the order's digest. Fill timestamps are taken from
        the matching transaction record when one is found, else the current time.
        """
        trade_updates = []
        if order.exchange_order_id is not None:
            exchange_order_id = order.exchange_order_id
            trading_pair = order.trading_pair
            product_id = utils.trading_pair_to_product_id(order.trading_pair, self._exchange_market_info[self._domain])
            matches_response = await self._api_post(
                path_url=CONSTANTS.INDEXER_PATH_URL,
                data={"matches": {"product_ids": [product_id], "subaccount": self.sender_address}},
                limit_id=CONSTANTS.INDEXER_PATH_URL,
            )
            matches_data = matches_response.get("matches", [])
            if matches_data is not None:
                for trade in matches_data:
                    # NOTE: Vertex returns all orders and matches.
                    if trade["digest"] != order.exchange_order_id:
                        continue
                    exchange_order_id = str(trade["digest"])
                    # NOTE: Matches can be composed of multiple trade transactions.
                    # https://vertex-protocol.gitbook.io/docs/developer-resources/api/indexer-api/matches
                    submission_idx = str(trade["submission_idx"])
                    trade_fee = utils.convert_from_x18(trade["fee"])
                    trade_amount = utils.convert_from_x18(trade["order"]["amount"])
                    # A negative order amount indicates a sell.
                    fee = TradeFeeBase.new_spot_fee(
                        fee_schema=self.trade_fee_schema(),
                        trade_type=TradeType.SELL if Decimal(trade_amount) < s_decimal_0 else TradeType.BUY,
                        flat_fees=[TokenAmount(amount=Decimal(trade_fee), token="USDC")],
                    )
                    fill_base_amount = utils.convert_from_x18(trade["base_filled"])
                    converted_price = utils.convert_from_x18(trade["order"]["priceX18"])
                    # NOTE(review): this also holds base_filled; the quote amount is
                    # derived below as price * base_filled — confirm intended.
                    fill_quote_amount = utils.convert_from_x18(trade["base_filled"])
                    # NOTE: Matches can be composed of multiple trade transactions..
                    matches_transactions_data = matches_response.get("txs", [])
                    trade_timestamp = int(time.time())
                    for transaction in matches_transactions_data:
                        if str(transaction["submission_idx"]) != submission_idx:
                            continue
                        trade_timestamp = transaction["timestamp"]
                        break
                    trade_update = TradeUpdate(
                        trade_id=submission_idx,
                        client_order_id=order.client_order_id,
                        exchange_order_id=exchange_order_id,
                        trading_pair=trading_pair,
                        fee=fee,
                        fill_base_amount=abs(Decimal(fill_base_amount)),
                        fill_quote_amount=Decimal(converted_price) * abs(Decimal(fill_quote_amount)),
                        fill_price=Decimal(converted_price),
                        fill_timestamp=int(trade_timestamp),
                    )
                    trade_updates.append(trade_update)
        return trade_updates
async def _request_order_status(self, tracked_order: InFlightOrder) -> OrderUpdate:
"""
This requests the order from the live squencer, then if it cannot locate it, it attempts to locate it with the indexer
"""
live_order = True
try:
order_request_response = await self._api_get(
path_url=CONSTANTS.QUERY_PATH_URL,
params={
"type": CONSTANTS.ORDER_REQUEST_TYPE,
"product_id": utils.trading_pair_to_product_id(
tracked_order.trading_pair, self._exchange_market_info[self._domain]
),
"digest": tracked_order.exchange_order_id,
},
limit_id=CONSTANTS.ORDER_REQUEST_TYPE,
)
if order_request_response.get("status") == "failure":
updated_order_data = {
"status": "failure",
"data": {"unfilled_amount": 100000000000, "amount": 1000000000000},
}
else:
updated_order_data = order_request_response
except Exception as e:
self.logger().warning(f"Error requesting orders from Vertex sequencer: {e}")
# NOTE: Try to fetch order details from indexer
if updated_order_data.get("status") == "failure":
live_order = False
try:
data = {
"orders": {"digests": [tracked_order.exchange_order_id]},
}
indexed_order_data = await self._api_post(
path_url=CONSTANTS.INDEXER_PATH_URL, data=data, limit_id=CONSTANTS.INDEXER_PATH_URL
)
orders = indexed_order_data.get("orders", [])
if len(orders) > 0:
updated_order_data["data"] = orders[0]
updated_order_data["data"]["unfilled_amount"] = float(updated_order_data["data"]["amount"]) - float(
updated_order_data["data"]["base_filled"]
)
except Exception as e:
self.logger().warning(f"Error requesting orders from Vertex indexer: {e}")
unfilled_amount = Decimal(utils.convert_from_x18(updated_order_data["data"]["unfilled_amount"]))
order_amount = Decimal(utils.convert_from_x18(updated_order_data["data"]["amount"]))
filled_amount = abs(Decimal(order_amount - unfilled_amount))
if filled_amount == s_decimal_0:
new_state = OrderState.OPEN
if filled_amount > s_decimal_0:
new_state = OrderState.PARTIALLY_FILLED
# NOTE: Default to canceled if this is queried against indexer
if not live_order:
new_state = OrderState.CANCELED
if unfilled_amount == s_decimal_0:
if live_order:
new_state = OrderState.FILLED
else:
# Override default canceled with complete if complete
new_state = OrderState.COMPLETED
order_update = OrderUpdate(
client_order_id=tracked_order.client_order_id,
exchange_order_id=str(tracked_order.exchange_order_id),
trading_pair=tracked_order.trading_pair,
update_timestamp=int(time.time()),
new_state=new_state,
)
return order_update
    async def _update_balances(self):
        """Refresh total and available balances from the account's spot balances.

        Available balances come from the max-withdrawable queries; assets no
        longer reported by the exchange are dropped from the local maps.
        """
        # Market info is needed to map product ids to symbols.
        if not self._exchange_market_info[self._domain]:
            await self.build_exchange_market_info()
        local_asset_names = set(self._account_balances.keys())
        remote_asset_names = set()
        account = await self._get_account()
        available_balances = await self._get_account_max_withdrawable()
        self._allocated_collateral_sum = s_decimal_0
        # Loop for all the balances returned for account
        for spot_balance in account["spot_balances"]:
            try:
                product_id = spot_balance["product_id"]
                # If we don't have it in our exchange defined list, we don't care
                if product_id not in self._exchange_market_info[self._domain] and product_id != 0:
                    continue
                # NOTE(review): product 0 (USDC) is allowed past the filter but may
                # be absent from the market info; a resulting KeyError would be
                # swallowed by the handler below — confirm intended.
                asset_name = self._exchange_market_info[self._domain][product_id]["symbol"]
                total_balance = Decimal(utils.convert_from_x18(spot_balance["balance"]["amount"]))
                available_balance = s_decimal_0
                if product_id in available_balances:
                    available_balance = available_balances[product_id]
                self._account_available_balances[asset_name] = available_balance
                self._account_balances[asset_name] = total_balance
                remote_asset_names.add(asset_name)
            except Exception as e:
                self.logger().warning(f"Balance Error: {spot_balance} {e}")
                pass
        # Remove assets the exchange no longer reports.
        asset_names_to_remove = local_asset_names.difference(remote_asset_names)
        for asset_name in asset_names_to_remove:
            del self._account_available_balances[asset_name]
            del self._account_balances[asset_name]
    async def build_exchange_market_info(self):
        """Assemble and cache per-product market info for the current domain.

        Combines the spot-products payload with the symbol and contract
        lookups, annotating each product with its symbol, its
        "<SYMBOL>/USDC" market string and its book contract address.
        Returns the product map keyed by product id.
        """
        exchange_info = await self._api_get(path_url=self.trading_pairs_request_path)
        symbol_map = await self._get_symbols()
        contract_info = await self._get_contracts()
        self._exchange_market_info[self._domain] = {}
        # product_id -> symbol lookup.
        symbol_data = {}
        for product in symbol_map:
            symbol_data.update({product["product_id"]: product["symbol"]})
        product_data = {}
        for product in exchange_info["data"]["spot_products"]:
            if product["product_id"] in symbol_data:
                try:
                    product_id = int(product["product_id"])
                    # NOTE: Hardcoded USDC
                    product.update({"symbol": f"{symbol_data[product_id]}"})
                    product.update({"market": f"{symbol_data[product_id]}/USDC"})
                    product.update({"contract": f"{contract_info[product_id]}"})
                    product_data.update({product_id: product})
                except Exception:
                    pass
        self._exchange_market_info[self._domain] = product_data
        return product_data
async def _make_trading_rules_request(self) -> Any:
return self._exchange_market_info[self._domain]
async def _initialize_trading_pair_symbol_map(self):
try:
exchange_info = await self.build_exchange_market_info()
self._initialize_trading_pair_symbols_from_exchange_info(exchange_info=exchange_info)
except Exception:
self.logger().exception("There was an error requesting exchange info.")
def _initialize_trading_pair_symbols_from_exchange_info(self, exchange_info: Dict[str, Any]):
mapping = bidict()
for product_id in filter(utils.is_exchange_information_valid, exchange_info):
trading_pair = exchange_info[product_id]["market"]
# NOTE: USDC is an asset, however it doesn't have a "market"
if product_id == 0:
continue
base = trading_pair.split("/")[0]
quote = trading_pair.split("/")[1]
mapping[trading_pair] = combine_to_hb_trading_pair(base=base, quote=quote)
self._set_trading_pair_symbol_map(mapping)
    async def _get_last_traded_price(self, trading_pair: str) -> float:
        """Return the last traded price for *trading_pair*.

        Uses the most recent indexer match; on any failure, falls back to the
        order-book mid price quantized to the pair's price increment.
        """
        product_id = utils.trading_pair_to_product_id(trading_pair, self._exchange_market_info[self._domain])
        try:
            data = {"matches": {"product_ids": [product_id], "limit": 5}}
            matches_response = await self._api_post(
                path_url=CONSTANTS.INDEXER_PATH_URL,
                data=data,
                limit_id=CONSTANTS.INDEXER_PATH_URL,
            )
            matches = matches_response.get("matches", [])
            # Matches are newest-first; take the most recent one.
            if matches and len(matches) > 0:
                last_price = float(utils.convert_from_x18(matches[0]["order"]["priceX18"]))
                return last_price
        except Exception as e:
            self.logger().warning(f"Failed to get last traded price, using mid price instead, error: {e}")
        params = {"type": CONSTANTS.MARKET_PRICE_REQUEST_TYPE, "product_id": product_id}
        resp_json = await self._api_get(
            path_url=CONSTANTS.QUERY_PATH_URL,
            params=params,
            limit_id=CONSTANTS.MARKET_PRICE_REQUEST_TYPE,
        )
        trading_rules = self.trading_rules[trading_pair]
        # Mid price = (best bid + best ask) / 2, quantized to the price increment.
        mid_price = float(
            str(
                (
                    (
                        Decimal(utils.convert_from_x18(resp_json["data"]["bid_x18"]))
                        + Decimal(utils.convert_from_x18(resp_json["data"]["ask_x18"]))
                    )
                    / Decimal("2.0")
                ).quantize(trading_rules.min_price_increment)
            )
        )
        return mid_price
    async def _get_account(self):
        """Return the subaccount info payload for the configured sender address.

        Raises IOError when the response is missing, is an error envelope, or
        carries one of the known IP-restriction error codes (CONSTANTS.ERRORS).
        """
        sender_address = self.sender_address
        response: Dict[str, Dict[str, Any]] = await self._api_get(
            path_url=CONSTANTS.QUERY_PATH_URL,
            params={"type": CONSTANTS.SUBACCOUNT_INFO_REQUEST_TYPE, "subaccount": sender_address},
            limit_id=CONSTANTS.SUBACCOUNT_INFO_REQUEST_TYPE,
        )
        if response is None or "failure" in response["status"] or "data" not in response:
            if "error_code" in response and response["error_code"] in CONSTANTS.ERRORS:
                raise IOError(f"IP address issue from Vertex {response}")
            raise IOError(f"Unable to get account info for sender address {sender_address}")
        return response["data"]
    async def _get_symbols(self):
        """Fetch and cache the product symbol list from the symbols endpoint.

        NOTE(review): a successful response appears to be a bare list; any
        payload carrying a "status" key is treated as an error envelope —
        confirm against the Vertex API.
        """
        response = await self._api_get(path_url=CONSTANTS.SYMBOLS_PATH_URL)
        if response is None or "status" in response:
            raise IOError("Unable to get Vertex symbols")
        # Cache for later lookups.
        self._symbols = response
        return response
    async def _get_account_max_withdrawable(self):
        """Return max-withdrawable (available) balances keyed by product id.

        Always queries product 0 (USDC) first, then every configured trading
        pair — or every known market when no trading pairs are configured.
        Raises IOError when any query fails.
        """
        sender_address = self.sender_address
        available_balances = {}
        trading_pairs = self._trading_pairs
        params = {
            "type": CONSTANTS.MAX_WITHDRAWABLE_REQUEST_TYPE,
            "product_id": 0,
            "sender": sender_address,
            "spot_leverage": str(self._use_spot_leverage).lower(),
        }
        response = await self._api_get(path_url=CONSTANTS.QUERY_PATH_URL, params=params)
        if response is None or "failure" in response["status"] or "data" not in response:
            raise IOError(f"Unable to get available balance of product {0} for {sender_address}")
        available_balances.update({0: Decimal(utils.convert_from_x18(response["data"]["max_withdrawable"]))})
        if len(self._trading_pairs) == 0:
            # No configured pairs: fall back to every known market.
            trading_pairs = []
            for product_id in self._exchange_market_info[self._domain]:
                if product_id != 0:
                    trading_pairs.append(self._exchange_market_info[self._domain][product_id]["market"])
        for trading_pair in trading_pairs:
            product_id = utils.trading_pair_to_product_id(
                trading_pair=trading_pair, exchange_market_info=self._exchange_market_info[self._domain]
            )
            params = {
                "type": CONSTANTS.MAX_WITHDRAWABLE_REQUEST_TYPE,
                "product_id": product_id,
                "sender": sender_address,
                "spot_leverage": str(self._use_spot_leverage).lower(),
            }
            response = await self._api_get(path_url=CONSTANTS.QUERY_PATH_URL, params=params)
            if response is None or "failure" in response["status"] or "data" not in response:
                raise IOError(f"Unable to get available balance of product {product_id} for {sender_address}")
            available_balances.update(
                {product_id: Decimal(utils.convert_from_x18(response["data"]["max_withdrawable"]))}
            )
        return available_balances
    async def _get_contracts(self):
        """Fetch and cache the list of book contract addresses.

        Raises IOError on a missing or error response.
        """
        response = await self._api_get(
            path_url=CONSTANTS.QUERY_PATH_URL, params={"type": CONSTANTS.CONTRACTS_REQUEST_TYPE}
        )
        if response is None or "failure" in response["status"] or "data" not in response:
            raise IOError("Unable to get Vertex contracts")
        # NOTE: List indexed to be matched according to product_id
        contracts = response["data"]["book_addrs"]
        self._contracts = contracts
        return contracts
    async def _get_fee_rates(self):
        """Return the fee-rate payload (maker/taker x18 arrays) for the sender.

        Raises IOError on a missing or error response.
        """
        sender_address = self.sender_address
        response: Dict[str, Dict[str, Any]] = await self._api_get(
            path_url=CONSTANTS.QUERY_PATH_URL,
            params={
                "type": CONSTANTS.FEE_RATES_REQUEST_TYPE,
                "sender": sender_address,
            },
            is_auth_required=False,
            limit_id=CONSTANTS.FEE_RATES_REQUEST_TYPE,
        )
        if response is None or "failure" in response["status"] or "data" not in response:
            raise IOError(f"Unable to get trading fees sender address {sender_address}")
        return response["data"]
async def _api_request(
self,
path_url,
method: RESTMethod = RESTMethod.GET,
params: Optional[Dict[str, Any]] = None,
data: Optional[Dict[str, Any]] = None,
is_auth_required: bool = False,
return_err: bool = False,
limit_id: Optional[str] = None,
**kwargs,
) -> Dict[str, Any]:
last_exception = None
rest_assistant = await self._web_assistants_factory.get_rest_assistant()
url = web_utils.public_rest_url(path_url, domain=self.domain)
local_headers = {"Content-Type": "application/json"}
for _ in range(2):
try:
request_result = await rest_assistant.execute_request(
url=url,
params=params,
data=data,
method=method,
is_auth_required=is_auth_required,
return_err=return_err,
headers=local_headers,
throttler_limit_id=limit_id if limit_id else CONSTANTS.ALL_ENDPOINTS_LIMIT,
)
return request_result
except IOError as request_exception:
last_exception = request_exception
raise
# Failed even after the last retry
raise last_exception
|
2507c499e5ff5f5ff0cd1f0dfea1303625441179
|
595cb1db42944a82ee6328596a7e27e93f239bbd
|
/python/magellan/types.py
|
ef3e3ffe29c13c49eaaa10748101571cd84e9c62
|
[
"Apache-2.0"
] |
permissive
|
harsha2010/magellan
|
37be4ce603324591365f4e84428be5faed4058cf
|
63655a22df995f07a24b7e49daf8c294f2653d95
|
refs/heads/master
| 2022-06-27T13:04:50.181222
| 2021-08-26T15:37:18
| 2021-08-26T15:37:18
| 36,629,888
| 591
| 170
|
Apache-2.0
| 2019-10-05T22:43:05
| 2015-06-01T01:06:52
|
Scala
|
UTF-8
|
Python
| false
| false
| 11,168
|
py
|
types.py
|
#
# Copyright 2015 Ram Sriharsha
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import sys
from itertools import izip
from pyspark import SparkContext
from pyspark.sql.types import DataType, UserDefinedType, StructField, StructType, \
ArrayType, DoubleType, IntegerType
__all__ = ['Point']
try:
from shapely.geometry import Point as SPoint
from shapely.geometry import Polygon as SPolygon
from shapely.geometry import LineString, MultiLineString
_have_shapely = True
except:
# No Shapely in environment, but that's okay
_have_shapely = False
class Shape(DataType):
    """Base class for Magellan geometric types.

    Subclasses implement :meth:`convert` to produce the equivalent Shapely
    geometry; :meth:`toShapely` guards that conversion behind the optional
    Shapely dependency.
    """

    def convert(self):
        raise NotImplementedError()

    def toShapely(self):
        # Only convert when Shapely was importable at module load time.
        if not _have_shapely:
            raise TypeError("Cannot convert to Shapely type")
        return self.convert()
class PointUDT(UserDefinedType):
    """User-defined type (UDT) pairing :class:`Point` with Spark SQL.

    .. note:: WARN: SpatialSDK Internal Use Only
    """

    @classmethod
    def sqlType(cls):
        # Underlying SQL storage type for this UDT.
        return Point()

    @classmethod
    def module(cls):
        """
        The Python module of the UDT.
        """
        return "magellan.types"

    @classmethod
    def scalaUDT(cls):
        """
        The class name of the paired Scala UDT.
        """
        return "magellan.PointUDT"

    def serialize(self, obj):
        """
        Converts a user-type object into a SQL datum.
        """
        if isinstance(obj, Point):
            return obj
        else:
            raise TypeError("cannot serialize %r of type %r" % (obj, type(obj)))

    def deserialize(self, datum):
        """
        Converts a SQL datum into a user-type object.
        """
        if isinstance(datum, Point):
            return datum
        else:
            # Expect a two-element row of (x, y).
            assert len(datum) == 2, \
                "PointUDT.deserialize given row with length %d but requires 2" % len(datum)
            return Point(datum[0], datum[1])

    def simpleString(self):
        return 'point'

    @classmethod
    def fromJson(cls, json):
        # ``json`` is an already-parsed dict with "x" and "y" keys.
        return Point(json['x'], json['y'])
class Point(Shape):
    """
    A point is a zero dimensional shape.
    The coordinates of a point can be in linear units such as feet or meters,
    or they can be in angular units such as degrees or radians.
    The associated spatial reference specifies the units of the coordinates.
    In the case of a geographic coordinate system, the x-coordinate is the longitude
    and the y-coordinate is the latitude.

    >>> v = Point(1.0, 2.0)
    >>> v
    Point (1.0,2.0)
    """
    __UDT__ = PointUDT()

    def __init__(self, x = 0.0, y = 0.0):
        self.x = x
        self.y = y

    def __str__(self):
        return "Point (" + str(self.x) + "," + str(self.y) + ")"

    def __repr__(self):
        return self.__str__()

    def __unicode__(self):
        # Python 2 unicode protocol; harmless under Python 3.
        return self.__str__()

    def __reduce__(self):
        # Pickle as a (constructor, args) pair.
        return (Point, (self.x, self.y))

    def __eq__(self, other):
        # NOTE(review): __eq__ without __hash__ makes Point unhashable under
        # Python 3 (hash-by-identity under Python 2) — confirm intended.
        return isinstance(other, Point) and self.x == other.x and self.y == other.y

    @classmethod
    def fromJson(cls, json):
        # ``json`` is a parsed dict with "x" and "y" keys.
        return Point(json['x'], json['y'])

    def jsonValue(self):
        # Schema descriptor consumed by the JVM side.
        return {"type": "udt",
                "pyClass": "magellan.types.PointUDT",
                "class": "magellan.PointUDT",
                "sqlType": "magellan.Point"}

    def convert(self):
        # Shapely equivalent (requires shapely to be installed).
        return SPoint(self.x, self.y)
class PolygonUDT(UserDefinedType):
    """User-defined type (UDT) pairing :class:`Polygon` with Spark SQL.

    .. note:: WARN: SpatialSDK Internal Use Only
    """
    # Shared helper used to deserialize the nested point rows.
    pointUDT = PointUDT()

    @classmethod
    def sqlType(cls):
        """
        Underlying SQL storage type for this UDT.
        """
        return Polygon()

    @classmethod
    def module(cls):
        """
        The Python module of the UDT.
        """
        return "magellan.types"

    @classmethod
    def scalaUDT(cls):
        """
        The class name of the paired Scala UDT.
        """
        return "magellan.PolygonUDT"

    def serialize(self, obj):
        """
        Converts a user-type object into a SQL datum.
        """
        if isinstance(obj, Polygon):
            return obj
        else:
            raise TypeError("cannot serialize %r of type %r" % (obj, type(obj)))

    def deserialize(self, datum):
        """
        Converts a SQL datum into a user-type object.
        """
        if isinstance(datum, Polygon):
            return datum
        else:
            # Expect a two-element row of (indices, points).
            assert len(datum) == 2, \
                "PolygonUDT.deserialize given row with length %d but requires 2" % len(datum)
            return Polygon(datum[0], [self.pointUDT.deserialize(point) for point in datum[1]])

    def simpleString(self):
        return 'polygon'

    @classmethod
    def fromJson(cls, json):
        # ``json`` is a parsed dict with "indices" and "points" keys.
        indices = json["indices"]
        points = [PointUDT.fromJson(point) for point in json["points"]]
        return Polygon(indices, points)
class Polygon(Shape):
    """
    A polygon consists of one or more rings. A ring is a connected sequence of four or more points
    that form a closed, non-self-intersecting loop. A polygon may contain multiple outer rings.
    The order of vertices or orientation for a ring indicates which side of the ring is the interior
    of the polygon. The neighborhood to the right of an observer walking along the ring
    in vertex order is the neighborhood inside the polygon.
    Vertices of rings defining holes in polygons are in a counterclockwise direction.
    Vertices for a single, ringed polygon are, therefore, always in clockwise order.
    The rings of a polygon are referred to as its parts.

    ``indices`` holds the starting offset of each ring within ``points``.

    >>> v = Polygon([0], [Point(1.0, 1.0), Point(1.0, -1.0), Point(1.0, 1.0)])
    """
    __UDT__ = PolygonUDT()

    def __init__(self, indices=None, points=None):
        # BUGFIX: the previous signature used mutable default arguments
        # (indices=[], points=[]), which are shared across all
        # default-constructed instances.
        self.indices = [] if indices is None else indices
        self.points = [] if points is None else points

    def __str__(self):
        inds = "[" + ",".join([str(i) for i in self.indices]) + "]"
        pts = "[" + ",".join([str(v) for v in self.points]) + "]"
        return "Polygon (" + ",".join((inds, pts)) + ")"

    def __repr__(self):
        return self.__str__()

    def __reduce__(self):
        # Pickle as a (constructor, args) pair.
        return (Polygon, (self.indices, self.points))

    @classmethod
    def fromJson(cls, json):
        # ``json`` is a parsed dict with "indices" and "points" keys.
        indices = json["indices"]
        points = [PointUDT.fromJson(point) for point in json["points"]]
        return Polygon(indices, points)

    def jsonValue(self):
        # Schema descriptor consumed by the JVM side.
        return {"type": "udt",
                "pyClass": "magellan.types.PolygonUDT",
                "class": "magellan.Polygon",
                "sqlType": "magellan.Polygon"}

    def convert(self):
        # Split ``points`` into rings using ``indices`` as ring start offsets.
        l = []
        l.extend(self.indices)
        l.append(len(self.points))
        p = []
        for i, j in zip(l, l[1:]):
            # NOTE(review): the slice stops at j - 1, dropping each ring's last
            # point (presumably the duplicated closing vertex) — confirm.
            spoints = [(point.x, point.y) for point in self.points[i:j - 1]]
            p.append(spoints)
        # First ring is the shell; any remaining rings are holes.
        shell = p[0]
        holes = p[1:]
        return SPolygon(shell=shell, holes=holes)
class PolyLineUDT(UserDefinedType):
    """User-defined type (UDT) pairing :class:`PolyLine` with Spark SQL.

    .. note:: WARN: SpatialSDK Internal Use Only
    """
    # Shared helper used to deserialize the nested point rows.
    pointUDT = PointUDT()

    @classmethod
    def sqlType(cls):
        """
        Underlying SQL storage type for this UDT.
        """
        return PolyLine()

    @classmethod
    def module(cls):
        """
        The Python module of the UDT.
        """
        return "magellan.types"

    @classmethod
    def scalaUDT(cls):
        """
        The class name of the paired Scala UDT.
        """
        return "magellan.PolyLineUDT"

    def serialize(self, obj):
        """
        Converts a user-type object into a SQL datum.
        """
        if isinstance(obj, PolyLine):
            return obj
        else:
            raise TypeError("cannot serialize %r of type %r" % (obj, type(obj)))

    def deserialize(self, datum):
        """
        Converts a SQL datum into a user-type object.
        """
        if isinstance(datum, PolyLine):
            return datum
        else:
            # Expect a two-element row of (indices, points).
            assert len(datum) == 2, \
                "PolyLineUDT.deserialize given row with length %d but requires 2" % len(datum)
            return PolyLine(datum[0], [self.pointUDT.deserialize(point) for point in datum[1]])

    def simpleString(self):
        return 'polyline'

    @classmethod
    def fromJson(cls, json):
        # ``json`` is a parsed dict with "indices" and "points" keys.
        indices = json["indices"]
        points = [PointUDT.fromJson(point) for point in json["points"]]
        return PolyLine(indices, points)
class PolyLine(Shape):
    """
    A PolyLine is an ordered set of vertices that consists of one or more parts.
    A part is a connected sequence of two or more points.
    Parts may or may not be connected to one another.
    Parts may or may not intersect one another.

    ``indices`` holds the starting offset of each part within ``points``.

    >>> v = PolyLine([0], [Point(1.0, 1.0), Point(1.0, -1.0), Point(1.0, 0.0)])
    """
    __UDT__ = PolyLineUDT()

    def __init__(self, indices=None, points=None):
        # BUGFIX: the previous signature used mutable default arguments
        # (indices=[], points=[]), which are shared across all
        # default-constructed instances.
        self.indices = [] if indices is None else indices
        self.points = [] if points is None else points

    def __str__(self):
        inds = "[" + ",".join([str(i) for i in self.indices]) + "]"
        pts = "[" + ",".join([str(v) for v in self.points]) + "]"
        # BUGFIX: previously labeled "Polygon" (copy-paste error).
        return "PolyLine (" + ",".join((inds, pts)) + ")"

    def __repr__(self):
        return self.__str__()

    def __reduce__(self):
        # Pickle as a (constructor, args) pair.
        return (PolyLine, (self.indices, self.points))

    @classmethod
    def fromJson(cls, json):
        # ``json`` is a parsed dict with "indices" and "points" keys.
        indices = json["indices"]
        points = [PointUDT.fromJson(point) for point in json["points"]]
        return PolyLine(indices, points)

    def jsonValue(self):
        # Schema descriptor consumed by the JVM side.
        return {"type": "udt",
                "pyClass": "magellan.types.PolyLineUDT",
                "class": "magellan.PolyLine",
                "sqlType": "magellan.PolyLine"}

    def convert(self):
        # Split ``points`` into parts using ``indices`` as part start offsets.
        l = []
        l.extend(self.indices)
        l.append(len(self.points))
        p = []
        for i, j in zip(l, l[1:]):
            # NOTE(review): the slice stops at j - 1, dropping each part's last
            # point — confirm this matches the shapefile encoding.
            spoints = [(point.x, point.y) for point in self.points[i:j - 1]]
            p.append(LineString(spoints))
        return MultiLineString(p)
def _inbound_shape_converter(json_string):
j = json.loads(json_string)
shapeType = str(j["pyClass"]) # convert unicode to str
split = shapeType.rfind(".")
module = shapeType[:split]
shapeClass = shapeType[split+1:]
m = __import__(module, globals(), locals(), [shapeClass])
UDT = getattr(m, shapeClass)
return UDT.fromJson(j)
# This is used to unpickle a Row from JVM
def _create_row_inbound_converter(dataType):
return lambda *a: dataType.fromInternal(a)
|
9d081f54d79c55355d7a127f37c5538211ec2b4d
|
cd6a55dd4f4a5472699eafd191f75654f2424e8b
|
/fury/tests/test_convert.py
|
3a40830b37290117b43be2c5bac3d603361f718b
|
[
"BSD-3-Clause"
] |
permissive
|
fury-gl/fury
|
870e297868f746def746d58f233575a439f74590
|
e595bad0246899d58d24121dcc291eb050721f9f
|
refs/heads/master
| 2023-07-05T08:51:11.160972
| 2023-06-30T16:23:39
| 2023-06-30T16:23:39
| 149,529,946
| 209
| 190
|
NOASSERTION
| 2023-08-29T17:03:09
| 2018-09-20T00:48:22
|
Python
|
UTF-8
|
Python
| false
| false
| 1,106
|
py
|
test_convert.py
|
import os
from tempfile import TemporaryDirectory
import numpy.testing as npt
import pytest
from fury.io import load_image
# Optional packages
from fury.optpkg import optional_package
matplotlib, have_matplotlib, _ = optional_package('matplotlib')
if have_matplotlib:
import matplotlib.pyplot as plt
from fury.convert import matplotlib_figure_to_numpy
@pytest.mark.skipif(not have_matplotlib, reason='Requires MatplotLib')
def test_convert():
names = ['group_a', 'group_b', 'group_c']
values = [1, 10, 100]
fig = plt.figure(figsize=(9, 3))
plt.subplot(131)
plt.bar(names, values)
plt.subplot(132)
plt.scatter(names, values)
plt.subplot(133)
plt.plot(names, values)
plt.suptitle('Categorical Plotting')
arr2 = matplotlib_figure_to_numpy(fig, transparent=False, flip_up_down=False)
with TemporaryDirectory() as tmpdir:
fname = os.path.join(tmpdir, 'tmp.png')
dpi = 100
fig.savefig(fname, transparent=False, bbox_inches='tight', pad_inches=0)
arr1 = load_image(fname)
npt.assert_array_equal(arr1, arr2)
|
26fc8c5bf45d83a5b82c37bd0f838278fd841ec9
|
88ae8695987ada722184307301e221e1ba3cc2fa
|
/third_party/webrtc/tools_webrtc/get_landmines.py
|
18bc413e252914ac3414ffbb3ee84fc103da8648
|
[
"LicenseRef-scancode-google-patent-license-webrtc",
"BSD-3-Clause",
"LicenseRef-scancode-google-patent-license-webm",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"LGPL-2.0-or-later",
"MIT",
"GPL-1.0-or-later"
] |
permissive
|
iridium-browser/iridium-browser
|
71d9c5ff76e014e6900b825f67389ab0ccd01329
|
5ee297f53dc7f8e70183031cff62f37b0f19d25f
|
refs/heads/master
| 2023-08-03T16:44:16.844552
| 2023-07-20T15:17:00
| 2023-07-23T16:09:30
| 220,016,632
| 341
| 40
|
BSD-3-Clause
| 2021-08-13T13:54:45
| 2019-11-06T14:32:31
| null |
UTF-8
|
Python
| false
| false
| 3,235
|
py
|
get_landmines.py
|
#!/usr/bin/env vpython3
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""
This file emits the list of reasons why a particular build needs to be clobbered
(or a list of 'landmines').
"""
import os
import sys
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
CHECKOUT_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
sys.path.insert(0, os.path.join(CHECKOUT_ROOT, 'build'))
import landmine_utils
host_os = landmine_utils.host_os # pylint: disable=invalid-name
def print_landmines(): # pylint: disable=invalid-name
"""
ALL LANDMINES ARE EMITTED FROM HERE.
"""
# DO NOT add landmines as part of a regular CL. Landmines are a last-effort
# bandaid fix if a CL that got landed has a build dependency bug and all
# bots need to be cleaned up. If you're writing a new CL that causes build
# dependency problems, fix the dependency problems instead of adding a
# landmine.
# See the Chromium version in src/build/get_landmines.py for usage examples.
print('Clobber to remove out/{Debug,Release}/args.gn (webrtc:5070)')
if host_os() == 'win':
print('Clobber to resolve some issues with corrupt .pdb files on bots.')
print('Clobber due to corrupt .pdb files (after #14623)')
print('Clobber due to Win 64-bit Debug linking error (crbug.com/668961)')
print('Clobber due to Win Clang Debug linking errors in '
'https://codereview.webrtc.org/2786603002')
print('Clobber due to Win Debug linking errors in '
'https://codereview.webrtc.org/2832063003/')
print('Clobber win x86 bots (issues with isolated files).')
print('Clobber because of libc++ issue')
print('Clobber because of libc++ issue - take 2')
print('Clobber because of libc++ issue - take 3')
print('Clobber because of libc++ issue - take 4 (crbug.com/1337238)')
print('Clobber because of libc++ issue - take 5 (crbug.com/1337238)')
print('Clobber because of libc++ issue - take 6 (crbug.com/1337238)')
if host_os() == 'mac':
print('Clobber due to iOS compile errors (crbug.com/694721)')
print('Clobber to unblock https://codereview.webrtc.org/2709573003')
print('Clobber to fix https://codereview.webrtc.org/2709573003 after '
'landing')
print('Clobber to fix https://codereview.webrtc.org/2767383005 before'
'landing (changing rtc_executable -> rtc_test on iOS)')
print('Clobber to fix https://codereview.webrtc.org/2767383005 before'
'landing (changing rtc_executable -> rtc_test on iOS)')
print('Another landmine for low_bandwidth_audio_test (webrtc:7430)')
print('Clobber to change neteq_rtpplay type to executable')
print('Clobber to remove .xctest files.')
print('Clobber to remove .xctest files (take 2).')
print('Switching rtc_executable to rtc_test')
def main():
print_landmines()
return 0
if __name__ == '__main__':
sys.exit(main())
|
66c6123cff86e10cf3393d299d3ddd33c06fb593
|
226727e281e6ce17450fac3ea78d1a3c4a3999fc
|
/examples/ExperimentalCases/ParallelConnectionExample/parallel_connection_example.py
|
d56ed276fd3ce7c4b7c54e4a8804a128afc01d48
|
[
"MIT"
] |
permissive
|
GazzolaLab/PyElastica
|
20df23e97560d05ef50e60f2aeefb420968fb01d
|
49017d456aa10032e0ba1af23d5afd92cecedfa5
|
refs/heads/master
| 2023-08-31T14:28:48.056038
| 2023-08-18T16:54:51
| 2023-08-18T16:54:51
| 254,172,891
| 159
| 94
|
MIT
| 2023-09-09T04:11:01
| 2020-04-08T18:47:47
|
Python
|
UTF-8
|
Python
| false
| false
| 5,530
|
py
|
parallel_connection_example.py
|
__doc__ = """Parallel connection example"""
import numpy as np
import elastica as ea
from elastica.experimental.connection_contact_joint.parallel_connection import (
get_connection_vector_straight_straight_rod,
SurfaceJointSideBySide,
)
from elastica._calculus import difference_kernel
from examples.JointCases.joint_cases_postprocessing import (
plot_position,
plot_video,
plot_video_xy,
plot_video_xz,
)
class ParallelConnection(
ea.BaseSystemCollection,
ea.Constraints,
ea.Connections,
ea.Forcing,
ea.Damping,
ea.CallBacks,
):
pass
parallel_connection_sim = ParallelConnection()
# setting up test params
n_elem = 10
direction = np.array([0.0, 0.0, 1.0])
normal = np.array([0.0, 1.0, 0.0])
binormal = np.cross(direction, normal)
base_length = 0.2
base_radius = 0.007
base_area = np.pi * base_radius ** 2
density = 1750
E = 3e4
poisson_ratio = 0.5
shear_modulus = E / (poisson_ratio + 1.0)
start_rod_1 = np.zeros((3,)) + 0.1 * direction
start_rod_2 = start_rod_1 + binormal * 2 * base_radius
# Create rod 1
rod_one = ea.CosseratRod.straight_rod(
n_elem,
start_rod_1,
direction,
normal,
base_length,
base_radius,
density,
youngs_modulus=E,
shear_modulus=shear_modulus,
)
parallel_connection_sim.append(rod_one)
# Create rod 2
rod_two = ea.CosseratRod.straight_rod(
n_elem,
start_rod_2,
direction,
normal,
base_length,
base_radius,
density,
youngs_modulus=E,
shear_modulus=shear_modulus,
)
parallel_connection_sim.append(rod_two)
# Apply boundary conditions to rod1.
parallel_connection_sim.constrain(rod_one).using(
ea.OneEndFixedBC, constrained_position_idx=(0,), constrained_director_idx=(0,)
)
# Apply boundary conditions to rod2.
parallel_connection_sim.constrain(rod_two).using(
ea.OneEndFixedBC, constrained_position_idx=(0,), constrained_director_idx=(0,)
)
# Apply a contraction force on rod one.
class ContractionForce(ea.NoForces):
def __init__(
self,
ramp,
force_mag,
):
self.ramp = ramp
self.force_mag = force_mag
def apply_forces(self, system, time: np.float64 = 0.0):
# Ramp the force
factor = min(1.0, time / self.ramp)
system.external_forces[:] -= factor * difference_kernel(
self.force_mag * system.tangents
)
parallel_connection_sim.add_forcing_to(rod_one).using(
ContractionForce, ramp=0.5, force_mag=1.0
)
# Connect rod 1 and rod 2
(
rod_one_direction_vec_in_material_frame,
rod_two_direction_vec_in_material_frame,
offset_btw_rods,
) = get_connection_vector_straight_straight_rod(
rod_one, rod_two, (0, n_elem), (0, n_elem)
)
for i in range(n_elem):
parallel_connection_sim.connect(
first_rod=rod_one, second_rod=rod_two, first_connect_idx=i, second_connect_idx=i
).using(
SurfaceJointSideBySide,
k=1e2,
nu=1e-5,
k_repulsive=1e3,
rod_one_direction_vec_in_material_frame=rod_one_direction_vec_in_material_frame[
:, i
],
rod_two_direction_vec_in_material_frame=rod_two_direction_vec_in_material_frame[
:, i
],
offset_btw_rods=offset_btw_rods[i],
) # k=kg/s2 nu=kg/s 1e-2
# add damping
damping_constant = 4e-3
dt = 1e-3
parallel_connection_sim.dampen(rod_one).using(
ea.AnalyticalLinearDamper,
damping_constant=damping_constant,
time_step=dt,
)
parallel_connection_sim.dampen(rod_two).using(
ea.AnalyticalLinearDamper,
damping_constant=damping_constant,
time_step=dt,
)
class ParallelConnecitonCallback(ea.CallBackBaseClass):
"""
Call back function for parallel connection
"""
def __init__(self, step_skip: int, callback_params: dict):
ea.CallBackBaseClass.__init__(self)
self.every = step_skip
self.callback_params = callback_params
def make_callback(self, system, time, current_step: int):
if current_step % self.every == 0:
self.callback_params["time"].append(time)
self.callback_params["step"].append(current_step)
self.callback_params["position"].append(system.position_collection.copy())
self.callback_params["velocity"].append(system.velocity_collection.copy())
return
pp_list_rod1 = ea.defaultdict(list)
pp_list_rod2 = ea.defaultdict(list)
parallel_connection_sim.collect_diagnostics(rod_one).using(
ParallelConnecitonCallback, step_skip=40, callback_params=pp_list_rod1
)
parallel_connection_sim.collect_diagnostics(rod_two).using(
ParallelConnecitonCallback, step_skip=40, callback_params=pp_list_rod2
)
parallel_connection_sim.finalize()
timestepper = ea.PositionVerlet()
final_time = 20.0
dl = base_length / n_elem
total_steps = int(final_time / dt)
print("Total steps", total_steps)
ea.integrate(timestepper, parallel_connection_sim, final_time, total_steps)
PLOT_FIGURE = True
SAVE_FIGURE = False
PLOT_VIDEO = True
# plotting results
if PLOT_FIGURE:
filename = "parallel_connection_test_last_node_pos_xy.png"
plot_position(pp_list_rod1, pp_list_rod2, filename, SAVE_FIGURE)
if PLOT_VIDEO:
filename = "parallel_connection_test.mp4"
plot_video(pp_list_rod1, pp_list_rod2, video_name=filename, margin=0.2, fps=100)
plot_video_xy(
pp_list_rod1, pp_list_rod2, video_name=filename + "_xy.mp4", margin=0.2, fps=100
)
plot_video_xz(
pp_list_rod1, pp_list_rod2, video_name=filename + "_xz.mp4", margin=0.2, fps=100
)
|
fca93300090d0b0a45cc39453432f3aa6b058da1
|
96dcea595e7c16cec07b3f649afd65f3660a0bad
|
/homeassistant/components/skybeacon/sensor.py
|
17bf8a3ab7f08764fe886d50e155929a75281003
|
[
"Apache-2.0"
] |
permissive
|
home-assistant/core
|
3455eac2e9d925c92d30178643b1aaccf3a6484f
|
80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743
|
refs/heads/dev
| 2023-08-31T15:41:06.299469
| 2023-08-31T14:50:53
| 2023-08-31T14:50:53
| 12,888,993
| 35,501
| 20,617
|
Apache-2.0
| 2023-09-14T21:50:15
| 2013-09-17T07:29:48
|
Python
|
UTF-8
|
Python
| false
| false
| 5,965
|
py
|
sensor.py
|
"""Support for Skybeacon temperature/humidity Bluetooth LE sensors."""
from __future__ import annotations
import logging
import threading
from uuid import UUID
from pygatt import BLEAddressType
from pygatt.backends import Characteristic, GATTToolBackend
from pygatt.exceptions import BLEError, NotConnectedError, NotificationTimeout
import voluptuous as vol
from homeassistant.components.sensor import (
PLATFORM_SCHEMA,
SensorDeviceClass,
SensorEntity,
)
from homeassistant.const import (
CONF_MAC,
CONF_NAME,
EVENT_HOMEASSISTANT_STOP,
PERCENTAGE,
STATE_UNKNOWN,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
_LOGGER = logging.getLogger(__name__)
ATTR_DEVICE = "device"
ATTR_MODEL = "model"
BLE_TEMP_HANDLE = 0x24
BLE_TEMP_UUID = "0000ff92-0000-1000-8000-00805f9b34fb"
CONNECT_LOCK = threading.Lock()
CONNECT_TIMEOUT = 30
DEFAULT_NAME = "Skybeacon"
SKIP_HANDLE_LOOKUP = True
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MAC): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Skybeacon sensor."""
name = config.get(CONF_NAME)
mac = config.get(CONF_MAC)
_LOGGER.debug("Setting up")
mon = Monitor(hass, mac, name)
add_entities([SkybeaconTemp(name, mon)])
add_entities([SkybeaconHumid(name, mon)])
def monitor_stop(_service_or_event):
"""Stop the monitor thread."""
_LOGGER.info("Stopping monitor for %s", name)
mon.terminate()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, monitor_stop)
mon.start()
class SkybeaconHumid(SensorEntity):
"""Representation of a Skybeacon humidity sensor."""
_attr_native_unit_of_measurement = PERCENTAGE
def __init__(self, name, mon):
"""Initialize a sensor."""
self.mon = mon
self._name = name
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def native_value(self):
"""Return the state of the device."""
return self.mon.data["humid"]
@property
def extra_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_DEVICE: "SKYBEACON", ATTR_MODEL: 1}
class SkybeaconTemp(SensorEntity):
"""Representation of a Skybeacon temperature sensor."""
_attr_device_class = SensorDeviceClass.TEMPERATURE
_attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS
def __init__(self, name, mon):
"""Initialize a sensor."""
self.mon = mon
self._name = name
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def native_value(self):
"""Return the state of the device."""
return self.mon.data["temp"]
@property
def extra_state_attributes(self):
"""Return the state attributes of the sensor."""
return {ATTR_DEVICE: "SKYBEACON", ATTR_MODEL: 1}
class Monitor(threading.Thread, SensorEntity):
"""Connection handling."""
def __init__(self, hass, mac, name):
"""Construct interface object."""
threading.Thread.__init__(self)
self.daemon = False
self.hass = hass
self.mac = mac
self.name = name
self.data = {"temp": STATE_UNKNOWN, "humid": STATE_UNKNOWN}
self.keep_going = True
self.event = threading.Event()
def run(self):
"""Thread that keeps connection alive."""
cached_char = Characteristic(BLE_TEMP_UUID, BLE_TEMP_HANDLE)
adapter = GATTToolBackend()
while True:
try:
_LOGGER.debug("Connecting to %s", self.name)
# We need concurrent connect, so lets not reset the device
adapter.start(reset_on_start=False)
# Seems only one connection can be initiated at a time
with CONNECT_LOCK:
device = adapter.connect(
self.mac, CONNECT_TIMEOUT, BLEAddressType.random
)
if SKIP_HANDLE_LOOKUP:
# HACK: inject handle mapping collected offline
# pylint: disable-next=protected-access
device._characteristics[UUID(BLE_TEMP_UUID)] = cached_char
# Magic: writing this makes device happy
device.char_write_handle(0x1B, bytearray([255]), False)
device.subscribe(BLE_TEMP_UUID, self._update)
_LOGGER.info("Subscribed to %s", self.name)
while self.keep_going:
# protect against stale connections, just read temperature
device.char_read(BLE_TEMP_UUID, timeout=CONNECT_TIMEOUT)
self.event.wait(60)
break
except (BLEError, NotConnectedError, NotificationTimeout) as ex:
_LOGGER.error("Exception: %s ", str(ex))
finally:
adapter.stop()
def _update(self, handle, value):
"""Notification callback from pygatt."""
_LOGGER.debug(
"%s: %15s temperature = %-2d.%-2d, humidity = %3d",
handle,
self.name,
value[0],
value[2],
value[1],
)
self.data["temp"] = float("%d.%d" % (value[0], value[2]))
self.data["humid"] = value[1]
def terminate(self):
"""Signal runner to stop and join thread."""
self.keep_going = False
self.event.set()
self.join()
|
508aebf018316a967f0fe7a0f9b24c2ed6a55916
|
80f94bea418d7956df1ba19d4d6a1d7715a94ade
|
/test/integration/test_quota.py
|
fa9a319197966264bbf1440c27e283442dcf537c
|
[
"CC-BY-2.5",
"MIT",
"CC-BY-3.0",
"AFL-3.0"
] |
permissive
|
galaxyproject/galaxy
|
5748409eb6693b1611f289d164f85e20c3237495
|
b9ae7a16ba0465995e880ae9701b7e87226b9bab
|
refs/heads/dev
| 2023-08-28T22:35:51.248138
| 2023-08-26T08:02:33
| 2023-08-26T08:02:33
| 31,211,061
| 1,277
| 1,137
|
NOASSERTION
| 2023-09-14T19:39:01
| 2015-02-23T14:18:06
|
Python
|
UTF-8
|
Python
| false
| false
| 8,275
|
py
|
test_quota.py
|
from galaxy_test.base.populators import DatasetPopulator
from galaxy_test.driver import integration_util
class TestQuotaIntegration(integration_util.IntegrationTestCase):
dataset_populator: DatasetPopulator
require_admin_user = True
@classmethod
def handle_galaxy_config_kwds(cls, config):
super().handle_galaxy_config_kwds(config)
config["enable_quotas"] = True
def setUp(self):
super().setUp()
self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
def test_create(self):
self._create_quota_with_name("test-create-quota")
def test_index(self):
self._create_quota_with_name("test-index-quota")
index_response = self._get("quotas")
index_response.raise_for_status()
json_response = index_response.json()
assert len(json_response) > 0
def test_index_deleted(self):
quota = self._create_quota_with_name("test-index-deleted-quota")
quota_id = quota["id"]
delete_response = self._delete(f"quotas/{quota_id}")
delete_response.raise_for_status()
index_response = self._get("quotas/deleted")
index_response.raise_for_status()
json_response = index_response.json()
assert len(json_response) > 0
def test_show(self):
quota_name = "test-show-quota"
quota = self._create_quota_with_name(quota_name)
quota_id = quota["id"]
show_response = self._get(f"quotas/{quota_id}")
show_response.raise_for_status()
json_response = show_response.json()
assert json_response["name"] == quota["name"]
def test_show_deleted(self):
quota_name = "test-show-deleted-quota"
quota = self._create_quota_with_name(quota_name)
quota_id = quota["id"]
delete_response = self._delete(f"quotas/{quota_id}")
delete_response.raise_for_status()
show_response = self._get(f"quotas/deleted/{quota_id}")
show_response.raise_for_status()
json_response = show_response.json()
assert json_response["name"] == quota["name"]
def test_update(self):
quota_name = "test-update-quota"
quota = self._create_quota_with_name(quota_name)
quota_id = quota["id"]
new_quota_name = "updated-quota-name"
update_payload = {
"name": new_quota_name,
}
put_response = self._put(f"quotas/{quota_id}", data=update_payload, json=True)
put_response.raise_for_status()
assert "has been renamed to" in put_response.text
show_response = self._get(f"quotas/{quota_id}")
show_response.raise_for_status()
json_response = show_response.json()
assert json_response["name"] == new_quota_name
def test_delete(self):
quota_name = "test-delete-quota"
quota = self._create_quota_with_name(quota_name)
quota_id = quota["id"]
delete_response = self._delete(f"quotas/{quota_id}")
delete_response.raise_for_status()
self._assert_quota_is_deleted(quota_id)
def test_delete_and_purge(self):
quota_name = "test-delete-purge-quota"
quota = self._create_quota_with_name(quota_name)
quota_id = quota["id"]
delete_response = self._delete_and_purge(quota_id)
delete_response.raise_for_status()
self._assert_quota_is_deleted(quota_id)
def test_delete_and_purge_with_user(self):
user_email = "test@galaxy.test"
self.galaxy_interactor.ensure_user_with_email(user_email)
quota_name = "test-delete-purge-quota-user"
payload = self._build_quota_payload_with_name(quota_name)
payload["in_users"].append(user_email)
create_response = self._post("quotas", data=payload, json=True)
create_response.raise_for_status()
quota = create_response.json()
quota_id = quota["id"]
show_response = self._get(f"quotas/{quota_id}")
show_response.raise_for_status()
json_response = show_response.json()
assert user_email in str(json_response["users"])
delete_response = self._delete_and_purge(quota_id)
delete_response.raise_for_status()
show_response = self._get(f"quotas/deleted/{quota_id}")
show_response.raise_for_status()
json_response = show_response.json()
assert user_email not in str(json_response["users"])
def test_undelete(self):
quota_name = "test-undelete-quota"
quota = self._create_quota_with_name(quota_name)
quota_id = quota["id"]
delete_response = self._delete(f"quotas/{quota_id}")
delete_response.raise_for_status()
self._assert_quota_is_deleted(quota_id)
undelete_response = self._post(f"quotas/deleted/{quota_id}/undelete")
undelete_response.raise_for_status()
show_response = self._get(f"quotas/{quota_id}")
show_response.raise_for_status()
show_response = self._get(f"quotas/deleted/{quota_id}")
self._assert_status_code_is(show_response, 400)
def test_400_when_delete_default(self):
quota_name = "test-delete-default-quota"
quota = self._create_quota_with_name(quota_name, is_default=True)
quota_id = quota["id"]
delete_response = self._delete(f"quotas/{quota_id}")
self._assert_status_code_is(delete_response, 400)
def test_400_when_quota_name_already_exists(self):
quota_name = "test-duplicated-quota"
self._create_quota_with_name(quota_name)
payload = self._build_quota_payload_with_name(quota_name)
create_response = self._post("quotas", data=payload, json=True)
self._assert_status_code_is(create_response, 400)
def test_400_when_show_unknown_quota(self):
quota_id = "unknown-id"
show_response = self._get(f"quotas/{quota_id}")
self._assert_status_code_is(show_response, 400)
def test_400_when_invalid_amount(self):
invalid_amount = ""
quota_name = "invalid-amount-id"
payload = {
"name": quota_name,
"description": f"Quota {quota_name} description",
"amount": invalid_amount,
"operation": "=",
"default": "no",
"in_users": [],
"in_groups": [],
}
create_response = self._post("quotas", data=payload, json=True)
self._assert_status_code_is(create_response, 400)
def test_quota_source_label_basics(self):
quotas = self.dataset_populator.get_quotas()
prior_quotas_len = len(quotas)
payload = {
"name": "defaultmylabeledquota1",
"description": "first default quota that is labeled",
"amount": "120MB",
"operation": "=",
"default": "registered",
"quota_source_label": "mylabel",
}
self.dataset_populator.create_quota(payload)
quotas = self.dataset_populator.get_quotas()
assert len(quotas) == prior_quotas_len + 1
labels = [q["quota_source_label"] for q in quotas]
assert "mylabel" in labels
def _create_quota_with_name(self, quota_name: str, is_default: bool = False):
payload = self._build_quota_payload_with_name(quota_name, is_default)
create_response = self._post("quotas", data=payload, json=True)
create_response.raise_for_status()
return create_response.json()
def _build_quota_payload_with_name(self, quota_name: str, is_default: bool = False):
default = "registered" if is_default else "no"
return {
"name": quota_name,
"description": f"Quota {quota_name} description",
"amount": "100MB",
"operation": "=",
"default": default,
"in_users": [],
"in_groups": [],
}
def _delete_and_purge(self, quota_id):
data = {"purge": "true"}
return self._delete(f"quotas/{quota_id}", data=data, admin=True, json=True)
def _assert_quota_is_deleted(self, quota_id: str):
show_response = self._get(f"quotas/deleted/{quota_id}")
show_response.raise_for_status()
json_response = show_response.json()
assert json_response["id"] == quota_id
|
4b892f5bcf503ffb920005d861c70a11becb0b3d
|
85c668af40853f5ee48fbe8c4045df1a5dd4104e
|
/examples/basic/colormaps.py
|
878e927cae1b3d23d26ba0b6bcb51c078265e6fd
|
[
"MIT",
"LicenseRef-scancode-public-domain",
"OFL-1.1"
] |
permissive
|
marcomusy/vedo
|
771db91bca05cda864fc7d1776d9140726676704
|
9a9f7c5e9ebf135e5c745c521c898866e3ede0ef
|
refs/heads/master
| 2023-08-21T12:56:35.545713
| 2023-08-14T14:39:37
| 2023-08-14T14:39:37
| 110,261,047
| 1,419
| 206
|
MIT
| 2023-09-02T18:38:22
| 2017-11-10T15:17:47
|
Python
|
UTF-8
|
Python
| false
| false
| 703
|
py
|
colormaps.py
|
"""
Example usage of cmap() to assign a color to each mesh vertex
by looking it up in matplotlib database of colormaps
"""
from vedo import Plotter, Mesh, dataurl
print(__doc__)
# these are the some matplotlib color maps
maps = [
"afmhot",
"binary",
"bone",
"cool",
"coolwarm",
"copper",
"gist_earth",
"gray",
"hot",
"jet",
"rainbow",
"winter",
]
mug = Mesh(dataurl+"mug.ply")
scalars = mug.points()[:, 1] # let y-coord be the scalar
plt = Plotter(N=len(maps))
for i, key in enumerate(maps): # for each available color map name
imug = mug.clone(deep=False).cmap(key, scalars, n_colors=5)
plt.at(i).show(imug, key)
plt.interactive().close()
|
3c46927084fded1c76ae62b1096e9eeab24e9533
|
88ef2b9b1afa9c2f0cdaccce5fc42ea375c8c70a
|
/examples/ch09/snippets_py/09_12.02.py
|
2ab797e6ee99c7770e888ad6238557021f443d32
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
no_license
|
pdeitel/PythonFundamentalsLiveLessons
|
7089c60a12d3cb0b8540de54b25da624239800e1
|
b01c1d102bb51307ce61e132818b856311d667bd
|
refs/heads/master
| 2022-12-22T06:31:22.889065
| 2022-12-11T04:40:10
| 2022-12-11T04:40:10
| 192,583,776
| 300
| 384
| null | 2022-03-05T16:23:43
| 2019-06-18T17:21:03
| null |
UTF-8
|
Python
| false
| false
| 1,301
|
py
|
09_12.02.py
|
# Section 9.12.2 snippets
# Datasets
# Working with Locally Stored CSV Files
import pandas as pd
df = pd.read_csv('accounts.csv',
names=['account', 'name', 'balance'])
df
df.to_csv('accounts_from_dataframe.csv', index=False)
##########################################################################
# (C) Copyright 2019 by Deitel & Associates, Inc. and #
# Pearson Education, Inc. All Rights Reserved. #
# #
# DISCLAIMER: The authors and publisher of this book have used their #
# best efforts in preparing the book. These efforts include the #
# development, research, and testing of the theories and programs #
# to determine their effectiveness. The authors and publisher make #
# no warranty of any kind, expressed or implied, with regard to these #
# programs or to the documentation contained in these books. The authors #
# and publisher shall not be liable in any event for incidental or #
# consequential damages in connection with, or arising out of, the #
# furnishing, performance, or use of these programs. #
##########################################################################
|
eb83f7a720f04b8fd974e9a5e3e0349dba5b898a
|
0a23c93c0b61301081bd914754f88fbad29de00d
|
/alipy/toolbox.py
|
7271284810a06ba284f77dba92b6237f454495cc
|
[
"BSD-3-Clause"
] |
permissive
|
NUAA-AL/ALiPy
|
187d4f3aa5a3e04324f5f98da8dc197a386d1079
|
1b2ee2e5acc2e8651fc64759aae332853ad9e437
|
refs/heads/master
| 2023-07-03T15:56:18.935587
| 2022-09-17T11:16:05
| 2022-09-17T11:16:05
| 149,413,428
| 844
| 127
|
BSD-3-Clause
| 2022-09-17T11:16:06
| 2018-09-19T07:54:37
|
Python
|
UTF-8
|
Python
| false
| false
| 27,784
|
py
|
toolbox.py
|
import copy
import os
import pickle
import warnings
import inspect
from sklearn.linear_model import LogisticRegression
from sklearn.utils import check_array
from sklearn.utils.multiclass import type_of_target, unique_labels
from .data_manipulate.al_split import split, split_multi_label, split_features
from .experiment.experiment_analyser import ExperimentAnalyser
from .experiment.state import State
from .experiment.state_io import StateIO
from .experiment.stopping_criteria import StoppingCriteria
from .index.index_collections import IndexCollection, MultiLabelIndexCollection, FeatureIndexCollection
from .metrics import performance
from .oracle.knowledge_repository import MatrixRepository, ElementRepository
from .oracle.oracle import OracleQueryMultiLabel, Oracle, OracleQueryFeatures
from .query_strategy import check_query_type
from .utils.multi_thread import aceThreading
__all__ = ['ToolBox',
]
class ToolBox:
"""Tool box is a tool class which initializes the active learning
elements according to the setting in order to reduce the error and improve
the usability.
In initializing, necessary information to initialize various tool classes
must be given. You can set the split setting in initializing or generate a
new split by ToolBox.split.
Note that, using ToolBox to initialize other tools is optional, you may use
each modules independently.
Parameters
----------
y: array-like
Labels of given data [n_samples, n_labels] or [n_samples]
X: array-like, optional (default=None)
Data matrix with [n_samples, n_features].
instance_indexes: array-like, optional (default=None)
Indexes of instances, it should be one-to-one correspondence of
X, if not provided, it will be generated automatically for each
x_i started from 0.
It also can be a list contains names of instances, used for image data_manipulate.
The split will only depend on the indexes if X is not provided.
query_type: str, optional (default='AllLabels')
Active learning settings. It will determine how to split data.
should be one of ['AllLabels', 'Partlabels', 'Features']:
AllLabels: query all labels of an selected instance.
Support scene: binary classification, multi-class classification, multi-label classification, regression
Partlabels: query part of labels of an instance.
Support scene: multi-label classification
Features: query part of features of an instance.
Support scene: missing features
saving_path: str, optional (default='.')
Path to save current settings. Passing None to disable saving.
train_idx: array-like, optional (default=None)
Index of training set, shape like [n_split_count, n_training_indexes]
test_idx: array-like, optional (default=None)
Index of testing set, shape like [n_split_count, n_testing_indexes]
label_idx: array-like, optional (default=None)
Index of labeling set, shape like [n_split_count, n_labeling_indexes]
unlabel_idx: array-like, optional (default=None)
Index of unlabeling set, shape like [n_split_count, n_unlabeling_indexes]
"""
def __init__(self, y, X=None, instance_indexes=None,
query_type='AllLabels', saving_path=None, **kwargs):
"""
index_len: int, length of indexes.
y: 2d array, the label matrix of whole dataset.
target_type: str, the type of target.
label_space: list, the label space.
label_num: int, The number of unique labels.
instance_flag: bool, Whether passed instances when initializing.
X: 2d array, The feature matrix of the whole dataset.
indexes: list, The indexes of each instances, should have the same length of the feature and label matrix.
query_type: str, The query type of this active learning project.
split: bool, whether split the data.
split_count: int, the number of split times.
train_idx: list, a list split_count lists which include the indexes of training set.
test_idx: list, a list split_count lists which include the indexes of testing set.
label_idx: list, a list split_count lists which include the indexes of labeled set. (A subset of training set)
unlabel_idx: list, a list split_count lists which include the indexes of unlabeled set. (A subset of training set)
saving_path: str, saving path.
saving_dir: str, saving dir.
"""
self._index_len = None
# check and record parameters
self._y = check_array(y, ensure_2d=False, dtype=None)
if self._y.ndim == 2:
if self._y.shape[0] == 1 or self._y.shape[1] == 1:
self._y = self._y.flatten()
ytype = type_of_target(self._y)
if len(self._y.shape) == 2:
self._target_type = 'multilabel'
else:
self._target_type = ytype
self._index_len = len(self._y)
if len(self._y.shape) == 1:
self._label_space = unique_labels(self._y)
elif len(self._y.shape) == 2:
self._label_space = list(range(self._y.shape[1]))
else:
raise ValueError("Label matrix should be 1d or 2d array.")
self._label_num = len(self._label_space)
self._instance_flag = False
if X is not None:
self._instance_flag = True
self._X = check_array(X, accept_sparse='csr', ensure_2d=True, order='C')
n_samples = self._X.shape[0]
if n_samples != self._index_len:
raise ValueError("Different length of instances and labels found.")
else:
self._index_len = n_samples
if instance_indexes is None:
self._indexes = [i for i in range(self._index_len)]
else:
if len(instance_indexes) != self._index_len:
raise ValueError("Length of given instance_indexes do not accord the data set.")
self._indexes = copy.copy(instance_indexes)
if check_query_type(query_type):
self.query_type = query_type
if self.query_type == 'Features' and not self._instance_flag:
raise Exception("In feature querying, feature matrix must be given.")
else:
raise NotImplementedError("Query type %s is not implemented." % type)
self._split = False
train_idx = kwargs.pop('train_idx', None)
test_idx = kwargs.pop('test_idx', None)
label_idx = kwargs.pop('label_idx', None)
unlabel_idx = kwargs.pop('unlabel_idx', None)
if train_idx is not None and test_idx is not None and label_idx is not None and unlabel_idx is not None:
if not (len(train_idx) == len(test_idx) == len(label_idx) == len(unlabel_idx)):
raise ValueError("train_idx, test_idx, label_idx, unlabel_idx "
"should have the same split count (length)")
self._split = True
self.train_idx = train_idx
self.test_idx = test_idx
self.label_idx = label_idx
self.unlabel_idx = unlabel_idx
self.split_count = len(train_idx)
self._saving_path = saving_path
self._saving_dir = None
if saving_path is not None:
if not isinstance(self._saving_path, str):
raise TypeError("A string is expected, but received: %s" % str(type(self._saving_path)))
self._saving_path = os.path.abspath(saving_path)
if os.path.isdir(self._saving_path):
self._saving_dir = self._saving_path
if os.path.exists(os.path.join(saving_path, 'al_settings.pkl')):
warnings.warn("An existed Toolbox file is detected, load the existed one in case of overwriting. "
"(Delete the old file to create a new Toolbox object)", category=UserWarning)
with open(os.path.join(saving_path, 'al_settings.pkl'), 'rb') as f:
existed_toolbox = pickle.load(f)
for ke in existed_toolbox.__dict__.keys():
setattr(self, ke, getattr(existed_toolbox, ke))
return
else:
self._saving_dir = os.path.split(self._saving_path)[0] # if a directory, a dir and None will return.
if os.path.exists(saving_path):
with open(os.path.abspath(saving_path), 'rb') as f:
existed_toolbox = pickle.load(f)
for ke in existed_toolbox.__dict__.keys():
setattr(self, ke, getattr(existed_toolbox, ke))
return
self.save()
def split_AL(self, test_ratio=0.3, initial_label_rate=0.05,
split_count=10, all_class=True):
"""split dataset for active learning experiment.
The labeled set for multi-label setting is fully labeled.
Parameters
----------
test_ratio: float, optional (default=0.3)
ratio of test set
initial_label_rate: float, optional (default=0.05)
ratio of initial label set or the existed features (missing rate = 1-initial_label_rate)
e.g. initial_labelset*(1-test_ratio)*n_samples
split_count: int, optional (default=10)
random split data _split_count times
all_class: bool, optional (default=True)
whether each split will contain at least one instance for each class.
If False, a totally random split will be performed.
Returns
-------
train_idx: list
index of training set, shape like [n_split_count, n_training_indexes]
test_idx: list
index of testing set, shape like [n_split_count, n_testing_indexes]
label_idx: list
index of labeling set, shape like [n_split_count, n_labeling_indexes]
unlabel_idx: list
index of unlabeling set, shape like [n_split_count, n_unlabeling_indexes]
"""
# should support other query types in the future
if self._split is True:
warnings.warn("Data has already been split. Return the existed split in case of overwriting.",
category=RuntimeWarning)
return self.train_idx, self.test_idx, self.label_idx, self.unlabel_idx
self.split_count = split_count
if self._target_type != 'Features':
if self._target_type != 'multilabel':
self.train_idx, self.test_idx, self.label_idx, self.unlabel_idx = split(
X=self._X if self._instance_flag else None,
y=self._y,
query_type=self.query_type, test_ratio=test_ratio,
initial_label_rate=initial_label_rate,
split_count=split_count,
instance_indexes=self._indexes,
all_class=all_class,
saving_path=self._saving_path)
else:
self.train_idx, self.test_idx, self.label_idx, self.unlabel_idx = split_multi_label(
y=self._y,
label_shape=self._y.shape,
test_ratio=test_ratio,
initial_label_rate=initial_label_rate,
split_count=split_count,
all_class=all_class,
saving_path=self._saving_path
)
else:
self.train_idx, self.test_idx, self.label_idx, self.unlabel_idx = split_features(
feature_matrix=self._X,
test_ratio=test_ratio,
missing_rate=1 - initial_label_rate,
split_count=split_count,
all_features=all_class,
saving_path=self._saving_path
)
self._split = True
self.save()
return self.train_idx, self.test_idx, self.label_idx, self.unlabel_idx
def get_split(self, round=None):
"""Get split of one fold experiment.
Parameters:
-----------
round: int
The number of fold. 0 <= round < split_count
Returns
-------
train_idx: list
index of training set, shape like [n_split_count, n_training_indexes]
test_idx: list
index of testing set, shape like [n_split_count, n_testing_indexes]
label_idx: list
index of labeling set, shape like [n_split_count, n_labeling_indexes]
unlabel_idx: list
index of unlabeling set, shape like [n_split_count, n_unlabeling_indexes]
"""
if not self._split:
raise Exception("The split setting is unknown, use split_AL() first.")
if round is not None:
assert (0 <= round < self.split_count)
if self.query_type == 'Features':
return copy.copy(self.train_idx[round]), copy.copy(self.test_idx[round]), FeatureIndexCollection(
self.label_idx[round], self._X.shape[1]), FeatureIndexCollection(self.unlabel_idx[round],
self._X.shape[1])
else:
if self._target_type == 'multilabel':
return copy.copy(self.train_idx[round]), copy.copy(self.test_idx[round]), MultiLabelIndexCollection(
self.label_idx[round], self._label_num), MultiLabelIndexCollection(self.unlabel_idx[round],
self._label_num)
else:
return copy.copy(self.train_idx[round]), copy.copy(self.test_idx[round]), IndexCollection(
self.label_idx[round]), IndexCollection(self.unlabel_idx[round])
else:
return copy.deepcopy(self.train_idx), copy.deepcopy(self.test_idx), \
copy.deepcopy(self.label_idx), copy.deepcopy(self.unlabel_idx)
def get_clean_oracle(self, query_by_example=False, cost_mat=None):
"""Get a clean oracle.
Parameters:
-----------
query_by_example: bool, optional (default=False)
Whether to pass the feature matrix to the oracle object for
querying by feature vector. (Need more memory)
"""
if self.query_type == 'Features':
return OracleQueryFeatures(feature_mat=self._X, cost=cost_mat)
elif self.query_type == 'AllLabels':
if self._target_type == 'multilabel':
return OracleQueryMultiLabel(self._y) if not query_by_example else OracleQueryMultiLabel(self._y,
examples=self._X,
cost=cost_mat)
else:
return Oracle(self._y) if not query_by_example else Oracle(self._y, examples=self._X, cost=cost_mat)
def get_stateio(self, round, saving_path=None, check_flag=True, verbose=True, print_interval=1):
"""Get a stateio object for experiment saving.
Parameters:
-----------
round: int
The number of fold. 0 <= round < split_count
saving_path: str, optional (default='.')
Path to save the intermediate files. If None is given, it will
not save the intermediate result.
check_flag: bool, optional (default=True)
Whether to check the validity of states.
verbose: bool, optional (default=True)
Whether to print query information during the AL process.
print_interval: int optional (default=1)
How many queries will trigger a print when verbose is True.
Returns
-------
stateio: StateIO
The stateio obejct initialized with the specific round.
"""
assert (0 <= round < self.split_count)
train_id, test_id, Lcollection, Ucollection = self.get_split(round)
return StateIO(round, train_id, test_id, Lcollection, Ucollection,
saving_path=self._saving_dir if saving_path is None else saving_path,
check_flag=check_flag, verbose=verbose, print_interval=print_interval)
def get_repository(self, round, instance_flag=False):
"""Get knowledge repository object.
Parameters
----------
round: int
The number of fold. 0 <= round < split_count
instance_flag: bool, optional (default=False)
Whether the repository object contains the examples.
Note that, if this flag is True, the instances must
be provided when updating the query information.
Returns
-------
repository: BaseRepository
knowledge repository object initialized with the labeled set.
"""
assert (0 <= round < self.split_count)
train_id, test_id, Lcollection, Ucollection = self.get_split(round)
if self.query_type == 'AllLabels':
return MatrixRepository(labels=self._y[Lcollection.index],
examples=self._X[Lcollection.index, :] if instance_flag else None,
indexes=Lcollection.index)
else:
return ElementRepository(labels=self._y[Lcollection.index],
examples=self._X[Lcollection.index, :] if instance_flag else None,
indexes=Lcollection.index)
def get_query_strategy(self, strategy_name="QueryInstanceRandom", **kwargs):
"""Return the query strategy object.
Parameters
----------
strategy_name: str, optional (default='QueryInstanceRandom')
The name of a query strategy, should be one of
the implemented methods.
arg1, arg2, ...: dict, optional
if kwargs is None,the pre-defined strategy will init in
The args used in strategy.
Note that, each parameters should be static.
The parameters will be fed to the callable object automatically.
Returns
-------
query_strategy: BaseQueryStrategy
the query_strategy object.
"""
try:
exec("from .query_strategy import " + strategy_name)
except:
raise KeyError("Strategy " + strategy_name + " is not implemented in ALiPy.")
strategy = None
strategy = eval(strategy_name + "(X=self._X, y=self._y, **kwargs)")
# print(strategy)
return strategy
def calc_performance_metric(self, y_true, y_pred, performance_metric='accuracy_score', **kwargs):
"""Evaluate the model performance.
Parameters
----------
y_true : array, shape = [n_samples] or [n_samples, n_classes]
The true labels correspond to the y_pred.
y_pred : array, shape = [n_samples] or [n_samples, n_classes]
The predict result of the model. Note that, different metrics
need different types of predict.
performance_metric: str, optional (default='accuracy_score')
The name of the performance metric function.
Should be one of ['accuracy_score', 'roc_auc_score', 'get_fps_tps_thresholds', 'hamming_loss', 'f1_score',
'one_error', 'coverage_error', 'label_ranking_loss', 'label_ranking_average_precision_score'].
"""
valid_metric = ['accuracy_score', 'roc_auc_score', 'get_fps_tps_thresholds', 'hamming_loss', 'one_error',
'coverage_error', 'f1_score', 'label_ranking_loss', 'label_ranking_average_precision_score']
if performance_metric not in valid_metric:
raise NotImplementedError('Performance {} is not implemented.'.format(str(performance_metric)))
performance_metric = getattr(performance, performance_metric)
metric_para = inspect.signature(performance_metric)
if 'y_pred' in metric_para.parameters:
return performance_metric(y_pred=y_pred, y_true=y_true, **kwargs)
else:
y_pred = y_pred[:, 0]
return performance_metric(y_score=y_pred, y_true=y_true, **kwargs)
def get_default_model(self):
"""
return the LogisticRegression(solver='liblinear') implemented by the sklearn.
"""
return LogisticRegression(solver='liblinear')
def get_stopping_criterion(self, stopping_criteria=None, value=None):
"""Return example stopping criterion.
Parameters
----------
stopping_criteria: str, optional (default=None)
stopping criteria, must be one of: [None, 'num_of_queries', 'cost_limit', 'percent_of_unlabel', 'time_limit']
None: stop when no unlabeled samples available
'num_of_queries': stop when preset number of quiries is reached
'cost_limit': stop when cost reaches the limit.
'percent_of_unlabel': stop when specific percentage of unlabeled data pool is labeled.
'time_limit': stop when CPU time reaches the limit.
value: {int, float}, optional (default=None)
The value of the corresponding stopping criterion.
Returns
-------
stop: StoppingCriteria
The StoppingCriteria object
"""
return StoppingCriteria(stopping_criteria=stopping_criteria, value=value)
def get_experiment_analyser(self, x_axis='num_of_queries'):
"""Return ExperimentAnalyser object.
Parameters
----------
x_axis: {'num_of_queries', 'cost'}, optional (default='num_of_queries')
The x_axis when analysing the result.
x_axis should be one of ['num_of_queries', 'cost'],
if 'cost' is given, your experiment results must contains the
cost value for each performance value.
Returns
-------
analyser: BaseAnalyser
The experiment analyser object
"""
return ExperimentAnalyser(x_axis=x_axis)
def get_ace_threading(self, target_function=None, max_thread=None, refresh_interval=1, saving_path='.'):
"""Return the multithreading tool class
Parameters
----------
target_function: callable, optional (default=None)
The acceptable active learning main loop.
the parameters of target_function must be:
(round, train_id, test_id, Ucollection, Lcollection, saver, examples, labels, global_parameters)
in which, the global_parameters is a dict which contains the other variables for user-defined function.
max_thread: int, optional (default=None)
The max threads for running at the same time. If not provided, it will run all rounds simultaneously.
refresh_interval: float, optional (default=1.0)
how many seconds to refresh the current state output, default is 1.0.
saving_path: str, optional (default='.')
the path to save the result files.
Returns
-------
ace_threading: aceThreading
The ace_threading object initialized with the data split.
"""
if not self._instance_flag:
raise Exception("instance matrix is necessary for initializing aceThreading object.")
if not self._split:
raise Exception("The split information is not found, please split the data or set the split setting first.")
return aceThreading(examples=self._X, labels=self._y,
train_idx=self.train_idx, test_idx=self.test_idx,
label_index=self.label_idx,
unlabel_index=self.unlabel_idx,
refresh_interval=refresh_interval,
max_thread=max_thread,
saving_path=saving_path,
target_func=target_function)
def save(self):
"""Save the experiment settings to file for auditting or loading for other methods."""
if self._saving_path is None:
return
saving_path = os.path.abspath(self._saving_path)
if os.path.isdir(saving_path):
f = open(os.path.join(saving_path, 'al_settings.pkl'), 'wb')
else:
f = open(os.path.abspath(saving_path), 'wb')
pickle.dump(self, f)
f.close()
def IndexCollection(self, array=None):
"""Return an IndexCollection object initialized with array."""
return IndexCollection(array)
def MultiLabelIndexCollection(self, array, label_mat_shape=None, order='F'):
"""
Return a MultiLabelIndexCollection object initialized with array.
The label_mat_shape is the shape of the provided label matrix by default.
Parameters
----------
array: {list, np.ndarray}
An 1d array or a list of tuples of indexes.
label_mat_shape: tuple (optional, default=None)
The shape of label matrix. The 1st element is the number of instances,
and the 2nd element is the total classes. If it is not specified, it will
use the shape of label matrix y.
order : {'C', 'F'}, optional
Determines whether the indices should be viewed as indexing in
row-major (C-style) or column-major (Matlab-style) order.
Only useful when an 1d array is given.
"""
if isinstance(array[0], tuple):
return MultiLabelIndexCollection(data=array, label_size=self._y.shape[1] if label_mat_shape is None else
label_mat_shape[1])
else:
return MultiLabelIndexCollection.construct_by_1d_array(data=array,
label_mat_shape=self._y.shape if label_mat_shape is None else label_mat_shape,
order=order)
def State(self, select_index, performance, queried_label=None, cost=None):
"""Get a State object for storing information in one iteration of active learning.
Parameters
----------
select_index: array-like or object
If multiple select_index are provided, it should be a list or np.ndarray type.
otherwise, it will be treated as only one pair for adding.
performance: array-like or object
Performance after querying.
queried_label: array-like or object, optional
The queried label.
cost: array-like or object, optional
Cost corresponds to the query.
Returns
-------
state: State
The State object.
"""
return State(select_index=select_index, performance=performance, queried_label=queried_label, cost=cost)
@classmethod
def load(cls, path):
"""Loading ExperimentSetting object from path.
Parameters
----------
path: str
Path to a specific file, not a dir.
Returns
-------
setting: ToolBox
Object of ExperimentSetting.
"""
if not isinstance(path, str):
raise TypeError("A string is expected, but received: %s" % str(type(path)))
import pickle
f = open(os.path.abspath(path), 'rb')
setting_from_file = pickle.load(f)
f.close()
return setting_from_file
|
433c41ae69519160ae1e988c445eb803c48ad527
|
8ca19f1a31070738b376c0370c4bebf6b7efcb43
|
/office365/sharepoint/tenant/settings.py
|
b59e069d1048001d15728bb6abf515e4c22ae7b2
|
[
"MIT"
] |
permissive
|
vgrem/Office365-REST-Python-Client
|
2ef153d737c6ed5445ba1e446aeaec39c4ef4ed3
|
cbd245d1af8d69e013c469cfc2a9851f51c91417
|
refs/heads/master
| 2023-09-02T14:20:40.109462
| 2023-08-31T19:14:05
| 2023-08-31T19:14:05
| 51,305,798
| 1,006
| 326
|
MIT
| 2023-08-28T05:38:02
| 2016-02-08T15:24:51
|
Python
|
UTF-8
|
Python
| false
| false
| 1,252
|
py
|
settings.py
|
from office365.runtime.paths.resource_path import ResourcePath
from office365.runtime.queries.service_operation import ServiceOperationQuery
from office365.sharepoint.base_entity import BaseEntity
class TenantSettings(BaseEntity):
    """Represents the SharePoint tenant-level properties."""

    def clear_corporate_catalog(self):
        """Queue a service call that clears the corporate catalog URL."""
        query = ServiceOperationQuery(self, "ClearCorporateCatalog", None, None, None, None)
        self.context.add_query(query)
        return self

    def set_corporate_catalog(self, url):
        """Queue a service call that sets the corporate catalog URL.

        :param str url: Absolute url of the corporate catalog site collection.
        """
        parameters = {"url": url}
        query = ServiceOperationQuery(self, "SetCorporateCatalog", None, parameters, None, None)
        self.context.add_query(query)
        return self

    @property
    def corporate_catalog_url(self):
        """The URL of the corporate catalog site collection, if set.

        :rtype: str or None
        """
        return self.properties.get('CorporateCatalogUrl', None)

    @staticmethod
    def current(context):
        """Return the current instance of SP.TenantSettings.

        :type context: office365.sharepoint.client_context.ClientContext
        """
        return TenantSettings(context, ResourcePath("SP.TenantSettings.Current"))
|
5d511820f39a89129c8beb5ce7298324eea73b02
|
7ef3792456a4e6596c0a512fc6e2e0bb02d4204b
|
/scripts/pyinstaller_entrypoint.py
|
99ec4fc09901d1423002cfbab817a06799a97933
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
gcovr/gcovr
|
a9d93ab62b0eee031cb9b6005754d923cfaed92f
|
99553d7ab65d2873c4db206bf4cb99c03a2cd090
|
refs/heads/master
| 2023-09-01T14:41:35.954432
| 2023-08-29T19:00:00
| 2023-08-29T19:00:00
| 10,068,798
| 735
| 258
|
NOASSERTION
| 2023-09-14T19:57:08
| 2013-05-15T01:45:20
|
Python
|
UTF-8
|
Python
| false
| false
| 889
|
py
|
pyinstaller_entrypoint.py
|
# -*- coding:utf-8 -*-
# ************************** Copyrights and license ***************************
#
# This file is part of gcovr 6.0+master, a parsing and reporting tool for gcov.
# https://gcovr.com/en/stable
#
# _____________________________________________________________________________
#
# Copyright (c) 2013-2023 the gcovr authors
# Copyright (c) 2013 Sandia Corporation.
# Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
# the U.S. Government retains certain rights in this software.
#
# This software is distributed under the 3-clause BSD License.
# For more information, see the README.rst file.
#
# ****************************************************************************
import re
import sys
from gcovr.__main__ import main
if __name__ == "__main__":
    # Normalize the program name: strip a trailing "-script.pyw" or ".exe"
    # suffix that console-script/PyInstaller launchers append to argv[0].
    sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
    sys.exit(main())
|
f66bce549c6ab8ce0d78a272ac491372df97336f
|
568fa58296378fa129ab3349adf010daa44ed45b
|
/tests/st/ops/ascend/vector/test_relu_grad_001.py
|
72f6af5e63e0b39dbbd69d662466fe783d5673a1
|
[
"Apache-2.0",
"BSD-3-Clause",
"NCSA",
"X11-distribute-modifications-variant",
"Zlib",
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Unlicense",
"LLVM-exception",
"BSD-2-Clause"
] |
permissive
|
mindspore-ai/akg
|
37f471badc66de6a831f1f45ad84344f34d23ef2
|
99f33858d6972741748cbfc9ab0bf9600428fef7
|
refs/heads/master
| 2023-07-25T23:03:17.672665
| 2023-07-11T07:33:57
| 2023-07-11T07:33:57
| 274,077,856
| 319
| 36
|
Apache-2.0
| 2021-12-30T13:43:08
| 2020-06-22T08:09:05
|
Python
|
UTF-8
|
Python
| false
| false
| 3,568
|
py
|
test_relu_grad_001.py
|
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
################################################
Testcase_PrepareCondition:
Testcase_TestSteps:
Testcase_ExpectedResult:
"""
import os
import pytest
from tests.common.base import TestBase
from tests.common.test_run.ascend.relu_grad_run import relu_grad_run
############################################################
# TestCase= class: put to tests/*/
############################################################
class TestCase(TestBase):
    """Ascend vector test cases for the relu_grad operator."""

    def setup(self):
        # Identify and register this case with the TestBase machinery.
        case_name = "test_akg_relu_grad_001"
        case_path = os.getcwd()
        # params init
        self.params_init(case_name, case_path)
        self.caseresult = True
        self._log.info("============= {0} Setup case============".format(self.casename))
        # Each entry: (testflag, opfuncname, testRunArgs, [optional dimArgs]).
        # testRunArgs is (input shape, dtype); dimArgs are tiling dimensions.
        self.testarg = [
            # testflag,opfuncname,testRunArgs, dimArgs
            ("relu_grad_001", relu_grad_run, ((1, 128), "float16"), ((16, 0), (1, 0))),
            #("relu_grad_002", relu_grad_run, ((4,129,129,256), "float16")),
            ("relu_grad_003", relu_grad_run, ((4, 129, 129, 48), "float16")),
            ("relu_grad_004", relu_grad_run, ((4, 257, 257, 128), "float16")),
            ("relu_grad_005", relu_grad_run, ((4, 257, 257, 64), "float16")),
            ("relu_grad_006", relu_grad_run, ((4, 65, 65, 728), "float16"), ((1, 1), (1, 1), (1, 1), (728, 1))),
            ("relu_grad_007", relu_grad_run, ((4, 33, 33, 256), "float16")),
            ("relu_grad_008", relu_grad_run, ((4, 33, 33, 2048), "float16")),
            ("relu_grad_009", relu_grad_run, ((4, 33, 33, 1024), "float16")),
            ("relu_grad_010", relu_grad_run, ((4, 33, 33, 728), "float16"), ((1, 1), (1, 1), (1, 1), (768, 1))),
            ("relu_grad_011", relu_grad_run, ((4, 1, 1, 256), "float16")),
            ("relu_grad_012", relu_grad_run, ((4, 129, 129, 128), "float16")),
            ("relu_grad_013", relu_grad_run, ((4, 257, 257, 32), "float16")),
            ("relu_grad_014", relu_grad_run, ((4, 129, 129, 304), "float16")),
            ("relu_grad_015", relu_grad_run, ((4, 33, 33, 1536), "float16")),
            ("relu_grad_016", relu_grad_run, ((4, 65, 65, 256), "float16")),
        ]
        # Smaller shapes used for the rpc/cloud (lenet) run.
        self.testlenet_rpc_cloud = [
            # testflag,opfuncname,testRunArgs, dimArgs
            ("relu_grad_001", relu_grad_run, ((1, 16, 7, 7), "float16")),
            ("relu_grad_002", relu_grad_run, ((1, 6, 15, 15), "float16")),
        ]
        return

    @pytest.mark.level0
    @pytest.mark.platform_arm_ascend_training
    @pytest.mark.platform_x86_ascend_training
    @pytest.mark.env_onecard
    def test_run(self):
        # Run the full Ascend test table.
        self.common_run(self.testarg)

    def test_run_rpc_cloud(self):
        # self.common_run(self.testarg_rpc_cloud)
        self.common_run(self.testlenet_rpc_cloud)

    def teardown(self):
        self._log.info("============= {0} Teardown============".format(self.casename))
        return
|
9c34717d6f874dae5fcb82d524edd2e4eee2556a
|
a7afce4298911b90c0a45db5200cb563cc4b726c
|
/django_prometheus/urls.py
|
23651927445f8630f46b1c6f04284fc0f9280de8
|
[
"Apache-2.0"
] |
permissive
|
korfuri/django-prometheus
|
c64897623f8f3218a3fd6384a659ef41340f95f9
|
bea7696b9a5330665cc34d4f62075086df218ff4
|
refs/heads/master
| 2023-07-09T05:24:21.599553
| 2023-05-09T14:30:47
| 2023-05-09T14:30:47
| 33,433,092
| 1,292
| 273
|
Apache-2.0
| 2023-09-05T21:31:13
| 2015-04-05T06:53:05
|
Python
|
UTF-8
|
Python
| false
| false
| 163
|
py
|
urls.py
|
from django.urls import path
from django_prometheus import exports
# Expose the Prometheus metrics endpoint at /metrics for scrapers.
urlpatterns = [path("metrics", exports.ExportToDjangoView, name="prometheus-django-metrics")]
|
40d3b0035d3074fa883ca01c63ad7ed2ab271e55
|
eb9f655206c43c12b497c667ba56a0d358b6bc3a
|
/python/testData/resolve/multiFile/fromQualifiedFileImportClass/mypackage2/myfile.py
|
b7a4d7111a611ca51cef829e96d8a9cee155f269
|
[
"Apache-2.0"
] |
permissive
|
JetBrains/intellij-community
|
2ed226e200ecc17c037dcddd4a006de56cd43941
|
05dbd4575d01a213f3f4d69aa4968473f2536142
|
refs/heads/master
| 2023-09-03T17:06:37.560889
| 2023-09-03T11:51:00
| 2023-09-03T12:12:27
| 2,489,216
| 16,288
| 6,635
|
Apache-2.0
| 2023-09-12T07:41:58
| 2011-09-30T13:33:05
| null |
UTF-8
|
Python
| false
| false
| 14
|
py
|
myfile.py
|
def f():
    """No-op placeholder function (import-resolution fixture)."""
    pass
|
e21b78ae133aa3495d109e40ce3c6be62fa54f74
|
c36019f1e584c7b3677d7f0c81dcb117af9743f9
|
/tests/fake_elasticsearch/test_instance.py
|
80267f336f8373eef20d7e7b025a5a604ae4e32f
|
[
"MIT"
] |
permissive
|
vrcmarcos/elasticmock
|
ad1e5d79212cdacd347834b52d8d8d8192eeea3d
|
0e327eb933a669cc626699bfb60ab14384958dfd
|
refs/heads/master
| 2023-04-08T17:52:38.537011
| 2023-03-27T22:14:56
| 2023-03-27T22:14:56
| 65,153,568
| 112
| 75
|
MIT
| 2023-03-27T22:14:58
| 2016-08-07T20:52:07
|
Python
|
UTF-8
|
Python
| false
| false
| 676
|
py
|
test_instance.py
|
# -*- coding: utf-8 -*-
import elasticsearch
from elasticmock import elasticmock
from elasticmock.fake_elasticsearch import FakeElasticsearch
from tests import TestElasticmock
class TestInstance(TestElasticmock):
    """Tests for instantiation behaviour of the mocked Elasticsearch client."""

    def test_should_create_fake_elasticsearch_instance(self):
        # The elasticmock base class should hand out FakeElasticsearch objects.
        self.assertIsInstance(self.es, FakeElasticsearch)

    @elasticmock
    def test_should_return_same_elastic_instance_when_instantiate_more_than_one_instance_with_same_host(self):
        # Clients built with identical host/port settings compare equal
        # under the mock (the instance is shared per host).
        es1 = elasticsearch.Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200}])
        es2 = elasticsearch.Elasticsearch(hosts=[{'host': 'localhost', 'port': 9200}])
        self.assertEqual(es1, es2)
|
96425d2487613bd60dbf6b4074750b63a0cea80c
|
93713f46f16f1e29b725f263da164fed24ebf8a8
|
/Library/lib/python3.7/site-packages/sympy/integrals/rubi/rules/miscellaneous_algebraic.py
|
1632575a74951b36f84d724cff78df62fbb202d2
|
[
"BSD-3-Clause"
] |
permissive
|
holzschu/Carnets
|
b83d15136d25db640cea023abb5c280b26a9620e
|
1ad7ec05fb1e3676ac879585296c513c3ee50ef9
|
refs/heads/master
| 2023-02-20T12:05:14.980685
| 2023-02-13T15:59:23
| 2023-02-13T15:59:23
| 167,671,526
| 541
| 36
|
BSD-3-Clause
| 2022-11-29T03:08:22
| 2019-01-26T09:26:46
|
Python
|
UTF-8
|
Python
| false
| false
| 232,625
|
py
|
miscellaneous_algebraic.py
|
"""
This code is automatically generated. Never edit it manually.
For details of generating the code see `rubi_parsing_guide.md` in `parsetools`.
"""
from sympy.external import import_module
matchpy = import_module("matchpy")
if matchpy:
from matchpy import Pattern, ReplacementRule, CustomConstraint, is_match
from sympy.integrals.rubi.utility_function import (
Int, Sum, Set, With, Module, Scan, MapAnd, FalseQ,
ZeroQ, NegativeQ, NonzeroQ, FreeQ, NFreeQ, List, Log, PositiveQ,
PositiveIntegerQ, NegativeIntegerQ, IntegerQ, IntegersQ,
ComplexNumberQ, PureComplexNumberQ, RealNumericQ, PositiveOrZeroQ,
NegativeOrZeroQ, FractionOrNegativeQ, NegQ, Equal, Unequal, IntPart,
FracPart, RationalQ, ProductQ, SumQ, NonsumQ, Subst, First, Rest,
SqrtNumberQ, SqrtNumberSumQ, LinearQ, Sqrt, ArcCosh, Coefficient,
Denominator, Hypergeometric2F1, Not, Simplify, FractionalPart,
IntegerPart, AppellF1, EllipticPi, EllipticE, EllipticF, ArcTan,
ArcCot, ArcCoth, ArcTanh, ArcSin, ArcSinh, ArcCos, ArcCsc, ArcSec,
ArcCsch, ArcSech, Sinh, Tanh, Cosh, Sech, Csch, Coth, LessEqual, Less,
Greater, GreaterEqual, FractionQ, IntLinearcQ, Expand, IndependentQ,
PowerQ, IntegerPowerQ, PositiveIntegerPowerQ, FractionalPowerQ, AtomQ,
ExpQ, LogQ, Head, MemberQ, TrigQ, SinQ, CosQ, TanQ, CotQ, SecQ, CscQ,
Sin, Cos, Tan, Cot, Sec, Csc, HyperbolicQ, SinhQ, CoshQ, TanhQ, CothQ,
SechQ, CschQ, InverseTrigQ, SinCosQ, SinhCoshQ, LeafCount, Numerator,
NumberQ, NumericQ, Length, ListQ, Im, Re, InverseHyperbolicQ,
InverseFunctionQ, TrigHyperbolicFreeQ, InverseFunctionFreeQ, RealQ,
EqQ, FractionalPowerFreeQ, ComplexFreeQ, PolynomialQ, FactorSquareFree,
PowerOfLinearQ, Exponent, QuadraticQ, LinearPairQ, BinomialParts,
TrinomialParts, PolyQ, EvenQ, OddQ, PerfectSquareQ, NiceSqrtAuxQ,
NiceSqrtQ, Together, PosAux, PosQ, CoefficientList, ReplaceAll,
ExpandLinearProduct, GCD, ContentFactor, NumericFactor,
NonnumericFactors, MakeAssocList, GensymSubst, KernelSubst,
ExpandExpression, Apart, SmartApart, MatchQ,
PolynomialQuotientRemainder, FreeFactors, NonfreeFactors,
RemoveContentAux, RemoveContent, FreeTerms, NonfreeTerms,
ExpandAlgebraicFunction, CollectReciprocals, ExpandCleanup,
AlgebraicFunctionQ, Coeff, LeadTerm, RemainingTerms, LeadFactor,
RemainingFactors, LeadBase, LeadDegree, Numer, Denom, hypergeom, Expon,
MergeMonomials, PolynomialDivide, BinomialQ, TrinomialQ,
GeneralizedBinomialQ, GeneralizedTrinomialQ, FactorSquareFreeList,
PerfectPowerTest, SquareFreeFactorTest, RationalFunctionQ,
RationalFunctionFactors, NonrationalFunctionFactors, Reverse,
RationalFunctionExponents, RationalFunctionExpand, ExpandIntegrand,
SimplerQ, SimplerSqrtQ, SumSimplerQ, BinomialDegree, TrinomialDegree,
CancelCommonFactors, SimplerIntegrandQ, GeneralizedBinomialDegree,
GeneralizedBinomialParts, GeneralizedTrinomialDegree,
GeneralizedTrinomialParts, MonomialQ, MonomialSumQ,
MinimumMonomialExponent, MonomialExponent, LinearMatchQ,
PowerOfLinearMatchQ, QuadraticMatchQ, CubicMatchQ, BinomialMatchQ,
TrinomialMatchQ, GeneralizedBinomialMatchQ, GeneralizedTrinomialMatchQ,
QuotientOfLinearsMatchQ, PolynomialTermQ, PolynomialTerms,
NonpolynomialTerms, PseudoBinomialParts, NormalizePseudoBinomial,
PseudoBinomialPairQ, PseudoBinomialQ, PolynomialGCD, PolyGCD,
AlgebraicFunctionFactors, NonalgebraicFunctionFactors,
QuotientOfLinearsP, QuotientOfLinearsParts, QuotientOfLinearsQ,
Flatten, Sort, AbsurdNumberQ, AbsurdNumberFactors,
NonabsurdNumberFactors, SumSimplerAuxQ, Prepend, Drop,
CombineExponents, FactorInteger, FactorAbsurdNumber,
SubstForInverseFunction, SubstForFractionalPower,
SubstForFractionalPowerOfQuotientOfLinears,
FractionalPowerOfQuotientOfLinears, SubstForFractionalPowerQ,
SubstForFractionalPowerAuxQ, FractionalPowerOfSquareQ,
FractionalPowerSubexpressionQ, Apply, FactorNumericGcd,
MergeableFactorQ, MergeFactor, MergeFactors, TrigSimplifyQ,
TrigSimplify, TrigSimplifyRecur, Order, FactorOrder, Smallest,
OrderedQ, MinimumDegree, PositiveFactors, Sign, NonpositiveFactors,
PolynomialInAuxQ, PolynomialInQ, ExponentInAux, ExponentIn,
PolynomialInSubstAux, PolynomialInSubst, Distrib, DistributeDegree,
FunctionOfPower, DivideDegreesOfFactors, MonomialFactor, FullSimplify,
FunctionOfLinearSubst, FunctionOfLinear, NormalizeIntegrand,
NormalizeIntegrandAux, NormalizeIntegrandFactor,
NormalizeIntegrandFactorBase, NormalizeTogether,
NormalizeLeadTermSigns, AbsorbMinusSign, NormalizeSumFactors,
SignOfFactor, NormalizePowerOfLinear, SimplifyIntegrand, SimplifyTerm,
TogetherSimplify, SmartSimplify, SubstForExpn, ExpandToSum, UnifySum,
UnifyTerms, UnifyTerm, CalculusQ, FunctionOfInverseLinear,
PureFunctionOfSinhQ, PureFunctionOfTanhQ, PureFunctionOfCoshQ,
IntegerQuotientQ, OddQuotientQ, EvenQuotientQ, FindTrigFactor,
FunctionOfSinhQ, FunctionOfCoshQ, OddHyperbolicPowerQ, FunctionOfTanhQ,
FunctionOfTanhWeight, FunctionOfHyperbolicQ, SmartNumerator,
SmartDenominator, SubstForAux, ActivateTrig, ExpandTrig, TrigExpand,
SubstForTrig, SubstForHyperbolic, InertTrigFreeQ, LCM,
SubstForFractionalPowerOfLinear, FractionalPowerOfLinear,
InverseFunctionOfLinear, InertTrigQ, InertReciprocalQ, DeactivateTrig,
FixInertTrigFunction, DeactivateTrigAux, PowerOfInertTrigSumQ,
PiecewiseLinearQ, KnownTrigIntegrandQ, KnownSineIntegrandQ,
KnownTangentIntegrandQ, KnownCotangentIntegrandQ,
KnownSecantIntegrandQ, TryPureTanSubst, TryTanhSubst, TryPureTanhSubst,
AbsurdNumberGCD, AbsurdNumberGCDList, ExpandTrigExpand,
ExpandTrigReduce, ExpandTrigReduceAux, NormalizeTrig, TrigToExp,
ExpandTrigToExp, TrigReduce, FunctionOfTrig, AlgebraicTrigFunctionQ,
FunctionOfHyperbolic, FunctionOfQ, FunctionOfExpnQ, PureFunctionOfSinQ,
PureFunctionOfCosQ, PureFunctionOfTanQ, PureFunctionOfCotQ,
FunctionOfCosQ, FunctionOfSinQ, OddTrigPowerQ, FunctionOfTanQ,
FunctionOfTanWeight, FunctionOfTrigQ, FunctionOfDensePolynomialsQ,
FunctionOfLog, PowerVariableExpn, PowerVariableDegree,
PowerVariableSubst, EulerIntegrandQ, FunctionOfSquareRootOfQuadratic,
SquareRootOfQuadraticSubst, Divides, EasyDQ, ProductOfLinearPowersQ,
Rt, NthRoot, AtomBaseQ, SumBaseQ, NegSumBaseQ, AllNegTermQ,
SomeNegTermQ, TrigSquareQ, RtAux, TrigSquare, IntSum, IntTerm, Map2,
ConstantFactor, SameQ, ReplacePart, CommonFactors,
MostMainFactorPosition, FunctionOfExponentialQ, FunctionOfExponential,
FunctionOfExponentialFunction, FunctionOfExponentialFunctionAux,
FunctionOfExponentialTest, FunctionOfExponentialTestAux, stdev,
rubi_test, If, IntQuadraticQ, IntBinomialQ, RectifyTangent,
RectifyCotangent, Inequality, Condition, Simp, SimpHelp, SplitProduct,
SplitSum, SubstFor, SubstForAux, FresnelS, FresnelC, Erfc, Erfi, Gamma,
FunctionOfTrigOfLinearQ, ElementaryFunctionQ, Complex, UnsameQ,
_SimpFixFactor, SimpFixFactor, _FixSimplify, FixSimplify,
_SimplifyAntiderivativeSum, SimplifyAntiderivativeSum,
_SimplifyAntiderivative, SimplifyAntiderivative, _TrigSimplifyAux,
TrigSimplifyAux, Cancel, Part, PolyLog, D, Dist, Sum_doit, PolynomialQuotient, Floor,
PolynomialRemainder, Factor, PolyLog, CosIntegral, SinIntegral, LogIntegral, SinhIntegral,
CoshIntegral, Rule, Erf, PolyGamma, ExpIntegralEi, ExpIntegralE, LogGamma , UtilityOperator, Factorial,
Zeta, ProductLog, DerivativeDivides, HypergeometricPFQ, IntHide, OneQ, Null, rubi_exp as exp, rubi_log as log, Discriminant,
Negative, Quotient
)
from sympy import (Integral, S, sqrt, And, Or, Integer, Float, Mod, I, Abs, simplify, Mul,
Add, Pow, sign, EulerGamma)
from sympy.integrals.rubi.symbol import WC
from sympy.core.symbol import symbols, Symbol
from sympy.functions import (sin, cos, tan, cot, csc, sec, sqrt, erf)
from sympy.functions.elementary.hyperbolic import (acosh, asinh, atanh, acoth, acsch, asech, cosh, sinh, tanh, coth, sech, csch)
from sympy.functions.elementary.trigonometric import (atan, acsc, asin, acot, acos, asec, atan2)
from sympy import pi as Pi
A_, B_, C_, F_, G_, H_, a_, b_, c_, d_, e_, f_, g_, h_, i_, j_, k_, l_, m_, n_, p_, q_, r_, t_, u_, v_, s_, w_, x_, y_, z_ = [WC(i) for i in 'ABCFGHabcdefghijklmnpqrtuvswxyz']
a1_, a2_, b1_, b2_, c1_, c2_, d1_, d2_, n1_, n2_, e1_, e2_, f1_, f2_, g1_, g2_, n1_, n2_, n3_, Pq_, Pm_, Px_, Qm_, Qr_, Qx_, jn_, mn_, non2_, RFx_, RGx_ = [WC(i) for i in ['a1', 'a2', 'b1', 'b2', 'c1', 'c2', 'd1', 'd2', 'n1', 'n2', 'e1', 'e2', 'f1', 'f2', 'g1', 'g2', 'n1', 'n2', 'n3', 'Pq', 'Pm', 'Px', 'Qm', 'Qr', 'Qx', 'jn', 'mn', 'non2', 'RFx', 'RGx']]
i, ii, Pqq, Q, R, r, C, k, u = symbols('i ii Pqq Q R r C k u')
_UseGamma = False
ShowSteps = False
StepCounter = None
def miscellaneous_algebraic():
from sympy.integrals.rubi.constraints import cons800, cons2, cons3, cons8, cons52, cons4, cons5, cons20, cons19, cons801, cons29, cons50, cons127, cons54, cons802, cons27, cons803, cons804, cons151, cons805, cons502, cons806, cons650, cons807, cons808, cons21, cons48, cons809, cons810, cons70, cons811, cons812, cons813, cons814, cons815, cons816, cons817, cons818, cons819, cons820, cons821, cons822, cons823, cons454, cons824, cons825, cons826, cons827, cons828, cons829, cons830, cons831, cons832, cons833, cons834, cons835, cons836, cons837, cons838, cons839, cons840, cons841, cons842, cons843, cons844, cons845, cons846, cons847, cons848, cons849, cons850, cons851, cons852, cons853, cons854, cons210, cons211, cons66, cons855, cons68, cons856, cons857, cons466, cons858, cons859, cons860, cons55, cons13, cons139, cons861, cons862, cons150, cons246, cons165, cons863, cons523, cons864, cons865, cons866, cons86, cons867, cons36, cons37, cons868, cons470, cons471, cons869, cons870, cons38, cons871, cons872, cons873, cons874, cons875, cons876, cons877, cons878, cons879, cons880, cons881, cons882, cons883, cons884, cons885, cons886, cons887, cons888, cons889, cons890, cons891, cons892, cons893, cons894, cons895, cons896, cons897, cons898, cons899, cons900, cons901, cons902, cons903, cons904, cons905, cons906, cons676, cons907, cons483, cons908, cons909, cons484, cons910, cons911, cons912, cons913, cons914, cons915, cons916, cons917, cons918, cons87, cons33, cons96, cons919, cons198, cons369, cons358, cons491, cons543, cons25, cons920, cons556, cons921, cons554, cons57, cons496, cons59, cons60, cons61, cons62, cons922, cons923, cons924, cons925, cons926, cons597, cons73, cons927, cons588, cons89, cons130, cons928, cons929, cons930, cons931, cons932, cons47, cons316, cons228, cons933, cons934, cons935, cons936, cons937, cons938, cons939, cons940, cons941, cons942, cons943, cons944, cons945, cons946, cons947, cons948, cons284, cons949, cons65, cons721, cons950, cons951, 
cons952, cons75, cons953, cons704, cons149, cons954, cons955, cons798, cons956, cons957, cons958, cons959, cons960, cons961, cons962, cons963, cons964, cons965, cons966, cons967, cons968, cons71, cons969, cons970, cons971, cons972, cons973, cons974, cons975, cons976, cons977, cons514, cons978, cons979, cons980, cons981, cons982, cons669, cons983, cons984, cons799, cons985, cons986, cons987, cons988, cons989, cons990, cons95, cons90, cons991, cons992, cons993, cons994, cons995, cons996, cons997, cons998, cons999, cons1000, cons40, cons1001, cons1002, cons1003, cons1004, cons1005, cons1006, cons1007, cons1008, cons1009, cons1010, cons1011, cons1012, cons385, cons1013, cons1014, cons1015, cons1016, cons1017, cons1018, cons1019, cons1020, cons359, cons1021, cons248, cons1022, cons1023, cons1024, cons1025, cons1026, cons1027, cons1028, cons1029, cons1030, cons1031, cons1032, cons1033, cons1034, cons1035, cons1036, cons1037, cons1038, cons1039, cons1040, cons1041, cons1042, cons1043, cons1044, cons1045, cons299, cons1046, cons1047, cons1048, cons1049, cons1050, cons707, cons384, cons1051, cons1052, cons699, cons711, cons155, cons1053, cons1054, cons1055, cons1056, cons1057, cons1058, cons1059, cons1060, cons1061, cons226, cons1062, cons517, cons1063, cons1064, cons1065, cons1066, cons1067, cons1068, cons1069, cons1070, cons1071, cons1072, cons1073, cons45, cons481, cons482, cons1074, cons1075, cons1076, cons1077, cons1078, cons1079, cons1080, cons1081, cons1082, cons1083, cons1084, cons1085, cons1086, cons1087, cons1088, cons1089, cons1090, cons1091
pattern1476 = Pattern(Integral(((x_**n_*WC('c', S(1)))**q_*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons52, cons4, cons5, cons800)
rule1476 = ReplacementRule(pattern1476, replacement1476)
pattern1477 = Pattern(Integral(x_**WC('m', S(1))*((x_**n_*WC('c', S(1)))**q_*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons19, cons4, cons5, cons52, cons800, cons20)
rule1477 = ReplacementRule(pattern1477, replacement1477)
pattern1478 = Pattern(Integral(x_**WC('m', S(1))*((a_ + x_**WC('n', S(1))*WC('b', S(1)))**WC('r', S(1))*WC('e', S(1)))**p_*((c_ + x_**WC('n', S(1))*WC('d', S(1)))**s_*WC('f', S(1)))**q_, x_), cons2, cons3, cons8, cons29, cons50, cons127, cons19, cons4, cons5, cons52, cons54, cons802, cons801)
rule1478 = ReplacementRule(pattern1478, replacement1478)
pattern1479 = Pattern(Integral(((x_**WC('n', S(1))*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1))/(c_ + x_**WC('n', S(1))*WC('d', S(1))))**p_*WC('u', S(1)), x_), cons2, cons3, cons8, cons29, cons50, cons4, cons5, cons27)
rule1479 = ReplacementRule(pattern1479, replacement1479)
pattern1480 = Pattern(Integral(((x_**WC('n', S(1))*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1))/(c_ + x_**WC('n', S(1))*WC('d', S(1))))**p_*WC('u', S(1)), x_), cons2, cons3, cons8, cons29, cons50, cons4, cons5, cons803, cons804)
rule1480 = ReplacementRule(pattern1480, replacement1480)
pattern1481 = Pattern(Integral(((x_**WC('n', S(1))*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1))/(c_ + x_**WC('n', S(1))*WC('d', S(1))))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons151, cons805)
rule1481 = ReplacementRule(pattern1481, With1481)
pattern1482 = Pattern(Integral(x_**WC('m', S(1))*((x_**WC('n', S(1))*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1))/(c_ + x_**WC('n', S(1))*WC('d', S(1))))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons19, cons4, cons151, cons502)
rule1482 = ReplacementRule(pattern1482, With1482)
pattern1483 = Pattern(Integral(u_**WC('r', S(1))*((x_**WC('n', S(1))*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1))/(c_ + x_**WC('n', S(1))*WC('d', S(1))))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons806, cons151, cons805, cons650)
rule1483 = ReplacementRule(pattern1483, With1483)
pattern1484 = Pattern(Integral(u_**WC('r', S(1))*x_**WC('m', S(1))*((x_**WC('n', S(1))*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1))/(c_ + x_**WC('n', S(1))*WC('d', S(1))))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons806, cons151, cons805, cons807)
rule1484 = ReplacementRule(pattern1484, With1484)
pattern1485 = Pattern(Integral(((WC('c', S(1))/x_)**n_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons4, cons5, cons808)
rule1485 = ReplacementRule(pattern1485, replacement1485)
pattern1486 = Pattern(Integral(x_**WC('m', S(1))*((WC('c', S(1))/x_)**n_*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons4, cons5, cons20)
rule1486 = ReplacementRule(pattern1486, replacement1486)
pattern1487 = Pattern(Integral((x_*WC('d', S(1)))**m_*((WC('c', S(1))/x_)**n_*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons19, cons4, cons5, cons21)
rule1487 = ReplacementRule(pattern1487, replacement1487)
pattern1488 = Pattern(Integral(((WC('d', S(1))/x_)**n_*WC('b', S(1)) + (WC('d', S(1))/x_)**WC('n2', S(1))*WC('c', S(1)) + WC('a', S(0)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons4, cons5, cons48)
rule1488 = ReplacementRule(pattern1488, replacement1488)
pattern1489 = Pattern(Integral(x_**WC('m', S(1))*(a_ + (WC('d', S(1))/x_)**n_*WC('b', S(1)) + (WC('d', S(1))/x_)**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons4, cons5, cons48, cons20)
rule1489 = ReplacementRule(pattern1489, replacement1489)
pattern1490 = Pattern(Integral((x_*WC('e', S(1)))**m_*(a_ + (WC('d', S(1))/x_)**n_*WC('b', S(1)) + (WC('d', S(1))/x_)**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons50, cons19, cons4, cons5, cons48, cons21)
rule1490 = ReplacementRule(pattern1490, replacement1490)
pattern1491 = Pattern(Integral((x_**WC('n2', S(1))*WC('c', S(1)) + (WC('d', S(1))/x_)**n_*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons4, cons5, cons809, cons810)
rule1491 = ReplacementRule(pattern1491, replacement1491)
pattern1492 = Pattern(Integral(x_**WC('m', S(1))*(a_ + x_**WC('n2', S(1))*WC('c', S(1)) + (WC('d', S(1))/x_)**n_*WC('b', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons4, cons5, cons809, cons810, cons20)
rule1492 = ReplacementRule(pattern1492, replacement1492)
pattern1493 = Pattern(Integral((x_*WC('e', S(1)))**m_*(a_ + x_**WC('n2', S(1))*WC('c', S(1)) + (WC('d', S(1))/x_)**n_*WC('b', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons50, cons4, cons5, cons809, cons21, cons810)
rule1493 = ReplacementRule(pattern1493, replacement1493)
pattern1494 = Pattern(Integral(u_**m_, x_), cons19, cons70, cons811)
rule1494 = ReplacementRule(pattern1494, replacement1494)
pattern1495 = Pattern(Integral(u_**WC('m', S(1))*v_**WC('n', S(1)), x_), cons19, cons4, cons812, cons813)
rule1495 = ReplacementRule(pattern1495, replacement1495)
pattern1496 = Pattern(Integral(u_**WC('m', S(1))*v_**WC('n', S(1))*w_**WC('p', S(1)), x_), cons19, cons4, cons5, cons814, cons815)
rule1496 = ReplacementRule(pattern1496, replacement1496)
pattern1497 = Pattern(Integral(u_**WC('m', S(1))*v_**WC('n', S(1))*w_**WC('p', S(1))*z_**WC('q', S(1)), x_), cons19, cons4, cons5, cons52, cons816, cons817)
rule1497 = ReplacementRule(pattern1497, replacement1497)
pattern1498 = Pattern(Integral(u_**p_, x_), cons5, cons818, cons819)
rule1498 = ReplacementRule(pattern1498, replacement1498)
pattern1499 = Pattern(Integral(u_**WC('m', S(1))*v_**WC('p', S(1)), x_), cons19, cons5, cons70, cons820, cons821)
rule1499 = ReplacementRule(pattern1499, replacement1499)
pattern1500 = Pattern(Integral(u_**WC('m', S(1))*v_**WC('n', S(1))*w_**WC('p', S(1)), x_), cons19, cons4, cons5, cons812, cons822, cons823)
rule1500 = ReplacementRule(pattern1500, replacement1500)
pattern1501 = Pattern(Integral(u_**WC('p', S(1))*v_**WC('q', S(1)), x_), cons5, cons52, cons454, cons824)
rule1501 = ReplacementRule(pattern1501, replacement1501)
pattern1502 = Pattern(Integral(u_**p_, x_), cons5, cons825, cons826)
rule1502 = ReplacementRule(pattern1502, replacement1502)
pattern1503 = Pattern(Integral(u_**WC('p', S(1))*(x_*WC('c', S(1)))**WC('m', S(1)), x_), cons8, cons19, cons5, cons825, cons826)
rule1503 = ReplacementRule(pattern1503, replacement1503)
pattern1504 = Pattern(Integral(u_**WC('p', S(1))*v_**WC('q', S(1)), x_), cons5, cons52, cons827, cons828, cons829)
rule1504 = ReplacementRule(pattern1504, replacement1504)
pattern1505 = Pattern(Integral(u_**WC('p', S(1))*v_**WC('q', S(1))*x_**WC('m', S(1)), x_), cons19, cons5, cons52, cons827, cons828, cons829)
rule1505 = ReplacementRule(pattern1505, replacement1505)
pattern1506 = Pattern(Integral(u_**WC('m', S(1))*v_**WC('p', S(1))*w_**WC('q', S(1)), x_), cons19, cons5, cons52, cons830, cons828, cons831, cons832)
rule1506 = ReplacementRule(pattern1506, replacement1506)
pattern1507 = Pattern(Integral(u_**WC('p', S(1))*v_**WC('q', S(1))*x_**WC('m', S(1))*z_**WC('r', S(1)), x_), cons19, cons5, cons52, cons54, cons833, cons828, cons834, cons835)
rule1507 = ReplacementRule(pattern1507, replacement1507)
pattern1508 = Pattern(Integral(u_**p_, x_), cons5, cons836, cons837)
rule1508 = ReplacementRule(pattern1508, replacement1508)
pattern1509 = Pattern(Integral(u_**WC('p', S(1))*x_**WC('m', S(1)), x_), cons19, cons5, cons836, cons837)
rule1509 = ReplacementRule(pattern1509, replacement1509)
pattern1510 = Pattern(Integral(u_**p_, x_), cons5, cons838, cons839)
rule1510 = ReplacementRule(pattern1510, replacement1510)
pattern1511 = Pattern(Integral(u_**WC('p', S(1))*(x_*WC('d', S(1)))**WC('m', S(1)), x_), cons29, cons19, cons5, cons838, cons839)
rule1511 = ReplacementRule(pattern1511, replacement1511)
pattern1512 = Pattern(Integral(u_**WC('q', S(1))*v_**WC('p', S(1)), x_), cons5, cons52, cons825, cons840, cons841)
rule1512 = ReplacementRule(pattern1512, replacement1512)
pattern1513 = Pattern(Integral(u_**WC('q', S(1))*v_**WC('p', S(1)), x_), cons5, cons52, cons825, cons842, cons843)
rule1513 = ReplacementRule(pattern1513, replacement1513)
pattern1514 = Pattern(Integral(u_**WC('p', S(1))*x_**WC('m', S(1))*z_**WC('q', S(1)), x_), cons19, cons5, cons52, cons844, cons838, cons845)
rule1514 = ReplacementRule(pattern1514, replacement1514)
pattern1515 = Pattern(Integral(u_**WC('p', S(1))*x_**WC('m', S(1))*z_**WC('q', S(1)), x_), cons19, cons5, cons52, cons844, cons825, cons846)
rule1515 = ReplacementRule(pattern1515, replacement1515)
pattern1516 = Pattern(Integral(u_**p_, x_), cons5, cons847, cons848)
rule1516 = ReplacementRule(pattern1516, replacement1516)
pattern1517 = Pattern(Integral(u_**WC('p', S(1))*x_**WC('m', S(1)), x_), cons19, cons5, cons847, cons848)
rule1517 = ReplacementRule(pattern1517, replacement1517)
pattern1518 = Pattern(Integral(u_**WC('p', S(1))*z_, x_), cons5, cons844, cons847, cons849, cons850)
rule1518 = ReplacementRule(pattern1518, replacement1518)
pattern1519 = Pattern(Integral(u_**WC('p', S(1))*x_**WC('m', S(1))*z_, x_), cons19, cons5, cons844, cons847, cons849, cons850)
rule1519 = ReplacementRule(pattern1519, replacement1519)
pattern1520 = Pattern(Integral(x_**WC('m', S(1))*(e_ + x_**WC('n', S(1))*WC('h', S(1)) + x_**WC('q', S(1))*WC('f', S(1)) + x_**WC('r', S(1))*WC('g', S(1)))/(a_ + x_**WC('n', S(1))*WC('c', S(1)))**(S(3)/2), x_), cons2, cons8, cons50, cons127, cons210, cons211, cons19, cons4, cons851, cons852, cons853, cons854)
rule1520 = ReplacementRule(pattern1520, replacement1520)
pattern1521 = Pattern(Integral((d_*x_)**WC('m', S(1))*(e_ + x_**WC('n', S(1))*WC('h', S(1)) + x_**WC('q', S(1))*WC('f', S(1)) + x_**WC('r', S(1))*WC('g', S(1)))/(a_ + x_**WC('n', S(1))*WC('c', S(1)))**(S(3)/2), x_), cons2, cons8, cons29, cons50, cons127, cons210, cons211, cons19, cons4, cons853, cons851, cons852, cons854)
rule1521 = ReplacementRule(pattern1521, replacement1521)
pattern1522 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**m_*(a_ + x_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons66, cons151, cons855)
rule1522 = ReplacementRule(pattern1522, With1522)
pattern1523 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons19, cons4, cons5, cons68, cons856, cons857)
rule1523 = ReplacementRule(pattern1523, replacement1523)
pattern1524 = Pattern(Integral(Pq_*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons66, cons466, cons858)
rule1524 = ReplacementRule(pattern1524, replacement1524)
pattern1525 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**WC('m', S(1))*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons19, cons4, cons66, cons859)
rule1525 = ReplacementRule(pattern1525, replacement1525)
pattern1526 = Pattern(Integral(Pq_*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons4, cons66, cons859)
rule1526 = ReplacementRule(pattern1526, replacement1526)
pattern1527 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons19, cons4, cons5, cons860, cons502)
rule1527 = ReplacementRule(pattern1527, replacement1527)
pattern1528 = Pattern(Integral(Pq_*(c_*x_)**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons19, cons4, cons5, cons860, cons502)
rule1528 = ReplacementRule(pattern1528, replacement1528)
pattern1529 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons19, cons4, cons66, cons55, cons13, cons139)
rule1529 = ReplacementRule(pattern1529, replacement1529)
pattern1530 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**WC('m', S(1))*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons29, cons19, cons4, cons5, cons66, cons861)
rule1530 = ReplacementRule(pattern1530, replacement1530)
pattern1531 = Pattern(Integral(Pq_*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons4, cons5, cons66, cons861, cons862)
rule1531 = ReplacementRule(pattern1531, replacement1531)
pattern1532 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons66, cons150, cons246, cons165, cons863)
rule1532 = ReplacementRule(pattern1532, With1532)
pattern1533 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**WC('m', S(1))*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons66, cons523, cons13, cons165)
rule1533 = ReplacementRule(pattern1533, With1533)
pattern1534 = Pattern(Integral(Pq_*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons66, cons523, cons13, cons165)
rule1534 = ReplacementRule(pattern1534, With1534)
pattern1535 = Pattern(Integral(Pq_*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons66, cons150, cons13, cons139, CustomConstraint(With1535))
rule1535 = ReplacementRule(pattern1535, replacement1535)
pattern1536 = Pattern(Integral(Pq_*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons66, cons150, cons13, cons139, cons864)
rule1536 = ReplacementRule(pattern1536, replacement1536)
pattern1537 = Pattern(Integral((d_ + x_**S(4)*WC('g', S(1)) + x_**S(3)*WC('f', S(1)) + x_*WC('e', S(1)))/(a_ + x_**S(4)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons29, cons50, cons127, cons210, cons865)
rule1537 = ReplacementRule(pattern1537, replacement1537)
pattern1538 = Pattern(Integral((d_ + x_**S(4)*WC('g', S(1)) + x_**S(3)*WC('f', S(1)))/(a_ + x_**S(4)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons29, cons127, cons210, cons865)
rule1538 = ReplacementRule(pattern1538, replacement1538)
pattern1539 = Pattern(Integral((d_ + x_**S(4)*WC('g', S(1)) + x_*WC('e', S(1)))/(a_ + x_**S(4)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons29, cons50, cons210, cons865)
rule1539 = ReplacementRule(pattern1539, replacement1539)
pattern1540 = Pattern(Integral(x_**S(2)*(x_**S(4)*WC('h', S(1)) + x_*WC('f', S(1)) + WC('e', S(0)))/(a_ + x_**S(4)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons50, cons127, cons211, cons866)
rule1540 = ReplacementRule(pattern1540, replacement1540)
pattern1541 = Pattern(Integral(x_**S(2)*(x_**S(4)*WC('h', S(1)) + WC('e', S(0)))/(a_ + x_**S(4)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons50, cons211, cons866)
rule1541 = ReplacementRule(pattern1541, replacement1541)
pattern1542 = Pattern(Integral((d_ + x_**S(6)*WC('h', S(1)) + x_**S(4)*WC('g', S(1)) + x_**S(3)*WC('f', S(1)) + x_**S(2)*WC('e', S(1)))/(a_ + x_**S(4)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons29, cons50, cons127, cons210, cons211, cons866, cons865)
rule1542 = ReplacementRule(pattern1542, replacement1542)
pattern1543 = Pattern(Integral((d_ + x_**S(6)*WC('h', S(1)) + x_**S(4)*WC('g', S(1)) + x_**S(2)*WC('e', S(1)))/(a_ + x_**S(4)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons29, cons50, cons210, cons211, cons866, cons865)
rule1543 = ReplacementRule(pattern1543, replacement1543)
pattern1544 = Pattern(Integral(Pq_*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons66, cons150, cons13, cons139, CustomConstraint(With1544))
rule1544 = ReplacementRule(pattern1544, replacement1544)
pattern1545 = Pattern(Integral(Pq_*x_**m_*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons66, cons150, cons13, cons139, cons86)
rule1545 = ReplacementRule(pattern1545, With1545)
pattern1546 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons5, cons860, cons150, cons20, CustomConstraint(With1546))
rule1546 = ReplacementRule(pattern1546, replacement1546)
pattern1547 = Pattern(Integral((A_ + x_*WC('B', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons867)
rule1547 = ReplacementRule(pattern1547, replacement1547)
pattern1548 = Pattern(Integral((A_ + x_*WC('B', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons868, cons470)
rule1548 = ReplacementRule(pattern1548, With1548)
pattern1549 = Pattern(Integral((A_ + x_*WC('B', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons868, cons471)
rule1549 = ReplacementRule(pattern1549, With1549)
pattern1550 = Pattern(Integral((A_ + x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons869, cons870)
rule1550 = ReplacementRule(pattern1550, replacement1550)
pattern1551 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons871)
rule1551 = ReplacementRule(pattern1551, With1551)
pattern1552 = Pattern(Integral(x_*(B_ + x_*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons872)
rule1552 = ReplacementRule(pattern1552, With1552)
pattern1553 = Pattern(Integral((A_ + x_**S(2)*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons873)
rule1553 = ReplacementRule(pattern1553, With1553)
pattern1554 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons874)
rule1554 = ReplacementRule(pattern1554, With1554)
pattern1555 = Pattern(Integral(x_*(B_ + x_*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons875)
rule1555 = ReplacementRule(pattern1555, With1555)
pattern1556 = Pattern(Integral((A_ + x_**S(2)*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons876)
rule1556 = ReplacementRule(pattern1556, With1556)
pattern1557 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons877)
rule1557 = ReplacementRule(pattern1557, With1557)
pattern1558 = Pattern(Integral(x_*(B_ + x_*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons878)
rule1558 = ReplacementRule(pattern1558, With1558)
pattern1559 = Pattern(Integral((A_ + x_**S(2)*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons879)
rule1559 = ReplacementRule(pattern1559, With1559)
pattern1560 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons880)
rule1560 = ReplacementRule(pattern1560, With1560)
pattern1561 = Pattern(Integral(x_*(B_ + x_*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons881)
rule1561 = ReplacementRule(pattern1561, With1561)
pattern1562 = Pattern(Integral((A_ + x_**S(2)*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons882)
rule1562 = ReplacementRule(pattern1562, With1562)
pattern1563 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons883)
rule1563 = ReplacementRule(pattern1563, With1563)
pattern1564 = Pattern(Integral(x_*(B_ + x_*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons884)
rule1564 = ReplacementRule(pattern1564, With1564)
pattern1565 = Pattern(Integral((A_ + x_**S(2)*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons885)
rule1565 = ReplacementRule(pattern1565, With1565)
pattern1566 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons886)
rule1566 = ReplacementRule(pattern1566, With1566)
pattern1567 = Pattern(Integral(x_*(B_ + x_*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons887)
rule1567 = ReplacementRule(pattern1567, With1567)
pattern1568 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons888)
rule1568 = ReplacementRule(pattern1568, With1568)
pattern1569 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons889)
rule1569 = ReplacementRule(pattern1569, With1569)
pattern1570 = Pattern(Integral(x_*(B_ + x_*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons890)
rule1570 = ReplacementRule(pattern1570, With1570)
pattern1571 = Pattern(Integral((A_ + x_**S(2)*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons891)
rule1571 = ReplacementRule(pattern1571, With1571)
pattern1572 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons892)
rule1572 = ReplacementRule(pattern1572, With1572)
pattern1573 = Pattern(Integral(x_*(B_ + x_*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons893)
rule1573 = ReplacementRule(pattern1573, With1573)
pattern1574 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons894)
rule1574 = ReplacementRule(pattern1574, With1574)
pattern1575 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons895)
rule1575 = ReplacementRule(pattern1575, replacement1575)
pattern1576 = Pattern(Integral(x_*(B_ + x_*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons896)
rule1576 = ReplacementRule(pattern1576, replacement1576)
pattern1577 = Pattern(Integral((A_ + x_**S(2)*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons897)
rule1577 = ReplacementRule(pattern1577, replacement1577)
pattern1578 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons898)
rule1578 = ReplacementRule(pattern1578, With1578)
pattern1579 = Pattern(Integral(x_*(x_*WC('C', S(1)) + WC('B', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons899)
rule1579 = ReplacementRule(pattern1579, With1579)
pattern1580 = Pattern(Integral((A_ + x_**S(2)*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons900)
rule1580 = ReplacementRule(pattern1580, With1580)
pattern1581 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons901)
rule1581 = ReplacementRule(pattern1581, With1581)
pattern1582 = Pattern(Integral(x_*(B_ + x_*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons902)
rule1582 = ReplacementRule(pattern1582, With1582)
pattern1583 = Pattern(Integral((A_ + x_**S(2)*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons903)
rule1583 = ReplacementRule(pattern1583, With1583)
pattern1584 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons868, cons904, cons905, CustomConstraint(With1584))
rule1584 = ReplacementRule(pattern1584, replacement1584)
pattern1585 = Pattern(Integral(x_*(B_ + x_*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons904, cons905, CustomConstraint(With1585))
rule1585 = ReplacementRule(pattern1585, replacement1585)
pattern1586 = Pattern(Integral((A_ + x_**S(2)*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons904, cons905, CustomConstraint(With1586))
rule1586 = ReplacementRule(pattern1586, replacement1586)
pattern1587 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons37, cons38, cons868, cons904, cons906, CustomConstraint(With1587))
rule1587 = ReplacementRule(pattern1587, replacement1587)
pattern1588 = Pattern(Integral(x_*(B_ + x_*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons37, cons38, cons904, cons906, CustomConstraint(With1588))
rule1588 = ReplacementRule(pattern1588, replacement1588)
pattern1589 = Pattern(Integral((A_ + x_**S(2)*WC('C', S(1)))/(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons36, cons38, cons904, cons906, CustomConstraint(With1589))
rule1589 = ReplacementRule(pattern1589, replacement1589)
# --- Rules 1590-1591: general polynomial Pq_ divided by a binomial a + b*x**n ---
# Here the applicability test itself lives in CustomConstraint(WithNNNN): the
# With-function both checks the side condition and (presumably, per the Rubi
# generator's convention) caches intermediate values reused by replacementNNNN.
pattern1590 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**WC('m', S(1))/(a_ + x_**n_*WC('b', S(1))), x_), cons2, cons3, cons8, cons19, cons66, cons676, cons907, CustomConstraint(With1590))
rule1590 = ReplacementRule(pattern1590, replacement1590)
pattern1591 = Pattern(Integral(Pq_/(a_ + x_**n_*WC('b', S(1))), x_), cons2, cons3, cons66, cons676, cons907, CustomConstraint(With1591))
rule1591 = ReplacementRule(pattern1591, replacement1591)
# --- Rules 1592-1599: (c + d*x**k)/sqrt(a + b*x**n) for (k, n) in
# {(1,3), (4,6), (2,8)} — the cases distinguished only by sign/rationality
# constraints (cons908-cons915) on the matched coefficients.
pattern1592 = Pattern(Integral((c_ + x_*WC('d', S(1)))/sqrt(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons483, cons908)
rule1592 = ReplacementRule(pattern1592, With1592)
pattern1593 = Pattern(Integral((c_ + x_*WC('d', S(1)))/sqrt(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons483, cons909)
rule1593 = ReplacementRule(pattern1593, With1593)
pattern1594 = Pattern(Integral((c_ + x_*WC('d', S(1)))/sqrt(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons484, cons910)
rule1594 = ReplacementRule(pattern1594, With1594)
pattern1595 = Pattern(Integral((c_ + x_*WC('d', S(1)))/sqrt(a_ + x_**S(3)*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons484, cons911)
rule1595 = ReplacementRule(pattern1595, With1595)
pattern1596 = Pattern(Integral((c_ + x_**S(4)*WC('d', S(1)))/sqrt(a_ + x_**S(6)*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons912)
rule1596 = ReplacementRule(pattern1596, With1596)
pattern1597 = Pattern(Integral((c_ + x_**S(4)*WC('d', S(1)))/sqrt(a_ + x_**S(6)*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons913)
rule1597 = ReplacementRule(pattern1597, With1597)
pattern1598 = Pattern(Integral((c_ + x_**S(2)*WC('d', S(1)))/sqrt(a_ + x_**S(8)*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons914)
rule1598 = ReplacementRule(pattern1598, replacement1598)
pattern1599 = Pattern(Integral((c_ + x_**S(2)*WC('d', S(1)))/sqrt(a_ + x_**S(8)*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons915)
rule1599 = ReplacementRule(pattern1599, replacement1599)
pattern1600 = Pattern(Integral(Pq_/(x_*sqrt(a_ + x_**n_*WC('b', S(1)))), x_), cons2, cons3, cons66, cons150, cons916)
rule1600 = ReplacementRule(pattern1600, replacement1600)
pattern1601 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons5, cons66, cons676, cons917)
rule1601 = ReplacementRule(pattern1601, With1601)
pattern1602 = Pattern(Integral(Pq_*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons5, cons66, cons676, cons917)
rule1602 = ReplacementRule(pattern1602, With1602)
pattern1603 = Pattern(Integral(Pq_*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons5, cons66, cons150, cons918)
rule1603 = ReplacementRule(pattern1603, replacement1603)
pattern1604 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**WC('m', S(1))/(a_ + x_**n_*WC('b', S(1))), x_), cons2, cons3, cons8, cons19, cons66, cons87)
rule1604 = ReplacementRule(pattern1604, replacement1604)
pattern1605 = Pattern(Integral(Pq_/(a_ + x_**n_*WC('b', S(1))), x_), cons2, cons3, cons66, cons87)
rule1605 = ReplacementRule(pattern1605, replacement1605)
pattern1606 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**m_*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons5, cons66, cons150, cons33, cons96, cons919, CustomConstraint(With1606))
rule1606 = ReplacementRule(pattern1606, replacement1606)
pattern1607 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons5, cons66, cons150, CustomConstraint(With1607))
rule1607 = ReplacementRule(pattern1607, replacement1607)
pattern1608 = Pattern(Integral(Pq_*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons5, cons66, cons150, CustomConstraint(With1608))
rule1608 = ReplacementRule(pattern1608, replacement1608)
pattern1609 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons5, cons66, cons198, cons20)
rule1609 = ReplacementRule(pattern1609, With1609)
pattern1610 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons5, cons66, cons198, cons369)
rule1610 = ReplacementRule(pattern1610, With1610)
pattern1611 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**m_*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons5, cons66, cons198, cons358)
rule1611 = ReplacementRule(pattern1611, With1611)
pattern1612 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons19, cons5, cons66, cons491)
rule1612 = ReplacementRule(pattern1612, With1612)
pattern1613 = Pattern(Integral(Pq_*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons5, cons66, cons491)
rule1613 = ReplacementRule(pattern1613, With1613)
pattern1614 = Pattern(Integral(Pq_*(c_*x_)**m_*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons5, cons66, cons491)
rule1614 = ReplacementRule(pattern1614, replacement1614)
pattern1615 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons19, cons4, cons5, cons860, cons543, cons25)
rule1615 = ReplacementRule(pattern1615, replacement1615)
pattern1616 = Pattern(Integral(Pq_*(c_*x_)**m_*(a_ + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons4, cons5, cons860, cons543, cons25)
rule1616 = ReplacementRule(pattern1616, replacement1616)
pattern1617 = Pattern(Integral((A_ + x_**WC('m', S(1))*WC('B', S(1)))*(a_ + x_**n_*WC('b', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons36, cons37, cons19, cons4, cons5, cons55)
rule1617 = ReplacementRule(pattern1617, replacement1617)
pattern1618 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons19, cons4, cons5, cons920)
rule1618 = ReplacementRule(pattern1618, replacement1618)
pattern1619 = Pattern(Integral(Pq_*(a_ + x_**n_*WC('b', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons4, cons5, cons920)
rule1619 = ReplacementRule(pattern1619, replacement1619)
pattern1620 = Pattern(Integral(Pq_*u_**WC('m', S(1))*(a_ + v_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons19, cons4, cons5, cons556, cons921)
rule1620 = ReplacementRule(pattern1620, replacement1620)
pattern1621 = Pattern(Integral(Pq_*(a_ + v_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons4, cons5, cons554, cons921)
rule1621 = ReplacementRule(pattern1621, replacement1621)
pattern1622 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**WC('m', S(1))*(a1_ + x_**WC('n', S(1))*WC('b1', S(1)))**WC('p', S(1))*(a2_ + x_**WC('n', S(1))*WC('b2', S(1)))**WC('p', S(1)), x_), cons59, cons60, cons61, cons62, cons8, cons19, cons4, cons5, cons66, cons57, cons496)
rule1622 = ReplacementRule(pattern1622, replacement1622)
pattern1623 = Pattern(Integral(Pq_*(a1_ + x_**WC('n', S(1))*WC('b1', S(1)))**WC('p', S(1))*(a2_ + x_**WC('n', S(1))*WC('b2', S(1)))**WC('p', S(1)), x_), cons59, cons60, cons61, cons62, cons4, cons5, cons66, cons57, cons496)
rule1623 = ReplacementRule(pattern1623, replacement1623)
pattern1624 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**WC('m', S(1))*(a1_ + x_**WC('n', S(1))*WC('b1', S(1)))**WC('p', S(1))*(a2_ + x_**WC('n', S(1))*WC('b2', S(1)))**WC('p', S(1)), x_), cons59, cons60, cons61, cons62, cons8, cons19, cons4, cons5, cons66, cons57)
rule1624 = ReplacementRule(pattern1624, replacement1624)
pattern1625 = Pattern(Integral(Pq_*(a1_ + x_**WC('n', S(1))*WC('b1', S(1)))**WC('p', S(1))*(a2_ + x_**WC('n', S(1))*WC('b2', S(1)))**WC('p', S(1)), x_), cons59, cons60, cons61, cons62, cons4, cons5, cons66, cons57)
rule1625 = ReplacementRule(pattern1625, replacement1625)
# --- Rules 1626 onward: products of two binomials (a + b*x**n)**p * (c + d*x**n)**p
# times a (possibly degenerate) trinomial e + f*x**n + g*x**n2, optionally with a
# monomial factor (h*x)**m; later in this section the base switches to a single
# trinomial a + b*x**n + c*x**n2 (rules 1632+).
pattern1626 = Pattern(Integral((a_ + x_**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1))*(c_ + x_**WC('n', S(1))*WC('d', S(1)))**WC('p', S(1))*(e_ + x_**WC('n', S(1))*WC('f', S(1)) + x_**WC('n2', S(1))*WC('g', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons4, cons5, cons48, cons922, cons923)
rule1626 = ReplacementRule(pattern1626, replacement1626)
pattern1627 = Pattern(Integral((a_ + x_**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1))*(c_ + x_**WC('n', S(1))*WC('d', S(1)))**WC('p', S(1))*(e_ + x_**WC('n2', S(1))*WC('g', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons210, cons4, cons5, cons48, cons924, cons923)
rule1627 = ReplacementRule(pattern1627, replacement1627)
pattern1628 = Pattern(Integral((x_*WC('h', S(1)))**WC('m', S(1))*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1))*(c_ + x_**WC('n', S(1))*WC('d', S(1)))**WC('p', S(1))*(e_ + x_**WC('n', S(1))*WC('f', S(1)) + x_**WC('n2', S(1))*WC('g', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons211, cons19, cons4, cons5, cons48, cons925, cons926, cons68)
rule1628 = ReplacementRule(pattern1628, replacement1628)
pattern1629 = Pattern(Integral((x_*WC('h', S(1)))**WC('m', S(1))*(a_ + x_**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1))*(c_ + x_**WC('n', S(1))*WC('d', S(1)))**WC('p', S(1))*(e_ + x_**WC('n2', S(1))*WC('g', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons210, cons211, cons19, cons4, cons5, cons48, cons597, cons926, cons68)
rule1629 = ReplacementRule(pattern1629, replacement1629)
pattern1630 = Pattern(Integral((A_ + x_**WC('m', S(1))*WC('B', S(1)))*(c_ + x_**n_*WC('d', S(1)))**WC('q', S(1))*(x_**n_*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons36, cons37, cons19, cons4, cons5, cons52, cons73, cons55)
rule1630 = ReplacementRule(pattern1630, replacement1630)
pattern1631 = Pattern(Integral(Px_**WC('q', S(1))*((c_ + x_*WC('d', S(1)))**n_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons5, cons927, cons588, cons89)
rule1631 = ReplacementRule(pattern1631, With1631)
pattern1632 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons19, cons4, cons5, cons48, cons860, cons55)
rule1632 = ReplacementRule(pattern1632, replacement1632)
pattern1633 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**WC('m', S(1))*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons19, cons4, cons48, cons66, cons130)
rule1633 = ReplacementRule(pattern1633, replacement1633)
pattern1634 = Pattern(Integral(Pq_*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons4, cons48, cons66, cons130)
rule1634 = ReplacementRule(pattern1634, replacement1634)
pattern1635 = Pattern(Integral((a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1))*(d_ + x_**WC('n', S(1))*WC('e', S(1)) + x_**WC('n2', S(1))*WC('f', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons4, cons5, cons48, cons928, cons929)
rule1635 = ReplacementRule(pattern1635, replacement1635)
pattern1636 = Pattern(Integral((d_ + x_**WC('n2', S(1))*WC('f', S(1)))*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons127, cons4, cons5, cons48, cons924, cons930)
rule1636 = ReplacementRule(pattern1636, replacement1636)
pattern1637 = Pattern(Integral((x_*WC('g', S(1)))**WC('m', S(1))*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1))*(d_ + x_**WC('n', S(1))*WC('e', S(1)) + x_**WC('n2', S(1))*WC('f', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons19, cons4, cons5, cons48, cons931, cons932, cons68)
rule1637 = ReplacementRule(pattern1637, replacement1637)
pattern1638 = Pattern(Integral((x_*WC('g', S(1)))**WC('m', S(1))*(d_ + x_**WC('n2', S(1))*WC('f', S(1)))*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons127, cons210, cons19, cons4, cons5, cons48, cons597, cons930, cons68)
rule1638 = ReplacementRule(pattern1638, replacement1638)
pattern1639 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**WC('m', S(1))*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons19, cons4, cons5, cons48, cons66, cons47, cons316)
rule1639 = ReplacementRule(pattern1639, replacement1639)
pattern1640 = Pattern(Integral(Pq_*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons4, cons5, cons48, cons66, cons47, cons316)
rule1640 = ReplacementRule(pattern1640, replacement1640)
pattern1641 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons4, cons5, cons48, cons860, cons228, cons502)
rule1641 = ReplacementRule(pattern1641, replacement1641)
pattern1642 = Pattern(Integral(Pq_*(d_*x_)**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons19, cons4, cons5, cons48, cons860, cons228, cons502)
rule1642 = ReplacementRule(pattern1642, replacement1642)
pattern1643 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**WC('m', S(1))*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons19, cons4, cons5, cons48, cons66, cons861)
rule1643 = ReplacementRule(pattern1643, replacement1643)
pattern1644 = Pattern(Integral(Pq_*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons4, cons5, cons48, cons66, cons861, cons862)
rule1644 = ReplacementRule(pattern1644, replacement1644)
pattern1645 = Pattern(Integral((a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1))*(d_ + x_**n_*WC('e', S(1)) + x_**WC('n2', S(1))*WC('f', S(1)) + x_**WC('n3', S(1))*WC('g', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons4, cons5, cons48, cons933, cons228, cons934, cons935)
rule1645 = ReplacementRule(pattern1645, replacement1645)
pattern1646 = Pattern(Integral((a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1))*(d_ + x_**WC('n2', S(1))*WC('f', S(1)) + x_**WC('n3', S(1))*WC('g', S(1))), x_), cons2, cons3, cons8, cons29, cons127, cons210, cons4, cons5, cons48, cons933, cons228, cons936, cons937)
rule1646 = ReplacementRule(pattern1646, replacement1646)
pattern1647 = Pattern(Integral((a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1))*(d_ + x_**n_*WC('e', S(1)) + x_**WC('n3', S(1))*WC('g', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons210, cons4, cons5, cons48, cons933, cons228, cons934, cons938)
rule1647 = ReplacementRule(pattern1647, replacement1647)
pattern1648 = Pattern(Integral((d_ + x_**WC('n3', S(1))*WC('g', S(1)))*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons210, cons4, cons5, cons48, cons933, cons228, cons936, cons939)
rule1648 = ReplacementRule(pattern1648, replacement1648)
pattern1649 = Pattern(Integral(x_**WC('m', S(1))*(e_ + x_**WC('q', S(1))*WC('f', S(1)) + x_**WC('r', S(1))*WC('g', S(1)) + x_**WC('s', S(1))*WC('h', S(1)))/(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**(S(3)/2), x_), cons2, cons3, cons8, cons50, cons127, cons210, cons211, cons19, cons4, cons48, cons940, cons941, cons942, cons228, cons943, cons854)
rule1649 = ReplacementRule(pattern1649, replacement1649)
pattern1650 = Pattern(Integral((d_*x_)**WC('m', S(1))*(e_ + x_**WC('q', S(1))*WC('f', S(1)) + x_**WC('r', S(1))*WC('g', S(1)) + x_**WC('s', S(1))*WC('h', S(1)))/(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**(S(3)/2), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons211, cons19, cons4, cons48, cons940, cons941, cons942, cons228, cons943, cons854)
rule1650 = ReplacementRule(pattern1650, replacement1650)
pattern1651 = Pattern(Integral(Pq_*(a_ + x_**n2_*WC('c', S(1)) + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons48, cons66, cons228, cons150, cons13, cons139, CustomConstraint(With1651))
rule1651 = ReplacementRule(pattern1651, replacement1651)
pattern1652 = Pattern(Integral((d_ + x_**S(4)*WC('g', S(1)) + x_**S(3)*WC('f', S(1)) + x_*WC('e', S(1)))/(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons228, cons944)
rule1652 = ReplacementRule(pattern1652, replacement1652)
pattern1653 = Pattern(Integral((d_ + x_**S(4)*WC('g', S(1)) + x_**S(3)*WC('f', S(1)))/(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons8, cons29, cons127, cons210, cons228, cons944)
rule1653 = ReplacementRule(pattern1653, replacement1653)
pattern1654 = Pattern(Integral((d_ + x_**S(4)*WC('g', S(1)) + x_*WC('e', S(1)))/(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons8, cons29, cons50, cons210, cons228, cons944)
rule1654 = ReplacementRule(pattern1654, replacement1654)
pattern1655 = Pattern(Integral(x_**S(2)*(x_**S(4)*WC('h', S(1)) + x_**S(2)*WC('g', S(1)) + x_*WC('f', S(1)) + WC('e', S(0)))/(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons8, cons50, cons127, cons210, cons211, cons228, cons945, cons946)
rule1655 = ReplacementRule(pattern1655, replacement1655)
pattern1656 = Pattern(Integral(x_**S(2)*(x_**S(4)*WC('h', S(1)) + x_**S(2)*WC('g', S(1)) + WC('e', S(0)))/(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons8, cons50, cons210, cons211, cons228, cons945, cons946)
rule1656 = ReplacementRule(pattern1656, replacement1656)
pattern1657 = Pattern(Integral((d_ + x_**S(6)*WC('h', S(1)) + x_**S(4)*WC('g', S(1)) + x_**S(3)*WC('f', S(1)) + x_**S(2)*WC('e', S(1)))/(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons211, cons228, cons945, cons947)
rule1657 = ReplacementRule(pattern1657, replacement1657)
pattern1658 = Pattern(Integral((d_ + x_**S(6)*WC('h', S(1)) + x_**S(3)*WC('f', S(1)) + x_**S(2)*WC('e', S(1)))/(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))**(S(3)/2), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons211, cons228, cons945, cons948)
rule1658 = ReplacementRule(pattern1658, replacement1658)
pattern1659 = Pattern(Integral(Pq_*(a_ + x_**n2_*WC('c', S(1)) + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons48, cons66, cons228, cons150, cons13, cons139, CustomConstraint(With1659))
rule1659 = ReplacementRule(pattern1659, replacement1659)
pattern1660 = Pattern(Integral(Pq_*x_**m_*(a_ + x_**n2_*WC('c', S(1)) + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons48, cons66, cons228, cons150, cons13, cons139, cons86, CustomConstraint(With1660))
rule1660 = ReplacementRule(pattern1660, replacement1660)
pattern1661 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n2_*WC('c', S(1)) + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons5, cons48, cons860, cons228, cons150, cons20, CustomConstraint(With1661))
rule1661 = ReplacementRule(pattern1661, replacement1661)
pattern1662 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**WC('m', S(1))/(a_ + x_**n2_*WC('c', S(1)) + x_**WC('n', S(1))*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons19, cons48, cons860, cons228, cons150, cons284)
rule1662 = ReplacementRule(pattern1662, replacement1662)
pattern1663 = Pattern(Integral(Pq_/(a_ + x_**n2_*WC('c', S(1)) + x_**WC('n', S(1))*WC('b', S(1))), x_), cons2, cons3, cons8, cons48, cons860, cons228, cons150, cons949)
rule1663 = ReplacementRule(pattern1663, replacement1663)
pattern1664 = Pattern(Integral(Pq_*(a_ + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons66, cons228, cons65, CustomConstraint(With1664))
rule1664 = ReplacementRule(pattern1664, replacement1664)
pattern1665 = Pattern(Integral(Pq_*(a_ + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons66, cons228, cons721, cons950, CustomConstraint(With1665))
rule1665 = ReplacementRule(pattern1665, replacement1665)
pattern1666 = Pattern(Integral(Pq_*(a_ + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons66, cons228, cons721, cons951, CustomConstraint(With1666))
rule1666 = ReplacementRule(pattern1666, replacement1666)
pattern1667 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**WC('m', S(1))*(a_ + x_**n2_*WC('c', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons19, cons5, cons48, cons860, cons228, cons150, CustomConstraint(With1667))
rule1667 = ReplacementRule(pattern1667, replacement1667)
pattern1668 = Pattern(Integral(Pq_*(a_ + x_**n2_*WC('c', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons5, cons48, cons860, cons228, cons150, CustomConstraint(With1668))
rule1668 = ReplacementRule(pattern1668, replacement1668)
pattern1669 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**WC('m', S(1))*(a_ + x_**n2_*WC('c', S(1)) + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons19, cons5, cons48, cons66, cons228, cons150, cons952)
rule1669 = ReplacementRule(pattern1669, With1669)
pattern1670 = Pattern(Integral(Pq_*(a_ + x_**n2_*WC('c', S(1)) + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons5, cons48, cons66, cons228, cons150, cons952)
rule1670 = ReplacementRule(pattern1670, With1670)
pattern1671 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**WC('m', S(1))/(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1))), x_), cons2, cons3, cons8, cons29, cons19, cons48, cons66, cons228, cons150)
rule1671 = ReplacementRule(pattern1671, replacement1671)
pattern1672 = Pattern(Integral(Pq_/(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1))), x_), cons2, cons3, cons8, cons48, cons66, cons228, cons150)
rule1672 = ReplacementRule(pattern1672, replacement1672)
pattern1673 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons5, cons48, cons66, cons228, cons198, cons20)
rule1673 = ReplacementRule(pattern1673, With1673)
pattern1674 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons5, cons48, cons66, cons228, cons198, cons369)
rule1674 = ReplacementRule(pattern1674, With1674)
pattern1675 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**m_*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons19, cons5, cons48, cons66, cons228, cons198, cons358)
rule1675 = ReplacementRule(pattern1675, With1675)
pattern1676 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons5, cons48, cons66, cons228, cons491)
rule1676 = ReplacementRule(pattern1676, With1676)
pattern1677 = Pattern(Integral(Pq_*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons5, cons48, cons66, cons228, cons491)
rule1677 = ReplacementRule(pattern1677, With1677)
pattern1678 = Pattern(Integral(Pq_*(d_*x_)**m_*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons5, cons48, cons66, cons228, cons491, cons75)
rule1678 = ReplacementRule(pattern1678, replacement1678)
pattern1679 = Pattern(Integral(Pq_*(d_*x_)**m_*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons5, cons48, cons66, cons228, cons491, cons953)
rule1679 = ReplacementRule(pattern1679, replacement1679)
pattern1680 = Pattern(Integral(Pq_*(d_*x_)**m_*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons19, cons5, cons48, cons66, cons228, cons491)
rule1680 = ReplacementRule(pattern1680, replacement1680)
pattern1681 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons4, cons5, cons48, cons860, cons228, cons543, cons25)
rule1681 = ReplacementRule(pattern1681, replacement1681)
pattern1682 = Pattern(Integral(Pq_*(d_*x_)**m_*(a_ + x_**n_*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons19, cons5, cons48, cons860, cons228, cons543, cons25)
rule1682 = ReplacementRule(pattern1682, replacement1682)
pattern1683 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**WC('m', S(1))/(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1))), x_), cons2, cons3, cons8, cons29, cons19, cons4, cons48, cons66, cons228)
rule1683 = ReplacementRule(pattern1683, With1683)
pattern1684 = Pattern(Integral(Pq_/(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1))), x_), cons2, cons3, cons8, cons4, cons48, cons66, cons228)
rule1684 = ReplacementRule(pattern1684, With1684)
pattern1685 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**WC('m', S(1))*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons19, cons4, cons48, cons66, cons704)
rule1685 = ReplacementRule(pattern1685, replacement1685)
pattern1686 = Pattern(Integral(Pq_*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**p_, x_), cons2, cons3, cons8, cons4, cons48, cons66, cons704)
rule1686 = ReplacementRule(pattern1686, replacement1686)
pattern1687 = Pattern(Integral(Pq_*(x_*WC('d', S(1)))**WC('m', S(1))*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons19, cons4, cons5, cons48, cons920)
rule1687 = ReplacementRule(pattern1687, replacement1687)
pattern1688 = Pattern(Integral(Pq_*(a_ + x_**WC('n', S(1))*WC('b', S(1)) + x_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons4, cons5, cons48, cons920)
rule1688 = ReplacementRule(pattern1688, replacement1688)
pattern1689 = Pattern(Integral(Pq_*u_**WC('m', S(1))*(a_ + v_**n_*WC('b', S(1)) + v_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons19, cons4, cons5, cons48, cons556, cons921)
rule1689 = ReplacementRule(pattern1689, replacement1689)
pattern1690 = Pattern(Integral(Pq_*(a_ + v_**n_*WC('b', S(1)) + v_**WC('n2', S(1))*WC('c', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons4, cons5, cons48, cons554, cons921)
rule1690 = ReplacementRule(pattern1690, replacement1690)
# --- Rules 1691 onward: generalized binomial (a*x**j + b*x**n)**p, i.e. a
# binomial with two arbitrary (possibly symbolic) exponents j and n, optionally
# multiplied by a monomial (c*x)**m in the later rules of this section.
pattern1691 = Pattern(Integral((x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons798, cons4, cons5, cons149, cons954, cons955)
rule1691 = ReplacementRule(pattern1691, replacement1691)
pattern1692 = Pattern(Integral((x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons798, cons4, cons149, cons954, cons956, cons13, cons139)
rule1692 = ReplacementRule(pattern1692, replacement1692)
pattern1693 = Pattern(Integral((x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons798, cons4, cons5, cons149, cons954, cons956, cons957)
rule1693 = ReplacementRule(pattern1693, replacement1693)
pattern1694 = Pattern(Integral((x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons149, cons958, cons959, cons165, cons960)
rule1694 = ReplacementRule(pattern1694, replacement1694)
pattern1695 = Pattern(Integral((x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons149, cons958, cons959, cons165, cons961)
rule1695 = ReplacementRule(pattern1695, replacement1695)
pattern1696 = Pattern(Integral((x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons149, cons958, cons959, cons139, cons962)
rule1696 = ReplacementRule(pattern1696, replacement1696)
pattern1697 = Pattern(Integral((x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons149, cons958, cons959, cons139)
rule1697 = ReplacementRule(pattern1697, replacement1697)
pattern1698 = Pattern(Integral((x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons798, cons4, cons963, cons954, cons964)
rule1698 = ReplacementRule(pattern1698, replacement1698)
pattern1699 = Pattern(Integral(S(1)/sqrt(x_**S(2)*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1))), x_), cons2, cons3, cons4, cons965)
rule1699 = ReplacementRule(pattern1699, replacement1699)
pattern1700 = Pattern(Integral((x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons798, cons4, cons721, cons954, cons964)
rule1700 = ReplacementRule(pattern1700, replacement1700)
pattern1701 = Pattern(Integral(S(1)/sqrt(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1))), x_), cons2, cons3, cons966, cons967)
rule1701 = ReplacementRule(pattern1701, replacement1701)
pattern1702 = Pattern(Integral((x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons798, cons4, cons5, cons149, cons954, cons968)
rule1702 = ReplacementRule(pattern1702, replacement1702)
pattern1703 = Pattern(Integral((u_**WC('j', S(1))*WC('a', S(1)) + u_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons798, cons4, cons5, cons70, cons71)
rule1703 = ReplacementRule(pattern1703, replacement1703)
# --- RUBI rules 1704-1724 ---------------------------------------------------
# Monomial times generalized binomial: x^m (or (c*x)^m) multiplied by
# (a*x^j + b*x^n)^p.  The many near-identical patterns differ only in their
# constraint lists, which select exponent relationships (m, j, n, p) that make
# a particular closed-form reduction applicable.
pattern1704 = Pattern(Integral(x_**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons798, cons19, cons4, cons5, cons149, cons954, cons969, cons55)
rule1704 = ReplacementRule(pattern1704, replacement1704)
pattern1705 = Pattern(Integral((x_*WC('c', S(1)))**WC('m', S(1))*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons5, cons149, cons954, cons970, cons971)
rule1705 = ReplacementRule(pattern1705, replacement1705)
pattern1706 = Pattern(Integral((x_*WC('c', S(1)))**WC('m', S(1))*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons149, cons954, cons972, cons13, cons139, cons971)
rule1706 = ReplacementRule(pattern1706, replacement1706)
pattern1707 = Pattern(Integral((x_*WC('c', S(1)))**WC('m', S(1))*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons5, cons149, cons954, cons972, cons973, cons974)
rule1707 = ReplacementRule(pattern1707, replacement1707)
pattern1708 = Pattern(Integral((c_*x_)**WC('m', S(1))*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons5, cons149, cons954, cons972)
rule1708 = ReplacementRule(pattern1708, replacement1708)
pattern1709 = Pattern(Integral(x_**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons798, cons19, cons4, cons5, cons149, cons954, cons969, cons502, cons975)
rule1709 = ReplacementRule(pattern1709, replacement1709)
pattern1710 = Pattern(Integral((c_*x_)**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons5, cons149, cons954, cons969, cons502, cons975)
rule1710 = ReplacementRule(pattern1710, replacement1710)
# Rules 1711-1716: recurrence cases split on the sign/size of p (cons165 vs.
# cons139 mirror the p > 0 / p < 0 branches seen in earlier rule groups).
pattern1711 = Pattern(Integral((x_*WC('c', S(1)))**m_*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons149, cons976, cons959, cons974, cons165, cons977)
rule1711 = ReplacementRule(pattern1711, replacement1711)
pattern1712 = Pattern(Integral((x_*WC('c', S(1)))**WC('m', S(1))*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons149, cons958, cons959, cons974, cons165, cons514)
rule1712 = ReplacementRule(pattern1712, replacement1712)
pattern1713 = Pattern(Integral((x_*WC('c', S(1)))**WC('m', S(1))*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons149, cons976, cons959, cons974, cons139, cons978)
rule1713 = ReplacementRule(pattern1713, replacement1713)
pattern1714 = Pattern(Integral((x_*WC('c', S(1)))**WC('m', S(1))*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons149, cons958, cons959, cons974, cons139)
rule1714 = ReplacementRule(pattern1714, replacement1714)
pattern1715 = Pattern(Integral((x_*WC('c', S(1)))**WC('m', S(1))*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons5, cons149, cons966, cons959, cons974, cons979, cons514)
rule1715 = ReplacementRule(pattern1715, replacement1715)
pattern1716 = Pattern(Integral((x_*WC('c', S(1)))**WC('m', S(1))*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons5, cons149, cons966, cons959, cons974, cons980)
rule1716 = ReplacementRule(pattern1716, replacement1716)
pattern1717 = Pattern(Integral(x_**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons798, cons19, cons4, cons5, cons149, cons954, cons969, cons68, cons543, cons25)
rule1717 = ReplacementRule(pattern1717, replacement1717)
pattern1718 = Pattern(Integral((c_*x_)**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons5, cons149, cons954, cons969, cons68, cons543, cons25)
rule1718 = ReplacementRule(pattern1718, replacement1718)
pattern1719 = Pattern(Integral((x_*WC('c', S(1)))**WC('m', S(1))*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons963, cons954, cons981, cons971)
rule1719 = ReplacementRule(pattern1719, replacement1719)
# Rule 1720: x^m / sqrt(a*x^j + b*x^n).
pattern1720 = Pattern(Integral(x_**WC('m', S(1))/sqrt(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1))), x_), cons2, cons3, cons798, cons4, cons982, cons954)
rule1720 = ReplacementRule(pattern1720, replacement1720)
pattern1721 = Pattern(Integral((x_*WC('c', S(1)))**WC('m', S(1))*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons721, cons954, cons981, cons971)
rule1721 = ReplacementRule(pattern1721, replacement1721)
pattern1722 = Pattern(Integral((c_*x_)**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons5, cons669, cons954, cons981)
rule1722 = ReplacementRule(pattern1722, replacement1722)
pattern1723 = Pattern(Integral((x_*WC('c', S(1)))**WC('m', S(1))*(x_**WC('j', S(1))*WC('a', S(1)) + x_**WC('n', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons5, cons149, cons954, cons968)
rule1723 = ReplacementRule(pattern1723, replacement1723)
# Rule 1724: u^m * (a*v^j + b*v^n)^p with u, v linear in x (cons556).
pattern1724 = Pattern(Integral(u_**WC('m', S(1))*(v_**WC('j', S(1))*WC('a', S(1)) + v_**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1)), x_), cons2, cons3, cons798, cons19, cons4, cons5, cons556)
rule1724 = ReplacementRule(pattern1724, replacement1724)
# --- RUBI rules 1725-1733 ---------------------------------------------------
# Monomial times generalized binomial with an extra binomial factor:
# x^m (or (e*x)^m) * (c + d*x^n)^q * (a*x^j + b*x^k)^p.  Note that several
# patterns use the wildcard 'jn' for the exponent j+n relationship enforced
# by cons985.
pattern1725 = Pattern(Integral(x_**WC('m', S(1))*(c_ + x_**n_*WC('d', S(1)))**WC('q', S(1))*(x_**j_*WC('a', S(1)) + x_**WC('k', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons798, cons799, cons19, cons4, cons5, cons52, cons149, cons983, cons969, cons984, cons502, cons975)
rule1725 = ReplacementRule(pattern1725, replacement1725)
pattern1726 = Pattern(Integral((e_*x_)**WC('m', S(1))*(c_ + x_**WC('n', S(1))*WC('d', S(1)))**WC('q', S(1))*(x_**j_*WC('a', S(1)) + x_**WC('k', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons798, cons799, cons19, cons4, cons5, cons52, cons149, cons983, cons969, cons984, cons502, cons975)
rule1726 = ReplacementRule(pattern1726, replacement1726)
pattern1727 = Pattern(Integral((x_*WC('e', S(1)))**WC('m', S(1))*(c_ + x_**WC('n', S(1))*WC('d', S(1)))*(x_**WC('jn', S(1))*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons798, cons19, cons4, cons5, cons985, cons149, cons73, cons986, cons987, cons973)
rule1727 = ReplacementRule(pattern1727, replacement1727)
pattern1728 = Pattern(Integral((x_*WC('e', S(1)))**WC('m', S(1))*(c_ + x_**WC('n', S(1))*WC('d', S(1)))*(x_**WC('jn', S(1))*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons798, cons19, cons4, cons985, cons149, cons73, cons988, cons139, cons989, cons990)
rule1728 = ReplacementRule(pattern1728, replacement1728)
pattern1729 = Pattern(Integral((x_*WC('e', S(1)))**WC('m', S(1))*(c_ + x_**WC('n', S(1))*WC('d', S(1)))*(x_**WC('jn', S(1))*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons798, cons5, cons985, cons149, cons73, cons95, cons90, cons991, cons992, cons973, cons993)
rule1729 = ReplacementRule(pattern1729, replacement1729)
pattern1730 = Pattern(Integral((x_*WC('e', S(1)))**WC('m', S(1))*(c_ + x_**WC('n', S(1))*WC('d', S(1)))*(x_**WC('jn', S(1))*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons798, cons19, cons4, cons5, cons985, cons149, cons73, cons994, cons990)
rule1730 = ReplacementRule(pattern1730, replacement1730)
pattern1731 = Pattern(Integral(x_**WC('m', S(1))*(c_ + x_**WC('n', S(1))*WC('d', S(1)))**WC('q', S(1))*(x_**j_*WC('a', S(1)) + x_**WC('k', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons798, cons799, cons19, cons4, cons5, cons52, cons149, cons983, cons969, cons984, cons68, cons543, cons25)
rule1731 = ReplacementRule(pattern1731, replacement1731)
pattern1732 = Pattern(Integral((e_*x_)**WC('m', S(1))*(c_ + x_**WC('n', S(1))*WC('d', S(1)))**WC('q', S(1))*(x_**j_*WC('a', S(1)) + x_**WC('k', S(1))*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons798, cons799, cons19, cons4, cons5, cons52, cons149, cons983, cons969, cons984, cons68, cons543, cons25)
rule1732 = ReplacementRule(pattern1732, replacement1732)
pattern1733 = Pattern(Integral((x_*WC('e', S(1)))**WC('m', S(1))*(c_ + x_**WC('n', S(1))*WC('d', S(1)))**WC('q', S(1))*(x_**WC('jn', S(1))*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons798, cons19, cons4, cons5, cons52, cons985, cons149, cons73, cons995)
rule1733 = ReplacementRule(pattern1733, replacement1733)
# --- RUBI rules 1734-1744 ---------------------------------------------------
# Polynomial Pq (wildcard for a polynomial in x) times a generalized binomial
# (a*x^j + b*x^n)^p, optionally with a monomial x^m / (c*x)^m factor.  Rules
# built from `WithNNNN` callables compute auxiliary local definitions before
# producing the rewrite; `CustomConstraint(WithNNNN)` uses the same mechanism
# as a match-time guard.
pattern1734 = Pattern(Integral(Pq_*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons798, cons4, cons5, cons860, cons149, cons954, cons966, cons969, cons996)
rule1734 = ReplacementRule(pattern1734, With1734)
pattern1735 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons798, cons19, cons4, cons5, cons860, cons149, cons954, cons969, cons502)
rule1735 = ReplacementRule(pattern1735, replacement1735)
pattern1736 = Pattern(Integral(Pq_*(c_*x_)**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons4, cons5, cons860, cons149, cons954, cons969, cons502, cons33, cons997)
rule1736 = ReplacementRule(pattern1736, replacement1736)
pattern1737 = Pattern(Integral(Pq_*(c_*x_)**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons5, cons860, cons149, cons954, cons969, cons502)
rule1737 = ReplacementRule(pattern1737, replacement1737)
pattern1738 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons5, cons860, cons149, cons998, cons20, CustomConstraint(With1738))
rule1738 = ReplacementRule(pattern1738, replacement1738)
pattern1739 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons19, cons5, cons66, cons149, cons999, cons1000, CustomConstraint(With1739))
rule1739 = ReplacementRule(pattern1739, replacement1739)
pattern1740 = Pattern(Integral(Pq_*x_**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons798, cons19, cons4, cons5, cons860, cons149, cons954, cons969, cons543, cons25)
rule1740 = ReplacementRule(pattern1740, replacement1740)
pattern1741 = Pattern(Integral(Pq_*(c_*x_)**m_*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons4, cons5, cons860, cons149, cons954, cons969, cons543, cons25, cons33, cons997)
rule1741 = ReplacementRule(pattern1741, replacement1741)
pattern1742 = Pattern(Integral(Pq_*(c_*x_)**m_*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons5, cons860, cons149, cons954, cons969, cons543, cons25)
rule1742 = ReplacementRule(pattern1742, replacement1742)
pattern1743 = Pattern(Integral(Pq_*(x_*WC('c', S(1)))**WC('m', S(1))*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons8, cons798, cons19, cons4, cons5, cons920, cons149, cons954)
rule1743 = ReplacementRule(pattern1743, replacement1743)
pattern1744 = Pattern(Integral(Pq_*(x_**n_*WC('b', S(1)) + x_**WC('j', S(1))*WC('a', S(1)))**p_, x_), cons2, cons3, cons798, cons4, cons5, cons920, cons149, cons954)
rule1744 = ReplacementRule(pattern1744, replacement1744)
# --- RUBI rules 1745-1758 ---------------------------------------------------
# Powers of the depressed cubic a + b*x + d*x^3 (no x^2 term), alone
# (1745-1751) or times a linear factor (e + f*x)^m (1752-1758).  The cases
# split on p: positive integer (cons40/cons130), negative integer / rational
# (cons65), and symbolic p (cons5 + cons149); cons1001/cons1002 discriminate
# on the cubic's coefficients.
pattern1745 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons40, cons1001)
rule1745 = ReplacementRule(pattern1745, replacement1745)
pattern1746 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons130, cons1002)
rule1746 = ReplacementRule(pattern1746, replacement1746)
pattern1747 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons65, cons1002, CustomConstraint(With1747))
rule1747 = ReplacementRule(pattern1747, replacement1747)
pattern1748 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons65, cons1002)
rule1748 = ReplacementRule(pattern1748, With1748)
pattern1749 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons5, cons149, cons1001)
rule1749 = ReplacementRule(pattern1749, replacement1749)
pattern1750 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons5, cons149, cons1002, CustomConstraint(With1750))
rule1750 = ReplacementRule(pattern1750, replacement1750)
pattern1751 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons5, cons149, cons1002)
rule1751 = ReplacementRule(pattern1751, With1751)
# Rules 1752-1758: (e + f*x)^m times the same depressed cubic to the p.
pattern1752 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons50, cons127, cons19, cons40, cons1001)
rule1752 = ReplacementRule(pattern1752, replacement1752)
pattern1753 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons50, cons127, cons19, cons130, cons1002)
rule1753 = ReplacementRule(pattern1753, replacement1753)
pattern1754 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons50, cons127, cons19, cons65, cons1002, CustomConstraint(With1754))
rule1754 = ReplacementRule(pattern1754, replacement1754)
pattern1755 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons50, cons127, cons19, cons65, cons1002)
rule1755 = ReplacementRule(pattern1755, With1755)
pattern1756 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons50, cons127, cons19, cons5, cons149, cons1001)
rule1756 = ReplacementRule(pattern1756, replacement1756)
pattern1757 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons50, cons127, cons19, cons5, cons149, cons1002, CustomConstraint(With1757))
rule1757 = ReplacementRule(pattern1757, replacement1757)
pattern1758 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons29, cons50, cons127, cons19, cons5, cons149, cons1002)
rule1758 = ReplacementRule(pattern1758, With1758)
# --- RUBI rules 1759-1772 ---------------------------------------------------
# Powers of the cubic a + c*x^2 + d*x^3 (no linear term), alone (1759-1765)
# or times a linear factor (e + f*x)^m (1766-1772).  Same case structure as
# the depressed-cubic group above, with cons1003/cons1004 as the coefficient
# discriminants.
pattern1759 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons40, cons1003)
rule1759 = ReplacementRule(pattern1759, replacement1759)
pattern1760 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons130, cons1004)
rule1760 = ReplacementRule(pattern1760, replacement1760)
pattern1761 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons65, cons1004, CustomConstraint(With1761))
rule1761 = ReplacementRule(pattern1761, replacement1761)
pattern1762 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons65, cons1004)
rule1762 = ReplacementRule(pattern1762, With1762)
pattern1763 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons5, cons149, cons1003)
rule1763 = ReplacementRule(pattern1763, replacement1763)
pattern1764 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons5, cons149, cons1004, CustomConstraint(With1764))
rule1764 = ReplacementRule(pattern1764, replacement1764)
pattern1765 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons5, cons149, cons1004)
rule1765 = ReplacementRule(pattern1765, With1765)
# Rules 1766-1772: (e + f*x)^m times the same cubic to the p.
pattern1766 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons50, cons127, cons19, cons40, cons1003)
rule1766 = ReplacementRule(pattern1766, replacement1766)
pattern1767 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons50, cons127, cons19, cons130, cons1004)
rule1767 = ReplacementRule(pattern1767, replacement1767)
pattern1768 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons50, cons127, cons19, cons65, cons1004, CustomConstraint(With1768))
rule1768 = ReplacementRule(pattern1768, replacement1768)
pattern1769 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons50, cons127, cons19, cons65, cons1004)
rule1769 = ReplacementRule(pattern1769, With1769)
pattern1770 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons50, cons127, cons19, cons5, cons149, cons1003)
rule1770 = ReplacementRule(pattern1770, replacement1770)
pattern1771 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons50, cons127, cons19, cons5, cons149, cons1004, CustomConstraint(With1771))
rule1771 = ReplacementRule(pattern1771, replacement1771)
pattern1772 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons8, cons29, cons50, cons127, cons19, cons5, cons149, cons1004)
rule1772 = ReplacementRule(pattern1772, With1772)
# --- RUBI rules 1773-1796 ---------------------------------------------------
# Powers of the full cubic a + b*x + c*x^2 + d*x^3, alone (1773-1783), for
# an opaque cubic expression u (1784), and times a linear factor (e + f*x)^m
# (1785-1795).  cons1005/cons1008 and cons1006/cons1007 split on coefficient
# relations (e.g. whether the cubic has a recognizable special form), with
# `WithNNNN` rules computing auxiliary quantities for the rewrite.
pattern1773 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons40, cons1005, cons1006)
rule1773 = ReplacementRule(pattern1773, replacement1773)
pattern1774 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons40, cons1005, cons1007)
rule1774 = ReplacementRule(pattern1774, replacement1774)
pattern1775 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons40, cons1008, cons1006)
rule1775 = ReplacementRule(pattern1775, With1775)
pattern1776 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons130, cons1008, cons1007)
rule1776 = ReplacementRule(pattern1776, replacement1776)
pattern1777 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons65, cons1008, cons1007, CustomConstraint(With1777))
rule1777 = ReplacementRule(pattern1777, replacement1777)
pattern1778 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons65, cons1008, cons1007)
rule1778 = ReplacementRule(pattern1778, replacement1778)
pattern1779 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons5, cons149, cons1005, cons1006)
rule1779 = ReplacementRule(pattern1779, replacement1779)
pattern1780 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons5, cons149, cons1005, cons1007)
rule1780 = ReplacementRule(pattern1780, With1780)
pattern1781 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons5, cons149, cons1008, cons1006)
rule1781 = ReplacementRule(pattern1781, With1781)
pattern1782 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons5, cons149, cons1008, cons1007, CustomConstraint(With1782))
rule1782 = ReplacementRule(pattern1782, replacement1782)
pattern1783 = Pattern(Integral((x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons5, cons149, cons1008, cons1007)
rule1783 = ReplacementRule(pattern1783, With1783)
# Rule 1784: u^p where u is recognized as a cubic in x (cons1009/cons1010).
pattern1784 = Pattern(Integral(u_**p_, x_), cons5, cons1009, cons1010)
rule1784 = ReplacementRule(pattern1784, replacement1784)
# Rules 1785-1795: (e + f*x)^m times the full cubic to the p.
pattern1785 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons127, cons19, cons40, cons1005, cons1006)
rule1785 = ReplacementRule(pattern1785, replacement1785)
pattern1786 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons127, cons19, cons40, cons1005, cons1007)
rule1786 = ReplacementRule(pattern1786, With1786)
pattern1787 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons127, cons19, cons40, cons1008, cons1006)
rule1787 = ReplacementRule(pattern1787, With1787)
pattern1788 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons127, cons19, cons130, cons1008, cons1007)
rule1788 = ReplacementRule(pattern1788, replacement1788)
pattern1789 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons127, cons19, cons65, cons1008, cons1007, CustomConstraint(With1789))
rule1789 = ReplacementRule(pattern1789, replacement1789)
pattern1790 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons127, cons19, cons65, cons1008, cons1007)
rule1790 = ReplacementRule(pattern1790, replacement1790)
pattern1791 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons127, cons19, cons5, cons149, cons1005, cons1006)
rule1791 = ReplacementRule(pattern1791, replacement1791)
pattern1792 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons127, cons19, cons5, cons149, cons1005, cons1007)
rule1792 = ReplacementRule(pattern1792, With1792)
pattern1793 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons127, cons19, cons5, cons149, cons1008, cons1006)
rule1793 = ReplacementRule(pattern1793, With1793)
pattern1794 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons127, cons19, cons5, cons149, cons1008, cons1007, CustomConstraint(With1794))
rule1794 = ReplacementRule(pattern1794, replacement1794)
pattern1795 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1))*(x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons127, cons19, cons5, cons149, cons1008, cons1007)
rule1795 = ReplacementRule(pattern1795, With1795)
# Rule 1796: u^m * v^p with u linear and v a cubic in x (cons70, cons1011, cons1012).
pattern1796 = Pattern(Integral(u_**WC('m', S(1))*v_**WC('p', S(1)), x_), cons19, cons5, cons70, cons1011, cons1012)
rule1796 = ReplacementRule(pattern1796, replacement1796)
# --- RUBI rules 1797-1814 ---------------------------------------------------
# Quartic integrands a + b*x + c*x^2 + d*x^3 + e*x^4.  Includes:
#   1797-1798  (f + g*x^2) / ((d + e*x + d*x^2) * sqrt(symmetric quartic))
#   1799-1804  powers of the quartic (and of opaque v_ recognized as a
#              quartic via cons1019/cons1020), alone or times a factor u_
#   1805-1814  rational functions (cubic numerator A + B*x + C*x^2 + D*x^3,
#              or with missing terms) over the quartic, optionally times x^m.
pattern1797 = Pattern(Integral((f_ + x_**S(2)*WC('g', S(1)))/((d_ + x_**S(2)*WC('d', S(1)) + x_*WC('e', S(1)))*sqrt(a_ + x_**S(4)*WC('a', S(1)) + x_**S(3)*WC('b', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons385, cons1013, cons1014)
rule1797 = ReplacementRule(pattern1797, replacement1797)
pattern1798 = Pattern(Integral((f_ + x_**S(2)*WC('g', S(1)))/((d_ + x_**S(2)*WC('d', S(1)) + x_*WC('e', S(1)))*sqrt(a_ + x_**S(4)*WC('a', S(1)) + x_**S(3)*WC('b', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons385, cons1013, cons1015)
rule1798 = ReplacementRule(pattern1798, replacement1798)
pattern1799 = Pattern(Integral((x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons5, cons1016, cons1017, cons1018)
rule1799 = ReplacementRule(pattern1799, replacement1799)
pattern1800 = Pattern(Integral(v_**p_, x_), cons5, cons1019, cons1020, cons1017, cons1018, CustomConstraint(With1800))
rule1800 = ReplacementRule(pattern1800, replacement1800)
pattern1801 = Pattern(Integral(u_*(x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons5, cons806, cons1016, cons359)
rule1801 = ReplacementRule(pattern1801, replacement1801)
pattern1802 = Pattern(Integral(u_*v_**p_, x_), cons5, cons806, cons1019, cons1020, cons359, CustomConstraint(With1802))
rule1802 = ReplacementRule(pattern1802, replacement1802)
pattern1803 = Pattern(Integral((a_ + x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons50, cons1021, cons248)
rule1803 = ReplacementRule(pattern1803, replacement1803)
pattern1804 = Pattern(Integral(v_**p_, x_), cons5, cons1019, cons1020, cons248, CustomConstraint(With1804))
rule1804 = ReplacementRule(pattern1804, replacement1804)
# Rules 1805-1808: symmetric quartic denominator a + b*x + c*x^2 + d*x^3 + e*x^4
# with cons1022-cons1024 relating the coefficients; With-rules factor the
# denominator before rewriting.
pattern1805 = Pattern(Integral((x_**S(3)*WC('D', S(1)) + x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1))), x_), cons2, cons3, cons8, cons36, cons37, cons38, cons1025, cons1022, cons1023, cons1024)
rule1805 = ReplacementRule(pattern1805, With1805)
pattern1806 = Pattern(Integral((x_**S(3)*WC('D', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1))), x_), cons2, cons3, cons8, cons36, cons37, cons1025, cons1022, cons1023, cons1024)
rule1806 = ReplacementRule(pattern1806, With1806)
pattern1807 = Pattern(Integral(x_**WC('m', S(1))*(x_**S(3)*WC('D', S(1)) + x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1))), x_), cons2, cons3, cons8, cons36, cons37, cons38, cons1025, cons19, cons1022, cons1023, cons1024)
rule1807 = ReplacementRule(pattern1807, With1807)
pattern1808 = Pattern(Integral(x_**WC('m', S(1))*(x_**S(3)*WC('D', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1))), x_), cons2, cons3, cons8, cons36, cons37, cons1025, cons19, cons1022, cons1023, cons1024)
rule1808 = ReplacementRule(pattern1808, With1808)
# Rules 1809-1814: quadratic/cubic numerators over the general quartic, split
# by which numerator terms are present and by the coefficient constraints.
pattern1809 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons36, cons37, cons38, cons1026, cons1027, cons1028)
rule1809 = ReplacementRule(pattern1809, With1809)
pattern1810 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + WC('A', S(0)))/(a_ + x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons36, cons38, cons1029, cons1030, cons1031)
rule1810 = ReplacementRule(pattern1810, With1810)
pattern1811 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons36, cons37, cons38, cons1026, cons1027, cons1032)
rule1811 = ReplacementRule(pattern1811, With1811)
pattern1812 = Pattern(Integral((x_**S(2)*WC('C', S(1)) + WC('A', S(0)))/(a_ + x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons36, cons38, cons1029, cons1030, cons1033)
rule1812 = ReplacementRule(pattern1812, With1812)
pattern1813 = Pattern(Integral((x_**S(3)*WC('D', S(1)) + x_**S(2)*WC('C', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons36, cons37, cons38, cons1025, cons1034, cons1035)
rule1813 = ReplacementRule(pattern1813, replacement1813)
pattern1814 = Pattern(Integral((x_**S(3)*WC('D', S(1)) + x_*WC('B', S(1)) + WC('A', S(0)))/(a_ + x_**S(4)*WC('e', S(1)) + x_**S(3)*WC('d', S(1)) + x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons36, cons37, cons1025, cons1036, cons1037)
rule1814 = ReplacementRule(pattern1814, replacement1814)
pattern1815 = Pattern(Integral(u_/(sqrt(x_*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1)) + sqrt(x_*WC('d', S(1)) + WC('c', S(0)))*WC('f', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons73, cons1038)
rule1815 = ReplacementRule(pattern1815, replacement1815)
pattern1816 = Pattern(Integral(u_/(sqrt(x_*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1)) + sqrt(x_*WC('d', S(1)) + WC('c', S(0)))*WC('f', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons73, cons1039)
rule1816 = ReplacementRule(pattern1816, replacement1816)
pattern1817 = Pattern(Integral(u_/(sqrt(x_*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1)) + sqrt(x_*WC('d', S(1)) + WC('c', S(0)))*WC('f', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons1040, cons1041)
rule1817 = ReplacementRule(pattern1817, replacement1817)
pattern1818 = Pattern(Integral(WC('u', S(1))/(x_**WC('n', S(1))*WC('d', S(1)) + sqrt(x_**WC('p', S(1))*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1))), x_), cons2, cons3, cons8, cons29, cons4, cons1042, cons1043)
rule1818 = ReplacementRule(pattern1818, replacement1818)
pattern1819 = Pattern(Integral(x_**WC('m', S(1))/(x_**WC('n', S(1))*WC('d', S(1)) + sqrt(x_**WC('p', S(1))*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1))), x_), cons2, cons3, cons8, cons29, cons19, cons4, cons1042, cons1044)
rule1819 = ReplacementRule(pattern1819, replacement1819)
pattern1820 = Pattern(Integral(S(1)/((a_ + x_**S(3)*WC('b', S(1)))*sqrt(x_**S(2)*WC('f', S(1)) + x_*WC('e', S(1)) + WC('d', S(0)))), x_), cons2, cons3, cons29, cons50, cons127, cons470)
rule1820 = ReplacementRule(pattern1820, With1820)
pattern1821 = Pattern(Integral(S(1)/((a_ + x_**S(3)*WC('b', S(1)))*sqrt(x_**S(2)*WC('f', S(1)) + WC('d', S(0)))), x_), cons2, cons3, cons29, cons127, cons470)
rule1821 = ReplacementRule(pattern1821, With1821)
pattern1822 = Pattern(Integral(S(1)/((a_ + x_**S(3)*WC('b', S(1)))*sqrt(x_**S(2)*WC('f', S(1)) + x_*WC('e', S(1)) + WC('d', S(0)))), x_), cons2, cons3, cons29, cons50, cons127, cons471)
rule1822 = ReplacementRule(pattern1822, With1822)
pattern1823 = Pattern(Integral(S(1)/((a_ + x_**S(3)*WC('b', S(1)))*sqrt(x_**S(2)*WC('f', S(1)) + WC('d', S(0)))), x_), cons2, cons3, cons29, cons127, cons471)
rule1823 = ReplacementRule(pattern1823, With1823)
pattern1824 = Pattern(Integral(S(1)/((d_ + x_*WC('e', S(1)))*sqrt(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons1045)
rule1824 = ReplacementRule(pattern1824, replacement1824)
pattern1825 = Pattern(Integral(S(1)/(sqrt(a_ + x_**S(4)*WC('c', S(1)))*(d_ + x_*WC('e', S(1)))), x_), cons2, cons8, cons29, cons50, cons299)
rule1825 = ReplacementRule(pattern1825, replacement1825)
pattern1826 = Pattern(Integral(S(1)/((d_ + x_*WC('e', S(1)))**S(2)*sqrt(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons1046, cons1047)
rule1826 = ReplacementRule(pattern1826, replacement1826)
pattern1827 = Pattern(Integral(S(1)/((d_ + x_*WC('e', S(1)))**S(2)*sqrt(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons1046, cons1048)
rule1827 = ReplacementRule(pattern1827, replacement1827)
pattern1828 = Pattern(Integral(S(1)/(sqrt(a_ + x_**S(4)*WC('c', S(1)))*(d_ + x_*WC('e', S(1)))**S(2)), x_), cons2, cons8, cons29, cons50, cons1049)
rule1828 = ReplacementRule(pattern1828, replacement1828)
pattern1829 = Pattern(Integral((A_ + x_**S(2)*WC('B', S(1)))/((d_ + x_**S(2)*WC('e', S(1)))*sqrt(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons36, cons37, cons1050, cons707)
rule1829 = ReplacementRule(pattern1829, replacement1829)
pattern1830 = Pattern(Integral((A_ + x_**S(2)*WC('B', S(1)))/(sqrt(a_ + x_**S(4)*WC('c', S(1)))*(d_ + x_**S(2)*WC('e', S(1)))), x_), cons2, cons8, cons29, cons50, cons36, cons37, cons1050, cons707)
rule1830 = ReplacementRule(pattern1830, replacement1830)
pattern1831 = Pattern(Integral((A_ + x_**S(4)*WC('B', S(1)))/(sqrt(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))*(d_ + x_**S(4)*WC('f', S(1)) + x_**S(2)*WC('e', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons36, cons37, cons384, cons1051)
rule1831 = ReplacementRule(pattern1831, replacement1831)
pattern1832 = Pattern(Integral((A_ + x_**S(4)*WC('B', S(1)))/(sqrt(a_ + x_**S(4)*WC('c', S(1)))*(d_ + x_**S(4)*WC('f', S(1)) + x_**S(2)*WC('e', S(1)))), x_), cons2, cons8, cons29, cons50, cons127, cons36, cons37, cons384, cons1051)
rule1832 = ReplacementRule(pattern1832, replacement1832)
pattern1833 = Pattern(Integral((A_ + x_**S(4)*WC('B', S(1)))/((d_ + x_**S(4)*WC('f', S(1)))*sqrt(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))), x_), cons2, cons3, cons8, cons29, cons127, cons36, cons37, cons384, cons1051)
rule1833 = ReplacementRule(pattern1833, replacement1833)
pattern1834 = Pattern(Integral(sqrt(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))/(d_ + x_**S(4)*WC('e', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons1052, cons699)
rule1834 = ReplacementRule(pattern1834, replacement1834)
pattern1835 = Pattern(Integral(sqrt(a_ + x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)))/(d_ + x_**S(4)*WC('e', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons1052, cons711)
rule1835 = ReplacementRule(pattern1835, With1835)
pattern1836 = Pattern(Integral(S(1)/((a_ + x_*WC('b', S(1)))*sqrt(c_ + x_**S(2)*WC('d', S(1)))*sqrt(e_ + x_**S(2)*WC('f', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons155)
rule1836 = ReplacementRule(pattern1836, replacement1836)
pattern1837 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))*sqrt(x_*WC('e', S(1)) + sqrt(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*WC('f', S(1)) + WC('d', S(0))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons211, cons1053, cons1054)
rule1837 = ReplacementRule(pattern1837, replacement1837)
pattern1838 = Pattern(Integral((u_ + (sqrt(v_)*WC('k', S(1)) + WC('j', S(0)))*WC('f', S(1)))**WC('n', S(1))*(x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1)), x_), cons127, cons210, cons211, cons798, cons799, cons19, cons4, cons70, cons820, cons1055, cons1056)
rule1838 = ReplacementRule(pattern1838, replacement1838)
pattern1839 = Pattern(Integral(((x_*WC('e', S(1)) + sqrt(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*WC('f', S(1)) + WC('d', S(0)))**n_*WC('h', S(1)) + WC('g', S(0)))**WC('p', S(1)), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons211, cons4, cons1057, cons40)
rule1839 = ReplacementRule(pattern1839, replacement1839)
pattern1840 = Pattern(Integral(((x_*WC('e', S(1)) + sqrt(a_ + x_**S(2)*WC('c', S(1)))*WC('f', S(1)) + WC('d', S(0)))**n_*WC('h', S(1)) + WC('g', S(0)))**WC('p', S(1)), x_), cons2, cons8, cons29, cons50, cons127, cons210, cons211, cons4, cons1057, cons40)
rule1840 = ReplacementRule(pattern1840, replacement1840)
pattern1841 = Pattern(Integral(((u_ + sqrt(v_)*WC('f', S(1)))**n_*WC('h', S(1)) + WC('g', S(0)))**WC('p', S(1)), x_), cons127, cons210, cons211, cons4, cons70, cons820, cons821, cons1058, cons40)
rule1841 = ReplacementRule(pattern1841, replacement1841)
pattern1842 = Pattern(Integral((x_*WC('e', S(1)) + sqrt(x_**S(2)*WC('c', S(1)) + WC('a', S(0)))*WC('f', S(1)))**WC('n', S(1))*(x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1)), x_), cons2, cons8, cons50, cons127, cons210, cons211, cons4, cons1057, cons20)
rule1842 = ReplacementRule(pattern1842, replacement1842)
pattern1843 = Pattern(Integral(x_**WC('p', S(1))*(g_ + x_**S(2)*WC('i', S(1)))**WC('m', S(1))*(x_*WC('e', S(1)) + sqrt(a_ + x_**S(2)*WC('c', S(1)))*WC('f', S(1)))**WC('n', S(1)), x_), cons2, cons8, cons50, cons127, cons210, cons226, cons4, cons1057, cons1059, cons1060, cons1061)
rule1843 = ReplacementRule(pattern1843, replacement1843)
pattern1844 = Pattern(Integral((x_*WC('e', S(1)) + sqrt(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*WC('f', S(1)) + WC('d', S(0)))**WC('n', S(1))*(x_**S(2)*WC('i', S(1)) + x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1)), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons211, cons226, cons4, cons1057, cons1059, cons1062, cons517, cons1061)
rule1844 = ReplacementRule(pattern1844, replacement1844)
pattern1845 = Pattern(Integral((g_ + x_**S(2)*WC('i', S(1)))**WC('m', S(1))*(x_*WC('e', S(1)) + sqrt(a_ + x_**S(2)*WC('c', S(1)))*WC('f', S(1)) + WC('d', S(0)))**WC('n', S(1)), x_), cons2, cons8, cons29, cons50, cons127, cons210, cons226, cons4, cons1057, cons1059, cons517, cons1061)
rule1845 = ReplacementRule(pattern1845, replacement1845)
pattern1846 = Pattern(Integral((x_*WC('e', S(1)) + sqrt(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*WC('f', S(1)) + WC('d', S(0)))**WC('n', S(1))*(x_**S(2)*WC('i', S(1)) + x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1)), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons211, cons226, cons4, cons1057, cons1059, cons1062, cons75, cons1063)
rule1846 = ReplacementRule(pattern1846, replacement1846)
pattern1847 = Pattern(Integral((g_ + x_**S(2)*WC('i', S(1)))**WC('m', S(1))*(x_*WC('e', S(1)) + sqrt(a_ + x_**S(2)*WC('c', S(1)))*WC('f', S(1)) + WC('d', S(0)))**WC('n', S(1)), x_), cons2, cons8, cons29, cons50, cons127, cons210, cons226, cons4, cons1057, cons1059, cons75, cons1063)
rule1847 = ReplacementRule(pattern1847, replacement1847)
pattern1848 = Pattern(Integral((x_*WC('e', S(1)) + sqrt(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*WC('f', S(1)) + WC('d', S(0)))**WC('n', S(1))*(x_**S(2)*WC('i', S(1)) + x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1)), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons211, cons226, cons4, cons1057, cons1059, cons1062, cons953, cons1063)
rule1848 = ReplacementRule(pattern1848, replacement1848)
pattern1849 = Pattern(Integral((g_ + x_**S(2)*WC('i', S(1)))**WC('m', S(1))*(x_*WC('e', S(1)) + sqrt(a_ + x_**S(2)*WC('c', S(1)))*WC('f', S(1)) + WC('d', S(0)))**WC('n', S(1)), x_), cons2, cons8, cons29, cons50, cons127, cons210, cons226, cons4, cons1057, cons1059, cons953, cons1063)
rule1849 = ReplacementRule(pattern1849, replacement1849)
pattern1850 = Pattern(Integral(w_**WC('m', S(1))*(u_ + (sqrt(v_)*WC('k', S(1)) + WC('j', S(0)))*WC('f', S(1)))**WC('n', S(1)), x_), cons127, cons798, cons799, cons19, cons4, cons70, cons1064, cons1065, cons1066)
rule1850 = ReplacementRule(pattern1850, replacement1850)
pattern1851 = Pattern(Integral(S(1)/((a_ + x_**WC('n', S(1))*WC('b', S(1)))*sqrt(x_**S(2)*WC('c', S(1)) + (a_ + x_**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1))*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons4, cons1067)
rule1851 = ReplacementRule(pattern1851, replacement1851)
pattern1852 = Pattern(Integral(sqrt(a_ + sqrt(c_ + x_**S(2)*WC('d', S(1)))*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons1068)
rule1852 = ReplacementRule(pattern1852, replacement1852)
pattern1853 = Pattern(Integral(sqrt(x_**S(2)*WC('a', S(1)) + x_*sqrt(c_ + x_**S(2)*WC('d', S(1)))*WC('b', S(1)))/(x_*sqrt(c_ + x_**S(2)*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons1069, cons1070)
rule1853 = ReplacementRule(pattern1853, replacement1853)
pattern1854 = Pattern(Integral(sqrt(x_*(x_*WC('a', S(1)) + sqrt(c_ + x_**S(2)*WC('d', S(1)))*WC('b', S(1)))*WC('e', S(1)))/(x_*sqrt(c_ + x_**S(2)*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons1069, cons1071)
rule1854 = ReplacementRule(pattern1854, replacement1854)
pattern1855 = Pattern(Integral(sqrt(x_**S(2)*WC('c', S(1)) + sqrt(a_ + x_**S(4)*WC('b', S(1)))*WC('d', S(1)))/sqrt(a_ + x_**S(4)*WC('b', S(1))), x_), cons2, cons3, cons8, cons29, cons1072)
rule1855 = ReplacementRule(pattern1855, replacement1855)
pattern1856 = Pattern(Integral((x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1))*sqrt(x_**S(2)*WC('b', S(1)) + sqrt(a_ + x_**S(4)*WC('e', S(1))))/sqrt(a_ + x_**S(4)*WC('e', S(1))), x_), cons2, cons3, cons8, cons29, cons19, cons1073, cons45)
rule1856 = ReplacementRule(pattern1856, replacement1856)
pattern1857 = Pattern(Integral(S(1)/(sqrt(a_ + x_**S(3)*WC('b', S(1)))*(c_ + x_*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons483, cons481)
rule1857 = ReplacementRule(pattern1857, With1857)
pattern1858 = Pattern(Integral(S(1)/(sqrt(a_ + x_**S(3)*WC('b', S(1)))*(c_ + x_*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons483, cons482)
rule1858 = ReplacementRule(pattern1858, With1858)
pattern1859 = Pattern(Integral(S(1)/(sqrt(a_ + x_**S(3)*WC('b', S(1)))*(c_ + x_*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons484, cons481)
rule1859 = ReplacementRule(pattern1859, With1859)
pattern1860 = Pattern(Integral(S(1)/(sqrt(a_ + x_**S(3)*WC('b', S(1)))*(c_ + x_*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons484, cons482)
rule1860 = ReplacementRule(pattern1860, With1860)
pattern1861 = Pattern(Integral((e_ + x_*WC('f', S(1)))/(sqrt(a_ + x_**S(3)*WC('b', S(1)))*(c_ + x_*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons483, cons481, CustomConstraint(With1861))
rule1861 = ReplacementRule(pattern1861, replacement1861)
pattern1862 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))/(sqrt(a_ + x_**S(3)*WC('b', S(1)))*(c_ + x_*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons483, cons481, CustomConstraint(With1862))
rule1862 = ReplacementRule(pattern1862, replacement1862)
pattern1863 = Pattern(Integral((e_ + x_*WC('f', S(1)))/(sqrt(a_ + x_**S(3)*WC('b', S(1)))*(c_ + x_*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons483, cons482, CustomConstraint(With1863))
rule1863 = ReplacementRule(pattern1863, replacement1863)
pattern1864 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))/(sqrt(a_ + x_**S(3)*WC('b', S(1)))*(c_ + x_*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons483, cons482, CustomConstraint(With1864))
rule1864 = ReplacementRule(pattern1864, replacement1864)
pattern1865 = Pattern(Integral((e_ + x_*WC('f', S(1)))/(sqrt(a_ + x_**S(3)*WC('b', S(1)))*(c_ + x_*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons484, cons481, CustomConstraint(With1865))
rule1865 = ReplacementRule(pattern1865, replacement1865)
pattern1866 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))/(sqrt(a_ + x_**S(3)*WC('b', S(1)))*(c_ + x_*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons484, cons481, CustomConstraint(With1866))
rule1866 = ReplacementRule(pattern1866, replacement1866)
pattern1867 = Pattern(Integral((e_ + x_*WC('f', S(1)))/(sqrt(a_ + x_**S(3)*WC('b', S(1)))*(c_ + x_*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons484, cons482, CustomConstraint(With1867))
rule1867 = ReplacementRule(pattern1867, replacement1867)
pattern1868 = Pattern(Integral((x_*WC('f', S(1)) + WC('e', S(0)))/(sqrt(a_ + x_**S(3)*WC('b', S(1)))*(c_ + x_*WC('d', S(1)))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons484, cons482, CustomConstraint(With1868))
rule1868 = ReplacementRule(pattern1868, replacement1868)
pattern1869 = Pattern(Integral(x_**WC('m', S(1))/(c_ + x_**n_*WC('d', S(1)) + sqrt(a_ + x_**n_*WC('b', S(1)))*WC('e', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons19, cons4, cons1074, cons502)
rule1869 = ReplacementRule(pattern1869, replacement1869)
pattern1870 = Pattern(Integral(WC('u', S(1))/(c_ + x_**n_*WC('d', S(1)) + sqrt(a_ + x_**n_*WC('b', S(1)))*WC('e', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons4, cons1074)
rule1870 = ReplacementRule(pattern1870, replacement1870)
pattern1871 = Pattern(Integral((A_ + x_**n_*WC('B', S(1)))/(a_ + x_**S(2)*WC('b', S(1)) + x_**n2_*WC('d', S(1)) + x_**n_*WC('c', S(1))), x_), cons2, cons3, cons8, cons29, cons36, cons37, cons4, cons48, cons965, cons1075, cons1076)
rule1871 = ReplacementRule(pattern1871, replacement1871)
pattern1872 = Pattern(Integral(x_**WC('m', S(1))*(A_ + x_**WC('n', S(1))*WC('B', S(1)))/(a_ + x_**n2_*WC('d', S(1)) + x_**WC('k', S(1))*WC('b', S(1)) + x_**WC('n', S(1))*WC('c', S(1))), x_), cons2, cons3, cons8, cons29, cons36, cons37, cons19, cons4, cons48, cons1077, cons1078, cons1079)
rule1872 = ReplacementRule(pattern1872, replacement1872)
pattern1873 = Pattern(Integral((a_ + x_**n2_*WC('c', S(1)) + x_**n_*WC('b', S(1)))**p_*(d_ + g_*x_**n3_ + x_**n2_*WC('f', S(1)) + x_**n_*WC('e', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons4, cons48, cons933, cons228, cons704)
rule1873 = ReplacementRule(pattern1873, replacement1873)
pattern1874 = Pattern(Integral((a_ + x_**n2_*WC('c', S(1)) + x_**n_*WC('b', S(1)))**p_*(d_ + x_**n2_*WC('f', S(1)) + x_**n_*WC('e', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons4, cons48, cons228, cons704)
rule1874 = ReplacementRule(pattern1874, replacement1874)
pattern1875 = Pattern(Integral((a_ + x_**n2_*WC('c', S(1)) + x_**n_*WC('b', S(1)))**p_*(d_ + g_*x_**n3_ + x_**n_*WC('e', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons210, cons4, cons48, cons933, cons228, cons704)
rule1875 = ReplacementRule(pattern1875, replacement1875)
pattern1876 = Pattern(Integral((a_ + x_**n2_*WC('c', S(1)) + x_**n_*WC('b', S(1)))**p_*(d_ + g_*x_**n3_ + x_**n2_*WC('f', S(1))), x_), cons2, cons3, cons8, cons29, cons127, cons210, cons4, cons48, cons933, cons228, cons704)
rule1876 = ReplacementRule(pattern1876, replacement1876)
pattern1877 = Pattern(Integral((d_ + x_**n2_*WC('f', S(1)))*(a_ + x_**n2_*WC('c', S(1)) + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons127, cons4, cons48, cons228, cons704)
rule1877 = ReplacementRule(pattern1877, replacement1877)
pattern1878 = Pattern(Integral((d_ + g_*x_**n3_)*(a_ + x_**n2_*WC('c', S(1)) + x_**n_*WC('b', S(1)))**p_, x_), cons2, cons3, cons8, cons29, cons210, cons4, cons48, cons933, cons228, cons704)
rule1878 = ReplacementRule(pattern1878, replacement1878)
pattern1879 = Pattern(Integral((a_ + x_**n2_*WC('c', S(1)))**p_*(d_ + g_*x_**n3_ + x_**n2_*WC('f', S(1)) + x_**n_*WC('e', S(1))), x_), cons2, cons8, cons29, cons50, cons127, cons210, cons4, cons48, cons933, cons704)
rule1879 = ReplacementRule(pattern1879, replacement1879)
pattern1880 = Pattern(Integral((a_ + x_**n2_*WC('c', S(1)))**p_*(d_ + x_**n2_*WC('f', S(1)) + x_**n_*WC('e', S(1))), x_), cons2, cons8, cons29, cons50, cons127, cons4, cons48, cons704)
rule1880 = ReplacementRule(pattern1880, replacement1880)
pattern1881 = Pattern(Integral((a_ + x_**n2_*WC('c', S(1)))**p_*(d_ + g_*x_**n3_ + x_**n_*WC('e', S(1))), x_), cons2, cons8, cons29, cons50, cons210, cons4, cons48, cons933, cons704)
rule1881 = ReplacementRule(pattern1881, replacement1881)
pattern1882 = Pattern(Integral((x_**S(4)*WC('c', S(1)) + x_**S(2)*WC('b', S(1)) + WC('a', S(0)))/(d_ + x_**S(6)*WC('g', S(1)) + x_**S(4)*WC('f', S(1)) + x_**S(2)*WC('e', S(1))), x_), cons2, cons3, cons8, cons29, cons50, cons127, cons210, cons1080, cons1081, cons1082, cons1083, cons1084, cons1085)
rule1882 = ReplacementRule(pattern1882, With1882)
pattern1883 = Pattern(Integral((x_**S(4)*WC('c', S(1)) + WC('a', S(0)))/(d_ + x_**S(6)*WC('g', S(1)) + x_**S(4)*WC('f', S(1)) + x_**S(2)*WC('e', S(1))), x_), cons2, cons8, cons29, cons50, cons127, cons210, cons1086, cons1087, cons1082, cons1088)
rule1883 = ReplacementRule(pattern1883, With1883)
pattern1884 = Pattern(Integral(u_*v_**p_, x_), cons13, cons139, cons806, cons1019, cons1089, cons1090, cons1091, CustomConstraint(With1884))
rule1884 = ReplacementRule(pattern1884, replacement1884)
return [rule1476, rule1477, rule1478, rule1479, rule1480, rule1481, rule1482, rule1483, rule1484, rule1485, rule1486, rule1487, rule1488, rule1489, rule1490, rule1491, rule1492, rule1493, rule1494, rule1495, rule1496, rule1497, rule1498, rule1499, rule1500, rule1501, rule1502, rule1503, rule1504, rule1505, rule1506, rule1507, rule1508, rule1509, rule1510, rule1511, rule1512, rule1513, rule1514, rule1515, rule1516, rule1517, rule1518, rule1519, rule1520, rule1521, rule1522, rule1523, rule1524, rule1525, rule1526, rule1527, rule1528, rule1529, rule1530, rule1531, rule1532, rule1533, rule1534, rule1535, rule1536, rule1537, rule1538, rule1539, rule1540, rule1541, rule1542, rule1543, rule1544, rule1545, rule1546, rule1547, rule1548, rule1549, rule1550, rule1551, rule1552, rule1553, rule1554, rule1555, rule1556, rule1557, rule1558, rule1559, rule1560, rule1561, rule1562, rule1563, rule1564, rule1565, rule1566, rule1567, rule1568, rule1569, rule1570, rule1571, rule1572, rule1573, rule1574, rule1575, rule1576, rule1577, rule1578, rule1579, rule1580, rule1581, rule1582, rule1583, rule1584, rule1585, rule1586, rule1587, rule1588, rule1589, rule1590, rule1591, rule1592, rule1593, rule1594, rule1595, rule1596, rule1597, rule1598, rule1599, rule1600, rule1601, rule1602, rule1603, rule1604, rule1605, rule1606, rule1607, rule1608, rule1609, rule1610, rule1611, rule1612, rule1613, rule1614, rule1615, rule1616, rule1617, rule1618, rule1619, rule1620, rule1621, rule1622, rule1623, rule1624, rule1625, rule1626, rule1627, rule1628, rule1629, rule1630, rule1631, rule1632, rule1633, rule1634, rule1635, rule1636, rule1637, rule1638, rule1639, rule1640, rule1641, rule1642, rule1643, rule1644, rule1645, rule1646, rule1647, rule1648, rule1649, rule1650, rule1651, rule1652, rule1653, rule1654, rule1655, rule1656, rule1657, rule1658, rule1659, rule1660, rule1661, rule1662, rule1663, rule1664, rule1665, rule1666, rule1667, rule1668, rule1669, rule1670, rule1671, rule1672, rule1673, rule1674, 
rule1675, rule1676, rule1677, rule1678, rule1679, rule1680, rule1681, rule1682, rule1683, rule1684, rule1685, rule1686, rule1687, rule1688, rule1689, rule1690, rule1691, rule1692, rule1693, rule1694, rule1695, rule1696, rule1697, rule1698, rule1699, rule1700, rule1701, rule1702, rule1703, rule1704, rule1705, rule1706, rule1707, rule1708, rule1709, rule1710, rule1711, rule1712, rule1713, rule1714, rule1715, rule1716, rule1717, rule1718, rule1719, rule1720, rule1721, rule1722, rule1723, rule1724, rule1725, rule1726, rule1727, rule1728, rule1729, rule1730, rule1731, rule1732, rule1733, rule1734, rule1735, rule1736, rule1737, rule1738, rule1739, rule1740, rule1741, rule1742, rule1743, rule1744, rule1745, rule1746, rule1747, rule1748, rule1749, rule1750, rule1751, rule1752, rule1753, rule1754, rule1755, rule1756, rule1757, rule1758, rule1759, rule1760, rule1761, rule1762, rule1763, rule1764, rule1765, rule1766, rule1767, rule1768, rule1769, rule1770, rule1771, rule1772, rule1773, rule1774, rule1775, rule1776, rule1777, rule1778, rule1779, rule1780, rule1781, rule1782, rule1783, rule1784, rule1785, rule1786, rule1787, rule1788, rule1789, rule1790, rule1791, rule1792, rule1793, rule1794, rule1795, rule1796, rule1797, rule1798, rule1799, rule1800, rule1801, rule1802, rule1803, rule1804, rule1805, rule1806, rule1807, rule1808, rule1809, rule1810, rule1811, rule1812, rule1813, rule1814, rule1815, rule1816, rule1817, rule1818, rule1819, rule1820, rule1821, rule1822, rule1823, rule1824, rule1825, rule1826, rule1827, rule1828, rule1829, rule1830, rule1831, rule1832, rule1833, rule1834, rule1835, rule1836, rule1837, rule1838, rule1839, rule1840, rule1841, rule1842, rule1843, rule1844, rule1845, rule1846, rule1847, rule1848, rule1849, rule1850, rule1851, rule1852, rule1853, rule1854, rule1855, rule1856, rule1857, rule1858, rule1859, rule1860, rule1861, rule1862, rule1863, rule1864, rule1865, rule1866, rule1867, rule1868, rule1869, rule1870, rule1871, rule1872, rule1873, rule1874, 
rule1875, rule1876, rule1877, rule1878, rule1879, rule1880, rule1881, rule1882, rule1883, rule1884, ]
# --- Auto-generated RUBI (rule-based integration) replacement bodies. ---
# Each replacement*/With* function below is the right-hand side of the
# correspondingly numbered ReplacementRule defined earlier in this file.
# Helpers (Dist, Subst, Int, Simp, Denominator, ...) come from the rubi
# utility modules, not visible in this chunk.
# NOTE(review): this code is machine-generated from the Mathematica Rubi
# rule set -- do not hand-edit the symbolic expressions.

# (c*x**n)**(1/n)-substitution: rewrites the integral in terms of
# u = (c*x**n)**(1/n), with a compensating constant factor.
def replacement1476(a, b, c, n, p, q, x):
    return Dist(x*(c*x**n)**(-S(1)/n), Subst(Int((a + b*x**(n*q))**p, x), x, (c*x**n)**(S(1)/n)), x)
# Same substitution as 1476, carrying an extra x**m power through.
def replacement1477(a, b, c, m, n, p, q, x):
    return Dist(x**(m + S(1))*(c*x**n)**(-(m + S(1))/n), Subst(Int(x**m*(a + b*x**(n*q))**p, x), x, (c*x**n)**(S(1)/n)), x)
# Pulls (e*(...))**p and (f*(...))**q apart into a constant prefactor times
# bare (a+b*x**n)**(p*r)*(c+d*x**n)**(q*s) powers.
def replacement1478(a, b, c, d, e, f, m, n, p, q, r, s, x):
    return Dist((e*(a + b*x**n)**r)**p*(f*(c + d*x**n)**s)**q*(a + b*x**n)**(-p*r)*(c + d*x**n)**(-q*s), Int(x**m*(a + b*x**n)**(p*r)*(c + d*x**n)**(q*s), x), x)
# Degenerate ratio case: the quotient reduces to the constant (b*e/d)**p.
def replacement1479(a, b, c, d, e, n, p, u, x):
    return Dist((b*e/d)**p, Int(u, x), x)
# Rewrites the power of a quotient as an explicit product of powers.
def replacement1480(a, b, c, d, e, n, p, u, x):
    return Int(u*(e*(a + b*x**n))**p*(c + d*x**n)**(-p), x)
# With1481-With1484: substitute u = (e*(a+b*x**n)/(c+d*x**n))**(1/q) with
# q = Denominator(p) to rationalize the fractional power p.
def With1481(a, b, c, d, e, n, p, x):
    q = Denominator(p)
    return Dist(e*q*(-a*d + b*c)/n, Subst(Int(x**(q*(p + S(1)) + S(-1))*(-a*e + c*x**q)**(S(-1) + S(1)/n)*(b*e - d*x**q)**(S(-1) - S(1)/n), x), x, (e*(a + b*x**n)/(c + d*x**n))**(S(1)/q)), x)
# As With1481, with the exponents adjusted for an extra x**m factor.
def With1482(a, b, c, d, e, m, n, p, x):
    q = Denominator(p)
    return Dist(e*q*(-a*d + b*c)/n, Subst(Int(x**(q*(p + S(1)) + S(-1))*(-a*e + c*x**q)**(S(-1) + (m + S(1))/n)*(b*e - d*x**q)**(S(-1) - (m + S(1))/n), x), x, (e*(a + b*x**n)/(c + d*x**n))**(S(1)/q)), x)
# As With1481, additionally rewriting a residual factor u**r under the
# same substitution via ReplaceAll before integrating.
def With1483(a, b, c, d, e, n, p, r, u, x):
    q = Denominator(p)
    return Dist(e*q*(-a*d + b*c)/n, Subst(Int(SimplifyIntegrand(x**(q*(p + S(1)) + S(-1))*(-a*e + c*x**q)**(S(-1) + S(1)/n)*(b*e - d*x**q)**(S(-1) - S(1)/n)*ReplaceAll(u, Rule(x, (-a*e + c*x**q)**(S(1)/n)*(b*e - d*x**q)**(-S(1)/n)))**r, x), x), x, (e*(a + b*x**n)/(c + d*x**n))**(S(1)/q)), x)
# Combines the x**m handling of With1482 with the u**r handling of With1483.
def With1484(a, b, c, d, e, m, n, p, r, u, x):
    q = Denominator(p)
    return Dist(e*q*(-a*d + b*c)/n, Subst(Int(SimplifyIntegrand(x**(q*(p + S(1)) + S(-1))*(-a*e + c*x**q)**(S(-1) + (m + S(1))/n)*(b*e - d*x**q)**(S(-1) - (m + S(1))/n)*ReplaceAll(u, Rule(x, (-a*e + c*x**q)**(S(1)/n)*(b*e - d*x**q)**(-S(1)/n)))**r, x), x), x, (e*(a + b*x**n)/(c + d*x**n))**(S(1)/q)), x)
# replacement1485-replacement1493: inversion substitutions.  The integration
# variable is replaced by its reciprocal scaled by a constant (x -> c/x or
# x -> d/x), turning negative-power integrands into the standard
# (a + b*x**n [+ c*x**(2n)])**p forms; the leading minus sign and the /x**2
# (or x**(-m-2)) factor are the Jacobian of that substitution.
def replacement1485(a, b, c, n, p, x):
    return -Dist(c, Subst(Int((a + b*x**n)**p/x**S(2), x), x, c/x), x)
def replacement1486(a, b, c, m, n, p, x):
    return -Dist(c**(m + S(1)), Subst(Int(x**(-m + S(-2))*(a + b*x**n)**p, x), x, c/x), x)
# As 1486, but with a non-integer m handled via the (c/x)**m*(d*x)**m factor.
def replacement1487(a, b, c, d, m, n, p, x):
    return -Dist(c*(c/x)**m*(d*x)**m, Subst(Int(x**(-m + S(-2))*(a + b*x**n)**p, x), x, c/x), x)
# Trinomial (quadratic-in-x**n) variants of 1485-1487, substituting x -> d/x.
def replacement1488(a, b, c, d, n, n2, p, x):
    return -Dist(d, Subst(Int((a + b*x**n + c*x**(S(2)*n))**p/x**S(2), x), x, d/x), x)
def replacement1489(a, b, c, d, m, n, n2, p, x):
    return -Dist(d**(m + S(1)), Subst(Int(x**(-m + S(-2))*(a + b*x**n + c*x**(S(2)*n))**p, x), x, d/x), x)
def replacement1490(a, b, c, d, e, m, n, n2, p, x):
    return -Dist(d*(d/x)**m*(e*x)**m, Subst(Int(x**(-m + S(-2))*(a + b*x**n + c*x**(S(2)*n))**p, x), x, d/x), x)
# 1491-1493: as 1488-1490, with the top coefficient rescaled by d**(-2n)
# to absorb the substitution constant.
def replacement1491(a, b, c, d, n, n2, p, x):
    return -Dist(d, Subst(Int((a + b*x**n + c*d**(-S(2)*n)*x**(S(2)*n))**p/x**S(2), x), x, d/x), x)
def replacement1492(a, b, c, d, m, n, n2, p, x):
    return -Dist(d**(m + S(1)), Subst(Int(x**(-m + S(-2))*(a + b*x**n + c*d**(-S(2)*n)*x**(S(2)*n))**p, x), x, d/x), x)
def replacement1493(a, b, c, d, e, m, n, n2, p, x):
    return -Dist(d*(d/x)**m*(e*x)**m, Subst(Int(x**(-m + S(-2))*(a + b*x**n + c*d**(-S(2)*n)*x**(S(2)*n))**p, x), x, d/x), x)
# replacement1494-replacement1519: normalization wrappers.  Each one simply
# canonicalizes its polynomial operands with ExpandToSum and re-dispatches
# to Int, so a later rule can match the normalized form.  They differ only
# in how many operands (u, v, w, z) and exponents (m, n, p, q, r) they carry
# and whether a monomial prefactor x**m / (c*x)**m / (d*x)**m is present.
def replacement1494(m, u, x):
    return Int(ExpandToSum(u, x)**m, x)
def replacement1495(m, n, u, v, x):
    return Int(ExpandToSum(u, x)**m*ExpandToSum(v, x)**n, x)
def replacement1496(m, n, p, u, v, w, x):
    return Int(ExpandToSum(u, x)**m*ExpandToSum(v, x)**n*ExpandToSum(w, x)**p, x)
def replacement1497(m, n, p, q, u, v, w, x, z):
    return Int(ExpandToSum(u, x)**m*ExpandToSum(v, x)**n*ExpandToSum(w, x)**p*ExpandToSum(z, x)**q, x)
def replacement1498(p, u, x):
    return Int(ExpandToSum(u, x)**p, x)
def replacement1499(m, p, u, v, x):
    return Int(ExpandToSum(u, x)**m*ExpandToSum(v, x)**p, x)
def replacement1500(m, n, p, u, v, w, x):
    return Int(ExpandToSum(u, x)**m*ExpandToSum(v, x)**n*ExpandToSum(w, x)**p, x)
def replacement1501(p, q, u, v, x):
    return Int(ExpandToSum(u, x)**p*ExpandToSum(v, x)**q, x)
def replacement1502(p, u, x):
    return Int(ExpandToSum(u, x)**p, x)
def replacement1503(c, m, p, u, x):
    return Int((c*x)**m*ExpandToSum(u, x)**p, x)
def replacement1504(p, q, u, v, x):
    return Int(ExpandToSum(u, x)**p*ExpandToSum(v, x)**q, x)
def replacement1505(m, p, q, u, v, x):
    return Int(x**m*ExpandToSum(u, x)**p*ExpandToSum(v, x)**q, x)
def replacement1506(m, p, q, u, v, w, x):
    return Int(ExpandToSum(u, x)**m*ExpandToSum(v, x)**p*ExpandToSum(w, x)**q, x)
def replacement1507(m, p, q, r, u, v, x, z):
    return Int(x**m*ExpandToSum(u, x)**p*ExpandToSum(v, x)**q*ExpandToSum(z, x)**r, x)
def replacement1508(p, u, x):
    return Int(ExpandToSum(u, x)**p, x)
def replacement1509(m, p, u, x):
    return Int(x**m*ExpandToSum(u, x)**p, x)
def replacement1510(p, u, x):
    return Int(ExpandToSum(u, x)**p, x)
def replacement1511(d, m, p, u, x):
    return Int((d*x)**m*ExpandToSum(u, x)**p, x)
def replacement1512(p, q, u, v, x):
    return Int(ExpandToSum(u, x)**q*ExpandToSum(v, x)**p, x)
def replacement1513(p, q, u, v, x):
    return Int(ExpandToSum(u, x)**q*ExpandToSum(v, x)**p, x)
def replacement1514(m, p, q, u, x, z):
    return Int(x**m*ExpandToSum(u, x)**p*ExpandToSum(z, x)**q, x)
def replacement1515(m, p, q, u, x, z):
    return Int(x**m*ExpandToSum(u, x)**p*ExpandToSum(z, x)**q, x)
def replacement1516(p, u, x):
    return Int(ExpandToSum(u, x)**p, x)
def replacement1517(m, p, u, x):
    return Int(x**m*ExpandToSum(u, x)**p, x)
def replacement1518(p, u, x, z):
    return Int(ExpandToSum(u, x)**p*ExpandToSum(z, x), x)
def replacement1519(m, p, u, x, z):
    return Int(x**m*ExpandToSum(u, x)**p*ExpandToSum(z, x), x)
def replacement1520(a, c, e, f, g, h, m, n, q, r, x):
return -Simp((S(2)*a*g + S(4)*a*h*x**(n/S(4)) - S(2)*c*f*x**(n/S(2)))/(a*c*n*sqrt(a + c*x**n)), x)
def replacement1521(a, c, d, e, f, g, h, m, n, q, r, x):
return Dist(x**(-m)*(d*x)**m, Int(x**m*(e + f*x**(n/S(4)) + g*x**(S(3)*n/S(4)) + h*x**n)/(a + c*x**n)**(S(3)/2), x), x)
def With1522(Pq, a, b, c, m, p, x):
n = Denominator(p)
return Dist(n/b, Subst(Int(x**(n*p + n + S(-1))*(-a*c/b + c*x**n/b)**m*ReplaceAll(Pq, Rule(x, -a/b + x**n/b)), x), x, (a + b*x)**(S(1)/n)), x)
def replacement1523(Pq, a, b, m, n, p, x):
return Dist(S(1)/(m + S(1)), Subst(Int((a + b*x**(n/(m + S(1))))**p*SubstFor(x**(m + S(1)), Pq, x), x), x, x**(m + S(1))), x)
def replacement1524(Pq, a, b, n, p, x):
return Int((a + b*x**n)**p*ExpandToSum(Pq - x**(n + S(-1))*Coeff(Pq, x, n + S(-1)), x), x) + Simp((a + b*x**n)**(p + S(1))*Coeff(Pq, x, n + S(-1))/(b*n*(p + S(1))), x)
def replacement1525(Pq, a, b, c, m, n, p, x):
return Int(ExpandIntegrand(Pq*(c*x)**m*(a + b*x**n)**p, x), x)
def replacement1526(Pq, a, b, n, p, x):
return Int(ExpandIntegrand(Pq*(a + b*x**n)**p, x), x)
# Rubi integration rules 1527-1536: a polynomial Pq(x) times a binomial
# (a + b*x**n)**p.  Each ``replacementNNNN`` returns the rewritten integral
# for the rule of that number; each ``WithNNNN`` either computes the rule's
# local bindings or (when paired with a same-numbered replacement) acts as
# the rule's applicability predicate, returning True/False.
# All helpers (Dist, Int, Subst, Simp, SubstFor, ExpandToSum, ...) come from
# the Rubi utility modules; this file is machine-generated from the Rubi
# rule set, so the exact expression shapes must be preserved.
# Pq is a polynomial in x**n: substitute u = x**n.
def replacement1527(Pq, a, b, m, n, p, x):
return Dist(S(1)/n, Subst(Int(x**(S(-1) + (m + S(1))/n)*(a + b*x)**p*SubstFor(x**n, Pq, x), x), x, x**n), x)
# Normalize (c*x)**m to x**m via the IntPart/FracPart split of m.
def replacement1528(Pq, a, b, c, m, n, p, x):
return Dist(c**IntPart(m)*x**(-FracPart(m))*(c*x)**FracPart(m), Int(Pq*x**m*(a + b*x**n)**p, x), x)
# Integration by parts against D(Pq, x), raising p by one.
def replacement1529(Pq, a, b, m, n, p, x):
return -Dist(S(1)/(b*n*(p + S(1))), Int((a + b*x**n)**(p + S(1))*D(Pq, x), x), x) + Simp(Pq*(a + b*x**n)**(p + S(1))/(b*n*(p + S(1))), x)
# Shift one factor of x out of Pq into (d*x)**(m+1).
def replacement1530(Pq, a, b, d, m, n, p, x):
return Dist(S(1)/d, Int((d*x)**(m + S(1))*(a + b*x**n)**p*ExpandToSum(Pq/x, x), x), x)
# Same shift for the form without an explicit (d*x)**m factor.
def replacement1531(Pq, a, b, n, p, x):
return Int(x*(a + b*x**n)**p*ExpandToSum(Pq/x, x), x)
# Integrate Pq*x**m first (IntHide), then differentiate the binomial factor.
def With1532(Pq, a, b, m, n, p, x):
u = IntHide(Pq*x**m, x)
return -Dist(b*n*p, Int(x**(m + n)*(a + b*x**n)**(p + S(-1))*ExpandToSum(u*x**(-m + S(-1)), x), x), x) + Simp(u*(a + b*x**n)**p, x)
# Term-by-term reduction over the coefficients of Pq ((c*x)**m variant).
def With1533(Pq, a, b, c, m, n, p, x):
q = Expon(Pq, x)
i = Symbol('i')
return Dist(a*n*p, Int((c*x)**m*(a + b*x**n)**(p + S(-1))*Sum_doit(x**i*Coeff(Pq, x, i)/(i + m + n*p + S(1)), List(i, S(0), q)), x), x) + Simp((c*x)**m*(a + b*x**n)**p*Sum_doit(x**(i + S(1))*Coeff(Pq, x, i)/(i + m + n*p + S(1)), List(i, S(0), q)), x)
# Same term-by-term reduction without the (c*x)**m factor.
def With1534(Pq, a, b, n, p, x):
q = Expon(Pq, x)
i = Symbol('i')
return Dist(a*n*p, Int((a + b*x**n)**(p + S(-1))*Sum_doit(x**i*Coeff(Pq, x, i)/(i + n*p + S(1)), List(i, S(0), q)), x), x) + Simp((a + b*x**n)**p*Sum_doit(x**(i + S(1))*Coeff(Pq, x, i)/(i + n*p + S(1)), List(i, S(0), q)), x)
# Predicate for rule 1535: applies only when Expon(Pq, x) == n - 1.
def With1535(Pq, a, b, n, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Expon(Pq, x)
i = Symbol('i')
if Equal(q, n + S(-1)):
return True
return False
# Peel off Pq's leading coefficient, then reduce the remaining terms.
def replacement1535(Pq, a, b, n, p, x):
q = Expon(Pq, x)
i = Symbol('i')
return Dist(S(1)/(a*n*(p + S(1))), Int((a + b*x**n)**(p + S(1))*Sum_doit(x**i*(i + n*(p + S(1)) + S(1))*Coeff(Pq, x, i), List(i, S(0), q + S(-1))), x), x) + Simp((a + b*x**n)**(p + S(1))*(a*Coeff(Pq, x, q) - b*x*ExpandToSum(Pq - x**q*Coeff(Pq, x, q), x))/(a*b*n*(p + S(1))), x)
# Reduction raising p by one, built from D(Pq*x, x).
def replacement1536(Pq, a, b, n, p, x):
return Dist(S(1)/(a*n*(p + S(1))), Int((a + b*x**n)**(p + S(1))*ExpandToSum(Pq*n*(p + S(1)) + D(Pq*x, x), x), x), x) - Simp(Pq*x*(a + b*x**n)**(p + S(1))/(a*n*(p + S(1))), x)
# Rules 1537-1543: closed forms for specific cubic numerators over
# sqrt(a + b*x**4).  Each rule handles one subset of the coefficients
# d, e, f, g, h being present (the absent letters are fixed by the rule's
# match conditions, defined elsewhere).
def replacement1537(a, b, d, e, f, g, x):
return -Simp((S(2)*a*f + S(4)*a*g*x - S(2)*b*e*x**S(2))/(S(4)*a*b*sqrt(a + b*x**S(4))), x)
def replacement1538(a, b, d, f, g, x):
return -Simp((f + S(2)*g*x)/(S(2)*b*sqrt(a + b*x**S(4))), x)
def replacement1539(a, b, d, e, g, x):
return -Simp(x*(S(2)*a*g - b*e*x)/(S(2)*a*b*sqrt(a + b*x**S(4))), x)
def replacement1540(a, b, e, f, h, x):
return -Simp((f - S(2)*h*x**S(3))/(S(2)*b*sqrt(a + b*x**S(4))), x)
def replacement1541(a, b, e, h, x):
return Simp(h*x**S(3)/(b*sqrt(a + b*x**S(4))), x)
def replacement1542(a, b, d, e, f, g, h, x):
return -Simp((a*f - S(2)*a*h*x**S(3) - S(2)*b*d*x)/(S(2)*a*b*sqrt(a + b*x**S(4))), x)
def replacement1543(a, b, d, e, g, h, x):
return Simp(x*(a*h*x**S(2) + b*d)/(a*b*sqrt(a + b*x**S(4))), x)
# Rules 1544-1545: when Expon(Pq) >= n, divide Pq (scaled by a power of b to
# clear denominators) by the binomial; integrate the quotient Q and reduce
# the remainder R term by term.
# Predicate for rule 1544: applies when the degree of Pq is at least n.
def With1544(Pq, a, b, n, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Expon(Pq, x)
Q = PolynomialQuotient(Pq*b**(Floor((q + S(-1))/n) + S(1)), a + b*x**n, x)
R = PolynomialRemainder(Pq*b**(Floor((q + S(-1))/n) + S(1)), a + b*x**n, x)
if GreaterEqual(q, n):
return True
return False
def replacement1544(Pq, a, b, n, p, x):
# Q/R are recomputed here because the predicate's bindings do not carry over.
q = Expon(Pq, x)
Q = PolynomialQuotient(Pq*b**(Floor((q + S(-1))/n) + S(1)), a + b*x**n, x)
R = PolynomialRemainder(Pq*b**(Floor((q + S(-1))/n) + S(1)), a + b*x**n, x)
return Dist(b**(-Floor((q - 1)/n) - 1)/(a*n*(p + 1)), Int((a + b*x**n)**(p + 1)*ExpandToSum(Q*a*n*(p + 1) + R*n*(p + 1) + D(R*x, x), x), x), x) - Simp(R*b**(-Floor((q - 1)/n) - 1)*x*(a + b*x**n)**(p + 1)/(a*n*(p + 1)), x)
# Variant of 1544 with an extra x**m factor folded into the division.
def With1545(Pq, a, b, m, n, p, x):
q = Expon(Pq, x)
Q = PolynomialQuotient(Pq*a*b**(Floor((q + S(-1))/n) + S(1))*x**m, a + b*x**n, x)
R = PolynomialRemainder(Pq*a*b**(Floor((q + S(-1))/n) + S(1))*x**m, a + b*x**n, x)
return Dist(b**(-Floor((q - 1)/n) - 1)/(a*n*(p + 1)), Int(x**m*(a + b*x**n)**(p + 1)*ExpandToSum(Q*n*x**(-m)*(p + 1) + Sum_doit(x**(i - m)*(i + n*(p + 1) + 1)*Coeff(R, x, i)/a, List(i, 0, n - 1)), x), x), x) - Simp(R*b**(-Floor((q - 1)/n) - 1)*x*(a + b*x**n)**(p + 1)/(a**2*n*(p + 1)), x)
# Rule 1546: when g = gcd(m + 1, n) > 1, substitute u = x**g to lower the
# exponents.
# Predicate: applies only when the gcd is not 1.
def With1546(Pq, a, b, m, n, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
g = GCD(m + S(1), n)
if Unequal(g, S(1)):
return True
return False
def replacement1546(Pq, a, b, m, n, p, x):
g = GCD(m + S(1), n)
return Dist(S(1)/g, Subst(Int(x**(S(-1) + (m + S(1))/g)*(a + b*x**(n/g))**p*ReplaceAll(Pq, Rule(x, x**(S(1)/g))), x), x, x**g), x)
# Rules 1547-1549: (A + B*x)/(a + b*x**3) via partial fractions.  r/s are the
# numerator/denominator of the real cube root of +-a/b.
def replacement1547(A, B, a, b, x):
return Dist(B**S(3)/b, Int(S(1)/(A**S(2) - A*B*x + B**S(2)*x**S(2)), x), x)
def With1548(A, B, a, b, x):
r = Numerator(Rt(a/b, S(3)))
s = Denominator(Rt(a/b, S(3)))
return Dist(r/(S(3)*a*s), Int((r*(S(2)*A*s + B*r) + s*x*(-A*s + B*r))/(r**S(2) - r*s*x + s**S(2)*x**S(2)), x), x) - Dist(r*(-A*s + B*r)/(S(3)*a*s), Int(S(1)/(r + s*x), x), x)
# Mirror of 1548 for the negative cube root (-a/b).
def With1549(A, B, a, b, x):
r = Numerator(Rt(-a/b, S(3)))
s = Denominator(Rt(-a/b, S(3)))
return -Dist(r/(S(3)*a*s), Int((r*(-S(2)*A*s + B*r) - s*x*(A*s + B*r))/(r**S(2) + r*s*x + s**S(2)*x**S(2)), x), x) + Dist(r*(A*s + B*r)/(S(3)*a*s), Int(S(1)/(r - s*x), x), x)
# Rules 1550-1574: (A + B*x + C*x**2)/(a + b*x**3) split via a cube root q of
# +-a/b.  The many near-identical With bodies differ only in which of A, B, C
# are present and in how q is formed (a**(1/3)/b**(1/3), (-a)**(1/3)/..., 
# (a/b)**(1/3), Rt(a/b, 3), and sign variants); each combination corresponds
# to one Rubi rule with its own match constraints (defined elsewhere), so the
# duplication is intentional.
def replacement1550(A, B, C, a, b, x):
return -Dist(C**S(2)/b, Int(S(1)/(B - C*x), x), x)
def With1551(A, B, C, a, b, x):
q = a**(S(1)/3)/b**(S(1)/3)
return Dist(C/b, Int(S(1)/(q + x), x), x) + Dist((B + C*q)/b, Int(S(1)/(q**S(2) - q*x + x**S(2)), x), x)
def With1552(B, C, a, b, x):
q = a**(S(1)/3)/b**(S(1)/3)
return Dist(C/b, Int(S(1)/(q + x), x), x) + Dist((B + C*q)/b, Int(S(1)/(q**S(2) - q*x + x**S(2)), x), x)
def With1553(A, C, a, b, x):
q = a**(S(1)/3)/b**(S(1)/3)
return Dist(C/b, Int(S(1)/(q + x), x), x) + Dist(C*q/b, Int(S(1)/(q**S(2) - q*x + x**S(2)), x), x)
def With1554(A, B, C, a, b, x):
q = (-a)**(S(1)/3)/(-b)**(S(1)/3)
return Dist(C/b, Int(S(1)/(q + x), x), x) + Dist((B + C*q)/b, Int(S(1)/(q**S(2) - q*x + x**S(2)), x), x)
def With1555(B, C, a, b, x):
q = (-a)**(S(1)/3)/(-b)**(S(1)/3)
return Dist(C/b, Int(S(1)/(q + x), x), x) + Dist((B + C*q)/b, Int(S(1)/(q**S(2) - q*x + x**S(2)), x), x)
def With1556(A, C, a, b, x):
q = (-a)**(S(1)/3)/(-b)**(S(1)/3)
return Dist(C/b, Int(S(1)/(q + x), x), x) + Dist(C*q/b, Int(S(1)/(q**S(2) - q*x + x**S(2)), x), x)
def With1557(A, B, C, a, b, x):
q = (-a)**(S(1)/3)/b**(S(1)/3)
return -Dist(C/b, Int(S(1)/(q - x), x), x) + Dist((B - C*q)/b, Int(S(1)/(q**S(2) + q*x + x**S(2)), x), x)
def With1558(B, C, a, b, x):
q = (-a)**(S(1)/3)/b**(S(1)/3)
return -Dist(C/b, Int(S(1)/(q - x), x), x) + Dist((B - C*q)/b, Int(S(1)/(q**S(2) + q*x + x**S(2)), x), x)
def With1559(A, C, a, b, x):
q = (-a)**(S(1)/3)/b**(S(1)/3)
return -Dist(C/b, Int(S(1)/(q - x), x), x) - Dist(C*q/b, Int(S(1)/(q**S(2) + q*x + x**S(2)), x), x)
def With1560(A, B, C, a, b, x):
q = a**(S(1)/3)/(-b)**(S(1)/3)
return -Dist(C/b, Int(S(1)/(q - x), x), x) + Dist((B - C*q)/b, Int(S(1)/(q**S(2) + q*x + x**S(2)), x), x)
def With1561(B, C, a, b, x):
q = a**(S(1)/3)/(-b)**(S(1)/3)
return -Dist(C/b, Int(S(1)/(q - x), x), x) + Dist((B - C*q)/b, Int(S(1)/(q**S(2) + q*x + x**S(2)), x), x)
def With1562(A, C, a, b, x):
q = a**(S(1)/3)/(-b)**(S(1)/3)
return -Dist(C/b, Int(S(1)/(q - x), x), x) - Dist(C*q/b, Int(S(1)/(q**S(2) + q*x + x**S(2)), x), x)
def With1563(A, B, C, a, b, x):
q = (a/b)**(S(1)/3)
return Dist(C/b, Int(S(1)/(q + x), x), x) + Dist((B + C*q)/b, Int(S(1)/(q**S(2) - q*x + x**S(2)), x), x)
def With1564(B, C, a, b, x):
q = (a/b)**(S(1)/3)
return Dist(C/b, Int(S(1)/(q + x), x), x) + Dist((B + C*q)/b, Int(S(1)/(q**S(2) - q*x + x**S(2)), x), x)
def With1565(A, C, a, b, x):
q = (a/b)**(S(1)/3)
return Dist(C/b, Int(S(1)/(q + x), x), x) + Dist(C*q/b, Int(S(1)/(q**S(2) - q*x + x**S(2)), x), x)
def With1566(A, B, C, a, b, x):
q = Rt(a/b, S(3))
return Dist(C/b, Int(S(1)/(q + x), x), x) + Dist((B + C*q)/b, Int(S(1)/(q**S(2) - q*x + x**S(2)), x), x)
def With1567(B, C, a, b, x):
q = Rt(a/b, S(3))
return Dist(C/b, Int(S(1)/(q + x), x), x) + Dist((B + C*q)/b, Int(S(1)/(q**S(2) - q*x + x**S(2)), x), x)
def With1568(A, C, a, b, x):
q = Rt(a/b, S(3))
return Dist(C/b, Int(S(1)/(q + x), x), x) + Dist(C*q/b, Int(S(1)/(q**S(2) - q*x + x**S(2)), x), x)
def With1569(A, B, C, a, b, x):
q = (-a/b)**(S(1)/3)
return -Dist(C/b, Int(S(1)/(q - x), x), x) + Dist((B - C*q)/b, Int(S(1)/(q**S(2) + q*x + x**S(2)), x), x)
def With1570(B, C, a, b, x):
q = (-a/b)**(S(1)/3)
return -Dist(C/b, Int(S(1)/(q - x), x), x) + Dist((B - C*q)/b, Int(S(1)/(q**S(2) + q*x + x**S(2)), x), x)
def With1571(A, C, a, b, x):
q = (-a/b)**(S(1)/3)
return -Dist(C/b, Int(S(1)/(q - x), x), x) - Dist(C*q/b, Int(S(1)/(q**S(2) + q*x + x**S(2)), x), x)
def With1572(A, B, C, a, b, x):
q = Rt(-a/b, S(3))
return -Dist(C/b, Int(S(1)/(q - x), x), x) + Dist((B - C*q)/b, Int(S(1)/(q**S(2) + q*x + x**S(2)), x), x)
def With1573(B, C, a, b, x):
q = Rt(-a/b, S(3))
return -Dist(C/b, Int(S(1)/(q - x), x), x) + Dist((B - C*q)/b, Int(S(1)/(q**S(2) + q*x + x**S(2)), x), x)
def With1574(A, C, a, b, x):
q = Rt(-a/b, S(3))
return -Dist(C/b, Int(S(1)/(q - x), x), x) - Dist(C*q/b, Int(S(1)/(q**S(2) + q*x + x**S(2)), x), x)
# Rules 1575-1577: fall back to splitting the numerator term by term over
# (a + b*x**3).
def replacement1575(A, B, C, a, b, x):
return Dist(C, Int(x**S(2)/(a + b*x**S(3)), x), x) + Int((A + B*x)/(a + b*x**S(3)), x)
def replacement1576(B, C, a, b, x):
return Dist(B, Int(x/(a + b*x**S(3)), x), x) + Dist(C, Int(x**S(2)/(a + b*x**S(3)), x), x)
def replacement1577(A, C, a, b, x):
return Dist(A, Int(S(1)/(a + b*x**S(3)), x), x) + Dist(C, Int(x**S(2)/(a + b*x**S(3)), x), x)
# Rules 1578-1583: degenerate cases of the cube-root split where the rule's
# match constraint (defined elsewhere) collapses one of the partial-fraction
# pieces.  NOTE(review): some parameters (e.g. B in With1578, C in With1583)
# are accepted but unused here — presumably eliminated by the rule's
# constraint; verify against the upstream Rubi rule set.
def With1578(A, B, C, a, b, x):
q = (a/b)**(S(1)/3)
return Dist(q**S(2)/a, Int((A + C*q*x)/(q**S(2) - q*x + x**S(2)), x), x)
def With1579(B, C, a, b, x):
q = (a/b)**(S(1)/3)
return Dist(C*q**S(3)/a, Int(x/(q**S(2) - q*x + x**S(2)), x), x)
def With1580(A, C, a, b, x):
q = (a/b)**(S(1)/3)
return Dist(q**S(2)/a, Int((A + C*q*x)/(q**S(2) - q*x + x**S(2)), x), x)
def With1581(A, B, C, a, b, x):
q = (-a/b)**(S(1)/3)
return Dist(q/a, Int((A*q + x*(A + B*q))/(q**S(2) + q*x + x**S(2)), x), x)
def With1582(B, C, a, b, x):
q = (-a/b)**(S(1)/3)
return Dist(B*q**S(2)/a, Int(x/(q**S(2) + q*x + x**S(2)), x), x)
def With1583(A, C, a, b, x):
q = (-a/b)**(S(1)/3)
return Dist(A*q/a, Int((q + x)/(q**S(2) + q*x + x**S(2)), x), x)
# Rules 1584-1589: general cube-root split, gated by the nonvanishing of the
# numerator evaluated at the root (A - B*q + C*q**2 etc.).  Each WithNNNN is
# the predicate; the matching replacementNNNN recomputes q and performs the
# split.
def With1584(A, B, C, a, b, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = (a/b)**(S(1)/3)
if NonzeroQ(A - B*q + C*q**S(2)):
return True
return False
def replacement1584(A, B, C, a, b, x):
q = (a/b)**(S(1)/3)
return Dist(q/(S(3)*a), Int((q*(S(2)*A + B*q - C*q**S(2)) - x*(A - B*q - S(2)*C*q**S(2)))/(q**S(2) - q*x + x**S(2)), x), x) + Dist(q*(A - B*q + C*q**S(2))/(S(3)*a), Int(S(1)/(q + x), x), x)
def With1585(B, C, a, b, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = (a/b)**(S(1)/3)
if NonzeroQ(B*q - C*q**S(2)):
return True
return False
def replacement1585(B, C, a, b, x):
q = (a/b)**(S(1)/3)
return Dist(q/(S(3)*a), Int((q*(B*q - C*q**S(2)) + x*(B*q + S(2)*C*q**S(2)))/(q**S(2) - q*x + x**S(2)), x), x) - Dist(q*(B*q - C*q**S(2))/(S(3)*a), Int(S(1)/(q + x), x), x)
def With1586(A, C, a, b, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = (a/b)**(S(1)/3)
if NonzeroQ(A + C*q**S(2)):
return True
return False
def replacement1586(A, C, a, b, x):
q = (a/b)**(S(1)/3)
return Dist(q/(S(3)*a), Int((q*(S(2)*A - C*q**S(2)) - x*(A - S(2)*C*q**S(2)))/(q**S(2) - q*x + x**S(2)), x), x) + Dist(q*(A + C*q**S(2))/(S(3)*a), Int(S(1)/(q + x), x), x)
# Rules 1587-1589: same scheme with the negative root q = (-a/b)**(1/3).
def With1587(A, B, C, a, b, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = (-a/b)**(S(1)/3)
if NonzeroQ(A + B*q + C*q**S(2)):
return True
return False
def replacement1587(A, B, C, a, b, x):
q = (-a/b)**(S(1)/3)
return Dist(q/(S(3)*a), Int((q*(S(2)*A - B*q - C*q**S(2)) + x*(A + B*q - S(2)*C*q**S(2)))/(q**S(2) + q*x + x**S(2)), x), x) + Dist(q*(A + B*q + C*q**S(2))/(S(3)*a), Int(S(1)/(q - x), x), x)
def With1588(B, C, a, b, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = (-a/b)**(S(1)/3)
if NonzeroQ(B*q + C*q**S(2)):
return True
return False
def replacement1588(B, C, a, b, x):
q = (-a/b)**(S(1)/3)
return Dist(q/(S(3)*a), Int((-q*(B*q + C*q**S(2)) + x*(B*q - S(2)*C*q**S(2)))/(q**S(2) + q*x + x**S(2)), x), x) + Dist(q*(B*q + C*q**S(2))/(S(3)*a), Int(S(1)/(q - x), x), x)
def With1589(A, C, a, b, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = (-a/b)**(S(1)/3)
if NonzeroQ(A + C*q**S(2)):
return True
return False
def replacement1589(A, C, a, b, x):
q = (-a/b)**(S(1)/3)
return Dist(q/(S(3)*a), Int((q*(S(2)*A - C*q**S(2)) + x*(A - S(2)*C*q**S(2)))/(q**S(2) + q*x + x**S(2)), x), x) + Dist(q*(A + C*q**S(2))/(S(3)*a), Int(S(1)/(q - x), x), x)
# Rules 1590-1591: pair the x**i and x**(i + n/2) coefficients of Pq into a
# sum of simpler integrands; apply the rewrite only if the sum actually
# splits (SumQ).
# NOTE(review): `ii` is not bound locally here — presumably a module-level
# Symbol defined earlier in this generated file; verify.
def With1590(Pq, a, b, c, m, n, x):
if isinstance(x, (int, Integer, float, Float)):
return False
v = Sum_doit(c**(-ii)*(c*x)**(ii + m)*(x**(n/S(2))*Coeff(Pq, x, ii + n/S(2)) + Coeff(Pq, x, ii))/(a + b*x**n), List(ii, S(0), n/S(2) + S(-1)))
if SumQ(v):
return True
return False
def replacement1590(Pq, a, b, c, m, n, x):
v = Sum_doit(c**(-ii)*(c*x)**(ii + m)*(x**(n/S(2))*Coeff(Pq, x, ii + n/S(2)) + Coeff(Pq, x, ii))/(a + b*x**n), List(ii, S(0), n/S(2) + S(-1)))
return Int(v, x)
# Same pairing without the (c*x)**m factor.
def With1591(Pq, a, b, n, x):
if isinstance(x, (int, Integer, float, Float)):
return False
v = Sum_doit(x**ii*(x**(n/S(2))*Coeff(Pq, x, ii + n/S(2)) + Coeff(Pq, x, ii))/(a + b*x**n), List(ii, S(0), n/S(2) + S(-1)))
if SumQ(v):
return True
return False
def replacement1591(Pq, a, b, n, x):
v = Sum_doit(x**ii*(x**(n/S(2))*Coeff(Pq, x, ii + n/S(2)) + Coeff(Pq, x, ii))/(a + b*x**n), List(ii, S(0), n/S(2) + S(-1)))
return Int(v, x)
# Rules 1592-1599: (c + d*x**k)/sqrt(a + b*x**k) forms for k = 3, 6, 8,
# expressed via incomplete elliptic integrals (EllipticE/EllipticF) or
# reduced to simpler sqrt integrals.  r/s are the numerator/denominator of
# the real cube root of b/a.
def With1592(a, b, c, d, x):
r = Numer(Rt(b/a, S(3)))
s = Denom(Rt(b/a, S(3)))
return Simp(S(2)*d*s**S(3)*sqrt(a + b*x**S(3))/(a*r**S(2)*(r*x + s*(S(1) + sqrt(S(3))))), x) - Simp(S(3)**(S(1)/4)*d*s*sqrt((r**S(2)*x**S(2) - r*s*x + s**S(2))/(r*x + s*(S(1) + sqrt(S(3))))**S(2))*sqrt(S(2) - sqrt(S(3)))*(r*x + s)*EllipticE(asin((r*x + s*(S(1) - sqrt(S(3))))/(r*x + s*(S(1) + sqrt(S(3))))), S(-7) - S(4)*sqrt(S(3)))/(r**S(2)*sqrt(s*(r*x + s)/(r*x + s*(S(1) + sqrt(S(3))))**S(2))*sqrt(a + b*x**S(3))), x)
# Split off the part handled by 1592, leaving a pure 1/sqrt integral.
def With1593(a, b, c, d, x):
r = Numer(Rt(b/a, S(3)))
s = Denom(Rt(b/a, S(3)))
return Dist(d/r, Int((r*x + s*(S(1) - sqrt(S(3))))/sqrt(a + b*x**S(3)), x), x) + Dist((c*r - d*s*(S(1) - sqrt(S(3))))/r, Int(S(1)/sqrt(a + b*x**S(3)), x), x)
# Mirror of 1592/1593 with the opposite sign convention (1 - sqrt(3)).
def With1594(a, b, c, d, x):
r = Numer(Rt(b/a, S(3)))
s = Denom(Rt(b/a, S(3)))
return Simp(S(2)*d*s**S(3)*sqrt(a + b*x**S(3))/(a*r**S(2)*(r*x + s*(S(1) - sqrt(S(3))))), x) + Simp(S(3)**(S(1)/4)*d*s*sqrt((r**S(2)*x**S(2) - r*s*x + s**S(2))/(r*x + s*(S(1) - sqrt(S(3))))**S(2))*sqrt(sqrt(S(3)) + S(2))*(r*x + s)*EllipticE(asin((r*x + s*(S(1) + sqrt(S(3))))/(r*x + s*(S(1) - sqrt(S(3))))), S(-7) + S(4)*sqrt(S(3)))/(r**S(2)*sqrt(-s*(r*x + s)/(r*x + s*(S(1) - sqrt(S(3))))**S(2))*sqrt(a + b*x**S(3))), x)
def With1595(a, b, c, d, x):
r = Numer(Rt(b/a, S(3)))
s = Denom(Rt(b/a, S(3)))
return Dist(d/r, Int((r*x + s*(S(1) + sqrt(S(3))))/sqrt(a + b*x**S(3)), x), x) + Dist((c*r - d*s*(S(1) + sqrt(S(3))))/r, Int(S(1)/sqrt(a + b*x**S(3)), x), x)
# Sextic variant: (c + d*x**2)/sqrt(a + b*x**6) via EllipticE(acos(...)).
def With1596(a, b, c, d, x):
r = Numer(Rt(b/a, S(3)))
s = Denom(Rt(b/a, S(3)))
return Simp(d*s**S(3)*x*(S(1) + sqrt(S(3)))*sqrt(a + b*x**S(6))/(S(2)*a*r**S(2)*(r*x**S(2)*(S(1) + sqrt(S(3))) + s)), x) - Simp(S(3)**(S(1)/4)*d*s*x*sqrt((r**S(2)*x**S(4) - r*s*x**S(2) + s**S(2))/(r*x**S(2)*(S(1) + sqrt(S(3))) + s)**S(2))*(r*x**S(2) + s)*EllipticE(acos((r*x**S(2)*(S(1) - sqrt(S(3))) + s)/(r*x**S(2)*(S(1) + sqrt(S(3))) + s)), sqrt(S(3))/S(4) + S(1)/2)/(S(2)*r**S(2)*sqrt(r*x**S(2)*(r*x**S(2) + s)/(r*x**S(2)*(S(1) + sqrt(S(3))) + s)**S(2))*sqrt(a + b*x**S(6))), x)
def With1597(a, b, c, d, x):
q = Rt(b/a, S(3))
return Dist(d/(S(2)*q**S(2)), Int((S(2)*q**S(2)*x**S(4) - sqrt(S(3)) + S(1))/sqrt(a + b*x**S(6)), x), x) + Dist((S(2)*c*q**S(2) - d*(S(1) - sqrt(S(3))))/(S(2)*q**S(2)), Int(S(1)/sqrt(a + b*x**S(6)), x), x)
# Octic variants: (c + d*x**2)/sqrt(a + b*x**8).
def replacement1598(a, b, c, d, x):
return -Simp(c*d*x**S(3)*sqrt(-(c - d*x**S(2))**S(2)/(c*d*x**S(2)))*sqrt(-d**S(2)*(a + b*x**S(8))/(b*c**S(2)*x**S(4)))*EllipticF(asin(sqrt((sqrt(S(2))*c**S(2) + S(2)*c*d*x**S(2) + sqrt(S(2))*d**S(2)*x**S(4))/(c*d*x**S(2)))/S(2)), S(-2) + S(2)*sqrt(S(2)))/(sqrt(sqrt(S(2)) + S(2))*sqrt(a + b*x**S(8))*(c - d*x**S(2))), x)
def replacement1599(a, b, c, d, x):
return -Dist((-c*Rt(b/a, S(4)) + d)/(S(2)*Rt(b/a, S(4))), Int((-x**S(2)*Rt(b/a, S(4)) + S(1))/sqrt(a + b*x**S(8)), x), x) + Dist((c*Rt(b/a, S(4)) + d)/(S(2)*Rt(b/a, S(4))), Int((x**S(2)*Rt(b/a, S(4)) + S(1))/sqrt(a + b*x**S(8)), x), x)
# Rules 1600-1605: expansion-based fallbacks for Pq over a binomial.
# Split off Pq's constant term, then handle the rest divided by x.
def replacement1600(Pq, a, b, n, x):
return Dist(Coeff(Pq, x, S(0)), Int(S(1)/(x*sqrt(a + b*x**n)), x), x) + Int(ExpandToSum((Pq - Coeff(Pq, x, S(0)))/x, x)/sqrt(a + b*x**n), x)
# Regroup Pq's coefficients by residue class of the exponent mod n/2.
def With1601(Pq, a, b, c, m, n, p, x):
q = Expon(Pq, x)
j = Symbol('j')
k = Symbol('k')
return Int(Sum_doit(c**(-j)*(c*x)**(j + m)*(a + b*x**n)**p*Sum_doit(x**(k*n/S(2))*Coeff(Pq, x, j + k*n/S(2)), List(k, S(0), S(1) + S(2)*(-j + q)/n)), List(j, S(0), n/S(2) + S(-1))), x)
def With1602(Pq, a, b, n, p, x):
q = Expon(Pq, x)
j = Symbol('j')
k = Symbol('k')
return Int(Sum_doit(x**j*(a + b*x**n)**p*Sum_doit(x**(k*n/S(2))*Coeff(Pq, x, j + k*n/S(2)), List(k, S(0), S(1) + S(2)*(-j + q)/n)), List(j, S(0), n/S(2) + S(-1))), x)
# Split off the x**(n-1) term of Pq (it integrates by direct substitution).
def replacement1603(Pq, a, b, n, p, x):
return Dist(Coeff(Pq, x, n + S(-1)), Int(x**(n + S(-1))*(a + b*x**n)**p, x), x) + Int((a + b*x**n)**p*ExpandToSum(Pq - x**(n + S(-1))*Coeff(Pq, x, n + S(-1)), x), x)
# Full expansion fallbacks.
def replacement1604(Pq, a, b, c, m, n, x):
return Int(ExpandIntegrand(Pq*(c*x)**m/(a + b*x**n), x), x)
def replacement1605(Pq, a, b, n, x):
return Int(ExpandIntegrand(Pq/(a + b*x**n), x), x)
# Rules 1606-1608: peel off the constant (Pq0) or leading (Pqq) coefficient
# of Pq; each With is the predicate for its same-numbered replacement.
# Applies when Pq has a nonzero constant term.
def With1606(Pq, a, b, c, m, n, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
Pq0 = Coeff(Pq, x, S(0))
if NonzeroQ(Pq0):
return True
return False
def replacement1606(Pq, a, b, c, m, n, p, x):
Pq0 = Coeff(Pq, x, S(0))
return Dist(S(1)/(S(2)*a*c*(m + S(1))), Int((c*x)**(m + S(1))*(a + b*x**n)**p*ExpandToSum(-S(2)*Pq0*b*x**(n + S(-1))*(m + n*(p + S(1)) + S(1)) + S(2)*a*(Pq - Pq0)*(m + S(1))/x, x), x), x) + Simp(Pq0*(c*x)**(m + S(1))*(a + b*x**n)**(p + S(1))/(a*c*(m + S(1))), x)
# Applies when deg(Pq) >= n and m + n*p + q + 1 != 0 ((c*x)**m variant).
def With1607(Pq, a, b, c, m, n, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Expon(Pq, x)
Pqq = Coeff(Pq, x, q)
if And(NonzeroQ(m + n*p + q + S(1)), GreaterEqual(-n + q, S(0)), Or(IntegerQ(S(2)*p), IntegerQ(p + (q + S(1))/(S(2)*n)))):
return True
return False
def replacement1607(Pq, a, b, c, m, n, p, x):
q = Expon(Pq, x)
Pqq = Coeff(Pq, x, q)
return Dist(1/(b*(m + n*p + q + 1)), Int((c*x)**m*(a + b*x**n)**p*ExpandToSum(-Pqq*a*x**(-n + q)*(m - n + q + 1) + b*(Pq - Pqq*x**q)*(m + n*p + q + 1), x), x), x) + Simp(Pqq*c**(n - q - 1)*(c*x)**(m - n + q + 1)*(a + b*x**n)**(p + 1)/(b*(m + n*p + q + 1)), x)
# Same peel without the (c*x)**m factor.
def With1608(Pq, a, b, n, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Expon(Pq, x)
Pqq = Coeff(Pq, x, q)
if And(NonzeroQ(n*p + q + S(1)), GreaterEqual(-n + q, S(0)), Or(IntegerQ(S(2)*p), IntegerQ(p + (q + S(1))/(S(2)*n)))):
return True
return False
def replacement1608(Pq, a, b, n, p, x):
q = Expon(Pq, x)
Pqq = Coeff(Pq, x, q)
return Dist(1/(b*(n*p + q + 1)), Int((a + b*x**n)**p*ExpandToSum(-Pqq*a*x**(-n + q)*(-n + q + 1) + b*(Pq - Pqq*x**q)*(n*p + q + 1), x), x), x) + Simp(Pqq*x**(-n + q + 1)*(a + b*x**n)**(p + 1)/(b*(n*p + q + 1)), x)
# Rules 1609-1613: substitutions u = 1/x (for negative n) and u = x**(1/g)
# (for fractional m or n), with ReplaceAll rewriting Pq in the new variable.
def With1609(Pq, a, b, m, n, p, x):
q = Expon(Pq, x)
return -Subst(Int(x**(-m - q + S(-2))*(a + b*x**(-n))**p*ExpandToSum(x**q*ReplaceAll(Pq, Rule(x, S(1)/x)), x), x), x, S(1)/x)
# Fractional m: substitute via the denominator g of m.
def With1610(Pq, a, b, c, m, n, p, x):
g = Denominator(m)
q = Expon(Pq, x)
return -Dist(g/c, Subst(Int(x**(-g*(m + q + S(1)) + S(-1))*(a + b*c**(-n)*x**(-g*n))**p*ExpandToSum(x**(g*q)*ReplaceAll(Pq, Rule(x, x**(-g)/c)), x), x), x, (c*x)**(-S(1)/g)), x)
# Like 1609 but keeping the (c*x)**m normalization factor.
def With1611(Pq, a, b, c, m, n, p, x):
q = Expon(Pq, x)
return -Dist((c*x)**m*(S(1)/x)**m, Subst(Int(x**(-m - q + S(-2))*(a + b*x**(-n))**p*ExpandToSum(x**q*ReplaceAll(Pq, Rule(x, S(1)/x)), x), x), x, S(1)/x), x)
# Fractional n: substitute via the denominator g of n.
def With1612(Pq, a, b, m, n, p, x):
g = Denominator(n)
return Dist(g, Subst(Int(x**(g*(m + S(1)) + S(-1))*(a + b*x**(g*n))**p*ReplaceAll(Pq, Rule(x, x**g)), x), x, x**(S(1)/g)), x)
def With1613(Pq, a, b, n, p, x):
g = Denominator(n)
return Dist(g, Subst(Int(x**(g + S(-1))*(a + b*x**(g*n))**p*ReplaceAll(Pq, Rule(x, x**g)), x), x, x**(S(1)/g)), x)
# Rules 1614-1625: remaining binomial normalizations, linear-substitution
# rules, and the factored-binomial forms (a1 + b1*x**n)*(a2 + b2*x**n).
def replacement1614(Pq, a, b, c, m, n, p, x):
return Dist(c**IntPart(m)*x**(-FracPart(m))*(c*x)**FracPart(m), Int(Pq*x**m*(a + b*x**n)**p, x), x)
# Substitute u = x**(m+1); Pq must be a polynomial in x**n.
def replacement1615(Pq, a, b, m, n, p, x):
return Dist(S(1)/(m + S(1)), Subst(Int((a + b*x**(n/(m + S(1))))**p*ReplaceAll(SubstFor(x**n, Pq, x), Rule(x, x**(n/(m + S(1))))), x), x, x**(m + S(1))), x)
def replacement1616(Pq, a, b, c, m, n, p, x):
return Dist(c**IntPart(m)*x**(-FracPart(m))*(c*x)**FracPart(m), Int(Pq*x**m*(a + b*x**n)**p, x), x)
# Split A + B*x**m term by term.
def replacement1617(A, B, a, b, m, n, p, x):
return Dist(A, Int((a + b*x**n)**p, x), x) + Dist(B, Int(x**m*(a + b*x**n)**p, x), x)
# Full-expansion fallbacks.
def replacement1618(Pq, a, b, c, m, n, p, x):
return Int(ExpandIntegrand(Pq*(c*x)**m*(a + b*x**n)**p, x), x)
def replacement1619(Pq, a, b, n, p, x):
return Int(ExpandIntegrand(Pq*(a + b*x**n)**p, x), x)
# Linear substitution x -> v for integrands in a linear function v of x.
def replacement1620(Pq, a, b, m, n, p, u, v, x):
return Dist(u**m*v**(-m)/Coeff(v, x, S(1)), Subst(Int(x**m*(a + b*x**n)**p*SubstFor(v, Pq, x), x), x, v), x)
def replacement1621(Pq, a, b, n, p, v, x):
return Dist(S(1)/Coeff(v, x, S(1)), Subst(Int((a + b*x**n)**p*SubstFor(v, Pq, x), x), x, v), x)
# Factored binomials: combine (a1 + b1*x**n)(a2 + b2*x**n) into one binomial
# in x**(2n), with a FracPart correction when p is fractional.
def replacement1622(Pq, a1, a2, b1, b2, c, m, n, p, x):
return Int(Pq*(c*x)**m*(a1*a2 + b1*b2*x**(S(2)*n))**p, x)
def replacement1623(Pq, a1, a2, b1, b2, n, p, x):
return Int(Pq*(a1*a2 + b1*b2*x**(S(2)*n))**p, x)
def replacement1624(Pq, a1, a2, b1, b2, c, m, n, p, x):
return Dist((a1 + b1*x**n)**FracPart(p)*(a2 + b2*x**n)**FracPart(p)*(a1*a2 + b1*b2*x**(S(2)*n))**(-FracPart(p)), Int(Pq*(c*x)**m*(a1*a2 + b1*b2*x**(S(2)*n))**p, x), x)
def replacement1625(Pq, a1, a2, b1, b2, n, p, x):
return Dist((a1 + b1*x**n)**FracPart(p)*(a2 + b2*x**n)**FracPart(p)*(a1*a2 + b1*b2*x**(S(2)*n))**(-FracPart(p)), Int(Pq*(a1*a2 + b1*b2*x**(S(2)*n))**p, x), x)
# Rules 1626-1648: polynomial times a trinomial (a + b*x**n + c*x**(2*n))**p
# (n2 matches 2*n in the rule pattern).  First the simple direct antiderivative
# forms, then the perfect-square (b**2 == 4*a*c) and normalization rules.
def replacement1626(a, b, c, d, e, f, g, n, n2, p, x):
return Simp(e*x*(a + b*x**n)**(p + S(1))*(c + d*x**n)**(p + S(1))/(a*c), x)
def replacement1627(a, b, c, d, e, g, n, n2, p, x):
return Simp(e*x*(a + b*x**n)**(p + S(1))*(c + d*x**n)**(p + S(1))/(a*c), x)
def replacement1628(a, b, c, d, e, f, g, h, m, n, n2, p, x):
return Simp(e*(h*x)**(m + S(1))*(a + b*x**n)**(p + S(1))*(c + d*x**n)**(p + S(1))/(a*c*h*(m + S(1))), x)
def replacement1629(a, b, c, d, e, g, h, m, n, n2, p, x):
return Simp(e*(h*x)**(m + S(1))*(a + b*x**n)**(p + S(1))*(c + d*x**n)**(p + S(1))/(a*c*h*(m + S(1))), x)
# Split A + B*x**m over a product of two binomials.
def replacement1630(A, B, a, b, c, d, m, n, p, q, x):
return Dist(A, Int((a + b*x**n)**p*(c + d*x**n)**q, x), x) + Dist(B, Int(x**m*(a + b*x**n)**p*(c + d*x**n)**q, x), x)
# Fractional n: substitute u = (c + d*x)**(1/k).
def With1631(Px, a, b, c, d, n, p, q, x):
k = Denominator(n)
return Dist(k/d, Subst(Int(SimplifyIntegrand(x**(k + S(-1))*(a + b*x**(k*n))**p*ReplaceAll(Px, Rule(x, -c/d + x**k/d))**q, x), x), x, (c + d*x)**(S(1)/k)), x)
# Pq polynomial in x**n: substitute u = x**n into the quadratic-in-u trinomial.
def replacement1632(Pq, a, b, c, m, n, n2, p, x):
return Dist(S(1)/n, Subst(Int((a + b*x + c*x**S(2))**p*SubstFor(x**n, Pq, x), x), x, x**n), x)
# Full-expansion fallbacks.
def replacement1633(Pq, a, b, c, d, m, n, n2, p, x):
return Int(ExpandIntegrand(Pq*(d*x)**m*(a + b*x**n + c*x**(S(2)*n))**p, x), x)
def replacement1634(Pq, a, b, c, n, n2, p, x):
return Int(ExpandIntegrand(Pq*(a + b*x**n + c*x**(S(2)*n))**p, x), x)
# Direct antiderivative forms.
def replacement1635(a, b, c, d, e, f, n, n2, p, x):
return Simp(d*x*(a + b*x**n + c*x**(S(2)*n))**(p + S(1))/a, x)
def replacement1636(a, b, c, d, f, n, n2, p, x):
return Simp(d*x*(a + b*x**n + c*x**(S(2)*n))**(p + S(1))/a, x)
def replacement1637(a, b, c, d, e, f, g, m, n, n2, p, x):
return Simp(d*(g*x)**(m + S(1))*(a + b*x**n + c*x**(S(2)*n))**(p + S(1))/(a*g*(m + S(1))), x)
def replacement1638(a, b, c, d, f, g, m, n, n2, p, x):
return Simp(d*(g*x)**(m + S(1))*(a + b*x**n + c*x**(S(2)*n))**(p + S(1))/(a*g*(m + S(1))), x)
# Perfect-square trinomial: rewrite as (b + 2*c*x**n)**(2*p).
def replacement1639(Pq, a, b, c, d, m, n, n2, p, x):
return Dist((S(4)*c)**(-IntPart(p))*(b + S(2)*c*x**n)**(-S(2)*FracPart(p))*(a + b*x**n + c*x**(S(2)*n))**FracPart(p), Int(Pq*(d*x)**m*(b + S(2)*c*x**n)**(S(2)*p), x), x)
def replacement1640(Pq, a, b, c, n, n2, p, x):
return Dist((S(4)*c)**(-IntPart(p))*(b + S(2)*c*x**n)**(-S(2)*FracPart(p))*(a + b*x**n + c*x**(S(2)*n))**FracPart(p), Int(Pq*(b + S(2)*c*x**n)**(S(2)*p), x), x)
# Substitute u = x**n with the x**m factor absorbed.
def replacement1641(Pq, a, b, c, m, n, n2, p, x):
return Dist(S(1)/n, Subst(Int(x**(S(-1) + (m + S(1))/n)*(a + b*x + c*x**S(2))**p*SubstFor(x**n, Pq, x), x), x, x**n), x)
# Normalize (d*x)**m to x**m, or shift one x from Pq into (d*x)**(m+1).
def replacement1642(Pq, a, b, c, d, m, n, n2, p, x):
return Dist(x**(-m)*(d*x)**m, Int(Pq*x**m*(a + b*x**n + c*x**(S(2)*n))**p, x), x)
def replacement1643(Pq, a, b, c, d, m, n, n2, p, x):
return Dist(S(1)/d, Int((d*x)**(m + S(1))*(a + b*x**n + c*x**(S(2)*n))**p*ExpandToSum(Pq/x, x), x), x)
def replacement1644(Pq, a, b, c, n, n2, p, x):
return Int(x*(a + b*x**n + c*x**(S(2)*n))**p*ExpandToSum(Pq/x, x), x)
# Direct antiderivatives for quartic trinomials with cubic-in-x**2 numerators.
def replacement1645(a, b, c, d, e, f, g, n, n2, n3, p, x):
return Simp(x*(S(3)*a*d - x**S(2)*(-a*e + S(2)*b*d*p + S(3)*b*d))*(a + b*x**S(2) + c*x**S(4))**(p + S(1))/(S(3)*a**S(2)), x)
def replacement1646(a, b, c, d, f, g, n, n2, n3, p, x):
return Simp(x*(S(3)*a*d - x**S(2)*(S(2)*b*d*p + S(3)*b*d))*(a + b*x**S(2) + c*x**S(4))**(p + S(1))/(S(3)*a**S(2)), x)
def replacement1647(a, b, c, d, e, g, n, n2, n3, p, x):
return Simp(x*(S(3)*a*d - x**S(2)*(-a*e + S(2)*b*d*p + S(3)*b*d))*(a + b*x**S(2) + c*x**S(4))**(p + S(1))/(S(3)*a**S(2)), x)
def replacement1648(a, b, c, d, g, n, n2, n3, p, x):
return Simp(x*(S(3)*a*d - x**S(2)*(S(2)*b*d*p + S(3)*b*d))*(a + b*x**S(2) + c*x**S(4))**(p + S(1))/(S(3)*a**S(2)), x)
# Rules 1649-1658: closed forms over sqrt(a + b*x**n + c*x**(2*n)) (and the
# (3/2)-power in 1650), specialized to n = 2 quartic trinomials from 1652 on.
# The recurring factor (-4*a*c + b**2) is the trinomial's discriminant.
def replacement1649(a, b, c, e, f, g, h, m, n, n2, q, r, s, x):
return -Simp((S(2)*c*x**n*(-b*g + S(2)*c*f) + S(2)*c*(-S(2)*a*g + b*f) + S(2)*h*x**(n/S(2))*(-S(4)*a*c + b**S(2)))/(c*n*(-S(4)*a*c + b**S(2))*sqrt(a + b*x**n + c*x**(S(2)*n))), x)
def replacement1650(a, b, c, d, e, f, g, h, m, n, n2, q, r, s, x):
return Dist(x**(-m)*(d*x)**m, Int(x**m*(e + f*x**(n/S(2)) + g*x**(S(3)*n/S(2)) + h*x**(S(2)*n))/(a + b*x**n + c*x**(S(2)*n))**(S(3)/2), x), x)
# Rule 1651: applies when deg(Pq) < 2*n; term-by-term reduction raising p.
def With1651(Pq, a, b, c, n, n2, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Expon(Pq, x)
i = Symbol('i')
if Less(q, S(2)*n):
return True
return False
def replacement1651(Pq, a, b, c, n, n2, p, x):
q = Expon(Pq, x)
i = Symbol('i')
return Dist(S(1)/(a*n*(p + S(1))*(-S(4)*a*c + b**S(2))), Int((a + b*x**n + c*x**(S(2)*n))**(p + S(1))*Sum_doit(c*x**(i + n)*(-S(2)*a*Coeff(Pq, x, i + n) + b*Coeff(Pq, x, i))*(i + n*(S(2)*p + S(3)) + S(1)) + x**i*(-a*b*(i + S(1))*Coeff(Pq, x, i + n) + (-S(2)*a*c*(i + S(2)*n*(p + S(1)) + S(1)) + b**S(2)*(i + n*(p + S(1)) + S(1)))*Coeff(Pq, x, i)), List(i, S(0), n + S(-1))), x), x) - Simp(x*(a + b*x**n + c*x**(S(2)*n))**(p + S(1))*Sum_doit(c*x**(i + n)*(-S(2)*a*Coeff(Pq, x, i + n) + b*Coeff(Pq, x, i)) + x**i*(-a*b*Coeff(Pq, x, i + n) + (-S(2)*a*c + b**S(2))*Coeff(Pq, x, i)), List(i, S(0), n + S(-1)))/(a*n*(p + S(1))*(-S(4)*a*c + b**S(2))), x)
# Rules 1652-1658: cubic numerators over sqrt(a + b*x**2 + c*x**4), one rule
# per present subset of d, e, f, g, h.
def replacement1652(a, b, c, d, e, f, g, x):
return -Simp((c*x**S(2)*(-b*f + S(2)*c*e) + c*(-S(2)*a*f + b*e) + g*x*(-S(4)*a*c + b**S(2)))/(c*(-S(4)*a*c + b**S(2))*sqrt(a + b*x**S(2) + c*x**S(4))), x)
def replacement1653(a, b, c, d, f, g, x):
return Simp((S(2)*a*c*f + b*c*f*x**S(2) - g*x*(-S(4)*a*c + b**S(2)))/(c*(-S(4)*a*c + b**S(2))*sqrt(a + b*x**S(2) + c*x**S(4))), x)
def replacement1654(a, b, c, d, e, g, x):
return -Simp((b*c*e + S(2)*c**S(2)*e*x**S(2) + g*x*(-S(4)*a*c + b**S(2)))/(c*(-S(4)*a*c + b**S(2))*sqrt(a + b*x**S(2) + c*x**S(4))), x)
def replacement1655(a, b, c, e, f, g, h, x):
return Simp((S(2)*a**S(2)*c*f + a*b*c*f*x**S(2) + a*h*x**S(3)*(-S(4)*a*c + b**S(2)))/(a*c*(-S(4)*a*c + b**S(2))*sqrt(a + b*x**S(2) + c*x**S(4))), x)
def replacement1656(a, b, c, e, g, h, x):
return Simp(h*x**S(3)/(c*sqrt(a + b*x**S(2) + c*x**S(4))), x)
def replacement1657(a, b, c, d, e, f, g, h, x):
return Simp((S(2)*a**S(2)*c*f + a*b*c*f*x**S(2) + a*h*x**S(3)*(-S(4)*a*c + b**S(2)) + c*d*x*(-S(4)*a*c + b**S(2)))/(a*c*(-S(4)*a*c + b**S(2))*sqrt(a + b*x**S(2) + c*x**S(4))), x)
def replacement1658(a, b, c, d, e, f, h, x):
return Simp((S(2)*a**S(2)*c*f + a*b*c*f*x**S(2) + a*h*x**S(3)*(-S(4)*a*c + b**S(2)) + c*d*x*(-S(4)*a*c + b**S(2)))/(a*c*(-S(4)*a*c + b**S(2))*sqrt(a + b*x**S(2) + c*x**S(4))), x)
# Rules 1659-1660: trinomial analogue of rules 1544-1545 — when
# deg(Pq) >= 2*n, divide Pq (scaled by a power of b*c) by the trinomial and
# reduce quotient and remainder separately.
# Predicate for rule 1659.
def With1659(Pq, a, b, c, n, n2, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Expon(Pq, x)
Q = PolynomialQuotient(Pq*(b*c)**(Floor((q + S(-1))/n) + S(1)), a + b*x**n + c*x**(S(2)*n), x)
R = PolynomialRemainder(Pq*(b*c)**(Floor((q + S(-1))/n) + S(1)), a + b*x**n + c*x**(S(2)*n), x)
if GreaterEqual(q, S(2)*n):
return True
return False
def replacement1659(Pq, a, b, c, n, n2, p, x):
q = Expon(Pq, x)
Q = PolynomialQuotient(Pq*(b*c)**(Floor((q + S(-1))/n) + S(1)), a + b*x**n + c*x**(S(2)*n), x)
R = PolynomialRemainder(Pq*(b*c)**(Floor((q + S(-1))/n) + S(1)), a + b*x**n + c*x**(S(2)*n), x)
return Dist((b*c)**(-Floor((q - 1)/n) - 1)/(a*n*(p + 1)*(-4*a*c + b**2)), Int((a + b*x**n + c*x**(2*n))**(p + 1)*ExpandToSum(Q*a*n*(p + 1)*(-4*a*c + b**2) + Sum_doit(c*x**(i + n)*(-2*a*Coeff(R, x, i + n) + b*Coeff(R, x, i))*(i + n*(2*p + 3) + 1) + x**i*(-a*b*(i + 1)*Coeff(R, x, i + n) + (-2*a*c*(i + 2*n*(p + 1) + 1) + b**2*(i + n*(p + 1) + 1))*Coeff(R, x, i)), List(i, 0, n - 1)), x), x), x) - Simp(x*(b*c)**(-Floor((q - 1)/n) - 1)*(a + b*x**n + c*x**(2*n))**(p + 1)*Sum_doit(c*x**(i + n)*(-2*a*Coeff(R, x, i + n) + b*Coeff(R, x, i)) + x**i*(-a*b*Coeff(R, x, i + n) + (-2*a*c + b**2)*Coeff(R, x, i)), List(i, 0, n - 1))/(a*n*(p + 1)*(-4*a*c + b**2)), x)
# Variant with an extra x**m factor folded into the division.
def With1660(Pq, a, b, c, m, n, n2, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Expon(Pq, x)
Q = PolynomialQuotient(Pq*a*x**m*(b*c)**(Floor((q + S(-1))/n) + S(1)), a + b*x**n + c*x**(S(2)*n), x)
R = PolynomialRemainder(Pq*a*x**m*(b*c)**(Floor((q + S(-1))/n) + S(1)), a + b*x**n + c*x**(S(2)*n), x)
if GreaterEqual(q, S(2)*n):
return True
return False
def replacement1660(Pq, a, b, c, m, n, n2, p, x):
q = Expon(Pq, x)
Q = PolynomialQuotient(Pq*a*x**m*(b*c)**(Floor((q + S(-1))/n) + S(1)), a + b*x**n + c*x**(S(2)*n), x)
R = PolynomialRemainder(Pq*a*x**m*(b*c)**(Floor((q + S(-1))/n) + S(1)), a + b*x**n + c*x**(S(2)*n), x)
return Dist((b*c)**(-Floor((q - 1)/n) - 1)/(a*n*(p + 1)*(-4*a*c + b**2)), Int(x**m*(a + b*x**n + c*x**(2*n))**(p + 1)*ExpandToSum(Q*n*x**(-m)*(p + 1)*(-4*a*c + b**2) + Sum_doit(c*x**(i - m + n)*(-2*Coeff(R, x, i + n) + b*Coeff(R, x, i)/a)*(i + n*(2*p + 3) + 1) + x**(i - m)*(-b*(i + 1)*Coeff(R, x, i + n) + (-2*c*(i + 2*n*(p + 1) + 1) + b**2*(i + n*(p + 1) + 1)/a)*Coeff(R, x, i)), List(i, 0, n - 1)), x), x), x) - Simp(x*(b*c)**(-Floor((q - 1)/n) - 1)*(a + b*x**n + c*x**(2*n))**(p + 1)*Sum_doit(c*x**(i + n)*(-2*a*Coeff(R, x, i + n) + b*Coeff(R, x, i)) + x**i*(-a*b*Coeff(R, x, i + n) + (-2*a*c + b**2)*Coeff(R, x, i)), List(i, 0, n - 1))/(a**2*n*(p + 1)*(-4*a*c + b**2)), x)
# Rules 1661-1663: gcd substitution and expansion fallbacks for trinomials.
# Predicate: applies when g = gcd(m + 1, n) > 1.
def With1661(Pq, a, b, c, m, n, n2, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
g = GCD(m + S(1), n)
if Unequal(g, S(1)):
return True
return False
def replacement1661(Pq, a, b, c, m, n, n2, p, x):
g = GCD(m + S(1), n)
return Dist(S(1)/g, Subst(Int(x**(S(-1) + (m + S(1))/g)*(a + b*x**(n/g) + c*x**(S(2)*n/g))**p*ReplaceAll(Pq, Rule(x, x**(S(1)/g))), x), x, x**g), x)
def replacement1662(Pq, a, b, c, d, m, n, n2, x):
return Int(ExpandIntegrand(Pq*(d*x)**m/(a + b*x**n + c*x**(S(2)*n)), x), x)
def replacement1663(Pq, a, b, c, n, n2, x):
return Int(ExpandIntegrand(Pq/(a + b*x**n + c*x**(S(2)*n)), x), x)
# Rules 1664-1666: quadratic trinomial with 2*p + q + 1 == 0 (q = deg Pq).
# The leading term integrates to a log / atanh / atan of the trinomial; the
# remainder is handled recursively.  Each With is the shared predicate.
def With1664(Pq, a, b, c, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Expon(Pq, x)
Pqq = Coeff(Pq, x, q)
if Equal(S(2)*p + q + S(1), S(0)):
return True
return False
# Leading term gives a logarithm of the trinomial.
def replacement1664(Pq, a, b, c, p, x):
q = Expon(Pq, x)
Pqq = Coeff(Pq, x, q)
return Dist(1/2, Int((a + b*x + c*x**2)**p*ExpandToSum(2*Pq - Pqq*c**p*(b + 2*c*x)*(a + b*x + c*x**2)**(-p - 1), x), x), x) + Simp(Pqq*c**p*log(a + b*x + c*x**2)/2, x)
def With1665(Pq, a, b, c, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Expon(Pq, x)
Pqq = Coeff(Pq, x, q)
if Equal(S(2)*p + q + S(1), S(0)):
return True
return False
# Leading term gives an atanh (positive leading coefficient c).
def replacement1665(Pq, a, b, c, p, x):
q = Expon(Pq, x)
Pqq = Coeff(Pq, x, q)
return Int((a + b*x + c*x**2)**p*ExpandToSum(Pq - Pqq*c**(p + 1/2)*(a + b*x + c*x**2)**(-p - 1/2), x), x) + Simp(Pqq*c**p*atanh((b + 2*c*x)/(2*sqrt(a + b*x + c*x**2)*Rt(c, 2))), x)
def With1666(Pq, a, b, c, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Expon(Pq, x)
Pqq = Coeff(Pq, x, q)
if Equal(S(2)*p + q + S(1), S(0)):
return True
return False
# Leading term gives an arctangent (negative leading coefficient c).
def replacement1666(Pq, a, b, c, p, x):
q = Expon(Pq, x)
Pqq = Coeff(Pq, x, q)
return Int((a + b*x + c*x**2)**p*ExpandToSum(Pq - Pqq*(-c)**(p + 1/2)*(a + b*x + c*x**2)**(-p - 1/2), x), x) - Simp(Pqq*(-c)**p*ArcTan((b + 2*c*x)/(2*sqrt(a + b*x + c*x**2)*Rt(-c, 2))), x)
def With1667(Pq, a, b, c, d, m, n, n2, p, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Expon(Pq, x)
Pqq = Coeff(Pq, x, q)
if And(GreaterEqual(q, S(2)*n), Unequal(m + S(2)*n*p + q + S(1), S(0)), Or(IntegerQ(S(2)*p), And(Equal(n, S(1)), IntegerQ(S(4)*p)), IntegerQ(p + (q + S(1))/(S(2)*n)))):
return True
return False
def replacement1667(Pq, a, b, c, d, m, n, n2, p, x):
q = Expon(Pq, x)
Pqq = Coeff(Pq, x, q)
return Int((d*x)**m*(a + b*x**n + c*x**(2*n))**p*ExpandToSum(Pq - Pqq*x**q - Pqq*(a*x**(-2*n + q)*(m - 2*n + q + 1) + b*x**(-n + q)*(m + n*(p - 1) + q + 1))/(c*(m + 2*n*p + q + 1)), x), x) + Simp(Pqq*d**(2*n - q - 1)*(d*x)**(m - 2*n + q + 1)*(a + b*x**n + c*x**(2*n))**(p + 1)/(c*(m + 2*n*p + q + 1)), x)
# Matcher constraint for rule 1668: same shape as rule 1667 but without the
# (d*x)**m prefactor (condition uses 2*n*p + q + 1 instead of m + ...).
def With1668(Pq, a, b, c, n, n2, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    q = Expon(Pq, x)
    # Pqq unused in the condition -- generated-code artifact, left verbatim.
    Pqq = Coeff(Pq, x, q)
    if And(GreaterEqual(q, S(2)*n), Unequal(S(2)*n*p + q + S(1), S(0)), Or(IntegerQ(S(2)*p), And(Equal(n, S(1)), IntegerQ(S(4)*p)), IntegerQ(p + (q + S(1))/(S(2)*n)))):
        return True
    return False
# Rule 1668 rewrite: degree-reduction step for Pq*(a + b*x**n + c*x**(2n))**p.
def replacement1668(Pq, a, b, c, n, n2, p, x):
    q = Expon(Pq, x)
    Pqq = Coeff(Pq, x, q)
    return Int((a + b*x**n + c*x**(2*n))**p*ExpandToSum(Pq - Pqq*x**q - Pqq*(a*x**(-2*n + q)*(-2*n + q + 1) + b*x**(-n + q)*(n*(p - 1) + q + 1))/(c*(2*n*p + q + 1)), x), x) + Simp(Pqq*x**(-2*n + q + 1)*(a + b*x**n + c*x**(2*n))**(p + 1)/(c*(2*n*p + q + 1)), x)
# Rule 1669: regroups Pq into residue classes of exponents mod n (double sum
# over j, k via Sum_doit) before integrating, with a (d*x)**m prefactor.
def With1669(Pq, a, b, c, d, m, n, n2, p, x):
    q = Expon(Pq, x)
    j = Symbol('j')
    k = Symbol('k')
    return Int(Sum_doit(d**(-j)*(d*x)**(j + m)*(a + b*x**n + c*x**(S(2)*n))**p*Sum_doit(x**(k*n)*Coeff(Pq, x, j + k*n), List(k, S(0), S(1) + (-j + q)/n)), List(j, S(0), n + S(-1))), x)
# Rule 1670: same residue-class regrouping as rule 1669, without the
# (d*x)**m prefactor.
def With1670(Pq, a, b, c, n, n2, p, x):
    q = Expon(Pq, x)
    j = Symbol('j')
    k = Symbol('k')
    return Int(Sum_doit(x**j*(a + b*x**n + c*x**(S(2)*n))**p*Sum_doit(x**(k*n)*Coeff(Pq, x, j + k*n), List(k, S(0), S(1) + (-j + q)/n)), List(j, S(0), n + S(-1))), x)
# Rules 1671/1672: fall back to partial-fraction-style expansion
# (RationalFunctionExpand) when the trinomial appears to the power -1.
def replacement1671(Pq, a, b, c, d, m, n, n2, x):
    return Int(RationalFunctionExpand(Pq*(d*x)**m/(a + b*x**n + c*x**(S(2)*n)), x), x)
def replacement1672(Pq, a, b, c, n, n2, x):
    return Int(RationalFunctionExpand(Pq/(a + b*x**n + c*x**(S(2)*n)), x), x)
# Rules 1673-1675: substitute x -> 1/x (inversion) to handle negative-degree
# structure; 1674 additionally absorbs a fractional power m via its
# denominator g, 1675 carries the (d*x)**m factor through the substitution.
def With1673(Pq, a, b, c, m, n, n2, p, x):
    q = Expon(Pq, x)
    return -Subst(Int(x**(-m - q + S(-2))*(a + b*x**(-n) + c*x**(-S(2)*n))**p*ExpandToSum(x**q*ReplaceAll(Pq, Rule(x, S(1)/x)), x), x), x, S(1)/x)
def With1674(Pq, a, b, c, d, m, n, n2, p, x):
    # g = denominator of the rational exponent m.
    g = Denominator(m)
    q = Expon(Pq, x)
    return -Dist(g/d, Subst(Int(x**(-g*(m + q + S(1)) + S(-1))*(a + b*d**(-n)*x**(-g*n) + c*d**(-S(2)*n)*x**(-S(2)*g*n))**p*ExpandToSum(x**(g*q)*ReplaceAll(Pq, Rule(x, x**(-g)/d)), x), x), x, (d*x)**(-S(1)/g)), x)
def With1675(Pq, a, b, c, d, m, n, n2, p, x):
    q = Expon(Pq, x)
    return -Dist((d*x)**m*(S(1)/x)**m, Subst(Int(x**(-m - q + S(-2))*(a + b*x**(-n) + c*x**(-S(2)*n))**p*ExpandToSum(x**q*ReplaceAll(Pq, Rule(x, S(1)/x)), x), x), x, S(1)/x), x)
# Rules 1676/1677: rationalize a fractional exponent n by substituting
# x -> x**g, where g is the denominator of n; 1676 also carries x**m.
def With1676(Pq, a, b, c, m, n, n2, p, x):
    g = Denominator(n)
    return Dist(g, Subst(Int(x**(g*(m + S(1)) + S(-1))*(a + b*x**(g*n) + c*x**(S(2)*g*n))**p*ReplaceAll(Pq, Rule(x, x**g)), x), x, x**(S(1)/g)), x)
def With1677(Pq, a, b, c, n, n2, p, x):
    g = Denominator(n)
    return Dist(g, Subst(Int(x**(g + S(-1))*(a + b*x**(g*n) + c*x**(S(2)*g*n))**p*ReplaceAll(Pq, Rule(x, x**g)), x), x, x**(S(1)/g)), x)
# Rules 1678-1682: normalize the (d*x)**m prefactor to a plain x**m
# (pulling the constant d out in various sqrt/power combinations) so that
# subsequent x**m rules apply; 1681 substitutes u = x**(m+1).
def replacement1678(Pq, a, b, c, d, m, n, n2, p, x):
    return Dist(d**(m + S(-1)/2)*sqrt(d*x)/sqrt(x), Int(Pq*x**m*(a + b*x**n + c*x**(S(2)*n))**p, x), x)
def replacement1679(Pq, a, b, c, d, m, n, n2, p, x):
    return Dist(d**(m + S(1)/2)*sqrt(x)/sqrt(d*x), Int(Pq*x**m*(a + b*x**n + c*x**(S(2)*n))**p, x), x)
def replacement1680(Pq, a, b, c, d, m, n, n2, p, x):
    return Dist(x**(-m)*(d*x)**m, Int(Pq*x**m*(a + b*x**n + c*x**(S(2)*n))**p, x), x)
def replacement1681(Pq, a, b, c, m, n, n2, p, x):
    return Dist(S(1)/(m + S(1)), Subst(Int((a + b*x**(n/(m + S(1))) + c*x**(S(2)*n/(m + S(1))))**p*ReplaceAll(SubstFor(x**n, Pq, x), Rule(x, x**(n/(m + S(1))))), x), x, x**(m + S(1))), x)
def replacement1682(Pq, a, b, c, d, m, n, n2, p, x):
    return Dist(x**(-m)*(d*x)**m, Int(Pq*x**m*(a + b*x**n + c*x**(S(2)*n))**p, x), x)
# Rules 1683/1684: factor the trinomial denominator via its discriminant root
# q = Rt(b**2 - 4*a*c, 2) and split into two simpler quadratic-in-x**n
# integrals; 1683 carries the (d*x)**m prefactor.
def With1683(Pq, a, b, c, d, m, n, n2, x):
    q = Rt(-S(4)*a*c + b**S(2), S(2))
    return Dist(S(2)*c/q, Int(Pq*(d*x)**m/(b + S(2)*c*x**n - q), x), x) - Dist(S(2)*c/q, Int(Pq*(d*x)**m/(b + S(2)*c*x**n + q), x), x)
def With1684(Pq, a, b, c, n, n2, x):
    q = Rt(-S(4)*a*c + b**S(2), S(2))
    return Dist(S(2)*c/q, Int(Pq/(b + S(2)*c*x**n - q), x), x) - Dist(S(2)*c/q, Int(Pq/(b + S(2)*c*x**n + q), x), x)
# Rules 1685-1690: terminal fallbacks for the trinomial family.
# 1685/1686 expand the whole integrand term-by-term; 1687/1688 hand the
# unevaluated integral back verbatim; 1689/1690 undo a linear substitution
# v (and u**m/v**m prefactor) via Subst.
def replacement1685(Pq, a, b, c, d, m, n, n2, p, x):
    return Int(ExpandIntegrand(Pq*(d*x)**m*(a + b*x**n + c*x**(S(2)*n))**p, x), x)
def replacement1686(Pq, a, b, c, n, n2, p, x):
    return Int(ExpandIntegrand(Pq*(a + b*x**n + c*x**(S(2)*n))**p, x), x)
def replacement1687(Pq, a, b, c, d, m, n, n2, p, x):
    return Int(Pq*(d*x)**m*(a + b*x**n + c*x**(S(2)*n))**p, x)
def replacement1688(Pq, a, b, c, n, n2, p, x):
    return Int(Pq*(a + b*x**n + c*x**(S(2)*n))**p, x)
def replacement1689(Pq, a, b, c, m, n, n2, p, u, v, x):
    return Dist(u**m*v**(-m)/Coefficient(v, x, S(1)), Subst(Int(x**m*(a + b*x**n + c*x**(S(2)*n))**p*SubstFor(v, Pq, x), x), x, v), x)
def replacement1690(Pq, a, b, c, n, n2, p, v, x):
    return Dist(S(1)/Coefficient(v, x, S(1)), Subst(Int((a + b*x**n + c*x**(S(2)*n))**p*SubstFor(v, Pq, x), x), x, v), x)
# Rules 1691-1704: integration rules for generalized binomials
# (a*x**j + b*x**n)**p.  Each rule is a single recurrence or closed form
# produced by the RUBI generator; formulas are left verbatim.
def replacement1691(a, b, j, n, p, x):
    # Direct antiderivative (no recursion).
    return Simp(x**(S(1) - n)*(a*x**j + b*x**n)**(p + S(1))/(b*(-j + n)*(p + S(1))), x)
def replacement1692(a, b, j, n, p, x):
    # Recurrence raising p by 1.
    return Dist((-j + n*p + n + S(1))/(a*(-j + n)*(p + S(1))), Int(x**(-j)*(a*x**j + b*x**n)**(p + S(1)), x), x) - Simp(x**(S(1) - j)*(a*x**j + b*x**n)**(p + S(1))/(a*(-j + n)*(p + S(1))), x)
def replacement1693(a, b, j, n, p, x):
    # Recurrence shifting the x power by n - j.
    return -Dist(b*(-j + n*p + n + S(1))/(a*(j*p + S(1))), Int(x**(-j + n)*(a*x**j + b*x**n)**p, x), x) + Simp(x**(S(1) - j)*(a*x**j + b*x**n)**(p + S(1))/(a*(j*p + S(1))), x)
def replacement1694(a, b, j, n, p, x):
    # Recurrence lowering p by 1 (b-term variant).
    return -Dist(b*p*(-j + n)/(j*p + S(1)), Int(x**n*(a*x**j + b*x**n)**(p + S(-1)), x), x) + Simp(x*(a*x**j + b*x**n)**p/(j*p + S(1)), x)
def replacement1695(a, b, j, n, p, x):
    # Recurrence lowering p by 1 (a-term variant).
    return Dist(a*p*(-j + n)/(n*p + S(1)), Int(x**j*(a*x**j + b*x**n)**(p + S(-1)), x), x) + Simp(x*(a*x**j + b*x**n)**p/(n*p + S(1)), x)
def replacement1696(a, b, j, n, p, x):
    return -Dist((j*p + j - n + S(1))/(b*(-j + n)*(p + S(1))), Int(x**(-n)*(a*x**j + b*x**n)**(p + S(1)), x), x) + Simp(x**(S(1) - n)*(a*x**j + b*x**n)**(p + S(1))/(b*(-j + n)*(p + S(1))), x)
def replacement1697(a, b, j, n, p, x):
    return Dist((-j + n*p + n + S(1))/(a*(-j + n)*(p + S(1))), Int(x**(-j)*(a*x**j + b*x**n)**(p + S(1)), x), x) - Simp(x**(S(1) - j)*(a*x**j + b*x**n)**(p + S(1))/(a*(-j + n)*(p + S(1))), x)
def replacement1698(a, b, j, n, p, x):
    return Dist(a, Int(x**j*(a*x**j + b*x**n)**(p + S(-1)), x), x) + Simp(x*(a*x**j + b*x**n)**p/(p*(-j + n)), x)
def replacement1699(a, b, n, x):
    # 1/sqrt(a*x**2 + b*x**n): reduce via the substitution
    # u = x/sqrt(a*x**2 + b*x**n).
    return Dist(S(2)/(S(2) - n), Subst(Int(S(1)/(-a*x**S(2) + S(1)), x), x, x/sqrt(a*x**S(2) + b*x**n)), x)
def replacement1700(a, b, j, n, p, x):
    return Dist((-j + n*p + n + S(1))/(a*(-j + n)*(p + S(1))), Int(x**(-j)*(a*x**j + b*x**n)**(p + S(1)), x), x) - Simp(x**(S(1) - j)*(a*x**j + b*x**n)**(p + S(1))/(a*(-j + n)*(p + S(1))), x)
def replacement1701(a, b, j, n, x):
    return -Dist(a*(-j + S(2)*n + S(-2))/(b*(n + S(-2))), Int(x**(j - n)/sqrt(a*x**j + b*x**n), x), x) + Simp(-S(2)*x**(S(1) - n)*sqrt(a*x**j + b*x**n)/(b*(n + S(-2))), x)
def replacement1702(a, b, j, n, p, x):
    # Normalize (a*x**j + b*x**n)**p to x**(j*p)*(a + b*x**(n-j))**p using
    # FracPart/IntPart bookkeeping to keep branch choices consistent.
    return Dist(x**(-j*FracPart(p))*(a + b*x**(-j + n))**(-FracPart(p))*(a*x**j + b*x**n)**FracPart(p), Int(x**(j*p)*(a + b*x**(-j + n))**p, x), x)
def replacement1703(a, b, j, n, p, u, x):
    # Undo the linear substitution u.
    return Dist(S(1)/Coefficient(u, x, S(1)), Subst(Int((a*x**j + b*x**n)**p, x), x, u), x)
def replacement1704(a, b, j, m, n, p, x):
    # Substitute t = x**n.
    return Dist(S(1)/n, Subst(Int((a*x**(j/n) + b*x)**p, x), x, x**n), x)
# Rules 1705-1725: rules for (c*x)**m * (a*x**j + b*x**n)**p -- the same
# binomial family as 1691-1704 but with a (c*x)**m prefactor.  Each rule is
# a generated recurrence/closed form; formulas are left verbatim.
def replacement1705(a, b, c, j, m, n, p, x):
    # Direct antiderivative.
    return -Simp(c**(j + S(-1))*(c*x)**(-j + m + S(1))*(a*x**j + b*x**n)**(p + S(1))/(a*(-j + n)*(p + S(1))), x)
def replacement1706(a, b, c, j, m, n, p, x):
    return Dist(c**j*(-j + m + n*p + n + S(1))/(a*(-j + n)*(p + S(1))), Int((c*x)**(-j + m)*(a*x**j + b*x**n)**(p + S(1)), x), x) - Simp(c**(j + S(-1))*(c*x)**(-j + m + S(1))*(a*x**j + b*x**n)**(p + S(1))/(a*(-j + n)*(p + S(1))), x)
def replacement1707(a, b, c, j, m, n, p, x):
    return -Dist(b*c**(j - n)*(-j + m + n*p + n + S(1))/(a*(j*p + m + S(1))), Int((c*x)**(-j + m + n)*(a*x**j + b*x**n)**p, x), x) + Simp(c**(j + S(-1))*(c*x)**(-j + m + S(1))*(a*x**j + b*x**n)**(p + S(1))/(a*(j*p + m + S(1))), x)
def replacement1708(a, b, c, j, m, n, p, x):
    # Normalize (c*x)**m -> c**m * x**m (IntPart/FracPart keeps branches).
    return Dist(c**IntPart(m)*x**(-FracPart(m))*(c*x)**FracPart(m), Int(x**m*(a*x**j + b*x**n)**p, x), x)
def replacement1709(a, b, j, m, n, p, x):
    # Substitute t = x**n.
    return Dist(S(1)/n, Subst(Int(x**(S(-1) + (m + S(1))/n)*(a*x**(j/n) + b*x)**p, x), x, x**n), x)
def replacement1710(a, b, c, j, m, n, p, x):
    return Dist(c**IntPart(m)*x**(-FracPart(m))*(c*x)**FracPart(m), Int(x**m*(a*x**j + b*x**n)**p, x), x)
def replacement1711(a, b, c, j, m, n, p, x):
    # Recurrence lowering p (b-term variant).
    return -Dist(b*c**(-n)*p*(-j + n)/(j*p + m + S(1)), Int((c*x)**(m + n)*(a*x**j + b*x**n)**(p + S(-1)), x), x) + Simp((c*x)**(m + S(1))*(a*x**j + b*x**n)**p/(c*(j*p + m + S(1))), x)
def replacement1712(a, b, c, j, m, n, p, x):
    # Recurrence lowering p (a-term variant).
    return Dist(a*c**(-j)*p*(-j + n)/(m + n*p + S(1)), Int((c*x)**(j + m)*(a*x**j + b*x**n)**(p + S(-1)), x), x) + Simp((c*x)**(m + S(1))*(a*x**j + b*x**n)**p/(c*(m + n*p + S(1))), x)
def replacement1713(a, b, c, j, m, n, p, x):
    return -Dist(c**n*(j*p + j + m - n + S(1))/(b*(-j + n)*(p + S(1))), Int((c*x)**(m - n)*(a*x**j + b*x**n)**(p + S(1)), x), x) + Simp(c**(n + S(-1))*(c*x)**(m - n + S(1))*(a*x**j + b*x**n)**(p + S(1))/(b*(-j + n)*(p + S(1))), x)
def replacement1714(a, b, c, j, m, n, p, x):
    return Dist(c**j*(-j + m + n*p + n + S(1))/(a*(-j + n)*(p + S(1))), Int((c*x)**(-j + m)*(a*x**j + b*x**n)**(p + S(1)), x), x) - Simp(c**(j + S(-1))*(c*x)**(-j + m + S(1))*(a*x**j + b*x**n)**(p + S(1))/(a*(-j + n)*(p + S(1))), x)
def replacement1715(a, b, c, j, m, n, p, x):
    return -Dist(a*c**(-j + n)*(j*p + j + m - n + S(1))/(b*(m + n*p + S(1))), Int((c*x)**(j + m - n)*(a*x**j + b*x**n)**p, x), x) + Simp(c**(n + S(-1))*(c*x)**(m - n + S(1))*(a*x**j + b*x**n)**(p + S(1))/(b*(m + n*p + S(1))), x)
def replacement1716(a, b, c, j, m, n, p, x):
    return -Dist(b*c**(j - n)*(-j + m + n*p + n + S(1))/(a*(j*p + m + S(1))), Int((c*x)**(-j + m + n)*(a*x**j + b*x**n)**p, x), x) + Simp(c**(j + S(-1))*(c*x)**(-j + m + S(1))*(a*x**j + b*x**n)**(p + S(1))/(a*(j*p + m + S(1))), x)
def replacement1717(a, b, j, m, n, p, x):
    # Substitute u = x**(m+1).
    return Dist(S(1)/(m + S(1)), Subst(Int((a*x**(j/(m + S(1))) + b*x**(n/(m + S(1))))**p, x), x, x**(m + S(1))), x)
def replacement1718(a, b, c, j, m, n, p, x):
    return Dist(c**IntPart(m)*x**(-FracPart(m))*(c*x)**FracPart(m), Int(x**m*(a*x**j + b*x**n)**p, x), x)
def replacement1719(a, b, c, j, m, n, p, x):
    return Dist(a*c**(-j), Int((c*x)**(j + m)*(a*x**j + b*x**n)**(p + S(-1)), x), x) + Simp((c*x)**(m + S(1))*(a*x**j + b*x**n)**p/(c*p*(-j + n)), x)
def replacement1720(a, b, j, m, n, x):
    # Reduce via u = x**(j/2)/sqrt(a*x**j + b*x**n).
    return Dist(-S(2)/(-j + n), Subst(Int(S(1)/(-a*x**S(2) + S(1)), x), x, x**(j/S(2))/sqrt(a*x**j + b*x**n)), x)
def replacement1721(a, b, c, j, m, n, p, x):
    return Dist(c**j*(-j + m + n*p + n + S(1))/(a*(-j + n)*(p + S(1))), Int((c*x)**(-j + m)*(a*x**j + b*x**n)**(p + S(1)), x), x) - Simp(c**(j + S(-1))*(c*x)**(-j + m + S(1))*(a*x**j + b*x**n)**(p + S(1))/(a*(-j + n)*(p + S(1))), x)
def replacement1722(a, b, c, j, m, n, p, x):
    return Dist(c**IntPart(m)*x**(-FracPart(m))*(c*x)**FracPart(m), Int(x**m*(a*x**j + b*x**n)**p, x), x)
def replacement1723(a, b, c, j, m, n, p, x):
    # Combined normalization of both the (c*x)**m prefactor and the
    # binomial power into x**(j*p + m)*(a + b*x**(n-j))**p form.
    return Dist(c**IntPart(m)*x**(-j*FracPart(p) - FracPart(m))*(c*x)**FracPart(m)*(a + b*x**(-j + n))**(-FracPart(p))*(a*x**j + b*x**n)**FracPart(p), Int(x**(j*p + m)*(a + b*x**(-j + n))**p, x), x)
def replacement1724(a, b, j, m, n, p, u, v, x):
    # Undo the linear substitution v (with u**m/v**m prefactor).
    return Dist(u**m*v**(-m)/Coefficient(v, x, S(1)), Subst(Int(x**m*(a*x**j + b*x**n)**p, x), x, v), x)
def replacement1725(a, b, c, d, j, k, m, n, p, q, x):
    # Two-term binomial times (c + d*x)**q: substitute t = x**n.
    return Dist(S(1)/n, Subst(Int(x**(S(-1) + (m + S(1))/n)*(c + d*x)**q*(a*x**(j/n) + b*x**(k/n))**p, x), x, x**n), x)
# Rules 1726-1733: (a*x**j + b*x**k)**p times (c + d*x**n)**q with an x**m
# prefactor.  Rules with parameter `jn` handle the special case k == j + n
# (the exponents differ by n).  Formulas are left verbatim.
def replacement1726(a, b, c, d, e, j, k, m, n, p, q, x):
    # Normalize (e*x)**m -> e**m * x**m.
    return Dist(e**IntPart(m)*x**(-FracPart(m))*(e*x)**FracPart(m), Int(x**m*(c + d*x**n)**q*(a*x**j + b*x**k)**p, x), x)
def replacement1727(jn, a, b, c, d, e, j, m, n, p, x):
    # Direct antiderivative for the k = j + n case.
    return Simp(c*e**(j + S(-1))*(e*x)**(-j + m + S(1))*(a*x**j + b*x**(j + n))**(p + S(1))/(a*(j*p + m + S(1))), x)
def replacement1728(jn, a, b, c, d, e, j, m, n, p, x):
    # Recurrence raising p by 1.
    return -Dist(e**j*(a*d*(j*p + m + S(1)) - b*c*(m + n + p*(j + n) + S(1)))/(a*b*n*(p + S(1))), Int((e*x)**(-j + m)*(a*x**j + b*x**(j + n))**(p + S(1)), x), x) - Simp(e**(j + S(-1))*(e*x)**(-j + m + S(1))*(-a*d + b*c)*(a*x**j + b*x**(j + n))**(p + S(1))/(a*b*n*(p + S(1))), x)
def replacement1729(jn, a, b, c, d, e, j, m, n, p, x):
    # Recurrence shifting the x power by +n.
    return Dist(e**(-n)*(a*d*(j*p + m + S(1)) - b*c*(m + n + p*(j + n) + S(1)))/(a*(j*p + m + S(1))), Int((e*x)**(m + n)*(a*x**j + b*x**(j + n))**p, x), x) + Simp(c*e**(j + S(-1))*(e*x)**(-j + m + S(1))*(a*x**j + b*x**(j + n))**(p + S(1))/(a*(j*p + m + S(1))), x)
def replacement1730(jn, a, b, c, d, e, j, m, n, p, x):
    # Recurrence eliminating the (c + d*x**n) factor.
    return -Dist((a*d*(j*p + m + S(1)) - b*c*(m + n + p*(j + n) + S(1)))/(b*(m + n + p*(j + n) + S(1))), Int((e*x)**m*(a*x**j + b*x**(j + n))**p, x), x) + Simp(d*e**(j + S(-1))*(e*x)**(-j + m + S(1))*(a*x**j + b*x**(j + n))**(p + S(1))/(b*(m + n + p*(j + n) + S(1))), x)
def replacement1731(a, b, c, d, j, k, m, n, p, q, x):
    # Substitute u = x**(m+1).
    return Dist(S(1)/(m + S(1)), Subst(Int((c + d*x**(n/(m + S(1))))**q*(a*x**(j/(m + S(1))) + b*x**(k/(m + S(1))))**p, x), x, x**(m + S(1))), x)
def replacement1732(a, b, c, d, e, j, k, m, n, p, q, x):
    return Dist(e**IntPart(m)*x**(-FracPart(m))*(e*x)**FracPart(m), Int(x**m*(c + d*x**n)**q*(a*x**j + b*x**k)**p, x), x)
def replacement1733(jn, a, b, c, d, e, j, m, n, p, q, x):
    # Normalize both prefactor and binomial power into
    # x**(j*p + m)*(a + b*x**n)**p form.
    return Dist(e**IntPart(m)*x**(-j*FracPart(p) - FracPart(m))*(e*x)**FracPart(m)*(a + b*x**n)**(-FracPart(p))*(a*x**j + b*x**(j + n))**FracPart(p), Int(x**(j*p + m)*(a + b*x**n)**p*(c + d*x**n)**q, x), x)
# Rules 1734-1744: polynomial Pq times (a*x**j + b*x**n)**p, possibly with
# an x**m / (c*x)**m prefactor.  Mix of substitution rules, GCD exponent
# reduction, degree-peeling, and terminal expansion fallbacks.
def With1734(Pq, a, b, j, n, p, x):
    # Rationalize a fractional n: substitute x -> x**d, d = Denominator(n).
    d = Denominator(n)
    return Dist(d, Subst(Int(x**(d + S(-1))*(a*x**(d*j) + b*x**(d*n))**p*ReplaceAll(SubstFor(x**n, Pq, x), Rule(x, x**(d*n))), x), x, x**(S(1)/d)), x)
def replacement1735(Pq, a, b, j, m, n, p, x):
    # Substitute t = x**n (Pq viewed as a polynomial in x**n).
    return Dist(S(1)/n, Subst(Int(x**(S(-1) + (m + S(1))/n)*(a*x**(j/n) + b*x)**p*SubstFor(x**n, Pq, x), x), x, x**n), x)
def replacement1736(Pq, a, b, c, j, m, n, p, x):
    # Normalize (c*x)**m -> c**m * x**m using Quotient/Mod bookkeeping.
    return Dist(c**(Quotient(m, sign(m))*sign(m))*x**(-Mod(m, sign(m)))*(c*x)**Mod(m, sign(m)), Int(Pq*x**m*(a*x**j + b*x**n)**p, x), x)
def replacement1737(Pq, a, b, c, j, m, n, p, x):
    return Dist(x**(-m)*(c*x)**m, Int(Pq*x**m*(a*x**j + b*x**n)**p, x), x)
# Matcher constraint for rule 1738: fires only when g = gcd(m+1, n) > 1,
# enabling the exponent-reducing substitution below.
def With1738(Pq, a, b, j, m, n, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    g = GCD(m + S(1), n)
    if Unequal(g, S(1)):
        return True
    return False
# Rule 1738 rewrite: substitute u = x**g to divide all exponents by g.
def replacement1738(Pq, a, b, j, m, n, p, x):
    g = GCD(m + S(1), n)
    return Dist(S(1)/g, Subst(Int(x**(S(-1) + (m + S(1))/g)*(a*x**(j/g) + b*x**(n/g))**p*ReplaceAll(Pq, Rule(x, x**(S(1)/g))), x), x, x**g), x)
# Matcher constraint for rule 1739: degree q of Pq must exceed n - 1, the
# recurrence denominator m + n*p + q + 1 must be nonzero, and p must satisfy
# one of the integrality conditions.
def With1739(Pq, a, b, c, j, m, n, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    q = Expon(Pq, x)
    # Pqq unused in the condition -- generated-code artifact, left verbatim.
    Pqq = Coeff(Pq, x, q)
    if And(Greater(q, n + S(-1)), Unequal(m + n*p + q + S(1), S(0)), Or(IntegerQ(S(2)*p), IntegerQ(p + (q + S(1))/(S(2)*n)))):
        return True
    return False
# Rule 1739 rewrite: peel the leading term of Pq off, reducing its degree.
def replacement1739(Pq, a, b, c, j, m, n, p, x):
    q = Expon(Pq, x)
    Pqq = Coeff(Pq, x, q)
    return Int((c*x)**m*(a*x**j + b*x**n)**p*ExpandToSum(Pq - Pqq*a*x**(-n + q)*(m - n + q + 1)/(b*(m + n*p + q + 1)) - Pqq*x**q, x), x) + Simp(Pqq*c**(n - q - 1)*(c*x)**(m - n + q + 1)*(a*x**j + b*x**n)**(p + 1)/(b*(m + n*p + q + 1)), x)
def replacement1740(Pq, a, b, j, m, n, p, x):
    # Substitute u = x**(m+1).
    return Dist(S(1)/(m + S(1)), Subst(Int((a*x**(j/(m + S(1))) + b*x**(n/(m + S(1))))**p*ReplaceAll(SubstFor(x**n, Pq, x), Rule(x, x**(n/(m + S(1))))), x), x, x**(m + S(1))), x)
def replacement1741(Pq, a, b, c, j, m, n, p, x):
    return Dist(c**(Quotient(m, sign(m))*sign(m))*x**(-Mod(m, sign(m)))*(c*x)**Mod(m, sign(m)), Int(Pq*x**m*(a*x**j + b*x**n)**p, x), x)
def replacement1742(Pq, a, b, c, j, m, n, p, x):
    return Dist(x**(-m)*(c*x)**m, Int(Pq*x**m*(a*x**j + b*x**n)**p, x), x)
def replacement1743(Pq, a, b, c, j, m, n, p, x):
    # Terminal fallback: expand term-by-term.
    return Int(ExpandIntegrand(Pq*(c*x)**m*(a*x**j + b*x**n)**p, x), x)
def replacement1744(Pq, a, b, j, n, p, x):
    # Terminal fallback without prefactor.
    return Int(ExpandIntegrand(Pq*(a*x**j + b*x**n)**p, x), x)
# Rules 1745-1751: powers of the depressed cubic a + b*x + d*x**3 (no x**2
# term).  Strategy: factor into linear/quadratic factors when possible
# (special discriminant cases, Factor, or explicit cube roots via Rt),
# otherwise attach a compensating Dist prefactor so the factored form is
# equivalent to the original power.
def replacement1745(a, b, d, p, x):
    # Special case where the cubic factors as (3a - b*x)*(3a + 2b*x)**2
    # up to the constant 27*a**2 (presumably guarded by the rule's
    # constraints elsewhere -- confirm against the generated matcher).
    return Dist(S(3)**(-S(3)*p)*a**(-S(2)*p), Int((S(3)*a - b*x)**p*(S(3)*a + S(2)*b*x)**(S(2)*p), x), x)
def replacement1746(a, b, d, p, x):
    # Integer p: just expand the power.
    return Int(ExpandToSum((a + b*x + d*x**S(3))**p, x), x)
# Matcher constraint for rule 1747: fires when Factor finds a nontrivial
# product decomposition of the cubic.
def With1747(a, b, d, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    u = Factor(a + b*x + d*x**S(3))
    if ProductQ(NonfreeFactors(u, x)):
        return True
    return False
# Rule 1747 rewrite: integrate the factored form, distributing p over the
# factors (DistributeDegree) and pulling constant factors out.
def replacement1747(a, b, d, p, x):
    u = Factor(a + b*x + d*x**S(3))
    return Dist(FreeFactors(u, x)**p, Int(DistributeDegree(NonfreeFactors(u, x), p), x), x)
def With1748(a, b, d, p, x):
    # Explicit factorization via the cubic's resolvent root r (Cardano-style
    # cube root); the three factors below are the shifted linear factors.
    r = Rt(-S(27)*a*d**S(2) + S(3)*sqrt(S(3))*d*sqrt(S(27)*a**S(2)*d**S(2) + S(4)*b**S(3)*d), S(3))
    return Dist(S(3)**(-S(3)*p)*d**(-S(2)*p), Int((-S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d*(S(1) - sqrt(S(3))*I) - S(2)**(S(1)/3)*r**S(2)*(S(1) + sqrt(S(3))*I))/(S(4)*r))**p*(-S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d*(S(1) + sqrt(S(3))*I) - S(2)**(S(1)/3)*r**S(2)*(S(1) - sqrt(S(3))*I))/(S(4)*r))**p*(S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d - S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**p, x), x)
def replacement1749(a, b, d, p, x):
    # Non-integer p variant of rule 1745: compensate with a Dist prefactor
    # so branch cuts of the fractional power are preserved.
    return Dist((S(3)*a - b*x)**(-p)*(S(3)*a + S(2)*b*x)**(-S(2)*p)*(a + b*x + d*x**S(3))**p, Int((S(3)*a - b*x)**p*(S(3)*a + S(2)*b*x)**(S(2)*p), x), x)
# Matcher constraint for rule 1750: like 1747 but phrased on the
# nonfree factors directly.
def With1750(a, b, d, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    u = NonfreeFactors(Factor(a + b*x + d*x**S(3)), x)
    if ProductQ(u):
        return True
    return False
# Rule 1750 rewrite: factored form with a compensating prefactor
# (non-integer p analogue of rule 1747).
def replacement1750(a, b, d, p, x):
    u = NonfreeFactors(Factor(a + b*x + d*x**S(3)), x)
    return Dist((a + b*x + d*x**S(3))**p/DistributeDegree(u, p), Int(DistributeDegree(u, p), x), x)
def With1751(a, b, d, p, x):
    # Non-integer p analogue of rule 1748: same explicit Cardano factors,
    # with a compensating Dist prefactor for branch consistency.
    r = Rt(-S(27)*a*d**S(2) + S(3)*sqrt(S(3))*d*sqrt(S(27)*a**S(2)*d**S(2) + S(4)*b**S(3)*d), S(3))
    return Dist((-S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d*(S(1) - sqrt(S(3))*I) - S(2)**(S(1)/3)*r**S(2)*(S(1) + sqrt(S(3))*I))/(S(4)*r))**(-p)*(-S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d*(S(1) + sqrt(S(3))*I) - S(2)**(S(1)/3)*r**S(2)*(S(1) - sqrt(S(3))*I))/(S(4)*r))**(-p)*(S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d - S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**(-p)*(a + b*x + d*x**S(3))**p, Int((-S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d*(S(1) - sqrt(S(3))*I) - S(2)**(S(1)/3)*r**S(2)*(S(1) + sqrt(S(3))*I))/(S(4)*r))**p*(-S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d*(S(1) + sqrt(S(3))*I) - S(2)**(S(1)/3)*r**S(2)*(S(1) - sqrt(S(3))*I))/(S(4)*r))**p*(S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d - S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**p, x), x)
# Rules 1752-1758: same depressed-cubic strategies as rules 1745-1751, with
# an additional linear factor (e + f*x)**m carried through each rewrite.
def replacement1752(a, b, d, e, f, m, p, x):
    return Dist(S(3)**(-S(3)*p)*a**(-S(2)*p), Int((S(3)*a - b*x)**p*(S(3)*a + S(2)*b*x)**(S(2)*p)*(e + f*x)**m, x), x)
def replacement1753(a, b, d, e, f, m, p, x):
    # Integer p: expand term-by-term.
    return Int(ExpandIntegrand((e + f*x)**m*(a + b*x + d*x**S(3))**p, x), x)
# Matcher constraint for rule 1754: Factor found a nontrivial decomposition.
def With1754(a, b, d, e, f, m, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    u = Factor(a + b*x + d*x**S(3))
    if ProductQ(NonfreeFactors(u, x)):
        return True
    return False
# Rule 1754 rewrite: integrate the factored cubic times (e + f*x)**m.
def replacement1754(a, b, d, e, f, m, p, x):
    u = Factor(a + b*x + d*x**S(3))
    return Dist(FreeFactors(u, x)**p, Int((e + f*x)**m*DistributeDegree(NonfreeFactors(u, x), p), x), x)
def With1755(a, b, d, e, f, m, p, x):
    # Explicit Cardano factorization (resolvent root r), times (e + f*x)**m.
    r = Rt(-S(27)*a*d**S(2) + S(3)*sqrt(S(3))*d*sqrt(S(27)*a**S(2)*d**S(2) + S(4)*b**S(3)*d), S(3))
    return Dist(S(3)**(-S(3)*p)*d**(-S(2)*p), Int((e + f*x)**m*(-S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d*(S(1) - sqrt(S(3))*I) - S(2)**(S(1)/3)*r**S(2)*(S(1) + sqrt(S(3))*I))/(S(4)*r))**p*(-S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d*(S(1) + sqrt(S(3))*I) - S(2)**(S(1)/3)*r**S(2)*(S(1) - sqrt(S(3))*I))/(S(4)*r))**p*(S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d - S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**p, x), x)
def replacement1756(a, b, d, e, f, m, p, x):
    # Non-integer p: compensating prefactor preserves branch choices.
    return Dist((S(3)*a - b*x)**(-p)*(S(3)*a + S(2)*b*x)**(-S(2)*p)*(a + b*x + d*x**S(3))**p, Int((S(3)*a - b*x)**p*(S(3)*a + S(2)*b*x)**(S(2)*p)*(e + f*x)**m, x), x)
# Matcher constraint for rule 1757: nonfree factors form a product.
def With1757(a, b, d, e, f, m, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    u = NonfreeFactors(Factor(a + b*x + d*x**S(3)), x)
    if ProductQ(u):
        return True
    return False
# Rule 1757 rewrite: factored form with compensating prefactor.
def replacement1757(a, b, d, e, f, m, p, x):
    u = NonfreeFactors(Factor(a + b*x + d*x**S(3)), x)
    return Dist((a + b*x + d*x**S(3))**p/DistributeDegree(u, p), Int((e + f*x)**m*DistributeDegree(u, p), x), x)
def With1758(a, b, d, e, f, m, p, x):
    # Non-integer p analogue of rule 1755 with compensating prefactor.
    r = Rt(-S(27)*a*d**S(2) + S(3)*sqrt(S(3))*d*sqrt(S(27)*a**S(2)*d**S(2) + S(4)*b**S(3)*d), S(3))
    return Dist((-S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d*(S(1) - sqrt(S(3))*I) - S(2)**(S(1)/3)*r**S(2)*(S(1) + sqrt(S(3))*I))/(S(4)*r))**(-p)*(-S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d*(S(1) + sqrt(S(3))*I) - S(2)**(S(1)/3)*r**S(2)*(S(1) - sqrt(S(3))*I))/(S(4)*r))**(-p)*(S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d - S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**(-p)*(a + b*x + d*x**S(3))**p, Int((e + f*x)**m*(-S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d*(S(1) - sqrt(S(3))*I) - S(2)**(S(1)/3)*r**S(2)*(S(1) + sqrt(S(3))*I))/(S(4)*r))**p*(-S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d*(S(1) + sqrt(S(3))*I) - S(2)**(S(1)/3)*r**S(2)*(S(1) - sqrt(S(3))*I))/(S(4)*r))**p*(S(3)*d*x + S(2)**(S(1)/3)*(S(6)*b*d - S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**p, x), x)
# Rules 1759-1765: powers of the cubic a + c*x**2 + d*x**3 (no linear term).
# Same strategy family as rules 1745-1751: special-case factorization,
# plain expansion, Factor-based decomposition, explicit Cardano roots, and
# compensating-prefactor variants for non-integer p.
def replacement1759(a, c, d, p, x):
    # Special factorization (c - 3*d*x)*(2*c + 3*d*x)**2 up to constants.
    return -Dist(S(3)**(-S(3)*p)*d**(-S(2)*p), Int((c - S(3)*d*x)**p*(S(2)*c + S(3)*d*x)**(S(2)*p), x), x)
def replacement1760(a, c, d, p, x):
    # Integer p: expand the power.
    return Int(ExpandToSum((a + c*x**S(2) + d*x**S(3))**p, x), x)
# Matcher constraint for rule 1761: Factor found a nontrivial decomposition.
def With1761(a, c, d, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    u = Factor(a + c*x**S(2) + d*x**S(3))
    if ProductQ(NonfreeFactors(u, x)):
        return True
    return False
# Rule 1761 rewrite: integrate the factored form.
def replacement1761(a, c, d, p, x):
    u = Factor(a + c*x**S(2) + d*x**S(3))
    return Dist(FreeFactors(u, x)**p, Int(DistributeDegree(NonfreeFactors(u, x), p), x), x)
def With1762(a, c, d, p, x):
    # Explicit factorization via the cubic's resolvent root r.
    r = Rt(-S(27)*a*d**S(2) - S(2)*c**S(3) + S(3)*sqrt(S(3))*d*sqrt(S(27)*a**S(2)*d**S(2) + S(4)*a*c**S(3)), S(3))
    return Dist(S(3)**(-S(3)*p)*d**(-S(2)*p), Int((c + S(3)*d*x - S(2)**(S(1)/3)*(S(2)*c**S(2) + S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**p*(c + S(3)*d*x + S(2)**(S(1)/3)*(S(2)*c**S(2)*(S(1) - sqrt(S(3))*I) + S(2)**(S(1)/3)*r**S(2)*(S(1) + sqrt(S(3))*I))/(S(4)*r))**p*(c + S(3)*d*x + S(2)**(S(1)/3)*(S(2)*c**S(2)*(S(1) + sqrt(S(3))*I) + S(2)**(S(1)/3)*r**S(2)*(S(1) - sqrt(S(3))*I))/(S(4)*r))**p, x), x)
def replacement1763(a, c, d, p, x):
    # Non-integer p variant of rule 1759 with compensating prefactor.
    return Dist((c - S(3)*d*x)**(-p)*(S(2)*c + S(3)*d*x)**(-S(2)*p)*(a + c*x**S(2) + d*x**S(3))**p, Int((c - S(3)*d*x)**p*(S(2)*c + S(3)*d*x)**(S(2)*p), x), x)
# Matcher constraint for rule 1764: nonfree factors form a product.
def With1764(a, c, d, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    u = NonfreeFactors(Factor(a + c*x**S(2) + d*x**S(3)), x)
    if ProductQ(u):
        return True
    return False
# Rule 1764 rewrite: factored form with compensating prefactor.
def replacement1764(a, c, d, p, x):
    u = NonfreeFactors(Factor(a + c*x**S(2) + d*x**S(3)), x)
    return Dist((a + c*x**S(2) + d*x**S(3))**p/DistributeDegree(u, p), Int(DistributeDegree(u, p), x), x)
def With1765(a, c, d, p, x):
    # Non-integer p analogue of rule 1762 with compensating prefactor.
    r = Rt(-S(27)*a*d**S(2) - S(2)*c**S(3) + S(3)*sqrt(S(3))*d*sqrt(S(27)*a**S(2)*d**S(2) + S(4)*a*c**S(3)), S(3))
    return Dist((a + c*x**S(2) + d*x**S(3))**p*(c + S(3)*d*x - S(2)**(S(1)/3)*(S(2)*c**S(2) + S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**(-p)*(c + S(3)*d*x + S(2)**(S(1)/3)*(S(2)*c**S(2)*(S(1) - sqrt(S(3))*I) + S(2)**(S(1)/3)*r**S(2)*(S(1) + sqrt(S(3))*I))/(S(4)*r))**(-p)*(c + S(3)*d*x + S(2)**(S(1)/3)*(S(2)*c**S(2)*(S(1) + sqrt(S(3))*I) + S(2)**(S(1)/3)*r**S(2)*(S(1) - sqrt(S(3))*I))/(S(4)*r))**(-p), Int((c + S(3)*d*x - S(2)**(S(1)/3)*(S(2)*c**S(2) + S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**p*(c + S(3)*d*x + S(2)**(S(1)/3)*(S(2)*c**S(2)*(S(1) - sqrt(S(3))*I) + S(2)**(S(1)/3)*r**S(2)*(S(1) + sqrt(S(3))*I))/(S(4)*r))**p*(c + S(3)*d*x + S(2)**(S(1)/3)*(S(2)*c**S(2)*(S(1) + sqrt(S(3))*I) + S(2)**(S(1)/3)*r**S(2)*(S(1) - sqrt(S(3))*I))/(S(4)*r))**p, x), x)
# Rules 1766-1772: same a + c*x**2 + d*x**3 strategies as rules 1759-1765,
# with an additional linear factor (e + f*x)**m carried through.
def replacement1766(a, c, d, e, f, m, p, x):
    return -Dist(S(3)**(-S(3)*p)*d**(-S(2)*p), Int((c - S(3)*d*x)**p*(S(2)*c + S(3)*d*x)**(S(2)*p)*(e + f*x)**m, x), x)
def replacement1767(a, c, d, e, f, m, p, x):
    # Integer p: expand term-by-term.
    return Int(ExpandIntegrand((e + f*x)**m*(a + c*x**S(2) + d*x**S(3))**p, x), x)
# Matcher constraint for rule 1768: Factor found a nontrivial decomposition.
def With1768(a, c, d, e, f, m, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    u = Factor(a + c*x**S(2) + d*x**S(3))
    if ProductQ(NonfreeFactors(u, x)):
        return True
    return False
# Rule 1768 rewrite: integrate the factored cubic times (e + f*x)**m.
def replacement1768(a, c, d, e, f, m, p, x):
    u = Factor(a + c*x**S(2) + d*x**S(3))
    return Dist(FreeFactors(u, x)**p, Int((e + f*x)**m*DistributeDegree(NonfreeFactors(u, x), p), x), x)
def With1769(a, c, d, e, f, m, p, x):
    # Explicit Cardano factorization (resolvent root r), times (e + f*x)**m.
    r = Rt(-S(27)*a*d**S(2) - S(2)*c**S(3) + S(3)*sqrt(S(3))*d*sqrt(S(27)*a**S(2)*d**S(2) + S(4)*a*c**S(3)), S(3))
    return Dist(S(3)**(-S(3)*p)*d**(-S(2)*p), Int((e + f*x)**m*(c + S(3)*d*x - S(2)**(S(1)/3)*(S(2)*c**S(2) + S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**p*(c + S(3)*d*x + S(2)**(S(1)/3)*(S(2)*c**S(2)*(S(1) - sqrt(S(3))*I) + S(2)**(S(1)/3)*r**S(2)*(S(1) + sqrt(S(3))*I))/(S(4)*r))**p*(c + S(3)*d*x + S(2)**(S(1)/3)*(S(2)*c**S(2)*(S(1) + sqrt(S(3))*I) + S(2)**(S(1)/3)*r**S(2)*(S(1) - sqrt(S(3))*I))/(S(4)*r))**p, x), x)
def replacement1770(a, c, d, e, f, m, p, x):
    # Non-integer p: compensating prefactor preserves branch choices.
    return Dist((c - S(3)*d*x)**(-p)*(S(2)*c + S(3)*d*x)**(-S(2)*p)*(a + c*x**S(2) + d*x**S(3))**p, Int((c - S(3)*d*x)**p*(S(2)*c + S(3)*d*x)**(S(2)*p)*(e + f*x)**m, x), x)
# Matcher constraint for rule 1771: nonfree factors form a product.
def With1771(a, c, d, e, f, m, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    u = NonfreeFactors(Factor(a + c*x**S(2) + d*x**S(3)), x)
    if ProductQ(u):
        return True
    return False
# Rule 1771 rewrite: factored form with compensating prefactor.
def replacement1771(a, c, d, e, f, m, p, x):
    u = NonfreeFactors(Factor(a + c*x**S(2) + d*x**S(3)), x)
    return Dist((a + c*x**S(2) + d*x**S(3))**p/DistributeDegree(u, p), Int((e + f*x)**m*DistributeDegree(u, p), x), x)
def With1772(a, c, d, e, f, m, p, x):
    # Non-integer p analogue of rule 1769 with compensating prefactor.
    r = Rt(-S(27)*a*d**S(2) - S(2)*c**S(3) + S(3)*sqrt(S(3))*d*sqrt(S(27)*a**S(2)*d**S(2) + S(4)*a*c**S(3)), S(3))
    return Dist((a + c*x**S(2) + d*x**S(3))**p*(c + S(3)*d*x - S(2)**(S(1)/3)*(S(2)*c**S(2) + S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**(-p)*(c + S(3)*d*x + S(2)**(S(1)/3)*(S(2)*c**S(2)*(S(1) - sqrt(S(3))*I) + S(2)**(S(1)/3)*r**S(2)*(S(1) + sqrt(S(3))*I))/(S(4)*r))**(-p)*(c + S(3)*d*x + S(2)**(S(1)/3)*(S(2)*c**S(2)*(S(1) + sqrt(S(3))*I) + S(2)**(S(1)/3)*r**S(2)*(S(1) - sqrt(S(3))*I))/(S(4)*r))**(-p), Int((e + f*x)**m*(c + S(3)*d*x - S(2)**(S(1)/3)*(S(2)*c**S(2) + S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**p*(c + S(3)*d*x + S(2)**(S(1)/3)*(S(2)*c**S(2)*(S(1) - sqrt(S(3))*I) + S(2)**(S(1)/3)*r**S(2)*(S(1) + sqrt(S(3))*I))/(S(4)*r))**p*(c + S(3)*d*x + S(2)**(S(1)/3)*(S(2)*c**S(2)*(S(1) + sqrt(S(3))*I) + S(2)**(S(1)/3)*r**S(2)*(S(1) - sqrt(S(3))*I))/(S(4)*r))**p, x), x)
# Rules 1773-1784: powers of the full cubic a + b*x + c*x**2 + d*x**3.
# Strategies: perfect-cube special case, Tschirnhaus shift x -> x - c/(3d)
# to remove the quadratic term, Factor-based decomposition, explicit
# cube-root (Rt) factorizations, and compensating-prefactor variants for
# non-integer p.  1784 is the generic ExpandToSum fallback.
def replacement1773(a, b, c, d, p, x):
    # Perfect-cube case: cubic proportional to (b + c*x)**3 up to 27*b*c.
    return Dist(S(3)**(-p)*b**(-p)*c**(-p), Int((b + c*x)**(S(3)*p), x), x)
def replacement1774(a, b, c, d, p, x):
    # Depress the cubic by substituting x -> x + c/(3*d).
    return Dist(S(3)**(-p)*b**(-p)*c**(-p), Subst(Int((S(3)*a*b*c - b**S(3) + c**S(3)*x**S(3))**p, x), x, c/(S(3)*d) + x), x)
def With1775(a, b, c, d, p, x):
    # Explicit three-factor split via the cube root r = Rt(c**3 - 3*b*c*d, 3).
    r = Rt(-S(3)*b*c*d + c**S(3), S(3))
    return Dist(S(3)**(-p)*b**(-p)*c**(-p), Int((b + x*(c - r))**p*(b + x*(c + r*(S(1) - sqrt(S(3))*I)/S(2)))**p*(b + x*(c + r*(S(1) + sqrt(S(3))*I)/S(2)))**p, x), x)
def replacement1776(a, b, c, d, p, x):
    # Integer p: expand the power.
    return Int(ExpandToSum((a + b*x + c*x**S(2) + d*x**S(3))**p, x), x)
# Matcher constraint for rule 1777: Factor found a nontrivial decomposition.
def With1777(a, b, c, d, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    u = Factor(a + b*x + c*x**S(2) + d*x**S(3))
    if ProductQ(NonfreeFactors(u, x)):
        return True
    return False
# Rule 1777 rewrite: integrate the factored form.
def replacement1777(a, b, c, d, p, x):
    u = Factor(a + b*x + c*x**S(2) + d*x**S(3))
    return Dist(FreeFactors(u, x)**p, Int(DistributeDegree(NonfreeFactors(u, x), p), x), x)
def replacement1778(a, b, c, d, p, x):
    # Depress the cubic (shift x -> x + c/(3*d)) in fully expanded form.
    return Dist(S(3)**(-S(3)*p)*d**(-S(2)*p), Subst(Int((S(27)*a*d**S(2) - S(9)*b*c*d + S(2)*c**S(3) + S(27)*d**S(3)*x**S(3) - S(9)*d*x*(-S(3)*b*d + c**S(2)))**p, x), x, c/(S(3)*d) + x), x)
def replacement1779(a, b, c, d, p, x):
    # Non-integer p perfect-cube case with compensating prefactor.
    return Dist((b + c*x)**(-S(3)*p)*(a + b*x + c*x**S(2) + d*x**S(3))**p, Int((b + c*x)**(S(3)*p), x), x)
def With1780(a, b, c, d, p, x):
    # Non-integer p three-factor split using r = Rt(b**3 - 3*a*b*c, 3),
    # with compensating prefactor for branch consistency.
    r = Rt(-S(3)*a*b*c + b**S(3), S(3))
    return Dist((b + c*x - r)**(-p)*(b + c*x + r*(S(1) - sqrt(S(3))*I)/S(2))**(-p)*(b + c*x + r*(S(1) + sqrt(S(3))*I)/S(2))**(-p)*(a + b*x + c*x**S(2) + d*x**S(3))**p, Int((b + c*x - r)**p*(b + c*x + r*(S(1) - sqrt(S(3))*I)/S(2))**p*(b + c*x + r*(S(1) + sqrt(S(3))*I)/S(2))**p, x), x)
def With1781(a, b, c, d, p, x):
    # Non-integer p analogue of rule 1775 with compensating prefactor.
    r = Rt(-S(3)*b*c*d + c**S(3), S(3))
    return Dist((b + x*(c - r))**(-p)*(b + x*(c + r*(S(1) - sqrt(S(3))*I)/S(2)))**(-p)*(b + x*(c + r*(S(1) + sqrt(S(3))*I)/S(2)))**(-p)*(a + b*x + c*x**S(2) + d*x**S(3))**p, Int((b + x*(c - r))**p*(b + x*(c + r*(S(1) - sqrt(S(3))*I)/S(2)))**p*(b + x*(c + r*(S(1) + sqrt(S(3))*I)/S(2)))**p, x), x)
# Matcher constraint for rule 1782: nonfree factors form a product.
def With1782(a, b, c, d, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    u = NonfreeFactors(Factor(a + b*x + c*x**S(2) + d*x**S(3)), x)
    if ProductQ(u):
        return True
    return False
# Rule 1782 rewrite: factored form with compensating prefactor.
def replacement1782(a, b, c, d, p, x):
    u = NonfreeFactors(Factor(a + b*x + c*x**S(2) + d*x**S(3)), x)
    return Dist((a + b*x + c*x**S(2) + d*x**S(3))**p/DistributeDegree(u, p), Int(DistributeDegree(u, p), x), x)
def With1783(a, b, c, d, p, x):
    # Full-discriminant Cardano factorization with compensating prefactor.
    r = Rt(-S(27)*a*d**S(2) + S(9)*b*c*d - S(2)*c**S(3) + S(3)*sqrt(S(3))*d*sqrt(S(27)*a**S(2)*d**S(2) - S(18)*a*b*c*d + S(4)*a*c**S(3) + S(4)*b**S(3)*d - b**S(2)*c**S(2)), S(3))
    return Dist((c + S(3)*d*x - S(2)**(S(1)/3)*(-S(6)*b*d + S(2)*c**S(2) + S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**(-p)*(c + S(3)*d*x + S(2)**(S(1)/3)*(-S(6)*b*d*(S(1) - sqrt(S(3))*I) + S(2)*c**S(2)*(S(1) - sqrt(S(3))*I) + S(2)**(S(1)/3)*I*r**S(2)*(sqrt(S(3)) - I))/(S(4)*r))**(-p)*(c + S(3)*d*x + S(2)**(S(1)/3)*(-S(6)*b*d*(S(1) + sqrt(S(3))*I) + S(2)*c**S(2)*(S(1) + sqrt(S(3))*I) - S(2)**(S(1)/3)*I*r**S(2)*(sqrt(S(3)) + I))/(S(4)*r))**(-p)*(a + b*x + c*x**S(2) + d*x**S(3))**p, Int((c + S(3)*d*x - S(2)**(S(1)/3)*(-S(6)*b*d + S(2)*c**S(2) + S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**p*(c + S(3)*d*x + S(2)**(S(1)/3)*(-S(6)*b*d*(S(1) - sqrt(S(3))*I) + S(2)*c**S(2)*(S(1) - sqrt(S(3))*I) + S(2)**(S(1)/3)*I*r**S(2)*(sqrt(S(3)) - I))/(S(4)*r))**p*(c + S(3)*d*x + S(2)**(S(1)/3)*(-S(6)*b*d*(S(1) + sqrt(S(3))*I) + S(2)*c**S(2)*(S(1) + sqrt(S(3))*I) - S(2)**(S(1)/3)*I*r**S(2)*(sqrt(S(3)) + I))/(S(4)*r))**p, x), x)
def replacement1784(p, u, x):
    # Generic fallback: normalize u with ExpandToSum, then integrate u**p.
    return Int(ExpandToSum(u, x)**p, x)
# Rules 1785-1796: the full-cubic strategies of rules 1773-1783, each with
# an additional linear factor (e + f*x)**m carried through; 1796 is the
# generic two-sum fallback.
def replacement1785(a, b, c, d, e, f, m, p, x):
    # Perfect-cube case times (e + f*x)**m.
    return Dist(S(3)**(-p)*b**(-p)*c**(-p), Int((b + c*x)**(S(3)*p)*(e + f*x)**m, x), x)
def With1786(a, b, c, d, e, f, m, p, x):
    # Three-factor split via r = Rt(b**3 - 3*a*b*c, 3).
    r = Rt(-S(3)*a*b*c + b**S(3), S(3))
    return Dist(S(3)**(-p)*b**(-p)*c**(-p), Int((e + f*x)**m*(b + c*x - r)**p*(b + c*x + r*(S(1) - sqrt(S(3))*I)/S(2))**p*(b + c*x + r*(S(1) + sqrt(S(3))*I)/S(2))**p, x), x)
def With1787(a, b, c, d, e, f, m, p, x):
    # Three-factor split via r = Rt(c**3 - 3*b*c*d, 3).
    r = Rt(-S(3)*b*c*d + c**S(3), S(3))
    return Dist(S(3)**(-p)*b**(-p)*c**(-p), Int((b + x*(c - r))**p*(b + x*(c + r*(S(1) - sqrt(S(3))*I)/S(2)))**p*(b + x*(c + r*(S(1) + sqrt(S(3))*I)/S(2)))**p*(e + f*x)**m, x), x)
def replacement1788(a, b, c, d, e, f, m, p, x):
    # Integer p: expand term-by-term.
    return Int(ExpandIntegrand((e + f*x)**m*(a + b*x + c*x**S(2) + d*x**S(3))**p, x), x)
# Matcher constraint for rule 1789: Factor found a nontrivial decomposition.
def With1789(a, b, c, d, e, f, m, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    u = Factor(a + b*x + c*x**S(2) + d*x**S(3))
    if ProductQ(NonfreeFactors(u, x)):
        return True
    return False
# Rule 1789 rewrite: integrate the factored cubic times (e + f*x)**m.
def replacement1789(a, b, c, d, e, f, m, p, x):
    u = Factor(a + b*x + c*x**S(2) + d*x**S(3))
    return Dist(FreeFactors(u, x)**p, Int((e + f*x)**m*DistributeDegree(NonfreeFactors(u, x), p), x), x)
def replacement1790(a, b, c, d, e, f, m, p, x):
    # Depress the cubic (shift x -> x + c/(3*d)).
    # NOTE(review): e, f, m do not appear on the right-hand side -- the
    # generated formula drops the (e + f*x)**m factor here; presumably its
    # matcher constraints make that valid.  Confirm against the Rubi source.
    return Dist(S(3)**(-S(3)*p)*d**(-S(2)*p), Subst(Int((S(27)*a*d**S(2) - S(9)*b*c*d + S(2)*c**S(3) + S(27)*d**S(3)*x**S(3) - S(9)*d*x*(-S(3)*b*d + c**S(2)))**p, x), x, c/(S(3)*d) + x), x)
def replacement1791(a, b, c, d, e, f, m, p, x):
    # Non-integer p perfect-cube case with compensating prefactor.
    return Dist((b + c*x)**(-S(3)*p)*(a + b*x + c*x**S(2) + d*x**S(3))**p, Int((b + c*x)**(S(3)*p)*(e + f*x)**m, x), x)
def With1792(a, b, c, d, e, f, m, p, x):
    # Non-integer p analogue of rule 1786 with compensating prefactor.
    r = Rt(-S(3)*a*b*c + b**S(3), S(3))
    return Dist((b + c*x - r)**(-p)*(b + c*x + r*(S(1) - sqrt(S(3))*I)/S(2))**(-p)*(b + c*x + r*(S(1) + sqrt(S(3))*I)/S(2))**(-p)*(a + b*x + c*x**S(2) + d*x**S(3))**p, Int((e + f*x)**m*(b + c*x - r)**p*(b + c*x + r*(S(1) - sqrt(S(3))*I)/S(2))**p*(b + c*x + r*(S(1) + sqrt(S(3))*I)/S(2))**p, x), x)
def With1793(a, b, c, d, e, f, m, p, x):
    # Non-integer p analogue of rule 1787 with compensating prefactor.
    r = Rt(-S(3)*b*c*d + c**S(3), S(3))
    return Dist((b + x*(c - r))**(-p)*(b + x*(c + r*(S(1) - sqrt(S(3))*I)/S(2)))**(-p)*(b + x*(c + r*(S(1) + sqrt(S(3))*I)/S(2)))**(-p)*(a + b*x + c*x**S(2) + d*x**S(3))**p, Int((b + x*(c - r))**p*(b + x*(c + r*(S(1) - sqrt(S(3))*I)/S(2)))**p*(b + x*(c + r*(S(1) + sqrt(S(3))*I)/S(2)))**p*(e + f*x)**m, x), x)
# Matcher constraint for rule 1794: nonfree factors form a product.
def With1794(a, b, c, d, e, f, m, p, x):
    # Guard against numeric x.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    u = NonfreeFactors(Factor(a + b*x + c*x**S(2) + d*x**S(3)), x)
    if ProductQ(u):
        return True
    return False
# Rule 1794 rewrite: factored form with compensating prefactor.
def replacement1794(a, b, c, d, e, f, m, p, x):
    u = NonfreeFactors(Factor(a + b*x + c*x**S(2) + d*x**S(3)), x)
    return Dist((a + b*x + c*x**S(2) + d*x**S(3))**p/DistributeDegree(u, p), Int((e + f*x)**m*DistributeDegree(u, p), x), x)
def With1795(a, b, c, d, e, f, m, p, x):
    # Full-discriminant Cardano factorization with compensating prefactor,
    # times (e + f*x)**m.
    r = Rt(-S(27)*a*d**S(2) + S(9)*b*c*d - S(2)*c**S(3) + S(3)*sqrt(S(3))*d*sqrt(S(27)*a**S(2)*d**S(2) - S(18)*a*b*c*d + S(4)*a*c**S(3) + S(4)*b**S(3)*d - b**S(2)*c**S(2)), S(3))
    return Dist((c + S(3)*d*x - S(2)**(S(1)/3)*(-S(6)*b*d + S(2)*c**S(2) + S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**(-p)*(c + S(3)*d*x + S(2)**(S(1)/3)*(-S(6)*b*d*(S(1) - sqrt(S(3))*I) + S(2)*c**S(2)*(S(1) - sqrt(S(3))*I) + S(2)**(S(1)/3)*I*r**S(2)*(sqrt(S(3)) - I))/(S(4)*r))**(-p)*(c + S(3)*d*x + S(2)**(S(1)/3)*(-S(6)*b*d*(S(1) + sqrt(S(3))*I) + S(2)*c**S(2)*(S(1) + sqrt(S(3))*I) - S(2)**(S(1)/3)*I*r**S(2)*(sqrt(S(3)) + I))/(S(4)*r))**(-p)*(a + b*x + c*x**S(2) + d*x**S(3))**p, Int((e + f*x)**m*(c + S(3)*d*x - S(2)**(S(1)/3)*(-S(6)*b*d + S(2)*c**S(2) + S(2)**(S(1)/3)*r**S(2))/(S(2)*r))**p*(c + S(3)*d*x + S(2)**(S(1)/3)*(-S(6)*b*d*(S(1) - sqrt(S(3))*I) + S(2)*c**S(2)*(S(1) - sqrt(S(3))*I) + S(2)**(S(1)/3)*I*r**S(2)*(sqrt(S(3)) - I))/(S(4)*r))**p*(c + S(3)*d*x + S(2)**(S(1)/3)*(-S(6)*b*d*(S(1) + sqrt(S(3))*I) + S(2)*c**S(2)*(S(1) + sqrt(S(3))*I) - S(2)**(S(1)/3)*I*r**S(2)*(sqrt(S(3)) + I))/(S(4)*r))**p, x), x)
def replacement1796(m, p, u, v, x):
    # Generic fallback: normalize both factors via ExpandToSum.
    return Int(ExpandToSum(u, x)**m*ExpandToSum(v, x)**p, x)
def replacement1797(a, b, c, d, e, f, g, x):
return Simp(a*f*ArcTan((a*b*x**S(2) + a*b + x*(S(4)*a**S(2) - S(2)*a*c + b**S(2)))/(S(2)*sqrt(a*x**S(4) + a + b*x**S(3) + b*x + c*x**S(2))*Rt(a**S(2)*(S(2)*a - c), S(2))))/(d*Rt(a**S(2)*(S(2)*a - c), S(2))), x)
def replacement1798(a, b, c, d, e, f, g, x):
return -Simp(a*f*atanh((a*b*x**S(2) + a*b + x*(S(4)*a**S(2) - S(2)*a*c + b**S(2)))/(S(2)*sqrt(a*x**S(4) + a + b*x**S(3) + b*x + c*x**S(2))*Rt(-a**S(2)*(S(2)*a - c), S(2))))/(d*Rt(-a**S(2)*(S(2)*a - c), S(2))), x)
def replacement1799(a, b, c, d, e, p, x):
return Subst(Int(SimplifyIntegrand((a - b*d/(S(8)*e) + d**S(4)/(S(256)*e**S(3)) + e*x**S(4) + x**S(2)*(c - S(3)*d**S(2)/(S(8)*e)))**p, x), x), x, d/(S(4)*e) + x)
def With1800(p, v, x):
    """Guard for replacement1800.

    Applies when v is a quartic in x whose coefficients satisfy
    8*b*e**2 - 4*c*d*e + d**3 == 0 with d != 0 — presumably the condition
    under which the shift x -> x + d/(4e) used by the companion rule
    removes the odd-degree terms (TODO confirm against the rule source).
    """
    # Symbolic rewrite rules never match plain numeric arguments.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    a = Coefficient(v, x, S(0))
    b = Coefficient(v, x, S(1))
    c = Coefficient(v, x, S(2))
    d = Coefficient(v, x, S(3))
    e = Coefficient(v, x, S(4))
    if And(ZeroQ(S(8)*b*e**S(2) - S(4)*c*d*e + d**S(3)), NonzeroQ(d)):
        return True
    return False
def replacement1800(p, v, x):
    # Extract the quartic's coefficients and integrate v**p by substituting
    # x -> x + d/(4e), which (given With1800's coefficient condition)
    # leaves a biquadratic-style integrand in the new variable.
    a = Coefficient(v, x, S(0))
    b = Coefficient(v, x, S(1))
    c = Coefficient(v, x, S(2))
    d = Coefficient(v, x, S(3))
    e = Coefficient(v, x, S(4))
    return Subst(Int(SimplifyIntegrand((a - b*d/(S(8)*e) + d**S(4)/(S(256)*e**S(3)) + e*x**S(4) + x**S(2)*(c - S(3)*d**S(2)/(S(8)*e)))**p, x), x), x, d/(S(4)*e) + x)
def replacement1801(a, b, c, d, e, p, u, x):
return Subst(Int(SimplifyIntegrand((a - b*d/(S(8)*e) + d**S(4)/(S(256)*e**S(3)) + e*x**S(4) + x**S(2)*(c - S(3)*d**S(2)/(S(8)*e)))**p*ReplaceAll(u, Rule(x, -d/(S(4)*e) + x)), x), x), x, d/(S(4)*e) + x)
def With1802(p, u, v, x):
if isinstance(x, (int, Integer, float, Float)):
return False
a = Coefficient(v, x, S(0))
b = Coefficient(v, x, S(1))
c = Coefficient(v, x, S(2))
d = Coefficient(v, x, S(3))
e = Coefficient(v, x, S(4))
if And(ZeroQ(S(8)*b*e**S(2) - S(4)*c*d*e + d**S(3)), NonzeroQ(d)):
return True
return False
def replacement1802(p, u, v, x):
a = Coefficient(v, x, S(0))
b = Coefficient(v, x, S(1))
c = Coefficient(v, x, S(2))
d = Coefficient(v, x, S(3))
e = Coefficient(v, x, S(4))
return Subst(Int(SimplifyIntegrand((a - b*d/(S(8)*e) + d**S(4)/(S(256)*e**S(3)) + e*x**S(4) + x**S(2)*(c - S(3)*d**S(2)/(S(8)*e)))**p*ReplaceAll(u, Rule(x, -d/(S(4)*e) + x)), x), x), x, d/(S(4)*e) + x)
def replacement1803(a, b, c, d, e, p, x):
return Dist(-S(16)*a**S(2), Subst(Int((a*(S(256)*a**S(4)*x**S(4) + S(256)*a**S(3)*e - S(64)*a**S(2)*b*d - S(32)*a**S(2)*x**S(2)*(-S(8)*a*c + S(3)*b**S(2)) + S(16)*a*b**S(2)*c - S(3)*b**S(4))/(-S(4)*a*x + b)**S(4))**p/(-S(4)*a*x + b)**S(2), x), x, S(1)/x + b/(S(4)*a)), x)
def With1804(p, v, x):
if isinstance(x, (int, Integer, float, Float)):
return False
a = Coefficient(v, x, S(0))
b = Coefficient(v, x, S(1))
c = Coefficient(v, x, S(2))
d = Coefficient(v, x, S(3))
e = Coefficient(v, x, S(4))
if And(NonzeroQ(a), NonzeroQ(b), ZeroQ(S(8)*a**S(2)*d - S(4)*a*b*c + b**S(3))):
return True
return False
def replacement1804(p, v, x):
a = Coefficient(v, x, S(0))
b = Coefficient(v, x, S(1))
c = Coefficient(v, x, S(2))
d = Coefficient(v, x, S(3))
e = Coefficient(v, x, S(4))
return Dist(-S(16)*a**S(2), Subst(Int((a*(S(256)*a**S(4)*x**S(4) + S(256)*a**S(3)*e - S(64)*a**S(2)*b*d - S(32)*a**S(2)*x**S(2)*(-S(8)*a*c + S(3)*b**S(2)) + S(16)*a*b**S(2)*c - S(3)*b**S(4))/(-S(4)*a*x + b)**S(4))**p/(-S(4)*a*x + b)**S(2), x), x, S(1)/x + b/(S(4)*a)), x)
def With1805(A, B, C, D, a, b, c, d, e, x):
q = sqrt(S(8)*a**S(2) - S(4)*a*c + b**S(2))
return -Dist(S(1)/q, Int((A*b - A*q - S(2)*B*a + S(2)*D*a + x*(S(2)*A*a - S(2)*C*a + D*b - D*q))/(S(2)*a*x**S(2) + S(2)*a + x*(b - q)), x), x) + Dist(S(1)/q, Int((A*b + A*q - S(2)*B*a + S(2)*D*a + x*(S(2)*A*a - S(2)*C*a + D*b + D*q))/(S(2)*a*x**S(2) + S(2)*a + x*(b + q)), x), x)
def With1806(A, B, D, a, b, c, d, e, x):
q = sqrt(S(8)*a**S(2) - S(4)*a*c + b**S(2))
return -Dist(S(1)/q, Int((A*b - A*q - S(2)*B*a + S(2)*D*a + x*(S(2)*A*a + D*b - D*q))/(S(2)*a*x**S(2) + S(2)*a + x*(b - q)), x), x) + Dist(S(1)/q, Int((A*b + A*q - S(2)*B*a + S(2)*D*a + x*(S(2)*A*a + D*b + D*q))/(S(2)*a*x**S(2) + S(2)*a + x*(b + q)), x), x)
def With1807(A, B, C, D, a, b, c, d, e, m, x):
q = sqrt(S(8)*a**S(2) - S(4)*a*c + b**S(2))
return -Dist(S(1)/q, Int(x**m*(A*b - A*q - S(2)*B*a + S(2)*D*a + x*(S(2)*A*a - S(2)*C*a + D*b - D*q))/(S(2)*a*x**S(2) + S(2)*a + x*(b - q)), x), x) + Dist(S(1)/q, Int(x**m*(A*b + A*q - S(2)*B*a + S(2)*D*a + x*(S(2)*A*a - S(2)*C*a + D*b + D*q))/(S(2)*a*x**S(2) + S(2)*a + x*(b + q)), x), x)
def With1808(A, B, D, a, b, c, d, e, m, x):
q = sqrt(S(8)*a**S(2) - S(4)*a*c + b**S(2))
return -Dist(S(1)/q, Int(x**m*(A*b - A*q - S(2)*B*a + S(2)*D*a + x*(S(2)*A*a + D*b - D*q))/(S(2)*a*x**S(2) + S(2)*a + x*(b - q)), x), x) + Dist(S(1)/q, Int(x**m*(A*b + A*q - S(2)*B*a + S(2)*D*a + x*(S(2)*A*a + D*b + D*q))/(S(2)*a*x**S(2) + S(2)*a + x*(b + q)), x), x)
def With1809(A, B, C, a, b, c, d, e, x):
q = Rt(C*(C*(-S(4)*c*e + d**S(2)) + S(2)*e*(-S(4)*A*e + B*d)), S(2))
return Simp(-S(2)*C**S(2)*atanh((-B*e + C*d + S(2)*C*e*x)/q)/q, x) + Simp(S(2)*C**S(2)*atanh(C*(S(12)*A*B*e - S(4)*A*C*d - S(3)*B**S(2)*d + S(4)*B*C*c + S(8)*C**S(2)*e*x**S(3) + S(4)*C*x**S(2)*(-B*e + S(2)*C*d) + S(4)*C*x*(S(2)*A*e - B*d + S(2)*C*c))/(q*(-S(4)*A*C + B**S(2))))/q, x)
def With1810(A, C, a, b, c, d, e, x):
q = Rt(C*(-S(8)*A*e**S(2) + C*(-S(4)*c*e + d**S(2))), S(2))
return Simp(-S(2)*C**S(2)*atanh(C*(d + S(2)*e*x)/q)/q, x) + Simp(S(2)*C**S(2)*atanh(C*(A*d - S(2)*C*d*x**S(2) - S(2)*C*e*x**S(3) - S(2)*x*(A*e + C*c))/(A*q))/q, x)
def With1811(A, B, C, a, b, c, d, e, x):
q = Rt(-C*(C*(-S(4)*c*e + d**S(2)) + S(2)*e*(-S(4)*A*e + B*d)), S(2))
return Simp(S(2)*C**S(2)*ArcTan((-B*e + C*d + S(2)*C*e*x)/q)/q, x) - Simp(S(2)*C**S(2)*ArcTan(C*(S(12)*A*B*e - S(4)*A*C*d - S(3)*B**S(2)*d + S(4)*B*C*c + S(8)*C**S(2)*e*x**S(3) + S(4)*C*x**S(2)*(-B*e + S(2)*C*d) + S(4)*C*x*(S(2)*A*e - B*d + S(2)*C*c))/(q*(-S(4)*A*C + B**S(2))))/q, x)
def With1812(A, C, a, b, c, d, e, x):
q = Rt(-C*(-S(8)*A*e**S(2) + C*(-S(4)*c*e + d**S(2))), S(2))
return Simp(S(2)*C**S(2)*ArcTan((C*d + S(2)*C*e*x)/q)/q, x) - Simp(S(2)*C**S(2)*ArcTan(-C*(-A*d + S(2)*C*d*x**S(2) + S(2)*C*e*x**S(3) + S(2)*x*(A*e + C*c))/(A*q))/q, x)
def replacement1813(A, B, C, D, a, b, c, d, e, x):
return -Dist(S(1)/(S(4)*e), Int((-S(4)*A*e + D*b + x**S(2)*(-S(4)*C*e + S(3)*D*d) + S(2)*x*(-S(2)*B*e + D*c))/(a + b*x + c*x**S(2) + d*x**S(3) + e*x**S(4)), x), x) + Simp(D*log(a + b*x + c*x**S(2) + d*x**S(3) + e*x**S(4))/(S(4)*e), x)
def replacement1814(A, B, D, a, b, c, d, e, x):
return -Dist(S(1)/(S(4)*e), Int((-S(4)*A*e + D*b + S(3)*D*d*x**S(2) + S(2)*x*(-S(2)*B*e + D*c))/(a + b*x + c*x**S(2) + d*x**S(3) + e*x**S(4)), x), x) + Simp(D*log(a + b*x + c*x**S(2) + d*x**S(3) + e*x**S(4))/(S(4)*e), x)
def replacement1815(a, b, c, d, e, f, u, x):
return -Dist(a/(f*(-a*d + b*c)), Int(u*sqrt(c + d*x)/x, x), x) + Dist(c/(e*(-a*d + b*c)), Int(u*sqrt(a + b*x)/x, x), x)
def replacement1816(a, b, c, d, e, f, u, x):
return Dist(b/(f*(-a*d + b*c)), Int(u*sqrt(c + d*x), x), x) - Dist(d/(e*(-a*d + b*c)), Int(u*sqrt(a + b*x), x), x)
def replacement1817(a, b, c, d, e, f, u, x):
return Dist(e, Int(u*sqrt(a + b*x)/(a*e**S(2) - c*f**S(2) + x*(b*e**S(2) - d*f**S(2))), x), x) - Dist(f, Int(u*sqrt(c + d*x)/(a*e**S(2) - c*f**S(2) + x*(b*e**S(2) - d*f**S(2))), x), x)
def replacement1818(a, b, c, d, n, p, u, x):
return Dist(S(1)/(a*c), Int(u*sqrt(a + b*x**(S(2)*n)), x), x) - Dist(b/(a*d), Int(u*x**n, x), x)
def replacement1819(a, b, c, d, m, n, p, x):
return Dist(c, Int(x**m*sqrt(a + b*x**(S(2)*n))/(a*c**S(2) + x**(S(2)*n)*(b*c**S(2) - d**S(2))), x), x) - Dist(d, Int(x**(m + n)/(a*c**S(2) + x**(S(2)*n)*(b*c**S(2) - d**S(2))), x), x)
def With1820(a, b, d, e, f, x):
r = Numerator(Rt(a/b, S(3)))
s = Denominator(Rt(a/b, S(3)))
return Dist(r/(S(3)*a), Int(S(1)/((r + s*x)*sqrt(d + e*x + f*x**S(2))), x), x) + Dist(r/(S(3)*a), Int((S(2)*r - s*x)/(sqrt(d + e*x + f*x**S(2))*(r**S(2) - r*s*x + s**S(2)*x**S(2))), x), x)
def With1821(a, b, d, f, x):
r = Numerator(Rt(a/b, S(3)))
s = Denominator(Rt(a/b, S(3)))
return Dist(r/(S(3)*a), Int(S(1)/(sqrt(d + f*x**S(2))*(r + s*x)), x), x) + Dist(r/(S(3)*a), Int((S(2)*r - s*x)/(sqrt(d + f*x**S(2))*(r**S(2) - r*s*x + s**S(2)*x**S(2))), x), x)
def With1822(a, b, d, e, f, x):
r = Numerator(Rt(-a/b, S(3)))
s = Denominator(Rt(-a/b, S(3)))
return Dist(r/(S(3)*a), Int(S(1)/((r - s*x)*sqrt(d + e*x + f*x**S(2))), x), x) + Dist(r/(S(3)*a), Int((S(2)*r + s*x)/(sqrt(d + e*x + f*x**S(2))*(r**S(2) + r*s*x + s**S(2)*x**S(2))), x), x)
def With1823(a, b, d, f, x):
r = Numerator(Rt(-a/b, S(3)))
s = Denominator(Rt(-a/b, S(3)))
return Dist(r/(S(3)*a), Int(S(1)/(sqrt(d + f*x**S(2))*(r - s*x)), x), x) + Dist(r/(S(3)*a), Int((S(2)*r + s*x)/(sqrt(d + f*x**S(2))*(r**S(2) + r*s*x + s**S(2)*x**S(2))), x), x)
def replacement1824(a, b, c, d, e, x):
return Dist(d, Int(S(1)/((d**S(2) - e**S(2)*x**S(2))*sqrt(a + b*x**S(2) + c*x**S(4))), x), x) - Dist(e, Int(x/((d**S(2) - e**S(2)*x**S(2))*sqrt(a + b*x**S(2) + c*x**S(4))), x), x)
def replacement1825(a, c, d, e, x):
return Dist(d, Int(S(1)/(sqrt(a + c*x**S(4))*(d**S(2) - e**S(2)*x**S(2))), x), x) - Dist(e, Int(x/(sqrt(a + c*x**S(4))*(d**S(2) - e**S(2)*x**S(2))), x), x)
def replacement1826(a, b, c, d, e, x):
return -Dist(c/(a*e**S(4) + b*d**S(2)*e**S(2) + c*d**S(4)), Int((d**S(2) - e**S(2)*x**S(2))/sqrt(a + b*x**S(2) + c*x**S(4)), x), x) - Simp(e**S(3)*sqrt(a + b*x**S(2) + c*x**S(4))/((d + e*x)*(a*e**S(4) + b*d**S(2)*e**S(2) + c*d**S(4))), x)
def replacement1827(a, b, c, d, e, x):
return -Dist(c/(a*e**S(4) + b*d**S(2)*e**S(2) + c*d**S(4)), Int((d**S(2) - e**S(2)*x**S(2))/sqrt(a + b*x**S(2) + c*x**S(4)), x), x) + Dist((b*d*e**S(2) + S(2)*c*d**S(3))/(a*e**S(4) + b*d**S(2)*e**S(2) + c*d**S(4)), Int(S(1)/((d + e*x)*sqrt(a + b*x**S(2) + c*x**S(4))), x), x) - Simp(e**S(3)*sqrt(a + b*x**S(2) + c*x**S(4))/((d + e*x)*(a*e**S(4) + b*d**S(2)*e**S(2) + c*d**S(4))), x)
def replacement1828(a, c, d, e, x):
return -Dist(c/(a*e**S(4) + c*d**S(4)), Int((d**S(2) - e**S(2)*x**S(2))/sqrt(a + c*x**S(4)), x), x) + Dist(S(2)*c*d**S(3)/(a*e**S(4) + c*d**S(4)), Int(S(1)/(sqrt(a + c*x**S(4))*(d + e*x)), x), x) - Simp(e**S(3)*sqrt(a + c*x**S(4))/((d + e*x)*(a*e**S(4) + c*d**S(4))), x)
def replacement1829(A, B, a, b, c, d, e, x):
return Dist(A, Subst(Int(S(1)/(d - x**S(2)*(-S(2)*a*e + b*d)), x), x, x/sqrt(a + b*x**S(2) + c*x**S(4))), x)
def replacement1830(A, B, a, c, d, e, x):
return Dist(A, Subst(Int(S(1)/(S(2)*a*e*x**S(2) + d), x), x, x/sqrt(a + c*x**S(4))), x)
def replacement1831(A, B, a, b, c, d, e, f, x):
return Dist(A, Subst(Int(S(1)/(d - x**S(2)*(-a*e + b*d)), x), x, x/sqrt(a + b*x**S(2) + c*x**S(4))), x)
def replacement1832(A, B, a, c, d, e, f, x):
return Dist(A, Subst(Int(S(1)/(a*e*x**S(2) + d), x), x, x/sqrt(a + c*x**S(4))), x)
def replacement1833(A, B, a, b, c, d, f, x):
return Dist(A, Subst(Int(S(1)/(-b*d*x**S(2) + d), x), x, x/sqrt(a + b*x**S(2) + c*x**S(4))), x)
def replacement1834(a, b, c, d, e, x):
return Dist(a/d, Subst(Int(S(1)/(-S(2)*b*x**S(2) + x**S(4)*(-S(4)*a*c + b**S(2)) + S(1)), x), x, x/sqrt(a + b*x**S(2) + c*x**S(4))), x)
def With1835(a, b, c, d, e, x):
q = sqrt(-S(4)*a*c + b**S(2))
return Simp(sqrt(S(2))*a*sqrt(-b + q)*atanh(sqrt(S(2))*x*sqrt(-b + q)*(b + S(2)*c*x**S(2) + q)/(S(4)*sqrt(a + b*x**S(2) + c*x**S(4))*Rt(-a*c, S(2))))/(S(4)*d*Rt(-a*c, S(2))), x) - Simp(sqrt(S(2))*a*sqrt(b + q)*ArcTan(sqrt(S(2))*x*sqrt(b + q)*(b + S(2)*c*x**S(2) - q)/(S(4)*sqrt(a + b*x**S(2) + c*x**S(4))*Rt(-a*c, S(2))))/(S(4)*d*Rt(-a*c, S(2))), x)
def replacement1836(a, b, c, d, e, f, x):
return Dist(a, Int(S(1)/((a**S(2) - b**S(2)*x**S(2))*sqrt(c + d*x**S(2))*sqrt(e + f*x**S(2))), x), x) - Dist(b, Int(x/((a**S(2) - b**S(2)*x**S(2))*sqrt(c + d*x**S(2))*sqrt(e + f*x**S(2))), x), x)
def replacement1837(a, b, c, d, e, f, g, h, x):
return Simp(S(2)*sqrt(d + e*x + f*sqrt(a + b*x + c*x**S(2)))*(S(9)*c**S(2)*f*g*h*x**S(2) + S(3)*c**S(2)*f*h**S(2)*x**S(3) + c*f*x*(a*h**S(2) - b*g*h + S(10)*c*g**S(2)) + f*(S(2)*a*b*h**S(2) - S(3)*a*c*g*h - S(2)*b**S(2)*g*h + S(5)*b*c*g**S(2)) - (-d*h + e*g)*sqrt(a + b*x + c*x**S(2))*(-S(2)*b*h + S(5)*c*g + c*h*x))/(S(15)*c**S(2)*f*(g + h*x)), x)
def replacement1838(f, g, h, j, k, m, n, u, v, x):
return Int((g + h*x)**m*(f*k*sqrt(ExpandToSum(v, x)) + ExpandToSum(f*j + u, x))**n, x)
def replacement1839(a, b, c, d, e, f, g, h, n, p, x):
return Dist(S(2), Subst(Int((g + h*x**n)**p*(d**S(2)*e + e*x**S(2) - f**S(2)*(-a*e + b*d) - x*(-b*f**S(2) + S(2)*d*e))/(b*f**S(2) - S(2)*d*e + S(2)*e*x)**S(2), x), x, d + e*x + f*sqrt(a + b*x + c*x**S(2))), x)
def replacement1840(a, c, d, e, f, g, h, n, p, x):
return Dist(S(1)/(S(2)*e), Subst(Int((g + h*x**n)**p*(a*f**S(2) + d**S(2) - S(2)*d*x + x**S(2))/(d - x)**S(2), x), x, d + e*x + f*sqrt(a + c*x**S(2))), x)
def replacement1841(f, g, h, n, p, u, v, x):
return Int((g + h*(f*sqrt(ExpandToSum(v, x)) + ExpandToSum(u, x))**n)**p, x)
def replacement1842(a, c, e, f, g, h, m, n, x):
return Dist(S(2)**(-m + S(-1))*e**(-m + S(-1)), Subst(Int(x**(-m + n + S(-2))*(a*f**S(2) + x**S(2))*(-a*f**S(2)*h + S(2)*e*g*x + h*x**S(2))**m, x), x, e*x + f*sqrt(a + c*x**S(2))), x)
def replacement1843(a, c, e, f, g, i, m, n, p, x):
return Dist(S(2)**(-S(2)*m - p + S(-1))*e**(-p + S(-1))*f**(-S(2)*m)*(i/c)**m, Subst(Int(x**(-S(2)*m + n - p + S(-2))*(-a*f**S(2) + x**S(2))**p*(a*f**S(2) + x**S(2))**(S(2)*m + S(1)), x), x, e*x + f*sqrt(a + c*x**S(2))), x)
def replacement1844(a, b, c, d, e, f, g, h, i, m, n, x):
return Dist(S(2)*f**(-S(2)*m)*(i/c)**m, Subst(Int(x**n*(b*f**S(2) - S(2)*d*e + S(2)*e*x)**(-S(2)*m + S(-2))*(d**S(2)*e + e*x**S(2) - f**S(2)*(-a*e + b*d) - x*(-b*f**S(2) + S(2)*d*e))**(S(2)*m + S(1)), x), x, d + e*x + f*sqrt(a + b*x + c*x**S(2))), x)
def replacement1845(a, c, d, e, f, g, i, m, n, x):
return Dist(S(2)**(-S(2)*m + S(-1))*f**(-S(2)*m)*(i/c)**m/e, Subst(Int(x**n*(-d + x)**(-S(2)*m + S(-2))*(a*f**S(2) + d**S(2) - S(2)*d*x + x**S(2))**(S(2)*m + S(1)), x), x, d + e*x + f*sqrt(a + c*x**S(2))), x)
def replacement1846(a, b, c, d, e, f, g, h, i, m, n, x):
return Dist((i/c)**(m + S(-1)/2)*sqrt(g + h*x + i*x**S(2))/sqrt(a + b*x + c*x**S(2)), Int((a + b*x + c*x**S(2))**m*(d + e*x + f*sqrt(a + b*x + c*x**S(2)))**n, x), x)
def replacement1847(a, c, d, e, f, g, i, m, n, x):
return Dist((i/c)**(m + S(-1)/2)*sqrt(g + i*x**S(2))/sqrt(a + c*x**S(2)), Int((a + c*x**S(2))**m*(d + e*x + f*sqrt(a + c*x**S(2)))**n, x), x)
def replacement1848(a, b, c, d, e, f, g, h, i, m, n, x):
return Dist((i/c)**(m + S(1)/2)*sqrt(a + b*x + c*x**S(2))/sqrt(g + h*x + i*x**S(2)), Int((a + b*x + c*x**S(2))**m*(d + e*x + f*sqrt(a + b*x + c*x**S(2)))**n, x), x)
def replacement1849(a, c, d, e, f, g, i, m, n, x):
return Dist((i/c)**(m + S(1)/2)*sqrt(a + c*x**S(2))/sqrt(g + i*x**S(2)), Int((a + c*x**S(2))**m*(d + e*x + f*sqrt(a + c*x**S(2)))**n, x), x)
def replacement1850(f, j, k, m, n, u, v, w, x):
return Int((f*k*sqrt(ExpandToSum(v, x)) + ExpandToSum(f*j + u, x))**n*ExpandToSum(w, x)**m, x)
def replacement1851(a, b, c, d, n, p, x):
return Dist(S(1)/a, Subst(Int(S(1)/(-c*x**S(2) + S(1)), x), x, x/sqrt(c*x**S(2) + d*(a + b*x**n)**(S(2)/n))), x)
def replacement1852(a, b, c, d, x):
return Simp(S(2)*a*x/sqrt(a + b*sqrt(c + d*x**S(2))), x) + Simp(S(2)*b**S(2)*d*x**S(3)/(S(3)*(a + b*sqrt(c + d*x**S(2)))**(S(3)/2)), x)
def replacement1853(a, b, c, d, x):
return Dist(sqrt(S(2))*b/a, Subst(Int(S(1)/sqrt(S(1) + x**S(2)/a), x), x, a*x + b*sqrt(c + d*x**S(2))), x)
def replacement1854(a, b, c, d, e, x):
return Int(sqrt(a*e*x**S(2) + b*e*x*sqrt(c + d*x**S(2)))/(x*sqrt(c + d*x**S(2))), x)
def replacement1855(a, b, c, d, x):
return Dist(d, Subst(Int(S(1)/(-S(2)*c*x**S(2) + S(1)), x), x, x/sqrt(c*x**S(2) + d*sqrt(a + b*x**S(4)))), x)
def replacement1856(a, b, c, d, e, m, x):
return Dist(S(1)/2 - I/S(2), Int((c + d*x)**m/sqrt(sqrt(a) - I*b*x**S(2)), x), x) + Dist(S(1)/2 + I/S(2), Int((c + d*x)**m/sqrt(sqrt(a) + I*b*x**S(2)), x), x)
def With1857(a, b, c, d, x):
q = Rt(b/a, S(3))
return Dist(d/(-c*q + d*(S(1) + sqrt(S(3)))), Int((q*x + S(1) + sqrt(S(3)))/(sqrt(a + b*x**S(3))*(c + d*x)), x), x) - Dist(q/(-c*q + d*(S(1) + sqrt(S(3)))), Int(S(1)/sqrt(a + b*x**S(3)), x), x)
def With1858(a, b, c, d, x):
q = Rt(-b/a, S(3))
return Dist(d/(c*q + d*(S(1) + sqrt(S(3)))), Int((-q*x + S(1) + sqrt(S(3)))/(sqrt(a + b*x**S(3))*(c + d*x)), x), x) + Dist(q/(c*q + d*(S(1) + sqrt(S(3)))), Int(S(1)/sqrt(a + b*x**S(3)), x), x)
def With1859(a, b, c, d, x):
q = Rt(-b/a, S(3))
return Dist(d/(c*q + d*(S(1) - sqrt(S(3)))), Int((-q*x - sqrt(S(3)) + S(1))/(sqrt(a + b*x**S(3))*(c + d*x)), x), x) + Dist(q/(c*q + d*(S(1) - sqrt(S(3)))), Int(S(1)/sqrt(a + b*x**S(3)), x), x)
def With1860(a, b, c, d, x):
q = Rt(b/a, S(3))
return Dist(d/(-c*q + d*(S(1) - sqrt(S(3)))), Int((q*x - sqrt(S(3)) + S(1))/(sqrt(a + b*x**S(3))*(c + d*x)), x), x) - Dist(q/(-c*q + d*(S(1) - sqrt(S(3)))), Int(S(1)/sqrt(a + b*x**S(3)), x), x)
def With1861(a, b, c, d, e, f, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Rt(b/a, S(3))
if ZeroQ(-e*q + f*(S(1) + sqrt(S(3)))):
return True
return False
def replacement1861(a, b, c, d, e, f, x):
q = Rt(b/a, S(3))
return Dist(S(4)*S(3)**(S(1)/4)*f*sqrt((q**S(2)*x**S(2) - q*x + S(1))/(q*x + S(1) + sqrt(S(3)))**S(2))*sqrt(S(2) - sqrt(S(3)))*(q*x + S(1))/(q*sqrt((q*x + S(1))/(q*x + S(1) + sqrt(S(3)))**S(2))*sqrt(a + b*x**S(3))), Subst(Int(S(1)/(sqrt(S(1) - x**S(2))*sqrt(x**S(2) - S(4)*sqrt(S(3)) + S(7))*(-c*q + d*(S(1) - sqrt(S(3))) + x*(-c*q + d*(S(1) + sqrt(S(3)))))), x), x, (-q*x + S(-1) + sqrt(S(3)))/(q*x + S(1) + sqrt(S(3)))), x)
def With1862(a, b, c, d, e, f, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Rt(b/a, S(3))
if NonzeroQ(-e*q + f*(S(1) + sqrt(S(3)))):
return True
return False
def replacement1862(a, b, c, d, e, f, x):
q = Rt(b/a, S(3))
return Dist((-c*f + d*e)/(-c*q + d*(S(1) + sqrt(S(3)))), Int((q*x + S(1) + sqrt(S(3)))/(sqrt(a + b*x**S(3))*(c + d*x)), x), x) + Dist((-e*q + f*(S(1) + sqrt(S(3))))/(-c*q + d*(S(1) + sqrt(S(3)))), Int(S(1)/sqrt(a + b*x**S(3)), x), x)
def With1863(a, b, c, d, e, f, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Rt(-b/a, S(3))
if ZeroQ(e*q + f*(S(1) + sqrt(S(3)))):
return True
return False
def replacement1863(a, b, c, d, e, f, x):
q = Rt(-b/a, S(3))
return Dist(-S(4)*S(3)**(S(1)/4)*f*sqrt((q**S(2)*x**S(2) + q*x + S(1))/(-q*x + S(1) + sqrt(S(3)))**S(2))*sqrt(S(2) - sqrt(S(3)))*(-q*x + S(1))/(q*sqrt((-q*x + S(1))/(-q*x + S(1) + sqrt(S(3)))**S(2))*sqrt(a + b*x**S(3))), Subst(Int(S(1)/(sqrt(S(1) - x**S(2))*sqrt(x**S(2) - S(4)*sqrt(S(3)) + S(7))*(c*q + d*(S(1) - sqrt(S(3))) + x*(c*q + d*(S(1) + sqrt(S(3)))))), x), x, (q*x + S(-1) + sqrt(S(3)))/(-q*x + S(1) + sqrt(S(3)))), x)
def With1864(a, b, c, d, e, f, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Rt(-b/a, S(3))
if NonzeroQ(e*q + f*(S(1) + sqrt(S(3)))):
return True
return False
def replacement1864(a, b, c, d, e, f, x):
q = Rt(-b/a, S(3))
return Dist((-c*f + d*e)/(c*q + d*(S(1) + sqrt(S(3)))), Int((-q*x + S(1) + sqrt(S(3)))/(sqrt(a + b*x**S(3))*(c + d*x)), x), x) + Dist((e*q + f*(S(1) + sqrt(S(3))))/(c*q + d*(S(1) + sqrt(S(3)))), Int(S(1)/sqrt(a + b*x**S(3)), x), x)
def With1865(a, b, c, d, e, f, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Rt(-b/a, S(3))
if ZeroQ(e*q + f*(S(1) - sqrt(S(3)))):
return True
return False
def replacement1865(a, b, c, d, e, f, x):
q = Rt(-b/a, S(3))
return Dist(S(4)*S(3)**(S(1)/4)*f*sqrt((q**S(2)*x**S(2) + q*x + S(1))/(-q*x - sqrt(S(3)) + S(1))**S(2))*sqrt(sqrt(S(3)) + S(2))*(-q*x + S(1))/(q*sqrt(-(-q*x + S(1))/(-q*x - sqrt(S(3)) + S(1))**S(2))*sqrt(a + b*x**S(3))), Subst(Int(S(1)/(sqrt(S(1) - x**S(2))*sqrt(x**S(2) + S(4)*sqrt(S(3)) + S(7))*(c*q + d*(S(1) + sqrt(S(3))) + x*(c*q + d*(S(1) - sqrt(S(3)))))), x), x, (-q*x + S(1) + sqrt(S(3)))/(q*x + S(-1) + sqrt(S(3)))), x)
def With1866(a, b, c, d, e, f, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Rt(-b/a, S(3))
if NonzeroQ(e*q + f*(S(1) - sqrt(S(3)))):
return True
return False
def replacement1866(a, b, c, d, e, f, x):
q = Rt(-b/a, S(3))
return Dist((-c*f + d*e)/(c*q + d*(S(1) - sqrt(S(3)))), Int((-q*x - sqrt(S(3)) + S(1))/(sqrt(a + b*x**S(3))*(c + d*x)), x), x) + Dist((e*q + f*(S(1) - sqrt(S(3))))/(c*q + d*(S(1) - sqrt(S(3)))), Int(S(1)/sqrt(a + b*x**S(3)), x), x)
def With1867(a, b, c, d, e, f, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Rt(b/a, S(3))
if ZeroQ(-e*q + f*(S(1) - sqrt(S(3)))):
return True
return False
def replacement1867(a, b, c, d, e, f, x):
q = Rt(b/a, S(3))
return Dist(-S(4)*S(3)**(S(1)/4)*f*sqrt((q**S(2)*x**S(2) - q*x + S(1))/(q*x - sqrt(S(3)) + S(1))**S(2))*sqrt(sqrt(S(3)) + S(2))*(q*x + S(1))/(q*sqrt(-(q*x + S(1))/(q*x - sqrt(S(3)) + S(1))**S(2))*sqrt(a + b*x**S(3))), Subst(Int(S(1)/(sqrt(S(1) - x**S(2))*sqrt(x**S(2) + S(4)*sqrt(S(3)) + S(7))*(-c*q + d*(S(1) + sqrt(S(3))) + x*(-c*q + d*(S(1) - sqrt(S(3)))))), x), x, (q*x + S(1) + sqrt(S(3)))/(-q*x + S(-1) + sqrt(S(3)))), x)
def With1868(a, b, c, d, e, f, x):
if isinstance(x, (int, Integer, float, Float)):
return False
q = Rt(b/a, S(3))
if NonzeroQ(-e*q + f*(S(1) - sqrt(S(3)))):
return True
return False
def replacement1868(a, b, c, d, e, f, x):
q = Rt(b/a, S(3))
return Dist((-c*f + d*e)/(-c*q + d*(S(1) - sqrt(S(3)))), Int((q*x - sqrt(S(3)) + S(1))/(sqrt(a + b*x**S(3))*(c + d*x)), x), x) + Dist((-e*q + f*(S(1) - sqrt(S(3))))/(-c*q + d*(S(1) - sqrt(S(3)))), Int(S(1)/sqrt(a + b*x**S(3)), x), x)
def replacement1869(a, b, c, d, e, m, n, x):
return Dist(S(1)/n, Subst(Int(x**(S(-1) + (m + S(1))/n)/(c + d*x + e*sqrt(a + b*x)), x), x, x**n), x)
def replacement1870(a, b, c, d, e, n, u, x):
return Dist(c, Int(u/(-a*e**S(2) + c**S(2) + c*d*x**n), x), x) - Dist(a*e, Int(u/(sqrt(a + b*x**n)*(-a*e**S(2) + c**S(2) + c*d*x**n)), x), x)
def replacement1871(A, B, a, b, c, d, n, n2, x):
return Dist(A**S(2)*(n + S(-1)), Subst(Int(S(1)/(A**S(2)*b*x**S(2)*(n + S(-1))**S(2) + a), x), x, x/(A*(n + S(-1)) - B*x**n)), x)
def replacement1872(A, B, a, b, c, d, k, m, n, n2, x):
return Dist(A**S(2)*(m - n + S(1))/(m + S(1)), Subst(Int(S(1)/(A**S(2)*b*x**S(2)*(m - n + S(1))**S(2) + a), x), x, x**(m + S(1))/(A*(m - n + S(1)) + B*x**n*(m + S(1)))), x)
def replacement1873(a, b, c, d, e, f, g, n, n2, n3, p, x):
return -Dist(S(1)/(a*c*n*(p + S(1))*(-S(4)*a*c + b**S(2))), Int((a + b*x**n + c*x**(S(2)*n))**(p + S(1))*Simp(a*b*(a*g + c*e) - S(2)*a*c*(a*f - c*d*(S(2)*n*(p + S(1)) + S(1))) - b**S(2)*c*d*(n*p + n + S(1)) + x**n*(a*b**S(2)*g*(n*(p + S(2)) + S(1)) - S(2)*a*c*(a*g*(n + S(1)) - c*e*(n*(S(2)*p + S(3)) + S(1))) - b*c*(a*f + c*d)*(n*(S(2)*p + S(3)) + S(1))), x), x), x) - Simp(x*(a + b*x**n + c*x**(S(2)*n))**(p + S(1))*(-a*b*(a*g + c*e) - S(2)*a*c*(-a*f + c*d) + b**S(2)*c*d + x**n*(-a*b**S(2)*g - S(2)*a*c*(-a*g + c*e) + b*c*(a*f + c*d)))/(a*c*n*(p + S(1))*(-S(4)*a*c + b**S(2))), x)
def replacement1874(a, b, c, d, e, f, n, n2, p, x):
return -Dist(S(1)/(a*n*(p + S(1))*(-S(4)*a*c + b**S(2))), Int((a + b*x**n + c*x**(S(2)*n))**(p + S(1))*Simp(a*b*e - S(2)*a*(a*f - c*d*(S(2)*n*(p + S(1)) + S(1))) - b**S(2)*d*(n*p + n + S(1)) - x**n*(-S(2)*a*c*e*(n*(S(2)*p + S(3)) + S(1)) + b*(a*f + c*d)*(n*(S(2)*p + S(3)) + S(1))), x), x), x) - Simp(x*(a + b*x**n + c*x**(S(2)*n))**(p + S(1))*(-a*b*e - S(2)*a*(-a*f + c*d) + b**S(2)*d + x**n*(-S(2)*a*c*e + b*(a*f + c*d)))/(a*n*(p + S(1))*(-S(4)*a*c + b**S(2))), x)
def replacement1875(a, b, c, d, e, g, n, n2, n3, p, x):
return -Dist(S(1)/(a*c*n*(p + S(1))*(-S(4)*a*c + b**S(2))), Int((a + b*x**n + c*x**(S(2)*n))**(p + S(1))*Simp(a*b*(a*g + c*e) + S(2)*a*c**S(2)*d*(S(2)*n*(p + S(1)) + S(1)) - b**S(2)*c*d*(n*p + n + S(1)) + x**n*(a*b**S(2)*g*(n*(p + S(2)) + S(1)) - S(2)*a*c*(a*g*(n + S(1)) - c*e*(n*(S(2)*p + S(3)) + S(1))) - b*c**S(2)*d*(n*(S(2)*p + S(3)) + S(1))), x), x), x) - Simp(x*(a + b*x**n + c*x**(S(2)*n))**(p + S(1))*(-a*b*(a*g + c*e) - S(2)*a*c**S(2)*d + b**S(2)*c*d + x**n*(-a*b**S(2)*g - S(2)*a*c*(-a*g + c*e) + b*c**S(2)*d))/(a*c*n*(p + S(1))*(-S(4)*a*c + b**S(2))), x)
def replacement1876(a, b, c, d, f, g, n, n2, n3, p, x):
return -Dist(S(1)/(a*c*n*(p + S(1))*(-S(4)*a*c + b**S(2))), Int((a + b*x**n + c*x**(S(2)*n))**(p + S(1))*Simp(a**S(2)*b*g - S(2)*a*c*(a*f - c*d*(S(2)*n*(p + S(1)) + S(1))) - b**S(2)*c*d*(n*p + n + S(1)) + x**n*(-S(2)*a**S(2)*c*g*(n + S(1)) + a*b**S(2)*g*(n*(p + S(2)) + S(1)) - b*c*(a*f + c*d)*(n*(S(2)*p + S(3)) + S(1))), x), x), x) - Simp(x*(a + b*x**n + c*x**(S(2)*n))**(p + S(1))*(-a**S(2)*b*g - S(2)*a*c*(-a*f + c*d) + b**S(2)*c*d + x**n*(S(2)*a**S(2)*c*g - a*b**S(2)*g + b*c*(a*f + c*d)))/(a*c*n*(p + S(1))*(-S(4)*a*c + b**S(2))), x)
def replacement1877(a, b, c, d, f, n, n2, p, x):
return Dist(S(1)/(a*n*(p + S(1))*(-S(4)*a*c + b**S(2))), Int((a + b*x**n + c*x**(S(2)*n))**(p + S(1))*Simp(S(2)*a*(a*f - c*d*(S(2)*n*(p + S(1)) + S(1))) + b**S(2)*d*(n*p + n + S(1)) + b*x**n*(a*f + c*d)*(n*(S(2)*p + S(3)) + S(1)), x), x), x) - Simp(x*(a + b*x**n + c*x**(S(2)*n))**(p + S(1))*(-S(2)*a*(-a*f + c*d) + b**S(2)*d + b*x**n*(a*f + c*d))/(a*n*(p + S(1))*(-S(4)*a*c + b**S(2))), x)
def replacement1878(a, b, c, d, g, n, n2, n3, p, x):
return -Dist(S(1)/(a*c*n*(p + S(1))*(-S(4)*a*c + b**S(2))), Int((a + b*x**n + c*x**(S(2)*n))**(p + S(1))*Simp(a**S(2)*b*g + S(2)*a*c**S(2)*d*(S(2)*n*(p + S(1)) + S(1)) - b**S(2)*c*d*(n*p + n + S(1)) + x**n*(-S(2)*a**S(2)*c*g*(n + S(1)) + a*b**S(2)*g*(n*(p + S(2)) + S(1)) - b*c**S(2)*d*(n*(S(2)*p + S(3)) + S(1))), x), x), x) - Simp(x*(a + b*x**n + c*x**(S(2)*n))**(p + S(1))*(-a**S(2)*b*g - S(2)*a*c**S(2)*d + b**S(2)*c*d + x**n*(S(2)*a**S(2)*c*g - a*b**S(2)*g + b*c**S(2)*d))/(a*c*n*(p + S(1))*(-S(4)*a*c + b**S(2))), x)
def replacement1879(a, c, d, e, f, g, n, n2, n3, p, x):
return -Dist(-S(1)/(S(4)*a**S(2)*c**S(2)*n*(p + S(1))), Int((a + c*x**(S(2)*n))**(p + S(1))*Simp(-S(2)*a*c*x**n*(a*g*(n + S(1)) - c*e*(n*(S(2)*p + S(3)) + S(1))) - S(2)*a*c*(a*f - c*d*(S(2)*n*(p + S(1)) + S(1))), x), x), x) - Simp(-x*(a + c*x**(S(2)*n))**(p + S(1))*(-S(2)*a*c*x**n*(-a*g + c*e) - S(2)*a*c*(-a*f + c*d))/(S(4)*a**S(2)*c**S(2)*n*(p + S(1))), x)
def replacement1880(a, c, d, e, f, n, n2, p, x):
return -Dist(-S(1)/(S(4)*a**S(2)*c*n*(p + S(1))), Int((a + c*x**(S(2)*n))**(p + S(1))*Simp(S(2)*a*c*e*x**n*(n*(S(2)*p + S(3)) + S(1)) - S(2)*a*(a*f - c*d*(S(2)*n*(p + S(1)) + S(1))), x), x), x) - Simp(-x*(a + c*x**(S(2)*n))**(p + S(1))*(-S(2)*a*c*e*x**n - S(2)*a*(-a*f + c*d))/(S(4)*a**S(2)*c*n*(p + S(1))), x)
def replacement1881(a, c, d, e, g, n, n2, n3, p, x):
return -Dist(-S(1)/(S(4)*a**S(2)*c**S(2)*n*(p + S(1))), Int((a + c*x**(S(2)*n))**(p + S(1))*Simp(S(2)*a*c**S(2)*d*(S(2)*n*(p + S(1)) + S(1)) - S(2)*a*c*x**n*(a*g*(n + S(1)) - c*e*(n*(S(2)*p + S(3)) + S(1))), x), x), x) - Simp(-x*(a + c*x**(S(2)*n))**(p + S(1))*(-S(2)*a*c**S(2)*d - S(2)*a*c*x**n*(-a*g + c*e))/(S(4)*a**S(2)*c**S(2)*n*(p + S(1))), x)
def With1882(a, b, c, d, e, f, g, x):
q = Rt((S(12)*a**S(2)*g**S(2) - a*c*f**S(2) + f*(-S(2)*a*b*g + S(3)*c**S(2)*d))/(c*g*(-a*f + S(3)*c*d)), S(2))
r = Rt((a*c*f**S(2) - f*(S(2)*a*b*g + S(3)*c**S(2)*d) + S(4)*g*(a**S(2)*g + b*c*d))/(c*g*(-a*f + S(3)*c*d)), S(2))
return -Simp(c*ArcTan((r - S(2)*x)/q)/(g*q), x) + Simp(c*ArcTan((r + S(2)*x)/q)/(g*q), x) - Simp(c*ArcTan(x*(-a*f + S(3)*c*d)*(S(6)*a**S(2)*b*g**S(2) - S(2)*a**S(2)*c*f*g - a*b**S(2)*f*g + b*c**S(2)*d*f + c**S(2)*g*x**S(4)*(-a*f + S(3)*c*d) + c*x**S(2)*(S(2)*a**S(2)*g**S(2) - a*c*f**S(2) - b*c*d*g + S(3)*c**S(2)*d*f))/(g*q*(-S(2)*a**S(2)*g + b*c*d)*(S(4)*a**S(2)*g - a*b*f + b*c*d)))/(g*q), x)
def With1883(a, c, d, e, f, g, x):
q = Rt((S(12)*a**S(2)*g**S(2) - a*c*f**S(2) + S(3)*c**S(2)*d*f)/(c*g*(-a*f + S(3)*c*d)), S(2))
r = Rt((S(4)*a**S(2)*g**S(2) + a*c*f**S(2) - S(3)*c**S(2)*d*f)/(c*g*(-a*f + S(3)*c*d)), S(2))
return -Simp(c*ArcTan((r - S(2)*x)/q)/(g*q), x) + Simp(c*ArcTan((r + S(2)*x)/q)/(g*q), x) - Simp(c*ArcTan(c*x*(-a*f + S(3)*c*d)*(S(2)*a**S(2)*f*g - c*g*x**S(4)*(-a*f + S(3)*c*d) - x**S(2)*(S(2)*a**S(2)*g**S(2) - a*c*f**S(2) + S(3)*c**S(2)*d*f))/(S(8)*a**S(4)*g**S(3)*q))/(g*q), x)
def With1884(p, u, v, x):
    """Applicability guard for replacement1884.

    Succeeds when Int(u*v**p, x) can be reduced via polynomial degrees:
    1 < deg(v) <= deg(u) + 1, deg(u) + deg(v)*p < -1, and v does not
    already divide the derivative structure of u.  Any symbolic failure
    while probing (TypeError/AttributeError) means "does not apply".
    """
    # Symbolic rewrite rules never match plain numeric arguments.
    if isinstance(x, (int, Integer, float, Float)):
        return False
    try:
        m = Exponent(u, x)
        n = Exponent(v, x)
        # Leading-coefficient ratio used by the reduction formula.
        # (The original generated code computed this twice; once suffices.)
        c = Coefficient(u, x, m)/((m + n*p + S(1))*Coefficient(v, x, n))
        # Computed only to verify Apart succeeds on the remainder term;
        # replacement1884 recomputes it when the rule actually fires.
        w = Apart(-c*x**(m - n)*(v*(m - n + S(1)) + x*(p + S(1))*D(v, x)) + u, x)
        res = And(Inequality(S(1), Less, n, LessEqual, m + S(1)), Less(m + n*p, S(-1)), FalseQ(DerivativeDivides(v, u, x)))
    except (TypeError, AttributeError):
        return False
    if res:
        return True
    return False
def replacement1884(p, u, v, x):
    """Reduce Int(u*v**p, x) by one polynomial-division step.

    Splits off c*v**(p+1)*x**(m-n+1) and, unless the remainder w vanishes,
    leaves a residual integral Int(v**p * w, x) of lower degree.
    Only reached when With1884 returned True.
    """
    m = Exponent(u, x)
    n = Exponent(v, x)
    # Leading-coefficient ratio of u over v, scaled by the target exponent.
    # (The original generated code computed this twice; once suffices.)
    c = Coefficient(u, x, m)/((m + n*p + S(1))*Coefficient(v, x, n))
    # Remainder after subtracting the derivative of the split-off term.
    w = Apart(-c*x**(m - n)*(v*(m - n + S(1)) + x*(p + S(1))*D(v, x)) + u, x)
    return Simp(If(ZeroQ(w), c*v**(p + 1)*x**(m - n + 1), c*v**(p + 1)*x**(m - n + 1) + Int(v**p*w, x)), x)
|
85fd5874666847be4c439c139aac7b073ec18e1c
|
097f1a48017fad4d193361e28556b01faa38fcaf
|
/tests/functional/api/test_webhooks_api.py
|
53d5e3cd74ad01ce1b39318094b37b53507efc8a
|
[
"MIT"
] |
permissive
|
uploadcare/pyuploadcare
|
a1ece136ff33ae7d7005a194daee31b848cc3d5f
|
d565e79ca7cb9dc60f5b32554219c9575b260476
|
refs/heads/main
| 2023-09-02T20:03:50.045515
| 2023-07-18T11:05:33
| 2023-07-18T11:05:33
| 2,203,143
| 101
| 40
|
MIT
| 2023-09-08T13:29:08
| 2011-08-13T21:16:31
|
Python
|
UTF-8
|
Python
| false
| false
| 892
|
py
|
test_webhooks_api.py
|
import pytest
from pyuploadcare.api.entities import Webhook
@pytest.mark.vcr
def test_create_webhook(uploadcare):
    """Creating a webhook returns a Webhook entity."""
    payload = {
        "event": "file.uploaded",
        "target_url": "https://webhook.site/699ba5a4-b178-41c7-b416-5d1b6739d052",
        "is_active": True,
    }
    created = uploadcare.webhooks_api.create(payload)
    assert isinstance(created, Webhook)
@pytest.mark.vcr
def test_list_webhooks(uploadcare):
    """Listing with limit=1 yields exactly one Webhook entity."""
    fetched = list(uploadcare.webhooks_api.list(limit=1))
    assert len(fetched) == 1
    first = fetched[0]
    assert isinstance(first, Webhook)
@pytest.mark.vcr
def test_update_webhook(uploadcare):
    """Deactivating a webhook is reflected on the returned entity."""
    # ID of a webhook recorded in the VCR cassette for this test.
    updated = uploadcare.webhooks_api.update(847096, {"is_active": False})
    assert not updated.is_active
@pytest.mark.vcr
def test_delete_webhook(uploadcare):
    # ID of a webhook recorded in the VCR cassette for this test.
    webhook_id = 847096
    # No assertion: the test passes as long as the delete call does not raise.
    uploadcare.webhooks_api.delete(webhook_id)
|
531e66566676bd90299601b3fac939150e32aa6e
|
32b01231af56c01a0ed057222ede46490b1488ce
|
/amplify/ext/phpfpm/collectors/master/meta.py
|
f7bce52dec3359e0280e022e7a5508a2f79facab
|
[
"BSD-2-Clause"
] |
permissive
|
nginxinc/nginx-amplify-agent
|
118478cf17ff3788e5c3d807c0f9a97a584afa65
|
f6be8cf8f8bcc61c549a821bf2aba41b2d843f18
|
refs/heads/master
| 2023-08-24T18:55:44.088497
| 2022-10-25T09:54:50
| 2022-10-25T09:54:50
| 46,153,777
| 312
| 80
|
BSD-2-Clause
| 2022-06-07T16:34:53
| 2015-11-14T00:07:42
|
Python
|
UTF-8
|
Python
| false
| false
| 4,593
|
py
|
meta.py
|
# -*- coding: utf-8 -*-
from amplify.agent.common.context import context
from amplify.agent.data.eventd import INFO
from amplify.agent.collectors.abstract import AbstractMetaCollector
from amplify.agent.common.util import subp, host
from amplify.ext.phpfpm.util.ps import LS_CMD, LS_CMD_FREEBSD, LS_PARSER
from amplify.ext.phpfpm.util.version import VERSION_PARSER
__author__ = "Grant Hulegaard"
__copyright__ = "Copyright (C) Nginx, Inc. All rights reserved."
__license__ = ""
__maintainer__ = "Grant Hulegaard"
__email__ = "grant.hulegaard@nginx.com"
class PHPFPMMetaCollector(AbstractMetaCollector):
    """
    Meta collector. Collects meta data about master
    """
    short_name = 'phpfpm_meta'

    def __init__(self, **kwargs):
        super(PHPFPMMetaCollector, self).__init__(**kwargs)
        self._bin_path = None  # cache for bin_path discovery
        self._version = None  # cache for version discovery
        self._version_line = None  # "" "" ""
        # register the collect methods that fill in the lazily-discovered fields
        self.register(
            self.bin_path,
            self.version
        )

    @property
    def default_meta(self):
        # Baseline meta payload; bin_path/version/version_line start as None
        # and are filled in by the registered collect methods below.
        meta = {
            'type': self.object.type,
            'root_uuid': context.uuid,
            'local_id': self.object.local_id,
            'name': self.object.name,
            'display_name': self.object.display_name,
            'cmd': self.object.cmd,
            'conf_path': self.object.conf_path,
            'workers': len(self.object.workers),
            'bin_path': None,
            'version': None,
            'version_line': None,
        }
        # pid is only reported outside containers (presumably because host and
        # container pid namespaces differ — TODO confirm)
        if not self.in_container:
            meta['pid'] = self.object.pid
        return meta

    def bin_path(self):
        """
        Compute the bin_path as part of meta collection to be more tolerant of
        users that utilize `pm.ondemand`.  bin_path is also not required for
        our regular running logic so it can safely be moved down a level (to
        this collector that runs on a regular async basis).

        This used to live in manager._find_all() but it is impossible to cache
        the value there.
        """
        # only compute if bin_path hasn't been found before
        if self._bin_path is None:
            # try the master pid first, then each worker pid
            all_pids = [self.object.pid] + self.object.workers
            last_exception = None
            for pid in all_pids:
                ls_cmd_template = LS_CMD_FREEBSD if host.linux_name() == 'freebsd' else LS_CMD
                ls_cmd = ls_cmd_template % pid
                try:
                    ls, _ = subp.call(ls_cmd)
                    context.log.debug('ls "%s" output: %s' % (ls_cmd, ls))
                except Exception as e:
                    # remember the failure and fall through to the next pid
                    last_exception = e
                else:
                    try:
                        self._bin_path = LS_PARSER(ls[0])
                    except Exception as e:
                        exc_name = e.__class__.__name__
                        context.log.debug(
                            'failed to parse ls result "%s" due to %s' %
                            (ls[0], exc_name)
                        )
                        context.log.debug('additional info:', exc_info=True)
                    # ls itself succeeded, so don't report an ls failure below
                    last_exception = None  # clear last exception for ls
                    break
            # if we never succeeded...log error
            if last_exception:
                exc_name = last_exception.__class__.__name__
                # this is being kept as an error because it has
                # implications for meta collection success/failure
                context.log.debug(
                    'failed to find php-fpm bin path, last attempt: '
                    '"%s" failed due to %s' %
                    (ls_cmd, exc_name)
                )
                context.log.debug('additional info:', exc_info=True)
                # If there is a root_object defined, send an event to the cloud
                if context.objects.root_object:
                    context.objects.root_object.eventd.event(
                        level=INFO,
                        message='php-fpm bin not found'
                    )
        self.meta['bin_path'] = self._bin_path

    def version(self):
        # Resolve version from the binary; requires bin_path to be known first.
        # only compute if version hasn't been found before and we have found a
        # bin_path
        if self._version is None and self._bin_path is not None:
            version, raw_line = VERSION_PARSER(self._bin_path)
            self._version, self._version_line = version, raw_line
        self.meta['version'] = self._version
        self.meta['version_line'] = self._version_line
|
9db35a0ea69cc7f66809b8c3b5e1b9b775bd3cb2
|
6416b746ee71d897789eab1e450000831674dbd0
|
/src/otx/algorithms/anomaly/configs/base/configuration_enums.py
|
4988c475a7075e448da637a82895bc7fdca3057e
|
[
"Apache-2.0"
] |
permissive
|
openvinotoolkit/training_extensions
|
c921f83ad52311af96ff45ae0b88d0aecddd855b
|
80454808b38727e358e8b880043eeac0f18152fb
|
refs/heads/develop
| 2023-08-31T06:29:07.229339
| 2023-08-31T01:57:26
| 2023-08-31T01:57:26
| 154,843,614
| 397
| 230
|
Apache-2.0
| 2023-09-14T06:17:01
| 2018-10-26T14:02:29
|
Python
|
UTF-8
|
Python
| false
| false
| 1,672
|
py
|
configuration_enums.py
|
"""Configuration Enums.
Enums needed to define the options of selectable parameters in the configurable
parameter classes.
"""
# Copyright (C) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions
# and limitations under the License.
from otx.api.configuration import ConfigurableEnum
class POTQuantizationPreset(ConfigurableEnum):
    """POT Quantization Preset Enum.

    This Enum represents the quantization preset for post training optimization.
    """

    PERFORMANCE = "Performance"
    MIXED = "Mixed"


class EarlyStoppingMetrics(ConfigurableEnum):
    """Early Stopping Metric Enum.

    This enum represents the different metrics that can be used for early
    stopping.
    """

    # area under the ROC curve computed on whole images
    IMAGE_ROC_AUC = "image_AUROC"
    # image-level F1 score
    IMAGE_F1 = "image_F1Score"


class ModelName(ConfigurableEnum):
    """Model Name Enum.

    This enum represents the different model architectures for anomaly
    classification.
    """

    STFPM = "stfpm"
    PADIM = "padim"


class ModelBackbone(ConfigurableEnum):
    """Model Backbone Enum.

    This enum represents the common backbones that can be used with Padim and
    STFPM.
    """

    RESNET18 = "resnet18"
    WIDE_RESNET_50 = "wide_resnet50_2"
|
f2ce200170ca60b5b92212ae777642db8605ffe7
|
a0736beb3269a71b2f5b13cafe8fb5e7f6f540f4
|
/src/lib/time.py
|
cbb80050fb914d4eeff5f13f97cd9850e120fd0a
|
[
"Apache-2.0",
"CC-BY-4.0"
] |
permissive
|
GoogleCloudPlatform/covid-19-open-data
|
d9e467fadbfc71686e30e28f3ce1d438e8fd92ba
|
1123ce02a0b4059d6acd4c4446f3f9b8335018f1
|
refs/heads/main
| 2023-08-02T23:57:12.785363
| 2022-10-23T22:26:29
| 2022-10-23T22:26:29
| 282,079,576
| 489
| 146
|
Apache-2.0
| 2022-09-05T12:00:37
| 2020-07-23T23:43:51
|
Python
|
UTF-8
|
Python
| false
| false
| 2,426
|
py
|
time.py
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
from typing import Iterable
from .cast import safe_datetime_parse
# strftime/strptime pattern for ISO 8601 calendar dates (YYYY-MM-DD)
ISO_DATE_FORMAT = "%Y-%m-%d"
def datetime_isoformat(value: str, date_format: str) -> str:
    """Parse *value* with *date_format* and return it as an ISO date string.

    Returns None when the value cannot be parsed.
    """
    parsed = safe_datetime_parse(value, date_format)
    if parsed is None:
        return None
    return parsed.date().isoformat()
def date_offset(value: str, offset: int) -> str:
    """Shift the ISO date *value* by *offset* days and return the new ISO date."""
    assert offset is not None, "Offset none: %r" % offset
    shifted = datetime.date.fromisoformat(value) + datetime.timedelta(days=offset)
    return shifted.isoformat()
def timezone_adjust(timestamp: str, offset: int) -> str:
    """ Adjust hour difference between a timezone and given offset """
    # NOTE(review): the cutoff is ``hour > 24 - offset`` (hours run 0-23);
    # whether the boundary hour should roll over is preserved exactly as in
    # the original implementation.
    moment = datetime.datetime.fromisoformat(timestamp)
    if moment.hour > 24 - offset:
        moment += datetime.timedelta(days=1)
    return moment.date().isoformat()
def date_range(start: str, end: str) -> Iterable[str]:
    """
    Range of dates from `start` to `end`, both inclusive.

    Arguments:
        start: Start date in ISO format YYYY-MM-DD.
        end: Start date in ISO format YYYY-MM-DD.
    Returns:
        Iterable[str]: Iterable of dates from `start` to `end`.
    """
    current = datetime.datetime.strptime(start, ISO_DATE_FORMAT)
    last = datetime.datetime.strptime(end, ISO_DATE_FORMAT)
    assert current <= last, f"Start date must be less or equal than end date"
    one_day = datetime.timedelta(days=1)
    while current <= last:
        yield current.strftime(ISO_DATE_FORMAT)
        current += one_day
def date_today(offset: int = 0) -> str:
    """ Returns today's date for UTC timezone in ISO format. """
    shifted = datetime.timedelta(days=offset) + datetime.datetime.utcnow()
    return shifted.date().isoformat()
|
396730567910427a772170397c6b4b36f712591f
|
10ddfb2d43a8ec5d47ce35dc0b8acf4fd58dea94
|
/Python/cat-and-mouse-ii.py
|
dacecda01c09520890232c7bc683bc2dfb76c4f3
|
[
"MIT"
] |
permissive
|
kamyu104/LeetCode-Solutions
|
f54822059405ef4df737d2e9898b024f051fd525
|
4dc4e6642dc92f1983c13564cc0fd99917cab358
|
refs/heads/master
| 2023-09-02T13:48:26.830566
| 2023-08-28T10:11:12
| 2023-08-28T10:11:12
| 152,631,182
| 4,549
| 1,651
|
MIT
| 2023-05-31T06:10:33
| 2018-10-11T17:38:35
|
C++
|
UTF-8
|
Python
| false
| false
| 6,470
|
py
|
cat-and-mouse-ii.py
|
# Time: O((m * n)^2 * (m + n))
# Space: O((m * n)^2)
import collections
class Solution(object):
    def canMouseWin(self, grid, catJump, mouseJump):
        """
        :type grid: List[str]
        :type catJump: int
        :type mouseJump: int
        :rtype: bool
        """
        # Retrograde analysis (backward BFS) over game states
        # (mouse_pos, cat_pos, player_to_move): terminal states are colored
        # with their winner first, then winners propagate to predecessor
        # states.  A state is won by the opponent once every move of the
        # player to move is known to lose (its degree counter hits zero).
        directions = [(0, 1), (1, 0), (0, -1), (-1, 0)]
        DRAW, MOUSE, CAT = range(3)

        def parents(m, c, t):
            # Predecessors of state (m, c, t): the *other* player
            # (MOUSE^CAT^t) made the move that led into this state.
            if t == CAT:
                for nm in graph[m, MOUSE^CAT^t]:
                    yield nm, c, MOUSE^CAT^t
            else:
                for nc in graph[c, MOUSE^CAT^t]:
                    yield m, nc, MOUSE^CAT^t

        R, C = len(grid), len(grid[0])
        N = R*C
        WALLS = set()
        FOOD, MOUSE_START, CAT_START = [-1]*3
        # locate food, starting positions and walls (cells flattened to r*C+c)
        for r in xrange(R):
            for c in xrange(C):
                if grid[r][c] == 'M':
                    MOUSE_START = r*C + c
                elif grid[r][c] == 'C':
                    CAT_START = r*C + c
                elif grid[r][c] == 'F':
                    FOOD = r*C + c
                elif grid[r][c] == '#':
                    WALLS.add(r*C + c)
        # graph[pos, player] = cells reachable by that player in one turn;
        # d starts at 0, so staying in place is always a legal move
        graph = collections.defaultdict(set)
        jump = {MOUSE:mouseJump, CAT:catJump}
        for r in xrange(R):
            for c in xrange(C):
                if grid[r][c] == '#':
                    continue
                pos = r*C + c
                for t in [MOUSE, CAT]:
                    for dr, dc in directions:
                        for d in xrange(jump[t]+1):
                            nr, nc = r+dr*d, c+dc*d
                            if not (0 <= nr < R and 0 <= nc < C and grid[nr][nc] != '#'):
                                break
                            graph[pos, t].add(nr*C + nc)
        # degree[state] = number of moves not yet known to be losing for the mover
        degree = {}
        for m in xrange(N):
            for c in xrange(N):
                degree[m, c, MOUSE] = len(graph[m, MOUSE])
                degree[m, c, CAT] = len(graph[c, CAT])
        # color terminal states: mouse on food -> MOUSE wins; cat on food or
        # cat catches mouse (same cell) -> CAT wins
        color = collections.defaultdict(int)
        q = collections.deque()
        for i in xrange(N):
            if i in WALLS or i == FOOD:
                continue
            color[FOOD, i, CAT] = MOUSE
            q.append((FOOD, i, CAT, MOUSE))
            color[i, FOOD, MOUSE] = CAT
            q.append((i, FOOD, MOUSE, CAT))
            for t in [MOUSE, CAT]:
                color[i, i, t] = CAT
                q.append((i, i, t, CAT))
        while q:
            i, j, t, c = q.popleft()
            for ni, nj, nt in parents(i, j, t):
                if color[ni, nj, nt] != DRAW:
                    continue
                if nt == c:
                    # the winner is the player to move: one winning move suffices
                    color[ni, nj, nt] = c
                    q.append((ni, nj, nt, c))
                    continue
                # otherwise one more of the mover's options is lost
                degree[ni, nj, nt] -= 1
                if not degree[ni, nj, nt]:
                    color[ni, nj, nt] = c
                    q.append((ni, nj, nt, c))
        return color[MOUSE_START, CAT_START, MOUSE] == MOUSE
# Time: O((m * n)^2 * (m + n))
# Space: O((m * n)^2)
import collections
class Solution2(object):
    def canMouseWin(self, grid, catJump, mouseJump):
        """
        :type grid: List[str]
        :type catJump: int
        :type mouseJump: int
        :rtype: bool
        """
        # Same retrograde BFS as Solution, but only MOUSE wins are propagated
        # (queue q1): the final answer only asks whether the start state is a
        # MOUSE win.  The symmetric CAT propagation (q2) is kept commented out.
        directions = [(0, 1), (1, 0), (0, -1), (-1, 0)]
        DRAW, MOUSE, CAT = range(3)

        def parents(m, c, t):
            # Predecessors of (m, c, t): the other player moved into this state.
            if t == CAT:
                for nm in graph[m, MOUSE^CAT^t]:
                    yield nm, c, MOUSE^CAT^t
            else:
                for nc in graph[c, MOUSE^CAT^t]:
                    yield m, nc, MOUSE^CAT^t

        R, C = len(grid), len(grid[0])
        N = R*C
        WALLS = set()
        FOOD, MOUSE_START, CAT_START = [-1]*3
        # locate food, starting positions and walls (cells flattened to r*C+c)
        for r in xrange(R):
            for c in xrange(C):
                if grid[r][c] == 'M':
                    MOUSE_START = r*C + c
                elif grid[r][c] == 'C':
                    CAT_START = r*C + c
                elif grid[r][c] == 'F':
                    FOOD = r*C + c
                elif grid[r][c] == '#':
                    WALLS.add(r*C + c)
        # graph[pos, player] = cells reachable in one turn (d == 0 allows staying)
        graph = collections.defaultdict(set)
        jump = {MOUSE:mouseJump, CAT:catJump}
        for r in xrange(R):
            for c in xrange(C):
                if grid[r][c] == '#':
                    continue
                pos = r*C + c
                for t in [MOUSE, CAT]:
                    for dr, dc in directions:
                        for d in xrange(jump[t]+1):
                            nr, nc = r+dr*d, c+dc*d
                            if not (0 <= nr < R and 0 <= nc < C and grid[nr][nc] != '#'):
                                break
                            graph[pos, t].add(nr*C + nc)
        # only the CAT-to-move degree is needed when propagating MOUSE wins
        degree = {}
        for m in xrange(N):
            for c in xrange(N):
                # degree[m, c, MOUSE] = len(graph[m, MOUSE])
                degree[m, c, CAT] = len(graph[c, CAT])
        color = collections.defaultdict(int)
        q1 = collections.deque()
        # q2 = collections.deque()
        for i in xrange(N):
            if i in WALLS or i == FOOD:
                continue
            color[FOOD, i, CAT] = MOUSE
            q1.append((FOOD, i, CAT))
            color[i, FOOD, MOUSE] = CAT
            # q2.append((i, FOOD, MOUSE))
            for t in [MOUSE, CAT]:
                color[i, i, t] = CAT
                # q2.append((i, i, t))
        while q1:
            i, j, t = q1.popleft()
            for ni, nj, nt in parents(i, j, t):
                if color[ni, nj, nt] != DRAW:
                    continue
                if t == CAT:
                    # predecessor has the mouse to move with a winning move
                    color[ni, nj, nt] = MOUSE
                    q1.append((ni, nj, nt))
                    continue
                degree[ni, nj, nt] -= 1
                if not degree[ni, nj, nt]:
                    # every cat move loses -> the state is a mouse win
                    color[ni, nj, nt] = MOUSE
                    q1.append((ni, nj, nt))
        # (commented-out symmetric CAT propagation retained from the full version)
        # while q2:
        #     i, j, t = q2.popleft()
        #     for ni, nj, nt in parents(i, j, t):
        #         if color[ni, nj, nt] != DRAW:
        #             continue
        #         if t == MOUSE:
        #             color[ni, nj, nt] = CAT
        #             q2.append((ni, nj, nt))
        #             continue
        #         degree[ni, nj, nt] -= 1
        #         if not degree[ni, nj, nt]:
        #             color[ni, nj, nt] = CAT
        #             q2.append((ni, nj, nt))
        return color[MOUSE_START, CAT_START, MOUSE] == MOUSE
|
5f614093ee30df4f60522327c638a21d69b74faf
|
2a1b8a671aceda6bc446f8ce26400aa84fa444a6
|
/Packs/Opsgeniev2/Integrations/Opsgeniev2/Opsgeniev2_test.py
|
4e10faca2fef0565b393103ff313aac28fdbbc4c
|
[
"MIT"
] |
permissive
|
demisto/content
|
6d4722d46f0ff0beea2748e9f7de585bf91a78b4
|
890def5a0e0ae8d6eaa538148249ddbc851dbb6b
|
refs/heads/master
| 2023-09-04T00:02:25.618032
| 2023-09-03T21:56:22
| 2023-09-03T21:56:22
| 60,525,392
| 1,023
| 1,921
|
MIT
| 2023-09-14T20:55:24
| 2016-06-06T12:17:02
|
Python
|
UTF-8
|
Python
| false
| false
| 5,155
|
py
|
Opsgeniev2_test.py
|
import pytest
import os
from Opsgeniev2 import Client
import json
from unittest.mock import call
"""
Test script for the OpsGenieV2 Integration
Envvars:
API_TOKEN: If configured, runs integration tests.
GEN_TEST_DATA: If set, copies the raw output* of the API queries into test_data.
* In the case of Paged data, the raw_response only contains the data of the request response and not the
paging information. Compare list_alerts_paged.json to list_alerts.json to see the difference.
Integration steps use the real OpsGenie API to go through the lifecycle of an alert, schedule, and on-call.
You must get the API_TOKEN from within an opsgenie "team" integration;
teams->[team]->Integrations->Add integration->Rest API Over JSON
"""
# Connection parameters; the token is read from the API_TOKEN envvar so the
# integration tests are skipped when it is not configured (see module docstring).
PARAMS = {
    "url": "https://api.opsgenie.com",
    "token": os.getenv("API_TOKEN"),
}
# Default arguments used when creating the test alert.
ARGS = {
    "message": "This is a test alert!"
}
@pytest.fixture
def testclient():
    """
    Setup a test client, used as a fixture for Integration tests.
    """
    base_url = PARAMS.get("url") + "/v2"
    # GenieKey auth header carries the team integration token from PARAMS
    client = Client(
        base_url=base_url,
        headers={
            "Authorization": f"GenieKey {PARAMS.get('token')}",
        }
    )
    return client
def test_integration_tests(mocker, testclient):
    """
    Creates, lists, and then deletes an alert.

    Exercises the full alert lifecycle (and schedule/on-call reads) against
    the real OpsGenie API; the raw responses are optionally dumped to
    test_data/ when GEN_TEST_DATA is set.
    """
    if not PARAMS.get("token"):
        # Pass if no token for acceptance tests
        return
    test_data = {}
    test_data["list_schedules"] = list_schedule_tester(testclient)
    test_data["get_schedules"] = get_schedule_tester(testclient, test_data["list_schedules"]["data"][0]["id"])
    test_data["on_call"] = get_on_call_tester(testclient, test_data["list_schedules"]["data"][0]["id"])
    # Create alert
    alert_raw_response = create_alerts_tester(testclient)
    test_data["create_alert"] = alert_raw_response
    alert_id = alert_raw_response.get("alertId")
    # List alerts
    test_data["list_alerts"] = list_alerts_tester(testclient)
    # Get the alert we just created
    test_data["get_alert"] = get_alert_tester(testclient, alert_id)
    # Ack the same alert
    test_data["ack_alert"] = ack_alert_tester(testclient, alert_id)
    # Close the same alert
    test_data["close_alert"] = close_alert_tester(testclient, alert_id)
    # Delete the alert we just created
    test_data["delete_alert"] = delete_alert_tester(testclient, alert_id)
    if os.getenv("GEN_TEST_DATA"):
        # If set, test JSON added to test_data
        for k, v in test_data.items():
            with open(f"test_data/{k}.json", "w") as fh:
                json.dump(v, fh, indent=4, sort_keys=True)
def create_alerts_tester(testclient):
    """Create an alert and return the raw API response."""
    from Opsgeniev2 import create_alert
    r = create_alert(testclient, ARGS)
    assert r.raw_response.get("alertId")
    return r.raw_response


def list_alerts_tester(testclient):
    """List alerts and return the raw API response."""
    from Opsgeniev2 import list_alerts
    r = list_alerts(testclient, 40, "createdBy")
    assert len(r.outputs) > 0
    return r.raw_response


def delete_alert_tester(testclient, alert_id):
    """Delete the given alert and return the raw API response."""
    from Opsgeniev2 import delete_alert
    r = delete_alert(testclient, alert_id)
    assert r.outputs
    return r.raw_response


def get_alert_tester(testclient, alert_id):
    """Fetch the given alert and return the raw API response."""
    from Opsgeniev2 import get_alert
    r = get_alert(testclient, alert_id)
    assert r.outputs
    return r.raw_response


def ack_alert_tester(testclient, alert_id):
    """Acknowledge the given alert and return the raw API response."""
    from Opsgeniev2 import ack_alert
    r = ack_alert(testclient, {"alert-id": alert_id})
    assert r.outputs
    return r.raw_response


def close_alert_tester(testclient, alert_id):
    """Close the given alert and return the raw API response."""
    from Opsgeniev2 import close_alert
    r = close_alert(testclient, {"alert-id": alert_id})
    assert r.outputs
    return r.raw_response


def list_schedule_tester(testclient):
    """List schedules and return the raw API response."""
    from Opsgeniev2 import list_schedules
    r = list_schedules(testclient, 20, "createdAt")
    assert r.outputs
    return r.raw_response


def get_schedule_tester(testclient, schedule_id):
    """Fetch the given schedule and return the raw API response."""
    from Opsgeniev2 import get_schedule
    r = get_schedule(testclient, schedule_id)
    assert r.outputs
    return r.raw_response


def get_on_call_tester(testclient, schedule_id):
    """Fetch on-call data for the given schedule and return the raw response."""
    from Opsgeniev2 import get_on_calls
    r = get_on_calls(testclient, schedule_id)
    assert r.outputs
    return r.raw_response
def test_paging(mocker, testclient):
    """
    Test the paging functionality works as expected
    """
    # Patch to return list_alerts json data
    with open("./test_data/list_alerts_paged.json") as list_alerts_paged:
        list_alerts_response = json.load(list_alerts_paged)
    with open("./test_data/list_alerts_empty.json") as list_alerts_empty:
        mocker.patch.object(Client, "_http_request", side_effect=[
            list_alerts_response,
            json.load(list_alerts_empty),
        ])
    # first (mocked) page holds 29 records; the empty second page ends paging
    data = testclient.get_paged(40, url_suffix="/not_real", method="GET")
    assert len(data) == 29
    # the second request must follow the "next" link from the paging envelope
    calls = [
        call(url_suffix="/not_real", method="GET"),
        call(full_url=list_alerts_response.get("paging").get("next"),
             url_suffix="/not_real",
             method="GET",
             )
    ]
    Client._http_request.assert_has_calls(calls)
|
c38234b16ea89b0359176a2bb4dc1bcff293be32
|
542ea44d056fa800a67f80d403c3e92a7f73747d
|
/drop/config/Genome.py
|
46da99a3ab6a868e902eaca787fb9f5d35f83357
|
[
"MIT"
] |
permissive
|
gagneurlab/drop
|
01dfff87721253559f2c6d4dceb57c194f6b859f
|
594d7daaff872604d65ae1537a0fe59f463de6b3
|
refs/heads/master
| 2023-07-27T18:28:02.999705
| 2023-04-14T09:50:33
| 2023-04-14T09:50:33
| 213,693,892
| 102
| 44
|
MIT
| 2023-07-07T09:41:58
| 2019-10-08T16:21:43
|
Python
|
UTF-8
|
Python
| false
| false
| 2,418
|
py
|
Genome.py
|
from pathlib import Path
from snakemake.logging import logger
class Genome:
    """Holds the gene annotations, genome assembly ID and fasta reference(s)
    configured for a drop project, plus lookups derived from them."""

    def __init__(self, annotation, assembly, reference):
        self.annotation = annotation
        self.assembly = assembly
        # Allow for old drop config stylings, where the file was a string under MAE
        # -> force into dictionary
        if isinstance(reference, str):
            self.reference = {reference: reference}
        else:
            self.reference = reference

    def getGeneAnnotations(self):
        """Return the mapping of annotation version -> GTF file."""
        return self.annotation

    def getGeneVersions(self):
        """Return the configured annotation version keys."""
        return self.annotation.keys()

    def getGeneAnnotationFile(self, annotation):
        """
        :param annotation: config-defined annotation key
        :return: GTF file from config key 'geneAnnotations'
        """
        return self.annotation[annotation]

    def getFastaFiles(self):
        """
        :return: dictionary of genome name -> genome file
        """
        # defensive: tolerate a plain-string reference assigned after __init__
        if isinstance(self.reference, str):
            return {self.reference: self.reference}
        return self.reference

    def getFastaList(self):
        """Return the configured fasta file paths as a list."""
        return list(self.getFastaFiles().values())

    def getFastaDict(self, fasta_file):
        """Return the sequence-dictionary path (.dict) for a fasta file."""
        return Path(fasta_file).with_suffix(".dict")

    def getDictList(self):
        """Return the .dict path for every configured fasta file."""
        return [self.getFastaDict(fasta) for fasta in self.getFastaList()]

    def getBSGenomeName(self):
        """Map the assembly ID onto the matching BSgenome R package name."""
        names = {
            'hg19': "BSgenome.Hsapiens.UCSC.hg19",
            'hs37d5': "BSgenome.Hsapiens.1000genomes.hs37d5",
            'hg38': "BSgenome.Hsapiens.UCSC.hg38",
            'GRCh38': "BSgenome.Hsapiens.NCBI.GRCh38",
        }
        if self.assembly not in names:
            raise ValueError("Provided genome assembly not known: " + self.assembly)
        return names[self.assembly]

    def getBSGenomeVersion(self):
        """Return the genome build number (37 or 38) for the assembly."""
        if self.assembly in ('hg19', 'hs37d5'):
            return 37
        if self.assembly in ('hg38', 'GRCh38'):
            return 38
        raise ValueError("Provided genome assembly not known: " + self.assembly)

    def getMafDbName(self):
        """Return the gnomAD MafDb R package name for the assembly."""
        if self.assembly in ('hg19', 'hs37d5'):
            return "MafDb.gnomAD.r2.1.hs37d5"
        if self.assembly in ('hg38', 'GRCh38'):
            return "MafDb.gnomAD.r2.1.GRCh38"
        raise ValueError("Provided genome assembly not known: " + self.assembly)
|
84f35093cc809b81caf225d99cfd05fb497646b0
|
ea57d267ab31480d8d731b2c095e9da9ad989133
|
/tests/test_packages_for_aea_tests/test_connections/test_p2p_libp2p_mailbox/test_aea_cli.py
|
2018a47e338a7e9d8e0e3e0e14378edf2a5dd69b
|
[
"Apache-2.0"
] |
permissive
|
fetchai/agents-aea
|
6d034f1db6f3beacf31dac2f5a1baaa60c8edb7d
|
bec49adaeba661d8d0f03ac9935dc89f39d95a0d
|
refs/heads/main
| 2023-08-08T23:19:06.276643
| 2023-02-04T10:46:39
| 2023-02-04T10:46:39
| 203,558,879
| 192
| 58
|
Apache-2.0
| 2023-07-19T04:45:26
| 2019-08-21T10:12:47
|
Python
|
UTF-8
|
Python
| false
| false
| 4,599
|
py
|
test_aea_cli.py
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2023 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""This test module contains AEA cli tests for Libp2p tcp client connection."""
import os
from aea_ledger_fetchai import FetchAICrypto
from aea.helpers.base import CertRequest
from aea.multiplexer import Multiplexer
from aea.test_tools.test_cases import AEATestCaseEmpty
from packages.fetchai.connections.p2p_libp2p_mailbox.connection import PUBLIC_ID
from tests.conftest import (
_make_libp2p_connection,
libp2p_log_on_failure,
libp2p_log_on_failure_all,
)
# Default host/ports for the locally started libp2p node and its services.
DEFAULT_PORT = 10234
DEFAULT_DELEGATE_PORT = 11234
DEFAULT_HOST = "127.0.0.1"
DEFAULT_MAILBOX_PORT = 8888
DEFAULT_CLIENTS_PER_NODE = 4
# Seconds to wait for the AEA process to report that it is running.
DEFAULT_LAUNCH_TIMEOUT = 10
@libp2p_log_on_failure_all
class TestP2PLibp2pClientConnectionAEARunning(AEATestCaseEmpty):
    """Test AEA with p2p_libp2p_client connection is correctly run"""

    @classmethod
    @libp2p_log_on_failure
    def setup_class(cls):
        """Set up the test class."""
        super(TestP2PLibp2pClientConnectionAEARunning, cls).setup_class()
        temp_dir = os.path.join(cls.t, "temp_dir_node")
        os.mkdir(temp_dir)
        # start a libp2p node with both the delegate and mailbox services enabled
        cls.node_connection = _make_libp2p_connection(
            data_dir=temp_dir,
            delegate_host=DEFAULT_HOST,
            delegate_port=DEFAULT_DELEGATE_PORT,
            mailbox_port=DEFAULT_MAILBOX_PORT,
            delegate=True,
            mailbox=True,
        )
        cls.node_multiplexer = Multiplexer([cls.node_connection])
        # node logs are dumped on failure by the libp2p_log_on_failure decorators
        cls.log_files = [cls.node_connection.node.log_file]
        cls.node_multiplexer.connect()

    def test_node(self):
        """Test the node is connected."""
        assert self.node_connection.is_connected is True

    def test_connection(self):
        """Test the connection can be used in an aea."""
        self.generate_private_key()
        self.add_private_key()
        self.add_item("connection", str(PUBLIC_ID))
        conn_path = "vendor.fetchai.connections.p2p_libp2p_mailbox"
        # point the agent's mailbox connection at the node started in setup_class
        self.nested_set_config(
            conn_path + ".config",
            {
                "nodes": [
                    {
                        "uri": "{}:{}".format(DEFAULT_HOST, DEFAULT_MAILBOX_PORT),
                        "public_key": self.node_connection.node.pub,
                    }
                ]
            },
        )
        # generate certificates for connection
        self.nested_set_config(
            conn_path + ".cert_requests",
            [
                CertRequest(
                    identifier="acn",
                    ledger_id=FetchAICrypto.identifier,
                    not_after="2022-01-01",
                    not_before="2021-01-01",
                    public_key=self.node_connection.node.pub,
                    message_format="{public_key}",
                    save_path="./cli_test_cert.txt",
                )
            ],
        )
        self.run_cli_command("issue-certificates", cwd=self._get_cwd())
        process = self.run_agent()
        is_running = self.is_running(process, timeout=DEFAULT_LAUNCH_TIMEOUT)
        assert is_running, "AEA not running within timeout!"
        # the agent must log a successful connection to the mailbox node
        check_strings = "Successfully connected to libp2p node {}:{}".format(
            DEFAULT_HOST, DEFAULT_MAILBOX_PORT
        )
        missing_strings = self.missing_from_output(process, check_strings)
        assert (
            missing_strings == []
        ), "Strings {} didn't appear in agent output.".format(missing_strings)
        self.terminate_agents(process)
        assert self.is_successfully_terminated(
            process
        ), "AEA wasn't successfully terminated."

    @classmethod
    def teardown_class(cls):
        """Tear down the test"""
        cls.terminate_agents()
        super(TestP2PLibp2pClientConnectionAEARunning, cls).teardown_class()
        cls.node_multiplexer.disconnect()
|
d7e7188f412a84ae7c178dbae238cd0ece0c061f
|
1fd8e5db25f8ebc7cc4506cbb07ba98f717b667e
|
/info.py
|
d89941613cab3599554286289f0b4476a0364cda
|
[] |
no_license
|
flatplanet/Intro-To-TKinter-Youtube-Course
|
6103410435fc3b977fb44a4b08d045950ba10380
|
cf988099fc358e52ed773273cb2e7ddb9d37d995
|
refs/heads/master
| 2022-10-06T10:02:38.689302
| 2022-07-18T18:11:12
| 2022-07-18T18:11:12
| 174,183,345
| 524
| 426
| null | 2021-10-10T16:16:44
| 2019-03-06T16:44:03
|
Python
|
UTF-8
|
Python
| false
| false
| 677
|
py
|
info.py
|
from tkinter import *
root = Tk()
root.title('Codemy.com')
root.iconbitmap('c:/gui/codemy.ico')
# "WxH+x+y"; the negative x offset places the window on a monitor to the left
# of the primary display
root.geometry("800x800+-1900+100")


def info():
    """Pack labels reporting the window's current geometry, size and position."""
    dimension_label = Label(root, text=root.winfo_geometry())
    dimension_label.pack(pady=20)
    height_label = Label(root, text=f"Height: {root.winfo_height()}")
    height_label.pack(pady=20)
    width_label = Label(root, text="Width: " + str(root.winfo_width()))
    width_label.pack()
    x_label = Label(root, text="X: " + str(root.winfo_x()))
    x_label.pack(pady=20)
    y_label = Label(root, text="Y: " + str(root.winfo_y()))
    y_label.pack()


my_button = Button(root, text="Click Me", command=info)
my_button.pack(pady=20)
root.mainloop()
|
164bc39995fc032148e1ac96d967a2aedb890781
|
77f85a550c28212071067cb122ebfd93eb705190
|
/tests/tools/test_spectral.py
|
3a332c2a9eaf18056fdba1a1bddb8552a10aa28e
|
[
"MIT"
] |
permissive
|
zwicker-group/py-pde
|
baf215a733508fe86093ea9e818228bbb3b34c58
|
d9c931a8361eaf27bc3766daba26edc11756b5f5
|
refs/heads/master
| 2023-08-31T06:36:34.514617
| 2023-08-30T18:15:44
| 2023-08-30T18:15:44
| 242,093,001
| 327
| 45
|
MIT
| 2023-08-31T13:16:24
| 2020-02-21T08:42:23
|
Python
|
UTF-8
|
Python
| false
| false
| 2,673
|
py
|
test_spectral.py
|
"""
.. codeauthor:: David Zwicker <david.zwicker@ds.mpg.de>
"""
import numpy as np
import pytest
from scipy import fftpack, stats
from pde import CartesianGrid, UnitGrid
from pde.tools.spectral import make_colored_noise
def spectral_density(data, dx=1.0):
    """calculate the power spectral density of a field

    Args:
        data (:class:`~numpy.ndarray`):
            Data of which the power spectral density will be calculated
        dx (float or list): The discretizations of the grid either as a single
            number or as an array with a value for each dimension

    Returns:
        A tuple with two arrays containing the magnitudes of the wave vectors
        and the associated density, respectively.
    """
    ndim = data.ndim
    spacings = np.broadcast_to(dx, (ndim,))
    # accumulate |k|^2 on the full grid via repeated outer sums over the axes
    k_squared = 0
    for axis in range(ndim):
        freqs = fftpack.fftfreq(data.shape[axis], spacings[axis])
        k_squared = np.add.outer(k_squared, freqs**2)
    spectrum = fftpack.fftn(data)
    return np.sqrt(k_squared), np.abs(spectrum) ** 2
def test_colored_noise():
    """test the implementation of the colored noise"""
    grid = UnitGrid([64, 64], periodic=True)
    for exponent in [0, -1, 2]:
        scale = np.random.uniform(1, 10)
        noise = make_colored_noise(grid.shape, grid.discretization, exponent, scale)
        x = noise()
        # very loose p-value threshold: only reject blatantly non-normal samples
        assert (
            stats.normaltest(x.flat).pvalue > 2e-5
        ), f"Colored noise with exp={exponent} is not normal distributed"
def test_noise_scaling():
    """compare the noise strength (in terms of the spectral density of
    two different noise sources that should be equivalent)"""
    # create a grid
    x, w = 2 + 10 * np.random.random(2)
    size = np.random.randint(128, 256)
    grid = CartesianGrid([[x, x + w]], size, periodic=True)
    # colored noise
    noise_colored = make_colored_noise(grid.shape, grid.discretization, exponent=2)
    # divergence of white noise
    shape = (grid.dim,) + grid.shape
    div = grid.make_operator("divergence", bc="auto_periodic_neumann")

    def noise_div():
        return div(np.random.randn(*shape))

    # calculate spectral densities of the two noises
    result = []
    for noise_func in [noise_colored, noise_div]:

        def get_noise():
            k, density = spectral_density(data=noise_func(), dx=grid.discretization)
            assert k[0] == 0
            assert density[0] == pytest.approx(0)
            return np.log(density[1])  # log of spectral density

        # average spectral density of longest length scale
        mean = np.mean([get_noise() for _ in range(64)])
        result.append(mean)
    # averages of the two noise sources should agree within a loose tolerance
    np.testing.assert_allclose(*result, rtol=0.5)
|
88e4e6a3be58e7f0fd0aec04f47f267997cba81d
|
8ca98d2c7226f3fd7801665837a06758cbbf7903
|
/verde/io.py
|
ad2dcf9f2a396feac25de920480725bfd9bd76ce
|
[
"BSD-3-Clause"
] |
permissive
|
fatiando/verde
|
d30f70acb1aaf38ef0b1efaa8e651a9d8f874d60
|
ec5509464bf1b4cb72b53a0e8e2c5d5ab4e87a1e
|
refs/heads/main
| 2023-08-29T00:06:04.990472
| 2023-05-08T13:41:22
| 2023-05-08T13:41:22
| 131,073,898
| 532
| 79
|
BSD-3-Clause
| 2023-05-08T13:41:24
| 2018-04-25T23:21:08
|
Python
|
UTF-8
|
Python
| false
| false
| 4,476
|
py
|
io.py
|
# Copyright (c) 2017 The Verde Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
#
# This code is part of the Fatiando a Terra project (https://www.fatiando.org)
#
"""
Functions for input and output of grids in less common formats.
"""
import numpy as np
import xarray as xr
def load_surfer(fname, dtype="float64"):
    """
    Read data from a Surfer ASCII grid file.

    This function reads a Surfer grid file, masks any NaNs in the data,
    and outputs a :class:`xarray.DataArray` that contains easting and northing
    coordinates, data values, and associated file metadata.

    Surfer is a contouring, griding and surface mapping software
    from GoldenSoftware. The names and logos for Surfer and Golden
    Software are registered trademarks of Golden Software, Inc.

    http://www.goldensoftware.com/products/surfer

    Parameters
    ----------
    fname : str or file-like object
        Name or path of the Surfer grid file or an open file (or file-like)
        object.
    dtype : str or numpy.dtype
        The type of variable used for the data. Default is ``float64``. Use
        ``float32`` if the data are large and precision is not an issue.

    Returns
    -------
    data : :class:`xarray.DataArray`
        A 2D grid with the data.

    """
    # Surfer ASCII grid structure
    # DSAA            Surfer ASCII GRD ID
    # nCols nRows     number of columns and rows
    # xMin xMax       X min max
    # yMin yMax       Y min max
    # zMin zMax       Z min max
    # z11 z21 z31 ... List of Z values
    # Only open a file if given a path instead of a file-like object
    ispath = not hasattr(fname, "readline")
    if ispath:
        input_file = open(fname, "r")  # noqa: SIM115
    else:
        input_file = fname
    try:
        grid_id, shape, region, data_range = _read_surfer_header(input_file)
        field = np.loadtxt(input_file, dtype=dtype)
        # Surfer marks blanked grid nodes with values >= 1.70141e38
        nans = field >= 1.70141e38
        if np.any(nans):
            field = np.ma.masked_where(nans, field)
        _check_surfer_integrity(field, shape, data_range)
        attrs = {"gridID": grid_id}
        if ispath:
            attrs["file"] = fname
        dims = ("northing", "easting")
        coords = {
            "northing": np.linspace(*region[2:], shape[0]),
            "easting": np.linspace(*region[:2], shape[1]),
        }
        data = xr.DataArray(field, coords=coords, dims=dims, attrs=attrs)
    finally:
        # only close files we opened ourselves
        if ispath:
            input_file.close()
    return data
def _read_surfer_header(input_file):
"""
Parse the header record of the grid file.
The header contains information on the grid shape, region, and the minimum
and maximum data values.
Parameters
----------
input_file : file-like object
An open file to read from.
Returns
-------
grid_id : str
The ID of the Surfer ASCII grid.
shape : tuple = (n_northing, n_easting)
The number of grid points in the northing and easting dimension,
respectively.
region : tuple = (west, east, south, north)
The grid region.
data_range : list = [min, max]
The minimum and maximum data values.
"""
# DSAA is a Surfer ASCII GRD ID
grid_id = input_file.readline().strip()
# Read the grid shape (n_northing, n_easting)
shape = tuple(int(i.strip()) for i in input_file.readline().split())
# Our x points North, so the first thing we read north-south.
south, north = [float(i.strip()) for i in input_file.readline().split()]
west, east = [float(i.strip()) for i in input_file.readline().split()]
region = (west, east, south, north)
# The min and max of the data values (used for validation)
data_range = [float(i.strip()) for i in input_file.readline().split()]
return grid_id, shape, region, data_range
def _check_surfer_integrity(field, shape, data_range):
"""
Check that the grid matches the information from the header.
"""
if field.shape != shape:
raise IOError(
"Grid shape {} doesn't match shape read from header {}.".format(
field.shape, shape
)
)
field_range = [field.min(), field.max()]
if not np.allclose(field_range, data_range):
raise IOError(
"Grid data range {} doesn't match range read from header {}.".format(
field_range, data_range
)
)
|
76d8fad0b7962c6c83c8537d28ec2dd0cfe3108d
|
3f0948e07aef06f734fa6db3945b192f71ab435f
|
/tests/test_typing_utils.py
|
8f87946885d32871b0012eda45cf9d36c7341e41
|
[
"BSD-3-Clause"
] |
permissive
|
nucleic/atom
|
c15c932c3a1386469685580867105df7be5c0ba3
|
761a52821d8c77b5718216256963682d11599c1e
|
refs/heads/main
| 2023-08-25T07:55:24.697711
| 2023-05-05T07:59:30
| 2023-05-05T07:59:30
| 8,594,952
| 251
| 49
|
NOASSERTION
| 2023-09-11T17:00:00
| 2013-03-06T03:20:53
|
Python
|
UTF-8
|
Python
| false
| false
| 2,163
|
py
|
test_typing_utils.py
|
# --------------------------------------------------------------------------------------
# Copyright (c) 2021, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# --------------------------------------------------------------------------------------
"""Test typing utilities."""
import sys
from collections.abc import Iterable
from typing import Dict, List, Optional, Set, TypeVar, Union
import pytest
from atom.typing_utils import extract_types, is_optional
# TypeVar fixtures exercising each branch of extract_types:
T = TypeVar("T")  # unbound -> falls back to object (see test below)
U = TypeVar("U", bound=int)  # bound -> resolved to its bound
V = TypeVar("V", int, float)  # constrained -> rejected with ValueError
W = TypeVar("W", contravariant=True)  # contravariant -> rejected with ValueError
@pytest.mark.parametrize(
    "ty, outputs",
    [
        (List[int], (list,)),
        (Dict[str, int], (dict,)),
        (Set[int], (set,)),
        (Optional[int], (int, type(None))),
        (Union[int, str], (int, str)),
        (Union[int, Optional[str]], (int, str, type(None))),
        (Union[int, Union[str, bytes]], (int, str, bytes)),
    ]
    # Builtin generics (list[int], ...) are only subscriptable on 3.9+.
    + (
        [
            (list[int], (list,)),
            (dict[str, int], (dict,)),
            (set[int], (set,)),
            (Iterable[int], (Iterable,)),
        ]
        if sys.version_info >= (3, 9)
        else []
    )
    # PEP 604 unions (int | str) require Python 3.10+.
    + ([(int | str, (int, str))] if sys.version_info >= (3, 10) else []),
)
def test_extract_types(ty, outputs):
    """Generic aliases and unions are flattened to their runtime classes."""
    assert extract_types(ty) == outputs
def test_extract_types_for_type_vars():
    """TypeVars: unbound -> object, bound -> the bound; constrained or
    contravariant TypeVars are rejected with a descriptive ValueError."""
    assert extract_types(T) == (object,)
    assert extract_types(U) == (int,)
    for var, keyword in ((V, "Constraints"), (W, "contravariant")):
        with pytest.raises(ValueError) as excinfo:
            extract_types(var)
        assert keyword in excinfo.exconly()
@pytest.mark.parametrize(
    "ty, outputs",
    [
        (Optional[int], (True, (int,))),
        (Union[int, str], (False, (int, str))),
        (Union[int, Optional[str]], (True, (int, str))),
    ],
)
def test_is_optional(ty, outputs):
    """is_optional strips NoneType and reports whether it was present."""
    assert is_optional(extract_types(ty)) == outputs
def test_reject_str_annotations():
    """String (forward-reference) annotations must be rejected outright."""
    with pytest.raises(TypeError):
        extract_types("int")
|
ab750627424dbe897c7432c09c6d8d1002e0c768
|
a8b0599af76b5393039431f876be00d628a1fe43
|
/examples/breast_cancer.py
|
78f65e299f3442208611b4865882d3f872644ba9
|
[
"Apache-2.0"
] |
permissive
|
comet-ml/kangas
|
c951f648d890dca5a66cbab405d3437be2f3e9e3
|
df0c1a495032cc4f1c367c74fcb0ef6e5a2063be
|
refs/heads/main
| 2023-06-12T23:38:43.068259
| 2023-06-05T18:38:34
| 2023-06-05T19:28:33
| 550,324,241
| 944
| 41
|
Apache-2.0
| 2023-06-05T19:28:35
| 2022-10-12T15:10:04
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 3,319
|
py
|
breast_cancer.py
|
# -*- coding: utf-8 -*-
######################################################
# _____ _____ _ _ #
# (____ \ _ | ___) (_) | | #
# _ \ \ ____| |_ ____| | ___ ___ _ _ | | #
# | | | )/ _ | _)/ _ | |(_ / __) |/ || | #
# | |__/ ( ( | | | ( ( | | |__| | | | ( (_| | #
# |_____/ \_||_|___)\_||_|_____/|_| |_|\____| #
# #
# Copyright (c) 2023 Kangas Development Team #
# All rights reserved #
######################################################
"""
Creates 4 projection maps from sklearn's breast_cancer
dataset.
What is the effect of scaling on projections?
"""
from sklearn.datasets import load_breast_cancer
import kangas as kg
# Load the data:
ds = load_breast_cancer(as_frame=True)
df = ds.data
df["target"] = ds.target
cols = df.columns.tolist()
# Move the "target" column to the front so column 0 holds the label.
df = df[cols[-1:] + cols[:-1]]
# Get scaling values:
# Per-column statistics consumed by normalize()/standardize() below.
scales = {
    name: {
        "min": df[name].min(),
        "max": df[name].max(),
        "mean": df[name].mean(),
        "std": df[name].std(),
    }
    for name in df
}
def normalize(value, name):
    """Min-max scale *value* into [0, 1] using column *name*'s stats."""
    stats = scales[name]
    span = stats["max"] - stats["min"]
    return (value - stats["min"]) / span
def standardize(value, name):
    """Z-score *value* using column *name*'s mean and standard deviation."""
    stats = scales[name]
    return (value - stats["mean"]) / stats["std"]
# Create the datagrid:
pca = []
t_sne = []
# normalized_pca = []
# normalized_t_sne = []
# standardized_pca = []
# standardized_t_sne = []
names = []
col_names = list(df.columns)
for index, column in df.iterrows():
    # Everything, but target:
    # Scale the data
    # NOTE(review): normalized_data/standardized_data are currently unused;
    # they feed the disabled "Normalized"/"Standardized" variants below.
    normalized_data = [
        normalize(row, col_names[i + 1]) for i, row in enumerate(column[1:])
    ]
    standardized_data = [
        standardize(row, col_names[i + 1]) for i, row in enumerate(column[1:])
    ]
    data = [row for i, row in enumerate(column[1:])]
    # The target is the name:
    name = str(int(column[0]))
    names.append(name)
    # PCA:
    pca.append(kg.Embedding(data, name=name, text=str(index + 1), projection="pca"))
    # t-SNE:
    t_sne.append(kg.Embedding(data, name=name, text=str(index + 1), projection="t-sne"))
# Disabled variants kept for reference (scaled projections):
"""
# PCA:
normalized_pca.append(
    kg.Embedding(normalized_data, name=name, text=str(index + 1), projection="pca")
)
standardized_pca.append(
    kg.Embedding(
        standardized_data, name=name, text=str(index + 1), projection="pca"
    )
)
# t-SNE:
t_sne.append(kg.Embedding(data, name=name, text=str(index + 1), projection="t-sne"))
normalized_t_sne.append(
    kg.Embedding(
        normalized_data, name=name, text=str(index + 1), projection="t-sne"
    )
)
standardized_t_sne.append(
    kg.Embedding(
        standardized_data, name=name, text=str(index + 1), projection="t-sne"
    )
)
"""
# Build the datagrid from the dataframe and attach the embedding columns.
dg = kg.read_dataframe(df, name="breast-cancer")
dg.append_columns(
    {
        "Label": names,
        "PCA": pca,
        # "Normalized PCA": normalized_pca,
        # "Standardized PCA": standardized_pca,
        "t-SNE": t_sne,
        # "Normalized t-SNE": normalized_t_sne,
        # "Standardized t-SNE": standardized_t_sne,
        "All": [1] * len(pca),
    }
)
dg.save()
dg.set_about_from_script(__file__)
|
586a56903d4a0c57d90a3fa0a0b7d5444d8907a1
|
549270020f6c8724e2ef1b12e38d11b025579f8d
|
/recipes/pgm-index/all/conanfile.py
|
0169b4f5c8cc9594e068e882c672d11a1a6d406f
|
[
"MIT"
] |
permissive
|
conan-io/conan-center-index
|
1bcec065ccd65aa38b1fed93fbd94d9d5fe6bc43
|
3b17e69bb4e5601a850b6e006e44775e690bac33
|
refs/heads/master
| 2023-08-31T11:34:45.403978
| 2023-08-31T11:13:23
| 2023-08-31T11:13:23
| 204,671,232
| 844
| 1,820
|
MIT
| 2023-09-14T21:22:42
| 2019-08-27T09:43:58
|
Python
|
UTF-8
|
Python
| false
| false
| 2,377
|
py
|
conanfile.py
|
from conan import ConanFile
from conan.errors import ConanInvalidConfiguration
from conan.tools.build import check_min_cppstd
from conan.tools.files import copy, get
from conan.tools.layout import basic_layout
from conan.tools.microsoft import is_msvc
from conan.tools.scm import Version
import os
required_conan_version = ">=1.50.0"
# Conan recipe for the header-only PGM-index library.
class PgmIndexConan(ConanFile):
    name = "pgm-index"
    description = (
        "State-of-the-art learned data structure that enables fast lookup, "
        "predecessor, range searches and updates in arrays"
    )
    license = "Apache-2.0"
    topics = ("data-structure", "spatial-index", "b-tree", "compression", "database", "machine-learning")
    homepage = "https://pgm.di.unipi.it"
    url = "https://github.com/conan-io/conan-center-index"
    settings = "os", "arch", "compiler", "build_type"
    # Header-only: headers can be packaged directly from the source folder.
    no_copy_source = True
    @property
    def _min_cppstd(self):
        # The library requires C++17.
        return "17"
    @property
    def _compilers_minimum_version(self):
        # Oldest compiler versions accepted for the required C++ standard.
        return {
            "gcc": "7",
            "clang": "5",
            "apple-clang": "10",
        }
    def layout(self):
        basic_layout(self, src_folder="src")
    def package_id(self):
        # Header-only: the package is identical for every configuration.
        self.info.clear()
    def validate(self):
        if is_msvc(self):
            # see https://github.com/gvinciguerra/PGM-index/issues/19
            raise ConanInvalidConfiguration(f"{self.ref} doesn't support Visual Studio")
        if self.settings.compiler.get_safe("cppstd"):
            check_min_cppstd(self, self._min_cppstd)
        minimum_version = self._compilers_minimum_version.get(str(self.settings.compiler), False)
        if minimum_version and Version(self.settings.compiler.version) < minimum_version:
            raise ConanInvalidConfiguration(
                f"{self.ref} requires C++{self._min_cppstd}, which your compiler does not support.",
            )
    def source(self):
        get(self, **self.conan_data["sources"][self.version], strip_root=True)
    def build(self):
        # Nothing to build for a header-only library.
        pass
    def package(self):
        copy(self, "LICENSE", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
        copy(self, "*.hpp", src=os.path.join(self.source_folder, "include"), dst=os.path.join(self.package_folder, "include"))
    def package_info(self):
        # Only headers are shipped; no libraries or binaries.
        self.cpp_info.bindirs = []
        self.cpp_info.libdirs = []
|
43b75b61353d8d5d4c1b16a99ce9b7abd87729f1
|
11addbd8e89d5ea614c7668b4fef736bb41d4dac
|
/core/jms-implementation/support-mini-x86-32/bin/panther/Bio/_utils.py
|
e1320cd687350ef08ea1c11859fb9f714c265170
|
[
"BSD-3-Clause",
"LicenseRef-scancode-biopython",
"Apache-2.0"
] |
permissive
|
ebi-pf-team/interproscan
|
e5a99140b0d66e7b5b722093be6cb0249d498294
|
1fe33c359c01d452305233272f163d28463d3f98
|
refs/heads/master
| 2023-08-29T12:58:05.713771
| 2023-06-22T08:07:43
| 2023-06-22T08:07:43
| 39,084,866
| 265
| 78
|
Apache-2.0
| 2023-06-29T15:46:42
| 2015-07-14T15:54:17
|
Java
|
UTF-8
|
Python
| false
| false
| 4,246
|
py
|
_utils.py
|
# Copyright 2010 by Eric Talevich. All rights reserved.
# Copyright 2012 by Wibowo Arindrarto. All rights reserved.
#
# This file is part of the Biopython distribution and governed by your
# choice of the "Biopython License Agreement" or the "BSD 3-Clause License".
# Please see the LICENSE file that should have been included as part of this
# package.
"""Common utility functions for various Bio submodules."""
from __future__ import print_function
import os
def iterlen(items):
    """Count the number of items in an iterable.

    If the argument supports len(items), and some iterators do, then
    this returns len(items). Otherwise it will scan over the entries
    in order to count them.

    Exhausts a generator, but doesn't require creating a full list.

    >>> iterlen("abcde")
    5
    >>> iterlen(iter("abcde"))
    5
    >>> iterlen(iter(""))
    0
    """
    try:
        # e.g. Under Python 2, the xrange iterator defines __len__
        return len(items)
    except TypeError:
        # Fall back to counting by iteration. A running counter (rather
        # than enumerate + last index) correctly returns 0 for an empty
        # iterator, where the old code raised UnboundLocalError.
        count = 0
        for _ in items:
            count += 1
        return count
def read_forward(handle):
    """Skip whitespace-only lines, returning the first substantive line.

    At end of file the empty string from ``handle.readline`` is returned
    as-is, so callers can detect EOF the usual way.
    """
    line = handle.readline()
    # Keep reading while the line exists but strips down to nothing.
    while line and not line.strip():
        line = handle.readline()
    return line
def _read_header(handle, length):
"""Read the specified number of characters from the given handle.
Raise a ValueError("Empty file.") if the length of data read is zero. The
reason for having a separate function for the header is it enables raising
an empty file error if the length of the data read is zero. This might
not always be the case later in the file.
"""
data = handle.read(length)
if not data:
raise ValueError("Empty file.")
if len(data) < length:
raise ValueError("Improper header, cannot read %d bytes from handle" % length)
return data
def trim_str(string, max_len, concat_char):
    """Truncate ``string`` to at most ``max_len`` characters for display.

    Longer strings are cut so that the result, including the
    ``concat_char`` suffix (e.g. "..."), is exactly ``max_len`` long.
    """
    if len(string) <= max_len:
        return string
    return string[:max_len - len(concat_char)] + concat_char
def getattr_str(obj, attr, fmt=None, fallback="?"):
    """Return a display string for the given object's attribute.

    Falls back to ``fallback`` when the attribute is missing; applies the
    %-style format ``fmt`` to the value when one is supplied.
    """
    if not hasattr(obj, attr):
        return fallback
    value = getattr(obj, attr)
    return str(value) if fmt is None else fmt % value
def find_test_dir(start_dir=None):
    """Find the absolute path of Biopython's Tests directory.

    Arguments:
    start_dir -- Initial directory to begin lookup (default to current dir)

    Walks upward from ``start_dir`` until a directory containing both a
    ``Bio`` and a ``Tests`` subdirectory is found (the Biopython source
    root). If the directory is not found up to the filesystem's root
    directory, a ValueError is raised.
    """
    if not start_dir:
        # no callbacks in function signatures!
        # defaults to the current directory
        # (using __file__ would give the installed Biopython)
        start_dir = "."
    target = os.path.abspath(start_dir)
    while True:
        if os.path.isdir(os.path.join(target, "Bio")) and \
           os.path.isdir(os.path.join(target, "Tests")):
            # Good, we're in the Biopython root now
            return os.path.abspath(os.path.join(target, "Tests"))
        # Recurse up the tree
        # TODO - Test this on Windows
        new, tmp = os.path.split(target)
        if target == new:
            # Reached root
            break
        target = new
    raise ValueError("Not within Biopython source tree: %r" %
                     os.path.abspath(start_dir))
def run_doctest(target_dir=None, *args, **kwargs):
    """Run doctest for the importing module.

    Changes into the Tests directory (found via ``find_test_dir``) so that
    doctests relying on test data files work, runs ``doctest.testmod``,
    and restores the original working directory afterwards.
    """
    import doctest
    # default doctest options
    # NOTE(review): these defaults overwrite any caller-supplied
    # optionflags -- presumably intentional to force ELLIPSIS; confirm.
    default_kwargs = {
        "optionflags": doctest.ELLIPSIS,
    }
    kwargs.update(default_kwargs)
    cur_dir = os.path.abspath(os.curdir)
    print("Running doctests...")
    try:
        os.chdir(find_test_dir(target_dir))
        doctest.testmod(*args, **kwargs)
    finally:
        # and revert back to initial directory
        os.chdir(cur_dir)
        print("Done")
# Allow running this module directly to execute its own doctests.
if __name__ == "__main__":
    run_doctest()
|
b943397db75502ae2757a08fbf1757a6de336411
|
1c82ebd0cf02a80b97c3d2200505ed470360619c
|
/dask_image/ndfilters/_smooth.py
|
ac9bd6f45fd0d98e783af8a4b8e9f69dec20c01f
|
[
"BSD-3-Clause"
] |
permissive
|
dask/dask-image
|
4f248391852a1474c937ee5332d72adea227fc5b
|
86b56a7726ced371a0a52faaf24e66fb68ca95f0
|
refs/heads/main
| 2023-08-08T12:13:09.645375
| 2023-08-08T00:21:46
| 2023-08-08T00:21:46
| 123,603,345
| 178
| 44
|
BSD-3-Clause
| 2023-09-11T02:38:25
| 2018-03-02T16:30:58
|
Python
|
UTF-8
|
Python
| false
| false
| 895
|
py
|
_smooth.py
|
# -*- coding: utf-8 -*-
import scipy.ndimage
from ..dispatch._dispatch_ndfilters import dispatch_uniform_filter
from . import _utils
from ._gaussian import gaussian_filter
__all__ = [
    "uniform_filter",
]
# Re-export so gaussian_filter is importable from this module as well.
gaussian_filter = gaussian_filter
# Wrapper copies the name/docstring of scipy.ndimage.uniform_filter so the
# dask version documents itself like the scipy original.
@_utils._update_wrapper(scipy.ndimage.uniform_filter)
def uniform_filter(image,
                   size=3,
                   mode='reflect',
                   cval=0.0,
                   origin=0):
    # Normalize ``size`` to one entry per image dimension.
    size = _utils._get_size(image.ndim, size)
    # Overlap depth: how many neighbouring elements each chunk must see
    # so the per-chunk filter matches the global result.
    depth = _utils._get_depth(size, origin)
    # "none" boundary -- presumably edge handling is left entirely to the
    # filter's own ``mode`` argument; confirm against _get_depth_boundary.
    depth, boundary = _utils._get_depth_boundary(image.ndim, depth, "none")
    result = image.map_overlap(
        dispatch_uniform_filter(image),
        depth=depth,
        boundary=boundary,
        dtype=image.dtype,
        meta=image._meta,
        size=size,
        mode=mode,
        cval=cval,
        origin=origin
    )
    return result
|
ec3c40e2650b4cbf29f4f6e6a3a7340acfe0575c
|
4467a183750d42e6633e6447b5c93f6cdf95a70a
|
/scripts/replay.py
|
fa60467f45819f18748b90e30510ba0ea04357f8
|
[
"Apache-2.0"
] |
permissive
|
erdos-project/pylot
|
e741a4377e3e102c83d6bbe94ab6745648de3b90
|
a71ae927328388dc44acc784662bf32a99f273f0
|
refs/heads/master
| 2023-04-08T00:13:49.240926
| 2023-01-27T03:24:16
| 2023-01-27T03:24:16
| 190,252,594
| 389
| 117
|
Apache-2.0
| 2023-03-24T22:58:22
| 2019-06-04T17:54:40
|
Python
|
UTF-8
|
Python
| false
| false
| 2,122
|
py
|
replay.py
|
import time
from absl import app
from absl import flags
from carla import Location, Rotation, Transform
import pylot.flags
from pylot.simulation.utils import get_world
# Command-line flags configuring the simulator replay session.
flags.DEFINE_float('replay_start_time', 0.0,
                   'The time at which to start replaying')
flags.DEFINE_float('replay_duration', 0.0,
                   'The duration of the replay run')
flags.DEFINE_integer('replay_id', 0,
                     'The actor id to follow during the replay')
flags.DEFINE_string('replay_file', '', 'Path to the log file')
FLAGS = flags.FLAGS
def process_images(image):
    """Camera callback: print the simulation time of each received frame."""
    # Convert the simulator timestamp (seconds) to whole milliseconds.
    timestamp_ms = int(image.timestamp * 1000)
    print('Received frame for {}'.format(timestamp_ms))
    # frame = pylot.utils.bgra_to_bgr(to_bgra_array(image))
    # cv2.imshow("test", frame)
    # cv2.waitKey(1)
def main(argv):
    """Replay a recorded simulation and attach an RGB camera to one actor.

    Connects to the simulator, starts replaying ``FLAGS.replay_file``,
    spawns a camera on the actor ``FLAGS.replay_id``, and streams frames
    to ``process_images`` for 20 seconds.
    """
    client, world = get_world(FLAGS.simulator_host, FLAGS.simulator_port,
                              FLAGS.simulator_timeout)
    # Replayer time factor is only available in > 0.9.5.
    client.set_replayer_time_factor(0.1)
    print(
        client.replay_file(FLAGS.replay_file,
                           FLAGS.replay_start_time,
                           FLAGS.replay_duration, FLAGS.replay_id))
    # Sleep a bit to allow the server to start the replay.
    time.sleep(1)
    vehicle = world.get_actors().find(FLAGS.replay_id)
    if vehicle is None:
        raise ValueError("There was an issue finding the vehicle.")
    # Install the camera.
    camera_blueprint = world.get_blueprint_library().find('sensor.camera.rgb')
    camera_blueprint.set_attribute('image_size_x',
                                   str(FLAGS.camera_image_width))
    camera_blueprint.set_attribute('image_size_y',
                                   str(FLAGS.camera_image_height))
    # Mount slightly ahead of and above the vehicle origin.
    transform = Transform(Location(2.0, 0.0, 1.4),
                          Rotation(pitch=0, yaw=0, roll=0))
    camera = world.spawn_actor(camera_blueprint, transform, attach_to=vehicle)
    # Register the callback on the camera.
    camera.listen(process_images)
    # Keep the script alive while frames stream in; presumably 20 s covers
    # the replay of interest -- TODO confirm.
    time.sleep(20)
# absl parses the flags and then invokes main with the remaining argv.
if __name__ == '__main__':
    app.run(main)
|
f4481e68bda4bf8acce533d27130ddd4cca90954
|
61a148d684047323f866017c6c95e0dc78682c43
|
/core/amber/src/main/python/core/runnables/test_main_loop.py
|
df12252605b92fd3bad1aff131fd9825a2b5c358
|
[
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
] |
permissive
|
Texera/texera
|
9dd92dd0999fd78ff37cb6241f3395d475549e27
|
ca554ecad8e161b489aa17bdb17c9249ef888b6d
|
refs/heads/master
| 2023-09-03T21:46:42.147647
| 2023-08-31T21:42:30
| 2023-08-31T21:42:30
| 53,976,910
| 129
| 61
|
Apache-2.0
| 2023-09-14T15:53:52
| 2016-03-15T20:38:46
|
Scala
|
UTF-8
|
Python
| false
| false
| 19,114
|
py
|
test_main_loop.py
|
import inspect
from threading import Thread
import pandas
import pyarrow
import pytest
from core.models import (
InputDataFrame,
OutputDataFrame,
EndOfUpstream,
InternalQueue,
Tuple,
)
from core.models.internal_queue import DataElement, ControlElement
from core.runnables import MainLoop
from core.util import set_one_of
from proto.edu.uci.ics.amber.engine.architecture.sendsemantics import (
OneToOnePartitioning,
Partitioning,
)
from proto.edu.uci.ics.amber.engine.architecture.worker import (
AddPartitioningV2,
ControlCommandV2,
ControlReturnV2,
QueryStatisticsV2,
UpdateInputLinkingV2,
WorkerExecutionCompletedV2,
WorkerState,
WorkerStatistics,
LinkCompletedV2,
InitializeOperatorLogicV2,
LinkOrdinal,
PauseWorkerV2,
ResumeWorkerV2,
)
from proto.edu.uci.ics.amber.engine.common import (
ActorVirtualIdentity,
ControlInvocationV2,
ControlPayloadV2,
LayerIdentity,
LinkIdentity,
ReturnInvocationV2,
)
from pytexera.udf.examples.count_batch_operator import CountBatchOperator
from pytexera.udf.examples.echo_operator import EchoOperator
class TestMainLoop:
    """Integration tests driving a MainLoop worker through its queues.

    Control and data messages are pushed onto the worker's input queue and
    the responses are asserted off its output queue, mimicking the protocol
    the Amber engine controller speaks with a Python UDF worker.
    """
    # --- identity / payload fixtures shared across the tests -------------
    @pytest.fixture
    def command_sequence(self):
        return 1
    @pytest.fixture
    def mock_link(self):
        return LinkIdentity(
            from_=LayerIdentity("from", "from", "from"),
            to=LayerIdentity("to", "to", "to"),
        )
    @pytest.fixture
    def mock_tuple(self):
        return Tuple({"test-1": "hello", "test-2": 10})
    @pytest.fixture
    def mock_batch(self):
        # 57 tuples with distinct "test-2" values, used by the batch test.
        batch_list = []
        for i in range(57):
            batch_list.append(Tuple({"test-1": "hello", "test-2": i}))
        return batch_list
    @pytest.fixture
    def mock_sender_actor(self):
        return ActorVirtualIdentity("sender")
    @pytest.fixture
    def mock_controller(self):
        return ActorVirtualIdentity("CONTROLLER")
    @pytest.fixture
    def mock_receiver_actor(self):
        return ActorVirtualIdentity("receiver")
    @pytest.fixture
    def mock_data_element(self, mock_tuple, mock_sender_actor):
        # A single-tuple data frame as the worker would receive it.
        return DataElement(
            tag=mock_sender_actor,
            payload=InputDataFrame(
                frame=pyarrow.Table.from_pandas(
                    pandas.DataFrame([mock_tuple.as_dict()])
                )
            ),
        )
    @pytest.fixture
    def mock_batch_data_elements(self, mock_batch, mock_sender_actor):
        # One single-tuple DataElement per entry of mock_batch.
        data_elements = []
        for i in range(57):
            mock_tuple = Tuple({"test-1": "hello", "test-2": i})
            data_elements.append(
                DataElement(
                    tag=mock_sender_actor,
                    payload=InputDataFrame(
                        frame=pyarrow.Table.from_pandas(
                            pandas.DataFrame([mock_tuple.as_dict()])
                        )
                    ),
                )
            )
        return data_elements
    @pytest.fixture
    def mock_end_of_upstream(self, mock_tuple, mock_sender_actor):
        return DataElement(tag=mock_sender_actor, payload=EndOfUpstream())
    @pytest.fixture
    def input_queue(self):
        return InternalQueue()
    @pytest.fixture
    def output_queue(self):
        return InternalQueue()
    # --- control-message fixtures ----------------------------------------
    @pytest.fixture
    def mock_update_input_linking(
        self, mock_controller, mock_sender_actor, mock_link, command_sequence
    ):
        command = set_one_of(
            ControlCommandV2,
            UpdateInputLinkingV2(identifier=mock_sender_actor, input_link=mock_link),
        )
        payload = set_one_of(
            ControlPayloadV2,
            ControlInvocationV2(command_id=command_sequence, command=command),
        )
        return ControlElement(tag=mock_controller, payload=payload)
    @pytest.fixture
    def mock_raw_schema(self):
        return {"test-1": "string", "test-2": "integer"}
    @pytest.fixture
    def mock_initialize_operator_logic(
        self,
        mock_controller,
        mock_sender_actor,
        mock_link,
        command_sequence,
        mock_raw_schema,
    ):
        # Ships the EchoOperator source code to the worker.
        command = set_one_of(
            ControlCommandV2,
            InitializeOperatorLogicV2(
                code="from pytexera import *\n" + inspect.getsource(EchoOperator),
                is_source=False,
                input_ordinal_mapping=[LinkOrdinal(mock_link, 0)],
                output_ordinal_mapping=[],
                output_schema=mock_raw_schema,
            ),
        )
        payload = set_one_of(
            ControlPayloadV2,
            ControlInvocationV2(command_id=command_sequence, command=command),
        )
        return ControlElement(tag=mock_controller, payload=payload)
    @pytest.fixture
    def mock_initialize_batch_count_operator_logic(
        self,
        mock_controller,
        mock_sender_actor,
        mock_link,
        command_sequence,
        mock_raw_schema,
    ):
        # Same as above but ships the batching CountBatchOperator.
        command = set_one_of(
            ControlCommandV2,
            InitializeOperatorLogicV2(
                code="from pytexera import *\n" + inspect.getsource(CountBatchOperator),
                is_source=False,
                input_ordinal_mapping=[LinkOrdinal(mock_link, 0)],
                output_ordinal_mapping=[],
                output_schema=mock_raw_schema,
            ),
        )
        payload = set_one_of(
            ControlPayloadV2,
            ControlInvocationV2(command_id=command_sequence, command=command),
        )
        return ControlElement(tag=mock_controller, payload=payload)
    @pytest.fixture
    def mock_add_partitioning(
        self, mock_controller, mock_receiver_actor, command_sequence
    ):
        command = set_one_of(
            ControlCommandV2,
            AddPartitioningV2(
                tag=mock_receiver_actor,
                partitioning=set_one_of(
                    Partitioning,
                    OneToOnePartitioning(batch_size=1, receivers=[mock_receiver_actor]),
                ),
            ),
        )
        payload = set_one_of(
            ControlPayloadV2,
            ControlInvocationV2(command_id=command_sequence, command=command),
        )
        return ControlElement(tag=mock_controller, payload=payload)
    @pytest.fixture
    def mock_query_statistics(
        self, mock_controller, mock_sender_actor, command_sequence
    ):
        command = set_one_of(ControlCommandV2, QueryStatisticsV2())
        payload = set_one_of(
            ControlPayloadV2,
            ControlInvocationV2(command_id=command_sequence, command=command),
        )
        return ControlElement(tag=mock_controller, payload=payload)
    @pytest.fixture
    def mock_pause(self, mock_controller, mock_sender_actor, command_sequence):
        command = set_one_of(ControlCommandV2, PauseWorkerV2())
        payload = set_one_of(
            ControlPayloadV2,
            ControlInvocationV2(command_id=command_sequence, command=command),
        )
        return ControlElement(tag=mock_controller, payload=payload)
    @pytest.fixture
    def mock_resume(self, mock_controller, mock_sender_actor, command_sequence):
        command = set_one_of(ControlCommandV2, ResumeWorkerV2())
        payload = set_one_of(
            ControlPayloadV2,
            ControlInvocationV2(command_id=command_sequence, command=command),
        )
        return ControlElement(tag=mock_controller, payload=payload)
    # --- worker under test -----------------------------------------------
    @pytest.fixture
    def main_loop(self, input_queue, output_queue, mock_link):
        main_loop = MainLoop("dummy_worker_id", input_queue, output_queue)
        yield main_loop
        main_loop.stop()
    @pytest.fixture
    def main_loop_thread(self, main_loop, reraise):
        # ``reraise`` surfaces exceptions from the worker thread in the test.
        def wrapper():
            with reraise:
                main_loop.run()
        main_loop_thread = Thread(target=wrapper, name="main_loop_thread")
        yield main_loop_thread
    @staticmethod
    def check_batch_rank_sum(
        operator,
        input_queue,
        mock_batch_data_elements,
        output_data_elements,
        output_queue,
        mock_batch,
        start,
        end,
        count,
    ):
        """Feed elements [start, end) and verify batch count and value sums."""
        # Checking the rank sum of each batch to make sure the accuracy
        for i in range(start, end):
            input_queue.put(mock_batch_data_elements[i])
        rank_sum_real = 0
        rank_sum_suppose = 0
        for i in range(start, end):
            output_data_elements.append(output_queue.get())
            rank_sum_real += output_data_elements[i].payload.frame[0]["test-2"]
            rank_sum_suppose += mock_batch[i]["test-2"]
        assert operator.count == count
        assert rank_sum_real == rank_sum_suppose
    @pytest.mark.timeout(2)
    def test_main_loop_thread_can_start(self, main_loop_thread):
        main_loop_thread.start()
        assert main_loop_thread.is_alive()
    @pytest.mark.timeout(2)
    def test_main_loop_thread_can_process_messages(
        self,
        mock_link,
        mock_receiver_actor,
        mock_controller,
        input_queue,
        output_queue,
        mock_data_element,
        main_loop_thread,
        mock_update_input_linking,
        mock_add_partitioning,
        mock_initialize_operator_logic,
        mock_end_of_upstream,
        mock_query_statistics,
        mock_tuple,
        command_sequence,
        reraise,
    ):
        """Walk the worker through a full control/data lifecycle."""
        main_loop_thread.start()
        # can process UpdateInputLinking
        input_queue.put(mock_update_input_linking)
        assert output_queue.get() == ControlElement(
            tag=mock_controller,
            payload=ControlPayloadV2(
                return_invocation=ReturnInvocationV2(
                    original_command_id=command_sequence,
                    control_return=ControlReturnV2(),
                )
            ),
        )
        # can process AddPartitioning
        input_queue.put(mock_add_partitioning)
        assert output_queue.get() == ControlElement(
            tag=mock_controller,
            payload=ControlPayloadV2(
                return_invocation=ReturnInvocationV2(
                    original_command_id=command_sequence,
                    control_return=ControlReturnV2(),
                )
            ),
        )
        # can process InitializeOperatorLogic
        input_queue.put(mock_initialize_operator_logic)
        assert output_queue.get() == ControlElement(
            tag=mock_controller,
            payload=ControlPayloadV2(
                return_invocation=ReturnInvocationV2(
                    original_command_id=command_sequence,
                    control_return=ControlReturnV2(),
                )
            ),
        )
        # can process a InputDataFrame
        input_queue.put(mock_data_element)
        output_data_element: DataElement = output_queue.get()
        assert output_data_element.tag == mock_receiver_actor
        assert isinstance(output_data_element.payload, OutputDataFrame)
        data_frame: OutputDataFrame = output_data_element.payload
        assert len(data_frame.frame) == 1
        assert data_frame.frame[0] == mock_tuple
        # can process QueryStatistics
        input_queue.put(mock_query_statistics)
        assert output_queue.get() == ControlElement(
            tag=mock_controller,
            payload=ControlPayloadV2(
                return_invocation=ReturnInvocationV2(
                    original_command_id=1,
                    control_return=ControlReturnV2(
                        worker_statistics=WorkerStatistics(
                            worker_state=WorkerState.RUNNING,
                            input_tuple_count=1,
                            output_tuple_count=1,
                        )
                    ),
                )
            ),
        )
        # can process EndOfUpstream
        input_queue.put(mock_end_of_upstream)
        assert output_queue.get() == ControlElement(
            tag=mock_controller,
            payload=ControlPayloadV2(
                control_invocation=ControlInvocationV2(
                    command_id=0,
                    command=ControlCommandV2(
                        link_completed=LinkCompletedV2(link_id=mock_link)
                    ),
                )
            ),
        )
        # WorkerExecutionCompletedV2 should be triggered when workflow finishes
        assert output_queue.get() == ControlElement(
            tag=mock_controller,
            payload=ControlPayloadV2(
                control_invocation=ControlInvocationV2(
                    command_id=1,
                    command=ControlCommandV2(
                        worker_execution_completed=WorkerExecutionCompletedV2()
                    ),
                )
            ),
        )
        assert output_queue.get() == DataElement(
            tag=mock_receiver_actor, payload=EndOfUpstream()
        )
        # can process ReturnInvocation
        input_queue.put(
            ControlElement(
                tag=mock_controller,
                payload=set_one_of(
                    ControlPayloadV2,
                    ReturnInvocationV2(
                        original_command_id=0, control_return=ControlReturnV2()
                    ),
                ),
            )
        )
        reraise()
    @pytest.mark.timeout(5)
    def test_batch_dp_thread_can_process_batch(
        self,
        mock_controller,
        mock_link,
        input_queue,
        output_queue,
        mock_receiver_actor,
        main_loop,
        main_loop_thread,
        mock_query_statistics,
        mock_update_input_linking,
        mock_add_partitioning,
        mock_pause,
        mock_resume,
        mock_initialize_batch_count_operator_logic,
        mock_batch,
        mock_batch_data_elements,
        mock_end_of_upstream,
        command_sequence,
        reraise,
    ):
        """Verify batching across pause/resume while BATCH_SIZE changes."""
        main_loop_thread.start()
        # can process UpdateInputLinking
        input_queue.put(mock_update_input_linking)
        assert output_queue.get() == ControlElement(
            tag=mock_controller,
            payload=ControlPayloadV2(
                return_invocation=ReturnInvocationV2(
                    original_command_id=command_sequence,
                    control_return=ControlReturnV2(),
                )
            ),
        )
        # can process AddPartitioning
        input_queue.put(mock_add_partitioning)
        assert output_queue.get() == ControlElement(
            tag=mock_controller,
            payload=ControlPayloadV2(
                return_invocation=ReturnInvocationV2(
                    original_command_id=command_sequence,
                    control_return=ControlReturnV2(),
                )
            ),
        )
        # can process InitializeOperatorLogic
        input_queue.put(mock_initialize_batch_count_operator_logic)
        assert output_queue.get() == ControlElement(
            tag=mock_controller,
            payload=ControlPayloadV2(
                return_invocation=ReturnInvocationV2(
                    original_command_id=command_sequence,
                    control_return=ControlReturnV2(),
                )
            ),
        )
        operator = main_loop.context.operator_manager.operator
        output_data_elements = []
        # can process a InputDataFrame
        operator.BATCH_SIZE = 10
        for i in range(13):
            input_queue.put(mock_batch_data_elements[i])
        for i in range(10):
            output_data_elements.append(output_queue.get())
        self.send_pause(
            command_sequence, input_queue, mock_controller, mock_pause, output_queue
        )
        # input queue 13, output queue 10, batch_buffer 3
        assert operator.count == 1
        operator.BATCH_SIZE = 20
        self.send_resume(
            command_sequence, input_queue, mock_controller, mock_resume, output_queue
        )
        for i in range(13, 41):
            input_queue.put(mock_batch_data_elements[i])
        for i in range(20):
            output_data_elements.append(output_queue.get())
        self.send_pause(
            command_sequence, input_queue, mock_controller, mock_pause, output_queue
        )
        # input queue 41, output queue 30, batch_buffer 11
        assert operator.count == 2
        operator.BATCH_SIZE = 5
        self.send_resume(
            command_sequence, input_queue, mock_controller, mock_resume, output_queue
        )
        input_queue.put(mock_batch_data_elements[41])
        input_queue.put(mock_batch_data_elements[42])
        for i in range(10):
            output_data_elements.append(output_queue.get())
        self.send_pause(
            command_sequence, input_queue, mock_controller, mock_pause, output_queue
        )
        # input queue 43, output queue 40, batch_buffer 3
        assert operator.count == 4
        self.send_resume(
            command_sequence, input_queue, mock_controller, mock_resume, output_queue
        )
        for i in range(43, 57):
            input_queue.put(mock_batch_data_elements[i])
        for i in range(15):
            output_data_elements.append(output_queue.get())
        self.send_pause(
            command_sequence, input_queue, mock_controller, mock_pause, output_queue
        )
        # input queue 57, output queue 55, batch_buffer 2
        assert operator.count == 7
        self.send_resume(
            command_sequence, input_queue, mock_controller, mock_resume, output_queue
        )
        input_queue.put(mock_end_of_upstream)
        for i in range(2):
            output_data_elements.append(output_queue.get())
        # check the batch count
        assert main_loop.context.operator_manager.operator.count == 8
        assert output_data_elements[0].tag == mock_receiver_actor
        assert isinstance(output_data_elements[0].payload, OutputDataFrame)
        data_frame: OutputDataFrame = output_data_elements[0].payload
        assert len(data_frame.frame) == 1
        assert data_frame.frame[0] == Tuple(mock_batch[0])
        reraise()
    @staticmethod
    def send_pause(
        command_sequence, input_queue, mock_controller, mock_pause, output_queue
    ):
        """Send PauseWorker and assert the PAUSED acknowledgement."""
        input_queue.put(mock_pause)
        assert output_queue.get() == ControlElement(
            tag=mock_controller,
            payload=ControlPayloadV2(
                return_invocation=ReturnInvocationV2(
                    original_command_id=command_sequence,
                    control_return=ControlReturnV2(worker_state=WorkerState.PAUSED),
                )
            ),
        )
    @staticmethod
    def send_resume(
        command_sequence, input_queue, mock_controller, mock_resume, output_queue
    ):
        """Send ResumeWorker and assert the RUNNING acknowledgement."""
        input_queue.put(mock_resume)
        assert output_queue.get() == ControlElement(
            tag=mock_controller,
            payload=ControlPayloadV2(
                return_invocation=ReturnInvocationV2(
                    original_command_id=command_sequence,
                    control_return=ControlReturnV2(worker_state=WorkerState.RUNNING),
                )
            ),
        )
|
a2abaf0b6a30ee51ad42e4b35104be66ce7b5df2
|
cc0ebf14bf29edfd89441749310f44f4aad989e6
|
/windpowerlib/turbine_cluster_modelchain.py
|
4f3258a5ee1709dd4893dd3b96e9be44a54ebc5b
|
[
"MIT"
] |
permissive
|
wind-python/windpowerlib
|
253471ccce018a328b96f65da7a768182a630bef
|
2c5af71bdaa69376d3c0caa8d0a31f1b03681fc4
|
refs/heads/dev
| 2023-08-22T21:05:46.880749
| 2023-04-12T19:01:00
| 2023-04-12T19:01:00
| 66,651,306
| 290
| 108
|
MIT
| 2023-08-15T09:22:27
| 2016-08-26T13:50:35
|
Python
|
UTF-8
|
Python
| false
| false
| 13,193
|
py
|
turbine_cluster_modelchain.py
|
"""
The ``turbine_cluster_modelchain`` module contains functions and classes of the
windpowerlib. This module makes it easy to get started with the windpowerlib
and shows use cases for the power output calculation of wind farms and wind
turbine clusters.
SPDX-FileCopyrightText: 2019 oemof developer group <contact@oemof.org>
SPDX-License-Identifier: MIT
"""
import logging
from windpowerlib import wake_losses
from windpowerlib.modelchain import ModelChain, data
class TurbineClusterModelChain(ModelChain):
    r"""
    Model to determine the output of a wind farm or wind turbine cluster.

    Parameters
    ----------
    power_plant : :class:`~.wind_farm.WindFarm` or :class:`~.wind_turbine_cluster.WindTurbineCluster`
        A :class:`~.wind_farm.WindFarm` object representing the wind farm or
        a :class:`~.wind_turbine_cluster.WindTurbineCluster` object
        representing the wind turbine cluster.
    wake_losses_model : str or None
        Defines the method for taking wake losses within the farm into
        consideration.

        * None -
          Wake losses are not taken into account.
        * 'wind_farm_efficiency' -
          The values of the wind farm power curve(s) are reduced by the wind
          farm efficiency, which needs to be set in the
          :py:class:`~.wind_farm.WindFarm` class. Note: The wind farm
          efficiency has no effect if `wake_losses_model` is not set to
          'wind_farm_efficiency'.
          See :func:`~.power_curves.wake_losses_to_power_curve` for more
          information.
        * 'dena_mean' or name of other wind efficiency curve -
          The values of the wind speed time series are reduced by the chosen
          wind efficiency curve in :func:`~.run_model` before the power output
          calculations.
          See :func:`~.wake_losses.reduce_wind_speed` for more information.
          Use :func:`~.wake_losses.get_wind_efficiency_curve` to get a
          DataFrame of all provided wind efficiency curves and see the provided
          example on how to plot the wind efficiency curves.

        Default: 'dena_mean'.
    smoothing : bool
        If True the power curves will be smoothed to account for the
        distribution of wind speeds over space. Depending on the parameter
        `smoothing_order` the power curves are smoothed before or after
        aggregating wind turbine power curves to one representative power
        curve of the wind farm or cluster.
        See :func:`~.power_curves.smooth_power_curve` for more information.
        Default: False.
    block_width : float
        Width between the wind speeds in the sum of the equation in
        :py:func:`~.power_curves.smooth_power_curve`. This parameter is only
        used if `smoothing` is True. To achieve a smooth curve without steps a
        value not much higher than the step width between the power curve wind
        speeds should be chosen.
        Default: 0.5.
    standard_deviation_method : str
        Method for calculating the standard deviation for the Gauss
        distribution if `smoothing` is True.

        * 'turbulence_intensity' -
          See :func:`~.power_curves.smooth_power_curve` for more information.
        * 'Staffell_Pfenninger' -
          See :func:`~.power_curves.smooth_power_curve` for more information.

        Default: 'turbulence_intensity'.
    smoothing_order : str
        Defines when the smoothing takes place if `smoothing` is True.

        * 'turbine_power_curves' -
          Smoothing is applied to wind turbine power curves.
        * 'wind_farm_power_curves' -
          Smoothing is applied to wind farm power curves.

        Default: 'wind_farm_power_curves'.

    Other Parameters
    ----------------
    wind_speed_model :
        See :py:class:`~.modelchain.ModelChain` for more information.
    temperature_model :
        See :py:class:`~.modelchain.ModelChain` for more information.
    density_model :
        See :py:class:`~.modelchain.ModelChain` for more information.
    power_output_model :
        See :py:class:`~.modelchain.ModelChain` for more information.
    density_correction :
        See :py:class:`~.modelchain.ModelChain` for more information.
    obstacle_height :
        See :py:class:`~.modelchain.ModelChain` for more information.
    hellman_exp :
        See :py:class:`~.modelchain.ModelChain` for more information.

    Attributes
    ----------
    power_plant : :class:`~.wind_farm.WindFarm` or :class:`~.wind_turbine_cluster.WindTurbineCluster`
        A :class:`~.wind_farm.WindFarm` object representing the wind farm or
        a :class:`~.wind_turbine_cluster.WindTurbineCluster` object
        representing the wind turbine cluster.
    wake_losses_model : str or None
        Defines the method for taking wake losses within the farm into
        consideration.
    smoothing : bool
        If True the power curves are smoothed.
    block_width : float
        Width between the wind speeds in the sum of the equation in
        :py:func:`~.power_curves.smooth_power_curve`.
    standard_deviation_method : str
        Method for calculating the standard deviation for the Gauss
        distribution.
    smoothing_order : str
        Defines when the smoothing takes place if `smoothing` is True.
    power_output : :pandas:`pandas.Series<series>`
        Electrical power output of the wind turbine in W.
    power_curve : :pandas:`pandas.Dataframe<frame>` or None
        The calculated power curve of the wind farm.
    wind_speed_model : str
        Defines which model is used to calculate the wind speed at hub height.
    temperature_model : str
        Defines which model is used to calculate the temperature of air at hub
        height.
    density_model : str
        Defines which model is used to calculate the density of air at hub
        height.
    power_output_model : str
        Defines which model is used to calculate the turbine power output.
    density_correction : bool
        Used to set `density_correction` parameter in
        :func:`~.power_output.power_curve`.
    obstacle_height : float
        Used to set `obstacle_height` in :func:`~.wind_speed.logarithmic`.
    hellman_exp : float
        Used to set `hellman_exponent` in :func:`~.wind_speed.hellman`.
    """

    def __init__(
        self,
        power_plant,
        wake_losses_model="dena_mean",
        smoothing=False,
        block_width=0.5,
        standard_deviation_method="turbulence_intensity",
        smoothing_order="wind_farm_power_curves",
        **kwargs,
    ):
        super(TurbineClusterModelChain, self).__init__(power_plant, **kwargs)
        # NOTE(review): ModelChain.__init__ was just handed power_plant and
        # presumably stores it as well; the explicit reassignment below is
        # kept for clarity — confirm against modelchain.ModelChain.
        self.power_plant = power_plant
        self.wake_losses_model = wake_losses_model
        self.smoothing = smoothing
        self.block_width = block_width
        self.standard_deviation_method = standard_deviation_method
        self.smoothing_order = smoothing_order
        # Filled later: power_curve by assign_power_curve(), power_output
        # by run_model().
        self.power_curve = None
        self.power_output = None

    def assign_power_curve(self, weather_df):
        r"""
        Calculates the power curve of the wind turbine cluster.

        The power curve is aggregated from the wind farms' and wind turbines'
        power curves by using :func:`power_plant.assign_power_curve`. Depending
        on the parameters of the WindTurbineCluster power curves are smoothed
        and/or wake losses are taken into account.

        Parameters
        ----------
        weather_df : :pandas:`pandas.DataFrame<frame>`
            DataFrame with weather data time series. If power curve smoothing
            :py:attr:`~smoothing` is True and chosen method for calculating the
            standard deviation :py:attr:`~standard_deviation_method` is
            `turbulence_intensity` the weather dataframe needs to either
            contain the turbulence intensity in column 'turbulence_intensity'
            or the roughness length in m in column 'roughness_length'. The
            turbulence intensity should be provided at hub height or at least
            at a height close to the hub height, as it cannot be inter- or
            extrapolated.

        Returns
        -------
        self

        """
        # Get turbulence intensity from weather if existent
        # NOTE(review): .values.mean() averages over all time steps (and all
        # heights, if several are given) to obtain one scalar for the
        # time-independent power curve — confirm this is the intended
        # aggregation for multi-height weather data.
        turbulence_intensity = (
            weather_df["turbulence_intensity"].values.mean()
            if "turbulence_intensity" in weather_df.columns.get_level_values(0)
            else None
        )
        roughness_length = (
            weather_df["roughness_length"].values.mean()
            if "roughness_length" in weather_df.columns.get_level_values(0)
            else None
        )
        # Assign power curve
        # Only the 'wind_farm_efficiency' model (or None) is applied at the
        # power-curve level; any other value names a wind efficiency curve,
        # which is applied to the wind speed time series in run_model()
        # instead, so the power curve itself is built without wake losses.
        if (
            self.wake_losses_model == "wind_farm_efficiency"
            or self.wake_losses_model is None
        ):
            wake_losses_model_to_power_curve = self.wake_losses_model
            if self.wake_losses_model is None:
                logging.debug("Wake losses in wind farms are not considered.")
            else:
                logging.debug(
                    "Wake losses considered with {}.".format(
                        self.wake_losses_model
                    )
                )
        else:
            logging.debug(
                "Wake losses considered by {} wind ".format(
                    self.wake_losses_model
                )
                + "efficiency curve."
            )
            wake_losses_model_to_power_curve = None
        self.power_plant.assign_power_curve(
            wake_losses_model=wake_losses_model_to_power_curve,
            smoothing=self.smoothing,
            block_width=self.block_width,
            standard_deviation_method=self.standard_deviation_method,
            smoothing_order=self.smoothing_order,
            roughness_length=roughness_length,
            turbulence_intensity=turbulence_intensity,
        )
        # Further logging messages
        if self.smoothing is False:
            logging.debug("Aggregated power curve not smoothed.")
        else:
            logging.debug(
                "Aggregated power curve smoothed by method: "
                + self.standard_deviation_method
            )
        return self

    def run_model(self, weather_df):
        r"""
        Runs the model.

        Parameters
        ----------
        weather_df : :pandas:`pandas.DataFrame<frame>`
            DataFrame with time series for wind speed `wind_speed` in m/s, and
            roughness length `roughness_length` in m, as well as optionally
            temperature `temperature` in K, pressure `pressure` in Pa,
            density `density` in kg/m³ and turbulence intensity
            `turbulence_intensity` depending on `power_output_model`,
            `density_model` and `standard_deviation_model` chosen.
            The columns of the DataFrame are a MultiIndex where the first level
            contains the variable name (e.g. wind_speed) and the second level
            contains the height at which it applies (e.g. 10, if it was
            measured at a height of 10 m). See below for an example on how to
            create the weather_df DataFrame.

        Returns
        -------
        self

        Examples
        ---------
        >>> import numpy as np
        >>> import pandas as pd
        >>> my_weather_df = pd.DataFrame(np.random.rand(2,6),
        ...                              index=pd.date_range('1/1/2012',
        ...                                                  periods=2,
        ...                                                  freq='H'),
        ...                              columns=[np.array(['wind_speed',
        ...                                                 'wind_speed',
        ...                                                 'temperature',
        ...                                                 'temperature',
        ...                                                 'pressure',
        ...                                                 'roughness_length']),
        ...                                       np.array([10, 80, 10, 80,
        ...                                                 10, 0])])
        >>> my_weather_df.columns.get_level_values(0)[0]
        'wind_speed'

        """
        # Validate/normalize the weather data, then build the aggregated
        # power curve and the representative (mean) hub height before the
        # actual power output calculation.
        weather_df = data.check_weather_data(weather_df)
        self.assign_power_curve(weather_df)
        self.power_plant.mean_hub_height()
        wind_speed_hub = self.wind_speed_hub(weather_df)
        # Density at hub height is only needed when the power output model
        # requires it (i.e. anything other than an uncorrected power curve).
        density_hub = (
            None
            if (
                self.power_output_model == "power_curve"
                and self.density_correction is False
            )
            else self.density_hub(weather_df)
        )
        # Wake-loss handling via a named wind efficiency curve happens here
        # (on the wind speed); the 'wind_farm_efficiency' variant was already
        # applied to the power curve in assign_power_curve().
        if (
            self.wake_losses_model != "wind_farm_efficiency"
            and self.wake_losses_model is not None
        ):
            # Reduce wind speed with wind efficiency curve
            wind_speed_hub = wake_losses.reduce_wind_speed(
                wind_speed_hub,
                wind_efficiency_curve_name=self.wake_losses_model,
            )
        self.power_output = self.calculate_power_output(
            wind_speed_hub, density_hub
        )
        return self
|
c1dbf6fd42fcfd6d8691cbd8db03026a44398496
|
376e1818d427b5e4d32fa6dd6c7b71e9fd88afdb
|
/textproc/py-ICU/patches/patch-setup.py
|
8824590a78931bad2c5e7f2b4c364d0a91635760
|
[] |
no_license
|
NetBSD/pkgsrc
|
a0732c023519650ef821ab89c23ab6ab59e25bdb
|
d042034ec4896cc5b47ed6f2e5b8802d9bc5c556
|
refs/heads/trunk
| 2023-09-01T07:40:12.138283
| 2023-09-01T05:25:19
| 2023-09-01T05:25:19
| 88,439,572
| 321
| 138
| null | 2023-07-12T22:34:14
| 2017-04-16T20:04:15
| null |
UTF-8
|
Python
| false
| false
| 1,937
|
py
|
patch-setup.py
|
$NetBSD: patch-setup.py,v 1.4 2022/11/23 22:18:47 adam Exp $
Add NetBSD support.
--- setup.py.orig 2022-10-25 04:31:36.000000000 +0000
+++ setup.py
@@ -64,6 +64,8 @@ if platform.startswith(('linux', 'gnu'))
platform = 'linux'
elif platform.startswith('freebsd'):
platform = 'freebsd'
+elif platform.startswith('netbsd'):
+ platform = 'netbsd'
CONFIGURE_WITH_ICU_CONFIG = {
@@ -73,6 +75,7 @@ CONFIGURE_WITH_ICU_CONFIG = {
'win32': False, # no icu-config
'sunos5': False, # not tested
'cygwin': False, # not tested
+ 'netbsd': False,
}
CONFIGURE_WITH_PKG_CONFIG = {
@@ -82,6 +85,7 @@ CONFIGURE_WITH_PKG_CONFIG = {
'win32': False, # no pkg-config ?
'sunos5': False, # not tested
'cygwin': False, # not tested
+ 'netbsd': True,
}
@@ -114,6 +118,7 @@ INCLUDES = {
'win32': ['c:/icu/include'],
'sunos5': [],
'cygwin': [],
+ 'netbsd': [],
}
if sys.platform == 'win32' and sys.version_info < (3,9):
@@ -132,6 +137,7 @@ PEDANTIC_FLAGS = {
'win32': [],
'sunos5': [],
'cygwin': ['-pedantic'],
+ 'netbsd': [],
}
CFLAGS = {
@@ -141,6 +147,7 @@ CFLAGS = {
'win32': ['/Zc:wchar_t', '/EHsc'],
'sunos5': ['-std=c++11'],
'cygwin': ['-D_GNU_SOURCE=1', '-std=c++11'],
+ 'netbsd': [],
}
# added to CFLAGS when setup is invoked with --debug
@@ -151,6 +158,7 @@ DEBUG_CFLAGS = {
'win32': ['/Od', '/DDEBUG'],
'sunos5': ['-DDEBUG'],
'cygwin': ['-Og', '-g', '-DDEBUG'],
+ 'netbsd': ['-O0', '-g', '-DDEBUG'],
}
LFLAGS = {
@@ -160,6 +168,7 @@ LFLAGS = {
'win32': ['/LIBPATH:c:/icu/lib'],
'sunos5': [],
'cygwin': [],
+ 'netbsd': [],
}
LIBRARIES = {
@@ -169,6 +178,7 @@ LIBRARIES = {
'win32': ['icuin', 'icuuc', 'icudt'],
'sunos5': ['icui18n', 'icuuc', 'icudata'],
'cygwin': ['icui18n', 'icuuc', 'icudata'],
+ 'netbsd': [],
}
if 'PYICU_INCLUDES' in os.environ:
|
61baf6d8c123b3775ccd065f5678aac594e50a18
|
5bd1490ada452d262819b51d240b519b7264dbd8
|
/Chapter 4/ch4_3.py
|
e131ffda1bbbed1dcfb67973c92773cfb3afb700
|
[] |
no_license
|
PacktPublishing/Mastering-Natural-Language-Processing-with-Python
|
59feee3a1ac0751f97256af328c6957adaeb7111
|
61fb2091f8c2d42fa5f14cb02664b0f2ca9127a1
|
refs/heads/master
| 2022-11-05T20:29:52.245545
| 2022-10-28T07:52:43
| 2022-10-28T07:52:43
| 60,772,409
| 142
| 124
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 50
|
py
|
ch4_3.py
|
# Show the Penn Treebank tagset entries for all tags matching the regex
# 'VB.*' (the verb tags VB, VBD, VBG, VBN, VBP, VBZ).
# NOTE(review): nltk.help.upenn_tagset prints its listing to stdout; the
# surrounding print() then emits the call's return value as well
# (presumably None) — verify whether the print() wrapper is intended.
import nltk
print(nltk.help.upenn_tagset('VB.*'))
|
b5aaa9eeea583fc2a2cbd5b42509c10605670a26
|
1742b6719b988e5519373002305e31d28b8bd691
|
/sdk/python/pulumi_aws/route53domains/_inputs.py
|
fbca943cfdc36927dcc8fdf31713605a7a67bc30
|
[
"MPL-2.0",
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
pulumi/pulumi-aws
|
4f7fdb4a816c5ea357cff2c2e3b613c006e49f1a
|
42b0a0abdf6c14da248da22f8c4530af06e67b98
|
refs/heads/master
| 2023-08-03T23:08:34.520280
| 2023-08-01T18:09:58
| 2023-08-01T18:09:58
| 97,484,940
| 384
| 171
|
Apache-2.0
| 2023-09-14T14:48:40
| 2017-07-17T14:20:33
|
Java
|
UTF-8
|
Python
| false
| false
| 31,472
|
py
|
_inputs.py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'RegisteredDomainAdminContactArgs',
'RegisteredDomainNameServerArgs',
'RegisteredDomainRegistrantContactArgs',
'RegisteredDomainTechContactArgs',
]
@pulumi.input_type
class RegisteredDomainAdminContactArgs:
    """Admin contact details for a Route 53 registered domain.

    All fields are optional Pulumi inputs; only values that are not None are
    stored on the instance. This class lives in a file generated by the
    Pulumi Terraform Bridge (tfgen) — regenerate instead of editing by hand.
    """

    def __init__(__self__, *,
                 address_line1: Optional[pulumi.Input[str]] = None,
                 address_line2: Optional[pulumi.Input[str]] = None,
                 city: Optional[pulumi.Input[str]] = None,
                 contact_type: Optional[pulumi.Input[str]] = None,
                 country_code: Optional[pulumi.Input[str]] = None,
                 email: Optional[pulumi.Input[str]] = None,
                 extra_params: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 fax: Optional[pulumi.Input[str]] = None,
                 first_name: Optional[pulumi.Input[str]] = None,
                 last_name: Optional[pulumi.Input[str]] = None,
                 organization_name: Optional[pulumi.Input[str]] = None,
                 phone_number: Optional[pulumi.Input[str]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 zip_code: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] address_line1: First line of the contact's address.
        :param pulumi.Input[str] address_line2: Second line of contact's address, if any.
        :param pulumi.Input[str] city: The city of the contact's address.
        :param pulumi.Input[str] contact_type: Indicates whether the contact is a person, company, association, or public organization. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-ContactType) for valid values.
        :param pulumi.Input[str] country_code: Code for the country of the contact's address. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-CountryCode) for valid values.
        :param pulumi.Input[str] email: Email address of the contact.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] extra_params: A key-value map of parameters required by certain top-level domains.
        :param pulumi.Input[str] fax: Fax number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
        :param pulumi.Input[str] first_name: First name of contact.
        :param pulumi.Input[str] last_name: Last name of contact.
        :param pulumi.Input[str] organization_name: Name of the organization for contact types other than `PERSON`.
        :param pulumi.Input[str] phone_number: The phone number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
        :param pulumi.Input[str] state: The state or province of the contact's city.
        :param pulumi.Input[str] zip_code: The zip or postal code of the contact's address.
        """
        # Only set attributes for values explicitly provided by the caller.
        if address_line1 is not None:
            pulumi.set(__self__, "address_line1", address_line1)
        if address_line2 is not None:
            pulumi.set(__self__, "address_line2", address_line2)
        if city is not None:
            pulumi.set(__self__, "city", city)
        if contact_type is not None:
            pulumi.set(__self__, "contact_type", contact_type)
        if country_code is not None:
            pulumi.set(__self__, "country_code", country_code)
        if email is not None:
            pulumi.set(__self__, "email", email)
        if extra_params is not None:
            pulumi.set(__self__, "extra_params", extra_params)
        if fax is not None:
            pulumi.set(__self__, "fax", fax)
        if first_name is not None:
            pulumi.set(__self__, "first_name", first_name)
        if last_name is not None:
            pulumi.set(__self__, "last_name", last_name)
        if organization_name is not None:
            pulumi.set(__self__, "organization_name", organization_name)
        if phone_number is not None:
            pulumi.set(__self__, "phone_number", phone_number)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if zip_code is not None:
            pulumi.set(__self__, "zip_code", zip_code)

    @property
    @pulumi.getter(name="addressLine1")
    def address_line1(self) -> Optional[pulumi.Input[str]]:
        """
        First line of the contact's address.
        """
        return pulumi.get(self, "address_line1")

    @address_line1.setter
    def address_line1(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "address_line1", value)

    @property
    @pulumi.getter(name="addressLine2")
    def address_line2(self) -> Optional[pulumi.Input[str]]:
        """
        Second line of contact's address, if any.
        """
        return pulumi.get(self, "address_line2")

    @address_line2.setter
    def address_line2(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "address_line2", value)

    @property
    @pulumi.getter
    def city(self) -> Optional[pulumi.Input[str]]:
        """
        The city of the contact's address.
        """
        return pulumi.get(self, "city")

    @city.setter
    def city(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "city", value)

    @property
    @pulumi.getter(name="contactType")
    def contact_type(self) -> Optional[pulumi.Input[str]]:
        """
        Indicates whether the contact is a person, company, association, or public organization. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-ContactType) for valid values.
        """
        return pulumi.get(self, "contact_type")

    @contact_type.setter
    def contact_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "contact_type", value)

    @property
    @pulumi.getter(name="countryCode")
    def country_code(self) -> Optional[pulumi.Input[str]]:
        """
        Code for the country of the contact's address. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-CountryCode) for valid values.
        """
        return pulumi.get(self, "country_code")

    @country_code.setter
    def country_code(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "country_code", value)

    @property
    @pulumi.getter
    def email(self) -> Optional[pulumi.Input[str]]:
        """
        Email address of the contact.
        """
        return pulumi.get(self, "email")

    @email.setter
    def email(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "email", value)

    @property
    @pulumi.getter(name="extraParams")
    def extra_params(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A key-value map of parameters required by certain top-level domains.
        """
        return pulumi.get(self, "extra_params")

    @extra_params.setter
    def extra_params(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "extra_params", value)

    @property
    @pulumi.getter
    def fax(self) -> Optional[pulumi.Input[str]]:
        """
        Fax number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
        """
        return pulumi.get(self, "fax")

    @fax.setter
    def fax(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "fax", value)

    @property
    @pulumi.getter(name="firstName")
    def first_name(self) -> Optional[pulumi.Input[str]]:
        """
        First name of contact.
        """
        return pulumi.get(self, "first_name")

    @first_name.setter
    def first_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "first_name", value)

    @property
    @pulumi.getter(name="lastName")
    def last_name(self) -> Optional[pulumi.Input[str]]:
        """
        Last name of contact.
        """
        return pulumi.get(self, "last_name")

    @last_name.setter
    def last_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_name", value)

    @property
    @pulumi.getter(name="organizationName")
    def organization_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the organization for contact types other than `PERSON`.
        """
        return pulumi.get(self, "organization_name")

    @organization_name.setter
    def organization_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "organization_name", value)

    @property
    @pulumi.getter(name="phoneNumber")
    def phone_number(self) -> Optional[pulumi.Input[str]]:
        """
        The phone number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
        """
        return pulumi.get(self, "phone_number")

    @phone_number.setter
    def phone_number(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "phone_number", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        The state or province of the contact's city.
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter(name="zipCode")
    def zip_code(self) -> Optional[pulumi.Input[str]]:
        """
        The zip or postal code of the contact's address.
        """
        return pulumi.get(self, "zip_code")

    @zip_code.setter
    def zip_code(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "zip_code", value)
@pulumi.input_type
class RegisteredDomainNameServerArgs:
    """Name server configuration for a Route 53 registered domain.

    ``name`` is required; ``glue_ips`` is optional and only stored when
    provided. This class lives in a file generated by the Pulumi Terraform
    Bridge (tfgen) — regenerate instead of editing by hand.
    """

    def __init__(__self__, *,
                 name: pulumi.Input[str],
                 glue_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        :param pulumi.Input[str] name: The fully qualified host name of the name server.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] glue_ips: Glue IP addresses of a name server. The list can contain only one IPv4 and one IPv6 address.
        """
        pulumi.set(__self__, "name", name)
        # Optional field: only stored when explicitly provided.
        if glue_ips is not None:
            pulumi.set(__self__, "glue_ips", glue_ips)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Input[str]:
        """
        The fully qualified host name of the name server.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: pulumi.Input[str]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="glueIps")
    def glue_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Glue IP addresses of a name server. The list can contain only one IPv4 and one IPv6 address.
        """
        return pulumi.get(self, "glue_ips")

    @glue_ips.setter
    def glue_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "glue_ips", value)
@pulumi.input_type
class RegisteredDomainRegistrantContactArgs:
def __init__(__self__, *,
address_line1: Optional[pulumi.Input[str]] = None,
address_line2: Optional[pulumi.Input[str]] = None,
city: Optional[pulumi.Input[str]] = None,
contact_type: Optional[pulumi.Input[str]] = None,
country_code: Optional[pulumi.Input[str]] = None,
email: Optional[pulumi.Input[str]] = None,
extra_params: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
fax: Optional[pulumi.Input[str]] = None,
first_name: Optional[pulumi.Input[str]] = None,
last_name: Optional[pulumi.Input[str]] = None,
organization_name: Optional[pulumi.Input[str]] = None,
phone_number: Optional[pulumi.Input[str]] = None,
state: Optional[pulumi.Input[str]] = None,
zip_code: Optional[pulumi.Input[str]] = None):
"""
:param pulumi.Input[str] address_line1: First line of the contact's address.
:param pulumi.Input[str] address_line2: Second line of contact's address, if any.
:param pulumi.Input[str] city: The city of the contact's address.
:param pulumi.Input[str] contact_type: Indicates whether the contact is a person, company, association, or public organization. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-ContactType) for valid values.
:param pulumi.Input[str] country_code: Code for the country of the contact's address. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-CountryCode) for valid values.
:param pulumi.Input[str] email: Email address of the contact.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] extra_params: A key-value map of parameters required by certain top-level domains.
:param pulumi.Input[str] fax: Fax number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
:param pulumi.Input[str] first_name: First name of contact.
:param pulumi.Input[str] last_name: Last name of contact.
:param pulumi.Input[str] organization_name: Name of the organization for contact types other than `PERSON`.
:param pulumi.Input[str] phone_number: The phone number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
:param pulumi.Input[str] state: The state or province of the contact's city.
:param pulumi.Input[str] zip_code: The zip or postal code of the contact's address.
"""
if address_line1 is not None:
pulumi.set(__self__, "address_line1", address_line1)
if address_line2 is not None:
pulumi.set(__self__, "address_line2", address_line2)
if city is not None:
pulumi.set(__self__, "city", city)
if contact_type is not None:
pulumi.set(__self__, "contact_type", contact_type)
if country_code is not None:
pulumi.set(__self__, "country_code", country_code)
if email is not None:
pulumi.set(__self__, "email", email)
if extra_params is not None:
pulumi.set(__self__, "extra_params", extra_params)
if fax is not None:
pulumi.set(__self__, "fax", fax)
if first_name is not None:
pulumi.set(__self__, "first_name", first_name)
if last_name is not None:
pulumi.set(__self__, "last_name", last_name)
if organization_name is not None:
pulumi.set(__self__, "organization_name", organization_name)
if phone_number is not None:
pulumi.set(__self__, "phone_number", phone_number)
if state is not None:
pulumi.set(__self__, "state", state)
if zip_code is not None:
pulumi.set(__self__, "zip_code", zip_code)
@property
@pulumi.getter(name="addressLine1")
def address_line1(self) -> Optional[pulumi.Input[str]]:
"""
First line of the contact's address.
"""
return pulumi.get(self, "address_line1")
@address_line1.setter
def address_line1(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "address_line1", value)
@property
@pulumi.getter(name="addressLine2")
def address_line2(self) -> Optional[pulumi.Input[str]]:
"""
Second line of contact's address, if any.
"""
return pulumi.get(self, "address_line2")
@address_line2.setter
def address_line2(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "address_line2", value)
@property
@pulumi.getter
def city(self) -> Optional[pulumi.Input[str]]:
"""
The city of the contact's address.
"""
return pulumi.get(self, "city")
@city.setter
def city(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "city", value)
@property
@pulumi.getter(name="contactType")
def contact_type(self) -> Optional[pulumi.Input[str]]:
"""
Indicates whether the contact is a person, company, association, or public organization. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-ContactType) for valid values.
"""
return pulumi.get(self, "contact_type")
@contact_type.setter
def contact_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "contact_type", value)
@property
@pulumi.getter(name="countryCode")
def country_code(self) -> Optional[pulumi.Input[str]]:
"""
Code for the country of the contact's address. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-CountryCode) for valid values.
"""
return pulumi.get(self, "country_code")
@country_code.setter
def country_code(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "country_code", value)
@property
@pulumi.getter
def email(self) -> Optional[pulumi.Input[str]]:
"""
Email address of the contact.
"""
return pulumi.get(self, "email")
@email.setter
def email(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "email", value)
@property
@pulumi.getter(name="extraParams")
def extra_params(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A key-value map of parameters required by certain top-level domains.
"""
return pulumi.get(self, "extra_params")
@extra_params.setter
def extra_params(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "extra_params", value)
@property
@pulumi.getter
def fax(self) -> Optional[pulumi.Input[str]]:
"""
Fax number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
"""
return pulumi.get(self, "fax")
@fax.setter
def fax(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "fax", value)
@property
@pulumi.getter(name="firstName")
def first_name(self) -> Optional[pulumi.Input[str]]:
"""
First name of contact.
"""
return pulumi.get(self, "first_name")
@first_name.setter
def first_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "first_name", value)
@property
@pulumi.getter(name="lastName")
def last_name(self) -> Optional[pulumi.Input[str]]:
"""
Last name of contact.
"""
return pulumi.get(self, "last_name")
@last_name.setter
def last_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "last_name", value)
@property
@pulumi.getter(name="organizationName")
def organization_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the organization for contact types other than `PERSON`.
"""
return pulumi.get(self, "organization_name")
@organization_name.setter
def organization_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "organization_name", value)
@property
@pulumi.getter(name="phoneNumber")
def phone_number(self) -> Optional[pulumi.Input[str]]:
    """
    The phone number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
    """
    return pulumi.get(self, "phone_number")

@phone_number.setter
def phone_number(self, value: Optional[pulumi.Input[str]]):
    # Stored via pulumi.set so the input-type machinery tracks the value.
    pulumi.set(self, "phone_number", value)
@property
@pulumi.getter
def state(self) -> Optional[pulumi.Input[str]]:
    """
    The state or province of the contact's city.
    """
    return pulumi.get(self, "state")

@state.setter
def state(self, value: Optional[pulumi.Input[str]]):
    # Stored via pulumi.set so the input-type machinery tracks the value.
    pulumi.set(self, "state", value)
@property
@pulumi.getter(name="zipCode")
def zip_code(self) -> Optional[pulumi.Input[str]]:
    """
    The zip or postal code of the contact's address.
    """
    return pulumi.get(self, "zip_code")

@zip_code.setter
def zip_code(self, value: Optional[pulumi.Input[str]]):
    # Stored via pulumi.set so the input-type machinery tracks the value.
    pulumi.set(self, "zip_code", value)
@pulumi.input_type
class RegisteredDomainTechContactArgs:
    """Technical contact details for a Route 53 registered domain.

    This is a Pulumi input type (see the ``@pulumi.input_type`` decorator):
    all attribute storage goes through ``pulumi.set``/``pulumi.get`` rather
    than plain instance attributes, and camelCase wire names are mapped to
    snake_case Python names via ``@pulumi.getter(name=...)``. Code in this
    shape is typically produced by the Pulumi code generator — edit with
    care; every field is optional and only explicitly-provided values are
    recorded.
    """

    def __init__(__self__, *,
                 address_line1: Optional[pulumi.Input[str]] = None,
                 address_line2: Optional[pulumi.Input[str]] = None,
                 city: Optional[pulumi.Input[str]] = None,
                 contact_type: Optional[pulumi.Input[str]] = None,
                 country_code: Optional[pulumi.Input[str]] = None,
                 email: Optional[pulumi.Input[str]] = None,
                 extra_params: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 fax: Optional[pulumi.Input[str]] = None,
                 first_name: Optional[pulumi.Input[str]] = None,
                 last_name: Optional[pulumi.Input[str]] = None,
                 organization_name: Optional[pulumi.Input[str]] = None,
                 phone_number: Optional[pulumi.Input[str]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 zip_code: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] address_line1: First line of the contact's address.
        :param pulumi.Input[str] address_line2: Second line of contact's address, if any.
        :param pulumi.Input[str] city: The city of the contact's address.
        :param pulumi.Input[str] contact_type: Indicates whether the contact is a person, company, association, or public organization. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-ContactType) for valid values.
        :param pulumi.Input[str] country_code: Code for the country of the contact's address. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-CountryCode) for valid values.
        :param pulumi.Input[str] email: Email address of the contact.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] extra_params: A key-value map of parameters required by certain top-level domains.
        :param pulumi.Input[str] fax: Fax number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
        :param pulumi.Input[str] first_name: First name of contact.
        :param pulumi.Input[str] last_name: Last name of contact.
        :param pulumi.Input[str] organization_name: Name of the organization for contact types other than `PERSON`.
        :param pulumi.Input[str] phone_number: The phone number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
        :param pulumi.Input[str] state: The state or province of the contact's city.
        :param pulumi.Input[str] zip_code: The zip or postal code of the contact's address.
        """
        # Only record fields the caller actually supplied; unset fields are
        # simply absent from the pulumi attribute table.
        if address_line1 is not None:
            pulumi.set(__self__, "address_line1", address_line1)
        if address_line2 is not None:
            pulumi.set(__self__, "address_line2", address_line2)
        if city is not None:
            pulumi.set(__self__, "city", city)
        if contact_type is not None:
            pulumi.set(__self__, "contact_type", contact_type)
        if country_code is not None:
            pulumi.set(__self__, "country_code", country_code)
        if email is not None:
            pulumi.set(__self__, "email", email)
        if extra_params is not None:
            pulumi.set(__self__, "extra_params", extra_params)
        if fax is not None:
            pulumi.set(__self__, "fax", fax)
        if first_name is not None:
            pulumi.set(__self__, "first_name", first_name)
        if last_name is not None:
            pulumi.set(__self__, "last_name", last_name)
        if organization_name is not None:
            pulumi.set(__self__, "organization_name", organization_name)
        if phone_number is not None:
            pulumi.set(__self__, "phone_number", phone_number)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if zip_code is not None:
            pulumi.set(__self__, "zip_code", zip_code)

    @property
    @pulumi.getter(name="addressLine1")
    def address_line1(self) -> Optional[pulumi.Input[str]]:
        """
        First line of the contact's address.
        """
        return pulumi.get(self, "address_line1")

    @address_line1.setter
    def address_line1(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "address_line1", value)

    @property
    @pulumi.getter(name="addressLine2")
    def address_line2(self) -> Optional[pulumi.Input[str]]:
        """
        Second line of contact's address, if any.
        """
        return pulumi.get(self, "address_line2")

    @address_line2.setter
    def address_line2(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "address_line2", value)

    @property
    @pulumi.getter
    def city(self) -> Optional[pulumi.Input[str]]:
        """
        The city of the contact's address.
        """
        return pulumi.get(self, "city")

    @city.setter
    def city(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "city", value)

    @property
    @pulumi.getter(name="contactType")
    def contact_type(self) -> Optional[pulumi.Input[str]]:
        """
        Indicates whether the contact is a person, company, association, or public organization. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-ContactType) for valid values.
        """
        return pulumi.get(self, "contact_type")

    @contact_type.setter
    def contact_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "contact_type", value)

    @property
    @pulumi.getter(name="countryCode")
    def country_code(self) -> Optional[pulumi.Input[str]]:
        """
        Code for the country of the contact's address. See the [AWS API documentation](https://docs.aws.amazon.com/Route53/latest/APIReference/API_domains_ContactDetail.html#Route53Domains-Type-domains_ContactDetail-CountryCode) for valid values.
        """
        return pulumi.get(self, "country_code")

    @country_code.setter
    def country_code(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "country_code", value)

    @property
    @pulumi.getter
    def email(self) -> Optional[pulumi.Input[str]]:
        """
        Email address of the contact.
        """
        return pulumi.get(self, "email")

    @email.setter
    def email(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "email", value)

    @property
    @pulumi.getter(name="extraParams")
    def extra_params(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A key-value map of parameters required by certain top-level domains.
        """
        return pulumi.get(self, "extra_params")

    @extra_params.setter
    def extra_params(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "extra_params", value)

    @property
    @pulumi.getter
    def fax(self) -> Optional[pulumi.Input[str]]:
        """
        Fax number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
        """
        return pulumi.get(self, "fax")

    @fax.setter
    def fax(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "fax", value)

    @property
    @pulumi.getter(name="firstName")
    def first_name(self) -> Optional[pulumi.Input[str]]:
        """
        First name of contact.
        """
        return pulumi.get(self, "first_name")

    @first_name.setter
    def first_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "first_name", value)

    @property
    @pulumi.getter(name="lastName")
    def last_name(self) -> Optional[pulumi.Input[str]]:
        """
        Last name of contact.
        """
        return pulumi.get(self, "last_name")

    @last_name.setter
    def last_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_name", value)

    @property
    @pulumi.getter(name="organizationName")
    def organization_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the organization for contact types other than `PERSON`.
        """
        return pulumi.get(self, "organization_name")

    @organization_name.setter
    def organization_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "organization_name", value)

    @property
    @pulumi.getter(name="phoneNumber")
    def phone_number(self) -> Optional[pulumi.Input[str]]:
        """
        The phone number of the contact. Phone number must be specified in the format "+[country dialing code].[number including any area code]".
        """
        return pulumi.get(self, "phone_number")

    @phone_number.setter
    def phone_number(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "phone_number", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        The state or province of the contact's city.
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter(name="zipCode")
    def zip_code(self) -> Optional[pulumi.Input[str]]:
        """
        The zip or postal code of the contact's address.
        """
        return pulumi.get(self, "zip_code")

    @zip_code.setter
    def zip_code(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "zip_code", value)
|
3e23df8ccd059e8b3f7c8af59cd027a20a43c947
|
3db7b5409f2f9c57ab3f98bda50f8b548d98063d
|
/samples/snippets/authenticate_service_account.py
|
8a8c9557dc9b16ad019c18ea6b676a7c55476012
|
[
"Apache-2.0"
] |
permissive
|
googleapis/python-bigquery
|
66db156b52e97565f6211b2fab5aac4e519fa798
|
3645e32aeebefe9d5a4bc71a6513942741f0f196
|
refs/heads/main
| 2023-09-01T07:41:24.893598
| 2023-08-23T19:04:13
| 2023-08-23T19:04:13
| 226,992,475
| 622
| 287
|
Apache-2.0
| 2023-09-12T04:31:26
| 2019-12-10T00:09:04
|
Python
|
UTF-8
|
Python
| false
| false
| 1,755
|
py
|
authenticate_service_account.py
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import typing
if typing.TYPE_CHECKING:
from google.cloud import bigquery
def main() -> "bigquery.Client":
    """Build a BigQuery client authenticated with a service-account key file.

    Returns:
        bigquery.Client: client scoped to the service account's project.
    """
    # NOTE(review): os.environ.get returns None when the variable is unset;
    # from_service_account_file would then fail — confirm callers always set
    # GOOGLE_APPLICATION_CREDENTIALS before invoking this sample.
    key_path = os.environ.get("GOOGLE_APPLICATION_CREDENTIALS")
    # The [START]/[END] region tags below are load-bearing: the docs
    # pipeline extracts the enclosed snippet verbatim. Do not remove them.
    # [START bigquery_client_json_credentials]
    from google.cloud import bigquery
    from google.oauth2 import service_account

    # TODO(developer): Set key_path to the path to the service account key
    # file.
    # key_path = "path/to/service_account.json"

    credentials = service_account.Credentials.from_service_account_file(
        key_path,
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )

    # Alternatively, use service_account.Credentials.from_service_account_info()
    # to set credentials directly via a json object rather than set a filepath
    # TODO(developer): Set key_json to the content of the service account key file.
    # credentials = service_account.Credentials.from_service_account_info(key_json)

    client = bigquery.Client(
        credentials=credentials,
        project=credentials.project_id,
    )
    # [END bigquery_client_json_credentials]
    return client


if __name__ == "__main__":
    main()
|
bad2a8e7cc917214291e098b17fc6b9db06686ad
|
31aee922759bcfd2bcfb56a81f814d52ebcd3dcc
|
/tests/unit/facts/test__init__.py
|
5327e63bb116ec86f5f43af6f824783bad797aeb
|
[
"Apache-2.0"
] |
permissive
|
Juniper/py-junos-eznc
|
2eba47a5feb440bc46163e1bc709138d09a568f5
|
e19a7683be1da67140798987ac42e8c82041c393
|
refs/heads/master
| 2023-09-04T10:26:41.094991
| 2023-08-02T04:06:38
| 2023-08-02T04:06:38
| 13,530,047
| 628
| 384
|
Apache-2.0
| 2023-09-12T03:56:01
| 2013-10-12T22:21:38
|
Python
|
UTF-8
|
Python
| false
| false
| 717
|
py
|
test__init__.py
|
__author__ = "Stacy Smith"
__credits__ = "Jeremy Schulman, Nitin Kumar"
try:
import unittest2 as unittest
except ImportError:
import unittest
from nose.plugins.attrib import attr
import importlib
import sys
import jnpr.junos.facts
@attr("unit")
class TestFactInitialization(unittest.TestCase):
def test_duplicate_facts(self):
module = importlib.import_module("tests.unit.facts.dupe_foo1")
sys.modules["jnpr.junos.facts.dupe_foo1"] = module
module = importlib.import_module("tests.unit.facts.dupe_foo2")
sys.modules["jnpr.junos.facts.dupe_foo2"] = module
with self.assertRaises(RuntimeError):
jnpr.junos.facts._build_fact_callbacks_and_doc_strings()
|
58b63fa132e1189c89c9536a7997bc1531e2cf85
|
71fb04f723b46a1bf45295be239bcec25e07f98c
|
/keras_cv/layers/preprocessing/vectorized_base_image_augmentation_layer_test.py
|
38cff729408bd4e3be1b237f83ded86b50799ce4
|
[
"Apache-2.0"
] |
permissive
|
keras-team/keras-cv
|
9bca4479474e853ec3a1c541b8be20fea2447a1a
|
e83f229f1b7b847cd712d5cd4810097d3e06d14e
|
refs/heads/master
| 2023-08-31T10:22:08.406394
| 2023-08-30T20:24:57
| 2023-08-30T20:24:57
| 265,079,853
| 818
| 287
|
NOASSERTION
| 2023-09-12T16:49:01
| 2020-05-18T22:39:21
|
Python
|
UTF-8
|
Python
| false
| false
| 20,777
|
py
|
vectorized_base_image_augmentation_layer_test.py
|
# Copyright 2023 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import tensorflow as tf
from keras_cv import bounding_box
from keras_cv.layers.preprocessing.vectorized_base_image_augmentation_layer import ( # noqa: E501
VectorizedBaseImageAugmentationLayer,
)
from keras_cv.tests.test_case import TestCase
class VectorizedRandomAddLayer(VectorizedBaseImageAugmentationLayer):
    """Test double: augments every target by adding one scalar per batch row.

    The per-row scalar comes from ``get_random_transformation_batch`` —
    either a uniform draw from ``add_range`` or the constant ``fixed_value``
    — so tests can check both deterministic output and per-row randomness.
    """

    def __init__(self, add_range=(0.0, 1.0), fixed_value=None, **kwargs):
        super().__init__(**kwargs)
        self.add_range = add_range
        self.fixed_value = fixed_value

    def augment_ragged_image(self, image, transformation, **kwargs):
        # transformation is a scalar here; broadcast over (height, width).
        return image + transformation[None, None]

    def get_random_transformation_batch(self, batch_size, **kwargs):
        # NOTE(review): truthiness test means fixed_value=0.0 behaves like
        # "unset" and falls through to the random draw — confirm intended.
        if self.fixed_value:
            return tf.ones((batch_size,)) * self.fixed_value
        return self._random_generator.random_uniform(
            (batch_size,), minval=self.add_range[0], maxval=self.add_range[1]
        )

    def augment_images(self, images, transformations, **kwargs):
        # Broadcast the (batch,) scalars over (batch, h, w, c).
        return images + transformations[:, None, None, None]

    def augment_labels(self, labels, transformations, **kwargs):
        return labels + transformations[:, None]

    def augment_bounding_boxes(self, bounding_boxes, transformations, **kwargs):
        return {
            "boxes": bounding_boxes["boxes"] + transformations[:, None, None],
            "classes": bounding_boxes["classes"] + transformations[:, None],
        }

    def augment_keypoints(self, keypoints, transformations, **kwargs):
        return keypoints + transformations[:, None, None]

    def augment_segmentation_masks(
        self, segmentation_masks, transformations, **kwargs
    ):
        return segmentation_masks + transformations[:, None, None, None]
# Tensor classes accepted by the isinstance assertions below
# (dense, ragged, and sparse TensorFlow tensors).
TF_ALL_TENSOR_TYPES = (tf.Tensor, tf.RaggedTensor, tf.SparseTensor)
class VectorizedAssertionLayer(VectorizedBaseImageAugmentationLayer):
    """Test double that performs no augmentation at all.

    Every hook simply asserts that each argument the base class passes in
    is a TensorFlow tensor type (see ``TF_ALL_TENSOR_TYPES``) and returns
    its input unchanged. Tests call this layer to verify the base class's
    argument plumbing, not any transformation.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def augment_ragged_image(
        self,
        image,
        label=None,
        bounding_boxes=None,
        keypoints=None,
        segmentation_mask=None,
        transformation=None,
        **kwargs
    ):
        assert isinstance(image, TF_ALL_TENSOR_TYPES)
        assert isinstance(label, TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["boxes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["classes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(keypoints, TF_ALL_TENSOR_TYPES)
        assert isinstance(segmentation_mask, TF_ALL_TENSOR_TYPES)
        assert isinstance(transformation, TF_ALL_TENSOR_TYPES)
        return image

    def get_random_transformation_batch(
        self,
        batch_size,
        images=None,
        labels=None,
        bounding_boxes=None,
        keypoints=None,
        segmentation_masks=None,
        **kwargs
    ):
        assert isinstance(images, TF_ALL_TENSOR_TYPES)
        assert isinstance(labels, TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["boxes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["classes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(keypoints, TF_ALL_TENSOR_TYPES)
        assert isinstance(segmentation_masks, TF_ALL_TENSOR_TYPES)
        return self._random_generator.random_uniform((batch_size,))

    def augment_images(
        self,
        images,
        transformations=None,
        bounding_boxes=None,
        labels=None,
        **kwargs
    ):
        assert isinstance(images, TF_ALL_TENSOR_TYPES)
        assert isinstance(transformations, TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["boxes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["classes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(labels, TF_ALL_TENSOR_TYPES)
        return images

    def augment_labels(
        self,
        labels,
        transformations=None,
        bounding_boxes=None,
        images=None,
        raw_images=None,
        **kwargs
    ):
        assert isinstance(labels, TF_ALL_TENSOR_TYPES)
        assert isinstance(transformations, TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["boxes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["classes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(images, TF_ALL_TENSOR_TYPES)
        assert isinstance(raw_images, TF_ALL_TENSOR_TYPES)
        return labels

    def augment_bounding_boxes(
        self,
        bounding_boxes,
        transformations=None,
        labels=None,
        images=None,
        raw_images=None,
        **kwargs
    ):
        assert isinstance(bounding_boxes["boxes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["classes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(transformations, TF_ALL_TENSOR_TYPES)
        assert isinstance(labels, TF_ALL_TENSOR_TYPES)
        assert isinstance(images, TF_ALL_TENSOR_TYPES)
        assert isinstance(raw_images, TF_ALL_TENSOR_TYPES)
        return bounding_boxes

    def augment_keypoints(
        self,
        keypoints,
        transformations=None,
        labels=None,
        bounding_boxes=None,
        images=None,
        raw_images=None,
        **kwargs
    ):
        assert isinstance(keypoints, TF_ALL_TENSOR_TYPES)
        assert isinstance(transformations, TF_ALL_TENSOR_TYPES)
        assert isinstance(labels, TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["boxes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["classes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(images, TF_ALL_TENSOR_TYPES)
        assert isinstance(raw_images, TF_ALL_TENSOR_TYPES)
        return keypoints

    def augment_segmentation_masks(
        self,
        segmentation_masks,
        transformations=None,
        labels=None,
        bounding_boxes=None,
        images=None,
        raw_images=None,
        **kwargs
    ):
        assert isinstance(segmentation_masks, TF_ALL_TENSOR_TYPES)
        assert isinstance(transformations, TF_ALL_TENSOR_TYPES)
        assert isinstance(labels, TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["boxes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(bounding_boxes["classes"], TF_ALL_TENSOR_TYPES)
        assert isinstance(images, TF_ALL_TENSOR_TYPES)
        assert isinstance(raw_images, TF_ALL_TENSOR_TYPES)
        return segmentation_masks
class VectorizedBaseImageAugmentationLayerTest(TestCase):
    """Behavioral tests for ``VectorizedBaseImageAugmentationLayer``.

    Strategy: ``VectorizedRandomAddLayer`` with ``fixed_value`` pins the
    augmentation to a known constant so exact outputs can be asserted;
    without ``fixed_value`` the tests instead check that different batch
    rows receive different random offsets. ``VectorizedAssertionLayer``
    checks argument types only (its assertions fire inside the layer).
    """

    def test_augment_single_image(self):
        add_layer = VectorizedRandomAddLayer(fixed_value=2.0)
        image = np.random.random(size=(8, 8, 3)).astype("float32")
        output = add_layer(image)

        self.assertAllClose(image + 2.0, output)

    def test_augment_dict_return_type(self):
        add_layer = VectorizedRandomAddLayer(fixed_value=2.0)
        image = np.random.random(size=(8, 8, 3)).astype("float32")
        output = add_layer({"images": image})

        self.assertIsInstance(output, dict)

    def test_augment_casts_dtypes(self):
        # uint8 input must be cast to the layer's compute dtype (float32).
        add_layer = VectorizedRandomAddLayer(fixed_value=2.0)
        images = tf.ones((2, 8, 8, 3), dtype="uint8")
        output = add_layer(images)

        self.assertAllClose(
            tf.ones((2, 8, 8, 3), dtype="float32") * 3.0, output
        )

    def test_augment_batch_images(self):
        add_layer = VectorizedRandomAddLayer()
        images = np.random.random(size=(2, 8, 8, 3)).astype("float32")
        output = add_layer(images)

        diff = output - images
        # Make sure the first image and second image get different augmentation
        self.assertNotAllClose(diff[0], diff[1])

    def test_augment_image_and_label(self):
        add_layer = VectorizedRandomAddLayer(fixed_value=2.0)
        image = np.random.random(size=(8, 8, 3)).astype("float32")
        label = np.random.random(size=(1,)).astype("float32")

        output = add_layer({"images": image, "targets": label})
        expected_output = {"images": image + 2.0, "targets": label + 2.0}
        self.assertAllClose(output, expected_output)

    def test_augment_image_and_target(self):
        add_layer = VectorizedRandomAddLayer(fixed_value=2.0)
        image = np.random.random(size=(8, 8, 3)).astype("float32")
        label = np.random.random(size=(1,)).astype("float32")

        output = add_layer({"images": image, "targets": label})
        expected_output = {"images": image + 2.0, "targets": label + 2.0}
        self.assertAllClose(output, expected_output)

    def test_augment_batch_images_and_targets(self):
        add_layer = VectorizedRandomAddLayer()
        images = np.random.random(size=(2, 8, 8, 3)).astype("float32")
        targets = np.random.random(size=(2, 1)).astype("float32")
        output = add_layer({"images": images, "targets": targets})

        image_diff = output["images"] - images
        label_diff = output["targets"] - targets
        # Make sure the first image and second image get different augmentation
        self.assertNotAllClose(image_diff[0], image_diff[1])
        self.assertNotAllClose(label_diff[0], label_diff[1])

    def test_augment_leaves_extra_dict_entries_unmodified(self):
        add_layer = VectorizedRandomAddLayer(fixed_value=0.5)
        images = np.random.random(size=(8, 8, 3)).astype("float32")
        timestamps = np.array(123123123)
        inputs = {"images": images, "timestamps": timestamps}
        output = add_layer(inputs)

        self.assertAllEqual(output["timestamps"], timestamps)

    def test_augment_ragged_images(self):
        images = tf.ragged.stack(
            [
                np.random.random(size=(8, 8, 3)).astype("float32"),
                np.random.random(size=(16, 8, 3)).astype("float32"),
            ]
        )
        add_layer = VectorizedRandomAddLayer(fixed_value=0.5)
        result = add_layer(images)
        self.assertAllClose(images + 0.5, result)

    def test_augment_image_and_localization_data(self):
        add_layer = VectorizedRandomAddLayer(fixed_value=2.0)
        images = np.random.random(size=(8, 8, 8, 3)).astype("float32")
        bounding_boxes = {
            "boxes": np.random.random(size=(8, 3, 4)).astype("float32"),
            "classes": np.random.random(size=(8, 3)).astype("float32"),
        }
        keypoints = np.random.random(size=(8, 5, 2)).astype("float32")
        segmentation_mask = np.random.random(size=(8, 8, 8, 1)).astype(
            "float32"
        )

        output = add_layer(
            {
                "images": images,
                "bounding_boxes": bounding_boxes,
                "keypoints": keypoints,
                "segmentation_masks": segmentation_mask,
            }
        )
        expected_output = {
            "images": images + 2.0,
            "bounding_boxes": bounding_box.to_dense(
                {
                    "boxes": bounding_boxes["boxes"] + 2.0,
                    "classes": bounding_boxes["classes"] + 2.0,
                }
            ),
            "keypoints": keypoints + 2.0,
            "segmentation_masks": segmentation_mask + 2.0,
        }

        # Densify before comparison: the layer may return ragged boxes.
        output["bounding_boxes"] = bounding_box.to_dense(
            output["bounding_boxes"]
        )

        self.assertAllClose(output["images"], expected_output["images"])
        self.assertAllClose(output["keypoints"], expected_output["keypoints"])
        self.assertAllClose(
            output["bounding_boxes"]["boxes"],
            expected_output["bounding_boxes"]["boxes"],
        )
        self.assertAllClose(
            output["bounding_boxes"]["classes"],
            expected_output["bounding_boxes"]["classes"],
        )
        self.assertAllClose(
            output["segmentation_masks"], expected_output["segmentation_masks"]
        )

    def test_augment_batch_image_and_localization_data(self):
        # Runs the same per-row randomness checks twice: once eagerly and
        # once inside a tf.function, to cover graph-mode execution.
        add_layer = VectorizedRandomAddLayer()
        images = np.random.random(size=(2, 8, 8, 3)).astype("float32")
        bounding_boxes = {
            "boxes": np.random.random(size=(2, 3, 4)).astype("float32"),
            "classes": np.random.random(size=(2, 3)).astype("float32"),
        }
        keypoints = np.random.random(size=(2, 5, 2)).astype("float32")
        segmentation_masks = np.random.random(size=(2, 8, 8, 1)).astype(
            "float32"
        )

        output = add_layer(
            {
                "images": images,
                "bounding_boxes": bounding_boxes,
                "keypoints": keypoints,
                "segmentation_masks": segmentation_masks,
            }
        )

        bounding_boxes_diff = (
            output["bounding_boxes"]["boxes"] - bounding_boxes["boxes"]
        )
        keypoints_diff = output["keypoints"] - keypoints
        segmentation_mask_diff = (
            output["segmentation_masks"] - segmentation_masks
        )
        self.assertNotAllClose(bounding_boxes_diff[0], bounding_boxes_diff[1])
        self.assertNotAllClose(keypoints_diff[0], keypoints_diff[1])
        self.assertNotAllClose(
            segmentation_mask_diff[0], segmentation_mask_diff[1]
        )

        @tf.function
        def in_tf_function(inputs):
            return add_layer(inputs)

        output = in_tf_function(
            {
                "images": images,
                "bounding_boxes": bounding_boxes,
                "keypoints": keypoints,
                "segmentation_masks": segmentation_masks,
            }
        )

        bounding_boxes_diff = (
            output["bounding_boxes"]["boxes"] - bounding_boxes["boxes"]
        )
        keypoints_diff = output["keypoints"] - keypoints
        segmentation_mask_diff = (
            output["segmentation_masks"] - segmentation_masks
        )
        self.assertNotAllClose(bounding_boxes_diff[0], bounding_boxes_diff[1])
        self.assertNotAllClose(keypoints_diff[0], keypoints_diff[1])
        self.assertNotAllClose(
            segmentation_mask_diff[0], segmentation_mask_diff[1]
        )

    def test_augment_all_data_in_tf_function(self):
        add_layer = VectorizedRandomAddLayer()
        images = np.random.random(size=(2, 8, 8, 3)).astype("float32")
        bounding_boxes = {
            "boxes": np.random.random(size=(2, 3, 4)).astype("float32"),
            "classes": np.random.random(size=(2, 3)).astype("float32"),
        }
        keypoints = np.random.random(size=(2, 5, 2)).astype("float32")
        segmentation_masks = np.random.random(size=(2, 8, 8, 1)).astype(
            "float32"
        )

        @tf.function
        def in_tf_function(inputs):
            return add_layer(inputs)

        output = in_tf_function(
            {
                "images": images,
                "bounding_boxes": bounding_boxes,
                "keypoints": keypoints,
                "segmentation_masks": segmentation_masks,
            }
        )

        bounding_boxes_diff = (
            output["bounding_boxes"]["boxes"] - bounding_boxes["boxes"]
        )
        keypoints_diff = output["keypoints"] - keypoints
        segmentation_mask_diff = (
            output["segmentation_masks"] - segmentation_masks
        )
        self.assertNotAllClose(bounding_boxes_diff[0], bounding_boxes_diff[1])
        self.assertNotAllClose(keypoints_diff[0], keypoints_diff[1])
        self.assertNotAllClose(
            segmentation_mask_diff[0], segmentation_mask_diff[1]
        )

    def test_augment_unbatched_all_data(self):
        add_layer = VectorizedRandomAddLayer(fixed_value=2.0)
        images = np.random.random(size=(8, 8, 3)).astype("float32")
        bounding_boxes = {
            "boxes": np.random.random(size=(3, 4)).astype("float32"),
            "classes": np.random.random(size=(3)).astype("float32"),
        }
        keypoints = np.random.random(size=(5, 2)).astype("float32")
        segmentation_masks = np.random.random(size=(8, 8, 1)).astype("float32")
        input = {
            "images": images,
            "bounding_boxes": bounding_boxes,
            "keypoints": keypoints,
            "segmentation_masks": segmentation_masks,
        }
        output = add_layer(input, training=True)

        self.assertAllClose(output["images"], images + 2.0)
        self.assertAllClose(output["keypoints"], keypoints + 2.0)
        self.assertAllClose(
            output["bounding_boxes"]["boxes"],
            tf.squeeze(bounding_boxes["boxes"]) + 2.0,
        )
        self.assertAllClose(
            output["bounding_boxes"]["classes"],
            tf.squeeze(bounding_boxes["classes"]) + 2.0,
        )
        self.assertAllClose(
            output["segmentation_masks"], segmentation_masks + 2.0
        )

    def test_augment_all_data_for_assertion(self):
        images = np.random.random(size=(2, 8, 8, 3)).astype("float32")
        labels = np.squeeze(np.eye(10)[np.array([0, 1]).reshape(-1)])
        bounding_boxes = {
            "boxes": np.random.random(size=(2, 3, 4)).astype("float32"),
            "classes": np.random.random(size=(2, 3)).astype("float32"),
        }
        keypoints = np.random.random(size=(2, 5, 2)).astype("float32")
        segmentation_masks = np.random.random(size=(2, 8, 8, 1)).astype(
            "float32"
        )
        assertion_layer = VectorizedAssertionLayer()

        _ = assertion_layer(
            {
                "images": images,
                "labels": labels,
                "bounding_boxes": bounding_boxes,
                "keypoints": keypoints,
                "segmentation_masks": segmentation_masks,
            }
        )

        # assertion is at VectorizedAssertionLayer's methods

    def test_augment_all_data_with_ragged_images_for_assertion(self):
        images = tf.ragged.stack(
            [
                tf.random.uniform(shape=(8, 8, 3)),
                tf.random.uniform(shape=(16, 8, 3)),
            ]
        )
        labels = tf.constant(
            np.squeeze(np.eye(10)[np.array([0, 1]).reshape(-1)])
        )
        bounding_boxes = {
            "boxes": tf.random.uniform(shape=(2, 3, 4)),
            "classes": tf.random.uniform(shape=(2, 3)),
        }
        keypoints = tf.random.uniform(shape=(2, 5, 2))
        segmentation_masks = tf.random.uniform(shape=(2, 8, 8, 1))
        assertion_layer = VectorizedAssertionLayer()
        print(
            {
                "images": type(images),
                "labels": type(labels),
                "bounding_boxes": type(bounding_boxes),
                "keypoints": type(keypoints),
                "segmentation_masks": type(segmentation_masks),
            }
        )

        _ = assertion_layer(
            {
                "images": images,
                "labels": labels,
                "bounding_boxes": bounding_boxes,
                "keypoints": keypoints,
                "segmentation_masks": segmentation_masks,
            }
        )

        # assertion is at VectorizedAssertionLayer's methods

    def test_converts_ragged_to_dense_images(self):
        images = tf.ragged.stack(
            [
                np.random.random(size=(8, 8, 3)).astype("float32"),
                np.random.random(size=(16, 8, 3)).astype("float32"),
            ]
        )
        add_layer = VectorizedRandomAddLayer(fixed_value=0.5)
        add_layer.force_output_dense_images = True
        result = add_layer(images)
        self.assertTrue(isinstance(result, tf.Tensor))

    def test_converts_ragged_to_dense_segmentation_masks(self):
        images = tf.ragged.stack(
            [
                np.random.random(size=(8, 8, 3)).astype("float32"),
                np.random.random(size=(16, 8, 3)).astype("float32"),
            ]
        )
        segmentation_masks = tf.ragged.stack(
            [
                np.random.randint(0, 10, size=(8, 8, 1)).astype("float32"),
                np.random.randint(0, 10, size=(16, 8, 1)).astype("float32"),
            ]
        )
        add_layer = VectorizedRandomAddLayer(fixed_value=0.5)
        add_layer.force_output_dense_segmentation_masks = True
        result = add_layer(
            {"images": images, "segmentation_masks": segmentation_masks}
        )
        self.assertTrue(isinstance(result["segmentation_masks"], tf.Tensor))
|
1b38a4e94b17d68e7a65136093dd11d41bc1d3b6
|
d3ef2463f556d6cd166eb29d3a5f5b210a6402e7
|
/cupyx/_pinned_array.py
|
f9b8c9c813b4b513e26bf4199b59947316784838
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
cupy/cupy
|
ce7a010a57504dbfe4fb5af10d354a22e79f4907
|
96105afb78aa3f8380834d2516184b8365e23fcb
|
refs/heads/main
| 2023-08-31T00:36:47.967611
| 2023-08-30T09:19:27
| 2023-08-30T09:19:27
| 72,523,920
| 7,505
| 1,072
|
MIT
| 2023-09-14T01:04:42
| 2016-11-01T09:54:45
|
Python
|
UTF-8
|
Python
| false
| false
| 5,108
|
py
|
_pinned_array.py
|
import numpy
from cupy import cuda
from cupy._creation.basic import _new_like_order_and_strides
from cupy._core import internal
def _update_shape(a, shape):
if shape is None and a is not None:
shape = a.shape
elif isinstance(shape, int):
shape = (shape,)
else:
shape = tuple(shape)
return shape
def empty_pinned(shape, dtype=float, order='C'):
    """Returns a new, uninitialized NumPy array with the given shape
    and dtype, backed by pinned (page-locked) host memory.

    Convenience analogue of :func:`numpy.empty` whose underlying buffer
    is obtained from ``cuda.alloc_pinned_memory`` instead of the normal
    heap allocator.

    Args:
        shape (int or tuple of ints): Dimensionalities of the array.
        dtype: Data type specifier.
        order ({'C', 'F'}): Row-major (C-style) or column-major
            (Fortran-style) order.

    Returns:
        numpy.ndarray: A new array with elements not initialized.

    .. seealso:: :func:`numpy.empty`
    """
    normalized_shape = _update_shape(None, shape)
    itemsize = numpy.dtype(dtype).itemsize
    pinned_mem = cuda.alloc_pinned_memory(
        internal.prod(normalized_shape) * itemsize)
    return numpy.ndarray(normalized_shape, dtype=dtype, buffer=pinned_mem,
                         order=order)
def empty_like_pinned(a, dtype=None, order='K', subok=None, shape=None):
"""Returns a new, uninitialized NumPy array with the same shape and dtype
as those of the given array.
This is a convenience function which is just :func:`numpy.empty_like`,
except that the underlying memory is pinned/pagelocked.
This function currently does not support ``subok`` option.
Args:
a (numpy.ndarray or cupy.ndarray): Base array.
dtype: Data type specifier. The data type of ``a`` is used by default.
order ({'C', 'F', 'A', or 'K'}): Overrides the memory layout of the
result. ``'C'`` means C-order, ``'F'`` means F-order, ``'A'`` means
``'F'`` if ``a`` is Fortran contiguous, ``'C'`` otherwise.
``'K'`` means match the layout of ``a`` as closely as possible.
subok: Not supported yet, must be None.
shape (int or tuple of ints): Overrides the shape of the result. If
``order='K'`` and the number of dimensions is unchanged, will try
to keep order, otherwise, ``order='C'`` is implied.
Returns:
numpy.ndarray: A new array with same shape and dtype of ``a`` with
elements not initialized.
.. seealso:: :func:`numpy.empty_like`
"""
# We're kinda duplicating the code here because order='K' needs special
# treatment: strides need to be computed
if subok is not None:
raise TypeError('subok is not supported yet')
if dtype is None:
dtype = a.dtype
shape = _update_shape(a, shape)
order, strides, _ = _new_like_order_and_strides(
a, dtype, order, shape, get_memptr=False)
nbytes = internal.prod(shape) * numpy.dtype(dtype).itemsize
mem = cuda.alloc_pinned_memory(nbytes)
out = numpy.ndarray(shape, dtype=dtype, buffer=mem,
strides=strides, order=order)
return out
def zeros_pinned(shape, dtype=float, order='C'):
"""Returns a new, zero-initialized NumPy array with the given shape
and dtype.
This is a convenience function which is just :func:`numpy.zeros`,
except that the underlying memory is pinned/pagelocked.
Args:
shape (int or tuple of ints): Dimensionalities of the array.
dtype: Data type specifier.
order ({'C', 'F'}): Row-major (C-style) or column-major
(Fortran-style) order.
Returns:
numpy.ndarray: An array filled with zeros.
.. seealso:: :func:`numpy.zeros`
"""
out = empty_pinned(shape, dtype, order)
numpy.copyto(out, 0, casting='unsafe')
return out
def zeros_like_pinned(a, dtype=None, order='K', subok=None, shape=None):
"""Returns a new, zero-initialized NumPy array with the same shape and dtype
as those of the given array.
This is a convenience function which is just :func:`numpy.zeros_like`,
except that the underlying memory is pinned/pagelocked.
This function currently does not support ``subok`` option.
Args:
a (numpy.ndarray or cupy.ndarray): Base array.
dtype: Data type specifier. The dtype of ``a`` is used by default.
order ({'C', 'F', 'A', or 'K'}): Overrides the memory layout of the
result. ``'C'`` means C-order, ``'F'`` means F-order, ``'A'`` means
``'F'`` if ``a`` is Fortran contiguous, ``'C'`` otherwise.
``'K'`` means match the layout of ``a`` as closely as possible.
subok: Not supported yet, must be None.
shape (int or tuple of ints): Overrides the shape of the result. If
``order='K'`` and the number of dimensions is unchanged, will try
to keep order, otherwise, ``order='C'`` is implied.
Returns:
numpy.ndarray: An array filled with zeros.
.. seealso:: :func:`numpy.zeros_like`
""" # NOQA
out = empty_like_pinned(a, dtype, order, subok, shape)
numpy.copyto(out, 0, casting='unsafe')
return out
|
8347c68e074bc89ffd3618c1bf1acc1378d2cfb6
|
8718fe44768d41f2d87559af26b7858a5997ecf2
|
/tools/lib-alert-tree/tests/test_cli.py
|
a8a2992730f6d63e13880fa8d5800875c745b856
|
[
"Apache-2.0"
] |
permissive
|
scality/metalk8s
|
b1891a8a236b80f7b7282e5a0a29cb063cdeaffa
|
6854d582f58592675afb3759585ce614b3db08f3
|
refs/heads/development/126.0
| 2023-08-16T17:47:52.110502
| 2023-08-16T12:31:08
| 2023-08-16T12:31:08
| 124,905,930
| 321
| 50
|
Apache-2.0
| 2023-09-14T11:27:20
| 2018-03-12T14:57:42
|
SaltStack
|
UTF-8
|
Python
| false
| false
| 9,688
|
py
|
test_cli.py
|
"""Check the generated CLI behavior."""
import textwrap
import tempfile
import click
from click.testing import CliRunner
from lib_alert_tree import cli
from lib_alert_tree.models import DerivedAlert as D, ExistingAlert as E
from lib_alert_tree.models import Relationship, severity_pair
ROOT_W, ROOT_C = severity_pair(
name="Root",
relationship=Relationship.ANY,
summary_name="The root",
warning_children=[
E.warning("Child1", somelabel="somevalue"),
E.critical("Child2"),
D.warning(
"Parent1",
relationship=Relationship.ANY,
children=[E.warning("Child3"), E.warning("Child4")],
duration="1m",
),
],
critical_children=[E.critical("Child3"), E.critical("Child4")],
duration="1m",
)
EXAMPLE_CLI = cli.generate_cli({"root-degraded": ROOT_W, "root-at-risk": ROOT_C})
EXAMPLE_CLI_WITH_CUSTOM_LABELS = cli.generate_cli(
{"root-degraded": ROOT_W, "root-at-risk": ROOT_C}, {"hello": "world"}
)
def test_show_with_colors():
"""Check how the 'show' command works, with pretty colors."""
# pylint: disable=line-too-long
runner = CliRunner()
result = runner.invoke(EXAMPLE_CLI, "show")
assert (
result.stdout
== textwrap.dedent(
f"""
{click.style('[root]', fg=8)}
├── {click.style('RootAtRisk', fg='red', bold=True)}
│ ├── {click.style('Child3', fg='red', bold=True)}
│ └── {click.style('Child4', fg='red', bold=True)}
└── {click.style('RootDegraded', fg='yellow', bold=True)}
├── {click.style('Child1', fg='yellow', bold=True)} {{somelabel = '{click.style('somevalue', bold=True)}'}}
├── {click.style('Child2', fg='red', bold=True)}
└── {click.style('Parent1', fg='yellow', bold=True)}
├── {click.style('Child3', fg='yellow', bold=True)}
└── {click.style('Child4', fg='yellow', bold=True)}
"""
).lstrip()
)
assert result.exit_code == 0
def test_show_single_root():
"""Check how the 'show' command works, when selecting a root."""
runner = CliRunner()
result = runner.invoke(
EXAMPLE_CLI, ["show", "--no-pretty", "--root", "root-at-risk"]
)
assert (
result.stdout
== textwrap.dedent(
"""
RootAtRisk{severity='critical'}
├── Child3{severity='critical'}
└── Child4{severity='critical'}
"""
).lstrip()
)
assert result.exit_code == 0
def test_show_filter_alert():
"""Check how the 'show' command works, when filtering on alert name."""
runner = CliRunner()
result = runner.invoke(
EXAMPLE_CLI,
["show", "--no-pretty", "--alert", "Parent1"],
)
assert (
result.stdout
== textwrap.dedent(
"""
[root]
└── Parent1{severity='warning'}
├── Child3{severity='warning'}
└── Child4{severity='warning'}
"""
).lstrip()
)
assert result.exit_code == 0
error = runner.invoke(EXAMPLE_CLI, ["show", "--no-pretty", "--alert", "NotFound"])
assert error.stdout == (
"Error: Failed to find an alert with name 'NotFound' in selected roots: "
"root-degraded, root-at-risk\n"
)
assert error.exit_code == 1
def test_show_filter_depth():
"""Check how the 'show' command works, when controlling the tree depth."""
runner = CliRunner()
expected_results = {
-1: """
[root]
├── RootAtRisk{severity='critical'}
│ ├── Child3{severity='critical'}
│ └── Child4{severity='critical'}
└── RootDegraded{severity='warning'}
├── Child1{severity='warning', somelabel=~'somevalue'}
├── Child2{severity='critical'}
└── Parent1{severity='warning'}
├── Child3{severity='warning'}
└── Child4{severity='warning'}
""",
0: """
[root]
├── RootAtRisk{severity='critical'}
└── RootDegraded{severity='warning'}
""",
1: """
[root]
├── RootAtRisk{severity='critical'}
│ ├── Child3{severity='critical'}
│ └── Child4{severity='critical'}
└── RootDegraded{severity='warning'}
├── Child1{severity='warning', somelabel=~'somevalue'}
├── Child2{severity='critical'}
└── Parent1{severity='warning'}
""",
2: """
[root]
├── RootAtRisk{severity='critical'}
│ ├── Child3{severity='critical'}
│ └── Child4{severity='critical'}
└── RootDegraded{severity='warning'}
├── Child1{severity='warning', somelabel=~'somevalue'}
├── Child2{severity='critical'}
└── Parent1{severity='warning'}
├── Child3{severity='warning'}
└── Child4{severity='warning'}
""",
}
for depth, expected_result in expected_results.items():
result = runner.invoke(
EXAMPLE_CLI,
["show", "--no-pretty", "--depth", str(depth)],
)
assert result.stdout == textwrap.dedent(expected_result).lstrip()
assert result.exit_code == 0
def test_gen_rule():
"""Check how the 'gen-rule' command works."""
# pylint: disable=line-too-long
outfile = tempfile.mktemp(suffix=".yaml")
runner = CliRunner()
result = runner.invoke(
EXAMPLE_CLI,
["gen-rule", "--name", "test.rules", "--namespace", "my-ns", "--out", outfile],
)
assert result.stdout == ""
assert result.exit_code == 0
with open(outfile, "r", encoding="utf-8") as handle:
output = handle.read()
assert (
output
== textwrap.dedent(
"""
apiVersion: monitoring.coreos.com/v1
kind: PrometheusRule
metadata:
labels: {}
name: test.rules
namespace: my-ns
spec:
groups:
- name: root-degraded.rules
rules:
- alert: RootDegraded
annotations:
children: Child1{severity='warning', somelabel=~'somevalue'}, Child2{severity='critical'},
Parent1{severity='warning'}
childrenJsonPath: $[?((@.labels.alertname === 'Child1' && @.labels.severity
=== 'warning' && @.labels.somelabel.match(new RegExp('^(?:somevalue)$')))
|| (@.labels.alertname === 'Child2' && @.labels.severity === 'critical')
|| (@.labels.alertname === 'Parent1' && @.labels.severity === 'warning'))]
summary: The root is degraded.
expr: sum(ALERTS{alertname='Child1', alertstate='firing', severity='warning',
somelabel=~'somevalue'} or ALERTS{alertname='Child2', alertstate='firing',
severity='critical'} or ALERTS{alertname='Parent1', alertstate='firing', severity='warning'})
>= 1
for: 1m
labels:
severity: warning
- alert: Parent1
annotations:
children: Child3{severity='warning'}, Child4{severity='warning'}
childrenJsonPath: $[?((@.labels.alertname === 'Child3' && @.labels.severity
=== 'warning') || (@.labels.alertname === 'Child4' && @.labels.severity
=== 'warning'))]
expr: sum(ALERTS{alertname='Child3', alertstate='firing', severity='warning'}
or ALERTS{alertname='Child4', alertstate='firing', severity='warning'}) >=
1
for: 1m
labels:
severity: warning
- name: root-at-risk.rules
rules:
- alert: RootAtRisk
annotations:
children: Child3{severity='critical'}, Child4{severity='critical'}
childrenJsonPath: $[?((@.labels.alertname === 'Child3' && @.labels.severity
=== 'critical') || (@.labels.alertname === 'Child4' && @.labels.severity
=== 'critical'))]
summary: The root is at risk.
expr: sum(ALERTS{alertname='Child3', alertstate='firing', severity='critical'}
or ALERTS{alertname='Child4', alertstate='firing', severity='critical'}) >=
1
for: 1m
labels:
severity: critical
"""
).lstrip()
)
def test_gen_rule_with_custom_labels():
"""Check how the 'gen-rule' command works with custom labels."""
# pylint: disable=line-too-long
outfile = tempfile.mktemp(suffix=".yaml")
runner = CliRunner()
result = runner.invoke(
EXAMPLE_CLI_WITH_CUSTOM_LABELS,
["gen-rule", "--name", "test.rules", "--namespace", "my-ns", "--out", outfile],
)
assert result.stdout == ""
assert result.exit_code == 0
with open(outfile, "r", encoding="utf-8") as handle:
output = handle.read()
output_lines = output.splitlines()
meta_lineno = output_lines.index("metadata:")
assert output_lines[meta_lineno : meta_lineno + 4] == [
"metadata:",
" labels:",
" hello: world",
" name: test.rules", # Just checking that we don't have other labels after ours
]
|
f1b1303f8186cafcc0b3c0a5f6a9cbcfb0fefad6
|
5c3296ff65e5a07852ff9dad1cc5e07991d08270
|
/lingvo/tasks/car/breakdown_metric_test.py
|
62c9597c97444cbea03570e96c3e0ce8e896b41b
|
[
"Apache-2.0"
] |
permissive
|
tensorflow/lingvo
|
dee164ef6e69edb352f2e855660b9b5227ddcf6f
|
c00a74b260fcf6ba11199cc4a340c127d6616479
|
refs/heads/master
| 2023-09-01T22:08:55.758781
| 2023-08-30T00:50:34
| 2023-08-30T00:51:26
| 142,219,189
| 2,963
| 485
|
Apache-2.0
| 2023-09-07T00:52:48
| 2018-07-24T22:30:28
|
Python
|
UTF-8
|
Python
| false
| false
| 17,257
|
py
|
breakdown_metric_test.py
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for breakdown_metric."""
from lingvo import compat as tf
from lingvo.core import py_utils
from lingvo.core import test_utils
from lingvo.tasks.car import breakdown_metric
from lingvo.tasks.car import kitti_ap_metric
from lingvo.tasks.car import kitti_metadata
import numpy as np
FLAGS = tf.flags.FLAGS
class BreakdownMetricTest(test_utils.TestCase):
def _GenerateRandomBBoxes(self, num_bboxes):
xyz = np.random.uniform(low=-1.0, high=1.0, size=(num_bboxes, 3))
dimension = np.random.uniform(low=-1, high=1.0, size=(num_bboxes, 3))
rotation = np.random.uniform(low=-np.pi, high=np.pi, size=(num_bboxes, 1))
bboxes = np.concatenate([xyz, dimension, rotation], axis=-1)
return bboxes
def _GenerateBBoxesAtDistanceAndRotation(self, num_boxes, distance, rotation):
bboxes = np.zeros(shape=(num_boxes, 7))
bboxes[:, -1] = rotation
bboxes[:, 0] = distance
return bboxes
def _GenerateMetricsWithTestData(self, num_classes):
metadata = kitti_metadata.KITTIMetadata()
num_bins_of_distance = int(
np.rint(metadata.MaximumDistance() / metadata.DistanceBinWidth()))
num_bins_of_rotation = metadata.NumberOfRotationBins()
num_bins_of_points = metadata.NumberOfPointsBins()
# Generate ground truth bounding boxes with prescribed labels, distances,
# rotations and number of points.
expected_objects_at_distance = np.random.randint(
low=0, high=8, size=(num_classes, num_bins_of_distance), dtype=np.int32)
expected_objects_at_rotation = np.zeros(
shape=(num_classes, num_bins_of_rotation), dtype=np.int32)
# Note that we need preserve the same number of objects for each label.
expected_objects_at_points = np.zeros(
shape=(num_classes, num_bins_of_points), dtype=np.int32)
prob = 1.0 / float(num_bins_of_points)
for c in range(num_classes):
num_objects_for_class = np.sum(expected_objects_at_distance[c, :])
expected_objects_at_points[c, :] = np.random.multinomial(
num_objects_for_class, pvals=num_bins_of_points * [prob])
# Zero out the number of boxes in the background class.
expected_objects_at_distance[0, :] = 0
expected_objects_at_points[0, :] = 0
expected_objects_at_rotation[0, :] = 0
bboxes = []
labels = []
num_points = []
bin_width = (
metadata.MaximumRotation() / float(metadata.NumberOfRotationBins()))
# Note that we always skip 'Background' class 0.
for label in range(1, num_classes):
for distance_index in range(num_bins_of_distance):
distance = (
distance_index * metadata.DistanceBinWidth() +
metadata.DistanceBinWidth() / 2.0)
num_box = expected_objects_at_distance[label, distance_index]
if num_box > 0:
rotation_index = np.random.randint(num_bins_of_rotation)
expected_objects_at_rotation[label, rotation_index] += num_box
rotation = rotation_index * bin_width + bin_width / 2.0
bboxes.append(
self._GenerateBBoxesAtDistanceAndRotation(num_box, distance,
rotation))
labels.append(label * np.ones(shape=[num_box], dtype=np.int32))
point_bin_edges = np.logspace(
np.log10(1.0), np.log10(metadata.MaximumNumberOfPoints()),
metadata.NumberOfPointsBins() + 1)
for point_index in range(num_bins_of_points):
num_box = expected_objects_at_points[label, point_index]
for _ in range(num_box):
points = (point_bin_edges[point_index] +
point_bin_edges[point_index + 1]) / 2.0
num_points.append([points])
bboxes = np.concatenate(bboxes)
labels = np.concatenate(labels)
num_points = np.concatenate(num_points)
# Generate dummy predictions as placeholders for the API.
num_predictions = 9
prediction_scores = np.random.uniform(size=[num_classes, num_predictions])
prediction_bboxes = self._GenerateRandomBBoxes(
num_predictions * num_classes).reshape(
(num_classes, num_predictions, 7))
# Update the metrics.
metric_names = ['rotation', 'num_points', 'distance']
ap_params = kitti_ap_metric.KITTIAPMetrics.Params(metadata).Set(
breakdown_metrics=metric_names)
metrics = ap_params.Instantiate()
metrics.Update(
'dummy_image1',
py_utils.NestedMap(
groundtruth_labels=labels,
groundtruth_bboxes=bboxes,
groundtruth_difficulties=np.ones(shape=(bboxes.shape[0])),
groundtruth_num_points=num_points,
detection_scores=prediction_scores,
detection_boxes=prediction_bboxes,
detection_heights_in_pixels=np.ones(
shape=prediction_bboxes.shape[0:2]) * 100))
return py_utils.NestedMap(
metrics=metrics,
expected_objects_at_distance=expected_objects_at_distance,
expected_objects_at_points=expected_objects_at_points,
expected_objects_at_rotation=expected_objects_at_rotation)
def testLoadBoundingBoxes(self):
# Test if all of the groundtruth data loads correctly for each label
# when no distance is specified.
metadata = kitti_metadata.KITTIMetadata()
num_classes = len(metadata.ClassNames())
test_data = self._GenerateMetricsWithTestData(num_classes)
expected_num_objects = np.sum(
test_data.expected_objects_at_distance, axis=1)
# Note that we always skip 'Background' class 0.
for label in range(1, num_classes):
data = test_data.metrics._LoadBoundingBoxes(
'groundtruth', label, distance=None)
if expected_num_objects[label] == 0:
self.assertIsNone(data)
else:
self.assertEqual(expected_num_objects[label], len(data.boxes))
self.assertEqual(expected_num_objects[label], len(data.imgids))
self.assertEqual(expected_num_objects[label], len(data.scores))
self.assertEqual(expected_num_objects[label], len(data.difficulties))
self.assertAllEqual(
np.ones(shape=[expected_num_objects[label]]), data.scores)
self.assertAllEqual(
np.zeros(shape=[expected_num_objects[label]]), data.imgids)
def testLoadBoundingBoxesDifficulty(self):
metadata = kitti_metadata.KITTIMetadata()
num_classes = len(metadata.ClassNames())
test_data = self._GenerateMetricsWithTestData(num_classes)
expected_num_objects = np.sum(
test_data.expected_objects_at_distance, axis=1)
difficulty_metric = test_data.metrics._breakdown_metrics['difficulty']
# Test if difficulties are properly accumulated.
for d in metadata.DifficultyLevels().values():
if d == 1:
self.assertAllEqual(expected_num_objects,
difficulty_metric._histogram[d, :])
else:
self.assertAllEqual(
np.zeros_like(expected_num_objects),
difficulty_metric._histogram[d, :])
def testLoadBoundingBoxesDistance(self):
# Test if all of the groundtruth data loads correctly for each label
# when distance is specified.
metadata = kitti_metadata.KITTIMetadata()
num_classes = len(metadata.ClassNames())
test_data = self._GenerateMetricsWithTestData(num_classes)
num_bins_of_distance = int(
np.rint(metadata.MaximumDistance() / metadata.DistanceBinWidth()))
distance_metric = test_data.metrics._breakdown_metrics['distance']
# Test if all of the groundtruth data loads correctly for each label
# when no distance is specified.
self.assertAllEqual(test_data.expected_objects_at_distance,
np.transpose(distance_metric._histogram))
# Note that we always skip 'Background' class 0.
for label in range(1, num_classes):
for distance in range(num_bins_of_distance):
data = test_data.metrics._LoadBoundingBoxes(
'groundtruth', label, distance=distance)
if test_data.expected_objects_at_distance[label, distance] == 0:
self.assertIsNone(data)
else:
self.assertEqual(
test_data.expected_objects_at_distance[label, distance],
len(data.boxes))
self.assertEqual(
test_data.expected_objects_at_distance[label, distance],
len(data.imgids))
self.assertEqual(
test_data.expected_objects_at_distance[label, distance],
len(data.scores))
self.assertEqual(
test_data.expected_objects_at_distance[label, distance],
len(data.difficulties))
self.assertAllEqual(
np.ones(shape=[
test_data.expected_objects_at_distance[label, distance]
]), data.scores)
self.assertAllEqual(
np.zeros(shape=[
test_data.expected_objects_at_distance[label, distance]
]), data.imgids)
def testLoadBoundingBoxesNumPoints(self):
# Test if all of the groundtruth data loads correctly for each label
# when number of points is specified.
metadata = kitti_metadata.KITTIMetadata()
num_classes = len(metadata.ClassNames())
test_data = self._GenerateMetricsWithTestData(num_classes)
num_bins_of_points = metadata.NumberOfPointsBins()
num_points_metric = test_data.metrics._breakdown_metrics['num_points']
self.assertAllEqual(test_data.expected_objects_at_points,
np.transpose(num_points_metric._histogram))
# Note that we always skip 'Background' class 0.
for label in range(1, num_classes):
for num_points in range(num_bins_of_points):
data = test_data.metrics._LoadBoundingBoxes(
'groundtruth', label, num_points=num_points)
if test_data.expected_objects_at_points[label, num_points] == 0:
self.assertIsNone(data)
else:
# Skip the first bin because it is a special case.
if num_points == 0:
continue
self.assertEqual(
test_data.expected_objects_at_points[label, num_points],
len(data.boxes))
self.assertEqual(
test_data.expected_objects_at_points[label, num_points],
len(data.imgids))
self.assertEqual(
test_data.expected_objects_at_points[label, num_points],
len(data.scores))
self.assertEqual(
test_data.expected_objects_at_points[label, num_points],
len(data.difficulties))
self.assertAllEqual(
np.ones(shape=[
test_data.expected_objects_at_points[label, num_points]
]), data.scores)
self.assertAllEqual(
np.zeros(shape=[
test_data.expected_objects_at_points[label, num_points]
]), data.imgids)
def testLoadBoundingBoxesRotation(self):
# Test if all of the groundtruth data loads correctly for each label
# when rotation is specified.
metadata = kitti_metadata.KITTIMetadata()
num_classes = len(metadata.ClassNames())
test_data = self._GenerateMetricsWithTestData(num_classes)
num_bins_of_rotation = metadata.NumberOfRotationBins()
rotation_metric = test_data.metrics._breakdown_metrics['rotation']
# Test if all of the groundtruth data loads correctly for each label
# when no distance is specified.
self.assertAllEqual(test_data.expected_objects_at_rotation,
np.transpose(rotation_metric._histogram))
# Note that we always skip 'Background' class 0.
for label in range(1, num_classes):
for rotation in range(num_bins_of_rotation):
data = test_data.metrics._LoadBoundingBoxes(
'groundtruth', label, rotation=rotation)
if test_data.expected_objects_at_rotation[label, rotation] == 0:
self.assertIsNone(data)
else:
self.assertEqual(
test_data.expected_objects_at_rotation[label, rotation],
len(data.boxes))
self.assertEqual(
test_data.expected_objects_at_rotation[label, rotation],
len(data.imgids))
self.assertEqual(
test_data.expected_objects_at_rotation[label, rotation],
len(data.scores))
self.assertEqual(
test_data.expected_objects_at_rotation[label, rotation],
len(data.difficulties))
self.assertAllEqual(
np.ones(shape=[
test_data.expected_objects_at_rotation[label, rotation]
]), data.scores)
self.assertAllEqual(
np.zeros(shape=[
test_data.expected_objects_at_rotation[label, rotation]
]), data.imgids)
def testAccumulateHistogram(self):
metadata = kitti_metadata.KITTIMetadata()
num_per_class = np.arange(metadata.NumClasses()) + 1
statistics = [
1 * np.ones(shape=(np.sum(num_per_class)), dtype=np.int32),
2 * np.ones(shape=(np.sum(2 * num_per_class)), dtype=np.int32)
]
statistics = np.concatenate(statistics)
labels = []
for i, n in enumerate(num_per_class):
labels.extend([i] * n)
for i, n in enumerate(num_per_class):
labels.extend([i] * 2 * n)
labels = np.array(labels)
assert len(statistics) == len(labels)
metrics_params = breakdown_metric.BreakdownMetric.Params().Set(
metadata=metadata)
test_breakdown_metric = breakdown_metric.ByDifficulty(metrics_params)
test_breakdown_metric._AccumulateHistogram(
statistics=statistics, labels=labels)
for class_index, n in enumerate(num_per_class):
self.assertEqual(n, test_breakdown_metric._histogram[1, class_index])
self.assertEqual(2 * n, test_breakdown_metric._histogram[2, class_index])
def testByName(self):
metric_class = breakdown_metric.ByName('difficulty')
self.assertEqual(metric_class, breakdown_metric.ByDifficulty)
with self.assertRaises(ValueError):
breakdown_metric.ByName('undefined')
def testFindMaximumRecall(self):
# The shape of the precision_recall_curves is [n, m, 2] where n is the
# number of classes, m is then number of values in the curve, 2 indexes
# between precision [0] and recall [1].
car = np.transpose(
np.array(
[[0.9, 0.7, 0.5, 0.1, 0.0, 0.0], [0.0, 0.2, 0.5, 0.9, 1.0, 1.0]],
dtype=np.float32))
ped = np.transpose(
np.array(
[[0.9, 0.7, 0.5, 0.0, 0.0, 0.0], [0.0, 0.2, 0.5, 0.9, 1.0, 1.0]],
dtype=np.float32))
cyc = np.transpose(
np.array(
[[0.9, 0.7, 0.0, 0.0, 0.0, 0.0], [0.0, 0.2, 0.5, 0.9, 1.0, 1.0]],
dtype=np.float32))
foo = np.transpose(
np.array(
[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.2, 0.5, 0.9, 1.0, 1.0]],
dtype=np.float32))
precision_recall_curves = np.stack([car, ped, cyc, foo])
max_recall = breakdown_metric._FindMaximumRecall(precision_recall_curves)
self.assertAllEqual([4], max_recall.shape)
self.assertNear(0.9, max_recall[0], 1e-7)
self.assertNear(0.5, max_recall[1], 1e-7)
self.assertNear(0.2, max_recall[2], 1e-7)
self.assertNear(0.0, max_recall[3], 1e-7)
def testFindRecallAtGivenPrecision(self):
# The shape of the precision_recall_curves is [n, m, 2] where n is the
# number of classes, m is then number of values in the curve, 2 indexes
# between precision [0] and recall [1].
car = np.transpose(
np.array(
[[0.9, 0.7, 0.5, 0.1, 0.0, 0.0], [0.0, 0.2, 0.5, 0.9, 1.0, 1.0]],
dtype=np.float32))
ped = np.transpose(
np.array(
[[0.9, 0.7, 0.5, 0.0, 0.0, 0.0], [0.0, 0.2, 0.5, 0.9, 1.0, 1.0]],
dtype=np.float32))
cyc = np.transpose(
np.array(
[[0.9, 0.7, 0.0, 0.0, 0.0, 0.0], [0.0, 0.2, 0.5, 0.9, 1.0, 1.0]],
dtype=np.float32))
foo = np.transpose(
np.array(
[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.2, 0.5, 0.9, 1.0, 1.0]],
dtype=np.float32))
precision_recall_curves = np.stack([car, ped, cyc, foo])
precision_level = 0.5
recall = breakdown_metric._FindRecallAtGivenPrecision(
precision_recall_curves, precision_level)
self.assertAllEqual([4], recall.shape)
self.assertNear(0.5, recall[0], 1e-7)
self.assertNear(0.5, recall[1], 1e-7)
self.assertNear(0.2, recall[2], 1e-7)
self.assertNear(0.0, recall[3], 1e-7)
if __name__ == '__main__':
test_utils.main()
|
9c5652b55c9a884cd19041bd0904b864c44339be
|
1dfdc35068eeef9a0525bd1ea75362baa7ed3ff5
|
/deplacy/deplacy.py
|
77b68d86cd65872474bd0f1356621477adc6a581
|
[
"MIT"
] |
permissive
|
KoichiYasuoka/deplacy
|
8204ab8dad209ad0be17e98f00c945eccf0b76e2
|
37a8d28b6dd3e6e85de92b445ecbce7726b1b74b
|
refs/heads/master
| 2023-08-07T03:28:36.733717
| 2023-07-29T03:10:33
| 2023-07-29T03:10:33
| 250,933,692
| 106
| 6
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 12,487
|
py
|
deplacy.py
|
# coding=utf-8
import os,tempfile
from pkg_resources import get_distribution
from http.server import BaseHTTPRequestHandler
from http import HTTPStatus
PACKAGE_DIR=os.path.abspath(os.path.dirname(__file__))
VERSION="HTTP deplacy/"+get_distribution("deplacy").version
EDITOR_URL="https://koichiyasuoka.github.io/deplacy/deplacy/editor.html"
EDITOR_RTOL="https://koichiyasuoka.github.io/deplacy/deplacy/editorRtoL.html"
TEMPFILE=tempfile.TemporaryFile()
def makeDoc(doc):
s=str(type(doc))
if s.find("spacy")==8:
return doc
d=to_conllu(doc)
DOC=[]
m=[]
misc=""
for t in d.split("\n"):
x=t.split("\t")
if len(x)!=10:
continue
try:
i,j=int(x[0]),int(x[6])
except:
try:
i=x[0].index("-")
j=int(x[0][0:i])
k=int(x[0][i+1:])
m.append((len(DOC),j,k,x[1]))
continue
except:
continue
s=type("",(object,),{"i":i})
s.orth_=x[1]
s.pos_=x[3]
s.head=j
s.dep_=x[7]
s.whitespace_=(x[9].find("SpaceAfter=No")<0)
if s.whitespace_:
i=x[9].find("start_char=")
if i>=0:
j=x[9].find("|",i)
k=x[9][i+5:] if j<0 else x[9][i+5:j]
if misc.find("end"+k)>=0:
DOC[-1].whitespace_=False
DOC.append(s)
misc=x[9]
for i,j,k,f in reversed(m):
offset=i-DOC[i].i
DOC[k+offset].contract=(f,[i+offset for i in range(j,k+1)])
for i,t in enumerate(DOC):
if t.head==0:
t.head=t
else:
t.head=DOC[i+t.head-t.i]
return DOC
def catenaArray(DOC):
import functools
f=[[] for i in range(len(DOC))]
h=[]
for i in range(len(DOC)):
if DOC[i].dep_.lower()=="root" or DOC[i].head==DOC[i]:
h.append(-1)
continue
j=i+DOC[i].head.i-DOC[i].i
h.append(j)
f[j].append(i)
def CatenaInseparability(a,b):
if a==b:
return 0
if a-b>0:
return -CatenaInseparability(b,a)
if b<NOW:
return CatenaInseparabilityLL(a,b)
if a>NOW:
return CatenaInseparabilityRR(a,b)
return CatenaInseparabilityLR(a,b)
def CatenaInseparabilityLL(a,b):
if DOC[b].dep_.startswith("punct"):
if not DOC[a].dep_.startswith("punct"):
return -1
if DOC[b].dep_.startswith("discourse"):
if not DOC[a].dep_.startswith("discourse"):
return -1
if DOC[b].dep_.startswith("vocative"):
if not DOC[a].dep_.startswith("vocative"):
return -1
return 1
def CatenaInseparabilityRR(a,b):
if DOC[b].dep_.startswith("compound"):
if not DOC[a].dep_.startswith("compound"):
return 1
if DOC[b].dep_=="prt": # for Penn Treebank (en_core_web)
return 1
if DOC[b].dep_=="svp": # for TIGER Corpus (de_core_news)
return 1
return -1
def CatenaInseparabilityLR(a,b):
if DOC[b].dep_.startswith("punct"):
return -1
if DOC[b].dep_.startswith("discourse"):
return -1
if DOC[b].dep_.startswith("parataxis"):
return -1
if DOC[b].dep_.startswith("mark"):
if DOC[b].pos_=="PART":
return -1
if DOC[a].dep_.startswith("compound"):
return -1
if DOC[b].dep_.startswith("compound"):
return 1
if DOC[a].dep_.startswith("nummod"):
return -1
if DOC[b].dep_.startswith("conj"):
if b-NOW-1>NOW-a:
return -1
return 1
for NOW,e in enumerate(f):
if len(e)<2:
continue
e.sort(key=functools.cmp_to_key(CatenaInseparability))
w=[i for i in reversed(range(len(DOC))) if h[i]==-1]
j=w
while len(w)<len(DOC):
k=[]
for i in j:
k.extend(f[i])
w=k+w
j=k
return f,h,w
def renderMatrix(doc,CatenaAnalysis):
if type(doc)==list:
DOC=doc
else:
DOC=makeDoc(doc)
f,h,w=catenaArray(DOC)
d=[1 if f[i]==[] and abs(h[i]-i)==1 else -1 if h[i]==-1 else 0 for i in range(len(DOC))]
if CatenaAnalysis:
for e in f:
if len(e)>1:
for i in e[1:]:
d[i]=0
while 0 in d:
for i in w:
if d[i]!=0:
continue
k=h[i]
m=min(i,k)
n=max(i,k)
if -1 in d[m+1:n]:
if 0 in d[m+1:n]:
continue
g=[d[j] for j in f[i]]
if 0 in g:
continue
if CatenaAnalysis:
for j in f[k]:
g.append(d[j])
for j in range(m+1,n):
if j in f[i]:
continue
g.append(d[j]-1 if j in f[k] else d[j])
g.extend([d[e] for e in f[j] if e<m or e>n])
g.append(0)
d[i]=max(g)+1
m=max(d)
p=[[0]*(m*2) for i in range(len(DOC))]
for i in range(len(DOC)):
k=h[i]
if k==-1:
continue
j=d[i]*2-1
p[min(i,k)][j]|=9
p[max(i,k)][j]|=5
for l in range(j):
if p[k][l]!=12:
p[k][l]|=3
for l in range(min(i,k)+1,max(i,k)):
if p[l][j]!=3:
p[l][j]|=12
for i in range(len(DOC)):
if h[i]>=0:
j=d[i]*2-2
while j>=0:
if p[i][j]==12:
if j>1:
if p[i][j-2]==0:
j-=1
continue
break
elif p[i][j]>0:
break
p[i][j]|=3
j-=1
p[i][j+1]=16
return p
def render(doc,BoxDrawingWidth=1,EnableCR=False,WordRight=False,CatenaAnalysis=True,file=None,Japanese=False):
import unicodedata
DOC=makeDoc(doc)
if len(DOC)==0:
return
p=renderMatrix(DOC,CatenaAnalysis)
u=[" ","\u2578","\u257A","\u2550","\u2579","\u255D","\u255A","\u2569","\u257B","\u2557","\u2554","\u2566","\u2551","\u2563","\u2560","\u256C","<"]
if WordRight:
for i in [1,5,9,13]:
u[i],u[i+1]=u[i+1],u[i]
u[16]=">"
if CatenaAnalysis:
u[7]=u[5]
u[11]=u[9]
u[15]=u[12]
r={}
if Japanese:
import deplacy.deprelja
r=deplacy.deprelja.deprelja
deps=[]
for i in range(len(DOC)):
d=DOC[i].dep_
if d in r:
d+="("+r[d]+")"
elif d.find(":")>0:
j=d.split(":")
if j[0] in r:
d+="("+r[j[0]]+"["+":".join(j[1:])+"])"
deps.append(d)
if WordRight:
x=[len(d)+len([c for c in d if ord(c)>12287]) for d in deps]
else:
x=[len(t.orth_)+len([c for c in t.orth_ if ord(c)>12287 and unicodedata.category(c)!="Mn"])-len([c for c in t.orth_ if unicodedata.category(c)=="Mn"]) for t in DOC]
m=max(x)+1
n=max([len(t.pos_) for t in DOC if t.pos_!="_"]+[-1])+1
s=""
for i in range(len(DOC)):
if WordRight:
t="".join(u[j] for j in reversed(p[i]))
else:
t="".join(u[j] for j in p[i])
if BoxDrawingWidth>1:
t=t.replace(" "," "*BoxDrawingWidth).replace("<"," "*(BoxDrawingWidth-1)+"<").replace(">",">"+" "*(BoxDrawingWidth-1))
d=deps[i]
r="" if DOC[i].pos_=="_" else DOC[i].pos_
if WordRight:
s+=" "*(m-x[i]-1)+d+" "+t+" "+r+" "*(n-len(r))+DOC[i].orth_+"\n"
elif EnableCR:
s+=" "*m+r+" "*(n-len(r))+t+" "+d+"\r"+DOC[i].orth_+"\n"
else:
s+=DOC[i].orth_+" "*(m-x[i])+r+" "*(n-len(r))+t+" "+d+"\n"
print(s,end="",file=file)
def to_conllu(doc,RtoL=False):
    """Serialize a parsed document to CoNLL-U text.

    Dispatches on the class name of *doc* to support several NLP libraries
    (spaCy, Stanza, Classla, StanfordNLP, NLTK, COMBO, suPar, Trankit-style
    dicts, plain lists); anything unrecognized falls back to str(doc).
    RtoL (spaCy branch only) adds Direction=RtoL to MISC for multi-character
    tokens containing full-width/non-Latin characters.
    """
    # str(type(doc)) looks like "<class 'spacy...'>", so the top-level module
    # name starts at index 8.
    s=str(type(doc))
    if s.find("spacy")==8:
        c=""
        for s in doc.sents:
            for t in s:
                try:
                    # Older spaCy morph objects stringify to "<spacy...>";
                    # treat that (or any failure) as "no features".
                    m=str(t.morph)
                    if m.startswith("<spacy"):
                        m=""
                except:
                    m=""
                c+=str(t.i-s.start+1)
                # Columns 2-9: FORM LEMMA UPOS XPOS FEATS HEAD DEPREL DEPS;
                # blank values become "_".
                for i in [t.orth_,t.lemma_,t.pos_,t.tag_,m,str(0 if t.head==t else t.head.i-s.start+1),t.dep_,""]:
                    c+="\t_" if i.strip()=="" else "\t"+i
                # MISC column, assembled piece by piece.
                if t.ent_iob_=="B" or t.ent_iob_=="I":
                    u="NE="+t.ent_iob_+"-"+t.ent_type_
                else:
                    u=""
                if RtoL and len(t.orth_)>1:
                    if len([c for c in t.orth_ if ord(c)>12287])>0:
                        u="Direction=RtoL" if u=="" else "Direction=RtoL|"+u
                if not t.whitespace_:
                    u+=("" if u=="" else "|")+"SpaceAfter=No"
                if t.norm_!="" and t.norm_!=t.orth_:
                    u+=("" if u=="" else "|")+"Translit="+t.norm_
                if u=="":
                    u="_"
                c+="\t"+u+"\n"
            c+="\n"
        return c
    elif s.find("stanza")==8:
        from stanza.utils.conll import CoNLL
        return CoNLL.conll_as_string(CoNLL.convert_dict(doc.to_dict()))
    elif s.find("classla")==8:
        return doc.to_conll()
    elif s.find("stanfordnlp")==8:
        return doc.conll_file.conll_as_string()
    elif s.find("nltk")==8:
        return doc.to_conll(10)
    elif s.find("combo")==8:
        from combo.data import sentence2conllu
        return sentence2conllu(doc,False).serialize()
    elif s.find("supar")==8:
        if hasattr(doc,"sentences"):
            return "".join([str(s)+"\n" for s in doc.sentences])
        else:
            return str(doc)+"\n"
    elif s.find("list")==8:
        # List of sentences, each a list of token lines.
        return "".join("".join(str(t)+"\n" for t in s)+"\n" for s in doc)
    elif s.find("dict")==8 and "sentences" in doc:
        # Trankit-style dict: record character spans in MISC and expand
        # multi-word tokens.
        from trankit.utils.conll import CoNLL
        d=[]
        for s in doc["sentences"]:
            e=[]
            for t in s["tokens"]:
                if "span" in t:
                    i,j=t["span"]
                    t["misc"]="start_char="+str(i)+"|end_char="+str(j)
                e.append(t)
                if "expanded" in t:
                    e.extend(t["expanded"])
            d.append(list(e))
        return CoNLL.conll_as_string(CoNLL.convert_dict(d))
    return str(doc)
class DeplacyRequestHandler(BaseHTTPRequestHandler):
    """Serves the in-browser viewer: packaged HTML/JS assets plus the
    current CoNLL-U payload held in the module-level TEMPFILE.
    """
    server_version=VERSION
    header_html="header.html"
    def do_GET(self):
        # Only the last path component matters; the viewer requests "/",
        # packaged "*.js" assets, and browsers ask for "/favicon.ico".
        p=self.path
        p=p[p.rfind("/")+1:]
        if p.endswith(".js"):
            # Static JavaScript asset shipped with the package.
            # "with" guarantees the handle is closed even if read() fails.
            with open(os.path.join(PACKAGE_DIR,p),"r",encoding="utf-8") as f:
                r=f.read()
            t="application/javascript"
        elif p.endswith(".ico"):
            # No favicon shipped; end_headers() terminates the 404 response
            # so the client is not left waiting on an open header block.
            self.send_response(HTTPStatus.NOT_FOUND)
            self.end_headers()
            return
        else:
            # Viewer page: header + current CoNLL-U payload + tailer.
            with open(os.path.join(PACKAGE_DIR,self.header_html),"r",encoding="utf-8") as f:
                r=f.read()
            TEMPFILE.seek(0)
            r+=TEMPFILE.read().decode("utf-8")
            with open(os.path.join(PACKAGE_DIR,"tailer.html"),"r",encoding="utf-8") as f:
                r+=f.read()
            t="text/html;charset=UTF-8"
        b=r.encode("utf-8")
        self.send_response(HTTPStatus.OK)
        self.send_header("Content-Type",t)
        self.send_header("Content-Length",str(len(b)))
        self.end_headers()
        self.wfile.write(b)
class DeplacyRequestHandlerRtoL(DeplacyRequestHandler):
    # Same handler, but serves the right-to-left variant of the viewer page.
    header_html="headerRtoL.html"
def serve(doc,port=5000,RtoL=False):
    """Serve *doc* as CoNLL-U in the browser-based viewer.

    With port=None (e.g. inside a notebook) the document is instead embedded
    in an IPython IFrame pointing at the hosted editor, URL-encoded in the
    fragment. Otherwise an HTTP server is started and blocks until
    interrupted.
    """
    c=to_conllu(doc,RtoL)
    if port==None:
        from IPython.display import IFrame,display
        from urllib.parse import quote
        if RtoL:
            display(IFrame(src=EDITOR_RTOL+"#"+quote(c),width="100%",height="400"))
        else:
            display(IFrame(src=EDITOR_URL+"#"+quote(c),width="100%",height="400"))
        return
    import sys
    from http.server import HTTPServer
    # Rewrite the shared temp file that DeplacyRequestHandler serves.
    f=TEMPFILE
    f.seek(0)
    f.truncate(0)
    f.write(c.encode("utf-8"))
    if RtoL:
        httpd=HTTPServer(("",port),DeplacyRequestHandlerRtoL)
    else:
        httpd=HTTPServer(("",port),DeplacyRequestHandler)
    print("http://127.0.0.1:"+str(port)+" "+VERSION,file=sys.stderr)
    try:
        httpd.serve_forever()
    except:
        # Bare except is intentional: Ctrl-C (KeyboardInterrupt) is the
        # normal way to stop the server.
        return
def dot(doc,RtoL=False):
    """Return a Graphviz DOT rendering of *doc*'s dependency tree.

    Words appear in one record-shaped node; catenae are merged bottom-up
    into intermediate "xN" nodes with deprel-labelled edges. Returns None
    for an empty document.
    """
    DOC=makeDoc(doc)
    if len(DOC)==0:
        return None
    # f: children per head; h: head index per token (-1 = root);
    # w: heads in bottom-up processing order.
    f,h,w=catenaArray(DOC)
    s='digraph deplacy{\n'
    s+='node[shape=plaintext,fontsize=14];\n'
    s+='edge[color=gray,fontname="sans-serif",fontsize=10];\n'
    # t accumulates node/edge statements; the word record is built first and
    # new statements are prepended so declarations precede references.
    t='w[shape=record,penwidth=0,label="';
    v=[]
    j=''
    for i in range(len(DOC)):
        if RtoL:
            t+=j+'{<'+str(len(DOC)-i)+'>'
            j=DOC[len(DOC)-i-1].orth_
        else:
            t+=j+'{<'+str(i+1)+'>'
            j=DOC[i].orth_
        if j in ['"','|','{','}','<','>','\\']:
            # Escape record-label metacharacters.
            t+='\\'+j
            v.append('\\'+j)
        elif j!='_':
            t+=j
            v.append(j)
        else:
            v.append('')
        if RtoL:
            t+='|'+DOC[len(DOC)-i-1].pos_+'}'
        else:
            t+='|'+DOC[i].pos_+'}'
        j='|'
    if RtoL:
        v.reverse()
    t+='"];\n'
    # c[i]: token indices in the catena currently headed by i;
    # n[i]: DOT node name currently representing token i's catena.
    c=[[i] for i in range(len(DOC))]
    n=["w:"+str(i+1) for i in range(len(DOC))]
    x=1
    for i in w:
        for j in f[i]:
            # New merge node xN linking head-catena and child-catena.
            p='x'+str(x)+'->'+n[i]+';'
            q='x'+str(x)+'->'+n[j]+'[label="'+DOC[j].dep_+'"];'
            if (i<j)==RtoL:
                t=q+p+'\n'+t
            else:
                t=p+q+'\n'+t
            c[i].extend(c[j])
            # Rebuild the surface string covered by the merged catena.
            u=""
            for j in range(min(c[i]),max(c[i])+1):
                if j in c[i]:
                    u+=v[j]
                if hasattr(DOC[j],"contract"):
                    # Multi-word token: replace the expanded words with the
                    # contracted surface form.
                    p,q=DOC[j].contract
                    r="".join(v[k]+" " if DOC[k].whitespace_ else v[k] for k in q).rstrip()
                    if u.endswith(r):
                        u=u[0:-len(r)]+p
                    if u.endswith(" "):
                        continue
                if DOC[j].whitespace_:
                    u+=" "
            t='x'+str(x)+'[label="'+u.rstrip()+'"];'+t
            n[i]='x'+str(x)
            x+=1
        if h[i]==-1:
            # Root token: attach a small deprel label node above it.
            t='r'+str(i+1)+'[fontname="sans-serif",fontsize=10,fixedsize=true,height=.15,label="'+DOC[i].dep_+'"];r'+str(i+1)+'->'+n[i]+';\n'+t
    s+=t+'}\n'
    return s
|
c5c3fcc4b48ce5d6ad05cc5b76bb0ba7f62097e6
|
2f7dc0184e5b4c0c15973b498c589de2d049e277
|
/tests/check_case_conflict_test.py
|
a914f452794be919956b5e26c18eac6362216ce0
|
[
"MIT"
] |
permissive
|
pre-commit/pre-commit-hooks
|
178ab044f0b18893d69521f13fc6cf9a29a13a09
|
3a569ca95749f562ff8b742e3568a077caeb5eb7
|
refs/heads/main
| 2023-08-18T02:31:00.550553
| 2023-08-15T14:18:50
| 2023-08-15T14:18:50
| 17,714,713
| 4,311
| 831
|
MIT
| 2023-09-14T20:13:15
| 2014-03-13T15:21:46
|
Python
|
UTF-8
|
Python
| false
| false
| 3,866
|
py
|
check_case_conflict_test.py
|
from __future__ import annotations
import sys
import pytest
from pre_commit_hooks.check_case_conflict import find_conflicting_filenames
from pre_commit_hooks.check_case_conflict import main
from pre_commit_hooks.check_case_conflict import parents
from pre_commit_hooks.util import cmd_output
from testing.util import git_commit
# Tests that create case-conflicting directories/files cannot run on win32,
# where the filesystem itself is case-insensitive.
skip_win32 = pytest.mark.skipif(
    sys.platform == 'win32',
    reason='case conflicts between directories and files',
)
def test_parents():
    """parents() yields every ancestor directory of a path (none for a bare name)."""
    expected_by_path = {
        'a': set(),
        'a/b': {'a'},
        'a/b/c': {'a/b', 'a'},
        'a/b/c/d': {'a/b/c', 'a/b', 'a'},
    }
    for path, expected in expected_by_path.items():
        assert set(parents(path)) == expected
def test_nothing_added(temp_git_dir):
    """With nothing staged or committed there is nothing to conflict with."""
    with temp_git_dir.as_cwd():
        assert find_conflicting_filenames(['f.py']) == 0
def test_adding_something(temp_git_dir):
    """A single staged file has no case conflict."""
    with temp_git_dir.as_cwd():
        temp_git_dir.join('f.py').write("print('hello world')")
        cmd_output('git', 'add', 'f.py')

        assert find_conflicting_filenames(['f.py']) == 0
def test_adding_something_with_conflict(temp_git_dir):
    """Two staged files differing only in case are reported as a conflict."""
    with temp_git_dir.as_cwd():
        temp_git_dir.join('f.py').write("print('hello world')")
        cmd_output('git', 'add', 'f.py')
        temp_git_dir.join('F.py').write("print('hello world')")
        cmd_output('git', 'add', 'F.py')

        assert find_conflicting_filenames(['f.py', 'F.py']) == 1
@skip_win32  # pragma: win32 no cover
def test_adding_files_with_conflicting_directories(temp_git_dir):
    """Directories differing only in case conflict even with distinct filenames."""
    with temp_git_dir.as_cwd():
        temp_git_dir.mkdir('dir').join('x').write('foo')
        temp_git_dir.mkdir('DIR').join('y').write('foo')
        cmd_output('git', 'add', '-A')

        assert find_conflicting_filenames([]) == 1
@skip_win32  # pragma: win32 no cover
def test_adding_files_with_conflicting_deep_directories(temp_git_dir):
    """A file conflicts with a same-cased ancestor directory of another file."""
    with temp_git_dir.as_cwd():
        temp_git_dir.mkdir('x').mkdir('y').join('z').write('foo')
        temp_git_dir.join('X').write('foo')
        cmd_output('git', 'add', '-A')

        assert find_conflicting_filenames([]) == 1
@skip_win32  # pragma: win32 no cover
def test_adding_file_with_conflicting_directory(temp_git_dir):
    """A plain file conflicts with a directory of the same name, case folded."""
    with temp_git_dir.as_cwd():
        temp_git_dir.mkdir('dir').join('x').write('foo')
        temp_git_dir.join('DIR').write('foo')
        cmd_output('git', 'add', '-A')

        assert find_conflicting_filenames([]) == 1
def test_added_file_not_in_pre_commits_list(temp_git_dir):
    """Files staged but not in the hook's filename list are not checked."""
    with temp_git_dir.as_cwd():
        temp_git_dir.join('f.py').write("print('hello world')")
        cmd_output('git', 'add', 'f.py')

        # File is not listed on the command line
        assert find_conflicting_filenames(['g.py']) == 0
def test_file_conflicts_with_committed_file(temp_git_dir):
    """A newly staged file conflicts with an already-committed file of different case."""
    with temp_git_dir.as_cwd():
        temp_git_dir.join('f.py').write("print('hello world')")
        cmd_output('git', 'add', 'f.py')
        git_commit('-m', 'Add f.py')

        temp_git_dir.join('F.py').write("print('hello world')")
        cmd_output('git', 'add', 'F.py')

        assert find_conflicting_filenames(['F.py']) == 1
@skip_win32  # pragma: win32 no cover
def test_file_conflicts_with_committed_dir(temp_git_dir):
    """A newly staged file conflicts with an already-committed directory."""
    with temp_git_dir.as_cwd():
        temp_git_dir.mkdir('dir').join('x').write('foo')
        cmd_output('git', 'add', '-A')
        git_commit('-m', 'Add f.py')

        temp_git_dir.join('DIR').write('foo')
        cmd_output('git', 'add', '-A')

        assert find_conflicting_filenames([]) == 1
def test_integration(temp_git_dir):
    """End-to-end: main() returns 0 while clean and 1 once a conflict is staged."""
    with temp_git_dir.as_cwd():
        assert main(argv=[]) == 0

        temp_git_dir.join('f.py').write("print('hello world')")
        cmd_output('git', 'add', 'f.py')

        assert main(argv=['f.py']) == 0

        temp_git_dir.join('F.py').write("print('hello world')")
        cmd_output('git', 'add', 'F.py')

        assert main(argv=['F.py']) == 1
|
1f0ee4c7b1f60a3d393cc52023b066e3f8cd9587
|
5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d
|
/alipay/aop/api/domain/AlipayBossFncOutputinvoiceOutbillApplyModel.py
|
c38ae903bff739b97a3c8ebccbf5153095ba5012
|
[
"Apache-2.0"
] |
permissive
|
alipay/alipay-sdk-python-all
|
8bd20882852ffeb70a6e929038bf88ff1d1eff1c
|
1fad300587c9e7e099747305ba9077d4cd7afde9
|
refs/heads/master
| 2023-08-27T21:35:01.778771
| 2023-08-23T07:12:26
| 2023-08-23T07:12:26
| 133,338,689
| 247
| 70
|
Apache-2.0
| 2023-04-25T04:54:02
| 2018-05-14T09:40:54
|
Python
|
UTF-8
|
Python
| false
| false
| 5,145
|
py
|
AlipayBossFncOutputinvoiceOutbillApplyModel.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi
from alipay.aop.api.domain.InvoiceApplyOpenApi import InvoiceApplyOpenApi
class AlipayBossFncOutputinvoiceOutbillApplyModel(object):
    """Request model for the Alipay boss.fnc.outputinvoice.outbill.apply API.

    Generated-SDK style: private attributes exposed through properties, with
    to_alipay_dict()/from_alipay_dict() for (de)serialization. Setters for
    complex fields coerce plain dicts into their model classes.
    """

    def __init__(self):
        self._biz_id = None
        self._biz_no = None
        self._invoice_amt = None
        self._invoice_applys = None
        self._invoice_note = None
        self._memo = None
        self._operator = None
        self._source = None

    @property
    def biz_id(self):
        return self._biz_id

    @biz_id.setter
    def biz_id(self, value):
        self._biz_id = value
    @property
    def biz_no(self):
        return self._biz_no

    @biz_no.setter
    def biz_no(self, value):
        self._biz_no = value
    @property
    def invoice_amt(self):
        return self._invoice_amt

    @invoice_amt.setter
    def invoice_amt(self, value):
        # Accept either a ready model instance or a plain dict to coerce.
        if isinstance(value, MultiCurrencyMoneyOpenApi):
            self._invoice_amt = value
        else:
            self._invoice_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value)
    @property
    def invoice_applys(self):
        return self._invoice_applys

    @invoice_applys.setter
    def invoice_applys(self, value):
        # Expects a list; each element may be a model instance or a dict.
        # Non-list values are silently ignored.
        if isinstance(value, list):
            self._invoice_applys = list()
            for i in value:
                if isinstance(i, InvoiceApplyOpenApi):
                    self._invoice_applys.append(i)
                else:
                    self._invoice_applys.append(InvoiceApplyOpenApi.from_alipay_dict(i))
    @property
    def invoice_note(self):
        return self._invoice_note

    @invoice_note.setter
    def invoice_note(self, value):
        self._invoice_note = value
    @property
    def memo(self):
        return self._memo

    @memo.setter
    def memo(self, value):
        self._memo = value
    @property
    def operator(self):
        return self._operator

    @operator.setter
    def operator(self, value):
        self._operator = value
    @property
    def source(self):
        return self._source

    @source.setter
    def source(self, value):
        self._source = value

    def to_alipay_dict(self):
        """Serialize set (truthy) fields to a plain dict for the API call.

        Note: falsy values (None, "", 0) are skipped entirely, and the
        invoice_applys list is mutated in place — its elements are replaced
        with their dict form.
        """
        params = dict()
        if self.biz_id:
            if hasattr(self.biz_id, 'to_alipay_dict'):
                params['biz_id'] = self.biz_id.to_alipay_dict()
            else:
                params['biz_id'] = self.biz_id
        if self.biz_no:
            if hasattr(self.biz_no, 'to_alipay_dict'):
                params['biz_no'] = self.biz_no.to_alipay_dict()
            else:
                params['biz_no'] = self.biz_no
        if self.invoice_amt:
            if hasattr(self.invoice_amt, 'to_alipay_dict'):
                params['invoice_amt'] = self.invoice_amt.to_alipay_dict()
            else:
                params['invoice_amt'] = self.invoice_amt
        if self.invoice_applys:
            if isinstance(self.invoice_applys, list):
                for i in range(0, len(self.invoice_applys)):
                    element = self.invoice_applys[i]
                    if hasattr(element, 'to_alipay_dict'):
                        self.invoice_applys[i] = element.to_alipay_dict()
            if hasattr(self.invoice_applys, 'to_alipay_dict'):
                params['invoice_applys'] = self.invoice_applys.to_alipay_dict()
            else:
                params['invoice_applys'] = self.invoice_applys
        if self.invoice_note:
            if hasattr(self.invoice_note, 'to_alipay_dict'):
                params['invoice_note'] = self.invoice_note.to_alipay_dict()
            else:
                params['invoice_note'] = self.invoice_note
        if self.memo:
            if hasattr(self.memo, 'to_alipay_dict'):
                params['memo'] = self.memo.to_alipay_dict()
            else:
                params['memo'] = self.memo
        if self.operator:
            if hasattr(self.operator, 'to_alipay_dict'):
                params['operator'] = self.operator.to_alipay_dict()
            else:
                params['operator'] = self.operator
        if self.source:
            if hasattr(self.source, 'to_alipay_dict'):
                params['source'] = self.source.to_alipay_dict()
            else:
                params['source'] = self.source
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model instance from an API dict; returns None for empty input."""
        if not d:
            return None
        o = AlipayBossFncOutputinvoiceOutbillApplyModel()
        if 'biz_id' in d:
            o.biz_id = d['biz_id']
        if 'biz_no' in d:
            o.biz_no = d['biz_no']
        if 'invoice_amt' in d:
            o.invoice_amt = d['invoice_amt']
        if 'invoice_applys' in d:
            o.invoice_applys = d['invoice_applys']
        if 'invoice_note' in d:
            o.invoice_note = d['invoice_note']
        if 'memo' in d:
            o.memo = d['memo']
        if 'operator' in d:
            o.operator = d['operator']
        if 'source' in d:
            o.source = d['source']
        return o
|
07ea7cad6ca329188882d3070984e35c3183d3e6
|
9efca95a55cb4df52d895d42f1ec10331516a734
|
/c7n/mu.py
|
af96236b3aa80244c46fcf6bceafe64559b515ec
|
[
"Apache-2.0"
] |
permissive
|
cloud-custodian/cloud-custodian
|
519e602abe00c642786441b64cc40857ef5bc9de
|
27563cf4571040f923124e1acb2463f11e372225
|
refs/heads/main
| 2023-09-04T10:54:55.963703
| 2023-09-01T17:40:17
| 2023-09-01T17:40:17
| 52,837,350
| 3,327
| 1,096
|
Apache-2.0
| 2023-09-14T14:03:30
| 2016-03-01T01:11:20
|
Python
|
UTF-8
|
Python
| false
| false
| 62,412
|
py
|
mu.py
|
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
"""
Cloud Custodian Lambda Provisioning Support
docs/lambda.rst
"""
import abc
import base64
import hashlib
import importlib
import io
import json
import logging
import os
import shutil
import time
import tempfile
import zipfile
import platform
# We use this for freezing dependencies for serverless environments
# that support service side building.
# Its also used for release engineering on our pypi uploads
try:
from importlib import metadata as pkgmd
except ImportError:
try:
import importlib_metadata as pkgmd
except (ImportError, FileNotFoundError):
pkgmd = None
# Static event mapping to help simplify cwe rules creation
from c7n.exceptions import ClientError
from c7n.cwe import CloudWatchEvents
from c7n.utils import parse_s3, local_session, get_retry, merge_dict
log = logging.getLogger('custodian.serverless')

# Retry wrappers for transient Lambda API errors (see c7n.utils.get_retry):
# permission/parameter errors retried up to 5 times.
LambdaRetry = get_retry(('InsufficientPermissionsException',
                         'InvalidParameterValueException',), max_attempts=5)
# Concurrent updates to the same function raise conflicts; retry briefly.
LambdaConflictRetry = get_retry(('ResourceConflictException',), max_attempts=3)
# Event-rule lookups may transiently report not-found; one extra attempt.
RuleRetry = get_retry(('ResourceNotFoundException',), max_attempts=2)
class PythonPackageArchive:
    """Creates a zip file for python lambda functions.

    :param tuple modules: the Python modules to add to the archive

    Amazon doesn't give us straightforward docs here, only `an example
    <http://docs.aws.amazon.com/lambda/latest/dg/with-s3-example-deployment-pkg.html#with-s3-example-deployment-pkg-python>`_,
    from which we can infer that they simply unzip the file into a directory on
    ``sys.path``. So what we do is locate all of the ``modules`` specified, and
    add all of the ``.py`` files we find for these modules to a zip file.

    In addition to the modules specified during instantiation, you can add
    arbitrary additional files to the archive using :py:func:`add_file` and
    :py:func:`add_contents`. For example, since we only add ``*.py`` files for
    you, you'll need to manually add files for any compiled extension modules
    that your Lambda requires.

    """
    zip_compression = zipfile.ZIP_DEFLATED

    def __init__(self, modules=(), cache_file=None):
        # The archive is backed by a delete=False temp file so its path can
        # be handed to uploaders; it is unlinked in __del__.
        self._temp_archive_file = tempfile.NamedTemporaryFile(delete=False)
        if cache_file:
            # Seed the archive from a pre-built zip; modules are appended.
            with open(cache_file, 'rb') as fin:
                shutil.copyfileobj(fin, self._temp_archive_file)

        self._zip_file = zipfile.ZipFile(
            self._temp_archive_file, mode='a',
            compression=self.zip_compression)
        self._closed = False
        self.add_modules(None, modules)

    def __del__(self):
        try:
            if not self._closed:
                self.close()
            if self._temp_archive_file:
                self._temp_archive_file.close()
                os.unlink(self.path)
        except AttributeError:
            # Finalizers in python are fairly problematic, especially when
            # breaking cycle references, there are no ordering guaranteees
            # so our tempfile may already be gc'd before this ref'd version
            # is called.
            pass

    @property
    def path(self):
        # Filesystem path of the backing temp file.
        return self._temp_archive_file.name

    @property
    def size(self):
        # Byte size of the finished archive; only meaningful after close().
        if not self._closed:
            raise ValueError("Archive not closed, size not accurate")
        return os.stat(self._temp_archive_file.name).st_size

    def create_zinfo(self, file):
        """Normalize *file* to a ZipInfo carrying our compression and a
        Linux host OS marker (for reproducible archives)."""
        if not isinstance(file, zipfile.ZipInfo):
            file = zinfo(file)
        # Ensure we apply the compression
        file.compress_type = self.zip_compression
        # Mark host OS as Linux for all archives
        file.create_system = 3
        return file

    def add_modules(self, ignore, modules):
        """Add the named Python modules to the archive. For consistency's sake
        we only add ``*.py`` files, not ``*.pyc``. We also don't add other
        files, including compiled modules. You'll have to add such files
        manually using :py:meth:`add_file`.
        """
        for module_name in modules:
            module = importlib.import_module(module_name)

            if hasattr(module, '__path__'):
                # https://docs.python.org/3/reference/import.html#module-path
                for directory in module.__path__:
                    self.add_directory(directory, ignore)
                if getattr(module, '__file__', None) is None:

                    # Likely a namespace package. Try to add *.pth files so
                    # submodules are importable under Python 2.7.

                    sitedir = os.path.abspath(os.path.join(list(module.__path__)[0], os.pardir))
                    for filename in os.listdir(sitedir):
                        s = filename.startswith
                        e = filename.endswith
                        if s(module_name) and e('-nspkg.pth'):
                            self.add_file(os.path.join(sitedir, filename))

            elif hasattr(module, '__file__'):
                # https://docs.python.org/3/reference/import.html#__file__
                path = module.__file__

                if path.endswith('.pyc'):
                    _path = path[:-1]
                    if not os.path.isfile(_path):
                        raise ValueError(
                            'Could not find a *.py source file behind ' + path)
                    path = _path

                if not path.endswith('.py'):
                    raise ValueError(
                        'We need a *.py source file instead of ' + path)

                self.add_file(path)

    def add_directory(self, path, ignore=None):
        """Add ``*.py`` files under the directory ``path`` to the archive.
        """
        for root, dirs, files in os.walk(path):
            arc_prefix = os.path.relpath(root, os.path.dirname(path))
            # py3 remove pyc cache dirs.
            if '__pycache__' in dirs:
                dirs.remove('__pycache__')

            for f in files:
                dest_path = os.path.join(arc_prefix, f)
                # ignore specific files
                if ignore and ignore(dest_path):
                    continue
                if f.endswith('.pyc') or f.endswith('.c'):
                    continue
                f_path = os.path.join(root, f)
                self.add_file(f_path, dest_path)

    def add_file(self, src, dest=None):
        """Add the file at ``src`` to the archive.

        If ``dest`` is ``None`` then it is added under just the original
        filename. So ``add_file('foo/bar.txt')`` ends up at ``bar.txt`` in the
        archive, while ``add_file('bar.txt', 'foo/bar.txt')`` ends up at
        ``foo/bar.txt``.

        """
        dest = dest or os.path.basename(src)
        with open(src, 'rb') as fp:
            contents = fp.read()
        self.add_contents(dest, contents)

    def add_py_file(self, src, dest=None):
        """This is a special case of :py:meth:`add_file` that helps for adding
        a ``py`` when a ``pyc`` may be present as well. So for example, if
        ``__file__`` is ``foo.pyc`` and you do:

        .. code-block:: python

          archive.add_py_file(__file__)

        then this method will add ``foo.py`` instead if it exists, and raise
        ``IOError`` if it doesn't.

        """
        src = src[:-1] if src.endswith('.pyc') else src
        self.add_file(src, dest)

    def add_contents(self, dest, contents):
        """Add file contents to the archive under ``dest``.

        If ``dest`` is a path, it will be added compressed and world-readable
        (user-writeable). You may also pass a :py:class:`~zipfile.ZipInfo` for
        custom behavior.

        """
        assert not self._closed, "Archive closed"
        dest = self.create_zinfo(dest)
        self._zip_file.writestr(dest, contents)

    def close(self):
        """Close the zip file.

        Note underlying tempfile is removed when archive is garbage collected.
        """
        self._closed = True
        self._zip_file.close()
        log.debug(
            "Created custodian serverless archive size: %0.2fmb",
            (os.path.getsize(self._temp_archive_file.name) / (
                1024.0 * 1024.0)))
        return self

    def remove(self):
        """Dispose of the temp file for garbage collection."""
        if self._temp_archive_file:
            self._temp_archive_file = None

    def get_checksum(self, encoder=base64.b64encode, hasher=hashlib.sha256):
        """Return the b64 encoded sha256 checksum of the archive."""
        assert self._closed, "Archive not closed"
        with open(self._temp_archive_file.name, 'rb') as fh:
            return encoder(checksum(fh, hasher())).decode('ascii')

    def get_bytes(self):
        """Return the entire zip file as a byte string. """
        assert self._closed, "Archive not closed"
        return self.get_stream().read()

    def get_stream(self):
        """Return the entire zip file as a stream. """
        assert self._closed, "Archive not closed"
        return open(self._temp_archive_file.name, 'rb')

    def get_reader(self):
        """Return a read-only :py:class:`~zipfile.ZipFile`."""
        assert self._closed, "Archive not closed"
        buf = io.BytesIO(self.get_bytes())
        return zipfile.ZipFile(buf, mode='r')

    def get_filenames(self):
        """Return a list of filenames in the archive."""
        return [n.filename for n in self.get_reader().filelist]
def get_exec_options(options):
    """Collect CLI output settings to carry into the serverless environment.

    Only truthy values are kept. A local filesystem ``output_dir`` (no
    "://" scheme) is dropped, as it would not exist inside the function.
    """
    carried_keys = ('log_group', 'tracer', 'output_dir', 'metrics_enabled')
    exec_opts = {k: options[k] for k in carried_keys if options[k]}
    out_dir = exec_opts.get('output_dir')
    if out_dir is not None and '://' not in out_dir:
        del exec_opts['output_dir']
    return exec_opts
def checksum(fh, hasher, blocksize=65536):
    """Stream *fh* through *hasher* in blocksize chunks; return the digest."""
    while True:
        block = fh.read(blocksize)
        if not block:
            return hasher.digest()
        hasher.update(block)
def generate_requirements(packages, ignore=(), exclude=(), include_self=False):
    """Generate frozen requirements file for the given set of packages

    if include_self is True we'll also include the packages in the generated
    requirements.
    """
    if pkgmd is None:
        raise ImportError("importlib_metadata missing")

    pkg_list = [packages] if isinstance(packages, str) else packages

    # Gather the transitive dependency closure.
    found = []
    for name in pkg_list:
        _package_deps(name, found, ignore=ignore)

    if include_self:
        found = list(set(found).union(pkg_list))

    # Pin each resolvable distribution to its installed version.
    pinned = []
    for dep in sorted(found):
        if dep in exclude:
            continue
        try:
            pinned.append(
                '%s==%s' % (dep, pkgmd.distribution(dep).version))
        except pkgmd.PackageNotFoundError:
            continue
    return '\n'.join(pinned)
def _package_deps(package, deps=None, ignore=()):
    """Recursive gather package's named transitive dependencies"""
    if deps is None:
        deps = []
    try:
        pdeps = pkgmd.requires(package) or ()
    except pkgmd.PackageNotFoundError:
        return deps
    for r in pdeps:
        # skip optional deps
        if ';' in r and 'extra' in r:
            continue
        # Requirement strings look like "name>=1.0"; take the leading run of
        # [alnum._-] characters as the distribution name.
        # NOTE(review): assumes r is non-empty — an empty requirement string
        # would leave idx unbound.
        for idx, c in enumerate(r):
            if not c.isalnum() and c not in ('-', '_', '.'):
                break
        if idx + 1 == len(r):
            # Loop ran to the end without hitting a terminator: the entire
            # string is the name.
            idx += 1
        pkg_name = r[:idx]
        if pkg_name in ignore:
            continue
        if pkg_name not in deps:
            try:
                # Recurse first so dependencies land before dependents.
                _package_deps(pkg_name, deps, ignore)
            except pkgmd.PackageNotFoundError:
                continue
            deps.append(pkg_name)
    return deps
def custodian_archive(packages=None):
    """Create a lambda code archive for running custodian.

    The archive always bundles ``c7n``; extra packages may be supplied either
    here or via the policy's mode block, e.g.::

      policy:
        name: lambda-archive-example
        resource: s3
        mode:
          packages:
            - botocore

    :param packages: optional iterable of additional package names.
    :return: a PythonPackageArchive over the sorted module set.
    """
    base = {'c7n'}
    if packages:
        base.update(packages)
    # Drop falsy entries (e.g. None slipped in via policy config).
    names = sorted(m for m in base if m)
    return PythonPackageArchive(names)
class LambdaManager:
    """ Provides CRUD operations around lambda functions
    """

    def __init__(self, session_factory, s3_asset_path=None):
        self.session_factory = session_factory
        self.client = self.session_factory().client('lambda')
        self.s3_asset_path = s3_asset_path

    def list_functions(self, prefix=None):
        """Yield the account's lambda functions, optionally filtered by
        function-name prefix."""
        p = self.client.get_paginator('list_functions')
        for rp in p.paginate():
            for f in rp.get('Functions', []):
                if not prefix:
                    yield f
                elif f['FunctionName'].startswith(prefix):
                    yield f

    def publish(self, func, alias=None, role=None, s3_uri=None):
        """Create or update *func* in Lambda, then bind its event sources.

        Sets func.arn and func.alias as side effects; returns the raw
        create/update API response.
        """
        result, changed = self._create_or_update(
            func, role, s3_uri, qualifier=alias)
        func.arn = result['FunctionArn']
        if alias and changed:
            func.alias = self.publish_alias(result, alias)
        elif alias:
            func.alias = "%s:%s" % (func.arn, alias)
        else:
            func.alias = func.arn

        for e in func.get_events(self.session_factory):
            if e.add(func):
                log.debug(
                    "Added event source: %s to function: %s",
                    e, func.alias)
        return result

    # Backwards-compatible synonym for publish.
    add = publish

    def remove(self, func, alias=None):
        """Unbind event sources and delete the function (idempotent on
        already-deleted functions)."""
        for e in func.get_events(self.session_factory):
            e.remove(func, func_deleted=True)
        log.info("Removing lambda function %s", func.name)
        try:
            self.client.delete_function(FunctionName=func.name)
        except self.client.exceptions.ResourceNotFoundException:
            pass

    @staticmethod
    def delta_function(old_config, new_config):
        """Return the list of configuration keys whose values differ between
        a described (old) and desired (new) function configuration."""
        changed = []
        for k in new_config:
            # Layers need special handling as they have extra info on describe.
            if k == 'Layers' and k in old_config and new_config[k]:
                if sorted(new_config[k]) != sorted([lyr['Arn'] for lyr in old_config[k]]):
                    changed.append(k)
            # Vpc needs special handling as a dict with lists
            elif k == 'VpcConfig' and k in old_config and new_config[k]:
                if set(old_config[k]['SubnetIds']) != set(
                        new_config[k]['SubnetIds']):
                    changed.append(k)
                elif set(old_config[k]['SecurityGroupIds']) != set(
                        new_config[k]['SecurityGroupIds']):
                    changed.append(k)
            elif k not in old_config:
                # Absent on describe + empty default in new config == no change.
                if k in LAMBDA_EMPTY_VALUES and LAMBDA_EMPTY_VALUES[k] == new_config[k]:
                    continue
                changed.append(k)
            # For role we allow name only configuration
            elif k == 'Role':
                if (new_config[k] != old_config[k] and
                        not old_config[k].split('/', 1)[1] == new_config[k]):
                    changed.append(k)
            elif new_config[k] != old_config[k]:
                changed.append(k)
        return changed

    @staticmethod
    def diff_tags(old_tags, new_tags):
        """Return (tags_to_add_or_update, tag_keys_to_remove) to transform
        old_tags into new_tags."""
        add = {}
        remove = set()
        for k, v in new_tags.items():
            if k not in old_tags or old_tags[k] != v:
                add[k] = v
        for k in old_tags:
            if k not in new_tags:
                remove.add(k)
        return add, list(remove)

    def _create_or_update(self, func, role=None, s3_uri=None, qualifier=None):
        """Create the function, or update code/config/tags/concurrency if it
        already exists. Returns (api_result, changed)."""
        role = func.role or role
        assert role, "Lambda function role must be specified"
        archive = func.get_archive()
        existing = self.get(func.name, qualifier)

        if s3_uri:
            # TODO: support versioned buckets
            bucket, key = self._upload_func(s3_uri, func, archive)
            code_ref = {'S3Bucket': bucket, 'S3Key': key}
        else:
            code_ref = {'ZipFile': archive.get_bytes()}

        changed = False
        if existing:
            result = old_config = existing['Configuration']
            # Only push code when the archive checksum actually differs.
            if archive.get_checksum() != old_config['CodeSha256']:
                log.debug("Updating function %s code", func.name)
                params = dict(FunctionName=func.name, Publish=True)
                params.update(code_ref)
                result = self.client.update_function_code(**params)
                waiter = self.client.get_waiter('function_updated')
                waiter.wait(FunctionName=func.name)
                changed = True
            # TODO/Consider also set publish above to false, and publish
            # after configuration change?

            new_config = func.get_config()
            new_config['Role'] = role

            if self._update_tags(existing, new_config.pop('Tags', {})):
                changed = True

            if self._update_architecture(func, existing,
                    new_config.pop('Architectures', ["x86_64"]), code_ref):
                changed = True

            config_changed = self.delta_function(old_config, new_config)
            if config_changed:
                log.debug("Updating function: %s config %s",
                          func.name, ", ".join(sorted(config_changed)))
                result = self.client.update_function_configuration(**new_config)
                changed = True

            if self._update_concurrency(existing, func):
                changed = True
        else:
            log.info('Publishing custodian policy lambda function %s', func.name)
            params = func.get_config()
            params.update({'Publish': True, 'Code': code_ref, 'Role': role})
            result = self.client.create_function(**params)
            self._update_concurrency(None, func)
            waiter = self.client.get_waiter('function_active')
            waiter.wait(FunctionName=func.name)
            changed = True

        return result, changed

    def _update_concurrency(self, existing, func):
        """Reconcile reserved concurrency; returns True when an API call was
        made (delete returns True; put falls through returning None)."""
        e_concurrency = None
        if existing:
            e_concurrency = existing.get('Concurrency', {}).get(
                'ReservedConcurrentExecutions')
        if e_concurrency == func.concurrency:
            return
        elif e_concurrency is not None and func.concurrency is None:
            log.debug("Removing function: %s concurrency", func.name)
            self.client.delete_function_concurrency(
                FunctionName=func.name)
            return True
        log.debug("Updating function: %s concurrency", func.name)
        self.client.put_function_concurrency(
            FunctionName=func.name,
            ReservedConcurrentExecutions=func.concurrency)

    def _update_architecture(self, func, existing, new_architecture, code_ref):
        """Re-push the code with a new Architectures value when it differs
        from the deployed one; returns True if an update was made."""
        existing_config = existing.get('Configuration', {})
        existing_architecture = existing_config.get('Architectures', ["x86_64"])
        diff = existing_architecture != new_architecture
        changed = False
        if diff:
            log.debug("Updating function architecture: %s" % func.name)
            params = dict(FunctionName=func.name, Publish=True,
                          Architectures=new_architecture)
            params.update(code_ref)
            self.client.update_function_code(**params)
            changed = True
        return changed

    def _update_tags(self, existing, new_tags):
        """Apply tag additions/removals; returns True if anything changed."""
        # tag dance
        base_arn = existing['Configuration']['FunctionArn']
        if base_arn.count(':') > 6:  # trim version/alias
            base_arn = base_arn.rsplit(':', 1)[0]

        tags_to_add, tags_to_remove = self.diff_tags(
            existing.get('Tags', {}), new_tags)
        changed = False
        if tags_to_add:
            log.debug("Updating function tags: %s" % base_arn)
            self.client.tag_resource(Resource=base_arn, Tags=tags_to_add)
            changed = True
        if tags_to_remove:
            log.debug("Removing function stale tags: %s" % base_arn)
            self.client.untag_resource(Resource=base_arn, TagKeys=tags_to_remove)
            changed = True
        return changed

    def _upload_func(self, s3_uri, func, archive):
        """Upload the archive under s3_uri/<func name>; returns (bucket, key)."""
        from boto3.s3.transfer import S3Transfer, TransferConfig
        _, bucket, key_prefix = parse_s3(s3_uri)
        key = "%s/%s" % (key_prefix, func.name)
        transfer = S3Transfer(
            self.session_factory().client('s3'),
            config=TransferConfig(
                multipart_threshold=1024 * 1024 * 4))
        transfer.upload_file(
            archive.path,
            bucket=bucket,
            key=key,
            extra_args={
                'ServerSideEncryption': 'AES256'})
        return bucket, key

    def publish_alias(self, func_data, alias):
        """Create or update an alias for the given function.
        """
        if not alias:
            return func_data['FunctionArn']
        func_name = func_data['FunctionName']
        func_version = func_data['Version']

        exists = resource_exists(
            self.client.get_alias, FunctionName=func_name, Name=alias)

        if not exists:
            log.debug("Publishing custodian lambda alias %s", alias)
            alias_result = self.client.create_alias(
                FunctionName=func_name,
                Name=alias,
                FunctionVersion=func_version)
        else:
            if (exists['FunctionVersion'] == func_version and
                    exists['Name'] == alias):
                return exists['AliasArn']
            log.debug('Updating custodian lambda alias %s', alias)
            alias_result = self.client.update_alias(
                FunctionName=func_name,
                Name=alias,
                FunctionVersion=func_version)
        return alias_result['AliasArn']

    def get(self, func_name, qualifier=None):
        """Describe a function (optionally at a qualifier); False if absent."""
        params = {'FunctionName': func_name}
        if qualifier:
            params['Qualifier'] = qualifier
        return resource_exists(
            self.client.get_function, **params)
def resource_exists(op, *args, NotFound="ResourceNotFoundException", **kw):
    """Call *op*; return its result, or False when the resource is absent.

    :param op: callable to invoke (typically a boto3 client method).
    :param NotFound: error code treated as "resource does not exist".
        Keyword-only: it previously sat before ``*args``, so any extra
        positional argument was silently consumed as the error code instead
        of being forwarded to *op*.
    :return: op's return value, or False when it raises ClientError with
        the NotFound code.
    :raises ClientError: re-raised for any other error code.
    """
    try:
        return op(*args, **kw)
    except ClientError as e:
        if e.response['Error']['Code'] == NotFound:
            return False
        raise
class AbstractLambdaFunction:
    """Abstract base class for lambda functions."""
    # NOTE(review): "__metaclass__" is the Python 2 spelling and has no
    # effect on Python 3, so abstractness is not actually enforced at
    # instantiation time here.
    __metaclass__ = abc.ABCMeta

    alias = None

    @abc.abstractproperty
    def name(self):
        """Name for the lambda function"""

    @abc.abstractproperty
    def event_name(self):
        """Name for event sources"""

    @abc.abstractproperty
    def runtime(self):
        """Value for the Runtime configuration key."""

    @abc.abstractproperty
    def description(self):
        """Value for the Description configuration key."""

    @abc.abstractproperty
    def handler(self):
        """Value for the Handler configuration key."""

    @abc.abstractproperty
    def memory_size(self):
        """Value for the MemorySize configuration key."""

    @abc.abstractproperty
    def timeout(self):
        """Value for the Timeout configuration key."""

    @abc.abstractproperty
    def role(self):
        """IAM role for the function."""

    @abc.abstractproperty
    def subnets(self):
        """Subnet ids for VpcConfig (with security_groups)."""

    @abc.abstractproperty
    def security_groups(self):
        """Security group ids for VpcConfig (with subnets)."""

    @abc.abstractproperty
    def dead_letter_config(self):
        """Value for the DeadLetterConfig configuration key."""

    @abc.abstractproperty
    def environment(self):
        """Environment dict of the form {'Variables': {...}}."""

    @abc.abstractproperty
    def kms_key_arn(self):
        """Value for the KMSKeyArn configuration key."""

    @abc.abstractproperty
    def tracing_config(self):
        """Value for the TracingConfig configuration key."""

    @abc.abstractproperty
    def tags(self):
        """Tags dict for the function."""

    @abc.abstractproperty
    def layers(self):
        """Layer ARNs to attach, if any."""

    @abc.abstractproperty
    def concurrency(self):
        """Reserved concurrency, or None for unreserved."""

    @abc.abstractmethod
    def get_events(self, session_factory):
        """event sources that should be bound to this lambda."""

    @abc.abstractmethod
    def get_archive(self):
        """Return the lambda distribution archive object."""

    @abc.abstractproperty
    def architectures(self):
        """Architectures list (e.g. ["x86_64"]), if set."""

    def get_config(self):
        """Assemble the parameter dict for create_function /
        update_function_configuration; optional keys are added only
        when set."""
        conf = {
            'FunctionName': self.name,
            'MemorySize': self.memory_size,
            'Role': self.role,
            'Description': self.description,
            'Runtime': self.runtime,
            'Handler': self.handler,
            'Timeout': self.timeout,
            'TracingConfig': self.tracing_config,
            'KMSKeyArn': self.kms_key_arn,
            'DeadLetterConfig': self.dead_letter_config,
            'VpcConfig': LAMBDA_EMPTY_VALUES['VpcConfig'],
            'Tags': self.tags}

        if self.layers:
            conf['Layers'] = self.layers
        if self.environment['Variables']:
            conf['Environment'] = self.environment
        if self.subnets and self.security_groups:
            conf['VpcConfig'] = {
                'SubnetIds': self.subnets,
                'SecurityGroupIds': self.security_groups}
        if self.architectures:
            conf['Architectures'] = self.architectures
        return conf
# Neutral defaults for optional lambda settings; also used when diffing a
# deployed function's configuration against a rendered one.
LAMBDA_EMPTY_VALUES = {
    'Environment': {'Variables': {}},
    'DeadLetterConfig': {},
    'TracingConfig': {'Mode': 'PassThrough'},
    'VpcConfig': {'SubnetIds': [], 'SecurityGroupIds': []},
    'KMSKeyArn': '',
}
class LambdaFunction(AbstractLambdaFunction):
    """Lambda function built from a raw settings dict plus an archive.

    `func_data` supplies the runtime settings; the keys listed in
    `required` below are mandatory, the rest mirror the corresponding
    lambda api parameters and fall back to neutral defaults.
    """

    def __init__(self, func_data, archive):
        self.func_data = func_data
        required = {
            'name', 'handler', 'memory_size',
            'timeout', 'role', 'runtime',
            'description'}
        missing = required.difference(func_data)
        if missing:
            raise ValueError("Missing required keys %s" % " ".join(missing))
        self.archive = archive

    @property
    def name(self):
        return self.func_data['name']

    event_name = name

    @property
    def description(self):
        return self.func_data['description']

    @property
    def handler(self):
        return self.func_data['handler']

    @property
    def memory_size(self):
        return self.func_data['memory_size']

    @property
    def timeout(self):
        return self.func_data['timeout']

    @property
    def runtime(self):
        return self.func_data['runtime']

    @property
    def role(self):
        return self.func_data['role']

    @property
    def layers(self):
        return self.func_data.get('layers', ())

    @property
    def concurrency(self):
        return self.func_data.get('concurrency')

    @property
    def security_groups(self):
        return self.func_data.get('security_groups', None)

    @property
    def subnets(self):
        return self.func_data.get('subnets', None)

    @property
    def dead_letter_config(self):
        return self.func_data.get(
            'dead_letter_config', LAMBDA_EMPTY_VALUES['DeadLetterConfig'])

    @property
    def environment(self):
        return self.func_data.get(
            'environment', LAMBDA_EMPTY_VALUES['Environment'])

    @property
    def kms_key_arn(self):
        return self.func_data.get('kms_key_arn', '')

    @property
    def tracing_config(self):
        # Default
        return self.func_data.get(
            'tracing_config', LAMBDA_EMPTY_VALUES['TracingConfig'])

    @property
    def tags(self):
        return self.func_data.get('tags', {})

    @property
    def architectures(self):
        # Previously unimplemented here (the inherited abstract stub just
        # returned None); allow opting into arm64 via func_data for parity
        # with PolicyLambda.  The falsy default keeps 'Architectures' out
        # of get_config() output, preserving prior behavior.
        return self.func_data.get('architectures', ())

    def get_events(self, session_factory):
        return self.func_data.get('events', ())

    def get_archive(self):
        return self.archive
PolicyHandlerTemplate = """\
from c7n import handler
def run(event, context):
return handler.dispatch_event(event, context)
"""
class PolicyLambda(AbstractLambdaFunction):
    """Wraps a custodian policy to turn it into a lambda function.

    All runtime settings come from the policy's `mode` block; each
    property below documents its default.
    """

    def __init__(self, policy):
        self.policy = policy
        self.archive = custodian_archive(packages=self.packages)

    @property
    def name(self):
        # Function name is the policy name behind a configurable prefix.
        prefix = self.policy.data['mode'].get('function-prefix', 'custodian-')
        return "%s%s" % (prefix, self.policy.name)

    event_name = name

    @property
    def description(self):
        return self.policy.data.get(
            'description', 'cloud-custodian lambda policy')

    @property
    def handler(self):
        return self.policy.data['mode'].get('handler', 'custodian_policy.run')

    @property
    def role(self):
        return self.policy.data['mode'].get('role', '')

    @property
    def runtime(self):
        return self.policy.data['mode'].get('runtime', 'python3.9')

    @property
    def memory_size(self):
        return self.policy.data['mode'].get('memory', 512)

    @property
    def timeout(self):
        # Default to the lambda maximum (15 minutes).
        return self.policy.data['mode'].get('timeout', 900)

    @property
    def security_groups(self):
        return self.policy.data['mode'].get('security_groups', None)

    @property
    def subnets(self):
        return self.policy.data['mode'].get('subnets', None)

    @property
    def dead_letter_config(self):
        return self.policy.data['mode'].get(
            'dead_letter_config', LAMBDA_EMPTY_VALUES['DeadLetterConfig'])

    @property
    def environment(self):
        return self.policy.data['mode'].get(
            'environment', LAMBDA_EMPTY_VALUES['Environment'])

    @property
    def kms_key_arn(self):
        return self.policy.data['mode'].get('kms_key_arn', '')

    @property
    def tracing_config(self):
        # Default
        return self.policy.data['mode'].get(
            'tracing_config', {'Mode': 'PassThrough'})

    @property
    def tags(self):
        return self.policy.data['mode'].get('tags', {})

    @property
    def concurrency(self):
        return self.policy.data['mode'].get('concurrency')

    @property
    def layers(self):
        return self.policy.data['mode'].get('layers', ())

    @property
    def packages(self):
        return self.policy.data['mode'].get('packages')

    @property
    def architectures(self):
        # Match the architecture of the deploying machine so any bundled
        # native dependencies are loadable at runtime.
        architecture = []
        arm64_arch = ('aarch64', 'arm64')
        if platform.machine().lower() in arm64_arch:
            architecture.append('arm64')
        else:
            architecture.append('x86_64')
        return architecture

    def get_events(self, session_factory):
        """Event sources for the policy's execution mode.

        Config modes bind a config rule, hub-action binds a security hub
        custom action; everything else goes through cloudwatch events.
        """
        events = []
        if self.policy.data['mode']['type'] in (
                'config-rule', 'config-poll-rule'):
            events.append(
                ConfigRule(self.policy.data['mode'], session_factory))
        elif self.policy.data['mode']['type'] == 'hub-action':
            events.append(
                SecurityHubAction(self.policy, session_factory))
        else:
            events.append(
                CloudWatchEventSource(
                    self.policy.data['mode'], session_factory))
        return events

    def get_archive(self):
        # Bundle the serialized policy + execution options and the
        # generated handler module, then seal the archive.
        self.archive.add_contents(
            'config.json', json.dumps(
                {'execution-options': get_exec_options(self.policy.options),
                 'policies': [self.policy.data]}, indent=2))
        self.archive.add_contents('custodian_policy.py', PolicyHandlerTemplate)
        self.archive.close()
        return self.archive
def zinfo(fname):
    """Return a ZipInfo whose permissions lambda can actually read.

    Lambda extracts archive code as one user and executes it as another;
    the 0600 mode that zipfile.writestr applies by default would leave
    the files unreadable to the executing user, breaking the function.
    Work around that by granting world-read (0644).

    http://unix.stackexchange.com/questions/14705/
    """
    entry = zipfile.ZipInfo(fname)
    # Unix mode lives in the upper 16 bits of external_attr.
    entry.external_attr = 0o644 << 16
    return entry
class AWSEventBase:
    """for AWS Event Sources that want to utilize lazy client initialization.

    Primarily utilized by sources that support static rendering to
    IAAC templates (tools/ops/policylambda.py) to do so in an account
    agnostic fashion.
    """
    # boto service name used by the lazily constructed `client` property.
    client_service = None

    def __init__(self, data, session_factory):
        self.session_factory = session_factory
        self._session = None
        self._client = None
        self.data = data

    @property
    def session(self):
        # Construct the session only on first use.
        if not self._session:
            self._session = self.session_factory()
        return self._session

    @property
    def client(self):
        if not self._client:
            self._client = self.session.client(self.client_service)
        return self._client

    def remove_permissions(self, func, remove_permission):
        """Remove the lambda invoke permission this source granted.

        Returns True when nothing needed doing or the removal succeeded.
        """
        # typically the entire function will be deleted so we dont
        # need to bother with removing the permission explicitly
        if not remove_permission:
            return True
        client = self.session.client("lambda")
        try:
            LambdaConflictRetry(
                client.remove_permission,
                FunctionName=func.name,
                StatementId=func.event_name,
            )
            return True
        # bugfix: boto modeled exceptions live under client.exceptions
        # (the pattern used everywhere else in this module); the bare
        # attribute raised AttributeError instead of catching.
        except client.exceptions.ResourceNotFoundException:
            pass
class CloudWatchEventSource(AWSEventBase):
    """Subscribe a lambda to cloud watch events.

    Cloud watch events supports a number of different event
    sources, from periodic timers with cron syntax, to
    real time instance state notifications, cloud trail
    events, and realtime asg membership changes.

    Event Pattern for Instance State

    .. code-block:: json

       {
         "source": ["aws.ec2"],
         "detail-type": ["EC2 Instance State-change Notification"],
         "detail": { "state": ["pending"]}
       }

    Event Pattern for Cloud Trail API

    .. code-block:: json

       {
         "detail-type": ["AWS API Call via CloudTrail"],
         "detail": {
            "eventSource": ["s3.amazonaws.com"],
            "eventName": ["CreateBucket", "DeleteBucket"]
         }
       }
    """
    # Friendly aliases accepted by the asg-instance-state mode.
    ASG_EVENT_MAPPING = {
        'launch-success': 'EC2 Instance Launch Successful',
        'launch-failure': 'EC2 Instance Launch Unsuccessful',
        'terminate-success': 'EC2 Instance Terminate Successful',
        'terminate-failure': 'EC2 Instance Terminate Unsuccessful'}

    client_service = 'events'

    def get(self, rule_name):
        # Rule description mapping, or False when the rule doesn't exist.
        return resource_exists(self.client.describe_rule, Name=rule_name)

    @staticmethod
    def delta(src, tgt):
        """Given two cwe rules determine if the configuration is the same.

        Name is already implied.
        """
        for k in ['State', 'EventPattern', 'ScheduleExpression']:
            if src.get(k) != tgt.get(k):
                return True
        return False

    def __repr__(self):
        return "<CWEvent Type:%s Events:%s>" % (
            self.data.get('type'),
            ', '.join(map(str, self.data.get('events', []))))

    def resolve_cloudtrail_payload(self, payload):
        """Fill payload['detail'] with the cloudtrail sources/events.

        Events may be bare names (looked up in the shipped
        CloudWatchEvents registry) or {'event': ..., 'source': ...}
        mappings for APIs not in the registry.
        """
        sources = self.data.get('sources', [])
        events = []
        for e in self.data.get('events'):
            if not isinstance(e, dict):
                events.append(e)
                event_info = CloudWatchEvents.get(e)
                if event_info is None:
                    # Unknown shorthand; keep the event name but we have
                    # no source to contribute for it.
                    continue
            else:
                event_info = e
                events.append(e['event'])
            sources.append(event_info['source'])

        payload['detail'] = {
            'eventSource': list(set(sources)),
            'eventName': events}

    def render_event_pattern(self):
        """Render the mode into a cwe event pattern json string.

        Returns None for purely scheduled (periodic) rules.
        """
        event_type = self.data.get('type')
        pattern = self.data.get('pattern')
        payload = {}
        if pattern:
            payload.update(pattern)
        if event_type == 'cloudtrail':
            payload['detail-type'] = ['AWS API Call via CloudTrail']
            self.resolve_cloudtrail_payload(payload)

        if event_type == 'cloudtrail':
            # Console sign-in events use a distinct detail-type.
            if 'signin.amazonaws.com' in payload['detail']['eventSource']:
                payload['detail-type'] = ['AWS Console Sign In via CloudTrail']
        elif event_type == 'guard-duty':
            payload['source'] = ['aws.guardduty']
            payload['detail-type'] = ['GuardDuty Finding']
            if 'resource-filter' in self.data:
                payload.update({
                    'detail': {'resource': {'resourceType': [self.data['resource-filter']]}}})
        elif event_type == "ec2-instance-state":
            payload['source'] = ['aws.ec2']
            payload['detail-type'] = [
                "EC2 Instance State-change Notification"]
            # Technically could let empty be all events, but likely misconfig
            payload['detail'] = {"state": self.data.get('events', [])}
        elif event_type == "asg-instance-state":
            payload['source'] = ['aws.autoscaling']
            events = []
            for e in self.data.get('events', []):
                # Translate friendly aliases, pass through raw names.
                events.append(self.ASG_EVENT_MAPPING.get(e, e))
            payload['detail-type'] = events
        elif event_type == 'phd':
            payload['source'] = ['aws.health']
            payload.setdefault('detail', {})
            if self.data.get('events'):
                payload['detail'].update({
                    'eventTypeCode': list(self.data['events'])
                })
            if self.data.get('categories', []):
                payload['detail']['eventTypeCategory'] = self.data['categories']
            if not payload['detail']:
                payload.pop('detail')
        elif event_type == 'hub-finding':
            payload['source'] = ['aws.securityhub']
            payload['detail-type'] = ['Security Hub Findings - Imported']
        elif event_type == 'hub-action':
            payload['source'] = ['aws.securityhub']
            payload['detail-type'] = [
                'Security Hub Findings - Custom Action',
                'Security Hub Insight Results']
        elif event_type == 'periodic':
            # Schedule-only rule; no pattern needed.
            pass
        else:
            raise ValueError(
                "Unknown lambda event source type: %s" % event_type)
        if not payload:
            return None
        if self.data.get('pattern'):
            # User supplied pattern wins on conflicting keys.
            payload = merge_dict(payload, self.data['pattern'])
        return json.dumps(payload)

    def add(self, func):
        """Create/update the rule, grant invoke permission, add target."""
        params = dict(
            Name=func.event_name, Description=func.description, State='ENABLED')

        pattern = self.render_event_pattern()
        if pattern:
            params['EventPattern'] = pattern
        schedule = self.data.get('schedule')
        if schedule:
            params['ScheduleExpression'] = schedule

        rule = self.get(func.event_name)

        if rule and self.delta(rule, params):
            log.debug("Updating cwe rule for %s" % func.event_name)
            response = self.client.put_rule(**params)
        elif not rule:
            log.debug("Creating cwe rule for %s" % (self))
            response = self.client.put_rule(**params)
        else:
            response = {'RuleArn': rule['Arn']}

        client = self.session.client('lambda')
        try:
            client.add_permission(
                FunctionName=func.name,
                StatementId=func.event_name,
                SourceArn=response['RuleArn'],
                Action='lambda:InvokeFunction',
                Principal='events.amazonaws.com')
            log.debug('Added lambda invoke cwe rule permission')
        except client.exceptions.ResourceConflictException:
            # Permission already granted.
            pass

        # Add Targets
        found = False
        response = RuleRetry(self.client.list_targets_by_rule, Rule=func.event_name)
        # CloudWatchE seems to be quite picky about function arns (no aliases/versions)
        func_arn = func.arn

        if func_arn.count(':') > 6:
            func_arn, version = func_arn.rsplit(':', 1)
        for t in response['Targets']:
            if func_arn == t['Arn']:
                found = True

        if found:
            return

        log.debug('Creating cwe rule target for %s on func:%s' % (
            self, func_arn))

        self.client.put_targets(
            Rule=func.event_name, Targets=[{"Id": func.event_name, "Arn": func_arn}])

        return True

    def update(self, func):
        self.add(func)

    def pause(self, func):
        # Best effort; a missing rule is not an error.
        try:
            self.client.disable_rule(Name=func.event_name)
        except ClientError:
            pass

    def resume(self, func):
        try:
            self.client.enable_rule(Name=func.event_name)
        except ClientError:
            pass

    def remove(self, func, func_deleted=True):
        """Remove rule targets, the rule, and (optionally) permissions."""
        if self.get(func.event_name):
            log.info("Removing cwe targets and rule %s", func.event_name)
            try:
                targets = self.client.list_targets_by_rule(
                    Rule=func.event_name)['Targets']
                if targets:
                    self.client.remove_targets(
                        Rule=func.event_name,
                        Ids=[t['Id'] for t in targets])
            except ClientError as e:
                log.warning(
                    "Could not remove targets for rule %s error: %s",
                    func.name, e)
            self.client.delete_rule(Name=func.event_name)
        self.remove_permissions(func, remove_permission=not func_deleted)
        return True
class SecurityHubAction:
    """Bind a policy to a Security Hub custom action.

    Manages both the custom action target in Security Hub and the
    cloudwatch event rule that routes action invocations to the lambda.
    """

    def __init__(self, policy, session_factory):
        self.policy = policy
        self.session_factory = session_factory
        # Route only events for this policy's action arn to the lambda.
        cwe_data = self.policy.data['mode']
        cwe_data['pattern'] = {'resources': [self._get_arn()]}
        self.cwe = CloudWatchEventSource(
            cwe_data, session_factory)

    def __repr__(self):
        return "<SecurityHub Action %s>" % self.policy.name

    def _get_arn(self):
        # Deterministic custom action arn derived from the policy name.
        return 'arn:aws:securityhub:%s:%s:action/custom/%s' % (
            self.policy.options.region,
            self.policy.options.account_id,
            self.policy.name)

    def delta(self, src, tgt):
        """Return True when the action target's config differs."""
        for k in ('Name', 'Description'):
            if src[k] != tgt[k]:
                return True
        return False

    def get(self, name):
        """Fetch both the cwe rule and the hub action target, if present."""
        client = local_session(self.session_factory).client('securityhub')
        subscriber = self.cwe.get(name)
        arn = self._get_arn()
        actions = client.describe_action_targets(
            ActionTargetArns=[arn]).get('ActionTargets', ())
        assert len(actions) in (0, 1), "Found duplicate action %s" % (
            actions,)
        action = actions and actions.pop() or None
        return {'event': subscriber, 'action': action}

    def add(self, func):
        """Create or update the action target and its event routing."""
        self.cwe.add(func)
        client = local_session(self.session_factory).client('securityhub')
        action = self.get(func.event_name).get('action')
        arn = self._get_arn()
        params = {'Name': (
            self.policy.data.get('title') or (
                "%s %s" % (self.policy.resource_type.split('.')[-1].title(),
                           self.policy.name))),
            'Description': (
                self.policy.data.get('description') or
                self.policy.data.get('title') or
                self.policy.name),
            'Id': self.policy.name}
        # Security hub caps description length.
        params['Description'] = params['Description'].strip()[:500]

        if not action:
            log.debug('Creating SecurityHub Action %s' % arn)
            return client.create_action_target(
                **params).get('ActionTargetArn')
        # Id is only valid on create.
        params.pop('Id')
        if self.delta(action, params):
            log.debug('Updating SecurityHub Action %s' % arn)
            client.update_action_target(ActionTargetArn=arn, **params)
        return arn

    def update(self, func):
        self.cwe.update(func)
        self.add(func)

    def remove(self, func, func_deleted=True):
        self.cwe.remove(func, func_deleted)
        client = local_session(self.session_factory).client('securityhub')
        client.delete_action_target(ActionTargetArn=self._get_arn())
class BucketLambdaNotification:
    """ Subscribe a lambda to bucket notifications directly. """

    def __init__(self, data, session_factory, bucket):
        self.data = data
        self.session_factory = session_factory
        self.session = session_factory()
        self.bucket = bucket

    def delta(self, src, tgt):
        """Return True when the two notification entries differ."""
        # bugfix: the s3 api key for key filters is 'Filter', not 'Filters'.
        for k in ['Id', 'LambdaFunctionArn', 'Events', 'Filter']:
            if src.get(k) != tgt.get(k):
                return True
        return False

    def _get_notifies(self, s3, func):
        """Fetch the bucket notification config and our entry, if any."""
        notifies = s3.get_bucket_notification_configuration(
            Bucket=self.bucket['Name'])
        found = False
        for f in notifies.get('LambdaFunctionConfigurations', []):
            if f['Id'] != func.name:
                continue
            found = f
        return notifies, found

    def add(self, func):
        """Wire the lambda into the bucket's notification configuration."""
        s3 = self.session.client('s3')
        notifies, found = self._get_notifies(s3, func)
        notifies.pop('ResponseMetadata', None)
        # Bucket notifications can't target a qualified (versioned) arn.
        func_arn = func.arn
        if func_arn.rsplit(':', 1)[-1].isdigit():
            func_arn = func_arn.rsplit(':', 1)[0]
        n_params = {
            'Id': func.name,
            'LambdaFunctionArn': func_arn,
            'Events': self.data.get('events', ['s3:ObjectCreated:*'])}
        if self.data.get('filters'):
            # bugfix: previously referenced the undefined attribute
            # `self.filters` (AttributeError) under the invalid 'Filters'
            # api key.
            n_params['Filter'] = {
                'Key': {'FilterRules': self.data['filters']}}

        if found:
            if self.delta(found, n_params):
                # Stale entry; drop it and re-add below.
                notifies['LambdaFunctionConfigurations'].remove(found)
            else:
                log.info("Bucket lambda notification present")
                return

        lambda_client = self.session.client('lambda')
        params = dict(
            FunctionName=func.name,
            StatementId=self.bucket['Name'],
            Action='lambda:InvokeFunction',
            Principal='s3.amazonaws.com')
        if self.data.get('account_s3'):
            params['SourceAccount'] = self.data['account_s3']
            params['SourceArn'] = 'arn:aws:s3:::*'
        else:
            params['SourceArn'] = 'arn:aws:s3:::%s' % self.bucket['Name']

        try:
            lambda_client.add_permission(**params)
        except lambda_client.exceptions.ResourceConflictException:
            # Permission already granted.
            pass

        notifies.setdefault('LambdaFunctionConfigurations', []).append(n_params)
        s3.put_bucket_notification_configuration(
            Bucket=self.bucket['Name'], NotificationConfiguration=notifies)
        return True

    def remove(self, func, func_deleted=True):
        """Detach the lambda from the bucket's notification config."""
        s3 = self.session.client('s3')
        notifies, found = self._get_notifies(s3, func)
        if not found:
            return

        lambda_client = self.session.client('lambda')
        if not func_deleted:
            # Function still exists; revoke its s3 invoke permission.
            try:
                response = lambda_client.remove_permission(
                    FunctionName=func.name,
                    StatementId=self.bucket['Name'])
                log.debug("Removed lambda permission result: %s" % response)
            except lambda_client.exceptions.ResourceNotFoundException:
                pass

        notifies['LambdaFunctionConfigurations'].remove(found)
        notifies.pop("ResponseMetadata")
        s3.put_bucket_notification_configuration(
            Bucket=self.bucket['Name'],
            NotificationConfiguration=notifies)
        return True
class CloudWatchLogSubscription:
    """ Subscribe a lambda to a log group[s]
    """
    # Grace period for IAM eventual consistency after a permission grant.
    iam_delay = 1.5

    def __init__(self, session_factory, log_groups, filter_pattern):
        self.log_groups = log_groups
        self.filter_pattern = filter_pattern
        self.session_factory = session_factory
        self.session = session_factory()
        self.client = self.session.client('logs')

    def add(self, func):
        """Grant invoke permission and put a subscription filter per group."""
        lambda_client = self.session.client('lambda')
        for group in self.log_groups:
            log.info(
                "Creating subscription filter for %s" % group['logGroupName'])
            region = group['arn'].split(':', 4)[3]
            try:
                lambda_client.add_permission(
                    FunctionName=func.name,
                    StatementId=group['logGroupName'][1:].replace('/', '-'),
                    SourceArn=group['arn'],
                    Action='lambda:InvokeFunction',
                    Principal='logs.%s.amazonaws.com' % region)
                # bugfix: message previously read "ipo nvoke".
                log.debug("Added lambda invoke log group permission")
                # iam eventual consistency and propagation
                time.sleep(self.iam_delay)
            except lambda_client.exceptions.ResourceConflictException:
                pass
            # Consistent put semantics / ie no op if extant
            self.client.put_subscription_filter(
                logGroupName=group['logGroupName'],
                filterName=func.event_name,
                filterPattern=self.filter_pattern,
                destinationArn=func.alias or func.arn)

    def remove(self, func, func_deleted=True):
        """Remove subscription filters; True when anything was removed."""
        lambda_client = self.session.client('lambda')
        found = False
        for group in self.log_groups:
            # if the function isn't deleted we need to do some cleanup
            if not func_deleted:
                try:
                    response = lambda_client.remove_permission(
                        FunctionName=func.name,
                        StatementId=group['logGroupName'][1:].replace('/', '-'))
                    log.debug("Removed lambda permission result: %s" % response)
                    found = True
                except lambda_client.exceptions.ResourceNotFoundException:
                    pass
            try:
                response = self.client.delete_subscription_filter(
                    logGroupName=group['logGroupName'],
                    filterName=func.event_name)
                log.debug("Removed subscription filter from: %s",
                          group['logGroupName'])
                found = True
            except lambda_client.exceptions.ResourceNotFoundException:
                pass
        return found
class SQSSubscription:
    """ Subscribe a lambda to one or more SQS queues.
    """

    def __init__(self, session_factory, queue_arns, batch_size=10):
        self.queue_arns = queue_arns
        self.session_factory = session_factory
        self.batch_size = batch_size

    def add(self, func):
        """Create or re-enable an event source mapping per queue.

        Returns True when anything was created or updated.
        """
        client = local_session(self.session_factory).client('lambda')
        event_mappings = {
            m['EventSourceArn']: m for m in client.list_event_source_mappings(
                FunctionName=func.name).get('EventSourceMappings', ())}
        modified = False
        for queue_arn in self.queue_arns:
            mapping = None
            if queue_arn in event_mappings:
                mapping = event_mappings[queue_arn]
                # bugfix: skip only when the mapping is already enabled
                # *and* at the desired batch size; the previous condition
                # (Enabled or batch-size-differs) skipped mappings that
                # needed updating.
                if (mapping['State'] == 'Enabled' and
                        mapping['BatchSize'] == self.batch_size):
                    continue
                modified = True
            else:
                modified = True
            if not modified:
                return modified
            if mapping is not None:
                log.info(
                    "Updating subscription %s on %s", func.name, queue_arn)
                client.update_event_source_mapping(
                    UUID=mapping['UUID'],
                    Enabled=True,
                    BatchSize=self.batch_size)
            else:
                log.info("Subscribing %s to %s", func.name, queue_arn)
                client.create_event_source_mapping(
                    FunctionName=func.name,
                    EventSourceArn=queue_arn,
                    BatchSize=self.batch_size)
        return modified

    def remove(self, func, func_deleted=True):
        """Delete the function's mappings for our queues; True if any found."""
        client = local_session(self.session_factory).client('lambda')
        event_mappings = {
            m['EventSourceArn']: m for m in client.list_event_source_mappings(
                FunctionName=func.name).get('EventSourceMappings', ())}
        found = None
        for queue_arn in self.queue_arns:
            if queue_arn not in event_mappings:
                continue
            client.delete_event_source_mapping(
                UUID=event_mappings[queue_arn]['UUID'])
            found = True
        return found
class SNSSubscription:
    """ Subscribe a lambda to one or more SNS topics.
    """
    # Grace period for IAM eventual consistency after a permission grant.
    iam_delay = 1.5

    def __init__(self, session_factory, topic_arns):
        self.topic_arns = topic_arns
        self.session_factory = session_factory

    @staticmethod
    def _parse_arn(arn):
        """Split a topic arn into (region, topic_name, statement_id)."""
        parts = arn.split(':')
        region, topic_name = parts[3], parts[5]
        statement_id = 'sns-topic-' + topic_name
        return region, topic_name, statement_id

    def add(self, func):
        """Grant sns invoke permission and subscribe the lambda."""
        session = local_session(self.session_factory)
        lambda_client = session.client('lambda')
        for arn in self.topic_arns:
            # NOTE(review): region from _parse_arn is unused here.
            region, topic_name, statement_id = self._parse_arn(arn)

            log.info("Subscribing %s to %s" % (func.name, topic_name))

            # Add permission to lambda for sns invocation.
            try:
                lambda_client.add_permission(
                    FunctionName=func.name,
                    StatementId=statement_id,
                    SourceArn=arn,
                    Action='lambda:InvokeFunction',
                    Principal='sns.amazonaws.com')
                log.debug("Added permission for sns to invoke lambda")
                # iam eventual consistency and propagation
                time.sleep(self.iam_delay)
            except lambda_client.exceptions.ResourceConflictException:
                pass

            # Subscribe the lambda to the topic, idempotent
            sns_client = session.client('sns')
            sns_client.subscribe(
                TopicArn=arn, Protocol='lambda', Endpoint=func.arn)

    def remove(self, func, func_deleted=True):
        """Unsubscribe the lambda; also revoke permission if it still exists."""
        session = local_session(self.session_factory)
        lambda_client = session.client('lambda')
        sns_client = session.client('sns')

        for topic_arn in self.topic_arns:
            region, topic_name, statement_id = self._parse_arn(topic_arn)

            # if the function isn't deleted we need to do some cleanup
            if not func_deleted:
                try:
                    response = lambda_client.remove_permission(
                        FunctionName=func.name,
                        StatementId=statement_id)
                    log.debug("Removed lambda permission result: %s" % response)
                except ClientError as e:
                    if e.response['Error']['Code'] != 'ResourceNotFoundException':
                        raise

            paginator = sns_client.get_paginator('list_subscriptions_by_topic')

            # Local sentinel exception used to break out of the nested
            # pagination loops once our subscription has been removed.
            class Done(Exception):
                pass

            try:
                for page in paginator.paginate(TopicArn=topic_arn):
                    for subscription in page['Subscriptions']:
                        if subscription['Endpoint'] != func.arn:
                            continue
                        try:
                            response = sns_client.unsubscribe(
                                SubscriptionArn=subscription['SubscriptionArn'])
                            log.debug("Unsubscribed %s from %s" %
                                      (func.name, topic_name))
                        except sns_client.exceptions.NotFoundException:
                            pass
                        raise Done  # break out of both for loops
            except Done:
                pass
class BucketSNSNotification(SNSSubscription):
    """ Subscribe a lambda to bucket notifications via SNS. """

    def __init__(self, session_factory, bucket, topic=None):
        # NB: We are overwriting __init__ vs. extending.
        self.session_factory = session_factory
        self.session = session_factory()
        # Reuse topics the bucket already notifies on, or create one.
        self.topic_arns = self.get_topic(bucket) if topic is None else [topic]
        self.client = self.session.client('sns')

    def get_topic(self, bucket):
        """Return topics receiving s3:ObjectCreated:* from the bucket.

        When none exists, creates a topic named after the bucket, grants
        s3 publish rights on it and wires up the bucket notification.
        """
        session = local_session(self.session_factory)
        sns = session.client('sns')
        s3 = session.client('s3')

        notifies = bucket['Notification']
        if 'TopicConfigurations' not in notifies:
            notifies['TopicConfigurations'] = []
        all_topics = notifies['TopicConfigurations']
        topic_arns = [t['TopicArn'] for t in all_topics
                      if 's3:ObjectCreated:*' in t['Events']]
        if not topic_arns:
            # No suitable existing topic. Create one.
            topic_arn = sns.create_topic(Name=bucket['Name'])['TopicArn']
            # Allow the s3 service principal to publish to the new topic.
            policy = {
                'Statement': [{
                    'Action': 'SNS:Publish',
                    'Effect': 'Allow',
                    'Resource': topic_arn,
                    'Principal': {'Service': 's3.amazonaws.com'}}]}
            sns.set_topic_attributes(
                TopicArn=topic_arn,
                AttributeName='Policy',
                AttributeValue=json.dumps(policy))
            notifies['TopicConfigurations'].append({
                'TopicArn': topic_arn,
                'Events': ['s3:ObjectCreated:*']})
            s3.put_bucket_notification_configuration(
                Bucket=bucket['Name'],
                NotificationConfiguration=notifies)
            topic_arns = [topic_arn]
        return topic_arns
class ConfigRule(AWSEventBase):
    """Use a lambda as a custom config rule.
    """
    client_service = 'config'

    def __repr__(self):
        return "<ConfigRule>"

    def get_rule_params(self, func):
        """Render the put_config_rule parameters for the function."""
        # config does not support versions/aliases on lambda funcs
        func_arn = func.arn
        if isinstance(func_arn, str) and func_arn.count(':') > 6:
            func_arn, version = func_arn.rsplit(':', 1)

        params = dict(
            ConfigRuleName=func.name,
            Description=func.description,
            Source={
                'Owner': 'CUSTOM_LAMBDA',
                'SourceIdentifier': func_arn,
                'SourceDetails': [{
                    'EventSource': 'aws.config',
                    'MessageType': 'ConfigurationItemChangeNotification'}]
            }
        )
        if isinstance(func, PolicyLambda):
            manager = func.policy.load_resource_manager()
            resource_model = manager.get_model()
            if resource_model.config_type:
                config_type = resource_model.config_type
            elif resource_model.cfn_type and 'schedule' in self.data:
                config_type = resource_model.cfn_type
            else:
                raise Exception("You may have attempted to deploy a config "
                    "based lambda function with an unsupported config type. "
                    "The most recent AWS config types are here: http://docs.aws"
                    ".amazon.com/config/latest/developerguide/resource"
                    "-config-reference.html.")
            params['Scope'] = {
                'ComplianceResourceTypes': [config_type]}
        else:
            # bugfix: params had no 'Scope' key yet, so the previous item
            # assignment into params['Scope'] raised KeyError.
            params['Scope'] = {
                'ComplianceResourceTypes': self.data.get(
                    'resource-types', ())}

        if self.data.get('schedule'):
            # Periodic evaluation instead of change notifications.
            params['Source']['SourceDetails'] = [{
                'EventSource': 'aws.config',
                'MessageType': 'ScheduledNotification'
            }]
            params['MaximumExecutionFrequency'] = self.data['schedule']
        return params

    def get(self, rule_name):
        """Fetch the rule description, or a falsy value when absent."""
        rules = resource_exists(
            self.client.describe_config_rules,
            ConfigRuleNames=[rule_name],
            NotFound="NoSuchConfigRuleException")
        if not rules:
            return rules
        return rules['ConfigRules'][0]

    @staticmethod
    def delta(rule, params):
        """Return True when the deployed rule differs from params."""
        # doesn't seem like we have anything mutable at the moment,
        # since we restrict params, maybe reusing the same policy name
        # with a different resource type.
        if rule['Scope'] != params['Scope']:
            return True
        if rule['Source'] != params['Source']:
            return True
        if ('MaximumExecutionFrequency' in params and
                rule['MaximumExecutionFrequency'] != params['MaximumExecutionFrequency']):
            return True
        if rule.get('Description', '') != params.get('Description', ''):
            return True
        return False

    def add(self, func):
        """Create or update the config rule and invoke permission."""
        rule = self.get(func.name)
        params = self.get_rule_params(func)

        if rule and self.delta(rule, params):
            log.debug("Updating config rule for %s" % self)
            rule.update(params)
            return LambdaRetry(self.client.put_config_rule, ConfigRule=rule)
        elif rule:
            log.debug("Config rule up to date")
            return

        client = self.session.client('lambda')
        try:
            client.add_permission(
                FunctionName=func.name,
                StatementId=func.name,
                SourceAccount=func.arn.split(':')[4],
                Action='lambda:InvokeFunction',
                Principal='config.amazonaws.com')
        except client.exceptions.ResourceConflictException:
            pass

        log.debug("Adding config rule for %s" % func.name)
        return LambdaRetry(self.client.put_config_rule, ConfigRule=params)

    def remove(self, func, func_deleted=True):
        """Delete the config rule; True when one existed."""
        rule = self.get(func.name)
        if not rule:
            return
        log.info("Removing config rule for %s", func.name)
        try:
            self.client.delete_config_rule(
                ConfigRuleName=func.name)
        except self.client.exceptions.NoSuchConfigRuleException:
            pass
        self.remove_permissions(func, remove_permission=not func_deleted)
        return True
|
e366621169479f2eb1ffd21076252def62da7763
|
aa793c2b787ff591f69147e2cc5e23d6c7b4d77e
|
/proxyclient/m1n1/hw/i2c.py
|
e2bda7add04b0021921b8832702268530159c9af
|
[
"MIT",
"BSD-3-Clause",
"OFL-1.1",
"GPL-2.0-only",
"CC0-1.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause"
] |
permissive
|
AsahiLinux/m1n1
|
8280a8342c407936beabda0f08a700759a636b05
|
6d0979e71e83f47c5da5fdb8c5e21eb1268d54e8
|
refs/heads/main
| 2023-08-22T20:52:30.090704
| 2023-08-21T14:16:08
| 2023-08-21T14:16:08
| 329,707,886
| 2,966
| 200
|
MIT
| 2023-09-07T10:19:39
| 2021-01-14T18:59:03
|
Python
|
UTF-8
|
Python
| false
| false
| 8,343
|
py
|
i2c.py
|
# SPDX-License-Identifier: MIT
from ..utils import *
from enum import IntEnum
__all__ = ["I2C", "I2CRegs"]
class R_MTXFIFO(Register32):
    """Master TX FIFO register."""
    # NOTE(review): the START/STOP comments were swapped previously.
    READ = 10       # Read (DATA=count)
    STOP = 9        # Issue STOP after
    START = 8       # Issue START before
    DATA = 7, 0     # Byte to send or count
class R_MRXFIFO(Register32):
    """Master RX FIFO register."""
    EMPTY = 8       # FIFO empty
    DATA = 7, 0     # FIFO data
class R_MCNT(Register32):
    """Byte counts for the master and slave FIFOs."""
    S_RXCNT = 31, 24    # Slave RX count
    S_TXCNT = 23, 16    # Slave TX count
    M_RXCNT = 15, 8     # Master RX count
    M_TXCNT = 7, 0      # Master TX count
class E_MST(IntEnum):
    """Master controller state machine states (see R_XFSTA.MST)."""
    IDLE = 0
    FRD1 = 1
    FRD2 = 2
    COMMAND = 3
    START = 4
    WRITE = 5
    READ = 6
    ACK = 7
    STOP = 8
    BAD = 15
class E_SST(IntEnum):
    """Slave controller state machine states (see R_XFSTA.SST)."""
    IDLE = 0
    START = 1
    ST_ACK = 2
    DATA = 3
    ACK = 4
class R_XFSTA(Register32):
    """Transfer status register."""
    MST = 27, 24, E_MST     # Master controller state
    SRD = 20                # Slave read in progress
    SWR = 19                # Slave write in progress
    SST = 18, 16, E_SST     # Slave controller state
    XFIFO = 9, 8            # FIFO number for error
    XFCNT = 7, 0            # Number of bytes in current xfer
class R_SADDR(Register32):
    """Slave address / interface control register."""
    DEB = 31        # Enable SDA/SCL read debug
    DIR = 30        # Direct (bitbang) mode
    ENS = 29        # Enable slave interface
    RST_STX = 28    # Reset slave TX FIFO
    RST_SRX = 27    # Reset master RX fifo (if ^ both, controller too)
    PEN = 26        # Promiscuous mode (slave)
    AAE = 25        # SALT/ALTMASK enable
    SAE = 24        # SADDR enable
    ALTMASK = 23, 16    # MASK for SALT bits
    SALT = 15, 8        # Alt slave address
    SADDR = 7, 0        # Slave address
class R_SMSTA(Register32):
    """Status register; the same layout serves as interrupt mask (IMASK)."""
    XIP = 28    # Xaction in progress
    XEN = 27    # Xaction ended
    UJF = 26    # UnJam failure
    JMD = 25    # Jam ocurred
    JAM = 24    # Currently jammed
    MTO = 23    # Master timeout
    MTA = 22    # Master arb lost
    MTN = 21    # Master received NACK
    MRF = 20    # Master RX fifo full
    MRNE = 19   # Master RX fifo not empty
    MTF = 17    # Master TX fifo full
    MTE = 16    # Master RX fifo empty
    STO = 15    # Slave timeout
    STA = 14    # Slave arb lost
    STN = 13    # Slave received NACK
    SRF = 12    # Slave RX fifo full
    SRNE = 11   # Slave RX fifo not empty
    STR = 10    # Slave transmit required
    STF = 9     # Slave TX fifo full
    STE = 8     # Slave TX fifo empty
    TOS = 7     # Timeout due to slave FIFO
    TOM = 6     # Timeout due to master FIFO
    TOE = 5     # Slave timeout due to ext clock stretch
    DCI = 4     # Direct clock in
    DDI = 3     # Direct data in
    DCO = 2     # Direct clock out
    DDO = 1     # Direct data out
    NN = 0      # NACK next (slave)
class R_CTL(Register32):
    """Control register: clock divider, FIFO resets, auto-unjam, enable."""
    MSW = 26, 16 # Maximum slave write size
    ENABLE = 11 # Unknown enable bit (clock sel? Apple thing)
    MRR = 10 # Master receive FIFO reset
    MTR = 9 # Master transmit FIFO reset
    UJM = 8 # Enable auto unjam machine
    CLK = 7, 0 # Clock divider
class R_STXFIFO(Register32):
    """Slave transmit FIFO: write one byte per access."""
    DATA = 7, 0 # Data
class R_SRXFIFO(Register32):
    """Slave receive FIFO: data byte plus per-byte condition flags."""
    N = 12 # NACK received after this byte
    P = 11 # Stop received, data not valid
    S = 10 # Start received before
    O = 9 # Overflow (promisc only)
    E = 8 # Empty (data not valid)
    DATA = 7, 0 # Data
# Apple reg (not present in the original PASemi controller — TODO confirm)
class R_FIFOCTL(Register32):
    """Apple-specific FIFO control register."""
    HALT = 0 # Halt machinery
class I2CRegs(RegMap):
    """MMIO register map for the I2C controller.

    Offsets are relative to the controller base address.  Note that IMASK
    reuses the R_SMSTA layout, and that STXFIFO/SRXFIFO share offset 0x20
    (presumably write hits TX and read hits RX — confirm against hardware
    docs).
    """
    MTXFIFO = 0x00, R_MTXFIFO
    MRXFIFO = 0x04, R_MRXFIFO
    MCNT = 0x08, R_MCNT
    XFSTA = 0x0c, R_XFSTA
    SADDR = 0x10, R_SADDR
    SMSTA = 0x14, R_SMSTA
    IMASK = 0x18, R_SMSTA
    CTL = 0x1c, R_CTL
    STXFIFO = 0x20, R_STXFIFO
    SRXFIFO = 0x20, R_SRXFIFO
    FIFOCTL = 0x44, R_FIFOCTL
class I2C:
    """Master-mode driver for the I2C controller, using the proxy interface.

    Only the master TX/RX paths are exercised here; the slave interface is
    not used.
    """

    def __init__(self, u, adt_path):
        self.u = u
        self.p = u.proxy
        self.iface = u.iface
        # MMIO base address comes from the device node's first "reg" entry.
        self.base = u.adt[adt_path].get_reg(0)[0]
        self.regs = I2CRegs(u, self.base)
        self.devs = []

    def clear_fifos(self):
        # Reset both the master TX and master RX FIFOs.
        self.regs.CTL.set(MTR=1, MRR=1)

    def clear_status(self):
        # Wipe all status bits (assumes write-1-to-clear semantics —
        # TODO confirm against controller documentation).
        self.regs.SMSTA.val = 0xffffffff

    def _fifo_read(self, nbytes):
        """Pop `nbytes` from the master RX FIFO, busy-polling while empty.

        Raises Exception("timeout") if the FIFO stays empty too long.
        """
        read = []
        for _ in range(nbytes):
            val = self.regs.MRXFIFO.reg
            timeout = 10000
            while val.EMPTY and timeout > 0:
                val = self.regs.MRXFIFO.reg
                timeout -= 1
            if timeout == 0:
                raise Exception("timeout")
            read.append(int(val) & 0xff)
        return bytes(read)

    def _fifo_write(self, buf, stop=False):
        """Push `buf` into the master TX FIFO.

        When `stop` is set, the last byte is tagged with STOP and we then
        busy-wait for the transaction-ended (XEN) status bit.
        """
        for no, byte in enumerate(buf):
            # Only the final byte of a stop-terminated transfer carries STOP.
            sending_stop = stop and no == len(buf) - 1
            self.regs.MTXFIFO.set(DATA=byte, STOP=int(sending_stop))
        if not stop:
            return
        timeout = 10000
        while not self.regs.SMSTA.reg.XEN and timeout > 0:
            timeout -= 1
        if timeout == 0:
            raise Exception("timeout")

    def write_reg(self, addr, reg, data, regaddrlen=1):
        """Write `data` to device register `reg` at bus address `addr`.

        `addr` is a 7-bit address; it is shifted left to make room for the
        R/W bit.  `regaddrlen` is the register address width in bytes
        (sent big-endian).
        """
        self.clear_fifos()
        self.clear_status()
        self.regs.CTL.set(ENABLE=1, CLK=0x4)
        self.regs.MTXFIFO.set(DATA=addr << 1, START=1)
        regbytes = int.to_bytes(reg, regaddrlen, byteorder="big")
        self._fifo_write(regbytes + bytes(data), stop=True)
        self.regs.CTL.set(ENABLE=0, CLK=0x4)

    def read_reg(self, addr, reg, nbytes, regaddrlen=1):
        """Read `nbytes` from device register `reg` at bus address `addr`.

        Performs a write of the register address followed by a repeated
        START and a read phase; returns the data as bytes.
        """
        self.clear_fifos()
        self.clear_status()
        self.regs.CTL.set(ENABLE=1, CLK=0x4)
        self.regs.MTXFIFO.set(DATA=addr << 1, START=1)
        regbytes = int.to_bytes(reg, regaddrlen, byteorder="big")
        self._fifo_write(regbytes, stop=False)
        # Repeated start with the read bit set, then request nbytes.
        self.regs.MTXFIFO.set(DATA=(addr << 1) | 1, START=1)
        self.regs.MTXFIFO.set(DATA=nbytes, STOP=1, READ=1)
        data = self._fifo_read(nbytes)
        self.regs.CTL.set(ENABLE=0, CLK=0x4)
        return data
class I2CRegMapDev:
    """Convenience wrapper for a register-mapped device behind an I2C bus.

    Subclasses may set REGMAP to a RegMap type (instantiated against this
    object) and ADDRESSING to a ``(paged, regimmbytes)`` pair, where
    ``regimmbytes`` is the width in bytes of the immediate register address
    and ``paged`` enables page switching via register 0.
    """
    REGMAP = None
    ADDRESSING = (0, 1)

    def __init__(self, bus, addr, name=None):
        self.bus = bus
        self.addr = addr
        self.name = name
        # No page selected yet, so the first paged access always switches.
        self.curr_page = None
        self.paged, self.regimmbytes = self.ADDRESSING
        if self.REGMAP is not None:
            self.regs = self.REGMAP(self, 0)

    @classmethod
    def from_adt(cls, bus, path):
        """Build an instance from an ADT node; the device address is the
        low byte of the node's ``reg`` property."""
        node = bus.u.adt[path]
        return cls(bus, node.reg[0] & 0xff, node.name)

    def _switch_page(self, page):
        # Page select lives in register 0 of the device.
        assert self.paged
        self.bus.write_reg(self.addr, 0, bytes([page]),
                           regaddrlen=self.regimmbytes)
        self.curr_page = page

    def _snip_regaddr(self, addr):
        """Split a full register address into (page, immediate)."""
        shift = self.regimmbytes * 8
        mask = (1 << shift) - 1
        return addr >> shift, addr & mask

    def _select(self, reg):
        """Resolve `reg` to its immediate address, switching pages if needed."""
        page, imm = self._snip_regaddr(reg)
        if self.paged and page != self.curr_page:
            self._switch_page(page)
        return imm

    def write(self, reg, val, width=8):
        """Write `val` (little-endian, `width` bits) to register `reg`."""
        imm = self._select(reg)
        payload = val.to_bytes(width // 8, byteorder="little")
        self.bus.write_reg(self.addr, imm, payload,
                           regaddrlen=self.regimmbytes)

    def read(self, reg, width=8):
        """Read `width` bits from register `reg` as a little-endian int."""
        imm = self._select(reg)
        raw = self.bus.read_reg(self.addr, imm, width // 8,
                                regaddrlen=self.regimmbytes)
        return int.from_bytes(raw, byteorder="little")

    def __repr__(self):
        tag = self.name if self.name else f"@ {self.addr:02x}"
        return f"<{type(self).__name__} {tag}>"
|
5322adf992c5b3bdc2bd414c204d109418b48a7b
|
d5a3aa96b30a5a6a355b4e004e494a6ef41a339c
|
/dataviz/euforeignleaders.py
|
3ab1b77b3f8e5fe9ee3f03a4434178ed4c914e70
|
[
"MIT"
] |
permissive
|
Udzu/pudzu
|
4c1c134503f62fd1cc08a56e257b864033b38561
|
df5019802bc32064870f31cda8397ad14868cda0
|
refs/heads/master
| 2023-07-10T06:16:35.342990
| 2023-07-04T06:28:00
| 2023-07-04T06:28:00
| 97,936,607
| 120
| 28
|
MIT
| 2021-02-21T16:15:31
| 2017-07-21T10:34:16
|
Roff
|
UTF-8
|
Python
| false
| false
| 5,257
|
py
|
euforeignleaders.py
|
import seaborn as sns
from pudzu.charts import *
from pudzu.sandbox.bamboo import *
# Chart-wide styling: font family plus foreground/background colours.
FONT, fg, bg = sans, "white", "black"
# Country metadata: split the |-delimited columns and index by country name.
atlas = pd.read_csv("datasets/countries.csv").split_columns(('nationality', 'tld', 'country'), "|").explode('country').set_index('country')
# One row per (country, leader type) with name/role/image/birthplace fields.
df = pd.read_csv("datasets/eu_foreignleaders.csv")
def entitle(img):
    """Wrap the chart image with a title, subtitle and three footnote columns."""
    title = Image.from_text("foreign-born European leaders".upper(), FONT(108, bold=True), fg=fg, bg=bg)
    subtitle = Image.from_text("the most recent foreign-born head of state and government from each country", FONT(60), fg=fg, bg=bg).pad((0,0,0,10), bg)
    # Footnote text, one string per rendered footer column.
    FOOTERS = ["""MORE RECENT (BUT LESS GOOD) ALTERNATIVES
1. President Adolf Schärf (1957-65) was born in Nikolsburg which later became part of Czechoslovakia.
Chancellor Karl Renner (1918-20/1945) was born in Dolní Dunajovice which later became part of Czechoslovakia.
2. President Martti Ahtisaari (1994-00) was born in Viipuri which later became part of Russia.
Prussian-born Frederick Charles was elected king in 1918 but never crowned.
3. President Konstantinos Karamanlis (1980-85/90-95) was born in Proti, Ottoman Empire before it became part of Greece.
4. Council Chairman János Kádár (1961-65) was born in Fiume which later became part of Croatia.""",
"""
5. President Mary McAleese (1997-11) was born in Belfast, Northern Ireland, but at the time Ireland claimed sovereignty over the entire island.
6. Some sources list Prime Minsiter Klaus Tschütscher (2009-13) as being born in Grabs, Switzerland, but others claim Vaduz.
7. Monaco-born Mindaugas II was elected King of Lithuania in 1918, but never assumed the crown.
8. President Emil Constantinescu (1996-00) was born Tighina which later became part of Moldova.
9. President Leonid Kravchuk (1991-94) was born in Velykyi Zhytyn, Poland before it became part of Ukraine.
10. During the Nazi Occupation, Netherlands and Norway had a foreign-born Reichskommissar, but also a government in exile.""",
"""FOOTNOTES
11. Hitler's birthplace was part of Germany between 1938 and 1945.
12. Buzek's birthplace was part of Poland between 1938 and 1939.
BARELY FOREIGN (BUT I COULDN'T FIND ANYONE BETTER)
13. De Gasperi's birthplace became part of Italy during his lifetime.
14. Aura's birthplace was part of Finland before it became Russian.
15. Văcăroiu's birthplace was part of Romania before it became Ukrainian.
16. Atatürk's birthplace was part of Turkey before it became Greek.
""",]
    footers = [Image.from_text(FOOTER, FONT(24), "white", padding=10, beard_line=True, line_spacing=1) for FOOTER in FOOTERS]
    footer = Image.from_row(footers, padding=(20,5), yalign=0)
    # Stack title, subtitle, chart, a grey divider rule and the footer row.
    img = Image.from_column([title, subtitle, img, Rectangle((img.width, 2), "grey"), footer], bg=bg, padding=5).pad(15,bg=bg)
    # Author watermark in the corner.
    img = img.place(Image.from_text("/u/Udzu", FONT(16), fg=fg, bg=bg, padding=5).pad((1,1,0,0), fg), align=1, padding=10)
    return img
# Build the grid charts in three batches of countries, stacked vertically.
grids = []
all_countries = sorted(set(df.country))
for countries in generate_batches(all_countries, ceil(len(all_countries)/3)):

    def match(country, type):
        # Dataset row for (country, leader type) as a dict, or NaN if absent.
        match = df[(df.country == country) & (df.type == type)]
        return dict(match.iloc[0]) if len(match) else np.nan

    # One row of heads of state ("s") and one of heads of government ("g").
    ss = [match(country, "s") for country in countries]
    gs = [match(country, "g") for country in countries]
    table = pd.DataFrame([ss,gs], index=["s","g"], columns=countries)

    # Placeholder portrait used when a row has no image URL.
    DEFAULT_IMG = "https://s-media-cache-ak0.pinimg.com/736x/0d/36/e7/0d36e7a476b06333d9fe9960572b66b9.jpg" # "https://upload.wikimedia.org/wikipedia/commons/6/68/Solid_black.png" #

    def cell(d):
        # Render one leader cell: name, role, cropped portrait, birthplace.
        if non(d) or not get_non(d, 'name'): return None
        logger.info(f"{d['country']} / {d['type']}")
        img = Image.from_url_with_cache(get_non(d, 'image', DEFAULT_IMG))
        return Image.from_column([
            Image.from_text(get_non(d, 'name', ''), FONT(16, bold=True),fg=fg, bg=bg, beard_line=True),
            Image.from_text(get_non(d, 'role', ''), FONT(16, italics=True),fg=fg, bg=bg, beard_line=True),
            img.cropped_resize((200,200), (0.5,get_non(d, 'align', 0.2)) if img.height >= img.width else (get_non(d, 'align', 0.5), 0.5)),
            Image.from_text(f"{d['city']}, {d['place']}", FONT(16, bold=False), max_width=200, fg=fg, bg=bg, beard_line=True),
            ], bg=bg, padding=2).pad(5, bg)

    def flag(column):
        # Column header: country flag above the (upper-cased) country name.
        flag = Image.from_url_with_cache(atlas.flag[table.columns[column]]).to_rgba()
        flag = flag.resize_fixed_aspect(height=140) if flag.width / flag.height < 1.3 else flag.resize((200,140))
        flag = flag.trim(1).pad(1, "grey").pad((0,10,0,0), bg)
        label = Image.from_text(table.columns[column].upper().replace("BOSNIA","BiH"), FONT(20, bold=True),fg=fg,bg=bg,beard_line=True)
        return Image.from_column([flag.pad((0,0,0,10),bg=bg), label])

    def row_label(row):
        # Row header, rotated to read vertically.
        return Image.from_text("HEAD OF STATE" if row==0 else "HEAD OF GOV'T", FONT(20, bold=True), fg=fg, bg=bg, padding=(0,10)).pad_to(width=300).transpose(Image.ROTATE_90)

    grid = grid_chart(table, cell, col_label=flag, row_label=row_label, bg=bg, yalign=0)
    grids.append(grid)

# Assemble the batches, add titles/footers and save the final image.
chart = Image.from_column(grids, xalign=0)
chart = entitle(chart)
chart.convert("RGB").save("output/euforeignleaders.jpg")
|
48caca5834be2e181103c9189ea68b2201b3fb34
|
302ce5ab1045ee93845608c96580c63d54d730af
|
/src/spikeinterface/core/tests/test_frameslicerecording.py
|
9664f3184b05659cdae09093024f4ca01f10ccc7
|
[
"MIT"
] |
permissive
|
SpikeInterface/spikeinterface
|
f900b62720860b2881d2e6b5fa4441e0e560f625
|
ee2237b3f5ce2347b2ec9df90e97b0ee6c738dcf
|
refs/heads/main
| 2023-09-02T11:27:54.687021
| 2023-09-01T13:48:29
| 2023-09-01T13:48:29
| 196,581,117
| 295
| 133
|
MIT
| 2023-09-14T19:12:16
| 2019-07-12T13:07:46
|
Python
|
UTF-8
|
Python
| false
| false
| 1,536
|
py
|
test_frameslicerecording.py
|
import pytest
import numpy as np
from spikeinterface.core import FrameSliceRecording, NumpyRecording
def test_FrameSliceRecording():
    """Check FrameSliceRecording over several (start_frame, end_frame) windows.

    The recording has 1000 samples x 5 channels where every channel holds
    the sample index, so sliced traces and times are easy to predict.  The
    original test repeated the same four assertions per window; the windows
    are now table-driven.
    """
    traces = np.zeros((1000, 5), dtype="float64")
    traces[:] = np.arange(1000)[:, None]

    sampling_frequency = 30000
    rec = NumpyRecording([traces], sampling_frequency)
    times0 = rec.get_times(0)

    # (start_frame, end_frame) passed to frame_slice; None means unbounded.
    for start_frame, end_frame in [(None, None), (None, 10), (900, 1000), (10, 85)]:
        start = 0 if start_frame is None else start_frame
        end = 1000 if end_frame is None else end_frame

        sub_rec = rec.frame_slice(start_frame, end_frame)
        assert sub_rec.get_num_samples(0) == end - start

        sub_traces = sub_rec.get_traces()
        assert np.array_equal(sub_traces[:, 0], np.arange(start, end, dtype="float64"))

        # Times must be preserved, not re-derived from frame 0.
        sub_times0 = sub_rec.get_times(0)
        assert np.allclose(times0[start:end], sub_times0)
if __name__ == "__main__":
    # Allow running this test module directly, without pytest.
    test_FrameSliceRecording()
|
cd6f66c3a54e1d9775a4e5cd2f8e28c70d86668e
|
39568e19301a7a112398be542154950af25591de
|
/hw/vendor/lowrisc_ibex/dv/uvm/core_ibex/directed_tests/gen_testlist.py
|
358469a1c27c82aa15647c2325894e2957a35983
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
lowRISC/opentitan
|
493995bc7cf7cb3aee486a5203af3fd62bba3bfc
|
51f6017b8425b14d5a4aa9abace8fe5a25ef08c8
|
refs/heads/master
| 2023-08-31T22:05:09.425796
| 2023-08-14T14:52:15
| 2023-08-31T20:31:13
| 204,516,692
| 2,077
| 634
|
Apache-2.0
| 2023-09-14T21:16:21
| 2019-08-26T16:30:16
|
SystemVerilog
|
UTF-8
|
Python
| false
| false
| 18,103
|
py
|
gen_testlist.py
|
#!/usr/bin/env python3
"""
Generating testlists for following opensource test suites
- riscv-tests
- riscv-arch-tests
- ePMP directed tests
"""
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import os
import argparse
import sys
def add_configs_and_handwritten_directed_tests():
    """Append the yaml header, shared test-suite configs and handwritten
    directed tests to directed_testlist.yaml.

    The file is opened in append mode; _main() truncates it beforehand.
    The ePMP mseccfg variants differ only in their test name suffix and
    -D defines, so they are generated from a table instead of being
    spelled out by hand (the previous version repeated the same 10-line
    entry 26 times).
    """
    # File header and the shared configs referenced by every test entry.
    testlist_string = '''# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

##########################################################
# This file is generated by gen_testlist.py script and largely copies
# the formatting of the testlist.yaml used by riscv-dv, but only specifies
# directed tests.
#
# - All paths are relative to THIS FILE.
# - Each 'test' can specify a config by name to re-use common configuration
# - If a test redefines a key already in the config, the test option takes priority.
##########################################################

- config: riscv-tests
  ld_script: link.ld
  includes: .
  gcc_opts: -static -mcmodel=medany -fvisibility=hidden -nostdlib -nostartfiles
    -I../../../vendor/riscv-test-env/
    -I../../../vendor/riscv-test-env/p/
    -I../../../vendor/riscv-tests/isa/macros/scalar/
  rtl_test: core_ibex_base_test
  rtl_params:
    PMPEnable: 1
  timeout_s: 300

- config: riscv-arch-tests
  ld_script: link.ld
  includes: .
  gcc_opts: -static -mcmodel=medany -fvisibility=hidden -nostdlib -nostartfiles
    -I../../../vendor/riscv-arch-tests/riscv-test-suite/env/
    -I../../../vendor/riscv-isa-sim/arch_test_target/spike/
  rtl_test: core_ibex_base_test
  rtl_params:
    PMPEnable: 1
  timeout_s: 300

- config: epmp-tests
  ld_script: ../../../../vendor/riscv-isa-sim/tests/mseccfg/mseccfg_test.ld
  includes: .
  gcc_opts: -march=rv32imc -O2 -I . -I ./. -I ../softfloat -I ../riscv -fno-builtin-printf
    -fdata-sections -fno-section-anchors -DPRINTF_SUPPORTED=1
    ../../../vendor/riscv-isa-sim/tests/mseccfg/crt.S
    ../../../vendor/riscv-isa-sim/tests/mseccfg/syscalls.c
    -mcmodel=medany -static -nostdlib -nostartfiles -lm -lgcc
    -Wl,-M -Wl,-Map=link.log
  rtl_test: core_ibex_base_test
  rtl_params:
    PMPEnable: 1
  timeout_s: 300
'''

    # Handwritten directed tests that do not follow the generated pattern.
    testlist_string += '''
# Custom directed tests
- test: empty
  desc: >
    Empty directed test
  iterations: 1
  test_srcs: empty/empty.S
  config: riscv-tests
'''

    # Template shared by every generated ePMP mseccfg variant.
    pmp_entry = '''
- test: pmp_mseccfg_test_{name}
  desc: >
    mseccfg test
  iterations: 1
  test_srcs: pmp_mseccfg_test/pmp_mseccfg_test.S
  config: riscv-tests
  gcc_opts: -static -mcmodel=medany -fvisibility=hidden -nostdlib -nostartfiles
    -I../../../vendor/riscv-test-env/
    -I../../../vendor/riscv-test-env/p/
    -I../../../vendor/riscv-tests/isa/macros/scalar/
    {defines}
'''

    # (name fragment, extra defines) for every mseccfg variant.
    pmp_variants = [
        ('rlb1_l0_0', '-DPMP_REGION=4 -DRLB -DSET_PMP_L=0 -DSET_PMP_L_PREV=0'),
        ('rlb1_l0_1', '-DPMP_REGION=4 -DRLB -DSET_PMP_L=1 -DSET_PMP_L_PREV=0'),
        ('rlb1_l1_0', '-DPMP_REGION=4 -DRLB -DSET_PMP_L=0 -DSET_PMP_L_PREV=1'),
        ('rlb1_l1_1', '-DPMP_REGION=4 -DRLB -DSET_PMP_L=1 -DSET_PMP_L_PREV=1'),
        ('rlb0_l0_0', '-DPMP_REGION=4 -DSET_PMP_L=0 -DSET_PMP_L_PREV=0'),
    ]
    # With RLB=0, the locked next region is varied over every R/W/X combo.
    for combo in ['', '_r1', '_w1', '_x1', '_r1_w1', '_r1_x1', '_w1_x1',
                  '_r1_w1_x1']:
        pmp_variants.append(('rlb0_l0_1_next_l1' + combo,
                             '-DSET_PMP_L=1 -DSET_PMP_L_PREV=0 -DPMP_NEXT_L1'
                             + combo.upper()))

    # Emit every variant for both M-mode (u0) and U-mode (u1).
    for u_mode in (0, 1):
        for fragment, defines in pmp_variants:
            if u_mode:
                defines = defines + ' -DU_MODE'
            testlist_string += pmp_entry.format(
                name='{0}_u{1}'.format(fragment, u_mode), defines=defines)

    testlist_string += '''
- test: access_pmp_overlap
  desc: >
    PMP access basic test
  iterations: 1
  test_srcs: access_pmp_overlap/access_pmp_overlap.S
  config: riscv-tests
'''

    with open('directed_testlist.yaml', "a") as f:
        f.write(testlist_string)
def append_directed_testlist(tests, test_suite, test_suite_name, is_assembly):
    """Append one test entry per source file found under each test group.

    :param tests: iterable of group sub-directories relative to test_suite
    :param test_suite: path to the suite (with trailing slash), relative to
        the generated yaml file
    :param test_suite_name: config name attached to each generated entry
    :param is_assembly: truthy to pick up .S sources, falsy for .c
    """
    extension = '.S' if is_assembly else '.c'
    entry_template = '''
- test: {test_name}
  desc: >
    riscv test - {test_name}
  iterations: 1
  test_srcs: {test_suite}{test_group_name}/{test_name}{extension}
  config: {config}
'''
    testlist_string = '\n# Test-suite: {0}\n'.format(test_suite_name)
    for test_group_name in tests:
        group_dir = os.path.join(test_suite, test_group_name)
        # Replaces the former `os.popen('ls … | egrep …')` pipeline: list
        # the directory directly and keep only files with the wanted
        # extension, in the same sorted order `ls` produced.  A missing
        # directory contributes no tests (as the old pipeline did).
        try:
            names = sorted(os.listdir(group_dir))
        except OSError:
            names = []
        for fname in names:
            if not fname.endswith(extension):
                continue
            test_name = fname[:-len(extension)]
            testlist_string += entry_template.format(
                test_name = test_name, test_group_name = test_group_name,
                test_suite = test_suite, config = test_suite_name,
                extension = extension)
    with open('directed_testlist.yaml', "a") as f:
        f.write(testlist_string)
def list_tests(dir):
    """Print and return the sorted file names inside `dir`.

    Uses os.listdir instead of shelling out to `ls` (same sorted output);
    a missing directory yields an empty list, as the old pipeline did.
    `dir` shadows the builtin but is kept for interface compatibility.
    """
    try:
        testlist = sorted(os.listdir(dir))
    except OSError:
        testlist = []
    print(testlist)
    return testlist
def _main() -> int:
    """Parse --add_tests and regenerate directed_testlist.yaml."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--add_tests',
                        type=str, required=True,
                        help='''List test-suite name(s) from following:
                             1) riscv-tests
                             2) riscv-arch-tests
                             3) epmp-tests
                             e.g. --add_tests=riscv-tests,epmp_tests
                             ''')
    args = parser.parse_args()
    test_suite = args.add_tests
    test_suite_list = test_suite.split(',')

    # Truncate (or create) the output file.  The previous
    # open('…', 'r+') / truncate(0) approach raised FileNotFoundError
    # when the file did not exist yet (fresh checkout).
    with open('directed_testlist.yaml', 'w'):
        pass

    # add headers and configs, also adding any handwritten directed tests
    add_configs_and_handwritten_directed_tests()

    if 'riscv-tests' in test_suite_list or test_suite == 'all':
        isa_tests = {'rv32mi', 'rv32uc', 'rv32ui', 'rv32um'}
        append_directed_testlist(isa_tests, '../../../../vendor/riscv-tests/isa/', 'riscv-tests', 1)
    if 'riscv-arch-tests' in test_suite_list or test_suite == 'all':
        arch_tests = {'rv32i_m/B/src', 'rv32i_m/C/src', 'rv32i_m/I/src', 'rv32i_m/M/src', 'rv32i_m/Zifencei/src'}
        append_directed_testlist(arch_tests, '../../../../vendor/riscv-arch-tests/riscv-test-suite/', 'riscv-arch-tests', 1)
    if 'epmp-tests' in test_suite_list or test_suite == 'all':
        append_directed_testlist({'outputs'}, '../../../../vendor/riscv-isa-sim/tests/mseccfg/gengen_src/', 'epmp-tests', 0)

    # Always return 0 (success), even if the test failed. We've successfully
    # generated a comparison log either way and we don't want to stop Make from
    # gathering them all up for us.
    return 0
if __name__ == '__main__':
    # Script entry point: propagate _main()'s exit code to the shell.
    sys.exit(_main())
|
e4d4fa61c722c3e34b658747671966f4ee90955a
|
f9d564f1aa83eca45872dab7fbaa26dd48210d08
|
/huaweicloud-sdk-cpts/huaweicloudsdkcpts/v1/model/debug_case_return_header.py
|
f6e3f0cb83662ce30a77c1e39b41527ae12ae366
|
[
"Apache-2.0"
] |
permissive
|
huaweicloud/huaweicloud-sdk-python-v3
|
cde6d849ce5b1de05ac5ebfd6153f27803837d84
|
f69344c1dadb79067746ddf9bfde4bddc18d5ecf
|
refs/heads/master
| 2023-09-01T19:29:43.013318
| 2023-08-31T08:28:59
| 2023-08-31T08:28:59
| 262,207,814
| 103
| 44
|
NOASSERTION
| 2023-06-22T14:50:48
| 2020-05-08T02:28:43
|
Python
|
UTF-8
|
Python
| false
| false
| 6,200
|
py
|
debug_case_return_header.py
|
# coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class DebugCaseReturnHeader:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'connection': 'str',
'content_length': 'str',
'content_type': 'str',
'date': 'str',
'vary': 'str'
}
attribute_map = {
'connection': 'Connection',
'content_length': 'Content-Length',
'content_type': 'Content-Type',
'date': 'Date',
'vary': 'Vary'
}
def __init__(self, connection=None, content_length=None, content_type=None, date=None, vary=None):
"""DebugCaseReturnHeader
The model defined in huaweicloud sdk
:param connection: Connection
:type connection: str
:param content_length: Content-Length
:type content_length: str
:param content_type: Content-Type
:type content_type: str
:param date: Date
:type date: str
:param vary: Vary
:type vary: str
"""
self._connection = None
self._content_length = None
self._content_type = None
self._date = None
self._vary = None
self.discriminator = None
if connection is not None:
self.connection = connection
if content_length is not None:
self.content_length = content_length
if content_type is not None:
self.content_type = content_type
if date is not None:
self.date = date
if vary is not None:
self.vary = vary
@property
def connection(self):
"""Gets the connection of this DebugCaseReturnHeader.
Connection
:return: The connection of this DebugCaseReturnHeader.
:rtype: str
"""
return self._connection
@connection.setter
def connection(self, connection):
"""Sets the connection of this DebugCaseReturnHeader.
Connection
:param connection: The connection of this DebugCaseReturnHeader.
:type connection: str
"""
self._connection = connection
@property
def content_length(self):
"""Gets the content_length of this DebugCaseReturnHeader.
Content-Length
:return: The content_length of this DebugCaseReturnHeader.
:rtype: str
"""
return self._content_length
@content_length.setter
def content_length(self, content_length):
"""Sets the content_length of this DebugCaseReturnHeader.
Content-Length
:param content_length: The content_length of this DebugCaseReturnHeader.
:type content_length: str
"""
self._content_length = content_length
@property
def content_type(self):
"""Gets the content_type of this DebugCaseReturnHeader.
Content-Type
:return: The content_type of this DebugCaseReturnHeader.
:rtype: str
"""
return self._content_type
@content_type.setter
def content_type(self, content_type):
"""Sets the content_type of this DebugCaseReturnHeader.
Content-Type
:param content_type: The content_type of this DebugCaseReturnHeader.
:type content_type: str
"""
self._content_type = content_type
@property
def date(self):
"""Gets the date of this DebugCaseReturnHeader.
Date
:return: The date of this DebugCaseReturnHeader.
:rtype: str
"""
return self._date
@date.setter
def date(self, date):
"""Sets the date of this DebugCaseReturnHeader.
Date
:param date: The date of this DebugCaseReturnHeader.
:type date: str
"""
self._date = date
@property
def vary(self):
"""Gets the vary of this DebugCaseReturnHeader.
Vary
:return: The vary of this DebugCaseReturnHeader.
:rtype: str
"""
return self._vary
@vary.setter
def vary(self, vary):
"""Sets the vary of this DebugCaseReturnHeader.
Vary
:param vary: The vary of this DebugCaseReturnHeader.
:type vary: str
"""
self._vary = vary
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
    def to_str(self):
        """Returns the JSON string representation of the model.

        NOTE(review): relies on ``sanitize_for_serialization`` being in scope
        (presumably imported at module level -- confirm).
        """
        import simplejson as json
        if six.PY2:
            # Python 2 only: reload(sys) restores the setdefaultencoding()
            # hook (removed by site.py) so non-ASCII attribute values can be
            # serialized without UnicodeDecodeError.
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`; delegates to :meth:`to_str`."""
        return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DebugCaseReturnHeader):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
3bf4b47a1832b9e0120ba37b7eac81ce41a4443d
|
d2621d10d6d0aa4fcecbb11c281e3dd680b985fc
|
/ts/metrics/metric_cache_yaml_impl.py
|
fa170dd816a37a4e288edab9213217c2e94b111f
|
[
"Apache-2.0"
] |
permissive
|
pytorch/serve
|
7b562a4d6372e77ce28fc71a5b8d5455c6f02290
|
242895c6b4596c4119ec09d6139e627c5dd696b6
|
refs/heads/master
| 2023-08-31T05:24:10.950144
| 2023-08-31T02:49:22
| 2023-08-31T02:49:22
| 212,488,700
| 3,689
| 895
|
Apache-2.0
| 2023-09-13T22:34:31
| 2019-10-03T03:17:43
|
Java
|
UTF-8
|
Python
| false
| false
| 6,518
|
py
|
metric_cache_yaml_impl.py
|
"""
Metrics Cache class for creating objects from yaml spec
"""
import logging
import yaml
import ts.metrics.metric_cache_errors as merrors
from ts.metrics.caching_metric import CachingMetric
from ts.metrics.metric_cache_abstract import MetricCacheAbstract
from ts.metrics.metric_type_enum import MetricTypes
logger = logging.getLogger(__name__)
class MetricsCacheYamlImpl(MetricCacheAbstract):
    """Metrics cache implementation that builds metric objects from a YAML spec."""

    def __init__(self, config_file_path):
        """
        Constructor

        Parses the yaml file so metrics objects can be created from it.

        Parameters
        ----------
        config_file_path: str
            Path of yaml file to be parsed
        """
        super().__init__(config_file_path=config_file_path)
        self._parse_yaml_file(self.config_file_path)

    def _parse_yaml_file(self, config_file_path) -> None:
        """
        Parse yaml file using PyYAML library.

        Raises
        ------
        MetricsCacheTypeError
            if no config file path was provided
        MetricsCachePyYamlError / MetricsCacheIOError / GeneralMetricsCacheError
            on parse / read / unexpected failures respectively
        """
        if not config_file_path:
            raise merrors.MetricsCacheTypeError("Config file not initialized")
        try:
            # context manager guarantees the file handle is closed even on
            # parse failure (the original left the handle open until GC)
            with open(config_file_path, "r", encoding="utf-8") as yaml_file:
                self._parsed_file = yaml.safe_load(yaml_file)
            # use the module-level logger (the original called the root
            # logger via `logging.info`, bypassing `logger` defined above)
            logger.info(f"Successfully loaded {config_file_path}.")
        except yaml.YAMLError as exc:
            raise merrors.MetricsCachePyYamlError(
                f"Error parsing file {config_file_path}: {exc}"
            )
        except IOError as io_err:
            raise merrors.MetricsCacheIOError(
                f"Error reading file {config_file_path}: {io_err}"
            )
        except Exception as err:
            raise merrors.GeneralMetricsCacheError(
                f"General error found in file {config_file_path}: {err}"
            )

    def _parse_metrics_section(self, key="model_metrics") -> dict:
        """
        Given a key present in the yaml, returns the corresponding section.

        Parameters
        ----------
        key: str
            section of yaml file to be parsed

        Raises
        ------
        MetricsCacheKeyError
            if the key is not present in the parsed yaml
        """
        try:
            section = self._parsed_file[key]
        except KeyError as err:
            raise merrors.MetricsCacheKeyError(
                f"'{key}' key not found in yaml file: {err}"
            )
        logger.debug(f"Successfully parsed {key} section of yaml file")
        return section

    def initialize_cache(self) -> None:
        """
        Create Metric objects based off of the model_metrics data and add to cache.

        Raises
        ------
        MetricsCacheValueError
            if the `model_metrics` section is empty or missing
        MetricsCacheKeyError
            if a metric type is invalid or a required metric field is missing
        """
        metrics_section = self._parse_metrics_section("model_metrics")
        if not metrics_section:
            raise merrors.MetricsCacheValueError(
                "Missing `model_metrics` specification"
            )

        for metric_type, metrics_list in metrics_section.items():
            try:
                metric_enum = MetricTypes(metric_type)
            except Exception as exc:
                raise merrors.MetricsCacheKeyError(f"Invalid metric type: {exc}")

            for metric in metrics_list:
                try:
                    self.add_metric_to_cache(
                        metric_name=metric["name"],
                        unit=metric["unit"],
                        dimension_names=metric["dimensions"],
                        metric_type=metric_enum,
                    )
                except KeyError as k_err:
                    raise merrors.MetricsCacheKeyError(
                        f"Key not found in cache spec: {k_err}"
                    )

    def add_metric_to_cache(
        self,
        metric_name: str,
        unit: str,
        dimension_names: list = None,
        metric_type: MetricTypes = MetricTypes.COUNTER,
    ) -> CachingMetric:
        """
        Create a new metric and add into cache. Override existing metric with
        same name if present.

        Parameters
        ----------
        metric_name str
            Name of metric
        unit str
            unit can be one of ms, percent, count, MB, GB or a generic string
        dimension_names list
            list of dimension name strings for the metric (defaults to [])
        metric_type MetricTypes
            Type of metric Counter, Gauge, Histogram

        Returns
        -------
        newly created CachingMetric object
        """
        # `None` sentinel avoids the shared-mutable-default pitfall of the
        # original `dimension_names: list = []`; behavior is unchanged since
        # a missing value is normalized back to an empty list here.
        if dimension_names is None:
            dimension_names = []

        self._check_type(metric_name, str, "`metric_name` must be a str")
        self._check_type(unit, str, "`unit` must be a str")
        self._check_type(
            metric_type, MetricTypes, "`metric_type` must be a MetricTypes enum"
        )
        if dimension_names:
            self._check_type(
                dimension_names,
                list,
                "`dimension_names` should be a list of dimension name strings",
            )

        type_cache = self.cache.setdefault(metric_type, {})
        metric = CachingMetric(
            metric_name=metric_name,
            unit=unit,
            dimension_names=dimension_names,
            metric_type=metric_type,
        )
        if metric_name in type_cache:
            logger.warning(f"Overriding existing key {metric_type}:{metric_name}")
        type_cache[metric_name] = metric
        return metric

    def get_metric(
        self,
        metric_name: str,
        metric_type: MetricTypes = MetricTypes.COUNTER,
    ) -> CachingMetric:
        """
        Look up an existing metric in the cache.

        Parameters
        ----------
        metric_name str
            Name of metric
        metric_type MetricTypes
            Type of metric Counter, Gauge, Histogram

        Returns
        -------
        CachingMetric object

        Raises
        ------
        MetricsCacheKeyError
            if no metric with the given type/name exists in the cache
        """
        self._check_type(metric_name, str, "`metric_name` must be a str")
        self._check_type(
            metric_type, MetricTypes, "`metric_type` must be a MetricTypes enum"
        )
        try:
            return self.cache[metric_type][metric_name]
        except KeyError:
            raise merrors.MetricsCacheKeyError(
                f"Metric of type '{metric_type}' and name '{metric_name}' doesn't exist"
            )

    def cache_keys(self):
        """
        Testing util method: return "type:name" strings for all cached metrics.
        """
        return [
            f"{metric_type.value}:{metric_name}"
            for metric_type, metrics in self.cache.items()
            for metric_name in metrics
        ]
|
4131d02a549957d942147916b105bd40bdb1250d
|
6146e33102797407ede06ce2daa56c28fdfa2812
|
/python/GafferUITest/LabelPlugValueWidgetTest.py
|
82682511e7170cda48016959139e48035f6a1fb5
|
[
"BSD-3-Clause"
] |
permissive
|
GafferHQ/gaffer
|
e1eb78ba8682bfbb7b17586d6e7b47988c3b7d64
|
59cab96598c59b90bee6d3fc1806492a5c03b4f1
|
refs/heads/main
| 2023-09-01T17:36:45.227956
| 2023-08-30T09:10:56
| 2023-08-30T09:10:56
| 9,043,124
| 707
| 144
|
BSD-3-Clause
| 2023-09-14T09:05:37
| 2013-03-27T00:04:53
|
Python
|
UTF-8
|
Python
| false
| false
| 7,740
|
py
|
LabelPlugValueWidgetTest.py
|
##########################################################################
#
# Copyright (c) 2023, Cinesite VFX Ltd. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import imath
import Gaffer
import GafferUI
import GafferUITest
class LabelPlugValueWidgetTest( GafferUITest.TestCase ) :
	"""Tests for `GafferUI.LabelPlugValueWidget._hasUserValue()`."""

	def testHasUserValue( self ) :
		"""Exercise `_hasUserValue()` across input connections, user defaults
		(on parent and leaf plugs) and output plugs."""

		node = Gaffer.Node()
		node["user"]["v"] = Gaffer.V2fPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )

		# No user-made edits to the value.

		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["x"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["y"] ) )

		# Even if it happens to be at the default value, the existence of a connection
		# means that we consider the value to be user-provided. This differs from
		# `ValuePlug.isSetToDefault()` which allows input connections if they provide
		# a static (non-context-sensitive) value that matches the default value. For
		# `ValuePlug` we only care about things that impact computed results, but for
		# `LabelPlugValueWidget` we want to highlight any edits made by the user.

		node["user"]["v"]["x"].setInput( node["user"]["v"]["y"] )
		self.assertTrue( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"] ) )
		self.assertTrue( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"] ) )
		self.assertTrue( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["x"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["y"] ) )

		# And that applies even if a user default was registered on a parent plug.

		Gaffer.Metadata.registerValue( node["user"]["v"], "userDefault", imath.V2f( 0 ) )
		self.assertTrue( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"] ) )
		self.assertTrue( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"] ) )
		self.assertTrue( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["x"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["y"] ) )

		# If we remove the connection, we should be back to the default state.

		node["user"]["v"]["x"].setInput( None )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["x"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["y"] ) )

		# If the value differs to the user default, then that's a user-provided value,
		# even if `ValuePlug.isSetToDefault()` is True.

		Gaffer.Metadata.registerValue( node["user"]["v"], "userDefault", imath.V2f( 1, 2 ) )
		self.assertTrue( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"] ) )
		self.assertTrue( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"] ) )
		self.assertTrue( node["user"]["v"].isSetToDefault() )

		# But if the value of the plug matches the user default, then it hasn't been
		# edited by the user, regardless of what `ValuePlug.isSetToDefault()` might say.

		Gaffer.NodeAlgo.applyUserDefault( node["user"]["v"] )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"] ) )
		self.assertFalse( node["user"]["v"].isSetToDefault() )

		# And this all applies if we put the user default on the leaf plugs instead.

		Gaffer.Metadata.deregisterValue( node["user"]["v"], "userDefault" )
		self.assertTrue( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"] ) )
		self.assertTrue( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"] ) )

		Gaffer.Metadata.registerValue( node["user"]["v"]["x"], "userDefault", 1 )
		Gaffer.Metadata.registerValue( node["user"]["v"]["y"], "userDefault", 2 )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"] ) )
		self.assertFalse( node["user"]["v"].isSetToDefault() )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["x"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["y"] ) )

		# Output plugs are never considered to have user edits, because the user doesn't
		# provide their values directly.

		node["user"]["o"] = Gaffer.V2fPlug( direction = Gaffer.Plug.Direction.Out, flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["x"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["y"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["o"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["o"]["x"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["o"]["y"] ) )

		# Not even when an (output-to-output) connection exists.

		node["user"]["o"]["y"].setInput( node["user"]["o"]["x"] )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["x"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["v"]["y"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["o"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["o"]["x"] ) )
		self.assertFalse( GafferUI.LabelPlugValueWidget._hasUserValue( node["user"]["o"]["y"] ) )
# Allow this test module to be executed directly (in addition to the test runner).
if __name__ == "__main__":
	unittest.main()
|
6c94209dbce672f38899ceeadfb28f355a1c6490
|
6fdb4eaf5b0e6dbd7db4bf947547541e9aebf110
|
/api/tests/opentrons/protocol_reader/__init__.py
|
ee61e64cb964312c6508cbe0b798aedfab8578ff
|
[
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] |
permissive
|
Opentrons/opentrons
|
874321e01149184960eeaeaa31b1d21719a1ceda
|
026b523c8c9e5d45910c490efb89194d72595be9
|
refs/heads/edge
| 2023-09-02T02:51:49.579906
| 2023-08-31T16:02:45
| 2023-08-31T16:02:45
| 38,644,841
| 326
| 174
|
Apache-2.0
| 2023-09-14T21:47:20
| 2015-07-06T20:41:01
|
Python
|
UTF-8
|
Python
| false
| false
| 54
|
py
|
__init__.py
|
"""Tests for the opentrons.protocol_reader module."""
|
a536c417aea4d61b5c0d966e66588d10bdc09ce9
|
79e8247110b113395f6ad935209c24b3d2590f24
|
/plugins/lighthouse/painting/painter.py
|
63b60034c978f175db258188f74585fa7f0cc09b
|
[
"MIT"
] |
permissive
|
gaasedelen/lighthouse
|
133a3fb019095c06d7432db2ac6a9dfb8be55ef7
|
f4642e8b4b4347b11ccb25a79ec4f490c9ad901d
|
refs/heads/develop
| 2023-07-19T21:13:47.247797
| 2022-02-17T07:59:38
| 2022-02-17T19:29:27
| 81,627,212
| 2,053
| 309
|
MIT
| 2023-05-14T00:11:52
| 2017-02-11T03:13:36
|
Python
|
UTF-8
|
Python
| false
| false
| 23,083
|
py
|
painter.py
|
import abc
import time
import logging
import threading
from lighthouse.util import *
from lighthouse.util.debug import catch_errors
from lighthouse.coverage import FunctionCoverage
logger = logging.getLogger("Lighthouse.Painting")
class DatabasePainter(object):
    """
    An asynchronous disassembler database painting engine.
    """
    # NOTE(review): '__metaclass__ = ...' is only honored by Python 2; under
    # Python 3 this assignment has no effect (abstractmethods are not enforced).
    __metaclass__ = abc.ABCMeta

    #
    # commands consumed by the painter's worker thread -- see the dispatch
    # in _async_database_painter for how each message is handled
    #

    MSG_ABORT = -1         # escape / discard a stale in-progress paint
    MSG_TERMINATE = 0      # spin down the worker thread
    MSG_REPAINT = 1        # paint the database per the current state
    MSG_FORCE_REPAINT = 2  # discard known paint state, then repaint everything
    MSG_CLEAR = 3          # clear our known paint from the database
    MSG_FORCE_CLEAR = 4    # forcibly clear paint from all known addresses
    MSG_REBASE = 5         # check for (and apply) an imagebase rebase
    def __init__(self, lctx, director, palette):
        """
        Initialize the painter. The worker thread is NOT started here; call
        start() to begin servicing paint requests.

        Parameters
        ----------
        lctx
            lighthouse context, used to resolve the active disassembler
        director
            coverage director sourcing the metadata/coverage to paint
        palette
            color palette (stored for use by disassembler-specific subclasses;
            not referenced directly in this base class)
        """

        #----------------------------------------------------------------------
        # Misc
        #----------------------------------------------------------------------

        self.lctx = lctx
        self.palette = palette
        self.director = director
        self._enabled = False   # painting currently active?
        self._started = False   # worker thread launched?

        #----------------------------------------------------------------------
        # Painted State
        #----------------------------------------------------------------------

        #
        # the coverage painter maintains its own internal record of what
        # instruction addresses and graph nodes it has painted.
        #

        self._imagebase = BADADDR
        self._painted_nodes = set()
        self._painted_partial = set()
        self._painted_instructions = set()

        #
        # these toggles will let the core painter (this class) know that it
        # does not have to order explicit paints of instructions or nodes.
        #
        # this is because a disassembler-specific painter may be able to hook
        # unique callbacks for painting graphs nodes or instructions
        # 'on-the-fly' as they are rendered.
        #
        # these types of paints are ephermal and the most performant, they
        # also will not need to be tracked by the painter.
        #

        self._streaming_nodes = False
        self._streaming_instructions = False

        #----------------------------------------------------------------------
        # Async
        #----------------------------------------------------------------------

        #
        # to communicate with the asynchronous painting thread, we send a
        # a message via the thread event to signal a new paint request, and
        # use the repaint_requested bool to interrupt a running paint request.
        #

        self._action_complete = threading.Event()
        self._msg_queue = queue.Queue()
        self._end_threads = False

        #
        # asynchronous database painting thread
        #

        self._painting_worker = threading.Thread(
            target=self._async_database_painter,
            name="DatabasePainter"
        )

        #----------------------------------------------------------------------
        # Callbacks
        #----------------------------------------------------------------------

        # painter callbacks
        self._status_changed_callbacks = []

        # register for cues from the director
        self.director.coverage_switched(self.repaint)
        self.director.coverage_modified(self.repaint)
        self.director.refreshed(self.check_rebase)
def start(self):
"""
Start the painter.
"""
if self._started:
return
# start the painter thread
self._painting_worker.start()
# all done
self._started = True
self.set_enabled(True)
#--------------------------------------------------------------------------
# Status
#--------------------------------------------------------------------------
@property
def enabled(self):
"""
Return the active painting status of the painter.
"""
return self._enabled
def set_enabled(self, enabled):
"""
Enable or disable the painter.
"""
# enabled/disabled status is not changing, ignore...
if enabled == self._enabled:
return
lmsg("%s painting..." % ("Enabling" if enabled else "Disabling"))
self._enabled = enabled
# notify listeners that the painter has been enabled/disabled
self._notify_status_changed(enabled)
# paint or clear the database based on the change of status...
if enabled:
self._send_message(self.MSG_REPAINT)
else:
self._send_message(self.MSG_CLEAR)
#--------------------------------------------------------------------------
# Commands
#--------------------------------------------------------------------------
def terminate(self):
"""
Cleanup & terminate the painter.
"""
self._end_threads = True
self._msg_queue.put(self.MSG_TERMINATE)
try:
self._painting_worker.join()
except RuntimeError: # thread was never started...
pass
# best effort to free up resources & improve interpreter spindown
del self._painted_nodes
del self._painted_instructions
del self._status_changed_callbacks
def repaint(self):
"""
Paint coverage defined by the current database mappings.
"""
self._send_message(self.MSG_REPAINT)
def force_repaint(self):
"""
Force a coverage repaint of the current database mappings.
"""
self._send_message(self.MSG_FORCE_REPAINT)
def force_clear(self):
"""
Clear all paint from the current database (based on metadata)
"""
self._send_message(self.MSG_FORCE_CLEAR)
self.set_enabled(False)
def check_rebase(self):
"""
Perform a rebase on the painted data cache (if necessary).
"""
self._send_message(self.MSG_REBASE)
self._send_message(self.MSG_REPAINT)
def _send_message(self, message):
"""
Queue a painter command for execution.
"""
if not self._started:
return
self._msg_queue.put(message)
    #--------------------------------------------------------------------------
    # Callbacks
    #--------------------------------------------------------------------------

    def status_changed(self, callback):
        """
        Subscribe a callback for painter status (enabled/disabled) change events.
        """
        register_callback(self._status_changed_callbacks, callback)

    def _notify_status_changed(self, status):
        """
        Notify listeners of a painter status change (True == enabled).
        """
        notify_callback(self._status_changed_callbacks, status)
    #--------------------------------------------------------------------------
    # Paint Primitives
    #--------------------------------------------------------------------------

    #
    # the primitives below are implemented by disassembler-specific painter
    # subclasses. implementations are expected to set self._action_complete
    # when their (possibly asynchronous) work is done, as the high level
    # paint routines block on it (see _priority_paint_functions)
    #

    @abc.abstractmethod
    def _paint_instructions(self, instructions):
        """
        Paint the given list of instruction addresses.
        """
        pass

    @abc.abstractmethod
    def _clear_instructions(self, instructions):
        """
        Clear paint from the given instruction addresses.
        """
        pass

    @abc.abstractmethod
    def _paint_nodes(self, nodes_coverage):
        """
        Paint the given graph node addresses.
        """
        pass

    @abc.abstractmethod
    def _clear_nodes(self, nodes_metadata):
        """
        Clear paint from the given graph nodes.
        """
        pass

    @abc.abstractmethod
    def _refresh_ui(self):
        """
        Refresh the disassembler UI to ensure paint is rendered.
        """
        pass

    @abc.abstractmethod
    def _cancel_action(self, job):
        """
        Cancel a paint action using something representing its job.
        """
        pass
#------------------------------------------------------------------------------
# Painting - High Level
#------------------------------------------------------------------------------
def _priority_paint(self):
"""
Immediately repaint regions of the database visible to the user.
Return True upon completion, or False if interrupted.
"""
if self._streaming_instructions and self._streaming_nodes:
return True
# get current function / user location in the database
cursor_address = disassembler[self.lctx].get_current_address()
# attempt to paint the functions in the immediate cursor vicinity
result = self._priority_paint_functions(cursor_address)
# force a refresh *now* as this is a prority painting
self._refresh_ui()
# all done
return result
def _priority_paint_functions(self, target_address, neighbors=1):
"""
Paint functions in the immediate vicinity of the given address.
This will paint both the instructions & graph nodes of defined functions.
"""
db_metadata = self.director.metadata
db_coverage = self.director.coverage
blank_coverage = FunctionCoverage(BADADDR)
# get the function metadata for the function closest to our cursor
function_metadata = db_metadata.get_closest_function(target_address)
if not function_metadata:
return False
# select the range of functions around us that we would like to paint
func_num = db_metadata.get_function_index(function_metadata.address)
func_num_start = max(func_num - neighbors, 0)
func_num_end = min(func_num + neighbors + 1, len(db_metadata.functions) - 1)
# repaint the specified range of functions
for current_num in xrange(func_num_start, func_num_end):
# get the next function to paint
function_metadata = db_metadata.get_function_by_index(current_num)
if not function_metadata:
continue
# get the function coverage data for the target address
function_address = function_metadata.address
function_coverage = db_coverage.functions.get(function_address, blank_coverage)
if not self._streaming_nodes:
# clear nodes
must_clear = sorted(set(function_metadata.nodes) - set(function_coverage.nodes))
self._action_complete.clear()
self._clear_nodes(must_clear)
self._action_complete.wait()
# paint nodes
must_paint = sorted(function_coverage.nodes)
self._action_complete.clear()
self._paint_nodes(must_paint)
self._action_complete.wait()
if not self._streaming_instructions:
# clear instructions
must_clear = sorted(function_metadata.instructions - function_coverage.instructions)
self._action_complete.clear()
self._clear_instructions(must_clear)
self._action_complete.wait()
# paint instructions
must_paint = sorted(function_coverage.instructions)
self._action_complete.clear()
self._paint_instructions(must_paint)
self._action_complete.wait()
# paint finished successfully
return True
    def _paint_database(self):
        """
        Repaint the current database based on the current state.

        Return True upon completion, or False if interrupted by a newer
        paint request.
        """
        logger.debug("Painting database...")

        # more code-friendly, readable aliases (db_XX == database_XX)
        db_coverage = self.director.coverage
        db_metadata = self.director.metadata

        start = time.time()
        #------------------------------------------------------------------

        # initialize imagebase if it hasn't been already...
        if self._imagebase == BADADDR:
            self._imagebase = db_metadata.imagebase

        # immediately paint user-visible regions of the database
        if not self._priority_paint():
            return False # a repaint was requested

        #
        # if the painter is not capable of 'streaming' the coverage paint,
        # then we must explicitly paint the instructions & nodes here
        #

        if not self._streaming_instructions:

            #
            # TODO: 'partially painted nodes' might be a little funny / not
            # working correctly in IDA if we ever disable instruction streaming...
            #

            # compute the painted instructions that will not get painted over
            # (partially painted instructions are always treated as stale)
            stale_instr = self._painted_instructions - db_coverage.coverage
            stale_instr |= (self._painted_partial - db_coverage.partial_instructions)

            # clear old instruction paint
            if not self._async_action(self._clear_instructions, stale_instr):
                return False # a repaint was requested

            # paint new instructions
            new_instr = sorted(db_coverage.coverage - self._painted_instructions)
            if not self._async_action(self._paint_instructions, new_instr):
                return False # a repaint was requested

        if not self._streaming_nodes:

            # compute the painted nodes that will not get painted over
            # (partially covered nodes are always treated as stale)
            stale_nodes = self._painted_nodes - viewkeys(db_coverage.nodes)
            stale_nodes |= db_coverage.partial_nodes

            # clear old node paint
            if not self._async_action(self._clear_nodes, stale_nodes):
                return False # a repaint was requested

            # paint new nodes
            new_nodes = sorted(viewkeys(db_coverage.nodes) - self._painted_nodes)
            if not self._async_action(self._paint_nodes, new_nodes):
                return False # a repaint was requested

        #------------------------------------------------------------------
        end = time.time()
        logger.debug(" - Painting took %.2f seconds" % (end - start))

        # paint finished successfully
        return True
def _clear_database(self):
"""
Clear all paint from the current database using the known paint state.
"""
logger.debug("Clearing database paint...")
start = time.time()
#------------------------------------------------------------------
db_metadata = self.director.metadata
# clear all instructions
if not self._streaming_instructions:
if not self._async_action(self._clear_instructions, self._painted_instructions):
return False # a repaint was requested
# clear all nodes
if not self._streaming_nodes:
if not self._async_action(self._clear_nodes, self._painted_nodes):
return False # a repaint was requested
#------------------------------------------------------------------
end = time.time()
logger.debug(" - Database paint cleared in %.2f seconds..." % (end-start))
# sanity checks...
assert self._painted_nodes == set()
assert self._painted_instructions == set()
# paint finished successfully
return True
def _force_paint_database(self):
"""
Forcibly repaint the database.
"""
db_metadata = self.director.metadata
text = "Repainting the database..."
logger.debug(text)
is_modal = bool(disassembler.NAME != "IDA")
disassembler.execute_ui(disassembler.show_wait_box)(text, False)
start = time.time()
#------------------------------------------------------------------
# discard current / known paint state
self._painted_nodes = set()
self._painted_partial = set()
self._painted_instructions = set()
# paint the database...
self._paint_database()
#------------------------------------------------------------------
end = time.time()
logger.debug(" - Database repainted in %.2f seconds..." % (end-start))
time.sleep(.2) # XXX: this seems to fix a bug where the waitbox doesn't close if the paint is too fast??
disassembler.execute_ui(disassembler.hide_wait_box)()
# paint finished successfully
return True
    def _force_clear_database(self):
        """
        Forcibly clear the paint from all known database addresses.

        Return True upon completion.
        """
        db_metadata = self.director.metadata
        text = "Forcibly clearing all paint from database..."
        logger.debug(text)

        #
        # NOTE: forcefully clearing the database of paint can take a long time
        # in certain cases, so we want to block the user from doing anything
        # to the database while we're working.
        #
        # we will pop up a waitbox to block them, but we have to be careful as
        # a *modal* waitbox will conflict with IDA's processing of MFF_WRITE
        # requests making it wait for the waitbox to close before processing
        #
        # therefore, we put in a little bodge wire here to make sure the
        # waitbox is *not* modal for IDA... but will be in the normal case.
        # it also helps that IDA will be busy processing our 'write' requests,
        # so the UI will be mostly frozen to the user anyway!
        #

        is_modal = bool(disassembler.NAME != "IDA")
        disassembler.execute_ui(disassembler.show_wait_box)(text, is_modal)

        start = time.time()
        #------------------------------------------------------------------

        # clear every instruction / node known to the metadata (not just the
        # addresses this painter believes it has painted)
        self._action_complete.clear()
        self._clear_instructions(sorted(db_metadata.instructions))
        self._action_complete.wait()

        self._action_complete.clear()
        self._clear_nodes(sorted(db_metadata.nodes))
        self._action_complete.wait()

        #------------------------------------------------------------------
        end = time.time()
        logger.debug(" - Database paint cleared in %.2f seconds..." % (end-start))

        time.sleep(.2) # XXX: this seems to fix a bug where the waitbox doesn't close if the clear is too fast??
        disassembler.execute_ui(disassembler.hide_wait_box)()

        # paint finished successfully
        return True
def _rebase_database(self):
"""
Rebase the active database paint.
TODO/XXX: there may be some edgecases where painting can be wrong if
a rebase occurs while the painter is running.
"""
db_metadata = self.director.metadata
instructions = db_metadata.instructions
nodes = viewvalues(db_metadata.nodes)
# a rebase has not occurred
if not db_metadata.cached or (db_metadata.imagebase == self._imagebase):
return False
# compute the offset of the rebase
rebase_offset = db_metadata.imagebase - self._imagebase
# rebase the cached addresses of what we have painted
self._painted_nodes = set([address+rebase_offset for address in self._painted_nodes])
self._painted_instructions = set([address+rebase_offset for address in self._painted_instructions])
self._imagebase = db_metadata.imagebase
# a rebase has been observed
return True
    #--------------------------------------------------------------------------
    # Asynchronous Painting
    #--------------------------------------------------------------------------

    @catch_errors
    def _async_database_painter(self):
        """
        Asynchronous database painting worker loop.

        Runs on the dedicated 'DatabasePainter' thread, blocking on the
        message queue and dispatching each MSG_* command until MSG_TERMINATE
        (or self._end_threads) shuts it down.
        """
        logger.debug("Starting DatabasePainter thread...")

        #
        # Asynchronous Database Painting Loop
        #

        while not self._end_threads:

            # wait for the next command to come through
            action = self._msg_queue.get()

            # NOTE(review): 'result' is assigned below but never consumed --
            # the per-action success/failure values are currently discarded

            # repaint the database based on the current state
            if action == self.MSG_REPAINT:
                result = self._paint_database()

            # forcibly repaint the database based on the current state
            elif action == self.MSG_FORCE_REPAINT:
                result = self._force_paint_database()

            # clear database base on the current state
            elif action == self.MSG_CLEAR:
                result = self._clear_database()

            # clear all possible database paint
            elif action == self.MSG_FORCE_CLEAR:
                result = self._force_clear_database()

            # check for a rebase of the painted data
            elif action == self.MSG_REBASE:
                result = self._rebase_database()

            # thrown internally to escape a stale paint, just ignore
            elif action == self.MSG_ABORT:
                continue

            # spin down the painting thread (this thread)
            elif action == self.MSG_TERMINATE:
                break

            # unknown command
            else:
                logger.error("UNKNOWN COMMAND! %s" % str(action))
                break

            # refresh the UI to ensure paint changes are rendered
            self._refresh_ui()

        # thread exit
        logger.debug("Exiting DatabasePainter thread...")
    def _async_action(self, paint_action, work_iterable):
        """
        Split a normal paint routine into interruptable chunks.
        Internal routine for asynchronous painting.

        paint_action: a callable (e.g. self._paint_nodes) that schedules an
            asynchronous paint/clear of one chunk and returns a job handle.
        work_iterable: the full collection of items (nodes / instructions)
            to be processed in chunks.

        Returns True if every chunk completed, False if the work was
        interrupted by thread shutdown or a newer queued repaint request.
        """
        CHUNK_SIZE = 1500 # somewhat arbitrary
        # split the given nodes into multiple paints
        for work_chunk in chunks(list(work_iterable), CHUNK_SIZE):
            #
            # reset the paint event signal so that it is ready for the next
            # paint request. it will let us know when the asynchrnous paint
            # action has completed in the IDA main thread
            #
            self._action_complete.clear()
            #
            # paint or unpaint a chunk of 'work' (nodes, or instructions) with
            # the given paint function (eg, paint_nodes, clear_instructions)
            #
            paint_job = paint_action(work_chunk)
            #
            # wait for the asynchronous paint event to complete or a signal that
            # we should end this thread (via end_threads)
            #
            while not (self._action_complete.wait(timeout=0.2) or self._end_threads):
                continue
            #
            # our end_threads signal/bool can only originate from the main IDA
            # thread (plugin termination). we make the assumption that no more
            # MFF_WRITE requests (eg, 'paint_action') will get processed.
            #
            # we do a best effort to cancel the in-flight job (just in case)
            # and return so we can exit the thread.
            #
            if self._end_threads:
                self._cancel_action(paint_job)
                return False
            #
            # the operation has been interrupted by a repaint request, bail
            # immediately so that we can process the next repaint
            #
            if not self._msg_queue.empty():
                return False
        # operation completed successfully
        return True
|
b0422fc52e96e66f285f90d9eb46cb83faab6a62
|
2c5dffdcdb3d9a1df7d318da69339a1dd223c89f
|
/tests/mysql_ext.py
|
5d9d6005430841c4abf2b178fdf11632bbd996b8
|
[
"MIT"
] |
permissive
|
coleifer/peewee
|
44cd6fe174b1a618852099fc70b3d1ae19ed6af1
|
a6f479dc0e8063a9a7f7053b04d93f34d67737ce
|
refs/heads/master
| 2023-08-31T19:23:04.646702
| 2023-08-30T12:12:23
| 2023-08-30T12:12:23
| 979,480
| 9,841
| 1,885
|
MIT
| 2023-08-22T12:54:42
| 2010-10-11T20:14:11
|
Python
|
UTF-8
|
Python
| false
| false
| 4,533
|
py
|
mysql_ext.py
|
import datetime
from peewee import *
from playhouse.mysql_ext import JSONField
from playhouse.mysql_ext import Match
from .base import IS_MYSQL_JSON
from .base import ModelDatabaseTestCase
from .base import ModelTestCase
from .base import TestModel
from .base import db_loader
from .base import requires_mysql
from .base import skip_if
from .base import skip_unless
# Optional driver imports: each alternative MySQL driver is only exercised
# when its package is importable in the current test environment.
try:
    import mariadb
except ImportError:
    mariadb = mariadb_db = None
else:
    mariadb_db = db_loader('mariadb')
try:
    import mysql.connector as mysql_connector
except ImportError:
    mysql_connector = None
# mysql-connector test database handle. Created unconditionally; the test
# classes below skip themselves when the driver is missing.
mysql_ext_db = db_loader('mysqlconnector')
class Person(TestModel):
    """Test model: a person with first/last name and a date of birth."""
    first = CharField()
    last = CharField()
    dob = DateField(default=datetime.date(2000, 1, 1))
class Note(TestModel):
    """Test model: a text note attached to a Person (many-to-one)."""
    person = ForeignKeyField(Person, backref='notes')
    content = TextField()
    timestamp = DateTimeField(default=datetime.datetime.now)
class KJ(TestModel):
    """Test model: key/JSON pair for exercising the MySQL JSONField."""
    key = CharField(primary_key=True, max_length=100)
    data = JSONField()
@requires_mysql
@skip_if(mysql_connector is None, 'mysql-connector not installed')
class TestMySQLConnector(ModelTestCase):
    """Smoke-test basic ORM operations against the mysql-connector driver."""
    database = mysql_ext_db
    requires = [Person, Note]
    def test_basic_operations(self):
        """Create rows in (nested) transactions, roll one back, then query."""
        with self.database.atomic():
            charlie, huey, zaizee = [Person.create(first=f, last='leifer')
                                     for f in ('charlie', 'huey', 'zaizee')]
            # Use nested-transaction.
            with self.database.atomic():
                data = (
                    (charlie, ('foo', 'bar', 'zai')),
                    (huey, ('meow', 'purr', 'hiss')),
                    (zaizee, ()))
                for person, notes in data:
                    for note in notes:
                        Note.create(person=person, content=note)
                # Savepoint rollback: the 'x y' person must not persist.
                with self.database.atomic() as sp:
                    Person.create(first='x', last='y')
                    sp.rollback()
        people = Person.select().order_by(Person.first)
        self.assertEqual([person.first for person in people],
                         ['charlie', 'huey', 'zaizee'])
        # Join must resolve related Person rows in a single query.
        with self.assertQueryCount(1):
            notes = (Note
                     .select(Note, Person)
                     .join(Person)
                     .order_by(Note.content))
            self.assertEqual([(n.person.first, n.content) for n in notes], [
                ('charlie', 'bar'),
                ('charlie', 'foo'),
                ('huey', 'hiss'),
                ('huey', 'meow'),
                ('huey', 'purr'),
                ('charlie', 'zai')])
@requires_mysql
@skip_if(mariadb is None, 'mariadb connector not installed')
class TestMariaDBConnector(TestMySQLConnector):
    """Re-run the mysql-connector test suite against the mariadb driver."""
    database = mariadb_db
@requires_mysql
@skip_unless(IS_MYSQL_JSON, 'requires MySQL 5.7+ or 8.x')
class TestMySQLJSONField(ModelTestCase):
    """Round-trip and query tests for the MySQL JSONField."""
    requires = [KJ]
    def test_mysql_json_field(self):
        # A representative sample of JSON-serializable values.
        values = (
            0, 1.0, 2.3,
            True, False,
            'string',
            ['foo', 'bar', 'baz'],
            {'k1': 'v1', 'k2': 'v2'},
            {'k3': [0, 1.0, 2.3], 'k4': {'x1': 'y1', 'x2': 'y2'}})
        for i, value in enumerate(values):
            # Verify data can be written.
            kj = KJ.create(key='k%s' % i, data=value)
            # Verify value is deserialized correctly.
            kj_db = KJ['k%s' % i]
            self.assertEqual(kj_db.data, value)
        # JSON-path extraction: 'k7' is the {'k1': 'v1', ...} row above.
        kj = KJ.select().where(KJ.data.extract('$.k1') == 'v1').get()
        self.assertEqual(kj.key, 'k7')
        # NULL is rejected by the column's NOT NULL constraint.
        with self.assertRaises(IntegrityError):
            KJ.create(key='kx', data=None)
@requires_mysql
class TestMatchExpression(ModelDatabaseTestCase):
    """SQL-generation tests for the MySQL full-text Match() helper."""
    requires = [Person]
    def test_match_expression(self):
        # Single-column match, default (natural language) mode.
        query = (Person
                 .select()
                 .where(Match(Person.first, 'charlie')))
        self.assertSQL(query, (
            'SELECT "t1"."id", "t1"."first", "t1"."last", "t1"."dob" '
            'FROM "person" AS "t1" '
            'WHERE MATCH("t1"."first") AGAINST(?)'), ['charlie'])
        # Multi-column match with an explicit modifier clause.
        query = (Person
                 .select()
                 .where(Match((Person.first, Person.last), 'huey AND zaizee',
                              'IN BOOLEAN MODE')))
        self.assertSQL(query, (
            'SELECT "t1"."id", "t1"."first", "t1"."last", "t1"."dob" '
            'FROM "person" AS "t1" '
            'WHERE MATCH("t1"."first", "t1"."last") '
            'AGAINST(? IN BOOLEAN MODE)'), ['huey AND zaizee'])
|
4ee76af03f478e2ac7a717373889db56a8d3c18e
|
90e76adae07c81392d64fdfcb95f659e8a0c3f11
|
/tests/unit/auth/test_sigv4.py
|
86c37ceca9ef98135d14ef9c384c05d87ab292f5
|
[
"Apache-2.0",
"MPL-2.0",
"MIT"
] |
permissive
|
boto/botocore
|
b9468d08c83372cf6930643a15f87801b79ffddd
|
7275c5d6e9273caf3804e0ce9491af080518798c
|
refs/heads/develop
| 2023-09-01T18:11:40.617674
| 2023-08-31T18:58:50
| 2023-08-31T18:58:50
| 6,670,942
| 1,289
| 1,234
|
Apache-2.0
| 2023-09-13T17:23:42
| 2012-11-13T13:25:36
|
Python
|
UTF-8
|
Python
| false
| false
| 6,559
|
py
|
test_sigv4.py
|
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Signature Version 4 test suite.
AWS provides a test suite for signature version 4:
https://github.com/awslabs/aws-c-auth/tree/v0.3.15/tests/aws-sig-v4-test-suite
This module contains logic to run these tests. The test files were
placed in ./aws4_testsuite, and we're using those to dynamically
generate testcases based on these files.
"""
import datetime
import io
import logging
import os
import re
from http.server import BaseHTTPRequestHandler
import pytest
import botocore.auth
from botocore.awsrequest import AWSRequest
from botocore.compat import parse_qsl, urlsplit
from botocore.credentials import Credentials
from tests import FreezeTime
# Static AWS example credentials defined by the official SigV4 test suite.
SECRET_KEY = "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY"
ACCESS_KEY = 'AKIDEXAMPLE'
# Fixed signing time applied to every case via FreezeTime (see below).
DATE = datetime.datetime(2015, 8, 30, 12, 36, 0)
SERVICE = 'service'
REGION = 'us-east-1'
# Location of the vendored aws4 test-suite fixture directories.
TESTSUITE_DIR = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), 'aws4_testsuite'
)
# The following tests are not run. Each test has a comment as
# to why the test is being ignored.
TESTS_TO_IGNORE = [
    # Bad request-line syntax, python's HTTP parser chokes on this.
    'normalize-path/get-space',
    # Multiple query params of the same key not supported by the SDKs.
    'get-vanilla-query-order-key-case',
    'get-vanilla-query-order-key',
    'get-vanilla-query-order-value',
]
log = logging.getLogger(__name__)
class RawHTTPRequest(BaseHTTPRequestHandler):
    """Parse a raw HTTP request (str or bytes) without a real socket.

    After construction, `command`, `path` and `headers` hold the parsed
    request; `error_code`/`error_message` are populated instead of raising
    when the request line is malformed.
    """

    def __init__(self, raw_request):
        # The stdlib parser consumes a byte stream, so normalize first.
        data = (raw_request.encode('utf-8')
                if isinstance(raw_request, str) else raw_request)
        self.rfile = io.BytesIO(data)
        self.raw_requestline = self.rfile.readline()
        self.error_code = None
        self.error_message = None
        self.parse_request()

    def send_error(self, code, message):
        # Capture parse errors instead of writing an HTTP error response.
        self.error_code = code
        self.error_message = message
def generate_test_cases():
    """Yield the relative path of every runnable test-suite case directory.

    A directory is a test case when it contains a ``.req`` fixture file;
    cases listed in TESTS_TO_IGNORE are skipped with a debug log entry.
    """
    for dirpath, _dirnames, filenames in os.walk(TESTSUITE_DIR):
        if not any(name.endswith('.req') for name in filenames):
            continue
        case_name = os.path.relpath(dirpath, TESTSUITE_DIR).replace(
            os.sep, '/'
        )
        if case_name in TESTS_TO_IGNORE:
            log.debug("Skipping test: %s", case_name)
            continue
        yield case_name
@pytest.mark.parametrize("test_case", generate_test_cases())
@FreezeTime(module=botocore.auth.datetime, date=DATE)
def test_signature_version_4(test_case):
    # Pytest entry point: one parametrized run per test-suite directory,
    # with botocore's clock frozen to the suite's fixed signing DATE.
    _test_signature_version_4(test_case)
def create_request_from_raw_request(raw_request):
    """Build an AWSRequest from a raw HTTP request string.

    Parses the request line, headers and body via RawHTTPRequest, stamps the
    fixed signing timestamp into the request context, and splits any query
    string into `params`. Raises if the raw request fails to parse.
    """
    request = AWSRequest()
    raw = RawHTTPRequest(raw_request)
    if raw.error_code is not None:
        raise Exception(raw.error_message)
    request.method = raw.command
    datetime_now = DATE
    request.context['timestamp'] = datetime_now.strftime('%Y%m%dT%H%M%SZ')
    for key, val in raw.headers.items():
        request.headers[key] = val
    request.data = raw.rfile.read()
    host = raw.headers.get('host', '')
    # For whatever reason, the BaseHTTPRequestHandler encodes
    # the first line of the response as 'iso-8859-1',
    # so we need decode this into utf-8.
    if isinstance(raw.path, str):
        raw.path = raw.path.encode('iso-8859-1').decode('utf-8')
    url = f'https://{host}{raw.path}'
    if '?' in url:
        # Query present: store path and parsed params separately so the
        # signer canonicalizes them itself.
        split_url = urlsplit(url)
        params = dict(parse_qsl(split_url.query))
        request.url = split_url.path
        request.params = params
    else:
        request.url = url
    return request
def _test_signature_version_4(test_case):
    """Run one SigV4 test-suite case end to end against SigV4Auth."""
    test_case = SignatureTestCase(test_case)
    request = create_request_from_raw_request(test_case.raw_request)
    auth = botocore.auth.SigV4Auth(test_case.credentials, SERVICE, REGION)
    actual_canonical_request = auth.canonical_request(request)
    actual_string_to_sign = auth.string_to_sign(
        request, actual_canonical_request
    )
    auth.add_auth(request)
    actual_auth_header = request.headers['Authorization']
    # Some stuff only works right when you go through auth.add_auth()
    # So don't assert the interim steps unless the end result was wrong.
    if actual_auth_header != test_case.authorization_header:
        assert_equal(
            actual_canonical_request,
            test_case.canonical_request,
            test_case.raw_request,
            'canonical_request',
        )
        assert_equal(
            actual_string_to_sign,
            test_case.string_to_sign,
            test_case.raw_request,
            'string_to_sign',
        )
        assert_equal(
            actual_auth_header,
            test_case.authorization_header,
            test_case.raw_request,
            'authheader',
        )
def assert_equal(actual, expected, raw_request, part):
    """Raise AssertionError with a diagnostic message if actual != expected."""
    if actual == expected:
        return
    message = (
        f"The {part} did not match"
        f"\nACTUAL:{actual!r} !=\nEXPECT:{expected!r}"
        f"\nThe raw request was:\n{raw_request}"
    )
    raise AssertionError(message)
class SignatureTestCase:
def __init__(self, test_case):
filepath = os.path.join(
TESTSUITE_DIR, test_case, os.path.basename(test_case)
)
self.raw_request = open(filepath + '.req', encoding='utf-8').read()
self.canonical_request = (
open(filepath + '.creq', encoding='utf-8').read().replace('\r', '')
)
self.string_to_sign = (
open(filepath + '.sts', encoding='utf-8').read().replace('\r', '')
)
self.authorization_header = (
open(filepath + '.authz', encoding='utf-8')
.read()
.replace('\r', '')
)
self.signed_request = open(filepath + '.sreq', encoding='utf-8').read()
token_pattern = r'^x-amz-security-token:(.*)$'
token_match = re.search(
token_pattern, self.canonical_request, re.MULTILINE
)
token = token_match.group(1) if token_match else None
self.credentials = Credentials(ACCESS_KEY, SECRET_KEY, token)
|
58f086328f3aee446d81dffdd7a602d49c4f443a
|
ef82d38558559c01eacff610391c420096f37826
|
/autogl/module/nas/scatter_utils.py
|
b40c2abb065f8ee1799b5c20a04af323c4378b27
|
[
"Apache-2.0"
] |
permissive
|
THUMNLab/AutoGL
|
472e89b43d22487d63e2b91e27c104c50f702649
|
8e9098657c23ed1a48326077557b65fcadde8c34
|
refs/heads/main
| 2023-06-26T11:10:02.804744
| 2022-12-30T06:04:33
| 2022-12-30T06:04:33
| 317,248,195
| 1,007
| 109
|
Apache-2.0
| 2022-12-30T05:43:50
| 2020-11-30T14:26:22
|
Python
|
UTF-8
|
Python
| false
| false
| 10,432
|
py
|
scatter_utils.py
|
# copied from torch_scatter
from typing import Optional, Tuple
import torch
def broadcast(src: torch.Tensor, other: torch.Tensor, dim: int):
    """Make `src` broadcastable against `other` along dimension `dim`.

    A 1-D `src` is first aligned to `dim` by prepending singleton dims; then
    trailing singleton dims are appended and the tensor is expanded (a view,
    no copy) to `other`'s full size.
    """
    if dim < 0:
        dim = other.dim() + dim
    if src.dim() == 1:
        for _ in range(0, dim):
            src = src.unsqueeze(0)
    for _ in range(src.dim(), other.dim()):
        src = src.unsqueeze(-1)
    return src.expand(other.size())
def scatter_sum(src: torch.Tensor, index: torch.Tensor, dim: int = -1,
                out: Optional[torch.Tensor] = None,
                dim_size: Optional[int] = None) -> torch.Tensor:
    """Sum entries of `src` into buckets selected by `index` along `dim`.

    When `out` is None, a zero tensor is allocated whose size along `dim` is
    `dim_size` if given, otherwise `index.max() + 1` (0 for an empty index).
    When `out` is supplied it is accumulated into **in place** and returned.
    """
    index = broadcast(index, src, dim)
    if out is None:
        size = list(src.size())
        if dim_size is not None:
            size[dim] = dim_size
        elif index.numel() == 0:
            size[dim] = 0
        else:
            size[dim] = int(index.max()) + 1
        out = torch.zeros(size, dtype=src.dtype, device=src.device)
    # Both branches of the original performed the identical scatter_add_;
    # collapsed to a single exit point.
    return out.scatter_add_(dim, index, src)
def scatter_add(src: torch.Tensor, index: torch.Tensor, dim: int = -1,
                out: Optional[torch.Tensor] = None,
                dim_size: Optional[int] = None) -> torch.Tensor:
    """Alias of :func:`scatter_sum`, kept for API compatibility."""
    return scatter_sum(src, index, dim, out, dim_size)
def scatter_mul(src: torch.Tensor, index: torch.Tensor, dim: int = -1,
                out: Optional[torch.Tensor] = None,
                dim_size: Optional[int] = None) -> torch.Tensor:
    # Thin wrapper over the compiled torch_scatter extension op; requires
    # the torch_scatter C++/CUDA library to be loaded at runtime.
    return torch.ops.torch_scatter.scatter_mul(src, index, dim, out, dim_size)
def scatter_mean(src: torch.Tensor, index: torch.Tensor, dim: int = -1,
                 out: Optional[torch.Tensor] = None,
                 dim_size: Optional[int] = None) -> torch.Tensor:
    # Mean reduction implemented as scatter_sum / per-bucket element count.
    out = scatter_sum(src, index, dim, out, dim_size)
    dim_size = out.size(dim)
    # Resolve the dimension of `index` to count along (index may be lower
    # rank than src and broadcast).
    index_dim = dim
    if index_dim < 0:
        index_dim = index_dim + src.dim()
    if index.dim() <= index_dim:
        index_dim = index.dim() - 1
    ones = torch.ones(index.size(), dtype=src.dtype, device=src.device)
    count = scatter_sum(ones, index, index_dim, None, dim_size)
    # Clamp empty buckets to 1 to avoid division by zero.
    count[count < 1] = 1
    count = broadcast(count, out, dim)
    if out.is_floating_point():
        out.true_divide_(count)
    else:
        # Integer inputs: floor division keeps the dtype.
        out.div_(count, rounding_mode='floor')
    return out
def scatter_min(
        src: torch.Tensor, index: torch.Tensor, dim: int = -1,
        out: Optional[torch.Tensor] = None,
        dim_size: Optional[int] = None) -> Tuple[torch.Tensor, torch.Tensor]:
    # Thin wrapper over the compiled extension op; returns (values, arg-indices).
    return torch.ops.torch_scatter.scatter_min(src, index, dim, out, dim_size)
def scatter_max(
        src: torch.Tensor, index: torch.Tensor, dim: int = -1,
        out: Optional[torch.Tensor] = None,
        dim_size: Optional[int] = None) -> Tuple[torch.Tensor, torch.Tensor]:
    # Thin wrapper over the compiled extension op; returns (values, arg-indices).
    return torch.ops.torch_scatter.scatter_max(src, index, dim, out, dim_size)
def scatter(src: torch.Tensor, index: torch.Tensor, dim: int = -1,
            out: Optional[torch.Tensor] = None, dim_size: Optional[int] = None,
            reduce: str = "sum") -> torch.Tensor:
    r"""
    |

    .. image:: https://raw.githubusercontent.com/rusty1s/pytorch_scatter/
            master/docs/source/_figures/add.svg?sanitize=true
        :align: center
        :width: 400px

    |

    Reduces all values from the :attr:`src` tensor into :attr:`out` at the
    indices specified in the :attr:`index` tensor along a given axis
    :attr:`dim`.
    For each value in :attr:`src`, its output index is specified by its index
    in :attr:`src` for dimensions outside of :attr:`dim` and by the
    corresponding value in :attr:`index` for dimension :attr:`dim`.
    The applied reduction is defined via the :attr:`reduce` argument.

    Formally, if :attr:`src` and :attr:`index` are :math:`n`-dimensional
    tensors with size :math:`(x_0, ..., x_{i-1}, x_i, x_{i+1}, ..., x_{n-1})`
    and :attr:`dim` = `i`, then :attr:`out` must be an :math:`n`-dimensional
    tensor with size :math:`(x_0, ..., x_{i-1}, y, x_{i+1}, ..., x_{n-1})`.
    Moreover, the values of :attr:`index` must be between :math:`0` and
    :math:`y - 1`, although no specific ordering of indices is required.
    The :attr:`index` tensor supports broadcasting in case its dimensions do
    not match with :attr:`src`.

    For one-dimensional tensors with :obj:`reduce="sum"`, the operation
    computes

    .. math::
        \mathrm{out}_i = \mathrm{out}_i + \sum_j~\mathrm{src}_j

    where :math:`\sum_j` is over :math:`j` such that
    :math:`\mathrm{index}_j = i`.

    .. note::
        This operation is implemented via atomic operations on the GPU and is
        therefore **non-deterministic** since the order of parallel operations
        to the same value is undetermined.
        For floating-point variables, this results in a source of variance in
        the result.

    :param src: The source tensor.
    :param index: The indices of elements to scatter.
    :param dim: The axis along which to index. (default: :obj:`-1`)
    :param out: The destination tensor.
    :param dim_size: If :attr:`out` is not given, automatically create output
        with size :attr:`dim_size` at dimension :attr:`dim`.
        If :attr:`dim_size` is not given, a minimal sized output tensor
        according to :obj:`index.max() + 1` is returned.
    :param reduce: The reduce operation (:obj:`"sum"`, :obj:`"mul"`,
        :obj:`"mean"`, :obj:`"min"` or :obj:`"max"`). (default: :obj:`"sum"`)
    :rtype: :class:`Tensor`

    :raises ValueError: if :attr:`reduce` is not a supported operation.

    .. code-block:: python

        from torch_scatter import scatter

        src = torch.randn(10, 6, 64)
        index = torch.tensor([0, 1, 0, 1, 2, 1])

        # Broadcasting in the first and last dim.
        out = scatter(src, index, dim=1, reduce="sum")

        print(out.size())

    .. code-block::

        torch.Size([10, 3, 64])
    """
    # Dispatch on the reduction name; normalized to a single if/elif chain
    # (the original mixed a bare `if` into the chain).
    if reduce == 'sum' or reduce == 'add':
        return scatter_sum(src, index, dim, out, dim_size)
    elif reduce == 'mul':
        return scatter_mul(src, index, dim, out, dim_size)
    elif reduce == 'mean':
        return scatter_mean(src, index, dim, out, dim_size)
    elif reduce == 'min':
        return scatter_min(src, index, dim, out, dim_size)[0]
    elif reduce == 'max':
        return scatter_max(src, index, dim, out, dim_size)[0]
    else:
        # BUGFIX: the original raised a bare ValueError with no message.
        raise ValueError(f"Unsupported reduce operation: {reduce!r}")
def segment_sum_csr(src: torch.Tensor, indptr: torch.Tensor,
                    out: Optional[torch.Tensor] = None) -> torch.Tensor:
    # Thin wrapper over the compiled torch_scatter extension op.
    return torch.ops.torch_scatter.segment_sum_csr(src, indptr, out)
def segment_add_csr(src: torch.Tensor, indptr: torch.Tensor,
                    out: Optional[torch.Tensor] = None) -> torch.Tensor:
    # Alias of segment_sum_csr, kept for API compatibility.
    return torch.ops.torch_scatter.segment_sum_csr(src, indptr, out)
def segment_mean_csr(src: torch.Tensor, indptr: torch.Tensor,
                     out: Optional[torch.Tensor] = None) -> torch.Tensor:
    # Thin wrapper over the compiled torch_scatter extension op.
    return torch.ops.torch_scatter.segment_mean_csr(src, indptr, out)
def segment_min_csr(
        src: torch.Tensor, indptr: torch.Tensor,
        out: Optional[torch.Tensor] = None
) -> Tuple[torch.Tensor, torch.Tensor]:
    # Thin wrapper; returns (values, arg-indices).
    return torch.ops.torch_scatter.segment_min_csr(src, indptr, out)
def segment_max_csr(
        src: torch.Tensor, indptr: torch.Tensor,
        out: Optional[torch.Tensor] = None
) -> Tuple[torch.Tensor, torch.Tensor]:
    # Thin wrapper; returns (values, arg-indices).
    return torch.ops.torch_scatter.segment_max_csr(src, indptr, out)
def segment_csr(src: torch.Tensor, indptr: torch.Tensor,
                out: Optional[torch.Tensor] = None,
                reduce: str = "sum") -> torch.Tensor:
    r"""
    Reduces all values from the :attr:`src` tensor into :attr:`out` within the
    ranges specified in the :attr:`indptr` tensor along the last dimension of
    :attr:`indptr`.
    For each value in :attr:`src`, its output index is specified by its index
    in :attr:`src` for dimensions outside of :obj:`indptr.dim() - 1` and by the
    corresponding range index in :attr:`indptr` for dimension
    :obj:`indptr.dim() - 1`.
    The applied reduction is defined via the :attr:`reduce` argument.

    Formally, if :attr:`src` and :attr:`indptr` are :math:`n`-dimensional and
    :math:`m`-dimensional tensors with
    size :math:`(x_0, ..., x_{m-1}, x_m, x_{m+1}, ..., x_{n-1})` and
    :math:`(x_0, ..., x_{m-2}, y)`, respectively, then :attr:`out` must be an
    :math:`n`-dimensional tensor with size
    :math:`(x_0, ..., x_{m-2}, y - 1, x_{m}, ..., x_{n-1})`.
    Moreover, the values of :attr:`indptr` must be between :math:`0` and
    :math:`x_m` in ascending order.
    The :attr:`indptr` tensor supports broadcasting in case its dimensions do
    not match with :attr:`src`.

    For one-dimensional tensors with :obj:`reduce="sum"`, the operation
    computes

    .. math::
        \mathrm{out}_i =
        \sum_{j = \mathrm{indptr}[i]}^{\mathrm{indptr}[i+1]-1}~\mathrm{src}_j.

    Due to the use of index pointers, :meth:`segment_csr` is the fastest
    method to apply for grouped reductions.

    .. note::
        In contrast to :meth:`scatter()` and :meth:`segment_coo`, this
        operation is **fully-deterministic**.

    :param src: The source tensor.
    :param indptr: The index pointers between elements to segment.
        The number of dimensions of :attr:`index` needs to be less than or
        equal to :attr:`src`.
    :param out: The destination tensor.
    :param reduce: The reduce operation (:obj:`"sum"`, :obj:`"mean"`,
        :obj:`"min"` or :obj:`"max"`). (default: :obj:`"sum"`)
    :rtype: :class:`Tensor`

    :raises ValueError: if :attr:`reduce` is not a supported operation.

    .. code-block:: python

        from torch_scatter import segment_csr

        src = torch.randn(10, 6, 64)
        indptr = torch.tensor([0, 2, 5, 6])
        indptr = indptr.view(1, -1)  # Broadcasting in the first and last dim.

        out = segment_csr(src, indptr, reduce="sum")

        print(out.size())

    .. code-block::

        torch.Size([10, 3, 64])
    """
    if reduce == 'sum' or reduce == 'add':
        return segment_sum_csr(src, indptr, out)
    elif reduce == 'mean':
        return segment_mean_csr(src, indptr, out)
    elif reduce == 'min':
        return segment_min_csr(src, indptr, out)[0]
    elif reduce == 'max':
        return segment_max_csr(src, indptr, out)[0]
    else:
        # BUGFIX: the original raised a bare ValueError with no message.
        raise ValueError(f"Unsupported reduce operation: {reduce!r}")
def gather_csr(src: torch.Tensor, indptr: torch.Tensor,
               out: Optional[torch.Tensor] = None) -> torch.Tensor:
    # Thin wrapper over the compiled torch_scatter extension op (inverse of
    # segment_csr: expands each segment value back over its index range).
    return torch.ops.torch_scatter.gather_csr(src, indptr, out)
|
0362a4beb77b811a6dcfb5a8e0728495ed4ceccf
|
8868204daf8de5a2910cedefe29e6c56340b06e6
|
/Projects/ESP-MicroPython/MQTT/esp_dht_mqtt.py
|
c72f851b6ca7ddc9275e5625e18e9ba1981d9d8a
|
[] |
no_license
|
RuiSantosdotme/Random-Nerd-Tutorials
|
0f79fb56f9b9732bfb272e8f4abedb803a7b4757
|
dd6133e1205a2cb5fd408cc198eba2aa14f9b163
|
refs/heads/master
| 2023-08-30T15:17:08.020453
| 2023-08-22T18:12:09
| 2023-08-22T18:12:09
| 23,052,165
| 379
| 261
| null | 2023-03-31T01:57:45
| 2014-08-17T22:43:29
|
C++
|
UTF-8
|
Python
| false
| false
| 2,177
|
py
|
esp_dht_mqtt.py
|
# Complete project details at https://RandomNerdTutorials.com/micropython-mqtt-publish-dht11-dht22-esp32-esp8266/
# MicroPython script: connect to Wi-Fi and publish DHT readings over MQTT.
import time
from umqttsimple import MQTTClient
import ubinascii
import machine
import micropython
import network
import esp
from machine import Pin
import dht
# Disable vendor OS debug output on the UART.
esp.osdebug(None)
import gc
gc.collect()
# --- user configuration ---
ssid = 'REPLACE_WITH_YOUR_SSID'
password = 'REPLACE_WITH_YOUR_PASSWORD'
mqtt_server = '192.168.1.XXX'
#EXAMPLE IP ADDRESS or DOMAIN NAME
#mqtt_server = '192.168.1.106'
# Unique MQTT client id derived from the board's hardware id.
client_id = ubinascii.hexlify(machine.unique_id())
topic_pub_temp = b'esp/dht/temperature'
topic_pub_hum = b'esp/dht/humidity'
# Publish throttle state: last publish time and minimum interval (seconds).
last_message = 0
message_interval = 5
# Bring up the Wi-Fi station interface and block until associated.
# NOTE(review): `== False` would be more idiomatic as `not ...`.
station = network.WLAN(network.STA_IF)
station.active(True)
station.connect(ssid, password)
while station.isconnected() == False:
    pass
print('Connection successful')
# DHT sensor on GPIO 14; swap the commented line for a DHT11.
sensor = dht.DHT22(Pin(14))
#sensor = dht.DHT11(Pin(14))
def connect_mqtt():
    """Create, connect and return the MQTT client for `mqtt_server`."""
    # NOTE(review): the `global` declaration is read-only here and is not
    # strictly required.
    global client_id, mqtt_server
    client = MQTTClient(client_id, mqtt_server)
    #client = MQTTClient(client_id, mqtt_server, user=your_username, password=your_password)
    client.connect()
    print('Connected to %s MQTT broker' % (mqtt_server))
    return client
def restart_and_reconnect():
    """Last-resort recovery: wait 10 s, then hard-reset the board."""
    print('Failed to connect to MQTT broker. Reconnecting...')
    time.sleep(10)
    machine.reset()
def read_sensor():
    """Sample the DHT sensor.

    Returns (temperature, humidity) as comma-terminated ASCII byte strings
    ready for MQTT publishing, or a single error string on failure.
    NOTE(review): callers unpack two values (`temp, hum = read_sensor()`),
    so the single-string error returns will fail at the call site — confirm
    the intended error contract before changing it.
    """
    try:
        sensor.measure()
        temp = sensor.temperature()
        # uncomment for Fahrenheit
        #temp = temp * (9/5) + 32.0
        hum = sensor.humidity()
        if (isinstance(temp, float) and isinstance(hum, float)) or (isinstance(temp, int) and isinstance(hum, int)):
            # BUGFIX: bytes literals have no .format() method (the original
            # b'{0:3.1f},'.format(...) raises AttributeError). Format as a
            # str first, then encode to the bytes payload MQTT expects.
            temp = '{0:3.1f},'.format(temp).encode()
            hum = '{0:3.1f},'.format(hum).encode()
            return temp, hum
        else:
            return('Invalid sensor readings.')
    except OSError as e:
        return('Failed to read sensor.')
# Initial broker connection; fall back to a board reset on failure.
try:
    client = connect_mqtt()
except OSError as e:
    restart_and_reconnect()
# Main loop: publish fresh readings at most every `message_interval` seconds.
while True:
    try:
        if (time.time() - last_message) > message_interval:
            temp, hum = read_sensor()
            print(temp)
            print(hum)
            client.publish(topic_pub_temp, temp)
            client.publish(topic_pub_hum, hum)
            last_message = time.time()
    except OSError as e:
        # Any network/sensor OSError triggers a full reset-and-reconnect.
        restart_and_reconnect()
|
1465f158b21de9ff0fd90f9861ad6d418b1511aa
|
abbc2d332bdfa036ac12438983e6d74cf4107e64
|
/SiamFCpp/SiamFCpp-video_analyst/siamfcpp/evaluation/vot_benchmark/bbox_helper.py
|
ef61f170c6e48296bde684e509772717cf5caf94
|
[] |
permissive
|
HonglinChu/SiamTrackers
|
c494cff7543a433e8ec7dbf6d9439b1e7395b0c0
|
805208b5348346d35e64abcbe901a3829743e157
|
refs/heads/master
| 2023-08-29T06:50:59.532271
| 2023-03-06T09:13:53
| 2023-03-06T09:13:53
| 253,718,080
| 1,166
| 243
|
Apache-2.0
| 2023-08-03T16:39:53
| 2020-04-07T07:24:00
|
Python
|
UTF-8
|
Python
| false
| false
| 2,198
|
py
|
bbox_helper.py
|
# --------------------------------------------------------
# SiamMask
# Licensed under The MIT License
# Written by Qiang Wang (wangqiang2015 at ia.ac.cn)
# --------------------------------------------------------
from collections import namedtuple
import numpy as np
# Box representations: corner form (x1, y1, x2, y2) and center form (x, y, w, h).
Corner = namedtuple('Corner', 'x1 y1 x2 y2')
BBox = Corner
Center = namedtuple('Center', 'x y w h')
def corner2center(corner):
    """
    :param corner: Corner or np.array 4*N
    :return: Center or 4 np.array N
    """
    if isinstance(corner, Corner):
        x1, y1, x2, y2 = corner
        return Center((x1 + x2) * 0.5, (y1 + y2) * 0.5, (x2 - x1), (y2 - y1))
    # Array-like input: element-wise conversion, returned as a plain tuple.
    x1, y1, x2, y2 = corner[0], corner[1], corner[2], corner[3]
    return (x1 + x2) * 0.5, (y1 + y2) * 0.5, x2 - x1, y2 - y1
def center2corner(center):
    """
    :param center: Center or np.array 4*N
    :return: Corner or np.array 4*N
    """
    if isinstance(center, Center):
        x, y, w, h = center
        return Corner(x - w * 0.5, y - h * 0.5, x + w * 0.5, y + h * 0.5)
    # Array-like input: element-wise conversion, returned as a plain tuple.
    x, y, w, h = center[0], center[1], center[2], center[3]
    half_w = w * 0.5
    half_h = h * 0.5
    return x - half_w, y - half_h, x + half_w, y + half_h
def cxy_wh_2_rect(pos, sz):
    """Convert a (center, size) box to a 0-indexed [x, y, w, h] rect array."""
    cx, cy = pos[0], pos[1]
    w, h = sz[0], sz[1]
    return np.array([cx - w / 2, cy - h / 2, w, h])  # 0-index
def get_axis_aligned_bbox(region):
    """Return an axis-aligned (cx, cy, w, h) approximation of `region`.

    An 8-element region is treated as a rotated quadrilateral: its bounding
    box is rescaled so the result preserves the polygon's area. A 4-element
    region is treated as an (x, y, w, h) rect and converted to center form.
    """
    if region.size == 8:
        xs = region[0::2]
        ys = region[1::2]
        cx = np.mean(xs)
        cy = np.mean(ys)
        x1, x2 = min(xs), max(xs)
        y1, y2 = min(ys), max(ys)
        # Ratio of the quadrilateral's area to its bounding-box area scales
        # the output so the area is preserved.
        poly_area = (np.linalg.norm(region[0:2] - region[2:4]) *
                     np.linalg.norm(region[2:4] - region[4:6]))
        rect_area = (x2 - x1) * (y2 - y1)
        scale = np.sqrt(poly_area / rect_area)
        w = scale * (x2 - x1) + 1
        h = scale * (y2 - y1) + 1
    else:
        w = region[2]
        h = region[3]
        cx = region[0] + (w - 1) / 2
        cy = region[1] + (h - 1) / 2
    return cx, cy, w, h
|
62151ea2ebb74ba844f76ac4270118ba8584cace
|
3c41443364da8b44c74dce08ef94a1acd1b66b3e
|
/osf/metadata/serializers/datacite/datacite_json.py
|
98c57681a17b86fecc4e2d4c9be89781374a3a03
|
[
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-warranty-disclaimer",
"AGPL-3.0-only",
"LGPL-2.0-or-later",
"LicenseRef-scancode-proprietary-license",
"MPL-1.1",
"CPAL-1.0",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause",
"Apache-2.0"
] |
permissive
|
CenterForOpenScience/osf.io
|
71d9540be7989f7118a33e15bc4a6ce2d2492ac1
|
a3e0a0b9ddda5dd75fc8248d58f3bcdeece0323e
|
refs/heads/develop
| 2023-09-04T03:21:14.970917
| 2023-08-31T14:49:20
| 2023-08-31T14:49:20
| 10,199,599
| 683
| 390
|
Apache-2.0
| 2023-09-14T17:07:52
| 2013-05-21T15:53:37
|
Python
|
UTF-8
|
Python
| false
| false
| 2,219
|
py
|
datacite_json.py
|
import json
from osf.metadata.serializers import _base
from .datacite_tree_walker import DataciteTreeWalker
def _visit_tree_branch_json(parent, child_name: str, *, is_list=False, text=None, attrib=None):
assert isinstance(parent, (dict, list)), (
f'expected parent to be list or dict, got type {type(parent)} (parent={parent})'
)
parent_is_list = isinstance(parent, list)
if is_list:
assert not parent_is_list
if (text is None) and (attrib is None):
child = [] # normal is_list case
else:
# HACK (part 1) to support datacite `affiliation` (repeated item without list wrapper)
child = _child_json_object(child_name, text, attrib)
elif text and not attrib and not parent_is_list:
child = text
else:
child = _child_json_object(child_name, text, attrib)
if parent_is_list:
parent.append(child)
else:
if is_list and isinstance(child, dict): # HACK (part 2)
parent.setdefault(child_name, []).append(child)
else:
parent[child_name] = child
return child
def _child_json_object(child_name, text, attrib) -> dict:
json_obj = {}
if text is not None:
try:
json_obj[child_name] = text.toPython() # quacks like rdflib.Literal
except AttributeError:
json_obj[child_name] = str(text)
language = getattr(text, 'language', None)
if language:
json_obj['lang'] = language
if attrib is not None:
assert child_name not in attrib
json_obj.update(attrib)
return json_obj
class DataciteJsonMetadataSerializer(_base.MetadataSerializer):
    """Serialize a metadata basket into datacite-flavored JSON."""
    mediatype = 'application/json'
    def filename_for_itemid(self, itemid: str):
        # Download filename convention: "<itemid>-datacite.json".
        return f'{itemid}-datacite.json'
    def serialize(self) -> str:
        # Stable output: sorted keys and fixed indentation.
        return json.dumps(
            self.metadata_as_dict(),
            indent=2,
            sort_keys=True,
        )
    def metadata_as_dict(self) -> dict:
        # Walk the metadata tree, letting _visit_tree_branch_json build the
        # nested dict structure rooted at root_dict.
        root_dict = {}
        walker = DataciteTreeWalker(self.basket, root_dict, _visit_tree_branch_json)
        walker.walk(doi_override=self.serializer_config.get('doi_value'))
        return root_dict
|
b15a6c667a2ed0b7ab2c1f59d6e21965eae65367
|
d14b5d78b72711e4614808051c0364b7bd5d6d98
|
/third_party/llvm-16.0/llvm/lib/Analysis/models/gen-regalloc-eviction-test-model.py
|
e41e71a09d828181ac7d5f4ff42f8a2db49858d3
|
[
"Apache-2.0"
] |
permissive
|
google/swiftshader
|
76659addb1c12eb1477050fded1e7d067f2ed25b
|
5be49d4aef266ae6dcc95085e1e3011dad0e7eb7
|
refs/heads/master
| 2023-07-21T23:19:29.415159
| 2023-07-21T19:58:29
| 2023-07-21T20:50:19
| 62,297,898
| 1,981
| 306
|
Apache-2.0
| 2023-07-05T21:29:34
| 2016-06-30T09:25:24
|
C++
|
UTF-8
|
Python
| false
| false
| 1,962
|
py
|
gen-regalloc-eviction-test-model.py
|
"""Generate a mock model for LLVM tests for Register Allocation.
The generated model is not a neural net - it is just a tf.function with the
correct input and output parameters. By construction, the mock model will always
output the first liverange that can be evicted.
"""
import os
import sys
import tensorflow as tf
POLICY_DECISION_LABEL = 'index_to_evict'
POLICY_OUTPUT_SPEC = """
[
{
"logging_name": "index_to_evict",
"tensor_spec": {
"name": "StatefulPartitionedCall",
"port": 0,
"type": "int64_t",
"shape": [
1
]
}
}
]
"""
PER_REGISTER_FEATURE_LIST = ['mask']
NUM_REGISTERS = 33
def get_input_signature():
    """Returns (time_step_spec, action_spec) for LLVM register allocation."""
    return {
        key: tf.TensorSpec(dtype=tf.int64, shape=(NUM_REGISTERS), name=key)
        for key in PER_REGISTER_FEATURE_LIST
    }
def get_output_spec_path(path):
    """Return the location of the output-spec JSON inside *path*."""
    spec_filename = 'output_spec.json'
    return os.path.join(path, spec_filename)
def build_mock_model(path):
    """Build and save the mock model with the given signature."""
    module = tf.Module()
    # We have to set this useless variable in order for the TF C API to correctly
    # intake it
    module.var = tf.Variable(0, dtype=tf.int64)
    def action(*inputs):
        # Deterministic policy: always pick the first evictable liverange
        # (argmax over the boolean mask); `+ module.var` (zero) only ties the
        # variable into the graph.
        result = tf.math.argmax(
            tf.cast(inputs[0]['mask'], tf.int32), axis=-1) + module.var
        return {POLICY_DECISION_LABEL: result}
    module.action = tf.function()(action)
    action = {
        'action': module.action.get_concrete_function(get_input_signature())
    }
    # Save the model, then write the output spec JSON next to it.
    tf.saved_model.save(module, path, signatures=action)
    output_spec_path = get_output_spec_path(path)
    with open(output_spec_path, 'w') as f:
        print(f'Writing output spec to {output_spec_path}.')
        f.write(POLICY_OUTPUT_SPEC)
def main(argv):
    """Entry point: ``argv[1]`` is the directory to write the mock model into."""
    assert len(argv) == 2
    build_mock_model(argv[1])
if __name__ == '__main__':
main(sys.argv)
|
2b48c9cefabe71786ad0c6d2fcd3c36701afe20c
|
75ac6a0090829f7afd7e6e663873bbb06d16d3d1
|
/servicecatalog_factory/workflow/portfolios/create_portfolio_task_test.py
|
08a09c4638a8f262f425f87ef3a28a3bbb29b33f
|
[
"Apache-2.0"
] |
permissive
|
awslabs/aws-service-catalog-factory
|
113206b3290ef6b6cfb30b8c78a2111176439546
|
b185b701000895be01d4dc3ad0c6bbd93f4f7ad2
|
refs/heads/master
| 2023-08-10T04:37:39.475085
| 2023-08-09T11:36:44
| 2023-08-09T11:36:44
| 180,665,776
| 137
| 51
|
Apache-2.0
| 2023-08-09T11:31:51
| 2019-04-10T21:21:38
|
Python
|
UTF-8
|
Python
| false
| false
| 1,479
|
py
|
create_portfolio_task_test.py
|
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
from unittest import skip
from servicecatalog_factory.workflow import tasks_unit_tests_helper
class CreatePortfolioTaskTest(tasks_unit_tests_helper.FactoryTaskUnitTest):
    """Unit tests for CreatePortfolioTask."""

    # Fixture values handed to the task under test.
    region = "region"
    portfolio_name = "portfolio_name"
    description = "description"
    provider_name = "provider_name"
    tags = []

    def setUp(self) -> None:
        # Imported lazily so an import failure surfaces per-test.
        from servicecatalog_factory.workflow.portfolios import create_portfolio_task

        self.module = create_portfolio_task
        self.sut = self.module.CreatePortfolioTask(
            region=self.region,
            portfolio_name=self.portfolio_name,
            description=self.description,
            provider_name=self.provider_name,
            tags=self.tags,
            **self.minimal_common_params,
        )
        self.wire_up_mocks()

    def test_params_for_results_display(self):
        # exercise
        actual_result = self.sut.params_for_results_display()
        # verify
        self.assertEqual(
            {
                "region": self.region,
                "portfolio_name": self.portfolio_name,
                "task_reference": self.task_reference,
            },
            actual_result,
        )

    @skip
    def test_run(self):
        # Placeholder until run() behavior is specified.
        self.sut.run()
        raise NotImplementedError()
|
72f7c41919dfe07c5cc3af715c2c56777d672a5d
|
cb4f118412a55c52d720bc79e4074606622920ac
|
/arcade/tilemap/tilemap.py
|
726479600b71ad6f1ae277b2b92f4b1493782e03
|
[
"MIT"
] |
permissive
|
pythonarcade/arcade
|
3e536306f0c44f911de149b58958d8b609ffad4b
|
908664efc256697d3098a347f63d217d97841782
|
refs/heads/development
| 2023-08-29T02:53:01.599145
| 2023-08-26T16:54:34
| 2023-08-26T16:54:34
| 49,003,082
| 786
| 215
|
NOASSERTION
| 2023-09-12T18:38:54
| 2016-01-04T14:46:52
|
Python
|
UTF-8
|
Python
| false
| false
| 41,396
|
py
|
tilemap.py
|
"""
This module provides functionality to load in JSON map files from
the Tiled Map Editor. This is achieved using the pytiled-parser
library.
For more info on Tiled see: https://www.mapeditor.org/
For more info on pytiled-parser see: https://github.com/Beefy-Swain/pytiled_parser
"""
from __future__ import annotations
import copy
import math
import os
from collections import OrderedDict
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union, cast
import pytiled_parser
import pytiled_parser.tiled_object
from pytiled_parser import Color
from arcade import (
AnimatedTimeBasedSprite,
AnimationKeyframe,
Sprite,
SpriteList,
get_window,
)
from arcade.hitbox import HitBoxAlgorithm, RotatableHitBox
from arcade.texture.loading import _load_tilemap_texture
if TYPE_CHECKING:
from arcade import TextureAtlas
from pyglet.math import Vec2
from arcade.math import rotate_point
from arcade.resources import resolve
from arcade.types import Point, Rect, TiledObject
_FLIPPED_HORIZONTALLY_FLAG = 0x80000000
_FLIPPED_VERTICALLY_FLAG = 0x40000000
_FLIPPED_DIAGONALLY_FLAG = 0x20000000
__all__ = [
"TileMap",
"load_tilemap",
"read_tmx"
]
prop_to_float = cast(Callable[[pytiled_parser.Property], float], float)
def _get_image_info_from_tileset(tile: pytiled_parser.Tile) -> Tuple[int, int, int, int]:
image_x = 0
image_y = 0
if tile.tileset.image is not None:
margin = tile.tileset.margin or 0
spacing = tile.tileset.spacing or 0
row = tile.id // tile.tileset.columns
image_y = margin + row * (tile.tileset.tile_height + spacing)
col = tile.id % tile.tileset.columns
image_x = margin + col * (tile.tileset.tile_width + spacing)
if tile.tileset.image:
width = tile.tileset.tile_width
height = tile.tileset.tile_height
else:
image_x = tile.x
image_y = tile.y
width = tile.width
height = tile.height
return image_x, image_y, width, height
def _get_image_source(
tile: pytiled_parser.Tile,
map_directory: Optional[str],
) -> Optional[Path]:
image_file = None
if tile.image:
image_file = tile.image
elif tile.tileset.image:
image_file = tile.tileset.image
if not image_file:
print(
f"Warning for tile {tile.id}, no image source listed either for individual tile, or as a tileset."
)
return None
if os.path.exists(image_file):
return image_file
if map_directory:
try2 = Path(map_directory, image_file)
if os.path.exists(try2):
return try2
print(f"Warning, can't find image {image_file} for tile {tile.id}")
return None
class TileMap:
"""
Class that represents a fully parsed and loaded map from Tiled.
For examples on how to use this class, see:
https://api.arcade.academy/en/latest/examples/platform_tutorial/step_09.html
:param Union[str, Path] map_file: A JSON map file for a Tiled map to initialize from
:param scaling: Global scaling to apply to all Sprites.
:param Dict[str, Dict[str, Any]] layer_options: Extra parameters for each layer.
:param use_spatial_hash: If set to True, this will make moving a sprite
in the SpriteList slower, but it will speed up collision detection
with items in the SpriteList. Great for doing collision detection
with static walls/platforms.
:param hit_box_algorithm: The hit box algorithm to use for the Sprite's in this layer.
:param tiled_map: An already parsed pytiled-parser map object.
Passing this means that the ``map_file`` argument will be ignored, and the pre-parsed
map will instead be used. This can be helpful for working with Tiled World files.
:param offset: Can be used to offset the position of all sprites and objects
within the map. This will be applied in addition to any offsets from Tiled. This value
can be overridden with the layer_options dict.
:param texture_atlas: A default texture atlas to use for the
SpriteLists created by this map. If not supplied the global default atlas will be used.
:param lazy: SpriteLists will be created lazily.
The `layer_options` parameter can be used to specify per layer arguments.
The available options for this are:
use_spatial_hash - A boolean to enable spatial hashing on this layer's SpriteList.
scaling - A float providing layer specific Sprite scaling.
hit_box_algorithm - The hit box algorithm to use for the Sprite's in this layer.
offset - A tuple containing X and Y position offsets for the layer
custom_class - All objects in the layer are created from this class instead of Sprite. \
Must be subclass of Sprite.
custom_class_args - Custom arguments, passed into the constructor of the custom_class
texture_atlas - A texture atlas to use for the SpriteList from this layer, if none is \
supplied then the one defined at the map level will be used.
For example:
code-block::
layer_options = {
"Platforms": {
"use_spatial_hash": True,
"scaling": 2.5,
"offset": (-128, 64),
"custom_class": Platform,
"custom_class_args": {
"health": 100
}
},
}
The keys and their values in each layer are passed to the layer processing functions
using the `**` operator on the dictionary.
"""
tiled_map: pytiled_parser.TiledMap
"""
The pytiled-parser map object. This can be useful for implementing features
that aren't supported by this class by accessing the raw map data directly.
"""
width: float
"The width of the map in tiles. This is the number of tiles, not pixels."
height: float
"The height of the map in tiles. This is the number of tiles, not pixels."
tile_width: float
"The width in pixels of each tile."
tile_height: float
"The height in pixels of each tile."
background_color: Optional[Color]
"The background color of the map."
scaling: float
"A global scaling value to be applied to all Sprites in the map."
sprite_lists: Dict[str, SpriteList]
"""A dictionary mapping SpriteLists to their layer names. This is used
for all tile layers of the map."""
object_lists: Dict[str, List[TiledObject]]
"""
A dictionary mapping TiledObjects to their layer names. This is used
for all object layers of the map.
"""
offset: Vec2
"A tuple containing the X and Y position offset values."
def __init__(
self,
map_file: Union[str, Path] = "",
scaling: float = 1.0,
layer_options: Optional[Dict[str, Dict[str, Any]]] = None,
use_spatial_hash: bool = False,
hit_box_algorithm: Optional[HitBoxAlgorithm] = None,
tiled_map: Optional[pytiled_parser.TiledMap] = None,
offset: Vec2 = Vec2(0, 0),
texture_atlas: Optional["TextureAtlas"] = None,
lazy: bool = False,
) -> None:
if not map_file and not tiled_map:
raise AttributeError(
"Initialized TileMap with an empty map_file or no map_object argument"
)
if tiled_map:
self.tiled_map = tiled_map
else:
# If we should pull from local resources, replace with proper path
map_file = resolve(map_file)
# This attribute stores the pytiled-parser map object
self.tiled_map = pytiled_parser.parse_map(map_file)
if self.tiled_map.infinite:
raise AttributeError(
"Attempted to load an infinite TileMap. Arcade currently cannot load "
"infinite maps. Disable the infinite map property and re-save the file."
)
if not texture_atlas:
try:
texture_atlas = get_window().ctx.default_atlas
except RuntimeError:
pass
self._lazy = lazy
# Set Map Attributes
self.width = self.tiled_map.map_size.width
self.height = self.tiled_map.map_size.height
self.tile_width = self.tiled_map.tile_size.width
self.tile_height = self.tiled_map.tile_size.height
self.background_color = self.tiled_map.background_color
# Global Layer Defaults
self.scaling = scaling
self.use_spatial_hash = use_spatial_hash
self.hit_box_algorithm = hit_box_algorithm
self.offset = offset
# Dictionaries to store the SpriteLists for processed layers
self.sprite_lists: Dict[str, SpriteList] = OrderedDict()
self.object_lists: Dict[str, List[TiledObject]] = OrderedDict()
self.properties = self.tiled_map.properties
global_options = { # type: ignore
"scaling": self.scaling,
"use_spatial_hash": self.use_spatial_hash,
"hit_box_algorithm": self.hit_box_algorithm,
"offset": self.offset,
"custom_class": None,
"custom_class_args": {},
"texture_atlas": texture_atlas,
}
for layer in self.tiled_map.layers:
if (layer.name in self.sprite_lists) or (layer.name in self.object_lists):
raise AttributeError(
f"You have a duplicate layer name '{layer.name}' in your Tiled map. "
"Please use unique names for all layers and tilesets in your map."
)
self._process_layer(layer, global_options, layer_options)
def _process_layer(
    self,
    layer: pytiled_parser.Layer,
    global_options: Dict[str, Any],
    layer_options: Optional[Dict[str, Dict[str, Any]]] = None,
) -> None:
    """Dispatch *layer* to the matching processor and store its output.

    Tile and image layers yield a SpriteList; object layers may yield a
    SpriteList and/or a list of TiledObjects; layer groups recurse.

    :param layer: The pytiled-parser layer to process.
    :param global_options: Map-wide defaults for every layer option.
    :param layer_options: Per-layer overrides keyed by layer name.
    """
    processed: Union[
        SpriteList, Tuple[Optional[SpriteList], Optional[List[TiledObject]]]
    ]

    # Per-layer options override the global defaults key by key; keys the
    # layer does not override keep their global value.
    options = global_options
    if layer_options:
        if layer.name in layer_options:
            new_options = {
                key: layer_options[layer.name].get(key, global_options[key])
                for key in global_options
            }
            options = new_options

    if isinstance(layer, pytiled_parser.TileLayer):
        processed = self._process_tile_layer(layer, **options)
        self.sprite_lists[layer.name] = processed
    elif isinstance(layer, pytiled_parser.ObjectLayer):
        # Object layers return a (sprite_list, object_list) pair; either
        # element may be None and only non-empty results are stored.
        processed = self._process_object_layer(layer, **options)
        if processed[0]:
            sprite_list = processed[0]
            if sprite_list:
                self.sprite_lists[layer.name] = sprite_list
        if processed[1]:
            object_list = processed[1]
            if object_list:
                self.object_lists[layer.name] = object_list
    elif isinstance(layer, pytiled_parser.ImageLayer):
        processed = self._process_image_layer(layer, **options)
        self.sprite_lists[layer.name] = processed
    elif isinstance(layer, pytiled_parser.LayerGroup):
        # Groups carry no content themselves; recurse into their children.
        for sub_layer in layer.layers:
            self._process_layer(sub_layer, global_options, layer_options)
def get_cartesian(
    self,
    x: float,
    y: float,
) -> Tuple[float, float]:
    """Convert pixel coordinates into tile-grid (cartesian) coordinates.

    The grid is derived from the map's scaled tile size: with 128x128
    pixel tiles, coordinates (500, 250) map to (3, 2).

    :param x: The X Coordinate to convert
    :param y: The Y Coordinate to convert
    """
    cell_width = self.tile_width * self.scaling
    cell_height = self.tile_height * self.scaling
    return math.floor(x / cell_width), math.floor(y / cell_height)
def get_tilemap_layer(self, layer_path: str) -> Optional[pytiled_parser.Layer]:
    """Look up a layer by slash-separated path, descending through groups.

    Returns None when any path component is missing.  Note: a path that
    ends ON a LayerGroup (no trailing component) does not match the group
    itself — matching the original behavior, the scan continues and may
    return None.
    """
    assert isinstance(layer_path, str)

    def descend(parts, layers):
        target = parts.pop(0)
        for candidate in layers:
            if candidate.name == target:
                if isinstance(candidate, pytiled_parser.LayerGroup):
                    # Recurse only while path components remain.
                    if len(parts) != 0:
                        return descend(parts, candidate.layers)
                else:
                    return candidate
        return None

    return descend(layer_path.strip("/").split("/"), self.tiled_map.layers)
def _get_tile_by_gid(self, tile_gid: int) -> Optional[pytiled_parser.Tile]:
    """Resolve a global tile id (gid) to a copy of its pytiled-parser Tile.

    The top three bits of a Tiled gid encode flip flags; they are stripped
    off before the remaining id is matched against the map's tilesets.
    Returns None when no tileset covers the gid.
    """
    flipped_diagonally = False
    flipped_horizontally = False
    flipped_vertically = False
    # Strip the flip flags.  The flags are independent high bits, so once a
    # bit is detected subtracting the flag constant clears exactly that bit.
    if tile_gid & _FLIPPED_HORIZONTALLY_FLAG:
        flipped_horizontally = True
        tile_gid -= _FLIPPED_HORIZONTALLY_FLAG
    if tile_gid & _FLIPPED_DIAGONALLY_FLAG:
        flipped_diagonally = True
        tile_gid -= _FLIPPED_DIAGONALLY_FLAG
    if tile_gid & _FLIPPED_VERTICALLY_FLAG:
        flipped_vertically = True
        tile_gid -= _FLIPPED_VERTICALLY_FLAG
    # tilesets is keyed by firstgid; find the tileset whose gid range
    # contains tile_gid.
    for tileset_key, tileset in self.tiled_map.tilesets.items():
        if tile_gid < tileset_key:
            # Gids below this tileset's firstgid belong to an earlier tileset.
            continue
        # No specific tile info, but there is a tile sheet
        # print(f"data {tileset_key} {tileset.tiles} {tileset.image} {tileset_key} {tile_gid} {tileset.tile_count}") # noqa
        if (
            tileset.image is not None
            and tileset_key <= tile_gid < tileset_key + tileset.tile_count
        ):
            tile_id = tile_gid - tileset_key
            existing_ref = None
            # Prefer an explicit per-tile entry (carries animation/objects),
            # pointing its image at the shared sheet.
            if tileset.tiles is not None:
                if (tile_gid - tileset_key) in tileset.tiles:
                    existing_ref = tileset.tiles[tile_id]
                    existing_ref.image = tileset.image
            # No specific tile info, but there is a tile sheet
            if existing_ref:
                tile_ref = existing_ref
            else:
                tile_ref = pytiled_parser.Tile(id=(tile_id), image=tileset.image)
        elif tileset.tiles is None and tileset.image is not None:
            # Not in this tileset, move to the next
            continue
        else:
            # Image-collection tileset: each tile must have its own entry.
            if tileset.tiles is None:
                return None
            tile_ref = tileset.tiles.get(tile_gid - tileset_key)
        if tile_ref:
            # Shallow-copy so the flip flags don't mutate the shared
            # tileset entry.
            my_tile = copy.copy(tile_ref)
            my_tile.tileset = tileset
            my_tile.flipped_vertically = flipped_vertically
            my_tile.flipped_diagonally = flipped_diagonally
            my_tile.flipped_horizontally = flipped_horizontally
            return my_tile
    print(f"Returning NO tile for {tile_gid}.")
    return None
def _get_tile_by_id(
    self, tileset: pytiled_parser.Tileset, tile_id: int
) -> Optional[pytiled_parser.Tile]:
    """Return the Tile with *tile_id* from *tileset*, or None if absent.

    Only the tileset object identical to *tileset* is searched; the map's
    other tilesets are skipped.
    """
    for tileset_key, cur_tileset in self.tiled_map.tilesets.items():
        if cur_tileset is not tileset:
            continue
        # Fix: image-only tilesets have tiles == None (a case
        # _get_tile_by_gid handles explicitly); previously this raised
        # AttributeError on .items().
        if cur_tileset.tiles is None:
            return None
        for tile in cur_tileset.tiles.values():
            if tile_id == tile.id:
                return tile
    return None
def _create_sprite_from_tile(
self,
tile: pytiled_parser.Tile,
scaling: float = 1.0,
hit_box_algorithm: Optional[HitBoxAlgorithm] = None,
custom_class: Optional[type] = None,
custom_class_args: Dict[str, Any] = {},
) -> Sprite:
"""Given a tile from the parser, try and create a Sprite from it."""
# --- Step 1, Find a reference to an image this is going to be based off of
map_source = self.tiled_map.map_file
map_directory = os.path.dirname(map_source)
image_file = _get_image_source(tile, map_directory)
if tile.animation:
if not custom_class:
custom_class = AnimatedTimeBasedSprite
elif not issubclass(custom_class, AnimatedTimeBasedSprite):
raise RuntimeError(
f"""
Tried to use a custom class {custom_class.__name__} for animated tiles
that doesn't subclass AnimatedTimeBasedSprite.
Custom classes for animated tiles must subclass AnimatedTimeBasedSprite.
"""
)
# print(custom_class.__name__)
args = {"path_or_texture": image_file, "scale": scaling}
my_sprite = custom_class(**custom_class_args, **args) # type: ignore
else:
if not custom_class:
custom_class = Sprite
elif not issubclass(custom_class, Sprite):
raise RuntimeError(
f"""
Tried to use a custom class {custom_class.__name__} for
a tile that doesn't subclass arcade.Sprite.
Custom classes for tiles must subclass arcade.Sprite.
"""
)
# Can image_file be None?
image_x, image_y, width, height = _get_image_info_from_tileset(tile)
texture = _load_tilemap_texture(
image_file, # type: ignore
x=image_x,
y=image_y,
width=width,
height=height,
hit_box_algorithm=hit_box_algorithm,
)
if tile.flipped_diagonally:
texture = texture.flip_diagonally()
if tile.flipped_horizontally:
texture = texture.flip_horizontally()
if tile.flipped_vertically:
texture = texture.flip_vertically()
args = {
"path_or_texture": texture, # type: ignore
"scale": scaling,
}
my_sprite = custom_class(**custom_class_args, **args) # type: ignore
if tile.properties is not None and len(tile.properties) > 0:
for key, value in tile.properties.items():
my_sprite.properties[key] = value
if tile.class_:
my_sprite.properties["class"] = tile.class_
# Add tile ID to sprite properties
my_sprite.properties["tile_id"] = tile.id
if tile.objects is not None:
if not isinstance(tile.objects, pytiled_parser.ObjectLayer):
print("Warning, tile.objects is not an ObjectLayer as expected.")
return my_sprite
if len(tile.objects.tiled_objects) > 1:
if tile.image:
print(
f"Warning, only one hit box supported for tile with image {tile.image}."
)
else:
print("Warning, only one hit box supported for tile.")
for hitbox in tile.objects.tiled_objects:
points: List[Point] = []
if isinstance(hitbox, pytiled_parser.tiled_object.Rectangle):
if hitbox.size is None:
print(
"Warning: Rectangle hitbox created for without a "
"height or width Ignoring."
)
continue
sx = hitbox.coordinates.x - (my_sprite.width / (scaling * 2))
sy = -(hitbox.coordinates.y - (my_sprite.height / (scaling * 2)))
ex = (hitbox.coordinates.x + hitbox.size.width) - (
my_sprite.width / (scaling * 2)
)
# issue #1068
# fixed size of rectangular hitbox
ey = -(hitbox.coordinates.y + hitbox.size.height) + (
my_sprite.height / (scaling * 2)
)
points = [(sx, sy), (ex, sy), (ex, ey), (sx, ey)]
elif isinstance(
hitbox, pytiled_parser.tiled_object.Polygon
) or isinstance(hitbox, pytiled_parser.tiled_object.Polyline):
for point in hitbox.points:
adj_x = (
point.x
+ hitbox.coordinates.x
- my_sprite.width / (scaling * 2)
)
adj_y = -(
point.y
+ hitbox.coordinates.y
- my_sprite.height / (scaling * 2)
)
adj_point = adj_x, adj_y
points.append(adj_point)
if points[0][0] == points[-1][0] and points[0][1] == points[-1][1]:
points.pop()
elif isinstance(hitbox, pytiled_parser.tiled_object.Ellipse):
if not hitbox.size:
print(
f"Warning: Ellipse hitbox created without a height "
f" or width for {tile.image}. Ignoring."
)
continue
hw = hitbox.size.width / 2
hh = hitbox.size.height / 2
cx = hitbox.coordinates.x + hw
cy = hitbox.coordinates.y + hh
acx = cx - (my_sprite.width / (scaling * 2))
acy = cy - (my_sprite.height / (scaling * 2))
total_steps = 8
angles = [
step / total_steps * 2 * math.pi for step in range(total_steps)
]
for angle in angles:
x = hw * math.cos(angle) + acx
y = -(hh * math.sin(angle) + acy)
points.append((x, y))
else:
print(f"Warning: Hitbox type {type(hitbox)} not supported.")
if tile.flipped_vertically:
for point in points:
point = point[0], point[1] * -1
if tile.flipped_horizontally:
for point in points:
point = point[0] * -1, point[1]
if tile.flipped_diagonally:
for point in points:
point = point[1], point[0]
my_sprite.hit_box = RotatableHitBox(
cast(List[Point], points),
position=my_sprite.position,
angle=my_sprite.angle,
scale=my_sprite.scale_xy,
)
if tile.animation:
key_frame_list = []
for frame in tile.animation:
frame_tile = self._get_tile_by_gid(
tile.tileset.firstgid + frame.tile_id
)
if frame_tile:
image_file = _get_image_source(frame_tile, map_directory)
if not frame_tile.tileset.image and image_file:
texture = _load_tilemap_texture(
image_file, hit_box_algorithm=hit_box_algorithm
)
elif image_file:
# No image for tile, pull from tilesheet
(
image_x,
image_y,
width,
height,
) = _get_image_info_from_tileset(frame_tile)
texture = _load_tilemap_texture(
image_file,
x=image_x,
y=image_y,
width=width,
height=height,
hit_box_algorithm=hit_box_algorithm,
)
else:
raise RuntimeError(
f"Warning: failed to load image for animation frame for "
f"tile '{frame_tile.id}', '{image_file}'."
)
key_frame = AnimationKeyframe( # type: ignore
frame.tile_id, frame.duration, texture
)
key_frame_list.append(key_frame)
if len(key_frame_list) == 1:
my_sprite.texture = key_frame.texture
cast(AnimatedTimeBasedSprite, my_sprite).frames = key_frame_list
return my_sprite
def _process_image_layer(
self,
layer: pytiled_parser.ImageLayer,
texture_atlas: "TextureAtlas",
scaling: float = 1.0,
use_spatial_hash: bool = False,
hit_box_algorithm: Optional[HitBoxAlgorithm] = None,
offset: Vec2 = Vec2(0, 0),
custom_class: Optional[type] = None,
custom_class_args: Dict[str, Any] = {},
) -> SpriteList:
sprite_list: SpriteList = SpriteList(
use_spatial_hash=use_spatial_hash,
atlas=texture_atlas,
lazy=self._lazy,
)
map_source = self.tiled_map.map_file
map_directory = os.path.dirname(map_source)
image_file = layer.image
if not os.path.exists(image_file) and (map_directory):
try2 = Path(map_directory, image_file)
if not os.path.exists(try2):
print(
f"Warning, can't find image {image_file} for Image Layer {layer.name}"
)
image_file = try2
my_texture = _load_tilemap_texture(
image_file,
hit_box_algorithm=hit_box_algorithm,
)
if layer.transparent_color:
data = my_texture.image.getdata()
target = layer.transparent_color
new_data = []
for item in data:
if (
item[0] == target[0]
and item[1] == target[1]
and item[2] == target[2]
):
new_data.append((255, 255, 255, 0))
else:
new_data.append(item)
my_texture.image.putdata(new_data)
if not custom_class:
custom_class = Sprite
elif not issubclass(custom_class, Sprite):
raise RuntimeError(
f"""
Tried to use a custom class {custom_class.__name__} for an
Image Layer that doesn't subclass arcade.Sprite.
Custom classes for image layers must subclass arcade.Sprite.
"""
)
args = {
"filename": image_file,
"scale": scaling,
"texture": my_texture,
"hit_box_algorithm": hit_box_algorithm,
}
my_sprite = custom_class(**custom_class_args, **args)
if layer.properties:
sprite_list.properties = layer.properties
for key, value in layer.properties.items():
my_sprite.properties[key] = value
if layer.tint_color:
my_sprite.color = layer.tint_color
if layer.opacity:
my_sprite.alpha = int(layer.opacity * 255)
my_sprite.center_x = (
(layer.offset[0] * scaling) + my_sprite.width / 2
) + offset[0]
my_sprite.center_y = (layer.offset[1]) + offset[1]
sprite_list.visible = layer.visible
sprite_list.append(my_sprite)
return sprite_list
def _process_tile_layer(
self,
layer: pytiled_parser.TileLayer,
texture_atlas: "TextureAtlas",
scaling: float = 1.0,
use_spatial_hash: bool = False,
hit_box_algorithm: Optional[HitBoxAlgorithm] = None,
offset: Vec2 = Vec2(0, 0),
custom_class: Optional[type] = None,
custom_class_args: Dict[str, Any] = {},
) -> SpriteList:
sprite_list: SpriteList = SpriteList(
use_spatial_hash=use_spatial_hash,
atlas=texture_atlas,
lazy=self._lazy,
)
map_array = layer.data
if TYPE_CHECKING:
# Can never be None because we already detect and reject infinite maps
assert map_array
# Loop through the layer and add in the list
for row_index, row in enumerate(map_array):
for column_index, item in enumerate(row):
# Check for an empty tile
if item == 0:
continue
tile = self._get_tile_by_gid(item)
if tile is None:
raise ValueError(
(
f"Couldn't find tile for item {item} in layer "
f"'{layer.name}' in file '{self.tiled_map.map_file}'"
f"at ({column_index}, {row_index})."
)
)
my_sprite = self._create_sprite_from_tile(
tile,
scaling=scaling,
hit_box_algorithm=hit_box_algorithm,
custom_class=custom_class,
custom_class_args=custom_class_args,
)
if my_sprite is None:
print(
f"Warning: Could not create sprite number {item} in layer '{layer.name}' {tile.image}"
)
else:
my_sprite.center_x = (
column_index * (self.tiled_map.tile_size[0] * scaling)
+ my_sprite.width / 2
) + offset[0]
my_sprite.center_y = (
(self.tiled_map.map_size.height - row_index - 1)
* (self.tiled_map.tile_size[1] * scaling)
+ my_sprite.height / 2
) + offset[1]
# Tint
if layer.tint_color:
my_sprite.color = layer.tint_color
# Opacity
opacity = layer.opacity
if opacity:
my_sprite.alpha = int(opacity * 255)
sprite_list.visible = layer.visible
sprite_list.append(my_sprite)
if layer.properties:
sprite_list.properties = layer.properties
return sprite_list
def _process_object_layer(
self,
layer: pytiled_parser.ObjectLayer,
texture_atlas: "TextureAtlas",
scaling: float = 1.0,
use_spatial_hash: bool = False,
hit_box_algorithm: Optional[HitBoxAlgorithm] = None,
offset: Vec2 = Vec2(0, 0),
custom_class: Optional[type] = None,
custom_class_args: Dict[str, Any] = {},
) -> Tuple[Optional[SpriteList], Optional[List[TiledObject]]]:
if not scaling:
scaling = self.scaling
sprite_list: Optional[SpriteList] = None
objects_list: Optional[List[TiledObject]] = []
shape: Union[List[Point], Rect, Point, None] = None
for cur_object in layer.tiled_objects:
# shape: Optional[Union[Point, PointList, Rect]] = None
if isinstance(cur_object, pytiled_parser.tiled_object.Tile):
if not sprite_list:
sprite_list = SpriteList(
use_spatial_hash=use_spatial_hash,
atlas=texture_atlas,
lazy=self._lazy,
)
tile = self._get_tile_by_gid(cur_object.gid)
if tile is None:
raise Exception(f"Tile with gid not found: {cur_object.gid}")
my_sprite = self._create_sprite_from_tile(
tile,
scaling=scaling,
hit_box_algorithm=hit_box_algorithm,
custom_class=custom_class,
custom_class_args=custom_class_args,
)
x = (cur_object.coordinates.x * scaling) + offset[0]
y = (
(
self.tiled_map.map_size.height * self.tiled_map.tile_size[1]
- cur_object.coordinates.y
)
* scaling
) + offset[1]
my_sprite.width = width = cur_object.size[0] * scaling
my_sprite.height = height = cur_object.size[1] * scaling
# center_x = width / 2
# center_y = height / 2
if cur_object.rotation:
rotation = -math.radians(cur_object.rotation)
else:
rotation = 0
angle_degrees = math.degrees(rotation)
rotated_center_x, rotated_center_y = rotate_point(
width / 2, height / 2, 0, 0, angle_degrees
)
my_sprite.position = (x + rotated_center_x, y + rotated_center_y)
my_sprite.angle = angle_degrees
if layer.tint_color:
my_sprite.color = layer.tint_color
opacity = layer.opacity
if opacity:
my_sprite.alpha = int(opacity * 255)
if cur_object.properties and "change_x" in cur_object.properties:
my_sprite.change_x = prop_to_float(cur_object.properties["change_x"])
if cur_object.properties and "change_y" in cur_object.properties:
my_sprite.change_y = prop_to_float(cur_object.properties["change_y"])
if cur_object.properties and "boundary_bottom" in cur_object.properties:
my_sprite.boundary_bottom = prop_to_float(
cur_object.properties["boundary_bottom"]
)
if cur_object.properties and "boundary_top" in cur_object.properties:
my_sprite.boundary_top = prop_to_float(
cur_object.properties["boundary_top"]
)
if cur_object.properties and "boundary_left" in cur_object.properties:
my_sprite.boundary_left = prop_to_float(
cur_object.properties["boundary_left"]
)
if cur_object.properties and "boundary_right" in cur_object.properties:
my_sprite.boundary_right = prop_to_float(
cur_object.properties["boundary_right"]
)
if cur_object.properties:
my_sprite.properties.update(cur_object.properties)
if cur_object.class_:
my_sprite.properties["class"] = cur_object.class_
if cur_object.name:
my_sprite.properties["name"] = cur_object.name
sprite_list.visible = layer.visible
sprite_list.append(my_sprite)
continue
elif isinstance(cur_object, pytiled_parser.tiled_object.Point):
x = cur_object.coordinates.x * scaling
y = (
self.tiled_map.map_size.height * self.tiled_map.tile_size[1]
- cur_object.coordinates.y
) * scaling
shape = (x + offset[0], y + offset[1])
elif isinstance(cur_object, pytiled_parser.tiled_object.Rectangle):
if cur_object.size.width == 0 and cur_object.size.height == 0:
print(
f"WARNING: Tiled object with ID {cur_object.id} is a rectangle "
"with a width and height of 0. Loading it as a single point."
)
x = cur_object.coordinates.x * scaling
y = (
self.tiled_map.map_size.height * self.tiled_map.tile_size[1]
- cur_object.coordinates.y
) * scaling
shape = (x + offset[0], y + offset[1])
else:
sx = cur_object.coordinates.x * scaling + offset[0]
sy = (
self.tiled_map.map_size.height * self.tiled_map.tile_size[1]
- cur_object.coordinates.y
) * scaling + offset[1]
ex = sx + cur_object.size.width * scaling
ey = sy - cur_object.size.height * scaling
p1 = (sx, sy)
p2 = (ex, sy)
p3 = (ex, ey)
p4 = (sx, ey)
shape = [p1, p2, p3, p4]
elif isinstance(
cur_object, pytiled_parser.tiled_object.Polygon
) or isinstance(cur_object, pytiled_parser.tiled_object.Polyline):
points: List[Point] = []
shape = points
for point in cur_object.points:
x = point.x + cur_object.coordinates.x
y = (self.height * self.tile_height) - (
point.y + cur_object.coordinates.y
)
point = (x + offset[0], y + offset[1])
points.append(point)
# If shape is a polyline, and it is closed, we need to remove the duplicate end point
if points[0][0] == points[-1][0] and points[0][1] == points[-1][1]:
points.pop()
elif isinstance(cur_object, pytiled_parser.tiled_object.Ellipse):
hw = cur_object.size.width / 2
hh = cur_object.size.height / 2
cx = cur_object.coordinates.x + hw
cy = cur_object.coordinates.y + hh
total_steps = 8
angles = [
step / total_steps * 2 * math.pi for step in range(total_steps)
]
points = []
shape = points
for angle in angles:
x = hw * math.cos(angle) + cx
y = -(hh * math.sin(angle) + cy)
point = (x + offset[0], y + offset[1])
points.append(point)
elif isinstance(cur_object, pytiled_parser.tiled_object.Text):
pass
else:
continue
if shape:
tiled_object = TiledObject(
shape, cur_object.properties, cur_object.name, cur_object.class_
)
if not objects_list:
objects_list = []
objects_list.append(tiled_object)
return sprite_list or None, objects_list or None
def load_tilemap(
    map_file: Union[str, Path],
    scaling: float = 1.0,
    layer_options: Optional[Dict[str, Dict[str, Any]]] = None,
    use_spatial_hash: bool = False,
    hit_box_algorithm: Optional[HitBoxAlgorithm] = None,
    offset: Vec2 = Vec2(0, 0),
    texture_atlas: Optional["TextureAtlas"] = None,
    lazy: bool = False,
) -> TileMap:
    """
    Given a .json map file, loads in and returns a `TileMap` object.

    Thin convenience wrapper around ``TileMap.__init__``; a TileMap can
    also be constructed directly.  See the `TileMap` class for full
    documentation of the ``layer_options`` format.

    :param Union[str, Path] map_file: The JSON map file.
    :param scaling: The global scaling to apply to all Sprite's within the map.
    :param use_spatial_hash: If set to True, this will make moving a sprite
        in the SpriteList slower, but it will speed up collision detection
        with items in the SpriteList. Great for doing collision detection
        with static walls/platforms.
    :param hit_box_algorithm: The hit box algorithm to use for collision detection.
    :param Dict[str, Dict[str, Any]] layer_options: Layer specific options for the map.
    :param offset: Can be used to offset the position of all sprites and objects
        within the map. This will be applied in addition to any offsets from Tiled.
        This value can be overridden with the layer_options dict.
    :param lazy: SpriteLists will be created lazily.
    """
    return TileMap(
        map_file=map_file,
        scaling=scaling,
        layer_options=layer_options,
        use_spatial_hash=use_spatial_hash,
        hit_box_algorithm=hit_box_algorithm,
        offset=offset,
        texture_atlas=texture_atlas,
        lazy=lazy,
    )
def read_tmx(map_file: Union[str, Path]) -> pytiled_parser.TiledMap:
    """
    Removed-API stub retained only so outdated code bases get a clear error.

    Always raises ``DeprecationWarning``: the old ``read_tmx`` entry point
    was replaced by the :class:`TileMap` class.
    """
    raise DeprecationWarning(
        "The read_tmx function has been replaced by the new TileMap class."
    )
|
7072bab636b515e4bcf3d0c753bfdbc1ca5a9b18
|
420910ce3d1afb4777fd6e4dd4ac449899394602
|
/trajectron/model/online/__init__.py
|
a1c907062f62f7b97ca6e0bf490434693cd643c8
|
[
"MIT"
] |
permissive
|
StanfordASL/Trajectron-plus-plus
|
21bf2b1b5cb9251a0de58211c19eb886afbaf75f
|
1031c7bd1a444273af378c1ec1dcca907ba59830
|
refs/heads/master
| 2023-04-13T08:21:08.604997
| 2023-03-12T06:32:17
| 2023-03-12T06:32:17
| 232,498,508
| 572
| 179
|
MIT
| 2023-08-17T06:09:36
| 2020-01-08T06:52:34
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 106
|
py
|
__init__.py
|
from .online_trajectron import OnlineTrajectron
from .online_mgcvae import OnlineMultimodalGenerativeCVAE
|
9de40a6e9be33d49f1834ace9462b3cb4998cbc8
|
4157fb5cecec9efd1a6261ff88c421337794041f
|
/tests/test_export_base.py
|
5aa07f679c5335b475c8f29a606def1fbd32a457
|
[
"MIT"
] |
permissive
|
nickstenning/honcho
|
786859f423dcece8b75773bf65a3c597612f5cab
|
9eeb9b43e82b45c13de9999a0e65287cfbf384d6
|
refs/heads/main
| 2023-08-18T04:54:40.543608
| 2022-10-24T16:16:35
| 2022-10-24T17:12:45
| 3,706,909
| 1,187
| 113
|
MIT
| 2023-09-01T12:42:15
| 2012-03-13T13:44:30
|
Python
|
UTF-8
|
Python
| false
| false
| 1,630
|
py
|
test_export_base.py
|
# -*- coding: utf-8 -*-
from mock import Mock
from mock import patch
import pytest
from honcho.export.base import BaseExport
from honcho.export.base import dashrepl
from honcho.export.base import percentescape
class GiraffeExport(BaseExport):
    """Minimal BaseExport subclass used as a fixture by these tests."""

    def get_template_loader(self):
        # Sentinel value instead of a real jinja2 loader, so tests can
        # assert it is passed through to the Environment unchanged.
        return 'longneck'
class TestBaseExport:
    """Unit tests for BaseExport's jinja2 environment wiring."""

    def test_env_default(self):
        # With no template_dir, the subclass-provided loader is used as-is.
        with patch('jinja2.Environment') as env_mock:
            GiraffeExport()
            env_mock.assert_called_with(loader='longneck')

    def test_env_template_dir(self):
        # An explicit template_dir is wrapped in a FileSystemLoader first.
        with patch('jinja2.Environment') as env_mock, \
                patch('jinja2.FileSystemLoader') as loader_mock:
            BaseExport(template_dir='foo/bar')
            loader_mock.assert_called_with(['foo/bar'])
            env_mock.assert_called_with(loader=loader_mock.return_value)

    def test_get_template(self):
        # A caller-supplied environment is used for template lookups.
        fake_env = Mock()
        export = BaseExport(template_env=fake_env)
        export.get_template('foo/bar.tpl')
        fake_env.get_template.assert_called_with('foo/bar.tpl')
@pytest.mark.parametrize('raw,expected', (
    ('foo', 'foo'),
    ('foo.1', 'foo-1'),
    ('foo.bar.baz', 'foo-bar-baz'),
    ('foo_bar_baz', 'foo_bar_baz'),
    ('foo!bar:baz', 'foo-bar-baz'),
    ('καλημέρα.κόσμε', 'καλημέρα-κόσμε'),
))
def test_dashrepl(raw, expected):
    # Dots and punctuation become dashes; underscores are preserved.
    assert dashrepl(raw) == expected
@pytest.mark.parametrize('raw,expected', (
    ('foo', 'foo'),
    ('foo name.%h', 'foo name.%%h'),
    ('%one two% %three', '%%one two%% %%three'),
    ('foo%%bar', 'foo%%%%bar'),
))
def test_percentescape(raw, expected):
    # Every '%' is doubled so values survive printf-style expansion.
    assert percentescape(raw) == expected
|
2692542bb0b805bcd1bc80089439823eb02fd9fe
|
9dfa5b8c9448d748a611d7a908b7724bc2160953
|
/packages/cardpay-reward-indexer/cardpay_reward_indexer/config.py
|
14a1d03635bd1232ae3a37585c685eb9c23da716
|
[
"MIT"
] |
permissive
|
cardstack/cardstack
|
8bb5085e3299ce63ab5cf99c0b24511007526fc8
|
3f0e957ebf4b9caafbd2a9b89944c7d201406a3d
|
refs/heads/main
| 2023-08-24T10:42:20.382920
| 2023-08-18T11:01:25
| 2023-08-18T11:01:25
| 79,036,909
| 364
| 64
|
MIT
| 2023-09-07T08:23:29
| 2017-01-15T14:15:10
|
TypeScript
|
UTF-8
|
Python
| false
| false
| 1,725
|
py
|
config.py
|
from functools import lru_cache
from typing import Optional

from pydantic import BaseSettings
# Per-environment addresses for the reward indexer.
# NOTE(review): the hex strings are presumably checksummed contract
# addresses on the target chain — confirm against deployment records
# before editing.
config = {
    "staging": {
        "reward_program": "0x0885ce31D73b63b0Fcb1158bf37eCeaD8Ff0fC72",
        "archived_reward_programs": [],
        "reward_pool": "0xcF8852D1aD746077aa4C31B423FdaE5494dbb57A",
        "rewards_bucket": "s3://cardpay-staging-reward-programs",
        "subgraph_url": "https://graph-staging.stack.cards/subgraphs/name/habdelra/cardpay-sokol",
        "tokens": {
            "card": "0xB0427e9F03Eb448D030bE3EBC96F423857ceEb2f",
            "dai": "0x8F4fdA26e5039eb0bf5dA90c3531AeB91256b56b",
        },
    },
    "production": {
        "reward_program": "0x979C9F171fb6e9BC501Aa7eEd71ca8dC27cF1185",
        "archived_reward_programs": [],
        "rewards_bucket": "s3://cardpay-production-reward-programs",
        "subgraph_url": "https://graph.cardstack.com/subgraphs/name/habdelra/cardpay-xdai",
        "reward_pool": "0x340EB99eB9aC7DB3a3eb68dB76c6F62738DB656a",
        "tokens": {
            "card": "0x52031d287Bb58E26A379A7Fec2c84acB54f54fe3",
            "dai": "0x26F2319Fbb44772e0ED58fB7c99cf8da59e2b5BE",
        },
    },
}
# "local" and "test" environments reuse the staging configuration.
config["local"] = config["staging"]
config["test"] = config["staging"]
class Settings(BaseSettings):
    """Runtime configuration, overridable via environment variables.

    Defaults point at the staging deployment.  ``DB_STRING`` and
    ``SENTRY_DSN`` are explicitly mapped to environment variables of the
    same name via the inner ``Config``.
    """

    ENVIRONMENT: str = "local"
    SUBGRAPH_URL: str = config["staging"]["subgraph_url"]
    REWARDS_BUCKET: str = config["staging"]["rewards_bucket"]
    DB_STRING: str = "postgresql://postgres@localhost:5432/postgres"
    # The DSN is optional; annotate it as Optional[str] so the None default
    # is valid under pydantic validation (a bare `str = None` only works
    # through pydantic v1's implicit-Optional behaviour and is rejected in v2).
    SENTRY_DSN: Optional[str] = None

    class Config:
        fields = {
            "DB_STRING": {
                "env": "DB_STRING",
            },
            "SENTRY_DSN": {"env": "SENTRY_DSN"},
        }
@lru_cache
def get_settings():
    # Cached so the Settings object (and its environment lookups) is built
    # once per process and shared by every caller.
    return Settings()
|
a90d7d8bbad247bfc1fb1910d2a42a98b3443e0a
|
a3d6556180e74af7b555f8d47d3fea55b94bcbda
|
/tools/translation/helper/translation_helper.py
|
75772e4fb580fa1df781446a74733e4b33ea7465
|
[
"BSD-3-Clause"
] |
permissive
|
chromium/chromium
|
aaa9eda10115b50b0616d2f1aed5ef35d1d779d6
|
a401d6cf4f7bf0e2d2e964c512ebb923c3d8832c
|
refs/heads/main
| 2023-08-24T00:35:12.585945
| 2023-08-23T22:01:11
| 2023-08-23T22:01:11
| 120,360,765
| 17,408
| 7,102
|
BSD-3-Clause
| 2023-09-10T23:44:27
| 2018-02-05T20:55:32
| null |
UTF-8
|
Python
| false
| false
| 9,461
|
py
|
translation_helper.py
|
# Copyright 2018 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Helpers for dealing with translation files."""
from __future__ import print_function
import ast
import os
import re
import sys
import xml.etree.cElementTree as ElementTree
if sys.version_info.major != 2:
basestring = str # pylint: disable=redefined-builtin
class GRDFile:
  """Class representing a grd xml file.

  Attributes:
    path: the path to the grd file.
    dir: the path to the the grd's parent directery.
    name: the base name of the grd file.
    grdp_paths: the list of grdp files included in the grd via <part>.
    structure_paths: the paths of any <structure> elements in the grd file.
    xtb_paths: the xtb paths where the grd's translations live.
    lang_to_xtb_path: maps each language to the xtb path for that language.
    appears_translatable: whether the contents of the grd indicate that it's
      supposed to be translated.
    expected_languages: the languages that this grd is expected to have
      translations for, based on the translation expectations file.
  """

  def __init__(self, path):
    self.path = path
    self.dir, self.name = os.path.split(path)
    dom, self.grdp_paths = _parse_grd_file(path)
    self.structure_paths = [
        os.path.join(self.dir, node.get('file'))
        for node in dom.findall('.//structure')
    ]
    self.xtb_paths = [
        os.path.join(self.dir, node.get('path'))
        for node in dom.findall('.//file')
    ]
    self.lang_to_xtb_path = {}
    # Anything with translation files or <message> elements looks like it
    # was meant to be translated.
    has_messages = dom.find('.//message') is not None
    self.appears_translatable = bool(self.xtb_paths) or has_messages
    self.expected_languages = None

  def _populate_lang_to_xtb_path(self, errors):
    """Populates lang_to_xtb_path, appending any problems to `errors`."""
    grd_root = os.path.splitext(self.name)[0]
    lang_pattern = re.compile(r'%s_([^_]+)\.xtb$' % re.escape(grd_root))
    for xtb_path in self.xtb_paths:
      basename = os.path.basename(xtb_path)
      match = re.match(lang_pattern, basename)
      if match is None:
        errors.append('%s: invalid xtb name: %s. xtb name must be %s_<lang>'
                      '.xtb where <lang> is the language code.' %
                      (self.name, basename, grd_root))
        continue
      lang = match.group(1)
      if lang in self.lang_to_xtb_path:
        errors.append('%s: %s is listed twice' % (self.name, basename))
        continue
      self.lang_to_xtb_path[lang] = xtb_path
    return errors
def get_translatable_grds(repo_root, all_grd_paths,
                          translation_expectations_path):
  """Returns all the grds that should be translated as a list of GRDFiles.

  This verifies that every grd file that appears translatable is listed in
  the translation expectations, and that every grd in the translation
  expectations actually exists.

  Args:
    repo_root: The path to the root of the repository.
    all_grd_paths: All grd paths in the repository relative to repo_root.
      NOTE: internal grds are removed from this list in place.
    translation_expectations_path: The path to the translation expectations
      file, which specifies which grds to translate and into which languages.

  Raises:
    Exception: if the expectations file disagrees with the repository state.
  """
  parsed_expectations = _parse_translation_expectations(
      translation_expectations_path)
  grd_to_langs, untranslated_grds, internal_grds = parsed_expectations
  errors = []
  # Make sure that grds in internal_grds aren't processed, since they might
  # contain pieces not available publicly.
  for internal_grd in internal_grds:
    try:
      all_grd_paths.remove(internal_grd)
    except ValueError:
      # remove() raises ValueError if the path is absent; record the problem
      # rather than crashing so all issues are reported in one pass.
      errors.append(
          '%s is listed in translation expectations as an internal file to be '
          'ignored, but this grd file does not exist.' % internal_grd)
  # Check that every grd that appears translatable is listed in
  # the translation expectations.
  grds_with_expectations = set(grd_to_langs.keys()).union(untranslated_grds)
  all_grds = {p: GRDFile(os.path.join(repo_root, p)) for p in all_grd_paths}
  for path, grd in all_grds.items():
    if grd.appears_translatable:
      if path not in grds_with_expectations:
        errors.append('%s appears to be translatable (because it contains '
                      '<file> or <message> elements), but is not listed in the '
                      'translation expectations.' % path)
  # Check that every file in translation_expectations exists.
  for path in grds_with_expectations:
    if path not in all_grd_paths:
      errors.append('%s is listed in the translation expectations, but this '
                    'grd file does not exist.' % path)
  # Listing problems are fatal: bail out before the per-grd language checks.
  if errors:
    raise Exception('%s needs to be updated. Please fix these issues:\n - %s' %
                    (translation_expectations_path, '\n - '.join(errors)))
  translatable_grds = []
  for path, expected_languages_list in grd_to_langs.items():
    grd = all_grds[path]
    grd.expected_languages = expected_languages_list
    # Accumulates per-xtb naming problems into `errors`.
    grd._populate_lang_to_xtb_path(errors)
    translatable_grds.append(grd)
    # Ensure each grd lists the expected languages.
    expected_languages = set(expected_languages_list)
    actual_languages = set(grd.lang_to_xtb_path.keys())
    if expected_languages.difference(actual_languages):
      errors.append('%s: missing translations for these languages: %s. Add '
                    '<file> and <output> elements to the grd file, or update '
                    'the translation expectations.' % (grd.name,
                    sorted(expected_languages.difference(actual_languages))))
    if actual_languages.difference(expected_languages):
      errors.append('%s: references translations for unexpected languages: %s. '
                    'Remove the offending <file> and <output> elements from the'
                    ' grd file, or update the translation expectations.'
                    % (grd.name,
                       sorted(actual_languages.difference(expected_languages))))
  if errors:
    raise Exception('Please fix these issues:\n - %s' %
                    ('\n - '.join(errors)))
  return translatable_grds
def _parse_grd_file(grd_path):
"""Reads a grd(p) file and any subfiles included via <part file="..." />.
Args:
grd_path: The path of the .grd or .grdp file.
Returns:
A tuple (grd_dom, grdp_paths). dom is an ElementTree DOM for the grd file,
with the <part> elements inlined. grdp_paths is the list of grdp files that
were included via <part> elements.
"""
grdp_paths = []
grd_dom = ElementTree.parse(grd_path)
# We modify grd in the loop, so listify this iterable to be safe.
part_nodes = list(grd_dom.findall('.//part'))
for part_node in part_nodes:
grdp_rel_path = part_node.get('file')
grdp_path = os.path.join(os.path.dirname(grd_path), grdp_rel_path)
grdp_paths.append(grdp_path)
grdp_dom, grdp_grdp_paths = _parse_grd_file(grdp_path)
grdp_paths.extend(grdp_grdp_paths)
part_node.append(grdp_dom.getroot())
return grd_dom, grdp_paths
def _parse_translation_expectations(path):
"""Parses a translations expectations file.
Example translations expectations file:
{
"desktop_grds": {
"languages": ["es", "fr"],
"files": [
"ash/ash_strings.grd",
"ui/strings/ui_strings.grd",
],
},
"android_grds": {
"languages": ["de", "pt-BR"],
"files": [
"chrome/android/android_chrome_strings.grd",
],
},
"untranslated_grds": {
"chrome/locale_settings.grd": "Not UI strings; localized separately",
"chrome/locale_settings_mac.grd": "Not UI strings; localized separately",
},
"internal_grds": [
"chrome/internal.grd",
],
}
Returns:
A tuple (grd_to_langs, untranslated_grds, internal_grds).
grd_to_langs maps each grd path to the list of languages into which
that grd should be translated. untranslated_grds is a list of grds
that "appear translatable" but should not be translated.
internal_grds is a list of grds that are internal only and should
not be read by this helper (since they might contain parts not
available publicly).
"""
with open(path, encoding='utf-8') as f:
file_contents = f.read()
def assert_list_of_strings(l, name):
assert isinstance(l, list) and all(isinstance(s, basestring) for s in l), (
'%s must be a list of strings' % name)
try:
translations_expectations = ast.literal_eval(file_contents)
assert isinstance(translations_expectations, dict), (
'%s must be a python dict' % path)
grd_to_langs = {}
untranslated_grds = []
internal_grds = []
for group_name, settings in translations_expectations.items():
if group_name == 'untranslated_grds':
untranslated_grds = list(settings.keys())
assert_list_of_strings(untranslated_grds, 'untranslated_grds')
continue
if group_name == 'internal_grds':
internal_grds = settings
assert_list_of_strings(internal_grds, 'internal_grds')
continue
languages = settings['languages']
files = settings['files']
assert_list_of_strings(languages, group_name + '.languages')
assert_list_of_strings(files, group_name + '.files')
for grd in files:
grd_to_langs[grd] = languages
return grd_to_langs, untranslated_grds, internal_grds
except Exception:
print('Error: failed to parse', path)
raise
|
83d7396d70707fff1ddc36a23e3cfb2049e1ae31
|
33f46bf38c5d8d23eabe9022b3edc16041089c5f
|
/apps/10_movie_search/final/movie_svc.py
|
cc49e4c893a80d85b4cf97393c7ca26efe1b1b9c
|
[
"MIT"
] |
permissive
|
mikeckennedy/python-jumpstart-course-demos
|
0bb8f4571147d4b16a89a28cde546ba6a8a5b2f6
|
4c09fd6e636fc5fcf62dcd696b9600d5342720b9
|
refs/heads/master
| 2023-08-23T10:04:38.425936
| 2023-04-13T17:47:53
| 2023-04-13T17:47:53
| 52,437,699
| 818
| 614
|
MIT
| 2021-04-10T05:32:34
| 2016-02-24T11:35:52
|
Python
|
UTF-8
|
Python
| false
| false
| 716
|
py
|
movie_svc.py
|
import collections
import requests
# Immutable record describing one search hit returned by the movie service.
MovieResult = collections.namedtuple(
    'MovieResult',
    ['imdb_code', 'title', 'duration', 'director', 'year', 'rating',
     'imdb_score', 'keywords', 'genres'])
def find_movies(search_text):
    """Query the talkpython movie service and return matches, newest first.

    :param search_text: Free-text search query; must be non-empty.
    :return: List of MovieResult sorted by year, descending.
    :raises ValueError: If search_text is empty or whitespace.
    :raises requests.HTTPError: If the service responds with an error status.
    """
    if not search_text or not search_text.strip():
        raise ValueError("Search text is required")

    # This URL changed since the recording to support SSL.
    url = 'https://movieservice.talkpython.fm/api/search/{}'.format(search_text)

    # A timeout keeps a hung service from blocking the caller forever.
    resp = requests.get(url, timeout=10)
    resp.raise_for_status()

    movie_data = resp.json()
    # Treat a missing/null 'hits' key as "no results" rather than crashing.
    movies_list = movie_data.get('hits') or []

    movies = [MovieResult(**md) for md in movies_list]
    movies.sort(key=lambda m: -m.year)
    return movies
|
cbc177db62a55a4de26fadb5c45016ba1ae2cbcb
|
017090be7ab186cb6b47f49e1066ac5cfec3a542
|
/src/neptune/internal/backends/neptune_backend_mock.py
|
41ef73a3d41fea8fc3055b9574627360f7eafc73
|
[
"Apache-2.0"
] |
permissive
|
neptune-ai/neptune-client
|
9a79f9d93c84b3a20114e6e49a80652930399ece
|
9b697ce548634c30dbc5881d4a0b223c8987515d
|
refs/heads/master
| 2023-08-18T01:48:22.634432
| 2023-08-17T11:55:57
| 2023-08-17T11:55:57
| 170,117,229
| 408
| 55
|
Apache-2.0
| 2023-09-13T12:51:03
| 2019-02-11T11:25:57
|
Python
|
UTF-8
|
Python
| false
| false
| 31,714
|
py
|
neptune_backend_mock.py
|
#
# Copyright (c) 2022, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__all__ = ["NeptuneBackendMock"]
import os
import uuid
from collections import defaultdict
from datetime import datetime
from shutil import copyfile
from typing import (
Any,
Dict,
Iterable,
List,
Optional,
Tuple,
Type,
TypeVar,
Union,
)
from zipfile import ZipFile
from neptune.common.exceptions import (
InternalClientError,
NeptuneException,
)
from neptune.exceptions import (
ContainerUUIDNotFound,
MetadataInconsistency,
ModelVersionNotFound,
ProjectNotFound,
RunNotFound,
)
from neptune.internal.artifacts.types import ArtifactFileData
from neptune.internal.backends.api_model import (
ApiExperiment,
ArtifactAttribute,
Attribute,
AttributeType,
BoolAttribute,
DatetimeAttribute,
FileAttribute,
FloatAttribute,
FloatPointValue,
FloatSeriesAttribute,
FloatSeriesValues,
ImageSeriesValues,
IntAttribute,
LeaderboardEntry,
Project,
StringAttribute,
StringPointValue,
StringSeriesAttribute,
StringSeriesValues,
StringSetAttribute,
Workspace,
)
from neptune.internal.backends.hosted_file_operations import get_unique_upload_entries
from neptune.internal.backends.neptune_backend import NeptuneBackend
from neptune.internal.backends.nql import NQLQuery
from neptune.internal.container_structure import ContainerStructure
from neptune.internal.container_type import ContainerType
from neptune.internal.id_formats import (
QualifiedName,
SysId,
UniqueId,
)
from neptune.internal.operation import (
AddStrings,
AssignArtifact,
AssignBool,
AssignDatetime,
AssignFloat,
AssignInt,
AssignString,
ClearArtifact,
ClearFloatLog,
ClearImageLog,
ClearStringLog,
ClearStringSet,
ConfigFloatSeries,
CopyAttribute,
DeleteAttribute,
DeleteFiles,
LogFloats,
LogImages,
LogStrings,
Operation,
RemoveStrings,
TrackFilesToArtifact,
UploadFile,
UploadFileContent,
UploadFileSet,
)
from neptune.internal.operation_processors.operation_storage import OperationStorage
from neptune.internal.operation_visitor import OperationVisitor
from neptune.internal.types.file_types import FileType
from neptune.internal.utils import base64_decode
from neptune.internal.utils.generic_attribute_mapper import NoValue
from neptune.internal.utils.git import GitInfo
from neptune.internal.utils.paths import path_to_str
from neptune.types import (
Boolean,
Integer,
)
from neptune.types.atoms import GitRef
from neptune.types.atoms.artifact import Artifact
from neptune.types.atoms.datetime import Datetime
from neptune.types.atoms.file import File
from neptune.types.atoms.float import Float
from neptune.types.atoms.string import String
from neptune.types.file_set import FileSet
from neptune.types.namespace import Namespace
from neptune.types.series.file_series import FileSeries
from neptune.types.series.float_series import FloatSeries
from neptune.types.series.string_series import StringSeries
from neptune.types.sets.string_set import StringSet
from neptune.types.value import Value
from neptune.types.value_visitor import ValueVisitor
Val = TypeVar("Val", bound=Value)
class NeptuneBackendMock(NeptuneBackend):
WORKSPACE_NAME = "mock-workspace"
PROJECT_NAME = "project-placeholder"
PROJECT_KEY = SysId("OFFLINE")
MODEL_SYS_ID = SysId("OFFLINE-MOD")
    def __init__(self, credentials=None, proxies=None):
        """Build an empty in-memory backend; `credentials`/`proxies` are
        accepted for interface compatibility and ignored by the mock."""
        self._project_id: UniqueId = UniqueId(str(uuid.uuid4()))
        # All container state lives in memory, keyed by (id, container type).
        self._containers: Dict[(UniqueId, ContainerType), ContainerStructure[Value, dict]] = dict()
        self._next_run = 1  # counter for runs
        self._next_model_version = defaultdict(lambda: 1)  # counter for model versions
        self._artifacts: Dict[Tuple[str, str], List[ArtifactFileData]] = dict()
        self._attribute_type_converter_value_visitor = self.AttributeTypeConverterValueVisitor()
        # The offline "project" container always exists.
        self._create_container(self._project_id, ContainerType.PROJECT, self.PROJECT_KEY)
    def get_display_address(self) -> str:
        """Offline backend has no real address; return a constant marker."""
        return "OFFLINE"
    def get_available_projects(
        self, workspace_id: Optional[str] = None, search_term: Optional[str] = None
    ) -> List[Project]:
        """Return a single placeholder project; both filters are ignored.

        NOTE: a fresh random project id is generated on every call.
        """
        return [
            Project(
                id=UniqueId(str(uuid.uuid4())),
                name=self.PROJECT_NAME,
                workspace=self.WORKSPACE_NAME,
                sys_id=self.PROJECT_KEY,
            )
        ]
    def get_available_workspaces(self) -> List[Workspace]:
        """Return a single placeholder workspace with a fresh random id."""
        return [Workspace(id=UniqueId(str(uuid.uuid4())), name=self.WORKSPACE_NAME)]
    def _create_container(self, container_id: UniqueId, container_type: ContainerType, sys_id: SysId):
        """Create the container's in-memory structure (if absent) and seed
        its standard sys/* attributes; returns the container."""
        container = self._containers.setdefault((container_id, container_type), ContainerStructure[Value, dict]())
        container.set(["sys", "id"], String(str(sys_id)))
        container.set(["sys", "state"], String("Active"))
        container.set(["sys", "owner"], String("offline_user"))
        container.set(["sys", "size"], Float(0))
        container.set(["sys", "tags"], StringSet(set()))
        container.set(["sys", "creation_time"], Datetime(datetime.now()))
        container.set(["sys", "modification_time"], Datetime(datetime.now()))
        container.set(["sys", "failed"], Boolean(False))
        # Model versions carry two extra sys fields: a link back to the
        # parent model and a lifecycle stage that starts as "none".
        if container_type == ContainerType.MODEL_VERSION:
            container.set(["sys", "model_id"], String(str(self.MODEL_SYS_ID)))
            container.set(["sys", "stage"], String("none"))
        return container
def _get_container(self, container_id: UniqueId, container_type: ContainerType):
key = (container_id, container_type)
if key not in self._containers:
raise ContainerUUIDNotFound(container_id, container_type)
container = self._containers[(container_id, container_type)]
return container
    def create_run(
        self,
        project_id: UniqueId,
        git_info: Optional[GitInfo] = None,
        custom_run_id: Optional[str] = None,
        notebook_id: Optional[str] = None,
        checkpoint_id: Optional[str] = None,
    ) -> ApiExperiment:
        """Register a new run container with a sequential sys id.

        All optional arguments are accepted for interface compatibility
        and ignored by the mock.
        """
        sys_id = SysId(f"{self.PROJECT_KEY}-{self._next_run}")
        self._next_run += 1
        new_run_id = UniqueId(str(uuid.uuid4()))
        self._create_container(new_run_id, ContainerType.RUN, sys_id=sys_id)
        return ApiExperiment(
            id=new_run_id,
            type=ContainerType.RUN,
            sys_id=sys_id,
            workspace=self.WORKSPACE_NAME,
            project_name=self.PROJECT_NAME,
            trashed=False,
        )
    def create_model(self, project_id: str, key: str) -> ApiExperiment:
        """Register a new model container whose sys id embeds the given key."""
        sys_id = SysId(f"{self.PROJECT_KEY}-{key}")
        new_run_id = UniqueId(str(uuid.uuid4()))
        self._create_container(new_run_id, ContainerType.MODEL, sys_id=sys_id)
        return ApiExperiment(
            id=new_run_id,
            type=ContainerType.MODEL,
            sys_id=sys_id,
            workspace=self.WORKSPACE_NAME,
            project_name=self.PROJECT_NAME,
            trashed=False,
        )
    def create_model_version(self, project_id: str, model_id: UniqueId) -> ApiExperiment:
        """Register a new model-version container with a per-model counter.

        Falls back to the literal key "MOD" when the parent model is unknown.
        """
        try:
            model_key = self._get_container(container_id=model_id, container_type=ContainerType.MODEL).get("sys/id")
        except ContainerUUIDNotFound:
            model_key = "MOD"
        sys_id = SysId(f"{self.PROJECT_KEY}-{model_key}-{self._next_model_version[model_id]}")
        self._next_model_version[model_id] += 1
        new_run_id = UniqueId(str(uuid.uuid4()))
        self._create_container(new_run_id, ContainerType.MODEL_VERSION, sys_id=sys_id)
        # NOTE(review): the container is created as MODEL_VERSION but the
        # returned ApiExperiment says type=MODEL — confirm whether this
        # mismatch is intentional mock behaviour.
        return ApiExperiment(
            id=new_run_id,
            type=ContainerType.MODEL,
            sys_id=sys_id,
            workspace=self.WORKSPACE_NAME,
            project_name=self.PROJECT_NAME,
            trashed=False,
        )
    def create_checkpoint(self, notebook_id: str, jupyter_path: str) -> Optional[str]:
        # Notebook checkpoints are not modelled by the offline backend.
        return None
    def get_project(self, project_id: QualifiedName) -> Project:
        """Return the one built-in offline project; `project_id` is ignored."""
        return Project(
            id=self._project_id,
            name=self.PROJECT_NAME,
            workspace=self.WORKSPACE_NAME,
            sys_id=self.PROJECT_KEY,
        )
    def get_metadata_container(
        self,
        container_id: Union[UniqueId, QualifiedName],
        expected_container_type: ContainerType,
    ) -> ApiExperiment:
        """Resolve a qualified name to a mock experiment.

        Only MODEL lookups succeed (the model key is taken from the last
        path segment); RUN and MODEL_VERSION lookups always raise their
        respective not-found errors, anything else raises ProjectNotFound.
        """
        # A qualified name must contain at least one "/" separator.
        if "/" not in container_id:
            raise ValueError("Backend mock expect container_id as QualifiedName only")
        if expected_container_type == ContainerType.RUN:
            raise RunNotFound(container_id)
        elif expected_container_type == ContainerType.MODEL:
            return ApiExperiment(
                id=UniqueId(str(uuid.uuid4())),
                type=ContainerType.MODEL,
                sys_id=SysId(container_id.rsplit("/", 1)[-1]),
                workspace=self.WORKSPACE_NAME,
                project_name=self.PROJECT_NAME,
            )
        elif expected_container_type == ContainerType.MODEL_VERSION:
            raise ModelVersionNotFound(container_id)
        else:
            raise ProjectNotFound(container_id)
def execute_operations(
self,
container_id: UniqueId,
container_type: ContainerType,
operations: List[Operation],
operation_storage: OperationStorage,
) -> Tuple[int, List[NeptuneException]]:
result = []
for op in operations:
try:
self._execute_operation(container_id, container_type, op, operation_storage)
except NeptuneException as e:
result.append(e)
return len(operations), result
    def _execute_operation(
        self, container_id: UniqueId, container_type: ContainerType, op: Operation, operation_storage: OperationStorage
    ) -> None:
        """Apply a single operation to the container's structure.

        The operation is dispatched through NewValueOpVisitor; a None result
        deletes the attribute at `op.path`, anything else replaces it.
        """
        run = self._get_container(container_id, container_type)
        val = run.get(op.path)
        # Reject operating on a namespace node or corrupted storage entry.
        if val is not None and not isinstance(val, Value):
            if isinstance(val, dict):
                raise MetadataInconsistency("{} is a namespace, not an attribute".format(op.path))
            else:
                raise InternalClientError("{} is a {}".format(op.path, type(val)))
        visitor = NeptuneBackendMock.NewValueOpVisitor(self, op.path, val, operation_storage)
        new_val = visitor.visit(op)
        if new_val is not None:
            run.set(op.path, new_val)
        else:
            run.pop(op.path)
    def get_attributes(self, container_id: str, container_type: ContainerType) -> List[Attribute]:
        """Flatten the container structure into a list of (path, type) attributes."""
        run = self._get_container(container_id, container_type)
        return list(self._generate_attributes(None, run.get_structure()))
def _generate_attributes(self, base_path: Optional[str], values: dict):
for key, value_or_dict in values.items():
new_path = base_path + "/" + key if base_path is not None else key
if isinstance(value_or_dict, dict):
yield from self._generate_attributes(new_path, value_or_dict)
else:
yield Attribute(
new_path,
value_or_dict.accept(self._attribute_type_converter_value_visitor),
)
    def download_file(
        self,
        container_id: str,
        container_type: ContainerType,
        path: List[str],
        destination: Optional[str] = None,
    ):
        """Materialize a stored File attribute on the local filesystem.

        When `destination` is omitted the target defaults to the attribute's
        last path segment plus the stored extension, in the CWD.
        """
        run = self._get_container(container_id, container_type)
        value: File = run.get(path)
        target_path = os.path.abspath(destination or (path[-1] + ("." + value.extension if value.extension else "")))
        if value.file_type is FileType.IN_MEMORY:
            with open(target_path, "wb") as target_file:
                target_file.write(value.content)
        elif value.file_type is FileType.LOCAL_FILE:
            # Skip the copy when source and target are already the same file.
            if value.path != target_path:
                copyfile(value.path, target_path)
        else:
            raise ValueError(f"Unexpected FileType: {value.file_type}")
    def download_file_set(
        self,
        container_id: str,
        container_type: ContainerType,
        path: List[str],
        destination: Optional[str] = None,
    ):
        """Zip every file matched by the FileSet attribute's globs.

        `destination` may be a directory (zip is placed inside it), a file
        path, or omitted (zip named after the attribute, in the CWD).
        """
        run = self._get_container(container_id, container_type)
        source_file_set_value: FileSet = run.get(path)
        if destination is None:
            target_file = path[-1] + ".zip"
        elif os.path.isdir(destination):
            target_file = os.path.join(destination, path[-1] + ".zip")
        else:
            target_file = destination
        upload_entries = get_unique_upload_entries(source_file_set_value.file_globs)
        with ZipFile(target_file, "w") as zipObj:
            for upload_entry in upload_entries:
                zipObj.write(upload_entry.source, upload_entry.target_path)
    def get_float_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> FloatAttribute:
        """Fetch a Float attribute; raises MetadataInconsistency on type mismatch."""
        val = self._get_attribute(container_id, container_type, path, Float)
        return FloatAttribute(val.value)
    def get_int_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> IntAttribute:
        """Fetch an Integer attribute; raises MetadataInconsistency on type mismatch."""
        val = self._get_attribute(container_id, container_type, path, Integer)
        return IntAttribute(val.value)
    def get_bool_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> BoolAttribute:
        """Fetch a Boolean attribute; raises MetadataInconsistency on type mismatch."""
        val = self._get_attribute(container_id, container_type, path, Boolean)
        return BoolAttribute(val.value)
    def get_file_attribute(self, container_id: str, container_type: ContainerType, path: List[str]) -> FileAttribute:
        """Fetch a File attribute's metadata.

        The name is empty unless the file is local, and size is always
        reported as 0 by the mock.
        """
        val = self._get_attribute(container_id, container_type, path, File)
        return FileAttribute(
            name=os.path.basename(val.path) if val.file_type is FileType.LOCAL_FILE else "",
            ext=val.extension or "",
            size=0,
        )
    def get_string_attribute(
        self, container_id: str, container_type: ContainerType, path: List[str]
    ) -> StringAttribute:
        """Fetch a String attribute; raises MetadataInconsistency on type mismatch."""
        val = self._get_attribute(container_id, container_type, path, String)
        return StringAttribute(val.value)
    def get_datetime_attribute(
        self, container_id: str, container_type: ContainerType, path: List[str]
    ) -> DatetimeAttribute:
        """Fetch a Datetime attribute; raises MetadataInconsistency on type mismatch."""
        val = self._get_attribute(container_id, container_type, path, Datetime)
        return DatetimeAttribute(val.value)
    def get_artifact_attribute(
        self, container_id: str, container_type: ContainerType, path: List[str]
    ) -> ArtifactAttribute:
        """Fetch an Artifact attribute's hash; raises MetadataInconsistency on mismatch."""
        val = self._get_attribute(container_id, container_type, path, Artifact)
        return ArtifactAttribute(val.hash)
    def list_artifact_files(self, project_id: str, artifact_hash: str) -> List[ArtifactFileData]:
        # Raises KeyError if the (project, hash) pair was never tracked.
        return self._artifacts[(project_id, artifact_hash)]
    def get_float_series_attribute(
        self, container_id: str, container_type: ContainerType, path: List[str]
    ) -> FloatSeriesAttribute:
        """Fetch a FloatSeries attribute; reports the last value, or None if empty."""
        val = self._get_attribute(container_id, container_type, path, FloatSeries)
        return FloatSeriesAttribute(val.values[-1] if val.values else None)
    def get_string_series_attribute(
        self, container_id: str, container_type: ContainerType, path: List[str]
    ) -> StringSeriesAttribute:
        """Fetch a StringSeries attribute; reports the last value, or None if empty."""
        val = self._get_attribute(container_id, container_type, path, StringSeries)
        return StringSeriesAttribute(val.values[-1] if val.values else None)
    def get_string_set_attribute(
        self, container_id: str, container_type: ContainerType, path: List[str]
    ) -> StringSetAttribute:
        """Fetch a StringSet attribute as a (copied) set of strings."""
        val = self._get_attribute(container_id, container_type, path, StringSet)
        return StringSetAttribute(set(val.values))
def _get_attribute(
self,
container_id: str,
container_type: ContainerType,
path: List[str],
expected_type: Type[Val],
) -> Val:
run = self._get_container(container_id, container_type)
value: Optional[Value] = run.get(path)
str_path = path_to_str(path)
if value is None:
raise MetadataInconsistency("Attribute {} not found".format(str_path))
if isinstance(value, expected_type):
return value
raise MetadataInconsistency("Attribute {} is not {}".format(str_path, type.__name__))
    def get_string_series_values(
        self,
        container_id: str,
        container_type: ContainerType,
        path: List[str],
        offset: int,
        limit: int,
    ) -> StringSeriesValues:
        """Return all stored series points; `offset`/`limit` are ignored.

        Timestamps are a fixed dummy value and step equals the list index.
        """
        val = self._get_attribute(container_id, container_type, path, StringSeries)
        return StringSeriesValues(
            len(val.values),
            [StringPointValue(timestampMillis=42342, step=idx, value=v) for idx, v in enumerate(val.values)],
        )
    def get_float_series_values(
        self,
        container_id: str,
        container_type: ContainerType,
        path: List[str],
        offset: int,
        limit: int,
    ) -> FloatSeriesValues:
        """Return all stored series points; `offset`/`limit` are ignored.

        Timestamps are a fixed dummy value and step equals the list index.
        """
        val = self._get_attribute(container_id, container_type, path, FloatSeries)
        return FloatSeriesValues(
            len(val.values),
            [FloatPointValue(timestampMillis=42342, step=idx, value=v) for idx, v in enumerate(val.values)],
        )
    def get_image_series_values(
        self,
        container_id: str,
        container_type: ContainerType,
        path: List[str],
        offset: int,
        limit: int,
    ) -> ImageSeriesValues:
        # Image series are not stored offline: always report zero values.
        return ImageSeriesValues(0)
    def download_file_series_by_index(
        self,
        container_id: str,
        container_type: ContainerType,
        path: List[str],
        index: int,
        destination: str,
    ):
        """No-op: file-series downloads are not relevant for the offline backend."""
    def get_run_url(self, run_id: str, workspace: str, project_name: str, sys_id: str) -> str:
        # Offline pseudo-URL; the workspace/project/sys_id args are unused.
        return f"offline/{run_id}"
    def get_project_url(self, project_id: str, workspace: str, project_name: str) -> str:
        # Offline pseudo-URL; the workspace/project_name args are unused.
        return f"offline/{project_id}"
    def get_model_url(self, model_id: str, workspace: str, project_name: str, sys_id: str) -> str:
        # Offline pseudo-URL; only the model id is used.
        return f"offline/{model_id}"
    def get_model_version_url(
        self,
        model_version_id: str,
        model_id: str,
        workspace: str,
        project_name: str,
        sys_id: str,
    ) -> str:
        # Offline pseudo-URL; only the model-version id is used.
        return f"offline/{model_version_id}"
def _get_attribute_values(self, value_dict, path_prefix: List[str]):
assert isinstance(value_dict, dict)
for k, value in value_dict.items():
if isinstance(value, dict):
yield from self._get_attribute_values(value, path_prefix + [k])
else:
attr_type = value.accept(self._attribute_type_converter_value_visitor).value
attr_path = "/".join(path_prefix + [k])
if hasattr(value, "value"):
yield attr_path, attr_type, value.value
else:
return attr_path, attr_type, NoValue
    def fetch_atom_attribute_values(
        self, container_id: str, container_type: ContainerType, path: List[str]
    ) -> List[Tuple[str, AttributeType, Any]]:
        """List ``(full_path, type, value)`` for attributes under the namespace ``path``."""
        run = self._get_container(container_id, container_type)
        values = self._get_attribute_values(run.get(path), path)
        namespace_prefix = path_to_str(path)
        if namespace_prefix:
            # don't want to catch "ns/attribute/other" while looking for "ns/attr"
            namespace_prefix += "/"
        return [
            (full_path, attr_type, attr_value)
            for (full_path, attr_type, attr_value) in values
            if full_path.startswith(namespace_prefix)
        ]
    def search_leaderboard_entries(
        self,
        project_id: UniqueId,
        types: Optional[Iterable[ContainerType]] = None,
        query: Optional[NQLQuery] = None,
        columns: Optional[Iterable[str]] = None,
    ) -> List[LeaderboardEntry]:
        """Not relevant for the mock backend.

        NOTE: despite the declared return type this implicitly returns
        ``None`` — there is no leaderboard to search offline.
        """
class AttributeTypeConverterValueVisitor(ValueVisitor[AttributeType]):
    """Maps each concrete attribute ``Value`` class to its ``AttributeType`` enum member."""
    def visit_float(self, _: Float) -> AttributeType:
        return AttributeType.FLOAT
    def visit_integer(self, _: Integer) -> AttributeType:
        return AttributeType.INT
    def visit_boolean(self, _: Boolean) -> AttributeType:
        return AttributeType.BOOL
    def visit_string(self, _: String) -> AttributeType:
        return AttributeType.STRING
    def visit_datetime(self, _: Datetime) -> AttributeType:
        return AttributeType.DATETIME
    def visit_file(self, _: File) -> AttributeType:
        return AttributeType.FILE
    def visit_file_set(self, _: FileSet) -> AttributeType:
        return AttributeType.FILE_SET
    def visit_float_series(self, _: FloatSeries) -> AttributeType:
        return AttributeType.FLOAT_SERIES
    def visit_string_series(self, _: StringSeries) -> AttributeType:
        return AttributeType.STRING_SERIES
    def visit_image_series(self, _: FileSeries) -> AttributeType:
        return AttributeType.IMAGE_SERIES
    def visit_string_set(self, _: StringSet) -> AttributeType:
        return AttributeType.STRING_SET
    def visit_git_ref(self, _: GitRef) -> AttributeType:
        return AttributeType.GIT_REF
    def visit_artifact(self, _: Artifact) -> AttributeType:
        return AttributeType.ARTIFACT
    # Namespaces are containers of attributes, not leaf values — they have no
    # attribute type of their own.
    def visit_namespace(self, _: Namespace) -> AttributeType:
        raise NotImplementedError
    # Copy is an operation, not a stored value; it is never type-converted here.
    def copy_value(self, source_type: Type[Attribute], source_path: List[str]) -> AttributeType:
        raise NotImplementedError
class NewValueOpVisitor(OperationVisitor[Optional[Value]]):
    """Applies one queued operation to an attribute's current value.

    Each ``visit_*`` method validates that the attribute's existing value (if
    any) has the type the operation expects, then returns the value the
    attribute should hold afterwards. Returning ``None`` (delete) removes the
    attribute. A type mismatch raises ``MetadataInconsistency``.
    """
    def __init__(
        self, backend, path: List[str], current_value: Optional[Value], operation_storage: OperationStorage
    ):
        self._backend = backend
        self._path = path
        self._current_value = current_value
        # Fixed stand-in artifact hash (SHA-256 of the empty string).
        self._artifact_hash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
        self._operation_storage = operation_storage
    def visit_assign_float(self, op: AssignFloat) -> Optional[Value]:
        if self._current_value is not None and not isinstance(self._current_value, Float):
            raise self._create_type_error("assign", Float.__name__)
        return Float(op.value)
    def visit_assign_int(self, op: AssignInt) -> Optional[Value]:
        if self._current_value is not None and not isinstance(self._current_value, Integer):
            raise self._create_type_error("assign", Integer.__name__)
        return Integer(op.value)
    def visit_assign_bool(self, op: AssignBool) -> Optional[Value]:
        if self._current_value is not None and not isinstance(self._current_value, Boolean):
            raise self._create_type_error("assign", Boolean.__name__)
        return Boolean(op.value)
    def visit_assign_string(self, op: AssignString) -> Optional[Value]:
        if self._current_value is not None and not isinstance(self._current_value, String):
            raise self._create_type_error("assign", String.__name__)
        return String(op.value)
    def visit_assign_datetime(self, op: AssignDatetime) -> Optional[Value]:
        if self._current_value is not None and not isinstance(self._current_value, Datetime):
            raise self._create_type_error("assign", Datetime.__name__)
        return Datetime(op.value)
    def visit_assign_artifact(self, op: AssignArtifact) -> Optional[Value]:
        if self._current_value is not None and not isinstance(self._current_value, Artifact):
            raise self._create_type_error("assign", Artifact.__name__)
        return Artifact(op.hash)
    def visit_track_files_to_artifact(self, _: TrackFilesToArtifact) -> Optional[Value]:
        if self._current_value is not None and not isinstance(self._current_value, Artifact):
            raise self._create_type_error("save", Artifact.__name__)
        # The mock does not hash actual files; it always uses the fixed hash.
        return Artifact(self._artifact_hash)
    def visit_clear_artifact(self, _: ClearArtifact) -> Optional[Value]:
        if self._current_value is None:
            return Artifact()
        if not isinstance(self._current_value, Artifact):
            raise self._create_type_error("clear", Artifact.__name__)
        return Artifact()
    def visit_upload_file(self, op: UploadFile) -> Optional[Value]:
        if self._current_value is not None and not isinstance(self._current_value, File):
            raise self._create_type_error("save", File.__name__)
        return File.from_path(path=op.get_absolute_path(self._operation_storage), extension=op.ext)
    def visit_upload_file_content(self, op: UploadFileContent) -> Optional[Value]:
        if self._current_value is not None and not isinstance(self._current_value, File):
            raise self._create_type_error("upload_files", File.__name__)
        return File.from_content(content=base64_decode(op.file_content), extension=op.ext)
    def visit_upload_file_set(self, op: UploadFileSet) -> Optional[Value]:
        # `reset` replaces the stored globs; otherwise new globs are appended.
        if self._current_value is None or op.reset:
            return FileSet(op.file_globs)
        if not isinstance(self._current_value, FileSet):
            raise self._create_type_error("save", FileSet.__name__)
        return FileSet(self._current_value.file_globs + op.file_globs)
    def visit_log_floats(self, op: LogFloats) -> Optional[Value]:
        raw_values = [x.value for x in op.values]
        if self._current_value is None:
            return FloatSeries(raw_values)
        if not isinstance(self._current_value, FloatSeries):
            raise self._create_type_error("log", FloatSeries.__name__)
        # Append points while preserving the series' configured min/max/unit.
        return FloatSeries(
            self._current_value.values + raw_values,
            min=self._current_value.min,
            max=self._current_value.max,
            unit=self._current_value.unit,
        )
    def visit_log_strings(self, op: LogStrings) -> Optional[Value]:
        raw_values = [x.value for x in op.values]
        if self._current_value is None:
            return StringSeries(raw_values)
        if not isinstance(self._current_value, StringSeries):
            raise self._create_type_error("log", StringSeries.__name__)
        return StringSeries(self._current_value.values + raw_values)
    def visit_log_images(self, op: LogImages) -> Optional[Value]:
        raw_values = [File.from_content(base64_decode(x.value.data)) for x in op.values]
        if self._current_value is None:
            return FileSeries(raw_values)
        if not isinstance(self._current_value, FileSeries):
            raise self._create_type_error("log", FileSeries.__name__)
        return FileSeries(self._current_value.values + raw_values)
    def visit_clear_float_log(self, op: ClearFloatLog) -> Optional[Value]:
        if self._current_value is None:
            return FloatSeries([])
        if not isinstance(self._current_value, FloatSeries):
            raise self._create_type_error("clear", FloatSeries.__name__)
        # Clearing drops the points but keeps the series configuration.
        return FloatSeries(
            [],
            min=self._current_value.min,
            max=self._current_value.max,
            unit=self._current_value.unit,
        )
    def visit_clear_string_log(self, op: ClearStringLog) -> Optional[Value]:
        if self._current_value is None:
            return StringSeries([])
        if not isinstance(self._current_value, StringSeries):
            raise self._create_type_error("clear", StringSeries.__name__)
        return StringSeries([])
    def visit_clear_image_log(self, op: ClearImageLog) -> Optional[Value]:
        if self._current_value is None:
            return FileSeries([])
        if not isinstance(self._current_value, FileSeries):
            raise self._create_type_error("clear", FileSeries.__name__)
        return FileSeries([])
    def visit_config_float_series(self, op: ConfigFloatSeries) -> Optional[Value]:
        if self._current_value is None:
            return FloatSeries([], min=op.min, max=op.max, unit=op.unit)
        if not isinstance(self._current_value, FloatSeries):
            raise self._create_type_error("log", FloatSeries.__name__)
        # Re-configure min/max/unit without touching the stored points.
        return FloatSeries(self._current_value.values, min=op.min, max=op.max, unit=op.unit)
    def visit_add_strings(self, op: AddStrings) -> Optional[Value]:
        if self._current_value is None:
            return StringSet(op.values)
        if not isinstance(self._current_value, StringSet):
            raise self._create_type_error("add", StringSet.__name__)
        return StringSet(self._current_value.values.union(op.values))
    def visit_remove_strings(self, op: RemoveStrings) -> Optional[Value]:
        if self._current_value is None:
            return StringSet(set())
        if not isinstance(self._current_value, StringSet):
            raise self._create_type_error("remove", StringSet.__name__)
        return StringSet(self._current_value.values.difference(op.values))
    def visit_clear_string_set(self, op: ClearStringSet) -> Optional[Value]:
        if self._current_value is None:
            return StringSet(set())
        if not isinstance(self._current_value, StringSet):
            raise self._create_type_error("clear", StringSet.__name__)
        return StringSet(set())
    def visit_delete_files(self, op: DeleteFiles) -> Optional[Value]:
        if self._current_value is None:
            return FileSet([])
        if not isinstance(self._current_value, FileSet):
            raise self._create_type_error("delete_files", FileSet.__name__)
        # It is not important to support deleting properly in debug mode, let's just ignore this operation
        return self._current_value
    def visit_delete_attribute(self, op: DeleteAttribute) -> Optional[Value]:
        if self._current_value is None:
            raise MetadataInconsistency(
                "Cannot perform delete operation on {}. Attribute is undefined.".format(self._path)
            )
        # Returning None removes the attribute from the container.
        return None
    def visit_copy_attribute(self, op: CopyAttribute) -> Optional[Value]:
        # Resolve the source into a plain assign operation and re-visit it.
        return op.resolve(self._backend).accept(self)
    def _create_type_error(self, op_name, expected):
        return MetadataInconsistency(
            "Cannot perform {} operation on {}. Expected {}, {} found.".format(
                op_name, self._path, expected, type(self._current_value)
            )
        )
|
b2ea2789f0b7f62616a4c5e0f6092ec6fd8b96b0
|
4d44674625100e62be2bb5033339fb641bd454ac
|
/snippet/example/python/oslolog.py
|
65f346e3655e94cacf122efc8b57bf4de8e23294
|
[
"MIT"
] |
permissive
|
xgfone/snippet
|
8b9004a649d2575b493a376c4b4f3d4a7c56a4b0
|
b0b734dd35478b7ef3e6193623981f4f29b6748c
|
refs/heads/master
| 2022-03-18T12:41:09.033144
| 2022-02-20T15:26:35
| 2022-02-20T15:26:35
| 41,615,643
| 158
| 61
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,140
|
py
|
oslolog.py
|
# coding: utf-8
"""
Use the library 'oslo.log' to configure the logging.
Applications should use oslo.log’s configuration functions to register
logging-related configuration options and configure the root and other default
loggers.
(1) Call register_options() before parsing command line options.
(2) Call set_defaults() before configuring logging.
(3) Call setup() to configure logging for the application.
## Example
import sys
from oslo_log import log
def set_log(conf, project, args=None, version="unknown", default_log_levels=None,
logging_config_file=None):
# Register the command line and configuration options used by oslo.log.
log.register_options(conf)
# Set default values for the configuration options used by oslo.log.
log.set_defaults(default_log_levels=default_log_levels)
# Parse the command line options.
args = args if args else sys.argv[1:]
conf(args, project=project, version=version)
# Setup logging for the current application.
if logging_config_file:
log._load_log_config(logging_config_file)
else:
log.setup(conf, project, version)
"""
|
6a7c8b4af60b76fe0f4a3f8ea90d210eb27a78bf
|
73a0f661f1423d63e86489d4b2673f0103698aab
|
/python/oneflow/test/graph/test_alexnet_auto_parallel.py
|
1d9d16a47797e0c22ad056798144ff38eed8c4ce
|
[
"Apache-2.0"
] |
permissive
|
Oneflow-Inc/oneflow
|
4fc3e081e45db0242a465c4330d8bcc8b21ee924
|
0aab78ea24d4b1c784c30c57d33ec69fe5605e4a
|
refs/heads/master
| 2023-08-25T16:58:30.576596
| 2023-08-22T14:15:46
| 2023-08-22T14:15:46
| 81,634,683
| 5,495
| 786
|
Apache-2.0
| 2023-09-14T09:44:31
| 2017-02-11T06:09:53
|
C++
|
UTF-8
|
Python
| false
| false
| 7,459
|
py
|
test_alexnet_auto_parallel.py
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import time
import unittest
import argparse
import numpy as np
import oneflow as flow
import oneflow.unittest
from alexnet_model import alexnet
import flowvision as vision
import flowvision.transforms as transforms
def load_data_fashion_mnist(
    batch_size,
    resize=None,
    root="./data-test/fashion-mnist",
    download=True,
    source_url=None,
    num_workers=0,
):
    """Build train/test DataLoaders over the Fashion-MNIST dataset.

    Downloads the dataset into *root* (when *download* is True), applies an
    optional Resize followed by ToTensor, and returns a shuffled training
    loader and an unshuffled test loader, both with the given batch size.
    """
    root = os.path.expanduser(root)
    # Optional resize first, then tensor conversion.
    steps = [transforms.Resize(resize)] if resize else []
    steps.append(transforms.ToTensor())
    transform = transforms.Compose(steps)
    common = dict(
        root=root,
        transform=transform,
        download=download,
        source_url=source_url,
    )
    train_set = vision.datasets.FashionMNIST(train=True, **common)
    test_set = vision.datasets.FashionMNIST(train=False, **common)
    train_loader = flow.utils.data.DataLoader(
        train_set, batch_size, shuffle=True, num_workers=num_workers
    )
    test_loader = flow.utils.data.DataLoader(
        test_set, batch_size, shuffle=False, num_workers=num_workers
    )
    return train_loader, test_loader
def _parse_args():
    """Build and parse the command-line flags for AlexNet training.

    Returns the (namespace, unknown_args) pair from ``parse_known_args``.
    """
    parser = argparse.ArgumentParser("flags for train alexnet")
    # (flag, type, default, help) — includes training hyper-parameters.
    flag_specs = [
        ("--load_checkpoint", str, "", "load checkpoint"),
        (
            "--ofrecord_path",
            str,
            flow.unittest.dataset_dir("imagenette/ofrecord"),
            "dataset path",
        ),
        ("--learning_rate", float, 0.02, "learning rate"),
        ("--mom", float, 0.9, "momentum"),
        ("--epochs", int, 1, "training epochs"),
        ("--batch_size", int, 128, "val batch size"),
    ]
    for flag, flag_type, default, help_text in flag_specs:
        parser.add_argument(flag, type=flag_type, default=default, help=help_text)
    return parser.parse_known_args()
def _test_alexnet_graph(test_case, args, placement, sbp):
    """Train and evaluate AlexNet as a oneflow Graph with auto-parallel enabled.

    Loads Fashion-MNIST, builds a training graph (with SGD optimizer) and an
    eval graph, runs a truncated training loop (stops after the first logged
    iteration to save time), then measures top-1 accuracy on the test set.
    """
    data_dir = os.path.join(
        os.getenv("ONEFLOW_TEST_CACHE_DIR", "./data-test"), "fashion-mnist-lenet"
    )
    source_url = "https://oneflow-public.oss-cn-beijing.aliyuncs.com/datasets/mnist/Fashion-MNIST/"
    train_iter, test_iter = load_data_fashion_mnist(
        batch_size=args.batch_size,
        root=data_dir,
        download=True,
        source_url=source_url,
        num_workers=0,
        resize=(112, 112),
    )
    # oneflow init
    start_t = time.time()
    alexnet_module = alexnet(num_classes=10)
    end_t = time.time()
    print("init time : {}".format(end_t - start_t))
    # Place module, loss and optimizer globally with the requested SBP.
    alexnet_module.to_global(placement, sbp)
    of_cross_entropy = flow.nn.CrossEntropyLoss().to_global(placement, sbp)
    of_sgd = flow.optim.SGD(
        alexnet_module.parameters(), lr=args.learning_rate, momentum=args.mom
    )
    class AlexNetGraph(flow.nn.Graph):
        """Training graph: forward + loss + backward, auto-parallel enabled."""
        def __init__(self):
            super().__init__()
            self.alexnet = alexnet_module
            self.cross_entropy = of_cross_entropy
            self.add_optimizer(of_sgd)
            self.config.enable_auto_parallel(True)
            self.config.enable_auto_parallel_ignore_user_sbp_config(True)
            self.config.enable_auto_parallel_trunk_algo(True)
            self.config.enable_auto_parallel_sbp_collector(True)
        def build(self, image, label):
            logits = self.alexnet(image)
            loss = self.cross_entropy(logits, label)
            loss.backward()
            return loss
    alexnet_graph = AlexNetGraph()
    class AlexNetEvalGraph(flow.nn.Graph):
        """Inference-only graph: forward under no_grad, returns softmax scores."""
        def __init__(self):
            super().__init__()
            self.alexnet = alexnet_module
            self.config.enable_auto_parallel(True)
            self.config.enable_auto_parallel_ignore_user_sbp_config(True)
            self.config.enable_auto_parallel_trunk_algo(True)
            self.config.enable_auto_parallel_sbp_collector(True)
        def build(self, image):
            with flow.no_grad():
                logits = self.alexnet(image)
                predictions = logits.softmax()
            return predictions
    alexnet_eval_graph = AlexNetEvalGraph()
    of_losses = []
    print_interval = 20
    acc = 0.0
    for epoch in range(args.epochs):
        alexnet_module.train()
        for i, (image, label) in enumerate(train_iter):
            # oneflow graph train
            if image.shape[0] != args.batch_size:
                # drop last batch
                break
            start_t = time.time()
            image = image.to_global(placement, sbp).expand(args.batch_size, 3, 112, 112)
            label = label.to_global(placement, sbp)
            loss = alexnet_graph(image, label)
            end_t = time.time()
            if i % print_interval == 0:
                l = loss.numpy()
                of_losses.append(l)
                if flow.env.get_rank() == 0:
                    print(
                        "epoch {} train iter {}/{} oneflow loss {}, train time : {}".format(
                            epoch, i, len(train_iter), l, end_t - start_t
                        )
                    )
                # Stop after 20 iters to save time
                break
        if flow.env.get_rank() == 0:
            print("epoch %d train done, start validation" % epoch)
        alexnet_module.eval()
        correct_of = 0.0
        total_of = 0.0
        for image, label in test_iter:
            # oneflow graph eval
            if image.shape[0] != args.batch_size:
                # drop last batch
                break
            start_t = time.time()
            image = image.to_global(placement, sbp).expand(args.batch_size, 3, 112, 112)
            predictions = alexnet_eval_graph(image)
            of_predictions = predictions.numpy()
            clsidxs = np.argmax(of_predictions, axis=1)
            label_nd = label.numpy()
            for i in range(args.batch_size):
                total_of += 1
                if clsidxs[i] == label_nd[i]:
                    correct_of += 1
            end_t = time.time()
        # NOTE(review): if the very first eval batch is dropped, total_of stays
        # 0.0 and this division raises ZeroDivisionError — confirm acceptable.
        acc = correct_of / total_of
        if flow.env.get_rank() == 0:
            print("epoch %d, oneflow top1 val acc: %f" % (epoch, acc))
    # test_case.assertTrue(acc > 0.50)
@unittest.skipIf(os.getenv("ONEFLOW_TEST_CPU_ONLY"), "only test cpu cases")
class TestAlexnetAutoParallel(oneflow.unittest.TestCase):
    def test_alexnet_auto_parallel_1d_sbp(test_case):
        """Run the AlexNet graph on all CUDA ranks with broadcast SBP per placement axis."""
        args, _unknown = _parse_args()
        placement = flow.placement.all("cuda")
        sbp = [flow.sbp.broadcast for _ in range(len(placement.ranks.shape))]
        _test_alexnet_graph(test_case, args, placement, sbp)
if __name__ == "__main__":
unittest.main()
|
26b0687ac0dd05058f86bc998e166e4fa076e331
|
d8a94767b24377947a3e116a43cf9073c5accd8b
|
/evaluate.py
|
0e343c2b6a44b97d16d6b48d1128e3520796c670
|
[
"MIT"
] |
permissive
|
scrapinghub/article-extraction-benchmark
|
d0fa896e8ae17daeefe9fd1e805d650b3d9c1b35
|
7c60d682959d3140fae3df45121c24e9e4614009
|
refs/heads/master
| 2023-08-31T17:59:13.902280
| 2021-07-22T18:07:48
| 2021-07-22T18:07:48
| 233,835,655
| 196
| 26
|
MIT
| 2021-04-13T08:22:37
| 2020-01-14T12:23:09
|
Python
|
UTF-8
|
Python
| false
| false
| 6,869
|
py
|
evaluate.py
|
#!/usr/bin/env python3
import argparse
from collections import Counter
import json
from pathlib import Path
import random
import re
import statistics
from typing import Any, Dict, Tuple, List
def main():
    """ Perform evaluation for all ``output/*.json`` files,
    loading ground truth from ``ground-truth.json``.
    Python3.6+ is required.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--n-bootstrap', type=int, default=1000)
    parser.add_argument('--bootstrap-differences', action='store_true',
                        help='run bootstrap for differences')
    parser.add_argument('--output', type=Path, help='output results as json')
    args = parser.parse_args()
    ground_truth = load_json(Path('ground-truth.json'))
    metrics_by_name = {}
    # One metrics report per extraction system (one JSON file each).
    for path in sorted(Path('output').glob('*.json')):
        name = path.stem
        metrics = evaluate(ground_truth, load_json(path), args.n_bootstrap)
        print('{name:<20} '
              'precision={precision:.3f} ± {precision_std:.3f} '
              'recall={recall:.3f} ± {recall_std:.3f} '
              'F1={f1:.3f} ± {f1_std:.3f} '
              'accuracy={accuracy:.3f} ± {accuracy_std:.3f} '
              .format(name=name, **metrics))
        metrics_by_name[name] = metrics
    if args.bootstrap_differences:
        # check differences with bootstrap, for each unordered pair of systems
        for name, metrics in sorted(metrics_by_name.items()):
            tp_fp_fns = metrics['tp_fp_fns']
            for other_name, other_metrics in sorted(metrics_by_name.items()):
                if name >= other_name:
                    continue
                print(f'Comparison: {name} minus {other_name}')
                other_tp_fp_fns = other_metrics['tp_fp_fns']
                print_metrics_diff(tp_fp_fns, other_tp_fp_fns, args.n_bootstrap)
    if args.output:
        args.output.write_text(
            json.dumps(metrics_by_name, indent=4, sort_keys=True))
def evaluate(
    ground_truth: Dict[str, Dict],
    prediction: Dict[str, Dict],
    n_bootstrap: int,
) -> Dict[str, Any]:
    """Score *prediction* against *ground_truth* over ``articleBody`` fields.

    Returns precision/recall/f1/accuracy, the raw per-item ``tp_fp_fns``
    triples, and ``*_std`` bootstrap estimates (n_bootstrap resamples with
    replacement) of each metric's standard deviation.
    """
    if ground_truth.keys() != prediction.keys():
        raise ValueError('prediction keys do not match ground truth')
    tp_fp_fns = []
    accuracies = []
    for key in ground_truth.keys():
        true = ground_truth[key].get('articleBody', '')
        pred = prediction[key].get('articleBody', '')
        tp_fp_fns.append(string_shingle_matching(true=true, pred=pred))
        accuracies.append(get_accuracy(true=true, pred=pred))
    metrics: Dict[str, Any] = metrics_from_tp_fp_fns(tp_fp_fns)
    metrics['tp_fp_fns'] = tp_fp_fns
    metrics['accuracy'] = statistics.mean(accuracies)
    # add bootstrap estimates of confidence intervals
    b_values: Dict[str, List[float]] = {}
    for _ in range(n_bootstrap):
        n = len(tp_fp_fns)
        # resample items with replacement
        indices = [random.randint(0, n - 1) for _ in range(n)]
        b_metrics = metrics_from_tp_fp_fns([tp_fp_fns[i] for i in indices])
        for key in b_metrics:
            b_values.setdefault(key, []).append(b_metrics[key])
        b_values.setdefault('accuracy', []).append(
            statistics.mean([accuracies[i] for i in indices]))
    for key, values in sorted(b_values.items()):
        metrics[f'{key}_std'] = statistics.stdev(values)
    return metrics
def print_metrics_diff(tp_fp_fns, other_tp_fp_fns, n_bootstrap):
    """Print bootstrap mean ± std of per-metric differences (first minus second).

    Both inputs are parallel lists of (tp, fp, fn) triples; each bootstrap
    round resamples the same indices from both so the comparison is paired.
    """
    sample_size = len(tp_fp_fns)
    diffs = {}
    for _ in range(n_bootstrap):
        picks = [random.randint(0, sample_size - 1) for _ in range(sample_size)]
        ours = metrics_from_tp_fp_fns([tp_fp_fns[i] for i in picks])
        theirs = metrics_from_tp_fp_fns([other_tp_fp_fns[i] for i in picks])
        for metric_name, value in ours.items():
            if metric_name not in diffs:
                diffs[metric_name] = []
            diffs[metric_name].append(value - theirs[metric_name])
    for metric_name in sorted(diffs):
        deltas = diffs[metric_name]
        mean = statistics.mean(deltas)
        std = statistics.stdev(deltas)
        print(f'{metric_name:<10} {mean:.3f} ± {std:.3f}')
# A (true positives, false positives, false negatives) triple for one item.
TP_FP_FN = Tuple[float, float, float]


def metrics_from_tp_fp_fns(tp_fp_fns: List[TP_FP_FN]) -> Dict[str, float]:
    """Aggregate per-item (tp, fp, fn) triples into precision/recall/F1.

    Items with no retrieved shingles are excluded from precision, items with
    no relevant shingles from recall (their scores are undefined).
    """
    precision = statistics.mean([
        precision_score(tp, fp, fn) for tp, fp, fn in tp_fp_fns
        if tp + fp > 0])
    recall = statistics.mean([
        recall_score(tp, fp, fn) for tp, fp, fn in tp_fp_fns
        if tp + fn > 0])
    # BUGFIX: guard the harmonic mean — when precision and recall are both
    # zero the original expression raised ZeroDivisionError; F1 is 0 then.
    if precision + recall > 0:
        f1 = 2 * precision * recall / (precision + recall)
    else:
        f1 = 0.0
    return {
        'f1': f1,
        'precision': precision,
        'recall': recall,
    }
def precision_score(tp: float, fp: float, fn: float) -> float:
    """Precision tp / (tp + fp), treating an exact empty match as perfect.

    Returns 1.0 when there is nothing false at all (fp == fn == 0), 0.0 when
    nothing true was retrieved and nothing correct existed (tp == fp == 0).
    """
    if fp == 0 and fn == 0:
        return 1.
    if tp == 0 and fp == 0:
        return 0.
    return tp / (tp + fp)
def recall_score(tp: float, fp: float, fn: float) -> float:
    """Recall tp / (tp + fn), treating an exact empty match as perfect.

    Returns 1.0 when there is nothing false at all (fp == fn == 0), 0.0 when
    nothing relevant existed and nothing was found (tp == fn == 0).
    """
    if fp == 0 and fn == 0:
        return 1.
    if tp == 0 and fn == 0:
        return 0.
    return tp / (tp + fn)
def get_accuracy(true: str, pred: str) -> float:
    """Return 1.0 when the two texts tokenize to identical sequences, else 0.0."""
    return 1.0 if _tokenize(true) == _tokenize(pred) else 0.0
def string_shingle_matching(
    true: str, pred: str, ngram_n: int = 4,
) -> TP_FP_FN:
    """ Compute TP/FP/FN across shingles (joined ngrams).
    Intended to be used for articleBody comparison,
    similar to the one used here (with shingles instead of tokens):
    https://moz.com/devblog/benchmarking-python-content-extraction-algorithms-dragnet-readability-goose-and-eatiht/
    Counts are normalized by their sum so longer texts carry no extra weight.
    """
    gold = _all_shingles(true, ngram_n)
    guess = _all_shingles(pred, ngram_n)
    tp = fp = fn = 0.
    for shingle in set(gold) | set(guess):
        gold_count = gold.get(shingle, 0)
        guess_count = guess.get(shingle, 0)
        tp += min(gold_count, guess_count)
        fp += max(0, guess_count - gold_count)
        fn += max(0, gold_count - guess_count)
    total = tp + fp + fn
    # Normalize metrics so that longer texts do not have more weight.
    if total > 0:
        return (tp / total, fp / total, fn / total)
    return (tp, fp, fn)
def _all_shingles(text: str, ngram_n: int) -> Dict[Tuple[str, ...], int]:
    """Map each shingle (token n-gram) of *text* to its occurrence count."""
    counts = Counter()
    for shingle in _ngrams(text, ngram_n):
        counts[shingle] += 1
    return dict(counts)
_TOKEN_RE = re.compile(
r'\w+', re.UNICODE | re.MULTILINE | re.IGNORECASE | re.DOTALL)
def _tokenize(text: str) -> List[str]:
# Note that such simple tokenization will work ok for any language,
# even if several words will be clumped together, as we expect
# that extra predicted text will still be separated.
return _TOKEN_RE.findall(text or '')
def _ngrams(text: str, n: int) -> List[Tuple[str, ...]]:
    """All length-*n* token shingles of *text*.

    A text shorter than *n* tokens yields its single (short) shingle; an
    empty text yields no shingles at all.
    """
    tokens = _tokenize(text)
    window_starts = range(max(1, len(tokens) - n + 1))
    candidates = (tuple(tokens[start: start + n]) for start in window_starts)
    return [shingle for shingle in candidates if shingle]
def load_json(path: Path):
    """Parse *path* as UTF-8 JSON and return the decoded object."""
    return json.loads(path.read_text(encoding='utf8'))
if __name__ == '__main__':
main()
|
029b1389e278076a4d0cea8afcda993b7dd8e253
|
2d9a3ce2a04190d0032e8a298829022260b1d76b
|
/indra/assemblers/tsv/assembler.py
|
309beec71c034246d197bf2bb735dcfda00ccc97
|
[
"BSD-2-Clause",
"BSD-2-Clause-Views"
] |
permissive
|
sorgerlab/indra
|
f127a0f9bdd2d3f48df14575883fd31e2f4de4bf
|
6d6ca1174792b6c5a05cbf3afcb9f138fabcec6a
|
refs/heads/master
| 2023-08-21T13:25:54.654995
| 2023-06-11T16:46:41
| 2023-06-11T16:46:41
| 22,848,436
| 158
| 61
|
BSD-2-Clause
| 2023-08-30T21:47:59
| 2014-08-11T17:44:05
|
Python
|
UTF-8
|
Python
| false
| false
| 8,109
|
py
|
assembler.py
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
from copy import copy
from indra.databases import get_identifiers_url
from indra.statements import *
from indra.util import write_unicode_csv
logger = logging.getLogger(__name__)
class TsvAssembler(object):
    """Assembles Statements into a set of tabular files for export or curation.

    Currently designed for use with "raw" Statements, i.e., Statements with a
    single evidence entry. Exports Statements into a single tab-separated file
    with the following columns:

    *INDEX*
        A 1-indexed integer identifying the statement.
    *UUID*
        The UUID of the Statement.
    *TYPE*
        Statement type, given by the name of the class in indra.statements.
    *STR*
        String representation of the Statement. Contains most relevant
        information for curation including any additional statement data
        beyond the Statement type and Agents.
    *AG_A_TEXT*
        For Statements extracted from text, the text in the sentence
        corresponding to the first agent (i.e., the 'TEXT' entry in the
        db_refs dictionary). For all other Statements, the Agent name is
        given. Empty field if the Agent is None.
    *AG_A_LINKS*
        Groundings for the first agent given as a comma-separated list of
        identifiers.org links. Empty if the Agent is None.
    *AG_A_STR*
        String representation of the first agent, including additional
        agent context (e.g. modification, mutation, location, and bound
        conditions). Empty if the Agent is None.
    *AG_B_TEXT, AG_B_LINKS, AG_B_STR*
        As above for the second agent. Note that the Agent may be None (and
        these fields left empty) if the Statement consists only of a single
        Agent (e.g., SelfModification, ActiveForm, or Translocation statement).
    *PMID*
        PMID of the first entry in the evidence list for the Statement.
    *TEXT*
        Evidence text for the Statement.
    *IS_HYP*
        Whether the Statement represents a "hypothesis", as flagged by some
        reading systems and recorded in the `evidence.epistemics['hypothesis']`
        field.
    *IS_DIRECT*
        Whether the Statement represents a direct physical interaction,
        as recorded by the `evidence.epistemics['direct']` field.

    In addition, if the `add_curation_cols` flag is set when calling
    :py:meth:`TsvAssembler.make_model`, the following additional (empty)
    columns will be added, to be filled out by curators:

    *AG_A_IDS_CORRECT*
        Correctness of Agent A grounding.
    *AG_A_STATE_CORRECT*
        Correctness of Agent A context (e.g., modification, bound, and other
        conditions).
    *AG_B_IDS_CORRECT, AG_B_STATE_CORRECT*
        As above, for Agent B.
    *EVENT_CORRECT*
        Whether the event is supported by the evidence text if the entities
        (Agents A and B) are considered as placeholders (i.e.,
        ignoring the correctness of their grounding).
    *RES_CORRECT*
        For Modification statements, whether the amino acid residue indicated
        by the Statement is supported by the evidence.
    *POS_CORRECT*
        For Modification statements, whether the amino acid position indicated
        by the Statement is supported by the evidence.
    *SUBJ_ACT_CORRECT*
        For Activation/Inhibition Statements, whether the activity indicated
        for the subject (Agent A) is supported by the evidence.
    *OBJ_ACT_CORRECT*
        For Activation/Inhibition Statements, whether the activity indicated
        for the object (Agent B) is supported by the evidence.
    *HYP_CORRECT*
        Whether the Statement is correctly flagged as a hypothesis.
    *DIRECT_CORRECT*
        Whether the Statement is correctly flagged as direct.

    Parameters
    ----------
    stmts : Optional[list[indra.statements.Statement]]
        A list of INDRA Statements to be assembled.

    Attributes
    ----------
    statements : list[indra.statements.Statement]
        A list of INDRA Statements to be assembled.
    """
    def __init__(self, statements=None):
        if not statements:
            self.statements = []
        else:
            self.statements = statements
    def add_statements(self, stmts):
        """Append more Statements to the assembly list."""
        self.statements.extend(stmts)
    def make_model(self, output_file, add_curation_cols=False, up_only=False):
        """Export the statements into a tab-separated text file.

        Parameters
        ----------
        output_file : str
            Name of the output file.
        add_curation_cols : bool
            Whether to add columns to facilitate statement curation. Default
            is False (no additional columns).
        up_only : bool
            Whether to include identifiers.org links *only* for the Uniprot
            grounding of an agent when one is available. Because most
            spreadsheets allow only a single hyperlink per cell, this can make
            it easier to link to Uniprot information pages for curation
            purposes. Default is False.
        """
        stmt_header = ['INDEX', 'UUID', 'TYPE', 'STR',
                       'AG_A_TEXT', 'AG_A_LINKS', 'AG_A_STR',
                       'AG_B_TEXT', 'AG_B_LINKS', 'AG_B_STR',
                       'PMID', 'TEXT', 'IS_HYP', 'IS_DIRECT']
        if add_curation_cols:
            stmt_header = stmt_header + \
                ['AG_A_IDS_CORRECT', 'AG_A_STATE_CORRECT',
                 'AG_B_IDS_CORRECT', 'AG_B_STATE_CORRECT',
                 'EVENT_CORRECT',
                 'RES_CORRECT', 'POS_CORRECT', 'SUBJ_ACT_CORRECT',
                 'OBJ_ACT_CORRECT', 'HYP_CORRECT', 'DIRECT_CORRECT']
        rows = [stmt_header]
        for ix, stmt in enumerate(self.statements):
            # Complexes with more than two members cannot fit the two-agent
            # column layout and are skipped.
            if len(stmt.agent_list()) > 2:
                logger.info("Skipping statement with more than two members: %s"
                            % stmt)
                continue
            # Self-modifications, ActiveForms
            elif len(stmt.agent_list()) == 1:
                ag_a = stmt.agent_list()[0]
                ag_b = None
            # All others
            else:
                (ag_a, ag_b) = stmt.agent_list()
            # Put together the data row
            row = [ix+1, stmt.uuid, stmt.__class__.__name__, str(stmt)] + \
                _format_agent_entries(ag_a, up_only) + \
                _format_agent_entries(ag_b, up_only) + \
                [stmt.evidence[0].pmid, stmt.evidence[0].text,
                 stmt.evidence[0].epistemics.get('hypothesis', ''),
                 stmt.evidence[0].epistemics.get('direct', '')]
            if add_curation_cols:
                # Empty cells for the 11 curation columns declared above.
                row = row + ([''] * 11)
            rows.append(row)
        # Write to file
        write_unicode_csv(output_file, rows, delimiter='\t')
def _format_id(ns, id):
    """Format a namespace/ID pair for display and curation.

    Returns a (label, url) tuple; url is whatever get_identifiers_url
    produces (possibly None when no identifiers.org mapping exists).
    """
    label = ('%s:%s' % (ns, id)).replace(' ', '_')
    return (label, get_identifiers_url(ns, id))
def _format_agent_entries(agent, up_only):
    """Return the [text, links, str] TSV columns for one Agent.

    Parameters
    ----------
    agent : indra.statements.Agent or None
        The agent to format; None yields three empty columns.
    up_only : bool
        When True and a Uniprot grounding exists, emit only that single link.
    """
    if agent is None:
        return ['', '', '']
    # Agent text/name: prefer the raw text evidence, fall back to the name.
    agent_text = agent.db_refs.get('TEXT')
    if agent_text is None:
        agent_text = agent.name
    # Work on a copy so the caller's Agent is not mutated; TEXT is not a
    # grounding and is excluded from the links.
    db_refs = copy(agent.db_refs)
    if 'TEXT' in db_refs:
        db_refs.pop('TEXT')
    # (The original also built an unused "ns|id,..." string here; removed.)
    identifier_links = []
    if up_only and 'UP' in db_refs:
        up_label, up_url = _format_id('UP', db_refs['UP'])
        # BUGFIX: fall back to the label when no identifiers.org URL exists,
        # mirroring the general branch below (joining [None] raised TypeError).
        identifier_links = [up_url if up_url is not None else up_label]
    else:
        for ns, id in db_refs.items():
            label, url = _format_id(ns, id)
            if url is None:
                identifier_links.append(label)
            else:
                identifier_links.append(url)
    links_str = ', '.join(identifier_links)
    return [agent_text, links_str, str(agent)]
|
a08025124a074bb9c910ea9d2b89e7404e0ddb56
|
9654e0c7628c2607f191438cdea1782309bb3c2a
|
/tests/test_pbs_parser.py
|
638fa496c0e34c29eda8e82b30e5169571a02cd5
|
[
"BSD-2-Clause"
] |
permissive
|
CrayLabs/SmartSim
|
85482706627378be01c6c4adf1ba28193c8439bb
|
f9e17f00ed1109fd09610111d54ac9cb82bccaa7
|
refs/heads/develop
| 2023-08-18T02:11:52.167416
| 2023-08-16T17:07:58
| 2023-08-16T17:07:58
| 311,268,879
| 177
| 33
|
BSD-2-Clause
| 2023-09-14T19:52:08
| 2020-11-09T08:19:12
|
Python
|
UTF-8
|
Python
| false
| false
| 2,731
|
py
|
test_pbs_parser.py
|
# BSD 2-Clause License
#
# Copyright (c) 2021-2023, Hewlett Packard Enterprise
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from os.path import dirname
from pathlib import Path
from smartsim._core.launcher.pbs import pbsParser
# -- qsub ---------------------------------------------------------
def test_parse_qsub():
    """A successful qsub prints the step id verbatim on stdout."""
    raw_output = "12345.sdb"
    assert pbsParser.parse_qsub(raw_output) == "12345.sdb"
def test_parse_qsub_error():
    """The reason text is extracted from a 'qsub: <reason>' error line."""
    parsed = pbsParser.parse_qsub_error("qsub: Unknown queue")
    assert parsed == "Unknown queue"
# -- qstat ---------------------------------------------------------
def test_parse_qstat_nodes(fileutils):
    """Node names are parsed from `qstat -f -F json` sample output."""
    sample_path = fileutils.get_test_conf_path("qstat.json")
    qstat_json = Path(sample_path).read_text()
    assert pbsParser.parse_qstat_nodes(qstat_json) == ["server_1", "server_2"]
def test_parse_qstat_status():
    """The single-letter status for a given job id is read from qstat's table output."""
    qstat_table = (
        "Job id Name User Time Use S Queue\n"
        "---------------- ---------------- ---------------- -------- - -----\n"
        "1289903.sdb jobname username 00:00:00 R queue\n"
    )
    assert pbsParser.parse_qstat_jobid(qstat_table, "1289903.sdb") == "R"
|
e888f47381360758fd381f2304de17bde186bf6e
|
08a8c973eaa984b96be2306c325c4a0b3f997aec
|
/tests/test_module_load.py
|
2a60864b54a24eb3e19b653ccce95f4cb57b0d6a
|
[] |
permissive
|
ultrabug/py3status
|
889ec6679b7aa7d886bc98d86fc4051c7529b469
|
7ada9276ee12fe80491768d60603f8c5e1dc0639
|
refs/heads/master
| 2023-08-24T02:40:10.865393
| 2023-07-29T15:51:42
| 2023-07-29T15:51:42
| 8,292,338
| 934
| 426
|
BSD-3-Clause
| 2023-09-10T09:21:17
| 2013-02-19T14:59:13
|
Python
|
UTF-8
|
Python
| false
| false
| 1,061
|
py
|
test_module_load.py
|
from py3status.module_test import MockPy3statusWrapper
from py3status.module import Module
class TestModule:
    """
    Fixture class: the exact mix of members below exercises method
    discovery in py3status' Module wrapper - per the assertion in
    test_module_load, only the plain public instance method is registered.
    """
    # Class attribute - data, must not be registered as a method
    static_variable = 123
    def __init__(self):
        # Dunder - ignored by discovery
        self.instance_variable = 321
    def post_config_hook(self):
        # Special py3status lifecycle hook - not a status method
        pass
    @staticmethod
    def static_method(self):
        # Staticmethods are ignored; raising proves it is never invoked
        raise Exception("I don't want to be called!")
    @property
    def property(self):
        # Properties must not be evaluated/registered during discovery
        raise Exception("I don't want to be called!")
    def instance_method(self):
        # The ONLY member expected to appear in Module.methods
        raise Exception("I don't want to be called!")
    def _private_instance_method(self):
        # Leading underscore -> private, ignored by discovery
        raise Exception("I don't want to be called!")
    def on_click(self, event):
        # Click handler - handled separately, not a status method
        raise Exception("I don't want to be called!")
def test_module_load():
    """Only plain public instance methods are registered by Module.prepare_module."""
    config = {section: {}
              for section in ("general",
                              "py3status",
                              ".module_groups",
                              "test_module",
                              )}
    wrapper = MockPy3statusWrapper(config)
    loaded = Module("test_module", {}, wrapper, TestModule())
    loaded.prepare_module()
    assert list(loaded.methods) == ["instance_method"]
|
0f2154fd7a5d4116be8bc4a06e7314f1b5e5c2d1
|
69bf012ca88897cd87535701369f2b87c6522d57
|
/modules/s3db/cr.py
|
5120dd69c4d8cf65e6f4ddac0c04f1df8e709819
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
sahana/eden
|
e2cc73f6b34a2ab6579094da09367a9f0be10fd1
|
1cb5a76f36fb45fa636577e2ee5a9aa39f35b391
|
refs/heads/master
| 2023-08-20T20:56:57.404752
| 2023-02-24T17:16:47
| 2023-02-24T17:16:47
| 3,021,325
| 227
| 253
|
NOASSERTION
| 2023-01-10T10:32:33
| 2011-12-20T17:49:16
|
Python
|
UTF-8
|
Python
| false
| false
| 129,787
|
py
|
cr.py
|
# -*- coding: utf-8 -*-
""" Shelter (Camp) Registry, model
@copyright: 2009-2021 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("ShelterModel",
"ShelterDetailsModel",
"ShelterHousingUnitModel",
"ShelterInspectionModel",
"ShelterRegistrationModel",
"ShelterServiceModel",
#"cr_check_population_availability",
#"cr_notification_dispatcher",
"cr_resolve_shelter_flags", # Called from project_task_update_onaccept
"cr_shelter_rheader",
#"cr_shelter_population_onaccept",
#"cr_update_capacity_from_housing_units",
#"cr_update_housing_unit_population",
"cr_update_shelter_population", # Called from CumbriaEAC
#"cr_AssignUnit",
#"ShelterInspectionFlagRepresent",
#"ShelterInspectionRepresent",
#"CRShelterInspection",
)
import json
from gluon import *
from gluon.storage import Storage
from ..s3 import *
from s3layouts import S3PopupLink
# Shelter occupancy modes: night-only vs. round-the-clock accommodation.
# NOTE(review): neither constant is referenced within this part of the
# module - presumably used by other cr.py functions; confirm before removal.
NIGHT = 1
DAY_AND_NIGHT = 2
# =============================================================================
class ShelterModel(S3Model):
    """
    Core shelter registry: the shelter-type lookup table and the main
    shelter table (instance of the org_site, pr_pentity and doc_entity
    super-entities).
    """
    # Tables defined by this model
    names = ("cr_shelter_type",
             "cr_shelter",
             )
    def model(self):
        """
        Define cr_shelter_type and cr_shelter with CRUD strings,
        filter widgets, report options and custom REST methods.
        Returns:
            None
        """
        T = current.T
        db = current.db
        settings = current.deployment_settings
        # Deployment may label shelters as "Camps" in the UI
        CAMP = settings.get_ui_label_camp()
        # Track day and night capacities separately?
        day_and_night = settings.get_cr_day_and_night()
        # Population maintained automatically from registrations?
        dynamic = settings.get_cr_shelter_population_dynamic()
        configure = self.configure
        crud_strings = current.response.s3.crud_strings
        define_table = self.define_table
        super_link = self.super_link
        set_method = self.set_method
        # ---------------------------------------------------------------------
        # Shelter types
        # e.g. NGO-operated, Government evacuation center, School, Hospital -- see Agasti opt_camp_type.)
        tablename = "cr_shelter_type"
        define_table(tablename,
                     Field("name", notnull=True,
                           label = T("Name"),
                           requires = [IS_NOT_EMPTY(),
                                       IS_NOT_ONE_OF(db, "%s.name" % tablename,
                                                     skip_imports = True,
                                                     ),
                                       ],
                           ),
                     s3_comments(),
                     *s3_meta_fields())
        # CRUD strings
        if CAMP:
            ADD_SHELTER_TYPE = T("Add Camp Type")
            SHELTER_TYPE_LABEL = T("Camp Type")
            crud_strings[tablename] = Storage(
                label_create = ADD_SHELTER_TYPE,
                title_display = T("Camp Type Details"),
                title_list = T("Camp Types"),
                title_update = T("Edit Camp Type"),
                label_list_button = T("List Camp Types"),
                msg_record_created = T("Camp Type added"),
                msg_record_modified = T("Camp Type updated"),
                msg_record_deleted = T("Camp Type deleted"),
                msg_list_empty = T("No Camp Types currently registered"),
                )
        else:
            ADD_SHELTER_TYPE = T("Create Shelter Type")
            SHELTER_TYPE_LABEL = T("Shelter Type")
            crud_strings[tablename] = Storage(
                label_create = ADD_SHELTER_TYPE,
                title_display = T("Shelter Type Details"),
                title_list = T("Shelter Types"),
                title_update = T("Edit Shelter Type"),
                label_list_button = T("List Shelter Types"),
                msg_record_created = T("Shelter Type added"),
                msg_record_modified = T("Shelter Type updated"),
                msg_record_deleted = T("Shelter Type deleted"),
                msg_list_empty = T("No Shelter Types currently registered"),
                )
        configure(tablename,
                  deduplicate = S3Duplicate(),
                  )
        represent = S3Represent(lookup = tablename,
                                translate = True,
                                )
        # Reusable foreign key to cr_shelter_type (used by cr_shelter below)
        shelter_type_id = S3ReusableField("shelter_type_id", "reference %s" % tablename,
                                          label = SHELTER_TYPE_LABEL,
                                          ondelete = "RESTRICT",
                                          represent = represent,
                                          requires = IS_EMPTY_OR(
                                                        IS_ONE_OF(db, "cr_shelter_type.id",
                                                                  represent,
                                                                  )),
                                          comment = S3PopupLink(c = "cr",
                                                                f = "shelter_type",
                                                                label = ADD_SHELTER_TYPE,
                                                                ),
                                          )
        # -------------------------------------------------------------------------
        # Shelters
        #
        if settings.get_cr_shelter_code_unique():
            code_requires = IS_EMPTY_OR([IS_LENGTH(10),
                                         IS_NOT_IN_DB(db, "cr_shelter.code"),
                                         ])
        else:
            code_requires = IS_LENGTH(10)
        tablename = "cr_shelter"
        define_table(tablename,
                     super_link("doc_id", "doc_entity"),
                     super_link("pe_id", "pr_pentity"),
                     super_link("site_id", "org_site"),
                     Field("name", notnull=True,
                           length=64, # Mayon compatibility
                           label = T("Shelter Name"),
                           requires = [IS_NOT_EMPTY(),
                                       IS_LENGTH(64),
                                       ],
                           ),
                     Field("code", length=10, # Mayon compatibility
                           label = T("Code"),
                           represent = lambda v: v or NONE,
                           requires = code_requires,
                           ),
                     self.org_organisation_id(requires = self.org_organisation_requires(updateable = True),
                                              ),
                     shelter_type_id(),
                     self.gis_location_id(),
                     self.pr_person_id(label = T("Contact Person / Camp Owner"),
                                       ),
                     # Alternative for person_id: simple name field
                     Field("contact_name",
                           label = T("Contact Name"),
                           represent = lambda v: v if v else NONE,
                           readable = False,
                           writable = False,
                           ),
                     Field("phone",
                           label = T("Phone"),
                           requires = IS_EMPTY_OR(IS_PHONE_NUMBER_MULTI()),
                           represent = lambda v: v if v else NONE,
                           ),
                     Field("email",
                           label = T("Email"),
                           requires = IS_EMPTY_OR(IS_EMAIL()),
                           represent = lambda v: v if v else NONE,
                           ),
                     Field("website",
                           label = T("Website"),
                           represent = s3_url_represent,
                           requires = IS_EMPTY_OR(
                                        IS_URL(allowed_schemes = ["http", "https", None],
                                               prepend_scheme = "http",
                                               )),
                           readable = False,
                           writable = False,
                           ),
                     s3_comments(),
                     Field("obsolete", "boolean",
                           default = False,
                           label = T("Obsolete"),
                           represent = lambda opt: current.messages.OBSOLETE if opt else NONE,
                           readable = False,
                           writable = False,
                           ),
                     *s3_meta_fields())
        # Fields for pivot table reports
        report_fields = ["name",
                         "shelter_type_id",
                         #"organisation_id",
                         "shelter_details.status",
                         ]
        # Text filter fields
        text_fields = ["name",
                       #"code",
                       "comments",
                       "organisation_id$name",
                       "organisation_id$acronym",
                       "location_id$name",
                       ]
        # List fields
        list_fields = ["name",
                       "shelter_details.status",
                       "shelter_type_id",
                       #"shelter_service_shelter.service_id",
                       ]
        if dynamic:
            list_fields.append("shelter_details.capacity_day")
            if day_and_night:
                list_fields.append("capacity_night")
            list_fields.append("population_day")
            report_fields.append("shelter_details.population_day")
            if day_and_night:
                list_fields.append("population_night")
                report_fields.append("shelter_details.population_night")
        else:
            # Manual
            list_fields.append("population")
            report_fields.append("shelter_details.population")
        list_fields.append("location_id$addr_street")
        #list_fields.append("person_id")
        # Which levels of Hierarchy are we using?
        levels = current.gis.get_relevant_hierarchy_levels()
        for level in levels:
            lfield = "location_id$%s" % level
            report_fields.append(lfield)
            text_fields.append(lfield)
            list_fields.append(lfield)
        # Filter widgets
        shelter_status_opts = {1 : T("Closed"),
                               # In many languages, translations of "Open" differ
                               # between the verb and the adjective, as well as
                               # between grammatical moods or genders etc - so
                               # adding a context-comment for T() here to clarify
                               # which "Open" we mean (will not be rendered):
                               2 : T("Open##status"),
                               }
        shelter_status_filter_opts = dict(shelter_status_opts)
        shelter_status_filter_opts[None] = T("Unspecified")
        if settings.get_org_branches():
            org_filter = S3HierarchyFilter("organisation_id",
                                           leafonly = False,
                                           )
        else:
            org_filter = S3OptionsFilter("organisation_id",
                                         search = True,
                                         header = "",
                                         #hidden = True,
                                         )
        filter_widgets = [
            S3TextFilter(text_fields,
                         label = T("Search"),
                         #_class = "filter-search",
                         ),
            S3OptionsFilter("shelter_type_id",
                            label = T("Type"),
                            # Doesn't translate
                            #represent = "%(name)s",
                            ),
            org_filter,
            S3LocationFilter("location_id",
                             label = T("Location"),
                             levels = levels,
                             ),
            S3OptionsFilter("shelter_details.status",
                            label = T("Status"),
                            options = shelter_status_filter_opts,
                            none = True,
                            ),
            ]
        if dynamic:
            if day_and_night:
                filter_widgets.append(S3RangeFilter("shelter_details.available_capacity_night",
                                                    label = T("Available Capacity (Night)"),
                                                    ))
            else:
                filter_widgets.append(S3RangeFilter("shelter_details.available_capacity_day",
                                                    label = T("Available Capacity"),
                                                    ))
        if day_and_night:
            filter_widgets.append(S3RangeFilter("shelter_details.capacity_night",
                                                label = T("Total Capacity (Night)"),
                                                ))
        else:
            filter_widgets.append(S3RangeFilter("shelter_details.capacity_day",
                                                label = T("Total Capacity"),
                                                ))
        # Custom create_next
        if settings.get_cr_shelter_people_registration():
            # Go to People check-in for this shelter after creation
            create_next = URL(c="cr", f="shelter",
                              args = ["[id]", "shelter_registration"],
                              )
        else:
            create_next = None
        # CRUD Form
        crud_form = S3SQLCustomForm("name",
                                    "organisation_id",
                                    "shelter_type_id",
                                    "location_id",
                                    "person_id",
                                    "contact_name",
                                    "phone",
                                    "email",
                                    "website",
                                    "shelter_details.population",
                                    "shelter_details.capacity_day",
                                    "shelter_details.capacity_night",
                                    "shelter_details.available_capacity_day",
                                    "shelter_details.population_day",
                                    "shelter_details.population_night",
                                    "shelter_details.status",
                                    "comments",
                                    "obsolete",
                                    )
        # Table configuration
        # NOTE(review): 'lfield' in report defaults below is the last value
        # left over from the levels-loop above - this raises NameError if no
        # hierarchy levels are configured; confirm 'levels' is never empty.
        configure(tablename,
                  create_next = create_next,
                  crud_form = crud_form,
                  deduplicate = S3Duplicate(),
                  filter_widgets = filter_widgets,
                  list_fields = list_fields,
                  onaccept = self.cr_shelter_onaccept,
                  report_options = {"rows": report_fields,
                                    "cols": report_fields,
                                    "fact": report_fields,
                                    "defaults": {"rows": lfield, # Lowest-level of hierarchy
                                                 "cols": "shelter_details.status",
                                                 "fact": "count(name)",
                                                 },
                                    },
                  super_entity = ("doc_entity",
                                  "org_site",
                                  "pr_pentity",
                                  ),
                  )
        from .hrm import hrm_AssignMethod
        from .org import org_SiteCheckInMethod
        # Custom method to assign HRs
        set_method("cr", "shelter",
                   method = "assign",
                   action = hrm_AssignMethod(component = "human_resource_site"),
                   )
        # Check-in method
        set_method("cr", "shelter",
                   method = "check-in",
                   action = org_SiteCheckInMethod,
                   )
        # Notification-dispatch method
        set_method("cr", "shelter",
                   method = "dispatch",
                   action = cr_notification_dispatcher,
                   )
        # Shelter Inspection method
        set_method("cr", "shelter",
                   method = "inspection",
                   action = CRShelterInspection,
                   )
        # CRUD strings
        if CAMP:
            ADD_SHELTER = T("Add Camp")
            SHELTER_LABEL = T("Camp")
            crud_strings[tablename] = Storage(
                label_create = ADD_SHELTER,
                title_display = T("Camp Details"),
                title_list = T("Camps"),
                title_update = T("Edit Camp"),
                label_list_button = T("List Camps"),
                msg_record_created = T("Camp added"),
                msg_record_modified = T("Camp updated"),
                msg_record_deleted = T("Camp deleted"),
                msg_list_empty = T("No Camps currently registered"),
                )
        else:
            ADD_SHELTER = T("Create Shelter")
            SHELTER_LABEL = T("Shelter")
            crud_strings[tablename] = Storage(
                label_create = ADD_SHELTER,
                title_display = T("Shelter Details"),
                title_list = T("Shelters"),
                title_update = T("Edit Shelter"),
                label_list_button = T("List Shelters"),
                msg_record_created = T("Shelter added"),
                msg_record_modified = T("Shelter updated"),
                msg_record_deleted = T("Shelter deleted"),
                msg_list_empty = T("No Shelters currently registered"),
                )
        # ---------------------------------------------------------------------
        # Pass variables back to global scope (response.s3.*)
        return None
    # -------------------------------------------------------------------------
    @staticmethod
    def cr_shelter_onaccept(form):
        """
        After DB I/O
        Updates org affiliations/ownership for the shelter, and (on
        interactive updates only, not bulk imports) logs a change of the
        'obsolete' flag as an org_site_event.
        """
        form_vars = form.vars
        # Update Affiliation, record ownership and component ownership
        from .org import org_update_affiliations
        org_update_affiliations("cr_shelter", form_vars)
        if current.response.s3.bulk:
            # Import
            return
        record = form.record
        if record:
            # Update form
            # Create an org_site_event record
            s3db = current.s3db
            stable = s3db.cr_shelter
            shelter = current.db(stable.id == form_vars.id).select(stable.site_id,
                                                                   stable.obsolete,
                                                                   limitby = (0, 1),
                                                                   ).first()
            obsolete = shelter.obsolete
            if obsolete != record.obsolete:
                s3db.org_site_event.insert(site_id = shelter.site_id,
                                           event = 4, # Obsolete Change
                                           comment = obsolete,
                                           )
# =============================================================================
class ShelterDetailsModel(S3Model):
    """
    Capacity, population and status details of shelters, kept in a
    separate component table (cr_shelter_details) linked to cr_shelter
    via the org_site super-entity.
    """
    names = ("cr_shelter_details",
             )
    def model(self):
        """
        Define the cr_shelter_details table; field visibility depends on
        the day/night and dynamic-population deployment settings.
        Returns:
            None
        """
        T = current.T
        db = current.db
        settings = current.deployment_settings
        day_and_night = settings.get_cr_day_and_night()
        dynamic = settings.get_cr_shelter_population_dynamic()
        integer_represent = IS_INT_AMOUNT.represent
        # -------------------------------------------------------------------------
        # Shelter Details
        #
        shelter_status_opts = {1 : T("Closed"),
                               # In many languages, translations of "Open" differ
                               # between the verb and the adjective, as well as
                               # between grammatical moods or genders etc - so
                               # adding a context-comment for T() here to clarify
                               # which "Open" we mean (will not be rendered):
                               2 : T("Open##status"),
                               }
        # Tooltip texts for the capacity fields vary with the
        # housing-unit-management and day/night settings
        if settings.get_cr_shelter_housing_unit_management():
            if day_and_night:
                capacity_day_comment = DIV(_class = "tooltip",
                                           _title = "%s|%s|%s" % (T("Capacity (Day)"),
                                                                  T("Capacity of the shelter during the day"),
                                                                  T("Capacity evaluated adding all defined housing unit capacities"),
                                                                  ),
                                           )
                capacity_night_comment = DIV(_class = "tooltip",
                                             _title = "%s|%s|%s" % (T("Capacity (Night)"),
                                                                    T("Capacity of the shelter during the night"),
                                                                    T("Capacity evaluated adding all defined housing unit capacities"),
                                                                    ),
                                             )
            else:
                capacity_day_comment = DIV(_class = "tooltip",
                                           _title = "%s|%s|%s" % (T("Capacity"),
                                                                  T("Capacity of the shelter as a number of people"),
                                                                  T("Capacity evaluated adding all defined housing unit capacities"),
                                                                  ),
                                           )
                capacity_night_comment = None
        else:
            if day_and_night:
                capacity_day_comment = DIV(_class = "tooltip",
                                           _title = "%s|%s" % (T("Capacity (Day)"),
                                                               T("Capacity of the shelter during the day"),
                                                               ),
                                           )
                capacity_night_comment = DIV(_class = "tooltip",
                                             _title = "%s|%s" % (T("Capacity (Night)"),
                                                                 T("Capacity of the shelter during the night"),
                                                                 ),
                                             )
            else:
                capacity_day_comment = DIV(_class = "tooltip",
                                           _title = "%s|%s" % (T("Capacity"),
                                                               T("Capacity of the shelter as a number of people"),
                                                               ),
                                           )
                capacity_night_comment = None
        tablename = "cr_shelter_details"
        self.define_table(tablename,
                          # This is a component, so needs to be a super_link
                          # - can't override field name, ondelete or requires
                          self.super_link("site_id", "org_site",
                                          empty = False,
                                          instance_types = ("cr_shelter",),
                                          label = T("Shelter"),
                                          ondelete = "RESTRICT",
                                          ),
                          # Static field
                          Field("population", "integer",
                                label = T("Estimated Population"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                readable = not dynamic,
                                writable = not dynamic,
                                comment = DIV(_class = "tooltip",
                                              _title = "%s|%s" % (T("Current estimated population"),
                                                                  T("Current estimated population in shelter. Staff, Volunteers and Evacuees."),
                                                                  ),
                                              ),
                                ),
                          Field("capacity_day", "integer",
                                default = 0,
                                label = T("Capacity (Day)") if day_and_night else T("Capacity"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                comment = capacity_day_comment,
                                ),
                          Field("capacity_night", "integer",
                                default = 0,
                                label = T("Capacity (Night)"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                readable = day_and_night,
                                writable = day_and_night,
                                comment = capacity_night_comment,
                                ),
                          # Dynamic field
                          Field("available_capacity_day", "integer",
                                default = 0,
                                label = T("Available Capacity (Day)") if day_and_night else T("Available Capacity"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                readable = dynamic and day_and_night,
                                # Automatically updated
                                writable = False,
                                ),
                          # Dynamic field
                          Field("available_capacity_night", "integer",
                                default = 0,
                                label = T("Available Capacity (Night)"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                readable = dynamic and day_and_night,
                                # Automatically updated
                                writable = False,
                                ),
                          # Dynamic field
                          Field("population_day", "integer",
                                default = 0,
                                label = T("Current Population (Day)") if day_and_night else T("Current Population"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                comment = DIV(_class = "tooltip",
                                              _title = "%s|%s" % (T("Population (Day)"),
                                                                  T("Number of people registered in the shelter for day and night"),
                                                                  ),
                                              ),
                                readable = dynamic,
                                # Automatically updated
                                writable = False
                                ),
                          # Dynamic field
                          Field("population_night", "integer",
                                default = 0,
                                label = T("Current Population (Night)"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                comment = DIV(_class = "tooltip",
                                              _title = "%s|%s" % (T("Population (Night)"),
                                                                  T("Number of people registered in the shelter for the night"),
                                                                  ),
                                              ),
                                readable = dynamic and day_and_night,
                                # Automatically updated
                                writable = False
                                ),
                          Field("status", "integer",
                                label = T("Status"),
                                default = 2, # Open
                                represent = s3_options_represent(shelter_status_opts),
                                requires = IS_EMPTY_OR(
                                            IS_IN_SET(shelter_status_opts)
                                            ),
                                ),
                          *s3_meta_fields())
        self.configure(tablename,
                       onaccept = self.cr_shelter_details_onaccept,
                       )
        # ---------------------------------------------------------------------
        # Pass variables back to global scope (response.s3.*)
        return None
    # -------------------------------------------------------------------------
    @staticmethod
    def cr_shelter_details_onaccept(form):
        """
        After DB I/O
        Logs status changes as org_site_event records and, when dynamic
        population tracking is enabled, keeps the available-capacity
        fields in sync with the configured capacities.
        """
        DYNAMIC = current.deployment_settings.get_cr_shelter_population_dynamic()
        s3db = current.s3db
        dtable = s3db.cr_shelter_details
        fields = [dtable.site_id,
                  dtable.status,
                  ]
        if DYNAMIC:
            fields += [dtable.id,
                       dtable.capacity_day,
                       dtable.capacity_night,
                       ]
        details = current.db(dtable.id == form.vars.id).select(limitby = (0, 1),
                                                               *fields).first()
        site_id = details.site_id
        # Create an org_site_event record
        record = form.record
        if record:
            # Update form
            status = details.status
            if status != record.status:
                s3db.org_site_event.insert(site_id = site_id,
                                           event = 1, # Status Change
                                           status = status,
                                           )
            if DYNAMIC:
                capacity_day = details.capacity_day
                capacity_night = details.capacity_night
                if capacity_day != record.capacity_day or \
                   capacity_night != record.capacity_night:
                    # Update available capacity
                    cr_update_shelter_population(site_id)
        else:
            # Create form
            s3db.org_site_event.insert(site_id = site_id,
                                       event = 1, # Status Change
                                       status = details.status,
                                       )
            if DYNAMIC:
                # Update available capacity
                # - no-one checked-in yet
                details.update_record(available_capacity_day = details.capacity_day,
                                      available_capacity_night = details.capacity_night,
                                      )
# =============================================================================
class ShelterHousingUnitModel(S3Model):
    """
    Housing units within a shelter (cr_shelter_unit), incl. facilities,
    capacities and a map-colouring status virtual field.
    """
    names = ("cr_shelter_unit",
             "cr_shelter_unit_id",
             )
    def model(self):
        """
        Define the cr_shelter_unit table and the reusable foreign key.
        Returns:
            dict with "cr_shelter_unit_id" (S3ReusableField)
        """
        T = current.T
        db = current.db
        settings = current.deployment_settings
        day_and_night = settings.get_cr_day_and_night()
        dynamic = settings.get_cr_shelter_population_dynamic()
        integer_represent = IS_INT_AMOUNT.represent
        # -------------------------------------------------------------------------
        # Housing units
        #
        housing_unit_status_opts = {1: T("Available"),
                                    2: T("Not Available"),
                                    }
        housing_unit_handicap_facilities = {1: T("Available"),
                                            2: T("Suitable"),
                                            3: T("Not Available"),
                                            }
        tablename = "cr_shelter_unit"
        self.define_table(tablename,
                          # This is a component, so needs to be a super_link
                          # - can't override field name, ondelete or requires
                          self.super_link("site_id", "org_site",
                                          empty = False,
                                          instance_types = ("cr_shelter",),
                                          label = T("Shelter"),
                                          ondelete = "RESTRICT",
                                          ),
                          Field("name", notnull=True, length = 64,
                                label = T("Housing Unit Name"),
                                requires = [IS_NOT_EMPTY(),
                                            IS_LENGTH(64),
                                            ],
                                ),
                          self.gis_location_id(widget = S3LocationSelector(#catalog_layers=True,
                                                                           points = False,
                                                                           polygons = True,
                                                                           ),
                                               ),
                          Field("status", "integer",
                                default = 1,
                                label = T("Status"),
                                represent = s3_options_represent(housing_unit_status_opts),
                                requires = IS_EMPTY_OR(
                                            IS_IN_SET(housing_unit_status_opts)
                                            ),
                                ),
                          Field("transitory", "boolean",
                                default = False,
                                label = T("Transitory Accommodation"),
                                represent = s3_yes_no_represent,
                                comment = DIV(_class = "tooltip",
                                              _title = "%s|%s" % (T("Transitory Accommodation"),
                                                                  T("This unit is for transitory accommodation upon arrival."),
                                                                  ),
                                              ),
                                # Enable in template as required:
                                readable = False,
                                writable = False,
                                ),
                          Field("bath", "boolean",
                                default = True,
                                label = T("Available Bath"),
                                represent = s3_yes_no_represent,
                                comment = DIV(_class = "tooltip",
                                              _title = "%s|%s" % (T("Bath Availability"),
                                                                  T("Integrated bath within housing unit"),
                                                                  ),
                                              ),
                                ),
                          Field("handicap_bath", "integer",
                                default = 1,
                                label = T("Bath with handicap facilities"),
                                represent = s3_options_represent(housing_unit_handicap_facilities),
                                requires = IS_EMPTY_OR(
                                            IS_IN_SET(housing_unit_handicap_facilities)
                                            ),
                                comment = DIV(_class = "tooltip",
                                              _title = "%s|%s" % (T("Bath Handicap Facilities"),
                                                                  T("Availability of bath handicap facilities"),
                                                                  ),
                                              ),
                                ),
                          Field("shower", "boolean",
                                default = True,
                                label = T("Available Shower"),
                                represent = s3_yes_no_represent,
                                comment = DIV(_class = "tooltip",
                                              _title = "%s|%s" % (T("Shower Availability"),
                                                                  T("Integrated shower within housing unit"),
                                                                  ),
                                              ),
                                ),
                          Field("handicap_shower", "integer",
                                default = 1,
                                label = T("Shower with handicap facilities"),
                                represent = s3_options_represent(housing_unit_handicap_facilities),
                                requires = IS_EMPTY_OR(
                                            IS_IN_SET(housing_unit_handicap_facilities)
                                            ),
                                comment = DIV(_class = "tooltip",
                                              _title = "%s|%s" % (T("Shower Handicap Facilities"),
                                                                  T("Availability of shower handicap facilities"),
                                                                  ),
                                              ),
                                ),
                          Field("capacity_day", "integer",
                                default = 0,
                                label = T("Housing Unit Capacity (Day)") if day_and_night else T("Housing Unit Capacity"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                comment = DIV(_class = "tooltip",
                                              _title = "%s|%s" % (T("Housing Unit Capacity (Day)"),
                                                                  T("Capacity of the housing unit for people during the day"),
                                                                  ),
                                              ),
                                ),
                          Field("capacity_night", "integer",
                                default = 0,
                                label = T("Housing Unit Capacity (Night)"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                readable = day_and_night,
                                writable = day_and_night,
                                comment = DIV(_class = "tooltip",
                                              _title = "%s|%s" % (T("Housing Unit Capacity (Night)"),
                                                                  T("Capacity of the housing unit for people during the night"),
                                                                  ),
                                              ),
                                ),
                          Field("available_capacity_day", "integer",
                                default = 0,
                                label = T("Available Capacity (Day)") if day_and_night else T("Available Capacity"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                comment = DIV(_class = "tooltip",
                                              _title = T("Currently Available Capacity (Day)"),
                                              ),
                                # Automatically updated
                                readable = dynamic,
                                writable = False,
                                ),
                          Field("available_capacity_night", "integer",
                                default = 0,
                                label = T("Population Availability (Night)"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                comment = DIV(_class = "tooltip",
                                              _title = T("Currently Available Capacity (Night)"),
                                              ),
                                # Automatically updated
                                readable = dynamic and day_and_night,
                                writable = False,
                                ),
                          Field("population_day", "integer",
                                default = 0,
                                label = T("Current Population (Day)") if day_and_night else T("Current Population"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                comment = DIV(_class = "tooltip",
                                              _title = "%s|%s" % (T("Housing Unit Current Population"),
                                                                  T("Number of people registered in this housing unit for day and night"),
                                                                  ),
                                              ),
                                # Automatically updated
                                readable = False,
                                writable = False,
                                ),
                          Field("population_night", "integer",
                                default = 0,
                                label = T("Current Population (Night)"),
                                represent = integer_represent,
                                requires = IS_EMPTY_OR(IS_INT_IN_RANGE(0, None)),
                                comment = DIV(_class = "tooltip",
                                              _title = "%s|%s" % (T("Housing Unit Current Population"),
                                                                  T("Number of evacuees registered in this housing unit for the night"),
                                                                  ),
                                              ),
                                readable = day_and_night,
                                # Automatically updated
                                writable = False,
                                ),
                          Field("domestic_animals", "boolean",
                                default = False,
                                label = T("Free for domestic animals"),
                                represent = s3_yes_no_represent,
                                ),
                          # Virtual field: unit status by available capacity
                          Field.Method("cstatus", self.cr_shelter_unit_status),
                          s3_comments(),
                          *s3_meta_fields())
        # Components
        self.add_components(tablename,
                            cr_shelter_inspection = "shelter_unit_id",
                            )
        # List fields
        list_fields = ["name",
                       ]
        if day_and_night:
            list_fields += ["status", # @ToDO: Move to EVASS template
                            "handicap_bath", # @ToDO: Move to EVASS template
                            "capacity_day",
                            "capacity_night",
                            "population_day",
                            "population_night",
                            ]
        else:
            list_fields += ["available_capacity_day",
                            "capacity_day",
                            "population_day",
                            ]
        # Table configuration
        population_onaccept = lambda form: \
            cr_shelter_population_onaccept(form,
                                           tablename = "cr_shelter_unit",
                                           )
        self.configure(tablename,
                       # @ToDo: Allow multiple shelters to have the same
                       # name of unit (Requires that Shelter is in dvr/person.xsl/csv)
                       #deduplicate = S3Duplicate(primary=("shelter_id", "name")),
                       deduplicate = S3Duplicate(),
                       list_fields = list_fields,
                       # Extra fields for cr_shelter_unit_status:
                       extra_fields = ["capacity_day",
                                       "available_capacity_day",
                                       "status",
                                       ],
                       onaccept = population_onaccept,
                       ondelete = population_onaccept,
                       )
        # Reusable Field
        represent = S3Represent(lookup = "cr_shelter_unit")
        # NOTE(review): orderby references "shelter_id", but this table
        # links shelters via "site_id" - presumably ineffective; verify.
        shelter_unit_id = S3ReusableField("shelter_unit_id", "reference %s" % tablename,
                                          label = T("Housing Unit"),
                                          ondelete = "RESTRICT",
                                          represent = represent,
                                          requires = IS_EMPTY_OR(
                                                        IS_ONE_OF(db, "cr_shelter_unit.id",
                                                                  represent,
                                                                  orderby = "shelter_id",
                                                                  #sort = True,
                                                                  )),
                                          #widget = S3AutocompleteWidget("cr", "shelter_unit")
                                          )
        # ---------------------------------------------------------------------
        # Pass variables back to global scope (response.s3.*)
        return {"cr_shelter_unit_id" : shelter_unit_id,
                }
    # -------------------------------------------------------------------------
    @staticmethod
    def defaults():
        """
        Return safe defaults in case the model has been deactivated.
        """
        return {"cr_shelter_unit_id": S3ReusableField.dummy("shelter_unit_id"),
                }
    # -------------------------------------------------------------------------
    @staticmethod
    def cr_shelter_unit_status(row):
        """
        Virtual Field to show the status of the unit by available capacity
        - used to colour features on the map
        0: Full
        1: Partial
        2: Empty
        3: Not Available
        """
        if hasattr(row, "cr_shelter_unit"):
            row = row.cr_shelter_unit
        if hasattr(row, "status"):
            status = row.status
        else:
            status = None
        if status == 2:
            # Not Available
            return 3
        if hasattr(row, "available_capacity_day"):
            actual = row.available_capacity_day
        else:
            actual = None
        if status is not None and actual is not None:
            if actual <= 0:
                # Full (or over-capacity)
                return 0
        if hasattr(row, "capacity_day"):
            total = row.capacity_day
            if total == 0:
                # No capacity ever, so Full
                return 0
        else:
            total = None
        if status is not None and total is not None and actual is not None:
            if actual == total:
                # Empty
                return 2
            else:
                # Partial
                return 1
        if hasattr(row, "id"):
            # Reload the record
            current.log.debug("Reloading cr_shelter_unit record")
            table = current.s3db.cr_shelter_unit
            r = current.db(table.id == row.id).select(table.status,
                                                      table.capacity_day,
                                                      table.available_capacity_day,
                                                      limitby = (0, 1),
                                                      ).first()
            if r:
                status = r.status
                if status == 2:
                    # Not Available
                    return 3
                actual = r.available_capacity_day
                if actual <= 0:
                    # Full (or over-capacity)
                    return 0
                total = r.capacity_day
                if total == 0:
                    # No capacity ever, so Full
                    return 0
                elif actual == total:
                    # Empty
                    return 2
                else:
                    # Partial
                    return 1
        return NONE
# =============================================================================
class ShelterInspectionModel(S3Model):
    """
        Model for Shelter / Housing Unit Flags:
        defect flags, shelter inspections, inspection<=>flag links,
        and flag<=>project-task links (for automatic task creation)
    """
    names = ("cr_shelter_flag",
             "cr_shelter_flag_id",
             "cr_shelter_inspection",
             "cr_shelter_inspection_flag",
             "cr_shelter_inspection_task",
             )
    def model(self):
        """ Define the tables and return names for the global scope """
        T = current.T
        db = current.db
        s3 = current.response.s3
        settings = current.deployment_settings
        crud_strings = s3.crud_strings
        define_table = self.define_table
        configure = self.configure
        # Whether defect flags can auto-create project tasks
        shelter_inspection_tasks = settings.get_cr_shelter_inspection_tasks()
        task_priority_opts = settings.get_project_task_priority_opts()
        assignee_represent = self.pr_PersonEntityRepresent(show_label = False,
                                                           #show_type = False,
                                                           )
        # ---------------------------------------------------------------------
        # Flags - flags that can be set for a shelter / housing unit
        #
        tablename = "cr_shelter_flag"
        define_table(tablename,
                     Field("name",
                           requires = IS_NOT_EMPTY(),
                           ),
                     # Whether setting this flag shall auto-create a task
                     # (fields only exposed if inspection tasks are enabled)
                     Field("create_task", "boolean",
                           label = T("Create Task"),
                           default = False,
                           represent = s3_yes_no_represent,
                           readable = shelter_inspection_tasks,
                           writable = shelter_inspection_tasks,
                           ),
                     Field("task_description", length=100,
                           label = T("Task Description"),
                           requires = IS_EMPTY_OR(IS_LENGTH(100)),
                           represent = lambda v: v if v else "",
                           readable = shelter_inspection_tasks,
                           writable = shelter_inspection_tasks,
                           ),
                     Field("task_priority", "integer",
                           default = 3,
                           label = T("Priority"),
                           represent = s3_options_represent(task_priority_opts),
                           requires = IS_IN_SET(task_priority_opts,
                                                zero = None,
                                                ),
                           ),
                     # Task Assignee
                     Field("task_assign_to", "reference pr_pentity",
                           label = T("Assign to"),
                           represent = assignee_represent,
                           requires = IS_EMPTY_OR(
                                        IS_ONE_OF(db, "pr_pentity.pe_id",
                                                  assignee_represent,
                                                  filterby = "instance_type",
                                                  filter_opts = ("pr_person",
                                                                 "pr_group",
                                                                 #"org_organisation",
                                                                 ),
                                                  ),
                                        ),
                           ),
                     s3_comments(),
                     *s3_meta_fields())
        # Table settings
        configure(tablename,
                  deduplicate = S3Duplicate(),
                  onvalidation = self.shelter_flag_onvalidation,
                  )
        # CRUD Strings
        crud_strings[tablename] = Storage(
            label_create = T("Create Shelter Flag"),
            title_display = T("Shelter Flag Details"),
            title_list = T("Shelter Flags"),
            title_update = T("Edit Shelter Flag"),
            label_list_button = T("List Shelter Flags"),
            label_delete_button = T("Delete Shelter Flag"),
            msg_record_created = T("Shelter Flag created"),
            msg_record_modified = T("Shelter Flag updated"),
            msg_record_deleted = T("Shelter Flag deleted"),
            msg_list_empty = T("No Shelter Flags currently defined"),
            )
        # Reusable field
        represent = S3Represent(lookup = tablename,
                                translate = True,
                                )
        flag_id = S3ReusableField("flag_id", "reference %s" % tablename,
                                  label = T("Shelter Flag"),
                                  represent = represent,
                                  requires = IS_ONE_OF(db, "%s.id" % tablename,
                                                       represent,
                                                       ),
                                  sortby = "name",
                                  )
        # ---------------------------------------------------------------------
        # Shelter Inspection
        #
        tablename = "cr_shelter_inspection"
        define_table(tablename,
                     #self.cr_shelter_id(ondelete = "CASCADE",
                     #                   readable = False,
                     #                   writable = False,
                     #                   ),
                     self.cr_shelter_unit_id(ondelete = "CASCADE"),
                     s3_date(default = "now",
                             ),
                     s3_comments(),
                     *s3_meta_fields())
        # CRUD Form
        crud_form = S3SQLCustomForm("shelter_unit_id",
                                    "date",
                                    S3SQLInlineLink("shelter_flag",
                                                    field = "flag_id",
                                                    multiple = True,
                                                    cols = 3,
                                                    ),
                                    "comments",
                                    )
        # List fields
        list_fields = ["shelter_unit_id",
                       "date",
                       (T("Flags"), "shelter_flag__link.flag_id"),
                       (T("Registered by"), "modified_by"),
                       "comments",
                       ]
        # Table configuration
        configure(tablename,
                  crud_form = crud_form,
                  list_fields = list_fields,
                  orderby = "%s.date desc" % tablename,
                  )
        # CRUD Strings
        crud_strings[tablename] = Storage(
            label_create = T("Create Shelter Inspection"),
            title_display = T("Shelter Inspection Details"),
            title_list = T("Shelter Inspections"),
            title_update = T("Edit Shelter Inspection"),
            label_list_button = T("List Shelter Inspections"),
            label_delete_button = T("Delete Shelter Inspection"),
            msg_record_created = T("Shelter Inspection created"),
            msg_record_modified = T("Shelter Inspection updated"),
            msg_record_deleted = T("Shelter Inspection deleted"),
            msg_list_empty = T("No Shelter Inspections currently registered"),
            )
        # Components
        self.add_components(tablename,
                            cr_shelter_flag = {"link": "cr_shelter_inspection_flag",
                                               "joinby": "inspection_id",
                                               "key": "flag_id",
                                               },
                            )
        # ---------------------------------------------------------------------
        # Shelter Inspection <=> Flag link table
        #
        represent = ShelterInspectionRepresent(show_link = True)
        tablename = "cr_shelter_inspection_flag"
        define_table(tablename,
                     Field("inspection_id", "reference cr_shelter_inspection",
                           label = T("Shelter Inspection"),
                           ondelete = "CASCADE",
                           represent = represent,
                           requires = IS_ONE_OF(db, "cr_shelter_inspection.id",
                                                represent,
                                                ),
                           ),
                     flag_id(label = T("Defect found")),
                     Field("resolved", "boolean",
                           label = T("Resolved"),
                           default = False,
                           represent = s3_yes_no_represent,
                           ),
                     *s3_meta_fields())
        # List fields
        list_fields = ["inspection_id$shelter_unit_id$name",
                       "inspection_id$date",
                       (T("Registered by"), "inspection_id$modified_by"),
                       (T("Defect"), "flag_id"),
                       "resolved",
                       ]
        # Filter widgets
        filter_widgets = [S3OptionsFilter("inspection_id$shelter_unit_id",
                                          search = 10,
                                          header = True,
                                          ),
                          S3OptionsFilter("flag_id",
                                          label = T("Defect"),
                                          options = s3_get_filter_opts("cr_shelter_flag"),
                                          ),
                          S3OptionsFilter("resolved",
                                          label = T("Resolved"),
                                          options = {False: T("No"),
                                                     True: T("Yes"),
                                                     },
                                          default = False,
                                          cols = 2,
                                          ),
                          ]
        # Table Configuration
        configure(tablename,
                  filter_widgets = filter_widgets,
                  list_fields = list_fields,
                  # Can not be directly inserted nor edited
                  insertable = False,
                  editable = False,
                  create_onaccept = self.shelter_inspection_flag_onaccept,
                  )
        # CRUD Strings
        crud_strings[tablename] = Storage(
            label_create = T("Register Defect"),
            title_display = T("Defect Details"),
            title_list = T("Defects"),
            title_update = T("Edit Defect"),
            label_list_button = T("List Defects"),
            label_delete_button = T("Delete Defect"),
            msg_record_created = T("Defect created"),
            msg_record_modified = T("Defect updated"),
            msg_record_deleted = T("Defect deleted"),
            msg_list_empty = T("No Defects currently registered"),
            )
        # ---------------------------------------------------------------------
        # Inspection Flag <=> Project Task link table
        #
        tablename = "cr_shelter_inspection_task"
        define_table(tablename,
                     Field("inspection_flag_id", "reference cr_shelter_inspection_flag",
                           label = T("Defects"),
                           ondelete = "CASCADE",
                           represent = ShelterInspectionFlagRepresent(show_link = True),
                           requires = IS_ONE_OF(db, "cr_shelter_inspection_flag.id"),
                           ),
                     self.project_task_id(ondelete = "RESTRICT",
                                          ),
                     *s3_meta_fields())
        # Table Configuration
        configure(tablename,
                  list_fields = ["id",
                                 "task_id",
                                 "inspection_flag_id",
                                 "inspection_flag_id$resolved",
                                 ],
                  ondelete_cascade = self.shelter_inspection_task_ondelete_cascade,
                  )
        # ---------------------------------------------------------------------
        # Pass names back to global scope (s3.*)
        #
        return {"cr_shelter_flag_id": flag_id,
                }
    # -------------------------------------------------------------------------
    @staticmethod
    def defaults():
        """ Safe defaults for names in case the module is disabled """
        return {"cr_shelter_flag_id": S3ReusableField.dummy("flag_id"),
                }
    # -------------------------------------------------------------------------
    @staticmethod
    def shelter_flag_onvalidation(form):
        """
            Shelter Flag form validation:
            - if create_task=True, then task_description is required

            Args:
                form: the FORM
        """
        T = current.T
        formvars = form.vars
        create_task = formvars.get("create_task")
        task_description = formvars.get("task_description")
        if create_task and not task_description:
            form.errors["task_description"] = T("Task Description required")
    # -------------------------------------------------------------------------
    @staticmethod
    def shelter_inspection_flag_onaccept(form):
        """
            Shelter inspection flag onaccept:
            - auto-create task if/as configured, or link to an existing
              active task for the same defect in the same housing unit

            Args:
                form: the FORM
        """
        settings = current.deployment_settings
        if not settings.get_cr_shelter_inspection_tasks():
            # Automatic task creation disabled
            return
        try:
            record_id = form.vars.id
        except AttributeError:
            # Nothing we can do
            return
        db = current.db
        s3db = current.s3db
        # Tables
        table = s3db.cr_shelter_inspection_flag
        ftable = s3db.cr_shelter_flag
        itable = s3db.cr_shelter_inspection
        utable = s3db.cr_shelter_unit
        ltable = s3db.cr_shelter_inspection_task
        ttable = s3db.project_task
        # Get the record (flag + inspection + unit + any existing task link)
        join = (itable.on(itable.id == table.inspection_id),
                utable.on(utable.id == itable.shelter_unit_id),
                ftable.on(ftable.id == table.flag_id),
                )
        left = ltable.on(ltable.inspection_flag_id == table.id)
        query = (table.id == record_id)
        row = db(query).select(table.id,
                               table.flag_id,
                               ftable.create_task,
                               ftable.task_description,
                               ftable.task_priority,
                               ftable.task_assign_to,
                               ltable.task_id,
                               itable.shelter_unit_id,
                               utable.name,
                               join = join,
                               left = left,
                               limitby = (0, 1),
                               ).first()
        if not row:
            return
        create_task = False
        create_link = None
        flag = row.cr_shelter_flag
        task_description = flag.task_description
        task_priority = flag.task_priority
        task_assign_to = flag.task_assign_to
        shelter_unit = row.cr_shelter_unit.name
        if flag.create_task:
            inspection_task = row.cr_shelter_inspection_task
            if inspection_task.task_id is None:
                shelter_unit_id = row.cr_shelter_inspection.shelter_unit_id
                flag_id = row.cr_shelter_inspection_flag.flag_id
                # Do we have any active task for the same problem
                # in the same shelter unit?
                active_statuses = settings.get_cr_shelter_inspection_task_active_statuses()
                left = (itable.on(itable.id == table.inspection_id),
                        ltable.on(ltable.inspection_flag_id == table.id),
                        ttable.on(ttable.id == ltable.task_id),
                        )
                query = (table.flag_id == flag_id) & \
                        (table.deleted == False) & \
                        (ttable.name == task_description) & \
                        (ttable.status.belongs(active_statuses)) & \
                        (ttable.deleted == False) & \
                        (itable.shelter_unit_id == shelter_unit_id) & \
                        (itable.deleted == False)
                row = db(query).select(ttable.id,
                                       left = left,
                                       limitby = (0, 1),
                                       ).first()
                if row:
                    # Yes => link to this task
                    create_link = row.id
                else:
                    # No => create a new task
                    create_task = True
        if create_task:
            # Create a new task
            task = {"name": "%s: %s" % (shelter_unit, task_description),
                    "priority": task_priority,
                    "pe_id": task_assign_to,
                    }
            task_id = ttable.insert(**task)
            if task_id:
                task["id"] = task_id
                # Post-process create (super-entity, ownership, onaccept)
                s3db.update_super(ttable, task)
                auth = current.auth
                auth.s3_set_record_owner(ttable, task_id)
                auth.s3_make_session_owner(ttable, task_id)
                s3db.onaccept(ttable, task, method="create")
                create_link = task_id
        if create_link:
            # Create the cr_shelter_inspection_task link
            ltable.insert(inspection_flag_id = record_id,
                          task_id = create_link,
                          )
    # -------------------------------------------------------------------------
    @staticmethod
    def shelter_inspection_task_ondelete_cascade(row):
        """
            Ondelete-cascade method for inspection task links:
            - close the linked task if there are no other
              unresolved flags linked to it

            Args:
                row: the cr_shelter_inspection_task Row to be deleted
        """
        db = current.db
        s3db = current.s3db
        # Get the task_id
        ltable = s3db.cr_shelter_inspection_task
        query = (ltable.id == row.id)
        link = db(query).select(ltable.id,
                                ltable.task_id,
                                limitby = (0, 1),
                                ).first()
        task_id = link.task_id
        # Are there any other unresolved flags linked to the same task?
        ftable = s3db.cr_shelter_inspection_flag
        ttable = s3db.project_task
        query = (ltable.task_id == task_id) & \
                (ltable.id != link.id) & \
                (ltable.deleted != True) & \
                (ftable.id == ltable.inspection_flag_id) & \
                ((ftable.resolved == False) | (ftable.resolved == None))
        other = db(query).select(ltable.id,
                                 limitby = (0, 1),
                                 ).first()
        if not other:
            # Set task to completed status
            closed = current.deployment_settings \
                            .get_cr_shelter_inspection_task_completed_status()
            db(ttable.id == task_id).update(status = closed)
            # Remove task_id (to allow deletion of the link)
            link.update_record(task_id = None)
# =============================================================================
class ShelterRegistrationModel(S3Model):
    """
        Model for Shelter Registrations (individual check-in/out),
        Shelter Allocations (group capacity reservations), and the
        registration status history
    """
    names = ("cr_shelter_allocation",
             "cr_shelter_registration",
             "cr_shelter_registration_history",
             "cr_shelter_registration_status_opts",
             )
    def model(self):
        """ Define the tables and return names for the global scope """
        T = current.T
        configure = self.configure
        define_table = self.define_table
        settings = current.deployment_settings
        person_id = self.pr_person_id
        super_link = self.super_link
        # Whether day- and night-populations are tracked separately
        day_and_night = settings.get_cr_day_and_night()
        # ---------------------------------------------------------------------
        # Shelter Allocation: table to allocate shelter capacity to a group
        #
        allocation_status_opts = {1: T("requested"),
                                  2: T("available"),
                                  3: T("allocated"),
                                  4: T("occupied"),
                                  5: T("departed"),
                                  6: T("obsolete"),
                                  7: T("unavailable"),
                                  }
        tablename = "cr_shelter_allocation"
        define_table(tablename,
                     # This is a component, so needs to be a super_link
                     # - can't override field name, ondelete or requires
                     super_link("site_id", "org_site",
                                empty = False,
                                instance_types = ("cr_shelter",),
                                label = T("Shelter"),
                                ondelete = "RESTRICT",
                                ),
                     self.pr_group_id(comment = None),
                     Field("status", "integer",
                           default = 3,
                           label = T("Status"),
                           requires = IS_IN_SET(allocation_status_opts),
                           represent = s3_options_represent(allocation_status_opts),
                           ),
                     Field("group_size_day", "integer",
                           default = 0,
                           label = T("Group Size (Day)") if day_and_night else T("Group Size"),
                           ),
                     Field("group_size_night", "integer",
                           default = 0,
                           label = T("Group Size (Night)"),
                           readable = day_and_night,
                           writable = day_and_night,
                           ),
                     *s3_meta_fields())
        # Keep shelter population figures in sync on create/update/delete
        population_onaccept = lambda form: \
            cr_shelter_population_onaccept(form,
                                           tablename = "cr_shelter_allocation",
                                           )
        configure(tablename,
                  onaccept = population_onaccept,
                  ondelete = population_onaccept,
                  )
        # ---------------------------------------------------------------------
        # Shelter Registration: table to register a person to a shelter
        #
        cr_day_or_night_opts = {NIGHT: T("Night only"),
                                DAY_AND_NIGHT: T("Day and Night")
                                }
        # Registration status
        reg_status_opts = {1: T("Planned"),
                           2: T("Checked-in"),
                           3: T("Checked-out"),
                           }
        reg_status = S3ReusableField("registration_status", "integer",
                                     label = T("Status"),
                                     represent = s3_options_represent(reg_status_opts),
                                     requires = IS_IN_SET(reg_status_opts,
                                                          zero = None),
                                     )
        housing_unit = settings.get_cr_shelter_housing_unit_management()
        tablename = "cr_shelter_registration"
        define_table(tablename,
                     # This is a component, so needs to be a super_link
                     # - can't override field name, ondelete or requires
                     super_link("site_id", "org_site",
                                empty = False,
                                instance_types = ("cr_shelter",),
                                label = T("Shelter"),
                                ondelete = "RESTRICT",
                                ),
                     person_id(widget = S3AddPersonWidget(pe_label = True)),
                     self.cr_shelter_unit_id(readable = housing_unit,
                                             writable = housing_unit,
                                             ),
                     Field("day_or_night", "integer",
                           default = DAY_AND_NIGHT,
                           label = T("Presence in the shelter"),
                           represent = s3_options_represent(cr_day_or_night_opts),
                           requires = IS_IN_SET(cr_day_or_night_opts,
                                                zero = None),
                           readable = day_and_night,
                           writable = day_and_night,
                           ),
                     reg_status(),
                     s3_datetime("check_in_date",
                                 label = T("Check-in date"),
                                 default = "now",
                                 #empty = False,
                                 future = 0,
                                 ),
                     s3_datetime("check_out_date",
                                 label = T("Check-out date"),
                                 ),
                     s3_comments(),
                     *s3_meta_fields())
        registration_onaccept = self.cr_shelter_registration_onaccept
        configure(tablename,
                  deduplicate = S3Duplicate(primary = ("person_id",
                                                       "site_id",
                                                       ),
                                            ),
                  onaccept = registration_onaccept,
                  ondelete = registration_onaccept,
                  )
        if housing_unit:
            # Verify the unit belongs to the shelter
            configure(tablename,
                      onvalidation = self.cr_shelter_registration_onvalidation,
                      )
        # Custom Methods
        self.set_method("cr", "shelter_registration",
                        method = "assign",
                        action = cr_AssignUnit(),
                        )
        # ---------------------------------------------------------------------
        # Shelter Registration History: history of status changes
        #
        tablename = "cr_shelter_registration_history"
        define_table(tablename,
                     # This is a component, so needs to be a super_link
                     # - can't override field name, ondelete or requires
                     super_link("site_id", "org_site",
                                empty = False,
                                instance_types = ("cr_shelter",),
                                ondelete = "RESTRICT",
                                ),
                     person_id(),
                     s3_datetime(default = "now",
                                 ),
                     reg_status("previous_status",
                                label = T("Old Status"),
                                ),
                     reg_status("status",
                                label = T("New Status"),
                                ),
                     *s3_meta_fields())
        # History is written by onaccept only, hence read-only in the UI
        configure(tablename,
                  list_fields = ["site_id",
                                 "date",
                                 (T("Status"), "status"),
                                 (T("Modified by"), "modified_by"),
                                 ],
                  insertable = False,
                  editable = False,
                  deletable = False,
                  orderby = "%s.date desc" % tablename,
                  )
        # ---------------------------------------------------------------------
        # Pass variables back to global scope (response.s3.*)
        return {"cr_shelter_registration_status_opts": reg_status_opts,
                }
    # -------------------------------------------------------------------------
    @staticmethod
    def cr_shelter_registration_onvalidation(form):
        """
            Check if the housing unit belongs to the requested shelter

            Args:
                form: the FORM (also accepts a Row of form vars)
        """
        #controller = request.controller
        #if controller == "dvr":
        #    # Housing Unit is not mandatory during Case Registration
        #    return
        if type(form) is Row:
            form_vars = form
        else:
            form_vars = form.vars
        #if controller == "cr":
        # Registration form doesn't include the Shelter
        # @ToDo: don't assume that we are running as component of the shelter
        site_id = form_vars.site_id or (form.record and form.record.site_id) or current.request.args[0]
        unit_id = form_vars.shelter_unit_id
        if unit_id is None:
            current.response.warning = current.T("Warning: No housing unit selected")
        else:
            db = current.db
            htable = db.cr_shelter_unit
            record = db(htable.id == unit_id).select(htable.site_id,
                                                     limitby = (0, 1),
                                                     ).first()
            if str(record.site_id) != str(site_id):
                error = current.T("You have to select a housing unit belonging to the shelter")
                # NOTE(review): the form field is "shelter_unit_id", so this
                # error key may not attach to the widget - confirm
                form.errors["unit_id"] = error
                current.response.error = error
    # -------------------------------------------------------------------------
    @classmethod
    def cr_shelter_registration_onaccept(cls, form):
        """
            Registration onaccept: track status changes, update
            shelter population

            Args:
                form: the FORM (also accepts Row)
        """
        try:
            if type(form) is Row:
                formvars = form
            else:
                formvars = form.vars
            registration_id = formvars.id
        except AttributeError:
            unit_id = None
        else:
            unit_id = formvars.get("shelter_unit_id")
            if registration_id:
                s3db = current.s3db
                db = current.db
                # Get the current status
                rtable = s3db.cr_shelter_registration
                query = (rtable.id == registration_id) & \
                        (rtable.deleted != True)
                reg = db(query).select(rtable.id,
                                       rtable.site_id,
                                       rtable.shelter_unit_id,
                                       rtable.registration_status,
                                       rtable.check_in_date,
                                       rtable.check_out_date,
                                       rtable.modified_on,
                                       rtable.person_id,
                                       limitby = (0, 1),
                                       ).first()
                if reg:
                    person_id = reg.person_id
                    # Unit to check availability for
                    unit_id = reg.shelter_unit_id
                    # Get the previous status
                    htable = s3db.cr_shelter_registration_history
                    query = (htable.person_id == person_id) & \
                            (htable.site_id == reg.site_id) & \
                            (htable.deleted != True)
                    row = db(query).select(htable.status,
                                           htable.date,
                                           limitby = (0, 1),
                                           orderby = ~htable.created_on,
                                           ).first()
                    if row:
                        previous_status = row.status
                        previous_date = row.date
                    else:
                        previous_status = None
                        previous_date = None
                    # Get the current status
                    current_status = reg.registration_status
                    # Get the effective date
                    if current_status == 2:
                        effective_date_field = "check_in_date"
                    elif current_status == 3:
                        effective_date_field = "check_out_date"
                    else:
                        effective_date_field = None
                    if effective_date_field:
                        # Read from registration
                        effective_date = reg[effective_date_field]
                    else:
                        # Use modified_on for history
                        effective_date = reg.modified_on
                    if current_status != previous_status or \
                       effective_date_field and not effective_date:
                        if effective_date_field:
                            # If the new status has an effective date,
                            # make sure it gets updated when the status
                            # has changed:
                            if effective_date_field not in formvars or \
                               not effective_date or \
                               previous_date and effective_date < previous_date:
                                effective_date = current.request.utcnow
                                reg.update_record(**{effective_date_field: effective_date,
                                                     })
                        # Insert new history entry
                        htable.insert(previous_status = previous_status,
                                      status = current_status,
                                      date = effective_date,
                                      person_id = person_id,
                                      site_id = reg.site_id,
                                      )
                        # Update last_seen_on
                        #if current.deployment_settings.has_module("dvr"):
                        #    s3db.dvr_update_last_seen(person_id)
        # Update population
        cr_shelter_population_onaccept(form,
                                       tablename = "cr_shelter_registration",
                                       unit_id = unit_id,
                                       )
# =============================================================================
class ShelterServiceModel(S3Model):
    """ Model for Shelter Services (e.g. medical, housing, food) """
    names = ("cr_shelter_service",
             "cr_shelter_service_shelter",
             )
    def model(self):
        """ Define the tables and return names for the global scope """
        T = current.T
        db = current.db
        define_table = self.define_table
        # -------------------------------------------------------------------------
        # Shelter services
        # e.g. medical, housing, food, ...
        tablename = "cr_shelter_service"
        define_table(tablename,
                     Field("name", notnull=True,
                           label = T("Name"),
                           requires = IS_NOT_EMPTY(),
                           ),
                     s3_comments(),
                     *s3_meta_fields())
        # CRUD strings
        # - "Camp" terminology if so configured, otherwise "Shelter"
        if current.deployment_settings.get_ui_label_camp():
            ADD_SHELTER_SERVICE = T("Add Camp Service")
            SHELTER_SERVICE_LABEL = T("Camp Service")
            current.response.s3.crud_strings[tablename] = Storage(
                label_create = ADD_SHELTER_SERVICE,
                title_display = T("Camp Service Details"),
                title_list = T("Camp Services"),
                title_update = T("Edit Camp Service"),
                label_list_button = T("List Camp Services"),
                msg_record_created = T("Camp Service added"),
                msg_record_modified = T("Camp Service updated"),
                msg_record_deleted = T("Camp Service deleted"),
                msg_list_empty = T("No Camp Services currently registered"),
                )
        else:
            ADD_SHELTER_SERVICE = T("Create Shelter Service")
            SHELTER_SERVICE_LABEL = T("Shelter Service")
            current.response.s3.crud_strings[tablename] = Storage(
                label_create = ADD_SHELTER_SERVICE,
                title_display = T("Shelter Service Details"),
                title_list = T("Shelter Services"),
                title_update = T("Edit Shelter Service"),
                label_list_button = T("List Shelter Services"),
                msg_record_created = T("Shelter Service added"),
                msg_record_modified = T("Shelter Service updated"),
                msg_record_deleted = T("Shelter Service deleted"),
                msg_list_empty = T("No Shelter Services currently registered"),
                )
        # Reusable field
        service_represent = S3Represent(lookup = tablename,
                                        translate = True,
                                        )
        service_id = S3ReusableField("service_id", "reference %s" % tablename,
                                     label = SHELTER_SERVICE_LABEL,
                                     ondelete = "CASCADE",
                                     represent = service_represent,
                                     requires = IS_ONE_OF(db, "cr_shelter_service.id",
                                                          service_represent,
                                                          ),
                                     sortby = "name",
                                     comment = S3PopupLink(c = "cr",
                                                           f = "shelter_service",
                                                           label = ADD_SHELTER_SERVICE,
                                                           ),
                                     )
        self.configure(tablename,
                       deduplicate = S3Duplicate(),
                       )
        # ---------------------------------------------------------------------
        # Shelter Service <> Shelter link table
        #
        tablename = "cr_shelter_service_shelter"
        define_table(tablename,
                     Field("site_id", self.org_site,
                           ondelete = "CASCADE",
                           ),
                     service_id(),
                     *s3_meta_fields())
        # ---------------------------------------------------------------------
        # Pass names back to global scope (s3.*)
        #
        return None
# =============================================================================
def cr_check_population_availability(unit_id, table):
    """
    Evaluate the population capacity availability.
    Shows a non-blocking warning (response.warning) in case the people
    in the shelter/housing unit are more than its capacity.

    Args:
        unit_id: the record key - the site ID when table is cr_shelter,
                 otherwise the cr_shelter_unit record ID
        table: related table (cr_shelter or cr_shelter_unit)
    """
    T = current.T
    tablename = table._tablename
    if tablename == "cr_shelter":
        # Bug fix: this branch previously referenced an undefined name
        # "site_id" (NameError) - the site ID is passed in as unit_id
        query = (table.site_id == unit_id)
    #elif tablename == "cr_shelter_unit":
    else:
        query = (table.id == unit_id)
    record = current.db(query).select(table.capacity_day,
                                      table.population_day,
                                      table.capacity_night,
                                      table.population_night,
                                      limitby = (0, 1),
                                      ).first()
    if not record:
        # Record not found => nothing to warn about
        return
    day_and_night = current.deployment_settings.get_cr_day_and_night()
    warning = None
    full_day = full_night = False
    # Full for daytime? (population >= capacity)
    capacity_day = record.capacity_day
    population_day = record.population_day
    if capacity_day is not None and \
       population_day and \
       population_day >= capacity_day:
        full_day = True
    # Full for the night? (only relevant with day/night tracking)
    if day_and_night:
        capacity_night = record.capacity_night
        population_night = record.population_night
        if capacity_night is not None and \
           population_night and \
           population_night >= capacity_night:
            full_night = True
    if not day_and_night and full_day or full_day and full_night:
        if tablename == "cr_shelter":
            warning = T("Warning: this shelter is full")
        elif tablename == "cr_shelter_unit":
            warning = T("Warning: this housing unit is full")
    elif full_day:
        if tablename == "cr_shelter":
            warning = T("Warning: this shelter is full for daytime")
        elif tablename == "cr_shelter_unit":
            warning = T("Warning: this housing unit is full for daytime")
    elif full_night:
        if tablename == "cr_shelter":
            warning = T("Warning: this shelter is full for the night")
        elif tablename == "cr_shelter_unit":
            warning = T("Warning: this housing unit is full for the night")
    if warning:
        # Append to any existing warning rather than overwriting it
        response = current.response
        response_warning = response.warning
        if response_warning:
            response.warning = "%s - %s" % (response_warning, warning)
        else:
            response.warning = warning
# =============================================================================
def cr_notification_dispatcher(r, **attr):
    """
    Send a notification about a shelter via the messaging framework.

    Args:
        r: the S3Request - must be an interactive (html) request on a
           single cr_shelter record without component
        attr: controller attributes; an optional "rheader" callable is
              honoured for consistency with the other shelter views

    Returns:
        The output dict for the msg/compose view

    Raises:
        HTTP 405 (via r.error) for any other request
    """
    if r.representation == "html" and \
       r.name == "shelter" and r.id and not r.component:
        T = current.T
        msg = current.msg
        record = r.record
        message = ""
        text = ""
        s_id = record.id
        s_name = record.name
        s_phone = record.phone
        s_email = record.email
        s_status = record.status
        if s_phone in ("", None):
            s_phone = T("Not Defined")
        if s_email in ("", None):
            # Bug fix: this previously assigned to s_phone, so a missing
            # email clobbered the phone number instead of the email
            s_email = T("Not Defined")
        if s_status in ("", None):
            s_status = T("Not Defined")
        else:
            if s_status == 1:
                s_status = "Open"
            elif s_status == 2:
                s_status = "Close"
            else:
                s_status = "Unassigned Shelter Status"
        # Compose the plain-text message body
        text += "************************************************"
        text += "\n%s " % T("Automatic Message")
        text += "\n%s: %s " % (T("Shelter ID"), s_id)
        text += " %s: %s" % (T("Shelter name"), s_name)
        text += "\n%s: %s " % (T("Email"), s_email)
        text += " %s: %s" % (T("Phone"), s_phone)
        text += "\n%s: %s " % (T("Working Status"), s_status)
        text += "\n************************************************\n"
        # Encode the message as an OpenGeoSMS
        #message = msg.prepare_opengeosms(record.location_id,
        #                                 code="ST",
        #                                 map="google",
        #                                 text=text)
        # URL to redirect to after message sent
        url = URL(c="cr", f="shelter",
                  args = s_id,
                  )
        # Create the form
        opts = {"type": "SMS",
                # @ToDo: deployment_setting
                "subject": T("Deployment Request"),
                "message": message + text,
                "url": url,
                }
        output = msg.compose(**opts)
        # Maintain RHeader for consistency
        if attr.get("rheader"):
            rheader = attr["rheader"](r)
            if rheader:
                output["rheader"] = rheader
        output["title"] = T("Send Notification")
        current.response.view = "msg/compose.html"
        return output
    else:
        r.error(405, current.messages.BAD_METHOD)
# =============================================================================
def cr_resolve_shelter_flags(task_id):
    """
    If a task is set to an inactive status, then mark all linked
    shelter inspection flags as resolved

    Args:
        task_id: the task record ID
    """
    db = current.db
    s3db = current.s3db

    active = current.deployment_settings.get_cr_shelter_inspection_task_active_statuses()

    # Look up the task's current status
    ttable = s3db.project_task
    task = db(ttable.id == task_id).select(ttable.id,
                                           ttable.status,
                                           limitby = (0, 1),
                                           ).first()
    if not task or task.status in active:
        # Task not found, or still active => nothing to do
        return

    # Find all unresolved inspection flags linked to this task...
    ltable = s3db.cr_shelter_inspection_task
    ftable = s3db.cr_shelter_inspection_flag
    query = (ltable.task_id == task.id) & \
            (ftable.id == ltable.inspection_flag_id) & \
            ((ftable.resolved == False) | (ftable.resolved == None))
    flag_ids = {flag.id for flag in db(query).select(ftable.id)}

    # ...and mark them as resolved
    db(ftable.id.belongs(flag_ids)).update(resolved = True)
# =============================================================================
def cr_shelter_population_onaccept(form, tablename=None, unit_id=None):
    """
    Update the shelter population, onaccept

    Args:
        form: the FORM
        tablename: the table name
        unit_id: the shelter unit ID (to warn if full)
    """
    if not tablename:
        return

    db = current.db
    s3db = current.s3db
    table = s3db[tablename]

    # Determine the record ID (form can be a FORM or a Row)
    try:
        record_id = form.id if type(form) is Row else form.vars.id
    except AttributeError:
        # Nothing we can do
        return

    if tablename == "cr_shelter_unit":
        unit_id = record_id

    # Load the record to determine the shelter it belongs to
    record = db(table._id == record_id).select(table._id,
                                               table.site_id,
                                               table.deleted,
                                               table.deleted_fk,
                                               limitby = (0, 1),
                                               ).first()
    if not record:
        return

    if record.deleted:
        # Recover the site_id from the stored foreign keys
        if not record.deleted_fk:
            return
        site_id = json.loads(record.deleted_fk).get("site_id")
    else:
        site_id = record.site_id

    if site_id:
        if current.deployment_settings.get_cr_shelter_housing_unit_management():
            # First update housing units census
            cr_update_capacity_from_housing_units(site_id)

        # Shelter census
        cr_update_shelter_population(site_id)

        # Warn if unit is full
        if unit_id:
            cr_check_population_availability(unit_id,
                                             table = s3db.cr_shelter_unit,
                                             )

        # Warn if shelter is full
        cr_check_population_availability(site_id,
                                         table = s3db.cr_shelter,
                                         )
# =============================================================================
def cr_shelter_rheader(r, tabs=None):
    """
        Resource Header for Shelters

        Args:
            r: the S3Request
            tabs: the tabs to use (default: built from deployment settings)
    """
    if r.representation != "html":
        # RHeaders only used in interactive views
        return None
    rheader = None
    tablename, record = s3_rheader_resource(r)
    if tablename == "cr_shelter" and record:
        T = current.T
        s3db = current.s3db
        if not tabs:
            # Build the default tabs as per deployment settings
            settings = current.deployment_settings
            tabs = [(T("Basic Details"), None),
                    ]
            if settings.get_L10n_translate_org_site():
                tabs.append((T("Local Names"), "name"))
            if settings.get_cr_tags():
                tabs.append((T("Tags"), "tag"))
            if settings.get_cr_shelter_people_registration():
                tabs.extend([(T("Client Reservation"), "shelter_allocation"),
                             (T("Client Registration"), "shelter_registration"),
                             ])
            if settings.has_module("hrm"):
                STAFF = settings.get_hrm_staff_label()
                tabs.append((STAFF, "human_resource"))
                permit = current.auth.s3_has_permission
                if permit("update", tablename, r.id) and \
                   permit("create", "hrm_human_resource_site"):
                    tabs.append((T("Assign %(staff)s") % {"staff": STAFF}, "assign"))
            if settings.get_cr_shelter_housing_unit_management():
                tabs.append((T("Housing Units"), "shelter_unit"))
            #tabs.append((T("Events"), "event_shelter"))
            #if settings.has_module("assess"):
            #    tabs.append((T("Assessments"), "rat"))
            if settings.has_module("inv"):
                # Inventory/requests tabs
                from .inv import inv_tabs, inv_req_tabs
                tabs.extend(inv_req_tabs(r, match=False))
                tabs.extend(inv_tabs(r))
                tabs.append((T("Assets"), "asset"))
            if settings.has_module("msg"):
                tabs.append((T("Send Notification"), "dispatch"))
        rheader_tabs = s3_rheader_tabs(r, tabs)
        if r.name == "shelter":
            location = r.table.location_id.represent(record.location_id)
            rheader = DIV(TABLE(TR(TH("%s: " % T("Name")), record.name
                                   ),
                                TR(TH("%s: " % T("Location")), location
                                   ),
                                ),
                          rheader_tabs,
                          )
        else:
            rheader = DIV(TABLE(TR(TH("%s: " % T("Name")), record.name
                                   ),
                                ),
                          rheader_tabs,
                          )
    return rheader
# =============================================================================
def cr_update_capacity_from_housing_units(site_id):
    """
    Update shelter capacity numbers: the new capacity numbers are the
    sums of the capacities of all active housing units of the shelter.

    To be called onaccept/ondelete of cr_shelter_registration and
    cr_shelter_allocation.

    Args:
        site_id: the Site ID
    """
    db = current.db
    s3db = current.s3db

    utable = s3db.cr_shelter_unit

    # Only active (status=1), undeleted units of this shelter count
    query = (utable.site_id == site_id) & \
            (utable.status == 1) & \
            (utable.deleted != True)

    # Aggregate the unit capacities
    sum_day = utable.capacity_day.sum()
    sum_night = utable.capacity_night.sum()
    totals = db(query).select(sum_day,
                              sum_night,
                              #limitby = (0, 1),
                              ).first()
    if totals:
        capacity_day = totals[sum_day]
        capacity_night = totals[sum_night]
    else:
        capacity_day = capacity_night = 0

    # Write the new totals to the shelter details
    db(s3db.cr_shelter_details.site_id == site_id).update(capacity_day = capacity_day,
                                                          capacity_night = capacity_night,
                                                          )
# =============================================================================
def cr_update_housing_unit_population(site_id):
    """
        Update housing unit population numbers and available capacities
        from the current shelter registrations.

        To be called onaccept/ondelete of cr_shelter_registration and
        cr_shelter_allocation.

        Args:
            site_id: the Site ID
    """

    db = current.db
    settings = current.deployment_settings

    htable = db.cr_shelter_unit
    rtable = db.cr_shelter_registration

    # Left join condition to count registrations per housing unit
    rjoin = (htable.id == rtable.shelter_unit_id) & \
            (rtable.deleted != True)

    check_out_is_final = settings.get_cr_check_out_is_final()
    if check_out_is_final:
        # Exclude checked-out people (registration status 3) from the
        # population count.
        # FIX: this must extend the join condition - previously it did
        # "rtable &= (...)", overwriting the Table reference with a Query
        # and breaking all subsequent uses of rtable (cf. the analogous
        # filter in cr_update_shelter_population)
        rjoin &= (rtable.registration_status != 3)

    # All active (status=1), undeleted housing units of this shelter
    query = (htable.site_id == site_id) & \
            (htable.status == 1) & \
            (htable.deleted != True)

    rcount = rtable.id.count()

    day_and_night = settings.get_cr_day_and_night()
    if day_and_night:
        # Separate day-time and night-time population numbers
        for daytime in (True, False):
            if daytime:
                fn_capacity = "capacity_day"
                fn_population = "population_day"
                fn_available_capacity = "available_capacity_day"
                # Day-time population counts only day-and-night registrations
                left = rtable.on(rjoin & (rtable.day_or_night == DAY_AND_NIGHT))
            else:
                fn_capacity = "capacity_night"
                fn_population = "population_night"
                fn_available_capacity = "available_capacity_night"
                # Night-time population counts all registrations
                left = rtable.on(rjoin)

            rows = db(query).select(htable.id,
                                    htable[fn_capacity],
                                    htable[fn_population],
                                    htable[fn_available_capacity],
                                    rtable.id.count(),
                                    groupby = htable.id,
                                    left = left,
                                    )
            for row in rows:
                data = {}
                unit = row[str(htable)]
                population = row[rcount]
                # Update population
                current_population = unit[fn_population]
                if current_population != population:
                    data[fn_population] = population
                # Update available capacity
                capacity = unit[fn_capacity]
                if capacity > 0:
                    available_capacity = capacity - population
                else:
                    available_capacity = 0
                if unit[fn_available_capacity] != available_capacity:
                    data[fn_available_capacity] = available_capacity
                # Write only if data have changed
                if data:
                    db(htable.id == unit.id).update(**data)
    else:
        # Single population number per unit (applies both day and night)
        left = rtable.on(rjoin)
        rows = db(query).select(htable.id,
                                htable.capacity_day,
                                htable.capacity_night,
                                htable.population_day,
                                htable.population_night,
                                htable.available_capacity_day,
                                htable.available_capacity_night,
                                rcount,
                                groupby = htable.id,
                                left = left,
                                )
        for row in rows:
            data = {}
            unit = row[str(htable)]
            population = row[rcount]
            # Update day-time population/capacity
            current_population = unit.population_day
            if current_population != population:
                data["population_day"] = population
            capacity = unit.capacity_day
            if capacity > 0:
                available_capacity = capacity - population
            else:
                available_capacity = 0
            if unit.available_capacity_day != available_capacity:
                data["available_capacity_day"] = available_capacity
            # Update night-time population/capacity
            current_population = unit.population_night
            if current_population != population:
                data["population_night"] = population
            capacity = unit.capacity_night
            if capacity > 0:
                available_capacity = capacity - population
            else:
                available_capacity = 0
            if unit.available_capacity_night != available_capacity:
                data["available_capacity_night"] = available_capacity
            # Write only if data have changed
            if data:
                unit_id = unit.id
                db(htable.id == unit_id).update(**data)
# =============================================================================
def cr_update_shelter_population(site_id):
    """
        Update population and available capacity numbers, to be
        called onaccept/ondelete of cr_shelter_registration and
        cr_shelter_allocation.

        Args:
            site_id: the Site ID
    """

    db = current.db
    s3db = current.s3db
    settings = current.deployment_settings

    dtable = s3db.cr_shelter_details

    # Get the details record
    record = db(dtable.site_id == site_id).select(dtable.id,
                                                  dtable.capacity_day,
                                                  dtable.capacity_night,
                                                  limitby = (0, 1),
                                                  ).first()
    if not record:
        # Create one, then re-read it so update_record works below
        dtable.insert(site_id = site_id)
        record = db(dtable.site_id == site_id).select(dtable.id,
                                                      dtable.capacity_day,
                                                      dtable.capacity_night,
                                                      limitby = (0, 1),
                                                      ).first()

    # Get population numbers, grouped by registration type (day_or_night)
    rtable = s3db.cr_shelter_registration
    query = (rtable.site_id == site_id) & \
            (rtable.deleted != True)
    if settings.get_cr_check_out_is_final():
        # Checked-out people (registration status 3) no longer count
        query &= (rtable.registration_status != 3)
    cnt = rtable._id.count()
    rows = db(query).select(rtable.day_or_night,
                            cnt,
                            groupby = rtable.day_or_night,
                            orderby = rtable.day_or_night,
                            )

    population_day = population_night = 0
    for row in rows:
        reg_type = row[rtable.day_or_night]
        number = row[cnt]
        if reg_type == NIGHT and number:
            population_night = number
        elif reg_type == DAY_AND_NIGHT and number:
            population_day = number

    # population_day is both day /and/ night
    population_night += population_day

    # Get allocation numbers (blocked group capacity)
    # @ToDo: deployment_setting to disable Allocations
    atable = s3db.cr_shelter_allocation
    query = (atable.site_id == site_id) & \
            (atable.status.belongs((1, 2, 3, 4))) & \
            (atable.deleted != True)
    dcnt = atable.group_size_day.sum()
    ncnt = atable.group_size_night.sum()
    row = db(query).select(dcnt,
                           ncnt,
                           limitby = (0, 1),
                           orderby = dcnt,
                           ).first()
    if row:
        if row[dcnt] is not None:
            allocated_capacity_day = row[dcnt]
        else:
            allocated_capacity_day = 0
        if row[ncnt] is not None:
            allocated_capacity_night = row[ncnt]
        else:
            allocated_capacity_night = 0
    else:
        allocated_capacity_day = allocated_capacity_night = 0

    # Compute available capacity = capacity - population - allocations
    # (zero capacity means no availability is tracked)
    capacity_day = record.capacity_day
    if capacity_day:
        available_capacity_day = capacity_day - \
                                 population_day - \
                                 allocated_capacity_day
    else:
        available_capacity_day = 0
    capacity_night = record.capacity_night
    if capacity_night:
        available_capacity_night = record.capacity_night - \
                                   population_night - \
                                   allocated_capacity_night
    else:
        available_capacity_night = 0

    if settings.get_cr_shelter_housing_unit_management():
        # Propagate to housing-unit level numbers as well
        cr_update_housing_unit_population(site_id)

    # Update record
    record.update_record(population_day = population_day,
                         population_night = population_night,
                         available_capacity_day = available_capacity_day,
                         available_capacity_night = available_capacity_night,
                         )
# =============================================================================
class cr_AssignUnit(S3CRUD):
    """
        Assign a Person to a Housing Unit
    """

    # -------------------------------------------------------------------------
    def apply_method(self, r, **attr):
        """
            Entry point for REST API

            Args:
                r: the S3Request
                attr: controller arguments
        """

        try:
            person_id = int(r.get_vars["person_id"])
        except (AttributeError, ValueError, TypeError):
            r.error(400, current.messages.BAD_REQUEST)

        # Standard CRUD settings and form
        self.settings = current.response.s3.crud
        sqlform = self._config("crud_form")
        self.sqlform = sqlform or S3SQLDefaultForm()
        self.data = None

        # Is there already a registration for this person?
        table = current.s3db.cr_shelter_registration
        query = (table.deleted == False) & \
                (table.person_id == person_id)
        registration = current.db(query).select(table.id,
                                                limitby = (0, 1),
                                                ).first()

        if registration:
            # Open an update form for the existing registration
            r.method = "update" # Ensure correct View template is used
            self.record_id = registration.id
            return self.update(r, **attr)

        # Open a create form, pre-populated with the person_id
        r.method = "create" # Ensure correct View template is used
        self.data = {"person_id": person_id}
        return self.create(r, **attr)
# =============================================================================
class ShelterInspectionFlagRepresent(S3Represent):
    """ Representations of Shelter Inspection Flags """

    def __init__(self, show_link=False):
        """
            Args:
                show_link: render the representation as a link to the
                           corresponding shelter inspection record
        """

        super(ShelterInspectionFlagRepresent,
              self).__init__(lookup = "cr_shelter_inspection_flag",
                             show_link = show_link,
                             )

    # ---------------------------------------------------------------------
    def link(self, k, v, row=None):
        """
            Render the representation as a link to the inspection record

            Args:
                k: the inspection flag ID
                v: the representation
                row: the row from lookup_rows
        """

        if not row:
            return v

        inspection_id = row.cr_shelter_inspection.id
        if not inspection_id:
            return v

        return A(v, _href = URL(c = "cr",
                                f = "shelter_inspection",
                                args = [inspection_id],
                                ),
                 )

    # ---------------------------------------------------------------------
    def represent_row(self, row):
        """
            Represent a Row as "<unit> (<date>): <flag>"

            Args:
                row: the Row
        """

        return "%s (%s): %s" % (row.cr_shelter_unit.name,
                                row.cr_shelter_inspection.date,
                                row.cr_shelter_flag.name,
                                )

    # ---------------------------------------------------------------------
    def lookup_rows(self, key, values, fields=None):
        """
            Look up all rows referenced by values

            Args:
                key: the key Field
                values: the values
                fields: the fields to retrieve
        """

        s3db = current.s3db

        table = self.table
        ftable = s3db.cr_shelter_flag
        itable = s3db.cr_shelter_inspection
        utable = s3db.cr_shelter_unit

        # Join flag type, inspection and housing unit for representation
        left = (ftable.on(ftable.id == table.flag_id),
                itable.on(itable.id == table.inspection_id),
                utable.on(utable.id == itable.shelter_unit_id),
                )

        if len(values) == 1:
            query = (table.id == values[0])
        else:
            query = (table.id.belongs(values))

        return current.db(query).select(table.id,
                                        utable.name,
                                        itable.id,
                                        itable.date,
                                        ftable.name,
                                        left = left,
                                        limitby = (0, len(values)),
                                        )
# =============================================================================
class ShelterInspectionRepresent(S3Represent):
    """ Representations of Shelter Inspections """

    def __init__(self, show_link=False):
        """
            Args:
                show_link: render the representation as a link to the
                           shelter inspection record
        """

        super(ShelterInspectionRepresent,
              self).__init__(lookup = "cr_shelter_inspection",
                             show_link = show_link,
                             )

    # ---------------------------------------------------------------------
    def link(self, k, v, row=None):
        """
            Render the representation as a link to the inspection record

            Args:
                k: the inspection ID
                v: the representation
                row: the row from lookup_rows
        """

        if not row:
            return v

        inspection_id = row.cr_shelter_inspection.id
        if not inspection_id:
            return v

        return A(v, _href = URL(c = "cr",
                                f = "shelter_inspection",
                                args = [inspection_id],
                                ),
                 )

    # ---------------------------------------------------------------------
    def represent_row(self, row):
        """
            Represent a Row as "<date>: <unit>"

            Args:
                row: the Row
        """

        return "%s: %s" % (row.cr_shelter_inspection.date,
                           row.cr_shelter_unit.name,
                           )

    # ---------------------------------------------------------------------
    def lookup_rows(self, key, values, fields=None):
        """
            Look up all rows referenced by values

            Args:
                key: the key Field
                values: the values
                fields: the fields to retrieve
        """

        table = self.table
        utable = current.s3db.cr_shelter_unit

        if len(values) == 1:
            query = (table.id == values[0])
        else:
            query = (table.id.belongs(values))

        # Join the housing unit for representation
        return current.db(query).select(table.id,
                                        table.date,
                                        utable.name,
                                        left = utable.on(utable.id == table.shelter_unit_id),
                                        limitby = (0, len(values)),
                                        )
# =============================================================================
class CRShelterInspection(S3Method):
    """
        Mobile-optimised UI for shelter inspection
    """

    # -------------------------------------------------------------------------
    def apply_method(self, r, **attr):
        """
            Main entry point for REST interface.

            Dispatches by representation:
                - html GET/POST => interactive inspection form
                - json POST     => Ajax registration of an inspection

            Args:
                r: the S3Request instance
                attr: controller parameters
        """

        if not self.permitted():
            current.auth.permission.fail()

        output = {}

        representation = r.representation
        if representation == "html":
            if r.http in ("GET", "POST"):
                output = self.inspection_form(r, **attr)
            else:
                r.error(405, current.ERROR.BAD_METHOD)
        elif representation == "json":
            if r.http == "POST":
                output = self.inspection_ajax(r, **attr)
            else:
                r.error(405, current.ERROR.BAD_METHOD)
        else:
            r.error(415, current.ERROR.BAD_FORMAT)

        return output

    # -------------------------------------------------------------------------
    def permitted(self):
        """
            Check whether the user is permitted to use this method;
            currently always True (permission check not yet implemented)
        """
        # @todo: implement
        return True

    # -------------------------------------------------------------------------
    def inspection_form(self, r, **attr):
        """
            Generate the form

            Args:
                r: the S3Request instance
                attr: controller parameters

            Returns:
                dict with the form, for the view template
        """

        T = current.T
        db = current.db
        s3db = current.s3db
        settings = current.deployment_settings
        response = current.response

        output = {}

        # Limit selection of shelter units to current shelter
        record = r.record
        if record:
            dbset = db(s3db.cr_shelter_unit.site_id == record.site_id)
        else:
            dbset = db

        # Representation methods for form widgets
        shelter_unit_represent = S3Represent(lookup = "cr_shelter_unit")
        shelter_flag_represent = S3Represent(lookup = "cr_shelter_flag",
                                             translate = True,
                                             )

        # Standard form fields and data
        formfields = [Field("shelter_unit_id",
                            label = T("Housing Unit"),
                            requires = IS_ONE_OF(dbset, "cr_shelter_unit.id",
                                                 shelter_unit_represent,
                                                 orderby = "shelter_id",
                                                 ),
                            widget = S3MultiSelectWidget(multiple = False,
                                                         search = True,
                                                         ),
                            ),
                      Field("shelter_flags",
                            label = T("Defects"),
                            requires = IS_ONE_OF(db, "cr_shelter_flag.id",
                                                 shelter_flag_represent,
                                                 multiple = True,
                                                 ),
                            widget = S3GroupedOptionsWidget(cols = 2,
                                                            size = None,
                                                            ),
                            ),
                      s3_comments(comment = None),
                      ]

        # Buttons
        # NB type="button": submission happens via Ajax (s3.shelter_inspection.js),
        # not via a regular form POST
        submit_btn = INPUT(_class = "tiny primary button submit-btn",
                           _name = "submit",
                           _type = "button",
                           _value = T("Submit"),
                           )
        buttons = [submit_btn]

        # Add the cancel-action
        buttons.append(A(T("Cancel"), _class = "cancel-action action-lnk"))

        # Generate form
        widget_id = "shelter-inspection-form"
        formstyle = settings.get_ui_formstyle()
        form = SQLFORM.factory(record = None,
                               showid = False,
                               formstyle = formstyle,
                               table_name = "shelter_inspection",
                               buttons = buttons,
                               #hidden = hidden,
                               _id = widget_id,
                               *formfields)
        output["form"] = form

        # Custom view
        response.view = self._view(r, "cr/shelter_inspection.html")

        # Inject JS
        options = {"ajaxURL": r.url(None,
                                    method = "inspection",
                                    representation = "json",
                                    ),
                   }
        self.inject_js(widget_id, options)

        return output

    # -------------------------------------------------------------------------
    def inspection_ajax(self, r, **attr):
        """
            Ajax-registration of shelter inspection

            Expects a JSON request body:
                {"u": shelter_unit_id, "f": [flag_id, ...], "c": comments}

            Args:
                r: the S3Request instance
                attr: controller parameters

            Returns:
                JSON string: {"m": message} on success,
                             {"a": alert} on error
        """

        T = current.T

        db = current.db
        s3db = current.s3db

        # Load JSON data from request body
        s = r.body
        s.seek(0)
        try:
            data = json.load(s)
        except (ValueError, TypeError):
            r.error(400, current.ERROR.BAD_REQUEST)

        shelter_unit_id = data.get("u")
        if shelter_unit_id:
            # Register shelter inspection
            error = False

            # Read comments
            comments = data.get("c")

            # Find inspection record (one inspection per unit per day)
            update = False
            itable = s3db.cr_shelter_inspection
            query = (itable.shelter_unit_id == shelter_unit_id) & \
                    (itable.date == current.request.utcnow.date()) & \
                    (itable.deleted != True)
            row = db(query).select(itable.id,
                                   limitby = (0, 1),
                                   ).first()
            if row:
                # Update this inspection
                update = True
                inspection_id = row.id
                row.update_record(comments = comments)
            else:
                # Create a new inspection
                # NOTE(review): no explicit date - presumably the table
                # defaults it to today, matching the lookup above; confirm
                inspection_id = itable.insert(shelter_unit_id = shelter_unit_id,
                                              comments = comments,
                                              )

            if inspection_id:
                # Currently selected flags
                flag_ids = data.get("f")

                if update:
                    # Remove all flags linked to the current inspection
                    # which are not in the current selection
                    query = (FS("inspection_id") == inspection_id)
                    if flag_ids:
                        query &= ~(FS("flag_id").belongs(flag_ids))
                    fresource = s3db.resource("cr_shelter_inspection_flag",
                                              filter = query,
                                              )
                    fresource.delete(cascade = True)

                if flag_ids:
                    # Determine which flags have been newly selected
                    ftable = s3db.cr_shelter_inspection_flag
                    if update:
                        query = (ftable.inspection_id == inspection_id) & \
                                (ftable.deleted == False)
                        rows = db(query).select(ftable.flag_id)
                        new = set(flag_ids) - set(row.flag_id for row in rows)
                    else:
                        new = set(flag_ids)

                    # Create links to newly selected flags
                    # (re-using the name "data" for the insert dict here)
                    ftable = s3db.cr_shelter_inspection_flag
                    data = {"inspection_id": inspection_id,
                            }
                    for flag_id in new:
                        data["flag_id"] = flag_id
                        success = ftable.insert(**data)
                        if not success:
                            error = True
                            break
                        else:
                            # Call onaccept to auto-create tasks
                            record = Storage(data)
                            record["id"] = success
                            s3db.onaccept(ftable, record)
            else:
                error = True

            if error:
                # Roll back all DB changes made in this request
                db.rollback()
                output = {"a": s3_str(T("Error registering shelter inspection")),
                          }
            else:
                output = {"m": s3_str(T("Registration successful")),
                          }
        else:
            # Error - no shelter unit selected
            output = {"a": s3_str(T("No shelter unit selected")),
                      }

        return json.dumps(output)

    # -------------------------------------------------------------------------
    @staticmethod
    def inject_js(widget_id, options):
        """
            Helper function to inject static JS and instantiate
            the shelterInspection widget

            Args:
                widget_id: the node ID where to instantiate the widget
                options: dict of widget options (JSON-serializable)
        """

        s3 = current.response.s3
        appname = current.request.application

        # Static JS (minified version unless in debug mode)
        scripts = s3.scripts
        if s3.debug:
            script = "/%s/static/scripts/S3/s3.shelter_inspection.js" % appname
        else:
            script = "/%s/static/scripts/S3/s3.shelter_inspection.min.js" % appname
        scripts.append(script)

        # Instantiate widget (guard against duplicate instantiation)
        scripts = s3.jquery_ready
        script = '''$('#%(id)s').shelterInspection(%(options)s)''' % \
                 {"id": widget_id, "options": json.dumps(options)}
        if script not in scripts:
            scripts.append(script)
# END =========================================================================
|
5e601d6d6fe4087c5b17ae222c78e7148f413103
|
ac235a23f22be0d6f1818bb53902177f9969813a
|
/ddtrace/contrib/dogpile_cache/__init__.py
|
8666443da50b23b9b3a8aacd38f88bfbf9e55321
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
DataDog/dd-trace-py
|
f09d6d48c4c69aea68f999fc8a458ade5c6150cf
|
1e3bd6d4edef5cda5a0831a6a7ec8e4046659d17
|
refs/heads/1.x
| 2023-09-01T20:25:26.746324
| 2023-09-01T18:54:37
| 2023-09-01T18:54:37
| 61,572,326
| 461
| 426
|
NOASSERTION
| 2023-09-14T20:38:57
| 2016-06-20T18:52:23
|
Python
|
UTF-8
|
Python
| false
| false
| 1,653
|
py
|
__init__.py
|
"""
Instrument dogpile.cache__ to report all cached lookups.
This will add spans around the calls to your cache backend (e.g. redis, memory,
etc). The spans will also include the following tags:
- key/keys: The key(s) dogpile passed to your backend. Note that this will be
the output of the region's ``function_key_generator``, but before any key
mangling is applied (i.e. the region's ``key_mangler``).
- region: Name of the region.
- backend: Name of the backend class.
- hit: If the key was found in the cache.
- expired: If the key is expired. This is only relevant if the key was found.
While cache tracing will generally already have keys in tags, some caching
setups will not have useful tag values - such as when you're using consistent
hashing with memcached - the key(s) will appear as a mangled hash.
::
# Patch before importing dogpile.cache
from ddtrace import patch
patch(dogpile_cache=True)
from dogpile.cache import make_region
region = make_region().configure(
"dogpile.cache.pylibmc",
expiration_time=3600,
arguments={"url": ["127.0.0.1"]},
)
@region.cache_on_arguments()
def hello(name):
# Some complicated, slow calculation
return "Hello, {}".format(name)
.. __: https://dogpilecache.sqlalchemy.org/
"""
from ...internal.utils.importlib import require_modules

# dogpile.cache must be importable for the integration to be usable
required_modules = ["dogpile.cache"]

with require_modules(required_modules) as missing_modules:
    if not missing_modules:
        # Only expose the patch API when dogpile.cache is installed, so
        # importing this package never fails on a missing dependency
        from .patch import get_version
        from .patch import patch
        from .patch import unpatch

        __all__ = ["patch", "unpatch", "get_version"]
|
a91d87f631fce6bc8719802e1fa6a0692bf43c44
|
605d63d23bc2e07eb054979a14557d469787877e
|
/atest/testresources/testlibs/NonAsciiLibrary.py
|
0769303d0dd73cc38e632ceb129a55806c85bce0
|
[
"Apache-2.0",
"CC-BY-3.0"
] |
permissive
|
robotframework/robotframework
|
407b0cdbe0d3bb088f9bfcf9ea7d16e22eee1ddf
|
cf896995f822f571c33dc5651d51365778b1cf40
|
refs/heads/master
| 2023-08-29T03:19:00.734810
| 2023-08-27T18:14:48
| 2023-08-28T18:14:11
| 21,273,155
| 8,635
| 2,623
|
Apache-2.0
| 2023-09-05T04:58:08
| 2014-06-27T11:10:38
|
Python
|
UTF-8
|
Python
| false
| false
| 798
|
py
|
NonAsciiLibrary.py
|
# Test messages containing non-ASCII characters
# (Latin-1 range and Bengali code points).
MESSAGES = [
    'Circle is 360°',
    'Hyvää üötä',
    '\u0989\u09C4 \u09F0 \u09FA \u099F \u09EB \u09EA \u09B9',
]
class NonAsciiLibrary:
    """Test library producing non-ASCII output, return values and errors."""

    def print_non_ascii_strings(self):
        """Prints message containing non-ASCII characters"""
        for message in MESSAGES:
            print('*INFO*' + message)

    def print_and_return_non_ascii_object(self):
        """Prints object with non-ASCII `str()` and returns it."""
        item = NonAsciiObject()
        print(item)
        return item

    def raise_non_ascii_error(self):
        """Fails with a message containing non-ASCII characters."""
        raise AssertionError(', '.join(MESSAGES))
class NonAsciiObject:
    """Helper object whose str() and repr() contain non-ASCII text."""

    def __init__(self):
        self.message = ', '.join(MESSAGES)

    def __str__(self):
        return self.message

    def __repr__(self):
        return repr(self.message)
|
fe23738fe85a649b39c552537966daaa2931f607
|
96dcea595e7c16cec07b3f649afd65f3660a0bad
|
/homeassistant/components/tcp/__init__.py
|
614f637a71a4053c4c112676f383c15df563f098
|
[
"Apache-2.0"
] |
permissive
|
home-assistant/core
|
3455eac2e9d925c92d30178643b1aaccf3a6484f
|
80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743
|
refs/heads/dev
| 2023-08-31T15:41:06.299469
| 2023-08-31T14:50:53
| 2023-08-31T14:50:53
| 12,888,993
| 35,501
| 20,617
|
Apache-2.0
| 2023-09-14T21:50:15
| 2013-09-17T07:29:48
|
Python
|
UTF-8
|
Python
| false
| false
| 25
|
py
|
__init__.py
|
"""The tcp component."""
|
54e04fea06357592a663e268cbb7a9c1fa38417b
|
e51ec12f619d0adf0bad7cc4a8851415ed89ddab
|
/torch_struct/alignment.py
|
c9840c9d87dbc0354126c88c68f674c449714c8a
|
[
"MIT"
] |
permissive
|
harvardnlp/pytorch-struct
|
9cc0e3167b4fe00e025216176ca054bb537f600e
|
7146de5659ff17ad7be53023c025ffd099866412
|
refs/heads/master
| 2023-07-10T10:23:57.946098
| 2022-01-30T19:49:08
| 2022-01-30T19:49:08
| 204,547,575
| 1,139
| 91
|
MIT
| 2022-01-30T19:49:10
| 2019-08-26T19:34:30
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 6,788
|
py
|
alignment.py
|
import torch
from .helpers import _Struct
import math
try:
import genbmm
except ImportError:
pass
from .semirings import LogSemiring
from .semirings.fast_semirings import broadcast
# Transition types, indexing the last dimension of the potentials
# (which _check_potentials asserts to be of size 3)
Down, Mid, Up = 0, 1, 2
# Indices into the LOC dimensions used for local alignment
# NOTE(review): naming inferred from usage in _dp_scan (Close holds
# finished local alignments) - confirm
Open, Close = 0, 1
class Alignment(_Struct):
    """
        Alignment-type structured distribution over monotonic alignments
        of two sequences, computed via a logarithmic binary scan over the
        anti-diagonal rotation of the (N x M) alignment grid.

        Requires the `genbmm` package for the banded matrix operations
        used in the scan.
    """

    def __init__(
        self, semiring=LogSemiring, sparse_rounds=3, max_gap=None, local=False
    ):
        """
            Args:
                semiring: semiring to compute in (default LogSemiring)
                sparse_rounds: kept for interface compatibility
                max_gap: optional cap on the band width of the scan chart
                local: compute local alignments; potentials must then be
                    sign-constrained (see _check_potentials)
        """
        self.semiring = semiring
        self.sparse_rounds = sparse_rounds
        self.local = local
        self.max_gap = max_gap

    def _check_potentials(self, edge, lengths=None):
        """Validate potentials and convert them to the semiring.

        Returns (edge, batch, N, M, lengths) with N <= M.
        """
        batch, N_1, M_1, x = edge.shape
        assert x == 3
        if self.local:
            # Local alignment requires skip scores <= 0 and match scores >= 0
            assert (edge[..., 0] <= 0).all(), "skips must be negative"
            assert (edge[..., 1] >= 0).all(), "alignment must be positive"
            assert (edge[..., 2] <= 0).all(), "skips must be negative"
        edge = self.semiring.convert(edge)

        N = N_1
        M = M_1
        assert M >= N
        if lengths is None:
            lengths = torch.LongTensor([N] * batch).to(edge.device)

        assert max(lengths) <= N, "Length longer than edge scores"
        assert max(lengths) == N, "One length must be at least N"
        return edge, batch, N, M, lengths

    def logpartition(self, log_potentials, lengths=None, force_grad=False, cache=True):
        """Compute the log-partition function by the binary scan."""
        return self._dp_scan(log_potentials, lengths, force_grad)

    def _dp_scan(self, log_potentials, lengths=None, force_grad=False):
        "Compute forward pass by linear scan"

        # Setup
        semiring = self.semiring
        log_potentials.requires_grad_(True)
        ssize = semiring.size()
        log_potentials, batch, N, M, lengths = self._check_potentials(
            log_potentials, lengths
        )
        # N is the longer (time) dimension.
        steps = M + N
        log_N = int(math.ceil(math.log(steps, 2)))
        bin_N = int(math.pow(2, log_N))
        LOC = 2 if self.local else 1

        # Create a chart N, N, back
        charta = [None, None]
        # offset = 1, left_pos = bin_N
        charta[0] = self._make_chart(
            1, (batch, bin_N, 1, bin_N, LOC, LOC, 3), log_potentials, force_grad
        )[0]
        charta[1] = self._make_chart(
            1, (batch, bin_N // 2, 3, bin_N, LOC, LOC, 3), log_potentials, force_grad
        )[0]

        # Init
        # This part is complicated. Rotate the scores by 45% and
        # then compress one.
        grid_x = torch.arange(N).view(N, 1).expand(N, M)
        grid_y = torch.arange(M).view(1, M).expand(N, M)
        rot_x = grid_x + grid_y
        rot_y = grid_y - grid_x + N - 1
        ind = torch.arange(bin_N)
        ind_M = ind
        ind_U = torch.arange(1, bin_N)
        ind_D = torch.arange(bin_N - 1)

        for b in range(lengths.shape[0]):
            # Fill base chart with values.
            l = lengths[b]
            charta[0][:, b, rot_x[:l], 0, rot_y[:l], :, :, :] = log_potentials[
                :, b, :l, :, None, None
            ]
            # Create finalizing paths.
            point = (l + M) // 2
            charta[1][:, b, point:, 1, ind, :, :, Mid] = semiring.one_(
                charta[1][:, b, point:, 1, ind, :, :, Mid]
            )

        for b in range(lengths.shape[0]):
            point = (lengths[b] + M) // 2
            lim = point * 2

            # Compress adjacent diagonal pairs into the level-1 chart
            left_ = charta[0][:, b, 0:lim:2, 0]
            right = charta[0][:, b, 1:lim:2, 0]

            charta[1][:, b, :point, 1, ind_M] = torch.stack(
                [
                    left_[..., Down],
                    semiring.plus(left_[..., Mid], right[..., Mid]),
                    left_[..., Up],
                ],
                dim=-1,
            )

            y = torch.stack([ind_D, ind_U], dim=0)
            z = y.clone()
            z[0, :] = 2
            z[1, :] = 0

            charta[1][:, b, :point, z, y, :, :, :] = torch.stack(
                [
                    semiring.times(
                        left_[:, :, ind_D, Open : Open + 1 :, :],
                        right[:, :, ind_U, :, Open : Open + 1, Down : Down + 1],
                    ),
                    semiring.times(
                        left_[:, :, ind_U, Open : Open + 1, :, :],
                        right[:, :, ind_D, :, Open : Open + 1, Up : Up + 1],
                    ),
                ],
                dim=2,
            )

        chart = charta[1][..., :, :, :].permute(0, 1, 2, 5, 6, 7, 4, 3)

        # Scan
        def merge(x):
            "Combine adjacent chart chunks with banded matrix products."
            inner = x.shape[-1]
            width = (inner - 1) // 2
            left = x[:, :, 0::2, Open, :].view(
                ssize, batch, -1, 1, LOC, 3, bin_N, inner
            )
            right = x[:, :, 1::2, :, Open].view(
                ssize, batch, -1, LOC, 1, 1, 3, bin_N, inner
            )
            st = []
            for op in (Mid, Up, Down):
                leftb, rightb, _ = broadcast(left, right[..., op, :, :])
                leftb = genbmm.BandedMatrix(leftb, width, width, semiring.zero)
                rightb = genbmm.BandedMatrix(rightb, width, width, semiring.zero)
                leftb = leftb.transpose().col_shift(op - 1).transpose()
                v = semiring.matmul(rightb, leftb).band_pad(1).band_shift(op - 1)
                v = v.data.view(ssize, batch, -1, LOC, LOC, 3, bin_N, v.data.shape[-1])
                st.append(v)

            if self.local:
                # Carry already-finished (Close) local alignments through,
                # padded to the widened band of this level.

                def pad(v):
                    s = list(v.shape)
                    s[-1] = inner // 2 + 1
                    pads = torch.zeros(*s, device=v.device, dtype=v.dtype).fill_(
                        semiring.zero
                    )
                    return torch.cat([pads, v, pads], -1)

                left_ = x[:, :, 0::2, Close, None]
                # FIX: pad the Close-slice computed on the previous line.
                # This used to be `pad(left)`, which discarded `left_` (dead
                # store) and instead padded the reshaped Open-slice tensor.
                left_ = pad(left_)
                right = x[:, :, 1::2, :, Close, None]
                right = pad(right)
                st.append(torch.cat([semiring.zero_(left_.clone()), left_], dim=3))
                st.append(torch.cat([semiring.zero_(right.clone()), right], dim=4))
            return semiring.sum(torch.stack(st, dim=-1))

        for n in range(2, log_N + 1):
            chart = merge(chart)
            center = int((chart.shape[-1] - 1) // 2)
            if center > (bin_N / 2):
                # Trim the band back to the maximum useful width
                chart = chart[..., center - (bin_N // 2) : center + (bin_N // 2) + 1]
            elif self.max_gap is not None and center > self.max_gap:
                # Enforce the user-specified gap limit
                chart = chart[..., center - self.max_gap : center + self.max_gap + 1]
        if self.local:
            v = semiring.sum(semiring.sum(chart[..., 0, Close, Close, Mid, :, :]))
        else:
            v = chart[
                ..., 0, Open, Open, Mid, N - 1, M - N + ((chart.shape[-1] - 1) // 2)
            ]
        return v, [log_potentials]
|
92d93d9ed561f7016d5e4d44151305e689cdf6f0
|
3c41443364da8b44c74dce08ef94a1acd1b66b3e
|
/addons/forward/routes.py
|
00b130ce2a4a6d4de6d5fc64f72747e106a1ad90
|
[
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-warranty-disclaimer",
"AGPL-3.0-only",
"LGPL-2.0-or-later",
"LicenseRef-scancode-proprietary-license",
"MPL-1.1",
"CPAL-1.0",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause",
"Apache-2.0"
] |
permissive
|
CenterForOpenScience/osf.io
|
71d9540be7989f7118a33e15bc4a6ce2d2492ac1
|
a3e0a0b9ddda5dd75fc8248d58f3bcdeece0323e
|
refs/heads/develop
| 2023-09-04T03:21:14.970917
| 2023-08-31T14:49:20
| 2023-08-31T14:49:20
| 10,199,599
| 683
| 390
|
Apache-2.0
| 2023-09-14T17:07:52
| 2013-05-21T15:53:37
|
Python
|
UTF-8
|
Python
| false
| false
| 716
|
py
|
routes.py
|
# -*- coding: utf-8 -*-
"""Forward addon routes."""
from framework.routing import Rule, json_renderer
from addons.forward import views
# JSON API routes for the forward addon (mounted under /api/v1)
api_routes = {
    'rules': [
        # Read the forward configuration of a project/component node
        Rule(
            [
                '/project/<pid>/forward/config/',
                '/project/<pid>/node/<nid>/forward/config/'
            ],
            'get',
            views.config.forward_config_get,
            json_renderer,
        ),
        # Update the forward configuration of a project/component node
        Rule(
            [
                '/project/<pid>/forward/config/',
                '/project/<pid>/node/<nid>/forward/config/'
            ],
            'put',
            views.config.forward_config_put,
            json_renderer,
        ),
    ],
    'prefix': '/api/v1',
}
|
b80f2fb560691a98d7cbd70624e5fab3cea86cef
|
40dd8330e5f78c4348bbddc2c5acfd59d793dd51
|
/mmseg/models/backbones/unet.py
|
545921db8e14668e454f5834f9a1618fe0c04ffe
|
[
"Apache-2.0"
] |
permissive
|
open-mmlab/mmsegmentation
|
0d12092312e2c465ede1fd7dd9847b6f2b37049c
|
30a3f94f3e2916e27fa38c67cc3b8c69c1893fe8
|
refs/heads/main
| 2023-09-04T10:54:52.299711
| 2023-07-24T07:28:21
| 2023-07-24T07:28:21
| 272,133,018
| 6,534
| 2,375
|
Apache-2.0
| 2023-09-14T01:22:32
| 2020-06-14T04:32:33
|
Python
|
UTF-8
|
Python
| false
| false
| 18,470
|
py
|
unet.py
|
# Copyright (c) OpenMMLab. All rights reserved.
import warnings
import torch.nn as nn
import torch.utils.checkpoint as cp
from mmcv.cnn import ConvModule, build_activation_layer, build_norm_layer
from mmengine.model import BaseModule
from mmengine.utils.dl_utils.parrots_wrapper import _BatchNorm
from mmseg.registry import MODELS
from ..utils import UpConvBlock, Upsample
class BasicConvBlock(nn.Module):
    """Stack of plain convolutional layers used as the basic UNet block.

    Args:
        in_channels (int): Number of input channels.
        out_channels (int): Number of output channels.
        num_convs (int): Number of convolutional layers. Default: 2.
        stride (int): Stride of the first convolution; stride=2 downsamples
            the input feature map, later convolutions always use stride 1.
            Options are 1 or 2. Default: 1.
        dilation (int): Dilation rate of all but the first convolution
            (the first convolution always uses dilation 1). Default: 1.
        with_cp (bool): Use checkpointing to save memory at the cost of
            training speed. Default: False.
        conv_cfg (dict | None): Config dict for convolution layer.
            Default: None.
        norm_cfg (dict | None): Config dict for normalization layer.
            Default: dict(type='BN').
        act_cfg (dict | None): Config dict for activation layer in ConvModule.
            Default: dict(type='ReLU').
        dcn (bool): Deformable convolution (not implemented). Default: None.
        plugins (dict): Plugins for convolutional layers (not implemented).
            Default: None.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 num_convs=2,
                 stride=1,
                 dilation=1,
                 with_cp=False,
                 conv_cfg=None,
                 norm_cfg=dict(type='BN'),
                 act_cfg=dict(type='ReLU'),
                 dcn=None,
                 plugins=None):
        super().__init__()
        assert dcn is None, 'Not implemented yet.'
        assert plugins is None, 'Not implemented yet.'

        self.with_cp = with_cp
        # First layer: applies stride, no dilation; remaining layers:
        # stride 1 with the configured dilation (padding matches dilation
        # to keep the spatial size)
        self.convs = nn.Sequential(*[
            ConvModule(
                in_channels=in_channels if i == 0 else out_channels,
                out_channels=out_channels,
                kernel_size=3,
                stride=stride if i == 0 else 1,
                dilation=1 if i == 0 else dilation,
                padding=1 if i == 0 else dilation,
                conv_cfg=conv_cfg,
                norm_cfg=norm_cfg,
                act_cfg=act_cfg) for i in range(num_convs)
        ])

    def forward(self, x):
        """Run the convolution stack, optionally under checkpointing."""
        if self.with_cp and x.requires_grad:
            return cp.checkpoint(self.convs, x)
        return self.convs(x)
@MODELS.register_module()
class DeconvModule(nn.Module):
    """Deconvolution upsample module in decoder for UNet (2X upsample).

    Upsamples the feature map with a single transposed convolution,
    followed by a normalization layer and an activation.

    Args:
        in_channels (int): Number of input channels.
        out_channels (int): Number of output channels.
        with_cp (bool): Use checkpointing to save memory at the cost of
            training speed. Default: False.
        norm_cfg (dict | None): Config dict for normalization layer.
            Default: dict(type='BN').
        act_cfg (dict | None): Config dict for activation layer.
            Default: dict(type='ReLU').
        kernel_size (int): Kernel size of the transposed convolution.
            Default: 4.
        scale_factor (int): Upsampling factor; equals the stride of the
            transposed convolution. Default: 2.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 with_cp=False,
                 norm_cfg=dict(type='BN'),
                 act_cfg=dict(type='ReLU'),
                 *,
                 kernel_size=4,
                 scale_factor=2):
        super().__init__()

        gap = kernel_size - scale_factor
        assert gap >= 0 and gap % 2 == 0,\
            f'kernel_size should be greater than or equal to scale_factor '\
            f'and (kernel_size - scale_factor) should be even numbers, '\
            f'while the kernel size is {kernel_size} and scale_factor is '\
            f'{scale_factor}.'

        self.with_cp = with_cp

        deconv = nn.ConvTranspose2d(
            in_channels,
            out_channels,
            kernel_size=kernel_size,
            stride=scale_factor,
            # Symmetric padding keeps output size == input size * scale_factor
            padding=gap // 2)
        _, norm = build_norm_layer(norm_cfg, out_channels)
        activate = build_activation_layer(act_cfg)
        # NOTE: attribute keeps the historical spelling ("upsamping") so
        # existing checkpoints keep loading
        self.deconv_upsamping = nn.Sequential(deconv, norm, activate)

    def forward(self, x):
        """Upsample ``x``, optionally under checkpointing."""
        if self.with_cp and x.requires_grad:
            return cp.checkpoint(self.deconv_upsamping, x)
        return self.deconv_upsamping(x)
@MODELS.register_module()
class InterpConv(nn.Module):
    """Interpolation-based upsample block in the UNet decoder.

    Combines one interpolation ``Upsample`` layer with one ``ConvModule``.
    The order of the two is controlled by ``conv_first``: conv-then-upsample
    when True, upsample-then-conv (the default) when False.

    Args:
        in_channels (int): Number of input channels.
        out_channels (int): Number of output channels.
        with_cp (bool): Use gradient checkpointing in :meth:`forward` to
            save memory at the cost of speed. Default: False.
        norm_cfg (dict | None): Config dict for the normalization layer.
            Default: dict(type='BN').
        act_cfg (dict | None): Config dict for the activation layer.
            Default: dict(type='ReLU').
        conv_cfg (dict | None): Config dict for the convolution layer.
            Default: None.
        conv_first (bool): Whether the conv runs before the upsample.
            Default: False.
        kernel_size (int): Kernel size of the conv layer. Default: 1.
        stride (int): Stride of the conv layer. Default: 1.
        padding (int): Padding of the conv layer. Default: 0.
        upsample_cfg (dict): Interpolation config of the upsample layer.
            Default: dict(
                scale_factor=2, mode='bilinear', align_corners=False).
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 with_cp=False,
                 norm_cfg=dict(type='BN'),
                 act_cfg=dict(type='ReLU'),
                 *,
                 conv_cfg=None,
                 conv_first=False,
                 kernel_size=1,
                 stride=1,
                 padding=0,
                 upsample_cfg=dict(
                     scale_factor=2, mode='bilinear', align_corners=False)):
        super().__init__()
        self.with_cp = with_cp
        conv = ConvModule(
            in_channels,
            out_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=padding,
            conv_cfg=conv_cfg,
            norm_cfg=norm_cfg,
            act_cfg=act_cfg)
        up = Upsample(**upsample_cfg)
        layers = (conv, up) if conv_first else (up, conv)
        self.interp_upsample = nn.Sequential(*layers)

    def forward(self, x):
        """Upsample ``x``, optionally under gradient checkpointing."""
        if self.with_cp and x.requires_grad:
            return cp.checkpoint(self.interp_upsample, x)
        return self.interp_upsample(x)
@MODELS.register_module()
class UNet(BaseModule):
    """UNet backbone.
    This backbone is the implementation of `U-Net: Convolutional Networks
    for Biomedical Image Segmentation <https://arxiv.org/abs/1505.04597>`_.
    Args:
        in_channels (int): Number of input image channels. Default: 3.
        base_channels (int): Number of base channels of each stage.
            The output channels of the first stage. Default: 64.
        num_stages (int): Number of stages in encoder, normally 5. Default: 5.
        strides (Sequence[int 1 | 2]): Strides of each stage in encoder.
            len(strides) is equal to num_stages. Normally the stride of the
            first stage in encoder is 1. If strides[i]=2, it uses stride
            convolution to downsample in the correspondence encoder stage.
            Default: (1, 1, 1, 1, 1).
        enc_num_convs (Sequence[int]): Number of convolutional layers in the
            convolution block of the correspondence encoder stage.
            Default: (2, 2, 2, 2, 2).
        dec_num_convs (Sequence[int]): Number of convolutional layers in the
            convolution block of the correspondence decoder stage.
            Default: (2, 2, 2, 2).
        downsamples (Sequence[int]): Whether use MaxPool to downsample the
            feature map after the first stage of encoder
            (stages: [1, num_stages)). If the correspondence encoder stage use
            stride convolution (strides[i]=2), it will never use MaxPool to
            downsample, even downsamples[i-1]=True.
            Default: (True, True, True, True).
        enc_dilations (Sequence[int]): Dilation rate of each stage in encoder.
            Default: (1, 1, 1, 1, 1).
        dec_dilations (Sequence[int]): Dilation rate of each stage in decoder.
            Default: (1, 1, 1, 1).
        with_cp (bool): Use checkpoint or not. Using checkpoint will save some
            memory while slowing down the training speed. Default: False.
        conv_cfg (dict | None): Config dict for convolution layer.
            Default: None.
        norm_cfg (dict | None): Config dict for normalization layer.
            Default: dict(type='BN').
        act_cfg (dict | None): Config dict for activation layer in ConvModule.
            Default: dict(type='ReLU').
        upsample_cfg (dict): The upsample config of the upsample module in
            decoder. Default: dict(type='InterpConv').
        norm_eval (bool): Whether to set norm layers to eval mode, namely,
            freeze running stats (mean and var). Note: Effect on Batch Norm
            and its variants only. Default: False.
        dcn (bool): Use deformable convolution in convolutional layer or not.
            Default: None.
        plugins (dict): plugins for convolutional layers. Default: None.
        pretrained (str, optional): model pretrained path. Default: None
        init_cfg (dict or list[dict], optional): Initialization config dict.
            Default: None
    Notice:
        The input image size should be divisible by the whole downsample rate
        of the encoder. More detail of the whole downsample rate can be found
        in UNet._check_input_divisible.
    """
    def __init__(self,
                 in_channels=3,
                 base_channels=64,
                 num_stages=5,
                 strides=(1, 1, 1, 1, 1),
                 enc_num_convs=(2, 2, 2, 2, 2),
                 dec_num_convs=(2, 2, 2, 2),
                 downsamples=(True, True, True, True),
                 enc_dilations=(1, 1, 1, 1, 1),
                 dec_dilations=(1, 1, 1, 1),
                 with_cp=False,
                 conv_cfg=None,
                 norm_cfg=dict(type='BN'),
                 act_cfg=dict(type='ReLU'),
                 upsample_cfg=dict(type='InterpConv'),
                 norm_eval=False,
                 dcn=None,
                 plugins=None,
                 pretrained=None,
                 init_cfg=None):
        super().__init__(init_cfg)
        self.pretrained = pretrained
        # `pretrained` is kept only for backward compatibility; new code
        # should pass an `init_cfg` instead, and the two are exclusive.
        assert not (init_cfg and pretrained), \
            'init_cfg and pretrained cannot be setting at the same time'
        if isinstance(pretrained, str):
            warnings.warn('DeprecationWarning: pretrained is a deprecated, '
                          'please use "init_cfg" instead')
            self.init_cfg = dict(type='Pretrained', checkpoint=pretrained)
        elif pretrained is None:
            if init_cfg is None:
                # Default init: Kaiming for convs, constant 1 for norm layers.
                self.init_cfg = [
                    dict(type='Kaiming', layer='Conv2d'),
                    dict(
                        type='Constant',
                        val=1,
                        layer=['_BatchNorm', 'GroupNorm'])
                ]
        else:
            raise TypeError('pretrained must be a str or None')
        assert dcn is None, 'Not implemented yet.'
        assert plugins is None, 'Not implemented yet.'
        # All per-stage config sequences must match num_stages (encoder) or
        # num_stages - 1 (decoder / downsample flags).
        assert len(strides) == num_stages, \
            'The length of strides should be equal to num_stages, '\
            f'while the strides is {strides}, the length of '\
            f'strides is {len(strides)}, and the num_stages is '\
            f'{num_stages}.'
        assert len(enc_num_convs) == num_stages, \
            'The length of enc_num_convs should be equal to num_stages, '\
            f'while the enc_num_convs is {enc_num_convs}, the length of '\
            f'enc_num_convs is {len(enc_num_convs)}, and the num_stages is '\
            f'{num_stages}.'
        assert len(dec_num_convs) == (num_stages-1), \
            'The length of dec_num_convs should be equal to (num_stages-1), '\
            f'while the dec_num_convs is {dec_num_convs}, the length of '\
            f'dec_num_convs is {len(dec_num_convs)}, and the num_stages is '\
            f'{num_stages}.'
        assert len(downsamples) == (num_stages-1), \
            'The length of downsamples should be equal to (num_stages-1), '\
            f'while the downsamples is {downsamples}, the length of '\
            f'downsamples is {len(downsamples)}, and the num_stages is '\
            f'{num_stages}.'
        assert len(enc_dilations) == num_stages, \
            'The length of enc_dilations should be equal to num_stages, '\
            f'while the enc_dilations is {enc_dilations}, the length of '\
            f'enc_dilations is {len(enc_dilations)}, and the num_stages is '\
            f'{num_stages}.'
        assert len(dec_dilations) == (num_stages-1), \
            'The length of dec_dilations should be equal to (num_stages-1), '\
            f'while the dec_dilations is {dec_dilations}, the length of '\
            f'dec_dilations is {len(dec_dilations)}, and the num_stages is '\
            f'{num_stages}.'
        self.num_stages = num_stages
        self.strides = strides
        self.downsamples = downsamples
        self.norm_eval = norm_eval
        self.base_channels = base_channels
        self.encoder = nn.ModuleList()
        self.decoder = nn.ModuleList()
        for i in range(num_stages):
            enc_conv_block = []
            if i != 0:
                # MaxPool downsampling only when the stage itself does not
                # already downsample via a strided convolution.
                if strides[i] == 1 and downsamples[i - 1]:
                    enc_conv_block.append(nn.MaxPool2d(kernel_size=2))
                # The matching decoder stage upsamples iff the encoder stage
                # reduced resolution (by stride or by pooling).
                upsample = (strides[i] != 1 or downsamples[i - 1])
                self.decoder.append(
                    UpConvBlock(
                        conv_block=BasicConvBlock,
                        in_channels=base_channels * 2**i,
                        skip_channels=base_channels * 2**(i - 1),
                        out_channels=base_channels * 2**(i - 1),
                        num_convs=dec_num_convs[i - 1],
                        stride=1,
                        dilation=dec_dilations[i - 1],
                        with_cp=with_cp,
                        conv_cfg=conv_cfg,
                        norm_cfg=norm_cfg,
                        act_cfg=act_cfg,
                        upsample_cfg=upsample_cfg if upsample else None,
                        dcn=None,
                        plugins=None))
            enc_conv_block.append(
                BasicConvBlock(
                    in_channels=in_channels,
                    out_channels=base_channels * 2**i,
                    num_convs=enc_num_convs[i],
                    stride=strides[i],
                    dilation=enc_dilations[i],
                    with_cp=with_cp,
                    conv_cfg=conv_cfg,
                    norm_cfg=norm_cfg,
                    act_cfg=act_cfg,
                    dcn=None,
                    plugins=None))
            self.encoder.append(nn.Sequential(*enc_conv_block))
            # Channel count doubles each stage; feed it to the next one.
            in_channels = base_channels * 2**i
    def forward(self, x):
        # Returns all decoder outputs (deepest feature first, then
        # progressively upsampled maps), preceded by none of the raw
        # encoder outputs except the last one.
        self._check_input_divisible(x)
        enc_outs = []
        for enc in self.encoder:
            x = enc(x)
            enc_outs.append(x)
        dec_outs = [x]
        for i in reversed(range(len(self.decoder))):
            # Each decoder stage fuses its upsampled input with the
            # same-resolution encoder skip connection.
            x = self.decoder[i](enc_outs[i], x)
            dec_outs.append(x)
        return dec_outs
    def train(self, mode=True):
        """Convert the model into training mode while keep normalization layer
        freezed."""
        super().train(mode)
        if mode and self.norm_eval:
            for m in self.modules():
                # trick: eval have effect on BatchNorm only
                if isinstance(m, _BatchNorm):
                    m.eval()
    def _check_input_divisible(self, x):
        # The input spatial size must be a multiple of the cumulative
        # downsample factor, otherwise skip connections cannot align.
        h, w = x.shape[-2:]
        whole_downsample_rate = 1
        for i in range(1, self.num_stages):
            if self.strides[i] == 2 or self.downsamples[i - 1]:
                whole_downsample_rate *= 2
        assert (h % whole_downsample_rate == 0) \
            and (w % whole_downsample_rate == 0),\
            f'The input image size {(h, w)} should be divisible by the whole '\
            f'downsample rate {whole_downsample_rate}, when num_stages is '\
            f'{self.num_stages}, strides is {self.strides}, and downsamples '\
            f'is {self.downsamples}.'
|
7865b1a651159dc29db275628b1d40b5511782b0
|
b8d80a23cb27af08a1c4d34b478c76228ae5fbb4
|
/insights/tests/parsers/test_ceph_log.py
|
f1bb5718ba4f6dba187644b226f6b58becd5fa09
|
[
"Apache-2.0"
] |
permissive
|
RedHatInsights/insights-core
|
bb243e2bf8a52446fefb95ebe05478d6e35efe2e
|
b0ea07fc3f4dd8801b505fe70e9b36e628152c4a
|
refs/heads/master
| 2023-09-04T21:15:40.456257
| 2023-09-04T10:46:56
| 2023-09-04T10:46:56
| 92,518,221
| 144
| 290
|
Apache-2.0
| 2023-09-14T02:40:13
| 2017-05-26T14:23:11
|
Python
|
UTF-8
|
Python
| false
| false
| 2,986
|
py
|
test_ceph_log.py
|
import doctest
from insights.parsers import ceph_log
from insights.parsers.ceph_log import CephLog
from insights.tests import context_wrap
from datetime import datetime
# Representative excerpt of /var/log/ceph/ceph.log used as the test fixture:
# mon pgmap [INF] status lines plus osd [WRN] slow-request entries.
CEPH_LOG = """
2017-05-31 13:01:44.034376 mon.0 192.xx.xx.xx:6789/0 742585 : cluster [INF] pgmap v5133969: 320 pgs: 3 active+clean+scrubbing+deep, 317 active+clean; 898 GB data, 1828 GB used, 48447 GB / 50275 GB avail; 2027 kB/s rd, 20215 kB/s wr, 711 op/s
2017-05-31 13:01:45.041760 mon.0 192.xx.xx.xx:6789/0 742586 : cluster [INF] pgmap v5133970: 320 pgs: 3 active+clean+scrubbing+deep, 317 active+clean; 898 GB data, 1828 GB used, 48447 GB / 50275 GB avail; 1606 kB/s rd, 17354 kB/s wr, 718 op/s
2017-05-31 13:01:46.933829 osd.22 192.xx.xx.xx:6814/42154 172581 : cluster [WRN] 44 slow requests, 2 included below; oldest blocked for > 49.982746 secs
2017-05-31 13:01:46.933946 osd.22 192.xx.xx.xx:6814/42154 172582 : cluster [WRN] slow request 30.602517 seconds old, received at 2017-05-31 13:01:06.330484: osd_op(client.3395798.0:2855671 1.54392173 gnocchi_06c8214c-afae-4e64-8a4a-a466c4f257dc_1244160000.0_median_86400.0_v3 [write 26253~9] snapc 0=[] ondisk+write+known_if_redirected e487) currently waiting for subops from 23
2017-05-31 13:01:46.933955 osd.22 192.xx.xx.xx:6814/42154 172583 : cluster [WRN] slow request 30.530961 seconds old, received at 2017-05-31 13:01:06.402041: osd_op(client.324182.0:46141816 1.e637a4b3 measure [omap-rm-keys 0~107] snapc 0=[] ondisk+write+skiprwlocks+known_if_redirected e487) currently waiting for subops from 23
2017-05-31 13:01:47.050539 mon.0 192.xx.xx.xx:6789/0 742589 : cluster [INF] pgmap v5133971: 320 pgs: 3 active+clean+scrubbing+deep, 317 active+clean; 898 GB data, 1828 GB used, 48447 GB / 50275 GB avail; 1597 kB/s rd, 7259 kB/s wr, 398 op/s
2017-05-31 13:01:48.057187 mon.0 192.xx.xx.xx:6789/0 742590 : cluster [INF] pgmap v5133972: 320 pgs: 3 active+clean+scrubbing+deep, 317 active+clean; 898 GB data, 1828 GB used, 48447 GB / 50275 GB avail; 2373 kB/s rd, 5138 kB/s wr, 354 op/s
2017-05-31 13:01:49.064950 mon.0 192.xx.xx.xx:6789/0 742598 : cluster [INF] pgmap v5133973: 320 pgs: 3 active+clean+scrubbing+deep, 317 active+clean; 898 GB data, 1828 GB used, 48447 GB / 50275 GB avail; 4187 kB/s rd, 10266 kB/s wr, 714 op/s
2017-05-31 13:01:50.069437 mon.0 192.xx.xx.xx:6789/0 742599 : cluster [INF] pgmap v5133974: 320 pgs: 3 active+clean+scrubbing+deep, 317 active+clean; 898 GB data, 1828 GB used, 48447 GB / 50275 GB avail; 470 MB/s rd, 11461 kB/s wr, 786 op/s
""".strip()
def test_ceph_log():
    """CephLog parses the fixture and supports get/get_after/contains."""
    parsed = CephLog(context_wrap(CEPH_LOG))
    warnings = parsed.get("[WRN] slow request")
    infos = parsed.get("[INF]")
    recent = list(parsed.get_after(datetime(2017, 5, 31, 13, 1, 46)))
    assert len(warnings) == 2
    assert len(recent) == 7
    assert len(infos) == 6
    assert "slow requests" in parsed
def test_doc():
    """All doctest examples in the ceph_log module must pass."""
    globs = {
        "ceph_log": CephLog(
            context_wrap(CEPH_LOG, path="/var/log/ceph/ceph.log"))
    }
    results = doctest.testmod(ceph_log, globs=globs)
    assert results.failed == 0
|
42ab0fd73b46e222caebeb894d0ed9844c4ad53e
|
b26c41926fa3a7c2c061132d80e91a2750f2f468
|
/tensorflow_probability/python/experimental/bayesopt/acquisition/__init__.py
|
52f88f2b9eac7400013c63c59834810846bdf2d2
|
[
"Apache-2.0"
] |
permissive
|
tensorflow/probability
|
22e679a4a883e408f8ef237cda56e3e3dfa42b17
|
42a64ba0d9e0973b1707fcd9b8bd8d14b2d4e3e5
|
refs/heads/main
| 2023-09-04T02:06:08.174935
| 2023-08-31T20:30:00
| 2023-08-31T20:31:33
| 108,053,674
| 4,055
| 1,269
|
Apache-2.0
| 2023-09-13T21:49:49
| 2017-10-23T23:50:54
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,704
|
py
|
__init__.py
|
# Copyright 2023 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Acquisition Functions."""
from tensorflow_probability.python.experimental.bayesopt.acquisition.acquisition_function import AcquisitionFunction
from tensorflow_probability.python.experimental.bayesopt.acquisition.acquisition_function import MCMCReducer
from tensorflow_probability.python.experimental.bayesopt.acquisition.expected_improvement import GaussianProcessExpectedImprovement
from tensorflow_probability.python.experimental.bayesopt.acquisition.expected_improvement import ParallelExpectedImprovement
from tensorflow_probability.python.experimental.bayesopt.acquisition.expected_improvement import StudentTProcessExpectedImprovement
from tensorflow_probability.python.experimental.bayesopt.acquisition.max_value_entropy_search import GaussianProcessMaxValueEntropySearch
from tensorflow_probability.python.experimental.bayesopt.acquisition.probability_of_improvement import GaussianProcessProbabilityOfImprovement
from tensorflow_probability.python.experimental.bayesopt.acquisition.probability_of_improvement import ParallelProbabilityOfImprovement
from tensorflow_probability.python.experimental.bayesopt.acquisition.upper_confidence_bound import GaussianProcessUpperConfidenceBound
from tensorflow_probability.python.experimental.bayesopt.acquisition.upper_confidence_bound import ParallelUpperConfidenceBound
from tensorflow_probability.python.experimental.bayesopt.acquisition.weighted_power_scalarization import WeightedPowerScalarization
from tensorflow_probability.python.internal import all_util
# NOTE(review): presumably rewritten to True by the JAX-substrate code
# generation — confirm against TFP's substrate tooling.
JAX_MODE = False
# Public API of this subpackage; anything not listed here is stripped from
# the module namespace by remove_undocumented() below.
_allowed_symbols = [
    'AcquisitionFunction',
    'GaussianProcessExpectedImprovement',
    'GaussianProcessMaxValueEntropySearch',
    'GaussianProcessProbabilityOfImprovement',
    'GaussianProcessUpperConfidenceBound',
    'MCMCReducer',
    'ParallelExpectedImprovement',
    'ParallelProbabilityOfImprovement',
    'ParallelUpperConfidenceBound',
    'StudentTProcessExpectedImprovement',
    'WeightedPowerScalarization',
]
all_util.remove_undocumented(__name__, _allowed_symbols)
|
89e912ad9dac84438022692b86908eef1e6a7d49
|
1516ab0855a054c43975a7306dccf684839cfb87
|
/examples/servo_micro.py
|
d42fef2715f9a41a061b5bd14a0fd5ab106d984f
|
[
"Apache-2.0"
] |
permissive
|
google/pcbdl
|
7a506e9f678f4da03912fd975916d708a5e15d20
|
d251d191ed3c41baddff704e6131698eb26ee984
|
refs/heads/master
| 2023-06-28T14:47:11.139860
| 2020-09-04T08:44:24
| 2020-09-04T08:44:24
| 179,365,457
| 149
| 25
|
NOASSERTION
| 2021-04-08T17:10:19
| 2019-04-03T20:29:46
|
Python
|
UTF-8
|
Python
| false
| false
| 21,759
|
py
|
servo_micro.py
|
#!/usr/bin/env python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Full reimplementation of servo micro in pcbdl.
Servo Micro's information page (including pdf schematics made in orthodox tools) can be found at:
https://chromium.googlesource.com/chromiumos/third_party/hdctools/+/refs/heads/master/docs/servo_micro.md
"""
from pcbdl import *
# Start of things that should really be in a generic library
# It's a TODO to make a library. Until then, 300 lines to start a new schematic from scratch with no library is probably not bad.
def make_connector(pin_count):
    """Build a generic connector Part class with ``pin_count`` pins.

    Pin numbers are 1-indexed and each pin is named ``P<n>``.
    """
    class Connector(Part):
        REFDES_PREFIX = "CN"
        PINS = [Pin(n, "P%d" % n) for n in range(1, pin_count + 1)]
    return Connector
class UsbConnector(Part):
    """USB connector (TE Connectivity 1981568-1)."""
    REFDES_PREFIX = "CN"
    part_number = "1981568-1"
    package = "TE_1981568-1"
    PINS = [
        "VBUS",
        ("DM", "D-"),
        ("DP", "D+"),
        "ID",
        "GND",
        # One logical shield pin spread over four physical pads.
        Pin("G", numbers=("G1", "G2", "G3", "G4")),
    ]
class FET(Part):
    """FET Transistor (D=drain, G=gate, S=source)."""
    REFDES_PREFIX = "Q"
    PINS = [
        Pin("D", "D"),
        Pin("G", "G"),
        Pin("S", "S"),
    ]
class Regulator(Part):
    """Generic voltage regulator interface: IN/OUT/GND."""
    REFDES_PREFIX = "U"
    PINS = [
        Pin("IN", type=PinType.POWER_INPUT),
        Pin("OUT", type=PinType.POWER_OUTPUT),
        Pin("GND", type=PinType.POWER_INPUT),
    ]
class MIC5504(Regulator):
    """MIC5504-3.3 LDO regulator (3.3 V fixed output, per part number)."""
    part_number = "MIC5504-3.3YMT"
    package = "SON65P100X100X40-5T48X48N"
    PINS = [
        Pin("4", "IN"),
        Pin("1", "OUT"),
        Pin("3", "EN"),
        # Thermal pad G1 shares the GND net with pin 2.
        Pin(("2", "G1"), ("GND", "PAD")),
    ]
class TLV70018DSER(Regulator):
    """TI TLV70018 LDO regulator.

    NOTE(review): the '018' suffix presumably denotes the 1.8 V output
    variant — confirm against the datasheet.
    """
    part_number = "TLV70018DSER"
    package = "SON50P150X150X80-6L"
    PINS = [
        Pin("1", "IN"),
        Pin("3", "OUT"),
        Pin("6", "EN"),
        Pin("4", "NC1"),
        Pin("5", "NC2"),
        Pin("2", "GND"),
    ]
class UsbEsdDiode(Part):
    """TPD2E001 two-channel ESD protection diode array for data lines."""
    REFDES_PREFIX = "D"
    part_number = "TPD2E001DRLR"
    package = "SOP50P170X60-5N"
    PINS = [
        Pin("1", "VCC", type=PinType.POWER_INPUT),
        Pin("4", "GND", type=PinType.POWER_INPUT),
        Pin("3", "P1"),
        Pin("5", "P2"),
        Pin("2", "NC"),
    ]
class DoubleDiode(Part):
    """Dual diode with two anodes (A1, A2) and a common cathode (K)."""
    REFDES_PREFIX = "D"
    part_number = "240-800MV"
    package = "SOT95P247X115-3L"
    PINS = ["A1", "A2", "K"]
class STM32F072(Part):
    """STM32F072CBU6 microcontroller pin map."""
    REFDES_PREFIX = "U"
    part_number = "STM32F072CBU6TR"
    package = "QFN05P_7-1X7-1_0-6_49N"
    PINS = [
        # Power and control pins; multi-number pins share one logical pin.
        Pin(("24", "48"), "VDD", type=PinType.POWER_INPUT),
        Pin("1", "VBAT", type=PinType.POWER_INPUT),
        Pin("9", "VDDA", type=PinType.POWER_INPUT),
        Pin("36", "VDDIO2", type=PinType.POWER_INPUT),
        Pin(("23", "35", "47"), "VSS"),
        Pin("8", "VSSA"),
        Pin("49", "PAD"),
        Pin("44", "BOOT0"),
        Pin("7", "NRST"),
    ]
    # PA0..PA7 occupy physical pins 10..17.
    for i in range(8):
        PINS.append(Pin(i + 10, "PA%d" % i))
    PINS += [
        Pin("29", "PA8"),
        Pin("30", "PA9"),
        Pin("31", "PA10"),
        Pin("32", "PA11"),
        Pin("33", "PA12"),
        Pin("34", "PA13"),
        Pin("37", "PA14"),
        Pin("38", "PA15"),
        Pin("18", "PB0"),
        Pin("19", "PB1"),
        Pin("20", "PB2"),
        Pin("39", "PB3"),
        Pin("40", "PB4"),
        Pin("41", "PB5"),
        Pin("42", "PB6"),
        Pin("43", "PB7"),
        Pin("45", "PB8"),
        Pin("46", "PB9"),
        Pin("21", "PB10"),
        Pin("22", "PB11"),
        Pin("25", "PB12"),
        Pin("26", "PB13"),
        Pin("27", "PB14"),
        Pin("28", "PB15"),
        Pin("2", "PC13"),
        Pin("3", ("PC14", "OSC32_IN")),
        Pin("4", ("PC15", "OSC32_OUT")),
        Pin("5", ("PF0", "OSC_IN")),
        Pin("6", ("PF1", "OSC_OUT")),
    ]
    # Tag each GPIO with its supply well.
    # NOTE(review): PB/PC/PF pins are all tagged "VDDA" here while PA pins
    # get "VDD" — confirm this matches the actual power domains of the
    # STM32F072; it looks suspicious.
    for pin in PINS:
        if pin.names[0].startswith("PA"):
            pin.well_name = "VDD"
        if pin.names[0].startswith("PB"):
            pin.well_name = "VDDA"
        if pin.names[0].startswith("PC"):
            pin.well_name = "VDDA"
        if pin.names[0].startswith("PF"):
            pin.well_name = "VDDA"
class I2cIoExpander(Part):
    """TCA6416A 16-bit I2C I/O expander (ports P00-P07 and P10-P17)."""
    REFDES_PREFIX = "U"
    part_number = "TCA6416ARTWR"
    package = "QFN50P400X400X080-25N"
    PINS = [
        Pin("23", "VCCI"),
        Pin("21", "VCCP"),
        Pin("9", "GND"),
        Pin("25", "PAD"),
        Pin("19", "SCL"),
        Pin("20", "SDA"),
        Pin("22", "INT_L"),
        Pin("24", "RESET_L"),
        Pin("18", "A0"),
    ]
    # Port 0: P00..P07 on physical pins 1..8.
    for i in range(8):
        PINS.append(Pin(i + 1, "P0%d" % i))
    # Port 1: P10..P17 on physical pins 10..17.
    for i in range(8):
        PINS.append(Pin(i + 10, "P1%d" % i))
class Mux(Part):
    """2:1 multiplexer: SEL routes IN0 or IN1 onto OUT."""
    REFDES_PREFIX = "U"
    part_number = "313-00929-00"
    package = "SOT65P210X110-6L"
    PINS = [
        Pin("5", "VCC"),
        Pin("2", "GND"),
        Pin("3", ("0", "IN0")),
        Pin("1", ("1", "IN1")),
        Pin("6", ("S0", "SEL")),
        Pin("4", ("Y", "OUT")),
    ]
class OutputBuffer(Part):
    """SN74LVC1G126 single buffer with output enable (A -> Y when OE)."""
    REFDES_PREFIX = "U"
    part_number = "SN74LVC1G126YZPR"
    package = "BGA5C50P3X2_141X91X50L"
    PINS = [
        Pin("A2", "VCC"),
        Pin("C1", "GND"),
        Pin("B1", ("A", "IN")),
        Pin("A1", ("OE", "SEL")),
        Pin("C2", ("Y", "OUT")),
    ]
class LevelShifter(Part):
    """
    Bidirectional Level Shifter
    DIR=0 : B->A
    DIR=1 : A->B
    """
    REFDES_PREFIX = "U"
    PINS = [
        "VCCA",
        "VCCB",
        "GND",
    ]
    @property
    def direction_AB(self):
        # Net to tie a DIR pin to for the A->B direction (logic high,
        # i.e. this shifter's VCCA net).
        return self.VCCA.net
    @property
    def direction_BA(self):
        # Net to tie a DIR pin to for the B->A direction (logic low, GND).
        return self.GND.net
class LevelShifter1(LevelShifter):
    # Single-channel variant (SN74AVC1T45); inherits the base docstring.
    __doc__ = LevelShifter.__doc__
    part_number = "SN74AVC1T45DRLR"
    package = "SOP50P170X60-6N"
    PINS = [
        Pin("1", "VCCA", type=PinType.POWER_INPUT),
        Pin("6", "VCCB", type=PinType.POWER_INPUT),
        Pin("3", "A"),
        Pin("4", "B"),
        Pin("5", "DIR"),
        Pin("2", "GND", type=PinType.POWER_INPUT),
    ]
class LevelShifter2(LevelShifter):
    # Two-channel variant (SN74AVC2T245) with per-channel DIR and a
    # shared active-low output enable.
    __doc__ = LevelShifter.__doc__
    part_number = "SN74AVC2T245RSWR"
    package = "QFN40P145X185X55-10N"
    PINS = [
        Pin("7", "VCCA", type=PinType.POWER_INPUT),
        Pin("6", "VCCB", type=PinType.POWER_INPUT),
        Pin("2", "OE_L"),
        Pin("8", "A1"),
        Pin("9", "A2"),
        Pin("5", "B1"),
        Pin("4", "B2"),
        Pin("10", "DIR1"),
        Pin("1", "DIR2"),
        Pin("3", "GND", type=PinType.POWER_INPUT),
    ]
class LevelShifter4(LevelShifter):
    # Four-channel variant (SN74AVC4T774) with per-channel DIR and a
    # shared active-low output enable.
    __doc__ = LevelShifter.__doc__
    part_number = "SN74AVC4T774RSVR"
    package = "QFN40P265X185X55-16N"
    PINS = [
        Pin("14", "VCCA", type=PinType.POWER_INPUT),
        Pin("13", "VCCB", type=PinType.POWER_INPUT),
        Pin("7", "OE_L"),
        Pin("1", "A1"),
        Pin("2", "A2"),
        Pin("3", "A3"),
        Pin("4", "A4"),
        Pin("12", "B1"),
        Pin("11", "B2"),
        Pin("10", "B3"),
        Pin("9", "B4"),
        Pin("15", "DIR1"),
        Pin("16", "DIR2"),
        Pin("5", "DIR3"),
        Pin("6", "DIR4"),
        Pin("8", "GND", type=PinType.POWER_INPUT),
    ]
class AnalogSwitch(Part):
    """
    Dual Analog Switch (TS3A24159)
    IN  DIRECTION
    L   NC -> COM
    H   NO -> COM
    """
    REFDES_PREFIX = "U"
    part_number = "TS3A24159"
    package = "BGA10C50P4X3_186X136X50L"
    PINS = [
        Pin("D2", ("V+", "VCC")),
        Pin("A2", "GND"),
        # Channel 1
        Pin("B1", ("IN1", "SEL1")),
        Pin("C1", "COM1"),
        Pin("A1", "NC1"),
        Pin("D1", "NO1"),
        # Channel 2
        Pin("B3", ("IN2", "SEL2")),
        Pin("C3", "COM2"),
        Pin("A3", "NC2"),
        Pin("D3", "NO2"),
    ]
class PowerSwitch(Part):
    """ADP194 high-side load switch (EN gates IN -> OUT)."""
    REFDES_PREFIX = "U"
    part_number = "ADP194ACBZ-R7"
    package = "BGA4C40P2X2_80X80X56"
    PINS = [
        Pin("A1", ("IN", "IN1")),
        Pin("A2", ("OUT", "OUT1")),
        Pin("B1", "EN"),
        Pin("B2", "GND"),
    ]
# End of things that should be in a generic library
# Maybe this connector could be in a library too, since it's not too specific to this servo schematic
class ServoConnector(make_connector(pin_count=50)):
    """50-pin DUT-side servo connector (Panasonic AXK850145WG).

    NOTE(review): with pin_names_match_nets set, pcbdl presumably
    auto-connects each pin to the net named "DUT_" + <pin name> —
    confirm against the pcbdl docs.
    """
    part_number = "AXK850145WG"
    package = "AXK850145WG"
    pin_names_match_nets = True
    pin_names_match_nets_prefix = "DUT_"
    PINS = [
        ("P1", "GND"),
        ("P2", "SPI2_CLK", "SPI2_SK"),
        ("P3", "SPI2_CS"),
        ("P4", "SPI2_MOSI", "SPI2_DI"),
        ("P5", "SPI2_MISO", "SPI2_DO"),
        ("P6", "SPI2_VREF"),
        ("P7", "SPI2_HOLD_L"),
        ("P8", "GND"),
        ("P9", "SPI1_CLK", "SPI1_SK"),
        ("P10", "SPI1_CS"),
        ("P11", "SPI1_MOSI", "SPI1_DI"),
        ("P12", "SPI1_MISO", "SPI1_DO"),
        ("P13", "SPI1_VREF"),
        ("P14", "EC_RESET_L", "COLD_RESET_L"),
        ("P15", "GND"),
        ("P16", "UART2_SERVO_DUT_TX", "UART2_RXD"),
        ("P17", "UART2_DUT_SERVO_TX", "UART2_TXD"),
        ("P18", "UART2_VREF"),
        ("P19", "SD_DETECT_L"),
        ("P20", "GND"),
        ("P21", "JTAG_TCK"),
        ("P22", "PWR_BUTTON"),
        ("P23", "JTAG_TMS"),
        ("P24", "JTAG_TDI"),
        ("P25", "JTAG_TDO"),
        ("P26", "JTAG_RTCK"),
        ("P27", "JTAG_TRST_L"),
        ("P28", "JTAG_SRST_L", "WARM_RESET_L"),
        ("P29", "JTAG_VREF"),
        ("P30", "REC_MODE_L", "GOOG_REC_MODE_L"),
        ("P31", "GND"),
        ("P32", "UART1_SERVO_DUT_TX", "UART1_RXD"),
        ("P33", "UART1_DUT_SERVO_TX", "UART1_TXD"),
        ("P34", "UART1_VREF"),
        ("P35", "I2C_3.3V"),
        ("P36", "GND"),
        ("P37", "I2C_SDA"),
        ("P38", "I2C_SCL"),
        ("P39", "HPD"),
        ("P40", "FW_WP", "MFG_MODE"),
        ("P41", "PROC_HOT_L", "FW_UPDATE_L", "FW_UP_L"),
        ("P42", "GND"),
        ("P43", "DEV_MODE"),
        ("P44", "LID_OPEN"),
        ("P45", "PCH_DISABLE_L", "CPU_NMI"),
        ("P46", "KBD_COL1"),
        ("P47", "KBD_COL2"),
        ("P48", "KBD_ROW1"),
        ("P49", "KBD_ROW2"),
        ("P50", "KBD_ROW3"),
    ]
    # NOTE(review): presumably promotes each pin's second alias to the
    # canonical name — confirm against pcbdl's Pin API.
    _postprocess_pin = Pin.second_name_important
# The following part definitions are only related to this circuit
class ProgrammingConnector(make_connector(8)):
    """8-pin FFC connector used to flash/debug the servo EC."""
    part_number = "FH34SRJ-8S-0.5SH(50)"
    package = "HRS_FH34SRJ-8S-0-5SH"
    PINS = [
        ("P1", "GND"),
        ("P2", "UART_TX"),
        ("P3", "UART_RX"),
        ("P6", "NRST"),
        ("P8", "BOOT0"),
        # Mechanical/shield pads share one logical pin.
        Pin("G", numbers=("G1", "G2")),
    ]
    _postprocess_pin = Pin.second_name_important
class JtagConnector(make_connector(10)):
    """Standard 2x5 50-mil ARM JTAG/SWD header (Samtec FTSH-105)."""
    part_number = "HDR_2X5_50MIL-210-00939-00-SAMTEC_FTSH-105-01"
    package = "SAMTEC_FTSH-105-01-L-DV-K"
    pin_names_match_nets = True
    pin_names_match_nets_prefix = "DUT_JTAG_"
    PINS = [
        ("P1", "VCC"),
        ("P2", "TMS", "SWDIO"),
        ("P3", "GND"),
        ("P4", "TCK", "SWDCLK"),
        ("P5", "GND"),
        ("P6", "TDO", "SWO"),
        ("P7", "KEY"),
        ("P8", "TDI"),
        ("P9", "GNDDetect"),
        ("P10", "RESET"),
    ]
    _postprocess_pin = Pin.second_name_important
class ServoEC(STM32F072):
    """The servo-micro EC: an STM32F072 with board-specific signal names.

    Each GPIO is aliased to its function on this board; with
    pin_names_match_nets the alias also becomes the net name.
    """
    pin_names_match_nets = True
    PINS = [
        Pin(("PA0", "UART3_TX")),
        Pin(("PA1", "UART3_RX")),
        Pin(("PA2", "UART1_TX")),
        Pin(("PA3", "UART1_RX")),
        Pin(("PA4", "SERVO_JTAG_TMS")),
        Pin(("PA5", "SPI1_MUX_SEL")),
        Pin(("PA6", "SERVO_JTAG_TDO_BUFFER_EN")),
        Pin(("PA7", "SERVO_JTAG_TDI")),
        Pin(("PA8", "UART1_EN_L")),
        Pin(("PA9", "EC_UART_TX")),
        Pin(("PA10", "EC_UART_RX")),
        Pin(("PA11", "USB_DM")),
        Pin(("PA12", "USB_DP")),
        Pin(("PA13", "SERVO_JTAG_TRST_L")),
        Pin(("PA14", "SPI1_BUF_EN_L")),
        Pin(("PA15", "SPI2_BUF_EN_L")),
        Pin(("PB0", "UART2_EN_L")),
        Pin(("PB1", "SERVO_JTAG_RTCK")),
        Pin(("PB2", "SPI1_VREF_33")),
        Pin(("PB3", "SPI1_VREF_18")),
        Pin(("PB4", "SPI2_VREF_33")),
        Pin(("PB5", "SPI2_VREF_18")),
        Pin(("PB6", "SERVO_JTAG_TRST_DIR")),
        Pin(("PB7", "SERVO_JTAG_TDI_DIR")),
        Pin(("PB8", "MASTER_I2C_SCL")),
        Pin(("PB9", "MASTER_I2C_SDA")),
        Pin(("PB10", "UART2_TX")),
        Pin(("PB11", "UART2_RX")),
        Pin(("PB12", "SERVO_SPI_CS")),
        Pin(("PB13", "SERVO_TO_SPI1_MUX_CLK")),
        Pin(("PB14", "SERVO_TO_SPI1_MUX_MISO")),
        Pin(("PB15", "SERVO_SPI_MOSI")),
        Pin(("PC13", "RESET_L")),
        Pin(("PC14", "SERVO_JTAG_TMS_DIR")),
        Pin(("PC15", "SERVO_JTAG_TDO_SEL")),
        Pin(("PF0", "JTAG_BUFOUT_EN_L")),
        Pin(("PF1", "JTAG_BUFIN_EN_L")),
    ]
    _postprocess_pin = Pin.second_name_important
# Start of actual schematic
vbus_in = Net("VBUS_IN")  # raw USB VBUS rail from the connector
gnd = Net("GND")
def decoupling(value="100n", package=None):
    """Create a decoupling capacitor of the given value, tied to GND.

    Args:
        value: capacitance value string, e.g. "100n", "2.2u", "10u".
        package: explicit footprint; when None, one is auto-selected
            from the value (uF-range parts get progressively larger
            footprints).

    Returns:
        The C() capacitor instance, part-numbered "CY<value>".
    """
    if package is None:
        # Auto-select a footprint from the value ONLY when the caller did
        # not pass one.  Previously these checks ran unconditionally, so an
        # explicit package was silently overridden for any "u"-range value
        # (e.g. decoupling("1u", "CAPC0603X33L") still produced a
        # CAPC1005X71L part).
        package = "CAPC0603X33L"
        if "u" in value:
            package = "CAPC1005X71L"
        if "0u" in value:
            package = "CAPC1608X80L"
    return C(value, to=gnd, package=package, part_number="CY" + value) #defined_at: not here
# Wrap the library resistor factory so every resistor in this schematic
# gets the RESC0603X23L footprint and an "R<value>" part number.
old_R = R
def R(value, to):
    return old_R(value, package="RESC0603X23L", part_number="R" + value, to=to) #defined_at: not here
# usb stuff
# Micro-B connector with an ESD protection diode array across D+/D-.
usb = UsbConnector()
usb_esd = UsbEsdDiode()
Net("USB_DP") << usb.DP << usb_esd.P1
Net("USB_DM") << usb.DM << usb_esd.P2 >> usb_esd.NC
vbus_in << usb.VBUS << usb_esd.VCC
gnd << usb.GND << usb.G << usb_esd.GND
# We could make this type-c instead!
# 3300 regulator
# 3.3 V LDO fed straight from VBUS; EN tied to IN so it is always on.
pp3300 = Net("PP3300")
reg3300 = MIC5504()
vbus_in << (
    reg3300.IN, decoupling("2.2u"),
    reg3300.EN,
)
gnd << reg3300.GND
# Output rail with bulk + high-frequency decoupling.
pp3300 << (
    reg3300.OUT,
    decoupling("10u"),
    decoupling(),
    decoupling("1000p"),
)
# 1800 regulator
# 1.8 V LDO fed from PP3300 through a paralleled double diode —
# presumably to drop the LDO input voltage / limit dissipation
# (TODO(review): confirm intent of the diode drop).
pp1800 = Net("PP1800")
reg1800 = TLV70018DSER()
drop_diode = DoubleDiode()
pp3300 << drop_diode.A1 << drop_diode.A2
Net("PP1800_VIN") << (
    drop_diode.K,
    reg1800.IN, decoupling(),
    reg1800.EN
)
gnd << reg1800.GND
# 1 uF output cap with an explicitly requested 0603 footprint.
pp1800 << reg1800.OUT << decoupling("1u", "CAPC0603X33L")
ec = ServoEC()
# USB data lines go straight to the MCU's built-in USB PHY.
usb.DP << ec
usb.DM << ec
# ec power
pp3300 << (
    ec.VBAT, decoupling(),
    ec.VDD, decoupling(),
    decoupling("4.7u"),
)
# Analog supply filtered from PP3300 through a ferrite bead.
Net("PP3300_PD_VDDA") << (
    ec.VDDA,
    L("600@100MHz", to=pp3300, package="INDC1005L", part_number="FERRITE_BEAD-185-00019-00"),
    decoupling("1u"),
    decoupling("100p"),
)
pp3300 << (
    ec.VDDIO2, decoupling(),
    decoupling("4.7u"),
)
gnd << ec.VSS << ec.VSSA << ec.PAD
# ec programming/debug
prog = ProgrammingConnector()
gnd << prog.GND << prog.G
Net("PD_NRST_L") << (
    ec.NRST,
    prog.NRST,
    decoupling(),
)
boot0 = Net("PD_BOOT0")
boot0_q = FET("CSD13381F4", package="DFN100X60X35-3L")
# Use OTG + A-TO-A cable to go to bootloader mode
# USB_ID pulled high turns on the FET, which pulls BOOT0 low; grounding
# ID (OTG cable) releases BOOT0 to its pull-up -> ROM bootloader.
Net("USB_ID") >> boot0_q.G >> R("51.1k", to=vbus_in) << usb.ID
boot0 << boot0_q.D << R("51.1k", to=vbus_in) << ec.BOOT0 << prog.BOOT0
gnd << boot0_q.S
# EC console UART is also exposed on the programming connector.
Net("EC_UART_TX") << ec << prog.UART_TX
Net("EC_UART_RX") << ec << prog.UART_RX
# Per-channel SPI reference-voltage rails, supplied by the DUT side.
ppdut_spi_vrefs = {
    1: Net("PPDUT_SPI1_VREF"),
    2: Net("PPDUT_SPI2_VREF"),
}
# UART3 on the EC is repurposed as the bit-banged JTAG TDO/TCK path.
jtag_buffer_to_servo_tdo = Net("JTAG_BUFFER_TO_SERVO_TDO") >> ec.UART3_RX # also Net("UART3_TX")
servo_jtag_tck = Net("SERVO_JTAG_TCK") << ec.UART3_TX # also Net("UART3_TX")
dut = ServoConnector()
gnd << dut.GND
pp3300 >> dut.pins["I2C_3.3V"]
# I2C GPIO expander providing the DUT control signals; A0 low -> addr 0x20.
io = I2cIoExpander()
pp3300 << io.VCCI << decoupling()
gnd << io.GND << io.PAD
gnd << io.A0 # i2c addr 7'H=0x20
Net("I2C_REMOTE_ADC_SDA") << R("4.7k", to=pp3300) << ec.MASTER_I2C_SDA << io.SDA << dut.I2C_SDA
Net("I2C_REMOTE_ADC_SCL") << R("4.7k", to=pp3300) << ec.MASTER_I2C_SCL << io.SCL << dut.I2C_SCL
Net("RESET_L") << io.RESET_L << ec
pp1800 << io.VCCP << decoupling()
dut_mfg_mode = Net("DUT_MFG_MODE") << dut
# Single-channel level shifter for the MFG_MODE signal into the DUT's
# SPI2 voltage domain.
mfg_mode_shifter = LevelShifter1()
gnd << mfg_mode_shifter.GND
Net("FW_WP_EN") << mfg_mode_shifter.VCCA << io.P00 << decoupling() << R("4.7k", to=gnd)
Net("FTDI_MFG_MODE") << io.P01 << mfg_mode_shifter.A
dut_mfg_mode << io.P02
io.P03 << TP(package="TP075") # spare
# Expander port assignments: DUT reset/power/recovery/strap controls.
Net("SPI_HOLD_L") << io.P04 >> dut.SPI2_HOLD_L
Net("DUT_COLD_RESET_L") << io.P05 >> dut
Net("DUT_PWR_BUTTON") << io.P06 >> dut
Net("DUT_GOOG_REC_MODE_L") << io.P10 >> dut
dut_mfg_mode << io.P11
Net("HPD") << io.P12 >> dut
Net("FW_UP_L") << io.P13 >> dut
Net("DUT_LID_OPEN") << io.P14 >> dut
Net("DUT_DEV_MODE") << io.P15 >> dut
Net("PCH_DISABLE_L") << io.P16 >> dut
io.P17 << TP(package="TP075") # spare
mfg_mode_shifter.direction_AB << mfg_mode_shifter.DIR
ppdut_spi_vrefs[2] >> mfg_mode_shifter.VCCB << decoupling()
Net("DUT_MFG_MODE_BUF") << R("0", to=dut_mfg_mode) >> mfg_mode_shifter.B
# JTAG
# Two 4-channel level shifters bridge the EC's 3.3 V domain to the DUT's
# JTAG_VREF domain: shifter1 drives signals toward the DUT, shifter2
# receives signals from it. A mux selects JTAG TDO vs SWD SWDIO readback.
jtag_connector = JtagConnector()
gnd >> jtag_connector.GND
Net("DUT_WARM_RESET_L") << io.P07 >> dut << jtag_connector.RESET
jtag_vref = Net("PPDUT_JTAG_VREF")
jtag_vref << dut.JTAG_VREF >> jtag_connector.VCC
shifter1 = LevelShifter4()
pp3300 >> shifter1.VCCA << decoupling()
jtag_vref >> shifter1.VCCB << decoupling()
gnd >> shifter1.GND
shifter2 = LevelShifter4()
pp3300 >> shifter2.VCCA << decoupling()
jtag_vref >> shifter2.VCCB << decoupling()
gnd >> shifter2.GND
jtag_mux = Mux()
pp3300 >> jtag_mux.VCC << decoupling()
gnd >> jtag_mux.GND
Net("SERVO_JTAG_TDO_SEL") << ec >> jtag_mux.SEL
# Tri-statable buffer so the (shared) UART3_RX line only sees TDO when
# the EC enables it.
jtag_output_buffer = OutputBuffer()
pp3300 >> jtag_output_buffer.VCC << decoupling()
gnd >> jtag_output_buffer.GND
Net("SERVO_JTAG_TDO_BUFFER_EN") << ec >> jtag_output_buffer.OE
Net("SERVO_JTAG_MUX_TDO") << jtag_mux.OUT >> jtag_output_buffer.IN
jtag_buffer_to_servo_tdo << jtag_output_buffer.OUT
Net("JTAG_BUFOUT_EN_L") << ec >> shifter1.OE_L
Net("JTAG_BUFIN_EN_L") << ec >> shifter2.OE_L
# shifter1: EC -> DUT (TRST/TMS/TDI), per-channel direction controlled.
pp3300 >> shifter1.A1 # spare
Net("SERVO_JTAG_TRST_L") << ec << shifter1.A2
Net("SERVO_JTAG_TMS") << ec << shifter1.A3
Net("SERVO_JTAG_TDI") << ec << shifter1.A4
shifter1.direction_AB >> shifter1.DIR1 # spare
Net("SERVO_JTAG_TRST_DIR") << ec >> shifter1.DIR2
Net("SERVO_JTAG_TMS_DIR") << ec >> shifter1.DIR3
Net("SERVO_JTAG_TDI_DIR") << ec >> shifter1.DIR4
shifter1.B1 # spare
Net("DUT_JTAG_TRST_L") << dut << shifter1.B2
Net("DUT_JTAG_TMS") >> dut << shifter1.B3 << jtag_connector
Net("DUT_JTAG_TDI") << dut << shifter1.B4 >> shifter2.B3 >> jtag_connector
# shifter2: DUT -> EC (TDO/RTCK), plus TCK out on channel 4.
Net("DUT_JTAG_TDO") << dut >> shifter2.B1 >> jtag_connector
Net("DUT_JTAG_RTCK") << dut >> shifter2.B2
Net("DUT_JTAG_TCK") << dut >> shifter2.B4 >> jtag_connector
shifter2.direction_BA >> shifter2.DIR1
shifter2.direction_BA >> shifter2.DIR2
shifter2.direction_BA >> shifter2.DIR3
shifter2.direction_AB >> shifter2.DIR4
Net("SERVO_JTAG_TDO") << shifter2.A1 >> jtag_mux.IN0
Net("SERVO_JTAG_RTCK") >> ec << shifter2.A2
Net("SERVO_JTAG_SWDIO") << shifter2.A3 >> jtag_mux.IN1
servo_jtag_tck << shifter2.A4
# SPI1 & 2
# TODO SERVO_TO_SPI1_MUX_CLK
servo_spi_mosi = Net("SERVO_SPI_MOSI") << ec
servo_spi_cs = Net("SERVO_SPI_CS") << ec
# Since the circuits look so similar, we'll just have a loop
spi_shifters = {
    1: LevelShifter4(),
    2: LevelShifter4(),
}
for i, s in spi_shifters.items():
    # Power supply
    # Each SPI channel's VREF is either supplied by the DUT or forced to
    # 1.8 V / 3.3 V via an EC-controlled power switch.
    vref = ppdut_spi_vrefs[i]
    vref << dut.pins["SPI%d_VREF" % i]
    power_switches = [
        ("18", pp1800, PowerSwitch()),
        ("33", pp3300, PowerSwitch()),
    ]
    for voltage, input_rail, power_switch in power_switches:
        gnd << power_switch.GND
        Net("SPI%d_VREF_%s" % (i, voltage)) << ec >> power_switch.EN << R("4.7k", to=gnd)
        input_rail << power_switch.IN
        vref << power_switch.OUT
    # Level shifter setup
    pp3300 >> s.VCCA << decoupling()
    vref >> s.VCCB << decoupling()
    gnd >> s.GND
    Net("SPI%d_BUF_EN_L" % i) << ec >> s.OE_L
    # MISO
    Net("DUT_SPI%d_MISO" % i) << dut >> s.B1
    s.direction_BA >> s.DIR1
    # A side connected after this loop
    # MOSI
    servo_spi_mosi >> s.A2
    s.direction_AB >> s.DIR2
    Net("DUT_SPI%d_MOSI" % i) << dut >> s.B2
    # CS
    servo_spi_cs >> s.A3
    s.direction_AB >> s.DIR3
    Net("DUT_SPI%d_CS" % i) << dut >> s.B3
    # CLK
    # A side connected after this loop
    s.direction_AB >> s.DIR4
    Net("DUT_SPI%d_CLK" % i) << dut >> s.B4
# Analog switch routes SPI1's MISO/CLK either to the EC's SPI2 signals
# (NO side) or to the bit-banged JTAG TDO/TCK lines (NC side).
spi1_mux = AnalogSwitch()
pp3300 >> spi1_mux.VCC >> decoupling()
gnd >> spi1_mux.GND
Net("SPI1_MUX_SEL") << ec >> spi1_mux.SEL1 >> spi1_mux.SEL2
Net("SPI_MUX_TODUT_SPI1_MISO") >> spi1_mux.COM1 << spi_shifters[1].A1
Net("SPI_MUX_TO_DUT_SPI1_CLK") << spi1_mux.COM2 >> spi_shifters[1].A4
Net("SERVO_TO_SPI1_MUX_MISO") << spi1_mux.NO1 << spi_shifters[2].A1 >> ec
Net("SERVO_TO_SPI1_MUX_CLK") >> spi1_mux.NO2 >> spi_shifters[2].A4 << ec
jtag_buffer_to_servo_tdo << spi1_mux.NC1
servo_jtag_tck >> spi1_mux.NC2
# UART 1 & 2
# Two identical DUT console UARTs, each level-shifted into the DUT's
# per-channel UART VREF domain.
uart_shifters = {
    1: LevelShifter2(),
    2: LevelShifter2(),
}
for i, s in uart_shifters.items():
    vref = Net("PPDUT_UART%d_VREF" % i)
    vref << dut.pins["UART%d_VREF" % i]
    # Power off to VCCA or VCCB provides isolation
    pp3300 >> s.VCCA << decoupling()
    vref >> s.VCCB << decoupling()
    gnd >> s.GND
    Net("UART%d_EN_L" % i) << ec >> s.OE_L
    # Channel 1: EC TX -> DUT RX; channel 2: DUT TX -> EC RX.
    Net("UART%d_TX" % i) << ec >> s.A1
    s.direction_AB >> s.DIR1
    Net("UART%d_SERVO_DUT_TX" % i) >> dut << s.B1
    Net("UART%d_DUT_SERVO_TX" % i) << dut >> s.B2
    s.direction_BA >> s.DIR2
    Net("UART%d_RX" % i) >> ec << s.A2
# Assign reference designators from the stored mapping file.
global_context.autoname("servo_micro.refdes_mapping")
# NOTE(review): the trailing lines below are non-code scraper residue
# (dataset-viewer boilerplate accidentally appended to the file). They are
# commented out so the module parses; safe to delete entirely.
# |
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.