hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c13c401217c12d0e3291f65386d9252a05952acb | 2,861 | py | Python | common/xrd-ui-tests-python/tests/xroad_trust_view_details_cs_settings/view_management.py | ria-ee/XTM | 6103f3f5bbba387b8b59b050c0c4f1fb2180fc37 | [
"MIT"
] | 3 | 2018-03-15T14:22:50.000Z | 2021-11-08T10:30:35.000Z | common/xrd-ui-tests-python/tests/xroad_trust_view_details_cs_settings/view_management.py | ria-ee/XTM | 6103f3f5bbba387b8b59b050c0c4f1fb2180fc37 | [
"MIT"
] | 11 | 2017-04-06T09:25:41.000Z | 2018-06-04T09:08:48.000Z | common/xrd-ui-tests-python/tests/xroad_trust_view_details_cs_settings/view_management.py | ria-ee/XTM | 6103f3f5bbba387b8b59b050c0c4f1fb2180fc37 | [
"MIT"
] | 20 | 2017-03-14T07:21:58.000Z | 2019-05-21T09:26:30.000Z | # coding=utf-8
from selenium.webdriver.common.by import By
from view_models import certification_services, sidebar, ss_system_parameters
import re
import time
def test_ca_cs_details_view_cert(case, profile_class=None):
    '''
    Build a test-step closure that opens the "Certification services" view
    and verifies the certification service settings (UC TRUST_04).
    :param case: MainController object driving the browser session
    :param profile_class: string The fully qualified name of the Java class
        expected as the certificate profile (compared against the UI value)
    :return: function - the closure executed later by the test runner
    '''
    self = case

    def view_cert():
        '''Open "Certification services" and verify the service settings'''
        # Navigate to the "Certification services" page via the sidebar link.
        self.wait_until_visible(self.by_css(sidebar.CERTIFICATION_SERVICES_CSS)).click()
        self.wait_jquery()
        # Delegate the actual settings verification to the helper below.
        view_cert_data(self, profile_class=profile_class)

    return view_cert
def view_cert_data(self, profile_class=None):
    '''
    Open the last-added approved CA, switch to its "CA Settings" tab and
    verify the displayed settings (UC TRUST_04 steps 1-2):
    the authentication-only checkbox, the certificate profile Java class
    name, and the presence of the "Save" button.
    :param self: MainController object driving the browser session
    :param profile_class: string expected certificate profile class name
    '''
    '''Get approved CA row'''
    service_row = self.wait_until_visible(type=By.XPATH, element=certification_services.LAST_ADDED_CERT_XPATH)
    '''Double click on approved CA row'''
    self.double_click(service_row)
    '''Click on "Edit button"'''
    self.by_id(certification_services.DETAILS_BTN_ID).click()
    self.log('UC TRUST_04 1.CS administrator selects to view the settings of a certification service.')
    # Switch to the "CA Settings" tab of the details dialog.
    self.wait_until_visible(type=By.XPATH, element=certification_services.CA_SETTINGS_TAB_XPATH).click()
    self.wait_jquery()
    self.log(
        'UC TRUST_04: 2.System displays the following settings. Usage restrictions for the certificates issued by the certification service.')
    # is_enabled() on the auth-only checkbox proves the control is present and usable.
    auth_checkbox = self.wait_until_visible(certification_services.EDIT_CA_AUTH_ONLY_CHECKBOX_XPATH,
                                            By.XPATH).is_enabled()
    self.is_true(auth_checkbox, msg='Authentication chechkbox not found')
    '''Click on authentication checkbox'''
    self.wait_until_visible(certification_services.EDIT_CA_AUTH_ONLY_CHECKBOX_XPATH, By.XPATH).click()
    self.log(
        'UC TRUST_04: 2.System displays the following settings. The fully qualified name of the Java class that describes the certificate profile for certificates issued by the certification service.')
    '''Get profile info'''
    profile_info_area = self.wait_until_visible(type=By.XPATH,
                                                element=certification_services.EDIT_CERTIFICATE_PROFILE_INFO_AREA_XPATH)
    profile_info = profile_info_area.get_attribute("value")
    '''Verify profile info'''
    # Compare the UI value against the expected fully qualified class name.
    self.is_equal(profile_info, profile_class,
                  msg='The name of the Java class that describes the certificate profile is wrong')
    self.log(
        'UC TRUST_04: 2. The following user action options are displayed:edit the settings of the certification service')
    '''Verify "Save" button'''
    save_button_id = self.wait_until_visible(type=By.ID,
                                             element=certification_services.SAVE_CA_SETTINGS_BTN_ID).is_enabled()
    self.is_true(save_button_id, msg='"Save" button not found')
| 40.295775 | 201 | 0.717581 |
b294f8f6c9cd7847a4cd0ffd68933db492719f39 | 2,456 | py | Python | napari_manual_split_and_merge_labels/_function.py | haesleinhuepf/napari-manual-split-and-merge-labels | b43e3d6bed10b66eb443fd4b0aa686ab2cb66f3c | [
"BSD-3-Clause"
] | 4 | 2021-08-02T13:03:33.000Z | 2021-12-17T19:14:29.000Z | napari_manual_split_and_merge_labels/_function.py | haesleinhuepf/napari-manual-split-and-merge-labels | b43e3d6bed10b66eb443fd4b0aa686ab2cb66f3c | [
"BSD-3-Clause"
] | 1 | 2022-01-18T13:32:07.000Z | 2022-01-18T14:15:19.000Z | napari_manual_split_and_merge_labels/_function.py | haesleinhuepf/napari-manual-split-and-merge-labels | b43e3d6bed10b66eb443fd4b0aa686ab2cb66f3c | [
"BSD-3-Clause"
] | 2 | 2021-07-29T05:22:06.000Z | 2022-01-18T10:43:21.000Z | from typing import TYPE_CHECKING
import numpy as np
from napari_plugin_engine import napari_hook_implementation
from napari_tools_menu import register_function
import napari
# This is the actual plugin function, where we export our function
# (The functions themselves are defined below)
@napari_hook_implementation
def napari_experimental_provide_function():
    """Expose this plugin's functions to napari's function-plugin hook."""
    return [Manually_merge_labels, Manually_split_labels]
@register_function(menu="Utilities > Manually merge labels")
def Manually_merge_labels(labels_layer: napari.layers.Labels, points_layer: napari.layers.Points, viewer : napari.Viewer):
    """Merge every label sitting under a clicked point into a single label
    (the smallest of the selected label ids)."""
    # First invocation without a points layer: create one in ADD mode so the
    # user can start clicking points, and return to let them do so.
    if points_layer is None:
        points_layer = viewer.add_points([])
        points_layer.mode = 'ADD'
        return

    label_image = labels_layer.data

    # Read the label id under each clicked point.
    clicked_ids = []
    for coordinate in points_layer.data:
        index = tuple(int(component) for component in coordinate)
        clicked_ids.append(label_image.item(index))

    # Rewrite every selected label to the smallest selected id.
    target_id = min(clicked_ids)
    for label_id in clicked_ids:
        if label_id != target_id:
            label_image[label_image == label_id] = target_id

    # Push the merged result back and clear the point selection.
    labels_layer.data = label_image
    points_layer.data = []
@register_function(menu="Utilities > Manually split labels")
def Manually_split_labels(labels_layer: napari.layers.Labels, points_layer: napari.layers.Points, viewer: napari.Viewer):
    """Split the labels under the clicked points into one new label per point,
    using a watershed seeded at the points over the union of the selected labels.
    """
    # First invocation without a points layer: create one in ADD mode so the
    # user can click seed points, and return to let them do so.
    if points_layer is None:
        points_layer = viewer.add_points([])
        points_layer.mode = 'ADD'
        return
    labels = labels_layer.data
    points = points_layer.data
    # Label id under each clicked point.
    label_ids = [labels.item(tuple([int(j) for j in i])) for i in points]
    # make a binary image first
    binary = np.zeros(labels.shape, dtype=bool)
    new_label_id = min(label_ids)  # NOTE(review): unused — leftover from the merge variant?
    for l in label_ids:
        binary[labels == l] = True
    # origin: https://scikit-image.org/docs/dev/auto_examples/segmentation/plot_watershed.html
    from scipy import ndimage as ndi
    from skimage.segmentation import watershed
    #from skimage.feature import peak_local_max
    #distance = ndi.distance_transform_edt(binary)
    #coords = peak_local_max(distance, footprint=np.ones((3, 3)), labels=binary)
    # Seed mask: one True pixel per clicked point, labelled into markers.
    mask = np.zeros(labels.shape, dtype=bool)
    for i in points:
        #mask[tuple(points)] = True
        mask[tuple([int(j) for j in i])] = True
    markers, _ = ndi.label(mask)
    new_labels = watershed(binary, markers, mask=binary)
    # Offset by labels.max() so the split fragments get fresh, unused label ids.
    labels[binary] = new_labels[binary] + labels.max()
    labels_layer.data = labels
    points_layer.data = []
| 33.643836 | 122 | 0.714984 |
3ffc62aadcfe403e110cc37f069286f306598e52 | 6,089 | py | Python | designate/api/v2/controllers/rest.py | Woody89/designate-private | 0a6ed5a1d7cdac5cb1e9dec8fd3ddfb9a77c58f5 | [
"Apache-2.0"
] | null | null | null | designate/api/v2/controllers/rest.py | Woody89/designate-private | 0a6ed5a1d7cdac5cb1e9dec8fd3ddfb9a77c58f5 | [
"Apache-2.0"
] | null | null | null | designate/api/v2/controllers/rest.py | Woody89/designate-private | 0a6ed5a1d7cdac5cb1e9dec8fd3ddfb9a77c58f5 | [
"Apache-2.0"
] | 1 | 2019-11-16T10:55:49.000Z | 2019-11-16T10:55:49.000Z | # flake8: noqa
# Copyright (c) <2011>, Jonathan LaCour
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import inspect
import pecan
import pecan.rest
import pecan.routing
from designate import exceptions
from designate.central import rpcapi as central_rpcapi
from designate.pool_manager import rpcapi as pool_mgr_rpcapi
from designate.i18n import _
class RestController(pecan.rest.RestController):
    """
    Extension for Pecan's RestController to better handle POST/PUT/PATCH
    requests.

    The ``_handle_<verb>`` hooks share identical routing logic, factored
    into :meth:`_handle_custom_verb`.

    Ideally, we get these additions merged upstream.
    """

    # default sort_keys. The Controllers can override this.
    SORT_KEYS = ['created_at', 'id']

    @property
    def central_api(self):
        # Lazily obtain the process-wide Central RPC client.
        return central_rpcapi.CentralAPI.get_instance()

    @property
    def pool_mgr_api(self):
        # Lazily obtain the process-wide Pool Manager RPC client.
        return pool_mgr_rpcapi.PoolManagerAPI.get_instance()

    def _apply_filter_params(self, params, accepted_filters, criterion):
        """Copy accepted query parameters into ``criterion``, translating the
        ``*`` glob into the SQL ``%`` wildcard.

        :param params: mapping of query-string parameters
        :param accepted_filters: names allowed as filters on this controller
        :param criterion: dict updated in place with the filter values
        :returns: the updated ``criterion`` dict
        :raises exceptions.BadRequest: if any parameter is not an accepted
            filter
        """
        invalid = []
        for k in params:
            if k in accepted_filters:
                criterion[k] = params[k].replace("*", "%")
            else:
                invalid.append(k)
        if invalid:
            raise exceptions.BadRequest(
                'Invalid filters %s' % ', '.join(invalid))
        return criterion

    def _handle_custom_verb(self, verb, remainder):
        """Shared routing for POST/PATCH/PUT/DELETE.

        Resolution order:
          1. no remaining path parts -> ``<verb>_all`` / ``<verb>``;
          2. a non-method attribute named after the next path part ->
             recurse into that sub-controller;
          3. otherwise -> ``<verb>_one`` / ``<verb>``.
        Aborts with HTTP 405 if nothing matches.
        """
        # route to a <verb>_all or <verb> if no additional parts are available
        if not remainder or remainder == ['']:
            controller = self._find_controller('%s_all' % verb, verb)
            if controller:
                return controller, []
            pecan.abort(405)

        controller = getattr(self, remainder[0], None)
        if controller and not inspect.ismethod(controller):
            return pecan.routing.lookup_controller(controller, remainder[1:])

        # finally, check for the regular <verb>_one/<verb> requests
        controller = self._find_controller('%s_one' % verb, verb)
        if controller:
            return controller, remainder

        pecan.abort(405)

    def _handle_post(self, method, remainder):
        '''
        Routes ``POST`` actions to the appropriate controller.
        '''
        return self._handle_custom_verb('post', remainder)

    def _handle_patch(self, method, remainder):
        '''
        Routes ``PATCH`` actions to the appropriate controller.
        '''
        return self._handle_custom_verb('patch', remainder)

    def _handle_put(self, method, remainder):
        '''
        Routes ``PUT`` actions to the appropriate controller.
        '''
        return self._handle_custom_verb('put', remainder)

    def _handle_delete(self, method, remainder):
        '''
        Routes ``DELETE`` actions to the appropriate controller.
        '''
        return self._handle_custom_verb('delete', remainder)
| 38.537975 | 79 | 0.66267 |
46772ad7548bd1fffd521432640c8c7904415b2d | 2,275 | py | Python | aloe/aloe/common/plot_2d.py | muell-monster/google-research | 04d2024f4723bc4be3d639a668c19fb1f6a31478 | [
"Apache-2.0"
] | 3 | 2021-01-18T04:46:49.000Z | 2021-03-05T09:21:40.000Z | aloe/aloe/common/plot_2d.py | Alfaxad/google-research | 2c0043ecd507e75e2df9973a3015daf9253e1467 | [
"Apache-2.0"
] | 7 | 2021-11-10T19:44:38.000Z | 2022-02-10T06:48:39.000Z | aloe/aloe/common/plot_2d.py | Alfaxad/google-research | 2c0043ecd507e75e2df9973a3015daf9253e1467 | [
"Apache-2.0"
] | 4 | 2021-02-08T10:25:45.000Z | 2021-04-17T14:46:26.000Z | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: skip-file
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
def plot_heatmap(pdf_func, out_name, size=3):
    """Evaluate pdf_func on a [-size, size]^2 grid and save it as a heatmap."""
    resolution = 100
    axis_points = np.linspace(-size, size, resolution)
    grid_x, grid_y = np.meshgrid(axis_points, axis_points)
    # Flatten the grid into an (N, 2) array of coordinates for pdf_func.
    coords = np.stack([grid_x.flatten(), grid_y.flatten()]).transpose()
    heat = pdf_func(coords).reshape((resolution, resolution))
    plt.imshow(heat)
    plt.axis('equal')
    plt.axis('off')
    plt.savefig(out_name, bbox_inches='tight')
    plt.close()
def plot_samples(samples, out_name, lim=None, axis=True):
    """Scatter-plot 2-D samples and save the figure to out_name.

    lim, if given, fixes both axes to [-lim, lim]; axis=False hides the axes.
    """
    xs = samples[:, 0]
    ys = samples[:, 1]
    plt.scatter(xs, ys, marker='.')
    plt.axis('equal')
    if lim is not None:
        plt.xlim(-lim, lim)
        plt.ylim(-lim, lim)
    if not axis:
        plt.axis('off')
    plt.savefig(out_name, bbox_inches='tight')
    plt.close()
def plot_joint(dataset, samples, out_name):
    """Overlay training data (red crosses) and sampled points (blue dots) on
    a square axis sized to cover the dataset, saving two figures: one with a
    legend at out_name, and a legend-free 8x8 copy prefixed with 'none-'.
    """
    # Symmetric axis limit covering the most extreme dataset coordinate.
    x = np.max(dataset)
    y = np.max(-dataset)
    z = np.ceil(max((x, y)))
    plt.scatter(dataset[:, 0], dataset[:, 1], c='r', marker='x')
    plt.scatter(samples[:, 0], samples[:, 1], c='b', marker='.')
    plt.legend(['training data', 'ADE sampled'])
    plt.axis('equal')
    plt.xlim(-z, z)
    plt.ylim(-z, z)
    plt.savefig(out_name, bbox_inches='tight')
    plt.close()
    # Second figure: same scatter without the legend, saved as
    # '<dir>/none-<fname>'.
    # NOTE(review): '/' splitting assumes POSIX-style paths — confirm callers.
    fname = out_name.split('/')[-1]
    out_name = '/'.join(out_name.split('/')[:-1]) + '/none-' + fname
    plt.figure(figsize=(8, 8))
    plt.scatter(dataset[:, 0], dataset[:, 1], c='r', marker='x')
    plt.scatter(samples[:, 0], samples[:, 1], c='b', marker='.')
    plt.axis('equal')
    plt.xlim(-z, z)
    plt.ylim(-z, z)
    plt.savefig(out_name, bbox_inches='tight')
    plt.close()
| 30.743243 | 74 | 0.635165 |
597e717a8bd908f602603a195ec1538253c85daa | 257 | py | Python | example.py | TechieDheeraj/clrprint | 7214524fef2441cbd4e4ec37326e2e341e3077e7 | [
"MIT"
] | 32 | 2020-07-15T16:05:33.000Z | 2022-03-16T13:47:37.000Z | example.py | TechieDheeraj/clrprint | 7214524fef2441cbd4e4ec37326e2e341e3077e7 | [
"MIT"
] | 2 | 2021-09-20T14:27:01.000Z | 2022-01-21T16:00:22.000Z | example.py | TechieDheeraj/clrprint | 7214524fef2441cbd4e4ec37326e2e341e3077e7 | [
"MIT"
] | 3 | 2021-07-04T09:54:29.000Z | 2021-09-28T15:52:48.000Z | from clrprint import *
# Demo for clrprint: show the available colors, echo text in a user-chosen
# color, then print three words each in its own color.
clrhelp()
clr = clrinput("Choose any color: ").strip()
clrprint("This is the color chosen by you", clr=clr)  # fixed typo: "choosen" -> "chosen"
clr = clrinput("Choose 3 colors separated with <space> :").strip().split(' ')
clrprint("color1", "color2", "color3", clr=clr)
| 25.7 | 77 | 0.696498 |
773bcd77b68a0daa87ba795796fb741805cae17f | 1,567 | py | Python | tests/test_700_jscall.py | icarito/guy | 9477b548b91ae81bfc327dac7ba1ec80804f4f8d | [
"Apache-2.0"
] | null | null | null | tests/test_700_jscall.py | icarito/guy | 9477b548b91ae81bfc327dac7ba1ec80804f4f8d | [
"Apache-2.0"
] | null | null | null | tests/test_700_jscall.py | icarito/guy | 9477b548b91ae81bfc327dac7ba1ec80804f4f8d | [
"Apache-2.0"
] | null | null | null |
from guy import Guy,JSException
def test_jscall(runner):
    """Exercise Python->JS calls through guy's `self.js.<fn>` bridge:
    sync and async JS functions, plus error propagation as JSException."""
    class W1(Guy):
        # NOTE: guy renders the class __doc__ as the window's HTML/JS, so
        # this string is runtime behavior, not documentation — do not edit.
        __doc__="""
        <script>
        var ll=[];
        function adds(a,b) {
            ll.push(a)
            ll.push(b)
            return a+b
        }
        function makeAnError() {
            callInError();    // raise an exception on js side
        }
        async function ASyncAdds(a,b) {
            ll.push(a)
            ll.push(b)
            return a+b
        }
        guy.init( async function() {
            await new Promise(r => setTimeout(r, 100)); // wait, to be sure that init() is called before step1()
            await self.step1()
            await self.step2()
            self.stop( ll )
        })
        </script>
        """

        async def init(self):
            # Collect results on the Python side; call a sync JS function.
            self.ll=[]
            self.ll.append( await self.js.adds("A","B") )

        async def step1(self):
            # Sync and async JS functions are awaited the same way.
            self.ll.append( await self.js.adds("C","D") )
            self.ll.append( await self.js.ASyncAdds("E","F") )

        async def step2(self):
            # A call to a JS name that does not exist raises JSException.
            try:
                await self.js.UNKNOWNMETHOD("C","D")
            except JSException:
                self.ll.append("Unknown")
            # A JS function that throws also surfaces as JSException.
            try:
                await self.js.makeAnError()
            except JSException:
                self.ll.append("Error")

        def stop(self,jll):
            # jll is the JS-side log; self.ll is the Python-side log.
            assert jll==['A', 'B', 'C', 'D', 'E', 'F']
            assert self.ll==['AB', 'CD', 'EF', 'Unknown', 'Error']
            self.ok=True
            self.exit()

    t=W1()
    r=runner(t)
    assert r.ok
| 25.274194 | 112 | 0.45501 |
2affe96a177722fac83160d8521934a2141cb29a | 3,856 | py | Python | tools/mo/openvino/tools/mo/utils/runtime_info.py | pazamelin/openvino | b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48 | [
"Apache-2.0"
] | 1 | 2021-02-01T06:35:55.000Z | 2021-02-01T06:35:55.000Z | tools/mo/openvino/tools/mo/utils/runtime_info.py | pazamelin/openvino | b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48 | [
"Apache-2.0"
] | 58 | 2020-11-06T12:13:45.000Z | 2022-03-28T13:20:11.000Z | tools/mo/openvino/tools/mo/utils/runtime_info.py | pazamelin/openvino | b7e8ef910d7ed8e52326d14dc6fd53b71d16ed48 | [
"Apache-2.0"
] | 2 | 2021-07-14T07:40:50.000Z | 2021-07-27T01:40:03.000Z | # Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import abc
from collections import defaultdict
from typing import Dict
import numpy as np
from openvino.tools.mo.front.common.partial_infer.utils import int64_array
from openvino.tools.mo.middle.passes.convert_data_type import np_data_type_to_destination_type
class RTInfo:
    """
    Container for runtime information attached to a model element.

    Entries live in ``self.info``: the key is a ``(attribute_name, version)``
    tuple and the value is the attribute payload (an ``RTInfoElement``).

    Example of usage:
        rt_info = RTInfo()
        rt_info.info[('old_api_map_order', 0)] = OldAPIMapOrder()
    """

    def __init__(self):
        """Create an empty runtime-information store."""
        self.info = defaultdict(dict)

    def contains(self, attribute_name: str):
        """Return True when exactly one entry with this name is stored.

        More than one entry with the same name is considered corrupt state
        and triggers an assertion.
        """
        matching = [name for name, _ in self.info if name == attribute_name]
        assert len(matching) <= 1, 'Incorrect rt_info attribute, got more than one {}.'.format(attribute_name)
        return len(matching) > 0

    def get_attribute_version(self, attribute_name: str):
        """Return the version stored alongside the given attribute name."""
        for name, version in self.info:
            if name == attribute_name:
                return version
        raise Exception("rt_info does not contain attribute with name {}".format(attribute_name))
class RTInfoElement:
    """
    Class that stores element of runtime information.

    Base interface for a single runtime-info attribute; concrete attributes
    implement serialize()/get_version()/get_name().

    NOTE(review): this class does not inherit abc.ABC, so the
    @abc.abstractmethod decorators below are not enforced at instantiation
    time — subclasses are expected, but not forced, to override them.
    """
    @abc.abstractmethod
    def serialize(self, node) -> Dict:
        """
        Serialize method for RTInfoElement.
        """
    @abc.abstractmethod
    def get_version(self):
        """
        Get version of RTInfoElement.
        """
    @abc.abstractmethod
    def get_name(self):
        """
        Get name of RTInfoElement.
        """
class OldAPIMapOrder(RTInfoElement):
    """
    Runtime info attribute storing the transpose order required for
    obtaining an IR compatible with the old API.
    """

    def __init__(self, version=0):
        self.info = defaultdict(dict)
        self.version = version
        self.name = "old_api_map_order"

    def old_api_transpose_parameter(self, inv: int64_array):
        # Parameter nodes record the inverse transpose order.
        self.info['inverse_order'] = inv

    def old_api_transpose_result(self, order: int64_array):
        # Result nodes record the forward transpose order.
        self.info['order'] = order

    def serialize_old_api_map_for_parameter(self, node) -> Dict:
        """Render 'inverse_order' as a comma-separated attribute value."""
        if 'inverse_order' not in self.info:
            return {}
        rendered = ','.join(str(item) for item in self.info['inverse_order'])
        return {'value': rendered}

    def serialize_old_api_map_for_result(self, node) -> Dict:
        """Render 'order' as a comma-separated attribute value."""
        if 'order' not in self.info:
            return {}
        rendered = ','.join(str(item) for item in self.info['order'])
        return {'value': rendered}

    def serialize(self, node) -> Dict:
        """Serialize for the given node; only Parameter and Result nodes
        produce output, everything else serializes to an empty dict."""
        node_type = node.soft_get('type')
        if node_type == 'Parameter':
            return self.serialize_old_api_map_for_parameter(node)
        if node_type == 'Result':
            return self.serialize_old_api_map_for_result(node)
        return {}

    def get_version(self):
        """Return the attribute version."""
        return self.version

    def get_name(self):
        """Return the attribute name."""
        return self.name
class OldAPIMapElementType(RTInfoElement):
    """
    Runtime info attribute storing the legacy element type required for
    obtaining an IR compatible with the old API.
    """

    def __init__(self, version=0):
        self.info = defaultdict(dict)
        self.version = version
        self.name = "old_api_map_element_type"

    def set_legacy_type(self, legacy_type: np.dtype):
        """Remember the legacy (old API) element type for serialization."""
        self.info['legacy_type'] = legacy_type

    def serialize(self, node) -> Dict:
        """Serialize the stored legacy type to its destination type name."""
        if 'legacy_type' not in self.info:
            return {}
        destination = np_data_type_to_destination_type(self.info['legacy_type'])
        return {'value': destination}

    def get_version(self):
        """Return the attribute version."""
        return self.version

    def get_name(self):
        """Return the attribute name."""
        return self.name
| 29.435115 | 119 | 0.647303 |
93679ae8a6346ea3f96daa3d7ada57c8c0246d1c | 2,090 | py | Python | galaxy/util/odict.py | jmchilton/pulsar | 783b90cf0bce893a11c347fcaf6778b98e0bb062 | [
"Apache-2.0"
] | 1 | 2016-08-17T06:36:03.000Z | 2016-08-17T06:36:03.000Z | galaxy/util/odict.py | jmchilton/pulsar | 783b90cf0bce893a11c347fcaf6778b98e0bb062 | [
"Apache-2.0"
] | null | null | null | galaxy/util/odict.py | jmchilton/pulsar | 783b90cf0bce893a11c347fcaf6778b98e0bb062 | [
"Apache-2.0"
] | null | null | null | """
Ordered dictionary implementation.
"""
from UserDict import UserDict
class odict(UserDict):
    """
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/107747

    This dictionary class extends UserDict to record the order in which items are
    added. Calling keys(), values(), items(), etc. will return results in this
    order.

    NOTE: this module targets Python 2 (``from UserDict import UserDict``);
    it predates Python 3.7's insertion-ordered builtin dict.
    """
    def __init__(self, dict=None):
        # _keys must exist before UserDict.__init__, which may populate the
        # mapping (via __setitem__) from the optional initial dict.
        self._keys = []
        UserDict.__init__(self, dict)

    def __delitem__(self, key):
        UserDict.__delitem__(self, key)
        self._keys.remove(key)

    def __setitem__(self, key, item):
        UserDict.__setitem__(self, key, item)
        # Record first-insertion order; overwriting a key keeps its position.
        if key not in self._keys:
            self._keys.append(key)

    def clear(self):
        UserDict.clear(self)
        self._keys = []

    def copy(self):
        # Shallow copy preserving insertion order.
        new = odict()
        new.update(self)
        return new

    def items(self):
        return zip(self._keys, self.values())

    def keys(self):
        # Return a copy so callers cannot mutate the internal order list.
        return self._keys[:]

    def popitem(self):
        # Pop the most recently inserted (key, value) pair, LIFO style.
        try:
            key = self._keys[-1]
        except IndexError:
            raise KeyError('dictionary is empty')
        val = self[key]
        del self[key]
        return (key, val)

    def setdefault(self, key, failobj=None):
        if key not in self._keys:
            self._keys.append(key)
        return UserDict.setdefault(self, key, failobj)

    def update(self, dict):
        # Goes through __setitem__ so new keys keep insertion order.
        for (key, val) in dict.items():
            self.__setitem__(key, val)

    def values(self):
        return map(self.get, self._keys)

    def iterkeys(self):
        return iter(self._keys)

    def itervalues(self):
        for key in self._keys:
            yield self.get(key)

    def iteritems(self):
        for key in self._keys:
            yield key, self.get(key)

    def __iter__(self):
        for key in self._keys:
            yield key

    def reverse(self):
        # Reverse the iteration order in place.
        self._keys.reverse()

    def insert(self, index, key, item):
        # Insert a new key at a specific position in the iteration order.
        if key not in self._keys:
            self._keys.insert(index, key)
            UserDict.__setitem__(self, key, item)
e5df7205188c560831f70698f2e0addaddef4bb4 | 2,928 | py | Python | bigml/anomalytree.py | ayush1208/python-3 | 8b613225142bbe1dce8fda5c7f8d3acf07c181a7 | [
"Apache-2.0"
] | null | null | null | bigml/anomalytree.py | ayush1208/python-3 | 8b613225142bbe1dce8fda5c7f8d3acf07c181a7 | [
"Apache-2.0"
] | null | null | null | bigml/anomalytree.py | ayush1208/python-3 | 8b613225142bbe1dce8fda5c7f8d3acf07c181a7 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#!/usr/bin/env python
#
# Copyright 2014-2018 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tree structure for the BigML local Anomaly Detector
This module defines an auxiliary Tree structure that is used in the local
Anomaly Detector to score anomalies locally or embedded into your application
without needing to send requests to BigML.io.
"""
from bigml.predicates import Predicates
from bigml.util import sort_fields, utf8
class AnomalyTree(object):
    """An anomaly tree-like predictive model.
    """

    def __init__(self, tree, fields):
        """Recursively build the tree from its JSON structure.

        :param tree: dict with a 'predicates' entry and optional 'children'
        :param fields: model fields structure shared by every node
        """
        self.fields = fields
        if tree['predicates'] is True:
            # A literal True stands for "this node always matches".
            self.predicates = Predicates([True])
        else:
            self.predicates = Predicates(tree['predicates'])
        self.id = None
        children = []
        if 'children' in tree:
            for child in tree['children']:
                children.append(AnomalyTree(child, self.fields))
        self.children = children

    def list_fields(self, out):
        """Lists a description of the model's fields.

        Writes one '[name : optype]' line per field to `out` and returns
        the fields structure.
        """
        for field in [(val['name'], val['optype']) for _, val in
                      sort_fields(self.fields)]:
            out.write(utf8(u'[%-32s : %s]\n' % (field[0], field[1])))
        out.flush()
        return self.fields

    def depth(self, input_data, path=None, depth=0):
        """Returns the depth of the node that reaches the input data instance
        when ran through the tree, and the associated set of rules.

        If a node has any children whose
        predicates are all true given the instance, then the instance will
        flow through that child. If the node has no children or no
        children with all valid predicates, then it outputs the depth of the
        node.
        """
        if path is None:
            path = []
        # root node: if predicates are met, depth becomes 1, otherwise is 0
        if depth == 0:
            if not self.predicates.apply(input_data, self.fields):
                return depth, path
            depth += 1
        if self.children:
            for child in self.children:
                if child.predicates.apply(input_data, self.fields):
                    # First matching child wins; record its rule and recurse.
                    path.append(child.predicates.to_rule(self.fields))
                    return child.depth(input_data, path=path, depth=depth + 1)
        # Leaf (or no matching child): this node's depth is the answer.
        return depth, path
| 34.046512 | 79 | 0.633197 |
0a3bbfd9b2ce8010c30ef98b2784572dbd61f0bf | 5,103 | py | Python | jax/_src/lax/fft.py | sarahbton/jax | 4f32a4f236547c8bb389d7b4afcc2d2ce7589a4c | [
"Apache-2.0"
] | null | null | null | jax/_src/lax/fft.py | sarahbton/jax | 4f32a4f236547c8bb389d7b4afcc2d2ce7589a4c | [
"Apache-2.0"
] | null | null | null | jax/_src/lax/fft.py | sarahbton/jax | 4f32a4f236547c8bb389d7b4afcc2d2ce7589a4c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
import numpy as np
from jax._src.api import jit, linear_transpose, ShapeDtypeStruct
from jax.core import Primitive
from jax.interpreters import xla
from jax._src.util import prod
from jax._src import dtypes
from jax import lax
from jax.interpreters import ad
from jax.interpreters import batching
from jax._src.lib import xla_client
from jax._src.lib import pocketfft
xops = xla_client.ops
__all__ = [
"fft",
"fft_p",
]
def _promote_to_complex(arg):
  """Convert `arg` to the complex dtype obtained by combining its dtype with
  complex64 (complex64 or complex128 depending on input precision)."""
  dtype = dtypes.result_type(arg, np.complex64)
  return lax.convert_element_type(arg, dtype)

def _promote_to_real(arg):
  """Convert `arg` to the floating dtype obtained by combining its dtype with
  float32 (float32 or float64 depending on input precision)."""
  dtype = dtypes.result_type(arg, np.float32)
  return lax.convert_element_type(arg, dtype)

@partial(jit, static_argnums=(1, 2))
def fft(x, fft_type, fft_lengths):
  """Apply an FFT of kind `fft_type` over the trailing `fft_lengths` axes of x."""
  if fft_type == xla_client.FftType.RFFT:
    if np.iscomplexobj(x):
      raise ValueError("only real valued inputs supported for rfft")
    x = _promote_to_real(x)
  else:
    # FFT/IFFT/IRFFT all take complex input.
    x = _promote_to_complex(x)
  if len(fft_lengths) == 0:
    # XLA FFT doesn't support 0-rank.
    return x
  fft_lengths = tuple(fft_lengths)
  return fft_p.bind(x, fft_type=fft_type, fft_lengths=fft_lengths)

def fft_impl(x, fft_type, fft_lengths):
  # Eager evaluation path: lower the primitive through XLA.
  return xla.apply_primitive(fft_p, x, fft_type=fft_type, fft_lengths=fft_lengths)

# Complex counterpart of a real dtype (float32 -> complex64, float64 -> complex128).
_complex_dtype = lambda dtype: (np.zeros((), dtype) + np.zeros((), np.complex64)).dtype
# Real counterpart of a complex dtype (complex64 -> float32, ...).
_real_dtype = lambda dtype: np.finfo(dtype).dtype
_is_even = lambda x: x % 2 == 0
def fft_abstract_eval(x, fft_type, fft_lengths):
  """Shape/dtype rule for fft_p.

  RFFT halves the last transformed axis to n//2 + 1 (non-redundant
  coefficients) and yields a complex dtype; IRFFT restores the full
  `fft_lengths` with a real dtype; FFT/IFFT keep shape and dtype.
  """
  if fft_type == xla_client.FftType.RFFT:
    shape = (x.shape[:-len(fft_lengths)] + fft_lengths[:-1]
             + (fft_lengths[-1] // 2 + 1,))
    dtype = _complex_dtype(x.dtype)
  elif fft_type == xla_client.FftType.IRFFT:
    shape = x.shape[:-len(fft_lengths)] + fft_lengths
    dtype = _real_dtype(x.dtype)
  else:
    shape = x.shape
    dtype = x.dtype
  return x.update(shape=shape, dtype=dtype)

def fft_translation_rule(c, x, fft_type, fft_lengths):
  # Lower directly to XLA's Fft HLO op.
  return xops.Fft(x, fft_type, fft_lengths)

def _naive_rfft(x, fft_lengths):
  # Reference RFFT via a full complex FFT: keep only the first n//2 + 1
  # coefficients of the last axis (the rest are conjugate-redundant).
  y = fft(x, xla_client.FftType.FFT, fft_lengths)
  n = fft_lengths[-1]
  return y[..., : n//2 + 1]
@partial(jit, static_argnums=1)
def _rfft_transpose(t, fft_lengths):
  # The transpose of RFFT can't be expressed only in terms of irfft. Instead of
  # manually building up larger twiddle matrices (which would increase the
  # asymptotic complexity and is also rather complicated), we rely JAX to
  # transpose a naive RFFT implementation.
  dummy_shape = t.shape[:-len(fft_lengths)] + fft_lengths
  # ShapeDtypeStruct stands in for the (never materialized) primal input.
  dummy_primal = ShapeDtypeStruct(dummy_shape, _real_dtype(t.dtype))
  transpose = linear_transpose(
      partial(_naive_rfft, fft_lengths=fft_lengths), dummy_primal)
  result, = transpose(t)
  assert result.dtype == _real_dtype(t.dtype), (result.dtype, t.dtype)
  return result
def _irfft_transpose(t, fft_lengths):
  # The transpose of IRFFT is the RFFT of the cotangent times a scaling
  # factor and a mask. The mask scales the cotangent for the Hermitian
  # symmetric components of the RFFT by a factor of two, since these components
  # are de-duplicated in the RFFT.
  x = fft(t, xla_client.FftType.RFFT, fft_lengths)
  n = x.shape[-1]
  is_odd = fft_lengths[-1] % 2
  full = partial(lax.full_like, t, dtype=t.dtype)
  # Mask layout over the last axis: [1, 2, 2, ..., 2, (1 if n is even)].
  mask = lax.concatenate(
      [full(1.0, shape=(1,)),
       full(2.0, shape=(n - 2 + is_odd,)),
       full(1.0, shape=(1 - is_odd,))],
      dimension=0)
  #scale = 1 / prod(fft_lengths)
  # NOTE(review): the 1/prod(fft_lengths) normalization above is disabled and
  # scale is fixed at 1 — confirm this matches the intended FFT convention.
  scale = 1
  out = scale * mask * x
  assert out.dtype == _complex_dtype(t.dtype), (out.dtype, t.dtype)
  # Use JAX's convention for complex gradients
  # https://github.com/google/jax/issues/6223#issuecomment-807740707
  return lax.conj(out)
def fft_transpose_rule(t, operand, fft_type, fft_lengths):
  """Transpose rule for the linear fft primitive.

  RFFT and IRFFT need dedicated transposes; plain FFT/IFFT are (up to
  normalization) self-transposing under the same fft_type.
  """
  if fft_type == xla_client.FftType.RFFT:
    return (_rfft_transpose(t, fft_lengths),)
  if fft_type == xla_client.FftType.IRFFT:
    return (_irfft_transpose(t, fft_lengths),)
  return (fft(t, fft_type, fft_lengths),)
def fft_batching_rule(batched_args, batch_dims, fft_type, fft_lengths):
  """Batching rule: bring the batch axis to the front, then fft as usual."""
  operand, = batched_args
  batch_axis, = batch_dims
  shifted = batching.moveaxis(operand, batch_axis, 0)
  # The FFT only touches the trailing len(fft_lengths) axes, so a leading
  # batch axis is transparent; output batch dim is 0.
  return fft(shifted, fft_type, fft_lengths), 0
# Primitive definition and rule registrations for `fft`.
fft_p = Primitive('fft')
fft_p.def_impl(fft_impl)
fft_p.def_abstract_eval(fft_abstract_eval)
xla.translations[fft_p] = fft_translation_rule
# fft is linear, so autodiff only needs the transpose rule.
ad.deflinear2(fft_p, fft_transpose_rule)
batching.primitive_batchers[fft_p] = fft_batching_rule
if pocketfft:
  # Prefer the PocketFFT-backed kernel on CPU when the module is available.
  xla.backend_specific_translations['cpu'][fft_p] = pocketfft.pocketfft
| 34.714286 | 87 | 0.732902 |
cff47720811850c16a46d20888f54c8789fd05aa | 8,229 | py | Python | tests/impls/descriptor_generator/test_pytorch.py | joshanderson-kw/SMQTK-Descriptors | f5ca44be0d9c93779e2b323a28843db28341a2d5 | [
"BSD-3-Clause"
] | null | null | null | tests/impls/descriptor_generator/test_pytorch.py | joshanderson-kw/SMQTK-Descriptors | f5ca44be0d9c93779e2b323a28843db28341a2d5 | [
"BSD-3-Clause"
] | null | null | null | tests/impls/descriptor_generator/test_pytorch.py | joshanderson-kw/SMQTK-Descriptors | f5ca44be0d9c93779e2b323a28843db28341a2d5 | [
"BSD-3-Clause"
] | null | null | null | import os
import pickle
from typing import Any, Dict
import unittest
import unittest.mock as mock
import numpy as np
from smqtk_core.configuration import configuration_test_helper, make_default_config
from smqtk_dataprovider.impls.data_element.file import DataFileElement
from smqtk_image_io import ImageReader
from smqtk_image_io.impls.image_reader.pil_io import PilImageReader
from smqtk_descriptors import DescriptorGenerator
# noinspection PyProtectedMember
from smqtk_descriptors.impls.descriptor_generator.pytorch import (
TorchModuleDescriptorGenerator,
Resnet50SequentialTorchDescriptorGenerator,
AlignedReIDResNet50TorchDescriptorGenerator,
)
from tests import TEST_DATA_DIR
@unittest.skipUnless(TorchModuleDescriptorGenerator.is_usable(),
                     reason="TorchModuleDescriptorGenerator is not usable in "
                            "current environment.")
class TestTorchDescriptorGenerator (unittest.TestCase):
    """
    Unit tests for the Torch-module-based descriptor generator
    implementations (Resnet50Sequential and AlignedReIDResNet50).
    """

    # Fixtures shared by all tests: a sample image and a concrete reader.
    hopper_image_fp = os.path.join(TEST_DATA_DIR, 'grace_hopper.png')
    dummy_image_reader = PilImageReader()

    def test_impl_findable(self) -> None:
        """Both concrete generators should be discoverable as plugins."""
        self.assertIn(Resnet50SequentialTorchDescriptorGenerator,
                      DescriptorGenerator.get_impls())
        self.assertIn(AlignedReIDResNet50TorchDescriptorGenerator,
                      DescriptorGenerator.get_impls())

    def test_get_config(self) -> None:
        """Default config should cover every constructor parameter."""
        expected_params: Dict[str, Any] = {
            'image_reader': make_default_config(ImageReader.get_impls()),
            'image_load_threads': 1,
            'weights_filepath': None,
            'image_tform_threads': 1,
            'batch_size': 32,
            'use_gpu': False,
            'cuda_device': None,
            'normalize': None,
            'iter_runtime': False,
            'global_average_pool': False
        }
        # make sure that we're considering all constructor parameter
        # options
        default_params = TorchModuleDescriptorGenerator.get_default_config()
        assert set(default_params) == set(expected_params)

    @mock.patch('smqtk_descriptors.impls.descriptor_generator.pytorch'
                '.TorchModuleDescriptorGenerator._ensure_module')
    def test_config_cycle(self, m_tdg_ensure_module: mock.MagicMock) -> None:
        """
        Test being able to get an instances config and use that config to
        construct an equivalently parameterized instance. This test initializes
        all possible parameters to non-defaults.
        """
        # When every parameter is provided.
        g1 = Resnet50SequentialTorchDescriptorGenerator(self.dummy_image_reader,
                                                        image_load_threads=2,
                                                        weights_filepath="cool_filepath",
                                                        image_tform_threads=2,
                                                        batch_size=64,
                                                        use_gpu=True,
                                                        cuda_device=1,
                                                        normalize=1.0,
                                                        iter_runtime=True,
                                                        global_average_pool=True)
        for inst_g1 in configuration_test_helper(g1):
            assert isinstance(inst_g1.image_reader, type(self.dummy_image_reader))
            assert inst_g1.image_load_threads == 2
            assert inst_g1.weights_filepath == "cool_filepath"
            assert inst_g1.image_tform_threads == 2
            assert inst_g1.batch_size == 64
            assert inst_g1.use_gpu is True
            assert inst_g1.cuda_device == 1
            assert inst_g1.normalize == 1.0
            assert inst_g1.iter_runtime is True
            assert inst_g1.global_average_pool is True

        # Repeat for AlignedReIDResNet50
        g2 = AlignedReIDResNet50TorchDescriptorGenerator(self.dummy_image_reader,
                                                         image_load_threads=2,
                                                         weights_filepath="cool_filepath",
                                                         image_tform_threads=2,
                                                         batch_size=64,
                                                         use_gpu=True,
                                                         cuda_device=1,
                                                         normalize=1.0,
                                                         iter_runtime=True,
                                                         global_average_pool=True)
        for inst_g2 in configuration_test_helper(g2):
            assert isinstance(inst_g2.image_reader, type(self.dummy_image_reader))
            assert inst_g2.image_load_threads == 2
            assert inst_g2.weights_filepath == "cool_filepath"
            assert inst_g2.image_tform_threads == 2
            assert inst_g2.batch_size == 64
            assert inst_g2.use_gpu is True
            assert inst_g2.cuda_device == 1
            assert inst_g2.normalize == 1.0
            assert inst_g2.iter_runtime is True
            assert inst_g2.global_average_pool is True

    @mock.patch('smqtk_descriptors.impls.descriptor_generator.pytorch'
                '.TorchModuleDescriptorGenerator._ensure_module')
    def test_pickle_save_restore(self, m_tdg_ensure_module: mock.MagicMock) -> None:
        """Pickling round-trip should re-setup the module and keep config."""
        expected_params: Dict[str, Any] = {
            'image_reader': self.dummy_image_reader,
            'image_load_threads': 1,
            'weights_filepath': None,
            'image_tform_threads': 1,
            'batch_size': 32,
            'use_gpu': False,
            'cuda_device': None,
            'normalize': None,
            'iter_runtime': False,
            'global_average_pool': False
        }
        g = Resnet50SequentialTorchDescriptorGenerator(**expected_params)
        # Initialization sets up the network on construction.
        self.assertEqual(m_tdg_ensure_module.call_count, 1)
        g_pickled = pickle.dumps(g, -1)
        g2 = pickle.loads(g_pickled)
        # Network should be set up for the unpickled instance just like in
        # initial construction.
        self.assertEqual(m_tdg_ensure_module.call_count, 2)

        self.assertIsInstance(g2, Resnet50SequentialTorchDescriptorGenerator)
        self.assertEqual(g.get_config(), g2.get_config())

        # Repeat for AlignedReIDResNet50
        g3 = AlignedReIDResNet50TorchDescriptorGenerator(**expected_params)
        self.assertEqual(m_tdg_ensure_module.call_count, 3)
        g3_pickled = pickle.dumps(g3, -1)
        g4 = pickle.loads(g3_pickled)
        self.assertEqual(m_tdg_ensure_module.call_count, 4)

        # Fixed: check the UNPICKLED instance (g4), mirroring the g2 check
        # above; previously this asserted on g3, which is trivially true.
        self.assertIsInstance(g4, AlignedReIDResNet50TorchDescriptorGenerator)
        self.assertEqual(g3.get_config(), g4.get_config())

    def test_generate_arrays(self) -> None:
        """Both implementations should produce (nearly) the same descriptor."""
        g1 = Resnet50SequentialTorchDescriptorGenerator(self.dummy_image_reader)
        d_list_g1 = list(g1._generate_arrays(
            [DataFileElement(self.hopper_image_fp, readonly=True)]
        ))
        assert len(d_list_g1) == 1
        d_resnet_seq = d_list_g1[0]

        g2 = AlignedReIDResNet50TorchDescriptorGenerator(self.dummy_image_reader)
        d_list_g2 = list(g2._generate_arrays(
            [DataFileElement(self.hopper_image_fp, readonly=True)]
        ))
        assert len(d_list_g2) == 1
        d_aligned_reid = d_list_g2[0]

        # Check that the descriptors generated by both implementations are close
        np.testing.assert_allclose(d_resnet_seq, d_aligned_reid, 1e-4)

    def test_generate_arrays_no_data(self) -> None:
        """ Test that generation method correctly returns an empty iterable
        when no data is passed. """
        g1 = Resnet50SequentialTorchDescriptorGenerator(self.dummy_image_reader)
        r1 = list(g1._generate_arrays([]))
        assert r1 == []

        # Repeat for AlignedReIDResNet50
        g2 = AlignedReIDResNet50TorchDescriptorGenerator(self.dummy_image_reader)
        r2 = list(g2._generate_arrays([]))
        assert r2 == []
| 44.967213 | 90 | 0.607972 |
a79f4ae9d23cfddd09607334a80e7766cb4d4a48 | 2,390 | py | Python | venv/Lib/site-packages/tensorflow/_api/v2/saved_model/__init__.py | rexliu3/StockTradingBotCloud | 46b732b9c05f73bc0e856a3c4a16854b6d12e18e | [
"MIT"
] | 2 | 2020-09-30T00:11:09.000Z | 2021-10-04T13:00:38.000Z | venv/lib/python3.6/site-packages/tensorflow_core/_api/v2/saved_model/__init__.py | databill86/HyperFoods | 9267937c8c70fd84017c0f153c241d2686a356dd | [
"MIT"
] | null | null | null | venv/lib/python3.6/site-packages/tensorflow_core/_api/v2/saved_model/__init__.py | databill86/HyperFoods | 9267937c8c70fd84017c0f153c241d2686a356dd | [
"MIT"
] | 1 | 2020-06-28T11:47:47.000Z | 2020-06-28T11:47:47.000Z | # This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.saved_model namespace.
"""
from __future__ import print_function as _print_function
import sys as _sys
from tensorflow.python.saved_model.constants import ASSETS_DIRECTORY
from tensorflow.python.saved_model.constants import ASSETS_KEY
from tensorflow.python.saved_model.constants import DEBUG_DIRECTORY
from tensorflow.python.saved_model.constants import DEBUG_INFO_FILENAME_PB
from tensorflow.python.saved_model.constants import SAVED_MODEL_FILENAME_PB
from tensorflow.python.saved_model.constants import SAVED_MODEL_FILENAME_PBTXT
from tensorflow.python.saved_model.constants import SAVED_MODEL_SCHEMA_VERSION
from tensorflow.python.saved_model.constants import VARIABLES_DIRECTORY
from tensorflow.python.saved_model.constants import VARIABLES_FILENAME
from tensorflow.python.saved_model.load import load
from tensorflow.python.saved_model.loader_impl import contains_saved_model
from tensorflow.python.saved_model.save import save
from tensorflow.python.saved_model.save_options import SaveOptions
from tensorflow.python.saved_model.signature_constants import CLASSIFY_INPUTS
from tensorflow.python.saved_model.signature_constants import CLASSIFY_METHOD_NAME
from tensorflow.python.saved_model.signature_constants import CLASSIFY_OUTPUT_CLASSES
from tensorflow.python.saved_model.signature_constants import CLASSIFY_OUTPUT_SCORES
from tensorflow.python.saved_model.signature_constants import DEFAULT_SERVING_SIGNATURE_DEF_KEY
from tensorflow.python.saved_model.signature_constants import PREDICT_INPUTS
from tensorflow.python.saved_model.signature_constants import PREDICT_METHOD_NAME
from tensorflow.python.saved_model.signature_constants import PREDICT_OUTPUTS
from tensorflow.python.saved_model.signature_constants import REGRESS_INPUTS
from tensorflow.python.saved_model.signature_constants import REGRESS_METHOD_NAME
from tensorflow.python.saved_model.signature_constants import REGRESS_OUTPUTS
from tensorflow.python.saved_model.tag_constants import GPU
from tensorflow.python.saved_model.tag_constants import SERVING
from tensorflow.python.saved_model.tag_constants import TPU
from tensorflow.python.saved_model.tag_constants import TRAINING
from tensorflow.python.training.tracking.tracking import Asset
del _print_function
| 58.292683 | 95 | 0.891213 |
fa6592321622c39bf68313ea21a71c0ba96be05c | 921 | py | Python | blogapi/routers.py | mohammadanarul/drf-blog-api | 25d6d72235b2d995639ac1eed5367cf6a8a1535c | [
"MIT"
] | null | null | null | blogapi/routers.py | mohammadanarul/drf-blog-api | 25d6d72235b2d995639ac1eed5367cf6a8a1535c | [
"MIT"
] | null | null | null | blogapi/routers.py | mohammadanarul/drf-blog-api | 25d6d72235b2d995639ac1eed5367cf6a8a1535c | [
"MIT"
] | null | null | null | from rest_framework.routers import DefaultRouter
from accounts.views import AccountModelViewset
from posts.views import PostModelViewset
from comments.views import CommentViewSet
from category.views import CategoryModelViewset
from reports.views import PostReportModelViewset
from likes.views import LikeModelViewset
from favorites.views import FavoriteModelViewSet
# URL router configuration: one REST endpoint per registered viewset.
router = DefaultRouter()

# (prefix, viewset) pairs; each basename equals its prefix.
_VIEWSET_ROUTES = (
    ('accounts', AccountModelViewset),
    ('categories', CategoryModelViewset),
    ('posts', PostModelViewset),
    ('likes', LikeModelViewset),
    ('favorites', FavoriteModelViewSet),
    ('reports', PostReportModelViewset),
    ('comments', CommentViewSet),
)

# Registration order matches the original explicit register() calls.
for _prefix, _viewset in _VIEWSET_ROUTES:
    router.register(_prefix, _viewset, basename=_prefix)

urlpatterns = router.urls
6f8aea72e93ea157ae2c4c5815b4039df739b50b | 955 | py | Python | vendor-local/lib/python/celery/loaders/__init__.py | Mozilla-GitHub-Standards/6f0d85288b5b0ef8beecb60345173dc14c98e40f48e1307a444ab1e08231e695 | bf6a382913901ad193d907f022086931df0de8c4 | [
"BSD-3-Clause"
] | 1 | 2015-07-13T03:29:04.000Z | 2015-07-13T03:29:04.000Z | vendor-local/lib/python/celery/loaders/__init__.py | Mozilla-GitHub-Standards/6f0d85288b5b0ef8beecb60345173dc14c98e40f48e1307a444ab1e08231e695 | bf6a382913901ad193d907f022086931df0de8c4 | [
"BSD-3-Clause"
] | 2 | 2015-03-03T23:02:19.000Z | 2019-03-30T04:45:51.000Z | vendor-local/lib/python/celery/loaders/__init__.py | Mozilla-GitHub-Standards/6f0d85288b5b0ef8beecb60345173dc14c98e40f48e1307a444ab1e08231e695 | bf6a382913901ad193d907f022086931df0de8c4 | [
"BSD-3-Clause"
] | 2 | 2016-04-15T11:43:05.000Z | 2016-04-15T11:43:15.000Z | # -*- coding: utf-8 -*-
"""
celery.loaders
~~~~~~~~~~~~~~
Loaders define how configuration is read, what happens
when workers start, when tasks are executed and so on.
"""
from __future__ import absolute_import
from celery._state import current_app
from celery.utils import deprecated
from celery.utils.imports import symbol_by_name
# Short aliases accepted by `get_loader_cls`, mapped to fully qualified
# "module:Class" paths resolved lazily via `symbol_by_name`.
LOADER_ALIASES = {'app': 'celery.loaders.app:AppLoader',
                  'default': 'celery.loaders.default:Loader',
                  'django': 'djcelery.loaders:DjangoLoader'}


def get_loader_cls(loader):
    """Get loader class by name/alias.

    `loader` may be an alias from :data:`LOADER_ALIASES`, a dotted
    "module:Class" path, or an already-imported class.
    """
    return symbol_by_name(loader, LOADER_ALIASES)
@deprecated(deprecation='2.5', removal='4.0',
            alternative='celery.current_app.loader')
def current_loader():
    """Deprecated alias: return the loader of the currently active app."""
    return current_app.loader
@deprecated(deprecation='2.5', removal='4.0',
            alternative='celery.current_app.conf')
def load_settings():
    """Deprecated alias: return the configuration of the current app."""
    return current_app.conf
| 26.527778 | 61 | 0.686911 |
c53ca4e85509ee364361704b88585ccfe4dd9f49 | 757 | py | Python | safe_transaction_service/history/migrations/0026_auto_20201030_1355.py | byteflyfunny/safe-transaction-service | 2a1a855d9881181a57692057aeb91c9fd8ae3de5 | [
"MIT"
] | 67 | 2019-08-16T16:26:42.000Z | 2022-03-21T20:32:43.000Z | safe_transaction_service/history/migrations/0026_auto_20201030_1355.py | byteflyfunny/safe-transaction-service | 2a1a855d9881181a57692057aeb91c9fd8ae3de5 | [
"MIT"
] | 550 | 2019-07-11T12:09:06.000Z | 2022-03-31T16:32:00.000Z | safe_transaction_service/history/migrations/0026_auto_20201030_1355.py | byteflyfunny/safe-transaction-service | 2a1a855d9881181a57692057aeb91c9fd8ae3de5 | [
"MIT"
] | 83 | 2019-12-06T11:22:32.000Z | 2022-03-30T10:09:22.000Z | # Generated by Django 3.1.2 on 2020-10-30 13:55
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("history", "0025_auto_20201015_1147"),
]
operations = [
# Create btree index for EthereumEvents
migrations.RunSQL(
"CREATE INDEX history_ethereumevent_arguments_to ON history_ethereumevent USING BTREE (((arguments->'to')::text))",
reverse_sql="DROP INDEX history_ethereumevent_arguments_to",
),
migrations.RunSQL(
"CREATE INDEX history_ethereumevent_arguments_from ON history_ethereumevent USING BTREE (((arguments->'from')::text))",
reverse_sql="DROP INDEX history_ethereumevent_arguments_from",
),
]
| 32.913043 | 131 | 0.676354 |
8b35ea84035e6e9cc13c47dcba27fb27fac84514 | 507 | py | Python | setup.py | MentalBlood/sharpener | e931bc84cc80a86551388ec267e591513a689ac0 | [
"MIT"
] | null | null | null | setup.py | MentalBlood/sharpener | e931bc84cc80a86551388ec267e591513a689ac0 | [
"MIT"
] | null | null | null | setup.py | MentalBlood/sharpener | e931bc84cc80a86551388ec267e591513a689ac0 | [
"MIT"
] | null | null | null | import os
from setuptools import setup, find_packages
if __name__ == '__main__':
    # Use the README as the PyPI long description when it is present.
    readme_name = 'README.md'
    long_description = ''
    if os.path.exists(readme_name):
        with open(readme_name, encoding='utf-8') as readme:
            long_description = readme.read()

    setup(
        name='sharpener',
        version='1.0.0',
        description='Handy profiling/benchmarking tool',
        long_description=long_description,
        long_description_content_type='text/markdown',
        author='mentalblood',
        install_requires=['rich', 'tqdm'],
        packages=find_packages(),
    )
| 19.5 | 50 | 0.706114 |
72b01d05af6c2821290c6f04c53dfab973ea22c7 | 6,765 | py | Python | lib/model/test.py | insigh/Faster-RCNN-Tensorflow | 4f446a4c1ebefcf6d92b5e01d2b6396bcbbf1a8d | [
"MIT"
] | null | null | null | lib/model/test.py | insigh/Faster-RCNN-Tensorflow | 4f446a4c1ebefcf6d92b5e01d2b6396bcbbf1a8d | [
"MIT"
] | 1 | 2019-04-04T14:19:40.000Z | 2019-04-06T03:32:22.000Z | lib/model/test.py | insigh/Faster-RCNN-Tensorflow | 4f446a4c1ebefcf6d92b5e01d2b6396bcbbf1a8d | [
"MIT"
] | null | null | null | # --------------------------------------------------------
# Tensorflow Faster R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Xinlei Chen
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import cv2
import numpy as np
try:
import cPickle as pickle
except ImportError:
import pickle
import os
import math
from utils.timer import Timer
from utils.blob import im_list_to_blob
from model.config import cfg, get_output_dir
from model.bbox_transform import clip_boxes, bbox_transform_inv
from model.nms_wrapper import nms
def _get_image_blob(im):
  """Converts an image into a network input.
  Arguments:
    im (ndarray): a color image in BGR order
  Returns:
    blob (ndarray): a data blob holding an image pyramid
    im_scale_factors (list): list of image scales (relative to im) used
      in the image pyramid
  """
  im_orig = im.astype(np.float32, copy=True)
  # Mean-subtract using the configured per-channel pixel means.
  im_orig -= cfg.PIXEL_MEANS

  im_shape = im_orig.shape
  im_size_min = np.min(im_shape[0:2])
  im_size_max = np.max(im_shape[0:2])

  processed_ims = []
  im_scale_factors = []

  # One pyramid level per configured test scale; each level resizes the
  # image so its shorter side equals `target_size` (capped by MAX_SIZE).
  for target_size in cfg.TEST.SCALES:
    im_scale = float(target_size) / float(im_size_min)
    # Prevent the biggest axis from being more than MAX_SIZE
    if np.round(im_scale * im_size_max) > cfg.TEST.MAX_SIZE:
      im_scale = float(cfg.TEST.MAX_SIZE) / float(im_size_max)
    im = cv2.resize(im_orig, None, None, fx=im_scale, fy=im_scale,
                    interpolation=cv2.INTER_LINEAR)
    im_scale_factors.append(im_scale)
    processed_ims.append(im)

  # Create a blob to hold the input images
  blob = im_list_to_blob(processed_ims)

  return blob, np.array(im_scale_factors)
def _get_blobs(im):
  """Convert an image and RoIs within that image into network inputs.

  Returns a blobs dict (currently only the 'data' image blob) and the
  list of pyramid scale factors from :func:`_get_image_blob`.
  """
  blobs = {}
  blobs['data'], im_scale_factors = _get_image_blob(im)

  return blobs, im_scale_factors
def _clip_boxes(boxes, im_shape):
"""Clip boxes to image boundaries."""
# x1 >= 0
boxes[:, 0::4] = np.maximum(boxes[:, 0::4], 0)
# y1 >= 0
boxes[:, 1::4] = np.maximum(boxes[:, 1::4], 0)
# x2 < im_shape[1]
boxes[:, 2::4] = np.minimum(boxes[:, 2::4], im_shape[1] - 1)
# y2 < im_shape[0]
boxes[:, 3::4] = np.minimum(boxes[:, 3::4], im_shape[0] - 1)
return boxes
def _rescale_boxes(boxes, inds, scales):
"""Rescale boxes according to image rescaling."""
for i in range(boxes.shape[0]):
boxes[i, :] = boxes[i, :] / scales[int(inds[i])]
return boxes
def im_detect(sess, net, im):
  """Run the detection network on one image.

  Arguments:
    sess: TensorFlow session to run the network in.
    net: network object exposing `test_image(sess, data, im_info)`.
    im (ndarray): BGR image as loaded by cv2.
  Returns:
    scores (ndarray): (num_rois, num_classes) class scores.
    pred_boxes (ndarray): (num_rois, 4 * num_classes) per-class boxes.
  """
  blobs, im_scales = _get_blobs(im)
  assert len(im_scales) == 1, "Only single-image batch implemented"

  im_blob = blobs['data']
  # im_info = (height, width, scale) of the resized input blob.
  blobs['im_info'] = np.array([im_blob.shape[1], im_blob.shape[2], im_scales[0]], dtype=np.float32)

  _, scores, bbox_pred, rois = net.test_image(sess, blobs['data'], blobs['im_info'])

  # Map RoIs back to the original image coordinate frame.
  boxes = rois[:, 1:5] / im_scales[0]
  scores = np.reshape(scores, [scores.shape[0], -1])
  bbox_pred = np.reshape(bbox_pred, [bbox_pred.shape[0], -1])
  if cfg.TEST.BBOX_REG:
    # Apply bounding-box regression deltas
    box_deltas = bbox_pred
    pred_boxes = bbox_transform_inv(boxes, box_deltas)
    pred_boxes = _clip_boxes(pred_boxes, im.shape)
  else:
    # Simply repeat the boxes, once for each class
    pred_boxes = np.tile(boxes, (1, scores.shape[1]))

  return scores, pred_boxes
def apply_nms(all_boxes, thresh):
  """Apply non-maximum suppression to all predicted boxes output by the
  test_net method.

  Arguments:
    all_boxes: nested list indexed [class][image]; each entry is either an
      empty list or an (N, 5) array of (x1, y1, x2, y2, score) detections.
    thresh: NMS overlap threshold, forwarded to `nms`.
  Returns:
    A nested list of the same shape holding the post-NMS detections.
  """
  num_classes = len(all_boxes)
  num_images = len(all_boxes[0])
  nms_boxes = [[[] for _ in range(num_images)] for _ in range(num_classes)]
  for cls_ind in range(num_classes):
    for im_ind in range(num_images):
      dets = all_boxes[cls_ind][im_ind]
      # Fixed: `dets == []` performed a (deprecated) element-wise numpy
      # comparison rather than an emptiness test; use len() which works for
      # both plain lists and arrays.
      if len(dets) == 0:
        continue

      x1 = dets[:, 0]
      y1 = dets[:, 1]
      x2 = dets[:, 2]
      y2 = dets[:, 3]
      # Drop degenerate boxes with non-positive width or height.
      inds = np.where((x2 > x1) & (y2 > y1))[0]
      dets = dets[inds, :]
      if len(dets) == 0:
        continue

      keep = nms(dets, thresh)
      if len(keep) == 0:
        continue
      nms_boxes[cls_ind][im_ind] = dets[keep, :].copy()
  return nms_boxes
def test_net(sess, net, imdb, weights_filename, max_per_image=100, thresh=0.):
  """Test a Fast R-CNN network on an image database.

  Runs `im_detect` over every image in `imdb`, applies per-class NMS,
  optionally caps detections per image, pickles the raw detections to the
  output directory, and finally invokes the imdb's evaluation.
  """
  np.random.seed(cfg.RNG_SEED)
  num_images = len(imdb.image_index)
  # all detections are collected into:
  #  all_boxes[cls][image] = N x 5 array of detections in
  #  (x1, y1, x2, y2, score)
  all_boxes = [[[] for _ in range(num_images)]
               for _ in range(imdb.num_classes)]

  output_dir = get_output_dir(imdb, weights_filename)
  # timers
  _t = {'im_detect': Timer(), 'misc': Timer()}

  for i in range(num_images):
    im = cv2.imread(imdb.image_path_at(i))

    _t['im_detect'].tic()
    scores, boxes = im_detect(sess, net, im)
    _t['im_detect'].toc()

    _t['misc'].tic()

    # skip j = 0, because it's the background class
    for j in range(1, imdb.num_classes):
      inds = np.where(scores[:, j] > thresh)[0]
      cls_scores = scores[inds, j]
      cls_boxes = boxes[inds, j * 4:(j + 1) * 4]
      cls_dets = np.hstack((cls_boxes, cls_scores[:, np.newaxis])) \
        .astype(np.float32, copy=False)
      keep = nms(cls_dets, cfg.TEST.NMS)
      cls_dets = cls_dets[keep, :]
      all_boxes[j][i] = cls_dets

    # Limit to max_per_image detections *over all classes*
    if max_per_image > 0:
      # Global score threshold = the max_per_image-th best score this image.
      image_scores = np.hstack([all_boxes[j][i][:, -1]
                                for j in range(1, imdb.num_classes)])
      if len(image_scores) > max_per_image:
        image_thresh = np.sort(image_scores)[-max_per_image]
        for j in range(1, imdb.num_classes):
          keep = np.where(all_boxes[j][i][:, -1] >= image_thresh)[0]
          all_boxes[j][i] = all_boxes[j][i][keep, :]
    _t['misc'].toc()

    print('im_detect: {:d}/{:d} {:.3f}s {:.3f}s' \
        .format(i + 1, num_images, _t['im_detect'].average_time,
                _t['misc'].average_time))

  det_file = os.path.join(output_dir, 'detections.pkl')
  with open(det_file, 'wb') as f:
    pickle.dump(all_boxes, f, pickle.HIGHEST_PROTOCOL)

  print('Evaluating detections')
  imdb.evaluate_detections(all_boxes, output_dir)
| 33.656716 | 101 | 0.597783 |
a13d4fbe17ce14eaec64b3f601a86a6cfc9a9b0f | 17,336 | py | Python | unicom-task/tenscf_rely/Cryptodome/SelfTest/Cipher/common.py | Charles-Hello/myScripts | 07f2d0fe51ab3581d812de64c57110bcad6219cd | [
"MIT"
] | 3 | 2021-12-06T15:35:58.000Z | 2021-12-06T15:57:18.000Z | unicom-task/tenscf_rely/Cryptodome/SelfTest/Cipher/common.py | Charles-Hello/myScripts | 07f2d0fe51ab3581d812de64c57110bcad6219cd | [
"MIT"
] | null | null | null | unicom-task/tenscf_rely/Cryptodome/SelfTest/Cipher/common.py | Charles-Hello/myScripts | 07f2d0fe51ab3581d812de64c57110bcad6219cd | [
"MIT"
] | 2 | 2022-03-21T13:13:50.000Z | 2022-03-24T15:09:59.000Z | # -*- coding: utf-8 -*-
#
# SelfTest/Hash/common.py: Common code for Cryptodome.SelfTest.Hash
#
# Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-testing for PyCryptodome hash modules"""
import unittest
from binascii import a2b_hex, b2a_hex, hexlify
from Cryptodome.Util.py3compat import b
from Cryptodome.Util.strxor import strxor_c
class _NoDefault: pass # sentinel object
def _extract(d, k, default=_NoDefault):
"""Get an item from a dictionary, and remove it from the dictionary."""
try:
retval = d[k]
except KeyError:
if default is _NoDefault:
raise
return default
del d[k]
return retval
# Generic cipher test case
class CipherSelfTest(unittest.TestCase):
    """Known-answer test for one cipher: checks that encrypt/decrypt of a
    given key/plaintext/ciphertext triple (hex-encoded) matches, and that
    the operation is repeatable. Extra ``params`` keys not consumed in
    ``__init__`` are forwarded verbatim to ``module.new``.
    """

    def __init__(self, module, params):
        unittest.TestCase.__init__(self)
        self.module = module

        # Extract the parameters
        params = params.copy()
        self.description = _extract(params, 'description')
        self.key = b(_extract(params, 'key'))
        self.plaintext = b(_extract(params, 'plaintext'))
        self.ciphertext = b(_extract(params, 'ciphertext'))
        self.module_name = _extract(params, 'module_name', None)
        self.assoc_data = _extract(params, 'assoc_data', None)
        self.mac = _extract(params, 'mac', None)
        if self.assoc_data:
            self.mac = b(self.mac)

        mode = _extract(params, 'mode', None)
        self.mode_name = str(mode)

        if mode is not None:
            # Block cipher
            self.mode = getattr(self.module, "MODE_" + mode)

            # Accept either 'iv' or 'nonce' as the initialization value.
            self.iv = _extract(params, 'iv', None)
            if self.iv is None:
                self.iv = _extract(params, 'nonce', None)
            if self.iv is not None:
                self.iv = b(self.iv)
        else:
            # Stream cipher
            self.mode = None
            self.iv = _extract(params, 'iv', None)
            if self.iv is not None:
                self.iv = b(self.iv)

        self.extra_params = params

    def shortDescription(self):
        return self.description

    def _new(self):
        # Build a fresh cipher object from the stored (hex) key, optional
        # mode and IV, plus any leftover keyword parameters.
        params = self.extra_params.copy()
        key = a2b_hex(self.key)

        old_style = []
        if self.mode is not None:
            old_style = [ self.mode ]
        if self.iv is not None:
            old_style += [ a2b_hex(self.iv) ]

        return self.module.new(key, *old_style, **params)

    def isMode(self, name):
        # True when this test's mode equals the module's MODE_<name>.
        if not hasattr(self.module, "MODE_"+name):
            return False
        return self.mode == getattr(self.module, "MODE_"+name)

    def runTest(self):
        plaintext = a2b_hex(self.plaintext)
        ciphertext = a2b_hex(self.ciphertext)
        assoc_data = []
        if self.assoc_data:
            assoc_data = [ a2b_hex(b(x)) for x in self.assoc_data]

        ct = None
        pt = None

        #
        # Repeat the same encryption or decryption twice and verify
        # that the result is always the same
        #
        for i in range(2):
            cipher = self._new()
            decipher = self._new()

            # Only AEAD modes
            for comp in assoc_data:
                cipher.update(comp)
                decipher.update(comp)

            ctX = b2a_hex(cipher.encrypt(plaintext))
            ptX = b2a_hex(decipher.decrypt(ciphertext))

            if ct:
                self.assertEqual(ct, ctX)
                self.assertEqual(pt, ptX)
            ct, pt = ctX, ptX

        self.assertEqual(self.ciphertext, ct)  # encrypt
        self.assertEqual(self.plaintext, pt)   # decrypt

        if self.mac:
            mac = b2a_hex(cipher.digest())
            self.assertEqual(self.mac, mac)
            decipher.verify(a2b_hex(self.mac))
class CipherStreamingSelfTest(CipherSelfTest):
    """Feed the known-answer data in 3-byte chunks to verify that the
    cipher behaves like a stream cipher (chunked output concatenates to the
    whole-message result)."""

    def shortDescription(self):
        desc = self.module_name
        if self.mode is not None:
            desc += " in %s mode" % (self.mode_name,)
        return "%s should behave like a stream cipher" % (desc,)

    def runTest(self):
        plaintext = a2b_hex(self.plaintext)
        ciphertext = a2b_hex(self.ciphertext)

        # The cipher should work like a stream cipher

        # Test counter mode encryption, 3 bytes at a time
        ct3 = []
        cipher = self._new()
        for i in range(0, len(plaintext), 3):
            ct3.append(cipher.encrypt(plaintext[i:i+3]))
        ct3 = b2a_hex(b("").join(ct3))
        self.assertEqual(self.ciphertext, ct3)  # encryption (3 bytes at a time)

        # Test counter mode decryption, 3 bytes at a time
        # NOTE(review): this deliberately calls encrypt() on the ciphertext,
        # relying on stream/CTR encryption and decryption being the same
        # XOR operation — confirm that holds for every tested mode.
        pt3 = []
        cipher = self._new()
        for i in range(0, len(ciphertext), 3):
            pt3.append(cipher.encrypt(ciphertext[i:i+3]))
        # PY3K: This is meant to be text, do not change to bytes (data)
        pt3 = b2a_hex(b("").join(pt3))
        self.assertEqual(self.plaintext, pt3)  # decryption (3 bytes at a time)
class RoundtripTest(unittest.TestCase):
    """Property test: decrypt(encrypt(p)) == p for ECB mode with a fixed key
    and a repeated plaintext."""

    def __init__(self, module, params):
        from Cryptodome import Random
        unittest.TestCase.__init__(self)
        self.module = module
        # Random IV is prepared here although runTest currently only
        # exercises ECB (which takes no IV).
        self.iv = Random.get_random_bytes(module.block_size)
        self.key = b(params['key'])
        self.plaintext = 100 * b(params['plaintext'])
        self.module_name = params.get('module_name', None)

    def shortDescription(self):
        return """%s .decrypt() output of .encrypt() should not be garbled""" % (self.module_name,)

    def runTest(self):
        ## ECB mode
        mode = self.module.MODE_ECB
        encryption_cipher = self.module.new(a2b_hex(self.key), mode)
        ciphertext = encryption_cipher.encrypt(self.plaintext)
        decryption_cipher = self.module.new(a2b_hex(self.key), mode)
        decrypted_plaintext = decryption_cipher.decrypt(ciphertext)
        self.assertEqual(self.plaintext, decrypted_plaintext)
class IVLengthTest(unittest.TestCase):
    """Check that passing an IV where none belongs is rejected."""

    def __init__(self, module, params):
        unittest.TestCase.__init__(self)
        self.module = module
        self.key = b(params['key'])

    def shortDescription(self):
        return "Check that all modes except MODE_ECB and MODE_CTR require an IV of the proper length"

    def runTest(self):
        # ECB takes no IV at all, so supplying one (even empty) must raise.
        # NOTE(review): despite the description above, only the ECB case is
        # exercised here — confirm whether other modes are covered elsewhere.
        self.assertRaises(TypeError, self.module.new, a2b_hex(self.key),
                          self.module.MODE_ECB, b(""))

    def _dummy_counter(self):
        # Helper producing a constant zero counter block.
        return "\0" * self.module.block_size
class NoDefaultECBTest(unittest.TestCase):
    """Check that omitting the mode raises TypeError (no implicit ECB)."""

    def __init__(self, module, params):
        unittest.TestCase.__init__(self)
        self.module = module
        self.key = b(params['key'])

    def runTest(self):
        # A missing mode argument must not silently default to ECB.
        self.assertRaises(TypeError, self.module.new, a2b_hex(self.key))
class BlockSizeTest(unittest.TestCase):
    """Check that a cipher object reports the same block_size as its module."""

    def __init__(self, module, params):
        unittest.TestCase.__init__(self)
        self.module = module
        self.key = a2b_hex(b(params['key']))

    def runTest(self):
        cipher = self.module.new(self.key, self.module.MODE_ECB)
        self.assertEqual(cipher.block_size, self.module.block_size)
class ByteArrayTest(unittest.TestCase):
    """Verify we can use bytearray's for encrypting and decrypting"""

    # Parameter extraction mirrors CipherSelfTest.__init__; the test body
    # differs in wrapping all inputs in bytearray().
    def __init__(self, module, params):
        unittest.TestCase.__init__(self)
        self.module = module

        # Extract the parameters
        params = params.copy()
        self.description = _extract(params, 'description')
        self.key = b(_extract(params, 'key'))
        self.plaintext = b(_extract(params, 'plaintext'))
        self.ciphertext = b(_extract(params, 'ciphertext'))
        self.module_name = _extract(params, 'module_name', None)
        self.assoc_data = _extract(params, 'assoc_data', None)
        self.mac = _extract(params, 'mac', None)
        if self.assoc_data:
            self.mac = b(self.mac)

        mode = _extract(params, 'mode', None)
        self.mode_name = str(mode)

        if mode is not None:
            # Block cipher
            self.mode = getattr(self.module, "MODE_" + mode)

            # Accept either 'iv' or 'nonce' as the initialization value.
            self.iv = _extract(params, 'iv', None)
            if self.iv is None:
                self.iv = _extract(params, 'nonce', None)
            if self.iv is not None:
                self.iv = b(self.iv)
        else:
            # Stream cipher
            self.mode = None
            self.iv = _extract(params, 'iv', None)
            if self.iv is not None:
                self.iv = b(self.iv)

        self.extra_params = params

    def _new(self):
        # Same construction logic as CipherSelfTest._new.
        params = self.extra_params.copy()
        key = a2b_hex(self.key)

        old_style = []
        if self.mode is not None:
            old_style = [ self.mode ]
        if self.iv is not None:
            old_style += [ a2b_hex(self.iv) ]

        return self.module.new(key, *old_style, **params)

    def runTest(self):
        plaintext = a2b_hex(self.plaintext)
        ciphertext = a2b_hex(self.ciphertext)
        assoc_data = []
        if self.assoc_data:
            assoc_data = [ bytearray(a2b_hex(b(x))) for x in self.assoc_data]

        cipher = self._new()
        decipher = self._new()

        # Only AEAD modes
        for comp in assoc_data:
            cipher.update(comp)
            decipher.update(comp)

        # All inputs are passed as bytearray to prove buffer support.
        ct = b2a_hex(cipher.encrypt(bytearray(plaintext)))
        pt = b2a_hex(decipher.decrypt(bytearray(ciphertext)))

        self.assertEqual(self.ciphertext, ct)  # encrypt
        self.assertEqual(self.plaintext, pt)  # decrypt

        if self.mac:
            mac = b2a_hex(cipher.digest())
            self.assertEqual(self.mac, mac)
            decipher.verify(bytearray(a2b_hex(self.mac)))
class MemoryviewTest(unittest.TestCase):
    """Verify we can use memoryviews for encrypting and decrypting"""

    # Parameter extraction mirrors CipherSelfTest.__init__; the test body
    # differs in wrapping all inputs in memoryview().
    def __init__(self, module, params):
        unittest.TestCase.__init__(self)
        self.module = module

        # Extract the parameters
        params = params.copy()
        self.description = _extract(params, 'description')
        self.key = b(_extract(params, 'key'))
        self.plaintext = b(_extract(params, 'plaintext'))
        self.ciphertext = b(_extract(params, 'ciphertext'))
        self.module_name = _extract(params, 'module_name', None)
        self.assoc_data = _extract(params, 'assoc_data', None)
        self.mac = _extract(params, 'mac', None)
        if self.assoc_data:
            self.mac = b(self.mac)

        mode = _extract(params, 'mode', None)
        self.mode_name = str(mode)

        if mode is not None:
            # Block cipher
            self.mode = getattr(self.module, "MODE_" + mode)

            # Accept either 'iv' or 'nonce' as the initialization value.
            self.iv = _extract(params, 'iv', None)
            if self.iv is None:
                self.iv = _extract(params, 'nonce', None)
            if self.iv is not None:
                self.iv = b(self.iv)
        else:
            # Stream cipher
            self.mode = None
            self.iv = _extract(params, 'iv', None)
            if self.iv is not None:
                self.iv = b(self.iv)

        self.extra_params = params

    def _new(self):
        # Same construction logic as CipherSelfTest._new.
        params = self.extra_params.copy()
        key = a2b_hex(self.key)

        old_style = []
        if self.mode is not None:
            old_style = [ self.mode ]
        if self.iv is not None:
            old_style += [ a2b_hex(self.iv) ]

        return self.module.new(key, *old_style, **params)

    def runTest(self):
        plaintext = a2b_hex(self.plaintext)
        ciphertext = a2b_hex(self.ciphertext)
        assoc_data = []
        if self.assoc_data:
            assoc_data = [ memoryview(a2b_hex(b(x))) for x in self.assoc_data]

        cipher = self._new()
        decipher = self._new()

        # Only AEAD modes
        for comp in assoc_data:
            cipher.update(comp)
            decipher.update(comp)

        # All inputs are passed as memoryview to prove buffer support.
        ct = b2a_hex(cipher.encrypt(memoryview(plaintext)))
        pt = b2a_hex(decipher.decrypt(memoryview(ciphertext)))

        self.assertEqual(self.ciphertext, ct)  # encrypt
        self.assertEqual(self.plaintext, pt)  # decrypt

        if self.mac:
            mac = b2a_hex(cipher.digest())
            self.assertEqual(self.mac, mac)
            decipher.verify(memoryview(a2b_hex(self.mac)))
def make_block_tests(module, module_name, test_data, additional_params=None):
    """Build the self-test suite for a block cipher module.

    :param module: the cipher module under test
    :param module_name: display name used in test descriptions
    :param test_data: iterable of 3/4/5-tuples
        (plaintext, ciphertext, key[, description[, extra_params]])
    :param additional_params: optional dict merged into every test's params
        (default: no extra parameters)
    :return: list of unittest test cases
    """
    # Avoid the mutable-default-argument pitfall (was ``dict()``).
    if additional_params is None:
        additional_params = {}
    tests = []
    extra_tests_added = False
    for i, row in enumerate(test_data):
        # Build the "params" dictionary with
        # - plaintext
        # - ciphertext
        # - key
        # - mode (default is ECB)
        # - (optionally) description
        # - (optionally) any other parameter that this cipher mode requires
        params = {}
        if len(row) == 3:
            (params['plaintext'], params['ciphertext'], params['key']) = row
        elif len(row) == 4:
            (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row
        elif len(row) == 5:
            (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row
            params.update(extra_params)
        else:
            raise AssertionError("Unsupported tuple size %d" % (len(row),))
        if "mode" not in params:
            params["mode"] = "ECB"
        # Build the display-name for the test
        p2 = params.copy()
        p_key = _extract(p2, 'key')
        p_plaintext = _extract(p2, 'plaintext')
        p_ciphertext = _extract(p2, 'ciphertext')
        p_mode = _extract(p2, 'mode')
        p_description = _extract(p2, 'description', None)
        if p_description is not None:
            description = p_description
        elif p_mode == 'ECB' and not p2:
            description = "p=%s, k=%s" % (p_plaintext, p_key)
        else:
            description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2)
        name = "%s #%d: %s" % (module_name, i+1, description)
        params['description'] = name
        params['module_name'] = module_name
        params.update(additional_params)
        # Add extra test(s) to the test suite before the current test
        if not extra_tests_added:
            tests += [
                RoundtripTest(module, params),
                IVLengthTest(module, params),
                NoDefaultECBTest(module, params),
                ByteArrayTest(module, params),
                BlockSizeTest(module, params),
            ]
            extra_tests_added = True
        # Add the current test to the test suite
        tests.append(CipherSelfTest(module, params))
    return tests
def make_stream_tests(module, module_name, test_data):
    """Build the self-test suite for a stream cipher module.

    :param module: the cipher module under test
    :param module_name: display name used in test descriptions
    :param test_data: iterable of 3/4/5-tuples
        (plaintext, ciphertext, key[, description[, extra_params]])
    :return: list of unittest test cases
    """
    tests = []
    extra_tests_added = False
    # ``enumerate`` replaces the original ``range(len(...))`` index loop.
    for i, row in enumerate(test_data):
        # Build the "params" dictionary
        params = {}
        if len(row) == 3:
            (params['plaintext'], params['ciphertext'], params['key']) = row
        elif len(row) == 4:
            (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row
        elif len(row) == 5:
            (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row
            params.update(extra_params)
        else:
            raise AssertionError("Unsupported tuple size %d" % (len(row),))
        # Build the display-name for the test
        p2 = params.copy()
        p_key = _extract(p2, 'key')
        p_plaintext = _extract(p2, 'plaintext')
        p_ciphertext = _extract(p2, 'ciphertext')
        p_description = _extract(p2, 'description', None)
        if p_description is not None:
            description = p_description
        elif not p2:
            description = "p=%s, k=%s" % (p_plaintext, p_key)
        else:
            description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2)
        name = "%s #%d: %s" % (module_name, i+1, description)
        params['description'] = name
        params['module_name'] = module_name
        # Add extra test(s) to the test suite before the current test
        if not extra_tests_added:
            tests += [
                ByteArrayTest(module, params),
            ]
            tests.append(MemoryviewTest(module, params))
            extra_tests_added = True
        # Add the test to the test suite
        tests.append(CipherSelfTest(module, params))
        tests.append(CipherStreamingSelfTest(module, params))
    return tests
# vim:set ts=4 sw=4 sts=4 expandtab:
| 33.925636 | 113 | 0.59039 |
44b85bbe9237b9365ff1bbc9443d799ae82e816c | 4,381 | py | Python | get_lightcurve_data.py | Aparctias/lightcurve_fft | 34b0d25336fa7362d6036309cf2a5129af5d4bf9 | [
"Apache-2.0"
] | null | null | null | get_lightcurve_data.py | Aparctias/lightcurve_fft | 34b0d25336fa7362d6036309cf2a5129af5d4bf9 | [
"Apache-2.0"
] | null | null | null | get_lightcurve_data.py | Aparctias/lightcurve_fft | 34b0d25336fa7362d6036309cf2a5129af5d4bf9 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#Get lightcurve points and periods.
import json
import os
# Input/output locations (paths relative to the working directory).
MPC_SPINS="mpc_spins"  # JSON list of {'number', 'spin_period'} records
LIGHTCURVE_DIR="lightcurves"  # directory of per-asteroid lightcurve files
DATASET_FILE="LIGHTCURVE_DATASET"  # JSON output written by dump_dataset()
ERROR_FILE="light_errors"  # log of filenames that could not be parsed
def get_spins_dict():
    """Load MPC_SPINS and map asteroid number -> spin period (hours)."""
    with open(MPC_SPINS) as handle:
        records = json.load(handle)
    return dict([(rec['number'], rec['spin_period']) for rec in records])
# XXX: Possible place of optimization
def parse_lightcurve_file(filename):
    """Parse one lightcurve file into a nested dict.

    Result layout (as sketched by the original author):
    # {'number': , 'observations': [
    #     {'reducemags':,
    #      'ucormag': ,
    #      'data': [('julian date', 'magnitude', '..')]
    # ]}}
    ``i`` counts observation blocks; an ENDDATA marker closes the current one.
    """
    data = {}
    i = 0
    with open(filename, 'r') as f:
        for line in f.readlines():
            line = line.strip()
            # first OBJECTNUMBER line fixes the asteroid number
            if line.startswith('OBJECTNUMBER=') and not data.get('number'):
                data['number'] = int(line.split('=')[1])
                data['observations'] = []
            def __add_observation(data):
                # lazily grow the observations list up to index ``i``
                if not data.get('observations'):
                    data['observations'] = []
                if len(data['observations']) - 1 < i:
                    data['observations'].append([])
                    data['observations'][i] = {}
            # REDUCEMAGS and UCORMAG are presented in one example
            # for each metadata
            # Arrgh, need to use this in calculations
            for param in ('reducemags', 'ucormag'):
                if param.upper() + '=' in line:
                    __add_observation(data)
                    observs = data['observations'][i]
                    observs[param] = float(line.split('=')[1])
                    if not observs.get('data'):
                        observs['data'] = []
            # DATA BLOCK
            # NOTE(review): assumes a REDUCEMAGS/UCORMAG line precedes the
            # first DATA line of each block; otherwise this raises -- confirm.
            if line.startswith('DATA='):
                data['observations'][i]['data'].append(line.split('=')[1])
            if 'ENDDATA' in line:
                i += 1
    return data
def is_data_for_analyze(data, period):
    """Decide whether an observation is dense/long enough to analyze.

    ``data`` is a list of '|'-separated point strings whose first field is a
    Julian date (days); ``period`` is the spin period in hours.  Requires at
    least ``threshold`` points, an observation window covering at least
    MAX_PERIODS full periods, and ``threshold`` points per period on average.
    """
    # get number of periods per observation
    MAX_PERIODS = 2.0
    threshold = 10
    points = [d.split('|') for d in data]
    if len(points) < threshold:
        return False
    time_beg = float(points[0][0])
    time_end = float(points[-1][0])
    # Julian dates are in days; convert the window to seconds
    obs_time = (time_end - time_beg) * 60 * 60 * 24
    period_in_sec = period * 60 * 60
    if period_in_sec > obs_time:
        return False
    count_periods = obs_time / period_in_sec
    # Python 2 print statement (file is Python 2)
    print len(points), obs_time, period_in_sec, count_periods
    if (len(points) / count_periods < threshold) or \
            count_periods < MAX_PERIODS:
        return False
    return True
def get_observation_points(data, period):
    """Collect analyzable observation point lists for one asteroid.

    Returns {} when no observation passes ``is_data_for_analyze``; otherwise
    a dict with the asteroid number, the selected point lists and the spin
    period (hours).
    """
    selected = []
    for obs in data['observations']:
        points = obs['data']
        if not is_data_for_analyze(points, period):
            continue
        # NOTE(review): compares the count of already-selected lists against
        # the number of points, exactly as the original did.
        if len(selected) < len(points):
            selected.append(points[:])
    if not selected:
        return {}
    return {
        'number': data['number'],
        'points': selected,
        # period in hours
        'spin_period': period,
    }
def dump_dataset(data_set):
    """Serialize the collected dataset to DATASET_FILE as JSON."""
    with open(DATASET_FILE, 'w') as handle:
        json.dump(data_set, handle)
def dump_desig_name(filename):
    """Append a bad-filename record to the ERROR_FILE log."""
    with open(ERROR_FILE, 'a') as handle:
        handle.write('Bad filename: %s\n' % filename)
def main():
    """Scan LIGHTCURVE_DIR and dump analyzable lightcurves to DATASET_FILE."""
    spins_dict = get_spins_dict()
    print len(spins_dict)
    # Clean the error file
    with open(ERROR_FILE, 'w') as f:
        f.write('')
    dump_data = []
    for _, _, files in os.walk(LIGHTCURVE_DIR):
        # filenames look like "<prefix>_<asteroid-number>_..."
        for i, curve_file in enumerate(files):
            try:
                aster_number = int(curve_file.split('_')[1])
            except ValueError:
                dump_desig_name(curve_file)
                continue
            # only asteroids with a known spin period under 11 hours
            if aster_number in spins_dict and spins_dict[aster_number] < 11.0:
                print curve_file
                spin_period = spins_dict[aster_number]
                # dump data
                filename = os.path.join(LIGHTCURVE_DIR, curve_file)
                light_data = parse_lightcurve_file(filename)
                data_set = get_observation_points(light_data, spin_period)
                if data_set:
                    dump_data.append(data_set)
        # only the top level of LIGHTCURVE_DIR is scanned
        break
    dump_dataset(dump_data)
    print len(dump_data)
if __name__ == '__main__':
    main()
| 31.292857 | 78 | 0.55832 |
052d3a2aa78f7de43181a2fed858fe8b8596d355 | 3,233 | py | Python | backend/api/models.py | christian-japan-devs/vietcatholic-jp | 1a800f478584538cc5e21ddd5816894dff8bd90f | [
"MIT"
] | null | null | null | backend/api/models.py | christian-japan-devs/vietcatholic-jp | 1a800f478584538cc5e21ddd5816894dff8bd90f | [
"MIT"
] | 6 | 2021-05-29T04:39:00.000Z | 2021-07-04T00:40:15.000Z | backend/api/models.py | christian-japan-devs/vietcatholic-jp | 1a800f478584538cc5e21ddd5816894dff8bd90f | [
"MIT"
] | null | null | null | from django.db import models
from django.utils import timezone
import sys
from PIL import Image
from io import BytesIO
from django.core.files.uploadedfile import InMemoryUploadedFile
from tinymce.models import HTMLField
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from adminapp.models import Language
# Create your models here
class Contact(models.Model):
    """A contact-form submission and its (optional) admin reply."""
    contact_full_name = models.CharField(_('Họ Tên'),default='',max_length=30,help_text=_('Họ tên đầy đủ'))
    contact_email = models.CharField(_('Email'),max_length=100,help_text=_('Xin nhập địa chỉ email'))
    contact_content = models.TextField(_('Nội dung'),help_text=_('Nội dung liên hệ'))
    contact_phone = models.CharField(_('Số điện thoại'),default='',blank=True,max_length=12,help_text=_('Xin nhập số điện thoại để liên lạc khi cần'))
    contact_date = models.DateTimeField(_('Ngày liên hệ'),default=timezone.now,help_text=_('Ngày liên hệ'))
    # False until an admin has replied to this contact
    contact_reply_status = models.BooleanField(_('Trạng thái'),help_text=_('Trạng thái phản hồi'),default=False)
    contact_reply_content = models.TextField(_('Nội dung phản hồi'),default='',help_text=_('Nội dung phản hồi'))
    def __str__(self):
        return f'{self.contact_email}'
class CarouselSlide(models.Model):
    """A single slide of the site's image carousel."""
    carousel_title = models.CharField(max_length=100)
    carousel_url = models.ImageField(null=True,blank=True,upload_to='web_images/carousel')
    carousel_content = models.CharField(max_length=200)
    carousel_language = models.ForeignKey(Language,on_delete=models.CASCADE,help_text=_('Ngôn ngữ'),default=None,blank=True,null=True)
    # NOTE(review): stored as free text, not a boolean -- confirm the
    # expected values against the frontend.
    carousel_active = models.CharField(max_length=10,default="",blank=True)
    def __str__(self):
        return f'{self.carousel_title}'
class Announcement(models.Model):
    """A site announcement, one row per language variant."""
    announcement_title = models.CharField(max_length=100)
    announcement_url = models.ImageField(null=True,blank=True,upload_to='web_images/announ')
    announcement_content = HTMLField()
    announcement_language = models.ForeignKey(Language,on_delete=models.CASCADE,help_text=_('Ngôn ngữ'),default=None,blank=True,null=True)
    announcement_active = models.BooleanField(default=True,blank=True)
    announcement_create_date = models.DateTimeField(default=timezone.now)
    announcement_created_user = models.ForeignKey(User, on_delete=models.CASCADE,default=None,blank=True,null=True)
    class Meta:
        # default ordering: oldest first
        ordering = ['announcement_create_date']
    def __str__(self):
        return f'{self.announcement_title}:{self.announcement_create_date}'
class Aboutus(models.Model):
    """The "about us" page content, one row per language variant."""
    aboutus_title = models.CharField(max_length=100)
    aboutus_url = models.ImageField(null=True,blank=True,upload_to='web_images/announ')
    aboutus_content = HTMLField()
    aboutus_language = models.ForeignKey(Language,on_delete=models.CASCADE,help_text=_('Ngôn ngữ'),default=None,blank=True,null=True)
    aboutus_active = models.BooleanField(default=True,blank=True)
    aboutus_create_date = models.DateTimeField(default=timezone.now)
    aboutus_created_user = models.ForeignKey(User, on_delete=models.CASCADE,default=None,blank=True,null=True)
    def __str__(self):
        # Restored: the original line was garbled; this matches the sibling
        # models' __str__ pattern (title:create_date).
        return f'{self.aboutus_title}:{self.aboutus_create_date}'
07dcbb1d2c3c582f5f8f83fa4c64d384ab0b1a43 | 3,825 | py | Python | idaes/gas_solid_contactors/properties/oxygen_iron_OC_oxidation/tests/test_CLC_solid_prop.py | eslickj/idaes-pse | 328ed07ffb0b4d98c03e972675ea32c41dd2531a | [
"RSA-MD"
] | 112 | 2019-02-11T23:16:36.000Z | 2022-03-23T20:59:57.000Z | idaes/gas_solid_contactors/properties/oxygen_iron_OC_oxidation/tests/test_CLC_solid_prop.py | eslickj/idaes-pse | 328ed07ffb0b4d98c03e972675ea32c41dd2531a | [
"RSA-MD"
] | 621 | 2019-03-01T14:44:12.000Z | 2022-03-31T19:49:25.000Z | idaes/gas_solid_contactors/properties/oxygen_iron_OC_oxidation/tests/test_CLC_solid_prop.py | eslickj/idaes-pse | 328ed07ffb0b4d98c03e972675ea32c41dd2531a | [
"RSA-MD"
] | 154 | 2019-02-01T23:46:33.000Z | 2022-03-23T15:07:10.000Z | #################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021
# by the software owners: The Regents of the University of California, through
# Lawrence Berkeley National Laboratory, National Technology & Engineering
# Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University
# Research Corporation, et al. All rights reserved.
#
# Please see the files COPYRIGHT.md and LICENSE.md for full copyright and
# license information.
#################################################################################
"""
Tests for CLC solid phase thermo state block; tests for construction and solve
Author: Chinedu Okoli
"""
import pytest
from pyomo.environ import (ConcreteModel,
TerminationCondition,
SolverStatus,
Var)
from idaes.core import FlowsheetBlock
from idaes.core.util.model_statistics import degrees_of_freedom
from idaes.core.util.testing import (get_default_solver,
initialization_tester)
from idaes.gas_solid_contactors.properties.oxygen_iron_OC_oxidation. \
solid_phase_thermo import SolidPhaseParameterBlock
# Get default solver for testing
solver = get_default_solver()
# -----------------------------------------------------------------------------
@pytest.fixture(scope="class")
def solid_prop():
    """Flowsheet fixture with a fully specified solid-phase state block."""
    m = ConcreteModel()
    m.fs = FlowsheetBlock(default={"dynamic": False})
    # solid properties and state inlet block
    m.fs.properties = SolidPhaseParameterBlock()
    m.fs.unit = m.fs.properties.build_state_block(
        default={"parameters": m.fs.properties,
                 "defined_state": True})
    # Fix the state variables so the block has zero degrees of freedom
    m.fs.unit.flow_mass.fix(1)
    m.fs.unit.particle_porosity.fix(0.27)
    m.fs.unit.temperature.fix(1183.15)
    m.fs.unit.mass_frac_comp["Fe2O3"].fix(0.244)
    m.fs.unit.mass_frac_comp["Fe3O4"].fix(0.202)
    m.fs.unit.mass_frac_comp["Al2O3"].fix(0.554)
    return m
@pytest.mark.unit
def test_build_inlet_state_block(solid_prop):
    """The state block exposes the expected property variables."""
    assert isinstance(solid_prop.fs.unit.dens_mass_skeletal, Var)
    assert isinstance(solid_prop.fs.unit.enth_mol_comp, Var)
    assert isinstance(solid_prop.fs.unit.enth_mass, Var)
    assert isinstance(solid_prop.fs.unit.cp_mol_comp, Var)
    assert isinstance(solid_prop.fs.unit.cp_mass, Var)
@pytest.mark.unit
def test_setInputs_state_block(solid_prop):
    """Fixing the state variables fully specifies the block (0 DOF)."""
    assert degrees_of_freedom(solid_prop.fs.unit) == 0
@pytest.mark.solver
@pytest.mark.skipif(solver is None, reason="Solver not available")
@pytest.mark.component
def test_initialize(solid_prop):
    """Standard IDAES initialization completes without error."""
    initialization_tester(
        solid_prop)
@pytest.mark.solver
@pytest.mark.skipif(solver is None, reason="Solver not available")
@pytest.mark.component
def test_solve(solid_prop):
    """The specified block solves to an optimal solution."""
    assert hasattr(solid_prop.fs.unit, "dens_mass_skeletal")
    assert hasattr(solid_prop.fs.unit, "cp_mass")
    assert hasattr(solid_prop.fs.unit, "enth_mass")
    results = solver.solve(solid_prop)
    # Check for optimal solution
    assert results.solver.termination_condition == \
        TerminationCondition.optimal
    assert results.solver.status == SolverStatus.ok
@pytest.mark.solver
@pytest.mark.skipif(solver is None, reason="Solver not available")
@pytest.mark.component
def test_solution(solid_prop):
    """Solved property values match the reference solution."""
    assert (pytest.approx(3251.75, abs=1e-2) ==
            solid_prop.fs.unit.dens_mass_skeletal.value)
    assert (pytest.approx(1, abs=1e-2) ==
            solid_prop.fs.unit.cp_mass.value)
    assert (pytest.approx(0.0039, abs=1e-2) ==
            solid_prop.fs.unit.enth_mass.value)
| 34.459459 | 81 | 0.680261 |
8370ed6304b202b16dcc0dbf5d3965bb884d7053 | 5,328 | py | Python | qcdb/cov_radii.py | nuwandesilva/qcdb | b47fb2ed550fc4176198ddb1dbea3724d6704d23 | [
"BSD-3-Clause"
] | 1 | 2019-02-20T20:18:02.000Z | 2019-02-20T20:18:02.000Z | qcdb/cov_radii.py | vivacebelles/qcdb | 5bbdcb5c833277647a36bb0a5982abb56bf29b20 | [
"BSD-3-Clause"
] | null | null | null | qcdb/cov_radii.py | vivacebelles/qcdb | 5bbdcb5c833277647a36bb0a5982abb56bf29b20 | [
"BSD-3-Clause"
] | null | null | null | #
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2017 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
# Do not modify this file! It is auto-generated by the document_elements
# script, from psi4topdir/include/cov_radii.h
# Covalent radii (Angstrom) keyed by uppercase element symbol.
# Values mirror psi4's include/cov_radii.h (auto-generated upstream).
psi_cov_radii = {
    'BE': 0.96, 'BA': 2.15, 'BI': 1.48, 'BR': 1.20, 'RU': 1.46,
    'RE': 1.51, 'LU': 1.87, 'RA': 2.21, 'RB': 2.20, 'RN': 1.50,
    'RH': 1.42, 'H': 0.31, 'P': 1.07, 'GE': 1.20, 'GD': 1.96,
    'GA': 1.22, 'OS': 1.44, 'C': 0.76, 'HO': 1.92, 'HF': 1.75,
    'HG': 1.32, 'HE': 0.28, 'PR': 2.03, 'PT': 1.36, 'PU': 1.87,
    'PB': 1.46, 'PA': 2.00, 'PD': 1.39, 'PO': 1.40, 'PM': 1.99,
    'ZN': 1.22, 'K': 2.03, 'O': 0.66, 'S': 1.05, 'W': 1.62,
    'EU': 1.98, 'ZR': 1.75, 'ER': 1.89, 'MG': 1.41, 'MO': 1.54,
    'MN': 1.61, 'U': 1.96, 'FR': 2.60, 'FE': 1.52, 'NI': 1.24,
    'NA': 1.66, 'NB': 1.64, 'ND': 2.01, 'NE': 0.58, 'NP': 1.90,
    'B': 0.84, 'CO': 1.50, 'CL': 1.02, 'CA': 1.76, 'CE': 2.04,
    'N': 0.71, 'V': 1.53, 'CS': 2.44, 'CR': 1.39, 'CU': 1.32,
    'SR': 1.95, 'KR': 1.16, 'SI': 1.11, 'SN': 1.39, 'SM': 1.98,
    'SC': 1.70, 'SB': 1.39, 'SE': 1.20, 'YB': 1.87, 'DY': 1.92,
    'LA': 2.07, 'F': 0.57, 'LI': 1.28, 'TL': 1.45, 'TM': 1.90,
    'TH': 2.06, 'TI': 1.60, 'TE': 1.38, 'TB': 1.94, 'TC': 1.47,
    'TA': 1.70, 'AC': 2.15, 'AG': 1.45, 'I': 1.39, 'IR': 1.41,
    'AM': 1.80, 'AL': 1.21, 'AS': 1.19, 'AR': 1.06, 'AU': 1.36,
    'AT': 1.50, 'IN': 1.42, 'Y': 1.90, 'CD': 1.44, 'XE': 1.40,
}
| 41.625 | 80 | 0.468281 |
30f4e30db8de438a26bac6722860a964e982b23a | 1,263 | py | Python | phage/read_ends.py | linsalrob/EdwardsLab | 3d4eef1dda61c31ce8163d94d86f186275a6e4a4 | [
"MIT"
] | 30 | 2015-01-25T16:22:51.000Z | 2022-01-20T15:56:47.000Z | phage/read_ends.py | linsalrob/EdwardsLab | 3d4eef1dda61c31ce8163d94d86f186275a6e4a4 | [
"MIT"
] | 2 | 2020-04-13T15:00:37.000Z | 2020-09-23T12:35:59.000Z | phage/read_ends.py | linsalrob/EdwardsLab | 3d4eef1dda61c31ce8163d94d86f186275a6e4a4 | [
"MIT"
] | 24 | 2015-04-17T00:52:05.000Z | 2021-11-26T17:50:01.000Z | """
Extract the ends of sequences to a separate file
"""
import os
import sys
import argparse
from roblib import colours, stream_fastq
__author__ = 'Rob Edwards'
__copyright__ = 'Copyright 2020, BacterialProteins'
__credits__ = ['Rob Edwards']
__license__ = 'MIT'
__maintainer__ = 'Rob Edwards'
__email__ = 'raedwards@gmail.com'
def split_fastq(fqf, outdir, frac, verbose=False):
    """
    Split a fastq file, writing a fraction of each end of every read to
    separate fasta files in ``outdir``.

    :param fqf: fastq file
    :param outdir: output directory to write all the files to
    :param frac: fraction of the sequence for each end (<= 1)
    :param verbose: more output
    :return: nothing
    """
    if not os.path.exists(outdir):
        # was os.path.mkdir(), which does not exist (AttributeError)
        os.makedirs(outdir)
    for seqid, header, seq, qual in stream_fastq(fqf):
        if verbose:
            sys.stderr.write("Splitting {}\n".format(seqid))
        # NOTE(review): the original was truncated mid-function and used the
        # sequence itself as the filename; reconstructed to use the read id.
        n = int(len(seq) * frac)
        with open(os.path.join(outdir, seqid + ".left.fna"), 'w') as out:
            out.write(">{}\n{}\n".format(seqid, seq[:n]))
        with open(os.path.join(outdir, seqid + ".right.fna"), 'w') as out:
            out.write(">{}\n{}\n".format(seqid, seq[-n:]))
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description=" ")
    parser.add_argument('-q', help='input fastq file', required=True)
    parser.add_argument('-x', help='fraction of the read from each end to put into separate files (<=1)', type=float)
    parser.add_argument('-o', help='output file', required=True)
    parser.add_argument('-v', help='verbose output', action='store_true')
    args = parser.parse_args()
    # NOTE(review): args are parsed but never used -- the source appears to
    # be truncated here (split_fastq is never invoked).
| 25.77551 | 117 | 0.688836 |
8238fb50025a89ace71e80efecf4bcb3159a76f4 | 830 | py | Python | server/app/views/fits_category.py | Celeo/GETIN-Extra | 95c7b5a85f14c8c9bf1e2c728f36fef7bcafef68 | [
"MIT"
] | null | null | null | server/app/views/fits_category.py | Celeo/GETIN-Extra | 95c7b5a85f14c8c9bf1e2c728f36fef7bcafef68 | [
"MIT"
] | 1 | 2019-10-21T19:46:57.000Z | 2019-10-21T19:46:57.000Z | server/app/views/fits_category.py | Celeo/GETIN-Extras | 95c7b5a85f14c8c9bf1e2c728f36fef7bcafef68 | [
"MIT"
] | null | null | null | from flask import request
from flask_restful import Resource, marshal_with
from ..util import restrict_admin
from ..models import db, FitsCategory
class FitsCategoriesResource(Resource):
    """Admin-only collection endpoint for fits categories."""

    method_decorators = [restrict_admin]

    @marshal_with(FitsCategory.resource_fields)
    def get(self):
        """List every fits category."""
        categories = FitsCategory.query.all()
        return categories

    def post(self):
        """Create a new fits category from the posted name."""
        name = request.json['name']
        db.session.add(FitsCategory(name))
        db.session.commit()
        return {}, 204
class FitsCategoryResource(Resource):
    """Admin-only item endpoint for a single fits category."""

    method_decorators = [restrict_admin]

    def put(self, id):
        """Update the display order of the category."""
        category = FitsCategory.query.get(id)
        category.order = int(request.json['order'])
        db.session.commit()
        return {}, 204

    def delete(self, id):
        """Remove the category."""
        category = FitsCategory.query.get(id)
        db.session.delete(category)
        db.session.commit()
        return {}, 204
| 23.714286 | 69 | 0.674699 |
dd88c481b16d1c7661082548840671225fae7e18 | 1,790 | py | Python | tools/mo/openvino/tools/mo/front/mxnet/extractors/slice_axis.py | ryanloney/openvino-1 | 4e0a740eb3ee31062ba0df88fcf438564f67edb7 | [
"Apache-2.0"
] | 1,127 | 2018-10-15T14:36:58.000Z | 2020-04-20T09:29:44.000Z | tools/mo/openvino/tools/mo/front/mxnet/extractors/slice_axis.py | ryanloney/openvino-1 | 4e0a740eb3ee31062ba0df88fcf438564f67edb7 | [
"Apache-2.0"
] | 439 | 2018-10-20T04:40:35.000Z | 2020-04-19T05:56:25.000Z | tools/mo/openvino/tools/mo/front/mxnet/extractors/slice_axis.py | ryanloney/openvino-1 | 4e0a740eb3ee31062ba0df88fcf438564f67edb7 | [
"Apache-2.0"
] | 414 | 2018-10-17T05:53:46.000Z | 2020-04-16T17:29:53.000Z | # Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
from openvino.tools.mo.front.caffe.extractors.utils import get_canonical_axis_index
from openvino.tools.mo.utils.error import Error
def slice_axis_ext(attrs):
    """Convert MXNet ``slice_axis`` attributes into Crop node attributes."""
    axis = attrs.int("axis", 0)
    begin = attrs.int("begin", 0)
    end = attrs.int("end", None)
    return {
        'op': 'Crop',
        'axis': axis,
        'offset': begin,  # slice start along ``axis``
        'dim': end,       # slice end (None -> to the end of the axis)
        'infer': mxnet_slice_axis_infer,
    }
def mxnet_slice_axis_infer(node):
    """Shape inference for the MXNet slice_axis (Crop) operation.

    Normalizes negative ``axis``/``offset``/``dim`` values against the input
    shape, validates the slice length and writes the resulting shape to every
    output port.
    """
    in_shape = node.in_port(0).data.get_shape()
    node.axis = get_canonical_axis_index(in_shape, node.axis)
    slice_axis = node.axis
    new_shape = in_shape.copy()
    # NOTE(review): this value is recomputed by the loop below before it is
    # read, so this true-division assignment looks dead -- confirm.
    new_shape[slice_axis] = new_shape[slice_axis] / len(node.out_nodes())
    axis_size = in_shape[slice_axis]
    if node.offset < 0:
        # negative offsets count from the end of the axis
        node.offset += axis_size
    if not node.dim:
        # missing "end" attribute -> slice to the end of the axis
        node.dim = axis_size
    elif node.dim < 0:
        node.dim += axis_size
    input_dim = in_shape.size
    # dim becomes the slice *length* from here on
    node.dim = (node.dim - node.offset)
    if node.dim > in_shape[slice_axis]:
        raise Error(
            '{0} node dimension value is bigger than the corresponding value in the input shape {1}. ' +
            '\nIn particular {2} is bigger than {3}. The Model Optimizer does not support this case. ' +
            '\nTo overcome, try to edit the original model "end" property of the {0} layer.',
            node.name, ','.join(str(i) for i in in_shape), str(node.dim), str(in_shape[slice_axis])
        )
    for i in range(0, input_dim):
        if i == slice_axis:
            new_shape[i] = node.dim
        else:
            new_shape[i] = in_shape[i]
    for i in range(0, len(node.out_nodes())):
        node.out_node(i)['shape'] = new_shape
78eb54b650a3d39559843418bef7629dffd8f78e | 3,181 | py | Python | src/compas/datastructures/mesh/operations/weld.py | funkchaser/compas | b58de8771484aa0c6068d43df78b1679503215de | [
"MIT"
] | 235 | 2017-11-07T07:33:22.000Z | 2022-03-25T16:20:00.000Z | src/compas/datastructures/mesh/operations/weld.py | funkchaser/compas | b58de8771484aa0c6068d43df78b1679503215de | [
"MIT"
] | 770 | 2017-09-22T13:42:06.000Z | 2022-03-31T21:26:45.000Z | src/compas/datastructures/mesh/operations/weld.py | funkchaser/compas | b58de8771484aa0c6068d43df78b1679503215de | [
"MIT"
] | 99 | 2017-11-06T23:15:28.000Z | 2022-03-25T16:05:36.000Z | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from compas.topology import adjacency_from_edges
from compas.topology import connected_components
from compas.utilities import pairwise
from .substitute import mesh_substitute_vertex_in_faces
__all__ = [
'mesh_unweld_vertices',
'mesh_unweld_edges'
]
def mesh_unweld_vertices(mesh, fkey, where=None):
    """Unweld a face of the mesh.

    Parameters
    ----------
    mesh : Mesh
        A mesh object.
    fkey : hashable
        The identifier of a face.
    where : list (None)
        A list of vertices to unweld.
        Default is to unweld all vertices of the face.

    Returns
    -------
    list
        The vertices of the rebuilt (unwelded) face.

    Examples
    --------
    >>>
    """
    face = []
    vertices = mesh.face_vertices(fkey)
    if not where:
        where = vertices
    for u, v in pairwise(vertices + vertices[0:1]):
        if u in where:
            # duplicate the vertex at the same coordinates
            x, y, z = mesh.vertex_coordinates(u)
            u = mesh.add_vertex(x=x, y=y, z=z)
        if u in where or v in where:
            # break the halfedge link back to the face being replaced
            mesh.halfedge[v][u] = None
        face.append(u)
    mesh.add_face(face, fkey=fkey)
    return face
def mesh_unweld_edges(mesh, edges):
    """Unwelds a mesh along edges.

    Each vertex incident to an unwelded edge is replaced by one new vertex
    per disconnected fan of faces around it.

    Parameters
    ----------
    mesh : Mesh
        A mesh.
    edges: list
        List of edges as tuples of vertex keys.
    """
    # set of vertices in edges to unweld
    vertices = set([i for edge in edges for i in edge])
    # to store changes to do all at once
    vertex_changes = {}
    for vkey in vertices:
        # maps between old mesh face index and new network vertex index
        old_to_new = {nbr: i for i, nbr in enumerate(mesh.vertex_faces(vkey))}
        new_to_old = {i: nbr for i, nbr in enumerate(mesh.vertex_faces(vkey))}
        # get adjacency network of faces around the vertex excluding adjacency
        # through the edges to unweld
        network_edges = []
        for nbr in mesh.vertex_neighbors(vkey):
            if not mesh.is_edge_on_boundary(vkey, nbr) and (vkey, nbr) not in edges and (nbr, vkey) not in edges:
                network_edges.append((old_to_new[mesh.halfedge[vkey][nbr]], old_to_new[mesh.halfedge[nbr][vkey]]))
        adjacency = adjacency_from_edges(network_edges)
        for key, values in adjacency.items():
            adjacency[key] = {value: None for value in values}
        # include non connected vertices
        edge_vertices = list(set([i for edge in network_edges for i in edge]))
        for i in range(len(mesh.vertex_faces(vkey))):
            if i not in edge_vertices:
                adjacency[i] = {}
        # collect the disconnected parts around the vertex due to unwelding
        vertex_changes[vkey] = [[new_to_old[key] for key in part] for part in connected_components(adjacency)]
    # apply all changes after the analysis, so the topology queries above are
    # not invalidated mid-loop
    for vkey, changes in vertex_changes.items():
        # for each disconnected part replace the vertex by a new vertex in the
        # faces of the part
        for change in changes:
            mesh_substitute_vertex_in_faces(mesh, vkey, mesh.add_vertex(attr_dict=mesh.vertex[vkey]), change)
        # delete old vertices
        mesh.delete_vertex(vkey)
| 29.728972 | 114 | 0.643194 |
a561cb936f214a89085db1c383db4690e0c3b15e | 613 | py | Python | proxy/http/server/protocols.py | zanachka/proxy.py | ab5c155213115d1664ce429ec155184d16ca9be6 | [
"BSD-3-Clause"
] | null | null | null | proxy/http/server/protocols.py | zanachka/proxy.py | ab5c155213115d1664ce429ec155184d16ca9be6 | [
"BSD-3-Clause"
] | 9 | 2021-12-10T01:22:33.000Z | 2022-03-31T18:21:07.000Z | proxy/http/server/protocols.py | zanachka/proxy.py | ab5c155213115d1664ce429ec155184d16ca9be6 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
proxy.py
~~~~~~~~
⚡⚡⚡ Fast, Lightweight, Pluggable, TLS interception capable proxy server focused on
Network monitoring, controls & Application development, testing, debugging.
:copyright: (c) 2013-present by Abhinav Singh and contributors.
:license: BSD, see LICENSE for more details.
.. spelling::
http
iterable
"""
from typing import NamedTuple
class HttpProtocolTypes(NamedTuple):
    """Integer identifiers for the protocols handled by the HTTP server."""
    HTTP: int
    HTTPS: int
    WEBSOCKET: int


httpProtocolTypes = HttpProtocolTypes(HTTP=1, HTTPS=2, WEBSOCKET=3)
| 22.703704 | 86 | 0.626427 |
944e35f5e9d18bd56ae01ab1de2b9b496c38cc96 | 8,184 | py | Python | packages/qsmstoken/qsmstoken/clients/aliyunsms/api_sdk/aliyun-python-sdk-core/aliyunsdkcore/client.py | lianxiaopang/camel-store-api | b8021250bf3d8cf7adc566deebdba55225148316 | [
"Apache-2.0"
] | 12 | 2020-02-01T01:52:01.000Z | 2021-04-28T15:06:43.000Z | packages/qsmstoken/qsmstoken/clients/aliyunsms/api_sdk/aliyun-python-sdk-core/aliyunsdkcore/client.py | lianxiaopang/camel-store-api | b8021250bf3d8cf7adc566deebdba55225148316 | [
"Apache-2.0"
] | 5 | 2020-02-06T08:07:58.000Z | 2020-06-02T13:03:45.000Z | packages/qsmstoken/qsmstoken/clients/aliyunsms/api_sdk/aliyun-python-sdk-core/aliyunsdkcore/client.py | lianxiaopang/camel-store-api | b8021250bf3d8cf7adc566deebdba55225148316 | [
"Apache-2.0"
] | 11 | 2020-02-03T13:07:46.000Z | 2020-11-29T01:44:06.000Z | from __future__ import absolute_import
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding=utf-8
from future import standard_library
standard_library.install_aliases()
from builtins import str
from builtins import object
import os
import sys
import http.client
import warnings
warnings.filterwarnings("once", category=DeprecationWarning)
try:
import json
except ImportError:
import simplejson as json
from .profile import region_provider
from .profile.location_service import LocationService
from .acs_exception.exceptions import ClientException
from .acs_exception.exceptions import ServerException
from .acs_exception import error_code, error_msg
from .http.http_response import HttpResponse
from .request import AcsRequest
"""
Acs default client module.
Created on 6/15/2015
@author: alex jiang
"""
class AcsClient(object):
    """Default Aliyun Cloud Service (ACS) client.

    Holds the access-key credentials and region, resolves product
    endpoints (optionally through the location service) and performs
    signed HTTP round trips for ``AcsRequest`` objects.
    """

    def __init__(self, ak, secret, region_id, auto_retry=True, max_retry_time=3, user_agent=None, port=80):
        """
        constructor for AcsClient
        :param ak: String, access key id
        :param secret: String, access key secret
        :param region_id: String, region id
        :param auto_retry: Boolean, whether failed calls may be retried
        :param max_retry_time: Number, maximum number of retries
        :param user_agent: String, optional User-Agent header override
        :param port: Number, TCP port used for every request
        :return:
        """
        self.__max_retry_num = max_retry_time
        self.__auto_retry = auto_retry
        self.__ak = ak
        self.__secret = secret
        self.__region_id = region_id
        self.__user_agent = user_agent
        self._port = port
        self._location_service = LocationService(self)
        # if true, do_action() will throw a ClientException that contains URL
        self._url_test_flag = False

    def get_region_id(self):
        """
        :return: String
        """
        return self.__region_id

    def get_access_key(self):
        """
        :return: String
        """
        return self.__ak

    def get_access_secret(self):
        """
        :return: String
        """
        return self.__secret

    def is_auto_retry(self):
        """
        :return: Boolean
        """
        return self.__auto_retry

    def get_max_retry_num(self):
        """
        :return: Number
        """
        return self.__max_retry_num

    def get_user_agent(self):
        """
        :return: String or None
        """
        return self.__user_agent

    def set_region_id(self, region):
        self.__region_id = region

    def set_access_key(self, ak):
        self.__ak = ak

    def set_access_secret(self, secret):
        self.__secret = secret

    def set_max_retry_num(self, num):
        """
        set auto retry number
        :param num: Numbers
        :return: None
        """
        self.__max_retry_num = num

    def set_auto_retry(self, flag):
        """
        set whether or not the client perform auto-retry
        :param flag: Booleans
        :return: None
        """
        self.__auto_retry = flag

    def set_user_agent(self, agent):
        """
        User agent set to client will overwrite the request setting.
        :param agent:
        :return:
        """
        self.__user_agent = agent

    def get_location_service(self):
        return self._location_service

    def get_port(self):
        return self._port

    def _resolve_endpoint(self, request):
        """Find the product domain for *request*.

        Prefers the location service when the request declares a location
        service code, falling back to the static region/product table.
        :raises ClientException: on unknown region or invalid request type
        """
        endpoint = None
        if request.get_location_service_code() is not None:
            endpoint = self._location_service.find_product_domain(self.get_region_id(), request.get_location_service_code())
        if endpoint is None:
            endpoint = region_provider.find_product_domain(self.get_region_id(), request.get_product())
        if endpoint is None:
            raise ClientException(error_code.SDK_INVALID_REGION_ID, error_msg.get_msg('SDK_INVALID_REGION_ID'))
        if not isinstance(request, AcsRequest):
            raise ClientException(error_code.SDK_INVALID_REQUEST, error_msg.get_msg('SDK_INVALID_REQUEST'))
        return endpoint

    def _make_http_response(self, endpoint, request):
        """Build an (unsent) HttpResponse with signed headers for *request*."""
        content = request.get_content()
        method = request.get_method()
        header = request.get_signed_header(self.get_region_id(), self.get_access_key(),
                                           self.get_access_secret())
        if header is None:
            # BUGFIX: this guard used to run *after* the mutations below, so
            # it could never take effect and a None header crashed the call.
            header = {}
        if self.get_user_agent() is not None:
            header['User-Agent'] = self.get_user_agent()
        header['x-sdk-client'] = 'python/2.0.0'
        protocol = request.get_protocol_type()
        url = request.get_url(self.get_region_id(), self.get_access_key(), self.get_access_secret())
        response = HttpResponse(endpoint, url, method, header, protocol, content,
                                self._port)
        return response

    def _implementation_of_do_action(self, request):
        """Resolve the endpoint, send the request and return
        ``(status, headers, body)``.
        :raises ClientException: on network failure or malformed request
        """
        endpoint = self._resolve_endpoint(request)
        http_response = self._make_http_response(endpoint, request)
        if self._url_test_flag:
            raise ClientException("URLTestFlagIsSet", http_response.get_url())
        # Do the actual network thing
        try:
            status, headers, body = http_response.get_response_object()
            return status, headers, body
        except IOError as e:
            raise ClientException(error_code.SDK_SERVER_UNREACHABLE, error_msg.get_msg('SDK_SERVER_UNREACHABLE') + ': ' + str(e))
        except AttributeError:
            raise ClientException(error_code.SDK_INVALID_REQUEST, error_msg.get_msg('SDK_INVALID_REQUEST'))

    def _parse_error_info_from_response_body(self, response_body):
        """Extract ``(code, message)`` from a JSON error body, falling back
        to a generic unknown-server-error pair."""
        try:
            body_obj = json.loads(response_body.decode('utf-8'))
            if 'Code' in body_obj and 'Message' in body_obj:
                return (body_obj['Code'], body_obj['Message'])
            else:
                return (error_code.SDK_UNKNOWN_SERVER_ERROR, error_msg.get_msg('SDK_UNKNOWN_SERVER_ERROR'))
        except ValueError:
            # failed to parse body as json format
            return (error_code.SDK_UNKNOWN_SERVER_ERROR, error_msg.get_msg('SDK_UNKNOWN_SERVER_ERROR'))

    def do_action_with_exception(self, acs_request):
        """Send *acs_request* and return the raw response body.
        :raises ServerException: when the HTTP status is not 200 OK
        """
        # set server response format as json, because this function will
        # parse the response so which format doesn't matter
        acs_request.set_accept_format('json')
        status, headers, body = self._implementation_of_do_action(acs_request)
        request_id = None
        try:
            body_obj = json.loads(body.decode('utf-8'))
            request_id = body_obj.get('RequestId')
        except ValueError:
            # in case the response body is not a json string, keep request_id
            # as None and report the error without it
            pass
        if status != http.client.OK:
            server_error_code, server_error_message = self._parse_error_info_from_response_body(body)
            raise ServerException(server_error_code, server_error_message, http_status=status, request_id=request_id)
        return body

    def do_action(self, acs_request):
        warnings.warn("do_action() method is deprecated, please use do_action_with_exception() instead.", DeprecationWarning)
        status, headers, body = self._implementation_of_do_action(acs_request)
        return body

    def get_response(self, acs_request):
        warnings.warn("get_response() method is deprecated, please use do_action_with_exception() instead.", DeprecationWarning)
        return self._implementation_of_do_action(acs_request)
| 33.958506 | 129 | 0.668622 |
09da3868208d5aa6b589c42be65ccfa247f6adac | 211 | py | Python | bvs/background_verification/doctype/bvs_report_support/test_bvs_report_support.py | vhrspvl/vhrs-bvs | 56667039d9cc09ad0b092e5e6c5dd6598ff41e7b | [
"MIT"
] | 1 | 2021-08-19T11:16:47.000Z | 2021-08-19T11:16:47.000Z | bvs/background_verification/doctype/bvs_report_support/test_bvs_report_support.py | vhrspvl/vhrs-bvs | 56667039d9cc09ad0b092e5e6c5dd6598ff41e7b | [
"MIT"
] | null | null | null | bvs/background_verification/doctype/bvs_report_support/test_bvs_report_support.py | vhrspvl/vhrs-bvs | 56667039d9cc09ad0b092e5e6c5dd6598ff41e7b | [
"MIT"
] | 4 | 2018-03-21T05:57:54.000Z | 2020-11-26T00:37:29.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2018, VHRS and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestBVSReportSupport(unittest.TestCase):
    """Placeholder test case for the BVS Report Support doctype."""
| 19.181818 | 46 | 0.772512 |
647059ee3c769a6ec4e0d1bbe65b3aa64e202731 | 1,447 | py | Python | tests/transform_for_magma_tests.py | aweimeow/enodebd | e1cd20693153e6b85e5d1bf9d21af2501c358601 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | tests/transform_for_magma_tests.py | aweimeow/enodebd | e1cd20693153e6b85e5d1bf9d21af2501c358601 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | tests/transform_for_magma_tests.py | aweimeow/enodebd | e1cd20693153e6b85e5d1bf9d21af2501c358601 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # SPDX-FileCopyrightText: 2020 The Magma Authors.
# SPDX-FileCopyrightText: 2022 Open Networking Foundation <support@opennetworking.org>
#
# SPDX-License-Identifier: BSD-3-Clause
# pylint: disable=protected-access
from unittest import TestCase
from data_models.transform_for_magma import bandwidth, gps_tr181
from exceptions import ConfigurationError
class TransformForMagmaTests(TestCase):
    """Unit tests for the eNodeB data-model value transforms."""

    def test_gps_tr181(self) -> None:
        """gps_tr181 scales raw fixed-point coordinates to decimal degrees."""
        self.assertEqual(
            gps_tr181('-122150583'), '-122.150583',
            'Should convert negative longitude',
        )
        self.assertEqual(
            gps_tr181('122150583'), '122.150583',
            'Should convert positive longitude',
        )
        self.assertEqual(gps_tr181('0'), '0.0', 'Should leave zero as zero')

    def test_bandwidth(self) -> None:
        """bandwidth accepts RB codes or MHz and rejects everything else."""
        self.assertEqual(bandwidth('n6'), 1.4, 'Should convert RBs')
        self.assertEqual(bandwidth(1.4), 1.4, 'Should accept MHz')
        self.assertRaises(ConfigurationError, bandwidth, 'asdf')
        self.assertRaises(ConfigurationError, bandwidth, 1234)
| 29.530612 | 86 | 0.642018 |
68f38320b5838bd4de77a768a76b85f663f38249 | 57,113 | py | Python | buildenv/lib/python3.5/site-packages/sphinx/builders/html.py | psarando/cyverse-Visual_Interactive_Computing_Environment | 424467bb16cef04837e02907320c759d45d25a18 | [
"CC-BY-4.0"
] | null | null | null | buildenv/lib/python3.5/site-packages/sphinx/builders/html.py | psarando/cyverse-Visual_Interactive_Computing_Environment | 424467bb16cef04837e02907320c759d45d25a18 | [
"CC-BY-4.0"
] | 4 | 2021-03-04T17:18:36.000Z | 2021-07-14T19:08:18.000Z | buildenv/lib/python3.5/site-packages/sphinx/builders/html.py | psarando/cyverse-Visual_Interactive_Computing_Environment | 424467bb16cef04837e02907320c759d45d25a18 | [
"CC-BY-4.0"
] | 2 | 2021-03-02T10:07:11.000Z | 2021-10-12T23:41:07.000Z | # -*- coding: utf-8 -*-
"""
sphinx.builders.html
~~~~~~~~~~~~~~~~~~~~
Several HTML builders.
:copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import sys
import codecs
import warnings
import posixpath
from os import path
from hashlib import md5
from six import iteritems, text_type, string_types
from six.moves import cPickle as pickle
import docutils
from docutils import nodes
from docutils.io import DocTreeInput, StringOutput
from docutils.core import Publisher
from docutils.utils import new_document, relative_path
from docutils.frontend import OptionParser
from docutils.readers.doctree import Reader as DoctreeReader
from sphinx import package_dir, __display_version__
from sphinx.util import jsonimpl, logging, status_iterator
from sphinx.util.i18n import format_date
from sphinx.util.inventory import InventoryFile
from sphinx.util.osutil import SEP, os_path, relative_uri, ensuredir, \
movefile, copyfile
from sphinx.util.nodes import inline_all_toctrees
from sphinx.util.docutils import is_html5_writer_available
from sphinx.util.fileutil import copy_asset
from sphinx.util.matching import patmatch, Matcher, DOTFILES
from sphinx.config import string_classes
from sphinx.deprecation import RemovedInSphinx20Warning
from sphinx.locale import _, l_
from sphinx.search import js_index
from sphinx.theming import HTMLThemeFactory
from sphinx.builders import Builder
from sphinx.application import ENV_PICKLE_FILENAME
from sphinx.highlighting import PygmentsBridge
from sphinx.util.console import bold, darkgreen # type: ignore
from sphinx.writers.html import HTMLWriter, HTMLTranslator
from sphinx.environment.adapters.asset import ImageAdapter
from sphinx.environment.adapters.toctree import TocTree
from sphinx.environment.adapters.indexentries import IndexEntries
if False:
# For type annotation
from typing import Any, Dict, Iterable, Iterator, List, Type, Tuple, Union # NOQA
from sphinx.domains import Domain, Index # NOQA
from sphinx.application import Sphinx # NOQA
# Experimental HTML5 Writer
if is_html5_writer_available():
from sphinx.writers.html5 import HTML5Translator
html5_ready = True
else:
html5_ready = False
#: the filename for the inventory of objects
INVENTORY_FILENAME = 'objects.inv'
#: the filename for the "last build" file (for serializing builders)
LAST_BUILD_FILENAME = 'last_build'
logger = logging.getLogger(__name__)
return_codes_re = re.compile('[\r\n]+')
def get_stable_hash(obj):
    # type: (Any) -> unicode
    """Return a stable MD5 hex digest for a Python data structure.

    ``md5(str(obj))`` alone is not stable because dict iteration order
    varies under hash randomization, so dicts are first flattened to item
    lists and all sequences are replaced by the *sorted* hashes of their
    elements before the final digest is taken.
    """
    if isinstance(obj, dict):
        obj = list(obj.items())
    if isinstance(obj, (list, tuple)):
        obj = sorted(get_stable_hash(item) for item in obj)
    return md5(text_type(obj).encode('utf8')).hexdigest()
class CSSContainer(list):
    """The container of stylesheets.
    To support the extensions which access the container directly, this wraps
    the entry with Stylesheet class.
    """
    def append(self, obj):
        # supported entry point (used by app.add_stylesheet); note that unlike
        # the direct-mutation methods below it does NOT emit a deprecation
        # warning
        if isinstance(obj, Stylesheet):
            super(CSSContainer, self).append(obj)
        else:
            # plain string entries are normalized to Stylesheet instances
            super(CSSContainer, self).append(Stylesheet(obj, None, 'stylesheet'))
    def insert(self, index, obj):
        # deprecated: kept only for old extensions that mutate css_files directly
        warnings.warn('builder.css_files is deprecated. '
                      'Please use app.add_stylesheet() instead.',
                      RemovedInSphinx20Warning)
        if isinstance(obj, Stylesheet):
            super(CSSContainer, self).insert(index, obj)
        else:
            super(CSSContainer, self).insert(index, Stylesheet(obj, None, 'stylesheet'))
    def extend(self, other):
        # deprecated: delegates to append() so entries still get wrapped
        warnings.warn('builder.css_files is deprecated. '
                      'Please use app.add_stylesheet() instead.',
                      RemovedInSphinx20Warning)
        for item in other:
            self.append(item)
    def __iadd__(self, other):
        # deprecated: `css_files += [...]` path; wraps entries via append()
        warnings.warn('builder.css_files is deprecated. '
                      'Please use app.add_stylesheet() instead.',
                      RemovedInSphinx20Warning)
        for item in other:
            self.append(item)
        return self
    def __add__(self, other):
        # returns a new container; warning is emitted by the += inside
        ret = CSSContainer(self)
        ret += other
        return ret
class Stylesheet(text_type):
    """The metadata of stylesheet.
    To keep compatibility with old themes, an instance of stylesheet behaves as
    its filename (str).
    """
    def __new__(cls, filename, title, rel):
        # type: (unicode, unicode, unicode) -> None
        # subclass the string type itself so templates that treat the entry as
        # a bare filename keep working; extra metadata rides along as attributes
        self = text_type.__new__(cls, filename)  # type: ignore
        self.filename = filename
        self.title = title
        self.rel = rel
        return self
class StandaloneHTMLBuilder(Builder):
    """
    Builds standalone HTML docs.
    """
    name = 'html'
    format = 'html'
    copysource = True
    allow_parallel = True
    out_suffix = '.html'
    link_suffix = '.html'  # defaults to matching out_suffix
    # search index serializer and whether dumps() returns unicode
    indexer_format = js_index  # type: Any
    indexer_dumps_unicode = True
    # create links to original images from images [True/False]
    html_scaled_image_link = True
    supported_image_types = ['image/svg+xml', 'image/png',
                             'image/gif', 'image/jpeg']
    supported_remote_images = True
    supported_data_uri_images = True
    # filename of the dumped search index
    searchindex_filename = 'searchindex.js'
    add_permalinks = True
    allow_sharp_as_current_path = True
    embedded = False  # for things like HTML help or Qt help: suppresses sidebar
    search = True  # for things like HTML help and Apple help: suppress search
    use_index = False
    download_support = True  # enable download role
    # use html5 translator by default
    default_html5_translator = False
    # This is a class attribute because it is mutated by Sphinx.add_javascript.
    script_files = ['_static/jquery.js', '_static/underscore.js',
                    '_static/doctools.js']  # type: List[unicode]
    # Ditto for this one (Sphinx.add_stylesheet).
    css_files = CSSContainer()  # type: List[Dict[unicode, unicode]]
    imgpath = None  # type: unicode
    domain_indices = []  # type: List[Tuple[unicode, Type[Index], List[Tuple[unicode, List[List[Union[unicode, int]]]]], bool]]  # NOQA
    # cached publisher object for snippets
    _publisher = None
    def init(self):
        # type: () -> None
        """One-time builder setup: templates, highlighter, output suffixes,
        translation script and the HTML5-writer availability check."""
        # a hash of all config values that, if changed, cause a full rebuild
        self.config_hash = ''  # type: unicode
        self.tags_hash = ''  # type: unicode
        # basename of images directory
        self.imagedir = '_images'
        # section numbers for headings in the currently visited document
        self.secnumbers = {}  # type: Dict[unicode, Tuple[int, ...]]
        # currently written docname
        self.current_docname = None  # type: unicode
        self.init_templates()
        self.init_highlighter()
        if self.config.html_file_suffix is not None:
            self.out_suffix = self.config.html_file_suffix
        if self.config.html_link_suffix is not None:
            self.link_suffix = self.config.html_link_suffix
        else:
            # links default to the same suffix as output files
            self.link_suffix = self.out_suffix
        if self.config.language is not None:
            if self._get_translations_js():
                # a translation catalog exists for this language; load it
                self.script_files.append('_static/translations.js')
        self.use_index = self.get_builder_config('use_index', 'html')
        if self.config.html_experimental_html5_writer and not html5_ready:
            self.app.warn(('html_experimental_html5_writer is set, but current version '
                           'is old. Docutils\' version should be 0.13 or newer, but %s.') %
                          docutils.__version__)
def _get_translations_js(self):
# type: () -> unicode
candidates = [path.join(dir, self.config.language,
'LC_MESSAGES', 'sphinx.js')
for dir in self.config.locale_dirs] + \
[path.join(package_dir, 'locale', self.config.language,
'LC_MESSAGES', 'sphinx.js'),
path.join(sys.prefix, 'share/sphinx/locale',
self.config.language, 'sphinx.js')]
for jsfile in candidates:
if path.isfile(jsfile):
return jsfile
return None
def get_theme_config(self):
# type: () -> Tuple[unicode, Dict]
return self.config.html_theme, self.config.html_theme_options
def init_templates(self):
# type: () -> None
theme_factory = HTMLThemeFactory(self.app)
themename, themeoptions = self.get_theme_config()
self.theme = theme_factory.create(themename)
self.theme_options = themeoptions.copy()
self.create_template_bridge()
self.templates.init(self, self.theme)
def init_highlighter(self):
# type: () -> None
# determine Pygments style and create the highlighter
if self.config.pygments_style is not None:
style = self.config.pygments_style
elif self.theme:
style = self.theme.get_config('theme', 'pygments_style', 'none')
else:
style = 'sphinx'
self.highlighter = PygmentsBridge('html', style,
self.config.trim_doctest_flags)
@property
def default_translator_class(self):
use_html5_writer = self.config.html_experimental_html5_writer
if use_html5_writer is None:
use_html5_writer = self.default_html5_translator
if use_html5_writer and html5_ready:
return HTML5Translator
else:
return HTMLTranslator
    def get_outdated_docs(self):
        # type: () -> Iterator[unicode]
        """Yield docnames that must be (re)written.

        A change in any html-prefixed config value or in the active tags
        (detected via the hashes stored in ``.buildinfo``) forces a full
        rebuild; otherwise a document is outdated when its source (or the
        newest template) is newer than its output file.
        """
        cfgdict = dict((confval.name, confval.value) for confval in self.config.filter('html'))
        self.config_hash = get_stable_hash(cfgdict)
        self.tags_hash = get_stable_hash(sorted(self.tags))
        old_config_hash = old_tags_hash = ''
        try:
            with open(path.join(self.outdir, '.buildinfo')) as fp:
                version = fp.readline()
                if version.rstrip() != '# Sphinx build info version 1':
                    raise ValueError
                fp.readline()  # skip commentary
                cfg, old_config_hash = fp.readline().strip().split(': ')
                if cfg != 'config':
                    raise ValueError
                tag, old_tags_hash = fp.readline().strip().split(': ')
                if tag != 'tags':
                    raise ValueError
        except ValueError:
            logger.warning('unsupported build info format in %r, building all',
                           path.join(self.outdir, '.buildinfo'))
        except Exception:
            # missing/unreadable .buildinfo: hashes stay '', forcing a rebuild
            pass
        if old_config_hash != self.config_hash or \
           old_tags_hash != self.tags_hash:
            # config or tags changed: everything is outdated
            for docname in self.env.found_docs:
                yield docname
            return
        if self.templates:
            template_mtime = self.templates.newest_template_mtime()
        else:
            template_mtime = 0
        for docname in self.env.found_docs:
            if docname not in self.env.all_docs:
                # never built before
                yield docname
                continue
            targetname = self.get_outfilename(docname)
            try:
                targetmtime = path.getmtime(targetname)
            except Exception:
                targetmtime = 0
            try:
                srcmtime = max(path.getmtime(self.env.doc2path(docname)),
                               template_mtime)
                if srcmtime > targetmtime:
                    yield docname
            except EnvironmentError:
                # source doesn't exist anymore
                pass
    def get_asset_paths(self):
        # type: () -> List[unicode]
        """Paths (``html_extra_path``) whose files are copied verbatim into
        the output directory."""
        return self.config.html_extra_path
    def render_partial(self, node):
        # type: (nodes.Nodes) -> Dict[unicode, unicode]
        """Utility: Render a lone doctree node.

        Returns the docutils writer parts dict (at least ``'fragment'``);
        a None node yields an empty fragment.
        """
        if node is None:
            return {'fragment': ''}
        doc = new_document(b'<partial node>')
        doc.append(node)
        if self._publisher is None:
            # create the docutils publisher lazily once and cache it;
            # reader/writer/settings are refreshed on every call below
            self._publisher = Publisher(
                source_class = DocTreeInput,
                destination_class=StringOutput)
            self._publisher.set_components('standalone',
                                           'restructuredtext', 'pseudoxml')
        pub = self._publisher
        pub.reader = DoctreeReader()
        pub.writer = HTMLWriter(self)
        pub.process_programmatic_settings(
            None, {'output_encoding': 'unicode'}, None)
        pub.set_source(doc, None)
        pub.set_destination(None, None)
        pub.publish()
        return pub.writer.parts
    def prepare_writing(self, docnames):
        # type: (Iterable[unicode]) -> None
        """Set up everything needed before individual pages are written:
        the search indexer, the docutils writer/settings, the collected
        domain indices and the global template context."""
        # create the search indexer
        self.indexer = None
        if self.search:
            from sphinx.search import IndexBuilder, languages
            lang = self.config.html_search_language or self.config.language
            if not lang or lang not in languages:
                lang = 'en'
            self.indexer = IndexBuilder(self.env, lang,
                                        self.config.html_search_options,
                                        self.config.html_search_scorer)
            self.load_indexer(docnames)
        self.docwriter = HTMLWriter(self)
        self.docsettings = OptionParser(
            defaults=self.env.settings,
            components=(self.docwriter,),
            read_config_files=True).get_default_values()
        self.docsettings.compact_lists = bool(self.config.html_compact_lists)
        # determine the additional indices to include
        self.domain_indices = []
        # html_domain_indices can be False/True or a list of index names
        indices_config = self.config.html_domain_indices
        if indices_config:
            for domain_name in sorted(self.env.domains):
                domain = None  # type: Domain
                domain = self.env.domains[domain_name]
                for indexcls in domain.indices:
                    indexname = '%s-%s' % (domain.name, indexcls.name)  # type: unicode
                    if isinstance(indices_config, list):
                        if indexname not in indices_config:
                            continue
                    content, collapse = indexcls(domain).generate()
                    if content:
                        # only keep indices that actually have entries
                        self.domain_indices.append(
                            (indexname, indexcls, content, collapse))
        # format the "last updated on" string, only once is enough since it
        # typically doesn't include the time of day
        lufmt = self.config.html_last_updated_fmt
        if lufmt is not None:
            self.last_updated = format_date(lufmt or _('%b %d, %Y'),  # type: ignore
                                            language=self.config.language)
        else:
            self.last_updated = None
        logo = self.config.html_logo and \
            path.basename(self.config.html_logo) or ''
        favicon = self.config.html_favicon and \
            path.basename(self.config.html_favicon) or ''
        if not isinstance(self.config.html_use_opensearch, string_types):
            logger.warning('html_use_opensearch config value must now be a string')
        self.relations = self.env.collect_relations()
        # "rellinks" (relation links) for the <link rel=...> header entries
        rellinks = []  # type: List[Tuple[unicode, unicode, unicode, unicode]]
        if self.use_index:
            rellinks.append(('genindex', _('General Index'), 'I', _('index')))
        for indexname, indexcls, content, collapse in self.domain_indices:
            # if it has a short name
            if indexcls.shortname:
                rellinks.append((indexname, indexcls.localname,
                                 '', indexcls.shortname))
        if self.config.html_style is not None:
            stylename = self.config.html_style
        elif self.theme:
            stylename = self.theme.get_config('theme', 'stylesheet')
        else:
            stylename = 'default.css'
        # context available to every rendered template
        self.globalcontext = dict(
            embedded = self.embedded,
            project = self.config.project,
            release = return_codes_re.sub('', self.config.release),
            version = self.config.version,
            last_updated = self.last_updated,
            copyright = self.config.copyright,
            master_doc = self.config.master_doc,
            use_opensearch = self.config.html_use_opensearch,
            docstitle = self.config.html_title,
            shorttitle = self.config.html_short_title,
            show_copyright = self.config.html_show_copyright,
            show_sphinx = self.config.html_show_sphinx,
            has_source = self.config.html_copy_source,
            show_source = self.config.html_show_sourcelink,
            sourcelink_suffix = self.config.html_sourcelink_suffix,
            file_suffix = self.out_suffix,
            script_files = self.script_files,
            language = self.config.language,
            css_files = self.css_files,
            sphinx_version = __display_version__,
            style = stylename,
            rellinks = rellinks,
            builder = self.name,
            parents = [],
            logo = logo,
            favicon = favicon,
            html5_doctype = self.config.html_experimental_html5_writer and html5_ready,
        )  # type: Dict[unicode, Any]
        if self.theme:
            self.globalcontext.update(
                ('theme_' + key, val) for (key, val) in
                iteritems(self.theme.get_options(self.theme_options)))
        self.globalcontext.update(self.config.html_context)
    def get_doc_context(self, docname, body, metatags):
        # type: (unicode, unicode, Dict) -> Dict[unicode, Any]
        """Collect items for the template context of a page."""
        # find out relations; self.relations maps a docname to
        # [parent, previous, next] docnames (entries may be None)
        prev = next = None
        parents = []
        rellinks = self.globalcontext['rellinks'][:]
        related = self.relations.get(docname)
        titles = self.env.titles
        if related and related[2]:
            try:
                next = {
                    'link': self.get_relative_uri(docname, related[2]),
                    'title': self.render_partial(titles[related[2]])['title']
                }
                rellinks.append((related[2], next['title'], 'N', _('next')))
            except KeyError:
                next = None
        if related and related[1]:
            try:
                prev = {
                    'link': self.get_relative_uri(docname, related[1]),
                    'title': self.render_partial(titles[related[1]])['title']
                }
                rellinks.append((related[1], prev['title'], 'P', _('previous')))
            except KeyError:
                # the relation is (somehow) not in the TOC tree, handle
                # that gracefully
                prev = None
        # walk up through the parents to build the breadcrumb chain
        while related and related[0]:
            try:
                parents.append(
                    {'link': self.get_relative_uri(docname, related[0]),
                     'title': self.render_partial(titles[related[0]])['title']})
            except KeyError:
                pass
            related = self.relations.get(related[0])
        if parents:
            # remove link to the master file; we have a generic
            # "back to index" link already
            parents.pop()
        parents.reverse()
        # title rendered as HTML
        title = self.env.longtitles.get(docname)
        title = title and self.render_partial(title)['title'] or ''
        # Suffix for the document
        source_suffix = path.splitext(self.env.doc2path(docname))[1]
        # the name for the copied source
        if self.config.html_copy_source:
            sourcename = docname + source_suffix
            if source_suffix != self.config.html_sourcelink_suffix:
                sourcename += self.config.html_sourcelink_suffix
        else:
            sourcename = ''
        # metadata for the document
        meta = self.env.metadata.get(docname)
        # local TOC and global TOC tree
        self_toc = TocTree(self.env).get_toc_for(docname, self)
        toc = self.render_partial(self_toc)['fragment']
        return dict(
            parents = parents,
            prev = prev,
            next = next,
            title = title,
            meta = meta,
            body = body,
            metatags = metatags,
            rellinks = rellinks,
            sourcename = sourcename,
            toc = toc,
            # only display a TOC if there's more than one item to show
            display_toc = (self.env.toc_num_entries[docname] > 1),
            page_source_suffix = source_suffix,
        )
    def write_doc(self, docname, doctree):
        # type: (unicode, nodes.Node) -> None
        """Render *doctree* to an HTML fragment and emit the full page
        through handle_page()."""
        destination = StringOutput(encoding='utf-8')
        doctree.settings = self.docsettings
        # per-document state consulted by the translator during writing
        self.secnumbers = self.env.toc_secnumbers.get(docname, {})
        self.fignumbers = self.env.toc_fignumbers.get(docname, {})
        self.imgpath = relative_uri(self.get_target_uri(docname), '_images')
        self.dlpath = relative_uri(self.get_target_uri(docname), '_downloads')
        self.current_docname = docname
        self.docwriter.write(doctree, destination)
        self.docwriter.assemble_parts()
        body = self.docwriter.parts['fragment']
        metatags = self.docwriter.clean_meta
        ctx = self.get_doc_context(docname, body, metatags)
        self.handle_page(docname, ctx, event_arg=doctree)
def write_doc_serialized(self, docname, doctree):
# type: (unicode, nodes.Node) -> None
self.imgpath = relative_uri(self.get_target_uri(docname), self.imagedir)
self.post_process_images(doctree)
title = self.env.longtitles.get(docname)
title = title and self.render_partial(title)['title'] or ''
self.index_page(docname, doctree, title)
def finish(self):
# type: () -> None
self.finish_tasks.add_task(self.gen_indices)
self.finish_tasks.add_task(self.gen_additional_pages)
self.finish_tasks.add_task(self.copy_image_files)
self.finish_tasks.add_task(self.copy_download_files)
self.finish_tasks.add_task(self.copy_static_files)
self.finish_tasks.add_task(self.copy_extra_files)
self.finish_tasks.add_task(self.write_buildinfo)
# dump the search index
self.handle_finish()
    def gen_indices(self):
        # type: () -> None
        """Write the general index (if enabled) and all domain indices."""
        logger.info(bold('generating indices...'), nonl=1)
        # the global general index
        if self.use_index:
            self.write_genindex()
        # the global domain-specific indices
        self.write_domain_indices()
        logger.info('')
    def gen_additional_pages(self):
        # type: () -> None
        """Write pages contributed by extensions, ``html_additional_pages``,
        the search page and the opensearch description."""
        # pages from extensions
        for pagelist in self.app.emit('html-collect-pages'):
            for pagename, context, template in pagelist:
                self.handle_page(pagename, context, template)
        logger.info(bold('writing additional pages...'), nonl=1)
        # additional pages from conf.py
        for pagename, template in self.config.html_additional_pages.items():
            logger.info(' ' + pagename, nonl=1)
            self.handle_page(pagename, {}, template)
        # the search page
        if self.search:
            logger.info(' search', nonl=1)
            self.handle_page('search', {}, 'search.html')
        # the opensearch xml file
        if self.config.html_use_opensearch and self.search:
            logger.info(' opensearch', nonl=1)
            fn = path.join(self.outdir, '_static', 'opensearch.xml')
            self.handle_page('opensearch', {}, 'opensearch.xml', outfilename=fn)
        logger.info('')
    def write_genindex(self):
        # type: () -> None
        """Write the general index, either as a single page or split into
        one page per first letter (``html_split_index``)."""
        # the total count of lines for each index letter, used to distribute
        # the entries into two columns
        genindex = IndexEntries(self.env).create_index(self)
        indexcounts = []
        for _k, entries in genindex:
            indexcounts.append(sum(1 + len(subitems)
                                   for _, (_, subitems, _) in entries))
        genindexcontext = dict(
            genindexentries = genindex,
            genindexcounts = indexcounts,
            split_index = self.config.html_split_index,
        )
        logger.info(' genindex', nonl=1)
        if self.config.html_split_index:
            # summary page plus an "all" page and one page per letter
            self.handle_page('genindex', genindexcontext,
                             'genindex-split.html')
            self.handle_page('genindex-all', genindexcontext,
                             'genindex.html')
            for (key, entries), count in zip(genindex, indexcounts):
                ctx = {'key': key, 'entries': entries, 'count': count,
                       'genindexentries': genindex}
                self.handle_page('genindex-' + key, ctx,
                                 'genindex-single.html')
        else:
            self.handle_page('genindex', genindexcontext, 'genindex.html')
def write_domain_indices(self):
# type: () -> None
for indexname, indexcls, content, collapse in self.domain_indices:
indexcontext = dict(
indextitle = indexcls.localname,
content = content,
collapse_index = collapse,
)
logger.info(' ' + indexname, nonl=1)
self.handle_page(indexname, indexcontext, 'domainindex.html')
    def copy_image_files(self):
        # type: () -> None
        """Copy all referenced images into ``<outdir>/<imagedir>``,
        warning (not failing) on unreadable files."""
        if self.images:
            # show the original (source) image URI in the progress output
            stringify_func = ImageAdapter(self.app.env).get_original_image_uri
            ensuredir(path.join(self.outdir, self.imagedir))
            for src in status_iterator(self.images, 'copying images... ', "brown",
                                       len(self.images), self.app.verbosity,
                                       stringify_func=stringify_func):
                dest = self.images[src]
                try:
                    copyfile(path.join(self.srcdir, src),
                             path.join(self.outdir, self.imagedir, dest))
                except Exception as err:
                    logger.warning('cannot copy image file %r: %s',
                                   path.join(self.srcdir, src), err)
    def copy_download_files(self):
        # type: () -> None
        """Copy files referenced via the download role into
        ``<outdir>/_downloads``, warning (not failing) on errors."""
        def to_relpath(f):
            # type: (unicode) -> unicode
            # display paths relative to the source dir in progress output
            return relative_path(self.srcdir, f)
        # copy downloadable files
        if self.env.dlfiles:
            ensuredir(path.join(self.outdir, '_downloads'))
            for src in status_iterator(self.env.dlfiles, 'copying downloadable files... ',
                                       "brown", len(self.env.dlfiles), self.app.verbosity,
                                       stringify_func=to_relpath):
                dest = self.env.dlfiles[src][1]
                try:
                    copyfile(path.join(self.srcdir, src),
                             path.join(self.outdir, '_downloads', dest))
                except Exception as err:
                    logger.warning('cannot copy downloadable file %r: %s',
                                   path.join(self.srcdir, src), err)
    def copy_static_files(self):
        # type: () -> None
        """Populate the ``_static`` output directory.

        Copies, in order: the generated pygments stylesheet, the JS
        translation catalog, the search stemmer, theme-supplied static files
        (lowest-priority theme first), user ``html_static_path`` entries,
        and finally the logo/favicon.  Later copies overwrite earlier ones,
        so user files take precedence over theme files.
        """
        # copy static files
        logger.info(bold('copying static files... '), nonl=True)
        ensuredir(path.join(self.outdir, '_static'))
        # first, create pygments style file
        with open(path.join(self.outdir, '_static', 'pygments.css'), 'w') as f:
            f.write(self.highlighter.get_stylesheet())  # type: ignore
        # then, copy translations JavaScript file
        if self.config.language is not None:
            jsfile = self._get_translations_js()
            if jsfile:
                copyfile(jsfile, path.join(self.outdir, '_static',
                                           'translations.js'))
        # copy non-minified stemmer JavaScript file
        if self.indexer is not None:
            jsfile = self.indexer.get_js_stemmer_rawcode()
            if jsfile:
                copyfile(jsfile, path.join(self.outdir, '_static', '_stemmer.js'))
        ctx = self.globalcontext.copy()
        # add context items for search function used in searchtools.js_t
        if self.indexer is not None:
            ctx.update(self.indexer.context_for_searchtool())
        # then, copy over theme-supplied static files
        # (reversed so that the active theme's files win over its parents')
        if self.theme:
            for theme_path in self.theme.get_theme_dirs()[::-1]:
                entry = path.join(theme_path, 'static')
                copy_asset(entry, path.join(self.outdir, '_static'), excluded=DOTFILES,
                           context=ctx, renderer=self.templates)
        # then, copy over all user-supplied static files
        excluded = Matcher(self.config.exclude_patterns + ["**/.*"])
        for static_path in self.config.html_static_path:
            entry = path.join(self.confdir, static_path)
            if not path.exists(entry):
                logger.warning('html_static_path entry %r does not exist', entry)
                continue
            copy_asset(entry, path.join(self.outdir, '_static'), excluded,
                       context=ctx, renderer=self.templates)
        # copy logo and favicon files if not already in static path
        if self.config.html_logo:
            logobase = path.basename(self.config.html_logo)
            logotarget = path.join(self.outdir, '_static', logobase)
            if not path.isfile(path.join(self.confdir, self.config.html_logo)):
                logger.warning('logo file %r does not exist', self.config.html_logo)
            elif not path.isfile(logotarget):
                copyfile(path.join(self.confdir, self.config.html_logo),
                         logotarget)
        if self.config.html_favicon:
            iconbase = path.basename(self.config.html_favicon)
            icontarget = path.join(self.outdir, '_static', iconbase)
            if not path.isfile(path.join(self.confdir, self.config.html_favicon)):
                logger.warning('favicon file %r does not exist', self.config.html_favicon)
            elif not path.isfile(icontarget):
                copyfile(path.join(self.confdir, self.config.html_favicon),
                         icontarget)
        logger.info('done')
def copy_extra_files(self):
# type: () -> None
# copy html_extra_path files
logger.info(bold('copying extra files... '), nonl=True)
excluded = Matcher(self.config.exclude_patterns)
for extra_path in self.config.html_extra_path:
entry = path.join(self.confdir, extra_path)
if not path.exists(entry):
logger.warning('html_extra_path entry %r does not exist', entry)
continue
copy_asset(entry, self.outdir, excluded)
logger.info('done')
def write_buildinfo(self):
# type: () -> None
# write build info file
try:
with open(path.join(self.outdir, '.buildinfo'), 'w') as fp:
fp.write('# Sphinx build info version 1\n'
'# This file hashes the configuration used when building'
' these files. When it is not found, a full rebuild will'
' be done.\nconfig: %s\ntags: %s\n' %
(self.config_hash, self.tags_hash))
except IOError as exc:
logger.warning('Failed to write build info file: %r', exc)
def cleanup(self):
# type: () -> None
# clean up theme stuff
if self.theme:
self.theme.cleanup()
def post_process_images(self, doctree):
# type: (nodes.Node) -> None
"""Pick the best candidate for an image and link down-scaled images to
their high res version.
"""
Builder.post_process_images(self, doctree)
if self.config.html_scaled_image_link and self.html_scaled_image_link:
for node in doctree.traverse(nodes.image):
scale_keys = ('scale', 'width', 'height')
if not any((key in node) for key in scale_keys) or \
isinstance(node.parent, nodes.reference):
# docutils does unfortunately not preserve the
# ``target`` attribute on images, so we need to check
# the parent node here.
continue
uri = node['uri']
reference = nodes.reference('', '', internal=True)
if uri in self.images:
reference['refuri'] = posixpath.join(self.imgpath,
self.images[uri])
else:
reference['refuri'] = uri
node.replace_self(reference)
reference.append(node)
    def load_indexer(self, docnames):
        # type: (Iterable[unicode]) -> None
        """Load the previous search index from disk and drop entries for the
        documents that are about to be rebuilt (*docnames*)."""
        keep = set(self.env.all_docs) - set(docnames)
        try:
            searchindexfn = path.join(self.outdir, self.searchindex_filename)
            # the indexer decides whether the on-disk format is text or binary
            if self.indexer_dumps_unicode:
                f = codecs.open(searchindexfn, 'r', encoding='utf-8')  # type: ignore
            else:
                f = open(searchindexfn, 'rb')  # type: ignore
            with f:
                self.indexer.load(f, self.indexer_format)
        except (IOError, OSError, ValueError):
            # a missing or corrupt index is only a problem if some documents
            # are *not* being rebuilt: their entries are then lost
            if keep:
                logger.warning('search index couldn\'t be loaded, but not all '
                               'documents will be built: the index will be '
                               'incomplete.')
        # delete all entries for files that will be rebuilt
        self.indexer.prune(keep)
    def index_page(self, pagename, doctree, title):
        # type: (unicode, nodes.Node, unicode) -> None
        """Feed one page into the search indexer; untitled pages and builds
        without an indexer are skipped."""
        # only index pages with title
        if self.indexer is not None and title:
            filename = self.env.doc2path(pagename, base=None)
            try:
                self.indexer.feed(pagename, filename, title, doctree)
            except TypeError:
                # fallback for old search-adapters whose feed() takes only
                # (pagename, title, doctree)
                self.indexer.feed(pagename, title, doctree)  # type: ignore
def _get_local_toctree(self, docname, collapse=True, **kwds):
# type: (unicode, bool, Any) -> unicode
if 'includehidden' not in kwds:
kwds['includehidden'] = False
return self.render_partial(TocTree(self.env).get_toctree_for(
docname, self, collapse, **kwds))['fragment']
def get_outfilename(self, pagename):
# type: (unicode) -> unicode
return path.join(self.outdir, os_path(pagename) + self.out_suffix)
    def add_sidebars(self, pagename, ctx):
        # type: (unicode, Dict) -> None
        """Resolve ``html_sidebars`` for *pagename* and store the result in
        the template context.

        The most specific matching pattern wins: an exact (wildcard-free)
        pattern beats a wildcard one; two matching wildcard patterns produce
        a warning.
        """
        def has_wildcard(pattern):
            # type: (unicode) -> bool
            return any(char in pattern for char in '*?[')
        sidebars = None
        matched = None
        customsidebar = None
        for pattern, patsidebars in iteritems(self.config.html_sidebars):
            if patmatch(pagename, pattern):
                if matched:
                    if has_wildcard(pattern):
                        # warn if both patterns contain wildcards
                        if has_wildcard(matched):
                            logger.warning('page %s matches two patterns in '
                                           'html_sidebars: %r and %r',
                                           pagename, matched, pattern)
                        # else the already matched pattern is more specific
                        # than the present one, because it contains no wildcard
                        continue
                matched = pattern
                sidebars = patsidebars
        if sidebars is None:
            # keep defaults
            pass
        elif isinstance(sidebars, string_types):
            # 0.x compatible mode: insert custom sidebar before searchbox
            customsidebar = sidebars
            sidebars = None
        ctx['sidebars'] = sidebars
        ctx['customsidebar'] = customsidebar
    # --------- these are overwritten by the serialization builder
    def get_target_uri(self, docname, typ=None):
        # type: (unicode, unicode) -> unicode
        """Return the URI used to link to *docname*: the name plus the
        configured link suffix (normally the same as the file suffix)."""
        return docname + self.link_suffix
    def handle_page(self, pagename, addctx, templatename='page.html',
                    outfilename=None, event_arg=None):
        # type: (unicode, Dict, unicode, unicode, Any) -> None
        """Render a single page through the template engine and write it out.

        *addctx* is merged over a copy of the global template context.  The
        ``html-page-context`` event may replace the template name.  Rendering
        or write failures are logged as warnings, not raised.
        """
        ctx = self.globalcontext.copy()
        # current_page_name is backwards compatibility
        ctx['pagename'] = ctx['current_page_name'] = pagename
        ctx['encoding'] = self.config.html_output_encoding
        default_baseuri = self.get_target_uri(pagename)
        # in the singlehtml builder, default_baseuri still contains an #anchor
        # part, which relative_uri doesn't really like...
        default_baseuri = default_baseuri.rsplit('#', 1)[0]
        def pathto(otheruri, resource=False, baseuri=default_baseuri):
            # type: (unicode, bool, unicode) -> unicode
            """Template helper: relative URI from this page to *otheruri*."""
            if resource and '://' in otheruri:
                # allow non-local resources given by scheme
                return otheruri
            elif not resource:
                otheruri = self.get_target_uri(otheruri)
            uri = relative_uri(baseuri, otheruri) or '#'
            if uri == '#' and not self.allow_sharp_as_current_path:
                uri = baseuri
            return uri
        ctx['pathto'] = pathto
        def hasdoc(name):
            # type: (unicode) -> bool
            """Template helper: does a document or special page *name* exist?"""
            if name in self.env.all_docs:
                return True
            elif name == 'search' and self.search:
                return True
            elif name == 'genindex' and self.get_builder_config('use_index', 'html'):
                return True
            return False
        ctx['hasdoc'] = hasdoc
        def warn(*args, **kwargs):
            # type: (Any, Any) -> unicode
            """Simple warn() wrapper for themes."""
            self.warn(*args, **kwargs)
            return ''  # return empty string
        ctx['warn'] = warn
        ctx['toctree'] = lambda **kw: self._get_local_toctree(pagename, **kw)
        self.add_sidebars(pagename, ctx)
        ctx.update(addctx)
        self.update_page_context(pagename, templatename, ctx, event_arg)
        # extensions may return a replacement template name from the event
        newtmpl = self.app.emit_firstresult('html-page-context', pagename,
                                            templatename, ctx, event_arg)
        if newtmpl:
            templatename = newtmpl
        try:
            output = self.templates.render(templatename, ctx)
        except UnicodeError:
            logger.warning("a Unicode error occurred when rendering the page %s. "
                           "Please make sure all config values that contain "
                           "non-ASCII content are Unicode strings.", pagename)
            return
        if not outfilename:
            outfilename = self.get_outfilename(pagename)
        # outfilename's path is in general different from self.outdir
        ensuredir(path.dirname(outfilename))
        try:
            with codecs.open(outfilename, 'w', ctx['encoding'], 'xmlcharrefreplace') as f:  # type: ignore # NOQA
                f.write(output)
        except (IOError, OSError) as err:
            logger.warning("error writing file %s: %s", outfilename, err)
        if self.copysource and ctx.get('sourcename'):
            # copy the source file for the "show source" link
            source_name = path.join(self.outdir, '_sources',
                                    os_path(ctx['sourcename']))
            ensuredir(path.dirname(source_name))
            copyfile(self.env.doc2path(pagename), source_name)
    def update_page_context(self, pagename, templatename, ctx, event_arg):
        # type: (unicode, unicode, Dict, Any) -> None
        """Extension hook: subclasses may mutate *ctx* before rendering."""
        pass
def handle_finish(self):
# type: () -> None
if self.indexer:
self.finish_tasks.add_task(self.dump_search_index)
self.finish_tasks.add_task(self.dump_inventory)
def dump_inventory(self):
# type: () -> None
logger.info(bold('dumping object inventory... '), nonl=True)
InventoryFile.dump(path.join(self.outdir, INVENTORY_FILENAME), self.env, self)
logger.info('done')
def dump_search_index(self):
# type: () -> None
logger.info(
bold('dumping search index in %s ... ' % self.indexer.label()),
nonl=True)
self.indexer.prune(self.env.all_docs)
searchindexfn = path.join(self.outdir, self.searchindex_filename)
# first write to a temporary file, so that if dumping fails,
# the existing index won't be overwritten
if self.indexer_dumps_unicode:
f = codecs.open(searchindexfn + '.tmp', 'w', encoding='utf-8') # type: ignore
else:
f = open(searchindexfn + '.tmp', 'wb') # type: ignore
with f:
self.indexer.dump(f, self.indexer_format)
movefile(searchindexfn + '.tmp', searchindexfn)
logger.info('done')
class DirectoryHTMLBuilder(StandaloneHTMLBuilder):
    """
    A StandaloneHTMLBuilder that writes every page as ``index.html`` inside
    a directory named after the page, so generated URLs carry no ``.html``
    suffix.
    """
    name = 'dirhtml'

    def get_target_uri(self, docname, typ=None):
        # type: (unicode, unicode) -> unicode
        # 'index' maps to the site root; 'foo/index' maps to 'foo/';
        # everything else gets a trailing separator.
        if docname == 'index':
            return ''
        if docname.endswith(SEP + 'index'):
            return docname[:-5]  # strip the 'index' part, keep the separator
        return docname + SEP

    def get_outfilename(self, pagename):
        # type: (unicode) -> unicode
        # Index pages keep their own file name; every other page becomes
        # <pagename>/index<suffix>.
        if pagename == 'index' or pagename.endswith(SEP + 'index'):
            return path.join(self.outdir, os_path(pagename) + self.out_suffix)
        return path.join(self.outdir, os_path(pagename),
                         'index' + self.out_suffix)

    def prepare_writing(self, docnames):
        # type: (Iterable[unicode]) -> None
        StandaloneHTMLBuilder.prepare_writing(self, docnames)
        # tell the search template not to append the usual suffix
        self.globalcontext['no_search_suffix'] = True
class SingleFileHTMLBuilder(StandaloneHTMLBuilder):
    """
    A StandaloneHTMLBuilder subclass that puts the whole document tree on one
    HTML page.
    """
    name = 'singlehtml'
    copysource = False
    def get_outdated_docs(self):  # type: ignore
        # type: () -> Union[unicode, List[unicode]]
        # the single page depends on every document, so always rebuild all
        return 'all documents'
    def get_target_uri(self, docname, typ=None):
        # type: (unicode, unicode) -> unicode
        """Return a URI pointing into the single output page."""
        if docname in self.env.all_docs:
            # all references are on the same page...
            return self.config.master_doc + self.out_suffix + \
                '#document-' + docname
        else:
            # chances are this is a html_additional_page
            return docname + self.out_suffix
    def get_relative_uri(self, from_, to, typ=None):
        # type: (unicode, unicode, unicode) -> unicode
        # ignore source
        return self.get_target_uri(to, typ)
    def fix_refuris(self, tree):
        # type: (nodes.Node) -> None
        """Rewrite references of the form ``page#anchor#anchor`` (produced by
        inlining all documents) to point at the single output file."""
        # fix refuris with double anchor
        fname = self.config.master_doc + self.out_suffix
        for refnode in tree.traverse(nodes.reference):
            if 'refuri' not in refnode:
                continue
            refuri = refnode['refuri']
            hashindex = refuri.find('#')
            if hashindex < 0:
                continue
            # look for a *second* '#': only those refuris need fixing
            hashindex = refuri.find('#', hashindex + 1)
            if hashindex >= 0:
                refnode['refuri'] = fname + refuri[hashindex:]
    def _get_local_toctree(self, docname, collapse=True, **kwds):
        # type: (unicode, bool, Any) -> unicode
        """Render the toctree for *docname*, with refuris fixed up for the
        single-page layout."""
        if 'includehidden' not in kwds:
            kwds['includehidden'] = False
        toctree = TocTree(self.env).get_toctree_for(docname, self, collapse, **kwds)
        if toctree is not None:
            self.fix_refuris(toctree)
        return self.render_partial(toctree)['fragment']
    def assemble_doctree(self):
        # type: () -> nodes.Node
        """Inline every toctree into the master document and resolve all
        references against the combined tree."""
        master = self.config.master_doc
        tree = self.env.get_doctree(master)
        tree = inline_all_toctrees(self, set(), master, tree, darkgreen, [master])
        tree['docname'] = master
        self.env.resolve_references(tree, master, self)
        self.fix_refuris(tree)
        return tree
    def assemble_toc_secnumbers(self):
        # type: () -> Dict[unicode, Dict[unicode, Tuple[int, ...]]]
        # Assemble toc_secnumbers to resolve section numbers on SingleHTML.
        # Merge all secnumbers to single secnumber.
        #
        # Note: current Sphinx has refid conflicts in singlehtml mode.
        # To avoid the problem, it replaces the key of secnumbers with a
        # "docname/refid" alias.
        #
        # There are related codes in inline_all_toctrees() and
        # HTMLTranslator#add_secnumber().
        new_secnumbers = {}  # type: Dict[unicode, Tuple[int, ...]]
        for docname, secnums in iteritems(self.env.toc_secnumbers):
            for id, secnum in iteritems(secnums):
                alias = "%s/%s" % (docname, id)
                new_secnumbers[alias] = secnum
        return {self.config.master_doc: new_secnumbers}
    def assemble_toc_fignumbers(self):
        # type: () -> Dict[unicode, Dict[unicode, Dict[unicode, Tuple[int, ...]]]]  # NOQA
        # Assemble toc_fignumbers to resolve figure numbers on SingleHTML.
        # Merge all fignumbers to single fignumber.
        #
        # Note: current Sphinx has refid conflicts in singlehtml mode.
        # To avoid the problem, it replaces the key of fignumbers with a
        # "docname/figtype" alias.
        #
        # There are related codes in inline_all_toctrees() and
        # HTMLTranslator#add_fignumber().
        new_fignumbers = {}  # type: Dict[unicode, Dict[unicode, Tuple[int, ...]]]
        # {u'foo': {'figure': {'id2': (2,), 'id1': (1,)}}, u'bar': {'figure': {'id1': (3,)}}}
        for docname, fignumlist in iteritems(self.env.toc_fignumbers):
            for figtype, fignums in iteritems(fignumlist):
                alias = "%s/%s" % (docname, figtype)
                new_fignumbers.setdefault(alias, {})
                for id, fignum in iteritems(fignums):
                    new_fignumbers[alias][id] = fignum
        return {self.config.master_doc: new_fignumbers}
    def get_doc_context(self, docname, body, metatags):
        # type: (unicode, unicode, Dict) -> Dict
        """Build the template context for the single page (no prev/next
        relation links, full toctree of the master document)."""
        # no relation links...
        toc = TocTree(self.env).get_toctree_for(self.config.master_doc,
                                                self, False)
        # if there is no toctree, toc is None
        if toc:
            self.fix_refuris(toc)
            toc = self.render_partial(toc)['fragment']
            display_toc = True
        else:
            toc = ''
            display_toc = False
        return dict(
            parents = [],
            prev = None,
            next = None,
            docstitle = None,
            title = self.config.html_title,
            meta = None,
            body = body,
            metatags = metatags,
            rellinks = [],
            sourcename = '',
            toc = toc,
            display_toc = display_toc,
        )
    def write(self, *ignored):
        # type: (Any) -> None
        """Assemble all documents into one tree and write the single page."""
        docnames = self.env.all_docs
        logger.info(bold('preparing documents... '), nonl=True)
        self.prepare_writing(docnames)
        logger.info('done')
        logger.info(bold('assembling single document... '), nonl=True)
        doctree = self.assemble_doctree()
        self.env.toc_secnumbers = self.assemble_toc_secnumbers()
        self.env.toc_fignumbers = self.assemble_toc_fignumbers()
        logger.info('')
        logger.info(bold('writing... '), nonl=True)
        self.write_doc_serialized(self.config.master_doc, doctree)
        self.write_doc(self.config.master_doc, doctree)
        logger.info('done')
    def finish(self):
        # type: () -> None
        """Write auxiliary output (additional pages, opensearch, static and
        extra files, build info, inventory)."""
        # no indices or search pages are supported
        logger.info(bold('writing additional files...'), nonl=1)
        # additional pages from conf.py
        for pagename, template in self.config.html_additional_pages.items():
            logger.info(' ' + pagename, nonl=1)
            self.handle_page(pagename, {}, template)
        if self.config.html_use_opensearch:
            logger.info(' opensearch', nonl=1)
            fn = path.join(self.outdir, '_static', 'opensearch.xml')
            self.handle_page('opensearch', {}, 'opensearch.xml', outfilename=fn)
        logger.info('')
        self.copy_image_files()
        self.copy_download_files()
        self.copy_static_files()
        self.copy_extra_files()
        self.write_buildinfo()
        self.dump_inventory()
class SerializingHTMLBuilder(StandaloneHTMLBuilder):
    """
    An abstract builder that serializes the generated HTML.
    """
    #: the serializing implementation to use.  Set this to a module that
    #: implements a `dump`, `load`, `dumps` and `loads` functions
    #: (pickle, simplejson etc.)
    implementation = None  # type: Any
    implementation_dumps_unicode = False
    #: additional arguments for dump()
    additional_dump_args = ()  # type: Tuple
    #: the filename for the global context file
    globalcontext_filename = None  # type: unicode
    supported_image_types = ['image/svg+xml', 'image/png',
                             'image/gif', 'image/jpeg']
    def init(self):
        # type: () -> None
        """Initialize without any theme: serialized output is rendered by a
        consuming web application, not by Sphinx templates."""
        self.config_hash = ''
        self.tags_hash = ''
        self.imagedir = '_images'
        self.current_docname = None
        self.theme = None       # no theme necessary
        self.templates = None   # no template bridge necessary
        self.init_templates()
        self.init_highlighter()
        self.use_index = self.get_builder_config('use_index', 'html')
    def get_target_uri(self, docname, typ=None):
        # type: (unicode, unicode) -> unicode
        """Directory-style URIs, mirroring the dirhtml builder."""
        if docname == 'index':
            return ''
        if docname.endswith(SEP + 'index'):
            return docname[:-5]  # up to sep
        return docname + SEP
    def dump_context(self, context, filename):
        # type: (Dict, unicode) -> None
        """Serialize *context* to *filename* using the configured
        implementation (text or binary mode as the implementation needs)."""
        if self.implementation_dumps_unicode:
            f = codecs.open(filename, 'w', encoding='utf-8')  # type: ignore
        else:
            f = open(filename, 'wb')  # type: ignore
        with f:
            self.implementation.dump(context, f, *self.additional_dump_args)
    def handle_page(self, pagename, ctx, templatename='page.html',
                    outfilename=None, event_arg=None):
        # type: (unicode, Dict, unicode, unicode, Any) -> None
        """Serialize the page context instead of rendering a template."""
        ctx['current_page_name'] = pagename
        self.add_sidebars(pagename, ctx)
        if not outfilename:
            outfilename = path.join(self.outdir,
                                    os_path(pagename) + self.out_suffix)
        # we're not taking the return value here, since no template is
        # actually rendered
        self.app.emit('html-page-context', pagename, templatename, ctx, event_arg)
        ensuredir(path.dirname(outfilename))
        self.dump_context(ctx, outfilename)
        # if there is a source file, copy the source file for the
        # "show source" link
        if ctx.get('sourcename'):
            source_name = path.join(self.outdir, '_sources',
                                    os_path(ctx['sourcename']))
            ensuredir(path.dirname(source_name))
            copyfile(self.env.doc2path(pagename), source_name)
    def handle_finish(self):
        # type: () -> None
        """Dump the global context and environment so the consuming web
        application can render pages on its own."""
        # dump the global context
        outfilename = path.join(self.outdir, self.globalcontext_filename)
        self.dump_context(self.globalcontext, outfilename)
        # super here to dump the search index
        StandaloneHTMLBuilder.handle_finish(self)
        # copy the environment file from the doctree dir to the output dir
        # as needed by the web app
        copyfile(path.join(self.doctreedir, ENV_PICKLE_FILENAME),
                 path.join(self.outdir, ENV_PICKLE_FILENAME))
        # touch 'last build' file, used by the web application to determine
        # when to reload its environment and clear the cache
        open(path.join(self.outdir, LAST_BUILD_FILENAME), 'w').close()
class PickleHTMLBuilder(SerializingHTMLBuilder):
    """
    A Builder that dumps the generated HTML into pickle files.
    """
    implementation = pickle
    implementation_dumps_unicode = False
    # pickle.dump() takes a protocol argument; always use the newest one
    additional_dump_args = (pickle.HIGHEST_PROTOCOL,)
    indexer_format = pickle
    indexer_dumps_unicode = False
    name = 'pickle'
    out_suffix = '.fpickle'
    globalcontext_filename = 'globalcontext.pickle'
    searchindex_filename = 'searchindex.pickle'
# compatibility alias
WebHTMLBuilder = PickleHTMLBuilder
class JSONHTMLBuilder(SerializingHTMLBuilder):
    """
    A builder that dumps the generated HTML into JSON files.
    """
    implementation = jsonimpl
    implementation_dumps_unicode = True
    indexer_format = jsonimpl
    indexer_dumps_unicode = True
    name = 'json'
    out_suffix = '.fjson'
    globalcontext_filename = 'globalcontext.json'
    searchindex_filename = 'searchindex.json'
    def init(self):
        # type: () -> None
        # no JSON-specific initialization needed; delegate to the base class
        SerializingHTMLBuilder.init(self)
def setup(app):
    # type: (Sphinx) -> Dict[unicode, Any]
    """Sphinx extension entry point: register the HTML builder family and
    every ``html_*`` config value; declared safe for parallel builds."""
    # builders
    app.add_builder(StandaloneHTMLBuilder)
    app.add_builder(DirectoryHTMLBuilder)
    app.add_builder(SingleFileHTMLBuilder)
    app.add_builder(PickleHTMLBuilder)
    app.add_builder(JSONHTMLBuilder)
    # config values
    app.add_config_value('html_theme', 'alabaster', 'html')
    app.add_config_value('html_theme_path', [], 'html')
    app.add_config_value('html_theme_options', {}, 'html')
    # the title default derives from project name and release
    app.add_config_value('html_title',
                         lambda self: l_('%s %s documentation') % (self.project, self.release),
                         'html', string_classes)
    app.add_config_value('html_short_title', lambda self: self.html_title, 'html')
    app.add_config_value('html_style', None, 'html', string_classes)
    app.add_config_value('html_logo', None, 'html', string_classes)
    app.add_config_value('html_favicon', None, 'html', string_classes)
    app.add_config_value('html_static_path', [], 'html')
    app.add_config_value('html_extra_path', [], 'html')
    app.add_config_value('html_last_updated_fmt', None, 'html', string_classes)
    app.add_config_value('html_use_smartypants', None, 'html')
    app.add_config_value('html_sidebars', {}, 'html')
    app.add_config_value('html_additional_pages', {}, 'html')
    app.add_config_value('html_domain_indices', True, 'html', [list])
    # default permalink marker is the pilcrow-like paragraph sign
    app.add_config_value('html_add_permalinks', u'\u00B6', 'html')
    app.add_config_value('html_use_index', True, 'html')
    app.add_config_value('html_split_index', False, 'html')
    app.add_config_value('html_copy_source', True, 'html')
    app.add_config_value('html_show_sourcelink', True, 'html')
    app.add_config_value('html_sourcelink_suffix', '.txt', 'html')
    app.add_config_value('html_use_opensearch', '', 'html')
    app.add_config_value('html_file_suffix', None, 'html', string_classes)
    app.add_config_value('html_link_suffix', None, 'html', string_classes)
    app.add_config_value('html_show_copyright', True, 'html')
    app.add_config_value('html_show_sphinx', True, 'html')
    app.add_config_value('html_context', {}, 'html')
    app.add_config_value('html_output_encoding', 'utf-8', 'html')
    app.add_config_value('html_compact_lists', True, 'html')
    app.add_config_value('html_secnumber_suffix', '. ', 'html')
    app.add_config_value('html_search_language', None, 'html', string_classes)
    app.add_config_value('html_search_options', {}, 'html')
    app.add_config_value('html_search_scorer', '', None)
    app.add_config_value('html_scaled_image_link', True, 'html')
    app.add_config_value('html_experimental_html5_writer', None, 'html')
    return {
        'version': 'builtin',
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
| 40.82416 | 138 | 0.598375 |
05128b7a74b7d20fd2d972d41a0a8f2382666ae1 | 4,440 | py | Python | model.py | ignaciobaldriz/DeepLearning_CNN_API | 60b4ed7a766a5694cd307e57ac0648f9517aabcb | [
"MIT"
] | 1 | 2021-03-25T15:42:55.000Z | 2021-03-25T15:42:55.000Z | model.py | ignaciobaldriz/DeepLearning_CNN_API | 60b4ed7a766a5694cd307e57ac0648f9517aabcb | [
"MIT"
] | null | null | null | model.py | ignaciobaldriz/DeepLearning_CNN_API | 60b4ed7a766a5694cd307e57ac0648f9517aabcb | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms
import matplotlib.pyplot as plt
# Device configuration
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# Hyper-parameters
input_size = 784 # 28x28
hidden_size = 500
num_classes = 10
num_epochs = 2
batch_size = 100
learning_rate = 0.001
# MNIST dataset (downloaded on first run into ./data)
train_dataset = torchvision.datasets.MNIST(root='./data',
                                           train=True,
                                           transform=transforms.ToTensor(),
                                           download=True)
test_dataset = torchvision.datasets.MNIST(root='./data',
                                          train=False,
                                          transform=transforms.ToTensor())
# Inspect the data
print(train_dataset)
print(train_dataset.classes)
print(train_dataset.data)
print(train_dataset.data.shape)
print(test_dataset)
print(test_dataset.classes)
print(test_dataset.data)
# Data loader
train_loader = torch.utils.data.DataLoader(dataset=train_dataset,
                                           batch_size=batch_size,
                                           shuffle=True)
test_loader = torch.utils.data.DataLoader(dataset=test_dataset,
                                          batch_size=batch_size,
                                          shuffle=False)
examples = iter(test_loader)
# BUG FIX: Python 3 iterators have no .next() method; `examples.next()`
# raises AttributeError.  Use the builtin next() instead.
example_data, example_targets = next(examples)
'''
for i in range(6):
    plt.subplot(2,3,i+1)
    plt.imshow(example_data[i][0], cmap='gray')
    plt.show()
'''
# Simple convolutional neural network (CNN)
class NeuralNet(nn.Module):
    """Two-layer CNN for 28x28 single-channel images (e.g. MNIST).

    Architecture: conv(1->5) -> ReLU -> pool -> conv(5->10) -> ReLU -> pool
    -> flatten -> linear(7*7*10 -> num_classes).
    """
    def __init__(self, num_classes):
        super(NeuralNet, self).__init__()
        # Instantiate the ReLU nonlinearity (shared by both conv layers)
        self.relu = nn.ReLU()
        # Two convolutional layers; padding=1 keeps the spatial size,
        # so only the pooling layers shrink 28 -> 14 -> 7
        self.conv1 = nn.Conv2d(in_channels=1, out_channels=5, kernel_size=3, padding=1)
        self.conv2 = nn.Conv2d(in_channels=5, out_channels=10, kernel_size=3, padding=1)
        # 2x2 max pooling with stride 2 halves each spatial dimension
        self.pool = nn.MaxPool2d(2, 2)
        # BUG FIX: the output layer previously hard-coded 10 classes and
        # silently ignored the num_classes argument; honor it instead.
        # (Backward compatible: the script passes num_classes = 10.)
        self.fc = nn.Linear(7 * 7 * 10, num_classes)
    def forward(self, x):
        """Run a batch of shape (N, 1, 28, 28) and return (N, num_classes) logits."""
        x = self.pool(self.relu(self.conv1(x)))
        x = self.pool(self.relu(self.conv2(x)))
        # Flatten to (N, 7*7*10) for the fully connected layer
        x = x.view(-1, 7 * 7 * 10)
        return self.fc(x)
# Instantiate the network and move it to the selected device
model = NeuralNet(num_classes).to(device)
# Loss and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
# Train the model
n_total_steps = len(train_loader)
for epoch in range(num_epochs):
    for i, (images, labels) in enumerate(train_loader):
        # origin shape: [100, 1, 28, 28]
        images = images.to(device)
        labels = labels.to(device)
        # Forward pass
        outputs = model(images)
        loss = criterion(outputs, labels)
        # Backward and optimize (zero_grad clears gradients from last step)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        # progress report every 100 mini-batches
        if (i+1) % 100 == 0:
            print (f'Epoch [{epoch+1}/{num_epochs}], Step [{i+1}/{n_total_steps}], Loss: {loss.item():.4f}')
# Test the model
# In test phase, we don't need to compute gradients (for memory efficiency)
with torch.no_grad():
    n_correct = 0
    n_samples = 0
    for images, labels in test_loader:
        images = images.to(device)
        labels = labels.to(device)
        outputs = model(images)
        # max returns (value ,index); the argmax along dim 1 is the prediction
        _, predicted = torch.max(outputs.data, 1)
        n_samples += labels.size(0)
        n_correct += (predicted == labels).sum().item()
    acc = 100.0 * n_correct / n_samples
    print(f'Accuracy of the network on the 10000 test images: {acc} %')
# Loss in epoch 2/2 step 600/600: -------------------------------- 0.0251
# Accuracy: ------------------------------------------------------ 97.13 %
# Persist only the learned weights (state_dict), not the whole module
torch.save(model.state_dict(), "mnist_cnn.pth" )
6af3ec5dbb15224800c1bfbfd4e226ab0aea3c44 | 292 | py | Python | PythonExercicios/ex064.py | lordvinick/Python | c03fd08d4c204104bf0196b0bd129427fd2067ae | [
"MIT"
] | null | null | null | PythonExercicios/ex064.py | lordvinick/Python | c03fd08d4c204104bf0196b0bd129427fd2067ae | [
"MIT"
] | null | null | null | PythonExercicios/ex064.py | lordvinick/Python | c03fd08d4c204104bf0196b0bd129427fd2067ae | [
"MIT"
] | null | null | null | print("{:=^60}".format('Trantando variaveis v1.0'))
num = s = i = 0
num = int(input('Digite um número [999 para parar]: '))
while num != 999:
i += 1
s += num
num = int(input('Digite um número [999 para parar]: '))
print(f'Você digitou {i} números e a soma entre eles foi {s}.')
| 26.545455 | 63 | 0.599315 |
d7dc49068aa22fa5930873be85cb272a4bb627fa | 1,184 | py | Python | clients/kratos/python/test/test_health_not_ready_status.py | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | clients/kratos/python/test/test_health_not_ready_status.py | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | clients/kratos/python/test/test_health_not_ready_status.py | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | """
Ory Kratos API
Documentation for all public and administrative Ory Kratos APIs. Public and administrative APIs are exposed on different ports. Public APIs can face the public internet without any protection while administrative APIs should never be exposed without prior authorization. To protect the administative API port you should use something like Nginx, Ory Oathkeeper, or any other technology capable of authorizing incoming requests. # noqa: E501
The version of the OpenAPI document: v0.10.1
Contact: hi@ory.sh
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import ory_kratos_client
from ory_kratos_client.model.health_not_ready_status import HealthNotReadyStatus
class TestHealthNotReadyStatus(unittest.TestCase):
    """HealthNotReadyStatus unit test stubs (auto-generated by
    openapi-generator; fill in the FIXME below to make it a real test)."""
    def setUp(self):
        # no fixtures required for these generated stubs
        pass
    def tearDown(self):
        # nothing to clean up
        pass
    def testHealthNotReadyStatus(self):
        """Test HealthNotReadyStatus"""
        # FIXME: construct object with mandatory attributes with example values
        # model = HealthNotReadyStatus()  # noqa: E501
        pass
unittest.main()
| 32 | 446 | 0.744932 |
4aaf5ea5ceac8ebab07db5bc95bedb58196eca39 | 4,939 | bzl | Python | elm/toolchain.bzl | matsubara0507/rules_elm | 80f94ad06b5e3d2703f5a61059ebe046fbe3e7a2 | [
"Apache-2.0"
] | 4 | 2021-09-27T13:38:52.000Z | 2021-12-09T16:59:27.000Z | elm/toolchain.bzl | matsubara0507/rules_elm | 80f94ad06b5e3d2703f5a61059ebe046fbe3e7a2 | [
"Apache-2.0"
] | 1 | 2022-02-12T07:54:15.000Z | 2022-02-16T14:18:29.000Z | elm/toolchain.bzl | matsubara0507/rules_elm | 80f94ad06b5e3d2703f5a61059ebe046fbe3e7a2 | [
"Apache-2.0"
] | null | null | null | load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive", "http_file")
# Elm compiler version used when toolchains() is called without one.
DEFAULT_VERSION = "0.19.1"
# elm-test-rs version paired with each compiler release by default.
DEFAULT_TEST_VERSION = \
    {
        "0.19.1": "1.2.1",
    }
# sha256 checksums of the official Elm compiler binaries, keyed by
# compiler version, then by OS ("linux" / "mac" / "windows").
ELM_COMPILER_BINDIST = \
    {
        "0.19.1": {
            "linux": "e44af52bb27f725a973478e589d990a6428e115fe1bb14f03833134d6c0f155c",
            "mac": "05289f0e3d4f30033487c05e689964c3bb17c0c48012510dbef1df43868545d1",
            "windows": "d1bf666298cbe3c5447b9ca0ea608552d750e5d232f9845c2af11907b654903b",
        },
        "0.19.0": {
            "linux": "d359adbee89823c641cda326938708d7227dc79aa1f162e0d8fe275f182f528a",
            "mac": "f1fa4dd9021e94c5a58b2be8843e3329095232ee3bd21a23524721a40eaabd35",
            "windows": "0e27d80537418896cf98326224159a45b6d36bf08e608e3a174ab6d2c572c5ae",
        },
    }
# sha256 checksums of the elm-test-rs binaries, keyed by elm-test-rs
# version, then by OS.
ELM_TEST_BINDIST = \
    {
        "1.2.1": {
            "linux": "6e5759f832a5e025898c9306ba47b2f9ed7f0c371dc69bd16c15c7ed8bfb1501",
            "mac": "890c45a7eda24fd13169d349af9c835ee3ed04974eec36953baba5aefc3628a8",
            "windows": "26add13880af484a47cd182547f41370d3bfca812a7cc9e3db6f41ce13b7fc40",
        }
    }
def _elm_compiler_impl(ctx):
    """Repository rule implementation: fetch the Elm compiler and the
    elm-test-rs runner for the requested OS/version and expose both through
    a generated BUILD file (consumed by elm_toolchain).
    """
    os = ctx.attr.os
    version = ctx.attr.version
    test_version = ctx.attr.test_version

    # Fail fast on an unknown elm-test-rs version, *before* downloading the
    # compiler (previously this was checked only after the compiler fetch).
    if not ELM_TEST_BINDIST.get(test_version):
        fail("Binary distribution of elm-test-rs {} is not available.".format(test_version))

    # The compiler ships as a gzipped single binary; Windows needs .exe.
    file_name = "elm"
    if os == "windows":
        file_name += ".exe"
    ctx.download(
        url = "https://github.com/elm/compiler/releases/download/{}/binary-for-{}-64-bit.gz".format(version, os),
        sha256 = ctx.attr.checksum,
        output = file_name + ".gz",
    )
    ctx.execute([ctx.which("gzip"), "-d", file_name + ".gz"])
    ctx.execute([ctx.which("chmod"), "+x", file_name])

    # elm-test-rs ships as an archive; naming differs per OS.
    elm_test_name = "elm-test-rs"
    test_checksum = ELM_TEST_BINDIST.get(test_version).get(os)
    test_suffix = os
    if os == "mac":
        test_suffix = "macos"
    test_extension = "tar.gz"
    if os == "windows":
        test_extension = "zip"
        elm_test_name = "elm-test-rs.exe"
    ctx.download_and_extract(
        url = "https://github.com/mpizenberg/elm-test-rs/releases/download/v{}/elm-test-rs_{}.{}".format(test_version, test_suffix, test_extension),
        sha256 = test_checksum,
    )

    # Generated BUILD file exporting both binaries as an elm_toolchain.
    ctx.file(
        "BUILD",
        executable = False,
        content = """
load("@rules_elm//elm:toolchain.bzl", "elm_toolchain")
exports_files(["{elm}", "{elm_test}"])
elm_toolchain(name = "{os}_info", elm = ":{elm}", elm_test = ":{elm_test}")
""".format(os = os, elm = file_name, elm_test = elm_test_name),
    )
# Repository rule that materializes the Elm compiler and elm-test-rs
# binaries for a single OS; instantiated once per OS by toolchains().
_elm_compiler = repository_rule(
    _elm_compiler_impl,
    local = False,
    attrs = {
        "os": attr.string(),  # "linux", "mac" or "windows"
        "version": attr.string(),  # Elm compiler version, key into ELM_COMPILER_BINDIST
        "test_version": attr.string(),  # elm-test-rs version, key into ELM_TEST_BINDIST
        "checksum": attr.string(),  # sha256 of the compiler download
    },
)
def _elm_compiler_toolchain_impl(ctx):
    """Writes a BUILD file declaring a toolchain() registration for one OS."""
    os_to_constraint = {
        "linux": "@platforms//os:linux",
        "mac": "@platforms//os:osx",
        "windows": "@platforms//os:windows",
    }
    constraints = [os_to_constraint.get(ctx.attr.os)]
    build_content = """
toolchain(
    name = "toolchain",
    toolchain_type = "@rules_elm//elm:toolchain",
    toolchain = "@{bindist_name}//:{os}_info",
    exec_compatible_with = {exec_constraints},
)
""".format(
        os = ctx.attr.os,
        bindist_name = ctx.attr.bindist_name,
        exec_constraints = constraints,
    )
    ctx.file("BUILD", executable = False, content = build_content)
# Repository rule that emits the toolchain() wrapper pointing at the
# bindist repository created by _elm_compiler.
_elm_compiler_toolchain = repository_rule(
    _elm_compiler_toolchain_impl,
    local = False,
    attrs = {
        "bindist_name": attr.string(),
        "os": attr.string(),
    },
)
def toolchains(version = DEFAULT_VERSION, test_version = ""):
    """Declares and registers Elm toolchains for every OS with a known checksum.

    Args:
        version: Elm compiler version; must be a key of ELM_COMPILER_BINDIST.
        test_version: elm-test-rs version; when empty, defaults to the entry
            matching `version` in DEFAULT_TEST_VERSION.
    """
    if not ELM_COMPILER_BINDIST.get(version):
        fail("Binary distribution of Elm {} is not available.".format(version))
    if test_version == "":
        test_version = DEFAULT_TEST_VERSION.get(version)
    # One bindist repository plus one toolchain repository per supported OS.
    for os, checksum in ELM_COMPILER_BINDIST.get(version).items():
        bindist_name = "rules_elm_compiler_{}".format(os)
        toolchain_name = bindist_name + "-toolchain"
        _elm_compiler(name = bindist_name, os = os, version = version, checksum = checksum, test_version = test_version)
        _elm_compiler_toolchain(name = toolchain_name, bindist_name = bindist_name, os = os)
        native.register_toolchains("@{}//:toolchain".format(toolchain_name))
def _elm_toolchain_impl(ctx):
    """Exposes the elm and elm-test binaries through ToolchainInfo."""
    return [platform_common.ToolchainInfo(
        elm = ctx.file.elm,
        elm_test = ctx.file.elm_test,
    )]
# Rule wrapping a compiler binary and a test-runner binary as a toolchain;
# both labels must resolve to exactly one file.
elm_toolchain = rule(
    _elm_toolchain_impl,
    attrs = {
        "elm": attr.label(
            allow_single_file = True,
            mandatory = True,
        ),
        "elm_test": attr.label(
            allow_single_file = True,
            mandatory = True,
        ),
    },
)
| 32.281046 | 148 | 0.627252 |
76daae59b872bd1c8d9352ba372f92caaff571af | 1,699 | py | Python | tests/integration/tools/test_custom_action.py | g-parki/bokeh | 664ead5306bba64609e734d4105c8aa8cfb76d81 | [
"BSD-3-Clause"
] | null | null | null | tests/integration/tools/test_custom_action.py | g-parki/bokeh | 664ead5306bba64609e734d4105c8aa8cfb76d81 | [
"BSD-3-Clause"
] | null | null | null | tests/integration/tools/test_custom_action.py | g-parki/bokeh | 664ead5306bba64609e734d4105c8aa8cfb76d81 | [
"BSD-3-Clause"
] | null | null | null | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2022, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations # isort:skip
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from bokeh._testing.plugins.project import SinglePlotPage
from bokeh._testing.util.selenium import RECORD
from bokeh.models import CustomAction, CustomJS
from bokeh.plotting import figure
#-----------------------------------------------------------------------------
# Tests
#-----------------------------------------------------------------------------
# Load Bokeh's project-level pytest fixtures (e.g. single_plot_page).
pytest_plugins = (
    "bokeh._testing.plugins.project",
)
@pytest.mark.selenium
class Test_CustomAction:
    """Browser-driven integration tests for the CustomAction toolbar tool."""

    def test_tap_triggers_callback(self, single_plot_page: SinglePlotPage) -> None:
        """Clicking the custom action toolbar button must run its CustomJS callback."""
        plot = figure(height=800, width=1000, tools='')
        plot.rect(x=[1, 2], y=[1, 1], width=1, height=1)
        # RECORD writes into page.results when the callback fires.
        plot.add_tools(CustomAction(callback=CustomJS(code=RECORD("activated", "true"))))
        page = single_plot_page(plot)
        page.click_custom_action()
        assert page.results["activated"] == True
        assert page.has_no_console_errors()
95b7a4f7626e9d44e1915e8881a5d22b9844c16a | 5,036 | py | Python | pyatv/mrp/protobuf/TextInputMessage_pb2.py | stickpin/pyatv | cb45bf5d303593a4e0be05215f21140e2fbbb03d | [
"MIT"
] | null | null | null | pyatv/mrp/protobuf/TextInputMessage_pb2.py | stickpin/pyatv | cb45bf5d303593a4e0be05215f21140e2fbbb03d | [
"MIT"
] | 128 | 2020-04-24T06:42:29.000Z | 2021-02-19T11:34:20.000Z | pyatv/mrp/protobuf/TextInputMessage_pb2.py | stickpin/pyatv | cb45bf5d303593a4e0be05215f21140e2fbbb03d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pyatv/mrp/protobuf/TextInputMessage.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from pyatv.mrp.protobuf import ProtocolMessage_pb2 as pyatv_dot_mrp_dot_protobuf_dot_ProtocolMessage__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='pyatv/mrp/protobuf/TextInputMessage.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=b'\n)pyatv/mrp/protobuf/TextInputMessage.proto\x1a(pyatv/mrp/protobuf/ProtocolMessage.proto\"\xb0\x01\n\x10TextInputMessage\x12\x11\n\ttimestamp\x18\x01 \x01(\x01\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x30\n\nactionType\x18\x03 \x01(\x0e\x32\x1c.TextInputMessage.ActionType\"I\n\nActionType\x12\x0b\n\x07Unknown\x10\x00\x12\n\n\x06Insert\x10\x01\x12\x07\n\x03Set\x10\x02\x12\n\n\x06\x44\x65lete\x10\x03\x12\r\n\tClearText\x10\x04:=\n\x10textInputMessage\x12\x10.ProtocolMessage\x18\x1e \x01(\x0b\x32\x11.TextInputMessage'
,
dependencies=[pyatv_dot_mrp_dot_protobuf_dot_ProtocolMessage__pb2.DESCRIPTOR,])
TEXTINPUTMESSAGE_FIELD_NUMBER = 30
textInputMessage = _descriptor.FieldDescriptor(
name='textInputMessage', full_name='textInputMessage', index=0,
number=30, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR)
_TEXTINPUTMESSAGE_ACTIONTYPE = _descriptor.EnumDescriptor(
name='ActionType',
full_name='TextInputMessage.ActionType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='Unknown', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Insert', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Set', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='Delete', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ClearText', index=4, number=4,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=191,
serialized_end=264,
)
_sym_db.RegisterEnumDescriptor(_TEXTINPUTMESSAGE_ACTIONTYPE)
_TEXTINPUTMESSAGE = _descriptor.Descriptor(
name='TextInputMessage',
full_name='TextInputMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timestamp', full_name='TextInputMessage.timestamp', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='text', full_name='TextInputMessage.text', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='actionType', full_name='TextInputMessage.actionType', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_TEXTINPUTMESSAGE_ACTIONTYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=88,
serialized_end=264,
)
_TEXTINPUTMESSAGE.fields_by_name['actionType'].enum_type = _TEXTINPUTMESSAGE_ACTIONTYPE
_TEXTINPUTMESSAGE_ACTIONTYPE.containing_type = _TEXTINPUTMESSAGE
DESCRIPTOR.message_types_by_name['TextInputMessage'] = _TEXTINPUTMESSAGE
DESCRIPTOR.extensions_by_name['textInputMessage'] = textInputMessage
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TextInputMessage = _reflection.GeneratedProtocolMessageType('TextInputMessage', (_message.Message,), {
'DESCRIPTOR' : _TEXTINPUTMESSAGE,
'__module__' : 'pyatv.mrp.protobuf.TextInputMessage_pb2'
# @@protoc_insertion_point(class_scope:TextInputMessage)
})
_sym_db.RegisterMessage(TextInputMessage)
textInputMessage.message_type = _TEXTINPUTMESSAGE
pyatv_dot_mrp_dot_protobuf_dot_ProtocolMessage__pb2.ProtocolMessage.RegisterExtension(textInputMessage)
# @@protoc_insertion_point(module_scope)
| 37.864662 | 536 | 0.77085 |
3522b04603c86499d7129a9c9e357daf0c761b3b | 142,165 | py | Python | tests/test_edgeql_functions.py | aaronbrighton/edgedb | 4aacd1d4e248ae0d483c075ba93fc462da291ef4 | [
"Apache-2.0"
] | 1 | 2022-01-15T14:05:03.000Z | 2022-01-15T14:05:03.000Z | tests/test_edgeql_functions.py | aaronbrighton/edgedb | 4aacd1d4e248ae0d483c075ba93fc462da291ef4 | [
"Apache-2.0"
] | null | null | null | tests/test_edgeql_functions.py | aaronbrighton/edgedb | 4aacd1d4e248ae0d483c075ba93fc462da291ef4 | [
"Apache-2.0"
] | null | null | null | #
# This source file is part of the EdgeDB open source project.
#
# Copyright 2017-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import decimal
import json
import os.path
import edgedb
from edb.testbase import server as tb
from edb.tools import test
class TestEdgeQLFunctions(tb.QueryTestCase):
SCHEMA = os.path.join(os.path.dirname(__file__), 'schemas',
'issues.esdl')
SETUP = os.path.join(os.path.dirname(__file__), 'schemas',
'issues_setup.edgeql')
    async def test_edgeql_functions_count_01(self):
        """count() over a computed link must equal the hard-coded count."""
        await self.assert_query_result(
            r"""
                WITH
                    x := (
                        # User is simply employed as an object to be augmented
                        SELECT User {
                            count := 4,
                            all_issues := Issue
                        } FILTER .name = 'Elvis'
                    )
                SELECT x.count = count(x.all_issues);
            """,
            [True]
        )
async def test_edgeql_functions_count_02(self):
await self.assert_query_result(
r"""
WITH
x := (
# User is simply employed as an object to be augmented
SELECT User {
count := count(Issue),
all_issues := Issue
} FILTER .name = 'Elvis'
)
SELECT x.count = count(x.all_issues);
""",
[True]
)
async def test_edgeql_functions_count_03(self):
await self.assert_query_result(
r"""
WITH
x := (
# User is simply employed as an object to be augmented
SELECT User {
count := count(<int64>Issue.number),
all_issues := <int64>Issue.number
} FILTER .name = 'Elvis'
)
SELECT x.count = count(x.all_issues);
""",
[True]
)
async def test_edgeql_functions_array_agg_01(self):
await self.assert_query_result(
r'''SELECT array_agg({1, 2, 3});''',
[[1, 2, 3]],
)
await self.assert_query_result(
r'''SELECT array_agg({3, 2, 3});''',
[[3, 2, 3]],
)
await self.assert_query_result(
r'''SELECT array_agg({3, 3, 2});''',
[[3, 3, 2]],
)
async def test_edgeql_functions_array_agg_02(self):
await self.assert_query_result(
r'''SELECT array_agg({1, 2, 3})[0];''',
[1],
)
await self.assert_query_result(
r'''SELECT array_agg({3, 2, 3})[1];''',
[2],
)
await self.assert_query_result(
r'''SELECT array_agg({3, 3, 2})[-1];''',
[2],
)
async def test_edgeql_functions_array_agg_03(self):
await self.assert_query_result(
r'''
WITH x := {3, 1, 2}
SELECT array_agg(x ORDER BY x);
''',
[[1, 2, 3]],
)
await self.assert_query_result(
r'''
WITH x := {3, 1, 2}
SELECT array_agg(x ORDER BY x) = [1, 2, 3];
''',
[True],
)
async def test_edgeql_functions_array_agg_04(self):
await self.assert_query_result(
r'''
WITH x := {3, 1, 2}
SELECT contains(array_agg(x ORDER BY x), 2);
''',
[True],
)
await self.assert_query_result(
r'''
WITH x := {3, 1, 2}
SELECT contains(array_agg(x ORDER BY x), 5);
''',
[False],
)
await self.assert_query_result(
r'''
WITH x := {3, 1, 2}
SELECT contains(array_agg(x ORDER BY x), 5);
''',
[False],
)
    async def test_edgeql_functions_array_agg_05(self):
        """array_agg of an untyped empty set is a compile-time error."""
        with self.assertRaisesRegex(
                edgedb.QueryError,
                r'expression returns value of indeterminate type'):
            await self.con.execute("""
                SELECT array_agg({});
            """)
async def test_edgeql_functions_array_agg_06(self):
await self.assert_query_result(
'''SELECT array_agg(<int64>{});''',
[[]],
)
await self.assert_query_result(
'''SELECT array_agg(DISTINCT <int64>{});''',
[[]],
)
async def test_edgeql_functions_array_agg_07(self):
await self.assert_query_result(
r'''
SELECT array_agg((SELECT schema::ObjectType FILTER False));
''',
[[]]
)
await self.assert_query_result(
r'''
SELECT array_agg(
(SELECT schema::ObjectType
FILTER <str>schema::ObjectType.id = '~')
);
''',
[[]]
)
async def test_edgeql_functions_array_agg_08(self):
await self.assert_query_result(
r'''
WITH x := <int64>{}
SELECT array_agg(x);
''',
[[]]
)
await self.assert_query_result(
r'''
WITH x := (SELECT schema::ObjectType FILTER False)
SELECT array_agg(x);
''',
[[]]
)
await self.assert_query_result(
r'''
WITH x := (
SELECT schema::ObjectType
FILTER <str>schema::ObjectType.id = '~'
)
SELECT array_agg(x);
''',
[[]]
)
async def test_edgeql_functions_array_agg_09(self):
await self.assert_query_result(
r"""
WITH MODULE schema
SELECT
ObjectType {
l := array_agg(
ObjectType.properties.name
FILTER
ObjectType.properties.name IN {
'id',
'name'
}
ORDER BY ObjectType.properties.name ASC
)
}
FILTER
ObjectType.name = 'schema::Object';
""",
[{
'l': ['id', 'name']
}]
)
async def test_edgeql_functions_array_agg_10(self):
with self.assertRaisesRegex(
edgedb.UnsupportedFeatureError,
r"nested arrays are not supported"):
await self.con.query(r"""
SELECT array_agg(
[<str>Issue.number, Issue.status.name]
ORDER BY Issue.number);
""")
async def test_edgeql_functions_array_agg_11(self):
await self.assert_query_result(
r"""
SELECT array_agg(
(<str>Issue.number, Issue.status.name)
ORDER BY Issue.number
)[1];
""",
[['2', 'Open']]
)
async def test_edgeql_functions_array_agg_12(self):
await self.assert_query_result(
r'''
SELECT
array_agg(User{name} ORDER BY User.name);
''',
[[{'name': 'Elvis'}, {'name': 'Yury'}]]
)
result = await self.con.query(r'''
SELECT
array_agg(User{name} ORDER BY User.name);
''')
self.assertEqual(result[0][0].name, 'Elvis')
self.assertEqual(result[0][1].name, 'Yury')
async def test_edgeql_functions_array_agg_13(self):
await self.assert_query_result(
r'''
SELECT
Issue {
number,
watchers_array := array_agg(Issue.watchers {name})
}
FILTER
EXISTS Issue.watchers
ORDER BY
Issue.number;
''',
[
{'number': '1', 'watchers_array': [{'name': 'Yury'}]},
{'number': '2', 'watchers_array': [{'name': 'Elvis'}]},
{'number': '3', 'watchers_array': [{'name': 'Elvis'}]}
]
)
async def test_edgeql_functions_array_agg_14(self):
with self.assertRaisesRegex(
edgedb.UnsupportedFeatureError,
r"nested arrays are not supported"):
await self.con.query(r'''
SELECT array_agg(array_agg(User.name));
''')
async def test_edgeql_functions_array_agg_15(self):
await self.assert_query_result(
r'''
SELECT array_agg(
([([User.name],)],) ORDER BY User.name
);
''',
[ # result set
[ # array_agg
[[[['Elvis']]]], [[[['Yury']]]],
]
]
)
async def test_edgeql_functions_array_agg_16(self):
await self.assert_query_result(
r'''
SELECT array_agg( # outer array
( # tuple
array_agg( # array
( # tuple
array_agg(User.name ORDER BY User.name),
)
),
)
);
''',
[ # result set
[ # outer array_agg
[[[['Elvis', 'Yury']]]]
]
]
)
async def test_edgeql_functions_array_agg_17(self):
await self.assert_query_result(
'''SELECT count(array_agg({}))''',
[1],
)
async def test_edgeql_functions_array_agg_18(self):
with self.assertRaisesRegex(
edgedb.QueryError,
r'expression returns value of indeterminate type'):
await self.con.execute(
'''SELECT array_agg({})''',
)
async def test_edgeql_functions_array_agg_19(self):
await self.assert_query_result(
r'''FOR X in {array_agg(0)} UNION (SELECT array_unpack(X));''',
[0],
)
await self.assert_query_result(
r'''
FOR X in {array_agg((0, 1))}
UNION (SELECT array_unpack(X));
''',
[[0, 1]],
)
await self.assert_query_result(
r'''FOR X in {array_agg((0, 1))} UNION (X);''',
[[[0, 1]]],
)
async def test_edgeql_functions_array_agg_20(self):
await self.assert_query_result(
r'''
SELECT Issue { te := array_agg(.time_estimate) };
''',
tb.bag([{"te": [3000]}, {"te": []}, {"te": []}, {"te": []}]),
)
await self.assert_query_result(
r'''
SELECT Issue { te := array_agg(.time_estimate UNION 3000) };
''',
tb.bag(
[{"te": [3000, 3000]}, {"te": [3000]},
{"te": [3000]}, {"te": [3000]}],
)
)
async def test_edgeql_functions_array_unpack_01(self):
await self.assert_query_result(
r'''SELECT [1, 2];''',
[[1, 2]],
)
await self.assert_query_result(
r'''SELECT array_unpack([1, 2]);''',
[1, 2],
)
await self.assert_query_result(
r'''SELECT array_unpack([10, 20]) - 1;''',
[9, 19],
)
async def test_edgeql_functions_array_unpack_02(self):
await self.assert_query_result(
# array_agg and array_unpack are inverses of each other
r'''SELECT array_agg(array_unpack([1, 2, 3])) = [1, 2, 3];''',
[True],
)
await self.assert_query_result(
r'''SELECT array_unpack(array_agg({1, 2, 3}));''',
{1, 2, 3},
)
async def test_edgeql_functions_array_unpack_03(self):
await self.assert_query_result(
r'''
# array_agg and array_unpack are inverses of each other
SELECT array_unpack(array_agg(Issue.number));
''',
{'1', '2', '3', '4'},
)
async def test_edgeql_functions_array_unpack_04(self):
await self.assert_query_result(
r'''
# array_agg and array_unpack are inverses of each other
SELECT array_unpack(array_agg(Issue)){number};
''',
[
{'number': '1'},
{'number': '2'},
{'number': '3'},
{'number': '4'},
],
sort=lambda x: x['number']
)
async def test_edgeql_functions_array_unpack_05(self):
await self.assert_query_result(
r'''SELECT array_unpack([(1,)]).0;''',
[1],
)
async def test_edgeql_functions_array_unpack_06(self):
# We have a special case optimization for "IN array_unpack" so
# it's worth testing it.
await self.assert_query_result(
r'''SELECT 1 IN array_unpack([1]);''',
[True],
)
await self.assert_query_result(
r'''SELECT 2 IN array_unpack([1]);''',
[False],
)
await self.assert_query_result(
r'''SELECT 2 NOT IN array_unpack([1]);''',
[True],
)
await self.assert_query_result(
r'''SELECT 1 IN array_unpack({[1,2,3], [4,5,6]});''',
[True],
)
await self.assert_query_result(
r'''SELECT 0 IN array_unpack({[1,2,3], [4,5,6]});''',
[False],
)
await self.assert_query_result(
r'''SELECT 1 NOT IN array_unpack({[1,2,3], [4,5,6]});''',
[False],
)
await self.assert_query_result(
r'''SELECT 0 NOT IN array_unpack({[1,2,3], [4,5,6]});''',
[True],
)
await self.assert_query_result(
r"""
SELECT ("foo", 1) IN array_unpack([("foo", 1), ("bar", 2)]);
""",
[True],
)
async def test_edgeql_functions_enumerate_01(self):
await self.assert_query_result(
r'''SELECT [10, 20];''',
[[10, 20]],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([10,20]));''',
[[0, 10], [1, 20]],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([10,20])).0 + 100;''',
[100, 101],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([10,20])).1 + 100;''',
[110, 120],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(1, '2')]))''',
[[0, [1, '2']]],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(1, '2')])).1.1''',
['2'],
)
async def test_edgeql_functions_enumerate_02(self):
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(x:=1)])).1;''',
[{"x": 1}],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(x:=1)])).1.x;''',
[1],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(x:=(a:=2))])).1;''',
[{"x": {"a": 2}}],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(x:=(a:=2))])).1.x;''',
[{"a": 2}],
)
await self.assert_query_result(
r'''SELECT enumerate(array_unpack([(x:=(a:=2))])).1.x.a;''',
[2],
)
async def test_edgeql_functions_enumerate_03(self):
await self.assert_query_result(
r'''SELECT enumerate((SELECT User.name ORDER BY User.name));''',
[[0, 'Elvis'], [1, 'Yury']],
)
await self.assert_query_result(
r'''SELECT enumerate({'a', 'b', 'c'});''',
[[0, 'a'], [1, 'b'], [2, 'c']],
)
await self.assert_query_result(
r'''WITH A := {'a', 'b'} SELECT (A, enumerate(A));''',
[['a', [0, 'a']], ['b', [0, 'b']]],
)
await self.assert_query_result(
r'''SELECT enumerate({(1, 2), (3, 4)});''',
[[0, [1, 2]], [1, [3, 4]]],
)
async def test_edgeql_functions_enumerate_04(self):
self.assertEqual(
await self.con.query(
'select <json>enumerate({(1, 2), (3, 4)})'),
['[0, [1, 2]]', '[1, [3, 4]]'])
self.assertEqual(
await self.con.query_json(
'select <json>enumerate({(1, 2), (3, 4)})'),
'[[0, [1, 2]], [1, [3, 4]]]')
async def test_edgeql_functions_enumerate_05(self):
await self.assert_query_result(
r'''SELECT enumerate(User { name } ORDER BY .name);''',
[[0, {"name": "Elvis"}],
[1, {"name": "Yury"}]],
)
await self.assert_query_result(
r'''SELECT enumerate(User ORDER BY .name).1.name;''',
["Elvis", "Yury"],
)
    async def test_edgeql_functions_enumerate_06(self):
        # enumerate() over a set filtered down to empty yields an empty set.
        await self.assert_query_result(
            r'''SELECT enumerate(_gen_series(0, 99) FILTER FALSE);''',
            [],
        )
async def test_edgeql_functions_enumerate_07(self):
# Check that enumerate of a function works when the tuple type
# appears in the schema (like tuple<int64, int64> does)
await self.assert_query_result(
r'''
WITH Z := enumerate(array_unpack([10, 20])),
Y := enumerate(Z),
SELECT (Y.1.0, Y.1.1) ORDER BY Y.0;
''',
[[0, 10], [1, 20]]
)
async def test_edgeql_functions_enumerate_08(self):
await self.assert_query_result(
r'''
SELECT Issue { te := enumerate(.time_estimate) };
''',
tb.bag(
[{"te": [0, 3000]}, {"te": None}, {"te": None}, {"te": None}]
)
)
await self.assert_query_result(
r'''
SELECT Issue { te := enumerate(.time_estimate UNION 3000) };
''',
tb.bag([
{"te": [[0, 3000], [1, 3000]]},
{"te": [[0, 3000]]},
{"te": [[0, 3000]]},
{"te": [[0, 3000]]}
])
)
async def test_edgeql_functions_array_get_01(self):
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 2);''',
[3],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], -2);''',
[2],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 20);''',
[],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], -20);''',
[],
)
async def test_edgeql_functions_array_get_02(self):
await self.assert_query_result(
r'''
SELECT array_get(array_agg(
Issue.number ORDER BY Issue.number), 2);
''',
['3'],
)
await self.assert_query_result(
r'''
SELECT array_get(array_agg(
Issue.number ORDER BY Issue.number), -2);
''',
['3'],
)
await self.assert_query_result(
r'''SELECT array_get(array_agg(Issue.number), 20);''',
[]
)
await self.assert_query_result(
r'''SELECT array_get(array_agg(Issue.number), -20);''',
[]
)
async def test_edgeql_functions_array_get_03(self):
with self.assertRaisesRegex(
edgedb.QueryError,
r'function "array_get.+" does not exist'):
await self.con.query(r'''
SELECT array_get([1, 2, 3], 2^40);
''')
async def test_edgeql_functions_array_get_04(self):
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 0) ?? 42;''',
[1],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 0, default := -1) ?? 42;''',
[1],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], -2) ?? 42;''',
[2],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 20) ?? 42;''',
[42],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], -20) ?? 42;''',
[42],
)
async def test_edgeql_functions_array_get_05(self):
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 1, default := 4200) ?? 42;''',
[2],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], -2, default := 4200) ?? 42;''',
[2],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], 20, default := 4200) ?? 42;''',
[4200],
)
await self.assert_query_result(
r'''SELECT array_get([1, 2, 3], -20, default := 4200) ?? 42;''',
[4200],
)
async def test_edgeql_functions_array_get_06(self):
await self.assert_query_result(
r'''SELECT array_get([(20,), (30,)], 0);''',
[[20]],
)
await self.assert_query_result(
r'''SELECT array_get([(a:=20), (a:=30)], 1);''',
[{'a': 30}],
)
await self.assert_query_result(
r'''SELECT array_get([(20,), (30,)], 0).0;''',
[20],
)
await self.assert_query_result(
r'''SELECT array_get([(a:=20), (a:=30)], 1).0;''',
[30],
)
await self.assert_query_result(
r'''SELECT array_get([(a:=20, b:=1), (a:=30, b:=2)], 0).a;''',
[20],
)
await self.assert_query_result(
r'''SELECT array_get([(a:=20, b:=1), (a:=30, b:=2)], 1).b;''',
[2],
)
@test.xfail(
"Known collation issue on Heroku Postgres",
unless=os.getenv("EDGEDB_TEST_BACKEND_VENDOR") != "heroku-postgres"
)
async def test_edgeql_functions_re_match_01(self):
await self.assert_query_result(
r'''SELECT re_match('ab', 'AbabaB');''',
[['ab']],
)
await self.assert_query_result(
r'''SELECT re_match('AB', 'AbabaB');''',
[],
)
await self.assert_query_result(
r'''SELECT re_match('(?i)AB', 'AbabaB');''',
[['Ab']],
)
await self.assert_query_result(
r'''SELECT re_match('ac', 'AbabaB');''',
[],
)
await self.assert_query_result(
r'''SELECT EXISTS re_match('ac', 'AbabaB');''',
[False],
)
await self.assert_query_result(
r'''SELECT NOT EXISTS re_match('ac', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT EXISTS re_match('ab', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT NOT EXISTS re_match('ab', 'AbabaB');''',
[False],
)
await self.assert_query_result(
r'''SELECT x := re_match({'(?i)ab', 'a'}, 'AbabaB') ORDER BY x;''',
[['Ab'], ['a']],
)
await self.assert_query_result(
r'''
SELECT x := re_match({'(?i)ab', 'a'}, {'AbabaB', 'qwerty'})
ORDER BY x;
''',
[['Ab'], ['a']],
)
async def test_edgeql_functions_re_match_02(self):
await self.assert_query_result(
r'''
WITH MODULE schema
SELECT x := re_match('(\\w+)::(Link|Property)',
ObjectType.name)
ORDER BY x;
''',
[['schema', 'Link'], ['schema', 'Property']],
)
@test.xfail(
"Known collation issue on Heroku Postgres",
unless=os.getenv("EDGEDB_TEST_BACKEND_VENDOR") != "heroku-postgres"
)
async def test_edgeql_functions_re_match_all_01(self):
await self.assert_query_result(
r'''SELECT re_match_all('ab', 'AbabaB');''',
[['ab']],
)
await self.assert_query_result(
r'''SELECT re_match_all('AB', 'AbabaB');''',
[],
)
await self.assert_query_result(
r'''SELECT re_match_all('(?i)AB', 'AbabaB');''',
[['Ab'], ['ab'], ['aB']],
)
await self.assert_query_result(
r'''SELECT re_match_all('ac', 'AbabaB');''',
[],
)
await self.assert_query_result(
r'''SELECT EXISTS re_match_all('ac', 'AbabaB');''',
[False],
)
await self.assert_query_result(
r'''SELECT NOT EXISTS re_match_all('ac', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT EXISTS re_match_all('(?i)ab', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT NOT EXISTS re_match_all('(?i)ab', 'AbabaB');''',
[False],
)
await self.assert_query_result(
r'''
SELECT x := re_match_all({'(?i)ab', 'a'}, 'AbabaB')
ORDER BY x;''',
[['Ab'], ['a'], ['a'], ['aB'], ['ab']],
)
await self.assert_query_result(
r'''
SELECT x := re_match_all({'(?i)ab', 'a'},
{'AbabaB', 'qwerty'})
ORDER BY x;
''',
[['Ab'], ['a'], ['a'], ['aB'], ['ab']],
)
async def test_edgeql_functions_re_match_all_02(self):
await self.assert_query_result(
r'''
WITH
MODULE schema,
C2 := ScalarType
SELECT
count(re_match_all('(\\w+)', ScalarType.name)) =
2 * count(C2);
''',
[True],
)
async def test_edgeql_functions_re_test_01(self):
await self.assert_query_result(
r'''SELECT re_test('ac', 'AbabaB');''',
[False],
)
await self.assert_query_result(
r'''SELECT NOT re_test('ac', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT re_test(r'(?i)ab', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT NOT re_test(r'(?i)ab', 'AbabaB');''',
[False],
)
await self.assert_query_result(
# the result always exists
r'''SELECT EXISTS re_test('(?i)ac', 'AbabaB');''',
[True],
)
await self.assert_query_result(
r'''SELECT NOT EXISTS re_test('(?i)ac', 'AbabaB');''',
[False],
)
await self.assert_query_result(
r'''SELECT x := re_test({'ab', 'a'}, 'AbabaB') ORDER BY x;''',
[True, True],
)
await self.assert_query_result(
r'''
SELECT x := re_test({'ab', 'a'}, {'AbabaB', 'qwerty'})
ORDER BY x;
''',
[False, False, True, True],
)
async def test_edgeql_functions_re_test_02(self):
await self.assert_query_result(
r'''
WITH MODULE schema
SELECT count(
ObjectType FILTER re_test(r'(\W\w)bject$', ObjectType.name)
) = 2;
''',
[True],
)
async def test_edgeql_functions_re_replace_01(self):
await self.assert_query_result(
r'''SELECT re_replace('l', 'L', 'Hello World');''',
['HeLlo World'],
)
await self.assert_query_result(
r'''SELECT re_replace('l', 'L', 'Hello World', flags := 'g');''',
['HeLLo WorLd'],
)
await self.assert_query_result(
r'''
SELECT re_replace('[a-z]', '~', 'Hello World',
flags := 'i');''',
['~ello World'],
)
await self.assert_query_result(
r'''
SELECT re_replace('[a-z]', '~', 'Hello World',
flags := 'gi');
''',
['~~~~~ ~~~~~'],
)
async def test_edgeql_functions_re_replace_02(self):
await self.assert_query_result(
r'''SELECT re_replace('[aeiou]', '~', User.name);''',
{'Elv~s', 'Y~ry'},
)
await self.assert_query_result(
r'''
SELECT re_replace('[aeiou]', '~', User.name,
flags := 'g');
''',
{'Elv~s', 'Y~ry'},
)
await self.assert_query_result(
r'''
SELECT re_replace('[aeiou]', '~', User.name,
flags := 'i');
''',
{'~lvis', 'Y~ry'},
)
await self.assert_query_result(
r'''
SELECT re_replace('[aeiou]', '~', User.name,
flags := 'gi');
''',
{'~lv~s', 'Y~ry'},
)
    async def test_edgeql_functions_sum_01(self):
        """sum() over mixed-sign integer and float sets."""
        await self.assert_query_result(
            r'''SELECT sum({1, 2, 3, -4, 5});''',
            [7],
        )
        await self.assert_query_result(
            r'''SELECT sum({0.1, 0.2, 0.3, -0.4, 0.5});''',
            [0.7],
        )
async def test_edgeql_functions_sum_02(self):
await self.assert_query_result(
r'''
SELECT sum({1, 2, 3, -4.2, 5});
''',
[6.8],
)
async def test_edgeql_functions_sum_03(self):
await self.assert_query_result(
r'''
SELECT sum({1.0, 2.0, 3.0, -4.2, 5});
''',
[6.8],
)
async def test_edgeql_functions_sum_04(self):
await self.assert_query_result(
r'''SELECT sum(<int16>2) IS int64;''',
[True],
)
await self.assert_query_result(
r'''SELECT sum(<int32>2) IS int64;''',
[True],
)
await self.assert_query_result(
r'''SELECT sum(<int64>2) IS int64;''',
[True],
)
await self.assert_query_result(
r'''SELECT sum(<float32>2) IS float32;''',
[True],
)
await self.assert_query_result(
r'''SELECT sum(<float64>2) IS float64;''',
[True],
)
await self.assert_query_result(
r'''SELECT sum(<decimal>2) IS decimal;''',
[True],
)
async def test_edgeql_functions_unix_to_datetime_01(self):
dt = await self.con.query_single(
'SELECT <str>to_datetime(1590595184.584);'
)
self.assertEqual('2020-05-27T15:59:44.584+00:00', dt)
async def test_edgeql_functions_unix_to_datetime_02(self):
dt = await self.con.query_single(
'SELECT <str>to_datetime(1590595184);'
)
self.assertEqual('2020-05-27T15:59:44+00:00', dt)
async def test_edgeql_functions_unix_to_datetime_03(self):
dt = await self.con.query_single(
'SELECT <str>to_datetime(517795200);'
)
self.assertEqual('1986-05-30T00:00:00+00:00', dt)
async def test_edgeql_functions_unix_to_datetime_04(self):
dt = await self.con.query_single(
'SELECT <str>to_datetime(517795200.00n);'
)
self.assertEqual('1986-05-30T00:00:00+00:00', dt)
    async def test_edgeql_functions_unix_to_datetime_05(self):
        """A Unix timestamp beyond the datetime range raises
        InvalidValueError rather than silently wrapping."""
        with self.assertRaisesRegex(
            edgedb.InvalidValueError,
            "'std::datetime' value out of range"
        ):
            await self.con.query_single(
                'SELECT to_datetime(999999999999)'
            )
    async def test_edgeql_functions_datetime_current_01(self):
        """<str>datetime_current() serializes to an ISO-like timestamp."""
        # make sure that datetime as a str gets serialized to a
        # particular format
        dt = await self.con.query_single('SELECT <str>datetime_current();')
        self.assertRegex(dt, r'\d+-\d+-\d+T\d+:\d+:\d+\.\d+.*')
async def test_edgeql_functions_datetime_current_02(self):
batch1 = await self.con.query_json(r'''
WITH MODULE schema
SELECT Type {
dt_t := datetime_of_transaction(),
dt_s := datetime_of_statement(),
dt_n := datetime_current(),
};
''')
batch2 = await self.con.query_json(r'''
# NOTE: this test assumes that there's at least 1 microsecond
# time difference between statements
WITH MODULE schema
SELECT Type {
dt_t := datetime_of_transaction(),
dt_s := datetime_of_statement(),
dt_n := datetime_current(),
};
''')
batch1 = json.loads(batch1)
batch2 = json.loads(batch2)
batches = batch1 + batch2
# all of the dt_t should be the same
set_dt_t = {t['dt_t'] for t in batches}
self.assertTrue(len(set_dt_t) == 1)
# all of the dt_s should be the same in each batch
set_dt_s1 = {t['dt_s'] for t in batch1}
set_dt_s2 = {t['dt_s'] for t in batch2}
self.assertTrue(len(set_dt_s1) == 1)
self.assertTrue(len(set_dt_s1) == 1)
# the transaction and statement datetimes should be in
# chronological order
dt_t = set_dt_t.pop()
dt_s1 = set_dt_s1.pop()
dt_s2 = set_dt_s2.pop()
self.assertTrue(dt_t <= dt_s1 < dt_s2)
# the first "now" datetime is no earlier than the statement
# for each batch
self.assertTrue(dt_s1 <= batch1[0]['dt_n'])
self.assertTrue(dt_s2 <= batch2[0]['dt_n'])
# every dt_n is already in chronological order
self.assertEqual(
[t['dt_n'] for t in batches],
sorted([t['dt_n'] for t in batches])
)
# the first dt_n is strictly earlier than the last
self.assertTrue(batches[0]['dt_n'] < batches[-1]['dt_n'])
    async def test_edgeql_functions_datetime_get_01(self):
        """datetime_get() extracts individual fields of a tz-aware
        datetime; time fields come back in UTC (15:01-05 is 20:01 UTC)."""
        await self.assert_query_result(
            r'''
                SELECT datetime_get(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'year');
            ''',
            {2018},
        )
        await self.assert_query_result(
            r'''
                SELECT datetime_get(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'month');
            ''',
            {5},
        )
        await self.assert_query_result(
            r'''
                SELECT datetime_get(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'day');
            ''',
            {7},
        )
        await self.assert_query_result(
            r'''
                SELECT datetime_get(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'hour');
            ''',
            {20},
        )
        await self.assert_query_result(
            r'''
                SELECT datetime_get(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'minutes');
            ''',
            {1},
        )
        await self.assert_query_result(
            r'''
                SELECT datetime_get(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'seconds');
            ''',
            {22.306916},
        )
        await self.assert_query_result(
            r'''
                SELECT datetime_get(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'epochseconds');
            ''',
            {1525723282.306916},
        )
    async def test_edgeql_functions_datetime_get_02(self):
        """datetime_get() extracts fields of a cal::local_datetime; no
        time-zone conversion applies (hour stays 15)."""
        await self.assert_query_result(
            r'''
                SELECT datetime_get(
                    <cal::local_datetime>'2018-05-07T15:01:22.306916', 'year');
            ''',
            {2018},
        )
        await self.assert_query_result(
            r'''
                SELECT datetime_get(
                    <cal::local_datetime>'2018-05-07T15:01:22.306916', 'month');
            ''',
            {5},
        )
        await self.assert_query_result(
            r'''
                SELECT datetime_get(
                    <cal::local_datetime>'2018-05-07T15:01:22.306916', 'day');
            ''',
            {7},
        )
        await self.assert_query_result(
            r'''
                SELECT datetime_get(
                    <cal::local_datetime>'2018-05-07T15:01:22.306916', 'hour');
            ''',
            {15},
        )
        await self.assert_query_result(
            r'''SELECT datetime_get(
                <cal::local_datetime>'2018-05-07T15:01:22.306916', 'minutes');
            ''',
            {1},
        )
        await self.assert_query_result(
            r'''SELECT datetime_get(
                <cal::local_datetime>'2018-05-07T15:01:22.306916', 'seconds');
            ''',
            {22.306916},
        )
    async def test_edgeql_functions_datetime_get_03(self):
        """'timezone_hour' is rejected for a local (tz-naive) datetime."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                'invalid unit for std::datetime_get'):
            await self.con.query('''
                SELECT datetime_get(
                    <cal::local_datetime>'2018-05-07T15:01:22.306916',
                    'timezone_hour'
                );
            ''')
    async def test_edgeql_functions_datetime_get_04(self):
        """'timezone_hour' is rejected for a tz-aware datetime as well."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                'invalid unit for std::datetime_get'):
            await self.con.query('''
                SELECT datetime_get(
                    <datetime>'2018-05-07T15:01:22.306916-05',
                    'timezone_hour');
            ''')
    async def test_edgeql_functions_datetime_get_05(self):
        """'epoch' is not a valid unit ('epochseconds' is the valid one)."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                'invalid unit for std::datetime_get'):
            await self.con.execute(
                r'''
                SELECT <str>datetime_get(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'epoch');
                ''')
    async def test_edgeql_functions_date_get_01(self):
        """cal::date_get() extracts year/month/day from a local_date."""
        await self.assert_query_result(
            r'''SELECT cal::date_get(<cal::local_date>'2018-05-07', 'year');
            ''',
            {2018},
        )
        await self.assert_query_result(
            r'''SELECT cal::date_get(<cal::local_date>'2018-05-07', 'month');
            ''',
            {5},
        )
        await self.assert_query_result(
            r'''SELECT cal::date_get(<cal::local_date>'2018-05-07', 'day');
            ''',
            {7},
        )
    async def test_edgeql_functions_date_get_02(self):
        """cal::date_get() rejects the invalid 'epoch' unit."""
        # NOTE(review): the expected message says 'std::date_get' while
        # the function lives in cal:: — presumably that is the server's
        # actual wording; confirm against the error-message source.
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                'invalid unit for std::date_get'):
            await self.con.execute(
                r'''
                SELECT <str>cal::date_get(
                    <cal::local_date>'2018-05-07', 'epoch');
                ''')
    async def test_edgeql_functions_time_get_01(self):
        """cal::time_get() extracts fields of a local_time;
        'midnightseconds' is seconds since midnight
        (15*3600 + 1*60 + 22.306916 = 54082.306916)."""
        await self.assert_query_result(
            r'''SELECT
                cal::time_get(<cal::local_time>'15:01:22.306916', 'hour')
            ''',
            {15},
        )
        await self.assert_query_result(
            r'''SELECT
                cal::time_get(<cal::local_time>'15:01:22.306916', 'minutes')
            ''',
            {1},
        )
        await self.assert_query_result(
            r'''SELECT
                cal::time_get(<cal::local_time>'15:01:22.306916', 'seconds')
            ''',
            {22.306916},
        )
        await self.assert_query_result(
            r'''SELECT
                cal::time_get(<cal::local_time>'15:01:22.306916',
                              'midnightseconds')
            ''',
            {54082.306916},
        )
    async def test_edgeql_functions_time_get_02(self):
        """cal::time_get() rejects the invalid 'epoch' unit."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                'invalid unit for std::time_get'):
            await self.con.execute(
                r'''
                SELECT <str>cal::time_get(
                    <cal::local_time>'15:01:22.306916', 'epoch');
                ''')
    async def test_edgeql_functions_datetime_trunc_01(self):
        """datetime_truncate() rounds down to each supported unit;
        input 15:01-05 is 20:01 UTC, so hour-level results show 20."""
        await self.assert_query_result(
            r'''
                SELECT <str>datetime_truncate(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'years');
            ''',
            {'2018-01-01T00:00:00+00:00'},
        )
        await self.assert_query_result(
            r'''
                SELECT <str>datetime_truncate(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'decades');
            ''',
            {'2010-01-01T00:00:00+00:00'},
        )
        await self.assert_query_result(
            r'''
                SELECT <str>datetime_truncate(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'centuries');
            ''',
            {'2001-01-01T00:00:00+00:00'},
        )
        await self.assert_query_result(
            r'''
                SELECT <str>datetime_truncate(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'quarters');
            ''',
            {'2018-04-01T00:00:00+00:00'},
        )
        await self.assert_query_result(
            r'''
                SELECT <str>datetime_truncate(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'months');
            ''',
            {'2018-05-01T00:00:00+00:00'},
        )
        await self.assert_query_result(
            r'''
                SELECT <str>datetime_truncate(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'weeks');
            ''',
            {'2018-05-07T00:00:00+00:00'},
        )
        await self.assert_query_result(
            r'''
                SELECT <str>datetime_truncate(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'days');
            ''',
            {'2018-05-07T00:00:00+00:00'},
        )
        await self.assert_query_result(
            r'''
                SELECT <str>datetime_truncate(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'hours');
            ''',
            {'2018-05-07T20:00:00+00:00'},
        )
        await self.assert_query_result(
            r'''
                SELECT <str>datetime_truncate(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'minutes');
            ''',
            {'2018-05-07T20:01:00+00:00'},
        )
        await self.assert_query_result(
            r'''
                SELECT <str>datetime_truncate(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'seconds');
            ''',
            {'2018-05-07T20:01:22+00:00'},
        )
    async def test_edgeql_functions_datetime_trunc_02(self):
        """Singular 'second' is not a valid truncation unit (plural only)."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                'invalid unit for std::datetime_truncate'):
            await self.con.execute(
                r'''
                SELECT <str>datetime_truncate(
                    <datetime>'2018-05-07T15:01:22.306916-05', 'second');
                ''')
    async def test_edgeql_functions_duration_trunc_01(self):
        """duration_truncate() drops precision below the given unit."""
        await self.assert_query_result(
            r'''
                SELECT <str>duration_truncate(
                    <duration>'15:01:22.306916', 'hours');
            ''',
            {'PT15H'},
        )
        await self.assert_query_result(
            r'''
                SELECT <str>duration_truncate(
                    <duration>'15:01:22.306916', 'minutes');
            ''',
            {'PT15H1M'},
        )
        await self.assert_query_result(
            r'''
                SELECT <str>duration_truncate(
                    <duration>'15:01:22.306916', 'seconds');
            ''',
            {'PT15H1M22S'},
        )
        await self.assert_query_result(
            r'''
                SELECT <str>duration_truncate(
                    <duration>'15:01:22.306916', 'milliseconds');
            ''',
            {'PT15H1M22.306S'},
        )
        # Currently no-op but may be useful if precision is improved
        await self.assert_query_result(
            r'''
                SELECT <str>duration_truncate(
                    <duration>'15:01:22.306916', 'microseconds');
            ''',
            {'PT15H1M22.306916S'},
        )
    async def test_edgeql_functions_duration_trunc_02(self):
        """Singular 'day' is not a valid duration truncation unit."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                'invalid unit for std::duration_truncate'):
            await self.con.execute(
                r'''
                SELECT <str>duration_truncate(
                    <duration>'73 hours', 'day');
                ''')
    async def test_edgeql_functions_to_datetime_01(self):
        """to_datetime() from components accepts a named zone ('EST')
        or a numeric offset ('-5'); an empty fmt string is an error."""
        await self.assert_query_result(
            r'''
                SELECT <str>to_datetime(
                    2018, 5, 7, 15, 1, 22.306916, 'EST');
            ''',
            ['2018-05-07T20:01:22.306916+00:00'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>to_datetime(
                    2018, 5, 7, 15, 1, 22.306916, '-5');
            ''',
            ['2018-05-07T20:01:22.306916+00:00'],
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query('SELECT to_datetime("2017-10-10", "")')
    async def test_edgeql_functions_to_datetime_02(self):
        """to_datetime() attaches a time zone to a local_datetime."""
        await self.assert_query_result(
            r'''
                SELECT <str>to_datetime(
                    cal::to_local_datetime(2018, 5, 7, 15, 1, 22.306916),
                    'EST')
            ''',
            ['2018-05-07T20:01:22.306916+00:00'],
        )
    async def test_edgeql_functions_to_datetime_03(self):
        """to_datetime() with a format string: TZH/TZM parse offsets,
        double-quoted "TZH"/"TZM" are literal text, and a format that
        never yields a time zone is rejected."""
        await self.assert_query_result(
            r'''
                SELECT
                    to_datetime('2019/01/01 00:00:00 0715',
                                'YYYY/MM/DD H24:MI:SS TZHTZM') =
                    <datetime>'2019-01-01T00:00:00+0715';
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                SELECT
                    to_datetime('2019/01/01 00:00:00 07TZM',
                                'YYYY/MM/DD H24:MI:SS TZH"TZM"') =
                    <datetime>'2019-01-01T00:00:00+07';
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                SELECT
                    to_datetime('2019/01/01 00:00:00 TZH07TZM',
                                'YYYY/MM/DD H24:MI:SS "TZH"TZH"TZM"') =
                    <datetime>'2019-01-01T00:00:00+07';
            ''',
            [True],
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'missing required time zone in format'):
            async with self.con.transaction():
                await self.con.query(r'''
                    SELECT
                        to_datetime('2019/01/01 00:00:00 TZH07',
                                    'YYYY/MM/DD H24:MI:SS "TZH"TZM') =
                        <datetime>'2019-01-01T00:00:00+07';
                ''')
    async def test_edgeql_functions_to_datetime_04(self):
        """A format whose literal part consumes the offset leaves the
        input without a time zone, which is an error."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'missing required time zone in input'):
            async with self.con.transaction():
                await self.con.query(r'''
                    SELECT
                        to_datetime('2019/01/01 00:00:00 0715',
                                    'YYYY/MM/DD H24:MI:SS "NOPE"TZHTZM');
                ''')
    async def test_edgeql_functions_to_datetime_05(self):
        """Single-string to_datetime() requires an explicit time zone."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'invalid input syntax'):
            async with self.con.transaction():
                # omitting time zone
                await self.con.query(r'''
                    SELECT
                        to_datetime('2019/01/01 00:00:00');
                ''')
    async def test_edgeql_functions_to_datetime_06(self):
        """Years outside the supported range (10000, 0, negative) raise
        a 'value out of range' error."""
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT to_datetime(10000, 1, 1, 1, 1, 1, 'UTC');
            ''')
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT to_datetime(0, 1, 1, 1, 1, 1, 'UTC');
            ''')
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT to_datetime(-1, 1, 1, 1, 1, 1, 'UTC');
            ''')
    async def test_edgeql_functions_to_local_datetime_01(self):
        """cal::to_local_datetime() projects a datetime into a zone
        (20:01 UTC is 13:01 US/Pacific)."""
        await self.assert_query_result(
            r'''
                SELECT <str>cal::to_local_datetime(
                    <datetime>'2018-05-07T20:01:22.306916+00:00',
                    'US/Pacific');
            ''',
            ['2018-05-07T13:01:22.306916'],
        )
async def test_edgeql_functions_to_local_datetime_02(self):
await self.assert_query_result(
r'''
SELECT <str>cal::to_local_datetime(2018, 5, 7, 15, 1, 22.306916);
''',
['2018-05-07T15:01:22.306916'],
)
    async def test_edgeql_functions_to_local_datetime_03(self):
        """A trailing time-zone token in the input is ignored when the
        format either quotes it as literal text or omits it entirely."""
        await self.assert_query_result(
            # The time zone is ignored because the format string just
            # specifies arbitrary characters in its place.
            r'''
                SELECT
                    cal::to_local_datetime('2019/01/01 00:00:00 0715',
                                           'YYYY/MM/DD H24:MI:SS "NOTZ"') =
                    <cal::local_datetime>'2019-01-01T00:00:00';
            ''',
            [True],
        )
        await self.assert_query_result(
            # The time zone is ignored because the format string does
            # not expect to parse it.
            r'''
                SELECT
                    cal::to_local_datetime('2019/01/01 00:00:00 0715',
                                           'YYYY/MM/DD H24:MI:SS') =
                    <cal::local_datetime>'2019-01-01T00:00:00';
            ''',
            [True],
        )
    async def test_edgeql_functions_to_local_datetime_04(self):
        """A TZH token in the format is invalid for a local (tz-naive)
        datetime."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'unexpected time zone in format'):
            async with self.con.transaction():
                await self.con.query(
                    r'''
                    SELECT
                        cal::to_local_datetime('2019/01/01 00:00:00 0715',
                                               'YYYY/MM/DD H24:MI:SS TZH') =
                        <cal::local_datetime>'2019-01-01T00:00:00';
                    ''')
    async def test_edgeql_functions_to_local_datetime_05(self):
        """The session time zone is unaffected by a to_local_datetime
        call sandwiched between plain casts."""
        await self.assert_query_result(
            # Make sure that time zone change (while converting
            # `to_local_datetime`) is not leaking.
            r'''
                SELECT (<str><cal::local_datetime>'2019-01-01 00:00:00',
                        <str>cal::to_local_datetime('2019/01/01 00:00:00 0715',
                                                    'YYYY/MM/DD H24:MI:SS'),
                        <str><cal::local_datetime>'2019-02-01 00:00:00');
            ''',
            [['2019-01-01T00:00:00',
              '2019-01-01T00:00:00',
              '2019-02-01T00:00:00']],
        )
    async def test_edgeql_functions_to_local_datetime_06(self):
        """Single-string form rejects input that carries a time zone."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'invalid input syntax'):
            async with self.con.transaction():
                # including time zone
                await self.con.query(r'''
                    SELECT
                        cal::to_local_datetime('2019/01/01 00:00:00 0715');
                ''')
    async def test_edgeql_functions_to_local_datetime_07(self):
        """Years outside the supported range (10000, 0, negative) raise
        a 'value out of range' error."""
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT cal::to_local_datetime(10000, 1, 1, 1, 1, 1);
            ''')
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT cal::to_local_datetime(0, 1, 1, 1, 1, 1);
            ''')
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT cal::to_local_datetime(-1, 1, 1, 1, 1, 1);
            ''')
    async def test_edgeql_functions_to_local_date_01(self):
        """cal::to_local_date() from components; empty fmt is an error."""
        await self.assert_query_result(
            r'''
                SELECT <str>cal::to_local_date(2018, 5, 7);
            ''',
            ['2018-05-07'],
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(
                    'SELECT cal::to_local_date("2017-10-10", "")')
    async def test_edgeql_functions_to_local_date_02(self):
        """cal::to_local_date() projects a datetime into a zone, keeping
        only the date part."""
        await self.assert_query_result(
            r'''
                SELECT <str>cal::to_local_date(
                    <datetime>'2018-05-07T20:01:22.306916+00:00',
                    'US/Pacific');
            ''',
            ['2018-05-07'],
        )
    async def test_edgeql_functions_to_local_date_03(self):
        """A TZH token in the format is invalid for a local_date."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'unexpected time zone in format'):
            async with self.con.transaction():
                await self.con.query(
                    r'''
                    SELECT
                        cal::to_local_date('2019/01/01 00:00:00 0715',
                                           'YYYY/MM/DD H24:MI:SS TZH') =
                        <cal::local_date>'2019-01-01';
                    ''')
    async def test_edgeql_functions_to_local_date_04(self):
        """Single-string form rejects input with extra time/zone parts."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'invalid input syntax'):
            async with self.con.transaction():
                # including too much
                await self.con.query(r'''
                    SELECT
                        cal::to_local_date('2019/01/01 00:00:00 0715');
                ''')
    async def test_edgeql_functions_to_local_date_05(self):
        """Years outside the supported range (10000, 0, negative) raise
        a 'value out of range' error."""
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT cal::to_local_date(10000, 1, 1);
            ''')
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT cal::to_local_date(0, 1, 1);
            ''')
        async with self.assertRaisesRegexTx(
            edgedb.InvalidValueError,
            'value out of range',
        ):
            await self.con.query(r'''
                SELECT cal::to_local_date(-1, 1, 1);
            ''')
    async def test_edgeql_functions_to_local_time_01(self):
        """cal::to_local_time() from components; empty fmt is an error."""
        await self.assert_query_result(
            r'''
                SELECT <str>cal::to_local_time(15, 1, 22.306916);
            ''',
            ['15:01:22.306916'],
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(
                    'SELECT cal::to_local_time("12:00:00", "")')
    async def test_edgeql_functions_to_local_time_02(self):
        """cal::to_local_time() projects a datetime into a zone, keeping
        only the time part (20:01 UTC is 13:01 US/Pacific)."""
        await self.assert_query_result(
            r'''
                SELECT <str>cal::to_local_time(
                    <datetime>'2018-05-07T20:01:22.306916+00:00',
                    'US/Pacific');
            ''',
            ['13:01:22.306916'],
        )
    async def test_edgeql_functions_to_local_time_03(self):
        """A TZH token in the format is invalid for a local_time."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'unexpected time zone in format'):
            async with self.con.transaction():
                await self.con.query(
                    r'''
                    SELECT
                        cal::to_local_time('00:00:00 0715',
                                           'H24:MI:SS TZH') =
                        <cal::local_time>'00:00:00';
                    ''')
async def test_edgeql_functions_to_local_time_04(self):
with self.assertRaisesRegex(edgedb.InvalidValueError,
'invalid input syntax'):
async with self.con.transaction():
# including time zone
await self.con.query(r'''
SELECT
cal::to_local_datetime('00:00:00 0715');
''')
    async def test_edgeql_functions_to_duration_01(self):
        """to_duration() keyword components serialize as ISO 8601."""
        await self.assert_query_result(
            r'''SELECT <str>to_duration(hours:=20);''',
            ['PT20H'],
        )
        await self.assert_query_result(
            r'''SELECT <str>to_duration(minutes:=20);''',
            ['PT20M'],
        )
        await self.assert_query_result(
            r'''SELECT <str>to_duration(seconds:=20);''',
            ['PT20S'],
        )
        await self.assert_query_result(
            r'''SELECT <str>to_duration(seconds:=20.15);''',
            ['PT20.15S'],
        )
        await self.assert_query_result(
            r'''SELECT <str>to_duration(microseconds:=100);''',
            ['PT0.0001S'],
        )
async def test_edgeql_functions_to_duration_02(self):
await self.assert_query_result(
r'''SELECT to_duration(hours:=20) > to_duration(minutes:=20);''',
[True],
)
await self.assert_query_result(
r'''SELECT to_duration(minutes:=20) > to_duration(seconds:=20);''',
[True],
)
    async def test_edgeql_functions_duration_to_seconds(self):
        """duration_to_seconds() converts whole-hour and fractional
        durations."""
        await self.assert_query_result(
            r'''SELECT duration_to_seconds(<duration>'20 hours');''',
            [72000.0],
        )
        await self.assert_query_result(
            r'''SELECT duration_to_seconds(<duration>'1:02:03.000123');''',
            [3723.000123],
        )
    async def test_edgeql_functions_duration_to_seconds_exact(self):
        """duration_to_seconds() keeps microsecond precision where a
        naive float epoch extraction would lose it."""
        # at this value extract(epoch from duration) is imprecise
        await self.assert_query_result(
            r'''SELECT duration_to_seconds(
                <duration>'1801439850 seconds 123456 microseconds');''',
            [1801439850.123456],
        )
    async def test_edgeql_functions_to_str_01(self):
        """For simple scalars, to_str() without a format equals the
        <str> cast; an empty format string is always an error."""
        # at the very least the cast <str> should be equivalent to
        # a call to to_str() without explicit format for simple scalars
        await self.assert_query_result(
            r'''
                WITH DT := datetime_current()
                # FIXME: the cast has a "T" and the str doesn't for some reason
                SELECT <str>DT = to_str(DT);
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                WITH D := cal::to_local_date(datetime_current(), 'UTC')
                SELECT <str>D = to_str(D);
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                WITH NT := cal::to_local_time(datetime_current(), 'UTC')
                SELECT <str>NT = to_str(NT);
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT <str>123 = to_str(123);''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT <str>123.456 = to_str(123.456);''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT <str>123.456e-20 = to_str(123.456e-20);''',
            [True],
        )
        await self.assert_query_result(
            r'''
            SELECT <str><decimal>'123456789012345678901234567890.1234567890' =
                to_str(123456789012345678901234567890.1234567890n);
            ''',
            [True],
        )
        # Empty format string shouldn't produce an empty set.
        #
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''SELECT to_str(1, "")''')
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''SELECT to_str(1.1, "")''')
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''SELECT to_str(1.1n, "")''')
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(
                    r'''SELECT to_str(to_json('{}'), "")''')
    async def test_edgeql_functions_to_str_02(self):
        """to_str() on a datetime honors date format patterns; a format
        with no pattern tokens is emitted literally; empty fmt errors."""
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'YYYY-MM-DD');
            ''',
            {'2018-05-07'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'YYYYBC');
            ''',
            {'2018AD'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'FMDDth of FMMonth, YYYY');
            ''',
            {'7th of May, 2018'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'CCth "century"');
            ''',
            {'21st century'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'Y,YYY Month DD Day');
            ''',
            {'2,018 May       07 Monday   '},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'foo');
            ''',
            {'foo'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, '   ');
            ''',
            {'   '}
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''
                    WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                    SELECT to_str(DT, '');
                ''')
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''
                    WITH DT := to_duration(hours:=20)
                    SELECT to_str(DT, '');
                ''')
    async def test_edgeql_functions_to_str_03(self):
        """to_str() of a datetime with 12-hour clock format."""
        await self.assert_query_result(
            r'''
                WITH DT := <datetime>'2018-05-07 15:01:22.306916-05'
                SELECT to_str(DT, 'HH:MI A.M.');
            ''',
            # tests run in UTC time-zone, so 15:01-05 is 20:01 UTC
            {'08:01 P.M.'},
        )
    async def test_edgeql_functions_to_str_04(self):
        """to_str() formatting of cal::local_date mirrors the datetime
        formats; empty fmt errors for local_time and local_date too."""
        await self.assert_query_result(
            r'''
                WITH DT := <cal::local_date>'2018-05-07'
                SELECT to_str(DT, 'YYYY-MM-DD');
            ''',
            {'2018-05-07'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <cal::local_date>'2018-05-07'
                SELECT to_str(DT, 'YYYYBC');
            ''',
            {'2018AD'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <cal::local_date>'2018-05-07'
                SELECT to_str(DT, 'FMDDth of FMMonth, YYYY');
            ''',
            {'7th of May, 2018'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <cal::local_date>'2018-05-07'
                SELECT to_str(DT, 'CCth "century"');
            ''',
            {'21st century'},
        )
        await self.assert_query_result(
            r'''
                WITH DT := <cal::local_date>'2018-05-07'
                SELECT to_str(DT, 'Y,YYY Month DD Day');
            ''',
            {'2,018 May       07 Monday   '},
        )
        await self.assert_query_result(
            r'''
                # the format string doesn't have any special characters
                WITH DT := <cal::local_date>'2018-05-07'
                SELECT to_str(DT, 'foo');
            ''',
            {'foo'},
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''
                    WITH DT := <cal::local_time>'12:00:00'
                    SELECT to_str(DT, '');
                ''')
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''
                    WITH DT := <cal::local_date>'2018-05-07'
                    SELECT to_str(DT, '');
                ''')
    async def test_edgeql_functions_to_str_05(self):
        """Integer formatting: digit masks, grouping, sign/padding
        modifiers (FM/S/SG/0), PR for negatives, and ordinal suffixes."""
        await self.assert_query_result(
            r'''SELECT to_str(123456789, '99');''',
            {' ##'},  # the number is too long for the desired representation
        )
        await self.assert_query_result(
            r'''SELECT to_str(123456789, '999999999');''',
            {' 123456789'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123456789, '999,999,999');''',
            {' 123,456,789'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123456789, '999,999,999,999');''',
            {'     123,456,789'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123456789, 'FM999,999,999,999');''',
            {'123,456,789'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123456789, 'S999,999,999,999');''',
            {'    +123,456,789'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123456789, 'SG999,999,999,999');''',
            {'+    123,456,789'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123456789, 'S099,999,999,999');''',
            {'+000,123,456,789'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123456789, 'SG099,999,999,999');''',
            {'+000,123,456,789'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123456789, 'S099999999999');''',
            {'+000123456789'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123456789, 'S990999999999');''',
            {'  +0123456789'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123456789, 'FMS990999999999');''',
            {'+0123456789'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(-123456789, '999999999PR');''',
            {'<123456789>'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(987654321, 'FM999999999th');''',
            {'987654321st'},
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(r'''SELECT to_str(987654321, '');''',)
    async def test_edgeql_functions_to_str_06(self):
        """Float formatting: rounding to the mask, FM trimming, tiny
        values collapsing to zero, and EEEE scientific notation."""
        await self.assert_query_result(
            r'''SELECT to_str(123.456789, '99');''',
            {' ##'},  # the integer part of the number is too long
        )
        await self.assert_query_result(
            r'''SELECT to_str(123.456789, '999');''',
            {' 123'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123.456789, '999.999');''',
            {' 123.457'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123.456789, '999.999999999');''',
            {' 123.456789000'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123.456789, 'FM999.999999999');''',
            {'123.456789'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123.456789e-20, '999.999999999');''',
            {'    .000000000'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123.456789e-20, 'FM999.999999999');''',
            {'0.'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123.456789e-20, '099.999999990');''',
            {' 000.000000000'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123.456789e-20, 'FM990.099999999');''',
            {'0.0'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123.456789e-20, '0.0999EEEE');''',
            {' 1.2346e-18'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(123.456789e20, '0.0999EEEE');''',
            {' 1.2346e+22'},
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(
                    r'''SELECT to_str(123.456789e20, '');''')
    async def test_edgeql_functions_to_str_07(self):
        """to_str() of a local_time: 12/24-hour formats, literal-only
        format strings; empty fmt is an error."""
        await self.assert_query_result(
            r'''SELECT to_str(<cal::local_time>'15:01:22', 'HH:MI A.M.');''',
            {'03:01 P.M.'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(<cal::local_time>'15:01:22', 'HH:MI:SSam.');''',
            {'03:01:22pm.'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(<cal::local_time>'15:01:22', 'HH24:MI');''',
            {'15:01'},
        )
        await self.assert_query_result(
            r'''SELECT to_str(<cal::local_time>'15:01:22', '  ');''',
            {'  '},
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query(
                    r'''SELECT to_str(<cal::local_time>'15:01:22', '');''',)
async def test_edgeql_functions_array_join_01(self):
await self.assert_query_result(
r'''SELECT array_join(['one', 'two', 'three'], ', ');''',
['one, two, three'],
)
await self.assert_query_result(
r'''SELECT array_join(['one', 'two', 'three'], '');''',
['onetwothree'],
)
await self.assert_query_result(
r'''SELECT array_join(<array<str>>[], ', ');''',
[''],
)
async def test_edgeql_functions_str_split_01(self):
await self.assert_query_result(
r'''SELECT str_split('one, two, three', ', ');''',
[['one', 'two', 'three']],
)
await self.assert_query_result(
r'''SELECT str_split('', ', ');''',
[[]],
)
await self.assert_query_result(
r'''SELECT str_split('foo', ', ');''',
[['foo']],
)
await self.assert_query_result(
r'''SELECT str_split('foo', '');''',
[['f', 'o', 'o']],
)
    async def test_edgeql_functions_to_int_01(self):
        """to_int64() parses back every formatting mask that to_str()
        produces (grouping, FM/S/SG/0, PR, ordinal suffixes), including
        a format supplied as a query parameter; empty fmt errors."""
        await self.assert_query_result(
            r'''SELECT to_int64(' 123456789', '999999999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int64(' 123,456,789', '999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int64('     123,456,789', '999,999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int64('123,456,789', 'FM999,999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int64('    +123,456,789', 'S999,999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int64('+    123,456,789', 'SG999,999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int64('+000,123,456,789', 'S099,999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int64('+000,123,456,789', 'SG099,999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int64('+000123456789', 'S099999999999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int64('  +0123456789', 'S990999999999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int64('+0123456789', 'FMS990999999999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int64('<123456789>', '999999999PR');''',
            {-123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int64('987654321st', 'FM999999999th');''',
            {987654321},
        )
        await self.assert_query_result(
            r'''SELECT to_int64('987654321st', <str>$0);''',
            {987654321},
            variables=('FM999999999th',),
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query('''SELECT to_int64('1', '')''')
    async def test_edgeql_functions_to_int_02(self):
        """to_int32() parses the same formatting masks as to_int64()."""
        await self.assert_query_result(
            r'''SELECT to_int32(' 123456789', '999999999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int32(' 123,456,789', '999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int32('     123,456,789', '999,999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int32('123,456,789', 'FM999,999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int32('    +123,456,789', 'S999,999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int32('+    123,456,789', 'SG999,999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int32('+000,123,456,789', 'S099,999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int32('+000,123,456,789', 'SG099,999,999,999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int32('+000123456789', 'S099999999999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int32('  +0123456789', 'S990999999999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int32('+0123456789', 'FMS990999999999');''',
            {123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int32('<123456789>', '999999999PR');''',
            {-123456789},
        )
        await self.assert_query_result(
            r'''SELECT to_int32('987654321st', 'FM999999999th');''',
            {987654321},
        )
        await self.assert_query_result(
            r'''SELECT to_int32('987654321st', <str>$0);''',
            {987654321},
            variables=('FM999999999th',),
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query('''SELECT to_int32('1', '')''')
    async def test_edgeql_functions_to_int_03(self):
        """Check to_int16() parsing with PostgreSQL-style format masks.

        Mirrors the to_int32 test with 16-bit-sized values: digit masks,
        thousands separators, FM, S/SG sign placement, 0-padding, PR
        negatives, 'th' suffixes, a parameterized format, and rejection
        of an empty "fmt" argument.
        """
        await self.assert_query_result(
            r'''SELECT to_int16('12345', '999999999');''',
            {12345},
        )
        await self.assert_query_result(
            r'''SELECT to_int16('12,345', '999,999,999');''',
            {12345},
        )
        await self.assert_query_result(
            r'''SELECT to_int16(' 12,345', '999,999,999,999');''',
            {12345},
        )
        await self.assert_query_result(
            r'''SELECT to_int16('12,345', 'FM999,999,999,999');''',
            {12345},
        )
        await self.assert_query_result(
            r'''SELECT to_int16('+12,345', 'S999,999,999,999');''',
            {12345},
        )
        await self.assert_query_result(
            r'''SELECT to_int16('+ 12,345', 'SG999,999,999,999');''',
            {12345},
        )
        await self.assert_query_result(
            r'''SELECT to_int16('-000,012,345', 'S099,999,999,999');''',
            {-12345},
        )
        await self.assert_query_result(
            r'''SELECT to_int16('+000,012,345', 'SG099,999,999,999');''',
            {12345},
        )
        await self.assert_query_result(
            r'''SELECT to_int16('+00012345', 'S099999999999');''',
            {12345},
        )
        await self.assert_query_result(
            r'''SELECT to_int16(' +012345', 'S990999999999');''',
            {12345},
        )
        await self.assert_query_result(
            r'''SELECT to_int16('+012345', 'FMS990999999999');''',
            {12345},
        )
        # PR wraps negative values in angle brackets.
        await self.assert_query_result(
            r'''SELECT to_int16('<12345>', '999999999PR');''',
            {-12345},
        )
        await self.assert_query_result(
            r'''SELECT to_int16('4321st', 'FM999999999th');''',
            {4321},
        )
        # Same as above, but with the format given as a query parameter.
        await self.assert_query_result(
            r'''SELECT to_int16('4321st', <str>$0);''',
            {4321},
            variables=('FM999999999th',),
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query('''SELECT to_int16('1', '')''')
    async def test_edgeql_functions_to_float_01(self):
        """Check to_float64() parsing with decimal-point format masks.

        Covers integer and fractional masks, trailing zeros, FM mode,
        a parameterized format, and rejection of an empty "fmt".
        """
        await self.assert_query_result(
            r'''SELECT to_float64(' 123', '999');''',
            {123},
        )
        await self.assert_query_result(
            r'''SELECT to_float64('123.457', '999.999');''',
            {123.457},
        )
        await self.assert_query_result(
            r'''SELECT to_float64(' 123.456789000', '999.999999999');''',
            {123.456789},
        )
        await self.assert_query_result(
            r'''SELECT to_float64('123.456789', 'FM999.999999999');''',
            {123.456789},
        )
        # Same as above, but with the format given as a query parameter.
        await self.assert_query_result(
            r'''SELECT to_float64('123.456789', <str>$0);''',
            {123.456789},
            variables=('FM999.999999999',)
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query('''SELECT to_float64('1', '')''')
    async def test_edgeql_functions_to_float_02(self):
        """Check to_float32() parsing with decimal-point format masks.

        The longer fractions collapse to 123.457 in the expectations,
        reflecting float32's reduced precision.  Also checks rejection
        of an empty "fmt" argument.
        """
        await self.assert_query_result(
            r'''SELECT to_float32(' 123', '999');''',
            {123},
        )
        await self.assert_query_result(
            r'''SELECT to_float32('123.457', '999.999');''',
            {123.457},
        )
        await self.assert_query_result(
            r'''SELECT to_float32(' 123.456789000', '999.999999999');''',
            {123.457},
        )
        await self.assert_query_result(
            r'''SELECT to_float32('123.456789', 'FM999.999999999');''',
            {123.457},
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query('''SELECT to_float32('1', '')''')
    async def test_edgeql_functions_to_bigint_01(self):
        """Check to_bigint() with a basic format mask and that an
        empty "fmt" argument is rejected."""
        await self.assert_query_result(
            r'''SELECT to_bigint(' 123', '999');''',
            {123},
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query('''SELECT to_bigint('1', '')''')
    async def test_edgeql_functions_to_bigint_02(self):
        """Check that to_bigint() rejects a non-integer literal."""
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    'invalid syntax'):
            async with self.con.transaction():
                await self.con.query('''SELECT to_bigint('1.02')''')
    async def test_edgeql_functions_to_decimal_01(self):
        """Check to_decimal() parsing with format masks.

        Decimal results compare against floats over the JSON protocol
        and against Decimal instances over the binary protocol, hence
        the split exp_result_json / exp_result_binary expectations.
        Also checks rejection of an empty "fmt" argument.
        """
        await self.assert_query_result(
            r'''SELECT to_decimal(' 123', '999');''',
            {123},
        )
        await self.assert_query_result(
            r'''SELECT to_decimal('123.457', '999.999');''',
            exp_result_json={123.457},
            exp_result_binary={decimal.Decimal('123.457')},
        )
        await self.assert_query_result(
            r'''SELECT to_decimal(' 123.456789000', '999.999999999');''',
            exp_result_json={123.456789},
            exp_result_binary={decimal.Decimal('123.456789')},
        )
        await self.assert_query_result(
            r'''SELECT to_decimal('123.456789', 'FM999.999999999');''',
            exp_result_json={123.456789},
            exp_result_binary={decimal.Decimal('123.456789')},
        )
        with self.assertRaisesRegex(edgedb.InvalidValueError,
                                    '"fmt" argument must be'):
            async with self.con.transaction():
                await self.con.query('''SELECT to_decimal('1', '')''')
    async def test_edgeql_functions_to_decimal_02(self):
        """Check to_decimal() on a value far beyond float64 precision;
        the binary protocol must preserve all 54 digits exactly."""
        await self.assert_query_result(
            r'''
                SELECT to_decimal(
                    '123456789123456789123456789.123456789123456789123456789',
                    'FM999999999999999999999999999.999999999999999999999999999');
            ''',
            exp_result_json={
                123456789123456789123456789.123456789123456789123456789},
            exp_result_binary={decimal.Decimal(
                '123456789123456789123456789.123456789123456789123456789')},
        )
    async def test_edgeql_functions_len_01(self):
        """Check len() on str: empty, non-empty, and element-wise over a
        set; also exercises the fully-qualified __std__::len form."""
        await self.assert_query_result(
            r'''SELECT len('');''',
            [0],
        )
        await self.assert_query_result(
            r'''SELECT len('hello');''',
            [5],
        )
        await self.assert_query_result(
            r'''SELECT __std__::len({'hello', 'world'});''',
            [5, 5]
        )
    async def test_edgeql_functions_len_02(self):
        """Check len() on bytes: empty, non-empty, and element-wise
        application over a set."""
        await self.assert_query_result(
            r'''SELECT len(b'');''',
            [0],
        )
        await self.assert_query_result(
            r'''SELECT len(b'hello');''',
            [5],
        )
        await self.assert_query_result(
            r'''SELECT len({b'hello', b'world'});''',
            [5, 5]
        )
    async def test_edgeql_functions_len_03(self):
        """Check len() on arrays: empty (typed and untyped literals),
        one and several elements, and element-wise over a set."""
        await self.assert_query_result(
            r'''SELECT len(<array<str>>[]);''',
            [0],
        )
        await self.assert_query_result(
            r'''SELECT len([]);''',
            [0],
        )
        await self.assert_query_result(
            r'''SELECT len(['hello']);''',
            [1],
        )
        await self.assert_query_result(
            r'''SELECT len(['hello', 'world']);''',
            [2],
        )
        await self.assert_query_result(
            r'''SELECT len([1, 2, 3, 4, 5]);''',
            [5],
        )
        await self.assert_query_result(
            r'''SELECT len({['hello'], ['hello', 'world']});''',
            {1, 2},
        )
    @test.xfail(
        "Known collation issue on Heroku Postgres",
        unless=os.getenv("EDGEDB_TEST_BACKEND_VENDOR") != "heroku-postgres"
    )
    async def test_edgeql_functions_min_01(self):
        """Check min() over scalars, strings, arrays and date/time types.

        Covers the empty set (empty result), singletons, mixed numeric
        sets, string collation ordering, array comparison (an empty
        array sorts first), and datetime/local_datetime/local_date/
        local_time/duration ordering.
        """
        await self.assert_query_result(
            r'''SELECT min(<int64>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT min(4);''',
            [4],
        )
        await self.assert_query_result(
            r'''SELECT min({10, 20, -3, 4});''',
            [-3],
        )
        await self.assert_query_result(
            r'''SELECT min({10, 2.5, -3.1, 4});''',
            [-3.1],
        )
        await self.assert_query_result(
            r'''SELECT min({'10', '20', '-3', '4'});''',
            ['-3'],
        )
        await self.assert_query_result(
            r'''SELECT min({'10', 'hello', 'world', '-3', '4'});''',
            ['-3'],
        )
        await self.assert_query_result(
            r'''SELECT min({'hello', 'world'});''',
            ['hello'],
        )
        await self.assert_query_result(
            r'''SELECT min({[1, 2], [3, 4]});''',
            [[1, 2]],
        )
        # An empty array compares smaller than any non-empty array.
        await self.assert_query_result(
            r'''SELECT min({[1, 2], [3, 4], <array<int64>>[]});''',
            [[]],
        )
        await self.assert_query_result(
            r'''SELECT min({[1, 2], [1, 0.4]});''',
            [[1, 0.4]],
        )
        # The datetime minimum is rendered back in UTC.
        await self.assert_query_result(
            r'''
                SELECT <str>min(<datetime>{
                    '2018-05-07T15:01:22.306916-05',
                    '2017-05-07T16:01:22.306916-05',
                    '2017-01-07T11:01:22.306916-05',
                    '2018-01-07T11:12:22.306916-05',
                });
            ''',
            ['2017-01-07T16:01:22.306916+00:00'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>min(<cal::local_datetime>{
                    '2018-05-07T15:01:22.306916',
                    '2017-05-07T16:01:22.306916',
                    '2017-01-07T11:01:22.306916',
                    '2018-01-07T11:12:22.306916',
                });
            ''',
            ['2017-01-07T11:01:22.306916'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>min(<cal::local_date>{
                    '2018-05-07',
                    '2017-05-07',
                    '2017-01-07',
                    '2018-01-07',
                });
            ''',
            ['2017-01-07'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>min(<cal::local_time>{
                    '15:01:22',
                    '16:01:22',
                    '11:01:22',
                    '11:12:22',
                });
            ''',
            ['11:01:22'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>min(<duration>{
                    '15:01:22',
                    '16:01:22',
                    '11:01:22',
                    '11:12:22',
                });
            ''',
            ['PT11H1M22S'],
        )
    async def test_edgeql_functions_min_02(self):
        """Check min() over object properties from the test schema."""
        await self.assert_query_result(
            r'''
                SELECT min(User.name);
            ''',
            ['Elvis'],
        )
        await self.assert_query_result(
            r'''
                SELECT min(Issue.time_estimate);
            ''',
            [3000],
        )
        await self.assert_query_result(
            r'''
                SELECT min(<int64>Issue.number);
            ''',
            [1],
        )
    async def test_edgeql_functions_min_03(self):
        # Objects are valid inputs to "min" and are ordered by their .id.
        await self.assert_query_result(
            r'''
                SELECT min(User).id = min(User.id);
            ''',
            [True],
        )
    async def test_edgeql_functions_max_01(self):
        """Check max() over scalars, strings, arrays and date/time types.

        Mirrors the min() test: empty set, singletons, mixed numerics,
        string collation ordering, array comparison, and datetime/
        local_datetime/local_date/local_time/duration ordering.
        """
        await self.assert_query_result(
            r'''SELECT max(<int64>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT max(4);''',
            [4],
        )
        await self.assert_query_result(
            r'''SELECT max({10, 20, -3, 4});''',
            [20],
        )
        await self.assert_query_result(
            r'''SELECT max({10, 2.5, -3.1, 4});''',
            [10],
        )
        await self.assert_query_result(
            r'''SELECT max({'10', '20', '-3', '4'});''',
            ['4'],
        )
        await self.assert_query_result(
            r'''SELECT max({'10', 'hello', 'world', '-3', '4'});''',
            ['world'],
        )
        await self.assert_query_result(
            r'''SELECT max({'hello', 'world'});''',
            ['world'],
        )
        await self.assert_query_result(
            r'''SELECT max({[1, 2], [3, 4]});''',
            [[3, 4]],
        )
        await self.assert_query_result(
            r'''SELECT max({[1, 2], [3, 4], <array<int64>>[]});''',
            [[3, 4]],
        )
        await self.assert_query_result(
            r'''SELECT max({[1, 2], [1, 0.4]});''',
            [[1, 2]],
        )
        # The datetime maximum is rendered back in UTC.
        await self.assert_query_result(
            r'''
                SELECT <str>max(<datetime>{
                    '2018-05-07T15:01:22.306916-05',
                    '2017-05-07T16:01:22.306916-05',
                    '2017-01-07T11:01:22.306916-05',
                    '2018-01-07T11:12:22.306916-05',
                });
            ''',
            ['2018-05-07T20:01:22.306916+00:00'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>max(<cal::local_datetime>{
                    '2018-05-07T15:01:22.306916',
                    '2017-05-07T16:01:22.306916',
                    '2017-01-07T11:01:22.306916',
                    '2018-01-07T11:12:22.306916',
                });
            ''',
            ['2018-05-07T15:01:22.306916'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>max(<cal::local_date>{
                    '2018-05-07',
                    '2017-05-07',
                    '2017-01-07',
                    '2018-01-07',
                });
            ''',
            ['2018-05-07'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>max(<cal::local_time>{
                    '15:01:22',
                    '16:01:22',
                    '11:01:22',
                    '11:12:22',
                });
            ''',
            ['16:01:22'],
        )
        await self.assert_query_result(
            r'''
                SELECT <str>max(<duration>{
                    '15:01:22',
                    '16:01:22',
                    '11:01:22',
                    '11:12:22',
                });
            ''',
            ['PT16H1M22S'],
        )
    async def test_edgeql_functions_max_02(self):
        """Check max() over object properties from the test schema."""
        await self.assert_query_result(
            r'''
                SELECT max(User.name);
            ''',
            ['Yury'],
        )
        await self.assert_query_result(
            r'''
                SELECT max(Issue.time_estimate);
            ''',
            [3000],
        )
        await self.assert_query_result(
            r'''
                SELECT max(<int64>Issue.number);
            ''',
            [4],
        )
    async def test_edgeql_functions_max_03(self):
        # Objects are valid inputs to "max" and are ordered by their .id.
        await self.assert_query_result(
            r'''
                SELECT max(User).id = max(User.id);
            ''',
            [True],
        )
    async def test_edgeql_functions_all_01(self):
        """Check the aggregate all() on boolean sets.

        The empty set yields True (vacuous truth); also covers
        element-wise comparisons and IN expressions with empty operands.
        """
        await self.assert_query_result(
            r'''SELECT all(<bool>{});''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT all({True});''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT all({False});''',
            [False],
        )
        await self.assert_query_result(
            r'''SELECT all({True, False, True, False});''',
            [False],
        )
        await self.assert_query_result(
            r'''SELECT all({1, 2, 3, 4} > 0);''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT all({1, -2, 3, 4} > 0);''',
            [False],
        )
        await self.assert_query_result(
            r'''SELECT all({0, -1, -2, -3} > 0);''',
            [False],
        )
        await self.assert_query_result(
            r'''SELECT all({1, -2, 3, 4} IN {-2, -1, 0, 1, 2, 3, 4});''',
            [True],
        )
        # "IN" with an empty left operand produces an empty set,
        # so all() over it is vacuously True.
        await self.assert_query_result(
            r'''SELECT all(<int64>{} IN {-2, -1, 0, 1, 2, 3, 4});''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT all({1, -2, 3, 4} IN <int64>{});''',
            [False],
        )
        await self.assert_query_result(
            r'''SELECT all(<int64>{} IN <int64>{});''',
            [True],
        )
    async def test_edgeql_functions_all_02(self):
        """Check all() over expressions on schema objects, including a
        FOR-comprehension over Issues."""
        await self.assert_query_result(
            r'''
                SELECT all(len(User.name) = 4);
            ''',
            [False],
        )
        await self.assert_query_result(
            r'''
                SELECT all(
                    (
                        FOR I IN {Issue}
                        UNION EXISTS I.time_estimate
                    )
                );
            ''',
            [False],
        )
        await self.assert_query_result(
            r'''
                SELECT all(Issue.number != '');
            ''',
            [True],
        )
    async def test_edgeql_functions_any_01(self):
        """Check the aggregate any() on boolean sets.

        The empty set yields False; also covers element-wise
        comparisons and IN expressions with empty operands.
        """
        await self.assert_query_result(
            r'''SELECT any(<bool>{});''',
            [False],
        )
        await self.assert_query_result(
            r'''SELECT any({True});''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT any({False});''',
            [False],
        )
        await self.assert_query_result(
            r'''SELECT any({True, False, True, False});''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT any({1, 2, 3, 4} > 0);''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT any({1, -2, 3, 4} > 0);''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT any({0, -1, -2, -3} > 0);''',
            [False],
        )
        await self.assert_query_result(
            r'''SELECT any({1, -2, 3, 4} IN {-2, -1, 0, 1, 2, 3, 4});''',
            [True],
        )
        # "IN" with an empty left operand produces an empty set,
        # so any() over it is False.
        await self.assert_query_result(
            r'''SELECT any(<int64>{} IN {-2, -1, 0, 1, 2, 3, 4});''',
            [False],
        )
        await self.assert_query_result(
            r'''SELECT any({1, -2, 3, 4} IN <int64>{});''',
            [False],
        )
        await self.assert_query_result(
            r'''SELECT any(<int64>{} IN <int64>{});''',
            [False],
        )
    async def test_edgeql_functions_any_02(self):
        """Check any() over expressions on schema objects, including a
        FOR-comprehension over Issues."""
        await self.assert_query_result(
            r'''
                SELECT any(len(User.name) = 4);
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                SELECT any(
                    (
                        FOR I IN {Issue}
                        UNION EXISTS I.time_estimate
                    )
                );
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                SELECT any(Issue.number != '');
            ''',
            [True],
        )
    async def test_edgeql_functions_any_03(self):
        """Check the De Morgan duality: any(P) = NOT all(NOT P),
        using the same expressions as the any_02/all_02 tests."""
        await self.assert_query_result(
            r'''
                SELECT any(len(User.name) = 4) =
                    NOT all(NOT (len(User.name) = 4));
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                SELECT any(
                    (
                        FOR I IN {Issue}
                        UNION EXISTS I.time_estimate
                    )
                ) = NOT all(
                    (
                        FOR I IN {Issue}
                        UNION NOT EXISTS I.time_estimate
                    )
                );
            ''',
            [True],
        )
        await self.assert_query_result(
            r'''
                SELECT any(Issue.number != '') = NOT all(Issue.number = '');
            ''',
            [True],
        )
    async def test_edgeql_functions_round_01(self):
        """Check round() half-value behavior per input type.

        As the expectations below show, float64 rounds halves to the
        nearest even integer (-2.5 -> -2, 0.5 -> 0, 2.5 -> 2), while
        decimal rounds halves away from zero (-2.5 -> -3, 0.5 -> 1,
        2.5 -> 3).
        """
        await self.assert_query_result(
            r'''SELECT round(<float64>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT round(<float64>1);''',
            [1],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>1);''',
            [1],
        )
        await self.assert_query_result(
            r'''SELECT round(<float64>1.2);''',
            [1],
        )
        await self.assert_query_result(
            r'''SELECT round(<float64>-1.2);''',
            [-1],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>1.2);''',
            [1],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>-1.2);''',
            [-1],
        )
        # float64: round-half-to-even.
        await self.assert_query_result(
            r'''SELECT round(<float64>-2.5);''',
            [-2],
        )
        await self.assert_query_result(
            r'''SELECT round(<float64>-1.5);''',
            [-2],
        )
        await self.assert_query_result(
            r'''SELECT round(<float64>-0.5);''',
            [0],
        )
        await self.assert_query_result(
            r'''SELECT round(<float64>0.5);''',
            [0],
        )
        await self.assert_query_result(
            r'''SELECT round(<float64>1.5);''',
            [2],
        )
        await self.assert_query_result(
            r'''SELECT round(<float64>2.5);''',
            [2],
        )
        # decimal: round-half-away-from-zero.
        await self.assert_query_result(
            r'''SELECT round(<decimal>-2.5);''',
            [-3],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>-1.5);''',
            [-2],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>-0.5);''',
            [-1],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>0.5);''',
            [1],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>1.5);''',
            [2]
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>2.5);''',
            [3]
        )
    async def test_edgeql_functions_round_02(self):
        """Check the result type of round() for each input type."""
        await self.assert_query_result(
            r'''SELECT round(1) IS int64;''',
            [True],
        )
        # float32 input is widened to a float64 result.
        await self.assert_query_result(
            r'''SELECT round(<float32>1.2) IS float64;''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT round(<float64>1.2) IS float64;''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT round(1.2) IS float64;''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT round(<bigint>1) IS bigint;''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>1.2) IS decimal;''',
            [True],
        )
        # rounding to a specified decimal place is only defined
        # for decimals
        await self.assert_query_result(
            r'''SELECT round(<decimal>1.2, 0) IS decimal;''',
            [True],
        )
    async def test_edgeql_functions_round_03(self):
        """Check round() on decimals with an explicit number of digits,
        including negative digit counts (rounding to tens, hundreds,
        thousands)."""
        await self.assert_query_result(
            r'''SELECT round(<decimal>123.456, 10);''',
            [123.456],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>123.456, 3);''',
            [123.456],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>123.456, 2);''',
            [123.46],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>123.456, 1);''',
            [123.5],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>123.456, 0);''',
            [123],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>123.456, -1);''',
            [120],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>123.456, -2);''',
            [100],
        )
        await self.assert_query_result(
            r'''SELECT round(<decimal>123.456, -3);''',
            [0],
        )
    async def test_edgeql_functions_round_04(self):
        """Check round() element-wise over Issue numbers; the int64
        (half-to-even) and decimal (half-away-from-zero) variants
        diverge on the .5 quotients."""
        await self.assert_query_result(
            r'''
                SELECT _ := round(<int64>Issue.number / 2)
                ORDER BY _;
            ''',
            [0, 1, 2, 2],
        )
        await self.assert_query_result(
            r'''
                SELECT _ := round(<decimal>Issue.number / 2)
                ORDER BY _;
            ''',
            [1, 1, 2, 2],
        )
    async def test_edgeql_functions_contains_01(self):
        """Check contains() on arrays, including an empty haystack,
        set-valued needles (element-wise result), and an empty needle
        set (empty result)."""
        await self.assert_query_result(
            r'''SELECT std::contains(<array<int64>>[], {1, 3});''',
            [False, False],
        )
        await self.assert_query_result(
            r'''SELECT contains([1], {1, 3});''',
            [True, False],
        )
        await self.assert_query_result(
            r'''SELECT contains([1, 2], 1);''',
            [True],
        )
        await self.assert_query_result(
            r'''SELECT contains([1, 2], 3);''',
            [False],
        )
        await self.assert_query_result(
            r'''SELECT contains(['a'], <str>{});''',
            [],
        )
async def test_edgeql_functions_contains_02(self):
await self.assert_query_result(
r'''
WITH x := [3, 1, 2]
SELECT contains(x, 2);
''',
[True],
)
await self.assert_query_result(
r'''
WITH x := [3, 1, 2]
SELECT contains(x, 5);
''',
[False],
)
await self.assert_query_result(
r'''
WITH x := [3, 1, 2]
SELECT contains(x, 5);
''',
[False],
)
    async def test_edgeql_functions_contains_03(self):
        """Check contains() for substring search on str.

        Empty-set operands yield an empty result; the empty string is
        contained in any string; prefixes, infixes, suffixes and
        non-substrings are covered.
        """
        await self.assert_query_result(
            r'''SELECT contains(<str>{}, <str>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT contains(<str>{}, 'a');''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT contains('qwerty', <str>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT contains('qwerty', '');''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT contains('qwerty', 'q');''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT contains('qwerty', 'qwe');''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT contains('qwerty', 'we');''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT contains('qwerty', 't');''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT contains('qwerty', 'a');''',
            {False},
        )
        await self.assert_query_result(
            r'''SELECT contains('qwerty', 'azerty');''',
            {False},
        )
    async def test_edgeql_functions_contains_04(self):
        """Check contains() for subsequence search on bytes, with
        empty-set operands yielding an empty result."""
        await self.assert_query_result(
            r'''SELECT contains(<bytes>{}, <bytes>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT contains(<bytes>{}, b'a');''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT contains(b'qwerty', <bytes>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT contains(b'qwerty', b't');''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT contains(b'qwerty', b'a');''',
            {False},
        )
        await self.assert_query_result(
            r'''SELECT contains(b'qwerty', b'azerty');''',
            {False},
        )
    async def test_edgeql_functions_contains_05(self):
        """Check contains() on an array of objects built by
        array_agg()."""
        await self.assert_query_result(
            r'''
                SELECT contains(
                    array_agg(User),
                    (SELECT User FILTER .name = 'Elvis')
                )
            ''',
            [True],
        )
    async def test_edgeql_functions_find_01(self):
        """Check find() for substring search on str.

        Returns a 0-based index, or -1 when the needle is not found;
        empty-set operands yield an empty result, and the empty string
        is found at index 0.
        """
        await self.assert_query_result(
            r'''SELECT find(<str>{}, <str>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT find(<str>{}, 'a');''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT find('qwerty', <str>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT find('qwerty', '');''',
            {0},
        )
        await self.assert_query_result(
            r'''SELECT find('qwerty', 'q');''',
            {0},
        )
        await self.assert_query_result(
            r'''SELECT find('qwerty', 'qwe');''',
            {0},
        )
        await self.assert_query_result(
            r'''SELECT find('qwerty', 'we');''',
            {1},
        )
        await self.assert_query_result(
            r'''SELECT find('qwerty', 't');''',
            {4},
        )
        await self.assert_query_result(
            r'''SELECT find('qwerty', 'a');''',
            {-1},
        )
        await self.assert_query_result(
            r'''SELECT find('qwerty', 'azerty');''',
            {-1},
        )
    async def test_edgeql_functions_find_02(self):
        """Check find() for subsequence search on bytes: 0-based index,
        -1 when absent, empty result for empty-set operands."""
        await self.assert_query_result(
            r'''SELECT find(<bytes>{}, <bytes>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT find(b'qwerty', b'');''',
            {0},
        )
        await self.assert_query_result(
            r'''SELECT find(b'qwerty', b'qwe');''',
            {0},
        )
        await self.assert_query_result(
            r'''SELECT find(b'qwerty', b'a');''',
            {-1},
        )
    async def test_edgeql_functions_find_03(self):
        """Check find() for element search in arrays.

        Returns a 0-based index or -1 when absent; empty-set operands
        yield an empty result, and an optional starting index restricts
        the search to later elements.
        """
        await self.assert_query_result(
            r'''SELECT find(<array<str>>{}, <str>{});''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT find(<array<str>>{}, 'the');''',
            [],
        )
        await self.assert_query_result(
            r'''SELECT find(['the', 'quick', 'brown', 'fox'], <str>{});''',
            [],
        )
        # An empty *array* (as opposed to an empty set) yields -1.
        await self.assert_query_result(
            r'''SELECT find(<array<str>>[], 'the');''',
            {-1},
        )
        await self.assert_query_result(
            r'''SELECT find(['the', 'quick', 'brown', 'fox'], 'the');''',
            {0},
        )
        await self.assert_query_result(
            r'''SELECT find(['the', 'quick', 'brown', 'fox'], 'fox');''',
            {3},
        )
        await self.assert_query_result(
            r'''SELECT find(['the', 'quick', 'brown', 'fox'], 'jumps');''',
            {-1},
        )
        await self.assert_query_result(
            r'''
                SELECT find(['the', 'quick', 'brown', 'fox',
                             'jumps', 'over', 'the', 'lazy', 'dog'],
                            'the');
            ''',
            {0},
        )
        # With a starting index of 1, the first 'the' is skipped.
        await self.assert_query_result(
            r'''
                SELECT find(['the', 'quick', 'brown', 'fox',
                             'jumps', 'over', 'the', 'lazy', 'dog'],
                            'the', 1);
            ''',
            {6},
        )
    async def test_edgeql_functions_str_case_01(self):
        """Check str_lower()/str_upper()/str_title(), element-wise over
        sets and on single strings, including non-ASCII (Cyrillic)
        input for str_lower."""
        await self.assert_query_result(
            r'''SELECT str_lower({'HeLlO', 'WoRlD!', 'ПриВет', 'мИр'});''',
            {'hello', 'world!', 'привет', 'мир'},
        )
        await self.assert_query_result(
            r'''SELECT str_upper({'HeLlO', 'WoRlD!'});''',
            {'HELLO', 'WORLD!'},
        )
        await self.assert_query_result(
            r'''SELECT str_title({'HeLlO', 'WoRlD!'});''',
            {'Hello', 'World!'},
        )
        await self.assert_query_result(
            r'''SELECT str_lower('HeLlO WoRlD!');''',
            {'hello world!'},
        )
        await self.assert_query_result(
            r'''SELECT str_upper('HeLlO WoRlD!');''',
            {'HELLO WORLD!'},
        )
        await self.assert_query_result(
            r'''SELECT str_title('HeLlO WoRlD!');''',
            {'Hello World!'},
        )
    async def test_edgeql_functions_str_pad_01(self):
        """Check str_pad_start()/str_pad_end() with the default space
        fill, a single-char fill, and a multi-char fill; also calls the
        deprecated str_lpad/str_rpad aliases."""
        await self.assert_query_result(
            r'''SELECT str_pad_start('Hello', 20);''',
            {'               Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_pad_start('Hello', 20, '>');''',
            {'>>>>>>>>>>>>>>>Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_pad_start('Hello', 20, '-->');''',
            {'-->-->-->-->-->Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_pad_end('Hello', 20);''',
            {'Hello               '},
        )
        await self.assert_query_result(
            r'''SELECT str_pad_end('Hello', 20, '<');''',
            {'Hello<<<<<<<<<<<<<<<'},
        )
        await self.assert_query_result(
            r'''SELECT str_pad_end('Hello', 20, '<--');''',
            {'Hello<--<--<--<--<--'},
        )
        # Call deprecated functions, too.
        await self.assert_query_result(
            r'''SELECT str_lpad('Hello', 20);''',
            {'               Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_rpad('Hello', 20);''',
            {'Hello               '},
        )
    async def test_edgeql_functions_str_pad_02(self):
        """Check that padding to a length shorter than the input
        truncates the string instead of padding it."""
        await self.assert_query_result(
            r'''SELECT str_pad_start('Hello', 2);''',
            {'He'},
        )
        await self.assert_query_result(
            r'''SELECT str_pad_start('Hello', 2, '>');''',
            {'He'},
        )
        await self.assert_query_result(
            r'''SELECT str_pad_start('Hello', 2, '-->');''',
            {'He'},
        )
        await self.assert_query_result(
            r'''SELECT str_pad_end('Hello', 2);''',
            {'He'},
        )
        await self.assert_query_result(
            r'''SELECT str_pad_end('Hello', 2, '<');''',
            {'He'},
        )
        await self.assert_query_result(
            r'''SELECT str_pad_end('Hello', 2, '<--');''',
            {'He'},
        )
    async def test_edgeql_functions_str_pad_03(self):
        """Check that the padded result always has exactly the
        requested length, for lengths shorter and longer than the
        input."""
        await self.assert_query_result(
            r'''
                WITH l := {0, 2, 10, 20}
                SELECT len(str_pad_start('Hello', l)) = l;
            ''',
            [True, True, True, True],
        )
        await self.assert_query_result(
            r'''
                WITH l := {0, 2, 10, 20}
                SELECT len(str_pad_end('Hello', l)) = l;
            ''',
            [True, True, True, True],
        )
    async def test_edgeql_functions_str_trim_01(self):
        """Check default whitespace trimming with str_trim(),
        str_trim_start() and str_trim_end(), plus the deprecated
        str_ltrim/str_rtrim aliases."""
        await self.assert_query_result(
            r'''SELECT str_trim('    Hello    ');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_start('    Hello    ');''',
            {'Hello    '},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_end('    Hello    ');''',
            {'    Hello'},
        )
        # Call deprecated functions, too.
        await self.assert_query_result(
            r'''SELECT str_ltrim('    Hello    ');''',
            {'Hello    '},
        )
        await self.assert_query_result(
            r'''SELECT str_rtrim('    Hello    ');''',
            {'    Hello'},
        )
    async def test_edgeql_functions_str_trim_02(self):
        """Check trimming with an explicit character set (' <->'):
        any run of those characters is removed from the chosen end(s),
        regardless of order."""
        await self.assert_query_result(
            r'''SELECT str_trim_start('               Hello', ' <->');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_start('>>>>>>>>>>>>>>>Hello', ' <->');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_start('-->-->-->-->-->Hello', ' <->');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_end('Hello               ', ' <->');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_end('Hello<<<<<<<<<<<<<<<', ' <->');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''SELECT str_trim_end('Hello<--<--<--<--<--', ' <->');''',
            {'Hello'},
        )
        await self.assert_query_result(
            r'''
                SELECT str_trim(
                    '-->-->-->-->-->Hello<--<--<--<--<--', ' <->');
            ''',
            {'Hello'},
        )
async def test_edgeql_functions_str_repeat_01(self):
await self.assert_query_result(
r'''SELECT str_repeat('', 1);''',
{''},
)
await self.assert_query_result(
r'''SELECT str_repeat('', 0);''',
{''},
)
await self.assert_query_result(
r'''SELECT str_repeat('', -1);''',
{''},
)
await self.assert_query_result(
r'''SELECT str_repeat('a', 1);''',
{'a'},
)
await self.assert_query_result(
r'''SELECT str_repeat('aa', 3);''',
{'aaaaaa'},
)
await self.assert_query_result(
r'''SELECT str_repeat('a', 0);''',
{''},
)
await self.assert_query_result(
r'''SELECT str_repeat('', -1);''',
{''},
)
    async def test_edgeql_functions_math_abs_01(self):
        """Check math::abs() values for positive/negative int, float,
        and decimal inputs."""
        await self.assert_query_result(
            r'''SELECT math::abs(2);''',
            {2},
        )
        await self.assert_query_result(
            r'''SELECT math::abs(-2);''',
            {2},
        )
        await self.assert_query_result(
            r'''SELECT math::abs(2.5);''',
            {2.5},
        )
        await self.assert_query_result(
            r'''SELECT math::abs(-2.5);''',
            {2.5},
        )
        await self.assert_query_result(
            r'''SELECT math::abs(<decimal>2.5);''',
            {2.5},
        )
        await self.assert_query_result(
            r'''SELECT math::abs(<decimal>-2.5);''',
            {2.5},
        )
    async def test_edgeql_functions_math_abs_02(self):
        """Check that math::abs() preserves the input type exactly for
        every numeric scalar type."""
        await self.assert_query_result(
            r'''SELECT math::abs(<int16>2) IS int16;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::abs(<int32>2) IS int32;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::abs(<int64>2) IS int64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::abs(<float32>2) IS float32;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::abs(<float64>2) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::abs(<decimal>2) IS decimal;''',
            {True},
        )
    async def test_edgeql_functions_math_ceil_01(self):
        """Check math::ceil() values: rounds toward positive infinity
        for both float and decimal inputs."""
        await self.assert_query_result(
            r'''SELECT math::ceil(2);''',
            {2},
        )
        await self.assert_query_result(
            r'''SELECT math::ceil(2.5);''',
            {3},
        )
        await self.assert_query_result(
            r'''SELECT math::ceil(-2.5);''',
            {-2},
        )
        await self.assert_query_result(
            r'''SELECT math::ceil(<decimal>2.5);''',
            {3},
        )
        await self.assert_query_result(
            r'''SELECT math::ceil(<decimal>-2.5);''',
            {-2},
        )
    async def test_edgeql_functions_math_ceil_02(self):
        """Check math::ceil() result types: small ints widen to int64,
        float32 widens to float64, bigint and decimal are preserved."""
        await self.assert_query_result(
            r'''SELECT math::ceil(<int16>2) IS int64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::ceil(<int32>2) IS int64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::ceil(<int64>2) IS int64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::ceil(<float32>2.5) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::ceil(<float64>2.5) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::ceil(<bigint>2) IS bigint;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::ceil(<decimal>2.5) IS decimal;''',
            {True},
        )
    async def test_edgeql_functions_math_floor_01(self):
        """Check math::floor() values: rounds toward negative infinity
        for both float and decimal inputs."""
        await self.assert_query_result(
            r'''SELECT math::floor(2);''',
            {2},
        )
        await self.assert_query_result(
            r'''SELECT math::floor(2.5);''',
            {2},
        )
        await self.assert_query_result(
            r'''SELECT math::floor(-2.5);''',
            {-3},
        )
        await self.assert_query_result(
            r'''SELECT math::floor(<decimal>2.5);''',
            {2},
        )
        await self.assert_query_result(
            r'''SELECT math::floor(<decimal>-2.5);''',
            {-3},
        )
    async def test_edgeql_functions_math_floor_02(self):
        """Check math::floor() result types: small ints widen to int64,
        float32 widens to float64, bigint and decimal are preserved."""
        await self.assert_query_result(
            r'''SELECT math::floor(<int16>2) IS int64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::floor(<int32>2) IS int64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::floor(<int64>2) IS int64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::floor(<float32>2.5) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::floor(<float64>2.5) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::floor(<bigint>2) IS bigint;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::floor(<decimal>2.5) IS decimal;''',
            {True},
        )
    async def test_edgeql_functions_math_log_01(self):
        """Check math::ln() (natural log), math::lg() (base-10 log),
        and math::log() with an explicit decimal base."""
        await self.assert_query_result(
            r'''SELECT math::ln({1, 10, 32});''',
            {0, 2.30258509299405, 3.46573590279973},
        )
        await self.assert_query_result(
            r'''SELECT math::lg({1, 10, 32});''',
            {0, 1, 1.50514997831991},
        )
        await self.assert_query_result(
            r'''SELECT math::log(<decimal>{1, 10, 32}, base := <decimal>2);''',
            {0, 3.321928094887362, 5},
        )
    async def test_edgeql_functions_math_mean_01(self):
        """Check math::mean() values for singletons and small integer
        and float sets."""
        await self.assert_query_result(
            r'''SELECT math::mean(1);''',
            {1.0},
        )
        await self.assert_query_result(
            r'''SELECT math::mean(1.5);''',
            {1.5},
        )
        await self.assert_query_result(
            r'''SELECT math::mean({1, 2, 3});''',
            {2.0},
        )
        await self.assert_query_result(
            r'''SELECT math::mean({1, 2, 3, 4});''',
            {2.5},
        )
        await self.assert_query_result(
            r'''SELECT math::mean({0.1, 0.2, 0.3});''',
            {0.2},
        )
        await self.assert_query_result(
            r'''SELECT math::mean({0.1, 0.2, 0.3, 0.4});''',
            {0.25},
        )
    async def test_edgeql_functions_math_mean_02(self):
        """Check math::mean() result types: every non-decimal numeric
        input produces a float64 mean; decimal stays decimal."""
        # int16 is implicitly cast in float32, which produces a
        # float64 result
        await self.assert_query_result(
            r'''SELECT math::mean(<int16>2) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::mean(<int32>2) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::mean(<int64>2) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::mean(<float32>2) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::mean(<float64>2) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::mean(<decimal>2) IS decimal;''',
            {True},
        )
    async def test_edgeql_functions_math_mean_03(self):
        """Check the invariant sum(A) ~= count(A) * mean(A) on a small
        integer set."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    A := {1, 3, 1}
                # the difference between sum and mean * count is due to
                # rounding errors, but it should be small
                SELECT abs(sum(A) - count(A) * mean(A)) < 1e-10;
            ''',
            {True},
        )
    async def test_edgeql_functions_math_mean_04(self):
        """Check the invariant sum(A) ~= count(A) * mean(A) on a small
        float64 set."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    A := <float64>{1, 3, 1}
                # the difference between sum and mean * count is due to
                # rounding errors, but it should be small
                SELECT abs(sum(A) - count(A) * mean(A)) < 1e-10;
            ''',
            {True},
        )
    async def test_edgeql_functions_math_mean_05(self):
        """Check the sum/count/mean invariant on an int set derived
        from schema data (name lengths)."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    A := len(default::Named.name)
                # the difference between sum and mean * count is due to
                # rounding errors, but it should be small
                SELECT abs(sum(A) - count(A) * mean(A)) < 1e-10;
            ''',
            {True},
        )
    async def test_edgeql_functions_math_mean_06(self):
        """Check the sum/count/mean invariant on a float64 set derived
        from schema data (name lengths)."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    A := <float64>len(default::Named.name)
                # the difference between sum and mean * count is due to
                # rounding errors, but it should be small
                SELECT abs(sum(A) - count(A) * mean(A)) < 1e-10;
            ''',
            {True},
        )
    async def test_edgeql_functions_math_mean_07(self):
        """Check mean(A) * count(A) on a singleton set."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    A := {3}
                SELECT mean(A) * count(A);
            ''',
            {3},
        )
    async def test_edgeql_functions_math_mean_08(self):
        """mean(X) equals sum(X) / count(X) exactly for small int and float sets."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {1, 2, 3, 4}
                SELECT mean(X) = sum(X) / count(X);
            ''',
            {True},
        )
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {0.1, 0.2, 0.3, 0.4}
                SELECT mean(X) = sum(X) / count(X);
            ''',
            {True},
        )
    async def test_edgeql_functions_math_mean_09(self):
        """mean() of an empty set raises InvalidValueError."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to mean\(\): "
                r"not enough elements in input set"):
            await self.con.query(r'''
                SELECT math::mean(<int64>{});
            ''')
    async def test_edgeql_functions_math_stddev_01(self):
        """stddev() computes the sample standard deviation of literal sets."""
        await self.assert_query_result(
            r'''SELECT math::stddev({1, 1});''',
            {0},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev({1, 1, -1, 1});''',
            {1.0},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev({1, 2, 3});''',
            {1.0},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev({0.1, 0.1, -0.1, 0.1});''',
            {0.1},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev(<decimal>{0.1, 0.2, 0.3});''',
            {0.1},
        )
    async def test_edgeql_functions_math_stddev_02(self):
        """stddev() returns float64 for int/float inputs and decimal for decimal."""
        await self.assert_query_result(
            r'''SELECT math::stddev(<int16>{1, 1}) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev(<int32>{1, 1}) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev(<int64>{1, 1}) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev(<float32>{1, 1}) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev(<float64>{1, 1}) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev(<decimal>{1, 1}) IS decimal;''',
            {True},
        )
    async def test_edgeql_functions_math_stddev_03(self):
        """stddev() of an empty set raises InvalidValueError."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to stddev\(\): not enough "
                r"elements in input set"):
            await self.con.query(r'''
                SELECT math::stddev(<int64>{});
            ''')
    async def test_edgeql_functions_math_stddev_04(self):
        """stddev() of a single element raises (sample stddev needs >= 2 values)."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to stddev\(\): not enough "
                r"elements in input set"):
            await self.con.query(r'''
                SELECT math::stddev(1);
            ''')
    async def test_edgeql_functions_math_stddev_pop_01(self):
        """stddev_pop() computes the population standard deviation (works for n=1)."""
        await self.assert_query_result(
            r'''SELECT math::stddev_pop(1);''',
            {0.0},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev_pop({1, 1, 1});''',
            {0.0},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev_pop({1, 2, 1, 2});''',
            {0.5},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev_pop({0.1, 0.1, 0.1});''',
            {0.0},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev_pop({0.1, 0.2, 0.1, 0.2});''',
            {0.05},
        )
    async def test_edgeql_functions_math_stddev_pop_02(self):
        """stddev_pop() returns float64 for int/float inputs and decimal for decimal."""
        await self.assert_query_result(
            r'''SELECT math::stddev_pop(<int16>1) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev_pop(<int32>1) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev_pop(<int64>1) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev_pop(<float32>1) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev_pop(<float64>1) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::stddev_pop(<decimal>1) IS decimal;''',
            {True},
        )
    async def test_edgeql_functions_math_stddev_pop_04(self):
        """stddev_pop() of an empty set raises InvalidValueError."""
        # NOTE(review): there is no ..._03 sibling; numbering gap appears
        # intentional or a removed test.
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to stddev_pop\(\): not enough "
                r"elements in input set"):
            await self.con.query(r'''
                SELECT math::stddev_pop(<int64>{});
            ''')
    async def test_edgeql_functions_math_var_01(self):
        """var() computes the sample variance of literal sets."""
        await self.assert_query_result(
            r'''SELECT math::var({1, 1});''',
            {0},
        )
        await self.assert_query_result(
            r'''SELECT math::var({1, 1, -1, 1});''',
            {1.0},
        )
        await self.assert_query_result(
            r'''SELECT math::var({1, 2, 3});''',
            {1.0},
        )
        await self.assert_query_result(
            r'''SELECT math::var({0.1, 0.1, -0.1, 0.1});''',
            {0.01},
        )
        await self.assert_query_result(
            r'''SELECT math::var(<decimal>{0.1, 0.2, 0.3});''',
            {0.01},
        )
    async def test_edgeql_functions_math_var_02(self):
        """var() returns float64 for int/float inputs and decimal for decimal."""
        # int16 is implicitly cast in float32, which produces a
        # float64 result
        await self.assert_query_result(
            r'''SELECT math::var(<int16>{1, 1}) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::var(<int32>{1, 1}) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::var(<int64>{1, 1}) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::var(<float32>{1, 1}) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::var(<float64>{1, 1}) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::var(<decimal>{1, 1}) IS decimal;''',
            {True},
        )
    async def test_edgeql_functions_math_var_03(self):
        """var(X) equals stddev(X) squared for a range of literal sets."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {1, 1}
                SELECT var(X) = stddev(X) ^ 2;
            ''',
            {True},
        )
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {1, 1, -1, 1}
                SELECT var(X) = stddev(X) ^ 2;
            ''',
            {True},
        )
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {1, 2, 3}
                SELECT var(X) = stddev(X) ^ 2;
            ''',
            {True},
        )
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {0.1, 0.1, -0.1, 0.1}
                SELECT var(X) = stddev(X) ^ 2;
            ''',
            {True},
        )
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := <decimal>{0.1, 0.2, 0.3}
                SELECT var(X) = stddev(X) ^ 2;
            ''',
            {True},
        )
    async def test_edgeql_functions_math_var_04(self):
        """var() of an empty set raises InvalidValueError."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to var\(\): not enough "
                r"elements in input set"):
            await self.con.query(r'''
                SELECT math::var(<int64>{});
            ''')
    async def test_edgeql_functions_math_var_05(self):
        """var() of a single element raises (sample variance needs >= 2 values)."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to var\(\): not enough "
                r"elements in input set"):
            await self.con.query(r'''
                SELECT math::var(1);
            ''')
    async def test_edgeql_functions_math_var_pop_01(self):
        """var_pop() computes the population variance (works for n=1)."""
        await self.assert_query_result(
            r'''SELECT math::var_pop(1);''',
            {0.0},
        )
        await self.assert_query_result(
            r'''SELECT math::var_pop({1, 1, 1});''',
            {0.0},
        )
        await self.assert_query_result(
            r'''SELECT math::var_pop({1, 2, 1, 2});''',
            {0.25},
        )
        await self.assert_query_result(
            r'''SELECT math::var_pop({0.1, 0.1, 0.1});''',
            {0.0},
        )
        await self.assert_query_result(
            r'''SELECT math::var_pop({0.1, 0.2, 0.1, 0.2});''',
            {0.0025},
        )
    async def test_edgeql_functions_math_var_pop_02(self):
        """var_pop() returns float64 for int/float inputs and decimal for decimal."""
        await self.assert_query_result(
            r'''SELECT math::var_pop(<int16>1) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::var_pop(<int32>1) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::var_pop(<int64>1) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::var_pop(<float32>1) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::var_pop(<float64>1) IS float64;''',
            {True},
        )
        await self.assert_query_result(
            r'''SELECT math::var_pop(<decimal>1) IS decimal;''',
            {True},
        )
    async def test_edgeql_functions_math_var_pop_03(self):
        """var_pop(X) equals stddev_pop(X) squared up to float rounding error."""
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {1, 2, 1, 2}
                SELECT abs(var_pop(X) - stddev_pop(X) ^ 2) < 1.0e-15;
            ''',
            {True},
        )
        await self.assert_query_result(
            r'''
                WITH
                    MODULE math,
                    X := {0.1, 0.2, 0.1, 0.2}
                SELECT abs(var_pop(X) - stddev_pop(X) ^ 2) < 1.0e-15;
            ''',
            {True},
        )
    async def test_edgeql_functions_math_var_pop_04(self):
        """var_pop() of an empty set raises InvalidValueError."""
        with self.assertRaisesRegex(
                edgedb.InvalidValueError,
                r"invalid input to var_pop\(\): not enough "
                r"elements in input set"):
            await self.con.query(r'''
                SELECT math::var_pop(<int64>{});
            ''')
    async def test_edgeql_functions__genseries_01(self):
        """_gen_series() produces inclusive ranges with optional step, for int64 and bigint."""
        await self.assert_query_result(
            r'''
                SELECT _gen_series(1, 10)
            ''',
            [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
        )
        await self.assert_query_result(
            r'''
                SELECT _gen_series(1, 10, 2)
            ''',
            [1, 3, 5, 7, 9]
        )
        await self.assert_query_result(
            r'''
                SELECT _gen_series(1n, 10n)
            ''',
            [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
        )
        await self.assert_query_result(
            r'''
                SELECT _gen_series(1n, 10n, 2n)
            ''',
            [1, 3, 5, 7, 9]
        )
    async def test_edgeql_functions_sequence_next_reset(self):
        """sequence_next() increments; sequence_reset() restarts or sets the counter."""
        await self.con.execute('''
            CREATE SCALAR TYPE my_seq_01 EXTENDING std::sequence;
        ''')
        # Fresh sequence starts at 1 and increments on each call.
        result = await self.con.query_single('''
            SELECT sequence_next(INTROSPECT my_seq_01)
        ''')
        self.assertEqual(result, 1)
        result = await self.con.query_single('''
            SELECT sequence_next(INTROSPECT my_seq_01)
        ''')
        self.assertEqual(result, 2)
        # Reset without an argument restarts the sequence from the beginning.
        await self.con.execute('''
            SELECT sequence_reset(INTROSPECT my_seq_01)
        ''')
        result = await self.con.query_single('''
            SELECT sequence_next(INTROSPECT my_seq_01)
        ''')
        self.assertEqual(result, 1)
        # Reset with a value makes the next call return value + 1.
        await self.con.execute('''
            SELECT sequence_reset(INTROSPECT my_seq_01, 20)
        ''')
        result = await self.con.query_single('''
            SELECT sequence_next(INTROSPECT my_seq_01)
        ''')
        self.assertEqual(result, 21)
    async def test_edgeql_functions__datetime_range_buckets(self):
        """_datetime_range_buckets() splits a datetime range into granule-sized buckets."""
        await self.assert_query_result(
            '''
            SELECT <tuple<str, str>>std::_datetime_range_buckets(
                <datetime>'2021-01-01T00:00:00Z',
                <datetime>'2021-04-01T00:00:00Z',
                '1 month');
            ''',
            [
                ('2021-01-01T00:00:00+00:00', '2021-02-01T00:00:00+00:00'),
                ('2021-02-01T00:00:00+00:00', '2021-03-01T00:00:00+00:00'),
                ('2021-03-01T00:00:00+00:00', '2021-04-01T00:00:00+00:00'),
            ],
        )
        # An empty range yields no buckets.
        await self.assert_query_result(
            '''
            SELECT <tuple<str, str>>std::_datetime_range_buckets(
                <datetime>'2021-04-01T00:00:00Z',
                <datetime>'2021-04-01T00:00:00Z',
                '1 month');
            ''',
            [],
        )
        # A fractional granule produces buckets only while they fit in the range.
        await self.assert_query_result(
            '''
            SELECT <tuple<str, str>>std::_datetime_range_buckets(
                <datetime>'2021-01-01T00:00:00Z',
                <datetime>'2021-04-01T00:00:00Z',
                '1.5 months');
            ''',
            [
                ('2021-01-01T00:00:00+00:00', '2021-02-16T00:00:00+00:00'),
                ('2021-02-16T00:00:00+00:00', '2021-03-31T00:00:00+00:00'),
            ],
        )
| 29.898002 | 79 | 0.463004 |
bc49f9c0bd222ed8db1da2e929bc65fb24774b12 | 13,300 | py | Python | homeassistant/components/zwave/lock.py | basicpail/core | 5cc54618c5af3f75c08314bf2375cc7ac40d2b7e | [
"Apache-2.0"
] | 11 | 2018-02-16T15:35:47.000Z | 2020-01-14T15:20:00.000Z | homeassistant/components/zwave/lock.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 383 | 2020-03-06T13:01:14.000Z | 2022-03-11T13:14:13.000Z | homeassistant/components/zwave/lock.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 11 | 2020-12-16T13:48:14.000Z | 2022-02-01T00:28:05.000Z | """Support for Z-Wave door locks."""
import logging
import voluptuous as vol
from homeassistant.components.lock import DOMAIN, LockEntity
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import ZWaveDeviceEntity, const
_LOGGER = logging.getLogger(__name__)
ATTR_NOTIFICATION = "notification"
ATTR_LOCK_STATUS = "lock_status"
ATTR_CODE_SLOT = "code_slot"
ATTR_USERCODE = "usercode"
CONFIG_ADVANCED = "Advanced"
SERVICE_SET_USERCODE = "set_usercode"
SERVICE_GET_USERCODE = "get_usercode"
SERVICE_CLEAR_USERCODE = "clear_usercode"
POLYCONTROL = 0x10E
DANALOCK_V2_BTZE = 0x2
POLYCONTROL_DANALOCK_V2_BTZE_LOCK = (POLYCONTROL, DANALOCK_V2_BTZE)
WORKAROUND_V2BTZE = 1
WORKAROUND_DEVICE_STATE = 2
WORKAROUND_TRACK_MESSAGE = 4
WORKAROUND_ALARM_TYPE = 8
DEVICE_MAPPINGS = {
POLYCONTROL_DANALOCK_V2_BTZE_LOCK: WORKAROUND_V2BTZE,
# Kwikset 914TRL ZW500 99100-078
(0x0090, 0x440): WORKAROUND_DEVICE_STATE,
(0x0090, 0x446): WORKAROUND_DEVICE_STATE,
(0x0090, 0x238): WORKAROUND_DEVICE_STATE,
# Kwikset 888ZW500-15S Smartcode 888
(0x0090, 0x541): WORKAROUND_DEVICE_STATE,
# Kwikset 916
(0x0090, 0x0001): WORKAROUND_DEVICE_STATE,
# Kwikset Obsidian
(0x0090, 0x0742): WORKAROUND_DEVICE_STATE,
# Yale Locks
# Yale YRD210, YRD220, YRL220
(0x0129, 0x0000): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD210, YRD220
(0x0129, 0x0209): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRL210, YRL220
(0x0129, 0x0409): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD256
(0x0129, 0x0600): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD110, YRD120
(0x0129, 0x0800): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD446
(0x0129, 0x1000): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRL220
(0x0129, 0x2132): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
(0x0129, 0x3CAC): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD210, YRD220
(0x0129, 0xAA00): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD220
(0x0129, 0xFFFF): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRL256
(0x0129, 0x0F00): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Yale YRD220 (Older Yale products with incorrect vendor ID)
(0x0109, 0x0000): WORKAROUND_DEVICE_STATE | WORKAROUND_ALARM_TYPE,
# Schlage BE469
(0x003B, 0x5044): WORKAROUND_DEVICE_STATE | WORKAROUND_TRACK_MESSAGE,
# Schlage FE599NX
(0x003B, 0x504C): WORKAROUND_DEVICE_STATE,
}
LOCK_NOTIFICATION = {
"1": "Manual Lock",
"2": "Manual Unlock",
"5": "Keypad Lock",
"6": "Keypad Unlock",
"11": "Lock Jammed",
"254": "Unknown Event",
}
NOTIFICATION_RF_LOCK = "3"
NOTIFICATION_RF_UNLOCK = "4"
LOCK_NOTIFICATION[NOTIFICATION_RF_LOCK] = "RF Lock"
LOCK_NOTIFICATION[NOTIFICATION_RF_UNLOCK] = "RF Unlock"
LOCK_ALARM_TYPE = {
"9": "Deadbolt Jammed",
"16": "Unlocked by Bluetooth ",
"18": "Locked with Keypad by user ",
"19": "Unlocked with Keypad by user ",
"21": "Manually Locked ",
"22": "Manually Unlocked ",
"27": "Auto re-lock",
"33": "User deleted: ",
"112": "Master code changed or User added: ",
"113": "Duplicate PIN code: ",
"130": "RF module, power restored",
"144": "Unlocked by NFC Tag or Card by user ",
"161": "Tamper Alarm: ",
"167": "Low Battery",
"168": "Critical Battery Level",
"169": "Battery too low to operate",
}
ALARM_RF_LOCK = "24"
ALARM_RF_UNLOCK = "25"
LOCK_ALARM_TYPE[ALARM_RF_LOCK] = "Locked by RF"
LOCK_ALARM_TYPE[ALARM_RF_UNLOCK] = "Unlocked by RF"
MANUAL_LOCK_ALARM_LEVEL = {
"1": "by Key Cylinder or Inside thumb turn",
"2": "by Touch function (lock and leave)",
}
TAMPER_ALARM_LEVEL = {"1": "Too many keypresses", "2": "Cover removed"}
LOCK_STATUS = {
"1": True,
"2": False,
"3": True,
"4": False,
"5": True,
"6": False,
"9": False,
"18": True,
"19": False,
"21": True,
"22": False,
"24": True,
"25": False,
"27": True,
}
ALARM_TYPE_STD = ["18", "19", "33", "112", "113", "144"]
SET_USERCODE_SCHEMA = vol.Schema(
{
vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
vol.Required(ATTR_CODE_SLOT): vol.Coerce(int),
vol.Required(ATTR_USERCODE): cv.string,
}
)
GET_USERCODE_SCHEMA = vol.Schema(
{
vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
vol.Required(ATTR_CODE_SLOT): vol.Coerce(int),
}
)
CLEAR_USERCODE_SCHEMA = vol.Schema(
{
vol.Required(const.ATTR_NODE_ID): vol.Coerce(int),
vol.Required(ATTR_CODE_SLOT): vol.Coerce(int),
}
)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up Z-Wave Lock from Config Entry.

    Connects discovery of new lock entities via the dispatcher and registers
    the lock-domain services for managing usercodes (set/get/clear).
    """

    @callback
    def async_add_lock(lock):
        """Add Z-Wave Lock."""
        async_add_entities([lock])

    async_dispatcher_connect(hass, "zwave_new_lock", async_add_lock)

    network = hass.data[const.DATA_NETWORK]

    def set_usercode(service):
        """Set the usercode to index X on the lock."""
        node_id = service.data.get(const.ATTR_NODE_ID)
        lock_node = network.nodes[node_id]
        code_slot = service.data.get(ATTR_CODE_SLOT)
        usercode = service.data.get(ATTR_USERCODE)
        for value in lock_node.get_values(
            class_id=const.COMMAND_CLASS_USER_CODE
        ).values():
            if value.index != code_slot:
                continue
            # The existing slot data defines the maximum code length.
            if len(str(usercode)) < 4:
                _LOGGER.error(
                    "Invalid code provided: (%s) "
                    "usercode must be at least 4 and at most"
                    " %s digits",
                    usercode,
                    len(value.data),
                )
                break
            value.data = str(usercode)
            break

    def get_usercode(service):
        """Get a usercode at index X on the lock."""
        node_id = service.data.get(const.ATTR_NODE_ID)
        lock_node = network.nodes[node_id]
        code_slot = service.data.get(ATTR_CODE_SLOT)
        for value in lock_node.get_values(
            class_id=const.COMMAND_CLASS_USER_CODE
        ).values():
            if value.index != code_slot:
                continue
            _LOGGER.info("Usercode at slot %s is: %s", value.index, value.data)
            break

    def clear_usercode(service):
        """Clear the usercode at slot X on the lock."""
        node_id = service.data.get(const.ATTR_NODE_ID)
        lock_node = network.nodes[node_id]
        code_slot = service.data.get(ATTR_CODE_SLOT)
        for value in lock_node.get_values(
            class_id=const.COMMAND_CLASS_USER_CODE
        ).values():
            if value.index != code_slot:
                continue
            # Overwrite the slot with NUL bytes of the same length to clear it.
            data = "\0" * len(value.data)
            _LOGGER.debug("Data to clear lock: %s", data)
            value.data = data
            _LOGGER.info("Usercode at slot %s is cleared", value.index)
            break

    hass.services.async_register(
        DOMAIN, SERVICE_SET_USERCODE, set_usercode, schema=SET_USERCODE_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SERVICE_GET_USERCODE, get_usercode, schema=GET_USERCODE_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SERVICE_CLEAR_USERCODE, clear_usercode, schema=CLEAR_USERCODE_SCHEMA
    )
def get_device(node, values, **kwargs):
    """Create Z-Wave entity device."""
    # `node` and `**kwargs` are unused here but kept for the shared
    # zwave platform discovery signature.
    return ZwaveLock(values)
class ZwaveLock(ZWaveDeviceEntity, LockEntity):
    """Representation of a Z-Wave Lock."""
    def __init__(self, values):
        """Initialize the Z-Wave lock device.

        Looks up the (manufacturer, product) pair in DEVICE_MAPPINGS and
        enables per-device workaround flags that change how the lock state
        is derived in update_properties().
        """
        ZWaveDeviceEntity.__init__(self, values, DOMAIN)
        # True/False/None lock state as reported (or derived via workarounds).
        self._state = None
        # Human-readable text for the last access-control notification.
        self._notification = None
        # Human-readable text for the last alarm event.
        self._lock_status = None
        self._v2btze = None
        self._state_workaround = False
        self._track_message_workaround = False
        self._previous_message = None
        self._alarm_type_workaround = False
        # Enable appropriate workaround flags for our device
        # Make sure that we have values for the key before converting to int
        if self.node.manufacturer_id.strip() and self.node.product_id.strip():
            specific_sensor_key = (
                int(self.node.manufacturer_id, 16),
                int(self.node.product_id, 16),
            )
            if specific_sensor_key in DEVICE_MAPPINGS:
                workaround = DEVICE_MAPPINGS[specific_sensor_key]
                if workaround & WORKAROUND_V2BTZE:
                    self._v2btze = 1
                    _LOGGER.debug("Polycontrol Danalock v2 BTZE workaround enabled")
                if workaround & WORKAROUND_DEVICE_STATE:
                    self._state_workaround = True
                    _LOGGER.debug("Notification device state workaround enabled")
                if workaround & WORKAROUND_TRACK_MESSAGE:
                    self._track_message_workaround = True
                    _LOGGER.debug("Message tracking workaround enabled")
                if workaround & WORKAROUND_ALARM_TYPE:
                    self._alarm_type_workaround = True
                    _LOGGER.debug("Alarm Type device state workaround enabled")
        self.update_properties()
    def update_properties(self):
        """Handle data changes for node values.

        Re-derives the lock state and status strings from the primary value,
        the access-control notification, the last received message (for
        devices needing message tracking) and the alarm type/level values.
        The order of the blocks below matters: later workarounds may
        override the state set by earlier ones.
        """
        self._state = self.values.primary.data
        _LOGGER.debug("lock state set to %s", self._state)
        if self.values.access_control:
            notification_data = self.values.access_control.data
            self._notification = LOCK_NOTIFICATION.get(str(notification_data))
            if self._state_workaround:
                # Devices that misreport the primary value: trust the
                # notification instead.
                self._state = LOCK_STATUS.get(str(notification_data))
                _LOGGER.debug("workaround: lock state set to %s", self._state)
            if (
                self._v2btze
                and self.values.v2btze_advanced
                and self.values.v2btze_advanced.data == CONFIG_ADVANCED
            ):
                # Danalock v2 BTZE in "Advanced" config mode also reports
                # state via the access-control notification.
                self._state = LOCK_STATUS.get(str(notification_data))
                _LOGGER.debug(
                    "Lock state set from Access Control value and is %s, get=%s",
                    str(notification_data),
                    self.state,
                )
        if self._track_message_workaround:
            # Byte 5 of the last received message identifies its command class.
            this_message = self.node.stats["lastReceivedMessage"][5]
            if this_message == const.COMMAND_CLASS_DOOR_LOCK:
                self._state = self.values.primary.data
                _LOGGER.debug("set state to %s based on message tracking", self._state)
                if self._previous_message == const.COMMAND_CLASS_DOOR_LOCK:
                    # Two door-lock messages in a row: attribute the change
                    # to an RF (remote) lock/unlock and stop here.
                    if self._state:
                        self._notification = LOCK_NOTIFICATION[NOTIFICATION_RF_LOCK]
                        self._lock_status = LOCK_ALARM_TYPE[ALARM_RF_LOCK]
                    else:
                        self._notification = LOCK_NOTIFICATION[NOTIFICATION_RF_UNLOCK]
                        self._lock_status = LOCK_ALARM_TYPE[ALARM_RF_UNLOCK]
                    return
            self._previous_message = this_message
        if not self.values.alarm_type:
            return
        alarm_type = self.values.alarm_type.data
        if self.values.alarm_level:
            alarm_level = self.values.alarm_level.data
        else:
            alarm_level = None
        if not alarm_type:
            return
        if self._alarm_type_workaround:
            # Yale-style devices report lock state via the alarm type value.
            self._state = LOCK_STATUS.get(str(alarm_type))
            _LOGGER.debug(
                "workaround: lock state set to %s -- alarm type: %s",
                self._state,
                str(alarm_type),
            )
        # Alarm type 21: manual lock -- the level says how it was operated.
        if alarm_type == 21:
            self._lock_status = (
                f"{LOCK_ALARM_TYPE.get(str(alarm_type))}"
                f"{MANUAL_LOCK_ALARM_LEVEL.get(str(alarm_level))}"
            )
            return
        # Standard alarm types append the raw level (e.g. a user/slot number).
        if str(alarm_type) in ALARM_TYPE_STD:
            self._lock_status = f"{LOCK_ALARM_TYPE.get(str(alarm_type))}{alarm_level}"
            return
        # Alarm type 161: tamper alarm -- the level gives the tamper reason.
        if alarm_type == 161:
            self._lock_status = (
                f"{LOCK_ALARM_TYPE.get(str(alarm_type))}"
                f"{TAMPER_ALARM_LEVEL.get(str(alarm_level))}"
            )
            return
        if alarm_type != 0:
            self._lock_status = LOCK_ALARM_TYPE.get(str(alarm_type))
            return
    @property
    def is_locked(self):
        """Return true if device is locked."""
        return self._state
    def lock(self, **kwargs):
        """Lock the device."""
        self.values.primary.data = True
    def unlock(self, **kwargs):
        """Unlock the device."""
        self.values.primary.data = False
    @property
    def extra_state_attributes(self):
        """Return the device specific state attributes."""
        data = super().extra_state_attributes
        if self._notification:
            data[ATTR_NOTIFICATION] = self._notification
        if self._lock_status:
            data[ATTR_LOCK_STATUS] = self._lock_status
        return data
| 34.545455 | 87 | 0.62594 |
00ca088d9f0c9ce23edcc4d08fe27be71a69d0e9 | 1,784 | py | Python | jaxrk/rkhs/conditional_operator.py | ingmarschuster/JaxRK | 1de11e1fe79c1cf14a111b441d9c7b60c3725bf9 | [
"MIT"
] | null | null | null | jaxrk/rkhs/conditional_operator.py | ingmarschuster/JaxRK | 1de11e1fe79c1cf14a111b441d9c7b60c3725bf9 | [
"MIT"
] | null | null | null | jaxrk/rkhs/conditional_operator.py | ingmarschuster/JaxRK | 1de11e1fe79c1cf14a111b441d9c7b60c3725bf9 | [
"MIT"
] | null | null | null | from copy import copy
from ..reduce.centop_reductions import CenterInpFeat, DecenterOutFeat
from ..reduce.lincomb import LinearReduce
from ..reduce.base import Prefactors, Sum
from typing import Generic, TypeVar, Callable, Union
import jax.numpy as np
from jax.interpreters.xla import DeviceArray
from scipy.optimize import minimize
from ..rkhs.vector import FiniteVec, inner
from ..core.typing import AnyOrInitFn, Array
from .base import LinOp, RkhsObject, Vec, InpVecT, OutVecT, RhInpVectT, CombT
from .cov import *
from .operator import FiniteOp
def Cmo(inp_feat:InpVecT, outp_feat:OutVecT, regul:float = None) -> FiniteOp[InpVecT, OutVecT]:
    """Build the conditional mean operator from input features to output features.

    The operator is the cross-covariance of the output features with the
    (regularized) covariance-whitened input features.
    """
    if regul is not None:
        regul = np.array(regul, dtype=np.float32)
        flat_regul = regul.squeeze()
        # Regularizer is either a scalar or one value per input point.
        assert flat_regul.size == 1 or flat_regul.shape[0] == len(inp_feat)
    whitened_inp = Cov_solve(CovOp(inp_feat), inp_feat, regul=regul)
    return CrossCovOp(whitened_inp, outp_feat)
def RidgeCmo(inp_feat:InpVecT, outp_feat:OutVecT, regul:float = None) -> FiniteOp[InpVecT, OutVecT]:
    """Build the conditional mean operator via ridge regression on the Gram matrix.

    When no regularizer is given, a default is derived from the number of
    input points.
    """
    if regul is None:
        regul = Cov_regul(1, len(inp_feat))
    else:
        regul = np.array(regul, dtype=np.float32)
        flat_regul = regul.squeeze()
        # Regularizer is either a scalar or one value per input point.
        assert flat_regul.size == 1 or flat_regul.shape[0] == len(inp_feat)
    num_inp = len(inp_feat)
    # (G + regul * I)^-1 is the ridge-regularized inverse Gram matrix.
    ridge_inv = np.linalg.inv(inp_feat.inner() + regul * np.eye(num_inp))
    return FiniteOp(inp_feat, outp_feat, ridge_inv)
def Cdo(inp_feat:InpVecT, outp_feat:OutVecT, ref_feat:OutVecT, regul = None) -> FiniteOp[InpVecT, OutVecT]:
    """Build the conditional density operator.

    First constructs the conditional mean operator, then applies the
    (regularized) inverse covariance of the reference features to it.
    """
    if regul is not None:
        regul = np.array(regul, dtype=np.float32)
        flat_regul = regul.squeeze()
        # Regularizer is either a scalar or one value per input point.
        assert flat_regul.size == 1 or flat_regul.shape[0] == len(inp_feat)
    mean_op = Cmo(inp_feat, outp_feat, regul)
    return Cov_solve(CovOp(ref_feat), mean_op, regul=regul)
fa97e3371fa6aa44575c9b6a8fee7d2d08c66263 | 300 | py | Python | projects/DensePose_wrong/rename_keys_in_checkpoint.py | charliememory/detectron2 | a2a6220068e73c616ee4c84cb52ea023c0203fa0 | [
"Apache-2.0"
] | null | null | null | projects/DensePose_wrong/rename_keys_in_checkpoint.py | charliememory/detectron2 | a2a6220068e73c616ee4c84cb52ea023c0203fa0 | [
"Apache-2.0"
] | null | null | null | projects/DensePose_wrong/rename_keys_in_checkpoint.py | charliememory/detectron2 | a2a6220068e73c616ee4c84cb52ea023c0203fa0 | [
"Apache-2.0"
] | null | null | null | import torch, pdb
path = "./hrnetv2_w48_imagenet_pretrained.pth"
save_path = "./hrnetv2_w48_imagenet_pretrained_renamekeys.pth"


def rename_keys(state_dict):
    """Return a copy of *state_dict* with every key prefixed with
    "backbone.bottom_up." so it matches detectron2's backbone namespace."""
    return {"backbone.bottom_up." + key: value for key, value in state_dict.items()}


if __name__ == "__main__":
    # Fixed: removed a leftover pdb.set_trace() debug breakpoint that halted
    # the script, and guarded the load/save so importing this module has no
    # side effects.
    pretrained_dict = rename_keys(torch.load(path))
    torch.save(pretrained_dict, save_path)
6f597c18686e8d11b14e78d93b9c8a95c0c5cc2f | 1,690 | py | Python | datareactor/atoms/base.py | data-dev/DataReactor | 26cd08129d348cf5ff3596c3e509619c59e300b8 | [
"MIT"
] | 1 | 2022-02-08T11:10:08.000Z | 2022-02-08T11:10:08.000Z | datareactor/atoms/base.py | data-dev/DataReactor | 26cd08129d348cf5ff3596c3e509619c59e300b8 | [
"MIT"
] | null | null | null | datareactor/atoms/base.py | data-dev/DataReactor | 26cd08129d348cf5ff3596c3e509619c59e300b8 | [
"MIT"
] | null | null | null | import logging
from datareactor.dataset import Dataset, DerivedColumn
logger = logging.getLogger(__name__)
class Atom():
    """Generate derived columns for a dataset.

    An `Atom` produces one or more derived columns per target table;
    subclasses implement :meth:`derive`, while :meth:`transform` drives it
    over every table in the dataset.
    """

    def transform(self, dataset):
        """Run :meth:`derive` over every table and collect the results.

        Args:
            dataset (Dataset): The dataset.

        Returns:
            (:obj:`list` of :obj:`DerivedColumn`): The derived columns.
        """
        assert isinstance(dataset, Dataset)
        collected = []
        for name in dataset.metadata.get_table_names():
            logger.info("Generating columns in table %s using %s." % (
                name,
                self.__class__.__name__
            ))
            for column in self.derive(dataset, name):
                assert isinstance(column, DerivedColumn)
                collected.append(column)
        return collected

    def derive(self, dataset, table_name):
        """Produce the derived columns for a single table.

        Subclasses must override this; the returned columns can be
        concatenated to the target table.

        Args:
            dataset (Dataset): The dataset.
            table_name (str): The name of the target table.

        Returns:
            (:obj:`list` of :obj:`DerivedColumn`): The derived columns.
        """
        raise NotImplementedError()
| 31.296296 | 78 | 0.62426 |
5f0cc94f6cecc3aebc1038d1cebddc0c4c81263c | 4,277 | py | Python | pyransac3d/cylinder.py | jungerm2/pyRANSAC-3D | 88a6c302c31505386f9c51507ab87ef80c49005f | [
"Apache-2.0"
] | null | null | null | pyransac3d/cylinder.py | jungerm2/pyRANSAC-3D | 88a6c302c31505386f9c51507ab87ef80c49005f | [
"Apache-2.0"
] | null | null | null | pyransac3d/cylinder.py | jungerm2/pyRANSAC-3D | 88a6c302c31505386f9c51507ab87ef80c49005f | [
"Apache-2.0"
] | null | null | null | import random
import numpy as np
from .aux_functions import rodrigues_rot
class Cylinder:
    """
    !!! warning
        The cylinder RANSAC in this library works! but does not present good results on real data on the current version.
        We are working to make a better algorithim using normals. If you want to contribute, please create a MR on github.
        You'll be our hero!

    Implementation for cylinder RANSAC.

    This class finds a infinite height cilinder and returns the cylinder axis, center and radius.

    ---
    """
    def __init__(self):
        # Indices of inlier points found by the last fit().
        self.inliers = []
        # A point on the cylinder axis.
        self.center = []
        # Unit vector along the cylinder axis.
        self.axis = []
        self.radius = 0
    def fit(self, pts, thresh=0.2, maxIteration=10000):
        """
        Find the parameters (axis and radius) defining a cylinder.

        :param pts: 3D point cloud as a numpy array (N,3).
        :param thresh: Threshold distance from the cylinder hull which is considered inlier.
        :param maxIteration: Number of maximum iteration which RANSAC will loop over.
        :returns:
        - `center`: Center of the cylinder np.array(1,3) which the cylinder axis is passing through.
        - `axis`: Vector describing cylinder's axis np.array(1,3).
        - `radius`: Radius of cylinder.
        - `inliers`: Inlier's index from the original point cloud.
        ---
        """
        n_points = pts.shape[0]
        best_inliers = []
        for it in range(maxIteration):
            # Samples 3 random points
            # NOTE(review): range(1, n_points - 1) means indices 0 and
            # n_points - 1 can never be sampled -- confirm this exclusion
            # is intentional.
            id_samples = random.sample(range(1, n_points - 1), 3)
            pt_samples = pts[id_samples]
            # We have to find the plane equation described by those 3 points
            # We find first 2 vectors that are part of this plane
            # A = pt2 - pt1
            # B = pt3 - pt1
            vecA = pt_samples[1, :] - pt_samples[0, :]
            vecA_norm = vecA / np.linalg.norm(vecA)
            vecB = pt_samples[2, :] - pt_samples[0, :]
            vecB_norm = vecB / np.linalg.norm(vecB)
            # Now we compute the cross product of vecA and vecB to get vecC which is normal to the plane
            vecC = np.cross(vecA_norm, vecB_norm)
            vecC = vecC / np.linalg.norm(vecC)
            # Now we calculate the rotation of the points with rodrigues equation
            # Rotate the 3 samples so their plane normal aligns with +z; the
            # circle fit below then happens in 2D (z = 0).
            P_rot = rodrigues_rot(pt_samples, vecC, [0, 0, 1])
            # Find center from 3 points
            # http://paulbourke.net/geometry/circlesphere/
            # Find lines that intersect the points
            # Slope:
            ma = 0
            mb = 0
            # Roll the points until the first chord is not horizontal
            # (ma == 0 would make the perpendicular-bisector slope undefined).
            while ma == 0:
                ma = (P_rot[1, 1] - P_rot[0, 1]) / (P_rot[1, 0] - P_rot[0, 0])
                mb = (P_rot[2, 1] - P_rot[1, 1]) / (P_rot[2, 0] - P_rot[1, 0])
                if ma == 0:
                    P_rot = np.roll(P_rot, -1, axis=0)
                else:
                    break
            # Calulate the center by verifying intersection of each orthogonal line
            p_center_x = (
                ma * mb * (P_rot[0, 1] - P_rot[2, 1])
                + mb * (P_rot[0, 0] + P_rot[1, 0])
                - ma * (P_rot[1, 0] + P_rot[2, 0])
            ) / (2 * (mb - ma))
            p_center_y = -1 / (ma) * (p_center_x - (P_rot[0, 0] + P_rot[1, 0]) / 2) + (P_rot[0, 1] + P_rot[1, 1]) / 2
            p_center = [p_center_x, p_center_y, 0]
            radius = np.linalg.norm(p_center - P_rot[0, :])
            # Remake rodrigues rotation
            # Rotate the 2D circle center back into the original frame.
            center = rodrigues_rot(p_center, [0, 0, 1], vecC)[0]
            # Distance from a point to a line
            pt_id_inliers = []  # list of inliers ids
            vecC_stakado = np.stack([vecC] * n_points, 0)
            # |axis x (center - p)| gives each point's distance to the axis
            # line (vecC is unit length).
            dist_pt = np.cross(vecC_stakado, (center - pts))
            dist_pt = np.linalg.norm(dist_pt, axis=1)
            # Select indexes where distance is biggers than the threshold
            pt_id_inliers = np.where(np.abs(dist_pt - radius) <= thresh)[0]
            if len(pt_id_inliers) > len(best_inliers):
                best_inliers = pt_id_inliers
                # NOTE(review): center/axis/radius are taken from the LAST
                # iteration, not the best one, unless this assignment also
                # cached them -- here they are stored unconditionally below,
                # so the returned model belongs to the final iteration's
                # candidate; verify against upstream intent.
        self.inliers = best_inliers
        self.center = center
        self.axis = vecC
        self.radius = radius
        return self.center, self.axis, self.radius, self.inliers
0435d045c65cf116eac0e0ce3a1be3b539ed3ddb | 6,447 | py | Python | lib/services/server/ncloud_server/model/get_instance_tag_list_request.py | NaverCloudPlatform/ncloud-sdk-python | 5976dfabd205c615fcf57ac2f0ab67313ee6953c | [
"MIT"
] | 12 | 2018-11-20T04:30:49.000Z | 2021-11-09T12:34:26.000Z | lib/services/server/ncloud_server/model/get_instance_tag_list_request.py | NaverCloudPlatform/ncloud-sdk-python | 5976dfabd205c615fcf57ac2f0ab67313ee6953c | [
"MIT"
] | 1 | 2019-01-24T15:56:15.000Z | 2019-05-31T07:56:55.000Z | lib/services/server/ncloud_server/model/get_instance_tag_list_request.py | NaverCloudPlatform/ncloud-sdk-python | 5976dfabd205c615fcf57ac2f0ab67313ee6953c | [
"MIT"
] | 6 | 2018-06-29T03:45:50.000Z | 2022-03-18T01:51:45.000Z | # coding: utf-8
"""
server
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class GetInstanceTagListRequest(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Swagger-declared type of each model attribute.
    swagger_types = {
        'instance_no_list': 'list[str]',
        'tag_key_list': 'list[str]',
        'tag_value_list': 'list[str]',
        'page_no': 'int',
        'page_size': 'int'
    }
    # snake_case attribute name -> camelCase JSON key used on the wire.
    attribute_map = {
        'instance_no_list': 'instanceNoList',
        'tag_key_list': 'tagKeyList',
        'tag_value_list': 'tagValueList',
        'page_no': 'pageNo',
        'page_size': 'pageSize'
    }
    def __init__(self, instance_no_list=None, tag_key_list=None, tag_value_list=None, page_no=None, page_size=None):  # noqa: E501
        """GetInstanceTagListRequest - a model defined in Swagger"""  # noqa: E501
        self._instance_no_list = None
        self._tag_key_list = None
        self._tag_value_list = None
        self._page_no = None
        self._page_size = None
        self.discriminator = None
        # Only assign attributes that were actually provided so the property
        # setters run and unset fields stay None.
        if instance_no_list is not None:
            self.instance_no_list = instance_no_list
        if tag_key_list is not None:
            self.tag_key_list = tag_key_list
        if tag_value_list is not None:
            self.tag_value_list = tag_value_list
        if page_no is not None:
            self.page_no = page_no
        if page_size is not None:
            self.page_size = page_size
    @property
    def instance_no_list(self):
        """Gets the instance_no_list of this GetInstanceTagListRequest.  # noqa: E501
        Instance number list  # noqa: E501
        :return: The instance_no_list of this GetInstanceTagListRequest.  # noqa: E501
        :rtype: list[str]
        """
        return self._instance_no_list
    @instance_no_list.setter
    def instance_no_list(self, instance_no_list):
        """Sets the instance_no_list of this GetInstanceTagListRequest.
        Instance number list  # noqa: E501
        :param instance_no_list: The instance_no_list of this GetInstanceTagListRequest.  # noqa: E501
        :type: list[str]
        """
        self._instance_no_list = instance_no_list
    @property
    def tag_key_list(self):
        """Gets the tag_key_list of this GetInstanceTagListRequest.  # noqa: E501
        Tag key list  # noqa: E501
        :return: The tag_key_list of this GetInstanceTagListRequest.  # noqa: E501
        :rtype: list[str]
        """
        return self._tag_key_list
    @tag_key_list.setter
    def tag_key_list(self, tag_key_list):
        """Sets the tag_key_list of this GetInstanceTagListRequest.
        Tag key list  # noqa: E501
        :param tag_key_list: The tag_key_list of this GetInstanceTagListRequest.  # noqa: E501
        :type: list[str]
        """
        self._tag_key_list = tag_key_list
    @property
    def tag_value_list(self):
        """Gets the tag_value_list of this GetInstanceTagListRequest.  # noqa: E501
        Tag value list  # noqa: E501
        :return: The tag_value_list of this GetInstanceTagListRequest.  # noqa: E501
        :rtype: list[str]
        """
        return self._tag_value_list
    @tag_value_list.setter
    def tag_value_list(self, tag_value_list):
        """Sets the tag_value_list of this GetInstanceTagListRequest.
        Tag value list  # noqa: E501
        :param tag_value_list: The tag_value_list of this GetInstanceTagListRequest.  # noqa: E501
        :type: list[str]
        """
        self._tag_value_list = tag_value_list
    @property
    def page_no(self):
        """Gets the page_no of this GetInstanceTagListRequest.  # noqa: E501
        Page number  # noqa: E501
        :return: The page_no of this GetInstanceTagListRequest.  # noqa: E501
        :rtype: int
        """
        return self._page_no
    @page_no.setter
    def page_no(self, page_no):
        """Sets the page_no of this GetInstanceTagListRequest.
        Page number  # noqa: E501
        :param page_no: The page_no of this GetInstanceTagListRequest.  # noqa: E501
        :type: int
        """
        self._page_no = page_no
    @property
    def page_size(self):
        """Gets the page_size of this GetInstanceTagListRequest.  # noqa: E501
        Page size  # noqa: E501
        :return: The page_size of this GetInstanceTagListRequest.  # noqa: E501
        :rtype: int
        """
        return self._page_size
    @page_size.setter
    def page_size(self, page_size):
        """Sets the page_size of this GetInstanceTagListRequest.
        Page size  # noqa: E501
        :param page_size: The page_size of this GetInstanceTagListRequest.  # noqa: E501
        :type: int
        """
        self._page_size = page_size
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively convert nested models (anything with to_dict) so the
        # result is plain JSON-serializable data.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, GetInstanceTagListRequest):
            return False
        # Field-wise comparison via the instance dicts (private attrs).
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 28.78125 | 130 | 0.602451 |
99661e48b6018e85ae827e486bd32750d325e612 | 430 | py | Python | src/ik_index_lmtd/config.py | jorisvanzundert/lvrmtw | 30d902a90e366812959b6392371578f45d996403 | [
"MIT"
] | null | null | null | src/ik_index_lmtd/config.py | jorisvanzundert/lvrmtw | 30d902a90e366812959b6392371578f45d996403 | [
"MIT"
] | 6 | 2021-04-20T23:44:45.000Z | 2022-03-12T00:56:46.000Z | src/ik_index_lmtd/config.py | jorisvanzundert/riddle_ikindex | 30d902a90e366812959b6392371578f45d996403 | [
"MIT"
] | null | null | null | import sys
from os.path import dirname, abspath, join
# Repository root sits two levels above this file; add it to the import
# path so sibling packages resolve when this config module is loaded.
work_dir = dirname(dirname(abspath(__file__)))
sys.path.append(work_dir)
# Every corpus variant lives under the same (undisclosed) LMTD data root.
_BASE = 'data/undisclosed/lmtd'
PATH_TO_TEXTS_DIALOGUE_ONLY = _BASE + '/dialogue_only'
PATH_TO_TEXTS_NO_DIALOGUE = _BASE + '/no_dialogue'
PATH_TO_TEXTS = _BASE + '/full_txt'
PATH_TO_METADATA = _BASE + '/metadata/'
PATH_TO_RESULT = 'results/lmtd'
RESULT_FILE_PREFIX = 'lmtd'
| 35.833333 | 67 | 0.802326 |
9f931c7d29b4431e4c7310bce9a2e8bda06c42cc | 525 | py | Python | lightning/MNIST_Lightning_v2/train.py | sachinumrao/pytorch_tutorials | 113b17875e6858ea50ececd29948d0054f3a535c | [
"MIT"
] | null | null | null | lightning/MNIST_Lightning_v2/train.py | sachinumrao/pytorch_tutorials | 113b17875e6858ea50ececd29948d0054f3a535c | [
"MIT"
] | null | null | null | lightning/MNIST_Lightning_v2/train.py | sachinumrao/pytorch_tutorials | 113b17875e6858ea50ececd29948d0054f3a535c | [
"MIT"
] | null | null | null | import pytorch_lightning as pl
from pytorch_lightning.loggers import WandbLogger
from model import Net
from dataset import MNISTDataset
if __name__ == "__main__":
    # Model and data module under training.
    net = Net()
    data_module = MNISTDataset()

    # Toggle experiment tracking; with LOG disabled the trainer runs
    # without any external logger attached.
    LOG = True
    if LOG:
        experiment_logger = WandbLogger(project="MNIST_Lightning_V2")
        # Stream gradients and parameters to W&B every 100 steps.
        experiment_logger.watch(net, log='all', log_freq=100)
    else:
        experiment_logger = None

    trainer = pl.Trainer(max_epochs=50, logger=experiment_logger)
    trainer.fit(net, data_module)
| 21.875 | 58 | 0.626667 |
321f38e8b6c542fe780b77e2ae849e05e224f57c | 6,208 | py | Python | src/sage/interfaces/jmoldata.py | drvinceknight/sage | 00199fb220aa173d8585b9e90654dafd3247d82d | [
"BSL-1.0"
] | 2 | 2015-08-11T05:05:47.000Z | 2019-05-15T17:27:25.000Z | src/sage/interfaces/jmoldata.py | kaushik94/sage | 00199fb220aa173d8585b9e90654dafd3247d82d | [
"BSL-1.0"
] | null | null | null | src/sage/interfaces/jmoldata.py | kaushik94/sage | 00199fb220aa173d8585b9e90654dafd3247d82d | [
"BSL-1.0"
] | 1 | 2020-07-24T11:56:55.000Z | 2020-07-24T11:56:55.000Z | r"""
Interface for extracting data and generating images from Jmol readable files.
JmolData is a no GUI version of Jmol useful for extracting data from files Jmol
reads and for generating image files.
AUTHORS:
- Jonathan Gutow (2012-06-14): complete doctest coverage
- Jonathan Gutow (2012-03-21): initial version
"""
#*******************************************************************************
# Copyright (C) 2012 Jonathan Gutow (gutow@uwosh.edu)
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# http://www.gnu.org/licenses/
#*******************************************************************************
from sage.structure.sage_object import SageObject
from sage.misc.misc import SAGE_LOCAL, DOT_SAGE, sage_makedirs
from sage.misc.temporary_file import tmp_filename
import subprocess
import os
class JmolData(SageObject):
    r"""
    Gateway to the headless ``JmolData.jar`` application, used to extract
    data from files Jmol can read and to render them into image files.

    .. todo::

        Create an animated image file (GIF) if spin is on and put data
        extracted from a file into a variable/string/structure to return
    """
    def __init__(self):
        """
        EXAMPLES:

        Create a JmolData object::

            sage: from sage.interfaces.jmoldata import JmolData
            sage: JData = JmolData()
        """
        pass

    def is_jvm_available(self):
        """
        Returns True if the Java Virtual Machine is available and False if not.

        EXAMPLES:

        Check that it returns a boolean::

            sage: from sage.interfaces.jmoldata import JmolData
            sage: JData = JmolData()
            sage: type(JData.is_jvm_available())
            <type 'bool'>
        """
        try:
            # universal_newlines=True makes check_output return text on
            # Python 3 as well; without it the output is bytes and the
            # re.search below raises TypeError instead of matching.
            version = subprocess.check_output(
                ['java', '-version'],
                stderr=subprocess.STDOUT, universal_newlines=True)
        except (subprocess.CalledProcessError, OSError):
            # 'java' is missing or exited with an error: no usable JVM.
            return False

        import re
        # Accept only Java 1.5 / 1.6 / 1.7 ("version ... 1.[567]").
        java_version = re.search("version.*[1][.][567]", version)
        return java_version is not None

    def export_image(self,
        targetfile,
        datafile, #name (path) of data file Jmol can read or script file telling it what to read or load
        datafile_cmd='script',  #"script" or "load"
        image_type ='PNG', #PNG, JPG, GIF
        figsize=5,
        **kwds):
        r"""
        This executes JmolData.jar to make an image file.

        INPUT:

        - targetfile -- the full path to the file where the image
          should be written.

        - datafile -- full path to the data file Jmol can read or
          text of a script telling Jmol what to read or load.

        - datafile_cmd -- (default ``'script'``)  ``'load'`` or ``'script'``
          should be ``"load"`` for a data file.

        - image_type -- (default ``"PNG"``) ``'PNG'`` ``'JPG'`` or ``'GIF'``

        - figsize -- number (default 5) equal to (pixels/side)/100

        OUTPUT:

        Image file, .png, .gif or .jpg (default .png)

        .. note::

            Examples will generate an error message if a functional Java Virtual Machine (JVM)
            is not installed on the machine the Sage instance is running on.

        .. warning::

            Programmers using this module should check that the JVM is
            available before making calls to avoid the user getting
            error messages.  Check for the JVM using the function
            :meth:`is_jvm_available`, which returns True if a JVM is available.

        EXAMPLES:

        Use Jmol to load a pdb file containing some DNA from a web data
        base and make an image of the DNA. If you execute this in the
        notebook, the image will appear in the output cell::

            sage: from sage.interfaces.jmoldata import JmolData
            sage: JData = JmolData()
            sage: script = "load =1lcd;display DNA;moveto 0.0 { -473 -713 -518 59.94} 100.0 0.0  0.0 {21.17 26.72 27.295} 27.544636 {0.0 0.0 0.0} -25.287832 64.8414 0.0;"
            sage: testfile = tmp_filename(ext="DNA.png")
            sage: JData.export_image(targetfile=testfile,datafile=script,image_type="PNG")  # optional -- java internet
            sage: print os.path.exists(testfile)  # optional -- java internet
            True

        Use Jmol to save an image of a 3-D object created in Sage.
        This method is used internally by plot3d to generate static images.
        This example doesn't have correct scaling::

            sage: from sage.interfaces.jmoldata import JmolData
            sage: JData = JmolData()
            sage: D=dodecahedron()
            sage: from sage.misc.misc import SAGE_TMP
            sage: archive_name=os.path.join(SAGE_TMP, "archive.jmol.zip")
            sage: D.export_jmol(archive_name)  #not scaled properly...need some more steps.
            sage: testfile = os.path.join(SAGE_TMP, "testimage.png")
            sage: script  = 'set defaultdirectory "%s"\n script SCRIPT\n'%archive_name
            sage: JData.export_image(targetfile =testfile,datafile = script, image_type="PNG") # optional -- java
            sage: print os.path.exists(testfile) # optional -- java
            True
        """
        # Set up paths, file names and scripts
        jmolpath = os.path.join(SAGE_LOCAL, "share", "jmol", "JmolData.jar")
        launchscript = ""
        if (datafile_cmd!='script'):
            # A plain data file must be prefixed with Jmol's "load" command;
            # script text is passed through unchanged.
            launchscript = "load "
        launchscript = launchscript + datafile

        imagescript = "write "+ image_type +" "+targetfile+"\n"
        # Jmol expects the canvas size as "WIDTHxHEIGHT" in pixels.
        sizeStr = "%sx%s" %(figsize*100,figsize*100)
        # Scratch file for Jmol errors
        scratchout = tmp_filename(ext=".txt")
        with open(scratchout, 'w') as jout:
            # Now call the java application and write the file.
            subprocess.call(["java", "-Xmx512m", "-Djava.awt.headless=true",
                "-jar", jmolpath, "-iox", "-g", sizeStr,
                "-J", launchscript, "-j", imagescript], stdout=jout, stderr=jout)
        if not os.path.isfile(targetfile):
            raise RuntimeError("Jmol failed to create file %s, see %s for details"%(repr(targetfile), repr(scratchout)))
        # Only remove the error log on success so failures stay debuggable.
        os.unlink(scratchout)
| 38.559006 | 169 | 0.603576 |
ea3cb6799db4584f1a9feb797ed3713b5ca56afa | 5,352 | py | Python | test/test_e2e_st_transformer.py | roshansh-cmu/espnet | 5fa6dcc4e649dc66397c629d0030d09ecef36b80 | [
"Apache-2.0"
] | null | null | null | test/test_e2e_st_transformer.py | roshansh-cmu/espnet | 5fa6dcc4e649dc66397c629d0030d09ecef36b80 | [
"Apache-2.0"
] | null | null | null | test/test_e2e_st_transformer.py | roshansh-cmu/espnet | 5fa6dcc4e649dc66397c629d0030d09ecef36b80 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
# Copyright 2019 Hirofumi Inaguma
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
import argparse
import pytest
import torch
from espnet.nets.pytorch_backend.e2e_st_transformer import E2E
from espnet.nets.pytorch_backend.transformer import plot
def make_arg(**kwargs):
    """Build an argparse.Namespace of E2E-ST model options.

    Starts from a tiny default configuration suitable for fast tests;
    any keyword argument overrides the corresponding default.
    """
    settings = {
        'adim': 2,
        'aheads': 1,
        'dropout_rate': 0.0,
        'transformer_attn_dropout_rate': None,
        'elayers': 1,
        'eunits': 2,
        'dlayers': 1,
        'dunits': 2,
        'sym_space': "<space>",
        'sym_blank': "<blank>",
        'transformer_init': "pytorch",
        'transformer_input_layer': "conv2d",
        'transformer_length_normalized_loss': True,
        'report_bleu': False,
        'report_cer': False,
        'report_wer': False,
        'mtlalpha': 0.0,  # for CTC-ASR
        'lsm_weight': 0.001,
        'char_list': ["<blank>", "a", "e", "i", "o", "u"],
        'ctc_type': "warpctc",
        'asr_weight': 0.0,
        'mt_weight': 0.0,
    }
    settings.update(kwargs)
    return argparse.Namespace(**settings)
def prepare(args):
    """Build a small E2E model plus a random, padded two-utterance batch.

    Returns ``(model, x, ilens, y_tgt, y_src, data, uttid_list)`` where
    ``data``/``uttid_list`` mimic the kaldi-style batch metadata espnet's
    plotting helpers expect.
    """
    idim, odim = 10, 5
    model = E2E(idim, odim, args)

    ilens = [10, 9]
    olens = [3, 4]
    batchsize = len(ilens)
    n_token = odim - 1

    # Keep the random-draw order identical to the original (x, y_src, y_tgt)
    # so results are reproducible under a fixed seed.
    x = torch.randn(batchsize, max(ilens), idim)
    y_src = (torch.rand(batchsize, max(olens)) * n_token % n_token).long()
    y_tgt = (torch.rand(batchsize, max(olens)) * n_token % n_token).long()

    # Mask the padded tail of every utterance.
    for i, (ilen, olen) in enumerate(zip(ilens, olens)):
        x[i, ilen:] = -1
        y_tgt[i, olen:] = model.ignore_id
        y_src[i, olen:] = model.ignore_id

    uttid_list = ["utt%d" % i for i in range(batchsize)]
    data = {
        uttid: {
            "input": [{"shape": [ilens[i], idim]}],
            "output": [{"shape": [olens[i]]}],
        }
        for i, uttid in enumerate(uttid_list)
    }
    return model, x, torch.tensor(ilens), y_tgt, y_src, data, uttid_list
# Preset argument bundles for the lightweight/dynamic convolution
# self-attention variants.  The *_kernel_length strings encode one kernel
# size per layer, joined by '_' (e.g. "5_7_11" = three encoder layers).
# `wshare` is forwarded to the conv layers (presumably the number of shared
# weight heads -- confirm against espnet's lightconv implementation).
ldconv_lconv_args = dict(
    transformer_decoder_selfattn_layer_type="lightconv",
    transformer_encoder_selfattn_layer_type="lightconv",
    wshare=2,
    ldconv_encoder_kernel_length="5_7_11",
    ldconv_decoder_kernel_length="3_7",
    ldconv_usebias=False,
)
ldconv_dconv_args = dict(
    transformer_decoder_selfattn_layer_type="dynamicconv",
    transformer_encoder_selfattn_layer_type="dynamicconv",
    wshare=2,
    ldconv_encoder_kernel_length="5_7_11",
    ldconv_decoder_kernel_length="3_7",
    ldconv_usebias=False,
)
ldconv_lconv2d_args = dict(
    transformer_decoder_selfattn_layer_type="lightconv2d",
    transformer_encoder_selfattn_layer_type="lightconv2d",
    wshare=2,
    ldconv_encoder_kernel_length="5_7_11",
    ldconv_decoder_kernel_length="3_7",
    ldconv_usebias=False,
)
ldconv_dconv2d_args = dict(
    transformer_decoder_selfattn_layer_type="dynamicconv2d",
    transformer_encoder_selfattn_layer_type="dynamicconv2d",
    wshare=2,
    ldconv_encoder_kernel_length="5_7_11",
    ldconv_decoder_kernel_length="3_7",
    ldconv_usebias=False,
)
def _savefn(*args, **kwargs):
return
@pytest.mark.parametrize(
    "model_dict",
    [
        {"asr_weight": 0.0, "mt_weight": 0.0},  # pure E2E-ST
        ldconv_lconv_args,
        ldconv_dconv_args,
        ldconv_lconv2d_args,
        ldconv_dconv2d_args,
        # MTL w/ attention ASR
        {"asr_weight": 0.1, "mtlalpha": 0.0, "mt_weight": 0.0},
        # MTL w/ attention ASR + MT
        {"asr_weight": 0.1, "mtlalpha": 0.0, "mt_weight": 0.1},
        # MTL w/ CTC ASR
        {"asr_weight": 0.1, "mtlalpha": 1.0, "mt_weight": 0.0},
        {"asr_weight": 0.1, "mtlalpha": 1.0, "ctc_type": "builtin"},
        {"asr_weight": 0.1, "mtlalpha": 1.0, "report_cer": True},
        {"asr_weight": 0.1, "mtlalpha": 1.0, "report_wer": True},
        {"asr_weight": 0.1, "mtlalpha": 1.0, "report_cer": True, "report_wer": True},
        # MTL w/ CTC ASR + MT
        {"asr_weight": 0.1, "mtlalpha": 1.0, "mt_weight": 0.1},
        # MTL w/ attention ASR + CTC ASR
        {"asr_weight": 0.1, "mtlalpha": 0.5, "mt_weight": 0.0},
        # MTL w/ attention ASR + CTC ASR + MT
        {"asr_weight": 0.1, "mtlalpha": 0.5, "mt_weight": 0.1},
    ],
)
def test_transformer_trainable_and_decodable(model_dict):
    """Smoke-test one ST configuration end to end: a single optimizer step
    must run, attention/CTC visualization hooks must be computable, and
    greedy decoding (beam size 1) must produce a hypothesis."""
    args = make_arg(**model_dict)
    model, x, ilens, y_tgt, y_src, data, uttid_list = prepare(args)
    # test beam search
    trans_args = argparse.Namespace(
        beam_size=1,
        penalty=0.0,
        ctc_weight=0.0,
        maxlenratio=1.0,
        lm_weight=0,
        minlenratio=0,
        nbest=1,
        tgt_lang=False,
    )
    # test trainable
    optim = torch.optim.Adam(model.parameters(), 0.01)
    loss = model(x, ilens, y_tgt, y_src)
    optim.zero_grad()
    loss.backward()
    optim.step()
    # test attention plot
    attn_dict = model.calculate_all_attentions(
        x[0:1], ilens[0:1], y_tgt[0:1], y_src[0:1]
    )
    plot.plot_multi_head_attention(data, uttid_list, attn_dict, "", savefn=_savefn)
    # test CTC plot
    ctc_probs = model.calculate_all_ctc_probs(
        x[0:1], ilens[0:1], y_tgt[0:1], y_src[0:1]
    )
    # CTC posteriors only exist when an ASR CTC branch is active
    # (asr_weight > 0 and mtlalpha > 0); otherwise None is expected.
    if args.asr_weight > 0 and args.mtlalpha > 0:
        print(ctc_probs.shape)
    else:
        assert ctc_probs is None
    # test decodable
    with torch.no_grad():
        nbest = model.translate(x[0, : ilens[0]].numpy(), trans_args, args.char_list)
        print(y_tgt[0])
        print(nbest[0]["yseq"][1:-1])
| 29.733333 | 85 | 0.621263 |
c8a808d82f5f300c7fed67e73855faf00e8e9c09 | 114 | py | Python | python_docs/08PassagemDeParametro/aula21c.py | Matheus-IT/lang-python-related | dd2e5d9b9f16d3838ba1670fdfcba1fa3fe305e9 | [
"MIT"
] | null | null | null | python_docs/08PassagemDeParametro/aula21c.py | Matheus-IT/lang-python-related | dd2e5d9b9f16d3838ba1670fdfcba1fa3fe305e9 | [
"MIT"
] | null | null | null | python_docs/08PassagemDeParametro/aula21c.py | Matheus-IT/lang-python-related | dd2e5d9b9f16d3838ba1670fdfcba1fa3fe305e9 | [
"MIT"
] | null | null | null | def funcao():
n1 = 4
print(f'N1 dentro vale {n1}')
n1 = 2
funcao()
print(f'N1 fora vale {n1}')
| 12.666667 | 34 | 0.517544 |
115a9757dfa7301d5ea57b72a852f0a93c678928 | 260 | py | Python | codekitchen/py_files/hello-world-dspy.py | ineelhere/pylearn | 14ce12691124ea556845fab36583280d5c7475d8 | [
"MIT"
] | 4 | 2021-08-29T01:46:21.000Z | 2022-01-08T21:45:12.000Z | codekitchen/py_files/hello-world-dspy.py | ineelhere/pylearn | 14ce12691124ea556845fab36583280d5c7475d8 | [
"MIT"
] | 4 | 2021-08-31T13:30:34.000Z | 2021-09-14T02:14:57.000Z | codekitchen/py_files/hello-world-dspy.py | ineelhere/pylearn | 14ce12691124ea556845fab36583280d5c7475d8 | [
"MIT"
] | 7 | 2020-07-22T10:30:50.000Z | 2021-09-14T01:54:01.000Z | print('Hello World!!')
print('Get ready to rock-N-roll ')
# we saw that the lines automatically shifted to new lines.
# although we can do this manually too, in one print statement
print('Hello World!! \nGet ready to rock-N-roll')
# and that's how we do it !!
| 37.142857 | 62 | 0.719231 |
27905e53a673038479afabab788c876d2426abcd | 266 | py | Python | wbb/modules/send.py | Dikatochhawng/lynn | 8ecd2fec5695c892da812886983db1a7c6a10f54 | [
"MIT"
] | null | null | null | wbb/modules/send.py | Dikatochhawng/lynn | 8ecd2fec5695c892da812886983db1a7c6a10f54 | [
"MIT"
] | null | null | null | wbb/modules/send.py | Dikatochhawng/lynn | 8ecd2fec5695c892da812886983db1a7c6a10f54 | [
"MIT"
] | 1 | 2021-11-10T17:32:35.000Z | 2021-11-10T17:32:35.000Z | from pyrogram import filters
from wbb import app
@app.on_message(filters.command("snd"))
async def send(_, message):
    """Handle /snd: repost the text following the command, then delete the
    triggering message so only the bot's copy remains in the chat.
    """
    parts = message.text.split(None, 1)
    if len(parts) < 2:
        # Bare "/snd" with no text: previously this raised IndexError;
        # now we simply ignore the command.
        return
    rsr = parts[1]
    await app.send_message(message.chat.id, text=rsr, disable_web_page_preview=True)
    await message.delete()
| 26.6 | 82 | 0.759398 |
0039f53dd3f4b6de71d987d5c62d3f4a4ee9f84c | 6,725 | py | Python | contrib/devtools/symbol-check.py | vivuscoin/vivuscoin | ba0db89712234bf68b2d6b63ef2c420d65c7c25d | [
"MIT"
] | null | null | null | contrib/devtools/symbol-check.py | vivuscoin/vivuscoin | ba0db89712234bf68b2d6b63ef2c420d65c7c25d | [
"MIT"
] | null | null | null | contrib/devtools/symbol-check.py | vivuscoin/vivuscoin | ba0db89712234bf68b2d6b63ef2c420d65c7c25d | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
A script to check that the (Linux) executables produced by gitian only contain
allowed gcc, glibc and libstdc++ version symbols. This makes sure they are
still compatible with the minimum supported Linux distribution versions.
Example usage:
find ../gitian-builder/build -type f -executable | xargs python contrib/devtools/symbol-check.py
'''
import subprocess
import re
import sys
import os
# Debian 6.0.9 (Squeeze) has:
#
# - g++ version 4.4.5 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=g%2B%2B)
# - libc version 2.11.3 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=libc6)
# - libstdc++ version 4.4.5 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=libstdc%2B%2B6)
#
# Ubuntu 10.04.4 (Lucid Lynx) has:
#
# - g++ version 4.4.3 (http://packages.ubuntu.com/search?keywords=g%2B%2B&searchon=names&suite=lucid§ion=all)
# - libc version 2.11.1 (http://packages.ubuntu.com/search?keywords=libc6&searchon=names&suite=lucid§ion=all)
# - libstdc++ version 4.4.3 (http://packages.ubuntu.com/search?suite=lucid§ion=all&arch=any&keywords=libstdc%2B%2B&searchon=names)
#
# Taking the minimum of these as our target.
#
# According to GNU ABI document (http://gcc.gnu.org/onlinedocs/libstdc++/manual/abi.html) this corresponds to:
# GCC 4.4.0: GCC_4.4.0
# GCC 4.4.2: GLIBCXX_3.4.13, CXXABI_1.3.3
# (glibc) GLIBC_2_11
#
# Newest allowed symbol version per library/versioning namespace; anything
# newer would break the minimum supported distributions listed above.
MAX_VERSIONS = {
'GCC':     (4,4,0),
'CXXABI':  (1,3,3),
'GLIBCXX': (3,4,13),
'GLIBC':   (2,11),
'LIBATOMIC': (1,0)
}
# See here for a description of _IO_stdin_used:
# https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=634261#109
# Ignore symbols that are exported as part of every executable
IGNORE_EXPORTS = {
'_edata', '_end', '__end__', '_init', '__bss_start', '__bss_start__', '_bss_end__', '__bss_end__', '_fini', '_IO_stdin_used', 'stdin', 'stdout', 'stderr'
}
# Tool paths; the environment overrides are useful for cross-compile setups.
READELF_CMD = os.getenv('READELF', '/usr/bin/readelf')
CPPFILT_CMD = os.getenv('CPPFILT', '/usr/bin/c++filt')
# Allowed NEEDED libraries
ALLOWED_LIBRARIES = {
# vivuscoind and vivuscoin-qt
'libgcc_s.so.1', # GCC base support
'libc.so.6', # C library
'libpthread.so.0', # threading
'libanl.so.1', # DNS resolve
'libm.so.6', # math library
'librt.so.1', # real-time (clock)
'libatomic.so.1',
'ld-linux-x86-64.so.2', # 64-bit dynamic linker
'ld-linux.so.2', # 32-bit dynamic linker
'ld-linux-aarch64.so.1', # 64-bit ARM dynamic linker
'ld-linux-armhf.so.3', # 32-bit ARM dynamic linker
'ld-linux-riscv64-lp64d.so.1', # 64-bit RISC-V dynamic linker
# vivuscoin-qt only
'libX11-xcb.so.1', # part of X11
'libX11.so.6', # part of X11
'libxcb.so.1', # part of X11
'libfontconfig.so.1', # font support
'libfreetype.so.6', # font parsing
'libdl.so.2' # programming interface to dynamic linker
}
# Minimum glibc version shipped for each supported CPU architecture; used
# as a per-arch floor when checking GLIBC symbol versions.
ARCH_MIN_GLIBC_VER = {
'80386':  (2,1),
'X86-64': (2,2,5),
'ARM':    (2,4),
'AArch64':(2,17),
'RISC-V': (2,27)
}
class CPPFilt(object):
    '''
    Demangle C++ symbol names.
    Use a pipe to the 'c++filt' command.
    '''
    def __init__(self):
        # One long-lived child process; each __call__ writes a single line
        # and reads a single line back, so stdin must be flushed per call.
        self.proc = subprocess.Popen(CPPFILT_CMD, stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True)

    def __call__(self, mangled):
        # Send one mangled name; c++filt echoes the demangled form per line.
        self.proc.stdin.write(mangled + '\n')
        self.proc.stdin.flush()
        return self.proc.stdout.readline().rstrip()

    def close(self):
        # Closing stdin signals EOF to c++filt so wait() can reap it.
        self.proc.stdin.close()
        self.proc.stdout.close()
        self.proc.wait()
def read_symbols(executable, imports=True):
    '''
    Parse an ELF executable and return a list of (symbol, version, arch)
    tuples for dynamic symbols; imported ones when *imports* is True,
    exported ones otherwise.
    '''
    # --dyn-syms dumps the dynamic symbol table; -h adds the ELF header,
    # whose 'Machine:' row supplies the architecture name.
    p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', '-h', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
    (stdout, stderr) = p.communicate()
    if p.returncode:
        raise IOError('Could not read symbols for %s: %s' % (executable, stderr.strip()))
    syms = []
    for line in stdout.splitlines():
        line = line.split()
        # The header precedes the symbol table, so 'arch' is set before
        # any symbol row is processed.
        if 'Machine:' in line:
            arch = line[-1]
        # Symbol rows start with an index token like '12:'; column 7 holds
        # NAME@VERSION and column 6 is 'UND' for undefined (imported) syms.
        if len(line)>7 and re.match('[0-9]+:$', line[0]):
            (sym, _, version) = line[7].partition('@')
            is_import = line[6] == 'UND'
            # NAME@@VERSION marks the default version; strip the extra '@'.
            if version.startswith('@'):
                version = version[1:]
            if is_import == imports:
                syms.append((sym, version, arch))
    return syms
def check_version(max_versions, version, arch):
    """Return True when a versioned symbol reference is acceptable.

    *version* looks like ``LIB_X.Y.Z`` (e.g. ``GLIBC_2.11``); a bare name
    with no underscore is treated as version 0.  The library must appear in
    *max_versions* and its version must not exceed the allowed maximum --
    except GLIBC, which is also accepted up to the per-architecture floor
    in ARCH_MIN_GLIBC_VER.
    """
    lib, sep, numeric = version.rpartition('_')
    if not sep:
        # No underscore at all: whole string is the library name.
        lib, numeric = version, '0'
    if lib not in max_versions:
        return False
    parsed = tuple(int(part) for part in numeric.split('.'))
    if parsed <= max_versions[lib]:
        return True
    return lib == 'GLIBC' and parsed <= ARCH_MIN_GLIBC_VER[arch]
def read_libraries(filename):
    '''
    Return the DT_NEEDED shared-library names of an ELF file.
    Raises IOError if readelf fails and ValueError on unparseable output.
    '''
    # readelf -d dumps the dynamic section; NEEDED entries name the
    # shared libraries the binary links against at run time.
    p = subprocess.Popen([READELF_CMD, '-d', '-W', filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, universal_newlines=True)
    (stdout, stderr) = p.communicate()
    if p.returncode:
        raise IOError('Error opening file')
    libraries = []
    for line in stdout.splitlines():
        tokens = line.split()
        if len(tokens)>2 and tokens[1] == '(NEEDED)':
            # The remainder looks like: Shared library: [libc.so.6]
            match = re.match('^Shared library: \[(.*)\]$', ' '.join(tokens[2:]))
            if match:
                libraries.append(match.group(1))
            else:
                raise ValueError('Unparseable (NEEDED) specification')
    return libraries
if __name__ == '__main__':
    # Exit status 0 only if every file passes all three checks.
    cppfilt = CPPFilt()
    retval = 0
    for filename in sys.argv[1:]:
        # Check imported symbols
        for sym,version,arch in read_symbols(filename, True):
            if version and not check_version(MAX_VERSIONS, version, arch):
                print('%s: symbol %s from unsupported version %s' % (filename, cppfilt(sym), version))
                retval = 1
        # Check exported symbols
        # NOTE(review): 'arch' here is whatever the import loop above left
        # behind; if a binary imported no symbols it would be stale or
        # unbound -- confirm this is impossible for the binaries checked.
        if arch != 'RISC-V':
            for sym,version,arch in read_symbols(filename, False):
                if sym in IGNORE_EXPORTS:
                    continue
                print('%s: export of symbol %s not allowed' % (filename, cppfilt(sym)))
                retval = 1
        # Check dependency libraries
        for library_name in read_libraries(filename):
            if library_name not in ALLOWED_LIBRARIES:
                print('%s: NEEDED library %s is not allowed' % (filename, library_name))
                retval = 1
    sys.exit(retval)
| 37.780899 | 173 | 0.646394 |
6686ccce4335a36d1d184c6fa88ef70b23dcf206 | 272 | py | Python | tests/artificial/transf_Anscombe/trend_MovingMedian/cycle_5/ar_12/test_artificial_128_Anscombe_MovingMedian_5_12_20.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | tests/artificial/transf_Anscombe/trend_MovingMedian/cycle_5/ar_12/test_artificial_128_Anscombe_MovingMedian_5_12_20.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | 1 | 2019-11-30T23:39:38.000Z | 2019-12-01T04:34:35.000Z | tests/artificial/transf_Anscombe/trend_MovingMedian/cycle_5/ar_12/test_artificial_128_Anscombe_MovingMedian_5_12_20.py | jmabry/pyaf | afbc15a851a2445a7824bf255af612dc429265af | [
"BSD-3-Clause"
] | null | null | null | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
# Generate and process one artificial benchmark series: 128 daily points,
# Anscombe transform, MovingMedian trend, cycle length 5, AR order 12,
# zero noise (sigma = 0.0) and 20 exogenous variables.
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 5, transform = "Anscombe", sigma = 0.0, exog_count = 20, ar_order = 12);
7ae5e865beb12452ff018a4de2553cdbcfd7f93d | 3,230 | py | Python | mason/examples/operators/table/merge/__init__.py | kyprifog/mason | bf45672124ef841bc16216c293034f4ccc506621 | [
"Apache-2.0"
] | 4 | 2021-04-12T17:49:34.000Z | 2022-01-23T19:54:29.000Z | mason/examples/operators/table/merge/__init__.py | kyprifog/mason | bf45672124ef841bc16216c293034f4ccc506621 | [
"Apache-2.0"
] | 24 | 2021-04-30T18:40:25.000Z | 2021-05-12T20:52:06.000Z | mason/examples/operators/table/merge/__init__.py | kyprifog/mason | bf45672124ef841bc16216c293034f4ccc506621 | [
"Apache-2.0"
] | 3 | 2021-04-12T19:40:43.000Z | 2021-09-07T21:56:36.000Z | from typing import Set, Union
from mason.clients.response import Response
from mason.configurations.config import Config
from mason.engines.execution.models.jobs import InvalidJob
from mason.engines.execution.models.jobs.executed_job import ExecutedJob
from mason.engines.execution.models.jobs.merge_job import MergeJob
from mason.engines.metastore.models.credentials import MetastoreCredentials
from mason.engines.metastore.models.table.table import Table
from mason.operators.operator_definition import OperatorDefinition
from mason.operators.operator_response import DelayedOperatorResponse
from mason.parameters.validated_parameters import ValidatedParameters
from mason.util.environment import MasonEnvironment
class TableMerge(OperatorDefinition):
def run_async(self, env: MasonEnvironment, config: Config, parameters: ValidatedParameters, response: Response) -> DelayedOperatorResponse:
SUPPORTED_SCHEMAS = {
"parquet",
"csv",
"json",
"jsonl"
}
# TODO: Replace db_name, tb_name with protocol path, ie s3://bucket/path, athena://database:table
database_name = parameters.get_required("database_name")
table_name = parameters.get_required("table_name")
output_path = parameters.get_required("output_path")
read_headers = parameters.get_optional("read_headers")
table, response = config.metastore().get_table(database_name, table_name, {"read_headers": read_headers}, response)
final: Union[ExecutedJob, InvalidJob]
if isinstance(table, Table):
final = InvalidJob("No conflicting schemas found. Merge Unnecessary")
else:
conflicting_table = table.conflicting_table()
if conflicting_table:
schemas = conflicting_table.schema_conflict.unique_schemas
schema_types: Set[str] = set(map(lambda schema: schema.type, schemas))
if len(schemas) > 0 and schema_types.issubset(SUPPORTED_SCHEMAS):
if len(schema_types) == 1:
schema_type = next(iter(schema_types))
inp = config.storage().table_path(database_name, table_name)
outp = config.storage().path(output_path)
credentials = config.metastore().credentials()
if isinstance(credentials, MetastoreCredentials):
job = MergeJob(schema_type, inp, outp, credentials)
final, response = config.execution().run_job(job, response)
else:
final = InvalidJob("MetastoreCredentials not found")
else:
final = InvalidJob("Mixed schemas not supported at this time.")
else:
final = InvalidJob(f"Unsupported schemas for merge operator: {', '.join(list(schema_types.difference(SUPPORTED_SCHEMAS)))}")
else:
final = InvalidJob(f"No conflicting schemas found at {database_name},{table_name}. Merge unnecessary. Invalid Schemas {table.message()}")
return DelayedOperatorResponse(final, response) | 53.833333 | 153 | 0.663158 |
b359ffe149b29d288c352c4b9c249f770d0e2ee0 | 989 | py | Python | evaluator.py | shizuo-kaji/PretrainCNNwithNoData | 6d076e4bc2effcd91e9275470db79e0125704087 | [
"MIT"
] | 1 | 2021-11-18T07:18:44.000Z | 2021-11-18T07:18:44.000Z | evaluator.py | shizuo-kaji/PretrainCNNwithNoData | 6d076e4bc2effcd91e9275470db79e0125704087 | [
"MIT"
] | null | null | null | evaluator.py | shizuo-kaji/PretrainCNNwithNoData | 6d076e4bc2effcd91e9275470db79e0125704087 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import torch
class AverageMeter(object):
    """Tracks the most recent value plus a running (weighted) average."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics back to zero."""
        self.val, self.avg, self.sum, self.count = 0., 0., 0., 0.

    def update(self, val, n=1):
        """Record *val* observed *n* times and refresh the running average."""
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count
def accuracy(output, target, topk=(1,)):
    """Compute top-k accuracy (as a percentage) for each k in *topk*.

    ``output`` holds per-class scores of shape (batch, classes) and
    ``target`` the ground-truth class indices of shape (batch,).
    Returns a list of 1-element tensors, one per requested k.
    """
    with torch.no_grad():
        k_max = max(topk)
        n_samples = target.size(0)
        # (batch, k_max) indices of the highest-scoring classes, transposed
        # to (k_max, batch) so row r holds everyone's rank-r prediction.
        _, top_idx = output.topk(k_max, 1, True, True)
        top_idx = top_idx.t()
        hits = top_idx.eq(target.view(1, -1).expand_as(top_idx))
        # For each k, count samples whose target appears in the first k rows.
        return [
            hits[:k].reshape(-1).float().sum(0, keepdim=True) * (100.0 / n_samples)
            for k in topk
        ]
e386accbce1e2aa9bd3dcdc11ae9880e6b83279d | 507 | py | Python | pytglib/api/types/mask_point_forehead.py | iTeam-co/pytglib | e5e75e0a85f89b77762209b32a61b0a883c0ae61 | [
"MIT"
] | 6 | 2019-10-30T08:57:27.000Z | 2021-02-08T14:17:43.000Z | pytglib/api/types/mask_point_forehead.py | iTeam-co/python-telegram | e5e75e0a85f89b77762209b32a61b0a883c0ae61 | [
"MIT"
] | 1 | 2021-08-19T05:44:10.000Z | 2021-08-19T07:14:56.000Z | pytglib/api/types/mask_point_forehead.py | iTeam-co/python-telegram | e5e75e0a85f89b77762209b32a61b0a883c0ae61 | [
"MIT"
] | 5 | 2019-12-04T05:30:39.000Z | 2021-05-21T18:23:32.000Z |
from ..utils import Object
class MaskPointForehead(Object):
    """Mask anchor point: the mask is positioned relative to the forehead.

    Attributes:
        ID (:obj:`str`): ``MaskPointForehead``

    No parameters required.

    Returns:
        MaskPoint

    Raises:
        :class:`telegram.Error`
    """

    ID = "maskPointForehead"

    def __init__(self, **kwargs):
        # This variant carries no payload; any constructor kwargs are ignored.
        pass

    @staticmethod
    def read(q: dict, *args) -> "MaskPointForehead":
        # The serialized dict holds no fields for this type: just instantiate.
        return MaskPointForehead()
| 16.354839 | 54 | 0.587771 |
e978a6e2b9aff63d29158c98057eb55911e68592 | 3,642 | py | Python | funboost/consumers/zeromq_consumer.py | DJMIN/funboost | 7570ca2909bb0b44a1080f5f98aa96c86d3da9d4 | [
"Apache-2.0"
] | 120 | 2021-12-26T03:27:12.000Z | 2022-03-31T16:20:44.000Z | funboost/consumers/zeromq_consumer.py | mooti-barry/funboost | 2cd9530e2c4e5a52fc921070d243d402adbc3a0e | [
"Apache-2.0"
] | 18 | 2021-12-31T06:26:37.000Z | 2022-03-31T16:16:33.000Z | funboost/consumers/zeromq_consumer.py | mooti-barry/funboost | 2cd9530e2c4e5a52fc921070d243d402adbc3a0e | [
"Apache-2.0"
] | 27 | 2021-12-26T16:12:31.000Z | 2022-03-26T17:43:08.000Z | # -*- coding: utf-8 -*-
# @Author : ydf
import os
import socket
import json
# import time
import zmq
import multiprocessing
from funboost.consumers.base_consumer import AbstractConsumer
from nb_log import get_logger
# noinspection PyPep8
def check_port_is_used(ip, port):
    """Return True if a TCP connection to (ip, port) succeeds, else False.

    Used to detect whether the broker ports are already bound by another
    process.

    Fix: the probe socket is now always closed.  The original only called
    shutdown(), which stops traffic but does not release the file
    descriptor, so repeated probes leaked sockets.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.connect((ip, int(port)))
        # Disable both directions before closing; socket.SHUT_RDWR == 2,
        # matching the original magic number.
        s.shutdown(socket.SHUT_RDWR)
        return True
    except Exception:
        # Any failure (refused, timeout, bad address) means "not in use".
        return False
    finally:
        s.close()
logger_zeromq_broker = get_logger('zeromq_broker')
# noinspection PyUnresolvedReferences
# noinspection PyUnresolvedReferences
def start_broker(port_router: int, port_dealer: int):
    """Run a blocking ROUTER/DEALER forwarding loop between the two TCP ports.

    Frames arriving on the ROUTER side are relayed to the DEALER side and
    vice versa.  Runs forever; any exception tears the broker down and is
    logged as a warning.
    """
    try:
        ctx = zmq.Context()
        # noinspection PyUnresolvedReferences
        router_sock = ctx.socket(zmq.ROUTER)
        dealer_sock = ctx.socket(zmq.DEALER)
        router_sock.bind(f"tcp://*:{port_router}")
        dealer_sock.bind(f"tcp://*:{port_dealer}")

        # Poll both sockets and shuttle messages between them.
        poller = zmq.Poller()
        poller.register(router_sock, zmq.POLLIN)
        poller.register(dealer_sock, zmq.POLLIN)
        logger_zeromq_broker.info(f'broker 绑定端口 {port_router} {port_dealer} 成功')

        # noinspection DuplicatedCode
        while True:
            ready = dict(poller.poll())
            if ready.get(router_sock) == zmq.POLLIN:
                dealer_sock.send_multipart(router_sock.recv_multipart())
            if ready.get(dealer_sock) == zmq.POLLIN:
                router_sock.send_multipart(dealer_sock.recv_multipart())
    except Exception as e:
        logger_zeromq_broker.warning(e)
class ZeroMqConsumer(AbstractConsumer):
    """
    Consumer for the zeromq broker.  zeromq is plain socket code: messages are
    not persisted and no external broker software needs to be installed.
    """
    BROKER_KIND = 13

    def start_broker_queue_name_as_port(self):
        """Launch the ROUTER/DEALER broker process for this queue.

        The queue name doubles as the ROUTER tcp port (queue name + 1 is the
        DEALER port), so it must be a number between 10000 and 65535; the
        whole process is aborted otherwise.
        """
        # threading.Thread(target=self._start_broker).start()
        # noinspection PyBroadException
        try:
            if not (10000 < int(self._queue_name) < 65535):
                raise ValueError(",请设置queue的名字是一个 10000到65535的之间的一个端口数字")
        except Exception:
            self.logger.critical(f" zeromq 模式以 queue 的民资作为tcp 端口,请设置queue的名字是一个 10000 到 65535 之间的一个端口数字")
            # A misconfigured queue name cannot be recovered from: hard-exit the process.
            # noinspection PyProtectedMember
            os._exit(444)
        # If either port already answers, a broker is assumed to be running; do not start another.
        if check_port_is_used('127.0.0.1', int(self._queue_name)):
            self.logger.debug(f"""{int(self._queue_name)} router端口已经启动(或占用) """)
            return
        if check_port_is_used('127.0.0.1', int(self._queue_name) + 1):
            self.logger.debug(f"""{int(self._queue_name) + 1} dealer 端口已经启动(或占用) """)
            return
        # The broker runs in its own process, independent of this consumer.
        multiprocessing.Process(target=start_broker, args=(int(self._queue_name), int(self._queue_name) + 1)).start()

    # noinspection DuplicatedCode
    def _shedual_task(self):
        """Connect a REP socket to the broker's DEALER side and feed every
        received JSON message to the work pool, replying 'recv ok' each time."""
        self.start_broker_queue_name_as_port()
        context = zmq.Context()
        # noinspection PyUnresolvedReferences
        zsocket = context.socket(zmq.REP)
        zsocket.connect(f"tcp://localhost:{int(self._queue_name) + 1}")
        while True:
            message = zsocket.recv()
            # self.logger.debug(f""" 从 zeromq 取出的消息是 {message}""")
            self._print_message_get_from_broker('zeromq',message)
            self._submit_task({'body': json.loads(message)})
            zsocket.send('recv ok'.encode())

    def _confirm_consume(self, kw):
        # zeromq has no acknowledgement concept; the REP reply above is the only confirmation.
        pass #

    def _requeue(self, kw):
        # Re-publish the original body back onto the same queue.
        self.publisher_of_same_queue.publish(kw['body'])
| 34.358491 | 117 | 0.646623 |
13cca4cb5610ea4a104e5b2cc59c4b820f175857 | 3,369 | py | Python | neurokit2/stats/fit_error.py | kassyray/NeuroKit | b84d110a71d5d17c0d1efde0d60d00446fda16cb | [
"MIT"
] | null | null | null | neurokit2/stats/fit_error.py | kassyray/NeuroKit | b84d110a71d5d17c0d1efde0d60d00446fda16cb | [
"MIT"
] | null | null | null | neurokit2/stats/fit_error.py | kassyray/NeuroKit | b84d110a71d5d17c0d1efde0d60d00446fda16cb | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import numpy as np
def fit_error(y, y_predicted, n_parameters=2):
    """Calculate the fit error for a model.

    Also specific and direct access functions can be used, such as `fit_mse()`,
    `fit_rmse()` and `fit_r2()`.

    Parameters
    ----------
    y : list, array or Series
        The response variable (the y axis).
    y_predicted : list, array or Series
        The fitted data generated by a model.
    n_parameters : int
        Number of model parameters (for the degrees of freedom used in R2).

    Returns
    -------
    dict
        A dictionary containing different indices of fit error.

    See Also
    --------
    fit_mse, fit_rmse, fit_r2

    Examples
    --------
    >>> import neurokit2 as nk
    >>>
    >>> y = np.array([-1.0, -0.5, 0, 0.5, 1])
    >>> y_predicted = np.array([0.0, 0, 0, 0, 0])
    >>>
    >>> # Master function
    >>> x = nk.fit_error(y, y_predicted)
    >>> x #doctest: +SKIP
    >>>
    >>> # Direct access
    >>> nk.fit_mse(y, y_predicted) #doctest: +ELLIPSIS
    0.5
    >>>
    >>> nk.fit_rmse(y, y_predicted) #doctest: +ELLIPSIS
    0.7071067811865476
    >>>
    >>> nk.fit_r2(y, y_predicted, adjusted=False) #doctest: +ELLIPSIS
    0.7071067811865475
    >>>
    >>> nk.fit_r2(y, y_predicted, adjusted=True, n_parameters=2) #doctest: +ELLIPSIS
    0.057190958417936755

    """
    sse, n_obs, dof = _fit_error_prepare(y, y_predicted, n_parameters)

    # Mean squared error and its square root.
    mse = sse / n_obs
    rmse = np.sqrt(sse / n_obs)

    # "Total sum of squares" as historically computed in this module (std * n);
    # the R2 values below are pinned by the doctests above and kept unchanged.
    sst = np.std(y) * n_obs
    if sst == 0:
        r2 = 1
    else:
        r2 = sse / sst

    # Adjusted variant, expression kept exactly as originally written.
    r2_adjusted = 1 - (1 - (1 - r2)) * (n_obs - 1) / dof

    return {"SSE": sse, "MSE": mse, "RMSE": rmse, "R2": r2, "R2_adjusted": r2_adjusted}
# =============================================================================
# Direct accessors
# =============================================================================
def fit_mse(y, y_predicted):
    """Return the mean squared error between data and model (see `fit_error`)."""
    metrics = fit_error(y, y_predicted)
    return metrics["MSE"]
def fit_rmse(y, y_predicted):
    """Return the root mean squared error between data and model (see `fit_error`)."""
    metrics = fit_error(y, y_predicted)
    return metrics["RMSE"]
def fit_r2(y, y_predicted, adjusted=True, n_parameters=2):
    """Return R2 (or the adjusted R2 when ``adjusted`` is True) from `fit_error`."""
    key = "R2_adjusted" if adjusted is True else "R2"
    return fit_error(y, y_predicted, n_parameters=n_parameters)[key]
# =============================================================================
# Internals
# =============================================================================
def _fit_error_prepare(y, y_predicted, n_parameters=2):
# n, i.e., how many observations (signal length)
n = len(y)
# Sanitize
if n != len(y_predicted):
raise TypeError("NeuroKit error: fit_error(): 'y' and 'y_predicted' are not of the same length.")
# Residual, i.e. the difference between data and model
residual = y - y_predicted
# Degrees of freedom, i.e., number of observations (length of signal) minus number of parameters
df = n - n_parameters
# Calculate sum of squared errors
SSE = np.sum(residual ** 2)
return SSE, n, df
| 27.842975 | 108 | 0.554467 |
63c910c56cf8b28b2a219befdbc3a23a7e771598 | 2,878 | py | Python | test/functional/interface_bitcoin_cli.py | CryptoDev-Project/CHTC | 9d2f8a849ac2989fe5514956fbb6b98cc81f0eff | [
"MIT"
] | 4 | 2019-04-06T22:56:04.000Z | 2019-07-27T07:58:03.000Z | test/functional/interface_bitcoin_cli.py | CryptoDev-Project/CHTC | 9d2f8a849ac2989fe5514956fbb6b98cc81f0eff | [
"MIT"
] | 2 | 2020-12-19T14:36:51.000Z | 2020-12-20T16:19:36.000Z | test/functional/interface_bitcoin_cli.py | CryptoDev-Project/CHTC | 9d2f8a849ac2989fe5514956fbb6b98cc81f0eff | [
"MIT"
] | 6 | 2019-03-27T20:02:29.000Z | 2019-05-07T17:49:14.000Z | #!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test chtc-cli"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_process_error, get_auth_cookie
import time
class TestBitcoinCli(BitcoinTestFramework):
    """Exercise chtc-cli by comparing its output against the matching RPC calls."""

    def set_test_params(self):
        # A single fresh node is enough: we only compare two interfaces to it.
        self.setup_clean_chain = True
        self.num_nodes = 1

    def run_test(self):
        """Main test logic"""
        self.log.info("Sleeping 30 seconds...")
        time.sleep(30)

        self.log.info("Compare responses from gewalletinfo RPC and `chtc-cli getwalletinfo`")
        cli_response = self.nodes[0].cli.getwalletinfo()
        rpc_response = self.nodes[0].getwalletinfo()
        assert_equal(cli_response, rpc_response)

        self.log.info("Compare responses from getblockchaininfo RPC and `chtc-cli getblockchaininfo`")
        cli_response = self.nodes[0].cli.getblockchaininfo()
        rpc_response = self.nodes[0].getblockchaininfo()
        assert_equal(cli_response, rpc_response)

        # The credentials themselves are not used below; the call doubles as a
        # check that the auth cookie file exists and is readable.
        _user, _password = get_auth_cookie(self.nodes[0].datadir)

        self.log.info("Compare responses from `chtc-cli -getinfo` and the RPCs data is retrieved from.")
        cli_get_info = self.nodes[0].cli('getinfo').send_cli()
        wallet_info = self.nodes[0].getwalletinfo()
        network_info = self.nodes[0].getnetworkinfo()
        blockchain_info = self.nodes[0].getblockchaininfo()

        assert_equal(cli_get_info['version'], network_info['version'])
        assert_equal(cli_get_info['protocolversion'], network_info['protocolversion'])
        assert_equal(cli_get_info['walletversion'], wallet_info['walletversion'])
        assert_equal(cli_get_info['balance'], wallet_info['balance'])
        assert_equal(cli_get_info['blocks'], blockchain_info['blocks'])
        assert_equal(cli_get_info['timeoffset'], network_info['timeoffset'])
        assert_equal(cli_get_info['connections'], network_info['connections'])
        assert_equal(cli_get_info['proxy'], network_info['networks'][0]['proxy'])
        assert_equal(cli_get_info['difficulty'], blockchain_info['difficulty'])
        assert_equal(cli_get_info['testnet'], blockchain_info['chain'] == "test")
        # Note: 'balance' was asserted a second time in the original; the
        # duplicate assertion has been dropped.
        assert_equal(cli_get_info['keypoololdest'], wallet_info['keypoololdest'])
        assert_equal(cli_get_info['keypoolsize'], wallet_info['keypoolsize'])
        assert_equal(cli_get_info['paytxfee'], wallet_info['paytxfee'])
        assert_equal(cli_get_info['relayfee'], network_info['relayfee'])
        # unlocked_until is not tested because the wallet is not encrypted
if __name__ == '__main__':
TestBitcoinCli().main()
| 48.779661 | 104 | 0.714038 |
e1b706f81f9b0f2d2e059f5f330e6dd6089cfa0c | 2,635 | py | Python | examples/json_import_example/import_json.py | fenrisl/cyberwatch_api_toolbox | 5bc691c87446f117a9e52acd3c8590eb3693dcd6 | [
"MIT"
] | 10 | 2018-11-16T14:21:03.000Z | 2022-01-17T13:31:21.000Z | examples/json_import_example/import_json.py | fenrisl/cyberwatch_api_toolbox | 5bc691c87446f117a9e52acd3c8590eb3693dcd6 | [
"MIT"
] | 108 | 2018-12-10T14:49:49.000Z | 2022-02-15T09:45:08.000Z | examples/json_import_example/import_json.py | fenrisl/cyberwatch_api_toolbox | 5bc691c87446f117a9e52acd3c8590eb3693dcd6 | [
"MIT"
] | 26 | 2018-12-06T15:03:24.000Z | 2022-02-14T13:37:55.000Z | """Create remote access"""
import os
import json
from configparser import ConfigParser
from cbw_api_toolbox.cbw_api import CBWApi
CONF = ConfigParser()
CONF.read(os.path.join(os.path.abspath(os.path.dirname(__file__)), '..', '..', 'api.conf'))
CLIENT = CBWApi(CONF.get('cyberwatch', 'url'), CONF.get('cyberwatch', 'api_key'), CONF.get('cyberwatch', 'secret_key'))
def match_system_cyberwatch(system):
    """Map a system label from the input file to a Cyberwatch remote-access
    type string and its conventional default port."""
    known_systems = {
        "windows": ("CbwRam::RemoteAccess::WinRm::WithNegotiate", 5985),
        "linux": ("CbwRam::RemoteAccess::Ssh::WithPassword", 22),
        "network device": ("CbwRam::RemoteAccess::Snmp", 161),
    }
    if system in known_systems:
        return known_systems[system]
    # Unknown labels fall back to the same defaults as 'linux'.
    print("System '{}' not recognized, setting default as 'Linux' and port 22".format(system))
    return "CbwRam::RemoteAccess::Ssh::WithPassword", 22
def parse_json_file(json_file_path):
    """Parse the json file specified and create remote access objects in Cyberwatch.

    Each entry must provide "host" and "system"; "port", "credential_id",
    "node_id" and "cyberwatch_groups" are optional and fall back to defaults.

    Bug fix: the defaults dict is now rebuilt for every entry.  Previously a
    single dict was mutated across iterations, so an optional value supplied
    by one entry (e.g. its credential_id) silently became the default for all
    following entries.
    """
    with open(json_file_path) as json_data:
        data = json.load(json_data)

    for json_dict in data:
        # Fresh default values for the source (node_id) and the credential
        # for each entry, so entries cannot leak values into each other.
        remote_access_infos = {
            "address": json_dict["host"],
            "credential_id": "4",
            "node_id": "1",
            "server_groups": "",
        }
        # Get system and set default port value based on the system
        remote_access_infos["type"], remote_access_infos["port"] = match_system_cyberwatch(json_dict["system"])
        # If the port is defined in the json, use its value to override, else keep the system default
        remote_access_infos["port"] = json_dict.get("port", remote_access_infos["port"])
        remote_access_infos["credential_id"] = json_dict.get("credential_id", remote_access_infos["credential_id"])
        remote_access_infos["node_id"] = json_dict.get("node_id", remote_access_infos["node_id"])
        remote_access_infos["server_groups"] = json_dict.get("cyberwatch_groups",
                                                             remote_access_infos["server_groups"])
        print("Trying to create Cyberwatch remote access with the following information : {}"
              .format(remote_access_infos))
        CLIENT.create_remote_access(remote_access_infos)
json_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'example.json')
parse_json_file(json_file)
| 46.22807 | 119 | 0.678937 |
dbcfa3a0a447ef58448d348a0f46eb8c57edd9ec | 821 | py | Python | cleanab/ynab.py | janw/cleanab | 0d97bc976d0a1190d5223ca2eaa0407663e88e59 | [
"Apache-2.0"
] | 2 | 2019-10-06T13:52:33.000Z | 2021-12-31T19:29:30.000Z | cleanab/ynab.py | janw/cleanab | 0d97bc976d0a1190d5223ca2eaa0407663e88e59 | [
"Apache-2.0"
] | null | null | null | cleanab/ynab.py | janw/cleanab | 0d97bc976d0a1190d5223ca2eaa0407663e88e59 | [
"Apache-2.0"
] | null | null | null | from functools import lru_cache
import ynab_api as ynab
@lru_cache(maxsize=1)
def get_ynab_api(access_token):
    """Return a cached YNAB TransactionsApi client authenticated with *access_token*."""
    configuration = ynab.Configuration()
    configuration.api_key["Authorization"] = access_token
    configuration.api_key_prefix["Authorization"] = "Bearer"
    client = ynab.ApiClient(configuration)
    return ynab.TransactionsApi(client)
@lru_cache(maxsize=1)
def get_ynab_account_api(access_token):
    """Return a cached YNAB AccountsApi client authenticated with *access_token*."""
    configuration = ynab.Configuration()
    configuration.api_key["Authorization"] = access_token
    configuration.api_key_prefix["Authorization"] = "Bearer"
    client = ynab.ApiClient(configuration)
    return ynab.AccountsApi(client)
def create_transactions(access_token, budget_id, processed_transactions):
    """Post *processed_transactions* to the YNAB budget identified by *budget_id*."""
    payload = ynab.SaveTransactionsWrapper(transactions=processed_transactions)
    api = get_ynab_api(access_token)
    return api.create_transaction(budget_id, payload)
| 30.407407 | 74 | 0.773447 |
e88938b8275728b4b0e677eaed13c8c2b595df5d | 4,552 | py | Python | acrobot_simulator/acro_utils.py | dtak/hip-mdp-public | 3deb3fbfd341d572634889cd4c2f8e6c1e8898e3 | [
"MIT"
] | 23 | 2017-10-26T17:26:11.000Z | 2021-09-09T17:48:53.000Z | acrobot_simulator/acro_utils.py | dtak/hip-mdp-public | 3deb3fbfd341d572634889cd4c2f8e6c1e8898e3 | [
"MIT"
] | 1 | 2017-12-26T03:42:47.000Z | 2017-12-28T06:08:27.000Z | acrobot_simulator/acro_utils.py | dtak/hip-mdp-public | 3deb3fbfd341d572634889cd4c2f8e6c1e8898e3 | [
"MIT"
] | 10 | 2017-10-27T19:24:10.000Z | 2022-03-17T03:04:00.000Z | import sys
import os
import numpy as np
import numbers
""" This set of utility functions are adapted from rlpy team."""
# Original attribution information:
__copyright__ = "Copyright 2013, RLPy http://acl.mit.edu/RLPy"
__credits__ = ["Alborz Geramifard", "Robert H. Klein", "Christoph Dann",
"William Dabney", "Jonathan P. How"]
__license__ = "BSD 3-Clause"
__author__ = "Christoph Dann"
def rk4(derivs, y0, t, *args, **kwargs):
    """
    Integrate a 1D or ND system of ODEs using 4-th order Runge-Kutta.

    This is a toy implementation which may be useful if you find
    yourself stranded on a system w/o scipy.  Otherwise use
    :func:`scipy.integrate`.

    *y0*
        initial state vector (a scalar is treated as a 1D system)
    *t*
        sample times
    *derivs*
        returns the derivative of the system and has the
        signature ``dy = derivs(yi, ti)``
    *args*
        additional arguments passed to the derivative function
    *kwargs*
        additional keyword arguments passed to the derivative function

    Example 1 ::

        ## 2D system
        def derivs6(x,t):
            d1 = x[0] + 2*x[1]
            d2 = -3*x[0] + 4*x[1]
            return (d1, d2)
        dt = 0.0005
        t = arange(0.0, 2.0, dt)
        y0 = (1,2)
        yout = rk4(derivs6, y0, t)

    Example 2::

        ## 1D system
        alpha = 2
        def derivs(x,t):
            return -alpha*x + exp(-t)
        y0 = 1
        yout = rk4(derivs, y0, t)

    Fixes: uses ``np.float64`` instead of the ``np.float_`` alias, which was
    removed in NumPy 2.0 (they are the same type on older NumPy); removed a
    dead ``i = 0`` assignment before the loop.
    """
    try:
        Ny = len(y0)
    except TypeError:
        # Scalar initial condition: output is a flat trajectory.
        yout = np.zeros((len(t),), np.float64)
    else:
        yout = np.zeros((len(t), Ny), np.float64)

    yout[0] = y0

    for i in np.arange(len(t) - 1):
        thist = t[i]
        dt = t[i + 1] - thist
        dt2 = dt / 2.0
        y0 = yout[i]
        # Classic RK4 stages: slope at start, two midpoints, and endpoint.
        k1 = np.asarray(derivs(y0, thist, *args, **kwargs))
        k2 = np.asarray(derivs(y0 + dt2 * k1, thist + dt2, *args, **kwargs))
        k3 = np.asarray(derivs(y0 + dt2 * k2, thist + dt2, *args, **kwargs))
        k4 = np.asarray(derivs(y0 + dt * k3, thist + dt, *args, **kwargs))
        yout[i + 1] = y0 + dt / 6.0 * (k1 + 2 * k2 + 2 * k3 + k4)
    return yout
def wrap(x, m, M):
    """Wrap the scalar ``x`` into the circular range [m, M].

    Unlike ``bound()``, which truncates, this wraps around the coordinate
    system defined by m and M: e.g. m = -180, M = 180 (degrees), x = 360
    returns 0.

    :param x: a scalar
    :param m: minimum possible value in range
    :param M: maximum possible value in range
    """
    span = M - m
    # Repeatedly shift by one full span until x lands inside [m, M].
    while x > M:
        x -= span
    while x < m:
        x += span
    return x
def bound(x, m, M=None):
    """Clamp the scalar ``x`` into [m, M].

    Either call ``bound(x, m, M)`` with scalar bounds, or pass a length-2
    sequence as ``m`` (``bound(x, (lo, hi))``) in which case ``M`` is ignored.
    """
    if M is None:
        m, M = m[0], m[1]
    # Equivalent to min(max(x, m), M): raise to the floor, then cap at the ceiling.
    raised = x if x > m else m
    return raised if raised < M else M
def fromAtoB(x1, y1, x2, y2, color='k', connectionstyle="arc3,rad=-0.4",
             shrinkA=10, shrinkB=10, arrowstyle="fancy", ax=None):
    """
    Draws an arrow from point A=(x1,y1) to point B=(x2,y2) on the (optional)
    axis ``ax``; with no axis, annotates the current pyplot figure.

    .. note::

        See matplotlib documentation.
    """
    # NOTE(review): 'pl' (pylab/pyplot) is not imported anywhere in this
    # module's visible imports (sys, os, numpy, numbers), so the ax=None
    # branch would raise NameError as written -- confirm against the full
    # file before relying on that branch.
    if ax is None:
        return pl.annotate("",
                           xy=(x2, y2), xycoords='data',
                           xytext=(x1, y1), textcoords='data',
                           arrowprops=dict(
                               arrowstyle=arrowstyle,  # linestyle="dashed",
                               color=color,
                               shrinkA=shrinkA, shrinkB=shrinkB,
                               patchA=None,
                               patchB=None,
                               connectionstyle=connectionstyle),
                           )
    else:
        return ax.annotate("",
                           xy=(x2, y2), xycoords='data',
                           xytext=(x1, y1), textcoords='data',
                           arrowprops=dict(
                               arrowstyle=arrowstyle,  # linestyle="dashed",
                               color=color,
                               shrinkA=shrinkA, shrinkB=shrinkB,
                               patchA=None,
                               patchB=None,
                               connectionstyle=connectionstyle),
                           )
09c54d5a06aed2e1188c94497debc7bf5d19b582 | 1,742 | py | Python | src/python/grapl-tests-common/grapl_tests_common/clients/engagement_edge_client.py | wimax-grapl/grapl | be0a49a83f62b84a10182c383d12f911cc555b24 | [
"Apache-2.0"
] | null | null | null | src/python/grapl-tests-common/grapl_tests_common/clients/engagement_edge_client.py | wimax-grapl/grapl | be0a49a83f62b84a10182c383d12f911cc555b24 | [
"Apache-2.0"
] | null | null | null | src/python/grapl-tests-common/grapl_tests_common/clients/engagement_edge_client.py | wimax-grapl/grapl | be0a49a83f62b84a10182c383d12f911cc555b24 | [
"Apache-2.0"
] | null | null | null | from http import HTTPStatus
from typing import Optional
import requests
_JSON_CONTENT_TYPE_HEADERS = {"Content-type": "application/json"}
_ORIGIN = {
"Origin": "https://local-grapl-engagement-ux-bucket.s3.amazonaws.com",
}
class EngagementEdgeException(Exception):
    """Raised when the engagement-edge service returns an error or an unexpected response."""
class EngagementEdgeClient:
    """Thin HTTP client for the Grapl engagement-edge service.

    Talks to port 8900 on either 'localhost' or the docker-link hostname,
    depending on how the caller is deployed.
    """

    def __init__(self, use_docker_links: bool = False) -> None:
        # Inside a docker network the service is addressed by its link name.
        hostname = "grapl-engagement-edge" if use_docker_links else "localhost"
        self.endpoint = f"http://{hostname}:8900"

    def get_jwt(self) -> str:
        """Log in with the fixed local-dev credentials and return the grapl_jwt cookie.

        Raises EngagementEdgeException on a non-200 response or when the
        cookie is absent from the reply.
        """
        resp = requests.post(
            f"{self.endpoint}/login",
            json={
                # hardcoded when IS_LOCAL
                "username": "grapluser",
                # sha'd and pepper'd - see engagement view Login.tsx
                "password": "2ae5ddfb1eeeed45d502bcfd0c7b8f962f24bf85328ba942f32a31c0229c295a",
            },
            # TODO: Should consume the deployment name instead of hardcoded.
            headers={
                **_JSON_CONTENT_TYPE_HEADERS,
                **_ORIGIN,
            },
        )
        if resp.status_code != HTTPStatus.OK:
            raise EngagementEdgeException(f"{resp.status_code}: {resp.text}")
        cookie: Optional[str] = resp.cookies.get("grapl_jwt")
        if not cookie:
            raise EngagementEdgeException(
                f"Couldn't find grapl_jwt cookie in {resp.cookies}"
            )
        return cookie

    def get_notebook(self, jwt: str) -> str:
        """Exchange a JWT for the user's notebook URL.

        NOTE(review): assumes the response body is {"success": {"notebook_url": ...}};
        a KeyError propagates if the service replies differently.
        """
        cookies = {"grapl_jwt": jwt}
        resp = requests.post(
            f"{self.endpoint}/getNotebook",
            cookies=cookies,
            headers=_ORIGIN,
        )
        url: str = resp.json()["success"]["notebook_url"]
        return url
| 32.259259 | 95 | 0.594719 |
4190feb4f3235203f4a4c7e3ed8d66bf2d1ddc6c | 52,957 | py | Python | CortexPal.py | flynnwastaken/cortex-discord | 2cf8791e25696652262987c3be13e710eaa36313 | [
"MIT"
] | null | null | null | CortexPal.py | flynnwastaken/cortex-discord | 2cf8791e25696652262987c3be13e710eaa36313 | [
"MIT"
] | null | null | null | CortexPal.py | flynnwastaken/cortex-discord | 2cf8791e25696652262987c3be13e710eaa36313 | [
"MIT"
] | null | null | null | import discord
import random
import os
import traceback
import re
import logging
import logging.handlers
import configparser
import datetime
import uuid
import sqlite3
import copy
from discord.ext import commands
from datetime import datetime, timedelta, timezone
PREFIX = '$'
PURGE_DAYS = 180
DICE_EXPRESSION = re.compile('(\d*(d|D))?(4|6|8|10|12)')
DIE_SIZES = [4, 6, 8, 10, 12]
UNTYPED_STRESS = 'General'
ADD_SYNONYMS = ['add', 'give', 'new', 'create']
REMOVE_SYNOYMS = ['remove', 'spend', 'delete', 'subtract']
UP_SYNONYMS = ['stepup', 'up']
DOWN_SYNONYMS = ['stepdown', 'down']
CLEAR_SYNONYMS = ['clear', 'erase']
DIE_FACE_ERROR = '{0} is not a valid die size. You may only use dice with sizes of 4, 6, 8, 10, or 12.'
DIE_STRING_ERROR = '{0} is not a valid die or dice.'
DIE_EXCESS_ERROR = 'You can\'t use that many dice.'
DIE_MISSING_ERROR = 'There were no valid dice in that command.'
DIE_LACK_ERROR = 'That pool only has {0}D{1}.'
DIE_NONE_ERROR = 'That pool doesn\'t have any D{0}s.'
NOT_EXIST_ERROR = 'There\'s no such {0} yet.'
HAS_NONE_ERROR = '{0} doesn\'t have any {1}.'
HAS_ONLY_ERROR = '{0} only has {1} {2}.'
INSTRUCTION_ERROR = '`{0}` is not a valid instruction for the `{1}` command.'
UNKNOWN_COMMAND_ERROR = 'That\'s not a valid command.'
UNEXPECTED_ERROR = 'Oops. A software error interrupted this command.'
ABOUT_TEXT = 'CortexPal v1.1: a Discord bot for Cortex Prime RPG players.'
# Read configuration.
config = configparser.ConfigParser()
config.read('cortexpal.ini')
# Set up logging.
logHandler = logging.handlers.TimedRotatingFileHandler(filename=config['logging']['file'], when='D', backupCount=9)
logging.basicConfig(handlers=[logHandler], format='%(asctime)s %(message)s', level=logging.DEBUG)
# Set up database.
db = sqlite3.connect(config['database']['file'])
db.row_factory = sqlite3.Row
cursor = db.cursor()
cursor.execute(
'CREATE TABLE IF NOT EXISTS GAME'
'(GUID VARCHAR(32) PRIMARY KEY,'
'SERVER INT NOT NULL,'
'CHANNEL INT NOT NULL,'
'ACTIVITY DATETIME NOT NULL)'
)
cursor.execute(
'CREATE TABLE IF NOT EXISTS GAME_OPTIONS'
'(GUID VARCHAR(32) PRIMARY KEY,'
'KEY VARCHAR(16) NOT NULL,'
'VALUE VARCHAR(256),'
'PARENT_GUID VARCHAR(32) NOT NULL)'
)
cursor.execute(
'CREATE TABLE IF NOT EXISTS DIE'
'(GUID VARCHAR(32) PRIMARY KEY,'
'NAME VARCHAR(64),'
'SIZE INT NOT NULL,'
'QTY INT NOT NULL,'
'PARENT_GUID VARCHAR(32) NOT NULL)'
)
cursor.execute(
'CREATE TABLE IF NOT EXISTS DICE_COLLECTION'
'(GUID VARCHAR(32) PRIMARY KEY,'
'CATEGORY VARCHAR(64) NOT NULL,'
'GRP VARCHAR(64),'
'PARENT_GUID VARCHAR(32) NOT NULL)'
)
cursor.execute(
'CREATE TABLE IF NOT EXISTS RESOURCE'
'(GUID VARCHAR(32) PRIMARY KEY,'
'CATEGORY VARCHAR(64) NOT NULL,'
'NAME VARCHAR(64) NOT NULL,'
'QTY INT NOT NULL,'
'PARENT_GUID VARCHAR(64) NOT NULL)'
)
# Classes and functions follow.
class CortexError(Exception):
    """Exception for command and rules errors specific to this bot.

    ``message`` is a ``str.format`` template; ``args`` are interpolated only
    when the error is rendered as a string.
    """

    def __init__(self, message, *args):
        self.message = message
        self.args = args

    def __str__(self):
        return self.message.format(*self.args)
def get_prefix(bot, message):
    """Resolve the command prefix for the channel a message came from.

    Falls back to '$' when the channel's game has no stored 'prefix' option.
    """
    channel_game = CortexGame(None, message.guild.id, message.channel.id)
    stored_prefix = channel_game.get_option('prefix')
    return stored_prefix if stored_prefix else '$'
def separate_dice_and_name(inputs):
    """Sort the words of an input string: dice notations become Die objects,
    everything else is capitalized and joined into a name."""
    dice = [Die(word) for word in inputs if DICE_EXPRESSION.fullmatch(word)]
    name_words = [word.lower().capitalize() for word in inputs if not DICE_EXPRESSION.fullmatch(word)]
    return {'dice': dice, 'name': ' '.join(name_words)}
def separate_numbers_and_name(inputs):
    """Sort the words of an input string: decimal tokens become ints,
    everything else is capitalized and joined into a name."""
    numbers = [int(word) for word in inputs if word.isdecimal()]
    name_words = [word.lower().capitalize() for word in inputs if not word.isdecimal()]
    return {'numbers': numbers, 'name': ' '.join(name_words)}
def fetch_all_dice_for_parent(db_parent):
    """Given an object from the database, return all the Die rows that belong
    to it, each marked as already persisted."""
    cursor.execute('SELECT * FROM DIE WHERE PARENT_GUID=:PARENT_GUID', {'PARENT_GUID': db_parent.db_guid})
    rows = cursor.fetchall()
    dice = []
    for row in rows:
        die = Die(name=row['NAME'], size=row['SIZE'], qty=row['QTY'])
        die.already_in_db(db_parent, row['GUID'])
        dice.append(die)
    return dice
def purge():
    """Scan for old unused games and remove them.

    A game counts as stale when its ACTIVITY timestamp is more than
    PURGE_DAYS old.  For each stale game the dependent rows (options, dice,
    dice collections, resources) are deleted before the game row itself.
    """
    logging.debug('Running the purge')
    purge_time = datetime.now(timezone.utc) - timedelta(days=PURGE_DAYS)
    # Collect the stale game guids first, because the same cursor is reused
    # for the delete statements below.
    games_to_purge = []
    cursor.execute('SELECT * FROM GAME WHERE ACTIVITY<:purge_time', {'purge_time':purge_time})
    fetching = True
    while fetching:
        row = cursor.fetchone()
        if row:
            games_to_purge.append(row['GUID'])
        else:
            fetching = False
    for game_guid in games_to_purge:
        cursor.execute('DELETE FROM GAME_OPTIONS WHERE PARENT_GUID=:guid', {'guid':game_guid})
        # Dice may hang off a dice collection rather than the game directly,
        # so gather the game's collections and clear their dice too.
        cursor.execute('SELECT * FROM DICE_COLLECTION WHERE PARENT_GUID=:guid', {'guid':game_guid})
        collections = []
        fetching = True
        while fetching:
            row = cursor.fetchone()
            if row:
                collections.append(row['GUID'])
            else:
                fetching = False
        for collection_guid in collections:
            cursor.execute('DELETE FROM DIE WHERE PARENT_GUID=:guid', {'guid':collection_guid})
        cursor.execute('DELETE FROM DIE WHERE PARENT_GUID=:guid', {'guid':game_guid})
        cursor.execute('DELETE FROM DICE_COLLECTION WHERE PARENT_GUID=:guid', {'guid':game_guid})
        cursor.execute('DELETE FROM RESOURCE WHERE PARENT_GUID=:guid', {'guid':game_guid})
        cursor.execute('DELETE FROM GAME WHERE GUID=:guid', {'guid':game_guid})
    # Single commit for the whole sweep.
    db.commit()
    logging.debug('Deleted %d games', len(games_to_purge))
class Die:
    """A single die, or a set of dice of the same size.

    A Die may live purely in memory, or be persisted: once store_in_db() or
    already_in_db() has been called, size/quantity changes are written
    through to the DIE table.
    """

    def __init__(self, expression=None, name=None, size=4, qty=1):
        """Create a die from explicit size/qty, or parse a '8'/'D8'/'2d6'-style expression.

        Raises CortexError when the expression is not valid dice notation.
        """
        self.name = name
        self.size = size
        self.qty = qty
        self.db_parent = None
        self.db_guid = None
        if expression:
            if not DICE_EXPRESSION.fullmatch(expression):
                raise CortexError(DIE_STRING_ERROR, expression)
            numbers = expression.lower().split('d')
            if len(numbers) == 1:
                # Bare size, e.g. '8'.
                self.size = int(numbers[0])
            else:
                # 'NdS' or 'dS'; a missing N means a single die.
                if numbers[0]:
                    self.qty = int(numbers[0])
                self.size = int(numbers[1])

    def store_in_db(self, db_parent):
        """Store this die in the database, under a given parent."""
        self.db_parent = db_parent
        self.db_guid = uuid.uuid1().hex
        cursor.execute('INSERT INTO DIE (GUID, NAME, SIZE, QTY, PARENT_GUID) VALUES (?, ?, ?, ?, ?)', (self.db_guid, self.name, self.size, self.qty, self.db_parent.db_guid))
        db.commit()

    def already_in_db(self, db_parent, db_guid):
        """Inform the Die that it is already in the database, under a given parent and guid."""
        self.db_parent = db_parent
        self.db_guid = db_guid

    def remove_from_db(self):
        """Remove this Die from the database (no-op when not persisted)."""
        if self.db_guid:
            cursor.execute('DELETE FROM DIE WHERE GUID=:guid', {'guid':self.db_guid})
            db.commit()

    def step_down(self):
        """Step down the die size; D4 is the floor."""
        if self.size > 4:
            self.update_size(self.size - 2)

    def step_up(self):
        """Step up the die size; D12 is the ceiling."""
        if self.size < 12:
            self.update_size(self.size + 2)

    def combine(self, other_die):
        """Combine this die with another die (as when applying a new stress die
        to existing stress): take the larger size, or step up when the new die
        is not larger."""
        if self.size < other_die.size:
            self.update_size(other_die.size)
        elif self.size < 12:
            self.update_size(self.size + 2)

    def update_size(self, new_size):
        """Change the size of the die, persisting the change when stored."""
        self.size = new_size
        if self.db_guid:
            cursor.execute('UPDATE DIE SET SIZE=:size WHERE GUID=:guid', {'size':self.size, 'guid':self.db_guid})
            db.commit()

    def update_qty(self, new_qty):
        """Change the quantity of the dice, persisting the change when stored."""
        self.qty = new_qty
        if self.db_guid:
            cursor.execute('UPDATE DIE SET QTY=:qty WHERE GUID=:guid', {'qty':self.qty, 'guid':self.db_guid})
            # Bug fix: this UPDATE was never committed, unlike every other
            # persisting mutator in this class, so quantity changes could be
            # lost if no later statement happened to commit.
            db.commit()

    def is_max(self):
        """Identify whether the Die is at the maximum allowed size."""
        return self.size == 12

    def output(self):
        """Return the Die as a string suitable for output in Discord."""
        return str(self)

    def __str__(self):
        """General purpose string representation of the Die, e.g. 'D8' or '3D6'."""
        if self.qty > 1:
            return '{0}D{1}'.format(self.qty, self.size)
        else:
            return 'D{0}'.format(self.size)
class NamedDice:
    """A collection of user-named single-die traits, suitable for complications and assets.

    Backed by one DICE_COLLECTION row (identified by category + optional
    group under a parent) whose child DIE rows are keyed by trait name.
    """

    def __init__(self, category, group, db_parent, db_guid=None):
        """Load (or lazily create) the collection row, then hydrate self.dice
        (name -> Die) from its persisted children."""
        self.dice = {}
        self.category = category
        self.group = group
        self.db_parent = db_parent
        if db_guid:
            self.db_guid = db_guid
        else:
            # Look up an existing collection; a missing group is stored as NULL,
            # which needs a different WHERE clause.
            if self.group:
                cursor.execute('SELECT * FROM DICE_COLLECTION WHERE PARENT_GUID=:PARENT_GUID AND CATEGORY=:category AND GRP=:group', {'PARENT_GUID':self.db_parent.db_guid, 'category':self.category, 'group':self.group})
            else:
                cursor.execute('SELECT * FROM DICE_COLLECTION WHERE PARENT_GUID=:PARENT_GUID AND CATEGORY=:category AND GRP IS NULL', {'PARENT_GUID':self.db_parent.db_guid, 'category':self.category})
            row = cursor.fetchone()
            if row:
                self.db_guid = row['GUID']
            else:
                # First use: create the collection row now.
                self.db_guid = uuid.uuid1().hex
                cursor.execute('INSERT INTO DICE_COLLECTION (GUID, CATEGORY, GRP, PARENT_GUID) VALUES (?, ?, ?, ?)', (self.db_guid, self.category, self.group, self.db_parent.db_guid))
                db.commit()
        fetched_dice = fetch_all_dice_for_parent(self)
        for die in fetched_dice:
            self.dice[die.name] = die

    def remove_from_db(self):
        """Remove these NamedDice (children first, then the collection row) from the database."""
        for name in list(self.dice):
            self.dice[name].remove_from_db()
        cursor.execute("DELETE FROM DICE_COLLECTION WHERE GUID=:db_guid", {'db_guid':self.db_guid})
        db.commit()
        self.dice = {}

    def is_empty(self):
        """Identify whether there are any dice in this object."""
        return not self.dice

    def add(self, name, die):
        """Add a new die with a given name, or combine with the existing die
        of that name; returns a user-facing status string."""
        die.name = name
        if not name in self.dice:
            die.store_in_db(self)
            self.dice[name] = die
            return 'New: ' + self.output(name)
        elif self.dice[name].is_max():
            # Already at D12: adding again would step beyond the cap.
            return 'This would step up beyond {0}'.format(self.output(name))
        else:
            self.dice[name].combine(die)
            return 'Raised to ' + self.output(name)

    def remove(self, name):
        """Remove a die with a given name; raises CortexError when absent."""
        if not name in self.dice:
            raise CortexError(NOT_EXIST_ERROR, self.category)
        output = 'Removed: ' + self.output(name)
        self.dice[name].remove_from_db()
        del self.dice[name]
        return output

    def step_up(self, name):
        """Step up the die with a given name; refuses to go past D12."""
        if not name in self.dice:
            raise CortexError(NOT_EXIST_ERROR, self.category)
        if self.dice[name].is_max():
            return 'This would step up beyond {0}'.format(self.output(name))
        self.dice[name].step_up()
        return 'Stepped up to ' + self.output(name)

    def step_down(self, name):
        """Step down the die with a given name; a D4 is removed entirely."""
        if not name in self.dice:
            raise CortexError(NOT_EXIST_ERROR, self.category)
        if self.dice[name].size == 4:
            self.remove(name)
            return 'Stepped down and removed: ' + name
        else:
            self.dice[name].step_down()
            return 'Stepped down to ' + self.output(name)

    def get_all_names(self):
        """Identify the names of all the dice in this object."""
        return list(self.dice)

    def output(self, name):
        """For a die of a given name, return a formatted description of that die."""
        return '{0} {1}'.format(self.dice[name].output(), name)

    def output_all(self, separator='\n'):
        """Return a formatted description of all the dice in this object."""
        output = ''
        prefix = ''
        for name in list(self.dice):
            output += prefix + self.output(name)
            prefix = separator
        return output
class DicePool:
"""A single-purpose collection of die sizes and quantities, suitable for doom pools, crisis pools, and growth pools."""
def __init__(self, roller, group, incoming_dice=[]):
self.roller = roller
self.group = group
self.dice = [None, None, None, None, None]
self.db_parent = None
self.db_guid = None
if incoming_dice:
self.add(incoming_dice)
    def store_in_db(self, db_parent):
        """Store this pool in the database as a DICE_COLLECTION row of
        category 'pool', under the given parent."""
        self.db_guid = uuid.uuid1().hex
        self.db_parent = db_parent
        logging.debug('going to store DicePool guid {0} grp {1} parent {2}'.format(self.db_guid, self.group, self.db_parent.db_guid))
        cursor.execute("INSERT INTO DICE_COLLECTION (GUID, CATEGORY, GRP, PARENT_GUID) VALUES (?, 'pool', ?, ?)", (self.db_guid, self.group, self.db_parent.db_guid))
        db.commit()
def already_in_db(self, db_parent, db_guid):
"""Inform the pool that it is already in the database, under a given parent and guid."""
self.db_parent = db_parent
self.db_guid = db_guid
def fetch_dice_from_db(self):
"""Get all the dice from the database that would belong to this pool."""
fetched_dice = fetch_all_dice_for_parent(self)
for die in fetched_dice:
self.dice[DIE_SIZES.index(die.size)] = die
def disconnect_from_db(self):
"""Prevent further changes to this pool from affecting the database."""
self.db_parent = None
self.db_guid = None
def is_empty(self):
"""Identify whether this pool is empty."""
return not self.dice
def remove_from_db(self):
"""Remove this entire pool from the database."""
for index in range(len(self.dice)):
if self.dice[index]:
self.dice[index].remove_from_db()
cursor.execute("DELETE FROM DICE_COLLECTION WHERE GUID=:db_guid", {'db_guid':self.db_guid})
db.commit()
self.dice = [None, None, None, None, None]
def add(self, dice):
"""Add dice to the pool."""
for die in dice:
index = DIE_SIZES.index(die.size)
if self.dice[index]:
self.dice[index].update_qty(self.dice[index].qty + die.qty)
else:
self.dice[index] = die
if self.db_parent and not die.db_parent:
die.store_in_db(self)
return self.output()
def remove(self, dice):
"""Remove dice from the pool."""
for die in dice:
index = DIE_SIZES.index(die.size)
if self.dice[index]:
stored_die = self.dice[index]
if die.qty > stored_die.qty:
raise CortexError(DIE_LACK_ERROR, stored_die.qty, stored_die.size)
stored_die.update_qty(stored_die.qty - die.qty)
if stored_die.qty == 0:
if self.db_parent:
stored_die.remove_from_db()
self.dice[index] = None
else:
raise CortexError(DIE_NONE_ERROR, die.size)
return self.output()
def temporary_copy(self):
"""Return a temporary, non-persisted copy of this dice pool."""
copy = DicePool(self.roller, self.group)
dice_copies = []
for die in self.dice:
if die:
dice_copies.append(Die(size=die.size, qty=die.qty))
copy.add(dice_copies)
return copy
def roll(self):
"""Roll all the dice in the pool, and return a formatted summary of the results."""
output = ''
separator = ''
results = [[],[]]
for die in self.dice:
if die:
output += '{0}D{1} : '.format(separator, die.size)
for num in range(die.qty)
roll = self.roller.roll(die.size)
results[0].append(die.size)
results[1].append(roll)
roll_str = str(roll)
if roll_str == '1':
roll_str = '**(1)**'
output += roll_str + ' '
separator = '\n'
if sum(results[1]) == len(results[1])
output += '{0}Best Total : N/A - Botch'.format(separator)
else
highest = max(results[1])
highest_removed = results[1].remove(index(highest))
if max(highest_removed) == 1
second_highest = 0
effect = 'D4'
elif max(highest_removed) != 1 and max(highest_removed.remove(index(max(highest_removed)))) == 1
second_highest = max(highest_removed)
effect = 'D4'
else
second_highest = max(highest_removed)
effect = 'D' + str(max(results[0].remove(index(highest).remove(index(second_highest)))))
highest_total = str(highest + second_highest)
output += '{0}Best Total : {1} Effect: {2}'.format(separator, highest_total, effect)
return output
def output(self):
"""Return a formatted list of the dice in this pool."""
if self.is_empty():
return 'empty'
output = ''
for die in self.dice:
if die:
output += die.output() + ' '
return output
class DicePools:
    """A collection of DicePool objects, keyed by group name."""
    def __init__(self, roller, db_parent):
        """Load every persisted pool belonging to ``db_parent``."""
        self.roller = roller
        self.pools = {}
        self.db_parent = db_parent
        cursor.execute('SELECT * FROM DICE_COLLECTION WHERE CATEGORY="pool" AND PARENT_GUID=:PARENT_GUID', {'PARENT_GUID':self.db_parent.db_guid})
        # Drain the shared cursor completely first: fetch_dice_from_db()
        # below issues new queries on the same cursor, which would otherwise
        # clobber this result set.
        stored = [
            {'db_guid': row['GUID'], 'grp': row['GRP'], 'parent_guid': row['PARENT_GUID']}
            for row in iter(cursor.fetchone, None)
        ]
        for record in stored:
            restored = DicePool(self.roller, record['grp'])
            restored.already_in_db(record['parent_guid'], record['db_guid'])
            restored.fetch_dice_from_db()
            self.pools[restored.group] = restored
    def is_empty(self):
        """Report whether any pools exist at all."""
        return not self.pools
    def remove_from_db(self):
        """Delete every pool from the database and forget them."""
        for pool in list(self.pools.values()):
            pool.remove_from_db()
        self.pools = {}
    def add(self, group, dice):
        """Add dice to the named pool, creating (and persisting) it if new."""
        if group not in self.pools:
            fresh = DicePool(self.roller, group)
            fresh.store_in_db(self.db_parent)
            self.pools[group] = fresh
        self.pools[group].add(dice)
        return '{0}: {1}'.format(group, self.pools[group].output())
    def remove(self, group, dice):
        """Remove dice from the named pool; raises CortexError when absent."""
        if group not in self.pools:
            raise CortexError(NOT_EXIST_ERROR, 'pool')
        self.pools[group].remove(dice)
        return '{0}: {1}'.format(group, self.pools[group].output())
    def clear(self, group):
        """Delete one entire pool; raises CortexError when absent."""
        if group not in self.pools:
            raise CortexError(NOT_EXIST_ERROR, 'pool')
        doomed = self.pools[group]
        doomed.remove_from_db()
        del self.pools[group]
        return 'Cleared {0} pool.'.format(group)
    def temporary_copy(self, group):
        """Return an independent, non-persistent copy of the named pool."""
        if group not in self.pools:
            raise CortexError(NOT_EXIST_ERROR, 'pool')
        return self.pools[group].temporary_copy()
    def roll(self, group):
        """Roll every die in the named pool and return the formatted results.

        NOTE(review): unlike the other accessors this assumes the pool
        exists; a missing group raises KeyError rather than CortexError.
        """
        return self.pools[group].roll()
    def output(self):
        """Summarize every pool, one per line."""
        lines = ['{0}: {1}'.format(key, pool.output()) for key, pool in self.pools.items()]
        return '\n'.join(lines)
class Resources:
    """Holds simple quantity-based resources, like plot points."""
    def __init__(self, category, db_parent):
        """Load all persisted resources of this category under ``db_parent``."""
        self.resources = {}
        self.category = category
        self.db_parent = db_parent
        cursor.execute("SELECT * FROM RESOURCE WHERE PARENT_GUID=:PARENT_GUID AND CATEGORY=:category", {'PARENT_GUID':self.db_parent.db_guid, 'category':self.category})
        # cursor.fetchone() returns None when exhausted, so the sentinel form
        # of iter() walks the full result set.
        for row in iter(cursor.fetchone, None):
            self.resources[row['NAME']] = {'qty':row['QTY'], 'db_guid':row['GUID']}
    def is_empty(self):
        """Report whether any resources are stored here."""
        return not self.resources
    def remove_from_db(self):
        """Remove these resources from the database and forget them."""
        params = [{'db_guid': entry['db_guid']} for entry in self.resources.values()]
        cursor.executemany("DELETE FROM RESOURCE WHERE GUID=:db_guid", params)
        db.commit()
        self.resources = {}
    def add(self, name, qty=1):
        """Grant ``qty`` resources to ``name``, creating the record if new."""
        if name in self.resources:
            entry = self.resources[name]
            entry['qty'] += qty
            cursor.execute("UPDATE RESOURCE SET QTY=:qty WHERE GUID=:db_guid", {'qty':entry['qty'], 'db_guid':entry['db_guid']})
        else:
            db_guid = uuid.uuid1().hex
            self.resources[name] = {'qty':qty, 'db_guid':db_guid}
            cursor.execute("INSERT INTO RESOURCE (GUID, CATEGORY, NAME, QTY, PARENT_GUID) VALUES (?, ?, ?, ?, ?)", (db_guid, self.category, name, qty, self.db_parent.db_guid))
        db.commit()
        return self.output(name)
    def remove(self, name, qty=1):
        """Spend ``qty`` resources from ``name``; raises CortexError when short."""
        if name not in self.resources:
            raise CortexError(HAS_NONE_ERROR, name, self.category)
        entry = self.resources[name]
        if entry['qty'] < qty:
            raise CortexError(HAS_ONLY_ERROR, name, entry['qty'], self.category)
        entry['qty'] -= qty
        cursor.execute("UPDATE RESOURCE SET QTY=:qty WHERE GUID=:db_guid", {'qty':entry['qty'], 'db_guid':entry['db_guid']})
        db.commit()
        return self.output(name)
    def clear(self, name):
        """Drop ``name`` from the catalog entirely."""
        if name not in self.resources:
            raise CortexError(HAS_NONE_ERROR, name, self.category)
        cursor.execute("DELETE FROM RESOURCE WHERE GUID=:db_guid", {'db_guid':self.resources[name]['db_guid']})
        db.commit()
        del self.resources[name]
        return 'Cleared {0} from {1} list.'.format(name, self.category)
    def output(self, name):
        """Format the quantity held by ``name``."""
        return '{0}: {1}'.format(name, self.resources[name]['qty'])
    def output_all(self):
        """Summarize every resource holder, one per line."""
        return '\n'.join(self.output(name) for name in self.resources)
class GroupedNamedDice:
    """Holds named dice that are separated by groups, such as mental and physical stress (the dice names) assigned to characters (the dice groups)."""
    def __init__(self, category, db_parent):
        """Load every persisted group of this category under ``db_parent``."""
        self.groups = {}
        self.category = category
        self.db_parent = db_parent
        cursor.execute("SELECT * FROM DICE_COLLECTION WHERE PARENT_GUID=:parent_guid AND CATEGORY=:category", {'parent_guid':self.db_parent.db_guid, 'category':self.category})
        # Drain the shared cursor before NamedDice issues its own queries.
        guid_by_group = {}
        for row in iter(cursor.fetchone, None):
            guid_by_group[row['GRP']] = row['GUID']
        for grp, guid in guid_by_group.items():
            self.groups[grp] = NamedDice(self.category, grp, self.db_parent, db_guid=guid)
    def is_empty(self):
        """Report whether any dice are held yet."""
        return not self.groups
    def remove_from_db(self):
        """Delete every group's dice from the database and forget them."""
        for held in list(self.groups.values()):
            held.remove_from_db()
        self.groups = {}
    def add(self, group, name, die):
        """Add a named die to a group, creating the group if needed."""
        if group not in self.groups:
            self.groups[group] = NamedDice(self.category, group, self.db_parent)
        return self.groups[group].add(name, die)
    def remove(self, group, name):
        """Remove a named die from a group; raises CortexError when absent."""
        if group not in self.groups:
            raise CortexError(HAS_NONE_ERROR, group, self.category)
        return self.groups[group].remove(name)
    def clear(self, group):
        """Drop every die from a group; raises CortexError when absent."""
        if group not in self.groups:
            raise CortexError(HAS_NONE_ERROR, group, self.category)
        self.groups[group].remove_from_db()
        del self.groups[group]
        return 'Cleared all {0} for {1}.'.format(self.category, group)
    def step_up(self, group, name):
        """Step up the named die within a group; raises CortexError when absent."""
        if group not in self.groups:
            raise CortexError(HAS_NONE_ERROR, group, self.category)
        return self.groups[group].step_up(name)
    def step_down(self, group, name):
        """Step down the named die within a group; raises CortexError when absent."""
        if group not in self.groups:
            raise CortexError(HAS_NONE_ERROR, group, self.category)
        return self.groups[group].step_down(name)
    def output(self, group):
        """Format all the dice within one group on a single line."""
        held = self.groups[group]
        if held.is_empty():
            return '{0}: None'.format(group)
        return '{0}: {1}'.format(group, held.output_all(separator=', '))
    def output_all(self):
        """Summarize every group, one per line."""
        return '\n'.join(self.output(group) for group in self.groups)
class CortexGame:
    """All information for a game, within a single server and channel."""
    def __init__(self, roller, server, channel):
        """Load the game row for this server/channel, creating one if absent."""
        self.roller = roller
        self.pinned_message = None
        cursor.execute('SELECT * FROM GAME WHERE SERVER=:server AND CHANNEL=:channel', {"server":server, "channel":channel})
        existing = cursor.fetchone()
        if existing:
            self.db_guid = existing['GUID']
        else:
            self.db_guid = uuid.uuid1().hex
            cursor.execute('INSERT INTO GAME (GUID, SERVER, CHANNEL, ACTIVITY) VALUES (?, ?, ?, ?)', (self.db_guid, server, channel, datetime.now(timezone.utc)))
            db.commit()
        self.new()
    def new(self):
        """Set up new, empty trait collections for the game."""
        self.complications = NamedDice('complication', None, self)
        self.assets = NamedDice('asset', None, self)
        self.pools = DicePools(self.roller, self)
        self.plot_points = Resources('plot points', self)
        self.stress = GroupedNamedDice('stress', self)
        self.xp = Resources('xp', self)
    def clean(self):
        """Reset and erase every trait collection held by the game."""
        for trait in (self.complications, self.assets, self.pools,
                      self.plot_points, self.stress, self.xp):
            trait.remove_from_db()
    def output(self):
        """Build a markdown report covering all of the game's traits."""
        report = '**Cortex Game Information**\n'
        # (heading, emptiness probe, renderer) in display order.
        sections = (
            ('**Assets**', self.assets.is_empty, self.assets.output_all),
            ('**Complications**', self.complications.is_empty, self.complications.output_all),
            ('**Stress**', self.stress.is_empty, self.stress.output_all),
            ('**Plot Points**', self.plot_points.is_empty, self.plot_points.output_all),
            ('**Dice Pools**', self.pools.is_empty, self.pools.output),
            ('**Experience Points**', self.xp.is_empty, self.xp.output_all),
        )
        for heading, is_empty, render in sections:
            if not is_empty():
                report += '\n' + heading + '\n' + render() + '\n'
        return report
    def get_option(self, key):
        """Fetch a stored per-game option value, or None when unset."""
        cursor.execute('SELECT * FROM GAME_OPTIONS WHERE PARENT_GUID=:game_guid AND KEY=:key', {'game_guid':self.db_guid, 'key':key})
        row = cursor.fetchone()
        return row['VALUE'] if row else None
    def set_option(self, key, value):
        """Insert or update a per-game option, then commit."""
        if not self.get_option(key):
            new_guid = uuid.uuid1().hex
            cursor.execute('INSERT INTO GAME_OPTIONS (GUID, KEY, VALUE, PARENT_GUID) VALUES (?, ?, ?, ?)', (new_guid, key, value, self.db_guid))
        else:
            cursor.execute('UPDATE GAME_OPTIONS SET VALUE=:value where KEY=:key and PARENT_GUID=:game_guid', {'value':value, 'key':key, 'game_guid':self.db_guid})
        db.commit()
    def update_activity(self):
        """Record the current time as this game's latest activity."""
        cursor.execute('UPDATE GAME SET ACTIVITY=:now WHERE GUID=:db_guid', {'now':datetime.now(timezone.utc), 'db_guid':self.db_guid})
        db.commit()
class Roller:
    """Generates random die rolls and remembers the frequency of results."""
    def __init__(self):
        # results[size][face - 1] counts how many times `face` came up on a
        # D`size`.  DIE_SIZES is defined earlier in this file (not shown here).
        self.results = {}
        for size in DIE_SIZES:
            self.results[size] = [0] * size
    def roll(self, size):
        """Roll a die of a given size and return the result (1..size)."""
        face = random.SystemRandom().randrange(1, int(size) + 1)
        self.results[size][face - 1] += 1
        return face
    def output(self):
        """Return a report of die roll frequencies."""
        total = 0
        frequency = ''
        separator = ''
        for size in self.results:
            subtotal = sum(self.results[size])
            total += subtotal
            # BUG FIX: the line separator used to be emitted *inside* the bold
            # markers ('**{sep}D{size}**'), which produced '**\nD8**' from the
            # second line onward and broke Discord's markdown rendering.
            # Emit the separator before the bold span instead.
            frequency += '{0}**D{1}** : {2} rolls'.format(separator, size, subtotal)
            separator = '\n'
            if subtotal > 0:
                for face in range(1, size + 1):
                    frequency += ' : **{0}** {1}x {2}%'.format(
                        face,
                        self.results[size][face - 1],
                        round(float(self.results[size][face - 1]) / float(subtotal) * 100.0, 1))
        output = (
            '**Randomness**\n'
            'The bot has rolled {0} dice since starting up.\n'
            '\n'
            'Roll frequency statistics:\n'
            '{1}'
        ).format(total, frequency)
        return output
class CortexPal(commands.Cog):
    """This cog encapsulates the commands and state of the bot."""
    def __init__(self, bot):
        """Initialize."""
        self.bot = bot
        # List of [game_key, CortexGame] pairs; game_key is
        # [guild id, channel id].  Cleared by the periodic purge below.
        self.games = []
        self.startup_time = datetime.now(timezone.utc)
        self.last_command_time = None
        self.roller = Roller()
    def get_game_info(self, context):
        """Match a server and channel to a Cortex game."""
        game_info = None
        game_key = [context.guild.id, context.message.channel.id]
        # Linear scan of the cached games; the cache is small and is emptied
        # daily by on_command_completion().
        for existing_game in self.games:
            if game_key == existing_game[0]:
                game_info = existing_game[1]
        if not game_info:
            # No cached game for this channel: load (or create) it from the
            # database and cache it.
            game_info = CortexGame(self.roller, context.guild.id, context.message.channel.id)
            self.games.append([game_key, game_info])
        return game_info
    @commands.Cog.listener()
    async def on_command_error(self, ctx, error):
        """Intercepts any exceptions we haven't specifically caught elsewhere."""
        logging.error(error)
        if isinstance(error, commands.CommandNotFound):
            await ctx.send(UNKNOWN_COMMAND_ERROR)
        else:
            await ctx.send(UNEXPECTED_ERROR)
    @commands.Cog.listener()
    async def on_command_completion(self, ctx):
        """After every command, determine whether we want to run a purge."""
        run_purge = False
        now = datetime.now(timezone.utc)
        if self.last_command_time:
            # Run purge after midnight
            if now.day != self.last_command_time.day:
                run_purge = True
        else:
            # Run purge on first command after startup
            run_purge = True
        if run_purge:
            # purge() is defined elsewhere in this file; dropping self.games
            # forces games to be reloaded from the database afterwards.
            purge()
            self.games = []
        self.last_command_time = now
    @commands.command()
    async def info(self, ctx):
        """Display all game information."""
        game = self.get_game_info(ctx)
        game.update_activity()
        await ctx.send(game.output())
    @commands.command()
    async def pin(self, ctx):
        """Pin a message to the channel to hold game information."""
        # Unpin any previous game-information message posted by this bot.
        pins = await ctx.channel.pins()
        for pin in pins:
            if pin.author == self.bot.user:
                await pin.unpin()
        game = self.get_game_info(ctx)
        game.update_activity()
        game.pinned_message = await ctx.send(game.output())
        await game.pinned_message.pin()
    @commands.command()
    async def comp(self, ctx, *args):
        """
        Adjust complications.
        For example:
        $comp add 6 cloud of smoke (creates a D6 Cloud Of Smoke complication)
        $comp stepup confused (steps up the Confused complication)
        $comp stepdown dazed (steps down the Dazed complication)
        $comp remove sun in your eyes (removes the Sun In Your Eyes complication)
        """
        logging.info("comp command invoked")
        try:
            if not args:
                await ctx.send_help("comp")
            else:
                output = ''
                game = self.get_game_info(ctx)
                game.update_activity()
                # args[0] is the subcommand; the rest is dice + name.
                separated = separate_dice_and_name(args[1:])
                dice = separated['dice']
                name = separated['name']
                update_pin = False
                if args[0] in ADD_SYNONYMS:
                    # Adding requires exactly one single die.
                    if not dice:
                        raise CortexError(DIE_MISSING_ERROR)
                    elif len(dice) > 1:
                        raise CortexError(DIE_EXCESS_ERROR)
                    elif dice[0].qty > 1:
                        raise CortexError(DIE_EXCESS_ERROR)
                    output = game.complications.add(name, dice[0])
                    update_pin = True
                # NOTE(review): 'REMOVE_SYNOYMS' matches the (misspelled)
                # constant name defined elsewhere in this file.
                elif args[0] in REMOVE_SYNOYMS:
                    output = game.complications.remove(name)
                    update_pin = True
                elif args[0] in UP_SYNONYMS:
                    output = game.complications.step_up(name)
                    update_pin = True
                elif args[0] in DOWN_SYNONYMS:
                    output = game.complications.step_down(name)
                    update_pin = True
                else:
                    raise CortexError(INSTRUCTION_ERROR, args[0], '$comp')
                if update_pin and game.pinned_message:
                    await game.pinned_message.edit(content=game.output())
                await ctx.send(output)
        except CortexError as err:
            await ctx.send(err)
        # Broad catch: log the traceback and report a generic error to the user.
        except:
            logging.error(traceback.format_exc())
            await ctx.send(UNEXPECTED_ERROR)
    @commands.command()
    async def pp(self, ctx, *args):
        """
        Adjust plot points.
        For example:
        $pp add alice 3 (gives Alice 3 plot points)
        $pp remove alice (spends one of Alice's plot points)
        $pp clear alice (clears Alice from plot point lists)
        """
        logging.info("pp command invoked")
        try:
            if not args:
                await ctx.send_help("pp")
            else:
                output = ''
                update_pin = False
                game = self.get_game_info(ctx)
                game.update_activity()
                separated = separate_numbers_and_name(args[1:])
                name = separated['name']
                # Quantity defaults to 1 when the user gives no number.
                qty = 1
                if separated['numbers']:
                    qty = separated['numbers'][0]
                if args[0] in ADD_SYNONYMS:
                    output = 'Plot points for ' + game.plot_points.add(name, qty)
                    update_pin = True
                elif args[0] in REMOVE_SYNOYMS:
                    output = 'Plot points for ' + game.plot_points.remove(name, qty)
                    update_pin = True
                elif args[0] in CLEAR_SYNONYMS:
                    output = game.plot_points.clear(name)
                    update_pin = True
                else:
                    raise CortexError(INSTRUCTION_ERROR, args[0], '$pp')
                if update_pin and game.pinned_message:
                    await game.pinned_message.edit(content=game.output())
                await ctx.send(output)
        except CortexError as err:
            await ctx.send(err)
        except:
            logging.error(traceback.format_exc())
            await ctx.send(UNEXPECTED_ERROR)
    @commands.command()
    async def roll(self, ctx, *args):
        """
        Roll some dice.
        For example:
        $roll 12 (rolls a D12)
        $roll 4 3d8 10 10 (rolls a D4, 3D8, and 2D10)
        You may include your trait names. The command will ignore any words that don't look like dice.
        For example:
        $roll D6 Mind D10 Navigation D6 Pirate (rolls 2D6 and a D10, ignoring the trait names)
        """
        logging.info("roll command invoked")
        # NOTE(review): 'results' is never used in this command; candidate
        # for removal.
        results = {}
        try:
            if not args:
                await ctx.send_help("roll")
            else:
                separated = separate_dice_and_name(args)
                ignored_strings = separated['name']
                dice = separated['dice']
                # The triple-quoted string below is disabled code kept by the
                # original author (would have echoed the ignored words).
                """
                ignored_line = ''
                if ignored_strings:
                    ignored_line = '\n*Ignored: {0}*'.format(ignored_strings)
                """
                # A throwaway, non-persisted pool just for this roll.
                pool = DicePool(self.roller, None, incoming_dice=dice)
                echo_line = 'Rolling: {0}\n'.format(pool.output())
                await ctx.send(echo_line + pool.roll())
        except CortexError as err:
            await ctx.send(err)
        except:
            logging.error(traceback.format_exc())
            await ctx.send(UNEXPECTED_ERROR)
    @commands.command()
    async def pool(self, ctx, *args):
        """
        Adjust dice pools.
        For example:
        $pool add doom 6 2d8 (gives the Doom pool a D6 and 2D8)
        $pool remove doom 10 (spends a D10 from the Doom pool)
        $pool roll doom (rolls the Doom pool)
        $pool roll doom 2d6 10 (rolls the Doom pool and adds 2D6 and a D10)
        $pool clear doom (clears the entire Doom pool)
        """
        logging.info("pool command invoked")
        try:
            if not args:
                await ctx.send_help("pool")
            else:
                output = ''
                update_pin = False
                game = self.get_game_info(ctx)
                game.update_activity()
                separated = separate_dice_and_name(args[1:])
                dice = separated['dice']
                name = separated['name']
                if args[0] in ADD_SYNONYMS:
                    output = game.pools.add(name, dice)
                    update_pin = True
                elif args[0] in REMOVE_SYNOYMS:
                    output = game.pools.remove(name, dice)
                    update_pin = True
                elif args[0] in CLEAR_SYNONYMS:
                    output = game.pools.clear(name)
                    update_pin = True
                elif args[0] == 'roll':
                    # Roll a copy so extra dice passed on the command line do
                    # not permanently join the stored pool.
                    temp_pool = game.pools.temporary_copy(name)
                    temp_pool.add(dice)
                    output = temp_pool.roll()
                else:
                    raise CortexError(INSTRUCTION_ERROR, args[0], '$pool')
                if update_pin and game.pinned_message:
                    await game.pinned_message.edit(content=game.output())
                await ctx.send(output)
        except CortexError as err:
            await ctx.send(err)
        except:
            logging.error(traceback.format_exc())
            await ctx.send(UNEXPECTED_ERROR)
    @commands.command()
    async def stress(self, ctx, *args):
        """
        Adjust stress.
        For example:
        $stress add amy 8 (gives Amy D8 general stress)
        $stress add ben mental 6 (gives Ben D6 Mental stress)
        $stress stepup cat social (steps up Cat's Social stress)
        $stress stepdown doe physical (steps down Doe's Physical stress)
        $stress remove eve psychic (removes Eve's Psychic stress)
        $stress clear fin (clears all of Fin's stress)
        """
        logging.info("stress command invoked")
        try:
            if not args:
                await ctx.send_help("stress")
            else:
                output = ''
                update_pin = False
                game = self.get_game_info(ctx)
                game.update_activity()
                separated = separate_dice_and_name(args[1:])
                dice = separated['dice']
                # First word is the character; the remainder (if any) is the
                # stress type, defaulting to general (UNTYPED_STRESS).
                split_name = separated['name'].split(' ', maxsplit=1)
                owner_name = split_name[0]
                if len(split_name) == 1:
                    stress_name = UNTYPED_STRESS
                else:
                    stress_name = split_name[1]
                if args[0] in ADD_SYNONYMS:
                    # Adding requires exactly one single die.
                    if not dice:
                        raise CortexError(DIE_MISSING_ERROR)
                    elif len(dice) > 1:
                        raise CortexError(DIE_EXCESS_ERROR)
                    elif dice[0].qty > 1:
                        raise CortexError(DIE_EXCESS_ERROR)
                    output = '{0} Stress for {1}'.format(game.stress.add(owner_name, stress_name, dice[0]), owner_name)
                    update_pin = True
                elif args[0] in REMOVE_SYNOYMS:
                    output = '{0} Stress for {1}'.format(game.stress.remove(owner_name, stress_name), owner_name)
                    update_pin = True
                elif args[0] in UP_SYNONYMS:
                    output = '{0} Stress for {1}'.format(game.stress.step_up(owner_name, stress_name), owner_name)
                    update_pin = True
                elif args[0] in DOWN_SYNONYMS:
                    output = '{0} Stress for {1}'.format(game.stress.step_down(owner_name, stress_name), owner_name)
                    update_pin = True
                elif args[0] in CLEAR_SYNONYMS:
                    output = game.stress.clear(owner_name)
                    update_pin = True
                else:
                    raise CortexError(INSTRUCTION_ERROR, args[0], '$stress')
                if update_pin and game.pinned_message:
                    await game.pinned_message.edit(content=game.output())
                await ctx.send(output)
        except CortexError as err:
            await ctx.send(err)
        except:
            logging.error(traceback.format_exc())
            await ctx.send(UNEXPECTED_ERROR)
    @commands.command()
    async def asset(self, ctx, *args):
        """
        Adjust assets.
        For example:
        $asset add 6 big wrench (adds a D6 Big Wrench asset)
        $asset stepup fast car (steps up the Fast Car asset)
        $asset stepdown nice outfit (steps down the Nice Outfit asset)
        $asset remove jetpack (removes the Jetpack asset)
        """
        logging.info("asset command invoked")
        output = ''
        try:
            if not args:
                await ctx.send_help("asset")
            else:
                output = ''
                game = self.get_game_info(ctx)
                game.update_activity()
                separated = separate_dice_and_name(args[1:])
                dice = separated['dice']
                name = separated['name']
                update_pin = False
                if args[0] in ADD_SYNONYMS:
                    # Adding requires exactly one single die.
                    if not dice:
                        raise CortexError(DIE_MISSING_ERROR)
                    elif len(dice) > 1:
                        raise CortexError(DIE_EXCESS_ERROR)
                    elif dice[0].qty > 1:
                        raise CortexError(DIE_EXCESS_ERROR)
                    output = game.assets.add(name, dice[0])
                    update_pin = True
                elif args[0] in REMOVE_SYNOYMS:
                    output = game.assets.remove(name)
                    update_pin = True
                elif args[0] in UP_SYNONYMS:
                    output = game.assets.step_up(name)
                    update_pin = True
                elif args[0] in DOWN_SYNONYMS:
                    output = game.assets.step_down(name)
                    update_pin = True
                else:
                    raise CortexError(INSTRUCTION_ERROR, args[0], '$asset')
                if update_pin and game.pinned_message:
                    await game.pinned_message.edit(content=game.output())
                await ctx.send(output)
        except CortexError as err:
            await ctx.send(err)
        except:
            logging.error(traceback.format_exc())
            await ctx.send(UNEXPECTED_ERROR)
    @commands.command()
    async def xp(self, ctx, *args):
        """
        Award experience points.
        For example:
        $xp add alice 3 (gives Alice 3 experience points)
        $xp remove alice (spends one of Alice's experience points)
        $xp clear alice (clears Alice from experience point lists)
        """
        logging.info("xp command invoked")
        try:
            if not args:
                await ctx.send_help("xp")
            else:
                output = ''
                update_pin = False
                game = self.get_game_info(ctx)
                game.update_activity()
                separated = separate_numbers_and_name(args[1:])
                name = separated['name']
                # Quantity defaults to 1 when the user gives no number.
                qty = 1
                if separated['numbers']:
                    qty = separated['numbers'][0]
                if args[0] in ADD_SYNONYMS:
                    output = 'Experience points for ' + game.xp.add(name, qty)
                    update_pin = True
                elif args[0] in REMOVE_SYNOYMS:
                    output = 'Experience points for ' + game.xp.remove(name, qty)
                    update_pin = True
                elif args[0] in CLEAR_SYNONYMS:
                    output = game.xp.clear(name)
                    update_pin = True
                else:
                    raise CortexError(INSTRUCTION_ERROR, args[0], '$xp')
                if update_pin and game.pinned_message:
                    await game.pinned_message.edit(content=game.output())
                await ctx.send(output)
        except CortexError as err:
            await ctx.send(err)
        except:
            logging.error(traceback.format_exc())
            await ctx.send(UNEXPECTED_ERROR)
    @commands.command()
    async def clean(self, ctx):
        """
        Reset all game data for a channel.
        """
        logging.info("clean command invoked")
        try:
            game = self.get_game_info(ctx)
            game.update_activity()
            game.clean()
            if game.pinned_message:
                await game.pinned_message.edit(content=game.output())
            await ctx.send('Cleaned up all game information.')
        except CortexError as err:
            await ctx.send(err)
        except:
            logging.error(traceback.format_exc())
            await ctx.send(UNEXPECTED_ERROR)
    @commands.command()
    async def report(self, ctx):
        """
        Report the bot's statistics.
        """
        start_formatted = self.startup_time.isoformat(sep=' ', timespec='seconds')
        last_formatted = '(no user commands yet)'
        if self.last_command_time:
            last_formatted = self.last_command_time.isoformat(sep=' ', timespec='seconds')
        output = (
            '**CortexPal Usage Report**\n'
            'Bot started up at UTC {0}.\n'
            'Last user command was at UTC {1}.\n'
            '\n'
        ).format(start_formatted, last_formatted)
        # Append die-roll frequency statistics from the shared Roller.
        output += self.roller.output()
        await ctx.send(output)
    @commands.command()
    async def option(self, ctx, *args):
        """
        Change the bot's optional behavior.
        For example:
        $option prefix ! (change the command prefix to ! instead of $)
        """
        game = self.get_game_info(ctx)
        game.update_activity()
        output = 'No such option.'
        try:
            if not args:
                await ctx.send_help("option")
            else:
                if args[0] == 'prefix':
                    if len(args[1]) > 1:
                        output = 'Prefix must be a single character.'
                    else:
                        game.set_option('prefix', args[1])
                        output = 'Prefix set to {0}'.format(args[1])
                await ctx.send(output)
        except CortexError as err:
            await ctx.send(err)
        except:
            logging.error(traceback.format_exc())
            await ctx.send(UNEXPECTED_ERROR)
# Set up bot.
# NOTE(review): config, get_prefix and ABOUT_TEXT are presumably defined
# earlier in this file (not visible in this chunk).
TOKEN = config['discord']['token']
bot = commands.Bot(command_prefix=get_prefix, description=ABOUT_TEXT)
# Start the bot.
logging.info("Bot startup")
bot.add_cog(CortexPal(bot))
# run() blocks until the bot shuts down.
bot.run(TOKEN)
| 37.451909 | 219 | 0.554525 |
0a99e79be27418dca49598f46e91b148eb2a23c2 | 84,736 | py | Python | robot/Cumulus/resources/NPSP.py | SFDO-Alliances/NPSP | 3711a3cf8e3124bc2d7e61644d6abecb4042004e | [
"BSD-3-Clause"
] | 413 | 2015-01-02T09:53:04.000Z | 2019-12-05T15:31:25.000Z | robot/Cumulus/resources/NPSP.py | SFDO-Alliances/NPSP | 3711a3cf8e3124bc2d7e61644d6abecb4042004e | [
"BSD-3-Clause"
] | 2,471 | 2015-01-02T03:33:55.000Z | 2019-12-13T17:55:10.000Z | robot/Cumulus/resources/NPSP.py | SFDO-Alliances/NPSP | 3711a3cf8e3124bc2d7e61644d6abecb4042004e | [
"BSD-3-Clause"
] | 296 | 2015-01-06T13:03:33.000Z | 2019-12-11T14:19:31.000Z | import logging
import warnings
import time
import random
import string
import re
from datetime import datetime
from datetime import timedelta
from dateutil.relativedelta import relativedelta
from robot.libraries.BuiltIn import RobotNotRunningError
from selenium.common.exceptions import ElementNotInteractableException
from selenium.common.exceptions import ElementClickInterceptedException
from selenium.common.exceptions import StaleElementReferenceException
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoSuchWindowException
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.keys import Keys
from SeleniumLibrary.errors import ElementNotFound
from simple_salesforce import SalesforceMalformedRequest
from simple_salesforce import SalesforceResourceNotFound
from selenium.webdriver import ActionChains
from cumulusci.robotframework.utils import selenium_retry
from cumulusci.robotframework.utils import capture_screenshot_on_error
from cumulusci.robotframework import locator_manager
from email.mime import text
from cumulusci.tasks.apex.anon import AnonymousApexTask
from cumulusci.core.config import TaskConfig
from tasks.salesforce_robot_library_base import SalesforceRobotLibraryBase
from BaseObjects import BaseNPSPPage
from locators_52 import npsp_lex_locators as locators_52
from locators_51 import npsp_lex_locators as locators_51
from locators_50 import npsp_lex_locators as locators_50
# Map Salesforce API versions (seasonal releases) to their locator modules.
locators_by_api_version = {
    52.0: locators_52, # summer '21
    51.0: locators_51, # spring '21
    50.0: locators_50 # winter '21
}
# will get populated in _init_locators
npsp_lex_locators = {}
# Matches a 15- or 18-character alphanumeric Salesforce record ID, optionally
# preceded by a URL-encoded slash ('%2F').
OID_REGEX = r"^(%2F)?([a-zA-Z0-9]{15,18})$"
@selenium_retry
class NPSP(BaseNPSPPage,SalesforceRobotLibraryBase):
ROBOT_LIBRARY_SCOPE = 'GLOBAL'
ROBOT_LIBRARY_VERSION = 1.0
def __init__(self, debug=False):
self.debug = debug
self.current_page = None
self._session_records = []
self.val=0
self.payment_list= []
# Turn off info logging of all http requests
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARN)
self._init_locators()
locator_manager.register_locators("npsp",npsp_lex_locators)
    def _init_locators(self):
        """Pick the locator set matching the org's latest supported API version.

        Queries the org's /services/data endpoint for available API versions
        and merges the matching locator dictionary into the module-level
        npsp_lex_locators.  Falls back to the newest bundled locator set when
        the version is unknown or when no test is running.
        """
        try:
            client = self.cumulusci.tooling
            # /services/data lists all API versions; the last entry is newest.
            response = client._call_salesforce(
                'GET', 'https://{}/services/data'.format(client.sf_instance))
            self.latest_api_version = float(response.json()[-1]['version'])
            if not self.latest_api_version in locators_by_api_version:
                # NOTE(review): %d truncates the float (e.g. 53.0 -> '53');
                # harmless for the warning but slightly lossy.
                warnings.warn("Could not find locator library for API %d" % self.latest_api_version)
                self.latest_api_version = max(locators_by_api_version.keys())
        except RobotNotRunningError:
            # We aren't part of a running test, likely because we are
            # generating keyword documentation. If that's the case, assume
            # the latest supported version
            self.latest_api_version = max(locators_by_api_version.keys())
        locators = locators_by_api_version[self.latest_api_version]
        npsp_lex_locators.update(locators)
def get_namespace_prefix(self, name):
parts = name.split('__')
if parts[-1] == 'c':
parts = parts[:-1]
if len(parts) > 1:
return parts[0] + '__'
else:
return ''
def get_npsp_namespace_prefix(self):
if not hasattr(self.cumulusci, '_describe_result'):
self.cumulusci._describe_result = self.cumulusci.sf.describe()
objects = self.cumulusci._describe_result['sobjects']
level_object = [o for o in objects if o['label'] == 'Level'][0]
return self.get_namespace_prefix(level_object['name'])
    def _loop_is_text_present(self,text, max_attempts=3):
        """This is a fix to handle staleelementreference exception. Waits for the text to be present and loops through till the text appears"""
        attempt = 1
        while True:
            try:
                return self.selenium.page_should_contain(text)
            except StaleElementReferenceException:
                # re-raise once the retry budget is exhausted
                if attempt == max_attempts:
                    raise
                attempt += 1
    def populate_campaign(self,loc,value):
        """This is a temporary keyword added to address difference in behaviour between summer19 and winter20 release"""
        self.search_field_by_value(loc, value)
        # debug output of the locator set in use
        print(self.latest_api_version)
        self.selenium.click_link(value)
def verify_button_disabled(self,loc):
"""Verifies the specified button is disabled"""
locator = npsp_lex_locators["lightning-button"].format(loc)
element = self.selenium.driver.find_element_by_xpath(locator)
self.selenium.element_should_be_disabled(element)
    def click_record_button(self, title):
        """ Pass title of the button to click the buttons on the records edit page. Usually save and cancel are the buttons seen.
        """
        locator = npsp_lex_locators['record']['button'].format(title)
        self.selenium.set_focus_to_element(locator)
        button = self.selenium.get_webelement(locator)
        button.click()
        # fixed wait for the save/cancel round-trip to complete
        time.sleep(5)
@capture_screenshot_on_error
def select_tab(self, title):
""" Switch between different tabs on a record page like Related, Details, News, Activity and Chatter
Pass title of the tab
"""
tab_found = False
locators = npsp_lex_locators["tabs"].values()
for i in locators:
locator = i.format(title)
if self.check_if_element_exists(locator):
print(locator)
buttons = self.selenium.get_webelements(locator)
for button in buttons:
print(button)
if button.is_displayed():
print("button displayed is {}".format(button))
self.salesforce._focus(button)
button.click()
time.sleep(5)
tab_found = True
break
assert tab_found, "tab not found"
    def click_special_related_list_button(self, heading, button_title):
        """ To Click on a related list button which would open up a new lightning page rather than a modal.
            Pass the list name and button name"""
        self.salesforce.load_related_list(heading)
        b_found = False
        locator = npsp_lex_locators["record"]["related"]["button"].format(
            heading, button_title
        )
        buttons = self.selenium.driver.find_elements_by_xpath(locator)
        for button in buttons:
            if button.is_displayed():
                # JS click avoids "element not interactable" failures on LEX pages
                self.selenium.driver.execute_script('arguments[0].click()', button)
                b_found = True
                break
        assert b_found, "{} related list with button {} not found.".format(heading, button_title)
    def wait_and_click_button(self, click_locator):
        """ Clicks on the button with locator 'click_locator'
            if it doesn't exist, repeat the click (loops for 3 times)
        """
        for i in range(3):
            self.builtin.log("Iteration: " + str(i))
            try:
                self.selenium.click_button(click_locator)
                return
            except Exception:
                # give the page time to render before retrying
                time.sleep(2)
        raise Exception(f"Click on element failed. Locator: {click_locator}")
    @capture_screenshot_on_error
    def click_related_list_dd_button(self, heading, dd_title, button_title):
        """ To Click on a related list dropdown button.
            Pass the list name, dd name and button name"""
        self.salesforce.load_related_list(heading)
        locator = npsp_lex_locators["record"]["related"]["button"].format(heading, dd_title)
        element = self.selenium.driver.find_element_by_xpath(locator)
        # JS clicks are used throughout to bypass overlay/interactability issues
        self.selenium.driver.execute_script('arguments[0].click()', element)
        time.sleep(1)
        loc=npsp_lex_locators["record"]["related"]["dd-link"].format(button_title)
        self.selenium.wait_until_element_is_visible(loc)
        element = self.selenium.driver.find_element_by_xpath(loc)
        self.selenium.driver.execute_script('arguments[0].click()', element)
    @capture_screenshot_on_error
    def click_flexipage_dropdown(self, title,value):
        """Click the lightning dropdown to open it and select value"""
        locator = npsp_lex_locators['record']['flexipage-list'].format(title)
        option=npsp_lex_locators['span'].format(value)
        self.selenium.wait_until_page_contains_element(locator)
        self.salesforce.scroll_element_into_view(locator)
        element = self.selenium.driver.find_element_by_xpath(locator)
        try:
            self.selenium.get_webelement(locator).click()
            self.wait_for_locator('flexipage-popup')
            self.salesforce.scroll_element_into_view(option)
            self.selenium.click_element(option)
        except Exception:
            # native click failed (stale element / intercepted click);
            # retry once with a JS click on the previously-resolved element
            self.builtin.sleep(1,"waiting for a second and retrying click again")
            self.selenium.driver.execute_script('arguments[0].click()', element)
            self.wait_for_locator('flexipage-popup')
            self.salesforce.scroll_element_into_view(option)
            self.selenium.click_element(option)
    def click_modal_footer_button(self,value):
        """Click the specified lightning button on modal footer"""
        # winter '21 (API 50.0) renders footer buttons differently and needs
        # the button-text locator; later releases use the shared helper
        if self.latest_api_version == 50.0:
            btnlocator = npsp_lex_locators["button-text"].format(value)
            self.salesforce.scroll_element_into_view(btnlocator)
            self.salesforce._jsclick(btnlocator)
        else:
            self.salesforce.click_modal_button(value)
def change_month(self, value):
"""To pick month in the date picker"""
locator = npsp_lex_locators['record']['month_pick'].format(value)
self.selenium.set_focus_to_element(locator)
self.selenium.get_webelement(locator).click()
    def select_row(self, value):
        """To select a row on object page based on name and open the dropdown"""
        locators = npsp_lex_locators['name']
        list_ele = self.selenium.get_webelements(locators)
        # NOTE(review): no break after a match — if several rows share the same
        # name, the dropdown is opened for each of them in turn
        for index, element in enumerate(list_ele):
            if element.text == value:
                # locator indexes are 1-based in XPath
                drop_down = npsp_lex_locators['locate_dropdown'].format(index + 1)
                self.selenium.get_webelement(drop_down).click()
                self.selenium.wait_until_page_contains("Delete")
    def select_related_row(self, value):
        """To select a row on object page based on name and open the dropdown"""
        locators = npsp_lex_locators['related_name']
        list_ele = self.selenium.get_webelements(locators)
        for index, element in enumerate(list_ele):
            if element.text == value:
                # locator indexes are 1-based in XPath
                drop_down = npsp_lex_locators['rel_loc_dd'].format(index + 1)
                self.selenium.get_webelement(drop_down).click()
                time.sleep(1)
def click_id(self, title):
locator=npsp_lex_locators['aff_id'].format(title)
self.selenium.get_webelement(locator).click()
def select_object_dropdown(self):
locator=npsp_lex_locators['object_dd']
self.selenium.get_webelement(locator).click()
    def check_status(self, acc_name):
        """Return the (id text, status text) pair for the affiliation row of
        *acc_name*. Also stores the id locator/text on self for get_id()."""
        aff_list = npsp_lex_locators['aff_status'].format(acc_name)
        aff_list_text=self.selenium.get_webelement(aff_list).text
        self.aff_id=npsp_lex_locators['aff_id'].format(acc_name)
        self.aff_id_text=self.selenium.get_webelement(self.aff_id).text
        return self.aff_id_text,aff_list_text
    def get_id(self):
        """Click the affiliation id captured by a prior check_status() call.

        NOTE(review): relies on self.aff_id_text being set by check_status.
        """
        locator=npsp_lex_locators['click_aff_id'].format(self.aff_id_text)
        self.selenium.get_webelement(locator).click()
    @selenium_retry
    @capture_screenshot_on_error
    def navigate_to_and_validate_field_value(self, field,status,value,section=None):
        """If status is 'contains' then the specified value should be present in the field
           'does not contain' then the specified value should not be present in the field
        """
        if section is not None:
            section="text:"+section
            self.salesforce.scroll_element_into_view(section)
        list_found = False
        # try every known field-confirm locator variant in turn
        locators = npsp_lex_locators["confirm"].values()
        if status == "contains":
            for i in locators:
                print("inside for loop")
                locator = i.format(field,value)
                print(locator)
                if self.check_if_element_exists(locator):
                    print(f"element exists {locator}")
                    actual_value=self.selenium.get_webelement(locator).text
                    print(f"actual value is {actual_value}")
                    assert value == actual_value, "Expected {} value to be {} but found {}".format(field,value, actual_value)
                    list_found=True
                    break
        if status == "does not contain":
            for i in locators:
                locator = i.format(field,value)
                if self.check_if_element_exists(locator):
                    print(f"locator is {locator}")
                    raise Exception(f"{field} should not contain value {value}")
                # no locator variant matched on this iteration — treat as verified
                list_found = True
        assert list_found, "locator not found"
    @capture_screenshot_on_error
    def verify_record(self, name):
        """ Checks for the record in the object page and raises (with a helpful
            message) if it does not appear within the default timeout.
        """
        locator=npsp_lex_locators['account_list'].format(name)
        self.selenium.wait_until_page_contains_element(locator, error="could not find "+name+" on the page")
def select_option(self, name):
"""selects various options in Contact>New opportunity page using name
"""
locator=npsp_lex_locators['dd_options'].format(name)
self.selenium.get_webelement(locator).click()
    def verify_related_list_items(self,list_name,value):
        """Verifies a specified related list has specified value(doesn't work if the list is in table format)"""
        self.salesforce.load_related_list(list_name)
        locator=npsp_lex_locators['related_list_items'].format(list_name,value)
        self.selenium.page_should_contain_element(locator)
def click_span_button(self,title):
"""clicks on the button under span tag"""
locator=npsp_lex_locators['span_button'].format(title)
self.selenium.get_webelement(locator).click()
    def header_field_value(self,title,value):
        """Validates if the specified header field has specified value"""
        locator= npsp_lex_locators['header_field_value'].format(title,value)
        self.selenium.page_should_contain_element(locator)
    def verify_header(self,value):
        """Validates header value"""
        locator= npsp_lex_locators['header'].format(value)
        self.selenium.page_should_contain_element(locator)
    @capture_screenshot_on_error
    def verify_related_list(self,list_name,status,name):
        """If status is 'contains' then the specified related list should contain name
           'does not contain' then the specified related list should not contain name"""
        locator = self.salesforce.get_locator('record.related.link', list_name, name)
        if status=="contains":
            self.selenium.page_should_contain_element(locator)
        elif status=="does not contain":
            self.selenium.page_should_not_contain_element(locator)
def fill_address_form(self, **kwargs):
"""Validates if the affiliated contacts have the added contact details enter Y for positive case and N for negative case"""
for label, value in kwargs.items():
locator= npsp_lex_locators['manage_hh_page']['address'].format(label,value)
if label=="Street":
locator = locator+"textarea"
self.selenium.get_webelement(locator).send_keys(value)
else:
locator = locator+"input"
self.selenium.get_webelement(locator).send_keys(value)
def fill_bge_form(self, **kwargs):
for label, value in kwargs.items():
if label=="Batch Description" or label == "custom_textarea":
locator= npsp_lex_locators['bge']['field-text'].format(label,value)
self.selenium.click_element(locator)
self.salesforce._populate_field(locator, value)
else:
locator= npsp_lex_locators['bge']['field-input'].format(label,value)
self.selenium.click_element(locator)
self.salesforce._populate_field(locator, value)
def verify_address_details(self,field,value,**kwargs):
"""Validates if the details page address field has specified value
Field is the The address type field we are trying to match to the Expected address Map that is sent through Kwargs"""
locator= npsp_lex_locators['detail_page']['address'].format(field)
street, city, country = self.selenium.get_webelements(locator)
status = None
for key, value in kwargs.items():
if street.text == kwargs.get("street") and city.text == kwargs.get("city") and country.text == kwargs.get("country"):
status = "pass"
else:
status = "fail"
if value.lower() == "contains":
assert status == "pass", "Expected address {} , {}, {} does not match".format(street.text,city.text,country.text)
    def validate_checkboxes(self,name,checkbox_title):
        """validates all 3 checkboxes for contact on manage hh page and returns locator for the checkbox thats required"""
        locator=npsp_lex_locators['manage_hh_page']['mhh_checkbox'].format(name,"fauxCBInformal")
        self.selenium.page_should_contain_element(locator)
        locator=npsp_lex_locators['manage_hh_page']['mhh_checkbox'].format(name,"fauxCBFormal")
        self.selenium.page_should_contain_element(locator)
        locator=npsp_lex_locators['manage_hh_page']['mhh_checkbox'].format(name,"fauxCBExName")
        self.selenium.page_should_contain_element(locator)
        # NOTE(review): when checkbox_title matches none of the branches below,
        # the fauxCBExName locator from the last check above is returned
        if checkbox_title == "Informal Greeting":
            locator=npsp_lex_locators['manage_hh_page']['mhh_checkbox'].format(name,"fauxCBInformal")
        elif checkbox_title == "Formal Greeting":
            locator=npsp_lex_locators['manage_hh_page']['mhh_checkbox'].format(name,"fauxCBFormal")
        elif checkbox_title.capitalize() == "Household Name":
            locator=npsp_lex_locators['manage_hh_page']['mhh_checkbox'].format(name,"fauxCBExName")
        return locator
    def check_field_value(self, title, value):
        """checks value of a field in details page(section without header)"""
        fv_found=False
        # try every known field-value locator variant in turn
        locators = npsp_lex_locators['detail_page']["field-value"].values()
        for i in locators:
            locator = i.format(title,value)
            if self.check_if_element_exists(locator):
                self.selenium.page_should_contain_element(locator)
                fv_found = True
                break
        assert fv_found, "{} with {} not found".format(title,value)
def click_managehh_button(self,title):
"""clicks on the new contact button on manage hh page"""
locator=npsp_lex_locators['manage_hh_page']['button'].format(title)
self.selenium.get_webelement(locator).click()
def click_managehh_link(self,title):
locator=npsp_lex_locators['manage_hh_page']['address_link'].format(title)
self.selenium.get_webelement(locator).click()
    def set_checkbutton_to(self,title,status):
        """If status is 'checked' then checks the box if its not already checked. Prints a warning msg if already checked
           'unchecked' then unchecks the box if its not already checked. Prints a warning msg if already unchecked
        """
        cb_found=False
        # try every known checkbox locator variant in turn
        locators = npsp_lex_locators["checkbox"].values()
        for i in locators:
            locator = i.format(title)
            if self.check_if_element_exists(locator):
                checkbox=self.selenium.get_webelement(locator)
                # toggle only when current state differs from the requested one
                if (status == 'checked' and checkbox.is_selected() == False) or (status == 'unchecked' and checkbox.is_selected() == True):
                    self.salesforce.scroll_element_into_view(locator)
                    self.salesforce._jsclick(locator)
                else:
                    self.builtin.log("This checkbox is already in the expected status", "WARN")
                cb_found = True
                break
        assert cb_found, "Checkbox not found"
def select_bge_checkbox(self,title):
locator=npsp_lex_locators['bge']['checkbox'].format(title)
self.selenium.get_webelement(locator).click()
    def populate_modal_field(self, title, value):
        """Populate the modal field labelled *title* with *value*."""
        locator=npsp_lex_locators['modal_field'].format(title,value)
        self.salesforce._populate_field(locator, value)
    def populate_field_with_id(self,id,value):
        """Populate the field with the given DOM id; the literal string
        'null' clears the field instead of typing."""
        locator=npsp_lex_locators['id'].format(id)
        if value == 'null':
            field = self.selenium.get_webelement(locator)
            self.salesforce._clear(field)
        else :
            self.salesforce._populate_field(locator, value)
    @capture_screenshot_on_error
    def validate_related_record_count(self,title,value):
        """Navigates to the Related tab and validates the record count for the specified title section"""
        self.select_tab("Related")
        self.salesforce.load_related_list(title)
        # LEX shows the count in parentheses, e.g. "(3)"
        exp_value="("+value+")"
        locator=npsp_lex_locators['record']['related']['check_occurrence'].format(title,exp_value)
        actual_value = self.selenium.get_element_attribute(locator, "title")
        assert exp_value == actual_value, "Expected value to be {} but found {}".format(
            exp_value, actual_value
        )
    # NOTE(review): keyword name is misspelled ("occurence") but is public API
    # used by existing robot suites, so it is kept as-is.
    def verify_occurence(self,title,value):
        """Assert the related list *title* shows a record count of *value*."""
        self.salesforce.load_related_list(title)
        time.sleep(1)
        locator=npsp_lex_locators['record']['related']['check_occurrence'].format(title,value)
        actual_value=self.selenium.get_webelement(locator).text
        # LEX shows the count in parentheses, e.g. "(3)"
        exp_value="("+value+")"
        assert exp_value == actual_value, "Expected value to be {} but found {}".format(
            exp_value, actual_value
        )
    def check_record_related_item(self,title,value):
        """Verifies that the given value is displayed under the related list identified by title on a record view page"""
        self.salesforce.load_related_list(title)
        locator=npsp_lex_locators['record']['related']['item'].format(title,value)
        self.selenium.wait_until_page_contains_element(locator)
        actual_value=self.selenium.get_webelement(locator).text
        assert value == actual_value, "Expected value to be {} but found {}".format(
            value, actual_value
        )
def select_related_dropdown(self,title):
"""Clicks on the dropdown next to Related List"""
locator=npsp_lex_locators['record']['related']['drop-down'].format(title)
self.selenium.get_webelement(locator).click()
def get_header_date_value(self,title):
"""Validates if the specified header field has specified value"""
locator= npsp_lex_locators['header_datepicker'].format(title)
date=self.selenium.get_webelement(locator).text
return date
    def get_main_header(self):
        """Return the text of the page's main header, trying each known
        header locator variant until one matches."""
        header_found = False
        locators = npsp_lex_locators["main-header"].values()
        for locator in locators:
            if self.check_if_element_exists(locator):
                header = self.selenium.get_webelement(locator).text
                header_found = True
                return header
        # only reached when no locator matched
        assert header_found, "Header with the provided locator not found"
    def verify_contact_role(self,name,role):
        """verifies the contact role on opportunity page"""
        locator=npsp_lex_locators['opportunity']['contact_role'].format(name,role)
        self.selenium.page_should_contain_element(locator)
def select_relatedlist(self,title):
"""click on the related list to open it"""
locator=npsp_lex_locators['record']['related']['title'].format(title)
element = self.selenium.driver.find_element_by_xpath(locator)
self.selenium.driver.execute_script('arguments[0].click()', element)
    def verify_related_list_field_values(self, listname=None, **kwargs):
        """verifies the values in the related list objects page"""
        if listname is not None:
            self.selenium.wait_until_page_contains(listname)
            self.select_relatedlist(listname)
        # each kwarg is a field-name -> expected-value pair
        for name, value in kwargs.items():
            locator= npsp_lex_locators['record']['related']['field_value'].format(name,value)
            self.selenium.wait_until_page_contains_element(locator,error="Could not find the "+ name +" with value " + value + " on the page")
    def verify_related_object_field_values(self, rel_object,**kwargs):
        """verifies the specified field,value pairs in the related object page (table format)"""
        self.salesforce.load_related_list(rel_object)
        self.select_relatedlist(rel_object)
        for name, value in kwargs.items():
            locator= npsp_lex_locators['object']['field-value'].format(name,value)
            self.selenium.wait_until_page_contains_element(locator,error="Could not find the "+ name +" with value " + value + " on the page")
    def page_contains_record(self,title):
        """Waits until the specified record is NOT present on the page.

        NOTE(review): despite the name, this keyword asserts absence — the
        original docstring claimed presence, but the code waits for the record
        element to disappear. Existing callers rely on this behavior.
        """
        locator= npsp_lex_locators['object']['record'].format(title)
        self.selenium.wait_until_page_does_not_contain_element(locator)
    def click_special_object_button(self, title):
        """Clicks a button in an object's actions but doesn't wait for a model to open"""
        locator = npsp_lex_locators['object']['button'].format(title)
        self.selenium.wait_until_element_is_visible(locator,error="Button "+ title +" not found on the page")
        self.selenium.get_webelement(locator).click()
    def check_related_list_values(self,list_name,*args):
        """Verifies that each value in *args appears in the custom related list"""
        self.salesforce.load_related_list(list_name)
        for value in args:
            locator = npsp_lex_locators['check_related_list_item'].format(list_name,value)
            self.selenium.page_should_contain_element(locator)
    def check_activity_tasks(self, *args):
        """verifies that the specified tasks are present under activity tab """
        for value in args:
            locator = npsp_lex_locators['engagement_plan']['tasks'].format(value)
            self.selenium.page_should_contain_element(locator)
def select_app_launcher_link(self,title):
locator = npsp_lex_locators['app_launcher']['select-option'].format(title)
self.selenium.get_webelement(locator).click()
time.sleep(1)
def click_on_first_record(self):
"""selects first record of the page"""
locator = npsp_lex_locators['select_one_record']
self.selenium.get_webelement(locator).click()
time.sleep(1)
def select_search(self, index, value):
""""""
locator = npsp_lex_locators["click_search"].format(index)
loc_value = self.selenium.get_webelement(locator).send_keys(value)
loc = self.selenium.get_webelement(locator)
#loc.send_keys(Keys.TAB+ Keys.RETURN)
time.sleep(1)
    def enter_gau(self, value):
        """Type *value* into the GAU lookup search box and click Go!."""
        # "lksrch" is the fixed DOM id of the classic lookup search input
        id = "lksrch"
        locator = npsp_lex_locators["id"].format(id)
        loc = self.selenium.get_webelement(locator)
        loc.send_keys(value)
        self.selenium.get_webelement("//*[@title='Go!']").click()
        time.sleep(1)
    def click_save(self, page):
        """Click the Save button on the given Visualforce page (only 'GAU' supported)."""
        if page== "GAU":
            # fixed Visualforce component id of the GAU save button
            id="j_id0:theForm:j_id9:j_id10:saveBTN"
            locator = npsp_lex_locators["id"].format(id)
            self.selenium.get_webelement(locator).click()
def enter_payment_schedule(self, *args):
"""Enter values into corresponding fields in Levels page"""
#if name == "Payments":
#id = ["paymentCount","intervals","intervalunits"]
id = ["paymentCount","vfForm:intervalnumber","intervalunits"]
for i in range(len(args)):
locator = npsp_lex_locators['id'].format(id[i])
loc = self.selenium.get_webelement(locator)
self.selenium.set_focus_to_element(locator)
self.selenium.select_from_list_by_label(loc,args[i])
time.sleep(2)
    def verify_payment_split(self, amount, no_payments):
        """Assert that *amount* was split into *no_payments* equal payments.

        Stores the per-payment amount (self.val) and the matched element count
        (self.t_loc) for later use by verify_payment().
        """
        #loc = "//input[@value= '{}']"
        input_loc = npsp_lex_locators['button']
        values = int(amount)/int(no_payments)
        # per-payment amount formatted to 2 decimal places, e.g. "33.33"
        values_1 = "{:0.2f}".format(values)
        self.val = str(values_1)
        input_field = input_loc.format(self.val)
        list_payments = self.selenium.get_webelements(input_field)
        self.t_loc=len(list_payments)
        if self.t_loc == int(no_payments):
            for i in list_payments:
                self.selenium.page_should_contain_element(i)
            actual_payments = str(self.t_loc)
        else:
            actual_payments = str(self.t_loc)
        assert no_payments == actual_payments, "Expected {} number of payment but found {}".format(no_payments,actual_payments)
    def verify_date_split(self,date, no_payments, interval):
        """Verify the scheduled payment dates starting at *date* (MM/DD/YYYY)
        advance by *interval* months.

        NOTE(review): the loop over no_payments is commented out, so only the
        first computed follow-up date is verified; the elif branch adjusts
        month/year but is never re-entered. Candidate for cleanup — confirm
        against calling suites before changing.
        """
        ddate=[]
        mm, dd, yyyy = date.split("/")
        mm, dd, yyyy = int(mm), int(dd), int(yyyy)
        locator = npsp_lex_locators['payments']['date_loc'].format(date)
        t_dates = self.selenium.get_webelement(locator)
        self.selenium.page_should_contain_element(t_dates)
        # for i in range(int(no_payments) + 1):
        if mm <= 12:
            date_list = [mm, dd, yyyy]
            dates = list(map(str, date_list))
            new_date = "/".join(dates)
            mm = mm + int(interval)
            dates = list(map(str, date_list))
            #if new_date not in t_dates:
            date_locator = npsp_lex_locators['payments']['date_loc'].format(new_date)
            t_dates = self.selenium.get_webelement(date_locator)
            self.selenium.page_should_contain_element(t_dates)
        elif mm > 12:
            yyyy = yyyy + 1
            mm = (mm + int(interval))-(12+int(interval))
            #return "pass"
        # else:
        #     return "fail"
def click_viewall_related_list (self,title):
"""clicks on the View All link under the Related List"""
locator=npsp_lex_locators['record']['related']['viewall'].format(title)
element = self.selenium.driver.find_element_by_xpath(locator)
self.selenium.driver.execute_script('arguments[0].click()', element)
def click_button_with_value (self,title):
"""clicks on the button on the payments page"""
locator=npsp_lex_locators['button'].format(title)
self.selenium.get_webelement(locator).click()
def verify_details(self, **kwargs):
"""To verify no. of records with given same column values
key is value in a table column, value is expected count of rows with that value
"""
for key, value in kwargs.items():
locators = npsp_lex_locators['payments']['pays'].format(key)
list_ele = self.selenium.get_webelements(locators)
p_count=len(list_ele)
assert p_count == int(value), "Expected {} payment with status {} but found {}".format(value, key, p_count)
    def verify_allocations(self,header, **kwargs):
        """To verify allocations, header is related list
        key is value in 1st td element, value is value in 2nd element
        """
        self.salesforce.load_related_list(header)
        for key, value in kwargs.items():
            locator = npsp_lex_locators['record']['related']['allocations'].format(header,key,value)
            self.selenium.wait_until_page_contains_element(locator,error="Expected {} allocation of {} was not found".format(key,value))
        #     ele = self.selenium.get_webelement(locator).text
        #     assert ele == value, "Expected {} allocation to be {} but found {}".format(key,value,ele)
    def verify_occurrence_payments(self,title,value=None):
        """Return the displayed occurrence count text for the payments list
        *title*. The *value* parameter is currently unused."""
        locator=npsp_lex_locators['payments']['check_occurrence'].format(title)
        occ_value=self.selenium.get_webelement(locator).text
        return occ_value
    def verify_payment(self):
        """Assert the split payment amounts computed by verify_payment_split
        are displayed.

        NOTE(review): `result` is overwritten on every iteration, so only the
        last payment element actually decides the assertion; with no payment
        elements present this raises NameError. Relies on self.val/self.t_loc
        set by a prior verify_payment_split call.
        """
        locators=npsp_lex_locators['payments']['no_payments']
        list_ele=self.selenium.get_webelements(locators)
        l_no_payments = len(list_ele)
        for element in list_ele:
            payment_com=self.selenium.get_webelement(element).text
            cc=payment_com.replace("$","")
            if cc == str(self.val) and self.t_loc == l_no_payments :
                result = 'pass'
            else:
                result = "fail"
        assert result == 'pass', "Expected payment value not present."
    def select_value_from_bge_dd(self, list_name,value):
        """Select *value* from the BGE dropdown *list_name*, trying each
        known bge-list locator variant until one matches."""
        list_found = False
        locators = npsp_lex_locators["bge-lists"].values()
        for i in locators:
            locator = i.format(list_name)
            if self.check_if_element_exists(locator):
                loc=self.selenium.get_webelement(locator)
                self.selenium.set_focus_to_element(locator)
                self.selenium.select_from_list_by_label(loc,value)
                list_found = True
                break
        assert list_found, "Dropdown with the provided locator not found"
def check_if_element_exists(self, xpath):
elements =self.selenium.get_element_count(xpath)
return True if elements > 0 else False
def check_if_element_displayed(self, xpath):
""" Check for the visibility of an element based on the xpath sent"""
element = self.selenium.get_webelement(xpath)
return True if element.is_displayed() else False
    def select_multiple_values_from_list(self,list_name,*args):
        """Pass the list name and values to be selected from the dropdown. Please note that this doesn't unselect the existing values"""
        locator = npsp_lex_locators['npsp_settings']['multi_list'].format(list_name)
        loc = self.selenium.get_webelement(locator)
        self.selenium.set_focus_to_element(locator)
        self.selenium.select_from_list_by_label(loc,*args)
    def choose_frame(self, value):
        """Returns the first displayed iframe on the page with the given name or title"""
        locator = npsp_lex_locators['frame_new'].format(value,value)
        frames = self.selenium.get_webelements(locator)
        self.selenium.capture_page_screenshot()
        print(f'list of frames {frames}')
        for frame in frames:
            print(f'inside for loop for {frame}')
            self.selenium.capture_page_screenshot()
            if frame.is_displayed():
                try:
                    self.selenium.select_frame(frame)
                except NoSuchWindowException:
                    # window briefly went away — wait and retry once
                    print("inside except")
                    self.builtin.log("caught NoSuchWindowException;trying gain..","WARN")
                    time.sleep(.5)
                    self.selenium.select_frame(frame)
                return frame
        raise Exception('unable to find visible iframe with title "{}"'.format(value))
    @capture_screenshot_on_error
    def select_frame_and_click_element(self,iframe,path, *args, **kwargs):
        """Waits for the iframe and Selects the first displayed frame with given name or title and scrolls to element identified by locator and clicks """
        self.wait_for_locator('frame_new',iframe,iframe)
        self.choose_frame(iframe)
        loc = self.get_npsp_locator(path, *args, **kwargs)
        self.selenium.wait_until_element_is_visible(loc, timeout=60)
        self.salesforce.scroll_element_into_view(loc)
        self.selenium.click_element(loc)
def get_npsp_locator(self, path, *args, **kwargs):
""" Returns a rendered locator string from the npsp_lex_locators
dictionary. This can be useful if you want to use an element in
a different way than the built in keywords allow.
"""
locator = npsp_lex_locators
for key in path.split('.'):
locator = locator[key]
main_loc = locator.format(*args, **kwargs)
return main_loc
    def wait_for_locator(self, path, *args, **kwargs):
        """Waits for 60 sec for the specified locator to become visible"""
        main_loc = self.get_npsp_locator(path,*args, **kwargs)
        self.selenium.wait_until_element_is_visible(main_loc, timeout=60)
    def wait_for_locator_is_not_visible(self, path, *args, **kwargs):
        """Waits for 60 sec for the specified locator to become invisible"""
        main_loc = self.get_npsp_locator(path,*args, **kwargs)
        self.selenium.wait_until_element_is_not_visible(main_loc, timeout=60)
    def page_should_not_contain_locator(self, path, *args, **kwargs):
        """Waits up to 60 sec for the locator specified to be not present on the page"""
        main_loc = self.get_npsp_locator(path,*args, **kwargs)
        self.selenium.wait_until_page_does_not_contain_element(main_loc, timeout=60)
    @capture_screenshot_on_error
    def wait_for_batch_to_complete(self, path, *args, **kwargs):
        """Checks every 15 secs for upto 3.5mins for batch with given status
        """
        i = 0
        locator = self.get_npsp_locator(path,*args, **kwargs)
        while True:
            i += 1
            # 14 retries x 15s sleep ≈ 3.5 minutes total
            if i > 14:
                self.selenium.capture_page_screenshot()
                raise AssertionError(
                    "Timed out waiting for batch with locator {} to load.".format(locator)
                )
            else:
                try:
                    self.selenium.wait_until_element_is_visible(locator)
                    break
                except Exception:
                    time.sleep(15)
    @capture_screenshot_on_error
    def wait_for_batch_to_process(self, batch,status):
        """Checks every 30 secs for upto 9mins for batch with given status
        """
        i = 0
        sec=0
        expected = npsp_lex_locators['batch_status'].format(batch,status)
        error = npsp_lex_locators['batch_status'].format(batch,"Errors")
        self.selenium.capture_page_screenshot()
        while True:
            i += 1
            # 18 retries x 30s wait ≈ 9 minutes total
            if i > 18:
                self.selenium.capture_page_screenshot()
                raise AssertionError("Timed out waiting for batch {} with status {} to load.".format(batch,status))
            elif self.check_if_element_exists(error):
                # batch entered "Errors" state; only acceptable when the caller
                # was explicitly waiting for it
                if status != "Errors":
                    raise AssertionError("Batch {} failed with Error".format(batch))
                break
            else:
                try:
                    self.selenium.wait_until_element_is_visible(expected)
                    break
                except Exception:
                    sec= sec+30
                    print("Batch processing is not finished with {} status in {} seconds".format(status,sec))
        self.selenium.capture_page_screenshot()
def get_npsp_settings_value(self,field_name):
locator = npsp_lex_locators['npsp_settings']['field_value'].format(field_name)
loc = self.selenium.get_webelement(locator).text
return loc
    def verify_payment_details(self, numpayments):
        """Gets the payment details from the UI and compares with the expected number of payments

        NOTE(review): the expected status/amount pair is hard-coded to
        "Pledged"/"$100.00", and a mismatch returns the string "fail" instead
        of raising — only the payment count is actually asserted. Confirm with
        calling suites before tightening.
        """
        locator = npsp_lex_locators['payments']['loc1']
        locs1 = self.selenium.get_webelements(locator)
        locator2 = npsp_lex_locators['payments']['loc2']
        locs2 = self.selenium.get_webelements(locator2)
        for i, j in list(zip(locs1, locs2)):
            #loc1_vaue = self.selenium.get_webelemt(i).text
            #loc2_vaue = self.selenium.get_webelemt(j).text
            if i.text == "Pledged" and j.text == "$100.00":
                pass
            else:
                return "fail"
        self.builtin.should_be_equal_as_strings(len(locs1), numpayments)
# def verify_opportunities(self, len_value):
# locator = "//tbody/tr[12]/th"
# s = self.selenium.get_webelement(locator).text
# #return s
# strip_list = s.split(" ")
# date = strip_list[-1]
# date = date.split("/")
# date = list(map(int, date))
# mm, dd, yyyy = date
# for _ in range(int(len_value)):
# if mm == 12:
# mm = 1
# yyyy = yyyy + 1
# date = [mm, dd, yyyy]
# date = list(map(str, date))
# date = "/".join(date)
# loctor_contains = "//tbody//a[contains(@title , '{}')]".format(date)
# self.selenium.page_should_contain_element(loctor_contains)
# else:
# mm = mm + 1
# date = [mm, dd, yyyy]
# date = list(map(str, date))
# date = "/".join(date)
# loctor_contains = "//tbody//a[contains(@title , '{}')]".format(date)
# self.selenium.page_should_contain_element(loctor_contains)
def click_object_manager_button(self,title):
"""clicks on the buttons in object manager"""
locator=npsp_lex_locators['object_manager']['button'].format(title)
self.selenium.get_webelement(locator).click()
    def click_bge_button(self,text):
        """clicks on buttons for BGE"""
        locator=npsp_lex_locators['bge']['button'].format(text)
        time.sleep(1)
        element = self.selenium.driver.find_element_by_xpath(locator)
        # JS click avoids "element not interactable" issues on LEX pages
        self.selenium.driver.execute_script('arguments[0].click()', element)
def verify_title(self,title,value):
""""""
locator=npsp_lex_locators['bge']['title'].format(title,value)
actual_value=self.selenium.get_webelement(locator).text
assert value == actual_value, "Expected value to be {} but found {}".format(
value, actual_value
)
    def page_scroll_to_locator(self, path, *args, **kwargs):
        """Scroll the element identified by the locator path into view."""
        locator = self.get_npsp_locator(path, *args, **kwargs)
        self.salesforce.scroll_element_into_view(locator)
def get_bge_card_header(self,title):
"""Validates if the specific header field has specified value"""
locator= npsp_lex_locators['bge']['card-header'].format(title)
id=self.selenium.get_webelement(locator).text
return id
    def click_bge_edit_button(self, title):
        """clicks the button in the table by using name mentioned in data-label"""
        locator=npsp_lex_locators['bge']['edit_button'].format(title)
        #self.selenium.get_webelement(locator).click()
        self.selenium.click_button(locator)
def populate_bge_edit_field(self, title, value):
    """Clear the input field identified by ``title`` and type ``value`` into it."""
    field_xpath = npsp_lex_locators['bge']['edit_field'].format(title)
    # Original assigned the (None) return value to an unused local; dropped.
    self.salesforce._populate_field(field_xpath, value)
@capture_screenshot_on_error
def verify_row_count(self, value):
    """Assert that the number of visible table rows equals ``value``."""
    rows_xpath = npsp_lex_locators['bge']['count']
    self.selenium.wait_until_element_is_visible(rows_xpath)
    rows = self.selenium.get_webelements(rows_xpath)
    found = len(rows)
    assert int(value) == found, "Expected rows to be {} but found {}".format(
        value, found
    )
def return_locator_value(self, path, *args, **kwargs):
    """Return the text of the element pointed to by the locator at ``path``."""
    target = self.get_npsp_locator(path, *args, **kwargs)
    self.selenium.wait_until_page_contains_element(target)
    return self.selenium.get_webelement(target).text
def return_list(self, path, *args, **kwargs):
    """Return the text of every element matched by the locator at ``path``."""
    target = self.get_npsp_locator(path, *args, **kwargs)
    texts = []
    for element in self.selenium.get_webelements(target):
        texts.append(element.text)
    return texts
def select_bge_row(self, value):
    """To select a row on object page based on name and open the dropdown"""
    locators = npsp_lex_locators['bge']['name']
    list_ele = self.selenium.get_webelements(locators)
    for index, element in enumerate(list_ele):
        if element.text == value:
            # NOTE(review): there is no break after a match, so the dropdown
            # is opened for EVERY row whose text equals `value` -- confirm
            # callers only ever have a single matching row.
            # The dropdown locator is 1-indexed, hence index+1.
            drop_down = npsp_lex_locators['bge']['locate_dropdown'].format(index+1)
            self.selenium.click_element(drop_down)
            time.sleep(1)
def click_link_with_text(self, text):
    """Wait for the link with visible ``text`` and click it via JavaScript."""
    link_xpath = npsp_lex_locators['link-text'].format(text)
    self.selenium.wait_until_page_contains_element(link_xpath)
    link = self.selenium.driver.find_element_by_xpath(link_xpath)
    self.selenium.driver.execute_script('arguments[0].click()', link)
    time.sleep(1)
def click_link_with_spantext(self, text):
    """Wait for the span option with visible ``text`` and click it."""
    option_xpath = npsp_lex_locators['custom_objects']['option'].format(text)
    self.selenium.wait_until_page_contains_element(option_xpath, 30)
    option = self.selenium.driver.find_element_by_xpath(option_xpath)
    self.selenium.click_element(option)
    time.sleep(1)
def verify_expected_batch_values(self, batch_id, **kwargs):
    """Verify that the Data Import Batch record ``batch_id`` holds the
    expected field values supplied as keyword arguments (field names are
    given without the namespace prefix)."""
    prefix = self.get_npsp_namespace_prefix()
    batch_table = prefix + "DataImportBatch__c"
    batch_record = self.salesforce.salesforce_get(batch_table, batch_id)
    for field, expected in kwargs.items():
        namespaced_field = prefix + field
        self.builtin.should_be_equal_as_strings(batch_record[namespaced_field], expected)
def click_element_with_locator(self, path, *args, **kwargs):
    """Resolve the locator at ``path`` with the given values and click it."""
    target = self.get_npsp_locator(path, *args, **kwargs)
    self.selenium.click_element(target)
def wait_for_record_to_update(self, id, value):
    """Waits for specified record header to be updated by checking every second for 10 times.

    Reloads the record home page for ``id`` on each attempt; raises
    AssertionError if the header never shows ``value``.
    """
    i = 0
    while True:
        i += 1
        if i > 10:
            raise AssertionError(
                "Timed out waiting for record name to be {} .".format(value)
            )
        self.salesforce.go_to_record_home(id)
        try:
            self.verify_header(value)
            break
        except Exception:
            # Header not updated yet -- pause briefly before reloading.
            time.sleep(1)
def load_locator(self, locator):
    """Scrolls down until the specified locator is found.

    Scrolls 100px at a time for up to 20 attempts, waiting for aura after
    each scroll; raises AssertionError on timeout.
    """
    i = 0
    while True:
        i += 1
        if i > 20:
            raise AssertionError(
                "Timed out waiting for locator {} to load.".format(locator)
            )
        self.selenium.execute_javascript("window.scrollBy(0, 100)")
        self.wait_for_aura()
        try:
            # get_webelement raises ElementNotFound until the element renders.
            self.selenium.get_webelement(locator)
            break
        except ElementNotFound:
            time.sleep(0.2)
def select_multiple_values_from_duellist(self, path, list_name, section, *args):
    """Pass the list name and values to be selected from the dropdown.

    The first value is plain-clicked; subsequent values are clicked with the
    COMMAND modifier so every requested option stays selected.
    """
    # Walk the dotted `path` down into the nested locator dictionary.
    main_loc = npsp_lex_locators
    for key in path.split('.'):
        main_loc = main_loc[key]
    for i in args:
        locator = main_loc.format(list_name, section, i)
        if args.index(i)==0:
            self.selenium.click_element(locator)
        else:
            self.selenium.click_element(locator,'COMMAND')
def click_duellist_button(self, list_name, button):
    """Click ``button`` next to the dual list named ``list_name``.

    Several locator variants exist for dual-list buttons; each is tried in
    turn and the first one present on the page is clicked.
    """
    for template in npsp_lex_locators["bge-duellist-btn"].values():
        candidate = template.format(list_name, button)
        if self.check_if_element_exists(candidate):
            self.selenium.click_element(candidate)
            break
    else:
        raise AssertionError("Dropdown with the provided locator not found")
def verify_expected_values(self, ns_ind, obj_api, rec_id, **kwargs):
    """To verify that the data in a database table matches expected values:
    provide 'ns' if the object has a namespace prefix (otherwise 'nonns'),
    the object API name, the record id, and the field=value data to verify.

    The fetch-and-compare is retried once after a 10 second pause so that
    asynchronous server-side updates have a chance to land.
    """
    if(ns_ind=='ns'):
        ns=self.get_npsp_namespace_prefix()
        table=ns + obj_api
    else:
        table=obj_api
    try :
        rec=self.salesforce.salesforce_get(table,rec_id)
        for key, value in kwargs.items():
            print(f"executing {key}, {value} pair")
            self.builtin.should_be_equal_as_strings(rec[key], value)
    except Exception :
        # Deliberate broad retry: any fetch/compare failure gets one more
        # attempt after giving the server time to finish updating.
        print("Retrying after exception")
        time.sleep(10)
        rec=self.salesforce.salesforce_get(table,rec_id)
        for key, value in kwargs.items():
            print(f"executing {key}, {value} pair")
            self.builtin.should_be_equal_as_strings(rec[key], value)
def get_org_namespace_prefix(self):
    """Return "npsp__" when the org is namespaced, otherwise an empty string."""
    return "npsp__" if self.cumulusci.org.namespaced else ""
@capture_screenshot_on_error
def click_first_matching_related_item_popup_link(self, heading, rel_status, link):
    """Click ``link`` in the popup menu of the first related-list item under
    ``heading`` whose status (or similar field value) matches ``rel_status``."""
    self.salesforce.load_related_list(heading)
    row_xpath = npsp_lex_locators["record"]["related"]["link"].format(heading, rel_status)
    matching_rows = self.selenium.get_webelements(row_xpath)
    first_title = matching_rows[0].text
    print(f"title is {first_title}")
    self.click_special_related_item_popup_link(heading, first_title, link)
def click_special_related_item_popup_link(self, heading, title, link):
    """Open the popup menu for the related-list item ``title`` under
    ``heading`` and click ``link`` inside it, then wait for loading."""
    self.salesforce.load_related_list(heading)
    trigger_xpath = npsp_lex_locators["record"]["related"]["popup_trigger"].format(heading, title)
    self.selenium.wait_until_page_contains_element(trigger_xpath)
    self.salesforce._jsclick(trigger_xpath)
    menu_link_xpath = npsp_lex_locators["popup-link"].format(link)
    self.salesforce._jsclick(menu_link_xpath)
    self.salesforce.wait_until_loading_is_complete()
def verify_field_values(self, **kwargs):
    """Assert that each named field displays its expected value."""
    for field, expected in kwargs.items():
        field_xpath = npsp_lex_locators["field-value"].format(field)
        displayed = self.selenium.get_webelement(field_xpath).text
        assert expected == displayed, "Expected {} value to be {} but found {}".format(field, expected, displayed)
def checkbox_status(self, cbx_name, status):
    """Verify that the checkbox labelled ``cbx_name`` is rendered with the
    expected ``status`` in read-only mode."""
    locator = npsp_lex_locators["custom_settings"]["cbx_status"].format(cbx_name, status)
    # Bug fix: the message was a plain literal (missing the f prefix), so
    # failures printed the raw text "{cbx_name} checkbox is supposed to be
    # {status}" instead of the interpolated values.
    self.selenium.page_should_contain_element(locator, message=f'{cbx_name} checkbox is supposed to be {status}')
def go_to_setup_page(self, page):
    """Navigate to the given Salesforce Setup ``page`` and wait for it to load."""
    setup_url = "{}/lightning/setup/{}/home".format(
        self.cumulusci.org.lightning_base_url, page
    )
    self.selenium.go_to(setup_url)
    self.salesforce.wait_until_loading_is_complete()
def click_wrapper_related_list_button(self, heading, button_title):
    """JS-click ``button_title`` in the heading of the related list ``heading``
    when the related list is enclosed in a wrapper."""
    button_xpath = npsp_lex_locators["record"]["related"]["button"].format(heading, button_title)
    button = self.selenium.driver.find_element_by_xpath(button_xpath)
    self.selenium.driver.execute_script('arguments[0].click()', button)
@capture_screenshot_on_error
def change_view_to(self, view_name):
    """Selects a different view for the object records in the listing page."""
    locator=npsp_lex_locators['object_dd']
    view=npsp_lex_locators['link'].format(view_name,view_name)
    self.selenium.wait_until_page_contains("List Views")
    self.selenium.wait_until_element_is_visible(locator,30)
    try:
        self.selenium.get_webelement(locator).click()
    except ElementClickInterceptedException:
        # The dropdown can be obscured by sticky page chrome; scroll a bit
        # and fall back to a JavaScript click.
        self.selenium.execute_javascript("window.scrollBy(0,100)")
        ele = self.selenium.driver.find_element_by_xpath(locator)
        self.selenium.driver.execute_script('arguments[0].click()', ele)
    element = self.selenium.driver.find_element_by_xpath(view)
    self.selenium.driver.execute_script('arguments[0].click()', element)
    self.selenium.wait_until_page_contains(view_name)
@capture_screenshot_on_error
def search_field_by_value(self, fieldname, value):
    """Type ``value`` into the input whose placeholder is ``fieldname`` and
    press ENTER to trigger the search."""
    input_xpath = npsp_lex_locators["placeholder"].format(fieldname)
    search_box = self.selenium.get_webelement(input_xpath)
    self.selenium.clear_element_text(search_box)
    search_box.send_keys(value)
    # Give the type-ahead suggestions time to appear before committing.
    time.sleep(3)
    search_box.send_keys(Keys.ENTER)
@capture_screenshot_on_error
def search_field_and_perform_action(self, fieldname, value, type=None):
    """Searches the field with the placeholder given by 'fieldname' for the
    given 'value' and clicks the dropdown option containing the value.
    If ``type`` is 'New', the ENTER key is pressed instead so the New-record
    modal appears.
    """
    xpath = npsp_lex_locators["placeholder"].format(fieldname)
    lookup_option = npsp_lex_locators["gift_entry"]["lookup-option"].format(value)
    field = self.selenium.get_webelement(xpath)
    self.salesforce._clear(field)
    field.send_keys(value)
    # Allow the lookup's type-ahead suggestions to load.
    time.sleep(3)
    if type == 'New':
        field.send_keys(Keys.ENTER)
        self.salesforce.wait_until_modal_is_open()
    else:
        self.selenium.wait_until_element_is_visible(lookup_option)
        self.selenium.click_element(lookup_option)
def save_current_record_id_for_deletion(self, object_name):
    """Store the current page's record id under ``object_name`` so the record
    is deleted during suite teardown; return the id."""
    record_id = self.salesforce.get_current_record_id()
    self.salesforce.store_session_record(object_name, record_id)
    return record_id
def verify_record_is_created_in_database(self, object_name, id):
    """Fetch record ``id`` from the ``object_name`` table, fail if it is
    empty, and return the record."""
    fetched = self.salesforce.salesforce_get(object_name, id)
    self.builtin.should_not_be_empty(fetched, msg="The database object {} with id {} is not in the database".format(object_name, id))
    return fetched
@capture_screenshot_on_error
def select_value_from_dropdown(self, dropdown, value):
    """Select ``value`` in the dropdown field labelled ``dropdown``.

    On API v51/v52 most dropdowns are flexipage comboboxes, while
    "Installment Period" and "Role" use a different lightning combobox;
    on older versions the legacy rdlist / record-list locators are used.
    """
    # Bug fix: the original conditions read `A or B and C`; because `and`
    # binds tighter than `or`, v51 always took the first branch regardless
    # of the dropdown name. Membership tests restore the intended grouping.
    if self.latest_api_version in (51.0, 52.0) and dropdown not in ("Installment Period", "Role"):
        self.click_flexipage_dropdown(dropdown, value)
    else:
        if dropdown in ("Open Ended Status", "Payment Method"):
            locator = npsp_lex_locators['record']['rdlist'].format(dropdown)
            selection_value = npsp_lex_locators["erd"]["modal_selection_value"].format(value)
            if self.npsp.check_if_element_exists(locator):
                self.selenium.set_focus_to_element(locator)
                self.selenium.wait_until_element_is_visible(locator)
                self.salesforce.scroll_element_into_view(locator)
                self.salesforce._jsclick(locator)
                self.selenium.wait_until_element_is_visible(selection_value)
                self.selenium.click_element(selection_value)
        if self.latest_api_version in (51.0, 52.0) and dropdown in ("Installment Period", "Role"):
            locator = npsp_lex_locators['record']['select_dropdown']
            selection_value = npsp_lex_locators["record"]["select_value"].format(value)
            if self.npsp.check_if_element_exists(locator):
                self.selenium.set_focus_to_element(locator)
                self.selenium.wait_until_element_is_visible(locator)
                self.salesforce.scroll_element_into_view(locator)
                self.salesforce._jsclick(locator)
                self.selenium.wait_until_element_is_visible(selection_value)
                self.selenium.click_element(selection_value)
        # Bug fix: `dropdown not in ("Payment Method")` was a substring test
        # against a plain string (no tuple); use a direct comparison.
        elif dropdown != "Payment Method":
            locator = npsp_lex_locators['record']['list'].format(dropdown)
            self.salesforce.scroll_element_into_view(locator)
            self.selenium.get_webelement(locator).click()
            self.wait_for_locator('popup')
            self.npsp.click_link_with_text(value)
def edit_record(self):
    """Click the standard Edit object button on a record page and wait for
    the edit modal to open."""
    self.salesforce.click_object_button("Edit")
    self.salesforce.wait_until_modal_is_open()
def randomString(self, stringLength=10):
    """Return a random lowercase ASCII string of length ``stringLength``."""
    alphabet = string.ascii_lowercase
    return "".join(random.choice(alphabet) for _ in range(stringLength))
@capture_screenshot_on_error
def scroll_button_into_view_and_click_using_js(self, value):
    """Scrolls the button element into view and clicks the button using JS."""
    xpath = npsp_lex_locators['button'].format(value)
    self.selenium.wait_until_element_is_visible(xpath)
    # Resolve the element with document.evaluate in the browser and scroll
    # it into view there, since WebDriver-side scrolling is unreliable here.
    javascript = (
        "window.document.evaluate("
        f" '{xpath}', document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null"
        ").singleNodeValue.scrollIntoView(true)"
    )
    self.selenium.execute_javascript(javascript)
    time.sleep(2)
    self.npsp.click_button_with_value(value)
    time.sleep(1)
def setupdata(self, name, contact_data=None, opportunity_data=None, account_data=None, payment_data=None, engagement_data=None,
              recurringdonation_data=None, gau_data=None):
    """ Creates an Account if account setup data is passed
        Creates a contact if contact_data is passed
        Creates an opportunity for the contact if opportunity_data is provided
        Creates a contact and sets an opportunity simultaneously if both the
        contact_data and opportunity_data are specified
        Creates a contact and sets up an engagement plan when both contact and
        engagement plan information are provided
    """
    # get the data variable, or an empty dictionary if not set
    data = self.builtin.get_variable_value("${data}", {})
    ns=self.get_npsp_namespace_prefix()
    if account_data is not None:
        # create the account based on the user input specified account type
        acctname = self.randomString(10)
        rt_id = self.salesforce.get_record_type_id("Account",account_data["Type"])
        account_data.update( {'Name' : acctname,'RecordTypeId' : rt_id})
        account_id = self.salesforce.salesforce_insert("Account", **account_data)
        account = self.salesforce.salesforce_get("Account",account_id)
        # save the account object to the data dictionary
        data[name] = account
    if contact_data is not None:
        # create the contact with randomized first/last names
        firstname = self.randomString(10)
        lastname = self.randomString(10)
        contact_data.update( {'Firstname' : firstname,'Lastname' : lastname})
        contact_id = self.salesforce.salesforce_insert("Contact", **contact_data)
        contact = self.salesforce.salesforce_get("Contact",contact_id)
        # save the contact object to the data dictionary
        data[name] = contact
    if engagement_data is not None:
        # set up an engagement template from the user input and link the contact to it
        engobjname = "Engagement_Plan_Template__c"
        contactobjname = "Contact__c"
        # Formatting the object names with the namespace prefix
        formattedengobjname = "{}{}".format(self.get_npsp_namespace_prefix(), engobjname)
        formattedcontactobjname = "{}{}".format(self.cumulusci.get_namespace_prefix(), contactobjname)
        engagement_id = self.salesforce.salesforce_insert(formattedengobjname, **engagement_data)
        engagement = self.salesforce.salesforce_get(formattedengobjname,engagement_id)
        # If the keyword is contact, link the contact to the engagement plan created
        if name.lower() == 'contact':
            testdata={}
            testdata.update( {formattedcontactobjname : data[name]["Id"], formattedengobjname: engagement_id } )
            self.salesforce.salesforce_insert(formattedengobjname, **testdata)
        # save the engagement object to the data dictionary
        if name.lower() == 'contact':
            data[f"{name}_engagement"] = engagement
        else:
            data[name] = engagement
    # set a recurring donation for a contact
    if recurringdonation_data is not None:
        recurringdonation_data.update( {'npe03__Contact__c' : data[name]["Id"] } )
        rd_id = self.salesforce.salesforce_insert("npe03__Recurring_Donation__c", **recurringdonation_data)
        recurringdonation = self.salesforce.salesforce_get("npe03__Recurring_Donation__c",rd_id)
        data[f"{name}_rd"] = recurringdonation
    # set gau data
    if gau_data is not None:
        object_key = f"{ns}General_Accounting_Unit__c"
        gauname = gau_data['Name']
        random = self.randomString(10)
        # NOTE(review): the key written here is lowercase 'name' while 'Name'
        # is read above; the Salesforce REST API treats field names
        # case-insensitively, so this presumably works -- confirm.
        gau_data.update( {'name' : f"{random}{gauname}"} )
        gau_id = self.salesforce.salesforce_insert(object_key, **gau_data)
        gau = self.salesforce.salesforce_get(object_key,gau_id)
        data[name] = gau
    # set opportunity association with a contact or account
    if opportunity_data is not None:
        # create opportunity
        rt_id = self.salesforce.get_record_type_id("Opportunity",opportunity_data["Type"])
        # if the user did not specify any date value, add the default value
        if 'CloseDate' not in opportunity_data:
            date = datetime.now().strftime('%Y-%m-%d')
            opportunity_data.update({'CloseDate' : date})
        if 'npe01__Do_Not_Automatically_Create_Payment__c' not in opportunity_data:
            Automatically_create_key = 'npe01__Do_Not_Automatically_Create_Payment__c'
            Automatically_create_value = 'true'
            opportunity_data.update({Automatically_create_key : Automatically_create_value})
        if 'StageName' not in opportunity_data:
            opportunity_data.update( {'StageName' : 'Closed Won'} )
        if 'AccountId' not in opportunity_data:
            opportunity_data.update( {'AccountId' : data[name]["AccountId"] } )
        opportunity_data.update( {'RecordTypeId': rt_id } )
        opportunity_id = self.salesforce.salesforce_insert("Opportunity", **opportunity_data)
        opportunity = self.salesforce.salesforce_get("Opportunity",opportunity_id)
        # save the opportunity
        data[f"{name}_opportunity"] = opportunity
        if payment_data is not None:
            numdays = 30
            i = 1
            while i <= int(payment_data['NumPayments']):
                payment_schedule_data = {}
                # Based on the NumPayments parameter, populate that many scheduled
                # payments and associate them to the opportunity. Use the supplied
                # schedule date when provided; otherwise default to today + numdays.
                if 'Scheduledate' in payment_data:
                    # The payment date and scheduled date start from the same value
                    scheduled_date = payment_data['Scheduledate']
                    payment_date = payment_data['Scheduledate']
                    # Altering the scheduled date to increment by one month per installment
                    scheduled_date = (datetime.strptime(scheduled_date , '%Y-%m-%d').date() + relativedelta(months=i)).strftime('%Y-%m-%d')
                else:
                    scheduled_date = (datetime.now() + timedelta(days = numdays)).strftime('%Y-%m-%d')
                payment_schedule_data.update( {'npe01__Opportunity__c' : data[f"{name}_opportunity"]["Id"] , 'npe01__Scheduled_Date__c' : scheduled_date,'npe01__Payment_Amount__c' : payment_data['Amount'] } )
                payment_id = self.salesforce.salesforce_insert("npe01__OppPayment__c", **payment_schedule_data)
                # Of the generated payments, mark the first CompletedPayments of
                # them as paid and populate the payment date.
                if 'CompletedPayments' in payment_data:
                    if i<= int(payment_data['CompletedPayments']):
                        payment_update_data = {}
                        # NOTE(review): payment_date is only bound inside the
                        # 'Scheduledate' branch above; passing CompletedPayments
                        # without Scheduledate would raise UnboundLocalError --
                        # confirm callers always pass both together.
                        # Altering the payment date to increment by two months per installment
                        payment_date = (datetime.strptime(payment_date , '%Y-%m-%d').date() + relativedelta(months=i*2)).strftime('%Y-%m-%d')
                        payment_update_data.update( {'npe01__Payment_Date__c' : payment_date ,'npe01__Paid__c': "true"} )
                        payment_id = self.salesforce.salesforce_update("npe01__OppPayment__c",payment_id , **payment_update_data)
                i = i+1
    self.builtin.set_suite_variable('${data}', data)
    return data
def delete_record(self, value):
    """Select the row to be deleted on the listing page, click delete,
    and wait till the focus is back on the listings page."""
    self.select_row(value)
    self.selenium.click_link("Delete")
    # The URL regains "/list" once the delete confirmation completes.
    self.selenium.wait_until_location_contains("/list")
    self.selenium.wait_until_page_does_not_contain(value)
def _check_and_populate_lightning_fields(self, **kwargs):
    """During winter 2020 some of the modal fields appear as lightning
    elements; this keyword identifies those elements and populates values.

    Only the lightning-rendered keys ("Payment Amount" and "Payment
    Method") are handled; all other keys are ignored.
    """
    for key, value in kwargs.items():
        # Bug fix: the original tested `key in ("Payment Amount")`, which is a
        # substring test against a plain string (the parentheses do not make a
        # tuple), and ended with `else: return`, which aborted the loop after
        # the first key so "Payment Method" was frequently never populated.
        if key == "Payment Amount":
            locator = npsp_lex_locators["erd"]["modal_input_field"].format(key)
            if self.npsp.check_if_element_exists(locator):
                self.selenium.set_focus_to_element(locator)
                self.salesforce._populate_field(locator, value)
            else:
                self.builtin.log(f"Element {key} not found")
        elif key == "Payment Method":
            # Selects the given value from the dropdown field on the rd2 modal.
            self.npsp.select_value_from_dropdown(key, value)
@capture_screenshot_on_error
def populate_modal_form(self, **kwargs):
    """Populates a modal form with the given field-value pairs.
    Supported keys are any input, textarea, lookup, checkbox, date and
    dropdown fields. The matched element's CSS class decides which
    population strategy is used."""
    # As part of the winter 2020 release some modal elements changed to
    # lightning; an extra check routes those keys to the lightning handler.
    for key, value in kwargs.items():
        if key in ("Payment Amount", "Payment Method"):
            self._check_and_populate_lightning_fields(**kwargs)
        else:
            locator = npsp_lex_locators["modal-form"]["label"].format(key)
            if self.check_if_element_exists(locator):
                ele=self.selenium.get_webelements(locator)
                for e in ele:
                    # Dispatch on the element's class attribute to pick the
                    # right population strategy; skip read-only variants.
                    classname=e.get_attribute("class")
                    self.builtin.log(f"key is {key} and class is {classname}")
                    if "Lookup" in classname and "readonly" not in classname:
                        self.salesforce.populate_lookup_field(key,value)
                        print("Executed populate lookup field for {}".format(key))
                        break
                    elif "Select" in classname and "readonly" not in classname:
                        self.select_value_from_dropdown(key,value)
                        print("Executed select value from dropdown for {}".format(key))
                        break
                    elif "Checkbox" in classname and "readonly" not in classname:
                        if value == "checked":
                            locator = npsp_lex_locators["checkbox"]["model-checkbox"].format(key)
                            self.selenium.get_webelement(locator).click()
                        break
                    elif "Date" in classname and "readonly" not in classname:
                        self.select_date_from_datepicker(key,value)
                        print("Executed open date picker and pick date for {}".format(key))
                        break
                    else:
                        # Fallbacks: try typing into a searchable input first,
                        # then a plain populate; log when neither applies.
                        try :
                            self.search_field_by_value(key,value)
                            print("Executed search field by value for {}".format(key))
                        except Exception :
                            try :
                                self.salesforce.populate_field(key,value)
                                print("Executed populate field for {}".format(key))
                            except Exception:
                                print ("class name for key {} did not match with field type supported by this keyword".format(key))
            else:
                raise Exception("Locator for {} is not found on the page".format(key))
def verify_toast_message(self, value):
    """Wait for the toast element and verify its text equals ``value``."""
    toast_xpath = npsp_lex_locators["toast-msg"]
    self.selenium.wait_until_page_contains_element(toast_xpath)
    toast_text = self.selenium.get_webelement(toast_xpath).text
    if toast_text != value:
        raise Exception("Expected Toast message not found on page")
    print("Toast message verified")
def edit_record_field_value(self, field, value):
    """Open inline edit for ``field`` and populate it with ``value``.

    Scrolls slightly past the field first so the edit icon is rendered and
    not obscured by the page footer.
    """
    anchor = npsp_lex_locators["span_button"].format(field)
    self.salesforce.scroll_element_into_view(anchor)
    self.selenium.execute_javascript("window.scrollBy(0,50)")
    self.selenium.click_button("Edit " + field)
    footer_xpath = npsp_lex_locators["record"]["footer"]
    self.selenium.wait_until_page_contains_element(footer_xpath)
    self.salesforce.populate_lookup_field(field, value)
@capture_screenshot_on_error
def edit_record_dropdown_value(self, field, value):
    """Open inline edit for ``field`` and select ``value`` from its dropdown.

    Scrolls slightly past the field first so the edit icon is rendered and
    not obscured by the page footer.
    """
    anchor = npsp_lex_locators["span_button"].format(field)
    self.selenium.wait_until_element_is_visible(anchor)
    self.salesforce.scroll_element_into_view(anchor)
    self.selenium.execute_javascript("window.scrollBy(0,50)")
    self.selenium.click_button("Edit " + field)
    footer_xpath = npsp_lex_locators["record"]["footer"]
    self.selenium.wait_until_page_contains_element(footer_xpath)
    time.sleep(2)
    self.click_flexipage_dropdown(field, value)
def edit_record_checkbox(self, field, status):
    """Open inline edit for ``field`` and set its checkbox to ``status``
    ('checked' or 'unchecked').

    Scrolls slightly past the field first so the edit icon is rendered and
    not obscured by the page footer.
    """
    anchor = npsp_lex_locators["span_button"].format(field)
    self.salesforce.scroll_element_into_view(anchor)
    self.selenium.execute_javascript("window.scrollBy(0,50)")
    self.selenium.click_button("Edit " + field)
    footer_xpath = npsp_lex_locators["record"]["footer"]
    self.selenium.wait_until_page_contains_element(footer_xpath)
    self.set_checkbutton_to(field, status)
def save_record(self):
    """Click the footer 'Save' button and wait for the footer to disappear."""
    footer_xpath = npsp_lex_locators["record"]["footer"]
    self.click_record_button("Save")
    self.selenium.wait_until_page_does_not_contain_element(footer_xpath)
    # Scroll to the top so elements above the record are interactable again.
    self.selenium.execute_javascript("window.scrollTo(0,0)")
def Delete_record_field_value(self, field, value):
    """Open inline edit for ``field`` and remove ``value`` by clicking its
    'X' delete icon.

    Scrolls slightly past the field first so the edit icon is rendered and
    not obscured by the page footer.
    """
    anchor = npsp_lex_locators["span_button"].format(field)
    self.salesforce.scroll_element_into_view(anchor)
    self.selenium.execute_javascript("window.scrollBy(0,50)")
    self.selenium.click_button("Edit " + field)
    footer_xpath = npsp_lex_locators["record"]["footer"]
    self.selenium.wait_until_page_contains_element(footer_xpath)
    delete_icon = npsp_lex_locators['delete_icon_record'].format(field, value)
    self.selenium.get_webelement(delete_icon).click()
def select_date_from_datepicker(self, field, value):
    """Open the datepicker for ``field`` and pick ``value``.

    Tries the BGE-style date input first; falls back to the lightning
    datepicker locators when the BGE input is not present.
    """
    field_loc=npsp_lex_locators["bge"]["field-input"].format(field)
    if self.check_if_element_exists(field_loc):
        locator=npsp_lex_locators["bge"]["datepicker_open"].format(field)
        self.selenium.click_element(field_loc)
        self.selenium.wait_until_page_contains_element(locator)
        self.click_bge_button(value)
        self.selenium.wait_until_page_does_not_contain_element(locator,error="could not open datepicker")
    else:
        # Lightning datepicker: click the input, then click the date cell.
        field_loc=npsp_lex_locators['record']['lt_date_picker'].format(field)
        locator=npsp_lex_locators['record']['ltdatepicker'].format(value)
        self.selenium.click_element(field_loc)
        self.selenium.wait_until_page_contains_element(locator)
        self.selenium.click_element(locator)
def click_more_actions_button(self):
    """Open the 'more actions' dropdown in the record-page actions container."""
    dropdown_xpath = npsp_lex_locators['link'].format("more actions", "more actions")
    self.salesforce._jsclick(dropdown_xpath)
def click_more_actions_lightning_button(self):
    """Open the lightning 'more actions' dropdown on the manage-household page."""
    button_xpath = npsp_lex_locators['manage_hh_page']['more_actions_btn']
    self.selenium.wait_until_element_is_visible(button_xpath)
    self.salesforce._jsclick(button_xpath)
    time.sleep(2)
@capture_screenshot_on_error
def click_related_table_item_link(self, heading, title):
    """Click the ``title`` header-field link in the related list identified
    by ``heading``, then wait for loading to complete."""
    self.builtin.log("loading related list...", "DEBUG")
    self.salesforce.load_related_list(heading)
    link_xpath = npsp_lex_locators["record"]["related"]["link"].format(heading, title)
    self.builtin.log("clicking...", "DEBUG")
    self.salesforce._jsclick(link_xpath)
    self.builtin.log("waiting...", "DEBUG")
    self.salesforce.wait_until_loading_is_complete()
def click_actions_link(self, title):
    """JS-click the link titled ``title`` in the top-right actions container."""
    link_xpath = npsp_lex_locators["link-title"].format(title)
    self.salesforce._jsclick(link_xpath)
def click_more_activity_button(self):
    """Click 'View More' on the Activity tab of the record."""
    button_xpath = npsp_lex_locators["record"]["activity-button"].format('showMoreButton')
    self.salesforce._jsclick(button_xpath)
def click_button_with_title(self, title):
    """JS-click the button whose title attribute matches ``title``."""
    button_xpath = npsp_lex_locators["button-title"].format(title)
    self.salesforce._jsclick(button_xpath)
@capture_screenshot_on_error
def click_show_more_actions_button(self, title):
    """Open the 'more actions' dropdown and click the entry named ``title``."""
    dropdown_xpath = npsp_lex_locators['link-contains'].format("more actions")
    self.selenium.wait_until_element_is_visible(dropdown_xpath)
    self.selenium.click_element(dropdown_xpath)
    time.sleep(1)
    self.selenium.wait_until_page_contains(title)
    entry_xpath = npsp_lex_locators['custom_objects']['actions-link'].format(title, title)
    self.selenium.click_link(entry_xpath)
def get_url_formatted_object_name(self, name):
    """Return a dict with the org's lightning base URL ('baseurl') and the
    namespace-prefixed object name ('objectname')."""
    return {
        'baseurl': self.cumulusci.org.lightning_base_url,
        'objectname': "{}{}".format(self.cumulusci.get_namespace_prefix(), name),
    }
def check_submenu_link_exists(self, title):
    """Return True if the submenu link titled ``title`` is present under the
    main menu, False otherwise."""
    link_xpath = npsp_lex_locators['link-text'].format(title)
    return bool(self.npsp.check_if_element_exists(link_xpath))
def click_special_button(self, title):
    """This keyword is similar to click button but uses set focus to button and javascript.
    In the cases where javascript is being triggered on moving away from a field,
    click button doesn't seem to work in headless mode, hence using actionchains to move focus
    out of the field and clicking on screen before performing the actual click."""
    actions = ActionChains(self.selenium.driver)
    # Click a neutral spot first so any on-blur javascript on the previously
    # focused field fires before the real button is pressed.
    actions.move_by_offset(0, 20).click().perform()
    if title=="Schedule Payments" and self.latest_api_version == 50.0:
        # API v50 renders this particular button with a dedicated locator.
        locator=npsp_lex_locators['schedule_payments'].format(title)
    else:
        locator=npsp_lex_locators['button-with-text'].format(title)
    element = self.selenium.driver.find_element_by_xpath(locator)
    self.salesforce.scroll_element_into_view(locator)
    self.selenium.set_focus_to_element(locator)
    self.selenium.driver.execute_script('arguments[0].click()', element)
def verify_record_count_for_object(self, object, count, **kwargs):
    """Query ``object`` with the given field=value filters and verify that
    the number of matching records equals ``count``."""
    matched = self.salesforce.salesforce_query(object, **kwargs)
    actual_count = len(matched)
    if actual_count != int(count):
        raise Exception(f'Expected total count of records to be {count} but found {actual_count}')
def get_record_id_from_field_link(self, locator, field):
    """Using the locator for a field, gets the link and, using the href url
    of the link, extracts and returns the record id.
    Ex: Get Record Id From Field link    bge.value    Donation"""
    value=self.return_locator_value(locator,field)
    locator=npsp_lex_locators['link-text'].format(value)
    url=self.selenium.get_webelement(locator).get_attribute('href')
    # Walk the URL path segments until one matches OID_REGEX (defined
    # elsewhere in this module); group(2) is presumably the id capture group
    # of that pattern -- confirm against the regex definition.
    for part in url.split("/"):
        oid_match = re.match(OID_REGEX, part)
        if oid_match is not None:
            return oid_match.group(2)
    raise AssertionError(f"Could not parse record id from url: {url}")
def query_object_and_return_id(self, object_name, **kwargs):
    """Query ``object_name`` with the given field=value filters and return
    the Ids of all matching records."""
    # Local renamed from `list`, which shadowed the builtin.
    matches = self.salesforce.salesforce_query(object_name, **kwargs)
    ids = [record['Id'] for record in matches]
    print(f"ID's saved are: {ids}")
    return ids
def query_and_store_records_to_delete(self, object_name, **kwargs):
    """Query ``object_name`` with the given field=value filters and store the
    id of each matching record so it is deleted during suite teardown."""
    matched_ids = self.query_object_and_return_id(object_name, **kwargs)
    if matched_ids:
        for record_id in matched_ids:
            self.salesforce.store_session_record(object_name, record_id)
    @capture_screenshot_on_error
    def verify_table_contains_row(self,table_name,record,**kwargs):
        """verifies that batch number format table contains a record with given name
        and record field contains specified value. Example usage:
        Verify Table Contains Row    | Batches | MyBatch  |  Batch Number=MyBatch-01
        """
        # Each kwarg is a column-name/expected-value pair checked against the
        # datatable cell located by (table_name, record, column).
        for key,value in kwargs.items():
            locator=npsp_lex_locators['datatable'].format(table_name,record,key)
            actual=self.selenium.get_text(locator)
            print(f'actual value is {actual}')
            if actual==value:
                print(f'Table contains {record} with expected {key}={value}')
            # The literal string 'None' is used by callers to assert an empty cell.
            elif value=='None' and actual=='':
                print(f'Table contains {record} with empty {key} as expected')
            else:
                raise Exception(f'Table did not contain {record} with expected {key}={value}')
def run_flow(self, flow_name):
"""
Runs the specified cci flow
"""
from cumulusci.core.flowrunner import FlowCoordinator
flow_config = self.cumulusci.project_config.get_flow(flow_name)
flow = FlowCoordinator(self.cumulusci.project_config, flow_config, flow_name)
flow.run(self.cumulusci.org)
    @capture_screenshot_on_error
    def wait_until_bge_batch_processes(self, batch_name, contents=None):
        """Clicks the 'Process Batch' BGE button and waits for the processing to complete."""
        batchsuccess=npsp_lex_locators["gift_entry"]["success_toast"].format(batch_name)
        if contents=='has_cc_gifts':
            # Credit-card gifts process slowly, so fall back to a fixed sleep
            # rather than waiting on the toast element.
            self.builtin.sleep(180,"Waiting for all gifts to process")
            #Code is commented out until credit card gift processing speed is increased.
            #self.selenium.wait_until_page_does_not_contain("This can take a while. Check back in a bit!",60)
            #self.selenium.wait_until_element_is_visible(batchsuccess,60)
        else:
            # Wait (up to 180s each) for the in-progress banner to clear and
            # then for the success toast to appear.
            self.selenium.wait_until_page_does_not_contain("This can take a while. Check back in a bit!",180)
            self.selenium.wait_until_element_is_visible(batchsuccess,180)
41829ce695ff6e9aceef54fa36f63ffb47119104 | 1,402 | py | Python | gokart/zip_client.py | skmatz/gokart | ba1dc497dca1c7901bc861f49b1f081adc2a1888 | [
"MIT"
] | null | null | null | gokart/zip_client.py | skmatz/gokart | ba1dc497dca1c7901bc861f49b1f081adc2a1888 | [
"MIT"
] | null | null | null | gokart/zip_client.py | skmatz/gokart | ba1dc497dca1c7901bc861f49b1f081adc2a1888 | [
"MIT"
] | null | null | null | import os
import shutil
import zipfile
from abc import abstractmethod
from typing import Union, IO
def _unzip_file(fp: Union[str, IO, os.PathLike], extract_dir: str) -> None:
    """Extract all members of the zip archive ``fp`` into ``extract_dir``.

    Uses a context manager so the archive handle is closed even when
    extraction raises (the original leaked the handle on error).
    """
    with zipfile.ZipFile(fp) as zip_file:
        zip_file.extractall(extract_dir)
class ZipClient(object):
    """Abstract interface for archiving a directory to/from a zip file.

    NOTE(review): this class does not use abc.ABCMeta, so the
    @abstractmethod decorators are not actually enforced at
    instantiation time — confirm whether that is intended.
    """
    @abstractmethod
    def exists(self) -> bool:
        """Return True if the archive exists."""
        pass

    @abstractmethod
    def make_archive(self) -> None:
        """Create the archive from the working directory."""
        pass

    @abstractmethod
    def unpack_archive(self) -> None:
        """Extract the archive into the working directory."""
        pass

    @abstractmethod
    def remove(self) -> None:
        """Delete the archive."""
        pass

    @property
    @abstractmethod
    def path(self) -> str:
        """Filesystem path (or identifier) of the archive."""
        pass


class LocalZipClient(ZipClient):
    """ZipClient backed by a zip file on the local filesystem."""

    def __init__(self, file_path: str, temporary_directory: str) -> None:
        self._file_path = file_path
        self._temporary_directory = temporary_directory

    def exists(self) -> bool:
        """Return True if the zip file exists on disk."""
        return os.path.exists(self._file_path)

    def make_archive(self) -> None:
        """Zip the contents of the temporary directory into the archive path."""
        # splitext gives e.g. ('/tmp/out', '.zip'); shutil appends the format
        # suffix, so pass the base name and the extension (without the dot).
        [base, extension] = os.path.splitext(self._file_path)
        shutil.make_archive(base_name=base, format=extension[1:], root_dir=self._temporary_directory)

    def unpack_archive(self) -> None:
        """Extract the archive into the temporary directory."""
        _unzip_file(fp=self._file_path, extract_dir=self._temporary_directory)

    def remove(self) -> None:
        """Delete the zip archive.

        BUG FIX: the original called shutil.rmtree(..., ignore_errors=True)
        on a regular file; rmtree raises NotADirectoryError for files, the
        error was swallowed, and the archive was never actually removed.
        """
        if os.path.isfile(self._file_path):
            os.remove(self._file_path)
        else:
            # Keep the old directory behavior for any non-file path.
            shutil.rmtree(self._file_path, ignore_errors=True)

    @property
    def path(self) -> str:
        """Filesystem path of the zip archive."""
        return self._file_path
| 24.172414 | 101 | 0.674037 |
d96d04c83879d095af323ee8ee83e91e3c1b50b0 | 745 | py | Python | supcon/python_scripts/run_baseline_a100.py | hanseungwook/google-research | e53a66b9388b34c456b7cbcd276e36099220a26e | [
"Apache-2.0"
] | null | null | null | supcon/python_scripts/run_baseline_a100.py | hanseungwook/google-research | e53a66b9388b34c456b7cbcd276e36099220a26e | [
"Apache-2.0"
] | null | null | null | supcon/python_scripts/run_baseline_a100.py | hanseungwook/google-research | e53a66b9388b34c456b7cbcd276e36099220a26e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import os

# NOTE(review): submit_job is imported but unused; kept for compatibility.
from cvar_pyutils.ccc import submit_job, submit_dependant_jobs

# Extend PYTHONPATH without assuming it is already set: the original
# `os.environ['PYTHONPATH'] += ...` raises KeyError when the variable is
# absent from the environment.
os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + ':/dccstor/hansri/models/'
os.environ['TPU'] = 'CCC'
os.environ['DATA_DIR'] = '~/dccstor/hansri/data/'
os.environ['MODEL_DIR'] = '~/dccstor/hansri/google-research/supcon/trained_models1/'

# Submit 16 chained (rolling) cluster jobs running the supcon training script.
submit_dependant_jobs(number_of_rolling_jobs=16, command_to_run='scripts/supcon_imagenet_resnet50.sh --mode=train_then_eval --tpu_name=CCC --data_dir=~/dccstor/hansri/data/imagenet2012/ --model_dir=$~/dccstor/hansri/google-research/supcon/trained_models1 --use_tpu=False', machine_type='x86', time='6h', num_cores=32, num_gpus=4, mem='400g', gpu_type='a100', conda_env='tf-new',
 mail_log_file_when_done='seungwook.han@ibm.com')
9ac34f355402aa41d2e5b63095ab4346d438e807 | 4,405 | py | Python | .history/src/data/data_20191019134602.py | bkraft4257/kaggle_titanic | f29ea1773773109a867278c001dbd21a9f7b21dd | [
"MIT"
] | null | null | null | .history/src/data/data_20191019134602.py | bkraft4257/kaggle_titanic | f29ea1773773109a867278c001dbd21a9f7b21dd | [
"MIT"
] | null | null | null | .history/src/data/data_20191019134602.py | bkraft4257/kaggle_titanic | f29ea1773773109a867278c001dbd21a9f7b21dd | [
"MIT"
] | null | null | null | import pandas as pd
from typing import Union
from pathlib import Path
from nameparser import HumanName
class ExtractData:
    """Load raw Kaggle Titanic training data from a CSV file.

    On construction the frame is normalised: column names are lower-cased
    with spaces replaced by underscores, ``age`` is renamed ``age_known``,
    ``pclass`` becomes categorical, and ``passengerid`` is the index.
    """

    def __init__(self, filename: Union[str, Path], drop_columns=None):
        self.filename = filename
        # Default columns considered droppable by downstream cleaning.
        self.drop_columns = (
            ["age", "cabin", "name", "ticket"] if drop_columns is None else drop_columns
        )
        self.all_label_columns = ["survived"]
        self.all_feature_columns = [
            "pclass",
            "name",
            "sex",
            "age",
            "sibsp",
            "parch",
            "ticket",
            "fare",
            "cabin",
            "embarked",
        ]
        self.Xy_raw = None
        self.extract_raw()

    def extract_raw(self):
        """Read the CSV and store the normalised frame on ``self.Xy_raw``."""
        frame = pd.read_csv(self.filename)
        frame.columns = frame.columns.str.lower().str.replace(" ", "_")
        frame = frame.rename(columns={"age": "age_known"})
        frame["pclass"] = frame["pclass"].astype("category")
        self.Xy_raw = frame.set_index("passengerid")
class TransformData:
    """Feature engineering over the raw frame produced by ExtractData.

    Derives title/last-name/cabin features, fills missing ages from
    group means, and adds child/travelling-alone flags.
    """

    # Normalise rare honorifics onto the common Mr./Mrs. titles.
    title_translator = {
        "Mlle.": "Mrs.",
        "Mme.": "Mrs.",
        "Sir.": "Mr.",
        "Ms.": "Mrs.",
        "Rev.": "Mr.",
        "": "Mr.",
        "Col.": "Mr.",
        "Capt.": "Mr.",
        "Lady.": "Mrs.",
        "the Countess. of": "Mrs.",
    }

    def __init__(
        self,
        raw_data,
        adult_age_threshold_min=13,
        Xy_age_estimate=None,
        drop_columns=None,
    ):
        """Build the engineered frame from ``raw_data`` (an ExtractData).

        adult_age_threshold_min -- ages below this count as a child.
        Xy_age_estimate -- optional precomputed per-group age estimates.
        drop_columns -- columns that clean() should remove.
        """
        if drop_columns is None:
            drop_columns = ["age", "cabin", "name", "ticket"]
        # BUG FIX: the original never stored drop_columns, so clean()
        # failed with AttributeError.
        self.drop_columns = drop_columns
        self.raw = raw_data
        self.adult_age_threshold_min = adult_age_threshold_min
        self.Xy_age_estimate = Xy_age_estimate
        self.Xy = self.raw.Xy_raw.copy()
        self.extract_title()
        self.extract_last_name()
        self.extract_cabin_number()
        self.extract_cabin_prefix()
        self.estimate_age()
        self.calc_is_child()

    def calc_is_travelling_alone(self):
        # BUG FIX: the original line ended with a dangling "|" operator,
        # which is a SyntaxError. A passenger travels alone when they have
        # no siblings/spouses (sibsp) and no parents/children (parch).
        # TODO(review): confirm this matches the intended definition.
        self.Xy["is_travelling_alone"] = (self.Xy.sibsp + self.Xy.parch) == 0

    def calc_is_child(self):
        # Flag passengers younger than the adult threshold.
        self.Xy["is_child"] = self.Xy.age < self.adult_age_threshold_min

    def extract_cabin_number(self):
        # Trailing digits of the ticket string.
        self.Xy["cabin_number"] = self.Xy.ticket.str.extract("(\d+)$")

    def extract_cabin_prefix(self):
        # Leading non-numeric portion of the ticket string.
        self.Xy["cabin_prefix"] = self.Xy.ticket.str.extract("^(.+) ")

    def extract_title(self):
        """Parse the honorific title out of the name column and normalise it."""
        self.Xy["title"] = (
            self.Xy.name.apply(lambda x: HumanName(x).title)
            .replace(self.title_translator)
            .replace({"\.": ""}, regex=True)
        )

    def extract_last_name(self):
        """Parse the family name out of the name column."""
        self.Xy["last_name"] = self.Xy.name.apply(lambda x: HumanName(x).last)

    def clean(self,):
        """Remove the configured droppable columns from the working frame.

        BUG FIX: the original referenced self.Xy_raw, which does not exist
        on this class (leftover from ExtractData), and always raised.
        """
        self.Xy = self.Xy.drop(self.drop_columns, axis=1)

    def estimate_age(self, groupby_columns=["sex", "title"]):
        """Fill missing ages using per-group means of the known ages.

        Keyword Arguments:
            groupby_columns {list} -- grouping keys (default: {['sex','title']})
        """
        if self.Xy_age_estimate is None:
            Xy_age_estimate = (
                self.Xy.groupby(groupby_columns).age_known.mean().to_frame().round(1)
            )
            Xy_age_estimate = Xy_age_estimate.rename(
                columns={"age_known": "age_estimate"}
            )
        else:
            # Reuse the caller-supplied estimates (the original skipped the
            # merge entirely in this case, leaving 'age' unset).
            Xy_age_estimate = self.Xy_age_estimate
        out_df = self.Xy.reset_index().merge(Xy_age_estimate, on=groupby_columns)
        out_df["age"] = out_df["age_known"].fillna(out_df["age_estimate"])
        self.Xy = out_df
        self.Xy_age_estimate = Xy_age_estimate
| 29.763514 | 114 | 0.558456 |
d5c69b3ec9fd310e3dca9c2c489e5befcbd21617 | 6,855 | py | Python | scripts/backup.py | midoks/mdweb | 06455b094302645d7501c7de02921194ca05f4d6 | [
"Apache-2.0"
] | 26 | 2019-04-07T15:59:52.000Z | 2022-03-30T15:04:26.000Z | scripts/backup.py | midoks/mdweb | 06455b094302645d7501c7de02921194ca05f4d6 | [
"Apache-2.0"
] | 6 | 2020-10-03T13:58:39.000Z | 2021-10-31T16:38:13.000Z | scripts/backup.py | midoks/mdweb | 06455b094302645d7501c7de02921194ca05f4d6 | [
"Apache-2.0"
] | 8 | 2019-09-01T00:00:42.000Z | 2022-03-14T05:23:45.000Z | #!/usr/bin/python
# coding: utf-8
#-----------------------------
# 网站备份工具
#-----------------------------
import sys
import os
if sys.platform != 'darwin':
os.chdir('/www/server/mdserver-web')
chdir = os.getcwd()
sys.path.append(chdir + '/class/core')
reload(sys)
sys.setdefaultencoding('utf-8')
import mw
import db
import time
class backupTools:
    # Website/database backup helper (Python 2), driven by the CLI at the
    # bottom of this file; shells out for tar/mysqldump and records results
    # in the panel's sqlite tables.

    def backupSite(self, name, count):
        # Back up the website `name` into a tar.gz and keep only the newest
        # `count` backups.
        sql = db.Sql()
        path = sql.table('sites').where('name=?', (name,)).getField('path')
        startTime = time.time()
        if not path:
            endDate = time.strftime('%Y/%m/%d %X', time.localtime())
            log = u"网站[" + name + "]不存在!"
            print(u"★[" + endDate + "] " + log)
            print(
                "----------------------------------------------------------------------------")
            return
        backup_path = mw.getRootDir() + '/backup/site'
        if not os.path.exists(backup_path):
            mw.execShell("mkdir -p " + backup_path)
        # Timestamped archive name, then tar the site directory from its parent.
        filename = backup_path + "/web_" + name + "_" + \
            time.strftime('%Y%m%d_%H%M%S', time.localtime()) + '.tar.gz'
        mw.execShell("cd " + os.path.dirname(path) + " && tar zcvf '" +
                     filename + "' '" + os.path.basename(path) + "' > /dev/null")
        endDate = time.strftime('%Y/%m/%d %X', time.localtime())
        print filename
        if not os.path.exists(filename):
            log = u"网站[" + name + u"]备份失败!"
            print(u"★[" + endDate + "] " + log)
            print(
                u"----------------------------------------------------------------------------")
            return
        # Record the successful backup (type 0 = site) in the backup table.
        outTime = time.time() - startTime
        pid = sql.table('sites').where('name=?', (name,)).getField('id')
        sql.table('backup').add('type,name,pid,filename,addtime,size', ('0', os.path.basename(
            filename), pid, filename, endDate, os.path.getsize(filename)))
        log = u"网站[" + name + u"]备份成功,用时[" + str(round(outTime, 2)) + u"]秒"
        mw.writeLog(u'计划任务', log)
        print(u"★[" + endDate + "] " + log)
        print(u"|---保留最新的[" + count + u"]份备份")
        print(u"|---文件名:" + filename)
        # Remove backups beyond the retention count.
        backups = sql.table('backup').where(
            'type=? and pid=?', ('0', pid)).field('id,filename').select()
        num = len(backups) - int(count)
        if num > 0:
            for backup in backups:
                mw.execShell("rm -f " + backup['filename'])
                sql.table('backup').where('id=?', (backup['id'],)).delete()
                num -= 1
                print(u"|---已清理过期备份文件:" + backup['filename'])
                if num < 1:
                    break

    def backupDatabase(self, name, count):
        # Back up MySQL database `name` via mysqldump|gzip; temporarily
        # injects the root password into my.cnf's [mysqldump] section.
        db_path = mw.getServerDir() + '/mysql'
        db_name = 'mysql'
        name = mw.M('databases').dbPos(db_path, 'mysql').where(
            'name=?', (name,)).getField('name')
        startTime = time.time()
        if not name:
            endDate = time.strftime('%Y/%m/%d %X', time.localtime())
            log = u"数据库[" + name + u"]不存在!"
            print(u"★[" + endDate + "] " + log)
            print(
                u"----------------------------------------------------------------------------")
            return
        backup_path = mw.getRootDir() + '/backup/database'
        if not os.path.exists(backup_path):
            mw.execShell("mkdir -p " + backup_path)
        filename = backup_path + "/db_" + name + "_" + \
            time.strftime('%Y%m%d_%H%M%S', time.localtime()) + ".sql.gz"
        import re
        mysql_root = mw.M('config').dbPos(db_path, db_name).where(
            "id=?", (1,)).getField('mysql_root')
        # Splice "user/password" under [mysqldump] so mysqldump can log in;
        # reverted again after the dump below.
        mycnf = mw.readFile(db_path + '/etc/my.cnf')
        rep = "\[mysqldump\]\nuser=root"
        sea = "[mysqldump]\n"
        subStr = sea + "user=root\npassword=" + mysql_root + "\n"
        mycnf = mycnf.replace(sea, subStr)
        # Sanity check: never write back a truncated config file.
        if len(mycnf) > 100:
            mw.writeFile(db_path + '/etc/my.cnf', mycnf)
        mw.execShell(
            db_path + "/bin/mysqldump --opt --default-character-set=utf8 " + name + " | gzip > " + filename)
        if not os.path.exists(filename):
            endDate = time.strftime('%Y/%m/%d %X', time.localtime())
            log = u"数据库[" + name + u"]备份失败!"
            print(u"★[" + endDate + "] " + log)
            print(
                u"----------------------------------------------------------------------------")
            return
        # Restore my.cnf to its original (password-free) contents.
        mycnf = mw.readFile(db_path + '/etc/my.cnf')
        mycnf = mycnf.replace(subStr, sea)
        if len(mycnf) > 100:
            mw.writeFile(db_path + '/etc/my.cnf', mycnf)
        # Record the successful backup (type 1 = database).
        endDate = time.strftime('%Y/%m/%d %X', time.localtime())
        outTime = time.time() - startTime
        pid = mw.M('databases').dbPos(db_path, db_name).where(
            'name=?', (name,)).getField('id')
        mw.M('backup').add('type,name,pid,filename,addtime,size', (1, os.path.basename(
            filename), pid, filename, endDate, os.path.getsize(filename)))
        log = u"数据库[" + name + u"]备份成功,用时[" + str(round(outTime, 2)) + u"]秒"
        mw.writeLog(u'计划任务', log)
        print("★[" + endDate + "] " + log)
        print(u"|---保留最新的[" + count + u"]份备份")
        print(u"|---文件名:" + filename)
        # Remove backups beyond the retention count.
        backups = mw.M('backup').where(
            'type=? and pid=?', ('1', pid)).field('id,filename').select()
        num = len(backups) - int(count)
        if num > 0:
            for backup in backups:
                mw.execShell("rm -f " + backup['filename'])
                mw.M('backup').where('id=?', (backup['id'],)).delete()
                num -= 1
                print(u"|---已清理过期备份文件:" + backup['filename'])
                if num < 1:
                    break

    def backupSiteAll(self, save):
        # Back up every configured site, keeping `save` copies each.
        sites = mw.M('sites').field('name').select()
        for site in sites:
            self.backupSite(site['name'], save)

    def backupDatabaseAll(self, save):
        # Back up every configured database, keeping `save` copies each.
        db_path = mw.getServerDir() + '/mysql'
        db_name = 'mysql'
        databases = mw.M('databases').dbPos(
            db_path, db_name).field('name').select()
        for database in databases:
            self.backupDatabase(database['name'], save)
if __name__ == "__main__":
    # CLI: backup.py <site|database> <name|ALL> <keep_count>
    tool = backupTools()
    task = sys.argv[1]
    if task == 'site':
        target = sys.argv[2]
        if target == 'ALL':
            tool.backupSiteAll(sys.argv[3])
        else:
            tool.backupSite(target, sys.argv[3])
    elif task == 'database':
        target = sys.argv[2]
        if target == 'ALL':
            tool.backupDatabaseAll(sys.argv[3])
        else:
            tool.backupDatabase(target, sys.argv[3])
| 37.054054 | 109 | 0.46229 |
a1f70138934b9e963b4506263549553f67655a19 | 8,157 | py | Python | contrib/devtools/update-translations.py | worldpaycoin/WPAY | 51c2e7fc8c68bf108c44b46909efadfc27f4ce81 | [
"MIT"
] | null | null | null | contrib/devtools/update-translations.py | worldpaycoin/WPAY | 51c2e7fc8c68bf108c44b46909efadfc27f4ce81 | [
"MIT"
] | null | null | null | contrib/devtools/update-translations.py | worldpaycoin/WPAY | 51c2e7fc8c68bf108c44b46909efadfc27f4ce81 | [
"MIT"
] | 1 | 2019-11-08T21:12:01.000Z | 2019-11-08T21:12:01.000Z | #!/usr/bin/env python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
from __future__ import division, print_function
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'worldpaycoin_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for translation to be considered at all
MIN_NUM_MESSAGES = 10
def check_at_repository_root():
    """Exit with status 1 unless the current directory is a repository root."""
    if os.path.exists('.git'):
        return
    print('No .git directory found')
    print('Execute this script at the root of the repository', file=sys.stderr)
    exit(1)
def fetch_all_translations():
    """Pull every translation with the tx tool; exit(1) on failure."""
    rc = subprocess.call([TX, 'pull', '-f', '-a'])
    if rc:
        print('Error while fetching translations', file=sys.stderr)
        exit(1)
def find_format_specifiers(s):
    '''Find all format specifiers in a string.

    Returns the single character following each '%'. A bare '%' at the
    very end of the string contributes nothing.
    '''
    pos = 0
    specifiers = []
    while True:
        percent = s.find('%', pos)
        if percent < 0:
            break
        try:
            specifiers.append(s[percent+1])
        except IndexError:
            # '%' was the last character. The original used a bare
            # `except:` here, which also swallowed unrelated errors such
            # as KeyboardInterrupt; catch only the expected IndexError.
            print('Failed to get specifier')
        pos = percent+2
    return specifiers
def split_format_specifiers(specifiers):
    '''Split format specifiers between numeric (Qt) and others (strprintf)'''
    digits = {'1', '2', '3', '4', '5', '6', '7', '8', '9'}
    numeric = [s for s in specifiers if s in digits]
    other = [s for s in specifiers if s not in digits]
    # If both numeric format specifiers and "others" are used, assume we're dealing
    # with a Qt-formatted message. In the case of Qt formatting (see https://doc.qt.io/qt-5/qstring.html#arg)
    # only numeric formats are replaced at all. This means "(percentage: %1%)" is valid, without needing
    # any kind of escaping that would be necessary for strprintf. Without this, this function
    # would wrongly detect '%)' as a printf format specifier.
    if numeric:
        other = []
    # numeric (Qt) can be present in any order, others (strprintf) must be in specified order
    return set(numeric), other
def sanitize_string(s):
    '''Sanitize string for printing (newlines become spaces).'''
    return ' '.join(s.split('\n'))
def check_format_specifiers(source, translation, errors, numerus):
    # Verify that `translation` uses the same format specifiers as `source`.
    # Appends human-readable problems to `errors`; returns False when the
    # translation should be marked unfinished.
    source_f = split_format_specifiers(find_format_specifiers(source))
    # assert that no source messages contain both Qt and strprintf format specifiers
    # if this fails, go change the source as this is hacky and confusing!
    assert(not(source_f[0] and source_f[1]))
    try:
        translation_f = split_format_specifiers(find_format_specifiers(translation))
    except IndexError:
        errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
        return False
    else:
        if source_f != translation_f:
            if numerus and source_f == (set(), ['n']) and translation_f == (set(), []) and translation.find('%') == -1:
                # Allow numerus translations to omit %n specifier (usually when it only has one possible value)
                return True
            errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
            return False
    return True
def all_ts_files(suffix=''):
    # Yield (filename, filepath) for every translation file in LOCALE_DIR
    # ending in '.ts'+suffix, excluding the source language file. When a
    # suffix is given it is stripped from the yielded filename.
    for filename in os.listdir(LOCALE_DIR):
        # process only language files, and do not process source language
        if not filename.endswith('.ts'+suffix) or filename == SOURCE_LANG+suffix:
            continue
        if suffix: # remove provided suffix
            filename = filename[0:-len(suffix)]
        filepath = os.path.join(LOCALE_DIR, filename)
        yield(filename, filepath)
# C0 control bytes that are invalid in XML: everything below 0x20 except
# newline (\x0a) and carriage return (\x0d). Note this also strips tabs (\x09).
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
    '''Remove invalid characters from translation string'''
    return FIX_RE.sub(b'', s)
# Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for
# comparison, disable by default)
# _orig_escape_cdata is populated by postprocess_translations() with the
# original ElementTree escape function before this override is installed.
_orig_escape_cdata = None
def escape_cdata(text):
    # Apply the stock escaping first, then additionally escape quotes the
    # way Qt's linguist tools do.
    text = _orig_escape_cdata(text)
    text = text.replace("'", '&apos;')
    text = text.replace('"', '&quot;')
    return text
def postprocess_translations(reduce_diff_hacks=False):
    # Validate and clean every fetched .ts file in place: strip invalid
    # control characters, drop messages with bad format specifiers, remove
    # <location> tags and unfinished translations, and delete files with
    # too few messages. Returns True if any errors were found.
    print('Checking and postprocessing...')
    if reduce_diff_hacks:
        global _orig_escape_cdata
        _orig_escape_cdata = ET._escape_cdata
        ET._escape_cdata = escape_cdata
    # Work on '.orig' copies so the cleaned output replaces the originals.
    for (filename,filepath) in all_ts_files():
        os.rename(filepath, filepath+'.orig')
    have_errors = False
    for (filename,filepath) in all_ts_files('.orig'):
        # pre-fixups to cope with transifex output
        parser = ET.XMLParser(encoding='utf-8') # need to override encoding because 'utf8' is not understood only 'utf-8'
        with open(filepath + '.orig', 'rb') as f:
            data = f.read()
        # remove control characters; this must be done over the entire file otherwise the XML parser will fail
        data = remove_invalid_characters(data)
        tree = ET.parse(io.BytesIO(data), parser=parser)
        # iterate over all messages in file
        root = tree.getroot()
        for context in root.findall('context'):
            for message in context.findall('message'):
                numerus = message.get('numerus') == 'yes'
                source = message.find('source').text
                translation_node = message.find('translation')
                # pick all numerusforms
                if numerus:
                    translations = [i.text for i in translation_node.findall('numerusform')]
                else:
                    translations = [translation_node.text]
                for translation in translations:
                    if translation is None:
                        continue
                    errors = []
                    valid = check_format_specifiers(source, translation, errors, numerus)
                    for error in errors:
                        print('%s: %s' % (filename, error))
                    if not valid: # set type to unfinished and clear string if invalid
                        translation_node.clear()
                        translation_node.set('type', 'unfinished')
                        have_errors = True
                # Remove location tags
                for location in message.findall('location'):
                    message.remove(location)
                # Remove entire message if it is an unfinished translation
                if translation_node.get('type') == 'unfinished':
                    context.remove(message)
        # check if document is (virtually) empty, and remove it if so
        num_messages = 0
        for context in root.findall('context'):
            for message in context.findall('message'):
                num_messages += 1
        if num_messages < MIN_NUM_MESSAGES:
            print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
            continue
        # write fixed-up tree
        # if diff reduction requested, replace some XML to 'sanitize' to qt formatting
        if reduce_diff_hacks:
            out = io.BytesIO()
            tree.write(out, encoding='utf-8')
            out = out.getvalue()
            out = out.replace(b' />', b'/>')
            with open(filepath, 'wb') as f:
                f.write(out)
        else:
            tree.write(filepath, encoding='utf-8')
    return have_errors
if __name__ == '__main__':
    # Must run from the repo root; fetch all translations, then clean them.
    check_at_repository_root()
    fetch_all_translations()
    postprocess_translations()
| 38.658768 | 124 | 0.634179 |
ed51903dcb452b908d79383334bbdbb0b31b1cb8 | 8,154 | py | Python | zhixuewang/models.py | SherkeyXD/zhixuewang-python | 16c59a3c4bb78269ec8b59f91aff2aab3c8f19e9 | [
"MIT"
] | null | null | null | zhixuewang/models.py | SherkeyXD/zhixuewang-python | 16c59a3c4bb78269ec8b59f91aff2aab3c8f19e9 | [
"MIT"
] | null | null | null | zhixuewang/models.py | SherkeyXD/zhixuewang-python | 16c59a3c4bb78269ec8b59f91aff2aab3c8f19e9 | [
"MIT"
] | null | null | null | from enum import Enum
from typing import List, Callable, TypeVar
from dataclasses import dataclass, field
from zhixuewang.tools.datetime_tool import get_property
T = TypeVar("T")


class ExtendedList(List[T]):
    """A list subclass with predicate/name/id lookup helpers."""

    def __init__(self, l: List[T] = None):
        # ``l or list()`` treats None (and any empty iterable) as an empty list.
        super().__init__(l or list())

    def foreach(self, f: Callable[[T], None]):
        """Apply ``f`` to every element (for its side effects)."""
        for item in self:
            f(item)

    def find(self, f: Callable[[T], bool]) -> T:
        """Return the first element satisfying ``f``, or None if there is none."""
        return next((item for item in self if f(item)), None)

    def find_all(self, f: Callable[[T], bool]) -> List[T]:
        """Return every element satisfying ``f`` as a new ExtendedList."""
        return ExtendedList([item for item in self if f(item)])

    def find_by_name(self, name: str) -> T:
        """Return the first element whose ``name`` attribute equals ``name``."""
        return self.find(lambda item: item.name == name)

    def find_all_by_name(self, name: str) -> List[T]:
        """Return all elements whose ``name`` attribute equals ``name``."""
        return self.find_all(lambda item: item.name == name)

    def find_by_id(self, id: str) -> T:
        """Return the first element whose ``id`` attribute equals ``id``."""
        return self.find(lambda item: item.id == id)

    def find_all_by_id(self, id: str) -> List[T]:
        """Return all elements whose ``id`` attribute equals ``id``."""
        return self.find_all(lambda item: item.id == id)
@dataclass
class Phase:
    """School stage/phase, e.g. grade 7 or grade 8."""
    name: str = ""
    code: str = ""
@dataclass
class Grade:
    """A school grade/year, belonging to a Phase."""
    name: str = ""
    code: str = ""
    phase: Phase = field(default_factory=Phase)
@dataclass
class School:
    """A school, identified by id and displayed by name."""
    id: str = ""
    name: str = ""

    def __str__(self):
        return self.name
class Sex(Enum):
    """Gender; the values are the Chinese display strings."""
    GIRL = "女"
    BOY = "男"

    def __str__(self):
        # Render as the human-readable value, not the member name.
        return self._value_
@dataclass(eq=False)
class StuClass:
    """A class (homeroom) within a school."""
    id: str = ""
    name: str = ""
    grade: Grade = field(default_factory=Grade, repr=False)
    school: School = field(default_factory=School, repr=False)

    # eq=False: equality is hand-rolled as exact-type + same id.
    def __eq__(self, other):
        return type(other) == type(self) and other.id == self.id

    def __str__(self):
        return f"学校: {self.school} 班级: {self.name}"

    def __repr__(self):
        return f"StuClass(id={self.id}, name={self.name}, school={self.school.__repr__()})"
@dataclass(repr=False)
class Person:
    """Basic personal attributes shared by students/teachers."""
    id: str = ""
    name: str = ""
    gender: Sex = Sex.GIRL
    email: str = ""
    mobile: str = ""
    qq_number: str = ""
    # Raw epoch timestamp; exposed as a datetime via the `birthday` property.
    _birthday_timestamp: float = 0
    birthday = get_property("_birthday_timestamp")
    avatar: str = ""
@dataclass(repr=False)
class StuPerson(Person):
    """Student-specific information on top of Person."""
    # Exam admission-ticket ("code") number.
    code: str = ""
    clazz: StuClass = field(default_factory=StuClass, repr=False)

    def __str__(self):
        return f"{self.clazz} 姓名: {self.name} 性别: {self.gender} " \
            f"{f'QQ: {self.qq_number} ' if self.qq_number != '' else ''}" \
            f"{f'手机号码: {self.mobile}' if self.mobile != '' else ''}"

    def __repr__(self):
        return f"Person(id={self.id}, clazz={self.clazz.__repr__()}, name={self.name}, gender={self.gender}" \
            f"{f', qq_number={self.qq_number}' if self.qq_number != '' else ''}" \
            f"{f', mobile={self.mobile}' if self.mobile != '' else ''}" + ")"
@dataclass(eq=False)
class Exam:
    """An exam event with its schools, subjects and timing metadata."""
    id: str = ""
    name: str = ""
    status: str = ""
    grade_code: str = ""
    subject_codes: ExtendedList[str] = field(default_factory=ExtendedList, repr=False)
    schools: ExtendedList[School] = field(default_factory=ExtendedList, repr=False)
    create_school: School = field(default_factory=School, repr=False)
    create_user: Person = field(default_factory=Person, repr=False)
    # Raw epoch timestamps, exposed as datetimes via the *_time properties.
    _create_timestamp: float = field(default=0, repr=False)
    create_time = get_property("_create_timestamp")
    _exam_timestamp: float = field(default=0, repr=False)
    exam_time = get_property("_exam_timestamp")
    _complete_timestamp: float = field(default=0, repr=False)
    complete_time = get_property("_complete_timestamp")
    class_rank: int = field(default=0, repr=False)
    grade_rank: int = field(default=0, repr=False)
    is_final: bool = False

    # An exam is truthy once it has an id.
    def __bool__(self):
        return bool(self.id)

    # eq=False: equality is hand-rolled as exact-type + same id.
    def __eq__(self, other):
        return type(other) == type(self) and other.id == self.id
@dataclass(eq=False)
class Subject:
    """A subject (course) within an exam."""
    id: str = ""
    name: str = ""
    code: str = ""
    # Full mark for the subject.
    standard_score: float = 0
    status: str = field(default="", repr=False)
    exam: Exam = field(default_factory=Exam, repr=False)
    create_user: Person = field(default_factory=Person, repr=False)
    _create_timestamp: float = field(default=0, repr=False)
    create_time = get_property("_create_timestamp")

    # eq=False: equality is hand-rolled as exact-type + same id.
    def __eq__(self, other):
        return type(other) == type(self) and other.id == self.id
@dataclass(eq=False)
class ExamInfo(Exam):
    # Exam enriched with the class it belongs to and its Subject objects.
    classId: str = ""
    subjects: ExtendedList[Subject] = field(default_factory=ExtendedList, repr=False)
@dataclass
class ExtraRank:
    """Auxiliary score statistics: rank / average / lowest / highest."""
    rank: int = 0
    avg_score: float = 0
    low_score: float = 0
    high_score: float = 0

    def __bool__(self):
        # Truthy only when at least one statistic is populated.
        return any((self.rank, self.avg_score, self.low_score, self.high_score))

    def __str__(self):
        if not self:
            return ""
        parts = []
        if self.rank:
            parts.append(f"排名: {self.rank}")
        if self.avg_score:
            parts.append(f"平均分: {self.avg_score}")
        if self.low_score:
            parts.append(f"最低分: {self.low_score}")
        if self.high_score:
            parts.append(f"最高分: {self.high_score}")
        return "\n".join(parts)
@dataclass
class SubjectScore:
    """One student's score in one subject, with optional rank statistics."""
    score: float = 0
    subject: Subject = field(default_factory=Subject)
    person: StuPerson = field(default_factory=StuPerson)
    _create_timestamp: float = field(default=0, repr=False)
    create_time = get_property("_create_timestamp")
    # Rank statistics within the class / the grade (excluded from comparisons).
    class_extraRank: ExtraRank = field(default_factory=ExtraRank, compare=False)
    grade_extraRank: ExtraRank = field(default_factory=ExtraRank, compare=False)

    def __str__(self):
        msg = f"{self.subject.name}:{self.score}\n"
        if self.class_extraRank:
            msg += f"班级{self.class_extraRank}\n"
        if self.grade_extraRank:
            msg += f"年级{self.grade_extraRank}\n"
        # Strip the trailing newline added by the last appended line.
        return msg[:-1]
class Mark(ExtendedList[SubjectScore]):
    """All subject scores of one exam for one student."""

    def __init__(self, l: list = None, exam: Exam = None, person: StuPerson = None):
        super().__init__(l)
        self.exam = exam
        self.person = person

    def __repr__(self):
        if self.exam and self.person:
            msg = f"{self.person.name}-{self.exam.name}\n" + \
                "".join([f"{subject}\n" for subject in self])
            return msg[:-1]
        # BUG FIX: the original implicitly returned None here, making
        # repr()/str() raise "TypeError: __repr__ returned non-string".
        # Fall back to the plain list representation instead.
        return super().__repr__()

    def __str__(self):
        return self.__repr__()
class StuPersonList(ExtendedList):
    """A list of students with lookup helpers by code/class/school."""

    def find_by_code(self, code: str) -> StuPerson:
        """Return the first student whose admission-ticket code matches."""
        return self.find(lambda p: p.code == code)

    def find_by_clazz_id(self, clazz_id: str) -> StuPerson:
        """Return the first student whose class id matches."""
        return self.find(lambda p: p.clazz.id == clazz_id)

    def find_all_by_clazz_id(self, clazz_id: str) -> ExtendedList[StuPerson]:
        """Return all students whose class id matches."""
        return self.find_all(lambda p: p.clazz.id == clazz_id)

    def find_by_clazz(self, clazz: StuClass) -> StuPerson:
        """Return the first student in the given class."""
        return self.find(lambda p: p.clazz == clazz)

    def find_all_by_clazz(self, clazz: StuClass) -> ExtendedList[StuPerson]:
        """Return all students in the given class."""
        return self.find_all(lambda p: p.clazz == clazz)

    def find_by_school_id(self, school_id: str) -> StuPerson:
        """Return the first student whose school id matches."""
        return self.find(lambda p: p.school.id == school_id)

    def find_by_school(self, school: School) -> StuPerson:
        """Return the first student in the given school."""
        return self.find(lambda p: p.school == school)
class SubjectTable(Enum):
    # NOTE(review): Enum members with duplicate values become aliases of the
    # first member, so every ""-valued name here aliases `chinese` and only
    # `physics` is a distinct member. This table looks incomplete — confirm
    # the intended subject codes before relying on it.
    chinese = ""
    math = ""
    english = ""
    physics = "01"
    chemistry = ""
    history = ""
| 28.020619 | 110 | 0.609272 |
7a346e77d80c3dc40a6efd7e4deb5da0052d5938 | 383 | py | Python | hotels/migrations/0003_auto_20190721_1504.py | akramakh/Django-Hotel-Management-System | 75ca308a9fe8d09b89f772acac0400e798a85ee1 | [
"MIT"
] | null | null | null | hotels/migrations/0003_auto_20190721_1504.py | akramakh/Django-Hotel-Management-System | 75ca308a9fe8d09b89f772acac0400e798a85ee1 | [
"MIT"
] | 7 | 2019-12-04T23:47:59.000Z | 2021-06-09T18:07:05.000Z | hotels/migrations/0003_auto_20190721_1504.py | akramakh/Django-Hotel-Management-System | 75ca308a9fe8d09b89f772acac0400e798a85ee1 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.3 on 2019-07-21 12:04
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: widen hotels.Hotel.name to
    # CharField(max_length=255).

    dependencies = [
        ('hotels', '0002_auto_20190721_1340'),
    ]

    operations = [
        migrations.AlterField(
            model_name='hotel',
            name='name',
            field=models.CharField(max_length=255),
        ),
    ]
| 20.157895 | 51 | 0.5953 |
14336a3819b59ebf3f2e33df0d6c8d7b77a301d5 | 13,855 | py | Python | monitoring/collectd-extensions/src/mtce_notifier.py | marcelarosalesj/x.stx-integ | 4ac95cc0d0cf64ea09ffe3a3b130b73175f38ee8 | [
"Apache-2.0"
] | null | null | null | monitoring/collectd-extensions/src/mtce_notifier.py | marcelarosalesj/x.stx-integ | 4ac95cc0d0cf64ea09ffe3a3b130b73175f38ee8 | [
"Apache-2.0"
] | null | null | null | monitoring/collectd-extensions/src/mtce_notifier.py | marcelarosalesj/x.stx-integ | 4ac95cc0d0cf64ea09ffe3a3b130b73175f38ee8 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2018-2019 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
#############################################################################
#
# This file is the collectd 'Maintenance' Notifier.
#
# Collects provides information about each event as an object passed to the
# notification handler ; the notification object.
#
# object.host - the hostname
#
# object.plugin - the name of the plugin aka resource
# object.plugin_instance - plugin instance string i.e. say mountpoint
# for df plugin
# object.type, - the unit i.e. percent or absolute
# object.type_instance - the attribute i.e. free, used, etc
#
# object.severity - a integer value 0=OK , 1=warning, 2=failure
# object.message - a log-able message containing the above along
# with the value
#
# This notifier manages requesting mtce to assert or clear its collectd
# host-degrade-cause flag based on notification messages sent from collectd.
#
# Messages to maintenance are throttled ONE_EVERY while this state is the
# same as last state.
#
# Message is sent on every state change
# from clear to assert or
# from assert to clear
#
# See code comments for details.
#
############################################################################
#
# Import list
import os
import socket
import collectd
import tsconfig.tsconfig as tsc
# This plugin name
PLUGIN = 'degrade notifier'

# collectd severity definitions ;
# Note: can't seem to pull them in symbolically with a header
NOTIF_FAILURE = 1
NOTIF_WARNING = 2
NOTIF_OKAY = 4

# default mtce port.
# ... with configuration override
MTCE_CMD_RX_PORT = 2101

# same state message throttle count.
# ... only send the degrade message every 'this' number
# while the state of assert or clear remains the same.
ONE_EVERY = 10

# Plugin (resource) names this notifier recognizes in notifications.
PLUGIN__DF = 'df'
PLUGIN__MEM = 'memory'
PLUGIN__CPU = 'cpu'
PLUGIN__VSWITCH_MEM = 'vswitch_mem'
PLUGIN__VSWITCH_CPU = 'vswitch_cpu'
PLUGIN__VSWITCH_PORT = "vswitch_port"
PLUGIN__VSWITCH_IFACE = "vswitch_iface"
PLUGIN_INTERFACE = 'interface'
PLUGIN__EXAMPLE = 'example'
# The collectd Maintenance Notifier Object
class collectdMtceNotifierObject:
    """State holder for the collectd maintenance (mtce) degrade notifier.

    One instance persists across notifications and tracks which resource
    instances currently require the host degrade condition.
    """

    def __init__(self, port):
        """Initialize notifier state with the given mtce command port."""
        # Maintenance command receive port ; may be overridden by config.
        self.port = port

        # Active controller address ; resolved lazily before first send.
        self.addr = None

        # Protocol family used to message maintenance. Starts as IPv4 and
        # is switched to AF_INET6 once the system is learned to be IPv6.
        self.protocol = socket.AF_INET

        # Plugins whose FAILURE severity notifications cause degrade.
        self.degrade_list__failure = [
            PLUGIN__DF,
            PLUGIN__MEM,
            PLUGIN__CPU,
            PLUGIN__VSWITCH_MEM,
            PLUGIN__VSWITCH_CPU,
            PLUGIN__VSWITCH_PORT,
            PLUGIN__VSWITCH_IFACE,
            PLUGIN_INTERFACE,
            PLUGIN__EXAMPLE,
        ]

        # Plugins whose WARNING severity notifications cause degrade.
        self.degrade_list__warning = [PLUGIN_INTERFACE]

        # Resources currently requiring degrade:
        #   empty     -> a degrade 'clear' is sent to maintenance
        #   non-empty -> a degrade 'assert' is sent to maintenance
        self.degrade_list = []

        # Duplicate assert/clear message throttling state.
        self.last_state = "undef"
        self.msg_throttle = 0
# Instantiate the mtce_notifier object.
# This object persists from notification to notification.
obj = collectdMtceNotifierObject(MTCE_CMD_RX_PORT)
def _get_active_controller_ip():
    """Resolve and cache the active controller host IP on the notifier.

    On success obj.addr holds the resolved address ; on any failure it is
    reset to None so the next notification retries the lookup.
    """
    try:
        # 'controller' resolves to the active controller on this system.
        info = socket.getaddrinfo('controller', None)
        obj.addr = info[0][4][0]
        collectd.info("%s controller ip: %s" % (PLUGIN, obj.addr))
    except Exception as err:
        obj.addr = None
        collectd.error("%s failed to get controller ip ; %s" %
                       (PLUGIN, str(err)))
    return 0
def _df_instance_to_path(df_inst):
"""Convert a df instance name to a mountpoint"""
# df_root is not a dynamic file system. Ignore that one.
if df_inst == 'df_root':
return '/'
else:
# For all others replace all '-' with '/'
return('/' + df_inst[3:].replace('-', '/'))
# This function removes degraded file systems that are no longer present.
def _clear_degrade_for_missing_filesystems():
    """Remove degraded file systems that are no longer mounted or present.

    Dynamic filesystems can disappear (e.g. over a swact) while a degrade
    for them is still asserted ; drop any 'df_' instance whose mountpoint
    no longer exists.
    """
    # Bug fix: iterate over a snapshot of the list. Removing elements
    # from a list while iterating it directly skips the element that
    # follows each removal, so adjacent stale entries could be missed.
    for df_inst in list(obj.degrade_list):
        # Only file system plugins are looked at.
        # File system plugin instance names are prefixed with 'df_'
        # as the first 3 chars in the instance name.
        if df_inst[0:3] == 'df_':
            path = _df_instance_to_path(df_inst)
            # If the mountpoint no longer exists then remove this
            # instance from the degrade list.
            if os.path.ismount(path) is False:
                collectd.info("%s clearing degrade for missing %s ; %s" %
                              (PLUGIN, path, obj.degrade_list))
                obj.degrade_list.remove(df_inst)
    return 0
# The collectd configuration interface
#
# Used to configure the maintenance port.
#   key = 'port'
#   val = port number
#
def config_func(config):
    """Handle the collectd 'Config' callback for this plugin.

    Looks for a 'port' setting and stores it as the mtce command port ;
    falls back to MTCE_CMD_RX_PORT when none is supplied.
    """
    collectd.debug('%s config function' % PLUGIN)
    for child in config.children:
        setting = child.key.lower()
        value = child.values[0]
        if setting == 'port':
            obj.port = int(value)
            collectd.info("%s configured mtce port: %d" %
                          (PLUGIN, obj.port))
            return 0

    # No port setting found ; fall back to the default.
    obj.port = MTCE_CMD_RX_PORT
    collectd.error("%s no mtce port provided ; defaulting to %d" %
                   (PLUGIN, obj.port))
# Collectd calls this function on startup.
def init_func():
    """Handle the collectd 'Init' callback ; record this host's name."""
    # uname()[1] is the local hostname.
    hostname = os.uname()[1]
    obj.host = hostname
    collectd.info("%s %s:%s sending to mtce port %d" %
                  (PLUGIN, tsc.nodetype, obj.host, obj.port))
    collectd.debug("%s init function" % PLUGIN)
# This is the Notifier function that is called by collectd.
#
# Handling steps are
#
#   1. build resource name from notification object.
#   2. check resource against severity lists.
#   3. manage this instance's degrade state.
#   4. send mtcAgent the degrade state message.
#


def _manage_degrade_membership(nObject, resource, severity_plugins):
    """Add or drop 'resource' from obj.degrade_list for one severity level.

    severity_plugins is the list of plugin names whose notifications at
    the current severity should cause degrade. When that list is empty
    the resource is removed from the degrade list instead (should never
    occur in practice).
    """
    if severity_plugins:
        if nObject.plugin in severity_plugins and \
                resource not in obj.degrade_list:
            # Handle dynamic filesystems going missing over a swact
            # or unmount and being reported as a transient error by
            # the df plugin. Don't add it to the failed list if the
            # mountpoint is gone.
            add = True
            if nObject.plugin == PLUGIN__DF:
                path = _df_instance_to_path(resource)
                add = os.path.ismount(path)
            if add is True:
                collectd.info("%s %s added to degrade list" %
                              (PLUGIN, resource))
                obj.degrade_list.append(resource)
    else:
        # If this severity causes no degrades then make sure this
        # resource is not in the degrade list. Should never occur.
        if resource in obj.degrade_list:
            obj.degrade_list.remove(resource)


def notifier_func(nObject):
    """Collectd Mtce Notifier Handler Function"""

    # Create the resource name from the notifier object.
    # format: <plugin name>_<plugin_instance_name>
    resource = nObject.plugin
    if nObject.plugin_instance:
        resource += "_" + nObject.plugin_instance

    # Manage the degrade list based on this notification's severity.
    # Bug fix: compare severities with '==' ; the former 'is' comparison
    # only worked because CPython interns small integers.
    if nObject.severity == NOTIF_OKAY:
        # An OKAY notification clears any degrade held by this resource.
        if obj.degrade_list and resource in obj.degrade_list:
            obj.degrade_list.remove(resource)

    elif nObject.severity == NOTIF_FAILURE:
        _manage_degrade_membership(nObject, resource,
                                   obj.degrade_list__failure)

    elif nObject.severity == NOTIF_WARNING:
        _manage_degrade_membership(nObject, resource,
                                   obj.degrade_list__warning)

    else:
        collectd.info("%s unsupported severity %d" %
                      (PLUGIN, nObject.severity))
        return 0

    # running counter of notifications.
    obj.msg_throttle += 1

    # Support for Dynamic File Systems
    # --------------------------------
    # Some active controller mounted filesystems can become
    # unmounted under the watch of collectd. This can occur
    # as a result of a Swact. If a degrade is raised at the
    # time an fs disappears then that state can become stuck
    # active until the next Swact. This call handles that case.
    #
    # Audit file system presence every time we get the
    # notification for the root file system.
    # Depending on the root filesystem always being there.
    if nObject.plugin == 'df' \
            and nObject.plugin_instance == 'root' \
            and len(obj.degrade_list):
        _clear_degrade_for_missing_filesystems()

    # If degrade list is empty then a clear state is sent to maintenance.
    # If degrade list is NOT empty then an assert state is sent.
    # For logging and to ease debug a short list of degraded resource
    # instances is included in the message for mtcAgent to optionally log.
    resources = ""
    if obj.degrade_list:
        # Bug fix: the former slice [0:1:5] selected only the first
        # element ; [:5] limits the report to five resources as the
        # original comment intended.
        for r in obj.degrade_list[:5]:
            resources += r + ','
        resources = resources[:-1]
        state = "assert"
    else:
        state = "clear"

    # Message throttling ....
    # Avoid sending the same last state message for up to ONE_EVERY count.
    # Just reduce load on mtcAgent.
    if obj.last_state == state and obj.msg_throttle < ONE_EVERY:
        return 0

    # If the degrade state has changed then log it and proceed.
    if obj.last_state != state:
        if obj.last_state != "undef":
            collectd.info("%s degrade %s %s" %
                          (PLUGIN,
                           state,
                           obj.degrade_list))
        # Save state for next time.
        obj.last_state = state

    # Clear the message throttle counter on every send.
    obj.msg_throttle = 0

    # Send the degrade state ; assert or clear message to mtcAgent.
    # If we get a send failure then log it and set the addr to None
    # so it forces us to refresh the controller address on the next
    # notification.
    try:
        mtce_socket = socket.socket(obj.protocol, socket.SOCK_DGRAM)
        if mtce_socket:
            if obj.addr is None:
                _get_active_controller_ip()
                if obj.addr is None:
                    return 0

            # Create the Maintenance message.
            # NOTE(review): message is a str ; on Python 3 sendto requires
            # bytes — confirm this plugin runs under Python 2 semantics.
            message = "{\"service\":\"collectd_notifier\","
            message += "\"hostname\":\"" + nObject.host + "\","
            message += "\"degrade\":\"" + state + "\","
            message += "\"resource\":\"" + resources + "\"}"
            collectd.debug("%s: %s" % (PLUGIN, message))

            mtce_socket.settimeout(1.0)
            mtce_socket.sendto(message, (obj.addr, obj.port))
            mtce_socket.close()
        else:
            collectd.error("%s %s failed to open socket (%s)" %
                           (PLUGIN, resource, obj.addr))
    except socket.error as e:
        if e.args[0] == socket.EAI_ADDRFAMILY:
            # Handle IPV4 to IPV6 switchover:
            obj.protocol = socket.AF_INET6
            collectd.info("%s %s ipv6 addressing (%s)" %
                          (PLUGIN, resource, obj.addr))
        else:
            collectd.error("%s %s socket error (%s) ; %s" %
                           (PLUGIN, resource, obj.addr, str(e)))
            # try self correction on the next notification
            obj.addr = None
            obj.protocol = socket.AF_INET
    return 0
# Register this plugin's callbacks with the collectd daemon.
collectd.register_config(config_func)
collectd.register_init(init_func)
collectd.register_notification(notifier_func)
| 36.364829 | 78 | 0.600072 |
2a6e70b8a7ade63410b5b8c8a20e433d0fdfa795 | 1,219 | py | Python | disk/tests/mocks.py | glasser/integrations-core | 1dd515d49b1690a1369ee5195713605b1b072b1f | [
"BSD-3-Clause"
] | null | null | null | disk/tests/mocks.py | glasser/integrations-core | 1dd515d49b1690a1369ee5195713605b1b072b1f | [
"BSD-3-Clause"
] | null | null | null | disk/tests/mocks.py | glasser/integrations-core | 1dd515d49b1690a1369ee5195713605b1b072b1f | [
"BSD-3-Clause"
] | 1 | 2019-12-23T13:35:17.000Z | 2019-12-23T13:35:17.000Z | # (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
from .common import DEFAULT_DEVICE_NAME, DEFAULT_FILE_SYSTEM, DEFAULT_MOUNT_POINT, HERE
def mock_blkid_output():
    """Return the 'blkid' fixture as a `get_subprocess_output`-style tuple.

    The tuple mirrors (stdout, stderr, third field) with the fixture
    contents as stdout and empty strings for the rest.
    """
    fixture_path = os.path.join(HERE, 'fixtures', 'blkid')
    with open(fixture_path) as fixture:
        contents = fixture.read()
    return contents, '', ''
class MockPart(object):
    """Stand-in for a psutil disk-partition entry used by the disk tests."""

    def __init__(self, device=DEFAULT_DEVICE_NAME, fstype=DEFAULT_FILE_SYSTEM,
                 mountpoint=DEFAULT_MOUNT_POINT, opts='ro'):
        # Store everything exactly as given, like the psutil namedtuple.
        self.device = device
        self.fstype = fstype
        self.mountpoint = mountpoint
        self.opts = opts
class MockDiskMetrics(object):
    """Canned disk usage and I/O timing numbers for assertions.

    Models a 5 KiB volume with 4 KiB used (80%) and fixed read/write times.
    """

    total = 5 * 1024
    used = 4 * 1024
    free = 1 * 1024
    percent = 80
    read_time = 50
    write_time = 90
class MockDiskIOMetrics(dict):
    """Dict of device name -> MockDiskMetrics, pre-populated for one device."""

    def __init__(self, device=DEFAULT_DEVICE_NAME):
        # Start from an empty dict, then seed the single requested device.
        super(MockDiskIOMetrics, self).__init__()
        self[device] = MockDiskMetrics()
class MockInodesMetrics(object):
    """Canned inode counters: 10 total files, 9 free."""

    f_files = 10
    f_ffree = 9
class MockIoCountersMetrics(object):
    """Canned I/O counter timings used by the disk check tests."""

    read_time = 15
    write_time = 25
| 23.901961 | 111 | 0.676784 |
c73ca9a1e32c2132fc583dd9970c8cbeb14a65c6 | 1,878 | py | Python | watson/dev/middleware.py | watsonpy/watson-dev | 3021c736da8a70e259f9a42fde98f58eb394996b | [
"BSD-3-Clause"
] | null | null | null | watson/dev/middleware.py | watsonpy/watson-dev | 3021c736da8a70e259f9a42fde98f58eb394996b | [
"BSD-3-Clause"
] | null | null | null | watson/dev/middleware.py | watsonpy/watson-dev | 3021c736da8a70e259f9a42fde98f58eb394996b | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import __main__
import os
import mimetypes
import stat
class StaticFileMiddleware(object):
    """WSGI middleware that serves static files from a base directory.

    Requests whose PATH_INFO maps to an existing non-directory path under
    ``initial_dir`` are served directly ; everything else falls through
    to the wrapped application.

    Example:

    .. code-block:: python

        def app(environ, start_response):
            start_response('200 OK', [('Content-Type', 'text/plain')])
            return [b'Hello World!']

        my_app = StaticFileMiddleware(app)
    """
    # Class-level defaults, overwritten per instance in __init__.
    app = None
    initial_dir = None
    script_path = None

    def __init__(self, app, initial_dir=None):
        # Remember where the entry-point script lives.
        self.script_path = os.path.abspath(__main__.__file__)
        # Base directory for static lookups ; defaults to the CWD.
        self.initial_dir = initial_dir or os.getcwd()
        self.app = app

    def __call__(self, environ, start_response):
        requested = os.path.join(self.initial_dir, environ['PATH_INFO'][1:])
        candidate = os.path.join(requested)
        # Serve statically only when the path exists and isn't a directory.
        serve_file = False
        if os.path.exists(candidate):
            if not stat.S_ISDIR(os.stat(candidate).st_mode):
                serve_file = True
        if not serve_file:
            return self.app(environ, start_response)
        return self.serve_static(requested, os.stat(candidate), environ,
                                 start_response)

    def serve_static(self, path, file_stat, environ, start_response):
        # Only regular files are served ; anything else yields None,
        # matching the prior behavior.
        if stat.S_ISREG(file_stat.st_mode):
            mime = mimetypes.guess_type(path)[0]
            headers = [
                ('Content-Type', '{0}; charset=utf-8'.format(mime)),
                ('Content-Length', str(os.path.getsize(path)))
            ]
            start_response('200 OK', headers)
            with open(path, 'rb') as static_file:
                return [static_file.read()]
| 31.830508 | 82 | 0.573482 |
8bda5d2bae72dd9b2fa5408b2d97738e25b9f0ae | 12,836 | py | Python | utils/utils_bbox.py | hito0512/yolov4-tf2 | 97d23a70d0ec3f6ce1d49f85398a584a466a0713 | [
"MIT"
] | 298 | 2020-06-05T14:31:17.000Z | 2022-03-28T02:59:40.000Z | utils/utils_bbox.py | wkc2014/yolov4-tf2 | 97d23a70d0ec3f6ce1d49f85398a584a466a0713 | [
"MIT"
] | 36 | 2020-06-15T11:41:17.000Z | 2022-03-31T15:21:50.000Z | utils/utils_bbox.py | wkc2014/yolov4-tf2 | 97d23a70d0ec3f6ce1d49f85398a584a466a0713 | [
"MIT"
] | 107 | 2020-06-07T10:40:44.000Z | 2022-03-30T11:32:48.000Z | import tensorflow as tf
from tensorflow.keras import backend as K
#---------------------------------------------------#
#   Adjust the boxes so they fit the real image
#---------------------------------------------------#
def yolo_correct_boxes(box_xy, box_wh, input_shape, image_shape, letterbox_image):
    """Map normalized box centers/sizes back onto the original image,
    undoing the letterbox padding when it was used. Returns boxes as
    (y_min, x_min, y_max, x_max) in image pixels."""
    #-----------------------------------------------------------------#
    #   Put the y axis first so the boxes multiply cleanly against the
    #   (height, width) image shape below.
    #-----------------------------------------------------------------#
    box_yx = box_xy[..., ::-1]
    box_hw = box_wh[..., ::-1]
    input_shape = K.cast(input_shape, K.dtype(box_yx))
    image_shape = K.cast(image_shape, K.dtype(box_yx))

    if letterbox_image:
        #-----------------------------------------------------------------#
        #   offset is the displacement of the valid image area from the
        #   top-left corner of the padded image ;
        #   new_shape is the letterboxed (scaled) width/height.
        #-----------------------------------------------------------------#
        new_shape = K.round(image_shape * K.min(input_shape/image_shape))
        offset = (input_shape - new_shape)/2./input_shape
        scale = input_shape/new_shape

        box_yx = (box_yx - offset) * scale
        box_hw *= scale

    box_mins = box_yx - (box_hw / 2.)
    box_maxes = box_yx + (box_hw / 2.)
    boxes = K.concatenate([box_mins[..., 0:1], box_mins[..., 1:2], box_maxes[..., 0:1], box_maxes[..., 1:2]])
    boxes *= K.concatenate([image_shape, image_shape])
    return boxes
#---------------------------------------------------#
#   Convert each feature layer's raw predictions
#   into real box values
#---------------------------------------------------#
def get_anchors_and_decode(feats, anchors, num_classes, input_shape, calc_loss=False):
    """Decode one YOLO head into normalized xy/wh plus confidences.

    With calc_loss=True the raw grid and reshaped feats are returned
    instead, for use by the training loss.
    """
    num_anchors = len(anchors)
    #------------------------------------------#
    #   grid_shape is the feature map's
    #   height and width
    #------------------------------------------#
    grid_shape = K.shape(feats)[1:3]
    #--------------------------------------------------------------------#
    #   Coordinates of every feature point ;
    #   generated shape is (13, 13, num_anchors, 2)
    #--------------------------------------------------------------------#
    grid_x = K.tile(K.reshape(K.arange(0, stop=grid_shape[1]), [1, -1, 1, 1]), [grid_shape[0], 1, num_anchors, 1])
    grid_y = K.tile(K.reshape(K.arange(0, stop=grid_shape[0]), [-1, 1, 1, 1]), [1, grid_shape[1], num_anchors, 1])
    grid = K.cast(K.concatenate([grid_x, grid_y]), K.dtype(feats))
    #---------------------------------------------------------------#
    #   Tile the anchors out to shape (13, 13, num_anchors, 2)
    #---------------------------------------------------------------#
    anchors_tensor = K.reshape(K.constant(anchors), [1, 1, num_anchors, 2])
    anchors_tensor = K.tile(anchors_tensor, [grid_shape[0], grid_shape[1], 1, 1])
    #---------------------------------------------------#
    #   Reshape predictions to (batch_size, 13, 13, 3, 85)
    #   85 splits into 4 + 1 + 80:
    #   4  -> center/size adjustment parameters
    #   1  -> box confidence
    #   80 -> per-class confidence
    #---------------------------------------------------#
    feats = K.reshape(feats, [-1, grid_shape[0], grid_shape[1], num_anchors, num_classes + 5])
    #------------------------------------------#
    #   Decode against the anchors and
    #   normalize to the 0..1 range
    #------------------------------------------#
    box_xy = (K.sigmoid(feats[..., :2]) + grid) / K.cast(grid_shape[::-1], K.dtype(feats))
    box_wh = K.exp(feats[..., 2:4]) * anchors_tensor / K.cast(input_shape[::-1], K.dtype(feats))
    #------------------------------------------#
    #   Box confidence and class probabilities
    #------------------------------------------#
    box_confidence = K.sigmoid(feats[..., 4:5])
    box_class_probs = K.sigmoid(feats[..., 5:])
    #---------------------------------------------------------------------#
    #   When computing the loss return grid, feats, box_xy, box_wh ;
    #   when predicting return box_xy, box_wh, box_confidence,
    #   box_class_probs
    #---------------------------------------------------------------------#
    if calc_loss == True:
        return grid, feats, box_xy, box_wh
    return box_xy, box_wh, box_confidence, box_class_probs
#---------------------------------------------------#
#   Image prediction: decode, filter and NMS boxes
#---------------------------------------------------#
def DecodeBox(outputs,
              anchors,
              num_classes,
              input_shape,
              #-----------------------------------------------------------#
              #   The 13x13 feature layer uses anchors [116,90],[156,198],[373,326]
              #   The 26x26 feature layer uses anchors [30,61],[62,45],[59,119]
              #   The 52x52 feature layer uses anchors [10,13],[16,30],[33,23]
              #-----------------------------------------------------------#
              anchor_mask = [[6, 7, 8], [3, 4, 5], [0, 1, 2]],
              max_boxes = 100,
              confidence = 0.5,
              nms_iou = 0.3,
              letterbox_image = True):
    """Decode every feature layer, score-filter and apply per-class NMS.

    Returns (boxes, scores, classes). The last element of `outputs`
    carries the original image shape.
    """
    image_shape = K.reshape(outputs[-1],[-1])

    box_xy = []
    box_wh = []
    box_confidence = []
    box_class_probs = []
    for i in range(len(anchor_mask)):
        sub_box_xy, sub_box_wh, sub_box_confidence, sub_box_class_probs = \
            get_anchors_and_decode(outputs[i], anchors[anchor_mask[i]], num_classes, input_shape)
        box_xy.append(K.reshape(sub_box_xy, [-1, 2]))
        box_wh.append(K.reshape(sub_box_wh, [-1, 2]))
        box_confidence.append(K.reshape(sub_box_confidence, [-1, 1]))
        box_class_probs.append(K.reshape(sub_box_class_probs, [-1, num_classes]))
    box_xy = K.concatenate(box_xy, axis = 0)
    box_wh = K.concatenate(box_wh, axis = 0)
    box_confidence = K.concatenate(box_confidence, axis = 0)
    box_class_probs = K.concatenate(box_class_probs, axis = 0)

    #------------------------------------------------------------------------------------------------------------#
    #   Before prediction the image is letterboxed (gray bars added), so
    #   box_xy / box_wh are relative to the padded image. Strip the
    #   padding here and convert to y_min, x_min, y_max, x_max on the
    #   original image. Without letterboxing the normalized coordinates
    #   still need scaling back to the original image size.
    #------------------------------------------------------------------------------------------------------------#
    boxes = yolo_correct_boxes(box_xy, box_wh, input_shape, image_shape, letterbox_image)

    box_scores = box_confidence * box_class_probs

    #-----------------------------------------------------------#
    #   Keep only boxes scoring at least `confidence`
    #-----------------------------------------------------------#
    mask = box_scores >= confidence
    max_boxes_tensor = K.constant(max_boxes, dtype='int32')
    boxes_out = []
    scores_out = []
    classes_out = []
    for c in range(num_classes):
        #-----------------------------------------------------------#
        #   Take the boxes and scores where
        #   box_scores >= score_threshold for this class
        #-----------------------------------------------------------#
        class_boxes = tf.boolean_mask(boxes, mask[:, c])
        class_box_scores = tf.boolean_mask(box_scores[:, c], mask[:, c])

        #-----------------------------------------------------------#
        #   Non-max suppression:
        #   keep the highest scoring box per overlapping region
        #-----------------------------------------------------------#
        nms_index = tf.image.non_max_suppression(class_boxes, class_box_scores, max_boxes_tensor, iou_threshold=nms_iou)

        #-----------------------------------------------------------#
        #   Gather the NMS survivors ; the three outputs are the
        #   box positions, scores and classes
        #-----------------------------------------------------------#
        class_boxes = K.gather(class_boxes, nms_index)
        class_box_scores = K.gather(class_box_scores, nms_index)
        classes = K.ones_like(class_box_scores, 'int32') * c

        boxes_out.append(class_boxes)
        scores_out.append(class_box_scores)
        classes_out.append(classes)
    boxes_out = K.concatenate(boxes_out, axis=0)
    scores_out = K.concatenate(scores_out, axis=0)
    classes_out = K.concatenate(classes_out, axis=0)

    return boxes_out, scores_out, classes_out
if __name__ == "__main__":
    # Visual sanity check of the decode step: plots the 13x13 grid, the
    # three anchors centered on cell (5, 5) and the decoded boxes for
    # that cell, using a NumPy re-implementation of the decoder.
    import matplotlib.pyplot as plt
    import numpy as np

    def sigmoid(x):
        s = 1 / (1 + np.exp(-x))
        return s

    #---------------------------------------------------#
    #   NumPy version of the decode step,
    #   used only for this visualisation
    #---------------------------------------------------#
    def get_anchors_and_decode(feats, anchors, num_classes):
        # feats         [batch_size, 13, 13, 3 * (5 + num_classes)]
        # anchors       [3, 2]
        # num_classes
        # 3
        num_anchors = len(anchors)
        #------------------------------------------#
        #   grid_shape is the feature map size
        #   grid_shape [13, 13]
        #------------------------------------------#
        grid_shape = np.shape(feats)[1:3]
        #--------------------------------------------------------------------#
        #   Coordinates of every feature point, shape (13, 13, num_anchors, 2)
        #   grid_x [13, 13, 3, 1]
        #   grid_y [13, 13, 3, 1]
        #   grid   [13, 13, 3, 2]
        #--------------------------------------------------------------------#
        grid_x = np.tile(np.reshape(np.arange(0, stop=grid_shape[1]), [1, -1, 1, 1]), [grid_shape[0], 1, num_anchors, 1])
        grid_y = np.tile(np.reshape(np.arange(0, stop=grid_shape[0]), [-1, 1, 1, 1]), [1, grid_shape[1], num_anchors, 1])
        grid = np.concatenate([grid_x, grid_y], -1)
        #---------------------------------------------------------------#
        #   Tile the anchors out to (13, 13, num_anchors, 2)
        #   [1, 1, 3, 2]
        #   [13, 13, 3, 2]
        #---------------------------------------------------------------#
        anchors_tensor = np.reshape(anchors, [1, 1, num_anchors, 2])
        anchors_tensor = np.tile(anchors_tensor, [grid_shape[0], grid_shape[1], 1, 1])
        #---------------------------------------------------#
        #   Reshape predictions to (batch_size, 13, 13, 3, 85)
        #   85 splits into 4 + 1 + 80:
        #   4  -> center/size adjustments
        #   1  -> box confidence
        #   80 -> per-class confidence
        #   [batch_size, 13, 13, 3 * (5 + num_classes)]
        #   [batch_size, 13, 13, 3, 5 + num_classes]
        #---------------------------------------------------#
        feats = np.reshape(feats, [-1, grid_shape[0], grid_shape[1], num_anchors, num_classes + 5])
        #------------------------------------------#
        #   Decode against the anchors (grid units)
        #------------------------------------------#
        box_xy = sigmoid(feats[..., :2]) + grid
        box_wh = np.exp(feats[..., 2:4]) * anchors_tensor
        #------------------------------------------#
        #   Box confidence and class probabilities
        #------------------------------------------#
        box_confidence = sigmoid(feats[..., 4:5])
        box_class_probs = sigmoid(feats[..., 5:])

        # Scale pixel-sized anchors/boxes down to grid units for plotting.
        box_wh = box_wh / 32
        anchors_tensor = anchors_tensor / 32

        # Left subplot: the raw anchors centered on cell (5, 5).
        fig = plt.figure()
        ax = fig.add_subplot(121)
        plt.ylim(-2,15)
        plt.xlim(-2,15)
        plt.scatter(grid_x,grid_y)
        plt.scatter(5,5,c='black')
        plt.gca().invert_yaxis()

        anchor_left = grid_x - anchors_tensor/2
        anchor_top = grid_y - anchors_tensor/2
        print(np.shape(anchors_tensor))
        print(np.shape(box_xy))
        rect1 = plt.Rectangle([anchor_left[5,5,0,0],anchor_top[5,5,0,1]],anchors_tensor[0,0,0,0],anchors_tensor[0,0,0,1],color="r",fill=False)
        rect2 = plt.Rectangle([anchor_left[5,5,1,0],anchor_top[5,5,1,1]],anchors_tensor[0,0,1,0],anchors_tensor[0,0,1,1],color="r",fill=False)
        rect3 = plt.Rectangle([anchor_left[5,5,2,0],anchor_top[5,5,2,1]],anchors_tensor[0,0,2,0],anchors_tensor[0,0,2,1],color="r",fill=False)

        ax.add_patch(rect1)
        ax.add_patch(rect2)
        ax.add_patch(rect3)

        # Right subplot: decoded box centers and extents for that cell.
        ax = fig.add_subplot(122)
        plt.ylim(-2,15)
        plt.xlim(-2,15)
        plt.scatter(grid_x,grid_y)
        plt.scatter(5,5,c='black')
        plt.scatter(box_xy[0,5,5,:,0],box_xy[0,5,5,:,1],c='r')
        plt.gca().invert_yaxis()

        pre_left = box_xy[...,0] - box_wh[...,0]/2
        pre_top = box_xy[...,1] - box_wh[...,1]/2

        rect1 = plt.Rectangle([pre_left[0,5,5,0],pre_top[0,5,5,0]],box_wh[0,5,5,0,0],box_wh[0,5,5,0,1],color="r",fill=False)
        rect2 = plt.Rectangle([pre_left[0,5,5,1],pre_top[0,5,5,1]],box_wh[0,5,5,1,0],box_wh[0,5,5,1,1],color="r",fill=False)
        rect3 = plt.Rectangle([pre_left[0,5,5,2],pre_top[0,5,5,2]],box_wh[0,5,5,2,0],box_wh[0,5,5,2,1],color="r",fill=False)

        ax.add_patch(rect1)
        ax.add_patch(rect2)
        ax.add_patch(rect3)

        plt.show()
    #
    # Random logits shaped for a 13x13 head with 20 classes (3*(5+20)=75).
    feat = np.random.normal(0,0.5,[4,13,13,75])
    anchors = [[142, 110],[192, 243],[459, 401]]
    get_anchors_and_decode(feat,anchors,20)
304fe78c5e0d5c492f0ce4fc29adbf67aeedb17f | 3,884 | py | Python | tests/test_flaskparser.py | Reskov/webargs | 1ca80a5da13f3d600808fbcca83e9769821865ac | [
"MIT"
] | null | null | null | tests/test_flaskparser.py | Reskov/webargs | 1ca80a5da13f3d600808fbcca83e9769821865ac | [
"MIT"
] | null | null | null | tests/test_flaskparser.py | Reskov/webargs | 1ca80a5da13f3d600808fbcca83e9769821865ac | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
import mock
from werkzeug.exceptions import HTTPException
import pytest
from flask import Flask
from webargs import fields, ValidationError, missing
from webargs.flaskparser import parser, abort
from webargs.core import MARSHMALLOW_VERSION_INFO
from .apps.flask_app import app
from .common import CommonTestCase
class TestFlaskParser(CommonTestCase):
    """Flask-specific webargs parser tests on top of the shared suite."""

    def create_app(self):
        # Hand the Flask app under test to the CommonTestCase harness.
        return app

    def test_parsing_view_args(self, testapp):
        res = testapp.get('/echo_view_arg/42')
        assert res.json == {'view_arg': 42}

    def test_parsing_invalid_view_arg(self, testapp):
        res = testapp.get('/echo_view_arg/foo', expect_errors=True)
        assert res.status_code == 422
        assert res.json == {'errors': {'view_arg': ['Not a valid integer.']}}

    def test_use_args_with_view_args_parsing(self, testapp):
        res = testapp.get('/echo_view_arg_use_args/42')
        assert res.json == {'view_arg': 42}

    def test_use_args_on_a_method_view(self, testapp):
        res = testapp.post('/echo_method_view_use_args', {'val': 42})
        assert res.json == {'val': 42}

    def test_use_kwargs_on_a_method_view(self, testapp):
        res = testapp.post('/echo_method_view_use_kwargs', {'val': 42})
        assert res.json == {'val': 42}

    def test_use_kwargs_with_missing_data(self, testapp):
        res = testapp.post('/echo_use_kwargs_missing', {'username': 'foo'})
        assert res.json == {'username': 'foo'}

    # regression test for https://github.com/sloria/webargs/issues/145
    def test_nested_many_with_data_key(self, testapp):
        res = testapp.post_json('/echo_nested_many_data_key', {'x_field': [{'id': 42}]})
        # https://github.com/marshmallow-code/marshmallow/pull/714
        if MARSHMALLOW_VERSION_INFO[0] < 3:
            assert res.json == {'x_field': [{'id': 42}]}

        res = testapp.post_json('/echo_nested_many_data_key', {'X-Field': [{'id': 24}]})
        assert res.json == {'x_field': [{'id': 24}]}

        res = testapp.post_json('/echo_nested_many_data_key', {})
        assert res.json == {}
@mock.patch('webargs.flaskparser.abort')
def test_abort_called_on_validation_error(mock_abort):
    """abort() must receive a 422 with the validation messages attached."""
    app = Flask('testapp')

    def validate(x):
        return x == 42
    argmap = {'value': fields.Field(validate=validate)}
    with app.test_request_context('/foo', method='post',
                                  data=json.dumps({'value': 41}), content_type='application/json'):
        parser.parse(argmap)
        # Bug fix: the original read `mock_abort.assert_called` without
        # parentheses, which merely accesses the attribute and asserts
        # nothing. It must be invoked to actually perform the check.
        mock_abort.assert_called()
        abort_args, abort_kwargs = mock_abort.call_args
        assert abort_args[0] == 422
        expected_msg = u'Invalid value.'
        assert abort_kwargs['messages']['value'] == [expected_msg]
        assert type(abort_kwargs['exc']) == ValidationError
def test_parse_form_returns_missing_if_no_form():
    """parse_form falls back to `missing` when the request has no form."""
    request = mock.Mock()
    # Simulate a request object whose form access blows up.
    request.form.get.side_effect = AttributeError('no form')
    assert parser.parse_form(request, 'foo', fields.Field()) is missing
def test_abort_with_message():
    """abort() stashes the custom message on the raised HTTPException."""
    with pytest.raises(HTTPException) as err_info:
        abort(400, message='custom error message')
    assert err_info.value.data['message'] == 'custom error message'
def test_abort_has_serializable_data():
    """The payload attached by abort() must round-trip through json."""

    def roundtrip(err_info):
        # Serialize then parse the attached data to prove it is JSON-safe.
        return json.loads(json.dumps(err_info.value.data))

    with pytest.raises(HTTPException) as err_info:
        abort(400, message='custom error message')
    error = roundtrip(err_info)
    assert isinstance(error, dict)
    assert error['message'] == 'custom error message'

    with pytest.raises(HTTPException) as err_info:
        abort(400, message='custom error message',
              exc=ValidationError('custom error message'))
    error = roundtrip(err_info)
    assert isinstance(error, dict)
    assert error['message'] == 'custom error message'
| 37.346154 | 88 | 0.688723 |
23d1ac4719ba8843fe08b5b62e7096579f445167 | 169 | py | Python | mayan/apps/mayan_statistics/__init__.py | prezi/mayan-edms | e9bc10a056c3379b57115c6e83022f48c6298e1d | [
"Apache-2.0"
] | 4 | 2019-02-17T08:35:42.000Z | 2019-03-28T06:02:11.000Z | mayan/apps/mayan_statistics/__init__.py | zhoubear/mayan-edms | e9bc10a056c3379b57115c6e83022f48c6298e1d | [
"Apache-2.0"
] | 1 | 2018-10-11T13:01:34.000Z | 2018-10-11T13:01:34.000Z | mayan/apps/mayan_statistics/__init__.py | prezi/mayan-edms | e9bc10a056c3379b57115c6e83022f48c6298e1d | [
"Apache-2.0"
] | 3 | 2019-01-29T13:21:57.000Z | 2019-10-27T03:20:15.000Z | from __future__ import unicode_literals
from .classes import StatisticLineChart, StatisticNamespace # NOQA
# Django (<2.0) hook naming the default AppConfig path for this app.
default_app_config = 'mayan_statistics.apps.StatisticsApp'
| 28.166667 | 67 | 0.852071 |
b8f84ce733dfa219965798f72cc8d095933ef358 | 11,789 | py | Python | sdks/python/apache_beam/runners/portability/local_job_service.py | violalyu/beam | dd605e568d70b1a6ebea60c15b2aec3e240f3914 | [
"Apache-2.0"
] | null | null | null | sdks/python/apache_beam/runners/portability/local_job_service.py | violalyu/beam | dd605e568d70b1a6ebea60c15b2aec3e240f3914 | [
"Apache-2.0"
] | 1 | 2019-10-15T00:19:08.000Z | 2019-10-16T22:46:54.000Z | sdks/python/apache_beam/runners/portability/local_job_service.py | violalyu/beam | dd605e568d70b1a6ebea60c15b2aec3e240f3914 | [
"Apache-2.0"
] | null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import logging
import os
import queue
import shutil
import subprocess
import tempfile
import threading
import time
import traceback
from builtins import object
import grpc
from google.protobuf import text_format
from apache_beam.metrics import monitoring_infos
from apache_beam.portability.api import beam_artifact_api_pb2
from apache_beam.portability.api import beam_artifact_api_pb2_grpc
from apache_beam.portability.api import beam_fn_api_pb2_grpc
from apache_beam.portability.api import beam_job_api_pb2
from apache_beam.portability.api import beam_job_api_pb2_grpc
from apache_beam.portability.api import beam_provision_api_pb2
from apache_beam.portability.api import endpoints_pb2
from apache_beam.runners.portability import abstract_job_service
from apache_beam.runners.portability import artifact_service
from apache_beam.runners.portability import fn_api_runner
from apache_beam.utils.thread_pool_executor import UnboundedThreadPoolExecutor
class LocalJobServicer(abstract_job_service.AbstractJobServiceServicer):
  """Manages one or more pipelines, possibly concurrently.

  Experimental: No backward compatibility guaranteed.
  Servicer for the Beam Job API.

  This JobService uses a basic local implementation of runner to run the job.
  This JobService is not capable of managing job on remote clusters.

  By default, this JobService executes the job in process but still uses GRPC
  to communicate pipeline and worker state.  It can also be configured to use
  inline calls rather than GRPC (for speed) or launch completely separate
  subprocesses for the runner and worker(s).
  """

  def __init__(self, staging_dir=None):
    """Create the servicer.

    Args:
      staging_dir: directory used to stage artifacts; when omitted a temp
        directory is created here and removed again by stop().
    """
    super(LocalJobServicer, self).__init__()
    # Only delete the staging dir on stop() if we created it ourselves.
    self._cleanup_staging_dir = staging_dir is None
    self._staging_dir = staging_dir or tempfile.mkdtemp()
    self._artifact_service = artifact_service.BeamFilesystemArtifactService(
        self._staging_dir)
    # Set by start_grpc_server(); stays None when no gRPC server is running.
    self._artifact_staging_endpoint = None

  def create_beam_job(self, preparation_id, job_name, pipeline, options):
    """Build a (not yet running) BeamJob for the given prepared pipeline."""
    # TODO(angoenka): Pass an appropriate staging_session_token. The token can
    # be obtained in PutArtifactResponse from JobService
    if not self._artifact_staging_endpoint:
      # The front-end didn't try to stage anything, but the worker may
      # request what's here so we should at least store an empty manifest.
      self._artifact_service.CommitManifest(
          beam_artifact_api_pb2.CommitManifestRequest(
              staging_session_token=preparation_id,
              manifest=beam_artifact_api_pb2.Manifest()))
    provision_info = fn_api_runner.ExtendedProvisionInfo(
        beam_provision_api_pb2.ProvisionInfo(
            job_id=preparation_id,
            job_name=job_name,
            pipeline_options=options,
            retrieval_token=self._artifact_service.retrieval_token(
                preparation_id)),
        self._staging_dir)
    return BeamJob(
        preparation_id,
        pipeline,
        options,
        provision_info,
        self._artifact_staging_endpoint)

  def start_grpc_server(self, port=0):
    """Start the Job + ArtifactStaging gRPC server; return the bound port."""
    self._server = grpc.server(UnboundedThreadPoolExecutor())
    # port=0 asks the OS for a free port; the chosen one is returned.
    port = self._server.add_insecure_port('localhost:%d' % port)
    beam_job_api_pb2_grpc.add_JobServiceServicer_to_server(self, self._server)
    beam_artifact_api_pb2_grpc.add_ArtifactStagingServiceServicer_to_server(
        self._artifact_service, self._server)
    self._artifact_staging_endpoint = endpoints_pb2.ApiServiceDescriptor(
        url='localhost:%d' % port)
    self._server.start()
    logging.info('Grpc server started on port %s', port)
    return port

  def stop(self, timeout=1):
    """Stop the gRPC server and clean up a self-created staging dir."""
    self._server.stop(timeout)
    if os.path.exists(self._staging_dir) and self._cleanup_staging_dir:
      shutil.rmtree(self._staging_dir, ignore_errors=True)

  def GetJobMetrics(self, request, context=None):
    """Job API RPC: return committed user-level metrics for a job.

    Raises LookupError when the job id is unknown.
    """
    # NOTE(review): self._jobs is presumably maintained by the
    # AbstractJobServiceServicer base class — confirm.
    if request.job_id not in self._jobs:
      raise LookupError("Job {} does not exist".format(request.job_id))
    result = self._jobs[request.job_id].result
    monitoring_info_list = []
    for mi in result._monitoring_infos_by_stage.values():
      monitoring_info_list.extend(mi)
    # Filter out system metrics
    user_monitoring_info_list = [
        x for x in monitoring_info_list
        if monitoring_infos._is_user_monitoring_info(x) or
        monitoring_infos._is_user_distribution_monitoring_info(x)
    ]
    return beam_job_api_pb2.GetJobMetricsResponse(
        metrics=beam_job_api_pb2.MetricResults(
            committed=user_monitoring_info_list))
class SubprocessSdkWorker(object):
  """Manages a SDK worker implemented as a subprocess communicating over grpc.
  """

  def __init__(self, worker_command_line, control_address, worker_id=None):
    self._worker_command_line = worker_command_line
    self._control_address = control_address
    self._worker_id = worker_id

  def run(self):
    """Run the worker subprocess to completion.

    Starts a local gRPC logging service, passes the control and logging
    endpoint descriptors to the worker via environment variables, then blocks
    until the subprocess exits.  Raises RuntimeError on non-zero exit.
    """
    logging_server = grpc.server(UnboundedThreadPoolExecutor())
    logging_port = logging_server.add_insecure_port('[::]:0')
    logging_server.start()
    logging_servicer = BeamFnLoggingServicer()
    beam_fn_api_pb2_grpc.add_BeamFnLoggingServicer_to_server(
        logging_servicer, logging_server)
    # The worker discovers its endpoints through these text-proto descriptors.
    logging_descriptor = text_format.MessageToString(
        endpoints_pb2.ApiServiceDescriptor(url='localhost:%s' % logging_port))
    control_descriptor = text_format.MessageToString(
        endpoints_pb2.ApiServiceDescriptor(url=self._control_address))
    env_dict = dict(
        os.environ,
        CONTROL_API_SERVICE_DESCRIPTOR=control_descriptor,
        LOGGING_API_SERVICE_DESCRIPTOR=logging_descriptor
    )
    # only add worker_id when it is set.
    if self._worker_id:
      env_dict['WORKER_ID'] = self._worker_id

    with fn_api_runner.SUBPROCESS_LOCK:
      p = subprocess.Popen(
          self._worker_command_line,
          shell=True,
          env=env_dict)
    try:
      p.wait()
      if p.returncode:
        raise RuntimeError(
            'Worker subprocess exited with return code %s' % p.returncode)
    finally:
      # Tear down the subprocess and logging server even on error paths.
      if p.poll() is None:
        p.kill()
      logging_server.stop(0)
class BeamJob(abstract_job_service.AbstractBeamJob):
  """This class handles running and managing a single pipeline.

  The current state of the pipeline is available as self.state.
  """

  def __init__(self,
               job_id,
               pipeline,
               options,
               provision_info,
               artifact_staging_endpoint):
    super(BeamJob, self).__init__(
        job_id, provision_info.provision_info.job_name, pipeline, options)
    self._provision_info = provision_info
    self._artifact_staging_endpoint = artifact_staging_endpoint
    self._state = None
    # Queues registered by consumers interested in state changes / messages.
    self._state_queues = []
    self._log_queues = []
    self.state = beam_job_api_pb2.JobState.STOPPED
    self.daemon = True
    self.result = None

  @property
  def state(self):
    """The current JobState of this job."""
    return self._state

  @state.setter
  def state(self, new_state):
    # Inform consumers of the new state.  (Named `consumer` rather than
    # `queue` so the stdlib `queue` module is not shadowed.)
    for consumer in self._state_queues:
      consumer.put(new_state)
    self._state = new_state

  def get_state(self):
    return self.state

  def prepare(self):
    pass

  def artifact_staging_endpoint(self):
    return self._artifact_staging_endpoint

  def run(self):
    """Start executing the pipeline on a background thread."""
    self.state = beam_job_api_pb2.JobState.STARTING
    self._run_thread = threading.Thread(target=self._run_job)
    self._run_thread.start()

  def _run_job(self):
    """Thread body: run the pipeline and record the terminal state/result."""
    self.state = beam_job_api_pb2.JobState.RUNNING
    with JobLogHandler(self._log_queues):
      try:
        result = fn_api_runner.FnApiRunner(
            provision_info=self._provision_info).run_via_runner_api(
                self._pipeline_proto)
        logging.info('Successfully completed job.')
        self.state = beam_job_api_pb2.JobState.DONE
        self.result = result
      except:  # pylint: disable=bare-except
        # Fix: the original additionally called logging.exception(traceback),
        # which logged the `traceback` *module* object (not the error) and
        # duplicated the exc_info already captured by the line below.
        logging.exception('Error running pipeline.')
        self.state = beam_job_api_pb2.JobState.FAILED
        raise

  def cancel(self):
    if not self.is_terminal_state(self.state):
      self.state = beam_job_api_pb2.JobState.CANCELLING
      # TODO(robertwb): Actually cancel...
      self.state = beam_job_api_pb2.JobState.CANCELLED

  def get_state_stream(self):
    """Yield the current state followed by every subsequent state change."""
    # Register for any new state changes.
    state_queue = queue.Queue()
    self._state_queues.append(state_queue)

    yield self.state
    while True:
      current_state = state_queue.get(block=True)
      yield current_state
      if self.is_terminal_state(current_state):
        break

  def get_message_stream(self):
    """Yield log messages interleaved with state changes until terminal.

    State changes arrive on the same queue as messages (ints vs. JobMessage
    protos), which is how the loop knows when to stop.
    """
    # Register for any new messages.
    log_queue = queue.Queue()
    self._log_queues.append(log_queue)
    self._state_queues.append(log_queue)

    current_state = self.state
    yield current_state
    while not self.is_terminal_state(current_state):
      msg = log_queue.get(block=True)
      yield msg
      if isinstance(msg, int):
        current_state = msg
class BeamFnLoggingServicer(beam_fn_api_pb2_grpc.BeamFnLoggingServicer):
  """Receives log entries streamed from SDK workers and forwards them to the
  local ``logging`` module."""

  def Logging(self, log_bundles, context=None):
    for log_bundle in log_bundles:
      for log_entry in log_bundle.log_entries:
        # Collapse newlines so each worker entry stays on a single log line.
        logging.info('Worker: %s', str(log_entry).replace('\n', ' '))
    # Bidirectional streaming RPC; nothing is ever sent back to the worker.
    return iter([])
class JobLogHandler(logging.Handler):
  """Captures logs to be returned via the Beam Job API.

  Enabled via the with statement."""

  # Mapping from logging levels to LogEntry levels.
  LOG_LEVEL_MAP = {
      logging.FATAL: beam_job_api_pb2.JobMessage.JOB_MESSAGE_ERROR,
      logging.CRITICAL: beam_job_api_pb2.JobMessage.JOB_MESSAGE_ERROR,
      logging.ERROR: beam_job_api_pb2.JobMessage.JOB_MESSAGE_ERROR,
      logging.WARNING: beam_job_api_pb2.JobMessage.JOB_MESSAGE_WARNING,
      logging.INFO: beam_job_api_pb2.JobMessage.JOB_MESSAGE_BASIC,
      logging.DEBUG: beam_job_api_pb2.JobMessage.JOB_MESSAGE_DEBUG,
  }

  def __init__(self, log_queues):
    super(JobLogHandler, self).__init__()
    # Counter used to mint monotonically increasing message ids.
    self._last_id = 0
    self._logged_thread = None
    self._log_queues = log_queues

  def __enter__(self):
    # Remember the current thread to demultiplex the logs of concurrently
    # running pipelines (as Python log handlers are global).
    self._logged_thread = threading.current_thread()
    logging.getLogger().addHandler(self)

  def __exit__(self, *args):
    self._logged_thread = None
    self.close()

  def _next_id(self):
    # Ids are returned as strings, per the JobMessage proto field type.
    self._last_id += 1
    return str(self._last_id)

  def emit(self, record):
    # Only forward records from the thread that entered this handler; records
    # from other threads belong to other concurrently running pipelines.
    if self._logged_thread is threading.current_thread():
      msg = beam_job_api_pb2.JobMessage(
          message_id=self._next_id(),
          time=time.strftime('%Y-%m-%d %H:%M:%S.',
                             time.localtime(record.created)),
          importance=self.LOG_LEVEL_MAP[record.levelno],
          message_text=self.format(record))

      # Inform all message consumers.
      for queue in self._log_queues:
        queue.put(msg)
| 35.509036 | 79 | 0.731445 |
5d59c4b2d3f1f5030687e3969d85b87d9f89af8f | 1,145 | py | Python | backend/migrations/versions/557da25b46fc_initial_migration.py | Astropilot/BigEye | 4e1c246391c5f6e1c8ea4e4c35ee5fec209960a6 | [
"MIT"
] | null | null | null | backend/migrations/versions/557da25b46fc_initial_migration.py | Astropilot/BigEye | 4e1c246391c5f6e1c8ea4e4c35ee5fec209960a6 | [
"MIT"
] | null | null | null | backend/migrations/versions/557da25b46fc_initial_migration.py | Astropilot/BigEye | 4e1c246391c5f6e1c8ea4e4c35ee5fec209960a6 | [
"MIT"
] | null | null | null | """Initial migration.
Revision ID: 557da25b46fc
Revises:
Create Date: 2022-01-21 13:41:40.574867
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '557da25b46fc'
down_revision = None  # first migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``users`` table and a unique index on its email column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('users',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('email', sa.String(length=320), nullable=False),
    sa.Column('username', sa.String(length=100), nullable=False),
    sa.Column('role', sa.Enum('CLASSIC', 'ADMIN', name='userroles'), nullable=False),
    sa.Column('password', sa.String(length=128), nullable=False),
    sa.Column('created_at', sa.DateTime(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
    # ### end Alembic commands ###
def downgrade():
    """Drop the ``users`` table and its email index (reverse of upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_table('users')
    # ### end Alembic commands ###
77ebab5522e8edfa0169d6094146a56a7c565184 | 5,539 | py | Python | app/server/__init__.py | SSU-NC-22/AirPost_Sink | a867441886a25f2edccb118e5a1e7b5e1ed05891 | [
"Apache-2.0"
] | null | null | null | app/server/__init__.py | SSU-NC-22/AirPost_Sink | a867441886a25f2edccb118e5a1e7b5e1ed05891 | [
"Apache-2.0"
] | 2 | 2021-09-27T13:17:16.000Z | 2021-09-27T13:18:24.000Z | app/server/__init__.py | SSU-NC-22/AirPost_Sink | a867441886a25f2edccb118e5a1e7b5e1ed05891 | [
"Apache-2.0"
] | 1 | 2021-07-20T13:08:19.000Z | 2021-07-20T13:08:19.000Z | import ast
import datetime
import json
import time
import threading
import socket
import paho.mqtt.client as mqtt
from flask import Flask
from flask import request
from kafka import KafkaProducer
from message.mqtt_message import MqttMessages
from .healthcheck import HealthCheck
from .actuator import Actuator
from .http_codes import http_response_code
from .setup import args
def on_connect(client, userdata, flags, rc):
    """paho-mqtt connect callback: announce a successful broker connection."""
    status = "connected to mqtt broker"
    print(status)
def on_subscribe():
    """Log that a subscription was acknowledged.

    Note: takes no arguments and is never registered on the client, so it is
    effectively unused.
    """
    message = "subscribed"
    print(message)
def on_message(client, userdata, message):
    """Default paho-mqtt message callback: just trace that a message arrived."""
    notice = "messaging"
    print(notice)
# 할 일 : 메시지 받아서 카프카로 전송하는 메시지 형식 변경 필요 mqtt_message.py 파일 수정.
# give message to kafka as kafka producer
def send_message_to_kafka(msg):
    """Forward an incoming MQTT data message to the 'sensor-data' Kafka topic.

    Uses the module-level ``producer`` and ``topic_manager`` globals.
    """
    # Topic arrives as 'data/<node>'; split into ['data', <node>] — the
    # second segment is the sensor-node name.
    v_topic = msg.topic.split('/')
    kafka_message = msg.payload.decode()
    kafka_message = json.loads(kafka_message)  # decode JSON payload into a dict
    # Remember the node so health checks can target it later.
    topic_manager.add_node(str(v_topic[1]))
    print("data by mqtt: sending message to kafka : %s" % msg)
    print(kafka_message)
    producer.send("sensor-data", kafka_message)
    producer.flush()
def handle_uplink_command(msg):
    """Handle an uplink MAC command (topic: command/uplink/<command>/<node>).

    Only DevStatusAns is handled: the reporting node is marked alive in the
    module-level ``health_check`` with its decoded status payload.
    """
    v_topic = msg.topic.split('/')  # command / uplink / MacCommand / nodeid
    if v_topic[2] == 'DevStatusAns':
        print('Received DevStatusAns!')
        json_msg = json.loads(str(msg.payload.decode()))
        health_check.set_node_state(v_topic[3], True, json_msg)
# callbacks
def data_callback(client, userdata, msg):
    # Thin adapter matching paho's message_callback_add signature.
    return send_message_to_kafka(msg)


def command_callback(client, userdata, msg):
    # Thin adapter matching paho's message_callback_add signature.
    return handle_uplink_command(msg)
# connecting mqtt client to mqtt broker
def mqtt_run():
    """Connect the global MQTT client to the broker and subscribe to topics.

    Subscribes 'data/#' (sensor data -> Kafka) and 'command/uplink/#'
    (MAC command answers -> health check); starts the network loop thread.
    """
    client.on_connect = on_connect
    #client.on_message = on_message
    client.on_disconnect = on_disconnect
    client.connect(args.b, 1883)  # args.b: broker host; 1883 = MQTT default port
    client.loop_start()
    client.message_callback_add('data/#', data_callback)
    client.message_callback_add("command/uplink/#", command_callback)
    client.subscribe('data/#')
    client.subscribe("command/uplink/#")
    return http_response_code['success200']
def on_disconnect(client, user_data, rc):
    """paho-mqtt disconnect callback: log the event and close the client."""
    print("Disconnected")
    client.disconnect()
def health_check_handler():
    """Background loop: periodically push health-check status JSON.

    While health-check mode is enabled, each cycle opens a TCP connection to
    the health-check server, requests node status, sleeps one interval, and
    sends the aggregated JSON.
    """
    while(1):
        if health_check.get_health_check_mode():
            healthcheck_server = '192.168.0.18' # '220.70.2.5'
            healthcheck_port = 8085
            client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            print('Connect to HealthCheck Server...')
            client_socket.connect((healthcheck_server, healthcheck_port))
            print("Connected to HealthCheck...")
            print("healthcheck target: ", topic_manager.get_nodes())
            health_check.setup_target_nodelist(topic_manager.get_nodes())
            # NOTE(review): send_req receives the global MQTT client, not the
            # TCP socket — presumably it requests status over MQTT; confirm.
            health_check.send_req(client)
            time.sleep(health_check.get_time())
            print("health_check: Send Json to HealthCheck Server...")
            client_socket.sendall(health_check.create_msg())
            # NOTE(review): client_socket is never closed, so each cycle
            # leaks a connection — confirm whether this is intended.
# start the node webserver
# --- module-level wiring (runs at import time) ---
app = Flask(__name__)
# Fire-and-forget Kafka producer (acks=0), gzip-compressed JSON values.
producer = KafkaProducer(acks=0, compression_type='gzip', bootstrap_servers=[args.k+':9092'], value_serializer=lambda v: json.dumps(v).encode('utf-8'))
topic_manager = MqttMessages()
client = mqtt.Client()
app.debug = False
#app.threaded = True
health_check = HealthCheck()
actuator = Actuator()
# Connect to the MQTT broker and subscribe before the HTTP app serves.
mqtt_run()
# create socket and run health_check thread
health_check.set_health_check_mode(True)
th = threading.Thread(target=health_check_handler, args=())
th.start()
# setting interval of the health check time
@app.route('/health-check/set_time/<time>', methods=['GET'])
def health_check_set_time(time):
    """Set the health-check interval from the ``<time>`` URL segment.

    Fix: the route declares a ``<time>`` variable but the original view took
    no parameters, so Flask raised a TypeError on every request.  The
    parameter name must match the rule variable; it shadows the stdlib
    ``time`` module only inside this function.
    """
    health_check.set_time(time)
    return http_response_code['success200']
# interval of the health check time
@app.route('/health-check/time', methods=['GET'])
def health_check_get_time():
    """Return the current health-check interval.

    Fix: the original handler discarded the value of
    ``health_check.get_time()`` and always returned a generic success code,
    so the GET endpoint never reported the interval it exists to expose.
    """
    # str() so Flask can use the interval directly as the response body.
    return str(health_check.get_time())
# make the format of the topics from the data which toiot server gave
@app.route('/topics', methods=['POST'])
def response_getMessageFormat():
    """Rebuild MQTT topic subscriptions from a JSON body sent by the server.

    NOTE(review): clears topics only in topic_manager; previously subscribed
    broker topics are not unsubscribed here — confirm intended.
    """
    topic_manager.clear_topics()
    temp = json.loads(request.get_data().decode())
    topic_manager.get_message_format(temp)
    client.subscribe(topic_manager.mqtt_topic)
    print(topic_manager.mqtt_topic)
    return http_response_code['success200']
# delete node
@app.route('/node/<node>', methods=['GET', 'DELETE'])
def delete_node(node):
    """Unsubscribe the MQTT topic associated with ``node``."""
    client.unsubscribe(topic_manager.get_delete_node(node))
    return http_response_code['success200']
# handle actuator
@app.route('/actuator', methods=['GET', 'POST'])
def actuator_command():
    """Forward an actuator command (JSON request body) over MQTT."""
    json_data = request.get_json(silent=True)  # silent=True: None on bad JSON
    print(type(json_data), "type of data")
    actuator.send_req(client, json_data)
    return http_response_code['success200']
# handle actuator
@app.route('/drone', methods=['GET', 'POST'])
def drone_command():
    """Forward a drone command (JSON request body) over MQTT.

    NOTE(review): body is identical to actuator_command — consider sharing
    one helper if the endpoints are meant to stay in sync.
    """
    json_data = request.get_json(silent=True)
    print(type(json_data), "type of data")
    actuator.send_req(client, json_data)
    return http_response_code['success200']
# error handlers
# Each handler returns the canned response registered in http_response_code.
@app.errorhandler(400)
def page_bad_request(error):
    return http_response_code['error400']


@app.errorhandler(401)
def page_unauthorized(error):
    return http_response_code['error401']


@app.errorhandler(403)
def page_forbidden(error):
    return http_response_code['error403']


@app.errorhandler(404)
def page_not_found(error):
    return http_response_code['error404']


@app.errorhandler(408)
def page_timeout(error):
    return http_response_code['error408']
0329eb3cf280a6ade062eaf80f16b99217383b18 | 3,397 | py | Python | gym_minigrid/envs/keycorridor.py | patras91/gym-minigrid | 8c47bc2685d1070f7b5146685a52d60024bc6349 | [
"Apache-2.0"
] | 1 | 2021-09-28T12:56:56.000Z | 2021-09-28T12:56:56.000Z | gym_minigrid/envs/keycorridor.py | patras91/gym-minigrid | 8c47bc2685d1070f7b5146685a52d60024bc6349 | [
"Apache-2.0"
] | null | null | null | gym_minigrid/envs/keycorridor.py | patras91/gym-minigrid | 8c47bc2685d1070f7b5146685a52d60024bc6349 | [
"Apache-2.0"
] | 1 | 2022-03-31T16:30:39.000Z | 2022-03-31T16:30:39.000Z | from gym_minigrid.roomgrid import RoomGrid
from gym_minigrid.register import register
class KeyCorridor(RoomGrid):
"""
A ball is behind a locked door, the key is placed in a
random room.
"""
def __init__(
self,
num_rows=3,
obj_type="ball",
room_size=6,
seed=None
):
self.obj_type = obj_type
super().__init__(
room_size=room_size,
num_rows=num_rows,
max_steps=30*room_size**2,
seed=seed,
)
def _gen_grid(self, width, height):
super()._gen_grid(width, height)
# Connect the middle column rooms into a hallway
for j in range(1, self.num_rows):
self.remove_wall(1, j, 3)
# Add a locked door on the bottom right
# Add an object behind the locked door
room_idx = self._rand_int(0, self.num_rows)
door, _ = self.add_door(2, room_idx, 2, locked=True)
obj, _ = self.add_object(2, room_idx, kind=self.obj_type)
# Add a key in a random room on the left side
self.add_object(0, self._rand_int(0, self.num_rows), 'key', door.color)
# Place the agent in the middle
self.place_agent(1, self.num_rows // 2)
# Make sure all rooms are accessible
self.connect_all()
self.obj = obj
self.mission = "pick up the %s %s" % (obj.color, obj.type)
def step(self, action):
obs, reward, done, info = super().step(action)
if action == self.actions.pickup:
if self.carrying and self.carrying == self.obj:
reward = self._reward()
done = True
return obs, reward, done, info
# Fixed-size variants: KeyCorridorS<room_size>R<num_rows>.
class KeyCorridorS3R1(KeyCorridor):
    """room_size=3, 1 row of rooms."""

    def __init__(self, seed=None):
        super().__init__(
            room_size=3,
            num_rows=1,
            seed=seed
        )

class KeyCorridorS3R2(KeyCorridor):
    """room_size=3, 2 rows of rooms."""

    def __init__(self, seed=None):
        super().__init__(
            room_size=3,
            num_rows=2,
            seed=seed
        )

class KeyCorridorS3R3(KeyCorridor):
    """room_size=3, 3 rows of rooms."""

    def __init__(self, seed=None):
        super().__init__(
            room_size=3,
            num_rows=3,
            seed=seed
        )

class KeyCorridorS4R3(KeyCorridor):
    """room_size=4, 3 rows of rooms."""

    def __init__(self, seed=None):
        super().__init__(
            room_size=4,
            num_rows=3,
            seed=seed
        )

class KeyCorridorS5R3(KeyCorridor):
    """room_size=5, 3 rows of rooms."""

    def __init__(self, seed=None):
        super().__init__(
            room_size=5,
            num_rows=3,
            seed=seed
        )

class KeyCorridorS6R3(KeyCorridor):
    """room_size=6, 3 rows of rooms (the KeyCorridor defaults)."""

    def __init__(self, seed=None):
        super().__init__(
            room_size=6,
            num_rows=3,
            seed=seed
        )
# Register each variant with Gym under a 'MiniGrid-*' environment id.
register(
    id='MiniGrid-KeyCorridorS3R1-v0',
    entry_point='gym_minigrid.envs:KeyCorridorS3R1'
)

register(
    id='MiniGrid-KeyCorridorS3R2-v0',
    entry_point='gym_minigrid.envs:KeyCorridorS3R2'
)

register(
    id='MiniGrid-KeyCorridorS3R3-v0',
    entry_point='gym_minigrid.envs:KeyCorridorS3R3'
)

register(
    id='MiniGrid-KeyCorridorS4R3-v0',
    entry_point='gym_minigrid.envs:KeyCorridorS4R3'
)

register(
    id='MiniGrid-KeyCorridorS5R3-v0',
    entry_point='gym_minigrid.envs:KeyCorridorS5R3'
)

register(
    id='MiniGrid-KeyCorridorS6R3-v0',
    entry_point='gym_minigrid.envs:KeyCorridorS6R3'
)
a7ba766233597c0d99d704232c4a7c3899fd17bf | 6,789 | py | Python | goji/daemon/client.py | zcomputerwiz/gojiv2-blockchain | 3be896d4dcb48a734f8d2a901ab5648201fbd4d7 | [
"Apache-2.0"
] | 2 | 2022-02-09T04:30:19.000Z | 2022-03-19T14:01:43.000Z | goji/daemon/client.py | zcomputerwiz/goji-blockchain | 3be896d4dcb48a734f8d2a901ab5648201fbd4d7 | [
"Apache-2.0"
] | 1 | 2021-12-30T09:17:47.000Z | 2021-12-30T09:17:47.000Z | goji/daemon/client.py | zcomputerwiz/gojiv2-blockchain | 3be896d4dcb48a734f8d2a901ab5648201fbd4d7 | [
"Apache-2.0"
] | 1 | 2022-03-15T08:42:52.000Z | 2022-03-15T08:42:52.000Z | import asyncio
import json
import ssl
from contextlib import asynccontextmanager
from pathlib import Path
from typing import Any, Dict, Optional
import websockets
from goji.types.blockchain_format.sized_bytes import bytes32
from goji.util.config import load_config
from goji.util.json_util import dict_to_json_str
from goji.util.ws_message import WsRpcMessage, create_payload_dict
class DaemonProxy:
    """Client-side proxy for the daemon's websocket RPC interface.

    Responses are matched to requests by the ``request_id`` carried in every
    message; a background listener task fulfils the per-request events.
    """

    def __init__(self, uri: str, ssl_context: Optional[ssl.SSLContext]):
        self._uri = uri
        # request_id -> Event set when the matching response has arrived.
        self._request_dict: Dict[bytes32, asyncio.Event] = {}
        self.response_dict: Dict[bytes32, Any] = {}
        self.ssl_context = ssl_context

    def format_request(self, command: str, data: Dict[str, Any]) -> WsRpcMessage:
        """Wrap command/data in the standard client->daemon payload envelope."""
        request = create_payload_dict(command, data, "client", "daemon")
        return request

    async def start(self):
        """Open the websocket and spawn the background response listener."""
        self.websocket = await websockets.connect(self._uri, max_size=None, ssl=self.ssl_context)

        async def listener():
            while True:
                try:
                    message = await self.websocket.recv()
                except websockets.exceptions.ConnectionClosedOK:
                    # Clean shutdown of the socket ends the listener task.
                    return None
                decoded = json.loads(message)
                id = decoded["request_id"]

                # Store the response and wake the waiter in _get().
                if id in self._request_dict:
                    self.response_dict[id] = decoded
                    self._request_dict[id].set()

        asyncio.create_task(listener())
        await asyncio.sleep(1)

    async def _get(self, request: WsRpcMessage) -> WsRpcMessage:
        """Send ``request`` and wait (up to ~30s) for the matching response.

        Returns None when the wait times out before a response arrives.
        """
        request_id = request["request_id"]
        self._request_dict[request_id] = asyncio.Event()
        string = dict_to_json_str(request)
        asyncio.create_task(self.websocket.send(string))

        async def timeout():
            await asyncio.sleep(30)
            if request_id in self._request_dict:
                print("Error, timeout.")
                # Unblock the waiter even though no response was received.
                self._request_dict[request_id].set()

        asyncio.create_task(timeout())
        await self._request_dict[request_id].wait()
        if request_id in self.response_dict:
            response = self.response_dict[request_id]
            self.response_dict.pop(request_id)
        else:
            response = None
        self._request_dict.pop(request_id)
        return response

    async def get_version(self) -> WsRpcMessage:
        data: Dict[str, Any] = {}
        request = self.format_request("get_version", data)
        response = await self._get(request)
        return response

    async def start_service(self, service_name: str) -> WsRpcMessage:
        data = {"service": service_name}
        request = self.format_request("start_service", data)
        response = await self._get(request)
        return response

    async def stop_service(self, service_name: str, delay_before_kill: int = 15) -> WsRpcMessage:
        # NOTE(review): delay_before_kill is accepted but not forwarded to the
        # daemon — confirm whether it should be part of the payload.
        data = {"service": service_name}
        request = self.format_request("stop_service", data)
        response = await self._get(request)
        return response

    async def is_running(self, service_name: str) -> bool:
        """Return True if the daemon reports the named service as running."""
        data = {"service": service_name}
        request = self.format_request("is_running", data)
        response = await self._get(request)

        if "is_running" in response["data"]:
            return bool(response["data"]["is_running"])

        return False

    async def is_keyring_locked(self) -> bool:
        """Return True if the daemon reports the keyring as locked."""
        data: Dict[str, Any] = {}
        request = self.format_request("is_keyring_locked", data)
        response = await self._get(request)

        if "is_keyring_locked" in response["data"]:
            return bool(response["data"]["is_keyring_locked"])

        return False

    async def unlock_keyring(self, passphrase: str) -> WsRpcMessage:
        data = {"key": passphrase}
        request = self.format_request("unlock_keyring", data)
        response = await self._get(request)
        return response

    async def notify_keyring_migration_completed(self, passphrase: Optional[str]) -> WsRpcMessage:
        data: Dict[str, Any] = {"key": passphrase}
        request: WsRpcMessage = self.format_request("notify_keyring_migration_completed", data)
        response: WsRpcMessage = await self._get(request)
        return response

    async def ping(self) -> WsRpcMessage:
        request = self.format_request("ping", {})
        response = await self._get(request)
        return response

    async def close(self) -> None:
        await self.websocket.close()

    async def exit(self) -> WsRpcMessage:
        """Ask the daemon process to shut itself down."""
        request = self.format_request("exit", {})
        return await self._get(request)
async def connect_to_daemon(self_hostname: str, daemon_port: int, ssl_context: Optional[ssl.SSLContext]) -> DaemonProxy:
    """
    Connect to the local daemon.
    """
    client = DaemonProxy(f"wss://{self_hostname}:{daemon_port}", ssl_context)
    # start() opens the websocket and launches the response-listener task.
    await client.start()
    return client
async def connect_to_daemon_and_validate(root_path: Path, quiet: bool = False) -> Optional[DaemonProxy]:
    """
    Connect to the local daemon and do a ping to ensure that something is really
    there and running.

    Returns the live DaemonProxy, or None when the daemon is unreachable.
    """
    # Imported lazily to avoid a circular import at module load time.
    from goji.server.server import ssl_context_for_client

    try:
        net_config = load_config(root_path, "config.yaml")
        crt_path = root_path / net_config["daemon_ssl"]["private_crt"]
        key_path = root_path / net_config["daemon_ssl"]["private_key"]
        ca_crt_path = root_path / net_config["private_ssl_ca"]["crt"]
        ca_key_path = root_path / net_config["private_ssl_ca"]["key"]
        ssl_context = ssl_context_for_client(ca_crt_path, ca_key_path, crt_path, key_path)
        connection = await connect_to_daemon(net_config["self_hostname"], net_config["daemon_port"], ssl_context)
        r = await connection.ping()

        if "value" in r["data"] and r["data"]["value"] == "pong":
            return connection
    except Exception:
        # NOTE(review): any failure (missing config, TLS error, refused
        # connection) is treated as "daemon not started"; a failed ping also
        # leaves the opened connection unclosed — confirm.
        if not quiet:
            print("Daemon not started yet")
        return None
    return None
@asynccontextmanager
async def acquire_connection_to_daemon(root_path: Path, quiet: bool = False):
    """
    Asynchronous context manager which attempts to create a connection to the daemon.
    The connection object (DaemonProxy) is yielded to the caller. After the caller's
    block exits scope, execution resumes in this function, wherein the connection is
    closed.

    NOTE(review): yields None when no daemon is reachable, and exceptions
    raised inside the caller's block are printed and swallowed here —
    callers must cope with both behaviors.
    """
    from goji.daemon.client import connect_to_daemon_and_validate

    daemon: Optional[DaemonProxy] = None
    try:
        daemon = await connect_to_daemon_and_validate(root_path, quiet=quiet)
        yield daemon  # <----
    except Exception as e:
        print(f"Exception occurred while communicating with the daemon: {e}")

    if daemon is not None:
        await daemon.close()
| 37.098361 | 120 | 0.659596 |
7bb48801bd147abe300c5fd6239c2e6a1e97c94d | 1,729 | py | Python | src/outpost/django/campusonline/migrations/0029_indices.py | medunigraz/outpost.django.campusonline | 06776bce7556e438c1e00a96aaa9271a7aac8fe4 | [
"BSD-2-Clause"
] | null | null | null | src/outpost/django/campusonline/migrations/0029_indices.py | medunigraz/outpost.django.campusonline | 06776bce7556e438c1e00a96aaa9271a7aac8fe4 | [
"BSD-2-Clause"
] | null | null | null | src/outpost/django/campusonline/migrations/0029_indices.py | medunigraz/outpost.django.campusonline | 06776bce7556e438c1e00a96aaa9271a7aac8fe4 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-08-31 09:37
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Raw-SQL migration: create (forward) / drop (reverse) lookup indexes on
    # the CampusOnline distribution-list tables.
    forward = [
        """
        CREATE UNIQUE INDEX campusonline_distributionlist_id_idx ON "public"."campusonline_distributionlist" ("id");
        """,
        """
        CREATE INDEX campusonline_distributionlist_name_idx ON "public"."campusonline_distributionlist" ("name");
        """,
        """
        CREATE UNIQUE INDEX campusonline_distributionlist_person_distributionlist_id_person_id_idx ON "public"."campusonline_distributionlist_person" ("distributionlist_id", "person_id");
        """,
        """
        CREATE INDEX campusonline_distributionlist_person_distributionlist_id_idx ON "public"."campusonline_distributionlist_person" ("distributionlist_id");
        """,
        """
        CREATE INDEX campusonline_distributionlist_person_person_id_idx ON "public"."campusonline_distributionlist_person" ("person_id");
        """,
    ]

    # Drops the same indexes in reverse creation order; IF EXISTS keeps the
    # rollback idempotent.
    reverse = [
        """
        DROP INDEX IF EXISTS campusonline_distributionlist_person_person_id_idx;
        """,
        """
        DROP INDEX IF EXISTS campusonline_distributionlist_person_distributionlist_id_idx;
        """,
        """
        DROP INDEX IF EXISTS campusonline_distributionlist_person_distributionlist_id_person_id_idx;
        """,
        """
        DROP INDEX IF EXISTS campusonline_distributionlist_name_idx;
        """,
        """
        DROP INDEX IF EXISTS campusonline_distributionlist_id_idx;
        """,
    ]

    dependencies = [("campusonline", "0028_distributionlist")]

    operations = [migrations.RunSQL(forward, reverse)]
| 35.285714 | 187 | 0.676692 |
d44b5a4425929b5e879e7d623f0597c22627562a | 1,485 | py | Python | scripts/ImageToExcel2.py | elishatofunmi/Image-to-Excel | 8ce636d086999d903305eb3d0d2e0f95d8b8eb6f | [
"Apache-2.0"
] | null | null | null | scripts/ImageToExcel2.py | elishatofunmi/Image-to-Excel | 8ce636d086999d903305eb3d0d2e0f95d8b8eb6f | [
"Apache-2.0"
] | null | null | null | scripts/ImageToExcel2.py | elishatofunmi/Image-to-Excel | 8ce636d086999d903305eb3d0d2e0f95d8b8eb6f | [
"Apache-2.0"
] | 1 | 2021-12-13T09:00:59.000Z | 2021-12-13T09:00:59.000Z | import cv2
import pytesseract
import numpy as np
import os, sys
import pandas as pd
def engageData(out):
    """Parse OCR'd 'name / description' entries into a dict.

    Each entry is split on ' / '; the first part becomes the key and the
    second the value.  Entries without a separator map to 'nil'.

    Fixes the original implementation, whose ``except`` clause reused
    ``listData`` from a *previous* iteration (silently overwriting the wrong
    key) and raised NameError when the very first entry was malformed.
    """
    DictData = {}
    for k in out:
        listData = k.strip(' ').split(' / ')
        # Entries like 'Name' carry no ' / description' part.
        DictData[listData[0]] = listData[1] if len(listData) > 1 else 'nil'
    return DictData
def FrameData(cate):
    """Build a two-column DataFrame ('Name', 'description') from a mapping."""
    columns = {
        'Name': list(cate.keys()),
        'description': list(cate.values()),
    }
    return pd.DataFrame(columns)
def main(image, directory):
    """OCR a menu image and save parsed name/description pairs to .xlsx.

    NOTE(review): changes the process working directory as a side effect;
    the output file is written into ``directory``.
    """
    os.getcwd()  # NOTE(review): return value unused — likely leftover debugging
    os.chdir(directory)
    img = cv2.imread(image)
    gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    # Otsu binarisation, then invert so text is white on black for morphology.
    gray, img_bin = cv2.threshold(gray,128,255,cv2.THRESH_BINARY | cv2.THRESH_OTSU)
    gray = cv2.bitwise_not(img_bin)
    kernel = np.ones((2, 1), np.uint8)
    # Erode then dilate (morphological opening) to drop small noise specks.
    img = cv2.erode(gray, kernel, iterations=1)
    img = cv2.dilate(img, kernel, iterations=1)
    out_below = pytesseract.image_to_string(img)
    out = []
    # Items are separated by '-' in the OCR output; strip embedded newlines.
    for k in out_below.split('-'):
        out.append(k.replace('\n', ''))
    DictData = engageData(out)
    frame = FrameData(DictData)
    print(frame.head())
    # Output filename mirrors the image name with an .xlsx extension.
    fileName = image.split('.')[0] + '.xlsx'
    frame.to_excel(fileName)
    return
if __name__ == '__main__':
    # Example invocation; adjust the image name and working directory.
    image = 'menu.jpg'
    directory = '/home/odemakinde/Desktop/Image to Excel/Image-to-Excel/test images/'
    main(image, directory)
b89d6a69ac94e2feeab42aae020cb0e14e0558ee | 1,853 | py | Python | bootstrap.py | nanonyme/pypi-mirror | beac1cf639f5bf311060e1a1e0464195b56b2a00 | [
"ZPL-1.1"
] | 1 | 2015-11-08T16:07:54.000Z | 2015-11-08T16:07:54.000Z | bootstrap.py | nanonyme/pypi-mirror | beac1cf639f5bf311060e1a1e0464195b56b2a00 | [
"ZPL-1.1"
] | null | null | null | bootstrap.py | nanonyme/pypi-mirror | beac1cf639f5bf311060e1a1e0464195b56b2a00 | [
"ZPL-1.1"
] | null | null | null | ##############################################################################
#
# Copyright (c) 2006 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Bootstrap a buildout-based project
Simply run this script in a directory containing a buildout.cfg.
The script accepts buildout command-line options, so you can
use the -c option to specify an alternate configuration file.
$Id: bootstrap.py 78032 2007-07-16 14:26:49Z jim $
"""
import os, shutil, sys, tempfile, urllib2

# NOTE(review): legacy zc.buildout bootstrap script — Python 2 ONLY
# (py2 `exec ... in ...` statement syntax and urllib2). Vendored; do not
# modernise here.

# Scratch directory for the temporary setuptools/buildout eggs.
tmpeggs = tempfile.mkdtemp()

try:
    import pkg_resources
except ImportError:
    # setuptools not installed: fetch and run ez_setup to install it into
    # the temporary egg directory.
    ez = {}
    exec urllib2.urlopen('http://peak.telecommunity.com/dist/ez_setup.py'
                         ).read() in ez
    ez['use_setuptools'](to_dir=tmpeggs, download_delay=0)

    import pkg_resources

cmd = 'from setuptools.command.easy_install import main; main()'
if sys.platform == 'win32':
    cmd = '"%s"' % cmd # work around spawn lamosity on windows

# Install zc.buildout into tmpeggs via easy_install in a child interpreter.
ws = pkg_resources.working_set
assert os.spawnle(
    os.P_WAIT, sys.executable, sys.executable,
    '-c', cmd, '-mqNxd', tmpeggs, 'zc.buildout',
    dict(os.environ,
         PYTHONPATH=
         ws.find(pkg_resources.Requirement.parse('setuptools')).location
         ),
    ) == 0

# Run buildout's own bootstrap, then remove the scratch eggs.
ws.add_entry(tmpeggs)
ws.require('zc.buildout')
import zc.buildout.buildout
zc.buildout.buildout.main(sys.argv[1:] + ['bootstrap'])
shutil.rmtree(tmpeggs)
2b757fd4427f1fee4d775bd40a17283b181e1eef | 2,987 | py | Python | sdk/peering/azure-mgmt-peering/azure/mgmt/peering/models/_peering_management_client_enums.py | tzhanl/azure-sdk-for-python | 18cd03f4ab8fd76cc0498f03e80fbc99f217c96e | [
"MIT"
] | 1 | 2022-03-22T15:02:32.000Z | 2022-03-22T15:02:32.000Z | sdk/peering/azure-mgmt-peering/azure/mgmt/peering/models/_peering_management_client_enums.py | tzhanl/azure-sdk-for-python | 18cd03f4ab8fd76cc0498f03e80fbc99f217c96e | [
"MIT"
] | 1 | 2021-02-10T22:04:59.000Z | 2021-02-10T22:04:59.000Z | sdk/peering/azure-mgmt-peering/azure/mgmt/peering/models/_peering_management_client_enums.py | tzhanl/azure-sdk-for-python | 18cd03f4ab8fd76cc0498f03e80fbc99f217c96e | [
"MIT"
] | 1 | 2021-06-03T19:31:10.000Z | 2021-06-03T19:31:10.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from enum import Enum
class Name(str, Enum):
    """Peering SKU name (tier/family/billing combination); autogenerated API constants."""
    basic_exchange_free = "Basic_Exchange_Free"
    basic_direct_free = "Basic_Direct_Free"
    premium_direct_free = "Premium_Direct_Free"
    premium_exchange_metered = "Premium_Exchange_Metered"
    premium_direct_metered = "Premium_Direct_Metered"
    premium_direct_unlimited = "Premium_Direct_Unlimited"
class Tier(str, Enum):
    """Peering SKU tier."""
    basic = "Basic"
    premium = "Premium"
class Family(str, Enum):
    """Peering SKU family."""
    direct = "Direct"
    exchange = "Exchange"
class Size(str, Enum):
    """Peering SKU billing size."""
    free = "Free"
    metered = "Metered"
    unlimited = "Unlimited"
class Kind(str, Enum):
    """Kind of peering resource."""
    direct = "Direct"
    exchange = "Exchange"
class SessionAddressProvider(str, Enum):
    """Which party provides the BGP session addresses."""
    microsoft = "Microsoft"
    peer = "Peer"
class ConnectionState(str, Enum):
    """Lifecycle state of a peering connection."""
    none = "None"
    pending_approval = "PendingApproval"
    approved = "Approved"
    provisioning_started = "ProvisioningStarted"
    provisioning_failed = "ProvisioningFailed"
    provisioning_completed = "ProvisioningCompleted"
    validating = "Validating"
    active = "Active"
class SessionStateV4(str, Enum):
    """BGP session state for the IPv4 session (standard BGP FSM states plus pending changes)."""
    none = "None"
    idle = "Idle"
    connect = "Connect"
    active = "Active"
    open_sent = "OpenSent"
    open_confirm = "OpenConfirm"
    open_received = "OpenReceived"
    established = "Established"
    pending_add = "PendingAdd"
    pending_update = "PendingUpdate"
    pending_remove = "PendingRemove"
class SessionStateV6(str, Enum):
    """BGP session state for the IPv6 session (same values as SessionStateV4)."""
    none = "None"
    idle = "Idle"
    connect = "Connect"
    active = "Active"
    open_sent = "OpenSent"
    open_confirm = "OpenConfirm"
    open_received = "OpenReceived"
    established = "Established"
    pending_add = "PendingAdd"
    pending_update = "PendingUpdate"
    pending_remove = "PendingRemove"
class DirectPeeringType(str, Enum):
    """Type of a direct peering."""
    edge = "Edge"
    transit = "Transit"
    cdn = "Cdn"
    internal = "Internal"
class ProvisioningState(str, Enum):
    """ARM provisioning state of the resource."""
    succeeded = "Succeeded"
    updating = "Updating"
    deleting = "Deleting"
    failed = "Failed"
class ValidationState(str, Enum):
    """Validation state of a peering/registration."""
    none = "None"
    pending = "Pending"
    approved = "Approved"
    failed = "Failed"
class PrefixValidationState(str, Enum):
    """Validation state of an advertised prefix."""
    none = "None"
    invalid = "Invalid"
    verified = "Verified"
    failed = "Failed"
    pending = "Pending"
    warning = "Warning"
    unknown = "Unknown"
class LearnedType(str, Enum):
    """How a prefix was learned."""
    none = "None"
    via_service_provider = "ViaServiceProvider"
    via_session = "ViaSession"
| 21.644928 | 76 | 0.644459 |
097a7d7ae3a339b001871a098616186e85687952 | 334 | py | Python | test/unit/conftest.py | jlhood/timestamp-custom-resource | 69aee2bd3f00c2aff035a34325b5a7e1e7e7d919 | [
"MIT"
] | null | null | null | test/unit/conftest.py | jlhood/timestamp-custom-resource | 69aee2bd3f00c2aff035a34325b5a7e1e7e7d919 | [
"MIT"
] | null | null | null | test/unit/conftest.py | jlhood/timestamp-custom-resource | 69aee2bd3f00c2aff035a34325b5a7e1e7e7d919 | [
"MIT"
] | null | null | null | """Setup unit test environment."""
import sys
import os
# test_constants lives next to this conftest, so it is importable without the
# sys.path tweak below (which is only needed for the application code).
import test_constants
# make sure tests can import the app code
my_path = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, my_path + '/../../src/')
# set expected config environment variables to test constants
os.environ['LOG_LEVEL'] = test_constants.LOG_LEVEL
fefa2fb692ea92cdea3634f55bd54f522bf98b6b | 13,566 | py | Python | comic_dl/honcho.py | alextorquin/comic-dl | f34488e88ab462a83c3e4a169cff441e0192c453 | [
"MIT"
] | null | null | null | comic_dl/honcho.py | alextorquin/comic-dl | f34488e88ab462a83c3e4a169cff441e0192c453 | [
"MIT"
] | null | null | null | comic_dl/honcho.py | alextorquin/comic-dl | f34488e88ab462a83c3e4a169cff441e0192c453 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
import logging
from sites import foolSlide
from sites import readcomicOnlineto
from sites import comicNaver
from sites import mangaHere
from sites import rawSenManga
from sites import mangaFox
from sites import omgBeauPeep
from sites import mangaReader
from sites import mangaEden
from sites import acQQ
from sites import stripUtopia
from sites import readComicBooksOnline
from sites import readComicsWebsite
from sites import mangaRock
from sites import batoto
from sites import hqbr
from sites import comicextra
from sites import readComicsIO
import globalFunctions
class Honcho(object):
    """Routes a comic/manga URL to the matching site-specific downloader.

    ``checker`` inspects the URL's domain and hands the job to the
    corresponding scraper class from ``sites``; ``comic_language_resolver``
    maps numeric language codes (as strings) to language names.
    """

    def comic_language_resolver(self, language_code):
        """Return the language name for the numeric *language_code* string.

        Raises ``KeyError`` for codes outside "0".."29".
        """
        language_dict = {
            '0': 'English',
            '1': 'Italian',
            '2': 'Spanish',
            '3': 'French',
            '4': 'German',
            '5': 'Portuguese',
            '6': 'Turkish',
            '7': 'Indonesian',
            '8': 'Greek',
            '9': 'Filipino',
            '10': 'Polish',
            '11': 'Thai',
            '12': 'Malay',
            # BUGFIX: this key used to be '13 ' (trailing space), so language
            # code "13" raised KeyError instead of resolving to Hungarian.
            '13': 'Hungarian',
            '14': 'Romanian',
            # NOTE(review): the leading space in ' Arabic' looks like a typo,
            # but is kept as-is until downstream string matching is verified.
            '15': ' Arabic',
            '16': 'Hebrew',
            '17': 'Russian',
            '18': 'Vietnamese',
            '19': 'Dutch',
            '20': 'Bengali',
            '21': 'Persian',
            '22': 'Czech',
            '23': 'Brazilian',
            '24': 'Bulgarian',
            '25': 'Danish',
            '26': 'Esperanto',
            '27': 'Swedish',
            '28': 'Lithuanian',
            '29': 'Other'
        }
        return language_dict[language_code]

    def checker(self, comic_url, download_directory, chapter_range, **kwargs):
        """Dispatch *comic_url* to the downloader for its domain.

        Returns 0 after dispatching to a supported site, -1 for an
        unsupported flag combination (mangaeden + --print-index), and None
        when the domain is not supported at all.
        """
        user_name = kwargs.get("username")
        password = kwargs.get("password")
        current_directory = kwargs.get("current_directory")
        log_flag = kwargs.get("logger")
        sorting = kwargs.get("sorting_order")
        comic_language = kwargs.get("comic_language")
        print_index = kwargs.get("print_index")

        if log_flag is True:
            logging.basicConfig(format='%(levelname)s: %(message)s', filename="Error Log.log", level=logging.DEBUG)
        logging.debug("Comic Url : %s" % comic_url)

        domain = urlparse(comic_url).netloc
        logging.debug("Selected Domain : %s" % domain)

        # Remove the "/" from ending to make checking URL for Full Series or Single Chapter easier.
        if comic_url[-1] == "/":
            comic_url = comic_url[:-1]

        if domain in ["yomanga.co", "gomanga.co"]:
            foolSlide.FoolSlide(manga_url=comic_url, logger=logging, current_directory=current_directory,
                                sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                                chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                keep_files=kwargs.get("keep_files"))
            return 0
        elif domain in ["www.readcomiconline.to", "readcomiconline.to"]:
            readcomicOnlineto.ReadComicOnlineTo(manga_url=comic_url, logger=logging,
                                                current_directory=current_directory, sorting_order=sorting,
                                                log_flag=log_flag, download_directory=download_directory,
                                                chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                                keep_files=kwargs.get("keep_files"),
                                                image_quality=kwargs.get("image_quality"),
                                                print_index=print_index)
            return 0
        elif domain in ["www.comic.naver.com", "comic.naver.com"]:
            comicNaver.ComicNaver(manga_url=comic_url, logger=logging, current_directory=current_directory,
                                  sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                                  chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                  keep_files=kwargs.get("keep_files"),
                                  print_index=print_index)
            return 0
        elif domain in ["www.mangahere.co", "mangahere.co", "www.mangahere.cc", "mangahere.cc"]:
            mangaHere.MangaHere(manga_url=comic_url, logger=logging, current_directory=current_directory,
                                sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                                chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                keep_files=kwargs.get("keep_files"),
                                print_index=print_index)
            return 0
        elif domain in ["www.raw.senmanga.com", "raw.senmanga.com"]:
            rawSenManga.RawSenaManga(manga_url=comic_url, logger=logging, current_directory=current_directory,
                                     sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                                     chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                     keep_files=kwargs.get("keep_files"),
                                     print_index=print_index)
            return 0
        elif domain in ["www.mangafox.me", "mangafox.me", "www.mangafox.la", "mangafox.la", "www.fanfox.net",
                        "fanfox.net"]:
            mangaFox.MangaFox(manga_url=comic_url, logger=logging, current_directory=current_directory,
                              sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                              chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                              keep_files=kwargs.get("keep_files"),
                              print_index=print_index)
            return 0
        elif domain in ["www.omgbeaupeep.com", "omgbeaupeep.com", "www.otakusmash.com", "otakusmash.com"]:
            omgBeauPeep.OmgBeauPeep(manga_url=comic_url, logger=logging, current_directory=current_directory,
                                    sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                                    chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                    keep_files=kwargs.get("keep_files"),
                                    print_index=print_index)
            return 0
        # TODO KO --print-index -i http://ac.qq.com/Comic/comicInfo/id/547059?trace_id=907_27.156.162.231_1539265645 broken?
        elif domain in ["www.ac.qq.com", "ac.qq.com"]:
            acQQ.AcQq(manga_url=comic_url, logger=logging, current_directory=current_directory,
                      sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                      chapter_range=chapter_range,
                      print_index=print_index)
            return 0
        elif domain in ["www.striputopija.blogspot.in", "striputopija.blogspot.in", "www.striputopija.blogspot.com",
                        "striputopija.blogspot.com"]:
            stripUtopia.StripUtopia(manga_url=comic_url, logger=logging, current_directory=current_directory,
                                    sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                                    chapter_range=chapter_range,
                                    print_index=print_index)
            return 0
        elif domain in ["www.mangareader.net", "mangareader.net"]:
            mangaReader.MangaReader(manga_url=comic_url, logger=logging, current_directory=current_directory,
                                    sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                                    chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                    keep_files=kwargs.get("keep_files"),
                                    print_index=print_index)
            return 0
        elif domain in ["www.readcomicbooksonline.net", "readcomicbooksonline.net", "www.readcomicbooksonline.org",
                        "readcomicbooksonline.org"]:
            readComicBooksOnline.ReadComicBooksOnline(manga_url=comic_url, logger=logging,
                                                      current_directory=current_directory, sorting_order=sorting,
                                                      log_flag=log_flag, download_directory=download_directory,
                                                      chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                                      keep_files=kwargs.get("keep_files"),
                                                      print_index=print_index)
            return 0
        # TODO KO seems broken
        elif domain in ["www.readcomics.website", "readcomics.website"]:
            readComicsWebsite.ReadComicsWebsite(manga_url=comic_url, logger=logging,
                                                current_directory=current_directory, sorting_order=sorting,
                                                log_flag=log_flag, download_directory=download_directory,
                                                chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                                keep_files=kwargs.get("keep_files"),
                                                print_index=print_index)
            return 0
        elif domain in ["www.mangarock.com", "mangarock.com"]:
            mangaRock.MangaRock(manga_url=comic_url, logger=logging, current_directory=current_directory,
                                sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                                chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                keep_files=kwargs.get("keep_files"),
                                print_index=print_index)
            return 0
        elif domain in ["www.hqbr.com.br", "hqbr.com.br"]:
            hqbr.Hqbr(manga_url=comic_url, logger=logging, current_directory=current_directory,
                      sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                      chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                      keep_files=kwargs.get("keep_files"),
                      print_index=print_index)
            return 0
        elif domain in ["www.comicextra.com", "comicextra.com"]:
            comicextra.ComicExtra(manga_url=comic_url, logger=logging, current_directory=current_directory,
                                  sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                                  chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                  keep_files=kwargs.get("keep_files"),
                                  print_index=print_index)
            return 0
        # TODO KO seems broken
        elif domain in ["www.readcomics.io", "readcomics.io"]:
            readComicsIO.ReadComicsIO(manga_url=comic_url, logger=logging, current_directory=current_directory,
                                      sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                                      chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                      keep_files=kwargs.get("keep_files"),
                                      print_index=print_index)
            return 0
        elif domain in ["www.kissmanga.com", "kissmanga.com"]:
            # kissManga.KissManga(manga_url = comic_url, logger = logging,
            #                     current_directory = current_directory, sorting_order = sorting)
            print("Under Development!")
            return 0
        elif domain in ["www.bato.to", "bato.to"]:
            batoto.Batoto(manga_url=comic_url, logger=logging, current_directory=current_directory,
                          sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                          chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                          keep_files=kwargs.get("keep_files"), username=user_name, password=password,
                          comic_language=self.comic_language_resolver(comic_language),
                          print_index=print_index)
            return 0
        elif domain in ["www.mangaeden.com"]:
            if print_index:
                print("please use -find and -cid instead!")
                return -1
            mangaEden.MangaEden(manga_url=comic_url, logger=logging, current_directory=current_directory,
                                sorting_order=sorting, log_flag=log_flag, download_directory=download_directory,
                                chapter_range=chapter_range, conversion=kwargs.get("conversion"),
                                keep_files=kwargs.get("keep_files"))
            return 0
        else:
            # NOTE(review): this branch implicitly returns None, unlike the
            # others which return 0; kept as-is in case callers rely on it.
            print("%s is not supported at the moment. You can request it on the Github repository." % domain)
53171516643b606471cf00e38a3da00a47b17c2e | 535 | py | Python | epaper/question/migrations/0005_question_detail_importance_level.py | ahsanaatir/epaper | 5dbd79dfcebb6951b4729c397124afe6977cad24 | [
"Apache-2.0"
] | null | null | null | epaper/question/migrations/0005_question_detail_importance_level.py | ahsanaatir/epaper | 5dbd79dfcebb6951b4729c397124afe6977cad24 | [
"Apache-2.0"
] | null | null | null | epaper/question/migrations/0005_question_detail_importance_level.py | ahsanaatir/epaper | 5dbd79dfcebb6951b4729c397124afe6977cad24 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-16 15:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``importance_level`` choice field to ``question_detail``."""
    dependencies = [
        ('question', '0004_auto_20161016_1357'),
    ]
    operations = [
        migrations.AddField(
            model_name='question_detail',
            name='importance_level',
            # NOTE(review): stored values mix case styles ('easy' vs 'Diff');
            # kept as-is because editing an applied migration would break it.
            field=models.CharField(choices=[('easy', 'Easy'), ('Diff', 'Difficult')], default='easy', max_length=4),
        ),
    ]
| 25.47619 | 116 | 0.624299 |
f044818426ee97c6bd31632ae47f9b6c60f8cbb4 | 2,158 | py | Python | extra/plates/adtag.py | EQXTFL/avare | 6f401ea551b6d51ae6a0adc01915042b200efcc6 | [
"BSD-2-Clause"
] | 113 | 2015-02-21T16:37:51.000Z | 2022-02-21T16:29:03.000Z | extra/plates/adtag.py | EQXTFL/avare | 6f401ea551b6d51ae6a0adc01915042b200efcc6 | [
"BSD-2-Clause"
] | 266 | 2015-02-23T16:11:26.000Z | 2022-03-20T03:21:39.000Z | extra/plates/adtag.py | EQXTFL/avare | 6f401ea551b6d51ae6a0adc01915042b200efcc6 | [
"BSD-2-Clause"
] | 126 | 2015-01-11T21:53:13.000Z | 2022-02-14T05:11:13.000Z | # Copyright (c) 2012-2017, Apps4av Inc. (apps4av@gmail.com)
# Author: Zubair Khan
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# NOTE(review): Python 2 only — uses the `print` statement below.
import glob, os
# read AP diagram tag database
# d maps airport identifier -> comma-joined string of six float values
# (columns 6..11 of aps.csv).
d = {}
with open("aps.csv") as f:
    for line in f:
        (key, val0, val1, val2, val3, val4, val5, val6, val7, val8, val9, val10, val11) = line.rstrip().split(",")
        v6 = float(val6)
        v7 = float(val7)
        v8 = float(val8)
        v9 = float(val9)
        v10 = float(val10)
        v11 = float(val11)
        d[str(key)] = str(v6) + "," + str(v7) + "," + str(v8) + "," + str(v9) + "," + str(v10) + "," + str(v11)
# now get AD pngs that need to be tagged
for f in glob.iglob("plates/**/*AIRPORT-DIAGRAM.png"):
    # path layout assumed: plates/<airport>/<name>.png
    plates, airport, name = f.split("/")
    comment = d[airport]
    # NOTE(review): comment/path are interpolated into a shell command; safe
    # only because aps.csv and the plates tree are trusted local data.
    cmd = "mogrify -quiet -set Comment '" + comment + "' " + f
    # add comment tag
    if 0 != os.system(cmd) :
        print "unable to tag " + f
| 55.333333 | 756 | 0.699722 |
f956fe88baaa56bc46ab59d68e50b9921fdf16c6 | 216 | py | Python | modules/module9/9a/rename_files.py | shourya01/power_data_analytics_tools | 7621eaebb5b3bd107238016b07e5ae71c891c4b1 | [
"MIT"
] | 1 | 2021-09-26T17:29:46.000Z | 2021-09-26T17:29:46.000Z | modules/module9/9a/rename_files.py | shourya01/power_data_analytics_tools | 7621eaebb5b3bd107238016b07e5ae71c891c4b1 | [
"MIT"
] | null | null | null | modules/module9/9a/rename_files.py | shourya01/power_data_analytics_tools | 7621eaebb5b3bd107238016b07e5ae71c891c4b1 | [
"MIT"
] | null | null | null | import os
#os.rename("test.txt", "test.csv")
for filename in os.listdir("Data"):
if filename.endswith(".txt"):
os.rename("Data\\" + filename, "Data\\" + filename[:-4] + ".csv")
print("...Done")
| 24 | 73 | 0.564815 |
8d98a5e58372d437fe376dd901ec12418b723167 | 7,637 | py | Python | exps/experimental/GeMOSA/baselines/slbm-ft.py | Joey61Liuyi/AutoDL-Projects | 2092e144920e82d74753a7ac31e1890a150d41cf | [
"MIT"
] | 817 | 2020-01-15T00:23:41.000Z | 2022-03-31T14:52:03.000Z | exps/experimental/GeMOSA/baselines/slbm-ft.py | Joey61Liuyi/AutoDL-Projects | 2092e144920e82d74753a7ac31e1890a150d41cf | [
"MIT"
] | 77 | 2020-01-14T14:02:45.000Z | 2022-03-25T07:06:02.000Z | exps/experimental/GeMOSA/baselines/slbm-ft.py | Joey61Liuyi/AutoDL-Projects | 2092e144920e82d74753a7ac31e1890a150d41cf | [
"MIT"
] | 176 | 2020-01-15T10:39:41.000Z | 2022-03-31T04:24:53.000Z | #####################################################
# Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2021.04 #
#####################################################
# python exps/GeMOSA/baselines/slbm-ft.py --env_version v1 --hidden_dim 16 --epochs 500 --init_lr 0.1 --device cuda
# python exps/GeMOSA/baselines/slbm-ft.py --env_version v2 --hidden_dim 16 --epochs 500 --init_lr 0.1 --device cuda
# python exps/GeMOSA/baselines/slbm-ft.py --env_version v3 --hidden_dim 32 --epochs 1000 --init_lr 0.05 --device cuda
# python exps/GeMOSA/baselines/slbm-ft.py --env_version v4 --hidden_dim 32 --epochs 1000 --init_lr 0.05 --device cuda
#####################################################
import sys, time, copy, torch, random, argparse
from tqdm import tqdm
from copy import deepcopy
from pathlib import Path
lib_dir = (Path(__file__).parent / ".." / ".." / "..").resolve()
print("LIB-DIR: {:}".format(lib_dir))
if str(lib_dir) not in sys.path:
sys.path.insert(0, str(lib_dir))
from xautodl.procedures import (
prepare_seed,
prepare_logger,
save_checkpoint,
copy_checkpoint,
)
from xautodl.log_utils import time_string
from xautodl.log_utils import AverageMeter, convert_secs2time
from xautodl.procedures.metric_utils import (
SaveMetric,
MSEMetric,
Top1AccMetric,
ComposeMetric,
)
from xautodl.datasets.synthetic_core import get_synthetic_env
from xautodl.models.xcore import get_model
from xautodl.utils import show_mean_var
def subsample(historical_x, historical_y, maxn=10000):
    """Randomly keep at most *maxn* aligned rows of the two tensors.

    Inputs with at most *maxn* rows are returned unchanged (the very same
    tensor objects); larger inputs are sampled with replacement via
    ``torch.randint`` using one shared index tensor for both.
    """
    n_rows = historical_x.size(0)
    if n_rows <= maxn:
        return historical_x, historical_y
    picked = torch.randint(low=0, high=n_rows, size=[maxn])
    return historical_x[picked], historical_y[picked]
def main(args):
    """Per-timestamp fine-tuning baseline.

    For every timestamp of the synthetic test environment, trains a fresh
    model on the preceding ``args.seq_length`` observations, evaluates it on
    the current observation, and returns the final aggregated metric score.
    """
    prepare_seed(args.rand_seed)
    logger = prepare_logger(args)

    env = get_synthetic_env(mode="test", version=args.env_version)
    model_kwargs = dict(
        config=dict(model_type="norm_mlp"),
        input_dim=env.meta_info["input_dim"],
        output_dim=env.meta_info["output_dim"],
        hidden_dims=[args.hidden_dim] * 2,
        act_cls="relu",
        norm_cls="layer_norm_1d",
    )
    logger.log("The total enviornment: {:}".format(env))
    w_containers = dict()

    # Choose loss and metric by task type.
    if env.meta_info["task"] == "regression":
        criterion = torch.nn.MSELoss()
        metric_cls = MSEMetric
    elif env.meta_info["task"] == "classification":
        criterion = torch.nn.CrossEntropyLoss()
        metric_cls = Top1AccMetric
    else:
        raise ValueError(
            # BUGFIX: this message referenced the undefined name `all_env`,
            # turning the error path into a NameError.
            "This task ({:}) is not supported.".format(env.meta_info["task"])
        )

    def finetune(index):
        # Train a fresh model on the `args.seq_length` observations ending at
        # `index`; returns (training metric info, model with best weights).
        seq_times = env.get_seq_times(index, args.seq_length)
        _, (allxs, allys) = env.seq_call(seq_times)
        allxs, allys = allxs.view(-1, allxs.shape[-1]), allys.view(-1, 1)
        if env.meta_info["task"] == "classification":
            allys = allys.view(-1)
        historical_x, historical_y = allxs.to(args.device), allys.to(args.device)
        model = get_model(**model_kwargs)
        model = model.to(args.device)
        optimizer = torch.optim.Adam(model.parameters(), lr=args.init_lr, amsgrad=True)
        lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(
            optimizer,
            milestones=[
                int(args.epochs * 0.25),
                int(args.epochs * 0.5),
                int(args.epochs * 0.75),
            ],
            gamma=0.3,
        )

        train_metric = metric_cls(True)
        best_loss, best_param = None, None
        for _iepoch in range(args.epochs):
            preds = model(historical_x)
            optimizer.zero_grad()
            loss = criterion(preds, historical_y)
            loss.backward()
            optimizer.step()
            lr_scheduler.step()
            # keep the parameters with the lowest training loss seen so far
            if best_loss is None or best_loss > loss.item():
                best_loss = loss.item()
                best_param = copy.deepcopy(model.state_dict())
        model.load_state_dict(best_param)
        with torch.no_grad():
            train_metric(preds, historical_y)
        train_results = train_metric.get_info()
        return train_results, model

    metric = metric_cls(True)
    per_timestamp_time, start_time = AverageMeter(), time.time()
    for idx, (future_time, (future_x, future_y)) in enumerate(env):
        need_time = "Time Left: {:}".format(
            convert_secs2time(per_timestamp_time.avg * (len(env) - idx), True)
        )
        logger.log(
            "[{:}]".format(time_string())
            + " [{:04d}/{:04d}]".format(idx, len(env))
            + " "
            + need_time
        )
        # train on the data up to this timestamp
        train_results, model = finetune(idx)

        # NOTE(review): xmetric is constructed but never used; kept as-is in
        # case the constructors have side effects worth preserving.
        xmetric = ComposeMetric(metric_cls(True), SaveMetric())
        future_x, future_y = future_x.to(args.device), future_y.to(args.device)
        future_y_hat = model(future_x)
        future_loss = criterion(future_y_hat, future_y)
        metric(future_y_hat, future_y)
        log_str = (
            "[{:}]".format(time_string())
            + " [{:04d}/{:04d}]".format(idx, len(env))
            + " train-score: {:.5f}, eval-score: {:.5f}".format(
                train_results["score"], metric.get_info()["score"]
            )
        )
        logger.log(log_str)
        logger.log("")
        per_timestamp_time.update(time.time() - start_time)
        start_time = time.time()

    save_checkpoint(
        {"w_containers": w_containers},
        logger.path(None) / "final-ckp.pth",
        logger,
    )
    logger.log("-" * 200 + "\n")
    logger.close()
    return metric.get_info()["score"]
if __name__ == "__main__":
    # CLI for the per-timestamp fine-tuning baseline.
    parser = argparse.ArgumentParser("Use the data in the past.")
    parser.add_argument(
        "--save_dir",
        type=str,
        default="./outputs/GeMOSA-synthetic/use-same-ft-timestamp",
        help="The checkpoint directory.",
    )
    parser.add_argument(
        "--env_version",
        type=str,
        required=True,
        help="The synthetic enviornment version.",
    )
    parser.add_argument(
        "--hidden_dim",
        type=int,
        required=True,
        help="The hidden dimension.",
    )
    parser.add_argument(
        "--init_lr",
        type=float,
        default=0.1,
        help="The initial learning rate for the optimizer (default is Adam)",
    )
    parser.add_argument(
        "--seq_length", type=int, default=20, help="The sequence length."
    )
    parser.add_argument(
        "--batch_size",
        type=int,
        default=512,
        help="The batch size",
    )
    parser.add_argument(
        "--epochs",
        type=int,
        default=300,
        help="The total number of epochs.",
    )
    parser.add_argument(
        "--device",
        type=str,
        default="cpu",
        help="",
    )
    parser.add_argument(
        "--workers",
        type=int,
        default=4,
        help="The number of data loading workers (default: 4)",
    )
    # Random Seed
    parser.add_argument("--rand_seed", type=int, default=-1, help="manual seed")
    args = parser.parse_args()
    # Encode the key hyper-parameters into the output directory name.
    args.save_dir = "{:}-d{:}_e{:}_lr{:}-env{:}".format(
        args.save_dir, args.hidden_dim, args.epochs, args.init_lr, args.env_version
    )
    # A negative/absent seed means: run three trials with random seeds and
    # report the mean/variance of the resulting scores.
    if args.rand_seed is None or args.rand_seed < 0:
        results = []
        for iseed in range(3):
            args.rand_seed = random.randint(1, 100000)
            result = main(args)
            results.append(result)
        show_mean_var(results)
    else:
        main(args)
| 33.349345 | 117 | 0.590284 |
cda82fe7d883228dbbdd7ff30e759849bce8599a | 4,783 | py | Python | pangtreebuild/mafgraph/sorter.py | meoke/PangTreeBuild | 7cafb76df32c559a76ed1d269699dc0e52313312 | [
"MIT"
] | 2 | 2019-09-04T20:01:28.000Z | 2019-12-23T22:41:57.000Z | pangtreebuild/mafgraph/sorter.py | meoke/PangTreeBuild | 7cafb76df32c559a76ed1d269699dc0e52313312 | [
"MIT"
] | 2 | 2019-08-10T16:18:01.000Z | 2019-10-28T21:40:23.000Z | pangtreebuild/mafgraph/sorter.py | meoke/PangTreeBuild | 7cafb76df32c559a76ed1d269699dc0e52313312 | [
"MIT"
] | 2 | 2020-04-23T23:57:52.000Z | 2020-07-12T17:09:02.000Z | # -*- coding: utf-8 -*-
import networkx as nx
import six
from .mafreader import read_maf
from .weighted_edges import weight, Edge
def _out_edges(v, blocks, ub, G):
# Return a list of nodes connected to node v by edge leaving the vertex v
return [u for u in G[v] if ub >= blocks[u].order() > blocks[v].order()]
def _in_edges(v, blocks, lb, G):
# Return a list of nodes connected to node v by edge coming in to the vertex v
return [u for u in G[v] if lb < blocks[u].order() < blocks[v].order()]
def _dfs_f(root, ub, G, blocks):
    """Iterative forward DFS from *root* along edges towards higher orders.

    Returns the list of visited nodes, or [] as soon as any reached node has
    order exactly *ub* (the caller then skips the reordering step).
    """
    seen = set()
    pending = [root]
    while pending:
        current = pending.pop()
        if blocks[current].order() == ub:
            return []
        if current in seen:
            continue
        seen.add(current)
        for nxt in _out_edges(current, blocks, ub, G):
            if nxt not in seen:
                pending.append(nxt)
    return list(seen)
def _dfs_b(root, lb, G, blocks):
    """Iterative backward DFS from *root* along edges towards lower orders,
    bounded below (exclusively) by *lb*. Returns the visited node list."""
    seen = set()
    pending = [root]
    while pending:
        current = pending.pop()
        if current in seen:
            continue
        seen.add(current)
        for nxt in _in_edges(current, blocks, lb, G):
            if nxt not in seen:
                pending.append(nxt)
    return list(seen)
def _reorder(R_f, R_b, blocks):
R_f.sort(key = lambda x: blocks[x].order())
R_b.sort(key = lambda x: blocks[x].order())
L = R_b + R_f
O = sorted(blocks[x].order() for x in L)
if six.PY2:
for i in xrange(len(L)):
blocks[L[i]].reorder(O[i])
else:
for i in range(len(L)):
blocks[L[i]].reorder(O[i])
def _add_edge_within_component(x, y, G, blocks):
# Add edge between blocks of the same component
lb = blocks[y].order()
ub = blocks[x].order()
if lb is ub: return
elif lb < ub:
R_f = _dfs_f(y, ub, G, blocks)
if R_f:
R_b = _dfs_b(x, lb, G, blocks)
_reorder(R_f, R_b, blocks)
G.add_edge(x,y)
else:
G.add_edge(x,y)
def _add_edge_between_components(e, blocks):
# Add edge between blocks of different components
reverse, flank = 1, 1
if blocks[e.left].size() < blocks[e.right].size():
e = Edge(e.right, e.left, e.type[::-1])
if blocks[e.left].orientation()*blocks[e.right].orientation() is e.type[0]*e.type[1]:
reverse = -1
if blocks[e.left].orientation()*e.type[0] < 0:
flank = -1
blocks[e.right].unionto(blocks[e.left], reverse, flank)
def connect_components(blocks):
    # Stitch the remaining disjoint components into one global ordering and
    # fix every block's orientation.
    if len(blocks) != blocks[0].size():
        # More than one component: shift every component after the first so
        # that order ranges do not overlap. `d` maps a component root
        # (block.find()) to the order offset applied to that component.
        d = {blocks[0].find(): 0}
        n = blocks[0].maximum()
        for block in blocks:
            if block.find() not in d:
                # First block seen from a new component: place the whole
                # component right after the orders assigned so far.
                i = n - block.minimum() + 1
                d[block.find()] = i
                block.reorder(i + block.order())
                n += block.size()
            else:
                block.reorder(d[block.find()] + block.order())
            block.orient_block()
    else:
        # Single component: orders are already global, only orient.
        for block in blocks:
            block.orient_block()
def set_out_edges(d, blocks):
    # Attach the alignment edges collected in `d` to the blocks, stored in
    # the direction of the final ordering (lower order -> higher order).
    for edge in d:
        # Edge-end signs adjusted by each endpoint block's final orientation.
        edge_type = (blocks[edge.left].orientation()*edge.type[0], blocks[edge.right].orientation()*edge.type[1])
        for tup in d[edge][0]:
            tup[0].set_start_position(blocks[edge.left].alignment, blocks[edge.left].orientation())
            tup[1].set_start_position(blocks[edge.right].alignment, blocks[edge.right].orientation())
        if blocks[edge.left].order() < blocks[edge.right].order():
            blocks[edge.left].add_out_edges(edge.right, edge_type, d[edge][0])
        elif blocks[edge.left].order() > blocks[edge.right].order():
            # Edge points backwards in the final order: flip the sequence
            # pairs and the type, and store it on the earlier (right) block.
            sequences = [x[::-1] for x in d[edge][0]]
            edge_type = edge_type[::-1]
            blocks[edge.right].add_out_edges(edge.left, edge_type, sequences)
        else:
            # NOTE(review): equal orders — presumably a self-edge; the raw
            # (unoriented) edge.type is kept in this case. Verify intent.
            blocks[edge.left].add_out_edges(edge.right, edge.type, d[edge][0])
def sort_mafblocks(maf_file):
    """Read a MAF file, order its alignment blocks, and return them sorted.

    Edges are processed by ascending weight; an edge inside one component may
    trigger a local reorder, an edge between components merges the components.
    """
    blocks, seq = read_maf(maf_file)  # blocks - list of Block instances
    d = weight(seq)
    # list of edges sorted by the weight (ties broken deterministically)
    edges = sorted(d.keys(), key=lambda x: (d[x][1], x.type, x.left, x.right))
    G = nx.Graph()
    for e in edges:
        # NOTE(review): find() appears to return the union-find root object,
        # so an identity check is the intended "same component" test here.
        if blocks[e.left].find() is blocks[e.right].find():
            # HARDENED: the orientation/type products were compared with
            # `is`; use value equality (`is` only holds for the small ints
            # CPython caches, even though the products here are usually +/-1).
            if blocks[e.left].orientation() * blocks[e.right].orientation() == e.type[0] * e.type[1]:
                continue
            elif blocks[e.left].orientation() * e.type[0] > 0:
                _add_edge_within_component(e.left, e.right, G, blocks)
            else:
                _add_edge_within_component(e.right, e.left, G, blocks)
        else:
            G.add_edge(e.left, e.right)
            _add_edge_between_components(e, blocks)
    set_out_edges(d, blocks)
    connect_components(blocks)
    blocks = sorted(blocks, key=lambda b: b.order())
    return blocks
| 37.960317 | 115 | 0.58018 |
ebdaa8ba513c48c1adfa28f022a03e8a346acaf7 | 1,047 | py | Python | setup.py | paulo-romano/attributetools | b9ca72b74de2e3979b2eabbe1152e33979e7c2a9 | [
"MIT"
] | null | null | null | setup.py | paulo-romano/attributetools | b9ca72b74de2e3979b2eabbe1152e33979e7c2a9 | [
"MIT"
] | null | null | null | setup.py | paulo-romano/attributetools | b9ca72b74de2e3979b2eabbe1152e33979e7c2a9 | [
"MIT"
] | null | null | null | # coding: utf-8
from setuptools import setup
import os  # NOTE(review): unused here; kept because removal is out of scope.

# Package metadata for attributetools.
# FIXED: stripped dataset-extraction junk that was fused onto the final line
# and made the file syntactically invalid.
setup(name='attributetools',
      version='0.1.4',
      description='A decorator to set some attribute to a function.',
      long_description='A decorator to set some attribute to a function.',
      author="Paulo Romano", author_email="pauloromanocarvalho@gmail.com",
      license="MIT",
      py_modules=['attributetools'],
      zip_safe=False,
      platforms='any',
      include_package_data=True,
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Framework :: Django',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: MIT License',
          'Natural Language :: English',
          'Operating System :: OS Independent',
          'Programming Language :: Python',
          'Programming Language :: Python :: 3',
          'Topic :: Software Development :: Libraries',
      ],
      url='https://github.com/paulo-romano/attributetools',
      download_url='https://github.com/paulo-romano/attributetools/tarball/0.1',)
e83bd098090845feb0f05486e36a68b109ddc40f | 1,593 | py | Python | notes-n-resources/Data-Structures-N-Algo/_DS-n-Algos/Interview-Problems/LeetCode/RomanToInteger.py | side-projects-42/INTERVIEW-PREP-COMPLETE | 627a3315cee4bbc38a0e81c256f27f928eac2d63 | [
"MIT"
] | 13 | 2021-03-11T00:25:22.000Z | 2022-03-19T00:19:23.000Z | notes-n-resources/Data-Structures-N-Algo/_DS-n-Algos/Interview-Problems/LeetCode/RomanToInteger.py | side-projects-42/INTERVIEW-PREP-COMPLETE | 627a3315cee4bbc38a0e81c256f27f928eac2d63 | [
"MIT"
] | 160 | 2021-04-26T19:04:15.000Z | 2022-03-26T20:18:37.000Z | notes-n-resources/Data-Structures-N-Algo/_DS-n-Algos/Interview-Problems/LeetCode/RomanToInteger.py | side-projects-42/INTERVIEW-PREP-COMPLETE | 627a3315cee4bbc38a0e81c256f27f928eac2d63 | [
"MIT"
] | 12 | 2021-04-26T19:43:01.000Z | 2022-01-31T08:36:29.000Z | class Solution:
def romanToInt(self, s: str) -> int:
ans = 0
prev = ""
for i in range(len(s)):
if s[i] == "M":
if prev == "C":
ans += 800
prev = "M"
continue
ans += 1000
prev = "M"
continue
if s[i] == "D":
if prev == "C":
ans += 300
prev = "D"
continue
ans += 500
prev = "D"
continue
if s[i] == "C":
if prev == "X":
ans += 80
prev = "C"
continue
ans += 100
prev = "C"
continue
if s[i] == "L":
if prev == "X":
ans += 30
prev = "L"
continue
ans += 50
prev = "L"
continue
if s[i] == "X":
if prev == "I":
ans += 8
prev = "X"
continue
ans += 10
prev = "X"
continue
if s[i] == "V":
if prev == "I":
ans += 3
prev = "V"
continue
ans += 5
prev = "V"
continue
if s[i] == "I":
ans += 1
prev = "I"
return ans
| 27.465517 | 40 | 0.234777 |
5e231655f6380553a670b5c5c91d5159e1bc75a6 | 907 | py | Python | setup.py | mirzadelic/django-auditlog | a5dde204cf27b0baadf139f60520bed5896e270e | [
"MIT"
] | null | null | null | setup.py | mirzadelic/django-auditlog | a5dde204cf27b0baadf139f60520bed5896e270e | [
"MIT"
] | 1 | 2020-08-17T20:19:49.000Z | 2020-08-17T20:19:49.000Z | setup.py | mirzadelic/django-auditlog | a5dde204cf27b0baadf139f60520bed5896e270e | [
"MIT"
] | 2 | 2020-08-17T10:22:29.000Z | 2020-08-25T10:59:33.000Z | from distutils.core import setup
setup(
name='django-auditlog',
version='0.4.7',
packages=['auditlog', 'auditlog.migrations', 'auditlog.management', 'auditlog.management.commands'],
package_dir={'': 'src'},
url='https://github.com/jjkester/django-auditlog',
license='MIT',
author='Jan-Jelle Kester',
description='Audit log app for Django',
install_requires=[
'django-jsonfield>=1.0.0',
'python-dateutil>=2.6.0'
],
zip_safe=False,
classifiers=[
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'License :: OSI Approved :: MIT License',
],
)
| 32.392857 | 104 | 0.598677 |
97c76b575796c8154fd5fd0278b2ba245cedfe48 | 36 | py | Python | pureskillgg_dsdk/ds_models/__init__.py | pureskillgg/dsdk | 2e91a815dc06cc37ac8272d87014301c64c1b46e | [
"MIT"
] | null | null | null | pureskillgg_dsdk/ds_models/__init__.py | pureskillgg/dsdk | 2e91a815dc06cc37ac8272d87014301c64c1b46e | [
"MIT"
] | 1 | 2022-03-31T15:16:17.000Z | 2022-03-31T19:38:05.000Z | pureskillgg_dsdk/ds_models/__init__.py | pureskillgg/dsdk | 2e91a815dc06cc37ac8272d87014301c64c1b46e | [
"MIT"
] | null | null | null | from .model import create_ds_models
| 18 | 35 | 0.861111 |
e9306c5588aa383d9ed1912487374de769d024d0 | 12,107 | py | Python | rqalpha/mod/rqalpha_mod_sys_analyser/mod.py | hughkong/ricequantframe | a7823681bb1581427d4a7ca7e47febc5f3ed5482 | [
"Apache-2.0"
] | 17 | 2017-04-20T05:17:25.000Z | 2020-09-30T08:58:03.000Z | rqalpha/mod/rqalpha_mod_sys_analyser/mod.py | vladhj38/InplusTrader_Linux | 5f7eb17004da0b76ceafb93cb314de7a6009cd04 | [
"MIT"
] | 1 | 2017-11-12T01:24:06.000Z | 2019-09-19T08:50:38.000Z | rqalpha/mod/rqalpha_mod_sys_analyser/mod.py | vladhj38/InplusTrader_Linux | 5f7eb17004da0b76ceafb93cb314de7a6009cd04 | [
"MIT"
] | 17 | 2017-04-17T08:17:00.000Z | 2020-10-25T01:56:49.000Z | # -*- coding: utf-8 -*-
#
# Copyright 2017 Ricequant, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pickle
from collections import defaultdict
from enum import Enum
import numpy as np
import pandas as pd
import six
from rqalpha.const import EXIT_CODE, ACCOUNT_TYPE
from rqalpha.events import EVENT
from rqalpha.interface import AbstractMod
from rqalpha.utils.risk import Risk
class AnalyserMod(AbstractMod):
    """Records orders, trades and daily portfolio/account/position snapshots
    during a run and, on successful tear-down, assembles them into a result
    dict of DataFrames (optionally pickled, plotted, or rendered to a report).
    """

    def __init__(self):
        self._env = None
        self._mod_config = None
        self._enabled = False

        self._orders = []
        self._trades = []
        self._total_portfolios = []
        self._total_benchmark_portfolios = []

        # Both keyed by ACCOUNT_TYPE; one list of daily record dicts per type.
        self._sub_accounts = defaultdict(list)
        self._positions = defaultdict(list)

        self._benchmark_daily_returns = []
        self._portfolio_daily_returns = []

    def start_up(self, env, mod_config):
        """Register collection listeners when any output option is configured."""
        self._env = env
        self._mod_config = mod_config
        # Collecting data is only worthwhile if at least one output target is set.
        self._enabled = (self._mod_config.record or self._mod_config.plot or self._mod_config.output_file or
                         self._mod_config.plot_save_file or self._mod_config.report_save_path)

        if self._enabled:
            env.event_bus.add_listener(EVENT.POST_SETTLEMENT, self._collect_daily)
            env.event_bus.add_listener(EVENT.TRADE, self._collect_trade)
            env.event_bus.add_listener(EVENT.ORDER_CREATION_PASS, self._collect_order)

    def _collect_trade(self, event):
        # Store plain-dict snapshots so later serialization is trivial.
        self._trades.append(self._to_trade_record(event.trade))

    def _collect_order(self, event):
        self._orders.append(event.order)

    def _collect_daily(self, event):
        """Snapshot portfolio, benchmark, accounts and positions after settlement."""
        date = self._env.calendar_dt.date()
        portfolio = self._env.portfolio
        benchmark_portfolio = self._env.benchmark_portfolio

        self._portfolio_daily_returns.append(portfolio.daily_returns)
        self._total_portfolios.append(self._to_portfolio_record(date, portfolio))

        if benchmark_portfolio is None:
            # No benchmark configured: keep the return series length-aligned.
            self._benchmark_daily_returns.append(0)
        else:
            self._benchmark_daily_returns.append(benchmark_portfolio.daily_returns)
            self._total_benchmark_portfolios.append(self._to_portfolio_record(date, benchmark_portfolio))

        for account_type, account in six.iteritems(self._env.portfolio.accounts):
            self._sub_accounts[account_type].append(self._to_account_record(date, account))
            for order_book_id, position in six.iteritems(account.positions):
                self._positions[account_type].append(self._to_position_record(date, order_book_id, position))

    def _symbol(self, order_book_id):
        return self._env.data_proxy.instruments(order_book_id).symbol

    @staticmethod
    def _safe_convert(value, ndigits=3):
        """Make a value safe for tabular output: Enums become their name,
        floats are rounded to *ndigits*; everything else passes through."""
        if isinstance(value, Enum):
            return value.name

        # NOTE: np.float was a redundant alias of builtin float (already first
        # in the tuple) and was removed in numpy >= 1.24, so it is dropped.
        if isinstance(value, (float, np.float64, np.float32, np.float16)):
            return round(value, ndigits)

        return value

    def _to_portfolio_record(self, date, portfolio):
        return {
            'date': date,
            'cash': self._safe_convert(portfolio.cash),
            'total_returns': self._safe_convert(portfolio.total_returns),
            'daily_returns': self._safe_convert(portfolio.daily_returns),
            'daily_pnl': self._safe_convert(portfolio.daily_pnl),
            'total_value': self._safe_convert(portfolio.total_value),
            'market_value': self._safe_convert(portfolio.market_value),
            'annualized_returns': self._safe_convert(portfolio.annualized_returns),
            'unit_net_value': self._safe_convert(portfolio.unit_net_value),
            'units': portfolio.units,
            'static_unit_net_value': self._safe_convert(portfolio.static_unit_net_value),
        }

    # Extra per-account fields recorded on top of the common ones, by type.
    ACCOUNT_FIELDS_MAP = {
        ACCOUNT_TYPE.STOCK: ['dividend_receivable'],
        ACCOUNT_TYPE.FUTURE: ['holding_pnl', 'realized_pnl', 'daily_pnl', 'margin'],
    }

    def _to_account_record(self, date, account):
        data = {
            'date': date,
            'total_cash': self._safe_convert(account.cash + account.frozen_cash),
            'transaction_cost': self._safe_convert(account.transaction_cost),
            'market_value': self._safe_convert(account.market_value),
            'total_value': self._safe_convert(account.total_value),
        }

        for f in self.ACCOUNT_FIELDS_MAP[account.type]:
            data[f] = self._safe_convert(getattr(account, f))

        return data

    # Per-position fields recorded, by account type.
    POSITION_FIELDS_MAP = {
        ACCOUNT_TYPE.STOCK: [
            'quantity', 'last_price', 'avg_price', 'market_value', 'sellable'
        ],
        ACCOUNT_TYPE.FUTURE: [
            'pnl', 'daily_pnl', 'holding_pnl', 'realized_pnl', 'margin', 'market_value',
            'buy_pnl', 'sell_pnl', 'closable_buy_quantity', 'buy_margin', 'buy_today_quantity',
            'buy_avg_open_price', 'buy_avg_holding_price', 'closable_sell_quantity',
            'sell_margin', 'sell_today_quantity', 'sell_quantity', 'sell_avg_open_price',
            'sell_avg_holding_price'
        ],
    }

    def _to_position_record(self, date, order_book_id, position):
        data = {
            'order_book_id': order_book_id,
            'symbol': self._symbol(order_book_id),
            'date': date,
        }

        for f in self.POSITION_FIELDS_MAP[position.type]:
            data[f] = self._safe_convert(getattr(position, f))

        return data

    def _to_trade_record(self, trade):
        return {
            'datetime': trade.datetime.strftime("%Y-%m-%d %H:%M:%S"),
            'trading_datetime': trade.trading_datetime.strftime("%Y-%m-%d %H:%M:%S"),
            'order_book_id': trade.order_book_id,
            'symbol': self._symbol(trade.order_book_id),
            'side': self._safe_convert(trade.side),
            'position_effect': self._safe_convert(trade.position_effect),
            'exec_id': trade.exec_id,
            'tax': trade.tax,
            'commission': trade.commission,
            'last_quantity': trade.last_quantity,
            'last_price': self._safe_convert(trade.last_price),
            'order_id': trade.order_id,
            'transaction_cost': trade.transaction_cost,
        }

    def tear_down(self, code, exception=None):
        """Assemble and emit the result dict; no-op on failure or when disabled.

        Returns the result dict (summary + DataFrames) on success.
        """
        if code != EXIT_CODE.EXIT_SUCCESS or not self._enabled:
            return

        strategy_name = os.path.basename(self._env.config.base.strategy_file).split(".")[0]
        data_proxy = self._env.data_proxy

        summary = {
            'strategy_name': strategy_name,
            'start_date': self._env.config.base.start_date.strftime('%Y-%m-%d'),
            'end_date': self._env.config.base.end_date.strftime('%Y-%m-%d'),
            'strategy_file': self._env.config.base.strategy_file,
            'securities': self._env.config.base.securities,
            'run_type': self._env.config.base.run_type.value,
            'stock_starting_cash': self._env.config.base.stock_starting_cash,
            'future_starting_cash': self._env.config.base.future_starting_cash,
        }

        risk = Risk(np.array(self._portfolio_daily_returns), np.array(self._benchmark_daily_returns),
                    data_proxy.get_risk_free_rate(self._env.config.base.start_date, self._env.config.base.end_date),
                    (self._env.config.base.end_date - self._env.config.base.start_date).days + 1)
        summary.update({
            'alpha': self._safe_convert(risk.alpha, 3),
            'beta': self._safe_convert(risk.beta, 3),
            'sharpe': self._safe_convert(risk.sharpe, 3),
            'information_ratio': self._safe_convert(risk.information_ratio, 3),
            'downside_risk': self._safe_convert(risk.annual_downside_risk, 3),
            'tracking_error': self._safe_convert(risk.annual_tracking_error, 3),
            'sortino': self._safe_convert(risk.sortino, 3),
            'volatility': self._safe_convert(risk.annual_volatility, 3),
            'max_drawdown': self._safe_convert(risk.max_drawdown, 3),
        })

        summary.update({
            'total_value': self._safe_convert(self._env.portfolio.total_value),
            'cash': self._safe_convert(self._env.portfolio.cash),
            'total_returns': self._safe_convert(self._env.portfolio.total_returns),
            'annualized_returns': self._safe_convert(self._env.portfolio.annualized_returns),
            'unit_net_value': self._safe_convert(self._env.portfolio.unit_net_value),
            'units': self._env.portfolio.units,
        })

        if self._env.benchmark_portfolio:
            summary['benchmark_total_returns'] = self._safe_convert(self._env.benchmark_portfolio.total_returns)
            summary['benchmark_annualized_returns'] = self._safe_convert(
                self._env.benchmark_portfolio.annualized_returns)

        trades = pd.DataFrame(self._trades)
        if 'datetime' in trades.columns:
            trades = trades.set_index('datetime')

        df = pd.DataFrame(self._total_portfolios)
        df['date'] = pd.to_datetime(df['date'])
        total_portfolios = df.set_index('date').sort_index()

        result_dict = {
            'summary': summary,
            'trades': trades,
            'total_portfolios': total_portfolios,
        }

        if self._env.benchmark_portfolio is not None:
            b_df = pd.DataFrame(self._total_benchmark_portfolios)
            # FIX: previously this converted df['date'] (the portfolio frame,
            # already converted above) instead of the benchmark frame, leaving
            # b_df's 'date' column as raw date objects.
            b_df['date'] = pd.to_datetime(b_df['date'])
            benchmark_portfolios = b_df.set_index('date').sort_index()
            result_dict['benchmark_portfolios'] = benchmark_portfolios

        if self._env.plot_store is not None:
            # Pivot {series -> {date -> value}} into one row-per-date frame.
            plots = self._env.get_plot_store().get_plots()
            plots_items = defaultdict(dict)
            for series_name, value_dict in six.iteritems(plots):
                for date, value in six.iteritems(value_dict):
                    plots_items[date][series_name] = value
                    plots_items[date]["date"] = date

            df = pd.DataFrame([dict_data for date, dict_data in six.iteritems(plots_items)])
            df["date"] = pd.to_datetime(df["date"])
            df = df.set_index("date").sort_index()
            result_dict["plots"] = df

        for account_type, account in six.iteritems(self._env.portfolio.accounts):
            account_name = account_type.name.lower()
            portfolios_list = self._sub_accounts[account_type]
            df = pd.DataFrame(portfolios_list)
            df["date"] = pd.to_datetime(df["date"])
            portfolios_df = df.set_index("date").sort_index()
            result_dict["{}_portfolios".format(account_name)] = portfolios_df

            positions_list = self._positions[account_type]
            positions_df = pd.DataFrame(positions_list)
            # The frame may be empty (no positions held), hence the guard.
            if "date" in positions_df.columns:
                positions_df["date"] = pd.to_datetime(positions_df["date"])
                positions_df = positions_df.set_index("date").sort_index()
            result_dict["{}_positions".format(account_name)] = positions_df

        if self._mod_config.output_file:
            with open(self._mod_config.output_file, 'wb') as f:
                pickle.dump(result_dict, f)

        if self._mod_config.report_save_path:
            from .report import generate_report
            generate_report(result_dict, self._mod_config.report_save_path)

        if self._mod_config.plot or self._mod_config.plot_save_file:
            from .plot import plot_result
            plot_result(result_dict, self._mod_config.plot, self._mod_config.plot_save_file)

        return result_dict
| 43.239286 | 116 | 0.654084 |
a9cf1f421ec1691902255629229c1bd81b3eb062 | 28,531 | py | Python | zerver/tests/test_docs.py | Pulkit007/zulip | 8a5f6f8d95baa55c4b28972cfc5a498f5f388e0f | [
"Apache-2.0"
] | null | null | null | zerver/tests/test_docs.py | Pulkit007/zulip | 8a5f6f8d95baa55c4b28972cfc5a498f5f388e0f | [
"Apache-2.0"
] | null | null | null | zerver/tests/test_docs.py | Pulkit007/zulip | 8a5f6f8d95baa55c4b28972cfc5a498f5f388e0f | [
"Apache-2.0"
] | null | null | null | import os
import re
from typing import Any, Dict, Sequence
from unittest import mock, skipUnless
from urllib.parse import urlsplit
import orjson
from django.conf import settings
from django.http import HttpResponse
from django.test import override_settings
from django.utils.timezone import now as timezone_now
from corporate.models import Customer, CustomerPlan
from zerver.context_processors import get_apps_page_url
from zerver.lib.integrations import INTEGRATIONS
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import HostRequestMock
from zerver.lib.utils import split_by
from zerver.models import Realm, get_realm
from zerver.views.documentation import add_api_uri_context
class DocPageTest(ZulipTestCase):
    """Smoke tests for portico/documentation pages across subdomains and the
    ROOT_DOMAIN_LANDING_PAGE setting."""

    def get_doc(self, url: str, subdomain: str) -> HttpResponse:
        """Fetch *url*; integration doc-html fragments require an AJAX header."""
        if url.startswith("/integrations/doc-html/"):
            return self.client_get(url, subdomain=subdomain, HTTP_X_REQUESTED_WITH="XMLHttpRequest")
        return self.client_get(url, subdomain=subdomain)

    def print_msg_if_error(self, url: str, response: HttpResponse) -> None:  # nocoverage
        """Debugging aid: dump the JSON error message for a failed fetch."""
        if response.status_code == 200:
            return
        print("Error processing URL:", url)
        if response.get("Content-Type") == "application/json":
            content = orjson.loads(response.content)
            print()
            print("======================================================================")
            print("ERROR: {}".format(content.get("msg")))
            print()

    def _test(
        self,
        url: str,
        expected_content: str,
        # FIX: immutable tuples instead of mutable list defaults.
        extra_strings: Sequence[str] = (),
        landing_missing_strings: Sequence[str] = (),
        landing_page: bool = True,
        doc_html_str: bool = False,
    ) -> None:
        """Fetch *url* on the "zephyr" and root subdomains (and, unless
        *landing_page* is False, again with ROOT_DOMAIN_LANDING_PAGE=True),
        asserting a 200, the expected content, extra strings, and the
        presence/absence of the robots noindex tag."""
        # Test the URL on the "zephyr" subdomain
        result = self.get_doc(url, subdomain="zephyr")
        self.print_msg_if_error(url, result)

        self.assertEqual(result.status_code, 200)
        self.assertIn(expected_content, str(result.content))
        for s in extra_strings:
            self.assertIn(s, str(result.content))
        if not doc_html_str:
            self.assert_in_success_response(
                ['<meta name="robots" content="noindex,nofollow" />'], result
            )

        # Test the URL on the root subdomain
        result = self.get_doc(url, subdomain="")
        self.print_msg_if_error(url, result)

        self.assertEqual(result.status_code, 200)
        self.assertIn(expected_content, str(result.content))
        if not doc_html_str:
            self.assert_in_success_response(
                ['<meta name="robots" content="noindex,nofollow" />'], result
            )

        for s in extra_strings:
            self.assertIn(s, str(result.content))

        if not landing_page:
            return

        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            # Test the URL on the root subdomain with the landing page setting
            result = self.get_doc(url, subdomain="")
            self.print_msg_if_error(url, result)

            self.assertEqual(result.status_code, 200)
            self.assertIn(expected_content, str(result.content))
            for s in extra_strings:
                self.assertIn(s, str(result.content))
            for s in landing_missing_strings:
                self.assertNotIn(s, str(result.content))
            if not doc_html_str:
                # Every landing page has a meta-description and is indexable.
                self.assert_in_success_response(['<meta name="description" content="'], result)
                self.assert_not_in_success_response(
                    ['<meta name="robots" content="noindex,nofollow" />'], result
                )

            # Test the URL on the "zephyr" subdomain with the landing page setting
            result = self.get_doc(url, subdomain="zephyr")
            self.print_msg_if_error(url, result)
            self.assertEqual(result.status_code, 200)
            self.assertIn(expected_content, str(result.content))
            for s in extra_strings:
                self.assertIn(s, str(result.content))
            if not doc_html_str:
                self.assert_in_success_response(
                    ['<meta name="robots" content="noindex,nofollow" />'], result
                )

    def test_api_doc_endpoints(self) -> None:
        # We extract the set of /api/ endpoints to check by parsing
        # the /api/ page sidebar for links starting with /api/.
        api_page_raw = str((self.client_get("/api/").content))
        ENDPOINT_REGEXP = re.compile(r"href=\"/api/\s*(.*?)\"")
        endpoint_list_set = set(re.findall(ENDPOINT_REGEXP, api_page_raw))
        endpoint_list = [f"/api/{endpoint}" for endpoint in endpoint_list_set]
        # Validate that the parsing logic isn't broken, since if it
        # broke, the below would become a noop.
        self.assertGreater(len(endpoint_list), 70)

        for endpoint in endpoint_list:
            self._test(endpoint, "", doc_html_str=True)

        result = self.client_get(
            "/api/nonexistent-page",
            follow=True,
            HTTP_X_REQUESTED_WITH="XMLHttpRequest",
        )
        self.assertEqual(result.status_code, 404)

    def test_doc_endpoints(self) -> None:
        """Spot-check content on each hand-listed documentation page."""
        self._test("/api/", "The Zulip API")
        self._test("/api/api-keys", "be careful with it")
        self._test("/api/installation-instructions", "No download required!")
        self._test("/api/send-message", "steal away your hearts")
        self._test("/api/render-message", "**foo**")
        self._test("/api/get-streams", "include_public")
        self._test("/api/get-stream-id", "The name of the stream to access.")
        self._test("/api/get-subscriptions", "Get all streams that the user is subscribed to.")
        self._test("/api/get-users", "client_gravatar")
        self._test("/api/register-queue", "apply_markdown")
        self._test("/api/get-events", "dont_block")
        self._test("/api/delete-queue", "Delete a previously registered queue")
        self._test("/api/update-message", "propagate_mode")
        self._test("/api/get-own-user", "does not accept any parameters.")
        self._test("/api/subscribe", "authorization_errors_fatal")
        self._test("/api/create-user", "zuliprc-admin")
        self._test("/api/unsubscribe", "not_removed")

        if settings.ZILENCER_ENABLED:
            self._test("/team/", "industry veterans")
        self._test("/history/", "Cambridge, Massachusetts")
        # Test the i18n version of one of these pages.
        self._test("/en/history/", "Cambridge, Massachusetts")
        if settings.ZILENCER_ENABLED:
            self._test("/apps/", "Apps for every platform.")
        self._test("/features/", "Beautiful messaging")
        self._test("/hello/", "Chat for distributed teams", landing_missing_strings=["Log in"])
        self._test("/why-zulip/", "Why Zulip?")
        self._test("/for/open-source/", "for open source projects")
        self._test("/case-studies/tum/", "Technical University of Munich")
        self._test("/case-studies/ucsd/", "UCSD")
        self._test("/for/research/", "for researchers")
        self._test("/for/companies/", "in a company")
        self._test("/for/communities/", "Zulip for communities")
        self._test("/security/", "TLS encryption")
        self._test("/devlogin/", "Normal users", landing_page=False)
        self._test("/devtools/", "Useful development URLs")
        self._test("/errors/404/", "Page not found")
        self._test("/errors/5xx/", "Internal server error")
        self._test("/emails/", "manually generate most of the emails by clicking")

        result = self.client_get(
            "/integrations/doc-html/nonexistent_integration",
            follow=True,
            HTTP_X_REQUESTED_WITH="XMLHttpRequest",
        )
        self.assertEqual(result.status_code, 404)

        # /new-user/ is a legacy redirect to /hello/.
        result = self.client_get("/new-user/")
        self.assertEqual(result.status_code, 301)
        self.assertIn("hello", result["Location"])

    def test_portico_pages_open_graph_metadata(self) -> None:
        # Why Zulip
        url = "/why-zulip/"
        title = '<meta property="og:title" content="Team chat with first-class threading" />'
        description = '<meta property="og:description" content="Most team chats are overwhelming'
        self._test(url, title, doc_html_str=True)
        self._test(url, description, doc_html_str=True)

        # Features
        url = "/features/"
        title = '<meta property="og:title" content="Zulip features" />'
        description = '<meta property="og:description" content="First class threading'
        self._test(url, title, doc_html_str=True)
        self._test(url, description, doc_html_str=True)

    def test_integration_doc_endpoints(self) -> None:
        self._test(
            "/integrations/",
            "native integrations.",
            extra_strings=[
                "And hundreds more through",
                "Zapier",
                "IFTTT",
            ],
        )

        for integration in INTEGRATIONS.keys():
            url = f"/integrations/doc-html/{integration}"
            self._test(url, "", doc_html_str=True)

    def test_integration_pages_open_graph_metadata(self) -> None:
        url = "/integrations/doc/github"
        title = '<meta property="og:title" content="Connect GitHub to Zulip" />'
        description = '<meta property="og:description" content="Zulip comes with over'
        self._test(url, title, doc_html_str=True)
        self._test(url, description, doc_html_str=True)

        # Test category pages
        url = "/integrations/communication"
        title = '<meta property="og:title" content="Connect your Communication tools to Zulip" />'
        description = '<meta property="og:description" content="Zulip comes with over'
        self._test(url, title, doc_html_str=True)
        self._test(url, description, doc_html_str=True)

        # Test integrations page
        url = "/integrations/"
        title = '<meta property="og:title" content="Connect the tools you use to Zulip" />'
        description = '<meta property="og:description" content="Zulip comes with over'
        self._test(url, title, doc_html_str=True)
        self._test(url, description, doc_html_str=True)

    def test_doc_html_str_non_ajax_call(self) -> None:
        # We don't need to test all the pages for 404
        # FIX: was list(INTEGRATIONS.keys())[5], which took a single
        # integration name and iterated over its *characters*; the intent
        # was to sample the first few integrations.
        for integration in list(INTEGRATIONS.keys())[:5]:
            with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
                url = f"/en/integrations/doc-html/{integration}"
                result = self.client_get(url, subdomain="", follow=True)
                self.assertEqual(result.status_code, 404)
                result = self.client_get(url, subdomain="zephyr", follow=True)
                self.assertEqual(result.status_code, 404)

            url = f"/en/integrations/doc-html/{integration}"
            result = self.client_get(url, subdomain="", follow=True)
            self.assertEqual(result.status_code, 404)
            result = self.client_get(url, subdomain="zephyr", follow=True)
            self.assertEqual(result.status_code, 404)

        result = self.client_get("/integrations/doc-html/nonexistent_integration", follow=True)
        self.assertEqual(result.status_code, 404)

    def test_electron_detection(self) -> None:
        result = self.client_get("/accounts/password/reset/")
        # TODO: Ideally, this Mozilla would be the specific browser.
        self.assertTrue('data-platform="Mozilla"' in result.content.decode("utf-8"))

        result = self.client_get("/accounts/password/reset/", HTTP_USER_AGENT="ZulipElectron/1.0.0")
        self.assertTrue('data-platform="ZulipElectron"' in result.content.decode("utf-8"))
class HelpTest(ZulipTestCase):
    """Tests for /help/ article rendering, covering links whose markup
    depends on whether the request is on a realm subdomain."""

    def test_help_settings_links(self) -> None:
        response = self.client_get("/help/change-the-time-format")
        self.assertEqual(response.status_code, 200)
        page = str(response.content)
        self.assertIn(
            'Go to <a href="/#settings/display-settings">Display settings</a>', page
        )
        # The sidebar should be rendered alongside the article.
        self.assertIn("Getting started with Zulip", page)

        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            response = self.client_get("/help/change-the-time-format", subdomain="")
            self.assertEqual(response.status_code, 200)
            page = str(response.content)
            # Off-realm, the link degrades to bold text.
            self.assertIn("<strong>Display settings</strong>", page)
            self.assertNotIn("/#settings", page)

    def test_help_relative_links_for_gear(self) -> None:
        response = self.client_get("/help/analytics")
        self.assertEqual(response.status_code, 200)
        self.assertIn('<a href="/stats">Usage statistics</a>', str(response.content))

        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            response = self.client_get("/help/analytics", subdomain="")
            self.assertEqual(response.status_code, 200)
            page = str(response.content)
            self.assertIn("<strong>Usage statistics</strong>", page)
            self.assertNotIn("/stats", page)

    def test_help_relative_links_for_stream(self) -> None:
        response = self.client_get("/help/message-a-stream-by-email")
        self.assertEqual(response.status_code, 200)
        self.assertIn('<a href="/#streams/subscribed">Your streams</a>', str(response.content))

        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            response = self.client_get("/help/message-a-stream-by-email", subdomain="")
            self.assertEqual(response.status_code, 200)
            page = str(response.content)
            self.assertIn("<strong>Manage streams</strong>", page)
            self.assertNotIn("/#streams", page)
class IntegrationTest(ZulipTestCase):
    """Tests for integration metadata and the /api/ URL context helpers."""

    def test_check_if_every_integration_has_logo_that_exists(self) -> None:
        # Every registered integration must ship a logo file on disk.
        for integration in INTEGRATIONS.values():
            logo_path = urlsplit(integration.logo_url).path
            self.assertTrue(os.path.isfile(settings.DEPLOY_ROOT + logo_path), integration.name)

    def test_api_url_view_subdomains_base(self) -> None:
        ctx: Dict[str, Any] = {}
        add_api_uri_context(ctx, HostRequestMock())
        self.assertEqual(ctx["api_url_scheme_relative"], "testserver/api")
        self.assertEqual(ctx["api_url"], "http://testserver/api")
        self.assertTrue(ctx["html_settings_links"])

    @override_settings(ROOT_DOMAIN_LANDING_PAGE=True)
    def test_api_url_view_subdomains_homepage_base(self) -> None:
        ctx: Dict[str, Any] = {}
        add_api_uri_context(ctx, HostRequestMock())
        self.assertEqual(ctx["api_url_scheme_relative"], "yourZulipDomain.testserver/api")
        self.assertEqual(ctx["api_url"], "http://yourZulipDomain.testserver/api")
        self.assertFalse(ctx["html_settings_links"])

    def test_api_url_view_subdomains_full(self) -> None:
        ctx: Dict[str, Any] = {}
        add_api_uri_context(ctx, HostRequestMock(host="mysubdomain.testserver"))
        self.assertEqual(ctx["api_url_scheme_relative"], "mysubdomain.testserver/api")
        self.assertEqual(ctx["api_url"], "http://mysubdomain.testserver/api")
        self.assertTrue(ctx["html_settings_links"])

    def test_html_settings_links(self) -> None:
        # Landing page on the root domain: plain-text link labels.
        ctx: Dict[str, Any] = {}
        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            add_api_uri_context(ctx, HostRequestMock())
        self.assertEqual(ctx["settings_html"], "Zulip settings page")
        self.assertEqual(ctx["subscriptions_html"], "streams page")

        # Landing page but a realm subdomain: full HTML links.
        ctx = {}
        with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
            add_api_uri_context(ctx, HostRequestMock(host="mysubdomain.testserver"))
        self.assertEqual(ctx["settings_html"], '<a href="/#settings">Zulip settings page</a>')
        self.assertEqual(
            ctx["subscriptions_html"], '<a target="_blank" href="/#streams">streams page</a>'
        )

        # Default settings: full HTML links as well.
        ctx = {}
        add_api_uri_context(ctx, HostRequestMock())
        self.assertEqual(ctx["settings_html"], '<a href="/#settings">Zulip settings page</a>')
        self.assertEqual(
            ctx["subscriptions_html"], '<a target="_blank" href="/#streams">streams page</a>'
        )
class AboutPageTest(ZulipTestCase):
    """Tests for the /team/ contributors page."""

    @skipUnless(settings.ZILENCER_ENABLED, "requires zilencer")
    def test_endpoint(self) -> None:
        with self.settings(CONTRIBUTOR_DATA_FILE_PATH="zerver/tests/fixtures/authors.json"):
            response = self.client_get("/team/")
        self.assert_in_success_response(["Our amazing community"], response)
        self.assert_in_success_response(["2017-11-20"], response)
        self.assert_in_success_response(["timabbott", "showell", "gnprice", "rishig"], response)

        # When the contributors data file is missing, the page still renders.
        with mock.patch("zerver.views.portico.open", side_effect=FileNotFoundError) as mocked_open:
            response = self.client_get("/team/")
            self.assertEqual(response.status_code, 200)
            self.assert_in_success_response(["Never ran"], response)
            mocked_open.assert_called_once()

        # Without zilencer, /team/ redirects to the hosted page.
        with self.settings(ZILENCER_ENABLED=False):
            response = self.client_get("/team/")
            self.assertEqual(response.status_code, 301)
            self.assertEqual(response["Location"], "https://zulip.com/team/")

    def test_split_by(self) -> None:
        """Utility function primarily used in authors page"""
        numbers = list(range(1, 10))
        self.assertEqual(split_by(numbers, 3, None), [[1, 2, 3], [4, 5, 6], [7, 8, 9]])
class SmtpConfigErrorTest(ZulipTestCase):
    def test_smtp_error(self) -> None:
        """The SMTP config-error page renders with a hint about email setup."""
        response = self.client_get("/config-error/smtp")
        self.assertEqual(response.status_code, 200)
        self.assert_in_success_response(["email configuration"], response)
class PlansPageTest(ZulipTestCase):
    def test_plans_auth(self) -> None:
        """Verify who can view /plans/ and which sponsorship link they see.

        Covers the root domain (anonymous), a nonexistent subdomain, an
        anonymous visitor to a realm, a guest, and a full member.
        """
        root_domain = ""
        result = self.client_get("/plans/", subdomain=root_domain)
        self.assert_in_success_response(["Sign up now"], result)
        # On the root domain the visitor has no realm, so the sponsorship
        # link must route through the realm-selection (/accounts/go/) flow
        # rather than linking to /upgrade directly.
        self.assert_not_in_success_response(["/upgrade#sponsorship"], result)
        self.assert_in_success_response(["/accounts/go/?next=/upgrade%23sponsorship"], result)

        non_existent_domain = "moo"
        result = self.client_get("/plans/", subdomain=non_existent_domain)
        self.assertEqual(result.status_code, 404)
        self.assert_in_response("does not exist", result)

        realm = get_realm("zulip")
        realm.plan_type = Realm.STANDARD_FREE
        realm.save(update_fields=["plan_type"])
        # Anonymous visitors to a realm's /plans/ are redirected to log in.
        result = self.client_get("/plans/", subdomain="zulip")
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/accounts/login/?next=/plans")

        # Guests cannot see the realm's plans page at all.
        guest_user = "polonius"
        self.login(guest_user)
        result = self.client_get("/plans/", subdomain="zulip", follow=True)
        self.assertEqual(result.status_code, 404)

        # A regular member sees the current plan and a direct sponsorship link.
        organization_member = "hamlet"
        self.login(organization_member)
        result = self.client_get("/plans/", subdomain="zulip")
        self.assert_in_success_response(["Current plan"], result)
        self.assert_in_success_response(["/upgrade#sponsorship"], result)
        self.assert_not_in_success_response(["/accounts/go/?next=/upgrade%23sponsorship"], result)

        # Test root domain, with login on different domain
        result = self.client_get("/plans/", subdomain="")
        # TODO: works in manual testing, but I suspect something is funny in
        # the test environment
        # self.assert_in_success_response(["Sign up now"], result)

    def test_CTA_text_by_plan_type(self) -> None:
        """The /plans/ call-to-action copy depends on the realm's plan type."""
        sign_up_now = "Sign up now"
        buy_standard = "Buy Standard"
        current_plan = "Current plan"
        sponsorship_pending = "Sponsorship pending"

        # Root domain
        result = self.client_get("/plans/", subdomain="")
        self.assert_in_success_response([sign_up_now, buy_standard], result)
        self.assert_not_in_success_response([current_plan, sponsorship_pending], result)

        realm = get_realm("zulip")
        realm.plan_type = Realm.SELF_HOSTED
        realm.save(update_fields=["plan_type"])

        # SELF_HOSTED in production redirects to the upstream plans page...
        with self.settings(PRODUCTION=True):
            result = self.client_get("/plans/", subdomain="zulip")
            self.assertEqual(result.status_code, 302)
            self.assertEqual(result["Location"], "https://zulip.com/plans")

            self.login("iago")

            # SELF_HOSTED should hide the local plans page, even if logged in
            result = self.client_get("/plans/", subdomain="zulip")
            self.assertEqual(result.status_code, 302)
            self.assertEqual(result["Location"], "https://zulip.com/plans")

        # But in the development environment, it renders a page
        result = self.client_get("/plans/", subdomain="zulip")
        self.assert_in_success_response([sign_up_now, buy_standard], result)
        self.assert_not_in_success_response([current_plan, sponsorship_pending], result)

        realm.plan_type = Realm.LIMITED
        realm.save(update_fields=["plan_type"])
        result = self.client_get("/plans/", subdomain="zulip")
        self.assert_in_success_response([current_plan, buy_standard], result)
        self.assert_not_in_success_response([sign_up_now, sponsorship_pending], result)

        # With free trials enabled, LIMITED realms are offered a trial
        # instead of a purchase button.
        with self.settings(FREE_TRIAL_DAYS=60):
            result = self.client_get("/plans/", subdomain="zulip")
            self.assert_in_success_response([current_plan, "Start 60 day free trial"], result)
            self.assert_not_in_success_response(
                [sign_up_now, sponsorship_pending, buy_standard], result
            )

        realm.plan_type = Realm.STANDARD_FREE
        realm.save(update_fields=["plan_type"])
        result = self.client_get("/plans/", subdomain="zulip")
        self.assert_in_success_response([current_plan], result)
        self.assert_not_in_success_response(
            [sign_up_now, buy_standard, sponsorship_pending], result
        )

        realm.plan_type = Realm.STANDARD
        realm.save(update_fields=["plan_type"])
        result = self.client_get("/plans/", subdomain="zulip")
        self.assert_in_success_response([current_plan], result)
        self.assert_not_in_success_response(
            [sign_up_now, buy_standard, sponsorship_pending], result
        )

        # An active free-trial plan is labeled as such.
        customer = Customer.objects.create(realm=get_realm("zulip"), stripe_customer_id="cus_id")
        plan = CustomerPlan.objects.create(
            customer=customer,
            tier=CustomerPlan.STANDARD,
            status=CustomerPlan.FREE_TRIAL,
            billing_cycle_anchor=timezone_now(),
            billing_schedule=CustomerPlan.MONTHLY,
        )
        result = self.client_get("/plans/", subdomain="zulip")
        self.assert_in_success_response(["Current plan (free trial)"], result)
        self.assert_not_in_success_response(
            [sign_up_now, buy_standard, sponsorship_pending], result
        )

        # A LIMITED realm with a pending sponsorship request shows that state.
        realm.plan_type = Realm.LIMITED
        realm.save()
        customer.sponsorship_pending = True
        customer.save()
        plan.delete()
        result = self.client_get("/plans/", subdomain="zulip")
        # (Previously asserted [current_plan] and then [current_plan,
        # sponsorship_pending] on the same response; the first check was
        # redundant and has been dropped.)
        self.assert_in_success_response([current_plan, sponsorship_pending], result)
        self.assert_not_in_success_response([sign_up_now, buy_standard], result)
class AppsPageTest(ZulipTestCase):
    def test_get_apps_page_url(self) -> None:
        """get_apps_page_url points at zulip.com unless ZILENCER is enabled."""
        with self.settings(ZILENCER_ENABLED=False):
            self.assertEqual(get_apps_page_url(), "https://zulip.com/apps/")

        with self.settings(ZILENCER_ENABLED=True):
            self.assertEqual(get_apps_page_url(), "/apps/")

    def test_apps_view(self) -> None:
        """/apps redirects permanently to /apps/, which in turn redirects to
        zulip.com unless this server hosts the apps page (ZILENCER_ENABLED)."""
        result = self.client_get("/apps")
        self.assertEqual(result.status_code, 301)
        self.assertTrue(result["Location"].endswith("/apps/"))

        with self.settings(ZILENCER_ENABLED=False):
            result = self.client_get("/apps/")
            self.assertEqual(result.status_code, 301)
            # assertEqual (not assertTrue(... == ...)) so failures show both values.
            self.assertEqual(result["Location"], "https://zulip.com/apps/")

        with self.settings(ZILENCER_ENABLED=False):
            result = self.client_get("/apps/linux")
            self.assertEqual(result.status_code, 301)
            self.assertEqual(result["Location"], "https://zulip.com/apps/")

        with self.settings(ZILENCER_ENABLED=True):
            result = self.client_get("/apps/")
            self.assertEqual(result.status_code, 200)
            html = result.content.decode("utf-8")
            self.assertIn("Apps for every platform.", html)

    def test_app_download_link_view(self) -> None:
        """/apps/download/<platform> redirects to the latest GitHub release
        asset for known platforms and returns 404 for unknown ones."""
        return_value = "https://github.com/zulip/zulip-desktop/releases/download/v5.4.3/Zulip-Web-Setup-5.4.3.exe"
        with mock.patch(
            "zerver.views.portico.get_latest_github_release_download_link_for_platform",
            return_value=return_value,
        ) as m:
            result = self.client_get("/apps/download/windows")
            m.assert_called_once_with("windows")
            self.assertEqual(result.status_code, 302)
            self.assertEqual(result["Location"], return_value)

        result = self.client_get("/apps/download/plan9")
        self.assertEqual(result.status_code, 404)
class PrivacyTermsTest(ZulipTestCase):
    def _check_custom_markdown_template(self, setting_name: str, url: str, policy: str) -> None:
        """Shared check for the TERMS_OF_SERVICE/PRIVACY_POLICY settings.

        When the setting is None, *url* should render a "not configured"
        notice mentioning *policy*; when it names a Markdown file, the
        rendered Markdown should appear and the notice should not.
        """
        not_configured_message = f"This installation of Zulip does not have a configured {policy}"
        with self.settings(**{setting_name: None}):
            response = self.client_get(url)
            self.assert_in_success_response([not_configured_message], response)
        with self.settings(**{setting_name: "zerver/tests/markdown/test_markdown.md"}):
            response = self.client_get(url)
            self.assert_in_success_response(["This is some <em>bold text</em>."], response)
            self.assert_not_in_success_response([not_configured_message], response)

    def test_custom_tos_template(self) -> None:
        """The test-suite's default ToS template renders on /terms/."""
        response = self.client_get("/terms/")
        self.assert_in_success_response(
            [
                'Thanks for using our products and services ("Services"). ',
                "By using our Services, you are agreeing to these terms",
            ],
            response,
        )

    def test_custom_terms_of_service_template(self) -> None:
        self._check_custom_markdown_template("TERMS_OF_SERVICE", "/terms/", "terms of service")

    def test_custom_privacy_policy_template(self) -> None:
        self._check_custom_markdown_template("PRIVACY_POLICY", "/privacy/", "privacy policy")

    def test_custom_privacy_policy_template_with_absolute_url(self) -> None:
        """PRIVACY_POLICY may also be an absolute filesystem path."""
        current_dir = os.path.dirname(os.path.abspath(__file__))
        abs_path = os.path.join(
            current_dir, "..", "..", "templates/zerver/tests/markdown/test_markdown.md"
        )
        with self.settings(PRIVACY_POLICY=abs_path):
            response = self.client_get("/privacy/")
            self.assert_in_success_response(["This is some <em>bold text</em>."], response)

    def test_no_nav(self) -> None:
        # Test that our ?nav=0 feature of /privacy and /terms,
        # designed to comply with the Apple App Store draconian
        # policies that ToS/Privacy pages linked from an iOS app have
        # no links to the rest of the site if there's pricing
        # information for anything elsewhere on the site.
        response = self.client_get("/terms/")
        self.assert_in_success_response(["Plans"], response)

        response = self.client_get("/terms/", {"nav": "no"})
        self.assert_not_in_success_response(["Plans"], response)

        response = self.client_get("/privacy/", {"nav": "no"})
        self.assert_not_in_success_response(["Plans"], response)
| 46.316558 | 114 | 0.653219 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.