id int64 0 300k | label stringlengths 1 74 ⌀ | text stringlengths 4k 8k |
|---|---|---|
5,400 | test no user | from __future__ import annotations
from typing import Any
from unittest import mock
from sentry.integrations.discord.message_builder.base.component import (
DiscordComponentCustomIds as CustomIds,
)
from sentry.integrations.discord.requests.base import (
DiscordMessageComponentTypes,
DiscordRequestTypes,
)
from sentry.integrations.discord.webhooks.message_component import (
ARCHIVE_UNTIL_ESCALATES,
ASSIGNEE_UPDATED,
MARKED_ONGOING,
NO_IDENTITY,
NOT_IN_ORG,
RESOLVE_DIALOG_OPTIONS,
RESOLVED,
RESOLVED_IN_CURRENT_RELEASE,
RESOLVED_IN_NEXT_RELEASE,
UNRESOLVED,
)
from sentry.models.release import Release
from sentry.testutils.cases import APITestCase
WEBHOOK_URL = "/extensions/discord/interactions/"
class DiscordMessageComponentInteractionTest(APITestCase):
    """Exercise the Discord message-component interaction webhook
    (assign/resolve/archive buttons and selects) by POSTing simulated
    Discord interaction payloads at the endpoint."""

    def setUp(self):
        # Discord signs its webhook requests; bypass ed25519 signature
        # verification so the tests can post arbitrary payloads.
        patcher = mock.patch(
            "sentry.integrations.discord.requests.base.verify_signature", return_value=True
        )
        patcher.start()
        # Fix: the patch was started but never stopped, leaking the mock
        # into every test that runs after this class.
        self.addCleanup(patcher.stop)
        self.guild_id = "guild-id"
        self.channel_id = "channel-id"
        self.discord_user_id = "user1234"
        self.discord_integration = self.create_integration(
            provider="discord",
            name="Cool server",
            external_id=self.guild_id,
            organization=self.organization,
        )
        self.provider = self.create_identity_provider(integration=self.discord_integration)
        # Link the default test user to the Discord account above.
        self.create_identity(
            user=self.user, identity_provider=self.provider, external_id=self.discord_user_id
        )

    def send_interaction(self, data: Any | None = None, member: Any | None = None):
        """POST a MESSAGE_COMPONENT interaction to the webhook.

        Defaults to an unknown custom_id sent by the linked Discord user.
        """
        if data is None:
            data = {"custom_id": f"unknown:{self.group.id}"}
        if member is None:
            member = {"user": {"id": self.discord_user_id}}
        return self.client.post(
            path=WEBHOOK_URL,
            data={
                "type": DiscordRequestTypes.MESSAGE_COMPONENT,
                "guild_id": self.guild_id,
                "channel_id": self.channel_id,
                "data": data,
                "member": member,
            },
            format="json",
            # Dummy headers; verify_signature is mocked out in setUp.
            HTTP_X_SIGNATURE_ED25519="signature",
            HTTP_X_SIGNATURE_TIMESTAMP="timestamp",
        )

    def get_message_content(self, response: Any) -> str:
        return response.json()["data"]["content"]

    def get_message_components(self, response: Any) -> Any:
        return response.json()["data"]["components"]

    def get_select_options(self, response: Any) -> Any:
        # First action row -> first component (a select menu) -> its options.
        return self.get_message_components(response)[0]["components"][0]["options"]

    def test_unknown_id_interaction(self):
        response = self.send_interaction({"custom_id": f"unknown:{self.group.id}"})
        assert response.status_code == 404

    def test_no_user(self):
        # Fix: restored a real test name for the METHOD_NAME placeholder.
        # A Discord user with no linked Sentry identity gets NO_IDENTITY.
        response = self.send_interaction(member={"user": {"id": "not-our-user"}})
        assert self.get_message_content(response) == NO_IDENTITY

    def test_not_in_org(self):
        # A user whose identity maps to a different organization is rejected.
        other_user = self.create_user()
        other_user_discord_id = "other-user1234"
        other_org = self.create_organization()
        self.discord_integration.add_organization(other_org)
        self.create_identity(
            user=other_user, identity_provider=self.provider, external_id=other_user_discord_id
        )
        response = self.send_interaction(member={"user": {"id": other_user_discord_id}})
        assert self.get_message_content(response) == NOT_IN_ORG

    def test_assign_dialog(self):
        response = self.send_interaction(
            {
                "component_type": DiscordMessageComponentTypes.BUTTON,
                "custom_id": f"{CustomIds.ASSIGN_DIALOG}:{self.group.id}",
            }
        )
        assert self.get_select_options(response) == [
            {"label": f"#{self.team.slug}", "value": f"team:{self.team.id}", "default": False},
            {"label": self.user.email, "value": f"user:{self.user.id}", "default": False},
        ]

    def test_assign(self):
        response = self.send_interaction(
            {
                "component_type": DiscordMessageComponentTypes.SELECT,
                "custom_id": f"{CustomIds.ASSIGN}:{self.group.id}",
                "values": [f"user:{self.user.id}"],
            }
        )
        assert self.get_message_content(response) == ASSIGNEE_UPDATED

    def test_resolve_dialog(self):
        response = self.send_interaction(
            {
                "component_type": DiscordMessageComponentTypes.BUTTON,
                "custom_id": f"{CustomIds.RESOLVE_DIALOG}:{self.group.id}",
            }
        )
        assert self.get_select_options(response) == [
            option.build() for option in RESOLVE_DIALOG_OPTIONS
        ]

    def test_resolve_non_dialog(self):
        response = self.send_interaction(
            {
                "component_type": DiscordMessageComponentTypes.BUTTON,
                "custom_id": f"{CustomIds.RESOLVE}:{self.group.id}",
            }
        )
        assert self.get_message_content(response) == RESOLVED

    def test_resolve_now_from_dialog(self):
        # The dialog's "resolve now" option submits an empty value.
        response = self.send_interaction(
            {
                "component_type": DiscordMessageComponentTypes.SELECT,
                "custom_id": f"{CustomIds.RESOLVE}:{self.group.id}",
                "values": [""],
            }
        )
        assert self.get_message_content(response) == RESOLVED

    def test_resolve_in_next_release(self):
        # A release must exist on the project for this resolution type.
        release = Release.objects.create(
            organization_id=self.organization.id,
            version="1.0",
        )
        release.add_project(self.project)
        response = self.send_interaction(
            {
                "component_type": DiscordMessageComponentTypes.SELECT,
                "custom_id": f"{CustomIds.RESOLVE}:{self.group.id}",
                "values": ["inNextRelease"],
            }
        )
        assert self.get_message_content(response) == RESOLVED_IN_NEXT_RELEASE

    def test_resolve_in_current_release(self):
        release = Release.objects.create(
            organization_id=self.organization.id,
            version="1.0",
        )
        release.add_project(self.project)
        response = self.send_interaction(
            {
                "component_type": DiscordMessageComponentTypes.SELECT,
                "custom_id": f"{CustomIds.RESOLVE}:{self.group.id}",
                "values": ["inCurrentRelease"],
            }
        )
        assert self.get_message_content(response) == RESOLVED_IN_CURRENT_RELEASE

    def test_unresolve(self):
        response = self.send_interaction(
            {
                "component_type": DiscordMessageComponentTypes.BUTTON,
                "custom_id": f"{CustomIds.UNRESOLVE}:{self.group.id}",
            }
        )
        assert self.get_message_content(response) == UNRESOLVED

    def test_mark_ongoing(self):
        response = self.send_interaction(
            {
                "component_type": DiscordMessageComponentTypes.BUTTON,
                "custom_id": f"{CustomIds.MARK_ONGOING}:{self.group.id}",
            }
        )
        assert self.get_message_content(response) == MARKED_ONGOING

    def test_archive(self):
        response = self.send_interaction(
            {
                "component_type": DiscordMessageComponentTypes.BUTTON,
                "custom_id": f"{CustomIds.ARCHIVE}:{self.group.id}",
            }
        )
        assert self.get_message_content(response) == ARCHIVE_UNTIL_ESCALATES
5,401 | lode angle | #!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import os
import sys
import numpy as np
import matplotlib.pyplot as plt
def expected(scheme, angle_degrees):
    """Return the analytic (x, y) point of the smoothed Mohr-Coulomb yield
    surface on the octahedral plane for the given approximation scheme.

    scheme: one of "native", "outer_tip", "inner_tip", "lode_zero",
        "inner_edge" -- selects how the Mohr-Coulomb cohesion/friction are
        mapped to Drucker-Prager parameters.
    angle_degrees: Lode angle in degrees (scalar or numpy array).
    Raises ValueError for an unknown scheme.
    """
    angle = angle_degrees * np.pi / 180.0
    # Material parameters matching the MOOSE small_deform2 test inputs.
    cohesion = 10
    friction_degrees = 20
    tip_smoother = 4
    mean = -10
    friction = friction_degrees * np.pi / 180.0
    if scheme == "native":
        coh = cohesion
        fric = friction
    elif scheme == "outer_tip":
        coh = 2 * np.sqrt(3) * cohesion * np.cos(friction) / (3.0 - np.sin(friction))
        fric = np.arctan(2 * np.sin(friction) / np.sqrt(3) / (3.0 - np.sin(friction)))
    elif scheme == "inner_tip":
        coh = 2 * np.sqrt(3) * cohesion * np.cos(friction) / (3.0 + np.sin(friction))
        fric = np.arctan(2 * np.sin(friction) / np.sqrt(3) / (3.0 + np.sin(friction)))
    elif scheme == "lode_zero":
        coh = cohesion * np.cos(friction)
        fric = np.arctan(np.sin(friction) / 3.0)
    elif scheme == "inner_edge":
        coh = 3 * cohesion * np.cos(friction) / np.sqrt(9.0 + 3.0 * np.power(np.sin(friction), 2))
        fric = np.arctan(np.sin(friction) / np.sqrt(9.0 + 3.0 * np.power(np.sin(friction), 2)))
    else:
        # Fix: an unknown scheme previously fell through and raised an
        # obscure UnboundLocalError on `coh` below.
        raise ValueError("unknown scheme: %s" % scheme)
    # Radius of the (tip-smoothed) yield surface in the octahedral plane.
    bar = np.sqrt(np.power(coh - mean * 3.0 * np.tan(fric), 2) - np.power(tip_smoother, 2))
    x = bar * np.cos(angle)
    y = bar * np.sin(angle)
    return (x, y)
def sigma_mean(stress):
    """Mean (hydrostatic) stress: average of the diagonal entries of the
    6-component stress vector (xx, xy, xz, yy, yz, zz)."""
    diagonal = (stress[0], stress[3], stress[5])
    return sum(diagonal) / 3.0
def sigma_bar(stress):
    """Deviatoric stress magnitude sqrt(0.5 * s:s) of the 6-component
    stress vector (xx, xy, xz, yy, yz, zz)."""
    mean = sigma_mean(stress)
    diagonal_part = (
        np.power(stress[0] - mean, 2)
        + np.power(stress[3] - mean, 2)
        + np.power(stress[5] - mean, 2)
    )
    shear_part = 2 * (stress[1] * stress[1] + stress[2] * stress[2] + stress[4] * stress[4])
    return np.sqrt(0.5 * (diagonal_part + shear_part))
def third_inv(stress):
    """Product of the deviatoric diagonal entries (a third-invariant-like
    quantity) of the 6-component stress vector."""
    mean = sigma_mean(stress)
    deviatoric_diagonal = [stress[i] - mean for i in (0, 3, 5)]
    product = 1.0
    for entry in deviatoric_diagonal:
        product = product * entry
    return product
def lode_angle(stress):
    """Lode angle (radians, in [-pi/6, pi/6]) of the stress state, computed
    from the deviatoric magnitude and the third invariant."""
    bar = sigma_bar(stress)
    third = third_inv(stress)
    return np.arcsin(-1.5 * np.sqrt(3.0) * third / np.power(bar, 3)) / 3.0


# Backwards-compatible alias: restore the masked placeholder name so any
# existing call sites keep working.
METHOD_NAME = lode_angle
def moose_result(fn):
    """Read a MOOSE CSV output file and return ([x], [y]) stress points on
    the octahedral plane for the plastically-deforming timesteps.

    Rows whose second column is negative are elastic and are skipped; the
    stress components are taken from column 4 onwards.
    """
    x = []
    y = []
    # Use a context manager so the file is closed even if parsing fails.
    with open(fn) as f:
        for line in f:
            if not line.strip():
                continue
            line = line.strip()
            # Skip the CSV header row and the initial (time 0) row.
            if line.startswith("time") or line.startswith("0"):
                continue
            # Fix: this file's shebang is python3, where map() returns an
            # iterator that cannot be indexed or sliced below.
            fields = [float(v) for v in line.split(",")]
            if fields[1] < -1E-10:
                continue  # this is an elastic deformation
            bar = sigma_bar(fields[4:])
            lode = METHOD_NAME(fields[4:])
            x.append(bar * np.cos(lode))
            y.append(bar * np.sin(lode))
    return (x, y)
# Plot the analytic yield curves against MOOSE results for each
# Drucker-Prager approximation scheme, sweeping the Lode angle over
# -30..+30 degrees, and save the comparison figure.
angles = np.arange(-30, 31, 1)
plt.figure()
plt.plot(expected("native", angles)[0], expected("native", angles)[1], 'k-', label = 'expected (native)')
mr = moose_result("gold/small_deform2_native.csv")
plt.plot(mr[0], mr[1], 'k^', label = 'MOOSE (native)')
plt.plot(expected("outer_tip", angles)[0], expected("outer_tip", angles)[1], 'g-', label = 'expected (outer_tip)')
mr = moose_result("gold/small_deform2_outer_tip.csv")
plt.plot(mr[0], mr[1], 'g^', label = 'MOOSE (outer_tip)')
plt.plot(expected("inner_tip", angles)[0], expected("inner_tip", angles)[1], 'b-', label = 'expected (inner_tip)')
mr = moose_result("gold/small_deform2_inner_tip.csv")
plt.plot(mr[0], mr[1], 'b^', label = 'MOOSE (inner_tip)')
plt.plot(expected("lode_zero", angles)[0], expected("lode_zero", angles)[1], 'c-', label = 'expected (lode_zero)')
mr = moose_result("gold/small_deform2_lode_zero.csv")
plt.plot(mr[0], mr[1], 'c^', label = 'MOOSE (lode_zero)')
plt.plot(expected("inner_edge", angles)[0], expected("inner_edge", angles)[1], 'r-', label = 'expected (inner_edge)')
mr = moose_result("gold/small_deform2_inner_edge.csv")
plt.plot(mr[0], mr[1], 'r^', label = 'MOOSE (inner_edge)')
# Shrink the legend font so all ten entries fit beside the plot.
legend = plt.legend(bbox_to_anchor=(1.16, 0.95))
for label in legend.get_texts():
    label.set_fontsize('small')
plt.xlabel("Stress")
plt.ylabel("Stress")
plt.title("Drucker-Prager yield function on octahedral plane")
plt.axis([5, 25, -12, 12])
plt.savefig("small_deform2.png")
sys.exit(0) |
5,402 | test unsecure read device certificate bad data | # Parsec Cloud (https://parsec.cloud) Copyright (c) BUSL-1.1 2016-present Scille SAS
from __future__ import annotations
import zlib
import pytest
from parsec._parsec import DateTime
from parsec.api.data import DataError, DeviceCertificate, RevokedUserCertificate, UserCertificate
from parsec.api.protocol import UserProfile
from parsec.serde import packb, unpackb
def test_unsecure_read_device_certificate_bad_data():
    """DeviceCertificate.unsecure_load must reject garbage bytes with
    DataError (restored name for the METHOD_NAME placeholder, matching the
    sibling revoked-user/user-certificate tests)."""
    with pytest.raises(DataError):
        DeviceCertificate.unsecure_load(b"dummy")
def test_unsecure_read_revoked_user_certificate_bad_data():
    """RevokedUserCertificate.unsecure_load must reject garbage bytes."""
    with pytest.raises(DataError):
        RevokedUserCertificate.unsecure_load(b"dummy")
def test_unsecure_read_user_certificate_bad_data():
    """UserCertificate.unsecure_load must reject garbage bytes."""
    with pytest.raises(DataError):
        UserCertificate.unsecure_load(b"dummy")
def test_build_user_certificate(alice, bob, mallory):
    """Round-trip a UserCertificate: build and sign as alice for bob, then
    check unsecure_load, verify_and_load, and the author / signature / user
    mismatch error paths."""
    now = DateTime.now()
    certif = UserCertificate(
        author=alice.device_id,
        timestamp=now,
        user_id=bob.user_id,
        human_handle=bob.human_handle,
        public_key=bob.public_key,
        profile=UserProfile.ADMIN,
    ).dump_and_sign(alice.signing_key)
    assert isinstance(certif, bytes)

    # unsecure_load decodes the payload without checking the signature.
    unsecure = UserCertificate.unsecure_load(certif)
    assert isinstance(unsecure, UserCertificate)
    assert unsecure.user_id == bob.user_id
    assert unsecure.public_key == bob.public_key
    assert unsecure.timestamp == now
    assert unsecure.author == alice.device_id
    assert unsecure.profile == UserProfile.ADMIN

    # Full verification with the correct author matches the unsecure view.
    verified = UserCertificate.verify_and_load(
        certif, author_verify_key=alice.verify_key, expected_author=alice.device_id
    )
    assert verified == unsecure

    # Wrong expected author is rejected.
    with pytest.raises(DataError) as exc:
        UserCertificate.verify_and_load(
            certif, author_verify_key=alice.verify_key, expected_author=mallory.device_id
        )
    assert str(exc.value) == "Invalid author: expected `mallory@dev1`, got `alice@dev1`"

    # Wrong verify key is rejected.
    with pytest.raises(DataError) as exc:
        UserCertificate.verify_and_load(
            certif, author_verify_key=mallory.verify_key, expected_author=alice.device_id
        )
    assert str(exc.value) == "Invalid signature"

    # Wrong expected user is rejected.
    with pytest.raises(DataError) as exc:
        UserCertificate.verify_and_load(
            certif,
            author_verify_key=alice.verify_key,
            expected_author=alice.device_id,
            expected_user=mallory.user_id,
        )
    assert str(exc.value) == "Invalid user ID: expected `mallory`, got `bob`"
def test_user_certificate_supports_legacy_is_admin_field(alice, bob):
    """The legacy wire format carried an `is_admin` bool instead of a
    `profile` field; make sure it still loads and that the current format
    stays byte-compatible with it (plus the new fields)."""
    now = DateTime.now()
    certif = UserCertificate(
        author=bob.device_id,
        timestamp=now,
        user_id=alice.user_id,
        human_handle=None,
        public_key=alice.public_key,
        profile=alice.profile,
    )

    # Manually craft a certificate in legacy format
    raw_legacy_certif = {
        "type": "user_certificate",
        "author": bob.device_id.str,
        "timestamp": now,
        "user_id": alice.user_id.str,
        "public_key": alice.public_key.encode(),
        "is_admin": True,
    }
    # Serialization pipeline: msgpack -> zlib -> ed25519 signature.
    dumped_legacy_certif = bob.signing_key.sign(zlib.compress(packb(raw_legacy_certif)))

    # Make sure the legacy format can be loaded
    legacy_certif = UserCertificate.verify_and_load(
        dumped_legacy_certif,
        author_verify_key=bob.verify_key,
        expected_author=bob.device_id,
        expected_user=alice.user_id,
        expected_human_handle=None,
    )
    assert legacy_certif == certif

    # Manually decode new format to check it is compatible with legacy
    dumped_certif = certif.dump_and_sign(bob.signing_key)
    raw_certif = unpackb(zlib.decompress(bob.verify_key.verify(dumped_certif)))
    assert raw_certif == {**raw_legacy_certif, "profile": alice.profile.str, "human_handle": None}
def test_build_device_certificate(alice, bob, mallory):
    """Round-trip a DeviceCertificate: build and sign as alice for bob's
    device, then check unsecure_load, verify_and_load, and the author /
    signature / device mismatch error paths."""
    now = DateTime.now()
    certif = DeviceCertificate(
        author=alice.device_id,
        timestamp=now,
        device_id=bob.device_id,
        device_label=bob.device_label,
        verify_key=bob.verify_key,
    ).dump_and_sign(alice.signing_key)
    assert isinstance(certif, bytes)

    # unsecure_load decodes the payload without checking the signature.
    unsecure = DeviceCertificate.unsecure_load(certif)
    assert isinstance(unsecure, DeviceCertificate)
    assert unsecure.device_id == bob.device_id
    assert unsecure.verify_key == bob.verify_key
    assert unsecure.timestamp == now
    assert unsecure.author == alice.device_id

    # Full verification with the correct author matches the unsecure view.
    verified = DeviceCertificate.verify_and_load(
        certif, author_verify_key=alice.verify_key, expected_author=alice.device_id
    )
    assert verified == unsecure

    # Wrong expected author is rejected.
    with pytest.raises(DataError) as exc:
        DeviceCertificate.verify_and_load(
            certif, author_verify_key=alice.verify_key, expected_author=mallory.device_id
        )
    assert str(exc.value) == "Invalid author: expected `mallory@dev1`, got `alice@dev1`"

    # Wrong verify key is rejected.
    with pytest.raises(DataError) as exc:
        DeviceCertificate.verify_and_load(
            certif, author_verify_key=mallory.verify_key, expected_author=alice.device_id
        )
    assert str(exc.value) == "Invalid signature"

    # Wrong expected device is rejected.
    with pytest.raises(DataError) as exc:
        DeviceCertificate.verify_and_load(
            certif,
            author_verify_key=alice.verify_key,
            expected_author=alice.device_id,
            expected_device=mallory.device_id,
        )
    assert str(exc.value) == "Invalid device ID: expected `mallory@dev1`, got `bob@dev1`"
def test_build_revoked_user_certificate(alice, bob, mallory):
    """Round-trip a RevokedUserCertificate: build and sign as alice against
    bob, then check unsecure_load, verify_and_load, and the author /
    signature / user mismatch error paths."""
    now = DateTime.now()
    certif = RevokedUserCertificate(
        author=alice.device_id, timestamp=now, user_id=bob.user_id
    ).dump_and_sign(alice.signing_key)
    assert isinstance(certif, bytes)

    # unsecure_load decodes the payload without checking the signature.
    unsecure = RevokedUserCertificate.unsecure_load(certif)
    assert isinstance(unsecure, RevokedUserCertificate)
    assert unsecure.user_id == bob.user_id
    assert unsecure.timestamp == now
    assert unsecure.author == alice.device_id

    # Full verification with the correct author matches the unsecure view.
    verified = RevokedUserCertificate.verify_and_load(
        certif, author_verify_key=alice.verify_key, expected_author=alice.device_id
    )
    assert verified == unsecure

    # Wrong expected author is rejected.
    with pytest.raises(DataError) as exc:
        RevokedUserCertificate.verify_and_load(
            certif, author_verify_key=alice.verify_key, expected_author=mallory.device_id
        )
    assert str(exc.value) == "Invalid author: expected `mallory@dev1`, got `alice@dev1`"

    # Wrong verify key is rejected.
    with pytest.raises(DataError) as exc:
        RevokedUserCertificate.verify_and_load(
            certif, author_verify_key=mallory.verify_key, expected_author=alice.device_id
        )
    assert str(exc.value) == "Invalid signature"

    # Wrong expected user is rejected.
    with pytest.raises(DataError) as exc:
        RevokedUserCertificate.verify_and_load(
            certif,
            author_verify_key=alice.verify_key,
            expected_author=alice.device_id,
            expected_user=mallory.user_id,
        )
    assert str(exc.value) == "Invalid user ID: expected `mallory`, got `bob`"
5,403 | get numbered param names | """
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import Any, Dict
from unittest import TestCase
from unittest.mock import patch
from magma.common.service import MagmaService
from magma.enodebd.data_models.data_model import DataModel
from magma.enodebd.devices.device_utils import EnodebDeviceName
from magma.enodebd.exceptions import Tr069Error
from magma.enodebd.state_machines.enb_acs_impl import BasicEnodebAcsStateMachine
from magma.enodebd.state_machines.enb_acs_states import (
AcsMsgAndTransition,
AcsReadMsgResult,
EnodebAcsState,
WaitEmptyMessageState,
WaitInformState,
WaitSetParameterValuesState,
)
from magma.enodebd.tests.test_utils.enb_acs_builder import (
EnodebAcsStateMachineBuilder,
)
from magma.enodebd.tr069 import models
class DummyDataModel(DataModel):
    """Minimal DataModel stub for the state-machine tests: every hook
    returns an empty or trivial value."""

    @classmethod
    def get_parameter(cls, param_name):
        # No parameters are defined for the dummy model.
        return None

    @classmethod
    def _get_magma_transforms(cls):
        return {}

    @classmethod
    def _get_enb_transforms(cls):
        return {}

    @classmethod
    def get_load_parameters(cls):
        return []

    @classmethod
    def get_num_plmns(cls) -> int:
        return 1

    @classmethod
    def get_parameter_names(cls):
        return []

    @classmethod
    def get_numbered_param_names(cls):
        # Fix: restored the real name for the METHOD_NAME placeholder, per
        # the DataModel hook naming used by the sibling get_* methods.
        return {}
class DummyHandler(BasicEnodebAcsStateMachine):
    """Minimal ACS state machine with a single `wait_inform` state, used to
    drive the ACS state classes under test without a real eNodeB device."""

    def __init__(
        self,
        service: MagmaService,
    ) -> None:
        self._state_map: Dict[str, Any] = {}
        super().__init__(service=service, use_param_key=False)

    def are_invasive_changes_applied(self) -> bool:
        return False

    def _init_state_map(self) -> None:
        # Only the entry state is registered; target states ('wait_empty',
        # 'wait_rem') are named but never reached in these tests.
        self._state_map = {
            'wait_inform': WaitInformState(
                self,
                when_done='wait_empty',
                when_boot='wait_rem',
            ),
        }

    @property
    def state_map(self) -> Dict[str, EnodebAcsState]:
        return self._state_map

    @property
    def disconnected_state_name(self) -> str:
        return 'wait_inform'

    @property
    def unexpected_fault_state_name(self) -> str:
        """ State to handle unexpected Fault messages """
        return ''

    @property
    def device_name(self) -> EnodebDeviceName:
        return "dummy"

    @property
    def config_postprocessor(self):
        # The dummy device applies no post-processing to the desired config.
        pass

    def reboot_asap(self) -> None:
        """
        Send a request to reboot the eNodeB ASAP
        """
        # No-op for the dummy handler.
        pass

    def is_enodeb_connected(self) -> bool:
        return True

    @property
    def data_model_class(self):
        return DummyDataModel
class EnodebStatusTests(TestCase):
    """Tests for the generic ACS states (SetParameterValues / empty-message
    handling) driven through the DummyHandler state machine."""

    def _get_acs(self):
        """ Get a dummy ACS statemachine for tests"""
        service = EnodebAcsStateMachineBuilder.build_magma_service()
        return DummyHandler(service)

    @patch(
        'magma.enodebd.state_machines.enb_acs_states'
        '.get_param_values_to_set',
    )
    @patch(
        'magma.enodebd.state_machines.enb_acs_states.get_obj_param_values_to_set',
    )
    def test_wait_set_parameter_values_state(
        self, mock_get_obj_param,
        mock_get_param,
    ):
        """ Test SetParameter return values"""
        # No pending parameter changes, so only the Status code matters.
        mock_get_param.return_value = {}
        mock_get_obj_param.return_value = {}
        test_message_0 = models.SetParameterValuesResponse()
        test_message_0.Status = 0
        test_message_1 = models.SetParameterValuesResponse()
        test_message_1.Status = 1
        # TC-1: return value is 0. No fault
        acs_state = WaitSetParameterValuesState(
            self._get_acs(), 'done',
            'invasive',
        )
        rc = acs_state.read_msg(test_message_0)
        self.assertEqual(type(rc), AcsReadMsgResult)
        # It raises exception if we return 1
        self.assertRaises(
            Tr069Error,
            acs_state.read_msg, test_message_1,
        )
        # It passes if we return 1 and pass the non zero flag
        acs_state = WaitSetParameterValuesState(
            self._get_acs(), 'done',
            'invasive',
            status_non_zero_allowed=True,
        )
        rc = acs_state.read_msg(test_message_1)
        self.assertEqual(type(rc), AcsReadMsgResult)
        rc = acs_state.read_msg(test_message_0)
        self.assertEqual(type(rc), AcsReadMsgResult)

    @patch(
        'magma.enodebd.state_machines.enb_acs_states.get_optional_param_to_check',
    )
    def test_wait_empty_message_state(
        self,
        mock_param_to_check,
    ):
        """WaitEmptyMessageState transitions: done vs. missing vs. unhandled."""
        test_message_1 = models.DummyInput()
        test_message_2 = models.SetParameterValuesResponse()
        # Pretend there are optional parameters left to check.
        mock_param_to_check.return_value = True
        # test 1: No missing_param_transition
        # ensure we go to done state even when there are
        # optional params to check
        acs_state = WaitEmptyMessageState(
            self._get_acs(),
            when_done='done',
        )
        rc = acs_state.read_msg(test_message_1)
        self.assertEqual(type(rc), AcsReadMsgResult)
        self.assertEqual(rc.next_state, 'done')
        self.assertEqual(rc.msg_handled, True)
        # test 2: No unknown_param_transition
        # ensure we go to missing state when there are
        # optional params to check and missing state is specified
        acs_state = WaitEmptyMessageState(
            self._get_acs(),
            when_done='done',
            when_missing='missing',
        )
        rc = acs_state.read_msg(test_message_1)
        self.assertEqual(type(rc), AcsReadMsgResult)
        self.assertEqual(rc.next_state, 'missing')
        self.assertEqual(rc.msg_handled, True)
        # test 3: Negative test case send a message that is not empty
        # ensure we return msg_handled is False
        acs_state = WaitEmptyMessageState(
            self._get_acs(),
            when_done='done',
            when_missing='missing',
        )
        rc = acs_state.read_msg(test_message_2)
        self.assertEqual(type(rc), AcsReadMsgResult)
        self.assertEqual(rc.next_state, None)
        self.assertEqual(rc.msg_handled, False)
        # test 4: Test get_msg
        rc = acs_state.get_msg(test_message_1)
        self.assertEqual(type(rc), AcsMsgAndTransition)
        self.assertEqual(type(rc.msg), models.DummyInput)
        self.assertEqual(rc.next_state, None)
5,404 | list contract by code | from subprocess import Popen, PIPE
import json
import time
# Presetup some commands
query_list_code = ['secretcli', 'query', 'compute', 'list-code']
MAX_TRIES = 10
GAS_METRICS = []
STORE_GAS = '4000000'
GAS = '4000000'
def run_command(command):
    """
    Run a CLI command and return its textual output.

    :param command: Argument list to execute
    :return: stdout if any was produced, otherwise stderr
    """
    #print(' '.join(command))
    p = Popen(command, stdout=PIPE, stderr=PIPE, text=True)
    output, err = p.communicate()
    # Fix: communicate() already waits for process termination; the extra
    # p.wait() call and its unused return value were dead code. The
    # docstring also documented a `wait` parameter that never existed.
    if err and not output:
        return err
    return output
def store_contract(contract, user='a', backend='test'):
    """
    Store a compiled contract on-chain and return its code ID.

    :param contract: Compiled contract filename under ./compiled/
    :param user: Key name to upload with (gas comes from the STORE_GAS global)
    :param backend: Keyring backend; None omits the flag
    :return: The code_id value, or the raw tx result if it could not be parsed
    """
    command = ['secretcli', 'tx', 'compute', 'store', f'./compiled/{contract}',
               '--from', user, '--gas', STORE_GAS, '-y']
    if backend is not None:
        command += ['--keyring-backend', backend]
    output = run_command_query_hash(command)
    try:
        for attribute in output['logs'][0]['events'][0]['attributes']:
            if attribute["key"] == "code_id":
                return attribute['value']
    except (TypeError, KeyError, IndexError):
        # Fix: narrowed from a bare `except:` so unrelated errors (and
        # KeyboardInterrupt) are no longer silently swallowed; an
        # unexpected tx-result shape still falls back to the raw output.
        # print(output)
        return output
def instantiate_contract(contract, msg, label, user='a', backend='test'):
    """
    Instantiate a stored contract code ID under the given label.

    :param contract: Code ID of the stored contract
    :param msg: Init message (already-serialized string)
    :param label: Human-readable label for the new contract instance
    :param user: Key name to instantiate with
    :param backend: Keyring backend; None omits the flag
    :return: Parsed transaction result
    """
    command = [
        'secretcli', 'tx', 'compute', 'instantiate', contract, msg,
        '--from', user, '--label', label, '-y', '--gas', '500000',
    ]
    if backend is not None:
        command.extend(['--keyring-backend', backend])
    return run_command_query_hash(command)
def list_code():
    """Return the parsed list of contract codes stored on-chain."""
    raw = run_command(['secretcli', 'query', 'compute', 'list-code'])
    return json.loads(raw)
def list_contract_by_code(code):
    """Return the contracts instantiated from the given code ID (restored
    name for the METHOD_NAME placeholder, matching the CLI subcommand)."""
    command = ['secretcli', 'query', 'compute', 'list-contract-by-code', code]
    return json.loads(run_command(command))
def contract_hash(address):
    """Return the raw code-hash output for a contract address."""
    return run_command(['secretcli', 'query', 'compute', 'contract-hash', address])
def execute_contract(contract, msg, user='a', backend='test', amount=None, compute=True):
    """
    Execute a contract message and wait for the transaction result.

    :param contract: Contract address
    :param msg: Message dict (serialized to JSON here)
    :param user: Key name to execute with
    :param backend: Keyring backend; None omits the flag
    :param amount: Optional coins to send along with the execution
    :param compute: When True, poll the decrypted compute result; otherwise
        poll the plain transaction result
    """
    command = [
        'secretcli', 'tx', 'compute', 'execute', contract, json.dumps(msg),
        '--from', user, '--gas', GAS, '-y',
    ]
    if backend is not None:
        command.extend(['--keyring-backend', backend])
    if amount is not None:
        command.extend(['--amount', amount])
    runner = run_command_compute_hash if compute else run_command_query_hash
    return runner(command)
def query_hash(hash):
    """Fetch a transaction result by its hash via `secretcli q tx`."""
    command = ['secretcli', 'q', 'tx', hash]
    return run_command(command)
def compute_hash(hash):
    """Query the decrypted compute result for a transaction hash."""
    # NOTE(review): the parameter name shadows the `hash` builtin, and this
    # print looks like leftover debug output -- confirm before changing.
    print(hash)
    return run_command(['secretcli', 'q', 'compute', 'tx', hash])
def query_contract(contract, msg):
    """Run a contract query and return the parsed JSON response.

    On unparsable output the raw text is printed before re-raising, to aid
    debugging CLI errors.
    """
    command = ['secretcli', 'query', 'compute', 'query', contract, json.dumps(msg)]
    raw = run_command(command)
    try:
        return json.loads(raw)
    except json.JSONDecodeError as e:
        print(raw)
        raise e
def run_command_compute_hash(command):
    """Run a tx command, then poll `secretcli q compute tx` until the
    decrypted result is available, recording gas usage in GAS_METRICS.

    Returns the parsed compute result, or None (after printing the last
    output) if MAX_TRIES polls are exhausted.
    """
    out = run_command(command)
    try:
        txhash = json.loads(out)["txhash"]
        #print(txhash)
    except Exception as e:
        # print(out)
        raise e
    for _ in range(MAX_TRIES):
        try:
            out = compute_hash(txhash)
            out = json.loads(out)
            # print(out)
            # querying hash once the hash is computed so we can check gas usage
            tx_data = json.loads(query_hash(txhash))
            # print(json.dumps(tx_data))
            # print('gas:', tx_data['gas_used'], '\t/', tx_data['gas_wanted'])
            GAS_METRICS.append({
                'want': tx_data['gas_wanted'],
                'used': tx_data['gas_used'],
                'cmd': ' '.join(command)
            })
            return out
        except json.JSONDecodeError as e:
            # Result not yet committed on-chain; wait a block and retry.
            time.sleep(1)
    print(out)
    print(' '.join(command), f'exceeded max tries ({MAX_TRIES})')
def run_command_query_hash(command):
    """Run a tx command, then poll `secretcli q tx` until the transaction
    result is available, recording gas usage in GAS_METRICS.

    Returns the parsed transaction, or None (after printing the last
    output) if MAX_TRIES polls are exhausted.
    """
    out = run_command(command)
    try:
        txhash = json.loads(out)["txhash"]
    except json.JSONDecodeError as e:
        print(out)
        raise e
    for _ in range(MAX_TRIES):
        try:
            # TODO: Read the gas used and store somewhere for metrics
            out = query_hash(txhash)
            out = json.loads(out)
            # print('gas:', out['gas_used'], '\t/', out['gas_wanted'])
            GAS_METRICS.append({
                'want': out['gas_wanted'],
                'used': out['gas_used'],
                'cmd': ' '.join(command)
            })
            return out
        except json.JSONDecodeError as e:
            # Result not yet committed on-chain; wait a block and retry.
            time.sleep(1)
    print(out)
    print(' '.join(command), f'exceeded max tries ({MAX_TRIES})')
5,405 | convert length | # docbook.py: extension module
# $Id: docbook.py 8353 2009-03-17 16:57:50Z mzjn $
import sys
import string
import libxml2
import libxslt
import re
import math
# Some globals
pixelsPerInch = 96.0
unitHash = { 'in': pixelsPerInch,
'cm': pixelsPerInch / 2.54,
'mm': pixelsPerInch / 25.4,
'pc': (pixelsPerInch / 72.0) * 12,
'pt': pixelsPerInch / 72.0,
'px': 1 }
# ======================================================================
def adjustColumnWidths(ctx, nodeset):
    # XSLT extension function (Python 2 / libxslt): rewrite the width
    # attributes of the col / fo:table-column elements in `nodeset` so that
    # mixed relative ("2*") and absolute ("3in") widths become a consistent
    # set of absolute widths or percentages. Returns the modified nodeset.
    #
    # Small check to verify the context is correcly accessed
    #
    try:
        pctxt = libxslt.xpathParserContext(_obj=ctx)
        ctxt = pctxt.context()
        tctxt = ctxt.transformContext()
    except:
        pass

    # Get the nominal table width
    varString = lookupVariable(tctxt, "nominal.table.width", None)
    if varString == None:
        nominalWidth = 6 * pixelsPerInch;
    else:
        nominalWidth = METHOD_NAME(varString);

    # Get the requested table width
    tableWidth = lookupVariable(tctxt, "table.width", "100%")

    foStylesheet = (tctxt.variableLookup("stylesheet.result.type", None) == "fo")

    relTotal = 0
    relParts = []
    absTotal = 0
    absParts = []

    colgroup = libxml2.xmlNode(_obj = nodeset[0])

    # If this is an foStylesheet, we've been passed a list of fo:table-columns.
    # Otherwise we've been passed a colgroup that contains a list of cols.
    if foStylesheet:
        colChildren = colgroup
    else:
        colChildren = colgroup.children

    # First pass: split each width like "2*3in" into a relative part
    # (before "*") and an absolute part (after "*", converted to pixels).
    col = colChildren
    while col != None:
        if foStylesheet:
            width = col.prop("column-width")
        else:
            width = col.prop("width")
        if width == None:
            width = "1*"

        relPart = 0.0
        absPart = 0.0
        starPos = string.find(width, "*")
        if starPos >= 0:
            relPart, absPart = string.split(width, "*", 2)
            relPart = float(relPart)
            relTotal = relTotal + float(relPart)
        else:
            absPart = width

        pixels = METHOD_NAME(absPart)
        absTotal = absTotal + pixels

        relParts.append(relPart)
        absParts.append(pixels)
        col = col.next

    # Ok, now we have the relative widths and absolute widths in
    # two parallel arrays.
    #
    # - If there are no relative widths, output the absolute widths
    # - If there are no absolute widths, output the relative widths
    # - If there are a mixture of relative and absolute widths,
    #   - If the table width is absolute, turn these all into absolute
    #     widths.
    #   - If the table width is relative, turn these all into absolute
    #     widths in the nominalWidth and then turn them back into
    #     percentages.
    widths = []
    if relTotal == 0:
        for absPart in absParts:
            if foStylesheet:
                inches = absPart / pixelsPerInch
                widths.append("%4.2fin" % inches)
            else:
                widths.append("%d" % absPart)
    elif absTotal == 0:
        for relPart in relParts:
            rel = relPart / relTotal * 100
            widths.append(rel)
        widths = correctRoundingError(widths)
    else:
        # Mixed case: distribute the leftover pixel width over the
        # relative parts, then format per the table-width style.
        pixelWidth = nominalWidth
        if string.find(tableWidth, "%") < 0:
            pixelWidth = METHOD_NAME(tableWidth)

        if pixelWidth <= absTotal:
            print "Table is wider than table width"
        else:
            pixelWidth = pixelWidth - absTotal

        absTotal = 0
        for count in range(len(relParts)):
            rel = relParts[count] / relTotal * pixelWidth
            relParts[count] = rel + absParts[count]
            absTotal = absTotal + rel + absParts[count]

        if string.find(tableWidth, "%") < 0:
            for count in range(len(relParts)):
                if foStylesheet:
                    pixels = relParts[count]
                    inches = pixels / pixelsPerInch
                    widths.append("%4.2fin" % inches)
                else:
                    widths.append(relParts[count])
        else:
            for count in range(len(relParts)):
                rel = relParts[count] / absTotal * 100
                widths.append(rel)
            widths = correctRoundingError(widths)

    # Danger, Will Robinson! In-place modification of the result tree!
    # Side-effect free? We don' need no steenkin' side-effect free!
    count = 0
    col = colChildren
    while col != None:
        if foStylesheet:
            col.setProp("column-width", widths[count])
        else:
            col.setProp("width", widths[count])
        count = count+1
        col = col.next

    return nodeset
def convertLength(length):
    # Convert a CSS-style length such as "3.4in" into pixels. Unrecognized
    # units fall back to pixels-per-inch after a warning; a completely
    # unparsable string yields 0. (Restored camelCase name for the masked
    # METHOD_NAME placeholder, matching adjustColumnWidths et al.)
    global pixelsPerInch
    global unitHash
    # Raw string avoids the invalid-escape deprecation for \d, \S.
    m = re.search(r'([+-]?[\d\.]+)(\S+)', length)
    if m != None and m.lastindex > 1:
        unit = pixelsPerInch
        # Fix: dict.has_key() does not exist in Python 3; the `in`
        # operator behaves identically on both Python 2 and 3.
        if m.group(2) in unitHash:
            unit = unitHash[m.group(2)]
        else:
            # Parenthesized print works identically on Python 2 and 3
            # for a single string argument.
            print("Unrecognized length: " + m.group(2))
        pixels = unit * float(m.group(1))
    else:
        pixels = 0
    return pixels

# Keep the old (masked) name working for existing callers.
METHOD_NAME = convertLength
def correctRoundingError(floatWidths):
    # The widths are floating point percentages; truncate them to integers
    # and spread the accumulated truncation error across the columns so the
    # result still sums to exactly 100%.
    truncated = [math.floor(w) for w in floatWidths]
    remainingError = 100 - sum(truncated)
    perColumnError = remainingError / len(truncated)

    formatted = []
    carried = 0
    for width in truncated:
        carried = carried + perColumnError
        if carried >= 1.0:
            # Fold the whole-percent part of the carried error into this
            # column and keep only the fractional remainder.
            adjustment = math.floor(carried)
            carried = carried - adjustment
            formatted.append("%d%%" % (width + adjustment))
        else:
            formatted.append("%d%%" % width)
    return formatted
def lookupVariable(tctxt, varName, default):
    # Look up an XSLT variable and return its string value, falling back
    # to `default` when the variable is not defined.
    value = tctxt.variableLookup(varName, None)
    if value == None:
        return default
    # A nodeset lookup returns a list; take its first node.
    if type(value) == type([]):
        value = value[0]
    # Anything that is not already a string is a node: use its text content.
    if type(value) != type(""):
        value = value.content
    return value
# ======================================================================
# Random notes...
#once you have a node which is a libxml2 python xmlNode wrapper all common
#operations are possible:
# .children .last .parent .next .prev .doc for navigation
# .content .type for introspection
# .prop("attribute_name") to lookup attribute values
# # Now make a nodeset to return
# # Danger, Will Robinson! This creates a memory leak!
# newDoc = libxml2.newDoc("1.0")
# newColGroup = newDoc.newDocNode(None, "colgroup", None)
# newDoc.addChild(newColGroup)
# col = colgroup.children
# while col != None:
# newCol = newDoc.newDocNode(None, "col", None)
# newCol.copyPropList(col);
# newCol.setProp("width", "4")
# newColGroup.addChild(newCol)
# col = col.next |
5,406 | add args | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch.optim.lr_scheduler
from . import FairseqLRScheduler, register_lr_scheduler
@register_lr_scheduler('reduce_lr_on_plateau')
class ReduceLROnPlateau(FairseqLRScheduler):
    """
    Decay the LR by a factor every time the validation loss plateaus.

    Also comes with optional warmup phase, where we linearly increase the
    learning rate from some initial learning rate (``--warmup-init-lr``)
    until the configured learning rate (``--lr``). Thereafter the lr is
    adjusted according to the original reduce_on_plateau scheme.

    During warmup::

        lrs = torch.linspace(args.warmup_init_lr, args.lr, args.warmup_updates)
        lr = lrs[update_num]
    """

    def __init__(self, args, optimizer):
        super().__init__(args, optimizer)
        if len(args.lr) > 1:
            raise ValueError(
                'Cannot use a fixed learning rate schedule with reduce_lr_on_plateau.'
                ' Consider --lr-scheduler=fixed instead.'
            )
        self.lr_scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(
            self.optimizer.optimizer, patience=0, factor=args.lr_shrink,
            threshold=args.lr_threshold)
        warmup_end_lr = args.lr[0]
        # If no warmup is requested, start directly at args.lr[0].
        # (These explanatory notes used to be bare string literals, i.e.
        # no-op statements; they are real comments now.)
        if args.warmup_init_lr < 0:
            args.warmup_init_lr = 0 if args.warmup_updates > 0 else warmup_end_lr
        # Linearly warmup for the first args.warmup_updates updates.
        if args.warmup_updates > 0:
            self.lr_step = (warmup_end_lr - args.warmup_init_lr) / args.warmup_updates
        # This flag is either set here when there is no warmup, or set by
        # step_update() when the warmup phase finishes.
        self.warmup_end = args.warmup_updates <= 0
        # Initial learning rate; self.lr is only used during init and/or
        # the warmup period.
        self.lr = args.warmup_init_lr
        self.optimizer.set_lr(self.lr)

    @staticmethod
    def METHOD_NAME(parser):
        """Add arguments to the parser for this LR scheduler."""
        # fmt: off
        parser.add_argument('--lr-shrink', default=0.1, type=float, metavar='LS',
                            help='shrink factor for annealing, lr_new = (lr * lr_shrink)')
        parser.add_argument('--lr-threshold', default=1e-4, type=float, metavar='LT',
                            help='Threshold for measuring the new optimum, \
                            to only focus on significant changes')
        parser.add_argument('--warmup-updates', default=0, type=int, metavar='N',
                            help='warmup the learning rate linearly for the first N updates')
        parser.add_argument('--warmup-init-lr', default=-1, type=float, metavar='LR',
                            help='initial learning rate during warmup phase; default is args.lr')
        # fmt: on

    def state_dict(self):
        """Return the LR scheduler state dict."""
        return {
            'best': self.lr_scheduler.best,
            'last_epoch': self.lr_scheduler.last_epoch,
        }

    def load_state_dict(self, state_dict):
        """Load an LR scheduler state dict."""
        self.lr_scheduler.best = state_dict['best']
        if 'last_epoch' in state_dict:
            self.lr_scheduler.last_epoch = state_dict['last_epoch']

    def step(self, epoch, val_loss=None):
        """Update the learning rate at the end of the given epoch, but only
        once warmup has finished; during warmup just record the epoch."""
        if val_loss is not None and self.warmup_end is True:
            self.lr_scheduler.step(val_loss, epoch)
        else:
            self.lr_scheduler.last_epoch = epoch
        return self.optimizer.get_lr()

    def step_update(self, num_updates):
        """Update the learning rate after each update."""
        if self.args.warmup_updates > 0:
            if num_updates <= self.args.warmup_updates:
                # Still warming up: interpolate linearly towards args.lr[0].
                self.lr = self.args.warmup_init_lr + num_updates * self.lr_step
                self.optimizer.set_lr(self.lr)
            else:
                # Warmup just finished: hand control over to ReduceLROnPlateau.
                if self.warmup_end is False:
                    self.warmup_end = True
        # No warmup configured: nothing to do per-update.
        return self.optimizer.get_lr()
import itertools
import os
import re
import logbook
from .generator_fixture import GeneratorFixture
_logger = logbook.Logger(__name__)
def validate_run(suite, run_result, expect_interruption, expect_session_errors):
    """Validate a full `slash run` outcome: exit code, session-level errors,
    and every per-test result."""
    session_results = run_result.session.results
    global_result = session_results.global_result
    if expect_interruption:
        assert global_result.is_interrupted(), \
            'Session global result is not marked as interrupted, even though interruption was expected'
    expect_nonzero_exit = expect_interruption or not session_results.is_success(allow_skips=True)
    if expect_nonzero_exit:
        assert run_result.exit_code != 0, '`slash run` unexpectedly returned 0'
    else:
        assert run_result.exit_code == 0, '`slash run` unexpectedly returned {}. Output: {}'.format(
            run_result.exit_code, run_result.get_console_output())
    errors = global_result.get_errors() + global_result.get_failures()
    if expect_session_errors:
        assert errors, 'Expected session errors but found none'
    else:
        assert not errors, 'Sessions errors were not expected (Got {})'.format(errors)
    for test, test_results in _group_results_by_test_id(suite, run_result).items():
        _validate_single_test(test, test_results)
def _validate_single_test(test, results):
    """Match every expected parameter combination/repetition of *test* against
    the collected *results*, consuming each matched result from the list.

    Fails when a combination has no matching result, or when results remain
    unmatched at the end.
    """
    # Map parameter id -> parameter name; needed to test exclusions below,
    # which are keyed by (name, value).
    param_names = {p.id: p.name for p in _find_all_parameters(test)}
    for param_values in _iter_param_value_sets(test):
        # A combination is excluded if any of its (name, value) pairs was
        # explicitly marked as excluded on the test.
        is_excluded = any((param_names[param_id], value) in test.excluded_param_values for param_id, value in param_values.items())
        for repetition in range(test.get_num_expected_repetitions()):  # pylint: disable=unused-variable
            for index, result in enumerate(results):
                if _result_matches(result, param_values):
                    # Consume the result so each repetition maps to a distinct one.
                    results.pop(index)
                    if is_excluded:
                        assert result.is_skip()
                    else:
                        METHOD_NAME(test, result)
                    break
            else:
                # for/else: no result matched this parameter combination.
                assert False, 'Could not find parameter set {}'.format(
                    param_values)
    assert not results, 'Unmatched results exist'
def _iter_param_value_sets(test):
    """Yield one ``{param_id: value}`` dict per combination of parameter values."""
    parameters = _find_all_parameters(test)
    ids = [parameter.id for parameter in parameters]
    value_lists = [parameter.values for parameter in parameters]
    for chosen in itertools.product(*value_lists):
        yield dict(zip(ids, chosen))
def _find_all_parameters(func):
    """Collect all parameters reachable from *func*, deduplicated by id.

    Walks the fixture graph iteratively: a GeneratorFixture is itself a
    parameter; any other fixture is descended into.
    """
    params = []
    stack = [func]
    while stack:
        f = stack.pop()
        for subfixture in f.get_fixtures():
            if isinstance(subfixture, GeneratorFixture):
                params.append(subfixture)
            else:
                # was: append + redundant `continue` before the else branch
                stack.append(subfixture)
        params.extend(f.get_parameters())
    # This function returns a list of parameters (type: Parameter) with unique
    # IDs; plain list(params) would not be enough because ids may repeat.
    return list({p.id: p for p in params}.values())
def _result_matches(result, param_values):
    """Return True when *result* ran with exactly *param_values*.

    Recorded variation keys are qualified names; each is normalized to the
    bare parameter name before comparing against *param_values*.
    """
    values = result.test_metadata.variation.values.copy()
    for param_name in list(values):
        # handle the case of a fixture with a single param, which is logically
        # a parameter by itself; fix: the dot is now escaped so e.g.
        # 'fx_1Xparam' no longer matches 'fx_<n>.param' by accident
        if re.match(r'^fx_\d+\.param$', param_name):
            values_name = param_name.split('_')[1].split('.')[0]
        else:
            values_name = param_name.rsplit('_', 1)[-1]
        values[values_name] = values.pop(param_name)
    return values == param_values
def METHOD_NAME(test, result):
    """Assert that *result* matches the outcome *test* declares it expects.

    Dispatches on the test's declared expected-result string; an unknown
    value is a programming error, not a test failure.
    """
    expected = test.get_expected_result()
    if expected == 'ERROR':
        assert result.is_error(), 'Test did not issue error as expected'
    elif expected == 'FAIL':
        assert result.is_failure(), 'Test did not fail as expected'
    elif expected == 'SUCCESS':
        # Message is only built on failure (assert evaluates it lazily).
        assert result.is_success(), 'Test {} unexpectedly unsuccessful:\n{}'.format(
            test.id, list(itertools.chain(result.get_errors(), result.get_failures())))
    elif expected == 'INTERRUPT':
        assert result.is_interrupted(), 'Test did not get interrupted as expected'
    elif expected == 'SKIP':
        assert result.is_skip()
    elif expected == 'NOT_RUN':
        assert result.is_not_run()
    else:
        raise NotImplementedError(
            'Unknown expected result: {!r}'.format(expected))  # pragma: no cover
def _group_results_by_test_id(suite, run_result):
    """Group session results by their suite test, asserting full coverage.

    Interactive results are ignored.  Fails when a result belongs to a
    deselected test, or when a selected test produced no results at all.
    """
    tests_by_id = {test.id: test for test in suite}
    pending = tests_by_id.copy()
    grouped = {}
    for result in run_result.session.results:
        if result.test_metadata.is_interactive():
            continue
        test_id = get_test_id_from_test_address(result.test_metadata.address)
        matched_test = tests_by_id[test_id]
        assert matched_test.is_selected(), 'Test {} appears in results, although not expected!'.format(test_id)
        grouped.setdefault(matched_test, []).append(result)
        pending.pop(test_id, None)
    # Deselected tests are not expected to have produced any results.
    for test_id, test in list(pending.items()):
        if not test.is_selected():
            pending.pop(test_id, None)
    assert not pending, 'Expected results not found ({})'.format(pending)
    return grouped
def get_test_id_from_test_address(addr):
    """Extract the numeric suite-test id from a test address string."""
    _, remainder = os.path.splitdrive(addr)
    # Drop the file part before the first ':', keeping the test locator.
    _, locator = remainder.split(':', 1)
    # Locator looks like '...test_<id>(<params>)'; pull out <id>.
    return locator.split('_')[1].split('(')[0]
#! /usr/bin/env python
"""
Version of Services/CRIC intended to be used with mock or unittest.mock
"""
from __future__ import division, print_function
from builtins import object, str, bytes
import os
import json
import re
from WMCore.WMBase import getTestBase
from RestClient.ErrorHandling.RestClientExceptions import HTTPError
# Read in the data just once so that we don't have to do it for every test (in __init__)
mockData = {}
# Fixture file lives alongside the test data tree, relative to the test base.
globalFile = os.path.join(getTestBase(), '..', 'data', 'Mock', 'CRICMockData.json')
print("Reading mocked CRIC data from the file %s" % globalFile)
try:
    with open(globalFile, 'r') as mockFile:
        mockData = json.load(mockFile)
except IOError:
    # Missing/unreadable fixture: fall back to an empty dataset so the module
    # still imports; lookups will then fail inside MockCRICApi.genericLookup.
    mockData = {}
class MockCRICApi(object):
    """Drop-in stand-in for the CRIC API client that serves canned data
    from CRICMockData.json instead of contacting the real CRIC service."""

    def __init__(self, url=None, logger=None, configDict=None):
        print("Using MockCRICApi")

    def genericLookup(self, callname):
        """
        This function returns the mocked CRIC data
        :param callname: the CRIC REST API name
        :return: the dictionary that CRIC would have returned
        """
        if callname not in mockData:
            raise RuntimeError("Mock CRIC emulator knows nothing about API %s" % callname)
        payload = mockData[callname]
        # A sentinel value lets tests exercise the HTTP error path.
        if payload == 'Raises HTTPError':
            raise HTTPError('http:/cric.mock.fail', 400, 'MockCRIC is raising an exception in place of CRIC',
                            'Dummy header', 'Dummy body')
        return payload

    def userNameDn(self, username):
        """Return the DN registered for *username*, or an empty string."""
        people = self.genericLookup('people')
        return next((person['dn'] for person in people if person['username'] == username), "")

    def getAllPSNs(self):
        """Return the aliases of all processing site names (PSNs)."""
        entries = self.genericLookup('site-names')
        return [entry['alias'] for entry in entries if entry['type'] == 'psn']

    def getAllPhEDExNodeNames(self, pattern=None, excludeBuffer=False):
        """Return PhEDEx node aliases, optionally regex-filtered and with
        *_Buffer nodes excluded."""
        entries = self.genericLookup('site-names')
        nodes = [entry['alias'] for entry in entries if entry['type'] == 'phedex']
        if excludeBuffer:
            nodes = [node for node in nodes if not node.endswith("_Buffer")]
        if pattern and isinstance(pattern, (str, bytes)):
            matcher = re.compile(pattern)
            nodes = [node for node in nodes if matcher.match(node)]
        return nodes

    def PNNstoPSNs(self, pnns):
        """Map one or more PhEDEx node names to their PSNs."""
        mapping = self.genericLookup('data-processing')
        if isinstance(pnns, (str, bytes)):
            pnns = [pnns]
        psns = set()
        for pnn in pnns:
            # update() with an empty set is a no-op, so no guard is needed.
            psns.update(entry['psn_name'] for entry in mapping if entry['phedex_name'] == pnn)
        return list(psns)

    def METHOD_NAME(self, psns, allowPNNLess=False):
        """Map one or more PSNs to their PhEDEx node names; with
        *allowPNNLess*, an unmapped PSN falls back to itself."""
        mapping = self.genericLookup('data-processing')
        if isinstance(psns, (str, bytes)):
            psns = [psns]
        pnns = set()
        for psn in psns:
            matches = {entry['phedex_name'] for entry in mapping if entry['psn_name'] == psn}
            if matches:
                pnns.update(matches)
            elif allowPNNLess:
                pnns.add(psn)
        return list(pnns)

    def PSNtoPNNMap(self, psnPattern=''):
        """Return {psn_name: set(phedex_names)} for PSNs matching *psnPattern*."""
        if not isinstance(psnPattern, (str, bytes)):
            raise TypeError('psnPattern argument must be of type str or bytes')
        entries = self.genericLookup('data-processing')
        matcher = re.compile(psnPattern)
        result = {}
        for entry in entries:
            if matcher.match(entry['psn_name']):
                result.setdefault(entry['psn_name'], set()).add(entry['phedex_name'])
        return result
# #
# Copyright 2021-2023 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
EasyBuild support for installing Intel compilers, implemented as an easyblock
@author: Kenneth Hoste (Ghent University)
"""
import os
from distutils.version import LooseVersion
from easybuild.easyblocks.generic.intelbase import IntelBase
from easybuild.easyblocks.t.tbb import get_tbb_gccprefix
from easybuild.tools.build_log import EasyBuildError, print_msg
from easybuild.tools.run import run_cmd
class EB_intel_minus_compilers(IntelBase):
    """
    Support for installing Intel compilers, starting with verion 2021.x (oneAPI)
    """

    def __init__(self, *args, **kwargs):
        """
        Easyblock constructor: check version
        """
        super(EB_intel_minus_compilers, self).__init__(*args, **kwargs)

        # this easyblock is only valid for recent versions of the Intel compilers (2021.x, oneAPI)
        if LooseVersion(self.version) < LooseVersion('2021'):
            raise EasyBuildError("Invalid version %s, should be >= 2021.x" % self.version)

        self.compilers_subdir = os.path.join('compiler', self.version, 'linux')
        # note that tbb may have a lower version number than the compiler, so use 'latest' symlink
        # for example compiler 2021.1.2 has tbb 2021.1.1.
        self.tbb_subdir = os.path.join('tbb', 'latest')

    def METHOD_NAME(self, *args, **kwargs):
        """
        Prepare environment for installing.

        Specify that oneAPI versions of Intel compilers don't require a runtime license.
        """
        # avoid that IntelBase trips over not having license info specified
        kwargs['requires_runtime_license'] = False
        super(EB_intel_minus_compilers, self).METHOD_NAME(*args, **kwargs)

    def configure_step(self):
        """Configure installation."""
        # redefine $HOME for install step, to avoid that anything is stored in $HOME/intel
        # (like the 'installercache' database)
        self.cfg['preinstallopts'] += " HOME=%s " % self.builddir

    def install_step(self):
        """
        Install step: install each 'source file' one by one.

        Installing the Intel compilers could be done via a single installation file (HPC Toolkit),
        or with separate installation files (patch releases of the C++ and Fortran compilers).
        """
        srcs = self.src[:]
        cnt = len(srcs)
        for idx, src in enumerate(srcs):
            print_msg("installing part %d/%s (%s)..." % (idx + 1, cnt, src['name']))
            # IntelBase.install_step operates on self.src, so install one at a time
            self.src = [src]
            super(EB_intel_minus_compilers, self).install_step()

    def sanity_check_step(self):
        """
        Custom sanity check for Intel compilers.
        """
        classic_compiler_cmds = ['icc', 'icpc', 'ifort']
        oneapi_compiler_cmds = [
            'dpcpp',  # Intel oneAPI Data Parallel C++ compiler
            'icx',  # oneAPI Intel C compiler
            'icpx',  # oneAPI Intel C++ compiler
            'ifx',  # oneAPI Intel Fortran compiler
        ]
        bindir = os.path.join(self.compilers_subdir, 'bin')
        # classic compilers live in bin/intel64/, oneAPI compilers directly in bin/;
        # fix: the two path lists were previously built from the swapped command
        # lists, which only happened to work because they are concatenated below
        classic_compiler_paths = [os.path.join(bindir, 'intel64', x) for x in classic_compiler_cmds]
        oneapi_compiler_paths = [os.path.join(bindir, x) for x in oneapi_compiler_cmds]

        custom_paths = {
            'files': classic_compiler_paths + oneapi_compiler_paths,
            'dirs': [self.compilers_subdir],
        }

        all_compiler_cmds = classic_compiler_cmds + oneapi_compiler_cmds
        custom_commands = ["which %s" % c for c in all_compiler_cmds]

        # only for 2021.x versions do all compiler commands have the expected version;
        # for example: for 2022.0.1, icc has version 2021.5.0, icpx has 2022.0.0
        if LooseVersion(self.version) >= LooseVersion('2022.0'):
            custom_commands.extend("%s --version" % c for c in all_compiler_cmds)
        else:
            custom_commands.extend("%s --version | grep %s" % (c, self.version) for c in all_compiler_cmds)

        super(EB_intel_minus_compilers, self).sanity_check_step(custom_paths=custom_paths,
                                                                custom_commands=custom_commands)

    def make_module_req_guess(self):
        """
        Paths to consider for prepend-paths statements in module file
        """
        libdirs = [
            'lib',
            os.path.join('lib', 'x64'),
            os.path.join('compiler', 'lib', 'intel64_lin'),
        ]
        libdirs = [os.path.join(self.compilers_subdir, x) for x in libdirs]
        # resolve 'latest' symlink for tbb (if module guess is run with install in place)
        if os.path.islink(os.path.join(self.installdir, self.tbb_subdir)):
            tbb_version = os.readlink(os.path.join(self.installdir, self.tbb_subdir))
        else:
            tbb_version = 'latest'
        tbb_subdir = os.path.join('tbb', tbb_version)
        tbb_libsubdir = os.path.join(tbb_subdir, 'lib', 'intel64')
        libdirs.append(os.path.join(tbb_libsubdir,
                                    get_tbb_gccprefix(os.path.join(self.installdir, tbb_libsubdir))))
        guesses = {
            'PATH': [
                os.path.join(self.compilers_subdir, 'bin'),
                os.path.join(self.compilers_subdir, 'bin', 'intel64'),
            ],
            'LD_LIBRARY_PATH': libdirs,
            'LIBRARY_PATH': libdirs,
            'MANPATH': [
                os.path.join('compiler', self.version, 'documentation', 'en', 'man', 'common'),
            ],
            'OCL_ICD_FILENAMES': [
                os.path.join(self.compilers_subdir, 'lib', 'x64', 'libintelocl.so'),
            ],
            'CPATH': [
                os.path.join(tbb_subdir, 'include'),
            ],
            'TBBROOT': [tbb_subdir],
        }
        return guesses

    def make_module_extra(self):
        """Additional custom variables for intel-compiler"""
        txt = super(EB_intel_minus_compilers, self).make_module_extra()

        # On Debian/Ubuntu, /usr/include/x86_64-linux-gnu, or whatever dir gcc uses, needs to be included
        # in $CPATH for Intel C compiler
        multiarch_out, ec = run_cmd("gcc -print-multiarch", simple=False)
        multiarch_out = multiarch_out.strip()
        if ec == 0 and multiarch_out:
            multiarch_inc_dir, ec = run_cmd("gcc -E -Wp,-v -xc /dev/null 2>&1 | grep %s$" % multiarch_out)
            if ec == 0 and multiarch_inc_dir:
                multiarch_inc_dir = multiarch_inc_dir.strip()
                self.log.info("Adding multiarch include path %s to $CPATH in generated module file", multiarch_inc_dir)
                # system location must be appended at the end, so use append_paths
                txt += self.module_generator.append_paths('CPATH', [multiarch_inc_dir], allow_abs=True)

        return txt
from typing import Dict, Any, Tuple, List
import datasets
from primeqa.mrc.metrics.tydi_f1.eval_utils import Span, TyDiLabel
from primeqa.mrc.metrics.tydi_f1.tydi_eval import pretty_print
from primeqa.mrc.data_models.target_type import TargetType
_DESCRIPTION = """
The F1 score is the harmonic mean of the precision and recall. It can be computed with:
F1 = 2 * (precision * recall) / (precision + recall). This implementation of F1 is based
on the TyDi QA leaderboard.
Adapted from https://github.com/google-research-datasets/tydiqa/blob/master/tydi_eval.py.
"""
_KWARGS_DESCRIPTION = """
Args:
predictions: Predicted labels.
references: Ground truth labels.
passage_non_null_threshold: threshold for number of null annotations annotations to consider the passage answer as null (default=2)
span_non_null_threshold: threshold for number of null annotations annotations to consider the span answer as null (default=2)
verbose: dump reference and prediction for debugging purposes
Returns: metrics dict comprising:
* minimal_f1: Minimal Answer F1.
* minimal_precision: Minimal Answer Precision.
* minimal_recall: Minimal Answer Recall.
* passage_f1: Passage Answer F1.
* passage_precision: Passage Answer Precision.
* passage_recall: Passage Answer Recall.
"""
_CITATION = """\
@article{tydiqa,
title = {TyDi QA: A Benchmark for Information-Seeking Question Answering in Typologically Diverse Languages},
author = {Jonathan H. Clark and Eunsol Choi and Michael Collins and Dan Garrette and Tom Kwiatkowski and
Vitaly Nikolaev and Jennimaria Palomaki}
year = {2020},
journal = {Transactions of the Association for Computational Linguistics}
}
"""
@datasets.utils.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)
class TyDiF1(datasets.Metric):
    """TyDi QA F1 metric (minimal and passage answers), based on the official
    TyDi QA evaluation script (see reference_urls in _info)."""

    # Schema fields shared by both predictions and references.
    _common_answer_schema = dict(
        start_position=datasets.Value("int32"),
        end_position=datasets.Value("int32"),
        passage_index=datasets.Value("int32"),
        yes_no_answer=datasets.Value("int32"),
        example_id=datasets.Value("string"),
    )
    # Schema fields only present on predictions.
    _pred_answer_schema = dict(
        confidence_score=datasets.Value("float32"),
    )
    # Schema fields only present on references.
    _ref_answer_schema = dict(
        language=datasets.Value("string"),
        document_plaintext=datasets.Value("string"),
        question=datasets.Value("string")
    )

    def _info(self):
        """Describe the metric and its expected input features."""
        return datasets.MetricInfo(
            description=_DESCRIPTION,
            citation=_CITATION,
            inputs_description=_KWARGS_DESCRIPTION,
            features=datasets.Features(
                dict(
                    predictions={**self._common_answer_schema, **self._pred_answer_schema},
                    references=datasets.Sequence(feature={**self._common_answer_schema, **self._ref_answer_schema})
                )),
            reference_urls=["https://github.com/google-research-datasets/tydiqa/blob/master/tydi_eval.py"],
        )

    def _compute(self, *, predictions=None, references=None, passage_non_null_threshold=2, span_non_null_threshold=2, verbose=False, **kwargs) -> Dict[str, Any]:
        """Convert inputs to TyDi eval structures and delegate to pretty_print."""
        if not predictions:
            raise ValueError("No predictions provided")
        elif not references:
            raise ValueError("No references provided")

        # Both are keyed by example_id after conversion.
        predictions = dict(map(self.METHOD_NAME, predictions))
        references = dict(map(self._convert_ref_to_entry, references))
        metrics = pretty_print(references, predictions, passage_non_null_threshold=passage_non_null_threshold, span_non_null_threshold=span_non_null_threshold, verbose=verbose)
        return metrics

    def _convert_ref_to_entry(self, ref: dict) -> Tuple[str, List[TyDiLabel]]:
        """
        Converts a reference dict into an example_id, [labels] pair.
        """
        # All annotations of one reference must belong to the same example
        # and be in the same language.
        if not all(ref['example_id'][0] == ref['example_id'][i] for i in range(len(ref['example_id']))):
            raise ValueError("Found mismatched examples")
        elif not all(ref['language'][0] == ref['language'][i] for i in range(len(ref['language']))):
            raise ValueError("Found mismatched languages")

        key = ref['example_id'][0]
        # One TyDiLabel per annotation; scores are zero for gold labels.
        value = [
            TyDiLabel(
                example_id=ref['example_id'][i],
                passage_answer_index=ref['passage_index'][i],
                minimal_answer_span=Span(
                    ref['start_position'][i],
                    ref['end_position'][i])
                ,
                yes_no_answer=self._bool_target(
                    TargetType(ref['yes_no_answer'][i])
                ),
                passage_score=0,
                minimal_score=0,
                language=ref['language'][i],
                passage_span=None,
                question_text=ref['question'][i],
                plaintext=ref['document_plaintext'][i],
            ) for i in range(len(ref['passage_index']))
        ]
        return key, value

    def METHOD_NAME(self, pred: dict) -> Tuple[str, TyDiLabel]:
        """
        Converts a prediction dict into an example_id, label pair.
        """
        key = pred['example_id']
        # The model's confidence is used for both passage and minimal scores.
        value = TyDiLabel(
                example_id=pred['example_id'],
                passage_answer_index=pred['passage_index'],
                minimal_answer_span=Span(
                    pred['start_position'],
                    pred['end_position'])
                ,
                yes_no_answer=self._bool_target(
                    TargetType(pred['yes_no_answer'])
                ),
                passage_score=pred['confidence_score'] ,
                minimal_score=pred['confidence_score'] ,
                language=None,
                passage_span=None,
                question_text='',
                plaintext='',
            )
        return key, value

    @staticmethod
    def _bool_target(target_type: TargetType) -> str:
        """
        Converts a target type into a boolean string as expected by TyDi eval.
        """
        if target_type == TargetType.YES:
            return 'yes'
        elif target_type == TargetType.NO:
            return 'no'
        elif target_type == TargetType.NO_ANSWER:
            return 'none'
        else:
            raise NotImplementedError(f"Unexpected target type for tydi bool string conversion: {target_type}")
#!/usr/bin/env nemesis
#
# ----------------------------------------------------------------------
#
# Brad T. Aagaard, U.S. Geological Survey
# Charles A. Williams, GNS Science
# Matthew G. Knepley, University at Buffalo
#
# This code was developed as part of the Computational Infrastructure
# for Geodynamics (http://geodynamics.org).
#
# Copyright (c) 2010-2021 University of California, Davis
#
# See LICENSE.md for license information.
#
# ----------------------------------------------------------------------
#
# @file grabpoints/grabpoints
# @brief Python application to grab a set of points specified in a pset
# file from a UCD file along with the associated normals and write them to a
# file.
import math
import numpy
from pythia.pyre.applications.Script import Script as Application
class GrabPoints(Application):
    """Python application to grab a specified set of point coordinates and
    normals from a UCD file.
    """

    class Inventory(Application.Inventory):
        """Python object for managing GrabPoints facilities and properties.
        """

        # @class Inventory
        # Python object for managing GrabPoints facilities and properties.
        ##
        # \b Properties
        # @li \b pset_file Filename of file specifying vertex numbers.
        # @li \b ucd_file Filename of input UCD file.
        # @li \b point_output_file Filename of output set of points and normals.
        # @li \b values_list List specifying position of desired attributes in UCD file.
        # @li \b output_index Flag indicating whether to output the vertex indices.
        # @li \b exclude_zero_normals Flag indicating whether to exclude points if the associated normal has zero magnitude.
        ##
        # \b Facilities
        # @li None

        import pythia.pyre.inventory

        psetFile = pythia.pyre.inventory.str("pset_file", default="test.pset")
        psetFile.meta['tip'] = "Filename of pset file specifying vertex indices."

        ucdFile = pythia.pyre.inventory.str("ucd_file", default="test.inp")
        ucdFile.meta['tip'] = "Filename of ucd file containing mesh and attributes."

        pointOutputFile = pythia.pyre.inventory.str("point_output_file",
                                                    default="points.coordnorm")
        pointOutputFile.meta['tip'] = "Filename of output coordinates and normals."

        valuesList = pythia.pyre.inventory.list(
            "values_list", default=[1, 2, 3])
        valuesList.meta['tip'] = "Position of desired values in UCD attributes."

        outputIndex = pythia.pyre.inventory.bool("output_index", default=False)
        outputIndex.meta['tip'] = "Whether to output vertex indices."

        excludeZeroNormals = pythia.pyre.inventory.bool("exclude_zero_normals",
                                                        default=False)
        excludeZeroNormals.meta['tip'] = "Whether to exclude points with zero normals."

    # PUBLIC METHODS /////////////////////////////////////////////////////

    def __init__(self, name="grabpoints"):
        Application.__init__(self, name)
        self.numPoints = 0      # number of vertices listed in the pset file
        self.indices = []       # sorted vertex indices to grab
        self.pointCoords = []   # flat [x, y, z, ...] coords of grabbed vertices
        return

    def main(self):
        self._readPset()
        self._grabPoints()
        return

    # PRIVATE METHODS ////////////////////////////////////////////////////

    def METHOD_NAME(self):
        """Setup members using inventory.
        """
        Application.METHOD_NAME(self)
        self.psetFile = self.inventory.psetFile
        self.ucdFile = self.inventory.ucdFile
        self.pointOutputFile = self.inventory.pointOutputFile
        self.valuesList = self.inventory.valuesList
        self.outputIndex = self.inventory.outputIndex
        self.excludeZeroNormals = self.inventory.excludeZeroNormals
        return

    def _readPset(self):
        """Reads vertex indices from a pset file.
        """
        # fix: the Python 2 file() builtin no longer exists; use open() with a
        # context manager so the handle is closed even on error
        with open(self.psetFile) as f:
            lines = f.readlines()
        # The second line holds the point count as its third field.
        line2 = lines[1]
        self.numPoints = int(line2.split()[2])
        # Remaining lines are whitespace-separated vertex indices.
        for line in lines[2:]:
            for number in line.split():
                self.indices.append(int(number))
        self.indices.sort()
        return

    def _grabPoints(self):
        """Reads vertex coordinates and vertex attributes from a UCD file.
        """
        with open(self.ucdFile) as f:
            lines = f.readlines()
        firstline = lines[0].split()
        numVerts = int(firstline[0])
        numCells = int(firstline[1])
        numVertAttrs = int(firstline[2])
        vertInd = 0
        ucdInd = 1
        # Get vertex coordinates for the requested indices.
        for lineCount in range(1, numVerts + 1):
            vertex = self.indices[vertInd]
            if vertex == ucdInd:
                data = lines[lineCount].split()
                for dim in range(1, 4):
                    self.pointCoords.append(float(data[dim]))
                vertInd += 1
                # Clamp so the last index keeps being compared harmlessly.
                vertInd = min([vertInd, len(self.indices) - 1])
            ucdInd += 1
        # Skip elements and attribute headers, then read normals/values and
        # write out the selected points.
        lineBegin = 2 + numVerts + numCells + numVertAttrs
        lineEnd = lineBegin + numVerts
        vertInd = 0
        ucdInd = 1
        coordCount = 0
        normals = [0.0, 0.0, 0.0]
        # Positions (within the attribute columns) of the normal components.
        v0 = int(self.valuesList[0])
        v1 = int(self.valuesList[1])
        v2 = int(self.valuesList[2])
        with open(self.pointOutputFile, 'w') as o:
            for lineCount in range(lineBegin, lineEnd):
                vertex = self.indices[vertInd]
                if vertex == ucdInd:
                    data = lines[lineCount].split()
                    normals = [float(data[v0]), float(data[v1]), float(data[v2])]
                    # Optionally drop points whose normal is identically zero.
                    outputPoint = not self.excludeZeroNormals
                    outputPoint = outputPoint or \
                        normals[0] != 0.0 or \
                        normals[1] != 0.0 or \
                        normals[2] != 0.0
                    if outputPoint:
                        if self.outputIndex:
                            o.write(' %i' % vertex)
                        for dim in range(3):
                            o.write(' %.12e' %
                                    self.pointCoords[coordCount + dim])
                        for dim in range(3):
                            o.write(' %.12e' % normals[dim])
                        o.write('\n')
                    vertInd += 1
                    coordCount += 3
                    if vertInd == len(self.indices):
                        break
                ucdInd += 1
        return
# ----------------------------------------------------------------------
# Script entry point: construct the pyre application and hand control to it.
if __name__ == '__main__':
    app = GrabPoints()
    app.run()
# End of file
# -*- coding: utf-8 -*-
"""
Tests for behave.importing.
The module provides a lazy-loading/importing mechanism.
"""
from __future__ import absolute_import
import pytest
from behave.importer import LazyObject, LazyDict, load_module, parse_scoped_name
from behave.formatter.base import Formatter
import sys
import types
# import unittest
class TestTheory(object):
    """Marker base class that tags test-theory helpers (syntactic sugar only)."""
    pass
class ImportModuleTheory(TestTheory):
    """
    Provides a test theory for importing modules.
    """

    @classmethod
    def ensure_module_is_not_imported(cls, module_name):
        # Drop the module from the import cache (no-op when absent).
        sys.modules.pop(module_name, None)
        cls.assert_module_is_not_imported(module_name)

    @staticmethod
    def assert_module_is_imported(module_name):
        loaded_module = sys.modules.get(module_name, None)
        assert module_name in sys.modules
        assert loaded_module is not None

    @staticmethod
    def assert_module_is_not_imported(module_name):
        assert module_name not in sys.modules

    @staticmethod
    def assert_module_with_name(module, name):
        assert isinstance(module, types.ModuleType)
        assert module.__name__ == name
class TestLoadModule(object):
    """Behavioural checks for :func:`load_module`."""
    theory = ImportModuleTheory

    def test_load_module__should_fail_for_unknown_module(self):
        with pytest.raises(ImportError):
            load_module("__unknown_module__")

    def test_load_module__should_succeed_for_already_imported_module(self):
        name = "behave.importer"
        self.theory.assert_module_is_imported(name)

        loaded = load_module(name)
        self.theory.assert_module_with_name(loaded, name)
        self.theory.assert_module_is_imported(name)

    def test_load_module__should_succeed_for_existing_module(self):
        name = "tests.unit._importer_candidate"
        self.theory.ensure_module_is_not_imported(name)

        loaded = load_module(name)
        self.theory.assert_module_with_name(loaded, name)
        self.theory.assert_module_is_imported(name)
class TestLazyObject(object):
    """Tests for LazyObject.get() resolution of 'module:attribute' specs."""

    def test_get__should_succeed_for_known_object(self):
        lazy = LazyObject("behave.importer", "LazyObject")
        value = lazy.get()
        assert value is LazyObject

        lazy2 = LazyObject("behave.importer:LazyObject")
        value2 = lazy2.get()
        assert value2 is LazyObject

        lazy3 = LazyObject("behave.formatter.steps", "StepsFormatter")
        value3 = lazy3.get()
        assert issubclass(value3, Formatter)

    def test_get__should_fail_for_unknown_module(self):
        lazy = LazyObject("__unknown_module__", "xxx")
        with pytest.raises(ImportError):
            lazy.get()

    def test_get__should_fail_for_unknown_object_in_module(self):
        # fix: use the real candidate module path (same as TestLoadModule uses)
        # so this exercises the unknown-attribute path; the old
        # "test._importer_candidate" failed on the unknown module name instead
        lazy = LazyObject("tests.unit._importer_candidate", "xxx")
        with pytest.raises(ImportError):
            lazy.get()
class LazyDictTheory(TestTheory):
    """Assertion helpers describing the laziness of LazyDict entries."""

    @staticmethod
    def safe_getitem(data, key):
        # Bypass LazyDict.__getitem__ so the raw stored value is inspected
        # without triggering lazy loading.
        return dict.__getitem__(data, key)

    @staticmethod
    def assert_is_lazy_object(obj):
        assert isinstance(obj, LazyObject)

    @staticmethod
    def assert_is_not_lazy_object(obj):
        assert not isinstance(obj, LazyObject)

    @classmethod
    def assert_item_is_lazy(cls, data, key):
        stored = cls.safe_getitem(data, key)
        cls.assert_is_lazy_object(stored)

    @classmethod
    def assert_item_is_not_lazy(cls, data, key):
        stored = cls.safe_getitem(data, key)
        cls.assert_is_not_lazy_object(stored)
class TestLazyDict(object):
    """Item-access behaviour of LazyDict, including lazy loading on access."""
    theory = LazyDictTheory

    def test_unknown_item_access__should_raise_keyerror(self):
        lazy_dict = LazyDict({"alice": 42})
        with pytest.raises(KeyError):
            lazy_dict["unknown"]

    def test_plain_item_access__should_succeed(self):
        theory = LazyDictTheory
        lazy_dict = LazyDict({"alice": 42})
        theory.assert_item_is_not_lazy(lazy_dict, "alice")

        stored = lazy_dict["alice"]
        assert stored == 42

    def test_lazy_item_access__should_load_object(self):
        ImportModuleTheory.ensure_module_is_not_imported("inspect")
        lazy_dict = LazyDict({"alice": LazyObject("inspect:ismodule")})
        # Still lazy until the first access (checked twice on purpose).
        self.theory.assert_item_is_lazy(lazy_dict, "alice")
        self.theory.assert_item_is_lazy(lazy_dict, "alice")

        loaded = lazy_dict["alice"]
        self.theory.assert_is_not_lazy_object(loaded)
        self.theory.assert_item_is_not_lazy(lazy_dict, "alice")

    def METHOD_NAME(self):
        lazy_dict = LazyDict({"bob": LazyObject("__unknown_module__", "xxx")})
        with pytest.raises(ImportError):
            lazy_dict["bob"]

    def test_lazy_item_access__should_fail_with_unknown_object(self):
        lazy_dict = LazyDict({
            "bob": LazyObject("behave.importer", "XUnknown")
        })
        with pytest.raises(ImportError):
            lazy_dict["bob"]
5,413 | test failure 2 | import unittest
import hcl2
from checkov.terraform.checks.resource.gcp.GoogleComputeIPForward import check
from checkov.common.models.enums import CheckResult
class TestGoogleComputeIPForward(unittest.TestCase):
    """Tests for the GoogleComputeIPForward check across GCP compute resource types."""

    @staticmethod
    def _scan(resource_type, hcl_snippet):
        """Parse *hcl_snippet* and run the check on the 'default' block of *resource_type*."""
        hcl_res = hcl2.loads(hcl_snippet)
        resource_conf = hcl_res['resource'][0][resource_type]['default']
        return check.scan_resource_conf(conf=resource_conf)

    def test_failure(self):
        # can_ip_forward enabled on a plain instance -> FAILED.
        scan_result = self._scan('google_compute_instance', """
            resource "google_compute_instance" "default" {
                name = "test"
                machine_type = "n1-standard-1"
                zone = "us-central1-a"
                can_ip_forward = true
            }
        """)
        self.assertEqual(CheckResult.FAILED, scan_result)

    def test_failure_1(self):
        # can_ip_forward enabled on an instance template -> FAILED.
        scan_result = self._scan('google_compute_instance_template', """
            resource "google_compute_instance_template" "default" {
                name = "test"
                machine_type = "n1-standard-1"
                zone = "us-central1-a"
                can_ip_forward = true
            }
        """)
        self.assertEqual(CheckResult.FAILED, scan_result)

    def METHOD_NAME(self):
        # can_ip_forward enabled on an instance-from-template -> FAILED.
        scan_result = self._scan('google_compute_instance_from_template', """
            resource "google_compute_instance_from_template" "default" {
                name = "test"
                source_instance_template = google_compute_instance_template.default.id
                can_ip_forward = true
            }
        """)
        self.assertEqual(CheckResult.FAILED, scan_result)

    def test_success(self):
        # can_ip_forward absent on an instance -> PASSED.
        scan_result = self._scan('google_compute_instance', """
            resource "google_compute_instance" "default" {
                name = "test"
                machine_type = "n1-standard-1"
                zone = "us-central1-a"
            }
        """)
        self.assertEqual(CheckResult.PASSED, scan_result)

    def test_success_2(self):
        # A "gke-"-named instance passes even with can_ip_forward enabled
        # (the expected result shows GKE-managed nodes are exempted).
        scan_result = self._scan('google_compute_instance', """
            resource "google_compute_instance" "default" {
                name = "gke-test"
                machine_type = "n1-standard-1"
                zone = "us-central1-a"
                can_ip_forward = true
            }
        """)
        self.assertEqual(CheckResult.PASSED, scan_result)

    def test_success_3(self):
        # can_ip_forward absent on a template -> PASSED.
        scan_result = self._scan('google_compute_instance_template', """
            resource "google_compute_instance_template" "default" {
                name = "test"
                machine_type = "n1-standard-1"
                zone = "us-central1-a"
                boot_disk {}
            }
        """)
        self.assertEqual(CheckResult.PASSED, scan_result)

    def test_success_4(self):
        # can_ip_forward explicitly disabled -> PASSED.
        scan_result = self._scan('google_compute_instance_from_template', """
            resource "google_compute_instance_from_template" "default" {
                name = "test"
                source_instance_template = google_compute_instance_template.default.id
                can_ip_forward = false
            }
        """)
        self.assertEqual(CheckResult.PASSED, scan_result)

    def test_unknown(self):
        # For instance-from-template the setting may come from the template,
        # so an absent can_ip_forward is UNKNOWN rather than PASSED.
        scan_result = self._scan('google_compute_instance_from_template', """
            resource "google_compute_instance_from_template" "default" {
                name = "test"
                source_instance_template = google_compute_instance_template.default.id
            }
        """)
        self.assertEqual(CheckResult.UNKNOWN, scan_result)
if __name__ == '__main__':
    # Allow running this test module directly with `python <file>`.
    unittest.main()
5,414 | ease in quint | #!/usr/bin/python3
"""
made with the file cjs/tweener/equations.js
Original copyright notice of the source file:
Copyright 2008 litl, LLC.
Equations
Main equations for the Tweener class
@author Zeh Fernando, Nate Chatellier
@version 1.0.2
Disclaimer for Robert Penner's Easing Equations license:
TERMS OF USE - EASING EQUATIONS
Open source under the BSD License.
Copyright (c) 2001 Robert Penner
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the author nor the names of contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
TWEENING EQUATIONS functions
(the original equations are Robert Penner's work as mentioned on the disclaimer)
"""
from __future__ import division
import math
def easeNone(t, b, c, d):
    """Linear tween (no easing): value moves at constant velocity.

    t: current time, b: start value, c: total change, d: duration.
    """
    return c * t / d + b
def easeInQuad(t, b, c, d):
    """Quadratic ease-in: accelerates from zero velocity."""
    fraction = t / d
    return c * fraction * fraction + b
def easeOutQuad(t, b, c, d):
    """Quadratic ease-out: decelerates to zero velocity."""
    fraction = t / d
    return -c * fraction * (fraction - 2) + b
def easeInOutQuad(t, b, c, d):
    """Quadratic ease-in/out: accelerate until halfway, then decelerate."""
    t /= d / 2
    if t < 1:
        # First half: plain quadratic ease-in at double speed.
        return c / 2 * t * t + b
    # Second half: mirrored quadratic ease-out.
    t -= 1
    return -c / 2 * (t * (t - 2) - 1) + b
def easeOutInQuad(t, b, c, d):
    """Quadratic ease-out for the first half, ease-in for the second half."""
    half_c = c / 2
    if t < d / 2:
        return easeOutQuad(t * 2, b, half_c, d)
    return easeInQuad(t * 2 - d, b + half_c, half_c, d)
def easeInCubic(t, b, c, d):
t /= d
return c * t ** 3 + b
def easeOutCubic(t, b, c, d):
t = t / d - 1
return c * (t ** 3 + 1) + b
def easeInOutCubic(t, b, c, d):
t /= d / 2
if t < 1:
return c / 2 * t ** 3 + b
t -= 2
return c / 2 * (t ** 3 + 2) + b
def easeOutInCubic(t, b, c, d):
if t < d / 2:
return easeOutCubic (t * 2, b, c / 2, d)
return easeInCubic((t * 2) - d, b + c / 2, c / 2, d)
def easeInQuart(t, b, c, d):
t /= d
return c * t ** 4 + b
def easeOutQuart(t, b, c, d):
t = t / d - 1
return -c * (t ** 4 - 1) + b
def easeInOutQuart(t, b, c, d):
t /= d / 2
if t < 1:
return c / 2 * t ** 4 + b
t -= 2
return -c / 2 * (t ** 4 - 2) + b
def easeOutInQuart(t, b, c, d):
if t < d / 2:
return easeOutQuart(t * 2, b, c / 2, d)
return easeInQuart((t * 2) - d, b + c / 2, c / 2, d)
def METHOD_NAME(t, b, c, d):
    """Quintic ease-in: accelerates from zero velocity (t**5 curve)."""
    fraction = t / d
    return c * fraction ** 5 + b
def easeOutQuint(t, b, c, d):
t = t / d - 1
return c * (t ** 5 + 1) + b
def easeInOutQuint(t, b, c, d):
t /= d / 2
if t < 1:
return c / 2 * t ** 5 + b
t -= 2
return c / 2 * (t ** 5 + 2) + b
def easeOutInQuint(t, b, c, d):
    """Quintic ease-out for the first half, ease-in for the second half."""
    half_c = c / 2
    if t < d / 2:
        return easeOutQuint(t * 2, b, half_c, d)
    return METHOD_NAME(t * 2 - d, b + half_c, half_c, d)
def easeInSine(t, b, c, d):
return -c * math.cos(t / d * (math.pi / 2)) + c + b
def easeOutSine(t, b, c, d):
return c * math.sin(t / d * (math.pi / 2)) + b
def easeInOutSine(t, b, c, d):
return -c / 2 * (math.cos(math.pi * t / d) - 1) + b
def easeOutInSine(t, b, c, d):
if t < d / 2:
return easeOutSine(t * 2, b, c / 2, d)
return easeInSine((t * 2) - d, b + c / 2, c / 2, d)
def easeInExpo(t, b, c, d):
if t <= 0:
return b
return c * pow(2, 10 * (t / d - 1)) + b
def easeOutExpo(t, b, c, d):
if t >= d:
return b + c
return c * (-pow(2, -10 * t / d) + 1) + b
def easeInOutExpo(t, b, c, d):
    """Exponential ease-in/out, with exact clamping at both endpoints."""
    # Clamp: pow(2, x) never reaches 0 or 1 exactly, so pin the boundaries.
    if t <= 0:
        return b
    if t >= d:
        return b + c
    t /= d / 2
    if t < 1:
        # First half: exponential growth.
        return c / 2 * pow(2, 10 * (t - 1)) + b
    # Second half: mirrored exponential decay.
    return c / 2 * (-pow(2, -10 * (t - 1)) + 2) + b
def easeOutInExpo(t, b, c, d):
if t < d / 2:
return easeOutExpo (t * 2, b, c / 2, d)
return easeInExpo((t * 2) - d, b + c / 2, c / 2, d)
def easeInCirc(t, b, c, d):
t /= d
return -c * (math.sqrt(1 - t * t) - 1) + b
def easeOutCirc(t, b, c, d):
t = t / d - 1
return c * math.sqrt(1 - t * t) + b
def easeInOutCirc(t, b, c, d):
t /= d / 2
if t < 1:
return -c / 2 * (math.sqrt(1 - t * t) - 1) + b
t -= 2
return c / 2 * (math.sqrt(1 - t * t) + 1) + b
def easeOutInCirc(t, b, c, d):
if t < d / 2:
return easeOutCirc(t * 2, b, c / 2, d)
return easeInCirc((t * 2) - d, b + c / 2, c / 2, d)
def easeInElastic(t, b, c, d):
    """Elastic ease-in: exponentially growing sinusoidal oscillation."""
    # Exact boundary clamping.
    if t <= 0:
        return b
    t /= d
    if t >= 1:
        return b + c
    period = d * .3
    amplitude = c
    phase_shift = period / 4
    t -= 1
    return -(amplitude * pow(2, 10 * t) * math.sin((t * d - phase_shift) * (2 * math.pi) / period)) + b
def easeOutElastic(t, b, c, d):
if t <= 0:
return b
t /= d
if t >= 1:
return b + c
p = d * .3
a = c
s = p / 4
return a * pow(2, -10 * t) * math.sin((t * d - s) * 2 * math.pi / p) + c + b
def easeInOutElastic(t, b, c, d):
if t <= 0:
return b
t /= d / 2
if t >= 2:
return b + c
p = d * (.3 * 1.5)
s = p / 4
a = c
if t < 1:
t -= 1
return -.5 * (a * pow(2, (10 * t)) * math.sin((t * d - s) * 2 * math.pi / p)) + b
t -= 1
return a * pow(2, (-10 * t)) * math.sin((t * d - s) * 2 * math.pi / p) * .5 + c + b
def easeOutInElastic(t, b, c, d):
if t < d / 2:
return easeOutElastic (t * 2, b, c / 2, d)
return easeInElastic((t * 2) - d, b + c / 2, c / 2, d)
def easeInBack(t, b, c, d):
s = 1.70158
t /= d
return c * t * t * ((s + 1) * t - s) + b
def easeOutBack(t, b, c, d):
s = 1.70158
t = t / d - 1
return c * (t * t * ((s + 1) * t + s) + 1) + b
def easeInOutBack(t, b, c, d):
s = 1.70158 * 1.525
t /= d / 2
if t < 1:
return c / 2 * (t * t * ((s + 1) * t - s)) + b
t -= 2
return c / 2 * (t * t * ((s + 1) * t + s) + 2) + b
def easeOutInBack(t, b, c, d):
if t < d / 2:
return easeOutBack (t * 2, b, c / 2, d)
return easeInBack((t * 2) - d, b + c / 2, c / 2, d)
def easeInBounce(t, b, c, d):
    """Bounce ease-in: the time-reversed mirror of easeOutBounce."""
    mirrored = easeOutBounce(d - t, 0, c, d)
    return c - mirrored + b
def easeOutBounce(t, b, c, d):
    """Bounce ease-out: four parabolic bounces of decreasing height.

    Branch thresholds (1/2.75, 2/2.75, 2.5/2.75) select which bounce the
    normalized time falls into; each branch re-centers t on its parabola.
    """
    t /= d
    if t < (1 / 2.75):
        bounce = 7.5625 * t * t
    elif t < (2 / 2.75):
        t -= (1.5 / 2.75)
        bounce = 7.5625 * t * t + .75
    elif t < (2.5 / 2.75):
        t -= (2.25 / 2.75)
        bounce = 7.5625 * t * t + .9375
    else:
        t -= (2.625 / 2.75)
        bounce = 7.5625 * t * t + .984375
    return c * bounce + b
def easeInOutBounce(t, b, c, d):
if t < d / 2:
return easeInBounce (t * 2, 0, c, d) * .5 + b
return easeOutBounce (t * 2 - d, 0, c, d) * .5 + c*.5 + b
def easeOutInBounce(t, b, c, d):
if t < d / 2:
return easeOutBounce (t * 2, b, c / 2, d)
return easeInBounce((t * 2) - d, b + c / 2, c / 2, d) |
5,415 | test checks | import pytest
from pkgcheck import checks as checks_mod
from pkgcheck import objects, results
from ..misc import init_check
def METHOD_NAME():
    """Scan through all public checks and verify various aspects."""
    # Every registered check class must declare a non-empty known_results.
    for name, cls in objects.CHECKS.items():
        assert cls.known_results, f"check class {name!r} doesn't define known results"
def test_keywords():
    """Scan through all public result keywords and verify various aspects."""
    # Every registered result keyword must define a severity level.
    for name, cls in objects.KEYWORDS.items():
        assert cls.level is not None, f"result class {name!r} missing level"
class TestMetadataError:
    """Test MetadataError attribute registry."""

    def test_reregister_error(self):
        # Defining a second MetadataError subclass for an already-registered
        # metadata attribute ('eapi') must fail at class-creation time.
        with pytest.raises(ValueError, match="metadata attribute 'eapi' already registered"):

            class InvalidEapi2(results.MetadataError, results.VersionResult):
                attr = "eapi"

    def test_register_missing_attr(self):
        # A MetadataError subclass that defines no metadata attribute is
        # rejected at class-creation time as well.
        with pytest.raises(ValueError, match="class missing metadata attributes"):

            class InvalidAttr(results.MetadataError, results.VersionResult):
                pass
class TestGentooRepoCheck:
    """GentooRepoCheck initializes only when scanning the gentoo repo."""

    def test_non_gentoo_repo(self, tool, make_repo):
        # A generic repo causes check initialization to raise SkipCheck.
        self.repo = make_repo()
        args = ["scan", "--repo", self.repo.location]
        options, _ = tool.parse_args(args)
        with pytest.raises(checks_mod.SkipCheck, match="not running against gentoo repo"):
            init_check(checks_mod.GentooRepoCheck, options)

    def test_gentoo_repo(self, tool, make_repo):
        # A repo with repo_id "gentoo" lets the check initialize normally.
        self.repo = make_repo(repo_id="gentoo")
        args = ["scan", "--repo", self.repo.location]
        options, _ = tool.parse_args(args)
        assert init_check(checks_mod.GentooRepoCheck, options)
class TestOverlayCheck:
    """OverlayRepoCheck initializes only when scanning an overlay repo."""

    def test_non_overlay_repo(self, tool, testconfig):
        # Scanning the primary "gentoo" repo is not an overlay scan.
        tool.parser.set_defaults(config_path=testconfig)
        options, _ = tool.parse_args(["scan", "--repo", "gentoo"])
        with pytest.raises(checks_mod.SkipCheck, match="not running against overlay"):
            init_check(checks_mod.OverlayRepoCheck, options)

    def test_overlay_repo(self, tool, testconfig):
        # Scanning the configured "overlay" repo lets the check initialize.
        tool.parser.set_defaults(config_path=testconfig)
        options, _ = tool.parse_args(["scan", "--repo", "overlay"])
        assert init_check(checks_mod.OverlayRepoCheck, options)
class TestGitCommitsCheck:
    """GitCommitsCheck requires --commits and local commits to scan."""

    @pytest.fixture(autouse=True)
    def _setup(self, tool, make_repo, make_git_repo):
        # initialize parent repo
        self.parent_git_repo = make_git_repo()
        self.parent_repo = make_repo(self.parent_git_repo.path, repo_id="gentoo", arches=["amd64"])
        self.parent_git_repo.add_all("initial commit")
        # initialize child repo tracking the parent as "origin"
        self.child_git_repo = make_git_repo()
        self.child_git_repo.run(["git", "remote", "add", "origin", self.parent_git_repo.path])
        self.child_git_repo.run(["git", "pull", "origin", "main"])
        self.child_git_repo.run(["git", "remote", "set-head", "origin", "main"])
        self.child_repo = make_repo(self.child_git_repo.path)

    def test_no_commits_option(self, tool, make_git_repo):
        # Without --commits the check refuses to initialize.
        options, _ = tool.parse_args(["scan", "--repo", self.child_repo.location])
        with pytest.raises(checks_mod.SkipCheck, match="not scanning against git commits"):
            init_check(checks_mod.GitCommitsCheck, options)

    def test_commits_option(self, tool, make_repo):
        # With --commits and a local commit the check initializes.
        self.child_repo.create_ebuild("cat/pkg-1")
        self.child_git_repo.add_all("cat/pkg-1")
        options, _ = tool.parse_args(["scan", "--repo", self.child_repo.location, "--commits"])
        assert init_check(checks_mod.GitCommitsCheck, options)

    def test_no_local_commits(self, tool):
        # No local commits -> argument parsing exits cleanly (nothing to scan).
        with pytest.raises(SystemExit) as excinfo:
            tool.parse_args(["scan", "--repo", self.child_repo.location, "--commits"])
        assert excinfo.value.code == 0
        # parent repo has new commits; pulling them still leaves the child
        # without *local* commits, so the scan exits cleanly again
        self.parent_repo.create_ebuild("cat/pkg-1")
        self.parent_git_repo.add_all("cat/pkg-1")
        self.child_git_repo.run(["git", "pull", "origin", "main"])
        with pytest.raises(SystemExit) as excinfo:
            tool.parse_args(["scan", "--repo", self.child_repo.location, "--commits"])
        assert excinfo.value.code == 0
class TestNetworkCheck:
    """NetworkCheck is skipped unless the --net option is given."""

    def test_network_disabled(self, tool):
        opts, _ = tool.parse_args(["scan"])
        with pytest.raises(checks_mod.SkipCheck, match="network checks not enabled"):
            init_check(checks_mod.NetworkCheck, opts)

    def test_network_enabled(self, tool):
        opts, _ = tool.parse_args(["scan", "--net"])
        assert init_check(checks_mod.NetworkCheck, opts)
5,416 | test show with app | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2023, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations # isort:skip
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
from unittest.mock import MagicMock, Mock, patch
# Bokeh imports
from bokeh.application.application import Application
from bokeh.io.doc import curdoc
from bokeh.io.output import output_notebook
from bokeh.io.state import State, curstate
from bokeh.models import ColumnDataSource, GlyphRenderer, Plot
# Module under test
import bokeh.io.showing as bis # isort:skip
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
@patch('bokeh.io.showing._show_with_state')
def test_show_with_default_args(mock__show_with_state: MagicMock) -> None:
    """show() with default kwargs forwards (obj, state, None, "tab")."""
    curstate().reset()
    plot = Plot()
    bis.show(plot, browser=None, new="tab", notebook_handle=False)
    assert mock__show_with_state.call_count == 1
    call_args, call_kwargs = mock__show_with_state.call_args
    assert call_args == (plot, curstate(), None, "tab")
    assert call_kwargs == {'notebook_handle': False}
    # Showing must not attach the object to the current document.
    assert curdoc().roots == []
@patch('bokeh.io.showing._show_with_state')
def test_show_with_explicit_args(mock__show_with_state: MagicMock) -> None:
    """Explicit browser/new/notebook_handle values are forwarded unchanged."""
    curstate().reset()
    plot = Plot()
    bis.show(plot, browser="browser", new="new", notebook_handle=True)
    assert mock__show_with_state.call_count == 1
    call_args, call_kwargs = mock__show_with_state.call_args
    assert call_args == (plot, curstate(), "browser", "new")
    assert call_kwargs == {'notebook_handle': True}
    # Showing must not attach the object to the current document.
    assert curdoc().roots == []
@patch('bokeh.io.showing.run_notebook_hook')
def METHOD_NAME(mock_run_notebook_hook: MagicMock, ipython) -> None:
    """show() with an Application dispatches the "app" notebook hook."""
    curstate().reset()
    app = Application()
    output_notebook()  # switch the current state into jupyter notebook mode
    bis.show(app, notebook_url="baz")
    assert curstate().notebook_type == "jupyter"
    # Hook invoked once as (notebook_type, "app", app, state, notebook_url).
    assert mock_run_notebook_hook.call_count == 1
    assert mock_run_notebook_hook.call_args[0][0] == curstate().notebook_type
    assert mock_run_notebook_hook.call_args[0][1:] == ("app", app, curstate(), "baz")
    assert mock_run_notebook_hook.call_args[1] == {}
@patch('bokeh.io.showing._show_with_state')
def test_show_doesn_not_adds_obj_to_curdoc(m) -> None:
    """show() must not attach shown objects to the current document."""
    # NOTE(review): the function name has a typo ("doesn_not_adds"); renaming
    # would not break anything but is left unchanged here.
    curstate().reset()
    assert curstate().document.roots == []
    p = Plot()
    bis.show(p)
    assert curstate().document.roots == []
    # A second show of a fresh plot must also leave the document empty.
    p = Plot()
    bis.show(p)
    assert curstate().document.roots == []
@pytest.mark.parametrize('obj', [1, 2.3, None, "str", GlyphRenderer(data_source=ColumnDataSource())])
def test_show_with_bad_object(obj) -> None:
    """show() raises ValueError for objects that are not showable."""
    with pytest.raises(ValueError):
        bis.show(obj)
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
@patch('bokeh.io.showing.run_notebook_hook')
@patch('bokeh.io.showing._show_file_with_state')
@patch('bokeh.io.showing.get_browser_controller')
def test__show_with_state_with_notebook(
        mock_get_browser_controller: MagicMock,
        mock__show_file_with_state: MagicMock,
        mock_run_notebook_hook: MagicMock) -> None:
    """_show_with_state() in notebook mode runs the "doc" notebook hook;
    with file output also active it additionally shows the file."""
    mock_get_browser_controller.return_value = "controller"
    s = State()
    p = Plot()
    s.output_notebook()
    # Notebook-only: the hook fires, no file is shown.
    bis._show_with_state(p, s, "browser", "new")
    assert s.notebook_type == "jupyter"
    assert mock_run_notebook_hook.call_count == 1
    assert mock_run_notebook_hook.call_args[0] == ("jupyter", "doc", p, s, False)
    assert mock_run_notebook_hook.call_args[1] == {}
    assert mock__show_file_with_state.call_count == 0
    # Notebook + file output: the hook fires again AND the file is shown.
    s.output_file("foo.html")
    bis._show_with_state(p, s, "browser", "new")
    assert s.notebook_type == "jupyter"
    assert mock_run_notebook_hook.call_count == 2
    assert mock_run_notebook_hook.call_args[0] == ("jupyter", "doc", p, s, False)
    assert mock_run_notebook_hook.call_args[1] == {}
    assert mock__show_file_with_state.call_count == 1
    assert mock__show_file_with_state.call_args[0] == (p, s, "new", "controller")
    assert mock__show_file_with_state.call_args[1] == {}
@patch('bokeh.io.notebook.get_comms')
@patch('bokeh.io.notebook.show_doc')
@patch('bokeh.io.showing._show_file_with_state')
@patch('bokeh.io.showing.get_browser_controller')
def test__show_with_state_with_no_notebook(
        mock_get_browser_controller: MagicMock,
        mock__show_file_with_state: MagicMock,
        mock_show_doc: MagicMock,
        mock_get_comms: MagicMock):
    """_show_with_state() with only file output shows the file and never
    touches the notebook path."""
    mock_get_browser_controller.return_value = "controller"
    mock_get_comms.return_value = "comms"
    s = State()
    s.output_file("foo.html")
    bis._show_with_state("obj", s, "browser", "new")
    # No notebook mode was activated, so no notebook doc is shown.
    assert s.notebook_type is None
    assert mock_show_doc.call_count == 0
    assert mock__show_file_with_state.call_count == 1
    assert mock__show_file_with_state.call_args[0] == ("obj", s, "new", "controller")
    assert mock__show_file_with_state.call_args[1] == {}
@patch('os.path.abspath')
@patch('bokeh.io.showing.save')
def test(mock_save: MagicMock, mock_abspath: MagicMock):
    """_show_file_with_state() saves the object, then opens the file:// URL
    with new=1 for "window" and new=2 for "tab".

    NOTE(review): the generic name ``test`` is uninformative; consider
    renaming to ``test__show_file_with_state``.
    """
    controller = Mock()
    mock_save.return_value = "savepath"
    s = State()
    s.output_file("foo.html")
    # "window" maps to controller.open(..., new=1).
    bis._show_file_with_state("obj", s, "window", controller)
    assert mock_save.call_count == 1
    assert mock_save.call_args[0] == ("obj",)
    assert mock_save.call_args[1] == {"state": s}
    assert controller.open.call_count == 1
    assert controller.open.call_args[0] == ("file://savepath",)
    assert controller.open.call_args[1] == {"new": 1}
    # "tab" maps to controller.open(..., new=2).
    bis._show_file_with_state("obj", s, "tab", controller)
    assert mock_save.call_count == 2
    assert mock_save.call_args[0] == ("obj",)
    assert mock_save.call_args[1] == {"state": s}
    assert controller.open.call_count == 2
    assert controller.open.call_args[0] == ("file://savepath",)
    assert controller.open.call_args[1] == {"new": 2}
#-----------------------------------------------------------------------------
# Code
#----------------------------------------------------------------------------- |
5,417 | tanh | """
mathematical functions.
Descriptions taken from:
https://raw.githubusercontent.com/micropython/micropython/master/docs/library/math.rst.
=====================================
.. module:: math
:synopsis: mathematical functions
|see_cpython_module| :mod:`python:math`.
The ``math`` module provides some basic mathematical functions for
working with floating-point numbers.
*Note:* On the pyboard, floating-point numbers have 32-bit precision.
Availability: not available on WiPy. Floating point support required
for this module.
"""
__author__ = "Howard C Lovatt"
__copyright__ = "Howard C Lovatt, 2020 onwards."
__license__ = "MIT https://opensource.org/licenses/MIT (as used by MicroPython)."
__version__ = "7.3.9" # Version set by https://github.com/hlovatt/tag2ver
from typing import SupportsFloat, Final
def acos(x: SupportsFloat, /) -> float:
"""
Return the inverse cosine of ``x``.
"""
def acosh(x: SupportsFloat, /) -> float:
"""
Return the inverse hyperbolic cosine of ``x``.
"""
def asin(x: SupportsFloat, /) -> float:
"""
Return the inverse sine of ``x``.
"""
def asinh(x: SupportsFloat, /) -> float:
"""
Return the inverse hyperbolic sine of ``x``.
"""
def atan(x: SupportsFloat, /) -> float:
"""
Return the inverse tangent of ``x``.
"""
def atan2(y: SupportsFloat, x: SupportsFloat, /) -> float:
"""
Return the principal value of the inverse tangent of ``y/x``.
"""
def atanh(x: SupportsFloat, /) -> float:
"""
Return the inverse hyperbolic tangent of ``x``.
"""
def ceil(x: SupportsFloat, /) -> int:
"""
Return an integer, being ``x`` rounded towards positive infinity.
"""
def copysign(x: SupportsFloat, y: SupportsFloat, /) -> float:
"""
Return ``x`` with the sign of ``y``.
"""
def cos(x: SupportsFloat, /) -> float:
"""
Return the cosine of ``x``.
"""
def cosh(x: SupportsFloat, /) -> float:
"""
Return the hyperbolic cosine of ``x``.
"""
def degrees(x: SupportsFloat, /) -> float:
"""
Return radians ``x`` converted to degrees.
"""
def erf(x: SupportsFloat, /) -> float:
"""
Return the error function of ``x``.
"""
def erfc(x: SupportsFloat, /) -> float:
"""
Return the complementary error function of ``x``.
"""
def exp(x: SupportsFloat, /) -> float:
"""
Return the exponential of ``x``.
"""
def expm1(x: SupportsFloat, /) -> float:
"""
Return ``exp(x) - 1``.
"""
def fabs(x: SupportsFloat, /) -> float:
"""
Return the absolute value of ``x``.
"""
def floor(x: SupportsFloat, /) -> int:
"""
Return an integer, being ``x`` rounded towards negative infinity.
"""
def fmod(x: SupportsFloat, y: SupportsFloat, /) -> float:
"""
Return the remainder of ``x/y``.
"""
def frexp(x: SupportsFloat, /) -> tuple[float, int]:
"""
Decomposes a floating-point number into its mantissa and exponent.
The returned value is the tuple ``(m, e)`` such that ``x == m * 2**e``
exactly. If ``x == 0`` then the function returns ``(0.0, 0)``, otherwise
the relation ``0.5 <= abs(m) < 1`` holds.
"""
def gamma(x: SupportsFloat, /) -> float:
"""
Return the gamma function of ``x``.
"""
def isfinite(x: SupportsFloat, /) -> bool:
"""
Return ``True`` if ``x`` is finite.
"""
def isinf(x: SupportsFloat, /) -> bool:
"""
Return ``True`` if ``x`` is infinite.
"""
def isnan(x: SupportsFloat, /) -> bool:
    """
    Return ``True`` if ``x`` is not-a-number (NaN).
    """
# noinspection PyShadowingNames
def ldexp(x: SupportsFloat, exp: int, /) -> float:
"""
Return ``x * (2**exp)``.
"""
def lgamma(x: SupportsFloat, /) -> float:
"""
Return the natural logarithm of the gamma function of ``x``.
"""
def log(x: SupportsFloat, /) -> float:
"""
Return the natural logarithm of ``x``.
"""
def log10(x: SupportsFloat, /) -> float:
"""
Return the base-10 logarithm of ``x``.
"""
def log2(x: SupportsFloat, /) -> float:
"""
Return the base-2 logarithm of ``x``.
"""
def modf(x: SupportsFloat, /) -> tuple[float, float]:
"""
Return a tuple of two floats, being the fractional and integral parts of
``x``. Both return values have the same sign as ``x``.
"""
def pow(x: SupportsFloat, y: SupportsFloat, /) -> float:
"""
Returns ``x`` to the power of ``y``.
"""
def radians(x: SupportsFloat, /) -> float:
"""
Return degrees ``x`` converted to radians.
"""
def sin(x: SupportsFloat, /) -> float:
"""
Return the sine of ``x``.
"""
def sinh(x: SupportsFloat, /) -> float:
"""
Return the hyperbolic sine of ``x``.
"""
def sqrt(x: SupportsFloat, /) -> float:
"""
Return the square root of ``x``.
"""
def tan(x: SupportsFloat, /) -> float:
"""
Return the tangent of ``x``.
"""
def METHOD_NAME(x: SupportsFloat, /) -> float:
"""
Return the hyperbolic tangent of ``x``.
"""
def trunc(x: SupportsFloat, /) -> int:
    """
    Return an integer, being ``x`` rounded towards 0.
    """
    # FIX: return annotation corrected from ``float`` to ``int`` -- the
    # docstring and the sibling ``ceil``/``floor`` stubs (and CPython's
    # math.trunc) all return an int.
e: Final[float] = ...
"""
base of the natural logarithm
"""
pi: Final[float] = ...
"""
the ratio of a circle's circumference to its diameter
""" |
5,418 | test graph maximum bipartite matching | from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import assert_equal
from scipy.sparse.csgraph import (reverse_cuthill_mckee,
maximum_bipartite_matching, structural_rank)
from scipy.sparse import diags, csc_matrix, csr_matrix, coo_matrix
def test_graph_reverse_cuthill_mckee():
    """RCM on a small symmetric graph must reproduce the known ordering."""
    adjacency = np.array([[1, 0, 0, 0, 1, 0, 0, 0],
                          [0, 1, 1, 0, 0, 1, 0, 1],
                          [0, 1, 1, 0, 1, 0, 0, 0],
                          [0, 0, 0, 1, 0, 0, 1, 0],
                          [1, 0, 1, 0, 1, 0, 0, 0],
                          [0, 1, 0, 0, 0, 1, 0, 1],
                          [0, 0, 0, 1, 0, 0, 1, 0],
                          [0, 1, 0, 0, 0, 1, 0, 1]], dtype=int)
    graph = csr_matrix(adjacency)
    expected_perm = np.array([6, 3, 7, 5, 1, 2, 4, 0])
    assert_equal(reverse_cuthill_mckee(graph), expected_perm)
    # The routine must also accept int64 index arrays (symmetric mode).
    graph.indices = graph.indices.astype('int64')
    graph.indptr = graph.indptr.astype('int64')
    assert_equal(reverse_cuthill_mckee(graph, True), expected_perm)
def test_graph_reverse_cuthill_mckee_ordering():
    """RCM on a larger 16-node graph supplied in COO form."""
    weights = np.ones(63, dtype=int)
    rows = np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2,
                     2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5,
                     6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8, 9, 9,
                     9, 10, 10, 10, 10, 10, 11, 11, 11, 11,
                     12, 12, 12, 13, 13, 13, 13, 14, 14, 14,
                     14, 15, 15, 15, 15, 15])
    cols = np.array([0, 2, 5, 8, 10, 1, 3, 9, 11, 0, 2,
                     7, 10, 1, 3, 11, 4, 6, 12, 14, 0, 7, 13,
                     15, 4, 6, 14, 2, 5, 7, 15, 0, 8, 10, 13,
                     1, 9, 11, 0, 2, 8, 10, 15, 1, 3, 9, 11,
                     4, 12, 14, 5, 8, 13, 15, 4, 6, 12, 14,
                     5, 7, 10, 13, 15])
    graph = coo_matrix((weights, (rows, cols))).tocsr()
    expected_perm = np.array([12, 14, 4, 6, 10, 8, 2, 15,
                              0, 13, 7, 5, 9, 11, 1, 3])
    assert_equal(reverse_cuthill_mckee(graph), expected_perm)
def METHOD_NAME():
    """maximum_bipartite_matching permutations must restore a permuted identity.

    The same 25x25 permutation-matrix construction was repeated four times;
    it is factored into a local helper.
    """
    def perm_matrix(row_idx, col_idx):
        # Permutation matrix with ones at positions (row_idx[i], col_idx[i]).
        ones = np.ones(25, dtype=int)
        return coo_matrix((ones, (row_idx, col_idx))).tocsc()

    A = diags(np.ones(25), offsets=0, format='csc')
    rand_perm = np.random.permutation(25)
    rand_perm2 = np.random.permutation(25)
    # Randomly permute the identity matrix on both sides.
    Rmat = perm_matrix(np.arange(25), rand_perm)
    Cmat = perm_matrix(rand_perm2, np.arange(25))
    B = Rmat * A * Cmat

    # Row permute: applying the matching's row permutation must yield a
    # matrix with a fully nonzero diagonal.
    perm = maximum_bipartite_matching(B, perm_type='row')
    C1 = perm_matrix(np.arange(25), perm) * B
    # Column permute.
    perm2 = maximum_bipartite_matching(B, perm_type='column')
    C2 = B * perm_matrix(perm2, np.arange(25))
    assert_equal(any(C1.diagonal() == 0), False)
    assert_equal(any(C2.diagonal() == 0), False)

    # The routine must also accept int64 index arrays.
    B.indices = B.indices.astype('int64')
    B.indptr = B.indptr.astype('int64')
    perm = maximum_bipartite_matching(B, perm_type='row')
    C3 = perm_matrix(np.arange(25), perm) * B
    assert_equal(any(C3.diagonal() == 0), False)
def test_graph_structural_rank():
    """structural_rank() on square, rank-deficient, and rectangular inputs."""
    # Full-structural-rank 3x3 matrix.
    full_rank = csc_matrix([[1, 1, 0],
                            [1, 0, 1],
                            [0, 1, 0]])
    assert_equal(structural_rank(full_rank), 3)
    # 8x8 matrix whose structural rank (6) is below its dimension.
    rows = np.array([0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 3, 3, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7])
    cols = np.array([0, 1, 2, 3, 4, 2, 5, 2, 6, 0, 1, 3, 5, 6, 7, 4, 5, 5, 6, 2, 6, 2, 4])
    ones = np.ones_like(rows)
    deficient = coo_matrix((ones, (rows, cols)), shape=(8, 8))
    assert_equal(structural_rank(deficient), 6)
    # Rectangular matrix and its (tall) transpose share the same rank.
    rect = csc_matrix([[1, 0, 2, 0],
                       [2, 0, 4, 0]])
    assert_equal(structural_rank(rect), 2)
    assert_equal(structural_rank(rect.T), 2)
5,419 | method | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"disk delete",
confirmation="Are you sure you want to perform this operation?",
)
class Delete(AAZCommand):
"""Delete a managed disk.
:example: Delete a managed disk.
az disk delete --name MyManagedDisk --resource-group MyResourceGroup
"""
_aaz_info = {
"version": "2017-03-30",
"resources": [
["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.compute/disks/{}", "2017-03-30"],
]
}
AZ_SUPPORT_NO_WAIT = True
def _handler(self, command_args):
super()._handler(command_args)
return self.build_lro_poller(self._execute_operations, self._output)
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
    """Build (once) and cache the command's argument schema on the class."""
    if cls._args_schema is not None:
        # Schema already built; reuse the cached copy.
        return cls._args_schema
    cls._args_schema = super()._build_arguments_schema(*args, **kwargs)

    # define Arg Group ""
    _args_schema = cls._args_schema
    # --name/-n maps to the URL's {diskName} path parameter (id_part="name").
    _args_schema.disk_name = AAZStrArg(
        options=["-n", "--name", "--disk-name"],
        help="The name of the managed disk that is being created. The name can't be changed after the disk is created. Supported characters for the name are a-z, A-Z, 0-9, _ and -. The maximum name length is 80 characters.",
        required=True,
        id_part="name",
    )
    _args_schema.resource_group = AAZResourceGroupNameArg(
        required=True,
    )
    return cls._args_schema
def _execute_operations(self):
self.pre_operations()
yield self.DisksDelete(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
def _output(self, *args, **kwargs):
result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True)
return result
class DisksDelete(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [202]:
return self.client.build_lro_polling(
self.ctx.args.no_wait,
session,
self.on_200,
self.on_error,
lro_options={"final-state-via": "azure-async-operation"},
path_format_arguments=self.url_parameters,
)
if session.http_response.status_code in [200]:
return self.client.build_lro_polling(
self.ctx.args.no_wait,
session,
self.on_200,
self.on_error,
lro_options={"final-state-via": "azure-async-operation"},
path_format_arguments=self.url_parameters,
)
if session.http_response.status_code in [204]:
return self.client.build_lro_polling(
self.ctx.args.no_wait,
session,
self.on_204,
self.on_error,
lro_options={"final-state-via": "azure-async-operation"},
path_format_arguments=self.url_parameters,
)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/disks/{diskName}",
**self.url_parameters
)
@property
def METHOD_NAME(self):
return "DELETE"
@property
def error_format(self):
return "MgmtErrorFormat"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"diskName", self.ctx.args.disk_name,
required=True,
),
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2017-03-30",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.end_time = AAZStrType(
serialized_name="endTime",
flags={"read_only": True},
)
_schema_on_200.error = AAZObjectType()
_schema_on_200.name = AAZStrType(
flags={"read_only": True},
)
_schema_on_200.start_time = AAZStrType(
serialized_name="startTime",
flags={"read_only": True},
)
_schema_on_200.status = AAZStrType(
flags={"read_only": True},
)
error = cls._schema_on_200.error
error.code = AAZStrType()
error.details = AAZListType()
error.innererror = AAZObjectType()
error.message = AAZStrType()
error.target = AAZStrType()
details = cls._schema_on_200.error.details
details.Element = AAZObjectType()
_element = cls._schema_on_200.error.details.Element
_element.code = AAZStrType()
_element.message = AAZStrType()
_element.target = AAZStrType()
innererror = cls._schema_on_200.error.innererror
innererror.errordetail = AAZStrType()
innererror.exceptiontype = AAZStrType()
return cls._schema_on_200
def on_204(self, session):
pass
class _DeleteHelper:
"""Helper class for Delete"""
__all__ = ["Delete"] |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.testsdk.preparers import RoleBasedServicePrincipalPreparer
from azure.cli.testsdk.scenario_tests.utilities import is_text_payload
from azure.cli.testsdk.utilities import GraphClientPasswordReplacer
from msrestazure.tools import resource_id
MOCK_GUID = '00000000-0000-0000-0000-000000000001'
MOCK_SECRET = 'fake-secret'
class AROClusterServicePrincipalPreparer(RoleBasedServicePrincipalPreparer):
    """Test preparer that creates an ARO cluster service principal.

    Extends the stock ``RoleBasedServicePrincipalPreparer`` so that the real
    client id/secret are scrubbed from recordings (replaced by ``MOCK_GUID`` /
    ``MOCK_SECRET``) and so the backing AAD application is deleted on cleanup.
    """

    def __init__(
        self,
        name_prefix="clitest",
        skip_assignment=True,
        parameter_name="client_id",
        parameter_password="client_secret",
        dev_setting_sp_name="AZURE_CLI_TEST_DEV_SP_NAME",
        dev_setting_sp_password="AZURE_CLI_TEST_DEV_SP_PASSWORD",
        key="aro_csp",
    ):
        super(AROClusterServicePrincipalPreparer, self).__init__(
            name_prefix,
            skip_assignment,
            parameter_name,
            parameter_password,
            dev_setting_sp_name,
            dev_setting_sp_password,
            key,
        )
        # Real credentials captured during live runs; consumed by the
        # scrubbing helpers below. None until _get_csp_credentials runs live.
        self.client_id_to_replace = None
        self.client_secret_to_replace = None

    def create_resource(self, name, **kwargs):
        """Create (or replay) the service principal and expose it to the test."""
        client_id, client_secret = self._get_csp_credentials(name)
        self.test_class_instance.kwargs[self.key] = client_id
        self.test_class_instance.kwargs["{}_pass".format(self.key)] = client_secret
        return {
            self.parameter_name: client_id,
            self.parameter_password: client_secret,
        }

    # Overriden because RoleBasedServicePrincipal.remove_resource does not delete
    # the underlying AAD application generated when creating the service principal
    def remove_resource(self, name, **kwargs):
        super().remove_resource(name, **kwargs)
        if not self.dev_setting_sp_name:
            self.live_only_execute(self.cli_ctx, 'az ad app delete --id {}'.format(self.result.get('appId')))

    def process_request(self, request):
        """Scrub the real client id/secret out of a request before recording."""
        # Guard: client_id_to_replace is None during playback (no scrubbing needed).
        if self.client_id_to_replace and self.client_id_to_replace in request.uri:
            request.uri = request.uri.replace(self.client_id_to_replace, MOCK_GUID)
        if is_text_payload(request) and isinstance(request.body, bytes):
            request.body = self._replace_byte_keys(request.body)
        elif is_text_payload(request) and isinstance(request.body, str):
            request.body = self._replace_string_keys(request.body)
        return request

    def process_response(self, response):
        """Scrub the real client id/secret out of a response before recording."""
        if is_text_payload(response) and response['body']['string']:
            response['body']['string'] = self._replace_string_keys(response['body']['string'])
        return response

    def _get_csp_credentials(self, name):
        """Return (client_id, client_secret); mock values during playback."""
        if not self.live_test and not self.test_class_instance.in_recording:
            return MOCK_GUID, MOCK_SECRET
        client_id, client_secret = self._generate_csp(name)
        # call AbstractPreparer.moniker to make resource counts and self.resource_moniker consistent between live
        # and play-back. see SingleValueReplacer.process_request, AbstractPreparer.__call__._preparer_wrapper
        # and ScenarioTest.create_random_name. This is so that when self.create_random_name is called for the
        # first time during live or playback, it would have the same value.
        # In short, the default sp preparer in live mode does not call moniker, which leads to inconsistent counts.
        _ = self.moniker
        self.client_id_to_replace = client_id
        self.client_secret_to_replace = client_secret
        return client_id, client_secret

    def _generate_csp(self, name):
        """Create the service principal live, or reuse dev-provided credentials."""
        if self.dev_setting_sp_name:
            client_id = self.dev_setting_sp_name
            client_secret = self.dev_setting_sp_password
            return client_id, client_secret
        subscription = self.test_class_instance.get_subscription_id()
        resource_group = self.test_class_instance.kwargs.get('rg')
        command = 'az ad sp create-for-rbac -n {} --role contributor --scopes "{}"'\
            .format(name, resource_id(subscription=subscription, resource_group=resource_group))
        try:
            self.result = self.live_only_execute(self.cli_ctx, command).get_output_in_json()
        except AttributeError:
            # live_only_execute returns None outside live runs.
            # NOTE(review): self.result may be unset on this path — presumably
            # unreachable because playback returns early in
            # _get_csp_credentials; confirm before relying on it.
            pass
        client_id = self.result['appId']
        client_secret = self.result.get('password') or GraphClientPasswordReplacer.PWD_REPLACEMENT
        return client_id, client_secret

    def _replace_string_keys(self, val):
        """Replace real credentials with mock values in a text payload."""
        if self.client_id_to_replace is None:
            return val
        return val.replace(self.client_id_to_replace, MOCK_GUID).replace(self.client_secret_to_replace, MOCK_SECRET)

    def _replace_byte_keys(self, val):
        """Byte-payload variant of _replace_string_keys (assumes UTF-8 text)."""
        return self._replace_string_keys(val.decode('utf-8')).encode('utf-8')
"""
EscapeAll.
pymdownx.escapeall
Escape everything.
MIT license.
Copyright (c) 2017 Isaac Muse <isaacmuse@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from markdown import Extension
from markdown.inlinepatterns import InlineProcessor, SubstituteTagInlineProcessor
from markdown.postprocessors import Postprocessor
from markdown import util as md_util
import re
from . import util
# We need to ignore these as they are used in Markdown processing
STX = '\u0002'
ETX = '\u0003'
ESCAPE_RE = r'\\(.)'
ESCAPE_NO_NL_RE = r'\\([^\n])'
HARDBREAK_RE = r'\\\n'
UNESCAPE_PATTERN = re.compile(r'%s(\d+)%s' % (md_util.STX, md_util.ETX))
class EscapeAllPattern(InlineProcessor):
    """Return an escaped character."""

    def __init__(self, pattern, nbsp, md):
        """Initialize."""
        self.nbsp = nbsp
        InlineProcessor.__init__(self, pattern, md)

    def handleMatch(self, m, data):
        """Convert the char to an escaped character."""
        char = m.group(1)
        if char in ('<', '>', '&'):
            # HTML-special characters must be stashed as entities so they
            # survive serialization without being interpreted as markup.
            if char == '<':
                char = '&lt;'
            elif char == '>':
                char = '&gt;'
            elif char == '&':
                char = '&amp;'
            escape = self.md.htmlStash.store(char)
        elif self.nbsp and char == ' ':
            # Escaped space becomes a non-breaking space when enabled.
            escape = self.md.htmlStash.store('&nbsp;')
        elif char in (STX, ETX):
            # Never wrap Markdown's internal placeholder delimiters.
            escape = char
        else:
            # Stash as STX<ordinal>ETX; unescaped by EscapeAllPostprocessor.
            escape = '%s%s%s' % (md_util.STX, util.get_ord(char), md_util.ETX)
        return escape, m.start(0), m.end(0)
class EscapeAllPostprocessor(Postprocessor):
    """Post processor to strip out unwanted content."""

    def unescape(self, m):
        """Unescape the escaped chars."""
        ordinal = int(m.group(1))
        return util.get_char(ordinal)

    def run(self, text):
        """Search document for escaped chars."""
        restore = self.unescape
        return UNESCAPE_PATTERN.sub(restore, text)
class EscapeAllExtension(Extension):
    """Extension that allows you to escape everything."""

    def __init__(self, *args, **kwargs):
        """Initialize."""
        # Extension options: both default off and are surfaced via
        # the standard Markdown extension-config mechanism.
        self.config = {
            'hardbreak': [
                False,
                "Turn escaped newlines to hardbreaks - Default: False"
            ],
            'nbsp': [
                False,
                "Turn escaped spaces to non-breaking spaces - Default: False"
            ]
        }
        super(EscapeAllExtension, self).__init__(*args, **kwargs)

    def extendMarkdown(self, md):
        """Escape all."""
        config = self.getConfigs()
        hardbreak = config['hardbreak']
        # Replace the default "escape" inline pattern (priority 180).
        # In hardbreak mode the newline is excluded from escapable chars
        # so that "\<newline>" can be claimed by the hardbreak pattern below.
        md.inlinePatterns.register(
            EscapeAllPattern(ESCAPE_NO_NL_RE if hardbreak else ESCAPE_RE, config['nbsp'], md),
            "escape",
            180
        )
        # Restores STX<ordinal>ETX placeholders to real characters at the end.
        md.postprocessors.register(EscapeAllPostprocessor(md), "unescape", 10)
        if config['hardbreak']:
            # "\<newline>" becomes a <br> element.
            md.inlinePatterns.register(SubstituteTagInlineProcessor(HARDBREAK_RE, 'br'), "hardbreak", 5.1)
def makeExtension(*args, **kwargs):
    """Return extension (standard Markdown extension entry point)."""
    return EscapeAllExtension(*args, **kwargs)
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from gcp_common import BaseTest, event_data
class DnsManagedZoneTest(BaseTest):
    """Tests for the gcp.dns-managed-zone resource (query, event, delete)."""

    def test_managed_zone_query(self):
        project_id = 'cloud-custodian'
        managed_zone_name = 'custodian'
        session_factory = self.replay_flight_data(
            'dns-managed-zone-query', project_id=project_id)

        policy = self.load_policy(
            {'name': 'gcp-dns-managed-zone-dryrun',
             'resource': 'gcp.dns-managed-zone'},
            session_factory=session_factory)

        managed_zone_resources = policy.run()
        self.assertEqual(managed_zone_resources[0]['name'], managed_zone_name)
        self.assertEqual(
            policy.resource_manager.get_urns(managed_zone_resources),
            [
                'gcp:dns::cloud-custodian:managed-zone/custodian'
            ],
        )

    def test_managed_zone_get(self):
        project_id = 'cloud-custodian'
        resource_name = 'custodian'
        session_factory = self.replay_flight_data(
            'dns-managed-zone-get', project_id=project_id)

        policy = self.load_policy(
            {'name': 'gcp-dns-managed-zone-dryrun',
             'resource': 'gcp.dns-managed-zone',
             'mode': {
                 'type': 'gcp-audit',
                 'methods': ['dns.managedZones.create']
             }}, session_factory=session_factory)

        exec_mode = policy.get_execution_mode()
        event = event_data('dns-managed-zone-create.json')
        resources = exec_mode.run(event, None)
        self.assertEqual(resources[0]['name'], resource_name)
        self.assertEqual(
            policy.resource_manager.get_urns(resources),
            [
                'gcp:dns::cloud-custodian:managed-zone/custodian'
            ],
        )

    # Renamed from the masked placeholder: must start with "test_" so the
    # unittest runner discovers it, matching the sibling test names.
    def test_managed_zone_delete(self):
        project_id = "cloud-custodian"
        resource_name = "custodian-delete-test"
        factory = self.replay_flight_data('dns-managed-zone-delete')
        p = self.load_policy(
            {'name': 'gcp-dns-managed-zone-delete',
             'resource': 'gcp.dns-managed-zone',
             'filters': [{'name': resource_name}],
             'actions': ['delete']},
            session_factory=factory
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)

        client = p.resource_manager.get_client()
        result = client.execute_query('list', {"project": project_id})
        self.assertNotIn(resource_name, result['managedZones'])
class DnsPolicyTest(BaseTest):
    """Tests for the gcp.dns-policy resource."""

    def test_policy_query(self):
        project = 'cloud-custodian'
        expected_name = 'custodian'
        factory = self.replay_flight_data(
            'dns-policy-query', project_id=project)

        p = self.load_policy(
            {'name': 'gcp-dns-policy-dryrun',
             'resource': 'gcp.dns-policy'},
            session_factory=factory)

        found = p.run()
        self.assertEqual(found[0]['name'], expected_name)
        self.assertEqual(
            p.resource_manager.get_urns(found),
            [
                'gcp:dns::cloud-custodian:policy/custodian'
            ],
        )

    def test_policy_get(self):
        project = 'cloud-custodian'
        expected_name = 'custodian'
        factory = self.replay_flight_data(
            'dns-policy-get', project_id=project)

        p = self.load_policy(
            {'name': 'gcp-dns-policy-dryrun',
             'resource': 'gcp.dns-policy',
             'mode': {
                 'type': 'gcp-audit',
                 'methods': ['dns.policies.create']
             }}, session_factory=factory)

        mode = p.get_execution_mode()
        event = event_data('dns-policy-create.json')
        found = mode.run(event, None)
        self.assertEqual(found[0]['name'], expected_name)
        self.assertEqual(
            p.resource_manager.get_urns(found),
            [
                'gcp:dns::cloud-custodian:policy/custodian'
            ],
        )
class TestDnsResourceRecordsFilter(BaseTest):
    """Tests for the records-sets filter on gcp.dns-managed-zone."""

    def test_query(self):
        project_id = 'cloud-custodian'
        session_factory = self.replay_flight_data(
            'test-dns-resource-records-filter-query', project_id=project_id)

        # Match zones that contain at least one TXT record set.
        policy = self.load_policy(
            {'name': 'dns-resource-record',
             'resource': 'gcp.dns-managed-zone',
             'filters': [{'type': 'records-sets',
                          'attrs': [{
                              'type': 'value',
                              'key': 'type',
                              'op': 'eq',
                              'value': 'TXT'
                          }]
                          }]},
            session_factory=session_factory)

        policy_resources = policy.run()
        self.assertEqual(len(policy_resources), 1)
        self.assertEqual(policy_resources[0]['name'], 'zone-277-red')
from __future__ import annotations
from dataclasses import dataclass
from typing import List, Set
from .solana_tx import SolPubKey
from .solana_tx_legacy import SolLegacyTx
from .constants import ADDRESS_LOOKUP_TABLE_ID
from .errors import ALTError
from .solana_alt_list_filter import ALTListFilter
from .layouts import ALTAccountInfo
@dataclass(frozen=True)
class ALTAddress:
    """Derived address of an Address Lookup Table account.

    table_account:     base58 address of the ALT account
    recent_block_slot: slot used as a derivation seed
    nonce:             bump seed returned by find_program_address
    """
    table_account: str
    recent_block_slot: int
    nonce: int


class ALTInfo:
    """Tracks the account keys stored (or to be stored) in one lookup table."""

    def __init__(self, alt_address: ALTAddress):
        self._alt_address = alt_address
        self._acct_key_set: Set[str] = set()
        self._acct_key_list: List[SolPubKey] = list()
        self._new_acct_key_list: List[SolPubKey] = list()
        # True once the table is known to exist on-chain.
        self._is_exist = False

    @staticmethod
    def derive_lookup_table_address(signer_key: SolPubKey, recent_block_slot: int) -> ALTAddress:
        """Derive the PDA of a lookup table from the signer and a recent slot."""
        acct, nonce = SolPubKey.find_program_address(
            seeds=[bytes(signer_key), recent_block_slot.to_bytes(8, "little")],
            program_id=ADDRESS_LOOKUP_TABLE_ID
        )
        return ALTAddress(str(acct), recent_block_slot, nonce)

    @property
    def alt_address(self) -> ALTAddress:
        return self._alt_address

    @property
    def table_account(self) -> SolPubKey:
        return SolPubKey.from_string(self._alt_address.table_account)

    @property
    def recent_block_slot(self) -> int:
        return self._alt_address.recent_block_slot

    @property
    def nonce(self) -> int:
        return self._alt_address.nonce

    @property
    def account_key_list(self) -> List[SolPubKey]:
        return self._acct_key_list

    @property
    def new_account_key_list(self) -> List[SolPubKey]:
        return self._new_acct_key_list

    @property
    def len_account_key_list(self) -> int:
        return len(self._acct_key_list)

    def is_exist(self) -> bool:
        return self._is_exist

    def init_from_legacy_tx(self, legacy_tx: SolLegacyTx) -> None:
        """Seed the (not yet created) table with accounts from a legacy tx."""
        assert not len(self._acct_key_list)

        legacy_msg = legacy_tx.message
        alt_filter = ALTListFilter(legacy_msg)

        alt_acct_set = alt_filter.filter_alt_account_key_set()
        self._acct_key_set = alt_acct_set
        self._update_account_key_list()
        self._new_acct_key_list = [acct_key for acct_key in self._acct_key_list]
        self._is_exist = False

        if not self.len_account_key_list:
            raise ALTError(f'No accounts for the lookup table {self._alt_address.table_account}')

    def _update_account_key_list(self) -> None:
        # Rebuild the pubkey list from the canonical string set.
        self._acct_key_list = [SolPubKey.from_string(key) for key in self._acct_key_set]

    def remove_account_key_list(self, acct_key_list: List[SolPubKey]) -> bool:
        """Remove keys before the table is created; returns True if any removed."""
        if self._is_exist:
            raise ALTError('Trying to remove account from existing address lookup table')

        result = False
        for acct_key in acct_key_list:
            key = str(acct_key)
            if key in self._acct_key_set:
                result = True
                self._acct_key_set.remove(key)

        if result:
            self._update_account_key_list()
        return result

    def update_from_account(self, alt_acct_info: ALTAccountInfo) -> None:
        """Replace local state with the on-chain contents of the table."""
        if self._alt_address.table_account != str(alt_acct_info.table_account):
            raise ALTError(
                'Trying to update account list from another lookup table: '
                f'{self._alt_address.table_account} != {str(alt_acct_info.table_account)}'
            )

        self._acct_key_list = alt_acct_info.account_key_list
        self._new_acct_key_list: List[SolPubKey] = list()
        self._acct_key_set = set([str(acct_key) for acct_key in alt_acct_info.account_key_list])
        self._is_exist = True

    def add_account_key_list(self, acct_key_list: List[SolPubKey]) -> None:
        """Append new (not yet stored) keys to an existing table."""
        if not self._is_exist:
            raise ALTError('Trying to add account to not-existing address lookup table')

        for acct_key in acct_key_list:
            acct_key_str = str(acct_key)
            if acct_key_str in self._acct_key_set:
                continue
            self._acct_key_set.add(acct_key_str)
            self._acct_key_list.append(acct_key)
            self._new_acct_key_list.append(acct_key)
#!/usr/bin/env python
# Copyright 2016 Andy Chu. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
"""
bool_parse.py - Parse boolean expressions.
In contrast to test / [, the parsing of [[ expressions is done BEFORE
evaluation. So we are parsing a list of Word instances to an AST, rather than
a list of strings.
TODO: If we implement "test / [", we should share the parsing.
Grammar from http://compilers.iecc.com/crenshaw/tutor6.txt, adapted to ANTLR
syntax.
Expr : Term (OR Term)*
Term : Negated (AND Negated)*
Negated : '!'? Factor
Factor : WORD
| UNARY_OP WORD
| WORD BINARY_OP WORD
| '(' Expr ')'
OR = || -o
AND = && -a
WORD = any word
UNARY_OP: -z -n, etc.
BINARY_OP: -gt, -ot, ==, etc.
"""
from core import word
from core import util
from osh.meta import ast, Id, Kind, LookupKind, types
try:
import libc # for regex_parse
except ImportError:
from benchmarks import fake_libc as libc
lex_mode_e = types.lex_mode_e
log = util.log
class BoolParser(object):
    """Parses [[ at compile time and [ at runtime."""

    def __init__(self, w_parser):
        """
        Args:
          w_parser: WordParser
        """
        self.w_parser = w_parser
        # Either one word or two words for lookahead
        self.words = []

        self.cur_word = None
        self.op_id = Id.Undefined_Tok
        self.b_kind = Kind.Undefined

        self.error_stack = []

    def Error(self):
        return self.error_stack

    def AddErrorContext(self, msg, *args, **kwargs):
        err = util.ParseError(msg, *args, **kwargs)
        self.error_stack.append(err)

    def _NextOne(self, lex_mode=lex_mode_e.DBRACKET):
        """Advance one token, consuming a lookahead word if one is buffered."""
        n = len(self.words)
        if n == 2:
            # A word was buffered by _LookAhead(); shift it into place.
            assert lex_mode == lex_mode_e.DBRACKET
            self.words[0] = self.words[1]
            self.cur_word = self.words[0]
            del self.words[1]
        elif n in (0, 1):
            w = self.w_parser.ReadWord(lex_mode)
            if not w:
                err = self.w_parser.Error()
                self.error_stack.extend(err)
                return False
            if n == 0:
                self.words.append(w)
            else:
                self.words[0] = w
            self.cur_word = w

        self.op_id = word.BoolId(self.cur_word)
        self.b_kind = LookupKind(self.op_id)
        return True

    def _Next(self, lex_mode=lex_mode_e.DBRACKET):
        """Advance to the next token, skipping newlines.

        We don't handle newlines in the lexer because we want the newline after ]]
        to be Id.Op_Newline rather than Id.WS_Newline.  It's more complicated if
        it's Id.WS_Newline -- we might have to unread tokens, etc.
        """
        while True:
            w = self._NextOne(lex_mode=lex_mode)
            if not w:
                return False
            if self.op_id != Id.Op_Newline:
                break
        return True

    def _LookAhead(self):
        """Peek at the next word without consuming it (buffered for _Next)."""
        n = len(self.words)
        if n != 1:
            raise AssertionError(self.words)

        w = self.w_parser.ReadWord(lex_mode_e.DBRACKET)
        self.words.append(w)  # Save it for _Next()
        return w

    def Parse(self):
        """Parse a [[ expression; expects ]] at the end."""
        if not self._Next(): return None

        node = self.ParseExpr()
        if self.op_id != Id.Lit_DRightBracket:
            self.AddErrorContext("Unexpected extra word %r", self.cur_word,
                                 word=self.cur_word)
            return None
        return node

    def _TestAtEnd(self):
        """For unit tests only."""
        return self.op_id == Id.Lit_DRightBracket

    def ParseForBuiltin(self):
        """For test builtin: same grammar, but terminated by EOF, not ]]."""
        if not self._Next(): return None

        node = self.ParseExpr()
        if self.op_id != Id.Eof_Real:
            self.AddErrorContext(
                'Unexpected trailing word in test expression: %s',
                self.cur_word)
            return None
        return node

    def ParseExpr(self):
        """
        Iterative:
        Expr    : Term (OR Term)*

        Right recursion:
        Expr    : Term (OR Expr)?
        """
        left = self.ParseTerm()
        # [[ uses || while [ uses -o
        if self.op_id in (Id.Op_DPipe, Id.BoolUnary_o):
            if not self._Next(): return None
            right = self.ParseExpr()
            if not right: return None
            return ast.LogicalOr(left, right)
        else:
            return left

    def ParseTerm(self):
        """
        Term    : Negated (AND Negated)*

        Right recursion:
        Term    : Negated (AND Term)?
        """
        left = self.ParseNegatedFactor()
        if not left:
            return None  # TODO: An exception should handle this case.

        # [[ uses && while [ uses -a
        if self.op_id in (Id.Op_DAmp, Id.BoolUnary_a):
            if not self._Next(): return None
            right = self.ParseTerm()
            if not right: return None
            return ast.LogicalAnd(left, right)
        else:
            return left

    def ParseNegatedFactor(self):
        """
        Negated : '!'? Factor
        """
        if self.op_id == Id.KW_Bang:
            if not self._Next(): return None
            child = self.ParseFactor()
            if not child: return None
            return ast.LogicalNot(child)
        else:
            return self.ParseFactor()

    def ParseFactor(self):
        """
        Factor  : WORD
                | UNARY_OP WORD
                | WORD BINARY_OP WORD
                | '(' Expr ')'
        """
        if self.b_kind == Kind.BoolUnary:
            # Just save the type and not the token itself?
            op = self.op_id
            if not self._Next(): return None
            w = self.cur_word
            if not self._Next(): return None
            node = ast.BoolUnary(op, w)
            return node

        if self.b_kind == Kind.Word:
            # Peek ahead another token.
            t2 = self._LookAhead()
            t2_op_id = word.BoolId(t2)
            t2_b_kind = LookupKind(t2_op_id)

            # Redir pun for < and >, -a and -o pun
            if t2_b_kind in (Kind.BoolBinary, Kind.Redir):
                left = self.cur_word

                if not self._Next(): return None
                op = self.op_id

                # TODO: Need to change to lex_mode_e.BASH_REGEX.
                # _Next(lex_mode) then?
                is_regex = t2_op_id == Id.BoolBinary_EqualTilde
                if is_regex:
                    # The RHS of =~ is lexed as a regex, not a normal word.
                    if not self._Next(lex_mode=lex_mode_e.BASH_REGEX): return None
                else:
                    if not self._Next(): return None

                right = self.cur_word
                if is_regex:
                    # TODO: Quoted parts need to be regex-escaped, e.g. [[ $a =~ "{" ]].
                    # I don't think libc has a function to do this.  Escape these
                    # characters:
                    # https://www.gnu.org/software/sed/manual/html_node/ERE-syntax.html0
                    ok, regex_str, unused_quoted = word.StaticEval(right)
                    # doesn't contain $foo, etc.
                    if ok and not libc.regex_parse(regex_str):
                        self.AddErrorContext("Invalid regex: %r" % regex_str, word=right)
                        return None

                if not self._Next(): return None
                return ast.BoolBinary(op, left, right)
            else:
                # [[ foo ]]
                w = self.cur_word
                if not self._Next(): return None
                return ast.WordTest(w)

        if self.op_id == Id.Op_LParen:
            if not self._Next(): return None
            node = self.ParseExpr()
            if self.op_id != Id.Op_RParen:
                self.AddErrorContext(
                    'Expected ), got %s', self.cur_word, word=self.cur_word)
                return None
            if not self._Next(): return None
            return node

        # TODO: A proper error, e.g. for [[ && ]] or [[ ]]
        self.AddErrorContext(
            'Unexpected token: %s' % self.cur_word, word=self.cur_word)
        return None
# Copyright 2020-2023 Capypara and the SkyTemple Contributors
#
# This file is part of SkyTemple.
#
# SkyTemple is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SkyTemple is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SkyTemple. If not, see <https://www.gnu.org/licenses/>.
import re
import typing
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Optional, Dict, List, Union
import cairo
from gi.repository import Gtk, GLib
from skytemple_files.data.md.protocol import Gender
from skytemple.core.list_icon_renderer import ListIconRenderer
from skytemple.core.module_controller import AbstractController
from skytemple.core.string_provider import StringType
if TYPE_CHECKING:
from skytemple.module.lists.module import ListsModule
PATTERN_MD_ENTRY = re.compile(r'.*\(\$(\d+)\).*')
class ListBaseController(AbstractController, ABC):
    """Base controller for list views that show Pokémon entries with icons."""

    def __init__(self, module: 'ListsModule', *args):
        self.module = module
        self.builder: Gtk.Builder = None  # type: ignore
        self._sprite_provider = self.module.project.get_sprite_provider()
        # Maps monster entity id -> display name "<name> (<gender>) ($<id>)".
        self._ent_names: Dict[int, str] = {}
        self._list_store: Optional[Gtk.ListStore] = None
        self.icon_renderer: Optional[ListIconRenderer] = None
        self._loading = False

    def load(self):
        self._loading = True
        self._init_monster_store()
        self.refresh_list()
        self.builder.connect_signals(self)
        self._loading = False

    @typing.no_type_check
    def unload(self):
        # Drop all references so GTK objects can be freed.
        super().unload()
        self.builder = None  # type: ignore
        self._sprite_provider = None
        self._ent_names = None
        self._list_store = None
        if self.icon_renderer is not None:
            self.icon_renderer.unload()
        self.icon_renderer = None
        self._loading = False

    def _init_monster_store(self):
        # Fill the completion store with all monster display names (skip entry 0).
        monster_md = self.module.get_monster_md()
        monster_store: Gtk.ListStore = self.builder.get_object('monster_store')
        for idx, entry in enumerate(monster_md.entries):
            if idx == 0:
                continue
            name = self.module.project.get_string_provider().get_value(StringType.POKEMON_NAMES, entry.md_index_base)
            self._ent_names[idx] = f'{name} ({Gender(entry.gender).print_name}) (${idx:04})'
            monster_store.append([self._ent_names[idx]])

    def on_draw_example_placeholder_draw(self, widget: Gtk.DrawingArea, ctx: cairo.Context):
        """Draw the placeholder actor sprite into the example drawing area."""
        sprite, x, y, w, h = self._sprite_provider.get_actor_placeholder(
            9999, 0, lambda: GLib.idle_add(lambda: self.builder.get_object('draw_example_placeholder').queue_draw())  # type: ignore
        )
        ctx.set_source_surface(sprite)
        ctx.get_source().set_filter(cairo.Filter.NEAREST)
        ctx.paint()
        if widget.get_size_request() != (w, h):
            widget.set_size_request(w, h)

    def on_completion_entities_match_selected(self, completion, model, tree_iter):
        pass

    def on_cr_entity_editing_started(self, renderer, editable, path):
        """Signal handler: attach the entity-name completion to an edited cell."""
        editable.set_completion(self.builder.get_object('completion_entities'))

    @abstractmethod
    def refresh_list(self):
        pass

    def can_be_placeholder(self):
        return False

    def _get_icon(self, entid, idx, force_placeholder=False, store=None, store_iters=None):
        # store_iters is deprecated and unused
        if self.icon_renderer is None:
            self.icon_renderer = ListIconRenderer(self._get_store_icon_id(), self.can_be_placeholder())
        if store is None:
            store = self._list_store
        if entid <= 0 or force_placeholder:
            load_fn: typing.Callable = self._sprite_provider.get_actor_placeholder
            target_name = f'pl{idx}'
            parameters = idx, 0
            is_placeholder = True
        else:
            load_fn = self._sprite_provider.get_monster
            target_name = entid
            parameters = entid, 0
            is_placeholder = False
        return self.icon_renderer.load_icon(store, load_fn, target_name, idx, parameters, is_placeholder)

    def _get_store_icon_id(self):
        # Column index of the icon in the list store.
        return 3

    def _get_store_entid_id(self):
        # Column index of the entity id in the list store.
        return 4
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc
from tests.protobuf import ( # pylint: disable=no-name-in-module
test_server_pb2_grpc,
)
from opentelemetry import trace
from opentelemetry.instrumentation.grpc import GrpcInstrumentorClient
from opentelemetry.test.test_base import TestBase
from ._client import simple_method
from ._server import create_test_server
# User defined interceptor. Is used in the tests along with the opentelemetry client interceptor.
# User defined interceptor. Is used in the tests along with the opentelemetry client interceptor.
class Interceptor(
    grpc.UnaryUnaryClientInterceptor,
    grpc.UnaryStreamClientInterceptor,
    grpc.StreamUnaryClientInterceptor,
    grpc.StreamStreamClientInterceptor,
):
    """Pass-through gRPC client interceptor covering all four call kinds.

    Each intercept_* method simply delegates to the continuation, so the
    interceptor changes nothing — it only exercises interceptor chaining
    alongside the OpenTelemetry client interceptor in the tests.
    """

    def __init__(self):
        pass

    def intercept_unary_unary(
        self, continuation, client_call_details, request
    ):
        return self._intercept_call(continuation, client_call_details, request)

    def intercept_unary_stream(
        self, continuation, client_call_details, request
    ):
        return self._intercept_call(continuation, client_call_details, request)

    def intercept_stream_unary(
        self, continuation, client_call_details, request_iterator
    ):
        return self._intercept_call(
            continuation, client_call_details, request_iterator
        )

    def intercept_stream_stream(
        self, continuation, client_call_details, request_iterator
    ):
        return self._intercept_call(
            continuation, client_call_details, request_iterator
        )

    @staticmethod
    def _intercept_call(
        continuation, client_call_details, request_or_iterator
    ):
        # No-op: forward the call unchanged.
        return continuation(client_call_details, request_or_iterator)
def request_hook(span, request):
    """Client hook: record the outgoing request data on the span."""
    span.set_attribute("request_data", request.request_data)


def response_hook(span, response):
    """Client hook: record the returned response data on the span."""
    span.set_attribute("response_data", response.response_data)


def request_hook_with_exception(_span, _request):
    # Deliberately failing hook: instrumentation must not propagate hook errors.
    raise Exception()


def response_hook_with_exception(_span, _response):
    # Deliberately failing hook: instrumentation must not propagate hook errors.
    raise Exception()


class TestHooks(TestBase):
    """Tests for the request/response hooks of GrpcInstrumentorClient."""

    def setUp(self):
        super().setUp()
        self.server = create_test_server(25565)
        self.server.start()
        # use a user defined interceptor along with the opentelemetry client interceptor
        self.interceptors = [Interceptor()]

    def tearDown(self):
        super().tearDown()
        self.server.stop(None)

    def test_response_and_request_hooks(self):
        instrumentor = GrpcInstrumentorClient()

        try:
            instrumentor.instrument(
                request_hook=request_hook,
                response_hook=response_hook,
            )

            channel = grpc.insecure_channel("localhost:25565")
            channel = grpc.intercept_channel(channel, *self.interceptors)
            stub = test_server_pb2_grpc.GRPCTestServerStub(channel)

            simple_method(stub)
            spans = self.memory_exporter.get_finished_spans()
            self.assertEqual(len(spans), 1)

            span = spans[0]
            self.assertEqual(span.name, "/GRPCTestServer/SimpleMethod")
            self.assertIs(span.kind, trace.SpanKind.CLIENT)

            # Both hooks must have run and annotated the client span.
            self.assertIn("request_data", span.attributes)
            self.assertEqual(span.attributes["request_data"], "data")
            self.assertIn("response_data", span.attributes)
            self.assertEqual(span.attributes["response_data"], "data")
        finally:
            instrumentor.uninstrument()

    def test_hooks_with_exception(self):
        instrumentor = GrpcInstrumentorClient()

        try:
            instrumentor.instrument(
                request_hook=request_hook_with_exception,
                response_hook=response_hook_with_exception,
            )

            channel = grpc.insecure_channel("localhost:25565")
            channel = grpc.intercept_channel(channel, *self.interceptors)
            stub = test_server_pb2_grpc.GRPCTestServerStub(channel)

            # The call must still succeed even though both hooks raise.
            simple_method(stub)
            spans = self.memory_exporter.get_finished_spans()
            self.assertEqual(len(spans), 1)

            span = spans[0]
            self.assertEqual(span.name, "/GRPCTestServer/SimpleMethod")
            self.assertIs(span.kind, trace.SpanKind.CLIENT)
        finally:
            instrumentor.uninstrument()
5,427 | collater | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from collections import OrderedDict
from typing import Callable, Dict, List
import numpy as np
from . import FairseqDataset
def uniform_sampler(x):
    """Draw a single element of ``x`` uniformly at random."""
    choice = np.random.choice(x, 1)
    return choice.item()
class MultiCorpusSampledDataset(FairseqDataset):
    """
    Stores multiple instances of FairseqDataset together and in every iteration
    creates a batch by first sampling a dataset according to a specified
    probability distribution and then getting instances from that dataset.
    Args:
        datasets: an OrderedDict of FairseqDataset instances.
        sampling_func: A function for sampling over list of dataset keys.
            The default strategy is to sample uniformly.
    """
    def __init__(
        self,
        datasets: Dict[str, FairseqDataset],
        sampling_func: Callable[[List], int] = None,
    ):
        super().__init__()
        assert isinstance(datasets, OrderedDict)
        self.datasets = datasets
        if sampling_func is None:
            sampling_func = uniform_sampler
        self.sampling_func = sampling_func
        # Total length is the sum over all underlying datasets.
        self.total_num_instances = 0
        for _, dataset in datasets.items():
            assert isinstance(dataset, FairseqDataset)
            self.total_num_instances += len(dataset)
        # Lazily populated by ordered_indices(): key -> per-dataset ordering.
        self._ordered_indices = None
    def __len__(self):
        """
        Length of this dataset is the sum of individual datasets
        """
        return self.total_num_instances
    def ordered_indices(self):
        """
        Ordered indices for batching. Here we call the underlying
        dataset's ordered_indices() so that we get the same random ordering
        as we would have from using the underlying dataset directly.
        """
        if self._ordered_indices is None:
            self._ordered_indices = OrderedDict(
                [
                    (key, dataset.ordered_indices())
                    for key, dataset in self.datasets.items()
                ]
            )
        # The returned indices index into *this* dataset; the per-dataset
        # orderings cached above are applied in _map_index_to_dataset().
        return np.arange(len(self))
    def _map_index_to_dataset(self, key: int, index: int):
        """
        Different underlying datasets have different lengths. In order to ensure
        we are not accessing an index outside the range of the current dataset
        size, we wrap around. This function should be called after we have
        created an ordering for this and all underlying datasets.
        """
        assert (
            self._ordered_indices is not None
        ), "Must call MultiCorpusSampledDataset.ordered_indices() first"
        mapped_index = index % len(self.datasets[key])
        return self._ordered_indices[key][mapped_index]
    def __getitem__(self, index: int):
        """
        Get the item associated with index from each underlying dataset.
        Since index is in the range of [0, TotalNumInstances], we need to
        map the index to the dataset before retrieving the item.
        """
        return OrderedDict(
            [
                (key, dataset[self._map_index_to_dataset(key, index)])
                for key, dataset in self.datasets.items()
            ]
        )
    def METHOD_NAME(self, samples: List[Dict]):
        """
        Generate a mini-batch for this dataset.
        To convert this into a regular mini-batch we use the following
        logic:
            1. Select a dataset using the specified probability distribution.
            2. Call the collater function of the selected dataset.
        Returns None when ``samples`` is empty.
        """
        if len(samples) == 0:
            return None
        selected_key = self.sampling_func(list(self.datasets.keys()))
        selected_samples = [sample[selected_key] for sample in samples]
        return self.datasets[selected_key].METHOD_NAME(selected_samples)
    def num_tokens(self, index: int):
        """
        Return an example's length (number of tokens), used for batching. Here
        we return the max across all examples at index across all underlying
        datasets.
        """
        return max(
            dataset.num_tokens(self._map_index_to_dataset(key, index))
            for key, dataset in self.datasets.items()
        )
    def size(self, index: int):
        """
        Return an example's size as a float or tuple. Here we return the max
        across all underlying datasets. This value is used when filtering a
        dataset with max-positions.
        """
        return max(
            dataset.size(self._map_index_to_dataset(key, index))
            for key, dataset in self.datasets.items()
        )
    @property
    def supports_prefetch(self):
        # Prefetching is possible only if every underlying dataset supports it.
        return all(
            getattr(dataset, "supports_prefetch", False)
            for dataset in self.datasets.values()
        )
    def prefetch(self, indices):
        # Prefetch the mapped indices from every underlying dataset.
        for key, dataset in self.datasets.items():
            dataset.prefetch(
                [self._map_index_to_dataset(key, index) for index in indices]
            )
    @property
    def supports_fetch_outside_dataloader(self):
        # True only if every underlying dataset allows fetching outside
        # the dataloader worker processes.
        return all(
            self.datasets[key].supports_fetch_outside_dataloader
            for key in self.datasets
        )
5,428 | set up | #
# Copyright (C) 2010-2022 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import unittest as ut
import unittest_decorators as utx
import numpy as np
import espressomd.lb
import espressomd.shapes
# Lattice-Boltzmann discretization and fluid parameters shared by all tests.
AGRID = .25
EXT_FORCE = .1
KINEMATIC_VISC = 2.7
DENS = 1.7
TIME_STEP = 0.07
# Body force along z drives the Poiseuille flow between the x-walls.
LB_PARAMS = {'agrid': AGRID,
             'density': DENS,
             'kinematic_viscosity': KINEMATIC_VISC,
             'tau': TIME_STEP,
             'ext_force_density': [0.0, 0.0, EXT_FORCE]}
def poiseuille_flow(z, H, ext_force_density, dyn_visc):
    """
    Analytical solution for planar Poiseuille flow.
    Parameters
    ----------
    z : :obj:`float`
        Distance to the mid plane of the channel.
    H : :obj:`float`
        Distance between the boundaries.
    ext_force_density : :obj:`float`
        Force density on the fluid normal to the boundaries.
    dyn_visc : :obj:`float`
        Dynamic viscosity of the LB fluid.
    """
    # v(z) = f / (2 mu) * (H^2/4 - z^2): parabolic profile, zero at the walls.
    prefactor = ext_force_density / (2.0 * dyn_visc)
    return prefactor * (H**2.0 / 4.0 - z**2.0)
class LBPoiseuilleCommon:
    """
    Check the lattice-Boltzmann pressure-driven flow in a slab system
    by comparing to the analytical solution for the planar Poiseuille.
    """
    # Shared simulation box, reused by every concrete test subclass.
    system = espressomd.System(box_l=[9.0, 3.0, 3.0])
    system.time_step = TIME_STEP
    system.cell_system.skin = 0.4 * AGRID
    def METHOD_NAME(self):
        # Test fixture: build the LB fluid from the subclass-provided
        # lb_class/lb_params and attach it to the shared system.
        self.lbf = self.lb_class(**LB_PARAMS, **self.lb_params)
        self.system.lb = self.lbf
    def tearDown(self):
        # Detach the LB fluid so the next test starts from a clean system.
        self.system.lb = None
    def prepare(self):
        """
        Integrate the LB fluid until steady state is reached within a certain
        accuracy.
        """
        # Two parallel walls, one grid spacing inside the box edges along x.
        wall_shape1 = espressomd.shapes.Wall(normal=[1, 0, 0], dist=AGRID)
        wall_shape2 = espressomd.shapes.Wall(
            normal=[-1, 0, 0], dist=-(self.system.box_l[0] - AGRID))
        self.lbf.add_boundary_from_shape(wall_shape1)
        self.lbf.add_boundary_from_shape(wall_shape2)
        # Watch the velocity at the channel center until it stops changing.
        mid_indices = (self.system.box_l / AGRID / 2).astype(int)
        diff = float("inf")
        old_val = self.lbf[mid_indices].velocity[2]
        while diff > 0.005:
            self.system.integrator.run(200)
            new_val = self.lbf[mid_indices].velocity[2]
            diff = abs(new_val - old_val)
            old_val = new_val
    def test_profile(self):
        """
        Compare against analytical function by calculating the RMSD.
        """
        self.prepare()
        velocities = np.zeros((int(self.system.box_l[0] / AGRID), 2))
        for x in range(velocities.shape[0]):
            # Average v_z over the periodic y/z directions at this x slab.
            v_tmp = []
            for y in range(int(self.system.box_l[1] + 1)):
                for z in range(int(self.system.box_l[2] + 1)):
                    v_tmp.append(self.lbf[x, y, z].velocity[2])
            velocities[x, 1] = np.mean(np.array(v_tmp))
            velocities[x, 0] = (x + 0.5) * AGRID
        # Exclude the two boundary nodes from the comparison.
        v_measured = velocities[1:-1, 1]
        v_expected = poiseuille_flow(velocities[1:-1, 0] - 0.5 * self.system.box_l[0],
                                     self.system.box_l[0] - 2.0 * AGRID,
                                     EXT_FORCE,
                                     KINEMATIC_VISC * DENS)
        np.testing.assert_allclose(v_measured, v_expected, rtol=5E-5)
@utx.skipIfMissingFeatures(["WALBERLA"])
class LBPoiseuilleWalberla(LBPoiseuilleCommon, ut.TestCase):
    """Test for the Walberla implementation of the LB in double-precision."""
    lb_class = espressomd.lb.LBFluidWalberla
    lb_params = {"single_precision": False}
@utx.skipIfMissingFeatures(["WALBERLA"])
class LBPoiseuilleWalberlaSinglePrecision(LBPoiseuilleCommon, ut.TestCase):
    """Test for the Walberla implementation of the LB in single-precision."""
    lb_class = espressomd.lb.LBFluidWalberla
    lb_params = {"single_precision": True}
if __name__ == '__main__':
    ut.main()
5,429 | build revision arguments |
# Copyright 2009-2012 Jaap Karssenberg <jaap.karssenberg@gmail.com>
# Copyright 2012 Damien Accorsi <damien.accorsi@free.fr>
import os
import re
import logging
from zim.plugins.versioncontrol import VCSApplicationBase
from zim.applications import Application
logger = logging.getLogger('zim.vcs.fossil')
# Timeline date separator line, e.g. "=== 2012-01-31 ==="
RE_Date = re.compile(r"===\s*([0-9]+-[0-9]+-[0-9]+)\s*===")
# 23:03:33 [8cf3ffde61] *MERGE* *BRANCH* Stas Bushuev wrote this message. (user: Xitsa tags: trunk)
RE_LogRecord = re.compile(r"([0-9:-]+)\s*\[([0-9a-fA-F]+)\]\s*(\*\S+\* *)*(.*)\((.*)\)")
# Splits trailing "key:" tags such as "user:" / "tags:"
RE_Tag = re.compile(r"(?:,\s*)?(\S+:)")
# Plain HH:MM:SS time-of-day token
RE_Time = re.compile(r"[0-9][0-9]:[0-9][0-9]:[0-9][0-9]")
class FOSSILApplicationBackend(VCSApplicationBase):
	"""Version-control backend wrapping the ``fossil`` command line tool."""
	@classmethod
	def build_bin_application_instance(cls):
		# Wrapper object around the "fossil" executable
		return Application(('fossil',))
	def METHOD_NAME(self, versions):
		"""Build a list including required string/int for running a VCS command
		# Accepts: None, string, (string,), (string, string)
		# Always returns a list
		# versions content:
		- None: return an empty list
		- string: return ['-r', version]
		- tuple or list: return ['-r', rev1] or ['-r', rev1, '--to', rev2]
		It's all based on the fact that defining revisions with fossil is:
		-r revision
		-r rev1 --to rev2
		"""
		if isinstance(versions, (tuple, list)):
			assert 1 <= len(versions) <= 2
			if len(versions) == 2:
				return ['-r', versions[0], '--to', versions[1]]
			else:
				return ['-r', versions[0]]
		if versions is None:
			return []
		return ['-r', versions]
	########
	#
	# NOW ARE ALL REVISION CONTROL SYSTEM SHORTCUTS
	def add(self, path=None):
		"""
		Runs: fossil add {{PATH}}
		"""
		if path is None:
			# `fossil addremove` does not support a path argument,
			# need to check ourselves :S
			if self.notebook_dir != self.root:
				logger.warning('"Fossil addremove" does not support path argument, so files outside of notebook may be added to the repository!')
			return self.run(['addremove'])
		else:
			return self.run(['add', path])
	def annotate(self, file, version=None):
		"""FIXME Document
		return
		0: line1
		2: line1
		...
		"""
		# Annotate doesn't take a version
		return self.pipe(['annotate', file])
	def cat(self, path, version):
		"""
		Runs: fossil cat {{PATH}} {{REV_ARGS}}
		"""
		revision_args = self.METHOD_NAME(version)
		return self.pipe(['cat', path] + revision_args)
	def commit(self, path, msg):
		"""
		Runs: fossil commit -m {{MSG}} {{PATH}}
		"""
		params = ['commit']
		if msg != '' and msg is not None:
			params.append('-m')
			params.append(msg)
		# To minimize interaction
		params.append('--no-warnings')
		if path != '' and path is not None:
			params.append(path)
		return self.run(params)
	def diff(self, versions=None, file=None):
		"""
		Runs:
			fossil diff {{REVISION_ARGS}}
		or
			fossil diff {{REVISION_ARGS}} {{PATH}}
		"""
		revision_args = self.METHOD_NAME(versions)
		if file is None:
			return self.pipe(['diff'] + revision_args)
		# Using --git option allow to show the renaming of files
		else:
			return self.pipe(['diff'] + revision_args + [file])
	def ignore(self, file_to_ignore_regexp):
		"""
		Configure settings for files to ignore
		@param file_to_ignore_regexp: str representing the ignore-glob content.
		this must be a list of regexp defining the file / path to ignore,
		separated by a comma.
		@returns: nothing
		"""
		return self.run(['settings', 'ignore-glob', file_to_ignore_regexp])
	def init_repo(self):
		"""Initialize a new repo
		The init operation consists in:
		- running the VCS init command
		- defining files to ignore
		- adding all other existing files
		@returns: nothing
		"""
		self.init()
		self.ignore('\.zim*/*,notebook.fossil')
		self.add('.') # add all existing files
	def repo_exists(self):
		"""Returns True if a repository is already setup, or False
		@returns: a boolean True if a repo is already setup, or False
		"""
		# Either an open checkout (.fslckout) or the in-folder repo file.
		return self.root.file('.fslckout').exists() or self.root.file('notebook.fossil').exists()
	def init(self):
		"""
		Runs: fossil init
		Usually, the repository is located in some other place than
		checkout folder, but we put it in the notepad folder and then checkout it.
		"""
		infolder_repo = self.root.file('notebook.fossil')
		self.run(['init', infolder_repo])
		return self.checkout(infolder_repo)
	def checkout(self, file):
		# Create working folder; --keep leaves existing files untouched.
		return self.run(['open', '--keep', file])
	def is_modified(self):
		"""Returns true if the repo is not up-to-date, or False
		@returns: True if the repo is not up-to-date, or False
		"""
		# If status return an empty answer, this means the local repo is up-to-date
		return ''.join(self.status()).strip() != ''
	def log(self, path=None):
		"""
		Runs: fossil timeline --type ci {{PATH}}
		"--type ci" option for file commits only
		"""
		options = ['--limit', '1000']
		if not path is None:
			return self.pipe(['finfo'] + options + [path])
		return self.pipe(['timeline', '--type', 'ci'] + options)
	def log_to_revision_list(self, log_op_output):
		# returns a list of tuple (revision-id, date, user, commit-message)
		def ExtractUserName(line):
			# Pull the value following a "user:" tag, if present.
			tags = RE_Tag.split(line)
			if len(tags) > 2:
				if tags[1] == "user:":
					return tags[2].strip()
			return ""
		def CombineDateTime(CurDate, TimeOrDate):
			# Timeline records carry only a time; prefix the current date line.
			if RE_Time.match(TimeOrDate):
				return CurDate + " " + TimeOrDate
			return TimeOrDate
		versions = []
		CurDate = ""
		for line in log_op_output:
			(rev, date, user, msg) = (None, None, None, None)
			DateMatch = RE_Date.search(line)
			if DateMatch:
				# "=== YYYY-MM-DD ===" separator: remember date for what follows.
				CurDate = DateMatch.group(1)
			else:
				RecordMatch = RE_LogRecord.search(line)
				if RecordMatch:
					date = CombineDateTime(CurDate, RecordMatch.group(1))
					rev = RecordMatch.group(2)
					msg = RecordMatch.group(4)
					user = ExtractUserName(RecordMatch.group(5))
					versions.append((rev, date, user, msg))
		return versions
	def move(self, oldpath, newpath):
		"""
		Runs: fossil mv {{OLDPATH}} {{NEWPATH}}
		"""
		return self.run(['mv', oldpath, newpath])
	def remove(self, path):
		"""
		Runs: fossil rm {{PATH}}
		"""
		return self.run(['rm', path])
	def revert(self, path=None, version=None):
		"""
		Runs:
			fossil revert {{PATH}} {{REV_ARGS}}
		or
			fossil revert {{REV_ARGS}}
		"""
		revision_params = self.METHOD_NAME(version)
		if path:
			return self.run(['revert', path] + revision_params)
		else:
			return self.run(['revert'] + revision_params)
	def status(self):
		"""
		Runs: fossil changes
		"""
		return self.pipe(['changes'])
5,430 | add deprecations | import abc
import warnings
from typing import TYPE_CHECKING, List, Set, no_type_check
from .config_dict import ConfigDict
from .config_errors import ConfigValidationError, ConfigWarning
from .config_schema_item import SchemaItem
from .context_values import ContextList, ContextString
from .deprecation_info import DeprecationInfo
from .error_info import ErrorInfo, WarningInfo
# Python 3.8 does not implement UserDict as a MutableMapping, meaning it's not
# possible to specify the key and value types.
#
# Instead, we only set the types during type checking
if TYPE_CHECKING:
from collections import UserDict
_UserDict = UserDict[str, SchemaItem]
else:
from collections import UserDict as _UserDict
class SchemaItemDict(_UserDict):
    """Mapping of config keyword name -> SchemaItem, with helpers for
    validating a parsed ConfigDict against the schema (required keywords,
    deprecation warnings)."""
    def search_for_unset_required_keywords(
        self, config_dict: ConfigDict, filename: str
    ) -> None:
        """Raise ConfigValidationError listing every required keyword that
        is missing from ``config_dict``."""
        errors = []
        # schema.values()
        # can return duplicate values due to aliases
        # so we need to run this keyed by the keyword itself
        # Ex: there is an alias for NUM_REALIZATIONS
        # NUM_REALISATIONS
        # both with the same value
        # which causes .values() to return the NUM_REALIZATIONS keyword twice
        # which again leads to duplicate collection of errors related to this
        visited: Set[str] = set()
        for constraints in self.values():
            if constraints.kw in visited:
                continue
            visited.add(constraints.kw)
            if constraints.required_set and constraints.kw not in config_dict:
                errors.append(
                    ErrorInfo(
                        message=f"{constraints.kw} must be set.",
                        filename=filename,
                    )
                )
        if errors:
            raise ConfigValidationError.from_collected(errors)
    def METHOD_NAME(self, deprecated_keywords_list: List[DeprecationInfo]) -> None:
        """Register deprecation info on the schema, adding dummy entries
        for deprecated keywords that are no longer part of the schema."""
        for info in deprecated_keywords_list:
            # Add it to the schema only so that it is
            # caught by the parser
            if info.keyword not in self:
                self[info.keyword] = SchemaItem.deprecated_dummy_keyword(info)
            else:
                self[info.keyword].deprecation_info = info
    @no_type_check
    def search_for_deprecated_keyword_usages(
        self,
        config_dict: ConfigDict,
        filename: str,
    ) -> None:
        """Emit a ConfigWarning for every use of a deprecated keyword in
        ``config_dict`` (subject to each deprecation's optional check)."""
        detected_deprecations = []
        def push_deprecation(info: DeprecationInfo, line: List[ContextString]):
            # A deprecation may carry a predicate restricting which usages
            # actually count as deprecated.
            if info.check is None or (callable(info.check) and info.check(line)):
                detected_deprecations.append((info, line))
        for kw, v in config_dict.items():
            schema_info = self.get(kw)
            if schema_info is not None and schema_info.deprecation_info is not None:
                if v is None:
                    # Edge case: Happens if
                    # a keyword is specified in the schema and takes N args
                    # and is also specified as deprecated,
                    # and is specified in the config with 0 arguments
                    # which parses to None for the args
                    continue
                # The parsed value may be a single token, a flat argument
                # list, or a list of argument lists (repeatable keywords).
                if isinstance(v, ContextString):
                    push_deprecation(
                        schema_info.deprecation_info,
                        ContextList.with_values(token=v.keyword_token, values=[v]),
                    )
                elif isinstance(v, list) and (
                    len(v) == 0 or isinstance(v[0], ContextString)
                ):
                    push_deprecation(schema_info.deprecation_info, v)
                elif isinstance(v[0], list):
                    for arglist in v:
                        push_deprecation(schema_info.deprecation_info, arglist)
        if detected_deprecations:
            for deprecation, line in detected_deprecations:
                warnings.warn(
                    ConfigWarning(
                        WarningInfo(
                            is_deprecation=True,
                            filename=filename,
                            message=deprecation.resolve_message(line),
                        ).set_context_keyword(line)
                    ),
                    stacklevel=1,
                )
    @abc.abstractmethod
    def check_required(
        self,
        config_dict: ConfigDict,
        filename: str,
    ) -> None:
        # Subclasses implement schema-specific required-keyword validation.
        pass
5,431 | get batchnorm params |
#
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
from nndct_shared.base import NNDCT_OP
def METHOD_NAME(param_list, param_getter, center=True, scale=True):
  """Assemble the four batch-norm parameter arrays (gamma, beta, mean, var).

  Args:
    param_list: parameter handles ordered gamma, beta, mean, var; the
      entry for a disabled center (beta) or scale (gamma) is omitted.
    param_getter: callable mapping a handle to its value (or None).
    center: whether a learned beta (offset) is present.
    scale: whether a learned gamma (scale) is present.

  Returns:
    List of four arrays [gamma, beta, mean, var]; identity defaults
    (ones/zeros) fill any component that is not learned. Returns four
    Nones when any parameter value is missing.
  """
  #order: gamma,beta,mean,var
  if all(param_getter(p) is not None for p in param_list):
    param_shape = param_getter(param_list[-1]).shape
    bn_params = []
    if center and scale:
      bn_params = [param_getter(p) for p in param_list]
    elif center:
      # No learned gamma: substitute a unit scale.
      bn_params = [np.ones(param_shape),
                   param_getter(param_list[0])
                  ] + [param_getter(p) for p in param_list[-2:]]
    elif scale:
      # No learned beta: substitute a zero offset.
      # Bug fix: this previously read `param_getter(node.op.params[0])`
      # where `node` is undefined, raising NameError on the scale-only path.
      bn_params = [param_getter(param_list[0]),
                   np.zeros(param_shape)
                  ] + [param_getter(p) for p in param_list[-2:]]
    if len(bn_params) == 2:
      #no mean and var
      bn_params.extend([np.zeros(param_shape), np.ones(param_shape)])
  else:
    bn_params = [None] * 4
  assert len(
      bn_params
  ) == 4, "batch norm should has 4 variables: gamma, beta, mean, var, please check!"
  return bn_params
def get_batchnorm_param_names(param_list, center=True, scale=True):
  """Map positional batch-norm parameter names to their roles.

  The expected ordering of ``param_list`` depends on which of center
  (beta) and scale (gamma) are learned; returns None when neither is.
  """
  if center and scale:
    assert len(
        param_list) == 4, "expect 4 parameters names, got " + str(param_list)
    roles = ('gamma', 'beta', 'mean', 'var')
  elif center:
    assert len(
        param_list) == 3, "expect 3 parameters names, got " + str(param_list)
    roles = ('beta', 'mean', 'var')
  elif scale:
    assert len(
        param_list) == 3, "expect 3 parameters names, got " + str(param_list)
    roles = ('gamma', 'mean', 'var')
  else:
    # Neither center nor scale: nothing to name (matches original
    # implicit-None fall-through).
    return None
  return dict(zip(roles, param_list))
def get_in_out_channel_idx(ndim, optype, data_formats):
  """Return (input_channel_axis, output_channel_axis) of a parameter tensor.

  Args:
    ndim: rank of the parameter tensor; rank-1 tensors (e.g. bias)
      use axis 0 for both roles.
    optype: NNDCT op type that owns the parameter.
    data_formats: mapping from op type to its kernel layout string
      (e.g. 'HWIO', 'OIHW', 'IO', 'OI').

  Raises:
    Exception: for an unsupported layout string or op type.
  """
  #TODO: same shape with different format, is this possible?
  if ndim == 1:
    return 0, 0
  if optype == NNDCT_OP.CONV2D:
    if data_formats[optype] == 'HWIO':
      in_idx, out_idx = 2, 3
    elif data_formats[optype] == 'OIHW':
      in_idx, out_idx = 1, 0
    else:
      raise Exception("data format of conv2d kernel {} is not supported".format(
          data_formats[optype]))
  elif optype == NNDCT_OP.DEPTHWISE_CONV2D:
    if data_formats[optype] == 'HWIO':
      # Depthwise kernels share the channel axis for input and output.
      in_idx, out_idx = 2, 2
    elif data_formats[optype] == 'OIHW':
      in_idx, out_idx = 1, 1
    else:
      # Bug fix: previously formatted CONV2D's layout into this message,
      # producing a misleading diagnostic for depthwise convolutions.
      raise Exception(
          "data format of depthwise_conv2d kernel {} is not supported".format(
              data_formats[optype]))
  elif optype in [NNDCT_OP.DENSE, NNDCT_OP.BASIC_LSTM]:
    if data_formats[optype] == 'IO':
      in_idx, out_idx = 0, 1
    elif data_formats[optype] == 'OI':
      in_idx, out_idx = 1, 0
    else:
      # Bug fix: previously formatted CONV2D's layout into this message.
      raise Exception("data format of 2 dim mat {} is not supported".format(
          data_formats[optype]))
  else:
    raise Exception("unexpected optype: " + str(optype))
  return in_idx, out_idx
def get_tensor_out_dim(tensor, optype, data_formats):
  """Size of the tensor's output-channel axis for the given op/layout."""
  out_axis = get_in_out_channel_idx(tensor.ndim, optype, data_formats)[1]
  return tensor.shape[out_axis]
def get_tensor_in_dim(tensor, optype, data_formats):
  """Size of the tensor's input-channel axis for the given op/layout."""
  in_axis = get_in_out_channel_idx(tensor.ndim, optype, data_formats)[0]
  return tensor.shape[in_axis]
def delete_in_out_channel_indexs(data,
                                 in_idx=None,
                                 out_idx=None,
                                 in_channel_array=None,
                                 out_channel_array=None):
  """Remove pruned channel slices from ``data`` along the in/out axes.

  When the two axes coincide and an out-channel deletion is requested,
  only the out-channel deletion is applied, so the shared axis is not
  trimmed twice.
  """
  same_axis = in_idx == out_idx and out_channel_array is not None
  if in_idx is not None and in_channel_array is not None and not same_axis:
    data = np.delete(data, in_channel_array, axis=in_idx)
  if out_idx is not None and out_channel_array is not None:
    data = np.delete(data, out_channel_array, axis=out_idx)
  return data
def insert_in_out_channel_indexs(data,
                                 in_idx=None,
                                 out_idx=None,
                                 in_channel_array=None,
                                 out_channel_array=None):
  """Re-insert zero slices at previously pruned channel positions.

  Positions are inserted in ascending order so each index refers to the
  partially re-expanded array. When both axes coincide and out-channel
  positions are given, only the out-channel insertion is applied.
  """
  same_axis = in_idx == out_idx and out_channel_array is not None
  if in_idx is not None and in_channel_array is not None and not same_axis:
    for pos in sorted(in_channel_array.tolist()):
      data = np.insert(data, pos, 0, axis=in_idx)
  if out_idx is not None and out_channel_array is not None:
    for pos in sorted(out_channel_array.tolist()):
      data = np.insert(data, pos, 0, axis=out_idx)
  return data
def expand_in_out_channel_indexs(data,
                                 in_idx=None,
                                 out_idx=None,
                                 in_channel_array=None,
                                 out_channel_array=None):
  """Embed ``data`` into a zero array whose in/out channel axes are
  enlarged by the number of pruned indices, scattering the original
  values back to the positions that were kept (inverse of deletion)."""
  # assert len(data.shape) in [1,2,4], 'unexpected param data shape'
  in_dim = None
  out_dim = None
  # Target sizes after expansion; skip the input axis when it coincides
  # with the output axis and an out-channel expansion is requested.
  if in_channel_array is not None and in_idx is not None and not (
      in_idx == out_idx and out_channel_array is not None):
    in_dim = data.shape[in_idx] + len(in_channel_array)
  if out_idx is not None and out_channel_array is not None:
    out_dim = data.shape[out_idx] + len(out_channel_array)
  assert in_dim is not None or out_dim is not None
  expand_shape = [0] * len(data.shape)
  expand_idxs = [0] * len(data.shape)
  for idx, dim in enumerate(data.shape):
    if in_dim is not None and idx == in_idx:
      expand_shape[idx] = in_dim
      # Surviving (kept) positions along the expanded input axis.
      idx_in_channel = sorted(
          np.array(list(set(range(in_dim)) - set(in_channel_array))))
      expand_idxs[idx] = idx_in_channel
    elif out_dim is not None and idx == out_idx:
      expand_shape[idx] = out_dim
      # Surviving (kept) positions along the expanded output axis.
      idx_out_channel = sorted(
          np.array(list(set(range(out_dim)) - set(out_channel_array))))
      expand_idxs[idx] = idx_out_channel
    else:
      expand_shape[idx] = dim
      expand_idxs[idx] = np.array(range(dim))
  expand_data = np.zeros(expand_shape, dtype=data.dtype)
  # np.ix_ builds the open mesh that scatters data into the kept slots.
  expand_data[np.ix_(*expand_idxs)] = data
  return expand_data
5,432 | get code version output | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetCodeVersionResult',
'AwaitableGetCodeVersionResult',
'get_code_version',
'get_code_version_output',
]
@pulumi.output_type
class GetCodeVersionResult:
    """
    Azure Resource Manager resource envelope.
    """
    def __init__(__self__, code_version_properties=None, id=None, name=None, system_data=None, type=None):
        # Defensive type checks: the invoke layer hands back plain
        # dicts/strings that pulumi then maps onto output types.
        if code_version_properties and not isinstance(code_version_properties, dict):
            raise TypeError("Expected argument 'code_version_properties' to be a dict")
        pulumi.set(__self__, "code_version_properties", code_version_properties)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if system_data and not isinstance(system_data, dict):
            raise TypeError("Expected argument 'system_data' to be a dict")
        pulumi.set(__self__, "system_data", system_data)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter(name="codeVersionProperties")
    def code_version_properties(self) -> 'outputs.CodeVersionResponse':
        """
        [Required] Additional attributes of the entity.
        """
        return pulumi.get(self, "code_version_properties")
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> 'outputs.SystemDataResponse':
        """
        Azure Resource Manager metadata containing createdBy and modifiedBy information.
        """
        return pulumi.get(self, "system_data")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")
class AwaitableGetCodeVersionResult(GetCodeVersionResult):
    """Awaitable wrapper so the result can be used in async code; awaiting
    it immediately yields the already-resolved result."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # Generator trick: never actually suspends, just returns a copy.
        if False:
            yield self
        return GetCodeVersionResult(
            code_version_properties=self.code_version_properties,
            id=self.id,
            name=self.name,
            system_data=self.system_data,
            type=self.type)
def get_code_version(name: Optional[str] = None,
                     resource_group_name: Optional[str] = None,
                     version: Optional[str] = None,
                     workspace_name: Optional[str] = None,
                     opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetCodeVersionResult:
    """
    Azure Resource Manager resource envelope.
    :param str name: Container name. This is case-sensitive.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str version: Version identifier. This is case-sensitive.
    :param str workspace_name: Name of Azure Machine Learning workspace.
    """
    __args__ = dict()
    __args__['name'] = name
    __args__['resourceGroupName'] = resource_group_name
    __args__['version'] = version
    __args__['workspaceName'] = workspace_name
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    # Synchronous invoke against the ARM provider; the typed result is
    # hydrated from the raw response by pulumi.
    __ret__ = pulumi.runtime.invoke('azure-native:machinelearningservices/v20230601preview:getCodeVersion', __args__, opts=opts, typ=GetCodeVersionResult).value
    return AwaitableGetCodeVersionResult(
        code_version_properties=pulumi.get(__ret__, 'code_version_properties'),
        id=pulumi.get(__ret__, 'id'),
        name=pulumi.get(__ret__, 'name'),
        system_data=pulumi.get(__ret__, 'system_data'),
        type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_code_version)
def METHOD_NAME(name: Optional[pulumi.Input[str]] = None,
                resource_group_name: Optional[pulumi.Input[str]] = None,
                version: Optional[pulumi.Input[str]] = None,
                workspace_name: Optional[pulumi.Input[str]] = None,
                opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetCodeVersionResult]:
    """
    Azure Resource Manager resource envelope.
    :param str name: Container name. This is case-sensitive.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str version: Version identifier. This is case-sensitive.
    :param str workspace_name: Name of Azure Machine Learning workspace.
    """
    # Body intentionally empty: lift_output_func forwards to
    # get_code_version with Output-aware argument handling.
    ...
5,433 | make test | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import numpy as np
import random
# Read Word Dict and Inverse Word Dict
def read_word_dict(filename):
    """Read a word dict file of lines "<word> <id>"; return {id: word}.

    Fix: open the file with a context manager so the handle is closed
    deterministically instead of leaking until garbage collection.
    """
    word_dict = {}
    with open(filename) as fin:
        for line in fin:
            line = line.strip().split()
            word_dict[int(line[1])] = line[0]
    print('[%s]\n\tWord dict size: %d' % (filename, len(word_dict)))
    return word_dict
# Read Embedding File
def read_embedding(filename):
    """Read an embedding file of lines "<id> <f1> <f2> ..."; return
    {id: [floats]}.

    Fix: open the file with a context manager so the handle is closed
    deterministically instead of leaking until garbage collection.
    """
    embed = {}
    with open(filename) as fin:
        for line in fin:
            line = line.strip().split()
            embed[int(line[0])] = list(map(float, line[1:]))
    print('[%s]\n\tEmbedding size: %d' % (filename, len(embed)))
    return embed
# Convert Embedding Dict 2 numpy array
def convert_embed_2_numpy(embed_dict, embed=None):
    """Copy each id -> vector entry of ``embed_dict`` into the array
    ``embed`` (row per id) and return it."""
    for key, vector in embed_dict.items():
        embed[key] = np.array(vector)
    print('Generate numpy embed:', embed.shape)
    return embed
# Read Data
def read_data(filename):
    """Read a data file of lines "<id> <len> <tok1> <tok2> ..."; return
    {id: [token ids]} (the second column is skipped).

    Fix: open the file with a context manager so the handle is closed
    deterministically instead of leaking until garbage collection.
    """
    data = {}
    with open(filename) as fin:
        for line in fin:
            line = line.strip().split()
            data[line[0]] = list(map(int, line[2:]))
    print('[%s]\n\tData size: %s' % (filename, len(data)))
    return data
# Read Relation Data
def read_relation(filename):
    """Read a relation file of lines "<label> <qid> <docid>"; return a
    list of (label:int, qid:str, docid:str) tuples.

    Fix: open the file with a context manager so the handle is closed
    deterministically instead of leaking until garbage collection.
    """
    data = []
    with open(filename) as fin:
        for line in fin:
            line = line.strip().split()
            data.append((int(line[0]), line[1], line[2]))
    print('[%s]\n\tInstance size: %s' % (filename, len(data)))
    return data
# --- Dataset preparation (runs at import time; reads Letor07 files). ---
Letor07Path = "./data"
word_dict = read_word_dict(filename=os.path.join(Letor07Path, 'word_dict.txt'))
query_data = read_data(filename=os.path.join(Letor07Path, 'qid_query.txt'))
doc_data = read_data(filename=os.path.join(Letor07Path, 'docid_doc.txt'))
embed_dict = read_embedding(filename=os.path.join(Letor07Path,
                                                  'embed_wiki-pdc_d50_norm'))
# Reserve one extra id as the [PAD] token; its embedding is all zeros.
_PAD_ = len(word_dict)  #193367
embed_dict[_PAD_] = np.zeros((50, ), dtype=np.float32)
word_dict[_PAD_] = '[PAD]'
# Randomly initialize the full table, then overwrite rows present in
# embed_dict, and persist the result for the training script.
W_init_embed = np.float32(np.random.uniform(-0.02, 0.02, [len(word_dict), 50]))
embedding = convert_embed_2_numpy(embed_dict, embed=W_init_embed)
np.save("embedding.npy", embedding)
# Generation hyper-parameters.
batch_size = 64
data1_maxlen = 20  # max query length (tokens)
data2_maxlen = 500  # max document length (tokens)
embed_size = 50
train_iters = 2500
def make_train():
    """Write pairwise training batches to ./data/big_train/train.txt.

    For every query, each document with a higher relevance label is paired
    with each document with a lower label.  Per batch, row ``i`` holds the
    positive document and row ``i + batch_size`` the negative one; each output
    line is ``<comma-joined query ids>\\t<comma-joined doc ids>``.
    """
    rel_set = {}
    pair_list = []
    rel = read_relation(filename=os.path.join(Letor07Path,
                                              'relation.train.fold1.txt'))
    # Group document ids by query, then by relevance label.
    for label, d1, d2 in rel:
        rel_set.setdefault(d1, {}).setdefault(label, []).append(d2)
    # Build (query, better_doc, worse_doc) preference pairs from every pair
    # of distinct labels (labels sorted descending, so high before low).
    for d1 in rel_set:
        label_list = sorted(rel_set[d1].keys(), reverse=True)
        for hidx, high_label in enumerate(label_list[:-1]):
            for low_label in label_list[hidx + 1:]:
                for high_d2 in rel_set[d1][high_label]:
                    for low_d2 in rel_set[d1][low_label]:
                        pair_list.append((d1, high_d2, low_d2))
    print('Pair Instance Count:', len(pair_list))
    # 'with' guarantees the output file is flushed and closed even if an
    # exception is raised (the original leaked the handle in that case).
    with open("./data/big_train/train.txt", "w") as f:
        for batch in range(800):
            X1 = np.zeros((batch_size * 2, data1_maxlen), dtype=np.int32)
            X2 = np.zeros((batch_size * 2, data2_maxlen), dtype=np.int32)
            X1[:] = _PAD_
            X2[:] = _PAD_
            for i in range(batch_size):
                d1, d2p, d2n = random.choice(pair_list)
                d1_len = min(data1_maxlen, len(query_data[d1]))
                d2p_len = min(data2_maxlen, len(doc_data[d2p]))
                d2n_len = min(data2_maxlen, len(doc_data[d2n]))
                X1[i, :d1_len] = query_data[d1][:d1_len]
                X2[i, :d2p_len] = doc_data[d2p][:d2p_len]
                X1[i + batch_size, :d1_len] = query_data[d1][:d1_len]
                X2[i + batch_size, :d2n_len] = doc_data[d2n][:d2n_len]
            for i in range(batch_size * 2):
                q = [str(x) for x in list(X1[i])]
                d = [str(x) for x in list(X2[i])]
                f.write(",".join(q) + "\t" + ",".join(d) + "\n")
def METHOD_NAME():
    """Write test data to ./data/big_test/test.txt.

    One output line per (label, query, doc) relation:
    ``<comma-joined query ids>\\t<comma-joined doc ids>\\t<label>\\t<qid>``.
    """
    rel = read_relation(filename=os.path.join(Letor07Path,
                                              'relation.test.fold1.txt'))
    # 'with' guarantees the output file is flushed and closed even if an
    # exception is raised (the original leaked the handle in that case).
    with open("./data/big_test/test.txt", "w") as f:
        for label, d1, d2 in rel:
            X1 = np.zeros(data1_maxlen, dtype=np.int32)
            X2 = np.zeros(data2_maxlen, dtype=np.int32)
            X1[:] = _PAD_
            X2[:] = _PAD_
            d1_len = min(data1_maxlen, len(query_data[d1]))
            d2_len = min(data2_maxlen, len(doc_data[d2]))
            X1[:d1_len] = query_data[d1][:d1_len]
            X2[:d2_len] = doc_data[d2][:d2_len]
            q = [str(x) for x in list(X1)]
            d = [str(x) for x in list(X2)]
            f.write(",".join(q) + "\t" + ",".join(d) + "\t" + str(label) + "\t" +
                    d1 + "\n")
# Generate both splits when the script is run (note: also at import time).
make_train()
METHOD_NAME()
5,434 | validate imsi | """
Clean and validate a DataFrame column containing
International Mobile Subscriber Identity (IMSI) numbers.
"""
# pylint: disable=too-many-lines, too-many-arguments, too-many-branches, unused-argument
from typing import Any, Union
from operator import itemgetter
import dask.dataframe as dd
import numpy as np
import pandas as pd
from stdnum import imsi
from ..progress_bar import ProgressBar
from .utils import NULL_VALUES, to_dask
def clean_imsi(
    df: Union[pd.DataFrame, dd.DataFrame],
    column: str,
    output_format: str = "standard",
    split: bool = False,
    inplace: bool = False,
    errors: str = "coerce",
    progress: bool = True,
) -> pd.DataFrame:
    """
    Clean IMSI type data in a DataFrame column.

    Parameters
    ----------
    df
        A pandas or Dask DataFrame containing the data to be cleaned.
    column
        The name of the column containing data of IMSI type.
    output_format
        The output format of standardized number string.
        If output_format = 'compact', return string without any separators.
        If output_format = 'standard', return string with proper separators.
        (default: "standard")
    split
        If True,
        each component of derived from its number string will be put into its own column.
        (default: False)
    inplace
        If True, delete the column containing the data that was cleaned.
        Otherwise, keep the original column.
        (default: False)
    errors
        How to handle parsing errors.
        - ‘coerce’: invalid parsing will be set to NaN.
        - ‘ignore’: invalid parsing will return the input.
        - ‘raise’: invalid parsing will raise an exception.
        (default: 'coerce')
    progress
        If True, display a progress bar.
        (default: True)

    Examples
    --------
    Clean a column of IMSI data.

    >>> df = pd.DataFrame({
            "imsi": [
                "429 011 234567890",
                "310-150-123456789"]
            })
    >>> clean_imsi(df, 'imsi', inplace=True)
           imsi_clean
    0 429011234567890
    1 310150123456789
    """
    if output_format not in {"compact", "standard"}:
        raise ValueError(
            f"output_format {output_format} is invalid. " 'It needs to be "compact", "standard".'
        )
    # convert to dask
    df = to_dask(df)
    # To clean, create a new column "clean_code_tup" which contains
    # the cleaned values and code indicating how the initial value was
    # changed in a tuple. Then split the column of tuples and count the
    # amount of different codes to produce the report
    df["clean_code_tup"] = df[column].map_partitions(
        lambda srs: [_format(x, output_format, split, errors) for x in srs],
        meta=object,
    )
    if split:
        # For some reason the meta data for the last 3 components needs to be
        # set. I think this is a dask bug
        df = df.assign(
            _temp_=df["clean_code_tup"].map(itemgetter(0), meta=("_temp", object)),
            mobile_country_code=df["clean_code_tup"].map(
                itemgetter(1), meta=("mobile_country_code", object)
            ),
            mobile_network_code=df["clean_code_tup"].map(
                itemgetter(2), meta=("mobile_network_code", object)
            ),
            mobile_station_identification_number=df["clean_code_tup"].map(
                itemgetter(3), meta=("mobile_station_identification_number", object)
            ),
        )
    else:
        df = df.assign(
            _temp_=df["clean_code_tup"].map(itemgetter(0)),
        )
    df = df.rename(columns={"_temp_": f"{column}_clean"})
    df = df.drop(columns=["clean_code_tup"])
    if inplace:
        # Overwrite the original column with the cleaned values, drop the
        # temporary "<column>_clean" column, then rename the original column
        # to "<column>_clean" — so only one (cleaned) column remains.
        df[column] = df[f"{column}_clean"]
        df = df.drop(columns=f"{column}_clean")
        df = df.rename(columns={column: f"{column}_clean"})
    # Trigger the lazy dask computation under a progress bar.
    with ProgressBar(minimum=1, disable=not progress):
        df = df.compute()
    # output a report describing the result of clean_country
    return df
def METHOD_NAME(
    df: Union[str, pd.Series, dd.Series, pd.DataFrame, dd.DataFrame],
    column: str = "",
) -> Union[bool, pd.Series, pd.DataFrame]:
    """
    Validate IMSI values. For a scalar, return a single bool; for a Series or
    DataFrame, return an object of the same shape with True/False per cell.

    Parameters
    ----------
    df
        A scalar value, or a pandas/Dask Series or DataFrame to validate.
    column
        The name of the column to validate; when empty, every cell of the
        DataFrame is validated.
    """
    # Series: validate element-wise.
    if isinstance(df, (pd.Series, dd.Series)):
        return df.apply(imsi.is_valid)
    # DataFrame: a named column, or every cell when no column is given.
    if isinstance(df, (pd.DataFrame, dd.DataFrame)):
        if column != "":
            return df[column].apply(imsi.is_valid)
        return df.applymap(imsi.is_valid)
    # Scalar fallback.
    return imsi.is_valid(df)
def _format(
    val: Any, output_format: str = "standard", split: bool = False, errors: str = "coerce"
) -> Any:
    """
    Reformat a number string with proper separators (formats).

    Parameters
    ----------
    val
        The value of number string.
    output_format
        If output_format = 'compact', return string without any separators.
        If output_format = 'standard', return string with proper separators function.
    split
        If True, also return the IMSI components (MCC, MNC, MSIN) after the
        compacted number.
    errors
        'coerce' (-> NaN), 'ignore' (-> original value) or 'raise' on invalid
        input.  The default was previously the typo "coarse"; it behaved like
        "coerce" because only "raise"/"ignore" are checked explicitly.
    """
    # NOTE(review): ``output_format`` is currently unused — both "compact"
    # and "standard" return imsi.compact(val).  Confirm whether "standard"
    # should use a separator-preserving formatter instead.
    val = str(val)
    result: Any = []
    if val in NULL_VALUES:
        # Null input: NaN placeholders (plus NaN components when splitting).
        if split:
            return [np.nan, np.nan, np.nan, np.nan]
        else:
            return [np.nan]
    if not METHOD_NAME(val):
        # Invalid IMSI: honor the errors policy.
        if errors == "raise":
            raise ValueError(f"Unable to parse value {val}")
        error_result = val if errors == "ignore" else np.nan
        if split:
            return [error_result, np.nan, np.nan, np.nan]
        else:
            return [error_result]
    if split:
        result = list(imsi.split(val))
    result = [imsi.compact(val)] + result
    return result
5,435 | kind | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'GetStaticSiteCustomDomainResult',
'AwaitableGetStaticSiteCustomDomainResult',
'get_static_site_custom_domain',
'get_static_site_custom_domain_output',
]
@pulumi.output_type
class GetStaticSiteCustomDomainResult:
    """
    Static Site Custom Domain Overview ARM resource.
    """
    def __init__(__self__, created_on=None, domain_name=None, error_message=None, id=None, METHOD_NAME=None, name=None, status=None, type=None, validation_token=None):
        # Generated constructor: every field is type-checked (must be a str
        # when provided) and stored via pulumi.set so the @pulumi.getter
        # properties below can retrieve it.  'METHOD_NAME' carries the ARM
        # 'kind' field, as the pulumi.set key below shows.
        if created_on and not isinstance(created_on, str):
            raise TypeError("Expected argument 'created_on' to be a str")
        pulumi.set(__self__, "created_on", created_on)
        if domain_name and not isinstance(domain_name, str):
            raise TypeError("Expected argument 'domain_name' to be a str")
        pulumi.set(__self__, "domain_name", domain_name)
        if error_message and not isinstance(error_message, str):
            raise TypeError("Expected argument 'error_message' to be a str")
        pulumi.set(__self__, "error_message", error_message)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if METHOD_NAME and not isinstance(METHOD_NAME, str):
            raise TypeError("Expected argument 'kind' to be a str")
        pulumi.set(__self__, "kind", METHOD_NAME)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if status and not isinstance(status, str):
            raise TypeError("Expected argument 'status' to be a str")
        pulumi.set(__self__, "status", status)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
        if validation_token and not isinstance(validation_token, str):
            raise TypeError("Expected argument 'validation_token' to be a str")
        pulumi.set(__self__, "validation_token", validation_token)

    @property
    @pulumi.getter(name="createdOn")
    def created_on(self) -> str:
        """
        The date and time on which the custom domain was created for the static site.
        """
        return pulumi.get(self, "created_on")

    @property
    @pulumi.getter(name="domainName")
    def domain_name(self) -> str:
        """
        The domain name for the static site custom domain.
        """
        return pulumi.get(self, "domain_name")

    @property
    @pulumi.getter(name="errorMessage")
    def error_message(self) -> str:
        return pulumi.get(self, "error_message")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Resource Id.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def METHOD_NAME(self) -> Optional[str]:
        """
        Kind of resource.
        """
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource Name.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        The status of the custom domain
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="validationToken")
    def validation_token(self) -> str:
        """
        The TXT record validation token
        """
        return pulumi.get(self, "validation_token")
class AwaitableGetStaticSiteCustomDomainResult(GetStaticSiteCustomDomainResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        # Makes the already-resolved result usable with 'await': the
        # unreachable 'yield' turns this method into a generator, which
        # immediately returns a plain result copy without ever suspending.
        if False:
            yield self
        return GetStaticSiteCustomDomainResult(
            created_on=self.created_on,
            domain_name=self.domain_name,
            error_message=self.error_message,
            id=self.id,
            METHOD_NAME=self.METHOD_NAME,
            name=self.name,
            status=self.status,
            type=self.type,
            validation_token=self.validation_token)
def get_static_site_custom_domain(domain_name: Optional[str] = None,
                                  name: Optional[str] = None,
                                  resource_group_name: Optional[str] = None,
                                  opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetStaticSiteCustomDomainResult:
    """
    Description for Gets an existing custom domain for a particular static site.

    :param str domain_name: The custom domain name.
    :param str name: Name of the static site resource to search in.
    :param str resource_group_name: Name of the resource group to which the resource belongs.
    :return: An AwaitableGetStaticSiteCustomDomainResult describing the custom domain.
    """
    # Invoke the provider synchronously and repackage the raw result dict
    # into the typed (awaitable) result object.
    __args__ = dict()
    __args__['domainName'] = domain_name
    __args__['name'] = name
    __args__['resourceGroupName'] = resource_group_name
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    __ret__ = pulumi.runtime.invoke('azure-native:web/v20220901:getStaticSiteCustomDomain', __args__, opts=opts, typ=GetStaticSiteCustomDomainResult).value
    return AwaitableGetStaticSiteCustomDomainResult(
        created_on=pulumi.get(__ret__, 'created_on'),
        domain_name=pulumi.get(__ret__, 'domain_name'),
        error_message=pulumi.get(__ret__, 'error_message'),
        id=pulumi.get(__ret__, 'id'),
        METHOD_NAME=pulumi.get(__ret__, 'kind'),
        name=pulumi.get(__ret__, 'name'),
        status=pulumi.get(__ret__, 'status'),
        type=pulumi.get(__ret__, 'type'),
        validation_token=pulumi.get(__ret__, 'validation_token'))
# Output-typed variant: the decorator lifts get_static_site_custom_domain so
# the arguments may be pulumi Outputs; the body is intentionally empty.
@_utilities.lift_output_func(get_static_site_custom_domain)
def get_static_site_custom_domain_output(domain_name: Optional[pulumi.Input[str]] = None,
                                         name: Optional[pulumi.Input[str]] = None,
                                         resource_group_name: Optional[pulumi.Input[str]] = None,
                                         opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetStaticSiteCustomDomainResult]:
    """
    Description for Gets an existing custom domain for a particular static site.

    :param str domain_name: The custom domain name.
    :param str name: Name of the static site resource to search in.
    :param str resource_group_name: Name of the resource group to which the resource belongs.
    """
    ...
5,436 | match resource sub properties | """
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
"""
import hashlib
import json
from cfnlint.helpers import RESOURCE_SPECS
from cfnlint.rules import CloudFormationLintRule, RuleMatch
class ListDuplicatesAllowed(CloudFormationLintRule):
    """Informational rule: flag repeated items in lists that permit duplicates."""

    id = "I3037"
    shortdesc = "Check if a list that allows duplicates has any duplicates"
    description = (
        "Certain lists support duplicate items. "  # space fixes "items.Provide"
        "Provide an alert when list of strings or numbers have repeats."
    )
    source_url = "https://github.com/aws-cloudformation/cfn-python-lint/blob/main/docs/rules.md#rules-1"
    tags = ["resources", "property", "list"]

    def initialize(self, cfn):
        """Register every known resource and property type for this region."""
        for resource_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get(
            "ResourceTypes"
        ):
            self.resource_property_types.append(resource_type_spec)
        for property_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get(
            "PropertyTypes"
        ):
            self.resource_sub_property_types.append(property_type_spec)

    def _check_duplicates(self, values, path, scenario=None):
        """Return RuleMatches for duplicate entries in ``values``.

        Items are hashed via a canonical (sorted-keys) JSON dump so nested
        structures compare structurally.  ``scenario`` is the condition
        assignment under which this list variant exists, if any.
        """
        matches = []
        # Set membership is O(1); the original used a list, making the scan
        # accidentally quadratic on long lists.
        seen = set()
        if isinstance(values, list):
            for index, value in enumerate(values):
                value_hash = hashlib.sha1(
                    json.dumps(value, sort_keys=True).encode("utf-8")
                ).hexdigest()
                if value_hash in seen:
                    if not scenario:
                        message = "List has a duplicate value at {0}"
                        matches.append(
                            RuleMatch(
                                path + [index],
                                message.format("/".join(map(str, path + [index]))),
                            )
                        )
                    else:
                        # With conditions the index is not stable, so report
                        # the list path plus the condition assignment instead.
                        scenario_text = " and ".join(
                            [f'condition "{k}" is {v}' for (k, v) in scenario.items()]
                        )
                        message = "List has a duplicate value at {0} when {1}"
                        matches.append(
                            RuleMatch(
                                path,
                                message.format("/".join(map(str, path)), scenario_text),
                            )
                        )
                seen.add(value_hash)
        return matches

    def check_duplicates(self, values, path, cfn):
        """Check a plain list, or every conditional variant of an object."""
        matches = []
        if isinstance(values, list):
            matches.extend(self._check_duplicates(values, path))
        elif isinstance(values, dict):
            # Expand condition scenarios and check each concrete variant.
            props = cfn.get_object_without_conditions(values)
            for prop in props:
                matches.extend(
                    self._check_duplicates(
                        prop.get("Object"), path, prop.get("Scenario")
                    )
                )
        return matches

    def check(self, cfn, properties, value_specs, path):
        """Run the duplicate check on properties specced as scalar lists
        that allow duplicates."""
        matches = []
        for p_value, p_path in properties.items_safe(path[:]):
            for prop in p_value:
                if prop in value_specs:
                    property_type = value_specs.get(prop).get("Type")
                    primitive_type = value_specs.get(prop).get("PrimitiveItemType")
                    duplicates_allowed = value_specs.get(prop).get(
                        "DuplicatesAllowed", False
                    )
                    if (
                        property_type == "List"
                        and duplicates_allowed
                        and primitive_type in ["String", "Integer"]
                    ):
                        matches.extend(
                            self.check_duplicates(p_value[prop], p_path + [prop], cfn)
                        )
        return matches

    def METHOD_NAME(self, properties, property_type, path, cfn):
        """Match for sub properties"""
        matches = []
        specs = (
            RESOURCE_SPECS.get(cfn.regions[0])
            .get("PropertyTypes")
            .get(property_type, {})
            .get("Properties", {})
        )
        matches.extend(self.check(cfn, properties, specs, path))
        return matches

    def match_resource_properties(self, properties, resource_type, path, cfn):
        """Check CloudFormation Properties"""
        matches = []
        specs = (
            RESOURCE_SPECS.get(cfn.regions[0])
            .get("ResourceTypes")
            .get(resource_type, {})
            .get("Properties", {})
        )
        matches.extend(self.check(cfn, properties, specs, path))
        return matches
5,437 | format unencoded | """
pygments.formatters.terminal
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Formatter for terminal output with ANSI sequences.
:copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.formatter import Formatter
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Token, Whitespace
from pygments.console import ansiformat
from pygments.util import get_choice_opt
__all__ = ['TerminalFormatter']
#: Map token types to a tuple of color values for light and dark
#: backgrounds.
TERMINAL_COLORS = {
Token: ('', ''),
Whitespace: ('gray', 'brightblack'),
Comment: ('gray', 'brightblack'),
Comment.Preproc: ('cyan', 'brightcyan'),
Keyword: ('blue', 'brightblue'),
Keyword.Type: ('cyan', 'brightcyan'),
Operator.Word: ('magenta', 'brightmagenta'),
Name.Builtin: ('cyan', 'brightcyan'),
Name.Function: ('green', 'brightgreen'),
Name.Namespace: ('_cyan_', '_brightcyan_'),
Name.Class: ('_green_', '_brightgreen_'),
Name.Exception: ('cyan', 'brightcyan'),
Name.Decorator: ('brightblack', 'gray'),
Name.Variable: ('red', 'brightred'),
Name.Constant: ('red', 'brightred'),
Name.Attribute: ('cyan', 'brightcyan'),
Name.Tag: ('brightblue', 'brightblue'),
String: ('yellow', 'yellow'),
Number: ('blue', 'brightblue'),
Generic.Deleted: ('brightred', 'brightred'),
Generic.Inserted: ('green', 'brightgreen'),
Generic.Heading: ('**', '**'),
Generic.Subheading: ('*magenta*', '*brightmagenta*'),
Generic.Prompt: ('**', '**'),
Generic.Error: ('brightred', 'brightred'),
Error: ('_brightred_', '_brightred_'),
}
class TerminalFormatter(Formatter):
    r"""
    Format tokens with ANSI color sequences, for output in a text console.
    Color sequences are terminated at newlines, so that paging the output
    works correctly.

    The `get_style_defs()` method doesn't do anything special since there is
    no support for common styles.

    Options accepted:

    `bg`
        Set to ``"light"`` or ``"dark"`` depending on the terminal's background
        (default: ``"light"``).

    `colorscheme`
        A dictionary mapping token types to (lightbg, darkbg) color names or
        ``None`` (default: ``None`` = use builtin colorscheme).

    `linenos`
        Set to ``True`` to have line numbers on the terminal output as well
        (default: ``False`` = no line numbers).
    """
    name = 'Terminal'
    aliases = ['terminal', 'console']
    filenames = []

    def __init__(self, **options):
        Formatter.__init__(self, **options)
        # darkbg selects the second entry of each (light, dark) color pair.
        self.darkbg = get_choice_opt(options, 'bg',
                                     ['light', 'dark'], 'light') == 'dark'
        self.colorscheme = options.get('colorscheme', None) or TERMINAL_COLORS
        self.linenos = options.get('linenos', False)
        self._lineno = 0  # running line counter used by _write_lineno

    def format(self, tokensource, outfile):
        # Delegates to the base class, which wraps outfile and eventually
        # calls METHOD_NAME below.
        return Formatter.format(self, tokensource, outfile)

    def _write_lineno(self, outfile):
        """Emit a newline (except before the first line) and the next 4-digit
        line-number prefix."""
        self._lineno += 1
        outfile.write("%s%04d: " % (self._lineno != 1 and '\n' or '', self._lineno))

    def _get_color(self, ttype):
        # self.colorscheme is a dict containing usually generic types, so we
        # have to walk the tree of dots. The base Token type must be a key,
        # even if it's empty string, as in the default above.
        colors = self.colorscheme.get(ttype)
        while colors is None:
            ttype = ttype.parent
            colors = self.colorscheme.get(ttype)
        return colors[self.darkbg]

    def METHOD_NAME(self, tokensource, outfile):
        """Write the token stream to ``outfile`` as ANSI-colored text."""
        if self.linenos:
            self._write_lineno(outfile)

        for ttype, value in tokensource:
            color = self._get_color(ttype)
            # Color is applied per physical line (newline stripped first) so
            # an ANSI sequence never spans a line break — this is what keeps
            # paged output rendering correctly.
            for line in value.splitlines(True):
                if color:
                    outfile.write(ansiformat(color, line.rstrip('\n')))
                else:
                    outfile.write(line.rstrip('\n'))
                if line.endswith('\n'):
                    # Either the line-number prefix or the plain newline is
                    # written here, never both.
                    if self.linenos:
                        self._write_lineno(outfile)
                    else:
                        outfile.write('\n')

        if self.linenos:
            outfile.write("\n")
5,438 | time to ground | # Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#***************************************
#* Simple analytic test ExternalModule *
#***************************************
#
# Simulates time-dependent track of a projectile through the air from start to 0,
# assuming no air resistance.
# Inputs:
# (x0,y0) - initial position
# v0 - initial total velocity
# ang - angle of initial motion, in degrees, with respect to flat ground
# Outputs:
# (x,y) - vector positions of projectile in time
# t - corresponding time steps
#
import os
import numpy as np
in_vars = ['x0', 'y0', 'v0', 'ang', 'timeOption']
out_vars = ['x', 'y', 'r', 't', 'v', 'a']
def prange(v,th,y0=0,g=9.8):
    """
    Calculates the analytic range.
    @ In, v, float, velocity of the projectile
    @ In, th, float, angle to the ground for initial projectile motion, in radians
        (callers convert from degrees before calling; see main())
    @ In, y0, float, optional, initial height of projectile
    @ In, g, float, optional, gravitational constant (m/s/s)
    @ Out, prange, float, range
    """
    # R = (v cos th / g) * (v sin th + sqrt((v sin th)^2 + 2 g y0))
    return v*np.cos(th)/g * (v*np.sin(th) + np.sqrt(v*v*np.sin(th)**2+2.*g*y0))
def METHOD_NAME(v,th,y0=0,g=9.8):
    """
    Calculates the analytic time of flight
    @ In, v, float, velocity of the projectile
    @ In, th, float, angle to the ground for initial projectile motion, in radians
    @ In, y0, float, optional, initial height of projectile
    @ In, g, float, optional, gravitational constant (m/s/s)
    @ Out, time_to_ground, float, time projectile is above the ground
    """
    # t = (v sin th)/g + sqrt((v sin th)^2 + 2 g y0)/g  (positive root of y(t)=0)
    return v*np.sin(th)/g + np.sqrt(v*v*np.sin(th)**2+2.*g*y0)/g
def x_pos(x0,v,t):
    """
    Horizontal position after time ``t`` under constant horizontal velocity.
    @ In, x0, float, initial horizontal position
    @ In, v, float, horizontal velocity of the projectile
    @ In, t, float, time of flight
    @ Out, x_pos, float, horizontal position
    """
    displacement = v*t
    return x0 + displacement
def y_pos(y0,v,t):
    """
    Vertical position after time ``t`` under gravity (g/2 = 4.9 m/s^2).
    @ In, y0, float, initial vertical position
    @ In, v, float, initial vertical velocity of the projectile
    @ In, t, float, time of flight
    @ Out, y_pos, float, vertical position
    """
    rise = v*t
    drop = 4.9*t*t
    return y0 + rise - drop
def calc_vel(y0, y, v0, ang, g=9.8):
    """
    Velocity components at height ``y`` via conservation of energy.
    @ In, y0, float, initial height
    @ In, y, float, current height
    @ In, v0, float, initial total speed
    @ In, ang, float, launch angle in radians
    @ In, g, float, optional, gravitational constant (m/s/s)
    @ Out, (x_vel, y_vel, vel), tuple, horizontal, vertical and total speed
    """
    # v^2 = v0^2 - 2 g (y - y0); the unused total-energy local E_m was removed.
    vel = np.sqrt(v0*v0 - 2*g*(y-y0))
    # Horizontal component is constant; vertical follows from Pythagoras.
    x_vel = v0 * np.cos(ang)
    y_vel = np.sqrt(vel*vel - x_vel*x_vel)
    return x_vel, y_vel, vel
def current_angle(v0, ang, vel):
    """
    Angle of motion (radians) when the speed is ``vel``: the constant
    horizontal component v0*cos(ang) fixes cos of the current angle.
    @ In, v0, float, initial total speed
    @ In, ang, float, launch angle in radians
    @ In, vel, float, current total speed
    @ Out, current_angle, float, current angle of motion in radians
    """
    cos_angle = v0 * np.cos(ang) / vel
    return np.arccos(cos_angle)
def run(raven, inputs):
    """RAVEN entry point: pull the inputs off the ``raven`` entity, run the
    model, and write the trajectory back as attributes of ``raven``.
    (``inputs`` is part of the required signature but unused here.)"""
    params = {
        'x0': get_from_raven('x0', raven, 0),
        'y0': get_from_raven('y0', raven, 0),
        'v0': get_from_raven('v0', raven, 1),
        'angle': get_from_raven('angle', raven, 45),
        'timeOption': get_from_raven('timeOption', raven, 0),
    }
    res = main(params)
    raven.x = res['x']
    raven.y = res['y']
    raven.t = res['t']
    # Broadcast the scalar range to the same length as the trajectory.
    raven.r = res['r'] * np.ones(len(raven.x))
    raven.v = res['v']
    raven.a = res['a']
def get_from_raven(attr, raven, default=None):
    """Fetch ``attr`` from the raven entity (falling back to ``default``) and
    squeeze away singleton dimensions."""
    value = getattr(raven, attr, default)
    return np.squeeze(value)
def main(Input):
    """
    Simulate the projectile track and return the trajectory in time.
    @ In, Input, dict, optional keys: x0, y0, v0, angle (degrees), g, timeOption
    @ Out, res, dict, trajectory arrays (x, y, t, v, a), scalar range r,
        plus the echoed inputs
    """
    x0 = Input.get('x0', 0)
    y0 = Input.get('y0', 0)
    v0 = Input.get('v0', 1)
    # NOTE(review): the key here is 'angle', but the module's in_vars list
    # uses 'ang' — an 'ang=' entry in a -i input file is silently ignored and
    # the 45-degree default is used. Confirm which key is intended.
    ang = Input.get('angle', 45)
    g = Input.get('g', 9.8)
    timeOption = Input.get('timeOption', 0)
    ang = ang * np.pi / 180  # degrees -> radians
    if timeOption == 0:
        # Fixed one-second window regardless of flight time.
        ts = np.linspace(0,1,10)
    else:
        # due to numpy library update, the return shape of np.linspace
        # is changed when an array-like input is provided, i.e. return from time_to_ground
        ts = np.linspace(0,METHOD_NAME(v0,ang,y0),10)
    vx0 = np.cos(ang)*v0  # constant horizontal velocity component
    vy0 = np.sin(ang)*v0  # initial vertical velocity component
    r = prange(v0,ang,y0)  # analytic range (scalar)
    x = np.zeros(len(ts))
    y = np.zeros(len(ts))
    v = np.zeros(len(ts))
    a = np.zeros(len(ts))
    for i,t in enumerate(ts):
        x[i] = x_pos(x0,vx0,t)
        y[i] = y_pos(y0,vy0,t)
        vx, vy, vm = calc_vel(y0, y[i], v0, ang, g)
        v[i] = vm
        a[i] = current_angle(v0, ang, vm)
    t = ts
    res = {'x': x, 'y': y, 'r': r, 't': ts, 'v': v, 'a': a,
           'x0': x0, 'y0': y0, 'v0': v0, 'ang': ang, 'timeOption': timeOption}
    return res
#can be used as a code as well
if __name__=="__main__":
    import sys
    textOutput = False
    if '-i' not in sys.argv:
        raise IOError("INPUT MUST BE PROVIDED WITH THE -i nomenclature")
    if '-o' not in sys.argv:
        raise IOError("OUTPUT MUST BE PROVIDED WITH THE -o nomenclature")
    if '-text' in sys.argv:
        textOutput = True
    inFile = sys.argv[sys.argv.index('-i')+1]
    outFile = sys.argv[sys.argv.index('-o')+1]
    #construct the input: one "key = value" pair per line
    Input = {}
    for line in open(inFile,'r'):
        arg, val = (a.strip() for a in line.split('='))
        if arg == 'aux':
            auxFile = val
        else:
            Input[arg] = float(val)
    # check auxfile exists
    # NOTE(review): if the input file has no 'aux' line, auxFile is unbound
    # here and this raises NameError rather than a helpful message — confirm
    # whether 'aux' is mandatory.
    if not os.path.isfile(auxFile):
        raise IOError('Aux file not found: {}'.format(auxFile))
    #run the code
    res = main(Input)
    #write output
    outName = outFile+ ('.txt' if textOutput else '.csv')
    delm = ' ' if textOutput else ','
    with open(outName, 'w') as outFile:
        outFile.writelines(delm.join(in_vars) + delm + delm.join(out_vars) + '\n')
        # '{{}}'.format(v) yields a literal '{}' per variable, so template is
        # one positional placeholder per column.
        template = delm.join('{{}}'.format(v) for v in in_vars + out_vars) + '\n'
        for i in range(len(res['t'])):
            # scalars (0-d entries like r, x0, ...) are repeated on every row
            this = [(res[v][i] if len(np.shape(res[v])) else res[v]) for v in in_vars + out_vars]
            outFile.writelines(template.format(*this))
        if textOutput:
            outFile.write('---------------------------------------------------------------------------\n')
            outFile.write('SUCCESS\n')
    print('Wrote results to "{}"'.format(outName))
5,439 | system data | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetDataContainerResult',
'AwaitableGetDataContainerResult',
'get_data_container',
'get_data_container_output',
]
@pulumi.output_type
class GetDataContainerResult:
    """
    Azure Resource Manager resource envelope.
    """
    def __init__(__self__, data_container_properties=None, id=None, name=None, METHOD_NAME=None, type=None):
        # Generated constructor: each field is type-checked and stored via
        # pulumi.set for retrieval by the @pulumi.getter properties below.
        # 'METHOD_NAME' carries the 'system_data' field, as the pulumi.set
        # key below shows.
        if data_container_properties and not isinstance(data_container_properties, dict):
            raise TypeError("Expected argument 'data_container_properties' to be a dict")
        pulumi.set(__self__, "data_container_properties", data_container_properties)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if METHOD_NAME and not isinstance(METHOD_NAME, dict):
            raise TypeError("Expected argument 'system_data' to be a dict")
        pulumi.set(__self__, "system_data", METHOD_NAME)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="dataContainerProperties")
    def data_container_properties(self) -> 'outputs.DataContainerResponse':
        """
        [Required] Additional attributes of the entity.
        """
        return pulumi.get(self, "data_container_properties")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="systemData")
    def METHOD_NAME(self) -> 'outputs.SystemDataResponse':
        """
        Azure Resource Manager metadata containing createdBy and modifiedBy information.
        """
        return pulumi.get(self, "system_data")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")
class AwaitableGetDataContainerResult(GetDataContainerResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        # Makes the already-resolved result usable with 'await': the
        # unreachable 'yield' turns this method into a generator, which
        # immediately returns a plain result copy without ever suspending.
        if False:
            yield self
        return GetDataContainerResult(
            data_container_properties=self.data_container_properties,
            id=self.id,
            name=self.name,
            METHOD_NAME=self.METHOD_NAME,
            type=self.type)
def get_data_container(name: Optional[str] = None,
                       resource_group_name: Optional[str] = None,
                       workspace_name: Optional[str] = None,
                       opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDataContainerResult:
    """
    Azure Resource Manager resource envelope.
    Azure REST API version: 2023-04-01.

    :param str name: Container name.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str workspace_name: Name of Azure Machine Learning workspace.
    :return: An AwaitableGetDataContainerResult describing the data container.
    """
    # Invoke the provider synchronously and repackage the raw result dict
    # into the typed (awaitable) result object.
    __args__ = dict()
    __args__['name'] = name
    __args__['resourceGroupName'] = resource_group_name
    __args__['workspaceName'] = workspace_name
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    __ret__ = pulumi.runtime.invoke('azure-native:machinelearningservices:getDataContainer', __args__, opts=opts, typ=GetDataContainerResult).value
    return AwaitableGetDataContainerResult(
        data_container_properties=pulumi.get(__ret__, 'data_container_properties'),
        id=pulumi.get(__ret__, 'id'),
        name=pulumi.get(__ret__, 'name'),
        METHOD_NAME=pulumi.get(__ret__, 'system_data'),
        type=pulumi.get(__ret__, 'type'))
# Output-typed variant: the decorator lifts get_data_container so the
# arguments may be pulumi Outputs; the body is intentionally empty.
@_utilities.lift_output_func(get_data_container)
def get_data_container_output(name: Optional[pulumi.Input[str]] = None,
                              resource_group_name: Optional[pulumi.Input[str]] = None,
                              workspace_name: Optional[pulumi.Input[str]] = None,
                              opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetDataContainerResult]:
    """
    Azure Resource Manager resource envelope.
    Azure REST API version: 2023-04-01.

    :param str name: Container name.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str workspace_name: Name of Azure Machine Learning workspace.
    """
    ...
5,440 | start | import rospy
import threading
from math import asin, atan2, pi
from nav_msgs.msg import Odometry
def quat2Yaw(qw, qx, qy, qz):
    '''
    Translates from Quaternion to Yaw.

    @param qw,qx,qy,qz: Quaternion values

    @type qw,qx,qy,qz: float

    @return Yaw value translated from Quaternion (radians)
    '''
    rotateZa0 = 2.0*(qx*qy + qw*qz)
    rotateZa1 = qw*qw + qx*qx - qy*qy - qz*qz
    # atan2 is defined for every argument pair (atan2(0, 0) == 0), so no zero
    # guard is needed.  The previous "both args non-zero" guard wrongly
    # returned 0.0 whenever either term was zero — e.g. a 180-degree yaw
    # (qz = 1) gives atan2(0, -1), which must be pi, not 0.
    return atan2(rotateZa0, rotateZa1)
def quat2Pitch(qw, qx, qy, qz):
    '''
    Translates from Quaternion to Pitch.

    @param qw,qx,qy,qz: Quaternion values

    @type qw,qx,qy,qz: float

    @return Pitch value translated from Quaternion (radians)
    '''
    sin_pitch = -2.0*(qx*qz - qw*qy)
    # Clamp to the valid asin domain; rounding can push the value just
    # outside [-1, 1].
    if sin_pitch >= 1.0:
        return pi/2.0
    if sin_pitch <= -1.0:
        return -pi/2.0
    return asin(sin_pitch)
def quat2Roll (qw, qx, qy, qz):
    '''
    Translates from Quaternion to Roll.

    @param qw,qx,qy,qz: Quaternion values

    @type qw,qx,qy,qz: float

    @return Roll value translated from Quaternion (radians)
    '''
    rotateXa0 = 2.0*(qy*qz + qw*qx)
    rotateXa1 = qw*qw - qx*qx - qy*qy + qz*qz
    # atan2 is defined for every argument pair (atan2(0, 0) == 0), so no zero
    # guard is needed.  The previous "both args non-zero" guard wrongly
    # returned 0.0 whenever either term was zero — e.g. a 180-degree roll
    # (qx = 1) gives atan2(0, -1), which must be pi, not 0.
    return atan2(rotateXa0, rotateXa1)
def odometry2Pose3D(odom):
    '''
    Translates from ROS Odometry to JderobotTypes Pose3d.

    @param odom: ROS Odometry to translate

    @type odom: Odometry

    @return a Pose3d translated from odom
    '''
    pose = Pose3d()
    position = odom.pose.pose.position
    ori = odom.pose.pose.orientation
    # Cartesian position straight from the odometry message.
    pose.x = position.x
    pose.y = position.y
    pose.z = position.z
    #pose.h = position.h
    # Euler angles derived from the orientation quaternion.
    pose.yaw = quat2Yaw(ori.w, ori.x, ori.y, ori.z)
    pose.pitch = quat2Pitch(ori.w, ori.x, ori.y, ori.z)
    pose.roll = quat2Roll(ori.w, ori.x, ori.y, ori.z)
    pose.q = [ori.w, ori.x, ori.y, ori.z]
    # Combine seconds and nanoseconds into one float timestamp.
    stamp = odom.header.stamp
    pose.timeStamp = stamp.secs + (stamp.nsecs * 1e-9)
    return pose
class Pose3d ():
    """Simple 3D pose container: position, orientation (Euler + quaternion)
    and a time stamp."""

    def __init__(self):
        self.x = 0             # X coord [meters]
        self.y = 0             # Y coord [meters]
        self.z = 0             # Z coord [meters]
        self.h = 1             # H param
        self.yaw = 0           # Yaw angle [rads]
        self.pitch = 0         # Pitch angle [rads]
        self.roll = 0          # Roll angle [rads]
        self.q = [0, 0, 0, 0]  # Quaternion
        self.timeStamp = 0     # Time stamp [s]

    def __str__(self):
        return (
            f"Pose3D: {{\n x: {self.x}\n Y: {self.y}"
            f"\n Z: {self.z}\n H: {self.h}"
            f"\n Yaw: {self.yaw}\n Pitch: {self.pitch}\n Roll: {self.roll}"
            f"\n quaternion: {self.q}\n timeStamp: {self.timeStamp}\n}}"
        )
class ListenerPose3d:
    '''
    ROS Pose3D Subscriber. Pose3D Client to Receive pose3d from ROS nodes.
    '''
    def __init__(self, topic):
        '''
        ListenerPose3d Constructor.
        @param topic: ROS topic to subscribe
        @type topic: String
        '''
        self.topic = topic
        # Last pose received; starts as a default (all-zero) Pose3d.
        self.data = Pose3d()
        self.sub = None
        # Guards self.data: written from the rospy callback thread in
        # __callback, read by getPose3d().
        self.lock = threading.Lock()
        # Subscribe immediately on construction.
        self.METHOD_NAME()
    def __callback (self, odom):
        '''
        Callback function to receive and save Pose3d.
        @param odom: ROS Odometry received
        @type odom: Odometry
        '''
        pose = odometry2Pose3D(odom)
        self.lock.acquire()
        self.data = pose
        self.lock.release()
    def stop(self):
        '''
        Stops (Unregisters) the client.
        '''
        self.sub.unregister()
    def METHOD_NAME (self):
        '''
        Starts (Subscribes) the client.
        '''
        self.sub = rospy.Subscriber(self.topic, Odometry, self.__callback)
    def getPose3d(self):
        '''
        Returns last Pose3d.
        @return last JdeRobotTypes Pose3d saved
        '''
        # Snapshot under the lock so a concurrent callback cannot swap the
        # reference mid-read.
        self.lock.acquire()
        pose = self.data
        self.lock.release()
        return pose
|
5,441 | set site | # -*- coding: utf-8 -*-
# Copyright © 2012-2023 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Vimeo directive for reStructuredText."""
import json
import requests
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from nikola.plugin_categories import RestExtension
from nikola.plugins.compile.rest import _align_choice, _align_options_base
class Plugin(RestExtension):
    """Plugin for vimeo reST directive."""
    name = "rest_vimeo"
    def METHOD_NAME(self, site):
        """Set Nikola site."""
        # Registering the directive with docutils is a side effect of site
        # setup: it makes ``.. vimeo::`` available to every reST input.
        self.site = site
        directives.register_directive('vimeo', Vimeo)
        return super().METHOD_NAME(site)
# HTML fragment emitted for each directive; the placeholders are filled via
# str.format in Vimeo.run().
CODE = """<div class="vimeo-video{align}">
<iframe src="https://player.vimeo.com/video/{vimeo_id}"
width="{width}" height="{height}"
frameborder="0" webkitAllowFullScreen="webkitAllowFullScreen" mozallowfullscreen="mozallowfullscreen" allowFullScreen="allowFullScreen">
</iframe>
</div>
"""
# Default player size used when neither option is given and the Vimeo API
# query fails.  Bug fix: the two values were swapped (height 500 x width 281
# is a portrait rectangle); 500x281 is the standard 16:9 landscape player,
# which also matches the landscape example in the Vimeo directive docstring.
VIDEO_DEFAULT_HEIGHT = 281
VIDEO_DEFAULT_WIDTH = 500
class Vimeo(Directive):
    """reST extension for inserting vimeo embedded videos.
    Usage:
        .. vimeo:: 20241459
           :height: 400
           :width: 600
    """
    has_content = True
    required_arguments = 1
    option_spec = {
        "width": directives.positive_int,
        "height": directives.positive_int,
        "align": _align_choice
    }
    # set to False for not querying the vimeo api for size
    request_size = True
    def run(self):
        """Run the vimeo directive.

        Returns a single raw-HTML node built from the CODE template.
        """
        self.check_content()
        options = {
            'vimeo_id': self.arguments[0],
            'width': VIDEO_DEFAULT_WIDTH,
            'height': VIDEO_DEFAULT_HEIGHT,
        }
        if self.request_size:
            err = self.check_modules()
            if err:
                return err
            self.set_video_size()
        # Explicit :width:/:height:/:align: options override the defaults
        # and anything fetched from the API.
        options.update(self.options)
        if self.options.get('align') in _align_options_base:
            options['align'] = ' align-' + self.options['align']
        else:
            options['align'] = ''
        return [nodes.raw('', CODE.format(**options), format='html')]
    def check_modules(self):
        """Check that modules needed to query the API are present.

        Always None here (requests/json are hard dependencies); kept so
        subclasses can veto the API query by returning an error node list.
        """
        return None
    def set_video_size(self):
        """Set video size, querying the Vimeo API when not given explicitly."""
        # Only need to make a connection if width and height aren't provided
        if 'height' not in self.options or 'width' not in self.options:
            self.options['height'] = VIDEO_DEFAULT_HEIGHT
            self.options['width'] = VIDEO_DEFAULT_WIDTH
            # attempt to retrieve video attributes from vimeo
            # (the always-true ``if json:`` guard was removed: json is
            # imported unconditionally at the top of this module)
            try:
                url = ('https://vimeo.com/api/v2/video/{0}'
                       '.json'.format(self.arguments[0]))
                # Bug fix: without a timeout an unreachable vimeo.com hangs
                # the whole site build; any failure (incl. Timeout) is caught
                # below and falls back to the defaults.
                data = requests.get(url, timeout=10).text
                video_attributes = json.loads(data)[0]
                self.options['height'] = video_attributes['height']
                self.options['width'] = video_attributes['width']
            except Exception:
                # fall back to the defaults
                pass
    def check_content(self):
        """Check if content exists."""
        if self.content:
            raise self.warning("This directive does not accept content. The "
                               "'key=value' format for options is deprecated, "
                               "use ':key: value' instead")
5,442 | save | # Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module holds data for party roles in a business."""
from __future__ import annotations
from datetime import datetime
from enum import Enum
from sqlalchemy import Date, cast, or_
from .db import db # noqa: I001
from .party import Party # noqa: I001,F401,I003 pylint: disable=unused-import; needed by the SQLAlchemy rel
class PartyRole(db.Model):
    """Class that manages data for party roles related to a business."""
    class RoleTypes(Enum):
        """Render an Enum of the role types."""
        APPLICANT = 'applicant'
        COMPLETING_PARTY = 'completing_party'
        CUSTODIAN = 'custodian'
        DIRECTOR = 'director'
        INCORPORATOR = 'incorporator'
        LIQUIDATOR = 'liquidator'
        PROPRIETOR = 'proprietor'
        PARTNER = 'partner'
    # NOTE(review): presumably enables row history versioning
    # (sqlalchemy-continuum style) — confirm against the db module.
    __versioned__ = {}
    __tablename__ = 'party_roles'
    id = db.Column(db.Integer, primary_key=True)
    # NOTE(review): default is the Enum member itself, not
    # RoleTypes.DIRECTOR.value — verify what string actually lands in the
    # 30-char column.
    role = db.Column('role', db.String(30), default=RoleTypes.DIRECTOR)
    appointment_date = db.Column('appointment_date', db.DateTime(timezone=True))
    cessation_date = db.Column('cessation_date', db.DateTime(timezone=True))
    business_id = db.Column('business_id', db.Integer, db.ForeignKey('businesses.id'))
    filing_id = db.Column('filing_id', db.Integer, db.ForeignKey('filings.id'))
    party_id = db.Column('party_id', db.Integer, db.ForeignKey('parties.id'))
    # relationships
    party = db.relationship('Party')
    def METHOD_NAME(self):
        """Save the object to the database immediately."""
        db.session.add(self)
        db.session.commit()
    @property
    def json(self) -> dict:
        """Return the party member as a json object."""
        party = {
            **self.party.json,
            # datetime.date(dt) is the unbound-method form of dt.date():
            # it strips the time component before isoformat().
            'appointmentDate': datetime.date(self.appointment_date).isoformat(),
            'cessationDate': datetime.date(self.cessation_date).isoformat() if self.cessation_date else None,
            'role': self.role
        }
        return party
    @classmethod
    def find_by_internal_id(cls, internal_id: int) -> PartyRole:
        """Return a party role by the internal id."""
        party_role = None
        if internal_id:
            party_role = cls.query.filter_by(id=internal_id).one_or_none()
        return party_role
    @classmethod
    def find_party_by_name(cls, business_id: int, first_name: str, # pylint: disable=too-many-arguments; one too many
                           last_name: str, middle_initial: str, org_name: str) -> Party:
        """Return a Party connected to the given business_id by the given name."""
        party_roles = cls.query.filter_by(business_id=business_id).all()
        party = None
        # the given name to find: org name wins, otherwise a person name
        # assembled with an optional middle initial
        search_name = ''
        if org_name:
            search_name = org_name
        elif middle_initial:
            search_name = ' '.join((first_name.strip(), middle_initial.strip(), last_name.strip()))
        else:
            search_name = ' '.join((first_name.strip(), last_name.strip()))
        for role in party_roles:
            # the name of the party for each role; compared case-insensitively
            name = role.party.name
            if name and name.strip().upper() == search_name.strip().upper():
                party = role.party
                break
        return party
    @staticmethod
    def get_parties_by_role(business_id: int, role: str) -> list:
        """Return all people/oraganizations with the given role for this business (ceased + current)."""
        members = db.session.query(PartyRole). \
            filter(PartyRole.business_id == business_id). \
            filter(PartyRole.role == role). \
            all()
        return members
    @staticmethod
    def get_active_directors(business_id: int, end_date: datetime) -> list:
        """Return the active directors as of given date."""
        # Active = appointed on/before end_date and not ceased by end_date.
        directors = db.session.query(PartyRole). \
            filter(PartyRole.business_id == business_id). \
            filter(PartyRole.role == PartyRole.RoleTypes.DIRECTOR.value). \
            filter(cast(PartyRole.appointment_date, Date) <= end_date). \
            filter(or_(PartyRole.cessation_date.is_(None), cast(PartyRole.cessation_date, Date) > end_date)). \
            all()
        return directors
    @staticmethod
    def get_party_roles(business_id: int, end_date: datetime, role: str = None) -> list:
        """Return the parties that match the filter conditions."""
        party_roles = db.session.query(PartyRole). \
            filter(PartyRole.business_id == business_id). \
            filter(cast(PartyRole.appointment_date, Date) <= end_date). \
            filter(or_(PartyRole.cessation_date.is_(None), cast(PartyRole.cessation_date, Date) > end_date))
        if role is not None:
            party_roles = party_roles.filter(PartyRole.role == role.lower())
        party_roles = party_roles.all()
        return party_roles
    @staticmethod
    def get_party_roles_by_party_id(business_id: int, party_id: int) -> list:
        """Return the parties that match the filter conditions."""
        party_roles = db.session.query(PartyRole). \
            filter(PartyRole.business_id == business_id). \
            filter(PartyRole.party_id == party_id). \
            all()
        return party_roles
    @staticmethod
    def get_party_roles_by_filing(filing_id: int, end_date: datetime, role: str = None) -> list:
        """Return the parties that match the filter conditions."""
        party_roles = db.session.query(PartyRole). \
            filter(PartyRole.filing_id == filing_id). \
            filter(cast(PartyRole.appointment_date, Date) <= end_date). \
            filter(or_(PartyRole.cessation_date.is_(None), cast(PartyRole.cessation_date, Date) > end_date))
        if role is not None:
            party_roles = party_roles.filter(PartyRole.role == role.lower())
        party_roles = party_roles.all()
        return party_roles
5,443 | run save | from collections import namedtuple
from d20 import roll
import cogs5e.initiative as init
from cogs5e.models import embeds
from cogs5e.models.errors import InvalidArgument
from cogs5e.models.sheet.base import Skill
from utils.constants import SKILL_MAP, STAT_ABBREVIATIONS, STAT_NAMES
from utils.functions import a_or_an, camel_to_title, maybe_http_url, verbose_stat
def update_csetting_args(char, args, skill=None):
    """
    Merges a character's csettings into a ParsedArguments in place.

    :type char: cogs5e.models.character.Character
    :type args: utils.argparser.ParsedArguments
    :type skill: cogs5e.models.sheet.base.Skill or None
    :return:
    """
    # Reliable Talent (#654): proficient checks treat any d20 result below 10 as 10.
    talent_applies = bool(char.options.talent and (skill and skill.prof >= 1))
    minimum_roll = 10 if talent_applies else 0
    args["mc"] = args.get("mc") or minimum_roll
    # Halfling Luck: propagate the character's reroll setting.
    args["ro"] = char.options.reroll
def run_check(skill_key, caster, args, embed):
    """
    Runs a caster's skill check, building on an existing embed and handling most arguments.
    :type skill_key: str
    :type caster: cogs5e.models.sheet.statblock.StatBlock
    :type args: utils.argparser.ParsedArguments
    :type embed: disnake.Embed
    :return: The total of each check.
    :rtype: CheckResult
    """
    skill = caster.skills[skill_key]
    skill_name = camel_to_title(skill_key)
    mod = skill.value
    base_ability_key = SKILL_MAP[skill_key]
    # str/dex/con/int/wis/cha: an ability-override flag swaps which ability
    # modifier backs the check
    if any(args.last(s, type_=bool) for s in STAT_ABBREVIATIONS):
        base = next(s for s in STAT_ABBREVIATIONS if args.last(s, type_=bool))
        # remove the default ability's mod, add the requested ability's mod
        mod = mod - caster.stats.get_mod(base_ability_key) + caster.stats.get_mod(base)
        base_ability_key = STAT_NAMES[STAT_ABBREVIATIONS.index(base)]
        skill_name = f"{verbose_stat(base)} ({skill_name})"
    # -title: custom title with [name]/[cname] substitution; -h hides identity
    if args.last("title"):
        embed.title = args.last("title", "").replace("[name]", caster.get_title_name()).replace("[cname]", skill_name)
    elif args.last("h"):
        embed.title = f"An unknown creature makes {a_or_an(skill_name)} check!"
    else:
        embed.title = f"{caster.get_title_name()} makes {a_or_an(skill_name)} check!"
    # ieffect handling: only combatants in initiative carry active effects
    if isinstance(caster, init.Combatant):
        # -cb: flat check bonuses from active effects
        args["b"] = args.get("b") + caster.active_effects(mapper=lambda effect: effect.effects.check_bonus, default=[])
        # -cadv/cdis: adv/dis granted per skill or per base ability
        cadv_effects = caster.active_effects(
            mapper=lambda effect: effect.effects.check_adv, reducer=lambda checks: set().union(*checks), default=set()
        )
        cdis_effects = caster.active_effects(
            mapper=lambda effect: effect.effects.check_dis, reducer=lambda checks: set().union(*checks), default=set()
        )
        if skill_key in cadv_effects or base_ability_key in cadv_effects:
            args["adv"] = True
        if skill_key in cdis_effects or base_ability_key in cdis_effects:
            args["dis"] = True
    result = _run_common(skill, args, embed, mod_override=mod)
    return CheckResult(rolls=result.rolls, skill=skill, skill_name=skill_name, skill_roll_result=result)
def METHOD_NAME(save_key, caster, args, embed):
    """
    Runs a caster's saving throw, building on an existing embed and handling most arguments.
    Also handles save bonuses from ieffects if caster is a combatant.
    :type save_key: str
    :type caster: cogs5e.models.sheet.statblock.StatBlock
    :type args: utils.argparser.ParsedArguments
    :type embed: disnake.Embed
    :return: The total of each save.
    :rtype: SaveResult
    """
    if save_key.startswith("death"):
        # death saves are a flat d20: zero-proficiency Skill, no ability mod
        save = Skill(0)
        stat_name = stat = "Death"
        save_name = "Death Save"
    else:
        try:
            save = caster.saves.get(save_key)
            # first three letters of the save key are the stat abbreviation
            stat = save_key[:3]
            stat_name = verbose_stat(stat).title()
            save_name = f"{stat_name} Save"
        except ValueError:
            raise InvalidArgument("That's not a valid save.")
    # -title: custom title with [name]/[sname] substitution; -h hides identity
    if args.last("title"):
        embed.title = args.last("title", "").replace("[name]", caster.get_title_name()).replace("[sname]", save_name)
    elif args.last("h"):
        embed.title = f"An unknown creature makes {a_or_an(save_name)}!"
    else:
        embed.title = f"{caster.get_title_name()} makes {a_or_an(save_name)}!"
    # ieffect handling: only combatants in initiative carry active effects
    if isinstance(caster, init.Combatant):
        # -sb: flat save bonuses from active effects
        args["b"] = args.get("b") + caster.active_effects(mapper=lambda effect: effect.effects.save_bonus, default=[])
        # -sadv/sdis: adv/dis granted per stat
        sadv_effects = caster.active_effects(
            mapper=lambda effect: effect.effects.save_adv, reducer=lambda saves: set().union(*saves), default=set()
        )
        sdis_effects = caster.active_effects(
            mapper=lambda effect: effect.effects.save_dis, reducer=lambda saves: set().union(*saves), default=set()
        )
        if stat in sadv_effects:
            args["adv"] = True  # Because adv() only checks last() just forcibly add them
        if stat in sdis_effects:
            args["dis"] = True
    result = _run_common(save, args, embed, rr_format="Save {}")
    return SaveResult(rolls=result.rolls, skill=save, skill_name=stat_name, skill_roll_result=result)
def _run_common(skill, args, embed, mod_override=None, rr_format="Check {}"):
    """
    Runs a roll for a given Skill.

    Reads rr (iterations, clamped to 1..25), dc, ro (reroll), mc (minimum
    roll), plus ephemeral adv/dis and b per iteration, writes the rendered
    results into the embed, and returns the collected rolls.
    :rtype: SkillRollResult
    """
    # ephemeral support: adv, b
    # phrase
    phrase = args.join("phrase", "\n")
    # num rolls
    iterations = max(min(args.last("rr", 1, int), 25), 1)
    # dc
    dc = args.last("dc", type_=int)
    # ro
    ro = args.last("ro", type_=int)
    # mc
    mc = args.last("mc", type_=int)
    desc_out = []
    num_successes = 0
    results = []
    # add DC text
    if dc:
        desc_out.append(f"**DC {dc}**")
    for i in range(iterations):
        # advantage (ephemeral: may differ per iteration)
        adv = args.adv(boolwise=True, ephem=True)
        # roll bonus (ephemeral: may differ per iteration)
        b = args.join("b", "+", ephem=True)
        # set up dice
        roll_str = skill.d20(base_adv=adv, reroll=ro, min_val=mc, mod_override=mod_override)
        if b is not None:
            roll_str = f"{roll_str}+{b}"
        # roll
        result = roll(roll_str)
        if dc and result.total >= dc:
            num_successes += 1
        results.append(result)
        # output: one embed field per roll when repeating, else inline text
        if iterations > 1:
            embed.add_field(name=rr_format.format(str(i + 1)), value=result.result)
        else:
            desc_out.append(result.result)
    # phrase
    if phrase:
        desc_out.append(f"*{phrase}*")
    # DC footer: tally for repeated rolls, single verdict otherwise
    if iterations > 1 and dc:
        embed.set_footer(text=f"{num_successes} Successes | {iterations - num_successes} Failures")
    elif dc:
        embed.set_footer(text="Success!" if num_successes else "Failure!")
    # build embed
    embed.description = "\n".join(desc_out)
    embeds.add_fields_from_args(embed, args.get("f"))
    if "thumb" in args:
        embed.set_thumbnail(url=maybe_http_url(args.last("thumb", "")))
    return SkillRollResult(rolls=results, iterations=iterations, dc=dc, successes=num_successes)
# Result of _run_common: the raw rolls, how many were made, the DC (or None)
# and how many met it.
SkillRollResult = namedtuple("SkillRollResult", "rolls iterations dc successes")
# Result of run_check: the rolls plus the Skill used, its display name, and
# the underlying SkillRollResult.
CheckResult = namedtuple("CheckResult", "rolls skill skill_name skill_roll_result")
# Result of a saving throw; same shape as CheckResult.
SaveResult = namedtuple("SaveResult", "rolls skill skill_name skill_roll_result")
5,444 | get data frame | # GridCal
# Copyright (C) 2022 Santiago Peñate Vera
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import io
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from PySide2.QtWidgets import *
from PySide2 import QtCore
from GridCal.Engine.Simulations.result_types import ResultTypes
from GridCal.Engine.Simulations.results_table import ResultsTable
def fast_data_to_numpy_text(data):
    """
    Render a 1D or 2D numpy array as a numpy-style text literal with
    6-decimal fixed-point numbers; any other rank yields '[]'.
    :param data: numpy array
    :return: string such as "[1.000000, 2.000000]"
    """
    def _row(values):
        # One bracketed, comma-separated row of formatted values.
        return '[' + ', '.join('{0:.6f}'.format(v) for v in values) + ']'

    ndim = len(data.shape)
    if ndim == 1:
        return _row(data)
    if ndim == 2:
        if data.shape[1] > 1:
            # One row per line, each followed by a trailing comma + newline
            # (kept identical to the historical output).
            body = ''.join(_row(data[t, :]) + ',\n' for t in range(data.shape[0]))
            return '[' + body + ']'
        # Single-column matrix collapses to a flat vector literal.
        return _row(data[:, 0])
    return '[]'
class ResultsModel(QtCore.QAbstractTableModel):
    """
    Class to populate a Qt table view with data from the results
    """
    def __init__(self, table: ResultsTable, parent=None):
        """
        :param table: ResultsTable holding the values, headers and formatting
        """
        QtCore.QAbstractTableModel.__init__(self, parent)
        self.table = table
        self.units = table.units
    def flags(self, index):
        # Cells beyond editable_min_idx are editable only when the table
        # itself is marked editable; everything else is read-only.
        if self.table.editable and index.column() > self.table.editable_min_idx:
            return QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
        else:
            return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
    def rowCount(self, parent=None):
        """
        :param parent: unused (required by the Qt model interface)
        :return: number of rows in the backing table
        """
        return self.table.r
    def columnCount(self, parent=None):
        """
        :param parent: unused (required by the Qt model interface)
        :return: number of columns in the backing table
        """
        return self.table.c
    def data(self, index, role=QtCore.Qt.DisplayRole):
        """
        Render one cell for the view.
        :param index: QModelIndex of the cell
        :param role: Qt display role
        :return: formatted string for DisplayRole, otherwise None
        """
        if index.isValid():
            val = self.table.data_c[index.row(), index.column()]
            if role == QtCore.Qt.DisplayRole:
                if isinstance(val, str):
                    return val
                elif isinstance(val, complex):
                    # show '0' for exact complex zero, formatted value otherwise
                    if val.real != 0 or val.imag != 0:
                        return val.__format__(self.table.format_string)
                    else:
                        return '0'
                else:
                    if val != 0:
                        return val.__format__(self.table.format_string)
                    else:
                        return '0'
            elif role == QtCore.Qt.BackgroundRole:
                return None  # QBrush(Qt.yellow)
        return None
    def headerData(self, section, orientation, role=None):
        """
        Get the header value
        :param section: header index
        :param orientation: Orientation {QtCore.Qt.Horizontal, QtCore.Qt.Vertical}
        :param role:
        :return:
        """
        if role == QtCore.Qt.DisplayRole:
            if orientation == QtCore.Qt.Horizontal:
                if len(self.table.cols_c) > section:
                    return self.table.cols_c[section]
            elif orientation == QtCore.Qt.Vertical:
                # fall back to the row number when there is no index;
                # date indices are rendered with an explicit format
                if self.table.index_c is None:
                    return section
                else:
                    if self.table.isDate:
                        return self.table.index_c[section].strftime('%Y/%m/%d %H:%M.%S')
                    else:
                        return str(self.table.index_c[section])
        return None
    def slice_cols(self, col_idx) -> "ResultsModel":
        """
        Make column slicing
        :param col_idx: indices of the columns
        :return: Nothing
        """
        return ResultsModel(self.table.slice_cols(col_idx))
    def search_in_columns(self, txt):
        """
        Search stuff
        :param txt: text to search for in the column headers
        :return: new ResultsModel with the matching columns, or None
        """
        print('Searching', txt)
        mdl = self.table.search_in_columns(txt)
        if mdl is not None:
            return ResultsModel(mdl)
        else:
            return None
    def search(self, txt):
        """
        Search stuff
        :param txt: text to search for anywhere in the table
        :return: new ResultsModel with the matches, or None
        """
        mdl = self.table.search(txt)
        if mdl is not None:
            return ResultsModel(mdl)
        else:
            return None
    def copy_to_column(self, row, col):
        """
        Copies one value to all the column
        @param row: Row of the value
        @param col: Column of the value
        @return: Nothing
        """
        self.table.copy_to_column(row, col)
    def is_complex(self):
        # True when the underlying table stores complex values
        return self.table.is_complex()
    def get_data(self):
        """
        Returns: index, columns, data
        """
        return self.table.get_data()
    def convert_to_cdf(self):
        """
        Convert the data in-place to CDF based
        :return:
        """
        # calculate the proportional values of samples
        self.table.convert_to_cdf()
    def convert_to_abs(self):
        """
        Convert the data to abs
        :return:
        """
        self.table.convert_to_abs()
    def to_df(self):
        """
        get DataFrame
        """
        return self.table.to_df()
    def save_to_excel(self, file_name):
        """
        save data to excel
        :param file_name:
        """
        self.to_df().to_excel(file_name)
    def save_to_csv(self, file_name):
        """
        Save data to csv
        :param file_name:
        """
        self.to_df().to_csv(file_name)
    def METHOD_NAME(self):
        """
        Save data to csv
        """
        return self.table.METHOD_NAME()
    def copy_to_clipboard(self):
        """
        Copy profiles to clipboard
        """
        n = len(self.table.cols_c)
        if n > 0:
            df = self.METHOD_NAME()
            # serialize as tab-separated text so spreadsheets paste it cleanly
            s = io.StringIO()
            df.to_csv(s, sep='\t')
            txt = s.getvalue()
            # copy to clipboard
            cb = QApplication.clipboard()
            cb.clear(mode=cb.Clipboard)
            cb.setText(txt, mode=cb.Clipboard)
        else:
            # there are no elements
            pass
    def copy_numpy_to_clipboard(self):
        """
        Copy profiles to clipboard
        """
        n = len(self.table.cols_c)
        if n > 0:
            index, columns, data = self.get_data()
            # numpy-literal text representation (see fast_data_to_numpy_text)
            txt = fast_data_to_numpy_text(data)
            # copy to clipboard
            cb = QApplication.clipboard()
            cb.clear(mode=cb.Clipboard)
            cb.setText(txt, mode=cb.Clipboard)
        else:
            # there are no elements
            pass
    def plot(self, ax=None, selected_col_idx=None, selected_rows=None, stacked=False):
        """
        Plot the data model
        :param ax: Matplotlib axis
        :param selected_col_idx: list of selected column indices
        :param selected_rows: list of rows to plot
        """
        self.table.plot(ax=ax, selected_col_idx=selected_col_idx, selected_rows=selected_rows, stacked=stacked)
5,445 | usage | #!/usr/bin/env python3
# coding: utf-8
# Copyright (C) 1994-2021 Altair Engineering, Inc.
# For more information, contact Altair at www.altair.com.
#
# This file is part of both the OpenPBS software ("OpenPBS")
# and the PBS Professional ("PBS Pro") software.
#
# Open Source License Information:
#
# OpenPBS is free software. You can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# OpenPBS is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Commercial License Information:
#
# PBS Pro is commercially licensed software that shares a common core with
# the OpenPBS software. For a copy of the commercial license terms and
# conditions, go to: (http://www.pbspro.com/agreement.html) or contact the
# Altair Legal Department.
#
# Altair's dual-license business model allows companies, individuals, and
# organizations to create proprietary derivative works of OpenPBS and
# distribute them - whether embedded or bundled with other software -
# under a commercial license agreement.
#
# Use of Altair's trademarks, including but not limited to "PBS™",
# "OpenPBS®", "PBS Professional®", and "PBS Pro™" and Altair's logos is
# subject to Altair's trademark licensing policies.
import os
import sys
import getopt
import logging
import logging.config
import errno
import ptl
from ptl.utils.pbs_cliutils import CliUtils
from ptl.utils.pbs_covutils import LcovUtils
from ptl.lib.pbs_testlib import PtlConfig
# trap SIGINT and SIGPIPE
def trap_exceptions(etype, value, tb):
    """Excepthook that silences broken-pipe IOErrors (e.g. output piped into
    ``head``) and forwards every other exception to the default handler."""
    # Restore the default hook first so this handler runs at most once.
    sys.excepthook = sys.__excepthook__
    broken_pipe = issubclass(etype, IOError) and value.errno == errno.EPIPE
    if not broken_pipe:
        sys.__excepthook__(etype, value, tb)
sys.excepthook = trap_exceptions
def METHOD_NAME():
    """Print the command-line usage summary for the coverage tool to stdout."""
    prog = os.path.basename(sys.argv[0])
    lines = [
        'Usage: ' + prog + ' [OPTION]\n\n',
        ' code coverage tools\n\n',
        '-c: capture coverage\n',
        '-d <path>: path to directory that contains coverage data\n',
        '-i: initialize coverage\n',
        '-o <path>: path to output directory\n',
        '-m <f1,f2>: merge comma-separated coverage files\n',
        '-r <path>: path to file to remove coverage patterns from\n',
        '-z: reset coverage counters\n',
        '--exclude=<p1,p2>: comma-separated pattern of files to exclude\n',
        '--summarize: summarize coverage analysis\n',
        '--html: Generate HTML from coverage analysis\n',
        '--no-source: don\'t include PBS source in coverage analysis',
        ' (Must be used with --html)\n',
        '--baseurl=<url>: use <url> as baseurl in html report',
        ' (Must be used with --html)\n',
        ' Default source will be in coverage analysis\n',
        '--version: print version number and exit\n',
    ]
    print("".join(lines))
if __name__ == '__main__':
    # Command-line driver for the LcovUtils coverage helper.
    if len(sys.argv) < 2:
        METHOD_NAME()
        sys.exit(1)
    # Option state, populated by the getopt loop below.
    data_dir = None
    capture = None
    initialize = None
    merge = None
    reset = None
    remove = None
    out = None
    html_nosrc = False
    html = False
    html_baseurl = None
    # Default lcov patterns excluded from the merged analysis.
    exclude = ['"*work/gSOAP/*"', '"*/pbs/doc/*"', 'lex.yy.c',
               'pbs_ifl_wrap.c', 'usr/include/*', 'unsupported/*']
    summarize = None
    lvl = logging.INFO
    logconf = None
    # Bug fix: 'log-conf=' was handled in the loop below but missing from
    # lopts, so getopt rejected --log-conf before the handler could run.
    lopts = ["version", "exclude=", "summarize", 'no-source', 'html']
    lopts += ['baseurl=', 'log-conf=']
    try:
        # Bug fix: '-m' takes a file list per the usage text and the handler
        # below, so it needs a ':' in the short-option spec (was "...mo:...").
        opts, args = getopt.getopt(sys.argv[1:], "ciszd:m:o:l:rh", lopts)
    except Exception:
        METHOD_NAME()
        sys.exit(1)
    for o, val in opts:
        if o == '-d':
            data_dir = CliUtils.expand_abs_path(val)
        elif o == '-c':
            capture = True
        elif o == '-o':
            out = CliUtils.expand_abs_path(val)
        elif o == '-i':
            initialize = True
        elif o == '-l':
            lvl = CliUtils.get_logging_level(val)
        elif o == '-m':
            merge = val
        elif o == '-r':
            # NOTE(review): parsed but currently unused below — confirm intent.
            remove = CliUtils.expand_abs_path(val)
        elif o == '-z':
            reset = True
        elif o == '-h':
            METHOD_NAME()
            sys.exit(0)
        elif o == '--exclude':
            exclude = val.split(',')
        elif o == '--log-conf':
            logconf = val
        elif o in ('-s', '--summarize'):
            summarize = True
        elif o == '--html':
            html = True
        elif o == '--no-source':
            # Bug fix: this assigned False, so --no-source had no effect and
            # the '--no-source must be used with --html' guard below was
            # unreachable.
            html_nosrc = True
        elif o == '--baseurl':
            html_baseurl = val
        elif o == '--version':
            print(ptl.__version__)
            sys.exit(0)
        else:
            sys.stderr.write("Unrecognized option")
            METHOD_NAME()
            sys.exit(1)
    PtlConfig()
    # Logging: either a config file (--log-conf) or a basic setup at -l level.
    if logconf:
        logging.config.fileConfig(logconf)
    else:
        logging.basicConfig(level=lvl)
    if html_nosrc and not html:
        logging.error('--no-source must be used with --html')
        sys.exit(1)
    if html_baseurl and not html:
        logging.error('--baseurl must be used with --html')
        sys.exit(1)
    cu = LcovUtils(cov_out=out, data_dir=data_dir, html_nosrc=html_nosrc,
                   html_baseurl=html_baseurl)
    # Perform the requested actions in a fixed order: reset, init, capture,
    # merge, html generation, summary.
    if reset:
        cu.zero_coverage()
    if initialize:
        cu.initialize_coverage()
    if capture:
        cu.capture_coverage()
    if merge is not None:
        for m in merge.split(','):
            cu.add_trace(m)
        cu.merge_coverage_traces(exclude=exclude)
    if html:
        cu.generate_html()
    if html_baseurl:
        cu.change_baseurl()
    if summarize:
        cu.summarize_coverage()
5,446 | get future | # Owner(s): ["oncall: distributed"]
import os
import torch
import torch.distributed as dist
from torch.testing._internal.common_utils import (
run_tests,
)
from torch.futures import Future
import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel as DDP
import test_c10d_common
import weakref
from torch._C._distributed_c10d import _create_work_from_future
from torch.testing._internal.common_distributed import (
MultiProcessTestCase,
)
def create_work(result):
    """Wrap *result* in an already-completed Future and expose it as a c10d Work."""
    completed = Future()
    completed.set_result(result)
    return _create_work_from_future(completed)
class MyWork(dist._Work):
    """Work subclass that records wait()/get_future() call counts on its process group."""
    def __init__(self, result, pg):
        super().__init__()
        self.result_ = result
        # The future is completed immediately with the given result.
        self.future_ = torch.futures.Future()
        self.future_.set_result(result)
        # Weak reference avoids a Work <-> ProcessGroup reference cycle.
        self.pg_ = weakref.ref(pg)
    def wait(self, timeout):
        # Count the call on the owning process group; the work is always done.
        self.pg_().wait_count += 1
        return True
    def METHOD_NAME(self):
        # Count the call and hand back the pre-completed future.
        self.pg_().get_future_count += 1
        return self.future_
class LonelyRankProcessGroup(dist.ProcessGroup):
    """
    This PG only supports world_size of 1
    """
    def __init__(self, rank, world, use_wrapper):
        super().__init__(rank, world)
        assert rank == 0
        assert world == 1
        self._rank = rank
        self._world = world
        # Counters incremented by MyWork to verify DDP actually invokes them.
        self.wait_count = 0
        self.get_future_count = 0
        # use_wrapper=True returns wrapped futures (create_work) instead of
        # the counting MyWork objects.
        self.use_wrapper = use_wrapper
        self._work = []
    def broadcast(self, tensor_list, opts):
        # With world_size 1 a broadcast is a no-op: return the input as done.
        if self.use_wrapper:
            return create_work(tensor_list)
        res = MyWork(tensor_list, self)
        self._work.append(res)
        return res
    def allgather(self, output_tensors, input_tensor, opts):
        # Single rank: gathering is just copying our input into the output slots.
        for o, i in zip(output_tensors[0], input_tensor):
            o.copy_(i)
        if self.use_wrapper:
            return create_work(output_tensors)
        res = MyWork(output_tensors, self)
        self._work.append(res)
        return res
    def allreduce(self, tensors, opts):
        # Single rank: the reduction result is the input itself.
        if self.use_wrapper:
            return create_work(tensors)
        res = MyWork(tensors, self)
        self._work.append(res)
        return res
    def size(self):
        return self._world
    def getBackendName(self):
        return "lonely-pg"
    def __repr__(self):
        return f"PLG w:{self._world} r:{self._rank}"
# We cannot use parametrize as some tests are defined on the base class and use _get_process_group
class AbstractDDPSingleRank(test_c10d_common.CommonDistributedDataParallelTest):
    """DDP tests driven by the single-rank Python process group above."""
    def setUp(self):
        super().setUp()
        self._spawn_processes()
    @property
    def world_size(self):
        # This process group only supports a single rank.
        return 1
    def tearDown(self):
        super().tearDown()
        try:
            os.remove(self.file_name)
        except OSError:
            pass
    def _get_process_group(self):
        # use_wrapper is supplied by the concrete subclasses below.
        return LonelyRankProcessGroup(self.rank, self.world_size, self.use_wrapper)
    def test_ddp_invoke_work_object(self):
        pg = self._get_process_group()
        torch.manual_seed(123)
        model = nn.Sequential(
            nn.Linear(2, 2),
            nn.ReLU()
        )
        wrapped_model = model
        input_tensor = torch.rand(2)
        model = DDP(model, process_group=pg)
        model(input_tensor).sum().backward()
        ddp_grad = wrapped_model[0].bias.grad.clone()
        wrapped_model.zero_grad()
        wrapped_model(input_tensor).sum().backward()
        # With world_size 1, DDP must reproduce the bare model's gradients.
        self.assertEqual(wrapped_model[0].bias.grad, ddp_grad)
        if not self.use_wrapper:
            # The counting MyWork objects prove DDP called wait()/get_future().
            self.assertTrue(pg.wait_count > 0)
            self.assertTrue(pg.get_future_count > 0)
    def test_ddp_with_pypg(self):
        pg = self._get_process_group()
        self._test_ddp_with_process_group(pg, [torch.device("cpu")], device_ids=None)
    def test_ddp_with_pypg_with_grad_views(self):
        pg = self._get_process_group()
        self._test_ddp_with_process_group(pg, [torch.device("cpu")], device_ids=None, gradient_as_bucket_view=True)
class TestDDPWithWorkSubclass(AbstractDDPSingleRank, MultiProcessTestCase):
    """Runs the single-rank DDP suite with counting MyWork objects."""
    @property
    def use_wrapper(self):
        return False
class TestDDPWithWorkWrapper(AbstractDDPSingleRank, MultiProcessTestCase):
    """Runs the single-rank DDP suite with future-wrapped work objects."""
    @property
    def use_wrapper(self):
        return True
if __name__ == '__main__':
    # Entry point: run every test class defined in this module.
    run_tests()
5,447 | test process user is set | import sys
import pytest
import salt.modules.win_status as status
from tests.support.mock import ANY, Mock, patch
from tests.support.unit import TestCase
try:
import wmi
except ImportError:
pass
@pytest.mark.skipif(status.HAS_WMI is False, reason="This test requires Windows")
class TestProcsBase(TestCase):
    """Base fixture: builds mocked WMI Win32_Process objects and runs status.procs() over them."""
    def __init__(self, *args, **kwargs):
        TestCase.__init__(self, *args, **kwargs)
        self.__processes = []
    def add_process(
        self,
        pid=100,
        cmd="cmd",
        name="name",
        user="user",
        user_domain="domain",
        get_owner_result=0,
    ):
        # Mock one WMI process; GetOwner returns (domain, return_code, user).
        process = Mock()
        process.GetOwner = Mock(return_value=(user_domain, get_owner_result, user))
        process.ProcessId = pid
        process.CommandLine = cmd
        process.Name = name
        self.__processes.append(process)
    def call_procs(self):
        # Patch wmi.WMI so status.procs() sees only the mocked process list;
        # the result dict is stored for the subclasses' assertions.
        WMI = Mock()
        WMI.win32_process = Mock(return_value=self.__processes)
        with patch.object(wmi, "WMI", Mock(return_value=WMI)):
            self.result = status.procs()
class TestProcsCount(TestProcsBase):
    """Two mocked processes: procs() must return both, keyed by pid."""

    def setUp(self):
        self.add_process(pid=100)
        self.add_process(pid=101)
        self.call_procs()

    def test_process_count(self):
        self.assertEqual(len(self.result), 2)

    def test_process_key_is_pid(self):
        self.assertSetEqual(set(self.result.keys()), {100, 101})
class TestProcsAttributes(TestProcsBase):
    """One fully-specified process: every attribute must round-trip into
    the procs() result keyed by pid."""

    def setUp(self):
        self._expected_name = "name"
        self._expected_cmd = "cmd"
        self._expected_user = "user"
        self._expected_domain = "domain"
        pid = 100
        self.add_process(
            pid=pid,
            cmd=self._expected_cmd,
            user=self._expected_user,
            user_domain=self._expected_domain,
            get_owner_result=0,
        )
        self.call_procs()
        self.proc = self.result[pid]

    def test_process_cmd_is_set(self):
        self.assertEqual(self.proc["cmd"], self._expected_cmd)

    def test_process_name_is_set(self):
        self.assertEqual(self.proc["name"], self._expected_name)

    def test_process_user_is_set(self):
        # Renamed from the METHOD_NAME placeholder so unittest discovery
        # (test_* prefix) actually runs this check.
        self.assertEqual(self.proc["user"], self._expected_user)

    def test_process_user_domain_is_set(self):
        self.assertEqual(self.proc["user_domain"], self._expected_domain)
@pytest.mark.skipif(
    sys.stdin.encoding != "UTF-8",
    reason="UTF-8 encoding required for this test is not supported",
)
class TestProcsUnicodeAttributes(TestProcsBase):
    """Non-ASCII attribute values must round-trip through procs() unchanged."""

    def setUp(self):
        # Latin capital A with acute — exercises the non-ASCII path.
        unicode_str = "\xc1"
        self.ustr = unicode_str
        pid = 100
        self.add_process(
            pid=pid,
            user=unicode_str,
            user_domain=unicode_str,
            cmd=unicode_str,
            name=unicode_str,
        )
        self.call_procs()
        self.proc = self.result[pid]

    def test_process_cmd_is_utf8(self):
        self.assertEqual(self.proc["cmd"], self.ustr)

    def test_process_name_is_utf8(self):
        self.assertEqual(self.proc["name"], self.ustr)

    def test_process_user_is_utf8(self):
        self.assertEqual(self.proc["user"], self.ustr)

    def test_process_user_domain_is_utf8(self):
        self.assertEqual(self.proc["user_domain"], self.ustr)
class TestProcsWMIGetOwnerAccessDeniedWorkaround(TestProcsBase):
    """When GetOwner is denied (return code 2) on the system PIDs 0 and 4,
    the result must carry the well-known SYSTEM / NT AUTHORITY owner."""

    def setUp(self):
        self.expected_user = "SYSTEM"
        self.expected_domain = "NT AUTHORITY"
        # Return code 2 simulates the WMI access-denied case.
        self.add_process(pid=0, get_owner_result=2)
        self.add_process(pid=4, get_owner_result=2)
        self.call_procs()

    def test_user_is_set(self):
        self.assertEqual(self.result[0]["user"], self.expected_user)
        self.assertEqual(self.result[4]["user"], self.expected_user)

    def test_process_user_domain_is_set(self):
        self.assertEqual(self.result[0]["user_domain"], self.expected_domain)
        self.assertEqual(self.result[4]["user_domain"], self.expected_domain)
class TestProcsWMIGetOwnerErrorsAreLogged(TestProcsBase):
    """A failing GetOwner call (non-zero, non-access-denied return code)
    must produce a log warning carrying the error code."""

    def setUp(self):
        self.expected_error_code = 8
        self.add_process(get_owner_result=self.expected_error_code)

    def test_error_logged_if_process_get_owner_fails(self):
        with patch("salt.modules.win_status.log") as log:
            self.call_procs()
        # warning(fmt, arg, error_code): only the error code is pinned.
        log.warning.assert_called_once_with(ANY, ANY, self.expected_error_code)
class TestEmptyCommandLine(TestProcsBase):
    """A process whose CommandLine is None must yield an empty cmd string."""

    def setUp(self):
        # An unused `expected_error_code` attribute (copy-paste leftover
        # from TestProcsWMIGetOwnerErrorsAreLogged) was removed: nothing
        # in this class read it.
        pid = 100
        self.add_process(pid=pid, cmd=None)
        self.call_procs()
        self.proc = self.result[pid]

    def test_cmd_is_empty_string(self):
        self.assertEqual(self.proc["cmd"], "")
# class TestProcsComInitialization(TestProcsBase):
# def setUp(self):
# call_count = 5
# for _ in range(call_count):
# self.call_procs()
# self.expected_calls = [call()] * call_count
#
# def test_initialize_and_uninitialize_called(self):
# pythoncom.CoInitialize.assert_has_calls(self.expected_calls)
# pythoncom.CoUninitialize.assert_has_calls(self.expected_calls) |
5,448 | setup | # coding=utf-8
#
# QEMU hxtool .hx file parsing extension
#
# Copyright (c) 2020 Linaro
#
# This work is licensed under the terms of the GNU GPLv2 or later.
# See the COPYING file in the top-level directory.
"""hxtool is a Sphinx extension that implements the hxtool-doc directive"""
# The purpose of this extension is to read fragments of rST
# from .hx files, and insert them all into the current document.
# The rST fragments are delimited by SRST/ERST lines.
# The conf.py file must set the hxtool_srctree config value to
# the root of the QEMU source tree.
# Each hxtool-doc:: directive takes one argument which is the
# path of the .hx file to process, relative to the source tree.
import os
import re
from enum import Enum
from docutils import nodes
from docutils.statemachine import ViewList
from docutils.parsers.rst import directives, Directive
from sphinx.errors import ExtensionError
from sphinx.util.nodes import nested_parse_with_titles
import sphinx
# Sphinx up to 1.6 uses AutodocReporter; 1.7 and later
# use switch_source_input. Check borrowed from kerneldoc.py.
# Sphinx up to 1.6 uses AutodocReporter; 1.7 and later use
# switch_source_input. Check borrowed from kerneldoc.py.
# Compare the numeric version_info tuple rather than a 3-character slice
# of the version string, which mis-orders versions such as '1.10'
# ('1.1' < '1.7' lexically) and any major version >= 10.
Use_SSI = sphinx.version_info[:2] >= (1, 7)
if Use_SSI:
    from sphinx.util.docutils import switch_source_input
else:
    from sphinx.ext.autodoc import AutodocReporter
__version__ = '1.0'
# We parse hx files with a state machine which may be in one of two
# states: reading the C code fragment, or inside a rST fragment.
class HxState(Enum):
    """Parser state: inside the C text (CTEXT) or inside an rST fragment (RST)."""
    CTEXT = 1
    RST = 2
def serror(file, lnum, errtext):
    """Raise an exception giving a user-friendly syntax error message"""
    message = '%s line %d: syntax error: %s' % (file, lnum, errtext)
    raise ExtensionError(message)
def parse_directive(line):
    """Return the first word of *line* (empty string if none).

    Used to recognise hx directives such as SRST/ERST/DEFHEADING at the
    start of a line.
    """
    # Raw string: '\W' in a plain literal is an invalid escape sequence
    # (DeprecationWarning today, an error in future Python versions).
    return re.split(r'\W', line)[0]
def parse_defheading(file, lnum, line):
    """Extract the heading text from a ``DEFHEADING(some string)`` line.

    The string may be empty: such directives exist only to add blank
    lines to the plain-text --help output and are ignored by callers.
    Any trailing ':' is stripped for consistency with other headings in
    the rST documentation. A non-matching line is reported via serror().
    """
    matched = re.match(r'DEFHEADING\((.*?):?\)', line)
    if not matched:
        serror(file, lnum, "Invalid DEFHEADING line")
    return matched.group(1)
def parse_archheading(file, lnum, line):
    """Extract the heading text from an ``ARCHHEADING(some string, arg)`` line.

    As with DEFHEADING, an empty heading string is ignored by callers.
    Any trailing ':' is stripped for consistency with other headings in
    the rST documentation. A non-matching line is reported via serror().
    """
    matched = re.match(r'ARCHHEADING\((.*?):?,.*\)', line)
    if not matched:
        serror(file, lnum, "Invalid ARCHHEADING line")
    return matched.group(1)
class HxtoolDocDirective(Directive):
    """Extract rST fragments from the specified .hx file"""
    # Fixed: docutils spells this attribute 'required_arguments'; the
    # singular 'required_argument' was silently ignored, leaving the
    # docutils default of 0 in effect.
    required_arguments = 1
    optional_arguments = 1
    option_spec = {
        'hxfile': directives.unchanged_required
    }
    has_content = False

    def run(self):
        """Parse the .hx file and return the generated docutils nodes."""
        env = self.state.document.settings.env
        hxfile = env.config.hxtool_srctree + '/' + self.arguments[0]

        # Tell sphinx of the dependency
        env.note_dependency(os.path.abspath(hxfile))

        state = HxState.CTEXT
        # We build up lines of rST in this ViewList, which we will
        # later put into a 'section' node.
        rstlist = ViewList()
        current_node = None
        node_list = []

        with open(hxfile) as f:
            lines = (l.rstrip() for l in f)
            for lnum, line in enumerate(lines, 1):
                directive = parse_directive(line)

                if directive == 'HXCOMM':
                    pass
                elif directive == 'SRST':
                    if state == HxState.RST:
                        serror(hxfile, lnum, 'expected ERST, found SRST')
                    else:
                        state = HxState.RST
                elif directive == 'ERST':
                    if state == HxState.CTEXT:
                        serror(hxfile, lnum, 'expected SRST, found ERST')
                    else:
                        state = HxState.CTEXT
                elif directive == 'DEFHEADING' or directive == 'ARCHHEADING':
                    if directive == 'DEFHEADING':
                        heading = parse_defheading(hxfile, lnum, line)
                    else:
                        heading = parse_archheading(hxfile, lnum, line)
                    if heading == "":
                        continue
                    # Put the accumulated rST into the previous node,
                    # and then start a fresh section with this heading.
                    if len(rstlist) > 0:
                        if current_node is None:
                            # We had some rST fragments before the first
                            # DEFHEADING. We don't have a section to put
                            # these in, so rather than magicing up a section,
                            # make it a syntax error.
                            serror(hxfile, lnum,
                                   'first DEFHEADING must precede all rST text')
                        self.do_parse(rstlist, current_node)
                        rstlist = ViewList()
                    if current_node is not None:
                        node_list.append(current_node)
                    section_id = 'hxtool-%d' % env.new_serialno('hxtool')
                    current_node = nodes.section(ids=[section_id])
                    current_node += nodes.title(heading, heading)
                else:
                    # Not a directive: put in output if we are in rST fragment
                    if state == HxState.RST:
                        # Sphinx counts its lines from 0
                        rstlist.append(line, hxfile, lnum - 1)

        if current_node is None:
            # We don't have multiple sections, so just parse the rst
            # fragments into a dummy node so we can return the children.
            current_node = nodes.section()
            self.do_parse(rstlist, current_node)
            return current_node.children
        else:
            # Put the remaining accumulated rST into the last section, and
            # return all the sections.
            if len(rstlist) > 0:
                self.do_parse(rstlist, current_node)
            node_list.append(current_node)
            return node_list

    # This is from kerneldoc.py -- it works around an API change in
    # Sphinx between 1.6 and 1.7. Unlike kerneldoc.py, we use
    # sphinx.util.nodes.nested_parse_with_titles() rather than the
    # plain self.state.nested_parse(), and so we can drop the saving
    # of title_styles and section_level that kerneldoc.py does,
    # because nested_parse_with_titles() does that for us.
    def do_parse(self, result, node):
        if Use_SSI:
            with switch_source_input(self.state, result):
                nested_parse_with_titles(self.state, result, node)
        else:
            save = self.state.memo.reporter
            self.state.memo.reporter = AutodocReporter(result, self.state.memo.reporter)
            try:
                nested_parse_with_titles(self.state, result, node)
            finally:
                self.state.memo.reporter = save
def setup(app):
    """Register the hxtool-doc directive with Sphinx.

    Renamed from a placeholder: Sphinx loads extensions by calling the
    module-level function named exactly ``setup``.
    """
    app.add_config_value('hxtool_srctree', None, 'env')
    app.add_directive('hxtool-doc', HxtoolDocDirective)
    return dict(
        version=__version__,
        parallel_read_safe=True,
        parallel_write_safe=True
    )
5,449 | test printing the job base manifest | import yaml
from prefect.infrastructure.kubernetes import KubernetesJob
from prefect.settings import (
PREFECT_API_KEY,
PREFECT_API_URL,
PREFECT_LOGGING_SERVER_LEVEL,
)
from prefect.testing.cli import invoke_and_assert
from prefect.utilities.dockerutils import get_prefect_image_name
def test_printing_the_server_manifest_with_no_args():
    """`prefect kubernetes manifest server` should print a valid YAML file
    representing a basic Prefect server deployment to a cluster"""
    result = invoke_and_assert(
        ["kubernetes", "manifest", "server"],
        expected_output_contains="kind: Deployment",
    )
    # Materialize the generator returned by yaml.load_all: a bare
    # generator is always truthy, so `assert manifests` on it never fails.
    manifests = list(yaml.load_all(result.stdout, yaml.SafeLoader))
    # Spot-check a few things. This test is mostly just confirming that the output
    # looks roughly like a set of Kubernetes manifests in YAML, not that this is a
    # valid and working API deployment.
    assert manifests
    for manifest in manifests:
        assert manifest["metadata"]["namespace"] == "default"
        if manifest["kind"] == "Deployment":
            assert manifest["metadata"]["name"] == "prefect-server"
            assert len(manifest["spec"]["template"]["spec"]["containers"]) == 1
            server_container = manifest["spec"]["template"]["spec"]["containers"][0]
            assert server_container["image"] == get_prefect_image_name()
            # A duplicated copy of the next assertion was removed.
            assert server_container["command"][0:3] == ["prefect", "server", "start"]
            assert server_container["command"][5:] == [
                "--log-level",
                str(PREFECT_LOGGING_SERVER_LEVEL.value()),
            ]
def test_printing_the_server_manifest_with_image_tag_and_log_level():
    """-i and --log-level must flow through to the server Deployment spec."""
    result = invoke_and_assert(
        [
            "kubernetes",
            "manifest",
            "server",
            "-i",
            "test_image_tag",
            "--log-level",
            "test_log_level",
        ],
        expected_output_contains="kind: Deployment",
    )
    # Parse once into a list. The original parsed the output twice and
    # asserted on a bare yaml.load_all generator, which is always truthy.
    manifests = list(yaml.load_all(result.stdout, yaml.SafeLoader))
    assert manifests
    deployment = next(m for m in manifests if m["kind"] == "Deployment")
    assert deployment["metadata"]["name"] == "prefect-server"
    assert len(deployment["spec"]["template"]["spec"]["containers"]) == 1
    server_container = deployment["spec"]["template"]["spec"]["containers"][0]
    assert server_container["image"] == "test_image_tag"
    assert server_container["command"][5:] == ["--log-level", "test_log_level"]
def test_printing_the_server_manifest_with_namespace():
    """-n must set metadata.namespace on every server manifest."""
    result = invoke_and_assert(
        ["kubernetes", "manifest", "server", "-n", "test_namespace"],
        expected_output_contains="kind: Deployment",
    )
    # Materialized: `assert` on a bare yaml.load_all generator never fails.
    manifests = list(yaml.load_all(result.stdout, yaml.SafeLoader))
    assert manifests
    for manifest in manifests:
        assert manifest["metadata"]["namespace"] == "test_namespace"
def test_printing_the_agent_manifest_with_no_args():
    """`prefect kubernetes manifest agent` should print a valid YAML file
    representing a basic agent deployment to a cluster"""
    result = invoke_and_assert(
        ["kubernetes", "manifest", "agent"],
        expected_output_contains="kind: Deployment",
    )
    # Spot-check a few things. This test is mostly just confirming that the output
    # looks roughly like a set of Kubernetes manifests in YAML.
    # Materialized into a list: `assert` on a bare generator never fails.
    manifests = list(yaml.load_all(result.stdout, yaml.SafeLoader))
    assert manifests
    for manifest in manifests:
        # Cluster-scoped resources carry no namespace.
        if manifest["kind"] not in ["ClusterRole", "ClusterRoleBinding"]:
            assert manifest["metadata"]["namespace"] == "default"
        if manifest["kind"] == "Deployment":
            assert manifest["metadata"]["name"] == "prefect-agent"
            assert len(manifest["spec"]["template"]["spec"]["containers"]) == 1
            agent_container = manifest["spec"]["template"]["spec"]["containers"][0]
            assert agent_container["image"] == get_prefect_image_name()
            assert agent_container["command"] == [
                "prefect",
                "agent",
                "start",
                "-q",
                "kubernetes",
            ]
            assert len(agent_container["env"]) == 2
            assert agent_container["env"][0]["name"] == "PREFECT_API_URL"
            assert agent_container["env"][1]["name"] == "PREFECT_API_KEY"
            assert agent_container["env"][0]["value"] == str(PREFECT_API_URL.value())
            assert agent_container["env"][1]["value"] == str(PREFECT_API_KEY.value())
def test_printing_the_agent_manifest_with_api_url_image_tag_and_work_queue():
    """API url/key, image tag and work queue flags must flow into the
    agent Deployment."""
    result = invoke_and_assert(
        [
            "kubernetes",
            "manifest",
            "agent",
            "--api-url",
            "test_api_url",
            "--api-key",
            "test_api_key",
            "-i",
            "test_image_tag",
            "-q",
            "test_work_queue",
        ],
        expected_output_contains="kind: Deployment",
    )
    # Materialized: `assert` on a bare yaml.load_all generator never fails.
    manifests = list(yaml.load_all(result.stdout, yaml.SafeLoader))
    assert manifests
    deployment = next(m for m in manifests if m["kind"] == "Deployment")
    assert deployment["metadata"]["name"] == "prefect-agent"
    assert len(deployment["spec"]["template"]["spec"]["containers"]) == 1
    agent_container = deployment["spec"]["template"]["spec"]["containers"][0]
    assert agent_container["image"] == "test_image_tag"
    assert agent_container["command"][3:5] == ["-q", "test_work_queue"]
    assert len(agent_container["env"]) == 2
    assert agent_container["env"][0]["name"] == "PREFECT_API_URL"
    assert agent_container["env"][1]["name"] == "PREFECT_API_KEY"
    assert agent_container["env"][0]["value"] == "test_api_url"
    assert agent_container["env"][1]["value"] == "test_api_key"
def test_printing_the_agent_manifest_with_namespace():
    """-n must set metadata.namespace on all namespaced agent manifests."""
    result = invoke_and_assert(
        ["kubernetes", "manifest", "agent", "-n", "test_namespace"],
        expected_output_contains="kind: Deployment",
    )
    # Materialized: `assert` on a bare yaml.load_all generator never fails.
    manifests = list(yaml.load_all(result.stdout, yaml.SafeLoader))
    assert manifests
    for manifest in manifests:
        # Cluster-scoped resources carry no namespace.
        if manifest["kind"] not in ["ClusterRole", "ClusterRoleBinding"]:
            assert manifest["metadata"]["namespace"] == "test_namespace"
def test_printing_the_job_base_manifest():
    """`prefect kubernetes manifest flow-run-job` should print a valid YAML file
    representing the minimum starting point for a Kubernetes Job"""
    result = invoke_and_assert(
        ["kubernetes", "manifest", "flow-run-job"],
        expected_output_contains="kind: Job",
    )
    # check for the presence of helpful comments
    assert "# the first container is required" in result.stdout
    parsed = yaml.load(result.stdout, yaml.SafeLoader)
    assert parsed == KubernetesJob.base_job_manifest()
5,450 | window frame rows start end | import json
from collections.abc import Iterable, Sequence
from datetime import date, time, timedelta
from datetime import datetime as real_datetime
from decimal import Decimal
from typing import Any
from django.core.management.color import Style
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.utils import CursorWrapper
from django.db.models.base import Model
from django.db.models.constants import OnConflict
from django.db.models.expressions import Case, Expression
from django.db.models.fields import Field
from django.db.models.sql.compiler import SQLCompiler
class BaseDatabaseOperations:
    """Type stub for django.db.backends.base.operations.BaseDatabaseOperations."""
    compiler_module: str
    integer_field_ranges: dict[str, tuple[int, int]]
    set_operators: dict[str, str]
    cast_data_types: dict[Any, Any]
    cast_char_field_without_max_length: Any
    PRECEDING: str
    FOLLOWING: str
    UNBOUNDED_PRECEDING: str
    UNBOUNDED_FOLLOWING: str
    CURRENT_ROW: str
    explain_prefix: str | None
    connection: BaseDatabaseWrapper
    def __init__(self, connection: BaseDatabaseWrapper) -> None: ...
    def autoinc_sql(self, table: str, column: str) -> str | None: ...
    def bulk_batch_size(self, fields: Any, objs: Any) -> int: ...
    def cache_key_culling_sql(self) -> str: ...
    def unification_cast_sql(self, output_field: Field) -> str: ...
    def date_extract_sql(self, lookup_type: str, sql: Any, params: Any) -> tuple[str, Any]: ...
    # def date_interval_sql(self, timedelta: None) -> Any: ...
    def date_trunc_sql(self, lookup_type: str, sql: str, params: Any, tzname: str | None = ...) -> tuple[str, Any]: ...
    def datetime_cast_date_sql(self, sql: str, params: Any, tzname: str | None) -> tuple[str, Any]: ...
    def datetime_cast_time_sql(self, sql: str, params: Any, tzname: str | None) -> tuple[str, Any]: ...
    def datetime_extract_sql(self, lookup_type: str, sql: str, params: Any, tzname: str | None) -> tuple[str, Any]: ...
    def datetime_trunc_sql(self, lookup_type: str, sql: str, params: Any, tzname: str | None) -> str: ...
    def time_trunc_sql(self, lookup_type: str, sql: str, params: Any, tzname: str | None = ...) -> str: ...
    def time_extract_sql(self, lookup_type: str, sql: str, params: Any) -> str: ...
    def deferrable_sql(self) -> str: ...
    def distinct_sql(self, fields: list[str], params: list[Any] | None) -> tuple[list[str], list[str]]: ...
    def fetch_returned_insert_columns(self, cursor: Any, returning_params: Any) -> Any: ...
    def field_cast_sql(self, db_type: str | None, internal_type: str) -> str: ...
    def force_no_ordering(self) -> list[Any]: ...
    def for_update_sql(self, nowait: bool = ..., skip_locked: bool = ..., of: Any = ..., no_key: bool = ...) -> str: ...
    def limit_offset_sql(self, low_mark: int, high_mark: int | None) -> str: ...
    def last_executed_query(self, cursor: Any, sql: Any, params: Any) -> str: ...
    def last_insert_id(self, cursor: CursorWrapper, table_name: str, pk_name: str) -> int: ...
    def lookup_cast(self, lookup_type: str, internal_type: str | None = ...) -> str: ...
    def max_in_list_size(self) -> int | None: ...
    def max_name_length(self) -> int | None: ...
    def no_limit_value(self) -> str | None: ...
    def pk_default_value(self) -> str: ...
    def prepare_sql_script(self, sql: Any) -> list[str]: ...
    def process_clob(self, value: str) -> str: ...
    def return_insert_columns(self, fields: Any) -> Any: ...
    def compiler(self, compiler_name: str) -> type[SQLCompiler]: ...
    def quote_name(self, name: str) -> str: ...
    def regex_lookup(self, lookup_type: str) -> str: ...
    def savepoint_create_sql(self, sid: str) -> str: ...
    def savepoint_commit_sql(self, sid: str) -> str: ...
    def savepoint_rollback_sql(self, sid: str) -> str: ...
    def set_time_zone_sql(self) -> str: ...
    def sql_flush(
        self, style: Any, tables: Sequence[str], *, reset_sequences: bool = ..., allow_cascade: bool = ...
    ) -> list[str]: ...
    def execute_sql_flush(self, sql_list: Iterable[str]) -> None: ...
    def sequence_reset_by_name_sql(self, style: Style | None, sequences: list[Any]) -> list[Any]: ...
    def sequence_reset_sql(self, style: Style, model_list: Sequence[type[Model]]) -> list[Any]: ...
    def start_transaction_sql(self) -> str: ...
    def end_transaction_sql(self, success: bool = ...) -> str: ...
    def tablespace_sql(self, tablespace: str | None, inline: bool = ...) -> str: ...
    def prep_for_like_query(self, x: str) -> str: ...
    prep_for_iexact_query: Any
    def validate_autopk_value(self, value: int) -> int: ...
    def adapt_unknown_value(self, value: Any) -> Any: ...
    def adapt_datefield_value(self, value: date | None) -> str | None: ...
    def adapt_datetimefield_value(self, value: real_datetime | None) -> str | None: ...
    def adapt_timefield_value(self, value: real_datetime | time | None) -> str | None: ...
    def adapt_decimalfield_value(
        self, value: Decimal | None, max_digits: int | None = ..., decimal_places: int | None = ...
    ) -> str | None: ...
    def adapt_ipaddressfield_value(self, value: str | None) -> str | None: ...
    def adapt_json_value(self, value: Any, encoder: type[json.JSONEncoder] | None) -> str: ...
    def adapt_integerfield_value(self, value: Any, internal_type: Any) -> Any: ...
    def year_lookup_bounds_for_date_field(self, value: int, iso_year: bool = ...) -> list[str]: ...
    def year_lookup_bounds_for_datetime_field(self, value: int, iso_year: bool = ...) -> list[str]: ...
    def get_db_converters(self, expression: Expression) -> list[Any]: ...
    def convert_durationfield_value(
        self, value: float | None, expression: Expression, connection: BaseDatabaseWrapper
    ) -> timedelta | None: ...
    def check_expression_support(self, expression: Any) -> None: ...
    def conditional_expression_supported_in_where_clause(self, expression: Any) -> bool: ...
    def combine_expression(self, connector: str, sub_expressions: list[str]) -> str: ...
    def combine_duration_expression(self, connector: Any, sub_expressions: Any) -> str: ...
    def binary_placeholder_sql(self, value: Case | None) -> str: ...
    def modify_insert_params(self, placeholder: str, params: Any) -> Any: ...
    def integer_field_range(self, internal_type: Any) -> tuple[int, int]: ...
    def subtract_temporals(self, internal_type: Any, lhs: Any, rhs: Any) -> tuple[str, tuple[Any, ...]]: ...
    def window_frame_start(self, start: Any) -> str: ...
    def window_frame_end(self, end: Any) -> str: ...
    # Renamed from a METHOD_NAME placeholder, matching the sibling
    # window_frame_range_start_end signature below.
    def window_frame_rows_start_end(self, start: int | None = ..., end: int | None = ...) -> tuple[str, str]: ...
    def window_frame_range_start_end(self, start: int | None = ..., end: int | None = ...) -> tuple[str, str]: ...
    def explain_query_prefix(self, format: str | None = ..., **options: Any) -> str: ...
    def insert_statement(self, on_conflict: OnConflict | None = ...) -> str: ...
    def on_conflict_suffix_sql(
        self, fields: Any, on_conflict: Any, update_fields: Any, unique_fields: Any
    ) -> str | Any: ...
    def format_for_duration_arithmetic(self, sql: str) -> str: ...
5,451 | fetch int8 blob | import collections
from typing import Any, Dict, List, Optional, Protocol, Tuple, Union, overload
from typing_extensions import TypeAlias
import numpy as np
import google.protobuf.message
import torch
from caffe2.proto import caffe2_pb2
from . import core
# pybind11 will automatically accept either Python str or bytes for C++ APIs
# that accept std::string.
_PybindStr: TypeAlias = Union[str, bytes]
_PerOpEnginePrefType: TypeAlias = Dict[int, Dict[str, List[str]]]
_EnginePrefType: TypeAlias = Dict[int, List[str]]
# Quantized tensor triple: raw int8 data plus the affine quantization
# parameters (scale, zero_point).
Int8Tensor = collections.namedtuple(
    'Int8Tensor', ['data', 'scale', 'zero_point']
)
class _HasProto(Protocol):
    """Structural type: any object exposing a Proto() accessor."""
    def Proto(self) -> Any: ...
class TensorCPU:
    """CPU tensor handle exposed by the pybind bindings."""
    def init(self, dims: List[int], caffe_type: int) -> None: ...
    def to_torch(self) -> torch.Tensor: ...
class Blob:
    """A single named value stored in a Workspace."""
    def feed(
        self,
        arg: Any,
        device_option: Union[
            None, str, bytes, google.protobuf.message.Message, _HasProto,
        ] = None,
    ) -> bool: ...
    def is_tensor(self) -> bool: ...
    def as_tensor(self) -> TensorCPU: ...
    def tensor(self) -> TensorCPU: ...
    def to_torch(self) -> torch.Tensor: ...
    def fetch(self) -> Any: ...
class Net:
    """Handle to a created network: run() executes it, cancel() aborts it."""
    def run(self) -> None: ...
    def cancel(self) -> None: ...
class Workspace:
    """Stub for the pybind11 caffe2 Workspace: a named collection of blobs
    plus net/plan execution."""
    @overload
    def __init__(self) -> None: ...
    @overload
    def __init__(self, workspace: Workspace) -> None: ...
    @property
    def blobs(self) -> Dict[str, Blob]: ...
    def create_blob(self, name: _PybindStr) -> Blob: ...
    def fetch_blob(self, name: _PybindStr) -> Any: ...
    # Renamed from a METHOD_NAME placeholder: fetches a quantized blob as
    # an Int8Tensor (data, scale, zero_point) triple.
    def fetch_int8_blob(
        self, name: Union[str, bytes, core.BlobReference]
    ) -> Int8Tensor: ...
    def _create_net(self, _def: bytes, overwrite: bool) -> Net: ...
    def create_net(
        self,
        net: Union[str, bytes, core.Net, caffe2_pb2.NetDef],
        overwrite: bool = False,
    ) -> Net: ...
    def _run_net(self, _def: bytes) -> None: ...
    def _run_operator(self, _def: bytes) -> None: ...
    def _run_plan(self, _def: bytes) -> None: ...
    def run(
        self,
        obj: Union[
            caffe2_pb2.PlanDef,
            caffe2_pb2.NetDef,
            caffe2_pb2.OperatorDef,
            _HasProto,
        ],
    ) -> None: ...
    def feed_blob(
        self,
        name: Union[str, bytes, core.BlobReference],
        arr: Union[caffe2_pb2.TensorProto, np.ndarray],
        device_option: Optional[caffe2_pb2.DeviceOption] = None,
    ) -> bool: ...
    def remove_blob(self, blob: Any) -> None: ...
# The currently-active global workspace.
current: Workspace

class Argument:
    """Descriptor for a single operator-schema argument."""
    @property
    def name(self) -> str: ...
    @property
    def description(self) -> str: ...
    @property
    def required(self) -> bool: ...

class OpSchema:
    """Registered operator schema: arguments, I/O descriptions, arity limits."""
    @staticmethod
    def get(key: str) -> OpSchema: ...
    @property
    def args(self) -> List[Argument]: ...
    @property
    def input_desc(self) -> List[Tuple[str, str]]: ...
    @property
    def output_desc(self) -> List[Tuple[str, str]]: ...
    @property
    def max_input(self) -> int: ...
    @property
    def max_output(self) -> int: ...
    @property
    def min_input(self) -> int: ...
    @property
    def min_output(self) -> int: ...
    def inplace_enforced(self, x: int, y: int) -> bool: ...

# Opaque handle types exposed by the bindings; Python code only passes
# them around, so no members are declared here.
class DummyName:
    ...

class Graph:
    ...

class Node:
    ...

class Edge:
    ...

class NeuralNetOperator:
    ...

class NeuralNetData:
    ...

class NNSubgraph:
    ...

class NNMatchGraph:
    ...

class Annotation:
    ...
# Build-time configuration flags exposed by the extension module.
is_asan: bool
has_mkldnn: bool
use_mkldnn: bool
has_fbgemm: bool
use_rocm: bool
use_trt: bool
define_caffe2_no_operator_schema: bool

# ---- registry and engine-preference configuration ----
def registered_dbs() -> List[str]: ...
def get_build_options() -> Dict[str, str]: ...
def set_per_op_engine_pref(pref: _PerOpEnginePrefType) -> None: ...
def set_global_engine_pref(pref: _EnginePrefType) -> None: ...
def set_engine_pref(
    per_op_pref: _PerOpEnginePrefType, global_pref: _EnginePrefType
) -> None: ...
def set_op_engine_pref(
    op_type: _PybindStr, op_pref: _EnginePrefType
) -> None: ...
def op_registry_key(op_type: _PybindStr, engine: _PybindStr) -> str: ...
def global_init(args: List[str]) -> None: ...
def registered_operators() -> List[str]: ...
def on_module_exit() -> None: ...

# ---- workspace management ----
@overload
def switch_workspace(ws: Workspace): ...
@overload
def switch_workspace(name: _PybindStr, create_if_missing: Optional[bool] = None): ...
def create_child_workspace(
    parent_ws_name: _PybindStr, child_ws_name: _PybindStr
) -> None: ...
def root_folder() -> str: ...
def current_workspace() -> str: ...
def workspaces() -> List[str]: ...

# ---- benchmarking and blob access on the current workspace ----
def benchmark_net(
    name: _PybindStr, warmup_runs: int, main_runs: int, run_individual: bool
) -> List[float]: ...
def benchmark_net_once(name: _PybindStr) -> float: ...
def blobs() -> Dict[str, Blob]: ...
def has_blob(name: _PybindStr) -> bool: ...
def create_blob(name: _PybindStr) -> bool: ...
def reset_blob(name: _PybindStr) -> None: ...
@overload
def deserialize_blob(content: _PybindStr) -> Blob: ...
@overload
def deserialize_blob(name: _PybindStr, serialized: bytes) -> None: ...
def serialize_blob(name: _PybindStr) -> bytes: ...

# ---- stats / NUMA / misc introspection ----
def get_stats() -> Dict[str, int]: ...
def is_numa_enabled() -> bool: ...
def get_num_numa_nodes() -> int: ...
def get_blob_numa_node(blob_name: _PybindStr) -> int: ...
def get_blob_size_bytes(blob_name: _PybindStr) -> int: ...
def create_offline_tensor(
    name: _PybindStr, dims: List[int], datatype: int
) -> bool: ...
def fakeFp16FuseOps(net_str: bytes) -> bytes: ...

# ---- device (CUDA / HIP) queries ----
def num_cuda_devices() -> int: ...
def get_cuda_version() -> int: ...
def get_cudnn_version() -> int: ...
def get_gpu_memory_info(device_id: int) -> Tuple[int, int]: ...
def get_device_properties(deviceid: int) -> Dict[str, Any]: ...
def num_hip_devices() -> int: ...
def get_hip_version() -> int: ...
def get_miopen_version() -> int: ...
has_hip_support: bool
has_cuda_support: bool
has_gpu_support: bool
5,452 | close | """Test fixtures."""
import copy
import lxml
import yaml
import pytest
from napalm.base.test import conftest as parent_conftest
from napalm.base.test.double import BaseTestDouble
from napalm.junos import junos
from ncclient.devices.junos import JunosDeviceHandler
@pytest.fixture(scope="class")
def set_device_parameters(request):
    """Set up the class: install driver/vendor attributes and register a
    finalizer that closes the fake device on teardown."""

    def fin():
        # METHOD_NAME placeholder resolved to the real API: close the
        # device when the test class is torn down.
        request.cls.device.close()

    request.addfinalizer(fin)
    request.cls.driver = junos.JunOSDriver
    request.cls.patched_driver = PatchedJunOSDriver
    request.cls.vendor = "junos"
    parent_conftest.set_device_parameters(request)
def pytest_generate_tests(metafunc):
    """Generate test cases dynamically."""
    # Delegate to the shared napalm test machinery, keyed off this file's
    # path so it can locate the mocked-data directories.
    parent_conftest.pytest_generate_tests(metafunc, __file__)
class PatchedJunOSDriver(junos.JunOSDriver):
    """Patched JunOS Driver: never locks config, always talks to a fake device."""

    def __init__(self, hostname, username, password, timeout=60, optional_args=None):
        # Guard the declared default: mutating None below would raise
        # TypeError before the parent constructor ever ran.
        optional_args = optional_args if optional_args is not None else {}
        optional_args["config_lock"] = False  # to not try lock on open()
        # super() instead of super(self.__class__, self): the latter
        # recurses infinitely if this class is ever subclassed.
        super().__init__(hostname, username, password, timeout, optional_args)
        self.patched_attrs = ["device"]
        self.device = FakeJunOSDevice()

    def is_alive(self):
        return {"is_alive": True}  # always alive during the tests...
class FakeJunOSDevice(BaseTestDouble):
    """Test double for a junos-eznc Device, backed by canned fixture files."""

    def __init__(self):
        self.rpc = FakeRPCObject(self)
        self._conn = FakeConnection(self.rpc)
        self.alternative_facts_file = "facts.yml"
        self.ON_JUNOS = True  # necessary for fake devices
        self.hostname = "test"
        # Baseline facts returned when no per-test facts.yml fixture exists.
        self.default_facts = {
            "domain": None,
            "hostname": "vsrx",
            "ifd_style": "CLASSIC",
            "2RE": False,
            "serialnumber": "beb914a9cca3",
            "fqdn": "vsrx",
            "virtual": True,
            "switch_style": "NONE",
            "version": "12.1X47-D20.7",
            "HOME": "/cf/var/home/vagrant",
            "srx_cluster": False,
            "model": "FIREFLY-PERIMETER",
            "RE0": {
                "status": "Testing",
                "last_reboot_reason": "Router rebooted after a normal shutdown.",
                "model": "FIREFLY-PERIMETER RE",
                "up_time": "1 hour, 13 minutes, 37 seconds",
            },
            "vc_capable": False,
            "personality": "SRX_BRANCH",
        }
        self._uptime = 4380
        # Since junos-eznc 2.3.0 the new SAX parser is used as default. Thus
        # disable it to use the DOM parser which was used prior.
        self._use_filter = False

    @property
    def transform(self):
        # Junos device transform, inherited from the ncclient class
        return self._conn._device_handler.transform_reply

    @transform.setter
    def transform(self, func):
        self._conn._device_handler.transform_reply = func

    @property
    def facts(self):
        # we want to reinitialize it every time to avoid side effects
        self._facts = copy.deepcopy(self.default_facts)
        try:
            alt_facts_filepath = self.find_file(self.alternative_facts_file)
        except IOError:
            self._facts = self.default_facts
            return self._facts
        with open(alt_facts_filepath, "r") as alt_facts:
            self._facts.update(yaml.safe_load(alt_facts))
        return self._facts

    @property
    def uptime(self):
        return self._uptime

    def open(self, auto_probe=0):
        pass

    def close(self):
        # Renamed from the METHOD_NAME placeholder: the class-teardown
        # finalizer in set_device_parameters() calls device.close().
        pass

    def bind(*args, **kvargs):
        pass

    def cli(self, command="", encoding="text"):
        filename = "{safe_command}.txt".format(safe_command=self.sanitize_text(command))
        filepath = self.find_file(filename)  # fixed 'fielpath' typo
        return self.read_txt_file(filepath)
class FakeRPCObject:
    """
    Fake RPC caller.
    """

    def __init__(self, device):
        self._device = device

    def __getattr__(self, item):
        # Remember which RPC was requested; response() uses the name to
        # select the canned XML reply file.
        self.item = item
        return self

    def response(self, **rpc_args):
        instance = rpc_args.pop("instance", "")
        filename = "{item}{instance}.xml".format(item=self.item, instance=instance)
        filepathpath = self._device.find_file(filename)
        xml_string = self._device.read_txt_file(filepathpath)
        return lxml.etree.fromstring(xml_string)

    def get_config(self, get_cmd=None, filter_xml=None, options=None):
        """Return the canned configuration XML matching the request."""
        # Avoid a mutable default argument; options is only read below.
        options = options if options is not None else {}
        # get_cmd is an XML tree that requests a specific part of the config
        # E.g.: <configuration><protocols><bgp><group/></bgp></protocols></configuration>
        if get_cmd is not None:
            get_cmd_str = lxml.etree.tostring(get_cmd).decode("utf-8")
            filename = self._device.sanitize_text(get_cmd_str)
        # no get_cmd means it should mock the eznc get_config
        else:
            filename = "get_config__" + "__".join(
                ["{0}_{1}".format(k, v) for k, v in sorted(options.items())]
            )
        # Interpolate the sanitized request into the fixture filename (the
        # broken literal "(unknown).xml" ignored the format argument and
        # always looked up the same nonexistent file); cap the length so
        # it stays a valid filename.
        filename = "{filename}.xml".format(filename=filename[0:150])
        filepathpath = self._device.find_file(filename)
        xml_string = self._device.read_txt_file(filepathpath)
        return lxml.etree.fromstring(xml_string)

    __call__ = response
class FakeConnectionRPCObject:
    """
    Will make fake RPC requests that usually are directly made via netconf.
    """

    def __init__(self, rpc):
        self._rpc = rpc

    def response(self, non_std_command=None):
        """Wrap the device's config reply the way an NCElement reply looks."""

        class RPCReply:
            def __init__(self, reply):
                self._NCElement__doc = reply

        return RPCReply(self._rpc.get_config(get_cmd=non_std_command))

    __call__ = response
class FakeConnection:
    """Fake netconf connection wiring together the RPC shim, session and handler."""

    def __init__(self, rpc):
        self._session = FakeSession()
        self._device_handler = JunosDeviceHandler({})
        self.rpc = FakeConnectionRPCObject(rpc)
class FakeSession:
    """Fake ncclient session exposing only a ``transport`` attribute."""

    def __init__(self):
        self.transport = FakeTransport()
class FakeTransport:
    """Fake transport that simply records the requested keepalive interval."""

    def set_keepalive(self, keepalive):
        self.keepalive = keepalive
5,453 | create template | # Example of video streaming simulation
from trex.astf.api import *
import argparse
class Prof1():
    """ASTF profile that emulates adaptive-bitrate video streaming.

    One scheduler template models playback and buffering; one (initially
    inactive) template per bitrate downloads chunks, and the client program
    switches bitrate templates depending on how long the last chunk fetch took.
    """

    def __init__(self):
        pass

    def __setup_ip_gen(self, client_ip_range, server_ip_range):
        # Sequential client/server IP generators for the profile.
        ip_gen_c = ASTFIPGenDist(ip_range=client_ip_range, distribution="seq")
        ip_gen_s = ASTFIPGenDist(ip_range=server_ip_range, distribution="seq")
        self.ip_gen = ASTFIPGen(dist_client=ip_gen_c, dist_server=ip_gen_s)

    def __setup_assoc(self, port, ip_start, ip_end, l7_offset):
        # Plain association for the scheduler template, plus an L7-mapped one
        # used by the per-bitrate templates.
        self.assoc = ASTFAssociationRule(port=port, ip_start=ip_start, ip_end=ip_end)
        self.l7_offset = l7_offset
        self.l7_assoc = ASTFAssociationRule(port=port, ip_start=ip_start, ip_end=ip_end, l7_map=l7_offset)

    def _setup_defaults(self):
        self.client_ip_range = ["16.0.0.0", "16.0.0.255"]
        self.port = 80
        self.server_ip_range = ["48.0.0.0", "48.0.255.255"]
        self.__setup_ip_gen(self.client_ip_range, self.server_ip_range)
        self.__setup_assoc(self.port, *self.server_ip_range, [0, 1, 2, 3])

    def _setup_params(self, speeds):
        # One template-group name per bitrate, zero-padded for stable naming.
        self.template_names = {}
        for speed in speeds:
            self.template_names[speed] = '{:04}'.format(speed)

    def _create_client_program(self, speed_BPS, chunk_time, tg_data):
        # Fetch one chunk, then pick the next bitrate template based on how
        # long the fetch took relative to each bitrate's time budget.
        prog_c = ASTFProgram(stream=True)
        prog_c.set_tick_var('base')
        prog_c.send(tg_data)
        prog_c.recv(int(speed_BPS * chunk_time))
        speeds = list(self.template_names)
        speeds.sort(reverse=True)
        for speed in speeds[:-1]:
            prog_c.jmp_dp('base', 'L{}:'.format(speed), speed_BPS/speed * chunk_time)
        # Slowest bitrate is the fallback when every faster budget was missed.
        prog_c.set_next_template(self.template_names[speeds[-1]])
        for speed in speeds[:-1]:
            prog_c.jmp('exit:')
            prog_c.set_label('L{}:'.format(speed))
            prog_c.set_next_template(self.template_names[speed])
        prog_c.set_label('exit:')
        return prog_c

    def _create_server_program(self, speed_BPS, chunk_time, tg_data):
        # Receive the request, then stream one chunk's worth of bytes.
        prog_s = ASTFProgram(stream=True)
        prog_s.recv(len(tg_data))
        prog_s.send('', int(speed_BPS * chunk_time))
        return prog_s

    def METHOD_NAME(self, speed_BPS, chunk_time, tg_name):
        """Build the chunk-download template for one bitrate."""
        prog_c = self._create_client_program(speed_BPS, chunk_time, tg_name)
        prog_s = self._create_server_program(speed_BPS, chunk_time, tg_name)
        # template
        temp_c = ASTFTCPClientTemplate(program=prog_c, ip_gen=self.ip_gen, cps=0, port=self.port)
        temp_s = ASTFTCPServerTemplate(program=prog_s, assoc=self.l7_assoc)
        template = ASTFTemplate(client_template=temp_c, server_template=temp_s, tg_name=tg_name)
        return template

    def _create_scheduler_program(self, video_time, chunk_time, buffer_time, initial_tg_name):
        """Build the playback scheduler: fetch while the buffer has room, play
        in small time slices, and count stalls when the buffer runs dry."""
        prog = ASTFProgram(stream=False)
        prog.set_keepalive_msg(int((video_time+chunk_time)*1000))  # to prevent keepalive timeout
        # setup initial values
        prog.set_var("fetch_chunks", round(video_time/chunk_time))
        prog.set_var("saved_chunks", 0)
        play_time = chunk_time / 10  # playing resolution
        prog.set_next_template(initial_tg_name)
        prog.set_tick_var('base_time')
        prog.jmp('L_main_cond:')
        prog.set_label("L_main_loop:")
        # The ``if True:`` blocks only provide visual nesting for the emitted
        # jump program; they have no runtime effect.
        if True:
            # fetch a chunk by the control of saved chunks level
            prog.jmp_gt('saved_chunks', 'L_play:', int(buffer_time/chunk_time))
            prog.jmp_le('fetch_chunks', 'L_play:', 0)  # whole chunks are fetched
            if True:
                prog.set_tick_var('fetch_base')
                prog.exec_template()
                prog.add_var('saved_chunks', 1)
                prog.add_var('fetch_chunks', -1)
                prog.add_tick_stats('B', 'fetch_base')
            prog.jmp('L_fetch_end:')
            # simulate playing only
            prog.set_label('L_play:')
            if True:
                prog.delay(int(play_time * 1000000))
                prog.jmp('L_fetch_end:')
            prog.set_label('L_fetch_end:')
            # update played time
            prog.set_label('L_update_loop:')
            prog.jmp_dp('base_time', 'L_played_end:', chunk_time)
            if True:
                prog.jmp_le('saved_chunks', 'L_stall:', 0)
                if True:
                    prog.add_tick_var('base_time', chunk_time)
                    prog.add_var('saved_chunks', -1)
                    prog.jmp('L_update_loop:')
                prog.set_label('L_stall:')
                prog.jmp_le('fetch_chunks', 'L_played_end:', 0)  # exit loop if no fetch
                if True:
                    prog.add_stats('A', 1)  # stall count
                    prog.set_tick_var('base_time')
                    prog.jmp('L_played_end:')
            prog.set_label('L_played_end:')
        prog.set_label("L_main_cond:")
        prog.jmp_gt('fetch_chunks', 'L_main_loop:', 0)
        prog.jmp_gt('saved_chunks', 'L_main_loop:', 0)
        return prog

    def _create_initial_template(self, video_time, chunk_time, buffer_time, initial_speed):
        initial_tg_name = self.template_names[initial_speed]
        prog_c = self._create_scheduler_program(video_time, chunk_time, buffer_time, initial_tg_name)
        prog_s = ASTFProgram(stream=False)
        prog_s.close_msg()
        # template
        temp_c = ASTFTCPClientTemplate(program=prog_c, ip_gen=self.ip_gen, port=self.port)
        temp_s = ASTFTCPServerTemplate(program=prog_s, assoc=self.assoc)
        template = ASTFTemplate(client_template=temp_c, server_template=temp_s)
        return template

    def create_profile(self, speeds, video_time, chunk_time, buffer_time):
        """Assemble the full profile: one scheduler plus one template per bitrate."""
        self._setup_defaults()
        self._setup_params(speeds)
        # initial active template
        speeds.sort(reverse=True)  # best quality first
        initial_speed = speeds[0]
        temp = self._create_initial_template(video_time, chunk_time, buffer_time, initial_speed)
        templates = [temp]
        # inactive templates
        for speed in speeds:
            temp = self.METHOD_NAME(speed, chunk_time, self.template_names[speed])
            templates.append(temp)
        # profile
        profile = ASTFProfile(default_ip_gen=self.ip_gen, templates=templates)
        return profile

    def get_profile(self, tunables, **kwargs):
        # ``os`` was referenced below but never imported anywhere in the
        # module, so calling this raised NameError; import it locally.
        import os

        parser = argparse.ArgumentParser(description='Argparser for {}'.format(os.path.basename(__file__)),
                                         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        # NOTE(review): type=list applies list() to the CLI string ("123" ->
        # ['1', '2', '3']); only the default value is really usable — confirm.
        parser.add_argument('--speeds',
                            type=list,
                            default=[558000, 264250, 115875, 75000, 43625],
                            help='')
        parser.add_argument('--video_time',
                            type=float,
                            default=60.0,
                            help='')
        parser.add_argument('--chunk_time',
                            type=float,
                            default=4.0,
                            help='')
        parser.add_argument('--buffer_time',
                            type=float,
                            default=10.0,
                            help='')
        args = parser.parse_args(tunables)
        return self.create_profile(args.speeds, args.video_time, args.chunk_time, args.buffer_time)
def register():
    """Create and return the profile object (module entry point)."""
    return Prof1()
|
5,454 | prettify | #
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import json
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataTransform
from pipeline.component.evaluation import Evaluation
from pipeline.component.intersection import Intersection
from pipeline.component.reader import Reader
from pipeline.interface.data import Data
from pipeline.interface.model import Model
from pipeline.component.data_statistics import DataStatistics
from pipeline.utils.tools import load_job_config
def METHOD_NAME(response, verbose=True):
    """Pretty-print *response* as indented JSON (when *verbose*) and return it unchanged."""
    if verbose:
        rendered = json.dumps(response, indent=4, ensure_ascii=False)
        print(rendered)
        print()
    return response
def main(config="../../config.yaml", namespace=""):
    """Run the hetero data-statistics demo pipeline.

    config: path to a job-config YAML, or an already-loaded config object.
    namespace: suffix appended to the data namespaces (for test isolation).
    """
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    hosts = parties.host[0]
    guest_train_data = {"name": "ionosphere_scale_hetero_guest", "namespace": f"experiment{namespace}"}
    host_train_data = {"name": "ionosphere_scale_hetero_host", "namespace": f"experiment{namespace}"}
    # guest_train_data = {"name": "default_credit_hetero_guest", "namespace": f"experiment{namespace}"}
    # host_train_data = {"name": "default_credit_hetero_host", "namespace": f"experiment{namespace}"}
    # initialize pipeline
    pipeline = PipeLine()
    # set job initiator
    pipeline.set_initiator(role='guest', party_id=guest)
    # set participants information
    pipeline.set_roles(guest=guest, host=hosts)
    # define Reader components to read in data
    reader_0 = Reader(name="reader_0")
    # configure Reader for guest
    reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
    # configure Reader for host
    reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)
    data_transform_0 = DataTransform(name="data_transform_0", output_format='dense', missing_fill=False)
    # get DataTransform party instance of guest
    data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
    # configure DataTransform for guest (only the guest side carries labels)
    data_transform_0_guest_party_instance.component_param(with_label=True, label_name="label")
    # get and configure DataTransform party instance of host
    data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)
    # define Intersection components
    intersection_0 = Intersection(name="intersection_0")
    # wire reader -> data transform -> intersection -> statistics
    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
    pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))
    statistic_param = {
        "name": "statistic_0",
        "statistics": ["95%", "coefficient_of_variance", "stddev"],
        "column_indexes": [1, 2],
        "column_names": ["x3"]
    }
    statistic_0 = DataStatistics(**statistic_param)
    pipeline.add_component(statistic_0, data=Data(data=intersection_0.output.data))
    pipeline.compile()
    # fit model
    pipeline.fit()
    # query component summary
    METHOD_NAME(pipeline.get_component("statistic_0").get_summary())
if __name__ == "__main__":
    # CLI entry: optionally override the default job config path via -config.
    parser = argparse.ArgumentParser("PIPELINE DEMO")
    parser.add_argument("-config", type=str,
                        help="config file")
    args = parser.parse_args()
    if args.config is not None:
        main(args.config)
    else:
        main()
5,455 | handle | import socket
from meerk40t.kernel import Module
def plugin(kernel, lifecycle=None):
    """Kernel plugin hook: register the TCPServer module on the 'register' phase."""
    if lifecycle != "register":
        return
    _ = kernel.translation
    kernel.register("module/TCPServer", TCPServer)
class TCPServer(Module):
    """
    TCPServer opens up a localhost server and waits. Any connection is given its own handler.
    """

    def __init__(self, context, name, port=23):
        """
        Laser Server init.

        @param context: Context at which this module is attached.
        @param name: Name of this module
        @param port: Port being used for the server.
        """
        Module.__init__(self, context, name)
        self.port = port
        self.socket = None
        # Channels are named per-port so several servers can coexist.
        self.events_channel = self.context.channel(f"server-tcp-{port}")
        self.data_channel = self.context.channel(f"data-tcp-{port}")
        self.context.threaded(
            self.run_tcp_delegater, thread_name=f"tcp-{port}", daemon=True
        )

    def stop(self):
        # Signals both the accept loop and every connection handler to exit.
        self.state = "terminate"

    def module_close(self, *args, **kwargs):
        _ = self.context._
        self.events_channel(_("Shutting down server."))
        self.state = "terminate"
        if self.socket is not None:
            self.socket.close()
            self.socket = None

    def run_tcp_delegater(self):
        """
        TCP Run is a connection thread delegate. Any connections are given a different threaded
        handle to interact with that connection. This thread here waits for sockets and delegates.
        """
        _ = self.context._
        self.socket = socket.socket()
        try:
            self.socket.bind(("", self.port))
            self.socket.listen(1)
        except OSError:
            self.events_channel(_("Could not start listening."))
            return
        # Renamed from an opaque placeholder: numbers the handler threads.
        connection_count = 1
        while self.state != "terminate":
            self.events_channel(
                _("Listening {name} on port {port}...").format(
                    name=self.name, port=self.port
                )
            )
            connection = None
            address = None
            try:
                connection, address = self.socket.accept()
                self.events_channel(
                    _("Socket Connected: {address}").format(address=address)
                )
                # connection_handler() returns the per-connection loop, which
                # then runs on its own thread.
                self.context.threaded(
                    self.connection_handler(connection, address),
                    thread_name=f"handler-{self.port}-{connection_count}",
                    daemon=True,
                )
                connection_count += 1
            except socket.timeout:
                pass
            except OSError:
                self.events_channel(
                    _("Socket was killed: {address}").format(address=address)
                )
                if connection is not None:
                    connection.close()
                break
            except AttributeError:
                self.events_channel(_("Socket did not exist to accept connection."))
                break
        if self.socket is not None:
            self.socket.close()

    def connection_handler(self, connection, address):
        """
        The TCP Connection Handle, handles all connections delegated by the tcp_run() method.
        The threaded context is entirely local and independent.
        """
        _ = self.context._

        def handle():
            def send(e):
                if connection is None:
                    return
                try:
                    connection.send(bytes(e, "utf-8"))
                    self.data_channel(f"<-- {str(e)}")
                except (ConnectionAbortedError, ConnectionResetError):
                    connection.close()
                except OSError:
                    # Connection is likely already closed.
                    if connection is not None:
                        connection.close()

            recv = self.context.channel(f"{self.name}/recv", pure=True)
            send_channel_name = f"{self.name}/send"
            self.context.channel(send_channel_name, pure=True).watch(send)
            while self.state != "terminate":
                try:
                    data_from_socket = connection.recv(1024)
                    if len(data_from_socket):
                        self.data_channel(f"--> {str(data_from_socket)}")
                    else:
                        # Zero-byte read: the peer closed the connection.
                        break
                    recv(data_from_socket)
                except socket.timeout:
                    self.events_channel(
                        _("Connection to {address} timed out.").format(address=address)
                    )
                    break
                except OSError:
                    if connection is not None:
                        connection.close()
                    break
            self.context.channel(send_channel_name).unwatch(send)
            self.events_channel(
                _("Connection to {address} was closed.").format(address=address)
            )

        return handle
5,456 | test well posed external circuit differential power | #
# Tests for the Thevenin equivalant circuit model
#
from tests import TestCase
import pybamm
import unittest
class TestThevenin(TestCase):
    """Well-posedness tests for the Thevenin equivalent-circuit model."""

    def _well_posed(self, options):
        # Shared helper: build the model with *options* and check it.
        model = pybamm.equivalent_circuit.Thevenin(options=options)
        model.check_well_posedness()

    def test_standard_model(self):
        model = pybamm.equivalent_circuit.Thevenin()
        model.check_well_posedness()

    def test_changing_number_of_rcs(self):
        for n_rc in (0, 2, 3, 4):
            self._well_posed({"number of rc elements": n_rc})
        with self.assertRaisesRegex(pybamm.OptionError, "natural numbers"):
            self._well_posed({"number of rc elements": -1})

    def test_calculate_discharge_energy(self):
        self._well_posed({"calculate discharge energy": "true"})

    def test_well_posed_external_circuit_voltage(self):
        self._well_posed({"operating mode": "voltage"})

    def test_well_posed_external_circuit_power(self):
        self._well_posed({"operating mode": "power"})

    def METHOD_NAME(self):
        self._well_posed({"operating mode": "differential power"})

    def test_well_posed_external_circuit_resistance(self):
        self._well_posed({"operating mode": "resistance"})

    def test_well_posed_external_circuit_differential_resistance(self):
        self._well_posed({"operating mode": "differential resistance"})

    def test_well_posed_external_circuit_cccv(self):
        self._well_posed({"operating mode": "CCCV"})

    def test_well_posed_external_circuit_function(self):
        def external_circuit_function(variables):
            I = variables["Current [A]"]
            V = variables["Voltage [V]"]
            return (
                V
                + I
                - pybamm.FunctionParameter(
                    "Function", {"Time [s]": pybamm.t}, print_name="test_fun"
                )
            )

        self._well_posed({"operating mode": external_circuit_function})

    def test_raise_option_error(self):
        with self.assertRaisesRegex(
            pybamm.OptionError, "Option 'not an option' not recognised"
        ):
            pybamm.equivalent_circuit.Thevenin(options={"not an option": "something"})

    def test_not_a_valid_option(self):
        with self.assertRaisesRegex(
            pybamm.OptionError, "Option 'operating mode' must be one of"
        ):
            pybamm.equivalent_circuit.Thevenin(
                options={"operating mode": "not a valid option"}
            )

    def test_get_default_parameters(self):
        model = pybamm.equivalent_circuit.Thevenin()
        values = model.default_parameter_values
        self.assertIn("Initial SoC", list(values.keys()))
        values.process_model(model)

    def test_get_default_quick_plot_variables(self):
        model = pybamm.equivalent_circuit.Thevenin()
        variables = model.default_quick_plot_variables
        self.assertIn("Current [A]", variables)
if __name__ == "__main__":
    print("Add -v for more debug output")
    import sys

    if "-v" in sys.argv:
        # NOTE(review): ``debug`` is assigned but not read here — confirm
        # whether something else consumes it.
        debug = True
        pybamm.settings.debug_mode = True
    unittest.main()
5,457 | has tag | # Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
from __future__ import annotations
import copy
from typing import NewType, Sequence
import anki # pylint: disable=unused-import
import anki.cards
import anki.collection
import anki.decks
import anki.template
from anki import hooks, notes_pb2
from anki._legacy import DeprecatedNamesMixin
from anki.consts import MODEL_STD
from anki.models import NotetypeDict, NotetypeId, TemplateDict
from anki.utils import join_fields
DuplicateOrEmptyResult = notes_pb2.NoteFieldsCheckResponse.State
NoteFieldsCheckResult = notes_pb2.NoteFieldsCheckResponse.State
DefaultsForAdding = notes_pb2.DeckAndNotetype
# types
NoteId = NewType("NoteId", int)
class Note(DeprecatedNamesMixin):
    """A single note, loaded from / flushed to the Rust backend.

    Field values live in ``self.fields``; the dict-style interface below maps
    field names to their ordinal positions via ``self._fmap``.
    """

    # not currently exposed
    flags = 0
    data = ""
    id: NoteId
    mid: NotetypeId

    def __init__(
        self,
        col: anki.collection.Collection,
        model: NotetypeDict | NotetypeId | None = None,
        id: NoteId | None = None,
    ) -> None:
        # Exactly one of *model* (for a new note) or *id* (to load an
        # existing note) may be provided.
        if model and id:
            raise Exception("only model or id should be provided")
        notetype_id = model["id"] if isinstance(model, dict) else model
        self.col = col.weakref()
        if id:
            # existing note
            self.id = id
            self.load()
        else:
            # new note for provided notetype
            self._load_from_backend_note(self.col._backend.new_note(notetype_id))

    def load(self) -> None:
        """Re-read this note's state from the backend."""
        note = self.col._backend.get_note(self.id)
        assert note
        self._load_from_backend_note(note)

    def _load_from_backend_note(self, note: notes_pb2.Note) -> None:
        # Copy the protobuf message into plain Python attributes.
        self.id = NoteId(note.id)
        self.guid = note.guid
        self.mid = NotetypeId(note.notetype_id)
        self.mod = note.mtime_secs
        self.usn = note.usn
        self.tags = list(note.tags)
        self.fields = list(note.fields)
        self._fmap = self.col.models.field_map(self.note_type())

    def _to_backend_note(self) -> notes_pb2.Note:
        hooks.note_will_flush(self)
        return notes_pb2.Note(
            id=self.id,
            guid=self.guid,
            notetype_id=self.mid,
            mtime_secs=self.mod,
            usn=self.usn,
            tags=self.tags,
            fields=self.fields,
        )

    def flush(self) -> None:
        """This preserves any current checkpoint.
        For an undo entry, use col.update_note() instead."""
        if self.id == 0:
            raise Exception("can't flush a new note")
        self.col._backend.update_notes(
            notes=[self._to_backend_note()], skip_undo_entry=True
        )

    def joined_fields(self) -> str:
        """Return all field values joined into a single string."""
        return join_fields(self.fields)

    def ephemeral_card(
        self,
        ord: int = 0,
        *,
        custom_note_type: NotetypeDict | None = None,
        custom_template: TemplateDict | None = None,
        fill_empty: bool = False,
    ) -> anki.cards.Card:
        """Render a throwaway card for previewing; it is never saved."""
        card = anki.cards.Card(self.col)
        card.ord = ord
        card.did = anki.decks.DEFAULT_DECK_ID

        model = custom_note_type or self.note_type()
        template = copy.copy(
            custom_template
            or (
                model["tmpls"][ord] if model["type"] == MODEL_STD else model["tmpls"][0]
            )
        )
        # may differ in cloze case
        template["ord"] = card.ord

        output = anki.template.TemplateRenderContext.from_card_layout(
            self,
            card,
            notetype=model,
            template=template,
            fill_empty=fill_empty,
        ).render()
        card.set_render_output(output)
        card._note = self
        return card

    def cards(self) -> list[anki.cards.Card]:
        return [self.col.get_card(id) for id in self.card_ids()]

    def card_ids(self) -> Sequence[anki.cards.CardId]:
        return self.col.card_ids_of_note(self.id)

    def note_type(self) -> NotetypeDict | None:
        return self.col.models.get(self.mid)

    _note_type = property(note_type)

    def cloze_numbers_in_fields(self) -> Sequence[int]:
        return self.col._backend.cloze_numbers_in_note(self._to_backend_note())

    # Dict interface
    ##################################################

    def keys(self) -> list[str]:
        return list(self._fmap.keys())

    def values(self) -> list[str]:
        return self.fields

    def items(self) -> list[tuple[str, str]]:
        return [(f["name"], self.fields[ord]) for ord, f in sorted(self._fmap.values())]

    def _field_index(self, key: str) -> int:
        # Translate a field name to its ordinal; unknown names raise KeyError
        # so the dict interface behaves like a real mapping.
        try:
            return self._fmap[key][0]
        except Exception as exc:
            raise KeyError(key) from exc

    def __getitem__(self, key: str) -> str:
        return self.fields[self._field_index(key)]

    def __setitem__(self, key: str, value: str) -> None:
        self.fields[self._field_index(key)] = value

    def __contains__(self, key: str) -> bool:
        return key in self._fmap

    # Tags
    ##################################################

    def METHOD_NAME(self, tag: str) -> bool:
        return self.col.tags.in_list(tag, self.tags)

    def remove_tag(self, tag: str) -> None:
        # Case-insensitive removal of every matching tag.
        rem = []
        for tag_ in self.tags:
            if tag_.lower() == tag.lower():
                rem.append(tag_)
        for tag_ in rem:
            self.tags.remove(tag_)

    def add_tag(self, tag: str) -> None:
        "Add tag. Duplicates will be stripped on save."
        self.tags.append(tag)

    def string_tags(self) -> str:
        return self.col.tags.join(self.tags)

    def set_tags_from_str(self, tags: str) -> None:
        self.tags = self.col.tags.split(tags)

    # Unique/duplicate/cloze check
    ##################################################

    def fields_check(self) -> NoteFieldsCheckResult.V:
        return self.col._backend.note_fields_check(self._to_backend_note()).state

    # Legacy aliases kept for add-on compatibility.
    dupeOrEmpty = duplicate_or_empty = fields_check
# Map legacy method names to their current equivalents.
Note.register_deprecated_aliases(
    delTag=Note.remove_tag, _fieldOrd=Note._field_index, model=Note.note_type
)
5,458 | play check | import logging
from datetime import datetime
import requests
from flask import Blueprint, request
from src.api_helpers import success_response
from src.queries.get_celery_tasks import convert_epoch_to_datetime, get_celery_tasks
from src.queries.get_db_seed_restore_status import get_db_seed_restore_status
from src.queries.get_entities_count_check import get_entities_count_check
from src.queries.get_health import get_health, get_location
from src.queries.get_latest_play import get_latest_play
from src.queries.get_sol_plays import get_latest_sol_play_check_info
from src.queries.get_trusted_notifier_discrepancies import (
get_trusted_notifier_discrepancies,
)
from src.queries.queries import parse_bool_param
from src.tasks.index_profile_challenge_backfill import (
index_profile_challenge_backfill_tablename,
)
from src.utils import helpers, redis_connection
from src.utils.db_session import get_db_read_replica
from src.utils.elasticdsl import esclient
from src.utils.update_indexing_checkpoints import get_last_indexed_checkpoint
logger = logging.getLogger(__name__)

bp = Blueprint("health_check", __name__)

# Computed once at import time; served by /version.
disc_prov_version = helpers.get_discovery_provider_version()
@bp.route("/version", methods=["GET"])
def version():
return success_response(disc_prov_version, sign_response=False)
@bp.route("/health_check", methods=["GET"])
def health_check():
args = {
"verbose": parse_bool_param(request.args.get("verbose")),
"healthy_block_diff": request.args.get("healthy_block_diff", type=int),
"enforce_block_diff": parse_bool_param(request.args.get("enforce_block_diff")),
"challenge_events_age_max_drift": request.args.get(
"challenge_events_age_max_drift", type=int
),
"plays_count_max_drift": request.args.get("plays_count_max_drift", type=int),
"reactions_max_indexing_drift": request.args.get(
"reactions_max_indexing_drift", type=int
),
"reactions_max_last_reaction_drift": request.args.get(
"reactions_max_last_reaction_drift", type=int
),
"rewards_manager_max_drift": request.args.get(
"rewards_manager_max_drift", type=int
),
"user_bank_max_drift": request.args.get("user_bank_max_drift", type=int),
"spl_audio_max_drift": request.args.get("spl_audio_max_drift", type=int),
}
try:
comms_health = {"comms": requests.get("http://comms:8925/comms").json()}
except Exception as e:
logger.error(f"Error fetching comms health {e}")
comms_health = {}
(health_results, error) = get_health(args)
return success_response(
health_results, 500 if error else 200, sign_response=False, extras=comms_health
)
@bp.route("/trusted_notifier_discrepancies_check", methods=["GET"])
def trusted_notifier_discrepancies_check():
(health_results, error) = get_trusted_notifier_discrepancies()
return success_response(health_results, 500 if error else 200, sign_response=False)
@bp.route("/entities_count_check", methods=["GET"])
def entities_count_check():
res_count = get_entities_count_check()
return success_response(res_count)
# Health check for block diff between DB and chain.
@bp.route("/block_check", methods=["GET"])
def block_check():
    """Like /health_check but always enforces the block-diff threshold,
    and bypasses the redis cache."""
    args = {
        "verbose": parse_bool_param(request.args.get("verbose")),
        "healthy_block_diff": request.args.get("healthy_block_diff", type=int),
        "enforce_block_diff": True,
    }
    (health_results, error) = get_health(args, use_redis_cache=False)
    return success_response(health_results, 500 if error else 200, sign_response=False)
# Health check for latest play stored in the db
@bp.route("/play_check", methods=["GET"])
def METHOD_NAME():
    """
    max_drift: maximum duration in seconds between `now` and the
    latest recorded play record to be considered healthy
    """
    max_drift = request.args.get("max_drift", type=int)
    latest_play = get_latest_play()
    seconds_since_play = (datetime.now() - latest_play).total_seconds()
    # Unhealthy only when a max_drift was supplied and has been exceeded.
    unhealthy = bool(max_drift) and seconds_since_play > max_drift
    return success_response(latest_play, 500 if unhealthy else 200, sign_response=False)
# Health check for the latest Solana play stored in the db
@bp.route("/sol_play_check", methods=["GET"])
def sol_play_check():
    """
    limit: number of latest plays to return
    max_drift: maximum duration in seconds between `now` and the
    latest recorded play record to be considered healthy
    """
    limit = request.args.get("limit", type=int, default=20)
    max_drift = request.args.get("max_drift", type=int)
    error = None
    redis = redis_connection.get_redis()
    response = {}
    response = get_latest_sol_play_check_info(redis, limit)
    latest_db_sol_plays = response["latest_db_sol_plays"]
    if latest_db_sol_plays:
        latest_db_play = latest_db_sol_plays[0]
        latest_created_at = latest_db_play["created_at"]
        drift = (datetime.now() - latest_created_at).total_seconds()
        # Error if max drift was provided and the drift is greater than max_drift
        error = max_drift and drift > max_drift
    return success_response(response, 500 if error else 200, sign_response=False)
@bp.route("/ip_check", methods=["GET"])
def ip_check():
ip = helpers.get_ip(request)
return success_response(ip, sign_response=False)
@bp.route("/es_health", methods=["GET"])
def es_health():
ok = esclient.cat.indices(v=True)
return str(ok)
@bp.route("/celery_tasks_check", methods=["GET"])
def celery_tasks_check():
tasks = get_celery_tasks()
all_tasks = tasks.get("celery_tasks", [])
for task in all_tasks.get("active_tasks", []):
task["started_at_est_timestamp"] = convert_epoch_to_datetime(
task.get("started_at")
)
return success_response(tasks, sign_response=False)
@bp.route("/db_seed_restore_check", methods=["GET"])
def db_seed_restore_check():
has_restored, seed_hash = get_db_seed_restore_status()
response = {"has_restored": has_restored, "seed_hash": seed_hash}
return success_response(response, sign_response=False)
@bp.route("/location", methods=["GET"])
def location():
location = get_location()
return success_response(location, sign_response=False)
@bp.route("/backfill_profile_challenge", methods=["GET"])
def backfill_profile_challenge_check():
db = get_db_read_replica()
with db.scoped_session() as session:
checkpoint = get_last_indexed_checkpoint(
session, index_profile_challenge_backfill_tablename
)
return success_response(checkpoint, sign_response=False) |
5,459 | get context | from __future__ import annotations
import contextlib
import json
from io import BytesIO
from typing import Any, AsyncGenerator, Dict, List, Optional, Union
from typing_extensions import Literal
from starlette.requests import Request
from starlette.responses import Response as StarletteResponse
from starlette.testclient import TestClient
from starlette.websockets import WebSocket, WebSocketDisconnect
from strawberry.asgi import GraphQL as BaseGraphQLView
from strawberry.asgi.handlers import GraphQLTransportWSHandler, GraphQLWSHandler
from strawberry.http import GraphQLHTTPResponse
from strawberry.types import ExecutionResult
from tests.views.schema import Query, schema
from ..context import METHOD_NAME
from .base import (
JSON,
DebuggableGraphQLTransportWSMixin,
DebuggableGraphQLWSMixin,
HttpClient,
Message,
Response,
ResultOverrideFunction,
WebSocketClient,
)
class DebuggableGraphQLTransportWSHandler(
    DebuggableGraphQLTransportWSMixin, GraphQLTransportWSHandler
):
    # The mixin supplies the debug hooks; no extra behavior is needed here.
    pass
class DebuggableGraphQLWSHandler(DebuggableGraphQLWSMixin, GraphQLWSHandler):
    # The mixin supplies the debug hooks; no extra behavior is needed here.
    pass
class GraphQLView(BaseGraphQLView):
    """ASGI GraphQL view used by the test suite, wired to debuggable WS handlers."""

    # Optional hook letting a test substitute the execution result.
    result_override: ResultOverrideFunction = None
    graphql_transport_ws_handler_class = DebuggableGraphQLTransportWSHandler
    graphql_ws_handler_class = DebuggableGraphQLWSHandler

    async def get_root_value(self, request: Union[WebSocket, Request]) -> Query:
        return Query()

    async def METHOD_NAME(
        self,
        request: Union[Request, WebSocket],
        response: Optional[StarletteResponse] = None,
    ) -> object:
        context = await super().METHOD_NAME(request, response)
        # The trailing call resolves to the module-level helper imported above,
        # not to this method (class scope does not leak into method bodies).
        return METHOD_NAME(context)

    async def process_result(
        self, request: Request, result: ExecutionResult
    ) -> GraphQLHTTPResponse:
        # Allow tests to replace the result wholesale before serialization.
        if self.result_override:
            return self.result_override(result)

        return await super().process_result(request, result)
class AsgiHttpClient(HttpClient):
    """HTTP/WebSocket test client driving the strawberry ASGI GraphQL view."""

    def __init__(
        self,
        graphiql: bool = True,
        allow_queries_via_get: bool = True,
        result_override: ResultOverrideFunction = None,
    ):
        view = GraphQLView(
            schema,
            graphiql=graphiql,
            allow_queries_via_get=allow_queries_via_get,
            keep_alive=False,
        )
        view.result_override = result_override

        self.client = TestClient(view)

    def create_app(self, **kwargs: Any) -> None:
        # Replace the underlying view entirely (for tests needing custom options).
        view = GraphQLView(schema=schema, **kwargs)
        self.client = TestClient(view)

    async def _graphql_request(
        self,
        method: Literal["get", "post"],
        query: Optional[str] = None,
        variables: Optional[Dict[str, object]] = None,
        files: Optional[Dict[str, BytesIO]] = None,
        headers: Optional[Dict[str, str]] = None,
        **kwargs: Any,
    ) -> Response:
        """Issue a GraphQL request, adapting the body to the transport:
        query-string for GET, multipart form when files are attached,
        JSON otherwise."""
        body = self._build_body(
            query=query, variables=variables, files=files, method=method
        )

        if method == "get":
            kwargs["params"] = body
        elif body:
            if files:
                kwargs["data"] = body
            else:
                kwargs["content"] = json.dumps(body)

        if files is not None:
            kwargs["files"] = files

        response = getattr(self.client, method)(
            "/graphql",
            headers=self._get_headers(method=method, headers=headers, files=files),
            **kwargs,
        )

        return Response(
            status_code=response.status_code,
            data=response.content,
            headers=response.headers,
        )

    async def request(
        self,
        url: str,
        method: Literal["get", "post", "patch", "put", "delete"],
        headers: Optional[Dict[str, str]] = None,
    ) -> Response:
        response = getattr(self.client, method)(url, headers=headers)

        return Response(
            status_code=response.status_code,
            data=response.content,
            headers=response.headers,
        )

    async def get(
        self,
        url: str,
        headers: Optional[Dict[str, str]] = None,
    ) -> Response:
        return await self.request(url, "get", headers=headers)

    async def post(
        self,
        url: str,
        data: Optional[bytes] = None,
        json: Optional[JSON] = None,
        headers: Optional[Dict[str, str]] = None,
    ) -> Response:
        response = self.client.post(url, headers=headers, content=data, json=json)

        return Response(
            status_code=response.status_code,
            data=response.content,
            headers=response.headers,
        )

    @contextlib.asynccontextmanager
    async def ws_connect(
        self,
        url: str,
        *,
        protocols: List[str],
    ) -> AsyncGenerator[WebSocketClient, None]:
        try:
            with self.client.websocket_connect(url, protocols) as ws:
                yield AsgiWebSocketClient(ws)
        except WebSocketDisconnect as error:
            # Surface a pre-disconnected client instead of raising, so tests
            # can assert on the close code/reason.
            ws = AsgiWebSocketClient(None)
            ws.handle_disconnect(error)
            yield ws
class AsgiWebSocketClient(WebSocketClient):
    """Adapter exposing Starlette's test websocket through the async
    WebSocketClient interface; tracks close code/reason locally."""
    def __init__(self, ws: Any):
        self.ws = ws
        self._closed: bool = False
        self._close_code: Optional[int] = None
        self._close_reason: Optional[str] = None
    def handle_disconnect(self, exc: WebSocketDisconnect) -> None:
        """Record a disconnect that surfaced as an exception during connect."""
        self._closed = True
        self._close_code = exc.code
        self._close_reason = exc.reason
    async def send_json(self, payload: Dict[str, Any]) -> None:
        self.ws.send_json(payload)
    async def send_bytes(self, payload: bytes) -> None:
        self.ws.send_bytes(payload)
    async def receive(self, timeout: Optional[float] = None) -> Message:
        # NOTE(review): *timeout* is accepted for interface parity but not
        # honored here — the underlying TestClient receive is blocking.
        if self._closed:
            # if close was received via exception, fake it so that recv works
            return Message(
                type="websocket.close", data=self._close_code, extra=self._close_reason
            )
        m = self.ws.receive()
        if m["type"] == "websocket.close":
            # remember close state so later receives replay it
            self._closed = True
            self._close_code = m["code"]
            self._close_reason = m["reason"]
            return Message(type=m["type"], data=m["code"], extra=m["reason"])
        elif m["type"] == "websocket.send":
            return Message(type=m["type"], data=m["text"])
        return Message(type=m["type"], data=m["data"], extra=m["extra"])
    async def receive_json(self, timeout: Optional[float] = None) -> Any:
        # timeout ignored, as in receive() above
        m = self.ws.receive()
        assert m["type"] == "websocket.send"
        assert "text" in m
        return json.loads(m["text"])
    async def close(self) -> None:
        self.ws.close()
        self._closed = True
    @property
    def closed(self) -> bool:
        return self._closed
    @property
    def close_code(self) -> int:
        assert self._close_code is not None
        return self._close_code
    def assert_reason(self, reason: str) -> None:
        assert self._close_reason == reason
5,460 | test login with default | import utility as util
import os
import os.path
import time
# test oauth login with default parameters
def METHOD_NAME():
    """Validate interactive `azcopy login` with default parameters.

    On Windows the cached AccessToken.json is additionally checked to confirm
    the token was freshly written; on other platforms a successful login
    command is treated as a pass.  Results are reported via print, matching
    the other tests in this script.
    """
    # execute the azcopy login.
    output = util.Command("login").execute_azcopy_command_interactive()
    if output is None:
        print("error login")
        print("test_login_with_default test failed")
        return
    # for windows, further check access token file, for other os, report success if login succeeded.
    if os.name == 'nt':
        # resolve the azcopy app directory that holds the cached token.
        output = util.Command("info").add_arguments("AzCopyAppPath").execute_azcopy_info()
        if output is None:
            print("error get info")
            print("test_login_with_default test internal error, fail to validate login")
            # bail out: without the app path the token file cannot be located,
            # and os.path.join(None, ...) would raise TypeError below.
            return
        token_file_path = os.path.join(output, "AccessToken.json")
        if not os.path.isfile(token_file_path):
            print("cannot find cached AccessToken.json")
            print("test_login_with_default test failed")
            return
        # the login just ran, so the cached token file should have been
        # (re)written within the last 30 seconds.
        if time.time() - os.stat(token_file_path).st_mtime < 30:
            print("test_login_with_default passed successfully")
        else:
            print("test_login_with_default test failed")
    else:
        print("test_login_with_default passed successfully")
# test oauth login with customized tenant and aad endpoint
def test_login(tenant, aadEndpoint):
    """Validate interactive `azcopy login` against a custom tenant/AAD endpoint.

    Args:
        tenant: tenant id passed via --tenant-id; skipped when empty.
        aadEndpoint: AAD endpoint passed via --aad-endpoint; skipped when empty.
    """
    print("test_login tenant: ", tenant , " aadEndpoint: ", aadEndpoint)
    # execute the azcopy login.
    cmd = util.Command("login")
    if tenant != "":
        cmd.add_flags("tenant-id", tenant)
    if aadEndpoint != "":
        cmd.add_flags("aad-endpoint", aadEndpoint)
    output = cmd.execute_azcopy_command_interactive()
    if output is None:
        print("error login")
        print("test_login test failed")
        return
    # for windows, further check access token file, for other os, report success if login succeeded.
    if os.name == 'nt':
        # resolve the azcopy app directory that holds the cached token.
        output = util.Command("info").add_arguments("AzCopyAppPath").execute_azcopy_info()
        if output is None:
            print("error get info")
            print("test_login test internal error, fail to validate login")
            # bail out: without the app path the token file cannot be located,
            # and os.path.join(None, ...) would raise TypeError below.
            return
        token_file_path = os.path.join(output, "AccessToken.json")
        if not os.path.isfile(token_file_path):
            print("cannot find cached AccessToken.json")
            print("test_login test failed")
            return
        # the login just ran, so the cached token file should have been
        # (re)written within the last 30 seconds.
        if time.time() - os.stat(token_file_path).st_mtime < 30:
            print("test_login passed successfully")
        else:
            print("test_login test failed")
    else:
        print("test_login passed successfully")
# test oauth logout
def test_logout():
    """Validate `azcopy logout`.

    On Windows the cached AccessToken.json must be gone after logout; on
    other platforms a successful command is treated as a pass.
    """
    print("test_logout")
    # execute the azcopy logout.
    output = util.Command("logout").execute_azcopy_copy_command_get_output()
    if output is None:
        print("error logout")
        print("test_logout test failed")
        return
    # for windows, further check access token file, for other os, report success if logout succeeded.
    if os.name == 'nt':
        # resolve the azcopy app directory that holds the cached token.
        output = util.Command("info").add_arguments("AzCopyAppPath").execute_azcopy_info()
        if output is None:
            print("error get info")
            print("test_logout test internal error, fail to validate logout")
            # bail out: without the app path the token file cannot be located,
            # and os.path.join(None, ...) would raise TypeError below.
            return
        print("test_logout AzCopyAppPath detected ", output)
        token_file_path = os.path.join(output, "AccessToken.json")
        if os.path.isfile(token_file_path):
            print("find cached AccessToken.json after logout")
            print("test_logout test failed")
        else:
            print("test_logout passed successfully")
    else:
        print("test_logout passed successfully")
5,461 | decode image | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cv2
import numpy as np
def METHOD_NAME(im_file, im_info):
    """Load an RGB image and record its basic metadata.

    Args:
        im_file (str|np.ndarray): image path, or an already-decoded array
            that is passed through untouched.
        im_info (dict): metadata dict to populate.
    Returns:
        tuple: (image as np.ndarray, im_info with 'im_shape'/'scale_factor')
    """
    if isinstance(im_file, str):
        # Read raw bytes, decode via OpenCV (BGR), then convert to RGB.
        with open(im_file, 'rb') as fh:
            raw = fh.read()
        buf = np.frombuffer(raw, dtype='uint8')
        image = cv2.cvtColor(cv2.imdecode(buf, 1), cv2.COLOR_BGR2RGB)
    else:
        image = im_file
    im_info['im_shape'] = np.array(image.shape[:2], dtype=np.float32)
    im_info['scale_factor'] = np.array([1., 1.], dtype=np.float32)
    return image, im_info
class Resize(object):
    """resize image by target_size and max_size
    Args:
        target_size (int): the target size of image
        keep_ratio (bool): whether keep_ratio or not, default true
        interp (int): method of resize
    """
    def __init__(self, target_size, keep_ratio=True, interp=cv2.INTER_LINEAR):
        # a scalar target is treated as a square [size, size]
        if isinstance(target_size, int):
            target_size = [target_size, target_size]
        self.target_size = target_size
        self.keep_ratio = keep_ratio
        self.interp = interp
    def __call__(self, im, im_info):
        """
        Args:
            im (np.ndarray): image (np.ndarray)
            im_info (dict): info of image
        Returns:
            im (np.ndarray): processed image (np.ndarray)
            im_info (dict): info of processed image
        """
        assert len(self.target_size) == 2
        assert self.target_size[0] > 0 and self.target_size[1] > 0
        im_channel = im.shape[2]
        im_scale_y, im_scale_x = self.generate_scale(im)
        # resize by scale factors rather than absolute size so that
        # keep_ratio scaling stays exact
        im = cv2.resize(
            im,
            None,
            None,
            fx=im_scale_x,
            fy=im_scale_y,
            interpolation=self.interp)
        im_info['im_shape'] = np.array(im.shape[:2]).astype('float32')
        im_info['scale_factor'] = np.array(
            [im_scale_y, im_scale_x]).astype('float32')
        return im, im_info
    def generate_scale(self, im):
        """
        Args:
            im (np.ndarray): image (np.ndarray)
        Returns:
            im_scale_x: the resize ratio of X
            im_scale_y: the resize ratio of Y
        """
        origin_shape = im.shape[:2]
        im_c = im.shape[2]
        if self.keep_ratio:
            # scale the short side to the small target, but clamp so the
            # long side never exceeds the large target
            im_size_min = np.min(origin_shape)
            im_size_max = np.max(origin_shape)
            target_size_min = np.min(self.target_size)
            target_size_max = np.max(self.target_size)
            im_scale = float(target_size_min) / float(im_size_min)
            if np.round(im_scale * im_size_max) > target_size_max:
                im_scale = float(target_size_max) / float(im_size_max)
            im_scale_x = im_scale
            im_scale_y = im_scale
        else:
            # independent per-axis scaling to exactly (resize_h, resize_w)
            resize_h, resize_w = self.target_size
            im_scale_y = resize_h / float(origin_shape[0])
            im_scale_x = resize_w / float(origin_shape[1])
        return im_scale_y, im_scale_x
class NormalizeImage(object):
    """Normalize an HWC float image.

    Args:
        mean (list): per-channel mean subtracted when norm_type == 'mean_std'
        std (list): per-channel std divided when norm_type == 'mean_std'
        is_scale (bool): when True, divide pixel values by 255 first
        norm_type (str): one of ['mean_std', 'none']
    """
    def __init__(self, mean, std, is_scale=True, norm_type='mean_std'):
        self.mean = mean
        self.std = std
        self.is_scale = is_scale
        self.norm_type = norm_type
    def __call__(self, im, im_info):
        """Apply scaling and mean/std normalization in place.

        Args:
            im (np.ndarray): HWC image
            im_info (dict): image metadata, passed through unchanged
        Returns:
            tuple: (normalized image, im_info)
        """
        img = im.astype(np.float32, copy=False)
        if self.is_scale:
            img *= 1.0 / 255.0
        if self.norm_type == 'mean_std':
            # broadcast channel statistics over H and W
            img -= np.array(self.mean)[np.newaxis, np.newaxis, :]
            img /= np.array(self.std)[np.newaxis, np.newaxis, :]
        return img, im_info
class Permute(object):
    """Convert an image from HWC layout to CHW layout."""
    def __init__(self):
        super(Permute, self).__init__()
    def __call__(self, im, im_info):
        """Transpose HWC -> CHW and return a contiguous copy.

        Args:
            im (np.ndarray): HWC image
            im_info (dict): image metadata, passed through unchanged
        Returns:
            tuple: (CHW image, im_info)
        """
        chw = np.transpose(im, (2, 0, 1)).copy()
        return chw, im_info
class PadStride(object):
    """Zero-pad a CHW image so H and W are multiples of a stride.

    Replaces PadBatch(pad_to_stride) from the original config for models
    with FPN, which require image shape % stride == 0.

    Args:
        stride (int): target stride; <= 0 disables padding
    """
    def __init__(self, stride=0):
        self.coarsest_stride = stride
    def __call__(self, im, im_info):
        """Pad the image on the bottom/right with zeros.

        Args:
            im (np.ndarray): CHW image
            im_info (dict): image metadata, passed through unchanged
        Returns:
            tuple: (padded image, im_info)
        """
        stride = self.coarsest_stride
        if stride <= 0:
            return im, im_info
        im_c, im_h, im_w = im.shape
        # integer ceiling-division: same result as np.ceil on positive sizes
        pad_h = -(-im_h // stride) * stride
        pad_w = -(-im_w // stride) * stride
        canvas = np.zeros((im_c, pad_h, pad_w), dtype=np.float32)
        canvas[:, :im_h, :im_w] = im
        return canvas, im_info
def preprocess(im, preprocess_ops):
    """Decode *im* and run it through the ordered preprocess operators.

    Args:
        im (str|np.ndarray): image path or already-decoded RGB array
        preprocess_ops (list): callables taking and returning (im, im_info)
    Returns:
        tuple: (processed image, im_info with 'im_shape'/'scale_factor')
    """
    # process image by preprocess_ops
    im_info = {
        'scale_factor': np.array(
            [1., 1.], dtype=np.float32),
        'im_shape': None,
    }
    im, im_info = METHOD_NAME(im, im_info)
    for operator in preprocess_ops:
        im, im_info = operator(im, im_info)
    return im, im_info
5,462 | close | import h5py # type: ignore[import]
import numpy as np
from numpy.core.defchararray import encode, decode
from typing import Union, Dict, Any, Iterable, Optional, Tuple
# Note: I would like to replace Any here with str, float, int, np.ndarray, etc.
# However, this heterogeneous pattern causes issues with mypy indexing
hdf5_get_types = Union['hdf5_wrapper', Any]
nested_dict_type = Dict[str, Any]
hdf5_set_types = Union['hdf5_wrapper', nested_dict_type, Any]
class hdf5_wrapper():
    """
    A class for reading/writing hdf5 files, which behaves similar to a native dict
    """
    def __init__(self, fname: str = '', target: Optional[h5py.File] = None, mode: str = 'r') -> None:
        """
        Initialize the hdf5_wrapper class
        If the fname is supplied (either by a positional or keyword argument),
        the wrapper will open a hdf5 database from the filesystem.
        The recommended options for the mode flag include 'r' for read-only
        and 'a' for read/write access.
        If write mode is enabled, and the fname does not point
        to an existing file, a new database will be created.
        If the target is supplied, then a new instance of the wrapper will
        be created using an existing database handle.
        Args:
            fname (str): the filename of a new or existing hdf5 database
            target (hdf5_wrapper): the handle of an existing hdf5 dataset
            mode (str): the read/write behavior of the database (default='r')
        """
        self.mode: str = mode
        self.target: h5py.File = target
        # fname takes precedence over an explicit target handle
        if fname:
            self.target = h5py.File(fname, self.mode)
    def __getitem__(self, k: str) -> hdf5_get_types:
        """
        Get a target from the database
        If the target is not present in the datastructure and the
        database is open in read/write mode, the wrapper will create a
        new group and return an hdf5_wrapper. Otherwise it will throw an error
        Args:
            k (str): name of target group or array
        Returns:
            hdf5_wrapper/np.ndarray: The returned value
        """
        if (k not in self.target):
            if (self.mode in ['w', 'a']):
                # NOTE(review): a plain read of a missing key in write mode
                # creates an empty group as a side effect — intentional per
                # the docstring, but surprising for dict-like usage.
                self.target.create_group(k)
            else:
                raise ValueError('Entry does not exist in database: %s' % (k))
        tmp = self.target[k]
        if isinstance(tmp, h5py._hl.group.Group):
            # groups come back wrapped so nested access keeps working
            return hdf5_wrapper(target=tmp, mode=self.mode)
        elif isinstance(tmp, h5py._hl.dataset.Dataset):
            tmp = np.array(tmp)
            # Decode any string types
            if (tmp.dtype.kind in ['S', 'U', 'O']):
                tmp = decode(tmp)
            # Convert any 0-length arrays to native types
            if not tmp.shape:
                tmp = tmp[()]
            return tmp
        else:
            return tmp
    def __setitem__(self, k: str, value: hdf5_set_types):
        """
        Write an object to the database if write-mode is enabled
        Args:
            k (str): the name of the object
            value (dict, np.ndarray, float, int, str): the object to be written
        """
        if (self.mode in ['w', 'a']):
            if isinstance(value, (dict, hdf5_wrapper)):
                # Recursively add groups and their children
                if (k not in self.target):
                    self.target.create_group(k)
                new_group = self[k]
                for kb, x in value.items():
                    new_group[kb] = x
            else:
                # Delete the old copy if necessary
                if (k in self.target):
                    del (self.target[k])
                # Add everything else as an ndarray
                tmp = np.array(value)
                # h5py stores text as bytes; encode string-like dtypes
                if (tmp.dtype.kind in ['S', 'U', 'O']):
                    tmp = encode(tmp)
                self.target[k] = tmp
        else:
            raise ValueError(
                'Cannot write to an hdf5 opened in read-only mode! This can be changed by overriding the default mode argument for the wrapper.'
            )
    def link(self, k: str, target: str) -> None:
        """
        Link an external hdf5 file to this location in the database
        Args:
            k (str): the name of the new link in the database
            target (str): the path to the external database
        """
        self.target[k] = h5py.ExternalLink(target, '/')
    def keys(self) -> Iterable[str]:
        """
        Get a list of groups and arrays located at the current level
        Returns:
            list: a list of key names pointing to objects at the current level
        """
        if isinstance(self.target, h5py._hl.group.Group):
            return list(self.target)
        else:
            raise ValueError('Object not a group!')
    def values(self) -> Iterable[hdf5_get_types]:
        """
        Get a list of values located on the current level
        """
        return [self[k] for k in self.keys()]
    def items(self) -> Iterable[Tuple[str, hdf5_get_types]]:
        # dict-style (key, value) pairs for the current level
        return zip(self.keys(), self.values())
    def __enter__(self):
        """
        Entry point for an iterator
        """
        return self
    def __exit__(self, type, value, traceback) -> None:
        """
        End point for an iterator
        """
        self.target.METHOD_NAME()
    def __del__(self) -> None:
        """
        Closes the database on wrapper deletion
        """
        # broad except: the interpreter may be tearing down during __del__,
        # so any failure here is deliberately swallowed
        try:
            if isinstance(self.target, h5py._hl.files.File):
                self.target.METHOD_NAME()
        except:
            pass
    def METHOD_NAME(self) -> None:
        """
        Closes the database
        """
        # only top-level File handles are closed; group views are left alone
        if isinstance(self.target, h5py._hl.files.File):
            self.target.METHOD_NAME()
    def get_copy(self) -> nested_dict_type:
        """
        Copy the entire database into memory
        Returns:
            dict: a dictionary holding the database contents
        """
        result: Dict[Union[str, int], Any] = {}
        for k in self.keys():
            tmp = self[k]
            if isinstance(tmp, hdf5_wrapper):
                result[k] = tmp.get_copy()
            else:
                result[k] = tmp
        return result
    def copy(self) -> nested_dict_type:
        """
        Copy the entire database into memory
        Returns:
            dict: a dictionary holding the database contents
        """
        return self.get_copy()
    def insert(self, x: Union[nested_dict_type, 'hdf5_wrapper']) -> None:
        """
        Insert the contents of the target object to the current location
        Args:
            x (dict, hdf5_wrapper): the dictionary to insert
        """
        for k, v in x.items():
            self[k] = v
5,463 | array string multi null | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, List, Optional, TypeVar
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.rest import AsyncHttpResponse, HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from ...operations._operations import (
build_queries_array_string_multi_empty_request,
build_queries_array_string_multi_null_request,
build_queries_array_string_multi_valid_request,
)
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class QueriesOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.
        Instead, you should access the following operations through
        :class:`~urlmulticollectionformatversiontolerant.aio.AutoRestUrlMutliCollectionFormatTestService`'s
        :attr:`queries` attribute.
    """
    def __init__(self, *args, **kwargs) -> None:
        # Generated plumbing: accept client/config/(de)serializer either
        # positionally (pipeline construction) or as keyword arguments.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    @distributed_trace_async
    async def METHOD_NAME(  # pylint: disable=inconsistent-return-statements
        self, *, array_query: Optional[List[str]] = None, **kwargs: Any
    ) -> None:
        """Get a null array of string using the multi-array format.
        :keyword array_query: a null array of string using the multi-array format. Default value is
         None.
        :paramtype array_query: list[str]
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # map well-known HTTP status codes to azure-core exception types
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}
        cls: ClsType[None] = kwargs.pop("cls", None)
        request = build_queries_array_string_multi_null_request(
            array_query=array_query,
            headers=_headers,
            params=_params,
        )
        request.url = self._client.format_url(request.url)
        # responses are buffered: the body is fully read before status checks
        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            if _stream:
                await response.read()  # Load the body in memory and close the socket
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        if cls:
            return cls(pipeline_response, None, {})
    @distributed_trace_async
    async def array_string_multi_empty(  # pylint: disable=inconsistent-return-statements
        self, *, array_query: Optional[List[str]] = None, **kwargs: Any
    ) -> None:
        """Get an empty array [] of string using the multi-array format.
        :keyword array_query: an empty array [] of string using the multi-array format. Default value
         is None.
        :paramtype array_query: list[str]
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # map well-known HTTP status codes to azure-core exception types
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}
        cls: ClsType[None] = kwargs.pop("cls", None)
        request = build_queries_array_string_multi_empty_request(
            array_query=array_query,
            headers=_headers,
            params=_params,
        )
        request.url = self._client.format_url(request.url)
        # responses are buffered: the body is fully read before status checks
        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            if _stream:
                await response.read()  # Load the body in memory and close the socket
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        if cls:
            return cls(pipeline_response, None, {})
    @distributed_trace_async
    async def array_string_multi_valid(  # pylint: disable=inconsistent-return-statements
        self, *, array_query: Optional[List[str]] = None, **kwargs: Any
    ) -> None:
        """Get an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null, ''] using the
        mult-array format.
        :keyword array_query: an array of string ['ArrayQuery1', 'begin!*'();:@ &=+$,/?#[]end' , null,
         ''] using the mult-array format. Default value is None.
        :paramtype array_query: list[str]
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # map well-known HTTP status codes to azure-core exception types
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}
        cls: ClsType[None] = kwargs.pop("cls", None)
        request = build_queries_array_string_multi_valid_request(
            array_query=array_query,
            headers=_headers,
            params=_params,
        )
        request.url = self._client.format_url(request.url)
        # responses are buffered: the body is fully read before status checks
        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=_stream, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            if _stream:
                await response.read()  # Load the body in memory and close the socket
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)
        if cls:
            return cls(pipeline_response, None, {})
5,464 | test run context provider registry register entrypoints | from importlib import reload
from unittest import mock
import pytest
import mlflow.tracking.context.registry
from mlflow.tracking.context.databricks_job_context import DatabricksJobRunContext
from mlflow.tracking.context.databricks_notebook_context import DatabricksNotebookRunContext
from mlflow.tracking.context.databricks_repo_context import DatabricksRepoRunContext
from mlflow.tracking.context.default_context import DefaultRunContext
from mlflow.tracking.context.git_context import GitRunContext
from mlflow.tracking.context.registry import RunContextProviderRegistry, resolve_tags
# pylint: disable=unused-argument
def test_run_context_provider_registry_register():
    """A directly registered provider class appears (instantiated) in the registry."""
    fake_provider_class = mock.Mock()
    registry = RunContextProviderRegistry()
    registry.register(fake_provider_class)
    assert {fake_provider_class.return_value} == set(registry)
def test_run_context_provider_registry_register_entrypoints():
    """Providers exposed via the entrypoint group are loaded and registered."""
    fake_provider_class = mock.Mock()
    entrypoint = mock.Mock()
    entrypoint.load.return_value = fake_provider_class
    with mock.patch(
        "entrypoints.get_group_all", return_value=[entrypoint]
    ) as get_group_all:
        registry = RunContextProviderRegistry()
        registry.register_entrypoints()
        assert {fake_provider_class.return_value} == set(registry)
        entrypoint.load.assert_called_once_with()
        get_group_all.assert_called_once_with("mlflow.run_context_provider")
@pytest.mark.parametrize(
    "exception", [AttributeError("test exception"), ImportError("test exception")]
)
def METHOD_NAME(exception):
    """An entrypoint that fails to load must warn (with the cause) rather than raise."""
    mock_entrypoint = mock.Mock()
    mock_entrypoint.load.side_effect = exception
    with mock.patch(
        "entrypoints.get_group_all", return_value=[mock_entrypoint]
    ) as mock_get_group_all:
        registry = RunContextProviderRegistry()
        # Check that the raised warning contains the message from the original exception
        with pytest.warns(UserWarning, match="test exception"):
            registry.register_entrypoints()
        mock_entrypoint.load.assert_called_once_with()
        mock_get_group_all.assert_called_once_with("mlflow.run_context_provider")
def _currently_registered_run_context_provider_classes():
    """Return the set of provider classes currently in the global registry."""
    registry = mlflow.tracking.context.registry._run_context_provider_registry
    return {type(provider) for provider in registry}
def test_registry_instance_defaults():
    """The built-in providers are registered by default (subset check, since
    plugins may add more)."""
    expected_classes = {
        DefaultRunContext,
        GitRunContext,
        DatabricksNotebookRunContext,
        DatabricksJobRunContext,
        DatabricksRepoRunContext,
    }
    assert expected_classes.issubset(_currently_registered_run_context_provider_classes())
def test_registry_instance_loads_entrypoints():
    """The module-level registry picks up entrypoint providers at import time."""
    class MockRunContext:
        pass
    mock_entrypoint = mock.Mock()
    mock_entrypoint.load.return_value = MockRunContext
    with mock.patch(
        "entrypoints.get_group_all", return_value=[mock_entrypoint]
    ) as mock_get_group_all:
        # Entrypoints are registered at import time, so we need to reload the module to register the
        # entrypoint given by the mocked entrypoints.get_group_all
        reload(mlflow.tracking.context.registry)
        assert MockRunContext in _currently_registered_run_context_provider_classes()
        mock_get_group_all.assert_called_once_with("mlflow.run_context_provider")
def test_run_context_provider_registry_with_installed_plugin(tmp_path, monkeypatch):
    """This test requires the package in tests/resources/mlflow-test-plugin to be installed"""
    # run from a temp dir so repo-local state doesn't leak into tag resolution
    monkeypatch.chdir(tmp_path)
    reload(mlflow.tracking.context.registry)
    from mlflow_test_plugin.run_context_provider import PluginRunContextProvider
    assert PluginRunContextProvider in _currently_registered_run_context_provider_classes()
    # The test plugin's context provider always returns False from in_context
    # to avoid polluting tags in developers' environments. The following mock overrides this to
    # perform the integration test.
    with mock.patch.object(PluginRunContextProvider, "in_context", return_value=True):
        assert resolve_tags()["test"] == "tag"
@pytest.fixture
def mock_run_context_providers():
    """Install a fake provider registry exercising every resolve_tags() branch:
    a base provider, one skipped (not in context), one whose tags() raises,
    and one that overrides earlier tags."""
    base_provider = mock.Mock()
    base_provider.in_context.return_value = True
    base_provider.tags.return_value = {"one": "one-val", "two": "two-val", "three": "three-val"}
    skipped_provider = mock.Mock()
    skipped_provider.in_context.return_value = False
    exception_provider = mock.Mock()
    exception_provider.in_context.return_value = True
    exception_provider.tags.return_value = {
        "random-key": "This val will never make it to tag resolution"
    }
    exception_provider.tags.side_effect = Exception(
        "This should be caught by logic in resolve_tags()"
    )
    override_provider = mock.Mock()
    override_provider.in_context.return_value = True
    override_provider.tags.return_value = {"one": "override", "new": "new-val"}
    # registration order matters: later providers override earlier tags
    providers = [base_provider, skipped_provider, exception_provider, override_provider]
    with mock.patch("mlflow.tracking.context.registry._run_context_provider_registry", providers):
        yield
    # a provider reporting in_context() == False must never be asked for tags
    skipped_provider.tags.assert_not_called()
def test_resolve_tags(mock_run_context_providers):
    """Explicit tag arguments take precedence over all provider tags."""
    tags_arg = {"two": "arg-override", "arg": "arg-val"}
    assert resolve_tags(tags_arg) == {
        "one": "override",
        "two": "arg-override",
        "three": "three-val",
        "new": "new-val",
        "arg": "arg-val",
    }
def test_resolve_tags_no_arg(mock_run_context_providers):
    """Without explicit tags, later providers override earlier ones."""
    assert resolve_tags() == {
        "one": "override",
        "two": "two-val",
        "three": "three-val",
        "new": "new-val",
    }
5,465 | change permissions form | from django import forms
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.utils.translation import pgettext_lazy
from ..acl import algebra
from ..acl.decorators import return_boolean
from ..admin.forms import YesNoSwitch
from .models import Category, CategoryRole, RoleCategoryACL
class PermissionsForm(forms.Form):
    """Category-role permissions form controlling category visibility."""
    legend = pgettext_lazy("categories permission", "Category access")
    can_see = YesNoSwitch(
        label=pgettext_lazy("categories permission", "Can see category")
    )
    can_browse = YesNoSwitch(
        label=pgettext_lazy("categories permission", "Can see category contents")
    )
def METHOD_NAME(role):
    """Return the permissions form class for category roles; None otherwise."""
    if not isinstance(role, CategoryRole):
        return None
    return PermissionsForm
def build_acl(acl, roles, key_name):
    """Extend *acl* with category visibility computed from *roles*.

    Walks every category and records, under *key_name*, which categories are
    visible and browseable for the given roles.
    """
    new_acl = {"visible_categories": [], "browseable_categories": [], "categories": {}}
    new_acl.update(acl)
    roles = get_categories_roles(roles)
    for category in Category.objects.all_categories():
        build_category_acl(new_acl, category, roles, key_name)
    return new_acl
def get_categories_roles(roles):
    """Map category id -> list of category roles granted by *roles*."""
    queryset = RoleCategoryACL.objects.filter(role__in=roles)
    # select_related avoids one query per ACL row when reading category_role
    queryset = queryset.select_related("category_role")
    roles = {}
    for acl_relation in queryset.iterator():
        role = acl_relation.category_role
        roles.setdefault(acl_relation.category_id, []).append(role)
    return roles
def build_category_acl(acl, category, categories_roles, key_name):
    """Compute and record one category's can_see/can_browse in *acl*.

    Child categories inherit invisibility: if the parent is hidden or not
    browseable, the child is skipped entirely.
    """
    if category.level > 1:
        if category.parent_id not in acl["visible_categories"]:
            # dont bother with child categories of invisible parents
            return
        if not acl["categories"][category.parent_id]["can_browse"]:
            # parent's visible, but its contents aint
            return
    category_roles = categories_roles.get(category.pk, [])
    final_acl = {"can_see": 0, "can_browse": 0}
    # merge all roles' permissions, taking the most permissive value
    algebra.sum_acls(
        final_acl,
        roles=category_roles,
        key=key_name,
        can_see=algebra.greater,
        can_browse=algebra.greater,
    )
    if final_acl["can_see"]:
        acl["visible_categories"].append(category.pk)
        acl["categories"][category.pk] = final_acl
        if final_acl["can_browse"]:
            acl["browseable_categories"].append(category.pk)
def add_acl_to_category(user_acl, target):
    """Annotate the *target* category with this user's see/browse permissions."""
    target.acl.update(
        can_see=can_see_category(user_acl, target),
        can_browse=can_browse_category(user_acl, target),
    )
def serialize_categories_acls(user_acl):
    """Flatten user_acl["categories"] from an id-keyed dict into a list of
    per-category permission dicts, keeping only browseable categories.

    Mutates *user_acl* in place.
    """
    serialized = []
    for category_id, acl in user_acl.pop("categories").items():
        if not acl["can_browse"]:
            continue
        serialized.append(
            {
                "id": category_id,
                "can_start_threads": acl.get("can_start_threads", False),
                "can_reply_threads": acl.get("can_reply_threads", False),
                "can_pin_threads": acl.get("can_pin_threads", 0),
                "can_hide_threads": acl.get("can_hide_threads", 0),
                "can_close_threads": acl.get("can_close_threads", False),
            }
        )
    user_acl["categories"] = serialized
def register_with(registry):
    """Hook this module's ACL annotator and serializer into the ACL registry."""
    registry.acl_annotator(Category, add_acl_to_category)
    registry.user_acl_serializer(serialize_categories_acls)
def allow_see_category(user_acl, target):
    """Raise Http404 unless *target* (category or id) is visible to the user."""
    try:
        target_id = target.pk
    except AttributeError:
        # target may also be given as a raw id
        target_id = int(target)
    visible = user_acl["visible_categories"]
    if target_id not in visible:
        raise Http404()
can_see_category = return_boolean(allow_see_category)
def allow_browse_category(user_acl, target):
    """Raise PermissionDenied unless the user may browse *target*'s contents."""
    category_acl = user_acl["categories"].get(target.id, {})
    if not category_acl.get("can_browse", False):
        message = pgettext_lazy(
            "categories permission",
            'You don\'t have permission to browse "%(category)s" contents.',
        )
        raise PermissionDenied(message % {"category": target.name})
can_browse_category = return_boolean(allow_browse_category)
5,466 | author | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetIncidentCommentResult',
'AwaitableGetIncidentCommentResult',
'get_incident_comment',
'get_incident_comment_output',
]
@pulumi.output_type
class GetIncidentCommentResult:
    """
    Represents an incident comment
    """
    # NOTE(review): ``METHOD_NAME`` looks like a masked/generated identifier for
    # the ``author`` argument and property -- the stored pulumi key is "author".
    def __init__(__self__, METHOD_NAME=None, created_time_utc=None, etag=None, id=None, last_modified_time_utc=None, message=None, name=None, system_data=None, type=None):
        # Each argument is runtime type-checked, then stored with pulumi.set so
        # the @pulumi.output_type machinery exposes it via the getters below.
        if METHOD_NAME and not isinstance(METHOD_NAME, dict):
            raise TypeError("Expected argument 'author' to be a dict")
        pulumi.set(__self__, "author", METHOD_NAME)
        if created_time_utc and not isinstance(created_time_utc, str):
            raise TypeError("Expected argument 'created_time_utc' to be a str")
        pulumi.set(__self__, "created_time_utc", created_time_utc)
        if etag and not isinstance(etag, str):
            raise TypeError("Expected argument 'etag' to be a str")
        pulumi.set(__self__, "etag", etag)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if last_modified_time_utc and not isinstance(last_modified_time_utc, str):
            raise TypeError("Expected argument 'last_modified_time_utc' to be a str")
        pulumi.set(__self__, "last_modified_time_utc", last_modified_time_utc)
        if message and not isinstance(message, str):
            raise TypeError("Expected argument 'message' to be a str")
        pulumi.set(__self__, "message", message)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if system_data and not isinstance(system_data, dict):
            raise TypeError("Expected argument 'system_data' to be a dict")
        pulumi.set(__self__, "system_data", system_data)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def METHOD_NAME(self) -> 'outputs.ClientInfoResponse':
        """
        Describes the client that created the comment
        """
        return pulumi.get(self, "author")
    @property
    @pulumi.getter(name="createdTimeUtc")
    def created_time_utc(self) -> str:
        """
        The time the comment was created
        """
        return pulumi.get(self, "created_time_utc")
    @property
    @pulumi.getter
    def etag(self) -> Optional[str]:
        """
        Etag of the azure resource
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Azure resource Id
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter(name="lastModifiedTimeUtc")
    def last_modified_time_utc(self) -> str:
        """
        The time the comment was updated
        """
        return pulumi.get(self, "last_modified_time_utc")
    @property
    @pulumi.getter
    def message(self) -> str:
        """
        The comment message
        """
        return pulumi.get(self, "message")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Azure resource name
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> 'outputs.SystemDataResponse':
        """
        Azure Resource Manager metadata containing createdBy and modifiedBy information.
        """
        return pulumi.get(self, "system_data")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Azure resource type
        """
        return pulumi.get(self, "type")
class AwaitableGetIncidentCommentResult(GetIncidentCommentResult):
    """Awaitable wrapper so the invoke result can be used with ``await``.

    ``__await__`` is a no-op generator: it yields nothing and immediately
    returns a plain ``GetIncidentCommentResult`` copy of this object.
    """
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetIncidentCommentResult(
            METHOD_NAME=self.METHOD_NAME,
            created_time_utc=self.created_time_utc,
            etag=self.etag,
            id=self.id,
            last_modified_time_utc=self.last_modified_time_utc,
            message=self.message,
            name=self.name,
            system_data=self.system_data,
            type=self.type)
def get_incident_comment(incident_comment_id: Optional[str] = None,
                         incident_id: Optional[str] = None,
                         operational_insights_resource_provider: Optional[str] = None,
                         resource_group_name: Optional[str] = None,
                         workspace_name: Optional[str] = None,
                         opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetIncidentCommentResult:
    """
    Gets an incident comment.

    :param str incident_comment_id: Incident comment ID
    :param str incident_id: Incident ID
    :param str operational_insights_resource_provider: The namespace of workspaces resource provider- Microsoft.OperationalInsights.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str workspace_name: The name of the workspace.
    """
    # Camel-cased keys must match the provider's expected invoke arguments.
    __args__ = dict()
    __args__['incidentCommentId'] = incident_comment_id
    __args__['incidentId'] = incident_id
    __args__['operationalInsightsResourceProvider'] = operational_insights_resource_provider
    __args__['resourceGroupName'] = resource_group_name
    __args__['workspaceName'] = workspace_name
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    # Synchronous invoke against the pinned 2021-03-01-preview API version.
    __ret__ = pulumi.runtime.invoke('azure-native:securityinsights/v20210301preview:getIncidentComment', __args__, opts=opts, typ=GetIncidentCommentResult).value
    return AwaitableGetIncidentCommentResult(
        METHOD_NAME=pulumi.get(__ret__, 'author'),
        created_time_utc=pulumi.get(__ret__, 'created_time_utc'),
        etag=pulumi.get(__ret__, 'etag'),
        id=pulumi.get(__ret__, 'id'),
        last_modified_time_utc=pulumi.get(__ret__, 'last_modified_time_utc'),
        message=pulumi.get(__ret__, 'message'),
        name=pulumi.get(__ret__, 'name'),
        system_data=pulumi.get(__ret__, 'system_data'),
        type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_incident_comment)
def get_incident_comment_output(incident_comment_id: Optional[pulumi.Input[str]] = None,
                                incident_id: Optional[pulumi.Input[str]] = None,
                                operational_insights_resource_provider: Optional[pulumi.Input[str]] = None,
                                resource_group_name: Optional[pulumi.Input[str]] = None,
                                workspace_name: Optional[pulumi.Input[str]] = None,
                                opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetIncidentCommentResult]:
    """
    Gets an incident comment.

    :param str incident_comment_id: Incident comment ID
    :param str incident_id: Incident ID
    :param str operational_insights_resource_provider: The namespace of workspaces resource provider- Microsoft.OperationalInsights.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str workspace_name: The name of the workspace.
    """
    # Body intentionally empty: lift_output_func wraps get_incident_comment
    # to accept pulumi Inputs and return an Output.
    ...
from __future__ import annotations
from gdsfactory.port import Port
def METHOD_NAME(port: Port) -> float:
    """Return the x-coordinate of *port*'s center."""
    center = port.center
    return center[0]
def get_port_y(port: Port) -> float:
    """Return the y-coordinate of *port*'s center."""
    center = port.center
    return center[1]
def sort_ports_x(ports: list[Port]) -> list[Port]:
    """Sort *ports* in place by center x-coordinate and return the same list."""
    ports.sort(key=METHOD_NAME)
    return ports
def sort_ports_y(ports: list[Port]) -> list[Port]:
    """Sort *ports* in place by center y-coordinate and return the same list."""
    ports.sort(key=get_port_y)
    return ports
def sort_ports(
    ports1: list[Port], ports2: list[Port], enforce_port_ordering: bool
) -> tuple[list[Port], list[Port]]:
    """Returns two lists of sorted ports.

    Both input lists may be mutated (sorted in place).

    Args:
        ports1: the starting ports
        ports2: the ending ports
        enforce_port_ordering: if True, only ports2 will be sorted in accordance with ports1. If False, the two lists will be sorted independently.
    """
    if len(ports1) != len(ports2):
        raise ValueError(f"ports1={len(ports1)} and ports2={len(ports2)} must be equal")
    if not ports1:
        raise ValueError("ports1 is an empty list")
    if not ports2:
        raise ValueError("ports2 is an empty list")
    # Dicts are accepted despite the list annotation; only their values are used.
    if isinstance(ports1, dict):
        ports1 = list(ports1.values())
    if isinstance(ports2, dict):
        ports2 = list(ports2.values())
    # Remember each port's pre-sort position; requires Port to be hashable.
    ports1_original_order = {p: i for i, p in enumerate(ports1)}
    if ports1[0].orientation in [0, 180] and ports2[0].orientation in [0, 180]:
        # East/West-facing ports are distributed along y, so sort by y.
        f_key1 = get_port_y
        f_key2 = get_port_y
        ports1.sort(key=f_key1)
        if not enforce_port_ordering:
            ports2.sort(key=f_key2)
    elif ports1[0].orientation in [90, 270] and ports2[0].orientation in [90, 270]:
        # North/South-facing ports are distributed along x, so sort by x.
        f_key1 = METHOD_NAME
        f_key2 = METHOD_NAME
        ports1.sort(key=f_key1)
        if not enforce_port_ordering:
            ports2.sort(key=f_key2)
    else:
        # Mixed orientations: pick the sort axis from ports1's orientation.
        # NOTE(review): the key is chosen perpendicular to the facing axis,
        # mirroring the branches above -- confirm this is intended for the
        # mixed-orientation case.
        axis = "X" if ports1[0].orientation in [0, 180] else "Y"
        f_key1 = get_port_y if axis in {"X", "x"} else METHOD_NAME
        # NOTE(review): ports2_by1 is only consulted in the
        # not-enforce_port_ordering path below; it is dead otherwise.
        ports2_by1 = dict(zip(ports1, ports2))
        ports1.sort(key=f_key1)
        if not enforce_port_ordering:
            ports2 = [ports2_by1[p1] for p1 in ports1]
    # if port ordering should be enforced, always sort ports2 against the ports1 ordering
    if enforce_port_ordering:
        ports2 = [ports2[ports1_original_order[p1]] for p1 in ports1]
    return ports1, ports2
if __name__ == "__main__":
    # Visual demo: route N-port bundles between corner configurations.
    import gdsfactory as gf
    from gdsfactory.cell import cell

    @cell
    def demo_connect_corner(N=6, config="A"):
        """Build a demo component routing port banks around a corner.

        config A/B use a fixed square of side 100; C/D size the square from N.
        A/C route ports_A -> ports_B; B/D route the reverse direction.
        NOTE(review): a config outside {"A","B","C","D"} leaves ports_A/ports_B
        undefined and raises NameError below.
        """
        d = 10.0
        sep = 5.0
        top_cell = gf.Component(name="connect_corner")
        if config in ["A", "B"]:
            a = 100.0
            ports_A_TR = [
                Port(f"A_TR_{i}", (d, a / 2 + i * sep), 0.5, 0) for i in range(N)
            ]
            ports_A_TL = [
                Port(f"A_TL_{i}", (-d, a / 2 + i * sep), 0.5, 180) for i in range(N)
            ]
            ports_A_BR = [
                Port(f"A_BR_{i}", (d, -a / 2 - i * sep), 0.5, 0) for i in range(N)
            ]
            ports_A_BL = [
                Port(f"A_BL_{i}", (-d, -a / 2 - i * sep), 0.5, 180) for i in range(N)
            ]
            ports_A = [ports_A_TR, ports_A_TL, ports_A_BR, ports_A_BL]
            ports_B_TR = [
                Port(f"B_TR_{i}", (a / 2 + i * sep, d), 0.5, 90) for i in range(N)
            ]
            ports_B_TL = [
                Port(f"B_TL_{i}", (-a / 2 - i * sep, d), 0.5, 90) for i in range(N)
            ]
            ports_B_BR = [
                Port(f"B_BR_{i}", (a / 2 + i * sep, -d), 0.5, 270) for i in range(N)
            ]
            ports_B_BL = [
                Port(f"B_BL_{i}", (-a / 2 - i * sep, -d), 0.5, 270) for i in range(N)
            ]
            ports_B = [ports_B_TR, ports_B_TL, ports_B_BR, ports_B_BL]
        elif config in ["C", "D"]:
            a = N * sep + 2 * d
            ports_A_TR = [Port(f"A_TR_{i}", (a, d + i * sep), 0.5, 0) for i in range(N)]
            ports_A_TL = [
                Port(f"A_TL_{i}", (-a, d + i * sep), 0.5, 180) for i in range(N)
            ]
            ports_A_BR = [
                Port(f"A_BR_{i}", (a, -d - i * sep), 0.5, 0) for i in range(N)
            ]
            ports_A_BL = [
                Port(f"A_BL_{i}", (-a, -d - i * sep), 0.5, 180) for i in range(N)
            ]
            ports_A = [ports_A_TR, ports_A_TL, ports_A_BR, ports_A_BL]
            ports_B_TR = [
                Port(f"B_TR_{i}", (d + i * sep, a), 0.5, 90) for i in range(N)
            ]
            ports_B_TL = [
                Port(f"B_TL_{i}", (-d - i * sep, a), 0.5, 90) for i in range(N)
            ]
            ports_B_BR = [
                Port(f"B_BR_{i}", (d + i * sep, -a), 0.5, 270) for i in range(N)
            ]
            ports_B_BL = [
                Port(f"B_BL_{i}", (-d - i * sep, -a), 0.5, 270) for i in range(N)
            ]
            ports_B = [ports_B_TR, ports_B_TL, ports_B_BR, ports_B_BL]
        if config in ["A", "C"]:
            for ports1, ports2 in zip(ports_A, ports_B):
                routes = gf.routing.get_bundle(ports1, ports2, radius=8)
                for route in routes:
                    top_cell.add(route.references)
        elif config in ["B", "D"]:
            for ports1, ports2 in zip(ports_A, ports_B):
                routes = gf.routing.get_bundle(ports2, ports1, radius=8)
                for route in routes:
                    top_cell.add(route.references)
        return top_cell

    c = gf.Component()
    c1 = c << demo_connect_corner(config="A")
    c2 = c << demo_connect_corner(config="C")
    c2.xmin = c1.xmax + 5
    c.show(show_ports=True)
"""Dog-leg trust-region optimization."""
from __future__ import division, print_function, absolute_import
import numpy as np
import scipy.linalg
from ._trustregion import (_minimize_trust_region, BaseQuadraticSubproblem)
__all__ = []
def _minimize_dogleg(fun, x0, args=(), jac=None, hess=None,
                     **trust_region_options):
    """
    Minimization of scalar function of one or more variables using
    the dog-leg trust-region algorithm.

    Options
    -------
    initial_trust_radius : float
        Initial trust-region radius.
    max_trust_radius : float
        Maximum value of the trust-region radius. No steps that are longer
        than this value will be proposed.
    eta : float
        Trust region related acceptance stringency for proposed steps.
    gtol : float
        Gradient norm must be less than `gtol` before successful
        termination.

    Raises
    ------
    ValueError
        If `jac` or `hess` is not supplied (both are mandatory here).
    """
    # Dogleg needs exact first and second derivatives; fail fast otherwise.
    if jac is None:
        raise ValueError('Jacobian is required for dogleg minimization')
    if hess is None:
        raise ValueError('Hessian is required for dogleg minimization')
    # Delegate to the generic trust-region driver with the dogleg subproblem.
    return _minimize_trust_region(fun, x0, args=args, jac=jac, hess=hess,
                                  subproblem=DoglegSubproblem,
                                  **trust_region_options)
class DoglegSubproblem(BaseQuadraticSubproblem):
    """Quadratic subproblem solved by the dogleg method"""

    def cauchy_point(self):
        """
        The Cauchy point is minimal along the direction of steepest descent.
        """
        # Cached: the Cauchy point does not depend on the trust radius.
        if self._cauchy_point is None:
            g = self.jac
            Bg = self.hessp(g)
            self._cauchy_point = -(np.dot(g, g) / np.dot(g, Bg)) * g
        return self._cauchy_point

    def newton_point(self):
        """
        The Newton point is a global minimum of the approximate function.
        """
        # Cached; uses a Cholesky solve, so the Hessian must be positive
        # definite (cho_factor raises LinAlgError otherwise).
        if self._newton_point is None:
            g = self.jac
            B = self.hess
            cho_info = scipy.linalg.cho_factor(B)
            self._newton_point = -scipy.linalg.cho_solve(cho_info, g)
        return self._newton_point

    def METHOD_NAME(self, trust_radius):
        """
        Minimize a function using the dog-leg trust-region algorithm.

        This algorithm requires function values and first and second derivatives.
        It also performs a costly Hessian decomposition for most iterations,
        and the Hessian is required to be positive definite.

        Parameters
        ----------
        trust_radius : float
            We are allowed to wander only this far away from the origin.

        Returns
        -------
        p : ndarray
            The proposed step.
        hits_boundary : bool
            True if the proposed step is on the boundary of the trust region.

        Notes
        -----
        The Hessian is required to be positive definite.

        References
        ----------
        .. [1] Jorge Nocedal and Stephen Wright,
               Numerical Optimization, second edition,
               Springer-Verlag, 2006, page 73.
        """
        # Compute the Newton point.
        # This is the optimum for the quadratic model function.
        # If it is inside the trust radius then return this point.
        p_best = self.newton_point()
        if scipy.linalg.norm(p_best) < trust_radius:
            hits_boundary = False
            return p_best, hits_boundary

        # Compute the Cauchy point.
        # This is the predicted optimum along the direction of steepest descent.
        p_u = self.cauchy_point()

        # If the Cauchy point is outside the trust region,
        # then return the point where the path intersects the boundary.
        p_u_norm = scipy.linalg.norm(p_u)
        if p_u_norm >= trust_radius:
            p_boundary = p_u * (trust_radius / p_u_norm)
            hits_boundary = True
            return p_boundary, hits_boundary

        # Compute the intersection of the trust region boundary
        # and the line segment connecting the Cauchy and Newton points.
        # This requires solving a quadratic equation.
        # ||p_u + t*(p_best - p_u)||**2 == trust_radius**2
        # Solve this for positive time t using the quadratic formula.
        _, tb = self.get_boundaries_intersections(p_u, p_best - p_u,
                                                  trust_radius)
        p_boundary = p_u + tb * (p_best - p_u)
        hits_boundary = True
        return p_boundary, hits_boundary
import decimal
from io import StringIO, BytesIO
from collections import OrderedDict
from test.test_json import PyTest, CTest
class TestDecode:
    """Decoder regression tests, mixed into Py/C implementations below.

    ``self.loads`` / ``self.json`` are supplied by the PyTest/CTest mixins.
    """
    def test_decimal(self):
        # parse_float hook must control the type used for JSON numbers.
        rval = self.loads('1.1', parse_float=decimal.Decimal)
        self.assertTrue(isinstance(rval, decimal.Decimal))
        self.assertEqual(rval, decimal.Decimal('1.1'))
    def test_float(self):
        # parse_int hook must control the type used for JSON integers.
        rval = self.loads('1', parse_int=float)
        self.assertTrue(isinstance(rval, float))
        self.assertEqual(rval, 1.0)
    def test_empty_objects(self):
        self.assertEqual(self.loads('{}'), {})
        self.assertEqual(self.loads('[]'), [])
        self.assertEqual(self.loads('""'), "")
    def test_object_pairs_hook(self):
        s = '{"xkd":1, "kcw":2, "art":3, "hxm":4, "qrt":5, "pad":6, "hoy":7}'
        p = [("xkd", 1), ("kcw", 2), ("art", 3), ("hxm", 4),
             ("qrt", 5), ("pad", 6), ("hoy", 7)]
        self.assertEqual(self.loads(s), eval(s))
        self.assertEqual(self.loads(s, object_pairs_hook=lambda x: x), p)
        self.assertEqual(self.json.load(StringIO(s),
                                        object_pairs_hook=lambda x: x), p)
        od = self.loads(s, object_pairs_hook=OrderedDict)
        self.assertEqual(od, OrderedDict(p))
        self.assertEqual(type(od), OrderedDict)
        # the object_pairs_hook takes priority over the object_hook
        self.assertEqual(self.loads(s, object_pairs_hook=OrderedDict,
                                    object_hook=lambda x: None),
                         OrderedDict(p))
        # check that empty objects literals work (see #17368)
        self.assertEqual(self.loads('{}', object_pairs_hook=OrderedDict),
                         OrderedDict())
        self.assertEqual(self.loads('{"empty": {}}',
                                    object_pairs_hook=OrderedDict),
                         OrderedDict([('empty', OrderedDict())]))
    def METHOD_NAME(self):
        # Several optimizations were made that skip over calls to
        # the whitespace regex, so this test is designed to try and
        # exercise the uncommon cases. The array cases are already covered.
        rval = self.loads('{ "key" : "value" , "k":"v" }')
        self.assertEqual(rval, {"key":"value", "k":"v"})
    def check_keys_reuse(self, source, loads):
        # Equal keys across objects should be the same interned string object.
        rval = loads(source)
        (a, b), (c, d) = sorted(rval[0]), sorted(rval[1])
        self.assertIs(a, c)
        self.assertIs(b, d)
    def test_keys_reuse(self):
        s = '[{"a_key": 1, "b_\xe9": 2}, {"a_key": 3, "b_\xe9": 4}]'
        self.check_keys_reuse(s, self.loads)
        self.check_keys_reuse(s, self.json.decoder.JSONDecoder().decode)
    def test_extra_data(self):
        s = '[1, 2, 3]5'
        msg = 'Extra data'
        self.assertRaisesRegex(ValueError, msg, self.loads, s)
    def test_invalid_escape(self):
        s = '["abc\\y"]'
        msg = 'escape'
        self.assertRaisesRegex(ValueError, msg, self.loads, s)
    def test_invalid_input_type(self):
        msg = 'the JSON object must be str'
        for value in [1, 3.14, b'bytes', b'\xff\x00', [], {}, None]:
            self.assertRaisesRegex(TypeError, msg, self.loads, value)
        with self.assertRaisesRegex(TypeError, msg):
            self.json.load(BytesIO(b'[1,2,3]'))
    def test_string_with_utf8_bom(self):
        # see #18958
        bom_json = "[1,2,3]".encode('utf-8-sig').decode('utf-8')
        with self.assertRaises(ValueError) as cm:
            self.loads(bom_json)
        self.assertIn('BOM', str(cm.exception))
        with self.assertRaises(ValueError) as cm:
            self.json.load(StringIO(bom_json))
        self.assertIn('BOM', str(cm.exception))
        # make sure that the BOM is not detected in the middle of a string
        bom_in_str = '"{}"'.format(''.encode('utf-8-sig').decode('utf-8'))
        self.assertEqual(self.loads(bom_in_str), '\ufeff')
        self.assertEqual(self.json.load(StringIO(bom_in_str)), '\ufeff')
    def test_negative_index(self):
        d = self.json.JSONDecoder()
        self.assertRaises(ValueError, d.raw_decode, 'a'*42, -50000)
# Concrete parametrizations: run TestDecode against the pure-Python and the
# C-accelerated json implementations.
class TestPyDecode(TestDecode, PyTest): pass
class TestCDecode(TestDecode, CTest): pass
from typing import Any, List
from ... import auth
from ...api.socket.constants import GAME_NS
from ...app import app, sio
from ...db.create.floor import create_floor
from ...db.db import db
from ...db.models.floor import Floor
from ...db.models.player_room import PlayerRoom
from ...logs import logger
from ...models.role import Role
from ...state.game import game_state
from ...transform.to_api.floor import transform_floor
from ..helpers import _send_game
from ..models.floor import FloorCreate, FloorRename
from ..models.floor.background import FloorBackgroundSet
from ..models.floor.type import FloorTypeSet
from ..models.floor.visible import FloorVisibleSet
@sio.on("Floor.Create", namespace=GAME_NS)
@auth.login_required(app, sio, "game")
async def _create_floor(sid: str, floor_name: str):
    """Create a new floor in the active location and notify every client there."""
    pr: PlayerRoom = game_state.get(sid)
    if pr.role != Role.DM:
        # Only the DM may alter the floor layout.
        logger.warning(f"{pr.player.name} attempted to create a new floor")
        return
    new_floor = create_floor(pr.active_location, floor_name)
    # Every client gets its own payload: the floor transform depends on viewer.
    for target_sid, target_pr in game_state.get_t(active_location=pr.active_location):
        payload = FloorCreate(
            creator=pr.player.name, floor=transform_floor(new_floor, target_pr)
        )
        await _send_game("Floor.Create", payload, room=target_sid)
@sio.on("Floor.Remove", namespace=GAME_NS)
@auth.login_required(app, sio, "game")
async def remove_floor(sid: str, floor_name: str):
    """Delete a floor (and, recursively, its contents) and broadcast the removal."""
    pr: PlayerRoom = game_state.get(sid)
    if pr.role != Role.DM:
        logger.warning(f"{pr.player.name} attempted to remove a floor")
        return
    # NOTE(review): Floor.get raises if no floor with this name exists -- the
    # error propagates to the socket layer unhandled; confirm that is intended.
    floor = Floor.get(location=pr.active_location, name=floor_name)
    floor.delete_instance(recursive=True)
    # Broadcast to everyone in the location except the initiating client.
    await _send_game(
        "Floor.Remove",
        floor_name,
        room=pr.active_location.get_path(),
        skip_sid=sid,
    )
@sio.on("Floor.Visible.Set", namespace=GAME_NS)
@auth.login_required(app, sio, "game")
async def set_floor_visibility(sid: str, raw_data: Any):
    """Toggle whether players can see a floor, then broadcast the change."""
    # Validate the raw payload into a typed model before any checks.
    data = FloorVisibleSet(**raw_data)
    pr: PlayerRoom = game_state.get(sid)
    if pr.role != Role.DM:
        logger.warning(f"{pr.player.name} attempted to toggle floor visibility")
        return
    floor = Floor.get(location=pr.active_location, name=data.name)
    floor.player_visible = data.visible
    floor.save()
    await _send_game(
        "Floor.Visible.Set",
        data,
        room=pr.active_location.get_path(),
        skip_sid=sid,
    )
@sio.on("Floor.Rename", namespace=GAME_NS)
@auth.login_required(app, sio, "game")
async def METHOD_NAME(sid: str, raw_data: Any):
    """Rename the floor at the given index and broadcast the change."""
    data = FloorRename(**raw_data)
    pr: PlayerRoom = game_state.get(sid)
    if pr.role != Role.DM:
        logger.warning(f"{pr.player.name} attempted to rename a floor")
        return
    # Floors are addressed by index here, unlike most handlers that use name.
    target: Floor = Floor.get(location=pr.active_location, index=data.index)
    target.name = data.name
    target.save()
    await _send_game(
        "Floor.Rename", data, room=pr.active_location.get_path(), skip_sid=sid
    )
@sio.on("Floor.Type.Set", namespace=GAME_NS)
@auth.login_required(app, sio, "game")
async def set_floor_type(sid: str, raw_data: Any):
    """Change a floor's type and broadcast the change to the location."""
    data = FloorTypeSet(**raw_data)
    pr: PlayerRoom = game_state.get(sid)
    if pr.role != Role.DM:
        logger.warning(f"{pr.player.name} attempted to set floor type")
        return
    floor: Floor = Floor.get(location=pr.active_location, name=data.name)
    floor.type_ = data.floorType
    floor.save()
    await _send_game(
        "Floor.Type.Set",
        data,
        room=pr.active_location.get_path(),
        skip_sid=sid,
    )
@sio.on("Floor.Background.Set", namespace=GAME_NS)
@auth.login_required(app, sio, "game")
async def set_floor_background(sid: str, raw_data: Any):
    """Change a floor's background colour and broadcast the change."""
    data = FloorBackgroundSet(**raw_data)
    pr: PlayerRoom = game_state.get(sid)
    if pr.role != Role.DM:
        logger.warning(f"{pr.player.name} attempted to set floor background")
        return
    floor: Floor = Floor.get(location=pr.active_location, name=data.name)
    # Empty string is normalized to None (i.e. "no background").
    floor.background_color = data.background or None
    floor.save()
    await _send_game(
        "Floor.Background.Set",
        data,
        room=pr.active_location.get_path(),
        skip_sid=sid,
    )
@sio.on("Floors.Reorder", namespace=GAME_NS)
@auth.login_required(app, sio, "game")
async def reorder_floors(sid: str, data: List[str]):
    """Persist a new floor ordering (list of floor names) and broadcast it."""
    pr: PlayerRoom = game_state.get(sid)
    if pr.role != Role.DM:
        logger.warning(f"{pr.player.name} attempted to reorder floors")
        return
    # Apply all index updates atomically so a failure can't leave a partial order.
    with db.atomic():
        for index, name in enumerate(data):
            floor = Floor.get(location=pr.active_location, name=name)
            floor.index = index
            floor.save()
    await _send_game(
        "Floors.Reorder",
        data,
        room=pr.active_location.get_path(),
        skip_sid=sid,
    )
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# EXAMPLE: /Sites/put/Create VMware site
def step_vmware_site_create(test, checks=None):
    """Create a VMware site via the CLI; *checks* are JMESPath validations."""
    if checks is None:
        checks = []
    test.cmd('az offazure vmware site create '
             '--location "centralus" '
             '--resource-group "{rg}" '
             '--site-name "{vmware_site}" ',
             checks=checks)
# EXAMPLE: /Sites/get/Get VMware site
def step_vmware_site_show(test, checks=None):
    """Show the VMware site and run the given *checks* against the output."""
    if checks is None:
        checks = []
    test.cmd('az offazure vmware site show '
             '--resource-group "{rg}" '
             '--name "{vmware_site}" ',
             checks=checks)
# EXAMPLE: /Sites/delete/Delete VMware site
def step_vmware_site_delete(test):
    """Delete the VMware site (``-y`` skips the confirmation prompt)."""
    test.cmd('az offazure vmware site delete -y '
             '--resource-group "{rg}" '
             '--name "{vmware_site}" ')
# EXAMPLE: /HyperVMachines/get/List hosts by site
def step_hyperv_machine_list(test, checks=None):
    """List Hyper-V machines discovered on the site."""
    if checks is None:
        checks = []
    test.cmd('az offazure hyperv machine list '
             '--resource-group "{rg}" '
             '--site-name "{hyperv_site}" ',
             checks=checks)
# EXAMPLE: /HyperVRunAsAccounts/get/Get run as account.
def step_hyperv_run_as_account_show(test):
    """Negative test: showing a non-existent run-as account must fail."""
    test.cmd('az offazure hyperv run-as-account show '
             '--account-name "not-exist-account" '
             '--resource-group "{rg}" '
             '--site-name "{hyperv_site}" ',
             expect_failure=True)
# EXAMPLE: /HyperVRunAsAccounts/get/List Run As Accounts by site
def step_hyperv_run_as_account_list(test, checks=None):
    """List Hyper-V run-as accounts on the site."""
    if checks is None:
        checks = []
    test.cmd('az offazure hyperv run-as-account list '
             '--resource-group "{rg}" '
             '--site-name "{hyperv_site}" ',
             checks=checks)
# EXAMPLE: /HyperVSites/put/Create Hyper-V site
def step_hyperv_site_create(test, checks=None):
    """Create a Hyper-V site via the CLI; *checks* are JMESPath validations."""
    if checks is None:
        checks = []
    test.cmd('az offazure hyperv site create '
             '--location "centralus" '
             '--resource-group "{rg}" '
             '--site-name "{hyperv_site}" ',
             checks=checks)
# EXAMPLE: /HyperVSites/get/Get Hyper-V site
def step_hyperv_site_show(test, checks=None):
    """Show the Hyper-V site and run the given *checks* against the output."""
    if checks is None:
        checks = []
    test.cmd('az offazure hyperv site show '
             '--resource-group "{rg}" '
             '--site-name "{hyperv_site}" ',
             checks=checks)
# EXAMPLE: /HyperVCluster/get/Get cluster
def step_hyperv_cluster_show(test):
    """Negative test: showing a non-existent cluster must fail."""
    test.cmd('az offazure hyperv cluster show '
             '--cluster-name "not-exist-cluster" '
             '--resource-group "{rg}" '
             '--site-name "{hyperv_site}" ',
             expect_failure=True)
# EXAMPLE: /HyperVCluster/get/List cluster by site
def step_hyperv_cluster_list(test, checks=None):
    """List Hyper-V clusters on the site."""
    if checks is None:
        checks = []
    test.cmd('az offazure hyperv cluster list '
             '--resource-group "{rg}" '
             '--site-name "{hyperv_site}" ',
             checks=checks)
# EXAMPLE: /HyperVHost/get/Get host
def step_hyperv_host_show(test):
    """Negative test: showing a non-existent host must fail."""
    test.cmd('az offazure hyperv host show '
             '--host-name "not-exist-host" '
             '--resource-group "{rg}" '
             '--site-name "{hyperv_site}" ',
             expect_failure=True)
# EXAMPLE: /HyperVHost/get/List hosts by site
def METHOD_NAME(test, checks=None):
    """List Hyper-V hosts on the site.

    NOTE(review): ``METHOD_NAME`` is a masked identifier; by the surrounding
    naming scheme this is ``step_hyperv_host_list``.
    """
    if checks is None:
        checks = []
    test.cmd('az offazure hyperv host list '
             '--resource-group "{rg}" '
             '--site-name "{hyperv_site}" ',
             checks=checks)
# EXAMPLE: /HyperVSites/delete/Delete Hyper-V site.
def step_hyperv_site_delete(test):
    """Delete the Hyper-V site (``-y`` skips the confirmation prompt)."""
    test.cmd('az offazure hyperv site delete -y '
             '--resource-group "{rg}" '
             '--site-name "{hyperv_site}" ')
# EXAMPLE: /Machines/get/Get VMware machine.
def step_vmware_machine_show(test):
    """Negative test: showing a non-existent VMware machine must fail."""
    test.cmd('az offazure vmware machine show '
             '--name "not-exist-machine" '
             '--resource-group "{rg}" '
             '--site-name "{vmware_site}" ',
             expect_failure=True)
# EXAMPLE: /Machines/get/Get VMware machines
def step_vmware_machine_list(test, checks=None):
    """List VMware machines discovered on the site."""
    if checks is None:
        checks = []
    test.cmd('az offazure vmware machine list '
             '--resource-group "{rg}" '
             '--site-name "{vmware_site}" ',
             checks=checks)
# EXAMPLE: /HyperVMachines/get/Get machine.
def step_hyperv_machine_show(test):
    """Negative test: showing a non-existent Hyper-V machine must fail."""
    test.cmd('az offazure hyperv machine show '
             '--machine-name "not-exist-machine" '
             '--resource-group "{rg}" '
             '--site-name "{hyperv_site}" ',
             expect_failure=True)
# EXAMPLE: /RunAsAccounts/get/Get VMware run as account.
def step_vmware_run_as_account_show(test):
    """Negative test: showing a non-existent VMware run-as account must fail."""
    test.cmd('az offazure vmware run-as-account show '
             '--account-name "not-exist-account" '
             '--resource-group "{rg}" '
             '--site-name "{vmware_site}" ',
             expect_failure=True)
# EXAMPLE: /RunAsAccounts/get/List VMware run as account by site.
def step_vmware_run_as_account_list(test, checks=None):
    """List VMware run-as accounts on the site."""
    if checks is None:
        checks = []
    test.cmd('az offazure vmware run-as-account list '
             '--resource-group "{rg}" '
             '--site-name "{vmware_site}" ',
             checks=checks)
# EXAMPLE: /VCenter/get/Get VMware Vcenter.
def step_vmware_vcenter_show(test):
    """Negative test: showing a non-existent vCenter must fail."""
    test.cmd('az offazure vmware vcenter show '
             '--resource-group "{rg}" '
             '--site-name "{vmware_site}" '
             '--name "not-exist-vcenter"',
             expect_failure=True)
# EXAMPLE: /VCenter/get/List VMware vCenters by site
def step_vmware_vcenter_list(test, checks=None):
    """List vCenters registered on the VMware site."""
    if checks is None:
        checks = []
    test.cmd('az offazure vmware vcenter list '
             '--resource-group "{rg}" '
             '--site-name "{vmware_site}" ',
             checks=checks)
# Test the module type
import unittest
import weakref
from test.support import run_unittest, gc_collect
from test.script_helper import assert_python_ok
import sys
ModuleType = type(sys)
class FullLoader:
    """Loader stub providing module_repr(), so module reprs are customized."""

    @classmethod
    def module_repr(cls, m):
        name = m.__name__
        return f"<module '{name}' (crafted)>"
class BareLoader:
    """Loader stub without module_repr(); reprs fall back to the default path."""
    pass
class ModuleTests(unittest.TestCase):
    """Tests for the module type itself: construction, __dict__ lifetime,
    weak references, and the repr fallback chain (loader / name / file)."""
    def test_uninitialized(self):
        # An uninitialized module has no __dict__ or __name__,
        # and __doc__ is None
        foo = ModuleType.__new__(ModuleType)
        self.assertTrue(foo.__dict__ is None)
        self.assertRaises(SystemError, dir, foo)
        try:
            s = foo.__name__
            self.fail("__name__ = %s" % repr(s))
        except AttributeError:
            pass
        self.assertEqual(foo.__doc__, ModuleType.__doc__)
    def test_no_docstring(self):
        # Regularly initialized module, no docstring
        foo = ModuleType("foo")
        self.assertEqual(foo.__name__, "foo")
        self.assertEqual(foo.__doc__, None)
        self.assertIs(foo.__loader__, None)
        self.assertIs(foo.__package__, None)
        self.assertIs(foo.__spec__, None)
        self.assertEqual(foo.__dict__, {"__name__": "foo", "__doc__": None,
                                        "__loader__": None, "__package__": None,
                                        "__spec__": None})
    def test_ascii_docstring(self):
        # ASCII docstring
        foo = ModuleType("foo", "foodoc")
        self.assertEqual(foo.__name__, "foo")
        self.assertEqual(foo.__doc__, "foodoc")
        self.assertEqual(foo.__dict__,
                         {"__name__": "foo", "__doc__": "foodoc",
                          "__loader__": None, "__package__": None,
                          "__spec__": None})
    def test_unicode_docstring(self):
        # Unicode docstring
        foo = ModuleType("foo", "foodoc\u1234")
        self.assertEqual(foo.__name__, "foo")
        self.assertEqual(foo.__doc__, "foodoc\u1234")
        self.assertEqual(foo.__dict__,
                         {"__name__": "foo", "__doc__": "foodoc\u1234",
                          "__loader__": None, "__package__": None,
                          "__spec__": None})
    def test_reinit(self):
        # Reinitialization should not replace the __dict__
        foo = ModuleType("foo", "foodoc\u1234")
        foo.bar = 42
        d = foo.__dict__
        foo.__init__("foo", "foodoc")
        self.assertEqual(foo.__name__, "foo")
        self.assertEqual(foo.__doc__, "foodoc")
        self.assertEqual(foo.bar, 42)
        self.assertEqual(foo.__dict__,
                         {"__name__": "foo", "__doc__": "foodoc", "bar": 42,
                          "__loader__": None, "__package__": None, "__spec__": None})
        self.assertTrue(foo.__dict__ is d)
    def test_dont_clear_dict(self):
        # See issue 7140.
        def f():
            foo = ModuleType("foo")
            foo.bar = 4
            return foo
        gc_collect()
        self.assertEqual(f().__dict__["bar"], 4)
    def test_clear_dict_in_ref_cycle(self):
        # A module in a reference cycle must still run __del__ of its globals.
        destroyed = []
        m = ModuleType("foo")
        m.destroyed = destroyed
        s = """class A:
    def __init__(self, l):
        self.l = l
    def __del__(self):
        self.l.append(1)
a = A(destroyed)"""
        exec(s, m.__dict__)
        del m
        gc_collect()
        self.assertEqual(destroyed, [1])
    def test_weakref(self):
        m = ModuleType("foo")
        wr = weakref.ref(m)
        self.assertIs(wr(), m)
        del m
        gc_collect()
        self.assertIs(wr(), None)
    def test_module_repr_minimal(self):
        # reprs when modules have no __file__, __name__, or __loader__
        m = ModuleType('foo')
        del m.__name__
        self.assertEqual(repr(m), "<module '?'>")
    def test_module_repr_with_name(self):
        m = ModuleType('foo')
        self.assertEqual(repr(m), "<module 'foo'>")
    def test_module_repr_with_name_and_filename(self):
        m = ModuleType('foo')
        m.__file__ = '/tmp/foo.py'
        self.assertEqual(repr(m), "<module 'foo' from '/tmp/foo.py'>")
    def test_module_repr_with_filename_only(self):
        m = ModuleType('foo')
        del m.__name__
        m.__file__ = '/tmp/foo.py'
        self.assertEqual(repr(m), "<module '?' from '/tmp/foo.py'>")
    def test_module_repr_with_loader_as_None(self):
        m = ModuleType('foo')
        assert m.__loader__ is None
        self.assertEqual(repr(m), "<module 'foo'>")
    def test_module_repr_with_bare_loader_but_no_name(self):
        m = ModuleType('foo')
        del m.__name__
        # Yes, a class not an instance.
        m.__loader__ = BareLoader
        loader_repr = repr(BareLoader)
        self.assertEqual(
            repr(m), "<module '?' ({})>".format(loader_repr))
    def test_module_repr_with_full_loader_but_no_name(self):
        # m.__loader__.module_repr() will fail because the module has no
        # m.__name__. This exception will get suppressed and instead the
        # loader's repr will be used.
        m = ModuleType('foo')
        del m.__name__
        # Yes, a class not an instance.
        m.__loader__ = FullLoader
        loader_repr = repr(FullLoader)
        self.assertEqual(
            repr(m), "<module '?' ({})>".format(loader_repr))
    def test_module_repr_with_bare_loader(self):
        m = ModuleType('foo')
        # Yes, a class not an instance.
        m.__loader__ = BareLoader
        module_repr = repr(BareLoader)
        self.assertEqual(
            repr(m), "<module 'foo' ({})>".format(module_repr))
    def test_module_repr_with_full_loader(self):
        m = ModuleType('foo')
        # Yes, a class not an instance.
        m.__loader__ = FullLoader
        self.assertEqual(
            repr(m), "<module 'foo' (crafted)>")
    def test_module_repr_with_bare_loader_and_filename(self):
        # Because the loader has no module_repr(), use the file name.
        m = ModuleType('foo')
        # Yes, a class not an instance.
        m.__loader__ = BareLoader
        m.__file__ = '/tmp/foo.py'
        self.assertEqual(repr(m), "<module 'foo' from '/tmp/foo.py'>")
    def test_module_repr_with_full_loader_and_filename(self):
        # Even though the module has an __file__, use __loader__.module_repr()
        m = ModuleType('foo')
        # Yes, a class not an instance.
        m.__loader__ = FullLoader
        m.__file__ = '/tmp/foo.py'
        self.assertEqual(repr(m), "<module 'foo' (crafted)>")
    def test_module_repr_builtin(self):
        self.assertEqual(repr(sys), "<module 'sys' (built-in)>")
    def test_module_repr_source(self):
        r = repr(unittest)
        starts_with = "<module 'unittest' from '"
        ends_with = "__init__.py'>"
        self.assertEqual(r[:len(starts_with)], starts_with,
                         '{!r} does not start with {!r}'.format(r, starts_with))
        self.assertEqual(r[-len(ends_with):], ends_with,
                         '{!r} does not end with {!r}'.format(r, ends_with))
    def test_module_finalization_at_shutdown(self):
        # Module globals and builtins should still be available during shutdown
        rc, out, err = assert_python_ok("-c", "from test import final_a")
        self.assertFalse(err)
        lines = out.splitlines()
        self.assertEqual(set(lines), {
            b"x = a",
            b"x = b",
            b"final_a.x = a",
            b"final_b.x = b",
            b"len = len",
            b"shutil.rmtree = rmtree"})
# frozen and namespace module reprs are tested in importlib.
def METHOD_NAME():
    # Entry point used by the regrtest runner: execute every ModuleTests case.
    run_unittest(ModuleTests)


if __name__ == '__main__':
    METHOD_NAME()
5,473 | set path | # installer.py
#
# Copyright 2022 brombinmirko <send@mirko.pm>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, in version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import urllib.request
from gi.repository import Gtk, GLib, Gio, GdkPixbuf, Adw
from bottles.backend.utils.threading import RunAsync
from bottles.frontend.utils.gtk import GtkUtils
@Gtk.Template(resource_path='/com/usebottles/bottles/local-resource-entry.ui')
class LocalResourceEntry(Adw.ActionRow):
    """Row asking the user to locate one local file an installer needs.

    The chosen path is reported back to the parent dialog via
    ``parent.add_resource(resource, path)`` and shown as the row subtitle.
    """
    __gtype_name__ = 'LocalResourceEntry'

    # region Widgets
    btn_path = Gtk.Template.Child()
    # endregion

    def __init__(self, parent, resource, **kwargs):
        super().__init__(**kwargs)
        # common variables and references
        self.parent = parent      # dialog owning this row
        self.resource = resource  # resource identifier, also used as row title
        self.set_title(resource)
        # connect signals
        self.btn_path.connect("clicked", self.__choose_path)

    def __choose_path(self, *_args):
        """
        Open the file chooser dialog and set the path to the
        selected file
        """
        def METHOD_NAME(_dialog, response):
            # "response" handler: on accept, forward the chosen path to the
            # parent dialog and display it on this row.
            if response != Gtk.ResponseType.ACCEPT:
                return
            path = dialog.get_file().get_path()
            self.parent.add_resource(self.resource, path)
            self.set_subtitle(path)

        dialog = Gtk.FileChooserNative.new(
            title=_("Select Resource File"),
            action=Gtk.FileChooserAction.OPEN,
            parent=self.parent,
        )
        dialog.set_modal(True)
        dialog.connect("response", METHOD_NAME)
        dialog.show()
@Gtk.Template(resource_path='/com/usebottles/bottles/dialog-installer.ui')
class InstallerDialog(Adw.Window):
    """Dialog driving one Bottles installer: collects required local
    resources, runs the installation and reports progress and result.
    """
    __gtype_name__ = 'InstallerDialog'
    # NOTE(review): mutable class-level defaults are shared across instances;
    # looks like only one dialog is expected alive at a time — confirm.
    __sections = {}          # step index -> section name ("deps", "steps", ...)
    __steps = 0              # total number of installer steps
    __current_step = 0       # steps completed so far
    __local_resources = []   # resources the user must locate on disk
    __final_resources = {}   # resource name -> chosen local path

    # region widgets
    stack = Gtk.Template.Child()
    window_title = Gtk.Template.Child()
    btn_install = Gtk.Template.Child()
    btn_proceed = Gtk.Template.Child()
    btn_close = Gtk.Template.Child()
    status_init = Gtk.Template.Child()
    status_installed = Gtk.Template.Child()
    status_error = Gtk.Template.Child()
    progressbar = Gtk.Template.Child()
    group_resources = Gtk.Template.Child()
    install_status_page = Gtk.Template.Child()
    img_icon = Gtk.Template.Child()
    img_icon_install = Gtk.Template.Child()
    style_provider = Gtk.CssProvider()
    # endregion

    def __init__(self, window, config, installer, **kwargs):
        """
        Args:
            window: main application window (provides .manager)
            config: bottle configuration the installer targets
            installer: (id, manifest-dict) tuple; manifest provides "Name"
        """
        super().__init__(**kwargs)
        self.set_transient_for(window)
        self.window = window
        self.manager = window.manager
        self.config = config
        self.installer = installer
        # Human-readable phrase shown on the progress bar per section.
        self.__steps_phrases = {
            "deps": _("Installing Windows dependencies…"),
            "params": _("Configuring the bottle…"),
            "steps": _("Processing installer steps…"),
            "exe": _("Installing the {}…".format(installer[1].get("Name"))),
            "checks": _("Performing final checks…")
        }
        self.status_init.set_title(installer[1].get("Name"))
        self.install_status_page.set_title(_("Installing {0}…").format(installer[1].get("Name")))
        self.status_installed.set_description(
            _("{0} is now available in the programs view.").format(installer[1].get("Name")))
        self.__set_icon()
        self.btn_install.connect("clicked", self.__check_resources)
        self.btn_proceed.connect("clicked", self.__install)
        self.btn_close.connect("clicked", self.__close)
def __set_icon(self):
try:
url = self.manager.installer_manager.get_icon_url(self.installer[0])
if url is None:
self.img_icon.set_visible(False)
self.img_icon_install.set_visible(False)
return
with urllib.request.urlopen(url) as res:
stream = Gio.MemoryInputStream.new_from_data(res.read(), None)
pixbuf = GdkPixbuf.Pixbuf.new_from_stream(stream, None)
self.img_icon.set_pixel_size(78)
self.img_icon.set_from_pixbuf(pixbuf)
self.img_icon_install.set_pixel_size(78)
self.img_icon_install.set_from_pixbuf(pixbuf)
except:
self.img_icon.set_visible(False)
self.img_icon_install.set_visible(False)
    def __check_resources(self, *_args):
        """Ask the installer manager which resources must be provided
        locally; start installing immediately when none are needed,
        otherwise show one LocalResourceEntry row per resource."""
        self.__local_resources = self.manager.installer_manager.has_local_resources(self.installer)
        if len(self.__local_resources) == 0:
            self.__install()
            return
        for resource in self.__local_resources:
            _entry = LocalResourceEntry(self, resource)
            # Widget mutation is scheduled on the GTK main loop.
            GLib.idle_add(self.group_resources.add, _entry)
        self.btn_proceed.set_visible(True)
        self.stack.set_visible_child_name("page_resources")

    def __install(self, *_args):
        """Kick off the asynchronous installation and switch to the
        progress page; completion is reported through set_status."""
        self.set_deletable(False)
        self.stack.set_visible_child_name("page_install")

        @GtkUtils.run_in_main_loop
        def set_status(result, error=False):
            # RunAsync callback: route to success or error page.
            if result.ok:
                return self.__installed()
            _err = result.data.get("message", _("Installer failed with unknown error"))
            self.__error(_err)

        self.set_steps(self.manager.installer_manager.count_steps(self.installer))
        RunAsync(
            task_func=self.manager.installer_manager.install,
            callback=set_status,
            config=self.config,
            installer=self.installer,
            step_fn=self.next_step,
            local_resources=self.__final_resources
        )

    def __installed(self):
        """Show the success page and refresh the bottle's program list."""
        self.set_deletable(False)
        self.stack.set_visible_child_name("page_installed")
        self.window.page_details.view_bottle.update_programs()
        self.window.page_details.go_back_sidebar()

    def __error(self, error):
        """Show the error page with the given message; re-allow closing."""
        self.set_deletable(True)
        self.status_error.set_description(error)
        self.stack.set_visible_child_name("page_error")

    def next_step(self):
        """Next step"""
        # Phrase is looked up by the section the *current* step belongs to.
        phrase = self.__steps_phrases[self.__sections[self.__current_step]]
        self.progressbar.set_text(phrase)
        self.__current_step += 1
        self.progressbar.set_fraction(self.__current_step * (1 / self.__steps))

    def set_steps(self, steps):
        """Set steps"""
        # steps: {"total": int, "sections": {index: section-name}}
        self.__steps = steps["total"]
        self.__sections = steps["sections"]

    def add_resource(self, resource, path):
        """Record a user-chosen path; enable Proceed once all resources
        have been located."""
        self.__final_resources[resource] = path
        if len(self.__local_resources) == len(self.__final_resources):
            self.btn_proceed.set_sensitive(True)

    def __close(self, *_args):
        self.destroy()
5,474 | gen bucket policy | import datetime
import logging
import boto3
from ocs_ci.ocs import constants
from ocs_ci.ocs.bucket_utils import retrieve_verification_mode
from ocs_ci.utility import version
logger = logging.getLogger(__name__)
class HttpResponseParser(object):
    """
    A simple class for parsing HTTP responses
    """

    def __init__(self, http_response):
        """
        Initializer function

        Args:
            http_response (dict): HTTP response

        """
        logger.info("http response:\n%s" % http_response)
        metadata = http_response["ResponseMetadata"]
        self.metadata = metadata
        logger.info(f"metadata: {self.metadata}")
        headers = metadata["HTTPHeaders"]
        self.headers = headers
        logger.info(f"headers: {self.headers}")
        status = metadata["HTTPStatusCode"]
        self.status_code = status
        logger.info(f"status code: {self.status_code}")
        self.error = http_response.get("Error", None)
        logger.info(f"Error: {self.error}")
class NoobaaAccount(object):
    """
    Class for Noobaa account
    """

    (
        s3_resource,
        s3_endpoint,
        account_name,
        email_id,
        token,
        access_key_id,
        access_key,
    ) = (None,) * 7

    def __init__(
        self,
        mcg,
        name,
        email,
        buckets=None,
        admin_access=False,
        s3_access=True,
        full_bucket_access=True,
        backingstore_name=constants.DEFAULT_NOOBAA_BACKINGSTORE,
    ):
        """
        Initializer function

        Args:
            mcg (obj): Multi cloud gateway object
            name (str): Name of noobaa account
            email (str): Email id to be assigned to noobaa account
            buckets (list): list of bucket names to be given permission
            admin_access (bool): True for admin privilege, otherwise False. Default (False)
            s3_access (bool): True for S3 access, otherwise False. Default (True)
            backingstore_name (str): Backingstore name on which buckets created
                using this account to be placed by default. Default("noobaa-default-backing-store")
            full_bucket_access (bool): True for future bucket access, otherwise False. Default (False)

        """
        self.account_name = name
        self.email_id = email
        # Build the RPC parameters once; the two previous near-identical
        # literals differed only in the optional "allowed_buckets" entry.
        params_dict = {
            "email": email,
            "name": name,
            "has_login": admin_access,
            "s3_access": s3_access,
            "default_pool": backingstore_name,
        }
        if buckets:
            params_dict["allowed_buckets"] = {
                "full_permission": full_bucket_access,
                "permission_list": buckets,
            }
        # "default_pool" is only accepted by account_api on ODF < 4.9; drop it
        # on newer versions. (Replaces a conditional *expression* that was
        # evaluated purely for its pop() side effect.)
        if version.get_semantic_ocs_version_from_config() >= version.VERSION_4_9:
            params_dict.pop("default_pool")
        response = mcg.send_rpc_query(
            api="account_api", method="create_account", params=params_dict
        ).json()
        # Fresh S3 credentials and auth token are returned by the RPC reply.
        self.access_key_id = response["reply"]["access_keys"][0]["access_key"]
        self.access_key = response["reply"]["access_keys"][0]["secret_key"]
        self.s3_endpoint = mcg.s3_endpoint
        self.token = response["reply"]["token"]
        # Keep both a high-level resource and a low-level client bound to the
        # same endpoint/credentials, mirroring how callers use either API.
        self.s3_resource = boto3.resource(
            "s3",
            verify=retrieve_verification_mode(),
            endpoint_url=self.s3_endpoint,
            aws_access_key_id=self.access_key_id,
            aws_secret_access_key=self.access_key,
        )
        self.s3_client = boto3.client(
            "s3",
            verify=retrieve_verification_mode(),
            endpoint_url=self.s3_endpoint,
            aws_access_key_id=self.access_key_id,
            aws_secret_access_key=self.access_key,
        )
def METHOD_NAME(
    user_list, actions_list, resources_list, effect="Allow", sid="statement"
):
    """
    Function prepares bucket policy parameters in syntax and format provided by AWS bucket policy

    Args:
        user_list (list): List of user accounts to access bucket policy
        actions_list (list): List of actions in bucket policy eg: Get, Put objects etc
        resources_list (list): List of resources. Eg: Bucket name, specific object in a bucket etc
        effect (str): Permission given to the bucket policy ie: Allow(default) or Deny
        sid (str): Statement name. Can be any string. Default: "Statement"

    Returns:
        dict: Bucket policy in json format

    """
    principals = user_list
    # Qualify plain action/bucket names with the prefixes AWS expects
    # (comprehensions replace the previous map()+lambda pairs).
    actions = ["s3:%s" % action for action in actions_list]
    resources = [
        "arn:aws:s3:::%s" % bucket_name for bucket_name in resources_list
    ]
    # NOTE(review): AWS policies normally pin Version to "2012-10-17";
    # a date string is used here deliberately — confirm MCG accepts it
    # before changing.
    ver = datetime.date.today().strftime("%Y-%m-%d")
    logger.info(f"version: {ver}")
    logger.info(f"principal_list: {principals}")
    logger.info(f"actions_list: {actions_list}")
    logger.info(f"resource: {resources_list}")
    logger.info(f"effect: {effect}")
    logger.info(f"sid: {sid}")
    bucket_policy = {
        "Version": ver,
        "Statement": [
            {
                "Action": actions,
                "Principal": principals,
                "Resource": resources,
                "Effect": effect,
                "Sid": sid,
            }
        ],
    }
    logger.info(f"bucket_policy: {bucket_policy}")
    return bucket_policy
5,475 | test already existing fail | """Test the Create Index action"""
# pylint: disable=missing-function-docstring, missing-class-docstring, line-too-long
import os
from curator import IndexList
from curator.helpers.date_ops import parse_date_pattern
from curator.helpers.getters import get_indices
from . import CuratorTestCase
from . import testvars
HOST = os.environ.get('TEST_ES_SERVER', 'http://127.0.0.1:9200')
class TestCLICreateIndex(CuratorTestCase):
    """Integration tests for the curator ``create_index`` action against a
    live Elasticsearch at HOST."""

    def test_plain(self):
        """A literal index name is created exactly once."""
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(self.args['actionfile'], testvars.create_index.format('testing'))
        assert not get_indices(self.client)
        self.invoke_runner()
        # NOTE(review): the next two assertions are duplicates in different
        # styles (unittest vs. bare assert) — one could be dropped.
        self.assertEqual(['testing'], get_indices(self.client))
        assert ['testing'] == get_indices(self.client)

    def test_with_extra_settings(self):
        """extra_settings applies shard/replica counts, mappings and alias."""
        idx = 'testing'
        alias = 'aliasname'
        mapkey1 = 'meep'
        mapval1 = 'integer'
        mapkey2 = 'beep'
        mapval2 = 'keyword'
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(self.args['actionfile'],
                          testvars.create_index_with_extra_settings.format(idx, alias, mapkey1, mapval1, mapkey2, mapval2))
        assert not get_indices(self.client)
        self.invoke_runner()
        ilo = IndexList(self.client)
        ilo.get_index_settings()
        aliases = self.client.indices.get_alias(name=alias)
        mapping = self.client.indices.get_mapping(index=idx)
        assert [idx] == ilo.indices
        assert '1' == ilo.index_info[idx]['number_of_shards']
        assert '0' == ilo.index_info[idx]['number_of_replicas']
        assert mapping[idx]['mappings']['properties'][mapkey1] == {'type': mapval1}
        assert mapping[idx]['mappings']['properties'][mapkey2] == {'type': mapval2}
        assert aliases[idx]['aliases'] == {alias: {'is_write_index': True}}

    def test_with_strftime(self):
        """A %-style date pattern in the name is expanded at creation time."""
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(self.args['actionfile'], testvars.create_index.format('testing-%Y.%m.%d'))
        assert not get_indices(self.client)
        idx = parse_date_pattern('testing-%Y.%m.%d')
        self.invoke_runner()
        assert [idx] == get_indices(self.client)

    def test_with_date_math(self):
        """Elasticsearch date-math names (<...{now/d}>) are resolved."""
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(self.args['actionfile'], testvars.create_index.format('<testing-{now/d}>'))
        assert not get_indices(self.client)
        idx = parse_date_pattern('testing-%Y.%m.%d')
        self.invoke_runner()
        assert [idx] == get_indices(self.client)

    def test_extra_option(self):
        """An unknown option in the action file fails without side effects."""
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(self.args['actionfile'], testvars.bad_option_proto_test.format('create_index'))
        self.invoke_runner()
        assert not get_indices(self.client)
        assert 1 == self.result.exit_code

    def METHOD_NAME(self):
        """Creating an index that already exists fails by default."""
        idx = 'testing'
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(self.args['actionfile'], testvars.create_index.format(idx))
        self.create_index(idx)
        self.invoke_runner()
        assert [idx] == get_indices(self.client)
        assert 1 == self.result.exit_code

    def test_already_existing_pass(self):
        """With ignore_existing: true, an existing index is not an error."""
        config = (
            '---\n'
            'actions:\n'
            '  1:\n'
            '    description: "Create index as named"\n'
            '    action: create_index\n'
            '    options:\n'
            '      name: {0}\n'
            '      ignore_existing: true\n'
        )
        idx = 'testing'
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(self.args['actionfile'], config.format(idx))
        self.create_index(idx)
        self.invoke_runner()
        assert [idx] == get_indices(self.client)
        assert 0 == self.result.exit_code
5,476 | test type with nested if | import unittest
from robot.model import For, If, IfBranch, TestCase, Try, TryBranch, While
from robot.utils.asserts import assert_equal
IF = If.IF
ELSE_IF = If.ELSE_IF
ELSE = If.ELSE
TRY = Try.TRY
EXCEPT = Try.EXCEPT
FINALLY = Try.FINALLY
class TestFor(unittest.TestCase):
    """str()/repr() of the For model object for every loop flavor."""

    def test_string_reprs(self):
        # Each tuple: constructed For, expected str(), expected repr()
        # (repr is prefixed with the defining module, 'robot.model.').
        for for_, exp_str, exp_repr in [
            (For(),
             'FOR        IN',
             "For(assign=(), flavor='IN', values=())"),
            (For(('${x}',), 'IN RANGE', ('10',)),
             'FOR    ${x}    IN RANGE    10',
             "For(assign=('${x}',), flavor='IN RANGE', values=('10',))"),
            (For(('${x}', '${y}'), 'IN ENUMERATE', ('a', 'b')),
             'FOR    ${x}    ${y}    IN ENUMERATE    a    b',
             "For(assign=('${x}', '${y}'), flavor='IN ENUMERATE', values=('a', 'b'))"),
            (For(['${x}'], 'IN ENUMERATE', ['@{stuff}'], start='1'),
             'FOR    ${x}    IN ENUMERATE    @{stuff}    start=1',
             "For(assign=('${x}',), flavor='IN ENUMERATE', values=('@{stuff}',), start='1')"),
            (For(('${x}', '${y}'), 'IN ZIP', ('${xs}', '${ys}'), mode='LONGEST', fill='-'),
             'FOR    ${x}    ${y}    IN ZIP    ${xs}    ${ys}    mode=LONGEST    fill=-',
             "For(assign=('${x}', '${y}'), flavor='IN ZIP', values=('${xs}', '${ys}'), mode='LONGEST', fill='-')"),
            (For(['${ü}'], 'IN', ['föö']),
             'FOR    ${ü}    IN    föö',
             "For(assign=('${ü}',), flavor='IN', values=('föö',))")
        ]:
            assert_equal(str(for_), exp_str)
            assert_equal(repr(for_), 'robot.model.' + exp_repr)
class TestWhile(unittest.TestCase):
    """str()/repr() of the While model object."""

    def test_string_reprs(self):
        for while_, exp_str, exp_repr in [
            (While(),
             'WHILE',
             "While(condition=None)"),
            (While('$x', limit='100'),
             'WHILE    $x    limit=100',
             "While(condition='$x', limit='100')")
        ]:
            assert_equal(str(while_), exp_str)
            assert_equal(repr(while_), 'robot.model.' + exp_repr)
class TestIf(unittest.TestCase):
    """Branch types, ids and string representations of If/IfBranch."""

    def test_type(self):
        assert_equal(IfBranch().type, IF)
        assert_equal(IfBranch(type=ELSE).type, ELSE)
        assert_equal(IfBranch(type=ELSE_IF).type, ELSE_IF)

    def METHOD_NAME(self):
        """Branch types are correct also for an IF nested in a branch."""
        branch = IfBranch()
        branch.body.create_if()
        assert_equal(branch.body[0].body.create_branch().type, IF)
        assert_equal(branch.body[0].body.create_branch(ELSE_IF).type, ELSE_IF)
        assert_equal(branch.body[0].body.create_branch(ELSE).type, ELSE)

    def test_root_id(self):
        # The IF root itself has no id; only branches get ids.
        assert_equal(If().id, None)
        assert_equal(TestCase().body.create_if().id, None)

    def test_branch_id_without_parent(self):
        assert_equal(IfBranch().id, 'k1')

    def test_branch_id_with_only_root(self):
        root = If()
        assert_equal(root.body.create_branch().id, 'k1')
        assert_equal(root.body.create_branch().id, 'k2')

    def test_branch_id_with_only_root_when_branch_not_in_root(self):
        assert_equal(IfBranch(parent=If()).id, 'k1')

    def test_branch_id_with_real_parent(self):
        # Ids are prefixed with the parent test's id ('t1').
        root = TestCase().body.create_if()
        assert_equal(root.body.create_branch().id, 't1-k1')
        assert_equal(root.body.create_branch().id, 't1-k2')

    def test_branch_id_when_parent_has_setup(self):
        # The setup keyword consumes 'k1', shifting subsequent ids.
        tc = TestCase()
        assert_equal(tc.setup.config(name='X').id, 't1-k1')
        assert_equal(tc.body.create_keyword().id, 't1-k2')
        assert_equal(tc.body.create_if().body.create_branch().id, 't1-k3')
        assert_equal(tc.body.create_keyword().id, 't1-k4')
        assert_equal(tc.body.create_if().body.create_branch().id, 't1-k5')

    def test_string_reprs(self):
        for if_, exp_str, exp_repr in [
            (IfBranch(),
             'IF    None',
             "IfBranch(type='IF', condition=None)"),
            (IfBranch(condition='$x > 1'),
             'IF    $x > 1',
             "IfBranch(type='IF', condition='$x > 1')"),
            (IfBranch(ELSE_IF, condition='$x > 2'),
             'ELSE IF    $x > 2',
             "IfBranch(type='ELSE IF', condition='$x > 2')"),
            (IfBranch(ELSE),
             'ELSE',
             "IfBranch(type='ELSE', condition=None)"),
            (IfBranch(condition=u'$x == "\xe4iti"'),
             u'IF    $x == "\xe4iti"',
             u"IfBranch(type='IF', condition=%r)" % u'$x == "\xe4iti"'),
        ]:
            assert_equal(str(if_), exp_str)
            assert_equal(repr(if_), 'robot.model.' + exp_repr)
class TestTry(unittest.TestCase):
    """Branch types, ids and string representations of Try/TryBranch."""

    def test_type(self):
        assert_equal(TryBranch().type, TRY)
        assert_equal(TryBranch(type=EXCEPT).type, EXCEPT)
        assert_equal(TryBranch(type=ELSE).type, ELSE)
        assert_equal(TryBranch(type=FINALLY).type, FINALLY)

    def test_type_with_nested_try(self):
        # Renamed from 'test_type_with_nested_Try' for consistent lower-case
        # test naming (matches the corresponding test in TestIf).
        branch = TryBranch()
        branch.body.create_try()
        assert_equal(branch.body[0].body.create_branch().type, TRY)
        assert_equal(branch.body[0].body.create_branch(type=EXCEPT).type, EXCEPT)
        assert_equal(branch.body[0].body.create_branch(type=ELSE).type, ELSE)
        assert_equal(branch.body[0].body.create_branch(type=FINALLY).type, FINALLY)

    def test_root_id(self):
        # The TRY root itself has no id; only branches get ids.
        assert_equal(Try().id, None)
        assert_equal(TestCase().body.create_try().id, None)

    def test_branch_id_without_parent(self):
        assert_equal(TryBranch().id, 'k1')

    def test_branch_id_with_only_root(self):
        root = Try()
        assert_equal(root.body.create_branch().id, 'k1')
        assert_equal(root.body.create_branch().id, 'k2')

    def test_branch_id_with_only_root_when_branch_not_in_root(self):
        assert_equal(TryBranch(parent=Try()).id, 'k1')

    def test_branch_id_with_real_parent(self):
        # Ids are prefixed with the parent test's id ('t1').
        root = TestCase().body.create_try()
        assert_equal(root.body.create_branch().id, 't1-k1')
        assert_equal(root.body.create_branch().id, 't1-k2')

    def test_branch_id_when_parent_has_setup(self):
        # The setup keyword consumes 'k1', shifting subsequent ids.
        tc = TestCase()
        assert_equal(tc.setup.config(name='X').id, 't1-k1')
        assert_equal(tc.body.create_keyword().id, 't1-k2')
        assert_equal(tc.body.create_try().body.create_branch().id, 't1-k3')
        assert_equal(tc.body.create_keyword().id, 't1-k4')
        assert_equal(tc.body.create_try().body.create_branch().id, 't1-k5')

    def test_string_reprs(self):
        for try_, exp_str, exp_repr in [
            (TryBranch(),
             'TRY',
             "TryBranch(type='TRY')"),
            (TryBranch(EXCEPT),
             'EXCEPT',
             "TryBranch(type='EXCEPT')"),
            (TryBranch(EXCEPT, ('Message',)),
             'EXCEPT    Message',
             "TryBranch(type='EXCEPT', patterns=('Message',))"),
            (TryBranch(EXCEPT, ('M', 'S', 'G', 'S')),
             'EXCEPT    M    S    G    S',
             "TryBranch(type='EXCEPT', patterns=('M', 'S', 'G', 'S'))"),
            (TryBranch(EXCEPT, (), None, '${x}'),
             'EXCEPT    AS    ${x}',
             "TryBranch(type='EXCEPT', assign='${x}')"),
            (TryBranch(EXCEPT, ('Message',), 'glob', '${x}'),
             'EXCEPT    Message    type=glob    AS    ${x}',
             "TryBranch(type='EXCEPT', patterns=('Message',), pattern_type='glob', assign='${x}')"),
            (TryBranch(ELSE),
             'ELSE',
             "TryBranch(type='ELSE')"),
            (TryBranch(FINALLY),
             'FINALLY',
             "TryBranch(type='FINALLY')"),
        ]:
            assert_equal(str(try_), exp_str)
            assert_equal(repr(try_), 'robot.model.' + exp_repr)


if __name__ == '__main__':
    unittest.main()
5,477 | render settings box | # This file is part of Indico.
# Copyright (C) 2002 - 2023 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import redirect, session
from werkzeug.exceptions import Forbidden
from indico.core import signals
from indico.core.config import config
from indico.core.db import db
from indico.core.db.sqlalchemy.util.queries import db_dates_overlap
from indico.modules.events.management.controllers.base import RHManageEventBase
from indico.modules.events.management.forms import (EventClassificationForm, EventContactInfoForm, EventDataForm,
EventDatesForm, EventLanguagesForm, EventLocationForm,
EventPersonsForm)
from indico.modules.events.management.util import flash_if_unregistered
from indico.modules.events.management.views import WPEventSettings, render_event_management_header_right
from indico.modules.events.models.labels import EventLabel
from indico.modules.events.models.references import ReferenceType
from indico.modules.events.operations import update_event
from indico.modules.events.util import should_show_draft_warning, track_location_changes, track_time_changes
from indico.modules.rb.models.reservation_occurrences import ReservationOccurrence
from indico.modules.rb.models.reservations import Reservation
from indico.modules.rb.models.rooms import Room
from indico.util.signals import values_from_signal
from indico.web.flask.templating import get_template_module
from indico.web.forms.base import FormDefaults
from indico.web.util import jsonify_data, jsonify_form, jsonify_template
class RHEventSettings(RHManageEventBase):
    """Event settings dashboard."""

    def _check_access(self):
        if not session.user:
            raise Forbidden
        # If the user cannot manage the whole event see if anything gives them
        # limited management access.
        if not self.event.can_manage(session.user):
            urls = sorted(values_from_signal(signals.event_management.management_url.send(self.event),
                                             single_value=True))
            # Redirect to the first limited-management page instead of a
            # plain 403 when one exists.
            response = redirect(urls[0]) if urls else None
            raise Forbidden(response=response)
        RHManageEventBase._check_access(self)  # mainly to trigger the legacy "event locked" check

    def _process(self):
        # Warn about a missing room booking only when room booking is enabled,
        # the event is upcoming, has a room, and no reservation link yet.
        show_booking_warning = False
        if (config.ENABLE_ROOMBOOKING and not self.event.has_ended and self.event.room
                and not self.event.room_reservation_links):
            # Check if any of the managers of the event already have a booking that overlaps with the event datetime
            manager_ids = [p.user.id for p in self.event.acl_entries if p.user]
            has_overlap = (ReservationOccurrence.query
                           .filter(ReservationOccurrence.is_valid,
                                   db.or_(Reservation.booked_for_id.in_(manager_ids),
                                          Reservation.created_by_id.in_(manager_ids)),
                                   db_dates_overlap(ReservationOccurrence,
                                                    'start_dt', self.event.start_dt_local,
                                                    'end_dt', self.event.end_dt_local),
                                   Reservation.room_id == self.event.room.id,
                                   ~Room.is_deleted)
                           .join(Reservation)
                           .join(Room)
                           .has_rows())
            show_booking_warning = not has_overlap
        has_reference_types = ReferenceType.query.has_rows()
        has_event_labels = EventLabel.query.has_rows()
        return WPEventSettings.render_template('settings.html', self.event, 'settings',
                                               show_booking_warning=show_booking_warning,
                                               show_draft_warning=should_show_draft_warning(self.event),
                                               has_reference_types=has_reference_types,
                                               has_event_labels=has_event_labels)
class RHEditEventDataBase(RHManageEventBase):
    """Base for the per-section event-settings edit dialogs.

    Subclasses set ``form_class`` and ``section_name``; the generic flow is
    validate -> update event -> return refreshed settings box as JSON.
    """
    form_class = None
    section_name = None

    def render_form(self, form):
        return jsonify_form(form, footer_align_right=True)

    def METHOD_NAME(self):
        # Re-render only the affected settings section for the AJAX response.
        tpl = get_template_module('events/management/_settings.html')
        assert self.section_name
        has_reference_types = ReferenceType.query.has_rows()
        has_event_labels = EventLabel.query.has_rows()
        return tpl.render_event_settings(self.event, has_reference_types, has_event_labels,
                                         section=self.section_name, with_container=False)

    def jsonify_success(self):
        return jsonify_data(settings_box=self.METHOD_NAME(),
                            right_header=render_event_management_header_right(self.event))

    def _update(self, form_data):
        # Hook point: subclasses may wrap this (e.g. change tracking).
        update_event(self.event, **form_data)

    def _process(self):
        form = self.form_class(obj=self.event, event=self.event)
        if form.validate_on_submit():
            # Flash a notice if previously registered persons were replaced.
            with flash_if_unregistered(self.event, lambda: self.event.person_links):
                self._update(form.data)
            return self.jsonify_success()
        # Invalid form: do not commit the session, re-render with errors.
        self.commit = False
        return self.render_form(form)
class RHEditEventData(RHEditEventDataBase):
    """Edit basic event data (title, description, ...)."""
    form_class = EventDataForm
    section_name = 'data'


class RHEditEventLocation(RHEditEventDataBase):
    """Edit the event location; location changes are tracked."""
    form_class = EventLocationForm
    section_name = 'location'

    def _update(self, form_data):
        with track_location_changes():
            return super()._update(form_data)


class RHEditEventPersons(RHEditEventDataBase):
    """Edit event chairpersons/speakers."""
    form_class = EventPersonsForm
    section_name = 'persons'


class RHEditEventContactInfo(RHEditEventDataBase):
    """Edit contact information; uses a custom template for the form."""
    form_class = EventContactInfoForm
    section_name = 'contact_info'

    def render_form(self, form):
        return jsonify_template('events/management/event_contact_info.html', form=form)


class RHEditEventClassification(RHEditEventDataBase):
    """Edit event keywords/label classification."""
    form_class = EventClassificationForm
    section_name = 'classification'


class RHEditEventLanguages(RHEditEventDataBase):
    """Edit event language settings."""
    form_class = EventLanguagesForm
    section_name = 'languages'
class RHEditEventDates(RHEditEventDataBase):
    """Edit event dates; overrides _process to track time changes and to
    handle the optional screen (display-only) dates."""
    section_name = 'dates'

    def _process(self):
        defaults = FormDefaults(self.event, update_timetable=True)
        form = EventDatesForm(obj=defaults, event=self.event)
        if form.validate_on_submit():
            with track_time_changes():
                update_event(self.event, **form.data)
            return self.jsonify_success()
        # Show the screen-dates section when an override is already set.
        show_screen_dates = form.has_displayed_dates and (form.start_dt_override.data or form.end_dt_override.data)
        return jsonify_template('events/management/event_dates.html', form=form, show_screen_dates=show_screen_dates)
5,478 | remote shutdown | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from buildbot.util import ComparableMixin
from buildbot.util import subscription
from buildbot.util.eventual import eventually
class Listener:
    """Marker base class for worker-protocol listeners."""
    pass


class UpdateRegistrationListener(Listener):
    """Listener keeping one port registration per worker username,
    re-registering only when password or port actually change."""

    def __init__(self):
        super().__init__()
        # username : (password, portstr, manager registration)
        self._registrations = {}

    @defer.inlineCallbacks
    def updateRegistration(self, username, password, portStr):
        # NOTE: this method is only present on the PB and MsgPack protocols; others do not
        # use registrations
        if username in self._registrations:
            currentPassword, currentPortStr, currentReg = \
                self._registrations[username]
        else:
            currentPassword, currentPortStr, currentReg = None, None, None
        # Nothing changed -> keep the existing registration.
        iseq = (ComparableMixin.isEquivalent(currentPassword, password) and
                ComparableMixin.isEquivalent(currentPortStr, portStr))
        if iseq:
            return currentReg
        if currentReg:
            yield currentReg.unregister()
            del self._registrations[username]
        if portStr is not None and password:
            reg = yield self.get_manager().register(portStr, username, password,
                                                    self._create_connection)
            self._registrations[username] = (password, portStr, reg)
            return reg
        # No port/password given: registration was removed (if any).
        return currentReg

    @defer.inlineCallbacks
    def _create_connection(self, mind, workerName):
        """Build a Connection for a newly attached worker and register it
        with the worker manager; rejects duplicate attachments."""
        self.before_connection_setup(mind, workerName)
        worker = self.master.workers.getWorkerByName(workerName)
        conn = self.ConnectionClass(self.master, worker, mind)
        # inform the manager, logging any problems in the deferred
        accepted = yield self.master.workers.newConnection(conn, workerName)
        # return the Connection as the perspective
        if accepted:
            return conn
        else:
            # TODO: return something more useful
            raise RuntimeError("rejecting duplicate worker")
class Connection:
    """Abstract master-side connection to a worker.

    Concrete protocol implementations override the remote* methods; this
    base provides argument proxying and disconnect subscriptions.
    """
    # implclass -> proxyclass mapping used by createArgsProxies
    proxies = {}

    def __init__(self, name):
        self._disconnectSubs = subscription.SubscriptionPoint(f"disconnections from {name}")

    # This method replace all Impl args by their Proxy protocol implementation
    def createArgsProxies(self, args):
        newargs = {}
        for k, v in args.items():
            for implclass, proxyclass in self.proxies.items():
                if isinstance(v, implclass):
                    v = proxyclass(v)
            newargs[k] = v
        return newargs

    def get_peer(self):
        raise NotImplementedError

    # disconnection handling

    def wait_shutdown_started(self):
        # Fires (eventually, i.e. in a later reactor turn) once a
        # disconnect has been delivered.
        d = defer.Deferred()
        self.notifyOnDisconnect(lambda: eventually(d.callback, None))
        return d

    def waitShutdown(self):
        return self._disconnectSubs.waitForDeliveriesToFinish()

    def notifyOnDisconnect(self, cb):
        return self._disconnectSubs.subscribe(cb)

    def notifyDisconnected(self):
        self._disconnectSubs.deliver()

    def loseConnection(self):
        raise NotImplementedError

    # methods to send messages to the worker

    def remotePrint(self, message):
        raise NotImplementedError

    def remoteGetWorkerInfo(self):
        raise NotImplementedError

    def remoteSetBuilderList(self, builders):
        raise NotImplementedError

    def remoteStartCommand(self, remoteCommand, builderName, commandId, commandName, args):
        raise NotImplementedError

    def METHOD_NAME(self):
        raise NotImplementedError

    def remoteStartBuild(self, builderName):
        raise NotImplementedError

    def remoteInterruptCommand(self, builderName, commandId, why):
        raise NotImplementedError
# RemoteCommand base implementation and base proxy
class RemoteCommandImpl:
    """Interface the worker calls back into while a command runs."""

    def remote_update(self, updates):
        raise NotImplementedError

    def remote_complete(self, failure=None):
        raise NotImplementedError


# FileWriter base implementation
class FileWriterImpl:
    """Interface for receiving a file uploaded from the worker."""

    def remote_write(self, data):
        raise NotImplementedError

    def remote_utime(self, accessed_modified):
        raise NotImplementedError

    def remote_unpack(self):
        raise NotImplementedError

    def remote_close(self):
        raise NotImplementedError


# FileReader base implementation
class FileReaderImpl:
    """Interface for streaming a file down to the worker."""

    def remote_read(self, maxLength):
        raise NotImplementedError

    def remote_close(self):
        raise NotImplementedError
5,479 | test scan get partitions status results parts | # -*- coding: utf-8 -*-
import pytest
from .test_base_class import TestBaseClass
class TestScanGetPartitionsStatus(TestBaseClass):
    @pytest.fixture(autouse=True)
    def setup(self, request, as_connection):
        """Populate only the records of test/demo that hash to partitions
        1000-1003, tracking per-partition counts; remove them on teardown."""
        self.test_ns = "test"
        self.test_set = "demo"
        self.partition_1000_count = 0
        self.partition_1001_count = 0
        self.partition_1002_count = 0
        self.partition_1003_count = 0

        as_connection.truncate(self.test_ns, None, 0)

        for i in range(1, 100000):
            put = 0
            key = (self.test_ns, self.test_set, str(i))
            rec_partition = as_connection.get_key_partition_id(self.test_ns, self.test_set, str(i))

            # Only write records landing in partitions 1000-1003.
            if rec_partition == 1000:
                self.partition_1000_count += 1
                put = 1
            if rec_partition == 1001:
                self.partition_1001_count += 1
                put = 1
            if rec_partition == 1002:
                self.partition_1002_count += 1
                put = 1
            if rec_partition == 1003:
                self.partition_1003_count += 1
                put = 1
            if put:
                rec = {
                    "i": i,
                    "s": "xyz",
                    "l": [2, 4, 8, 16, 32, None, 128, 256],
                    "m": {"partition": rec_partition, "b": 4, "c": 8, "d": 16},
                }
                as_connection.put(key, rec)

        def teardown():
            # NOTE(review): this loop mirrors the setup loop to find the same
            # keys; the counter increments here are copy-paste leftovers with
            # no effect on the test — only the remove() matters. Confirm
            # before simplifying.
            for i in range(1, 100000):
                put = 0
                key = ("test", "demo", str(i))
                rec_partition = as_connection.get_key_partition_id(self.test_ns, self.test_set, str(i))
                if rec_partition == 1000:
                    self.partition_1000_count += 1
                    put = 1
                if rec_partition == 1001:
                    self.partition_1001_count += 1
                    put = 1
                if rec_partition == 1002:
                    self.partition_1002_count += 1
                    put = 1
                if rec_partition == 1003:
                    self.partition_1003_count += 1
                    put = 1
                if put:
                    as_connection.remove(key)

        request.addfinalizer(teardown)
def test_scan_get_partitions_status_no_tracking(self):
scan_obj = self.as_connection.scan(self.test_ns, self.test_set)
stats = scan_obj.get_partitions_status()
assert stats == {}
def test_get_partitions_status_after_foreach(self):
"""
Resume a scan using foreach.
"""
records = 0
resumed_records = 0
def callback(part_id, input_tuple):
nonlocal records
if records == 5:
return False
records += 1
scan_obj = self.as_connection.scan(self.test_ns, self.test_set)
scan_obj.foreach(callback, {"partition_filter": {"begin": 1001, "count": 1}})
assert records == 5
partition_status = scan_obj.get_partitions_status()
def resume_callback(part_id, input_tuple):
nonlocal resumed_records
resumed_records += 1
scan_obj2 = self.as_connection.scan(self.test_ns, self.test_set)
policy = {
"partition_filter": {"begin": 1001, "count": 1, "partition_status": partition_status},
}
scan_obj2.foreach(resume_callback, policy)
assert records + resumed_records == self.partition_1001_count
def test_scan_get_partitions_status_results(self):
scan_obj = self.as_connection.scan(self.test_ns, self.test_set)
# policy = {'partition_filter': {'begin': 1001, 'count': 1}}
scan_obj.paginate()
scan_obj.results()
stats = scan_obj.get_partitions_status()
assert stats
def test_scan_get_partitions_status_results_no_tracking(self):
scan_obj = self.as_connection.scan(self.test_ns, self.test_set)
# policy = {'partition_filter': {'begin': 1001, 'count': 1}}
scan_obj.results()
stats = scan_obj.get_partitions_status()
assert not stats
def METHOD_NAME(self):
scan_obj = self.as_connection.scan(self.test_ns, self.test_set)
policy = {"partition_filter": {"begin": 1001, "count": 1}}
results = scan_obj.results(policy)
assert len(results) == self.partition_1001_count
stats = scan_obj.get_partitions_status()
assert stats
def test_scan_get_partitions_status_foreach_parts(self):
scan_obj = self.as_connection.scan(self.test_ns, self.test_set)
ids = []
def callback(part_id, input_tuple):
ids.append(part_id)
policy = {"partition_filter": {"begin": 1001, "count": 1}}
scan_obj.foreach(callback, policy)
assert len(ids) == self.partition_1001_count
stats = scan_obj.get_partitions_status()
assert stats |
5,480 | test smoothed box prior log prob log | #!/usr/bin/env python3
import math
import unittest
import torch
from gpytorch.priors import SmoothedBoxPrior
from gpytorch.test.utils import approx_equal, least_used_cuda_device
class TestSmoothedBoxPrior(unittest.TestCase):
    """Unit tests for gpytorch's SmoothedBoxPrior: device moves, argument
    validation, log_prob values (with and without a log transform), sampling."""

    def test_smoothed_box_prior_to_gpu(self):
        # All tensors held by the prior must follow it onto the GPU.
        if torch.cuda.is_available():
            prior = SmoothedBoxPrior(torch.zeros(2), torch.ones(2)).cuda()
            self.assertEqual(prior.a.device.type, "cuda")
            self.assertEqual(prior.b.device.type, "cuda")
            self.assertEqual(prior.sigma.device.type, "cuda")
            self.assertEqual(prior._c.device.type, "cuda")
            self.assertEqual(prior._r.device.type, "cuda")
            self.assertEqual(prior._M.device.type, "cuda")
            self.assertEqual(prior.tails.loc.device.type, "cuda")
            self.assertEqual(prior.tails.scale.device.type, "cuda")

    def test_smoothed_box_prior_validate_args(self):
        # A lower bound above the upper bound must be rejected.
        with self.assertRaises(ValueError):
            SmoothedBoxPrior(torch.ones(2), torch.zeros(2), validate_args=True)

    def test_smoothed_box_prior_log_prob(self, cuda=False):
        device = torch.device("cuda") if cuda else torch.device("cpu")
        a, b = torch.zeros(2, device=device), torch.ones(2, device=device)
        sigma = 0.1
        prior = SmoothedBoxPrior(a, b, sigma)
        self.assertTrue(torch.equal(prior.a, a))
        self.assertTrue(torch.equal(prior.b, b))
        self.assertTrue(torch.equal(prior.sigma, torch.full_like(prior.a, sigma)))
        self.assertTrue(torch.all(approx_equal(prior._M, torch.full_like(prior.a, 1.6073))))
        t = torch.tensor([0.5, 1.1], device=device)
        self.assertAlmostEqual(prior.log_prob(t).item(), -0.9473, places=4)
        t = torch.tensor([[0.5, 1.1], [0.1, 0.25]], device=device)
        log_prob_expected = torch.tensor([-0.947347, -0.447347], device=t.device)
        self.assertTrue(torch.all(approx_equal(prior.log_prob(t), log_prob_expected)))
        # A 3-element event does not match the 2-d prior and must raise.
        with self.assertRaises(RuntimeError):
            prior.log_prob(torch.zeros(3, device=device))

    def test_smoothed_box_prior_log_prob_cuda(self):
        if torch.cuda.is_available():
            with least_used_cuda_device():
                return self.test_smoothed_box_prior_log_prob(cuda=True)

    def METHOD_NAME(self, cuda=False):
        # Same expected values as test_smoothed_box_prior_log_prob, but with
        # transform=torch.exp so log_prob is evaluated at log-space inputs.
        device = torch.device("cuda") if cuda else torch.device("cpu")
        a, b = torch.zeros(2, device=device), torch.ones(2, device=device)
        sigma = 0.1
        prior = SmoothedBoxPrior(a, b, sigma, transform=torch.exp)
        t = torch.tensor([0.5, 1.1], device=device).log()
        self.assertAlmostEqual(prior.log_prob(t).item(), -0.9473, places=4)
        t = torch.tensor([[0.5, 1.1], [0.1, 0.25]], device=device).log()
        log_prob_expected = torch.tensor([-0.947347, -0.447347], device=t.device)
        self.assertTrue(torch.all(approx_equal(prior.log_prob(t), log_prob_expected)))
        with self.assertRaises(RuntimeError):
            prior.log_prob(torch.ones(3, device=device))

    def test_smoothed_box_prior_log_prob_log_transform_cuda(self):
        if torch.cuda.is_available():
            with least_used_cuda_device():
                return self.METHOD_NAME(cuda=True)

    def test_smoothed_box_prior_batch_log_prob(self, cuda=False):
        # TODO: Implement test for batch mode
        pass

    def test_smoothed_box_prior_batch_log_prob_cuda(self):
        if torch.cuda.is_available():
            with least_used_cuda_device():
                return self.test_smoothed_box_prior_batch_log_prob(cuda=True)

    def test_sample(self):
        # The fraction of samples landing outside [a, b] (the Gaussian tails)
        # should match the analytic tail-mass ratio, within a 0.5% tolerance.
        a = torch.as_tensor(0.0)
        b = torch.as_tensor(1.0)
        sigma = 0.01
        gauss_max = 1 / (math.sqrt(2 * math.pi) * sigma)
        ratio_gaussian_mass = 1 / (gauss_max * (b - a) + 1)
        prior = SmoothedBoxPrior(a, b, sigma)
        n_samples = 50000
        samples = prior.sample((n_samples,))
        gaussian_idx = (samples < a) | (samples > b)
        gaussian_samples = samples[gaussian_idx]
        n_gaussian = gaussian_samples.shape[0]
        self.assertTrue(
            torch.all(approx_equal(torch.as_tensor(n_gaussian / n_samples), ratio_gaussian_mass, epsilon=0.005))
        )
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
5,481 | create support tickets | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=unused-argument
# pylint: disable=too-many-locals
import json
from datetime import date, datetime, timedelta
from azext_support._utils import (get_bearer_token, is_quota_ticket,
is_technical_ticket, parse_support_area_path)
from knack.log import get_logger
logger = get_logger(__name__)
def list_support_tickets(cmd, client, filters=None):
    """List support tickets; defaults to tickets created in the last 7 days."""
    if filters is None:
        one_week_ago = date.today() - timedelta(days=7)
        filters = f"CreatedDate ge {one_week_ago}"
    return client.list(top=100, filter=filters)
def get_support_tickets(cmd, client, ticket_name=None):
    """Fetch a single support ticket by name."""
    return client.get(support_ticket_name=ticket_name)
def update_support_tickets(cmd, client,
                           ticket_name=None,
                           severity=None,
                           status=None,
                           contact_first_name=None,
                           contact_last_name=None,
                           contact_method=None,
                           contact_email=None,
                           contact_additional_emails=None,
                           contact_phone_number=None,
                           contact_timezone=None,
                           contact_country=None,
                           contact_language=None):
    """Update severity, status and/or contact details of an existing ticket."""
    contact_details = {
        "first_name": contact_first_name,
        "last_name": contact_last_name,
        "preferred_contact_method": contact_method,
        "primary_email_address": contact_email,
        "additional_email_addresses": contact_additional_emails,
        "phone_number": contact_phone_number,
        "preferred_time_zone": contact_timezone,
        "country": contact_country,
        "preferred_support_language": contact_language,
    }
    # Only send contact details when the caller supplied at least one field.
    has_contact_update = any(value is not None for value in contact_details.values())
    body = {
        "severity": severity,
        "status": status,
        "contact_details": contact_details if has_contact_update else None,
    }
    return client.update(support_ticket_name=ticket_name, update_support_ticket=body)
def list_support_tickets_communications(cmd, client, ticket_name=None, filters=None):
    """List the communications on a ticket, optionally filtered."""
    return client.list(support_ticket_name=ticket_name, filter=filters)
def get_support_tickets_communications(cmd, client, ticket_name=None, communication_name=None):
    """Fetch a single communication of a ticket by name."""
    return client.get(support_ticket_name=ticket_name, communication_name=communication_name)
def METHOD_NAME(cmd, client,
                ticket_name=None,
                problem_classification=None,
                title=None,
                description=None,
                severity=None,
                start_time=None,
                require_24_by_7_response=None,
                contact_first_name=None,
                contact_last_name=None,
                contact_method=None,
                contact_email=None,
                contact_additional_emails=None,
                contact_phone_number=None,
                contact_timezone=None,
                contact_country=None,
                contact_language=None,
                technical_resource=None,
                quota_change_version=None,
                quota_change_subtype=None,
                quota_change_regions=None,
                quota_change_payload=None,
                partner_tenant_id=None):
    """Create an Azure support ticket (long-running operation).

    Builds the create payload from the contact, severity and problem
    classification arguments, adds quota or technical details when the
    service supports them, and starts `client.begin_create`.  When
    `partner_tenant_id` is given, an auxiliary bearer token for that tenant
    is attached via the `x-ms-authorization-auxiliary` header.
    """
    # The service id is derived from the problem-classification resource path.
    service_name = parse_support_area_path(problem_classification)["service_name"]
    service = "/providers/Microsoft.Support/services/{0}".format(service_name)
    contactBody = {}
    contactBody["first_name"] = contact_first_name
    contactBody["last_name"] = contact_last_name
    contactBody["preferred_contact_method"] = contact_method
    contactBody["primary_email_address"] = contact_email
    contactBody["additional_email_addresses"] = contact_additional_emails
    contactBody["phone_number"] = contact_phone_number
    contactBody["preferred_time_zone"] = contact_timezone
    contactBody["country"] = contact_country
    contactBody["preferred_support_language"] = contact_language
    body = {}
    body["description"] = description
    body["problem_classification_id"] = problem_classification
    body["severity"] = severity
    body["contact_details"] = contactBody
    body["title"] = title
    body["service_id"] = service
    # 24x7 response defaults to False when the caller does not specify it.
    body["require24_x7_response"] = require_24_by_7_response if require_24_by_7_response is not None else False
    # Problem start time defaults to "now", formatted as an ISO-8601 UTC-style
    # timestamp (the "Z" suffix is appended regardless of the input's tz).
    start_date_time = start_time if start_time is not None else datetime.now()
    start_date_time = start_date_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    body["problem_start_time"] = start_date_time
    if is_quota_ticket(service) and quota_change_version is not None:
        # Quota tickets carry per-region change requests.
        quotaBody = {}
        quotaBody["quota_change_request_sub_type"] = quota_change_subtype
        quotaBody["quota_change_request_version"] = quota_change_version
        quota_change_requests = []
        if quota_change_regions is not None and quota_change_payload is not None:
            for (region, payload) in zip(quota_change_regions, quota_change_payload):
                quota_change_requests.append({"region": region, "payload": payload})
        quotaBody["quota_change_requests"] = quota_change_requests
        body["quota_ticket_details"] = quotaBody
    if is_technical_ticket(service) and technical_resource is not None:
        body["technical_ticket_details"] = {"resource_id": technical_resource}
    logger.debug("Sending create request with below payload: ")
    logger.debug(json.dumps(body, indent=4))
    if partner_tenant_id is not None:
        external_bearer_token = get_bearer_token(cmd, partner_tenant_id)
        return client.begin_create(support_ticket_name=ticket_name, create_support_ticket_parameters=body,
                                   headers={'x-ms-authorization-auxiliary': external_bearer_token})
    return client.begin_create(support_ticket_name=ticket_name, create_support_ticket_parameters=body)
def create_support_tickets_communications(cmd, client,
                                          ticket_name=None,
                                          communication_name=None,
                                          communication_body=None,
                                          communication_subject=None,
                                          communication_sender=None):
    """Add a new communication (message) to an existing support ticket."""
    communication = {
        "sender": communication_sender,
        "subject": communication_subject,
        "body": communication_body,
    }
    return client.begin_create(support_ticket_name=ticket_name,
                               communication_name=communication_name,
                               create_communication_parameters=communication)
5,482 | expiration date | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'CertificatePropertiesResponse',
]
@pulumi.output_type
class CertificatePropertiesResponse(dict):
    """
    Certificate resource payload.
    """
    @staticmethod
    def __key_warning(key: str):
        # Warn when dict-style access uses a camelCase wire key instead of the
        # snake_case property getter.
        suggest = None
        if key == "activateDate":
            suggest = "activate_date"
        elif key == "dnsNames":
            suggest = "dns_names"
        elif key == "expirationDate":
            suggest = "expiration_date"
        elif key == "issuedDate":
            suggest = "issued_date"
        elif key == "keyVaultCertName":
            suggest = "key_vault_cert_name"
        elif key == "subjectName":
            suggest = "subject_name"
        elif key == "vaultUri":
            suggest = "vault_uri"
        elif key == "certVersion":
            suggest = "cert_version"

        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in CertificatePropertiesResponse. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        CertificatePropertiesResponse.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        CertificatePropertiesResponse.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 activate_date: str,
                 dns_names: Sequence[str],
                 METHOD_NAME: str,
                 issued_date: str,
                 issuer: str,
                 key_vault_cert_name: str,
                 subject_name: str,
                 thumbprint: str,
                 vault_uri: str,
                 cert_version: Optional[str] = None):
        """
        Certificate resource payload.
        :param str activate_date: The activate date of certificate.
        :param Sequence[str] dns_names: The domain list of certificate.
        :param str expiration_date: The expiration date of certificate.
        :param str issued_date: The issue date of certificate.
        :param str issuer: The issuer of certificate.
        :param str key_vault_cert_name: The certificate name of key vault.
        :param str subject_name: The subject name of certificate.
        :param str thumbprint: The thumbprint of certificate.
        :param str vault_uri: The vault uri of user key vault.
        :param str cert_version: The certificate version of key vault.
        """
        pulumi.set(__self__, "activate_date", activate_date)
        pulumi.set(__self__, "dns_names", dns_names)
        pulumi.set(__self__, "expiration_date", METHOD_NAME)
        pulumi.set(__self__, "issued_date", issued_date)
        pulumi.set(__self__, "issuer", issuer)
        pulumi.set(__self__, "key_vault_cert_name", key_vault_cert_name)
        pulumi.set(__self__, "subject_name", subject_name)
        pulumi.set(__self__, "thumbprint", thumbprint)
        pulumi.set(__self__, "vault_uri", vault_uri)
        # cert_version is the only optional field; omit it entirely when unset.
        if cert_version is not None:
            pulumi.set(__self__, "cert_version", cert_version)

    @property
    @pulumi.getter(name="activateDate")
    def activate_date(self) -> str:
        """
        The activate date of certificate.
        """
        return pulumi.get(self, "activate_date")

    @property
    @pulumi.getter(name="dnsNames")
    def dns_names(self) -> Sequence[str]:
        """
        The domain list of certificate.
        """
        return pulumi.get(self, "dns_names")

    @property
    @pulumi.getter(name="expirationDate")
    def METHOD_NAME(self) -> str:
        """
        The expiration date of certificate.
        """
        return pulumi.get(self, "expiration_date")

    @property
    @pulumi.getter(name="issuedDate")
    def issued_date(self) -> str:
        """
        The issue date of certificate.
        """
        return pulumi.get(self, "issued_date")

    @property
    @pulumi.getter
    def issuer(self) -> str:
        """
        The issuer of certificate.
        """
        return pulumi.get(self, "issuer")

    @property
    @pulumi.getter(name="keyVaultCertName")
    def key_vault_cert_name(self) -> str:
        """
        The certificate name of key vault.
        """
        return pulumi.get(self, "key_vault_cert_name")

    @property
    @pulumi.getter(name="subjectName")
    def subject_name(self) -> str:
        """
        The subject name of certificate.
        """
        return pulumi.get(self, "subject_name")

    @property
    @pulumi.getter
    def thumbprint(self) -> str:
        """
        The thumbprint of certificate.
        """
        return pulumi.get(self, "thumbprint")

    @property
    @pulumi.getter(name="vaultUri")
    def vault_uri(self) -> str:
        """
        The vault uri of user key vault.
        """
        return pulumi.get(self, "vault_uri")

    @property
    @pulumi.getter(name="certVersion")
    def cert_version(self) -> Optional[str]:
        """
        The certificate version of key vault.
        """
        return pulumi.get(self, "cert_version")
|
5,483 | test multiplication | # -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from numpy.testing import assert_allclose
import pytest
import vispy.visuals.transforms as tr
from vispy.geometry import Rect
from vispy.testing import run_tests_if_main
# Short aliases for the transform classes exercised in these tests.
# NOTE: RT and AT are both MatrixTransform — RT is just a second alias.
NT = tr.NullTransform
ST = tr.STTransform
AT = tr.MatrixTransform
RT = tr.MatrixTransform
PT = tr.PolarTransform
LT = tr.LogTransform
CT = tr.ChainTransform
def assert_chain_types(chain, types):
    """Assert that ``chain.transforms`` holds instances of exactly *types*, in order."""
    actual = [type(t) for t in chain.transforms]
    assert actual == types
def assert_chain_objects(chain1, chain2):
    """Assert that two chain transforms are built from equal transform lists."""
    left, right = chain1.transforms, chain2.transforms
    assert left == right
def METHOD_NAME():
    """Check the result types of composing transforms with the ``*`` operator:
    composition collapses to the simplest type that can represent the product,
    falling back to a ChainTransform for incompatible pairs."""
    n = NT()
    s = ST()
    a = AT()
    p = PT()
    log_trans = LT()
    c1 = CT([s, a, p])
    assert c1
    c2 = CT([s, a, s])
    # Null composes away; ST*ST stays ST; anything with a matrix becomes AT.
    assert isinstance(n * n, NT)
    assert isinstance(n * s, ST)
    assert isinstance(s * s, ST)
    assert isinstance(a * s, AT)
    assert isinstance(a * a, AT)
    assert isinstance(s * a, AT)
    # Polar cannot be merged, so products involving it chain up.
    assert isinstance(n * p, PT)
    assert isinstance(s * p, CT)
    assert isinstance(a * p, CT)
    assert isinstance(p * a, CT)
    assert isinstance(p * s, CT)
    # Chains keep order and merge adjacent mergeable members (e.g. s*a -> AT).
    assert_chain_types(p * a, [PT, AT])
    assert_chain_types(p * s, [PT, ST])
    assert_chain_types(s * p, [ST, PT])
    assert_chain_types(s * p * a, [ST, PT, AT])
    assert_chain_types(s * a * p, [AT, PT])
    assert_chain_types(p * s * a, [PT, ST, AT])
    assert_chain_types(s * p * s, [ST, PT, ST])
    assert_chain_types(s * a * p * s * a, [AT, PT, ST, AT])
    assert_chain_types(c2 * a, [ST, AT, ST, AT])
    assert_chain_types(p * log_trans * s, [PT, LT, ST])
def test_transform_chain():
    """Exercise ChainTransform: construction, composition via ``*``,
    append/prepend, simplification, mapping, and shader-map dependencies."""
    # Make dummy classes for easier distinguishing the transforms

    class DummyTrans(tr.BaseTransform):
        glsl_map = "vec4 trans(vec4 pos) {return pos;}"
        glsl_imap = "vec4 trans(vec4 pos) {return pos;}"

    class TransA(DummyTrans):
        pass

    class TransB(DummyTrans):
        pass

    class TransC(DummyTrans):
        pass

    # Create test transforms
    a, b, c = TransA(), TransB(), TransC()
    # Test Chain creation
    assert tr.ChainTransform().transforms == []
    assert tr.ChainTransform(a).transforms == [a]
    assert tr.ChainTransform(a, b).transforms == [a, b]
    assert tr.ChainTransform(a, b, c, a).transforms == [a, b, c, a]
    # Test composition by multiplication
    assert_chain_objects(a * b, tr.ChainTransform(a, b))
    assert_chain_objects(a * b * c, tr.ChainTransform(a, b, c))
    assert_chain_objects(a * b * c * a, tr.ChainTransform(a, b, c, a))
    # Test adding/prepending to transform
    chain = tr.ChainTransform()
    chain.append(a)
    assert chain.transforms == [a]
    chain.append(b)
    assert chain.transforms == [a, b]
    chain.append(c)
    assert chain.transforms == [a, b, c]
    chain.prepend(b)
    assert chain.transforms == [b, a, b, c]
    chain.prepend(c)
    assert chain.transforms == [c, b, a, b, c]
    # Test simplifying
    t1 = tr.STTransform(scale=(2, 3))
    t2 = tr.STTransform(translate=(3, 4))
    t3 = tr.STTransform(translate=(3, 4))
    # Create multiplied versions
    t123 = t1*t2*t3
    t321 = t3*t2*t1
    c123 = tr.ChainTransform(t1, t2, t3)
    c321 = tr.ChainTransform(t3, t2, t1)
    c123s = c123.simplified
    c321s = c321.simplified
    #
    assert isinstance(t123, tr.STTransform)  # or the test is useless
    assert isinstance(t321, tr.STTransform)  # or the test is useless
    assert isinstance(c123s, tr.ChainTransform)  # or the test is useless
    assert isinstance(c321s, tr.ChainTransform)  # or the test is useless
    # Test Mapping
    t1 = tr.STTransform(scale=(2, 3))
    t2 = tr.STTransform(translate=(3, 4))
    chain1 = tr.ChainTransform(t1, t2)
    chain2 = tr.ChainTransform(t2, t1)
    #
    assert chain1.transforms == [t1, t2]  # or the test is useless
    assert chain2.transforms == [t2, t1]  # or the test is useless
    # Order matters: t1*t2 and t2*t1 map differently, but each must agree
    # with the equivalent ChainTransform.
    m12 = (t1*t2).map((1, 1)).tolist()
    m21 = (t2*t1).map((1, 1)).tolist()
    m12_ = chain1.map((1, 1)).tolist()
    m21_ = chain2.map((1, 1)).tolist()
    #
    # print(m12, m21, m12_, m21_)
    assert m12 != m21
    assert m12 == m12_
    assert m21 == m21_
    # Test shader map: the chain's GLSL map must depend on both members' maps.
    t1 = tr.STTransform(scale=(2, 3))
    t2 = tr.STTransform(translate=(3, 4))
    chain = tr.ChainTransform(t1, t2)
    #
    funcs = chain.shader_map().dependencies()
    funcsi = chain.shader_imap().dependencies()
    #
    assert t1.shader_map() in funcs
    assert t2.shader_map() in funcs
    assert t1.shader_imap() in funcsi
    assert t2.shader_imap() in funcsi
def test_map_rect():
    """An STTransform applied to a Rect scales and translates its pos/size."""
    r = Rect((2, 7), (13, 19))
    r1 = ST(scale=(2, 2), translate=(-10, 10)).map(r)
    assert r1 == Rect((-6, 24), (26, 38))
def test_st_transform():
    # Check that STTransform maps exactly like MatrixTransform
    pts = np.random.normal(size=(10, 4))
    scale = (1, 7.5, -4e-8)
    translate = (1e6, 0.2, 0)
    st = tr.STTransform(scale=scale, translate=translate)
    at = tr.MatrixTransform()
    at.scale(scale)
    at.translate(translate)
    # Forward and inverse maps must agree between the two representations.
    assert np.allclose(st.map(pts), at.map(pts))
    assert np.allclose(st.inverse.map(pts), at.inverse.map(pts))
def test_st_mapping():
    """set_mapping must find the scale/translate that maps p1 onto p2."""
    p1 = [[5., 7.], [23., 8.]]
    p2 = [[-1.3, -1.4], [1.1, 1.2]]
    t = tr.STTransform()
    t.set_mapping(p1, p2)
    assert np.allclose(t.map(p1)[:, :len(p2)], p2)
def test_affine_mapping():
    """MatrixTransform.set_mapping must recover translation, scaling,
    scale+translate, and a general affine (SRT) mapping from 4 point pairs."""
    t = tr.MatrixTransform()
    p1 = np.array([[0, 0, 0],
                   [1, 0, 0],
                   [0, 1, 0],
                   [0, 0, 1]])
    # test pure translation
    p2 = p1 + 5.5
    t.set_mapping(p1, p2)
    assert np.allclose(t.map(p1)[:, :p2.shape[1]], p2)
    # test pure scaling
    p2 = p1 * 5.5
    t.set_mapping(p1, p2)
    assert np.allclose(t.map(p1)[:, :p2.shape[1]], p2)
    # test scale + translate
    p2 = (p1 * 5.5) + 3.5
    t.set_mapping(p1, p2)
    assert np.allclose(t.map(p1)[:, :p2.shape[1]], p2)
    # test SRT
    p2 = np.array([[10, 5, 3],
                   [10, 15, 3],
                   [30, 5, 3],
                   [10, 5, 3.5]])
    t.set_mapping(p1, p2)
    assert np.allclose(t.map(p1)[:, :p2.shape[1]], p2)
# Fixed random matrix and the transform instances parametrized below.
m = np.random.RandomState(0).normal(size=(4, 4))
transforms = [
    NT(),
    ST(scale=(1e-4, 2e5), translate=(10, -6e9)),
    AT(m),
    RT(m),
]


@pytest.mark.parametrize('trn', transforms)
def test_inverse(trn):
    """inverse.map must undo map for random points over a wide magnitude range."""
    rng = np.random.RandomState(0)
    N = 20
    x = rng.normal(size=(N, 3))
    pw = rng.normal(size=(N, 3), scale=3)
    # Spread points over many orders of magnitude to stress numerics.
    pos = x * 10 ** pw
    assert_allclose(pos, trn.inverse.map(trn.map(pos))[:, :3], atol=1e-7)
    # log transform only works on positive values
    # abs_pos = np.abs(pos)
    # tr = LT(base=(2, 4.5, 0))
    # assert np.allclose(abs_pos, tr.inverse.map(tr.map(abs_pos))[:,:3])
run_tests_if_main() |
5,484 | tags | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetAddressByNameResult',
'AwaitableGetAddressByNameResult',
'get_address_by_name',
'get_address_by_name_output',
]
@pulumi.output_type
class GetAddressByNameResult:
    """
    Address Resource.
    """
    def __init__(__self__, address_validation_status=None, contact_details=None, id=None, location=None, name=None, shipping_address=None, system_data=None, METHOD_NAME=None, type=None):
        # Generated output type: each argument is validated for its expected
        # wire type, then stored via pulumi.set.
        if address_validation_status and not isinstance(address_validation_status, str):
            raise TypeError("Expected argument 'address_validation_status' to be a str")
        pulumi.set(__self__, "address_validation_status", address_validation_status)
        if contact_details and not isinstance(contact_details, dict):
            raise TypeError("Expected argument 'contact_details' to be a dict")
        pulumi.set(__self__, "contact_details", contact_details)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if shipping_address and not isinstance(shipping_address, dict):
            raise TypeError("Expected argument 'shipping_address' to be a dict")
        pulumi.set(__self__, "shipping_address", shipping_address)
        if system_data and not isinstance(system_data, dict):
            raise TypeError("Expected argument 'system_data' to be a dict")
        pulumi.set(__self__, "system_data", system_data)
        if METHOD_NAME and not isinstance(METHOD_NAME, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", METHOD_NAME)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="addressValidationStatus")
    def address_validation_status(self) -> str:
        """
        Status of address validation
        """
        return pulumi.get(self, "address_validation_status")

    @property
    @pulumi.getter(name="contactDetails")
    def contact_details(self) -> 'outputs.ContactDetailsResponse':
        """
        Contact details for the address
        """
        return pulumi.get(self, "contact_details")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def location(self) -> str:
        """
        The geo-location where the resource lives
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="shippingAddress")
    def shipping_address(self) -> Optional['outputs.ShippingAddressResponse']:
        """
        Shipping details for the address
        """
        return pulumi.get(self, "shipping_address")

    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> 'outputs.SystemDataResponse':
        """
        Represents resource creation and update time
        """
        return pulumi.get(self, "system_data")

    @property
    @pulumi.getter
    def METHOD_NAME(self) -> Optional[Mapping[str, str]]:
        """
        Resource tags.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")
class AwaitableGetAddressByNameResult(GetAddressByNameResult):
    """Awaitable wrapper so the result can be used with ``await`` (generated)."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The `if False: yield` makes this a generator, as __await__ requires;
        # it resolves immediately to a plain GetAddressByNameResult copy.
        if False:
            yield self
        return GetAddressByNameResult(
            address_validation_status=self.address_validation_status,
            contact_details=self.contact_details,
            id=self.id,
            location=self.location,
            name=self.name,
            shipping_address=self.shipping_address,
            system_data=self.system_data,
            METHOD_NAME=self.METHOD_NAME,
            type=self.type)
def get_address_by_name(address_name: Optional[str] = None,
                        resource_group_name: Optional[str] = None,
                        opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAddressByNameResult:
    """
    Gets information about the specified address.
    Azure REST API version: 2021-12-01.


    :param str address_name: The name of the address Resource within the specified resource group. address names must be between 3 and 24 characters in length and use any alphanumeric and underscore only
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    """
    # Invoke the provider function and unpack the raw result into the typed
    # awaitable wrapper.
    __args__ = dict()
    __args__['addressName'] = address_name
    __args__['resourceGroupName'] = resource_group_name
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    __ret__ = pulumi.runtime.invoke('azure-native:edgeorder:getAddressByName', __args__, opts=opts, typ=GetAddressByNameResult).value
    return AwaitableGetAddressByNameResult(
        address_validation_status=pulumi.get(__ret__, 'address_validation_status'),
        contact_details=pulumi.get(__ret__, 'contact_details'),
        id=pulumi.get(__ret__, 'id'),
        location=pulumi.get(__ret__, 'location'),
        name=pulumi.get(__ret__, 'name'),
        shipping_address=pulumi.get(__ret__, 'shipping_address'),
        system_data=pulumi.get(__ret__, 'system_data'),
        METHOD_NAME=pulumi.get(__ret__, 'tags'),
        type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_address_by_name)
def get_address_by_name_output(address_name: Optional[pulumi.Input[str]] = None,
                               resource_group_name: Optional[pulumi.Input[str]] = None,
                               opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetAddressByNameResult]:
    """
    Gets information about the specified address.
    Azure REST API version: 2021-12-01.


    :param str address_name: The name of the address Resource within the specified resource group. address names must be between 3 and 24 characters in length and use any alphanumeric and underscore only
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    """
    # Body is intentionally empty: lift_output_func wraps get_address_by_name
    # into the Output-returning form.
    ...
5,485 | stop ocr server process | import argparse
import multiprocessing
import pickle
from module.logger import logger
from module.webui.setting import State
# Handle of the spawned OCR server child process; None while no server runs.
process: multiprocessing.Process = None
class ModelProxy:
    """Forward OCR calls to a remote zerorpc server, falling back to the
    local OCR models whenever the server is (or becomes) unreachable.
    """

    client = None  # shared zerorpc client, created lazily by init()
    online = True  # cleared on the class/instance once the server proves unreachable

    @classmethod
    def init(cls, address="127.0.0.1:22268"):
        """Connect the shared client to the OCR server and probe it with hello()."""
        import zerorpc

        cls.client = zerorpc.Client(timeout=5)
        cls.client.connect(f"tcp://{address}")
        try:
            logger.info(f"Connecting to OCR server {address}")
            cls.client.hello()
            logger.info("Successfully connected to OCR server")
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; only real errors should mark
            # the server as offline.
            cls.online = False
            logger.warning("Ocr server not running")

    def __init__(self, lang) -> None:
        self.lang = lang

    def _local_model(self):
        # Lazy import so the heavy local models are only loaded on fallback.
        from module.ocr.models import OCR_MODEL

        return getattr(OCR_MODEL, self.lang)

    def ocr(self, img_fp):
        """
        Args:
            img_fp (np.ndarray):

        Returns:

        """
        if self.online:
            img_str = img_fp.dumps()
            try:
                return self.client("ocr", self.lang, img_str)
            except Exception:
                self.online = False
        return self._local_model().ocr(img_fp)

    def ocr_for_single_line(self, img_fp):
        """
        Args:
            img_fp (np.ndarray):

        Returns:

        """
        if self.online:
            img_str = img_fp.dumps()
            try:
                return self.client("ocr_for_single_line", self.lang, img_str)
            except Exception:
                self.online = False
        return self._local_model().ocr_for_single_line(img_fp)

    def ocr_for_single_lines(self, img_list):
        """
        Args:
            img_list (list[np.ndarray]):

        Returns:

        """
        if self.online:
            img_str_list = [img_fp.dumps() for img_fp in img_list]
            try:
                return self.client("ocr_for_single_lines", self.lang, img_str_list)
            except Exception:
                self.online = False
        return self._local_model().ocr_for_single_lines(img_list)

    def set_cand_alphabet(self, cand_alphabet: str):
        """Restrict the model's candidate alphabet (remote first, local fallback)."""
        if self.online:
            try:
                return self.client("set_cand_alphabet", self.lang, cand_alphabet)
            except Exception:
                self.online = False
        return self._local_model().set_cand_alphabet(cand_alphabet)

    def debug(self, img_list):
        """
        Args:
            img_list (list[np.ndarray]):

        Returns:

        """
        if self.online:
            img_str_list = [img_fp.dumps() for img_fp in img_list]
            try:
                return self.client("debug", self.lang, img_str_list)
            except Exception:
                self.online = False
        return self._local_model().debug(img_list)
class ModelProxyFactory:
    """Hands out ModelProxy instances for the known OCR language attributes."""

    def __getattribute__(self, __name: str) -> ModelProxy:
        # Intercept attribute access: the known model names return a proxy
        # bound to that language, connecting the shared client on first use.
        # All other attributes resolve normally.
        if __name in ["azur_lane", "cnocr", "jp", "tw"]:
            if ModelProxy.client is None:
                ModelProxy.init(address=State.deploy_config.OcrClientAddress)
            return ModelProxy(lang=__name)
        else:
            return super().__getattribute__(__name)
def start_ocr_server(port=22268):
    """Start a blocking zerorpc OCR server on the given port.

    Imports are kept local so processes that never serve OCR do not pay for
    zerorpc/zmq and model imports.
    """
    import zerorpc
    import zmq
    from module.ocr.al_ocr import AlOcr
    from module.ocr.models import OcrModel

    class OCRServer(OcrModel):
        # RPC surface mirrors ModelProxy; images arrive as pickled ndarrays.
        # NOTE(review): pickle.loads on RPC payloads is unsafe for untrusted
        # clients — presumably this only serves trusted local connections;
        # confirm before exposing the port.
        def hello(self):
            return "hello"

        def ocr(self, lang, img_fp):
            img_fp = pickle.loads(img_fp)
            cnocr: AlOcr = self.__getattribute__(lang)
            return cnocr.ocr(img_fp)

        def ocr_for_single_line(self, lang, img_fp):
            img_fp = pickle.loads(img_fp)
            cnocr: AlOcr = self.__getattribute__(lang)
            return cnocr.ocr_for_single_line(img_fp)

        def ocr_for_single_lines(self, lang, img_list):
            img_list = [pickle.loads(img_fp) for img_fp in img_list]
            cnocr: AlOcr = self.__getattribute__(lang)
            return cnocr.ocr_for_single_lines(img_list)

        def set_cand_alphabet(self, lang, cand_alphabet):
            cnocr: AlOcr = self.__getattribute__(lang)
            return cnocr.set_cand_alphabet(cand_alphabet)

        def debug(self, lang, img_list):
            img_list = [pickle.loads(img_fp) for img_fp in img_list]
            cnocr: AlOcr = self.__getattribute__(lang)
            return cnocr.debug(img_list)

    server = zerorpc.Server(OCRServer())
    try:
        server.bind(f"tcp://*:{port}")
    except zmq.error.ZMQError:
        # Port already taken (or not bindable); report and give up.
        logger.error(f"Ocr server cannot bind on port {port}")
        return
    logger.info(f"Ocr server listen on port {port}")
    # Blocks forever serving requests.
    server.run()
def start_ocr_server_process(port=22268):
    """Launch the OCR server in a child process unless one is already alive."""
    global process
    if alive():
        return
    process = multiprocessing.Process(target=start_ocr_server, args=(port,))
    process.start()
def METHOD_NAME():
    """Terminate the OCR server child process, if one is running."""
    global process
    if not alive():
        return
    process.kill()
    process = None
def alive() -> bool:
    """Return True if the OCR server child process exists and is running."""
    global process
    return process is not None and process.is_alive()
if __name__ == "__main__":
    # Run the OCR server standalone: `python <this file> --port <port>`.
    parser = argparse.ArgumentParser(description="Alas OCR service")
    parser.add_argument(
        "--port",
        type=int,
        help="Port to listen. Default to OcrServerPort in deploy setting",
    )
    args, _ = parser.parse_known_args()
    # Fall back to the deploy-config port when --port is omitted (argparse
    # leaves it as None, which is falsy).
    port = args.port or State.deploy_config.OcrServerPort
    start_ocr_server(port=port)
5,486 | paths | #
# See top-level LICENSE.rst file for Copyright information
#
# -*- coding: utf-8 -*-
"""
desispec.pipeline.tasks.preproc
===============================
"""
from __future__ import absolute_import, division, print_function
import os
import re
from collections import OrderedDict
from ..defs import (task_name_sep, task_state_to_int, task_int_to_state)
from ...util import option_list
from ...io import findfile
from .base import (BaseTask, task_classes)
from desiutil.log import get_logger
import numpy as np
# NOTE: only one class in this file should have a name that starts with "Task".
class TaskPreproc(BaseTask):
    """Class containing the properties of one preprocessed pixel file."""

    def __init__(self):
        # Initialize the base class first; it consumes the attributes below.
        super(TaskPreproc, self).__init__()
        # Task type and DB schema for this task. _cols must include "state".
        self._type = "preproc"
        self._cols = [
            "night",
            "band",
            "spec",
            "expid",
            "flavor",
            "state"
        ]
        self._coltypes = [
            "integer",
            "text",
            "integer",
            "integer",
            "text",
            "integer"
        ]
        # _name_fields must also be in _cols; _name_formats is applied
        # pairwise when composing the canonical task name.
        self._name_fields = ["night", "band", "spec", "expid"]
        self._name_formats = ["08d", "s", "d", "08d"]

    def METHOD_NAME(self, name):
        """See BaseTask.paths.
        """
        props = self.name_split(name)
        camera = "{}{}".format(props["band"], props["spec"])
        return [findfile("preproc", night=props["night"], expid=props["expid"],
                         camera=camera, groupname=None, nside=None,
                         band=props["band"], spectrograph=props["spec"])]

    def _deps(self, name, db, inputs):
        """See BaseTask.deps.
        """
        from .base import task_classes
        props = self.name_split(name)
        # A preproc task depends on its exposure's fibermap and raw data.
        deptasks = {
            "fibermap": task_classes["fibermap"].name_join(props),
            "rawdata": task_classes["rawdata"].name_join(props)
        }
        return deptasks

    def _run_max_procs(self):
        # This is a serial task.
        return 1

    def _run_time(self, name, procs, db):
        # Run time on one proc on machine with scale factor == 1.0
        return 3.0

    def _run_max_mem_proc(self, name, db):
        # Per-process memory requirements
        return 0

    def _run_defaults(self):
        """See BaseTask.run_defaults.
        """
        return dict()

    def _option_list(self, name, opts):
        """Build the full list of options.

        This includes appending the filenames and incorporating runtime
        options.
        """
        from .base import task_classes, task_type
        dp = self.deps(name)
        options = OrderedDict()
        options.update(opts)
        props = self.name_split(name)
        # Input is the first output path of the raw-data dependency.
        options["infile"] = task_classes["rawdata"].paths(dp["rawdata"])[0]
        options["cameras"] = "{}{}".format(props["band"], props["spec"])
        outfile = self.paths(name)[0]
        options["outfile"] = outfile
        return option_list(options)

    def _run_cli(self, name, opts, procs, db):
        """See BaseTask.run_cli.
        """
        entry = "desi_preproc"
        optlist = self._option_list(name, opts)
        com = "{} {}".format(entry, " ".join(optlist))
        return com

    def _run(self, name, opts, comm, db):
        """See BaseTask.run.
        """
        from ...scripts import preproc
        optlist = self._option_list(name, opts)
        args = preproc.parse(optlist)
        preproc.main(args)
        return

    def postprocessing(self, db, name, cur):
        """For successful runs, postprocessing on DB"""
        # Mark waiting downstream tasks (psf, traceshift) with the same
        # night/band/spec/expid as ready.  Previously this stanza was
        # duplicated verbatim for each table; loop over the table names.
        props = self.name_split(name)
        log = get_logger()
        for tt in ("psf", "traceshift"):
            # NOTE(review): the interpolated values come from name_split() of
            # an internally generated task name, not external input, so
            # string formatting is assumed safe here.
            cmd = ("select name from {} where night={} and band='{}' "
                   "and spec={} and expid={} and state=0").format(
                       tt, props["night"], props["band"], props["spec"],
                       props["expid"])
            cur.execute(cmd)
            tasks = [x for (x,) in cur.fetchall()]
            log.debug("checking {}".format(tasks))
            for task in tasks:
                task_classes[tt].getready(db=db, name=task, cur=cur)
5,487 | assert eq | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly parses environment variables."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Platform flags used below to decide which flags can be exercised.
IS_WINDOWS = os.name == 'nt'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'

# Path of the helper binary that echoes the value of a Google Test flag.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')

# Private copy of the environment, mutated by SetEnvVar and passed to the
# subprocess so the tests never modify the real os.environ.
environ = os.environ.copy()
def METHOD_NAME(expected, actual):
    """Raises AssertionError when expected != actual, printing both values."""
    if expected == actual:
        return
    print('Expected: %s' % (expected,))
    print('  Actual: %s' % (actual,))
    raise AssertionError
def SetEnvVar(env_var, value):
    """Sets the env variable to 'value'; unsets it when 'value' is None."""
    if value is None:
        # Remove the variable if present; ignore if it was never set.
        environ.pop(env_var, None)
    else:
        environ[env_var] = value
def GetFlag(flag):
    """Runs gtest_env_var_test_ and returns its output."""
    # `flag` may be None, in which case the binary is run with no argument.
    args = [COMMAND] if flag is None else [COMMAND, flag]
    return gtest_test_utils.Subprocess(args, env=environ).output
def TestFlag(flag, test_val, default_val):
    """Verifies that the given flag is affected by the corresponding env var."""
    # Google Test maps flag "foo" to the environment variable "GTEST_FOO".
    env_var = 'GTEST_' + flag.upper()
    # With the variable set, the binary must report the overridden value...
    SetEnvVar(env_var, test_val)
    METHOD_NAME(test_val, GetFlag(flag))
    # ...and with it unset, the built-in default.
    SetEnvVar(env_var, None)
    METHOD_NAME(default_val, GetFlag(flag))
class GTestEnvVarTest(gtest_test_utils.TestCase):
    """End-to-end checks that GTEST_* env vars drive the matching flags."""

    def testEnvVarAffectsFlag(self):
        """Tests that environment variable should affect the corresponding flag."""
        TestFlag('break_on_failure', '1', '0')
        TestFlag('color', 'yes', 'auto')
        TestFlag('filter', 'FooTest.Bar', '*')
        SetEnvVar('XML_OUTPUT_FILE', None)  # For 'output' test
        TestFlag('output', 'xml:tmp/foo.xml', '')
        TestFlag('print_time', '0', '1')
        TestFlag('repeat', '999', '1')
        TestFlag('throw_on_failure', '1', '0')
        TestFlag('death_test_style', 'threadsafe', 'fast')
        TestFlag('catch_exceptions', '0', '1')
        if IS_LINUX:
            # These flags are only meaningful on fork-capable platforms.
            TestFlag('death_test_use_fork', '1', '0')
            TestFlag('stack_trace_depth', '0', '100')

    def testXmlOutputFile(self):
        """Tests that $XML_OUTPUT_FILE affects the output flag."""
        SetEnvVar('GTEST_OUTPUT', None)
        SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml')
        METHOD_NAME('xml:tmp/bar.xml', GetFlag('output'))

    def testXmlOutputFileOverride(self):
        """Tests that $XML_OUTPUT_FILE is overridden by $GTEST_OUTPUT"""
        SetEnvVar('GTEST_OUTPUT', 'xml:tmp/foo.xml')
        SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml')
        METHOD_NAME('xml:tmp/foo.xml', GetFlag('output'))
if __name__ == '__main__':
    # Delegate to the shared Google Test python test runner.
    gtest_test_utils.Main()
5,488 | test repo update branch |
# Copyright 2016 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals, absolute_import
from django.test import override_settings
from ci.tests import utils, SeleniumTester
from ci import models
from django.urls import reverse
@override_settings(INSTALLED_GITSERVERS=[utils.github_config()])
class Tests(SeleniumTester.SeleniumTester):
    """Browser tests for the repo view page.

    Each test mutates model objects server-side, waits for the page's
    polling JavaScript to refresh, then verifies the rendered repo and
    event lists against the database via check_repos()/check_events().
    """

    @SeleniumTester.test_drivers()
    def test_basic(self):
        # Page renders correctly right after load, before any updates.
        repo, branch = self.create_repo_with_prs()
        url = reverse('ci:view_repo', args=[repo.pk])
        self.get(url)
        self.check_repos()
        self.check_events()

    @SeleniumTester.test_drivers()
    def test_repo_update_all(self):
        # Branch and every PR change at once; the page must pick up all of it.
        repo, branch = self.create_repo_with_prs()
        url = reverse('ci:view_repo', args=[repo.pk])
        self.get(url)
        self.check_repos()
        self.check_events()
        self.wait_for_js()
        branch.status = models.JobStatus.SUCCESS
        branch.save()
        for pr in repo.pull_requests.all():
            pr.status = models.JobStatus.SUCCESS
            pr.title = "New title"
            pr.username = "foobar"
            pr.save()
        self.wait_for_js()
        self.check_js_error()
        self.check_repos()
        self.check_events()

    @SeleniumTester.test_drivers()
    def METHOD_NAME(self):
        # Only the branch status changes.
        repo, branch = self.create_repo_with_prs()
        url = reverse('ci:view_repo', args=[repo.pk])
        self.get(url)
        self.check_repos()
        self.check_events()
        # need to sleep so that last_modified will trigger
        self.wait_for_js()
        branch.status = models.JobStatus.SUCCESS
        branch.save()
        self.wait_for_js()
        self.check_js_error()
        self.check_repos()
        self.check_events()

    @SeleniumTester.test_drivers()
    def test_repo_update_pr(self):
        # Only one PR's status/title/username change.
        repo, branch = self.create_repo_with_prs()
        url = reverse('ci:view_repo', args=[repo.pk])
        self.get(url)
        self.check_repos()
        self.check_events()
        self.wait_for_js()
        pr = repo.pull_requests.last()
        pr.status = models.JobStatus.SUCCESS
        pr.title = "New title"
        pr.username = "foobar"
        pr.save()
        self.wait_for_js()
        self.check_js_error()
        self.check_repos()
        self.check_events()

    @SeleniumTester.test_drivers()
    def test_new_branch(self):
        # A branch created after page load must appear on refresh.
        repo, branch = self.create_repo_with_prs()
        url = reverse('ci:view_repo', args=[repo.pk])
        self.get(url)
        self.check_repos()
        self.check_events()
        self.wait_for_js()
        branch2 = utils.create_branch(name="branch2", repo=repo)
        branch2.status = models.JobStatus.SUCCESS
        branch2.save()
        self.wait_for_js()
        self.check_js_error()
        self.check_repos()
        self.check_events()

    @SeleniumTester.test_drivers()
    def test_new_pr(self):
        # A PR created after page load must appear on refresh.
        repo, branch = self.create_repo_with_prs()
        url = reverse('ci:view_repo', args=[repo.pk])
        self.get(url)
        self.check_repos()
        self.check_events()
        pr = utils.create_pr(repo=repo, number=100)
        pr.status = models.JobStatus.RUNNING
        pr.save()
        self.wait_for_js()
        self.check_js_error()
        self.check_repos()
        self.check_events()

    @SeleniumTester.test_drivers()
    def test_close_pr(self):
        # A closed PR must disappear from the list on refresh.
        repo, branch = self.create_repo_with_prs()
        url = reverse('ci:view_repo', args=[repo.pk])
        self.get(url)
        self.check_repos()
        self.check_events()
        pr = repo.pull_requests.first()
        pr.closed = True
        pr.save()
        self.wait_for_js()
        self.check_js_error()
        self.check_repos()
        self.check_events()

    @SeleniumTester.test_drivers()
    def test_event_update(self):
        # Event and job status changes must be reflected in the event list.
        ev = self.create_event_with_jobs()
        url = reverse('ci:view_repo', args=[ev.base.branch.repository.pk])
        self.get(url)
        self.check_repos()
        self.check_events()
        ev.status = models.JobStatus.SUCCESS
        ev.save()
        for job in ev.jobs.all():
            job.status = models.JobStatus.SUCCESS
            job.failed_step = "Failed"
            job.invalidated = True
            job.save()
        self.wait_for_js()
        self.check_js_error()
        self.check_repos()
        self.check_events()

    @SeleniumTester.test_drivers()
    def test_new_event(self):
        # A new event created after page load must appear on refresh.
        ev = self.create_event_with_jobs()
        url = reverse('ci:view_repo', args=[ev.base.branch.repository.pk])
        self.get(url)
        self.check_repos()
        self.check_events()
        # need to sleep to make sure creation time is different
        self.wait_for_js()
        self.create_event_with_jobs(commit='4321')
        self.wait_for_js()
        self.check_js_error()
        self.check_repos()
        self.check_events()

    @SeleniumTester.test_drivers()
    def test_event_new_job(self):
        # A job added to an existing event must appear on refresh.
        ev = self.create_event_with_jobs()
        url = reverse('ci:view_repo', args=[ev.base.branch.repository.pk])
        self.get(url)
        self.check_repos()
        self.check_events()
        ev = models.Event.objects.first()
        r2 = utils.create_recipe(name="r2")
        ev.save()  # to trigger the update
        utils.create_job(event=ev, recipe=r2)
        self.wait_for_js()
        self.check_js_error()
        self.check_repos()
        self.check_events()
5,489 | parse authors | # -*- coding: utf-8 -*-
"""
# TODO: Read the TODOs carefully and remove all existing comments in this file.
This is a sample using the ChapterWithVolumeBrowserTemplate as the template.
It provides a wrapper around the GeneralBrowserTemplate that generates both
the chapter list and optionally the volume list.
Put your source file inside the language folder. The `en` folder has too many
files, therefore it is grouped using the first letter of the domain name.
"""
import logging
from typing import Generator
from bs4 import BeautifulSoup, Tag
from lncrawl.models import Chapter, Volume
from lncrawl.templates.browser.optional_volume import OptionalVolumeBrowserTemplate
logger = logging.getLogger(__name__)
# TODO: You can safely delete all [OPTIONAL] methods if you do not need them.
class MyCrawlerName(OptionalVolumeBrowserTemplate):
    """Sample crawler skeleton; fill in the TODO-marked methods."""

    # TODO: [REQUIRED] Provide the URLs supported by this crawler.
    base_url = ["http://sample.url/"]

    # TODO: [OPTIONAL] Set True if this crawler is for manga/manhua/manhwa.
    has_manga = False

    # TODO: [OPTIONAL] Set True if this source contains machine translations.
    has_mtl = False

    # TODO: [OPTIONAL] This is called before all other methods.
    def initialize(self) -> None:
        # You can customize `TextCleaner` and other necessary things.
        pass

    # TODO: [OPTIONAL] Open the Novel URL in the browser
    def visit_novel_page_in_browser(self) -> BeautifulSoup:
        # self.visit(self.novel_url)
        pass

    # TODO: [OPTIONAL] Parse and return the novel title in the browser
    def parse_title_in_browser(self) -> str:
        # return self.parse_title(self.browser.soup)
        pass

    # TODO: [REQUIRED] Parse and return the novel title
    def parse_title(self, soup: BeautifulSoup) -> str:
        # The soup here is the result of `self.get_soup(self.novel_url)`
        pass

    # TODO: [OPTIONAL] Parse and return the novel cover image in the browser
    def parse_cover_in_browser(self) -> str:
        # return self.parse_cover(self.browser.soup)
        pass

    # TODO: [REQUIRED] Parse and return the novel cover
    def parse_cover(self, soup: BeautifulSoup) -> str:
        # The soup here is the result of `self.get_soup(self.novel_url)`
        pass

    # TODO: [OPTIONAL] Parse and return the novel author in the browser
    def parse_authors_in_browser(self) -> Generator[Tag, None, None]:
        # yield from self.parse_authors(self.browser.soup)
        pass

    # TODO: [REQUIRED] Parse and return the novel authors
    def METHOD_NAME(self, soup: BeautifulSoup) -> Generator[str, None, None]:
        # The soup here is the result of `self.get_soup(self.novel_url)`
        #
        # Example 1: <a single author example>
        # tag = soup.find("strong", string="Author:")
        # assert tag
        # yield tag.next_sibling.text.strip()
        #
        # Example 2: <multiple authors example>
        # for a in soup.select(".m-imgtxt a[href*='/authors/']"):
        #     yield a.text.strip()
        pass

    # TODO: [OPTIONAL] Open the Chapter URL in the browser
    def visit_chapter_page_in_browser(self, chapter: Chapter) -> None:
        # self.visit(chapter.url)
        pass

    # TODO: [OPTIONAL] Select volume list item tags from the page soup
    def select_volume_tags(self, soup: BeautifulSoup) -> Generator[Tag, None, None]:
        # The soup here is the result of `self.get_soup(self.novel_url)`
        #
        # Example: yield from soup.select("#toc .vol-item")
        pass

    # TODO: [OPTIONAL] Select volume list item tags from the browser
    def select_volume_tags_in_browser(self) -> Generator[Tag, None, None]:
        # return self.select_volume_tags(self.browser.soup)
        pass

    # TODO: [OPTIONAL] Parse a single volume from volume list item tag
    def parse_volume_item(self, tag: Tag, id: int) -> Volume:
        # The tag here comes from `self.select_volume_tags`
        # The id here is the next available volume id
        #
        # Example:
        # return Volume(
        #     id=id,
        #     title=tag.text.strip(),
        # )
        pass

    # TODO: [OPTIONAL] Parse a single volume from volume list item tag when using browser
    def parse_volume_item_in_browser(self, tag: Tag, id: int) -> Volume:
        # return self.parse_volume_item(tag, id)
        pass

    # TODO: [REQUIRED] Select chapter list item tags from volume tag and page soup
    def select_chapter_tags(self, tag: Tag) -> Generator[Tag, None, None]:
        # The tag here comes from `self.select_volume_tags`
        #
        # Example: yield from tag.select(".chapter-item")
        pass

    # TODO: [OPTIONAL] Select chapter list item tags from volume tag and page soup when in browser
    def select_chapter_tags_in_browser(self, tag: Tag) -> Generator[Tag, None, None]:
        # return self.select_chapter_tags(tag)
        pass

    # TODO: [REQUIRED] Parse a single chapter from chapter list item tag
    def parse_chapter_item(self, tag: Tag, id: int, vol: Volume) -> Chapter:
        # The tag here comes from `self.select_chapter_tags`
        # The vol here comes from `self.parse_volume_item`
        # The id here is the next available chapter id
        #
        # Example:
        # return Chapter(
        #     id=id,
        #     volume=vol.id,
        #     title=tag.text.strip(),
        #     url=self.absolute_url(tag["href"]),
        # )
        pass

    # TODO: [OPTIONAL] Parse a single chapter from chapter list item tag when in browser
    def parse_chapter_item_in_browser(self, tag: Tag, id: int, vol: Volume) -> Chapter:
        # return self.parse_chapter_item(tag, id, vol)
        pass

    # TODO: [OPTIONAL] Select the tag containing the chapter text in the browser
    def select_chapter_body_in_browser(self) -> Tag:
        # return self.select_chapter_body(self.browser.soup)
        pass

    # TODO: [REQUIRED] Select the tag containing the chapter text
    def select_chapter_body(self, soup: BeautifulSoup) -> Tag:
        # The soup here is the result of `self.get_soup(chapter.url)`
        #
        # Example: return soup.select_one(".m-read .txt")
        pass

    # TODO: [OPTIONAL] Return the index in self.chapters which contains a chapter URL
    def index_of_chapter(self, url: str) -> int:
        # To get more help, check the default implemention in the `Crawler` class.
        pass
5,490 | task | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Helpers for defining Milan dual-encoder models."""
import functools
from lingvo.core import base_model_params
from lingvo.core import layers as lingvo_layers
from lingvo.core import optimizer
from lingvo.core import schedule
from lingvo.tasks.milan import constants
from lingvo.tasks.milan import dataset_spec
from lingvo.tasks.milan import dual_encoder
from lingvo.tasks.milan import input_generator
class RecipeError(Exception):
    """Raised when a DualEncoderRecipe is misconfigured (e.g. no encoders)."""
    pass
class DualEncoderRecipe(base_model_params.SingleTaskModelParams):
    """Base class that simplifies configuration of Milan dual encoder models.

    `DualEncoderRecipe` is a `SingleTaskModelParams` with extra builder-like
    methods for configuring the dual encoder (the `Task()` params) and input
    generators (advertised through `GetAllDatasetParams()`).

    In typical usage, model definitions subclass `DualEncoderRecipe`, call helper
    methods in the constructor to configure the dual encoder, and specify a
    `default_dataset` for the model to run on. For example::

      @model_registry.RegisterSingleTaskModel
      class MyExperiment(DualEncoderRecipe):
        def __init__(self):
          super().__init__()
          self.AddModality(
              'TEXT',
              input_feature='text_feature',
              id_feature='text_id',
              encoder=MyTextEncoder.Params(),
              encoder_output_dim=42)
          # Preprocess the raw 'image_feature' input prior to encoding.
          self.AddPreprocessor('image_feature', ImagePreprocessor.Params())
          self.AddModality(
              'IMAGE',
              input_feature='image_feature',
              id_feature='image_id',
              encoder=MyImageEncoder.Params(),
              encoder_output_dim=67)

        @property
        def default_dataset(self) -> DatasetSpec:
          # Point to your dataset of choice
          ...
    """

    def __init__(self):
        # Define these members here to make pytype happy.
        self.dataset = None
        self.input_params = None
        self.task_params = None

        self.dataset = self._ChooseDatasetSpec()
        # Base input params, shared by both train and eval sets.
        self.input_params = input_generator.MilanInputGenerator.Params().Set(
            batch_size=64,
            # Run input pipeline on each TPU host (vs. one for all hosts) to
            # avoid input-boundedness.
            use_per_host_infeed=True)
        # Default optimization and checkpointer settings.
        self.task_params = dual_encoder.MilanTask.Params()
        self.task_params.train.Set(
            clip_gradient_norm_to_value=1.0,
            grad_norm_tracker=lingvo_layers.GradNormTracker.Params().Set(
                name='grad_norm_tracker',
                # Don't clip if the grad norm is already smaller than this.
                grad_norm_clip_cap_min=0.1),
            save_max_to_keep=2000,
            save_keep_checkpoint_every_n_hours=0.1667,  # At most every 10 min.
            optimizer=optimizer.Adam.Params().Set(
                beta1=0.9, beta2=0.999, epsilon=1e-8),
            learning_rate=0.0001,
            lr_schedule=schedule.StepwiseExponentialSchedule.Params().Set(
                decay=0.999, num_steps_per_decay=1000),
            tpu_steps_per_loop=100,
            max_steps=40000)

    def _ChooseDatasetSpec(self):
        """Returns the `DatasetSpec` to be used by the recipe."""
        return self.default_dataset

    @property
    def default_dataset(self) -> dataset_spec.DatasetSpec:
        """Returns a default dataset for the recipe to use.

        Subclasses should override this method to specify a dataset, or add logic
        (elsewhere) to choose the dataset at runtime, falling back to this one
        as the default.
        """
        raise NotImplementedError()

    @property
    def encoder_configs(self):
        # Shortcut to the per-modality encoder config map.
        return self.task_params.dual_encoder.encoder_configs

    def AddModality(self, name: str, **kwargs):
        """Registers an encoder config for modality `name`; returns the config."""
        config = dual_encoder.EncoderConfig().Set(**kwargs)
        self.encoder_configs[name] = config
        return config

    def AddPreprocessor(self, input_feature, preprocessor):
        """Attaches a preprocessor params copy to the given input feature."""
        self.input_params.preprocessors[input_feature] = preprocessor.Copy()

    def StartFromCheckpoint(self, checkpoint_path: str):
        """Configures the recipe to start training from the given model checkpoint.

        This is intended to be used in fine-tuning recipes. All variables, including
        Adam accumulators, are loaded from the checkpoint except for global step
        (so that it resets to 0 in new experiment) and grad norm tracker stats
        (since gradients are likely to have different moments in the new
        experiment).

        Args:
          checkpoint_path: Path of the checkpoint to start training from.
        """
        self.task_params.train.init_from_checkpoint_rules = {
            checkpoint_path: (
                [('(.*)', '%s')],
                # Don't load vars matching these regexes.
                ['.*grad_norm_tracker/.*', 'global_step'])
        }

    # Methods below implement the lingvo SingleTaskModelParams interface, allowing
    # the recipe to be registered with `RegisterSingleTaskModel()`.

    def GetAllDatasetParams(self):
        # Train shuffles; Dev/Test read deterministically (no shuffle buffer).
        return {
            'Train':
                self.input_params.Copy().Set(
                    name='Train',
                    dataset_fn=functools.partial(
                        self.dataset.Read,
                        split=constants.Split.TRAIN,
                        shuffle_buffer_size=1024)),
            'Dev':
                self.input_params.Copy().Set(
                    name='Dev',
                    dataset_fn=functools.partial(
                        self.dataset.Read,
                        split=constants.Split.DEV,
                        shuffle_buffer_size=0)),
            'Test':
                self.input_params.Copy().Set(
                    name='Test',
                    dataset_fn=functools.partial(
                        self.dataset.Read,
                        split=constants.Split.TEST,
                        shuffle_buffer_size=0)),
        }

    def METHOD_NAME(self):
        # Finalize and return the Task() params; requires at least one encoder.
        task_params = self.task_params.Copy()
        if not task_params.dual_encoder.encoder_configs:
            raise RecipeError('Must configure at least one encoder.')
        assert task_params.dual_encoder.label_fn is None
        task_params.dual_encoder.label_fn = self.dataset.Label
        return task_params
""" Codec for the Punycode encoding, as specified in RFC 3492

Written by Martin v. Löwis.
"""
import codecs
##################### Encoding #####################################
def segregate(str):
    """3.1 Basic code point segregation"""
    # Basic (ASCII) code points keep their order; extended code points are
    # deduplicated and sorted, as the encoder requires.
    base = bytes(ord(c) for c in str if ord(c) < 128)
    extended = sorted({c for c in str if ord(c) >= 128})
    return base, extended
def selective_len(str, max):
    """Return the length of str, considering only characters below max."""
    return sum(1 for c in str if ord(c) < max)
def selective_find(str, char, index, pos):
    """Return a pair (index, pos), indicating the next occurrence of
    char in str. index is the position of the character considering
    only ordinals up to and including char, and pos is the position in
    the full string. index/pos is the starting position in the full
    string."""
    for cur in range(pos + 1, len(str)):
        c = str[cur]
        if c == char:
            return index + 1, cur
        if c < char:
            # Characters below `char` count toward the selective index.
            index += 1
    return (-1, -1)
def insertion_unsort(str, extended):
    """3.2 Insertion unsort coding"""
    # Encode each extended character as a sequence of deltas from the
    # previous insertion state (RFC 3492 section 6.3).  0x80 is the first
    # code point outside basic ASCII.
    oldchar = 0x80
    result = []
    oldindex = -1
    for c in extended:
        index = pos = -1
        char = ord(c)
        # Number of insertion positions available at this state: characters
        # below `char`, plus one for appending at the end.
        curlen = selective_len(str, char)
        delta = (curlen+1) * (char - oldchar)
        while 1:
            index,pos = selective_find(str,c,index,pos)
            if index == -1:
                break
            delta += index - oldindex
            result.append(delta-1)
            oldindex = index
            delta = 0
        oldchar = char
    return result
def T(j, bias):
    # Per-digit threshold, clamped to [tmin, tmax] = [1, 26].
    # Punycode parameters: tmin = 1, tmax = 26, base = 36
    return min(26, max(1, 36 * (j + 1) - bias))
# Punycode digit alphabet: values 0..25 -> 'a'..'z', 26..35 -> '0'..'9'.
digits = b"abcdefghijklmnopqrstuvwxyz0123456789"
def generate_generalized_integer(N, bias):
    """3.3 Generalized variable-length integers"""
    # Emit base-36 digits, least significant first, using per-position
    # thresholds t = T(j, bias) per RFC 3492 section 6.3.
    out = bytearray()
    j = 0
    t = T(0, bias)
    while N >= t:
        out.append(digits[t + ((N - t) % (36 - t))])
        N = (N - t) // (36 - t)
        j += 1
        t = T(j, bias)
    out.append(digits[N])
    return bytes(out)
def METHOD_NAME(delta, first, numchars):
    """Bias adaptation (RFC 3492 section 6.1).

    Punycode parameters: damp = 700, base = 36, tmin = 1, tmax = 26, skew = 38.
    """
    delta = delta // 700 if first else delta // 2
    delta += delta // numchars
    divisions = 0
    # ((base - tmin) * tmax) // 2 == 455
    while delta > 455:
        delta //= 35  # base - tmin
        divisions += 36
    return divisions + (36 * delta // (delta + 38))
def generate_integers(baselen, deltas):
    """3.4 Bias adaptation"""
    # Punycode parameters: initial bias = 72, damp = 700, skew = 38
    out = bytearray()
    bias = 72
    for points, delta in enumerate(deltas):
        out += generate_generalized_integer(delta, bias)
        # Re-adapt the bias after every encoded delta.
        bias = METHOD_NAME(delta, points == 0, baselen + points + 1)
    return bytes(out)
def punycode_encode(text):
    """Encode `text` (str) to its punycode bytes representation."""
    base, extended = segregate(text)
    deltas = insertion_unsort(text, extended)
    tail = generate_integers(len(base), deltas)
    # The hyphen delimiter is only emitted when there are basic code points.
    return base + b"-" + tail if base else tail
##################### Decoding #####################################
def decode_generalized_number(extended, extpos, bias, errors):
"""3.3 Generalized variable-length integers"""
result = 0
w = 1
j = 0
while 1:
try:
char = ord(extended[extpos])
except IndexError:
if errors == "strict":
raise UnicodeError("incomplete punicode string")
return extpos + 1, None
extpos += 1
if 0x41 <= char <= 0x5A: # A-Z
digit = char - 0x41
elif 0x30 <= char <= 0x39:
digit = char - 22 # 0x30-26
elif errors == "strict":
raise UnicodeError("Invalid extended code point '%s'"
% extended[extpos])
else:
return extpos, None
t = T(j, bias)
result += digit * w
if digit < t:
return extpos, result
w = w * (36 - t)
j += 1
def insertion_sort(base, extended, errors):
    """3.2 Insertion sort coding (decoder side of the insertion unsort)."""
    # Rebuild the full string by replaying the encoded deltas: each delta
    # advances an (insertion position, code point) state machine.
    char = 0x80
    pos = -1
    bias = 72
    extpos = 0
    while extpos < len(extended):
        newpos, delta = decode_generalized_number(extended, extpos,
                                                  bias, errors)
        if delta is None:
            # There was an error in decoding. We can't continue because
            # synchronization is lost.
            return base
        pos += delta+1
        char += pos // (len(base) + 1)
        if char > 0x10FFFF:
            if errors == "strict":
                raise UnicodeError("Invalid character U+%x" % char)
            char = ord('?')
        pos = pos % (len(base) + 1)
        base = base[:pos] + chr(char) + base[pos:]
        # Re-adapt the bias after every decoded delta (first=True only once).
        bias = METHOD_NAME(delta, (extpos == 0), len(base))
        extpos = newpos
    return base
def punycode_decode(text, errors):
    """Decode punycode bytes/str `text` back to a str."""
    if isinstance(text, str):
        text = text.encode("ascii")
    if isinstance(text, memoryview):
        text = bytes(text)
    # Split on the LAST hyphen: basic code points before, deltas after.
    prefix, sep, suffix = text.rpartition(b"-")
    if sep:
        base = str(prefix, "ascii", errors)
    else:
        # No delimiter: the whole input is extended data.
        base = ""
    extended = str(suffix, "ascii").upper()
    return insertion_sort(base, extended, errors)
### Codec APIs
class Codec(codecs.Codec):
    """Stateless encoder/decoder pair for the 'punycode' codec."""

    def encode(self, input, errors='strict'):
        # Encoding text to punycode cannot fail; `errors` is accepted but unused.
        res = punycode_encode(input)
        return res, len(input)

    def decode(self, input, errors='strict'):
        # Only these three error policies are supported by the decoder.
        if errors not in ('strict', 'replace', 'ignore'):
            raise UnicodeError("Unsupported error handling "+errors)
        res = punycode_decode(input, errors)
        return res, len(input)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Punycode has no incremental form; each call encodes its whole input.
        return punycode_encode(input)
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Punycode has no incremental form; each call decodes its whole input.
        if self.errors not in ('strict', 'replace', 'ignore'):
            raise UnicodeError("Unsupported error handling "+self.errors)
        return punycode_decode(input, self.errors)
class StreamWriter(Codec,codecs.StreamWriter):
    # Combines Codec.encode with the stream-writer machinery; nothing to add.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Combines Codec.decode with the stream-reader machinery; nothing to add.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo registry entry for the 'punycode' encoding."""
    return codecs.CodecInfo(
        name='punycode',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamwriter=StreamWriter,
        streamreader=StreamReader,
    )
5,492 | run self scheduler simulator | #################################################################################
# The Institute for the Design of Advanced Energy Systems Integrated Platform
# Framework (IDAES IP) was produced under the DOE Institute for the
# Design of Advanced Energy Systems (IDAES).
#
# Copyright (c) 2018-2023 by the software owners: The Regents of the
# University of California, through Lawrence Berkeley National Laboratory,
# National Technology & Engineering Solutions of Sandia, LLC, Carnegie Mellon
# University, West Virginia University Research Corporation, et al.
# All rights reserved. Please see the files COPYRIGHT.md and LICENSE.md
# for full copyright and license information.
#################################################################################
from importlib import resources
from numbers import Number
from pathlib import Path
from typing import Dict, Union
import os
import pytest
# define custom type for type hinting
PrescientOptions = Dict[str, Union[str, bool, Number, dict]]
from idaes.apps.grid_integration import DoubleLoopCoordinator
from idaes.apps.grid_integration.tests.util import (
make_testing_tracker,
make_testing_bidder,
)
## create trackers
# Module-level double-loop components shared by all tests below; built once at
# import time so the coordinator's plugin module can be referenced in fixtures.
thermal_tracker = make_testing_tracker()
thermal_projection_tracker = make_testing_tracker()
thermal_bidder = make_testing_bidder()
# create coordinator
# Couples the bidder and the two trackers into a single Prescient plugin.
coordinator = DoubleLoopCoordinator(
    bidder=thermal_bidder,
    tracker=thermal_tracker,
    projection_tracker=thermal_projection_tracker,
)
class TestDoubleLoopIntegration:
    """Integration test for the double loop using 5bus use case.

    Two Prescient simulations are run via fixtures: one with the bidder
    double-loop plugin (module taken from the global `coordinator`), and one
    with the self-scheduler plugin file. Both output directories are then
    checked for the expected result files.
    """

    @pytest.fixture
    def data_path(self) -> Path:
        # NOTE here we want the path to the entire 5bus directory
        # we need to specify __init__.py as a workaround for Python 3.9,
        # where importlib.resources.path() requires the resource to be a file
        # directories are not supported and will raise an error if attempted
        with resources.path("idaes.tests.prescient.5bus", "__init__.py") as pkg_file:
            return Path(pkg_file).parent

    @pytest.mark.unit
    def test_data_path_available(self, data_path: Path):
        assert data_path.is_dir()

    @pytest.fixture
    def output_dir(self, tmp_path: Path) -> Path:
        path = tmp_path / "bidder_integration_test_output"
        path.mkdir()
        return path

    @pytest.fixture
    def self_scheduler_output_dir(self, tmp_path: Path) -> Path:
        path = tmp_path / "self_scheduler_integration_test_output"
        path.mkdir()
        return path

    @pytest.fixture
    def self_scheduler_plugin_path(self) -> Path:
        with resources.path(
            "idaes.apps.grid_integration.tests",
            "self_scheduler_integration_test_plugin.py",
        ) as p:
            return Path(p)

    @pytest.mark.unit
    def test_self_scheduler_plugin_path_is_existing_file(
        self, self_scheduler_plugin_path
    ):
        assert self_scheduler_plugin_path.is_file()

    @pytest.fixture
    def prescient_options(self, data_path: Path) -> PrescientOptions:
        # Base Prescient options shared by both simulation variants; each
        # variant fixture adds its own "plugin" and "output_directory" keys.
        return {
            "data_path": str(data_path),
            "input_format": "rts-gmlc",
            "simulate_out_of_sample": True,
            "run_sced_with_persistent_forecast_errors": True,
            "start_date": "07-10-2020",
            "num_days": 2,
            "sced_horizon": 4,
            "ruc_mipgap": 0.01,
            "reserve_factor": 0.0,
            "deterministic_ruc_solver": "cbc",
            "day_ahead_pricing": "LMP",
            "symbolic_solver_labels": True,
            "deterministic_ruc_solver_options": {
                "feas": "off",
                "DivingF": "on",
            },
            "sced_solver": "cbc",
            "sced_frequency_minutes": 60,
            "ruc_horizon": 48,
            "compute_market_settlements": True,
            "monitor_all_contingencies": False,
            "output_solver_logs": False,
            "price_threshold": 1000,
            "contingency_price_threshold": 100,
            "reserve_price_threshold": 5,
        }

    @pytest.fixture
    def bidder_sim_options(
        self,
        prescient_options,
        output_dir: Path,
    ) -> PrescientOptions:
        prescient_options["plugin"] = {
            "doubleloop": {
                "module": coordinator.prescient_plugin_module,
                "bidding_generator": "10_STEAM",
            }
        }
        prescient_options["output_directory"] = str(output_dir)
        return prescient_options

    @pytest.fixture
    def self_scheduler_sim_options(
        self,
        prescient_options,
        self_scheduler_output_dir: Path,
        self_scheduler_plugin_path: Path,
    ) -> PrescientOptions:
        prescient_options["plugin"] = {
            "doubleloop": {
                "module": str(self_scheduler_plugin_path),
                "bidding_generator": "10_STEAM",
            }
        }
        prescient_options["output_directory"] = str(self_scheduler_output_dir)
        return prescient_options

    @pytest.fixture
    def run_bidder_simulator(self, bidder_sim_options: PrescientOptions) -> None:
        prescient_simulator = pytest.importorskip(
            "prescient.simulator",
            reason="Prescient (optional dependency) not available",
        )
        prescient_simulator.Prescient().simulate(**bidder_sim_options)

    # Fixed: was the METHOD_NAME placeholder; named after what it does,
    # mirroring run_bidder_simulator above.
    @pytest.fixture
    def run_self_scheduler_simulator(
        self, self_scheduler_sim_options: PrescientOptions
    ) -> None:
        prescient_simulator = pytest.importorskip(
            "prescient.simulator",
            reason="Prescient (optional dependency) not available",
        )
        prescient_simulator.Prescient().simulate(**self_scheduler_sim_options)

    @pytest.fixture
    def simulation_results_dir(self, run_bidder_simulator, output_dir):
        return output_dir

    @pytest.fixture
    def self_scheduler_simulation_results_dir(
        self, run_self_scheduler_simulator, self_scheduler_output_dir
    ):
        return self_scheduler_output_dir

    @pytest.mark.unit
    def test_prescient_outputs_exist(
        self, simulation_results_dir, self_scheduler_simulation_results_dir
    ):
        assert os.path.isdir(simulation_results_dir)
        assert os.path.isdir(self_scheduler_simulation_results_dir)
        file_names = [
            "hourly_gen_summary.csv",
            "tracker_detail.csv",
            "hourly_summary.csv",
            "bus_detail.csv",
            "overall_simulation_output.csv",
            "virtual_detail.csv",
            "bidding_model_detail.csv",
            "bidder_detail.csv",
            "daily_summary.csv",
            "line_detail.csv",
            "thermal_detail.csv",
            "runtimes.csv",
            "tracking_model_detail.csv",
            "renewables_detail.csv",
            "contingency_detail.csv",
        ]
        for f in file_names:
            file_path = os.path.join(simulation_results_dir, f)
            assert os.path.isfile(file_path)
            file_path = os.path.join(self_scheduler_simulation_results_dir, f)
            assert os.path.isfile(file_path)
from tests.testmodels import Author, Book
from tortoise.contrib import test
from tortoise.functions import Avg, Count, Sum, Upper
class TestGroupBy(test.TestCase):
    """Aggregate annotations (Count/Sum/Avg) combined with group_by, exercised
    through values(), values_list() and plain queryset result paths.

    Fixed: the filter-by-count test was named with the METHOD_NAME
    placeholder, so the test runner never collected it; it is now
    test_count_filter_group_by.
    """

    async def asyncSetUp(self) -> None:
        await super(TestGroupBy, self).asyncSetUp()
        # author1 owns 10 books rated 0..9; author2 owns 5 books rated 0..4.
        self.a1 = await Author.create(name="author1")
        self.a2 = await Author.create(name="author2")
        for i in range(10):
            await Book.create(name=f"book{i}", author=self.a1, rating=i)
        for i in range(5):
            await Book.create(name=f"book{i}", author=self.a2, rating=i)

    async def test_count_group_by(self):
        ret = (
            await Book.annotate(count=Count("id"))
            .group_by("author_id")
            .values("author_id", "count")
        )
        for item in ret:
            author_id = item.get("author_id")
            count = item.get("count")
            if author_id == self.a1.pk:
                self.assertEqual(count, 10)
            elif author_id == self.a2.pk:
                self.assertEqual(count, 5)

    async def test_count_group_by_with_join(self):
        ret = (
            await Book.annotate(count=Count("id"))
            .group_by("author__name")
            .values("author__name", "count")
        )
        self.assertListSortEqual(
            ret,
            [{"author__name": "author1", "count": 10}, {"author__name": "author2", "count": 5}],
            sorted_key="author__name",
        )

    async def test_count_filter_group_by(self):
        # filter() on an aggregate annotation becomes a HAVING clause.
        ret = (
            await Book.annotate(count=Count("id"))
            .filter(count__gt=6)
            .group_by("author_id")
            .values("author_id", "count")
        )
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0].get("count"), 10)

    async def test_sum_group_by(self):
        ret = (
            await Book.annotate(sum=Sum("rating")).group_by("author_id").values("author_id", "sum")
        )
        for item in ret:
            author_id = item.get("author_id")
            sum_ = item.get("sum")
            if author_id == self.a1.pk:
                self.assertEqual(sum_, 45.0)
            elif author_id == self.a2.pk:
                self.assertEqual(sum_, 10.0)

    async def test_sum_group_by_with_join(self):
        ret = (
            await Book.annotate(sum=Sum("rating"))
            .group_by("author__name")
            .values("author__name", "sum")
        )
        self.assertListSortEqual(
            ret,
            [{"author__name": "author1", "sum": 45.0}, {"author__name": "author2", "sum": 10.0}],
            sorted_key="author__name",
        )

    async def test_sum_filter_group_by(self):
        ret = (
            await Book.annotate(sum=Sum("rating"))
            .filter(sum__gt=11)
            .group_by("author_id")
            .values("author_id", "sum")
        )
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0].get("sum"), 45.0)

    async def test_avg_group_by(self):
        ret = (
            await Book.annotate(avg=Avg("rating")).group_by("author_id").values("author_id", "avg")
        )
        for item in ret:
            author_id = item.get("author_id")
            avg = item.get("avg")
            if author_id == self.a1.pk:
                self.assertEqual(avg, 4.5)
            elif author_id == self.a2.pk:
                self.assertEqual(avg, 2.0)

    async def test_avg_group_by_with_join(self):
        ret = (
            await Book.annotate(avg=Avg("rating"))
            .group_by("author__name")
            .values("author__name", "avg")
        )
        self.assertListSortEqual(
            ret,
            [{"author__name": "author1", "avg": 4.5}, {"author__name": "author2", "avg": 2}],
            sorted_key="author__name",
        )

    async def test_avg_filter_group_by(self):
        ret = (
            await Book.annotate(avg=Avg("rating"))
            .filter(avg__gt=3)
            .group_by("author_id")
            .values_list("author_id", "avg")
        )
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0][1], 4.5)

    async def test_count_values_list_group_by(self):
        ret = (
            await Book.annotate(count=Count("id"))
            .group_by("author_id")
            .values_list("author_id", "count")
        )
        for item in ret:
            author_id = item[0]
            count = item[1]
            if author_id == self.a1.pk:
                self.assertEqual(count, 10)
            elif author_id == self.a2.pk:
                self.assertEqual(count, 5)

    async def test_count_values_list_group_by_with_join(self):
        ret = (
            await Book.annotate(count=Count("id"))
            .group_by("author__name")
            .values_list("author__name", "count")
        )
        self.assertListSortEqual(ret, [("author1", 10), ("author2", 5)])

    async def test_count_values_list_filter_group_by(self):
        ret = (
            await Book.annotate(count=Count("id"))
            .filter(count__gt=6)
            .group_by("author_id")
            .values_list("author_id", "count")
        )
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0][1], 10)

    async def test_sum_values_list_group_by(self):
        ret = (
            await Book.annotate(sum=Sum("rating"))
            .group_by("author_id")
            .values_list("author_id", "sum")
        )
        for item in ret:
            author_id = item[0]
            sum_ = item[1]
            if author_id == self.a1.pk:
                self.assertEqual(sum_, 45.0)
            elif author_id == self.a2.pk:
                self.assertEqual(sum_, 10.0)

    async def test_sum_values_list_group_by_with_join(self):
        ret = (
            await Book.annotate(sum=Sum("rating"))
            .group_by("author__name")
            .values_list("author__name", "sum")
        )
        self.assertListSortEqual(ret, [("author1", 45.0), ("author2", 10.0)])

    async def test_sum_values_list_filter_group_by(self):
        ret = (
            await Book.annotate(sum=Sum("rating"))
            .filter(sum__gt=11)
            .group_by("author_id")
            .values_list("author_id", "sum")
        )
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0][1], 45.0)

    async def test_avg_values_list_group_by(self):
        ret = (
            await Book.annotate(avg=Avg("rating"))
            .group_by("author_id")
            .values_list("author_id", "avg")
        )
        for item in ret:
            author_id = item[0]
            avg = item[1]
            if author_id == self.a1.pk:
                self.assertEqual(avg, 4.5)
            elif author_id == self.a2.pk:
                self.assertEqual(avg, 2.0)

    async def test_avg_values_list_group_by_with_join(self):
        ret = (
            await Book.annotate(avg=Avg("rating"))
            .group_by("author__name")
            .values_list("author__name", "avg")
        )
        self.assertListSortEqual(ret, [("author1", 4.5), ("author2", 2.0)])

    async def test_avg_values_list_filter_group_by(self):
        ret = (
            await Book.annotate(avg=Avg("rating"))
            .filter(avg__gt=3)
            .group_by("author_id")
            .values_list("author_id", "avg")
        )
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0][1], 4.5)

    async def test_implicit_group_by(self):
        ret = await Author.annotate(count=Count("books")).filter(count__gt=6)
        self.assertEqual(ret[0].count, 10)

    async def test_group_by_annotate_result(self):
        ret = (
            await Book.annotate(upper_name=Upper("author__name"), count=Count("id"))
            .group_by("upper_name")
            .values("upper_name", "count")
        )
        self.assertListSortEqual(
            ret,
            [{"upper_name": "AUTHOR1", "count": 10}, {"upper_name": "AUTHOR2", "count": 5}],
            sorted_key="upper_name",
        )
"""
Base module for all text based default handler.
Contains various text based utility methods
"""
import logging
import os
import re
import string
import unicodedata
from abc import ABC
import torch
import torch.nn.functional as F
from captum.attr import LayerIntegratedGradients
from torchtext.data.utils import get_tokenizer
from ..utils.util import CLEANUP_REGEX
from .base_handler import BaseHandler
from .contractions import CONTRACTION_MAP
logger = logging.getLogger(__name__)
# Single alternation pattern matching any known contraction key,
# case-insensitively; compiled once at import time.
CONTRACTIONS_PATTERN = re.compile(
    "({})".format("|".join(CONTRACTION_MAP.keys())),
    flags=re.IGNORECASE | re.DOTALL,
)
class TextHandler(BaseHandler, ABC):
    """
    Base class for all text based default handler.
    Contains various text based utility methods
    """

    def __init__(self):
        super().__init__()
        self.source_vocab = None
        self.tokenizer = get_tokenizer("basic_english")
        self.input_text = None
        self.lig = None
        self.initialized = None

    # Fixed: was the METHOD_NAME placeholder; must be `initialize` so it
    # overrides the BaseHandler hook invoked by the serving framework
    # (the body itself chains to super().initialize(context)).
    def initialize(self, context):
        """
        Loads the model and Initializes the necessary artifacts
        """
        super().initialize(context)
        self.initialized = False
        source_vocab = (
            self.manifest["model"]["sourceVocab"]
            if "sourceVocab" in self.manifest["model"]
            else None
        )
        if source_vocab:
            # Backward compatibility
            self.source_vocab = torch.load(source_vocab)
        else:
            self.source_vocab = torch.load(self.get_source_vocab_path(context))
        # Captum initialization
        self.lig = LayerIntegratedGradients(self.model, self.model.embedding)
        self.initialized = True

    def get_source_vocab_path(self, ctx):
        """Return the path of source_vocab.pt inside the model directory,
        raising if the file is missing."""
        properties = ctx.system_properties
        model_dir = properties.get("model_dir")
        source_vocab_path = os.path.join(model_dir, "source_vocab.pt")
        if os.path.isfile(source_vocab_path):
            return source_vocab_path
        else:
            raise Exception(
                "Missing the source_vocab file. Refer default handler "
                "documentation for details on using text_handler."
            )

    def _expand_contractions(self, text):
        """
        Expands the contracted words in the text
        """

        def expand_match(contraction):
            match = contraction.group(0)
            first_char = match[0]
            expanded_contraction = (
                CONTRACTION_MAP.get(match)
                if CONTRACTION_MAP.get(match)
                else CONTRACTION_MAP.get(match.lower())
            )
            # Preserve the casing of the first character of the original match.
            expanded_contraction = first_char + expanded_contraction[1:]
            return expanded_contraction

        text = CONTRACTIONS_PATTERN.sub(expand_match, text)
        text = re.sub("'", "", text)
        return text

    def _remove_accented_characters(self, text):
        """
        Removes remove_accented_characters
        """
        # NFKD-decompose, then drop anything that does not survive an ASCII
        # round-trip (i.e. the accent marks).
        text = (
            unicodedata.normalize("NFKD", text)
            .encode("ascii", "ignore")
            .decode("utf-8", "ignore")
        )
        return text

    def _remove_html_tags(self, text):
        """
        Removes html tags
        """
        clean_text = CLEANUP_REGEX.sub("", text)
        return clean_text

    def _remove_puncutation(self, *args, **kwargs):
        """
        Mispelled in original version. This is a compat layer
        """
        return self._remove_punctuation(*args, **kwargs)

    def _remove_punctuation(self, text):
        """
        Removes punctuation
        """
        return text.translate(str.maketrans("", "", string.punctuation))

    def _tokenize(self, text):
        # Tokenize with the basic_english tokenizer configured in __init__.
        return self.tokenizer(text)

    def get_word_token(self, input_tokens):
        """
        Constructs word tokens from text
        """
        # Remove unicode space character from BPE Tokeniser
        tokens = [token.replace("Ġ", "") for token in input_tokens]
        return tokens

    def summarize_attributions(self, attributions):
        """
        Summarises the attribution across multiple runs
        """
        # NOTE(review): F.softmax is called without an explicit dim and so
        # relies on the deprecated implicit-dim behavior — confirm the
        # intended axis.
        attributions = F.softmax(attributions)
        attributions_sum = attributions.sum(dim=-1)
        logger.info("attributions sum shape %d", attributions_sum.shape)
        attributions = attributions / torch.norm(attributions_sum)
        return attributions
'''
Camera
======
Core class for acquiring the camera and converting its input into a
:class:`~kivy.graphics.texture.Texture`.
.. versionchanged:: 1.10.0
The pygst and videocapture providers have been removed.
.. versionchanged:: 1.8.0
There is now 2 distinct Gstreamer implementation: one using Gi/Gst
working for both Python 2+3 with Gstreamer 1.0, and one using PyGST
working only for Python 2 + Gstreamer 0.10.
'''
__all__ = ('CameraBase', 'Camera')
from kivy.utils import platform
from kivy.event import EventDispatcher
from kivy.logger import Logger
from kivy.core import core_select_lib
class CameraBase(EventDispatcher):
    '''Abstract Camera Widget class.
    Concrete camera classes must implement initialization and
    frame capturing to a buffer that can be uploaded to the gpu.
    :Parameters:
        `index`: int
            Source index of the camera.
        `size`: tuple (int, int)
            Size at which the image is drawn. If no size is specified,
            it defaults to the resolution of the camera image.
        `resolution`: tuple (int, int)
            Resolution to try to request from the camera.
            Used in the gstreamer pipeline by forcing the appsink caps
            to this resolution. If the camera doesn't support the resolution,
            a negotiation error might be thrown.
    :Events:
        `on_load`
            Fired when the camera is loaded and the texture has become
            available.
        `on_texture`
            Fired each time the camera texture is updated.
    '''

    __events__ = ('on_load', 'on_texture')

    def __init__(self, **kwargs):
        kwargs.setdefault('stopped', False)
        kwargs.setdefault('resolution', (640, 480))
        kwargs.setdefault('index', 0)
        self.stopped = kwargs.get('stopped')
        self._resolution = kwargs.get('resolution')
        self._index = kwargs.get('index')
        self._buffer = None
        self._format = 'rgb'
        self._texture = None
        self.capture_device = None
        kwargs.setdefault('size', self._resolution)
        super(CameraBase, self).__init__()
        self.init_camera()
        if not self.stopped:
            self.start()

    def _set_resolution(self, res):
        # Changing the resolution requires re-initializing the capture device.
        self._resolution = res
        self.init_camera()

    def _get_resolution(self):
        return self._resolution

    resolution = property(lambda self: self._get_resolution(),
                          lambda self, x: self._set_resolution(x),
                          doc='Resolution of camera capture (width, height)')

    def _set_index(self, x):
        if x == self._index:
            return
        self._index = x
        self.init_camera()

    def _get_index(self):
        # Fixed: previously returned the non-existent attribute `self._x`,
        # raising AttributeError whenever the `index` property was read.
        # Also renamed from the METHOD_NAME placeholder, matching the
        # _get_resolution/_set_resolution naming convention.
        return self._index

    index = property(lambda self: self._get_index(),
                     lambda self, x: self._set_index(x),
                     doc='Source index of the camera')

    def _get_texture(self):
        return self._texture

    texture = property(lambda self: self._get_texture(),
                       doc='Return the camera texture with the latest capture')

    def init_camera(self):
        '''Initialize the camera (internal)'''
        pass

    def start(self):
        '''Start the camera acquire'''
        self.stopped = False

    def stop(self):
        '''Release the camera'''
        self.stopped = True

    def _update(self, dt):
        '''Update the camera (internal)'''
        pass

    def _copy_to_gpu(self):
        '''Copy the buffer into the texture.'''
        if self._texture is None:
            Logger.debug('Camera: copy_to_gpu() failed, _texture is None !')
            return
        self._texture.blit_buffer(self._buffer, colorfmt=self._format)
        self._buffer = None
        self.dispatch('on_texture')

    def on_texture(self):
        pass

    def on_load(self):
        pass
# Load the appropriate providers
# Platform-specific capture backends are tried in order; core_select_lib
# returns the first one that imports successfully.
providers = ()
if platform in ['macosx', 'ios']:
    providers += (('avfoundation', 'camera_avfoundation',
                   'CameraAVFoundation'), )
elif platform == 'android':
    providers += (('android', 'camera_android', 'CameraAndroid'), )
else:
    providers += (('picamera', 'camera_picamera', 'CameraPiCamera'), )
    providers += (('gi', 'camera_gi', 'CameraGi'), )
    providers += (('opencv', 'camera_opencv', 'CameraOpenCV'), )

# Fixed: the original line carried stray trailing junk (" |"), a syntax
# error; also dropped the redundant parentheses around `providers`.
Camera = core_select_lib('camera', providers)
from collections import deque
from functools import partial
from typing import List
import pytest
import psycopg
from psycopg import waiting
from psycopg import pq
from psycopg.conninfo import conninfo_to_dict, make_conninfo
def test_connect_operationalerror_pgconn(generators, dsn, monkeypatch):
    """Check that when generators.connect() fails, the resulting
    OperationalError has a pgconn attribute set with needs_password.
    """
    # Connect once successfully to learn whether a password was used at all.
    gen = generators.connect(dsn)
    pgconn = waiting.wait_conn(gen)
    if not pgconn.used_password:
        pytest.skip("test connection needs no password")
    with monkeypatch.context() as m:
        try:
            m.delenv("PGPASSWORD", raising=True)
        except KeyError:
            # The password came from the dsn, not the environment: strip it
            # from the dsn instead.
            info = conninfo_to_dict(dsn)
            del info["password"] # should not raise per check above.
            dsn = make_conninfo(**info)
        gen = generators.connect(dsn)
        with pytest.raises(
            psycopg.OperationalError, match="connection failed:"
        ) as excinfo:
            waiting.wait_conn(gen)
        # The failed low-level connection stays inspectable on the exception.
        pgconn = excinfo.value.pgconn
        assert pgconn is not None
        assert pgconn.needs_password
        assert b"fe_sendauth: no password supplied" in pgconn.error_message
        assert pgconn.status == pq.ConnStatus.BAD.value
        assert pgconn.transaction_status == pq.TransactionStatus.UNKNOWN.value
        assert pgconn.pipeline_status == pq.PipelineStatus.OFF.value
        with pytest.raises(psycopg.OperationalError, match="connection is closed"):
            pgconn.exec_(b"select 1")
@pytest.fixture
def pipeline(pgconn):
    """Put `pgconn` in non-blocking pipeline mode for the duration of a test."""
    nb, pgconn.nonblocking = pgconn.nonblocking, True
    assert pgconn.nonblocking
    pgconn.enter_pipeline_mode()
    yield
    # Restore the previous state; exit pipeline mode only if still active.
    if pgconn.pipeline_status:
        pgconn.exit_pipeline_mode()
    pgconn.nonblocking = nb
def _run_pipeline_communicate(pgconn, generators, commands, expected_statuses):
    """Drive `commands` through the pipeline, collecting one status per result
    until as many statuses as `expected_statuses` arrived, then compare them.
    """
    actual_statuses: List[pq.ExecStatus] = []
    while len(actual_statuses) != len(expected_statuses):
        if commands:
            # NOTE(review): assumes pipeline_communicate consumes items from
            # the `commands` deque as it sends them — confirm against the
            # generators implementation.
            gen = generators.pipeline_communicate(pgconn, commands)
            results = waiting.wait(gen, pgconn.socket)
            # pipeline_communicate yields one-element result tuples.
            for (result,) in results:
                actual_statuses.append(result.status)
        else:
            # All commands sent: keep fetching until every result arrived.
            gen = generators.fetch_many(pgconn)
            results = waiting.wait(gen, pgconn.socket)
            for result in results:
                actual_statuses.append(result.status)
    assert actual_statuses == expected_statuses
@pytest.mark.pipeline
def test_pipeline_communicate_multi_pipeline(pgconn, pipeline, generators):
    # Two queries, each followed by its own sync point.
    commands = deque(
        [
            partial(pgconn.send_query_params, b"select 1", None),
            pgconn.pipeline_sync,
            partial(pgconn.send_query_params, b"select 2", None),
            pgconn.pipeline_sync,
        ]
    )
    expected_statuses = [
        pq.ExecStatus.TUPLES_OK,
        pq.ExecStatus.PIPELINE_SYNC,
        pq.ExecStatus.TUPLES_OK,
        pq.ExecStatus.PIPELINE_SYNC,
    ]
    _run_pipeline_communicate(pgconn, generators, commands, expected_statuses)
@pytest.mark.pipeline
def test_pipeline_communicate_no_sync(pgconn, pipeline, generators):
    """Queries followed by a flush request (no sync point) all complete.

    Fixed: was named with the METHOD_NAME placeholder, so pytest never
    collected it as a test.
    """
    numqueries = 10
    commands = deque(
        [partial(pgconn.send_query_params, b"select repeat('xyzxz', 12)", None)]
        * numqueries
        + [pgconn.send_flush_request]
    )
    expected_statuses = [pq.ExecStatus.TUPLES_OK] * numqueries
    _run_pipeline_communicate(pgconn, generators, commands, expected_statuses)
@pytest.fixture
def pipeline_demo(pgconn):
    """Create an unlogged scratch table for pipeline tests; drop it afterwards."""
    assert pgconn.pipeline_status == 0
    res = pgconn.exec_(b"DROP TABLE IF EXISTS pg_pipeline")
    assert res.status == pq.ExecStatus.COMMAND_OK, res.error_message
    res = pgconn.exec_(
        b"CREATE UNLOGGED TABLE pg_pipeline(" b" id serial primary key, itemno integer)"
    )
    assert res.status == pq.ExecStatus.COMMAND_OK, res.error_message
    yield "pg_pipeline"
    res = pgconn.exec_(b"DROP TABLE IF EXISTS pg_pipeline")
    assert res.status == pq.ExecStatus.COMMAND_OK, res.error_message
# TODOCRDB: 1 doesn't get rolled back. Open a ticket?
@pytest.mark.pipeline
@pytest.mark.crdb("skip", reason="pipeline aborted")
def test_pipeline_communicate_abort(pgconn, pipeline_demo, pipeline, generators):
    # The failing query aborts the remainder of the batch up to the next sync
    # point; the insert sent after that sync succeeds.
    insert_sql = b"insert into pg_pipeline(itemno) values ($1)"
    commands = deque(
        [
            partial(pgconn.send_query_params, insert_sql, [b"1"]),
            partial(pgconn.send_query_params, b"select no_such_function(1)", None),
            partial(pgconn.send_query_params, insert_sql, [b"2"]),
            pgconn.pipeline_sync,
            partial(pgconn.send_query_params, insert_sql, [b"3"]),
            pgconn.pipeline_sync,
        ]
    )
    expected_statuses = [
        pq.ExecStatus.COMMAND_OK,
        pq.ExecStatus.FATAL_ERROR,
        pq.ExecStatus.PIPELINE_ABORTED,
        pq.ExecStatus.PIPELINE_SYNC,
        pq.ExecStatus.COMMAND_OK,
        pq.ExecStatus.PIPELINE_SYNC,
    ]
    _run_pipeline_communicate(pgconn, generators, commands, expected_statuses)
    pgconn.exit_pipeline_mode()
    # Only the insert issued after the sync point survived the rollback.
    res = pgconn.exec_(b"select itemno from pg_pipeline order by itemno")
    assert res.ntuples == 1
    assert res.get_value(0, 0) == b"3"
@pytest.fixture
def pipeline_uniqviol(pgconn):
    """Scratch table with a prepared insert, inside an open transaction, used
    to trigger a unique-key violation from pipelined statements."""
    if not psycopg.Pipeline.is_supported():
        pytest.skip(psycopg.Pipeline._not_supported_reason())
    assert pgconn.pipeline_status == 0
    res = pgconn.exec_(b"DROP TABLE IF EXISTS pg_pipeline_uniqviol")
    assert res.status == pq.ExecStatus.COMMAND_OK, res.error_message
    res = pgconn.exec_(
        b"CREATE UNLOGGED TABLE pg_pipeline_uniqviol("
        b" id bigint primary key, idata bigint)"
    )
    assert res.status == pq.ExecStatus.COMMAND_OK, res.error_message
    res = pgconn.exec_(b"BEGIN")
    assert res.status == pq.ExecStatus.COMMAND_OK, res.error_message
    res = pgconn.prepare(
        b"insertion",
        b"insert into pg_pipeline_uniqviol values ($1, $2) returning id",
    )
    assert res.status == pq.ExecStatus.COMMAND_OK, res.error_message
    return "pg_pipeline_uniqviol"
# NOTE(review): marker added for consistency with the sibling tests that use
# the `pipeline` fixture; the last line also carried stray trailing junk
# (" |"), a syntax error, now removed.
@pytest.mark.pipeline
def test_pipeline_communicate_uniqviol(pgconn, pipeline_uniqviol, pipeline, generators):
    """A unique-key violation aborts the remainder of the pipelined batch."""
    commands = deque(
        [
            partial(pgconn.send_query_prepared, b"insertion", [b"1", b"2"]),
            partial(pgconn.send_query_prepared, b"insertion", [b"2", b"2"]),
            partial(pgconn.send_query_prepared, b"insertion", [b"1", b"2"]),
            partial(pgconn.send_query_prepared, b"insertion", [b"3", b"2"]),
            partial(pgconn.send_query_prepared, b"insertion", [b"4", b"2"]),
            partial(pgconn.send_query_params, b"commit", None),
        ]
    )
    expected_statuses = [
        pq.ExecStatus.TUPLES_OK,
        pq.ExecStatus.TUPLES_OK,
        pq.ExecStatus.FATAL_ERROR,
        pq.ExecStatus.PIPELINE_ABORTED,
        pq.ExecStatus.PIPELINE_ABORTED,
        pq.ExecStatus.PIPELINE_ABORTED,
    ]
    _run_pipeline_communicate(pgconn, generators, commands, expected_statuses)
# -*- coding: utf-8 -*-
# This file is part of the pyMOR project (https://www.pymor.org).
# Copyright pyMOR developers and contributors. All rights reserved.
# License: BSD 2-Clause License (https://opensource.org/licenses/BSD-2-Clause)
from pymor.core.config import config
config.require('DUNEGDT')
from numbers import Integral
import numpy as np
from dune.xt.la import IstlVector
from pymor.operators.list import LinearComplexifiedListVectorArrayOperatorBase
from pymor.vectorarrays.interface import _create_random_values
from pymor.vectorarrays.list import ComplexifiedListVectorSpace, CopyOnWriteVector
class DuneXTVector(CopyOnWriteVector):
    """Wraps a vector from dune-xt to make it usable with ListVectorArray.
    Parameters
    ----------
    impl
        The actual vector from dune.xt.la, usually IstlVector.
    """
    def __init__(self, impl):
        self.impl = impl
    @classmethod
    def from_instance(cls, instance):
        # Shares the underlying dune vector; copying happens lazily on write.
        return cls(instance.impl)
    def _copy_data(self):
        # NOTE(review): presumably the copy-on-write hook of CopyOnWriteVector,
        # deep-copying the dune vector before the first in-place mutation —
        # confirm against the base class.
        self.impl = self.impl.copy(True)
    def _scal(self, alpha):
        self.impl.scal(alpha)
    def _axpy(self, alpha, x):
        self.impl.axpy(alpha, x.impl)
    def inner(self, other):
        return self.impl.dot(other.impl)
    def norm(self):
        return self.impl.l2_norm()
    def norm2(self):
        # Squared Euclidean norm.
        return self.impl.l2_norm() ** 2
    def sup_norm(self):
        return self.impl.sup_norm()
    def dofs(self, dof_indices):
        impl = self.impl
        return np.array([impl[i] for i in dof_indices])
    def amax(self):
        # Returns (index, absolute value) of the largest-magnitude entry.
        _amax = self.impl.amax()
        return _amax[0], _amax[1]
    def __add__(self, other):
        return DuneXTVector(self.impl + other.impl)
    def __iadd__(self, other):
        self.impl += other.impl
        return self
    __radd__ = __add__
    def __sub__(self, other):
        return DuneXTVector(self.impl - other.impl)
    def __isub__(self, other):
        self.impl -= other.impl
        return self
    def __mul__(self, other):
        return DuneXTVector(self.impl * other)
    def __imul__(self, other):
        self.impl *= other
        return self
    def __neg__(self):
        return self * (-1)
    def to_numpy(self, ensure_copy=False):
        return np.array(self.impl, copy=ensure_copy)
class DuneXTVectorSpace(ComplexifiedListVectorSpace):
    """A |VectorSpace| yielding DuneXTVector.
    Parameters
    ----------
    dim
        Dimension of the |VectorSpace|, i.e., length of the resulting vectors.
    vector_type
        Type of the actual vector from dune.xt.la, usually IstlVector.
    id
        Identifier of the |VectorSpace|.
    """

    real_vector_type = DuneXTVector

    def __init__(self, dim, dune_vector_type=IstlVector, id='STATE'):
        assert isinstance(dim, Integral)
        dim = int(dim)
        self.__auto_init(locals())

    def __eq__(self, other):
        return type(other) is DuneXTVectorSpace \
            and self.dune_vector_type == other.dune_vector_type \
            and self.dim == other.dim \
            and self.id == other.id

    # since we implement __eq__, we also need to implement __hash__
    def __hash__(self):
        return id(self.dune_vector_type) + hash(self.dim)

    def real_zero_vector(self):
        return DuneXTVector(self.dune_vector_type(self.dim, 0.))

    def real_full_vector(self, value):
        return DuneXTVector(self.dune_vector_type(self.dim, value))

    # Fixed: was the METHOD_NAME placeholder; must be `real_random_vector`
    # to override the corresponding hook of the complexified list vector
    # space base class (cf. real_zero_vector/real_full_vector above).
    def real_random_vector(self, distribution, **kwargs):
        # Draw values with pyMOR's helper, then copy them into a dune vector.
        values = _create_random_values(self.dim, distribution, **kwargs)
        return self.real_vector_from_numpy(values)

    def real_vector_from_numpy(self, data, ensure_copy=False):
        v = self.real_zero_vector()
        # Zero-copy view into the dune vector; writing fills it in place.
        np_view = np.array(v.impl, copy=False)
        np_view[:] = data
        return v

    def real_make_vector(self, obj):
        return DuneXTVector(obj)
class DuneXTMatrixOperator(LinearComplexifiedListVectorArrayOperatorBase):
    """Wraps a dune-xt matrix as an |Operator|.

    Fixed: the last line carried stray trailing junk (" |"), a syntax error.

    Parameters
    ----------
    matrix
        The actual matrix from dune.xt.la, usually IstlMatrix.
    source_id
        Identifier of the source |VectorSpace|.
    range_id
        Identifier of the source |VectorSpace|.
    solver_options
        If specified, either a string or a dict specifying the solver used in apply_inverse. See
        https://zivgitlab.uni-muenster.de/ag-ohlberger/dune-community/dune-xt/-/tree/master/dune/xt/la/solver
        for available options, depending on the type of `matrix`. E.g., for
        dune.xt.la.IstlSparseMatrix, (as can be queried from dune.xt.la.IstlSparseMatrixSolver
        via `types()` and `options(type)`):
        - 'bicgstab.ssor'
        - 'bicgstab.amg.ssor'
        - 'bicgstab.amg.ilu0'
        - 'bicgstab.ilut'
        - 'bicgstab'
        - 'cg'
    name
        Optional name of the resulting |Operator|.
    """

    linear = True

    def __init__(self, matrix, source_id='STATE', range_id='STATE', solver_options=None, name=None):
        self.source = DuneXTVectorSpace(matrix.cols, matrix.vector_type(), source_id)
        self.range = DuneXTVectorSpace(matrix.rows, matrix.vector_type(), range_id)
        self.__auto_init(locals())

    def _real_apply_one_vector(self, u, mu=None, prepare_data=None):
        # r = matrix @ u
        r = self.range.real_zero_vector()
        self.matrix.mv(u.impl, r.impl)
        return r

    def _apply_adjoint_one_vector(self, v, mu=None, prepare_data=None):
        # r = matrix^T @ v
        r = self.source.real_zero_vector()
        self.matrix.mtv(v.impl, r.impl)
        return r

    def _real_apply_inverse_one_vector(self, v, mu=None, initial_guess=None,
                                       least_squares=False, prepare_data=None):
        if least_squares:
            raise NotImplementedError
        r = (self.source.real_zero_vector() if initial_guess is None else
             initial_guess.copy(deep=True))
        options = self.solver_options.get('inverse') if self.solver_options else None
        from dune.xt.la import make_solver
        solver = make_solver(self.matrix)
        if options:
            solver.apply(v.impl, r.impl, options)
        else:
            solver.apply(v.impl, r.impl)
        return r

    def _assemble_lincomb(self, operators, coefficients, identity_shift=0., solver_options=None, name=None):
        # Only the all-DuneXTMatrixOperator, real-coefficient, shift-free case
        # is handled; returning None lets pyMOR fall back to a generic
        # LincombOperator.
        if not all(isinstance(op, DuneXTMatrixOperator) for op in operators):
            return None
        if identity_shift != 0:
            return None
        if np.iscomplexobj(coefficients):
            return None
        if coefficients[0] == 1:
            matrix = operators[0].matrix.copy()
        else:
            matrix = operators[0].matrix * coefficients[0]
        for op, c in zip(operators[1:], coefficients[1:]):
            matrix.axpy(c, op.matrix)  # TODO: Not guaranteed to work for all backends! For different
            # sparsity patterns one would have to extract the patterns from the pruned
            # matrices, merge them and create a new matrix.
        return DuneXTMatrixOperator(matrix, self.source.id, self.range.id, solver_options=solver_options, name=name)
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import json
"""
LLDB type summary providers for common Firestore types.
This is primarily useful for debugging Firestore internals. It will add useful
summaries and consolidate common types in a way that makes them easier to
observe in the debugger.
Use this by adding the following to your ~/.lldbinit file:
command script import ~/path/to/firebase-ios-sdk/scripts/lldb/firestore.py
Most of this implementation is based on "Variable Formatting" in the LLDB online
manual: https://lldb.llvm.org/use/variable.html. There are two major features
we're making use of:
* Summary Providers: these are classes or functions that take an object and
produce a (typically one line) summary of the type
* Synthetic Children Providers: these are classes that provide an alternative
view of the data. The children that are synthesized here show up in the
graphical debugger.
"""
class ForwardingSynthProvider(object):
    """A synthetic child provider that forwards all methods to another provider.
    Override the `delegate` method to customize the target to which this forwards.
    """
    def __init__(self, value, params):
        # `params` is required by the LLDB synthetic-provider API but unused.
        self.value = value
    def delegate(self):
        # Target all child accessors below are forwarded to.
        return self.value
    def has_children(self):
        return self.delegate().MightHaveChildren()
    def num_children(self):
        return self.delegate().GetNumChildren()
    def get_child_index(self, name):
        return self.delegate().GetIndexOfChildWithName(name)
    def get_child_at_index(self, index):
        return self.delegate().GetChildAtIndex(index)
    def update(self):
        # No additional state so nothing needs updating when the value changes.
        pass
# Abseil
class AbseilOptional_SynthProvider(object):
    """Synthetic child provider that hides the internals of absl::optional."""

    def __init__(self, value, params):
        self.value = value
        self.engaged = None
        self.data = None

    def update(self):
        # Drill through the nested `optional_internal` wrapper types until we
        # reach the innermost absl::optional container.
        current = self.value
        while current.GetNumChildren() > 0:
            inner = current.GetChildAtIndex(0)
            if not inner.IsValid():
                break
            if 'optional_internal' not in inner.GetType().GetName():
                break
            current = inner

        # `current` now points at the innermost container: cache its fields.
        self.engaged = current.GetChildMemberWithName('engaged_')
        if self.has_children():
            self.data = current.GetChildMemberWithName('data_')
        else:
            self.data = None

    def has_children(self):
        # Engaged flag is nonzero when the optional holds a value.
        return self.engaged.GetValueAsUnsigned(0) != 0

    def num_children(self):
        # engaged_ is always shown; data_ only when engaged.
        return 1 + int(self.has_children())

    def get_child_index(self, name):
        return {'engaged_': 0, 'data_': 1}.get(name, -1)

    def get_child_at_index(self, index):
        if index == 0:
            return self.engaged
        if index == 1:
            return self.data
        return None
def AbseilOptional_SummaryProvider(value, params):
    """One-line summary for absl::optional: whether a value is present.

    Operates on the synthetic children above: MightHaveChildren reflects
    the provider's has_children, i.e. the engaged state.
    """
    engaged = value.MightHaveChildren()
    return 'engaged=' + format_bool(engaged)
# model
class DatabaseId_SynthProvider(ForwardingSynthProvider):
    """Makes DatabaseId behave as if `*rep_` were inline, hiding its
    `shared_ptr<Rep>` implementation details.
    """

    def delegate(self):
        rep = self.value.GetChildMemberWithName('rep_')
        return deref_shared(rep)
def DatabaseId_SummaryProvider(value, params):
    """Summarizes DatabaseId as `"project_id/database_id"`.

    Operates on the result of the SynthProvider, so `value` is *rep_.
    """
    project = get_string(value.GetChildMemberWithName('project_id'))
    database = get_string(value.GetChildMemberWithName('database_id'))
    return format_string(project + '/' + database)
def DocumentKey_SummaryProvider(value, params):
    """Summarizes DocumentKey as if path_->segments_ were inline and a
    single, slash-delimited string like `"users/foo"`.
    """
    path = deref_shared(value.GetChildMemberWithName('path_'))
    return path.GetSummary()
def ResourcePath_SummaryProvider(value, params):
    """Summarizes ResourcePath as one slash-delimited string like
    `"users/foo"` built from its segments_.
    """
    segments = value.GetChildMemberWithName('segments_')
    parts = []
    for segment in segments:
        parts.append(get_string(segment))
    return format_string('/'.join(parts))
# api
def DocumentReference_SummaryProvider(value, params):
    """Summarizes DocumentReference as a single slash-delimited string like
    `"users/foo"`, reusing the summary of its document key.
    """
    key = value.GetChildMemberWithName('key_')
    return key.GetSummary()
def DocumentSnapshot_SummaryProvider(value, params):
    """Summarizes DocumentSnapshot as a single slash-delimited string like
    `"users/foo"` naming the path of the document in the snapshot.
    """
    key = value.GetChildMemberWithName('internal_key_')
    return key.GetSummary()
# Objective-C
def FIRDocumentReference_SummaryProvider(value, params):
    """Summarizes the Objective-C wrapper via its C++ _documentReference."""
    inner = value.GetChildMemberWithName('_documentReference')
    return inner.GetSummary()
def FIRDocumentSnapshot_SummaryProvider(value, params):
    """Summarizes the Objective-C wrapper via its C++ _snapshot member."""
    inner = value.GetChildMemberWithName('_snapshot')
    return inner.GetSummary()
def get_string(value):
    """Returns a Python string from the underlying LLDB SBValue.

    The summary is rendered by LLDB as a C-style string literal, which
    Python's literal parser can consume directly.
    TODO(wilhuff): read the bytes via the SBData API instead of parsing.
    """
    literal = value.GetSummary()
    return ast.literal_eval(literal)
def format_string(string):
    """Formats a Python string as a C++ string literal.

    JSON escaping is close enough to C escaping for display purposes.
    """
    escaped = json.dumps(string)
    return escaped
def format_bool(value):
    """Formats a truthy Python value as a C++ bool literal."""
    if value:
        return 'true'
    return 'false'
def deref_shared(value):
    """Dereferences a shared_ptr SBValue, returning the pointee.

    NOTE(review): `__ptr_` looks like libc++'s raw-pointer field; other
    standard-library layouts would need a different member name.
    """
    raw = value.GetChildMemberWithName('__ptr_')
    return raw.Dereference()
def __lldb_init_module(debugger, params):
    """Registers all Firestore summary and synthetic providers with LLDB.

    LLDB invokes this automatically when the module is loaded via
    `command script import`.
    """
    def run(command):
        # Thin wrapper so the registration helpers read declaratively.
        debugger.HandleCommand(command)

    def add_summary(provider, typename, *args):
        flags = ' '.join(args)
        run('type summary add -w firestore -F {0} {1} {2}'.format(
            qname(provider), flags, typename))

    def add_synthetic(provider, typename, *args):
        flags = ' '.join(args)
        run('type synthetic add -l {0} -w firestore {1} {2}'.format(
            qname(provider), flags, typename))

    # Abseil: regex matcher covers any inline-namespaced absl::optional.
    optional_matcher = '-x absl::[^:]*::optional<.*>'
    add_summary(AbseilOptional_SummaryProvider, optional_matcher, '-e')
    add_synthetic(AbseilOptional_SynthProvider, optional_matcher)

    api = 'firebase::firestore::api::'
    add_summary(DocumentReference_SummaryProvider, api + 'DocumentReference')
    add_summary(DocumentSnapshot_SummaryProvider, api + 'DocumentSnapshot', '-e')

    model = 'firebase::firestore::model::'
    add_summary(DocumentKey_SummaryProvider, model + 'DocumentKey')
    add_summary(ResourcePath_SummaryProvider, model + 'ResourcePath')
    add_summary(DatabaseId_SummaryProvider, model + 'DatabaseId')
    add_synthetic(DatabaseId_SynthProvider, model + 'DatabaseId')

    add_summary(FIRDocumentReference_SummaryProvider, 'FIRDocumentReference')
    add_summary(FIRDocumentSnapshot_SummaryProvider, 'FIRDocumentSnapshot', '-e')

    run('type category enable firestore')
def qname(fn):
    """Returns the module-qualified name of the given class or function."""
    return '%s.%s' % (__name__, fn.__name__)
'''
Test some of the base class functionality independent of derived classes.
Just simpler to do the testing here
Tests the Process methods and the Mover's get_move
'''
from datetime import datetime, timedelta
import numpy as np
from pytest import raises
from ..conftest import sample_sc_release
from gnome.utilities.inf_datetime import InfDateTime
from gnome.movers import PyMover
def test_exceptions():
    """A zero-length active range must be rejected by PyMover.

    `datetime.now()` is hoisted out of the `raises` block so that only the
    constructor call can satisfy the expected ValueError — keeping the
    raising region minimal avoids false positives.
    """
    now = datetime.now()
    with raises(ValueError):
        _mover = PyMover(active_range=(now, now))
def test_default_properties():
    """A freshly constructed PyMover carries the documented defaults."""
    mover = PyMover()

    assert mover.on is True
    assert mover.active_range == (InfDateTime('-inf'), InfDateTime('inf'))
    assert mover.make_default_refs is True
    assert mover.default_num_method == 'RK2'
class TestActive(object):
    """Borderline cases for the `active` flag that
    prepare_for_model_step() derives from a mover's active_range.

    NOTE(review): `mv` is a shared class attribute whose active_range is
    mutated by several tests; tests needing a pristine mover build their
    own instance.
    """
    time_step = 15 * 60  # seconds
    model_time = datetime(2012, 8, 20, 13)
    sc = sample_sc_release(1, (0, 0, 0))  # not used for anything
    mv = PyMover()

    def test_active_default(self):
        mv = PyMover()  # active range defaults to (-Inf, Inf)
        mv.prepare_for_model_step(self.sc, self.time_step, self.model_time)
        # model_time should be within range at any datetime.
        assert mv.active is True

    def test_active_start_modeltime(self):
        mv = PyMover(active_range=(self.model_time, InfDateTime('inf')))
        mv.prepare_for_model_step(self.sc, self.time_step, self.model_time)
        # model_time should be within active range
        assert mv.active is True

    def test_active_start_after_one_timestep(self):
        start_time = self.model_time + timedelta(seconds=self.time_step)
        mv = PyMover(active_range=(start_time, InfDateTime('inf')))
        mv.prepare_for_model_step(self.sc, self.time_step, self.model_time)
        # the range only begins a full step after model_time, so inactive
        assert mv.active is False

    def test_active_start_after_half_timestep(self):
        self.mv.active_range = ((self.model_time +
                                 timedelta(seconds=self.time_step / 2)),
                                InfDateTime('inf'))
        self.mv.prepare_for_model_step(self.sc, self.time_step,
                                       self.model_time)
        # range begins half-way through this step, so still active
        assert self.mv.active is True

    # Next tests cover borderline cases at the stop end of the range.
    def test_active_stop_greater_than_timestep(self):
        # Renamed from the METHOD_NAME placeholder so pytest discovers it;
        # the name reflects that the stop is 1.5 timesteps past model_time.
        self.mv.active_range = (self.model_time,
                                (self.model_time +
                                 timedelta(seconds=1.5 * self.time_step)))
        self.mv.prepare_for_model_step(self.sc, self.time_step,
                                       self.model_time)
        # model_time + 1.5 * time_step should be within active range
        assert self.mv.active is True

    def test_active_stop_after_half_timestep(self):
        self.mv.active_range = (self.model_time,
                                (self.model_time +
                                 timedelta(seconds=0.5 * self.time_step)))
        self.mv.prepare_for_model_step(self.sc, self.time_step,
                                       self.model_time)
        # stop is model_time + 0.5 * time_step (the original comment said
        # 1.5 — a copy-paste slip); the range still overlaps this step
        assert self.mv.active is True

    def test_active_stop_less_than_half_timestep(self):
        self.mv.active_range = (self.model_time,
                                (self.model_time +
                                 timedelta(seconds=0.25 * self.time_step)))
        self.mv.prepare_for_model_step(self.sc, self.time_step,
                                       self.model_time)
        # stop is only a quarter-step past model_time, so inactive
        assert self.mv.active is False
def test_get_move():
    """The base-class get_move must return an all-NaN delta array."""
    model_time = datetime(2012, 8, 20, 13)
    time_step = 15 * 60  # seconds
    sc = sample_sc_release(10, (0, 0, 0))  # not used for anything
    mover = PyMover()

    delta = mover.get_move(sc, time_step, model_time)
    assert np.all(np.isnan(delta))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.