| id (int64, 0–300k) | label (string, 1–74 chars, nullable) | text (string, 4k–8k chars) |
|---|---|---|
299,400 | check metric can be zero | # (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import re
POD_REGEX = re.compile('pod-([0-9]+)')
BD_REGEX = re.compile('/BD-([^/]+)/')
APP_REGEX = re.compile('/ap-([^/]+)/')
CEP_REGEX = re.compile('/cep-([^/]+)/')
EPG_REGEX = re.compile('/epg-([^/]+)/')
IP_REGEX = re.compile('/ip-([^/]+)/')
NODE_REGEX = re.compile('node-([0-9]+)')
def parse_capacity_tags(dn):
"""
This parses tags from a dn designator. They look like this:
topology/pod-1/node-101/sys/phys-[eth1/6]/CDeqptMacsectxpkts5min
"""
tags = []
pod = get_pod_from_dn(dn)
if pod:
tags.append("fabric_pod_id:{}".format(pod))
node = get_node_from_dn(dn)
if node:
tags.append("node_id:{}".format(node))
return tags
def get_pod_from_dn(dn):
"""
This parses the pod from a dn designator. They look like this:
topology/pod-1/node-101/sys/phys-[eth1/6]/CDeqptMacsectxpkts5min
"""
return _get_value_from_dn(POD_REGEX, dn)
def get_bd_from_dn(dn):
"""
This parses the bd from the dn designator. They look like this:
uni/tn-DataDog/BD-DataDog-BD1
"""
return _get_value_from_dn(BD_REGEX, dn)
def get_app_from_dn(dn):
"""
This parses the app from the dn designator. They look like this:
uni/tn-DataDog/ap-DtDg-AP1-EcommerceApp/epg-DtDg-Ecomm/HDl2IngrPktsAg1h
"""
return _get_value_from_dn(APP_REGEX, dn)
def get_cep_from_dn(dn):
"""
This parses the cep from the dn designator. They look like this:
uni/tn-DataDog/ap-DtDg-AP1-EcommerceApp/epg-DtDg-Ecomm/cep-00:50:56:9E:FB:48
"""
return _get_value_from_dn(CEP_REGEX, dn)
def get_epg_from_dn(dn):
"""
This parses the epg from the dn designator. They look like this:
uni/tn-DataDog/ap-DtDg-AP1-EcommerceApp/epg-DtDg-Ecomm/HDl2IngrPktsAg1h
"""
return _get_value_from_dn(EPG_REGEX, dn)
def get_ip_from_dn(dn):
"""
This parses the ip from the dn designator. They look like this:
uni/tn-DataDog/ap-DtDg-AP1-EcommerceApp/epg-DtDg-Ecomm/cep-00:50:56:9D:91:B5/ip-[10.10.10.17]
"""
return _get_value_from_dn(IP_REGEX, dn)
def get_node_from_dn(dn):
"""
This parses the node from a dn designator. They look like this:
topology/pod-1/node-101/sys/phys-[eth1/6]/CDeqptMacsectxpkts5min
"""
return _get_value_from_dn(NODE_REGEX, dn)
def _get_value_from_dn(regex, dn):
if not dn:
return None
v = regex.search(dn)
if v:
return v.group(1)
else:
return None
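# Illustrative examples of the helpers above (values follow directly from the
# regexes; the dn string is the one quoted in the docstrings):
#   get_pod_from_dn("topology/pod-1/node-101/sys/phys-[eth1/6]")  -> "1"
#   get_node_from_dn("topology/pod-1/node-101/sys/phys-[eth1/6]") -> "101"
#   parse_capacity_tags("topology/pod-1/node-101/sys/phys-[eth1/6]")
#   -> ["fabric_pod_id:1", "node_id:101"]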
def get_event_tags_from_dn(dn):
"""
This grabs the event tags from the dn designator. They look like this:
uni/tn-DataDog/ap-DtDg-AP1-EcommerceApp/epg-DtDg-Ecomm/HDl2IngrPktsAg1h
"""
tags = []
node = get_node_from_dn(dn)
if node:
tags.append("node:" + node)
app = get_app_from_dn(dn)
if app:
tags.append("app:" + app)
bd = get_bd_from_dn(dn)
if bd:
tags.append("bd:" + bd)
cep = get_cep_from_dn(dn)
if cep:
tags.append("mac:" + cep)
ip = get_ip_from_dn(dn)
if ip:
tags.append("ip:" + ip)
epg = get_epg_from_dn(dn)
if epg:
tags.append("epg:" + epg)
return tags
def get_hostname_from_dn(dn):
"""
This parses the hostname from a dn designator. They look like this:
topology/pod-1/node-101/sys/phys-[eth1/6]/CDeqptMacsectxpkts5min
"""
pod = get_pod_from_dn(dn)
node = get_node_from_dn(dn)
if pod and node:
return "pod-{}-node-{}".format(pod, node)
else:
return None
def get_fabric_hostname(obj):
"""
This grabs the hostname from the object
The object looks something like this:
{
"dn": "topology/pod-1/node-101/sys/phys-[eth1/6]/CDeqptMacsectxpkts5min"
...
}
"""
attrs = get_attributes(obj)
dn = attrs['dn']
return get_hostname_from_dn(dn)
def get_attributes(obj):
"""
the json objects look like this:
{
"objType": {
"attributes": {
...
}
}
It always has the attributes nested below the object type
This helper provides a way of getting at the attributes
"""
if not obj or type(obj) is not dict:
return {}
keys = list(obj.keys())
if len(keys) > 0:
key = keys[0]
else:
return {}
key_obj = obj.get(key, {})
if type(key_obj) is not dict:
# if the object is not a dict
# it is probably already scoped to attributes
return obj
if key != "attributes":
attrs = key_obj.get('attributes')
if type(attrs) is not dict:
# if the attributes doesn't exist,
# it is probably already scoped to attributes
return obj
else:
# if the attributes exist, we return the value, except if it's not a dict type
attrs = key_obj
if type(attrs) is not dict:
return obj
return attrs
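# Illustrative example (the object type "l1PhysIf" is hypothetical):
#   get_attributes({"l1PhysIf": {"attributes": {"dn": "topology/pod-1/node-101"}}})
#   -> {"dn": "topology/pod-1/node-101"}
# An object already scoped to attributes is returned unchanged:
#   get_attributes({"dn": "topology/pod-1/node-101"})
#   -> {"dn": "topology/pod-1/node-101"}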
def METHOD_NAME(metric_name, metric_value, json_attributes):
"""
When a counter is reset, don't send a zero because it will look bad on the graphs
This checks if the zero makes sense or not
"""
if "last" in metric_name.lower():
return True
if not metric_value:
return False
try:
if metric_value == 0 or metric_value == "0" or metric_value == "0.000000" or float(metric_value) == 0.0:
if not json_attributes or not json_attributes.get('cnt'):
return False
if json_attributes.get('cnt') == "0" or json_attributes.get('cnt') == 0:
return False
except ValueError:
return False
return True |
299,401 | test modules tool | # #
# Copyright 2018-2023 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Unit tests for using EasyBuild as a library.
@author: Kenneth Hoste (Ghent University)
"""
import os
import shutil
import sys
import tempfile
from unittest import TextTestRunner
from test.framework.utilities import TestLoaderFiltered
# deliberately *not* using EnhancedTestCase from test.framework.utilities to avoid automatic configuration via setUp
from easybuild.base.testing import TestCase
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.config import BuildOptions
from easybuild.tools.options import set_up_configuration
from easybuild.tools.filetools import mkdir
from easybuild.tools.modules import modules_tool
from easybuild.tools.run import run_cmd
class EasyBuildLibTest(TestCase):
"""Test cases for using EasyBuild as a library."""
def setUp(self):
"""Prepare for running test."""
super(EasyBuildLibTest, self).setUp()
# make sure BuildOptions instance is re-created
if BuildOptions in BuildOptions._instances:
del BuildOptions._instances[BuildOptions]
self.tmpdir = tempfile.mkdtemp()
def tearDown(self):
"""Cleanup after running test."""
super(EasyBuildLibTest, self).tearDown()
shutil.rmtree(self.tmpdir)
def configure(self):
"""Utility function to set up EasyBuild configuration."""
# wipe BuildOption singleton instance, so it gets re-created when set_up_configuration is called
if BuildOptions in BuildOptions._instances:
del BuildOptions._instances[BuildOptions]
self.assertNotIn(BuildOptions, BuildOptions._instances)
set_up_configuration(silent=True)
self.assertIn(BuildOptions, BuildOptions._instances)
def test_run_cmd(self):
"""Test use of run_cmd function in the context of using EasyBuild framework as a library."""
error_pattern = r"Undefined build option: .*"
error_pattern += r" Make sure you have set up the EasyBuild configuration using set_up_configuration\(\)"
self.assertErrorRegex(EasyBuildError, error_pattern, run_cmd, "echo hello")
self.configure()
# run_cmd works fine if set_up_configuration was called first
(out, ec) = run_cmd("echo hello")
self.assertEqual(ec, 0)
self.assertEqual(out, 'hello\n')
def test_mkdir(self):
"""Test use of run_cmd function in the context of using EasyBuild framework as a library."""
test_dir = os.path.join(self.tmpdir, 'test123')
error_pattern = r"Undefined build option: .*"
error_pattern += r" Make sure you have set up the EasyBuild configuration using set_up_configuration\(\)"
self.assertErrorRegex(EasyBuildError, error_pattern, mkdir, test_dir)
self.configure()
# mkdir works fine if set_up_configuration was called first
self.assertNotExists(test_dir)
mkdir(test_dir)
self.assertExists(test_dir)
def METHOD_NAME(self):
"""Test use of modules_tool function in the context of using EasyBuild framework as a library."""
error_pattern = r"Undefined build option: .*"
error_pattern += r" Make sure you have set up the EasyBuild configuration using set_up_configuration\(\)"
self.assertErrorRegex(EasyBuildError, error_pattern, modules_tool)
self.configure()
test_mods_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'modules')
modtool = modules_tool()
modtool.use(test_mods_path)
self.assertIn('GCC/6.4.0-2.28', modtool.available())
modtool.load(['GCC/6.4.0-2.28'])
self.assertEqual(modtool.list(), [{'default': None, 'mod_name': 'GCC/6.4.0-2.28'}])
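# Minimal library-usage sketch distilled from the tests above (assumes an
# EasyBuild installation is available on the Python path):
#   from easybuild.tools.options import set_up_configuration
#   from easybuild.tools.run import run_cmd
#   set_up_configuration(silent=True)  # must happen before run_cmd/mkdir/modules_tool
#   out, ec = run_cmd("echo hello")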
def suite():
return TestLoaderFiltered().loadTestsFromTestCase(EasyBuildLibTest, sys.argv[1:])
if __name__ == '__main__':
res = TextTestRunner(verbosity=1).run(suite())
sys.exit(len(res.failures)) |
299,402 | get status and proof summaries | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
import argparse
import json
import logging
import os
import sys
DESCRIPTION = """Print 2 tables in GitHub-flavored Markdown that summarize
an execution of CBMC proofs."""
def get_args():
"""Parse arguments for summarize script."""
parser = argparse.ArgumentParser(description=DESCRIPTION)
for arg in [{
"flags": ["--run-file"],
"help": "path to the Litani run.json file",
"required": True,
}]:
flags = arg.pop("flags")
parser.add_argument(*flags, **arg)
return parser.parse_args()
def _get_max_length_per_column_list(data):
ret = [len(item) + 1 for item in data[0]]
for row in data[1:]:
for idx, item in enumerate(row):
ret[idx] = max(ret[idx], len(item) + 1)
return ret
def _get_table_header_separator(max_length_per_column_list):
line_sep = ""
for max_length_of_word_in_col in max_length_per_column_list:
line_sep += "|" + "-" * (max_length_of_word_in_col + 1)
line_sep += "|\n"
return line_sep
def _get_entries(max_length_per_column_list, row_data):
entries = []
for row in row_data:
entry = ""
for idx, word in enumerate(row):
max_length_of_word_in_col = max_length_per_column_list[idx]
space_formatted_word = (max_length_of_word_in_col - len(word)) * " "
entry += "| " + word + space_formatted_word
entry += "|\n"
entries.append(entry)
return entries
def _get_rendered_table(data):
table = []
max_length_per_column_list = _get_max_length_per_column_list(data)
entries = _get_entries(max_length_per_column_list, data)
for idx, entry in enumerate(entries):
if idx == 1:
line_sep = _get_table_header_separator(max_length_per_column_list)
table.append(line_sep)
table.append(entry)
table.append("\n")
return "".join(table)
def METHOD_NAME(run_dict):
"""Parse a dict representing a Litani run and create lists summarizing the
proof results.
Parameters
----------
run_dict
A dictionary representing a Litani run.
Returns
-------
    A list of 2 tables (lists of rows, each beginning with a header row).
    The first table maps each status to the number of proofs with that status.
    The second table maps each proof to its status.
"""
count_statuses = {}
proofs = [["Proof", "Status"]]
for proof_pipeline in run_dict["pipelines"]:
status_pretty_name = proof_pipeline["status"].title().replace("_", " ")
try:
count_statuses[status_pretty_name] += 1
except KeyError:
count_statuses[status_pretty_name] = 1
if proof_pipeline["name"] == "print_tool_versions":
continue
proofs.append([proof_pipeline["name"], status_pretty_name])
statuses = [["Status", "Count"]]
for status, count in count_statuses.items():
statuses.append([status, str(count)])
return [statuses, proofs]
def print_proof_results(run_file):
    """
    Print 2 strings that summarize the proof results.
    When printing, each string will render as a GitHub flavored Markdown table.
    """
    output = "## Summary of CBMC proof results\n\n"
    with open(run_file, encoding='utf-8') as run_json:
run_dict = json.load(run_json)
status_table, proof_table = METHOD_NAME(run_dict)
for summary in (status_table, proof_table):
output += _get_rendered_table(summary)
print(output)
sys.stdout.flush()
github_summary_file = os.getenv("GITHUB_STEP_SUMMARY")
if github_summary_file:
with open(github_summary_file, "a") as handle:
print(output, file=handle)
handle.flush()
else:
logging.warning(
"$GITHUB_STEP_SUMMARY not set, not writing summary file")
msg = (
"Click the 'Summary' button to view a Markdown table "
"summarizing all proof results")
if run_dict["status"] != "success":
logging.error("Not all proofs passed.")
logging.error(msg)
sys.exit(1)
logging.info(msg)
if __name__ == '__main__':
args = get_args()
logging.basicConfig(format="%(levelname)s: %(message)s")
try:
print_proof_results(args.run_file)
except Exception as ex: # pylint: disable=broad-except
logging.critical("Could not print results. Exception: %s", str(ex)) |
299,403 | block key | from collections.abc import Callable, Generator, Iterable, Sequence
from queue import Queue
from threading import Event as _UninterruptibleEvent
from typing_extensions import TypeAlias
from ._canonical_names import all_modifiers as all_modifiers, sided_modifiers as sided_modifiers
from ._keyboard_event import KEY_DOWN as KEY_DOWN, KEY_UP as KEY_UP, KeyboardEvent as KeyboardEvent
_Key: TypeAlias = int | str
_ScanCodeList: TypeAlias = list[int] | tuple[int, ...]
_ParseableHotkey: TypeAlias = _Key | list[int | _ScanCodeList] | tuple[int | _ScanCodeList, ...]
_Callback: TypeAlias = Callable[[KeyboardEvent], bool | None] | Callable[[], bool | None]
# mypy doesn't support PEP 646's TypeVarTuple yet: https://github.com/python/mypy/issues/12280
# _Ts = TypeVarTuple("_Ts")
_Ts: TypeAlias = tuple[object, ...]
version: str
class _Event(_UninterruptibleEvent):
def wait(self) -> None: ... # type: ignore[override] # Actual implementation
def is_modifier(key: _Key | None) -> bool: ...
def key_to_scan_codes(key: _ParseableHotkey, error_if_missing: bool = True) -> tuple[int, ...]: ...
def parse_hotkey(hotkey: _ParseableHotkey) -> tuple[tuple[tuple[int, ...], ...], ...]: ...
def send(hotkey: _ParseableHotkey, do_press: bool = True, do_release: bool = True) -> None: ...
press_and_release = send
def press(hotkey: _ParseableHotkey) -> None: ...
def release(hotkey: _ParseableHotkey) -> None: ...
# is_pressed cannot check multi-step hotkeys, so not using _ParseableHotkey
def is_pressed(hotkey: _Key | _ScanCodeList) -> bool: ...
def call_later(fn: Callable[..., None], args: _Ts = (), delay: float = 0.001) -> None: ...
def hook(callback: _Callback, suppress: bool = False, on_remove: Callable[[], None] = ...) -> Callable[[], None]: ...
def on_press(callback: _Callback, suppress: bool = False) -> Callable[[], None]: ...
def on_release(callback: _Callback, suppress: bool = False) -> Callable[[], None]: ...
def hook_key(key: _ParseableHotkey, callback: _Callback, suppress: bool = False) -> Callable[[], None]: ...
def on_press_key(key: _ParseableHotkey, callback: _Callback, suppress: bool = False) -> Callable[[], None]: ...
def on_release_key(key: _ParseableHotkey, callback: _Callback, suppress: bool = False) -> Callable[[], None]: ...
def unhook(remove: _Callback) -> None: ...
unhook_key = unhook
def unhook_all() -> None: ...
def METHOD_NAME(key: _ParseableHotkey) -> Callable[[], None]: ...
unblock_key = unhook_key
def remap_key(src: _ParseableHotkey, dst: _ParseableHotkey) -> Callable[[], None]: ...
unremap_key = unhook_key
def parse_hotkey_combinations(hotkey: _ParseableHotkey) -> tuple[tuple[tuple[int, ...], ...], ...]: ...
def add_hotkey(
hotkey: _ParseableHotkey,
callback: Callable[..., bool | None],
args: _Ts = (),
suppress: bool = False,
timeout: float = 1,
trigger_on_release: bool = False,
) -> Callable[[], None]: ...
register_hotkey = add_hotkey
def remove_hotkey(hotkey_or_callback: _ParseableHotkey | _Callback) -> None: ...
unregister_hotkey = remove_hotkey
clear_hotkey = remove_hotkey
def unhook_all_hotkeys() -> None: ...
unregister_all_hotkeys = unhook_all_hotkeys
remove_all_hotkeys = unhook_all_hotkeys
clear_all_hotkeys = unhook_all_hotkeys
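# Usage sketch for the API typed above (the implementations live in the
# runtime `keyboard` package; this stub only declares signatures):
#   import keyboard
#   keyboard.add_hotkey("ctrl+shift+a", lambda: print("hotkey!"))
#   keyboard.wait("esc")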
def remap_hotkey(
src: _ParseableHotkey, dst: _ParseableHotkey, suppress: bool = True, trigger_on_release: bool = False
) -> Callable[[], None]: ...
unremap_hotkey = remove_hotkey
def stash_state() -> list[int]: ...
def restore_state(scan_codes: Iterable[int]) -> None: ...
def restore_modifiers(scan_codes: Iterable[int]) -> None: ...
def write(text: str, delay: float = 0, restore_state_after: bool = True, exact: bool | None = None) -> None: ...
def wait(hotkey: _ParseableHotkey | None = None, suppress: bool = False, trigger_on_release: bool = False) -> None: ...
def get_hotkey_name(names: Iterable[str] | None = None) -> str: ...
def read_event(suppress: bool = False) -> KeyboardEvent: ...
def read_key(suppress: bool = False) -> _Key: ...
def read_hotkey(suppress: bool = True) -> str: ...
def get_typed_strings(events: Iterable[KeyboardEvent], allow_backspace: bool = True) -> Generator[str, None, None]: ...
def start_recording(
recorded_events_queue: Queue[KeyboardEvent] | None = None,
) -> tuple[Queue[KeyboardEvent], Callable[[], None]]: ...
def stop_recording() -> list[KeyboardEvent]: ...
def record(until: str = "escape", suppress: bool = False, trigger_on_release: bool = False) -> list[KeyboardEvent]: ...
def play(events: Iterable[KeyboardEvent], speed_factor: float = 1.0) -> None: ...
replay = play
def add_word_listener(
word: str, callback: _Callback, triggers: Sequence[str] = ["space"], match_suffix: bool = False, timeout: float = 2
) -> Callable[[], None]: ...
def remove_word_listener(word_or_handler: str | _Callback) -> None: ...
def add_abbreviation(
source_text: str, replacement_text: str, match_suffix: bool = False, timeout: float = 2
) -> Callable[[], None]: ...
register_word_listener = add_word_listener
register_abbreviation = add_abbreviation
remove_abbreviation = remove_word_listener |
299,404 | test activate totp with incorrect code | from django.conf import settings
from django.urls import reverse
import pytest
from allauth.account.models import EmailAddress
from allauth.mfa import app_settings
from allauth.mfa.adapter import get_adapter
from allauth.mfa.models import Authenticator
@pytest.mark.parametrize(
"url_name",
(
"mfa_activate_totp",
"mfa_index",
"mfa_deactivate_totp",
),
)
def test_login_required_views(client, url_name):
resp = client.get(reverse(url_name))
assert resp.status_code == 302
assert resp["location"].startswith(reverse("account_login"))
def METHOD_NAME(auth_client, reauthentication_bypass):
with reauthentication_bypass():
resp = auth_client.get(reverse("mfa_activate_totp"))
resp = auth_client.post(
reverse("mfa_activate_totp"),
{
"code": "123",
"signed_secret": resp.context["form"].initial["signed_secret"],
},
)
assert resp.context["form"].errors == {
"code": [get_adapter().error_messages["incorrect_code"]]
}
def test_activate_totp_with_tampered_secret(auth_client, reauthentication_bypass):
with reauthentication_bypass():
resp = auth_client.post(
reverse("mfa_activate_totp"), {"code": "123", "signed_secret": "tampered"}
)
assert resp.context["form"].errors == {"signed_secret": ["Tampered form."]}
def test_activate_totp_with_unverified_email(
auth_client, user, totp_validation_bypass, reauthentication_bypass
):
EmailAddress.objects.filter(user=user).update(verified=False)
with reauthentication_bypass():
resp = auth_client.get(reverse("mfa_activate_totp"))
with totp_validation_bypass():
resp = auth_client.post(
reverse("mfa_activate_totp"),
{
"code": "123",
"signed_secret": resp.context["form"].initial["signed_secret"],
},
)
assert resp.context["form"].errors == {
"__all__": [get_adapter().error_messages["unverified_email"]],
}
def test_activate_totp_success(
auth_client, totp_validation_bypass, user, reauthentication_bypass
):
with reauthentication_bypass():
resp = auth_client.get(reverse("mfa_activate_totp"))
with totp_validation_bypass():
resp = auth_client.post(
reverse("mfa_activate_totp"),
{
"code": "123",
"signed_secret": resp.context["form"].initial["signed_secret"],
},
)
assert resp["location"] == reverse("mfa_view_recovery_codes")
assert Authenticator.objects.filter(
user=user, type=Authenticator.Type.TOTP
).exists()
assert Authenticator.objects.filter(
user=user, type=Authenticator.Type.RECOVERY_CODES
).exists()
def test_index(auth_client, user_with_totp):
resp = auth_client.get(reverse("mfa_index"))
assert "authenticators" in resp.context
def test_deactivate_totp_success(auth_client, user_with_totp, user_password):
resp = auth_client.post(reverse("mfa_deactivate_totp"))
assert resp.status_code == 302
assert resp["location"].startswith(reverse("account_reauthenticate"))
resp = auth_client.post(resp["location"], {"password": user_password})
assert resp.status_code == 302
resp = auth_client.post(reverse("mfa_deactivate_totp"))
assert resp.status_code == 302
assert resp["location"] == reverse("mfa_index")
def test_user_without_totp_deactivate_totp(auth_client):
resp = auth_client.get(reverse("mfa_deactivate_totp"))
assert resp.status_code == 404
def test_user_with_totp_activate_totp(
auth_client, user_with_totp, reauthentication_bypass
):
with reauthentication_bypass():
resp = auth_client.get(reverse("mfa_activate_totp"))
assert resp.status_code == 302
assert resp["location"] == reverse("mfa_deactivate_totp")
def test_totp_login(client, user_with_totp, user_password, totp_validation_bypass):
resp = client.post(
reverse("account_login"),
{"login": user_with_totp.username, "password": user_password},
)
assert resp.status_code == 302
assert resp["location"] == reverse("mfa_authenticate")
resp = client.get(reverse("mfa_authenticate"))
assert resp.context["request"].user.is_anonymous
resp = client.post(reverse("mfa_authenticate"), {"code": "123"})
assert resp.context["form"].errors == {
"code": [get_adapter().error_messages["incorrect_code"]]
}
with totp_validation_bypass():
resp = client.post(
reverse("mfa_authenticate"),
{"code": "123"},
)
assert resp.status_code == 302
assert resp["location"] == settings.LOGIN_REDIRECT_URL
def test_download_recovery_codes(auth_client, user_with_recovery_codes, user_password):
resp = auth_client.get(reverse("mfa_download_recovery_codes"))
assert resp["location"].startswith(reverse("account_reauthenticate"))
resp = auth_client.post(resp["location"], {"password": user_password})
assert resp.status_code == 302
resp = auth_client.get(resp["location"])
assert resp["content-disposition"] == 'attachment; filename="recovery-codes.txt"'
def test_view_recovery_codes(auth_client, user_with_recovery_codes, user_password):
resp = auth_client.get(reverse("mfa_view_recovery_codes"))
assert resp["location"].startswith(reverse("account_reauthenticate"))
resp = auth_client.post(resp["location"], {"password": user_password})
assert resp.status_code == 302
resp = auth_client.get(resp["location"])
assert len(resp.context["unused_codes"]) == app_settings.RECOVERY_CODE_COUNT
def test_generate_recovery_codes(auth_client, user_with_recovery_codes, user_password):
rc = Authenticator.objects.get(
user=user_with_recovery_codes, type=Authenticator.Type.RECOVERY_CODES
).wrap()
prev_code = rc.get_unused_codes()[0]
resp = auth_client.get(reverse("mfa_generate_recovery_codes"))
assert resp["location"].startswith(reverse("account_reauthenticate"))
resp = auth_client.post(resp["location"], {"password": user_password})
assert resp.status_code == 302
resp = auth_client.post(resp["location"])
assert resp["location"] == reverse("mfa_view_recovery_codes")
rc = Authenticator.objects.get(
user=user_with_recovery_codes, type=Authenticator.Type.RECOVERY_CODES
).wrap()
assert not rc.validate_code(prev_code)
def test_add_email_not_allowed(auth_client, user_with_totp):
resp = auth_client.post(
reverse("account_email"),
{"action_add": "", "email": "change-to@this.org"},
)
assert resp.status_code == 200
assert resp.context["form"].errors == {
"email": [
"You cannot add an email address to an account protected by two-factor authentication."
]
} |
299,405 | precompute | """Quadratic Interpolation (Code 2)."""
import logging
import pyhf
from pyhf.tensor.manager import get_backend
from pyhf import events
from pyhf.interpolators import _slow_interpolator_looper
log = logging.getLogger(__name__)
class code2:
r"""
The quadratic interpolation and linear extrapolation strategy.
.. math::
\sigma_{sb} (\vec{\alpha}) = \sigma_{sb}^0(\vec{\alpha}) + \underbrace{\sum_{p \in \text{Syst}} I_\text{quad.|lin.} (\alpha_p; \sigma_{sb}^0, \sigma_{psb}^+, \sigma_{psb}^-)}_\text{deltas to calculate}
with
.. math::
I_\text{quad.|lin.}(\alpha; I^0, I^+, I^-) = \begin{cases} (b + 2a)(\alpha - 1) \qquad \alpha \geq 1\\ a\alpha^2 + b\alpha \qquad |\alpha| < 1 \\ (b - 2a)(\alpha + 1) \qquad \alpha < -1 \end{cases}
and
.. math::
a = \frac{1}{2} (I^+ + I^-) - I^0 \qquad \mathrm{and} \qquad b = \frac{1}{2}(I^+ - I^-)
"""
def __init__(self, histogramssets, subscribe=True):
"""Quadratic Interpolation."""
default_backend = pyhf.default_backend
self._histogramssets = default_backend.astensor(histogramssets)
# initial shape will be (nsysts, 1)
self.alphasets_shape = (self._histogramssets.shape[0], 1)
# precompute terms that only depend on the histogramssets
self._a = (
0.5 * (self._histogramssets[:, :, 2] + self._histogramssets[:, :, 0])
- self._histogramssets[:, :, 1]
)
self._b = 0.5 * (self._histogramssets[:, :, 2] - self._histogramssets[:, :, 0])
self._b_plus_2a = self._b + 2 * self._a
self._b_minus_2a = self._b - 2 * self._a
self._broadcast_helper = default_backend.ones(default_backend.shape(self._a))
self.METHOD_NAME()
if subscribe:
events.subscribe('tensorlib_changed')(self.METHOD_NAME)
def METHOD_NAME(self):
tensorlib, _ = get_backend()
self.a = tensorlib.astensor(self._a)
self.b = tensorlib.astensor(self._b)
self.b_plus_2a = tensorlib.astensor(self._b_plus_2a)
self.b_minus_2a = tensorlib.astensor(self._b_minus_2a)
# make up the masks correctly
self.broadcast_helper = tensorlib.astensor(self._broadcast_helper)
self.mask_on = tensorlib.ones(self.alphasets_shape)
self.mask_off = tensorlib.zeros(self.alphasets_shape)
def _precompute_alphasets(self, alphasets_shape):
if alphasets_shape == self.alphasets_shape:
return
tensorlib, _ = get_backend()
self.alphasets_shape = alphasets_shape
self.mask_on = tensorlib.ones(self.alphasets_shape)
self.mask_off = tensorlib.zeros(self.alphasets_shape)
def __call__(self, alphasets):
"""Compute Interpolated Values."""
tensorlib, _ = get_backend()
self._precompute_alphasets(tensorlib.shape(alphasets))
# select where alpha > 1
where_alphasets_gt1 = tensorlib.where(
alphasets > 1, self.mask_on, self.mask_off
)
# select where alpha >= -1
where_alphasets_not_lt1 = tensorlib.where(
alphasets >= -1, self.mask_on, self.mask_off
)
# s: set under consideration (i.e. the modifier)
# a: alpha variation
# h: histogram affected by modifier
# b: bin of histogram
value_gt1 = tensorlib.einsum(
'sa,shb->shab', alphasets - self.mask_on, self.b_plus_2a
)
value_btwn = tensorlib.einsum(
'sa,sa,shb->shab', alphasets, alphasets, self.a
) + tensorlib.einsum('sa,shb->shab', alphasets, self.b)
value_lt1 = tensorlib.einsum(
            'sa,shb->shab', alphasets + self.mask_on, self.b_minus_2a
)
masks_gt1 = tensorlib.astensor(
tensorlib.einsum(
'sa,shb->shab', where_alphasets_gt1, self.broadcast_helper
),
dtype="bool",
)
masks_not_lt1 = tensorlib.astensor(
tensorlib.einsum(
'sa,shb->shab', where_alphasets_not_lt1, self.broadcast_helper
),
dtype="bool",
)
# first, build a result where:
# alpha > 1 : fill with (b+2a)(alpha - 1)
# not(alpha > 1) : fill with (a * alpha^2 + b * alpha)
results_gt1_btwn = tensorlib.where(masks_gt1, value_gt1, value_btwn)
# then, build a result where:
# alpha >= -1 : do nothing (fill with previous result)
# not(alpha >= -1): fill with (b-2a)(alpha + 1)
return tensorlib.where(masks_not_lt1, results_gt1_btwn, value_lt1)
class _slow_code2:
def summand(self, down, nom, up, alpha):
a = 0.5 * (up + down) - nom
b = 0.5 * (up - down)
if alpha > 1:
delta = (b + 2 * a) * (alpha - 1)
elif -1 <= alpha <= 1:
delta = a * alpha * alpha + b * alpha
else:
delta = (b - 2 * a) * (alpha + 1)
return delta
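    # Numeric check (illustrative): for down=0.9, nom=1.0, up=1.2,
    #   a = 0.5*(1.2 + 0.9) - 1.0 = 0.05 and b = 0.5*(1.2 - 0.9) = 0.15,
    # so at alpha = 0.5 the quadratic branch gives
    #   delta = 0.05*0.25 + 0.15*0.5 = 0.0875.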
def __init__(self, histogramssets, subscribe=True):
self._histogramssets = histogramssets
def __call__(self, alphasets):
tensorlib, _ = get_backend()
return tensorlib.astensor(
_slow_interpolator_looper(
self._histogramssets, tensorlib.tolist(alphasets), self.summand
)
) |
299,406 | compress | # Copyright (c) 2021 Pieter Wuille
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test-only pure Python RIPEMD160 implementation."""
import unittest
# Message schedule indexes for the left path.
ML = [
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
    7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8,
    3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12,
    1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2,
    4, 0, 5, 9, 7, 12, 2, 10, 14, 1, 3, 8, 11, 6, 15, 13,
]
# Message schedule indexes for the right path.
MR = [
    5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12,
    6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2,
    15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13,
    8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14,
    12, 15, 10, 4, 1, 5, 8, 7, 6, 2, 13, 14, 0, 3, 9, 11,
]
# Rotation counts for the left path.
RL = [
    11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8,
    7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12,
    11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5,
    11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12,
    9, 15, 5, 11, 6, 8, 13, 12, 5, 12, 13, 14, 11, 8, 5, 6,
]
# Rotation counts for the right path.
RR = [
    8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6,
    9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11,
    9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5,
    15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8,
    8, 5, 12, 9, 12, 5, 14, 6, 8, 13, 6, 5, 15, 13, 11, 11,
]
# K constants for the left path.
KL = [0, 0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xA953FD4E]
# K constants for the right path.
KR = [0x50A28BE6, 0x5C4DD124, 0x6D703EF3, 0x7A6D76E9, 0]
def fi(x, y, z, i):
"""The f1, f2, f3, f4, and f5 functions from the specification."""
if i == 0:
return x ^ y ^ z
elif i == 1:
return (x & y) | (~x & z)
elif i == 2:
return (x | ~y) ^ z
elif i == 3:
return (x & z) | (y & ~z)
elif i == 4:
return x ^ (y | ~z)
else:
assert False
def rol(x, i):
"""Rotate the bottom 32 bits of x left by i bits."""
return ((x << i) | ((x & 0xFFFFFFFF) >> (32 - i))) & 0xFFFFFFFF
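# Illustrative: rol(0x80000000, 1) == 1 and rol(1, 4) == 16,
# i.e. bits shifted past bit 31 wrap around to bit 0.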
def METHOD_NAME(h0, h1, h2, h3, h4, block):
"""Compress state (h0, h1, h2, h3, h4) with block."""
# Left path variables.
al, bl, cl, dl, el = h0, h1, h2, h3, h4
# Right path variables.
ar, br, cr, dr, er = h0, h1, h2, h3, h4
# Message variables.
x = [int.from_bytes(block[4 * i : 4 * (i + 1)], "little") for i in range(16)]
# Iterate over the 80 rounds of the compression.
for j in range(80):
rnd = j >> 4
# Perform left side of the transformation.
al = rol(al + fi(bl, cl, dl, rnd) + x[ML[j]] + KL[rnd], RL[j]) + el
al, bl, cl, dl, el = el, al, bl, rol(cl, 10), dl
# Perform right side of the transformation.
ar = rol(ar + fi(br, cr, dr, 4 - rnd) + x[MR[j]] + KR[rnd], RR[j]) + er
ar, br, cr, dr, er = er, ar, br, rol(cr, 10), dr
# Compose old state, left transform, and right transform into new state.
return h1 + cl + dr, h2 + dl + er, h3 + el + ar, h4 + al + br, h0 + bl + cr
def ripemd160(data):
"""Compute the RIPEMD-160 hash of data."""
# Initialize state.
state = (0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0)
# Process full 64-byte blocks in the input.
for b in range(len(data) >> 6):
state = METHOD_NAME(*state, data[64 * b : 64 * (b + 1)])
# Construct final blocks (with padding and size).
pad = b"\x80" + b"\x00" * ((119 - len(data)) & 63)
fin = data[len(data) & ~63 :] + pad + (8 * len(data)).to_bytes(8, "little")
# Process final blocks.
for b in range(len(fin) >> 6):
state = METHOD_NAME(*state, fin[64 * b : 64 * (b + 1)])
# Produce output.
return b"".join((h & 0xFFFFFFFF).to_bytes(4, "little") for h in state)
class TestFrameworkKey(unittest.TestCase):
def test_ripemd160(self):
"""RIPEMD-160 test vectors."""
# See https://homes.esat.kuleuven.be/~bosselae/ripemd160.html
for msg, hexout in [
(b"", "9c1185a5c5e9fc54612808977ee8f548b2258d31"),
(b"a", "0bdc9d2d256b3ee9daae347be6f4dc835a467ffe"),
(b"abc", "8eb208f7e05d987a9b044a8e98c6b087f15a0bfc"),
(b"message digest", "5d0689ef49d2fae572b881b123a85ffa21595f36"),
(b"abcdefghijklmnopqrstuvwxyz", "f71c27109c692c1b56bbdceb5b9d2865b3708dbc"),
(
b"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
"12a053384a9c0c88e405a06c27dcf49ada62eb2b",
),
(
b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
"b0e20b6e3116640286ed3a87a5713079b21f5189",
),
(b"1234567890" * 8, "9b752e45573d4b39f4dbd3323cab82bf63326bfb"),
(b"a" * 1000000, "52783243c1697bdbe16d37f97f68f08325dc1528"),
]:
self.assertEqual(ripemd160(msg).hex(), hexout) |
299,407 | get vulnerability id | __all__ = [
'KeyServerInterface',
'OasisLookupInterface',
]
# 'OasisBaseKeysLookup' -> OasisLookupInterface
import os
import abc
from ..utils.log import oasis_log
from ..utils.status import OASIS_KEYS_STATUS
''' Interface classes for developing custom key server or key lookup code '''
class KeyServerInterface(metaclass=abc.ABCMeta):
"""
Interface to implement to create a KeyServer
It define the method to be implemented to be used correctly in lookup.factory.KeyServerFactory
all classes must:
- specify the version of the interface they use
- implement the init method
- implement the generate_key_files method
"""
interface_version = "1"
@abc.abstractmethod
def __init__(self, config, config_dir, user_data_dir, output_dir):
"""
During the key generation step, the generic factory will call the constructor of the lookup class with the
following parameters.
:param config: contains all the information necessary to run the model
:type config: dict
:param config_dir: path to the model directory, can be used to locate relative path to all the files
that serve as base for the model
:type config_dir: str
:param user_data_dir: Path to additional data necessary for the model that can vary from analysis to analysis
:type user_data_dir: str
:param output_dir: Path to the analysis output directory, can be use to write additional files that are produce
during the keys file generation
"""
raise NotImplementedError
@abc.abstractmethod
def generate_key_files(self,
location_fp,
successes_fp,
errors_fp=None,
output_format='oasis',
keys_success_msg=False,
multiproc_enabled=True,
multiproc_num_cores=-1,
multiproc_num_partitions=-1,
**kwargs):
"""
Writes a keys file, and optionally a keys error file.
:param location_fp: path to the locations file
:type location_fp: str
:param successes_fp: path to the success keys file
:type successes_fp: str
:param errors_fp: path to the error keys file (optional)
:type errors_fp: str
:param output_format: format of the keys files (oasis or json)
:type output_format: str
:param keys_success_msg: option to write msg for success key
:type keys_success_msg: bool
:param multiproc_enabled: option to run with multiple processor
:type multiproc_enabled: bool
:param multiproc_num_cores: number of cores to use in multiproc mode
:type multiproc_num_cores: int
:param multiproc_num_partitions: number of partition to create in multiproc mode
:type multiproc_num_partitions: int
If ``keys_errors_file_path`` is not present then the method returns a
pair ``(p, n)`` where ``p`` is the keys file path and ``n`` is the
number of "successful" keys records written to the keys file, otherwise
it returns a quadruple ``(p1, n1, p2, n2)`` where ``p1`` is the keys
file path, ``n1`` is the number of "successful" keys records written to
the keys file, ``p2`` is the keys errors file path and ``n2`` is the
number of "unsuccessful" keys records written to keys errors file.
"""
raise NotImplementedError
class KeyLookupInterface(metaclass=abc.ABCMeta):
"""Interface for KeyLookup
it define the interface to be used correctly by lookup.factory.BasicKeyServer
all classes must:
- specify the version of the interface they use
- implement the init method
- implement the process_location method
"""
interface_version = "1"
@abc.abstractmethod
def __init__(self, config, config_dir, user_data_dir, output_dir):
raise NotImplementedError
@abc.abstractmethod
def process_locations(self, loc_df):
"""
Process location rows - passed in as a pandas dataframe.
"""
raise NotImplementedError
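# Minimal sketch of a custom lookup implementing this interface (the class
# name and the 'status' column below are hypothetical, not part of the API):
#
# class MyKeyLookup(KeyLookupInterface):
#     def __init__(self, config, config_dir, user_data_dir, output_dir):
#         self.config = config
#
#     def process_locations(self, loc_df):
#         # assign keys columns on the location dataframe
#         loc_df['status'] = OASIS_KEYS_STATUS['success']['id']
#         return loc_df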
class OasisLookupInterface: # pragma: no cover
"""
    Old Oasis base class (deprecated).
    If you were using this interface, you can make your class inherit from the new
    abstract class AbstractBasicKeyServer or implement the KeyServerInterface interface.
"""
interface_version = "0"
@oasis_log()
def __init__(
self,
keys_data_directory=None,
supplier=None,
model_name=None,
model_version=None,
complex_lookup_config_fp=None,
output_directory=None
):
"""
Class constructor
"""
if keys_data_directory is not None:
self.keys_data_directory = keys_data_directory
else:
self.keys_data_directory = os.path.join(os.sep, 'var', 'oasis', 'keys_data')
self.supplier = supplier
self.model_name = model_name
self.model_version = model_version
self.complex_lookup_config_fp = complex_lookup_config_fp
self.output_directory = output_directory
self.UNKNOWN_ID = -1
@oasis_log()
def process_locations(self, loc_df):
"""
Process location rows - passed in as a pandas dataframe.
"""
pass
def _get_area_peril_id(self, record):
"""
Get the area peril ID for a particular location record.
"""
return self.UNKNOWN_ID, "Not implemented"
def METHOD_NAME(self, record):
"""
Get the vulnerability ID for a particular location record.
"""
return self.UNKNOWN_ID, "Not implemented"
@oasis_log()
def _get_area_peril_ids(self, loc_data, include_context=True):
"""
Generates area peril IDs in two modes - if include_context is
True (default) it will generate location records/rows including
the area peril IDs, otherwise it will generate pairs of location
IDs and the corresponding area peril IDs.
"""
pass
@oasis_log()
def _get_vulnerability_ids(self, loc_data, include_context=True):
"""
Generates vulnerability IDs in two modes - if include_context is
True (default) it will generate location records/rows including
the area peril IDs, otherwise it will generate pairs of location
IDs and the corresponding vulnerability IDs.
"""
pass
def _get_custom_lookup_success(self, ap_id, vul_id):
"""
Determine the status of the keys lookup.
"""
if ap_id == self.UNKNOWN_ID or vul_id == self.UNKNOWN_ID:
return OASIS_KEYS_STATUS['nomatch']['id']
return OASIS_KEYS_STATUS['success']['id'] |
299,408 | test is uri job or run | from wandb.sdk.launch.wandb_reference import WandbReference
def test_parse_bad() -> None:
ref = WandbReference.parse("not a url")
assert ref is None
def test_parse_hostonly() -> None:
test_cases = [
"https://wandb.ai",
"https://wandb.ai/",
]
for test_case in test_cases:
ref = WandbReference.parse(test_case)
assert ref.host == "wandb.ai"
assert ref.url_host() == "https://wandb.ai"
def test_parse_beta() -> None:
test_cases = [
"https://beta.wandb.ai",
"https://beta.wandb.ai/settings",
]
for test_case in test_cases:
ref = WandbReference.parse(test_case)
assert ref.host == "beta.wandb.ai"
assert ref.entity is None
def test_parse_run() -> None:
test_cases = [
"https://wandb.ai/my-entity/my-project/runs/2aqbwbek",
"https://wandb.ai/my-entity/my-project/runs/2aqbwbek?workspace=user-my-entity",
"https://wandb.ai/my-entity/my-project/runs/2aqbwbek/logs?workspace=user-my-entity",
]
for test_case in test_cases:
ref = WandbReference.parse(test_case)
assert ref.is_run()
assert ref.host == "wandb.ai"
assert ref.entity == "my-entity"
assert ref.project == "my-project"
assert ref.run_id == "2aqbwbek"
def test_parse_run_localhost() -> None:
"""This format can be seen when running old unit tests."""
test_case = "http://localhost:42051/mock_server_entity/test/runs/12345678"
ref = WandbReference.parse(test_case)
assert ref.is_run()
assert ref.host == "localhost:42051"
assert ref.entity == "mock_server_entity"
assert ref.project == "test"
assert ref.run_id == "12345678"
def test_parse_run_bare() -> None:
test_cases = [
"/my-entity/my-project/runs/2aqbwbek",
"/my-entity/my-project/runs/2aqbwbek?workspace=user-my-entity",
"/my-entity/my-project/runs/2aqbwbek/logs?workspace=user-my-entity",
]
for test_case in test_cases:
ref = WandbReference.parse(test_case)
assert ref.is_bare()
assert ref.is_run()
assert ref.host is None
assert ref.entity == "my-entity"
assert ref.project == "my-project"
assert ref.run_id == "2aqbwbek"
def test_parse_job() -> None:
test_cases = [
"https://wandb.ai/my-entity/my-project/artifacts/job/my-job.py",
"https://wandb.ai/my-entity/my-project/artifacts/job/my-job.py/_view/versions",
"https://wandb.ai/my-entity/my-project/artifacts/job/my-job.py/latest/lineage",
]
for test_case in test_cases:
ref = WandbReference.parse(test_case)
assert ref.is_job()
assert ref.host == "wandb.ai"
assert ref.entity == "my-entity"
assert ref.project == "my-project"
assert ref.job_name == "my-job.py"
assert ref.job_alias == "latest"
assert ref.job_reference() == "my-job.py:latest"
assert ref.job_reference_scoped() == "my-entity/my-project/my-job.py:latest"
test_cases = [
"https://wandb.ai/my-entity/my-project/artifacts/job/my-job.py/v0",
"https://wandb.ai/my-entity/my-project/artifacts/job/my-job.py/v0/",
"https://wandb.ai/my-entity/my-project/artifacts/job/my-job.py/v0/files",
]
for test_case in test_cases:
ref = WandbReference.parse(test_case)
assert ref.is_job()
assert ref.host == "wandb.ai"
assert ref.entity == "my-entity"
assert ref.project == "my-project"
assert ref.job_name == "my-job.py"
assert ref.job_alias == "v0"
assert ref.job_reference() == "my-job.py:v0"
assert ref.job_reference_scoped() == "my-entity/my-project/my-job.py:v0"
def test_parse_job_bare() -> None:
test_cases = [
"/my-entity/my-project/artifacts/job/my-job.py",
"/my-entity/my-project/artifacts/job/my-job.py/_view/versions",
"/my-entity/my-project/artifacts/job/my-job.py/latest/lineage",
]
for test_case in test_cases:
ref = WandbReference.parse(test_case)
assert ref.is_bare()
assert ref.is_job()
assert ref.host is None
assert ref.entity == "my-entity"
assert ref.project == "my-project"
assert ref.job_name == "my-job.py"
assert ref.job_alias == "latest"
test_cases = [
"/my-entity/my-project/artifacts/job/my-job.py/v0",
"/my-entity/my-project/artifacts/job/my-job.py/v0/",
"/my-entity/my-project/artifacts/job/my-job.py/v0/files",
]
for test_case in test_cases:
ref = WandbReference.parse(test_case)
assert ref.is_bare()
assert ref.is_job()
assert ref.host is None
assert ref.entity == "my-entity"
assert ref.project == "my-project"
assert ref.job_name == "my-job.py"
assert ref.job_alias == "v0"
def METHOD_NAME() -> None:
test_cases = [
"https://wandb.ai/my-entity/my-project/runs/2aqbwbek?workspace=user-my-entity",
"/my-entity/my-project/runs/2aqbwbek",
"/my-entity/my-project/artifacts/job/my-job.py/_view/versions",
"https://wandb.ai/my-entity/my-project/artifacts/job/my-job.py/latest/lineage",
]
for test_case in test_cases:
assert WandbReference.is_uri_job_or_run(test_case)
test_cases = [
"",
"https://wandb.ai/",
"https://beta.wandb.ai/settings",
"https://github.com/wandb/examples/pull/123/files",
]
for test_case in test_cases:
assert not WandbReference.is_uri_job_or_run(test_case) |
299,409 | get description | #!/usr/bin/env python
########################################################################
# DELLEMC S5212F
#
# Module contains an implementation of SONiC Platform Base API and
# provides the Components' (e.g., BIOS, CPLD, FPGA, BMC etc.) available in
# the platform
#
########################################################################
try:
import subprocess
from sonic_platform_base.component_base import ComponentBase
import sonic_platform.hwaccess as hwaccess
except ImportError as e:
raise ImportError(str(e) + "- required module not found")
def get_bios_version():
return subprocess.check_output(['dmidecode', '-s',
'system-version']).decode('utf-8').strip()
def get_fpga_version():
val = hwaccess.pci_get_value('/sys/bus/pci/devices/0000:03:00.0/resource0', 0)
return '{}.{}'.format((val >> 8) & 0xff, val & 0xff)
def get_bmc_version():
return subprocess.check_output(
['cat', '/sys/class/ipmi/ipmi0/device/bmc/firmware_revision']
).decode('utf-8').strip()
def get_cpld_version(bus, i2caddr):
return '{}.{}'.format(hwaccess.i2c_get(bus, i2caddr, 1),
hwaccess.i2c_get(bus, i2caddr, 0)
)
def get_cpld0_version():
return get_cpld_version(601, 0x31)
def get_cpld1_version():
return get_cpld_version(600, 0x30)
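# Illustrative: a CPLD whose registers at offsets 1 and 0 read 1 and 2
# respectively is reported as version "1.2".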
class Component(ComponentBase):
"""DellEMC Platform-specific Component class"""
CHASSIS_COMPONENTS = [
['BIOS',
'Performs initialization of hardware components during booting',
get_bios_version
],
['FPGA',
'Used for managing the system LEDs',
get_fpga_version
],
['BMC',
'Platform management controller for on-board temperature monitoring, in-chassis power, Fan and LED control',
get_bmc_version
],
['System CPLD',
'Used for managing the CPU power sequence and CPU states',
get_cpld0_version
],
['Slave CPLD 1',
'Used for managing SFP28/QSFP28 port transceivers (SFP28 1-24, QSFP28 1-4)',
get_cpld1_version
]
]
    def __init__(self, component_index=0):
self.index = component_index
self.name = self.CHASSIS_COMPONENTS[self.index][0]
self.description = self.CHASSIS_COMPONENTS[self.index][1]
self.version = None
def get_name(self):
"""
Retrieves the name of the component
Returns:
A string containing the name of the component
"""
return self.name
def METHOD_NAME(self):
"""
Retrieves the description of the component
Returns:
A string containing the description of the component
"""
return self.description
def get_firmware_version(self):
"""
Retrieves the firmware version of the component
Returns:
A string containing the firmware version of the component
"""
        if self.version is None:
self.version = self.CHASSIS_COMPONENTS[self.index][2]()
return self.version
def get_presence(self):
"""
Retrieves the presence of the component
Returns:
bool: True if present, False if not
"""
return True
def get_model(self):
"""
Retrieves the part number of the component
Returns:
string: Part number of component
"""
return 'NA'
def get_serial(self):
"""
Retrieves the serial number of the component
Returns:
string: Serial number of component
"""
return 'NA'
def get_status(self):
"""
Retrieves the operational status of the component
Returns:
bool: True if component is operating properly, False if not
"""
return True
def get_position_in_parent(self):
"""
Retrieves 1-based relative physical position in parent device.
Returns:
integer: The 1-based relative physical position in parent
device or -1 if cannot determine the position
"""
return -1
def is_replaceable(self):
"""
Indicate whether component is replaceable.
Returns:
bool: True if it is replaceable.
"""
return False
def install_firmware(self, image_path):
"""
Installs firmware to the component
Args:
image_path: A string, path to firmware image
Returns:
A boolean, True if install was successful, False if not
"""
return False
def get_available_firmware_version(self, image_path):
"""
Retrieves the available firmware version of the component
Note: the firmware version will be read from image
Args:
image_path: A string, path to firmware image
Returns:
A string containing the available firmware version of the component
"""
return "N/A"
def get_firmware_update_notification(self, image_path):
"""
Retrieves a notification on what should be done in order to complete
the component firmware update
Args:
image_path: A string, path to firmware image
Returns:
A string containing the component firmware update notification if required.
By default 'None' value will be used, which indicates that no actions are required
"""
return "None"
def update_firmware(self, image_path):
"""
Updates firmware of the component
This API performs firmware update: it assumes firmware installation and loading in a single call.
In case platform component requires some extra steps (apart from calling Low Level Utility)
to load the installed firmware (e.g, reboot, power cycle, etc.) - this will be done automatically by API
Args:
image_path: A string, path to firmware image
Raises:
RuntimeError: update failed
"""
return False |
299,410 | id | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetCassandraResourceCassandraKeyspaceResult',
'AwaitableGetCassandraResourceCassandraKeyspaceResult',
'get_cassandra_resource_cassandra_keyspace',
'get_cassandra_resource_cassandra_keyspace_output',
]
@pulumi.output_type
class GetCassandraResourceCassandraKeyspaceResult:
"""
An Azure Cosmos DB Cassandra keyspace.
"""
def __init__(__self__, METHOD_NAME=None, identity=None, location=None, name=None, options=None, resource=None, tags=None, type=None):
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", METHOD_NAME)
if identity and not isinstance(identity, dict):
raise TypeError("Expected argument 'identity' to be a dict")
pulumi.set(__self__, "identity", identity)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if options and not isinstance(options, dict):
raise TypeError("Expected argument 'options' to be a dict")
pulumi.set(__self__, "options", options)
if resource and not isinstance(resource, dict):
raise TypeError("Expected argument 'resource' to be a dict")
pulumi.set(__self__, "resource", resource)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def METHOD_NAME(self) -> str:
"""
The unique resource identifier of the ARM resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def identity(self) -> Optional['outputs.ManagedServiceIdentityResponse']:
"""
Identity for the resource.
"""
return pulumi.get(self, "identity")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
The location of the resource group to which the resource belongs.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the ARM resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def options(self) -> Optional['outputs.CassandraKeyspaceGetPropertiesResponseOptions']:
return pulumi.get(self, "options")
@property
@pulumi.getter
def resource(self) -> Optional['outputs.CassandraKeyspaceGetPropertiesResponseResource']:
return pulumi.get(self, "resource")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Tags are a list of key-value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags can be provided for a resource. Each tag must have a key no greater than 128 characters and value no greater than 256 characters. For example, the default experience for a template type is set with "defaultExperience": "Cassandra". Current "defaultExperience" values also include "Table", "Graph", "DocumentDB", and "MongoDB".
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of Azure resource.
"""
return pulumi.get(self, "type")
class AwaitableGetCassandraResourceCassandraKeyspaceResult(GetCassandraResourceCassandraKeyspaceResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetCassandraResourceCassandraKeyspaceResult(
METHOD_NAME=self.METHOD_NAME,
identity=self.identity,
location=self.location,
name=self.name,
options=self.options,
resource=self.resource,
tags=self.tags,
type=self.type)
def get_cassandra_resource_cassandra_keyspace(account_name: Optional[str] = None,
keyspace_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetCassandraResourceCassandraKeyspaceResult:
"""
Gets the Cassandra keyspaces under an existing Azure Cosmos DB database account with the provided name.
:param str account_name: Cosmos DB database account name.
:param str keyspace_name: Cosmos DB keyspace name.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
__args__ = dict()
__args__['accountName'] = account_name
__args__['keyspaceName'] = keyspace_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:documentdb/v20230315preview:getCassandraResourceCassandraKeyspace', __args__, opts=opts, typ=GetCassandraResourceCassandraKeyspaceResult).value
return AwaitableGetCassandraResourceCassandraKeyspaceResult(
METHOD_NAME=pulumi.get(__ret__, 'id'),
identity=pulumi.get(__ret__, 'identity'),
location=pulumi.get(__ret__, 'location'),
name=pulumi.get(__ret__, 'name'),
options=pulumi.get(__ret__, 'options'),
resource=pulumi.get(__ret__, 'resource'),
tags=pulumi.get(__ret__, 'tags'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_cassandra_resource_cassandra_keyspace)
def get_cassandra_resource_cassandra_keyspace_output(account_name: Optional[pulumi.Input[str]] = None,
keyspace_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetCassandraResourceCassandraKeyspaceResult]:
"""
Gets the Cassandra keyspaces under an existing Azure Cosmos DB database account with the provided name.
:param str account_name: Cosmos DB database account name.
:param str keyspace_name: Cosmos DB keyspace name.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
... |
299,411 | mouse release event | from PyQt5.QtCore import QRect, QSize, Qt, pyqtSignal
from PyQt5.QtGui import QColor, QPainter, QPainterPath
from PyQt5.QtWidgets import QApplication, QSizePolicy, QWidget
_hPad = 10
_vPad = 6
class RoundedButtonSet(QWidget):
clicked = pyqtSignal()
SingleSelection = 1
OneOrMoreSelection = 2
def __init__(self, parent=None):
super().__init__(parent)
self.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
self._options = []
self._selection = set()
self._selectionMode = RoundedButtonSet.SingleSelection
def options(self):
return self._options
def setOptions(self, options, selectFirst=True):
self._options = options
if selectFirst and self._options:
self._selection = {0}
else:
self._selection = set()
self.update()
def selectedOptions(self):
return [self._options[index] for index in sorted(self._selection)]
def setSelectedOptions(self, options):
self._selection = set()
for option in options:
index = self._options.index(option)
self._selection.add(index)
self.update()
def selectionMode(self):
return self._selectionMode
def setSelectionMode(self, mode):
self._selectionMode = mode
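    # Usage sketch (assumes a running QApplication and event loop):
    #   buttons = RoundedButtonSet()
    #   buttons.setOptions(["Left", "Center", "Right"])
    #   buttons.setSelectionMode(RoundedButtonSet.OneOrMoreSelection)
    #   buttons.clicked.connect(lambda: print(buttons.selectedOptions()))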
# ----------
# Qt methods
# ----------
def mousePressEvent(self, event):
if event.button() == Qt.LeftButton:
for recordIndex, rect in self._optionsRects.items():
if QRect(*rect).contains(event.pos()):
self._clickedIndex = recordIndex
self._oldSelection = self._selection
self._selection = {recordIndex}
if (
self._selectionMode > 1
and QApplication.keyboardModifiers() & Qt.ShiftModifier
):
shiftSelection = self._selection ^ self._oldSelection
if shiftSelection:
self._selection = shiftSelection
else:
self._selection |= self._oldSelection
break
self.update()
else:
super().mousePressEvent(event)
def METHOD_NAME(self, event):
if event.button() == Qt.LeftButton:
clickedRect = self._optionsRects[self._clickedIndex]
if QRect(*clickedRect).contains(event.pos()):
self._selection = {self._clickedIndex}
if (
self._selectionMode > 1
and QApplication.keyboardModifiers() & Qt.ShiftModifier
):
shiftSelection = self._selection ^ self._oldSelection
if shiftSelection:
self._selection = shiftSelection
self.clicked.emit()
else:
self._selection = self._oldSelection
self.update()
del self._clickedIndex
del self._oldSelection
else:
super().METHOD_NAME(event)
def paintEvent(self, event):
painter = QPainter(self)
painter.setRenderHint(QPainter.Antialiasing)
self._optionsRects = {}
w, h = self.width(), self.height()
metrics = self.fontMetrics()
hphp = 2 * _hPad
painter.save()
path = QPainterPath()
path.addRoundedRect(0.5, 0.5, w - 1, h - 1, 4, 4)
painter.fillPath(path, QColor(250, 250, 250))
x = 0
linePath = QPainterPath()
for text in self._options[:-1]:
x += hphp + metrics.width(text)
linePath.moveTo(x, 0)
linePath.lineTo(x, h)
pen = painter.pen()
pen.setColor(QColor(218, 218, 218))
pen.setWidth(0)
painter.setPen(pen)
painter.drawPath(path)
painter.setRenderHint(QPainter.Antialiasing, False)
painter.drawPath(linePath)
painter.restore()
painter.translate(_hPad, _vPad + metrics.ascent())
left = 0
for index, text in enumerate(self._options):
if index in self._selection:
color = QColor(20, 146, 230)
else:
color = QColor(63, 63, 63)
painter.setPen(color)
painter.drawText(0, 0, text)
textWidth = metrics.width(text)
rectWidth = textWidth + hphp
rect = (left, 0, rectWidth, h)
self._optionsRects[index] = rect
painter.translate(rectWidth, 0)
left += rectWidth
def sizeHint(self):
metrics = self.fontMetrics()
hphp = 2 * _hPad
width = sum(metrics.width(text) + hphp for text in self._options) or hphp
height = 2 * _vPad + metrics.lineSpacing()
        return QSize(width, height)
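# --- Hedged usage sketch (illustrative only; assumes PyQt5 and a display, and
# that the masked METHOD_NAME above is the mouse-release handler) ---
if __name__ == "__main__":
    import sys
    app = QApplication(sys.argv)
    buttons = RoundedButtonSet()
    buttons.setSelectionMode(RoundedButtonSet.OneOrMoreSelection)
    buttons.setOptions(["Left", "Center", "Right"])
    buttons.clicked.connect(lambda: print(buttons.selectedOptions()))
    buttons.show()
    sys.exit(app.exec_()) |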
299,412 | port start | #! /usr/bin/python
try:
import time
import string
from ctypes import create_string_buffer
from sonic_sfp.sfputilbase import SfpUtilBase
except ImportError as e:
raise ImportError(str(e) + "- required module not found")
class SfpUtil(SfpUtilBase):
"""Platform specific sfputil class"""
_port_start = 0
_port_end = 31
ports_in_block = 32
_port_to_eeprom_mapping = {}
_qsfp_ports = list(range(0, ports_in_block + 1))
def __init__(self):
# Override port_to_eeprom_mapping for class initialization
eeprom_path = '/sys/bus/i2c/devices/{0}-0050/sfp_eeprom'
for x in range(self.METHOD_NAME, self.port_end + 1):
self._port_to_eeprom_mapping[x] = eeprom_path.format(x + 18)
SfpUtilBase.__init__(self)
def reset(self, port_num):
raise NotImplementedError
def get_presence(self, port_num):
# Check for invalid port_num
if port_num < self._port_start or port_num > self._port_end:
return False
path = "/sys/bus/i2c/devices/{0}-0050/sfp_is_present"
port_ps = path.format(port_num+18)
reg_value = '0'
try:
reg_file = open(port_ps)
reg_value = reg_file.readline().rstrip()
reg_file.close()
except IOError as e:
print("Error: unable to access file: %s" % str(e))
return False
if reg_value == '1':
return True
return False
@property
def METHOD_NAME(self):
return self._port_start
@property
def port_end(self):
return self._port_end
@property
def qsfp_ports(self):
return list(range(0, self.ports_in_block + 1))
@property
def port_to_eeprom_mapping(self):
return self._port_to_eeprom_mapping
def get_transceiver_change_event(self):
"""
        TODO: This function needs to be implemented
        when we decide to support monitoring SFP (Xcvrd)
        on this platform.
"""
raise NotImplementedError
def get_low_power_mode(self, port_num):
# Check for invalid port_num
if port_num < self._port_start or port_num > self._port_end:
return False
try:
eeprom = None
if not self.get_presence(port_num):
return False
eeprom = open(self.port_to_eeprom_mapping[port_num], "rb")
eeprom.seek(93)
lpmode = ord(eeprom.read(1))
if ((lpmode & 0x3) == 0x3):
return True # Low Power Mode if "Power override" bit is 1 and "Power set" bit is 1
else:
# High Power Mode if one of the following conditions is matched:
# 1. "Power override" bit is 0
# 2. "Power override" bit is 1 and "Power set" bit is 0
return False
except IOError as e:
print("Error: unable to open file: %s" % str(e))
return False
finally:
if eeprom is not None:
eeprom.close()
time.sleep(0.01)
def set_low_power_mode(self, port_num, lpmode):
# Check for invalid port_num
if port_num < self._port_start or port_num > self._port_end:
return False
try:
eeprom = None
if not self.get_presence(port_num):
return False # Port is not present, unable to set the eeprom
# Fill in write buffer
regval = 0x3 if lpmode else 0x1 # 0x3:Low Power Mode, 0x1:High Power Mode
buffer = create_string_buffer(1)
            buffer[0] = bytes([regval])  # one-byte bytes object; chr() would yield str and fail on Python 3
# Write to eeprom
eeprom = open(self.port_to_eeprom_mapping[port_num], "r+b")
eeprom.seek(93)
eeprom.write(buffer[0])
return True
except IOError as e:
print("Error: unable to open file: %s" % str(e))
return False
finally:
if eeprom is not None:
eeprom.close()
            time.sleep(0.01)
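# --- Hedged sketch: decoding the QSFP power-control byte (offset 93) exactly
# as the comments in get_low_power_mode above describe; a standalone helper,
# not part of the SfpUtilBase API ---
def _decode_power_byte(value):
    """Low power mode only when both 'Power override' and 'Power set' bits are 1."""
    return (value & 0x3) == 0x3
assert _decode_power_byte(0x3) is True  # override=1, set=1 -> low power
assert _decode_power_byte(0x1) is False  # override=1, set=0 -> high power
assert _decode_power_byte(0x0) is False  # override=0 -> high power |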
299,413 | matching records count | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = [
'ListFirewallPolicyIdpsSignatureResult',
'AwaitableListFirewallPolicyIdpsSignatureResult',
'list_firewall_policy_idps_signature',
'list_firewall_policy_idps_signature_output',
]
@pulumi.output_type
class ListFirewallPolicyIdpsSignatureResult:
"""
Query result
"""
def __init__(__self__, METHOD_NAME=None, signatures=None):
if METHOD_NAME and not isinstance(METHOD_NAME, float):
raise TypeError("Expected argument 'matching_records_count' to be a float")
pulumi.set(__self__, "matching_records_count", METHOD_NAME)
if signatures and not isinstance(signatures, list):
raise TypeError("Expected argument 'signatures' to be a list")
pulumi.set(__self__, "signatures", signatures)
@property
@pulumi.getter(name="matchingRecordsCount")
def METHOD_NAME(self) -> Optional[float]:
"""
Number of total records matching the query.
"""
return pulumi.get(self, "matching_records_count")
@property
@pulumi.getter
def signatures(self) -> Optional[Sequence['outputs.SingleQueryResultResponse']]:
"""
Array containing the results of the query
"""
return pulumi.get(self, "signatures")
class AwaitableListFirewallPolicyIdpsSignatureResult(ListFirewallPolicyIdpsSignatureResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return ListFirewallPolicyIdpsSignatureResult(
METHOD_NAME=self.METHOD_NAME,
signatures=self.signatures)
def list_firewall_policy_idps_signature(filters: Optional[Sequence[pulumi.InputType['FilterItems']]] = None,
firewall_policy_name: Optional[str] = None,
order_by: Optional[pulumi.InputType['OrderBy']] = None,
resource_group_name: Optional[str] = None,
results_per_page: Optional[int] = None,
search: Optional[str] = None,
skip: Optional[int] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListFirewallPolicyIdpsSignatureResult:
"""
Retrieves the current status of IDPS signatures for the relevant policy
:param Sequence[pulumi.InputType['FilterItems']] filters: Contain all filters names and values
:param str firewall_policy_name: The name of the Firewall Policy.
:param pulumi.InputType['OrderBy'] order_by: Column to sort response by
:param str resource_group_name: The name of the resource group.
:param int results_per_page: The number of the results to return in each page
:param str search: Search term in all columns
:param int skip: The number of records matching the filter to skip
"""
__args__ = dict()
__args__['filters'] = filters
__args__['firewallPolicyName'] = firewall_policy_name
__args__['orderBy'] = order_by
__args__['resourceGroupName'] = resource_group_name
__args__['resultsPerPage'] = results_per_page
__args__['search'] = search
__args__['skip'] = skip
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:network/v20230201:listFirewallPolicyIdpsSignature', __args__, opts=opts, typ=ListFirewallPolicyIdpsSignatureResult).value
return AwaitableListFirewallPolicyIdpsSignatureResult(
METHOD_NAME=pulumi.get(__ret__, 'matching_records_count'),
signatures=pulumi.get(__ret__, 'signatures'))
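# --- Hedged usage sketch (call shape only; executes only inside a Pulumi
# program, and the policy/resource-group names are hypothetical) ---
def _example_list_idps_signatures():
    res = list_firewall_policy_idps_signature(
        firewall_policy_name="example-fw-policy",
        resource_group_name="example-rg",
        results_per_page=20,
    )
    return res.signatures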
@_utilities.lift_output_func(list_firewall_policy_idps_signature)
def list_firewall_policy_idps_signature_output(filters: Optional[pulumi.Input[Optional[Sequence[pulumi.InputType['FilterItems']]]]] = None,
firewall_policy_name: Optional[pulumi.Input[str]] = None,
order_by: Optional[pulumi.Input[Optional[pulumi.InputType['OrderBy']]]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
results_per_page: Optional[pulumi.Input[Optional[int]]] = None,
search: Optional[pulumi.Input[Optional[str]]] = None,
skip: Optional[pulumi.Input[Optional[int]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[ListFirewallPolicyIdpsSignatureResult]:
"""
Retrieves the current status of IDPS signatures for the relevant policy
:param Sequence[pulumi.InputType['FilterItems']] filters: Contain all filters names and values
:param str firewall_policy_name: The name of the Firewall Policy.
:param pulumi.InputType['OrderBy'] order_by: Column to sort response by
:param str resource_group_name: The name of the resource group.
:param int results_per_page: The number of the results to return in each page
:param str search: Search term in all columns
:param int skip: The number of records matching the filter to skip
"""
... |
299,414 | tag | from .base import DiscordObject
from aiohttp import FormData
from datetime import datetime
from typing import type_check_only
import attrs
from interactions.client import Client
from interactions.client.const import Absent
from interactions.client.mixins.send import SendMixin
from interactions.models.discord.activity import Activity
from interactions.models.discord.asset import Asset
from interactions.models.discord.channel import DM, TYPE_GUILD_CHANNEL
from interactions.models.discord.color import Color
from interactions.models.discord.enums import MemberFlags, Permissions, PremiumType, Status, UserFlags
from interactions.models.discord.file import UPLOADABLE_TYPE
from interactions.models.discord.guild import Guild
from interactions.models.discord.role import Role
from interactions.models.discord.snowflake import Snowflake_Type
from interactions.models.discord.timestamp import Timestamp
from interactions.models.discord.voice_state import VoiceState
from typing import Any, Dict, Iterable, List, Optional, Set, Union
class _SendDMMixin(SendMixin):
id: Snowflake_Type
async def _send_http_request(
self, message_payload: Union[dict, "FormData"], files: Union[list["UPLOADABLE_TYPE"], None] = ...
) -> dict: ...
# note: what we're trying to achieve here is making isinstance checks as accurate as possible when typehinting
# Member, while "having" the attributes of User (because of __getattr__), is not actually a subclass of either
# BaseUser or User - it's its own separate class
# we still want to typehint Member with all of the User attributes though, so what we do is create fake
# mixins that don't actually exist at runtime, and make BaseUser and User inherit from them
# then, we can make Member inherit the fake user mixin, giving us a Member class with User attributes
# that still understands isinstance(member, User) is false
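# a miniature of the same trick, with invented names (not part of this API):
#
#     @type_check_only
#     class _FakeDuckMixin:
#         sound: str
#     class Duck(_FakeDuckMixin): ...  # public class, carries the annotations
#     class Pet(_FakeDuckMixin): ...   # also gets the annotations, yet
#                                      # isinstance(pet, Duck) stays False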
@type_check_only
@attrs.define(eq=False, order=False, hash=False, kw_only=True) # properly typehints added attributes by attrs
class FakeBaseUserMixin(DiscordObject, _SendDMMixin):
username: str
global_name: str | None
discriminator: str
avatar: Asset
def __str__(self) -> str: ...
@classmethod
def _process_dict(cls, data: Dict[str, Any], client: Client) -> Dict[str, Any]: ...
@property
def METHOD_NAME(self) -> str: ...
@property
def mention(self) -> str: ...
@property
def display_name(self) -> str: ...
@property
def display_avatar(self) -> Asset: ...
@property
def avatar_url(self) -> str: ...
async def fetch_dm(self, *, force: bool) -> DM: ...
def get_dm(self) -> Optional["DM"]: ...
@property
def mutual_guilds(self) -> List["Guild"]: ...
@attrs.define(eq=False, order=False, hash=False, kw_only=True)
class BaseUser(FakeBaseUserMixin): ...
@type_check_only
@attrs.define(eq=False, order=False, hash=False, kw_only=True)
class FakeUserMixin(FakeBaseUserMixin):
bot: bool
system: bool
public_flags: UserFlags
premium_type: PremiumType
banner: Optional["Asset"]
avatar_decoration: Optional["Asset"]
accent_color: Optional["Color"]
activities: list[Activity]
status: Absent[Status]
_fetched: bool
@classmethod
def _process_dict(cls, data: Dict[str, Any], client: Client) -> Dict[str, Any]: ...
@property
def member_instances(self) -> List["Member"]: ...
@attrs.define(eq=False, order=False, hash=False, kw_only=True)
class User(FakeUserMixin, BaseUser): ...
@attrs.define(eq=False, order=False, hash=False, kw_only=True)
class ClientUser(User):
verified: bool
mfa_enabled: bool
email: Optional[str]
locale: Optional[str]
bio: Optional[str]
flags: UserFlags
_guild_ids: Set["Snowflake_Type"]
def _add_guilds(self, guild_ids: Set["Snowflake_Type"]) -> None: ...
@property
def guilds(self) -> List["Guild"]: ...
async def edit(self, *, username: Absent[str] = ..., avatar: Absent[UPLOADABLE_TYPE] = ...) -> None: ...
@attrs.define(eq=False, order=False, hash=False, kw_only=True)
class Member(FakeUserMixin):
bot: bool
nick: Optional[str]
deaf: bool
mute: bool
flags: MemberFlags
joined_at: Timestamp
premium_since: Optional["Timestamp"]
pending: Optional[bool]
guild_avatar: Asset
communication_disabled_until: Optional["Timestamp"]
_guild_id: Snowflake_Type
_role_ids: List["Snowflake_Type"]
_user_ref: frozenset
@classmethod
def _process_dict(cls, data: Dict[str, Any], client: Client) -> Dict[str, Any]: ...
def update_from_dict(self, data) -> None: ...
@property
def user(self) -> User: ...
def __str__(self) -> str: ...
@property
def nickname(self) -> str: ...
@nickname.setter
def nickname(self, nickname: str) -> None: ...
@property
def guild(self) -> Guild: ...
@property
def roles(self) -> List["Role"]: ...
@property
def top_role(self) -> Role: ...
@property
def display_name(self) -> str: ...
@property
def display_avatar(self) -> Asset: ...
@property
def premium(self) -> bool: ...
@property
def guild_permissions(self) -> Permissions: ...
@property
def voice(self) -> Optional["VoiceState"]: ...
def has_permission(self, *permissions: Permissions) -> bool: ...
def channel_permissions(self, channel: TYPE_GUILD_CHANNEL) -> Permissions: ...
async def edit_nickname(self, new_nickname: Absent[str] = ..., reason: Absent[str] = ...) -> None: ...
async def add_role(self, role: Union[Snowflake_Type, Role], reason: Absent[str] = ...) -> None: ...
async def add_roles(self, roles: Iterable[Union[Snowflake_Type, Role]], reason: Absent[str] = ...) -> None: ...
async def remove_role(self, role: Union[Snowflake_Type, Role], reason: Absent[str] = ...) -> None: ...
async def remove_roles(self, roles: Iterable[Union[Snowflake_Type, Role]], reason: Absent[str] = ...) -> None: ...
def has_role(self, *roles: Union[Snowflake_Type, Role]) -> bool: ...
async def timeout(
self,
communication_disabled_until: Union["Timestamp", datetime, int, float, str, None],
reason: Absent[str] = ...,
) -> dict: ...
async def move(self, channel_id: Snowflake_Type) -> None: ...
async def disconnect(self) -> None: ...
async def edit(
self,
*,
nickname: Absent[str] = ...,
roles: Absent[Iterable["Snowflake_Type"]] = ...,
mute: Absent[bool] = ...,
deaf: Absent[bool] = ...,
channel_id: Absent["Snowflake_Type"] = ...,
communication_disabled_until: Absent[Union["Timestamp", None]] = ...,
reason: Absent[str] = ...
) -> None: ...
async def kick(self, reason: Absent[str] = ...) -> None: ...
async def ban(
self, delete_message_days: Absent[int] = ..., delete_message_seconds: int = ..., reason: Absent[str] = ...
) -> None: ... |
299,415 | get raw user link | from django.utils.translation import get_language
from django.utils.translation import gettext as _
from django.utils.translation import gettext_noop
from memoized import memoized
from corehq.apps.es import filters as es_filters
from corehq.apps.es import forms as form_es
from corehq.apps.locations.permissions import location_safe
from corehq.apps.reports.datatables import DataTablesColumn, DataTablesHeader
from corehq.apps.reports.display import FormDisplay
from corehq.apps.reports.filters.forms import FormsByApplicationFilter
from corehq.apps.reports.filters.users import \
ExpandedMobileWorkerFilter as EMWF
from corehq.apps.reports.generic import (
ElasticProjectInspectionReport,
GenericTabularReport,
ProjectInspectionReportParamsMixin,
)
from corehq.apps.reports.models import HQUserType
from corehq.apps.reports.standard import (
DatespanMixin,
ProjectReport,
ProjectReportParametersMixin,
)
from corehq.apps.reports.standard.monitoring import (
CompletionOrSubmissionTimeMixin,
MultiFormDrilldownMixin,
)
from corehq.apps.reports.util import datespan_from_beginning
from corehq.apps.users.util import SYSTEM_USER_ID
from corehq.const import MISSING_APP_ID
from corehq.toggles import SUPPORT
class ProjectInspectionReport(
ProjectInspectionReportParamsMixin,
GenericTabularReport,
ProjectReport,
ProjectReportParametersMixin
):
"""
Base class for this reporting section
"""
exportable = False
asynchronous = False
ajax_pagination = True
fields = ['corehq.apps.reports.filters.users.UserTypeFilter',
'corehq.apps.reports.filters.users.SelectMobileWorkerFilter']
def get_user_link(self, user):
user_link = self.METHOD_NAME(user)
return self.table_cell(user.raw_username, user_link)
def METHOD_NAME(self, user):
raise NotImplementedError
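# --- Hedged sketch of a concrete subclass: the masked METHOD_NAME is expected
# to return the profile URL for the user; the path below is a hypothetical
# placeholder, not the real CommCare HQ URL scheme ---
class _ExampleInspectionReport(ProjectInspectionReport):
    def METHOD_NAME(self, user):
        return "/a/{}/settings/users/{}/".format(self.domain, user.user_id)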
class SubmitHistoryMixin(ElasticProjectInspectionReport,
ProjectReportParametersMixin,
CompletionOrSubmissionTimeMixin, MultiFormDrilldownMixin,
DatespanMixin):
name = gettext_noop('Submit History')
slug = 'submit_history'
fields = [
'corehq.apps.reports.filters.users.ExpandedMobileWorkerFilter',
'corehq.apps.reports.filters.forms.FormsByApplicationFilter',
'corehq.apps.reports.filters.forms.CompletionOrSubmissionTimeFilter',
'corehq.apps.reports.filters.dates.DatespanFilter',
]
ajax_pagination = True
include_inactive = True
@property
def default_datespan(self):
return datespan_from_beginning(self.domain_object, self.timezone)
def _get_users_filter(self, mobile_user_and_group_slugs):
if (
EMWF.no_filters_selected(mobile_user_and_group_slugs)
and self.request.couch_user.has_permission(self.domain, 'access_all_locations')
):
return None
user_ids = (EMWF.user_es_query(self.domain,
mobile_user_and_group_slugs,
self.request.couch_user)
.values_list('_id', flat=True))
if HQUserType.UNKNOWN in EMWF.selected_user_types(mobile_user_and_group_slugs):
user_ids.append(SYSTEM_USER_ID)
return form_es.user_id(user_ids)
@staticmethod
def _form_filter(form):
app_id = form.get('app_id', None)
if app_id and app_id != MISSING_APP_ID:
return es_filters.AND(
form_es.app(app_id),
form_es.xmlns(form['xmlns'])
)
return form_es.xmlns(form['xmlns'])
@property
def es_query(self):
time_filter = form_es.submitted if self.by_submission_time else form_es.completed
mobile_user_and_group_slugs = self.request.GET.getlist(EMWF.slug)
query = (form_es.FormES()
.domain(self.domain)
.filter(time_filter(gte=self.datespan.startdate,
lt=self.datespan.enddate_adjusted)))
users_filter = self._get_users_filter(mobile_user_and_group_slugs)
if users_filter:
query = query.filter(users_filter)
# filter results by app and xmlns if applicable
if FormsByApplicationFilter.has_selections(self.request):
form_values = list(self.all_relevant_forms.values())
if form_values:
query = query.OR(*[self._form_filter(f) for f in form_values])
else:
query = query.NOT(es_filters.missing("app_id"))
return query
@property
@memoized
def es_query_result(self):
return (self.es_query
.set_sorting_block(self.get_sorting_block())
.start(self.pagination.start)
.size(self.pagination.count)
.run())
def get_sorting_block(self):
sorting_block = super(SubmitHistoryMixin, self).get_sorting_block()
if sorting_block:
return sorting_block
else:
return [{self.time_field: {'order': 'desc'}}]
@property
def time_field(self):
return 'received_on' if self.by_submission_time else 'form.meta.timeEnd'
@property
def total_records(self):
return int(self.es_query_result.total)
@location_safe
class SubmitHistory(SubmitHistoryMixin, ProjectReport):
@property
def show_extra_columns(self):
return self.request.user and SUPPORT.enabled(self.request.user.username)
@classmethod
def display_in_dropdown(cls, domain=None, project=None, user=None):
if project and project.commtrack_enabled:
return False
else:
return True
@classmethod
def get_subpages(cls):
def _get_form_name(request=None, **context):
if 'instance' in context:
try:
return context['instance'].form_data['@name']
except KeyError:
pass
return _('View Form')
from corehq.apps.reports.views import FormDataView
return [
{
'title': _get_form_name,
'urlname': FormDataView.urlname,
},
]
@property
def headers(self):
h = [
DataTablesColumn(_("View Form"), css_class="view-form-link"),
DataTablesColumn(_("Username"), prop_name='form.meta.username'),
DataTablesColumn(
_("Submission Time") if self.by_submission_time
else _("Completion Time"),
prop_name=self.time_field
),
DataTablesColumn(_("Form"), prop_name='form.@name'),
]
if self.show_extra_columns:
h.append(DataTablesColumn(_("Sync Log")))
return DataTablesHeader(*h)
@property
def rows(self):
for form in self.es_query_result.hits:
display = FormDisplay(form, self, lang=get_language())
row = [
display.form_data_link,
display.username,
display.submission_or_completion_time,
display.readable_form_name,
]
if self.show_extra_columns:
row.append(form.get('last_sync_token', ''))
yield row |
299,416 | test list mutable configs unknown pipette id | import json
import os
from pathlib import Path
from typing import Dict, Any, cast, Union, Generator
import pytest
from opentrons_shared_data.pipette import (
mutable_configurations,
types,
pipette_definition,
pipette_load_name_conversions as pip_conversions,
load_data,
dev_types,
)
TEST_SERIAL_NUMBER = "P50MV1520200304"
TestOverrideType = Dict[str, Union[float, int, bool]]
@pytest.fixture
def TMPFILE_DATA() -> Dict[str, Any]:
return {
"dropTipShake": True,
"doubleDropTip": True,
"model": "p50_multi_v1.5",
"quirks": {"pickUpPresses": True, "dropTipShake": True, "doubleDropTip": True},
"pickUpSpeed": {
"value": 5.0,
"min": 1,
"max": 100,
"units": "mm/s",
"type": "float",
"default": 30,
},
}
@pytest.fixture
def override_configuration_path(tmp_path: Path) -> Generator[Path, None, None]:
os.environ["OT_API_CONFIG_DIR"] = str(tmp_path)
tmp_path.mkdir(parents=True, exist_ok=True)
with_pip_path = tmp_path / Path("pipettes")
with_pip_path.mkdir(parents=True, exist_ok=True)
yield with_pip_path
del os.environ["OT_API_CONFIG_DIR"]
@pytest.fixture
def overrides_fixture(
override_configuration_path: Path, TMPFILE_DATA: Dict[str, Any]
) -> types.MutableConfig:
with open(override_configuration_path / f"{TEST_SERIAL_NUMBER}.json", "w") as f:
json.dump(TMPFILE_DATA, f)
return types.MutableConfig.build(**TMPFILE_DATA["pickUpSpeed"], name="pickUpSpeed")
def METHOD_NAME(
override_configuration_path: Path,
) -> None:
"""Test unknown pipette id mutable configs.
Test that a user receives a list of all possible mutable configurations
with the default value equal to the regular value.
"""
found_configurations = mutable_configurations.list_mutable_configs(
TEST_SERIAL_NUMBER, override_configuration_path
)
for c in found_configurations:
if isinstance(c, str):
# model string, ignore
continue
if isinstance(c, types.QuirkConfig):
assert isinstance(c.value, bool)
else:
assert c.default == c.value
def test_list_mutable_configs_known_pipette_id(
overrides_fixture: types.MutableConfig, override_configuration_path: Path
) -> None:
"""Test known pipette id mutable configs.
Test that a user receives a list of all possible mutable configurations
with the expected overrides also listed.
"""
found_configurations = mutable_configurations.list_mutable_configs(
TEST_SERIAL_NUMBER, override_configuration_path
)
for c in found_configurations:
if isinstance(c, str):
# model string, ignore
continue
if overrides_fixture.name == c.name:
assert c.value == overrides_fixture.value
elif isinstance(c, types.QuirkConfig):
assert isinstance(c.value, bool)
else:
assert c.default == c.value
@pytest.mark.parametrize(
argnames=["overrides_dict", "saved_dict"],
argvalues=[
[
{"pickUpCurrent": 0.5, "dropTipSpeed": 5, "dropTipShake": False},
{
"quirks": {"dropTipShake": False},
"pickUpCurrent": {
"value": 0.5,
"default": 0.8,
"units": "amps",
"type": "float",
"min": 0.1,
"max": 2.0,
},
"model": "p50_multi_v1.5",
"dropTipSpeed": {
"value": 5,
"default": 5.0,
"units": "mm/s",
"type": "float",
"min": 0.01,
"max": 30,
},
},
]
],
)
def test_save_new_overrides_new_file(
override_configuration_path: Path,
overrides_dict: TestOverrideType,
saved_dict: Dict[str, Any],
) -> None:
mutable_configurations.save_overrides(
TEST_SERIAL_NUMBER, overrides_dict, override_configuration_path
)
with open(override_configuration_path / f"{TEST_SERIAL_NUMBER}.json") as f:
new_file = json.load(f)
assert saved_dict == new_file
@pytest.mark.parametrize(
argnames=["overrides_dict"],
argvalues=[
[{"pickUpCurrent": 1, "pickUpSpeed": 10, "dropTipShake": False}],
[{"pickUpCurrent": 2}],
],
)
def test_save_new_overrides_update_file(
override_configuration_path: Path,
overrides_fixture: types.MutableConfig,
overrides_dict: TestOverrideType,
TMPFILE_DATA: Dict[str, Any],
) -> None:
mutable_configurations.save_overrides(
TEST_SERIAL_NUMBER, overrides_dict, override_configuration_path
)
with open(override_configuration_path / f"{TEST_SERIAL_NUMBER}.json") as f:
new_file = json.load(f)
for k, v in overrides_dict.items():
if isinstance(v, bool):
TMPFILE_DATA["quirks"][k] = v
elif TMPFILE_DATA.get(k):
TMPFILE_DATA[k]["value"] = v
TMPFILE_DATA["pickUpCurrent"] = {
"default": 0.8,
"max": 2.0,
"min": 0.1,
"type": "float",
"units": "amps",
"value": overrides_dict["pickUpCurrent"],
}
del TMPFILE_DATA["quirks"]["pickUpPresses"]
assert TMPFILE_DATA == new_file
@pytest.mark.parametrize(
argnames=["overrides_dict"],
argvalues=[
[{"pickUpCurrent": 1231.213, "dropTipSpeed": 121, "dropTipShake": False}],
[{"quirk123": True}],
],
)
def test_save_invalid_overrides(
overrides_fixture: types.MutableConfig,
override_configuration_path: Path,
overrides_dict: TestOverrideType,
TMPFILE_DATA: Dict[str, Any],
) -> None:
with pytest.raises(ValueError):
mutable_configurations.save_overrides(
TEST_SERIAL_NUMBER, overrides_dict, override_configuration_path
)
with open(override_configuration_path / f"{TEST_SERIAL_NUMBER}.json") as f:
new_file = json.load(f)
assert TMPFILE_DATA == new_file
@pytest.mark.parametrize(
argnames=["pipette_model", "serial_number"],
argvalues=[
[
pip_conversions.convert_pipette_model(
cast(dev_types.PipetteModel, "p1000_96_v3.3")
),
"P1KHV3320230629",
],
[
pip_conversions.convert_pipette_model(
cast(dev_types.PipetteModel, "p50_multi_v1.5")
),
TEST_SERIAL_NUMBER,
],
],
)
def test_load_with_overrides(
overrides_fixture: types.MutableConfig,
pipette_model: pipette_definition.PipetteModelVersionType,
serial_number: str,
override_configuration_path: Path,
) -> None:
"""Test that you can load configurations both with pre-existing overrides and non-pre-existing overrides."""
updated_configurations = mutable_configurations.load_with_mutable_configurations(
pipette_model, override_configuration_path, serial_number
)
loaded_base_configurations = load_data.load_definition(
pipette_model.pipette_type,
pipette_model.pipette_channels,
pipette_model.pipette_version,
)
if serial_number == TEST_SERIAL_NUMBER:
dict_loaded_configs = loaded_base_configurations.dict(by_alias=True)
dict_loaded_configs["pickUpTipConfigurations"]["speed"] = 5.0
updated_configurations_dict = updated_configurations.dict(by_alias=True)
assert set(dict_loaded_configs.pop("quirks")) == set(
updated_configurations_dict.pop("quirks")
)
assert updated_configurations_dict == dict_loaded_configs
else:
assert updated_configurations == loaded_base_configurations |
299,417 | run job | from . import helpers
def METHOD_NAME(config):
config["general"]["relevant_filetypes"] = [
"log",
"mon",
"outdata",
"restart_out",
"bin",
"config",
"forcing",
"input",
"restart_in",
"ignore",
]
helpers.evaluate(config, "postprocess", "post_recipe")
return config
def _assemble_postprocess_tasks(config):
"""
Generates all tasks for post processing which will be written to the run file.
Parameters
----------
    config : dict
        The experiment configuration. The log file handle is read from
        ``config["general"]["post_file"]``, and each component's
        ``postprocess_tasks`` definitions drive the generated commands.
    Returns
    -------
    config : dict
        The configuration, with the assembled command list stored under
        ``config["general"]["post_task_list"]``.
"""
    post_file = config["general"]["post_file"]
post_task_list = []
for component in config["general"]["valid_model_names"]:
post_file.write(40 * "+ " + "\n")
post_file.write("Generating post-processing tasks for: %s \n" % component)
post_task_list.append("\n#Postprocessing %s\n" % component)
post_task_list.append(
"cd " + config[component]["experiment_outdata_dir"] + "\n"
)
pconfig_tasks = config[component].get("postprocess_tasks", {})
post_file.write("Configuration for post processing: %s \n" % pconfig_tasks)
for outfile in pconfig_tasks:
post_file.write("Generating task to create: %s \n" % outfile)
ofile_config = pconfig_tasks[outfile]
# TODO(PG): This can be cleaned up. I probably actually want a
# ChainMap here for more than just the bottom...
#
# Run CDO tasks (default)
task_definition = (
config[component]
.get("postprocess_task_definitions", {})
.get(ofile_config["post_process"])
)
method_definition = (
config[component]
.get("postprocess_method_definitions", {})
.get(task_definition["method"])
)
program = method_definition.get("program", task_definition["method"])
possible_args = method_definition.get("possible_args", [])
required_args = method_definition.get("required_args", [])
possible_flags = method_definition.get("possible_flags", [])
required_flags = method_definition.get("required_flags", [])
            outfile_flags = ofile_config.get("flags", [])
            outfile_args = ofile_config.get("args", {})
            task_def_flags = task_definition.get("flags", [])
            task_def_args = task_definition.get("args", {})
args = collections.ChainMap(outfile_args, task_def_args)
flags = outfile_flags + task_def_flags
flags = ["-" + flag for flag in flags]
# See here: https://stackoverflow.com/questions/21773866/how-to-sort-a-dictionary-based-on-a-list-in-python
all_call_things = {
"program": program,
"outfile": outfile,
**args,
"flags": flags,
}
print(all_call_things)
index_map = {v: i for i, v in enumerate(method_definition["call_order"])}
call_list = sorted(
all_call_things.items(), key=lambda pair: index_map[pair[0]]
)
call = []
for call_id, call_part in call_list:
if isinstance(call_part, str):
call.append(call_part)
elif isinstance(call_part, list):
call.append(" ".join(call_part))
else:
raise TypeError(
"Something straaaange happened. Consider starting the debugger."
)
post_file.write(" ".join(call) + "\n")
post_task_list.append(" ".join(call))
post_task_list.append("cd -\n")
config["general"]["post_task_list"] = post_task_list
return config
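# --- Hedged illustration of the call-ordering trick used above (standalone;
# the call_order entries and part values are invented for this sketch) ---
def _order_call_parts_example():
    call_order = ["program", "flags", "infile", "outfile"]
    parts = {"outfile": "out.nc", "program": "cdo", "flags": ["-O"], "infile": "in.nc"}
    index_map = {v: i for i, v in enumerate(call_order)}
    ordered = sorted(parts.items(), key=lambda pair: index_map[pair[0]])
    assert [key for key, _ in ordered] == call_order
    return ordered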
# ?????
# def write_simple_postscript(config):
# batch_system.write_simple_runscript(config)
# return config |
299,418 | eb | #!/usr/bin/python
#
# (c) 2008 Mandriva, http://www.mandriva.com/
#
# $Id$
#
# This file is part of Pulse 2, http://pulse2.mandriva.org
#
# Pulse 2 is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Pulse 2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pulse 2; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
""" A XMLRPC Client
Arguments must follow this convention:
- args are splitted over ';' (we call it an arg)
=> use --func myfunc --args "a;b;c" to request func('a', 'b', 'c')
- in one arg, array/dict component are splitted over '|' (we call it a token)
=> use --func myfunc --args "a|b" to request func(['a', 'b'])
- if an token contain '=', it is interpreted as a tuple
=> use --func myfunc --args "a=b|c=d" to request func(['a': 'b', 'c': 'd'])
- if an token contain ',', it is interpreted as a list
=> use --func myfunc --args "a,b,c,d" to request func('a', 'b', 'c', 'd')
=> use --func myfunc --args "a,b|c,d" to request func(['a', 'b'], ['c', 'd'])
"""
import twisted.python.usage
import twisted.internet.reactor
import twisted.web.xmlrpc
import sys
class Options(twisted.python.usage.Options):
optParameters = [
["func", None, None, "The XML RPC Function to use"],
["args", None, None, "The XML RPC Arguments to use, see below"],
["server", None, None, "The XML RPC server to contact, URI format"],
]
def _cb(result): # the server answered us
print "RESULT : %s" % result
twisted.internet.reactor.callLater(0, _end)
def METHOD_NAME(reason): # the call failed (e.g., server unreachable)
print "ERROR : %s" % reason
twisted.internet.reactor.callLater(0, _end)
def _start():
(method, parsedargs) = parseCliArgs(config)
return twisted.web.xmlrpc.Proxy(config["server"]).\
callRemote(method, *parsedargs).\
addCallback(_cb).\
addErrback(METHOD_NAME)
def _end():
twisted.internet.reactor.stop()
# parse cli args
def parseCliArgs(config):
args=[]
method = config["func"]
if config["args"]:
args = config["args"].split(';')
parsedargs = []
for arg in args: # parse args
tokenlist = arg.split('|') # split arrays args
for token in tokenlist: # iterate over array content
if token.count('=') == 1: # found a dict token
(key, val) = token.split('=') # process it
                try:
                    items[key] = val # extend the dict built from previous tokens
                except:
                    items = {key: val} # first token of this arg: start a new dict
elif token.count(',') > 0: # found a list token
try:
items += [token.split(',')]
except:
items = [token.split(',')]
elif token.count('~') == 1: # found a number
try:
                        items += [int(token.split('~')[1])]
except:
items = [int(token.split('~')[1])]
else: # found something else (simple value ?)
try:
                        items += [token]
except:
items = [token]
        if type(items) == type({}):
            parsedargs.append(items)
        else:
            parsedargs += items
        del(items)
return (method, parsedargs)
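# --- Hedged self-checks mirroring the module docstring (standalone; a plain
# dict stands in for the Options object, which is indexed the same way) ---
def _parse_examples():
    assert parseCliArgs({"func": "f", "args": "a;b;c"}) == ("f", ["a", "b", "c"])
    assert parseCliArgs({"func": "f", "args": "a=b|c=d"}) == ("f", [{"a": "b", "c": "d"}])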
config = Options()
try:
config.parseOptions()
except twisted.python.usage.UsageError, errortext:
print '%s: %s' % (sys.argv[0], errortext)
print '%s: Try --help for usage details.' % (sys.argv[0])
sys.exit(1)
twisted.internet.reactor.callWhenRunning(_start)
twisted.internet.reactor.run() |
299,419 | make my optimizer | import inspect
import pickle
from types import GeneratorType
from typing import Any, Callable, Dict, Iterable, List, Optional, Union
import catalogue
import numpy
import pytest
try:
from pydantic.v1 import BaseModel, PositiveInt, StrictBool, StrictFloat, constr
except ImportError:
from pydantic import BaseModel, PositiveInt, StrictBool, StrictFloat, constr # type: ignore
import thinc.config
from thinc.api import Config, Model, NumpyOps, RAdam
from thinc.config import ConfigValidationError
from thinc.types import Generator, Ragged
from thinc.util import partial
from .util import make_tempdir
EXAMPLE_CONFIG = """
[optimizer]
@optimizers = "Adam.v1"
beta1 = 0.9
beta2 = 0.999
use_averages = true
[optimizer.learn_rate]
@schedules = "warmup_linear.v1"
initial_rate = 0.1
warmup_steps = 10000
total_steps = 100000
[pipeline]
[pipeline.parser]
name = "parser"
factory = "parser"
[pipeline.parser.model]
@layers = "spacy.ParserModel.v1"
hidden_depth = 1
hidden_width = 64
token_vector_width = 128
[pipeline.parser.model.tok2vec]
@layers = "Tok2Vec.v1"
width = ${pipeline.parser.model:token_vector_width}
[pipeline.parser.model.tok2vec.embed]
@layers = "spacy.MultiFeatureHashEmbed.v1"
width = ${pipeline.parser.model.tok2vec:width}
[pipeline.parser.model.tok2vec.embed.hidden]
@layers = "MLP.v1"
depth = 1
pieces = 3
layer_norm = true
outputs = ${pipeline.parser.model.tok2vec.embed:width}
[pipeline.parser.model.tok2vec.encode]
@layers = "spacy.MaxoutWindowEncoder.v1"
depth = 4
pieces = 3
window_size = 1
[pipeline.parser.model.lower]
@layers = "spacy.ParserLower.v1"
[pipeline.parser.model.upper]
@layers = "thinc.Linear.v1"
"""
OPTIMIZER_CFG = """
[optimizer]
@optimizers = "Adam.v1"
beta1 = 0.9
beta2 = 0.999
use_averages = true
[optimizer.learn_rate]
@schedules = "warmup_linear.v1"
initial_rate = 0.1
warmup_steps = 10000
total_steps = 100000
"""
class my_registry(thinc.config.registry):
cats = catalogue.create("thinc", "tests", "cats", entry_points=False)
class HelloIntsSchema(BaseModel):
hello: int
world: int
class Config:
extra = "forbid"
class DefaultsSchema(BaseModel):
required: int
optional: str = "default value"
class Config:
extra = "forbid"
class ComplexSchema(BaseModel):
outer_req: int
outer_opt: str = "default value"
level2_req: HelloIntsSchema
level2_opt: DefaultsSchema = DefaultsSchema(required=1)
@my_registry.cats.register("catsie.v1")
def catsie_v1(evil: StrictBool, cute: bool = True) -> str:
if evil:
return "scratch!"
else:
return "meow"
@my_registry.cats.register("catsie.v2")
def catsie_v2(evil: StrictBool, cute: bool = True, cute_level: int = 1) -> str:
if evil:
return "scratch!"
else:
if cute_level > 2:
return "meow <3"
return "meow"
good_catsie = {"@cats": "catsie.v1", "evil": False, "cute": True}
ok_catsie = {"@cats": "catsie.v1", "evil": False, "cute": False}
bad_catsie = {"@cats": "catsie.v1", "evil": True, "cute": True}
worst_catsie = {"@cats": "catsie.v1", "evil": True, "cute": False}
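# --- Hedged mini-example: resolving one of the catsie blocks defined above
# through my_registry (standalone; uses nothing beyond this file) ---
def _resolve_catsie_example():
    resolved = my_registry.resolve({"cat": good_catsie})
    assert resolved["cat"] == "meow"
    return resolved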
def test_make_config_positional_args_dicts():
cfg = {
"hyper_params": {"n_hidden": 512, "dropout": 0.2, "learn_rate": 0.001},
"model": {
"@layers": "chain.v1",
"*": {
"relu1": {"@layers": "Relu.v1", "nO": 512, "dropout": 0.2},
"relu2": {"@layers": "Relu.v1", "nO": 512, "dropout": 0.2},
"softmax": {"@layers": "Softmax.v1"},
},
},
"optimizer": {"@optimizers": "Adam.v1", "learn_rate": 0.001},
}
resolved = my_registry.resolve(cfg)
model = resolved["model"]
X = numpy.ones((784, 1), dtype="f")
model.initialize(X=X, Y=numpy.zeros((784, 1), dtype="f"))
model.begin_update(X)
model.finish_update(resolved["optimizer"])
def test_objects_from_config():
config = {
"optimizer": {
"@optimizers": "my_cool_optimizer.v1",
"beta1": 0.2,
"learn_rate": {
"@schedules": "my_cool_repetitive_schedule.v1",
"base_rate": 0.001,
"repeat": 4,
},
}
}
@thinc.registry.optimizers.register("my_cool_optimizer.v1")
def METHOD_NAME(learn_rate: List[float], beta1: float):
return RAdam(learn_rate, beta1=beta1)
@thinc.registry.schedules("my_cool_repetitive_schedule.v1")
def decaying(base_rate: float, repeat: int) -> List[float]:
return repeat * [base_rate]
optimizer = my_registry.resolve(config)["optimizer"]
assert optimizer.b1 == 0.2
assert "learn_rate" in optimizer.schedules
assert optimizer.learn_rate == 0.001
def test_handle_generic_model_type():
"""Test that validation can handle checks against arbitrary generic
types in function argument annotations."""
@my_registry.layers("my_transform.v1")
def my_transform(model: Model[int, int]):
model.name = "transformed_model"
return model
cfg = {"@layers": "my_transform.v1", "model": {"@layers": "Linear.v1"}}
model = my_registry.resolve({"test": cfg})["test"]
assert isinstance(model, Model)
assert model.name == "transformed_model"
def test_arg_order_is_preserved():
str_cfg = """
[model]
[model.chain]
@layers = "chain.v1"
[model.chain.*.hashembed]
@layers = "HashEmbed.v1"
nO = 8
nV = 8
[model.chain.*.expand_window]
@layers = "expand_window.v1"
window_size = 1
"""
cfg = Config().from_str(str_cfg)
resolved = my_registry.resolve(cfg)
model = resolved["model"]["chain"]
# Fails when arguments are sorted, because expand_window
# is sorted before hashembed.
assert model.name == "hashembed>>expand_window" |
299,420 | open | from __future__ import absolute_import, division, print_function
import zipfile
import fsspec
from fsspec.archive import AbstractArchiveFileSystem
class ZipFileSystem(AbstractArchiveFileSystem):
"""Read/Write contents of ZIP archive as a file-system
Keeps file object open while instance lives.
This class is pickleable, but not necessarily thread-safe
"""
root_marker = ""
protocol = "zip"
cachable = False
def __init__(
self,
fo="",
mode="r",
target_protocol=None,
target_options=None,
compression=zipfile.ZIP_STORED,
allowZip64=True,
compresslevel=None,
**kwargs,
):
"""
Parameters
----------
fo: str or file-like
Contains ZIP, and must exist. If a str, will fetch file using
:meth:`~fsspec.open_files`, which must return one file exactly.
mode: str
Accept: "r", "w", "a"
target_protocol: str (optional)
If ``fo`` is a string, this value can be used to override the
FS protocol inferred from a URL
target_options: dict (optional)
Kwargs passed when instantiating the target FS, if ``fo`` is
a string.
compression, allowZip64, compresslevel: passed to ZipFile
Only relevant when creating a ZIP
"""
super().__init__(self, **kwargs)
if mode not in set("rwa"):
raise ValueError(f"mode '{mode}' no understood")
self.mode = mode
if isinstance(fo, str):
fo = fsspec.open(
fo, mode=mode + "b", protocol=target_protocol, **(target_options or {})
)
self.of = fo
self.fo = fo.__enter__() # the whole instance is a context
self.zip = zipfile.ZipFile(
self.fo,
mode=mode,
compression=compression,
allowZip64=allowZip64,
compresslevel=compresslevel,
)
self.dir_cache = None
@classmethod
def _strip_protocol(cls, path):
# zip file paths are always relative to the archive root
return super()._strip_protocol(path).lstrip("/")
def __del__(self):
if hasattr(self, "zip"):
self.close()
del self.zip
def close(self):
"""Commits any write changes to the file. Done on ``del`` too."""
self.zip.close()
def _get_dirs(self):
if self.dir_cache is None or self.mode in set("wa"):
# when writing, dir_cache is always in the ZipFile's attributes,
# not read from the file.
files = self.zip.infolist()
self.dir_cache = {
dirname + "/": {"name": dirname + "/", "size": 0, "type": "directory"}
for dirname in self._all_dirnames(self.zip.namelist())
}
for z in files:
f = {s: getattr(z, s, None) for s in zipfile.ZipInfo.__slots__}
f.update(
{
"name": z.filename,
"size": z.file_size,
"type": ("directory" if z.is_dir() else "file"),
}
)
self.dir_cache[f["name"]] = f
def pipe_file(self, path, value, **kwargs):
# override upstream, because we know the exact file size in this case
self.zip.writestr(path, value, **kwargs)
def METHOD_NAME(
self,
path,
mode="rb",
block_size=None,
autocommit=True,
cache_options=None,
**kwargs,
):
path = self._strip_protocol(path)
if "r" in mode and self.mode in set("wa"):
if self.exists(path):
raise IOError("ZipFS can only be open for reading or writing, not both")
raise FileNotFoundError(path)
if "r" in self.mode and "w" in mode:
raise IOError("ZipFS can only be open for reading or writing, not both")
out = self.zip.open(path, mode.strip("b"))
if "r" in mode:
info = self.info(path)
out.size = info["size"]
out.name = info["name"]
        return out
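# --- Hedged round-trip sketch for the filesystem above (standalone; writes a
# ZIP into a temp directory and reads it back; METHOD_NAME is the open method) ---
def _zipfs_roundtrip_example():
    import os
    import tempfile
    path = os.path.join(tempfile.mkdtemp(), "demo.zip")
    zfs = ZipFileSystem(path, mode="w")
    zfs.pipe_file("hello.txt", b"hi there")
    zfs.close()
    zfs = ZipFileSystem(path, mode="r")
    with zfs.METHOD_NAME("hello.txt", "rb") as f:
        assert f.read() == b"hi there"
    return path |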
299,421 | format percentage | import json
import re
import sys
TEXT_CODES = {'bold': {'start': '\x1b[1m',
'end': '\x1b[22m'},
'cyan': {'start': '\x1b[36m',
'end': '\x1b[39m'},
'blue': {'start': '\x1b[34m',
'end': '\x1b[39m'},
'red': {'start': '\x1b[31m',
'end': '\x1b[39m'},
'magenta': {'start': '\x1b[35m',
'end': '\x1b[39m'},
'green': {'start': '\x1b[32m',
'end': '\x1b[39m'},
'yellow': {'start': '\x1b[33m',
'end': '\x1b[39m'},
'underline': {'start': '\x1b[4m',
'end': '\x1b[24m'}}
class TextCodesStripper:
keys = [re.escape(v['start']) for k,v in TEXT_CODES.items()]
keys += [re.escape(v['end']) for k,v in TEXT_CODES.items()]
pattern = re.compile("|".join(keys))
@staticmethod
def strip (s):
return re.sub(TextCodesStripper.pattern, '', s)
def clear_formatting(s):
return TextCodesStripper.strip(s)
def format_num (size, suffix = "", compact = True, opts = None):
if opts is None:
opts = ()
txt = "NaN"
if type(size) == str:
return "N/A"
u = ''
if compact:
for unit in ['','K','M','G','T','P']:
if abs(size) < 1000.0:
u = unit
break
size /= 1000.0
if isinstance(size, float):
txt = "%3.2f" % (size)
else:
txt = "{:,}".format(size)
if u or suffix:
txt += " {:}{:}".format(u, suffix)
if isinstance(opts, tuple):
return format_text(txt, *opts)
else:
return format_text(txt, (opts))
def format_time (t_sec):
if t_sec < 0:
return "infinite"
if t_sec == 0:
return "zero"
if t_sec < 1:
# low numbers
for unit in ['ms', 'usec', 'ns']:
t_sec *= 1000.0
if t_sec >= 1.0:
return '{:,.2f} [{:}]'.format(t_sec, unit)
return "NaN"
else:
# seconds
if t_sec < 60.0:
return '{:,.2f} [{:}]'.format(t_sec, 'sec')
# minutes
t_sec /= 60.0
if t_sec < 60.0:
return '{:,.2f} [{:}]'.format(t_sec, 'minutes')
# hours
t_sec /= 60.0
if t_sec < 24.0:
return '{:,.2f} [{:}]'.format(t_sec, 'hours')
# days
t_sec /= 24.0
return '{:,.2f} [{:}]'.format(t_sec, 'days')
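# --- Hedged self-checks for the helpers above (standalone; expected strings
# follow directly from the unit tables in format_num and format_time) ---
def _format_examples():
    assert clear_formatting(bold("x")) == "x"
    assert format_time(0.5) == "500.00 [ms]"
    assert format_num(1500, suffix="pkt/s") == "1.50 Kpkt/s"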
def METHOD_NAME (size):
return "%0.2f %%" % (size)
def bold(text):
return text_attribute(text, 'bold')
def cyan(text):
return text_attribute(text, 'cyan')
def blue(text):
return text_attribute(text, 'blue')
def red(text):
return text_attribute(text, 'red')
def magenta(text):
return text_attribute(text, 'magenta')
def green(text):
return text_attribute(text, 'green')
def yellow(text):
return text_attribute(text, 'yellow')
def underline(text):
return text_attribute(text, 'underline')
# apply attribute on each non-empty line
def text_attribute(text, attribute):
return '\n'.join(['{start}{txt}{end}'.format(
start = TEXT_CODES[attribute]['start'],
txt = line,
end = TEXT_CODES[attribute]['end'])
if line else '' for line in str(text).split('\n')])
FUNC_DICT = {'blue': blue,
'bold': bold,
'green': green,
'yellow': yellow,
'cyan': cyan,
'magenta': magenta,
'underline': underline,
'red': red}
def __format_text_tty(text, *args):
return_string = text
for i in args:
func = FUNC_DICT.get(i)
if func:
return_string = func(return_string)
return return_string
def __format_text_non_tty (text, *args):
return str(text)
# choose according to stdout type
format_text = __format_text_tty if sys.stdout.isatty() else __format_text_non_tty
def format_threshold (value, red_zone, green_zone):
try:
if value >= red_zone[0] and value <= red_zone[1]:
return format_text("{0}".format(value), 'red')
if value >= green_zone[0] and value <= green_zone[1]:
return format_text("{0}".format(value), 'green')
except TypeError:
# if value is not comparable or not a number - skip this
pass
return "{0}".format(value)
# pretty print for JSON
def pretty_json (json_str, use_colors = True):
pretty_str = json.dumps(json.loads(json_str), indent = 4, separators=(',', ': '), sort_keys = True)
if not use_colors:
return pretty_str
try:
# int numbers
pretty_str = re.sub(r'([ ]*:[ ]+)(\-?[1-9][0-9]*[^.])',r'\1{0}'.format(blue(r'\2')), pretty_str)
# float
pretty_str = re.sub(r'([ ]*:[ ]+)(\-?[1-9][0-9]*\.[0-9]+)',r'\1{0}'.format(magenta(r'\2')), pretty_str)
        # strings
pretty_str = re.sub(r'([ ]*:[ ]+)("[^"]*")',r'\1{0}'.format(red(r'\2')), pretty_str)
pretty_str = re.sub(r"('[^']*')", r'{0}\1{1}'.format(TEXT_CODES['magenta']['start'],
TEXT_CODES['red']['start']), pretty_str)
    except Exception:
pass
return pretty_str
if __name__ == "__main__":
pass |
299,422 | calc md5 | #!/usr/bin/env python
import time
import hashlib
import io
import logging
from netmiko import ConnectHandler
def METHOD_NAME(file_name=None, contents=None):
"""Compute MD5 hash of file."""
if contents is not None:
pass
elif file_name:
with open(file_name, "rb") as f:
contents = f.read()
else:
raise ValueError("Most specify either file_name or contents")
return hashlib.md5(contents.strip()).hexdigest()
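# --- Hedged check of METHOD_NAME (calc_md5) above: hashing by file name or by
# raw contents yields the same digest (standalone; uses a throwaway file) ---
def _calc_md5_example():
    import tempfile
    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(b"show version\n")
        name = f.name
    assert METHOD_NAME(file_name=name) == METHOD_NAME(contents=b"show version\n")
    return name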
def read_session_log(session_file, append=False):
"""Leading white-space can vary. Strip off leading white-space."""
with open(session_file, "rb") as f:
if append is True:
line = f.readline().decode()
assert "Initial file contents" in line
log_content = f.read().lstrip()
return log_content
def session_action(my_connect, command):
"""Common actions in the netmiko session to generate the session log."""
time.sleep(1)
my_connect.clear_buffer()
output = my_connect.send_command(command)
my_connect.disconnect()
return output
def session_log_md5(session_file, compare_file):
"""Compare the session_log MD5 to the compare_file MD5"""
compare_log_md5 = METHOD_NAME(file_name=compare_file)
log_content = read_session_log(session_file)
session_log_md5 = METHOD_NAME(contents=log_content)
assert session_log_md5 == compare_log_md5
def session_log_md5_append(session_file, compare_file):
"""Compare the session_log MD5 to the compare_file MD5"""
compare_log_md5 = METHOD_NAME(file_name=compare_file)
log_content = read_session_log(session_file, append=True)
session_log_md5 = METHOD_NAME(contents=log_content)
assert session_log_md5 == compare_log_md5
def test_session_log(net_connect, commands, expected_responses):
"""Verify session_log matches expected content."""
command = commands["basic"]
session_action(net_connect, command)
compare_file = expected_responses["compare_log"]
session_file = expected_responses["session_log"]
session_log_md5(session_file, compare_file)
def test_session_log_write(net_connect_slog_wr, commands, expected_responses):
"""Verify session_log matches expected content, but when channel writes are also logged."""
command = commands["basic"]
nc = net_connect_slog_wr
# Send a marker down the channel
nc.send_command("show foooooooo")
time.sleep(1)
nc.clear_buffer()
nc.send_command(command)
nc.disconnect()
compare_file = expected_responses["compare_log_wr"]
session_file = expected_responses["session_log_wr"]
with open(compare_file, "rb") as f:
compare_contents = f.read()
# Header information varies too much due to device behavior differences.
# So just discard it.
marker = b"% Invalid input detected at '^' marker."
_, compare_contents = compare_contents.split(marker)
compare_log_md5 = METHOD_NAME(contents=compare_contents.strip())
log_content = read_session_log(session_file)
marker = b"% Invalid input detected at '^' marker."
_, log_content = log_content.split(marker)
session_log_md5 = METHOD_NAME(contents=log_content.strip())
assert session_log_md5 == compare_log_md5
def test_session_log_append(device_slog, commands, expected_responses):
"""Verify session_log matches expected content, but when channel writes are also logged."""
session_file = expected_responses["session_log_append"]
# Create a starting file
with open(session_file, "wb") as f:
f.write(b"Initial file contents\n\n")
# The netmiko connection has not been established yet.
device_slog["session_log"] = session_file
conn = ConnectHandler(**device_slog)
command = commands["basic"]
session_action(conn, command)
compare_file = expected_responses["compare_log_append"]
session_log_md5_append(session_file, compare_file)
def test_session_log_secrets(device_slog):
"""Verify session_log does not contain password or secret."""
conn = ConnectHandler(**device_slog)
conn.session_log.write("\nTesting password and secret replacement\n")
conn.session_log.write("This is my password {}\n".format(conn.password))
conn.session_log.write("This is my secret {}\n".format(conn.secret))
file_name = device_slog["session_log"]
with open(file_name, "r") as f:
session_log = f.read()
if conn.password:
assert conn.password not in session_log
if conn.secret:
assert conn.secret not in session_log
def test_logging_filter_secrets(net_connect_slog_wr):
"""Verify logging DEBUG output does not contain password or secret."""
nc = net_connect_slog_wr
# setup logger to output to file
file_name = "SLOG/netmiko.log"
netmikologger = logging.getLogger("netmiko")
netmikologger.setLevel(logging.DEBUG)
file_handler = logging.FileHandler(file_name)
file_handler.setLevel(logging.DEBUG)
netmikologger.addHandler(file_handler)
# cleanup the log file
with open(file_name, "w") as f:
f.write("")
# run sequence
nc.enable()
time.sleep(1)
nc.clear_buffer()
nc.disconnect()
with open(file_name, "r") as f:
netmiko_log = f.read()
if nc.password:
assert nc.password not in netmiko_log
if nc.secret:
assert nc.secret not in netmiko_log
def test_unicode(device_slog):
"""Verify that you can write unicode characters into the session_log."""
conn = ConnectHandler(**device_slog)
smiley_face = "\N{grinning face with smiling eyes}"
conn.session_log.write("\nTesting unicode\n")
conn.session_log.write(smiley_face)
conn.session_log.write(smiley_face)
file_name = device_slog["session_log"]
with open(file_name, "r") as f:
session_log = f.read()
assert smiley_face in session_log
def test_session_log_bytesio(device_slog, commands, expected_responses):
"""Verify session_log matches expected content, but when channel writes are also logged."""
s_log = io.BytesIO()
# The netmiko connection has not been established yet.
device_slog["session_log"] = s_log
device_slog["session_log_file_mode"] = "write"
conn = ConnectHandler(**device_slog)
command = commands["basic"]
session_action(conn, command)
compare_file = expected_responses["compare_log"]
compare_log_md5 = METHOD_NAME(file_name=compare_file)
log_content = s_log.getvalue()
session_log_md5 = METHOD_NAME(contents=log_content)
assert session_log_md5 == compare_log_md5 |
299,423 | handle display options | from _typeshed import FileDescriptorOrPath, Incomplete, SupportsWrite
from collections.abc import Iterable, Mapping
from re import Pattern
from typing import IO, Any, ClassVar, TypeVar, overload
from typing_extensions import TypeAlias
from .cmd import Command
command_re: Pattern[str]
_OptionsList: TypeAlias = list[tuple[str, str | None, str, int] | tuple[str, str | None, str]]
_CommandT = TypeVar("_CommandT", bound=Command)
class DistributionMetadata:
def __init__(self, path: FileDescriptorOrPath | None = None) -> None: ...
name: str | None
version: str | None
author: str | None
author_email: str | None
maintainer: str | None
maintainer_email: str | None
url: str | None
license: str | None
description: str | None
long_description: str | None
keywords: str | list[str] | None
platforms: str | list[str] | None
classifiers: str | list[str] | None
download_url: str | None
provides: list[str] | None
requires: list[str] | None
obsoletes: list[str] | None
def read_pkg_file(self, file: IO[str]) -> None: ...
def write_pkg_info(self, base_dir: str) -> None: ...
def write_pkg_file(self, file: SupportsWrite[str]) -> None: ...
def get_name(self) -> str: ...
def get_version(self) -> str: ...
def get_fullname(self) -> str: ...
def get_author(self) -> str: ...
def get_author_email(self) -> str: ...
def get_maintainer(self) -> str: ...
def get_maintainer_email(self) -> str: ...
def get_contact(self) -> str: ...
def get_contact_email(self) -> str: ...
def get_url(self) -> str: ...
def get_license(self) -> str: ...
def get_licence(self) -> str: ...
def get_description(self) -> str: ...
def get_long_description(self) -> str: ...
def get_keywords(self) -> str | list[str]: ...
def get_platforms(self) -> str | list[str]: ...
def get_classifiers(self) -> str | list[str]: ...
def get_download_url(self) -> str: ...
def get_requires(self) -> list[str]: ...
def set_requires(self, value: Iterable[str]) -> None: ...
def get_provides(self) -> list[str]: ...
def set_provides(self, value: Iterable[str]) -> None: ...
def get_obsoletes(self) -> list[str]: ...
def set_obsoletes(self, value: Iterable[str]) -> None: ...
class Distribution:
cmdclass: dict[str, type[Command]]
metadata: DistributionMetadata
def __init__(self, attrs: Mapping[str, Any] | None = None) -> None: ...
def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ...
def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ...
def get_command_obj(self, command: str, create: bool = True) -> Command | None: ...
global_options: ClassVar[_OptionsList]
common_usage: ClassVar[str]
display_options: ClassVar[_OptionsList]
display_option_names: ClassVar[list[str]]
negative_opt: ClassVar[dict[str, str]]
verbose: int
dry_run: int
help: int
command_packages: list[str] | None
script_name: str | None
script_args: list[str] | None
command_options: dict[str, dict[str, tuple[str, str]]]
dist_files: list[tuple[str, str, str]]
packages: Incomplete
package_data: dict[str, list[str]]
package_dir: Incomplete
py_modules: Incomplete
libraries: Incomplete
headers: Incomplete
ext_modules: Incomplete
ext_package: Incomplete
include_dirs: Incomplete
extra_path: Incomplete
scripts: Incomplete
data_files: Incomplete
password: str
command_obj: dict[str, Command]
have_run: dict[str, bool]
want_user_cfg: bool
def dump_option_dicts(
self, header: Incomplete | None = None, commands: Incomplete | None = None, indent: str = ""
) -> None: ...
def find_config_files(self): ...
commands: Incomplete
def parse_command_line(self): ...
def finalize_options(self) -> None: ...
def METHOD_NAME(self, option_order): ...
def print_command_list(self, commands, header, max_length) -> None: ...
def print_commands(self) -> None: ...
def get_command_list(self): ...
def get_command_packages(self): ...
def get_command_class(self, command: str) -> type[Command]: ...
@overload
def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: ...
@overload
def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ...
def announce(self, msg, level: int = ...) -> None: ...
def run_commands(self) -> None: ...
def run_command(self, command: str) -> None: ...
def has_pure_modules(self) -> bool: ...
def has_ext_modules(self) -> bool: ...
def has_c_libraries(self) -> bool: ...
def has_modules(self) -> bool: ...
def has_headers(self) -> bool: ...
def has_scripts(self) -> bool: ...
def has_data_files(self) -> bool: ...
def is_pure(self) -> bool: ...
# Getter methods generated in __init__
def get_name(self) -> str: ...
def get_version(self) -> str: ...
def get_fullname(self) -> str: ...
def get_author(self) -> str: ...
def get_author_email(self) -> str: ...
def get_maintainer(self) -> str: ...
def get_maintainer_email(self) -> str: ...
def get_contact(self) -> str: ...
def get_contact_email(self) -> str: ...
def get_url(self) -> str: ...
def get_license(self) -> str: ...
def get_licence(self) -> str: ...
def get_description(self) -> str: ...
def get_long_description(self) -> str: ...
def get_keywords(self) -> str | list[str]: ...
def get_platforms(self) -> str | list[str]: ...
def get_classifiers(self) -> str | list[str]: ...
def get_download_url(self) -> str: ...
def get_requires(self) -> list[str]: ...
def get_provides(self) -> list[str]: ...
def get_obsoletes(self) -> list[str]: ... |
299,424 | assign objective | from abc import ABCMeta, abstractmethod
import numpy as np
import pandas as pd
import pytest
from evalml import AutoMLSearch
from evalml.objectives.standard_metrics import AUC, F1
def test_optimize_threshold():
ypred_proba = np.array([0.2, 0.4])
y_true = np.array([0, 1])
obj = F1()
np.random.seed(
42,
) # unfortunately scipy.optimize.minimize_scalar has no ability to accept seed as input
threshold = obj.optimize_threshold(ypred_proba, y_true)
assert 0.2 < threshold and threshold < 0.4
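# Sketch (ours, based only on the objective API exercised in these tests): a
# tuned threshold is typically fed back into decision_function to get labels.
#   threshold = F1().optimize_threshold(ypred_proba, y_true)
#   labels = F1().decision_function(ypred_proba, threshold=threshold)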
def test_optimize_threshold_neg():
ypred_proba = np.array([0.2, 0.4])
y_true = np.array([0, 1])
obj = AUC()
np.random.seed(0)
with pytest.raises(
RuntimeError,
match="Trying to optimize objective that can't be optimized!",
):
obj.optimize_threshold(ypred_proba, y_true)
def test_can_optimize_threshold():
assert F1().can_optimize_threshold
assert not AUC().can_optimize_threshold
def test_decision_function():
ypred_proba = np.arange(6) / 5.0
obj = F1()
pd.testing.assert_series_equal(
obj.decision_function(ypred_proba),
pd.Series(np.array([0] * 3 + [1] * 3), dtype=bool),
)
pd.testing.assert_series_equal(
obj.decision_function(ypred_proba, threshold=0.5),
pd.Series(np.array([0] * 3 + [1] * 3), dtype=bool),
)
pd.testing.assert_series_equal(
obj.decision_function(ypred_proba, threshold=0.0),
pd.Series(np.array([0] + [1] * 5, dtype=int), dtype=bool),
)
pd.testing.assert_series_equal(
obj.decision_function(ypred_proba, threshold=1.0),
pd.Series(np.array([0] * 6, dtype=int), dtype=bool),
)
def test_decision_function_neg():
ypred_proba = np.arange(6) / 5.0
y_true = pd.Series(np.array([0] * 3 + [1] * 3), dtype=bool)
obj = F1()
pd.testing.assert_series_equal(obj.decision_function(ypred_proba), y_true)
pd.testing.assert_series_equal(
obj.decision_function(pd.Series(ypred_proba, dtype=float)),
y_true,
)
class TestBinaryObjective(metaclass=ABCMeta):
__test__ = False
def assign_problem_type(self):
self.problem_type = "binary"
@abstractmethod
def METHOD_NAME(self, **kwargs):
"""Get objective object using specified parameters."""
def run_pipeline(self, X_y_binary, **kwargs):
self.X, self.y = X_y_binary
automl = AutoMLSearch(
X_train=self.X,
y_train=self.y,
problem_type=self.problem_type,
objective=self.objective,
max_iterations=1,
)
automl.search()
pipeline = automl.best_pipeline
pipeline.fit(self.X, self.y)
pipeline.predict(self.X, self.objective)
pipeline.predict_proba(self.X)
pipeline.score(self.X, self.y, [self.objective])
@abstractmethod
def test_score(self, y_true, y_predicted, expected_score):
"""Objective score matches expected score.
Args:
y_true (pd.Series): true classes
y_predicted (pd.Series): predicted classes
expected_score (float): expected output from objective.objective_function()
"""
@abstractmethod
def test_all_base_tests(self):
"""Run all relevant tests from the base class."""
@pytest.fixture(scope="class")
def fix_y_pred_na(self):
return np.array([np.nan, 0, 0])
@pytest.fixture(scope="class")
def fix_y_true(self):
return np.array([1, 2, 1])
@pytest.fixture(scope="class")
def fix_y_pred_diff_len(self):
return np.array([0, 1])
@pytest.fixture(scope="class")
def fix_empty_array(self):
return np.array([])
@pytest.fixture(scope="class")
def fix_y_pred_multi(self):
return np.array([0, 1, 2])
def input_contains_nan_inf(self, fix_y_pred_na, fix_y_true):
with pytest.raises(ValueError, match="y_predicted contains NaN or infinity"):
self.objective.score(fix_y_true, fix_y_pred_na)
def different_input_lengths(self, fix_y_pred_diff_len, fix_y_true):
with pytest.raises(ValueError, match="Inputs have mismatched dimensions"):
self.objective.score(fix_y_true, fix_y_pred_diff_len)
def zero_input_lengths(self, fix_empty_array):
with pytest.raises(ValueError, match="Length of inputs is 0"):
self.objective.score(fix_empty_array, fix_empty_array)
def binary_more_than_two_unique_values(self, fix_y_pred_multi, fix_y_true):
with pytest.raises(
ValueError,
match="y_predicted contains more than two unique values",
):
self.objective.score(fix_y_true, fix_y_pred_multi) |
299,425 | test anydbm read | """Test script for the dbm.open function based on testdumbdbm.py"""
import os
import unittest
import glob
import test.support
# Skip tests if dbm module doesn't exist.
dbm = test.support.import_module('dbm')
try:
from dbm import ndbm
except ImportError:
ndbm = None
_fname = test.support.TESTFN
#
# Iterates over every database module currently supported by (and available
# to) dbm, setting dbm to use each in turn and yielding that module.
#
def dbm_iterator():
for name in dbm._names:
try:
mod = __import__(name, fromlist=['open'])
except ImportError:
continue
dbm._modules[name] = mod
yield mod
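# Usage sketch (ours): run a check against every backend yielded above, e.g.
#   for mod in dbm_iterator():
#       print(mod.__name__)  # e.g. dbm.gnu, dbm.ndbm, dbm.dumb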
#
# Clean up all scratch databases we might have created during testing
#
def delete_files():
# we don't know the precise name the underlying database uses
# so we use glob to locate all names
for f in glob.glob(_fname + "*"):
test.support.unlink(f)
class AnyDBMTestCase:
_dict = {'0': b'',
'a': b'Python:',
'b': b'Programming',
'c': b'the',
'd': b'way',
'f': b'Guido',
'g': b'intended',
}
def init_db(self):
f = dbm.open(_fname, 'n')
for k in self._dict:
f[k.encode("ascii")] = self._dict[k]
f.close()
def keys_helper(self, f):
keys = sorted(k.decode("ascii") for k in f.keys())
dkeys = sorted(self._dict.keys())
self.assertEqual(keys, dkeys)
return keys
def test_error(self):
self.assertTrue(issubclass(self.module.error, OSError))
def test_anydbm_not_existing(self):
self.assertRaises(dbm.error, dbm.open, _fname)
def test_anydbm_creation(self):
f = dbm.open(_fname, 'c')
self.assertEqual(list(f.keys()), [])
for key in self._dict:
f[key.encode("ascii")] = self._dict[key]
self.read_helper(f)
f.close()
def test_anydbm_creation_n_file_exists_with_invalid_contents(self):
# create an empty file
test.support.create_empty_file(_fname)
f = dbm.open(_fname, 'n')
self.addCleanup(f.close)
self.assertEqual(len(f), 0)
def test_anydbm_modification(self):
self.init_db()
f = dbm.open(_fname, 'c')
self._dict['g'] = f[b'g'] = b"indented"
self.read_helper(f)
f.close()
def METHOD_NAME(self):
self.init_db()
f = dbm.open(_fname, 'r')
self.read_helper(f)
f.close()
def test_anydbm_keys(self):
self.init_db()
f = dbm.open(_fname, 'r')
keys = self.keys_helper(f)
f.close()
def test_anydbm_access(self):
self.init_db()
f = dbm.open(_fname, 'r')
key = "a".encode("ascii")
self.assertIn(key, f)
self.assertEqual(f[key], b"Python:")
f.close()
def read_helper(self, f):
keys = self.keys_helper(f)
for key in self._dict:
self.assertEqual(self._dict[key], f[key.encode("ascii")])
def tearDown(self):
delete_files()
def setUp(self):
dbm._defaultmod = self.module
delete_files()
class WhichDBTestCase(unittest.TestCase):
def test_whichdb(self):
for module in dbm_iterator():
# Check whether whichdb correctly guesses module name
# for databases opened with "module" module.
# Try with empty files first
name = module.__name__
if name == 'dbm.dumb':
continue # whichdb can't support dbm.dumb
delete_files()
f = module.open(_fname, 'c')
f.close()
self.assertEqual(name, self.dbm.whichdb(_fname))
# Now add a key
f = module.open(_fname, 'w')
f[b"1"] = b"1"
# and test that we can find it
self.assertIn(b"1", f)
# and read it
self.assertTrue(f[b"1"] == b"1")
f.close()
self.assertEqual(name, self.dbm.whichdb(_fname))
@unittest.skipUnless(ndbm, reason='Test requires ndbm')
def test_whichdb_ndbm(self):
# Issue 17198: check that ndbm which is referenced in whichdb is defined
db_file = '{}_ndbm.db'.format(_fname)
with open(db_file, 'w'):
self.addCleanup(test.support.unlink, db_file)
self.assertIsNone(self.dbm.whichdb(db_file[:-3]))
def tearDown(self):
delete_files()
def setUp(self):
delete_files()
self.filename = test.support.TESTFN
self.d = dbm.open(self.filename, 'c')
self.d.close()
self.dbm = test.support.import_fresh_module('dbm')
def test_keys(self):
self.d = dbm.open(self.filename, 'c')
self.assertEqual(self.d.keys(), [])
a = [(b'a', b'b'), (b'12345678910', b'019237410982340912840198242')]
for k, v in a:
self.d[k] = v
self.assertEqual(sorted(self.d.keys()), sorted(k for (k, v) in a))
for k, v in a:
self.assertIn(k, self.d)
self.assertEqual(self.d[k], v)
self.assertNotIn(b'xxx', self.d)
self.assertRaises(KeyError, lambda: self.d[b'xxx'])
self.d.close()
def load_tests(loader, tests, pattern):
classes = []
for mod in dbm_iterator():
classes.append(type("TestCase-" + mod.__name__,
(AnyDBMTestCase, unittest.TestCase),
{'module': mod}))
suites = [unittest.makeSuite(c) for c in classes]
tests.addTests(suites)
return tests
if __name__ == "__main__":
unittest.main() |
299,426 | on connect | from __future__ import print_function, unicode_literals
import os
import re
import ssl
import sys
from builtins import str
from threading import Event, Thread
import paho.mqtt.client as mqtt
import ttfw_idf
from tiny_test_fw import DUT
event_client_connected = Event()
event_stop_client = Event()
event_client_received_correct = Event()
event_client_received_binary = Event()
message_log = ''
# The callback for when the client receives a CONNACK response from the server.
def METHOD_NAME(client, userdata, flags, rc):
print('Connected with result code ' + str(rc))
event_client_connected.set()
client.subscribe('/topic/qos0')
def mqtt_client_task(client):
while not event_stop_client.is_set():
client.loop()
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
global message_log
global event_client_received_correct
global event_client_received_binary
if msg.topic == '/topic/binary':
binary, bin_size = userdata
print('Receiving binary from esp and comparing with {}, size {}...'.format(binary, bin_size))
with open(binary, 'rb') as f:
bin = f.read()
if bin[:bin_size] == msg.payload[:bin_size]:
print('...matches!')
event_client_received_binary.set()
return
recv_binary = binary + '.received'
with open(recv_binary, 'wb') as fw:  # payload is bytes, so write in binary mode
fw.write(msg.payload)
raise ValueError('Received binary (saved as: {}) does not match the original file: {}'.format(recv_binary, binary))
payload = msg.payload.decode()
if not event_client_received_correct.is_set() and payload == 'data':
client.subscribe('/topic/binary')
client.publish('/topic/qos0', 'send binary please')
if msg.topic == '/topic/qos0' and payload == 'data':
event_client_received_correct.set()
message_log += 'Received data:' + msg.topic + ' ' + payload + '\n'
@ttfw_idf.idf_example_test(env_tag='Example_EthKitV1')
def test_examples_protocol_mqtt_ssl(env, extra_data):
broker_url = ''
broker_port = 0
"""
steps:
1. join AP and connects to ssl broker
2. Test connects a client to the same broker
3. Test evaluates python client received correct qos0 message
4. Test ESP32 client received correct qos0 message
5. Test python client receives binary data from running partition and compares it with the binary
"""
dut1 = env.get_dut('mqtt_ssl', 'examples/protocols/mqtt/ssl', dut_class=ttfw_idf.ESP32DUT)
# check and log bin size
binary_file = os.path.join(dut1.app.binary_path, 'mqtt_ssl.bin')
bin_size = os.path.getsize(binary_file)
ttfw_idf.log_performance('mqtt_ssl_bin_size', '{}KB'
.format(bin_size // 1024))
# Look for host:port in sdkconfig
try:
value = re.search(r'\:\/\/([^:]+)\:([0-9]+)', dut1.app.get_sdkconfig()['CONFIG_BROKER_URI'])
broker_url = value.group(1)
broker_port = int(value.group(2))
bin_size = min(int(dut1.app.get_sdkconfig()['CONFIG_BROKER_BIN_SIZE_TO_SEND']), bin_size)
except Exception:
print('ENV_TEST_FAILURE: Cannot find broker url in sdkconfig')
raise
client = None
# 1. Test connects to a broker
try:
client = mqtt.Client()
client.METHOD_NAME = METHOD_NAME
client.on_message = on_message
client.user_data_set((binary_file, bin_size))
client.tls_set(None,
None,
None, cert_reqs=ssl.CERT_NONE, tls_version=ssl.PROTOCOL_TLSv1_2, ciphers=None)
client.tls_insecure_set(True)
print('Connecting...')
client.connect(broker_url, broker_port, 60)
except Exception:
print('ENV_TEST_FAILURE: Unexpected error while connecting to broker {}: {}:'.format(broker_url, sys.exc_info()[0]))
raise
# Starting a py-client in a separate thread
thread1 = Thread(target=mqtt_client_task, args=(client,))
thread1.start()
try:
print('Connecting py-client to broker {}:{}...'.format(broker_url, broker_port))
if not event_client_connected.wait(timeout=30):
raise ValueError('ENV_TEST_FAILURE: Test script cannot connect to broker: {}'.format(broker_url))
dut1.start_app()
try:
ip_address = dut1.expect(re.compile(r' eth ip: ([^,]+),'), timeout=30)
print('Connected to AP with IP: {}'.format(ip_address))
except DUT.ExpectTimeout:
print('ENV_TEST_FAILURE: Cannot connect to AP')
raise
print('Checking py-client received msg published from esp...')
if not event_client_received_correct.wait(timeout=30):
raise ValueError('Wrong data received, msg log: {}'.format(message_log))
print('Checking esp-client received msg published from py-client...')
dut1.expect(re.compile(r'DATA=send binary please'), timeout=30)
print('Receiving binary data from running partition...')
if not event_client_received_binary.wait(timeout=30):
raise ValueError('Binary not received within timeout')
finally:
event_stop_client.set()
thread1.join()
if __name__ == '__main__':
test_examples_protocol_mqtt_ssl() |
299,427 | func1 | #! /usr/bin/env python3
"""
"PYSTONE" Benchmark Program
Version: Python/1.2 (corresponds to C/1.1 plus 3 Pystone fixes)
Author: Reinhold P. Weicker, CACM Vol 27, No 10, 10/84 pg. 1013.
Translated from ADA to C by Rick Richardson.
Every method to preserve ADA-likeness has been used,
at the expense of C-ness.
Translated from C to Python by Guido van Rossum.
Version History:
Version 1.1 corrects two bugs in version 1.0:
First, it leaked memory: in Proc1(), NextRecord ends
up having a pointer to itself. I have corrected this
by zapping NextRecord.PtrComp at the end of Proc1().
Second, Proc3() used the operator != to compare a
record to None. This is rather inefficient and not
true to the intention of the original benchmark (where
a pointer comparison to None is intended; the !=
operator attempts to find a method __cmp__ to do value
comparison of the record). Version 1.1 runs 5-10
percent faster than version 1.0, so benchmark figures
of different versions can't be compared directly.
Version 1.2 changes the division to floor division.
Under Python 3 version 1.1 would use the normal division
operator, resulting in some of the operations mistakenly
yielding floats. Version 1.2 instead uses floor division
making the benchmark a integer benchmark again.
"""
LOOPS = 500
from utime import ticks_ms, ticks_diff
__version__ = "1.2"
[Ident1, Ident2, Ident3, Ident4, Ident5] = range(1, 6)
class Record:
def __init__(self, PtrComp = None, Discr = 0, EnumComp = 0,
IntComp = 0, StringComp = 0):
self.PtrComp = PtrComp
self.Discr = Discr
self.EnumComp = EnumComp
self.IntComp = IntComp
self.StringComp = StringComp
def copy(self):
return Record(self.PtrComp, self.Discr, self.EnumComp,
self.IntComp, self.StringComp)
TRUE = 1
FALSE = 0
def main(loops=LOOPS):
benchtime, stones = pystones(loops)
print("Pystone(%s) time for %d passes = %gms" % \
(__version__, loops, benchtime))
print("This machine benchmarks at %g pystones/second" % stones)
def pystones(loops=LOOPS):
return Proc0(loops)
IntGlob = 0
BoolGlob = FALSE
Char1Glob = '\0'
Char2Glob = '\0'
Array1Glob = [0]*(51 // 2)
Array2Glob = [x[:] for x in [Array1Glob]*(51 // 2)]
PtrGlb = None
PtrGlbNext = None
def Proc0(loops=LOOPS):
global IntGlob
global BoolGlob
global Char1Glob
global Char2Glob
global Array1Glob
global Array2Glob
global PtrGlb
global PtrGlbNext
starttime = ticks_ms()
for i in range(loops):
pass
nulltime = ticks_diff(ticks_ms(), starttime)
PtrGlbNext = Record()
PtrGlb = Record()
PtrGlb.PtrComp = PtrGlbNext
PtrGlb.Discr = Ident1
PtrGlb.EnumComp = Ident3
PtrGlb.IntComp = 40
PtrGlb.StringComp = "DHRYSTONE PROGRAM, SOME STRING"
String1Loc = "DHRYSTONE PROGRAM, 1'ST STRING"
Array2Glob[8 // 2][7 // 2] = 10
starttime = ticks_ms()
for i in range(loops):
Proc5()
Proc4()
IntLoc1 = 2
IntLoc2 = 3
String2Loc = "DHRYSTONE PROGRAM, 2'ND STRING"
EnumLoc = Ident2
BoolGlob = not Func2(String1Loc, String2Loc)
while IntLoc1 < IntLoc2:
IntLoc3 = 5 * IntLoc1 - IntLoc2
IntLoc3 = Proc7(IntLoc1, IntLoc2)
IntLoc1 = IntLoc1 + 1
Proc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3)
PtrGlb = Proc1(PtrGlb)
CharIndex = 'A'
while CharIndex <= Char2Glob:
if EnumLoc == METHOD_NAME(CharIndex, 'C'):
EnumLoc = Proc6(Ident1)
CharIndex = chr(ord(CharIndex)+1)
IntLoc3 = IntLoc2 * IntLoc1
IntLoc2 = IntLoc3 // IntLoc1
IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1
IntLoc1 = Proc2(IntLoc1)
benchtime = ticks_diff(ticks_ms(), starttime) - nulltime
if benchtime == 0:
loopsPerBenchtime = 0
else:
loopsPerBenchtime = (loops * 1000 // benchtime)
return benchtime, loopsPerBenchtime
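# Note (ours): benchtime is in milliseconds, so loops * 1000 // benchtime is
# whole loops per second, e.g. 500 loops in 250 ms -> 2000 pystones/second.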
def Proc1(PtrParIn):
PtrParIn.PtrComp = NextRecord = PtrGlb.copy()
PtrParIn.IntComp = 5
NextRecord.IntComp = PtrParIn.IntComp
NextRecord.PtrComp = PtrParIn.PtrComp
NextRecord.PtrComp = Proc3(NextRecord.PtrComp)
if NextRecord.Discr == Ident1:
NextRecord.IntComp = 6
NextRecord.EnumComp = Proc6(PtrParIn.EnumComp)
NextRecord.PtrComp = PtrGlb.PtrComp
NextRecord.IntComp = Proc7(NextRecord.IntComp, 10)
else:
PtrParIn = NextRecord.copy()
NextRecord.PtrComp = None
return PtrParIn
def Proc2(IntParIO):
IntLoc = IntParIO + 10
while 1:
if Char1Glob == 'A':
IntLoc = IntLoc - 1
IntParIO = IntLoc - IntGlob
EnumLoc = Ident1
if EnumLoc == Ident1:
break
return IntParIO
def Proc3(PtrParOut):
global IntGlob
if PtrGlb is not None:
PtrParOut = PtrGlb.PtrComp
else:
IntGlob = 100
PtrGlb.IntComp = Proc7(10, IntGlob)
return PtrParOut
def Proc4():
global Char2Glob
BoolLoc = Char1Glob == 'A'
BoolLoc = BoolLoc or BoolGlob
Char2Glob = 'B'
def Proc5():
global Char1Glob
global BoolGlob
Char1Glob = 'A'
BoolGlob = FALSE
def Proc6(EnumParIn):
EnumParOut = EnumParIn
if not Func3(EnumParIn):
EnumParOut = Ident4
if EnumParIn == Ident1:
EnumParOut = Ident1
elif EnumParIn == Ident2:
if IntGlob > 100:
EnumParOut = Ident1
else:
EnumParOut = Ident4
elif EnumParIn == Ident3:
EnumParOut = Ident2
elif EnumParIn == Ident4:
pass
elif EnumParIn == Ident5:
EnumParOut = Ident3
return EnumParOut
def Proc7(IntParI1, IntParI2):
IntLoc = IntParI1 + 2
IntParOut = IntParI2 + IntLoc
return IntParOut
def Proc8(Array1Par, Array2Par, IntParI1, IntParI2):
global IntGlob
IntLoc = IntParI1 + 5
Array1Par[IntLoc // 2] = IntParI2
Array1Par[(IntLoc+1) // 2] = Array1Par[IntLoc // 2]
Array1Par[(IntLoc+30) // 2] = IntLoc
for IntIndex in range(IntLoc, IntLoc+2):
Array2Par[IntLoc // 2][IntIndex // 2] = IntLoc
Array2Par[IntLoc // 2][(IntLoc-1) // 2] = Array2Par[IntLoc // 2][(IntLoc-1) // 2] + 1
Array2Par[(IntLoc+20) // 2][IntLoc // 2] = Array1Par[IntLoc // 2]
IntGlob = 5
def METHOD_NAME(CharPar1, CharPar2):
CharLoc1 = CharPar1
CharLoc2 = CharLoc1
if CharLoc2 != CharPar2:
return Ident1
else:
return Ident2
def Func2(StrParI1, StrParI2):
IntLoc = 1
while IntLoc <= 1:
if METHOD_NAME(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1:
CharLoc = 'A'
IntLoc = IntLoc + 1
if CharLoc >= 'W' and CharLoc <= 'Z':
IntLoc = 7
if CharLoc == 'X':
return TRUE
else:
if StrParI1 > StrParI2:
IntLoc = IntLoc + 7
return TRUE
else:
return FALSE
def Func3(EnumParIn):
EnumLoc = EnumParIn
if EnumLoc == Ident3: return TRUE
return FALSE
if __name__ == '__main__':
import sys
def error(msg):
print(msg, end=' ', file=sys.stderr)
print("usage: %s [number_of_loops]" % sys.argv[0], file=sys.stderr)
sys.exit(100)
nargs = len(sys.argv) - 1
if nargs > 1:
error("%d arguments are too many;" % nargs)
elif nargs == 1:
try: loops = int(sys.argv[1])
except ValueError:
error("Invalid argument %r;" % sys.argv[1])
else:
loops = LOOPS
main(loops) |
299,428 | test deploy limited release with keep days | # Tests for commcare_cloud.commands.deploy.command.Deploy
# Way too many things are mocked here.
import sys
from datetime import datetime
from pathlib import Path
from unittest.mock import Mock, patch
from testil import assert_raises, eq
from commcare_cloud.commands.deploy import command, commcare
from commcare_cloud.commands.deploy.deploy_diff import DeployDiff
from commcare_cloud.commands import preindex_views
from commcare_cloud.commcare_cloud import call_commcare_cloud
from commcare_cloud.environment.main import Environment, get_environment
def test_deploy_commcare_happy_path():
def run_playbook(playbook, context, *args, unknown_args={}, **kw):
eq(unknown_args, ["-e", "code_version=def456"])
eq(context.environment.release_name, "2020-01-02_03.04")
log.append(playbook)
return 0
log = []
with patch.multiple(
commcare,
record_deploy_failed=Mock(),
record_deploy_start=Mock(),
record_successful_deploy=Mock(),
run_ansible_playbook=run_playbook,
):
_deploy_commcare()
eq(log, ["deploy_hq.yml"])
def test_resume_deploy_with_release_name():
def run_playbook(playbook, context, *args, unknown_args=None, **kw):
eq(unknown_args, ["-e", "code_version=def456"])
eq(context.environment.release_name, "FRANK")
log.append(playbook)
return 0
log = []
with patch.multiple(
commcare,
record_deploy_failed=Mock(),
record_deploy_start=Mock(),
record_successful_deploy=Mock(),
run_ansible_playbook=run_playbook,
):
_deploy_commcare("--resume=FRANK")
eq(log, ["deploy_hq.yml"])
def test_resume_deploy_without_release_name_raises():
def run_playbook(playbook, context, *args, unknown_args=None, **kw):
raise Exception("unexpected")
with (
patch.object(commcare, "run_ansible_playbook", run_playbook),
assert_raises(SystemExit),
patch("sys.stderr", sys.stdout)
):
_deploy_commcare("--resume")
def test_deploy_limited_release():
def run_playbook(playbook, context, *args, unknown_args=None, **kw):
eq(unknown_args, [
"-e", "code_version=def456",
"-e", "keep_until=2020-01-03_03.04",
"--tags=private_release",
])
eq(context.environment.release_name, "2020-01-02_03.04")
eq(kw.get("limit"), "django_manage")
log.append(playbook)
return 0
log = []
with patch.multiple(
commcare,
datetime=fakedatetime,
run_ansible_playbook=run_playbook,
):
_deploy_commcare("--private")
eq(log, ["deploy_hq.yml"])
def test_deploy_limited_release_to_webworker():
def run_playbook(playbook, context, *args, unknown_args=None, **kw):
eq(unknown_args, [
"-e", "code_version=def456",
"-e", "keep_until=2020-01-03_03.04",
"--tags=private_release",
])
eq(context.environment.release_name, "2020-01-02_03.04")
eq(kw.get("limit"), "webworkers[0]")
log.append(playbook)
return 0
log = []
with patch.multiple(
commcare,
datetime=fakedatetime,
run_ansible_playbook=run_playbook,
):
_deploy_commcare("--private", "--limit=webworkers[0]")
eq(log, ["deploy_hq.yml"])
def test_deploy_private_release_to_all_applicable_hosts():
def run_playbook(playbook, context, *args, unknown_args=None, **kw):
eq(unknown_args, [
"-e", "code_version=def456",
"-e", "keep_until=2020-01-03_03.04",
"--tags=private_release",
])
eq(context.environment.release_name, "2020-01-02_03.04")
eq(kw.get("limit"), "all")
log.append(playbook)
return 0
log = []
summary = []
with patch.multiple(
commcare,
color_summary=summary.append,
datetime=fakedatetime,
run_ansible_playbook=run_playbook,
):
_deploy_commcare("--private", "--limit=all")
eq(log, ["deploy_hq.yml"])
eq(summary, [
"Your private release is located here:",
"/home/cchq/www/small_cluster/releases/2020-01-02_03.04",
])
def METHOD_NAME():
def run_playbook(playbook, context, *args, unknown_args=None, **kw):
eq(unknown_args, [
"-e", "code_version=def456",
"-e", "keep_until=2020-01-12_03.04",
"--tags=private_release",
])
eq(context.environment.release_name, "2020-01-02_03.04")
eq(kw.get("limit"), "django_manage")
log.append(playbook)
return 0
log = []
with patch.multiple(
commcare,
datetime=fakedatetime,
run_ansible_playbook=run_playbook,
):
_deploy_commcare("--private", "--keep-days=10")
eq(log, ["deploy_hq.yml"])
def test_preindex_views():
def run_playbook(playbook, context, *args, unknown_args=None, **kw):
eq(unknown_args, [
"-e", "code_version=def456",
"-e", "keep_until=2020-01-03_03.04",
"--tags=private_release",
])
eq(context.environment.release_name, "2020-01-02_03.04")
eq(kw.get("limit"), "pillowtop[0]")
log.append(playbook)
return 0
def run_command(env_name, cmd, *args, **kw):
assert not kw, kw
log.append(" ".join((cmd,) + args))
return 0
log = []
with (
patch.object(preindex_views, "check_branch"),
patch.object(preindex_views, "commcare_cloud", run_command),
patch.object(preindex_views, "datetime", fakedatetime),
patch.object(commcare, "run_ansible_playbook", run_playbook),
patch.object(commcare, "datetime", fakedatetime),
):
_deploy_commcare(cmd=("preindex-views",))
eq(log, [
"deploy_hq.yml",
"django-manage preindex_everything --server=pillowtop[0] --release=2020-01-02_03.04 --tmux --mail",
])
def _deploy_commcare(*argv, cmd=("deploy", "commcare")):
envs = Path(__file__).parent.parent / "test_envs"
diff = DeployDiff(None, "abc123", "def456", None)
get_environment.reset_cache()
with (
patch("commcare_cloud.environment.paths.ENVIRONMENTS_DIR", envs),
patch.object(command, "check_branch"),
patch.object(command, "datetime", fakedatetime),
patch.object(commcare, "confirm_deploy", lambda *a: True),
patch.object(commcare, "DEPLOY_DIFF", diff),
patch.object(Environment, "_create_generated_yml", lambda self: None),
):
argv = ("cchq", "small_cluster") + cmd + argv
try:
call_commcare_cloud(argv)
finally:
get_environment.reset_cache()
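# Example (ours): _deploy_commcare("--private") runs the equivalent of the
# CLI invocation `cchq small_cluster deploy commcare --private`.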
class fakedatetime:
def utcnow():
return datetime(2020, 1, 2, 3, 4) |
299,429 | authorization key | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'GetGlobalReachConnectionResult',
'AwaitableGetGlobalReachConnectionResult',
'get_global_reach_connection',
'get_global_reach_connection_output',
]
@pulumi.output_type
class GetGlobalReachConnectionResult:
"""
A global reach connection resource
"""
def __init__(__self__, address_prefix=None, METHOD_NAME=None, circuit_connection_status=None, express_route_id=None, id=None, name=None, peer_express_route_circuit=None, provisioning_state=None, type=None):
if address_prefix and not isinstance(address_prefix, str):
raise TypeError("Expected argument 'address_prefix' to be a str")
pulumi.set(__self__, "address_prefix", address_prefix)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'authorization_key' to be a str")
pulumi.set(__self__, "authorization_key", METHOD_NAME)
if circuit_connection_status and not isinstance(circuit_connection_status, str):
raise TypeError("Expected argument 'circuit_connection_status' to be a str")
pulumi.set(__self__, "circuit_connection_status", circuit_connection_status)
if express_route_id and not isinstance(express_route_id, str):
raise TypeError("Expected argument 'express_route_id' to be a str")
pulumi.set(__self__, "express_route_id", express_route_id)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if peer_express_route_circuit and not isinstance(peer_express_route_circuit, str):
raise TypeError("Expected argument 'peer_express_route_circuit' to be a str")
pulumi.set(__self__, "peer_express_route_circuit", peer_express_route_circuit)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="addressPrefix")
def address_prefix(self) -> str:
"""
The network used for global reach carved out from the original network block provided for the private cloud
"""
return pulumi.get(self, "address_prefix")
@property
@pulumi.getter(name="authorizationKey")
def METHOD_NAME(self) -> Optional[str]:
"""
Authorization key from the peer express route used for the global reach connection
"""
return pulumi.get(self, "authorization_key")
@property
@pulumi.getter(name="circuitConnectionStatus")
def circuit_connection_status(self) -> str:
"""
The connection status of the global reach connection
"""
return pulumi.get(self, "circuit_connection_status")
@property
@pulumi.getter(name="expressRouteId")
def express_route_id(self) -> Optional[str]:
"""
The ID of the Private Cloud's ExpressRoute Circuit that is participating in the global reach connection
"""
return pulumi.get(self, "express_route_id")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="peerExpressRouteCircuit")
def peer_express_route_circuit(self) -> Optional[str]:
"""
Identifier of the ExpressRoute Circuit to peer with in the global reach connection
"""
return pulumi.get(self, "peer_express_route_circuit")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The state of the ExpressRoute Circuit Authorization provisioning
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetGlobalReachConnectionResult(GetGlobalReachConnectionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetGlobalReachConnectionResult(
address_prefix=self.address_prefix,
METHOD_NAME=self.METHOD_NAME,
circuit_connection_status=self.circuit_connection_status,
express_route_id=self.express_route_id,
id=self.id,
name=self.name,
peer_express_route_circuit=self.peer_express_route_circuit,
provisioning_state=self.provisioning_state,
type=self.type)
def get_global_reach_connection(global_reach_connection_name: Optional[str] = None,
private_cloud_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetGlobalReachConnectionResult:
"""
A global reach connection resource
:param str global_reach_connection_name: Name of the global reach connection in the private cloud
:param str private_cloud_name: Name of the private cloud
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
__args__ = dict()
__args__['globalReachConnectionName'] = global_reach_connection_name
__args__['privateCloudName'] = private_cloud_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:avs/v20220501:getGlobalReachConnection', __args__, opts=opts, typ=GetGlobalReachConnectionResult).value
return AwaitableGetGlobalReachConnectionResult(
address_prefix=pulumi.get(__ret__, 'address_prefix'),
METHOD_NAME=pulumi.get(__ret__, 'authorization_key'),
circuit_connection_status=pulumi.get(__ret__, 'circuit_connection_status'),
express_route_id=pulumi.get(__ret__, 'express_route_id'),
id=pulumi.get(__ret__, 'id'),
name=pulumi.get(__ret__, 'name'),
peer_express_route_circuit=pulumi.get(__ret__, 'peer_express_route_circuit'),
provisioning_state=pulumi.get(__ret__, 'provisioning_state'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_global_reach_connection)
def get_global_reach_connection_output(global_reach_connection_name: Optional[pulumi.Input[str]] = None,
private_cloud_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetGlobalReachConnectionResult]:
"""
A global reach connection resource
:param str global_reach_connection_name: Name of the global reach connection in the private cloud
:param str private_cloud_name: Name of the private cloud
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
... |
299,430 | token to transition | # Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
# Modifications:
# Copyright David Halter and Contributors
# Modifications are dual-licensed: MIT and PSF.
# 99% of the code is different from pgen2, now.
"""
The ``Parser`` tries to convert the available Python code into an easy-to-read
format, something like an abstract syntax tree. The classes that represent this
tree live in the :mod:`parso.tree` module.
The Python module ``tokenize`` is a very important part of the ``Parser``,
because it splits the code into different words (tokens). Sometimes it looks a
bit messy. Sorry for that! You might ask now: "Why didn't you use the ``ast``
module for this?" Well, ``ast`` does a very good job understanding proper Python
code, but fails to work as soon as there's a single line of broken code.
There's one important optimization that needs to be known: Statements are not
being parsed completely. ``Statement`` is just a representation of the tokens
within the statement. This lowers memory usage and CPU time and reduces the
complexity of the ``Parser`` (there's another parser sitting inside
``Statement``, which produces ``Array`` and ``Call``).
"""
from parso import tree
from parso.pgen2.generator import ReservedString
class ParserSyntaxError(Exception):
"""
Contains error information about the parser tree.
May be raised as an exception.
"""
def __init__(self, message, error_leaf):
self.message = message
self.error_leaf = error_leaf
class InternalParseError(Exception):
"""
Exception to signal the parser is stuck and error recovery didn't help.
Basically this shouldn't happen. It's a sign that something is really
wrong.
"""
def __init__(self, msg, type_, value, start_pos):
Exception.__init__(self, "%s: type=%r, value=%r, start_pos=%r" %
(msg, type_.name, value, start_pos))
self.msg = msg
self.type = type_  # store the token type argument, not the built-in type
self.value = value
self.start_pos = start_pos
class Stack(list):
def _allowed_transition_names_and_token_types(self):
def iterate():
# An API just for Jedi.
for stack_node in reversed(self):
for transition in stack_node.dfa.transitions:
if isinstance(transition, ReservedString):
yield transition.value
else:
yield transition # A token type
if not stack_node.dfa.is_final:
break
return list(iterate())
class StackNode(object):
def __init__(self, dfa):
self.dfa = dfa
self.nodes = []
@property
def nonterminal(self):
return self.dfa.from_rule
def __repr__(self):
return '%s(%s, %s)' % (self.__class__.__name__, self.dfa, self.nodes)
def METHOD_NAME(grammar, type_, value):
# Map from token to label
if type_.contains_syntax:
# Check for reserved words (keywords)
try:
return grammar.reserved_syntax_strings[value]
except KeyError:
pass
return type_
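# Example (ours): for a NAME token with value "if", the reserved-string lookup
# wins and the keyword transition is returned; for a plain NAME such as "foo",
# the token type itself is the transition.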
class BaseParser(object):
"""Parser engine.
A Parser instance contains state pertaining to the current token
sequence, and should not be used concurrently by different threads
to parse separate token sequences.
See python/tokenize.py for how to get input tokens by a string.
When a syntax error occurs, error_recovery() is called.
"""
node_map = {}
default_node = tree.Node
leaf_map = {
}
default_leaf = tree.Leaf
def __init__(self, pgen_grammar, start_nonterminal='file_input', error_recovery=False):
self._pgen_grammar = pgen_grammar
self._start_nonterminal = start_nonterminal
self._error_recovery = error_recovery
def parse(self, tokens):
first_dfa = self._pgen_grammar.nonterminal_to_dfas[self._start_nonterminal][0]
self.stack = Stack([StackNode(first_dfa)])
for token in tokens:
self._add_token(token)
while True:
tos = self.stack[-1]
if not tos.dfa.is_final:
# We never broke out -- EOF is too soon -- Unfinished statement.
# However, the error recovery might have added the token again, if
# the stack is empty, we're fine.
raise InternalParseError(
"incomplete input", token.type, token.string, token.start_pos
)
if len(self.stack) > 1:
self._pop()
else:
return self.convert_node(tos.nonterminal, tos.nodes)
def error_recovery(self, token):
if self._error_recovery:
raise NotImplementedError("Error Recovery is not implemented")
else:
type_, value, start_pos, prefix = token
error_leaf = tree.ErrorLeaf(type_, value, start_pos, prefix)
raise ParserSyntaxError('SyntaxError: invalid syntax', error_leaf)
def convert_node(self, nonterminal, children):
try:
node = self.node_map[nonterminal](children)
except KeyError:
node = self.default_node(nonterminal, children)
for c in children:
c.parent = node
return node
def convert_leaf(self, type_, value, prefix, start_pos):
try:
return self.leaf_map[type_](value, start_pos, prefix)
except KeyError:
return self.default_leaf(value, start_pos, prefix)
def _add_token(self, token):
"""
This is the only core function for parsing. Here happens basically
everything. Everything is well prepared by the parser generator and we
only apply the necessary steps here.
"""
grammar = self._pgen_grammar
stack = self.stack
type_, value, start_pos, prefix = token
transition = METHOD_NAME(grammar, type_, value)
while True:
try:
plan = stack[-1].dfa.transitions[transition]
break
except KeyError:
if stack[-1].dfa.is_final:
self._pop()
else:
self.error_recovery(token)
return
except IndexError:
raise InternalParseError("too much input", type_, value, start_pos)
stack[-1].dfa = plan.next_dfa
for push in plan.dfa_pushes:
stack.append(StackNode(push))
leaf = self.convert_leaf(type_, value, prefix, start_pos)
stack[-1].nodes.append(leaf)
def _pop(self):
tos = self.stack.pop()
# If there's exactly one child, return that child instead of
# creating a new node. We still create expr_stmt and
# file_input though, because a lot of Jedi depends on its
# logic.
if len(tos.nodes) == 1:
new_node = tos.nodes[0]
else:
new_node = self.convert_node(tos.dfa.from_rule, tos.nodes)
self.stack[-1].nodes.append(new_node) |
299,431 | get remaining time | #!/usr/bin/env python
########################################################################
#
# DELLEMC Z9332F
#
# Abstract base class for implementing a platform-specific class with
# which to interact with a hardware watchdog module in SONiC
#
########################################################################
try:
import ctypes
from sonic_platform_base.watchdog_base import WatchdogBase
from sonic_platform.hwaccess import io_reg_read, io_reg_write
except ImportError as e:
raise ImportError(str(e) + "- required module not found")
class _timespec(ctypes.Structure):
_fields_ = [
('tv_sec', ctypes.c_long),
('tv_nsec', ctypes.c_long)
]
class Watchdog(WatchdogBase):
"""
Abstract base class for interfacing with a hardware watchdog module
"""
TIMERS = [0.2, 30, 60, 180, 240, 300, 420, 600]
io_resource = "/dev/port"
wd_timer_offset = 0xA181
wd_status_offset = 0xA182
wd_timer_punch_offset = 0xA184
wd_enable = 1
wd_disable = 0
wd_punch_enable = 0
armed_time = 0
timeout = 0
CLOCK_MONOTONIC = 1
def __init__(self):
WatchdogBase.__init__(self)
self._librt = ctypes.CDLL('librt.so.1', use_errno=True)
self._clock_gettime = self._librt.clock_gettime
self._clock_gettime.argtypes=[ctypes.c_int, ctypes.POINTER(_timespec)]
def _get_time(self):
"""
To get clock monotonic time
"""
ts = _timespec()
if self._clock_gettime(self.CLOCK_MONOTONIC, ctypes.pointer(ts)) != 0:
self._errno = ctypes.get_errno()
return 0
return ts.tv_sec + ts.tv_nsec * 1e-9
def arm(self, seconds):
"""
Arm the hardware watchdog with a timeout of <seconds> seconds.
If the watchdog is currently armed, calling this function will
simply reset the timer to the provided value. If the underlying
hardware does not support the value provided in <seconds>, this
method should arm the watchdog with the *next greater*
available value.
Returns:
An integer specifying the *actual* number of seconds the
watchdog was armed with. On failure returns -1.
"""
timer_offset = -1
for key,timer_seconds in enumerate(self.TIMERS):
if seconds > 0 and seconds <= timer_seconds:
timer_offset = key
seconds = timer_seconds
break
if timer_offset == -1:
return -1
wd_timer_val = io_reg_read(self.io_resource, self.wd_timer_offset)
if wd_timer_val != timer_offset:
self.disarm()
io_reg_write(self.io_resource, self.wd_timer_offset, timer_offset)
if self.is_armed():
# Setting the WD timer punch
io_reg_write(self.io_resource, self.wd_timer_punch_offset, self.wd_punch_enable)
self.armed_time = self._get_time()
self.timeout = seconds
return seconds
else:
# Enable WD
io_reg_write(self.io_resource, self.wd_status_offset, self.wd_enable)
self.armed_time = self._get_time()
self.timeout = seconds
return seconds
def disarm(self):
"""
Disarm the hardware watchdog
Returns:
A boolean, True if watchdog is disarmed successfully, False
if not
"""
if self.is_armed():
# Disable WD
io_reg_write(self.io_resource, self.wd_status_offset, self.wd_disable)
self.armed_time = 0
self.timeout = 0
return True
return False
def is_armed(self):
"""
Retrieves the armed state of the hardware watchdog.
Returns:
A boolean, True if watchdog is armed, False if not
"""
# Getting the WD Enable/Disable status
# 0 - Disabled WD
# 1 - Enabled WD
wd_status = io_reg_read(self.io_resource, self.wd_status_offset)
return bool(wd_status)
def METHOD_NAME(self):
"""
If the watchdog is armed, retrieve the number of seconds
remaining on the watchdog timer.
Returns:
An integer specifying the number of seconds remaining on
the watchdog timer. If the watchdog is not armed, returns
-1.
Z9332 does not have hardware support to show remaining time.
Due to this limitation, this API is implemented in software.
This API returns the correct software time difference only if it
is called from the process that armed the watchdog timer.
If this API is called from any other process, it returns
0. If the watchdog is not armed, this API returns -1.
"""
if not self.is_armed():
return -1
if self.armed_time > 0 and self.timeout != 0:
cur_time = self._get_time()
if cur_time <= 0:
return 0
diff_time = int(cur_time - self.armed_time)
if diff_time > self.timeout:
return self.timeout
else:
return self.timeout - diff_time
return 0
|
299,432 | test opts builder repr | """
Unit tests of the helper functions in utils
"""
from unittest import SkipTest
from collections import OrderedDict
from holoviews import notebook_extension
from holoviews.element.comparison import ComparisonTestCase
from holoviews import Store
from holoviews.util import output, opts, OutputSettings, Options
from holoviews.core.options import OptionTree
from pyviz_comms import CommManager
from holoviews.plotting import mpl
from holoviews.plotting import bokeh
BACKENDS = ['matplotlib', 'bokeh']
from ..utils import LoggingComparisonTestCase
try:
import notebook
except ImportError:
notebook = None
class TestOutputUtil(ComparisonTestCase):
def setUp(self):
if notebook is None:
raise SkipTest("Jupyter Notebook not available")
notebook_extension(*BACKENDS)
Store.current_backend = 'matplotlib'
Store.renderers['matplotlib'] = mpl.MPLRenderer.instance()
Store.renderers['bokeh'] = bokeh.BokehRenderer.instance()
OutputSettings.options = OrderedDict(OutputSettings.defaults.items())
super().setUp()
def tearDown(self):
Store.renderers['matplotlib'] = mpl.MPLRenderer.instance()
Store.renderers['bokeh'] = bokeh.BokehRenderer.instance()
OutputSettings.options = OrderedDict(OutputSettings.defaults.items())
for renderer in Store.renderers.values():
renderer.comm_manager = CommManager
super().tearDown()
def test_output_util_svg_string(self):
self.assertEqual(OutputSettings.options.get('fig', None), None)
output("fig='svg'")
self.assertEqual(OutputSettings.options.get('fig', None), 'svg')
def test_output_util_png_kwargs(self):
self.assertEqual(OutputSettings.options.get('fig', None), None)
output(fig='png')
self.assertEqual(OutputSettings.options.get('fig', None), 'png')
def test_output_util_backend_string(self):
self.assertEqual(OutputSettings.options.get('backend', None), None)
output("backend='bokeh'")
self.assertEqual(OutputSettings.options.get('backend', None), 'bokeh')
def test_output_util_backend_kwargs(self):
self.assertEqual(OutputSettings.options.get('backend', None), None)
output(backend='bokeh')
self.assertEqual(OutputSettings.options.get('backend', None), 'bokeh')
def test_output_util_object_noop(self):
self.assertEqual(output("fig='svg'",3), 3)
class TestOptsUtil(LoggingComparisonTestCase):
"""
Mirrors the magic tests in TestOptsMagic
"""
def setUp(self):
self.backend = Store.current_backend
Store.current_backend = 'matplotlib'
self.store_copy = OptionTree(sorted(Store.options().items()),
groups=Options._option_groups)
super().setUp()
def tearDown(self):
Store.current_backend = self.backend
Store.options(val=self.store_copy)
Store._custom_options = {k:{} for k in Store._custom_options.keys()}
super().tearDown()
def METHOD_NAME(self):
magic= "Bivariate [bandwidth=0.5] (cmap='jet') Points [logx=True] (size=2)"
expected= ["opts.Bivariate(bandwidth=0.5, cmap='jet')",
"opts.Points(logx=True, size=2)"]
reprs = opts._builder_reprs(magic)
self.assertEqual(reprs, expected)
def test_opts_builder_repr_line_magic(self):
magic= "%opts Bivariate [bandwidth=0.5] (cmap='jet') Points [logx=True] (size=2)"
expected= ["opts.Bivariate(bandwidth=0.5, cmap='jet')",
"opts.Points(logx=True, size=2)"]
reprs = opts._builder_reprs(magic)
self.assertEqual(reprs, expected)
def test_opts_builder_repr_cell_magic(self):
magic= "%%opts Bivariate [bandwidth=0.5] (cmap='jet') Points [logx=True] (size=2)"
expected= ["opts.Bivariate(bandwidth=0.5, cmap='jet')",
"opts.Points(logx=True, size=2)"]
reprs = opts._builder_reprs(magic)
self.assertEqual(reprs, expected)
def test_opts_builder_repr_options_dotted(self):
options = [Options('Bivariate.Test.Example', bandwidth=0.5, cmap='Blues'),
Options('Points', size=2, logx=True)]
expected= ["opts.Bivariate('Test.Example', bandwidth=0.5, cmap='Blues')",
"opts.Points(logx=True, size=2)"]
reprs = opts._builder_reprs(options)
self.assertEqual(reprs, expected) |
299,433 | input shape | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=unused-argument
"""Resize 2D tesst.
"""
import numpy as np
import tvm
from tvm import te
from tvm.topi.testing import resize2d_python
import tvm.topi.hexagon as s1
from tvm.contrib.hexagon import allocate_hexagon_array
from ..infrastructure import transform_numpy, get_hexagon_target
class TestResize2d:
"""Test resize 2D class."""
(batch, channel, in_height, in_width, out_height, out_width,) = tvm.testing.parameters(
(
1,
32,
8,
8,
16,
16,
),
(
1,
32,
48,
48,
8,
8,
),
)
(layout, input_crouton_layout, output_layout, dtype,) = tvm.testing.parameters(
("NHWC", "nhwc-8h2w32c2w-2d", "nhwc-8h2w32c2w-2d", "float16"),
("NHWC", "nhwc-8h8w32c-2d", "nhwc-8h8w32c-2d", "uint8"),
)
coord_trans = tvm.testing.parameter("asymmetric", "align_corners", "half_pixel")
method = tvm.testing.parameter("nearest_neighbor", "linear")
@tvm.testing.fixture
def expected_output_np(
self,
input_np,
in_height,
in_width,
out_height,
out_width,
layout,
method,
coord_trans,
):
"""Generate expected output."""
scale_h = out_height / in_height
scale_w = out_width / in_width
return resize2d_python(input_np, (scale_h, scale_w), layout, method, coord_trans)
@tvm.testing.fixture
def input_np(self, METHOD_NAME, dtype):
if dtype == "float16":
return np.random.random(METHOD_NAME).astype(dtype)
if dtype == "uint8":
return np.random.randint(0, 255, METHOD_NAME).astype(dtype)
if dtype == "int8":
return np.random.randint(-128, 127, METHOD_NAME).astype(dtype)
raise RuntimeError(f"dtype {dtype} is not valid.")
@tvm.testing.fixture
def transformed_input_np(self, input_np, layout, input_crouton_layout, dtype):
if dtype in ["float16", "uint8", "int8"]:
return transform_numpy(input_np, layout.lower(), input_crouton_layout)
raise RuntimeError(f"Unsupported data type '{dtype}'")
@tvm.testing.fixture
def transformed_expected_output_np(self, expected_output_np, layout, output_layout, dtype):
if dtype in ["float16", "uint8", "int8"]:
return transform_numpy(expected_output_np, layout.lower(), output_layout)
raise RuntimeError(f"Unsupported data type '{dtype}'")
@tvm.testing.fixture
def METHOD_NAME(self, batch, channel, in_height, in_width):
return (batch, in_height, in_width, channel)
@tvm.testing.fixture
def output_shape(self, batch, channel, out_height, out_width):
return (batch, out_height, out_width, channel)
@tvm.testing.requires_hexagon
def test_resize2d(
self,
dtype,
input_np,
transformed_input_np,
METHOD_NAME,
output_shape,
expected_output_np,
transformed_expected_output_np,
layout,
input_crouton_layout,
output_layout,
coord_trans,
method,
hexagon_session,
):
"""Test resize 2D."""
a_tensor = te.placeholder(METHOD_NAME, name="a_tensor", dtype=dtype)
m_tensor = s1.resize2d_compute(
a_tensor,
[0.0] * 4,
(output_shape[1], output_shape[2]),
layout=layout,
coordinate_transformation_mode=coord_trans,
method=method,
out_dtype=dtype,
)
tir_schedule = s1.tir_resize2d_schedule(
m_tensor, a_tensor, input_crouton_layout, output_layout
)
sch = tir_schedule.mod
input_axis_separator = [4]
if output_layout in (
"nhwc-8h2w32c2w-2d",
"nhwc-8h8w32c-2d",
):
output_axis_separator = [4]
else:
raise RuntimeError(f"Unexpected layout '{output_layout}'")
with tvm.transform.PassContext(opt_level=3):
func = tvm.build(
sch,
[a_tensor, m_tensor],
get_hexagon_target("v69"),
name="resize2d",
)
a_data_nd = allocate_hexagon_array(
hexagon_session.device,
data=transformed_input_np,
dtype=dtype,
axis_separators=input_axis_separator,
mem_scope="global.vtcm",
)
m_data_nd = allocate_hexagon_array(
hexagon_session.device,
transformed_expected_output_np.shape,
dtype=dtype,
axis_separators=output_axis_separator,
mem_scope="global.vtcm",
)
mod = hexagon_session.load_module(func)
mod(a_data_nd, m_data_nd)
batch_size, height, width, channel = output_shape
# convert nd to np and reshape to fixed chunk size layout
if output_layout == "nhwc-8h2w32c2w-2d":
m_data_np = m_data_nd.numpy().reshape(
[batch_size, height // 8, width // 4, channel // 32, 8, 2, 32, 2]
)
elif output_layout == "nhwc-8h8w32c-2d":
m_data_np = m_data_nd.numpy().reshape(
[batch_size, height // 8, width // 8, channel // 32, 8, 8, 32]
)
if dtype == "float16":
np.testing.assert_allclose(
transformed_expected_output_np, m_data_np, rtol=1e-3, atol=1e-3
)
elif dtype in ["int8", "uint8"]:
np.testing.assert_allclose(transformed_expected_output_np, m_data_np, rtol=1, atol=1)
if __name__ == "__main__":
tvm.testing.main() |
299,434 | squash by groups | """Filter copy number segments."""
import functools
import logging
import numpy as np
import pandas as pd
from .descriptives import weighted_median
def require_column(*colnames):
"""Wrapper to coordinate the segment-filtering functions.
Verify that the given columns are in the CopyNumArray the wrapped function
takes. Also log the number of rows in the array before and after filtration.
"""
if len(colnames) == 1:
msg = "'{}' filter requires column '{}'"
else:
msg = "'{}' filter requires columns " + ", ".join(["'{}'"] * len(colnames))
def wrap(func):
@functools.wraps(func)
def wrapped_f(segarr):
filtname = func.__name__
if any(c not in segarr for c in colnames):
raise ValueError(msg.format(filtname, *colnames))
result = func(segarr)
logging.info(
"Filtered by '%s' from %d to %d rows",
filtname,
len(segarr),
len(result),
)
return result
return wrapped_f
return wrap
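# Usage sketch (ours; ``my_filter`` is a hypothetical filter): the decorator
# checks that the column exists and logs row counts before and after filtering.
#   @require_column("log2")
#   def my_filter(segarr):
#       return segarr[segarr["log2"] > 0]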
def METHOD_NAME(cnarr, levels, by_arm=False):
"""Reduce CopyNumArray rows to a single row within each given level."""
# Enumerate runs of identical values
change_levels = enumerate_changes(levels)
assert (change_levels.index == levels.index).all()
assert cnarr.data.index.is_unique
assert levels.index.is_unique
assert change_levels.index.is_unique
if by_arm:
# Enumerate chromosome arms
arm_levels = []
for i, (_chrom, cnarm) in enumerate(cnarr.by_arm()):
arm_levels.append(np.repeat(i, len(cnarm)))
change_levels += np.concatenate(arm_levels)
else:
# Enumerate chromosomes
chrom_names = cnarr["chromosome"].unique()
chrom_col = cnarr["chromosome"].replace(
chrom_names, np.arange(len(chrom_names))
)
change_levels += chrom_col
data = cnarr.data.assign(_group=change_levels)
groupkey = ["_group"]
if "cn1" in cnarr:
# Keep allele-specific CNAs separate
data["_g1"] = enumerate_changes(cnarr["cn1"])
data["_g2"] = enumerate_changes(cnarr["cn2"])
groupkey.extend(["_g1", "_g2"])
data = (
data.groupby(groupkey, as_index=False, group_keys=False, sort=False)
.apply(squash_region)
.reset_index(drop=True)
)
return cnarr.as_dataframe(data)
def enumerate_changes(levels):
"""Assign a unique integer to each run of identical values.
Repeated but non-consecutive values will be assigned different integers.
"""
return levels.diff().fillna(0).abs().cumsum().astype(int)
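# Worked example (ours): enumerate_changes(pd.Series([1, 1, 2, 2, 1])) yields
# [0, 0, 1, 1, 2]: diff marks each value change, abs + cumsum numbers the
# runs, so the trailing 1 starts a new run and gets a fresh id.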
def squash_region(cnarr):
"""Reduce a CopyNumArray to 1 row, keeping fields sensible.
Most fields added by the `segmetrics` command will be dropped.
"""
assert "weight" in cnarr
out = {
"chromosome": [cnarr["chromosome"].iat[0]],
"start": cnarr["start"].iat[0],
"end": cnarr["end"].iat[-1],
}
region_weight = cnarr["weight"].sum()
if region_weight > 0:
out["log2"] = np.average(cnarr["log2"], weights=cnarr["weight"])
else:
out["log2"] = np.mean(cnarr["log2"])
out["gene"] = ",".join(cnarr["gene"].drop_duplicates())
out["probes"] = cnarr["probes"].sum() if "probes" in cnarr else len(cnarr)
out["weight"] = region_weight
if "depth" in cnarr:
if region_weight > 0:
out["depth"] = np.average(cnarr["depth"], weights=cnarr["weight"])
else:
out["depth"] = np.mean(cnarr["depth"])
if "baf" in cnarr:
if region_weight > 0:
out["baf"] = np.average(cnarr["baf"], weights=cnarr["weight"])
else:
out["baf"] = np.mean(cnarr["baf"])
if "cn" in cnarr:
if region_weight > 0:
out["cn"] = weighted_median(cnarr["cn"], cnarr["weight"])
else:
out["cn"] = np.median(cnarr["cn"])
if "cn1" in cnarr:
if region_weight > 0:
out["cn1"] = weighted_median(cnarr["cn1"], cnarr["weight"])
else:
out["cn1"] = np.median(cnarr["cn1"])
out["cn2"] = out["cn"] - out["cn1"]
if "p_bintest" in cnarr:
# Only relevant for single-bin segments, but this seems safe/conservative
out["p_bintest"] = cnarr["p_bintest"].max()
return pd.DataFrame(out)
@require_column("cn")
def ampdel(segarr):
"""Merge segments by amplified/deleted/neutral copy number status.
Follow the clinical reporting convention:
- 5+ copies (2.5-fold gain) is amplification
- 0 copies is homozygous/deep deletion
- CNAs of lesser degree are not reported
This is recommended only for selecting segments corresponding to
actionable, usually focal, CNAs. Any real and potentially informative but
lower-level CNAs will be dropped.
"""
levels = np.zeros(len(segarr))
levels[segarr["cn"] == 0] = -1
levels[segarr["cn"] >= 5] = 1
# or: segarr['log2'] >= np.log2(2.5)
cnarr = METHOD_NAME(segarr, pd.Series(levels))
return cnarr[(cnarr["cn"] == 0) | (cnarr["cn"] >= 5)]
@require_column("depth")
def bic(segarr):
"""Merge segments by Bayesian Information Criterion.
See: BIC-seq (Xi 2011), doi:10.1073/pnas.1110574108
"""
return NotImplemented
@require_column("ci_lo", "ci_hi")
def ci(segarr):
"""Merge segments by confidence interval (overlapping 0).
Segments with lower CI above 0 are kept as gains, upper CI below 0 as
losses, and the rest with CI overlapping zero are collapsed as neutral.
"""
levels = np.zeros(len(segarr))
# if len(segarr) < 10:
# logging.warning("* segarr :=\n%s", segarr)
# logging.warning("* segarr['ci_lo'] :=\n%s", segarr['ci_lo'])
# logging.warning("* segarr['ci_lo']>0 :=\n%s", segarr['ci_lo'] > 0)
levels[segarr["ci_lo"].values > 0] = 1
levels[segarr["ci_hi"].values < 0] = -1
return METHOD_NAME(segarr, pd.Series(levels))
@require_column("cn")
def cn(segarr):
"""Merge segments by integer copy number."""
return METHOD_NAME(segarr, segarr["cn"])
@require_column("sem")
def sem(segarr, zscore=1.96):
"""Merge segments by Standard Error of the Mean (SEM).
Use each segment's SEM value to estimate a 95% confidence interval (via
`zscore`). Segments with lower CI above 0 are kept as gains, upper CI below
0 as losses, and the rest with CI overlapping zero are collapsed as neutral.
"""
margin = segarr["sem"] * zscore
levels = np.zeros(len(segarr))
levels[segarr["log2"] - margin > 0] = 1
levels[segarr["log2"] + margin < 0] = -1
return METHOD_NAME(segarr, pd.Series(levels)) |
299,435 | set background color | from rubicon.objc import CGSize, objc_method, objc_property
from travertino.size import at_least
from toga_iOS.colors import native_color
from toga_iOS.libs import (
NSTextAlignment,
UIColor,
UIPickerView,
UITextBorderStyle,
UITextField,
)
from toga_iOS.widgets.base import Widget
class TogaPickerView(UIPickerView):
interface = objc_property(object, weak=True)
impl = objc_property(object, weak=True)
@objc_method
def numberOfComponentsInPickerView_(self, pickerView) -> int:
return 1
@objc_method
def pickerView_numberOfRowsInComponent_(self, pickerView, component: int) -> int:
return len(self.interface.items)
@objc_method
def pickerView_titleForRow_forComponent_(
self,
pickerView,
row: int,
component: int,
):
try:
item = self.interface.items[int(row)]
label = self.interface._title_for_item(item)
return label
except IndexError:
# iOS can't have a fully empty picker; there's always a row 0.
# If we get an index error, it will be because the data source
# is empty, so return an empty string.
return ""
@objc_method
def pickerView_didSelectRow_inComponent_(
self, pickerView, row: int, component: int
):
item = self.interface.items[row]
label = self.interface._title_for_item(item)
self.native.text = label
self.interface.on_change(None)
class Selection(Widget):
def create(self):
self.native = UITextField.alloc().init()
self.native.interface = self.interface
self.native.impl = self
self.native.tintColor = UIColor.clearColor
self.native.borderStyle = UITextBorderStyle.RoundedRect
self.native_picker = TogaPickerView.alloc().init()
self.native_picker.interface = self.interface
self.native_picker.impl = self
self.native_picker.native = self.native
self.native_picker.delegate = self.native_picker
self.native_picker.dataSource = self.native_picker
self.native.inputView = self.native_picker
self.native.delegate = self.native_picker
# The iOS widget doesn't maintain a local concept of the number of items, so it's
# not possible to identify whether the current visual display is empty during a
# change of source. Maintain a local boolean to track when we believe our
# local representation has no items.
self._empty = True
self.add_constraints()
def set_alignment(self, value):
self.native.textAlignment = NSTextAlignment(value)
def set_color(self, color):
self.native.textColor = native_color(color)
def METHOD_NAME(self, color):
self.set_background_color_simple(color)
def set_font(self, font):
self.native.font = font._impl.native
def rehint(self):
# Height of a text input is known.
fitting_size = self.native.systemLayoutSizeFittingSize(CGSize(0, 0))
self.interface.intrinsic.width = at_least(
max(self.interface._MIN_WIDTH, fitting_size.width)
)
self.interface.intrinsic.height = fitting_size.height
def _reset_selection(self):
try:
default_item = self.interface.items[0]
except IndexError:
# Deleted the last item; source is empty
default_item = None
self._empty = True
self.select_item(0, default_item)
def insert(self, index, item):
if self._empty:
# If you're inserting the first item, make sure it's selected
self.select_item(index, item)
self._empty = False
else:
# If you're inserting before the current selection,
# the index of the current selection needs to be increased by 1.
selected_index = self.native_picker.selectedRowInComponent(0)
if index <= selected_index:
self.native_picker.selectRow(
selected_index + 1, inComponent=0, animated=False
)
# Get rid of focus to force the user to re-open the selection
self.native_picker.resignFirstResponder()
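# Worked example for the branch above (hypothetical items, not from the source):
# with rows [A, B, C] and row 1 (B) selected, inserting a new row at index 0
# makes selectedRowInComponent(0) stale, so it is bumped to 2 and B stays selected.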
def change(self, item):
index = self.interface.items.index(item)
if self.native_picker.selectedRowInComponent(0) == index:
self.native.text = self.interface._title_for_item(item)
# Get rid of focus to force the user to re-open the selection
self.native_picker.resignFirstResponder()
# Changing the item text can change the layout size
self.interface.refresh()
def remove(self, index, item):
selection_change = self.native_picker.selectedRowInComponent(0) == index
# Get rid of focus to force the user to re-open the selection
self.native_picker.resignFirstResponder()
if selection_change:
self._reset_selection()
def clear(self):
self._empty = True
# Get rid of focus to force the user to re-open the selection
self.native_picker.resignFirstResponder()
self._reset_selection()
def select_item(self, index, item):
if item is not None:
self.native.text = self.interface._title_for_item(item)
self.native_picker.selectRow(index, inComponent=0, animated=False)
else:
self.native.text = ""
self.interface.on_change(None)
def get_selected_index(self):
if self._empty:
return None
return self.native_picker.selectedRowInComponent(0) |
299,436 | two opt swap |
import numpy as np
from PYME.Analysis.points.traveling_salesperson import two_opt_utils
def calculate_path_length(distances, route):
"""
Parameters
----------
distances: ndarray
distance array, for which distances[i, j] is the distance from the ith to the jth point
route: ndarray
array of indices defining the path
"""
return distances[route[:-1], route[1:]].sum()
def METHOD_NAME(route, i, k):
"""
Keep everything up to i unchanged, reverse the slice from i through k inclusive, then keep everything after k unchanged.
Parameters
----------
route: ndarray
Path to swap positions in
i: int
first swap index
k: int
second swap index
Returns
-------
two-opt swapped route
Notes
-----
Returns a copy. Tempting to do something in place, e.g. route[i:k + 1] = route[k:i - 1: -1], but the algorithm
seems to require a copy somewhere anyway, so might as well do it here.
"""
return np.concatenate([route[:i], # [start up to first swap position)
route[k:i - 1: -1], # [from first swap to second], reversed
route[k + 1:]]) # (everything else]
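# Hypothetical worked example (not in the original module): with
# route = np.array([0, 1, 2, 3, 4, 5]), i = 2, k = 4 the pieces are
# route[:2] == [0, 1], route[4:1:-1] == [4, 3, 2], route[5:] == [5],
# so the swapped route is [0, 1, 4, 3, 2, 5].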
def two_opt_test(route, i, k, distances, k_max):
"""
Test to see what distance change we'd get doing a two_opt_swap.
Keep everything up to i unchanged, reverse the slice from i through k inclusive, then keep everything after k unchanged.
Parameters
----------
route: ndarray
Path to swap positions in
i: int
first swap index
k: int
second swap index
distances: NxN matrix of float
distances between points
k_max: pre-computed maximum value of k == distances.shape[0] - 1
Returns
-------
distance change on swap
Notes
-----
There is a cythonized version in two_opt_utils which is considerably faster.
"""
removed = 0
added = 0
if i > 0:
removed = distances[route[i - 1], route[i]]
added = distances[route[i - 1], route[k]]
if k < k_max:
removed += distances[route[k], route[k + 1]]
added += distances[route[i], route[k + 1]]
return added - removed
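# Assumption-free restatement of the bookkeeping above: reversing route[i:k+1]
# only touches the two boundary edges, so the distance delta is
#   (distances[route[i-1], route[k]] + distances[route[i], route[k+1]])
#   - (distances[route[i-1], route[i]] + distances[route[k], route[k+1]])
# with the i == 0 and k == k_max terms dropped at the ends of the route.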
def two_opt(distances, epsilon, initial_route=None, fixed_endpoint=False):
"""
Solves the traveling salesperson problem (TSP) using two-opt swaps to untangle a route.
Parameters
----------
distances: ndarray
distance array, for which distances[i, j] is the distance from the ith to the jth point
epsilon: float
exit tolerance on relative improvement. 0.01 corresponds to 1%
initial_route: ndarray
[optional] route to initialize search with. Note that the first position in the route is fixed, but all others
may vary. If no route is provided, the initial route is the same order the distances array was constructed with.
Returns
-------
route: ndarray
"solved" route
best_distance: float
distance of the route
og_distance: float
distance of the initial route.
Notes
-----
see https://en.wikipedia.org/wiki/2-opt for pseudo code
"""
route = initial_route.astype(int) if initial_route is not None else np.arange(distances.shape[0], dtype=int)
endpoint_offset = int(fixed_endpoint)
og_distance = calculate_path_length(distances, route)
# initialize values we'll be updating
improvement = 1
best_distance = og_distance
k_max = distances.shape[0] - 1
while improvement > epsilon:
last_distance = best_distance
for i in range(1, distances.shape[0] - 2): # don't swap the first position
for k in range(i + 1, distances.shape[0] - endpoint_offset):
d_dist = two_opt_utils.two_opt_test(route, i, k, distances, k_max)
if d_dist < 0:
# do the swap in-place since we tested before we leaped and we don't need the old route
route[i:k + 1] = route[k:i - 1: -1]
best_distance = best_distance + d_dist
improvement = (last_distance - best_distance) / last_distance
return route, best_distance, og_distance
def timeout_two_opt(distances, epsilon, timeout, initial_route=None):
"""
Solves the traveling salesperson problem (TSP) using two-opt swaps to untangle a route.
Parameters
----------
distances: ndarray
distance array, for which distances[i, j] is the distance from the ith to the jth point
epsilon: float
exit tolerance on relative improvement. 0.01 corresponds to 1%
timeout: float
number of seconds to allow computation
initial_route: ndarray
[optional] route to initialize search with. Note that the first position in the route is fixed, but all others
may vary. If no route is provided, the initial route is the same order the distances array was constructed with.
Returns
-------
route: ndarray
"solved" route
best_distance: float
distance of the route
og_distance: float
distance of the initial route.
Notes
-----
see https://en.wikipedia.org/wiki/2-opt for pseudo code
"""
import time
abort_time = time.time() + timeout
# start route backwards. Starting point will be fixed, and we want LIFO for fast microscope acquisition
route = initial_route if initial_route is not None else np.arange(distances.shape[0] - 1, -1, -1)
og_distance = calculate_path_length(distances, route)
# initialize values we'll be updating
improvement = 1
best_distance = og_distance
while improvement > epsilon:
last_distance = best_distance
for i in range(1, distances.shape[0] - 2): # don't swap the first position
if time.time() > abort_time:
return route, best_distance, og_distance
for k in range(i + 1, distances.shape[0]): # allow the last position in the route to vary
new_route = METHOD_NAME(route, i, k)
new_distance = calculate_path_length(distances, new_route)
if new_distance < best_distance:
route = new_route
best_distance = new_distance
improvement = (last_distance - best_distance) / last_distance
return route, best_distance, og_distance |
299,437 | test subclass mro | """Slot tests
Made for Jython.
"""
from test import test_support
import unittest
# The strict tests fail on PyPy (but work on CPython and Jython).
# They're questionable.
strict = True
class SlottedTestCase(unittest.TestCase):
def test_slotted(self):
class Foo(object):
__slots__ = 'bar'
self.assert_('__dict__' not in Foo.__dict__)
foo = Foo()
self.assert_(not hasattr(foo, '__dict__'))
foo.bar = 'hello bar'
self.assertEqual(foo.bar, 'hello bar')
self.assertRaises(AttributeError, setattr, foo, 'foo', 'hello foo')
class Baz(object):
__slots__ = ['python', 'jython']
self.assert_('__dict__' not in Baz.__dict__)
baz = Baz()
self.assert_(not hasattr(baz, '__dict__'))
baz.python = 'hello python'
baz.jython = 'hello jython'
self.assertEqual(baz.python, 'hello python')
self.assertEqual(baz.jython, 'hello jython')
self.assertRaises(AttributeError, setattr, baz, 'foo', 'hello')
class SlottedWithDictTestCase(unittest.TestCase):
def test_subclass(self):
class Base(object):
pass
class Foo(Base):
__slots__ = 'bar'
self.assert_('__dict__' not in Foo.__dict__)
foo = Foo()
self.assert_(hasattr(foo, '__dict__'))
foo.bar = 'hello bar'
foo.foo = 'hello foo'
self.assertEqual(foo.bar, 'hello bar')
self.assertEqual(foo.__dict__, {'foo': 'hello foo'})
def METHOD_NAME(self):
class Base(object):
pass
class Slotted(object):
__slots__ = 'baz'
class Foo(Slotted, Base):
__slots__ = 'bar'
if strict:
self.assert_('__dict__' in Foo.__dict__)
self.assertEqual(Foo.__dict__['__dict__'].__objclass__, Foo)
foo = Foo()
self.assert_(hasattr(foo, '__dict__'))
foo.bar = 'hello bar'
foo.baz = 'hello baz'
foo.foo = 'hello foo'
self.assertEqual(foo.bar, 'hello bar')
self.assertEqual(foo.baz, 'hello baz')
self.assertEqual(foo.__dict__, {'foo': 'hello foo'})
class Bar(Slotted, Base):
pass
if strict:
self.assert_('__dict__' in Bar.__dict__)
self.assertEqual(Bar.__dict__['__dict__'].__objclass__, Bar)
bar = Bar()
self.assert_(hasattr(bar, '__dict__'))
bar.bar = 'hello bar'
bar.baz = 'hello baz'
bar.foo = 'hello foo'
self.assertEqual(bar.bar, 'hello bar')
self.assertEqual(bar.baz, 'hello baz')
self.assertEqual(bar.__dict__, {'foo': 'hello foo', 'bar': 'hello bar'})
def test_subclass_oldstyle(self):
class OldBase:
pass
class Foo(OldBase, object):
__slots__ = 'bar'
if strict:
self.assert_('__dict__' in Foo.__dict__)
self.assertEqual(Foo.__dict__['__dict__'].__objclass__, Foo)
foo = Foo()
self.assert_(hasattr(foo, '__dict__'))
foo.bar = 'hello bar'
foo.foo = 'hello foo'
self.assertEqual(foo.bar, 'hello bar')
self.assertEqual(foo.__dict__, {'foo': 'hello foo'})
class Bar(OldBase, object):
__slots__ = '__dict__'
self.assert_('__dict__' in Bar.__dict__)
self.assertEqual(Bar.__dict__['__dict__'].__objclass__, Bar)
bar = Bar()
self.assert_(hasattr(bar, '__dict__'))
bar.bar = 'hello bar'
bar.foo = 'hello foo'
self.assertEqual(bar.bar, 'hello bar')
self.assertEqual(bar.__dict__, {'foo': 'hello foo', 'bar': 'hello bar'})
def test_mixin_oldstyle(self):
class OldBase:
pass
class NewBase(object):
pass
class Baz(NewBase, OldBase):
__slots__ = 'baz'
self.assert_('__dict__' not in Baz.__dict__)
baz = Baz()
self.assert_(hasattr(baz, '__dict__'))
baz.baz = 'hello baz'
baz.bar = 'hello bar'
self.assertEqual(baz.baz, 'hello baz')
self.assertEqual(baz.bar, 'hello bar')
self.assertEqual(baz.__dict__, {'bar': 'hello bar'})
class SlottedWithWeakrefTestCase(unittest.TestCase):
def test_subclass_oldstyle(self):
class OldBase:
pass
class Foo(OldBase, object):
__slots__ = '__dict__'
self.assert_(hasattr(Foo, '__weakref__'))
def test_main():
test_support.run_unittest(SlottedTestCase,
SlottedWithDictTestCase,
SlottedWithWeakrefTestCase)
if __name__ == '__main__':
test_main() |
299,438 | batch gather 2d | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import paddle
from paddle import nn
def build_linear(n_in, n_out, name=None, init=None):
return nn.Linear(
n_in,
n_out,
weight_attr=paddle.ParamAttr(name="%s.w_0" % name if name is not None else None, initializer=init),
bias_attr="%s.b_0" % name if name is not None else None,
)
def build_layer_norm(n_in, name):
return nn.LayerNorm(
normalized_shape=n_in,
weight_attr=paddle.ParamAttr(
name="%s_layer_norm_scale" % name if name is not None else None, initializer=nn.initializer.Constant(1.0)
),
bias_attr=paddle.ParamAttr(
name="%s_layer_norm_bias" % name if name is not None else None, initializer=nn.initializer.Constant(0.0)
),
)
def lstm_init(num_layers, hidden_size, *batch_sizes):
init_size = batch_sizes + (hidden_size,)
if num_layers is not None:
init_size = (num_layers,) + init_size
init = paddle.zeros(init_size)
return (init, init)
def METHOD_NAME(var, indices):
"""Gather slices from var in each batch, according to corresponding
index in indices. Currently, it only supports a 2-D indices Tensor.
Args:
var (Variable): with shape [batch_size, ...]
indices (Variable): with shape [batch_size, max_len]
Returns: Variable with shape [batch_size, max_len, ...]
Raises: NULL
Examples:
var
[[1, 2, 3],
[4, 5, 6]]
indices
[[2, 0], [1, 2]]
return
[[3, 1], [5, 6]]
"""
if len(indices.shape) != 2:
raise ValueError(
"shape of indices error. it should be a 2-D layers. " "but got shape = %s" % (str(indices.shape),)
)
batch_size = paddle.shape(indices)[0]
zero = paddle.to_tensor([0], dtype="int64")
one = paddle.to_tensor([1], dtype="int64")
end = paddle.cast(batch_size, dtype="int64")
batch_indices_1d = paddle.unsqueeze(paddle.arange(zero, end, one, dtype=indices.dtype), [1])
seq_len = indices.shape[1]
batch_indices = paddle.expand(batch_indices_1d, [batch_size, seq_len])
coord_2d = paddle.concat([paddle.unsqueeze(batch_indices, [2]), paddle.unsqueeze(indices, [2])], axis=2)
coord_2d.stop_gradient = True
coord_1d = paddle.reshape(coord_2d, shape=[-1, 2])
output_1d = paddle.gather_nd(var, coord_1d)
output_2d = paddle.reshape(output_1d, [batch_size, seq_len, var.shape[-1]])
return output_2d
def sequence_mask(seq_hidden, mask, mode="zero"):
"""
Args:
seq_hidden (Tensor): NULL
mask (Tensor): 1 for unmasked tokens, and 0 for masked tokens.
mode (str): zero/-inf/+inf
Returns: the masked Tensor, with the same shape as seq_hidden
Raises: NULL
"""
while len(mask.shape) < len(seq_hidden.shape):
mask = mask.unsqueeze([-1])
mask = mask.cast(dtype=seq_hidden.dtype)
masked = paddle.multiply(seq_hidden, mask)
if mode == "zero":
return masked
if mode == "-inf":
scale_size = +1e5
elif mode == "+inf":
scale_size = -1e5
else:
raise ValueError(f"mask mode setting error. expect zero/-inf/+inf, but got {mode}")
add_mask = paddle.scale(mask - 1, scale=scale_size)
masked = paddle.add(masked, add_mask)
return masked
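# Sketch of the "-inf" branch above (derived from the code, nothing assumed):
# for masked entries (mask == 0), add_mask = (0 - 1) * 1e5 == -1e5, so those
# positions end up at a large negative value -- a softmax-friendly stand-in for
# -inf -- while unmasked entries (mask == 1) have 0 added and pass through.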
def pad_sequences(seqs, max_len, value=0.0, dtype=np.int64):
"""padding sequences"""
data_max_len = 0
format_seqs = []
for seq in seqs:
format_seqs.append(list(seq))
data_max_len = len(seq) if len(seq) > data_max_len else data_max_len
max_len = min(max_len, data_max_len)
padded = []
for seq in format_seqs:
padded.append(seq[:max_len] + [value] * (max_len - len(seq)))
padded = np.array(padded)
return padded.astype(dtype)
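# Hypothetical call illustrating the max_len clamp above: the pad width is
# min(max_len, longest sequence), so pad_sequences([[1, 2, 3], [4]], max_len=8)
# pads only to length 3 and returns [[1, 2, 3], [4, 0, 0]] (the 0.0 pad value
# is cast to the int64 dtype).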
def pad_sequences_for_3d(seqs, max_col, max_num, dtype=np.int64):
"""padding sequences for 3d"""
padded = []
for seq in seqs:
padded.append(np.vstack((seq, np.zeros((max_col - seq.shape[0], max_num), dtype=np.int64))))
return np.array(padded).astype(dtype)
def pad_index_sequences(seqs, max_col, max_row, dtype=np.int64):
"""padding sequences for column token indexes"""
padded = []
for query in seqs:
new_cols = []
for col in query[:max_row]:
temp_cols = col[:max_col] + [0] * (max_col - len(col))
new_cols.append(temp_cols)
new_cols = new_cols + [[0] * max_col for _ in range(max_row - len(new_cols))]
padded.append(new_cols)
return np.array(padded).astype(dtype)
def tensor2numpy(inputs):
if type(inputs) in (list, tuple):
return [x.numpy() for x in inputs]
elif type(inputs) is dict:
outputs = {}
for key, value in inputs.items():
if type(value) is paddle.Tensor:
outputs[key] = value.numpy()
else:
outputs[key] = value
return outputs
elif type(inputs) is paddle.Tensor:
return inputs.numpy()
else:
raise ValueError("only support inputs to be of type list/tuple/dict/Tensor." + f"but got {type(inputs)}")
if __name__ == "__main__":
"""run some simple test cases"""
seq_input = paddle.to_tensor(
[
[1, 2, 3, 4],
[5, 5, 5, 5],
],
dtype="float32",
)
mask = paddle.to_tensor(
[
[1, 1, 0, 0],
[1, 1, 1, 0],
],
dtype="float32",
)
print(sequence_mask(seq_input, mask, mode="zero"))
print(sequence_mask(seq_input, mask, mode="-inf")) |
299,439 | complete bpnumber | import signal
import sys
from bdb import Bdb
from cmd import Cmd
from collections.abc import Callable, Iterable, Mapping, Sequence
from inspect import _SourceObjectType
from types import CodeType, FrameType, TracebackType
from typing import IO, Any, ClassVar, TypeVar
from typing_extensions import ParamSpec, Self
__all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace", "post_mortem", "help"]
_T = TypeVar("_T")
_P = ParamSpec("_P")
line_prefix: str # undocumented
class Restart(Exception): ...
def run(statement: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> None: ...
def runeval(expression: str, globals: dict[str, Any] | None = None, locals: Mapping[str, Any] | None = None) -> Any: ...
def runctx(statement: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> None: ...
def runcall(func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ...
def set_trace(*, header: str | None = None) -> None: ...
def post_mortem(t: TracebackType | None = None) -> None: ...
def pm() -> None: ...
class Pdb(Bdb, Cmd):
# Everything here is undocumented, except for __init__
commands_resuming: ClassVar[list[str]]
aliases: dict[str, str]
mainpyfile: str
_wait_for_mainpyfile: bool
rcLines: list[str]
commands: dict[int, list[str]]
commands_doprompt: dict[int, bool]
commands_silent: dict[int, bool]
commands_defining: bool
commands_bnum: int | None
lineno: int | None
stack: list[tuple[FrameType, int]]
curindex: int
curframe: FrameType | None
curframe_locals: Mapping[str, Any]
def __init__(
self,
completekey: str = "tab",
stdin: IO[str] | None = None,
stdout: IO[str] | None = None,
skip: Iterable[str] | None = None,
nosigint: bool = False,
readrc: bool = True,
) -> None: ...
def forget(self) -> None: ...
def setup(self, f: FrameType | None, tb: TracebackType | None) -> None: ...
def execRcLines(self) -> None: ...
def bp_commands(self, frame: FrameType) -> bool: ...
def interaction(self, frame: FrameType | None, traceback: TracebackType | None) -> None: ...
def displayhook(self, obj: object) -> None: ...
def handle_command_def(self, line: str) -> bool: ...
def defaultFile(self) -> str: ...
def lineinfo(self, identifier: str) -> tuple[None, None, None] | tuple[str, str, int]: ...
def checkline(self, filename: str, lineno: int) -> int: ...
def _getval(self, arg: str) -> object: ...
def print_stack_trace(self) -> None: ...
def print_stack_entry(self, frame_lineno: tuple[FrameType, int], prompt_prefix: str = "\n-> ") -> None: ...
def lookupmodule(self, filename: str) -> str | None: ...
if sys.version_info < (3, 11):
def _runscript(self, filename: str) -> None: ...
def do_commands(self, arg: str) -> bool | None: ...
def do_break(self, arg: str, temporary: bool = ...) -> bool | None: ...
def do_tbreak(self, arg: str) -> bool | None: ...
def do_enable(self, arg: str) -> bool | None: ...
def do_disable(self, arg: str) -> bool | None: ...
def do_condition(self, arg: str) -> bool | None: ...
def do_ignore(self, arg: str) -> bool | None: ...
def do_clear(self, arg: str) -> bool | None: ...
def do_where(self, arg: str) -> bool | None: ...
def do_up(self, arg: str) -> bool | None: ...
def do_down(self, arg: str) -> bool | None: ...
def do_until(self, arg: str) -> bool | None: ...
def do_step(self, arg: str) -> bool | None: ...
def do_next(self, arg: str) -> bool | None: ...
def do_run(self, arg: str) -> bool | None: ...
def do_return(self, arg: str) -> bool | None: ...
def do_continue(self, arg: str) -> bool | None: ...
def do_jump(self, arg: str) -> bool | None: ...
def do_debug(self, arg: str) -> bool | None: ...
def do_quit(self, arg: str) -> bool | None: ...
def do_EOF(self, arg: str) -> bool | None: ...
def do_args(self, arg: str) -> bool | None: ...
def do_retval(self, arg: str) -> bool | None: ...
def do_p(self, arg: str) -> bool | None: ...
def do_pp(self, arg: str) -> bool | None: ...
def do_list(self, arg: str) -> bool | None: ...
def do_whatis(self, arg: str) -> bool | None: ...
def do_alias(self, arg: str) -> bool | None: ...
def do_unalias(self, arg: str) -> bool | None: ...
def do_help(self, arg: str) -> bool | None: ...
do_b = do_break
do_cl = do_clear
do_w = do_where
do_bt = do_where
do_u = do_up
do_d = do_down
do_unt = do_until
do_s = do_step
do_n = do_next
do_restart = do_run
do_r = do_return
do_c = do_continue
do_cont = do_continue
do_j = do_jump
do_q = do_quit
do_exit = do_quit
do_a = do_args
do_rv = do_retval
do_l = do_list
do_h = do_help
def help_exec(self) -> None: ...
def help_pdb(self) -> None: ...
def sigint_handler(self, signum: signal.Signals, frame: FrameType) -> None: ...
def message(self, msg: str) -> None: ...
def error(self, msg: str) -> None: ...
if sys.version_info >= (3, 12):
def set_convenience_variable(self, frame: FrameType, name: str, value: Any) -> None: ...
def _select_frame(self, number: int) -> None: ...
def _getval_except(self, arg: str, frame: FrameType | None = None) -> object: ...
def _print_lines(
self, lines: Sequence[str], start: int, breaks: Sequence[int] = (), frame: FrameType | None = None
) -> None: ...
def _cmdloop(self) -> None: ...
def do_display(self, arg: str) -> bool | None: ...
def do_interact(self, arg: str) -> bool | None: ...
def do_longlist(self, arg: str) -> bool | None: ...
def do_source(self, arg: str) -> bool | None: ...
def do_undisplay(self, arg: str) -> bool | None: ...
do_ll = do_longlist
def _complete_location(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ...
def METHOD_NAME(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ...
def _complete_expression(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ...
def complete_undisplay(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ...
def complete_unalias(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ...
complete_commands = METHOD_NAME
complete_break = _complete_location
complete_b = _complete_location
complete_tbreak = _complete_location
complete_enable = METHOD_NAME
complete_disable = METHOD_NAME
complete_condition = METHOD_NAME
complete_ignore = METHOD_NAME
complete_clear = _complete_location
complete_cl = _complete_location
complete_debug = _complete_expression
complete_print = _complete_expression
complete_p = _complete_expression
complete_pp = _complete_expression
complete_source = _complete_expression
complete_whatis = _complete_expression
complete_display = _complete_expression
if sys.version_info < (3, 11):
def _runmodule(self, module_name: str) -> None: ...
# undocumented
def find_function(funcname: str, filename: str) -> tuple[str, str, int] | None: ...
def main() -> None: ...
def help() -> None: ...
if sys.version_info < (3, 10):
def getsourcelines(obj: _SourceObjectType) -> tuple[list[str], int]: ...
def lasti2lineno(code: CodeType, lasti: int) -> int: ...
class _rstr(str):
def __repr__(self) -> Self: ... |
299,440 | wheel name | # This file contains shared logic used to build the PyPI wheels. See
# //tools/wheel:builder for the user interface.
import argparse
import os
import re
import sys
# Location where most of the build will take place.
build_root = '/opt/drake-wheel-build'
# Location where testing of the wheel will take place.
test_root = '/opt/drake-wheel-test'
# Location where the wheel will be produced.
wheelhouse = os.path.join(build_root, 'wheel', 'wheelhouse')
# Location of various scripts and other artifacts used to complete the build.
resource_root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
def gripe(message):
"""
Prints a message to stderr.
"""
print(message, file=sys.stderr)
def die(message, result=1):
"""
Prints a message to stderr and aborts.
"""
gripe(message)
sys.exit(result)
def METHOD_NAME(python_version, wheel_version, wheel_platform):
"""
Determines the complete name of the Drake wheel, given various individual
bits such as the Drake version, Python version, and Python wheel platform.
"""
vm = f'cp{python_version}'
return f'drake-{wheel_version}-{vm}-{vm}-{wheel_platform}.whl'
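# For instance (hypothetical inputs, not taken from the build system):
# METHOD_NAME('310', '1.22.0', 'manylinux_2_31_x86_64') would return
# 'drake-1.22.0-cp310-cp310-manylinux_2_31_x86_64.whl'.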
def _check_version(version):
"""
Returns True iff the given version string matches PEP 440.
"""
return re.match(
r'^([1-9][0-9]*!)?(0|[1-9][0-9]*)'
r'(\.(0|[1-9][0-9]*))*((a|b|rc)(0|[1-9][0-9]*))?'
r'(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?'
r'([+][a-z0-9]+([-_\.][a-z0-9]+)*)?$',
version) is not None
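# Illustrative checks against the regex above (not part of the original module):
#   _check_version('1.2.0')            -> True  (plain release)
#   _check_version('1.2.0rc1')         -> True  (pre-release)
#   _check_version('1.2.0.post1.dev0') -> True  (post/dev releases)
#   _check_version('v1.2.0')           -> False (a 'v' prefix is not PEP 440)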
def find_tests(*test_subdirs):
"""
Returns a list of tests in the common directory and any subdirectories
given as additional arguments.
"""
all_tests = []
for test_dir in ('', *test_subdirs):
tests = []
test_dir_full = os.path.join(resource_root, 'test', 'tests', test_dir)
for test in os.listdir(test_dir_full):
if not os.path.isdir(os.path.join(test_dir_full, test)):
tests.append(os.path.join('tests', test_dir, test))
where = f'subdirectory {test_dir!r}' if len(test_dir) else 'directory'
assert len(tests), f'No tests were found in the test {where}!'
all_tests += sorted(tests)
return all_tests
def do_main(args, platform):
"""
Entry point; performs the build using the given CLI arguments, platform,
and resource root.
The `platform` must be either a `linux` or `macos` object which provides
platform-specific implementations of various operations necessary to
complete the build.
"""
# Work around `bazel run` changing the working directory; this is to allow
# the user to pass in relative paths in a sane manner.
real_cwd = os.environ.get('BUILD_WORKING_DIRECTORY')
if real_cwd is not None:
os.chdir(real_cwd)
# Set up argument parser.
parser = argparse.ArgumentParser(
prog='build-wheels',
description='Build the Drake PyPI wheel(s).')
parser.add_argument(
'version',
help='PEP 440 version number with which to label the wheels')
parser.add_argument(
'-o', '--output-dir', metavar='DIR', default=os.path.realpath('.'),
help='directory into which to extract wheels (default: %(default)r)')
parser.add_argument(
'-n', '--no-extract', dest='extract', action='store_false',
help='build images but do not extract wheels')
parser.add_argument(
'--no-test', dest='test', action='store_false',
help='build images but do not run tests')
# TODO(jwnimmer-tri) Remove this argument after we've updated CI not to
# provide it anymore.
parser.add_argument(
'-t', dest='_', action='store_true',
help='ignored for backwards compatibility')
if platform is not None:
platform.add_build_arguments(parser)
platform.add_selection_arguments(parser)
parser.add_argument(
'--pep440', action='store_true',
help='validate version number without building anything')
# Parse arguments.
options = parser.parse_args(args)
if not options.extract:
options.test = False
if platform is not None:
platform.fixup_options(options)
if not _check_version(options.version):
die(f'Version \'{options.version}\' does NOT conform to PEP 440')
if options.pep440:
print(f'Version \'{options.version}\' conforms to PEP 440')
return
if platform is not None:
platform.build(options)
else:
die('Building wheels is not supported on this platform '
f'(\'{sys.platform}\')')
print('wheel_builder: SUCCESS') |
299,441 | from point | #!/usr/bin/python
##################
# LatGaussFitFRTC.py
#
# Copyright David Baddeley, 2009
# d.baddeley@auckland.ac.nz
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##################
import numpy as np
#import PYME.Analysis.points.twoColour as twoColour
from .fitCommon import fmtSlicesUsed
from . import FFBase
#fresultdtype=[('tIndex', '<i4'),('fitResults', [('Ag', '<f4'),('Ar', '<f4'),('x0', '<f4'),('y0', '<f4'),('sigma', '<f4'), ('backgroundG', '<f4'),('backgroundR', '<f4'),('bx', '<f4'),('by', '<f4')]),('fitError', [('Ag', '<f4'),('Ar', '<f4'),('x0', '<f4'),('y0', '<f4'),('sigma', '<f4'), ('backgroundG', '<f4'),('backgroundR', '<f4'),('bx', '<f4'),('by', '<f4')]), ('resultCode', '<i4'), ('slicesUsed', [('x', [('start', '<i4'),('stop', '<i4'),('step', '<i4')]),('y', [('start', '<i4'),('stop', '<i4'),('step', '<i4')])])]
fresultdtype=[('tIndex', '<i4'), ('x', '<f4'), ('y', '<f4'),
('A', '<f4'),
('data', 'f4', (15,15,1)),
('sigma', 'f4', (15,15,1)),
('sp', [('Xg0', '<f4'),('Yg0', '<f4'),('Xr0', '<f4'),('Yr0', '<f4')]),
]
class GaussianFitFactory(FFBase.FFBase):
def __init__(self, data, metadata, fitfcn=None, background=None, noiseSigma=None, **kwargs):
super(GaussianFitFactory, self).__init__(data, metadata, fitfcn, background, noiseSigma, **kwargs)
def METHOD_NAME(self, x, y, z=None, roiHalfSize=7, axialHalfSize=15):
roiHalfSize=7
X, Y, dataMean, bgMean, sigma, xslice, yslice, zslice= self.getROIAtPoint(x, y, z, roiHalfSize, axialHalfSize)
res = np.zeros(1, fresultdtype)
res['tIndex'] = self.metadata['tIndex']
res['x'] = x
res['y'] = y
res['sp']['Xg0'] = X[0]
res['sp']['Yg0'] = Y[0]
data = dataMean - bgMean
res['A'] = data.sum()
res['data'][0][:data.shape[0], :data.shape[1], :] = np.atleast_3d(data)
res['sigma'][0][:data.shape[0], :data.shape[1], :] = np.atleast_3d(sigma)
return res
#so that fit tasks know which class to use
FitFactory = GaussianFitFactory
FitResult = None
FitResultsDType = fresultdtype #only defined if returning data as numarray
import PYME.localization.MetaDataEdit as mde
#from PYME.localization.FitFactories import Interpolators
#from PYME.localization.FitFactories import zEstimators
PARAMETERS = [#mde.ChoiceParam('Analysis.InterpModule','Interp:','LinearInterpolator', choices=Interpolators.interpolatorList, choiceNames=Interpolators.interpolatorDisplayList),
#mde.FilenameParam('PSFFilename', 'PSF:', prompt='Please select PSF to use ...', wildcard='PSF Files|*.psf'),
#mde.ShiftFieldParam('chroma.ShiftFilename', 'Shifts:', prompt='Please select shiftfield to use', wildcard='Shiftfields|*.sf'),
#mde.IntParam('Analysis.DebounceRadius', 'Debounce r:', 4),
#mde.FloatParam('Analysis.AxialShift', 'Z Shift [nm]:', 0),
#mde.ChoiceParam('Analysis.EstimatorModule', 'Z Start Est:', 'astigEstimator', choices=zEstimators.estimatorList),
#mde.ChoiceParam('PRI.Axis', 'PRI Axis:', 'y', choices=['x', 'y'])
]
DESCRIPTION = 'Cut out ROI for subsequent analysis - no fitting'
LONG_DESCRIPTION = 'Cut out a ROI. useful for '
USE_FOR = 'PSF extraction' |
299,442 | run | import os
import platform
import queue
import time
from dataclasses import dataclass
from enum import IntEnum
from logging import getLogger
from typing import List, Optional, Dict
from PyQt5.QtCore import QThread, pyqtSignal, QProcess
from rare.lgndr.cli import LegendaryCLI
from rare.lgndr.core import LegendaryCore
from rare.lgndr.glue.monkeys import DLManagerSignals
from rare.lgndr.models.downloading import UIUpdate
from rare.models.game import RareGame
from rare.models.install import InstallQueueItemModel, InstallOptionsModel
logger = getLogger("DownloadThread")
class DlResultCode(IntEnum):
ERROR = 1
STOPPED = 2
FINISHED = 3
@dataclass
class DlResultModel:
options: InstallOptionsModel
code: DlResultCode = DlResultCode.ERROR
message: str = ""
dlcs: Optional[List[Dict]] = None
sync_saves: bool = False
tip_url: str = ""
shortcut: bool = False
shortcut_name: str = ""
shortcut_title: str = ""
class DlThread(QThread):
result = pyqtSignal(DlResultModel)
progress = pyqtSignal(UIUpdate, object)
def __init__(self, item: InstallQueueItemModel, rgame: RareGame, core: LegendaryCore, debug: bool = False):
super(DlThread, self).__init__()
self.dlm_signals: DLManagerSignals = DLManagerSignals()
self.core: LegendaryCore = core
self.item: InstallQueueItemModel = item
self.dl_size = item.download.analysis.dl_size
self.rgame = rgame
self.debug = debug
def __finish(self, result):
if result.code == DlResultCode.FINISHED:
self.rgame.set_installed(True)
self.rgame.state = RareGame.State.IDLE
self.rgame.signals.progress.finish.emit(not result.code == DlResultCode.FINISHED)
self.result.emit(result)
def METHOD_NAME(self):
cli = LegendaryCLI(self.core)
self.item.download.dlm.logging_queue = cli.logging_queue
self.item.download.dlm.proc_debug = self.debug
result = DlResultModel(self.item.options)
start_t = time.time()
try:
self.item.download.dlm.start()
self.rgame.state = RareGame.State.DOWNLOADING
self.rgame.signals.progress.start.emit()
time.sleep(1)
while self.item.download.dlm.is_alive():
try:
status = self.item.download.dlm.status_queue.get(timeout=1.0)
self.rgame.signals.progress.update.emit(int(status.progress))
self.progress.emit(status, self.dl_size)
except queue.Empty:
pass
if self.dlm_signals.update:
try:
self.item.download.dlm.signals_queue.put(self.dlm_signals, block=False, timeout=1.0)
except queue.Full:
pass
time.sleep(self.item.download.dlm.update_interval / 10)
self.item.download.dlm.join()
except Exception as e:
self.kill()
self.item.download.dlm.join()
end_t = time.time()
logger.error(f"Installation failed after {end_t - start_t:.02f} seconds.")
logger.warning(f"The following exception occurred while waiting for the downloader to finish: {e!r}.")
result.code = DlResultCode.ERROR
result.message = f"{e!r}"
self.__finish(result)
return
else:
end_t = time.time()
if self.dlm_signals.kill is True:
logger.info(f"Download stopped after {end_t - start_t:.02f} seconds.")
result.code = DlResultCode.STOPPED
self.__finish(result)
return
logger.info(f"Download finished in {end_t - start_t:.02f} seconds.")
result.code = DlResultCode.FINISHED
if self.item.options.overlay:
self.core.finish_overlay_install(self.item.download.igame)
self.__finish(result)
return
if not self.item.options.no_install:
postinstall = self.core.install_game(self.item.download.igame)
if postinstall:
# LegendaryCLI(self.core)._handle_postinstall(
# postinstall,
# self.item.download.igame,
# False,
# self.item.options.install_prereqs,
# )
self._handle_postinstall(postinstall, self.item.download.igame)
dlcs = self.core.get_dlc_for_game(self.item.download.igame.app_name)
if dlcs and not self.item.options.skip_dlcs:
result.dlcs = []
for dlc in dlcs:
result.dlcs.append(
{
"app_name": dlc.app_name,
"app_title": dlc.app_title,
"app_version": dlc.app_version(self.item.options.platform),
}
)
if (
self.item.download.game.supports_cloud_saves
or self.item.download.game.supports_mac_cloud_saves
) and not self.item.download.game.is_dlc:
result.sync_saves = True
# show tip again after installation finishes so users hopefully actually see it
if tip_url := self.core.get_game_tip(self.item.download.igame.app_name):
result.tip_url = tip_url
LegendaryCLI(self.core).install_game_cleanup(
self.item.download.game,
self.item.download.igame,
self.item.download.repair,
self.item.download.repair_file,
)
if not self.item.options.update and self.item.options.create_shortcut:
result.shortcut = True
result.shortcut_name = self.rgame.folder_name
result.shortcut_title = self.rgame.app_title
self.__finish(result)
def _handle_postinstall(self, postinstall, igame):
logger.info("This game lists the following prerequisites to be installed:")
logger.info(f'- {postinstall["name"]}: {" ".join((postinstall["path"], postinstall["args"]))}')
if platform.system() == "Windows":
if self.item.options.install_prereqs:
logger.info("Launching prerequisite executable..")
self.core.prereq_installed(igame.app_name)
req_path, req_exec = os.path.split(postinstall["path"])
work_dir = os.path.join(igame.install_path, req_path)
fullpath = os.path.join(work_dir, req_exec)
proc = QProcess()
proc.setProcessChannelMode(QProcess.MergedChannels)
proc.readyReadStandardOutput.connect(
lambda: logger.debug(str(proc.readAllStandardOutput().data(), "utf-8", "ignore"))
)
proc.setProgram(fullpath)
proc.setArguments(postinstall.get("args", "").split(" "))
proc.setWorkingDirectory(work_dir)
proc.start()
proc.waitForFinished() # wait, because it is inside the thread
else:
logger.info("Automatic installation not available on Linux.")
def kill(self):
self.dlm_signals.kill = True |
299,443 | check args section | # vim: ft=python fileencoding=utf-8 sw=4 et sts=4
"""Additional docstring checkers."""
import re
from typing import Set
import astroid
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
class CommandMissingDocumentation(BaseChecker):
"""Checker to ensure command docstrings include all information for docs."""
__implements__ = IAstroidChecker
name = "command-docstring"
name_ambiguous = "ambiguous-register"
name_missing = "docstr-missing"
name_syntax = "command-bad-syntax"
name_count = "command-bad-count"
name_argument_missing = "command-arg-missing-doc"
name_argument_bad = "command-arg-bad-doc"
msgs = {
"E9001": (
"Command '%s' bad syntax section",
name_syntax,
"All commands with args must define a section in the form of '**syntax:**'",
),
"E9002": (
"Command '%s' bad count section",
name_count,
"All commands with count must define a section in the form of '**count:**'",
),
"E9003": (
"Command '%s' argument '%s' undocmented",
name_argument_missing,
"All command arguments should be documented in the "
"**posional/optional** arguments section**",
),
"E9004": (
"Command '%s' argument '%s' bad doc format",
name_argument_bad,
"All command arguments should be documented in the "
"**posional/optional** arguments section**",
),
"E9005": (
"Command '%s' missing docstring",
name_missing,
"All commands must have a docstring for documentation",
),
"W9001": (
"Ambiguous register decorator, use module.register instead",
name_ambiguous,
"register decorators should be prepended with the module for clarity",
),
}
priority = -1
def visit_functiondef(self, node):
"""Run the checks on all function definitions that are commands."""
if not self._is_command(node):
return
argnames = {arg.name.replace("_", "-") for arg in node.args.args}
regular_argnames = argnames - {"self", "count"}
self.METHOD_NAME(node, regular_argnames)
self._check_count_section(node, argnames)
self._check_syntax_section(node, regular_argnames)
@staticmethod
def sections(docstr):
"""Retrieve list of all sections separated by an empty line in docstr."""
sections = []
content = ""
for line in docstr.split("\n"):
if not line.strip():
sections.append(content)
content = ""
else:
content += line
return sections
def _check_syntax_section(self, node, argnames):
"""Check if a syntax section is available for commands with arguments."""
if not argnames:
return
for section in self.sections(node.doc):
if re.match(r"\*\*syntax:\*\* ``.*``", section.strip()):
return
self.add_message(self.name_syntax, node=node, args=(node.name,))
def _check_count_section(self, node, argnames):
"""Check if a count section is available for commands that support count."""
if "count" not in argnames:
return
if "**count:**" not in node.doc:
self.add_message(self.name_count, node=node, args=(node.name,))
def METHOD_NAME(self, node, argnames):
"""Check if all command arguments are documented."""
docstring_argnames = self._get_args_from_docstring(node)
difference = argnames - docstring_argnames
for argname in difference:
self.add_message(
self.name_argument_missing, node=node, args=(node.name, argname)
)
def _is_command(self, node) -> bool:
"""Check if a function definition node is a command.
This checks if the function was decorated by @commands.register.
"""
decorators = node.decorators
if decorators is None:
return False
for decorator in decorators.nodes:
# @register
if isinstance(decorator, astroid.node_classes.Name):
if decorator.name == "register":
self.add_message(self.name_ambiguous, node=node)
continue
# @module.register, cannot be command as it needs arguments
if isinstance(decorator, astroid.node_classes.Attribute):
continue
# @register()
if isinstance(decorator.func, astroid.node_classes.Name):
if decorator.func.name == "register":
self.add_message(self.name_ambiguous, node=node)
continue
# @module.register()
if isinstance(decorator.func.expr, astroid.node_classes.Name):
if decorator.func.expr.name == "commands":
return True
return False
# @api.module.register()
if (
decorator.func.attrname == "register"
and decorator.func.expr.attrname == "commands"
):
return True
return False
def _get_args_from_docstring(self, node) -> Set[str]:
"""Retrieve documented arguments from command docstring.
If an argument is not correctly formatted in the documentation section, the
name_argument_bad message is added.
Returns:
Set of all documented argument names.
"""
docstr = node.doc
if docstr is None:
self.add_message(self.name_missing, node=node, args=(node.name,))
return set()
lines = [line.strip() for line in node.doc.split("\n")]
def _get_args(identifier, pattern):
try:
index = lines.index(identifier)
except ValueError:
return set()
args = []
for line in lines[index:]:
if not line: # Section separated
break
if line.startswith("* "): # Argument list
argument = line.split()[1]
argname = argument.strip("-:`")
if not re.match(pattern, argument):
self.add_message(
self.name_argument_bad, node=node, args=(node.name, argname)
)
args.append(argname)
return set(args)
positional_args = _get_args(
"positional arguments:", "``[a-zA-Z][a-zA-Z0-9-]*``:"
)
optional_args = _get_args("optional arguments:", "``--[a-zA-Z][a-zA-Z0-9-]*``:")
return positional_args | optional_args
def register(linter):
"""Register the defined checkers automatically."""
linter.register_checker(CommandMissingDocumentation(linter)) |
299,444 | index | #
# pipeline.py -- Base classes for pipelines in Ginga
#
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import time
from ginga.misc import Bunch, Callback
from ginga.util import action
__all__ = ['Pipeline']
class PipeError(Exception):
pass
class Pipeline(Callback.Callbacks):
def __init__(self, logger, stages, name=None):
super(Pipeline, self).__init__()
self.logger = logger
self.cur_stage = None
self._i = 0
self.bboard = Bunch.Bunch()
if name is None:
name = 'noname'
self.name = name
self.enabled = True
self.pipeline = list(stages)
# undo/redo stack
self.actions = action.ActionStack()
for stage in self.pipeline:
self._init_stage(stage)
for name in ['pipeline-start',
'stage-executing', 'stage-errored', 'stage-done']:
self.enable_callback(name)
def _init_stage(self, stage):
stage.pipeline = self
stage.logger = self.logger
stage.result = Bunch.Bunch(res_np=None)
def insert(self, i, stage):
self.pipeline.insert(i, stage)
self._init_stage(stage)
def append(self, stage):
self.pipeline.append(stage)
self._init_stage(stage)
def remove(self, stage):
stage.pipeline = None
self.pipeline.remove(stage)
def enable(self, tf):
self.enabled = tf
def run_stage_idx(self, i):
if not self.enabled:
self.logger.info("pipeline disabled")
if i < 0 or i >= len(self.pipeline):
raise ValueError("No stage at index {}".format(i))
stage = self.pipeline[i]
prev_stage = None if i == 0 else self.pipeline[i - 1]
self.cur_stage = stage
self.make_callback('stage-executing', stage)
start_time = time.time()
try:
stage.run(prev_stage)
stop_time = time.time()
self.make_callback('stage-done', stage)
except Exception as e:
stop_time = time.time()
self.logger.error("Error running stage %d (%s): %s" % (
i, str(stage), e), exc_info=True)
self.stop()
self.make_callback('stage-errored', stage)
self.logger.debug("stage '%s' took %.4f sec" % (stage._stagename,
stop_time - start_time))
def run_from(self, stage):
self.make_callback('pipeline-start', stage)
self._i = self.pipeline.METHOD_NAME(stage)
start_time = time.time()
while self._i < len(self.pipeline):
self.run_stage_idx(self._i)
self._i += 1
stop_time = time.time()
self.logger.debug("pipeline '%s' total execution %.4f sec" % (
self.name, stop_time - start_time))
def run_all(self):
self.run_from(self.pipeline[0])
def stop(self):
self._i = len(self.pipeline)
def get_data(self, stage):
return stage.result.res_np
def send(self, **kwargs):
self.cur_stage.result = Bunch.Bunch(kwargs)
def set(self, **kwargs):
self.bboard.setvals(**kwargs)
def get(self, *args):
if len(args) == 1:
return self.bboard[args[0]]
if len(args) == 2:
return self.bboard.get(args[0], args[1])
raise ValueError("Pass keyword as argument")
def invalidate(self):
for stage in self.pipeline:
stage.invalidate()
def push(self, act):
self.actions.push(act)
def undo(self):
act = self.actions.undo()
self.run_from(act.obj)
def redo(self):
act = self.actions.redo()
self.run_from(act.obj)
def save(self):
dd = dict(name=self.name, enabled=self.enabled)
d = dict(pipeline=dd)
l = [stage.export_as_dict() for stage in self.pipeline]
dd['stages'] = l
return d
def load(self, d, cd):
# reinitialize some things
self.cur_stage = None
self._i = 0
self.bboard = Bunch.Bunch()
self.actions = action.ActionStack()
dd = d['pipeline']
self.name = dd['name']
self.enabled = dd.get('enabled', True)
l = []
for sd in dd['stages']:
# instantiate stage
stage = cd[sd['type']]()
self._init_stage(stage)
stage.import_from_dict(sd)
l.append(stage)
self.pipeline = l
return d
def __getitem__(self, idx):
return self.pipeline[idx]
def __contains__(self, stage):
return stage in self.pipeline
def METHOD_NAME(self, stage):
return self.pipeline.METHOD_NAME(stage)
def __len__(self):
return len(self.pipeline) |
299,445 | token | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetManagedEnvironmentAuthTokenResult',
'AwaitableGetManagedEnvironmentAuthTokenResult',
'get_managed_environment_auth_token',
'get_managed_environment_auth_token_output',
]
@pulumi.output_type
class GetManagedEnvironmentAuthTokenResult:
"""
Environment Auth Token.
"""
def __init__(__self__, expires=None, id=None, location=None, name=None, system_data=None, tags=None, METHOD_NAME=None, type=None):
if expires and not isinstance(expires, str):
raise TypeError("Expected argument 'expires' to be a str")
pulumi.set(__self__, "expires", expires)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'token' to be a str")
pulumi.set(__self__, "token", METHOD_NAME)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def expires(self) -> str:
"""
Token expiration date.
"""
return pulumi.get(self, "expires")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> str:
"""
The geo-location where the resource lives
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
Azure Resource Manager metadata containing createdBy and modifiedBy information.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def METHOD_NAME(self) -> str:
"""
Auth token value.
"""
return pulumi.get(self, "token")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
class AwaitableGetManagedEnvironmentAuthTokenResult(GetManagedEnvironmentAuthTokenResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetManagedEnvironmentAuthTokenResult(
expires=self.expires,
id=self.id,
location=self.location,
name=self.name,
system_data=self.system_data,
tags=self.tags,
METHOD_NAME=self.METHOD_NAME,
type=self.type)
def get_managed_environment_auth_token(environment_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetManagedEnvironmentAuthTokenResult:
"""
Gets the auth token for a Managed Environment.
Azure REST API version: 2022-10-01.
:param str environment_name: Name of the Managed Environment.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
__args__ = dict()
__args__['environmentName'] = environment_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:app:getManagedEnvironmentAuthToken', __args__, opts=opts, typ=GetManagedEnvironmentAuthTokenResult).value
return AwaitableGetManagedEnvironmentAuthTokenResult(
expires=pulumi.get(__ret__, 'expires'),
id=pulumi.get(__ret__, 'id'),
location=pulumi.get(__ret__, 'location'),
name=pulumi.get(__ret__, 'name'),
system_data=pulumi.get(__ret__, 'system_data'),
tags=pulumi.get(__ret__, 'tags'),
METHOD_NAME=pulumi.get(__ret__, 'token'),
type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_managed_environment_auth_token)
def get_managed_environment_auth_token_output(environment_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetManagedEnvironmentAuthTokenResult]:
"""
Gets the auth token for a Managed Environment.
Azure REST API version: 2022-10-01.
:param str environment_name: Name of the Managed Environment.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
... |
299,446 | handler returning response | # Copyright 2023 Iguazio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import typing
from http import HTTPStatus
import fastapi
import pydantic
import pytest
from fastapi.exception_handlers import http_exception_handler
from fastapi.testclient import TestClient
from sqlalchemy.orm import Session
from mlrun.api.main import app
from mlrun.utils import logger
class HandledException1(Exception):
pass
class HandledException2(Exception):
pass
class UnhandledException(Exception):
pass
@app.exception_handler(HandledException1)
async def METHOD_NAME(request: fastapi.Request, exc: HandledException1):
logger.warning("Handler caught HandledException1 exception, returning 204 response")
return fastapi.Response(status_code=HTTPStatus.NO_CONTENT.value)
@app.exception_handler(HandledException2)
async def handler_returning_http_exception(
request: fastapi.Request, exc: HandledException2
):
logger.warning(
"Handler caught HandledException2 exception, returning HTTPException with 401"
)
return await http_exception_handler(
request, fastapi.HTTPException(status_code=HTTPStatus.UNAUTHORIZED.value)
)
test_router = fastapi.APIRouter()
@test_router.get("/success")
def success():
logger.info("Success endpoint received request, returning 202")
return fastapi.Response(status_code=202)
@test_router.get("/handled_exception_1")
def handled_exception_1():
logger.info(
"handled_exception_1 endpoint received request, raising handled exception 1"
)
raise HandledException1("handled exception 1")
@test_router.get("/handled_exception_2")
def handled_exception_2():
logger.info(
"handled_exception_2 endpoint received request, raising handled exception 2"
)
raise HandledException2("handled exception 2")
@test_router.get("/unhandled_exception")
def unhandled_exception():
logger.info("unhandled endpoint received request, raising unhandled exception")
raise UnhandledException("Unhandled exception")
class SomeScheme(pydantic.BaseModel):
id: str
@test_router.post("/fastapi_handled_exception")
def fastapi_handled_exception(model: SomeScheme):
logger.info("Should not get here, will fail on body validation")
middleware_modes = [
"with_middleware",
"without_middleware",
]
# must be defined here since routes are added to the app below
@pytest.fixture(params=middleware_modes)
def client(request) -> typing.Generator:
    # save a copy of the middlewares so we can restore them once the test is done
    user_middleware = app.user_middleware.copy()
try:
if request.param == "without_middleware":
# this overrides the webapp middlewares by removing the logging middleware
app.user_middleware = []
app.middleware_stack = app.build_middleware_stack()
app.include_router(test_router, prefix="/test")
with TestClient(app) as c:
yield c
finally:
        # restore the original middlewares
if request.param == "without_middleware":
app.user_middleware = user_middleware
app.middleware_stack = app.build_middleware_stack()
def test_logging_middleware(db: Session, client: TestClient) -> None:
resp = client.get("/test/success")
assert resp.status_code == HTTPStatus.ACCEPTED.value
resp = client.get("/test/handled_exception_1")
assert resp.status_code == HTTPStatus.NO_CONTENT.value
resp = client.get("/test/handled_exception_2")
assert resp.status_code == HTTPStatus.UNAUTHORIZED.value
resp = client.post("/test/fastapi_handled_exception")
assert resp.status_code == HTTPStatus.UNPROCESSABLE_ENTITY.value
with pytest.raises(UnhandledException):
        # In a real FastAPI app (as opposed to the test client) an unhandled exception returns 500
client.get("/test/unhandled_exception") |
299,447 | test get dict attr | # Copyright (c) 2018, 2023, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# The Universal Permissive License (UPL), Version 1.0
#
# Subject to the condition set forth below, permission is hereby granted to any
# person obtaining a copy of this software, associated documentation and/or
# data (collectively the "Software"), free of charge and under any and all
# copyright rights in the Software, and any and all patent rights owned or
# freely licensable by each licensor hereunder covering either (i) the
# unmodified Software as contributed to or provided by such licensor, or (ii)
# the Larger Works (as defined below), to deal in both
#
# (a) the Software, and
#
# (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
# one is included with the Software (each a "Larger Work" to which the Software
# is contributed by such licensors),
#
# without restriction, including without limitation the rights to copy, create
# derivative works of, display, perform, and distribute the Software and make,
# use, sell, offer for sale, import, export, have made, and have sold the
# Software and the Larger Work(s), and to sublicense the foregoing rights on
# either these or other terms.
#
# This license is subject to the following condition:
#
# The above copyright notice and either this complete permission notice or at a
# minimum a reference to the UPL must be included in all copies or substantial
# portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
def assert_raises(err, fn, *args, **kwargs):
raised = False
try:
fn(*args, **kwargs)
except err:
raised = True
assert raised
def test_reduce_ex_with_slots():
# Adapted from test_desc.test_issue24097
class A:
__slotnames__ = ['spam']
def __getattr__(self, attr):
if attr == 'spam':
return 42
else:
raise AttributeError
import copyreg
expected = (copyreg.__newobj__, (A,), (None, {'spam': 42}), None, None)
assert A().__reduce_ex__(2) == expected
def test_set_dict_attr_builtin_extension():
class MyList(list):
pass
lst = MyList()
assert lst.__dict__ == {}
lst.__dict__ = {'a': 9}
assert lst.a == 9
assert lst.__dict__ == {'a': 9}
def METHOD_NAME():
o = object()
def get_dict_attr():
return o.__dict__
def set_dict_attr():
o.__dict__ = {'a': 10}
assert_raises(AttributeError, get_dict_attr)
assert_raises(AttributeError, set_dict_attr)
def test_set_dict_attr():
class MyClass(object):
def __init__(self):
self.a = 9
m = MyClass()
assert m.a == 9
assert m.__dict__ == {'a': 9}
assert m.a == 9
m.__dict__ = {'a': 10}
assert m.__dict__ == {'a': 10}
assert m.a == 10
m.d = 20
assert m.d == 20
assert "d" in m.__dict__
assert m.__dict__ == {'a': 10, 'd': 20}
# check dir & __dir__
assert sorted(list(m.__dir__())) == dir(m)
def test_set_attr_builtins():
lst = list()
def set_attr():
lst.a = 10
assert_raises(AttributeError, set_attr)
class MyList(list):
pass
mlst = MyList()
mlst.a = 10
assert mlst.a == 10
def test_set_dict_attr_with_getattr_defined():
class MyOtherClass(object):
def __getattribute__(self, item):
return object.__getattribute__(self, item)
def __getattr__(self, item):
if item == "my_attr":
return 10
raise AttributeError
m1 = MyOtherClass()
def get_non_existing_attr():
return m1.my_attr_2
assert_raises(AttributeError, get_non_existing_attr)
assert m1.my_attr == 10
assert "my_attr" not in m1.__dict__
m1.__dict__ = {'d': 10}
assert m1.my_attr == 10
assert "my_attr" not in m1.__dict__
assert m1.d == 10
def test_class_attr():
class AAA:
def foo(self):
assert __class__ == AAA
assert self.__class__ == AAA
class BBB:
pass
class CCC(AAA):
def getclass(self):
return BBB
__class__ = property(getclass)
def bar(self):
assert __class__ == CCC
assert self.__class__ == BBB
AAA().foo()
CCC().bar()
def test_reduce_ex_with_none():
assert_raises(TypeError, object(), None)
def test_descr_call_with_none():
descr = object.__dict__['__class__']
assert None.__class__ is type(None)
assert descr.__get__(None, type(None)) is descr
assert_raises(TypeError, descr.__get__, None, None)
def test_custom_getattribute():
class AAA:
__slots__ = '__wrapped__'
def __init__(self, wrapped):
object.__setattr__(self, '__wrapped__', wrapped)
def __index__(self):
return self.__wrapped__.__index__()
def __len__(self):
return len(self.__wrapped__)
def __contains__(self, value):
return value in self.__wrapped__
def __getitem__(self, key):
return self.__wrapped__[key]
def __setitem__(self, key, value):
self.__wrapped__[key] = value
def __getattr__(self, name):
if name == '__wrapped__':
raise ValueError('wrapper has not been initialised')
return getattr(self.__wrapped__, name)
def __iter__(self):
return iter(self.__wrapped__)
class BBB(AAA):
def __init__(self, wrapped_dict=None):
AAA.__init__(self, wrapped_dict)
def __getattribute__(self, name):
if (hasattr(type(self), name)
and isinstance(getattr(type(self), name), property)):
return object.__getattribute__(self, name)
else:
return super().__getattribute__(name)
d = {"abc": 1}
    assert dict(BBB(d)) == d |
299,448 | product type | from django import forms
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import ValidationError
from django.utils.safestring import mark_safe
from django.utils.timezone import localtime
from django.utils.translation import ugettext_lazy as _
from modeltranslation.admin import TranslationAdmin
from payments.utils import get_price_period_display
from resources.models import Resource
from .models import Order, OrderLine, OrderLogEntry, Product
def get_datetime_display(dt):
if not dt:
return None
return localtime(dt).strftime('%d %b %Y %H:%M:%S')
class ProductForm(forms.ModelForm):
class Meta:
model = Product
fields = '__all__'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['resources'] = forms.ModelMultipleChoiceField(queryset=Resource.objects.order_by('name'))
def clean_resources(self):
resources = self.cleaned_data.get('resources', [])
if resources:
if any(r.need_manual_confirmation for r in resources):
raise ValidationError(_('All the resources must have manual reservation confirmation disabled.'))
return resources
class ProductAdmin(TranslationAdmin):
list_display = (
'product_id', 'sku', 'name', 'type', 'price', 'price_type', 'get_price_period', 'tax_percentage',
'max_quantity', 'get_resources', 'get_created_at', 'get_modified_at'
)
readonly_fields = ('product_id',)
fieldsets = (
(None, {
'fields': ('sku', 'type', 'name', 'description', 'max_quantity')
}),
(_('price').capitalize(), {
'fields': ('price', 'price_type', 'price_period', 'tax_percentage'),
}),
(_('resources').capitalize(), {
'fields': ('resources',)
}),
)
ordering = ('-product_id',)
form = ProductForm
def get_resources(self, obj):
return mark_safe('<br>'.join([str(r) for r in obj.resources.all()]))
get_resources.short_description = _('resources')
def get_created_at(self, obj):
return Product.objects.filter(product_id=obj.product_id).first().created_at
get_created_at.short_description = _('created at')
    def get_modified_at(self, obj):
        # each Product edit creates a new version row, so this row's
        # created_at is effectively the latest modification time
        return obj.created_at
get_modified_at.short_description = _('modified at')
def get_queryset(self, request):
return super().get_queryset(request).current()
def change_view(self, request, object_id, form_url='', extra_context=None):
# disable "save and continue editing" button since it does not work
# because of the Product versioning stuff
extra_context = extra_context or {}
extra_context['show_save_and_continue'] = False
return super().change_view(request, object_id, form_url, extra_context=extra_context)
def get_price_period(self, obj):
return get_price_period_display(obj.price_period)
get_price_period.short_description = _('price period')
class OrderLineInline(admin.TabularInline):
model = OrderLine
fields = ('product', 'product_type', 'unit_price', 'quantity', 'price', 'tax_percentage')
extra = 0
readonly_fields = fields
can_delete = False
def has_add_permission(self, request, obj):
return False
def METHOD_NAME(self, obj):
return obj.product.type
METHOD_NAME.short_description = _('product type')
def price(self, obj):
return obj.get_price()
price.short_description = _('price including VAT')
def unit_price(self, obj):
return obj.get_unit_price()
unit_price.short_description = _('unit price')
def tax_percentage(self, obj):
return obj.product.tax_percentage
tax_percentage.short_description = _('tax percentage')
class OrderLogEntryInline(admin.TabularInline):
model = OrderLogEntry
extra = 0
readonly_fields = ('timestamp_with_seconds', 'state_change', 'message')
can_delete = False
def has_add_permission(self, request, obj):
return False
def timestamp_with_seconds(self, obj):
return get_datetime_display(obj.timestamp)
timestamp_with_seconds.short_description = _('timestamp')
class OrderAdmin(admin.ModelAdmin):
list_display = ('order_number', 'user', 'created_at', 'state', 'reservation', 'price')
fields = ('order_number', 'created_at', 'state', 'reservation', 'user', 'price')
raw_id_fields = ('reservation',)
inlines = (OrderLineInline, OrderLogEntryInline)
ordering = ('-id',)
search_fields = ('order_number',)
list_filter = ('state',)
actions = None
def get_readonly_fields(self, request, obj=None):
return [f.name for f in self.model._meta.fields if f.name != 'id'] + [
'user', 'created_at', 'price', 'tax_amount', 'pretax_price'
]
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
if obj and obj.state == Order.CONFIRMED:
return True
return False
def changeform_view(self, request, object_id=None, form_url='', extra_context=None):
extra_context = extra_context or {}
extra_context['show_save_and_continue'] = False
extra_context['show_save'] = False
return super().changeform_view(request, object_id, extra_context=extra_context)
def delete_model(self, request, obj):
obj.set_state(Order.CANCELLED, log_message='Cancelled using the Django admin UI.')
def user(self, obj):
return obj.reservation.user
user.short_description = _('user')
def price(self, obj):
return obj.get_price()
price.short_description = _('price including VAT')
def created_at(self, obj):
return get_datetime_display(obj.created_at)
created_at.short_description = _('created at')
if settings.RESPA_PAYMENTS_ENABLED:
admin.site.register(Product, ProductAdmin)
admin.site.register(Order, OrderAdmin) |
299,449 | setup | # Copyright © SixtyFPS GmbH <info@slint.dev>
# SPDX-License-Identifier: GPL-3.0-only OR LicenseRef-Slint-commercial
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import textwrap
# -- Project information -----------------------------------------------------
# The full version, including alpha/beta/rc tags
version = "1.3.0"
project = f'Slint {version} C++ API'
copyright = "SixtyFPS GmbH"
author = "Slint Developers <info@slint.dev>"
cpp_index_common_prefix = ["slint::", "slint::interpreter::"]
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ["breathe", "myst_parser", "exhale", "sphinx_markdown_tables", "sphinxcontrib.jquery"]
breathe_projects = {"Slint": "./docs/xml"}
breathe_default_project = "Slint"
exhale_args = {
"containmentFolder": "./api",
"rootFileName": "library_root.rst",
"rootFileTitle": "C++ API Reference",
"afterTitleDescription": textwrap.dedent(
"""
The following sections present the C++ API Reference. All types are
within the :ref:`slint<namespace_slint>` namespace and are accessible by including
the :code:`slint.h` header file.
If you choose to load :code:`.slint` files dynamically at run-time, then
you can use the classes in :ref:`slint::interpreter<namespace_slint__interpreter>`, starting at
:cpp:class:`slint::interpreter::ComponentCompiler`. You need to include
the :code:`slint-interpreter.h` header file.
"""
),
"doxygenStripFromPath": "..",
"createTreeView": True,
"kindsWithContentsDirectives": [],
"exhaleExecutesDoxygen": True,
"exhaleDoxygenStdin": """INPUT = ../../api/cpp/include generated_include
EXCLUDE_SYMBOLS = slint::cbindgen_private* slint::private_api* vtable* slint::testing* SLINT_DECL_ITEM
EXCLUDE = ../../api/cpp/include/vtable.h ../../api/cpp/include/slint_testing.h
ENABLE_PREPROCESSING = YES
PREDEFINED += DOXYGEN
INCLUDE_PATH = generated_include
WARN_AS_ERROR = YES""",
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [
"_build",
"html/_static/collapsible-lists/LICENSE.md",
"Thumbs.db",
".DS_Store",
"markdown/tutorial",
"markdown/building.md",
"markdown/development.md",
"markdown/install_qt.md",
"markdown/README.md",
"README.md",
]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "furo"
html_theme_options = {"collapse_navigation": False}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
html_show_sourcelink = False
html_logo = "https://slint.dev/logo/slint-logo-small-light.svg"
myst_enable_extensions = [
"html_image",
]
# Annotate h1/h2 elements with anchors
myst_heading_anchors = 2
myst_url_schemes = {
"slint-reference": f"https://slint.dev/releases/{version}/docs/slint/{{{{path}}}}",
'http': None, 'https': None, 'mailto': None,
}
rst_epilog = """
.. |ListView| replace:: :code:`ListView`
.. _ListView: ../../slint/src/builtins/widgets.html#listview
.. |Repetition| replace:: :code:`for` - :code:`in`
.. _Repetition: ../../slint/src/reference/repetitions.html
"""
def METHOD_NAME(app):
app.add_css_file("theme_tweak.css") |
299,450 | test series | import pytest
from diofant import (E, Float, I, Rational, Symbol, ceiling, exp, factorial,
false, floor, log, nan, oo, pi, sin, sqrt, symbols, true)
from diofant.abc import x, y
__all__ = ()
def test_floor():
i = Symbol('i', imaginary=True)
y = Symbol('y', extended_real=True)
r = Symbol('r', real=True)
k, n = symbols('k,n', integer=True)
assert floor(y).is_extended_real
assert floor(x).is_extended_real is None
assert floor(r).is_finite
assert floor(y).is_finite is None
assert floor(r).is_integer
assert floor(y).is_integer is None
assert floor(nan) == nan
assert floor(oo) == oo
assert floor(-oo) == -oo
assert floor(0) == 0
assert floor(1) == 1
assert floor(-1) == -1
assert floor(E) == 2
assert floor(-E) == -3
assert floor(2*E) == 5
assert floor(-2*E) == -6
assert floor(pi) == 3
assert floor(-pi) == -4
assert floor(Rational(1, 2)) == 0
assert floor(-Rational(1, 2)) == -1
assert floor(Rational(7, 3)) == 2
assert floor(-Rational(7, 3)) == -3
assert floor(Float(17.0)) == 17
assert floor(-Float(17.0)) == -17
assert floor(Float(7.69)) == 7
assert floor(-Float(7.69)) == -8
assert floor(I) == I
assert floor(-I) == -I
e = floor(i)
assert isinstance(e, floor)
assert e.args[0] == i
assert floor(oo*I) == oo*I
assert floor(-oo*I) == -oo*I
assert floor(2*I) == 2*I
assert floor(-2*I) == -2*I
assert floor(I/2) == 0
assert floor(-I/2) == -I
assert floor(E + 17) == 19
assert floor(pi + 2) == 5
assert floor(E + pi) == floor(E + pi)
assert floor(I + pi) == floor(I + pi)
assert floor(floor(pi)) == 3
assert floor(floor(y)) == floor(y)
assert floor(floor(x)) == floor(floor(x))
assert floor(x) == floor(x)
assert floor(2*x) == floor(2*x)
assert floor(k*x) == floor(k*x)
assert floor(k) == k
assert floor(2*k) == 2*k
assert floor(k*n) == k*n
assert floor(k/2) == floor(k/2)
assert floor(x + y) == floor(x + y)
assert floor(x + 3) == floor(x + 3)
assert floor(x + k) == floor(x + k)
assert floor(y + 3) == floor(y) + 3
assert floor(y + k) == floor(y) + k
assert floor(3 + I*y + pi) == 6 + floor(y)*I
assert floor(k + n) == k + n
assert floor(x*I) == floor(x*I)
assert floor(k*I) == k*I
assert floor(Rational(23, 10) - E*I) == 2 - 3*I
assert floor(sin(1)) == 0
assert floor(sin(-1)) == -1
assert floor(exp(2)) == 7
assert floor(log(8)/log(2)) != 2
assert int(floor(log(8)/log(2)).evalf(chop=True)) == 3
assert floor(factorial(50)/exp(1)) == \
11188719610782480504630258070757734324011354208865721592720336800
assert (floor(y) <= y) is true
assert (floor(y) > y) is false
assert (floor(x) <= x).is_Relational # x could be non-real
assert (floor(x) > x).is_Relational
assert (floor(x) <= y).is_Relational # arg is not same as rhs
assert (floor(x) > y).is_Relational
assert floor(x).as_leading_term(x) == floor(x)
# issue sympy/sympy#11207
assert floor(floor(x)) == floor(x)
assert floor(ceiling(x)) == ceiling(x)
def test_ceiling():
i = Symbol('i', imaginary=True)
y = Symbol('y', extended_real=True)
k, n = symbols('k,n', integer=True)
assert ceiling(nan) == nan
assert ceiling(oo) == oo
assert ceiling(-oo) == -oo
assert ceiling(0) == 0
assert ceiling(1) == 1
assert ceiling(-1) == -1
assert ceiling(E) == 3
assert ceiling(-E) == -2
assert ceiling(2*E) == 6
assert ceiling(-2*E) == -5
assert ceiling(pi) == 4
assert ceiling(-pi) == -3
assert ceiling(Rational(1, 2)) == 1
assert ceiling(-Rational(1, 2)) == 0
assert ceiling(Rational(7, 3)) == 3
assert ceiling(-Rational(7, 3)) == -2
assert ceiling(Float(17.0)) == 17
assert ceiling(-Float(17.0)) == -17
assert ceiling(Float(7.69)) == 8
assert ceiling(-Float(7.69)) == -7
assert ceiling(I) == I
assert ceiling(-I) == -I
e = ceiling(i)
assert isinstance(e, ceiling)
assert e.args[0] == i
assert ceiling(oo*I) == oo*I
assert ceiling(-oo*I) == -oo*I
assert ceiling(2*I) == 2*I
assert ceiling(-2*I) == -2*I
assert ceiling(I/2) == I
assert ceiling(-I/2) == 0
assert ceiling(E + 17) == 20
assert ceiling(pi + 2) == 6
assert ceiling(E + pi) == ceiling(E + pi)
assert ceiling(I + pi) == ceiling(I + pi)
assert ceiling(ceiling(pi)) == 4
assert ceiling(ceiling(y)) == ceiling(y)
assert ceiling(ceiling(x)) == ceiling(ceiling(x))
assert ceiling(x) == ceiling(x)
assert ceiling(2*x) == ceiling(2*x)
assert ceiling(k*x) == ceiling(k*x)
assert ceiling(k) == k
assert ceiling(2*k) == 2*k
assert ceiling(k*n) == k*n
assert ceiling(k/2) == ceiling(k/2)
assert ceiling(x + y) == ceiling(x + y)
assert ceiling(x + 3) == ceiling(x + 3)
assert ceiling(x + k) == ceiling(x + k)
assert ceiling(y + 3) == ceiling(y) + 3
assert ceiling(y + k) == ceiling(y) + k
assert ceiling(3 + pi + y*I) == 7 + ceiling(y)*I
assert ceiling(k + n) == k + n
assert ceiling(x*I) == ceiling(x*I)
assert ceiling(k*I) == k*I
assert ceiling(Rational(23, 10) - E*I) == 3 - 2*I
assert ceiling(sin(1)) == 1
assert ceiling(sin(-1)) == 0
assert ceiling(exp(2)) == 8
assert ceiling(-log(8)/log(2)) != -2
assert int(ceiling(-log(8)/log(2)).evalf(chop=True)) == -3
assert ceiling(factorial(50)/exp(1)) == \
11188719610782480504630258070757734324011354208865721592720336801
assert (ceiling(y) >= y) is true
assert (ceiling(y) < y) is false
assert (ceiling(x) >= x).is_Relational # x could be non-real
assert (ceiling(x) < x).is_Relational
assert (ceiling(x) >= y).is_Relational # arg is not same as rhs
assert (ceiling(x) < y).is_Relational
# issue sympy/sympy#11207
assert ceiling(floor(x)) == floor(x)
assert ceiling(ceiling(x)) == ceiling(x)
def METHOD_NAME():
assert floor(x).series(x, y, 100) == floor(y)
assert ceiling(x).series(x, y, 100) == ceiling(y)
assert floor(x).series(x, pi, 100) == 3
assert ceiling(x).series(x, pi, 100) == 4
assert floor(x).series(x, n=100) == 0
assert ceiling(x).series(x, n=100) == 1
assert floor(-x).series(x, n=100) == -1
assert ceiling(-x).series(x, n=100) == 0
@pytest.mark.xfail
def test_sympyissue_4149():
y = Symbol('y', real=True)
assert floor(3 + pi*I + y*I) == 3 + floor(pi + y)*I
assert floor(3*I + pi*I + y*I) == floor(3 + pi + y)*I
assert floor(3 + E + pi*I + y*I) == 5 + floor(pi + y)*I
def test_issue_1055():
e = 1/(sqrt(2) - 1) - sqrt(2)
e1 = e - Rational(1, 10**15)
e2 = e - 1
e3 = e - Rational(1, 10**1000)
assert floor(e) == 1
assert floor(e1) == 0
assert floor(e2) == e2
assert floor(e3) == floor(e3, evaluate=False) |
299,451 | test missing location | import binascii
import os
import shutil
import stat
import textwrap
import pytest
from snakeoil.osutils import pjoin
from pkgcore import const
from pkgcore import exceptions as base_errors
from pkgcore.config import errors as config_errors
from pkgcore.ebuild.portage_conf import PortageConfig
load_make_conf = PortageConfig.load_make_conf
load_repos_conf = PortageConfig.parse_repos_conf_path
class TestMakeConf:
def test_load_defaults(self):
make_globals = {}
load_make_conf(make_globals, pjoin(const.CONFIG_PATH, "make.globals"))
assert "PORTAGE_TMPDIR" in make_globals
def test_nonexistent_file(self, tmp_path):
d = {}
# by default files are required
with pytest.raises(config_errors.ParsingError):
load_make_conf(d, tmp_path / "make.globals")
# should return empty dict when not required
load_make_conf(d, tmp_path / "make.conf", required=False)
assert not d
@pytest.mark.skipif(os.getuid() == 0, reason="need to be non root")
def test_unreadable_file(self, tmp_path):
d = {}
(path := tmp_path / "file").touch()
path.chmod(stat.S_IWUSR)
with pytest.raises(base_errors.PermissionDenied):
load_make_conf(d, path)
def test_overrides_incrementals(self, tmp_path):
(path := tmp_path / "file").write_bytes(b"DISTDIR=foo\n")
d = {}
load_make_conf(d, pjoin(const.CONFIG_PATH, "make.globals"))
load_make_conf(d, path, allow_sourcing=True, incrementals=True)
assert d["DISTDIR"] == "foo"
def test_load_make_conf_dir(self, tmp_path):
# load files from dir and symlinked dir
(make_conf_dir := tmp_path / "make.conf").mkdir()
(make_conf_dir / "a").write_text("DISTDIR=foo\n")
(make_conf_sym := tmp_path / "make.conf.sym").symlink_to(make_conf_dir)
d = {}
load_make_conf(d, pjoin(const.CONFIG_PATH, "make.globals"))
sym_d = d.copy()
load_make_conf(d, make_conf_dir)
load_make_conf(sym_d, make_conf_sym)
assert d == sym_d
assert d["DISTDIR"] == "foo"
class TestReposConf:
def test_load_defaults(self):
_, global_repos_conf = load_repos_conf(pjoin(const.CONFIG_PATH, "repos.conf"))
assert "gentoo" in global_repos_conf
def test_nonexistent_file(self, tmp_path):
with pytest.raises(config_errors.ParsingError):
load_repos_conf(tmp_path / "repos.conf")
@pytest.mark.skipif(os.getuid() == 0, reason="need to be non root")
def test_unreadable_file(self, tmp_path):
(path := tmp_path / "file").touch()
path.chmod(stat.S_IWUSR)
with pytest.raises(base_errors.PermissionDenied):
load_repos_conf(path)
def test_garbage_file(self, tmp_path):
(path := tmp_path / "file").write_bytes(binascii.b2a_hex(os.urandom(10)))
with pytest.raises(config_errors.ConfigurationError):
load_repos_conf(path)
def METHOD_NAME(self, tmp_path, caplog):
(path := tmp_path / "file").write_text(
textwrap.dedent(
"""\
[foo]
sync-uri = git://foo.git"""
)
)
load_repos_conf(path)
assert "'foo' repo missing location setting" in caplog.text
def test_bad_priority(self, tmp_path, caplog):
# bad priority value causes fallback to the default
(path := tmp_path / "file").write_text(
textwrap.dedent(
"""\
[foo]
priority = foo
location = /var/gentoo/repos/foo
[gentoo]
location = /var/gentoo/repos/gentoo"""
)
)
defaults, repos = load_repos_conf(path)
assert repos["foo"]["priority"] == 0
assert "'foo' repo has invalid priority setting" in caplog.text
def test_overriding_defaults_same_file(self, tmp_path):
# overriding defaults in the same file throws an exception from configparser
(path := tmp_path / "file").write_text(
textwrap.dedent(
"""\
[DEFAULT]
main-repo = gentoo
[DEFAULT]
main-repo = foo
[foo]
priority = foo
location = /var/gentoo/repos/foo
[gentoo]
location = /var/gentoo/repos/gentoo"""
)
)
with pytest.raises(config_errors.ConfigurationError):
load_repos_conf(path)
def test_undefined_main_repo(self, tmp_path):
# undefined main repo with 'gentoo' missing
(path := tmp_path / "file").write_text(
textwrap.dedent(
"""\
[foo]
location = /var/gentoo/repos/foo"""
)
)
with pytest.raises(config_errors.UserConfigError):
load_repos_conf(path)
def test_optional_default_section(self, tmp_path, caplog):
# default section isn't required as long as gentoo repo exists
(path := tmp_path / "file").write_text(
textwrap.dedent(
"""\
[foo]
location = /var/gentoo/repos/foo
[gentoo]
location = /var/gentoo/repos/gentoo"""
)
)
defaults, repos = load_repos_conf(path)
assert defaults["main-repo"] == "gentoo"
assert list(repos.keys()) == ["foo", "gentoo"]
assert not caplog.text
def test_overriding_sections_same_file(self, tmp_path):
# overriding sections in the same file throws an exception from configparser
(path := tmp_path / "file").write_text(
textwrap.dedent(
"""\
[DEFAULT]
main-repo = foo
[foo]
priority = 3
location = /var/gentoo/repos/gentoo
[foo]
location = /var/gentoo/repos/foo"""
)
)
with pytest.raises(config_errors.ConfigurationError):
load_repos_conf(path)
def test_load_repos_conf_dir(self, tmp_path):
# repo priority sorting and dir/symlink scanning
(repos_conf_dir := tmp_path / "repos.conf").mkdir()
shutil.copyfile(
pjoin(const.CONFIG_PATH, "repos.conf"), repos_conf_dir / "repos.conf"
)
(repos_conf_sym := tmp_path / "repos.conf.sym").symlink_to(repos_conf_dir)
(repos_conf_sym / "file").write_text(
textwrap.dedent(
"""\
[bar]
location = /var/gentoo/repos/bar
[foo]
location = /var/gentoo/repos/foo
priority = 10"""
)
)
defaults, repos = load_repos_conf(repos_conf_dir)
sym_defaults, sym_repos = load_repos_conf(repos_conf_sym)
assert defaults == sym_defaults
assert repos == sym_repos
assert defaults["main-repo"] == "gentoo"
assert list(repos.keys()) == ["foo", "bar", "gentoo", "binpkgs"] |
299,452 | local and ut | #!/usr/bin/env python3
# PyEphem script to find cities with similar sunrise times
# to the Los Alamos Nature Center
import math
import ephem
import ephem.cities
from datetime import datetime, timezone
sun = ephem.Sun()
def subsolar_point(obstime):
"""Return lon, lat of the earth's subsolar point at the given UTC datetime.
"""
gmt_obs = ephem.Observer()
gmt_obs.lat = "0"
gmt_obs.lon = "0"
gmt_obs.date = obstime
sun.compute(gmt_obs.date)
sun_lon = math.degrees(sun.ra - gmt_obs.sidereal_time())
if sun_lon < -180.0 :
sun_lon = 360.0 + sun_lon
elif sun_lon > 180.0 :
sun_lon = sun_lon - 360.0
sun_lat = math.degrees(sun.dec)
return sun_lon, sun_lat
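# Illustrative check of subsolar_point (values are approximate and depend on
# the ephemeris; near the March equinox the subsolar latitude is close to 0):
#
#   lon, lat = subsolar_point(datetime(2023, 3, 20, 12))  # naive datetime, UTC
#   print("subsolar point: lon %.2f, lat %.2f" % (lon, lat))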
# Add Los Alamos and White Rock to city data:
# ephem.cities._city_data['Los Alamos'] = ('35.8851668', '-106.3061889', 2165)
# ephem.cities._city_data['White Rock'] = ('35.8131579', '-106.2189755', 1980)
ephem.cities._city_data["Reykjavik"] = ('64.1466', '21.9426', 0)
peec = ephem.Observer()
peec.lat = '35:53.1' # 35.8849756
peec.lon = '-106:18.36' # -106.3061510
peec.elevation = 2100 # About 7000'
peec.name = "Los Alamos Nature Center"
FMT = "%17s %-20s %-20s"
TIMEFMT = "%Y-%m-%d %H:%M"
def print_stats(name, ephemtime):
def METHOD_NAME(ephemdate):
"""Convert an ephem.Date to datetime.datetime.
Return aware (localdatetime, utcdatetime)
"""
dt = ephemdate.datetime()
dt = dt.astimezone().replace(tzinfo=timezone.utc)
return (dt.astimezone(), dt)
print()
print(FMT % ("", "Local", "UTC"))
lt, ut = METHOD_NAME(ephemtime)
if lt.month == 3:
season = "Vernal"
elif lt.month == 6:
season = "Summer"
elif lt.month == 9:
season = "Autumnal"
elif lt.month == 12:
season = "Winter"
else:
season = ""
name = "%s %s" % (season, name)
print(FMT % (name, lt.strftime(TIMEFMT), ut.strftime(TIMEFMT)))
halfdaybefore = ephem.Date(ephemtime - .5)
peec.date = halfdaybefore
sunrise = peec.next_rising(sun)
peec.date = sunrise
lt_sunrise, ut_sunrise = METHOD_NAME(peec.date)
sunset = peec.next_setting(sun)
peec.date = sunset
lt_sunset, ut_sunset = METHOD_NAME(peec.date)
cities = find_similar_cities(halfdaybefore, sunrise, sunset)
print("------------------------------------------------")
print(FMT % ("** SUNRISE",
lt_sunrise.strftime(TIMEFMT),
ut_sunrise.strftime(TIMEFMT)))
cities.sort(key=lambda c: c[1])
for c in cities[:5]:
lt, ut = METHOD_NAME(c[3])
print(FMT % (c[0], lt.strftime(TIMEFMT), ut.strftime(TIMEFMT)))
print("------------------------------------------------")
print(FMT % ("** SUNSET",
lt_sunset.strftime(TIMEFMT),
ut_sunset.strftime(TIMEFMT)))
cities.sort(key=lambda c: c[2])
for c in cities[:5]:
lt, ut = METHOD_NAME(c[4])
print(FMT % (c[0], lt.strftime(TIMEFMT), ut.strftime(TIMEFMT)))
def find_similar_cities(halfdaybefore, sunrise, sunset):
"""Search through pyephem's list of cities and return a list
of each city and the difference between its sunrise/sunset times
and the times passed in. So return a list of:
[cityname, abs_difference_sunrise, abs_difference_sunset,
sunrise, sunset]
where the last two are ephem.Dates.
You can sort the list on the abs difference columns
to find the most similar cities.
"""
cities = []
for city in ephem.cities._city_data:
city_obs = ephem.city(city)
city_obs.date = halfdaybefore
city_sunrise = city_obs.next_rising(sun)
city_obs.date = sunrise
city_sunset = city_obs.next_setting(sun)
cities.append([city,
abs(city_sunrise - sunrise),
abs(city_sunset - sunset),
city_sunrise, city_sunset])
return cities
next_solstice = ephem.next_solstice(peec.date)
print_stats("Solstice", next_solstice)
next_equinox = ephem.next_equinox(next_solstice)
print_stats("Equinox", next_equinox)
second_solstice = ephem.next_solstice(next_equinox)
print_stats("Solstice", second_solstice)
second_equinox = ephem.next_equinox(second_solstice)
print_stats("Equinox", second_equinox)
|
299,453 | child page count | import json
import re
from django import template
from django.conf import settings
from django.templatetags.static import static
from django.utils.html import format_html
from wagtail.models import Page
register = template.Library()
@register.filter
def clean_whitespace(value):
return re.sub(r'\s+', '-', value)
@register.filter
def lookup(dict, arg):
return dict.get(arg, '')
@register.simple_tag()
def formatted_title(page):
if hasattr(page, 'formatted_title'):
if page.formatted_title:
return format_html(page.formatted_title)
else:
return format_html(page.title)
else:
return page.title
@register.filter()
def districts(max):
"""Returns a list of numbers 1-100 for district filter"""
districts = range(max)
return districts
@register.filter()
def METHOD_NAME(page):
"""Returns the number of pages that are children of a particular page"""
count = Page.objects.child_of(page).live().count()
return "{} {}".format(count, 'result' if count == 1 else 'results')
@register.filter()
def prepend_non_digit(string):
"""
Prepends non-digit-containing string.
Useful in combination with built-in slugify in order to create strings
from titles that can be used as HTML IDs, which cannot begin with digits.
"""
if string[:1].isdigit():
string = "go-to-{0}".format(string)
return string
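# For instance (hypothetical slug), a heading slugified to "2022-reports"
# would become a valid HTML id:
#
#   prepend_non_digit("2022-reports")  # -> "go-to-2022-reports"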
@register.filter()
def web_app_url(path):
"""
Appends a path to the web app URL as defined in the settings
This is useful for StaticBlocks, which don't have access to the entire context
"""
return "{}{}".format(settings.FEC_APP_URL, path)
@register.filter()
def highlight_matches(text):
"""
Replaces the highlight markers with span tags for Search.gov website search results.
Because format_html uses str.format, remove { and } because they are special characters.
"""
cleaned_text = text.replace("{", "").replace("}", "")
highlighted_text = cleaned_text.replace(
"\ue000", '<span class="t-highlight">'
).replace("\ue001", "</span>")
return format_html(highlighted_text)
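# Illustrative example (Search.gov wraps matches in \ue000/\ue001 markers):
#
#   highlight_matches("campaign \ue000finance\ue001 data")
#   # -> 'campaign <span class="t-highlight">finance</span> data'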
@register.filter(name='splitlines')
def splitlines(value):
"""
Returns the value turned into a list.
"""
return value.splitlines()
@register.filter(name='get_touch_icon')
def get_touch_icon(content_section, dimension):
"""
Returns a path to a touch icon for the given dimension and content_section
"""
if content_section in ['legal', 'help']:
return static('img/favicon/{}/apple-touch-icon-{}.png'.format(content_section, dimension))
else:
return static('img/favicon/general/apple-touch-icon-{}.png'.format(dimension))
@register.filter(name='get_meta_description')
def get_meta_description(content_section):
"""
Returns a meta description for social media
"""
return 'Find what you need to know about the federal campaign finance process. \
Explore legal resources, campaign finance data, help for candidates and committees, and more.'
@register.simple_tag
def asset_for_js(path):
"""Looks up the hashed asset path in rev-manifest-js.json
If the path doesn't exist there, then just return the path to the static file
without a hash"""
key = '/static/js/{}'.format(path)
assets = json.load(open(settings.DIST_DIR + '/fec/static/js/rev-manifest-js.json'))
return assets[key] if key in assets else key
@register.simple_tag
def asset_for_css(key):
"""Looks up the hashed asset key in rev-manifest-css.json
If the key doesn't exist there, then just return the key to the static file
without a hash"""
assets = json.load(open(settings.DIST_DIR + '/fec/static/css/rev-manifest-css.json'))
if key in assets:
return '/static/css/' + assets[key]
else:
return key
@register.filter(name='remove_word')
def remove_word(str, words):
"""
Removes a word or words from a string
Returns a new string
"""
return str.replace(words, '')
@register.filter(name='dot_or_not')
def dot_or_not(str):
"""
Puts dot-after, only if string represemts a number
Specifically for footnote lists on ReportingDatesTables
"""
try:
int(str)
return '.'
except ValueError:
return ''
@register.filter(name='get_social_image_path')
def get_social_image_path(identifier):
# """
# Returns a path to a social image for the given content section
# TODO: combine with fec/data/templatetags/filters.py ?
# Called by meta-tags.html
# """
imageFilename = identifier
if identifier == 'advisory-opinions':
imageFilename = 'fec-pen'
elif identifier in ['commission-meetings', 'meeting-page']:
imageFilename = 'fec-microphones'
elif identifier == 'press-release':
imageFilename = 'fec-microphone'
elif identifier == 'weekly-digest':
imageFilename = 'fec-seal'
elif identifier == 'data':
imageFilename = 'fec-data'
elif identifier in ['legal', 'help']:
imageFilename = 'fec-' + identifier
else:
imageFilename = 'fec-logo'
return 'https://www.fec.gov/static/img/social/{}.png'.format(imageFilename)
@register.filter(name='get_file_type')
def get_file_type(value):
file_extension = value.rsplit('.', 1)[1].upper()
xl = (file_extension == 'XLS') or (file_extension == 'XLSX')
file_type = "EXCEL" if xl else file_extension
return file_type |
299,454 | aclose | from ssl import SSLContext
from typing import Optional
import trio
from .._exceptions import (
ConnectError,
ConnectTimeout,
ReadError,
ReadTimeout,
WriteError,
WriteTimeout,
map_exceptions,
)
from .._types import TimeoutDict
from .base import AsyncBackend, AsyncLock, AsyncSemaphore, AsyncSocketStream
def none_as_inf(value: Optional[float]) -> float:
return value if value is not None else float("inf")
class SocketStream(AsyncSocketStream):
def __init__(self, stream: trio.abc.Stream) -> None:
self.stream = stream
self.read_lock = trio.Lock()
self.write_lock = trio.Lock()
def get_http_version(self) -> str:
if not isinstance(self.stream, trio.SSLStream):
return "HTTP/1.1"
ident = self.stream.selected_alpn_protocol()
return "HTTP/2" if ident == "h2" else "HTTP/1.1"
async def start_tls(
self, hostname: bytes, ssl_context: SSLContext, timeout: TimeoutDict
) -> "SocketStream":
connect_timeout = none_as_inf(timeout.get("connect"))
exc_map = {
trio.TooSlowError: ConnectTimeout,
trio.BrokenResourceError: ConnectError,
}
ssl_stream = trio.SSLStream(
self.stream,
ssl_context=ssl_context,
server_hostname=hostname.decode("ascii"),
)
with map_exceptions(exc_map):
with trio.fail_after(connect_timeout):
await ssl_stream.do_handshake()
return SocketStream(ssl_stream)
async def read(self, n: int, timeout: TimeoutDict) -> bytes:
read_timeout = none_as_inf(timeout.get("read"))
exc_map = {trio.TooSlowError: ReadTimeout, trio.BrokenResourceError: ReadError}
async with self.read_lock:
with map_exceptions(exc_map):
try:
with trio.fail_after(read_timeout):
return await self.stream.receive_some(max_bytes=n)
except trio.TooSlowError as exc:
await self.stream.METHOD_NAME()
raise exc
async def write(self, data: bytes, timeout: TimeoutDict) -> None:
if not data:
return
write_timeout = none_as_inf(timeout.get("write"))
exc_map = {
trio.TooSlowError: WriteTimeout,
trio.BrokenResourceError: WriteError,
}
async with self.write_lock:
with map_exceptions(exc_map):
try:
with trio.fail_after(write_timeout):
return await self.stream.send_all(data)
except trio.TooSlowError as exc:
await self.stream.METHOD_NAME()
raise exc
async def METHOD_NAME(self) -> None:
async with self.write_lock:
try:
await self.stream.METHOD_NAME()
except trio.BrokenResourceError:
pass
def is_readable(self) -> bool:
# Adapted from: https://github.com/encode/httpx/pull/143#issuecomment-515202982
stream = self.stream
# Peek through any SSLStream wrappers to get the underlying SocketStream.
while isinstance(stream, trio.SSLStream):
stream = stream.transport_stream
assert isinstance(stream, trio.SocketStream)
return stream.socket.is_readable()
class Lock(AsyncLock):
def __init__(self) -> None:
self._lock = trio.Lock()
async def release(self) -> None:
self._lock.release()
async def acquire(self) -> None:
await self._lock.acquire()
class Semaphore(AsyncSemaphore):
def __init__(self, max_value: int, exc_class: type):
self.max_value = max_value
self.exc_class = exc_class
@property
def semaphore(self) -> trio.Semaphore:
if not hasattr(self, "_semaphore"):
self._semaphore = trio.Semaphore(self.max_value, max_value=self.max_value)
return self._semaphore
    async def acquire(self, timeout: Optional[float] = None) -> None:
timeout = none_as_inf(timeout)
with trio.move_on_after(timeout):
await self.semaphore.acquire()
return
raise self.exc_class()
async def release(self) -> None:
self.semaphore.release()
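# A small usage sketch (PoolTimeout is an assumed exception type; any
# Exception subclass passed as exc_class works the same way):
#
#   semaphore = Semaphore(max_value=10, exc_class=PoolTimeout)
#   await semaphore.acquire(timeout=5.0)  # raises PoolTimeout if not acquired
#   try:
#       ...
#   finally:
#       await semaphore.release()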
class TrioBackend(AsyncBackend):
async def open_tcp_stream(
self,
hostname: bytes,
port: int,
ssl_context: Optional[SSLContext],
timeout: TimeoutDict,
*,
local_address: Optional[str],
) -> AsyncSocketStream:
connect_timeout = none_as_inf(timeout.get("connect"))
# Trio will support local_address from 0.16.1 onwards.
# We only include the keyword argument if a local_address
# argument has been passed.
kwargs: dict = {} if local_address is None else {"local_address": local_address}
exc_map = {
OSError: ConnectError,
trio.TooSlowError: ConnectTimeout,
trio.BrokenResourceError: ConnectError,
}
with map_exceptions(exc_map):
with trio.fail_after(connect_timeout):
stream: trio.abc.Stream = await trio.open_tcp_stream(
hostname, port, **kwargs
)
if ssl_context is not None:
stream = trio.SSLStream(
stream, ssl_context, server_hostname=hostname.decode("ascii")
)
await stream.do_handshake()
return SocketStream(stream=stream)
async def open_uds_stream(
self,
path: str,
hostname: bytes,
ssl_context: Optional[SSLContext],
timeout: TimeoutDict,
) -> AsyncSocketStream:
connect_timeout = none_as_inf(timeout.get("connect"))
exc_map = {
OSError: ConnectError,
trio.TooSlowError: ConnectTimeout,
trio.BrokenResourceError: ConnectError,
}
with map_exceptions(exc_map):
with trio.fail_after(connect_timeout):
stream: trio.abc.Stream = await trio.open_unix_socket(path)
if ssl_context is not None:
stream = trio.SSLStream(
stream, ssl_context, server_hostname=hostname.decode("ascii")
)
await stream.do_handshake()
return SocketStream(stream=stream)
def create_lock(self) -> AsyncLock:
return Lock()
def create_semaphore(self, max_value: int, exc_class: type) -> AsyncSemaphore:
return Semaphore(max_value, exc_class=exc_class)
async def time(self) -> float:
return trio.current_time()
async def sleep(self, seconds: float) -> None:
await trio.sleep(seconds) |
299,455 | pv platform | #
# Copyright 2020-2023 Picovoice Inc.
#
# You may not use this file except in compliance with the license. A copy of the license is located in the "LICENSE"
# file accompanying this source.
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
import logging
import os
import platform
import subprocess
log = logging.getLogger('PPN')
log.setLevel(logging.WARNING)
def _is_64bit():
return '64bit' in platform.architecture()[0]
def _pv_linux_machine(machine):
if machine == 'x86_64':
return machine
elif machine in ['aarch64', 'armv7l', 'armv6l']:
arch_info = ('-' + machine) if _is_64bit() else ''
else:
raise NotImplementedError("Unsupported CPU architecture: `%s`" % machine)
cpu_info = ''
try:
cpu_info = subprocess.check_output(['cat', '/proc/cpuinfo']).decode()
cpu_part_list = [x for x in cpu_info.split('\n') if 'CPU part' in x]
cpu_part = cpu_part_list[0].split(' ')[-1].lower()
except Exception as error:
raise RuntimeError("Failed to identify the CPU with '%s'\nCPU info: %s" % (error, cpu_info))
if '0xb76' == cpu_part:
return 'arm11' + arch_info
elif '0xc07' == cpu_part:
return 'cortex-a7' + arch_info
elif '0xd03' == cpu_part:
return 'cortex-a53' + arch_info
elif '0xd07' == cpu_part:
return 'cortex-a57' + arch_info
elif '0xd08' == cpu_part:
return 'cortex-a72' + arch_info
elif '0xc08' == cpu_part:
return 'beaglebone' + arch_info
elif machine == 'armv7l':
log.warning(
'WARNING: Please be advised that this device (CPU part = %s) is not officially supported by Picovoice. '
'Falling back to the armv6-based (Raspberry Pi Zero) library. This is not tested nor optimal.' % cpu_part)
return 'arm11'
else:
raise NotImplementedError("Unsupported CPU: '%s'." % cpu_part)
def METHOD_NAME():
pv_system = platform.system()
if pv_system not in {'Darwin', 'Linux', 'Windows'}:
raise ValueError("Unsupported system '%s'." % pv_system)
if pv_system == 'Linux':
pv_machine = _pv_linux_machine(platform.machine())
else:
pv_machine = platform.machine()
return pv_system, pv_machine
_PV_SYSTEM, _PV_MACHINE = METHOD_NAME()
_RASPBERRY_PI_MACHINES = {'arm11', 'cortex-a7', 'cortex-a53', 'cortex-a72', 'cortex-a53-aarch64', 'cortex-a72-aarch64'}
_JETSON_MACHINES = {'cortex-a57-aarch64'}
def pv_library_path(relative):
if _PV_SYSTEM == 'Darwin':
if _PV_MACHINE == 'x86_64':
return os.path.join(os.path.dirname(__file__), relative, 'lib/mac/x86_64/libpv_porcupine.dylib')
elif _PV_MACHINE == "arm64":
return os.path.join(os.path.dirname(__file__), relative, 'lib/mac/arm64/libpv_porcupine.dylib')
elif _PV_SYSTEM == 'Linux':
if _PV_MACHINE == 'x86_64':
return os.path.join(os.path.dirname(__file__), relative, 'lib/linux/x86_64/libpv_porcupine.so')
elif _PV_MACHINE in _JETSON_MACHINES:
return os.path.join(
os.path.dirname(__file__),
relative,
'lib/jetson/%s/libpv_porcupine.so' % _PV_MACHINE)
elif _PV_MACHINE in _RASPBERRY_PI_MACHINES:
return os.path.join(
os.path.dirname(__file__),
relative,
'lib/raspberry-pi/%s/libpv_porcupine.so' % _PV_MACHINE)
elif _PV_MACHINE == 'beaglebone':
return os.path.join(os.path.dirname(__file__), relative, 'lib/beaglebone/libpv_porcupine.so')
elif _PV_SYSTEM == 'Windows':
return os.path.join(os.path.dirname(__file__), relative, 'lib/windows/amd64/libpv_porcupine.dll')
raise NotImplementedError('Unsupported platform.')
def pv_model_path(relative):
return os.path.join(os.path.dirname(__file__), relative, 'lib/common/porcupine_params.pv')
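# A usage sketch (the empty relative prefix is an assumption; it depends on
# where this module sits relative to the packaged lib/ directory):
#
#   library_path = pv_library_path('')
#   model_path = pv_model_path('')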
def pv_keyword_files_subdir():
if _PV_SYSTEM == 'Darwin':
return 'mac'
elif _PV_SYSTEM == 'Linux':
if _PV_MACHINE == 'x86_64':
return 'linux'
elif _PV_MACHINE in _JETSON_MACHINES:
return 'jetson'
elif _PV_MACHINE in _RASPBERRY_PI_MACHINES:
return 'raspberry-pi'
elif _PV_MACHINE == 'beaglebone':
return 'beaglebone'
elif _PV_SYSTEM == 'Windows':
return 'windows'
raise NotImplementedError('Unsupported platform')
def pv_keyword_paths(relative):
keyword_files_dir = \
os.path.join(os.path.dirname(__file__), relative, 'resources/keyword_files', pv_keyword_files_subdir())
res = dict()
for x in os.listdir(keyword_files_dir):
res[x.rsplit('_')[0]] = os.path.join(keyword_files_dir, x)
return res |
299,456 | apply | from firedrake.petsc import PETSc
from firedrake.preconditioners.base import PCBase
import firedrake.dmhooks as dmhooks
__all__ = ['GTMGPC']
class GTMGPC(PCBase):
needs_python_pmat = False
_prefix = "gt_"
def initialize(self, pc):
from firedrake import TestFunction, parameters
from firedrake.assemble import allocate_matrix, TwoFormAssembler
from firedrake.interpolation import Interpolator
from firedrake.solving_utils import _SNESContext
from firedrake.matrix_free.operators import ImplicitMatrixContext
_, P = pc.getOperators()
appctx = self.get_appctx(pc)
fcp = appctx.get("form_compiler_parameters")
if pc.getType() != "python":
raise ValueError("Expecting PC type python")
ctx = dmhooks.get_appctx(pc.getDM())
if ctx is None:
raise ValueError("No context found.")
if not isinstance(ctx, _SNESContext):
raise ValueError("Don't know how to get form from %r" % ctx)
prefix = pc.getOptionsPrefix()
options_prefix = prefix + self._prefix
opts = PETSc.Options()
# Handle the fine operator if type is python
if P.getType() == "python":
ictx = P.getPythonContext()
if ictx is None:
raise ValueError("No context found on matrix")
if not isinstance(ictx, ImplicitMatrixContext):
raise ValueError("Don't know how to get form from %r" % ictx)
fine_operator = ictx.a
fine_bcs = ictx.row_bcs
if fine_bcs != ictx.col_bcs:
raise NotImplementedError("Row and column bcs must match")
fine_mat_type = opts.getString(options_prefix + "mat_type",
parameters["default_matrix_type"])
self.fine_op = allocate_matrix(fine_operator,
bcs=fine_bcs,
form_compiler_parameters=fcp,
mat_type=fine_mat_type,
options_prefix=options_prefix)
self._assemble_fine_op = TwoFormAssembler(fine_operator, tensor=self.fine_op,
form_compiler_parameters=fcp,
bcs=fine_bcs).assemble
self._assemble_fine_op()
fine_petscmat = self.fine_op.petscmat
else:
fine_petscmat = P
# Transfer fine operator null space
fine_petscmat.setNullSpace(P.getNullSpace())
fine_transpose_nullspace = P.getTransposeNullSpace()
if fine_transpose_nullspace.handle != 0:
fine_petscmat.setTransposeNullSpace(fine_transpose_nullspace)
# Handle the coarse operator
coarse_options_prefix = options_prefix + "mg_coarse_"
coarse_mat_type = opts.getString(coarse_options_prefix + "mat_type",
parameters["default_matrix_type"])
get_coarse_space = appctx.get("get_coarse_space", None)
if not get_coarse_space:
raise ValueError("Need to provide a callback which provides the coarse space.")
coarse_space = get_coarse_space()
get_coarse_operator = appctx.get("get_coarse_operator", None)
if not get_coarse_operator:
raise ValueError("Need to provide a callback which provides the coarse operator.")
coarse_operator = get_coarse_operator()
coarse_space_bcs = appctx.get("coarse_space_bcs", None)
# These should be callbacks which return the relevant nullspaces
get_coarse_nullspace = appctx.get("get_coarse_op_nullspace", None)
get_coarse_transpose_nullspace = appctx.get("get_coarse_op_transpose_nullspace", None)
self.coarse_op = allocate_matrix(coarse_operator,
bcs=coarse_space_bcs,
form_compiler_parameters=fcp,
mat_type=coarse_mat_type,
options_prefix=coarse_options_prefix)
self._assemble_coarse_op = TwoFormAssembler(coarse_operator, tensor=self.coarse_op,
form_compiler_parameters=fcp,
bcs=coarse_space_bcs).assemble
self._assemble_coarse_op()
coarse_opmat = self.coarse_op.petscmat
# Set nullspace if provided
if get_coarse_nullspace:
nsp = get_coarse_nullspace()
coarse_opmat.setNullSpace(nsp.nullspace())
if get_coarse_transpose_nullspace:
tnsp = get_coarse_transpose_nullspace()
coarse_opmat.setTransposeNullSpace(tnsp.nullspace())
interp_petscmat = appctx.get("interpolation_matrix", None)
if interp_petscmat is None:
# Create interpolation matrix from coarse space to fine space
fine_space = ctx.J.arguments()[0].function_space()
interpolator = Interpolator(TestFunction(coarse_space), fine_space)
interpolation_matrix = interpolator.callable()
interp_petscmat = interpolation_matrix.handle
# We set up a PCMG object that uses the constructed interpolation
# matrix to generate the restriction/prolongation operators.
# This is a two-level multigrid preconditioner.
pcmg = PETSc.PC().create(comm=pc.comm)
pcmg.incrementTabLevel(1, parent=pc)
pcmg.setType(pc.Type.MG)
pcmg.setOptionsPrefix(options_prefix)
pcmg.setMGLevels(2)
pcmg.setMGCycleType(pc.MGCycleType.V)
pcmg.setMGInterpolation(1, interp_petscmat)
pcmg.setOperators(A=fine_petscmat, P=fine_petscmat)
coarse_solver = pcmg.getMGCoarseSolve()
coarse_solver.setOperators(A=coarse_opmat, P=coarse_opmat)
# coarse space dm
coarse_dm = coarse_space.dm
coarse_solver.setDM(coarse_dm)
coarse_solver.setDMActive(False)
pcmg.setDM(pc.getDM())
pcmg.setFromOptions()
self.pc = pcmg
self._dm = coarse_dm
prefix = coarse_solver.getOptionsPrefix()
# Create new appctx
self._ctx_ref = self.new_snes_ctx(pc,
coarse_operator,
coarse_space_bcs,
coarse_mat_type,
fcp,
options_prefix=prefix)
with dmhooks.add_hooks(coarse_dm, self,
appctx=self._ctx_ref,
save=False):
coarse_solver.setFromOptions()
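    # A sketch of solver options that would select this preconditioner; the
    # option names mirror the "gt_" prefix used above, and the appctx
    # callbacks are user-supplied (the keys are the ones read in initialize()):
    #
    #   solver_parameters = {"ksp_type": "cg",
    #                        "pc_type": "python",
    #                        "pc_python_type": "firedrake.GTMGPC",
    #                        "gt_mg_levels_ksp_type": "chebyshev",
    #                        "gt_mg_coarse_pc_type": "gamg"}
    #   appctx = {"get_coarse_operator": get_coarse_operator,
    #             "get_coarse_space": get_coarse_space}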
def update(self, pc):
if hasattr(self, "fine_op"):
self._assemble_fine_op()
self._assemble_coarse_op()
self.pc.setUp()
def METHOD_NAME(self, pc, X, Y):
dm = self._dm
with dmhooks.add_hooks(dm, self, appctx=self._ctx_ref):
self.pc.METHOD_NAME(X, Y)
def applyTranspose(self, pc, X, Y):
dm = self._dm
with dmhooks.add_hooks(dm, self, appctx=self._ctx_ref):
self.pc.applyTranspose(X, Y)
def view(self, pc, viewer=None):
super(GTMGPC, self).view(pc, viewer)
if hasattr(self, "pc"):
viewer.printfASCII("PC using Gopalakrishnan and Tan algorithm\n")
self.pc.view(viewer) |
299,457 | models | # -*- coding: utf-8 -*-
#
# hl_api_models.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Functions for model handling
"""
from ..ll_api import check_stack, sps, sr, spp
from .hl_api_helper import deprecated, is_iterable, is_literal, model_deprecation_warning
from .hl_api_types import to_json
from .hl_api_simulation import GetKernelStatus
__all__ = [
"ConnectionRules",
"CopyModel",
"GetDefaults",
"Models",
"SetDefaults",
]
@deprecated("nest.node_models or nest.synapse_models")
@check_stack
def METHOD_NAME(mtype="all", sel=None):
"""Return a tuple of neuron, device, or synapse model names.
Parameters
----------
mtype : str, optional
Use ``mtype='nodes'`` to only get neuron and device models,
or ``mtype='synapses'`` to only get synapse models.
sel : str, optional
Filter results and only return models containing ``sel``.
Returns
-------
tuple
Available model names, sorted by name
Raises
------
ValueError
Description
Notes
-----
- Synapse model names ending in ``_hpc`` require less memory because of
thread-local indices for target neuron IDs and fixed ``rport``s of 0.
- Synapse model names ending in ``_lbl`` allow to assign an integer label
(``synapse_label``) to each individual synapse, at the cost of increased
memory requirements.
"""
if mtype not in ("all", "nodes", "synapses"):
raise ValueError("mtype has to be one of 'all', 'nodes', or 'synapses'")
models = []
if mtype in ("all", "nodes"):
models += GetKernelStatus("node_models")
if mtype in ("all", "synapses"):
models += GetKernelStatus("synapse_models")
if sel is not None:
models = [x for x in models if sel in x]
models.sort()
return tuple(models)
@deprecated("nest.connection_rules")
@check_stack
def ConnectionRules():
"""Return a tuple of all available connection rules, sorted by name.
Returns
-------
tuple
Available connection rules, sorted by name
"""
return tuple(sorted(GetKernelStatus("connection_rules")))
@check_stack
def SetDefaults(model, params, val=None):
"""Set defaults for the given model or recording backend.
New default values are used for all subsequently created instances
of the model.
Parameters
----------
model : str
Name of the model or recording backend
params : str or dict
Dictionary of new default parameter values
val : str, optional
If given, ``params`` has to be the name of a parameter.
"""
if val is not None:
if is_literal(params):
params = {params: val}
sps(params)
sr("/{0} exch SetDefaults".format(model))
@check_stack
def GetDefaults(model, keys=None, output=""):
"""Return defaults of the given model or recording backend.
Parameters
----------
model : str
Name of the model or recording backend
keys : str or list, optional
String or a list of strings naming model properties. `GetDefaults` then
returns a single value or a list of values belonging to the keys
given.
    output : str, optional
        Whether the returned data should be in a specific format
        (``output='json'``). Default is ''.
Returns
-------
dict
A dictionary of default parameters.
    type
        If keys is a string, the corresponding default parameter is returned.
    list
        If keys is a list of strings, a list of corresponding default parameters
        is returned.
str :
If `output` is ``json``, returns parameters in JSON format.
Raises
------
TypeError
If ``keys`` is neither a string nor an iterable of strings.
"""
if keys is None:
cmd = "/{0} GetDefaults".format(model)
elif is_literal(keys):
cmd = "/{0} GetDefaults /{1} get".format(model, keys)
elif is_iterable(keys):
keys_str = " ".join("/{0}".format(x) for x in keys)
cmd = "/{0} GetDefaults [ {1} ] {{ 1 index exch get }}".format(model, keys_str) + " Map exch pop"
else:
raise TypeError("keys should be either a string or an iterable")
sr(cmd)
result = spp()
if output == "json":
result = to_json(result)
return result
@check_stack
def CopyModel(existing, new, params=None):
"""Create a new model by copying an existing one.
Parameters
----------
existing : str
Name of existing model
new : str
Name of the copied model
params : dict, optional
Default parameters assigned to the copy. Parameters not provided are
taken from the existing model.
"""
model_deprecation_warning(existing)
if params is not None:
sps(params)
sr("/%s /%s 3 2 roll CopyModel" % (existing, new))
else:
sr("/%s /%s CopyModel" % (existing, new)) |
299,458 | load attributes | # Copyright (c) Microsoft Corporation
# Licensed under the MIT License.
"""Utilities for attribute serialization."""
import json
import pickle
import warnings
from pathlib import Path
from typing import Any, List, Union
from responsibleai._internal.constants import FileFormats
class SerializationFormats:
PICKLE = 'pickle'
JSON = 'json'
TEXT = 'text'
class SerializationExtensions:
@classmethod
def from_format(cls, file_format: str) -> str:
if file_format == SerializationFormats.PICKLE:
return FileFormats.PKL
elif file_format == SerializationFormats.JSON:
return FileFormats.JSON
elif file_format == SerializationFormats.TEXT:
return FileFormats.TXT
else:
raise ValueError(f"Unknown format: {file_format}")
def save_attributes(
o: Any,
attributes: List[str],
path: Union[str, Path],
file_format: Union[str, List[str]] = SerializationFormats.PICKLE,
) -> List[Path]:
"""Save attributes from an object to disk.
:param o: Object from which to pull attributes.
:param attributes: List of attributes on the object to save.
:param path: Path to directory on disk in which to write the attributes.
:param file_format: File format to use when writing to disk. A list of
file formats can be passed to assign each attribute a different
format.
:returns: List of paths to the saved attributes.
"""
paths = []
dir_path = Path(path)
is_format_list = isinstance(file_format, list)
for i, attribute in enumerate(attributes):
attribute_format = file_format[i] if is_format_list else file_format
value = getattr(o, attribute)
extension = SerializationExtensions.from_format(attribute_format)
path = dir_path / f'{attribute}{extension}'
_save_attribute(value, path, attribute_format)
paths.append(path)
return paths
def _save_attribute(
value: Any,
path: Union[str, Path],
file_format: str,
) -> None:
if file_format == SerializationFormats.PICKLE:
with open(path, 'wb') as f:
pickle.dump(value, f)
elif file_format == SerializationFormats.JSON:
with open(path, 'w') as f:
json.dump(value, f)
elif file_format == SerializationFormats.TEXT:
with open(path, 'w') as f:
f.write(value)
else:
raise ValueError(f"Invalid format {file_format}")
def METHOD_NAME(
o: Any,
attributes: List[str],
path: Union[str, Path],
file_format: Union[str, List[str]] = SerializationFormats.PICKLE,
fail_on_missing: bool = True,
) -> None:
"""Load attributes from disk and save to an existing object.
:param o: Object on which to save the loaded attributes.
:param attributes: List of attributes to load to the object.
:param path: Path to directory on disk where attributes are saved.
:param file_format: File format to use when loading attributes from
disk. A list of file formats can be passed to assign each
attribute a different format.
:param fail_on_missing: Whether to raise an exception if an attribute
was not found.
"""
dir_path = Path(path)
is_format_list = isinstance(file_format, list)
for i, attribute in enumerate(attributes):
attribute_format = file_format[i] if is_format_list else file_format
extension = SerializationExtensions.from_format(attribute_format)
path = dir_path / f'{attribute}{extension}'
if not fail_on_missing and (not path.exists() or not path.is_file()):
continue
value = _load_attribute(path, attribute_format)
setattr(o, attribute, value)
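# Round-trip sketch (the Config class and target directory below are
# assumptions for illustration; the directory must already exist):
#   class Config:
#       threshold = 0.5
#   cfg = Config()
#   save_attributes(cfg, ["threshold"], "/tmp/cfg",
#                   file_format=SerializationFormats.JSON)
#   restored = Config()
#   METHOD_NAME(restored, ["threshold"], "/tmp/cfg",
#               file_format=SerializationFormats.JSON)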
def _load_attribute(
path: Union[str, Path],
file_format: str,
) -> Any:
if file_format == SerializationFormats.PICKLE:
val = None
try:
with open(path, 'rb') as f:
val = pickle.load(f)
except Exception:
model_load_err = ('ERROR-LOADING-EXPLAINER: '
'There was an error loading the explainer. '
'Some of RAI dashboard features may not work.')
warnings.warn(model_load_err, UserWarning)
return val
elif file_format == SerializationFormats.JSON:
with open(path, 'r') as f:
return json.load(f)
elif file_format == SerializationFormats.TEXT:
with open(path, 'r') as f:
return f.read()
else:
raise ValueError(f"Invalid format {file_format}") |
299,459 | on evt text | #Boa:Frame:PlotFrame
from __future__ import division
import wx
import sys
import os
PPRZ_SRC = os.getenv("PAPARAZZI_SRC", os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)),
'../../../..')))
sys.path.append(PPRZ_SRC + "/sw/lib/python")
from settings_tool import IvySettingsInterface
def create(parent, ac_ids):
return SettingsFrame(parent, ac_ids)
SLIDER_ID_OFFSET = 30000
BUTTON_ID_OFFSET = 31000
SLIDER_FACTOR = 100
# Wraps TextCtrl to provide added functionality
class TextCtrlSetting(wx.TextCtrl):
update_callback = None
def __init__(self, parent, setting):
self.setting = setting
wx.TextCtrl.__init__(self, parent=parent, name=setting.shortname, id=setting.index)
self.Bind(wx.EVT_TEXT, self.METHOD_NAME, self)
def RegisterUpdateCallback(self, cb):
self.update_callback = cb
def METHOD_NAME(self, event):
index = int(self.GetId())
try:
value = float(self.GetValue())
self.update_callback(index, value)
except (TypeError, ValueError):
# ignore a partially typed value or a callback that is not registered yet
return
# helper function to toggle edit box boldness (bold = user-set, normal=downlink-received)
def setBold(self, bold):
font = self.GetFont()
if (bold):
font.SetWeight(wx.FONTWEIGHT_BOLD)
else:
font.SetWeight(wx.FONTWEIGHT_NORMAL)
self.SetFont(font)
def SetSettingValue(self, value):
if (self.setting.step < 1):
self.SetValue("%.2f" % float(value))
else:
self.SetValue("%i" % int(float(value)))
# Wraps slider
class SettingCtrl(wx.Slider):
update_callback = None
def __init__(self, parent, setting):
self.setting = setting
max_v = int(setting.max_value) * SLIDER_FACTOR
min_v = int(setting.min_value) * SLIDER_FACTOR
if (min_v >= max_v):
max_v = max_v + 1
wx.Slider.__init__(self, parent=parent, minValue=min_v, maxValue=max_v, style=wx.SL_HORIZONTAL | wx.SL_AUTOTICKS, size=(200, 30), id=setting.index + SLIDER_ID_OFFSET)
self.SetLineSize(setting.step * SLIDER_FACTOR)
self.Bind(wx.EVT_MOUSEWHEEL, self.sliderWheel, self)
self.Bind(wx.EVT_SLIDER, self.OnEvtSlider, self)
def RegisterUpdateCallback(self, cb):
self.update_callback = cb
def OnEvtSlider(self, event):
slider = event.GetEventObject()
self.update_callback(slider.GetSettingIndex(), slider.GetSettingValue())
# Called on mouse wheel events (default handler seems backwards?)
def sliderWheel(self, event):
slider = event.GetEventObject()
if (event.GetWheelRotation() > 0):
slider.SetValue(slider.GetValue() + slider.GetLineSize())
else:
slider.SetValue(slider.GetValue() - slider.GetLineSize())
self.update_callback(slider.GetSettingIndex(), slider.GetSettingValue())
def GetSettingIndex(self):
index = int(self.GetId())
if index >= SLIDER_ID_OFFSET:
index = index - SLIDER_ID_OFFSET
return index
def SetSettingValue(self, value):
# scale before truncating so fractional setting values are preserved
self.SetValue(int(float(value) * SLIDER_FACTOR))
def GetSettingValue(self):
if (self.setting.step < 1):
return float(self.GetValue()) / SLIDER_FACTOR
else:
return int(self.GetValue()) // SLIDER_FACTOR
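# Worked example of the SLIDER_FACTOR scaling: with SLIDER_FACTOR = 100 and a
# fractional step (step < 1), a raw slider position of 250 maps back to
# 250 / 100 = 2.5; with an integer step it maps to 250 // 100 = 2.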
class SettingsFrame(wx.Frame):
edits = []
sliders = []
def __init__(self, parent, ac_ids):
self.settings = IvySettingsInterface(ac_ids)
title = "Settings %s (%s)" % (ac_ids, self.settings.GetACName())
wx.Frame.__init__(self, name=u'SettingsFrame', parent=parent, title=title, size=(480, 320))
self.book = wx.Notebook(self)
self.updates = []
self.Bind(wx.EVT_CLOSE, self.OnClose)
for setting_group in self.settings.groups:
page = wx.Panel(self.book)
vert_box = wx.BoxSizer(orient=wx.VERTICAL)
for setting in setting_group.member_list:
horz_box = wx.BoxSizer(orient=wx.HORIZONTAL)
text = wx.StaticText(page, label=setting.shortname, size=(100,30))
# Edit
edit = TextCtrlSetting(page, setting)
edit.RegisterUpdateCallback(self.editUpdate)
self.edits.append(edit)
# Slider
slider = SettingCtrl(page, setting)
slider.RegisterUpdateCallback(self.updateEditFromSlider)
self.sliders.append(slider)
# Button
button = wx.Button(page, id=setting.index + BUTTON_ID_OFFSET, label="Apply")
self.Bind(wx.EVT_BUTTON, self.onButton)
horz_box.AddWindow(text)
horz_box.AddWindow(edit)
horz_box.AddWindow(slider)
horz_box.AddWindow(button)
vert_box.AddWindow(horz_box)
page.SetSizer(vert_box)
self.book.AddPage(page, setting_group.name)
self.settings.RegisterCallback(self.onUpdate)
# Copy slider value into associated edit box
def updateEditFromSlider(self, index, value):
self.edits[index].ChangeValue(str(value))
self.edits[index].setBold(True)
# Called on edit box update
def editUpdate(self, index, value):
self.sliders[index].SetSettingValue(value)
self.edits[index].setBold(True)
# Called on button push
def onButton(self, event):
button = event.GetEventObject()
index = int(button.GetId())
if index >= BUTTON_ID_OFFSET:
index = index - BUTTON_ID_OFFSET
self.settings.lookup[index].value = self.sliders[index].GetSettingValue()
self.settings.SendSetting(index)
# Called for remote settings updates
def onUpdate(self, index, value, fromRemote):
# Schedule the call via wx.CallAfter (runs after pending events)
# to avoid crashes from updating the GUI outside the main thread
wx.CallAfter(self.update_value, index, value, fromRemote)
# Called to update GUI with new values
def update_value(self, index, value, fromRemote):
editCtrl = self.edits[index]
if fromRemote and editCtrl.FindFocus() == editCtrl:
# don't process remote updates if the control is focused
return
editCtrl.SetSettingValue(value)
editCtrl.setBold(not fromRemote)
self.sliders[index].SetSettingValue(value)
def OnClose(self, event):
# need to forward close to canvas so that ivy is shut down, otherwise ivy hangs the shutdown
self.settings.OnClose()
self.Destroy() |
299,460 | get | # -*- coding: utf-8 -*-
"""
requests.api
~~~~~~~~~~~~
This module implements the Requests API.
:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""
from . import sessions
def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary, list of tuples or bytes to send
in the query string for the :class:`Request`.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
to add for the file.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How many seconds to wait for the server to send data
before giving up, as a float, or a :ref:`(connect timeout, read
timeout) <timeouts>` tuple.
:type timeout: float or tuple
:param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
:type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param verify: (optional) Either a boolean, in which case it controls whether we verify
the server's TLS certificate, or a string, in which case it must be a path
to a CA bundle to use. Defaults to ``True``.
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
:return: :class:`Response <Response>` object
:rtype: requests.Response
Usage::
>>> import requests
>>> req = requests.request('GET', 'https://httpbin.org/get')
<Response [200]>
"""
# By using the 'with' statement we are sure the session is closed, thus we
# avoid leaving sockets open which can trigger a ResourceWarning in some
# cases, and look like a memory leak in others.
with sessions.Session() as session:
return session.request(method=method, url=url, **kwargs)
def METHOD_NAME(url, params=None, **kwargs):
r"""Sends a GET request.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary, list of tuples or bytes to send
in the query string for the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
return request('get', url, params=params, **kwargs)
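# Usage sketch (httpbin.org is just an example endpoint):
#   r = METHOD_NAME('https://httpbin.org/get', params={'key': 'value'})
#   r.status_code   # 200 on success
#   r.json()        # decoded response body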
def options(url, **kwargs):
r"""Sends an OPTIONS request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
return request('options', url, **kwargs)
def head(url, **kwargs):
r"""Sends a HEAD request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', False)
return request('head', url, **kwargs)
def post(url, data=None, json=None, **kwargs):
r"""Sends a POST request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('post', url, data=data, json=json, **kwargs)
def put(url, data=None, **kwargs):
r"""Sends a PUT request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('put', url, data=data, **kwargs)
def patch(url, data=None, **kwargs):
r"""Sends a PATCH request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('patch', url, data=data, **kwargs)
def delete(url, **kwargs):
r"""Sends a DELETE request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('delete', url, **kwargs) |
299,461 | test get teams and corresponding challenges for | from datetime import timedelta
from django.urls import reverse_lazy, resolve
from django.contrib.auth.models import User
from django.utils import timezone
from allauth.account.models import EmailAddress
from rest_framework.test import APITestCase, APIClient
from challenges.models import Challenge
from hosts.models import ChallengeHost, ChallengeHostTeam
from participants.models import ParticipantTeam
class BaseAPITestClass(APITestCase):
def setUp(self):
self.client = APIClient(enforce_csrf_checks=True)
self.user = User.objects.create(
username="someuser",
email="user@test.com",
password="secret_password",
)
EmailAddress.objects.create(
user=self.user, email="user@test.com", primary=True, verified=True
)
self.invite_user = User.objects.create(
username="otheruser",
email="other@platform.com",
password="other_secret_password",
)
self.participant_team = ParticipantTeam.objects.create(
team_name="Participant Team", created_by=self.user
)
# user who creates a challenge host team
self.user2 = User.objects.create(
username="someuser2", password="some_secret_password"
)
self.challenge_host_team = ChallengeHostTeam.objects.create(
team_name="Some Test Challenge Host Team", created_by=self.user2
)
self.challenge_host2 = ChallengeHost.objects.create(
user=self.user2,
team_name=self.challenge_host_team,
status=ChallengeHost.ACCEPTED,
permissions=ChallengeHost.ADMIN,
)
self.challenge = Challenge.objects.create(
title="Some Test Challenge",
short_description="Short description for some test challenge",
description="Description for some test challenge",
terms_and_conditions="Terms and conditions for some test challenge",
submission_guidelines="Submission guidelines for some test challenge",
creator=self.challenge_host_team,
published=False,
enable_forum=True,
anonymous_leaderboard=False,
start_date=timezone.now() - timedelta(days=2),
end_date=timezone.now() + timedelta(days=1),
)
self.client.force_authenticate(user=self.user)
class TestStringMethods(BaseAPITestClass):
def test_participant_team_list_url(self):
self.url = reverse_lazy("participants:get_participant_team_list")
self.assertEqual(str(self.url), "/api/participants/participant_team")
resolver = resolve(self.url)
self.assertEqual(
resolver.view_name, "participants:get_participant_team_list"
)
def test_get_participant_team_challenge_list(self):
self.url = reverse_lazy(
"participants:get_participant_team_challenge_list",
kwargs={"participant_team_pk": self.participant_team.pk},
)
self.assertEqual(
str(self.url),
"/api/participants/participant_team/%s/challenge"
% (self.participant_team.pk),
)
resolver = resolve(self.url)
self.assertEqual(
resolver.view_name,
"participants:get_participant_team_challenge_list",
)
def test_participant_team_detail_url(self):
self.url = reverse_lazy(
"participants:get_participant_team_details",
kwargs={"pk": self.participant_team.pk},
)
self.assertEqual(
str(self.url),
"/api/participants/participant_team/%s"
% (self.participant_team.pk),
)
resolver = resolve(self.url)
self.assertEqual(
resolver.view_name, "participants:get_participant_team_details"
)
def test_invite_participant_to_team_url(self):
self.url = reverse_lazy(
"participants:invite_participant_to_team",
kwargs={"pk": self.participant_team.pk},
)
self.assertEqual(
str(self.url),
"/api/participants/participant_team/%s/invite"
% (self.participant_team.pk),
)
resolver = resolve(self.url)
self.assertEqual(
resolver.view_name, "participants:invite_participant_to_team"
)
def test_delete_participant_from_team_url(self):
self.url = reverse_lazy(
"participants:delete_participant_from_team",
kwargs={
"participant_team_pk": self.participant_team.pk,
"participant_pk": self.invite_user.pk,
},
)
self.assertEqual(
str(self.url),
"/api/participants/participant_team/%s/participant/%s"
% (self.participant_team.pk, self.invite_user.pk),
)
resolver = resolve(self.url)
self.assertEqual(
resolver.view_name, "participants:delete_participant_from_team"
)
def METHOD_NAME(
self,
):
self.url = reverse_lazy(
"participants:get_teams_and_corresponding_challenges_for_a_participant",
kwargs={"challenge_pk": self.challenge.pk},
)
self.assertEqual(
str(self.url),
"/api/participants/participant_teams/challenges/{}/user".format(
self.challenge.pk
),
)
resolver = resolve(self.url)
self.assertEqual(
resolver.view_name,
"participants:get_teams_and_corresponding_challenges_for_a_participant",
)
def test_remove_self_from_participant_team_url(self):
self.url = reverse_lazy(
"participants:remove_self_from_participant_team",
kwargs={"participant_team_pk": self.participant_team.pk},
)
self.assertEqual(
str(self.url),
"/api/participants/remove_self_from_participant_team/%s"
% (self.participant_team.pk),
)
resolver = resolve(self.url)
self.assertEqual(
resolver.view_name,
"participants:remove_self_from_participant_team",
) |
299,462 | parser | from invoke.collection import Collection
from invoke.parser import Parser
from invoke.tasks import task
class CLIParsing:
"""
High level parsing tests
"""
def setup_method(self):
@task(positional=[], iterable=["my_list"], incrementable=["verbose"])
def my_task(
c,
mystring,
s,
boolean=False,
b=False,
v=False,
long_name=False,
true_bool=True,
_leading_underscore=False,
trailing_underscore_=False,
my_list=None,
verbose=0,
):
pass
@task(aliases=["my_task27"])
def my_task2(c):
pass
@task(default=True)
def my_task3(c, mystring):
pass
@task
def my_task4(c, clean=False, browse=False):
pass
@task(aliases=["other"], default=True)
def sub_task(c):
pass
sub_coll = Collection("sub_coll", sub_task)
self.c = Collection(my_task, my_task2, my_task3, my_task4, sub_coll)
def METHOD_NAME(self):
return Parser(self.c.to_contexts())
def _parse(self, argstr):
return self.METHOD_NAME().parse_argv(argstr.split())
def _compare(self, invoke, flagname, value):
invoke = "my-task " + invoke
result = self._parse(invoke)
assert result[0].args[flagname].value == value
def _compare_names(self, given, real):
assert self._parse(given)[0].name == real
def underscored_flags_can_be_given_as_dashed(self):
self._compare("--long-name", "long_name", True)
def leading_underscores_are_ignored(self):
self._compare("--leading-underscore", "_leading_underscore", True)
def trailing_underscores_are_ignored(self):
self._compare("--trailing-underscore", "trailing_underscore_", True)
def inverse_boolean_flags(self):
self._compare("--no-true-bool", "true_bool", False)
def namespaced_task(self):
self._compare_names("sub-coll.sub-task", "sub-coll.sub-task")
def aliases(self):
self._compare_names("my-task27", "my-task2")
def subcollection_aliases(self):
self._compare_names("sub-coll.other", "sub-coll.sub-task")
def subcollection_default_tasks(self):
self._compare_names("sub-coll", "sub-coll.sub-task")
def boolean_args(self):
"my-task --boolean"
self._compare("--boolean", "boolean", True)
def flag_then_space_then_value(self):
"my-task --mystring foo"
self._compare("--mystring foo", "mystring", "foo")
def flag_then_equals_sign_then_value(self):
"my-task --mystring=foo"
self._compare("--mystring=foo", "mystring", "foo")
def short_boolean_flag(self):
"my-task -b"
self._compare("-b", "b", True)
def short_flag_then_space_then_value(self):
"my-task -s value"
self._compare("-s value", "s", "value")
def short_flag_then_equals_sign_then_value(self):
"my-task -s=value"
self._compare("-s=value", "s", "value")
def short_flag_with_adjacent_value(self):
"my-task -svalue"
r = self._parse("my-task -svalue")
assert r[0].args.s.value == "value"
def _flag_value_task(self, value):
r = self._parse("my-task -s {} my-task2".format(value))
assert len(r) == 2
assert r[0].name == "my-task"
assert r[0].args.s.value == value
assert r[1].name == "my-task2"
def flag_value_then_task(self):
"my-task -s value my-task2"
self._flag_value_task("value")
def flag_value_same_as_task_name(self):
"my-task -s my-task2 my-task2"
self._flag_value_task("my-task2")
def three_tasks_with_args(self):
"my-task --boolean my-task3 --mystring foo my-task2"
r = self._parse("my-task --boolean my-task3 --mystring foo my-task2")
assert len(r) == 3
assert [x.name for x in r] == ["my-task", "my-task3", "my-task2"]
assert r[0].args.boolean.value
assert r[1].args.mystring.value == "foo"
def tasks_with_duplicately_named_kwargs(self):
"my-task --mystring foo my-task3 --mystring bar"
r = self._parse("my-task --mystring foo my-task3 --mystring bar")
assert r[0].name == "my-task"
assert r[0].args.mystring.value == "foo"
assert r[1].name == "my-task3"
assert r[1].args.mystring.value == "bar"
def multiple_short_flags_adjacent(self):
"my-task -bv (and inverse)"
for args in ("-bv", "-vb"):
r = self._parse("my-task {}".format(args))
a = r[0].args
assert a.b.value
assert a.v.value
def list_type_flag_can_be_given_N_times_building_a_list(self):
"my-task --my-list foo --my-list bar"
# Test both the singular and plural cases, just to be safe.
self._compare("--my-list foo", "my-list", ["foo"])
self._compare("--my-list foo --my-list bar", "my-list", ["foo", "bar"])
def incrementable_type_flag_can_be_used_as_a_switch_or_counter(self):
"my-task -v, -vv, -vvvvv etc, except with explicit --verbose"
self._compare("", "verbose", 0)
self._compare("--verbose", "verbose", 1)
self._compare("--verbose --verbose --verbose", "verbose", 3) |
299,463 | type | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetReplicationProtectionContainerMappingResult',
'AwaitableGetReplicationProtectionContainerMappingResult',
'get_replication_protection_container_mapping',
'get_replication_protection_container_mapping_output',
]
@pulumi.output_type
class GetReplicationProtectionContainerMappingResult:
"""
Protection container mapping object.
"""
def __init__(__self__, id=None, location=None, name=None, properties=None, METHOD_NAME=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", METHOD_NAME)
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
Resource Location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource Name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> 'outputs.ProtectionContainerMappingPropertiesResponse':
"""
The custom data.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def METHOD_NAME(self) -> str:
"""
Resource Type
"""
return pulumi.get(self, "type")
class AwaitableGetReplicationProtectionContainerMappingResult(GetReplicationProtectionContainerMappingResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetReplicationProtectionContainerMappingResult(
id=self.id,
location=self.location,
name=self.name,
properties=self.properties,
METHOD_NAME=self.METHOD_NAME)
def get_replication_protection_container_mapping(fabric_name: Optional[str] = None,
mapping_name: Optional[str] = None,
protection_container_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
resource_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetReplicationProtectionContainerMappingResult:
"""
Gets the details of a protection container mapping.
:param str fabric_name: Fabric name.
:param str mapping_name: Protection Container mapping name.
:param str protection_container_name: Protection container name.
:param str resource_group_name: The name of the resource group where the recovery services vault is present.
:param str resource_name: The name of the recovery services vault.
"""
__args__ = dict()
__args__['fabricName'] = fabric_name
__args__['mappingName'] = mapping_name
__args__['protectionContainerName'] = protection_container_name
__args__['resourceGroupName'] = resource_group_name
__args__['resourceName'] = resource_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:recoveryservices/v20230401:getReplicationProtectionContainerMapping', __args__, opts=opts, typ=GetReplicationProtectionContainerMappingResult).value
return AwaitableGetReplicationProtectionContainerMappingResult(
id=pulumi.get(__ret__, 'id'),
location=pulumi.get(__ret__, 'location'),
name=pulumi.get(__ret__, 'name'),
properties=pulumi.get(__ret__, 'properties'),
METHOD_NAME=pulumi.get(__ret__, 'type'))
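# Usage sketch (all resource names below are placeholders; a configured
# Azure environment is assumed):
#   mapping = get_replication_protection_container_mapping(
#       fabric_name="fabric1",
#       mapping_name="mapping1",
#       protection_container_name="container1",
#       resource_group_name="rg1",
#       resource_name="vault1")
#   mapping.properties  # ProtectionContainerMappingPropertiesResponse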
@_utilities.lift_output_func(get_replication_protection_container_mapping)
def get_replication_protection_container_mapping_output(fabric_name: Optional[pulumi.Input[str]] = None,
mapping_name: Optional[pulumi.Input[str]] = None,
protection_container_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetReplicationProtectionContainerMappingResult]:
"""
Gets the details of a protection container mapping.
:param str fabric_name: Fabric name.
:param str mapping_name: Protection Container mapping name.
:param str protection_container_name: Protection container name.
:param str resource_group_name: The name of the resource group where the recovery services vault is present.
:param str resource_name: The name of the recovery services vault.
"""
... |
299,464 | scan outputs | # SPDX-License-Identifier: Apache-2.0
"""Loop Optimizer.
some op in loop's body graph can be moved out of the loop
"""
from tf2onnx.utils import make_name, make_sure
from .optimizer_base import GraphOptimizerBase
# pylint: disable=logging-not-lazy,unused-argument,missing-docstring,unused-variable,arguments-differ
class LoopOptimizer(GraphOptimizerBase):
"""Loop Optimizer."""
# a lot of terms used here come from loop's onnx spec
# https://github.com/onnx/onnx/blob/main/docs/Operators.md#Loop
def __init__(self): # pylint: disable=useless-super-delegation
super(LoopOptimizer, self).__init__()
def _optimize(self, graph):
return self._apply_optimization(graph, self._optimize_at_current_graph_level)
def _optimize_at_current_graph_level(self, g):
has_update = True
while has_update:
has_update = False
nodes = [n for n in g.get_nodes() if n.type == "Loop"]
for n in nodes:
has_update_tmp = self._try_move_transpose_out_of_body_graph(n)
if has_update_tmp:
has_update = True
self.graph_been_opt = True
return g
@staticmethod
def num_consumers(graph, node):
make_sure(len(node.output) == 1, "only consider node with only one output")
res = len(graph.find_output_consumers(node.output[0]))
# This is an optimizer so we cannot rely on outputs having Identity nodes
res += graph.outputs.count(node.output[0])
return res
def _try_move_transpose_out_of_body_graph(self, loop_node):
# an output node of the body graph can be loop-carried-dependent; if so, it can't be moved out of the body graph
# return True if any node was moved out successfully
# for now, we only consider moving transpose nodes
body_graph = loop_node.get_body_graphs()["body"]
parent_graph = loop_node.graph
scan_nodes_name_in_body, scan_node_in_parent = self.METHOD_NAME(loop_node)
scan_nodes = [body_graph.get_node_by_output(name) for name in scan_nodes_name_in_body]
graph_is_changed = False
for node, name_in_parent in zip(scan_nodes, scan_node_in_parent):
# 1 delete node in body graph if possible
# only consider two case: trans is output, or transpose > identity > output
need_process = False
if node.type == "Transpose" and self.num_consumers(body_graph, node) == 1:
trans = node
new_output = node.input[0]
body_graph.remove_node(node.name)
need_process = True
elif node.type == "Identity" and node.inputs[0].type == "Transpose" \
and self.num_consumers(body_graph, node) == 1\
and self.num_consumers(body_graph, node.inputs[0]) == 1:
trans = node.inputs[0]
new_output = node.inputs[0].input[0]
body_graph.remove_node(node.inputs[0].name)
body_graph.remove_node(node.name)
need_process = True
if need_process:
# 2 correct body graph's output
body_outputs = body_graph.outputs
body_outputs[body_outputs.index(node.output[0])] = new_output
# 3 insert new node in parent graph
ori_perm = list(trans.get_attr("perm").ints)
new_perm = [0] + [i + 1 for i in ori_perm]  # body output's rank is m while the loop output's rank is m+1
name = make_name("trans_moved_from_loop_body")
_ = parent_graph.insert_new_node_on_output("Transpose", name_in_parent, name, perm=new_perm)
graph_is_changed = True
return graph_is_changed
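# Worked example of the perm lifting above: if the body transpose used
# perm = [1, 0, 2] on a rank-3 scan output, the hoisted transpose acts on the
# rank-4 loop output (a leading iteration axis is added), so
# new_perm = [0] + [i + 1 for i in [1, 0, 2]] == [0, 2, 1, 3].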
@classmethod
def METHOD_NAME(cls, loop):
# loop has 2+N inputs; loop has N+K outputs;
# loop's body graph has 1+N+K outputs
loop_carried = len(loop.input) - 2
body_graph = loop.get_body_graphs()["body"]
return body_graph.outputs[loop_carried + 1:], loop.output[loop_carried:] |
299,465 | on 204 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"network nat gateway delete",
)
class Delete(AAZCommand):
"""Delete a NAT gateway.
:example: Delete a NAT gateway.
az network nat gateway delete --resource-group MyResourceGroup --name MyNatGateway
"""
_aaz_info = {
"version": "2022-01-01",
"resources": [
["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.network/natgateways/{}", "2022-01-01"],
]
}
AZ_SUPPORT_NO_WAIT = True
def _handler(self, command_args):
super()._handler(command_args)
return self.build_lro_poller(self._execute_operations, None)
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.name = AAZStrArg(
options=["-n", "--name"],
help="Name of the NAT gateway.",
required=True,
id_part="name",
)
_args_schema.resource_group = AAZResourceGroupNameArg(
required=True,
)
return cls._args_schema
def _execute_operations(self):
self.pre_operations()
yield self.NatGatewaysDelete(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
class NatGatewaysDelete(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [202]:
return self.client.build_lro_polling(
self.ctx.args.no_wait,
session,
self.on_200,
self.on_error,
lro_options={"final-state-via": "location"},
path_format_arguments=self.url_parameters,
)
if session.http_response.status_code in [200]:
return self.client.build_lro_polling(
self.ctx.args.no_wait,
session,
self.on_200,
self.on_error,
lro_options={"final-state-via": "location"},
path_format_arguments=self.url_parameters,
)
if session.http_response.status_code in [204]:
return self.client.build_lro_polling(
self.ctx.args.no_wait,
session,
self.METHOD_NAME,
self.on_error,
lro_options={"final-state-via": "location"},
path_format_arguments=self.url_parameters,
)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/natGateways/{natGatewayName}",
**self.url_parameters
)
@property
def method(self):
return "DELETE"
@property
def error_format(self):
return "ODataV4Format"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"natGatewayName", self.ctx.args.name,
required=True,
),
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2022-01-01",
required=True,
),
}
return parameters
def on_200(self, session):
pass
def METHOD_NAME(self, session):
pass
class _DeleteHelper:
"""Helper class for Delete"""
__all__ = ["Delete"] |
299,466 | test xml output file | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly parses environment variables."""
import os
from googletest.test import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-env-var-test_')
environ = os.environ.copy()
def AssertEq(expected, actual):
if expected != actual:
print('Expected: %s' % (expected,))
print(' Actual: %s' % (actual,))
raise AssertionError
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
environ[env_var] = value
elif env_var in environ:
del environ[env_var]
def GetFlag(flag):
"""Runs googletest-env-var-test_ and returns its output."""
args = [COMMAND]
if flag is not None:
args += [flag]
return gtest_test_utils.Subprocess(args, env=environ).output
def TestFlag(flag, test_val, default_val):
"""Verifies that the given flag is affected by the corresponding env var."""
env_var = 'GTEST_' + flag.upper()
SetEnvVar(env_var, test_val)
AssertEq(test_val, GetFlag(flag))
SetEnvVar(env_var, None)
AssertEq(default_val, GetFlag(flag))
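# For example, TestFlag('color', 'yes', 'auto') sets GTEST_COLOR=yes and
# expects GetFlag('color') == 'yes', then unsets the variable and expects
# the 'auto' default.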
class GTestEnvVarTest(gtest_test_utils.TestCase):
def testEnvVarAffectsFlag(self):
"""Tests that environment variable should affect the corresponding flag."""
TestFlag('break_on_failure', '1', '0')
TestFlag('color', 'yes', 'auto')
SetEnvVar('TESTBRIDGE_TEST_RUNNER_FAIL_FAST', None) # For 'fail_fast' test
TestFlag('fail_fast', '1', '0')
TestFlag('filter', 'FooTest.Bar', '*')
SetEnvVar('XML_OUTPUT_FILE', None) # For 'output' test
TestFlag('output', 'xml:tmp/foo.xml', '')
TestFlag('brief', '1', '0')
TestFlag('print_time', '0', '1')
TestFlag('repeat', '999', '1')
TestFlag('throw_on_failure', '1', '0')
TestFlag('death_test_style', 'threadsafe', 'fast')
TestFlag('catch_exceptions', '0', '1')
if IS_LINUX:
TestFlag('death_test_use_fork', '1', '0')
TestFlag('stack_trace_depth', '0', '100')
def METHOD_NAME(self):
"""Tests that $XML_OUTPUT_FILE affects the output flag."""
SetEnvVar('GTEST_OUTPUT', None)
SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml')
AssertEq('xml:tmp/bar.xml', GetFlag('output'))
def testXmlOutputFileOverride(self):
"""Tests that $XML_OUTPUT_FILE is overridden by $GTEST_OUTPUT."""
SetEnvVar('GTEST_OUTPUT', 'xml:tmp/foo.xml')
SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml')
AssertEq('xml:tmp/foo.xml', GetFlag('output'))
if __name__ == '__main__':
gtest_test_utils.Main() |
299,467 | test library starting quantity post | import pytest
def METHOD_NAME(testapp, library_starting_quantity):
testapp.post_json('/library', library_starting_quantity)
def test_library_fragmentation_method_string(testapp, library_with_invalid_fragmentation_methods_string):
res = testapp.post_json('/library', library_with_invalid_fragmentation_methods_string, status=422)
def test_library_fragmentation_method_list(testapp, library_with_valid_fragmentation_method_list):
testapp.post_json('/library', library_with_valid_fragmentation_method_list, status=201)
library_with_valid_fragmentation_method_list.update({'fragmentation_duration_time': 5})
testapp.post_json('/library', library_with_valid_fragmentation_method_list, status=422)
library_with_valid_fragmentation_method_list.update({'fragmentation_duration_time_units': 'minutes'})
testapp.post_json('/library', library_with_valid_fragmentation_method_list, status=201)
library_with_valid_fragmentation_method_list.pop('fragmentation_methods')
testapp.post_json('/library', library_with_valid_fragmentation_method_list, status=422)
def test_library_size_SD_and_CV_properties(testapp, library_size_range, library_fragment_length_CV):
# https://encodedcc.atlassian.net/browse/ENCD-5276
testapp.post_json('/library', library_size_range, status=201)
library_size_range.update({'average_fragment_size': 350})
testapp.post_json('/library', library_size_range, status=422)
library_size_range.pop('size_range')
testapp.post_json('/library', library_size_range, status=201)
testapp.post_json('/library', library_fragment_length_CV, status=201)
library_fragment_length_CV.update({'fragment_length_SD': 45})
testapp.post_json('/library', library_fragment_length_CV, status=422)
library_fragment_length_CV.pop('fragment_length_CV')
testapp.post_json('/library', library_fragment_length_CV, status=201)
def test_library_adapters(testapp, library, file):
file_adapters = {
**library,
'adapters': [
{
'type': "read1 3' adapter",
'file': file['@id'],
},
]
}
testapp.post_json('/library', file_adapters, status=201)
sequence_adapters = {
**library,
'adapters': [
{
'type': "read1 3' adapter",
'sequence': 'GGGGGGCNA',
},
{
'type': "read1 3' adapter",
'sequence': 'GGGGGGCNAT',
},
]
}
testapp.post_json('/library', sequence_adapters, status=201)
file_sequence_adapter1 = {
**library,
'adapters': [
{
'type': "read1 3' adapter",
'file': file['@id'],
'sequence': 'GGGGGGCNA',
},
]
}
testapp.post_json('/library', file_sequence_adapter1, status=422)
file_sequence_adapter2 = {
**library,
'adapters': [
{
'type': "read1 3' adapter",
'file': file['@id'],
},
{
'type': "read1 3' adapter",
'file': file['@id'],
'sequence': 'GGGGGGCNA',
},
]
}
testapp.post_json('/library', file_sequence_adapter2, status=422)
mixed_adapters = {
**library,
'adapters': [
{
'type': "read1 3' adapter",
'file': file['@id'],
},
{
'type': "read1 3' adapter",
'sequence': 'GGGGGGCNA',
},
]
}
testapp.post_json('/library', mixed_adapters, status=422)
def test_library_adapters_type(testapp, library, file):
adapters = {
**library,
'adapters': [
{
'type': "read1 3' adapter",
'file': file['@id'],
},
]
}
testapp.post_json('/library', adapters, status=201)
adapters_missing_type = {
**library,
'adapters': [
{
'sequence': 'GGGGGGCNA',
}
]
}
testapp.post_json('/library', adapters_missing_type, status=422)
def test_library_nucleic_acid_depleted_in_term(testapp, library_schema_13, library_schema_capped_mRNA):
# https://encodedcc.atlassian.net/browse/ENCD-5368
testapp.post_json('/library', library_schema_13, status=422)
library_schema_13.update({'nucleic_acid_term_name': 'RNA'})
testapp.post_json('/library', library_schema_13, status=201)
# https://encodedcc.atlassian.net/browse/ENCD-5647
testapp.post_json('/library', library_schema_capped_mRNA, status=201)
library_schema_capped_mRNA.update({'depleted_in_term_name': ['capped mRNA', 'polyadenylated mRNA']})
testapp.post_json('/library', library_schema_capped_mRNA, status=422)
def test_library_biosample_and_mixed_biosample(testapp, library, biosample_1, biosample_2):
# https://encodedcc.atlassian.net/browse/ENCD-5674
testapp.post_json('/library', library, status=201)
library.update({'mixed_biosamples': [biosample_1['@id'], biosample_2['@id']]})
testapp.post_json('/library', library, status=201)
library.update({'biosample': biosample_2})
testapp.post_json('/library', library, status=422)
def test_library_strand_specificity_required_for_RNA(testapp, library, file):
# https://encodedcc.atlassian.net/browse/ENCD-5894
testapp.post_json('/library', library, status=201)
library.update({'nucleic_acid_term_name': 'RNA'})
testapp.post_json('/library', library, status=422)
library.update({'strand_specificity': 'unstranded'})
testapp.post_json('/library', library, status=201) |
299,468 | get sql statement | # Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Postgres Query parser module
"""
import csv
import traceback
from abc import ABC
from datetime import datetime
from typing import Iterable, Optional
from sqlalchemy.engine.base import Engine
from metadata.generated.schema.entity.services.connections.database.postgresConnection import (
PostgresConnection,
)
from metadata.generated.schema.entity.services.connections.metadata.openMetadataConnection import (
OpenMetadataConnection,
)
from metadata.generated.schema.metadataIngestion.workflow import (
Source as WorkflowSource,
)
from metadata.generated.schema.type.tableQuery import TableQueries, TableQuery
from metadata.ingestion.api.steps import InvalidSourceException
from metadata.ingestion.source.connections import get_connection
from metadata.ingestion.source.database.postgres.queries import POSTGRES_GET_DATABASE
from metadata.ingestion.source.database.postgres.utils import (
get_postgres_time_column_name,
)
from metadata.ingestion.source.database.query_parser_source import QueryParserSource
from metadata.utils.helpers import get_start_and_end
from metadata.utils.logger import ingestion_logger
logger = ingestion_logger()
class PostgresQueryParserSource(QueryParserSource, ABC):
"""
Postgres base for Usage and Lineage
"""
filters: str
def __init__(self, config: WorkflowSource, metadata_config: OpenMetadataConnection):
super().__init__(config, metadata_config)
# Postgres does not allow retrieval of data older than 7 days
# Update start and end based on this
duration = min(self.source_config.queryLogDuration, 6)
self.start, self.end = get_start_and_end(duration)
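# For example, with queryLogDuration = 30 the window is clamped to
# min(30, 6) = 6 days, staying inside the 7-day retention limit noted above.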
@classmethod
def create(cls, config_dict, metadata_config: OpenMetadataConnection):
config: WorkflowSource = WorkflowSource.parse_obj(config_dict)
connection: PostgresConnection = config.serviceConnection.__root__.config
if not isinstance(connection, PostgresConnection):
raise InvalidSourceException(
f"Expected PostgresConnection, but got {connection}"
)
return cls(config, metadata_config)
def METHOD_NAME(self, *_) -> str:
"""
returns sql statement to fetch query logs.
We don't use any start or end times as they are not available
"""
return self.sql_stmt.format(
result_limit=self.config.sourceConfig.config.resultLimit,
filters=self.get_filters(),
time_column_name=get_postgres_time_column_name(engine=self.engine),
)
def get_table_query(self) -> Iterable[TableQuery]:
try:
if self.config.sourceConfig.config.queryLogFilePath:
table_query_list = []
with open(
self.config.sourceConfig.config.queryLogFilePath,
"r",
encoding="utf-8",
) as query_log_file:
for record in csv.DictReader(query_log_file):
query_dict = dict(record)
analysis_date = (
datetime.utcnow()
if not query_dict.get("session_start_time")
else datetime.strptime(
query_dict.get("session_start_time"),
"%Y-%m-%d %H:%M:%S+%f",
)
)
query_dict["aborted"] = query_dict["sql_state_code"] == "00000"
if "statement" in query_dict["message"]:
query_dict["message"] = query_dict["message"].split(":")[1]
table_query_list.append(
TableQuery(
query=query_dict["message"],
userName=query_dict.get("user_name", ""),
startTime=query_dict.get("session_start_time", ""),
endTime=query_dict.get("log_time", ""),
analysisDate=analysis_date,
aborted=self.get_aborted_status(query_dict),
databaseName=self.get_database_name(query_dict),
serviceName=self.config.serviceName,
databaseSchema=self.get_schema_name(query_dict),
)
)
yield TableQueries(queries=table_query_list)
else:
database = self.config.serviceConnection.__root__.config.database
if database:
self.engine: Engine = get_connection(self.service_connection)
yield from self.process_table_query()
else:
results = self.engine.execute(POSTGRES_GET_DATABASE)
for res in results:
row = list(res)
logger.info(f"Ingesting from database: {row[0]}")
self.config.serviceConnection.__root__.config.database = row[0]
self.engine = get_connection(self.service_connection)
yield from self.process_table_query()
except Exception as err:
logger.error(f"Source usage processing error - {err}")
logger.debug(traceback.format_exc())
def process_table_query(self) -> Optional[Iterable[TableQuery]]:
"""
Process Query
"""
try:
with get_connection(self.service_connection).connect() as conn:
rows = conn.execute(self.METHOD_NAME())
queries = []
for row in rows:
row = dict(row)
try:
queries.append(
TableQuery(
query=row["query_text"],
userName=row["usename"],
analysisDate=datetime.now(),
aborted=self.get_aborted_status(row),
databaseName=self.get_database_name(row),
serviceName=self.config.serviceName,
databaseSchema=self.get_schema_name(row),
duration=row.get("duration"),
)
)
except Exception as err:
logger.debug(traceback.format_exc())
logger.error(str(err))
yield TableQueries(queries=queries)
except Exception as err:
logger.error(f"Source usage processing error - {err}")
logger.debug(traceback.format_exc())
@staticmethod
def get_database_name(data: dict) -> Optional[str]:
"""
Method to get database name
"""
return data.get("database_name") |
299,469 | name | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetContentTypeResult',
'AwaitableGetContentTypeResult',
'get_content_type',
'get_content_type_output',
]
@pulumi.output_type
class GetContentTypeResult:
"""
Content type contract details.
"""
def __init__(__self__, description=None, id=None, METHOD_NAME=None, schema=None, type=None, version=None):
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", METHOD_NAME)
if schema and not isinstance(schema, dict):
raise TypeError("Expected argument 'schema' to be a dict")
pulumi.set(__self__, "schema", schema)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if version and not isinstance(version, str):
raise TypeError("Expected argument 'version' to be a str")
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Content type description.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def METHOD_NAME(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def schema(self) -> Optional[Any]:
"""
Content type schema.
"""
return pulumi.get(self, "schema")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def version(self) -> Optional[str]:
"""
Content type version.
"""
return pulumi.get(self, "version")
class AwaitableGetContentTypeResult(GetContentTypeResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetContentTypeResult(
description=self.description,
id=self.id,
METHOD_NAME=self.METHOD_NAME,
schema=self.schema,
type=self.type,
version=self.version)
def get_content_type(content_type_id: Optional[str] = None,
resource_group_name: Optional[str] = None,
service_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetContentTypeResult:
"""
Gets the details of the developer portal's content type. Content types describe content items' properties, validation rules, and constraints.
Azure REST API version: 2022-08-01.
:param str content_type_id: Content type identifier.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str service_name: The name of the API Management service.
"""
__args__ = dict()
__args__['contentTypeId'] = content_type_id
__args__['resourceGroupName'] = resource_group_name
__args__['serviceName'] = service_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:apimanagement:getContentType', __args__, opts=opts, typ=GetContentTypeResult).value
return AwaitableGetContentTypeResult(
description=pulumi.get(__ret__, 'description'),
id=pulumi.get(__ret__, 'id'),
METHOD_NAME=pulumi.get(__ret__, 'name'),
schema=pulumi.get(__ret__, 'schema'),
type=pulumi.get(__ret__, 'type'),
version=pulumi.get(__ret__, 'version'))
@_utilities.lift_output_func(get_content_type)
def get_content_type_output(content_type_id: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetContentTypeResult]:
"""
Gets the details of the developer portal's content type. Content types describe content items' properties, validation rules, and constraints.
Azure REST API version: 2022-08-01.
:param str content_type_id: Content type identifier.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str service_name: The name of the API Management service.
"""
... |
299,470 | errors | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetProviderInstanceResult',
'AwaitableGetProviderInstanceResult',
'get_provider_instance',
'get_provider_instance_output',
]
@pulumi.output_type
class GetProviderInstanceResult:
"""
A provider instance associated with SAP monitor.
"""
def __init__(__self__, METHOD_NAME=None, id=None, identity=None, name=None, provider_settings=None, provisioning_state=None, system_data=None, type=None):
if METHOD_NAME and not isinstance(METHOD_NAME, dict):
raise TypeError("Expected argument 'errors' to be a dict")
pulumi.set(__self__, "errors", METHOD_NAME)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if identity and not isinstance(identity, dict):
raise TypeError("Expected argument 'identity' to be a dict")
pulumi.set(__self__, "identity", identity)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provider_settings and not isinstance(provider_settings, dict):
raise TypeError("Expected argument 'provider_settings' to be a dict")
pulumi.set(__self__, "provider_settings", provider_settings)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def METHOD_NAME(self) -> 'outputs.ProviderInstancePropertiesResponseErrors':
"""
Defines the provider instance errors.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def identity(self) -> Optional['outputs.UserAssignedServiceIdentityResponse']:
"""
[currently not in use] Managed service identity(user assigned identities)
"""
return pulumi.get(self, "identity")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="providerSettings")
def provider_settings(self) -> Optional[Any]:
"""
Defines the provider specific properties.
"""
return pulumi.get(self, "provider_settings")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
State of provisioning of the provider instance
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
Azure Resource Manager metadata containing createdBy and modifiedBy information.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
class AwaitableGetProviderInstanceResult(GetProviderInstanceResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetProviderInstanceResult(
METHOD_NAME=self.METHOD_NAME,
id=self.id,
identity=self.identity,
name=self.name,
provider_settings=self.provider_settings,
provisioning_state=self.provisioning_state,
system_data=self.system_data,
type=self.type)
def get_provider_instance(monitor_name: Optional[str] = None,
provider_instance_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetProviderInstanceResult:
"""
Gets properties of a provider instance for the specified subscription, resource group, SAP monitor name, and resource name.
Azure REST API version: 2023-04-01.
:param str monitor_name: Name of the SAP monitor resource.
:param str provider_instance_name: Name of the provider instance.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
__args__ = dict()
__args__['monitorName'] = monitor_name
__args__['providerInstanceName'] = provider_instance_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:workloads:getProviderInstance', __args__, opts=opts, typ=GetProviderInstanceResult).value
return AwaitableGetProviderInstanceResult(
METHOD_NAME=pulumi.get(__ret__, 'errors'),
id=pulumi.get(__ret__, 'id'),
identity=pulumi.get(__ret__, 'identity'),
name=pulumi.get(__ret__, 'name'),
provider_settings=pulumi.get(__ret__, 'provider_settings'),
provisioning_state=pulumi.get(__ret__, 'provisioning_state'),
system_data=pulumi.get(__ret__, 'system_data'),
type=pulumi.get(__ret__, 'type'))
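# Minimal sketch (monitor/group names are hypothetical): the returned object is
# awaitable, so it works from async code as well as when used synchronously.
async def _example_get_provider_instance():
    result = await get_provider_instance(monitor_name="sap-mon",
                                         provider_instance_name="prov1",
                                         resource_group_name="my-rg")
    return result.provisioning_state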
@_utilities.lift_output_func(get_provider_instance)
def get_provider_instance_output(monitor_name: Optional[pulumi.Input[str]] = None,
provider_instance_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetProviderInstanceResult]:
"""
Gets properties of a provider instance for the specified subscription, resource group, SAP monitor name, and resource name.
Azure REST API version: 2023-04-01.
:param str monitor_name: Name of the SAP monitor resource.
:param str provider_instance_name: Name of the provider instance.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
"""
... |
299,471 | filter split and classification anns | import enum
import pathlib
from typing import Any, BinaryIO, Dict, List, Optional, Tuple, Union
from torchdata.datapipes.iter import CSVDictParser, Demultiplexer, Filter, IterDataPipe, IterKeyZipper, Mapper
from torchvision.prototype.datasets.utils import Dataset, EncodedImage, HttpResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import (
getitem,
hint_sharding,
hint_shuffling,
INFINITE_BUFFER_SIZE,
path_accessor,
path_comparator,
read_categories_file,
)
from torchvision.prototype.tv_tensors import Label
from .._api import register_dataset, register_info
NAME = "oxford-iiit-pet"
class OxfordIIITPetDemux(enum.IntEnum):
SPLIT_AND_CLASSIFICATION = 0
SEGMENTATIONS = 1
@register_info(NAME)
def _info() -> Dict[str, Any]:
return dict(categories=read_categories_file(NAME))
@register_dataset(NAME)
class OxfordIIITPet(Dataset):
"""Oxford IIIT Pet Dataset
homepage="https://www.robots.ox.ac.uk/~vgg/data/pets/",
"""
def __init__(
self, root: Union[str, pathlib.Path], *, split: str = "trainval", skip_integrity_check: bool = False
) -> None:
self._split = self._verify_str_arg(split, "split", {"trainval", "test"})
self._categories = _info()["categories"]
super().__init__(root, skip_integrity_check=skip_integrity_check)
def _resources(self) -> List[OnlineResource]:
images = HttpResource(
"https://www.robots.ox.ac.uk/~vgg/data/pets/data/images.tar.gz",
sha256="67195c5e1c01f1ab5f9b6a5d22b8c27a580d896ece458917e61d459337fa318d",
preprocess="decompress",
)
anns = HttpResource(
"https://www.robots.ox.ac.uk/~vgg/data/pets/data/annotations.tar.gz",
sha256="52425fb6de5c424942b7626b428656fcbd798db970a937df61750c0f1d358e91",
preprocess="decompress",
)
return [images, anns]
def _classify_anns(self, data: Tuple[str, Any]) -> Optional[int]:
return {
"annotations": OxfordIIITPetDemux.SPLIT_AND_CLASSIFICATION,
"trimaps": OxfordIIITPetDemux.SEGMENTATIONS,
}.get(pathlib.Path(data[0]).parent.name)
def _filter_images(self, data: Tuple[str, Any]) -> bool:
return pathlib.Path(data[0]).suffix == ".jpg"
def _filter_segmentations(self, data: Tuple[str, Any]) -> bool:
return not pathlib.Path(data[0]).name.startswith(".")
def _prepare_sample(
self, data: Tuple[Tuple[Dict[str, str], Tuple[str, BinaryIO]], Tuple[str, BinaryIO]]
) -> Dict[str, Any]:
ann_data, image_data = data
classification_data, segmentation_data = ann_data
segmentation_path, segmentation_buffer = segmentation_data
image_path, image_buffer = image_data
return dict(
label=Label(int(classification_data["label"]) - 1, categories=self._categories),
species="cat" if classification_data["species"] == "1" else "dog",
segmentation_path=segmentation_path,
segmentation=EncodedImage.from_file(segmentation_buffer),
image_path=image_path,
image=EncodedImage.from_file(image_buffer),
)
def _datapipe(self, resource_dps: List[IterDataPipe]) -> IterDataPipe[Dict[str, Any]]:
images_dp, anns_dp = resource_dps
images_dp = Filter(images_dp, self._filter_images)
split_and_classification_dp, segmentations_dp = Demultiplexer(
anns_dp,
2,
self._classify_anns,
drop_none=True,
buffer_size=INFINITE_BUFFER_SIZE,
)
split_and_classification_dp = Filter(split_and_classification_dp, path_comparator("name", f"{self._split}.txt"))
split_and_classification_dp = CSVDictParser(
split_and_classification_dp, fieldnames=("image_id", "label", "species"), delimiter=" "
)
split_and_classification_dp = hint_shuffling(split_and_classification_dp)
split_and_classification_dp = hint_sharding(split_and_classification_dp)
segmentations_dp = Filter(segmentations_dp, self._filter_segmentations)
anns_dp = IterKeyZipper(
split_and_classification_dp,
segmentations_dp,
key_fn=getitem("image_id"),
ref_key_fn=path_accessor("stem"),
buffer_size=INFINITE_BUFFER_SIZE,
)
dp = IterKeyZipper(
anns_dp,
images_dp,
key_fn=getitem(0, "image_id"),
ref_key_fn=path_accessor("stem"),
buffer_size=INFINITE_BUFFER_SIZE,
)
return Mapper(dp, self._prepare_sample)
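    # For illustration (file names are made up): ``Demultiplexer`` above routes
    # each entry by ``_classify_anns``, so ("annotations/trainval.txt", ...)
    # goes to the split/classification branch (index 0),
    # ("trimaps/Abyssinian_1.png", ...) goes to the segmentations branch
    # (index 1), and entries classified as None are dropped via drop_none=True.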
def METHOD_NAME(self, data: Tuple[str, Any]) -> bool:
return self._classify_anns(data) == OxfordIIITPetDemux.SPLIT_AND_CLASSIFICATION
def _generate_categories(self) -> List[str]:
resources = self._resources()
dp = resources[1].load(self._root)
dp = Filter(dp, self.METHOD_NAME)
dp = Filter(dp, path_comparator("name", "trainval.txt"))
dp = CSVDictParser(dp, fieldnames=("image_id", "label"), delimiter=" ")
raw_categories_and_labels = {(data["image_id"].rsplit("_", 1)[0], data["label"]) for data in dp}
raw_categories, _ = zip(
*sorted(raw_categories_and_labels, key=lambda raw_category_and_label: int(raw_category_and_label[1]))
)
return [" ".join(part.title() for part in raw_category.split("_")) for raw_category in raw_categories]
def __len__(self) -> int:
return 3_680 if self._split == "trainval" else 3_669 |
299,472 | sub | import os
import re
import glob
import git
_INCLUDE_EXAMPLES_REGEX = re.compile(
r"""(?P<_includer_indent>[^\S\r\n]*){\s*%\s*include-examples\s*"(?P<example_name>[^")]+)"\s*%\s*}\s*""",
flags=re.VERBOSE | re.DOTALL,
)
_INCLUDE_EXAMPLE_REGEX = re.compile(
r"""(?P<_includer_indent>[^\S\r\n]*){\s*%\s*include-example\s*"(?P<example_path>[^")]+)"\s*%\s*}\s*""",
flags=re.VERBOSE | re.DOTALL,
)
_LINT_MAP = {
".py": "python",
".json": "json",
".yaml": "yaml",
".yml": "yaml",
".sh": "sh",
".md": "md",
}
_REPO_BASE = os.path.abspath(
os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)
)
_EXAMPLES_BASE = os.path.abspath(os.path.join(_REPO_BASE, "examples"))
def sub_include_examples(match):
example_name = match.group("example_name")
indents_level0 = match.group("_includer_indent")
lines = []
lines.append(f"{indents_level0}???+ Example\n")
lines.append(f"{indents_level0}\n")
indents_level1 = indents_level0 + " "
for example_type, pretty_name in [("pipeline", "Pipeline"), ("dsl/v2", "DSL")]:
include_path = os.path.join(_EXAMPLES_BASE, example_type, example_name, "*.*")
lines.append(f'{indents_level1}=== "{pretty_name}"\n\n')
indents_level2 = f"{indents_level1} "
for name in glob.glob(include_path):
if name.endswith("README.md") or name.endswith("readme.md"):
lines.append(f"{indents_level2}```markdown\n")
with open(name) as f:
for line in f.readlines():
lines.append(f"{indents_level2}{line}")
lines.append(f"{indents_level2}\n")
lines.append(f"{indents_level2}```\n")
lines.append(f"{indents_level2}\n")
for file_name in glob.glob(include_path):
if file_name.endswith("README.md") or file_name.endswith("readme.md"):
continue
_, file_extension = os.path.splitext(file_name)
lint = _LINT_MAP.get(file_extension, "")
lines.append(
f'{indents_level2}??? Example "{os.path.basename(file_name)}"\n'
)
lines.append(f"{indents_level2} ```{lint}\n")
head = True
with open(file_name) as f:
for line in f.readlines():
# skip license
if head:
if line.strip() == "" or line.lstrip().startswith("#"):
continue
head = False
lines.append(f"{indents_level2} {line}")
lines.append(f"{indents_level2} \n")
lines.append(f"{indents_level2} ```\n")
lines.append(f"{indents_level2} \n")
return "".join(lines)
def sub_include_example(src_file_path):
def METHOD_NAME(match):
example_path = match.group("example_path")
indents_level0 = match.group("_includer_indent")
lines = []
lines.append(f"{indents_level0}\n")
lines.append(f'{indents_level0}??? Example "{example_path}"\n')
lines.append(f"{indents_level0}\n")
indents_level1 = indents_level0 + " "
abs_file_path = os.path.abspath(
os.path.join(src_file_path, os.pardir, example_path)
)
if os.path.exists(abs_file_path):
with open(abs_file_path) as f:
_, file_extension = os.path.splitext(abs_file_path)
lint = _LINT_MAP.get(file_extension, "")
lines.append(f"{indents_level1}```{lint}\n")
head = True
for line in f.readlines():
# skip license
if head:
if line.strip() == "" or line.lstrip().startswith("#"):
continue
head = False
lines.append(f"{indents_level1} {line}")
lines.append(f"{indents_level1}\n")
lines.append(f"{indents_level1}```\n")
lines.append(f"{indents_level1}\n")
return "".join(lines)
return METHOD_NAME
try:
repo = git.Repo(search_parent_directories=True)
sha = repo.head.object.hexsha
url = repo.remote().url
if url.endswith(".git"):
url = url[:-4]
GITHUB_REPO = f"{url}/tree/{sha}"
except BaseException:
GITHUB_REPO = "https://github.com/FederatedAI/FATE/tree/master"
_DIR_URL_REGEX = re.compile(
r"""(?P<text>\[\s*:file_folder:[^\(]*\])\((?P<url>[^\)]+)\)""",
flags=re.VERBOSE | re.DOTALL,
)
def _fix_dir_url(src_path):
def _replace(match):
text = match.group("text")
url = match.group("url")
if not url.startswith("http"):
url_rel_to_repo_base = os.path.relpath(
os.path.abspath(os.path.join(src_path, os.path.pardir, url)), _REPO_BASE
)
url = f"{GITHUB_REPO}/{url_rel_to_repo_base}"
return f"{text}({url})"
return _replace
_COMMENT_REGEX = re.compile(
r"""[^\S\r\n]*<!--\s*mkdocs\s*\n(?P<_content>.*?)-->""",
flags=re.VERBOSE | re.DOTALL,
)
def _remove_comment(match):
content = match.group("_content")
return content
def on_page_markdown(markdown, page, **kwargs):
markdown = re.METHOD_NAME(_DIR_URL_REGEX, _fix_dir_url(page.file.abs_src_path), markdown)
    # remove specific comment
markdown = re.METHOD_NAME(_COMMENT_REGEX, _remove_comment, markdown)
markdown = re.METHOD_NAME(
_INCLUDE_EXAMPLES_REGEX,
sub_include_examples,
markdown,
)
markdown = re.METHOD_NAME(
_INCLUDE_EXAMPLE_REGEX,
sub_include_example(page.file.abs_src_path),
markdown,
)
return markdown |
299,473 | init passthrough | import serial, time, sys, re
import argparse
import serials_find
import SerialHelper
import bootloader
from query_yes_no import query_yes_no
from elrs_helpers import ElrsUploadResult
SCRIPT_DEBUG = False
class PassthroughEnabled(Exception):
pass
class PassthroughFailed(Exception):
pass
def dbg_print(line=''):
sys.stdout.write(line + '\n')
sys.stdout.flush()
def _validate_serialrx(rl, config, expected):
found = False
if type(expected) == str:
expected = [expected]
rl.set_delimiters(["# "])
rl.clear()
rl.write_str("get %s" % config)
line = rl.read_line(1.).strip()
for key in expected:
key = " = %s" % key
if key in line:
found = True
break
return found
def bf_passthrough_init(port, requestedBaudrate, half_duplex=False):
sys.stdout.flush()
dbg_print("======== PASSTHROUGH INIT ========")
dbg_print(" Trying to initialize %s @ %s" % (port, requestedBaudrate))
s = serial.Serial(port=port, baudrate=115200,
bytesize=8, parity='N', stopbits=1,
timeout=1, xonxoff=0, rtscts=0)
rl = SerialHelper.SerialHelper(s, 3., ['CCC', "# "])
rl.clear()
# Send start command '#'
rl.write_str("#", half_duplex)
start = rl.read_line(2.).strip()
#dbg_print("BF INIT: '%s'" % start.replace("\r", ""))
if "CCC" in start:
raise PassthroughEnabled("Passthrough already enabled and bootloader active")
elif not start or not start.endswith("#"):
raise PassthroughEnabled("No CLI available. Already in passthrough mode?, If this fails reboot FC and try again!")
serial_check = []
if not _validate_serialrx(rl, "serialrx_provider", [["CRSF", "ELRS"], "GHST"][half_duplex]):
serial_check.append("Serial Receiver Protocol is not set to CRSF! Hint: set serialrx_provider = CRSF")
if not _validate_serialrx(rl, "serialrx_inverted", "OFF"):
serial_check.append("Serial Receiver UART is inverted! Hint: set serialrx_inverted = OFF")
if not _validate_serialrx(rl, "serialrx_halfduplex", ["OFF", "AUTO"]):
serial_check.append("Serial Receiver UART is not in full duplex! Hint: set serialrx_halfduplex = OFF")
if _validate_serialrx(rl, "rx_spi_protocol", "EXPRESSLRS" ) and serial_check:
serial_check = [ "ExpressLRS SPI RX detected\n\nUpdate via betaflight to flash your RX\nhttps://www.expresslrs.org/2.0/hardware/spi-receivers/" ]
if serial_check:
error = "\n\n [ERROR] Invalid serial RX configuration detected:\n"
for err in serial_check:
error += " !!! %s !!!\n" % err
error += "\n Please change the configuration and try again!\n"
raise PassthroughFailed(error)
SerialRXindex = ""
dbg_print("\nAttempting to detect FC UART configuration...")
rl.set_delimiters(["\n"])
rl.clear()
rl.write_str("serial")
while True:
line = rl.read_line().strip()
#print("FC: '%s'" % line)
if not line or "#" in line:
break
if line.startswith("serial"):
if SCRIPT_DEBUG:
dbg_print(" '%s'" % line)
config = re.search('serial ([0-9]+) ([0-9]+) ', line)
if config and (int(config.group(2)) & 64 == 64):
dbg_print(" ** Serial RX config detected: '%s'" % line)
SerialRXindex = config.group(1)
if not SCRIPT_DEBUG:
break
if not SerialRXindex:
raise PassthroughFailed("!!! RX Serial not found !!!!\n Check configuration and try again...")
cmd = "serialpassthrough %s %s" % (SerialRXindex, requestedBaudrate, )
dbg_print("Enabling serial passthrough...")
dbg_print(" CMD: '%s'" % cmd)
rl.write_str(cmd)
time.sleep(.2)
s.close()
dbg_print("======== PASSTHROUGH DONE ========")
def reset_to_bootloader(port, baud, target, action, accept=None, half_duplex=False, chip_type='ESP82') -> int:
dbg_print("======== RESET TO BOOTLOADER ========")
s = serial.Serial(port=port, baudrate=baud,
bytesize=8, parity='N', stopbits=1,
timeout=1, xonxoff=0, rtscts=0)
rl = SerialHelper.SerialHelper(s, 3.)
rl.clear()
if half_duplex:
BootloaderInitSeq = bootloader.get_init_seq('GHST', chip_type)
dbg_print(" * Using half duplex (GHST)")
else:
BootloaderInitSeq = bootloader.get_init_seq('CRSF', chip_type)
dbg_print(" * Using full duplex (CRSF)")
    # This is the training sequence for the ROM bootloader; we send it here so it doesn't auto-negotiate to the wrong baud rate from the BootloaderInitSeq that we send to reset ELRS.
rl.write(b'\x07\x07\x12\x20' + 32 * b'\x55')
time.sleep(0.2)
rl.write(BootloaderInitSeq)
s.flush()
rx_target = rl.read_line().strip().upper()
if target is not None:
flash_target = re.sub("_VIA_.*", "", target.upper())
ignore_incorrect_target = action == "uploadforce"
if rx_target == "":
dbg_print("Cannot detect RX target, blindly flashing!")
elif ignore_incorrect_target:
dbg_print(f"Force flashing {flash_target}, detected {rx_target}")
elif rx_target != flash_target and rx_target != accept:
if query_yes_no("\n\n\nWrong target selected! your RX is '%s', trying to flash '%s', continue? Y/N\n" % (rx_target, flash_target)):
dbg_print("Ok, flashing anyway!")
else:
dbg_print("Wrong target selected your RX is '%s', trying to flash '%s'" % (rx_target, flash_target))
return ElrsUploadResult.ErrorMismatch
elif flash_target != "":
dbg_print("Verified RX target '%s'" % (flash_target))
time.sleep(.5)
s.close()
return ElrsUploadResult.Success
def METHOD_NAME(source, target, env) -> int:
env.AutodetectUploadPort([env])
try:
bf_passthrough_init(env['UPLOAD_PORT'], env['UPLOAD_SPEED'])
except PassthroughEnabled as err:
dbg_print(str(err))
return reset_to_bootloader(env['UPLOAD_PORT'], env['UPLOAD_SPEED'], env['PIOENV'], source[0])
def main(custom_args = None):
parser = argparse.ArgumentParser(
description="Initialize BetaFlight passthrough and optionally send a reboot comamnd sequence")
parser.add_argument("-b", "--baud", type=int, default=420000,
help="Baud rate for passthrough communication")
parser.add_argument("-p", "--port", type=str,
help="Override serial port autodetection and use PORT")
parser.add_argument("-r", "--target", type=str,
help="The target firmware that is going to be uploaded")
parser.add_argument("-nr", "--no-reset", action="store_false",
dest="reset_to_bl", help="Do not send reset_to_bootloader command sequence")
parser.add_argument("-hd", "--half-duplex", action="store_true",
dest="half_duplex", help="Use half duplex mode")
parser.add_argument("-t", "--type", type=str, default="ESP82",
help="Defines flash target type which is sent to target in reboot command")
parser.add_argument("-a", "--action", type=str, default="upload",
help="Upload action: upload (default), or uploadforce to flash even on target mismatch")
parser.add_argument("--accept", type=str, default=None,
help="Acceptable target to auto-overwrite")
args = parser.parse_args(custom_args)
    if args.port is None:
args.port = serials_find.get_serial_port()
returncode = ElrsUploadResult.Success
try:
bf_passthrough_init(args.port, args.baud)
except PassthroughEnabled as err:
dbg_print(str(err))
if args.reset_to_bl:
returncode = reset_to_bootloader(args.port, args.baud, args.target, args.action, args.accept, args.half_duplex, args.type)
return returncode
if __name__ == '__main__':
returncode = main()
    exit(returncode) |
299,474 | setup | """This is Porcupine's syntax highlighting plugin.
This plugin features two syntax highlighters with different advantages and
disadvantages. See filetypes.toml for information about configuring them.
You can change the color theme in Porcupine Settings.
"""
from __future__ import annotations
import logging
import tkinter
from typing import Callable
from pygments.lexer import LexerMeta
from porcupine import get_tab_manager, tabs, textutils, utils
from .base_highlighter import BaseHighlighter
from .pygments_highlighter import PygmentsHighlighter
from .tree_sitter_highlighter import TreeSitterHighlighter
log = logging.getLogger(__name__)
# Uses tab settings defined in filetypes.toml.
# TODO: what other plugins need this?
setup_after = ["filetypes"]
class HighlighterManager:
def __init__(self, tab: tabs.FileTab) -> None:
self._tab = tab
self._highlighter: BaseHighlighter | None = None
def on_config_changed(self, junk: object = None) -> None:
highlighter_name = self._tab.settings.get("syntax_highlighter", str)
if highlighter_name == "tree_sitter":
language_name = self._tab.settings.get("tree_sitter_language_name", str)
log.info(f"creating a tree_sitter highlighter with language {repr(language_name)}")
self._highlighter = TreeSitterHighlighter(self._tab.textwidget, language_name)
elif highlighter_name == "pygments":
lexer_class = self._tab.settings.get("pygments_lexer", LexerMeta)
log.info(f"creating a pygments highlighter with lexer class {lexer_class}")
self._highlighter = PygmentsHighlighter(self._tab.textwidget, lexer_class())
else:
log.warning(
f"bad syntax_highlighter setting {repr(highlighter_name)}, assuming 'pygments'"
)
self._tab.settings.set("syntax_highlighter", "pygments") # runs this again
return
self._highlighter.on_scroll()
def on_change_event(self, event: utils.EventWithData) -> None:
assert self._highlighter is not None
self._highlighter.on_change(event.data_class(textutils.Changes))
def on_scroll_event(self) -> None:
assert self._highlighter is not None
self._highlighter.on_scroll()
# When scrolling, don't highlight too often. Makes scrolling smoother.
def debounce(
any_widget: tkinter.Misc, function: Callable[[], None], ms_between_calls_min: int
) -> Callable[[], None]:
timeout_scheduled = False
running_requested = False
def timeout_callback() -> None:
nonlocal timeout_scheduled, running_requested
assert timeout_scheduled
if running_requested:
function()
any_widget.after(ms_between_calls_min, timeout_callback)
running_requested = False
else:
timeout_scheduled = False
def request_running() -> None:
nonlocal timeout_scheduled, running_requested
if timeout_scheduled:
running_requested = True
else:
assert not running_requested
function()
any_widget.after(ms_between_calls_min, timeout_callback)
timeout_scheduled = True
return request_running
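# Minimal usage sketch (widget and callback names are placeholders): wrap a
# callback so that repeated calls run it at most once per 100 ms.
#
#     throttled = debounce(text_widget, redraw_highlights, 100)
#     throttled(); throttled()  # the second call is coalesced into a later tick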
def on_new_filetab(tab: tabs.FileTab) -> None:
# pygments_lexer option already exists, as it is used also outside this plugin
tab.settings.add_option("syntax_highlighter", default="pygments")
tab.settings.add_option(
"tree_sitter_language_name", default="<tree_sitter_language_name not set>"
)
manager = HighlighterManager(tab)
tab.bind("<<TabSettingChanged:pygments_lexer>>", manager.on_config_changed, add=True)
tab.bind("<<TabSettingChanged:syntax_highlighter>>", manager.on_config_changed, add=True)
tab.bind("<<TabSettingChanged:tree_sitter_language_name>>", manager.on_config_changed, add=True)
manager.on_config_changed()
utils.bind_with_data(tab.textwidget, "<<ContentChanged>>", manager.on_change_event, add=True)
utils.add_scroll_command(
tab.textwidget, "yscrollcommand", debounce(tab, manager.on_scroll_event, 100)
)
def METHOD_NAME() -> None:
get_tab_manager().add_filetab_callback(on_new_filetab) |
299,475 | count sprintf parameters | from __future__ import annotations
import ast
import base64
import codecs
import re
import string
import zlib
from typing import Any, Callable, overload
from django.utils.encoding import force_str, smart_str
_sprintf_placeholder_re = re.compile(
r"%(?:\d+\$)?[+-]?(?:[ 0]|\'.{1})?-?\d*(?:\.\d+)?[bcdeEufFgGosxX]"
)
INVALID_ESCAPE = re.compile(
r"""
(?<!\\) # no backslash behind
((?:\\\\)*\\) # odd number of backslashes
(?!x[0-9a-fA-F]{2}) # char escape: \x__
(?!u[0-9a-fA-F]{4}) # char escape: \u____
(?!U[0-9a-fA-F]{8}) # char escape: \U________
(?![0-7]{1,3}) # octal escape: \_, \__, \___
(?![\\'"abfnrtv]) # other escapes: https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
""",
re.VERBOSE,
)
def unescape_string(value: str) -> str:
"""Unescapes a backslash escaped string."""
value = INVALID_ESCAPE.sub(r"\1\\", value)
return ast.literal_eval(f'"""{value}"""')
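# For example: unescape_string(r"\n") returns a real newline, while an invalid
# escape like r"\q" is left as the literal two characters backslash-q.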
def strip_lone_surrogates(string: str) -> str:
"""Removes lone surrogates."""
return string.encode("utf-8", "surrogatepass").decode("utf-8", "ignore")
@overload
def truncatechars(value: None, arg: int, ellipsis: str = ...) -> None:
...
@overload
def truncatechars(value: str, arg: int, ellipsis: str = ...) -> str:
...
def truncatechars(value: str | None, arg: int, ellipsis: str = "...") -> str | None:
# TODO (alex) could use unicode ellipsis: u'\u2026'
"""
Truncates a string after a certain number of chars.
Argument: Number of chars to truncate after.
"""
if value is None:
return value
try:
length = int(arg)
except ValueError: # Invalid literal for int().
return value # Fail silently.
if len(value) > length:
return value[: max(0, length - len(ellipsis))] + ellipsis
return value
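# For example: truncatechars("hello world", 8) == "hello..." -- the ellipsis
# counts toward the length budget, so only five original characters remain.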
def compress(value: bytes) -> str:
"""
Compresses a value for safe passage as a string.
This returns a unicode string rather than bytes, as the Django ORM works
with unicode objects.
"""
return base64.b64encode(zlib.compress(value)).decode("utf-8")
def decompress(value: str) -> bytes:
return zlib.decompress(base64.b64decode(value))
def strip(value: str | None) -> str:
if not value:
return ""
return smart_str(value).strip()
def soft_hyphenate(value: str, length: int, hyphen: str = "\u00ad") -> str:
return hyphen.join([value[i : (i + length)] for i in range(0, len(value), length)])
def soft_break(value: str, length: int, process: Callable[[str], str] = lambda chunk: chunk) -> str:
"""
Encourages soft breaking of text values above a maximum length by adding
zero-width spaces after common delimiters, as well as soft-hyphenating long
identifiers.
"""
delimiters = re.compile(r"([{}]+)".format("".join(map(re.escape, ",.$:/+@!?()<>[]{}"))))
def soft_break_delimiter(match: re.Match[str]) -> str:
results = []
value = match.group(0)
chunks = delimiters.split(value)
for i, chunk in enumerate(chunks):
if i % 2 == 1: # check if this is this a delimiter
results.extend([chunk, "\u200b"])
else:
results.append(process(chunk))
return "".join(results).rstrip("\u200b")
return re.sub(rf"\S{{{length},}}", soft_break_delimiter, value)
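# For example (writing the zero-width space as <ZWSP>):
# soft_break("a.very.long.token", 10) matches the 10+ character run and yields
# "a.<ZWSP>very.<ZWSP>long.<ZWSP>token".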
def to_unicode(value: Any) -> str:
try:
value = str(force_str(value))
except (UnicodeEncodeError, UnicodeDecodeError):
value = "(Error decoding value)"
except Exception: # in some cases we get a different exception
try:
value = str(repr(type(value)))
except Exception:
value = "(Error decoding value)"
return value
valid_dot_atom_characters = frozenset(string.ascii_letters + string.digits + ".!#$%&'*+-/=?^_`{|}~")
def is_valid_dot_atom(value: str) -> bool:
"""Validate an input string as an RFC 2822 dot-atom-text value."""
return (
isinstance(value, str) # must be a string type
and not value[0] == "."
and not value[-1] == "." # cannot start or end with a dot
and set(value).issubset(valid_dot_atom_characters)
) # can only contain valid characters
def METHOD_NAME(string: str) -> int:
"""Counts the number of sprintf parameters in a string."""
return len(_sprintf_placeholder_re.findall(string))
def codec_lookup(encoding: str, default: str = "utf-8") -> codecs.CodecInfo:
"""Safely lookup a codec and ignore non-text codecs,
falling back to a default on errors.
Note: the default value is not sanity checked and would
bypass these checks."""
def _get_default() -> codecs.CodecInfo:
return codecs.lookup(default)
if not encoding:
return _get_default()
try:
info = codecs.lookup(encoding)
except (LookupError, TypeError):
return _get_default()
try:
# Check for `CodecInfo._is_text_encoding`.
# If this attribute exists, we can assume we can operate
# with this encoding value safely. This attribute was
        # introduced in 2.7.12, so versions prior to this will
# raise, but this is the best we can do.
if not info._is_text_encoding: # type: ignore[attr-defined] # python/typeshed#10354
return _get_default()
except AttributeError:
pass
    # `undefined` is a special encoding in python that 100% of
# the time will raise, so ignore it.
if info.name == "undefined":
return _get_default()
return info
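# For example: codec_lookup("utf8").name == "utf-8", while a non-text codec
# such as "zip" (zlib_codec) or an unknown name falls back to the utf-8 default.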
def oxfordize_list(strings: list[str]) -> str:
"""Given a list of strings, formats them correctly given the length of the
list. For example:
oxfordize_list(['A']) => 'A'
oxfordize_list(['A', 'B']) => 'A and B'
oxfordize_list(['A', 'B', 'C']) => 'A, B, and C'
"""
if len(strings) == 0:
return ""
elif len(strings) == 1:
return strings[0]
elif len(strings) == 2:
return f"{strings[0]} and {strings[1]}"
return f"{', '.join(strings[:-1])}, and {strings[-1]}"
def to_single_line_str(original_str: str) -> str:
return " ".join(original_str.strip().split()) |
299,476 | numba funcify extract diag | from textwrap import indent
import numpy as np
from aesara.link.numba.dispatch import basic as numba_basic
from aesara.link.numba.dispatch.basic import _numba_funcify, create_tuple_string
from aesara.link.utils import compile_function_src, unique_name_generator
from aesara.tensor.basic import (
Alloc,
AllocDiag,
AllocEmpty,
ARange,
ExtractDiag,
Eye,
Join,
MakeVector,
ScalarFromTensor,
Split,
TensorFromScalar,
)
from aesara.tensor.shape import Unbroadcast
@_numba_funcify.register(AllocEmpty)
def numba_funcify_AllocEmpty(op, node, **kwargs):
global_env = {
"np": np,
"to_scalar": numba_basic.to_scalar,
"dtype": np.dtype(op.dtype),
}
unique_names = unique_name_generator(
["np", "to_scalar", "dtype", "allocempty", "scalar_shape"], suffix_sep="_"
)
shape_var_names = [unique_names(v, force_unique=True) for v in node.inputs]
shape_var_item_names = [f"{name}_item" for name in shape_var_names]
shapes_to_items_src = indent(
"\n".join(
[
f"{item_name} = to_scalar({shape_name})"
for item_name, shape_name in zip(shape_var_item_names, shape_var_names)
]
),
" " * 4,
)
alloc_def_src = f"""
def allocempty({", ".join(shape_var_names)}):
{shapes_to_items_src}
scalar_shape = {create_tuple_string(shape_var_item_names)}
return np.empty(scalar_shape, dtype)
"""
alloc_fn = compile_function_src(
alloc_def_src, "allocempty", {**globals(), **global_env}
)
return numba_basic.numba_njit(alloc_fn)
@_numba_funcify.register(Alloc)
def numba_funcify_Alloc(op, node, **kwargs):
global_env = {"np": np, "to_scalar": numba_basic.to_scalar}
unique_names = unique_name_generator(
["np", "to_scalar", "alloc", "val_np", "val", "scalar_shape", "res"],
suffix_sep="_",
)
shape_var_names = [unique_names(v, force_unique=True) for v in node.inputs[1:]]
shape_var_item_names = [f"{name}_item" for name in shape_var_names]
shapes_to_items_src = indent(
"\n".join(
[
f"{item_name} = to_scalar({shape_name})"
for item_name, shape_name in zip(shape_var_item_names, shape_var_names)
]
),
" " * 4,
)
alloc_def_src = f"""
def alloc(val, {", ".join(shape_var_names)}):
val_np = np.asarray(val)
{shapes_to_items_src}
scalar_shape = {create_tuple_string(shape_var_item_names)}
res = np.empty(scalar_shape, dtype=val_np.dtype)
res[...] = val_np
return res
"""
alloc_fn = compile_function_src(alloc_def_src, "alloc", {**globals(), **global_env})
return numba_basic.numba_njit(alloc_fn)
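# For illustration (the variable names depend on the node's inputs): for an
# Alloc with two shape inputs, the generated ``alloc_def_src`` looks roughly
# like
#
#     def alloc(val, sh_0, sh_1):
#         val_np = np.asarray(val)
#         sh_0_item = to_scalar(sh_0)
#         sh_1_item = to_scalar(sh_1)
#         scalar_shape = (sh_0_item, sh_1_item)
#         res = np.empty(scalar_shape, dtype=val_np.dtype)
#         res[...] = val_np
#         return res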
@_numba_funcify.register(AllocDiag)
def numba_funcify_AllocDiag(op, **kwargs):
offset = op.offset
@numba_basic.numba_njit(inline="always")
def allocdiag(v):
return np.diag(v, k=offset)
return allocdiag
@_numba_funcify.register(ARange)
def numba_funcify_ARange(op, **kwargs):
dtype = np.dtype(op.dtype)
@numba_basic.numba_njit(inline="always")
def arange(start, stop, step):
return np.arange(
numba_basic.to_scalar(start),
numba_basic.to_scalar(stop),
numba_basic.to_scalar(step),
dtype=dtype,
)
return arange
@_numba_funcify.register(Join)
def numba_funcify_Join(op, **kwargs):
view = op.view
if view != -1:
# TODO: Where (and why) is this `Join.view` even being used? From a
# quick search, the answer appears to be "nowhere", so we should
# probably just remove it.
raise NotImplementedError("The `view` parameter to `Join` is not supported")
@numba_basic.numba_njit
def join(axis, *tensors):
return np.concatenate(tensors, numba_basic.to_scalar(axis))
return join
@_numba_funcify.register(Split)
def numba_funcify_Split(op, **kwargs):
@numba_basic.numba_njit
def split(tensor, axis, indices):
# Work around for https://github.com/numba/numba/issues/8257
axis = axis % tensor.ndim
axis = numba_basic.to_scalar(axis)
return np.split(tensor, np.cumsum(indices)[:-1], axis=axis)
return split
@_numba_funcify.register(ExtractDiag)
def METHOD_NAME(op, **kwargs):
offset = op.offset
# axis1 = op.axis1
# axis2 = op.axis2
@numba_basic.numba_njit(inline="always")
def extract_diag(x):
return np.diag(x, k=offset)
return extract_diag
@_numba_funcify.register(Eye)
def numba_funcify_Eye(op, **kwargs):
dtype = np.dtype(op.dtype)
@numba_basic.numba_njit(inline="always")
def eye(N, M, k):
return np.eye(
numba_basic.to_scalar(N),
numba_basic.to_scalar(M),
numba_basic.to_scalar(k),
dtype=dtype,
)
return eye
@_numba_funcify.register(MakeVector)
def numba_funcify_MakeVector(op, node, **kwargs):
dtype = np.dtype(op.dtype)
global_env = {"np": np, "to_scalar": numba_basic.to_scalar, "dtype": dtype}
unique_names = unique_name_generator(
["np", "to_scalar"],
suffix_sep="_",
)
input_names = [unique_names(v, force_unique=True) for v in node.inputs]
def create_list_string(x):
args = ", ".join([f"to_scalar({i})" for i in x] + ([""] if len(x) == 1 else []))
return f"[{args}]"
makevector_def_src = f"""
def makevector({", ".join(input_names)}):
return np.array({create_list_string(input_names)}, dtype=dtype)
"""
makevector_fn = compile_function_src(
makevector_def_src, "makevector", {**globals(), **global_env}
)
return numba_basic.numba_njit(makevector_fn)
@_numba_funcify.register(Unbroadcast)
def numba_funcify_Unbroadcast(op, **kwargs):
@numba_basic.numba_njit
def unbroadcast(x):
return x
return unbroadcast
@_numba_funcify.register(TensorFromScalar)
def numba_funcify_TensorFromScalar(op, **kwargs):
@numba_basic.numba_njit(inline="always")
def tensor_from_scalar(x):
return np.array(x)
return tensor_from_scalar
@_numba_funcify.register(ScalarFromTensor)
def numba_funcify_ScalarFromTensor(op, **kwargs):
@numba_basic.numba_njit(inline="always")
def scalar_from_tensor(x):
return numba_basic.to_scalar(x)
return scalar_from_tensor |
299,477 | generate classification model | import os
import numpy as np
import xgboost
kRounds = 2
kRows = 1000
kCols = 4
kForests = 2
kMaxDepth = 2
kClasses = 3
np.random.seed(1994)
X = np.random.randn(kRows, kCols)
w = np.random.uniform(size=kRows)
version = xgboost.__version__
target_dir = 'models'
def booster_bin(model):
return os.path.join(target_dir,
'xgboost-' + version + '.' + model + '.bin')
def booster_json(model):
return os.path.join(target_dir,
'xgboost-' + version + '.' + model + '.json')
def skl_bin(model):
return os.path.join(target_dir,
'xgboost_scikit-' + version + '.' + model + '.bin')
def skl_json(model):
return os.path.join(target_dir,
'xgboost_scikit-' + version + '.' + model + '.json')
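# For illustration (assuming xgboost 1.7.1 is installed): booster_bin('reg')
# returns 'models/xgboost-1.7.1.reg.bin' and skl_json('cls') returns
# 'models/xgboost_scikit-1.7.1.cls.json'.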
def generate_regression_model():
print('Regression')
y = np.random.randn(kRows)
data = xgboost.DMatrix(X, label=y, weight=w)
booster = xgboost.train({'tree_method': 'hist',
'num_parallel_tree': kForests,
'max_depth': kMaxDepth},
num_boost_round=kRounds, dtrain=data)
booster.save_model(booster_bin('reg'))
booster.save_model(booster_json('reg'))
reg = xgboost.XGBRegressor(tree_method='hist',
num_parallel_tree=kForests,
max_depth=kMaxDepth,
n_estimators=kRounds)
reg.fit(X, y, w)
reg.save_model(skl_bin('reg'))
reg.save_model(skl_json('reg'))
def generate_logistic_model():
print('Logistic')
y = np.random.randint(0, 2, size=kRows)
assert y.max() == 1 and y.min() == 0
for objective, name in [('binary:logistic', 'logit'), ('binary:logitraw', 'logitraw')]:
data = xgboost.DMatrix(X, label=y, weight=w)
booster = xgboost.train({'tree_method': 'hist',
'num_parallel_tree': kForests,
'max_depth': kMaxDepth,
'objective': objective},
num_boost_round=kRounds, dtrain=data)
booster.save_model(booster_bin(name))
booster.save_model(booster_json(name))
reg = xgboost.XGBClassifier(tree_method='hist',
num_parallel_tree=kForests,
max_depth=kMaxDepth,
n_estimators=kRounds,
objective=objective)
reg.fit(X, y, w)
reg.save_model(skl_bin(name))
reg.save_model(skl_json(name))
def METHOD_NAME():
print('Classification')
y = np.random.randint(0, kClasses, size=kRows)
data = xgboost.DMatrix(X, label=y, weight=w)
booster = xgboost.train({'num_class': kClasses,
'tree_method': 'hist',
'num_parallel_tree': kForests,
'max_depth': kMaxDepth},
num_boost_round=kRounds, dtrain=data)
booster.save_model(booster_bin('cls'))
booster.save_model(booster_json('cls'))
cls = xgboost.XGBClassifier(tree_method='hist',
num_parallel_tree=kForests,
max_depth=kMaxDepth,
n_estimators=kRounds)
cls.fit(X, y, w)
cls.save_model(skl_bin('cls'))
cls.save_model(skl_json('cls'))
def generate_ranking_model():
print('Learning to Rank')
y = np.random.randint(5, size=kRows)
w = np.random.uniform(size=20)
g = np.repeat(50, 20)
data = xgboost.DMatrix(X, y, weight=w)
data.set_group(g)
booster = xgboost.train({'objective': 'rank:ndcg',
'num_parallel_tree': kForests,
'tree_method': 'hist',
'max_depth': kMaxDepth},
num_boost_round=kRounds,
dtrain=data)
booster.save_model(booster_bin('ltr'))
booster.save_model(booster_json('ltr'))
ranker = xgboost.sklearn.XGBRanker(n_estimators=kRounds,
tree_method='hist',
objective='rank:ndcg',
max_depth=kMaxDepth,
num_parallel_tree=kForests)
ranker.fit(X, y, g, sample_weight=w)
ranker.save_model(skl_bin('ltr'))
ranker.save_model(skl_json('ltr'))
def write_versions():
versions = {'numpy': np.__version__,
'xgboost': version}
with open(os.path.join(target_dir, 'version'), 'w') as fd:
fd.write(str(versions))
if __name__ == '__main__':
if not os.path.exists(target_dir):
os.mkdir(target_dir)
generate_regression_model()
generate_logistic_model()
METHOD_NAME()
generate_ranking_model()
write_versions() |
299,478 | add parameter type | # -*- coding: utf-8 -*-
#
# function_symbol.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
from pynestml.symbols.symbol import Symbol, SymbolKind
class FunctionSymbol(Symbol):
"""
This class is used to store a single function symbol, e.g. the definition of the function max.
Attributes:
param_types (list(TypeSymbol)): A list of the types of parameters.
return_type (type_symbol): The type of the returned value.
        is_predefined (bool): Indicates whether this function is predefined or not.
"""
def __init__(self, name, param_types, return_type, element_reference, scope=None, is_predefined=False):
"""
Standard constructor.
:param name: the name of the function symbol.
:type name: str
:param param_types: a list of argument types.
:type param_types: list(TypeSymbol)
:param return_type: the return type of the function.
:type return_type: Union(TypeSymbol,None)
:param element_reference: a reference to the ASTFunction which corresponds to this symbol (if not predefined)
:type element_reference: ast_function or None
:param scope: a reference to the scope in which this symbol is defined in
:type scope: Scope
:param is_predefined: True, if this element is a predefined one, otherwise False.
:type is_predefined: bool
"""
super(FunctionSymbol, self).__init__(element_reference=element_reference, scope=scope,
name=name, symbol_kind=SymbolKind.FUNCTION)
self.param_types = param_types
self.return_type = return_type
self.is_predefined = is_predefined
def print_symbol(self) -> str:
"""
Returns a string representation of this symbol.
"""
ret = 'FunctionSymbol[' + self.get_symbol_name() + ', Parameters = {'
        for i, arg in enumerate(self.param_types):
            ret += arg.print_symbol()
            if i < len(self.param_types) - 1:  # if this is not the last arg, also print a comma
                ret += ','
ret += '}, return type = ' + (self.get_return_type().print_symbol())
ret += ', @['
if self.get_referenced_object() is not None:
ret += str(self.get_referenced_object().get_source_position())
else:
ret += 'predefined'
ret += ']'
return ret
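    # For illustration (a hypothetical predefined two-argument function named
    # "max" would print as):
    #     FunctionSymbol[max, Parameters = {integer,integer}, return type = integer, @[predefined]]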
def get_return_type(self):
"""
Returns the return type of this function symbol
:return: a single type symbol.
:rtype: type_symbol
"""
return self.return_type
def set_return_type(self, new_type):
"""
Sets the return type to the handed over one.
:param new_type: a single type symbol
:type new_type: type_symbol
"""
self.return_type = new_type
def get_parameter_types(self):
"""
Returns a list of all parameter types.
:return: a list of parameter types.
:rtype: list(TypeSymbol)
"""
return self.param_types
def METHOD_NAME(self, new_type):
"""
Adds the handed over type to the list of argument types.
:param new_type: a single type symbol
:type new_type: type_symbol
"""
self.param_types.append(new_type)
def equals(self, _other=None):
"""
Compares the handed over instance of function symbol to this one and returns true, if the they are equal.
:param _other: a different function symbol
:type _other: FunctionSymbol
:return: True if equal, otherwise False.
:rtype: bool
"""
if not isinstance(_other, FunctionSymbol):
return False
if not self.name == _other.get_symbol_name():
return False
if not self.return_type.equals(_other.return_type):
return False
if len(self.param_types) != len(_other.get_parameter_types()):
return False
other_args = _other.get_parameter_types()
for i in range(0, len(self.param_types)):
if not self.param_types[i].equals(other_args[i]):
return False
return True |
299,479 | applications | """
This is the main file, run via `flask run`, for the mock Connect server.
"""
import sys
from os.path import basename
# noinspection PyPackageRequirements
from flask import Flask, Blueprint, g, request, url_for, send_file
from .data import (
Application,
AppMode,
Bundle,
Content,
Task,
get_data_dump,
default_server_settings,
)
from .http_helpers import endpoint, error
app = Flask(__name__)
api = Blueprint("api", __name__)
@app.route("/")
def index():
return (
"""<html>
<head><title>Posit Connect -- Mocked</title></head><body>
<h1>Posit Connect -- Mocked</h1>
<p>Welcome to the mocked Posit Connect!
<hr>
%s
</body></html>
"""
% get_data_dump()
)
@api.route("me")
@endpoint(authenticated=True, writes_json=True)
def me():
return g.user
@api.route("applications", methods=["GET", "POST"])
@endpoint(authenticated=True, writes_json=True)
def METHOD_NAME():
if request.method == "POST":
connect_app = request.get_json(force=True)
name = connect_app.get("name")
if name and Application.get_app_by_name(name) is not None:
return error(409, "An object with that name already exists.")
title = connect_app["title"] if "title" in connect_app else ""
return Application(
name=name,
title=title,
owner_username=g.user.username,
owner_first_name=g.user.first_name,
owner_last_name=g.user.last_name,
owner_email=g.user.email,
owner_locked=g.user.locked,
_base_url=url_for("index", _external=True),
)
else:
count = int(request.args.get("count", 10000))
search = request.args.get("search")
def match(app_to_match):
return search is None or app_to_match.title.startswith(search)
matches = list(filter(match, Application.get_all_objects()))[:count]
return {
"count": len(matches),
"total": len(matches),
"applications": matches,
}
# noinspection PyUnresolvedReferences
@api.route("applications/<object_id>", methods=["GET", "POST"])
@endpoint(authenticated=True, cls=Application, writes_json=True)
def get_application(connect_app):
if request.method == "POST":
connect_app.update_from(request.get_json(force=True))
return connect_app
# noinspection PyUnresolvedReferences
@api.route("applications/<object_id>/config")
@endpoint(authenticated=True, cls=Application, writes_json=True)
def config(connect_app):
return {"config_url": connect_app.url}
# noinspection PyUnresolvedReferences
@api.route("applications/<object_id>/upload", methods=["POST"])
@endpoint(authenticated=True, cls=Application, writes_json=True)
def upload(connect_app):
return Bundle(app_id=connect_app.id, _tar_data=request.data)
# noinspection PyUnresolvedReferences
@api.route("applications/<object_id>/deploy", methods=["POST"])
@endpoint(authenticated=True, cls=Application, writes_json=True)
def deploy(connect_app):
bundle_id = request.get_json(force=True).get("bundle")
if bundle_id is None:
return error(400, "bundle_id is required") # message and status code probably wrong
bundle = Bundle.get_object(bundle_id)
if bundle is None:
return error(404, "bundle %s not found" % bundle_id) # message and status code probably wrong
manifest = bundle.get_manifest()
old_app_mode = connect_app.app_mode
# noinspection SpellCheckingInspection
new_app_mode = AppMode.value_of(manifest["metadata"]["appmode"])
if old_app_mode is not None and old_app_mode != new_app_mode:
return error(400, "Cannot change app mode once deployed") # message and status code probably wrong
connect_app.bundle_deployed(bundle, new_app_mode)
return Task()
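# For illustration (IDs and port are assumptions): deploying bundle 1 to
# application 2 against this mock would look like
#
#     curl -X POST -d '{"bundle": 1}' \
#          http://localhost:5000/__api__/applications/2/deploy
#
# plus whatever credentials the @endpoint(authenticated=True) wrapper expects.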
# noinspection PyUnresolvedReferences
@api.route("tasks/<object_id>")
@endpoint(authenticated=True, cls=Task, writes_json=True)
def get_task(task):
return task
@api.route("server_settings")
@endpoint(authenticated=True, auth_optional=True, writes_json=True)
def server_settings():
settings = default_server_settings.copy()
# If the endpoint was hit with a valid user, fill in some extra stuff.
if g.user is not None:
settings["version"] = "1.8.1-9999"
settings["build"] = '"9709a0fd93"'
settings["about"] = "RStudio Connect v1.8.1-9999"
return settings
@api.route("v1/server_settings/python")
@endpoint(authenticated=True, writes_json=True)
def python_settings():
v = sys.version_info
v = "%d.%d.%d" % (v[0], v[1], v[2])
return {
"installations": [{"version": v}],
"api_enabled": True,
"conda_enabled": False,
}
# noinspection PyUnresolvedReferences
@app.route("/content/apps/<object_id>")
@endpoint(cls=Application)
def get_content(connect_app):
bundle = connect_app.get_bundle()
if bundle is None:
return error(400, "The content has not been deployed.") # message and status code probably wrong
return bundle.get_rendered_content()
# noinspection PyUnresolvedReferences
@api.route("v1/content/<object_id>")
@endpoint(authenticated=True, cls=Content, writes_json=True)
def v1_get_content(content):
return content
# noinspection PyUnresolvedReferences
@api.route("v1/content")
@endpoint(authenticated=True, writes_json=True)
def v1_content():
return list(Content.get_all_objects())
# This endpoint is kind of a cheat; we don't actually do any validation
# that the requested bundle belongs to this piece of content.
# noinspection PyUnresolvedReferences
@api.route("v1/content/<content_id>/bundles/<object_id>/download")
@endpoint(authenticated=True, cls=Bundle)
def v1_content_bundle_download(bundle: Bundle, content_id):
print(content_id)
return send_file(
bundle.read_bundle_data(),
mimetype="application/tar+gzip",
as_attachment=True,
download_name=basename(bundle._tar_file) if bundle._tar_file else None,
)
@api.route("v1/content/<object_id>/build", methods=["POST"])
@endpoint(authenticated=True, writes_json=True)
def v1_content_build():
bundle_id = request.get_json(force=True).get("bundle_id")
if bundle_id is None:
return error(400, "bundle_id is required") # message and status code probably wrong
task = Task()
return {"task_id": task.id}
app.register_blueprint(api, url_prefix="/__api__") |
299,480 | key pressed | # Copyright (C) 2010 by Brian Parma
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 1, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from gi.repository import Gdk
from gi.repository import Gtk
from gi.repository import GObject
class CellRendererDays(Gtk.CellRendererText):
'''Custom Cell Renderer for showing a ListView of 7 days with checkboxes, based off pygtk FAQ example'''
__gtype_name__ = 'CellRendererDays'
__gproperties__ = {
'days': (object, 'days', 'List of enabled days', GObject.ParamFlags.READWRITE)
}
__gsignals__ = {
'days-changed': (GObject.SignalFlags.RUN_FIRST, None, (str, object))
}
property_names = list(__gproperties__.keys())
def __init__(self):
Gtk.CellRendererText.__init__(self)
self.model = Gtk.ListStore(bool, str)
self.view = None
self.view_window = None
for day in [
'Sunday',
'Monday',
'Tuesday',
'Wednesday',
'Thursday',
'Friday',
'Saturday',
]:
self.model.append([True, day])
self.set_property('text', 'Edit me')
def _create_view(self, treeview):
'''Create the Window and View to display when editing'''
self.view_window = Gtk.Window()
self.view_window.set_decorated(False)
self.view_window.set_property('skip-taskbar-hint', True)
self.view = Gtk.TreeView()
self.view.set_model(self.model)
self.view.set_headers_visible(False)
cr = Gtk.CellRendererToggle()
cr.connect('toggled', self._toggle)
col = Gtk.TreeViewColumn('Enabled', cr, active=0)
self.view.append_column(col)
cr = Gtk.CellRendererText()
col = Gtk.TreeViewColumn('Day', cr, text=1)
self.view.append_column(col)
# events
self.view.connect('focus-out-event', self._close)
self.view.connect('key-press-event', self.METHOD_NAME)
# should be automatic
self.view_window.set_modal(False)
self.view_window.set_transient_for(None) # cancel the modality of dialog
self.view_window.add(self.view)
# necessary for getting the (width, height) of calendar_window
self.view.show()
self.view_window.realize()
def do_set_property(self, pspec, value):
'''Set property overload'''
setattr(self, pspec.name, value)
def do_get_property(self, pspec):
'''Get property overload'''
return getattr(self, pspec.name)
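    # Typical wiring (illustrative; the model column index is a placeholder):
    #     cr = CellRendererDays()
    #     cr.set_property('editable', True)
    #     cr.connect('days-changed', lambda cell, path, days: ...)
    #     col = Gtk.TreeViewColumn('Days', cr, days=0)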
def do_start_editing(
self, event, treeview, path, background_area, cell_area, flags
):
'''Called when user starts editing the cell'''
if not self.get_property('editable'):
return
# create window/view if it doesn't exist
if not self.view_window:
self._create_view(treeview)
else:
self.view_window.show()
# set display to reflect 'days' property
for i, row in enumerate(self.model):
row[0] = self.days[i]
# position the popup below the edited cell (and try hard to keep the popup within the toplevel window)
(tree_x, tree_y) = treeview.get_bin_window().get_origin()[1:]
(tree_w, tree_h) = treeview.get_window().get_geometry()[2:4]
(my_w, my_h) = self.view_window.get_window().get_geometry()[2:4]
x = tree_x + min(cell_area.x, tree_w - my_w + treeview.get_visible_rect().x)
y = tree_y + min(cell_area.y, tree_h - my_h + treeview.get_visible_rect().y)
self.view_window.move(x, y)
        # save the path so we can emit it from _done; we aren't using a dialog, so we can't block here
self._path = path
return None # don't return any editable, our Gtk.Dialog did the work already
def _done(self):
'''Called when we are done editing'''
days = [row[0] for row in self.model]
if days != self.days:
self.emit('days-changed', self._path, days)
self.view_window.hide()
def METHOD_NAME(self, view, event):
'''Key pressed event handler, finish editing on Return'''
# event == None for day selected via double-click
if (
not event
or event.type == Gdk.EventType.KEY_PRESS
and Gdk.keyval_name(event.keyval) == 'Return'
):
self._done()
return True
def _toggle(self, cell, path):
'''Checkbox toggle event handler'''
active = self.model[path][0]
self.model[path][0] = not active
return True
def _close(self, view, event):
'''Focus-out-event handler'''
self._done()
return True |
299,481 | stdout keyword | from typing import List
from .config_dict import ConfigDict
from .config_schema_item import SchemaItem
from .deprecation_info import DeprecationInfo
from .ext_job_keywords import ExtJobKeys
from .schema_dict import SchemaItemDict
from .schema_item_type import SchemaItemType
def executable_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.EXECUTABLE,
required_set=True,
type_map=[SchemaItemType.EXECUTABLE],
)
def stdin_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.STDIN,
type_map=[SchemaItemType.STRING],
required_set=False,
)
def METHOD_NAME() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.STDOUT,
type_map=[SchemaItemType.STRING],
required_set=False,
)
def stderr_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.STDERR,
type_map=[SchemaItemType.STRING],
required_set=False,
)
def start_file_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.START_FILE,
type_map=[SchemaItemType.STRING],
)
def target_file_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.TARGET_FILE,
type_map=[SchemaItemType.STRING],
)
def error_file_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.ERROR_FILE,
type_map=[SchemaItemType.STRING],
)
def max_running_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.MAX_RUNNING, type_map=[SchemaItemType.INT], required_set=False
)
def max_running_minutes_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.MAX_RUNNING_MINUTES,
type_map=[SchemaItemType.INT],
required_set=False,
)
def min_arg_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.MIN_ARG,
type_map=[SchemaItemType.INT],
required_set=False,
)
def max_arg_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.MAX_ARG,
type_map=[SchemaItemType.INT],
required_set=False,
)
def arglist_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.ARGLIST,
argc_max=None,
)
def arg_type_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.ARG_TYPE,
argc_min=2,
argc_max=2,
type_map=[SchemaItemType.INT, SchemaItemType.STRING],
multi_occurrence=True,
)
def env_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.ENV,
argc_min=2,
argc_max=2,
multi_occurrence=True,
type_map=[SchemaItemType.STRING, SchemaItemType.STRING],
)
def exec_env_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.EXEC_ENV,
argc_min=2,
argc_max=2,
multi_occurrence=True,
type_map=[SchemaItemType.STRING, SchemaItemType.STRING],
)
def default_keyword() -> SchemaItem:
return SchemaItem(
kw=ExtJobKeys.DEFAULT,
argc_min=2,
argc_max=2,
multi_occurrence=True,
type_map=[SchemaItemType.STRING, SchemaItemType.STRING],
)
ext_job_schema_items: List[SchemaItem] = [
executable_keyword(),
stdin_keyword(),
METHOD_NAME(),
stderr_keyword(),
start_file_keyword(),
target_file_keyword(),
error_file_keyword(),
max_running_keyword(),
max_running_minutes_keyword(),
min_arg_keyword(),
max_arg_keyword(),
arglist_keyword(),
default_keyword(), # Default values for args
arg_type_keyword(),
env_keyword(),
exec_env_keyword(),
]
ext_job_deprecations: List[DeprecationInfo] = [
DeprecationInfo(
keyword="PORTABLE_EXE",
message='"PORTABLE_EXE" key is deprecated, please replace with "EXECUTABLE"',
)
]
class ExtJobSchemaItemDict(SchemaItemDict):
def check_required(
self,
config_dict: ConfigDict,
filename: str,
) -> None:
self.search_for_deprecated_keyword_usages(
config_dict=config_dict,
filename=filename,
)
self.search_for_unset_required_keywords(
config_dict=config_dict, filename=filename
)
def init_ext_job_schema() -> ExtJobSchemaItemDict:
schema = ExtJobSchemaItemDict()
for item in ext_job_schema_items:
schema[item.kw] = item
schema.add_deprecations(ext_job_deprecations)
return schema |
299,482 | reset device | #!/usr/bin/env python3
# SPDX-License-Identifier: BSD-3-Clause
# Copyright(c) 2020 Intel Corporation
"""
Configure an entire Intel DSA instance, using idxd kernel driver, for DPDK use
"""
import sys
import argparse
import os
import os.path
class SysfsDir:
"Used to read/write paths in a sysfs directory"
verbose = False
def __init__(self, path):
self.path = path
def read_int(self, filename):
"Return a value from sysfs file"
if SysfsDir.verbose:
print(f"Reading '{filename}' in {self.path}")
with open(os.path.join(self.path, filename)) as f:
return int(f.readline())
def write_values(self, values):
"write dictionary, where key is filename and value is value to write"
for filename, contents in values.items():
if SysfsDir.verbose:
print(f"Writing '{contents}' to '{filename}' in {self.path}")
with open(os.path.join(self.path, filename), "w") as f:
f.write(str(contents))
def get_drv_dir(dtype):
"Get the sysfs path for the driver, either 'idxd' or 'user'"
drv_dir = "/sys/bus/dsa/drivers/" + dtype
if not os.path.exists(drv_dir):
return "/sys/bus/dsa/drivers/dsa"
return drv_dir
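# note (assumption about kernel versions): newer idxd drivers expose per-type
# directories such as /sys/bus/dsa/drivers/idxd, while older kernels only have
# the combined /sys/bus/dsa/drivers/dsa directory we fall back to above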
def METHOD_NAME(dsa_id):
"Reset the DSA device and all its queues"
drv_dir = SysfsDir(get_drv_dir("idxd"))
drv_dir.write_values({"unbind": f"dsa{dsa_id}"})
def get_pci_dir(pci):
"Search for the sysfs directory of the PCI device"
base_dir = '/sys/bus/pci/devices/'
for path, dirs, files in os.walk(base_dir):
for dir in dirs:
if pci in dir:
return os.path.join(base_dir, dir)
sys.exit(f"Could not find sysfs directory for device {pci}")
def get_dsa_id(pci):
"Get the DSA instance ID using the PCI address of the device"
pci_dir = get_pci_dir(pci)
for path, dirs, files in os.walk(pci_dir):
for dir in dirs:
if dir.startswith('dsa') and 'wq' not in dir:
return int(dir[3:])
sys.exit(f"Could not get device ID for device {pci}")
def parse_wq_opts(wq_opts):
"Parse user-specified queue configuration, creating a dict of options"
try:
return {o.split('=')[0]: o.split('=')[1] for o in wq_opts}
except ValueError:
sys.exit("Invalid --wq-option format, use format 'option=value'")
def configure_dsa(dsa_id, args):
"Configure the DSA instance with appropriate number of queues"
dsa_dir = SysfsDir(f"/sys/bus/dsa/devices/dsa{dsa_id}")
max_groups = dsa_dir.read_int("max_groups")
max_engines = dsa_dir.read_int("max_engines")
max_queues = dsa_dir.read_int("max_work_queues")
max_work_queues_size = dsa_dir.read_int("max_work_queues_size")
nb_queues = min(args.q, max_queues)
if args.q > nb_queues:
print(f"Setting number of queues to max supported value: {max_queues}")
# we want one engine per group, and no more engines than queues
nb_groups = min(max_engines, max_groups, nb_queues)
for grp in range(nb_groups):
dsa_dir.write_values({f"engine{dsa_id}.{grp}/group_id": grp})
# configure each queue
for q in range(nb_queues):
wqcfg = {"group_id": q % nb_groups,
"type": "user",
"mode": "dedicated",
"name": f"{args.prefix}_wq{dsa_id}.{q}",
"priority": 1,
"max_batch_size": 1024,
"size": int(max_work_queues_size / nb_queues)}
wqcfg.update(parse_wq_opts(args.wq_option))
wq_dir = SysfsDir(os.path.join(dsa_dir.path, f"wq{dsa_id}.{q}"))
wq_dir.write_values(wqcfg)
# enable device and then queues
idxd_dir = SysfsDir(get_drv_dir("idxd"))
idxd_dir.write_values({"bind": f"dsa{dsa_id}"})
user_dir = SysfsDir(get_drv_dir("user"))
for q in range(nb_queues):
user_dir.write_values({"bind": f"wq{dsa_id}.{q}"})
def main(args):
"Main function, does arg parsing and calls config function"
arg_p = argparse.ArgumentParser(
description="Configure whole DSA device instance for DPDK use")
arg_p.add_argument('dsa_id',
help="Specify DSA instance either via DSA instance number or PCI address")
arg_p.add_argument('-q', metavar='queues', type=int, default=255,
help="Number of queues to set up")
arg_p.add_argument('--name-prefix', metavar='prefix', dest='prefix',
default="dpdk",
help="Prefix for workqueue name to mark for DPDK use [default: 'dpdk']")
arg_p.add_argument('--wq-option', action='append', default=[],
help="Provide additional config option for queues (format 'x=y')")
arg_p.add_argument('--verbose', '-v', action='store_true',
help="Provide addition info on tasks being performed")
arg_p.add_argument('--reset', action='store_true',
help="Reset DSA device and its queues")
parsed_args = arg_p.parse_args(args[1:])
dsa_id = parsed_args.dsa_id
dsa_id = get_dsa_id(dsa_id) if ':' in dsa_id else dsa_id
SysfsDir.verbose = parsed_args.verbose
if parsed_args.reset:
METHOD_NAME(dsa_id)
else:
configure_dsa(dsa_id, parsed_args)
if __name__ == "__main__":
main(sys.argv) |
299,483 | test generate entry points txt from stevedore | # Copyright 2023 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from textwrap import dedent
import pytest
from pants.backend.python.framework.stevedore.python_target_dependencies import (
rules as stevedore_dep_rules,
)
from pants.backend.python.framework.stevedore.rules import (
GenerateEntryPointsTxtFromStevedoreExtensionRequest,
)
from pants.backend.python.framework.stevedore.rules import rules as stevedore_rules
from pants.backend.python.framework.stevedore.target_types import StevedoreNamespace
from pants.backend.python.goals.pytest_runner import PytestPluginSetup
from pants.backend.python.macros.python_artifact import PythonArtifact
from pants.backend.python.target_types import (
PythonDistribution,
PythonSourcesGeneratorTarget,
PythonSourceTarget,
PythonTestsGeneratorTarget,
PythonTestTarget,
)
from pants.backend.python.target_types_rules import rules as python_target_types_rules
from pants.engine.addresses import Address
from pants.engine.fs import EMPTY_DIGEST, CreateDigest, Digest, FileContent
from pants.testutil.rule_runner import QueryRule, RuleRunner
# random set of runner names to use in tests
st2_runners = ["noop", "python", "foobar"]
@pytest.fixture
def rule_runner() -> RuleRunner:
return RuleRunner(
rules=[
*python_target_types_rules(),
*stevedore_dep_rules(),
*stevedore_rules(),
QueryRule(
PytestPluginSetup,
(GenerateEntryPointsTxtFromStevedoreExtensionRequest,),
),
],
target_types=[
PythonSourceTarget,
PythonSourcesGeneratorTarget,
PythonTestTarget,
PythonTestsGeneratorTarget,
PythonDistribution,
],
objects={
"python_artifact": PythonArtifact,
"stevedore_namespace": StevedoreNamespace,
},
)
# based on get_snapshot from pantsbuild/pants.git/src/python/pants/backend/python/lint/black/rules_integration_test.py
def get_digest(rule_runner: RuleRunner, source_files: dict[str, str]) -> Digest:
files = [FileContent(path, content.encode()) for path, content in source_files.items()]
return rule_runner.request(Digest, [CreateDigest(files)])
def METHOD_NAME(
rule_runner: RuleRunner,
) -> None:
rule_runner.write_files(
{
"src/one_ns/BUILD": dedent(
"""\
python_tests(
name="tests",
stevedore_namespaces=["st2common.runners.runner"],
)
"""
),
"src/one_ns/test_something.py": "",
"src/two_ns/BUILD": dedent(
"""\
python_tests(
name="tests",
stevedore_namespaces=[
"st2common.runners.runner",
"some.thing.else",
],
)
"""
),
"src/two_ns/test_something.py": "",
"src/no_deps/BUILD": dedent(
"""\
python_tests(
name="tests",
stevedore_namespaces=["namespace.without.implementations"],
)
"""
),
"src/no_deps/test_something.py": "",
}
)
for runner in st2_runners:
rule_runner.write_files(
{
f"runners/{runner}_runner/BUILD": dedent(
# to test consistent sorting, reverse sort by namespace
# and then reverse sort entry_points by key.
f"""\
python_distribution(
provides=python_artifact(
name="stackstorm-runner-{runner}",
),
entry_points={{
stevedore_namespace("st2common.runners.runner"): {{
"{runner}": "{runner}_runner.{runner}_runner",
}},
stevedore_namespace("some.thing.else"): {{
"{runner}2": "{runner}_runner.thing2",
"{runner}1": "{runner}_runner.thing1",
}},
}},
)
"""
),
f"runners/{runner}_runner/{runner}_runner/BUILD": "python_sources()",
f"runners/{runner}_runner/{runner}_runner/__init__.py": "",
f"runners/{runner}_runner/{runner}_runner/{runner}_runner.py": "",
f"runners/{runner}_runner/{runner}_runner/thing1.py": "",
f"runners/{runner}_runner/{runner}_runner/thing2.py": "",
}
)
args = [
"--source-root-patterns=runners/*_runner",
]
rule_runner.set_options(args, env_inherit={"PATH", "PYENV_ROOT", "HOME"})
def gen_entry_points_txt(address: Address) -> PytestPluginSetup:
target = rule_runner.get_target(address)
return rule_runner.request(
PytestPluginSetup,
[GenerateEntryPointsTxtFromStevedoreExtensionRequest(target)],
)
# test with no implementations of the requested namespace
assert gen_entry_points_txt(
Address("src/no_deps", target_name="tests", relative_file_path="test_something.py"),
) == PytestPluginSetup(EMPTY_DIGEST)
assert gen_entry_points_txt(
Address("src/one_ns", target_name="tests", relative_file_path="test_something.py"),
) == PytestPluginSetup(
get_digest(
rule_runner,
{
f"runners/{runner}_runner/{runner}_runner.egg-info/entry_points.txt": dedent(
f"""\
[st2common.runners.runner]
{runner} = {runner}_runner.{runner}_runner
"""
)
for runner in st2_runners
},
)
)
assert gen_entry_points_txt(
Address("src/two_ns", target_name="tests", relative_file_path="test_something.py"),
) == PytestPluginSetup(
get_digest(
rule_runner,
{
f"runners/{runner}_runner/{runner}_runner.egg-info/entry_points.txt": dedent(
# Note that these are sorted for better cacheability
f"""\
[some.thing.else]
{runner}1 = {runner}_runner.thing1
{runner}2 = {runner}_runner.thing2
[st2common.runners.runner]
{runner} = {runner}_runner.{runner}_runner
"""
)
for runner in st2_runners
},
)
) |
299,484 | run tasks | """
Parallel map snippet by Brian Refsdal
http://www.astropython.org/snippet/2010/3/Parallel-map-using-multiprocessing
"""
from __future__ import print_function
import numpy
import warnings
from astropy import log
_multi = False
_ncpus = 1
try:
# May raise ImportError
import multiprocessing
_multi = True
# May raise NotImplementedError
_ncpus = multiprocessing.cpu_count()
except Exception as ex:
pmap_exception = ex
_multi = False
__all__ = ('parallel_map',)
def worker(f, ii, chunk, out_q, err_q, lock):
"""
A worker function that maps an input function over a
slice of the input iterable.
:param f: callable function that accepts argument from iterable
:param ii: process ID
:param chunk: slice of input iterable
:param out_q: thread-safe output queue
:param err_q: thread-safe queue to populate on exception
:param lock: thread-safe lock to protect a resource
(useful in extending parallel_map())
"""
vals = []
# iterate over slice
for val in chunk:
try:
result = f(val)
except Exception as e:
err_q.put(e)
return
vals.append(result)
# output the result and task ID to output queue
out_q.put( (ii, vals) )
def METHOD_NAME(procs, err_q, out_q, num):
"""
A function that executes populated processes and processes
the resultant array. Checks error queue for any exceptions.
:param procs: list of Process objects
:param out_q: thread-safe output queue
:param err_q: thread-safe queue to populate on exception
:param num: length of resultant array
"""
# function to terminate processes that are still running.
die = (lambda vals : [val.terminate() for val in vals
if val.exitcode is None])
try:
for proc in procs:
proc.start()
for proc in procs:
proc.join()
except Exception as e:
# kill all slave processes on ctrl-C
die(procs)
raise
if not err_q.empty():
# kill all on any exception from any one slave
die(procs)
raise err_q.get()
# Processes finish in arbitrary order. Process IDs double
# as index in the resultant array.
results = [None] * num
while not out_q.empty():
idx, result = out_q.get()
results[idx] = result
try:
# Remove extra dimension added by array_split
return list(numpy.concatenate(results))
except ValueError:
return list(results)
def parallel_map(function, sequence, numcores=None):
"""
A parallelized version of the native Python map function that
utilizes the Python multiprocessing module to divide and
conquer sequence.
parallel_map does not yet support multiple argument sequences.
:param function: callable function that accepts argument from iterable
:param sequence: iterable sequence
:param numcores: number of cores to use
"""
if not callable(function):
raise TypeError("input function '%s' is not callable" %
repr(function))
if not numpy.iterable(sequence):
raise TypeError("input '%s' is not iterable" %
repr(sequence))
size = len(sequence)
if not _multi or size == 1 or numcores == 1:
return list(map(function, sequence))
if numcores is not None and numcores > _ncpus:
warnings.warn("Number of requested cores is greated than the "
"number of available CPUs.")
elif numcores is None:
numcores = _ncpus
# https://stackoverflow.com/a/70876951/814354
# if this step fails, parallel_map won't work - it _must_ use forking, not spawning
multiprocessing.set_start_method('fork', force=True)
# Returns a started SyncManager object which can be used for sharing
# objects between processes. The returned manager object corresponds
# to a spawned child process and has methods which will create shared
# objects and return corresponding proxies.
manager = multiprocessing.Manager()
# Create FIFO queue and lock shared objects and return proxies to them.
# The manager handles a server process that manages shared objects that
# each slave process has access to. Bottom line -- thread-safe.
out_q = manager.Queue()
err_q = manager.Queue()
lock = manager.Lock()
# if the sequence is shorter than numcores, only use len(sequence)
# processes
if size < numcores:
log.info("Reduced number of cores to {0}".format(size))
numcores = size
# group sequence into numcores-worth of chunks
sequence = numpy.array_split(sequence, numcores)
procs = [multiprocessing.Process(target=worker,
args=(function, ii, chunk, out_q, err_q, lock))
for ii, chunk in enumerate(sequence)]
return METHOD_NAME(procs, err_q, out_q, numcores)
if __name__ == "__main__":
"""
Unit test of parallel_map()
Create an arbitrary length list of references to a single
matrix containing random floats and compute the eigenvals
in serial and parallel. Compare the results and timings.
"""
import time
numtasks = 5
#size = (1024,1024)
size = (512,512)
vals = numpy.random.rand(*size)
f = numpy.linalg.eigvals
iterable = [vals]*numtasks
print('Running numpy.linalg.eigvals %iX on matrix size [%i,%i]' %
(numtasks,size[0],size[1]))
tt = time.time()
presult = parallel_map(f, iterable)
print('parallel map in %g secs' % (time.time()-tt))
tt = time.time()
result = list(map(f, iterable))
print('serial map in %g secs' % (time.time()-tt))
assert (numpy.asarray(result) == numpy.asarray(presult)).all() |
299,485 | log message | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Tests for proxy app.
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import str
import threading
import logging
import http.server
import sys
from nose.tools import assert_true, assert_false
from django.test.client import Client
from desktop.lib.django_test_util import make_logged_in_client
from proxy.views import _rewrite_links
import proxy.conf
if sys.version_info[0] > 2:
from io import StringIO as string_io
else:
from StringIO import StringIO as string_io
class Handler(http.server.BaseHTTPRequestHandler):
"""
To avoid mocking out urllib, we set up a web server
that does very little, and test proxying against it.
"""
def do_GET(self):
self.send_response(200)
self.send_header("Content-type", "text/html; charset=utf8")
self.end_headers()
self.wfile.write(b"Hello there.")
path = self.path
if not isinstance(path, bytes):
path = path.encode('utf-8')
self.wfile.write(b"You requested: " + path + b".")
self.wfile.write(b"Image: <img src='/foo.jpg'>")
self.wfile.write(b"Link: <a href='/baz?with=parameter'>link</a>")
def do_POST(self):
self.send_response(200)
self.send_header("Content-type", "text/html; charset=utf8")
self.end_headers()
self.wfile.write(b"Hello there.")
path = self.path
if not isinstance(path, bytes):
path = path.encode('utf-8')
self.wfile.write(b"You requested: " + path + b".")
# Somehow in this architecture read() blocks, so we read the exact
# number of bytes the test sends.
self.wfile.write(b"Data: " + self.rfile.read(16))
def METHOD_NAME(self, fmt, *args):
logging.debug("%s - - [%s] %s" %
(self.address_string(),
self.log_date_time_string(),
fmt % args))
def run_test_server():
"""
Returns the server, and a method to close it out.
"""
# We need to proxy a server, so we go ahead and create one.
httpd = http.server.HTTPServer(("127.0.0.1", 0), Handler)
# Spawn a thread that serves exactly one request.
thread = threading.Thread(target=httpd.handle_request)
thread.daemon = True
thread.start()
def finish():
# Make sure the server thread is done.
print("Closing thread " + str(thread))
thread.join(10.0) # Wait at most 10 seconds
assert_false(thread.is_alive())
return httpd, finish
run_test_server.__test__ = False
def test_proxy_get():
"""
Proxying test.
"""
# All apps require login.
client = make_logged_in_client(username="test", is_superuser=True)
httpd, finish = run_test_server()
try:
# Test the proxying
finish_conf = proxy.conf.WHITELIST.set_for_testing(r"127\.0\.0\.1:\d*")
try:
response_get = client.get('/proxy/127.0.0.1/%s/' % httpd.server_port, dict(foo="bar"))
finally:
finish_conf()
assert_true(b"Hello there" in response_get.content)
assert_true(b"You requested: /?foo=bar." in response_get.content)
proxy_url = "/proxy/127.0.0.1/%s/foo.jpg" % httpd.server_port
if not isinstance(proxy_url, bytes):
proxy_url = proxy_url.encode('utf-8')
assert_true(proxy_url in response_get.content)
proxy_url = "/proxy/127.0.0.1/%s/baz?with=parameter" % httpd.server_port
if not isinstance(proxy_url, bytes):
proxy_url = proxy_url.encode('utf-8')
assert_true(proxy_url in response_get.content)
finally:
finish()
def test_proxy_post():
"""
Proxying test, using POST.
"""
client = make_logged_in_client(username="test", is_superuser=True)
httpd, finish = run_test_server()
try:
# Test the proxying
finish_conf = proxy.conf.WHITELIST.set_for_testing(r"127\.0\.0\.1:\d*")
try:
response_post = client.post('/proxy/127.0.0.1/%s/' % httpd.server_port, dict(foo="bar", foo2="bar"))
finally:
finish_conf()
assert_true(b"Hello there" in response_post.content)
assert_true(b"You requested: /." in response_post.content)
assert_true(b"foo=bar" in response_post.content)
assert_true(b"foo2=bar" in response_post.content)
finally:
finish()
def test_blacklist():
client = make_logged_in_client('test')
finish_confs = [
proxy.conf.WHITELIST.set_for_testing(r"localhost:\d*"),
proxy.conf.BLACKLIST.set_for_testing(r"localhost:\d*/(foo|bar)/fred/"),
]
try:
# Request 1: Hit the blacklist
resp = client.get('/proxy/localhost/1234//foo//fred/')
assert_true(b"is blocked" in resp.content)
# Request 2: This is not a match
httpd, finish = run_test_server()
try:
resp = client.get('/proxy/localhost/%s//foo//fred_ok' % (httpd.server_port,))
assert_true(b"Hello there" in resp.content)
finally:
finish()
finally:
for fin in finish_confs:
fin()
class UrlLibFileWrapper(string_io):
"""
urllib2.urlopen returns a file-like object; we fake it here.
"""
def __init__(self, buf, url):
string_io.__init__(self, buf)
self.url = url
def geturl(self):
"""URL we were initialized with."""
return self.url
def test_rewriting():
"""
Tests that simple re-writing is working.
"""
html = "<a href='foo'>bar</a><a href='http://alpha.com'>baz</a>"
assert_true(b'<a href="/proxy/abc.com/80/sub/foo">bar</a>' in _rewrite_links(UrlLibFileWrapper(html, "http://abc.com/sub/")),
msg="Relative links")
assert_true(b'<a href="/proxy/alpha.com/80/">baz</a>' in _rewrite_links(UrlLibFileWrapper(html, "http://abc.com/sub/")),
msg="Absolute links")
# Test url with port and invalid port
html = "<a href='http://alpha.com:1234/bar'>bar</a><a href='http://alpha.com:-1/baz'>baz</a>"
assert_true(b'<a href="/proxy/alpha.com/1234/bar">bar</a><a>baz</a>' in
_rewrite_links(UrlLibFileWrapper(html, "http://abc.com/sub/")),
msg="URL with invalid port")
html = """
<img src="/static/hadoop-logo.jpg"/><br>
"""
rewritten = _rewrite_links(UrlLibFileWrapper(html, "http://abc.com/sub/"))
assert_true(b'<img src="/proxy/abc.com/80/static/hadoop-logo.jpg">' in
rewritten,
msg="Rewrite images") |
299,486 | socket level mapping | #!/usr/bin/env python3
#
# Cross Platform and Multi Architecture Advanced Binary Emulation Framework
#
from typing import Mapping, TypeVar
from qiling import Qiling
from qiling.const import QL_ARCH, QL_OS
from .const import *
KT = TypeVar('KT')
VT = TypeVar('VT')
def __invert_dict(d: Mapping[KT, VT]) -> Mapping[VT, KT]:
return {v: k for k, v in d.items()}
def _constant_mapping(bits: int, consts_map: Mapping[str, int]) -> str:
return __invert_dict(consts_map)[bits]
def _flags_mapping(value: int, flags_map: Mapping[str, int]) -> str:
names = []
for name, flag in flags_map.items():
if value & flag:
value ^= flag
names.append(name)
if value:
names.append(f'{value:#x}')
return ' | '.join(names)
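# illustrative example: _flags_mapping(0x3, {'A': 0x1, 'B': 0x2}) returns 'A | B';
# any bits not named in the map are kept as a trailing hex literal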
def ql_open_flag_mapping(ql: Qiling, flags: int) -> int:
def flag_mapping(flags, mapping_name, mapping_from, mapping_to, host_os, virt_os):
ret = 0
for n in mapping_name:
if mapping_from[n] is None or mapping_to[n] is None:
continue
if (flags & mapping_from[n]) == mapping_from[n]:
ret = ret | mapping_to[n]
if (host_os == QL_OS.WINDOWS and virt_os != QL_OS.WINDOWS):
ret = ret | mapping_to['O_BINARY']
return ret
f = {}
t = {}
host_os = ql.host.os
virt_os = ql.os.type
if host_os is None:
return flags
if virt_os == QL_OS.LINUX:
if ql.arch.type in (QL_ARCH.X86, QL_ARCH.X8664):
f = linux_x86_open_flags
elif ql.arch.type in (QL_ARCH.ARM, QL_ARCH.ARM64):
f = linux_arm_open_flags
elif ql.arch.type == QL_ARCH.MIPS:
f = linux_mips_open_flags
elif ql.arch.type in (QL_ARCH.RISCV, QL_ARCH.RISCV64):
f = linux_riscv_open_flags
elif ql.arch.type == QL_ARCH.PPC:
f = linux_ppc_open_flags
elif virt_os == QL_OS.MACOS:
if ql.arch.type in (QL_ARCH.X86, QL_ARCH.X8664):
f = macos_x86_open_flags
elif virt_os == QL_OS.FREEBSD:
f = freebsd_x86_open_flags
elif virt_os == QL_OS.WINDOWS:
f = windows_x86_open_flags
elif virt_os == QL_OS.QNX:
f = qnx_arm64_open_flags
t = {
QL_OS.LINUX: linux_x86_open_flags,
QL_OS.MACOS: macos_x86_open_flags,
QL_OS.FREEBSD: freebsd_x86_open_flags,
QL_OS.WINDOWS: windows_x86_open_flags
}.get(host_os, {})
if f == t:
return flags
return flag_mapping(flags, open_flags_name, f, t, host_os, virt_os)
def mmap_flag_mapping(flags):
mmap_flags = {
'MAP_SHARED' : 0x00000001,
'MAP_PRIVATE' : 0x00000002,
'MAP_FIXED' : 0x00000010,
'MAP_ANONYMOUS' : 0x00000020,
# QNX (openqnx)
# lib/c/public/sys/mman.h
'MAP_NOINIT' : 0x00004000,
'MAP_PHYS' : 0x00010000,
'MAP_NOX64K' : 0x00020000,
'MAP_BELOW16M' : 0x00040000,
'MAP_ANON' : 0x00080000,
'MAP_SYSRAM' : 0x01000000
}
return _flags_mapping(flags, mmap_flags)
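# illustrative example: mmap_flag_mapping(0x22) returns 'MAP_PRIVATE | MAP_ANONYMOUS'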
def mmap_prot_mapping(prots: int) -> str:
if prots == 0:
return 'PROT_NONE'
mmap_prots = {
'PROT_READ' : 0b001,
'PROT_WRITE': 0b010,
'PROT_EXEC' : 0b100,
# not supported by unicorn
'PROT_GROWSDOWN' : 0x01000000,
'PROT_GROWSUP' : 0x02000000
}
return _flags_mapping(prots, mmap_prots)
def socket_type_mapping(t: int, archtype: QL_ARCH) -> str:
socket_type_map = {
QL_ARCH.X86: linux_x86_socket_types,
QL_ARCH.X8664: linux_x86_socket_types,
QL_ARCH.ARM: linux_arm_socket_types,
QL_ARCH.ARM64: linux_arm_socket_types,
QL_ARCH.MIPS: linux_mips_socket_types
}[archtype]
# https://code.woboq.org/linux/linux/net/socket.c.html#1363
return _constant_mapping(t & SOCK_TYPE_MASK, socket_type_map)
def socket_domain_mapping(p: int, archtype: QL_ARCH, ostype: QL_OS) -> str:
socket_domain_map = {
QL_ARCH.X86: linux_x86_socket_domain,
QL_ARCH.X8664: macos_x86_socket_domain if ostype == QL_OS.MACOS else linux_x86_socket_domain,
QL_ARCH.ARM: linux_arm_socket_domain,
QL_ARCH.ARM64: linux_arm_socket_domain,
QL_ARCH.MIPS: linux_mips_socket_domain
}[archtype]
return _constant_mapping(p, socket_domain_map)
def socket_tcp_option_mapping(t: int, archtype: QL_ARCH) -> str:
socket_option_map = {
QL_ARCH.X86: linux_socket_tcp_options,
QL_ARCH.X8664: linux_socket_tcp_options,
QL_ARCH.ARM: linux_socket_tcp_options,
QL_ARCH.ARM64: linux_socket_tcp_options,
QL_ARCH.MIPS: linux_socket_tcp_options,
}[archtype]
return _constant_mapping(t, socket_option_map)
def METHOD_NAME(t: int, archtype: QL_ARCH) -> str:
socket_level_map = {
QL_ARCH.X86: linux_x86_socket_level,
QL_ARCH.X8664: linux_x86_socket_level,
QL_ARCH.ARM: linux_arm_socket_level,
QL_ARCH.ARM64: linux_arm_socket_level,
QL_ARCH.MIPS: linux_mips_socket_level
}[archtype]
return _constant_mapping(t, socket_level_map)
def socket_ip_option_mapping(t: int, archtype: QL_ARCH, ostype: QL_OS) -> str:
socket_option_map = {
QL_ARCH.X86: linux_socket_ip_options,
QL_ARCH.X8664: macos_socket_ip_options if ostype == QL_OS.MACOS else linux_socket_ip_options,
QL_ARCH.ARM: linux_socket_ip_options,
QL_ARCH.ARM64: macos_socket_ip_options if ostype == QL_OS.MACOS else linux_socket_ip_options,
QL_ARCH.MIPS: linux_mips_socket_ip_options
}[archtype]
return _constant_mapping(t, socket_option_map)
def socket_option_mapping(t: int, archtype: QL_ARCH) -> str:
socket_option_map = {
QL_ARCH.X86: linux_x86_socket_options,
QL_ARCH.X8664: linux_x86_socket_options,
QL_ARCH.ARM: linux_arm_socket_options,
QL_ARCH.ARM64: linux_arm_socket_options,
QL_ARCH.MIPS: linux_mips_socket_options
}[archtype]
return _constant_mapping(t, socket_option_map) |
299,487 | slug | # Copyright (c) 2020 the original author or authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
import os
import logging
import tempfile
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Any, Tuple, Optional
from zipfile import ZipFile
import attr
import gdown
from cached_property import cached_property
from src.services.dto.json_encodable import JSONEncodable
from src.services.dto import plugin_result
logger = logging.getLogger(__name__)
MODELS_ROOT = os.path.expanduser(os.path.join('~', '.models'))
@attr.s(auto_attribs=True)
class MLModel:
plugin: 'BasePlugin'
name: str
google_drive_id: str
def __str__(self):
return self.name
@property
def path(self):
return Path(MODELS_ROOT) / self.plugin.backend / self.plugin.METHOD_NAME / self.name
def exists(self):
return os.path.exists(self.path)
def download_if_not_exists(self):
"""
Download a zipped model from its URL and extract it to the models directory.
"""
if self.exists():
logger.debug(f'{self.plugin} model {self.name} already exists')
return
logger.debug(f'Getting {self.plugin} model {self.name}')
with tempfile.NamedTemporaryFile() as tmpfile:
self._download(self.url, tmpfile)
self._extract(tmpfile.name)
@property
def url(self):
return f'https://drive.google.com/uc?id={self.google_drive_id}'
@classmethod
def _download(cls, url: str, output):
return gdown.download(url, output)
def _extract(self, filename: str):
os.makedirs(self.path, exist_ok=True)
with ZipFile(filename, 'r') as zf:
if self.plugin.retain_folder_structure:
for info in zf.infolist():
if info.is_dir():
os.makedirs(Path(self.path) / Path(info.filename))
continue
file_path = Path(self.path) / Path(info.filename)
file_path.write_bytes(zf.read(info))
else:
for info in zf.infolist():
if info.is_dir():
continue
file_path = Path(self.path) / Path(info.filename).name
file_path.write_bytes(zf.read(info))
@attr.s(auto_attribs=True)
class CalculatorModel(MLModel):
# used to convert euclidean distance to similarity [0.0..1.0]
# E.g. algorithm: (tanh((first_coef - distance) * second_coef) + 1) / 2
similarity_coefficients: Tuple[float, float] = (0, 1)
difference_threshold: float = 0.4
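# Hedged sketch (assumption, not part of the original API): applying the
# coefficients above to turn an euclidean distance into a [0.0..1.0] similarity,
# per the formula documented on similarity_coefficients.
def _distance_to_similarity(distance: float, coefficients: Tuple[float, float]) -> float:
import math  # local import keeps this illustrative helper self-contained
first_coef, second_coef = coefficients
return (math.tanh((first_coef - distance) * second_coef) + 1) / 2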
class BasePlugin(ABC):
# args for init MLModel: model name, Google Drive fileID
ml_models: Tuple[Tuple[str, str], ...] = ()
ml_model_name: str = None
def __new__(cls, ml_model_name: str = None):
"""
Plugins may cache pre-trained models and neural networks in properties,
so each plugin has to be a singleton.
"""
if not hasattr(cls, 'instance'):
cls.instance = super(BasePlugin, cls).__new__(cls)
cls.instance.ml_model_name = ml_model_name
return cls.instance
@property
@abstractmethod
def METHOD_NAME(self):
pass
def create_ml_model(self, *args):
""" Create MLModel instance by arguments following plugin settings """
return MLModel(self, *args)
@cached_property
def ml_model(self) -> Optional[MLModel]:
if hasattr(self, 'ml_models'):
for ml_model_args in self.ml_models:
if not self.ml_model_name or self.ml_model_name == ml_model_args[0]:
return self.create_ml_model(*ml_model_args)
@property
def backend(self) -> str:
return self.__class__.__module__.rsplit('.', 1)[-1]
@property
def name(self) -> str:
return f'{self.backend}.{self.__class__.__name__}'
@property
def retain_folder_structure(self) -> bool:
return False
def __str__(self):
if self.ml_model and self.ml_model_name:
return f'{self.name}@{self.ml_model_name}'
else:
return self.name
@abstractmethod
def __call__(self, face: plugin_result.FaceDTO) -> JSONEncodable:
raise NotImplementedError |
299,488 | test slice eq | # Copyright (c) 2018, 2022, Oracle and/or its affiliates.
# Copyright (c) 2013, Regents of the University of California
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are
# permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of
# conditions and the following disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
def test_list():
l = [0, 1, 2, 3, 4]
assert l[0:0] == []
assert l[1:2] == [1]
assert l[-2:-1] == [3]
assert l[-100:100] == [0, 1, 2, 3, 4]
assert l[100:-100] == []
assert l[:] == [0, 1, 2, 3, 4]
assert l[1:None] == [ 1, 2, 3, 4]
assert l[None:3] == [0, 1, 2]
# extended slice
assert l[::] == [0, 1, 2, 3, 4]
assert l[::2] == [0, 2, 4]
assert l[1::2] == [1, 3]
assert l[::-1] == [4, 3, 2, 1, 0]
assert l[::-2] == [4, 2, 0]
assert l[3::-2] == [3, 1]
assert l[3:3:-2] == []
assert l[3:2:-2] == [3]
assert l[3:1:-2] == [3]
assert l[3:0:-2] == [3, 1]
assert l[::-100] == [4]
assert l[100:-100:] == []
assert l[-100:100:] == [0, 1, 2, 3, 4]
assert l[100:-100:-1] == [4, 3, 2, 1, 0]
assert l[-100:100:-1] == []
assert l[-100:100:2] == [0, 2, 4]
def test_tuple():
t = (0, 1, 2, 3, 4)
assert t[0:0] == tuple() # []
assert t[1:2] == (1,) # [1]
assert t[-2:-1] == (3,) # [3]
assert t[-100:100] == (0, 1, 2, 3, 4) # [0, 1, 2, 3, 4]
assert t[100:-100] == tuple() # []
assert t[:] == (0, 1, 2, 3, 4) # [0, 1, 2, 3, 4]
assert t[1:None] == (1, 2, 3, 4) # [1, 2, 3, 4]
assert t[None:3] == (0, 1, 2) # [0, 1, 2]
# extended slice
assert t[::] == (0, 1, 2, 3, 4) # [0, 1, 2, 3, 4]
assert t[::2] == (0, 2, 4) # [0, 2, 4]
assert t[1::2] == (1, 3) # [1, 3]
assert t[::-1] == (4, 3, 2, 1, 0) # [4, 3, 2, 1, 0]
assert t[::-2] == (4, 2, 0) # [4, 2, 0]
assert t[3::-2] == (3, 1) # [3, 1]
assert t[3:3:-2] == tuple() # []
assert t[3:2:-2] == (3,) # [3]
assert t[3:1:-2] == (3,) # [3]
assert t[3:0:-2] == (3, 1) # [3, 1]
assert t[::-100] == (4,) # [4]
assert t[100:-100:] == tuple() # []
assert t[-100:100:] == (0, 1, 2, 3, 4) # [0, 1, 2, 3, 4]
assert t[100:-100:-1] == (4, 3, 2, 1, 0) # [4, 3, 2, 1, 0]
assert t[-100:100:-1] == tuple() # []
assert t[-100:100:2] == (0, 2, 4) # [0, 2, 4]
def test_string():
s = "01234"
assert s[0:0] == ""
assert s[1:2] == "1"
assert s[-2:-1] == "3"
assert s[-100:100] == "01234"
assert s[100:-100] == ""
assert s[:] == "01234"
assert s[1:None] == "1234"
assert s[None:3] == "012"
# extended slice
assert s[::] == "01234"
assert s[::2] == "024"
assert s[1::2] == "13"
assert s[::-1] == "43210"
assert s[::-2] == "420"
assert s[3::-2] == "31"
assert s[3:3:-2] == ""
assert s[3:2:-2] == "3"
assert s[3:1:-2] == "3"
assert s[3:0:-2] == "31"
assert s[::-100] == "4"
assert s[100:-100:] == ""
assert s[-100:100:] == "01234"
assert s[100:-100:-1] == "43210"
assert s[-100:100:-1] == ""
assert s[-100:100:2] == "024"
def test_range_step1():
t = range(5)
assert t[0:0] == range(0)
assert t[1:2] == range(1, 2)
assert t[-2:-1] == range(3, 4)
assert t[-100:100] == range(0, 5)
assert t[100:-100] == range(5, 0)
assert t[:] == range(0, 5)
assert t[1:None] == range(1, 5)
assert t[None:3] == range(0, 3)
# extended slice
assert t[::] == range(0, 5)
assert t[::2] == range(0, 5, 2)
assert t[1::2] == range(1, 4, 2)
assert t[::-1] == range(4, -1, -1)
assert t[::-2] == range(4, -1, -2)
if sys.version_info.minor >= 7:
assert t[3::-2] == range(3, 0, -2)
else:
assert t[3::-2] == range(3, -1, -2)
assert t[3:3:-2] == range(0)
assert t[3:2:-2] == range(3, 2, -2)
assert t[3:1:-2] == range(3, 1, -2)
assert t[3:0:-2] == range(3, 0, -2)
assert t[::-100] == range(4, 5, 1)
assert t[100:-100:] == range(0)
assert t[-100:100:] == range(5)
assert t[100:-100:-1] == range(4, -1, -1)
assert t[-100:100:-1] == range(0)
assert t[-100:100:2] == range(0, 5, 2)
def test_range_step2():
t = range(5, 15, 2)
assert t[0:0] == range(5, 5, 2)
assert t[1:2] == range(7, 9, 2)
assert t[-2:-1] == range(11, 13, 2)
assert t[-100:100] == range(5, 15, 2)
assert t[100:-100] == range(15, 5, 2)
assert t[:] == range(5, 15, 2)
assert t[1:None] == range(7, 15, 2)
assert t[None:3] == range(5, 11, 2)
# extended slice
assert t[::] == range(5, 15, 2)
assert t[::2] == range(5, 15, 4)
assert t[1::2] == range(7, 15, 4)
assert t[::-1] == range(13, 3, -2)
assert t[::-2] == range(13, 3, -4)
assert t[3::-2] == range(11, 3, -4)
assert t[3:3:-2] == range(11, 11, -4)
assert t[3:2:-2] == range(11, 9, -4)
assert t[3:1:-2] == range(11, 7, -4)
assert t[3:0:-2] == range(11, 5, -4)
assert t[::-100] == range(13, 3, -200)
assert t[100:-100:] == range(15, 5, 2)
assert t[-100:100:] == range(5, 15, 2)
assert t[100:-100:-1] == range(13, 3, -2)
assert t[-100:100:-1] == range(3, 13, -2)
assert t[-100:100:2] == range(5, 15, 4)
def test_correct_error():
class X():
def __index__(self):
return "42"
try:
[1][:X()]
except TypeError as e:
assert "__index__ returned non-int" in str(e)
try:
[1][:"42"]
except TypeError as e:
assert "slice indices must be integers" in str(e)
def METHOD_NAME():
# Note: large numbers that do not get interned when boxed...
assert slice(2, 10000, 10) == slice(2, 10000, 10)
assert slice(None, 12345, 10) == slice(None, 12345, 10)
assert slice(1, 10000, 10) != slice(None, 10000, 10)
assert slice(2, 10000, 10) != slice(2, 10000, 1) |
299,489 | do transformation | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Inject dummy BiasAdd Graph Rewriter."""
import tensorflow as tf
from tensorflow.python.framework import dtypes
from neural_compressor.adaptor.tf_utils.graph_util import GraphAnalyzer
from neural_compressor.adaptor.tf_utils.graph_util import GraphRewriterHelper as Helper
from neural_compressor.adaptor.tf_utils.util import version1_gt_version2
from neural_compressor.utils.utility import dump_elapsed_time
from ..graph_base import GraphRewriterBase
class InjectDummyBiasAddOptimizer(GraphRewriterBase):
"""Inject dummy BiasAdd for MatMul, Conv2D for pattern fusion."""
def __init__(self, model, outputs):
"""Initialization."""
super().__init__(model)
self.outputs = outputs
@dump_elapsed_time("Pass InjectDummyBiasAddOptimizer")
def METHOD_NAME(self):
"""Inject dummy BiasAdd if MatMul, Conv2D missing the valid add ops behind them."""
g = GraphAnalyzer()
g.graph = self.model
graph_info = g.parse_graph()
g.get_frame_info()
valid_ops = ("BiasAdd", "Add", "AddV2", "AddN")
target_nodes = g.query_fusion_pattern_nodes(
[
["MatMul", "Conv2D"],
]
)
for i in target_nodes:
# only apply this pass for the old TensorFlow quantization API; pre_optimize does this check
# use conv+dummy_biasadd+relu because TF does not support conv+relu now.
if i[0] in self.outputs:
continue
next_node_names = graph_info[i[0]].outputs
if (
next_node_names
and len(next_node_names) == 1
and graph_info[Helper.node_name_from_input(next_node_names[0])].node.op in valid_ops
):
continue
bias_node_name = i[0] + "_dummy_biasadd"
bias_const_node_name = i[0] + "_dummy_biasadd_const"
matmul_a_node_name = Helper.node_name_from_input(graph_info[i[0]].node.input[0])
matmul_a_node = graph_info[matmul_a_node_name].node
matmul_b_node_name = Helper.node_name_from_input(graph_info[i[0]].node.input[1])
matmul_b_node = graph_info[matmul_b_node_name].node
if matmul_a_node.op == "Const" or matmul_b_node.op not in ["Const", "Enter"]:
continue
if matmul_b_node.op == "Enter": # pragma: no cover
parent_node = graph_info[Helper.node_name_from_input(matmul_b_node.input[0])].node
if parent_node.op != "Const":
continue
else:
matmul_b_node = parent_node
matmul_b_node_name = matmul_b_node.name
if graph_info[i[0]].node.op == "MatMul":
t_b_index = 0 if graph_info[i[0]].node.attr["transpose_b"].b else 1
elif graph_info[i[0]].node.op == "Conv2D" and graph_info[i[0]].node.attr["data_format"].s == b"NHWC":
t_b_index = 3
elif graph_info[i[0]].node.op == "Conv2D" and graph_info[i[0]].node.attr["data_format"].s == b"NCHW":
t_b_index = 1
else:
continue
bias_add_length = matmul_b_node.attr["value"].tensor.tensor_shape.dim[t_b_index].size
bias_add_content = [0.0] * bias_add_length
bias_const_node = Helper.create_constant_node(
bias_const_node_name, bias_add_content, dtypes.float32, shape=[bias_add_length]
)
if i[0] in g.parent_frame_details and g.parent_frame_details[i[0]]: # pragma: no cover
bias_const_enter_node = Helper.create_node(
"Enter", bias_const_node_name + "_enter", [bias_const_node_name]
)
Helper.set_attr_string(
bias_const_enter_node, "frame_name", g.parent_frame_details[i[0]].attr["frame_name"].s
)
Helper.set_attr_dtype(bias_const_enter_node, "T", dtypes.float32)
Helper.set_attr_bool(bias_const_enter_node, "is_constant", True)
Helper.set_attr_int(
bias_const_enter_node,
"parallel_iterations",
g.parent_frame_details[i[0]].attr["parallel_iterations"].i,
)
bias_node = Helper.create_node(
"BiasAdd",
bias_node_name,
[
i[0],
bias_const_enter_node.name
if i[0] in g.parent_frame_details and g.parent_frame_details[i[0]]
else bias_const_node_name,
],
)
Helper.set_attr_dtype(bias_node, "T", dtypes.float32)
g.add_node(bias_node, i[0], next_node_names)
if i[0] in g.parent_frame_details and g.parent_frame_details[i[0]]: # pragma: no cover
g.add_node(bias_const_node, None, [bias_const_enter_node.name])
g.add_node(bias_const_enter_node, bias_const_node_name, [bias_node_name])
else:
g.add_node(bias_const_node, None, [bias_node_name])
return g.dump_graph() |
299,490 | create asset dir | # SPDX-License-Identifier: Apache-2.0
# Copyright 2022 The HuggingFace Authors.
import contextlib
import os
from os import makedirs
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Generator, List, Tuple, TypedDict
from PIL import Image # type: ignore
from pydub import AudioSegment # type:ignore
from libcommon.storage import StrPath
DATASET_SEPARATOR = "--"
ASSET_DIR_MODE = 0o755
DATASETS_SERVER_MDATE_FILENAME = ".dss"
SUPPORTED_AUDIO_EXTENSION_TO_MEDIA_TYPE = {".wav": "audio/wav", ".mp3": "audio/mpeg"}
def METHOD_NAME(
dataset: str, config: str, split: str, row_idx: int, column: str, assets_directory: StrPath
) -> Tuple[Path, str]:
dir_path = Path(assets_directory).resolve() / dataset / DATASET_SEPARATOR / config / split / str(row_idx) / column
url_dir_path = f"{dataset}/{DATASET_SEPARATOR}/{config}/{split}/{row_idx}/{column}"
makedirs(dir_path, ASSET_DIR_MODE, exist_ok=True)
return dir_path, url_dir_path
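# illustrative usage (hypothetical values):
# METHOD_NAME("my_ds", "default", "train", 0, "image", "/assets")
# creates /assets/my_ds/--/default/train/0/image and returns that Path
# together with the URL suffix "my_ds/--/default/train/0/image"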
def glob_rows_in_assets_dir(
dataset: str,
assets_directory: StrPath,
) -> Generator[Path, None, None]:
return Path(assets_directory).resolve().glob(os.path.join(dataset, DATASET_SEPARATOR, "*", "*", "*"))
def update_directory_modification_date(path: Path) -> None:
if path.is_dir():
# update the directory's last modified date
temporary_file = path / DATASETS_SERVER_MDATE_FILENAME
if temporary_file.is_dir():
raise ValueError(f"Cannot create temporary file {temporary_file} in {path}")
temporary_file.touch(exist_ok=True)
if temporary_file.is_file():
with contextlib.suppress(FileNotFoundError):
temporary_file.unlink()
def update_last_modified_date_of_rows_in_assets_dir(
dataset: str,
config: str,
split: str,
offset: int,
length: int,
assets_directory: StrPath,
) -> None:
update_directory_modification_date(Path(assets_directory).resolve() / dataset.split("/")[0])
row_dirs_path = Path(assets_directory).resolve() / dataset / DATASET_SEPARATOR / config / split
for row_idx in range(offset, offset + length):
update_directory_modification_date(row_dirs_path / str(row_idx))
class ImageSource(TypedDict):
src: str
height: int
width: int
def create_image_file(
dataset: str,
config: str,
split: str,
row_idx: int,
column: str,
filename: str,
image: Image.Image,
assets_base_url: str,
assets_directory: StrPath,
overwrite: bool = True,
) -> ImageSource:
dir_path, url_dir_path = METHOD_NAME(
dataset=dataset,
config=config,
split=split,
row_idx=row_idx,
column=column,
assets_directory=assets_directory,
)
makedirs(dir_path, ASSET_DIR_MODE, exist_ok=True)
file_path = dir_path / filename
if overwrite or not file_path.exists():
image.save(file_path)
return {
"src": f"{assets_base_url}/{url_dir_path}/{filename}",
"height": image.height,
"width": image.width,
}
class AudioSource(TypedDict):
src: str
type: str
def create_audio_file(
dataset: str,
config: str,
split: str,
row_idx: int,
column: str,
audio_file_bytes: bytes,
audio_file_extension: str,
assets_base_url: str,
filename: str,
assets_directory: StrPath,
overwrite: bool = True,
) -> List[AudioSource]:
dir_path, url_dir_path = METHOD_NAME(
dataset=dataset,
config=config,
split=split,
row_idx=row_idx,
column=column,
assets_directory=assets_directory,
)
makedirs(dir_path, ASSET_DIR_MODE, exist_ok=True)
file_path = dir_path / filename
if file_path.suffix not in SUPPORTED_AUDIO_EXTENSION_TO_MEDIA_TYPE:
raise ValueError(
f"Audio format {file_path.suffix} is not supported. Supported formats are"
f" {','.join(SUPPORTED_AUDIO_EXTENSION_TO_MEDIA_TYPE)}."
)
media_type = SUPPORTED_AUDIO_EXTENSION_TO_MEDIA_TYPE[file_path.suffix]
if overwrite or not file_path.exists():
if audio_file_extension == file_path.suffix:
with open(file_path, "wb") as f:
f.write(audio_file_bytes)
else: # we need to convert
# might spawn a process to convert the audio file using ffmpeg
with NamedTemporaryFile("wb", suffix=audio_file_extension) as tmpfile:
tmpfile.write(audio_file_bytes)
segment: AudioSegment = AudioSegment.from_file(tmpfile.name)
segment.export(file_path, format=file_path.suffix[1:])
return [
{"src": f"{assets_base_url}/{url_dir_path}/{filename}", "type": media_type},
] |
299,491 | refresh panel | # =============================================================================
# Copyright (C) 2014 Alexandros Kosiaris
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================
# noinspection PyPackageRequirements
import wx
import gui.mainFrame
from gui.statsView import StatsView
from gui.bitmap_loader import BitmapLoader
from gui.utils.numberFormatter import formatAmount
from service.fit import Fit
_t = wx.GetTranslation
class MiningYieldViewFull(StatsView):
name = "miningyieldViewFull"
def __init__(self, parent):
StatsView.__init__(self)
self.parent = parent
self._cachedValues = []
def getHeaderText(self, fit):
return _t("Mining Yield")
def getTextExtentW(self, text):
width, height = self.parent.GetTextExtent(text)
return width
def populatePanel(self, contentPanel, headerPanel):
contentSizer = contentPanel.GetSizer()
parent = self.panel = contentPanel
self.headerPanel = headerPanel
panel = "full"
sizerMiningYield = wx.FlexGridSizer(1, 4, 0, 0)
sizerMiningYield.AddGrowableCol(1)
contentSizer.Add(sizerMiningYield, 0, wx.EXPAND, 0)
counter = 0
for miningType, image in (("miner", "mining"), ("drone", "drones")):
baseBox = wx.BoxSizer(wx.HORIZONTAL)
sizerMiningYield.Add(baseBox, 1, wx.ALIGN_LEFT if counter == 0 else wx.ALIGN_CENTER_HORIZONTAL)
baseBox.Add(BitmapLoader.getStaticBitmap("%s_big" % image, parent, "gui"), 0, wx.ALIGN_CENTER)
box = wx.BoxSizer(wx.VERTICAL)
baseBox.Add(box, 0, wx.ALIGN_CENTER)
box.Add(wx.StaticText(parent, wx.ID_ANY, _t(miningType).capitalize()), 0, wx.ALIGN_LEFT)
hbox = wx.BoxSizer(wx.HORIZONTAL)
box.Add(hbox, 1, wx.ALIGN_CENTER)
lbl = wx.StaticText(parent, wx.ID_ANY, "0.0 m\u00B3/s")
setattr(self, "label%sminingyield%s" % (panel.capitalize(), miningType.capitalize()), lbl)
hbox.Add(lbl, 0, wx.ALIGN_CENTER)
self._cachedValues.append(0)
counter += 1
targetSizer = sizerMiningYield
baseBox = wx.BoxSizer(wx.HORIZONTAL)
targetSizer.Add(baseBox, 0, wx.ALIGN_LEFT)
baseBox.Add(BitmapLoader.getStaticBitmap("cargoBay_big", parent, "gui"), 0, wx.ALIGN_CENTER)
box = wx.BoxSizer(wx.VERTICAL)
baseBox.Add(box, 0, wx.EXPAND)
box.Add(wx.StaticText(parent, wx.ID_ANY, _t("Total")), 0, wx.ALIGN_LEFT)
hbox = wx.BoxSizer(wx.HORIZONTAL)
box.Add(hbox, 1, wx.EXPAND)
lbl = wx.StaticText(parent, wx.ID_ANY, "0.0 m\u00B3/s")
setattr(self, "label%sminingyieldTotal" % panel.capitalize(), lbl)
hbox.Add(lbl, 0, wx.ALIGN_LEFT)
self._cachedValues.append(0)
image = BitmapLoader.getBitmap("turret_small", "gui")
firepower = wx.BitmapButton(contentPanel, -1, image)
firepower.SetToolTip(wx.ToolTip(_t("Click to toggle to Firepower View")))
firepower.Bind(wx.EVT_BUTTON, self.switchToFirepowerView)
sizerMiningYield.Add(firepower, 0, wx.ALIGN_LEFT)
self._cachedValues.append(0)
def switchToFirepowerView(self, event):
# Getting the active fit
mainFrame = gui.mainFrame.MainFrame.getInstance()
sFit = Fit.getInstance()
fit = sFit.getFit(mainFrame.getActiveFit())
# Remove ourselves from statsPane's view list
self.parent.views.remove(self)
self._cachedValues = []
# And no longer display us
self.panel.GetSizer().Clear(True)
self.panel.GetSizer().Layout()
# Get the new view
view = StatsView.getView("firepowerViewFull")(self.parent)
view.populatePanel(self.panel, self.headerPanel)
# Populate us in statsPane's view list
self.parent.views.append(view)
# Get the TogglePanel
tp = self.panel.GetParent()
# Bind the new panel's children to allow context menu access
tp.SetLabel(view.getHeaderText(fit))
view.METHOD_NAME(fit)
def METHOD_NAME(self, fit):
# Update our labels to reflect the new fit's stats
stats = (("labelFullminingyieldMiner", lambda: fit.minerYield, lambda: fit.minerWaste, 3, 0, 0, "{}{} m\u00B3/s", None),
("labelFullminingyieldDrone", lambda: fit.droneYield, lambda: fit.droneWaste, 3, 0, 0, "{}{} m\u00B3/s", None),
("labelFullminingyieldTotal", lambda: fit.totalYield, lambda: fit.totalWaste, 3, 0, 0, "{}{} m\u00B3/s", None))
def processValue(value):
value = value() if fit is not None else 0
value = value if value is not None else 0
return value
counter = 0
for labelName, yieldValue, wasteValue, prec, lowest, highest, valueFormat, altFormat in stats:
label = getattr(self, labelName)
yieldValue = processValue(yieldValue)
wasteValue = processValue(wasteValue)
if self._cachedValues[counter] != (yieldValue, wasteValue):
yps = formatAmount(yieldValue, prec, lowest, highest)
yph = formatAmount(yieldValue * 3600, prec, lowest, highest)
wps = formatAmount(wasteValue, prec, lowest, highest)
wph = formatAmount(wasteValue * 3600, prec, lowest, highest)
wasteSuffix = '\u02b7' if wasteValue > 0 else ''
label.SetLabel(valueFormat.format(yps, wasteSuffix))
tipLines = []
tipLines.append("{} m\u00B3 mining yield per second ({} m\u00B3 per hour)".format(yps, yph))
if wasteValue > 0:
tipLines.append("{} m\u00B3 mining waste per second ({} m\u00B3 per hour)".format(wps, wph))
label.SetToolTip(wx.ToolTip('\n'.join(tipLines)))
self._cachedValues[counter] = (yieldValue, wasteValue)
counter += 1
self.panel.Layout()
self.headerPanel.Layout()
MiningYieldViewFull.register() |
299,492 | test asset registry | import threading
import time
from collections import deque
from typing import TYPE_CHECKING, Any
from unittest import mock
import pytest
import wandb
from wandb.sdk.internal.settings_static import SettingsStatic
from wandb.sdk.internal.system.assets import (
CPU,
GPU,
GPUAMD,
IPU,
TPU,
Disk,
GPUApple,
Memory,
Network,
Trainium,
)
from wandb.sdk.internal.system.assets.asset_registry import asset_registry
from wandb.sdk.internal.system.assets.interfaces import MetricsMonitor
from wandb.sdk.internal.system.system_monitor import AssetInterface, SystemMonitor
if TYPE_CHECKING:
from typing import Deque
class MockMetric:
name: str = "mock_metric"
# at first, we will only support the gauge type
samples: "Deque[Any]" = deque()
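# note: `samples` is a class attribute, so it is shared across instances;
# fine for these single-instance tests, but worth keeping in mind when extending them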
def __init__(self, **kwargs):
name = kwargs.pop("name", None)
if name:
self.name = name
self.default_value = kwargs.pop("value", 42)
def sample(self) -> None:
self.samples.append(self.default_value)
def clear(self) -> None:
self.samples.clear()
def aggregate(self) -> dict:
if self.samples:
return {self.name: self.samples[-1]}
return {}
class MockAsset1:
def __init__(self, interface, settings, shutdown_event) -> None:
self.name = self.__class__.__name__.lower()
self.metrics = [
MockMetric(name="mock_metric_1", value=42),
]
self.metrics_monitor = MetricsMonitor(
self.name, self.metrics, interface, settings, shutdown_event
)
@classmethod
def is_available(cls) -> bool:
return True
def start(self) -> None:
self.metrics_monitor.start()
def finish(self) -> None:
self.metrics_monitor.finish()
def probe(self) -> dict:
pass
class MockAsset2:
def __init__(self, interface, settings, shutdown_event) -> None:
self.name = self.__class__.__name__.lower()
self.metrics = [
MockMetric(name="mock_metric_2", value=24),
]
self.metrics_monitor = MetricsMonitor(
self.name, self.metrics, interface, settings, shutdown_event
)
@classmethod
def is_available(cls) -> bool:
return True
def start(self) -> None:
self.metrics_monitor.start()
def finish(self) -> None:
self.metrics_monitor.finish()
def probe(self) -> dict:
pass
class MockBrokenMetric:
name: str = "mock_broken_metric"
# at first, we will only support the gauge type
samples: "Deque[Any]" = deque()
def sample(self) -> None:
raise Exception("MockBrokenMetric failed to sample")
def clear(self) -> None:
self.samples.clear()
def aggregate(self) -> dict:
if self.samples:
return {self.name: self.samples[0]}
return {}
def METHOD_NAME():
# test that the asset registry is populated with the correct assets
# should be updated if new assets are added
registry = asset_registry._registry
assert len(registry) == 10
for asset in (
CPU,
Disk,
Memory,
GPU,
GPUAMD,
GPUApple,
IPU,
Network,
TPU,
Trainium,
):
assert asset in registry
def test_metrics_monitor(capsys, test_settings):
# test that the metrics monitor is able to robustly sample metrics
mock_metric = MockMetric()
mock_broken_metric = MockBrokenMetric()
metrics = [mock_metric, mock_broken_metric]
interface = AssetInterface()
settings = SettingsStatic(
test_settings(
dict(
_stats_sample_rate_seconds=0.1,
_stats_samples_to_average=2,
)
).to_proto()
)
shutdown_event = threading.Event()
metrics_monitor = MetricsMonitor(
asset_name="test_metrics_monitor",
metrics=metrics,
interface=interface,
settings=settings,
shutdown_event=shutdown_event,
)
metrics_monitor.start()
time.sleep(1)
shutdown_event.set()
metrics_monitor.finish()
while not interface.metrics_queue.empty():
metric_record = interface.metrics_queue.get()
assert metric_record == {mock_metric.name: 42}
assert len(mock_metric.samples) == 0
@pytest.mark.parametrize(
"join_assets,num_keys",
[(True, 2), (False, 1)],
)
def test_system_monitor(test_settings, join_assets, num_keys):
# - test compatibility mode where we join metrics from individual assets
# before publishing them to the interface
# - test the future default mode where we publish metrics from individual assets
interface = AssetInterface()
settings = SettingsStatic(
test_settings(
dict(
_stats_sample_rate_seconds=0.1,
_stats_samples_to_average=2,
_stats_join_assets=join_assets,
)
).to_proto()
)
# todo: refactor this ugliness into a factory
mock_assets = [MockAsset1, MockAsset2]
with mock.patch.object(
wandb.sdk.internal.system.assets.asset_registry,
"_registry",
mock_assets,
):
system_monitor = SystemMonitor(
interface=interface,
settings=settings,
)
system_monitor.start()
time.sleep(1.5)
system_monitor.finish()
max_num_keys = 0
while not interface.metrics_queue.empty():
metric_record = interface.metrics_queue.get()
# it's tricky to get the timing right, so we are looking at the
# maximum number of keys we've seen in the queue as it should be == num_keys;
# sometimes, due to timing we might see less than num_keys
max_num_keys = max(max_num_keys, len(metric_record))
assert max_num_keys == num_keys |
299,493 | add cover | #!/usr/bin/python3
import glob
import os
import sys
import time
if len(sys.argv) < 4:
print("usage: " + sys.argv[0] + " <kompiled-dir>... -- <files>...")
sys.exit(1)
allRules = set()
coverMap = {}
def METHOD_NAME(rule):
rule = rule.strip()
if rule not in coverMap:
coverMap[rule] = 0
coverMap[rule] += 1
fileIdx = None
for idx, dir in enumerate(sys.argv[1:], start=1):
if dir == "--":
fileIdx = idx + 1
break
filename = dir + "/allRules.txt"
with open(filename) as f:
allRules.update(f.readlines())
filename = dir + "/coverage.txt"
with open(filename) as f:
for line in f:
METHOD_NAME(line)
for filename in glob.glob(dir + "/*_coverage.txt"):
with open(filename) as f:
for line in f:
METHOD_NAME(line)
if fileIdx is None:
print("usage: " + sys.argv[0] + " <kompiled-dir>... -- <files>...")
sys.exit(1)
sources = [os.path.abspath(path) for path in sys.argv[fileIdx:]]
ruleMap = {}
for line in allRules:
parts = line.split(" ")
id = parts[0].strip()
location = " ".join(parts[1:])
parts = location.split(":")
ruleMap[id] = (os.path.abspath(":".join(parts[:-2])), parts[-2], parts[-1])
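# Shape of an allRules.txt entry, as implied by the parsing above
# (hypothetical values):
#   0a1b2c3d /path/to/definition.k:128:10
# which yields ruleMap["0a1b2c3d"] == ("/path/to/definition.k", "128", "10").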
allLines = set()
for key, value in ruleMap.items():
allLines.add((value[0], value[1]))
def linesCovered(coverageOfComponent):
coveredLines = set()
for ruleId in coverageOfComponent:
rule = ruleMap[ruleId]
coveredLines.add((rule[0], rule[1]))
return len(coveredLines)
def rulesCovered(coverageOfComponent):
return len(coverageOfComponent)
numRulesGlobal = len(allRules)
numLines = len(allLines)
lineRateGlobal = float(linesCovered(coverMap)) / numLines
ruleRateGlobal = float(rulesCovered(coverMap)) / numRulesGlobal
timestamp = int(time.time())
template = """
<coverage line-rate="{lineRate}" branch-rate="{ruleRate}" version="1.9" timestamp="{timestamp}">
<sources>
<source>{source}</source>
</sources>
<packages>
<package name="" line-rate="{lineRate}" branch-rate="{ruleRate}" complexity="{numRules}.0">
<classes>
{classesElem}
</classes>
</package>
</packages>
</coverage>
"""
source = os.path.dirname(os.path.commonprefix(sources))
classTemplate = """
<class name="{filename}" filename="{filename}" line-rate="{lineRate}" branch-rate="{ruleRate}" complexity="{numRules}.0">
<lines>
{linesElem}
</lines>
</class>
"""
lineTemplateNoBranch = """
<line number="{lineNum}" hits="{hits}" branch="false"/>
"""
lineTemplateBranch = """
<line number="{lineNum}" hits="{hits}" branch="true" condition-coverage="{ruleRate}% ({rulesCovered}/{numRules})">
<conditions>
<condition number="0" type="jump" coverage="{ruleRate}%"/>
</conditions>
</line>
"""
ruleMapByFile = {}
for id, loc in ruleMap.items():
if loc[0] not in ruleMapByFile:
ruleMapByFile[loc[0]] = {}
fileMap = ruleMapByFile[loc[0]]
fileMap[id] = (loc[1], loc[2])
classes = []
for filename in sources:
if filename not in ruleMapByFile:
continue
relativeFile = os.path.relpath(filename, source)
allLines = set()
allRules = ruleMapByFile[filename]
ruleMapByLine = {}
for key, value in allRules.items():
allLines.add((value[0], value[1]))
if value[0] not in ruleMapByLine:
ruleMapByLine[value[0]] = [key]
else:
ruleMapByLine[value[0]].append(key)
fileCoverage = {rule: num for rule, num in coverMap.items() if rule in allRules}
numRulesFile = len(allRules)
numLines = len(allLines)
lineRateFile = float(linesCovered(fileCoverage)) / numLines
ruleRateFile = float(rulesCovered(fileCoverage)) / numRulesFile
lines = []
for lineNum, rules in ruleMapByLine.items():
lineCoverage = {rule: num for rule, num in fileCoverage.items() if rule in rules}
hits = sum(lineCoverage.values())
numCovered = len(lineCoverage)
numRulesLine = len(rules)
ruleRateLine = float(numCovered) / numRulesLine
if numRulesLine == 1:
lines.append(lineTemplateNoBranch.format(lineNum=lineNum, hits=hits))
else:
lines.append(lineTemplateBranch.format(lineNum=lineNum, hits=hits, ruleRate=int(ruleRateLine * 100), rulesCovered=numCovered, numRules=numRulesLine))
linesElem = "".join(lines)
classes.append(classTemplate.format(filename=relativeFile, lineRate=lineRateFile, ruleRate=ruleRateFile, numRules=numRulesFile, linesElem=linesElem))
classesElem = "".join(classes)
xml = template.format(lineRate=lineRateGlobal, ruleRate=ruleRateGlobal, timestamp=timestamp, numRules=numRulesGlobal, source=source, classesElem=classesElem)
print(xml) |
299,494 | test kexec crash size | import pytest
from insights.core.exceptions import ParseException
from insights.parsers import kdump
from insights.tests import context_wrap
KDUMP_WITH_NORMAL_COMMENTS = """
# this is a comment
ssh kdumpuser@10.209.136.62
path /kdump/raw
core_collector makedumpfile -c --message-level 1 -d 31
""".strip()
KDUMP_WITH_INLINE_COMMENTS = """
nfs4 10.209.136.62:/kdumps
path /kdump/raw #some path stuff
core_collector makedumpfile -c --message-level 1 -d 31
""".strip()
KDUMP_WITH_EQUAL = """
nfs 10.209.136.62:/kdumps
path /kdump/raw #some path stuff
core_collector makedumpfile -c --message-level 1 -d 31
some_var "blah=3"
options bonding mode=active-backup miimon=100
""".strip()
KDUMP_WITH_BLACKLIST = """
path /var/crash
core_collector makedumpfile -c --message-level 1 -d 24
default shell
blacklist vxfs
blacklist vxportal
blacklist vxted
blacklist vxcafs
blacklist fdd
ignore_me
"""
KDUMP_WITH_NET = """
net user@raw.server.com
path /var/crash
""".strip()
KDUMP_MATCH_1 = """
net user@raw.server.com
path /var/crash
""".strip()
def test_with_normal_comments():
context = context_wrap(KDUMP_WITH_NORMAL_COMMENTS)
kd = kdump.KDumpConf(context)
expected = "# this is a comment"
assert expected == kd.comments[0]
# Also test is_* properties
assert not kd.is_nfs()
assert kd.is_ssh()
# Not a local disk then.
assert not kd.using_local_disk
def test_with_inline_comments():
context = context_wrap(KDUMP_WITH_INLINE_COMMENTS)
kd = kdump.KDumpConf(context)
expected = "path /kdump/raw #some path stuff"
assert expected == kd.inline_comments[0]
assert "/kdump/raw" == kd["path"]
# Also test is_* properties
assert kd.is_nfs()
assert not kd.is_ssh()
# Not a local disk then.
assert not kd.using_local_disk
def test_with_equal():
context = context_wrap(KDUMP_WITH_EQUAL)
kd = kdump.KDumpConf(context)
expected = '"blah=3"'
assert expected == kd['some_var']
assert 'options' in kd.data
assert isinstance(kd.data['options'], dict)
assert 'bonding' in kd.data['options']
assert 'mode=active-backup miimon=100' == kd.data['options']['bonding']
# Alternate accessor for options:
assert kd.options('bonding') == 'mode=active-backup miimon=100'
# Also test is_* properties
assert kd.is_nfs()
assert not kd.is_ssh()
# Not a local disk then.
assert not kd.using_local_disk
def test_get_hostname():
context = context_wrap(KDUMP_WITH_EQUAL)
kd = kdump.KDumpConf(context)
assert '10.209.136.62' == kd.hostname
context = context_wrap(KDUMP_MATCH_1)
kd = kdump.KDumpConf(context)
assert 'raw.server.com' == kd.hostname
def test_get_ip():
context = context_wrap(KDUMP_WITH_EQUAL)
kd = kdump.KDumpConf(context)
assert '10.209.136.62' == kd.ip
context = context_wrap(KDUMP_MATCH_1)
kd = kdump.KDumpConf(context)
assert kd.ip is None
def test_blacklist_repeated():
context = context_wrap(KDUMP_WITH_BLACKLIST)
kd = kdump.KDumpConf(context)
assert 'blacklist' in kd.data
assert kd.data['blacklist'] == ['vxfs', 'vxportal', 'vxted', 'vxcafs', 'fdd']
# Also test is_* properties
assert not kd.is_nfs()
assert not kd.is_ssh()
assert kd.using_local_disk
def test_net():
context = context_wrap(KDUMP_WITH_NET)
kd = kdump.KDumpConf(context)
assert 'net' in kd.data
assert 'path' in kd.data
assert not kd.using_local_disk
with pytest.raises(KeyError):
assert kd[3]
KEXEC_CRASH_SIZE_1 = "134217728"
KEXEC_CRASH_SIZE_2 = "0"
KEXEC_CRASH_SIZE_BAD = ""
def METHOD_NAME():
kcs = kdump.KexecCrashSize(context_wrap(KEXEC_CRASH_SIZE_1))
assert kcs.size == 134217728
kcs = kdump.KexecCrashSize(context_wrap(KEXEC_CRASH_SIZE_2))
assert kcs.size == 0
kcs = kdump.KexecCrashSize(context_wrap(KEXEC_CRASH_SIZE_BAD))
assert kcs.size == 0
KDUMP_CRASH_NOT_LOADED = '0'
KDUMP_CRASH_LOADED = '1'
KDUMP_CRASH_LOADED_BAD = ''
def test_loaded():
ctx = context_wrap(KDUMP_CRASH_LOADED, path='/sys/kernel/kexec_crash_loaded')
assert kdump.KexecCrashLoaded(ctx).is_loaded
def test_not_loaded():
ctx = context_wrap(KDUMP_CRASH_NOT_LOADED, path='/sys/kernel/kexec_crash_loaded')
assert not kdump.KexecCrashLoaded(ctx).is_loaded
def test_loaded_bad():
ctx = context_wrap(KDUMP_CRASH_LOADED_BAD, path='/sys/kernel/kexec_crash_loaded')
assert not kdump.KexecCrashLoaded(ctx).is_loaded
KDUMP_LOCAL_FS_1 = """
ext3 UUID=f15759be-89d4-46c4-9e1d-1b67e5b5da82
path /usr/local/cores
core_collector makedumpfile -c --message-level 1 -d 31
""".strip()
KDUMP_LOCAL_FS_UNSUPPORTED_2 = """
auto LABEL=/boot
path /usr/local/cores
core_collector makedumpfile -c --message-level 1 -d 31
""".strip()
KDUMP_REMOTE_TARGET_3 = """
net user@raw.server.com
path /usr/local/cores
core_collector makedumpfile -c --message-level 1 -d 31
""".strip()
def test_target():
kd = kdump.KDumpConf(context_wrap(KDUMP_LOCAL_FS_1))
assert kd.using_local_disk
assert kd.target == ('ext3', 'UUID=f15759be-89d4-46c4-9e1d-1b67e5b5da82')
assert kd['path'] == '/usr/local/cores'
kd = kdump.KDumpConf(context_wrap(KDUMP_LOCAL_FS_UNSUPPORTED_2))
assert kd.using_local_disk
assert kd.target is None
assert kd['path'] == '/usr/local/cores'
kd = kdump.KDumpConf(context_wrap(KDUMP_REMOTE_TARGET_3))
assert not kd.using_local_disk
assert kd.target == ('net', 'user@raw.server.com')
KDUMP_TARGET_CONFLICT_1 = """
net user@raw.server.com
raw /dev/sda5
"""
KDUMP_TARGET_CONFLICT_2 = """
ext4 /dev/sdb1
ext4 UUID=f15759be-89d4-46c4-9e1d-1b67e5b5da82
"""
def test_conflict_targets_exceptions():
with pytest.raises(ParseException) as e_info:
kdump.KDumpConf(context_wrap(KDUMP_TARGET_CONFLICT_1))
assert "More than one target is configured" in str(e_info.value)
with pytest.raises(ParseException) as e_info:
kdump.KDumpConf(context_wrap(KDUMP_TARGET_CONFLICT_2))
assert "More than one ext4 type targets" in str(e_info.value) |
299,495 | reset | from collections import Counter
from axelrod.action import Action, actions_to_str
C, D = Action.C, Action.D
class History(object):
"""
History class to track the history of play and metadata including
the number of cooperations and defections, and if available, the
opponent's plays and the state distribution of the history of play.
"""
def __init__(self, plays=None, coplays=None):
"""
Parameters
----------
plays:
An ordered iterable of the actions of the player.
coplays:
An ordered iterable of the actions of the coplayer (aka opponent).
"""
self._plays = []
# Coplays is tracked mainly for computation of the state distribution
# when cloning or dualing.
self._coplays = []
self._actions = Counter()
self._state_distribution = Counter()
if plays:
self.extend(plays, coplays)
def append(self, play, coplay):
"""Appends a new (play, coplay) pair an updates metadata for
number of cooperations and defections, and the state distribution."""
self._plays.append(play)
self._actions[play] += 1
self._coplays.append(coplay)
self._state_distribution[(play, coplay)] += 1
def copy(self):
"""Returns a new object with the same data."""
return self.__class__(plays=self._plays, coplays=self._coplays)
def flip_plays(self):
"""Creates a flipped plays history for use with DualTransformer."""
flipped_plays = [action.flip() for action in self._plays]
return self.__class__(plays=flipped_plays, coplays=self._coplays)
def extend(self, plays, coplays):
"""A function that emulates list.extend."""
# We could repeatedly call self.append but this is more efficient.
self._plays.extend(plays)
self._actions.update(plays)
self._coplays.extend(coplays)
self._state_distribution.update(zip(plays, coplays))
def METHOD_NAME(self):
"""Clears all data in the History object."""
self._plays.clear()
self._coplays.clear()
self._actions.clear()
self._state_distribution.clear()
@property
def coplays(self):
return self._coplays
@property
def cooperations(self):
return self._actions[C]
@property
def defections(self):
return self._actions[D]
@property
def state_distribution(self):
return self._state_distribution
def __eq__(self, other):
if isinstance(other, list):
return self._plays == other
elif isinstance(other, History):
return (
self._plays == other._plays and self._coplays == other._coplays
)
raise TypeError("Cannot compare types.")
def __getitem__(self, key):
# Passthrough keys and slice objects
return self._plays[key]
def __str__(self):
return actions_to_str(self._plays)
def __list__(self):
return self._plays
def __len__(self):
return len(self._plays)
def __repr__(self):
return repr(self.__list__())
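# Usage sketch (informal): the counters stay in sync with the raw play
# list, so the aggregate properties are O(1) lookups.
#
#   h = History()
#   h.append(C, D)
#   h.append(C, C)
#   assert h.cooperations == 2 and h.defections == 0
#   assert h.state_distribution[(C, D)] == 1
#   assert len(h) == 2 and h[0] == C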
class LimitedHistory(History):
"""
History class that only tracks the last N rounds. Used for testing memory
depth.
"""
def __init__(self, memory_depth, plays=None, coplays=None):
"""
Parameters
----------
memory_depth, int:
length of history to retain
"""
super().__init__(plays=plays, coplays=coplays)
self.memory_depth = memory_depth
def flip_plays(self):
"""Creates a flipped plays history for use with DualTransformer."""
flipped_plays = [action.flip() for action in self._plays]
return self.__class__(
self.memory_depth, plays=flipped_plays, coplays=self._coplays
)
def append(self, play, coplay):
"""Appends a new (play, coplay) pair an updates metadata for
number of cooperations and defections, and the state distribution."""
self._plays.append(play)
self._actions[play] += 1
self._coplays.append(coplay)
self._state_distribution[(play, coplay)] += 1
if len(self._plays) > self.memory_depth:
first_play, first_coplay = self._plays.pop(0), self._coplays.pop(0)
self._actions[first_play] -= 1
self._state_distribution[(first_play, first_coplay)] -= 1 |
299,496 | get default printer w | from _typeshed import Incomplete
import _win32typing
def OpenPrinter(printer: str, Defaults: Incomplete | None = ...) -> _win32typing.PyPrinterHANDLE: ...
def GetPrinter(hPrinter: _win32typing.PyPrinterHANDLE, Level: int = ...): ...
def SetPrinter(hPrinter: _win32typing.PyPrinterHANDLE, Level, pPrinter, Command) -> None: ...
def ClosePrinter(hPrinter: _win32typing.PyPrinterHANDLE) -> None: ...
def AddPrinterConnection(printer: str): ...
def DeletePrinterConnection(printer: str): ...
def EnumPrinters(flags, name: str | None = ..., level: int = ...): ...
def GetDefaultPrinter() -> str: ...
def METHOD_NAME() -> str: ...
def SetDefaultPrinter(printer: str): ...
def SetDefaultPrinterW(Printer: str): ...
def StartDocPrinter(hprinter: _win32typing.PyPrinterHANDLE, _tuple, level: int = ...): ...
def EndDocPrinter(hPrinter: _win32typing.PyPrinterHANDLE): ...
def AbortPrinter(hPrinter: _win32typing.PyPrinterHANDLE) -> None: ...
def StartPagePrinter(hprinter: _win32typing.PyPrinterHANDLE) -> None: ...
def EndPagePrinter(hprinter: _win32typing.PyPrinterHANDLE) -> None: ...
def StartDoc(hdc: int, docinfo): ...
def EndDoc(hdc: int) -> None: ...
def AbortDoc(hdc: int) -> None: ...
def StartPage(hdc: int) -> None: ...
def EndPage(hdc: int) -> None: ...
def WritePrinter(hprinter: _win32typing.PyPrinterHANDLE, buf: str): ...
def EnumJobs(hPrinter: _win32typing.PyPrinterHANDLE, FirstJob, NoJobs, Level=...): ...
def GetJob(hPrinter: _win32typing.PyPrinterHANDLE, JobID, Level: int = ...): ...
def SetJob(hPrinter: _win32typing.PyPrinterHANDLE, JobID, Level, JobInfo, Command): ...
def DocumentProperties(
HWnd: int,
hPrinter: _win32typing.PyPrinterHANDLE,
DeviceName: str,
DevModeOutput: _win32typing.PyDEVMODE,
DevModeInput: _win32typing.PyDEVMODE,
Mode,
): ...
def EnumPrintProcessors(Server: str | None = ..., Environment: str | None = ...) -> tuple[str, ...]: ...
def EnumPrintProcessorDatatypes(ServerName: str, PrintProcessorName: str) -> tuple[str, ...]: ...
def EnumPrinterDrivers(Server: str | None = ..., Environment: str | None = ..., Level=...) -> tuple[Incomplete, ...]: ...
def EnumForms(hprinter: _win32typing.PyPrinterHANDLE) -> tuple[_win32typing.FORM_INFO_1, ...]: ...
def AddForm(hprinter: _win32typing.PyPrinterHANDLE, Form) -> None: ...
def DeleteForm(hprinter: _win32typing.PyPrinterHANDLE, FormName: str) -> None: ...
def GetForm(hprinter: _win32typing.PyPrinterHANDLE, FormName: str) -> None: ...
def SetForm(hprinter: _win32typing.PyPrinterHANDLE, FormName: str, Form) -> None: ...
def AddJob(hprinter: _win32typing.PyPrinterHANDLE) -> None: ...
def ScheduleJob(hprinter: _win32typing.PyPrinterHANDLE, JobId) -> None: ...
def DeviceCapabilities(Device: str, Port: str, Capability, DEVMODE: _win32typing.PyDEVMODE | None = ...) -> None: ...
def GetDeviceCaps(hdc: int, Index): ...
def EnumMonitors(Name: str, Level) -> tuple[Incomplete, ...]: ...
def EnumPorts(Name: str, Level) -> tuple[Incomplete, ...]: ...
def GetPrintProcessorDirectory(Name: str, Environment: str) -> str: ...
def GetPrinterDriverDirectory(Name: str, Environment: str) -> str: ...
def AddPrinter(Name: str, Level, pPrinter) -> _win32typing.PyPrinterHANDLE: ...
def DeletePrinter(hPrinter: _win32typing.PyPrinterHANDLE) -> None: ...
def DeletePrinterDriver(Server: str, Environment: str, DriverName: str) -> None: ...
def DeletePrinterDriverEx(Server: str, Environment: str, DriverName: str, DeleteFlag, VersionFlag) -> None: ...
def FlushPrinter(Printer: _win32typing.PyPrinterHANDLE, Buf, Sleep): ...
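# Usage sketch (illustrative, not part of the stubs): enumerate local
# printers and show the default one. EnumPrinters at level 1 yields
# (flags, description, name, comment) tuples.
#
#   import win32print
#   for flags, description, name, comment in win32print.EnumPrinters(
#       win32print.PRINTER_ENUM_LOCAL
#   ):
#       print(name)
#   print(win32print.GetDefaultPrinter())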
DEF_PRIORITY: int
DI_APPBANDING: int
DI_ROPS_READ_DESTINATION: int
DPD_DELETE_ALL_FILES: int
DPD_DELETE_SPECIFIC_VERSION: int
DPD_DELETE_UNUSED_FILES: int
DSPRINT_PENDING: int
DSPRINT_PUBLISH: int
DSPRINT_REPUBLISH: int
DSPRINT_UNPUBLISH: int
DSPRINT_UPDATE: int
FORM_BUILTIN: int
FORM_PRINTER: int
FORM_USER: int
JOB_ACCESS_ADMINISTER: int
JOB_ACCESS_READ: int
JOB_ALL_ACCESS: int
JOB_CONTROL_CANCEL: int
JOB_CONTROL_DELETE: int
JOB_CONTROL_LAST_PAGE_EJECTED: int
JOB_CONTROL_PAUSE: int
JOB_CONTROL_RESTART: int
JOB_CONTROL_RESUME: int
JOB_CONTROL_SENT_TO_PRINTER: int
JOB_EXECUTE: int
JOB_INFO_1: int
JOB_POSITION_UNSPECIFIED: int
JOB_READ: int
JOB_STATUS_BLOCKED_DEVQ: int
JOB_STATUS_COMPLETE: int
JOB_STATUS_DELETED: int
JOB_STATUS_DELETING: int
JOB_STATUS_ERROR: int
JOB_STATUS_OFFLINE: int
JOB_STATUS_PAPEROUT: int
JOB_STATUS_PAUSED: int
JOB_STATUS_PRINTED: int
JOB_STATUS_PRINTING: int
JOB_STATUS_RESTART: int
JOB_STATUS_SPOOLING: int
JOB_STATUS_USER_INTERVENTION: int
JOB_WRITE: int
MAX_PRIORITY: int
MIN_PRIORITY: int
PORT_STATUS_DOOR_OPEN: int
PORT_STATUS_NO_TONER: int
PORT_STATUS_OFFLINE: int
PORT_STATUS_OUTPUT_BIN_FULL: int
PORT_STATUS_OUT_OF_MEMORY: int
PORT_STATUS_PAPER_JAM: int
PORT_STATUS_PAPER_OUT: int
PORT_STATUS_PAPER_PROBLEM: int
PORT_STATUS_POWER_SAVE: int
PORT_STATUS_TONER_LOW: int
PORT_STATUS_TYPE_ERROR: int
PORT_STATUS_TYPE_INFO: int
PORT_STATUS_TYPE_WARNING: int
PORT_STATUS_USER_INTERVENTION: int
PORT_STATUS_WARMING_UP: int
PORT_TYPE_NET_ATTACHED: int
PORT_TYPE_READ: int
PORT_TYPE_REDIRECTED: int
PORT_TYPE_WRITE: int
PRINTER_ACCESS_ADMINISTER: int
PRINTER_ACCESS_USE: int
PRINTER_ALL_ACCESS: int
PRINTER_ATTRIBUTE_DEFAULT: int
PRINTER_ATTRIBUTE_DIRECT: int
PRINTER_ATTRIBUTE_DO_COMPLETE_FIRST: int
PRINTER_ATTRIBUTE_ENABLE_BIDI: int
PRINTER_ATTRIBUTE_ENABLE_DEVQ: int
PRINTER_ATTRIBUTE_FAX: int
PRINTER_ATTRIBUTE_HIDDEN: int
PRINTER_ATTRIBUTE_KEEPPRINTEDJOBS: int
PRINTER_ATTRIBUTE_LOCAL: int
PRINTER_ATTRIBUTE_NETWORK: int
PRINTER_ATTRIBUTE_PUBLISHED: int
PRINTER_ATTRIBUTE_QUEUED: int
PRINTER_ATTRIBUTE_RAW_ONLY: int
PRINTER_ATTRIBUTE_SHARED: int
PRINTER_ATTRIBUTE_TS: int
PRINTER_ATTRIBUTE_WORK_OFFLINE: int
PRINTER_CONTROL_PAUSE: int
PRINTER_CONTROL_PURGE: int
PRINTER_CONTROL_RESUME: int
PRINTER_CONTROL_SET_STATUS: int
PRINTER_ENUM_CONNECTIONS: int
PRINTER_ENUM_CONTAINER: int
PRINTER_ENUM_DEFAULT: int
PRINTER_ENUM_EXPAND: int
PRINTER_ENUM_ICON1: int
PRINTER_ENUM_ICON2: int
PRINTER_ENUM_ICON3: int
PRINTER_ENUM_ICON4: int
PRINTER_ENUM_ICON5: int
PRINTER_ENUM_ICON6: int
PRINTER_ENUM_ICON7: int
PRINTER_ENUM_ICON8: int
PRINTER_ENUM_LOCAL: int
PRINTER_ENUM_NAME: int
PRINTER_ENUM_NETWORK: int
PRINTER_ENUM_REMOTE: int
PRINTER_ENUM_SHARED: int
PRINTER_EXECUTE: int
PRINTER_INFO_1: int
PRINTER_READ: int
PRINTER_STATUS_BUSY: int
PRINTER_STATUS_DOOR_OPEN: int
PRINTER_STATUS_ERROR: int
PRINTER_STATUS_INITIALIZING: int
PRINTER_STATUS_IO_ACTIVE: int
PRINTER_STATUS_MANUAL_FEED: int
PRINTER_STATUS_NOT_AVAILABLE: int
PRINTER_STATUS_NO_TONER: int
PRINTER_STATUS_OFFLINE: int
PRINTER_STATUS_OUTPUT_BIN_FULL: int
PRINTER_STATUS_OUT_OF_MEMORY: int
PRINTER_STATUS_PAGE_PUNT: int
PRINTER_STATUS_PAPER_JAM: int
PRINTER_STATUS_PAPER_OUT: int
PRINTER_STATUS_PAPER_PROBLEM: int
PRINTER_STATUS_PAUSED: int
PRINTER_STATUS_PENDING_DELETION: int
PRINTER_STATUS_POWER_SAVE: int
PRINTER_STATUS_PRINTING: int
PRINTER_STATUS_PROCESSING: int
PRINTER_STATUS_SERVER_UNKNOWN: int
PRINTER_STATUS_TONER_LOW: int
PRINTER_STATUS_USER_INTERVENTION: int
PRINTER_STATUS_WAITING: int
PRINTER_STATUS_WARMING_UP: int
PRINTER_WRITE: int
SERVER_ACCESS_ADMINISTER: int
SERVER_ACCESS_ENUMERATE: int
SERVER_ALL_ACCESS: int
SERVER_EXECUTE: int
SERVER_READ: int
SERVER_WRITE: int |
299,497 | handler | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"automanage configuration-profile-assignment vm show",
)
class Show(AAZCommand):
"""Get information about an association between a VM and Automanage configuration profile
:example: show configuration-profile-assignment vm
az automanage configuration-profile-assignment vm show -n default -g {rg} --vm-name {vm_name}
"""
_aaz_info = {
"version": "2022-05-04",
"resources": [
["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.compute/virtualmachines/{}/providers/microsoft.automanage/configurationprofileassignments/{}", "2022-05-04"],
]
}
def METHOD_NAME(self, command_args):
super().METHOD_NAME(command_args)
self._execute_operations()
return self._output()
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.configuration_profile_assignment_name = AAZStrArg(
options=["-n", "--name", "--configuration-profile-assignment-name"],
help="The configuration profile assignment name.",
required=True,
id_part="child_name_1",
)
_args_schema.resource_group = AAZResourceGroupNameArg(
required=True,
)
_args_schema.vm_name = AAZStrArg(
options=["--vm-name"],
help="The name of the virtual machine.",
required=True,
id_part="name",
)
return cls._args_schema
def _execute_operations(self):
self.pre_operations()
self.ConfigurationProfileAssignmentsGet(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
def _output(self, *args, **kwargs):
result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True)
return result
class ConfigurationProfileAssignmentsGet(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/providers/Microsoft.Automanage/configurationProfileAssignments/{configurationProfileAssignmentName}",
**self.url_parameters
)
@property
def method(self):
return "GET"
@property
def error_format(self):
return "MgmtErrorFormat"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"configurationProfileAssignmentName", self.ctx.args.configuration_profile_assignment_name,
required=True,
),
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
**self.serialize_url_param(
"vmName", self.ctx.args.vm_name,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2022-05-04",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.id = AAZStrType(
flags={"read_only": True},
)
_schema_on_200.managed_by = AAZStrType(
serialized_name="managedBy",
)
_schema_on_200.name = AAZStrType(
flags={"read_only": True},
)
_schema_on_200.properties = AAZObjectType()
_schema_on_200.system_data = AAZObjectType(
serialized_name="systemData",
flags={"read_only": True},
)
_schema_on_200.type = AAZStrType(
flags={"read_only": True},
)
properties = cls._schema_on_200.properties
properties.configuration_profile = AAZStrType(
serialized_name="configurationProfile",
)
properties.status = AAZStrType(
flags={"read_only": True},
)
properties.target_id = AAZStrType(
serialized_name="targetId",
flags={"read_only": True},
)
system_data = cls._schema_on_200.system_data
system_data.created_at = AAZStrType(
serialized_name="createdAt",
)
system_data.created_by = AAZStrType(
serialized_name="createdBy",
)
system_data.created_by_type = AAZStrType(
serialized_name="createdByType",
)
system_data.last_modified_at = AAZStrType(
serialized_name="lastModifiedAt",
)
system_data.last_modified_by = AAZStrType(
serialized_name="lastModifiedBy",
)
system_data.last_modified_by_type = AAZStrType(
serialized_name="lastModifiedByType",
)
return cls._schema_on_200
class _ShowHelper:
"""Helper class for Show"""
__all__ = ["Show"] |
299,498 | test article is related to owner | from django.apps import apps
from django.contrib.auth import get_user_model
from django.contrib.sites.models import Site
from django.test.testcases import TestCase
from django.urls import re_path
from wiki.conf import settings
from wiki.managers import ArticleManager
from wiki.models import Article
from wiki.models import ArticleRevision
from wiki.models import URLPath
from wiki.urls import WikiURLPatterns
User = get_user_model()
Group = apps.get_model(settings.GROUP_MODEL)
class WikiCustomUrlPatterns(WikiURLPatterns):
def get_article_urls(self):
urlpatterns = [
re_path(
"^my-wiki/(?P<article_id>[0-9]+)/$",
self.article_view_class.as_view(),
name="get",
),
]
return urlpatterns
def get_article_path_urls(self):
urlpatterns = [
re_path(
"^my-wiki/(?P<path>.+/|)$",
self.article_view_class.as_view(),
name="get",
),
]
return urlpatterns
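# Resolution sketch (hypothetical): with these patterns mounted, article 1
# is served at /my-wiki/1/ and path-based lookups at /my-wiki/<path>/.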
class ArticleModelTest(TestCase):
def test_default_fields_of_empty_article(self):
a = Article.objects.create()
self.assertIsNone(a.current_revision)
self.assertIsNone(a.owner)
self.assertIsNone(a.group)
self.assertIsNotNone(a.created)
self.assertIsNotNone(a.modified)
self.assertIsNotNone(a.group_read)
self.assertIsNotNone(a.group_write)
self.assertIsNotNone(a.other_read)
self.assertIsNotNone(a.other_write)
# XXX maybe redundant test
def test_model_manager_class(self):
self.assertIsInstance(Article.objects, ArticleManager)
def test_str_method_if_have_current_revision(self):
title = "Test title"
a = Article.objects.create()
ArticleRevision.objects.create(article=a, title=title)
self.assertEqual(str(a), title)
def test_str_method_if_dont_have_current_revision(self):
a = Article.objects.create()
expected = "Article without content (1)"
self.assertEqual(str(a), expected)
def test_get_absolute_url_if_urlpath_set_is_exists(self):
a1 = Article.objects.create()
s1 = Site.objects.create(domain="something.com", name="something.com")
u1 = URLPath.objects.create(article=a1, site=s1)
a2 = Article.objects.create()
s2 = Site.objects.create(domain="somethingelse.com", name="somethingelse.com")
URLPath.objects.create(article=a2, site=s2, parent=u1, slug="test_slug")
url = a2.get_absolute_url()
expected = "/test_slug/"
self.assertEqual(url, expected)
def test_get_absolute_url_if_urlpath_set_is_not_exists(self):
a = Article.objects.create()
url = a.get_absolute_url()
expected = "/1/"
self.assertEqual(url, expected)
def test_article_is_related_to_articlerevision(self):
title = "Test title"
a = Article.objects.create()
r = ArticleRevision.objects.create(article=a, title=title)
self.assertEqual(r.article, a)
self.assertIn(r, a.articlerevision_set.all())
def METHOD_NAME(self):
u = User.objects.create(username="Noman", password="pass")
a = Article.objects.create(owner=u)
self.assertEqual(a.owner, u)
self.assertIn(a, u.owned_articles.all())
def test_article_is_related_to_group(self):
g = Group.objects.create()
a = Article.objects.create(group=g)
self.assertEqual(a.group, g)
self.assertIn(a, g.article_set.all())
def test_cache(self):
a = Article.objects.create()
ArticleRevision.objects.create(article=a, title="test", content="# header")
expected = """<h1 id="wiki-toc-header">header""" """.*</h1>"""
# cached content does not exist yet. this will create it
self.assertRegexpMatches(a.get_cached_content(), expected)
# actual cached content test
self.assertRegexpMatches(a.get_cached_content(), expected)
def test_articlerevision_presave_signals(self):
a = Article.objects.create()
ar1 = ArticleRevision(article=a, title="revision1")
a.add_revision(ar1)
self.assertEqual(ar1, a.current_revision)
ar2 = ArticleRevision(article=a, title="revision2")
ar2.save()
self.assertEqual(ar2.previous_revision, ar1) |
299,499 | initiate messaging rule run | from django.conf import settings
from django.db import transaction
from django.db.models import Q
from dimagi.utils.chunked import chunked
from dimagi.utils.couch import CriticalSection
from field_audit.models import AuditAction
from corehq.apps.data_interfaces.models import AutomaticUpdateRule
from corehq.apps.es import CaseES
from corehq.apps.sms import tasks as sms_tasks
from corehq.form_processor.exceptions import CaseNotFound
from corehq.form_processor.models import CommCareCase
from corehq.messaging.scheduling.tasks import (
delete_schedule_instances_for_cases,
)
from corehq.messaging.scheduling.util import utcnow
from corehq.messaging.util import MessagingRuleProgressHelper
from corehq.sql_db.util import (
get_db_aliases_for_partitioned_query,
paginate_query,
paginate_query_across_partitioned_databases,
)
from corehq.util.celery_utils import no_result_task
from corehq.util.metrics.load_counters import case_load_counter
def get_sync_key(case_id):
return 'sync-case-for-messaging-%s' % case_id
@no_result_task(queue=settings.CELERY_REMINDER_CASE_UPDATE_QUEUE, acks_late=True,
default_retry_delay=5 * 60, max_retries=12, bind=True)
def sync_case_for_messaging_task(self, domain, case_id):
try:
with CriticalSection([get_sync_key(case_id)], timeout=5 * 60):
sync_case_for_messaging(domain, case_id)
except Exception as e:
self.retry(exc=e)
@no_result_task(queue=settings.CELERY_REMINDER_CASE_UPDATE_BULK_QUEUE, acks_late=True,
default_retry_delay=5 * 60, max_retries=12, bind=True)
def sync_case_for_messaging_rule(self, domain, case_id, rule_id):
try:
with CriticalSection([get_sync_key(case_id)], timeout=5 * 60):
_sync_case_for_messaging_rule(domain, case_id, rule_id)
except Exception as e:
self.retry(exc=e)
@no_result_task(queue=settings.CELERY_REMINDER_CASE_UPDATE_BULK_QUEUE, acks_late=True)
def sync_case_chunk_for_messaging_rule(domain, case_id_chunk, rule_id):
for case_id in case_id_chunk:
try:
with CriticalSection([get_sync_key(case_id)], timeout=5 * 60):
_sync_case_for_messaging_rule(domain, case_id, rule_id)
except Exception:
sync_case_for_messaging_rule.delay(domain, case_id, rule_id)
def sync_case_for_messaging(domain, case_id, get_rules=None):
try:
case = CommCareCase.objects.get_case(case_id, domain)
sms_tasks.clear_case_caches(case)
except CaseNotFound:
case = None
case_load_counter("messaging_sync", domain)()
update_messaging_for_case(domain, case_id, case)
if case is not None:
run_auto_update_rules_for_case(case, get_rules)
def update_messaging_for_case(domain, case_id, case):
if case is None or case.is_deleted:
clear_messaging_for_case(domain, case_id)
elif settings.USE_PHONE_ENTRIES:
sms_tasks.sync_case_phone_number(case)
def clear_messaging_for_case(domain, case_id):
sms_tasks.delete_phone_numbers_for_owners([case_id])
delete_schedule_instances_for_cases(domain, [case_id])
def run_auto_update_rules_for_case(case, get_rules=None):
if get_rules is not None:
rules = get_rules(case.domain, case.type)
else:
all_rules = AutomaticUpdateRule.by_domain_cached(case.domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
rules_by_case_type = AutomaticUpdateRule.organize_rules_by_case_type(all_rules)
rules = rules_by_case_type.get(case.type, [])
for rule in rules:
rule.run_rule(case, utcnow())
def get_cached_rule(domain, rule_id):
rules = AutomaticUpdateRule.by_domain_cached(domain, AutomaticUpdateRule.WORKFLOW_SCHEDULING)
rules = [rule for rule in rules if rule.pk == rule_id]
if len(rules) == 1:
return rules[0]
def _sync_case_for_messaging_rule(domain, case_id, rule_id):
case_load_counter("messaging_rule_sync", domain)()
try:
case = CommCareCase.objects.get_case(case_id, domain)
except CaseNotFound:
clear_messaging_for_case(domain, case_id)
return
rule = get_cached_rule(domain, rule_id)
if rule:
rule.run_rule(case, utcnow())
MessagingRuleProgressHelper(rule_id).increment_current_case_count()
def METHOD_NAME(rule):
if not rule.active:
return
rule.locked_for_editing = True
rule.save(update_fields=['locked_for_editing'])
transaction.on_commit(lambda: run_messaging_rule.delay(rule.domain, rule.pk))
def paginated_case_ids(domain, case_type, db_alias=None):
args = [
CommCareCase,
Q(domain=domain, type=case_type, deleted=False)
]
if db_alias:
fn = paginate_query
args = [db_alias] + args
else:
fn = paginate_query_across_partitioned_databases
row_generator = fn(*args, values=['case_id'], load_source='run_messaging_rule')
for row in row_generator:
yield row[0]
def get_case_ids_for_messaging_rule(domain, case_type):
return paginated_case_ids(domain, case_type)
@no_result_task(queue=settings.CELERY_REMINDER_CASE_UPDATE_BULK_QUEUE)
def set_rule_complete(rule_id):
AutomaticUpdateRule.objects.filter(pk=rule_id).update(locked_for_editing=False,
audit_action=AuditAction.AUDIT)
MessagingRuleProgressHelper(rule_id).set_rule_complete()
@no_result_task(queue=settings.CELERY_REMINDER_CASE_UPDATE_BULK_QUEUE, acks_late=True,
soft_time_limit=15 * settings.CELERY_TASK_SOFT_TIME_LIMIT)
def run_messaging_rule(domain, rule_id):
rule = get_cached_rule(domain, rule_id)
if not rule:
return
progress_helper = MessagingRuleProgressHelper(rule_id)
total_cases_count = CaseES().domain(domain).case_type(rule.case_type).count()
progress_helper.set_total_cases_to_be_processed(total_cases_count)
db_aliases = get_db_aliases_for_partitioned_query()
progress_helper.set_initial_progress(shard_count=len(db_aliases))
for db_alias in db_aliases:
run_messaging_rule_for_shard.delay(domain, rule_id, db_alias)
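# Fan-out sketch of the pipeline implemented above (informal):
#
#   run_messaging_rule(domain, rule_id)
#     -> run_messaging_rule_for_shard.delay(...)           # one task per shard
#        -> sync_case_chunk_for_messaging_rule.delay(...)  # chunks of case ids
#           -> _sync_case_for_messaging_rule(...)          # per-case rule run
#
# The last shard to call mark_shard_complete() triggers set_rule_complete.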
@no_result_task(queue=settings.CELERY_REMINDER_CASE_UPDATE_BULK_QUEUE, acks_late=True,
soft_time_limit=15 * settings.CELERY_TASK_SOFT_TIME_LIMIT)
def run_messaging_rule_for_shard(domain, rule_id, db_alias):
rule = get_cached_rule(domain, rule_id)
if not rule:
return
chunk_size = getattr(settings, 'MESSAGING_RULE_CASE_CHUNK_SIZE', 100)
progress_helper = MessagingRuleProgressHelper(rule_id)
if not progress_helper.is_canceled():
for case_id_chunk in chunked(paginated_case_ids(domain, rule.case_type, db_alias), chunk_size):
sync_case_chunk_for_messaging_rule.delay(domain, case_id_chunk, rule_id)
progress_helper.update_total_key_expiry()
if progress_helper.is_canceled():
break
all_shards_complete = progress_helper.mark_shard_complete(db_alias)
if all_shards_complete:
# this should get triggered for the last shard
set_rule_complete.delay(rule_id) |