id int64 0 300k | label stringlengths 1 74 β | text stringlengths 4k 8k |
|---|---|---|
299,200 | get circuits and is blockers | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import random
import pytest
import networkx
import cirq
class FakeDevice(cirq.Device):
    """Minimal concrete Device subclass used as a stand-in in these tests."""
    pass
def test_wrapper_eq():
    """Every Unique wrapper is its own equality group, even for equal ops."""
    a, b = cirq.LineQubit.range(2)
    tester = cirq.testing.EqualsTester()
    # Two wrappers around the same op (X on a) still compare unequal.
    for op in (cirq.X(a), cirq.X(a), cirq.Y(a), cirq.X(b)):
        tester.add_equality_group(cirq.contrib.CircuitDag.make_node(op))
def test_wrapper_cmp():
u0 = cirq.contrib.Unique(0)
u1 = cirq.contrib.Unique(1)
# The ordering of Unique instances is unpredictable
u0, u1 = (u1, u0) if u1 < u0 else (u0, u1)
assert u0 == u0
assert u0 != u1
assert u0 < u1
assert u1 > u0
assert u0 <= u0
assert u0 <= u1
assert u0 >= u0
assert u1 >= u0
def test_wrapper_cmp_failure():
    """Ordering a Unique against an unrelated type raises TypeError either way."""
    for lhs, rhs in (
        (object(), cirq.contrib.Unique(1)),
        (cirq.contrib.Unique(1), object()),
    ):
        with pytest.raises(TypeError):
            _ = lhs < rhs
def test_wrapper_repr():
    """repr() embeds both the wrapper's id and the wrapped operation."""
    qubit = cirq.LineQubit(0)
    wrapper = cirq.contrib.CircuitDag.make_node(cirq.X(qubit))
    assert repr(wrapper) == (
        f'cirq.contrib.Unique({id(wrapper)}, cirq.X(cirq.LineQubit(0)))'
    )
def test_init():
    """A freshly constructed CircuitDag is an empty DAG."""
    dag = cirq.contrib.CircuitDag()
    assert networkx.dag.is_directed_acyclic_graph(dag)
    assert list(dag.nodes()) == []
    assert list(dag.edges()) == []
def test_append():
    """Appending two ops on one qubit creates a single dependency edge."""
    qubit = cirq.LineQubit(0)
    dag = cirq.contrib.CircuitDag()
    for op in (cirq.X(qubit), cirq.Y(qubit)):
        dag.append(op)
    assert networkx.dag.is_directed_acyclic_graph(dag)
    assert len(dag.nodes()) == 2
    edge_vals = [(src.val, dst.val) for src, dst in dag.edges()]
    assert edge_vals == [(cirq.X(qubit), cirq.Y(qubit))]
def test_two_identical_ops():
    """Identical ops get distinct nodes, each ordered against every other."""
    qubit = cirq.LineQubit(0)
    dag = cirq.contrib.CircuitDag()
    for op in (cirq.X(qubit), cirq.Y(qubit), cirq.X(qubit)):
        dag.append(op)
    assert networkx.dag.is_directed_acyclic_graph(dag)
    assert len(dag.nodes()) == 3
    edge_vals = {(src.val, dst.val) for src, dst in dag.edges()}
    assert edge_vals == {
        (cirq.X(qubit), cirq.Y(qubit)),
        (cirq.X(qubit), cirq.X(qubit)),
        (cirq.Y(qubit), cirq.X(qubit)),
    }
def test_from_ops():
    """from_ops builds the same DAG as sequential appends would."""
    qubit = cirq.LineQubit(0)
    dag = cirq.contrib.CircuitDag.from_ops(cirq.X(qubit), cirq.Y(qubit))
    assert networkx.dag.is_directed_acyclic_graph(dag)
    assert len(dag.nodes()) == 2
    edge_vals = [(src.val, dst.val) for src, dst in dag.edges()]
    assert edge_vals == [(cirq.X(qubit), cirq.Y(qubit))]
def test_from_circuit():
    """from_circuit preserves ops, their ordering, and the qubit set."""
    qubit = cirq.LineQubit(0)
    circuit = cirq.Circuit(cirq.X(qubit), cirq.Y(qubit))
    dag = cirq.contrib.CircuitDag.from_circuit(circuit)
    assert networkx.dag.is_directed_acyclic_graph(dag)
    assert len(dag.nodes()) == 2
    edge_vals = [(src.val, dst.val) for src, dst in dag.edges()]
    assert edge_vals == [(cirq.X(qubit), cirq.Y(qubit))]
    assert sorted(circuit.all_qubits()) == sorted(dag.all_qubits())
def test_to_empty_circuit():
    """An empty circuit round-trips through the DAG unchanged."""
    empty = cirq.Circuit()
    dag = cirq.contrib.CircuitDag.from_circuit(empty)
    assert networkx.dag.is_directed_acyclic_graph(dag)
    assert dag.to_circuit() == empty
def test_to_circuit():
    """A two-op circuit round-trips exactly and preserves the unitary."""
    qubit = cirq.LineQubit(0)
    circuit = cirq.Circuit(cirq.X(qubit), cirq.Y(qubit))
    dag = cirq.contrib.CircuitDag.from_circuit(circuit)
    assert networkx.dag.is_directed_acyclic_graph(dag)
    # Only one possible output circuit for this simple case.
    rebuilt = dag.to_circuit()
    assert circuit == rebuilt
    cirq.testing.assert_allclose_up_to_global_phase(
        circuit.unitary(), rebuilt.unitary(), atol=1e-7
    )
def test_equality():
    """CircuitDags are equal iff they impose the same dependency ordering."""
    q0, q1 = cirq.LineQubit.range(2)
    # circuit2 is circuit1 with independent (commuting) ops reordered,
    # so both should produce the same DAG.
    circuit1 = cirq.Circuit(
        cirq.X(q0), cirq.Y(q0), cirq.Z(q1), cirq.CZ(q0, q1), cirq.X(q1), cirq.Y(q1), cirq.Z(q0)
    )
    circuit2 = cirq.Circuit(
        cirq.Z(q1), cirq.X(q0), cirq.Y(q0), cirq.CZ(q0, q1), cirq.Z(q0), cirq.X(q1), cirq.Y(q1)
    )
    # circuit3 changes one gate (Z -> Z**0.5): different DAG.
    circuit3 = cirq.Circuit(
        cirq.X(q0),
        cirq.Y(q0),
        cirq.Z(q1),
        cirq.CZ(q0, q1),
        cirq.X(q1),
        cirq.Y(q1),
        cirq.Z(q0) ** 0.5,
    )
    # circuit4 drops the final gate entirely: different DAG again.
    circuit4 = cirq.Circuit(
        cirq.X(q0), cirq.Y(q0), cirq.Z(q1), cirq.CZ(q0, q1), cirq.X(q1), cirq.Y(q1)
    )
    eq = cirq.testing.EqualsTester()
    eq.make_equality_group(
        lambda: cirq.contrib.CircuitDag.from_circuit(circuit1),
        lambda: cirq.contrib.CircuitDag.from_circuit(circuit2),
    )
    eq.add_equality_group(cirq.contrib.CircuitDag.from_circuit(circuit3))
    eq.add_equality_group(cirq.contrib.CircuitDag.from_circuit(circuit4))
def test_larger_circuit():
    """Round-trip a multi-qubit circuit and check diagram and unitary survive."""
    q0, q1, q2, q3 = [
        cirq.GridQubit(0, 5),
        cirq.GridQubit(1, 5),
        cirq.GridQubit(2, 5),
        cirq.GridQubit(3, 5),
    ]
    # This circuit does not have CZ gates on adjacent qubits because the order
    # dag.to_circuit() would append them is non-deterministic.
    circuit = cirq.Circuit(
        cirq.X(q0),
        cirq.CZ(q1, q2),
        cirq.CZ(q0, q1),
        cirq.Y(q0),
        cirq.Z(q0),
        cirq.CZ(q1, q2),
        cirq.X(q0),
        cirq.Y(q0),
        cirq.CZ(q0, q1),
        cirq.T(q3),
        strategy=cirq.InsertStrategy.EARLIEST,
    )
    dag = cirq.contrib.CircuitDag.from_circuit(circuit)
    assert networkx.dag.is_directed_acyclic_graph(dag)
    # Operation order within a moment is non-deterministic
    # but text diagrams still look the same.
    desired = """
(0, 5): ───X───@───Y───Z───X───Y───@───
               │                   │
(1, 5): ───@───@───@───────────────@───
           │       │
(2, 5): ───@───────@───────────────────

(3, 5): ───T───────────────────────────
"""
    cirq.testing.assert_has_diagram(circuit, desired)
    cirq.testing.assert_has_diagram(dag.to_circuit(), desired)
    cirq.testing.assert_allclose_up_to_global_phase(
        circuit.unitary(), dag.to_circuit().unitary(), atol=1e-7
    )
@pytest.mark.parametrize('circuit', [cirq.testing.random_circuit(10, 10, 0.5) for _ in range(3)])
def test_is_maximalist(circuit):
    """The DAG equals its own transitive closure and has no reversed edges."""
    dag = cirq.contrib.CircuitDag.from_circuit(circuit)
    transitive_closure = networkx.dag.transitive_closure(dag)
    assert cirq.contrib.CircuitDag(incoming_graph_data=transitive_closure) == dag
    # ordered_nodes() yields a topological order, so an edge (b, a) for a
    # before b would violate acyclicity/maximality.
    assert not any(dag.has_edge(b, a) for a, b in itertools.combinations(dag.ordered_nodes(), 2))
def METHOD_NAME():
    """Build (circuit, is_blocker) parameter pairs for the blocked-traversal test.

    Returns an iterator of (random circuit, predicate) pairs: one predicate
    that blocks nothing, and one that blocks multi-qubit ops acting on a
    qubit pair that is not in a randomly chosen "edge" set.
    """
    qubits = cirq.LineQubit.range(10)
    circuits = [cirq.testing.random_circuit(qubits, 10, 0.5) for _ in range(1)]
    # Random subset of qubit pairs treated as allowed edges.
    edges = [
        set(qubit_pair) for qubit_pair in itertools.combinations(qubits, 2) if random.random() > 0.5
    ]

    # PEP 8 (E731): use a def instead of assigning a lambda to a name.
    def not_on_edge(op):
        """Block multi-qubit ops whose qubit pair is not an allowed edge."""
        return len(op.qubits) > 1 and set(op.qubits) not in edges

    is_blockers = [lambda op: False, not_on_edge]
    return itertools.product(circuits, is_blockers)
@pytest.mark.parametrize('circuit, is_blocker', METHOD_NAME())
def test_findall_nodes_until_blocked(circuit, is_blocker):
    """findall_nodes_until_blocked returns exactly the unblocked frontier."""
    dag = cirq.contrib.CircuitDag.from_circuit(circuit)
    all_nodes = list(dag.ordered_nodes())
    found_nodes = list(dag.findall_nodes_until_blocked(is_blocker))
    # Found nodes must respect the topological order (no back edges among them).
    assert not any(dag.has_edge(b, a) for a, b in itertools.combinations(found_nodes, 2))
    # Expected: all nodes minus blocking nodes and their immediate successors.
    blocking_nodes = set(node for node in all_nodes if is_blocker(node.val))
    blocked_nodes = blocking_nodes.union(*(dag.succ[node] for node in blocking_nodes))
    expected_nodes = set(all_nodes) - blocked_nodes
    assert sorted(found_nodes) == sorted(expected_nodes)
299,201 | test get numbered text from nodes | """Test PromptHelper."""
from llama_index.indices.prompt_helper import PromptHelper
from llama_index.indices.tree.utils import get_numbered_text_from_nodes
from llama_index.prompts.base import PromptTemplate
from llama_index.prompts.prompt_utils import get_biggest_prompt, get_empty_prompt_txt
from llama_index.schema import TextNode
from llama_index.text_splitter.utils import truncate_text
from tests.mock_utils.mock_utils import mock_tokenizer
def test_get_chunk_size() -> None:
    """Test get chunk size given prompt."""
    # test with 1 chunk
    prompt = PromptTemplate("This is the prompt")
    prompt_helper = PromptHelper(
        context_window=11, num_output=1, chunk_overlap_ratio=0, tokenizer=mock_tokenizer
    )
    # window (11) - output (1) - prompt tokens (4 with mock_tokenizer) = 6
    chunk_size = prompt_helper._get_available_chunk_size(prompt, 1, padding=0)
    assert chunk_size == 6
    # test having 2 chunks
    prompt_helper = PromptHelper(
        context_window=11, num_output=1, chunk_overlap_ratio=0, tokenizer=mock_tokenizer
    )
    # same budget split over 2 chunks: 6 // 2 = 3
    chunk_size = prompt_helper._get_available_chunk_size(prompt, 2, padding=0)
    assert chunk_size == 3
    # test with 2 chunks, and with chunk_size_limit
    prompt_helper = PromptHelper(
        context_window=11,
        num_output=1,
        chunk_overlap_ratio=0,
        tokenizer=mock_tokenizer,
        chunk_size_limit=2,
    )
    # chunk_size_limit caps the computed size (3 -> 2)
    chunk_size = prompt_helper._get_available_chunk_size(prompt, 2, padding=0)
    assert chunk_size == 2
    # test padding
    prompt_helper = PromptHelper(
        context_window=11, num_output=1, chunk_overlap_ratio=0, tokenizer=mock_tokenizer
    )
    # one token of padding per chunk shrinks each chunk by one
    chunk_size = prompt_helper._get_available_chunk_size(prompt, 2, padding=1)
    assert chunk_size == 2
def test_get_text_splitter() -> None:
    """Test get text splitter."""
    test_prompt_text = "This is the prompt{text}"
    test_prompt = PromptTemplate(test_prompt_text)
    prompt_helper = PromptHelper(
        context_window=11, num_output=1, chunk_overlap_ratio=0, tokenizer=mock_tokenizer
    )
    text_splitter = prompt_helper.get_text_splitter_given_prompt(
        test_prompt, 2, padding=1
    )
    # 2 chunks with padding=1 leaves room for 2 tokens per chunk.
    assert text_splitter.chunk_size == 2
    test_text = "Hello world foo Hello world bar"
    text_chunks = text_splitter.split_text(test_text)
    assert text_chunks == ["Hello world", "foo Hello", "world bar"]
    # truncate_text keeps only what fits in the first chunk.
    truncated_text = truncate_text(test_text, text_splitter)
    assert truncated_text == "Hello world"
    # test with chunk_size_limit
    prompt_helper = PromptHelper(
        context_window=11,
        num_output=1,
        chunk_overlap_ratio=0,
        tokenizer=mock_tokenizer,
        chunk_size_limit=1,
    )
    text_splitter = prompt_helper.get_text_splitter_given_prompt(
        test_prompt, 2, padding=1
    )
    # chunk_size_limit=1 forces one-token chunks.
    text_chunks = text_splitter.split_text(test_text)
    assert text_chunks == ["Hello", "world", "foo", "Hello", "world", "bar"]
def test_get_text_splitter_partial() -> None:
    """Test get text splitter with a partially formatted prompt."""
    # test without partially formatting
    test_prompt_text = "This is the {foo} prompt{text}"
    test_prompt = PromptTemplate(test_prompt_text)
    prompt_helper = PromptHelper(
        context_window=11, num_output=1, chunk_overlap_ratio=0, tokenizer=mock_tokenizer
    )
    text_splitter = prompt_helper.get_text_splitter_given_prompt(
        test_prompt, 2, padding=1
    )
    test_text = "Hello world foo Hello world bar"
    text_chunks = text_splitter.split_text(test_text)
    assert text_chunks == ["Hello world", "foo Hello", "world bar"]
    truncated_text = truncate_text(test_text, text_splitter)
    assert truncated_text == "Hello world"
    # test with partially formatting: {foo} is filled in, only {text} remains.
    test_prompt = PromptTemplate(test_prompt_text)
    test_prompt = test_prompt.partial_format(foo="bar")
    prompt_helper = PromptHelper(
        context_window=12, num_output=1, chunk_overlap_ratio=0, tokenizer=mock_tokenizer
    )
    # The empty-prompt text now includes the partial value "bar".
    assert get_empty_prompt_txt(test_prompt) == "This is the bar prompt"
    text_splitter = prompt_helper.get_text_splitter_given_prompt(
        test_prompt, 2, padding=1
    )
    test_text = "Hello world foo Hello world bar"
    text_chunks = text_splitter.split_text(test_text)
    assert text_chunks == ["Hello world", "foo Hello", "world bar"]
    truncated_text = truncate_text(test_text, text_splitter)
    assert truncated_text == "Hello world"
def test_truncate() -> None:
    """Test that truncate() trims each chunk to the available context."""
    # The "test" prefix consumes one token of the window.
    prompt = PromptTemplate("test{text}")
    # With context_window=19, each of two chunks gets 4 tokens of text
    # plus 5 tokens of padding.
    helper = PromptHelper(
        context_window=19, num_output=0, chunk_overlap_ratio=0, tokenizer=mock_tokenizer
    )
    chunks = ["This is a test foo bar", "Hello world bar foo"]
    truncated = helper.truncate(prompt=prompt, text_chunks=chunks)
    assert truncated == ["This is a test", "Hello world bar foo"]
def METHOD_NAME() -> None:
    """Test get_text_from_nodes."""
    # test prompt uses up one token
    test_prompt_txt = "test{text}"
    test_prompt = PromptTemplate(test_prompt_txt)
    # set context_window=17
    # For each text chunk, there's 3 for text, 5 for padding (including number)
    prompt_helper = PromptHelper(
        context_window=17, num_output=0, chunk_overlap_ratio=0, tokenizer=mock_tokenizer
    )
    node1 = TextNode(text="This is a test foo bar")
    node2 = TextNode(text="Hello world bar foo")
    text_splitter = prompt_helper.get_text_splitter_given_prompt(
        prompt=test_prompt,
        num_chunks=2,
    )
    # Each node's text is numbered "(i) ..." and truncated to the chunk size.
    response = get_numbered_text_from_nodes([node1, node2], text_splitter=text_splitter)
    assert str(response) == ("(1) This is a\n\n(2) Hello world bar")
def test_repack() -> None:
    """Test that repack() merges small chunks up to the context limit."""
    prompt = PromptTemplate("This is the prompt{text}")
    helper = PromptHelper(
        context_window=13,
        num_output=1,
        chunk_overlap_ratio=0,
        tokenizer=mock_tokenizer,
        separator="\n\n",
    )
    words = ["Hello", "world", "foo", "Hello", "world", "bar"]
    repacked = helper.repack(prompt, words)
    # Three words fit per packed chunk, joined with the separator.
    assert repacked == ["Hello\n\nworld\n\nfoo", "Hello\n\nworld\n\nbar"]
def test_get_biggest_prompt() -> None:
    """Test get_biggest_prompt from PromptHelper."""
    short_prompt = PromptTemplate("This is the prompt{text}")
    long_prompt = PromptTemplate("This is the longer prompt{text}")
    shortest_prompt = PromptTemplate("This is the {text}")
    # The template with the longest text wins.
    assert get_biggest_prompt([short_prompt, long_prompt, shortest_prompt]) == long_prompt
299,202 | backward compatible download | """
OONI API - various pages e.g.
/ <index>
/files
Redirects:
/stats
/files
/files/by_date
"""
import re
from datetime import timedelta, datetime
from flask import Blueprint, render_template, redirect, send_file, make_response
from werkzeug.exceptions import BadRequest, NotFound
from werkzeug.wrappers import Response # why not flask.Response?
# Exporting it
from .docs import api_docs_blueprint
pages_blueprint = Blueprint(
"pages", "measurements", static_folder="static", static_url_path="/static/"
)
DAY_REGEXP = re.compile(r"^\d{4}\-[0-1]\d\-[0-3]\d$")
@pages_blueprint.route("/")
def index():
    """Landing page
    ---
    responses:
      '200':
        description: Rendered HTML landing page
    """
    return render_template("index.html")
@pages_blueprint.route("/stats")
def stats() -> Response:
    """Legacy stats page
    ---
    responses:
      '301':
        description: Permanent redirect to OONI Explorer
    """
    return redirect("https://explorer.ooni.org", 301)
@pages_blueprint.route("/files")
def files_index() -> Response:
    """Legacy files index
    ---
    responses:
      '301':
        description: Permanent redirect to OONI Explorer search
    """
    return redirect("https://explorer.ooni.org/search", 301)
@pages_blueprint.route("/files/by_date")
def files_by_date() -> Response:
    """Legacy files-by-date index
    ---
    responses:
      '301':
        description: Permanent redirect to OONI Explorer search
    """
    return redirect("https://explorer.ooni.org/search", 301)
@pages_blueprint.route("/files/by_date/<date>")
def files_on_date(date) -> Response:
    """Legacy files listing for one day
    ---
    responses:
      '301':
        description: Permanent redirect to an Explorer search over that day
    """
    if not DAY_REGEXP.match(date):
        raise BadRequest("Invalid date format")
    # Search window covers the single requested day: [date, date + 1 day).
    day = datetime.strptime(date, "%Y-%m-%d")
    next_day = (day + timedelta(days=1)).strftime("%Y-%m-%d")
    url = "https://explorer.ooni.org/search?until={}&since={}".format(next_day, date)
    return redirect(url, 301)
@pages_blueprint.route("/files/by_country")
def files_by_country() -> Response:
    """Legacy files-by-country index
    ---
    responses:
      '301':
        description: Permanent redirect to OONI Explorer search
    """
    return redirect("https://explorer.ooni.org/search", 301)
@pages_blueprint.route("/files/by_country/<country_code>")
def files_in_country(country_code) -> Response:
    """Legacy files listing for one country
    ---
    responses:
      '301':
        description: Permanent redirect to an Explorer search for the country
    """
    if len(country_code) != 2:
        raise BadRequest("Country code must be two characters")
    # Explorer expects an upper-case ISO country code.
    cc = country_code.upper()
    return redirect("https://explorer.ooni.org/search?probe_cc={}".format(cc), 301)
@pages_blueprint.route("/robots.txt")
def robots_txt() -> Response:
    """Robots.txt
    ---
    responses:
      '200':
        description: robots.txt content
    """
    # Keep crawlers away from expensive API endpoints and legacy paths.
    txt = """
User-agent: *
Disallow: /api/_
Disallow: /api/v1/aggregation
Disallow: /api/v1/measurement_meta
Disallow: /api/v1/raw_measurement
Disallow: /api/v1/test-list/urls
Disallow: /api/v1/torsf_stats
Disallow: /files
Disallow: /stats
Disallow: /201
Disallow: /202
Crawl-delay: 300
"""
    resp = make_response(txt)
    resp.headers["Content-type"] = "text/plain"
    # Allow clients/proxies to cache for one day.
    resp.cache_control.max_age = 86400
    return resp
# These two are needed to avoid breaking older URLs
@pages_blueprint.route("/<date>/<report_file>")
def METHOD_NAME(date, report_file) -> Response:
    """Legacy entry point: redirect old per-day report URLs to the
    file download endpoint.
    ---
    responses:
      '302':
        description: Redirect to /files/download/<report_file>
    """
    if DAY_REGEXP.match(date) and report_file.endswith(".json"):
        # XXX maybe do some extra validation on report_file
        return redirect("/files/download/%s" % report_file)
    raise NotFound
@pages_blueprint.route("/<date>")
def backward_compatible_by_date(date) -> Response:
    """Legacy per-day page: redirect to an Explorer search over that day
    ---
    responses:
      '301':
        description: Permanent redirect to explorer.ooni.org
    """
    # Guard clause: anything that is not a YYYY-MM-DD date is not ours.
    if not DAY_REGEXP.match(date):
        raise NotFound
    day = datetime.strptime(date, "%Y-%m-%d")
    until = (day + timedelta(days=1)).strftime("%Y-%m-%d")
    return redirect(
        "https://explorer.ooni.org/search?until={}&since={}".format(until, date), 301
    )
299,203 | package | from conan import ConanFile
from conan.tools.cmake import CMake, CMakeDeps, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, collect_libs, copy, export_conandata_patches, get, rmdir, save
import os
import textwrap
required_conan_version = ">=1.53.0"
class LibgeotiffConan(ConanFile):
    """Conan recipe packaging libgeotiff (GeoTIFF tag I/O on top of libtiff)."""
    name = "libgeotiff"
    description = "Libgeotiff is an open source library normally hosted on top " \
                  "of libtiff for reading, and writing GeoTIFF information tags."
    license = ["MIT", "BSD-3-Clause"]
    topics = ("geotiff", "tiff")
    homepage = "https://github.com/OSGeo/libgeotiff"
    url = "https://github.com/conan-io/conan-center-index"
    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
    }

    def export_sources(self):
        # Ship the patches listed in conandata.yml together with the recipe.
        export_conandata_patches(self)

    def config_options(self):
        # fPIC has no meaning on Windows.
        if self.settings.os == "Windows":
            del self.options.fPIC

    def configure(self):
        # fPIC is implied for shared builds.
        if self.options.shared:
            self.options.rm_safe("fPIC")
        # Drop C++-specific settings from the configuration.
        self.settings.rm_safe("compiler.cppstd")
        self.settings.rm_safe("compiler.libcxx")

    def layout(self):
        cmake_layout(self, src_folder="src")

    def requirements(self):
        # libgeotiff/include/xtiffio.h includes libtiff/include/tiffio.h
        self.requires("libtiff/4.5.1", transitive_headers=True, transitive_libs=True)
        self.requires("proj/9.2.1")

    def source(self):
        get(self, **self.conan_data["sources"][self.version], strip_root=True)

    def generate(self):
        tc = CMakeToolchain(self)
        tc.variables["WITH_UTILITIES"] = False
        tc.variables["WITH_TOWGS84"] = True
        tc.generate()
        deps = CMakeDeps(self)
        deps.generate()

    def build(self):
        apply_conandata_patches(self)
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def METHOD_NAME(self):
        """Install built artifacts and emit CMake compatibility modules."""
        copy(self, "LICENSE", src=self.source_folder, dst=os.path.join(self.package_folder, "licenses"))
        cmake = CMake(self)
        cmake.install()
        # Remove upstream CMake/doc leftovers; Conan generates its own files.
        rmdir(self, os.path.join(self.package_folder, "cmake"))
        rmdir(self, os.path.join(self.package_folder, "doc"))
        rmdir(self, os.path.join(self.package_folder, "share"))
        self._create_cmake_module_variables(
            os.path.join(self.package_folder, self._module_vars_file)
        )
        self._create_cmake_module_alias_targets(
            os.path.join(self.package_folder, self._module_target_file),
            {"geotiff_library": "geotiff::geotiff"}
        )

    def _create_cmake_module_variables(self, module_file):
        # Expose GEOTIFF_* variables expected by consumers of the legacy
        # FindGeoTIFF module.
        content = textwrap.dedent("""\
            set(GEOTIFF_FOUND ${GeoTIFF_FOUND})
            if(DEFINED GeoTIFF_INCLUDE_DIR)
                set(GEOTIFF_INCLUDE_DIR ${GeoTIFF_INCLUDE_DIR})
            endif()
            if(DEFINED GeoTIFF_LIBRARIES)
                set(GEOTIFF_LIBRARIES ${GeoTIFF_LIBRARIES})
            endif()
        """)
        save(self, module_file, content)

    def _create_cmake_module_alias_targets(self, module_file, targets):
        # Provide alias targets (e.g. geotiff_library) for legacy generators.
        content = ""
        for alias, aliased in targets.items():
            content += textwrap.dedent(f"""\
                if(TARGET {aliased} AND NOT TARGET {alias})
                    add_library({alias} INTERFACE IMPORTED)
                    set_property(TARGET {alias} PROPERTY INTERFACE_LINK_LIBRARIES {aliased})
                endif()
            """)
        save(self, module_file, content)

    @property
    def _module_vars_file(self):
        return os.path.join("lib", "cmake", f"conan-official-{self.name}-variables.cmake")

    @property
    def _module_target_file(self):
        return os.path.join("lib", "cmake", f"conan-official-{self.name}-targets.cmake")

    def package_info(self):
        self.cpp_info.set_property("cmake_find_mode", "both")
        self.cpp_info.set_property("cmake_module_file_name", "GeoTIFF")
        self.cpp_info.set_property("cmake_build_modules", [self._module_vars_file])
        self.cpp_info.set_property("cmake_file_name", "geotiff")
        self.cpp_info.set_property("cmake_target_name", "geotiff_library")
        # Legacy cmake_find_package* generator support.
        self.cpp_info.names["cmake_find_package"] = "GeoTIFF"
        self.cpp_info.names["cmake_find_package_multi"] = "geotiff"
        self.cpp_info.build_modules["cmake_find_package"] = [self._module_vars_file]
        self.cpp_info.build_modules["cmake_find_package_multi"] = [self._module_target_file]
        self.cpp_info.libs = collect_libs(self)
        if self.settings.os in ["Linux", "FreeBSD"]:
            self.cpp_info.system_libs.append("m")
299,204 | get key val | """
owtf.config
~~~~~~~~~~~
The Configuration object parses all configuration files, loads them into
memory, derives some settings and provides framework modules with a central
repository to get info.
"""
import logging
from collections import defaultdict
try: # PY3
from urllib.parse import urlparse
except ImportError: # PY2
from urlparse import urlparse
try:
import configparser as parser
except ImportError:
import ConfigParser as parser
from owtf.lib.exceptions import PluginAbortException
from owtf.settings import CONFIG_TYPES, REPLACEMENT_DELIMITER, ROOT_DIR
__all__ = ["config_handler"]
class Config(object):
    """Central configuration repository for the framework.

    Parses configuration into per-type dictionaries (see CONFIG_TYPES) and
    offers get/set accessors keyed by delimiter-padded names.
    """
    # Current target context, if any; managed elsewhere by the framework.
    target = None

    def __init__(self):
        self.root_dir = ROOT_DIR
        self.config = defaultdict(list)  # General configuration information.
        for type in CONFIG_TYPES:
            self.config[
                type
            ] = {}  # key can consist alphabets, numbers, hyphen & underscore.
        self.cli_options = {}

    def is_set(self, key):
        """Checks if the key is set in the config dict

        :param key: Key to check
        :type key: `str`
        :return: True if present, else False
        :rtype: `bool`
        """
        key = self.pad_key(key)
        config = self.get_config_dict
        for type in CONFIG_TYPES:
            if key in config[type]:
                return True
        return False

    def METHOD_NAME(self, key):
        """Gets the right config for target / general.

        :param key: The key
        :type key: `str`
        :return: Value for the key
        """
        # NOTE(review): returns None when the key is absent from every config
        # type instead of raising KeyError, so get_val's KeyError handler
        # below looks unreachable for missing keys — confirm intended.
        config = self.get_config_dict
        for type in CONFIG_TYPES:
            if key in config[type]:
                return config[type][key]

    def pad_key(self, key):
        """Add delimiters.

        :param key: Key to pad
        :type key: `str`
        :return: Padded key string
        :rtype: `str`
        """
        return REPLACEMENT_DELIMITER + key + REPLACEMENT_DELIMITER

    def strip_key(self, key):
        """Replaces key with empty space

        :param key: Key to clear
        :type key: `str`
        :return: Empty key
        :rtype: `str`
        """
        return key.replace(REPLACEMENT_DELIMITER, "")

    def get_val(self, key):
        """Transparently gets config info from target or General.

        :param key: Key
        :type key: `str`
        :return: The value for the key
        """
        try:
            key = self.pad_key(key)
            return self.METHOD_NAME(key)
        except KeyError:
            message = "The configuration item: %s does not exist!" % key
            # Raise plugin-level exception to move on to next plugin.
            raise PluginAbortException(message)

    def get_as_list(self, key_list):
        """Get values for keys in a list

        :param key_list: List of keys
        :type key_list: `list`
        :return: List of corresponding values
        :rtype: `list`
        """
        value_list = []
        for key in key_list:
            value_list.append(self.get_val(key))
        return value_list

    def get_header_list(self, key):
        """Get list from a string of values for a key

        :param key: Key
        :type key: `str`
        :return: List of values
        :rtype: `list`
        """
        return self.get_val(key).split(",")

    def set_general_val(self, type, key, value):
        """ Set value for a key in any config file

        :param type: Type of config file, framework or general.cfg
        :type type: `str`
        :param key: The key
        :type key: `str`
        :param value: Value to be set
        :type value:
        :return: None
        :rtype: None
        """
        self.config[type][key] = value

    def set_val(self, key, value):
        """set config items in target-specific or General config."""
        # Store config in "replacement mode", that way we can multiple-replace
        # the config on resources, etc.
        key = REPLACEMENT_DELIMITER + key + REPLACEMENT_DELIMITER
        type = "other"
        # Only when value is a string, store in replacements config.
        if isinstance(value, str):
            type = "string"
        return self.set_general_val(type, key, value)

    @property
    def get_framework_config_dict(self):
        # Convenience view over the "string" (replacement-mode) config.
        return self.get_config_dict["string"]

    def __getitem__(self, key):
        # Dict-style read access delegates to get_val.
        return self.get_val(key)

    def __setitem__(self, key, value):
        # Dict-style write access delegates to set_val.
        return self.set_val(key, value)

    @property
    def get_config_dict(self):
        """Get the global config dictionary

        :return: None
        :rtype: None
        """
        return self.config

    @property
    def get_replacement_dict(self):
        # Placeholders substituted into resource strings.
        return {"FRAMEWORK_DIR": self.root_dir}

    def show(self):
        """Print all keys and values from configuration dictionary

        :return: None
        :rtype: None
        """
        logging.info("Configuration settings: ")
        for k, v in list(self.get_config_dict.items()):
            logging.info("%s => %s", str(k), str(v))

    def get_tcp_ports(self, start_port, end_port):
        """Get TCP ports from the config file

        :param start_port: Start port in a range
        :type start_port: `str`
        :param end_port: End port
        :type end_port: `str`
        :return: Comma-separate string of tcp ports
        :rtype: `str`
        """
        return ",".join(
            self.get_val("TCP_PORTS").split(",")[int(start_port):int(end_port)]
        )

    def get_udp_ports(self, start_port, end_port):
        """Get UDP ports from the config file

        :param start_ort: Start port in a range
        :type start_port: `str`
        :param end_port: End port
        :type end_port: `str`
        :return: Comma-separate string of udp ports
        :rtype: `str`
        """
        return ",".join(
            self.get_val("UDP_PORTS").split(",")[int(start_port):int(end_port)]
        )
# Shared module-level configuration instance used across framework modules.
config_handler = Config()
299,205 | import | # -*- coding: utf-8 -*-
#
# This file is part of SENAITE.CORE.
#
# SENAITE.CORE is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2023 by it's authors.
# Some rights reserved, see README and LICENSE.
""" Shimadzu LC MS/MS Nexera X2 LCMS-8050
"""
from bika.lims import bikaMessageFactory as _
from senaite.core.exportimport.instruments.resultsimport import \
InstrumentCSVResultsFileParser, AnalysisResultsImporter
from datetime import datetime
import json
import traceback
title = "Shimadzu LC MS/MS Nexera X2 LCMS-8050"
def METHOD_NAME(context, request):
    """Read Shimadzu LC MS/MS Nexera X2 LCMS-8050 analysis results.

    Parses the uploaded results file and imports the values into the
    matching analysis requests, honoring the selected AR states and the
    override policy. Returns a JSON string with errors/log/warns.
    """
    form = request.form
    # The results file may be posted as a list of uploads; use the first one.
    infile = form['instrument_results_file'][0] if \
        isinstance(form['instrument_results_file'], list) else \
        form['instrument_results_file']
    artoapply = form['artoapply']
    override = form['results_override']
    instrument = form.get('instrument', None)
    errors = []
    logs = []
    # Bug fix: initialize warns here so the no-parser path below does not
    # raise NameError when building the results dict.
    warns = []
    # Load the most suitable parser according to file extension/options/etc...
    # Bug fix: only build the parser when a file was actually uploaded;
    # previously TSVParser(infile) ran unconditionally, so the
    # "No file selected" error did not stop the import.
    parser = None
    if not hasattr(infile, 'filename'):
        errors.append(_("No file selected"))
    else:
        parser = TSVParser(infile)
    if parser:
        # Load the importer
        status = ['sample_received', 'to_be_verified']
        if artoapply == 'received':
            status = ['sample_received']
        elif artoapply == 'received_tobeverified':
            status = ['sample_received', 'to_be_verified']
        # override -> [override_results, override_empty_results]
        over = [False, False]
        if override == 'nooverride':
            over = [False, False]
        elif override == 'override':
            over = [True, False]
        elif override == 'overrideempty':
            over = [True, True]
        importer = LCMS8050_Importer(parser=parser,
                                     context=context,
                                     allowed_ar_states=status,
                                     allowed_analysis_states=None,
                                     override=over,
                                     instrument_uid=instrument)
        tbex = ''
        try:
            importer.process()
        except Exception:
            tbex = traceback.format_exc()
        errors = importer.errors
        logs = importer.logs
        warns = importer.warns
        if tbex:
            errors.append(tbex)
    results = {'errors': errors, 'log': logs, 'warns': warns}
    return json.dumps(results)
class TSVParser(InstrumentCSVResultsFileParser):
    """Line-oriented parser for the instrument's tab-separated results export."""

    def __init__(self, csv):
        InstrumentCSVResultsFileParser.__init__(self, csv)
        self._currentresultsheader = []  # column names of the current block
        self._currentanalysiskw = ''     # analysis keyword from the last 'Name' row
        self._numline = 0                # line counter used in messages

    def _parseline(self, line):
        # Hook invoked by the base parser for every input line.
        return self.parse_TSVline(line)

    def parse_TSVline(self, line):
        """ Parses result lines
        """
        split_row = [token.strip() for token in line.split('\t')]
        # 'Conc.' is the column holding the reported result value.
        _results = {'DefaultResult': 'Conc.'}
        # ID# 1
        if split_row[0] == 'ID#':
            return 0
        # Name CBDV - cannabidivarin
        elif split_row[0] == 'Name':
            if split_row[1]:
                self._currentanalysiskw = split_row[1]
                return 0
            else:
                self.warn("Analysis Keyword not found or empty",
                          numline=self._numline, line=line)
        # Data Filename Sample Name Sample ID Sample Type Level#
        elif 'Sample ID' in split_row:
            # Header row: prepend a '#' column so it aligns with the
            # numbered data rows below.
            split_row.insert(0, '#')
            self._currentresultsheader = split_row
            return 0
        # 1 QC PREP A_QC PREP A_009.lcd QC PREP
        elif split_row[0].isdigit():
            _results.update(dict(zip(self._currentresultsheader, split_row)))
            # 10/17/2016 7:55:06 PM
            try:
                da = datetime.strptime(
                    _results['Date Acquired'], "%m/%d/%Y %I:%M:%S %p")
                self._header['Output Date'] = da
                self._header['Output Time'] = da
            except ValueError:
                self.err("Invalid Output Time format",
                         numline=self._numline, line=line)
            result = _results[_results['DefaultResult']]
            column_name = _results['DefaultResult']
            result = self.zeroValueDefaultInstrumentResults(
                column_name, result, line)
            _results[_results['DefaultResult']] = result
            self._addRawResult(_results['Sample ID'],
                               values={self._currentanalysiskw: _results},
                               override=False)

    def zeroValueDefaultInstrumentResults(self, column_name, result, line):
        # Normalize "no detection" markers ('--', '', 'ND') and clamp
        # negative readings to 0.0.
        result = str(result)
        if result.startswith('--') or result == '' or result == 'ND':
            return 0.0
        try:
            result = float(result)
            if result < 0.0:
                result = 0.0
        except ValueError:
            self.err(
                "No valid number ${result} in column (${column_name})",
                mapping={"result": result,
                         "column_name": column_name},
                numline=self._numline, line=line)
            # NOTE(review): returns None on parse failure, which is then
            # stored as the raw result value — confirm this is intended.
            return
        return result
class LCMS8050_Importer(AnalysisResultsImporter):
    """Results importer for the Shimadzu LCMS-8050: a thin wrapper that
    delegates everything to AnalysisResultsImporter."""

    def __init__(self, parser, context, override,
                 allowed_ar_states=None, allowed_analysis_states=None,
                 instrument_uid=''):
        AnalysisResultsImporter.__init__(self, parser, context,
                                         override, allowed_ar_states,
                                         allowed_analysis_states,
                                         instrument_uid)
299,206 | close | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any
from azure.core import PipelineClient
from azure.core.rest import HttpRequest, HttpResponse
from ._configuration import AutoRestHttpInfrastructureTestServiceConfiguration
from ._serialization import Deserializer, Serializer
from .operations import (
HttpClientFailureOperations,
HttpFailureOperations,
HttpRedirectsOperations,
HttpRetryOperations,
HttpServerFailureOperations,
HttpSuccessOperations,
MultipleResponsesOperations,
)
class AutoRestHttpInfrastructureTestService: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes
    """Test Infrastructure for AutoRest.

    NOTE: this class is generated by AutoRest; regeneration will discard edits.

    :ivar http_failure: HttpFailureOperations operations
    :vartype http_failure: httpinfrastructureversiontolerant.operations.HttpFailureOperations
    :ivar http_success: HttpSuccessOperations operations
    :vartype http_success: httpinfrastructureversiontolerant.operations.HttpSuccessOperations
    :ivar http_redirects: HttpRedirectsOperations operations
    :vartype http_redirects: httpinfrastructureversiontolerant.operations.HttpRedirectsOperations
    :ivar http_client_failure: HttpClientFailureOperations operations
    :vartype http_client_failure:
     httpinfrastructureversiontolerant.operations.HttpClientFailureOperations
    :ivar http_server_failure: HttpServerFailureOperations operations
    :vartype http_server_failure:
     httpinfrastructureversiontolerant.operations.HttpServerFailureOperations
    :ivar http_retry: HttpRetryOperations operations
    :vartype http_retry: httpinfrastructureversiontolerant.operations.HttpRetryOperations
    :ivar multiple_responses: MultipleResponsesOperations operations
    :vartype multiple_responses:
     httpinfrastructureversiontolerant.operations.MultipleResponsesOperations
    :keyword endpoint: Service URL. Default value is "http://localhost:3000".
    :paramtype endpoint: str
    """

    def __init__( # pylint: disable=missing-client-constructor-parameter-credential
        self, *, endpoint: str = "http://localhost:3000", **kwargs: Any
    ) -> None:
        self._config = AutoRestHttpInfrastructureTestServiceConfiguration(**kwargs)
        self._client: PipelineClient = PipelineClient(base_url=endpoint, config=self._config, **kwargs)

        # Client-side validation is disabled: this test service deliberately
        # exercises unusual HTTP responses that strict validation would reject.
        self._serialize = Serializer()
        self._deserialize = Deserializer()
        self._serialize.client_side_validation = False

        # One operation group per HTTP-behaviour area under test.
        self.http_failure = HttpFailureOperations(self._client, self._config, self._serialize, self._deserialize)
        self.http_success = HttpSuccessOperations(self._client, self._config, self._serialize, self._deserialize)
        self.http_redirects = HttpRedirectsOperations(self._client, self._config, self._serialize, self._deserialize)
        self.http_client_failure = HttpClientFailureOperations(
            self._client, self._config, self._serialize, self._deserialize
        )
        self.http_server_failure = HttpServerFailureOperations(
            self._client, self._config, self._serialize, self._deserialize
        )
        self.http_retry = HttpRetryOperations(self._client, self._config, self._serialize, self._deserialize)
        self.multiple_responses = MultipleResponsesOperations(
            self._client, self._config, self._serialize, self._deserialize
        )

    def send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse:
        """Runs the network request through the client's chained policies.

        >>> from azure.core.rest import HttpRequest
        >>> request = HttpRequest("GET", "https://www.example.org/")
        <HttpRequest [GET], url: 'https://www.example.org/'>
        >>> response = client.send_request(request)
        <HttpResponse: 200 OK>

        For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request

        :param request: The network request you want to make. Required.
        :type request: ~azure.core.rest.HttpRequest
        :keyword bool stream: Whether the response payload will be streamed. Defaults to False.
        :return: The response of your network call. Does not do error handling on your response.
        :rtype: ~azure.core.rest.HttpResponse
        """
        # Copy so the caller's request object is never mutated by URL formatting.
        request_copy = deepcopy(request)
        request_copy.url = self._client.format_url(request_copy.url)
        return self._client.send_request(request_copy, **kwargs)

    def METHOD_NAME(self) -> None:
        # Release the underlying transport/session resources.
        # (Removed a stray trailing "|" dump artifact from the original file.)
        self._client.METHOD_NAME()

    def __enter__(self) -> "AutoRestHttpInfrastructureTestService":
        self._client.__enter__()
        return self

    def __exit__(self, *exc_details: Any) -> None:
        self._client.__exit__(*exc_details)
import typing
import uuid
from datetime import datetime
import simplejson as json
from django.db import models
from django.utils import timezone
from task_processor.exceptions import TaskProcessingError
from task_processor.managers import RecurringTaskManager, TaskManager
from task_processor.task_registry import registered_tasks
class AbstractBaseTask(models.Model):
    """Common persistence and execution machinery for queued tasks.

    Concrete subclasses (Task, RecurringTask) add scheduling semantics;
    args/kwargs are stored JSON-serialized and deserialized on access.
    """

    uuid = models.UUIDField(unique=True, default=uuid.uuid4)
    created_at = models.DateTimeField(auto_now_add=True)
    task_identifier = models.CharField(max_length=200)
    serialized_args = models.TextField(blank=True, null=True)
    serialized_kwargs = models.TextField(blank=True, null=True)
    is_locked = models.BooleanField(default=False)

    class Meta:
        abstract = True

    @property
    def args(self) -> typing.List[typing.Any]:
        """Deserialized positional arguments (empty list if none stored)."""
        if self.serialized_args:
            return self.deserialize_data(self.serialized_args)
        return []

    @property
    def METHOD_NAME(self) -> typing.Dict[str, typing.Any]:
        """Deserialized keyword arguments (empty dict if none stored)."""
        if self.serialized_kwargs:
            return self.deserialize_data(self.serialized_kwargs)
        return {}

    @staticmethod
    def serialize_data(data: typing.Any):
        # TODO: add datetime support if needed
        return json.dumps(data)

    @staticmethod
    def deserialize_data(data: typing.Any):
        return json.loads(data)

    def mark_failure(self):
        self.unlock()

    def mark_success(self):
        self.unlock()

    def unlock(self):
        self.is_locked = False

    def run(self):
        """Execute the registered callable with the stored args/kwargs."""
        return self.callable(*self.args, **self.METHOD_NAME)

    @property
    def callable(self) -> typing.Callable:
        """Look up the registered task function for this identifier.

        :raises TaskProcessingError: if no task is registered under
            ``task_identifier``.
        """
        try:
            return registered_tasks[self.task_identifier]
        except KeyError as e:
            # Bug fix: the identifier was previously passed as a second
            # positional argument, so the '%s' was never interpolated into
            # the exception message.
            raise TaskProcessingError(
                "No task registered with identifier '%s'. Ensure your task is "
                "decorated with @register_task_handler."
                % self.task_identifier
            ) from e
class Task(AbstractBaseTask):
    """A one-shot task, executed once at (or after) ``scheduled_for``."""

    scheduled_for = models.DateTimeField(blank=True, null=True, default=timezone.now)

    # denormalise failures and completion so that we can use select_for_update
    num_failures = models.IntegerField(default=0)
    completed = models.BooleanField(default=False)

    objects = TaskManager()

    class Meta:
        # We have customised the migration in 0004 to only apply this change to postgres databases
        # TODO: work out how to index the taskprocessor_task table for Oracle and MySQL
        indexes = [
            models.Index(
                name="incomplete_tasks_idx",
                fields=["scheduled_for"],
                condition=models.Q(completed=False, num_failures__lt=3),
            )
        ]

    @classmethod
    def create(
        cls,
        task_identifier: str,
        *,
        args: typing.Optional[typing.Tuple[typing.Any, ...]] = None,
        METHOD_NAME: typing.Optional[typing.Dict[str, typing.Any]] = None,
    ) -> "Task":
        """Build (but do not save) a Task with serialized args/kwargs."""
        return Task(
            task_identifier=task_identifier,
            serialized_args=cls.serialize_data(args or tuple()),
            serialized_kwargs=cls.serialize_data(METHOD_NAME or dict()),
        )

    @classmethod
    def schedule_task(
        cls,
        schedule_for: datetime,
        task_identifier: str,
        *,
        args: typing.Optional[typing.Tuple[typing.Any, ...]] = None,
        METHOD_NAME: typing.Optional[typing.Dict[str, typing.Any]] = None,
    ) -> "Task":
        """Build (but do not save) a Task due to run at *schedule_for*."""
        task = cls.create(
            task_identifier=task_identifier,
            args=args,
            METHOD_NAME=METHOD_NAME,
        )
        task.scheduled_for = schedule_for
        return task

    def mark_failure(self):
        # Unlock (base behaviour) and record the failed attempt.
        super().mark_failure()
        self.num_failures += 1

    def mark_success(self):
        # Unlock (base behaviour) and mark the task as done.
        super().mark_success()
        self.completed = True
class RecurringTask(AbstractBaseTask):
    """A task executed repeatedly, once every ``run_every`` interval."""

    run_every = models.DurationField()
    # Earliest time of day the very first run is allowed to happen.
    first_run_time = models.TimeField(blank=True, null=True)

    objects = RecurringTaskManager()

    class Meta:
        constraints = [
            models.UniqueConstraint(
                fields=["task_identifier", "run_every"],
                name="unique_run_every_tasks",
            ),
        ]

    @property
    def should_execute(self) -> bool:
        """Whether the task is due to run now, based on its run history."""
        now = timezone.now()
        last_task_run = self.task_runs.order_by("-started_at").first()
        if not last_task_run:
            # If we have never run this task, then we should execute it only if
            # the time has passed after which we want to ensure this task runs.
            # This allows us to control when intensive tasks should be run.
            return not (self.first_run_time and self.first_run_time > now.time())
        # if the last run was at t- run_every, then we should execute it
        if (timezone.now() - last_task_run.started_at) >= self.run_every:
            return True
        # if the last run was not a success and we do not have
        # more than 3 failures in t- run_every, then we should execute it
        if (
            last_task_run.result != TaskResult.SUCCESS.name
            and self.task_runs.filter(started_at__gte=(now - self.run_every)).count()
            <= 3
        ):
            return True
        # otherwise, we should not execute it
        return False

    @property
    def is_task_registered(self) -> bool:
        """True if a handler is registered for this task's identifier."""
        return self.task_identifier in registered_tasks
class TaskResult(models.Choices):
    # Outcome of a single task run, stored on (Recurring)TaskRun.result.
    SUCCESS = "SUCCESS"
    FAILURE = "FAILURE"
class AbstractTaskRun(models.Model):
    """Record of one execution attempt of a task (abstract base)."""

    started_at = models.DateTimeField()
    # Null while the run is still in progress.
    finished_at = models.DateTimeField(blank=True, null=True)
    result = models.CharField(
        max_length=50, choices=TaskResult.choices, blank=True, null=True, db_index=True
    )
    error_details = models.TextField(blank=True, null=True)

    class Meta:
        abstract = True
class TaskRun(AbstractTaskRun):
    """Execution record for a one-shot Task."""

    task = models.ForeignKey(Task, on_delete=models.CASCADE, related_name="task_runs")
class RecurringTaskRun(AbstractTaskRun):
    """Execution record for one iteration of a RecurringTask."""

    task = models.ForeignKey(
        RecurringTask, on_delete=models.CASCADE, related_name="task_runs"
    )
class HealthCheckModel(models.Model):
    """Throwaway row written and read back to verify the processor is alive.

    (Removed a stray trailing "|" dump artifact that made the last field
    declaration invalid Python.)
    """

    created_at = models.DateTimeField(auto_now_add=True)
    uuid = models.UUIDField(unique=True, blank=False, null=False)
"""Miscellaneous morphology functions."""
import numpy as np
import functools
from scipy import ndimage as ndi
from .._shared.utils import warn
# Our function names don't exactly correspond to ndimages.
# This dictionary translates from our names to scipy's.
funcs = ('erosion', 'dilation', 'opening', 'closing')
skimage2ndimage = {x: 'grey_' + x for x in funcs}
# These function names are the same in ndimage.
funcs = ('binary_erosion', 'binary_dilation', 'binary_opening',
'binary_closing', 'black_tophat', 'white_tophat')
skimage2ndimage.update({x: x for x in funcs})
def METHOD_NAME(func):
    """Decorator to add a default footprint to morphology functions.

    Parameters
    ----------
    func : function
        A morphology function such as erosion, dilation, opening, closing,
        white_tophat, or black_tophat.

    Returns
    -------
    func_out : function
        The function, using a default footprint of same dimension
        as the input image with connectivity 1.
    """
    @functools.wraps(func)
    def wrapped(image, footprint=None, *args, **kwargs):
        # Fall back to a connectivity-1 cross of the image's dimensionality.
        selem = (ndi.generate_binary_structure(image.ndim, 1)
                 if footprint is None else footprint)
        return func(image, footprint=selem, *args, **kwargs)

    return wrapped
def _check_dtype_supported(ar):
# Should use `issubdtype` for bool below, but there's a bug in numpy 1.7
if not (ar.dtype == bool or np.issubdtype(ar.dtype, np.integer)):
raise TypeError("Only bool or integer image types are supported. "
f"Got {ar.dtype}.")
def remove_small_objects(ar, min_size=64, connectivity=1, *, out=None):
    """Remove objects smaller than the specified size.

    Expects ar to be an array with labeled objects, and removes objects
    smaller than min_size. If `ar` is bool, the image is first labeled.
    This leads to potentially different behavior for bool and 0-and-1
    arrays.

    Parameters
    ----------
    ar : ndarray (arbitrary shape, int or bool type)
        The array containing the objects of interest. If the array type is
        int, the ints must be non-negative.
    min_size : int, optional (default: 64)
        The smallest allowable object size.
    connectivity : int, {1, 2, ..., ar.ndim}, optional (default: 1)
        The connectivity defining the neighborhood of a pixel. Used during
        labelling if `ar` is bool.
    out : ndarray
        Array of the same shape as `ar`, into which the output is
        placed. By default, a new array is created.

    Raises
    ------
    TypeError
        If the input array is of an invalid type, such as float or string.
    ValueError
        If the input array contains negative values.

    Returns
    -------
    out : ndarray, same shape and type as input `ar`
        The input array with small connected components removed.

    Examples
    --------
    >>> from skimage import morphology
    >>> a = np.array([[0, 0, 0, 1, 0],
    ...               [1, 1, 1, 0, 0],
    ...               [1, 1, 1, 0, 1]], bool)
    >>> b = morphology.remove_small_objects(a, 6)
    >>> b
    array([[False, False, False, False, False],
           [ True,  True,  True, False, False],
           [ True,  True,  True, False, False]])
    >>> c = morphology.remove_small_objects(a, 7, connectivity=2)
    >>> c
    array([[False, False, False,  True, False],
           [ True,  True,  True, False, False],
           [ True,  True,  True, False, False]])
    >>> d = morphology.remove_small_objects(a, 6, out=a)
    >>> d is a
    True
    """
    # Raising type error if not int or bool
    _check_dtype_supported(ar)

    if out is None:
        out = ar.copy()
    else:
        out[:] = ar

    if min_size == 0:  # shortcut for efficiency
        return out

    if out.dtype == bool:
        footprint = ndi.generate_binary_structure(ar.ndim, connectivity)
        ccs = np.zeros_like(ar, dtype=np.int32)
        ndi.label(ar, footprint, output=ccs)
    else:
        ccs = out

    try:
        component_sizes = np.bincount(ccs.ravel())
    except ValueError as err:
        # Chain the original bincount error so the traceback keeps its cause.
        raise ValueError("Negative value labels are not supported. Try "
                         "relabeling the input with `scipy.ndimage.label` or "
                         "`skimage.morphology.label`.") from err

    if len(component_sizes) == 2 and out.dtype != bool:
        warn("Only one label was provided to `remove_small_objects`. "
             "Did you mean to use a boolean array?")

    too_small = component_sizes < min_size
    too_small_mask = too_small[ccs]
    out[too_small_mask] = 0

    return out
def remove_small_holes(ar, area_threshold=64, connectivity=1, *, out=None):
    """Remove contiguous holes smaller than the specified size.

    Parameters
    ----------
    ar : ndarray (arbitrary shape, int or bool type)
        The array containing the connected components of interest.
    area_threshold : int, optional (default: 64)
        The maximum area, in pixels, of a contiguous hole that will be filled.
        Replaces `min_size`.
    connectivity : int, {1, 2, ..., ar.ndim}, optional (default: 1)
        The connectivity defining the neighborhood of a pixel.
    out : ndarray
        Array of the same shape as `ar` and bool dtype, into which the
        output is placed. By default, a new array is created.

    Raises
    ------
    TypeError
        If the input array is of an invalid type, such as float or string.
    ValueError
        If the input array contains negative values.

    Returns
    -------
    out : ndarray, same shape and type as input `ar`
        The input array with small holes within connected components removed.

    Examples
    --------
    >>> from skimage import morphology
    >>> a = np.array([[1, 1, 1, 1, 1, 0],
    ...               [1, 1, 1, 0, 1, 0],
    ...               [1, 0, 0, 1, 1, 0],
    ...               [1, 1, 1, 1, 1, 0]], bool)
    >>> b = morphology.remove_small_holes(a, 2)
    >>> b
    array([[ True,  True,  True,  True,  True, False],
           [ True,  True,  True,  True,  True, False],
           [ True, False, False,  True,  True, False],
           [ True,  True,  True,  True,  True, False]])
    >>> c = morphology.remove_small_holes(a, 2, connectivity=2)
    >>> c
    array([[ True,  True,  True,  True,  True, False],
           [ True,  True,  True, False,  True, False],
           [ True, False, False,  True,  True, False],
           [ True,  True,  True,  True,  True, False]])
    >>> d = morphology.remove_small_holes(a, 2, out=a)
    >>> d is a
    True

    Notes
    -----
    If the array type is int, it is assumed that it contains already-labeled
    objects. The labels are not kept in the output image (this function always
    outputs a bool image). It is suggested that labeling is completed after
    using this function.
    """
    _check_dtype_supported(ar)

    # Creates warning if image is an integer image
    if ar.dtype != bool:
        warn("Any labeled images will be returned as a boolean array. "
             "Did you mean to use a boolean array?", UserWarning)

    if out is not None:
        if out.dtype != bool:
            raise TypeError("out dtype must be bool")
    else:
        out = ar.astype(bool, copy=True)

    # Creating the inverse of ar
    np.logical_not(ar, out=out)

    # removing small objects from the inverse of ar
    # (removed a stray trailing "|" dump artifact after `return out`)
    out = remove_small_objects(out, area_threshold, connectivity, out=out)

    np.logical_not(out, out=out)

    return out
import numpy as np
from numpy.testing import assert_array_almost_equal
import openpnm as op
class DelaunayGabrielTest:
    """Tests for openpnm's Delaunay network generator across domain shapes
    (square, cube, disk, cylinder, sphere) and point specifications.
    """

    def setup_class(self):
        pass

    def teardown_class(self):
        pass

    def test_delaunay_square_with_reflect(self):
        """No points should fall outside a 2D square, reflected or not."""
        np.random.seed(0)
        shape = [1, 1, 0]
        tri = op.network.Delaunay(points=30, shape=shape, reflect=False)
        assert op.topotools.isoutside(network=tri, shape=shape).sum() == 0
        tri = op.network.Delaunay(points=30, shape=shape, reflect=True)
        assert op.topotools.isoutside(network=tri, shape=shape).sum() == 0

    def test_delaunay_cube_with_trim_reflect(self):
        """No points should fall outside a 3D cube, reflected or not."""
        np.random.seed(0)
        shape = [1, 1, 1]
        tri = op.network.Delaunay(points=30, shape=shape, reflect=False)
        assert op.topotools.isoutside(network=tri, shape=shape).sum() == 0
        tri = op.network.Delaunay(points=30, shape=shape, reflect=True)
        assert op.topotools.isoutside(network=tri, shape=shape).sum() == 0

    def test_delaunay_square_with_2D_points(self):
        """2D input points are padded with a zero z-coordinate."""
        np.random.seed(0)
        pts = np.random.rand(50, 2)
        tri = op.network.Delaunay(points=pts, shape=[1, 1, 0])
        assert tri.coords.shape == (50, 3)
        assert np.all(tri.coords[:, :2] == pts)

    def test_delaunay_square_with_3D_points(self):
        """3D points in a flat domain get their z-coordinate zeroed."""
        np.random.seed(0)
        pts = np.random.rand(50, 3)
        tri = op.network.Delaunay(points=pts, shape=[1, 1, 0])
        assert tri.coords.shape == (50, 3)
        assert np.all(tri.coords[:, :2] == pts[:, :2])
        assert np.all(tri.coords[:, -1] != pts[:, -1])
        assert np.all(tri.coords[:, -1] == 0.0)

    def test_delaunay_square_with_num_points(self):
        """A point count in a flat shape yields a 2D network."""
        np.random.seed(0)
        tri = op.network.Delaunay(points=30, shape=[1, 1, 0])
        assert op.topotools.dimensionality(network=tri).sum() == 2

    def test_delaunay_cube_with_points(self):
        """Explicit 3D points in a cube are used verbatim."""
        np.random.seed(0)
        pts = np.random.rand(50, 3)
        tri = op.network.Delaunay(points=pts, shape=[1, 1, 1])
        assert tri.coords.shape == (50, 3)
        assert np.all(tri.coords == pts)

    def test_delaunay_cube_with_num_points(self):
        """A point count in a cube yields a 3D network."""
        np.random.seed(0)
        tri = op.network.Delaunay(points=30, shape=[1, 1, 1])
        assert op.topotools.dimensionality(network=tri).sum() == 3

    def test_delaunay_disk_with_2D_points(self):
        """2D points (from cylindrical coords) in a disk get z == 0."""
        np.random.seed(0)
        rqz = np.random.rand(50, 3)*np.array([1, 2*np.pi, 1])
        pts = np.vstack(op._skgraph.tools.cyl2cart(*rqz.T)).T
        tri = op.network.Delaunay(points=pts[:, :2], shape=[1, 0])
        assert tri.coords.shape == (50, 3)
        assert_array_almost_equal(tri.coords[:, :2], pts[:, :2], decimal=15)
        assert np.all(tri.coords[:, -1] != pts[:, -1])
        assert np.all(tri.coords[:, -1] == 0.0)

    def test_delaunay_disk_with_prereflected_points(self):
        """Points reflected by the caller survive trimming: the first 50
        (the originals) are returned unchanged."""
        np.random.seed(0)
        rqz = np.random.rand(50, 3)*np.array([1, 2*np.pi, 1])
        rqz = op._skgraph.generators.tools.reflect_base_points(rqz.T,
                                                               domain_size=[1, 1])
        pts = np.vstack(op._skgraph.tools.cyl2cart(*rqz)).T
        tri = op.network.Delaunay(points=pts, shape=[1, 1], reflect=False, trim=True)
        assert tri.coords.shape == (50, 3)
        assert_array_almost_equal(tri.coords, pts[:50, :], decimal=15)

    def test_delaunay_disk_with_3D_points(self):
        """3D points in a cylinder domain are used verbatim."""
        np.random.seed(0)
        rqz = np.random.rand(50, 3)*np.array([1, 2*np.pi, 1])
        pts = np.vstack(op._skgraph.tools.cyl2cart(*rqz.T)).T
        tri = op.network.Delaunay(points=pts, shape=[1, 1])
        assert tri.coords.shape == (50, 3)
        assert_array_almost_equal(tri.coords, pts, decimal=15)

    def test_delaunay_disk_with_num_points(self):
        """A point count in a disk shape yields a 2D network."""
        np.random.seed(0)
        tri = op.network.Delaunay(points=30, shape=[1, 0])
        assert op.topotools.dimensionality(network=tri).sum() == 2

    def test_delaunay_cylinder_with_points(self):
        """Explicit points in a cylinder are used verbatim."""
        np.random.seed(0)
        rqz = np.random.rand(50, 3)*np.array([1, 2*np.pi, 1])
        pts = np.vstack(op._skgraph.tools.cyl2cart(*rqz.T)).T
        tri = op.network.Delaunay(points=pts, shape=[1, 1])
        assert tri.coords.shape == (50, 3)
        assert_array_almost_equal(tri.coords, pts, decimal=15)

    def METHOD_NAME(self):
        """A point count in a spherical shape ([1]) yields a 3D network."""
        np.random.seed(0)
        tri = op.network.Delaunay(points=30, shape=[1])
        assert op.topotools.dimensionality(network=tri).sum() == 3
if __name__ == '__main__':
    # Ad-hoc runner: execute every test_* method when invoked directly.
    t = DelaunayGabrielTest()
    t.setup_class()
    self = t  # kept so individual tests can be re-run interactively
    for item in dir(t):
        if item.startswith('test'):
            print(f"Running test: {item}")
            # Idiomatic getattr instead of t.__getattribute__; also removed
            # a stray trailing "|" dump artifact.
            getattr(t, item)()
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Unit tests for lambda_basics.py functions.
"""
import json
import unittest.mock
import zipfile
import boto3
from botocore.exceptions import ClientError
import pytest
from lambda_basics import LambdaWrapper
def test_create_lambda_deployment_package(monkeypatch):
    """A deployment package is produced without touching the file system."""
    # Stub out ZipFile.write so no real files are required.
    monkeypatch.setattr(zipfile.ZipFile, 'write', lambda x, y, z: None)
    wrapper = LambdaWrapper(None, None)
    got_package = wrapper.create_deployment_package('test-file', 'other-file')
    assert got_package is not None
@pytest.mark.parametrize(
    'error_code,stop_on_method', [
        (None, None),
        ('TestException', 'stub_create_role'),
        ('TestException', 'stub_attach_role_policy')
    ])
def test_create_iam_role_for_lambda(
        make_stubber, make_unique_name, stub_runner, error_code, stop_on_method):
    """Role creation succeeds, or surfaces ClientError from either IAM call."""
    iam_resource = boto3.resource('iam')
    iam_stubber = make_stubber(iam_resource.meta.client)
    wrapper = LambdaWrapper(None, iam_resource)
    role_name = make_unique_name('role-')
    # Stub order mirrors the calls create_iam_role_for_lambda makes:
    # get_role (missing) -> create_role -> attach_role_policy.
    with stub_runner(error_code, stop_on_method) as runner:
        runner.add(iam_stubber.stub_get_role, role_name, error_code='NoSuchEntity')
        runner.add(iam_stubber.stub_create_role, role_name)
        runner.add(
            iam_stubber.stub_attach_role_policy, role_name,
            'arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole')
    if error_code is None:
        got_role, got_created = wrapper.create_iam_role_for_lambda(role_name)
        assert got_role.name == role_name
        assert got_created
    else:
        with pytest.raises(ClientError) as exc_info:
            wrapper.create_iam_role_for_lambda(role_name)
        assert exc_info.value.response['Error']['Code'] == error_code
@pytest.mark.parametrize('error_code', [None, 'TestException'])
def test_create_function(make_stubber, make_unique_name, error_code):
    """create_function returns the new ARN, or propagates ClientError."""
    lambda_client = boto3.client('lambda')
    lambda_stubber = make_stubber(lambda_client)
    wrapper = LambdaWrapper(lambda_client, None)
    func_name = make_unique_name('func-')
    handler_name = make_unique_name('handler-')
    iam_role = unittest.mock.MagicMock(arn='arn:aws:iam:::role/test-role')
    test_package = 'test-package'
    func_arn = f'arn:aws:lambda:::function/{func_name}'
    lambda_stubber.stub_create_function(
        func_name, func_arn, iam_role.arn, handler_name, test_package,
        error_code=error_code)
    if error_code is None:
        # The wrapper polls get_function until the function is Active.
        lambda_stubber.stub_get_function(func_name, 'Active')
    if error_code is None:
        got_arn = wrapper.create_function(func_name, handler_name, iam_role, test_package)
        assert got_arn == func_arn
    else:
        with pytest.raises(ClientError) as exc_info:
            wrapper.create_function(
                func_name, handler_name, iam_role, test_package)
        assert exc_info.value.response['Error']['Code'] == error_code
@pytest.mark.parametrize('error_code', [
    None, 'TestException', 'ResourceNotFoundException'])
def test_get_function(make_stubber, error_code):
    """get_function swallows ResourceNotFoundException but raises others."""
    lambda_client = boto3.client('lambda')
    lambda_stubber = make_stubber(lambda_client)
    wrapper = LambdaWrapper(lambda_client, None)
    func_name = 'test-func_name'
    lambda_stubber.stub_get_function(func_name, error_code=error_code)
    if error_code in (None, 'ResourceNotFoundException'):
        # "Not found" is an expected outcome, not an error, for this API.
        wrapper.get_function(func_name)
    else:
        with pytest.raises(ClientError) as exc_info:
            wrapper.get_function(func_name)
        assert exc_info.value.response['Error']['Code'] == error_code
@pytest.mark.parametrize('error_code', [None, 'TestException'])
def test_delete_function(make_stubber, make_unique_name, error_code):
    """delete_function completes quietly, or propagates ClientError."""
    lambda_client = boto3.client('lambda')
    lambda_stubber = make_stubber(lambda_client)
    wrapper = LambdaWrapper(lambda_client, None)
    func_name = make_unique_name('func-')
    lambda_stubber.stub_delete_function(func_name, error_code=error_code)
    if error_code is None:
        wrapper.delete_function(func_name)
    else:
        with pytest.raises(ClientError) as exc_info:
            wrapper.delete_function(func_name)
        assert exc_info.value.response['Error']['Code'] == error_code
@pytest.mark.parametrize('error_code', [None, 'TestException'])
def test_invoke_function(make_stubber, make_unique_name, error_code):
    """invoke_function serializes params to JSON and returns the payload."""
    lambda_client = boto3.client('lambda')
    lambda_stubber = make_stubber(lambda_client)
    wrapper = LambdaWrapper(lambda_client, None)
    func_name = make_unique_name('func-')
    func_params = {'param1': 'test', 'param2': 35}
    response_payload = 'ahoy there'
    lambda_stubber.stub_invoke(
        func_name, json.dumps(func_params), response_payload, log_type='None',
        error_code=error_code)
    if error_code is None:
        response = wrapper.invoke_function(func_name, func_params)
        assert response['Payload'] == response_payload
    else:
        with pytest.raises(ClientError) as exc_info:
            wrapper.invoke_function(func_name, func_params)
        assert exc_info.value.response['Error']['Code'] == error_code
@pytest.mark.parametrize('error_code', [None, 'TestException'])
def METHOD_NAME(make_stubber, error_code):
    """update_function_code returns the update status, or raises ClientError."""
    lambda_client = boto3.client('lambda')
    lambda_stubber = make_stubber(lambda_client)
    wrapper = LambdaWrapper(lambda_client, None)
    func_name = 'test-func_name'
    package = 'test-package'
    update_status = 'InProgress'
    lambda_stubber.stub_update_function_code(
        func_name, update_status, package=package, error_code=error_code)
    if error_code is None:
        got_response = wrapper.update_function_code(func_name, package)
        assert got_response['LastUpdateStatus'] == update_status
    else:
        with pytest.raises(ClientError) as exc_info:
            wrapper.update_function_code(func_name, package)
        assert exc_info.value.response['Error']['Code'] == error_code
@pytest.mark.parametrize('error_code', [None, 'TestException'])
def test_update_function_configuration(make_stubber, error_code):
    """update_function_configuration passes env vars through, or raises."""
    lambda_client = boto3.client('lambda')
    lambda_stubber = make_stubber(lambda_client)
    wrapper = LambdaWrapper(lambda_client, None)
    func_name = 'test-func_name'
    env_vars = {'test-key': 'test-val'}
    lambda_stubber.stub_update_function_configuration(
        func_name, env_vars, error_code=error_code)
    if error_code is None:
        got_response = wrapper.update_function_configuration(func_name, env_vars)
        assert got_response
    else:
        with pytest.raises(ClientError) as exc_info:
            wrapper.update_function_configuration(func_name, env_vars)
        assert exc_info.value.response['Error']['Code'] == error_code
@pytest.mark.parametrize('error_code', [None, 'TestException'])
def test_list_functions(make_stubber, error_code):
    """list_functions pages through stubbed functions, or raises ClientError.

    (Removed a stray trailing "|" dump artifact from the last line.)
    """
    lambda_client = boto3.client('lambda')
    lambda_stubber = make_stubber(lambda_client)
    wrapper = LambdaWrapper(lambda_client, None)
    funcs = [{
        'FunctionName': f'test-func-{index}',
        'Description': f'test description {index}',
        'Runtime': f'test-runtime-{index}',
        'Handler': f'test-handler-{index}'}
        for index in range(3)]
    lambda_stubber.stub_list_functions(funcs, error_code=error_code)
    if error_code is None:
        wrapper.list_functions()
    else:
        with pytest.raises(ClientError) as exc_info:
            wrapper.list_functions()
        assert exc_info.value.response['Error']['Code'] == error_code
#!/usr/bin/env python3
#
# Copyright (C) 2020-2023 by
# David Turner, Robert Wilhelm, and Werner Lemberg.
#
# This file is part of the FreeType project, and may only be used, modified,
# and distributed under the terms of the FreeType project license,
# LICENSE.TXT. By continuing to use, modify, or distribute this file you
# indicate that you have read the license and understand and accept it
# fully.
"""Parse modules.cfg and dump its output either as ftmodule.h or a list of
base extensions.
"""
from __future__ import print_function
import argparse
import os
import re
import sys
# Expected input:
#
# ...
# FONT_MODULES += <name>
# HINTING_MODULES += <name>
# RASTER_MODULES += <name>
# AUX_MODULES += <name>
# BASE_EXTENSIONS += <name>
# ...
def parse_modules_cfg(input_file):
    """Parse the text of a modules.cfg file into per-category module lists.

    Parameters
    ----------
    input_file : str
        The full text of the configuration file.

    Returns
    -------
    dict
        Maps each known variable name (FONT_MODULES, HINTING_MODULES,
        RASTER_MODULES, AUX_MODULES, BASE_EXTENSIONS) to the list of
        values appended to it, in file order.

    Raises
    ------
    ValueError
        If a non-comment line is not of the form ``NAME += value`` or
        uses an unknown variable name.
    """
    lists = {
        "FONT_MODULES": [],
        "HINTING_MODULES": [],
        "RASTER_MODULES": [],
        "AUX_MODULES": [],
        "BASE_EXTENSIONS": [],
    }
    for line in input_file.splitlines():
        line = line.rstrip()
        # Ignore empty lines and those that start with a comment.
        if not line or line[0] == "#":
            continue
        items = line.split()
        # Validate with real exceptions rather than `assert`, which is
        # stripped when Python runs with -O.
        if len(items) != 3 or items[1] != "+=":
            raise ValueError("Unexpected input line [%s]" % line)
        if items[0] not in lists:
            raise ValueError(
                "Unexpected configuration variable name " + items[0])
        lists[items[0]].append(items[2])

    return lists
def generate_ftmodule(lists):
    """Generate the contents of an ftmodule.h file from parsed module lists."""
    # Drivers whose class-name prefix differs from the modules.cfg name.
    driver_names = {
        "truetype": "tt",
        "type1": "t1",
        "cid": "t1cid",
        "type42": "t42",
        "winfonts": "winfnt",
    }
    renderer_names = {
        "raster": ("ft_raster1",),
        "smooth": ("ft_smooth",),
        "svg": ("ft_svg",),
        "sdf": ("ft_sdf", "ft_bitmap_sdf"),
    }
    aux_names = {
        "gxvalid": "gxv",
        "otvalid": "otv",
    }

    out = ["/* This is a generated file. */\n"]
    for driver in lists["FONT_MODULES"]:
        if driver == "sfnt":  # Special case: sfnt is a module, not a driver.
            out.append("FT_USE_MODULE( FT_Module_Class, sfnt_module_class )\n")
        else:
            name = driver_names.get(driver, driver)
            out.append(
                "FT_USE_MODULE( FT_Driver_ClassRec, %s_driver_class )\n" % name
            )
    for module in lists["HINTING_MODULES"]:
        out.append(
            "FT_USE_MODULE( FT_Module_Class, %s_module_class )\n" % module
        )
    for module in lists["RASTER_MODULES"]:
        # Unknown raster modules fail loudly, exactly as before.
        for name in renderer_names.get(module):
            out.append(
                "FT_USE_MODULE( FT_Renderer_Class, %s_renderer_class )\n" % name
            )
    for module in lists["AUX_MODULES"]:
        # Only these auxiliary modules expose a module class.
        if module in ("psaux", "psnames", "otvalid", "gxvalid"):
            out.append(
                "FT_USE_MODULE( FT_Module_Class, %s_module_class )\n"
                % aux_names.get(module, module)
            )
    out.append("/* EOF */\n")
    return "".join(out)
def generate_main_modules(lists):
    """Return the font, hinting, and raster module names, one per line."""
    names = []
    for key in ("FONT_MODULES", "HINTING_MODULES", "RASTER_MODULES"):
        names.extend(lists[key])
    return "\n".join(names)
def generate_aux_modules(lists):
    """Return the auxiliary module names, one per line."""
    aux_names = list(lists["AUX_MODULES"])
    return "\n".join(aux_names)
def generate_base_extensions(lists):
    """Return the base extension file names, one per line."""
    extensions = list(lists["BASE_EXTENSIONS"])
    return "\n".join(extensions)
def METHOD_NAME():
    """Command-line entry point; returns the process exit status (0)."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--format",
        required=True,
        choices=(
            "ftmodule.h",
            "main-modules",
            "aux-modules",
            "base-extensions-list",
        ),
        help="Select output format.",
    )
    parser.add_argument(
        "input",
        metavar="CONFIGURE_RAW",
        help="The input configure.raw file to parse.",
    )
    parser.add_argument("--output", help="Output file (default is stdout).")
    args = parser.parse_args()

    with open(args.input) as f:
        lists = parse_modules_cfg(f.read())

    # Dispatch table instead of an if/elif chain; argparse's `choices`
    # guarantees the key exists, mirroring the original unreachable assert.
    generators = {
        "ftmodule.h": generate_ftmodule,
        "main-modules": generate_main_modules,
        "aux-modules": generate_aux_modules,
        "base-extensions-list": generate_base_extensions,
    }
    generator = generators.get(args.format)
    assert generator is not None, "Invalid output format!"
    result = generator(lists)

    if args.output:
        with open(args.output, "w") as f:
            f.write(result)
    else:
        print(result)
    return 0
if __name__ == "__main__":
    # Exit with the status returned by the CLI entry point.
    # (Removed a stray trailing "|" dump artifact.)
    sys.exit(METHOD_NAME())
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
    "network nsg delete",
)
class Delete(AAZCommand):
    """Delete a network security group.

    :example: Delete an NSG in a resource group.
        az network nsg delete -g MyResourceGroup -n MyNsg
    """

    # NOTE: generated by aaz-dev-tools; regeneration will discard edits.
    _aaz_info = {
        "version": "2018-11-01",
        "resources": [
            ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.network/networksecuritygroups/{}", "2018-11-01"],
        ]
    }

    # Enables the --no-wait flag for this long-running operation.
    AZ_SUPPORT_NO_WAIT = True

    def _handler(self, command_args):
        super()._handler(command_args)
        # Delete is a long-running operation; wrap it in an LRO poller.
        return self.build_lro_poller(self.METHOD_NAME, None)

    _args_schema = None

    @classmethod
    def _build_arguments_schema(cls, *args, **kwargs):
        # Schema is built once and cached on the class.
        if cls._args_schema is not None:
            return cls._args_schema
        cls._args_schema = super()._build_arguments_schema(*args, **kwargs)

        # define Arg Group ""
        _args_schema = cls._args_schema
        _args_schema.name = AAZStrArg(
            options=["-n", "--name"],
            help="Name of the network security group.",
            required=True,
            id_part="name",
        )
        _args_schema.resource_group = AAZResourceGroupNameArg(
            required=True,
        )
        return cls._args_schema

    def METHOD_NAME(self):
        # Generator of operations: pre hooks, the HTTP delete, post hooks.
        self.pre_operations()
        yield self.NetworkSecurityGroupsDelete(ctx=self.ctx)()
        self.post_operations()

    @register_callback
    def pre_operations(self):
        pass

    @register_callback
    def post_operations(self):
        pass

    class NetworkSecurityGroupsDelete(AAZHttpOperation):
        # The ARM DELETE call for the network security group resource.
        CLIENT_TYPE = "MgmtClient"

        def __call__(self, *args, **kwargs):
            request = self.make_request()
            session = self.client.send_request(request=request, stream=False, **kwargs)
            # 202/200 accepted (poll to completion); 204 means already gone.
            if session.http_response.status_code in [202]:
                return self.client.build_lro_polling(
                    self.ctx.args.no_wait,
                    session,
                    self.on_200,
                    self.on_error,
                    lro_options={"final-state-via": "azure-async-operation"},
                    path_format_arguments=self.url_parameters,
                )
            if session.http_response.status_code in [200]:
                return self.client.build_lro_polling(
                    self.ctx.args.no_wait,
                    session,
                    self.on_200,
                    self.on_error,
                    lro_options={"final-state-via": "azure-async-operation"},
                    path_format_arguments=self.url_parameters,
                )
            if session.http_response.status_code in [204]:
                return self.client.build_lro_polling(
                    self.ctx.args.no_wait,
                    session,
                    self.on_204,
                    self.on_error,
                    lro_options={"final-state-via": "azure-async-operation"},
                    path_format_arguments=self.url_parameters,
                )

            return self.on_error(session.http_response)

        @property
        def url(self):
            return self.client.format_url(
                "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}",
                **self.url_parameters
            )

        @property
        def method(self):
            return "DELETE"

        @property
        def error_format(self):
            return "ODataV4Format"

        @property
        def url_parameters(self):
            parameters = {
                **self.serialize_url_param(
                    "networkSecurityGroupName", self.ctx.args.name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "resourceGroupName", self.ctx.args.resource_group,
                    required=True,
                ),
                **self.serialize_url_param(
                    "subscriptionId", self.ctx.subscription_id,
                    required=True,
                ),
            }
            return parameters

        @property
        def query_parameters(self):
            parameters = {
                **self.serialize_query_param(
                    "api-version", "2018-11-01",
                    required=True,
                ),
            }
            return parameters

        def on_200(self, session):
            # DELETE returns no body worth deserializing.
            pass

        def on_204(self, session):
            # Resource was already absent; nothing to do.
            pass
class _DeleteHelper:
"""Helper class for Delete"""
__all__ = ["Delete"] |
from cereal import car
from openpilot.selfdrive.car.subaru.values import CanBus
VisualAlert = car.CarControl.HUDControl.VisualAlert
def create_steering_control(packer, apply_steer, steer_req):
    """Build the ES_LKAS steering torque command message (bus 0)."""
    return packer.make_can_msg(
        "ES_LKAS",
        0,
        {"LKAS_Output": apply_steer, "LKAS_Request": steer_req, "SET_1": 1},
    )
def create_steering_status(packer):
    """Build an empty ES_LKAS_State message (no signal overrides, bus 0)."""
    values = {}
    return packer.make_can_msg("ES_LKAS_State", 0, values)
def create_es_distance(packer, frame, es_distance_msg, bus, pcm_cancel_cmd, long_enabled=False, brake_cmd=False, cruise_throttle=0):
    """Forward the stock ES_Distance message, overriding cruise control
    fields when openpilot is controlling longitudinal or cancelling."""
    forwarded = (
        "CHECKSUM", "Signal1", "Cruise_Fault", "Cruise_Throttle", "Signal2",
        "Car_Follow", "Low_Speed_Follow", "Cruise_Soft_Disable", "Signal7",
        "Cruise_Brake_Active", "Distance_Swap", "Cruise_EPB", "Signal4",
        "Close_Distance", "Signal5", "Cruise_Cancel", "Cruise_Set",
        "Cruise_Resume", "Signal6",
    )
    values = {signal: es_distance_msg[signal] for signal in forwarded}
    values["COUNTER"] = frame % 0x10

    if long_enabled:
        values["Cruise_Throttle"] = cruise_throttle
        # Do not disable openpilot on Eyesight Soft Disable, if openpilot is
        # controlling long
        values["Cruise_Soft_Disable"] = 0

        if brake_cmd:
            values["Cruise_Brake_Active"] = 1

    if pcm_cancel_cmd:
        values["Cruise_Cancel"] = 1
        values["Cruise_Throttle"] = 1818  # inactive throttle

    return packer.make_can_msg("ES_Distance", bus, values)
def create_es_lkas_state(packer, frame, es_lkas_state_msg, enabled, visual_alert, left_line, right_line, left_lane_depart, right_lane_depart):
    """Forward the stock ES_LKAS_State message, filtering the stock LKAS
    alerts and overlaying openpilot's lane-line and alert state for the dash."""
    values = {s: es_lkas_state_msg[s] for s in [
        "CHECKSUM",
        "LKAS_Alert_Msg",
        "Signal1",
        "LKAS_ACTIVE",
        "LKAS_Dash_State",
        "Signal2",
        "Backward_Speed_Limit_Menu",
        "LKAS_Left_Line_Enable",
        "LKAS_Left_Line_Light_Blink",
        "LKAS_Right_Line_Enable",
        "LKAS_Right_Line_Light_Blink",
        "LKAS_Left_Line_Visible",
        "LKAS_Right_Line_Visible",
        "LKAS_Alert",
        "Signal3",
    ]}
    values["COUNTER"] = frame % 0x10

    # Filter the stock LKAS "Keep hands on wheel" alert
    if values["LKAS_Alert_Msg"] == 1:
        values["LKAS_Alert_Msg"] = 0

    # Filter the stock LKAS sending an audible alert when it turns off LKAS
    if values["LKAS_Alert"] == 27:
        values["LKAS_Alert"] = 0

    # Filter the stock LKAS sending an audible alert when "Keep hands on wheel" alert is active (2020+ models)
    if values["LKAS_Alert"] == 28 and values["LKAS_Alert_Msg"] == 7:
        values["LKAS_Alert"] = 0

    # Filter the stock LKAS sending an audible alert when "Keep hands on wheel OFF" alert is active (2020+ models)
    if values["LKAS_Alert"] == 30:
        values["LKAS_Alert"] = 0

    # Filter the stock LKAS sending "Keep hands on wheel OFF" alert (2020+ models)
    if values["LKAS_Alert_Msg"] == 7:
        values["LKAS_Alert_Msg"] = 0

    # Show Keep hands on wheel alert for openpilot steerRequired alert
    if visual_alert == VisualAlert.steerRequired:
        values["LKAS_Alert_Msg"] = 1

    # Ensure we don't overwrite potentially more important alerts from stock (e.g. FCW)
    if visual_alert == VisualAlert.ldw and values["LKAS_Alert"] == 0:
        if left_lane_depart:
            values["LKAS_Alert"] = 12  # Left lane departure dash alert
        elif right_lane_depart:
            values["LKAS_Alert"] = 11  # Right lane departure dash alert

    if enabled:
        values["LKAS_ACTIVE"] = 1  # Show LKAS lane lines
        values["LKAS_Dash_State"] = 2  # Green enabled indicator
    else:
        values["LKAS_Dash_State"] = 0  # LKAS Not enabled

    values["LKAS_Left_Line_Visible"] = int(left_line)
    values["LKAS_Right_Line_Visible"] = int(right_line)

    return packer.make_can_msg("ES_LKAS_State", CanBus.main, values)
def create_es_dashstatus(packer, frame, dashstatus_msg, enabled, long_enabled, long_active, lead_visible):
    """Forward the stock ES_DashStatus message, overriding cruise state when
    openpilot longitudinal is engaged and filtering stock LKAS dash messages."""
    values = {s: dashstatus_msg[s] for s in [
        "CHECKSUM",
        "PCB_Off",
        "LDW_Off",
        "Signal1",
        "Cruise_State_Msg",
        "LKAS_State_Msg",
        "Signal2",
        "Cruise_Soft_Disable",
        "Cruise_Status_Msg",
        "Signal3",
        "Cruise_Distance",
        "Signal4",
        "Conventional_Cruise",
        "Signal5",
        "Cruise_Disengaged",
        "Cruise_Activated",
        "Signal6",
        "Cruise_Set_Speed",
        "Cruise_Fault",
        "Cruise_On",
        "Display_Own_Car",
        "Brake_Lights",
        "Car_Follow",
        "Signal7",
        "Far_Distance",
        "Cruise_State",
    ]}
    values["COUNTER"] = frame % 0x10

    if enabled and long_active:
        values["Cruise_State"] = 0
        values["Cruise_Activated"] = 1
        values["Cruise_Disengaged"] = 0
        values["Car_Follow"] = int(lead_visible)

    if long_enabled:
        values["PCB_Off"] = 1  # AEB is not presevered, so show the PCB_Off on dash

    # Filter stock LKAS disabled and Keep hands on steering wheel OFF alerts
    if values["LKAS_State_Msg"] in (2, 3):
        values["LKAS_State_Msg"] = 0

    return packer.make_can_msg("ES_DashStatus", CanBus.main, values)
def METHOD_NAME(packer, frame, es_brake_msg, enabled, brake_value):
    """Forward the stock ES_Brake message, injecting openpilot's brake
    request and cruise-activation state."""
    passthrough = (
        "CHECKSUM", "Signal1", "Brake_Pressure", "AEB_Status",
        "Cruise_Brake_Lights", "Cruise_Brake_Fault", "Cruise_Brake_Active",
        "Cruise_Activated", "Signal3",
    )
    values = {signal: es_brake_msg[signal] for signal in passthrough}
    values["COUNTER"] = frame % 0x10

    if enabled:
        values["Cruise_Activated"] = 1

    values["Brake_Pressure"] = brake_value

    if brake_value > 0:
        values["Cruise_Brake_Active"] = 1
        values["Cruise_Brake_Lights"] = 1 if brake_value >= 70 else 0

    return packer.make_can_msg("ES_Brake", CanBus.main, values)
def create_es_status(packer, frame, es_status_msg, long_enabled, long_active, cruise_rpm):
    """Forward the stock ES_Status message, overriding RPM and activation
    when openpilot controls longitudinal."""
    passthrough = (
        "CHECKSUM", "Signal1", "Cruise_Fault", "Cruise_RPM", "Signal2",
        "Cruise_Activated", "Brake_Lights", "Cruise_Hold", "Signal3",
    )
    values = {signal: es_status_msg[signal] for signal in passthrough}
    values["COUNTER"] = frame % 0x10

    if long_enabled:
        values["Cruise_RPM"] = cruise_rpm

    if long_active:
        values["Cruise_Activated"] = 1

    return packer.make_can_msg("ES_Status", CanBus.main, values)
def create_es_infotainment(packer, frame, es_infotainment_msg, visual_alert):
    """Forward ES_Infotainment, mapping openpilot visual alerts onto the
    stock infotainment LKAS display states."""
    passthrough = (
        "CHECKSUM", "LKAS_State_Infotainment", "LKAS_Blue_Lines",
        "Signal1", "Signal2",
    )
    values = {signal: es_infotainment_msg[signal] for signal in passthrough}
    values["COUNTER"] = frame % 0x10

    # Filter stock LKAS disabled and Keep hands on steering wheel OFF alerts
    if values["LKAS_State_Infotainment"] in (3, 4):
        values["LKAS_State_Infotainment"] = 0

    # Show Keep hands on wheel alert for openpilot steerRequired alert
    if visual_alert == VisualAlert.steerRequired:
        values["LKAS_State_Infotainment"] = 3

    # Show Obstacle Detected for fcw
    if visual_alert == VisualAlert.fcw:
        values["LKAS_State_Infotainment"] = 2

    return packer.make_can_msg("ES_Infotainment", CanBus.main, values)
# *** Subaru Pre-global ***
def subaru_preglobal_checksum(packer, values, addr, checksum_byte=7):
    """Pre-global checksum: sum of every payload byte except the checksum
    byte itself, modulo 256."""
    dat = packer.make_can_msg(addr, 0, values)[2]
    return (sum(dat) - dat[checksum_byte]) % 256
def create_preglobal_steering_control(packer, frame, apply_steer, steer_req):
    """Build the pre-global ES_LKAS steering message with its custom checksum."""
    values = dict(
        COUNTER=frame % 0x08,
        LKAS_Command=apply_steer,
        LKAS_Active=steer_req,
    )
    values["Checksum"] = subaru_preglobal_checksum(packer, values, "ES_LKAS")
    return packer.make_can_msg("ES_LKAS", CanBus.main, values)
def create_preglobal_es_distance(packer, cruise_button, es_distance_msg):
    """Forward the pre-global ES_Distance message with an injected cruise
    button press and a recomputed checksum."""
    passthrough = (
        "Cruise_Throttle", "Signal1", "Car_Follow", "Signal2",
        "Cruise_Brake_Active", "Distance_Swap", "Standstill", "Signal3",
        "Close_Distance", "Signal4", "Standstill_2", "Cruise_Fault",
        "Signal5", "COUNTER", "Signal6", "Cruise_Button", "Signal7",
    )
    values = {signal: es_distance_msg[signal] for signal in passthrough}
    values["Cruise_Button"] = cruise_button
    values["Checksum"] = subaru_preglobal_checksum(packer, values, "ES_Distance")
    return packer.make_can_msg("ES_Distance", CanBus.main, values)
import numpy as np
from yt.testing import (
assert_allclose_units,
fake_random_ds,
requires_file,
requires_module,
)
from yt.units import cm, s # type: ignore
from yt.utilities.answer_testing.framework import data_dir_load
from yt.visualization.volume_rendering.off_axis_projection import off_axis_projection
def random_unit_vector(prng):
    """Draw a random nonzero 3-vector from *prng* and scale it to unit length."""
    vec = prng.random_sample(3)
    while not vec.any():
        vec = prng.random_sample(3)
    return vec / np.sqrt((vec**2).sum())
def METHOD_NAME(prng):
    """Draw a random bulk-velocity 3-vector with components uniform in
    [-1e5, 1e5)."""
    return 2e5 * prng.random_sample(3) - 1e5
def compare_vector_conversions(data_source):
    """Check velocity-field consistency across coordinate systems.

    For a mix of axis-aligned and random normals, and two random bulk
    velocities, verifies that: the magnitude and spherical-radius fields are
    invariant under the "normal" field parameter; cartesian, cylindrical and
    spherical components recombine to the same magnitude; and line-of-sight
    velocities match explicit projections onto the chosen axis.
    """
    prng = np.random.RandomState(8675309)
    normals = [[1, 0, 0], [0, 1, 0], [0, 0, 1]] + [
        random_unit_vector(prng) for i in range(2)
    ]
    bulk_velocities = [METHOD_NAME(prng) for i in range(2)]

    for bv in bulk_velocities:
        bulk_velocity = bv * cm / s
        data_source.set_field_parameter("bulk_velocity", bulk_velocity)
        data_source.clear_data()
        vmag = data_source[("gas", "velocity_magnitude")]
        vrad = data_source[("gas", "velocity_spherical_radius")]

        for normal in normals:
            data_source.set_field_parameter("normal", normal)
            data_source.clear_data()
            # Radial component and magnitude must not depend on the normal.
            assert_allclose_units(
                vrad, data_source[("gas", "velocity_spherical_radius")]
            )
            vmag_new = data_source[("gas", "velocity_magnitude")]
            assert_allclose_units(vmag, vmag_new)
            # Recombine components in each coordinate system and compare
            # against the magnitude field.
            vmag_cart = np.sqrt(
                (data_source[("gas", "velocity_x")] - bulk_velocity[0]) ** 2
                + (data_source[("gas", "velocity_y")] - bulk_velocity[1]) ** 2
                + (data_source[("gas", "velocity_z")] - bulk_velocity[2]) ** 2
            )
            assert_allclose_units(vmag, vmag_cart)
            vmag_cyl = np.sqrt(
                data_source[("gas", "velocity_cylindrical_radius")] ** 2
                + data_source[("gas", "velocity_cylindrical_theta")] ** 2
                + data_source[("gas", "velocity_cylindrical_z")] ** 2
            )
            assert_allclose_units(vmag, vmag_cyl)
            vmag_sph = np.sqrt(
                data_source[("gas", "velocity_spherical_radius")] ** 2
                + data_source[("gas", "velocity_spherical_theta")] ** 2
                + data_source[("gas", "velocity_spherical_phi")] ** 2
            )
            assert_allclose_units(vmag, vmag_sph)

            for i, d in enumerate("xyz"):
                assert_allclose_units(
                    data_source[("gas", f"velocity_{d}")] - bulk_velocity[i],
                    data_source[("gas", f"relative_velocity_{d}")],
                )

        # Line-of-sight along each coordinate axis equals the matching
        # relative-velocity component.
        for i, ax in enumerate("xyz"):
            data_source.set_field_parameter("axis", i)
            data_source.clear_data()
            assert_allclose_units(
                data_source[("gas", "velocity_los")],
                data_source[("gas", f"relative_velocity_{ax}")],
            )

        for i, ax in enumerate("xyz"):
            prj = data_source.ds.proj(
                ("gas", "velocity_los"), i, weight_field=("gas", "density")
            )
            assert_allclose_units(
                prj[("gas", "velocity_los")], prj[("gas", f"velocity_{ax}")]
            )

        data_source.clear_data()
        # NOTE(review): the "axis" parameter is set to the *unnormalized*
        # list before ax is rebound to its normalized form below —
        # presumably the field normalizes internally; confirm.
        ax = [0.1, 0.2, -0.3]
        data_source.set_field_parameter("axis", ax)
        ax /= np.sqrt(np.dot(ax, ax))
        vlos = data_source[("gas", "relative_velocity_x")] * ax[0]
        vlos += data_source[("gas", "relative_velocity_y")] * ax[1]
        vlos += data_source[("gas", "relative_velocity_z")] * ax[2]
        assert_allclose_units(data_source[("gas", "velocity_los")], vlos)

        # Off-axis projection of velocity_los must equal the projection of
        # the recombined cartesian components along the same axis.
        buf_los = off_axis_projection(
            data_source,
            data_source.ds.domain_center,
            ax,
            0.5,
            128,
            ("gas", "velocity_los"),
            weight=("gas", "density"),
        )
        buf_x = off_axis_projection(
            data_source,
            data_source.ds.domain_center,
            ax,
            0.5,
            128,
            ("gas", "relative_velocity_x"),
            weight=("gas", "density"),
        )
        buf_y = off_axis_projection(
            data_source,
            data_source.ds.domain_center,
            ax,
            0.5,
            128,
            ("gas", "relative_velocity_y"),
            weight=("gas", "density"),
        )
        buf_z = off_axis_projection(
            data_source,
            data_source.ds.domain_center,
            ax,
            0.5,
            128,
            ("gas", "relative_velocity_z"),
            weight=("gas", "density"),
        )
        vlos = buf_x * ax[0] + buf_y * ax[1] + buf_z * ax[2]
        assert_allclose_units(buf_los, vlos, rtol=1.0e-6)
def test_vector_component_conversions_fake():
    """Run the conversion consistency checks on a small synthetic dataset."""
    dataset = fake_random_ds(16)
    compare_vector_conversions(dataset.all_data())
g30 = "IsolatedGalaxy/galaxy0030/galaxy0030"


@requires_module("h5py")
@requires_file(g30)
def test_vector_component_conversions_real():
    """Run the conversion consistency checks on a sphere from a real dataset."""
    ds = data_dir_load(g30)
    sphere = ds.sphere(ds.domain_center, (10, "kpc"))
    compare_vector_conversions(sphere)
"""
:codeauthor: :email:`Alexandru Bleotu <alexandru.bleotu@morganstanley.com>`
Tests for esxcluster proxy
"""
import pytest
import salt.exceptions
import salt.proxy.esxcluster as esxcluster
from salt.config.schemas.esxcluster import EsxclusterProxySchema
from tests.support.mock import MagicMock, patch
try:
import jsonschema
HAS_JSONSCHEMA = True
except ImportError:
HAS_JSONSCHEMA = False
pytestmark = [pytest.mark.skipif(not HAS_JSONSCHEMA, reason="jsonschema is required")]
@pytest.fixture
def opts_userpass():
    # Minimal esxcluster proxy config using username/password authentication.
    return {
        "proxy": {
            "proxytype": "esxcluster",
            "vcenter": "fake_vcenter",
            "datacenter": "fake_dc",
            "cluster": "fake_cluster",
            "mechanism": "userpass",
            "username": "fake_username",
            "passwords": ["fake_password"],
            "protocol": "fake_protocol",
            "port": 100,
        }
    }


@pytest.fixture
def opts_sspi():
    # Minimal esxcluster proxy config using SSPI (domain/principal) authentication.
    return {
        "proxy": {
            "proxytype": "esxcluster",
            "vcenter": "fake_vcenter",
            "datacenter": "fake_dc",
            "cluster": "fake_cluster",
            "mechanism": "sspi",
            "domain": "fake_domain",
            "principal": "fake_principal",
            "protocol": "fake_protocol",
            "port": 100,
        }
    }
@pytest.fixture
def configure_loader_modules(opts_sspi):
    # Patch the module-global DETAILS dict and pin merge() to a known config
    # so esxcluster.init() behaves deterministically in each test.
    with patch.dict(esxcluster.DETAILS):
        with patch(
            "salt.proxy.esxcluster.merge",
            MagicMock(return_value=opts_sspi["proxy"]),
        ):
            yield {esxcluster: {"__pillar__": {}}}


def test_merge(opts_sspi):
    # init() must merge the opts proxy config over the pillar proxy config.
    mock_pillar_proxy = MagicMock()
    mock_opts_proxy = MagicMock()
    mock_merge = MagicMock(return_value=opts_sspi["proxy"])
    with patch.dict(esxcluster.__pillar__, {"proxy": mock_pillar_proxy}):
        with patch("salt.proxy.esxcluster.merge", mock_merge):
            esxcluster.init(opts={"proxy": mock_opts_proxy})
    mock_merge.assert_called_once_with(mock_opts_proxy, mock_pillar_proxy)


def test_esxcluster_schema(opts_sspi):
    # init() must validate the merged config against EsxclusterProxySchema.
    mock_json_validate = MagicMock()
    serialized_schema = EsxclusterProxySchema().serialize()
    with patch("salt.proxy.esxcluster.jsonschema.validate", mock_json_validate):
        esxcluster.init(opts_sspi)
    mock_json_validate.assert_called_once_with(opts_sspi["proxy"], serialized_schema)
def test_invalid_proxy_input_error(opts_userpass):
    # Schema validation failures must surface as InvalidConfigError.
    with patch(
        "salt.proxy.esxcluster.jsonschema.validate",
        MagicMock(
            side_effect=jsonschema.exceptions.ValidationError("Validation Error")
        ),
    ):
        with pytest.raises(salt.exceptions.InvalidConfigError) as excinfo:
            esxcluster.init(opts_userpass)
    assert excinfo.value.message == "Validation Error"


def test_no_username(opts_userpass):
    # userpass mechanism without a 'username' key must be rejected.
    opts = opts_userpass.copy()
    del opts["proxy"]["username"]
    with patch("salt.proxy.esxcluster.merge", MagicMock(return_value=opts["proxy"])):
        with pytest.raises(salt.exceptions.InvalidConfigError) as excinfo:
            esxcluster.init(opts)
    assert (
        excinfo.value.message == "Mechanism is set to 'userpass', but no "
        "'username' key found in proxy config."
    )


def METHOD_NAME(opts_userpass):
    # userpass mechanism without a 'passwords' key must be rejected.
    opts = opts_userpass.copy()
    del opts["proxy"]["passwords"]
    with patch("salt.proxy.esxcluster.merge", MagicMock(return_value=opts["proxy"])):
        with pytest.raises(salt.exceptions.InvalidConfigError) as excinfo:
            esxcluster.init(opts)
    assert (
        excinfo.value.message == "Mechanism is set to 'userpass', but no "
        "'passwords' key found in proxy config."
    )


def test_no_domain(opts_sspi):
    # sspi mechanism without a 'domain' key must be rejected.
    opts = opts_sspi.copy()
    del opts["proxy"]["domain"]
    with patch("salt.proxy.esxcluster.merge", MagicMock(return_value=opts["proxy"])):
        with pytest.raises(salt.exceptions.InvalidConfigError) as excinfo:
            esxcluster.init(opts)
    assert (
        excinfo.value.message
        == "Mechanism is set to 'sspi', but no 'domain' key found in proxy config."
    )


def test_no_principal(opts_sspi):
    # sspi mechanism without a 'principal' key must be rejected.
    opts = opts_sspi.copy()
    del opts["proxy"]["principal"]
    with patch("salt.proxy.esxcluster.merge", MagicMock(return_value=opts["proxy"])):
        with pytest.raises(salt.exceptions.InvalidConfigError) as excinfo:
            esxcluster.init(opts)
    assert (
        excinfo.value.message
        == "Mechanism is set to 'sspi', but no 'principal' key found in proxy config."
    )
def test_find_credentials(opts_userpass):
    # For userpass, init() resolves credentials via find_credentials().
    mock_find_credentials = MagicMock(return_value=("fake_username", "fake_password"))
    with patch(
        "salt.proxy.esxcluster.merge",
        MagicMock(return_value=opts_userpass["proxy"]),
    ):
        with patch("salt.proxy.esxcluster.find_credentials", mock_find_credentials):
            esxcluster.init(opts_userpass)
    mock_find_credentials.assert_called_once_with()


def test_details_userpass(opts_userpass):
    # DETAILS is populated from the config plus the resolved password.
    mock_find_credentials = MagicMock(return_value=("fake_username", "fake_password"))
    with patch(
        "salt.proxy.esxcluster.merge",
        MagicMock(return_value=opts_userpass["proxy"]),
    ):
        with patch("salt.proxy.esxcluster.find_credentials", mock_find_credentials):
            esxcluster.init(opts_userpass)
    assert esxcluster.DETAILS == {
        "vcenter": "fake_vcenter",
        "datacenter": "fake_dc",
        "cluster": "fake_cluster",
        "mechanism": "userpass",
        "username": "fake_username",
        "password": "fake_password",
        "passwords": ["fake_password"],
        "protocol": "fake_protocol",
        "port": 100,
    }
def test_details_sspi(opts_sspi):
    """init() should populate DETAILS verbatim from an sspi proxy config."""
    esxcluster.init(opts_sspi)
    expected = {
        "vcenter": "fake_vcenter",
        "datacenter": "fake_dc",
        "cluster": "fake_cluster",
        "mechanism": "sspi",
        "domain": "fake_domain",
        "principal": "fake_principal",
        "protocol": "fake_protocol",
        "port": 100,
    }
    assert esxcluster.DETAILS == expected
# Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT
import logging
import re
import shutil
from typing import Optional
import packit.upstream
from packit.actions import ActionName
from packit.config.common_package_config import MultiplePackages
from packit.distgit import DistGit
logger = logging.getLogger(__name__)
class ChangelogHelper:
    """Builds and inserts spec-file changelog entries, both when syncing a
    release to dist-git and when creating SRPMs from upstream/source-git."""

    def __init__(
        self,
        upstream: "packit.upstream.Upstream",
        downstream: Optional[DistGit] = None,
        package_config: Optional[MultiplePackages] = None,
    ) -> None:
        self.up = upstream
        self.dg = downstream
        self.package_config = package_config

    @staticmethod
    def resolve_release_suffix(
        package_config: MultiplePackages,
        release_suffix: Optional[str] = None,
        default_release_suffix: bool = False,
    ) -> Optional[str]:
        """
        Resolves the release suffix value, since it can be set from multiple places
        and also overriden to the default one that is generated by packit.

        Args:
            package_config: Package config that is used as fallback.
            release_suffix: Release suffix that was passed from CLI.
            default_release_suffix: Override for using the default one that ensures
                correct NVR ordering.

        Returns:
            `None` if packit is to use the default method of generating release
            suffix, otherwise string containing the release suffix.
        """
        if default_release_suffix:
            # we want to use the default packit-generated release suffix
            release_suffix = None
        elif release_suffix is None:
            # we want to get release suffix from the configuration
            release_suffix = package_config.release_suffix
        return release_suffix

    def METHOD_NAME(self, version: Optional[str] = None) -> Optional[str]:
        """
        Runs changelog-entry action if present and returns string that can be
        used as a changelog entry.

        Args:
            version: version to be set in specfile

        Returns:
            Changelog entry or `None` if action is not present.
        """
        env = {"PACKIT_PROJECT_VERSION": version}
        messages = self.up.get_output_from_action(ActionName.changelog_entry, env=env)
        if not messages:
            return None
        return "\n".join(map(lambda line: line.rstrip(), messages))

    @staticmethod
    def sanitize_entry(entry: str) -> str:
        """Escape RPM macros and guard entry boundaries in a changelog entry."""
        # escape macro references and macro/shell/expression expansions
        # that could break spec file parsing
        entry = re.sub(r"(?<!%)%(?=(\w+|[{[(]))", "%%", entry)
        # prepend asterisk at the start of a line with a space in order
        # not to break identification of entry boundaries
        return re.sub(r"^[*]", " *", entry, flags=re.MULTILINE)

    def update_dist_git(self, full_version: str, upstream_tag: str) -> None:
        """
        Update the spec-file in dist-git:
        * Sync content from upstream spec-file.
        * Set 'Version'.
        * Add new entry in the %changelog section
          (if %autochangelog macro is not used).

        Copy the upstream spec-file as is if no spec-file is present in downstream.
        (E.g. for new packages)

        Args:
            full_version: Version to be set in the spec-file.
            upstream_tag: The commit messages after last tag and before this tag are used
                to update the changelog in the spec-file.
        """
        # Entry source priority: changelog-entry action output, then the
        # upstream release description (if configured), then commit messages
        # between the last tag and this tag.
        comment = self.METHOD_NAME(version=full_version) or (
            self.up.local_project.git_project.get_release(
                tag_name=upstream_tag, name=full_version
            ).body
            if self.package_config.copy_upstream_release_description
            # in pull_from_upstream workflow, upstream git_project can be None
            and self.up.local_project.git_project
            else self.up.get_commit_messages(
                after=self.up.get_last_tag(before=upstream_tag),
                before=upstream_tag,
            )
        )
        comment = self.sanitize_entry(comment)
        try:
            self.dg.set_specfile_content(
                self.up.specfile,
                full_version,
                comment=None if self.dg.specfile.has_autochangelog else comment,
            )
        except FileNotFoundError as ex:
            # no downstream spec file: this is either a mistake or
            # there is no spec file in dist-git yet, hence warning
            logger.warning(
                f"Unable to find a spec file in downstream: {ex}, copying the one from upstream."
            )
            shutil.copy2(
                self.up.absolute_specfile_path,
                self.dg.get_absolute_specfile_path(),
            )

    def _get_release_for_source_git(
        self, current_commit: str, update_release: bool, release_suffix: Optional[str]
    ) -> Optional[str]:
        # Returns the new Release value for a source-git build, or None to
        # leave Release untouched.
        old_release = self.up.specfile.expanded_release
        if release_suffix:
            return f"{old_release}.{release_suffix}"
        if not update_release:
            return None
        try:
            old_release_int = int(old_release)
            new_release = str(old_release_int + 1)
        except ValueError:
            # non-numeric Release: keep it as-is and only append the commit
            new_release = str(old_release)
        return f"{new_release}.g{current_commit}"

    def prepare_upstream_using_source_git(
        self, update_release: bool, release_suffix: Optional[str]
    ) -> None:
        """
        Updates changelog when creating SRPM within source-git repository.
        """
        current_commit = self.up.local_project.commit_hexsha
        release_to_update = self._get_release_for_source_git(
            current_commit, update_release, release_suffix
        )
        msg = self.METHOD_NAME()
        if not msg and update_release:
            msg = f"- Downstream changes ({current_commit})"
        self.up.specfile.release = release_to_update
        if msg is not None:
            self.up.specfile.add_changelog_entry(msg)

    def prepare_upstream_locally(
        self,
        version: str,
        commit: str,
        update_release: bool,
        release: str,
    ) -> None:
        """
        Updates changelog when creating SRPM within upstream repository.

        Args:
            version: Version to be set in the spec-file.
            commit: Commit to be set in the changelog.
            update_release: Whether to change Release in the spec-file.
            release: Release to be set in the spec-file.
        """
        self.up.specfile.version = version
        last_tag = self.up.get_last_tag()
        msg = self.METHOD_NAME(version=version)
        if not msg and last_tag and update_release:
            msg = self.up.get_commit_messages(after=last_tag)
        if not msg and update_release:
            # no describe, no tag - just a boilerplate message w/ commit hash
            # or, there were no changes b/w HEAD and last_tag, which implies last_tag == HEAD
            msg = f"- Development snapshot ({commit})"
        # instead of changing version, we change Release field
        # upstream projects should take care of versions
        if update_release:
            logger.debug(f"Setting Release in spec to {release!r}.")
            self.up.specfile.release = release
        if msg is not None:
            self.up.specfile.add_changelog_entry(msg)
import concurrent.futures
import logging.handlers
import os
import re
import time
class AsyncLogHandlerMixin(logging.Handler):
    """Mixin that offloads record emission to a single-worker executor so
    logging calls do not block the caller on slow handler I/O."""

    def __init__(self, *args, **kwargs):
        # A single worker keeps emitted records in order.
        self._executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
        super().__init__(*args, **kwargs)

    def emit(self, record):
        # _shutdown is ThreadPoolExecutor's internal flag; once the executor
        # has been shut down, silently drop further records.
        if getattr(self._executor, "_shutdown", False):
            return
        try:
            self._executor.submit(self._emit, record)
        except Exception:
            self.handleError(record)

    def _emit(self, record):
        # Runs on the worker thread; performs the real (blocking) emit.
        # noinspection PyUnresolvedReferences
        super().emit(record)

    def close(self):
        # Drain pending records before closing the underlying handler.
        self._executor.shutdown(wait=True)
        super().close()
class CleaningTimedRotatingFileHandler(logging.handlers.TimedRotatingFileHandler):
    """TimedRotatingFileHandler that also prunes stale backup files when the
    handler is created (UTF-8 encoding by default)."""

    def __init__(self, *args, **kwargs):
        kwargs["encoding"] = kwargs.get("encoding", "utf-8")
        super().__init__(*args, **kwargs)

        # clean up old files on handler start
        if self.backupCount > 0:
            for s in self.getFilesToDelete():
                os.remove(s)
class OctoPrintLogHandler(AsyncLogHandlerMixin, CleaningTimedRotatingFileHandler):
    """Async timed-rotating handler that notifies registered callbacks after
    each rollover."""

    # Class-level registry shared by all instances.
    rollover_callbacks = []

    def __init__(self, *args, **kwargs):
        kwargs["encoding"] = kwargs.get("encoding", "utf-8")
        super().__init__(*args, **kwargs)

    @classmethod
    def registerRolloverCallback(cls, callback, *args, **kwargs):
        # The stored args/kwargs are replayed on every rollover.
        cls.rollover_callbacks.append((callback, args, kwargs))

    def doRollover(self):
        super().doRollover()
        for rcb in self.rollover_callbacks:
            callback, args, kwargs = rcb
            callback(*args, **kwargs)
class OctoPrintStreamHandler(AsyncLogHandlerMixin, logging.StreamHandler):
    """Stream handler variant that emits records asynchronously."""

    pass
class TriggeredRolloverLogHandler(
    AsyncLogHandlerMixin, logging.handlers.RotatingFileHandler
):
    """Rotating handler that only rolls over when explicitly armed via
    arm_rollover(); rotated files get a timestamp suffix and old rotations
    beyond backupCount are pruned."""

    # Class-level flag: arm_rollover() marks all instances of the (sub)class
    # for rollover on the next emitted record.
    do_rollover = False

    suffix_template = "%Y-%m-%d_%H-%M-%S"
    file_pattern = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}$")

    @classmethod
    def arm_rollover(cls):
        cls.do_rollover = True

    def __init__(self, *args, **kwargs):
        kwargs["encoding"] = kwargs.get("encoding", "utf-8")
        super().__init__(*args, **kwargs)
        self.METHOD_NAME()

    def shouldRollover(self, record):
        # Roll over only when armed, never based on size or time.
        return self.do_rollover

    def getFilesToDelete(self):
        """
        Determine the files to delete when rolling over.
        """
        dirName, baseName = os.path.split(self.baseFilename)
        fileNames = os.listdir(dirName)
        result = []
        prefix = baseName + "."
        plen = len(prefix)
        for fileName in fileNames:
            if fileName[:plen] == prefix:
                suffix = fileName[plen:]
                if type(self).file_pattern.match(suffix):
                    result.append(os.path.join(dirName, fileName))
        result.sort()
        # Keep only the backupCount newest rotated files.
        if len(result) < self.backupCount:
            result = []
        else:
            result = result[: len(result) - self.backupCount]
        return result

    def METHOD_NAME(self):
        # Prune rotated files beyond backupCount.
        if self.backupCount > 0:
            for path in self.getFilesToDelete():
                os.remove(path)

    def doRollover(self):
        # FIX: clear the flag on the *class*, not the instance. Assigning
        # `self.do_rollover = False` created an instance attribute that
        # permanently shadowed the class-level flag, so a later
        # arm_rollover() (which sets the class attribute) could never
        # trigger another rollover for this handler instance.
        type(self).do_rollover = False
        if self.stream:
            self.stream.close()
            self.stream = None
        if os.path.exists(self.baseFilename):
            # figure out creation date/time to use for file suffix
            t = time.localtime(os.stat(self.baseFilename).st_mtime)
            dfn = self.baseFilename + "." + time.strftime(type(self).suffix_template, t)
            if os.path.exists(dfn):
                os.remove(dfn)
            os.rename(self.baseFilename, dfn)
        self.METHOD_NAME()
        if not self.delay:
            self.stream = self._open()
class SerialLogHandler(TriggeredRolloverLogHandler):
    """Triggered-rollover handler (rolled over on demand)."""

    pass


class PluginTimingsLogHandler(TriggeredRolloverLogHandler):
    """Triggered-rollover handler (rolled over on demand)."""

    pass


class TornadoLogHandler(CleaningTimedRotatingFileHandler):
    """Timed-rotation handler with startup backup cleanup."""

    pass


class AuthLogHandler(CleaningTimedRotatingFileHandler):
    """Timed-rotation handler with startup backup cleanup."""

    pass
class RecordingLogHandler(logging.Handler):
    """Buffers log records in memory until flushed into a target handler."""

    def __init__(self, target=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._buffer = []
        self._target = target

    def emit(self, record):
        self._buffer.append(record)

    def setTarget(self, target):
        self._target = target

    def flush(self):
        # Without a target there is nowhere to replay the buffer; keep it.
        if not self._target:
            return
        self.acquire()
        try:
            for buffered in self._buffer:
                self._target.handle(buffered)
            self._buffer = []
        finally:
            self.release()

    def close(self):
        self.flush()
        self.acquire()
        try:
            self._buffer = []
        finally:
            self.release()

    def __len__(self):
        return len(self._buffer)
# noinspection PyAbstractClass
class CombinedLogHandler(logging.Handler):
    """Fans each record out to a configurable set of wrapped handlers."""

    def __init__(self, *handlers):
        logging.Handler.__init__(self)
        self._handlers = handlers

    def setHandlers(self, *handlers):
        self._handlers = handlers

    def handle(self, record):
        self.acquire()
        try:
            for wrapped in self._handlers or ():
                wrapped.handle(record)
        finally:
            self.release()
from io import BytesIO
from tempfile import NamedTemporaryFile
import requests
from candidates.models.db import ActionType, LoggedAction
from candidates.views.version_data import get_client_ip
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.urls import reverse
from PIL import ExifTags, ImageOps
from PIL import Image as PillowImage
from .models import QueuedImage
def upload_photo_response(request, person, image_form, url_form):
    """Render the photo-upload page with both forms and the person's
    pending ("undecided") queued images, oldest first."""
    return render(
        request,
        "moderation_queue/photo-upload-new.html",
        {
            "image_form": image_form,
            "url_form": url_form,
            "queued_images": QueuedImage.objects.filter(
                person=person, decision="undecided"
            ).order_by("created"),
            "person": person,
        },
    )
def image_form_valid_response(request, person, image_form):
    """Persist a valid queued-image upload, log the action, and redirect to
    the upload-success page for *person*."""
    # Make sure that we save the user that made the upload
    queued_image = image_form.save(commit=False)
    queued_image.user = request.user
    queued_image.save()
    # Record that action:
    LoggedAction.objects.create(
        user=request.user,
        action_type=ActionType.PHOTO_UPLOAD,
        ip_address=get_client_ip(request),
        popit_person_new_version="",
        person=person,
        source=image_form.cleaned_data["justification_for_use"],
    )
    return HttpResponseRedirect(
        reverse("photo-upload-success", kwargs={"person_id": person.id})
    )
def rotate_photo(original_image):
    """Return the image transposed according to its EXIF Orientation tag.

    If an image has an EXIF Orientation tag other than 1, return a new image
    that is transposed accordingly; the new image has the orientation data
    removed (see ImageOps.exif_transpose). Otherwise return the image as
    opened. If an image has an EXIF Orientation tag of 1 it might still need
    to be rotated, but that can be handled manually in the review process.
    """
    # TODO issue #2026: this does not handle URL uploads.
    pil_image = PillowImage.open(original_image)
    # 274 is the numeric EXIF tag for "Orientation".
    # (Removed: a loop resolving 274 via ExifTags.TAGS whose result was never
    # used, and a PNG re-encode into a discarded BytesIO — both dead code.)
    exif = pil_image.getexif()
    if exif and exif.get(274):
        pil_image = ImageOps.exif_transpose(pil_image)
    return pil_image
def resize_photo(photo, original_image):
    """Downscale oversized photos.

    If either dimension of *original_image* exceeds 5000px, return *photo*
    thumbnailed in place to fit within 2000x2000 (aspect ratio preserved);
    otherwise return *photo* unchanged.
    """
    if original_image.width <= 5000 and original_image.height <= 5000:
        # Nothing to do — and no need to open/parse the photo at all.
        return photo
    if not isinstance(photo, PillowImage.Image):
        pil_image = PillowImage.open(photo)
    else:
        pil_image = photo
    pil_image.thumbnail((2000, 2000))
    # (Removed: a PNG encode into a discarded BytesIO — dead code.)
    return pil_image
def convert_image_to_png(photo):
    """Convert *photo* to PNG and return a BytesIO with the encoded bytes.

    Some uploaded images are CMYK, which gives you an error when you try to
    write them as PNG, so convert to RGBA (RGBA rather than RGB so that any
    alpha channel/transparency is preserved).
    """
    # If the photo is not already a PillowImage object coming from the form,
    # open it as one before converting to RGBA.
    if not isinstance(photo, PillowImage.Image):
        photo = PillowImage.open(photo)
    converted = photo.convert("RGBA")
    bytes_obj = BytesIO()
    converted.save(bytes_obj, "PNG")
    return bytes_obj
class ImageDownloadException(Exception):
    """Raised when a remote image cannot be downloaded (non-200 response)
    or exceeds the maximum allowed size."""

    pass
def METHOD_NAME(image_url, max_size_bytes=(50 * 2**20)):
    """Download the image at *image_url* and return it as PNG data in a
    BytesIO object.

    It raises an ImageDownloadException if a GET for the URL results
    in a HTTP response with status code other than 200, or the
    downloaded resource doesn't seem to be an image. If the download
    exceeds max_size_bytes (default 50MB) then this will also throw an
    ImageDownloadException. The temporary file used for the download is
    deleted automatically when this function returns.
    """
    with NamedTemporaryFile(delete=True) as image_ntf:
        image_response = requests.get(image_url, stream=True)
        if image_response.status_code != 200:
            msg = (
                " Ignoring an image URL with non-200 status code "
                "({status_code}): {url}"
            )
            raise ImageDownloadException(
                msg.format(
                    status_code=image_response.status_code, url=image_url
                )
            )
        # Download no more than a megabyte at a time:
        # (was chunk_size=(2 * 20) == 40 bytes — a typo for 2**20)
        downloaded_so_far = 0
        for chunk in image_response.iter_content(chunk_size=(2**20)):
            downloaded_so_far += len(chunk)
            if downloaded_so_far > max_size_bytes:
                raise ImageDownloadException(
                    "The image exceeded the maximum allowed size"
                )
            image_ntf.write(chunk)
        return convert_image_to_png(image_ntf.file)
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import logging
import logging.config
import os
import pickle
import re
import select
import signal
import socket
import stat
import struct
import sys
import threading
from io import StringIO as string_io
from socketserver import ThreadingTCPServer, StreamRequestHandler
#
# The following code implements a socket listener for on-the-fly
# reconfiguration of logging.
#
# _listener holds the server object doing the listening
# rt holds the listener Server thread created in start_listener().
rt = None
# _udslistener holds the currently serving receiver, set from Server.run().
_udslistener = None
def udsstopListening():
    """
    Stop the listening server which was created with a call to listen().
    """
    global _udslistener
    logging._acquireLock()
    try:
        listener = _udslistener
        if listener:
            # Signal serve_until_stopped() to exit its polling loop.
            listener.abort = 1
            _udslistener = None
    finally:
        logging._releaseLock()
def METHOD_NAME(server_address='hue.uds', verify=None):
    """
    Start up a socket server on the specified unix domain socket, and listen for new
    configurations.

    Returns an unstarted ``Server`` thread; the caller runs it with ``.start()``.
    The server receives length-prefixed pickled LogRecords (the same wire
    format logging's SocketHandler emits) and re-emits them locally.
    """
    class ConfigStreamHandler(StreamRequestHandler):
        """
        Handler for a streaming logging request.
        This basically logs the record using whatever logging policy is
        configured locally.
        """
        def handle(self):
            """
            Handle multiple requests - each expected to be a 4-byte length,
            followed by the LogRecord in pickle format. Logs the record
            according to whatever policy is configured locally.
            """
            # NOTE(review): self.server.ready is a threading.Event, which is
            # always truthy, so this loop only exits when the peer closes the
            # connection (recv < 4 bytes) - confirm that is intended.
            while self.server.ready:
                chunk = self.connection.recv(4)
                if len(chunk) < 4:
                    break
                # Big-endian 4-byte length prefix, then the pickled record.
                slen = struct.unpack('>L', chunk)[0]
                chunk = self.connection.recv(slen)
                while len(chunk) < slen:
                    chunk = chunk + self.connection.recv(slen - len(chunk))
                # NOTE(review): pickle.loads on socket data is unsafe for
                # untrusted peers; acceptable only because this is a local,
                # permission-controlled unix domain socket.
                obj = pickle.loads(chunk)
                record = logging.makeLogRecord(obj)
                self.handleLogRecord(record)
        def handleLogRecord(self, record):
            # if a name is specified, we use the named logger rather than the one
            # implied by the record.
            if self.server.logname is not None:
                name = self.server.logname
            else:
                name = record.name
            logger = logging.getLogger(name)
            logger.handle(record)
    class ConfigSocketReceiver(ThreadingTCPServer):
        """
        A simple TCP socket-based logging config receiver.
        """
        request_queue_size = 1
        def __init__(self, server_address='hue.uds', handler=None,
                     ready=None, verify=None):
            ThreadingTCPServer.__init__(self, server_address, handler)
            logging._acquireLock()
            self.abort = 0
            logging._releaseLock()
            # 1s select() timeout so self.abort is rechecked regularly.
            self.timeout = 1
            self.ready = ready
            self.verify = verify
            self.logname = None
            self.server_address = server_address
        def server_bind(self):
            # Create a UDS socket
            self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            self.socket.bind(self.server_address)
            # Make the socket path world-writable so other local processes
            # can connect and send records.
            st = os.stat(self.server_address)
            os.chmod(self.server_address, st.st_mode | stat.S_IWOTH)
        def server_activate(self):
            self.socket.listen(self.request_queue_size)
        def serve_until_stopped(self):
            # Poll for connections until udsstopListening() sets self.abort.
            abort = 0
            while not abort:
                rd, wr, ex = select.select([self.socket.fileno()],
                                           [], [],
                                           self.timeout)
                if rd:
                    self.handle_request()
                logging._acquireLock()
                abort = self.abort
                logging._releaseLock()
            self.server_close()
    class Server(threading.Thread):
        def __init__(self, rcvr, hdlr, server_address, verify):
            super(Server, self).__init__()
            self.rcvr = rcvr
            self.hdlr = hdlr
            self.server_address = server_address
            self.verify = verify
            self.ready = threading.Event()
        def run(self):
            # Build the receiver inside the thread, publish it to the module
            # global so udsstopListening() can reach it, then serve.
            server = self.rcvr(server_address=self.server_address, handler=self.hdlr,
                               ready=self.ready, verify=self.verify)
            self.ready.set()
            global _udslistener
            logging._acquireLock()
            _udslistener = server
            logging._releaseLock()
            server.serve_until_stopped()
        def stop(self):
            udsstopListening()
            self.ready.clear()
        def stopped(self):
            # NOTE(review): despite the name, this returns True while the
            # server is READY/running - confirm callers expect this polarity.
            return self.ready.is_set()
    return Server(ConfigSocketReceiver, ConfigStreamHandler, server_address, verify)
def argprocessing(args=None, options=None):
    """Parse command-line arguments and populate *options*.

    Fixes: mutable default arguments ([] / {}) are replaced with None
    sentinels, and *args* is now actually passed to parse_args() instead of
    being silently ignored (parse_args() previously always read sys.argv;
    behavior is unchanged for the existing caller, which passes sys.argv[1:]).

    :param args: argument list to parse (defaults to sys.argv[1:]).
    :param options: dict to fill in; options['socket'] is set to the
        requested socket path, or <DESKTOP_LOG_DIR>/hue.uds by default.
    :returns: the populated options dict (also mutated in place).
    """
    if options is None:
        options = {}
    parser = argparse.ArgumentParser(prog='loglistener', description='Run Log listener listening the unix domain socket.')
    parser.add_argument('-s', '--socket', dest='socket', action='store', default='')
    opts = parser.parse_args(args)
    if opts.socket:
        options['socket'] = opts.socket
    else:
        options['socket'] = "%s/hue.uds" % (os.getenv("DESKTOP_LOG_DIR", "/var/log/hue"))
    return options
def enable_logging(args, options):
    """Configure logging from desktop's gunicorn_log.conf template (if it
    exists), substituting %LOG_DIR% with the actual log directory, then log
    listener startup information."""
    CONF_RE = re.compile('%LOG_DIR%')
    CONF_FILE = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                             '..', '..', 'conf', 'gunicorn_log.conf'))
    if os.path.exists(CONF_FILE):
        log_dir = os.getenv("DESKTOP_LOG_DIR", "/var/log/hue")
        raw = open(CONF_FILE).read()
        def _repl(match):
            # The pattern only ever matches the %LOG_DIR% placeholder.
            if match.group(0) == '%LOG_DIR%':
                return log_dir
        sio = string_io(CONF_RE.sub(_repl, raw))
        logging.config.fileConfig(sio)
    root_logger = logging.getLogger()
    root_logger.info("Starting Hue Log Listener server using socket file %s" % (options["socket"]))
    root_logger.info("Using logging.conf file %s" % (CONF_FILE))
class LogException(Exception):
    """Exception wrapper exposing an API error's message via str()."""
    def __init__(self, e):
        super(LogException, self).__init__(e)
        # NOTE(review): assumes *e* carries a .status.message attribute (an
        # API error object, not a plain Exception) - confirm all raise sites
        # pass such objects, otherwise this raises AttributeError.
        self.message = e.status.message
    def __str__(self):
        return self.message
def signal_handler(sig, frame):
    """Signal handler: stop the global listener thread and exit the process."""
    global rt
    print("Received signal to stop log listener %s" % sig)
    rt.stop()
    sys.exit(1)
def start_listener(args, options):
    """Remove any stale socket file, install signal handlers, configure
    logging and start the UDS log listener thread."""
    global rt
    try:
        os.unlink(options["socket"])
    except OSError:
        # Only propagate if the stale socket path actually still exists.
        if os.path.exists(options["socket"]):
            raise
    signal.signal(signal.SIGTERM, signal_handler)
    signal.signal(signal.SIGHUP, signal_handler)
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGQUIT, signal_handler)
    enable_logging(args, options)
    rt = METHOD_NAME(server_address=options["socket"], verify=None)
    try:
        # NOTE(review): Thread.start() returns immediately, so the finally
        # clause stops the listener right after startup; an rt.join() after
        # start() looks intended here - confirm before changing.
        rt.start()
    finally:
        rt.stop()
if __name__ == '__main__':
    # Parse CLI options into *options*, then run the listener until a signal
    # stops it.  (The final call was truncated in the source - missing
    # closing parenthesis - and is restored here.)
    args = sys.argv[1:]
    options = {}
    argprocessing(args=args, options=options)
    start_listener(args, options)
# coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetEnvironmentSpecificationVersionResult',
'AwaitableGetEnvironmentSpecificationVersionResult',
'get_environment_specification_version',
'get_environment_specification_version_output',
]
# Auto-generated by pulumi (see file header) - keep logic changes out of
# hand edits; regenerate instead.
@pulumi.output_type
class GetEnvironmentSpecificationVersionResult:
    """
    Azure Resource Manager resource envelope.
    """
    def __init__(__self__, id=None, name=None, properties=None, system_data=None, type=None):
        # Each field is validated then stored with pulumi.set so the
        # @pulumi.output_type machinery can expose it as a typed property.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if properties and not isinstance(properties, dict):
            raise TypeError("Expected argument 'properties' to be a dict")
        pulumi.set(__self__, "properties", properties)
        if system_data and not isinstance(system_data, dict):
            raise TypeError("Expected argument 'system_data' to be a dict")
        pulumi.set(__self__, "system_data", system_data)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def properties(self) -> 'outputs.EnvironmentSpecificationVersionResponse':
        """
        [Required] Additional attributes of the entity.
        """
        return pulumi.get(self, "properties")
    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> 'outputs.SystemDataResponse':
        """
        System data associated with resource provider
        """
        return pulumi.get(self, "system_data")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")
class AwaitableGetEnvironmentSpecificationVersionResult(GetEnvironmentSpecificationVersionResult):
    # Awaitable wrapper so the result can be used with `await`; the generator
    # body never actually suspends (the `if False: yield` only makes
    # __await__ a generator function).
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetEnvironmentSpecificationVersionResult(
            id=self.id,
            name=self.name,
            properties=self.properties,
            system_data=self.system_data,
            type=self.type)
def METHOD_NAME(name: Optional[str] = None,
                resource_group_name: Optional[str] = None,
                version: Optional[str] = None,
                workspace_name: Optional[str] = None,
                opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetEnvironmentSpecificationVersionResult:
    """
    Azure Resource Manager resource envelope.
    Azure REST API version: 2021-03-01-preview.

    :param str name: Container name.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str version: Version identifier.
    :param str workspace_name: Name of Azure Machine Learning workspace.
    """
    # Auto-generated invoke wrapper: marshal arguments, merge default invoke
    # options, call the provider, and unpack the result envelope.
    __args__ = dict()
    __args__['name'] = name
    __args__['resourceGroupName'] = resource_group_name
    __args__['version'] = version
    __args__['workspaceName'] = workspace_name
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    __ret__ = pulumi.runtime.invoke('azure-native:machinelearningservices:getEnvironmentSpecificationVersion', __args__, opts=opts, typ=GetEnvironmentSpecificationVersionResult).value
    return AwaitableGetEnvironmentSpecificationVersionResult(
        id=pulumi.get(__ret__, 'id'),
        name=pulumi.get(__ret__, 'name'),
        properties=pulumi.get(__ret__, 'properties'),
        system_data=pulumi.get(__ret__, 'system_data'),
        type=pulumi.get(__ret__, 'type'))
# Output-lifted variant: the decorator generates the body from the plain
# invoke function, so the `...` placeholder is intentional.
@_utilities.lift_output_func(METHOD_NAME)
def get_environment_specification_version_output(name: Optional[pulumi.Input[str]] = None,
                                                 resource_group_name: Optional[pulumi.Input[str]] = None,
                                                 version: Optional[pulumi.Input[str]] = None,
                                                 workspace_name: Optional[pulumi.Input[str]] = None,
                                                 opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetEnvironmentSpecificationVersionResult]:
    """
    Azure Resource Manager resource envelope.
    Azure REST API version: 2021-03-01-preview.

    :param str name: Container name.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str version: Version identifier.
    :param str workspace_name: Name of Azure Machine Learning workspace.
    """
    ...
#!/usr/bin/python3
#
# Copyright (C) Citrix Systems Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Clear the attach status for all VDIs in the given SR on this host.
# Additionally, reset the paused state if this host is the master.
import cleanup
import util
import lock
import XenAPI # pylint: disable=import-error
def METHOD_NAME(session, host_uuid, sr_uuid, is_sr_master):
    """Clear the attach status for all VDIs in the given SR for this host.

    When running on the SR master, also reset the 'paused' state of VDIs.
    GC is aborted first, and both the GC-running lock and the SR lock are
    held while sm_config is modified.  The locks are now released in a
    finally block so a failing XenAPI call no longer leaks them.
    """
    from vhdutil import LOCK_TYPE_SR
    from cleanup import LOCK_TYPE_RUNNING
    cleanup.abort(sr_uuid)
    gc_lock = lock.Lock(LOCK_TYPE_RUNNING, sr_uuid)
    sr_lock = lock.Lock(LOCK_TYPE_SR, sr_uuid)
    gc_lock.acquire()
    sr_lock.acquire()
    try:
        sr_ref = session.xenapi.SR.get_by_uuid(sr_uuid)
        host_ref = session.xenapi.host.get_by_uuid(host_uuid)
        host_key = "host_%s" % host_ref
        util.SMlog("RESET for SR %s (master: %s)" % (sr_uuid, is_sr_master))
        vdi_recs = session.xenapi.VDI.get_all_records_where( \
                "field \"SR\" = \"%s\"" % sr_ref)
        for vdi_ref, vdi_rec in vdi_recs.items():
            vdi_uuid = vdi_rec["uuid"]
            sm_config = vdi_rec["sm_config"]
            if sm_config.get(host_key):
                util.SMlog("Clearing attached status for VDI %s" % vdi_uuid)
                session.xenapi.VDI.remove_from_sm_config(vdi_ref, host_key)
            if is_sr_master and sm_config.get("paused"):
                util.SMlog("Clearing paused status for VDI %s" % vdi_uuid)
                session.xenapi.VDI.remove_from_sm_config(vdi_ref, "paused")
    finally:
        # Always release both locks, even if a XenAPI call above fails.
        sr_lock.release()
        gc_lock.release()
def reset_vdi(session, vdi_uuid, force, term_output=True, writable=True):
    """Clear the per-host attach markers ("host_<ref>" keys) from one VDI's
    sm_config.

    For each marker: if the host reference is invalid, or *force* is set,
    the marker is removed unconditionally; otherwise the host's on-slave
    plugin is asked whether the VDI is still open there, and the marker is
    only removed when it is not.

    :returns: True when all markers were cleared (or none existed);
        False when the VDI is still open somewhere (returned immediately
        if *writable*, otherwise processing continues and False is
        returned at the end).
    """
    vdi_ref = session.xenapi.VDI.get_by_uuid(vdi_uuid)
    vdi_rec = session.xenapi.VDI.get_record(vdi_ref)
    sm_config = vdi_rec["sm_config"]
    host_ref = None
    clean = True
    for key, val in sm_config.items():
        if key.startswith("host_"):
            host_ref = key[len("host_"):]
            host_uuid = None
            host_invalid = False
            host_str = host_ref
            try:
                host_rec = session.xenapi.host.get_record(host_ref)
                host_uuid = host_rec["uuid"]
                host_str = "%s (%s)" % (host_uuid, host_rec["name_label"])
            except XenAPI.Failure as e:
                # The host referenced by the marker no longer exists.
                msg = "Invalid host: %s (%s)" % (host_ref, e)
                util.SMlog(msg)
                if term_output:
                    print(msg)
                host_invalid = True
            if host_invalid:
                session.xenapi.VDI.remove_from_sm_config(vdi_ref, key)
                msg = "Invalid host: Force-cleared %s for %s on host %s" % \
                        (val, vdi_uuid, host_str)
                util.SMlog(msg)
                if term_output:
                    print(msg)
                continue
            if force:
                session.xenapi.VDI.remove_from_sm_config(vdi_ref, key)
                msg = "Force-cleared %s for %s on host %s" % \
                        (val, vdi_uuid, host_str)
                util.SMlog(msg)
                if term_output:
                    print(msg)
                continue
            # Ask the marked host whether it still has the VDI open.
            ret = session.xenapi.host.call_plugin(
                    host_ref, "on-slave", "is_open",
                    {"vdiUuid": vdi_uuid, "srRef": vdi_rec["SR"]})
            if ret != "False":
                util.SMlog("VDI %s is still open on host %s, not resetting" % \
                        (vdi_uuid, host_str))
                if term_output:
                    print("ERROR: VDI %s is still open on host %s" % \
                            (vdi_uuid, host_str))
                if writable:
                    return False
                else:
                    clean = False
            else:
                session.xenapi.VDI.remove_from_sm_config(vdi_ref, key)
                msg = "Cleared %s for %s on host %s" % \
                        (val, vdi_uuid, host_str)
                util.SMlog(msg)
                if term_output:
                    print(msg)
    if not host_ref:
        msg = "VDI %s is not marked as attached anywhere, nothing to do" \
                % vdi_uuid
        util.SMlog(msg)
        if term_output:
            print(msg)
    return clean
def usage():
    """Print command-line usage plus the data-corruption warning, then exit
    with status 1."""
    lines = [
        "Usage:",
        "all <HOST UUID> <SR UUID> [--master]",
        "single <VDI UUID> [--force]",
        "",
        "*WARNING!* calling with 'all' on an attached SR, or using "
        "--force may cause DATA CORRUPTION if the VDI is still "
        "attached somewhere. Always manually double-check that "
        "the VDI is not in use before running this script.",
    ]
    print("\n".join(lines))
    sys.exit(1)
if __name__ == '__main__':
    import sys
    import atexit
    # Accept 3-5 argv entries in total; per-mode counts are validated below.
    if len(sys.argv) not in [3, 4, 5]:
        usage()
    session = XenAPI.xapi_local()
    session.xenapi.login_with_password('root', '', '', 'SM')
    # Always log out of the local session on interpreter exit.
    atexit.register(session.xenapi.session.logout)
    mode = sys.argv[1]
    if mode == "all":
        if len(sys.argv) not in [4, 5]:
            usage()
        host_uuid = sys.argv[2]
        sr_uuid = sys.argv[3]
        is_master = False
        if len(sys.argv) == 5:
            if sys.argv[4] == "--master":
                is_master = True
            else:
                usage()
        METHOD_NAME(session, host_uuid, sr_uuid, is_master)
    elif mode == "single":
        vdi_uuid = sys.argv[2]
        force = False
        if len(sys.argv) == 4 and sys.argv[3] == "--force":
            force = True
        reset_vdi(session, vdi_uuid, force)
    elif len(sys.argv) in [3, 4]:
        # backwards compatibility: the arguments for the "all" case used to be
        # just host_uuid, sr_uuid, [is_master] (i.e., no "all" string, since it
        # was the only mode available). To avoid having to change XAPI, accept
        # the old format here as well.
        host_uuid = sys.argv[1]
        sr_uuid = sys.argv[2]
        is_master = False
        if len(sys.argv) == 4:
            if sys.argv[3] == "--master":
                is_master = True
            else:
                usage()
        METHOD_NAME(session, host_uuid, sr_uuid, is_master)
    else:
        usage()
import datetime
from django.contrib.auth import get_user_model
from django.test import RequestFactory, TestCase
from django.urls import reverse
from django.utils import timezone
from oauth2_provider.models import get_access_token_model, get_application_model, get_refresh_token_model
# Swappable model classes resolved from settings (django-oauth-toolkit).
Application = get_application_model()
AccessToken = get_access_token_model()
RefreshToken = get_refresh_token_model()
UserModel = get_user_model()
# Plaintext client secret shared by the test application fixtures below.
CLEARTEXT_SECRET = "1234567890abcdefghijklmnopqrstuvwxyz"
class BaseTest(TestCase):
    """Common fixtures: a resource-owner user, a developer user, and a
    confidential authorization-code application owned by the developer."""
    def setUp(self):
        self.factory = RequestFactory()
        self.test_user = UserModel.objects.create_user("test_user", "test@example.com", "123456")
        self.dev_user = UserModel.objects.create_user("dev_user", "dev@example.com", "123456")
        self.application = Application.objects.create(
            name="Test Application",
            redirect_uris="http://localhost http://example.com http://example.org",
            user=self.dev_user,
            client_type=Application.CLIENT_CONFIDENTIAL,
            authorization_grant_type=Application.GRANT_AUTHORIZATION_CODE,
            client_secret=CLEARTEXT_SECRET,
        )
    def tearDown(self):
        self.application.delete()
        self.test_user.delete()
        self.dev_user.delete()
class TestRevocationView(BaseTest):
    """Tests for the RFC 7009 token-revocation endpoint."""
    def test_revoke_access_token(self):
        """A confidential client can revoke its access token; the token is
        deleted and the response body is empty."""
        tok = AccessToken.objects.create(
            user=self.test_user,
            token="1234567890",
            application=self.application,
            expires=timezone.now() + datetime.timedelta(days=1),
            scope="read write",
        )
        data = {
            "client_id": self.application.client_id,
            "client_secret": CLEARTEXT_SECRET,
            "token": tok.token,
        }
        url = reverse("oauth2_provider:revoke-token")
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b"")
        self.assertFalse(AccessToken.objects.filter(id=tok.id).exists())
    def test_revoke_access_token_public(self):
        """A public client can revoke without presenting a client secret."""
        public_app = Application(
            name="Test Application",
            redirect_uris="http://localhost http://example.com http://example.org",
            user=self.dev_user,
            client_type=Application.CLIENT_PUBLIC,
            authorization_grant_type=Application.GRANT_AUTHORIZATION_CODE,
        )
        public_app.save()
        tok = AccessToken.objects.create(
            user=self.test_user,
            token="1234567890",
            application=public_app,
            expires=timezone.now() + datetime.timedelta(days=1),
            scope="read write",
        )
        data = {
            "client_id": public_app.client_id,
            "token": tok.token,
        }
        url = reverse("oauth2_provider:revoke-token")
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 200)
    def test_revoke_access_token_with_hint(self):
        """A correct token_type_hint still revokes the access token."""
        tok = AccessToken.objects.create(
            user=self.test_user,
            token="1234567890",
            application=self.application,
            expires=timezone.now() + datetime.timedelta(days=1),
            scope="read write",
        )
        data = {
            "client_id": self.application.client_id,
            "client_secret": CLEARTEXT_SECRET,
            "token": tok.token,
            "token_type_hint": "access_token",
        }
        url = reverse("oauth2_provider:revoke-token")
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 200)
        self.assertFalse(AccessToken.objects.filter(id=tok.id).exists())
    def test_revoke_access_token_with_invalid_hint(self):
        """An unknown token_type_hint must be ignored per RFC 7009."""
        tok = AccessToken.objects.create(
            user=self.test_user,
            token="1234567890",
            application=self.application,
            expires=timezone.now() + datetime.timedelta(days=1),
            scope="read write",
        )
        # invalid hint should have no effect
        data = {
            "client_id": self.application.client_id,
            "client_secret": CLEARTEXT_SECRET,
            "token": tok.token,
            "token_type_hint": "bad_hint",
        }
        url = reverse("oauth2_provider:revoke-token")
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 200)
        self.assertFalse(AccessToken.objects.filter(id=tok.id).exists())
    def test_revoke_refresh_token(self):
        """Revoking a refresh token marks it revoked and deletes the linked
        access token."""
        tok = AccessToken.objects.create(
            user=self.test_user,
            token="1234567890",
            application=self.application,
            expires=timezone.now() + datetime.timedelta(days=1),
            scope="read write",
        )
        rtok = RefreshToken.objects.create(
            user=self.test_user, token="999999999", application=self.application, access_token=tok
        )
        data = {
            "client_id": self.application.client_id,
            "client_secret": CLEARTEXT_SECRET,
            "token": rtok.token,
        }
        url = reverse("oauth2_provider:revoke-token")
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 200)
        refresh_token = RefreshToken.objects.filter(id=rtok.id).first()
        self.assertIsNotNone(refresh_token.revoked)
        self.assertFalse(AccessToken.objects.filter(id=rtok.access_token.id).exists())
    def test_revoke_refresh_token_with_revoked_access_token(self):
        """Revoking the refresh token still succeeds after its access token
        was already revoked."""
        tok = AccessToken.objects.create(
            user=self.test_user,
            token="1234567890",
            application=self.application,
            expires=timezone.now() + datetime.timedelta(days=1),
            scope="read write",
        )
        rtok = RefreshToken.objects.create(
            user=self.test_user, token="999999999", application=self.application, access_token=tok
        )
        # Revoke the access token first, then the refresh token.
        for token in (tok.token, rtok.token):
            data = {
                "client_id": self.application.client_id,
                "client_secret": CLEARTEXT_SECRET,
                "token": token,
            }
            url = reverse("oauth2_provider:revoke-token")
            response = self.client.post(url, data=data)
            self.assertEqual(response.status_code, 200)
        self.assertFalse(AccessToken.objects.filter(id=tok.id).exists())
        refresh_token = RefreshToken.objects.filter(id=rtok.id).first()
        self.assertIsNotNone(refresh_token.revoked)
    def METHOD_NAME(self):
        """
        From the revocation rfc, `Section 4.1.2`_ :
        If the server is unable to locate the token using the given hint,
        it MUST extend its search across all of its supported token types

        .. _`Section 4.1.2`: http://tools.ietf.org/html/draft-ietf-oauth-revocation-11#section-4.1.2
        """
        tok = AccessToken.objects.create(
            user=self.test_user,
            token="1234567890",
            application=self.application,
            expires=timezone.now() + datetime.timedelta(days=1),
            scope="read write",
        )
        data = {
            "client_id": self.application.client_id,
            "client_secret": CLEARTEXT_SECRET,
            "token": tok.token,
            "token_type_hint": "refresh_token",
        }
        url = reverse("oauth2_provider:revoke-token")
        response = self.client.post(url, data=data)
        self.assertEqual(response.status_code, 200)
        self.assertFalse(AccessToken.objects.filter(id=tok.id).exists())
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
from __future__ import annotations
__all__ = ("EmptyForm",)
from collections.abc import Callable
from inspect import signature
import awkward as ak
from awkward._errors import deprecate
from awkward._nplikes.numpylike import NumpyMetadata
from awkward._nplikes.shape import ShapeItem
from awkward._typing import Iterator, JSONSerializable, Self, final
from awkward._util import UNSET
from awkward.forms.form import Form, JSONMapping
np = NumpyMetadata.instance()
@final
class EmptyForm(Form):
    """Form describing an EmptyArray: a length-zero array of unknown type."""
    is_numpy = True
    is_unknown = True
    def __init__(self, *, parameters: JSONMapping | None = None, form_key=None):
        # EmptyForm can never carry parameters; only None/empty is accepted.
        if not (parameters is None or len(parameters) == 0):
            raise TypeError(f"{type(self).__name__} cannot contain parameters")
        self._init(parameters=parameters, form_key=form_key)
    def copy(
        self, *, parameters: JSONMapping | None = UNSET, form_key=UNSET
    ) -> EmptyForm:
        # Parameters are validated but intentionally not forwarded: they are
        # always empty for EmptyForm.
        if not (parameters is UNSET or parameters is None or len(parameters) == 0):
            raise TypeError(f"{type(self).__name__} cannot contain parameters")
        return EmptyForm(
            form_key=self._form_key if form_key is UNSET else form_key,
        )
    @classmethod
    def simplified(cls, *, parameters=None, form_key=None) -> Form:
        if not (parameters is None or len(parameters) == 0):
            raise TypeError(f"{cls.__name__} cannot contain parameters")
        return cls(parameters=parameters, form_key=form_key)
    def __repr__(self):
        args = self._repr_args()
        return "{}({})".format(type(self).__name__, ", ".join(args))
    def _to_dict_part(self, verbose, toplevel):
        return self._to_dict_extra({"class": "EmptyArray"}, verbose)
    @property
    def type(self):
        return ak.types.UnknownType()
    def __eq__(self, other) -> bool:
        return isinstance(other, EmptyForm) and self._form_key == other._form_key
    def to_NumpyForm(self, *args, **kwargs):
        # Supports both the deprecated positional `dtype` signature and the
        # new keyword-only `primitive` signature; dispatch is by whichever
        # implementation's signature binds the given arguments.
        def legacy_impl(dtype):
            deprecate(
                f"the `dtype` parameter in {type(self).__name__}.to_NumpyForm is deprecated, "
                f"in favour of a new `primitive` argument. Pass `primitive` by keyword to opt-in to the new behavior.",
                version="2.4.0",
            )
            return ak.forms.numpyform.from_dtype(dtype)
        def new_impl(*, primitive):
            return ak.forms.numpyform.NumpyForm(primitive)
        dispatch_table = [
            new_impl,
            legacy_impl,
        ]
        for func in dispatch_table:
            sig = signature(func)
            try:
                bound_arguments = sig.bind(*args, **kwargs)
            except TypeError:
                continue
            else:
                return func(*bound_arguments.args, **bound_arguments.kwargs)
        raise AssertionError(
            f"{type(self).__name__}.to_NumpyForm accepts either the new `primitive` argument as a keyword-only "
            f"argument, or the legacy `dtype` argument as positional or keyword"
        )
    def purelist_parameters(self, *keys: str) -> JSONSerializable:
        return None
    @property
    def METHOD_NAME(self) -> bool:
        # An empty array is trivially regular.
        return True
    @property
    def purelist_depth(self) -> int:
        return 1
    @property
    def is_identity_like(self) -> bool:
        return True
    @property
    def minmax_depth(self) -> tuple[int, int]:
        return (1, 1)
    @property
    def branch_depth(self) -> tuple[bool, int]:
        return (False, 1)
    @property
    def fields(self) -> list[str]:
        return []
    @property
    def is_tuple(self) -> bool:
        return False
    @property
    def dimension_optiontype(self) -> bool:
        return False
    def _columns(self, path, output, list_indicator):
        output.append(".".join(path))
    def _select_columns(self, match_specifier):
        return self
    def _prune_columns(self, is_inside_record_or_union: bool) -> Self:
        return self
    def _column_types(self) -> tuple[str, ...]:
        return ("empty",)
    def _length_one_buffer_lengths(self) -> Iterator[ShapeItem]:
        yield 0
    def __setstate__(self, state):
        if isinstance(state, dict):
            # read data pickled in Awkward 2.x
            self.__dict__.update(state)
        else:
            # read data pickled in Awkward 1.x
            # https://github.com/scikit-hep/awkward/blob/main-v1/src/python/forms.cpp#L240-L244
            has_identities, parameters, form_key = state
            if form_key is not None:
                form_key = "part0-" + form_key  # only the first partition
            self.__init__(form_key=form_key)
    def _expected_from_buffers(
        self, getkey: Callable[[Form, str], str]
    ) -> Iterator[tuple[str, np.dtype]]:
        # An EmptyForm needs no backing buffers.
        yield from ()
from typing import Any, Callable, Dict, Iterable, List, Set, SupportsIndex, Union, overload
from . import events
class ObservableDict(dict):
    """A dict that invokes *on_change* whenever its content is mutated.

    Values are wrapped with make_observable() so that mutations of nested
    containers are reported as well.
    """
    def __init__(self, data: Dict, on_change: Callable) -> None:
        super().__init__(data)
        for key, value in self.items():
            super().__setitem__(key, make_observable(value, on_change))
        self.on_change = lambda: events.handle_event(on_change, events.ObservableChangeEventArguments(sender=self))
    def pop(self, k: Any, d: Any = None) -> Any:
        # Fires on_change even when the key was absent and *d* is returned.
        item = super().pop(k, d)
        self.on_change()
        return item
    def popitem(self) -> Any:
        item = super().popitem()
        self.on_change()
        return item
    def update(self, *args: Any, **kwargs: Any) -> None:
        super().update(make_observable(dict(*args, **kwargs), self.on_change))
        self.on_change()
    def clear(self) -> None:
        super().clear()
        self.on_change()
    def setdefault(self, __key: Any, __default: Any = None) -> Any:
        # Fires on_change even when the key already existed.
        item = super().setdefault(__key, make_observable(__default, self.on_change))
        self.on_change()
        return item
    def __setitem__(self, __key: Any, __value: Any) -> None:
        super().__setitem__(__key, make_observable(__value, self.on_change))
        self.on_change()
    def __delitem__(self, __key: Any) -> None:
        super().__delitem__(__key)
        self.on_change()
    def __or__(self, other: Any) -> Any:
        # Non-mutating: returns a plain dict and does not notify.
        return super().__or__(other)
    def __ior__(self, other: Any) -> Any:
        super().__ior__(make_observable(dict(other), self.on_change))
        self.on_change()
        return self
class ObservableList(list):
    """A list that invokes *on_change* whenever its content is mutated.

    Items are wrapped with make_observable() so that mutations of nested
    containers are reported as well.
    """
    def __init__(self, data: List, on_change: Callable) -> None:
        super().__init__(data)
        for i, item in enumerate(self):
            super().__setitem__(i, make_observable(item, on_change))
        self.on_change = lambda: events.handle_event(on_change, events.ObservableChangeEventArguments(sender=self))
    def append(self, item: Any) -> None:
        super().append(make_observable(item, self.on_change))
        self.on_change()
    def extend(self, iterable: Iterable) -> None:
        super().extend(make_observable(list(iterable), self.on_change))
        self.on_change()
    def insert(self, index: SupportsIndex, obj: Any) -> None:
        super().insert(index, make_observable(obj, self.on_change))
        self.on_change()
    def remove(self, value: Any) -> None:
        super().remove(value)
        self.on_change()
    def pop(self, index: SupportsIndex = -1) -> Any:
        item = super().pop(index)
        self.on_change()
        return item
    def clear(self) -> None:
        super().clear()
        self.on_change()
    def METHOD_NAME(self, **kwargs: Any) -> None:
        super().METHOD_NAME(**kwargs)
        self.on_change()
    def reverse(self) -> None:
        super().reverse()
        self.on_change()
    def __delitem__(self, key: Union[SupportsIndex, slice]) -> None:
        super().__delitem__(key)
        self.on_change()
    def __setitem__(self, key: Union[SupportsIndex, slice], value: Any) -> None:
        super().__setitem__(key, make_observable(value, self.on_change))
        self.on_change()
    def __add__(self, other: Any) -> Any:
        # Non-mutating: returns a plain list and does not notify.
        return super().__add__(other)
    def __iadd__(self, other: Any) -> Any:
        super().__iadd__(make_observable(other, self.on_change))
        self.on_change()
        return self
class ObservableSet(set):
    """A set that invokes *on_change* whenever its content is mutated.

    Elements are wrapped with make_observable(); non-mutating operators
    (|, &, -, ^) return plain sets and do not notify.
    """
    def __init__(self, data: set, on_change: Callable) -> None:
        super().__init__(data)
        for item in self:
            super().add(make_observable(item, on_change))
        self.on_change = lambda: events.handle_event(on_change, events.ObservableChangeEventArguments(sender=self))
    def add(self, item: Any) -> None:
        super().add(make_observable(item, self.on_change))
        self.on_change()
    def remove(self, item: Any) -> None:
        super().remove(item)
        self.on_change()
    def discard(self, item: Any) -> None:
        # Fires on_change even when the item was not present.
        super().discard(item)
        self.on_change()
    def pop(self) -> Any:
        item = super().pop()
        self.on_change()
        return item
    def clear(self) -> None:
        super().clear()
        self.on_change()
    def update(self, *s: Iterable[Any]) -> None:
        super().update(make_observable(set(*s), self.on_change))
        self.on_change()
    def intersection_update(self, *s: Iterable[Any]) -> None:
        super().intersection_update(*s)
        self.on_change()
    def difference_update(self, *s: Iterable[Any]) -> None:
        super().difference_update(*s)
        self.on_change()
    def symmetric_difference_update(self, *s: Iterable[Any]) -> None:
        super().symmetric_difference_update(*s)
        self.on_change()
    def __or__(self, other: Any) -> Any:
        return super().__or__(other)
    def __ior__(self, other: Any) -> Any:
        super().__ior__(make_observable(other, self.on_change))
        self.on_change()
        return self
    def __and__(self, other: Any) -> set:
        return super().__and__(other)
    def __iand__(self, other: Any) -> Any:
        super().__iand__(make_observable(other, self.on_change))
        self.on_change()
        return self
    def __sub__(self, other: Any) -> set:
        return super().__sub__(other)
    def __isub__(self, other: Any) -> Any:
        super().__isub__(make_observable(other, self.on_change))
        self.on_change()
        return self
    def __xor__(self, other: Any) -> set:
        return super().__xor__(other)
    def __ixor__(self, other: Any) -> Any:
        super().__ixor__(make_observable(other, self.on_change))
        self.on_change()
        return self
@overload
def make_observable(data: Dict, on_change: Callable) -> ObservableDict:
    ...


@overload
def make_observable(data: List, on_change: Callable) -> ObservableList:
    ...


@overload
def make_observable(data: Set, on_change: Callable) -> ObservableSet:
    ...


def make_observable(data: Any, on_change: Callable) -> Any:
    """Wrap dicts, lists and sets in their observable counterparts.

    Any other value is returned unchanged.
    """
    # Dispatch table: first matching plain type wins (same order as before).
    wrappers = (
        (dict, ObservableDict),
        (list, ObservableList),
        (set, ObservableSet),
    )
    for plain_type, observable_type in wrappers:
        if isinstance(data, plain_type):
            return observable_type(data, on_change)
    return data
#!/usr/bin/python
# -*- coding: utf-8; -*-
#
# (c) 2008-2010 Mandriva, http://www.mandriva.com/
#
# $Id$
#
# This file is part of Pulse 2, http://pulse2.mandriva.org
#
# Pulse 2 is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Pulse 2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pulse 2. If not, see <http://www.gnu.org/licenses/>.
"""
Pulse 2 Package Server daemon.
"""
import os
import os.path
import sys
import getopt
import logging
import logging.config
import twisted
import twisted.copyright
from mmc.site import mmcconfdir
from pulse2.package_server import ThreadLauncher, init_logger_debug, getVersion, getRevision
from pulse2.package_server.config import P2PServerCP
def running(inifile, daemonize, noconsoledebug = True):
    """Initialize and run the package server main loop.

    @param inifile: path to the package-server .ini configuration file
    @param daemonize: if True, fork into the background before serving
    @param noconsoledebug: controls whether an extra stderr log handler is added
        when running in the foreground
    @return: 0 on normal reactor exit
    """
    config = P2PServerCP()
    config.pre_setup(inifile)
    # Try to create the default log directory
    if not os.path.exists(config.logdir):
        try:
            os.mkdir(config.logdir)
        except:
            # Maybe we are not root, so it may not be a problem if the mkdir
            # failed
            pass
    logging.config.fileConfig(inifile)
    logger = logging.getLogger()
    logger.debug("Logger loaded")
    logger.info("Pulse 2 Package Server %s starting..." % getVersion())
    logger.info("Pulse 2 Package server build '%s'" % str(getRevision()))
    logger.info("Using Python %s" % sys.version.split("\n")[0])
    logger.info("Using Python Twisted %s" % twisted.copyright.version)
    init_logger_debug()
    config.setup(inifile)
    # Set umask and effective UID and GID values (drop root privileges for serving)
    os.umask(config.umask)
    os.setegid(config.daemon_group)
    os.seteuid(config.daemon_user)
    logger.debug("Running as euid = %d, egid = %d" % (os.geteuid(), os.getegid()))
    if config.real_package_deletion:
        logger.warning("Real package deletion is activated")
    # When starting, we log to stderr too if we don't become a daemon
    # As there is a lot of log possible during the init of pserver, we no
    # longer log to stderr if we daemonize later
    if not daemonize:
        # NOTE(review): the flag name suggests the handler should be added when
        # console debug is wanted, yet it is added when noconsoledebug is True —
        # confirm against the '-s' option handling in __main__.
        if noconsoledebug:
            hdlr2 = logging.StreamHandler()
            logger.addHandler(hdlr2)
            logging.getLogger('imaging').addHandler(hdlr2)
    try:
        ThreadLauncher().initialize(config)
    except Exception, e:
        logger.exception(e)
        raise
    # Remove the pidfile on reactor shutdown.
    twisted.internet.reactor.addSystemEventTrigger('before', 'shutdown', cleanUp, config)
    # Become a daemon
    if daemonize:
        METHOD_NAME(config)
        # No more log to stderr
        # logger.removeHandler(hdlr2)
    ThreadLauncher().runThreads()
    twisted.internet.reactor.run()
    return 0
def cleanUp(config):
    """Shutdown hook: remove the pidfile (requires restoring root privileges).

    @param config: package-server configuration object providing the pidfile path
    """
    logger = logging.getLogger()
    logger.info('Pulse 2 Package Server shutting down, cleaning up...')
    # Unlink pidfile if it exists; the pidfile was written as root, so switch
    # the effective UID/GID back to root before deleting it.
    if os.path.isfile(config.pidfile):
        os.seteuid(0)
        os.setegid(0)
        os.unlink(config.pidfile)
def METHOD_NAME(config):
    """
    daemonize pulse2-package-server

    Performs the classic UNIX double-fork, detaches from the controlling
    terminal, and records the daemon PID in the configured pidfile.

    @param pidfile: path to pid file
    @type pidfile: str
    """
    pidfile = config.pidfile
    # Test if mmcagent has been already launched in daemon mode
    if os.path.isfile(pidfile):
        print pidfile+" pid already exist. Maybe pulse2-package-server is already running\n"
        print "use /etc/init.d script to stop and relaunch it"
        sys.exit(0)
    # do the UNIX double-fork magic, see Stevens' "Advanced
    # Programming in the UNIX Environment" for details (ISBN 0201563177)
    try:
        pid = os.fork()
        if pid > 0:
            # exit first parent
            sys.exit(0)
    except OSError, e:
        print >>sys.stderr, "fork #1 failed: %d (%s)" % (e.errno, e.strerror)
        sys.exit(1)
    # decouple from parent environment
    os.close(sys.stdin.fileno())
    os.close(sys.stdout.fileno())
    os.close(sys.stderr.fileno())
    os.chdir("/")
    os.setsid()
    # do second fork
    try:
        pid = os.fork()
        if pid > 0:
            # exit from second parent, print eventual PID before
            print "Daemon PID %d" % pid
            # Restore root privileges so the pidfile can be written.
            os.seteuid(0)
            os.setegid(0)
            # HACK: pidfile is written via a shell echo; a plain open()/write()
            # would avoid the shell entirely (pidfile comes from local config,
            # so this is not attacker-controlled, but it is fragile).
            os.system("echo " + str(pid) + " > " + pidfile)
            sys.exit(0)
    except OSError, e:
        print >>sys.stderr, "fork #2 failed: %d (%s)" % (e.errno, e.strerror)
        sys.exit(1)
if __name__ == "__main__":
    # Default configuration file location; overridable with -f.
    inifile = mmcconfdir + "/pulse2/package-server/package-server.ini"
    try:
        # -f <inifile>: alternate config; -d: stay in foreground;
        # -s: foreground without the extra console log handler.
        opts, suivarg = getopt.getopt(sys.argv[1:], "f:ds")
    except getopt.GetoptError:
        sys.exit(2)
    daemonize = True
    debugtoconsole = True
    for option, argument in opts:
        if option == "-f":
            inifile = argument
        elif option == "-d":
            daemonize = False
        elif option == "-s":
            daemonize = False
            debugtoconsole = False
    if not os.path.exists(inifile):
        print "File '%s' does not exist." % inifile
        sys.exit(3)
    # Start the daemon main loop
    sys.exit(running(inifile, daemonize, debugtoconsole))
# pylint: disable=unnecessary-comprehension,missing-docstring,too-few-public-methods,unnecessary-direct-lambda-call
"""Tests for loopvar-in-closure."""
from enum import Enum
# NOTE: functional-test fixture for pylint's cell-var-from-loop checker.
# The functions below are *expected* to produce no messages; their code is
# deliberately written this way and must not be "fixed".
def good_case():
    """No problems here."""
    lst = []
    for i in range(10):
        lst.append(i)


def good_case2():
    """No problems here."""
    return [i for i in range(10)]


def good_case3():
    """No problems here."""
    lst = []
    for i in range(10):
        lst.append(lambda i=i: i)


def good_case4():
    """No problems here."""
    lst = []
    for i in range(10):
        print(i)
        lst.append(lambda i: i)


def good_case5():
    """No problems here."""
    return (i for i in range(10))


def good_case6():
    """Accept use of the variable inside return."""
    for i in range(10):
        if i == 8:
            return lambda: i
    return lambda: -1


def good_case7():
    """Lambda defined and called in loop."""
    for i in range(10):
        print((lambda x: i + x)(1))


def good_case8():
    """Another eager binding of the cell variable."""
    funs = []
    for i in range(10):
        def func(bound_i=i):
            """Ignore."""
            return bound_i
        funs.append(func)
    return funs


def good_case9():
    """Ignore when the cell var is not defined in a loop"""
    i = 10
    lst = []
    for _ in range(10):
        lst.append(lambda: i)
    return lst


def good_case10():
    """Ignore when a loop variable is shadowed by an inner function"""
    lst = []
    for i in range(10):  # pylint: disable=unused-variable
        def func():
            i = 100
            def func2(arg=i):
                return arg
            return func2
        lst.append(func)
    return lst


def good_case_issue3107():
    """Eager binding of cell variable when used in a non-trivial default argument expression.
    """
    for i in [[2], [3]]:
        next(filter(lambda j, ix=i[0]: j == ix, [1, 3]))


def good_case_issue_5012():
    """Eager binding of cell variable when used as the default value of a keyword-only argument.
    https://github.com/pylint-dev/pylint/issues/5012
    """
    funs = []
    for i in range(5):
        def func(*, _i=i):
            print(_i)
        funs.append(func)
        def func2(_i=i):
            print(_i)
        funs.append(func2)
    return funs
# NOTE: fixture continued — each "# [cell-var-from-loop]" comment marks a line
# where the checker is EXPECTED to emit a message; those markers are functional
# and must stay on exactly these lines.
def bad_case():
    """Closing over a loop variable."""
    lst = []
    for i in range(10):
        print(i)
        lst.append(lambda: i)  # [cell-var-from-loop]


def bad_case2():
    """Closing over a loop variable."""
    return [lambda: i for i in range(10)]  # [cell-var-from-loop]


def bad_case3():
    """Closing over variable defined in loop."""
    lst = []
    for i in range(10):
        j = i * i
        lst.append(lambda: j)  # [cell-var-from-loop]
    return lst


def bad_case4():
    """Closing over variable defined in loop."""
    lst = []
    for i in range(10):
        def nested():
            """Nested function."""
            return i**2  # [cell-var-from-loop]
        lst.append(nested)
    return lst


def bad_case5():
    """Problematic case.

    If this function is used as
    >>> [x() for x in bad_case5()]
    it behaves 'as expected', i.e. the result is range(10).

    If it's used with
    >>> lst = list(bad_case5())
    >>> [x() for x in lst]
    the result is [9] * 10 again.
    """
    return (lambda: i for i in range(10))  # [cell-var-from-loop]


def bad_case6():
    """Closing over variable defined in loop."""
    lst = []
    for i, j in zip(range(10), range(10, 20)):
        print(j)
        lst.append(lambda: i)  # [cell-var-from-loop]
    return lst


def bad_case7():
    """Multiple variables unpacked in comprehension."""
    return [
        lambda: (
            x  # [cell-var-from-loop]
            + y)  # [cell-var-from-loop]
        for x, y in ((1, 2), (3, 4), (5, 6))
    ]


def bad_case8():
    """Closing over variable defined in loop below the function."""
    lst = []
    for i in range(10):
        lst.append(lambda: j)  # [cell-var-from-loop]
        j = i * i
    return lst


def bad_case9():
    """Detect when loop variable shadows an outer assignment."""
    lst = []
    i = 100
    for i in range(10):
        lst.append(lambda: i)  # [cell-var-from-loop]
    return lst


def bad_case10():
    """Detect when a loop variable is the default argument for a nested function"""
    lst = []
    for i in range(10):
        def func():
            def func2(arg=i):  # [cell-var-from-loop]
                return arg
            return func2
        lst.append(func)
    return lst


def METHOD_NAME():
    """Closing over variable that is used within a comprehension in the function body."""
    lst_a = [
        (lambda: n)  # [cell-var-from-loop]
        for n in range(3)
    ]
    lst_b = [
        (lambda: [n for _ in range(3)])  # [cell-var-from-loop]
        for n in range(3)
    ]
    return lst_a, lst_b
class Test(Enum):
    # Regression fixture: inside the Enum class body, TEST is still the raw
    # tuple (member conversion happens after the body finishes executing), so
    # indexing it in the default arguments below is legal and must not confuse
    # the checker.
    TEST = (40, 160)

    @staticmethod
    def new_test(minimum=TEST[0], maximum=TEST[1]):
        return minimum, maximum
# --- BEGIN COPYRIGHT BLOCK ---
# Copyright (C) 2018 Red Hat, Inc.
# All rights reserved.
#
# License: GPL (version 3 or any later version).
# See LICENSE for details.
# --- END COPYRIGHT BLOCK ---
import json
from lib389.plugins import Plugin, Plugins
from lib389.utils import ensure_dict_str
from lib389.cli_base import (
_generic_get,
_get_arg,
)
from lib389.cli_conf import generic_object_edit
from lib389.cli_conf.plugins import memberof as cli_memberof
from lib389.cli_conf.plugins import usn as cli_usn
from lib389.cli_conf.plugins import rootdn_ac as cli_rootdn_ac
from lib389.cli_conf.plugins import referint as cli_referint
from lib389.cli_conf.plugins import accountpolicy as cli_accountpolicy
from lib389.cli_conf.plugins import attruniq as cli_attruniq
from lib389.cli_conf.plugins import dna as cli_dna
from lib389.cli_conf.plugins import linkedattr as cli_linkedattr
from lib389.cli_conf.plugins import managedentries as cli_managedentries
from lib389.cli_conf.plugins import pampassthrough as cli_pampassthrough
from lib389.cli_conf.plugins import ldappassthrough as cli_ldappassthrough
from lib389.cli_conf.plugins import retrochangelog as cli_retrochangelog
from lib389.cli_conf.plugins import automember as cli_automember
from lib389.cli_conf.plugins import posix_winsync as cli_posix_winsync
from lib389.cli_conf.plugins import contentsync as cli_contentsync
from lib389.cli_conf.plugins import entryuuid as cli_entryuuid
SINGULAR = Plugin  # resource class representing a single plugin entry
MANY = Plugins  # collection class used by the list/show sub-commands
RDN = 'cn'  # naming attribute of plugin entries

# Maps CLI option names (dest) to the LDAP attributes they configure.
arg_to_attr = {
    'initfunc': 'nsslapd-pluginInitfunc',
    'enabled': 'nsslapd-pluginEnabled',
    'path': 'nsslapd-pluginPath',
    'type': 'nsslapd-pluginType',
    'id': 'nsslapd-pluginId',
    'version': 'nsslapd-pluginVersion',
    'vendor': 'nsslapd-pluginVendor',
    'description': 'nsslapd-pluginDescription',
    'depends_on_type': 'nsslapd-plugin-depends-on-type',
    'depends_on_named': 'nsslapd-plugin-depends-on-named',
    'precedence': 'nsslapd-pluginPrecedence'
}
def METHOD_NAME(inst, basedn, log, args):
    """List all configured (enabled and disabled) plugins.

    Prints either a JSON document ({"type": "list", "items": [...]}) when
    --json was requested, or one plugin cn per line otherwise.
    """
    plugin_log = log.getChild('plugin_list')
    # Hoist the json check: the original tested `args and args.json` four
    # times, and had a redundant `elif len(plugins) > 0` after `== 0`.
    use_json = bool(args and args.json)
    plugins = MANY(inst, basedn).list()
    # We might sort this in the future
    if use_json:
        json_result = {
            "type": "list",
            "items": [ensure_dict_str(dict(plugin.get_all_attrs())) for plugin in plugins],
        }
        print(json.dumps(json_result, indent=4))
    elif not plugins:
        plugin_log.info("No objects to display")
    else:
        for plugin in plugins:
            plugin_data = ensure_dict_str(dict(plugin.get_all_attrs()))
            plugin_log.info(plugin_data["cn"][0])
def plugin_get(inst, basedn, log, args):
    """Show the data of a single plugin, selected by cn (prompting if absent)."""
    rdn = _get_arg(args.selector, msg="Enter %s to retrieve" % RDN)
    _generic_get(inst, basedn, log.getChild('plugin_get'), MANY, rdn, args)
def vaidate_args(plugin, attr_arg_list):
    """Check if the attribute needs to be changed
    Return mods for the replace_many() method
    """
    # Keep only attributes whose requested value (case-insensitively)
    # differs from the value currently stored on the plugin entry.
    return [
        (attr_name, requested)
        for attr_name, requested in attr_arg_list.items()
        if requested is not None
        and plugin.get_attr_val_utf8_l(attr_name) != requested.lower()
    ]
def plugin_edit(inst, basedn, log, args):
    """Edit the settings of a plugin, selected by cn (prompting if absent).

    The actual attribute mapping/update is delegated to generic_object_edit
    using the module-level arg_to_attr table.
    """
    log = log.getChild('plugin_edit')
    rdn = _get_arg(args.selector, msg="Enter %s to retrieve" % RDN)
    plugins = Plugins(inst)
    plugin = plugins.get(rdn)
    generic_object_edit(plugin, log, args, arg_to_attr)
def create_parser(subparsers):
    """Register the 'plugin' command tree on *subparsers*.

    Each per-plugin CLI module contributes its own sub-commands; the generic
    list/show/set sub-commands are defined here.
    """
    plugin_parser = subparsers.add_parser('plugin', help="Manage plug-ins available on the server")
    subcommands = plugin_parser.add_subparsers(help="Plugins")

    # Per-plugin sub-command trees (memberof, automember, ...).
    cli_memberof.create_parser(subcommands)
    cli_automember.create_parser(subcommands)
    cli_referint.create_parser(subcommands)
    cli_rootdn_ac.create_parser(subcommands)
    cli_usn.create_parser(subcommands)
    cli_accountpolicy.create_parser(subcommands)
    cli_attruniq.create_parser(subcommands)
    cli_dna.create_parser(subcommands)
    cli_ldappassthrough.create_parser(subcommands)
    cli_linkedattr.create_parser(subcommands)
    cli_managedentries.create_parser(subcommands)
    cli_pampassthrough.create_parser(subcommands)
    cli_retrochangelog.create_parser(subcommands)
    cli_posix_winsync.create_parser(subcommands)
    cli_contentsync.create_parser(subcommands)
    cli_entryuuid.create_parser(subcommands)

    # Generic sub-commands applying to any plugin entry.
    list_parser = subcommands.add_parser('list', help="List current configured (enabled and disabled) plugins")
    list_parser.set_defaults(func=METHOD_NAME)

    get_parser = subcommands.add_parser('show', help='Show the plugin data')
    get_parser.set_defaults(func=plugin_get)
    get_parser.add_argument('selector', nargs='?', help='The plugin to search for')

    edit_parser = subcommands.add_parser('set', help='Edit the plugin settings')
    edit_parser.set_defaults(func=plugin_edit)
    edit_parser.add_argument('selector', nargs='?', help='The plugin to edit')
    # The option dests below must match the keys of arg_to_attr.
    edit_parser.add_argument('--type', help='The type of plugin.')
    edit_parser.add_argument('--enabled', choices=['on', 'off'],
                             help='Identifies whether or not the plugin is enabled.')
    edit_parser.add_argument('--path', help='The plugin library name (without the library suffix).')
    edit_parser.add_argument('--initfunc', help='An initialization function of the plugin.')
    edit_parser.add_argument('--id', help='The plugin ID.')
    edit_parser.add_argument('--vendor', help='The vendor of plugin.')
    edit_parser.add_argument('--version', help='The version of plugin.')
    edit_parser.add_argument('--description', help='The description of the plugin.')
    edit_parser.add_argument('--depends-on-type',
                             help='All plug-ins with a type value which matches one of the values '
                                  'in the following valid range will be started by the server prior to this plug-in.')
    edit_parser.add_argument('--depends-on-named',
                             help='The plug-in name matching one of the following values will be '
                                  'started by the server prior to this plug-in')
    edit_parser.add_argument('--precedence', help='The priority it has in the execution order of plug-ins')
import re
import sys
from datetime import datetime
from typing import Any, Collection, Dict, List, Optional, Sequence
from urllib.parse import urlencode
from django.conf import settings
from django.db.backends.utils import CursorWrapper
from django.template import loader
from django.urls import reverse
from markupsafe import Markup
from zerver.lib.url_encoding import append_url_query_string
from zerver.models import UserActivity, get_realm
if sys.version_info < (3, 9):  # nocoverage
    # zoneinfo only entered the stdlib in 3.9; fall back to the backport.
    from backports import zoneinfo
else:  # nocoverage
    import zoneinfo

# Activity reports are rendered in US Eastern time.
eastern_tz = zoneinfo.ZoneInfo("America/New_York")

# NOTE(review): this guard is currently a no-op ("pass") — presumably a
# leftover hook for optional billing-only imports; confirm before removing.
if settings.BILLING_ENABLED:
    pass
def make_table(
    title: str, cols: Sequence[str], rows: Sequence[Any], has_row_class: bool = False
) -> str:
    """Render *rows* into the ad-hoc-query HTML table template.

    When has_row_class is False, each row is wrapped into the
    {"cells": ..., "row_class": None} shape the template expects.
    """
    if not has_row_class:
        rows = [dict(cells=row, row_class=None) for row in rows]

    data = dict(title=title, cols=cols, rows=rows)
    return loader.render_to_string(
        "analytics/ad_hoc_query.html",
        dict(data=data),
    )
def dictfetchall(cursor: CursorWrapper) -> List[Dict[str, Any]]:
    """Returns all rows from a cursor as a dict"""
    # Column names come from the DB-API cursor description (first tuple element).
    column_names = [column[0] for column in cursor.description]
    return [dict(zip(column_names, row)) for row in cursor.fetchall()]
def format_date_for_activity_reports(date: Optional[datetime]) -> str:
    """Render *date* in US Eastern time, or return '' when there is no date."""
    if not date:
        return ""
    return date.astimezone(eastern_tz).strftime("%Y-%m-%d %H:%M")
def user_activity_link(email: str, user_profile_id: int) -> Markup:
    """Link to the per-user activity page, labeled with the user's email."""
    from analytics.views.user_activity import get_user_activity

    url = reverse(get_user_activity, kwargs=dict(user_profile_id=user_profile_id))
    return Markup('<a href="{url}">{email}</a>').format(url=url, email=email)


def METHOD_NAME(realm_str: str) -> Markup:
    """Link to the per-realm activity page, labeled with the realm string."""
    from analytics.views.realm_activity import get_realm_activity

    url = reverse(get_realm_activity, kwargs=dict(realm_str=realm_str))
    return Markup('<a href="{url}">{realm_str}</a>').format(url=url, realm_str=realm_str)


def realm_stats_link(realm_str: str) -> Markup:
    """Pie-chart icon linking to the realm stats page."""
    from analytics.views.stats import stats_for_realm

    url = reverse(stats_for_realm, kwargs=dict(realm_str=realm_str))
    return Markup('<a href="{url}"><i class="fa fa-pie-chart"></i></a>').format(url=url)


def realm_support_link(realm_str: str) -> Markup:
    """Link to the support page pre-filtered to this realm (?q=<realm>)."""
    support_url = reverse("support")
    query = urlencode({"q": realm_str})
    url = append_url_query_string(support_url, query)
    return Markup('<a href="{url}">{realm_str}</a>').format(url=url, realm_str=realm_str)


def realm_url_link(realm_str: str) -> Markup:
    """Home icon linking to the realm's own URI."""
    url = get_realm(realm_str).uri
    return Markup('<a href="{url}"><i class="fa fa-home"></i></a>').format(url=url)


def remote_installation_stats_link(server_id: int, hostname: str) -> Markup:
    """Pie-chart icon plus hostname linking to a remote installation's stats."""
    from analytics.views.stats import stats_for_remote_installation

    url = reverse(stats_for_remote_installation, kwargs=dict(remote_server_id=server_id))
    return Markup('<a href="{url}"><i class="fa fa-pie-chart"></i>{hostname}</a>').format(
        url=url, hostname=hostname
    )
def get_user_activity_summary(records: Collection[UserActivity]) -> Dict[str, Any]:
    """Aggregate a user's UserActivity rows into per-"action" counts/last-visit.

    Actions include generic buckets ("use", "send", "pointer", "desktop",
    "website") as well as one bucket per client name.
    """
    #: The type annotation used above is clearly overly permissive.
    #: We should perhaps use TypedDict to clearly lay out the schema
    #: for the user activity summary.
    summary: Dict[str, Any] = {}

    def update(action: str, record: UserActivity) -> None:
        # Accumulate count and keep the most recent last_visit per action.
        if action not in summary:
            summary[action] = dict(
                count=record.count,
                last_visit=record.last_visit,
            )
        else:
            summary[action]["count"] += record.count
            summary[action]["last_visit"] = max(
                summary[action]["last_visit"],
                record.last_visit,
            )

    if records:
        first_record = next(iter(records))
        summary["name"] = first_record.user_profile.full_name
        summary["user_profile_id"] = first_record.user_profile.id

    for record in records:
        client = record.client.name
        query = str(record.query)
        update("use", record)
        if client == "API":
            m = re.match("/api/.*/external/(.*)", query)
            if m:
                # Rebucket generic API traffic under the webhook integration name.
                # NOTE(review): this update() plus the unconditional update(client, ...)
                # below means matched webhook clients are counted twice — confirm intended.
                client = m.group(1)
                update(client, record)
        if client.startswith("desktop"):
            update("desktop", record)
        if client == "website":
            update("website", record)
        if ("send_message" in query) or re.search("/api/.*/external/.*", query):
            update("send", record)
        if query in [
            "/json/update_pointer",
            "/json/users/me/pointer",
            "/api/v1/update_pointer",
            "update_pointer_backend",
        ]:
            update("pointer", record)
        update(client, record)

    return summary
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
    "storage-mover job-definition stop-job",
)
class StopJob(AAZCommand):
    """Requests the Agent of any active instance of this Job Definition to stop.

    :example: job-definition stop-job
        az storage-mover job-definition stop-job -g {rg} --job-definition-name {job_definition} --project-name {project_name} --storage-mover-name {mover_name}
    """

    # NOTE: generated by aaz-dev-tools — prefer regenerating over hand-editing.
    _aaz_info = {
        "version": "2023-07-01-preview",
        "resources": [
            ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.storagemover/storagemovers/{}/projects/{}/jobdefinitions/{}/stopjob", "2023-07-01-preview"],
        ]
    }

    def _handler(self, command_args):
        # Standard AAZCommand entry point: run the HTTP operation, then shape output.
        super()._handler(command_args)
        self._execute_operations()
        return self._output()

    _args_schema = None

    @classmethod
    def _build_arguments_schema(cls, *args, **kwargs):
        # Build (and cache on the class) the CLI argument schema.
        if cls._args_schema is not None:
            return cls._args_schema
        cls._args_schema = super()._build_arguments_schema(*args, **kwargs)

        # define Arg Group ""

        _args_schema = cls._args_schema
        _args_schema.job_definition_name = AAZStrArg(
            options=["--job-definition-name"],
            help="The name of the Job Definition resource.",
            required=True,
            id_part="child_name_2",
        )
        _args_schema.project_name = AAZStrArg(
            options=["--project-name"],
            help="The name of the Project resource.",
            required=True,
            id_part="child_name_1",
        )
        _args_schema.resource_group = AAZResourceGroupNameArg(
            required=True,
        )
        _args_schema.storage_mover_name = AAZStrArg(
            options=["--storage-mover-name"],
            help="The name of the Storage Mover resource.",
            required=True,
            id_part="name",
        )
        return cls._args_schema

    def _execute_operations(self):
        # Single POST operation; pre/post hooks are extension points for subclasses.
        self.pre_operations()
        self.JobDefinitionsStopJob(ctx=self.ctx)()
        self.post_operations()

    @register_callback
    def pre_operations(self):
        pass

    @register_callback
    def post_operations(self):
        pass

    def _output(self, *args, **kwargs):
        result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True)
        return result

    class JobDefinitionsStopJob(AAZHttpOperation):
        # Wraps POST .../jobDefinitions/{name}/stopJob; only 200 is a success code.
        CLIENT_TYPE = "MgmtClient"

        def __call__(self, *args, **kwargs):
            request = self.make_request()
            session = self.client.send_request(request=request, stream=False, **kwargs)
            if session.http_response.status_code in [200]:
                return self.METHOD_NAME(session)

            return self.on_error(session.http_response)

        @property
        def url(self):
            return self.client.format_url(
                "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.StorageMover/storageMovers/{storageMoverName}/projects/{projectName}/jobDefinitions/{jobDefinitionName}/stopJob",
                **self.url_parameters
            )

        @property
        def method(self):
            return "POST"

        @property
        def error_format(self):
            return "MgmtErrorFormat"

        @property
        def url_parameters(self):
            parameters = {
                **self.serialize_url_param(
                    "jobDefinitionName", self.ctx.args.job_definition_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "projectName", self.ctx.args.project_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "resourceGroupName", self.ctx.args.resource_group,
                    required=True,
                ),
                **self.serialize_url_param(
                    "storageMoverName", self.ctx.args.storage_mover_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "subscriptionId", self.ctx.subscription_id,
                    required=True,
                ),
            }
            return parameters

        @property
        def query_parameters(self):
            parameters = {
                **self.serialize_query_param(
                    "api-version", "2023-07-01-preview",
                    required=True,
                ),
            }
            return parameters

        @property
        def header_parameters(self):
            parameters = {
                **self.serialize_header_param(
                    "Accept", "application/json",
                ),
            }
            return parameters

        def METHOD_NAME(self, session):
            # 200 handler: deserialize the response body into ctx.vars.instance.
            data = self.deserialize_http_content(session)
            self.ctx.set_var(
                "instance",
                data,
                schema_builder=self._build_schema_on_200
            )

        _schema_on_200 = None

        @classmethod
        def _build_schema_on_200(cls):
            # Response schema: a single read-only jobRunResourceId field.
            if cls._schema_on_200 is not None:
                return cls._schema_on_200

            cls._schema_on_200 = AAZObjectType()

            _schema_on_200 = cls._schema_on_200
            _schema_on_200.job_run_resource_id = AAZStrType(
                serialized_name="jobRunResourceId",
                flags={"read_only": True},
            )

            return cls._schema_on_200
class _StopJobHelper:
    """Helper class for StopJob"""
    # Empty placeholder emitted by the code generator for shared schema builders.


__all__ = ["StopJob"]
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
    "automanage configuration-profile-assignment arc show",
)
class Show(AAZCommand):
    """Get information about an association between an ARC machine and Automanage configuration profile

    :example: show configuration-profile-assignment for arc
        az automanage configuration-profile-assignment arc show -n default -g {rg} --machine-name {arc_name}
    """

    # NOTE: generated by aaz-dev-tools — prefer regenerating over hand-editing.
    _aaz_info = {
        "version": "2022-05-04",
        "resources": [
            ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.hybridcompute/machines/{}/providers/microsoft.automanage/configurationprofileassignments/{}", "2022-05-04"],
        ]
    }

    def _handler(self, command_args):
        # Standard AAZCommand entry point: run the GET operation, then shape output.
        super()._handler(command_args)
        self._execute_operations()
        return self._output()

    _args_schema = None

    @classmethod
    def _build_arguments_schema(cls, *args, **kwargs):
        # Build (and cache on the class) the CLI argument schema.
        if cls._args_schema is not None:
            return cls._args_schema
        cls._args_schema = super()._build_arguments_schema(*args, **kwargs)

        # define Arg Group ""

        _args_schema = cls._args_schema
        _args_schema.configuration_profile_assignment_name = AAZStrArg(
            options=["-n", "--name", "--configuration-profile-assignment-name"],
            help="The configuration profile assignment name.",
            required=True,
            id_part="child_name_1",
        )
        _args_schema.machine_name = AAZStrArg(
            options=["--machine-name"],
            help="The name of the Arc machine.",
            required=True,
            id_part="name",
        )
        _args_schema.resource_group = AAZResourceGroupNameArg(
            required=True,
        )
        return cls._args_schema

    def _execute_operations(self):
        # Single GET operation; pre/post hooks are extension points for subclasses.
        self.pre_operations()
        self.ConfigurationProfileHCRPAssignmentsGet(ctx=self.ctx)()
        self.post_operations()

    @register_callback
    def pre_operations(self):
        pass

    @register_callback
    def post_operations(self):
        pass

    def _output(self, *args, **kwargs):
        result = self.deserialize_output(self.ctx.vars.instance, client_flatten=True)
        return result

    class ConfigurationProfileHCRPAssignmentsGet(AAZHttpOperation):
        # Wraps GET .../configurationProfileAssignments/{name}; only 200 succeeds.
        CLIENT_TYPE = "MgmtClient"

        def __call__(self, *args, **kwargs):
            request = self.make_request()
            session = self.client.send_request(request=request, stream=False, **kwargs)
            if session.http_response.status_code in [200]:
                return self.on_200(session)

            return self.on_error(session.http_response)

        @property
        def METHOD_NAME(self):
            return self.client.format_url(
                "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.HybridCompute/machines/{machineName}/providers/Microsoft.Automanage/configurationProfileAssignments/{configurationProfileAssignmentName}",
                **self.url_parameters
            )

        @property
        def method(self):
            return "GET"

        @property
        def error_format(self):
            return "MgmtErrorFormat"

        @property
        def url_parameters(self):
            parameters = {
                **self.serialize_url_param(
                    "configurationProfileAssignmentName", self.ctx.args.configuration_profile_assignment_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "machineName", self.ctx.args.machine_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "resourceGroupName", self.ctx.args.resource_group,
                    required=True,
                ),
                **self.serialize_url_param(
                    "subscriptionId", self.ctx.subscription_id,
                    required=True,
                ),
            }
            return parameters

        @property
        def query_parameters(self):
            parameters = {
                **self.serialize_query_param(
                    "api-version", "2022-05-04",
                    required=True,
                ),
            }
            return parameters

        @property
        def header_parameters(self):
            parameters = {
                **self.serialize_header_param(
                    "Accept", "application/json",
                ),
            }
            return parameters

        def on_200(self, session):
            # 200 handler: deserialize the response body into ctx.vars.instance.
            data = self.deserialize_http_content(session)
            self.ctx.set_var(
                "instance",
                data,
                schema_builder=self._build_schema_on_200
            )

        _schema_on_200 = None

        @classmethod
        def _build_schema_on_200(cls):
            # Response schema for the assignment resource (ARM envelope plus
            # the Automanage-specific properties bag).
            if cls._schema_on_200 is not None:
                return cls._schema_on_200

            cls._schema_on_200 = AAZObjectType()

            _schema_on_200 = cls._schema_on_200
            _schema_on_200.id = AAZStrType(
                flags={"read_only": True},
            )
            _schema_on_200.managed_by = AAZStrType(
                serialized_name="managedBy",
            )
            _schema_on_200.name = AAZStrType(
                flags={"read_only": True},
            )
            _schema_on_200.properties = AAZObjectType()
            _schema_on_200.system_data = AAZObjectType(
                serialized_name="systemData",
                flags={"read_only": True},
            )
            _schema_on_200.type = AAZStrType(
                flags={"read_only": True},
            )

            properties = cls._schema_on_200.properties
            properties.configuration_profile = AAZStrType(
                serialized_name="configurationProfile",
            )
            properties.status = AAZStrType(
                flags={"read_only": True},
            )
            properties.target_id = AAZStrType(
                serialized_name="targetId",
                flags={"read_only": True},
            )

            system_data = cls._schema_on_200.system_data
            system_data.created_at = AAZStrType(
                serialized_name="createdAt",
            )
            system_data.created_by = AAZStrType(
                serialized_name="createdBy",
            )
            system_data.created_by_type = AAZStrType(
                serialized_name="createdByType",
            )
            system_data.last_modified_at = AAZStrType(
                serialized_name="lastModifiedAt",
            )
            system_data.last_modified_by = AAZStrType(
                serialized_name="lastModifiedBy",
            )
            system_data.last_modified_by_type = AAZStrType(
                serialized_name="lastModifiedByType",
            )

            return cls._schema_on_200
class _ShowHelper:
    """Helper class for Show"""
    # Empty placeholder emitted by the code generator for shared schema builders.


__all__ = ["Show"]
# A part of NonVisual Desktop Access (NVDA)
# This file is covered by the GNU General Public License.
# See the file COPYING for more details.
# Copyright (C) 2010-2022 NV Access Limited, Soronel Haetir, Babbage B.V., Francisco Del Roio,
# Leonard de Ruijter
import objbase
import comtypes
from locationHelper import RectLTWH
from logHandler import log
import textInfos.offsets
from NVDAObjects.behaviors import EditableText, EditableTextWithoutAutoSelectDetection
from NVDAObjects.window import Window
from comtypes.automation import IDispatch
from NVDAObjects.window import DisplayModelEditableText
from NVDAObjects.IAccessible import IAccessible
from NVDAObjects.UIA import UIA, WpfTextView, UIATextInfo
from enum import IntEnum
import appModuleHandler
import controlTypes
import threading
import UIAHandler
# A few helpful constants
# vsWindowType Enum
class VsWindowType(IntEnum):
    """Subset of the Visual Studio automation vsWindowType enumeration."""
    ToolWindow = 15
    Document = 16
    Output = 17


# Scroll bar selector (Win32 GetScrollInfo/GetScrollBarInfo nBar values)
SB_HORZ = 0
SB_VERT = 1
class AppModule(appModuleHandler.AppModule):
    """NVDA app module for Visual Studio."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Per-thread cache of the DTE automation object (see _get_DTE).
        self._DTECache = {}
        # productVersion is like "16.11.5"; keep major/minor as ints for
        # version checks in chooseNVDAObjectOverlayClasses.
        vsMajor, vsMinor, rest = self.productVersion.split(".", 2)
        self.vsMajor, self.vsMinor = int(vsMajor), int(vsMinor)

    def chooseNVDAObjectOverlayClasses(self, obj, clsList):
        """Swap in Visual-Studio-specific overlay classes for editor/tree objects."""
        if WpfTextView in clsList:
            clsList.remove(WpfTextView)
            clsList.insert(0, VsWpfTextView)
        # Only use this overlay class if the top level automation object for the IDE can be retrieved,
        # as it will not work otherwise.
        elif obj.windowClassName == "VsTextEditPane" and self.DTE:
            try:
                clsList.remove(DisplayModelEditableText)
            except ValueError:
                pass
            clsList[0:0] = [VsTextEditPane, EditableTextWithoutAutoSelectDetection]
        elif (
            # VS 2017 15.3+ and VS 2019+ only.
            (self.vsMajor == 15 and self.vsMinor >= 3)
            or self.vsMajor >= 16
        ):
            if obj.role == controlTypes.Role.TREEVIEWITEM and obj.windowClassName == "LiteTreeView32":
                clsList.insert(0, ObjectsTreeItem)

    def _getDTE(self):
        # Retrieve the top level automation object for the IDE by scanning the
        # COM Running Object Table for this process's DTE moniker.
        bctx = objbase.CreateBindCtx()
        ROT = objbase.GetRunningObjectTable()
        for mon in ROT:
            displayName = mon.GetDisplayName(bctx, None)
            if displayName == f"!VisualStudio.DTE.{self.vsMajor}.0:{self.processID}":
                return comtypes.client.dynamic.Dispatch(ROT.GetObject(mon).QueryInterface(IDispatch))
        else:
            # None found.
            log.debugWarning("No top level automation object found", exc_info=True)
            return None

    def _get_DTE(self):
        # COM objects are apartment-bound, so cache one DTE per thread.
        thread = threading.get_ident()
        # Return the already fetched instance if there is one.
        DTE = self._DTECache.get(thread)
        if DTE:
            return DTE
        DTE = self._DTECache[thread] = self._getDTE()
        return DTE
class VsWpfTextViewTextInfo(UIATextInfo):
	"""TextInfo for the WPF editor which moves the line number that Visual
	Studio exposes as part of the text into a format field instead."""

	def _getLineNumberString(self, textRange):
		# Visual Studio exposes line numbers as part of the actual text.
		# We want to store the line number in a format field instead.
		lineNumberRange = textRange.Clone()
		# Collapse the clone (End moved onto Start); NOTE(review): relies on
		# VS returning the line-number prefix as the collapsed range's text.
		lineNumberRange.MoveEndpointByRange(
			UIAHandler.TextPatternRangeEndpoint_End,
			lineNumberRange,
			UIAHandler.TextPatternRangeEndpoint_Start
		)
		return lineNumberRange.GetText(-1)

	def METHOD_NAME(self, textRange, formatConfig, ignoreMixedValues=False):
		"""Extend the inherited format field with a 'line-number' entry
		when line number reporting is enabled."""
		formatField = super().METHOD_NAME(textRange, formatConfig, ignoreMixedValues=ignoreMixedValues)
		if not formatField or not formatConfig['reportLineNumber']:
			return formatField
		lineNumberStr = self._getLineNumberString(textRange)
		if lineNumberStr:
			try:
				formatField.field['line-number'] = int(lineNumberStr)
			except ValueError:
				# Keep the rest of the format field even if parsing fails.
				log.debugWarning(
					f"Couldn't parse {lineNumberStr} as integer to report a line number",
					exc_info=True
				)
		return formatField

	def _getTextFromUIARange(self, textRange):
		# Strip the leading line-number prefix from the raw UIA text.
		text = super()._getTextFromUIARange(textRange)
		lineNumberStr = self._getLineNumberString(textRange)
		return text[(0 if not lineNumberStr else len(lineNumberStr)):]
class VsWpfTextView(WpfTextView):
	# WPF editor overlay using the line-number-aware TextInfo.
	TextInfo = VsWpfTextViewTextInfo
class VsTextEditPaneTextInfo(textInfos.offsets.OffsetsTextInfo):
	"""Offsets-based TextInfo backed by the EnvDTE text selection/EditPoint
	API.  EnvDTE character offsets are 1-based, hence the +1/-1 conversions
	throughout."""

	def _get__selectionObject(self):
		# Locate the EnvDTE selection object for the active window type.
		window = self.obj._window
		if window.Type == VsWindowType.Document:
			selection = window.Selection
		elif window.Type == VsWindowType.Output:
			selection = window.Object.ActivePane.TextDocument.Selection
		elif window.Type == VsWindowType.ToolWindow:
			selection = window.Object.TextDocument.Selection
		else:
			raise RuntimeError(f"Unknown window type: {window.Kind}")
		# Cache on the instance so subsequent accesses skip this lookup.
		self._selectionObject = selection
		return selection

	def _createEditPoint(self):
		"""Return a fresh EditPoint positioned at the caret."""
		return self._selectionObject.ActivePoint.CreateEditPoint()

	def _getCaretOffset(self):
		return self._createEditPoint().AbsoluteCharOffset - 1

	def _setCaretOffset(self, offset):
		self._selectionObject.MoveToAbsoluteOffset(offset + 1)

	def _setSelectionOffsets(self, start, end):
		# Second call extends the selection (Extend=True).
		self._selectionObject.MoveToAbsoluteOffset(start + 1)
		self._selectionObject.MoveToAbsoluteOffset(end + 1, True)

	def _getSelectionOffsets(self):
		# Normalize so start <= end regardless of selection direction.
		caretPos = self._getCaretOffset()
		anchorPos = self._selectionObject.AnchorPoint.CreateEditPoint().AbsoluteCharOffset - 1
		return min(caretPos, anchorPos), max(caretPos, anchorPos)

	def _getTextRange(self, start, end):
		editPointStart = self._createEditPoint()
		editPointStart.MoveToAbsoluteOffset(start + 1)
		return editPointStart.GetText(end - start)

	def _getWordOffsets(self, offset):
		# Jump to the end of the word, then back to its start.
		editPointEnd = self._createEditPoint()
		editPointEnd.MoveToAbsoluteOffset(offset + 1)
		editPointEnd.WordRight()
		editPointStart = editPointEnd.CreateEditPoint()
		editPointStart.WordLeft()
		return editPointStart.AbsoluteCharOffset - 1, editPointEnd.AbsoluteCharOffset - 1

	def _getLineOffsets(self, offset):
		editPointStart = self._createEditPoint()
		editPointStart.MoveToAbsoluteOffset(offset + 1)
		editPointStart.StartOfLine()
		editPointEnd = editPointStart.CreateEditPoint()
		editPointEnd.EndOfLine()
		# Offsets are one based and exclusive
		return editPointStart.AbsoluteCharOffset - 1, editPointEnd.AbsoluteCharOffset

	def _getLineNumFromOffset(self, offset):
		editPoint = self._createEditPoint()
		editPoint.MoveToAbsoluteOffset(offset + 1)
		return editPoint.Line

	def _getStoryLength(self):
		editPoint = self._createEditPoint()
		editPoint.EndOfDocument()
		return editPoint.AbsoluteCharOffset - 1
class VsTextEditPane(EditableText, Window):
	"""Overlay for the VsTextEditPane window, backed by EnvDTE."""

	def _get_TextInfo(self):
		# Use the EnvDTE-based TextInfo only for recognized window types;
		# otherwise fall back to the inherited implementation.
		try:
			if self._window.Type in iter(VsWindowType):
				return VsTextEditPaneTextInfo
			else:
				log.debugWarning(
					f"Retrieved Visual Studio window object, but unknown type: {self._window.Type}"
				)
		except Exception:
			log.debugWarning("Couldn't retrieve Visual Studio window object", exc_info=True)
		return super().TextInfo

	def initOverlayClass(self):
		# The active EnvDTE window backing this pane.
		self._window = self.appModule.DTE.ActiveWindow

	def _get_location(self):
		# Non-UIA instances report their location from the EnvDTE window.
		if not isinstance(self, UIA):
			return RectLTWH(
				self._window.Left,
				self._window.Top,
				self._window.Width,
				self._window.Height
			)
		return super().location

	def event_valueChange(self):
		# Deliberately swallow value changes; EditableText handles caret
		# reporting itself.
		pass
class ObjectsTreeItem(IAccessible):
	"""Tree view items in the VS object explorers (LiteTreeView32)."""

	def _get_focusRedirect(self):
		"""
		Returns the correct focused item in the object explorer trees
		"""
		if controlTypes.State.FOCUSED not in self.states:
			# Object explorer tree views have a bad IAccessible implementation.
			# When expanding a primary node and going to secondary node, the
			# focus is placed to the next root node, so we need to redirect
			# it to the real focused widget. Fortunately, the states are
			# still correct and we can detect if this is really focused or not.
			return self.objectWithFocus()
def _get_positionInfo(self):
return {
"level": int(self.IAccessibleObject.accValue(self.IAccessibleChildID))
} |
299,232 | is printable | # Copyright (C) 2010-2015 Cuckoo Foundation, KillerInstinct
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import binascii
import contextlib
import string
from base64 import b64decode
from itertools import chain, repeat
from typing import Callable, List, Tuple
from lib.cuckoo.common.utils import convert_to_printable
try:
import re2 as re
except ImportError:
import re
# Regexs from Decalages olevba.py + a commonly observed path regex.
PATTERNS = (
(
"URL",
re.compile(
r"(http|https|ftp)\://[a-zA-Z0-9\-\.]+(:[a-zA-Z0-9]*)?/?([a-zA-Z0-9\-\._\?\,/\\\+&%\$#\=~])*[^\.\,\)\(\'\s]"
),
),
(
"IPv4 address",
re.compile(r"\b(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\b"),
),
("E-mail address", re.compile(r"(?i)\b[A-Z0-9._%+-]+@(?:[A-Z0-9-]+\.)+(?:[A-Z]{2,12}|XN--[A-Z0-9]{4,18})\b")),
(
"Executable file name",
re.compile(
r"(?i)\b\w+\.(EXE|PIF|GADGET|MSI|MSP|MSC|VB|VBS|JS|VBE|JSE|WS|WSF|WSC|WSH|BAT|CMD|DLL|SCR|HTA|CPL|CLASS|JAR|PS1|PS1XML|PS2|PS2XML|PSC1|PSC2|SCF|LNK|INF|REG)\b"
),
),
("User Directory", re.compile(r"['\"][Cc]:\\.*Users+[^;].*['\"]")),
)
DECRYPTORS = {}
def METHOD_NAME(s: str) -> bool:
    """Return True when every character of *s* is printable ASCII."""
    printable = set(string.printable)
    return not any(ch not in printable for ch in s)
def filter_printable(s: str) -> str:
    """Drop every non-printable character from *s*, keeping order."""
    keep = set(string.printable)
    return "".join(ch for ch in s if ch in keep)
def repeating_xor(s: str, key: str) -> str:
    """XOR each character of *s* with *key*, cycling the key as needed."""
    if not key:
        # An empty key yields an empty iteration, matching the original
        # chain/repeat behaviour.
        return ""
    klen = len(key)
    return "".join(
        chr(ord(ch) ^ ord(key[i % klen])) for i, ch in enumerate(s)
    )
def quote(f: Callable[[re.Match], str]):
    """Decorator: wrap *f*'s return value in triple double quotes."""
    def wrapped(*args):
        return f'"""{f(*args)}"""'
    return wrapped
def decrypt(enc_type: str) -> Callable[[Callable[[re.Match], str]], Callable[[re.Match], str]]:
    """Decorator factory: register the decorated function in DECRYPTORS
    under *enc_type* and return it unchanged."""
    def register(f: Callable[[re.Match], str]) -> Callable[[re.Match], str]:
        DECRYPTORS[enc_type] = f
        return f
    return register
def normalize_code(macro: str) -> str:
    """Collapse VBA '_' line continuations into single logical lines."""
    continuation = re.compile(r"_\s*\n")
    return continuation.sub(" ", macro)
@quote
def decode_chr(m: re.Match) -> str:
    """Evaluate a chain of VBA Chr()/ChrW$()-style calls into a literal."""
    chars = []
    for code in re.findall(r"Chr[A-Z$]?\((\d+)\)", m.group(1)):
        chars.append(chr(int(code)))
    return "".join(chars)
@quote
def decode_base64(m: re.Match) -> str:
    """Best-effort base64 decode of the captured text.

    Returns the original text unless it is plausibly base64 (length/padding
    checks) AND decodes cleanly to printable ASCII.
    """
    candidate = m.group(1)
    bad_padding = len(candidate) % 4 != 0 and not candidate.endswith("=")
    embedded_pad = "=" in candidate.rstrip("=")
    if bad_padding or embedded_pad:
        return candidate
    try:
        text = b64decode(candidate).decode()
    except (binascii.Error, UnicodeDecodeError):
        return candidate
    return text if METHOD_NAME(text) else candidate
@quote
def decode_hex(m: re.Match) -> str:
    """Decode a hex string capture ("4142" -> "AB").

    Returns the original text for odd-length input (cannot be hex) and ""
    when decoding fails, preserving the previous fallback behaviour.
    """
    s = m.group(1)
    if len(s) % 2 != 0:
        return s
    try:
        # bugfix: ``"".join(binascii.unhexlify(s))`` raised TypeError on
        # Python 3 (str.join over bytes), so valid hex always fell into the
        # except branch and returned "".  Decoding as latin-1 reproduces the
        # Python 2 byte-per-character result.
        return binascii.unhexlify(s).decode("latin-1")
    except Exception:
        return ""
@quote
def decode_reverse(m: re.Match) -> str:
    """Undo VBA StrReverse: return the captured text reversed."""
    return "".join(reversed(m.group(1)))
@quote
def concatenate(m: re.Match) -> str:
    """Join every triple-quoted fragment in the matched concatenation."""
    fragments = re.findall(r'"""(.*?)"""', m.group(0))
    return "".join(fragments)
@decrypt("xor")
@quote
def decrypt_xor(m: re.Match) -> str:
return repeating_xor(m.group(1), m.group(2))
@decrypt("sub")
@quote
def decrypt_sub(m: re.Match):
with contextlib.suppress(Exception):
# TODO: Needs a relook, will likely error
first = int([c for c in m.group(1) if c.isdigit()])
second = int([c for c in m.group(2) if c.isdigit()])
if first and second:
return chr(first - second)
return m.group()
def find_enc_function(macro) -> Tuple[str, str]:
    """Locate a VBA decryption helper in *macro*.

    Returns (function_name, "xor") or (function_name, "sub"), preferring
    xor; (None, None) when neither pattern matches.
    """
    xor_match = re.search(r"(?ims)Public Function (\w+).+? Xor .+?End Function", macro)
    if xor_match:
        return xor_match.group(1), "xor"
    sub_match = re.search(r"(?ims)Public Function (\w+).+?\d+\s*-\s*\d+.+?End Function", macro)
    if sub_match:
        return sub_match.group(1), "sub"
    return None, None
def handle_techniques(line: str, **opts) -> str:
    """Apply the known de-obfuscation passes to a single macro line.

    Requires ``enc_func_name`` and ``decrypt_func`` in *opts* (supplied by
    ``parse_macro``).  Returns the line with hex/base64/Chr strings decoded
    and concatenations collapsed.
    """
    enc_func_name = opts["enc_func_name"]
    decrypt_func = opts["decrypt_func"]
    # Triple-quote string literals so the decode_* helpers can anchor on them.
    line = line.replace('"', '"""')
    line = re.sub(r'"""([A-F0-9]{2,})"""', decode_hex, line)
    line = re.sub(r'"""([\w_+=/]{2,})"""', decode_base64, line)
    line = re.sub(r"(?i)Chr[A-Z$]\(Asc[A-Z$](.+?)\)\)", r"\1", line)
    # bugfix: re.sub replacement callables must return a string; ord()
    # returned an int, raising TypeError whenever this pattern matched.
    line = re.sub(r'(?i)Asc[A-Z$]\("""(\w)\w*"""\)', lambda m: str(ord(m.group(1))), line)
    line = re.sub(r"(?i)((?:Chr[A-Z$]?\(\d+\)\s*&?\s*)+)", decode_chr, line)
    line = re.sub(rf'(?i)\b{enc_func_name}\s*\(\w+\("""(.+?)"""\),\s*\w+\("""(.+?)"""', decrypt_func, line)
    line = re.sub(rf'(?i)\b{enc_func_name}\((?:""")?(.+?)(?:""")?,\s*(?:""")?(.+?)(?:""")?\)', decrypt_func, line)
    line = re.sub(r'(?i)StrReverse\(.+?"""(.+?)"""\)', decode_reverse, line)
    line = re.sub(r'""".+?"""\s+&+\s+""".+?""".+', concatenate, line)
    # Strip nested Chr(Asc(..)) wrappers until a fixed point is reached.
    while "Chr(Asc(" in line:
        lastline = line
        line = re.sub(r"(?i)Chr\(Asc\((.+?)\)\)", r"\1", line)
        if line == lastline:
            break
    # Remove quotes before regexing against them.
    line = line.replace('""" + """', "")
    line = line.replace('"""', "")
    # Remove a few concat patterns. Theres a bug with some obfuscation
    # techniques.
    line = line.replace(" + ", "")
    line = line.replace(" & ", "")
    return line
def extract_iocs(s: str) -> Tuple[str, str]:
    """Scan *s* against PATTERNS; return (description, printable_line) for
    the first pattern that matches.

    NOTE: despite the annotation, None is returned when nothing matches.
    """
    for desc, pattern in PATTERNS:
        m = pattern.findall(s)
        if m:
            # Hacked-up buxfix for multilayer Chr(Asc(Chr(Asc( which can
            # sometimes mess up our quoted string extraction / parsing.
            while "Chr(Asc(" in s:
                lastline = s
                s = re.sub(r"(?i)Chr\(Asc\((.+?)\)\)", r"\1", s)
                if s == lastline:
                    break
            # Return the line matched and not m because I prefer to have
            # context and not simply the IOC. This helps with the executable
            # file IOC, sometimes it's a save location!
            return desc, convert_to_printable(s)
    return None
def parse_macro(macro: str) -> List[Tuple[str, str]]:
opts = {}
vb_vars = {}
result = {}
iocs = set()
macro = normalize_code(macro)
enc_func_name, enc_type = find_enc_function(macro)
if not enc_func_name:
enc_func_name, enc_type = r"xor\w+", "xor"
decrypt_func = DECRYPTORS.get(enc_type)
opts = {"enc_func_name": enc_func_name, "decrypt_func": decrypt_func, "vb_vars": vb_vars}
for line in macro.splitlines():
line = line.strip()
if line.startswith("'"):
continue
substituted = handle_techniques(line, **opts)
# Look for variable assignments
split = [part for part in re.split(r"^(\w+)\s*=\s*", line, maxsplit=1)[1:] if part]
# Basic variable data find/replace.
if len(split) == 2:
name, val = split
vb_vars[name] = substituted
# Walk the deobfuscated macro and check for any IOCs
for line in substituted.splitlines():
ioc = extract_iocs(line)
if ioc:
iocs.add(ioc)
# Dedup IOCs
result = sorted(iocs, key=lambda p: p[0])
return result |
299,233 | schain container | import json
import os
import shutil
from pathlib import Path
import mock
import pytest
from dataclasses import dataclass
from skale.skale_manager import spawn_skale_manager_lib
from core.schains.cleaner import (
delete_bls_keys,
monitor,
get_schains_on_node,
remove_config_dir,
remove_schain_volume, remove_schain_container,
remove_ima_container
)
from core.schains.config import init_schain_config_dir
from core.schains.runner import get_container_name
from tools.configs.containers import SCHAIN_CONTAINER, IMA_CONTAINER
from tools.configs.schains import SCHAINS_DIR_PATH
from web.models.schain import (
SChainRecord, mark_schain_deleted, upsert_schain_record)
from tests.utils import (get_schain_contracts_data,
run_simple_schain_container,
run_simple_ima_container)
SCHAIN_CONTAINER_NAME_TEMPLATE = 'skale_schain_{}'
IMA_CONTAINER_NAME_TEMPLATE = 'skale_ima_{}'
TEST_SCHAIN_NAME_1 = 'schain_cleaner_test1'
TEST_SCHAIN_NAME_2 = 'schain_cleaner_test2'
PHANTOM_SCHAIN_NAME = 'phantom_schain'
@dataclass
class ImaEnv:
    """Minimal stand-in for the IMA container environment settings."""

    # Host directory mounted for the schain.
    schain_dir: str

    def to_dict(self):
        """Render the env as the mapping consumed by the container runner."""
        return {'SCHAIN_DIR': self.schain_dir}
def is_container_running(dutils, container_name):
    """Thin convenience wrapper around the docker-utils helper."""
    running = dutils.is_container_running(container_name)
    return running
@pytest.fixture
def schain_dirs_for_monitor():
    """Create config dirs for both test sChains; remove them afterwards."""
    # bugfix (readability): the two path variables were swapped relative to
    # the schain names they held.  Behaviour is unchanged — both dirs are
    # created and removed either way — but the names now match.
    schain_dir_path1 = os.path.join(SCHAINS_DIR_PATH, TEST_SCHAIN_NAME_1)
    schain_dir_path2 = os.path.join(SCHAINS_DIR_PATH, TEST_SCHAIN_NAME_2)
    Path(schain_dir_path1).mkdir(parents=True, exist_ok=True)
    Path(schain_dir_path2).mkdir(parents=True, exist_ok=True)
    try:
        yield
    finally:
        shutil.rmtree(schain_dir_path1, ignore_errors=True)
        shutil.rmtree(schain_dir_path2, ignore_errors=True)
@pytest.fixture
def upsert_db(db):
    """Seed the DB with records for all three test sChain names."""
    for name in [TEST_SCHAIN_NAME_1, TEST_SCHAIN_NAME_2, PHANTOM_SCHAIN_NAME]:
        upsert_schain_record(name)
def test_monitor(db, schain_dirs_for_monitor, skale, node_config, dutils):
    """monitor() must call ensure_schain_removed for every schain dir even
    when removal raises, and must leave no schain containers running."""
    # bugfix: the first plain Mock() assignment was dead code, immediately
    # overwritten by the raising mock below; it has been removed.
    ensure_schain_removed_mock = mock.Mock(side_effect=ValueError)
    with mock.patch('core.schains.cleaner.ensure_schain_removed',
                    ensure_schain_removed_mock):
        monitor(skale, node_config, dutils=dutils)
        ensure_schain_removed_mock.assert_any_call(
            skale,
            TEST_SCHAIN_NAME_1,
            node_config.id,
            dutils=dutils
        )
        ensure_schain_removed_mock.assert_any_call(
            skale,
            TEST_SCHAIN_NAME_2,
            node_config.id,
            dutils=dutils
        )
    # Second pass with the real implementation: nothing should survive.
    monitor(skale, node_config, dutils=dutils)
    assert [
        c.name
        for c in dutils.client.containers.list(
            filters={'name': 'skale_schains'}
        )
    ] == []
def test_remove_config_dir():
    """remove_config_dir() must delete a previously initialized config dir."""
    schain_name = 'temp'
    init_schain_config_dir(schain_name)
    config_dir = os.path.join(SCHAINS_DIR_PATH, schain_name)
    assert os.path.isdir(config_dir)
    remove_config_dir(schain_name)
    assert not os.path.isdir(config_dir)
def test_remove_schain_volume(dutils, schain_config):
    """remove_schain_volume() must delete the schain's data volume."""
    schain_name = schain_config['skaleConfig']['sChain']['schainName']
    dutils.create_data_volume(schain_name)
    assert dutils.is_data_volume_exists(schain_name)
    remove_schain_volume(schain_name, dutils=dutils)
    assert not dutils.is_data_volume_exists(schain_name)
@pytest.fixture
def METHOD_NAME(schain_config, ssl_folder, dutils):
    """ Creates and removes schain container """
    schain_name = schain_config['skaleConfig']['sChain']['schainName']
    schain_data = get_schain_contracts_data(schain_name)
    try:
        run_simple_schain_container(schain_data, dutils)
        yield schain_name
    finally:
        # bugfix (readability): dropped a redundant re-assignment of
        # schain_name in this branch — it always recomputed the same value.
        # Remove both the schain and IMA containers, ignoring failures.
        dutils.safe_rm(
            get_container_name(SCHAIN_CONTAINER, schain_name),
            force=True
        )
        dutils.safe_rm(
            get_container_name(IMA_CONTAINER, schain_name),
            force=True
        )
def test_remove_schain_container(
    dutils,
    schain_config,
    cleanup_container,
    cert_key_pair
):
    """remove_schain_container() must stop and remove a running container."""
    schain_name = schain_config['skaleConfig']['sChain']['schainName']
    schain_data = get_schain_contracts_data(schain_name)
    run_simple_schain_container(schain_data, dutils)
    container_name = SCHAIN_CONTAINER_NAME_TEMPLATE.format(schain_name)
    assert is_container_running(dutils, container_name)
    remove_schain_container(schain_name, dutils=dutils)
    assert not is_container_running(dutils, container_name)
@pytest.mark.skip('Docker API GA issues need to be resolved')
def test_remove_ima_container(dutils, METHOD_NAME):
    """remove_ima_container() must remove the schain's IMA container."""
    schain_name = METHOD_NAME
    schain_data = get_schain_contracts_data(schain_name)
    # Patch the IMA env so the container can run without a real schain dir.
    with mock.patch('core.schains.runner.get_ima_env', return_value=ImaEnv(
        schain_dir='/'
    )):
        run_simple_ima_container(schain_data, dutils)
    container_name = IMA_CONTAINER_NAME_TEMPLATE.format(schain_name)
    assert dutils.is_container_found(container_name)
    remove_ima_container(schain_name, dutils=dutils)
    assert not dutils.is_container_found(container_name)
def test_remove_schain_record():
    """mark_schain_deleted() must flip the record's is_deleted flag."""
    SChainRecord.create_table()
    name = "test"
    SChainRecord.add(name)
    mark_schain_deleted(name)
    record = SChainRecord.to_dict(SChainRecord.get_by_name(name))
    assert record["is_deleted"]
    SChainRecord.drop_table()
@pytest.fixture
def invalid_secret_key_file(schain_dirs_for_monitor):
    """Write secret_key_1.json containing null (invalid) content."""
    schain_dir_path1 = os.path.join(SCHAINS_DIR_PATH, TEST_SCHAIN_NAME_1)
    secret_key_filepath = os.path.join(schain_dir_path1,
                                       'secret_key_1.json')
    with open(secret_key_filepath, 'w') as secret_key_file:
        json.dump(None, secret_key_file)
    return
@pytest.fixture
def valid_secret_key_file(schain_dirs_for_monitor):
    """Write secret_key_0.json containing a valid BLS key share name."""
    schain_dir_path1 = os.path.join(SCHAINS_DIR_PATH, TEST_SCHAIN_NAME_1)
    secret_key_filepath = os.path.join(schain_dir_path1,
                                       'secret_key_0.json')
    with open(secret_key_filepath, 'w') as secret_key_file:
        json.dump(
            {'key_share_name': 'BLS_KEY:SCHAIN_ID:1:NODE_ID:0:DKG_ID:0'},
            secret_key_file
        )
    return
def test_delete_bls_keys(skale, valid_secret_key_file):
    """delete_bls_keys() must request SGX deletion of the stored key share."""
    with mock.patch('core.schains.cleaner.SgxClient.delete_bls_key',
                    new=mock.Mock()) as delete_mock:
        delete_bls_keys(skale, TEST_SCHAIN_NAME_1)
        delete_mock.assert_called_with(
            'BLS_KEY:SCHAIN_ID:1:NODE_ID:0:DKG_ID:0')
        assert delete_mock.call_count == 1
def test_delete_bls_keys_with_invalid_secret_key(
    skale,
    invalid_secret_key_file,
    valid_secret_key_file
):
    """
    No exception but removing called only for 0 secret key
    secret_key_1.json - invalid, secret_key_2.json not exists
    """
    # Report rotation id 2 — presumably makes the cleaner probe
    # secret_key_0..2; verify against the cleaner implementation.
    skale_for_test = spawn_skale_manager_lib(skale)
    skale_for_test.schains.get_last_rotation_id = lambda x: 2
    with mock.patch('core.schains.cleaner.SgxClient.delete_bls_key',
                    new=mock.Mock()) as delete_mock:
        delete_bls_keys(skale_for_test, TEST_SCHAIN_NAME_1)
        assert delete_mock.call_count == 1
def test_get_schains_on_node(schain_dirs_for_monitor,
dutils, METHOD_NAME, upsert_db, cleanup_schain_dirs_before):
schain_name = METHOD_NAME
result = get_schains_on_node(dutils)
assert set([
TEST_SCHAIN_NAME_1, TEST_SCHAIN_NAME_2,
PHANTOM_SCHAIN_NAME, schain_name
]).issubset(set(result)) |
299,234 | wrapper | from __future__ import print_function
from builtins import object
import sys
if sys.version_info.major >= 3:
unicode = str
###############################################################################
# lazyflow: data flow based lazy parallel computation framework
#
# Copyright (C) 2011-2014, the ilastik developers
# <team@ilastik.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the Lesser GNU General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# See the files LICENSE.lgpl2 and LICENSE.lgpl3 for full text of the
# GNU Lesser General Public License version 2.1 and 3 respectively.
# This information is also available on the ilastik web site at:
# http://ilastik.org/license/
###############################################################################
import logging
import inspect
class Tracer(object):
    """
    Context manager to simplify function entry/exit logging trace statements.

    Examples:

        Example Usage::

            # Create a TRACE logger
            import sys, logging
            traceLogger = logging.getLogger("TRACE.examplemodule1")
            traceLogger.addHandler( logging.StreamHandler(sys.stdout) )

            # Use the context manager
            def f():
                with Tracer(traceLogger):
                    print("Function f is running...")

            # If TRACE logging isn't enabled, there's no extra output
            f()

            > Function f is running...

            # Enable TRACE logging to see enter/exit log statements.
            traceLogger.setLevel(logging.DEBUG)
            f()

            > (enter) f
            > Function f is running...
            > (exit) f

            # Disable TRACE logging by setting the level above DEBUG.
            traceLogger.setLevel(logging.INFO)
    """

    def __init__(self, logger, level=logging.DEBUG, msg="", determine_caller=True, caller_name=""):
        # Accept either a logger instance or a logger name.
        if isinstance(logger, (str, unicode)):
            self._logger = logging.getLogger(logger)
        else:
            self._logger = logger
        self._level = level
        # When True and no caller_name given, the caller is found via the stack.
        self._determine_caller = determine_caller
        self._msg = msg
        self._caller = caller_name

    def __enter__(self):
        # Do no work at all (not even stack inspection) unless the level is enabled.
        if self._logger.isEnabledFor(self._level):
            if self._determine_caller and self._caller == "":
                # stack[1] is the frame that entered the with-block; [3] is its function name.
                stack = inspect.stack()
                self._caller = stack[1][3]
            self._logger.log(self._level, "(enter) " + self._caller + " " + self._msg)

    def __exit__(self, *args):
        if self._logger.isEnabledFor(self._level):
            self._logger.log(self._level, "(exit) " + self._caller)
from functools import wraps
def traceLogged(logger, level=logging.DEBUG, msg="", caller_name=""):
    """
    Returns a decorator that logs the entry and exit of its target function.
    Uses the the :py:class:`Tracer` context manager internally.

    Examples:

        Example Usage::

            # Create a TRACE logger
            import sys, logging
            traceLogger = logging.getLogger("TRACE.examplemodule2")
            traceLogger.addHandler( logging.StreamHandler(sys.stdout) )

            # Decorate a function to allow entry/exit trace logging.
            @traceLogged(traceLogger)
            def f():
                print("Function f is running...")

            # If TRACE logging isn't enabled, there's no extra output
            f()

            > Function f is running...

            # Enable TRACE logging to see enter/exit log statements.
            traceLogger.setLevel(logging.DEBUG)
            f()

            > (enter) f
            > Function f is running...
            > (exit) f

            # Disable TRACE logging by setting the level above DEBUG.
            traceLogger.setLevel(logging.INFO)
    """

    def decorator(func):
        """A closure that logs the entry and exit of func using the logger."""
        # Determine the name to log once, at decoration time (not per call).
        if caller_name != "":
            name = caller_name
        elif hasattr(func, "im_func"):
            # Bound method (Python 2 style); log the underlying function name.
            name = func.__func__.__name__
        else:
            name = func.__name__

        @wraps(func)
        def METHOD_NAME(*args, **kwargs):
            with Tracer(logger, level=level, msg=msg, determine_caller=False, caller_name=name):
                return func(*args, **kwargs)

        METHOD_NAME.__wrapped__ = func  # Emulate python 3 behavior of @wraps
        return METHOD_NAME

    return decorator
if __name__ == "__main__":
import sys
logger = logging.getLogger(__name__)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter("%(levelname)s %(thread)d %(name)s:%(funcName)s:%(lineno)d %(message)s"))
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
def func1():
with Tracer(logger):
print("I'm func 1")
@traceLogged(logger)
def func2():
print("I'm func 2")
func1()
func2()
# Execute doctests
import doctest
doctest.testmod() |
299,235 | check board winner | # This example requires the 'message_content' privileged intent for prefixed commands.
from typing import List
import discord
from discord.ext import commands
# Defines a custom button that contains the logic of the game.
# The ['TicTacToe'] bit is for type hinting purposes to tell your IDE or linter
# what the type of `self.view` is. It is not required.
class TicTacToeButton(discord.ui.Button["TicTacToe"]):
    """One cell of the 3x3 grid; pressing it claims the cell for the
    current player and advances the game."""

    def __init__(self, x: int, y: int):
        # A label is required, but we don't need one so a zero-width space is used.
        # The row parameter tells the View which row to place the button under.
        # A View can only contain up to 5 rows -- each row can only have 5 buttons.
        # Since a Tic Tac Toe grid is 3x3 that means we have 3 rows and 3 columns.
        super().__init__(style=discord.ButtonStyle.secondary, label="\u200b", row=y)
        # Grid coordinates of this cell (x = column, y = row).
        self.x = x
        self.y = y

    # This function is called whenever this particular button is pressed.
    # This is part of the "meat" of the game logic.
    async def callback(self, interaction: discord.Interaction):
        assert self.view is not None
        view: TicTacToe = self.view
        state = view.board[self.y][self.x]
        # Ignore clicks on an already-claimed cell.
        if state in (view.X, view.O):
            return

        # Mark the cell for the current player and hand the turn over.
        if view.current_player == view.X:
            self.style = discord.ButtonStyle.danger
            self.label = "X"
            view.board[self.y][self.x] = view.X
            view.current_player = view.O
            content = "It is now O's turn"
        else:
            self.style = discord.ButtonStyle.success
            self.label = "O"
            view.board[self.y][self.x] = view.O
            view.current_player = view.X
            content = "It is now X's turn"

        self.disabled = True
        winner = view.METHOD_NAME()
        if winner is not None:
            # Game over: announce the result and freeze the whole board.
            if winner == view.X:
                content = "X won!"
            elif winner == view.O:
                content = "O won!"
            else:
                content = "It's a tie!"

            for child in view.children:
                child.disabled = True

            view.stop()

        await interaction.response.edit_message(content=content, view=view)
# This is our actual board View.
# This is our actual board View.
class TicTacToe(discord.ui.View):
    """The game view: holds the board state and the nine cell buttons."""

    # This tells the IDE or linter that all our children will be TicTacToeButtons.
    # This is not required.
    children: List[TicTacToeButton]
    # Board cell values: X = -1, O = 1, empty = 0; Tie is a sentinel result.
    X = -1
    O = 1
    Tie = 2

    def __init__(self):
        super().__init__()
        # X always moves first.
        self.current_player = self.X
        self.board = [
            [0, 0, 0],
            [0, 0, 0],
            [0, 0, 0],
        ]

        # Our board is made up of 3 by 3 TicTacToeButtons.
        # The TicTacToeButton maintains the callbacks and helps steer
        # the actual game.
        for x in range(3):
            for y in range(3):
                self.add_item(TicTacToeButton(x, y))

    # This method checks for the board winner and is used by the TicTacToeButton.
    def METHOD_NAME(self):
        """Return X, O, Tie, or None (game still in progress).

        A line summing to 3 is three O's; -3 is three X's.
        """
        # Check horizontal
        for across in self.board:
            value = sum(across)
            if value == 3:
                return self.O
            elif value == -3:
                return self.X

        # Check vertical
        for line in range(3):
            value = self.board[0][line] + self.board[1][line] + self.board[2][line]
            if value == 3:
                return self.O
            elif value == -3:
                return self.X

        # Check diagonals
        diag = self.board[0][2] + self.board[1][1] + self.board[2][0]
        if diag == 3:
            return self.O
        elif diag == -3:
            return self.X

        diag = self.board[0][0] + self.board[1][1] + self.board[2][2]
        if diag == -3:
            return self.X
        elif diag == 3:
            return self.O

        # If we're here, we need to check if a tie has been reached.
        if all(i != 0 for row in self.board for i in row):
            return self.Tie

        return None
intents = discord.Intents.default()
intents.message_content = True
bot = commands.Bot(command_prefix=commands.when_mentioned_or("!"), intents=intents)
@bot.command()
async def tic(ctx: commands.Context):
    """Starts a tic-tac-toe game with yourself."""
    # Setting the reference message to ctx.message makes the bot reply to the member's message.
    await ctx.send("Tic Tac Toe: X goes first", view=TicTacToe(), reference=ctx.message)
@bot.event
async def on_ready():
    # Simple readiness banner once the bot has logged in.
    print(f"Logged in as {bot.user} (ID: {bot.user.id})")
    print("------")
bot.run("TOKEN") |
299,236 | test compat router classes called with | from unittest.mock import ANY, Mock
import pytest
from kombu import Exchange, Queue
from kombu.utils.functional import maybe_evaluate
from celery.app import routes
from celery.exceptions import QueueNotFound
from celery.utils.imports import qualname
def Router(app, *args, **kwargs):
    """Build a routes.Router bound to *app*, forwarding other arguments."""
    router = routes.Router(*args, app=app, **kwargs)
    return router
def E(app, queues):
    """Return a helper that routes an answer through a fresh Router and
    expands it against *queues*."""
    def expand(answer):
        router = Router(app, [], queues)
        return router.expand_destination(answer)
    return expand
def set_queues(app, **queues):
    """Install *queues* as the app's configured task queues (conf + amqp)."""
    app.conf.task_queues = queues
    app.amqp.queues = app.amqp.Queues(queues)
class RouteCase:
    """Shared fixtures/assertions for the routing tests.

    NOTE(review): ``self.app`` appears to be injected by the celery test
    harness before ``setup_method`` runs — confirm against conftest.
    """

    def setup_method(self):
        # Three reference queue definitions: fanout, topic, and the app default.
        self.a_queue = {
            'exchange': 'fooexchange',
            'exchange_type': 'fanout',
            'routing_key': 'xuzzy',
        }
        self.b_queue = {
            'exchange': 'barexchange',
            'exchange_type': 'topic',
            'routing_key': 'b.b.#',
        }
        self.d_queue = {
            'exchange': self.app.conf.task_default_exchange,
            'exchange_type': self.app.conf.task_default_exchange_type,
            'routing_key': self.app.conf.task_default_routing_key,
        }

        @self.app.task(shared=False)
        def mytask(*args, **kwargs):
            pass
        self.mytask = mytask

    def assert_routes_to_queue(self, queue, router, name,
                               args=None, kwargs=None, options=None):
        """Assert that *router* routes task *name* to *queue*."""
        if options is None:
            options = {}
        if kwargs is None:
            kwargs = {}
        if args is None:
            args = []
        assert router.route(options, name, args, kwargs)[
            'queue'].name == queue

    def assert_routes_to_default_queue(self, router, name, *args, **kwargs):
        """Assert that *router* falls back to the app's default queue."""
        self.assert_routes_to_queue(
            self.app.conf.task_default_queue, router, name, *args, **kwargs)
class test_MapRoute(RouteCase):
    """Tests for routes.MapRoute lookup and expansion."""

    def test_route_for_task_expanded_route(self):
        """A string queue name in the map expands to a Queue entry."""
        set_queues(self.app, foo=self.a_queue, bar=self.b_queue)
        expand = E(self.app, self.app.amqp.queues)
        route = routes.MapRoute({self.mytask.name: {'queue': 'foo'}})
        assert expand(route(self.mytask.name))['queue'].name == 'foo'
        assert route('celery.awesome') is None

    def test_route_for_task(self):
        """A dict route is passed through with all of its options intact."""
        set_queues(self.app, foo=self.a_queue, bar=self.b_queue)
        expand = E(self.app, self.app.amqp.queues)
        route = routes.MapRoute({self.mytask.name: self.b_queue})
        eroute = expand(route(self.mytask.name))
        for key, value in self.b_queue.items():
            assert eroute[key] == value
        assert route('celery.awesome') is None

    def test_route_for_task__glob(self):
        """Glob and regex patterns match task names; first match wins."""
        from re import compile
        route = routes.MapRoute([
            ('proj.tasks.bar*', {'queue': 'routeC'}),
            ('proj.tasks.*', 'routeA'),
            ('demoapp.tasks.bar.*', {'exchange': 'routeB'}),
            (compile(r'(video|image)\.tasks\..*'), {'queue': 'media'}),
        ])
        assert route('proj.tasks.bar') == {'queue': 'routeC'}
        assert route('proj.tasks.bar.baz') == {'queue': 'routeC'}
        assert route('proj.tasks.foo') == {'queue': 'routeA'}
        assert route('demoapp.tasks.bar.moo') == {'exchange': 'routeB'}
        assert route('video.tasks.foo') == {'queue': 'media'}
        assert route('image.tasks.foo') == {'queue': 'media'}
        assert route('demoapp.foo.bar.moo') is None

    def test_expand_route_not_found(self):
        """Expanding a route to an unknown queue raises QueueNotFound."""
        expand = E(self.app, self.app.amqp.Queues(
            self.app.conf.task_queues, False))
        route = routes.MapRoute({'a': {'queue': 'x'}})
        with pytest.raises(QueueNotFound):
            expand(route('a'))
class test_lookup_route(RouteCase):
def test_init_queues(self):
router = Router(self.app, queues=None)
assert router.queues == {}
def test_lookup_takes_first(self):
set_queues(self.app, foo=self.a_queue, bar=self.b_queue)
R = routes.prepare(({self.mytask.name: {'queue': 'bar'}},
{self.mytask.name: {'queue': 'foo'}}))
router = Router(self.app, R, self.app.amqp.queues)
self.assert_routes_to_queue('bar', router, self.mytask.name)
def test_expands_queue_in_options(self):
set_queues(self.app)
R = routes.prepare(())
router = Router(
self.app, R, self.app.amqp.queues, create_missing=True,
)
# apply_async forwards all arguments, even exchange=None etc,
# so need to make sure it's merged correctly.
route = router.route(
{'queue': 'testq',
'exchange': None,
'routing_key': None,
'immediate': False},
self.mytask.name,
args=[1, 2], kwargs={},
)
assert route['queue'].name == 'testq'
assert route['queue'].exchange == Exchange('testq')
assert route['queue'].routing_key == 'testq'
assert route['immediate'] is False
def test_expand_destination_string(self):
set_queues(self.app, foo=self.a_queue, bar=self.b_queue)
x = Router(self.app, {}, self.app.amqp.queues)
dest = x.expand_destination('foo')
assert dest['queue'].name == 'foo'
def test_expand_destination__Queue(self):
queue = Queue('foo')
x = Router(self.app, {}, self.app.amqp.queues)
dest = x.expand_destination({'queue': queue})
assert dest['queue'] is queue
def test_lookup_paths_traversed(self):
self.simple_queue_setup()
R = routes.prepare((
{'celery.xaza': {'queue': 'bar'}},
{self.mytask.name: {'queue': 'foo'}}
))
router = Router(self.app, R, self.app.amqp.queues)
self.assert_routes_to_queue('foo', router, self.mytask.name)
self.assert_routes_to_default_queue(router, 'celery.poza')
def test_compat_router_class(self):
self.simple_queue_setup()
R = routes.prepare((
TestRouter(),
))
router = Router(self.app, R, self.app.amqp.queues)
self.assert_routes_to_queue('bar', router, 'celery.xaza')
self.assert_routes_to_default_queue(router, 'celery.poza')
def test_router_fun__called_with(self):
self.simple_queue_setup()
step = Mock(spec=['__call__'])
step.return_value = None
R = routes.prepare([step])
router = Router(self.app, R, self.app.amqp.queues)
self.mytask.apply_async((2, 2), {'kw': 3}, router=router, priority=3)
step.assert_called_with(
self.mytask.name, (2, 2), {'kw': 3}, ANY,
task=self.mytask,
)
options = step.call_args[0][3]
assert options['priority'] == 3
    def METHOD_NAME(self):
        """A router *object*'s route_for_task() receives only
        (task_name, args, kwargs) — no options and no task keyword."""
        self.simple_queue_setup()
        step = Mock(spec=['route_for_task'])
        step.route_for_task.return_value = None  # decline to route
        R = routes.prepare([step])
        router = Router(self.app, R, self.app.amqp.queues)
        self.mytask.apply_async((2, 2), {'kw': 3}, router=router, priority=3)
        step.route_for_task.assert_called_with(
            self.mytask.name, (2, 2), {'kw': 3},
        )
def simple_queue_setup(self):
set_queues(
self.app, foo=self.a_queue, bar=self.b_queue,
**{self.app.conf.task_default_queue: self.d_queue})
class TestRouter:
    """Compat-style router stub: routes only 'celery.xaza' to 'bar'."""

    def route_for_task(self, task, args, kwargs):
        # Any other task name is declined (returns None).
        return 'bar' if task == 'celery.xaza' else None
class test_prepare:
    """Tests for routes.prepare(), which normalizes router declarations."""

    def test_prepare(self):
        o = object()
        R = [
            {'foo': 'bar'},        # dict -> MapRoute
            qualname(TestRouter),  # dotted path -> lazily evaluated instance
            o,                     # anything else passes through unchanged
        ]
        p = routes.prepare(R)
        assert isinstance(p[0], routes.MapRoute)
        assert isinstance(maybe_evaluate(p[1]), TestRouter)
        assert p[2] is o

        # A single non-list router is wrapped in a list.
        assert routes.prepare(o) == [o]

    def test_prepare_item_is_dict(self):
        # A bare dict (not inside a list) is also converted to a MapRoute.
        R = {'foo': 'bar'}
        p = routes.prepare(R)
        assert isinstance(p[0], routes.MapRoute)
299,237 | get label value | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from parlai.core.build_data import make_dir
from parlai.core.teachers import DialogTeacher
from parlai.utils.data import DatatypeHelper
from typing import Dict, Iterable, List, Optional, Tuple
from typing_extensions import TypedDict
import os
# huggingface imports
import datasets
class AbstractHuggingFaceTeacher(DialogTeacher):
    """
    Abstract parent class for HuggingFace teachers. Extend this class and specify the
    attributes below to use a different dataset.

    hf_path = path parameter passed into hugging face load_dataset function
    hf_name = name parameter passed into hugging face load_dataset function
    hf_text_fields = list of names of the data fields from the dataset to be included in the text/query
    hf_message_fields = [optional] list of names of the data fields from the dataset to be included in the message object but *not* text
    hf_label_field = name of the data field from the hf dataset that specifies the label of the episode
    hf_splits_mapping = dictionary mapping with the keys 'train', 'valid', and 'test', that map to the
    names of the splits of the hf dataset.
    render_text_field = bool where if True, will include the text field name in the query (e.g. "sentence: <sentence>")
    """

    def __init__(self, opt, shared=None):
        """Resolve the split and local cache path from ``opt``, then delegate
        to DialogTeacher (which calls setup_data)."""
        self.fold = DatatypeHelper.fold(opt['datatype'])
        self.hf_split = self.hf_splits_mapping[self.fold]
        self.data_path = self._path(opt)
        opt['datafile'] = self.data_path
        make_dir(opt['datafile'])
        self.id = "huggingface"
        super().__init__(opt, shared)

    def _path(self, opt):
        """Return the local cache path for this dataset (and config name) / split."""
        if self.hf_name:
            return os.path.join(
                opt['datapath'], 'huggingface', self.hf_path, self.hf_name, self.fold
            )
        return os.path.join(opt['datapath'], 'huggingface', self.hf_path, self.fold)

    @staticmethod
    def _extract_fields(row, field_names) -> Dict[str, str]:
        """Return ``{field: value}`` for each requested field.

        Raises:
            KeyError: if any requested feature is missing from the row.
        """
        extracted = {}
        for col in field_names:
            text_part = row.get(col)
            if text_part is None:
                raise KeyError(f'Feature "{col}" not found in data.')
            extracted[col] = text_part
        return extracted

    def _get_text_value(self, row) -> Tuple[str, Dict[str, str]]:
        """
        return the constructed text query and dict mapping text field names to values.

        The query is the newline-joined values of ``hf_text_fields``; any
        ``hf_message_fields`` are added to the returned dict but not the query.
        """
        # construct text query from the hf_text_fields specified
        text_dict = self._extract_fields(row, self.hf_text_fields)
        query = '\n'.join(text_dict.values())
        # optional message-only fields (declared via hf_message_fields)
        text_dict.update(
            self._extract_fields(row, getattr(self, 'hf_message_fields', []))
        )
        return query, text_dict

    def METHOD_NAME(self, row):
        """
        return the label value from the data row.
        """
        return row[self.hf_label_field]

    def _get_label_candidates(self, row, label) -> Tuple[str, List[str]]:
        """
        try to return the true label text value from the row and the candidates.

        For ClassLabel features, integer labels are mapped to their class
        names and all class names are used as candidates; otherwise the
        stringified label is its own only candidate.
        """
        # Fix: inspect the configured label field, not a hard-coded 'label'
        # key, so datasets whose label column has another name work too.
        label_feature = self.dataset.features[self.hf_label_field]
        if isinstance(label_feature, datasets.features.ClassLabel):
            pre_candidates = label_feature.names
            # construct label and candidates
            if isinstance(label, int):
                label = pre_candidates[label]
            if label in row:
                # class names may themselves be row fields (e.g. answer columns)
                return row[label], [row[l] for l in pre_candidates]
            return label, pre_candidates
        else:
            label = str(label)
            return label, [label]

    def setup_data(self, path: str) -> Iterable[tuple]:
        """
        Default implementation of setup_data.

        Manually override if needed.

        Yields ``(episode_dict, new_episode_flag)`` pairs for DialogTeacher;
        each row is a single-turn episode.
        """
        # load dataset from HuggingFace
        self.dataset = datasets.load_dataset(
            path=self.hf_path, name=self.hf_name, split=self.hf_split
        )
        for row in self.dataset:
            query, text_dict = self._get_text_value(row)
            label = self.METHOD_NAME(row)
            label, candidates = self._get_label_candidates(row, label)
            episode_dict = text_dict
            episode_dict['text'] = query
            episode_dict['label'] = label
            episode_dict['label_candidates'] = candidates
            yield episode_dict, True
class DefaultTeacher:
    """Placeholder teacher: HuggingFace datasets have no generic default,
    so instantiation always fails with a helpful message."""

    _ERROR = (
        "There is no default teacher for HuggingFace datasets. "
        "Please use a specific one."
    )

    def __init__(self, opt):
        raise NotImplementedError(self._ERROR)
299,238 | get trade limits for instrument strategy | """
We want to limit the number of trades we expect to do in a given period (usually a day, but could be longer if
eg going on holiday)
Limits per contract don't make sense, but it makes sense to limit (a) the number of times a given instrument
within a strategy can be traded and (b) the number of times an instrument can be traded, period.
"""
from dataclasses import dataclass
from syscore.exceptions import missingData
from sysdata.base_data import baseData
from syslogging.logger import *
from sysobjects.production.trade_limits import tradeLimit, listOfTradeLimits
from sysobjects.production.tradeable_object import instrumentStrategy
@dataclass
class instrumentStrategyKeyAndDays:
    """Lookup key for a stored limit: instrument/strategy key + period length."""

    instrument_strategy_key: str
    period_days: int

    @classmethod
    def from_trade_limit(cls, trade_limit: tradeLimit):
        """Build the lookup key for an existing trade limit object."""
        return cls(
            trade_limit.instrument_strategy.key,
            trade_limit.period_days,
        )
class listOfInstrumentStrategyKeyAndDays(list):
    """List of limit lookup keys with a filtering helper."""

    def for_given_instrument_strategy(self, instrument_strategy: instrumentStrategy):
        """Return the subset of entries whose key matches *instrument_strategy*."""
        wanted_key = instrument_strategy.key
        matching = [
            entry
            for entry in self
            if entry.instrument_strategy_key == wanted_key
        ]
        return listOfInstrumentStrategyKeyAndDays(matching)
class tradeLimitData(baseData):
    """Store, query and update trade limits.

    Limits are keyed by (instrument strategy, period in days).  Limits at
    the *instrument* level are stored under an instrumentStrategy with an
    empty strategy name.  Subclasses supply persistence by implementing
    the three underscore methods at the bottom of the class.
    """

    def __init__(self, log=get_logger("Overrides")):
        # NOTE(review): logger name "Overrides" looks copy-pasted from the
        # overrides data class — confirm intended.
        super().__init__(log=log)

    def no_limit(
        self, instrument_strategy: instrumentStrategy, period_days: int
    ) -> tradeLimit:
        """Sentinel 'effectively unlimited' limit used when none is stored."""
        return tradeLimit(
            999999,
            instrument_strategy,
            period_days=period_days,
        )

    def what_trade_is_possible_for_instrument(
        self, instrument_code: str, proposed_trade: int
    ) -> int:
        """Clip a proposed trade against all instrument-level limits."""
        combined_list = self.get_trade_limits_for_instrument(instrument_code)
        possible_trade = combined_list.what_trade_is_possible(proposed_trade)
        return possible_trade

    def what_trade_is_possible_for_instrument_strategy(
        self, instrument_strategy: instrumentStrategy, proposed_trade: int
    ) -> int:
        """Clip a proposed trade against instrument AND strategy-level limits."""
        combined_list = self._get_list_of_all_relevant_trade_limits(instrument_strategy)
        possible_trade = combined_list.what_trade_is_possible(proposed_trade)
        return possible_trade

    def add_trade(self, instrument_strategy: instrumentStrategy, trade: int):
        """Record an executed trade against every relevant limit and persist."""
        combined_list = self._get_list_of_all_relevant_trade_limits(instrument_strategy)
        combined_list.add_trade(trade)
        self._update_list_of_trade_limits(combined_list)

    def remove_trade(self, instrument_strategy: instrumentStrategy, trade: int):
        """Back out a previously recorded trade from every relevant limit."""
        combined_list = self._get_list_of_all_relevant_trade_limits(instrument_strategy)
        combined_list.remove_trade(trade)
        self._update_list_of_trade_limits(combined_list)

    def _get_list_of_all_relevant_trade_limits(
        self, instrument_strategy: instrumentStrategy
    ) -> listOfTradeLimits:
        """Combine instrument-level and instrument/strategy-level limits."""
        instrument_trade_limits = self.get_trade_limits_for_instrument(
            instrument_strategy.instrument_code
        )
        strategy_instrument_trade_limits = (
            self.METHOD_NAME(instrument_strategy)
        )
        combined_list = listOfTradeLimits(
            instrument_trade_limits + strategy_instrument_trade_limits
        )
        return combined_list

    def get_trade_limits_for_instrument(
        self, instrument_code: str
    ) -> listOfTradeLimits:
        """Limits stored at the instrument level (empty strategy name)."""
        instrument_strategy = instrument_strategy_for_instrument_only(instrument_code)
        return listOfTradeLimits(
            self.METHOD_NAME(instrument_strategy)
        )

    def METHOD_NAME(
        self, instrument_strategy: instrumentStrategy
    ) -> listOfTradeLimits:
        """All stored limits (any period) for one instrument strategy."""
        all_keys = self._get_all_limit_keys()
        relevant_keys = all_keys.for_given_instrument_strategy(instrument_strategy)
        trade_limits = [
            self._get_trade_limit_object_from_isd_key(isd_key)
            for isd_key in relevant_keys
        ]

        return listOfTradeLimits(trade_limits)

    def _update_list_of_trade_limits(self, list_of_trade_limits: list):
        """Persist every limit in the list."""
        result = [
            self._update_trade_limit_object(trade_limit_object)
            for trade_limit_object in list_of_trade_limits
        ]
        return result

    def update_instrument_limit_with_new_limit(
        self, instrument_code: str, period_days: int, new_limit: int
    ):
        """Set a new limit at the instrument level (empty strategy name)."""
        instrument_strategy = instrument_strategy_for_instrument_only(instrument_code)
        self.update_instrument_strategy_limit_with_new_limit(
            instrument_strategy, period_days, new_limit
        )

    def update_instrument_strategy_limit_with_new_limit(
        self, instrument_strategy: instrumentStrategy, period_days: int, new_limit: int
    ):
        """Set a new limit value for one instrument strategy / period."""
        trade_limit = self._get_trade_limit_object(instrument_strategy, period_days)
        trade_limit.update_limit(new_limit)
        self._update_trade_limit_object(trade_limit)

    def reset_all_limits(self):
        """Reset usage counters on every stored limit."""
        all_limits = self.get_all_limits()
        for limit in all_limits:
            self.reset_instrument_strategy_limit(
                instrument_strategy=limit.instrument_strategy,
                period_days=limit.period_days,
            )

    def reset_instrument_limit(self, instrument_code: str, period_days: int):
        """Reset the instrument-level limit for one period."""
        instrument_strategy = instrument_strategy_for_instrument_only(instrument_code)
        self.reset_instrument_strategy_limit(instrument_strategy, period_days)

    def reset_strategy_limit_all_instruments(
        self, strategy_name: str, period_days: int
    ):
        # NOTE(review): not implemented — silently does nothing; confirm
        # whether this is intentional or an unfinished feature.
        pass

    def reset_instrument_strategy_limit(
        self, instrument_strategy: instrumentStrategy, period_days: int
    ):
        """Reset the usage counter on a single stored limit."""
        trade_limit = self._get_trade_limit_object(instrument_strategy, period_days)
        trade_limit.reset()
        self._update_trade_limit_object(trade_limit)

    def get_all_limits(self) -> list:
        """Return every stored limit as tradeLimit objects."""
        all_keys = self._get_all_limit_keys()
        all_limits = [
            self._get_trade_limit_object_from_isd_key(key) for key in all_keys
        ]
        return listOfTradeLimits(all_limits)

    def _get_trade_limit_object_from_isd_key(
        self, isd_key: instrumentStrategyKeyAndDays
    ) -> tradeLimit:
        """Rehydrate a tradeLimit from its (key, days) identifier."""
        instrument_strategy = instrumentStrategy.from_key(
            isd_key.instrument_strategy_key
        )
        period_days = isd_key.period_days
        return self._get_trade_limit_object(instrument_strategy, period_days)

    def _get_trade_limit_object(
        self, instrument_strategy: instrumentStrategy, period_days: int
    ) -> tradeLimit:
        """Fetch a stored limit, or the no-limit sentinel when absent."""
        try:
            trade_limit_as_dict = self._get_trade_limit_as_dict_or_missing_data(
                instrument_strategy, period_days
            )
        except missingData:
            return self.no_limit(instrument_strategy, period_days)

        trade_limit_object = tradeLimit.from_dict(trade_limit_as_dict)
        return trade_limit_object

    def _update_trade_limit_object(self, trade_limit_object):
        """Persist one limit via its dict representation."""
        trade_limit_as_dict = trade_limit_object.as_dict()
        self._update_trade_limit_as_dict(trade_limit_as_dict)

    # --- persistence interface: implemented by storage-specific subclasses ---

    def _get_trade_limit_as_dict_or_missing_data(
        self, instrument_strategy: instrumentStrategy, period_days: int
    ) -> dict:
        raise NotImplementedError

    def _update_trade_limit_as_dict(self, trade_limit_object: dict):
        raise NotImplementedError

    def _get_all_limit_keys(self) -> listOfInstrumentStrategyKeyAndDays:
        raise NotImplementedError
def instrument_strategy_for_instrument_only(instrument_code) -> instrumentStrategy:
    """Wrap a bare instrument code in an instrumentStrategy with no strategy."""
    return instrumentStrategy(
        strategy_name="",
        instrument_code=instrument_code,
    )
299,239 | get pyramid keys unlocks | from typing import Tuple, Dict, Union
from BaseClasses import MultiWorld
from .Options import timespinner_options, is_option_enabled, get_option_value
class PreCalculatedWeights:
    """Values rolled once at construction so later logic can reference
    stable results: key-item gate unlocks and RisingTides flood states."""

    # Teleporter gate name unlocked by each key item.
    pyramid_keys_unlock: str
    present_key_unlock: str
    past_key_unlock: str
    time_key_unlock: str
    # RisingTides flood-state flags (all False when the option is off).
    flood_basement: bool
    flood_basement_high: bool
    flood_xarion: bool
    flood_maw: bool
    flood_pyramid_shaft: bool
    flood_pyramid_back: bool
    flood_moat: bool
    flood_courtyard: bool
    flood_lake_desolation: bool
    dry_lake_serene: bool
    def __init__(self, world: MultiWorld, player: int):
        """Roll every flood state (when RisingTides is enabled) and then the
        four key/gate unlocks, which may depend on the Maw flood result."""
        if world and is_option_enabled(world, player, "RisingTides"):
            weights_overrrides: Dict[str, Union[str, Dict[str, int]]] = self.get_flood_weights_overrides(world, player)

            # CastleBasement is the only area with a distinct "high" state.
            self.flood_basement, self.flood_basement_high = \
                self.roll_flood_setting(world, player, weights_overrrides, "CastleBasement")
            self.flood_xarion, _ = self.roll_flood_setting(world, player, weights_overrrides, "Xarion")
            self.flood_maw, _ = self.roll_flood_setting(world, player, weights_overrrides, "Maw")
            self.flood_pyramid_shaft, _ = self.roll_flood_setting(world, player, weights_overrrides, "AncientPyramidShaft")
            self.flood_pyramid_back, _ = self.roll_flood_setting(world, player, weights_overrrides, "Sandman")
            self.flood_moat, _ = self.roll_flood_setting(world, player, weights_overrrides, "CastleMoat")
            self.flood_courtyard, _ = self.roll_flood_setting(world, player, weights_overrrides, "CastleCourtyard")
            self.flood_lake_desolation, _ = self.roll_flood_setting(world, player, weights_overrrides, "LakeDesolation")
            # Lake Serene is stored inverted: dry rather than flooded.
            flood_lake_serene, _ = self.roll_flood_setting(world, player, weights_overrrides, "LakeSerene")
            self.dry_lake_serene = not flood_lake_serene
        else:
            # RisingTides disabled (or no world yet): everything dry.
            self.flood_basement = False
            self.flood_basement_high = False
            self.flood_xarion = False
            self.flood_maw = False
            self.flood_pyramid_shaft = False
            self.flood_pyramid_back = False
            self.flood_moat = False
            self.flood_courtyard = False
            self.flood_lake_desolation = False
            self.dry_lake_serene = False

        self.pyramid_keys_unlock, self.present_key_unlock, self.past_key_unlock, self.time_key_unlock = \
            self.METHOD_NAME(world, player, self.flood_maw)
    @staticmethod
    def METHOD_NAME(world: MultiWorld, player: int, is_maw_flooded: bool) -> Tuple[str, str, str, str]:
        """Choose the gates unlocked by the pyramid, present, past and time
        keys.  Without a *world* the first entry of each pool is returned
        deterministically (presumably for templating — TODO confirm)."""
        present_teleportation_gates: Tuple[str, ...] = (
            "GateKittyBoss",
            "GateLeftLibrary",
            "GateMilitaryGate",
            "GateSealedCaves",
            "GateSealedSirensCave",
            "GateLakeDesolation"
        )

        past_teleportation_gates: Tuple[str, ...] = (
            "GateLakeSereneRight",
            "GateAccessToPast",
            "GateCastleRamparts",
            "GateCastleKeep",
            "GateRoyalTowers",
            "GateCavesOfBanishment"
        )

        ancient_pyramid_teleportation_gates: Tuple[str, ...] = (
            "GateGyre",
            "GateLeftPyramid",
            "GateRightPyramid"
        )

        if not world:
            return (
                present_teleportation_gates[0],
                present_teleportation_gates[0],
                past_teleportation_gates[0],
                ancient_pyramid_teleportation_gates[0]
            )

        # The Maw gate is only a valid option when the Maw is not flooded.
        if not is_maw_flooded:
            past_teleportation_gates += ("GateMaw", )

        # Inverted mode starts in the present, so the free-choice pyramid
        # keys are limited to present gates.
        if is_option_enabled(world, player, "Inverted"):
            all_gates: Tuple[str, ...] = present_teleportation_gates
        else:
            all_gates: Tuple[str, ...] = past_teleportation_gates + present_teleportation_gates

        return (
            world.random.choice(all_gates),
            world.random.choice(present_teleportation_gates),
            world.random.choice(past_teleportation_gates),
            world.random.choice(ancient_pyramid_teleportation_gates)
        )
    @staticmethod
    def get_flood_weights_overrides(world: MultiWorld, player: int) -> Dict[str, Union[str, Dict[str, int]]]:
        """Merge the player's RisingTidesOverrides option with the defaults.

        Any area key the player did not override falls back to the default
        weight table for that area.
        """
        weights_overrides_option: Union[int, Dict[str, Union[str, Dict[str, int]]]] = \
            get_option_value(world, player, "RisingTidesOverrides")
        default_weights: Dict[str, Dict[str, int]] = timespinner_options["RisingTidesOverrides"].default

        if not weights_overrides_option:
            weights_overrides_option = default_weights
        else:
            # Fill in any missing areas from the defaults (mutates the option).
            for key, weights in default_weights.items():
                if not key in weights_overrides_option:
                    weights_overrides_option[key] = weights

        return weights_overrides_option
    @staticmethod
    def roll_flood_setting(world: MultiWorld, player: int,
                           all_weights: Dict[str, Union[Dict[str, int], str]], key: str) -> Tuple[bool, bool]:
        """Roll the flood state for area *key*.

        A dict weight table is sampled via the world RNG; a plain string is
        treated as a fixed result.
        """
        weights: Union[Dict[str, int], str] = all_weights[key]

        if isinstance(weights, dict):
            result: str = world.random.choices(list(weights.keys()), weights=list(map(int, weights.values())))[0]
        else:
            result: str = weights

        # Second flag appears to mean "flooded including the save point"
        # — TODO confirm against game logic.
        if result == "Dry":
            return False, False
        elif result == "Flooded":
            return True, True
        elif result == "FloodedWithSavePointAvailable":
            return True, False
299,240 | main | """
Tool to read on-chain storage from EVM
"""
import json
import argparse
from crytic_compile import cryticparser
from slither import Slither
from slither.tools.read_storage.read_storage import SlitherReadStorage, RpcInfo
def parse_args() -> argparse.Namespace:
    """Parse the underlying arguments for the program.
    Returns:
        The arguments for the program.
    """
    parser = argparse.ArgumentParser(
        description="Read a variable's value from storage for a deployed contract",
        usage=(
            "\nTo retrieve a single variable's value:\n"
            + "\tslither-read-storage $TARGET address --variable-name $NAME\n"
            + "To retrieve a contract's storage layout:\n"
            + "\tslither-read-storage $TARGET address --contract-name $NAME --json storage_layout.json\n"
            + "To retrieve a contract's storage layout and values:\n"
            + "\tslither-read-storage $TARGET address --contract-name $NAME --json storage_layout.json --value\n"
            + "TARGET can be a contract address or project directory"
        ),
    )

    # nargs="+": either [address] (verified) or [source_dir, address].
    parser.add_argument(
        "contract_source",
        help="The deployed contract address if verified on etherscan. Prepend project directory for unverified contracts.",
        nargs="+",
    )

    parser.add_argument(
        "--variable-name",
        help="The name of the variable whose value will be returned.",
        default=None,
    )

    parser.add_argument("--rpc-url", help="An endpoint for web3 requests.")

    parser.add_argument(
        "--key",
        help="The key/ index whose value will be returned from a mapping or array.",
        default=None,
    )

    parser.add_argument(
        "--deep-key",
        help="The key/ index whose value will be returned from a deep mapping or multidimensional array.",
        default=None,
    )

    parser.add_argument(
        "--struct-var",
        help="The name of the variable whose value will be returned from a struct.",
        default=None,
    )

    parser.add_argument(
        "--storage-address",
        help="The address of the storage contract (if a proxy pattern is used).",
        default=None,
    )

    parser.add_argument(
        "--contract-name",
        help="The name of the logic contract.",
        default=None,
    )

    parser.add_argument(
        "--json",
        action="store",
        help="Save the result in a JSON file.",
    )

    parser.add_argument(
        "--value",
        action="store_true",
        help="Toggle used to include values in output.",
    )

    parser.add_argument(
        "--table",
        action="store_true",
        help="Print table view of storage layout",
    )

    parser.add_argument(
        "--silent",
        action="store_true",
        help="Silence log outputs",
    )

    parser.add_argument("--max-depth", help="Max depth to search in data structure.", default=20)

    parser.add_argument(
        "--block",
        help="The block number to read storage from. Requires an archive node to be provided as the RPC url.",
        default="latest",
    )

    parser.add_argument(
        "--unstructured",
        action="store_true",
        help="Include unstructured storage slots",
    )

    # Add the shared crytic-compile options (compiler, remappings, etc.).
    cryticparser.init(parser)

    return parser.parse_args()
def METHOD_NAME() -> None:
    """CLI entry point: compile/fetch the target, read its storage layout
    (and optionally slot values), then print/serialize the result."""
    args = parse_args()

    if len(args.contract_source) == 2:
        # Source code is file.sol or project directory
        source_code, target = args.contract_source
        slither = Slither(source_code, **vars(args))
    else:
        # Source code is published and retrieved via etherscan
        target = args.contract_source[0]
        slither = Slither(target, **vars(args))

    if args.contract_name:
        contracts = slither.get_contract_from_name(args.contract_name)
    else:
        contracts = slither.contracts

    rpc_info = None
    if args.rpc_url:
        # Named block tags pass through; anything else must be an integer.
        valid = ["latest", "earliest", "pending", "safe", "finalized"]
        block = args.block if args.block in valid else int(args.block)
        rpc_info = RpcInfo(args.rpc_url, block)

    srs = SlitherReadStorage(contracts, args.max_depth, rpc_info)
    srs.unstructured = bool(args.unstructured)
    # Remove target prefix e.g. rinkeby:0x0 -> 0x0.
    address = target[target.find(":") + 1 :]
    # Default to implementation address unless a storage address is given.
    if not args.storage_address:
        args.storage_address = address
    srs.storage_address = args.storage_address

    if args.variable_name:
        # Use a lambda func to only return variables that have same name as target.
        # x is a tuple (`Contract`, `StateVariable`).
        srs.get_all_storage_variables(lambda x: bool(x[1].name == args.variable_name))
        srs.get_target_variables(**vars(args))
    else:
        srs.get_all_storage_variables()
        srs.get_storage_layout()

    # To retrieve slot values an rpc url is required.
    if args.value:
        assert args.rpc_url
        srs.walk_slot_info(srs.get_slot_values)

    if args.table:
        srs.walk_slot_info(srs.convert_slot_info_to_rows)
        print(srs.table)

    if args.json:
        with open(args.json, "w", encoding="utf-8") as file:
            slot_infos_json = srs.to_json()
            json.dump(slot_infos_json, file, indent=4)


if __name__ == "__main__":
    METHOD_NAME()
299,241 | test can await stopping | from unittest import TestCase
from lamden.nodes.queue_base import ProcessingQueue
import time
import asyncio
class TestProcessingQueue(TestCase):
    """Unit tests for ProcessingQueue's start/stop/pause lifecycle and its
    list-like behavior (flush, append gating, subscripting)."""

    def setUp(self):
        self.processing_queue = ProcessingQueue()

    def tearDown(self):
        self.processing_queue.stop()
        self.processing_queue.flush()

    async def await_queue_stopping(self):
        print (self.processing_queue.currently_processing)
        # Await the stopping of the queue
        await self.processing_queue.stopping()

    async def await_queue_pausing(self):
        print (self.processing_queue.currently_processing)
        # Await the stopping of the queue
        await self.processing_queue.pausing()

    async def delay_processing(self, func, delay):
        # Helper: sleep *delay* seconds, then invoke *func* (if given).
        print('\n')
        print('Starting Sleeping: ', time.time())
        await asyncio.sleep(delay)
        print('Done Sleeping: ', time.time())
        if func:
            return func()

    def stop(self):
        # NOTE(review): sets an attribute on the TestCase itself, not on the
        # queue — looks like an unused leftover helper; confirm.
        self.running = False

    def test_can_start(self):
        self.processing_queue.start()
        self.assertEqual(self.processing_queue.running, True)

    def test_can_stop(self):
        self.processing_queue.stop()
        self.assertEqual(self.processing_queue.running, False)

    def test_can_pause(self):
        self.assertEqual(self.processing_queue.paused, False)
        self.processing_queue.pause()
        self.assertEqual(self.processing_queue.paused, True)

    def test_can_unpause(self):
        self.processing_queue.paused = True
        self.processing_queue.unpause()
        self.assertEqual(self.processing_queue.paused, False)

    def test_can_start_processing(self):
        self.processing_queue.start_processing()
        self.assertEqual(self.processing_queue.currently_processing, True)

    def test_can_stop_processing(self):
        self.processing_queue.stop_processing()
        self.assertEqual(self.processing_queue.currently_processing, False)

    def METHOD_NAME(self):
        """stopping() should only resolve once processing actually ends."""
        # Mark the queue as currently processing
        self.processing_queue.start_processing()

        # Stop the queue
        self.processing_queue.stop()

        # Await the queue stopping and then mark the queue as not processing after X seconds
        tasks = asyncio.gather(
            self.await_queue_stopping(),
            self.delay_processing(func=self.processing_queue.stop_processing, delay=2)
        )
        loop = asyncio.get_event_loop()
        loop.run_until_complete(tasks)

        # Assert the queue is stopped and not processing any transactions
        self.assertEqual(self.processing_queue.currently_processing, False)
        self.assertEqual(self.processing_queue.running, False)

    def test_can_await_pausing(self):
        """pausing() should only resolve once processing actually ends."""
        # Mark the queue as currently processing
        self.processing_queue.start_processing()

        # Stop the queue
        self.processing_queue.pause()

        # Await the queue stopping and then mark the queue as not processing after X seconds
        tasks = asyncio.gather(
            self.await_queue_pausing(),
            self.delay_processing(func=self.processing_queue.stop_processing, delay=2)
        )
        loop = asyncio.get_event_loop()
        loop.run_until_complete(tasks)

        # Assert the queue is stopped and not processing any transactions
        self.assertEqual(self.processing_queue.currently_processing, False)
        self.assertEqual(self.processing_queue.paused, True)

    def test_flush(self):
        # Add a bunch of transactions to the queue
        for i in range(10):
            self.processing_queue.queue.append("testing")

        # assert queue has items in it
        self.assertEqual(len(self.processing_queue), 10)

        # flush queue
        self.processing_queue.flush()

        # Assert queue is empty
        self.assertEqual(len(self.processing_queue), 0)

    def test_is_subscriptable(self):
        item = "testing"
        self.processing_queue.append(item)

        # assert queue has items in it
        self.assertEqual("testing", self.processing_queue[0])

    def test_is_subscriptable_ret_None_if_indexError(self):
        # assert queue has items in it
        self.assertIsNone(self.processing_queue[0])

    def test_disable_append__sets_allow_append_to_False(self):
        self.processing_queue.allow_append = True
        self.assertTrue(self.processing_queue.allow_append)
        self.processing_queue.disable_append()
        self.assertFalse(self.processing_queue.allow_append)

    def test_enable_append__sets_allow_append_to_True(self):
        self.processing_queue.allow_append = False
        self.assertFalse(self.processing_queue.allow_append)
        self.processing_queue.enable_append()
        self.assertTrue(self.processing_queue.allow_append)
299,242 | test mlcp enum large fromfile | # Copyright 2022 INRIA
import numpy as np
import siconos.numerics as sn
ztol = 1e-8
def createMLCP_fromFile(datafile): # uses datafile pytest fixture
    """Load the diode-bridge MLCP from its data file and return it together
    with the known reference solution."""
    mlcp = sn.MLCP()
    fname = datafile("diodeBridge_mlcp.dat")
    sn.mixedLinearComplementarity_newFromFilename(mlcp, fname)
    # Reference solution for the diode bridge problem.
    zsol = np.array(
        [
            9.85185185e-01,
            9.85185185e-01,
            -0.00000000e00,
            9.85185185e-04,
            0.00000000e00,
            0.00000000e00,
            9.85185185e-04,
        ]
    )
    return (mlcp, zsol)
def createMLCP_small():
    """Build the small MLCP instance (Murty88, p2) and its known solution."""
    # basic interface
    # Murty88, p2
    M = np.array([[2.0, 1.0], [1.0, 2.0]])
    q = np.array([-5.0, -6.0])
    # solution
    expected = np.array([4.0 / 3.0, 7.0 / 3.0])
    # problem
    problem = sn.MLCP(1, M, q)
    return problem, expected
def createMLCP_large():
    """Build the 7x7 diode-bridge MLCP inline (same data as the file-based
    variant) and return it with its reference solution."""
    zsol = np.array(
        [
            9.85185185e-01,
            9.85185185e-01,
            -0.00000000e00,
            9.85185185e-04,
            0.00000000e00,
            0.00000000e00,
            9.85185185e-04,
        ]
    )
    M = np.array(
        [
            [
                0.00000000e00,
                -1.00000000e-03,
                1.00000000e-03,
                0.00000000e00,
                1.00000000e00,
                0.00000000e00,
                1.00000000e00,
            ],
            [
                0.00000000e00,
                1.00000000e-03,
                -1.00000000e-03,
                -1.00000000e00,
                0.00000000e00,
                -1.00000000e00,
                0.00000000e00,
            ],
            [
                -1.00250000e00,
                0.00000000e00,
                0.00000000e00,
                0.00000000e00,
                0.00000000e00,
                1.00000000e01,
                -1.00000000e01,
            ],
            [
                0.00000000e00,
                0.00000000e00,
                -1.00000000e00,
                0.00000000e00,
                0.00000000e00,
                0.00000000e00,
                0.00000000e00,
            ],
            [
                0.00000000e00,
                1.00000000e00,
                0.00000000e00,
                0.00000000e00,
                0.00000000e00,
                0.00000000e00,
                0.00000000e00,
            ],
            [
                1.00000000e00,
                0.00000000e00,
                -1.00000000e00,
                0.00000000e00,
                0.00000000e00,
                0.00000000e00,
                0.00000000e00,
            ],
            [
                -1.00000000e00,
                1.00000000e00,
                0.00000000e00,
                0.00000000e00,
                0.00000000e00,
                0.00000000e00,
                0.00000000e00,
            ],
        ]
    )
    q = np.array([[0.0], [0.0], [0.9975], [0.0], [0.0], [0.0], [0.0]])
    # First argument: number of "equality" (linear system) variables.
    mlcp = sn.MLCP(3, M, q)
    return (mlcp, zsol)
def test_mlcp_enum():
    """Solve the small MLCP with the ENUM solver and check against zsol."""
    mlcp, expected = createMLCP_small()
    z = np.array([0.0, 0.0])
    w = np.array([0.0, 0.0])

    options = sn.SolverOptions(sn.SICONOS_MLCP_ENUM)
    sn.mlcp_driver_init(mlcp, options)
    info = sn.mlcp_enum(mlcp, z, w, options)
    sn.mlcp_driver_reset(mlcp, options)

    print("z = ", z)
    print("w = ", w)
    assert np.linalg.norm(z - expected) <= ztol
    assert not info
def test_mlcp_enum_large():
    """Solve the 7x7 inline MLCP with the ENUM solver and check against zsol."""
    mlcp, expected = createMLCP_large()
    z = np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
    w = np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])

    options = sn.SolverOptions(sn.SICONOS_MLCP_ENUM)
    sn.mlcp_driver_init(mlcp, options)
    info = sn.mlcp_enum(mlcp, z, w, options)
    sn.mlcp_driver_reset(mlcp, options)

    print("z = ", z)
    print("w = ", w)
    assert np.linalg.norm(z - expected) <= ztol
    assert not info
def METHOD_NAME(datafile): # uses datafile pytest fixture
    """Same as test_mlcp_enum_large but loading the problem from a file."""
    z = np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
    w = np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
    mlcp, zsol = createMLCP_fromFile(datafile)
    SO = sn.SolverOptions(sn.SICONOS_MLCP_ENUM)
    sn.mlcp_driver_init(mlcp, SO)
    info = sn.mlcp_enum(mlcp, z, w, SO)
    # Always release solver working memory after the solve.
    sn.mlcp_driver_reset(mlcp, SO)
    print("z = ", z)
    print("w = ", w)
    assert np.linalg.norm(z - zsol) <= ztol
    assert not info
299,243 | is different | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# Author: Pauli Virtanen, 2016
import math
from operator import index
def get_weight(stats):
    """Return a data point weight for the result.

    The weight is ``2 / |ci_99_b - ci_99_a|`` (inverse CI width).  A missing
    stats dict, missing bounds, an infinite interval, or a zero-width
    interval all yield ``None`` (weight treated as missing).
    """
    if stats is None:
        return None
    if 'ci_99_a' not in stats or 'ci_99_b' not in stats:
        return None

    lo, hi = stats['ci_99_a'], stats['ci_99_b']
    if math.isinf(lo) or math.isinf(hi):
        # Infinite interval is due to too few samples --- consider
        # weight as missing
        return None

    width = abs(hi - lo)
    if width == 0:
        return None
    return 2 / width
def METHOD_NAME(samples_a, samples_b, stats_a, stats_b, p_threshold=0.002):
    """Check whether the samples are statistically different.
    If sample data is not provided, or the sample is too small, falls
    back to a pessimistic CI-based check. If it returns True, then the
    difference is statistically significant. If it returns False, it
    might or might not be statistically significant.
    Parameters
    ----------
    samples_a, samples_b
        Input samples
    stats_a, stats_b
        Input stats data
    p_threshold : float, optional
        Two-sided p-value threshold for the Mann-Whitney U test.
    """

    if samples_a is not None and samples_b is not None:
        # Raw data present: Mann-Whitney U test, but only if there's
        # enough data so that the test can return True
        a = [x for x in samples_a if not math.isnan(x)]
        b = [x for x in samples_b if not math.isnan(x)]

        # Smallest achievable p-value for these sample sizes; if even that
        # cannot clear the threshold, the test is pointless — skip it.
        p_min = 1 / binom(len(a) + len(b), min(len(a), len(b)))
        if p_min < p_threshold:
            _, p = mann_whitney_u(a, b)
            return p < p_threshold

    # If confidence intervals overlap, reject.
    # Corresponds to a test with ill-specified threshold p-value,
    # which generally can be significantly smaller than p <= 0.01
    # depending on the actual data. For normal test (known variance),
    # 0.00027 <= p <= 0.01.
    ci_a = (stats_a['ci_99_a'], stats_a['ci_99_b'])
    ci_b = (stats_b['ci_99_a'], stats_b['ci_99_b'])

    if ci_a[1] >= ci_b[0] and ci_a[0] <= ci_b[1]:
        return False

    return True
# Module-level memo table for the exact-distribution recursion; bounded by
# the size check in mann_whitney_u() below.
_mann_whitney_u_memo = {}

def mann_whitney_u(x, y, method='auto'):
    """
    Mann-Whitney U test

    Ties are handled conservatively, returning the least significant
    tie breaking.

    Parameters
    ----------
    x, y : list of float
        Samples to test
    method : {'auto', 'exact', 'normal'}
        Whether to compute p-value exactly of via normal approximation.
        The option 'auto' switches to approximation for sample size > 20.

    Returns
    -------
    u : int
        U-statistic
    p : float
        p-value for two-sided alternative

    References
    ----------
    .. [1] Mann & Whitney, Ann. Math. Statist. 18, 50 (1947).
    .. [2] Gibbons & Chakraborti, "Nonparametric statistical inference". (2003)
    """
    memo = _mann_whitney_u_memo
    # Keep the shared memo table from growing without bound.
    if len(memo) > 100000:
        memo.clear()

    m = len(x)
    n = len(y)

    if method == 'auto':
        if max(m, n) > 20:
            method = 'normal'
        else:
            method = 'exact'

    u, ties = mann_whitney_u_u(x, y)

    # Conservative tie breaking
    if u <= m * n // 2 and u + ties >= m * n // 2:
        ties = m * n // 2 - u

    # Pick whichever tie resolution is *least* significant (largest ux).
    ux1 = min(u, m * n - u)
    ux2 = min(u + ties, m * n - (u + ties))

    if ux1 >= ux2:
        ux = ux1
    else:
        u = u + ties
        ux = ux2

    # Get p-value
    if method == 'exact':
        p1 = mann_whitney_u_cdf(m, n, ux, memo)
        p2 = 1.0 - mann_whitney_u_cdf(m, n, max(m * n // 2, m * n - ux - 1), memo)
        p = p1 + p2
    elif method == 'normal':
        # Normal approximation (no continuity/tie correction).
        N = m + n
        var = m * n * (N + 1) / 12
        z = (ux - m * n / 2) / math.sqrt(var)
        cdf = 0.5 * math.erfc(-z / math.sqrt(2))
        p = 2 * cdf
    else:
        raise ValueError(f"Unknown method {repr(method)}")

    return u, p
def mann_whitney_u_u(x, y):
    """Compute the raw U statistic of sample *x* against *y*.

    Returns ``(u, ties)``: the number of pairs with ``xx > yy`` and the
    number of exactly tied pairs.
    """
    wins = sum(1 for xx in x for yy in y if xx > yy)
    ties = sum(1 for xx in x for yy in y if xx == yy)
    return wins, ties
def mann_whitney_u_cdf(m, n, u, memo=None):
    """CDF of the U statistic: P(U <= u) for sample sizes (m, n)."""
    if memo is None:
        memo = {}
    return sum(mann_whitney_u_pmf(m, n, uu, memo) for uu in range(u + 1))
def mann_whitney_u_pmf(m, n, u, memo=None):
    """PMF of the U statistic: P(U == u) = r(m, n, u) / C(m + n, m)."""
    memo = {} if memo is None else memo
    return mann_whitney_u_r(m, n, u, memo) / binom(m + n, m)
def mann_whitney_u_r(m, n, u, memo=None):
    """
    Number of orderings in Mann-Whitney U test.

    The PMF of U for samples of sizes (m, n) is given by
    p(u) = r(m, n, u) / binom(m + n, m).

    References
    ----------
    .. [1] Mann & Whitney, Ann. Math. Statist. 18, 50 (1947).
    """
    # Base cases: negative u is impossible; an empty sample only allows u == 0.
    if u < 0:
        return 0
    if m == 0 or n == 0:
        return 1 if u == 0 else 0

    # Memoized two-branch recursion (largest element taken from x or y).
    if memo is None:
        memo = {}
    key = (m, n, u)
    cached = memo.get(key)
    if cached is None:
        cached = (mann_whitney_u_r(m, n - 1, u, memo) +
                  mann_whitney_u_r(m - 1, n, u - n, memo))
        memo[key] = cached
    return cached
def binom(n, k):
    """
    Binomial coefficient (n over k)

    Returns 0 when ``k`` lies outside ``[0, n]`` instead of raising,
    which the Mann-Whitney helpers rely on.  Arguments must be
    integer-like (``operator.index`` rejects floats explicitly rather
    than depending on an unqualified ``index`` name being in scope).
    """
    from operator import index
    n = index(n)
    k = index(k)
    if not 0 <= k <= n:
        return 0
    # Multiplicative formula; using min(k, n - k) bounds the loop at
    # n // 2 iterations.  num is always exactly divisible by den at the
    # end, so integer division is lossless.
    k = min(k, n - k)
    num = 1
    den = 1
    for j in range(1, k + 1):
        num *= n + 1 - j
        den *= j
    return num // den
299,244 | convert to extras | # encoding: utf-8
import json
import six
from six import string_types, text_type
import ckan.model as model
import ckan.lib.navl.dictization_functions as df
import ckan.logic.validators as validators
from ckan.common import _
def METHOD_NAME(key, data, errors, context):
    """Move the value stored under *key* into the flattened extras list.

    Appends new ``('extras', i, 'key')`` / ``('extras', i, 'value')``
    entries using the next free extras index so existing extras are
    never overwritten.
    """
    used_indexes = [k[1] for k in data
                    if len(k) > 1 and k[0] == 'extras']
    next_index = max(used_indexes) + 1 if used_indexes else 0
    data[('extras', next_index, 'key')] = key[-1]
    data[('extras', next_index, 'value')] = data[key]
def convert_from_extras(key, data, errors, context):
    """Copy a matching extras value into *key* and drop the extra.

    Searches the flattened ``data`` dict for an extras entry whose
    'key' field equals ``key[-1]``; if found, that entry's 'value' is
    copied to ``data[key]`` and all flattened keys of the extra are
    removed.  If no match exists, ``data`` is left untouched.
    """
    def remove_from_extras(data, key):
        # Collect first, delete afterwards: deleting while iterating
        # the dict would raise a RuntimeError.
        to_remove = []
        for data_key, data_value in six.iteritems(data):
            if (data_key[0] == 'extras'
                    and data_key[1] == key):
                to_remove.append(data_key)
        for item in to_remove:
            del data[item]
    for data_key, data_value in six.iteritems(data):
        if (data_key[0] == 'extras'
                and data_key[-1] == 'key'
                and data_value == key[-1]):
            # Found the extra whose 'key' matches; copy its 'value'
            # and stop scanning (the break makes the assignment during
            # iteration safe).
            data[key] = data[('extras', data_key[1], 'value')]
            break
    else:
        # for/else: loop finished without break => no matching extra.
        return
    remove_from_extras(data, data_key[1])
def extras_unicode_convert(extras, context):
    """Coerce every extras value to text in place and return the dict."""
    for name in extras:
        extras[name] = text_type(extras[name])
    return extras
def free_tags_only(key, data, errors, context):
    """Drop any tag entry that belongs to a vocabulary.

    "Free" tags (no 'vocabulary_id') are kept; if the tag addressed by
    *key* has a vocabulary, every flattened key for that tag index is
    removed from ``data``.
    """
    tag_index = key[1]
    if not data.get(('tags', tag_index, 'vocabulary_id')):
        return
    doomed = [k for k in data if k[0] == 'tags' and k[1] == tag_index]
    for k in doomed:
        del data[k]
def convert_to_tags(vocab):
    """Return a converter that files the value under *key* as tags of
    vocabulary *vocab*.

    The returned callable validates each new tag against the vocabulary
    (raising ``df.Invalid`` if the vocabulary does not exist) and then
    appends the tags after any existing flattened tag entries.
    """
    def callable(key, data, errors, context):
        new_tags = data.get(key)
        if not new_tags:
            return
        if isinstance(new_tags, string_types):
            new_tags = [new_tags]
        # get current number of tags
        n = 0
        for k in data.keys():
            if k[0] == 'tags':
                n = max(n, k[1] + 1)
        v = model.Vocabulary.get(vocab)
        if not v:
            raise df.Invalid(_('Tag vocabulary "%s" does not exist') % vocab)
        # The validator below reads the vocabulary from the context.
        context['vocabulary'] = v
        for tag in new_tags:
            validators.tag_in_vocabulary_validator(tag, context)
        # Append after the highest existing tag index so nothing is
        # overwritten.
        for num, tag in enumerate(new_tags):
            data[('tags', num + n, 'name')] = tag
            data[('tags', num + n, 'vocabulary_id')] = v.id
    return callable
def convert_from_tags(vocab):
    """Return a converter that collects tag names of vocabulary *vocab*.

    The returned callable raises ``df.Invalid`` if the vocabulary does
    not exist, otherwise stores the list of matching tag names under
    *key*.
    """
    def callable(key, data, errors, context):
        vocabulary = model.Vocabulary.get(vocab)
        if not vocabulary:
            raise df.Invalid(_('Tag vocabulary "%s" does not exist') % vocab)
        names = []
        for k in data.keys():
            if k[0] == 'tags':
                if data[k].get('vocabulary_id') == vocabulary.id:
                    # Prefer the translated display name when present.
                    names.append(data[k].get('display_name', data[k]['name']))
        data[key] = names
    return callable
def convert_user_name_or_id_to_id(user_name_or_id, context):
    '''Return the user id for the given user name or id.
    Accepts either a user name or a user id and resolves it to the id,
    also validating that such a user exists.
    :returns: the id of the user with the given user name or id
    :rtype: string
    :raises: ckan.lib.navl.dictization_functions.Invalid if no user can be
        found with the given id or user name
    '''
    session = context['session']
    # Look the value up as an id first, then fall back to the name.
    user = session.query(model.User).filter_by(id=user_name_or_id).first()
    if user is None:
        user = session.query(model.User).filter_by(
            name=user_name_or_id).first()
    if user is None:
        raise df.Invalid('%s: %s' % (_('Not found'), _('User')))
    return user.id
def convert_package_name_or_id_to_id(package_name_or_id, context):
    '''Return the package id for the given package name or id.
    Accepts either a package name or a package id and resolves it to
    the id, also validating that such a package exists.
    :returns: the id of the package with the given name or id
    :rtype: string
    :raises: ckan.lib.navl.dictization_functions.Invalid if there is no
        package with the given name or id
    '''
    session = context['session']
    # Look the value up as an id first, then fall back to the name.
    package = session.query(model.Package).filter_by(
        id=package_name_or_id).first()
    if package is None:
        package = session.query(model.Package).filter_by(
            name=package_name_or_id).first()
    if package is None:
        raise df.Invalid('%s: %s' % (_('Not found'), _('Dataset')))
    return package.id
def convert_group_name_or_id_to_id(group_name_or_id, context):
    '''Return the group id for the given group name or id.
    Accepts either a group name or a group id and resolves it to the
    id, also validating that such a group exists.
    :returns: the id of the group with the given name or id
    :rtype: string
    :raises: ckan.lib.navl.dictization_functions.Invalid if there is no
        group with the given name or id
    '''
    session = context['session']
    # Look the value up as an id first, then fall back to the name.
    group = session.query(model.Group).filter_by(
        id=group_name_or_id).first()
    if group is None:
        group = session.query(model.Group).filter_by(
            name=group_name_or_id).first()
    if group is None:
        raise df.Invalid('%s: %s' % (_('Not found'), _('Group')))
    return group.id
def convert_to_json_if_string(value, context):
    """Parse *value* as JSON when it is a string; pass anything else
    through unchanged.

    Raises ``df.Invalid`` when the string is not valid JSON.
    """
    if not isinstance(value, string_types):
        return value
    try:
        return json.loads(value)
    except ValueError:
        raise df.Invalid(_('Could not parse as valid JSON'))
def convert_to_list_if_string(value, context=None):
    """Wrap a bare string in a one-element list; pass everything else
    through unchanged."""
    return [value] if isinstance(value, string_types) else value
def json_or_string(value):
    """
    parse string values as json, return string if that fails
    """
    if isinstance(value, string_types):
        try:
            value = json.loads(value)
        except ValueError:
            # Not valid JSON: keep the original string.
            pass
    return value
def json_list_or_string(value):
    """
    parse string values as json or comma-separated lists, return
    string as a one-element list if that fails
    """
    if not isinstance(value, string_types):
        return value
    try:
        return json.loads(value)
    except ValueError:
        # Not JSON: treat the value as a comma-separated list (a plain
        # string without commas becomes a one-element list).
        return value.split(',')
def remove_whitespace(value, context):
    """Strip leading and trailing whitespace from string values; pass
    non-strings through unchanged."""
    return value.strip() if isinstance(value, string_types) else value
299,245 | lpage1 90 | import doctest
import unittest
import pdfarranger.core as core
class PTest(unittest.TestCase):
    """Base class for Page and LayerPage tests.

    Provides sample pages in all four rotations.  Rotation is applied
    counter-clockwise, so a page at angle A rotated by 90 ends up at
    angle A + 90 with its crop/offset sides permuted accordingly.
    """
    @staticmethod
    def _lpage1() -> core.LayerPage:
        """Sample layer page 1"""
        return core.LayerPage(2, 4, 'lcopy', 90, 2, core.Sides(0.11, 0.21, 0.31, 0.41),
                              core.Sides(0.12, 0.22, 0.32, 0.42), 'OVERLAY', core.Dims(10.33, 20.33))
    @staticmethod
    def METHOD_NAME() -> core.LayerPage:
        """Sample layer page 1 rotated 90 degrees"""
        return core.LayerPage(2, 4, 'lcopy', 180, 2, core.Sides(0.41, 0.31, 0.11, 0.21),
                              core.Sides(0.42, 0.32, 0.12, 0.22), 'OVERLAY', core.Dims(10.33, 20.33))
    @staticmethod
    def _lpage1_180() -> core.LayerPage:
        """Sample layer page 1 rotated 180 degrees"""
        return core.LayerPage(2, 4, 'lcopy', 270, 2, core.Sides(0.21, 0.11, 0.41, 0.31),
                              core.Sides(0.22, 0.12, 0.42, 0.32), 'OVERLAY', core.Dims(10.33, 20.33))
    @staticmethod
    def _lpage1_270() -> core.LayerPage:
        """Sample layer page 1 rotated 270 degrees"""
        return core.LayerPage(2, 4, 'lcopy', 0, 2, core.Sides(0.31, 0.41, 0.21, 0.11),
                              core.Sides(0.32, 0.42, 0.22, 0.12), 'OVERLAY', core.Dims(10.33, 20.33))
    def _page1(self) -> core.Page:
        """Sample page 1"""
        return core.Page(1, 2, 0.55, 'copy', 0, 2, core.Sides(0.1, 0.2, 0.3, 0.4),
                         core.Sides(0.11, 0.21, 0.31, 0.41), core.Dims(100.33, 200.66), 'base', [self._lpage1()])
    def _page1_90(self) -> core.Page:
        """Sample page 1 rotated 90 degrees"""
        return core.Page(1, 2, 0.55, 'copy', 90, 2, core.Sides(0.4, 0.3, 0.1, 0.2),
                         core.Sides(0.41, 0.31, 0.11, 0.21), core.Dims(100.33, 200.66), 'base', [self.METHOD_NAME()])
    def _page1_180(self) -> core.Page:
        """Sample page 1 rotated 180 degrees"""
        return core.Page(1, 2, 0.55, 'copy', 180, 2, core.Sides(0.2, 0.1, 0.4, 0.3),
                         core.Sides(0.21, 0.11, 0.41, 0.31), core.Dims(100.33, 200.66), 'base', [self._lpage1_180()])
    def _page1_270(self) -> core.Page:
        """Sample page 1 rotated 270 degrees"""
        return core.Page(1, 2, 0.55, 'copy', 270, 2, core.Sides(0.3, 0.4, 0.2, 0.1),
                         core.Sides(0.31, 0.41, 0.21, 0.11), core.Dims(100.33, 200.66), 'base', [self._lpage1_270()])
class BasePageTest(PTest):
    """Tests of geometry helpers shared by Page and LayerPage."""
    def test01(self):
        """Test width | height | size_in_points"""
        # assertAlmostEquals is a deprecated alias (removed in recent
        # Python versions); use assertAlmostEqual.
        self.assertAlmostEqual(self._page1().size_in_points()[0], 140.462)
        self.assertAlmostEqual(self._page1().width_in_points(), 140.462)
        self.assertAlmostEqual(self._page1().size_in_points()[1], 120.396)
        self.assertAlmostEqual(self._page1().height_in_points(), 120.396)
        self.assertAlmostEqual(self._page1_90().size_in_points()[0], 120.396)
        self.assertAlmostEqual(self._page1_90().width_in_points(), 120.396)
        self.assertAlmostEqual(self._page1_90().size_in_points()[1], 140.462)
        self.assertAlmostEqual(self._page1_90().height_in_points(), 140.462)
    def test02(self):
        """Test rotate_times"""
        # Remember - counter-clockwise !
        self.assertEqual(core.Page.rotate_times(0), 0)
        self.assertEqual(core.Page.rotate_times(90), 3)
        self.assertEqual(core.Page.rotate_times(134), 3)
        self.assertEqual(core.Page.rotate_times(-270), 3)
        self.assertEqual(core.Page.rotate_times(3690), 3)
class PageTest(PTest):
    """Tests for core.Page rotation, duplication and serialization."""
    def _rotate(self, angle: int) -> core.Page:
        """Return sample page 1 rotated by angle"""
        p = self._page1()
        p.rotate(angle)
        return p
    def test01(self):
        """Test rotate"""
        self.assertEqual(repr(self._rotate(0)), repr(self._page1()))
        self.assertEqual(repr(self._rotate(3600)), repr(self._page1()))
        self.assertEqual(repr(self._rotate(-7200)), repr(self._page1()))
        self.assertEqual(repr(self._rotate(90)), repr(self._page1_90()))
        self.assertEqual(repr(self._rotate(-270)), repr(self._page1_90()))
        self.assertEqual(repr(self._rotate(180)), repr(self._page1_180()))
        self.assertEqual(repr(self._rotate(270)), repr(self._page1_270()))
        self.assertEqual(self._rotate(0).size, core.Dims(100.33, 200.66))
        self.assertEqual(self._rotate(90).size, core.Dims(200.66, 100.33))
        self.assertEqual(self._rotate(180).size, core.Dims(100.33, 200.66))
        self.assertEqual(self._rotate(270).size, core.Dims(200.66, 100.33))
    def test02(self):
        """Test duplicate"""
        p = self._page1()
        d = p.duplicate()
        self.assertEqual(repr(p), repr(d))
        # Mutating the original must not affect the duplicate.
        p.rotate(90)
        self.assertEqual(repr(d), repr(self._page1()))
        # assertNotEquals is a deprecated alias; use assertNotEqual.
        self.assertNotEqual(repr(p), repr(self._page1()))
    def test03(self):
        """Test serialize"""
        self.assertEqual(self._page1().serialize(),
                         'copy\n2\nbase\n0\n2\n0.1\n0.2\n0.3\n0.4\n0.11\n0.21\n0.31\n0.41\n'
                         'lcopy\n4\n90\n2\nOVERLAY\n0.11\n0.21\n0.31\n0.41\n0.12\n0.22\n0.32\n0.42')
    def test04(self):
        """Test width | height | size_in_pixel"""
        self.assertEqual(self._page1().size_in_pixel()[0], 77)
        self.assertEqual(self._page1().width_in_pixel(), 77)
        self.assertEqual(self._page1().size_in_pixel()[1], 66)
        self.assertEqual(self._page1().height_in_pixel(), 66)
        self.assertEqual(self._page1_90().size_in_pixel()[0], 66)
        self.assertEqual(self._page1_90().width_in_pixel(), 66)
        self.assertEqual(self._page1_90().size_in_pixel()[1], 77)
        self.assertEqual(self._page1_90().height_in_pixel(), 77)
        self.assertTrue(isinstance(self._page1().height_in_pixel(), int), 'height_in_pixel not an int')
        self.assertTrue(isinstance(self._page1().width_in_pixel(), int), 'width_in_pixel not an int')
class LayerPageTest(PTest):
    """Tests for core.LayerPage rotation, duplication and serialization.

    Note: LayerPage.rotate takes a number of 90-degree steps, not
    degrees (compare PageTest, which rotates by degrees).
    """
    def _rotate(self, angle: int) -> core.LayerPage:
        """Return sample layer page 1 rotated by angle"""
        p = self._lpage1()
        p.rotate(angle)
        return p
    def test01(self):
        """Test rotate"""
        self.assertEqual(repr(self._rotate(0)), repr(self._lpage1()))
        self.assertEqual(repr(self._rotate(-40)), repr(self._lpage1()))
        self.assertEqual(repr(self._rotate(80)), repr(self._lpage1()))
        self.assertEqual(repr(self._rotate(-1)), repr(self.METHOD_NAME()))
        self.assertEqual(repr(self._rotate(3)), repr(self.METHOD_NAME()))
        self.assertEqual(repr(self._rotate(-2)), repr(self._lpage1_180()))
        self.assertEqual(repr(self._rotate(-3)), repr(self._lpage1_270()))
        self.assertEqual(self._rotate(0).size, core.Dims(20.33, 10.33))
        self.assertEqual(self._rotate(-1).size, core.Dims(10.33, 20.33))
        self.assertEqual(self._rotate(-2).size, core.Dims(20.33, 10.33))
        self.assertEqual(self._rotate(-3).size, core.Dims(10.33, 20.33))
    def test02(self):
        """Test duplicate"""
        p = self._lpage1()
        d = p.duplicate()
        self.assertEqual(repr(p), repr(d))
        # Mutating the original must not affect the duplicate.
        p.rotate(90)
        self.assertEqual(repr(d), repr(self._lpage1()))
        # assertNotEquals is a deprecated alias; use assertNotEqual.
        self.assertNotEqual(repr(p), repr(self._lpage1()))
    def test03(self):
        """Test serialize"""
        self.assertEqual(self._lpage1().serialize(),
                         'lcopy\n4\n90\n2\nOVERLAY\n0.11\n0.21\n0.31\n0.41\n0.12\n0.22\n0.32\n0.42')
def load_tests(loader, tests, ignore):
    """unittest load_tests protocol hook: also run the doctests embedded
    in pdfarranger.core as part of this module's suite."""
    tests.addTests(doctest.DocTestSuite(core))
    return tests
299,246 | present | """
Management of PostgreSQL schemas
================================
The postgres_schemas module is used to create and manage Postgres schemas.
.. code-block:: yaml
public:
postgres_schema.present 'dbname' 'name'
"""
import logging
log = logging.getLogger(__name__)
def __virtual__():
    """
    Only load if the postgres module is present
    """
    # __salt__ is injected by the Salt loader.  The execution function
    # postgres.schema_exists is only registered when the postgres
    # binaries were located, so its absence means the module is unusable.
    if "postgres.schema_exists" not in __salt__:
        return (
            False,
            "Unable to load postgres module. Make sure `postgres.bins_dir` is set.",
        )
    return True
def METHOD_NAME(
    dbname,
    name,
    owner=None,
    user=None,
    db_user=None,
    db_password=None,
    db_host=None,
    db_port=None,
):
    """
    Ensure that the named schema is present in the database.
    dbname
        The database's name will work on
    name
        The name of the schema to manage
    owner
        The database role that should own the schema (passed to
        ``postgres.schema_create``)
    user
        system user all operations should be performed on behalf of
    db_user
        database username if different from config or default
    db_password
        user password if any password for a specified user
    db_host
        Database host if different from config or default
    db_port
        Database port if different from config or default
    """
    ret = {
        "dbname": dbname,
        "name": name,
        "changes": {},
        "result": True,
        "comment": "Schema {} is already present in database {}".format(name, dbname),
    }
    db_args = {
        "db_user": db_user,
        "db_password": db_password,
        "db_host": db_host,
        "db_port": db_port,
        "user": user,
    }
    # Check whether the schema already exists.
    schema_attr = __salt__["postgres.schema_get"](dbname, name, **db_args)
    if schema_attr is None:
        # The schema is not present, make it!
        if __opts__["test"]:
            ret["result"] = None
            ret["comment"] = "Schema {} is set to be created in database {}.".format(
                name, dbname
            )
            return ret
        cret = __salt__["postgres.schema_create"](dbname, name, owner=owner, **db_args)
        if cret:
            ret["result"] = True
            ret["changes"][name] = "Present"
            ret["comment"] = "Schema {} has been created in database {}".format(
                name, dbname
            )
        else:
            # schema_create returned a falsy value: creation failed.
            ret["result"] = False
            ret["comment"] = "Failed to create schema {} in database {}".format(
                name, dbname
            )
    else:
        # Nothing to do; report the schema as already existing.
        ret["result"] = True
        ret["comment"] = "Schema {} already exists in database {}".format(name, dbname)
    return ret
def absent(
    dbname, name, user=None, db_user=None, db_password=None, db_host=None, db_port=None
):
    """
    Ensure that the named schema is absent.
    dbname
        The database's name will work on
    name
        The name of the schema to remove
    user
        system user all operations should be performed on behalf of
    db_user
        database username if different from config or default
    db_password
        user password if any password for a specified user
    db_host
        Database host if different from config or default
    db_port
        Database port if different from config or default
    """
    ret = {"name": name, "dbname": dbname, "changes": {}, "result": True, "comment": ""}
    db_args = {
        "db_user": db_user,
        "db_password": db_password,
        "db_host": db_host,
        "db_port": db_port,
        "user": user,
    }
    # Guard clause: nothing to remove if the schema is not there.
    if not __salt__["postgres.schema_exists"](dbname, name, **db_args):
        ret[
            "comment"
        ] = "Schema {} is not present in database {}, so it cannot be removed".format(
            name, dbname
        )
        return ret
    if __opts__["test"]:
        ret["result"] = None
        ret["comment"] = "Schema {} is set to be removed from database {}".format(
            name, dbname
        )
        return ret
    if __salt__["postgres.schema_remove"](dbname, name, **db_args):
        ret["comment"] = "Schema {} has been removed from database {}".format(
            name, dbname
        )
        ret["changes"][name] = "Absent"
        return ret
    ret["result"] = False
    ret["comment"] = "Schema {} failed to be removed".format(name)
    return ret
299,247 | get previous commit | #
# Copyright 2017-2023 - Swiss Data Science Center (SDSC)
# A partnership between Γcole Polytechnique FΓ©dΓ©rale de Lausanne (EPFL) and
# EidgenΓΆssische Technische Hochschule ZΓΌrich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Migration utility functions."""
import os
from collections import defaultdict
from enum import IntFlag
from typing import Any, Dict, NamedTuple, Optional
from renku.core import errors
from renku.core.constant import RENKU_HOME
from renku.core.util import communication
from renku.infrastructure.repository import Repository, git_unicode_unescape
class MigrationType(IntFlag):
    """Type of migration that is being executed."""
    # Distinct bit flags so multiple migration kinds can be combined
    # with ``|`` (ALL is the union of the three).
    DATASETS = 1
    WORKFLOWS = 2
    STRUCTURAL = 4
    ALL = DATASETS | WORKFLOWS | STRUCTURAL
class MigrationOptions(NamedTuple):
    """Migration options."""
    strict: bool  # abort on migration errors instead of continuing
    preserve_identifiers: bool  # keep existing identifiers during migration
    type: MigrationType = MigrationType.ALL  # which migration kinds to run
class MigrationContext:
    """Context containing required migration information."""
    def __init__(self, strict: bool, type: MigrationType, preserve_identifiers: bool):
        self.options = MigrationOptions(strict=strict, type=type, preserve_identifiers=preserve_identifiers)
        # The caches below start empty and are populated lazily by the
        # individual migration steps that need them.
        self.dataset_migration_context: Optional[DatasetMigrationContext] = None
        self.cache: Optional[RepositoryCache] = None
        self.cwl_cache: Optional[Dict[str, Any]] = None
        self.plan_cache: Optional[Dict[str, Any]] = None
class DatasetMigrationContext:
    """The context to get path info in a specific revision."""
    def __init__(self, repository, revision):
        self.repository = repository
        self.revision = revision
    def exists(self, path) -> bool:
        """If a given path existed at the revision."""
        spec = f"{self.revision}:{path}"
        try:
            # ``git cat-file -e`` exits non-zero when the object is missing.
            self.repository.run_git_command("cat-file", "-e", spec)
            return True
        except errors.GitCommandError:
            return False
    def is_dir(self, path) -> bool:
        """If a given path was a directory at the revision."""
        spec = f"{self.revision}:{path}"
        try:
            object_type = self.repository.run_git_command("cat-file", "-t", spec)
        except errors.GitCommandError:
            return False
        # Directories are stored as ``tree`` objects in git.
        return "tree" in object_type
    def METHOD_NAME(self, path):
        """Get previous commit of a given path starting from the revision."""
        return self.repository.METHOD_NAME(path, revision=self.revision)
class RepositoryCache:
    """Cache for a git repository.

    Maps each path to the list of commits that touched it (newest
    first, in iteration order of ``iterate_commits``) and records, for
    each CWL workflow file, the latest commit that changed it alone.
    """
    def __init__(self, repository: "Repository", cache, cwl_files_commits):
        self.repository = repository
        self.cache = cache
        self.cwl_files_commits = cwl_files_commits
    @classmethod
    def from_repository(cls, repository: "Repository"):
        """Return a cached repository."""
        cache = defaultdict(list)
        cwl_files_commits_map = {}
        for n, commit in enumerate(repository.iterate_commits(full_history=True), start=1):
            communication.echo(f"Caching commit {n}", end="\r")
            cwl_files = []
            for file in commit.get_changes():
                # Ignore deleted files
                if file.deleted:
                    continue
                path = file.b_path
                cache[path].append(commit)
                if path.startswith(f"{RENKU_HOME}/workflow/") and path.endswith(".cwl"):
                    cwl_files.append(os.path.realpath(repository.path / path))
            cls._update_cwl_files_and_commits(commit, cwl_files_commits_map, cwl_files)
        # Overwrite the progress line with spaces to clear it.
        communication.echo(40 * " ", end="\r")
        return RepositoryCache(repository, cache, cwl_files_commits_map)
    @staticmethod
    def _update_cwl_files_and_commits(commit, cwl_files_commits_map, cwl_files):
        # Only record commits that changed exactly one CWL file; commits
        # touching several are ambiguous for the workflow migration.
        if len(cwl_files) != 1:
            return
        path = cwl_files[0]
        existing_commit = cwl_files_commits_map.get(path)
        if existing_commit is None:
            cwl_files_commits_map[path] = commit
        elif existing_commit.compare_to(commit) < 0:  # existing commit is older
            cwl_files_commits_map[path] = commit
    def find_previous_commit(self, path, revision="HEAD"):
        """Return a previous commit for a given path starting from 'revision'."""
        def find_from_repository(path, revision):
            # Fallback: ask git directly when the cache has no answer.
            # NOTE(review): on failure this returns ``revision`` (not a
            # commit) after warning — callers appear to tolerate that.
            try:
                return self.repository.METHOD_NAME(path=path, revision=revision, full_history=True)
            except errors.GitCommitNotFoundError:
                communication.warn(f"Cannot find previous commit for {path} from {str(revision)}")
                return revision
        # Normalize to a path relative to the repository root if possible.
        try:
            path = (self.repository.path / path).relative_to(self.repository.path)
        except ValueError:
            pass
        path = str(path)
        if revision == "HEAD":
            revision = self.repository.head.commit
        commits = self.cache.get(git_unicode_unescape(path))
        if not commits:
            return find_from_repository(path, revision)
        if revision in commits:
            return revision
        # Commits are ordered newest-first; return the first one at or
        # before the requested revision.
        for commit in commits:
            if commit.compare_to(revision) <= 0:
                return commit
        # No commit was found
        return find_from_repository(path, revision)
299,248 | download | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Prepare CHiME3 background data.
Download, unpack and create manifest files.
Manifest file is a json-format file with each line containing the
meta data (i.e. audio filepath, transcript and audio duration)
of each audio file in the data set.
"""
import argparse
import io
import json
import os
import zipfile
import soundfile
import wget
from paddle.v2.dataset.common import md5file
# DATA_HOME = os.path.expanduser('~/.cache/paddle/dataset/speech')
DATA_HOME = os.path.expanduser('.')
URL = "https://d4s.myairbridge.com/packagev2/AG0Y3DNBE5IWRRTV/?dlid=W19XG7T0NNHB027139H0EQ"
MD5 = "c3ff512618d7a67d4f85566ea1bc39ec"
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"--target_dir",
default=DATA_HOME + "/chime3_background",
type=str,
help="Directory to save the dataset. (default: %(default)s)")
parser.add_argument(
"--manifest_filepath",
default="manifest.chime3.background",
type=str,
help="Filepath for output manifests. (default: %(default)s)")
args = parser.parse_args()
def METHOD_NAME(url, md5sum, target_dir, filename=None):
    """Download file from url to target_dir, and check md5sum.

    Returns the path of the downloaded (or already present) file.
    Raises RuntimeError when the checksum does not match ``md5sum``.
    """
    if filename is None:
        filename = url.split("/")[-1]
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    filepath = os.path.join(target_dir, filename)
    if not (os.path.exists(filepath) and md5file(filepath) == md5sum):
        print("Downloading %s ..." % url)
        # Download to the exact target path.  Passing only the directory
        # lets wget derive the file name from the URL, which for this
        # query-string URL differs from ``filename`` — the checksum
        # below would then look at a non-existent file.
        wget.download(url, out=filepath)
        print("\nMD5 Chesksum %s ..." % filepath)
        if not md5file(filepath) == md5sum:
            raise RuntimeError("MD5 checksum failed.")
    else:
        print("File exists, skip downloading. (%s)" % filepath)
    return filepath
def unpack(filepath, target_dir):
    """Unpack a .zip, .tar or .tar.gz archive into target_dir.

    Raises ValueError for unsupported archive extensions.
    """
    print("Unpacking %s ..." % filepath)
    if filepath.endswith('.zip'):
        with zipfile.ZipFile(filepath, 'r') as archive:
            archive.extractall(target_dir)
    elif filepath.endswith('.tar') or filepath.endswith('.tar.gz'):
        # Bug fix: tar archives must be opened with tarfile — the
        # zipfile module has no ``open`` function, so the original
        # branch raised AttributeError for every tar file.
        import tarfile
        with tarfile.open(filepath) as archive:
            archive.extractall(target_dir)
    else:
        raise ValueError("File format is not supported for unpacking.")
def create_manifest(data_dir, manifest_path):
    """Create a manifest json file summarizing the data set, with each line
    containing the meta data (i.e. audio filepath, transcription text, audio
    duration) of each audio file within the data set.
    """
    print("Creating manifest %s ..." % manifest_path)
    manifest_lines = []
    for subfolder, _, filelist in sorted(os.walk(data_dir)):
        for filename in filelist:
            if not filename.endswith('.wav'):
                continue
            filepath = os.path.join(data_dir, subfolder, filename)
            audio_data, samplerate = soundfile.read(filepath)
            duration = float(len(audio_data)) / samplerate
            entry = {
                'utt': os.path.splitext(os.path.basename(filepath))[0],
                'feat': filepath,
                'feat_shape': (duration, ),  # second
                'type': 'background'
            }
            manifest_lines.append(json.dumps(entry))
    with io.open(manifest_path, mode='w', encoding='utf8') as out_file:
        for line in manifest_lines:
            out_file.write(line + '\n')
def prepare_chime3(url, md5sum, target_dir, manifest_path):
    """Download, unpack and create summary manifest file."""
    if os.path.exists(os.path.join(target_dir, "CHiME3")):
        print("Skip downloading and unpacking. Data already exists in %s." %
              target_dir)
    else:
        # Fetch the outer bundle, then unpack it and each of the four
        # background-noise archives it contains.
        filepath = METHOD_NAME(url, md5sum, target_dir,
                               "myairbridge-AG0Y3DNBE5IWRRTV.zip")
        unpack(filepath, target_dir)
        for environment in ('bus', 'caf', 'ped', 'str'):
            unpack(
                os.path.join(target_dir,
                             'CHiME3_background_%s.zip' % environment),
                target_dir)
    # create manifest json file
    create_manifest(target_dir, manifest_path)
def main():
    """Entry point: prepare the CHiME3 background set using CLI arguments."""
    prepare_chime3(
        url=URL,
        md5sum=MD5,
        target_dir=args.target_dir,
        manifest_path=args.manifest_filepath)
if __name__ == '__main__':
main() |
299,249 | attach signal2 | #!/usr/bin/env python
#
# Copyright 2011,2012,2015 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from gnuradio import gr, filter
from gnuradio.fft import window
from gnuradio import blocks
import sys
try:
from gnuradio import qtgui
from PyQt5 import QtWidgets, Qt
import sip
except ImportError:
sys.stderr.write("Error: Program requires PyQt5 and gr-qtgui.\n")
sys.exit(1)
try:
from gnuradio import analog
except ImportError:
sys.stderr.write("Error: Program requires gr-analog.\n")
sys.exit(1)
try:
from gnuradio import channels
except ImportError:
sys.stderr.write("Error: Program requires gr-channels.\n")
sys.exit(1)
class dialog_box(QtWidgets.QWidget):
    """Top-level window that lays the spectrum display out next to the
    control panel."""
    def __init__(self, display, control):
        QtWidgets.QWidget.__init__(self, None)
        self.setWindowTitle('PyQt Test GUI')
        self.boxlayout = QtWidgets.QBoxLayout(
            QtWidgets.QBoxLayout.LeftToRight, self)
        # Stretch factor 1 gives the display all extra horizontal space.
        self.boxlayout.addWidget(display, 1)
        self.boxlayout.addWidget(control)
        self.resize(800, 500)
class control_box(QtWidgets.QWidget):
    """Control panel with frequency/amplitude line edits for two signal
    sources; edits are applied to the attached sources on commit."""
    def __init__(self, parent=None):
        QtWidgets.QWidget.__init__(self, parent)
        self.setWindowTitle('Control Panel')
        self.setToolTip('Control the signals')
        QtWidgets.QToolTip.setFont(Qt.QFont('OldEnglish', 10))
        self.layout = QtWidgets.QFormLayout(self)
        # Control the first signal
        self.freq1Edit = QtWidgets.QLineEdit(self)
        self.freq1Edit.setMinimumWidth(100)
        self.layout.addRow("Signal 1 Frequency:", self.freq1Edit)
        # editingFinished fires on Enter or focus loss.
        self.freq1Edit.editingFinished.connect(self.freq1EditText)
        self.amp1Edit = QtWidgets.QLineEdit(self)
        self.amp1Edit.setMinimumWidth(100)
        self.layout.addRow("Signal 1 Amplitude:", self.amp1Edit)
        self.amp1Edit.editingFinished.connect(self.amp1EditText)
        # Control the second signal
        self.freq2Edit = QtWidgets.QLineEdit(self)
        self.freq2Edit.setMinimumWidth(100)
        self.layout.addRow("Signal 2 Frequency:", self.freq2Edit)
        self.freq2Edit.editingFinished.connect(self.freq2EditText)
        self.amp2Edit = QtWidgets.QLineEdit(self)
        self.amp2Edit.setMinimumWidth(100)
        self.layout.addRow("Signal 2 Amplitude:", self.amp2Edit)
        self.amp2Edit.editingFinished.connect(self.amp2EditText)
        self.quit = QtWidgets.QPushButton('Close', self)
        self.quit.setMinimumWidth(100)
        self.layout.addWidget(self.quit)
        self.quit.clicked.connect(QtWidgets.qApp.quit)
    def attach_signal1(self, signal):
        """Bind the first signal source and mirror its current settings."""
        self.signal1 = signal
        self.freq1Edit.setText(("{0}").format(self.signal1.frequency()))
        self.amp1Edit.setText(("{0}").format(self.signal1.amplitude()))
    def METHOD_NAME(self, signal):
        """Bind the second signal source and mirror its current settings."""
        self.signal2 = signal
        self.freq2Edit.setText(("{0}").format(self.signal2.frequency()))
        self.amp2Edit.setText(("{0}").format(self.signal2.amplitude()))
    def freq1EditText(self):
        """Apply the edited frequency to signal 1, ignoring bad input."""
        try:
            newfreq = float(self.freq1Edit.text())
            self.signal1.set_frequency(newfreq)
        except ValueError:
            print("Bad frequency value entered")
    def amp1EditText(self):
        """Apply the edited amplitude to signal 1, ignoring bad input."""
        try:
            newamp = float(self.amp1Edit.text())
            self.signal1.set_amplitude(newamp)
        except ValueError:
            print("Bad amplitude value entered")
    def freq2EditText(self):
        """Apply the edited frequency to signal 2, ignoring bad input."""
        try:
            newfreq = float(self.freq2Edit.text())
            self.signal2.set_frequency(newfreq)
        except ValueError:
            print("Bad frequency value entered")
    def amp2EditText(self):
        """Apply the edited amplitude to signal 2, ignoring bad input."""
        try:
            newamp = float(self.amp2Edit.text())
            self.signal2.set_amplitude(newamp)
        except ValueError:
            print("Bad amplitude value entered")
class my_top_block(gr.top_block):
    """Flowgraph: two complex sine sources are summed, passed through a
    noisy channel model, throttled and fed into a Qt sink, with a GUI
    control panel attached to both sources."""
    def __init__(self):
        gr.top_block.__init__(self)
        Rs = 8000      # sample rate in Hz
        f1 = 1000      # first tone frequency
        f2 = 2000      # second tone frequency
        fftsize = 2048
        self.qapp = QtWidgets.QApplication(sys.argv)
        # Use a context manager so the stylesheet file handle is closed
        # even if read() raises (the original leaked it on error).
        with open(gr.prefix() + '/share/gnuradio/themes/dark.qss') as ss:
            sstext = ss.read()
        self.qapp.setStyleSheet(sstext)
        src1 = analog.sig_source_c(Rs, analog.GR_SIN_WAVE, f1, 0.1, 0)
        src2 = analog.sig_source_c(Rs, analog.GR_SIN_WAVE, f2, 0.1, 0)
        src = blocks.add_cc()
        channel = channels.channel_model(0.001)
        # Throttle keeps the flowgraph from consuming the CPU since
        # there is no hardware to pace it.
        thr = blocks.throttle(gr.sizeof_gr_complex, 100 * fftsize)
        self.snk1 = qtgui.sink_c(fftsize, window.WIN_BLACKMAN_hARRIS,
                                 0, Rs,
                                 "Complex Signal Example",
                                 True, True, True, False, None)
        self.connect(src1, (src, 0))
        self.connect(src2, (src, 1))
        self.connect(src, channel, thr, self.snk1)
        self.ctrl_win = control_box()
        self.ctrl_win.attach_signal1(src1)
        self.ctrl_win.METHOD_NAME(src2)
        # Get the reference pointer to the SpectrumDisplayForm QWidget
        pyQt = self.snk1.qwidget()
        # Wrap the pointer as a PyQt SIP object
        # This can now be manipulated as a PyQt5.QtWidgets.QWidget
        pyWin = sip.wrapinstance(pyQt, QtWidgets.QWidget)
        self.main_box = dialog_box(pyWin, self.ctrl_win)
        self.main_box.show()
if __name__ == "__main__":
    tb = my_top_block()
    tb.start()
    # Qt's event loop blocks here until all windows are closed, after
    # which the flowgraph is stopped.
    tb.qapp.exec_()
    tb.stop()
299,250 | test twisted event relayed | """
Tests for ``magic_folder.test.eliotutil``.
"""
import logging
from testtools import (
TestCase,
)
from testtools.matchers import (
Is,
IsInstance,
Equals,
AfterPreprocessing,
)
from testtools.twistedsupport import (
succeeded,
failed,
)
from eliot import (
Message,
)
from eliot.twisted import DeferredContext
from eliot.testing import (
capture_logging,
assertHasAction,
)
from twisted.internet.defer import (
succeed,
)
from twisted.internet.task import deferLater
from twisted.internet import reactor
from ..util.eliotutil import (
log_call_deferred,
log_inline_callbacks,
_EliotLogging,
)
from .common import (
AsyncTestCase,
)
class EliotLoggedTestTests(AsyncTestCase):
    """Eliot messages can be logged from tests that return nothing, a
    fired Deferred, or an unfired Deferred."""
    def test_returns_none(self):
        Message.log(hello="world")
    def test_returns_fired_deferred(self):
        Message.log(hello="world")
        return succeed(None)
    def test_returns_unfired_deferred(self):
        Message.log(hello="world")
        # @eliot_logged_test automatically gives us an action context but it's
        # still our responsibility to maintain it across stack-busting
        # operations.
        d = DeferredContext(deferLater(reactor, 0.0, lambda: None))
        d.addCallback(lambda ignored: Message.log(goodbye="world"))
        # We didn't start an action. We're not finishing an action.
        return d.result
# Opt out of the great features of common.SyncTestCase because we're
# interacting with Eliot in a very obscure, particular, fragile way. :/
class EliotLoggingTests(TestCase):
    """
    Tests for ``_EliotLogging``.

    Both tests start the service with ``capture_logs=True`` so events
    from foreign logging systems are routed into the Eliot destination.
    """
    def test_stdlib_event_relayed(self):
        """
        An event logged using the stdlib logging module is delivered to the Eliot
        destination.
        """
        collected = []
        service = _EliotLogging([collected.append], capture_logs=True)
        service.startService()
        self.addCleanup(service.stopService)
        # The first destination added to the global log destinations gets any
        # buffered messages delivered to it.  We don't care about those.
        # Throw them on the floor.  Sorry.
        del collected[:]
        logging.critical("oh no")
        self.assertThat(
            collected,
            AfterPreprocessing(
                len,
                Equals(1),
            ),
        )
    def METHOD_NAME(self):
        """
        An event logged with a ``twisted.logger.Logger`` is delivered to the Eliot
        destination.
        """
        collected = []
        service = _EliotLogging([collected.append], capture_logs=True)
        service.startService()
        self.addCleanup(service.stopService)
        from twisted.logger import Logger
        Logger().critical("oh no")
        self.assertThat(
            collected,
            AfterPreprocessing(
                len, Equals(1),
            ),
        )
class LogCallDeferredTests(TestCase):
    """
    Tests for ``log_call_deferred``.
    """
    @capture_logging(
        lambda self, logger:
        assertHasAction(self, logger, u"the-action", succeeded=True),
    )
    def test_return_value(self, logger):
        """
        The decorated function's return value is passed through.
        """
        result = object()
        @log_call_deferred(action_type=u"the-action")
        def f():
            return result
        # The decorator wraps the return value in a Deferred that fires with it.
        self.assertThat(f(), succeeded(Is(result)))
    @capture_logging(
        lambda self, logger: assertHasAction(
            self, logger, "the-action", succeeded=True, startFields={"thing": "value"}
        ),
    )
    def test_args_logged(self, logger):
        """
        The decorated function's arguments are logged.
        """
        # include_args=True logs every argument; the validator above checks
        # that "thing" appears in the action's start fields.
        @log_call_deferred(action_type="the-action", include_args=True)
        def f(self, reactor, thing):
            pass
        f(object(), object(), "value")
    @capture_logging(
        lambda self, logger: assertHasAction(
            self, logger, "the-action", succeeded=True, startFields={"thing": "value"}
        ),
    )
    def test_args_logged_explicit(self, logger):
        """
        The decorated function's arguments are logged.
        """
        # include_args as a list logs only the named arguments ("thing"),
        # not "other".
        @log_call_deferred(action_type="the-action", include_args=["thing"])
        def f(thing, other):
            pass
        f("value", object())
    @capture_logging(
        lambda self, logger: assertHasAction(
            self, logger, "the-action", succeeded=True, startFields={"thing": "value"}
        ),
    )
    def test_args_logged_inline_callbacks(self, logger):
        """
        The decorated function's arguments are logged.
        """
        # Same contract as above but for the inlineCallbacks-style decorator.
        @log_inline_callbacks(action_type="the-action", include_args=["thing"])
        def f(thing, other):
            yield
        f("value", object())
    @capture_logging(
        lambda self, logger:
        assertHasAction(self, logger, u"the-action", succeeded=False),
    )
    def test_raise_exception(self, logger):
        """
        An exception raised by the decorated function is passed through.
        """
        class Result(Exception):
            pass
        @log_call_deferred(action_type=u"the-action")
        def f():
            raise Result()
        # The raised exception surfaces as a failed Deferred carrying Result.
        self.assertThat(
            f(),
            failed(
                AfterPreprocessing(
                    lambda f: f.value,
                    IsInstance(Result),
                ),
            ),
        )
299,251 | results | #!/usr/bin/env python3
# Copyright (c) 2011-present, Facebook, Inc. All rights reserved.
# This source code is licensed under both the GPLv2 (found in the
# COPYING file in the root directory) and Apache 2.0 License
# (found in the LICENSE.Apache file in the root directory).
"""Run benchmark_compare.sh on the most recent build, for CI
"""
import argparse
import glob
import logging
import os
import re
import shutil
import subprocess
import sys
logging.basicConfig(level=logging.INFO)
class Config:
    """Runtime configuration for the benchmark_compare.sh CI wrapper."""

    # Environment variables forwarded verbatim to benchmark_compare.sh
    # (see get_benchmark_env()).
    benchmark_env_keys = [
        "LD_LIBRARY_PATH",
        "NUM_KEYS",
        "KEY_SIZE",
        "VALUE_SIZE",
        "CACHE_SIZE_MB",
        "DURATION_RW",
        "DURATION_RO",
        "MB_WRITE_PER_SEC",
        "NUM_THREADS",
        "COMPRESSION_TYPE",
        "MIN_LEVEL_TO_COMPRESS",
        "WRITE_BUFFER_SIZE_MB",
        "TARGET_FILE_SIZE_BASE_MB",
        "MAX_BYTES_FOR_LEVEL_BASE_MB",
        "MAX_BACKGROUND_JOBS",
        "CACHE_INDEX_AND_FILTER_BLOCKS",
        "USE_O_DIRECT",
        "STATS_INTERVAL_SECONDS",
        "SUBCOMPACTIONS",
        "COMPACTION_STYLE",
        "CI_TESTS_ONLY",
    ]

    def __init__(self, args):
        """Derive all paths from the parsed CLI *args* and the current directory."""
        cwd = os.getcwd()
        self.version_file = "./include/rocksdb/version.h"
        self.data_dir = os.path.expanduser(str(args.db_dir))
        self.results_dir = os.path.expanduser(str(args.output_dir))
        self.benchmark_script = cwd + "/tools/benchmark_compare.sh"
        self.benchmark_cwd = cwd + "/tools"
def read_version(config):
    """Parse the RocksDB version from the version header file.

    Scans config.version_file for the ROCKSDB_MAJOR / MINOR / PATCH
    #define lines, in that order, and returns their values as a tuple of
    strings ``(major, minor, patch)``.  Returns ``None`` if all three
    could not be found.
    """
    patterns = (
        re.compile(r"#define ROCKSDB_MAJOR\s([0-9]+)"),
        re.compile(r"#define ROCKSDB_MINOR\s([0-9]+)"),
        re.compile(r"#define ROCKSDB_PATCH\s([0-9]+)"),
    )
    found = []
    with open(config.version_file, "r") as reader:
        for line in reader:
            # Match the next expected component only; the header declares
            # them in major, minor, patch order.
            match = patterns[len(found)].match(line)
            if match is not None:
                found.append(match.group(1))
                if len(found) == len(patterns):
                    return tuple(found)
    # Didn't complete a match
    return None
def prepare(version_str, config):
    """Clear stale results for *version_str* and symlink the db_bench binary.

    Removes every file and (emptied) directory beneath
    ``<results_dir>/<version_str>``, then creates a ``db_bench.<version>``
    symlink in the benchmark working directory pointing at the freshly
    built ``db_bench`` executable.
    """
    old_files = glob.glob(f"{config.results_dir}/{version_str}/**", recursive=True)
    for f in old_files:
        if os.path.isfile(f):
            logging.debug(f"remove file {f}")
            os.remove(f)
    # Fix: remove directories deepest-first.  glob() lists a parent before
    # its children, and os.rmdir() raises on a non-empty directory, so the
    # original glob-order iteration failed on any nested results tree.
    # A child path is always longer than its parent's, so sorting by path
    # length descending guarantees children are removed first.
    for f in sorted((p for p in old_files if os.path.isdir(p)), key=len, reverse=True):
        logging.debug(f"remove dir {f}")
        os.rmdir(f)
    db_bench_vers = f"{config.benchmark_cwd}/db_bench.{version_str}"
    # Create a symlink to the db_bench executable
    os.symlink(f"{os.getcwd()}/db_bench", db_bench_vers)
def results(version_str, config):
    """Publish the benchmark report for *version_str*.

    benchmark_compare.sh writes its report into a per-version subdirectory;
    CI tooling expects it at the fixed location <results_dir>/report.tsv,
    so copy it up to the top level.
    """
    shutil.copyfile(
        f"{config.results_dir}/{version_str}/report.tsv",
        f"{config.results_dir}/report.tsv",
    )


def cleanup(version_str, config):
    """Remove the db_bench.<version> symlink created by prepare()."""
    db_bench_vers = f"{config.benchmark_cwd}/db_bench.{version_str}"
    os.remove(db_bench_vers)


def get_benchmark_env():
    """Return (key, value) pairs for each benchmark env var that is set.

    Only variables listed in Config.benchmark_env_keys are forwarded to
    the benchmark script.
    """
    env = []
    for key in Config.benchmark_env_keys:
        value = os.getenv(key)
        if value is not None:
            env.append((key, value))
    return env


def main():
    """Tool for running benchmark_compare.sh on the most recent build, for CI

    This tool will
    (1) Work out the current version of RocksDB
    (2) Run benchmark_compare with that version alone
    """
    parser = argparse.ArgumentParser(
        description="benchmark_compare.sh Python wrapper for CI."
    )
    # --tsvfile is the name of the file to read results from
    # --esdocument is the ElasticSearch document to push these results into
    #
    parser.add_argument(
        "--db_dir",
        default="~/tmp/rocksdb-benchmark-datadir",
        help="Database directory hierarchy to use",
    )
    parser.add_argument(
        "--output_dir",
        default="~/tmp/benchmark-results",
        help="Benchmark output goes here",
    )
    parser.add_argument(
        "--num_keys",
        default="10000",
        help="Number of database keys to use in benchmark test(s) (determines size of test job)",
    )
    args = parser.parse_args()
    config = Config(args)

    version = read_version(config)
    if version is None:
        raise Exception(f"Could not read RocksDB version from {config.version_file}")
    version_str = f"{version[0]}.{version[1]}.{version[2]}"
    logging.info(f"Run benchmark_ci with RocksDB version {version_str}")

    prepare(version_str, config)
    try:
        env = get_benchmark_env()
        # The CLI flag wins over any NUM_KEYS inherited from the environment
        # because dict() keeps the last occurrence of a repeated key.
        env.append(("NUM_KEYS", args.num_keys))
        cmd = [
            config.benchmark_script,
            config.data_dir,
            config.results_dir,
            version_str,
        ]
        logging.info(f"Run {cmd} env={env} cwd={config.benchmark_cwd}")
        subprocess.run(cmd, env=dict(env), cwd=config.benchmark_cwd)
        # Renamed from the METHOD_NAME placeholder to `results`.
        results(version_str, config)
    finally:
        # Always drop the db_bench symlink, even if the benchmark failed.
        cleanup(version_str, config)
    return 0


if __name__ == "__main__":
    sys.exit(main())
299,252 | test compare nf models | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Author: Jean-Luc Auge
# @Date: 2018-02-02 14:06:55
from numpy import zeros, array
from gnpy.core.elements import Transceiver, Edfa
from gnpy.core.utils import automatic_fmax, lin2db, db2lin, merge_amplifier_restrictions
from gnpy.core.info import create_input_spectral_information, ReferenceCarrier
from gnpy.core.network import build_network
from gnpy.tools.json_io import load_network, load_equipment
from pathlib import Path
import pytest
TEST_DIR = Path(__file__).parent
DATA_DIR = TEST_DIR / 'data'
test_network = DATA_DIR / 'test_network.json'
eqpt_library = DATA_DIR / 'eqpt_config.json'
# TODO in elements.py code: pytests doesn't pass with 1 channel: interpolate fail
@pytest.fixture(
    params=[(96, 0.05e12), (60, 0.075e12), (45, 0.1e12), (2, 0.1e12)],
    ids=['50GHz spacing', '75GHz spacing', '100GHz spacing', '2 channels'])
def nch_and_spacing(request):
    """parametrize channel count vs channel spacing (Hz)"""
    # Each param is a (channel_count, spacing_hz) pair; every dependent test
    # runs once per pair.
    yield request.param
@pytest.fixture()
def bw():
    """parametrize signal bandwidth (Hz)"""
    # Single fixed signal bandwidth of 45 GHz shared by the tests below.
    return 45e9
@pytest.fixture()
def setup_edfa_variable_gain():
    """init edfa class by reading test_network.json file
    remove all gain and nf ripple"""
    equipment = load_equipment(eqpt_library)
    network = load_network(test_network, equipment)
    build_network(network, equipment, 0, 20)
    # The first Edfa node in the network is the variable-gain amplifier.
    edfa = [n for n in network.nodes() if isinstance(n, Edfa)][0]
    # Zero the ripple arrays so NF/gain interpolation is tested in isolation.
    edfa.gain_ripple = zeros(96)
    edfa.interpol_nf_ripple = zeros(96)
    yield edfa
@pytest.fixture()
def setup_edfa_fixed_gain():
    """init edfa class by reading the 2nd edfa in test_network.json file"""
    equipment = load_equipment(eqpt_library)
    network = load_network(test_network, equipment)
    build_network(network, equipment, 0, 20)
    # The second Edfa node in the network uses the fixed-gain NF model.
    edfa = [n for n in network.nodes() if isinstance(n, Edfa)][1]
    yield edfa
@pytest.fixture()
def setup_trx():
    """init transceiver class to access snr and osnr calculations"""
    equipment = load_equipment(eqpt_library)
    network = load_network(test_network, equipment)
    build_network(network, equipment, 0, 20)
    # Any transceiver will do; take the first one found in the network.
    trx = [n for n in network.nodes() if isinstance(n, Transceiver)][0]
    return trx
@pytest.fixture()
def si(nch_and_spacing, bw):
    """parametrize a channel comb with nb_channel, spacing and signal bw"""
    nb_channel, spacing = nch_and_spacing
    f_min = 191.3e12
    # Derive the top frequency from the channel count and spacing.
    f_max = automatic_fmax(f_min, spacing, nb_channel)
    return create_input_spectral_information(f_min=f_min, f_max=f_max, roll_off=0.15, baud_rate=bw, power=1e-3,
                                             spacing=spacing, tx_osnr=40.0,
                                             ref_carrier=ReferenceCarrier(baud_rate=32e9, slot_width=50e9))
@pytest.mark.parametrize("gain, nf_expected", [(10, 15), (15, 10), (25, 5.8)])
def test_variable_gain_nf(gain, nf_expected, setup_edfa_variable_gain, si):
    """=> unitary test for variable gain model Edfa._calc_nf() (and Edfa.interpol_params)"""
    edfa = setup_edfa_variable_gain
    # Attenuate the comb by `gain` dB so the amplifier operates exactly at
    # the requested gain target.
    si.signal /= db2lin(gain)
    si.nli /= db2lin(gain)
    si.ase /= db2lin(gain)
    edfa.operational.gain_target = gain
    si.pref = si.pref._replace(p_span0=0, p_spani=-gain)
    edfa.interpol_params(si)
    result = edfa.nf
    # NF is per-channel; check the first channel against the expectation.
    assert pytest.approx(nf_expected, abs=0.01) == result[0]
@pytest.mark.parametrize("gain, nf_expected", [(15, 10), (20, 5), (25, 5)])
def test_fixed_gain_nf(gain, nf_expected, setup_edfa_fixed_gain, si):
    """=> unitary test for fixed gain model Edfa._calc_nf() (and Edfa.interpol_params)"""
    edfa = setup_edfa_fixed_gain
    # Attenuate the comb by `gain` dB so the amplifier operates exactly at
    # the requested gain target.
    si.signal /= db2lin(gain)
    si.nli /= db2lin(gain)
    si.ase /= db2lin(gain)
    edfa.operational.gain_target = gain
    si.pref = si.pref._replace(p_span0=0, p_spani=-gain)
    edfa.interpol_params(si)
    assert pytest.approx(nf_expected, abs=0.01) == edfa.nf[0]
def test_si(si, nch_and_spacing):
    """basic total power check of the channel comb generation"""
    nb_channel = nch_and_spacing[0]
    # All carriers are generated with equal power, so total power across
    # signal + ase + nli must equal nb_channel times the first carrier.
    p_tot = sum(si.signal + si.ase + si.nli)
    expected_p_tot = si.signal[0] * nb_channel
    assert pytest.approx(expected_p_tot, abs=0.01) == p_tot
@pytest.mark.parametrize("gain", [17, 19, 21, 23])
def test_compare_nf_models(gain, setup_edfa_variable_gain, si):
    """compare the 2 amplifier models (polynomial and estimated from nf_min and max)
    => nf_model vs nf_poly_fit for intermediate gain values:
    between gain_min and gain_flatmax some discrepancy is expected but target < 0.5dB
    => unitary test for Edfa._calc_nf (and Edfa.interpol_params)"""
    # Renamed from the METHOD_NAME placeholder: without the "test_" prefix
    # pytest never collected or ran this test.
    edfa = setup_edfa_variable_gain
    # Attenuate the comb so the amplifier operates at the requested gain.
    si.signal /= db2lin(gain)
    si.nli /= db2lin(gain)
    si.ase /= db2lin(gain)
    edfa.operational.gain_target = gain
    # edfa is variable gain type
    si.pref = si.pref._replace(p_span0=0, p_spani=-gain)
    edfa.interpol_params(si)
    nf_model = edfa.nf[0]

    # change edfa type variety to a polynomial
    el_config = {
        "uid": "Edfa1",
        "operational": {
            "gain_target": gain,
            "tilt_target": 0
        },
        "metadata": {
            "location": {
                "region": "",
                "latitude": 2,
                "longitude": 0
            }
        }
    }
    equipment = load_equipment(eqpt_library)
    extra_params = equipment['Edfa']['CienaDB_medium_gain']
    temp = el_config.setdefault('params', {})
    temp = merge_amplifier_restrictions(temp, extra_params.__dict__)
    el_config['params'] = temp
    edfa = Edfa(**el_config)

    # edfa is variable gain type
    edfa.interpol_params(si)
    nf_poly = edfa.nf[0]
    print(nf_poly, nf_model)
    # Both NF models must agree to within 0.5 dB at intermediate gains.
    assert pytest.approx(nf_model, abs=0.5) == nf_poly
@pytest.mark.parametrize("gain", [13, 15, 17, 19, 21, 23, 25, 27])
def test_ase_noise(gain, si, setup_trx, bw):
    """testing 3 different ways of calculating osnr:
    1-pin-edfa.nf+58 vs
    2-pout/pase afet propagate
    3-Transceiver osnr_ase_01nm
    => unitary test for Edfa.noise_profile (Edfa.interpol_params, Edfa.propagate)"""
    equipment = load_equipment(eqpt_library)
    network = load_network(test_network, equipment)
    edfa = next(n for n in network.nodes() if n.uid == 'Edfa1')
    span = next(n for n in network.nodes() if n.uid == 'Span1')
    # update span1 and Edfa1 according to new gain before building network
    # updating span 1 avoids to overload amp
    span.params.length = gain * 1e3 / 0.2
    edfa.operational.gain_target = gain
    build_network(network, equipment, 0, 20)
    # Zero the ripple arrays so only the noise model is exercised.
    edfa.gain_ripple = zeros(96)
    edfa.interpol_nf_ripple = zeros(96)
    # propagate in span1 to have si with the correct power level
    si = span(si)
    print(span)
    si.pref = si.pref._replace(p_span0=0, p_spani=-gain)
    edfa.interpol_params(si)
    nf = edfa.nf
    print('nf', nf)
    # Method 1: analytic OSNR from input power and NF (58 = OSNR constant).
    pin = lin2db((si.signal[0] + si.ase[0] + si.nli[0]) * 1e3)
    osnr_expected = pin - nf[0] + 58
    # Method 2: signal-to-ASE ratio after actual propagation, rescaled to
    # the 0.1 nm (12.5 GHz) reference bandwidth.
    si = edfa(si)
    print(edfa)
    osnr = lin2db(si.signal[0] / si.ase[0]) - lin2db(12.5e9 / bw)
    assert pytest.approx(osnr_expected, abs=0.01) == osnr
    # Method 3: the transceiver's own osnr_ase_01nm computation.
    trx = setup_trx
    si = trx(si)
    osnr = trx.osnr_ase_01nm[0]
    assert pytest.approx(osnr_expected, abs=0.01) == osnr
299,253 | test add log | import os
import pytest
from mongoengine import connect
from kairon import Utility
from kairon.shared.data.constant import EVENT_STATUS, REQUIREMENTS, COMPONENT_COUNT
from kairon.shared.data.data_objects import BotSettings
from kairon.shared.multilingual.processor import MultilingualLogProcessor
from kairon.shared.multilingual.data_objects import BotReplicationLogs
from kairon.exceptions import AppException
class TestMultilingualLogProcessor:
    """Integration tests for MultilingualLogProcessor backed by a live MongoDB."""

    @pytest.fixture(scope='session', autouse=True)
    def init(self):
        # Point Utility at the test configuration and open the Mongo
        # connection once for the whole test session.
        os.environ["system_file"] = "./tests/testing_data/system.yaml"
        Utility.load_environment()
        connect(**Utility.mongoengine_connection(Utility.environment['database']["url"]))

    # Renamed from the METHOD_NAME placeholder: without the "test_" prefix
    # this test was never collected or run by pytest.
    def test_add_log(self):
        """A freshly added log has defaults only and INITIATED event status."""
        bot = 'test'
        user = 'test'
        MultilingualLogProcessor.add_log(source_bot=bot, user=user)
        log = BotReplicationLogs.objects(bot=bot).get().to_mongo().to_dict()
        assert not log.get('source_bot_name')
        assert not log.get('destination_bot')
        assert not log.get('s_lang')
        assert not log.get('d_lang')
        assert log.get('copy_type') == 'Translation'
        assert not log.get('account')
        assert log.get('translate_responses')
        assert not log.get('translate_actions')
        assert not log.get('exception')
        assert log.get('start_timestamp')
        assert not log.get('end_timestamp')
        assert not log.get('status')
        assert log.get('event_status') == EVENT_STATUS.INITIATED.value

    def test_add_log_exception(self):
        """A failure log records the exception, end time and FAIL status."""
        bot = 'test'
        user = 'test'
        MultilingualLogProcessor.add_log(bot, user, exception='Translation failed', status='Failure',
                                         event_status=EVENT_STATUS.FAIL.value)
        log = BotReplicationLogs.objects(bot=bot).get().to_mongo().to_dict()
        assert not log.get('source_bot_name')
        assert not log.get('destination_bot')
        assert not log.get('s_lang')
        assert not log.get('d_lang')
        assert log.get('copy_type') == 'Translation'
        assert not log.get('account')
        assert log.get('translate_responses')
        assert not log.get('translate_actions')
        assert log.get('exception') == 'Translation failed'
        assert log.get('start_timestamp')
        assert log.get('end_timestamp')
        assert log.get('status') == 'Failure'
        assert log.get('event_status') == EVENT_STATUS.FAIL.value

    def test_add_log_success(self):
        """A second add_log call completes the log with COMPLETED status."""
        bot = 'test'
        user = 'test'
        MultilingualLogProcessor.add_log(bot, user)
        MultilingualLogProcessor.add_log(bot, user,
                                         status='Success',
                                         event_status=EVENT_STATUS.COMPLETED.value)
        # get_logs returns newest first; log[0] is the just-completed entry.
        log = list(MultilingualLogProcessor.get_logs(bot))
        assert not log[0].get('source_bot_name')
        assert not log[0].get('destination_bot')
        assert not log[0].get('s_lang')
        assert not log[0].get('d_lang')
        assert log[0].get('copy_type') == 'Translation'
        assert not log[0].get('account')
        assert not log[0].get('exception')
        assert log[0].get('translate_responses')
        assert not log[0].get('translate_actions')
        assert log[0].get('start_timestamp')
        assert log[0].get('end_timestamp')
        assert log[0].get('status') == 'Success'
        assert log[0].get('event_status') == EVENT_STATUS.COMPLETED.value

    def test_is_event_in_progress_false(self):
        """No unfinished log means no event in progress."""
        bot = 'test'
        assert not MultilingualLogProcessor.is_event_in_progress(bot)

    def test_is_event_in_progress_true(self):
        """An INITIATED log marks the event as in progress and may raise."""
        bot = 'test'
        user = 'test'
        MultilingualLogProcessor.add_log(bot, user)
        assert MultilingualLogProcessor.is_event_in_progress(bot, False)
        # With raise_exception (the default) an in-progress event raises.
        with pytest.raises(AppException):
            MultilingualLogProcessor.is_event_in_progress(bot)

    def test_is_limit_exceeded_failure(self, monkeypatch):
        """A zero daily limit is always exceeded."""
        bot = 'test_bot'
        bot_settings = BotSettings.objects(bot=bot).get()
        bot_settings.multilingual_limit_per_day = 0
        bot_settings.save()
        assert MultilingualLogProcessor.is_limit_exceeded(bot, False)
        with pytest.raises(AppException, match='Daily limit exceeded.'):
            MultilingualLogProcessor.is_limit_exceeded(bot)

    def test_is_limit_exceeded(self, monkeypatch):
        """A sufficiently large daily limit is not exceeded."""
        bot = 'test_bot'
        bot_settings = BotSettings.objects(bot=bot).get()
        bot_settings.multilingual_limit_per_day = 5
        bot_settings.save()
        assert not MultilingualLogProcessor.is_limit_exceeded(bot)

    def test_get_logs(self):
        """All logs accumulated by the earlier tests are returned."""
        bot = 'test'
        logs = list(MultilingualLogProcessor.get_logs(bot))
        assert len(logs) == 3

    def test_update_log(self):
        """update_summary completes the latest in-progress log."""
        bot = 'test'
        user = 'test'
        MultilingualLogProcessor.add_log(bot, user)
        log = next(MultilingualLogProcessor.get_logs(bot))
        assert not log.get('source_bot_name')
        assert not log.get('destination_bot')
        assert not log.get('s_lang')
        assert not log.get('d_lang')
        assert log.get('copy_type') == 'Translation'
        assert not log.get('account')
        assert not log.get('exception')
        assert log.get('translate_responses')
        assert not log.get('translate_actions')
        assert log.get('start_timestamp')
        assert not log.get('end_timestamp')
        assert not log.get('status')
        assert log.get('event_status') == EVENT_STATUS.INITIATED.value
        destination_bot = 'd_bot'
        MultilingualLogProcessor.update_summary(bot, user, destination_bot=destination_bot, status='Success')
        log = next(MultilingualLogProcessor.get_logs(bot))
        assert not log.get('source_bot_name')
        assert log.get('destination_bot') == 'd_bot'
        assert not log.get('s_lang')
        assert not log.get('d_lang')
        assert log.get('copy_type') == 'Translation'
        assert not log.get('account')
        assert not log.get('exception')
        assert log.get('translate_responses')
        assert not log.get('translate_actions')
        assert log.get('start_timestamp')
        assert log.get('end_timestamp')
        assert log.get('status') == 'Success'
        assert log.get('event_status') == EVENT_STATUS.COMPLETED.value

    def test_update_log_create_new(self):
        """update_summary with no in-progress log creates a completed one."""
        bot = 'test'
        user = 'test'
        destination_bot = 'd_bot'
        MultilingualLogProcessor.update_summary(bot, user, destination_bot)
        log = next(MultilingualLogProcessor.get_logs(bot))
        assert not log.get('source_bot_name')
        assert log.get('destination_bot') == 'd_bot'
        assert not log.get('s_lang')
        assert not log.get('d_lang')
        assert log.get('copy_type') == 'Translation'
        assert not log.get('account')
        assert not log.get('exception')
        assert log.get('translate_responses')
        assert not log.get('translate_actions')
        assert log.get('start_timestamp')
        assert log.get('end_timestamp')
        assert not log.get('status')
        assert log.get('event_status') == EVENT_STATUS.COMPLETED.value
299,254 | arn | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetSiteResult',
'AwaitableGetSiteResult',
'get_site',
'get_site_output',
]
@pulumi.output_type
class GetSiteResult:
    """
    A collection of values returned by getSite.
    """
    # The METHOD_NAME placeholder is restored to `arn`, matching the "arn"
    # key used by pulumi.set/pulumi.get throughout this generated type.
    def __init__(__self__, arn=None, description=None, global_network_id=None, id=None, locations=None, site_id=None, tags=None):
        if arn and not isinstance(arn, str):
            raise TypeError("Expected argument 'arn' to be a str")
        pulumi.set(__self__, "arn", arn)
        if description and not isinstance(description, str):
            raise TypeError("Expected argument 'description' to be a str")
        pulumi.set(__self__, "description", description)
        if global_network_id and not isinstance(global_network_id, str):
            raise TypeError("Expected argument 'global_network_id' to be a str")
        pulumi.set(__self__, "global_network_id", global_network_id)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if locations and not isinstance(locations, list):
            raise TypeError("Expected argument 'locations' to be a list")
        pulumi.set(__self__, "locations", locations)
        if site_id and not isinstance(site_id, str):
            raise TypeError("Expected argument 'site_id' to be a str")
        pulumi.set(__self__, "site_id", site_id)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)

    @property
    @pulumi.getter
    def arn(self) -> str:
        """
        ARN of the site.
        """
        return pulumi.get(self, "arn")

    @property
    @pulumi.getter
    def description(self) -> str:
        """
        Description of the site.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="globalNetworkId")
    def global_network_id(self) -> str:
        return pulumi.get(self, "global_network_id")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The provider-assigned unique ID for this managed resource.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def locations(self) -> Sequence['outputs.GetSiteLocationResult']:
        """
        Site location as documented below.
        """
        return pulumi.get(self, "locations")

    @property
    @pulumi.getter(name="siteId")
    def site_id(self) -> str:
        return pulumi.get(self, "site_id")

    @property
    @pulumi.getter
    def tags(self) -> Mapping[str, str]:
        """
        Key-value tags for the Site.
        """
        return pulumi.get(self, "tags")


class AwaitableGetSiteResult(GetSiteResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetSiteResult(
            arn=self.arn,
            description=self.description,
            global_network_id=self.global_network_id,
            id=self.id,
            locations=self.locations,
            site_id=self.site_id,
            tags=self.tags)


def get_site(global_network_id: Optional[str] = None,
             site_id: Optional[str] = None,
             tags: Optional[Mapping[str, str]] = None,
             opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSiteResult:
    """
    Retrieve information about a site.
    ## Example Usage
    ```python
    import pulumi
    import pulumi_aws as aws

    example = aws.networkmanager.get_site(global_network_id=var["global_network_id"],
        site_id=var["site_id"])
    ```

    :param str global_network_id: ID of the Global Network of the site to retrieve.
    :param str site_id: ID of the specific site to retrieve.
    :param Mapping[str, str] tags: Key-value tags for the Site.
    """
    __args__ = dict()
    __args__['globalNetworkId'] = global_network_id
    __args__['siteId'] = site_id
    __args__['tags'] = tags
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    __ret__ = pulumi.runtime.invoke('aws:networkmanager/getSite:getSite', __args__, opts=opts, typ=GetSiteResult).value
    return AwaitableGetSiteResult(
        arn=pulumi.get(__ret__, 'arn'),
        description=pulumi.get(__ret__, 'description'),
        global_network_id=pulumi.get(__ret__, 'global_network_id'),
        id=pulumi.get(__ret__, 'id'),
        locations=pulumi.get(__ret__, 'locations'),
        site_id=pulumi.get(__ret__, 'site_id'),
        tags=pulumi.get(__ret__, 'tags'))
@_utilities.lift_output_func(get_site)
def get_site_output(global_network_id: Optional[pulumi.Input[str]] = None,
                    site_id: Optional[pulumi.Input[str]] = None,
                    tags: Optional[pulumi.Input[Optional[Mapping[str, str]]]] = None,
                    opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetSiteResult]:
    """
    Retrieve information about a site.
    ## Example Usage
    ```python
    import pulumi
    import pulumi_aws as aws

    example = aws.networkmanager.get_site(global_network_id=var["global_network_id"],
        site_id=var["site_id"])
    ```

    :param str global_network_id: ID of the Global Network of the site to retrieve.
    :param str site_id: ID of the specific site to retrieve.
    :param Mapping[str, str] tags: Key-value tags for the Site.
    """
    # Body intentionally empty: lift_output_func wraps get_site so it accepts
    # pulumi Inputs/Outputs and returns an Output of the plain result.
    ...
299,255 | get oauth server | import json
from urllib.parse import quote, urlencode
from oauthlib.oauth2 import FatalClientError, OAuth2Error
from oauthlib.openid.connect.core.endpoints.pre_configured import Server as WebApplicationServer
import frappe
from frappe.integrations.doctype.oauth_provider_settings.oauth_provider_settings import (
get_oauth_settings,
)
from frappe.oauth import (
OAuthWebRequestValidator,
generate_json_error_response,
get_server_url,
get_userinfo,
)
def get_oauth_server():
	"""Return the request-local OAuth2/OpenID Connect server, creating it lazily.

	The server (oauthlib's pre-configured Server with Frappe's request
	validator) is cached on frappe.local so it is built at most once per
	request.  Renamed from the METHOD_NAME placeholder.
	"""
	if not getattr(frappe.local, "oauth_server", None):
		oauth_validator = OAuthWebRequestValidator()
		frappe.local.oauth_server = WebApplicationServer(oauth_validator)

	return frappe.local.oauth_server


def sanitize_kwargs(param_kwargs):
	"""Remove 'data' and 'cmd' keys, if present."""
	arguments = param_kwargs
	arguments.pop("data", None)
	arguments.pop("cmd", None)

	return arguments


def encode_params(params):
	"""
	Encode a dict of params into a query string.

	Use `quote_via=urllib.parse.quote` so that whitespaces will be encoded as
	`%20` instead of as `+`. This is needed because oauthlib cannot handle `+`
	as a whitespace.
	"""
	return urlencode(params, quote_via=quote)


@frappe.whitelist()
def approve(*args, **kwargs):
	"""Complete a consented authorization request and redirect to the client.

	Re-validates the request, creates the authorization response and
	redirects the browser to the client's redirect_uri (with code/token).
	"""
	r = frappe.request

	try:
		(scopes, frappe.flags.oauth_credentials,) = get_oauth_server().validate_authorization_request(
			r.url, r.method, r.get_data(), r.headers
		)

		headers, body, status = get_oauth_server().create_authorization_response(
			uri=frappe.flags.oauth_credentials["redirect_uri"],
			body=r.get_data(),
			headers=r.headers,
			scopes=scopes,
			credentials=frappe.flags.oauth_credentials,
		)
		uri = headers.get("Location", None)

		frappe.local.response["type"] = "redirect"
		frappe.local.response["location"] = uri
		return
	except (FatalClientError, OAuth2Error) as e:
		return generate_json_error_response(e)


@frappe.whitelist(allow_guest=True)
def authorize(**kwargs):
	"""Authorization endpoint: log the user in, then confirm or auto-approve.

	Guests are redirected to the login page and back; logged-in users either
	skip straight to `approve` (trusted client / auto skip) or are shown the
	Allow/Deny confirmation screen.
	"""
	success_url = "/api/method/frappe.integrations.oauth2.approve?" + encode_params(
		sanitize_kwargs(kwargs)
	)
	failure_url = frappe.form_dict["redirect_uri"] + "?error=access_denied"

	if frappe.session.user == "Guest":
		# Force login, redirect to preauth again.
		frappe.local.response["type"] = "redirect"
		frappe.local.response["location"] = "/login?" + encode_params(
			{"redirect-to": frappe.request.url}
		)
	else:
		try:
			r = frappe.request
			(scopes, frappe.flags.oauth_credentials,) = get_oauth_server().validate_authorization_request(
				r.url, r.method, r.get_data(), r.headers
			)

			skip_auth = frappe.db.get_value(
				"OAuth Client",
				frappe.flags.oauth_credentials["client_id"],
				"skip_authorization",
			)
			unrevoked_tokens = frappe.get_all("OAuth Bearer Token", filters={"status": "Active"})

			if skip_auth or (get_oauth_settings().skip_authorization == "Auto" and unrevoked_tokens):
				frappe.local.response["type"] = "redirect"
				frappe.local.response["location"] = success_url
			else:
				if "openid" in scopes:
					# Present the openid scope as the concrete claims it grants.
					scopes.remove("openid")
					scopes.extend(["Full Name", "Email", "User Image", "Roles"])

				# Show Allow/Deny screen.
				response_html_params = frappe._dict(
					{
						"client_id": frappe.db.get_value("OAuth Client", kwargs["client_id"], "app_name"),
						"success_url": success_url,
						"failure_url": failure_url,
						"details": scopes,
					}
				)
				resp_html = frappe.render_template(
					"templates/includes/oauth_confirmation.html", response_html_params
				)

				frappe.respond_as_web_page("Confirm Access", resp_html, primary_action=None)
		except (FatalClientError, OAuth2Error) as e:
			return generate_json_error_response(e)


@frappe.whitelist(allow_guest=True)
def get_token(*args, **kwargs):
	"""Token endpoint: exchange an authorization grant for an access token."""
	try:
		r = frappe.request
		headers, body, status = get_oauth_server().create_token_response(
			r.url, r.method, r.form, r.headers, frappe.flags.oauth_credentials
		)
		body = frappe._dict(json.loads(body))

		if body.error:
			frappe.local.response = body
			frappe.local.response["http_status_code"] = 400
			return

		frappe.local.response = body
		return
	except (FatalClientError, OAuth2Error) as e:
		return generate_json_error_response(e)


@frappe.whitelist(allow_guest=True)
def revoke_token(*args, **kwargs):
	"""Token revocation endpoint (RFC 7009): always responds with HTTP 200."""
	# Fix: pre-bind `status` so a failure raised before the assignment below
	# cannot cause a NameError when it is read after the except block.
	status = None
	try:
		r = frappe.request
		headers, body, status = get_oauth_server().create_revocation_response(
			r.url,
			headers=r.headers,
			body=r.form,
			http_method=r.method,
		)
	except (FatalClientError, OAuth2Error):
		pass

	# status_code must be 200
	frappe.local.response = frappe._dict({})
	frappe.local.response["http_status_code"] = status or 200
	return


@frappe.whitelist()
def openid_profile(*args, **kwargs):
	"""OpenID Connect UserInfo endpoint: return claims for the bearer token."""
	try:
		r = frappe.request
		headers, body, status = get_oauth_server().create_userinfo_response(
			r.url,
			headers=r.headers,
			body=r.form,
		)
		body = frappe._dict(json.loads(body))
		frappe.local.response = body
		return
	except (FatalClientError, OAuth2Error) as e:
		return generate_json_error_response(e)
@frappe.whitelist(allow_guest=True)
def openid_configuration():
	"""Serve the OpenID Connect discovery metadata for this server.

	All endpoint URLs are derived from the current server URL and point at
	the whitelisted methods defined in this module.
	"""
	frappe_server_url = get_server_url()
	frappe.local.response = frappe._dict(
		{
			"issuer": frappe_server_url,
			"authorization_endpoint": f"{frappe_server_url}/api/method/frappe.integrations.oauth2.authorize",
			"token_endpoint": f"{frappe_server_url}/api/method/frappe.integrations.oauth2.get_token",
			"userinfo_endpoint": f"{frappe_server_url}/api/method/frappe.integrations.oauth2.openid_profile",
			"revocation_endpoint": f"{frappe_server_url}/api/method/frappe.integrations.oauth2.revoke_token",
			"introspection_endpoint": f"{frappe_server_url}/api/method/frappe.integrations.oauth2.introspect_token",
			"response_types_supported": [
				"code",
				"token",
				"code id_token",
				"code token id_token",
				"id_token",
				"id_token token",
			],
			"subject_types_supported": ["public"],
			"id_token_signing_alg_values_supported": ["HS256"],
		}
	)
@frappe.whitelist(allow_guest=True)
def introspect_token(token=None, token_type_hint=None):
	"""Token introspection endpoint: report whether *token* is active.

	Looks the token up as an access or refresh token (per the hint) and
	returns client, expiry and scope details; OpenID tokens additionally
	get subject and userinfo claims.
	"""
	# Any unknown or missing hint falls back to access_token.
	if token_type_hint not in ["access_token", "refresh_token"]:
		token_type_hint = "access_token"
	try:
		bearer_token = None
		if token_type_hint == "access_token":
			bearer_token = frappe.get_doc("OAuth Bearer Token", {"access_token": token})
		elif token_type_hint == "refresh_token":
			bearer_token = frappe.get_doc("OAuth Bearer Token", {"refresh_token": token})

		client = frappe.get_doc("OAuth Client", bearer_token.client)

		token_response = frappe._dict(
			{
				"client_id": client.client_id,
				"trusted_client": client.skip_authorization,
				"active": bearer_token.status == "Active",
				"exp": round(bearer_token.expiration_time.timestamp()),
				"scope": bearer_token.scopes,
			}
		)

		if "openid" in bearer_token.scopes:
			sub = frappe.get_value(
				"User Social Login",
				{"provider": "frappe", "parent": bearer_token.user},
				"userid",
			)

			if sub:
				token_response.update({"sub": sub})
				user = frappe.get_doc("User", bearer_token.user)
				userinfo = get_userinfo(user)
				token_response.update(userinfo)

		frappe.local.response = token_response

	except Exception:
		# NOTE(review): any failure (unknown token, lookup error) is reported
		# simply as an inactive token, avoiding detail leakage -- confirm
		# this mirrors RFC 7662's intended behaviour for this deployment.
		frappe.local.response = frappe._dict({"active": False})
299,256 | test create table | import time
import os
import shutil
import tarfile
import pytest
import docker
from mindsdb.integrations.handlers.mysql_handler.mysql_handler import MySQLHandler
from mindsdb.api.mysql.mysql_proxy.libs.constants.response_type import RESPONSE_TYPE
# Connection settings for the MySQL server started by the `handler` fixture
# below (host port 3307 is mapped to the container's 3306).  `ssl` stays
# False for the plain run; get_certificates() adds cert paths for the SSL run.
HANDLER_KWARGS = {
    "connection_data": {
        "host": "localhost",
        "port": "3307",
        "user": "root",
        "password": "supersecret",
        "database": "test",
        "ssl": False
    }
}

# Name of the tar archive copied out of the container, and the directory the
# client certificates are extracted into (both live next to this module).
CERTS_ARCHIVE = "certs.tar"
CERTS_DIR = "mysql"
def get_certs():
    """Map SSL connection options to the certificate files next to this module.

    Returns a dict with the keys ``ssl_ca``, ``ssl_cert`` and ``ssl_key``,
    each pointing into the ``mysql`` directory beside this file.
    """
    base_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "mysql")
    filenames = {
        "ssl_ca": "ca.pem",
        "ssl_cert": "client-cert.pem",
        "ssl_key": "client-key.pem",
    }
    return {option: os.path.join(base_dir, fname) for option, fname in filenames.items()}
def get_certificates(container):
    """Copy /var/lib/mysql out of *container* and register the SSL certs.

    Writes the tar archive next to this module, unpacks it, and merges the
    resulting client certificate paths into HANDLER_KWARGS so subsequent
    connections use SSL.
    """
    cur_dir = os.path.dirname(os.path.abspath(__file__))
    archive_path = os.path.join(cur_dir, CERTS_ARCHIVE)
    with open(archive_path, "wb") as f:
        bits, _ = container.get_archive('/var/lib/mysql')
        for chunk in bits:
            f.write(chunk)

    with tarfile.open(archive_path) as tf:
        # NOTE(review): extractall() on an untrusted archive is vulnerable to
        # path traversal.  The archive comes from our own test container, but
        # consider tarfile's `filter="data"` once the minimum Python allows it.
        tf.extractall(path=cur_dir)
    certs = get_certs()
    HANDLER_KWARGS["connection_data"].update(certs)
def waitReadiness(container, timeout=30):
    """Block until the MySQL server inside *container* accepts connections.

    MySQL restarts once during first-time initialisation, so the
    "ready for connections" line must appear twice in the container logs
    before the server is considered fully up.

    Args:
        container: docker container object exposing ``logs()``.
        timeout: maximum number of seconds to wait before giving up.

    Raises:
        Exception: if the server is not ready within *timeout* seconds.
    """
    threshold = time.time() + timeout
    ready_msg = "/usr/sbin/mysqld: ready for connections. Version: '8.0.27'"
    while True:
        lines = container.logs().decode()
        # container fully ready:
        # because it reloads the db server during initialization we need to
        # check that 'ready for connections' has been found a second time
        if lines.count(ready_msg) >= 2:
            break
        if time.time() > threshold:
            raise Exception("timeout exceeded, container is still not ready")
        # Fix: previously this polled in a tight busy loop; sleep between
        # polls so we don't hammer the docker API.
        time.sleep(1)
@pytest.fixture(scope="module", params=[{"ssl": False}, {"ssl": True}], ids=["NoSSL", "SSL"])
def handler(request):
    """Module-scoped fixture providing a MySQLHandler connected to a dockerised
    MySQL server; parametrised to run the whole module without and with SSL."""
    image_name = "mindsdb/mysql-handler-test"
    docker_client = docker.from_env()
    with_ssl = request.param["ssl"]
    container = None
    try:
        container = docker_client.containers.run(
            image_name,
            command="--secure-file-priv=/",
            detach=True,
            environment={"MYSQL_ROOT_PASSWORD": "supersecret"},
            ports={"3306/tcp": 3307},
        )
        waitReadiness(container)
    except Exception as e:
        # Don't leak a half-started container before re-raising.
        if container is not None:
            container.kill()
        raise e
    if with_ssl:
        # Pull the client certificates out of the container and register them
        # in HANDLER_KWARGS before the handler connects.
        get_certificates(container)
    handler = MySQLHandler('test_mysql_handler', **HANDLER_KWARGS)
    yield handler
    # Teardown: stop the container and remove any extracted certificate files.
    container.kill()
    docker_client.close()
    if with_ssl:
        cur_dir = os.path.dirname(os.path.abspath(__file__))
        try:
            os.remove(os.path.join(cur_dir, CERTS_ARCHIVE))
            shutil.rmtree(os.path.join(cur_dir, CERTS_DIR))
        except Exception as e:
            print(f"unable to delete .tar/files of certificates: {e}")
class TestMySQLHandler:
    """Integration tests exercising MySQLHandler against the dockerised server
    provided by the ``handler`` fixture (each test runs with and without SSL)."""

    def test_connect(self, handler):
        handler.connect()
        assert handler.is_connected, "connection error"

    def test_check_connection(self, handler):
        res = handler.check_connection()
        assert res.success, res.error_message

    def test_native_query_show_dbs(self, handler):
        # The configured test database must appear in SHOW DATABASES output.
        dbs = handler.native_query("SHOW DATABASES;")
        dbs = dbs.data_frame
        assert dbs is not None, "expected to get some data, but got None"
        assert 'Database' in dbs, f"expected to get 'Database' column in response:\n{dbs}"
        dbs = list(dbs["Database"])
        expected_db = HANDLER_KWARGS["connection_data"]["database"]
        assert expected_db in dbs, f"expected to have {expected_db} db in response: {dbs}"

    def test_get_tables(self, handler):
        tables = self.get_table_names(handler)
        assert "rentals" in tables, f"expected to have 'rentals' table in the db but got: {tables}"

    def test_describe_table(self, handler):
        described = handler.get_columns("rentals")
        describe_data = described.data_frame
        self.check_valid_response(described)
        # First column of the describe result holds the column names.
        got_columns = list(describe_data.iloc[:, 0])
        want_columns = ["number_of_rooms", "number_of_bathrooms",
                        "sqft", "location", "days_on_market",
                        "initial_price", "neighborhood", "rental_price"]
        assert got_columns == want_columns, f"expected to have next columns in rentals table:\n{want_columns}\nbut got:\n{got_columns}"

    def METHOD_NAME(self, handler):
        # Creating a table must succeed and make it visible via get_tables().
        new_table = "test_mdb"
        res = handler.native_query(f"CREATE TABLE IF NOT EXISTS {new_table} (test_col INT)")
        self.check_valid_response(res)
        tables = self.get_table_names(handler)
        assert new_table in tables, f"expected to have {new_table} in database, but got: {tables}"

    def test_drop_table(self, handler):
        # NOTE(review): drops "test_md" while the create-test above makes
        # "test_mdb" — confirm whether the name mismatch is intentional.
        drop_table = "test_md"
        res = handler.native_query(f"DROP TABLE IF EXISTS {drop_table}")
        self.check_valid_response(res)
        tables = self.get_table_names(handler)
        assert drop_table not in tables

    def test_select_query(self, handler):
        limit = 5
        query = f"SELECT * FROM rentals WHERE number_of_rooms = 2 LIMIT {limit}"
        res = handler.query(query)
        self.check_valid_response(res)
        got_rows = res.data_frame.shape[0]
        want_rows = limit
        assert got_rows == want_rows, f"expected to have {want_rows} rows in response but got: {got_rows}"

    def check_valid_response(self, res):
        # Helper: a TABLE response must carry data; any response must be error-free.
        if res.resp_type == RESPONSE_TYPE.TABLE:
            assert res.data_frame is not None, "expected to have some data, but got None"
        assert res.error_code == 0, f"expected to have zero error_code, but got {res.error_code}"
        assert res.error_message is None, f"expected to have None in error message, but got {res.error_message}"

    def get_table_names(self, handler):
        # Helper: list of table names in the test database.
        res = handler.get_tables()
        tables = res.data_frame
        assert tables is not None, "expected to have some tables in the db, but got None"
        assert 'table_name' in tables, f"expected to get 'table_name' column in the response:\n{tables}"
        return list(tables['table_name'])
299,257 | to565 | ################################################################################
# Copyright (C) 2023 Maxim Integrated Products, Inc., All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL MAXIM INTEGRATED BE LIABLE FOR ANY CLAIM, DAMAGES
# OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# Except as contained in this notice, the name of Maxim Integrated
# Products, Inc. shall not be used except as stated in the Maxim Integrated
# Products, Inc. Branding Policy.
#
# The mere transfer of this software does not imply any licenses
# of trade secrets, proprietary technology, copyrights, patents,
# trademarks, maskwork rights, or any other form of intellectual
# property whatsoever. Maxim Integrated Products, Inc. retains all
# ownership rights.
#
###############################################################################
"""
Creates a C file of RGB565 pixel format resized to 320x240 from a JPEG, or 24bit BMP file.
"""
from string import Template
import sys
from PIL import Image, ImageOps
# Number of pixels emitted per line in the generated C array.
ROWSIZE = 8

# Convert 24 bit RGB to 16 bit 565 format
def METHOD_NAME(pixel):
    """Pack a 24-bit (R, G, B) pixel into a 16-bit RGB565 value.

    The top 5 bits of red, 6 bits of green and 5 bits of blue are kept and
    packed as RRRRRGGGGGGBBBBB.
    """
    r, g, b = pixel[0], pixel[1], pixel[2]
    return ((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3)
# ---- command-line handling: require an input file, else print usage --------
if len(sys.argv) < 2:
    print('Error: No input file!\nUsage:')
    print(' python bmp2c.py image_24bit.bmp [-r][-s][-f][-m]')
    print(' python bmp2c.py image.jpg [-r][-s][-f][-m]')
    print(' -r: rotate 90 degrees')
    print(' -s: rescale to fit the TFT size (320x240)')
    print(' -f: flip vertically')
    print(' -m: mirror horizontally')
    sys.exit()

# Load image from command line
file = sys.argv[1]
img = Image.open(file)
imgname = file

# Info
print(f" Name: {imgname} \n Size: {img.size} \n Format: {img.format}")
# print(f" Mode: {img.mode} \n Info: {img.info}\n")

# Non-BMP inputs (e.g. JPEG) are first converted to a temporary BMP on disk.
if img.format != 'BMP':
    print('Converted to BMP')
    img.save("temp.bmp", "BMP")
    img = Image.open("temp.bmp")

# Only 24-bit RGB input is supported by the 565 conversion below.
if img.mode != 'RGB':
    print('Error: Input file mode should be 24bit RGB')
    sys.exit()

# Apply the optional transformations for each flag present on the command line.
for arg in sys.argv:
    # Rotation needed?
    if arg == '-r':
        img = img.rotate(90)
        print('Image rotated')
    # Scale needed?
    if arg == '-s':
        print('Image resized to (320,240)')
        img = img.resize((320, 240))
    # Flip needed?
    if arg == '-f':
        img = ImageOps.flip(img)
        print('Image flipped')
    # Mirror needed?
    if arg == '-m':
        img = ImageOps.mirror(img)
        print('Image mirrored')

# Make sure both dimensions are even (round down to the nearest even size).
img = img.resize((2*(img.size[0]//2), 2*(img.size[1]//2)))
print(f'Processing {img.size} image')
img.save("temp.bmp", "BMP")

# The target TFT is 320x240; larger images must be rescaled with -s.
if (img.size[0] > 320 or img.size[1] > 240):
    print(f'Error: image size cannot be greater than 320x240: use -s to rescale')
    sys.exit()

# Read image data (flat list of (R, G, B) tuples).
imgdata = list(img.getdata())

# Open the C-file template
templatefile = open("template.txt", "r")
template = Template(templatefile.read())

# Build the template parameter list
data = {}
data['imgname'] = imgname
data['imgnamecaps'] = imgname.upper()
# 2 bytes per RGB565 pixel plus 4 header bytes for width and height.
data['imglen'] = 2*img.size[0] * img.size[1] + 4  # width and height
# 4-byte little-endian header: width then height.
data['imgsize'] = \
    ''.join([' 0x%.2X, 0x%.2X, 0x%.2X, 0x%.2X, // size: (%d, %d)' % (
        img.size[0] & 0xFF,
        img.size[0] >> 8 & 0xFF,
        img.size[1] & 0xFF,
        img.size[1] >> 8 & 0xFF,
        img.size[0],
        img.size[1],
    )])
# Pixel data: each RGB565 value emitted low byte first, ROWSIZE pixels per line.
data['imgdata'] = ',\n\t'.join([', '.join(['0x%.2X, 0x%.2X' % (METHOD_NAME(x)
    & 0xFF, METHOD_NAME(x) >> 8 & 0xFF) for x in
    imgdata[y:y + ROWSIZE]]) for y in
    range(0, len(imgdata), ROWSIZE)])

# Write the generated C file
outputfile = open("image_rgb565" + ".c", "w")
outputfile.write(template.substitute(data))
outputfile.close()

# Save the resized image
img.save("out.bmp")
299,258 | test spatial model | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import pytest
import numpy as np
from numpy.testing import assert_allclose
import astropy.units as u
from gammapy.catalog import SourceCatalog1LHAASO
from gammapy.modeling.models import (
GaussianSpatialModel,
PointSpatialModel,
PowerLawNormSpectralModel,
PowerLawSpectralModel,
TemplateSpatialModel,
)
from gammapy.utils.testing import requires_data
@pytest.fixture(scope="session")
def lhaaso1():
    """Session-scoped fixture returning the 1LHAASO source catalog."""
    return SourceCatalog1LHAASO()
@requires_data()
class TestSourceCatalog1LHAASO:
    """Catalog-level tests: table size, positions, and model extraction."""

    @staticmethod
    def test_source_table(lhaaso1):
        assert lhaaso1.tag == "1LHAASO"
        assert len(lhaaso1.table) == 90

    @staticmethod
    def test_positions(lhaaso1):
        assert len(lhaaso1.positions) == 90

    @staticmethod
    def test_to_models(lhaaso1):
        # "both" yields one model per catalog row.
        models = lhaaso1.to_models(which="both")
        assert len(models) == 90
        # KM2A models use a 50 TeV reference energy.
        models = lhaaso1.to_models(which="KM2A")
        assert np.all(
            [m.spectral_model.reference.quantity == 50 * u.TeV for m in models]
        )
        assert len(models) == 75
        # WCDA models use a 3 TeV reference energy.
        models = lhaaso1.to_models(which="WCDA")
        assert np.all(
            [m.spectral_model.reference.quantity == 3 * u.TeV for m in models]
        )
        assert len(models) == 69
@requires_data()
class TestSourceCatalogObject1LHAASO:
    """Per-source tests: raw data fields, positions, sky/spectral/spatial models."""
    # NOTE(review): the unit strings below (e.g. "cmβ2 sβ1 TeVβ1") look like
    # mis-encoded superscripts ("cm^-2 s^-1 TeV^-1") — confirm against the
    # upstream file before relying on them; they are left untouched here.

    @staticmethod
    def test_data(lhaaso1):
        # Source 0 is a KM2A detection.
        assert lhaaso1[0].data["Source_Name"] == "1LHAASO J0007+5659u"
        assert "KM2A" in lhaaso1[0].data["Model_a"]
        assert_allclose(lhaaso1[0].data["r39_ul"].value, 0.18)
        assert lhaaso1[0].data["r39_ul"].unit == u.deg
        assert_allclose(lhaaso1[0].data["N0"].value, 0.33e-16)
        assert lhaaso1[0].data["N0"].unit == u.Unit("cmβ2 sβ1 TeVβ1")
        assert_allclose(lhaaso1[0].data["N0_err"].value, 0.05e-16)
        assert lhaaso1[0].data["N0_err"].unit == u.Unit("cmβ2 sβ1 TeVβ1")
        assert_allclose(lhaaso1[0].data["N0_ul_b"].value, 0.27e-13)
        assert lhaaso1[0].data["N0_ul_b"].unit == u.Unit("cmβ2 sβ1 TeVβ1")
        # Association fields.
        assert lhaaso1[1].data["ASSO_Name"] == "CTA 1"
        assert_allclose(lhaaso1[1].data["ASSO_Sep"].value, 0.12)
        assert lhaaso1[0].data["ASSO_Sep"].unit == u.deg
        # Source 10 is a WCDA detection with positional/extension data.
        assert lhaaso1[10].data["Source_Name"] == "1LHAASO J0428+5531*"
        assert "WCDA" in lhaaso1[10].data["Model_a"]
        assert_allclose(lhaaso1[10].data["RAJ2000"].value, 67.23)
        assert_allclose(lhaaso1[10].data["DECJ2000"].value, 55.53)
        assert_allclose(lhaaso1[10].data["pos_err"].value, 0.36)
        assert lhaaso1[10].data["RAJ2000"].unit == u.deg
        assert lhaaso1[10].data["DECJ2000"].unit == u.deg
        assert lhaaso1[10].data["pos_err"].unit == u.deg
        assert_allclose(lhaaso1[10].data["r39"].value, 1.18)
        assert_allclose(lhaaso1[10].data["r39_b"].value, 0.32)
        assert lhaaso1[10].data["r39_b"].unit == u.deg
        assert_allclose(lhaaso1[10].data["r39_err"].value, 0.12)
        assert_allclose(lhaaso1[10].data["r39_err_b"].value, 0.06)
        assert lhaaso1[10].data["r39_err_b"].unit == u.deg

    @staticmethod
    def test_position(lhaaso1):
        position = lhaaso1[0].position
        assert_allclose(position.ra.deg, 1.86, atol=1e-3)
        assert_allclose(position.dec.deg, 57.00, atol=1e-3)

    @staticmethod
    def test_sky_model(lhaaso1):
        # The model class depends on which instrument(s) are requested.
        model = lhaaso1[0].sky_model("both")
        assert model.name == "1LHAASO J0007+5659u"
        assert isinstance(model.spectral_model, PowerLawSpectralModel)
        assert isinstance(model.spatial_model, PointSpatialModel)
        assert lhaaso1[0].sky_model("WCDA") is None

        model = lhaaso1[1].sky_model("both")
        assert model.name == "1LHAASO J0007+7303u"
        assert isinstance(model.spectral_model, PowerLawNormSpectralModel)
        assert isinstance(model.spatial_model, TemplateSpatialModel)

        model = lhaaso1[1].sky_model("KM2A")
        assert model.name == "1LHAASO J0007+7303u"
        assert isinstance(model.spectral_model, PowerLawSpectralModel)
        assert isinstance(model.spatial_model, GaussianSpatialModel)

        model = lhaaso1[1].sky_model("WCDA")
        assert model.name == "1LHAASO J0007+7303u"
        assert isinstance(model.spectral_model, PowerLawSpectralModel)
        assert isinstance(model.spatial_model, PointSpatialModel)

        model = lhaaso1[11].sky_model("both")
        assert model.name == "1LHAASO J0500+4454"
        assert isinstance(model.spectral_model, PowerLawSpectralModel)
        assert isinstance(model.spatial_model, GaussianSpatialModel)

    @staticmethod
    def test_spectral_model(lhaaso1):
        # Flux and error at the instrument reference energies.
        m = lhaaso1[0].spectral_model("KM2A")
        dnde, dnde_err = m.evaluate_error(50 * u.TeV)
        assert dnde.unit == "cm-2 s-1 TeV-1"
        assert_allclose(dnde.value, 0.33e-16, rtol=1e-3)
        assert_allclose(dnde_err.value, 0.05e-16, rtol=1e-3)

        m = lhaaso1[11].spectral_model("WCDA")
        dnde, dnde_err = m.evaluate_error(3 * u.TeV)
        assert dnde.unit == "cm-2 s-1 TeV-1"
        assert_allclose(dnde.value, 0.69e-13, rtol=1e-3)
        assert_allclose(dnde_err.value, 0.16e-13, rtol=1e-3)

    @staticmethod
    def METHOD_NAME(lhaaso1):
        # Point source in fk5 with positional errors.
        m = lhaaso1[0].spatial_model("KM2A")
        assert isinstance(m, PointSpatialModel)
        assert m.lon_0.unit == "deg"
        assert_allclose(m.lon_0.value, 1.86, atol=1e-2)
        assert_allclose(m.lon_0.error, 0.09, atol=1e-2)
        assert m.lat_0.unit == "deg"
        assert_allclose(m.lat_0.value, 57.00, atol=1e-2)
        assert_allclose(m.lat_0.error, 0.049, atol=1e-2)
        assert m.frame == "fk5"

        # Extended (Gaussian) source.
        m = lhaaso1[11].spatial_model("WCDA")
        assert isinstance(m, GaussianSpatialModel)
        assert m.lon_0.unit == "deg"
        assert_allclose(m.lon_0.value, 75.01, atol=1e-10)
        assert m.lat_0.unit == "deg"
        assert_allclose(m.lat_0.value, 44.92, atol=1e-10)
        assert m.frame == "fk5"
        assert m.sigma.unit == "deg"
        assert_allclose(m.sigma.value, 0.41, atol=1e-3)

        # Position error ellipse: diameter is twice the positional error.
        model = lhaaso1["1LHAASO J0007+5659u"].spatial_model("KM2A")
        pos_err = model.position_error
        assert_allclose(pos_err.height.value, 2 * 0.12, rtol=1e-4)
        assert_allclose(pos_err.width.value, 2 * 0.12, rtol=1e-4)
        assert_allclose(model.position.ra.value, pos_err.center.ra.value)
        assert_allclose(model.position.dec.value, pos_err.center.dec.value)
299,259 | rate select | from boxbranding import getBoxType, getMachineName, getHaveRCA, getHaveDVI, getHaveSCART, getHaveAVJACK
from Screens.Wizard import WizardSummary
from Screens.WizardLanguage import WizardLanguage
from Screens.Rc import Rc
from Components.AVSwitch import iAVSwitch as iAV
from Components.Pixmap import Pixmap
from Components.config import config, ConfigBoolean, configfile
from Components.SystemInfo import SystemInfo
from Tools.Directories import resolveFilename, SCOPE_SKIN, SCOPE_CURRENT_SKIN
from Tools.HardwareInfo import HardwareInfo
# Persistent flag: whether to show the test card screen after the wizard.
config.misc.showtestcard = ConfigBoolean(default=False)
class VideoWizardSummary(WizardSummary):
    """LCD summary screen shown while the video wizard is active."""

    def __init__(self, session, parent):
        WizardSummary.__init__(self, session, parent)

    def setLCDPicCallback(self):
        # Ask the parent wizard to forward its text updates to this summary.
        self.parent.setLCDTextCallback(self.setText)

    def setLCDPic(self, file):
        self["pic"].instance.setPixmapFromFile(file)
class VideoWizard(WizardLanguage, Rc):
    """First-boot wizard guiding the user through video output selection.

    Each selection (port, mode, refresh rate) is applied live via iAV so the
    user immediately sees the effect; markDone() persists the final choice.
    """

    skin = """
	<screen position="fill" title="Welcome..." flags="wfNoBorder" >
		<panel name="WizardMarginsTemplate"/>
		<panel name="WizardPictureLangTemplate"/>
		<panel name="RemoteControlTemplate"/>
		<panel position="left" size="10,*" />
		<panel position="right" size="10,*" />
		<panel position="fill">
			<widget name="text" position="top" size="*,270" font="Regular;23" valign="center" />
			<panel position="fill">
				<panel position="left" size="150,*">
					<widget name="portpic" position="top" zPosition="10" size="150,150" transparent="1" alphatest="on"/>
				</panel>
				<panel position="fill" layout="stack">
					<widget source="list" render="Listbox" position="fill" scrollbarMode="showOnDemand" >
						<convert type="StringList" />
					</widget>
					<!--<widget name="config" position="fill" zPosition="1" scrollbarMode="showOnDemand" />-->
				</panel>
			</panel>
		</panel>
	</screen>"""

    def __init__(self, session):
        # FIXME anyone knows how to use relative paths from the plugin's directory?
        self.xmlfile = resolveFilename(SCOPE_SKIN, "videowizard.xml")
        self.hw = iAV  # needed by VideoWizard.xml do not change
        WizardLanguage.__init__(self, session, showSteps=False, showStepSlider=False)
        Rc.__init__(self)
        self["wizard"] = Pixmap()
        self["portpic"] = Pixmap()
        # Current selections; filled in as the user steps through the wizard.
        self.port = None
        self.mode = None
        self.rate = None

    def createSummary(self):
        print("[VideoWizard] createSummary")
        return VideoWizardSummary

    def markDone(self):
        # Persist the chosen video settings and disable the wizard on next boot.
        iAV.saveMode(self.port, self.mode, self.rate)
        config.misc.videowizardenabled.value = 0
        config.misc.videowizardenabled.save()
        configfile.save()

    def listInputChannels(self):
        """Return (description, port) pairs for the selectable video ports."""
        # hw_type = HardwareInfo().get_device_name()
        # has_hdmi = HardwareInfo().has_hdmi()
        list = []
        for port in iAV.getPortList():
            if iAV.isPortUsed(port):
                descr = port
                # Hide Scart on boxes without a Scart connector.
                if descr == "Scart" and not SystemInfo["hasScart"]:
                    continue
                if port != "DVI-PC":
                    list.append((descr, port))
        list.sort(key=lambda x: x[0])
        print("[VideoWizard] listInputChannels:", list)
        return list

    def inputSelectionMade(self, index):
        print("[VideoWizard] inputSelectionMade:", index)
        self.port = index
        self.inputSelect(index)

    def inputSelectionMoved(self):
        # Preview the highlighted port and update its connector picture.
        # hw_type = HardwareInfo().get_device_name()
        # has_hdmi = HardwareInfo().has_hdmi()
        print("[VideoWizard] input selection moved:", self.selection)
        self.inputSelect(self.selection)
        if self["portpic"].instance is not None:
            picname = self.selection
            if picname == "Jack":
                picname = "JACK"
            if picname == "Scart-YPbPr":
                picname = "Scart"
            self["portpic"].instance.setPixmapFromFile(resolveFilename(SCOPE_CURRENT_SKIN, "icons/%s.png" % picname))

    def inputSelect(self, port):
        # Apply the first available mode/rate of the port so the user sees a picture.
        print("[VideoWizard] inputSelect:", port)
        modeList = iAV.getModeList(self.selection)
        print("[VideoWizard] modeList:", modeList)
        self.port = port
        if len(modeList) > 0:
            ratesList = self.listRates(modeList[0][0])
            iAV.setMode(port=port, mode=modeList[0][0], rate=ratesList[0][0])

    def listModes(self):
        """Return (mode, mode) pairs available on the currently selected port."""
        list = []
        print("[VideoWizard] modes for port", self.port)
        for mode in iAV.getModeList(self.port):
            # if mode[0] != "PC":
            list.append((mode[0], mode[0]))
        print("[VideoWizard] modeslist:", list)
        return list

    def modeSelectionMade(self, index):
        print("[VideoWizard] modeSelectionMade:", index)
        self.mode = index
        self.modeSelect(index)

    def modeSelectionMoved(self):
        print("[VideoWizard] mode selection moved:", self.selection)
        self.modeSelect(self.selection)

    def modeSelect(self, mode):
        ratesList = self.listRates(mode)
        print("[VideoWizard] ratesList:", ratesList)
        # HD(+) modes on HDMI default to automatic rate selection.
        if self.port == "HDMI" and mode in ("720p", "1080i", "1080p", "2160p"):
            self.rate = "multi"
            iAV.setMode(port=self.port, mode=mode, rate="multi")
        else:
            iAV.setMode(port=self.port, mode=mode, rate=ratesList[0][0])

    def listRates(self, querymode=None):
        """Return (rate, rate) pairs for *querymode* (defaults to current mode)."""
        if querymode is None:
            querymode = self.mode
        list = []
        print("[VideoWizard] modes for port", self.port, "and mode", querymode)
        for mode in iAV.getModeList(self.port):
            print("[VideoWizard] mode:", mode)
            if mode[0] == querymode:
                for rate in mode[1]:
                    if self.port == "DVI-PC":
                        print("[VideoWizard] rate:", rate)
                        # Put the safe 640x480 rate first for DVI-PC.
                        if rate == "640x480":
                            list.insert(0, (rate, rate))
                            continue
                    list.append((rate, rate))
        return list

    def rateSelectionMade(self, index):
        print("[VideoWizard] rateSelectionMade:", index)
        self.rate = index
        self.METHOD_NAME(index)

    def rateSelectionMoved(self):
        print("[VideoWizard] rate selection moved:", self.selection)
        self.METHOD_NAME(self.selection)

    def METHOD_NAME(self, rate):
        # Apply the selected rate immediately for live preview.
        iAV.setMode(port=self.port, mode=self.mode, rate=rate)

    def showTestCard(self, selection=None):
        if selection is None:
            selection = self.selection
        print("[VideoWizard] set config.misc.showtestcard to", {"yes": True, "no": False}[selection])
        if selection == "yes":
            config.misc.showtestcard.value = True
        else:
            config.misc.showtestcard.value = False

    def keyNumberGlobal(self, number):
        # Shortcut keys 1-3 select a preset output and leave the wizard at once.
        if number in (1, 2, 3):
            if number == 1:
                iAV.saveMode("HDMI", "720p", "multi")
            elif number == 2:
                iAV.saveMode("HDMI", "1080i", "multi")
            elif number == 3:
                iAV.saveMode("Scart", "Multi", "multi")
            iAV.setConfiguredMode()
            self.close()
        WizardLanguage.keyNumberGlobal(self, number)
299,260 | check overlaps | from .ParallelVolumeEngine import *
class VolumeEngine(g4.G4VUserDetectorConstruction, gate.EngineBase):
"""
Engine that will create all G4 elements for the hierarchy of volumes.
Correspond to the G4VUserDetectorConstruction (inherit)
Also manage the list of parallel worlds.
"""
def __init__(self, simulation_engine):
g4.G4VUserDetectorConstruction.__init__(self)
gate.EngineBase.__init__(self, simulation_engine)
self.is_constructed = False
# parallel world info
self.world_volumes_user_info = {}
self.parallel_volume_engines = []
# list of volumes for the main world
self.volumes_tree = None
# all G4 volumes are store here
# (including volumes in parallel worlds)
self.g4_volumes = {}
# create the parallel worlds
self.initialize_parallel_worlds()
def initialize_parallel_worlds(self):
# init list of trees
self.world_volumes_user_info = (
self.simulation_engine.simulation.volume_manager.separate_parallel_worlds()
)
# build G4 parallel volume engine (except for main world)
for world_name in self.world_volumes_user_info:
if (
world_name
== self.simulation_engine.simulation.volume_manager.world_name
):
continue
# register a new parallel world
volumes_user_info = self.world_volumes_user_info[world_name]
pw = gate.ParallelVolumeEngine(self, world_name, volumes_user_info)
self.RegisterParallelWorld(pw)
# store it to avoid destruction
self.parallel_volume_engines.append(pw)
def __del__(self):
if self.verbose_destructor:
gate.warning("Deleting VolumeEngine")
def close(self):
if self.verbose_close:
gate.warning(f"Closing VolumeEngine")
for pwe in self.parallel_volume_engines:
pwe.close()
self.release_g4_references()
def release_g4_references(self):
self.g4_volumes = None
def Construct(self):
"""
G4 overloaded.
Override the Construct method from G4VUserDetectorConstruction
"""
# build the materials
self.simulation_engine.simulation.volume_manager.material_database.initialize()
# initial check (not really needed)
self.simulation_engine.simulation.check_geometry()
# build the tree of volumes
volumes_user_info = self.world_volumes_user_info[gate.__world_name__]
self.volumes_tree = gate.build_tree(volumes_user_info)
# build all G4 volume objects
self.build_g4_volumes(volumes_user_info, None)
# return the (main) world physical volume
self.is_constructed = True
return self.g4_volumes[gate.__world_name__].g4_physical_volume
def METHOD_NAME(self, verbose):
for v in self.g4_volumes.values():
for w in v.g4_physical_volumes:
try:
b = w.CheckOverlaps(1000, 0, verbose, 1)
if b:
gate.fatal(
f'Some volumes overlap the volume "{v}". \n'
f"Consider using G4 verbose to know which ones. \n"
f"Aborting."
)
except:
pass
# gate.warning(f'do not check physical volume {w}')
def find_or_build_material(self, material):
mat = self.simulation_engine.simulation.volume_manager.material_database.FindOrBuildMaterial(
material
)
return mat
def build_g4_volumes(self, volumes_user_info, g4_world_log_vol):
uiv = volumes_user_info
for vu in uiv.values():
# create the volume
vol = gate.new_element(vu, self.simulation_engine.simulation)
# construct the G4 Volume
vol.construct(self, g4_world_log_vol)
# store at least one PhysVol
if len(vol.g4_physical_volumes) == 0:
vol.g4_physical_volumes.append(vol.g4_physical_volume)
# keep the volume to avoid being destructed
if g4_world_log_vol is not None:
n = f"{g4_world_log_vol.GetName()}_{vu.name}"
self.g4_volumes[n] = vol
else:
self.g4_volumes[vu.name] = vol
# def set_actor_engine(self, actor_engine):
# self.actor_engine = actor_engine
# for pw in self.parallel_volume_engines:
# pw.actor_engine = actor_engine
def ConstructSDandField(self):
"""
G4 overloaded
"""
# This function is called in MT mode
tree = self.volumes_tree
self.simulation_engine.actor_engine.register_sensitive_detectors(
gate.__world_name__,
tree,
self.simulation_engine.simulation.volume_manager,
self,
)
def get_volume(self, name, check_initialization=True):
if check_initialization and not self.is_constructed:
gate.fatal(f"Cannot get_volume before initialization")
try:
return self.g4_volumes[name]
except KeyError:
gate.fatal(
f"The volume {name} is not in the current "
f"list of volumes: {self.g4_volumes}"
)
def get_database_material_names(self, db=None):
return self.simulation_engine.simulation.volume_manager.material_database.get_database_material_names(
db
)
def dump_build_materials(self, level=0):
table = g4.G4Material.GetMaterialTable
if level == 0:
names = [m.GetName() for m in table]
return names
return table |
299,261 | execute grasp | #!/usr/bin/env python3
# Software License Agreement (BSD License)
# Copyright Β© 2021-2023 belongs to Shadow Robot Company Ltd.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of Shadow Robot Company Ltd nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# This software is provided by Shadow Robot Company Ltd "as is" and any express
# or implied warranties, including, but not limited to, the implied warranties of
# merchantability and fitness for a particular purpose are disclaimed. In no event
# shall the copyright holder be liable for any direct, indirect, incidental, special,
# exemplary, or consequential damages (including, but not limited to, procurement of
# substitute goods or services; loss of use, data, or profits; or business interruption)
# however caused and on any theory of liability, whether in contract, strict liability,
# or tort (including negligence or otherwise) arising in any way out of the use of this
# software, even if advised of the possibility of such damage.
# Reading the tactiles from the hand.
from threading import Thread
import termios
import sys
import tty
import yaml
import rospy
from sr_robot_commander.sr_hand_commander import SrHandCommander
class GraspExecution:
    """Keyboard-driven demo: keys 1-7 trigger predefined grasps on the right hand."""

    def __init__(self):
        self.keyboard_pressed = False
        self.hand_commander = SrHandCommander(name='right_hand')
        # Parsed grasp configuration: joint names plus per-grasp joint targets.
        self.grasp_yaml = {}

    def _open_yaml(self):
        """Load the demo grasp definitions from the (hard-coded) config file."""
        grasp_config_filename = '/home/user/projects/shadow_robot/base/src/'\
            'sr_interface/sr_example/config/demo_grasps.yaml'
        with open(grasp_config_filename, encoding="utf-8") as grasp_config_file:
            self.grasp_yaml = yaml.load(grasp_config_file, Loader=yaml.FullLoader)

    @staticmethod
    def _get_input():
        """Read a single character from stdin in raw mode (no Enter required)."""
        file_description = sys.stdin.fileno()
        old_settings = termios.tcgetattr(file_description)
        try:
            tty.setraw(sys.stdin.fileno())
            char_read = sys.stdin.read(1)
        finally:
            # Always restore the terminal settings, even on error.
            termios.tcsetattr(file_description, termios.TCSADRAIN, old_settings)
        return char_read

    def run(self):
        """Main loop: map key presses 1-7 to grasps; ESC (0x1b) exits the process."""
        self._open_yaml()
        while True:
            input_val = self._get_input()
            if input_val == "1":
                self.METHOD_NAME("open_hand")
            elif input_val == "2":
                self.METHOD_NAME("close_hand")
            elif input_val == "3":
                self.METHOD_NAME("point")
            elif input_val == "4":
                self.METHOD_NAME("2_finger_pinch")
            elif input_val == "5":
                self.METHOD_NAME("3_finger_pinch")
            elif input_val == "6":
                self.METHOD_NAME("parallel_extension")
            elif input_val == "7":
                self.METHOD_NAME("grasp_sphere")
            # ESC terminates the keyboard thread and the process.
            if hex(ord(input_val)) == '0x1b':
                sys.exit(0)

    def METHOD_NAME(self, grasp):
        """Open the hand, then move to *grasp* (joint targets from the YAML config)."""
        rospy.loginfo(f"Grasp {grasp} started.")
        open_dict = dict(zip(self.grasp_yaml['joint_names'], self.grasp_yaml['grasps']['open_hand']))
        grasp_dict = dict(zip(self.grasp_yaml['joint_names'], self.grasp_yaml['grasps'][grasp]))
        self.hand_commander.move_to_joint_value_target_unsafe(open_dict, 5.0, True)
        self.hand_commander.move_to_joint_value_target_unsafe(grasp_dict, 5.0, True)
        rospy.sleep(2.0)
        rospy.loginfo(f"Grasp {grasp} completed.")
if __name__ == "__main__":
    rospy.init_node("right_hand_demo", anonymous=True)
    # Keyboard thread for input (main thread stays free for ROS callbacks).
    kpd = GraspExecution()
    keyboard_thread = Thread(target=kpd.run)
    keyboard_thread.start()
    rospy.loginfo("\nPRESS 1-9 ON THE KEYBOARD TO SHOW A GRASP:\
\n 1: Open Hand\
\n 2: Close Hand\
\n 3: Point\
\n 4: 2 Finger Pinch\
\n 5: 3 Finger Pinch\
\n 6: Parallel Extension\
\n 7: Grasp Sphere")
299,262 | assert correct num gpus per node | # Copyright 2016-2020 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
# ReFrame Project Developers. See the top-level LICENSE file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
import reframe.utility.sanity as sn
import reframe as rfm
__all__ = ['BuildGpuPchase', 'RunGpuPchase', 'RunGpuPchaseD2D']
class BuildGpuPchase(rfm.CompileOnlyRegressionTest, pin_prefix=True):
    ''' Base class to build the pointer chase executable.

    The test sources can be compiled for both CUDA and HIP. This is set with
    the `gpu_build` variable, which must be set by a derived class to either
    'cuda' or 'hip'. This source code can also be compiled for a specific
    device architecture by setting the ``gpu_arch`` variable to an AMD or
    NVIDIA supported architecture code.

    The name of the resulting executable is ``pChase.x``.
    '''

    #: Set the build option to either 'cuda' or 'hip'.
    #:
    #: :default: ``required``
    gpu_build = variable(str)

    #: Set the GPU architecture.
    #: This variable will be passed to the compiler to generate the
    #: arch-specific code.
    #:
    #: :default: ``None``
    gpu_arch = variable(str, type(None), value=None)

    num_tasks = 1
    build_system = 'Make'
    # `ls` makes the produced executable visible in stdout for the sanity check.
    postbuild_cmds = ['ls .']
    num_tasks_per_node = 1
    exclusive_access = True
    maintainers = ['JO', 'SK']
    tags = {'benchmark'}

    @run_before('compile')
    def set_gpu_build(self):
        '''Set the build options [pre-compile hook].

        This hook requires the ``gpu_build`` variable to be set.
        The supported options are ``'cuda'`` and ``'hip'``. See the
        vendor-specific docs for the supported options for the ``gpu_arch``
        variable.
        '''
        if self.gpu_build == 'cuda':
            self.build_system.makefile = 'makefile.cuda'
            if self.gpu_arch:
                self.build_system.cxxflags = [f'-arch=compute_{self.gpu_arch}',
                                              f'-code=sm_{self.gpu_arch}']
        elif self.gpu_build == 'hip':
            self.build_system.makefile = 'makefile.hip'
            if self.gpu_arch:
                self.build_system.cxxflags = [
                    f'--amdgpu-target={self.gpu_arch}'
                ]
        else:
            raise ValueError('unknown gpu_build option')

    @sanity_function
    def assert_exec_present(self):
        '''Assert that the executable is present.'''
        return sn.assert_found(r'pChase.x', self.stdout)
class RunGpuPchaseBase(rfm.RunOnlyRegressionTest, pin_prefix=True):
    '''Base RunOnly class for the gpu pointer chase test.

    This runs the pointer chase algo on the linked list from the code compiled
    in the executable from the test above. The list is fully customisable
    through the command line, so the number of nodes, and the stride size for
    each jump will determine where the memory hits occur. This stride is set to
    32 node lengths (a node is 8 Bytes) to ensure that there is only a single
    node per cache line. The number of node jumps is set relatively large to
    ensure that the background effects are averaged out.

    Derived tests must set the number of list nodes, the executable and the
    number of gpus per compute node.
    '''

    #: Variable that sets the size of the linked list.
    #:
    #: :default:``required``
    num_list_nodes = variable(int)

    #: Variable to set the stride (in mumber of nodes) amongst nodes in the
    #: linked list. We Use a large stride to ensure there's only a single
    #: node per cache line.
    #:
    #: :default:``32``
    stride = variable(int, value=32)  # (128 Bytes)

    #: Variable to set the total number of node jumps on the list traversal.
    #: We use a relatively large number of jumps to smooth out potential
    #: spurious effects.
    #:
    #: :default:``400000``
    num_node_jumps = variable(int, value=400000)

    # Mark the required variables
    executable = required
    num_gpus_per_node = required

    maintainers = ['JO', 'SK']
    tags = {'benchmark'}

    @run_before('run')
    def set_exec_opts(self):
        '''Set the list travesal options as executable args.'''
        self.executable_opts += [
            f'--stride {self.stride}',
            f'--nodes {self.num_list_nodes}',
            f'--num-jumps {self.num_node_jumps}'
        ]

    @sanity_function
    def METHOD_NAME(self):
        '''Check that every node has the right number of GPUs.'''
        # Hostnames reporting the expected device count.
        my_nodes = set(sn.extractall(
            rf'^\s*\[([^\]]*)\]\s*Found {self.num_gpus_per_node} device\(s\).',
            self.stdout, 1))

        # Check that every node has made it to the end.
        nodes_at_end = len(set(sn.extractall(
            r'^\s*\[([^\]]*)\]\s*Pointer chase complete.',
            self.stdout, 1)))
        # Both counts must equal the number of launched tasks.
        return sn.assert_eq(
            sn.assert_eq(self.job.num_tasks, sn.count(my_nodes)),
            sn.assert_eq(self.job.num_tasks, nodes_at_end)
        )
class RunGpuPchase(RunGpuPchaseBase):
    '''Base class for intra-GPU latency tests.

    Derived classes must set the dependency with respect to a derived class
    from :class:`BuildGpuPchase`.
    '''

    @run_before('performance')
    def set_performance_patterns(self):
        # Report the worst (max) per-device average latency, in cycles per
        # node jump, across all devices/nodes in the job output.
        self.perf_patterns = {
            'average_latency': sn.max(sn.extractall(
                r'^\s*\[[^\]]*\]\s* On device \d+, '
                r'the chase took on average (\d+) '
                r'cycles per node jump.', self.stdout, 1, int)
            ),
        }
class RunGpuPchaseD2D(RunGpuPchaseBase):
    '''Base class for inter-device (D2D) latency tests.

    Derived classes must set the dependency with respect to a derived class
    from :class:`BuildGpuPchase`.
    '''

    executable_opts = ['--multi-gpu']

    @deferrable
    def average_D2D_latency(self):
        '''Extract the average D2D latency.

        The pChase code returns a table with the cumulative latency for all
        D2D list traversals, and the last column of this table has the max
        values for each device.
        '''
        # NOTE(review): the repeated group '(\s*\d+.\s+)+' captures only its
        # last repetition and includes surrounding whitespace plus one extra
        # character before int() conversion -- verify against actual output.
        return sn.max(sn.extractall(
            r'^\s*\[[^\]]*\]\s*GPU\s*\d+\s+(\s*\d+.\s+)+',
            self.stdout, 1, int
        ))

    @run_before('performance')
    def set_performance_patterns(self):
        self.perf_patterns = {
            'average_latency': self.average_D2D_latency(),
        }
299,263 | decorate | from functools import wraps
from typing import Any, Callable, List
import inspect
import logging
import time
from dispatch.metrics import provider as metrics_provider
from dispatch.organization import service as organization_service
from dispatch.project import service as project_service
from .database.core import engine, sessionmaker
from sqlalchemy.orm import scoped_session
# Module-level logger for the scheduler/decorator helpers below.
log = logging.getLogger(__name__)
def fullname(o):
module = inspect.getmodule(o)
return f"{module.__name__}.{o.__qualname__}"
def _execute_task_in_project_context(
    func: Callable,
    *args,
    **kwargs,
) -> None:
    """Invoke *func* once per (organization, project) pair.

    For every organization a scoped session bound to that organization's
    schema (``dispatch_organization_<slug>``) is opened and injected into
    ``kwargs`` as ``db_session``; the current project is injected as
    ``project``. All exceptions are logged and swallowed -- these are
    background tasks with no caller to notify.
    """
    CoreSession = scoped_session(sessionmaker(bind=engine))
    db_session = CoreSession()
    metrics_provider.counter("function.call.counter", tags={"function": fullname(func)})
    start = time.perf_counter()
    try:
        # iterate for all schema
        for organization in organization_service.get_all(db_session=db_session):
            # Route all queries of this iteration to the org's schema.
            schema_engine = engine.execution_options(
                schema_translate_map={None: f"dispatch_organization_{organization.slug}"}
            )
            OrgSession = scoped_session(sessionmaker(bind=schema_engine))
            schema_session = OrgSession()
            try:
                kwargs["db_session"] = schema_session
                for project in project_service.get_all(db_session=schema_session):
                    kwargs["project"] = project
                    func(*args, **kwargs)
            except Exception as e:
                # A failure in one organization must not stop the others.
                log.error(
                    f"Error trying to execute task: {fullname(func)} with parameters {args} and {kwargs}"
                )
                log.exception(e)
            finally:
                # Always release the per-organization session.
                OrgSession.remove()
        elapsed_time = time.perf_counter() - start
        metrics_provider.timer(
            "function.elapsed.time", value=elapsed_time, tags={"function": fullname(func)}
        )
    except Exception as e:
        # No rollback necessary as we only read from the database
        log.error(f"Error trying to execute task: {fullname(func)}")
        log.exception(e)
    finally:
        CoreSession.remove()
def scheduled_project_task(func: Callable):
    """Decorator that sets up a background task function with
    a database session and exception tracking.

    Each task is executed in a specific project context.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        _execute_task_in_project_context(func, *args, **kwargs)

    return wrapper
def background_task(func):
    """Decorator that sets up the a background task function
    with a database session and exception tracking.

    As background tasks run in their own threads, it does not attempt
    to propagate errors.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        # Tracks whether this wrapper created the session (and must close it).
        background = False

        if not kwargs.get("db_session"):
            if not kwargs.get("organization_slug"):
                raise Exception("If not db_session is supplied organization slug must be provided.")

            # Route queries to the organization's schema.
            schema_engine = engine.execution_options(
                schema_translate_map={
                    None: f"dispatch_organization_{kwargs['organization_slug']}",
                }
            )
            db_session = sessionmaker(bind=schema_engine)
            background = True
            kwargs["db_session"] = db_session()

        try:
            metrics_provider.counter("function.call.counter", tags={"function": fullname(func)})
            start = time.perf_counter()
            result = func(*args, **kwargs)
            elapsed_time = time.perf_counter() - start
            metrics_provider.timer(
                "function.elapsed.time", value=elapsed_time, tags={"function": fullname(func)}
            )
            return result
        except Exception as e:
            # Swallow (log only): returns None to the caller on failure.
            log.exception(e)
        finally:
            # Close only sessions this wrapper created.
            if background:
                kwargs["db_session"].close()

    return wrapper
def timer(func: Any):
    """Timing decorator that sends a timing metric for every call."""

    @wraps(func)
    def timed(*args, **kwargs):
        begin = time.perf_counter()
        outcome = func(*args, **kwargs)
        duration = time.perf_counter() - begin
        metrics_provider.timer(
            "function.elapsed.time", value=duration, tags={"function": fullname(func)}
        )
        log.debug(f"function.elapsed.time.{fullname(func)}: {duration}")
        return outcome

    return timed
def counter(func: Any):
    """Counting decorator that sends a counting metric for every call."""

    @wraps(func)
    def counted(*args, **kwargs):
        metrics_provider.counter("function.call.counter", tags={"function": fullname(func)})
        return func(*args, **kwargs)

    return counted
def apply(decorator: Any, exclude: List[str] = None):
    """Class decorator factory that applies *decorator* to class methods.

    Args:
        decorator: function decorator applied to every callable attribute
            defined directly on the class (inherited attributes are skipped).
        exclude: attribute names to leave undecorated.

    Returns:
        A class decorator that mutates and returns the class.
    """
    if not exclude:
        exclude = []

    def METHOD_NAME(cls):
        # Snapshot the attribute names first: calling setattr() while
        # iterating cls.__dict__ directly risks mutating the mapping
        # mid-iteration.
        # NOTE(review): this also decorates dunders (e.g. __init__) and any
        # nested classes defined on cls -- exclude them if undesired.
        for attr in list(cls.__dict__):
            if callable(getattr(cls, attr)) and attr not in exclude:
                setattr(cls, attr, decorator(getattr(cls, attr)))
        return cls

    return METHOD_NAME
299,264 | matmul | # ***************************************************************
# Copyright (c) 2023 Jittor. All Rights Reserved.
# Maintainers: Dun Liang <randonlang@gmail.com>.
# This file is subject to the terms and conditions defined in
# file 'LICENSE.txt', which is part of this source code package.
# ***************************************************************
import unittest
import jittor as jt
import numpy as np
from jittor import Module
from jittor.models import resnet
import pickle
from PIL import Image
import platform
# Short alias for jittor's float32 constructor.
f32 = jt.float32
def METHOD_NAME(a, b):
    """Broadcast-based matrix multiply of two 2-D jittor Vars.

    Equivalent to ``a @ b``: both operands are broadcast to shape (n, m, k)
    and the elementwise product is reduced over the shared dimension m.
    """
    (n, m), k = a.shape, b.shape[-1]
    a = a.broadcast([n,m,k], dims=[2])
    b = b.broadcast([n,m,k], dims=[0])
    return (a*b).sum(dim=1)
def relu(x):
    """Elementwise ReLU: max(x, 0)."""
    return jt.maximum(x, 0.0)


# Module wrapper around the functional relu so it can be used as a layer.
Relu = jt.make_module(relu)
class Model(Module):
    """Two-layer MLP (Linear -> ReLU -> Linear) used as the trace target."""

    def __init__(self, input_size):
        # NOTE: relies on the Linear class defined later in this module;
        # the name resolves at call time, not at class-definition time.
        self.linear1 = Linear(input_size, 10)
        self.relu1 = Relu()
        self.linear2 = Linear(10, 1)

    def execute(self, x):
        x = self.linear1(x)
        x = self.relu1(x)
        return self.linear2(x)
def print_stack_tree(data):
    """Pretty-print (and return) the stack-name tree of a trace dump.

    Builds a nested dict keyed by frame name from the ``stacks`` entries of
    every node in ``data["node_data"]`` and pprints it.

    Returns:
        The nested tree dict. Returning it (instead of the previous implicit
        ``None``) is backward compatible -- callers ignored the result -- and
        makes the helper directly testable.
    """
    tree = {}
    for n in data["node_data"].values():
        p = tree
        for s in n["stacks"]:
            # setdefault replaces the manual "create child if missing" dance.
            p = p.setdefault(s['name'], {})
    from pprint import pprint
    pprint(tree)
    return tree
class Linear(Module):
    """Fully connected layer: y = x @ w (+ b), uniform(-0.5, 0.5) init."""

    def __init__(self, in_features, out_features, bias=True):
        # Scale weights by 1/sqrt(in_features) to keep activations bounded.
        self.w = (jt.random((in_features, out_features))-0.5) / in_features**0.5
        self.b = jt.random((out_features,))-0.5 if bias else None

    def execute(self, x):
        # METHOD_NAME is the broadcast-based matmul defined above.
        x = METHOD_NAME(x, self.w)
        if self.b is not None:
            return x+self.b
        return x
class TestTraceVar(unittest.TestCase):
    """Integration tests for jittor's python-variable tracing (trace_py_var).

    Each test runs a model under ``jt.flag_scope(trace_py_var=2)``, dumps the
    collected trace data, sanity-checks it, and pickles it into the jittor
    cache directory for offline inspection.
    """

    def test_simple_model(self):
        # Trace a plain forward pass of the small MLP.
        with jt.flag_scope(trace_py_var=2):
            model = Model(input_size=1)
            batch_size = 10
            x = jt.float32(np.random.rand(batch_size, 1))
            y = model(x)
            y.sync()
            data = jt.dump_trace_data()
            jt.clear_trace_data()
            with open(f"{jt.flags.cache_path}/simple_model.pkl", "wb") as f:
                pickle.dump(data, f)

    def test_simple_model_train(self):
        # Trace a forward + backward (SGD step) pass and validate the dump.
        with jt.flag_scope(trace_py_var=2):
            model = Model(input_size=1)
            opt = jt.optim.SGD(model.parameters(), 0.1)
            batch_size = 10
            x = jt.float32(np.random.rand(batch_size, 1))
            y = model(x)
            opt.step(y**2)
            jt.sync_all()
            data = jt.dump_trace_data()
            jt.clear_trace_data()
            # print_stack_tree(data)
            # Every fused op recorded for an execution must have node data.
            for k,v in data["execute_op_info"].items():
                for i in v['fused_ops']:
                    if i not in data["node_data"]:
                        assert 0, (i, "not found")
            # No node may end up without a proper name.
            for k,v in list(data["node_data"].items()):
                if v["attrs"]["name"] == "unname":
                    assert 0
            print(len(data["node_data"]))
            with open(f"{jt.flags.cache_path}/simple_model_train.pkl", "wb") as f:
                pickle.dump(data, f)

    def test_resnet_infer(self):
        # Trace ResNet-18 inference on random input.
        with jt.flag_scope(trace_py_var=2):
            resnet18 = resnet.Resnet18()
            x = jt.float32(np.random.rand(2, 3, 224, 224))
            y = resnet18(x)
            y.sync()
            data = jt.dump_trace_data()
            jt.clear_trace_data()
            with open(f"{jt.flags.cache_path}/resnet.pkl", "wb") as f:
                pickle.dump(data, f)
            # Consistency: fused ops must be present in node_data.
            for k,v in data["execute_op_info"].items():
                for i in v['fused_ops']:
                    if i not in data["node_data"]:
                        assert 0, (i, "not found")

    def test_resnet_infer_with_feature(self):
        # Trace inference on a real image with variable data captured
        # (trace_var_data=1) using a pretrained model.
        cat_url = "https://ss1.bdstatic.com/70cFuXSh_Q1YnxGkpoWK1HF6hhy/it/u=3782485413,1118109468&fm=26&gp=0.jpg"
        import jittor_utils
        cat_path = f"{jt.flags.cache_path}/cat.jpg"
        print("download")
        jittor_utils.download(cat_url, cat_path)
        with open(cat_path, 'rb') as f:
            img = Image.open(f).convert('RGB')
            img = jt.array(np.array(img))
            print(img.shape, img.dtype)
        # Normalize to roughly [-0.5, 0.5] and go HWC -> CHW.
        img = ((img.float() - 128) / 255).transpose(2,0,1)
        with jt.flag_scope(trace_py_var=2, trace_var_data=1):
            img = img[None,...]
            resnet18 = resnet.Resnet18(pretrained=True)
            x = jt.float32(img)
            y = resnet18(x)
            y.sync()
            data = jt.dump_trace_data()
            jt.clear_trace_data()
            with open(f"{jt.flags.cache_path}/resnet_with_feature.pkl", "wb") as f:
                pickle.dump(data, f)
            for k,v in data["execute_op_info"].items():
                for i in v['fused_ops']:
                    if i not in data["node_data"]:
                        assert 0, (i, "not found")

    def test_resnet_trainx(self):
        # Trace a ResNet-18 training step and validate the dump.
        with jt.flag_scope(trace_py_var=2):
            resnet18 = resnet.Resnet18()
            opt = jt.optim.SGD(resnet18.parameters(), 0.1)
            x = jt.float32(np.random.rand(2, 3, 224, 224))
            y = resnet18(x)
            opt.step(y**2)
            jt.sync_all()
            data = jt.dump_trace_data()
            jt.clear_trace_data()
            with open(f"{jt.flags.cache_path}/resnet_train.pkl", "wb") as f:
                pickle.dump(data, f)
            for k,v in data["execute_op_info"].items():
                for i in v['fused_ops']:
                    if i not in data["node_data"]:
                        assert 0, (i, "not found")
            for k,v in data["node_data"].items():
                if 'name' not in v["attrs"]:
                    print(v)
                # assert 'name' in v["attrs"], v
                # for s in v["stacks"]:
                #     if "_opt" in s["name"] or "_model" in s["name"]:
                #         assert 0, v

    def test_resnet_train_profile(self):
        # Smoke test: tracing must also work under the profiler scope.
        with jt.profile_scope(trace_py_var=1):
            resnet18 = resnet.Resnet18()
            opt = jt.optim.SGD(resnet18.parameters(), 0.1)
            x = jt.float32(np.random.rand(2, 3, 224, 224))
            y = resnet18(x)
            opt.step(y**2)
            jt.sync_all()
if __name__ == "__main__":
unittest.main( |
299,265 | test empty map | # (C) Copyright 2005-2023 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
"""
Tests for the PrefixMap handler.
"""
import pickle
import unittest
from traits.api import HasTraits, Int, PrefixMap, TraitError
class Person(HasTraits):
    # Maps marital-status strings (matched by unique prefix) to 1/0.
    married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0})
class TestPrefixMap(unittest.TestCase):
    """Behavioural tests for the PrefixMap trait handler: prefix matching,
    defaults (static and dynamic), pickling and error reporting."""

    def test_assignment(self):
        person = Person()

        # Test prefix
        person.married = "yea"
        self.assertEqual("yeah", person.married)
        self.assertEqual(1, person.married_)

        person.married = "yes"
        self.assertEqual("yes", person.married)
        self.assertEqual(1, person.married_)

        person.married = "na"
        self.assertEqual("nah", person.married)
        self.assertEqual(0, person.married_)

        with self.assertRaises(TraitError):
            person.married = "unknown"

        # Test duplicate prefix
        with self.assertRaises(TraitError):
            person.married = "ye"

    def test_bad_types(self):
        person = Person()

        # Only str values are accepted; everything else must raise.
        wrong_type = [[], (1, 2, 3), 1j, 2.3, 23, b"not a string", None]
        for value in wrong_type:
            with self.subTest(value=value):
                with self.assertRaises(TraitError):
                    person.married = value

    def test_no_default(self):
        mapping = {"yes": 1, "yeah": 1, "no": 0, "nah": 0}

        class Person(HasTraits):
            married = PrefixMap(mapping)

        p = Person()
        # Since we're using Python >= 3.6, we can rely on dictionaries
        # being ordered, and then the default is predictable.
        self.assertEqual(p.married, "yes")
        self.assertEqual(p.married_, 1)

    def test_default(self):
        class Person(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0},
                                default_value="nah")

        p = Person()
        self.assertEqual(p.married, "nah")
        self.assertEqual(p.married_, 0)

    def test_default_keyword_only(self):
        # default_value must be passed by keyword, not positionally.
        with self.assertRaises(TypeError):
            PrefixMap({"yes": 1, "no": 0}, "yes")

    def test_default_method(self):
        class Person(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0})

            default_calls = Int(0)

            def _married_default(self):
                self.default_calls += 1
                return "nah"

        p = Person()
        self.assertEqual(p.married, "nah")
        self.assertEqual(p.married_, 0)
        self.assertEqual(p.default_calls, 1)

        # Check that the order doesn't matter
        p2 = Person()
        self.assertEqual(p2.married_, 0)
        self.assertEqual(p2.married, "nah")
        self.assertEqual(p2.default_calls, 1)

    def test_default_static_override_static(self):
        class BasePerson(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0},
                                default_value="nah")

        class Person(BasePerson):
            married = "yes"

        p = Person()
        self.assertEqual(p.married, "yes")
        self.assertEqual(p.married_, 1)

    def test_default_static_override_method(self):
        class BasePerson(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0},
                                default_value="nah")

        class Person(BasePerson):
            default_calls = Int(0)

            def _married_default(self):
                self.default_calls += 1
                return "yes"

        p = Person()
        self.assertEqual(p.married, "yes")
        self.assertEqual(p.married_, 1)
        self.assertEqual(p.default_calls, 1)

    def test_default_method_override_static(self):
        class BasePerson(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0})

            default_calls = Int(0)

            def _married_default(self):
                self.default_calls += 1
                return "nah"

        class Person(BasePerson):
            married = "yes"

        p = Person()
        self.assertEqual(p.married, "yes")
        self.assertEqual(p.married_, 1)
        # The static override wins: the dynamic default is never invoked.
        self.assertEqual(p.default_calls, 0)

    def test_default_method_override_method(self):
        class BasePerson(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0})

            default_calls = Int(0)

            def _married_default(self):
                self.default_calls += 1
                return "nah"

        class Person(BasePerson):
            def _married_default(self):
                self.default_calls += 1
                return "yes"

        p = Person()
        self.assertEqual(p.married, "yes")
        self.assertEqual(p.married_, 1)
        self.assertEqual(p.default_calls, 1)

    def test_static_default_transformed(self):
        # Test the static default is transformed
        class Person(HasTraits):
            married = PrefixMap(
                {"yes": 1, "yeah": 1, "no": 0}, default_value="yea")

        p = Person()
        self.assertEqual(p.married, "yeah")
        self.assertEqual(p.married_, 1)

        # access mapped trait first is okay
        p = Person()
        self.assertEqual(p.married_, 1)
        self.assertEqual(p.married, "yeah")

    def test_static_default_validation_error(self):
        # An unmatchable default must be rejected at trait creation time.
        with self.assertRaises(ValueError):
            class Person(HasTraits):
                married = PrefixMap(
                    {"yes": 1, "yeah": 1, "no": 0}, default_value="meh")

    def test_no_nested_exception(self):
        # Regression test for enthought/traits#1155
        class A(HasTraits):
            washable = PrefixMap({"yes": 1, "no": 0})

        a = A()
        # NOTE(review): if the assignment does not raise, this test silently
        # passes; it only checks exception chaining when TraitError occurs.
        try:
            a.washable = "affirmatron"
        except TraitError as exc:
            self.assertIsNone(exc.__context__)
            self.assertIsNone(exc.__cause__)

    def test_pickle_roundtrip(self):
        class Person(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0},
                                default_value="yea")

        p = Person()
        married_trait = p.traits()["married"]
        reconstituted = pickle.loads(pickle.dumps(married_trait))

        self.assertEqual(married_trait.validate(p, "married", "yea"), "yeah")
        self.assertEqual(reconstituted.validate(p, "married", "yea"), "yeah")

        with self.assertRaises(TraitError):
            reconstituted.validate(p, "married", "uknown")

        with self.assertRaises(TraitError):
            reconstituted.validate(p, "married", "ye")

    def METHOD_NAME(self):
        # An empty mapping is rejected outright.
        with self.assertRaises(ValueError):
            PrefixMap({})

    def test_pickle_shadow_trait(self):
        class Person(HasTraits):
            married = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0},
                                default_value="yeah")

        p = Person()
        married_shadow_trait = p.trait("married_")
        reconstituted = pickle.loads(pickle.dumps(married_shadow_trait))

        default_value_callable = reconstituted.default_value()[1]

        self.assertEqual(default_value_callable(p), 1)

    def test_existence_of__map(self):
        # This test can be removed once Mayavi no longer depends on the
        # existence of the _map attribute.
        # xref: enthought/traits#1577
        # xref: enthought/mayavi#1094
        prefix_map = PrefixMap({"yes": 1, "yeah": 1, "no": 0, "nah": 0})
        self.assertEqual(prefix_map._map["yes"], "yes")
299,266 | tagger | import warnings
from unittest import TestCase
import pytest
import srsly
from numpy import zeros
from spacy.kb.kb_in_memory import InMemoryLookupKB, Writer
from spacy.language import Language
from spacy.pipeline import TrainablePipe
from spacy.vectors import Vectors
from spacy.vocab import Vocab
from ..util import make_tempdir
def nlp():
    """Return an empty spaCy pipeline used as a serialization test object."""
    return Language()
def vectors():
    """Return a tiny zero-valued Vectors table (3 keys, 1 dimension)."""
    data = zeros((3, 1), dtype="f")
    keys = ["cat", "dog", "rat"]
    return Vectors(data=data, keys=keys)
def custom_pipe():
    # create dummy pipe partially implementing interface -- only want to test to_disk
    class SerializableDummy:
        def __init__(self, **cfg):
            if cfg:
                self.cfg = cfg
            else:
                self.cfg = None
            super(SerializableDummy, self).__init__()

        def to_bytes(self, exclude=tuple(), disable=None, **kwargs):
            return srsly.msgpack_dumps({"dummy": srsly.json_dumps(None)})

        def from_bytes(self, bytes_data, exclude):
            return self

        def to_disk(self, path, exclude=tuple(), **kwargs):
            pass

        def from_disk(self, path, exclude=tuple(), **kwargs):
            return self

    class MyPipe(TrainablePipe):
        # Minimal trainable pipe: only the serialization surface matters here.
        def __init__(self, vocab, model=True, **cfg):
            if cfg:
                self.cfg = cfg
            else:
                self.cfg = None
            self.model = SerializableDummy()
            self.vocab = vocab

    return MyPipe(Vocab())
def METHOD_NAME():
    """Build a minimal pipeline with an initialized tagger component."""
    nlp = Language()
    # NOTE: this local deliberately shadows the enclosing function's name.
    METHOD_NAME = nlp.add_pipe("tagger")
    # need to add model for two reasons:
    # 1. no model leads to error in serialization,
    # 2. the affected line is the one for model serialization
    METHOD_NAME.add_label("A")
    nlp.initialize()
    return METHOD_NAME
def entity_linker():
    """Build a pipeline with an entity_linker backed by a one-entity KB."""
    nlp = Language()

    def create_kb(vocab):
        kb = InMemoryLookupKB(vocab, entity_vector_length=1)
        kb.add_entity("test", 0.0, zeros((1,), dtype="f"))
        return kb

    entity_linker = nlp.add_pipe("entity_linker")
    entity_linker.set_kb(create_kb)
    # need to add model for two reasons:
    # 1. no model leads to error in serialization,
    # 2. the affected line is the one for model serialization
    nlp.initialize()
    return entity_linker
# (objects, ids) pairs consumed by the parametrized test below and by the
# unittest-style class at the end of the module.
objects_to_test = (
    [nlp(), vectors(), custom_pipe(), METHOD_NAME(), entity_linker()],
    ["nlp", "vectors", "custom_pipe", "tagger", "entity_linker"],
)
def write_obj_and_catch_warnings(obj):
    """Serialize *obj* to a temp dir; return the ResourceWarnings raised."""
    with make_tempdir() as d:
        with warnings.catch_warnings(record=True) as warnings_list:
            warnings.filterwarnings("always", category=ResourceWarning)
            obj.to_disk(d)
            # in python3.5 it seems that deprecation warnings are not filtered by filterwarnings
            return list(filter(lambda x: isinstance(x, ResourceWarning), warnings_list))
@pytest.mark.parametrize("obj", objects_to_test[0], ids=objects_to_test[1])
def test_to_disk_resource_warning(obj):
    # Serialization must not leak file handles (no ResourceWarning emitted).
    warnings_list = write_obj_and_catch_warnings(obj)
    assert len(warnings_list) == 0
def test_writer_with_path_py35():
    """The KB Writer must accept a pathlib-style path and close cleanly."""
    writer = None
    with make_tempdir() as d:
        try:
            writer = Writer(d / "test")
        except Exception as e:
            pytest.fail(str(e))
        finally:
            if writer:
                writer.close()
def test_save_and_load_knowledge_base():
    """A KB must round-trip through to_disk/from_disk without raising."""
    nlp = Language()
    kb = InMemoryLookupKB(nlp.vocab, entity_vector_length=1)
    with make_tempdir() as d:
        path = d / "kb"
        try:
            kb.to_disk(path)
        except Exception as e:
            pytest.fail(str(e))

        try:
            kb_loaded = InMemoryLookupKB(nlp.vocab, entity_vector_length=1)
            kb_loaded.from_disk(path)
        except Exception as e:
            pytest.fail(str(e))
class TestToDiskResourceWarningUnittest(TestCase):
    """unittest-style variant of the parametrized ResourceWarning check."""

    def test_resource_warning(self):
        scenarios = zip(*objects_to_test)

        for scenario in scenarios:
            with self.subTest(msg=scenario[1]):
                warnings_list = write_obj_and_catch_warnings(scenario[0])
                self.assertEqual(len(warnings_list), 0)
299,267 | tgen | # -*- coding: utf-8 eval: (blacken-mode 1) -*-
# SPDX-License-Identifier: GPL-2.0-or-later
#
# February 21 2022, Christian Hopps <chopps@labn.net>
#
# Copyright (c) 2022, LabN Consulting, L.L.C.
#
"""
test_basic_grpc.py: Test Basic gRPC.
"""
import logging
import os
import sys
import pytest
from lib.common_config import step
from lib.micronet import commander
from lib.topogen import Topogen, TopoRouter
from lib.topolog import logger
CWD = os.path.dirname(os.path.realpath(__file__))

# One gRPC listen port per FRR daemon under test.
GRPCP_ZEBRA = 50051
GRPCP_STATICD = 50052
GRPCP_BFDD = 50053
GRPCP_ISISD = 50054
GRPCP_OSPFD = 50055
GRPCP_PIMD = 50056

pytestmark = [
    # pytest.mark.mgmtd -- Need a new non-protocol marker
    # pytest.mark.bfdd,
    # pytest.mark.isisd,
    # pytest.mark.ospfd,
    # pytest.mark.pimd,
    pytest.mark.staticd,
]
script_path = os.path.realpath(os.path.join(CWD, "../lib/grpc-query.py"))

# Skip the entire module early if the helper cannot generate/import the
# gRPC proto modules on this host.
try:
    commander.cmd_raises([script_path, "--check"])
except Exception:
    pytest.skip(
        "skipping; cannot create or import gRPC proto modules", allow_module_level=True
    )
@pytest.fixture(scope="module")
def METHOD_NAME(request):
    "Setup/Teardown the environment and provide tgen argument to tests"
    topodef = {"s1": ("r1", "r2")}
    METHOD_NAME = Topogen(topodef, request.module.__name__)
    METHOD_NAME.start_topology()

    router_list = METHOD_NAME.routers()
    for rname, router in router_list.items():
        # Each daemon loads the gRPC module on its own port ("-M grpc:<port>").
        router.load_config(TopoRouter.RD_ZEBRA, "zebra.conf", f"-M grpc:{GRPCP_ZEBRA}")
        router.load_config(TopoRouter.RD_STATIC, None, f"-M grpc:{GRPCP_STATICD}")
        # router.load_config(TopoRouter.RD_BFD, None, f"-M grpc:{GRPCP_BFDD}")
        # router.load_config(TopoRouter.RD_ISIS, None, f"-M grpc:{GRPCP_ISISD}")
        # router.load_config(TopoRouter.RD_OSPF, None, f"-M grpc:{GRPCP_OSPFD}")
        # router.load_config(TopoRouter.RD_PIM, None, f"-M grpc:{GRPCP_PIMD}")
    METHOD_NAME.start_router()
    yield METHOD_NAME

    logging.info("Stopping all routers (no assert on error)")
    METHOD_NAME.stop_topology()
    # Let's not do this so we catch errors
# Fixture that executes before each test
@pytest.fixture(autouse=True)
def skip_on_failure(METHOD_NAME):
    """Skip remaining tests once any router has reported a failure."""
    if METHOD_NAME.routers_have_failure():
        pytest.skip("skipped because of previous test failure")
# ===================
# The tests functions
# ===================
def run_grpc_client(r, port, commands):
    """Run the gRPC query helper on router *r* against the given *port*.

    *commands* may be a single string or an iterable of command strings; a
    trailing newline is guaranteed before the text is fed to the helper's
    stdin.
    """
    if isinstance(commands, str):
        stdin_text = commands if commands.endswith("\n") else commands + "\n"
    else:
        stdin_text = "\n".join(commands) + "\n"
    return r.cmd_raises([script_path, f"--port={port}"], stdin=stdin_text)
def test_connectivity(METHOD_NAME):
    """r1 can ping r2 across the shared switch."""
    r1 = METHOD_NAME.gears["r1"]
    output = r1.cmd_raises("ping -c1 192.168.1.2")
    logging.info("ping output: %s", output)
def test_capabilities(METHOD_NAME):
    """GETCAP over gRPC returns zebra's capabilities."""
    r1 = METHOD_NAME.gears["r1"]
    output = run_grpc_client(r1, GRPCP_ZEBRA, "GETCAP")
    logging.info("grpc output: %s", output)
def test_get_config(METHOD_NAME):
    """GET the interface config repeatedly, per-invocation and batched.

    Fix: the batch size was hard-coded to 10 and the first step's text said
    "10 times" while the loop and the final log message used nrepeat (5);
    nrepeat is now used consistently everywhere.
    """
    nrepeat = 5
    r1 = METHOD_NAME.gears["r1"]

    step(f"'GET' interface config {nrepeat} times, once per invocation")
    for i in range(0, nrepeat):
        output = run_grpc_client(r1, GRPCP_ZEBRA, "GET,/frr-interface:lib")
        logging.info("[iteration %s]: grpc GET output: %s", i, output)

    step(f"'GET' YANG {nrepeat} times in one invocation")
    commands = ["GET,/frr-interface:lib" for _ in range(0, nrepeat)]
    output = run_grpc_client(r1, GRPCP_ZEBRA, commands)
    logging.info("grpc GET*{%d} output: %s", nrepeat, output)
def test_get_vrf_config(METHOD_NAME):
    """Fetch the VRF config tree over gRPC."""
    r1 = METHOD_NAME.gears["r1"]

    step("'GET' get VRF config")
    output = run_grpc_client(r1, GRPCP_ZEBRA, "GET,/frr-vrf:lib")
    logging.info("grpc GET /frr-vrf:lib output: %s", output)
def test_shutdown_checks(METHOD_NAME):
    # Start a process running that will fetch bunches of data then shut the routers down
    # and check for cores.
    nrepeat = 100
    r1 = METHOD_NAME.gears["r1"]
    commands = ["GET,/frr-interface:lib" for _ in range(0, nrepeat)]
    p = r1.popen([script_path, f"--port={GRPCP_ZEBRA}"] + commands)

    import time

    # Give the client a moment to start streaming before shutdown.
    time.sleep(1)
    try:
        # Stop routers while the client is mid-stream, then look for cores.
        for r in METHOD_NAME.routers().values():
            r.net.stopRouter()
            r.net.checkRouterCores()
    finally:
        if p:
            p.terminate()
            p.wait()
# Memory leak test template
# Not compatible with the shutdown check above
def _test_memory_leak(METHOD_NAME):
    "Run the memory leak test and report results."
    if not METHOD_NAME.is_memleak_enabled():
        pytest.skip("Memory leak test/report is disabled")
    METHOD_NAME.report_memory_leaks()
if __name__ == "__main__":
    # Forward CLI args to pytest, keeping -s for live output.
    sys.exit(pytest.main(["-s"] + sys.argv[1:]))
299,268 | sign | # Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from cryptography.exceptions import InvalidKey, InvalidSignature
from cryptography.hazmat.primitives import asymmetric, ciphers, hashes, padding
from cryptography.x509 import Certificate
HASH_LENGTH = 4  # Adjustable to avoid collision
NONCE_LENGTH = 16  # For AES, this is 128 bits (i.e. block size)
KEY_LENGTH = 32  # AES 256. Choose from 16, 24, 32
HEADER_LENGTH = HASH_LENGTH + NONCE_LENGTH
PADDING_LENGTH = NONCE_LENGTH * 8  # in bits
# NOTE(review): 256 bytes matches 2048-bit RSA output -- confirm key size.
KEY_ENC_LENGTH = 256
SIGNATURE_LENGTH = 256
SIMPLE_HEADER_LENGTH = NONCE_LENGTH + KEY_ENC_LENGTH + SIGNATURE_LENGTH
def get_hash(value):
    """Return the SHA-256 digest of *value* (bytes-like).

    The local was renamed from ``hash`` to ``digest`` so it no longer
    shadows the builtin ``hash``.
    """
    digest = hashes.Hash(hashes.SHA256())
    digest.update(value)
    return digest.finalize()
class SessionKeyUnavailable(Exception):
    """Raised when no session key is available for encrypt/decrypt."""
    pass
class InvalidCertChain(Exception):
    """Raised when a peer certificate fails root-CA chain validation."""
    pass
def _asym_enc(k, m):
    """RSA-OAEP (MGF1/SHA-256) encrypt message *m* with public key *k*."""
    return k.encrypt(
        m,
        asymmetric.padding.OAEP(
            mgf=asymmetric.padding.MGF1(algorithm=hashes.SHA256()), algorithm=hashes.SHA256(), label=None
        ),
    )
def _asym_dec(k, m):
    """RSA-OAEP (MGF1/SHA-256) decrypt ciphertext *m* with private key *k*."""
    return k.decrypt(
        m,
        asymmetric.padding.OAEP(
            mgf=asymmetric.padding.MGF1(algorithm=hashes.SHA256()), algorithm=hashes.SHA256(), label=None
        ),
    )
def METHOD_NAME(k, m):
    """RSA-PSS (SHA-256) sign message *m* with private key *k*."""
    return k.sign(
        data=m,
        padding=asymmetric.padding.PSS(
            mgf=asymmetric.padding.MGF1(hashes.SHA256()),
            salt_length=asymmetric.padding.PSS.MAX_LENGTH,
        ),
        algorithm=hashes.SHA256(),
    )
def _verify(k, m, s):
    """Verify RSA-PSS signature *s* over *m* with public key *k*.

    Raises cryptography's InvalidSignature on mismatch; returns None on
    success.
    """
    k.verify(
        s,
        m,
        asymmetric.padding.PSS(
            mgf=asymmetric.padding.MGF1(hashes.SHA256()), salt_length=asymmetric.padding.PSS.MAX_LENGTH
        ),
        hashes.SHA256(),
    )
def _sym_enc(k, n, m):
    """AES-CBC encrypt *m* with key *k* and IV *n*, applying PKCS7 padding."""
    cipher = ciphers.Cipher(ciphers.algorithms.AES(k), ciphers.modes.CBC(n))
    encryptor = cipher.encryptor()
    padder = padding.PKCS7(PADDING_LENGTH).padder()
    padded_data = padder.update(m) + padder.finalize()
    return encryptor.update(padded_data) + encryptor.finalize()
def _sym_dec(k, n, m):
    """AES-CBC decrypt *m* with key *k* and IV *n*, stripping PKCS7 padding."""
    cipher = ciphers.Cipher(ciphers.algorithms.AES(k), ciphers.modes.CBC(n))
    decryptor = cipher.decryptor()
    padded = decryptor.update(m) + decryptor.finalize()
    unpadder = padding.PKCS7(PADDING_LENGTH).unpadder()
    return unpadder.update(padded) + unpadder.finalize()
class SessionKeyManager:
    """Negotiates and stores AES session keys, indexed by truncated key hash."""

    def __init__(self, root_ca):
        # Maps the trailing HASH_LENGTH bytes of SHA-256(key) -> key.
        # dicts preserve insertion order, so the last entry is the newest key.
        self.key_hash_dict = dict()
        self.root_ca = root_ca
        self.root_ca_pub_key = root_ca.public_key()

    def validate_cert_chain(self, cert):
        """Verify *cert* was signed by the root CA (raises InvalidSignature)."""
        self.root_ca_pub_key.verify(
            cert.signature, cert.tbs_certificate_bytes, asymmetric.padding.PKCS1v15(), cert.signature_hash_algorithm
        )

    def key_request(self, remote_cert, local_cert, local_pri_key):
        """Create a session key for *remote_cert*.

        Returns the encrypted key concatenated with our signature over the
        plaintext key, or False if the remote cert fails chain validation.
        """
        session_key = os.urandom(KEY_LENGTH)
        signature = METHOD_NAME(local_pri_key, session_key)
        try:
            self.validate_cert_chain(remote_cert)
        except InvalidSignature:
            return False
        remote_pub_key = remote_cert.public_key()
        key_enc = _asym_enc(remote_pub_key, session_key)
        self.key_hash_dict[get_hash(session_key)[-HASH_LENGTH:]] = session_key
        key_response = key_enc + signature
        return key_response

    def process_key_response(self, remote_cert, local_cert, local_pri_key, key_response):
        """Decrypt and verify a peer's key response; store the key.

        Returns True on success, False on any key/signature failure.
        """
        key_enc, signature = key_response[:KEY_ENC_LENGTH], key_response[KEY_ENC_LENGTH:]
        try:
            session_key = _asym_dec(local_pri_key, key_enc)
            self.validate_cert_chain(remote_cert)
            public_key = remote_cert.public_key()
            _verify(public_key, session_key, signature)
            self.key_hash_dict[get_hash(session_key)[-HASH_LENGTH:]] = session_key
        except (InvalidKey, InvalidSignature):
            return False
        return True

    def key_available(self):
        """Return True if at least one session key is stored."""
        return bool(self.key_hash_dict)

    def get_key(self, key_hash):
        """Return the session key for *key_hash*, or None if unknown."""
        return self.key_hash_dict.get(key_hash)

    def get_latest_key(self):
        """Return the most recently stored session key.

        Replaces the previous popitem-and-reinsert chained assignment with a
        direct reversed-iteration lookup (same result, since reinserting the
        popped last item put it back at the end), and chains the original
        exception into SessionKeyUnavailable.
        """
        try:
            latest_hash = next(reversed(self.key_hash_dict))
        except StopIteration as e:
            raise SessionKeyUnavailable("No session key established yet") from e
        return self.key_hash_dict[latest_hash]
class CellCipher:
    """Symmetric message cipher using the newest negotiated session key.

    Wire format: nonce (NONCE_LENGTH) | key-hash tail (HASH_LENGTH) |
    AES-CBC ciphertext.
    """

    def __init__(self, session_key_manager: SessionKeyManager):
        self.session_key_manager = session_key_manager

    def encrypt(self, message):
        # Prefix the nonce and a truncated key hash so the receiver can
        # locate the matching session key.
        key = self.session_key_manager.get_latest_key()
        key_hash = get_hash(key)
        nonce = os.urandom(NONCE_LENGTH)
        return nonce + key_hash[-HASH_LENGTH:] + _sym_enc(key, nonce, message)

    def decrypt(self, message):
        # Split the fixed-size header, then decrypt with the referenced key.
        nonce, key_hash, message = (
            message[:NONCE_LENGTH],
            message[NONCE_LENGTH:HEADER_LENGTH],
            message[HEADER_LENGTH:],
        )
        key = self.session_key_manager.get_key(key_hash)
        if key is None:
            raise SessionKeyUnavailable("No session key found for received message")
        return _sym_dec(key, nonce, message)
class SimpleCellCipher:
    """Per-peer hybrid cipher with cached key agreements.

    Wire format: nonce (NONCE_LENGTH) | RSA-encrypted AES key
    (KEY_ENC_LENGTH) | signature over the encrypted key (SIGNATURE_LENGTH) |
    AES-CBC ciphertext.
    """

    def __init__(self, root_ca: Certificate, pri_key: asymmetric.rsa.RSAPrivateKey, cert: Certificate):
        self._root_ca = root_ca
        self._root_ca_pub_key = root_ca.public_key()
        self._pri_key = pri_key
        self._cert = cert
        self._pub_key = cert.public_key()
        self._validate_cert_chain(self._cert)
        # Caches keyed by hash(cert) / hash(key_enc) so the expensive RSA
        # operations run only once per peer / per received key.
        self._cached_enc = dict()
        self._cached_dec = dict()

    def _validate_cert_chain(self, cert: Certificate):
        # Raises InvalidSignature if cert was not signed by the root CA.
        self._root_ca_pub_key.verify(
            cert.signature, cert.tbs_certificate_bytes, asymmetric.padding.PKCS1v15(), cert.signature_hash_algorithm
        )

    def encrypt(self, message: bytes, target_cert: Certificate):
        # Reuse the AES key previously agreed with this certificate, if any.
        # NOTE(review): keyed on hash(target_cert) -- assumes Certificate
        # objects hash consistently per peer; verify.
        cert_hash = hash(target_cert)
        secret = self._cached_enc.get(cert_hash)
        if secret is None:
            self._validate_cert_chain(target_cert)
            key = os.urandom(KEY_LENGTH)
            remote_pub_key = target_cert.public_key()
            key_enc = _asym_enc(remote_pub_key, key)
            signature = METHOD_NAME(self._pri_key, key_enc)
            self._cached_enc[cert_hash] = (key, key_enc, signature)
        else:
            (key, key_enc, signature) = secret
        nonce = os.urandom(NONCE_LENGTH)
        ct = nonce + key_enc + signature + _sym_enc(key, nonce, message)
        return ct

    def decrypt(self, message: bytes, origin_cert: Certificate):
        # Split the fixed-size header, then decrypt the payload.
        nonce, key_enc, signature = (
            message[:NONCE_LENGTH],
            message[NONCE_LENGTH : NONCE_LENGTH + KEY_ENC_LENGTH],
            message[NONCE_LENGTH + KEY_ENC_LENGTH : SIMPLE_HEADER_LENGTH],
        )
        key_hash = hash(key_enc)
        dec = self._cached_dec.get(key_hash)
        if dec is None:
            # First sight of this encrypted key: verify sender and unwrap it.
            self._validate_cert_chain(origin_cert)
            public_key = origin_cert.public_key()
            _verify(public_key, key_enc, signature)
            key = _asym_dec(self._pri_key, key_enc)
            self._cached_dec[key_hash] = key
        else:
            key = dec
        return _sym_dec(key, nonce, message[SIMPLE_HEADER_LENGTH:])
299,269 | test import processor class | # Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test import utilities
"""
from unittest import TestCase
from metadata.generated.schema.entity.services.connections.database.mysqlConnection import (
MysqlConnection,
)
from metadata.generated.schema.entity.services.serviceType import ServiceType
from metadata.utils.importer import (
DynamicImportException,
get_class_name_root,
get_module_name,
get_source_module_name,
import_bulk_sink_type,
import_connection_fn,
import_from_module,
import_processor_class,
import_sink_class,
import_source_class,
import_stage_class,
)
# pylint: disable=import-outside-toplevel
class ImporterTest(TestCase):
    """
    Validate that we properly convert
    module paths and load classes.
    """

    def test_get_module_name(self) -> None:
        # Plain sources map to the "metadata" module, "-usage" suffixed
        # sources to "usage"; dashed names become snake_case module names.
        self.assertEqual(get_source_module_name("mysql"), "metadata")
        self.assertEqual(get_source_module_name("redshift-usage"), "usage")
        self.assertEqual(get_module_name("query-parser"), "query_parser")

    def test_get_class_name(self) -> None:
        # Dashed type names become CamelCase class-name roots.
        self.assertEqual(get_class_name_root("mysql"), "Mysql")
        self.assertEqual(get_class_name_root("redshift-usage"), "RedshiftUsage")

    def test_import_class(self) -> None:
        from metadata.ingestion.source.database.mysql.metadata import MysqlSource

        # A fully qualified dotted path resolves to the class object itself.
        self.assertEqual(
            import_from_module(
                "metadata.ingestion.source.database.mysql.metadata.MysqlSource"
            ),
            MysqlSource,
        )

    def test_import_source_class(self) -> None:
        from metadata.ingestion.source.database.bigquery.lineage import (
            BigqueryLineageSource,
        )
        from metadata.ingestion.source.database.bigquery.usage import (
            BigqueryUsageSource,
        )
        from metadata.ingestion.source.database.mysql.metadata import MysqlSource

        # The "-lineage"/"-usage" suffixes select the specialised source
        # classes within the same service type.
        self.assertEqual(
            import_source_class(service_type=ServiceType.Database, source_type="mysql"),
            MysqlSource,
        )
        self.assertEqual(
            import_source_class(
                service_type=ServiceType.Database, source_type="bigquery-lineage"
            ),
            BigqueryLineageSource,
        )
        self.assertEqual(
            import_source_class(
                service_type=ServiceType.Database, source_type="bigquery-usage"
            ),
            BigqueryUsageSource,
        )

    def METHOD_NAME(self) -> None:
        from metadata.ingestion.processor.query_parser import QueryParserProcessor

        self.assertEqual(
            import_processor_class(processor_type="query-parser"),
            QueryParserProcessor,
        )

    def test_import_stage_class(self) -> None:
        from metadata.ingestion.stage.table_usage import TableUsageStage

        self.assertEqual(import_stage_class(stage_type="table-usage"), TableUsageStage)

    def test_import_sink_class(self) -> None:
        from metadata.ingestion.sink.metadata_rest import MetadataRestSink

        self.assertEqual(import_sink_class(sink_type="metadata-rest"), MetadataRestSink)

    def test_import_bulk_sink_type(self) -> None:
        from metadata.ingestion.bulksink.metadata_usage import MetadataUsageBulkSink

        self.assertEqual(
            import_bulk_sink_type(bulk_sink_type="metadata-usage"),
            MetadataUsageBulkSink,
        )

    def test_import_get_connection(self) -> None:
        connection = MysqlConnection(
            username="name",
            hostPort="hostPort",
        )
        # A known function name resolves to a callable; an unknown one must
        # raise DynamicImportException rather than return None.
        get_connection_fn = import_connection_fn(
            connection=connection, function_name="get_connection"
        )
        self.assertIsNotNone(get_connection_fn)
        self.assertRaises(
            DynamicImportException,
            import_connection_fn,
            connection=connection,
            function_name="random",
        )
299,270 | test can have unicode in lambda sources | # This file is part of Hypothesis, which may be found at
# https://github.com/HypothesisWorks/hypothesis/
#
# Copyright the Hypothesis Authors.
# Individual contributors are listed in AUTHORS.rst and the git log.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
from hypothesis.internal.reflection import get_pretty_function_description
# NOTE: every test below reflects on the *source text* of the lambdas it
# defines, so the exact formatting of each lambda (including the regions
# guarded by `# fmt: off`) is behaviorally significant -- do not reformat.


def test_bracket_whitespace_is_striped():
    # (sic: "striped" -- renaming the test would change its collected id)
    assert get_pretty_function_description(lambda x: (x + 1)) == "lambda x: (x + 1)"


def test_no_whitespace_before_colon_with_no_args():
    # eval() hides the source, so the body is reported as <unknown>.
    assert get_pretty_function_description(eval("lambda: None")) == "lambda: <unknown>"


def METHOD_NAME():
    # Non-ASCII characters must survive source extraction.
    # NOTE(review): "Γ©" looks like mojibake for "é" from a bad decode, but
    # both occurrences match so the test is self-consistent -- confirm the
    # file's intended encoding before changing it.
    t = lambda x: "Γ©" not in x
    assert get_pretty_function_description(t) == 'lambda x: "Γ©" not in x'


# fmt: off
ordered_pair = (
    lambda right: [].map(
        lambda length: ()))
# fmt: on


def test_can_get_descriptions_of_nested_lambdas_with_different_names():
    assert (
        get_pretty_function_description(ordered_pair)
        == "lambda right: [].map(lambda length: ())"
    )


def test_does_not_error_on_unparsable_source():
    # fmt: off
    t = [
        lambda x: \
        # This will break ast.parse, but the brackets are needed for the real
        # parser to accept this lambda
        x][0]
    # fmt: on
    assert get_pretty_function_description(t) == "lambda x: <unknown>"


def test_source_of_lambda_is_pretty():
    assert get_pretty_function_description(lambda x: True) == "lambda x: True"


def test_variable_names_are_not_pretty():
    # The bound name ("t") must not leak into the description.
    t = lambda x: True
    assert get_pretty_function_description(t) == "lambda x: True"


def test_does_not_error_on_dynamically_defined_functions():
    x = eval("lambda t: 1")
    get_pretty_function_description(x)


def test_collapses_whitespace_nicely():
    # fmt: off
    t = (
        lambda x, y: 1
    )
    # fmt: on
    assert get_pretty_function_description(t) == "lambda x, y: 1"


def test_is_not_confused_by_tuples():
    p = (lambda x: x > 1, 2)[0]
    assert get_pretty_function_description(p) == "lambda x: x > 1"


def test_strips_comments_from_the_end():
    t = lambda x: 1  # A lambda comment
    assert get_pretty_function_description(t) == "lambda x: 1"


def test_does_not_strip_hashes_within_a_string():
    t = lambda x: "#"
    assert get_pretty_function_description(t) == 'lambda x: "#"'


def test_can_distinguish_between_two_lambdas_with_different_args():
    a, b = (lambda x: 1, lambda y: 2)
    assert get_pretty_function_description(a) == "lambda x: 1"
    assert get_pretty_function_description(b) == "lambda y: 2"


def test_does_not_error_if_it_cannot_distinguish_between_two_lambdas():
    # Identical signatures on one line are ambiguous: only require that
    # *some* plausible source is returned.
    a, b = (lambda x: 1, lambda x: 2)
    assert "lambda x:" in get_pretty_function_description(a)
    assert "lambda x:" in get_pretty_function_description(b)


def test_lambda_source_break_after_def_with_brackets():
    # fmt: off
    f = (lambda n:
         'aaa')
    # fmt: on
    source = get_pretty_function_description(f)
    assert source == "lambda n: 'aaa'"


def test_lambda_source_break_after_def_with_line_continuation():
    # fmt: off
    f = lambda n:\
        'aaa'
    # fmt: on
    source = get_pretty_function_description(f)
    assert source == "lambda n: 'aaa'"
def arg_decorator(*s):
    """Decorator factory used below: replaces the decorated function with
    the raw tuple of decorator arguments, so tests can reflect on the
    lambdas that were passed in."""
    def accept(f):
        return s

    return accept


@arg_decorator(lambda x: x + 1)
def plus_one():
    pass


@arg_decorator(lambda x: x + 1, lambda y: y * 2)
def two_decorators():
    pass


def test_can_extract_lambda_repr_in_a_decorator():
    # plus_one is now the 1-tuple of decorator arguments.
    assert get_pretty_function_description(plus_one[0]) == "lambda x: x + 1"


def test_can_extract_two_lambdas_from_a_decorator_if_args_differ():
    a, b = two_decorators
    assert get_pretty_function_description(a) == "lambda x: x + 1"
    assert get_pretty_function_description(b) == "lambda y: y * 2"


@arg_decorator(lambda x: x + 1)
def decorator_with_space():
    pass


def test_can_extract_lambda_repr_in_a_decorator_with_spaces():
    assert get_pretty_function_description(decorator_with_space[0]) == "lambda x: x + 1"


@arg_decorator(lambda: ())
def to_brackets():
    pass


def test_can_handle_brackets_in_decorator_argument():
    assert get_pretty_function_description(to_brackets[0]) == "lambda: ()"


def identity(x):
    """Pass-through helper: wraps the lambda one call deeper than the
    decorator itself."""
    return x


@arg_decorator(identity(lambda x: x + 1))
def decorator_with_wrapper():
    pass


def test_can_handle_nested_lambda_in_decorator_argument():
    assert (
        get_pretty_function_description(decorator_with_wrapper[0]) == "lambda x: x + 1"
    )
299,271 | instrument stack | """
Stack handler is a giant object, so we split it up into files/classes
This 'core' is inherited by all the other classes and just initialises, plus does some common functions
"""
from collections import namedtuple
from syscore.constants import arg_not_supplied, success, failure
from sysdata.data_blob import dataBlob
from sysexecution.order_stacks.order_stack import orderStackData, failureWithRollback
from sysexecution.orders.base_orders import Order
from sysexecution.orders.list_of_orders import listOfOrders
from sysproduction.data.orders import dataOrders
from sysproduction.data.prices import diagPrices, updatePrices
from sysproduction.data.contracts import dataContracts
from sysproduction.data.broker import dataBroker
from sysproduction.data.positions import updatePositions
class stackHandlerCore(object):
    """Shared base for the stack handler family.

    Holds the data blob, the three order stacks (instrument / contract /
    broker) and lazily constructed data-access helpers used by subclasses.
    """

    def __init__(self, data: dataBlob = arg_not_supplied):
        if data is arg_not_supplied:
            data = dataBlob()
        self._data = data
        self._log = data.log

        # The three order stacks all come from the shared order-data store.
        order_data = dataOrders(data)
        self._instrument_stack = order_data.db_instrument_stack_data
        self._contract_stack = order_data.db_contract_stack_data
        self._broker_stack = order_data.db_broker_stack_data

    @property
    def data(self):
        return self._data

    @property
    def log(self):
        return self._log

    @property
    def METHOD_NAME(self):
        return self._instrument_stack

    @property
    def contract_stack(self):
        return self._contract_stack

    @property
    def broker_stack(self):
        return self._broker_stack

    def _get_or_create_helper(self, attr_name, factory):
        # Generic create-on-first-use cache for the data-access helpers
        # below; the instance attribute doubles as the cache slot.
        helper = getattr(self, attr_name, None)
        if helper is None:
            helper = factory(self.data)
            setattr(self, attr_name, helper)
        return helper

    @property
    def diag_prices(self) -> diagPrices:
        return self._get_or_create_helper("_diag_prices", diagPrices)

    @property
    def data_contracts(self) -> dataContracts:
        return self._get_or_create_helper("_data_contracts", dataContracts)

    @property
    def data_broker(self) -> dataBroker:
        return self._get_or_create_helper("_data_broker", dataBroker)

    @property
    def update_prices(self) -> updatePrices:
        return self._get_or_create_helper("_update_prices", updatePrices)
def put_children_on_stack(
    child_stack: orderStackData,
    parent_order: Order,
    list_of_child_orders: listOfOrders,
    parent_log,
) -> list:
    """Try to place *list_of_child_orders* on *child_stack*.

    Returns the list of new order ids.  Returns an empty list if the stack
    rejected them but rolled back cleanly (caller may retry), or if an
    unexpected error left the stack possibly corrupted (logged critical).
    """
    try:
        list_of_child_ids = child_stack.put_list_of_orders_on_stack(
            list_of_child_orders
        )
        return list_of_child_ids
    except failureWithRollback as exception:
        parent_log.warning(
            "Tried to add child orders but %s; rolled back so can try again (parent %s)"
            % (str(exception), str(parent_order))
        )
        return []
    except Exception as exception:
        parent_log.critical(
            "Tried to add child orders, error %s and couldn't roll back! Order stack may well be corrupted!"
            % str(exception)
        )
        return []
def add_children_to_parent_or_rollback_children(
    parent_order: Order,
    list_of_child_ids: list,
    parent_stack: orderStackData,
    child_stack: orderStackData,
    parent_log,
):
    """Attach already-stacked child orders to their parent order.

    On failure, roll the children back off the child stack so the caller
    can retry.  Returns the `success`/`failure` constants; a critical log
    line is emitted when even the rollback fails (stack may be corrupted).
    """
    try:
        parent_stack.add_children_to_order_without_existing_children(
            parent_order.order_id, list_of_child_ids
        )
    except Exception as e:
        try:
            child_stack.rollback_list_of_orders_on_stack(list_of_child_ids)
            parent_log.warning(
                "Tried to add child orders to parent but %s; rolled back so can try again (parent %s)"
                % (str(e), str(parent_order))
            )
            return failure
        except Exception:
            # FIX: was a bare `except:`, which would also trap SystemExit /
            # KeyboardInterrupt raised during the rollback attempt.
            parent_log.critical(
                "Tried to add child orders, error %s and couldn't roll back! Order stack may well be corrupted!"
                % str(e)
            )
            return failure
    return success
def log_successful_adding(
    list_of_child_orders: listOfOrders,
    list_of_child_ids: list,
    parent_order: Order,
    parent_log,
):
    """Emit one debug line per child order successfully placed on a stack."""
    for child_id, child_order in zip(list_of_child_ids, list_of_child_orders):
        # Each child logs with its own attributes layered on the parent log.
        child_log = child_order.log_with_attributes(parent_log)
        child_log.debug(
            "Put child order %s on stack with ID %d from parent order %s"
            % (str(child_order), child_id, str(parent_order))
        )
def rollback_parents_and_children_and_handle_exceptions(
    parent_stack: orderStackData,
    child_stack: orderStackData,
    parent_order_id: int,
    list_of_child_order_ids: list,
    parent_log,
    error_from_adding_child_orders: Exception,
):
    """Best-effort rollback after a failed parent+children add.

    Logs a warning when the rollback succeeds and a critical message when
    even the rollback fails (the stack may then be corrupted).  Always
    returns None.
    """
    ##
    try:
        rollback_parents_and_children(
            child_stack=child_stack,
            parent_stack=parent_stack,
            list_of_child_order_ids=list_of_child_order_ids,
            parent_order_id=parent_order_id,
        )
        # The warning sits inside the try, so a failure while logging is
        # also routed to the critical handler below.
        parent_log.warning(
            "Error %s when adding a set of parents and children but managed to rollback"
            % str(error_from_adding_child_orders)
        )
        return None
    except Exception as rollback_exception:
        ## bloody hell even the rollback has failed, throw everything out of the pram
        parent_log.critical(
            "Error %s when adding a set of parents and children and couldn't rollback got error %s! Stack may be corrupted"
            % (str(error_from_adding_child_orders), str(rollback_exception))
        )
        return None
def rollback_parents_and_children(
parent_stack: orderStackData,
child_stack: orderStackData,
parent_order_id: int,
list_of_child_order_ids: list,
):
## parent order might be locked
parent_stack.unlock_order_on_stack(parent_order_id)
parent_stack.deactivate_order(parent_order_id)
parent_stack.remove_order_with_id_from_stack(parent_order_id)
# If any children, roll them back also
if len(list_of_child_order_ids) > 0:
child_stack.rollback_list_of_orders_on_stack(list_of_child_order_ids)
# Lightweight grouping of the ids that tie one instrument order to its
# contract and broker child orders across the three stacks.
orderFamily = namedtuple(
    "orderFamily",
    ["instrument_order_id", "list_of_contract_order_id", "list_of_broker_order_id"],
)
299,272 | register handler | #
# This source file is part of the EdgeDB open source project.
#
# Copyright 2021-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
from typing import *
from edb import errors
from edb.edgeql import ast as qlast
from edb.edgeql import qltypes
from . import delta as sd
from . import objects as so
from . import name as sn
from . import schema as s_schema
class FutureBehavior(
    so.Object,
    qlkind=qltypes.SchemaObjectClass.FUTURE,
    data_safe=False,
):
    """Schema object recording that a named 'future' behavior is enabled."""

    # Name is not inherited and cannot be renamed (compcoef=0.0).
    name = so.SchemaField(
        sn.Name,
        inheritable=False,
        compcoef=0.0,  # can't rename
    )
class FutureBehaviorCommandContext(
    sd.ObjectCommandContext[FutureBehavior],
):
    """Command context for FutureBehavior DDL commands."""

    pass
# Unlike extensions, futures are *explicitly* built into the
# language. Enabling or disabling a futures might require making
# other changes (recompiling functions that depend on it, for
# example), so each future is mapped to a handler function that can
# generate a command.
_FutureBehaviorHandler = Callable[
['FutureBehaviorCommand', s_schema.Schema, sd.CommandContext, bool],
tuple[s_schema.Schema, sd.Command],
]
FUTURE_HANDLERS: dict[str, _FutureBehaviorHandler] = {}
def METHOD_NAME(name: str) -> Callable[
    [_FutureBehaviorHandler], _FutureBehaviorHandler
]:
    """Decorator factory: register the decorated function as the handler
    for the future behavior called *name*."""
    def register(handler: _FutureBehaviorHandler) -> _FutureBehaviorHandler:
        FUTURE_HANDLERS[name] = handler
        return handler

    return register
def future_enabled(schema: s_schema.Schema, feat: str) -> bool:
    """Report whether the future behavior *feat* is enabled in *schema*."""
    found = schema.get_global(FutureBehavior, feat, default=None)
    return bool(found)
class FutureBehaviorCommand(
    sd.ObjectCommand[FutureBehavior],
    context_class=FutureBehaviorCommandContext,
):
    """Base command for creating/dropping a future behavior; dispatches to
    the handler registered for the future's name."""

    # A command that gets run after adjusting the future value.
    # It needs to run *after* the delete, for a 'drop future',
    # and so it can't use any of the existing varieties of subcommands.
    #
    # If anything else ends up needing to do this, we can add another
    # variety of subcommand.
    future_cmd: sd.Command | None = None

    def copy(self: FutureBehaviorCommand) -> FutureBehaviorCommand:
        # future_cmd is not a regular subcommand, so the base-class copy
        # does not clone it for us.
        result = super().copy()
        if self.future_cmd:
            result.future_cmd = self.future_cmd.copy()
        return result

    @classmethod
    def adapt(
        cls: Type[FutureBehaviorCommand], obj: sd.Command
    ) -> FutureBehaviorCommand:
        # Same reasoning as copy(): adapt the follow-up command explicitly.
        result = super(FutureBehaviorCommand, cls).adapt(obj)
        assert isinstance(obj, FutureBehaviorCommand)
        mcls = cast(sd.CommandMeta, type(cls))
        if obj.future_cmd:
            result.future_cmd = mcls.adapt(obj.future_cmd)
        return result

    def apply(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        schema = super().apply(schema, context)
        if not context.canonical:
            key = str(self.classname)
            if key not in FUTURE_HANDLERS:
                raise errors.QueryError(
                    f"Unknown future '{str(key)}'"
                )
            # The handler may rewrite the schema and hand back a follow-up
            # command, which is applied after the create/drop itself.
            schema, cmd = FUTURE_HANDLERS[key](
                self, schema, context, isinstance(self, sd.CreateObject))
            self.future_cmd = cmd
        if self.future_cmd:
            schema = self.future_cmd.apply(schema, context)
        return schema
class CreateFutureBehavior(
    FutureBehaviorCommand,
    sd.CreateObject[FutureBehavior],
):
    """CREATE FUTURE <name> command."""

    astnode = qlast.CreateFuture


class DeleteFutureBehavior(
    FutureBehaviorCommand,
    sd.DeleteObject[FutureBehavior],
):
    """DROP FUTURE <name> command."""

    astnode = qlast.DropFuture
299,273 | test get with kwargs | # This file is a part of the AnyBlok project
#
# Copyright (C) 2015 Jean-Sebastien SUZANNE <jssuzanne@anybox.fr>
# Copyright (C) 2015 Denis VIVIΓS <dvivies@geoblink.com>
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file,You can
# obtain one at http://mozilla.org/MPL/2.0/.
import pytest
@pytest.mark.usefixtures("rollback_registry")
class TestQueryScope:
    """The dict* query helpers (dictone/dictfirst/dictall) should mirror
    one()/first()/all() but return plain dicts keyed by column name or,
    when labels are used, by label."""

    def test_dictone(self, rollback_registry):
        registry = rollback_registry
        query = registry.System.Cache.query().limit(1)
        cache = query.one()
        # Without an explicit column list, every model column is present.
        assert query.dictone() == {
            "id": cache.id,
            "method": cache.method,
            "registry_name": cache.registry_name,
        }

    def test_iteration(self, rollback_registry):
        registry = rollback_registry
        query = registry.System.Blok.query()
        has_iteration = False
        for blok in query:
            has_iteration = True
        # The query object itself must be directly iterable and non-empty.
        if has_iteration is False:
            pytest.fail("No iteration")

    def test_dictone_on_some_column(self, rollback_registry):
        registry = rollback_registry
        # Restricting the query to columns restricts the dict keys too.
        query = registry.System.Cache.query("id", "method").limit(1)
        cache = query.one()
        assert query.dictone() == {
            "id": cache.id,
            "method": cache.method,
        }

    def test_dictone_on_some_column_with_label(self, rollback_registry):
        registry = rollback_registry
        Cache = registry.System.Cache
        # Labels replace the column names as dict keys.
        query = Cache.query(
            Cache.id.label("n1"),
            Cache.method.label("t2"),
        ).limit(1)
        cache = query.one()
        assert query.dictone() == {
            "n1": cache.n1,
            "t2": cache.t2,
        }

    def test_dictfirst(self, rollback_registry):
        registry = rollback_registry
        query = registry.System.Cache.query()
        cache = query.first()
        assert query.dictfirst() == {
            "id": cache.id,
            "method": cache.method,
            "registry_name": cache.registry_name,
        }

    def test_dictfirst_on_some_column(self, rollback_registry):
        registry = rollback_registry
        query = registry.System.Cache.query("id", "method")
        cache = query.first()
        assert query.dictfirst() == {
            "id": cache.id,
            "method": cache.method,
        }

    def test_dictfirst_on_some_column_with_label(self, rollback_registry):
        registry = rollback_registry
        Cache = registry.System.Cache
        query = Cache.query(
            Cache.id.label("n1"),
            Cache.method.label("t2"),
        )
        cache = query.first()
        assert query.dictfirst() == {
            "n1": cache.n1,
            "t2": cache.t2,
        }

    def test_dictall(self, rollback_registry):
        registry = rollback_registry
        query = registry.System.Cache.query().limit(2)
        caches = query.all()

        def to_dict(cache):
            return {
                "id": cache.id,
                "method": cache.method,
                "registry_name": cache.registry_name,
            }

        # Membership check rather than equality: row order is not asserted.
        dictall = query.dictall()
        for i in range(2):
            assert to_dict(caches[i]) in dictall

    def test_dictall_on_some_column(self, rollback_registry):
        registry = rollback_registry
        query = registry.System.Cache.query("id", "method").limit(2)
        caches = query.all()

        def to_dict(cache):
            return {
                "id": cache.id,
                "method": cache.method,
            }

        dictall = query.dictall()
        for i in range(2):
            assert to_dict(caches[i]) in dictall

    def test_dictall_on_some_column_with_label(self, rollback_registry):
        registry = rollback_registry
        Cache = registry.System.Cache
        query = Cache.query(
            Cache.id.label("n1"),
            Cache.method.label("t2"),
        ).limit(2)
        caches = query.all()

        def to_dict(cache):
            return {
                "n1": cache.n1,
                "t2": cache.t2,
            }

        dictall = query.dictall()
        for i in range(2):
            assert to_dict(caches[i]) in dictall

    def test_get_with_dict_use_prefix(self, rollback_registry):
        registry = rollback_registry
        # get() accepts filter criteria either as a dict ...
        entry = registry.System.Blok.query().get({"name": "anyblok-core"})
        assert entry is not None

    def METHOD_NAME(self, rollback_registry):
        registry = rollback_registry
        # ... or as keyword arguments.
        entry = registry.System.Blok.query().get(name="anyblok-core")
        assert entry is not None

    def test_str(self, rollback_registry):
        registry = rollback_registry
        query = registry.System.Sequence.query()
        # str()/repr() of a query expose the underlying SQL statement.
        assert str(query) == str(query.sql_statement)

    def test_repr(self, rollback_registry):
        registry = rollback_registry
        query = registry.System.Sequence.query()
        assert repr(query) == str(query.sql_statement)
299,274 | as list | from __future__ import print_function
import importlib
import json
import sys
from env import Env
from utilities.proc import which
from utilities.lazy import lazy
from utilities.subsystems.docker import has_docker
DRIVER_CAP_FN = "driver_capabilities"
DRIVER_CAP_PREFIX = "drivers.resource."
class BaseCapabilities(object):
    """Node capability scanner and cache.

    Capabilities are kept as a flat list of strings: plain strings are tags
    (e.g. "node.x.zfs"), "key=value" strings are labels (e.g.
    "node.x.zfs.path=/sbin/zfs").  Scan results are persisted to
    Env.paths.capabilities and reloaded lazily via the `data` property.
    """

    def __contains__(self, cap):
        # Membership tests only look at tags, never at labels.
        return cap in self.data["tags"]

    def scan_generic(self):
        """Detect OS-independent capabilities: well-known binaries, docker."""
        tags = [
            "node.x.cache.name",
            "node.x.cache.ttl",
        ]
        labels = {}
        for tag, bp in (
            ("node.x.blkid", Env.syspaths.blkid),
            ("node.x.dmsetup", Env.syspaths.dmsetup),
            ("node.x.drbdadm", "drbdadm"),
            ("node.x.exportfs", "exportfs"),
            ("node.x.findfs", "findfs"),
            ("node.x.git", "git"),
            ("node.x.hpvmstart", "/opt/hpvm/bin/hpvmstart"),
            ("node.x.hpvmstatus", "/opt/hpvm/bin/hpvmstatus"),
            ("node.x.hpvmstop", "/opt/hpvm/bin/hpvmstop"),
            ("node.x.ifconfig", "ifconfig"),
            ("node.x.ip", "/sbin/ip"),
            ("node.x.losetup", Env.syspaths.losetup),
            ("node.x.lvs", "/sbin/lvs"),
            ("node.x.multipath", Env.syspaths.multipath),
            ("node.x.netstat", "netstat"),
            ("node.x.podman", "/usr/bin/podman"),
            ("node.x.powermt", "powermt"),
            ("node.x.scsi_id", ("scsi_id", "/lib/udev/scsi_id", "/usr/lib/udev/scsi_id")),
            ("node.x.share", "share"),
            ("node.x.srp", "srp"),
            ("node.x.srp_su", "srp_su"),
            ("node.x.stat", "stat"),
            ("node.x.udevadm", "udevadm"),
            ("node.x.vmware-cmd", "vmware-cmd"),
            ("node.x.vxdmpadm", "vxdmpadm"),
            ("node.x.zfs", "zfs"),
            ("node.x.zpool", "zpool"),
        ):
            # Each entry may list several candidate paths; the first one
            # found yields both the tag and a "<tag>.path" label.
            if not isinstance(bp, tuple):
                bp = (bp,)
            for bpc in bp:
                p = which(bpc)
                if not p:
                    continue
                tags.append(tag)
                labels[tag + ".path"] = p
                break
        if has_docker(["docker"]):
            tags.append("node.x.docker")
        if has_docker(["docker.io"]):
            tags.append("node.x.docker.io")
        if has_docker(["dockerd"]):
            tags.append("node.x.dockerd")
        return {"tags": tags, "labels": labels}

    def need_refresh(self):
        # Hook for subclasses: return True to force a rescan instead of
        # using the cached capability file.
        return False

    def scan_os(self):
        """OS-specific scan hook, overridden by per-OS subclasses."""
        return {"tags": [], "labels": {}}

    def scan(self, node=None):
        """Run the full scan (generic + OS + drivers), persist and return it."""
        from core.objects.svcdict import SECTIONS
        from utilities.drivers import iter_drivers
        if node is None:
            from core.node import Node
            node = Node()
        data = self.scan_generic()
        osdata = self.scan_os()
        data["tags"] += osdata["tags"]
        data["labels"].update(osdata["labels"])
        for mod in iter_drivers(SECTIONS):
            if not hasattr(mod, DRIVER_CAP_FN):
                if hasattr(mod, "DRIVER_GROUP") and hasattr(mod, "DRIVER_BASENAME"):
                    # consider the driver active by default
                    data["tags"] += ["%s%s.%s" % (DRIVER_CAP_PREFIX, mod.DRIVER_GROUP, mod.DRIVER_BASENAME)]
                continue
            try:
                for cap in getattr(mod, DRIVER_CAP_FN)(node=node):
                    if isinstance(cap, tuple):
                        # (name, value) capabilities become labels.
                        cap, val = cap
                        pcap = DRIVER_CAP_PREFIX + cap
                        data["labels"][pcap] = val
                    else:
                        pcap = DRIVER_CAP_PREFIX + cap
                        data["tags"].append(pcap)
            except Exception as exc:
                # A broken driver must not abort the whole scan.
                print(mod, exc, file=sys.stderr)
        self.dump(data)
        return data

    @staticmethod
    def METHOD_NAME(data):
        """Flatten a {tags, labels} dict into a sorted list of strings."""
        entries = list(data["tags"])
        for key, val in data["labels"].items():
            entries.append("%s=%s" % (key, val))
        return sorted(entries)

    def dump(self, data):
        """Persist the flattened capability list to the cache file."""
        data = self.METHOD_NAME(data)
        with open(Env.paths.capabilities, "w") as f:
            json.dump(data, f)

    def load(self):
        """Load the cached list and split it back into tags and labels."""
        with open(Env.paths.capabilities, "r") as f:
            entries = json.load(f)
        data = {"tags": [], "labels": {}}
        for entry in entries:
            try:
                label, val = entry.split("=", 1)
                data["labels"][label] = val
            except ValueError:
                # No "=" present: this entry is a plain tag.
                data["tags"].append(entry)
        return data

    @lazy
    def data(self):
        """Capability data: the cached file if present and fresh, else a rescan."""
        if self.need_refresh():
            return self.scan()
        try:
            return self.load()
        except (OSError, ValueError):
            # FIX: was `except Exception as exc` with the binding unused.
            # Cache missing or unreadable/corrupt (open() -> OSError,
            # json.load -> JSONDecodeError, a ValueError subclass): fall
            # back to a fresh scan.
            return self.scan()

    def has(self, cap):
        # Equivalent to `cap in self`; kept for call-site readability.
        return cap in self.data["tags"]

    def get(self, cap):
        """Return the value of label *cap*, or None if unset."""
        return self.data["labels"].get(cap)
# Swap in the platform-specific Capabilities implementation when a sibling
# module named after Env.module_sysname exists; otherwise fall back to the
# generic scanner.
try:
    _package = __package__ or __spec__.name  # pylint: disable=undefined-variable
    _os = importlib.import_module("." + Env.module_sysname, package=_package)
    capabilities = _os.Capabilities()
except (ImportError, AttributeError):
    capabilities = BaseCapabilities()
|
299,275 | gmtime | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
# vi: set ft=python sts=4 ts=4 sw=4 noet :
# This file is part of Fail2Ban.
#
# Fail2Ban is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Fail2Ban is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Fail2Ban; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
__author__ = "Cyril Jaquier"
__copyright__ = "Copyright (c) 2004 Cyril Jaquier"
__license__ = "GPL"
import datetime
import re
import time
##
# MyTime class.
#
class MyTime:
	"""A wrapper around time module primarily for testing purposes

	This class is a wrapper around time.time() and time.gmtime(). When
	performing unit test, it is very useful to get a fixed value from
	these functions.  Thus, time.time() and time.gmtime() should never
	be called directly.  This wrapper should be called instead. The API
	are equivalent.
	"""

	# Fixed "current" timestamp used instead of the real clock, or None.
	myTime = None
	# Timestamp for which now() returns the prebuilt datetime below.
	alternateNowTime = None
	alternateNow = None

	@staticmethod
	def setAlternateNow(t):
		"""Set current time.

		Use None in order to always get the real current time.

		@param t the time to set or None
		"""
		MyTime.alternateNowTime = t
		MyTime.alternateNow = \
			datetime.datetime.fromtimestamp(t) if t is not None else None

	@staticmethod
	def setTime(t):
		"""Set current time.

		Use None in order to always get the real current time.

		@param t the time to set or None
		"""
		MyTime.myTime = t

	@staticmethod
	def time():
		"""Decorate time.time() for the purpose of testing mocking

		@return time.time() if setTime was called with None
		"""
		if MyTime.myTime is None:
			return time.time()
		else:
			return MyTime.myTime

	@staticmethod
	def METHOD_NAME():
		"""Decorate time.gmtime() for the purpose of testing mocking

		@return time.gmtime() if setTime was called with None
		"""
		if MyTime.myTime is None:
			return time.METHOD_NAME()
		else:
			return time.METHOD_NAME(MyTime.myTime)

	@staticmethod
	def now():
		"""Decorate datetime.now() for the purpose of testing mocking

		@return datetime.now() if setTime was called with None
		"""
		if MyTime.myTime is None:
			return datetime.datetime.now()
		# Reuse the precomputed datetime when it matches the mocked time.
		if MyTime.myTime == MyTime.alternateNowTime:
			return MyTime.alternateNow
		return datetime.datetime.fromtimestamp(MyTime.myTime)

	@staticmethod
	def localtime(x=None):
		"""Decorate time.localtime() for the purpose of testing mocking

		@return time.localtime() if setTime was called with None
		"""
		# An explicit argument always bypasses the mock.
		if MyTime.myTime is None or x is not None:
			return time.localtime(x)
		else:
			return time.localtime(MyTime.myTime)

	@staticmethod
	def time2str(unixTime, format="%Y-%m-%d %H:%M:%S"):
		"""Convert time to a string representing as date and time using given format.
		Default format is ISO 8601, YYYY-MM-DD HH:MM:SS without microseconds.

		@return ISO-capable string representation of given unixTime
		"""
		# consider end of 9999th year (in GMT+23 to avoid year overflow in other TZ)
		dt = datetime.datetime.fromtimestamp(
			unixTime).replace(microsecond=0
		) if unixTime < 253402214400 else datetime.datetime(9999, 12, 31, 23, 59, 59)
		return dt.strftime(format)

	## precreate/precompile primitives used in str2seconds:

	## preparing expression:
	_str2sec_prep = re.compile(r"(?i)(?<=[a-z])(\d)")
	## finally expression:
	_str2sec_fini = re.compile(r"(\d)\s+(\d)")
	## wrapper for each sub part:
	_str2sec_subpart = r"(?i)(?<=[\d\s])(%s)\b"
	## parts to be replaced - pair of (regexp x replacement):
	_str2sec_parts = (
		(re.compile(_str2sec_subpart % r"days?|da|dd?"), "*"+str(24*60*60)),
		(re.compile(_str2sec_subpart % r"weeks?|wee?|ww?"), "*"+str(7*24*60*60)),
		(re.compile(_str2sec_subpart % r"months?|mon?"), "*"+str((365*3+366)*24*60*60/4/12)),
		(re.compile(_str2sec_subpart % r"years?|yea?|yy?"), "*"+str((365*3+366)*24*60*60/4)),
		(re.compile(_str2sec_subpart % r"seconds?|sec?|ss?"), "*"+str(1)),
		(re.compile(_str2sec_subpart % r"minutes?|min?|mm?"), "*"+str(60)),
		(re.compile(_str2sec_subpart % r"hours?|hou?|hh?"), "*"+str(60*60)),
	)

	@staticmethod
	def str2seconds(val):
		"""Wraps string expression like "1h 2m 3s" into number contains seconds (3723).

		The string expression will be evaluated as mathematical expression, spaces between each groups
		will be wrapped to "+" operand (only if any operand does not specified between).
		Because of case insensitivity and overwriting with minutes ("m" or "mm"), the short replacement for month
		are "mo" or "mon".
		Ex: 1hour+30min = 5400
		    0d 1h 30m   = 5400
		    1year-6mo   = 15778800
		    6 months    = 15778800
		warn: month is not 30 days, it is a year in seconds / 12, the leap years will be respected also:
		    >>>> float(str2seconds("1month")) / 60 / 60 / 24
		    30.4375
		    >>>> float(str2seconds("1year")) / 60 / 60 / 24
		    365.25

		@returns number (calculated seconds from expression "val")
		"""
		if isinstance(val, (int, float, complex)):
			return val
		# replace together standing abbreviations, example '1d12h' -> '1d 12h':
		val = MyTime._str2sec_prep.sub(r" \1", val)
		# replace abbreviation with expression:
		for rexp, rpl in MyTime._str2sec_parts:
			val = rexp.sub(rpl, val)
		val = MyTime._str2sec_fini.sub(r"\1+\2", val)
		# SECURITY NOTE(review): the rewritten expression is handed to
		# eval(); only trusted configuration values may reach this point.
		return eval(val)
class seconds2str():
	"""Converts seconds to string on demand (if string representation needed).

	Ex: seconds2str(86400*390) = 1y 3w 4d
	    seconds2str(86400*368) = 1y 3d
	    seconds2str(86400*365.5) = 1y
	    seconds2str(86400*2+3600*7+60*15) = 2d 7h 15m
	    seconds2str(86400*2+3599) = 2d 1h
	    seconds2str(3600-5) = 1h
	    seconds2str(3600-10) = 59m 50s
	    seconds2str(59) = 59s
	"""
	def __init__(self, sec):
		# Raw number of seconds; conversion is deferred to __str__.
		self.sec = sec

	def __str__(self):
		"""Render with at most 3 unit groups, rounding coarser for
		larger durations (see the per-branch rounding comments)."""
		# s = str(datetime.timedelta(seconds=int(self.sec)))
		# return s if s[-3:] != ":00" else s[:-3]
		s = self.sec; c = 3
		# automatic accuracy: round by large values (and maximally 3 groups)
		if s >= 31536000: # a year as 365*24*60*60 (don't need to consider leap year by this accuracy)
			s = int(round(float(s)/86400)) # round by a day
			r = str(s//365) + 'y '; s %= 365
			if s >= 7:
				r += str(s//7) + 'w '; s %= 7
			if s:
				r += str(s) + 'd '
			return r[:-1]
		if s >= 604800: # a week as 24*60*60*7
			s = int(round(float(s)/3600)) # round by a hour
			r = str(s//168) + 'w '; s %= 168
			if s >= 24:
				r += str(s//24) + 'd '; s %= 24
			if s:
				r += str(s) + 'h '
			return r[:-1]
		if s >= 86400: # a day as 24*60*60
			s = int(round(float(s)/60)) # round by a minute
			r = str(s//1440) + 'd '; s %= 1440
			if s >= 60:
				r += str(s//60) + 'h '; s %= 60
			if s:
				r += str(s) + 'm '
			return r[:-1]
		if s >= 3595: # a hour as 60*60 (- 5 seconds)
			s = int(round(float(s)/10)) # round by 10 seconds
			r = str(s//360) + 'h '; s %= 360
			if s >= 6: # a minute
				r += str(s//6) + 'm '; s %= 6
			return r[:-1]
		# Below an hour: exact minutes and seconds, no rounding.
		r = ''
		if s >= 60: # a minute
			r += str(s//60) + 'm '; s %= 60
		if s: # remaining seconds
			r += str(s) + 's '
		elif not self.sec: # 0s
			r = '0 '
		return r[:-1]

	def __repr__(self):
		# repr intentionally mirrors str: this object is a display helper.
		return self.__str__()
299,276 | name | # (C) Datadog, Inc. 2022-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from __future__ import annotations
import os
import sys
from functools import lru_cache
from importlib import import_module
@lru_cache(maxsize=None)
def get_platform_name():
    """Return the normalized name of the current OS (computed once)."""
    import platform

    return normalize_platform_name(platform.system())


def normalize_platform_name(platform_name):
    """Lower-case an OS name, mapping Darwin to 'macos'."""
    lowered = platform_name.lower()
    if lowered == 'darwin':
        return 'macos'
    return lowered
class Platform:
    """Cross-platform helpers for process execution and OS detection."""

    def __init__(self, display_func=print):
        # Callable used to echo streamed subprocess output (defaults to print).
        self.__display_func = display_func
        # Lazily loaded constants
        self.__default_shell = None
        self.__format_file_uri = None
        self.__join_command_args = None
        self.__name = None
        # Whether or not an interactive status is being displayed
        self.displaying_status = False
        self.__modules = LazilyLoadedModules()

    @property
    def modules(self):
        """
        Accessor for lazily loading modules that either take multiple milliseconds to import
        (like `shutil` and `subprocess`) or are not used on all platforms (like `shlex`).
        """
        return self.__modules

    def format_for_subprocess(self, command: str | list[str], *, shell: bool):
        """
        Format the given command in a cross-platform manner for immediate consumption by subprocess utilities.
        """
        if self.windows:
            # Manually locate executables on Windows to avoid multiple cases in which `shell=True` is required:
            #
            # - If the `PATH` environment variable has been modified, see: https://bugs.python.org/issue8557
            # - Executables that do not have the extension `.exe`, see:
            #   https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-createprocessw
            if not shell and not isinstance(command, str):
                executable = command[0]
                new_command = [self.modules.shutil.which(executable) or executable]
                new_command.extend(command[1:])
                return new_command
        else:
            if not shell and isinstance(command, str):
                return self.modules.shlex.split(command)
        return command

    def exit_with_code(self, code):
        # Indirection point so callers/tests can intercept process exit.
        return sys.exit(code)

    def _run_command_integrated(self, command: str | list[str], shell=False, **kwargs):
        # Stream output through the display function (used while an interactive
        # status is shown) and then assemble a CompletedProcess result.
        with self.capture_process(command, shell=shell, **kwargs) as process:
            for line in self.stream_process_output(process):
                self.__display_func(line, end='')
            stdout, stderr = process.communicate()
        return self.modules.subprocess.CompletedProcess(process.args, process.poll(), stdout, stderr)

    def run_command(self, command: str | list[str], shell=False, **kwargs):
        """
        Equivalent to the standard library's `subprocess.run`, with the command first being properly formatted.
        """
        if self.displaying_status and not kwargs.get('capture_output'):
            return self._run_command_integrated(command, shell=shell, **kwargs)
        return self.modules.subprocess.run(self.format_for_subprocess(command, shell=shell), shell=shell, **kwargs)

    def check_command(self, command: str | list[str], shell=False, **kwargs):
        """
        Equivalent to `run_command`, but non-zero exit codes will gracefully end program execution.
        """
        process = self.run_command(command, shell=shell, **kwargs)
        if process.returncode:
            self.exit_with_code(process.returncode)
        return process

    def check_command_output(self, command: str | list[str], shell=False, **kwargs) -> str:
        """
        Equivalent to the output from the process returned by `capture_process`,
        but non-zero exit codes will gracefully end program execution.
        """
        with self.capture_process(command, shell=shell, **kwargs) as process:
            stdout, _ = process.communicate()
        if process.returncode:
            # Surface the captured output before exiting so failures are visible.
            self.__display_func(stdout.decode('utf-8'))
            self.exit_with_code(process.returncode)
        return stdout.decode('utf-8')

    def capture_process(self, command: str | list[str], shell=False, **kwargs):
        """
        Equivalent to the standard library's `subprocess.Popen`, with all output
        captured by `stdout` and the command first being properly formatted.
        """
        return self.modules.subprocess.Popen(
            self.format_for_subprocess(command, shell=shell),
            shell=shell,
            stdout=self.modules.subprocess.PIPE,
            stderr=self.modules.subprocess.STDOUT,
            **kwargs,
        )

    @staticmethod
    def stream_process_output(process):
        # To avoid blocking never use a pipe's file descriptor iterator. See https://bugs.python.org/issue3907
        for line in iter(process.stdout.readline, b''):
            yield line.decode('utf-8')

    @property
    def windows(self):
        """
        Indicates whether ddev is running on Windows.
        """
        return self.METHOD_NAME == 'windows'

    @property
    def macos(self):
        """
        Indicates whether ddev is running on macOS.
        """
        return self.METHOD_NAME == 'macos'

    @property
    def linux(self):
        """
        Indicates whether ddev is running on neither Windows nor macOS.
        """
        return not (self.windows or self.macos)

    def exit_with_command(self, command: list[str]):
        """
        Run the given command and exit with its exit code. On non-Windows systems, this uses the standard library's
        `os.execvp`.
        """
        if self.windows:
            # Windows has no execvp equivalent: run the child and mirror its code.
            process = self.run_command(command)
            self.exit_with_code(process.returncode)
        else:
            return os.execvp(command[0], command)

    @property
    def METHOD_NAME(self):
        """
        One of the following:

        - `linux`
        - `windows`
        - `macos`
        """
        if self.__name is None:
            # Resolved lazily and cached for the lifetime of the instance.
            self.__name = get_platform_name()
        return self.__name
class LazilyLoadedModules:
    """Namespace that imports modules on demand, caching each after first use."""

    def __getattr__(self, METHOD_NAME):
        # __getattr__ only fires for attributes missing from the instance, so
        # each module is imported exactly once.
        module = import_module(METHOD_NAME)
        # Cache on the instance so subsequent lookups bypass __getattr__.
        self.__dict__[METHOD_NAME] = module
        return module
299,277 | post | #
# Copyright Β© 2022 Josep Maria ViΓ±olas Auquer
#
# This file is part of IsardVDI.
#
# IsardVDI is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# IsardVDI is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with IsardVDI. If not, see <https://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: AGPL-3.0-or-later
import json
import logging
import os
import time
import traceback
from datetime import datetime, timedelta
import requests
from jose import jwt
def header_auth():
    """Return an Authorization header carrying a freshly signed admin JWT.

    The token is signed with the shared API secret from the environment and
    expires after 20 seconds, so it must be built right before each request.
    """
    token = jwt.encode(
        {
            "exp": datetime.utcnow() + timedelta(seconds=20),
            "kid": "isardvdi",
            "data": {
                "role_id": "admin",
                "category_id": "*",
            },
        },
        os.environ["API_ISARDVDI_SECRET"],
        algorithm="HS256",
    )
    return {"Authorization": "Bearer " + token}
def is_ip(ip):
    """Return True if *ip* is a valid dotted-quad IPv4 address string.

    Accepts exactly four dot-separated components, each an ASCII decimal
    number in the range 0-255. Non-string input yields False.
    """
    try:
        parts = ip.split(".")
    except AttributeError:
        # Not a string-like object (previously hidden by a bare except).
        return False
    if len(parts) != 4:
        return False
    for part in parts:
        # isascii() guards against Unicode digits that isdigit() accepts but
        # int() rejects (e.g. superscripts); isdigit() already excludes signs,
        # so no lower-bound check is needed.
        if not (part.isascii() and part.isdigit()):
            return False
        if int(part) > 255:
            return False
    return True
# URL path prefix exposed by each IsardVDI backend service.
container_base_path = {
    "isard-api": "/api/v3",
    "isard-scheduler": "/scheduler",
}
class ApiRest:
    """Minimal REST client for IsardVDI internal services.

    Resolves the service base URL and TLS-verification policy from either an
    explicit ``base_url`` or the environment, and wraps ``requests`` calls
    with the short-lived admin JWT produced by ``header_auth()``.
    """

    def __init__(self, service="isard-api", base_url=None):
        if base_url:
            self.base_url = base_url
            # Plain HTTP has no certificate to verify.
            self.verify_cert = False if base_url.startswith("http://") else True
        else:
            # Fixed: initialize explicitly. Previously this name was left
            # unbound for unknown service values, raising NameError instead of
            # falling through to the service-name default below.
            actual_server = None
            if service == "isard-api":
                actual_server = os.environ.get("API_DOMAIN")
            if service == "isard-scheduler":
                actual_server = "isard-scheduler"
            if actual_server:
                if actual_server == "localhost" or actual_server.startswith("isard-"):
                    # Sibling container: plain HTTP on the internal port.
                    self.base_url = (
                        "http://"
                        + actual_server
                        + ":5000"
                        + container_base_path[service]
                    )
                    self.verify_cert = False
                else:
                    self.base_url = (
                        "https://" + actual_server + container_base_path[service]
                    )
                    # A bare IP will not match the certificate hostname.
                    self.verify_cert = False if is_ip(actual_server) else True
            else:
                self.base_url = (
                    "http://" + service + ":5000" + container_base_path[service]
                )
                self.verify_cert = False
        self.service = service
        logging.debug(
            "Api base url for service " + service + " set to " + self.base_url
        )

    def wait_for(self, max_retries=-1, timeout=1):
        """Block until the service responds to a GET on its base URL.

        max_retries: attempts before giving up; -1 (default) retries forever.
        timeout: seconds to sleep between failed attempts.
        """
        while max_retries:
            try:
                logging.info(
                    "Check connection to "
                    + self.service
                    + " container at "
                    + self.base_url
                )
                self.get()
                max_retries = 0
            except Exception:
                # Narrowed from a bare except so KeyboardInterrupt/SystemExit
                # still propagate while waiting.
                logging.error(
                    "Unable to reach " + self.service + " container at " + self.base_url
                )
                time.sleep(timeout)
                if max_retries >= 0:
                    max_retries -= 1

    def get(self, url="", timeout=5):
        """GET ``base_url + url`` and return the decoded JSON body."""
        resp = requests.get(
            self.base_url + url,
            headers=header_auth(),
            verify=self.verify_cert,
            timeout=timeout,
        )
        resp.raise_for_status()
        return json.loads(resp.text)

    def METHOD_NAME(self, url, data=None):
        """POST *data* (default empty object) as JSON; return the decoded body."""
        # data=None avoids the shared mutable-default-argument pitfall.
        resp = requests.METHOD_NAME(
            self.base_url + url,
            json={} if data is None else data,
            headers=header_auth(),
            verify=self.verify_cert,
        )
        resp.raise_for_status()
        return json.loads(resp.text)

    def put(self, url, data=None):
        """PUT *data* (default empty object) as JSON; return the decoded body."""
        resp = requests.put(
            self.base_url + url,
            json={} if data is None else data,
            headers=header_auth(),
            verify=self.verify_cert,
        )
        resp.raise_for_status()
        return json.loads(resp.text)

    def delete(self, url, data=None):
        """DELETE with *data* (default empty object) as JSON body; return the decoded body."""
        resp = requests.delete(
            self.base_url + url,
            json={} if data is None else data,
            headers=header_auth(),
            verify=self.verify_cert,
        )
        resp.raise_for_status()
        return json.loads(resp.text)
299,278 | parse args | #!/usr/bin/env python3
# Copyright (c) 2020 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import os.path
import subprocess
import elftools.elf.elffile
import argparse
# Symbol in the Zephyr ELF whose address is used as the EFI entry point.
ENTRY_SYM = "__start64"
def verbose(msg):
    """Print *msg* only when --verbose was requested on the command line."""
    # `args` is the module-level argparse namespace set in the __main__ block.
    if args.verbose:
        print(msg)
def build_elf(elf_file, include_dirs):
    """Wrap the Zephyr ELF *elf_file* into a PE-COFF EFI binary (zephyr.efi).

    Extracts the loadable segments and entry-point address, generates a
    metadata header consumed by the zefi.c stub, compiles the stub, embeds the
    segment data, and converts the result into an EFI application.
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    cfile = os.path.join(base_dir, "zefi.c")
    ldscript = os.path.join(base_dir, "efi.ld")
    assert os.path.isfile(cfile)
    assert os.path.isfile(ldscript)
    #
    # Open the ELF file up and find our entry point
    #
    fp = open(elf_file, "rb")
    ef = elftools.elf.elffile.ELFFile(fp)
    symtab = ef.get_section_by_name(".symtab")
    entry_addr = symtab.get_symbol_by_name(ENTRY_SYM)[0].entry.st_value
    verbose("Entry point address (symbol: %s) 0x%x" % (ENTRY_SYM, entry_addr))
    #
    # Parse the ELF file and extract segment data
    #
    data_blob = b''
    data_segs = []
    zero_segs = []
    for seg in ef.iter_segments():
        h = seg.header
        if h.p_type != "PT_LOAD":
            continue
        assert h.p_memsz >= h.p_filesz
        assert len(seg.data()) == h.p_filesz
        if h.p_filesz > 0:
            # File-backed bytes are appended to one blob; record where.
            sd = seg.data()
            verbose("%d bytes of data at 0x%x, data offset %d"
                    % (len(sd), h.p_vaddr, len(data_blob)))
            data_segs.append((h.p_vaddr, len(sd), len(data_blob)))
            data_blob = data_blob + sd
        if h.p_memsz > h.p_filesz:
            # BSS-style region: zero-filled at load time by the stub.
            bytesz = h.p_memsz - h.p_filesz
            addr = h.p_vaddr + h.p_filesz
            verbose("%d bytes of zero-fill at 0x%x" % (bytesz, addr))
            zero_segs.append((addr, bytesz))
    verbose(f"{len(data_blob)} bytes of data to include in image")
    #
    # Emit a C header containing the metadata
    #
    cf = open("zefi-segments.h", "w")
    cf.write("/* GENERATED CODE. DO NOT EDIT. */\n\n")
    cf.write("/* Sizes and offsets specified in 4-byte units.\n")
    cf.write(" * All addresses 4-byte aligned.\n")
    cf.write(" */\n")
    cf.write("struct data_seg { uint64_t addr; uint32_t sz; uint32_t off; };\n\n")
    cf.write("static struct data_seg zefi_dsegs[] = {\n")
    for s in data_segs:
        cf.write(" { 0x%x, %d, %d },\n"
                 % (s[0], s[1], s[2]))
    cf.write("};\n\n")
    cf.write("struct zero_seg { uint64_t addr; uint32_t sz; };\n\n")
    cf.write("static struct zero_seg zefi_zsegs[] = {\n")
    for s in zero_segs:
        cf.write(" { 0x%x, %d },\n"
                 % (s[0], s[1]))
    cf.write("};\n\n")
    cf.write("static uintptr_t zefi_entry = 0x%xUL;\n" % (entry_addr))
    cf.close()
    verbose("Metadata header generated.")
    #
    # Build
    #
    # First stage ELF binary. Flag notes:
    # + Stack protector is default on some distros and needs library support
    # + We need pic to enforce that the linker adds no relocations
    # + UEFI can take interrupts on our stack, so no red zone
    # + UEFI API assumes 16-bit wchar_t
    includes = []
    for include_dir in include_dirs:
        includes.extend(["-I", include_dir])
    cmd = ([args.compiler, "-shared", "-Wall", "-Werror", "-I."] + includes +
           ["-fno-stack-protector", "-fpic", "-mno-red-zone", "-fshort-wchar",
            "-Wl,-nostdlib", "-T", ldscript, "-o", "zefi.elf", cfile])
    verbose(" ".join(cmd))
    subprocess.run(cmd, check = True)
    # Extract the .data segment and append our extra blob
    cmd = [args.objcopy, "-O", "binary", "-j", ".data", "zefi.elf", "data.dat"]
    verbose(" ".join(cmd))
    subprocess.run(cmd, check = True)
    assert (os.stat("data.dat").st_size % 8) == 0
    df = open("data.dat", "ab")
    df.write(data_blob)
    df.close()
    # FIXME: this generates warnings about our unused trash section having to be moved to make room. Set its address far away...
    subprocess.run([args.objcopy, "--update-section", ".data=data.dat",
                    "zefi.elf"], check = True)
    # Convert it to a PE-COFF DLL.
    cmd = [args.objcopy, "--target=efi-app-x86_64",
           "-j", ".text", "-j", ".reloc", "-j", ".data",
           "zefi.elf", "zephyr.efi"]
    verbose(" ".join(cmd))
    subprocess.run(cmd, check = True)
    verbose("Build complete; zephyr.efi wrapper binary is ready")
def METHOD_NAME():
    """Build and run the argument parser for the EFI wrapper build script."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False)
    parser.add_argument("-c", "--compiler", required=True, help="Compiler to be used")
    parser.add_argument("-o", "--objcopy", required=True, help="objcopy to be used")
    parser.add_argument("-f", "--elf-file", required=True, help="Input file")
    parser.add_argument("-v", "--verbose", action="store_true", help="Verbose output")
    parser.add_argument("-i", "--includes", required=True, nargs="+",
                        help="Zephyr base include directories")
    return parser.METHOD_NAME()
if __name__ == "__main__":
    # Parse arguments into the module-level namespace used by verbose().
    args = METHOD_NAME()
    verbose(f"Working on {args.elf_file} with {args.includes}...")
    build_elf(args.elf_file, args.includes)
299,279 | test component ut esp eth | import os
import re
import socket
from collections.abc import Callable
from threading import Thread
import tiny_test_fw
import ttfw_idf
from scapy.all import Ether, raw
from ttfw_idf import TestFormat
try:
import typing # noqa: F401 # pylint: disable=unused-import
except ImportError:
pass
def configure_eth_if(func): # type: (typing.Any) -> typing.Any
    """Decorator: open a raw AF_PACKET socket on the first wired interface
    (eth*/enp*/eno*), bound to the DUT's custom EtherType 0x2222, and pass it
    to *func* as its first argument. The socket is closed afterwards."""
    def inner(*args, **kwargs): # type: (typing.Any, typing.Any) -> typing.Any
        # try to determine which interface to use
        netifs = os.listdir('/sys/class/net/')
        target_if = ''
        print('detected interfaces: ' + str(netifs))
        for netif in netifs:
            if netif.find('eth') == 0 or netif.find('enp') == 0 or netif.find('eno') == 0:
                target_if = netif
                break
        if target_if == '':
            raise Exception('no network interface found')
        print('Use ' + target_if + ' for testing')
        # 0x2222 is the custom EtherType used by the DUT test firmware.
        so = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, 0x2222)
        so.bind((target_if, 0))
        func(so, *args, **kwargs)
        so.close()
    return inner
@configure_eth_if
def check_eth_recv_packet(so): # type: (socket.socket) -> None
    """Receive one frame from the DUT and verify its 1010-byte counting payload.

    Raises on a timeout or if any payload byte deviates from i & 0xff.
    """
    so.settimeout(10)
    # Fixed: dropped the no-op `try/except Exception as e: raise e` wrapper;
    # exceptions propagate identically without it.
    eth_frame = Ether(so.recv(1024))
    for i in range(1010):
        if eth_frame.load[i] != i & 0xff:
            raise Exception('Packet content mismatch')
@configure_eth_if
def send_eth_packet(so, mac): # type: (socket.socket, str) -> None
    """Send a 1010-byte counting-pattern frame to *mac* from the test host.

    The DUT validates the payload byte-by-byte (i & 0xff).
    """
    so.settimeout(10)
    # Fixed: build the counting payload directly instead of an index-assignment
    # loop, and dropped the no-op `except Exception as e: raise e` wrapper.
    payload = bytearray(i & 0xff for i in range(1010))
    eth_frame = Ether(dst=mac, src=so.getsockname()[4], type=0x2222) / raw(payload)
    so.send(raw(eth_frame))
@configure_eth_if
def recv_resp_poke(so, i): # type: (socket.socket, int) -> None
    """Wait for poke packet number *i* from the DUT and echo a POKE_RESP.

    Raises if the received poke sequence number does not match *i*.
    """
    so.settimeout(10)
    # Fixed: dropped the no-op `try/except Exception as e: raise e` wrapper;
    # exceptions propagate identically without it.
    eth_frame = Ether(so.recv(60))
    if eth_frame.type == 0x2222 and eth_frame.load[0] == 0xfa:
        if eth_frame.load[1] != i:
            raise Exception('Missed Poke Packet')
        # Swap source/destination and answer with the POKE_RESP code.
        eth_frame.dst = eth_frame.src
        eth_frame.src = so.getsockname()[4]
        eth_frame.load = bytes.fromhex('fb') # POKE_RESP code
        so.send(raw(eth_frame))
@configure_eth_if
def traffic_gen(so, mac, enabled): # type: (socket.socket, str, Callable) -> None
    """Flood *mac* with dummy traffic for as long as enabled() returns 1.

    Intended to run in a background thread; the controlling test flips the
    flag read by *enabled* to stop it.
    """
    payload = bytes.fromhex('ff') # DUMMY_TRAFFIC code
    payload += bytes(1485)
    eth_frame = Ether(dst=mac, src=so.getsockname()[4], type=0x2222) / raw(payload)
    # Fixed: dropped the no-op `try/except Exception as e: raise e` wrapper;
    # exceptions propagate identically without it.
    while enabled() == 1:
        so.send(raw(eth_frame))
def METHOD_NAME(env, appname): # type: (tiny_test_fw.Env, str) -> None
    """Drive the esp_eth component unit tests on a DUT flashed with *appname*.

    Runs the start/stop, MAC get/set, broadcast-transmit, receive, and
    start/stop stress tests, exchanging raw Ethernet frames with the DUT
    through the helpers above and parsing each test's Unity output.
    """
    dut = env.get_dut('esp_eth', 'components/esp_eth/test_apps', app_config_name=appname)
    dut.start_app()
    stdout = dut.expect('Press ENTER to see the list of tests', full_stdout=True)
    dut.write('"start_and_stop"')
    stdout += dut.expect("Enter next test, or 'enter' to see menu", full_stdout=True)
    ttfw_idf.ComponentUTResult.parse_result(stdout, test_format=TestFormat.UNITY_BASIC)
    dut.write('"get_set_mac"')
    stdout = dut.expect("Enter next test, or 'enter' to see menu", full_stdout=True)
    ttfw_idf.ComponentUTResult.parse_result(stdout, test_format=TestFormat.UNITY_BASIC)
    dut.write('"ethernet_broadcast_transmit"')
    # Validate on the host side that the DUT's broadcast actually arrived.
    check_eth_recv_packet()
    stdout = dut.expect("Enter next test, or 'enter' to see menu", full_stdout=True)
    ttfw_idf.ComponentUTResult.parse_result(stdout, test_format=TestFormat.UNITY_BASIC)
    dut.write('"recv_pkt"')
    expect_result = dut.expect(re.compile(
        r'([\s\S]*)'
        r'DUT MAC: ([0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2})'),
        timeout=10
    )
    stdout = expect_result[0]
    send_eth_packet('ff:ff:ff:ff:ff:ff') # broadcast frame
    send_eth_packet('01:00:00:00:00:00') # multicast frame
    send_eth_packet(expect_result[1]) # unicast frame
    stdout += dut.expect("Enter next test, or 'enter' to see menu", full_stdout=True)
    ttfw_idf.ComponentUTResult.parse_result(stdout, test_format=TestFormat.UNITY_BASIC)
    dut.write('"start_stop_stress_test"')
    expect_result = dut.expect(re.compile(
        r'([\s\S]*)'
        r'DUT MAC: ([0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2})'),
        timeout=10
    )
    # Start/stop under heavy Tx traffic
    for tx_i in range(10):
        recv_resp_poke(tx_i)
    # Start/stop under heavy Rx traffic
    traffic_en = 1
    thread = Thread(target=traffic_gen, args=(expect_result[1], lambda:traffic_en, ))
    thread.start()
    try:
        for rx_i in range(10):
            recv_resp_poke(rx_i)
    finally:
        # Always stop the traffic generator thread, even if a poke was missed.
        traffic_en = 0
        thread.join()
    stdout = dut.expect("Enter next test, or 'enter' to see menu", full_stdout=True)
    ttfw_idf.ComponentUTResult.parse_result(stdout, test_format=TestFormat.UNITY_BASIC)
@ttfw_idf.idf_component_unit_test(env_tag='COMPONENT_UT_IP101', target=['esp32'])
def test_component_ut_esp_eth_ip101(env, _): # type: (tiny_test_fw.Env, typing.Any) -> None
    # Run the shared test driver against the IP101 PHY app configuration.
    METHOD_NAME(env, 'ip101')
@ttfw_idf.idf_component_unit_test(env_tag='COMPONENT_UT_LAN8720', target=['esp32'])
def test_component_ut_esp_eth_lan8720(env, _): # type: (tiny_test_fw.Env, typing.Any) -> None
    # Run the shared test driver against the LAN8720 PHY app configuration.
    METHOD_NAME(env, 'lan8720')
if __name__ == '__main__':
    # Allow running both PHY variants directly, outside the CI runner.
    test_component_ut_esp_eth_ip101()
    test_component_ut_esp_eth_lan8720()
299,280 | reset logs | from logging import getLogger
from django.utils.module_loading import import_string
from django.utils.timezone import now
from axes.conf import settings
from axes.handlers.base import AxesBaseHandler, AbstractAxesHandler, AxesHandler
from axes.helpers import (
get_client_ip_address,
get_client_user_agent,
get_client_path_info,
get_client_http_accept,
toggleable,
)
# Module-level logger, named after this module.
log = getLogger(__name__)
class AxesProxyHandler(AbstractAxesHandler, AxesBaseHandler):
    """
    Proxy interface for configurable Axes signal handler class.

    If you wish to implement a custom version of this handler,
    you can override the settings.AXES_HANDLER configuration string
    with a class that implements a compatible interface and methods.

    Defaults to using axes.handlers.proxy.AxesProxyHandler if not overridden.
    Refer to axes.handlers.proxy.AxesProxyHandler for default implementation.
    """

    # Memoized concrete handler instance resolved from settings.AXES_HANDLER.
    implementation = None  # type: AxesHandler

    @classmethod
    def get_implementation(cls, force: bool = False) -> AxesHandler:
        """
        Fetch and initialize configured handler implementation and memoize it to avoid reinitialization.

        This method is re-entrant and can be called multiple times from e.g. Django application loader.
        """
        if force or not cls.implementation:
            cls.implementation = import_string(settings.AXES_HANDLER)()
        return cls.implementation

    @classmethod
    def reset_attempts(
        cls,
        *,
        ip_address: str = None,
        username: str = None,
        ip_or_username: bool = False,
    ) -> int:
        # Delegate to the configured handler implementation.
        return cls.get_implementation().reset_attempts(
            ip_address=ip_address, username=username, ip_or_username=ip_or_username
        )

    @classmethod
    def METHOD_NAME(cls, *, age_days: int = None) -> int:
        # Delegate log resetting to the configured handler implementation.
        return cls.get_implementation().METHOD_NAME(age_days=age_days)

    @staticmethod
    def update_request(request):
        """
        Update request attributes before passing them into the selected handler class.
        """
        if request is None:
            log.error(
                "AXES: AxesProxyHandler.update_request can not set request attributes to a None request"
            )
            return
        if not hasattr(request, "axes_updated"):
            request.axes_locked_out = False
            request.axes_attempt_time = now()
            request.axes_ip_address = get_client_ip_address(request)
            request.axes_user_agent = get_client_user_agent(request)
            request.axes_path_info = get_client_path_info(request)
            request.axes_http_accept = get_client_http_accept(request)
            # Marker flag so the attributes are only computed once per request.
            request.axes_updated = True

    @classmethod
    def is_locked(cls, request, credentials: dict = None) -> bool:
        cls.update_request(request)
        return cls.get_implementation().is_locked(request, credentials)

    @classmethod
    def is_allowed(cls, request, credentials: dict = None) -> bool:
        cls.update_request(request)
        return cls.get_implementation().is_allowed(request, credentials)

    @classmethod
    def get_failures(cls, request, credentials: dict = None) -> int:
        cls.update_request(request)
        return cls.get_implementation().get_failures(request, credentials)

    @classmethod
    @toggleable
    def user_login_failed(cls, sender, credentials: dict, request=None, **kwargs):
        cls.update_request(request)
        return cls.get_implementation().user_login_failed(
            sender, credentials, request, **kwargs
        )

    @classmethod
    @toggleable
    def user_logged_in(cls, sender, request, user, **kwargs):
        cls.update_request(request)
        return cls.get_implementation().user_logged_in(sender, request, user, **kwargs)

    @classmethod
    @toggleable
    def user_logged_out(cls, sender, request, user, **kwargs):
        cls.update_request(request)
        return cls.get_implementation().user_logged_out(sender, request, user, **kwargs)

    @classmethod
    @toggleable
    def post_save_access_attempt(cls, instance, **kwargs):
        return cls.get_implementation().post_save_access_attempt(instance, **kwargs)

    @classmethod
    @toggleable
    def post_delete_access_attempt(cls, instance, **kwargs):
        return cls.get_implementation().post_delete_access_attempt(instance, **kwargs)
299,281 | get all neigh states | #!/usr/bin/env python3
""""
Description: bgpmon.py -- populating bgp related information in stateDB.
script is started by supervisord in bgp docker when the docker is started.
Initial creation of this daemon is to assist SNMP agent in obtaining the
BGP related information for its MIB support. The MIB that this daemon is
assisting is for the CiscoBgp4MIB (Neighbor state only). Also for chassis use-case
it identify if the given BGP neighbors as i-BGP vs e-BGP. If there are other
BGP related items that needs to be updated in a periodic manner in the
future, then more can be added into this process.
The script check if there are any bgp activities by monitoring the bgp
frr.log file timestamp. If activity is detected, then it will request bgp
neighbor state via vtysh cli interface. This bgp activity monitoring is
done periodically (every 15 second). When triggered, it looks specifically
for the neighbor state in the json output of show ip bgp neighbors json
and update the state DB for each neighbor accordingly.
In order to not disturb and hold on to the State DB access too long and
removal of the stale neighbors (neighbors that was there previously on
previous get request but no longer there in the current get request), a
"previous" neighbor dictionary will be kept and used to determine if there
is a need to perform update or the peer is stale to be removed from the
state DB
"""
import json
import os
import sys
import syslog
from swsscommon import swsscommon
import time
from sonic_py_common.general import getstatusoutput_noshell
# Flush the Redis pipeline once this many commands are queued.
PIPE_BATCH_MAX_COUNT = 50
class BgpStateGet:
    """Poll FRR for BGP neighbor states and mirror them into STATE_DB."""

    def __init__(self):
        # set peer_l stores the Neighbor peer Ip address
        # dic peer_state stores the Neighbor peer state entries
        # set new_peer_l stores the new snapshot of Neighbor peer ip address
        # dic new_peer_state stores the new snapshot of Neighbor peer states
        self.peer_l = set()
        self.peer_state = {}
        self.new_peer_l = set()
        self.new_peer_state = {}
        self.cached_timestamp = 0
        self.db = swsscommon.SonicV2Connector()
        self.db.connect(self.db.STATE_DB, False)
        self.pipe = swsscommon.RedisPipeline(self.db.get_redis_client(self.db.STATE_DB))
        # Start from a clean slate: drop entries left over from a prior run.
        self.db.delete_all_by_pattern(self.db.STATE_DB, "NEIGH_STATE_TABLE|*" )

    # A quick way to check if there are anything happening within BGP is to
    # check its log file has any activities. This is by checking its modified
    # timestamp against the cached timestamp that we keep and if there is a
    # difference, there is activity detected. In case the log file got wiped
    # out, it will default back to constant pulling every 15 seconds
    def bgp_activity_detected(self):
        try:
            timestamp = os.stat("/var/log/frr/frr.log").st_mtime
            if timestamp != self.cached_timestamp:
                self.cached_timestamp = timestamp
                return True
            else:
                return False
        except (IOError, OSError):
            # Log file missing or unreadable: assume activity so polling continues.
            return True

    def update_new_peer_states(self, peer_dict):
        """Record each peer's (state, remoteAs, localAs) tuple in the new snapshot."""
        peer_l = peer_dict["peers"].keys()
        self.new_peer_l.update(peer_l)
        for peer in peer_l:
            self.new_peer_state[peer] = (peer_dict["peers"][peer]["state"],
                                         peer_dict["peers"][peer]["remoteAs"],
                                         peer_dict["peers"][peer]["localAs"])

    # Get a new snapshot of BGP neighbors and store them in the "new" location
    def METHOD_NAME(self):
        cmd = ["vtysh", "-c", 'show bgp summary json']
        rc, output = getstatusoutput_noshell(cmd)
        if rc:
            # Keep the previous snapshot intact when vtysh fails.
            syslog.syslog(syslog.LOG_ERR, "*ERROR* Failed with rc:{} when execute: {}".format(rc, cmd))
            return
        peer_info = json.loads(output)
        # cmd ran successfully, safe to Clean the "new" set/dict for new snapshot
        self.new_peer_l.clear()
        self.new_peer_state.clear()
        for key, value in peer_info.items():
            if key == "ipv4Unicast" or key == "ipv6Unicast":
                self.update_new_peer_states(value)

    # This method will take the caller's dictionary which contains the peer state operation
    # That need to be updated in StateDB using Redis pipeline.
    # The data{} will be cleared at the end of this method before returning to caller.
    def flush_pipe(self, data):
        """Dump each entry in data{} into State DB via redis pipeline.

        Args:
            data: Neighbor state in dictionary format
            {
                'NEIGH_STATE_TABLE|ip_address_a': {'state':state},
                'NEIGH_STATE_TABLE|ip_address_b': {'state':state},
                'NEIGH_STATE_TABLE|ip_address_c': {'state':state},
                'NEIGH_STATE_TABLE|ip_address_x': None,
                'NEIGH_STATE_TABLE|ip_address_z': None
                ...
            }
        """
        for key, value in data.items():
            if value is None:
                # delete case
                command = swsscommon.RedisCommand()
                command.formatDEL(key)
                self.pipe.push(command)
            else:
                # Add or Modify case
                command = swsscommon.RedisCommand()
                command.formatHSET(key, value)
                self.pipe.push(command)
        self.pipe.flush()
        data.clear()

    def update_neigh_states(self):
        """Diff the new snapshot against the previous one and sync STATE_DB."""
        data = {}
        for peer in self.new_peer_l:
            key = "NEIGH_STATE_TABLE|%s" % peer
            if peer in self.peer_l:
                # only update the entry if state changed
                if self.peer_state[peer] != self.new_peer_state[peer][0]:
                    # state changed. Update state DB for this entry
                    state = self.new_peer_state[peer][0]
                    peerType = "i-BGP" if self.new_peer_state[peer][1] == self.new_peer_state[peer][2] else "e-BGP"
                    data[key] = {'state':state, 'peerType':peerType}
                    self.peer_state[peer] = state
                # remove this neighbor from old set since it is accounted for
                self.peer_l.remove(peer)
            else:
                # New neighbor found case. Add to dictionary and state DB
                state = self.new_peer_state[peer][0]
                peerType = "i-BGP" if self.new_peer_state[peer][1] == self.new_peer_state[peer][2] else "e-BGP"
                data[key] = {'state':state, 'peerType':peerType}
                self.peer_state[peer] = state
            if len(data) > PIPE_BATCH_MAX_COUNT:
                self.flush_pipe(data)
        # Check for stale state entries to be cleaned up
        for peer in self.peer_l:
            # remove this from the stateDB and the current neighbor state entry
            del_key = "NEIGH_STATE_TABLE|%s" % peer
            data[del_key] = None
            if peer in self.peer_state:
                del self.peer_state[peer]
            if len(data) > PIPE_BATCH_MAX_COUNT:
                self.flush_pipe(data)
        # If anything in the pipeline not yet flushed, flush them now
        if len(data) > 0:
            self.flush_pipe(data)
        # Save the new set
        self.peer_l = self.new_peer_l.copy()
def main():
    """Daemon entry point: poll for BGP activity every 15s and sync STATE_DB."""
    syslog.syslog(syslog.LOG_INFO, "bgpmon service started")
    bgp_state_get = None
    try:
        bgp_state_get = BgpStateGet()
    except Exception as e:
        # Cannot reach the state DB; nothing useful can run without it.
        syslog.syslog(syslog.LOG_ERR, "{}: error exit 1, reason {}".format("THIS_MODULE", str(e)))
        sys.exit(1)
    # periodically obtain the new neighbor information and update if necessary
    while True:
        time.sleep(15)
        if bgp_state_get.bgp_activity_detected():
            bgp_state_get.METHOD_NAME()
            bgp_state_get.update_neigh_states()
if __name__ == '__main__':
    # Run the polling daemon when executed directly (started by supervisord).
    main()
299,282 | get next | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request
T = TypeVar("T")
# Response-hook signature: (pipeline response, deserialized result, response headers).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
# Validation is handled server-side for generated ARM clients.
_SERIALIZER.client_side_validation = False
def build_list_request(**kwargs: Any) -> HttpRequest:
    """Build the GET request for listing Microsoft.ContainerService operations."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2022-07-02-preview"))
    accept = _headers.pop("Accept", "application/json")
    # Construct URL
    _url = kwargs.pop("template_url", "/providers/Microsoft.ContainerService/operations")
    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class Operations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.containerservice.v2022_07_02_preview.ContainerServiceClient`'s
        :attr:`operations` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs):
        # The generated client constructs this positionally; direct construction
        # may use the equivalent keyword arguments instead.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
        self._api_version = input_args.pop(0) if input_args else kwargs.pop("api_version")

    @distributed_trace
    def list(self, **kwargs: Any) -> Iterable["_models.OperationValue"]:
        """Gets a list of operations.

        Gets a list of operations.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either OperationValue or the result of cls(response)
        :rtype:
         ~azure.core.paging.ItemPaged[~azure.mgmt.containerservice.v2022_07_02_preview.models.OperationValue]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: str = kwargs.pop(
            "api_version", _params.pop("api-version", self._api_version or "2022-07-02-preview")
        )
        cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None)
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the templated URL; later pages follow next_link.
            if not next_link:
                request = build_list_request(
                    api_version=api_version,
                    template_url=self.list.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Deserialize one page into (continuation token, items).
            deserialized = self._deserialize("OperationListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return None, iter(list_of_elem)

        def METHOD_NAME(next_link=None):
            # Fetch one page and fail fast on any non-200 status.
            request = prepare_request(next_link)
            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response

        return ItemPaged(METHOD_NAME, extract_data)

    list.metadata = {"url": "/providers/Microsoft.ContainerService/operations"}
import logging
import os
from optparse import Values
from typing import List
from pipenv.patched.pip._internal.cli import cmdoptions
from pipenv.patched.pip._internal.cli.cmdoptions import make_target_python
from pipenv.patched.pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pipenv.patched.pip._internal.cli.status_codes import SUCCESS
from pipenv.patched.pip._internal.operations.build.build_tracker import get_build_tracker
from pipenv.patched.pip._internal.req.req_install import check_legacy_setup_py_options
from pipenv.patched.pip._internal.utils.misc import ensure_dir, normalize_path, write_output
from pipenv.patched.pip._internal.utils.temp_dir import TempDirectory
logger = logging.getLogger(__name__)
class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self) -> None:
        # NOTE: ``Command.__init__`` calls this hook by name to populate the
        # option parser, so it must be called ``add_options``.
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.global_options())
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.src())
        self.cmd_opts.add_option(cmdoptions.pre())
        self.cmd_opts.add_option(cmdoptions.require_hashes())
        self.cmd_opts.add_option(cmdoptions.progress_bar())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())

        self.cmd_opts.add_option(
            "-d",
            "--dest",
            "--destination-dir",
            "--destination-directory",
            dest="download_dir",
            metavar="dir",
            default=os.curdir,
            help="Download packages into <dir>.",
        )

        cmdoptions.add_target_python_options(self.cmd_opts)

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
        """Resolve the requested requirements and download their distribution
        files into ``options.download_dir`` without installing them.

        :return: ``SUCCESS`` (0); resolution/network failures raise.
        """
        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        cmdoptions.check_dist_restriction(options)

        options.download_dir = normalize_path(options.download_dir)
        ensure_dir(options.download_dir)

        session = self.get_default_session(options)

        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
            ignore_requires_python=options.ignore_requires_python,
        )

        build_tracker = self.enter_context(get_build_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
            kind="download",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)
        check_legacy_setup_py_options(options, reqs)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            build_tracker=build_tracker,
            session=session,
            finder=finder,
            download_dir=options.download_dir,
            use_user_site=False,
            verbosity=self.verbosity,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            ignore_requires_python=options.ignore_requires_python,
            use_pep517=options.use_pep517,
            py_version_info=options.python_version,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

        downloaded: List[str] = []
        for req in requirement_set.requirements.values():
            # Only fetch requirements that are not already satisfied locally.
            if req.satisfied_by is None:
                assert req.name is not None
                preparer.save_linked_requirement(req)
                downloaded.append(req.name)

        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
        requirement_set.warn_legacy_versions_and_specifiers()

        if downloaded:
            write_output("Successfully downloaded %s", " ".join(downloaded))

        return SUCCESS
# coding=utf-8
# Copyright (c) 2001-2022, Hove and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Hove (www.hove.com).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, division
from jormungandr.realtime_schedule.realtime_proxy import RealtimeProxy, RealtimeProxyError
import logging
import pybreaker
import pytz
import requests as requests
from jormungandr import cache, app
from jormungandr.schedule import RealTimePassage
from jormungandr.utils import PY3
from datetime import datetime
import six
class Cleverage(RealtimeProxy):
    """
    class managing calls to cleverage external service providing real-time next passages

    curl example to check/test that external service is working:
    curl -X GET -H 'X-Keolis-Api-Version: {version}, X-Keolis-Api-Key: {key}' '{server}/{stop_code}'

    On the response, Navitia matches route-point's {line_code} with
    'code' in each element at the root of the response.

    {line_code} and {stop_code} are provided using the same code key, named after
    the 'destination_id_tag' if provided on connector's init, or the 'id' otherwise.

    In practice it will look like:
    curl -X GET -H 'X-Keolis-Api-Version: 1.0, X-Keolis-Api-Key: BLA-68764125-BOB' 'http://api.bobito.fr/api/schedule/3763'
    """

    def __init__(
        self,
        id,
        service_url,
        service_args,
        timezone,
        object_id_tag=None,
        destination_id_tag=None,
        instance=None,
        timeout=10,
        **kwargs
    ):
        # Normalize the base URL so a stop code can be appended directly.
        self.service_url = service_url if (service_url[-1] == u'/') else (service_url + '/')
        self.service_args = service_args
        self.timeout = timeout  # timeout in seconds
        self.rt_system_id = id
        self.object_id_tag = object_id_tag if object_id_tag else id
        self.destination_id_tag = destination_id_tag
        self.instance = instance
        fail_max = kwargs.get(
            'circuit_breaker_max_fail', app.config.get(str('CIRCUIT_BREAKER_MAX_CLEVERAGE_FAIL'), 5)
        )
        reset_timeout = kwargs.get(
            'circuit_breaker_reset_timeout', app.config.get(str('CIRCUIT_BREAKER_CLEVERAGE_TIMEOUT_S'), 60)
        )
        self.breaker = pybreaker.CircuitBreaker(fail_max=fail_max, reset_timeout=reset_timeout)
        self.timezone = pytz.timezone(timezone)

    def __repr__(self):
        """
        used as the cache key. we use the rt_system_id to share the cache between servers in production
        """
        if PY3:
            return self.rt_system_id
        try:
            return self.rt_system_id.encode('utf-8', 'backslashreplace')
        except Exception:
            # Defensive: __repr__ is the cache key, it must never raise.
            return self.rt_system_id

    @cache.memoize(app.config['CACHE_CONFIGURATION'].get('TIMEOUT_CLEVERAGE', 30))
    def _call_cleverage(self, url):
        """
        http call to cleverage

        :raises RealtimeProxyError: on circuit-breaker open, timeout, or any request failure
        """
        logging.getLogger(__name__).debug('Cleverage RT service , call url : {}'.format(url))
        try:
            return self.breaker.call(requests.get, url, timeout=self.timeout, headers=self.service_args)
        except pybreaker.CircuitBreakerError as e:
            logging.getLogger(__name__).error(
                'Cleverage RT service dead, using base schedule (error: {})'.format(e)
            )
            raise RealtimeProxyError('circuit breaker open')
        except requests.Timeout as t:
            logging.getLogger(__name__).error(
                'Cleverage RT service timeout, using base schedule (error: {})'.format(t)
            )
            raise RealtimeProxyError('timeout')
        except Exception as e:
            logging.getLogger(__name__).exception('Cleverage RT error, using base schedule')
            raise RealtimeProxyError(str(e))

    def _make_url(self, route_point):
        """
        The url returns something like a departure on a stop point
        """
        stop_id = route_point.fetch_stop_id(self.object_id_tag)
        if not stop_id:
            # if the id is missing, we won't find any realtime
            logging.getLogger(__name__).debug(
                'missing realtime id for {obj}: stop code={s}'.format(obj=route_point, s=stop_id)
            )
            self.record_internal_failure('missing id')
            return None

        url = "{base_url}{stop_id}".format(base_url=self.service_url, stop_id=stop_id)
        return url

    def _get_dt(self, datetime_str):
        # Parse the provider's local time and convert it to UTC.
        dt = datetime.strptime(datetime_str, "%Y-%m-%d %H:%M:%S")
        utc_dt = self.timezone.normalize(self.timezone.localize(dt)).astimezone(pytz.utc)
        return utc_dt

    def _get_passages(self, route_point, cleverage_resp):
        """Extract the next passages matching the route point's line from a
        cleverage response, or None when the line is absent or has no schedule."""
        logging.getLogger(__name__).debug('cleverage response: {}'.format(cleverage_resp))

        line_code = route_point.fetch_line_id(self.object_id_tag)
        schedules = next(
            (line['schedules'] for line in cleverage_resp if line['code'].lower() == line_code.lower()), None
        )
        if schedules:
            next_passages = []
            for next_expected_st in schedules:
                # for the moment we handle only the NextStop and the direction
                dt = self._get_dt(next_expected_st['departure'])
                direction = next_expected_st.get('destination_name')
                is_real_time = next_expected_st.get('realtime') == '1'
                next_passage = RealTimePassage(dt, direction, is_real_time)
                next_passages.append(next_passage)

            return next_passages
        else:
            return None

    def _get_next_passage_for_route_point(
        self, route_point, count=None, from_dt=None, current_dt=None, duration=None
    ):
        """Query the external service for the route point and parse the result."""
        url = self._make_url(route_point)
        if not url:
            return None
        r = self._call_cleverage(url)
        if not r:
            return None

        if r.status_code != 200:
            # TODO better error handling, the response might be in 200 but in error
            logging.getLogger(__name__).error(
                'Cleverage RT service unavailable, impossible to query : {}'.format(r.url)
            )
            raise RealtimeProxyError('non 200 response')

        return self._get_passages(route_point, r.json())

    def status(self):
        """Report connector health (used by the /status API)."""
        return {
            'id': six.text_type(self.rt_system_id),
            'timeout': self.timeout,
            'circuit_breaker': {
                'current_state': self.breaker.current_state,
                'fail_counter': self.breaker.fail_counter,
                'reset_timeout': self.breaker.reset_timeout,
            },
        }

    def __eq__(self, other):
        return self.rt_system_id == other.rt_system_id
import re, inspect, textwrap, pydoc
from docscrape import NumpyDocString, FunctionDoc, ClassDoc
from six import iteritems
class SphinxDocString(NumpyDocString):
    """Renders the sections parsed by NumpyDocString as Sphinx (reST) markup."""

    # string conversion routines
    def _str_header(self, name, symbol='`'):
        return ['.. rubric:: ' + name, '']

    def _str_field_list(self, name):
        # reST field-list header, e.g. ":Parameters:".
        return [':' + name + ':']

    def _str_indent(self, doc, indent=4):
        out = []
        for line in doc:
            out += [' '*indent + line]
        return out

    def _str_signature(self):
        # NOTE: the unconditional return deliberately disables signature
        # rendering; the lines below are unreachable and kept for reference.
        return ['']
        if self['Signature']:
            return ['``%s``' % self['Signature']] + ['']
        else:
            return ['']

    def _str_summary(self):
        return self['Summary'] + ['']

    def _str_extended_summary(self):
        return self['Extended Summary'] + ['']

    def _str_param_list(self, name):
        # Render a parameter-like section (Parameters, Returns, ...) as an
        # indented field list.
        out = []
        if self[name]:
            out += self._str_field_list(name)
            out += ['']
            for param, param_type, desc in self[name]:
                out += self._str_indent(['**%s** : %s' % (param.strip(),
                                                          param_type)])
                out += ['']
                out += self._str_indent(desc, 8)
                out += ['']
        return out

    def _str_section(self, name):
        # Free-form section: dedent the raw lines under a rubric header.
        out = []
        if self[name]:
            out += self._str_header(name)
            out += ['']
            content = textwrap.dedent("\n".join(self[name])).split("\n")
            out += content
            out += ['']
        return out

    def _str_see_also(self, func_role):
        out = []
        if self['See Also']:
            see_also = super(SphinxDocString, self)._str_see_also(func_role)
            out = ['.. seealso::', '']
            # Drop the parent's "See Also" header, keep only the body.
            out += self._str_indent(see_also[2:])
        return out

    def _str_warnings(self):
        out = []
        if self['Warnings']:
            out = ['.. warning::', '']
            out += self._str_indent(self['Warnings'])
        return out

    def _str_index(self):
        idx = self['index']
        out = []
        if len(idx) == 0:
            return out

        out += ['.. index:: %s' % idx.get('default', '')]
        for section, references in iteritems(idx):
            if section == 'default':
                continue
            elif section == 'refguide':
                out += ['   single: %s' % (', '.join(references))]
            else:
                out += ['   %s: %s' % (section, ','.join(references))]
        return out

    def _str_references(self):
        out = []
        if self['References']:
            out += self._str_header('References')
            if isinstance(self['References'], str):
                self['References'] = [self['References']]
            out.extend(self['References'])
            out += ['']
        return out

    def __str__(self, indent=0, func_role="obj"):
        # Assemble all sections in display order, then indent the whole block.
        out = []
        out += self._str_signature()
        out += self._str_index() + ['']
        out += self._str_summary()
        out += self._str_extended_summary()
        for param_list in ('Parameters', 'Attributes', 'Methods',
                           'Returns', 'Raises'):
            out += self._str_param_list(param_list)
        out += self._str_warnings()
        out += self._str_see_also(func_role)
        out += self._str_section('Notes')
        out += self._str_references()
        out += self._str_section('Examples')
        out = self._str_indent(out, indent)
        return '\n'.join(out)
class SphinxFunctionDoc(SphinxDocString, FunctionDoc):
    """FunctionDoc whose string rendering uses the Sphinx conversions."""
    pass
class SphinxClassDoc(SphinxDocString, ClassDoc):
    """ClassDoc whose string rendering uses the Sphinx conversions."""
    pass
def get_doc_object(obj, what=None, doc=None):
    """Return the appropriate Sphinx documentation wrapper for *obj*.

    :param obj: the object to document (class, module, callable or other).
    :param what: optional explicit kind ('class', 'module', 'function',
        'method', 'object'); inferred from *obj* when None.
    :param doc: optional pre-extracted docstring; fetched via pydoc otherwise.
    """
    if what is None:
        if inspect.isclass(obj):
            what = 'class'
        elif inspect.ismodule(obj):
            what = 'module'
        elif callable(obj):
            what = 'function'
        else:
            what = 'object'
    if what == 'class':
        return SphinxClassDoc(obj, '', func_doc=SphinxFunctionDoc, doc=doc)
    elif what in ('function', 'method'):
        return SphinxFunctionDoc(obj, '', doc=doc)
    else:
        if doc is None:
            doc = pydoc.getdoc(obj)
        return SphinxDocString(doc)
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2017 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
"""
DESRES file format --- :mod:`MDAnalysis.coordinates.DMS`
========================================================
Classes to read DESRES_ Molecular Structure file format (DMS_)
coordinate files (as used by the Desmond_ MD package).
.. _DESRES: http://www.deshawresearch.com
.. _Desmond: http://www.deshawresearch.com/resources_desmond.html
.. _DMS: http://www.deshawresearch.com/Desmond_Users_Guide-0.7.pdf
"""
import numpy as np
import sqlite3
from . import base
from .core import triclinic_box
class DMSReader(base.SingleFrameReaderBase):
    """
    Reads both coordinates and velocities.

    .. versionchanged:: 0.11.0
       Frames now 0-based instead of 1-based
    """
    format = 'DMS'
    units = {'time': None, 'length': 'A', 'velocity': 'A/ps'}

    def get_coordinates(self, cur):
        """Return every particle position as a list of ``(x, y, z)`` tuples."""
        cur.execute('SELECT * FROM particle')
        particles = cur.fetchall()
        return [(p['x'], p['y'], p['z']) for p in particles]

    def get_particle_by_columns(self, cur, columns=None):
        """Return the requested columns (default ``x``/``y``/``z``) for every
        particle, as a list of tuples."""
        if columns is None:
            columns = ['x', 'y', 'z']
        cur.execute('SELECT * FROM particle')
        particles = cur.fetchall()
        return [tuple([p[c] for c in columns]) for p in particles]

    def get_global_cell(self, cur):
        """Read the three global_cell rows into ``{'x': [...], 'y': [...], 'z': [...]}``."""
        cur.execute('SELECT * FROM global_cell')
        rows = cur.fetchall()
        assert len(rows) == 3
        x = [row["x"] for row in rows]
        y = [row["y"] for row in rows]
        z = [row["z"] for row in rows]
        return {'x': x, 'y': y, 'z': z}

    def _read_first_frame(self):
        # Required override of SingleFrameReaderBase: populate self.ts from
        # the DMS (sqlite) file.
        coords_list = None
        velocities_list = None

        def dict_factory(cursor, row):
            # Make cursor.fetchall() return dicts keyed by column name.
            d = {}
            for idx, col in enumerate(cursor.description):
                d[col[0]] = row[idx]
            return d

        with sqlite3.connect(self.filename) as con:
            # This will return dictionaries instead of tuples, when calling cur.fetch() or fetchall()
            con.row_factory = dict_factory
            cur = con.cursor()
            coords_list = self.get_coordinates(cur)
            velocities_list = self.get_particle_by_columns(cur, columns=['vx', 'vy', 'vz'])
            unitcell = self.get_global_cell(cur)

        if not coords_list:
            raise IOError("Found no coordinates")
        self.n_atoms = len(coords_list)

        velocities = np.array(velocities_list, dtype=np.float32)
        if not velocities.any():
            # All-zero velocities mean the file carries none.
            velocities = None

        self.ts = self._Timestep.from_coordinates(
            np.array(coords_list, dtype=np.float32),
            velocities=velocities,
            **self._ts_kwargs)
        self.ts.frame = 0  # 0-based frame number

        self.ts.dimensions = triclinic_box(unitcell['x'], unitcell['y'], unitcell['z'])

        if self.convert_units:
            self.convert_pos_from_native(self.ts._pos)  # in-place !
            if self.ts.dimensions is not None:
                self.convert_pos_from_native(self.ts.dimensions[:3])  # in-place !
            if self.ts.has_velocities:
                # converts nm/ps to A/ps units
                self.convert_velocities_from_native(self.ts._velocities)
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import i2c, sensor
from esphome.const import (
CONF_ADDRESS,
CONF_ID,
CONF_OVERSAMPLING,
CONF_RANGE,
ICON_MAGNET,
STATE_CLASS_MEASUREMENT,
UNIT_MICROTESLA,
UNIT_DEGREES,
ICON_SCREEN_ROTATION,
CONF_UPDATE_INTERVAL,
)
DEPENDENCIES = ["i2c"]
# C++ namespace generated for this component.
qmc5883l_ns = cg.esphome_ns.namespace("qmc5883l")
# Configuration keys specific to this sensor platform.
CONF_FIELD_STRENGTH_X = "field_strength_x"
CONF_FIELD_STRENGTH_Y = "field_strength_y"
CONF_FIELD_STRENGTH_Z = "field_strength_z"
CONF_HEADING = "heading"
QMC5883LComponent = qmc5883l_ns.class_(
    "QMC5883LComponent", cg.PollingComponent, i2c.I2CDevice
)
# Supported output data rates (Hz) mapped to the C++ enum values.
QMC5883LDatarate = qmc5883l_ns.enum("QMC5883LDatarate")
QMC5883LDatarates = {
    10: QMC5883LDatarate.QMC5883L_DATARATE_10_HZ,
    50: QMC5883LDatarate.QMC5883L_DATARATE_50_HZ,
    100: QMC5883LDatarate.QMC5883L_DATARATE_100_HZ,
    200: QMC5883LDatarate.QMC5883L_DATARATE_200_HZ,
}
# Measurement ranges (ΒµT) mapped to the C++ enum values.
QMC5883LRange = qmc5883l_ns.enum("QMC5883LRange")
QMC5883L_RANGES = {
    200: QMC5883LRange.QMC5883L_RANGE_200_UT,
    800: QMC5883LRange.QMC5883L_RANGE_800_UT,
}
# Oversampling factors mapped to the C++ enum values.
QMC5883LOversampling = qmc5883l_ns.enum("QMC5883LOversampling")
QMC5883LOversamplings = {
    512: QMC5883LOversampling.QMC5883L_SAMPLING_512,
    256: QMC5883LOversampling.QMC5883L_SAMPLING_256,
    128: QMC5883LOversampling.QMC5883L_SAMPLING_128,
    64: QMC5883LOversampling.QMC5883L_SAMPLING_64,
}
def validate_enum(enum_values, units=None, int=True):
    """Build a config validator that strips an optional unit suffix from the
    value and then maps the remainder through ``cv.enum(enum_values)``.

    ``units`` may be a single suffix or a list of accepted suffixes.
    (The ``int`` parameter name mirrors ``cv.enum`` and is kept for
    interface compatibility.)
    """
    if units is None:
        suffixes = []
    elif isinstance(units, list):
        suffixes = [str(u) for u in units]
    else:
        suffixes = [str(units)]
    to_enum = cv.enum(enum_values, int=int)

    def validator(value):
        text = cv.string(value)
        for suffix in suffixes:
            if text.endswith(suffix):
                text = text[: -len(suffix)]
                break
        return to_enum(text)

    return validator
# Shared schema for the three per-axis magnetic field strength sensors.
field_strength_schema = sensor.sensor_schema(
    unit_of_measurement=UNIT_MICROTESLA,
    icon=ICON_MAGNET,
    accuracy_decimals=1,
    state_class=STATE_CLASS_MEASUREMENT,
)
# Schema for the derived compass-heading sensor.
heading_schema = sensor.sensor_schema(
    unit_of_measurement=UNIT_DEGREES,
    icon=ICON_SCREEN_ROTATION,
    accuracy_decimals=1,
)
# Platform schema: component options plus optional child sensors, polling
# every 60s by default, on I2C address 0x0D.
CONFIG_SCHEMA = (
    cv.Schema(
        {
            cv.GenerateID(): cv.declare_id(QMC5883LComponent),
            cv.Optional(CONF_ADDRESS): cv.i2c_address,
            cv.Optional(CONF_RANGE, default="200Β΅T"): validate_enum(
                QMC5883L_RANGES, units=["uT", "Β΅T"]
            ),
            cv.Optional(CONF_OVERSAMPLING, default="512x"): validate_enum(
                QMC5883LOversamplings, units="x"
            ),
            cv.Optional(CONF_FIELD_STRENGTH_X): field_strength_schema,
            cv.Optional(CONF_FIELD_STRENGTH_Y): field_strength_schema,
            cv.Optional(CONF_FIELD_STRENGTH_Z): field_strength_schema,
            cv.Optional(CONF_HEADING): heading_schema,
        }
    )
    .extend(cv.polling_component_schema("60s"))
    .extend(i2c.i2c_device_schema(0x0D))
)
def auto_data_rate(config):
    """Pick the slowest chip data rate that still meets the configured
    ``update_interval``; fall back to the fastest rate (200 Hz) when the
    interval is shorter than any supported rate allows."""
    interval_sec = config[CONF_UPDATE_INTERVAL].total_milliseconds / 1000
    interval_hz = 1.0 / interval_sec
    for datarate in sorted(QMC5883LDatarates.keys()):
        if float(datarate) >= interval_hz:
            return QMC5883LDatarates[datarate]
    return QMC5883LDatarates[200]


async def to_code(config):
    """Generate the C++ setup code for the QMC5883L component."""
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)
    await i2c.register_i2c_device(var, config)
    cg.add(var.set_oversampling(config[CONF_OVERSAMPLING]))
    # Data rate is derived from the polling interval rather than configured.
    cg.add(var.set_datarate(auto_data_rate(config)))
    cg.add(var.set_range(config[CONF_RANGE]))
    if CONF_FIELD_STRENGTH_X in config:
        sens = await sensor.new_sensor(config[CONF_FIELD_STRENGTH_X])
        cg.add(var.set_x_sensor(sens))
    if CONF_FIELD_STRENGTH_Y in config:
        sens = await sensor.new_sensor(config[CONF_FIELD_STRENGTH_Y])
        cg.add(var.set_y_sensor(sens))
    if CONF_FIELD_STRENGTH_Z in config:
        sens = await sensor.new_sensor(config[CONF_FIELD_STRENGTH_Z])
        cg.add(var.set_z_sensor(sens))
    if CONF_HEADING in config:
        sens = await sensor.new_sensor(config[CONF_HEADING])
        cg.add(var.set_heading_sensor(sens))
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetKeyResult',
'AwaitableGetKeyResult',
'get_key',
'get_key_output',
]
@pulumi.output_type
class GetKeyResult:
    """
    A collection of values returned by getKey.
    """
    def __init__(__self__, created_date=None, description=None, enabled=None, id=None, last_updated_date=None, name=None, tags=None, value=None):
        if created_date and not isinstance(created_date, str):
            raise TypeError("Expected argument 'created_date' to be a str")
        pulumi.set(__self__, "created_date", created_date)
        if description and not isinstance(description, str):
            raise TypeError("Expected argument 'description' to be a str")
        pulumi.set(__self__, "description", description)
        if enabled and not isinstance(enabled, bool):
            raise TypeError("Expected argument 'enabled' to be a bool")
        pulumi.set(__self__, "enabled", enabled)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if last_updated_date and not isinstance(last_updated_date, str):
            raise TypeError("Expected argument 'last_updated_date' to be a str")
        pulumi.set(__self__, "last_updated_date", last_updated_date)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if value and not isinstance(value, str):
            raise TypeError("Expected argument 'value' to be a str")
        pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter(name="createdDate")
    def created_date(self) -> str:
        """
        Date and time when the API Key was created.
        """
        return pulumi.get(self, "created_date")

    @property
    @pulumi.getter
    def description(self) -> str:
        """
        Description of the API Key.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        """
        Whether the API Key is enabled.
        """
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Set to the ID of the API Key.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="lastUpdatedDate")
    def last_updated_date(self) -> str:
        """
        Date and time when the API Key was last updated.
        """
        return pulumi.get(self, "last_updated_date")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Set to the name of the API Key.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def tags(self) -> Mapping[str, str]:
        """
        Map of tags for the resource.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def value(self) -> str:
        """
        Set to the value of the API Key.
        """
        return pulumi.get(self, "value")


class AwaitableGetKeyResult(GetKeyResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetKeyResult(
            created_date=self.created_date,
            description=self.description,
            enabled=self.enabled,
            id=self.id,
            last_updated_date=self.last_updated_date,
            name=self.name,
            tags=self.tags,
            value=self.value)


def get_key(id: Optional[str] = None,
            tags: Optional[Mapping[str, str]] = None,
            opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetKeyResult:
    """
    Use this data source to get the name and value of a pre-existing API Key, for
    example to supply credentials for a dependency microservice.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_aws as aws

    my_api_key = aws.apigateway.get_key(id="ru3mpjgse6")
    ```


    :param str id: ID of the API Key to look up.
    :param Mapping[str, str] tags: Map of tags for the resource.
    """
    __args__ = dict()
    __args__['id'] = id
    __args__['tags'] = tags
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    __ret__ = pulumi.runtime.invoke('aws:apigateway/getKey:getKey', __args__, opts=opts, typ=GetKeyResult).value

    return AwaitableGetKeyResult(
        created_date=pulumi.get(__ret__, 'created_date'),
        description=pulumi.get(__ret__, 'description'),
        enabled=pulumi.get(__ret__, 'enabled'),
        id=pulumi.get(__ret__, 'id'),
        last_updated_date=pulumi.get(__ret__, 'last_updated_date'),
        name=pulumi.get(__ret__, 'name'),
        tags=pulumi.get(__ret__, 'tags'),
        value=pulumi.get(__ret__, 'value'))


@_utilities.lift_output_func(get_key)
def get_key_output(id: Optional[pulumi.Input[str]] = None,
                   tags: Optional[pulumi.Input[Optional[Mapping[str, str]]]] = None,
                   opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetKeyResult]:
    """
    Use this data source to get the name and value of a pre-existing API Key, for
    example to supply credentials for a dependency microservice.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_aws as aws

    my_api_key = aws.apigateway.get_key(id="ru3mpjgse6")
    ```


    :param str id: ID of the API Key to look up.
    :param Mapping[str, str] tags: Map of tags for the resource.
    """
    ...
#!/usr/bin/env python
"""
QGraphicsView customized for displaying camera images.
Hazen 3/17
"""
from PyQt5 import QtCore, QtGui, QtWidgets
class QtCameraGraphicsView(QtWidgets.QGraphicsView):
"""
This is responsible for handling the camera transforms
(flip_horizontal, flip_vertical, transpose). Hopefully
this makes rendering a lot simpler for us as we don't
have keep track of all these details.
"""
dragMove = QtCore.pyqtSignal(int, int)
dragStart = QtCore.pyqtSignal()
newCenter = QtCore.pyqtSignal(int, int)
newScale = QtCore.pyqtSignal(int)
def __init__(self, parent = None, **kwds):
kwds["parent"] = parent
super().__init__(**kwds)
self.can_drag = False
self.chip_max = 100
self.center_x = 0
self.center_y = 0
self.ctrl_key_down = False
self.display_scale = 0
self.drag_mode = False
self.drag_scale = 1.0
self.drag_x = 0
self.drag_y = 0
self.frame_size = 0
self.max_scale = 8
self.min_scale = -8
self.transform = QtGui.QTransform()
self.viewport_min = 100
self.setAcceptDrops(True)
self.setAlignment(QtCore.Qt.AlignCenter)
self.setBackgroundBrush(QtGui.QBrush(QtGui.QColor(0,0,0)))
self.setDragMode(QtWidgets.QGraphicsView.RubberBandDrag)
def calcScale(self, size):
if (size < self.viewport_min):
return int(self.viewport_min/size) -1
else:
return -int(size/self.viewport_min)
def METHOD_NAME(self, enabled):
self.can_drag = enabled
def getCurrentCenter(self):
center = self.mapToScene(self.viewport().rect().center())
self.center_x = center.x()
self.center_y = center.y()
self.newCenter.emit(self.center_x, self.center_y)
def keyPressEvent(self, event):
if self.can_drag and (event.key() == QtCore.Qt.Key_Control):
self.ctrl_key_down = True
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
def keyReleaseEvent(self, event):
if self.can_drag and (event.key() == QtCore.Qt.Key_Control):
self.ctrl_key_down = False
self.drag_mode = False
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
def mouseMoveEvent(self, event):
if self.drag_mode:
dx = (event.x() - self.drag_x) * self.drag_scale
dy = (event.y() - self.drag_y) * self.drag_scale
self.dragMove.emit(dx, dy)
else:
super().mouseMoveEvent(event)
def mousePressEvent(self, event):
pos = self.mapToScene(event.pos())
self.center_x = pos.x()
self.center_y = pos.y()
self.newCenter.emit(self.center_x, self.center_y)
self.centerOn(self.center_x, self.center_y)
if self.ctrl_key_down:
self.drag_mode = True
self.drag_x = event.x()
self.drag_y = event.y()
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.ClosedHandCursor))
self.dragStart.emit()
else:
super().mousePressEvent(event)
def mouseReleaseEvent(self, event):
if self.drag_mode:
self.drag_mode = False
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.OpenHandCursor))
else:
super().mouseReleaseEvent(event)
def newConfiguration(self, camera_functionality, feed_parameters):
"""
This is called when the camera or frame size may have changed.
"""
self.chip_max = camera_functionality.getChipMax()
# Calculate transform matrix.
[cx, cy] = camera_functionality.getChipSize()
if camera_functionality.getParameter("flip_horizontal"):
flip_lr = QtGui.QTransform(-1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
cx, 0.0, 1.0)
else:
flip_lr = QtGui.QTransform()
if camera_functionality.getParameter("flip_vertical"):
flip_ud = QtGui.QTransform(1.0, 0.0, 0.0,
0.0, -1.0, 0.0,
0.0, cy, 1.0)
else:
flip_ud = QtGui.QTransform()
if camera_functionality.getParameter("transpose"):
flip_xy = QtGui.QTransform(0.0, 1.0, 0.0,
1.0, 0.0, 0.0,
0.0, 0.0, 1.0)
else:
flip_xy = QtGui.QTransform()
self.transform = flip_lr * flip_ud * flip_xy
self.setTransform(self.transform)
# Calculate initial zoom and center position.
if feed_parameters.get("initialized"):
self.display_scale = feed_parameters.get("scale")
self.center_x = feed_parameters.get("center_x")
self.center_y = feed_parameters.get("center_y")
else:
self.display_scale = self.calcScale(camera_functionality.getFrameMax())
[self.center_x, self.center_y] = camera_functionality.getFrameCenter()
self.newCenter.emit(self.center_x, self.center_y)
feed_parameters.set("initialized", True)
# Calculate max zoom out.
self.min_scale = self.calcScale(self.chip_max)
#
# Among other possible issues, this solves the problem that at startup
# self.display_scale will get set to the wrong value this GraphicsView
# will not have the correct size the first time we come through this
# method, then on the second pass initialized will be set and we'll
# locked in on a self.display_scale value that is out of range and cannot
# be changed using the scroll wheel.
#
if (self.display_scale < self.min_scale):
self.display_scale = self.min_scale
self.rescale(self.display_scale)
    def resizeEvent(self, event):
        """
        Recompute the zoom limits when the widget is resized.
        """
        #
        # Use the GraphicsView contentsRect size and not it's viewport
        # contentsRect size because depending on the zoom scroll bars
        # will appear and disappear throwing off the calculation.
        #
        #viewport_rect = self.viewport().contentsRect()
        viewport_rect = self.contentsRect()

        # The smaller dimension limits how far we can zoom out.
        self.viewport_min = viewport_rect.width() if (viewport_rect.width() < viewport_rect.height())\
                            else viewport_rect.height()

        # Clamp the current scale to the new minimum, if necessary.
        self.min_scale = self.calcScale(self.chip_max)
        if (self.display_scale < self.min_scale):
            self.display_scale = self.min_scale

        super().resizeEvent(event)
    def rescale(self, scale):
        """
        Rescale the view so that it looks like we have zoomed in/out.

        'scale' is an integer zoom step: positive steps magnify by
        (scale + 1), negative steps shrink by 1/(-scale + 1), and zero
        is 1:1. Requests outside [min_scale, max_scale] are ignored.
        """
        if (scale < self.min_scale) or (scale > self.max_scale):
            return

        self.display_scale = scale
        self.newScale.emit(self.display_scale)

        # Map the integer zoom step to a floating point magnification.
        if (self.display_scale == 0):
            flt_scale = 1.0
        elif (self.display_scale > 0):
            flt_scale = float(self.display_scale + 1)
        else:
            flt_scale = 1.0/(-self.display_scale + 1)

        # Inverse of the zoom; presumably used to convert screen-space drag
        # distances back to scene units — TODO confirm in the drag handler.
        self.drag_scale = 1.0/flt_scale

        # Apply the zoom on top of the camera orientation transform.
        transform = QtGui.QTransform()
        transform.scale(flt_scale, flt_scale)
        self.setTransform(self.transform * transform)
        self.centerOn(self.center_x, self.center_y)
def wheelEvent(self, event):
"""
Zoom in/out with the mouse wheel.
"""
if not event.angleDelta().isNull():
if (event.angleDelta().y() > 0):
self.rescale(self.display_scale + 1)
else:
self.rescale(self.display_scale - 1)
event.accept() |
299,290 | test create timeseries data | # Copyright (c) Microsoft Corporation
# Licensed under the MIT License.
import pytest
from rai_test_utils.datasets.tabular import (
create_adult_census_data, create_binary_classification_dataset,
create_cancer_data, create_complex_titanic_data, create_diabetes_data,
create_energy_data, create_housing_data, create_iris_data, create_msx_data,
create_multiclass_classification_dataset, create_reviews_data,
create_simple_titanic_data, create_timeseries_data, create_wine_data)
from rai_test_utils.datasets.vision import (
get_images, load_fridge_object_detection_dataset)
class TestDataUtils:
@pytest.mark.parametrize('if_small_data', [True, False])
def test_create_housing_data(self, if_small_data):
X_train, X_test, y_train, y_test, feature_names = \
create_housing_data(if_small_data)
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
assert feature_names is not None
def test_create_simple_titanic_data(self):
X_train, X_test, y_train, y_test, num_feature_names, \
cat_feature_names = create_simple_titanic_data()
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
assert num_feature_names is not None
assert cat_feature_names is not None
def test_create_binary_classification_dataset(self):
X_train, X_test, y_train, y_test, classes = \
create_binary_classification_dataset()
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
assert classes is not None
def test_create_diabetes_data(self):
X_train, X_test, y_train, y_test, feature_names = \
create_diabetes_data()
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
assert feature_names is not None
@pytest.mark.parametrize('return_dataframe', [True, False])
def test_create_cancer_data(self, return_dataframe):
X_train, X_test, y_train, y_test, feature_names, classes = \
create_cancer_data(return_dataframe)
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
assert feature_names is not None
assert classes is not None
def test_create_wine_data(self):
X_train, X_test, y_train, y_test, feature_names, classes = \
create_wine_data()
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
assert feature_names is not None
assert classes is not None
@pytest.mark.parametrize('append_special_characters', [True, False])
def test_create_iris_data(self, append_special_characters):
X_train, X_test, y_train, y_test, feature_names, classes = \
create_iris_data(append_special_characters)
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
assert feature_names is not None
assert classes is not None
@pytest.mark.parametrize('string_labels', [True, False])
def test_create_adult_census_data(self, string_labels):
X_train, X_test, y_train, y_test, feature_names = \
create_adult_census_data(string_labels)
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
assert feature_names is not None
def METHOD_NAME(self):
X_train, y_train = create_timeseries_data(
sample_cnt_per_grain=10,
time_column_name='time',
target_column_name='target',
)
assert X_train is not None
assert y_train is not None
def test_create_msx_data(self):
X_train, X_test, y_train, y_test = \
create_msx_data(test_size=0.2)
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
def test_create_energy_data(self):
X_train, X_test, y_train, y_test, feature_names = \
create_energy_data()
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
assert feature_names is not None
def test_create_complex_titanic_data(self):
X_train, X_test, y_train, y_test = create_complex_titanic_data()
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
def test_create_multiclass_classification_dataset(self):
X_train, X_test, y_train, y_test, classes = \
create_multiclass_classification_dataset()
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
assert classes is not None
def test_create_reviews_data(self):
X_train, X_test, y_train, y_test = \
create_reviews_data(test_size=0.2)
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
def test_create_fridge_data(self):
dataset = load_fridge_object_detection_dataset()
X_train = X_test = dataset[["image"]]
y_train = y_test = dataset[["label"]]
assert X_train is not None
assert X_test is not None
assert y_train is not None
assert y_test is not None
def test_get_images(self):
fridge_dataset = load_fridge_object_detection_dataset().iloc[:2]
images = get_images(fridge_dataset, "RGB", None)
assert len(images) == 2
assert images[0].shape == (666, 499, 3)
assert images[1].shape == (666, 499, 3) |
299,291 | get bloom filter object | # -*- coding: utf-8 -*-
"""Analysis plugin to look up file hashes in bloom database."""
import flor
from plaso.analysis import hash_tagging
from plaso.analysis import logger
from plaso.analysis import manager
class BloomAnalysisPlugin(hash_tagging.HashTaggingAnalysisPlugin):
  """Analysis plugin for looking up hashes in bloom database."""

  DATA_TYPES = frozenset(['fs:stat', 'fs:stat:ntfs'])

  NAME = 'bloom'

  SUPPORTED_HASHES = frozenset(['md5', 'sha1', 'sha256'])

  # Label applied to events whose hash is present in the bloom database,
  # unless overridden via SetLabel().
  DEFAULT_LABEL = 'bloom_present'

  def __init__(self):
    """Initializes a bloom database analysis plugin."""
    super(BloomAnalysisPlugin, self).__init__()
    self._bloom_database_path = None
    self._bloom_filter_object = None
    self._label = self.DEFAULT_LABEL

  def _Analyze(self, hashes):
    """Looks up file hashes in a bloom database.

    Args:
      hashes (list[str]): hash values to look up.

    Returns:
      list[HashAnalysis]: analysis results, or an empty list on error.

    Raises:
      RuntimeError: when the analyzer fail to get a bloom filter object.
    """
    # Renamed from the METHOD_NAME placeholder; see _GetBloomFilterObject.
    bloom_filter = self._GetBloomFilterObject(cached=True)
    if not bloom_filter:
      raise RuntimeError('Failed to open bloom file')

    hash_analyses = []
    for digest in hashes:
      response = self._QueryHash(digest=digest, bloom_filter=bloom_filter)
      if response is not None:
        hash_analysis = hash_tagging.HashAnalysis(
            subject_hash=digest, hash_information=response)
        hash_analyses.append(hash_analysis)

    return hash_analyses

  def _GenerateLabels(self, hash_information):
    """Generates a list of strings that will be used in the event tag.

    Args:
      hash_information (bool): response from the hash tagging that indicates
          that the file hash was present or not.

    Returns:
      list[str]: list of labels to apply to event.
    """
    if hash_information:
      return [self._label]
    return []

  def _GetBloomFilterObject(self, cached=True):
    """Loads a bloom filter file in memory.

    This was the METHOD_NAME placeholder; the name is restored so that
    _Analyze() and TestLoading() resolve correctly.

    Args:
      cached (bool): True if the bloom filter should be cached.

    Returns:
      flor.BloomFilter: bloom filter object or None if not available.
    """
    bloom_filter = self._bloom_filter_object
    if not bloom_filter:
      logger.debug(
          f'Opening bloom database file: {self._bloom_database_path:s}.')
      # NOTE(review): `flor` is imported unconditionally at module level, so
      # this check can never trigger; it presumably guarded an optional
      # try/except import — confirm against the module's import block.
      if not flor:
        logger.warning('Missing optional dependency: flor')
        return None

      try:
        bloom_filter = flor.BloomFilter()
        with open(self._bloom_database_path, 'rb') as file_object:
          bloom_filter.read(file_object)

      except IOError as exception:
        bloom_filter = None
        logger.warning((
            f'Unable to open bloom database file: '
            f'{self._bloom_database_path:s} with error: {exception!s}.'))

      if cached:
        self._bloom_filter_object = bloom_filter

    return bloom_filter

  def _QueryHash(self, digest, bloom_filter):
    """Queries BloomFilter for a specific hash in upper case.

    Args:
      digest (str): hash to look up.
      bloom_filter (flor.BloomFilter): instanced bloom filter.

    Returns:
      bool: True if the hash was found, False if not.
    """
    value_to_test = digest.upper().encode('utf-8')
    return value_to_test in bloom_filter

  def SetBloomDatabasePath(self, bloom_database_path):
    """Set the path to the bloom file containing hash

    Args:
      bloom_database_path (str): Path to the bloom file
    """
    self._bloom_database_path = bloom_database_path

  def SetLabel(self, label):
    """Sets the tagging label.

    Args:
      label (str): label to apply to events extracted from files that are
          present in the bloom database.
    """
    self._label = label

  def TestLoading(self):
    """Checks if the bloom database exist and is valid.

    Returns:
      bool: True is the bloom database exist and is valid.
    """
    return bool(self._GetBloomFilterObject(cached=False))
manager.AnalysisPluginManager.RegisterPlugin(BloomAnalysisPlugin) |
299,292 | list | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, Callable, Dict, Iterable, Optional, TypeVar
import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
from .._vendor import _convert_request
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(**kwargs: Any) -> HttpRequest:
    """Build the GET request for the Microsoft.LabServices operations listing."""
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # An explicit "api_version" kwarg wins over an "api-version" query param.
    api_version: Literal["2022-08-01"] = kwargs.pop("api_version", params.pop("api-version", "2022-08-01"))
    accept = headers.pop("Accept", "application/json")

    # Construct URL
    url = kwargs.pop("template_url", "/providers/Microsoft.LabServices/operations")

    # Construct parameters
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=url, params=params, headers=headers, **kwargs)
class Operations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.labservices.ManagedLabsClient`'s
        :attr:`operations` attribute.
    """

    models = _models

    def __init__(self, *args, **kwargs):
        # ``list`` here is the builtin, not the operation method below:
        # class attributes are not visible in method scope. (Restored from
        # the METHOD_NAME placeholder, which broke both call sites.)
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

    @distributed_trace
    def list(self, **kwargs: Any) -> Iterable["_models.Operation"]:
        """Get all operations.

        Returns a list of all operations.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either Operation or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.labservices.models.Operation]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2022-08-01"] = kwargs.pop(
            "api_version", _params.pop("api-version", self._config.api_version)
        )
        cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None)

        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page goes through the request builder; subsequent pages
            # reuse the service-provided next link.
            if not next_link:
                request = build_list_request(
                    api_version=api_version,
                    template_url=self.list.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and hand back (next_link, items) to ItemPaged.
            deserialized = self._deserialize("OperationListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    list.metadata = {"url": "/providers/Microsoft.LabServices/operations"}
299,293 | get i pv6 firewall rule output | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'GetIPv6FirewallRuleResult',
'AwaitableGetIPv6FirewallRuleResult',
'get_i_pv6_firewall_rule',
'get_i_pv6_firewall_rule_output',
]
@pulumi.output_type
class GetIPv6FirewallRuleResult:
"""
An IPv6 server firewall rule.
"""
def __init__(__self__, end_i_pv6_address=None, id=None, name=None, start_i_pv6_address=None, type=None):
if end_i_pv6_address and not isinstance(end_i_pv6_address, str):
raise TypeError("Expected argument 'end_i_pv6_address' to be a str")
pulumi.set(__self__, "end_i_pv6_address", end_i_pv6_address)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if start_i_pv6_address and not isinstance(start_i_pv6_address, str):
raise TypeError("Expected argument 'start_i_pv6_address' to be a str")
pulumi.set(__self__, "start_i_pv6_address", start_i_pv6_address)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="endIPv6Address")
def end_i_pv6_address(self) -> Optional[str]:
"""
The end IP address of the firewall rule. Must be IPv6 format. Must be greater than or equal to startIpAddress.
"""
return pulumi.get(self, "end_i_pv6_address")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="startIPv6Address")
def start_i_pv6_address(self) -> Optional[str]:
"""
The start IP address of the firewall rule. Must be IPv6 format.
"""
return pulumi.get(self, "start_i_pv6_address")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetIPv6FirewallRuleResult(GetIPv6FirewallRuleResult):
    """Awaitable variant of GetIPv6FirewallRuleResult."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` turns this method into a generator so the
        # object can be awaited; it never actually suspends and immediately
        # returns a plain (non-awaitable) copy of the result.
        if False:
            yield self
        return GetIPv6FirewallRuleResult(
            end_i_pv6_address=self.end_i_pv6_address,
            id=self.id,
            name=self.name,
            start_i_pv6_address=self.start_i_pv6_address,
            type=self.type)
def get_i_pv6_firewall_rule(firewall_rule_name: Optional[str] = None,
                            resource_group_name: Optional[str] = None,
                            server_name: Optional[str] = None,
                            opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetIPv6FirewallRuleResult:
    """
    Gets an IPv6 firewall rule.


    :param str firewall_rule_name: The name of the firewall rule.
    :param str resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
    :param str server_name: The name of the server.
    """
    __args__ = dict()
    __args__['firewallRuleName'] = firewall_rule_name
    __args__['resourceGroupName'] = resource_group_name
    __args__['serverName'] = server_name
    opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
    # Invoke the provider function and unwrap the raw result.
    __ret__ = pulumi.runtime.invoke('azure-native:sql/v20230201preview:getIPv6FirewallRule', __args__, opts=opts, typ=GetIPv6FirewallRuleResult).value

    return AwaitableGetIPv6FirewallRuleResult(
        end_i_pv6_address=pulumi.get(__ret__, 'end_i_pv6_address'),
        id=pulumi.get(__ret__, 'id'),
        name=pulumi.get(__ret__, 'name'),
        start_i_pv6_address=pulumi.get(__ret__, 'start_i_pv6_address'),
        type=pulumi.get(__ret__, 'type'))
@_utilities.lift_output_func(get_i_pv6_firewall_rule)
def get_i_pv6_firewall_rule_output(firewall_rule_name: Optional[pulumi.Input[str]] = None,
                                   resource_group_name: Optional[pulumi.Input[str]] = None,
                                   server_name: Optional[pulumi.Input[str]] = None,
                                   opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetIPv6FirewallRuleResult]:
    """
    Gets an IPv6 firewall rule.


    :param str firewall_rule_name: The name of the firewall rule.
    :param str resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
    :param str server_name: The name of the server.
    """
    # The body is intentionally empty: lift_output_func wraps
    # get_i_pv6_firewall_rule to produce this Output-returning variant.
    # Renamed from the METHOD_NAME placeholder to match the name exported
    # in this module's __all__.
    ...
299,294 | open signal handler | # piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Qt event proxying and processing using ``trio`` mem chans.
"""
from contextlib import asynccontextmanager as acm
from typing import Callable
import trio
from tractor.trionics import gather_contexts
from PyQt5 import QtCore
from PyQt5.QtCore import QEvent, pyqtBoundSignal
from PyQt5.QtWidgets import QWidget
from PyQt5.QtWidgets import (
QGraphicsSceneMouseEvent as gs_mouse,
)
from ..data.types import Struct
MOUSE_EVENTS = {
gs_mouse.GraphicsSceneMousePress,
gs_mouse.GraphicsSceneMouseRelease,
QEvent.MouseButtonPress,
QEvent.MouseButtonRelease,
# QtGui.QMouseEvent,
}
# TODO: maybe consider some constrained ints down the road?
# https://pydantic-docs.helpmanual.io/usage/types/#constrained-types
class KeyboardMsg(Struct):
    '''Unpacked Qt keyboard event data.
    '''
    event: QEvent  # the originating Qt event object
    etype: int     # event type (e.g. QEvent.KeyPress / KeyRelease)
    key: int       # key code
    mods: int      # keyboard modifier flags
    txt: str       # text produced by the key press

    def to_tuple(self) -> tuple:
        '''Return this msg's field values as a plain tuple.'''
        return tuple(self.to_dict().values())
class MouseMsg(Struct):
    '''Unpacked Qt mouse event data.

    NOTE(review): the original docstring said "keyboard" but the fields
    (and the MOUSE_EVENTS branch that constructs this) are mouse data.
    '''
    event: QEvent  # the originating Qt event object
    etype: int     # event type (press/release variants)
    button: int    # mouse button code
# TODO: maybe add some methods to detect key combos? Or is that gonna be
# better with pattern matching?
# # ctl + alt as combo
# ctlalt = False
# if (QtCore.Qt.AltModifier | QtCore.Qt.ControlModifier) == mods:
# ctlalt = True
class EventRelay(QtCore.QObject):
    '''
    Relay Qt events over a trio memory channel for async processing.
    '''
    # set of QEvent types this relay forwards; everything else passes through
    _event_types: set[QEvent] = set()
    # trio channel the Qt thread pushes unpacked event msgs into
    _send_chan: trio.abc.SendChannel = None
    # when True, auto-repeated key events are swallowed entirely
    _filter_auto_repeats: bool = True

    def eventFilter(
        self,
        source: QWidget,
        ev: QEvent,
    ) -> bool:  # NOTE: annotation fixed; this returns a bool, not None
        '''
        Qt global event filter: return `False` to pass through and `True`
        to filter event out.

        https://doc.qt.io/qt-5/qobject.html#eventFilter
        https://doc.qt.io/qtforpython/overviews/eventsandfilters.html#event-filters

        '''
        etype = ev.type()
        # TODO: turn this on and see what we can filter by default (such
        # as mouseWheelEvent).
        # print(f'ev: {ev}')

        # Ignore event types we weren't configured to relay.
        if etype not in self._event_types:
            return False

        # XXX: we unpack here because apparently doing it
        # after pop from the mem chan isn't showing the same
        # event object? no clue wtf is going on there, likely
        # something to do with Qt internals and calling the
        # parent handler?

        if etype in {QEvent.KeyPress, QEvent.KeyRelease}:

            msg = KeyboardMsg(
                event=ev,
                etype=etype,
                key=ev.key(),
                mods=ev.modifiers(),
                txt=ev.text(),
            )

            # TODO: is there a global setting for this?
            if ev.isAutoRepeat() and self._filter_auto_repeats:
                ev.ignore()
                # filter out this event and stop it's processing
                # https://doc.qt.io/qt-5/qobject.html#installEventFilter
                return True

            # NOTE: the event object instance coming out
            # the other side is mutated since Qt resumes event
            # processing **before** running a ``trio`` guest mode
            # tick, thus special handling or copying must be done.

        elif etype in MOUSE_EVENTS:
            # print('f mouse event: {ev}')
            msg = MouseMsg(
                event=ev,
                etype=etype,
                button=ev.button(),
            )

        else:
            # relay any other configured event type as the raw event
            msg = ev

        # send event-msg to async handler
        self._send_chan.send_nowait(msg)

        # **do not** filter out this event
        # and instead forward to the source widget
        # https://doc.qt.io/qt-5/qobject.html#installEventFilter
        return False
@acm
async def open_event_stream(
    source_widget: QWidget,
    event_types: set[QEvent] = {QEvent.KeyPress},
    filter_auto_repeats: bool = True,

) -> trio.abc.ReceiveChannel:
    '''
    Install an ``EventRelay`` filter on ``source_widget`` and yield a
    receive channel delivering the relayed event msgs; the filter is
    removed when the context exits.
    '''
    # Small buffer so Qt-side ``send_nowait()`` calls don't immediately
    # overflow when the async side is briefly busy.
    send, recv = trio.open_memory_channel(16)

    kc = EventRelay()
    kc._send_chan = send
    kc._event_types = event_types
    kc._filter_auto_repeats = filter_auto_repeats

    source_widget.installEventFilter(kc)

    try:
        async with send:
            yield recv
    finally:
        # always detach the filter even if the consumer errors
        source_widget.removeEventFilter(kc)
@acm
async def open_signal_handler(
    signal: pyqtBoundSignal,
    async_handler: Callable,

) -> trio.abc.ReceiveChannel:
    '''
    Connect a Qt signal to an async handler via a trio memory channel.

    Each signal emission is proxied (from the Qt side) into an
    unbuffered channel; a background task awaits ``async_handler(*args)``
    per emission for the duration of the context. Renamed from the
    METHOD_NAME placeholder.
    '''
    send, recv = trio.open_memory_channel(0)

    def proxy_args_to_chan(*args):
        # Qt slot: forward the emitted args into the trio channel.
        send.send_nowait(args)

    signal.connect(proxy_args_to_chan)

    async def proxy_to_handler():
        async for args in recv:
            await async_handler(*args)

    async with trio.open_nursery() as n:
        n.start_soon(proxy_to_handler)
        async with send:
            yield
@acm
async def open_handlers(

    source_widgets: list[QWidget],
    event_types: set[QEvent],
    async_handler: Callable[[QWidget, trio.abc.ReceiveChannel], None],
    **kwargs,

) -> None:
    '''
    Spawn one ``async_handler`` task per widget, each fed by its own
    relayed Qt event stream.

    Extra ``kwargs`` are forwarded to ``open_event_stream()``.
    '''
    async with (
        trio.open_nursery() as n,
        gather_contexts([
            open_event_stream(widget, event_types, **kwargs)
            for widget in source_widgets
        ]) as streams,
    ):
        for widget, event_recv_stream in zip(source_widgets, streams):
            n.start_soon(async_handler, widget, event_recv_stream)

        yield
299,295 | test build adam | # Copyright 2021, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import copy
from absl.testing import parameterized
import tensorflow as tf
from tensorflow_federated.python.learning.optimizers import adam
from tensorflow_federated.python.learning.optimizers import optimizer as optimizer_base
from tensorflow_federated.python.learning.optimizers import optimizer_test_utils
_SCALAR_SPEC = tf.TensorSpec([1], tf.float32)
_STRUCT_SPEC = [tf.TensorSpec([2], tf.float32), tf.TensorSpec([3], tf.float32)]
_NESTED_SPEC = [
tf.TensorSpec([10], tf.float32),
[tf.TensorSpec([20], tf.float32), [tf.TensorSpec([30], tf.float32)]],
]
class AdamTest(optimizer_test_utils.TestCase, parameterized.TestCase):
def test_state_structure(self):
optimizer = adam.build_adam(0.01)
state = optimizer.initialize(_SCALAR_SPEC)
self.assertLen(state, 7)
self.assertIn(optimizer_base.LEARNING_RATE_KEY, state)
self.assertIn(adam._BETA_1_KEY, state)
self.assertIn(adam._BETA_2_KEY, state)
self.assertIn(adam._EPSILON_KEY, state)
self.assertIn(adam._STEP_KEY, state)
self.assertIn(adam._PRECONDITIONER_KEY, state)
self.assertIn(adam._ACCUMULATOR_KEY, state)
def test_math(self):
weights = tf.constant([1.0], tf.float32)
gradients = tf.constant([2.0], tf.float32)
optimizer = adam.build_adam(0.1, beta_1=0.9, beta_2=0.999, epsilon=0.0)
history = [weights]
state = optimizer.initialize(_SCALAR_SPEC)
for _ in range(4):
state, weights = optimizer.next(state, weights, gradients)
history.append(weights)
self.assertAllClose(
[[1.0], [0.9000007], [0.8000017], [0.700002], [0.600003]], history
)
@parameterized.named_parameters(
('scalar_spec', _SCALAR_SPEC),
('struct_spec', _STRUCT_SPEC),
('nested_spec', _NESTED_SPEC),
)
def test_executes_with(self, spec):
weights = tf.nest.map_structure(lambda s: tf.ones(s.shape, s.dtype), spec)
gradients = tf.nest.map_structure(lambda s: tf.ones(s.shape, s.dtype), spec)
optimizer = adam.build_adam(0.01)
state = optimizer.initialize(spec)
for _ in range(10):
state, weights = optimizer.next(state, weights, gradients)
tf.nest.map_structure(
lambda w: self.assertTrue(all(tf.math.is_finite(w))), weights
)
def test_executes_with_indexed_slices(self):
# TF can represent gradients as tf.IndexedSlices. This test makes sure this
# case is supported by the optimizer.
weights = tf.ones([4, 2])
gradients = tf.IndexedSlices(
values=tf.constant([[1.0, 1.0], [1.0, 1.0]]),
indices=tf.constant([0, 2]),
dense_shape=tf.constant([4, 2]),
)
# Always-zero preconditioner and accumulator, for simplicity of this test.
optimizer = adam.build_adam(0.5, beta_1=0.0, beta_2=0.0)
state = optimizer.initialize(tf.TensorSpec([4, 2]))
_, weights = optimizer.next(state, weights, gradients)
self.assertAllClose(
[[0.5, 0.5], [1.0, 1.0], [0.5, 0.5], [1.0, 1.0]], weights
)
def test_convergence(self):
init_w, fn, grad_fn = optimizer_test_utils.test_quadratic_problem()
weights = init_w()
self.assertGreater(fn(weights), 5.0)
optimizer = adam.build_adam(0.5)
state = optimizer.initialize(tf.TensorSpec(weights.shape, weights.dtype))
for _ in range(100):
gradients = grad_fn(weights)
state, weights = optimizer.next(state, weights, gradients)
self.assertLess(fn(weights), 0.005)
def METHOD_NAME(self):
optimizer = adam.build_adam(0.01)
self.assertIsInstance(optimizer, optimizer_base.Optimizer)
def test_match_keras(self):
weight_spec = [
tf.TensorSpec([10, 2], tf.float32),
tf.TensorSpec([2], tf.float32),
]
steps = 10
genarator = tf.random.Generator.from_seed(2021)
def random_vector():
return [
genarator.normal(shape=s.shape, dtype=s.dtype) for s in weight_spec
]
intial_weight = random_vector()
model_variables_fn = lambda: [tf.Variable(v) for v in intial_weight]
gradients = [random_vector() for _ in range(steps)]
tff_optimizer_fn = lambda: adam.build_adam(0.01, 0.9, 0.999)
keras_optimizer_fn = lambda: tf.keras.optimizers.Adam(0.01, 0.9, 0.999)
self.assert_optimizers_numerically_close(
model_variables_fn, gradients, tff_optimizer_fn, keras_optimizer_fn
)
@parameterized.named_parameters(
('negative_lr', -1.0, 0.9, 0.999, 1e-7, 'learning_rate'),
('negative_beta_1', 1.0, -0.9, 0.999, 1e-7, 'beta_1'),
('beta_1_greater_than_1', 1.0, 1.1, 0.999, 1e-7, 'beta_1'),
('negative_beta_2', 1.0, 0.9, -0.999, 1e-7, 'beta_2'),
('beta_2_greater_than_1', 1.0, 0.9, 1.1, 1e-7, 'beta_2'),
('negative_epsilon', 1.0, 0.9, 0.999, -1e-7, 'epsilon'),
)
def test_invalid_args_raises(self, lr, beta_1, beta_2, epsilon, regex):
with self.assertRaisesRegex(ValueError, regex):
adam.build_adam(lr, beta_1, beta_2, epsilon)
def test_weights_gradients_mismatch_raises(self):
optimizer = adam.build_adam(0.1)
state = optimizer.initialize(_SCALAR_SPEC)
with self.assertRaises(ValueError):
optimizer.next(state, tf.zeros([1]), tf.zeros([2]))
def test_initialize_next_weights_mismatch_raises(self):
optimizer = adam.build_adam(0.1)
state = optimizer.initialize(_SCALAR_SPEC)
with self.assertRaises(ValueError):
optimizer.next(state, tf.zeros([2]), tf.zeros([2]))
@parameterized.named_parameters(
('scalar_spec', _SCALAR_SPEC),
('struct_spec', _STRUCT_SPEC),
('nested_spec', _NESTED_SPEC),
)
def test_get_hparams_returns_expected_result(self, spec):
optimizer = adam.build_adam(
learning_rate=0.1, beta_1=0.92, beta_2=0.97, epsilon=0.01
)
state = optimizer.initialize(spec)
expected_hparams = collections.OrderedDict(
learning_rate=0.1, beta_1=0.92, beta_2=0.97, epsilon=0.01
)
actual_hparams = optimizer.get_hparams(state)
self.assertIsInstance(actual_hparams, collections.OrderedDict)
self.assertEqual(actual_hparams, expected_hparams)
@parameterized.named_parameters(
('scalar_spec', _SCALAR_SPEC),
('struct_spec', _STRUCT_SPEC),
('nested_spec', _NESTED_SPEC),
)
def test_set_hparams_returns_expected_result(self, spec):
optimizer = adam.build_adam(
learning_rate=0.1, beta_1=0.92, beta_2=0.97, epsilon=0.01
)
state = optimizer.initialize(spec)
hparams = collections.OrderedDict(
learning_rate=0.5, beta_1=0.12, beta_2=0.56, epsilon=2.0
)
expected_state = copy.deepcopy(state)
for k, v in hparams.items():
expected_state[k] = v
updated_state = optimizer.set_hparams(state, hparams)
self.assertIsInstance(updated_state, collections.OrderedDict)
self.assertEqual(updated_state, expected_state)
@parameterized.named_parameters(
('scalar_spec', _SCALAR_SPEC),
('struct_spec', _STRUCT_SPEC),
('nested_spec', _NESTED_SPEC),
)
def test_set_get_hparams_is_no_op(self, spec):
optimizer = adam.build_adam(0.1)
state = optimizer.initialize(spec)
hparams = optimizer.get_hparams(state)
updated_state = optimizer.set_hparams(state, hparams)
self.assertEqual(state, updated_state)
if __name__ == '__main__':
  # Entry point: run the test cases above under the TensorFlow test runner.
  tf.test.main()
299,296 | libs | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from spack.util.environment import is_system_path
class Tcl(AutotoolsPackage, SourceforgePackage):
    """Tcl (Tool Command Language) is a very powerful but easy to learn dynamic
    programming language, suitable for a very wide range of uses, including web and
    desktop applications, networking, administration, testing and many more. Open source
    and business-friendly, Tcl is a mature yet evolving language that is truly cross
    platform, easily deployed and highly extensible."""
    homepage = "https://www.tcl.tk/"
    sourceforge_mirror_path = "tcl/tcl8.6.11-src.tar.gz"
    version('8.6.11', sha256='8c0486668586672c5693d7d95817cb05a18c5ecca2f40e2836b9578064088258')
    version('8.6.10', sha256='5196dbf6638e3df8d5c87b5815c8c2b758496eb6f0e41446596c9a4e638d87ed')
    version('8.6.8', sha256='c43cb0c1518ce42b00e7c8f6eaddd5195c53a98f94adc717234a65cbcfd3f96a')
    version('8.6.6', sha256='a265409781e4b3edcc4ef822533071b34c3dc6790b893963809b9fe221befe07')
    version('8.6.5', sha256='ce26d5b9c7504fc25d2f10ef0b82b14cf117315445b5afa9e673ed331830fb53')
    version('8.6.4', sha256='9e6ed94c981c1d0c5f5fefb8112d06c6bf4d050a7327e95e71d417c416519c8d')
    version('8.6.3', sha256='6ce0778de0d50daaa9c345d7c1fd1288fb658f674028812e7eeee992e3051005')
    version('8.5.19', sha256='d3f04456da873d17f02efc30734b0300fb6c3b85028d445fe284b83253a6db18')
    # Other packages (tk, expect, tcl extensions) may extend this installation.
    extendable = True
    depends_on('zlib')
    # The autotools build for Tcl lives in the unix/ subdirectory of the tarball.
    configure_directory = 'unix'
    def install(self, spec, prefix):
        """Install Tcl, its (private) headers, and a copy of the source tree.

        Overrides AutotoolsPackage.install so that, beyond ``make install``,
        headers and the full source are also placed into the prefix.
        """
        with working_dir(self.build_directory):
            make('install')
            # https://wiki.tcl-lang.org/page/kitgen
            if self.spec.satisfies('@8.6:'):
                make('install-headers')
            # Some applications like Expect require private Tcl headers.
            make('install-private-headers')
            # Copy source to install tree
            # A user-provided install option might re-do this
            # https://github.com/spack/spack/pull/4102/files
            installed_src = join_path(
                self.spec.prefix, 'share', self.name, 'src')
            stage_src = os.path.realpath(self.stage.source_path)
            install_tree(stage_src, installed_src)
            # Replace stage dir -> installed src dir in tclConfig so that
            # downstream builds reading tclConfig.sh see a path that persists
            # after the stage directory is removed.
            filter_file(
                stage_src, installed_src,
                join_path(self.spec['tcl'].METHOD_NAME.directories[0],
                          'tclConfig.sh'))
        # Don't install binaries in src/ tree
        with working_dir(join_path(installed_src, self.configure_directory)):
            make('clean')
    @run_after('install')
    def symlink_tclsh(self):
        # Provide a version-less `tclsh` name next to the versioned binary.
        with working_dir(self.prefix.bin):
            symlink('tclsh{0}'.format(self.version.up_to(2)), 'tclsh')
    # ========================================================================
    # Set up environment to make install easy for tcl extensions.
    # ========================================================================
    @property
    def METHOD_NAME(self):
        """Locate the libtcl<X.Y> library under the install prefix."""
        return find_libraries(['libtcl{0}'.format(self.version.up_to(2))],
                              root=self.prefix, recursive=True)
    @property
    def command(self):
        """Returns the tclsh command.
        Returns:
            Executable: the tclsh command
        """
        # Although we symlink tclshX.Y to tclsh, we also need to support external
        # installations that may not have this symlink, or may have multiple versions
        # of Tcl installed in the same directory.
        return Executable(os.path.realpath(self.prefix.bin.join(
            'tclsh{0}'.format(self.version.up_to(2)))))
    def _find_script_dir(self):
        """Return the directory containing init.tcl, or None if not found."""
        # Put more-specific prefixes first
        check_prefixes = [
            join_path(self.prefix, "share", "tcl{0}".format(self.version.up_to(2))),
            self.prefix,
        ]
        for prefix in check_prefixes:
            result = find(prefix, "init.tcl")
            if result:
                return os.path.dirname(sorted(result)[0])
    def setup_run_environment(self, env):
        """Set TCL_LIBRARY to the directory containing init.tcl.
        For further info see:
        * https://wiki.tcl-lang.org/page/TCL_LIBRARY
        """
        # When using tkinter from within spack provided python+tkinter,
        # python will not be able to find Tcl unless TCL_LIBRARY is set.
        env.set('TCL_LIBRARY', self._find_script_dir())
    def setup_dependent_build_environment(self, env, dependent_spec):
        """Set TCL_LIBRARY to the directory containing init.tcl.
        Set TCLLIBPATH to include the tcl-shipped directory for
        extensions and any other tcl extension it depends on.
        For further info see:
        * https://wiki.tcl-lang.org/page/TCL_LIBRARY
        * https://wiki.tcl-lang.org/page/TCLLIBPATH
        """
        env.set('TCL_LIBRARY', self._find_script_dir())
        # If we set TCLLIBPATH, we must also ensure that the corresponding
        # tcl is found in the build environment. This to prevent cases
        # where a system provided tcl is run against the standard libraries
        # of a Spack built tcl. See issue #7128 that relates to python but
        # it boils down to the same situation we have here.
        if not is_system_path(self.prefix.bin):
            env.prepend_path('PATH', self.prefix.bin)
        # WARNING: paths in $TCLLIBPATH must be *space* separated,
        # its value is meant to be a Tcl list, *not* an env list
        # as explained here: https://wiki.tcl-lang.org/page/TCLLIBPATH:
        # "TCLLIBPATH is a Tcl list, not some platform-specific
        # colon-separated or semi-colon separated format"
        # WARNING: Tcl and Tcl extensions like Tk install their configuration files
        # in subdirectories like `<prefix>/lib/tcl8.6`. However, Tcl is aware of this,
        # and $TCLLIBPATH should only contain `<prefix>/lib`. $TCLLIBPATH is only needed
        # because we install Tcl extensions to different directories than Tcl. See:
        # https://core.tcl-lang.org/tk/tktview/447bd3e4abe17452d19a80e6840dcc8a2603fcbc
        env.prepend_path(
            'TCLLIBPATH', self.spec['tcl'].METHOD_NAME.directories[0], separator=' ')
        for d in dependent_spec.traverse(deptype=('build', 'run', 'test')):
            if d.package.extends(self.spec):
                # Tcl libraries may be installed in lib or lib64, see #19546
                for lib in ['lib', 'lib64']:
                    tcllibpath = join_path(d.prefix, lib)
                    if os.path.exists(tcllibpath):
                        env.prepend_path('TCLLIBPATH', tcllibpath, separator=' ')
    def setup_dependent_run_environment(self, env, dependent_spec):
        """Set TCLLIBPATH to include the tcl-shipped directory for
        extensions and any other tcl extension it depends on.
        For further info see:
        * https://wiki.tcl-lang.org/page/TCLLIBPATH
        """
        for d in dependent_spec.traverse(deptype=('build', 'run', 'test')):
            if d.package.extends(self.spec):
                # Tcl libraries may be installed in lib or lib64, see #19546
                for lib in ['lib', 'lib64']:
                    tcllibpath = join_path(d.prefix, lib)
                    if os.path.exists(tcllibpath):
                        env.prepend_path('TCLLIBPATH', tcllibpath, separator=' ')
299,297 | set style | # (C) Copyright 2005-2023 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
from fontTools.afmLib import AFM
from fontTools.ttLib import TTFont
from kiva.fonttools._constants import stretch_dict, weight_dict
from kiva.fonttools._util import get_ttf_prop_dict
from kiva.fonttools.font_manager import default_font_manager
class FontQuery(object):
    """ A class for storing properties needed to query the font manager.
    The properties are those described in the `W3C Cascading
    Style Sheet, Level 1 <http://www.w3.org/TR/1998/REC-CSS2-19980512/>`_ font
    specification. The six properties are:
    - family: A list of font names in decreasing order of priority.
        The items may include a generic font family name, either
        'serif', 'sans-serif', 'cursive', 'fantasy', or 'monospace'.
    - style: Either 'normal', 'italic' or 'oblique'.
    - variant: Either 'normal' or 'small-caps'.
    - stretch: A numeric value in the range 0-1000 or one of
        'ultra-condensed', 'extra-condensed', 'condensed',
        'semi-condensed', 'normal', 'semi-expanded', 'expanded',
        'extra-expanded' or 'ultra-expanded'
    - weight: A numeric value in the range 0-1000 or one of
        'ultralight', 'light', 'normal', 'regular', 'book', 'medium',
        'roman', 'semibold', 'demibold', 'demi', 'bold', 'heavy',
        'extra bold', 'black'
    - size: An absolute font size, e.g. 12
    Alternatively, a font may be specified using an absolute path to a
    .ttf file, by using the *fname* kwarg.
    """
    def __init__(self, family=None, style=None, variant=None, weight=None,
                 stretch=None, size=None, fname=None, _init=None):
        """ Build a query from individual font properties.

        If *fname* is set, it's a hardcoded filename to use; all other
        properties are ignored by the lookup.  *_init* is used only by
        ``copy()`` to clone an existing query.
        """
        self._family = None
        self._slant = None
        self._variant = None
        self._weight = None
        self._stretch = None
        self._size = None
        self._file = None
        # Cloning path used only by copy(): duplicate attributes wholesale.
        if _init is not None:
            self.__dict__.update(_init.__dict__)
            return
        self.set_family(family)
        self.METHOD_NAME(style)
        self.set_variant(variant)
        self.set_weight(weight)
        self.set_stretch(stretch)
        self.set_file(fname)
        self.set_size(size)
    def __hash__(self):
        # Every stored attribute "_name" has a matching accessor "get_name",
        # so "get" + "_name" resolves to it.  Hash the repr of the sorted
        # (attribute, value) pairs so equal queries hash equally.
        lst = [(k, getattr(self, "get" + k)()) for k in sorted(self.__dict__)]
        return hash(repr(lst))
    def __str__(self):
        attrs = (
            self._family, self._slant, self._variant, self._weight,
            self._stretch, self._size,
        )
        return str(attrs)
    def get_family(self):
        """ Return a list of font names that comprise the font family.
        """
        return self._family
    def get_name(self):
        """ Return the name of the font that best matches the font properties.
        """
        spec = default_font_manager().findfont(self)
        if spec.filename.endswith(".afm"):
            # Parse the *matched* AFM file.  [BUGFIX] Previously this was
            # ``AFM().FamilyName``: an AFM parser constructed without the
            # filename never reads the matched font and has no FamilyName.
            return AFM(spec.filename).FamilyName
        prop_dict = get_ttf_prop_dict(
            TTFont(spec.filename, fontNumber=spec.face_index)
        )
        return prop_dict["family"]
    def get_style(self):
        """ Return the font style.
        Values are: 'normal', 'italic' or 'oblique'.
        """
        return self._slant
    get_slant = get_style
    def get_variant(self):
        """ Return the font variant.
        Values are: 'normal' or 'small-caps'.
        """
        return self._variant
    def get_weight(self):
        """ Return the font weight.
        Values are: A numeric value in the range 0-1000 or one of 'light',
        'normal', 'regular', 'book', 'medium', 'roman', 'semibold', 'demibold',
        'demi', 'bold', 'heavy', 'extra bold', 'black'
        """
        return self._weight
    def get_stretch(self):
        """ Return the font stretch or width.
        Options are: 'ultra-condensed', 'extra-condensed', 'condensed',
        'semi-condensed', 'normal', 'semi-expanded', 'expanded',
        'extra-expanded', 'ultra-expanded'.
        """
        return self._stretch
    def get_size(self):
        """ Return the font size.
        """
        return self._size
    def get_file(self):
        """ Return the filename of the associated font.
        """
        return self._file
    def set_family(self, family):
        """ Change the font family.
        May be either an alias (generic name is CSS parlance), such as:
        'serif', 'sans-serif', 'cursive', 'fantasy', or 'monospace', or
        a real font name.  Stored internally as a list of names.
        """
        if family is None:
            self._family = None
        else:
            if isinstance(family, bytes):
                family = [family.decode("utf8")]
            elif isinstance(family, str):
                family = [family]
            self._family = family
    set_name = set_family
    def METHOD_NAME(self, style):
        """ Set the font style.
        Values are: 'normal', 'italic' or 'oblique'.

        Raises ValueError for any other value.
        """
        if style not in ("normal", "italic", "oblique", None):
            raise ValueError("style must be normal, italic or oblique")
        self._slant = style
    set_slant = METHOD_NAME
    def set_variant(self, variant):
        """ Set the font variant.
        Values are: 'normal' or 'small-caps'.

        Raises ValueError for any other value.
        """
        if variant not in ("normal", "small-caps", None):
            raise ValueError("variant must be normal or small-caps")
        self._variant = variant
    def set_weight(self, weight):
        """ Set the font weight.
        May be either a numeric value in the range 0-1000 or one of
        'ultralight', 'light', 'normal', 'regular', 'book', 'medium', 'roman',
        'semibold', 'demibold', 'demi', 'bold', 'heavy', 'extra bold', 'black'.

        Raises ValueError for out-of-range numbers or unknown names.
        """
        if weight is not None:
            try:
                weight = int(weight)
                if weight < 0 or weight > 1000:
                    raise ValueError()
            except ValueError:
                # Not numeric (or out of range): accept only known weight names.
                if weight not in weight_dict:
                    raise ValueError("weight is invalid")
        self._weight = weight
    def set_stretch(self, stretch):
        """ Set the font stretch or width.
        Options are: 'ultra-condensed', 'extra-condensed', 'condensed',
        'semi-condensed', 'normal', 'semi-expanded', 'expanded',
        'extra-expanded' or 'ultra-expanded', or a numeric value in the
        range 0-1000.  ``None`` selects the default stretch of 500.

        Raises ValueError for out-of-range numbers or unknown names.
        """
        if stretch is not None:
            try:
                stretch = int(stretch)
                if stretch < 0 or stretch > 1000:
                    raise ValueError()
            except ValueError:
                # Not numeric (or out of range): accept only known names.
                if stretch not in stretch_dict:
                    raise ValueError("stretch is invalid")
        else:
            stretch = 500
        self._stretch = stretch
    def set_size(self, size):
        """ Set the font size.
        An absolute font size, e.g. 12.  Raises ValueError when the value
        cannot be converted to float.
        """
        if size is not None:
            try:
                size = float(size)
            except ValueError:
                raise ValueError("size is invalid")
        self._size = size
    def set_file(self, file):
        """ Set the filename of the fontfile to use.
        In this case, all other properties will be ignored.
        """
        self._file = file
    def copy(self):
        """ Return a deep copy of self
        """
        return FontQuery(_init=self)
299,298 | thunder alert | import logging
import traceback
import requests
import settings
from functions.smtp.send_email import send_email
from slack_functions import slack_post_message
from functions.pagerduty.send_pagerduty import send_pagerduty
skyline_app = 'thunder'
skyline_app_logger = '%sLog' % skyline_app
logger = logging.getLogger(skyline_app_logger)
skyline_app_logfile = '%s/%s.log' % (settings.LOG_PATH, skyline_app)
def METHOD_NAME(alert_via, subject, body):
    """
    Send a thunder alert via smtp, slack, pagerduty or an external HTTP
    alerter endpoint.

    :param alert_via: one of the strings 'alert_via_smtp', 'alert_via_slack'
        or 'alert_via_pagerduty', or a dict describing an external HTTP
        alerter with the keys alert_via_http, thunder_alert_endpoint,
        thunder_alert_token and alert_data_dict
    :param subject: the alert subject
    :param body: the alert body
    :return: True if a message was sent, False otherwise
    :rtype: boolean
    """
    message_sent = False

    if alert_via == 'alert_via_smtp':
        send_smtp_alert = False
        to = None
        cc = []
        try:
            if settings.THUNDER_OPTS['alert_via_smtp']:
                send_smtp_alert = True
                # First recipient is the To: address, the rest are CCed.
                to = settings.THUNDER_OPTS['smtp_recipients'][0]
                for recipient in settings.THUNDER_OPTS['smtp_recipients']:
                    if recipient != to:
                        cc.append(recipient)
        except Exception as e:
            logger.error(traceback.format_exc())
            logger.error('error :: thunder_alert :: failed to determine alert_via_smtp settings - %s' % e)
        if send_smtp_alert:
            try:
                message_sent = send_email(skyline_app, to, cc, subject, str(body))
            except Exception as e:
                logger.error(traceback.format_exc())
                logger.error('error :: thunder_alert :: failed to send smtp messages - %s' % e)
            if message_sent:
                logger.info('thunder_alert :: smtp message sent - %s' % subject)

    if alert_via == 'alert_via_slack':
        try:
            if settings.THUNDER_OPTS['alert_via_slack']:
                message = '*%s*\n%s' % (subject, body)
                message_sent = slack_post_message(
                    skyline_app, settings.THUNDER_OPTS['slack_channel'],
                    'None', message)
        except Exception as e:
            logger.error(traceback.format_exc())
            logger.error('error :: thunder_alert :: failed to send slack message - %s' % e)
        if message_sent:
            logger.info('thunder_alert :: slack message sent - %s' % subject)

    if alert_via == 'alert_via_pagerduty':
        try:
            if settings.THUNDER_OPTS['alert_via_pagerduty']:
                message = '%s %s' % (subject, body)
                message_sent = send_pagerduty(skyline_app, message, log=True)
        except Exception as e:
            logger.error(traceback.format_exc())
            # BUGFIX: this branch previously logged the smtp settings error
            logger.error('error :: thunder_alert :: failed to send pagerduty message - %s' % e)
        if message_sent:
            # BUGFIX: this branch previously logged 'slack message sent'
            logger.info('thunder_alert :: pagerduty message sent - %s' % subject)

    # alert_via may also be a dict describing an external HTTP alerter.
    alert_via_http = False
    alerter_endpoint = None
    thunder_alert_token = None
    # BUGFIX: initialised so the combined check below cannot raise NameError
    # when the dict is malformed and alert_data_dict was never assigned.
    alert_data_dict = None
    if isinstance(alert_via, dict):
        try:
            alert_via_http = alert_via['alert_via_http']
            if alert_via_http:
                alerter_endpoint = alert_via['thunder_alert_endpoint']
                thunder_alert_token = alert_via['thunder_alert_token']
                alert_data_dict = alert_via['alert_data_dict']
        except Exception as e:
            logger.error(traceback.format_exc())
            logger.error('error :: thunder_alert :: failed to determine alert_via settings - %s - %s' % (
                str(alert_via), e))
    if alerter_endpoint and thunder_alert_token and alert_data_dict:
        connect_timeout = 5
        read_timeout = 20
        use_timeout = (int(connect_timeout), int(read_timeout))
        response = None
        try:
            response = requests.post(alerter_endpoint, json=alert_data_dict, timeout=use_timeout)
        except Exception as e:
            logger.error(traceback.format_exc())
            logger.error('error :: thunder_alert :: failed to post alert to %s - %s' % (
                str(alerter_endpoint), e))
        # BUGFIX: only inspect the response when the POST actually returned
        # one; previously response.status_code was dereferenced on None and
        # the AttributeError was logged as a spurious error.
        if response is not None:
            if response.status_code != 200:
                logger.warning('warning :: thunder_alert :: %s responded with status code %s and reason %s' % (
                    str(alerter_endpoint), str(response.status_code),
                    str(response.reason)))
            if response.status_code == 400:
                response_str = None
                try:
                    response_str = str(response.json())
                except Exception as e:
                    logger.error(traceback.format_exc())
                    logger.error('error :: thunder_alert :: failed to decode 400 response body - %s' % e)
                logger.info('thunder_alert :: 400 response - %s' % (
                    str(response_str)))
            if response.status_code == 200:
                logger.info('thunder_alert :: alert sent to %s - %s' % (
                    str(alerter_endpoint), str(response.status_code)))
                message_sent = True
        if message_sent:
            logger.info('thunder_alert :: external thunder message sent')
    return message_sent
299,299 | reset | #! /usr/bin/python
try:
import time
import string
from ctypes import create_string_buffer
from sonic_sfp.sfputilbase import SfpUtilBase
except ImportError as e:
raise ImportError(str(e) + "- required module not found")
class SfpUtil(SfpUtilBase):
    """Platform specific sfputil class"""

    _port_start = 0
    _port_end = 31
    ports_in_block = 32
    _port_to_eeprom_mapping = {}
    # NOTE(review): range(0, ports_in_block + 1) yields 33 entries (0-32) for
    # a device whose ports are 0-31; preserved as-is because qsfp_ports()
    # mirrors it and callers may depend on the extra entry — confirm intent.
    _qsfp_ports = list(range(0, ports_in_block + 1))

    def __init__(self):
        # Override port_to_eeprom_mapping for class initialization.
        # Port N's EEPROM sysfs node lives on I2C bus N+18 at address 0x50.
        eeprom_path = '/sys/bus/i2c/devices/{0}-0050/sfp_eeprom'
        for x in range(self.port_start, self.port_end + 1):
            self._port_to_eeprom_mapping[x] = eeprom_path.format(x + 18)
        SfpUtilBase.__init__(self)

    def METHOD_NAME(self, port_num):
        # Transceiver hardware reset is not supported on this platform.
        raise NotImplementedError

    def get_presence(self, port_num):
        """Return True if a transceiver is present in *port_num*."""
        # Check for invalid port_num
        if port_num < self._port_start or port_num > self._port_end:
            return False
        path = "/sys/bus/i2c/devices/{0}-0050/sfp_is_present"
        port_ps = path.format(port_num + 18)
        try:
            # Context manager guarantees the sysfs file handle is released
            # even if the read fails (the original leaked it on error).
            with open(port_ps) as reg_file:
                reg_value = reg_file.readline().rstrip()
        except IOError as e:
            print("Error: unable to access file: %s" % str(e))
            return False
        return reg_value == '1'

    @property
    def port_start(self):
        return self._port_start

    @property
    def port_end(self):
        return self._port_end

    @property
    def qsfp_ports(self):
        return list(range(0, self.ports_in_block + 1))

    @property
    def port_to_eeprom_mapping(self):
        return self._port_to_eeprom_mapping

    def get_transceiver_change_event(self):
        """
        TODO: This function need to be implemented
        when decide to support monitoring SFP(Xcvrd)
        on this platform.
        """
        raise NotImplementedError

    def get_low_power_mode(self, port_num):
        """Return True if the transceiver in *port_num* is in low power mode."""
        # Check for invalid port_num
        if port_num < self._port_start or port_num > self._port_end:
            return False
        eeprom = None
        try:
            if not self.get_presence(port_num):
                return False
            eeprom = open(self.port_to_eeprom_mapping[port_num], "rb")
            # Byte 93 of the QSFP EEPROM holds the power control bits.
            eeprom.seek(93)
            lpmode = ord(eeprom.read(1))
            # Low Power Mode only when both "Power override" (bit 0) and
            # "Power set" (bit 1) are asserted; any other combination is
            # High Power Mode.
            return (lpmode & 0x3) == 0x3
        except IOError as e:
            print("Error: unable to open file: %s" % str(e))
            return False
        finally:
            if eeprom is not None:
                eeprom.close()
            time.sleep(0.01)

    def set_low_power_mode(self, port_num, lpmode):
        """Set (lpmode=True) or clear low power mode for *port_num*."""
        # Check for invalid port_num
        if port_num < self._port_start or port_num > self._port_end:
            return False
        eeprom = None
        try:
            if not self.get_presence(port_num):
                return False  # Port is not present, unable to set the eeprom
            # 0x3: Low Power Mode (power override + power set)
            # 0x1: High Power Mode (power override only)
            regval = 0x3 if lpmode else 0x1
            eeprom = open(self.port_to_eeprom_mapping[port_num], "r+b")
            eeprom.seek(93)
            # BUGFIX: write a bytes object.  The original assigned
            # chr(regval) (a str) into a ctypes char buffer, which raises
            # TypeError on Python 3; binary-mode files require bytes.
            eeprom.write(bytes([regval]))
            return True
        except IOError as e:
            print("Error: unable to open file: %s" % str(e))
            return False
        finally:
            if eeprom is not None:
                eeprom.close()
            time.sleep(0.01)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.