id (int64) | label (string) | text (string) |
|---|---|---|
4,800 | parse start | import re
from collections import defaultdict
from datetime import datetime
import scrapy
from city_scrapers_core.constants import BOARD, COMMITTEE
from city_scrapers_core.items import Meeting
from city_scrapers_core.spiders import CityScrapersSpider
class ChiTeacherPensionSpider(CityScrapersSpider):
name = "chi_teacherpension"
agency = "Chicago Teachers Pension Fund"
timezone = "America/Chicago"
start_urls = ["https://www.ctpf.org/board-trustees-meeting-minutes"]
location = {
"name": "CTPF Office",
"address": "203 N LaSalle St, Suite 2600 Chicago, IL 60601",
}
custom_settings = {"ROBOTSTXT_OBEY": False}
def __init__(self, *args, **kwargs):
self.month_year_minutes = defaultdict(list)
super().__init__(*args, **kwargs)
def parse(self, response):
"""
`parse` should always `yield` Meeting items.
Change the `_parse_title`, `_parse_start`, etc methods to fit your scraping
needs.
"""
self._parse_minutes(response)
yield scrapy.Request(
"https://www.boarddocs.com/il/ctpf/board.nsf/XML-ActiveMeetings",
callback=self._parse_boarddocs,
dont_filter=True,
)
def _parse_minutes(self, response):
"""Parse all past board meeting minutes, store for association to meetings"""
for minutes_link in response.css(".file > a"):
link_text = minutes_link.css(".link-text::text").extract_first().strip()
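# e.g. "May 2020" or "September 2019" (a 3-12 character month name plus a 4-digit year)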
month_year_match = re.search(r"\w{3,12}\s+\d{4}", link_text)
if month_year_match:
month_year = re.sub(r"\s+", " ", month_year_match.group()).strip()
link_title = "Minutes"
if "executive" in link_text.lower():
link_title = "Executive Session Minutes"
self.month_year_minutes[month_year].append(
{
"href": response.urljoin(
minutes_link.xpath("@href").extract_first()
),
"title": link_title,
}
)
def _parse_boarddocs(self, response):
"""Parse meetings from BoardDocs XML feed"""
for item in response.xpath("//meeting"):
title = self._parse_title(item)
start = self.METHOD_NAME(item)
classification = self._parse_classification(title)
source = self._parse_source(item)
if (
"historical"
in (item.xpath("description/text()").extract_first() or "").lower()
):
continue
meeting = Meeting(
title=title,
description="",
classification=classification,
start=start,
end=None,
time_notes="",
all_day=False,
location=self._parse_location(item),
links=self._parse_links(start, classification, source, item),
source=source or response.url,
)
meeting["status"] = self._get_status(meeting)
meeting["id"] = self._get_id(meeting)
yield meeting
def _parse_title(self, item):
"""Parse or generate meeting title."""
title_str = item.xpath("name/text()").extract_first()
if "board of trustees" in title_str.lower():
return "Board of Trustees"
# Return first part of string before any "at 3:30 P.M"...
return re.split(r"\s+at\s+\d", title_str)[0].replace("Meeting", "").strip()
def _parse_classification(self, title):
"""Parse classification for board or committee meetings."""
if "board" in title.lower():
return BOARD
else:
return COMMITTEE
def METHOD_NAME(self, item):
"""Parse start datetime"""
start_date_str = item.xpath("start/date/text()").extract_first()
start_date = datetime.strptime(start_date_str, "%Y-%m-%d")
desc = item.xpath("description/text()").extract_first()
title = item.xpath("name/text()").extract_first()
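# Matches times like "3:30 PM" or "3:30 P.M."; dots are stripped and the match
# upper-cased below so it can be parsed with the "%I:%M %p" format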
time_re = re.compile(r"\d{1,2}:\d{2} [APM\.]{2,4}")
time_match = time_re.search(title)
if not time_match and desc is not None:
time_match = time_re.search(desc)
if time_match:
time_match_str = time_match.group().strip().replace(".", "").upper()
time_obj = datetime.strptime(time_match_str, "%I:%M %p")
return datetime.combine(start_date.date(), time_obj.time())
return start_date
def _parse_location(self, item):
desc = item.xpath("description/text()").extract_first()
if desc and "425 S" in desc:
return {
"name": "",
"address": "425 S Financial Place, Suite 1500, Chicago, IL 60605",
}
if desc and not re.search(r"203 N[\.orth]*? LaSalle", desc):
raise ValueError("Meeting location has changed")
return self.location
def _parse_links(self, start, classification, source, item):
"""Attach board meeting minutes if for a board meeting"""
links = [{"title": "Agenda", "href": source}]
if classification == BOARD:
links.extend(self.month_year_minutes[start.strftime("%B %Y")])
return links
def _parse_source(self, item):
"""Source is also a link to the agenda"""
return item.xpath("link/text()").extract_first() |
4,801 | set device | # Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.distributed as dist
from contextlib import contextmanager
import logging.config
import random
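# Maps PyTorch-style BERT parameter names to TensorFlow checkpoint names,
# e.g. "encoder.layer.0.attention.self.query.weight" ->
# "encoder/layer_0/attention/self/query/kernel" (illustrative example)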
def convert_weight_names(names):
extra_params = {"cls/predictions/bias": "cls/predictions/output_bias",
"cls/seq_relationship/kernel": "cls/seq_relationship/output_weights",
"cls/seq_relationship/bias": "cls/seq_relationship/output_bias"}
new_names = []
for name in names:
name = name.replace("layer.", "layer_").replace(
".", "/").replace(
"LayerNorm/bias", "LayerNorm/beta").replace(
"LayerNorm/weight", "LayerNorm/gamma").replace(
"weight", "kernel").replace(
"embeddings/kernel", "embeddings")
if name in extra_params:
name = extra_params[name]
new_names.append(name)
return new_names
def generate_seeds(rng, size):
"""
Generate list of random seeds
:param rng: random number generator
:param size: length of the returned list
"""
seeds = [rng.randint(0, 2**32 - 1) for _ in range(size)]
return seeds
def broadcast_seeds(seeds, device):
"""
Broadcasts random seeds to all distributed workers.
Returns the list of random seeds (broadcast from the worker with rank 0).
:param seeds: list of seeds (integers)
:param device: torch.device
"""
if torch.distributed.is_available() and torch.distributed.is_initialized():
seeds_tensor = torch.LongTensor(seeds).to(device)
torch.distributed.broadcast(seeds_tensor, 0)
seeds = seeds_tensor.tolist()
return seeds
def setup_seeds(master_seed, epochs, device):
"""
Generates seeds from one master_seed.
Function returns (worker_seeds, shuffling_seeds): worker_seeds are later
used to initialize per-worker random number generators (mostly for
dropouts), and shuffling_seeds are for the RNGs responsible for reshuffling
the dataset before each epoch.
Seeds are generated on the worker with rank 0 and broadcast to all other
workers.
:param master_seed: master RNG seed used to initialize other generators
:param epochs: number of epochs
:param device: torch.device (used for distributed.broadcast)
"""
if master_seed is None:
# random master seed, random.SystemRandom() uses /dev/urandom on Unix
master_seed = random.SystemRandom().randint(0, 2**32 - 1)
if get_rank() == 0:
# the master seed is reported only from the rank=0 worker to avoid
# confusion; seeds from rank=0 are later broadcast to the other
# workers
logging.info(f'Using random master seed: {master_seed}')
else:
# master seed was specified from command line
logging.info(f'Using master seed from command line: {master_seed}')
# initialize seeding RNG
seeding_rng = random.Random(master_seed)
# generate worker seeds, one seed for every distributed worker
worker_seeds = generate_seeds(seeding_rng, get_world_size())
# generate seeds for data shuffling, one seed for every epoch
shuffling_seeds = generate_seeds(seeding_rng, epochs)
# broadcast seeds from rank=0 to other workers
worker_seeds = broadcast_seeds(worker_seeds, device)
shuffling_seeds = broadcast_seeds(shuffling_seeds, device)
return worker_seeds, shuffling_seeds
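# Example (hypothetical, non-distributed run): with torch.distributed
# uninitialized, get_world_size() == 1 and broadcast_seeds() is a no-op, so
#   worker_seeds, shuffling_seeds = setup_seeds(1234, epochs=3,
#                                               device=torch.device('cpu'))
# yields one worker seed and three shuffling seeds, all derived from 1234.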
def barrier():
"""
Works as a temporary distributed barrier; at the time of writing, PyTorch
doesn't implement a barrier for the NCCL backend.
Calls all_reduce on a dummy tensor and synchronizes with the GPU.
"""
if torch.distributed.is_available() and torch.distributed.is_initialized():
torch.distributed.all_reduce(torch.cuda.FloatTensor(1))
torch.cuda.synchronize()
def get_rank():
"""
Gets distributed rank or returns zero if distributed is not initialized.
"""
if torch.distributed.is_available() and torch.distributed.is_initialized():
rank = torch.distributed.get_rank()
else:
rank = 0
return rank
def get_world_size():
"""
Gets total number of distributed workers or returns one if distributed is
not initialized.
"""
if torch.distributed.is_available():
print("Torch distributed is available.")
else:
print("Torch distributed is not available.")
if torch.distributed.is_initialized():
print("Torch distributed is initialized.")
else:
print("Torch distributed is not initialized.")
if torch.distributed.is_available() and torch.distributed.is_initialized():
world_size = torch.distributed.get_world_size()
else:
world_size = 1
return world_size
def METHOD_NAME(cuda, local_rank):
"""
Sets device based on local_rank and returns instance of torch.device.
:param cuda: if True: use cuda
:param local_rank: local rank of the worker
"""
if cuda:
torch.cuda.METHOD_NAME(local_rank)
device = torch.device('cuda')
else:
device = torch.device('cpu')
return device
@contextmanager
def sync_workers():
"""
Yields distributed rank and synchronizes all workers on exit.
"""
rank = get_rank()
yield rank
barrier()
def is_main_process():
return get_rank() == 0
def format_step(step):
if isinstance(step, str):
return step
s = ""
if len(step) > 0:
s += "Training Epoch: {} ".format(step[0])
if len(step) > 1:
s += "Training Iteration: {} ".format(step[1])
if len(step) > 2:
s += "Validation Iteration: {} ".format(step[2])
return s |
4,802 | match | import os
import re
import random
from spytest import env
from spytest.ordyaml import OrderedYaml
import utilities.common as utils
config = None
levels = ['emerg', 'alert', 'crit', 'err', 'warning', 'notice', 'info', 'debug', 'none']
sample_syslogs = []
def add_sample_syslog(msg):
sample_syslogs.append(msg)
def clear_sample_syslogs():
try:
sample_syslogs.clear()
except Exception:
del sample_syslogs[:]
def _append_sample_entry(entries, dut_name, msgtype, lvl, msg):
entry = [dut_name, msgtype]
entry.append(utils.get_current_datetime("%b %d %H:%M:%S.%f %Y"))
entry.append("sonic") # host
entry.append(lvl) # level
entry.append(msg) # message
entry.append("") # module
entry.append(msg) # message
entries.append(entry)
def get_config():
global config
if config is None:
config = {}
if env.get("SPYTEST_SYSLOG_ANALYSIS", "0") != "0":
root = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
filename = os.path.join(root, "reporting", "syslogs.yaml")
oyaml = OrderedYaml(filename, [])
config = oyaml.get_data() or dict()
for color in ["yellow", "green", "red"]:
if color not in config:
config[color] = []
return config
def METHOD_NAME(lvl, line):
index = levels.index(lvl)
needed = "|".join(levels[:index + 1])
regex = r"^\S+\s+\d+\s+\d+:\d+:\d+(\.\d+){{0,1}}\s+\S+\s+({})\s+"
cre = re.compile(regex.format(needed.upper()))
return cre.search(line)
def parse(phase, lvl, msgtype, dut_name, output, filemode=False):
entries = []
if lvl in levels:
index = levels.index(lvl)
needed = "|".join(levels[:index + 1])
regex = r"^(\S+\s+\d+\s+\d+:\d+:\d+(\.\d+){{0,1}}(\+\d+:\d+){{0,1}}(\s+\d+){{0,1}})\s+(\S+)\s+({})\s+(.*)"
cre = re.compile(regex.format(needed.upper()))
cre_list = []
chars = r"[a-zA-Z0-9-_/\.]+"
cre_list.append(re.compile(r"^\s*({0}#{0}):*\s(.*)".format(chars)))
cre_list.append(re.compile(r"^\s*({0}#{0}\[\d+\]):*\s(.*)".format(chars)))
cre_list.append(re.compile(r"^\s*({0}\[\d+\]):\s*(.*)".format(chars)))
cre_list.append(re.compile(r"^\s*({0}):\s*(.*)".format(chars)))
for line in output.split("\n"):
rv = cre.search(line)
if not rv:
continue
entry = [dut_name, msgtype]
off = 0 if len(rv.groups()) == 6 else 1
date = re.split(r" |\+", rv.group(1))
if len(date) > 4:
date.pop(3)
entry.append(" ".join(date)) # date
entry.append(rv.group(4 + off)) # host
entry.append(rv.group(5 + off)) # level
msg = rv.group(6 + off)
entry.append(msg) # message
rv = None
for cre2 in cre_list:
rv = cre2.search(msg)
if rv:
entry.append(rv.group(1)) # module
entry.append(rv.group(2)) # message
break
if not rv:
entry.append("") # module
entry.append(msg) # message
entries.append(entry)
if filemode and lvl != "none":
val = random.randint(1, 1000)
_append_sample_entry(entries, dut_name, msgtype, lvl, "test syslog {}".format(val))
for msg in sample_syslogs:
_append_sample_entry(entries, dut_name, msgtype, lvl, msg)
return entries
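# Classify each syslog entry against the configured regex lists: "green"
# entries are discarded, "yellow" entries are reported only once, and the
# first "red" entry is returned as a potential SW issue.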
def store(phase, prev, current):
cfg = get_config()
rmatch, offset = None, 7
for entry in current:
gmatch, ymatch, pmatch = None, None, None
# find green syslogs to discard
for regex in cfg.get("green", []):
if re.compile(regex).METHOD_NAME(entry[offset]):
gmatch = regex
break
if gmatch is not None:
continue # ignore the syslog
# find yellow syslogs to report only once
for regex in cfg.get("yellow", []):
if re.compile(regex).METHOD_NAME(entry[offset]):
ymatch = regex
break
# check if yellow syslog already noted
for pentry in prev:
if pentry[offset] == entry[offset]:
pmatch = ymatch
break
if pmatch is not None:
continue # syslog already reported once
# add the entry to current syslogs
prev.append(entry)
if rmatch is not None:
continue # first red syslog already noted
# check if red syslog to report SW Issue
for regex in cfg.get("red", []):
if re.compile(regex).METHOD_NAME(entry[offset]):
rmatch = " ".join(entry)
break
return rmatch |
4,803 | test cov of uncorrelated data is zero | '''
@date: 15/04/2015
@author: Stefan Hegglin
'''
import unittest
import numpy as np
from scipy.constants import c, e, m_p
from PyHEADTAIL.particles.particles import Particles
from PyHEADTAIL.particles.generators import generate_Gaussian6DTwiss
import PyHEADTAIL.cobra_functions.stats as cf
from PyHEADTAIL.trackers.longitudinal_tracking import LinearMap
from PyHEADTAIL.general.printers import SilentPrinter
class TestCobra(unittest.TestCase):
def setUp(self):
np.random.seed(0)
self.tolerance = 3
self.n_samples = 100000
self.data1_var = 1.0
#some random data to use for cov/eps/... computations
self.data1 = np.random.normal(0, np.sqrt(self.data1_var), self.n_samples)
self.data2 = np.random.normal(5., 2.1, self.n_samples)
self.data3 = np.random.laplace(loc=-2., scale=0.5, size=self.n_samples)
def tearDown(self):
pass
def test_consistency_for_std(self):
""" Test whether the cf.std, np.std return the correct std
for a simulated data set
"""
s_cobra = cf.std(self.data1)
s_numpy = np.std(self.data1)
self.assertAlmostEqual(s_cobra, s_numpy, places=self.tolerance,
msg='cobra std() yields a different result ' +
'than numpy.std()')
def test_consistency_covariance(self):
""" Test whether cov and np.cov return the same covariance
for a simulated data set
"""
v_cobra = cf.cov(self.data1, self.data2)
v_numpy = np.cov(self.data1, self.data2)[0,1]
self.assertAlmostEqual(v_cobra, v_numpy, places=self.tolerance,
msg='cobra cov() yields a different result ' +
'than numpy.cov()')
def test_eta_prime_is_zero(self):
""" Test whether computing eta_prime of a beam generated using
Gaussian6dTwiss is 0. This should be true for alphax, alphay = 0.
Otherwise a correlation will be present. The error decreases with
increasing number of particles"""
bunch = self.generate_gaussian6dBunch(1000000, 0, 0, 1, 1, 5, 100)
eta_prime_x = cf.dispersion(bunch.xp, bunch.dp)
weak_tol = 2
self.assertAlmostEqual(eta_prime_x, 0., places=weak_tol,
msg='eta_prime_x is not zero but ' + str(eta_prime_x))
eta_prime_y = cf.dispersion(bunch.yp, bunch.dp)
self.assertAlmostEqual(eta_prime_y, 0., places=weak_tol,
msg='eta_prime_y is not zero but ' + str(eta_prime_y))
def METHOD_NAME(self):
""" Test whether the covariance of two uncorrelated normally distributed
data vectors is zero. Only works for big sample sizes / big tolerance
"""
d1 = np.random.normal(100, 2., self.n_samples)
d2 = np.random.normal(200, 0.2, self.n_samples)
self.assertAlmostEqual(cf.cov(d1, d2), 0.0,
places=self.tolerance,
msg='cobra cov() of two uncorrelated ' +
'Gaussians != 0')
def test_cov_per_slice_consistency(self):
""" Test whether cov_per_slice yields the same results as np.cov.
The data is split into two slices (via its mean), then the covariance
per slice gets computed via cov_per_slice and numpy.cov
"""
n_slices = 2
cf_cov = np.zeros(n_slices)
slice_index_of_particle = np.zeros(len(self.data1),dtype=np.int32)
# all coordinates < mean() belong to slice 0, all > to slice 1
slice_index_of_particle[self.data1 > np.mean(self.data1)] = 1
# all particles are in between the cuts
particles_within_cuts = np.arange(len(self.data1), dtype=np.int32)
n_macroparticles = np.asarray([np.sum(slice_index_of_particle == 0),
np.sum(slice_index_of_particle == 1)],
dtype=np.int32)
cf.cov_per_slice(slice_index_of_particle, particles_within_cuts,
n_macroparticles, self.data1, self.data2, cf_cov)
# numpy slice 0
np_cov_0 = np.cov(self.data1[slice_index_of_particle == 0],
self.data2[slice_index_of_particle == 0])[0,1]
# numpy slice 1
np_cov_1 = np.cov(self.data1[slice_index_of_particle == 1],
self.data2[slice_index_of_particle == 1])[0,1]
self.assertAlmostEqual(cf_cov[0], np_cov_0, places=self.tolerance,
msg='cov_per_slice yields different results ' +
'than np.cov')
self.assertAlmostEqual(cf_cov[1], np_cov_1, places=self.tolerance,
msg='cov_per_slice yields different results ' +
'than np.cov')
def test_cov_std_consistency(self):
""" Test whether cov_per_slice yields the same result as
std_per_slice**2 """
n_slices = 2
cf_cov = np.zeros(n_slices)
cf_std = np.zeros(n_slices)
slice_index_of_particle = np.zeros(len(self.data1),dtype=np.int32)
# all coordinates < mean() belong to slice 0, all > to slice 1
slice_index_of_particle[self.data1 > np.mean(self.data1)] = 1
# all particles are in between the cuts
particles_within_cuts = np.arange(len(self.data1), dtype=np.int32)
n_macroparticles = np.asarray([np.sum(slice_index_of_particle == 0),
np.sum(slice_index_of_particle == 1)],
dtype=np.int32)
cf.cov_per_slice(slice_index_of_particle, particles_within_cuts,
n_macroparticles, self.data1, self.data1, cf_cov)
cf.std_per_slice(slice_index_of_particle, particles_within_cuts,
n_macroparticles, self.data1, cf_std)
self.assertAlmostEqual(cf_cov[0], cf_std[0]**2, places=self.tolerance,
msg='cov_per_slice and std_per_slice yield ' +
'different results when computing the std')
self.assertAlmostEqual(cf_cov[1], cf_std[1]**2, places=self.tolerance,
msg='cov_per_slice and std_per_slice yield ' +
'different results when computing the std')
def generate_gaussian6dBunch(self,n_macroparticles, alpha_x, alpha_y, beta_x,
beta_y, dispx, dispy,
gamma = 3730.27):
Q_s = 0.0020443
C = 26658.883
alpha_0 = [0.0003225]
linear_map = LinearMap(alpha_0, C, Q_s)
intensity = 1.05e11
sigma_z = 0.0059958
gamma_t = 1. / np.sqrt(alpha_0)
p0 = np.sqrt(gamma**2 - 1) * m_p * c
beta_z = (linear_map.eta(dp=0, gamma=gamma) * linear_map.circumference /
(2 * np.pi * linear_map.Q_s))
epsn_x = 3.75e-6 # [m rad]
epsn_y = 3.75e-6 # [m rad]
epsn_z = 4 * np.pi * sigma_z**2 * p0 / (beta_z * e)
bunch = generate_Gaussian6DTwiss(
macroparticlenumber=n_macroparticles, intensity=intensity, charge=e,
gamma=gamma, mass=m_p, circumference=C,
alpha_x=0., beta_x=1., epsn_x=epsn_x,
alpha_y=0., beta_y=1., epsn_y=epsn_y,
beta_z=beta_z, epsn_z=epsn_z)
# Scale to correct beta and alpha
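# (the bunch above is generated with beta=1, alpha=0; the transformation
# below maps it to the requested Twiss parameters)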
bunch.x *= np.sqrt(beta_x)
bunch.xp = -alpha_x/np.sqrt(beta_x) * bunch.x + 1./np.sqrt(beta_x) * bunch.xp
bunch.y = np.sqrt(beta_y)*bunch.y
bunch.yp = -alpha_y/np.sqrt(beta_y) * bunch.y + 1./np.sqrt(beta_y) * bunch.yp
bunch.x += dispx * bunch.dp
bunch.y += dispy * bunch.dp
return bunch
if __name__ == '__main__':
unittest.main()
|
4,804 | test grouped iterator | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from fairseq.data import iterators
class TestIterators(unittest.TestCase):
def test_counting_iterator(self, ref=None, itr=None):
if ref is None:
assert itr is None
ref = list(range(10))
itr = iterators.CountingIterator(ref)
else:
assert len(ref) == 10
assert itr is not None
self.assertTrue(itr.has_next())
self.assertEqual(itr.n, 0)
self.assertEqual(next(itr), ref[0])
self.assertEqual(itr.n, 1)
self.assertEqual(next(itr), ref[1])
self.assertEqual(itr.n, 2)
itr.skip(3)
self.assertEqual(itr.n, 5)
self.assertEqual(next(itr), ref[5])
itr.skip(3)
self.assertEqual(itr.n, 9)
self.assertEqual(next(itr), ref[9])
self.assertFalse(itr.has_next())
def METHOD_NAME(self):
# test correctness
x = list(range(10))
itr = iterators.GroupedIterator(x, 1)
self.assertEqual(list(itr), [[0], [1], [2], [3], [4], [5], [6], [7], [8], [9]])
itr = iterators.GroupedIterator(x, 4)
self.assertEqual(list(itr), [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9]])
itr = iterators.GroupedIterator(x, 5)
self.assertEqual(list(itr), [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]])
# test CountingIterator functionality
x = list(range(30))
ref = list(iterators.GroupedIterator(x, 3))
itr = iterators.GroupedIterator(x, 3)
self.test_counting_iterator(ref, itr)
def test_sharded_iterator(self):
# test correctness
x = list(range(10))
itr = iterators.ShardedIterator(x, num_shards=1, shard_id=0)
self.assertEqual(list(itr), x)
itr = iterators.ShardedIterator(x, num_shards=2, shard_id=0)
self.assertEqual(list(itr), [0, 2, 4, 6, 8])
itr = iterators.ShardedIterator(x, num_shards=2, shard_id=1)
self.assertEqual(list(itr), [1, 3, 5, 7, 9])
itr = iterators.ShardedIterator(x, num_shards=3, shard_id=0)
self.assertEqual(list(itr), [0, 3, 6, 9])
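# shards of uneven length are padded with None so every shard yields the
# same number of items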
itr = iterators.ShardedIterator(x, num_shards=3, shard_id=1)
self.assertEqual(list(itr), [1, 4, 7, None])
itr = iterators.ShardedIterator(x, num_shards=3, shard_id=2)
self.assertEqual(list(itr), [2, 5, 8, None])
# test CountingIterator functionality
x = list(range(30))
ref = list(iterators.ShardedIterator(x, num_shards=3, shard_id=0))
itr = iterators.ShardedIterator(x, num_shards=3, shard_id=0)
self.test_counting_iterator(ref, itr)
def test_counting_iterator_take(self):
ref = list(range(10))
itr = iterators.CountingIterator(ref)
itr.take(5)
self.assertEqual(len(itr), len(list(iter(itr))))
self.assertEqual(len(itr), 5)
itr = iterators.CountingIterator(ref)
itr.take(5)
self.assertEqual(next(itr), ref[0])
self.assertEqual(next(itr), ref[1])
itr.skip(2)
self.assertEqual(next(itr), ref[4])
self.assertFalse(itr.has_next())
def test_counting_iterator_buffered_iterator_take(self):
ref = list(range(10))
buffered_itr = iterators.BufferedIterator(2, ref)
itr = iterators.CountingIterator(buffered_itr)
itr.take(5)
self.assertEqual(len(itr), len(list(iter(itr))))
self.assertEqual(len(itr), 5)
buffered_itr = iterators.BufferedIterator(2, ref)
itr = iterators.CountingIterator(buffered_itr)
itr.take(5)
self.assertEqual(len(buffered_itr), 5)
self.assertEqual(len(list(iter(buffered_itr))), 5)
buffered_itr = iterators.BufferedIterator(2, ref)
itr = iterators.CountingIterator(buffered_itr)
itr.take(5)
self.assertEqual(next(itr), ref[0])
self.assertEqual(next(itr), ref[1])
itr.skip(2)
self.assertEqual(next(itr), ref[4])
self.assertFalse(itr.has_next())
self.assertRaises(StopIteration, next, buffered_itr)
ref = list(range(4, 10))
buffered_itr = iterators.BufferedIterator(2, ref)
itr = iterators.CountingIterator(buffered_itr, start=4)
itr.take(5)
self.assertEqual(len(itr), 5)
self.assertEqual(len(buffered_itr), 1)
self.assertEqual(next(itr), ref[0])
self.assertFalse(itr.has_next())
self.assertRaises(StopIteration, next, buffered_itr)
if __name__ == "__main__":
unittest.main() |
4,805 | mock ugettext | from unittest.mock import patch
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils.functional import lazy
from django.utils.translation import get_language
from kitsune.sumo.email_utils import emails_with_users_and_watches, safe_translation
from kitsune.sumo.tests import TestCase
from kitsune.sumo.utils import uselocale
from kitsune.users.tests import UserFactory
mock_translations = {
"Hello": {"en-us": "Hello", "fr": "Bonjour", "es": "Hola"},
"Hello {name}": {
"en-us": "Hello {name}",
"fr": "Bonjour {0}",
"es": "Hola {name}",
},
}
def METHOD_NAME(msg_id):
locale = get_language()
return mock_translations[msg_id][locale]
mock_gettext_lazy = lazy(METHOD_NAME)
def mock_gettext(f):
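# Patch both gettext and gettext_lazy so code importing either one sees the
# mock translations above.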
f = patch("django.utils.translation.gettext", METHOD_NAME)(f)
f = patch("django.utils.translation.gettext_lazy", mock_gettext_lazy)(f)
return f
class SafeTranslationTests(TestCase):
def setUp(self):
# These tests assume English is the fallback language. If it
# isn't, we are going to have a bad time.
self.assertEqual("en-US", settings.WIKI_DEFAULT_LANGUAGE)
@mock_gettext
def test_mocked_gettext(self):
"""I'm not entirely sure about the mocking, so test that."""
# Import translation now so it is affected by the mock.
from django.utils.translation import gettext as _
with uselocale("en-US"):
self.assertEqual(_("Hello"), "Hello")
with uselocale("fr"):
self.assertEqual(_("Hello"), "Bonjour")
with uselocale("es"):
self.assertEqual(_("Hello"), "Hola")
@mock_gettext
def test_safe_translation_noop(self):
"""Test that safe_translation doesn't mess with good translations."""
# Import translation now so it is affected by the mock.
from django.utils.translation import gettext as _
@safe_translation
def simple(locale):
return _("Hello")
# These should just work normally.
self.assertEqual(simple("en-US"), "Hello")
self.assertEqual(simple("fr"), "Bonjour")
self.assertEqual(simple("es"), "Hola")
@mock_gettext
def test_safe_translation_bad_trans(self):
"""Test that safe_translation insulates from bad translations."""
# Import translation now so it is affected by the mock.
from django.utils.translation import gettext as _
# `safe_translation` will call this with the given locale, and
# if that fails, fall back to English.
@safe_translation
def bad_trans(locale):
return _("Hello {name}").format(name="Mike")
# French should come back as English, because it has a bad
# translation, but Spanish should come back in Spanish.
self.assertEqual(bad_trans("en-US"), "Hello Mike")
self.assertEqual(bad_trans("fr"), "Hello Mike")
self.assertEqual(bad_trans("es"), "Hola Mike")
@mock_gettext
@patch("kitsune.sumo.email_utils.log")
def test_safe_translation_logging(self, mocked_log):
"""Logging translation errors is really important, so test it."""
# Import translation now so it is affected by the mock.
from django.utils.translation import gettext as _
# Assert that bad translations cause error logging.
@safe_translation
def bad_trans(locale):
return _("Hello {name}").format(name="Mike")
# English and Spanish should not log anything. French should.
bad_trans("en-US")
bad_trans("es")
self.assertEqual(len(mocked_log.method_calls), 0)
bad_trans("fr")
self.assertEqual(len(mocked_log.method_calls), 1)
method_name, method_args, method_kwargs = mocked_log.method_calls[0]
self.assertEqual(method_name, "exception")
assert "Bad translation" in method_args[0]
self.assertEqual(method_args[1], "fr")
class UseLocaleTests(TestCase):
def test_uselocale(self):
"""Test that uselocale does what it says on the tin."""
with uselocale("en-US"):
self.assertEqual(get_language(), "en-us")
with uselocale("de"):
self.assertEqual(get_language(), "de")
with uselocale("fr"):
self.assertEqual(get_language(), "fr")
class PremailerTests(TestCase):
def test_styles_inlining(self):
"""Test that styles tags are converted to inline styles"""
with patch("kitsune.sumo.email_utils.render_to_string") as mocked:
mocked.return_value = (
"<html>"
"<head>"
"<style>a { color: #000; }</style>"
"</head>"
"<body>"
'<a href="/test">Hyperlink</a>'
"</body>"
"</html>"
)
u = UserFactory()
msg = emails_with_users_and_watches("test", "a.ltxt", "a.html", {}, [(u, [None])])
for m in msg:
tag = '<a href="https://%s/test" style="color:#000">Hyperlink</a>'
self.assertIn(tag % Site.objects.get_current().domain, str(m.message())) |
4,806 | id | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetBotResult',
'AwaitableGetBotResult',
'get_bot',
'get_bot_output',
]
@pulumi.output_type
class GetBotResult:
"""
Bot resource definition
"""
def __init__(__self__, etag=None, METHOD_NAME=None, kind=None, location=None, name=None, properties=None, sku=None, tags=None, type=None, zones=None):
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if METHOD_NAME and not isinstance(METHOD_NAME, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", METHOD_NAME)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
if sku and not isinstance(sku, dict):
raise TypeError("Expected argument 'sku' to be a dict")
pulumi.set(__self__, "sku", sku)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if zones and not isinstance(zones, list):
raise TypeError("Expected argument 'zones' to be a list")
pulumi.set(__self__, "zones", zones)
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
Entity Tag.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def METHOD_NAME(self) -> str:
"""
Specifies the resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def kind(self) -> Optional[str]:
"""
Required. Gets or sets the Kind of the resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
Specifies the location of the resource.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
Specifies the name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> 'outputs.BotPropertiesResponse':
"""
The set of properties specific to bot resource
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def sku(self) -> Optional['outputs.SkuResponse']:
"""
Gets or sets the SKU of the resource.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Contains resource tags defined as key/value pairs.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Specifies the type of the resource.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def zones(self) -> Sequence[str]:
"""
Entity zones
"""
return pulumi.get(self, "zones")
class AwaitableGetBotResult(GetBotResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetBotResult(
etag=self.etag,
METHOD_NAME=self.METHOD_NAME,
kind=self.kind,
location=self.location,
name=self.name,
properties=self.properties,
sku=self.sku,
tags=self.tags,
type=self.type,
zones=self.zones)
def get_bot(resource_group_name: Optional[str] = None,
resource_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetBotResult:
"""
Returns a BotService specified by the parameters.
Azure REST API version: 2022-09-15.
:param str resource_group_name: The name of the Bot resource group in the user subscription.
:param str resource_name: The name of the Bot resource.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['resourceName'] = resource_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:botservice:getBot', __args__, opts=opts, typ=GetBotResult).value
return AwaitableGetBotResult(
etag=pulumi.get(__ret__, 'etag'),
METHOD_NAME=pulumi.get(__ret__, 'id'),
kind=pulumi.get(__ret__, 'kind'),
location=pulumi.get(__ret__, 'location'),
name=pulumi.get(__ret__, 'name'),
properties=pulumi.get(__ret__, 'properties'),
sku=pulumi.get(__ret__, 'sku'),
tags=pulumi.get(__ret__, 'tags'),
type=pulumi.get(__ret__, 'type'),
zones=pulumi.get(__ret__, 'zones'))
@_utilities.lift_output_func(get_bot)
def get_bot_output(resource_group_name: Optional[pulumi.Input[str]] = None,
resource_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetBotResult]:
"""
Returns a BotService specified by the parameters.
Azure REST API version: 2022-09-15.
:param str resource_group_name: The name of the Bot resource group in the user subscription.
:param str resource_name: The name of the Bot resource.
"""
... |
4,807 | cli | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import click
from platformio import app, exception, fs, util
from platformio.project.config import ProjectConfig
from platformio.test.helpers import list_test_suites
from platformio.test.reports.base import TestReportFactory
from platformio.test.result import TestResult, TestStatus
from platformio.test.runners.base import TestRunnerOptions
from platformio.test.runners.factory import TestRunnerFactory
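# Hypothetical invocation (option names are defined in the decorators below):
#   pio test -e native -f "test_*" --json-output-path report.json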
@click.command("test", short_help="Unit Testing")
@click.option("--environment", "-e", multiple=True)
@click.option(
"--filter",
"-f",
multiple=True,
metavar="PATTERN",
help="Filter tests by a pattern",
)
@click.option(
"--ignore",
"-i",
multiple=True,
metavar="PATTERN",
help="Ignore tests by a pattern",
)
@click.option("--upload-port")
@click.option("--test-port")
@click.option(
"-d",
"--project-dir",
default=os.getcwd,
type=click.Path(exists=True, file_okay=False, dir_okay=True, writable=True),
)
@click.option(
"-c",
"--project-conf",
type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True),
)
@click.option("--without-building", is_flag=True)
@click.option("--without-uploading", is_flag=True)
@click.option("--without-testing", is_flag=True)
@click.option("--no-reset", is_flag=True)
@click.option(
"--monitor-rts",
default=None,
type=click.IntRange(0, 1),
help="Set initial RTS line state for Serial Monitor",
)
@click.option(
"--monitor-dtr",
default=None,
type=click.IntRange(0, 1),
help="Set initial DTR line state for Serial Monitor",
)
@click.option(
"-a",
"--program-arg",
"program_args",
multiple=True,
help="A program argument (multiple are allowed)",
)
@click.option("--list-tests", is_flag=True)
@click.option("--json-output-path", type=click.Path())
@click.option("--junit-output-path", type=click.Path())
@click.option(
"--verbose",
"-v",
count=True,
help="Increase verbosity level, maximum is 3 levels (-vvv), see docs for details",
)
@click.pass_context
def METHOD_NAME( # pylint: disable=too-many-arguments,too-many-locals,redefined-builtin
ctx,
environment,
ignore,
filter,
upload_port,
test_port,
project_dir,
project_conf,
without_building,
without_uploading,
without_testing,
no_reset,
monitor_rts,
monitor_dtr,
program_args,
list_tests,
json_output_path,
junit_output_path,
verbose,
):
app.set_session_var("custom_project_conf", project_conf)
with fs.cd(project_dir):
project_config = ProjectConfig.get_instance(project_conf)
project_config.validate(envs=environment)
test_result = TestResult(project_dir)
test_suites = list_test_suites(
project_config, environments=environment, filters=filter, ignores=ignore
)
test_names = sorted(set(s.test_name for s in test_suites))
if not verbose:
click.echo("Verbosity level can be increased via `-v, -vv, or -vvv` option")
click.secho("Collected %d tests" % len(test_names), bold=True, nl=not verbose)
if verbose:
click.echo(" (%s)" % ", ".join(test_names))
for test_suite in test_suites:
test_result.add_suite(test_suite)
if list_tests or test_suite.is_finished(): # skipped by user
continue
runner = TestRunnerFactory.new(
test_suite,
project_config,
TestRunnerOptions(
verbose=verbose,
without_building=without_building,
without_uploading=without_uploading,
without_testing=without_testing,
upload_port=upload_port,
test_port=test_port,
no_reset=no_reset,
monitor_rts=monitor_rts,
monitor_dtr=monitor_dtr,
program_args=program_args,
),
)
click.echo()
print_suite_header(test_suite)
runner.start(ctx)
print_suite_footer(test_suite)
stdout_report = TestReportFactory.new("stdout", test_result)
stdout_report.generate(verbose=verbose or list_tests)
for output_format, output_path in [
("json", json_output_path),
("junit", junit_output_path),
]:
if not output_path:
continue
custom_report = TestReportFactory.new(output_format, test_result)
custom_report.generate(output_path=output_path, verbose=True)
# Reset custom project config
app.set_session_var("custom_project_conf", None)
if test_result.is_errored or test_result.get_status_nums(TestStatus.FAILED):
raise exception.ReturnErrorCode(1)
def print_suite_header(test_suite):
click.echo(
"Processing %s in %s environment"
% (
click.style(test_suite.test_name, fg="yellow", bold=True),
click.style(test_suite.env_name, fg="cyan", bold=True),
)
)
terminal_width = shutil.get_terminal_size().columns
click.secho("-" * terminal_width, bold=True)
def print_suite_footer(test_suite):
is_error = test_suite.status in (TestStatus.FAILED, TestStatus.ERRORED)
util.print_labeled_bar(
"%s [%s] Took %.2f seconds"
% (
click.style(
"%s:%s" % (test_suite.env_name, test_suite.test_name), bold=True
),
(
click.style(test_suite.status.name, fg="red", bold=True)
if is_error
else click.style("PASSED", fg="green", bold=True)
),
test_suite.duration,
),
is_error=is_error,
sep="-",
) |
4,808 | create initial config | import os
import sys
from hls4ml.backends import FPGABackend
from hls4ml.model.flow import register_flow
from hls4ml.report import parse_vivado_report
class SymbolicExpressionBackend(FPGABackend):
def __init__(self):
super().__init__('SymbolicExpression')
self._register_flows()
def _register_flows(self):
vivado_types = [
'vivado:transform_types',
]
vivado_types_flow = register_flow('specific_types', vivado_types, requires=None, backend=self.name)
validation_passes = [
'symbolicexpression:validate_user_lookup_table',
]
validation_flow = register_flow('validation', validation_passes, requires=None, backend=self.name)
template_flow = register_flow('apply_templates', self._get_layer_templates, requires=None, backend=self.name)
writer_passes = ['make_stamp', 'symbolicexpression:write_hls']
self._writer_flow = register_flow('write', writer_passes, requires=['vivado:ip'], backend=self.name)
ip_flow_requirements = [vivado_types_flow, validation_flow, template_flow]
ip_flow_requirements = list(filter(None, ip_flow_requirements))
self._default_flow = register_flow('ip', None, requires=ip_flow_requirements, backend=self.name)
def get_default_flow(self):
return self._default_flow
def get_writer_flow(self):
return self._writer_flow
def METHOD_NAME(
self,
part='xcvu9p-flga2577-2-e',
clock_period=5,
io_type='io_parallel',
compiler='vivado_hls',
hls_include_path=None,
hls_libs_path=None,
):
config = {}
config['Part'] = part if part is not None else 'xcvu9p-flga2577-2-e'
config['ClockPeriod'] = clock_period
config['IOType'] = io_type
config['Compiler'] = compiler if compiler is not None else 'vivado_hls'
if not all([hls_include_path, hls_libs_path]):
# Try to infer the include path from Vivado path
bin_path = os.popen(f'command -v {compiler}').read().strip()
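# e.g. bin_path "/opt/Xilinx/Vivado/2020.1/bin/vivado_hls" yields the
# include path "/opt/Xilinx/Vivado/2020.1/include"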
if hls_include_path is None:
hls_include_path = bin_path.replace(f'/bin/{compiler}', '/include')
if not os.path.exists(hls_include_path + '/hls_math.h'):
raise Exception(
'Vivado HLS header files not found. Make sure you pass the proper path '
'to the "include" directory (for example "/opt/Xilinx/Vivado/2020.1/include").'
)
elif hls_include_path == '':
print(
'No HLS include path provided; using HLS math functions from Python (i.e., predict()) will not work. '
'Consider using only LUT approximations.'
)
if hls_libs_path is None:
hls_libs_path = bin_path.replace(f'/bin/{compiler}', '/lnx64')
if not os.path.exists(hls_libs_path + '/lib/csim/libhlsmc++-GCC46.so'):
raise Exception(
'Vivado HLS libraries not found. Make sure you pass the proper path '
'to the "lnx64" directory (for example "/opt/Xilinx/Vivado/2020.1/lnx64").'
)
config['HLSIncludePath'] = hls_include_path
config['HLSLibsPath'] = hls_libs_path
config['HLSConfig'] = {}
return config
def build(self, model, reset=False, csim=True, synth=True, cosim=False, validation=False, export=False, vsynth=False):
if 'linux' in sys.platform:
found = os.system('command -v vivado_hls > /dev/null')
if found != 0:
raise Exception('Vivado HLS installation not found. Make sure "vivado_hls" is on PATH.')
curr_dir = os.getcwd()
os.chdir(model.config.get_output_dir())
vivado_cmd = (
f'vivado_hls -f build_prj.tcl "reset={reset} '
f'csim={csim} '
f'synth={synth} '
f'cosim={cosim} '
f'validation={validation} '
f'export={export} '
f'vsynth={vsynth}"'
)
os.system(vivado_cmd)
os.chdir(curr_dir)
return parse_vivado_report(model.config.get_output_dir()) |
4,809 | test fifth day five gold rings | # These tests are auto-generated with test data from:
# https://github.com/exercism/problem-specifications/tree/main/exercises/twelve-days/canonical-data.json
# File last updated on 2023-07-19
import unittest
from twelve_days import (
recite,
)
# PLEASE TAKE NOTE: Expected result lists for these test cases use **implicit line joining.**
# A new line in a result list below **does not** always equal a new list element.
# Check comma placement carefully!
class TwelveDaysTest(unittest.TestCase):
def test_first_day_a_partridge_in_a_pear_tree(self):
expected = [
"On the first day of Christmas my true love gave to me: "
"a Partridge in a Pear Tree."
]
self.assertEqual(recite(1, 1), expected)
def test_second_day_two_turtle_doves(self):
expected = [
"On the second day of Christmas my true love gave to me: "
"two Turtle Doves, "
"and a Partridge in a Pear Tree."
]
self.assertEqual(recite(2, 2), expected)
def test_third_day_three_french_hens(self):
expected = [
"On the third day of Christmas my true love gave to me: "
"three French Hens, "
"two Turtle Doves, "
"and a Partridge in a Pear Tree."
]
self.assertEqual(recite(3, 3), expected)
def test_fourth_day_four_calling_birds(self):
expected = [
"On the fourth day of Christmas my true love gave to me: "
"four Calling Birds, "
"three French Hens, "
"two Turtle Doves, "
"and a Partridge in a Pear Tree."
]
self.assertEqual(recite(4, 4), expected)
def METHOD_NAME(self):
expected = [
"On the fifth day of Christmas my true love gave to me: "
"five Gold Rings, "
"four Calling Birds, "
"three French Hens, "
"two Turtle Doves, "
"and a Partridge in a Pear Tree."
]
self.assertEqual(recite(5, 5), expected)
def test_sixth_day_six_geese_a_laying(self):
expected = [
"On the sixth day of Christmas my true love gave to me: "
"six Geese-a-Laying, "
"five Gold Rings, "
"four Calling Birds, "
"three French Hens, "
"two Turtle Doves, "
"and a Partridge in a Pear Tree."
]
self.assertEqual(recite(6, 6), expected)
def test_seventh_day_seven_swans_a_swimming(self):
expected = [
"On the seventh day of Christmas my true love gave to me: "
"seven Swans-a-Swimming, "
"six Geese-a-Laying, "
"five Gold Rings, "
"four Calling Birds, "
"three French Hens, "
"two Turtle Doves, "
"and a Partridge in a Pear Tree."
]
self.assertEqual(recite(7, 7), expected)
def test_eighth_day_eight_maids_a_milking(self):
expected = [
"On the eighth day of Christmas my true love gave to me: "
"eight Maids-a-Milking, "
"seven Swans-a-Swimming, "
"six Geese-a-Laying, "
"five Gold Rings, "
"four Calling Birds, "
"three French Hens, "
"two Turtle Doves, "
"and a Partridge in a Pear Tree."
]
self.assertEqual(recite(8, 8), expected)
def test_ninth_day_nine_ladies_dancing(self):
expected = [
"On the ninth day of Christmas my true love gave to me: "
"nine Ladies Dancing, "
"eight Maids-a-Milking, "
"seven Swans-a-Swimming, "
"six Geese-a-Laying, "
"five Gold Rings, "
"four Calling Birds, "
"three French Hens, "
"two Turtle Doves, "
"and a Partridge in a Pear Tree."
]
self.assertEqual(recite(9, 9), expected)
def test_tenth_day_ten_lords_a_leaping(self):
expected = [
"On the tenth day of Christmas my true love gave to me: "
"ten Lords-a-Leaping, "
"nine Ladies Dancing, "
"eight Maids-a-Milking, "
"seven Swans-a-Swimming, "
"six Geese-a-Laying, "
"five Gold Rings, "
"four Calling Birds, "
"three French Hens, "
"two Turtle Doves, "
"and a Partridge in a Pear Tree."
]
self.assertEqual(recite(10, 10), expected)
def test_eleventh_day_eleven_pipers_piping(self):
expected = [
"On the eleventh day of Christmas my true love gave to me: "
"eleven Pipers Piping, "
"ten Lords-a-Leaping, "
"nine Ladies Dancing, "
"eight Maids-a-Milking, "
"seven Swans-a-Swimming, "
"six Geese-a-Laying, "
"five Gold Rings, "
"four Calling Birds, "
"three French Hens, "
"two Turtle Doves, "
"and a Partridge in a Pear Tree."
]
self.assertEqual(recite(11, 11), expected)
def test_twelfth_day_twelve_drummers_drumming(self):
expected = [
"On the twelfth day of Christmas my true love gave to me: "
"twelve Drummers Drumming, "
"eleven Pipers Piping, "
"ten Lords-a-Leaping, "
"nine Ladies Dancing, "
"eight Maids-a-Milking, "
"seven Swans-a-Swimming, "
"six Geese-a-Laying, "
"five Gold Rings, "
"four Calling Birds, "
"three French Hens, "
"two Turtle Doves, "
"and a Partridge in a Pear Tree."
]
self.assertEqual(recite(12, 12), expected)
def test_recites_first_three_verses_of_the_song(self):
expected = [recite(n, n)[0] for n in range(1, 4)]
self.assertEqual(recite(1, 3), expected)
def test_recites_three_verses_from_the_middle_of_the_song(self):
expected = [recite(n, n)[0] for n in range(4, 7)]
self.assertEqual(recite(4, 6), expected)
def test_recites_the_whole_song(self):
expected = [recite(n, n)[0] for n in range(1, 13)]
self.assertEqual(recite(1, 12), expected) |
4,810 | list | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._operations import build_list_request
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class Operations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.servicefabricmanagedclusters.aio.ServiceFabricManagedClustersManagementClient`'s
:attr:`operations` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = METHOD_NAME(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def METHOD_NAME(self, **kwargs: Any) -> AsyncIterable["_models.OperationResult"]:
"""Lists all of the available Service Fabric resource provider API operations.
Get the list of available Service Fabric resource provider API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either OperationResult or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.servicefabricmanagedclusters.models.OperationResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
api_version=api_version,
template_url=self.METHOD_NAME.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("OperationListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorModel, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
METHOD_NAME.metadata = {"url": "/providers/Microsoft.ServiceFabric/operations"} |
4,811 | test ethaddress typeerror | #!/usr/bin/python3
import pytest
from brownie.convert.datatypes import EthAddress, HexString, ReturnValue, Wei
from brownie.project import compile_source
string_fixture = "bar baz"
@pytest.fixture
def return_value(accounts, tester):
yield tester.manyValues(
88, [False, False, False], accounts[2], [("0x1234", "0x6666")], string_fixture
)
def test_type(return_value):
assert isinstance(return_value, ReturnValue)
assert isinstance(return_value["_addr"], EthAddress)
assert isinstance(return_value["_bool"], ReturnValue)
assert isinstance(return_value["_bool"][0], bool)
assert isinstance(return_value["_num"], Wei)
assert isinstance(return_value["_bytes"], ReturnValue)
assert isinstance(return_value["_bytes"][0][0], HexString)
def test_len(return_value):
assert len(return_value) == 5
def test_count(return_value):
assert return_value.count(2) == 0
assert return_value.count([("0x1234", "0x6666")]) == 1
def test_index(return_value):
assert return_value.index([("0x1234", "0x6666")]) == 3
assert return_value.index([("0x1234", "0x6666")], 1, 4) == 3
with pytest.raises(ValueError):
return_value.index([("0x1234", "0x6666")], stop=2)
with pytest.raises(ValueError):
return_value.index("foo")
def test_contains_conversions(accounts, return_value):
assert 88 in return_value
assert "88 wei" in return_value
assert False in return_value[1]
assert True not in return_value[1]
assert 0 not in return_value[1]
assert accounts[2] in return_value
assert str(accounts[2]) in return_value
assert accounts[1] not in return_value
assert "0x1234" in return_value[3][0]
assert "0x00001234" in return_value[3][0]
def test_eq_conversions(accounts, return_value):
data = [88, [False, False, False], accounts[2], [("0x1234", "0x6666")], string_fixture]
assert return_value == data
assert return_value == tuple(data)
data[1] = tuple(data[1])
data[3] = tuple(data[3])
assert return_value == tuple(data)
def test_ne_conversions(accounts, return_value):
data = [88, [False, False, False], accounts[2], [("0x1234", "0x6666")], string_fixture]
assert not return_value != data
assert not return_value != tuple(data)
data[1] = tuple(data[1])
data[3] = tuple(data[3])
assert not return_value != tuple(data)
def test_dict(accounts, return_value):
d = return_value.dict()
assert isinstance(d, dict)
assert len(d) == 5
assert len(d["_bool"]) == 3
assert sorted(d) == ["_addr", "_bool", "_bytes", "_num", "_string"]
assert d["_addr"] == accounts[2]
def test_keys(return_value):
assert list(return_value.keys()) == ["_num", "_bool", "_addr", "_bytes", "_string"]
def test_items(return_value):
assert return_value.items() == return_value.dict().items()
def test_getitem(accounts, return_value):
assert return_value[2] == return_value["_addr"] == accounts[2]
assert return_value[0] == return_value["_num"] == 88
def test_getitem_slice(accounts, return_value):
s = return_value[1:3]
assert s == [[False, False, False], accounts[2]]
assert isinstance(s, ReturnValue)
assert s[0] == s["_bool"]
assert "_num" not in s
def METHOD_NAME():
e = EthAddress("0x0063046686E46Dc6F15918b61AE2B121458534a5")
with pytest.raises(TypeError):
e == "potato"
with pytest.raises(TypeError):
e == "0x00"
assert str(e) != "potato"
def test_hexstring_typeerror():
b = HexString("0x1234", "bytes32")
with pytest.raises(TypeError):
b == "potato"
with pytest.raises(TypeError):
b == "1234"
assert str(b) != "potato"
def test_hexstring_length():
b = HexString("0x1234", "bytes32")
assert b == "0x1234"
assert b == "0x000000000000001234"
def test_hashable():
assert hash(ReturnValue([1, 2])) == hash(tuple([1, 2]))
assert set(ReturnValue([3, 1, 3, 3, 7])) == set([3, 1, 3, 3, 7])
def test_decimals(vypertester):
ret = vypertester.fixedType("1.234", ["-42", "3.1337"])
assert ret == ["1.234", "-42", "3.1337"]
def test_dynamic_tuple_array(accounts):
code = """
pragma solidity ^0.6.0;
pragma experimental ABIEncoderV2;
contract Test {
struct Foo { uint256 a; }
Foo[] bar;
function foo() public returns (Foo[] memory a) {
bar.push(Foo(1));
bar.push(Foo(6));
return bar;
}
}
"""
contract = compile_source(code).Test.deploy({"from": accounts[0]})
assert contract.foo.call() == [(1,), (6,)]
def test_fixed_tuple_array(accounts):
code = """
pragma solidity ^0.6.0;
pragma experimental ABIEncoderV2;
contract Test {
struct Foo { uint256 a; string b; }
Foo[2][2] bar;
function foo() public returns (Foo[2][2] memory, Foo[2] memory) {
bar[0][0].a = 42;
bar[0][0].b = "hello";
bar[1][1].a = 69;
return (bar, bar[1]);
}
}
"""
contract = compile_source(code).Test.deploy({"from": accounts[0]})
assert contract.foo.call() == [
([(42, "hello"), (0, "")], [(0, ""), (69, "")]),
[(0, ""), (69, "")],
] |
4,812 | load model | import os
import shutil
from mlagents.torch_utils import torch
from typing import Dict, Union, Optional, cast, Tuple, List
from mlagents_envs.exception import UnityPolicyException
from mlagents_envs.logging_util import get_logger
from mlagents.trainers.model_saver.model_saver import BaseModelSaver
from mlagents.trainers.settings import TrainerSettings, SerializationSettings
from mlagents.trainers.policy.torch_policy import TorchPolicy
from mlagents.trainers.optimizer.torch_optimizer import TorchOptimizer
from mlagents.trainers.torch_entities.model_serialization import ModelSerializer
logger = get_logger(__name__)
DEFAULT_CHECKPOINT_NAME = "checkpoint.pt"
class TorchModelSaver(BaseModelSaver):
"""
ModelSaver class for PyTorch
"""
def __init__(
self, trainer_settings: TrainerSettings, model_path: str, load: bool = False
):
super().__init__()
self.model_path = model_path
self.initialize_path = trainer_settings.init_path
self._keep_checkpoints = trainer_settings.keep_checkpoints
self.load = load
self.policy: Optional[TorchPolicy] = None
self.exporter: Optional[ModelSerializer] = None
self.modules: Dict[str, torch.nn.Module] = {}
def register(self, module: Union[TorchPolicy, TorchOptimizer]) -> None:
if isinstance(module, TorchPolicy) or isinstance(module, TorchOptimizer):
self.modules.update(module.get_modules()) # type: ignore
else:
raise UnityPolicyException(
"Registering Object of unsupported type {} to ModelSaver ".format(
type(module)
)
)
if self.policy is None and isinstance(module, TorchPolicy):
self.policy = module
self.exporter = ModelSerializer(self.policy)
def save_checkpoint(self, behavior_name: str, step: int) -> Tuple[str, List[str]]:
if not os.path.exists(self.model_path):
os.makedirs(self.model_path)
checkpoint_path = os.path.join(self.model_path, f"{behavior_name}-{step}")
state_dict = {
name: module.state_dict() for name, module in self.modules.items()
}
pytorch_ckpt_path = f"{checkpoint_path}.pt"
export_ckpt_path = f"{checkpoint_path}.onnx"
torch.save(state_dict, pytorch_ckpt_path)
torch.save(state_dict, os.path.join(self.model_path, DEFAULT_CHECKPOINT_NAME))
self.export(checkpoint_path, behavior_name)
return export_ckpt_path, [pytorch_ckpt_path]
def export(self, output_filepath: str, behavior_name: str) -> None:
if self.exporter is not None:
self.exporter.export_policy_model(output_filepath)
def initialize_or_load(self, policy: Optional[TorchPolicy] = None) -> None:
# Initialize/Load registered self.policy by default.
# If given input argument policy, use the input policy instead.
# This argument is mainly for initialization of the ghost trainer's fixed policy.
reset_steps = not self.load
if self.initialize_path is not None:
logger.info(f"Initializing from {self.initialize_path}.")
self.METHOD_NAME(
self.initialize_path, policy, reset_global_steps=reset_steps
)
elif self.load:
logger.info(f"Resuming from {self.model_path}.")
self.METHOD_NAME(
os.path.join(self.model_path, DEFAULT_CHECKPOINT_NAME),
policy,
reset_global_steps=reset_steps,
)
def METHOD_NAME(
self,
load_path: str,
policy: Optional[TorchPolicy] = None,
reset_global_steps: bool = False,
) -> None:
saved_state_dict = torch.load(load_path)
if policy is None:
modules = self.modules
policy = self.policy
else:
modules = policy.get_modules()
policy = cast(TorchPolicy, policy)
for name, mod in modules.items():
try:
if isinstance(mod, torch.nn.Module):
missing_keys, unexpected_keys = mod.load_state_dict(
saved_state_dict[name], strict=False
)
if missing_keys:
logger.warning(
f"Did not find these keys {missing_keys} in checkpoint. Initializing."
)
if unexpected_keys:
logger.warning(
f"Did not expect these keys {unexpected_keys} in checkpoint. Ignoring."
)
else:
# If module is not an nn.Module, try to load as one piece
mod.load_state_dict(saved_state_dict[name])
# KeyError is raised if the module was not present in the last run but is being
# accessed in the saved_state_dict.
# ValueError is raised by the optimizer's load_state_dict if the parameters have
# changed. Note, the optimizer uses a completely different load_state_dict
# function because it is not an nn.Module.
# RuntimeError is raised by PyTorch if there is a size mismatch between modules
# of the same name. This will still partially assign values to those layers that
# have not changed shape.
except (KeyError, ValueError, RuntimeError) as err:
logger.warning(f"Failed to load for module {name}. Initializing")
logger.debug(f"Module loading error : {err}")
if reset_global_steps:
policy.set_step(0)
logger.info(
"Starting training from step 0 and saving to {}.".format(
self.model_path
)
)
else:
logger.info(f"Resuming training from step {policy.get_current_step()}.")
def copy_final_model(self, source_nn_path: str) -> None:
"""
Copy the .nn file at the given source to the destination.
Also copies the corresponding .onnx file if it exists.
"""
final_model_name = os.path.splitext(source_nn_path)[0]
if SerializationSettings.convert_to_onnx:
try:
source_path = f"{final_model_name}.onnx"
destination_path = f"{self.model_path}.onnx"
shutil.copyfile(source_path, destination_path)
logger.info(f"Copied {source_path} to {destination_path}.")
except OSError:
pass |
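A hedged usage sketch of the saver above; trainer_settings, policy and optimizer are placeholders for the TrainerSettings, TorchPolicy and TorchOptimizer that the trainer builds elsewhere:

saver = TorchModelSaver(trainer_settings, model_path="results/run1", load=False)
saver.register(policy)      # the first TorchPolicy also wires up the ONNX exporter
saver.register(optimizer)   # optimizer modules join the same state dict
saver.initialize_or_load()  # loads from init_path, or from model_path when load=True
onnx_path, aux = saver.save_checkpoint("Walker", step=50000)
# onnx_path == "results/run1/Walker-50000.onnx"; aux == ["results/run1/Walker-50000.pt"]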
4,813 | test plot coord 3d transform | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.backend_bases import KeyEvent
import astropy.units as u
from astropy.coordinates import FK5, SkyCoord, galactocentric_frame_defaults
from astropy.time import Time
from astropy.visualization.wcsaxes.core import WCSAxes
from astropy.wcs import WCS
from .test_images import BaseImageTests
class TestDisplayWorldCoordinate(BaseImageTests):
def teardown_method(self, method):
plt.close("all")
def test_overlay_coords(self, ignore_matplotlibrc, tmp_path):
wcs = WCS(self.msx_header)
fig = plt.figure(figsize=(4, 4))
canvas = fig.canvas
ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8], wcs=wcs)
fig.add_axes(ax)
# On some systems, fig.canvas.draw is not enough to force a draw, so we
# save to a temporary file.
fig.savefig(tmp_path / "test1.png")
# Testing default displayed world coordinates
string_world = ax._display_world_coords(0.523412, 0.518311)
assert string_world == "0\xb029'45\" -0\xb029'20\" (world)"
# Test pixel coordinates
event1 = KeyEvent("test_pixel_coords", canvas, "w")
fig.canvas.callbacks.process("key_press_event", event1)
string_pixel = ax._display_world_coords(0.523412, 0.523412)
assert string_pixel == "0.523412 0.523412 (pixel)"
event3 = KeyEvent("test_pixel_coords", canvas, "w")
fig.canvas.callbacks.process("key_press_event", event3)
# Test that it still displays world coords when there are no overlay coords
string_world2 = ax._display_world_coords(0.523412, 0.518311)
assert string_world2 == "0\xb029'45\" -0\xb029'20\" (world)"
overlay = ax.get_coords_overlay("fk5")
# Regression test for bug that caused format to always be taken from
# main world coordinates.
overlay[0].set_major_formatter("d.ddd")
# On some systems, fig.canvas.draw is not enough to force a draw, so we
# save to a temporary file.
fig.savefig(tmp_path / "test2.png")
event4 = KeyEvent("test_pixel_coords", canvas, "w")
fig.canvas.callbacks.process("key_press_event", event4)
# Test that it displays the overlay world coordinates
string_world3 = ax._display_world_coords(0.523412, 0.518311)
assert string_world3 == "267.176\xb0 -28\xb045'56\" (world, overlay 1)"
overlay = ax.get_coords_overlay(FK5())
# Regression test for bug that caused format to always be taken from
# main world coordinates.
overlay[0].set_major_formatter("d.ddd")
# On some systems, fig.canvas.draw is not enough to force a draw, so we
# save to a temporary file.
fig.savefig(tmp_path / "test3.png")
event5 = KeyEvent("test_pixel_coords", canvas, "w")
fig.canvas.callbacks.process("key_press_event", event5)
# Test that it displays the overlay world coordinates
string_world4 = ax._display_world_coords(0.523412, 0.518311)
assert string_world4 == "267.176\xb0 -28\xb045'56\" (world, overlay 2)"
overlay = ax.get_coords_overlay(FK5(equinox=Time("J2030")))
# Regression test for bug that caused format to always be taken from
# main world coordinates.
overlay[0].set_major_formatter("d.ddd")
# On some systems, fig.canvas.draw is not enough to force a draw, so we
# save to a temporary file.
fig.savefig(tmp_path / "test4.png")
event6 = KeyEvent("test_pixel_coords", canvas, "w")
fig.canvas.callbacks.process("key_press_event", event6)
# Test that it displays the overlay world coordinates
string_world5 = ax._display_world_coords(0.523412, 0.518311)
assert string_world5 == "267.652\xb0 -28\xb046'23\" (world, overlay 3)"
def test_cube_coords(self, ignore_matplotlibrc, tmp_path):
wcs = WCS(self.cube_header)
fig = plt.figure(figsize=(4, 4))
canvas = fig.canvas
ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8], wcs=wcs, slices=("y", 50, "x"))
fig.add_axes(ax)
# On some systems, fig.canvas.draw is not enough to force a draw, so we
# save to a temporary file.
fig.savefig(tmp_path / "test.png")
# Testing default displayed world coordinates
string_world = ax._display_world_coords(0.523412, 0.518311)
assert string_world == "3h26m52.0s 30\xb037'17\" 2563 (world)"
# Test pixel coordinates
event1 = KeyEvent("test_pixel_coords", canvas, "w")
fig.canvas.callbacks.process("key_press_event", event1)
string_pixel = ax._display_world_coords(0.523412, 0.523412)
assert string_pixel == "0.523412 0.523412 (pixel)"
def test_cube_coords_uncorr_slicing(self, ignore_matplotlibrc, tmp_path):
# Regression test for a bug that occurred with coordinate formatting if
# some dimensions were uncorrelated and sliced out.
wcs = WCS(self.cube_header)
fig = plt.figure(figsize=(4, 4))
canvas = fig.canvas
ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8], wcs=wcs, slices=("x", "y", 2))
fig.add_axes(ax)
# On some systems, fig.canvas.draw is not enough to force a draw, so we
# save to a temporary file.
fig.savefig(tmp_path / "test.png")
# Testing default displayed world coordinates
string_world = ax._display_world_coords(0.523412, 0.518311)
assert string_world == "3h26m56.6s 30\xb018'19\" (world)"
# Test pixel coordinates
event1 = KeyEvent("test_pixel_coords", canvas, "w")
fig.canvas.callbacks.process("key_press_event", event1)
string_pixel = ax._display_world_coords(0.523412, 0.523412)
assert string_pixel == "0.523412 0.523412 (pixel)"
def METHOD_NAME(self):
wcs = WCS(self.msx_header)
with galactocentric_frame_defaults.set("latest"):
coord = SkyCoord(0 * u.kpc, 0 * u.kpc, 0 * u.kpc, frame="galactocentric")
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1, projection=wcs)
(point,) = ax.plot_coord(coord, "ro")
np.testing.assert_allclose(point.get_xydata()[0], [0, 0], atol=1e-4) |
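A condensed sketch of the interaction these tests script: pressing 'w' cycles the cursor readout between world, pixel and overlay coordinates. Here wcs is a placeholder for any celestial 2D WCS:

import matplotlib.pyplot as plt
from matplotlib.backend_bases import KeyEvent

fig = plt.figure()
ax = fig.add_subplot(1, 1, 1, projection=wcs)  # wcs: placeholder 2D WCS
ax.get_coords_overlay("fk5")                   # registers an overlay system
event = KeyEvent("cycle_readout", fig.canvas, "w")
fig.canvas.callbacks.process("key_press_event", event)
print(ax._display_world_coords(0.5, 0.5))      # now reports pixel coordinates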
4,814 | test allows some none tags | #!/usr/bin/env python3
# Copyright 2023 OpenC3, Inc.
# All Rights Reserved.
#
# This program is free software; you can modify and/or redistribute it
# under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation; version 3 with
# attribution addendums as found in the LICENSE.txt
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# This file may also be used under the terms of a commercial license
# if purchased from OpenC3, Inc.
from datetime import datetime
import importlib
import unittest
from unittest.mock import *
from test.test_helper import *
from openc3.utilities.string import *
from openc3.utilities.logger import Logger
class QuoteIfNecessary(unittest.TestCase):
def test_quotes_strings_with_spaces(self):
self.assertEqual(quote_if_necessary("HelloWorld"), "HelloWorld")
self.assertEqual(quote_if_necessary("Hello World"), '"Hello World"')
class SimpleFormatted(unittest.TestCase):
def setUp(self):
self.data = []
for x in range(26, 48):
self.data.append(x)
self.data = bytes(self.data)
def test_formats_the_data(self):
self.assertEqual(
simple_formatted(self.data), "1A1B1C1D1E1F202122232425262728292A2B2C2D2E2F"
)
class TestFormatted(unittest.TestCase):
def setUp(self):
self.data = []
for x in range(26, 48):
self.data.append(x)
self.data = bytes(self.data)
def test_uses_1_byte_words(self):
self.assertEqual(
formatted(self.data).split("\n")[0],
"00000000: 1A 1B 1C 1D 1E 1F 20 21 22 23 24 25 26 27 28 29 !\"#$%&'()",
)
self.assertEqual(
formatted(self.data).split("\n")[1],
"00000010: 2A 2B 2C 2D 2E 2F *+,-./ ",
)
def test_uses_2_byte_words(self):
self.assertIn("00000000: 1A1B 1C1D 1E1F", formatted(self.data, 2, 8)) # ...
self.assertIn("00000010: 2A2B 2C2D 2E2F", formatted(self.data, 2, 8))
def test_changes_the_word_separator(self):
self.assertIn("00000000: 1A1B_1C1D_1E1F_2021", formatted(self.data, 2, 4, "_"))
self.assertIn("00000008: 2223_2425_2627_2829", formatted(self.data, 2, 4, "_"))
self.assertIn("00000010: 2A2B_2C2D_2E2F", formatted(self.data, 2, 4, "_"))
def test_indents_the_lines(self):
self.assertIn(" 00000000: 1A 1B 1C 1D", formatted(self.data, 1, 16, " ", 4))
def test_does_not_show_the_address(self):
self.assertIn("1A 1B 1C 1D", formatted(self.data, 1, 16, " ", 0, False))
def test_changes_the_address_separator(self):
self.assertIn(
"00000000= 1A 1B 1C 1D", formatted(self.data, 1, 16, " ", 0, True, "= ")
)
def test_does_not_show_the_ascii(self):
self.assertIn(
"29 !\"#$%&'()", formatted(self.data, 1, 16, "", 0, True, "", True)
)
self.assertNotIn(
"29 !\"#$%&'()",
formatted(self.data, 1, 16, "", 0, True, "", False),
)
def test_changes_the_ascii_separator(self):
self.assertIn(
"29__ !\"#$%&'()",
formatted(self.data, 1, 16, "", 0, True, "", True, "__"),
)
def test_changes_the_ascii_unprintable_character(self):
self.assertIn(
"29__xxxxxx !\"#$%&'()",
formatted(self.data, 1, 16, "", 0, True, "", True, "__", "x"),
)
def test_changes_the_line_separator(self):
self.assertEqual(
formatted(self.data, 1, 16, " ", 0, True, ": ", True, " ", " ", "~").split(
"~"
)[0],
"00000000: 1A 1B 1C 1D 1E 1F 20 21 22 23 24 25 26 27 28 29 !\"#$%&'()",
)
self.assertEqual(
formatted(self.data, 1, 16, " ", 0, True, ": ", True, " ", " ", "~").split(
"~"
)[1],
"00000010: 2A 2B 2C 2D 2E 2F *+,-./ ",
)
class TestBuildTimestampedFilename(unittest.TestCase):
def test_formats_the_time(self):
time = datetime.now()
timestamp = time.strftime("%Y_%m_%d_%H_%M_%S")
self.assertIn(timestamp, build_timestamped_filename(None, ".txt", time))
def test_allows_empty_tags(self):
self.assertRegex(build_timestamped_filename([]), r"\d\d\.txt")
def test_allows_none_tags(self):
self.assertRegex(build_timestamped_filename(None), r"\d\d\.txt")
def METHOD_NAME(self):
self.assertRegex(build_timestamped_filename([None, 1]), r"_1\.txt")
def test_includes_the_tags(self):
self.assertRegex(
build_timestamped_filename(["this", "is", "a", "test"]), r"this_is_a_test"
)
def test_changes_the_extension(self):
self.assertRegex(build_timestamped_filename(None, ".bin"), r"\.bin")
class ClassNameToFilename(unittest.TestCase):
def test_converts_a_class_name_to_a_filename(self):
self.assertEqual(class_name_to_filename("MyGreatClass"), "my_great_class")
self.assertEqual(
class_name_to_filename("MyGreatClass", True), "my_great_class.py"
)
class FilenameToClassName(unittest.TestCase):
def test_converts_a_filename_to_a_class_name(self):
self.assertEqual(
filename_to_class_name("path/to/something/my_great_class.rb"),
"MyGreatClass",
)
class ToClass(unittest.TestCase):
def test_returns_the_class_for_the_string(self):
importlib.import_module(".logger", "openc3.utilities")
self.assertEqual(
to_class("openc3.utilities.logger", "Logger").__class__.__name__,
Logger.__class__.__name__,
) |
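For orientation, the positional arguments of formatted exercised above are roughly (data, word size, words per line, word separator, indent, show address, address separator, show ASCII, ASCII separator, unprintable character, line separator); a small sketch:

data = bytes(range(26, 48))
print(simple_formatted(data))                 # "1A1B1C...2E2F", no separators
print(formatted(data, 2, 8, "_"))             # 2-byte words, eight per line, '_' separator
print(formatted(data, 1, 16, " ", 0, False))  # hex dump without addresses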
4,815 | url parameters | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"vm reapply",
)
class Reapply(AAZCommand):
"""Reapply VMs.
:example: Reapply a VM.
az vm reapply -g MyResourceGroup -n MyVm
:example: Reapply all VMs in a resource group.
az vm reapply --ids $(az vm list -g MyResourceGroup --query "[].id" -o tsv)
"""
_aaz_info = {
"version": "2020-06-01",
"resources": [
["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.compute/virtualmachines/{}/reapply", "2020-06-01"],
]
}
AZ_SUPPORT_NO_WAIT = True
def _handler(self, command_args):
super()._handler(command_args)
return self.build_lro_poller(self._execute_operations, None)
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.resource_group = AAZResourceGroupNameArg(
required=True,
)
_args_schema.vm_name = AAZStrArg(
options=["-n", "--name", "--vm-name"],
help="The name of the Virtual Machine. You can configure the default using `az configure --defaults vm=<name>`",
required=True,
id_part="name",
configured_default="vm",
)
return cls._args_schema
def _execute_operations(self):
self.pre_operations()
yield self.VirtualMachinesReapply(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
class VirtualMachinesReapply(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [202]:
return self.client.build_lro_polling(
self.ctx.args.no_wait,
session,
self.on_200,
self.on_error,
lro_options={"final-state-via": "azure-async-operation"},
path_format_arguments=self.METHOD_NAME,
)
if session.http_response.status_code in [200]:
return self.client.build_lro_polling(
self.ctx.args.no_wait,
session,
self.on_200,
self.on_error,
lro_options={"final-state-via": "azure-async-operation"},
path_format_arguments=self.METHOD_NAME,
)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/reapply",
**self.METHOD_NAME
)
@property
def method(self):
return "POST"
@property
def error_format(self):
return "ODataV4Format"
@property
def METHOD_NAME(self):
parameters = {
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
**self.serialize_url_param(
"vmName", self.ctx.args.vm_name,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2020-06-01",
required=True,
),
}
return parameters
def on_200(self, session):
pass
class _ReapplyHelper:
"""Helper class for Reapply"""
__all__ = ["Reapply"] |
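Because the command declares AZ_SUPPORT_NO_WAIT, the long-running reapply can be detached and polled later; example invocations in the docstring's style (resource names are placeholders, and the az vm wait pairing is an assumption):

az vm reapply -g MyResourceGroup -n MyVm --no-wait
az vm wait -g MyResourceGroup -n MyVm --updated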
4,816 | tear down | # (C) Copyright 2005-2023 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
""" Tests for the the "anytrait" static notifiers. """
import unittest
from traits import trait_notifiers
from traits.api import Float, HasTraits, Undefined
class AnytraitStaticNotifiers0Fail(HasTraits):
fail = Float
def _anytrait_changed():
raise Exception("error")
class AnytraitStaticNotifiers1Fail(HasTraits):
fail = Float
def _anytrait_changed(self):
raise Exception("error")
class AnytraitStaticNotifiers2Fail(HasTraits):
fail = Float
def _anytrait_changed(self, name):
raise Exception("error")
class AnytraitStaticNotifiers3Fail(HasTraits):
fail = Float
def _anytrait_changed(self, name, new):
raise Exception("error")
class AnytraitStaticNotifiers4Fail(HasTraits):
fail = Float
def _anytrait_changed(self, name, old, new):
raise Exception("error")
class TestNotifiers(unittest.TestCase):
""" Tests for the static notifiers, and the "anytrait" static notifiers.
"""
#### 'TestCase' protocol ##################################################
def setUp(self):
self.exceptions = []
trait_notifiers.push_exception_handler(self._handle_exception)
def METHOD_NAME(self):
trait_notifiers.pop_exception_handler()
#### Private protocol #####################################################
def _handle_exception(self, obj, name, old, new):
self.exceptions.append((obj, name, old, new))
#### Tests ################################################################
def test_anytrait_static_notifiers_0(self):
anycalls_0 = []
class AnytraitStaticNotifiers0(HasTraits):
ok = Float
def _anytrait_changed():
anycalls_0.append(True)
obj = AnytraitStaticNotifiers0(ok=2)
obj.ok = 3
self.assertEqual(len(anycalls_0), 2)
def test_anytrait_static_notifiers_1(self):
class AnytraitStaticNotifiers1(HasTraits):
ok = Float
def _anytrait_changed(self):
if not hasattr(self, "anycalls"):
self.anycalls = []
self.anycalls.append(True)
obj = AnytraitStaticNotifiers1(ok=2)
obj.ok = 3
# 3 calls (see test_anytrait_static_notifiers_4):
# 1 to add trait 'anycalls',
# 1 from the constructor,
# 1 to set ok to 3
self.assertEqual(len(obj.anycalls), 3)
def test_anytrait_static_notifiers_2(self):
class AnytraitStaticNotifiers2(HasTraits):
ok = Float
def _anytrait_changed(self, name):
if not hasattr(self, "anycalls"):
self.anycalls = []
self.anycalls.append(name)
obj = AnytraitStaticNotifiers2(ok=2)
obj.ok = 3
expected = ["trait_added", "ok", "ok"]
self.assertEqual(expected, obj.anycalls)
def test_anytrait_static_notifiers_3(self):
class AnytraitStaticNotifiers3(HasTraits):
ok = Float
def _anytrait_changed(self, name, new):
if not hasattr(self, "anycalls"):
self.anycalls = []
self.anycalls.append((name, new))
obj = AnytraitStaticNotifiers3(ok=2)
obj.ok = 3
expected = [("trait_added", "anycalls"), ("ok", 2), ("ok", 3)]
self.assertEqual(expected, obj.anycalls)
def test_anytrait_static_notifiers_4(self):
class AnytraitStaticNotifiers4(HasTraits):
ok = Float
def _anytrait_changed(self, name, old, new):
if not hasattr(self, "anycalls"):
self.anycalls = []
self.anycalls.append((name, old, new))
obj = AnytraitStaticNotifiers4(ok=2)
obj.ok = 3
expected = [
("trait_added", Undefined, "anycalls"),
("ok", 0, 2),
("ok", 2, 3),
]
self.assertEqual(expected, obj.anycalls)
def test_anytrait_static_notifiers_0_fail(self):
obj = AnytraitStaticNotifiers0Fail()
obj.fail = 1
self.assertEqual(self.exceptions, [(obj, "fail", 0, 1)])
def test_anytrait_static_notifiers_1_fail(self):
obj = AnytraitStaticNotifiers1Fail()
obj.fail = 1
self.assertEqual(self.exceptions, [(obj, "fail", 0, 1)])
def test_anytrait_static_notifiers_2_fail(self):
obj = AnytraitStaticNotifiers2Fail()
obj.fail = 1
self.assertEqual(self.exceptions, [(obj, "fail", 0, 1)])
def test_anytrait_static_notifiers_3_fail(self):
obj = AnytraitStaticNotifiers3Fail()
obj.fail = 1
self.assertEqual(self.exceptions, [(obj, "fail", 0, 1)])
def test_anytrait_static_notifiers_4_fail(self):
obj = AnytraitStaticNotifiers4Fail()
obj.fail = 1
self.assertEqual(self.exceptions, [(obj, "fail", 0, 1)]) |
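A minimal sketch of the convention under test: traits inspects the arity of _anytrait_changed and passes up to (name, old, new), as the four-argument variant shows:

from traits.api import Float, HasTraits

class Watched(HasTraits):
    value = Float

    def _anytrait_changed(self, name, old, new):
        print(f"{name}: {old} -> {new}")

w = Watched(value=1.0)  # prints "value: 0.0 -> 1.0"
w.value = 2.0           # prints "value: 1.0 -> 2.0"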
4,817 | export get productions | """ DISET request handler base class for the ProductionDB.
"""
from DIRAC import S_ERROR, S_OK, gLogger
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.Core.Utilities.DEncode import ignoreEncodeWarning
from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader
prodTypes = [str, int]
transTypes = [str, int, list]
class ProductionManagerHandlerMixin:
@classmethod
def initializeHandler(cls, serviceInfoDict):
"""Initialization of DB object"""
try:
result = ObjectLoader().loadObject("ProductionSystem.DB.ProductionDB", "ProductionDB")
if not result["OK"]:
return result
cls.productionDB = result["Value"]()
except RuntimeError as excp:
return S_ERROR(f"Can't connect to DB: {excp}")
return S_OK()
####################################################################
#
# These are the methods to manipulate the Productions table
#
types_addProduction = [prodTypes, str]
def export_addProduction(self, prodName, prodDescription):
credDict = self.getRemoteCredentials()
author = credDict.get("username", credDict.get("DN", credDict.get("CN")))
authorGroup = credDict.get("group")
res = self.productionDB.addProduction(prodName, prodDescription, author, authorGroup)
if res["OK"]:
gLogger.info("Added production", res["Value"])
return res
types_deleteProduction = [prodTypes]
def export_deleteProduction(self, prodName):
credDict = self.getRemoteCredentials()
author = credDict.get("username")
return self.productionDB.deleteProduction(prodName, author=author)
types_getProductions = []
@classmethod
def METHOD_NAME(
cls,
condDict=None,
older=None,
newer=None,
timeStamp="CreationDate",
orderAttribute=None,
limit=None,
offset=None,
):
if not condDict:
condDict = {}
return cls.productionDB.getProductions(
condDict=condDict,
older=older,
newer=newer,
timeStamp=timeStamp,
orderAttribute=orderAttribute,
limit=limit,
offset=offset,
)
types_getProduction = [prodTypes]
@classmethod
def export_getProduction(cls, prodName):
return cls.productionDB.getProduction(prodName)
types_getProductionParameters = [prodTypes, [str, list, tuple]]
@classmethod
def export_getProductionParameters(cls, prodName, parameters):
return cls.productionDB.getProductionParameters(prodName, parameters)
types_setProductionStatus = [prodTypes, str]
@classmethod
def export_setProductionStatus(cls, prodName, status):
return cls.productionDB.setProductionStatus(prodName, status)
types_startProduction = [prodTypes]
@classmethod
@ignoreEncodeWarning
def export_startProduction(cls, prodName):
return cls.productionDB.startProduction(prodName)
####################################################################
#
# These are the methods to manipulate the ProductionTransformations table
#
types_addTransformationsToProduction = [prodTypes, transTypes, transTypes]
@classmethod
def export_addTransformationsToProduction(cls, prodName, transIDs, parentTransIDs):
return cls.productionDB.addTransformationsToProduction(prodName, transIDs, parentTransIDs=parentTransIDs)
types_getProductionTransformations = []
@classmethod
def export_getProductionTransformations(
cls,
prodName,
condDict=None,
older=None,
newer=None,
timeStamp="CreationTime",
orderAttribute=None,
limit=None,
offset=None,
):
if not condDict:
condDict = {}
return cls.productionDB.getProductionTransformations(
prodName,
condDict=condDict,
older=older,
newer=newer,
timeStamp=timeStamp,
orderAttribute=orderAttribute,
limit=limit,
offset=offset,
)
####################################################################
#
# These are the methods to manipulate the ProductionSteps table
#
types_addProductionStep = [dict]
@classmethod
def export_addProductionStep(cls, prodStep):
stepName = prodStep["name"]
stepDescription = prodStep["description"]
stepLongDescription = prodStep["longDescription"]
stepBody = prodStep["body"]
stepType = prodStep["stepType"]
stepPlugin = prodStep["plugin"]
stepAgentType = prodStep["agentType"]
stepGroupSize = prodStep["groupsize"]
stepInputQuery = prodStep["inputquery"]
stepOutputQuery = prodStep["outputquery"]
res = cls.productionDB.addProductionStep(
stepName,
stepDescription,
stepLongDescription,
stepBody,
stepType,
stepPlugin,
stepAgentType,
stepGroupSize,
stepInputQuery,
stepOutputQuery,
)
if res["OK"]:
gLogger.info("Added production step %d" % res["Value"])
return res
types_getProductionStep = [int]
@classmethod
def export_getProductionStep(cls, stepID):
return cls.productionDB.getProductionStep(stepID)
class ProductionManagerHandler(ProductionManagerHandlerMixin, RequestHandler):
pass |
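A sketch of the payload export_addProductionStep unpacks, using the exact keys read above; the values are illustrative placeholders:

prod_step = {
    "name": "Simulation_step",
    "description": "MC simulation",
    "longDescription": "Generate and simulate events",
    "body": "<step body>",
    "stepType": "MCSimulation",
    "plugin": "Standard",
    "agentType": "Automatic",
    "groupsize": 1,
    "inputquery": {},
    "outputquery": {"datatype": "sim"},
}
# a service client would pass this dict straight through to addProductionStep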
4,818 | set up | # -*- coding: utf-8 -*-
from __future__ import division
from __future__ import print_function
import os
import sys
import unittest
# noinspection PyProtectedMember
from numpy.testing import assert_allclose
from numpy.testing import assert_array_less
from numpy.testing import assert_equal
from numpy.testing import assert_raises
from scipy.stats import rankdata
from sklearn.base import clone
from sklearn.metrics import roc_auc_score
# temporary solution for relative imports in case pyod is not installed
# if pyod is installed, no need to use the following line
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from pyod.models.ecod import ECOD
from pyod.utils.data import generate_data
class TestECODParallel(unittest.TestCase):
def METHOD_NAME(self):
self.n_train = 200
self.n_test = 100
self.contamination = 0.1
self.roc_floor = 0.8
self.X_train, self.X_test, self.y_train, self.y_test = generate_data(
n_train=self.n_train, n_test=self.n_test, n_features=10,
contamination=self.contamination, random_state=42)
self.clf = ECOD(contamination=self.contamination, n_jobs=2)
self.clf.fit(self.X_train)
# get a copy from the single thread copy
self.clf_ = ECOD(contamination=self.contamination)
self.clf_.fit(self.X_train)
def test_fit(self):
clf = ECOD(contamination=self.contamination, n_jobs=3)
clf.fit(self.X_train[:, :2])
def test_parameters(self):
assert (hasattr(self.clf, 'decision_scores_') and
self.clf.decision_scores_ is not None)
assert (hasattr(self.clf, 'labels_') and
self.clf.labels_ is not None)
assert (hasattr(self.clf, 'threshold_') and
self.clf.threshold_ is not None)
def test_train_scores(self):
assert_equal(len(self.clf.decision_scores_), self.X_train.shape[0])
assert_allclose(self.clf.decision_scores_, self.clf_.decision_scores_)
def test_prediction_scores(self):
pred_scores = self.clf.decision_function(self.X_test)
# check score shapes
assert_equal(pred_scores.shape[0], self.X_test.shape[0])
# check performance
assert (roc_auc_score(self.y_test, pred_scores) >= self.roc_floor)
def test_prediction_labels(self):
pred_labels = self.clf.predict(self.X_test)
assert_equal(pred_labels.shape, self.y_test.shape)
pred_labels_ = self.clf_.predict(self.X_test)
assert_equal(pred_labels, pred_labels_)
def test_prediction_proba(self):
pred_proba = self.clf.predict_proba(self.X_test)
assert (pred_proba.min() >= 0)
assert (pred_proba.max() <= 1)
def test_prediction_proba_linear(self):
pred_proba = self.clf.predict_proba(self.X_test, method='linear')
assert (pred_proba.min() >= 0)
assert (pred_proba.max() <= 1)
def test_prediction_proba_unify(self):
pred_proba = self.clf.predict_proba(self.X_test, method='unify')
assert (pred_proba.min() >= 0)
assert (pred_proba.max() <= 1)
def test_prediction_proba_parameter(self):
with assert_raises(ValueError):
self.clf.predict_proba(self.X_test, method='something')
def test_prediction_labels_confidence(self):
pred_labels, confidence = self.clf.predict(self.X_test,
return_confidence=True)
assert_equal(pred_labels.shape, self.y_test.shape)
assert_equal(confidence.shape, self.y_test.shape)
assert (confidence.min() >= 0)
assert (confidence.max() <= 1)
def test_prediction_proba_linear_confidence(self):
pred_proba, confidence = self.clf.predict_proba(self.X_test,
method='linear',
return_confidence=True)
assert (pred_proba.min() >= 0)
assert (pred_proba.max() <= 1)
assert_equal(confidence.shape, self.y_test.shape)
assert (confidence.min() >= 0)
assert (confidence.max() <= 1)
def test_fit_predict(self):
pred_labels = self.clf.fit_predict(self.X_train)
assert_equal(pred_labels.shape, self.y_train.shape)
def test_fit_predict_score(self):
self.clf.fit_predict_score(self.X_test, self.y_test)
self.clf.fit_predict_score(self.X_test, self.y_test,
scoring='roc_auc_score')
self.clf.fit_predict_score(self.X_test, self.y_test,
scoring='prc_n_score')
with assert_raises(NotImplementedError):
self.clf.fit_predict_score(self.X_test, self.y_test,
scoring='something')
def test_predict_rank(self):
pred_scores = self.clf.decision_function(self.X_test)
pred_ranks = self.clf._predict_rank(self.X_test)
# assert the order is preserved
assert_allclose(rankdata(pred_ranks), rankdata(pred_scores), atol=3)
assert_array_less(pred_ranks, self.X_train.shape[0] + 1)
assert_array_less(-0.1, pred_ranks)
def test_predict_rank_normalized(self):
pred_scores = self.clf.decision_function(self.X_test)
pred_ranks = self.clf._predict_rank(self.X_test, normalized=True)
# assert the order is preserved
assert_allclose(rankdata(pred_ranks), rankdata(pred_scores), atol=3)
assert_array_less(pred_ranks, 1.01)
assert_array_less(-0.1, pred_ranks)
def test_fit_single_feature_multiple_jobs(self):
clf = ECOD(contamination=self.contamination, n_jobs=5)
with assert_raises(ValueError):
clf.fit(self.X_train[:, 0])
# def test_plot(self):
# os, cutoff1, cutoff2 = self.clf.explain_outlier(ind=1)
# assert_array_less(0, os)
def test_model_clone(self):
clone_clf = clone(self.clf)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main() |
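A condensed sketch of the estimator workflow these tests cover:

from pyod.models.ecod import ECOD
from pyod.utils.data import generate_data

X_train, X_test, y_train, y_test = generate_data(
    n_train=200, n_test=100, n_features=10, contamination=0.1, random_state=42)
clf = ECOD(contamination=0.1, n_jobs=2)  # n_jobs=2 takes the parallel code path
clf.fit(X_train)
scores = clf.decision_function(X_test)   # higher score = more anomalous
labels, confidence = clf.predict(X_test, return_confidence=True)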
4,819 | test division detail api redirect | import urllib.parse
from django.contrib.auth.models import User
from django.test import TestCase
from dfirtrack_main.models import Division
class DivisionAPIViewTestCase(TestCase):
"""division API view tests"""
@classmethod
def setUpTestData(cls):
# create object
Division.objects.create(division_name='division_api_1')
# create user
User.objects.create_user(
username='testuser_division_api', password='tvjnIPBlhP9P3ixDHVE7'
)
def test_division_list_api_unauthorized(self):
"""unauthorized access is forbidden"""
# get response
response = self.client.get('/api/division/')
# compare
self.assertEqual(response.status_code, 401)
def test_division_list_api_method_get(self):
"""GET is allowed"""
# login testuser
self.client.login(
username='testuser_division_api', password='tvjnIPBlhP9P3ixDHVE7'
)
# get response
response = self.client.get('/api/division/')
# compare
self.assertEqual(response.status_code, 200)
def test_division_list_api_method_post(self):
"""POST is allowed"""
# login testuser
self.client.login(
username='testuser_division_api', password='tvjnIPBlhP9P3ixDHVE7'
)
# create POST string
poststring = {"division_name": "division_api_2"}
# get response
response = self.client.post('/api/division/', data=poststring)
# compare
self.assertEqual(response.status_code, 201)
def test_division_list_api_redirect(self):
"""test redirect with appending slash"""
# login testuser
self.client.login(
username='testuser_division_api', password='tvjnIPBlhP9P3ixDHVE7'
)
# create url
destination = urllib.parse.quote('/api/division/', safe='/')
# get response
response = self.client.get('/api/division', follow=True)
# compare
self.assertRedirects(
response, destination, status_code=301, target_status_code=200
)
def test_division_detail_api_unauthorized(self):
"""unauthorized access is forbidden"""
# get object
division_api_1 = Division.objects.get(division_name='division_api_1')
# get response
response = self.client.get(
'/api/division/' + str(division_api_1.division_id) + '/'
)
# compare
self.assertEqual(response.status_code, 401)
def test_division_detail_api_method_get(self):
"""GET is allowed"""
# get object
division_api_1 = Division.objects.get(division_name='division_api_1')
# login testuser
self.client.login(
username='testuser_division_api', password='tvjnIPBlhP9P3ixDHVE7'
)
# get response
response = self.client.get(
'/api/division/' + str(division_api_1.division_id) + '/'
)
# compare
self.assertEqual(response.status_code, 200)
def test_division_detail_api_method_delete(self):
"""DELETE is forbidden"""
# get object
division_api_1 = Division.objects.get(division_name='division_api_1')
# login testuser
self.client.login(
username='testuser_division_api', password='tvjnIPBlhP9P3ixDHVE7'
)
# get response
response = self.client.delete(
'/api/division/' + str(division_api_1.division_id) + '/'
)
# compare
self.assertEqual(response.status_code, 405)
def test_division_detail_api_method_put(self):
"""PUT is allowed"""
# get object
division_api_1 = Division.objects.get(division_name='division_api_1')
# login testuser
self.client.login(
username='testuser_division_api', password='tvjnIPBlhP9P3ixDHVE7'
)
# create url
destination = urllib.parse.quote(
'/api/division/' + str(division_api_1.division_id) + '/', safe='/'
)
# create PUT string
putstring = {"division_name": "new_division_api_1"}
# get response
response = self.client.put(
destination, data=putstring, content_type='application/json'
)
# compare
self.assertEqual(response.status_code, 200)
def METHOD_NAME(self):
"""test redirect with appending slash"""
# get object
division_api_1 = Division.objects.get(division_name='division_api_1')
# login testuser
self.client.login(
username='testuser_division_api', password='tvjnIPBlhP9P3ixDHVE7'
)
# create url
destination = urllib.parse.quote(
'/api/division/' + str(division_api_1.division_id) + '/', safe='/'
)
# get response
response = self.client.get(
'/api/division/' + str(division_api_1.division_id), follow=True
)
# compare
self.assertRedirects(
response, destination, status_code=301, target_status_code=200
) |
4,820 | mock delete | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.trial import unittest
from buildbot.interfaces import LatentWorkerFailedToSubstantiate
from buildbot.process.properties import Interpolate
from buildbot.process.properties import Properties
from buildbot.test.fake import fakemaster
from buildbot.test.fake.fakebuild import FakeBuildForRendering as FakeBuild
from buildbot.test.fake.fakeprotocol import FakeTrivialConnection as FakeBot
from buildbot.test.fake.kube import KubeClientService
from buildbot.test.reactor import TestReactorMixin
from buildbot.util.kubeclientservice import KubeError
from buildbot.util.kubeclientservice import KubeHardcodedConfig
from buildbot.worker import kubernetes
class FakeResult:
code = 204
def METHOD_NAME(*args):
return defer.succeed(FakeResult())
class TestKubernetesWorker(TestReactorMixin, unittest.TestCase):
worker = None
def setUp(self):
self.setup_test_reactor()
@defer.inlineCallbacks
def setupWorker(self, *args, **kwargs):
config = KubeHardcodedConfig(master_url="https://kube.example.com")
self.worker = worker = kubernetes.KubeLatentWorker(
*args, kube_config=config, **kwargs)
master = fakemaster.make_master(self, wantData=True)
self._kube = yield KubeClientService.getService(master, self, kube_config=config)
worker.setServiceParent(master)
yield master.startService()
self.assertTrue(config.running)
def cleanup():
self._kube.delete = METHOD_NAME
self.addCleanup(master.stopService)
self.addCleanup(cleanup)
return worker
def test_instantiate(self):
worker = kubernetes.KubeLatentWorker('worker')
# class instantiation configures nothing
self.assertEqual(getattr(worker, '_kube', None), None)
@defer.inlineCallbacks
def test_wrong_arg(self):
with self.assertRaises(TypeError):
yield self.setupWorker('worker', wrong_param='wrong_param')
def test_service_arg(self):
return self.setupWorker('worker')
@defer.inlineCallbacks
def test_builds_may_be_incompatible(self):
yield self.setupWorker('worker')
# http is lazily created on worker substantiation
self.assertEqual(self.worker.builds_may_be_incompatible, True)
@defer.inlineCallbacks
def test_start_service(self):
yield self.setupWorker('worker')
# http is lazily created on worker substantiation
self.assertNotEqual(self.worker._kube, None)
@defer.inlineCallbacks
def test_start_worker(self):
worker = yield self.setupWorker('worker')
d = worker.substantiate(None, FakeBuild())
worker.attached(FakeBot())
yield d
self.assertEqual(len(worker._kube.pods), 1)
pod_name = list(worker._kube.pods.keys())[0]
self.assertRegex(pod_name, r'default/buildbot-worker-[0-9a-f]+')
pod = worker._kube.pods[pod_name]
self.assertEqual(
sorted(pod['spec'].keys()), ['containers', 'restartPolicy'])
self.assertEqual(
sorted(pod['spec']['containers'][0].keys()),
['env', 'image', 'name', 'resources'])
self.assertEqual(pod['spec']['containers'][0]['image'],
'rendered:buildbot/buildbot-worker')
self.assertEqual(pod['spec']['restartPolicy'], 'Never')
@defer.inlineCallbacks
def test_start_worker_but_error(self):
worker = yield self.setupWorker('worker')
def createPod(namespace, spec):
raise KubeError({'message': "yeah, but no"})
self.patch(self._kube, 'createPod', createPod)
with self.assertRaises(LatentWorkerFailedToSubstantiate):
yield worker.substantiate(None, FakeBuild())
self.assertEqual(worker.instance, None)
@defer.inlineCallbacks
def test_interpolate_renderables_for_new_build(self):
build1 = Properties(img_prop="image1")
build2 = Properties(img_prop="image2")
worker = yield self.setupWorker('worker', image=Interpolate("%(prop:img_prop)s"))
yield worker.start_instance(build1)
yield worker.stop_instance()
self.assertTrue((yield worker.isCompatibleWithBuild(build2)))
@defer.inlineCallbacks
def test_reject_incompatible_build_while_running(self):
build1 = Properties(img_prop="image1")
build2 = Properties(img_prop="image2")
worker = yield self.setupWorker('worker', image=Interpolate("%(prop:img_prop)s"))
yield worker.start_instance(build1)
self.assertFalse((yield worker.isCompatibleWithBuild(build2))) |
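A sketch of the renderable-image configuration the last two tests exercise; the master URL is a placeholder:

config = KubeHardcodedConfig(master_url="https://kube.example.com")
worker = kubernetes.KubeLatentWorker(
    'k8s-worker',
    image=Interpolate("%(prop:img_prop)s"),  # re-rendered for every build
    kube_config=config,
)
# while a pod rendered from image1 is running, a build that renders image2
# is reported as incompatible rather than reusing the instance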
4,821 | drive letter iso | """
Windows virtio-win utilities
"""
import re
import logging
from . import drive
from . import system
from avocado.core import exceptions
ARCH_MAP_ISO = {"32-bit": "x86", "64-bit": "amd64"}
ARCH_MAP_VFD = {"32-bit": "i386", "64-bit": "amd64"}
LOG = logging.getLogger('avocado.' + __name__)
def arch_dirname_iso(session):
"""
Get architecture directory's name - iso media version.
:param session: Session object.
:return: Directory's name.
"""
return ARCH_MAP_ISO.get(system.os_arch(session))
def arch_dirname_vfd(session):
"""
Get architecture directory's name - vfd media version.
:param session: Session object.
:return: Directory's name.
"""
return ARCH_MAP_VFD.get(system.os_arch(session))
def _product_info(session):
# Some Windows systems present 'r' as the registered
# trademark character at the end of the string "Server"
match = re.search(r"Windows((?: )Serverr?)? (\S+)(?: (R2))?",
system.product_name(session), re.I)
if not match:
return ("", "", "")
server, name, suffix = match.groups()
server = server if server else ""
suffix = suffix if suffix else ""
return server, name, suffix
def product_dirname_iso(session):
"""
Get product directory's name - iso media version.
:param session: Session object.
:return: Directory's name.
"""
server, name, suffix = _product_info(session)
if not name:
return None
if server:
if len(name) == 4:
name = re.sub("0+", "k", name)
else:
if name[0].isdigit():
name = "w" + name
return name + suffix
def product_dirname_vfd(session):
"""
Get product directory's name - vfd media version.
:param session: Session object.
:return: Directory's name.
"""
server, name, suffix = _product_info(session)
if not name:
return None
return "Win" + name + suffix
def METHOD_NAME(session):
"""
Get virtio-win drive letter - iso media version.
:param session: Session object.
:return: Drive letter.
"""
return drive.get_hard_drive_letter(session, "virtio-win%")
def drive_letter_vfd(session):
"""
Get virtio-win drive letter - vfd media version.
:param session: Session object.
:return: Drive letter.
"""
for letter in drive.get_floppy_drives_letter(session):
# FIXME: addresses the drive accurately
return letter
return None
def _get_netkvmco_path(session):
"""
Get the proper netkvmco.dll path from iso.
:param session: a session to send cmd
:return: the proper netkvmco.dll path.
"""
viowin_ltr = METHOD_NAME(session)
if not viowin_ltr:
err = "Could not find virtio-win drive in guest"
raise exceptions.TestError(err)
guest_name = product_dirname_iso(session)
if not guest_name:
err = "Could not get product dirname of the vm"
raise exceptions.TestError(err)
guest_arch = arch_dirname_iso(session)
if not guest_arch:
err = "Could not get architecture dirname of the vm"
raise exceptions.TestError(err)
middle_path = "%s\\%s" % (guest_name, guest_arch)
find_cmd = 'dir /b /s %s\\netkvmco.dll | findstr "\\%s\\\\"'
find_cmd %= (viowin_ltr, middle_path)
netkvmco_path = session.cmd(find_cmd).strip()
LOG.info("Found netkvmco.dll file at %s" % netkvmco_path)
return netkvmco_path
def prepare_netkvmco(vm):
"""
Copy the proper netkvmco.dll to driver c:\\, and register it.
:param vm: the target vm
"""
LOG.info("Prepare the netkvmco.dll")
session = vm.wait_for_login(timeout=360)
try:
netkvmco_path = _get_netkvmco_path(session)
prepare_netkvmco_cmd = "xcopy %s c:\\ /y && "
prepare_netkvmco_cmd += "rundll32 netkvmco.dll,"
prepare_netkvmco_cmd += "RegisterNetKVMNetShHelper"
session.cmd(prepare_netkvmco_cmd % netkvmco_path, timeout=240)
finally:
session.close() |
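Worked examples of the directory-name mapping above:

# product_name "Windows Server 2008 R2" -> _product_info = (" Server", "2008", "R2")
#   product_dirname_iso -> "2k8R2"  (on servers, runs of zeros collapse to 'k')
#   product_dirname_vfd -> "Win2008R2"
# product_name "Windows 7" -> _product_info = ("", "7", "")
#   product_dirname_iso -> "w7"     (desktop versions get a 'w' prefix)
#   product_dirname_vfd -> "Win7"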
4,822 | record call | """
This module contains facilities for building a call graph for an Ada program.
The 'StaticCallGraphVisitor' class runs a call graph analysis on a libadalang
AdaNode via a visitor pattern (inherit from 'AdaVisitor') and makes available
its result call graph in the 'nodes' and 'edges' member variables.
"""
__copyright__ = "Copyright (c) 2020, Galois, Inc."
import logging
from typing import Callable, Dict, NewType, Optional, Set
import urllib.parse
import re
import libadalang as lal
from ada_visitor import AdaVisitor
from node_naming import GraphNode, NodeKey, get_node_key
class StaticCallGraphVisitor(AdaVisitor):
"""
Computes the static call graph within some AST node. Once `visit()` has
completed, you can read the call graph in the `edges` instance variable.
"""
def __init__(
self,
context: lal.AnalysisContext,
caller_being_defined: Optional[GraphNode],
nodes: Dict[NodeKey, GraphNode],
edges: Dict[NodeKey, Set[NodeKey]]
) -> None:
"""
Initialize the visitor. Because it is not very practical to locally
update the parameters when doing recursive calls, we suggest instead to
instantiate a new local visitor, run it, and then gather from its final
state whatever data you need. Avoids code duplication, at the price of
creating a bunch of short-lived instances.
"""
self.context: lal.AnalysisContext = context
self.caller_being_defined: Optional[GraphNode] = caller_being_defined
"""
Name of the caller currently being defined, that will be deemed the
caller of whatever call expression we encounter. This can either be a
function/procedure, or a file if we're at the top level.
"""
# INVARIANT
# All nodes appearing in edges, either via their key, or in the set of
# destinations, should be in the nodes set.
# There may be nodes that participate in no edges.
# We store nodes by key so that we can retrieve node instances by their
# key and avoid creating duplicates.
self.nodes: Dict[NodeKey, GraphNode] = nodes
"""All nodes in the graph, unsorted."""
if caller_being_defined:
# Register the caller as a node
caller_key = get_node_key(caller_being_defined)
if caller_key is not None and caller_key not in nodes:
nodes[caller_key] = caller_being_defined
self.edges: Dict[NodeKey, Set[NodeKey]] = edges
"""
Edges of the graph, keyed by their origin, valued by the set of
destinations for that origin.
"""
def get_graph_node_for_name(self, node: lal.Name) -> Optional[GraphNode]:
"""Returns the graph node for a name, creating it if none exists yet."""
key = get_node_key(node)
if key is None:
return None
if key not in self.nodes:
self.nodes[key] = node
return self.nodes[key]
def METHOD_NAME(self, callee: lal.Name) -> None:
"""Records a witnessed static function/procedure call to callee."""
if self.caller_being_defined is not None:
caller_key = get_node_key(self.caller_being_defined)
if caller_key is None:
return
callee_node = self.get_graph_node_for_name(callee)
if callee_node is None:
return
callee_key = get_node_key(callee_node)
if callee_key is None:
return
if caller_key not in self.edges:
self.edges[caller_key] = set()
self.edges[caller_key].add(callee_key)
def locally_visit(
self,
caller_being_defined: Optional[GraphNode],
callback: Callable[[AdaVisitor], None]
) -> None:
"""
Do something with a visitor locally overriding the values of certain
variables.
"""
local_visitor = StaticCallGraphVisitor(
context=self.context,
caller_being_defined=caller_being_defined,
nodes=self.nodes,
edges=self.edges
)
callback(local_visitor)
# pylint: disable=invalid-name, missing-function-docstring
def visit_CallExpr(self, node: lal.CallExpr):
self.METHOD_NAME(node.f_name)
# pylint: enable=invalid-name, missing-function-docstring
# pylint: disable=invalid-name, missing-function-docstring
def visit_CallStmt(self, node: lal.CallStmt):
self.METHOD_NAME(node.f_call)
# pylint: enable=invalid-name, missing-function-docstring
# pylint: disable=invalid-name, missing-function-docstring
def visit_PackageBody(self, node: lal.PackageBody) -> None:
def callback(visitor):
visitor.generic_visit(node.f_decls)
visitor.generic_visit(node.f_stmts)
self.locally_visit(
caller_being_defined=self.caller_being_defined,
callback=callback
)
# pylint: enable=invalid-name, missing-function-docstring
# pylint: disable=invalid-name, missing-function-docstring
def visit_SubpBody(self, node: lal.SubpBody) -> None:
spec = node.f_subp_spec
name = spec.f_subp_name
def callback(visitor):
# assumption: the spec does not contain calls, skipping it
visitor.visit(node.f_decls)
visitor.visit(node.f_stmts)
self.locally_visit(
caller_being_defined=name,
callback=callback
)
# pylint: enable=invalid-name, missing-function-docstring |
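A hedged driver sketch: visit() is assumed to come from the AdaVisitor base class, and main.adb is a placeholder unit:

import libadalang as lal

context = lal.AnalysisContext()
unit = context.get_from_file("main.adb")  # placeholder source file
visitor = StaticCallGraphVisitor(
    context=context, caller_being_defined=None, nodes={}, edges={})
visitor.visit(unit.root)                  # visit() provided by AdaVisitor
for caller_key, callee_keys in visitor.edges.items():
    print(caller_key, "->", callee_keys)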
4,823 | view | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import os
import six
import yaml
from builtins import object
# Describes where to search for the config file if no location is specified
from typing import Text
from rasa_nlu import utils
from rasa_nlu.utils import json_to_string
DEFAULT_CONFIG_LOCATION = "config.yml"
DEFAULT_CONFIG = {
"language": "en",
"pipeline": [],
"data": None,
}
logger = logging.getLogger(__name__)
class InvalidConfigError(ValueError):
"""Raised if an invalid configuration is encountered."""
def __init__(self, message):
# type: (Text) -> None
super(InvalidConfigError, self).__init__(message)
def load(filename=None, **kwargs):
if filename is None and os.path.isfile(DEFAULT_CONFIG_LOCATION):
filename = DEFAULT_CONFIG_LOCATION
if filename is not None:
try:
file_config = utils.read_yaml_file(filename)
except yaml.parser.ParserError as e:
raise InvalidConfigError("Failed to read configuration file "
"'{}'. Error: {}".format(filename, e))
if kwargs:
file_config.update(kwargs)
return RasaNLUModelConfig(file_config)
else:
return RasaNLUModelConfig(kwargs)
def override_defaults(defaults, custom):
cfg = defaults or {}
if custom:
cfg.update(custom)
return cfg
def make_path_absolute(path):
if path and not os.path.isabs(path):
return os.path.join(os.getcwd(), path)
else:
return path
class RasaNLUModelConfig(object):
DEFAULT_PROJECT_NAME = "default"
def __init__(self, configuration_values=None):
if not configuration_values:
configuration_values = {}
self.override(DEFAULT_CONFIG)
self.override(configuration_values)
if isinstance(self.__dict__['pipeline'], six.string_types):
from rasa_nlu import registry
template_name = self.__dict__['pipeline']
pipeline = registry.pipeline_template(template_name)
if pipeline:
# replaces the template with the actual components
self.__dict__['pipeline'] = pipeline
else:
known_templates = ", ".join(
registry.registered_pipeline_templates.keys())
raise InvalidConfigError("No pipeline specified and unknown "
"pipeline template '{}' passed. Known "
"pipeline templates: {}"
"".format(template_name,
known_templates))
for key, value in self.items():
setattr(self, key, value)
def __getitem__(self, key):
return self.__dict__[key]
def get(self, key, default=None):
return self.__dict__.get(key, default)
def __setitem__(self, key, value):
self.__dict__[key] = value
def __delitem__(self, key):
del self.__dict__[key]
def __contains__(self, key):
return key in self.__dict__
def __len__(self):
return len(self.__dict__)
def __getstate__(self):
return self.as_dict()
def __setstate__(self, state):
self.override(state)
def items(self):
return list(self.__dict__.items())
def as_dict(self):
return dict(list(self.items()))
def METHOD_NAME(self):
return json_to_string(self.__dict__, indent=4)
def for_component(self, name, defaults=None):
for c in self.pipeline:
if c.get("name") == name:
return override_defaults(defaults, c)
else:
return defaults or {}
@property
def component_names(self):
if self.pipeline:
return [c.get("name") for c in self.pipeline]
else:
return []
def set_component_attr(self, name, **kwargs):
for c in self.pipeline:
if c.get("name") == name:
c.update(kwargs)
else:
logger.warn("Tried to set configuration value for component '{}' "
"which is not part of the pipeline.".format(name))
def override(self, config):
if config:
self.__dict__.update(config) |
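A short sketch of loading and querying a config with the API above; the template and component names are historical rasa_nlu values and may vary:

# config.yml (assumed contents):
#   language: "en"
#   pipeline: "spacy_sklearn"   # template name, expanded via the registry
cfg = load("config.yml", data="data/examples.md")  # kwargs override file values
crf_config = cfg.for_component("ner_crf", defaults={"max_iterations": 50})
print(cfg.component_names)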
4,824 | create operation | # SPDX-License-Identifier: BSD-3-Clause
# Copyright 2018-2021, Intel Corporation
import memoryoperations
from reorderexceptions import NotSupportedOperationException
class OperationFactory:
"""
An abstract memory operation factory.
This object factory puts special constraints on names of classes.
It creates objects based on log in string format, as such the
classes have to start with a capital letter and the rest of the
name has to be in lowercase. For example::
STORE -> Store
FULL_REORDER -> Full_reorder
The object to be created has to have and internal **Factory** class
with a :func:`create` method taking a string parameter. For example see
:class:`memoryoperations.Store`.
:cvar __factories: The registered object factories.
:type __factories: dict
"""
__factories = {}
__suffix = [".BEGIN", ".END"]
memoryoperations.BaseOperation()
@staticmethod
def add_factory(id_, operation_factory):
"""
Explicitly register an object factory.
This method should be used when the factory cannot be inferred
from the name of the object to be created.
:param id_: The id under which this factory is to be registered
in the dictionary.
:type id_: str
:param operation_factory: The operation factory to be registered.
:return: None
"""
OperationFactory.__factories[id_] = operation_factory
@staticmethod
def METHOD_NAME(string_operation, markers, stack):
def check_marker_format(marker):
"""
Checks if marker has proper suffix.
"""
for s in OperationFactory.__suffix:
if marker.endswith(s):
return
raise NotSupportedOperationException(
"Incorrect marker format {}, suffix is missing.".format(marker)
)
def check_pair_consistency(stack, marker):
"""
Checks if markers do not cross.
You can pop from stack only if end
marker match previous one.
Example OK:
MACRO1.BEGIN
MACRO2.BEGIN
MACRO2.END
MACRO1.END
Example NOT OK:
MACRO1.BEGIN
MACRO2.BEGIN
MACRO1.END
MACRO2.END
"""
top = stack[-1][0]
if top.endswith(OperationFactory.__suffix[0]):
top = top[: -len(OperationFactory.__suffix[0])]
if marker.endswith(OperationFactory.__suffix[-1]):
marker = marker[: -len(OperationFactory.__suffix[-1])]
if top != marker:
raise NotSupportedOperationException(
"Cannot cross markers: {0}, {1}".format(top, marker)
)
"""
Creates the object based on the pre-formatted string.
The string needs to be in the specific format. Each specific value
in the string has to be separated with a `;`. The first field
has to be the name of the operation, the rest are operation
specific values.
:param string_operation: The string describing the operation.
:param markers: The dict describing the pair marker-engine.
:param stack: The stack describing the order of engine changes.
:return: The specific object instantiated based on the string.
"""
id_ = string_operation.split(";")[0]
id_case_sensitive = id_.lower().capitalize()
# checks if id_ is one of memoryoperation classes
mem_ops = getattr(memoryoperations, id_case_sensitive, None)
# if class is not one of memoryoperations
# it means it can be user defined marker
if mem_ops is None:
check_marker_format(id_)
# if id_ is section BEGIN
if id_.endswith(OperationFactory.__suffix[0]):
# BEGIN defined by user
marker_name = id_.partition(".")[0]
if markers is not None and marker_name in markers:
engine = markers[marker_name]
try:
mem_ops = getattr(memoryoperations, engine)
except AttributeError:
raise NotSupportedOperationException(
"Not supported reorder engine: {}".format(engine)
)
# BEGIN but not defined by user
else:
mem_ops = stack[-1][1]
if issubclass(mem_ops, memoryoperations.ReorderBase):
stack.append((id_, mem_ops))
# END section
elif id_.endswith(OperationFactory.__suffix[-1]):
check_pair_consistency(stack, id_)
stack.pop()
mem_ops = stack[-1][1]
# here we have proper memory operation to perform,
# it can be Store, Fence, ReorderDefault etc.
id_ = mem_ops.__name__
if id_ not in OperationFactory.__factories:
OperationFactory.__factories[id_] = mem_ops.Factory()
return OperationFactory.__factories[id_].create(string_operation) |
4,825 | test progress update | from contextlib import contextmanager
import numpy as np
from napari.utils import cancelable_progress, progrange, progress
@contextmanager
def assert_progress_added_to_all(prog):
"""Check prog is added to `progress_instances` on init & removed on close"""
assert prog in progress._all_instances
yield
assert prog not in progress._all_instances
def test_progress_with_iterable():
"""Test typical iterable is correctly built"""
r = range(100)
pbr = progress(r, desc='iterable')
assert pbr.iterable is r
assert pbr.n == 0
with assert_progress_added_to_all(pbr):
pbr.close()
def test_progress_with_ndarray():
"""Test 2D ndarray is correctly built"""
iter_ = np.random.random((100, 100))
pbr = progress(iter_, desc='ndarray')
assert pbr.iterable is iter_
assert pbr.n == 0
with assert_progress_added_to_all(pbr):
pbr.close()
def test_progress_with_total():
"""Test progress with total not iterable, and manual updates"""
pbr = progress(total=5, desc='total')
assert pbr.n == 0
pbr.update(1)
assert pbr.n == 1
with assert_progress_added_to_all(pbr):
pbr.close()
def test_progress_with_context():
"""Test context manager works as expected"""
with progress(range(100), desc='context') as pbr:
assert pbr in progress._all_instances
assert pbr.n == 0
assert pbr not in progress._all_instances
def METHOD_NAME():
"""Test update with different values"""
pbr = progress(total=10, desc='update')
assert pbr.n == 0
pbr.update(1)
pbr.refresh() # not sure why this has to be called manually here
assert pbr.n == 1
pbr.update(2)
pbr.refresh()
assert pbr.n == 3
pbr.update()
pbr.refresh()
assert pbr.n == 4
with assert_progress_added_to_all(pbr):
pbr.close()
def test_progress_set_description():
"""Test setting description works as expected"""
pbr = progress(total=5)
pbr.set_description("Test")
assert pbr.desc == "Test: "
pbr.close()
assert pbr not in progress._all_instances
def test_progress_set_disable():
"""Test that the progress bar does not throw an attribute error when it is disabled."""
# before the changes in #5964 this failed with an AttributeError, because self.desc was not
# set in the super constructor of tqdm
pbr = progress(
total=5, disable=True, desc="This description will not be set by tqdm."
)
# make sure the dummy description (default "progress") was set
assert pbr.desc == "progress: "
pbr.close()
def test_progrange():
"""Test progrange shorthand for progress(range(n))"""
with progrange(10) as pbr, progress(range(10)) as pbr2:
assert pbr.iterable == pbr2.iterable
assert pbr not in progress._all_instances
def test_progress_cancellation():
"""Test cancellation breaks the for loop"""
total = 10
pbr = cancelable_progress(range(total))
last_loop = -1
for i in pbr:
last_loop = i
# Let's cancel at i=total/2
if i == total / 2:
pbr.cancel()
assert pbr.is_canceled
assert last_loop == total / 2
def test_progress_cancellation_with_callback():
"""Test that cancellation runs the callback function"""
total = 10
last_loop = -1
expected_last_loop = -2
def cancel_callback():
nonlocal last_loop
last_loop = expected_last_loop
pbr = cancelable_progress(range(total), cancel_callback=cancel_callback)
for i in pbr:
last_loop = i
# Let's cancel at i=total/2
if i == total / 2:
pbr.cancel()
assert pbr.is_canceled
assert last_loop == expected_last_loop
def test_progress_cancellation_with_generator():
"""Test that cancellation closes a generator with a finally clause."""
closed = False
def test_generator():
try:
i = 0
while True:
yield i
i += 1
finally:
nonlocal closed
closed = True
pbr = cancelable_progress(test_generator())
for i in pbr:
if i == 5:
pbr.cancel()
assert pbr.is_canceled
assert closed |
4,826 | kind | # coding=utf-8
# *** WARNING: this file was generated by pulumigen. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ... import meta as _meta
__all__ = [
'PriorityClass',
]
@pulumi.output_type
class PriorityClass(dict):
"""
PriorityClass defines mapping from a priority class name to the priority integer value. The value can be any valid integer.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "apiVersion":
suggest = "api_version"
elif key == "globalDefault":
suggest = "global_default"
elif key == "preemptionPolicy":
suggest = "preemption_policy"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in PriorityClass. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
PriorityClass.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
PriorityClass.__key_warning(key)
return super().get(key, default)
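# Usage sketch (hypothetical instance `pc`): dict-style camelCase access
# still works but warns, pointing at the snake_case property getter:
#   pc["globalDefault"]  # warns: use the 'global_default' property instead
#   pc.global_default    # preferred access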
def __init__(__self__, *,
value: int,
api_version: Optional[str] = None,
description: Optional[str] = None,
global_default: Optional[bool] = None,
METHOD_NAME: Optional[str] = None,
metadata: Optional['_meta.v1.outputs.ObjectMeta'] = None,
preemption_policy: Optional[str] = None):
"""
PriorityClass defines mapping from a priority class name to the priority integer value. The value can be any valid integer.
:param int value: value represents the integer value of this priority class. This is the actual priority that pods receive when they have the name of this class in their pod spec.
:param str api_version: APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
:param str description: description is an arbitrary string that usually provides guidelines on when this priority class should be used.
:param bool global_default: globalDefault specifies whether this PriorityClass should be considered as the default priority for pods that do not have any priority class. Only one PriorityClass can be marked as `globalDefault`. However, if more than one PriorityClasses exists with their `globalDefault` field set to true, the smallest value of such global default PriorityClasses will be used as the default priority.
:param str kind: Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
:param '_meta.v1.ObjectMetaArgs' metadata: Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
:param str preemption_policy: preemptionPolicy is the Policy for preempting pods with lower priority. One of Never, PreemptLowerPriority. Defaults to PreemptLowerPriority if unset.
"""
pulumi.set(__self__, "value", value)
if api_version is not None:
pulumi.set(__self__, "api_version", 'scheduling.k8s.io/v1')
if description is not None:
pulumi.set(__self__, "description", description)
if global_default is not None:
pulumi.set(__self__, "global_default", global_default)
if METHOD_NAME is not None:
pulumi.set(__self__, "kind", 'PriorityClass')
if metadata is not None:
pulumi.set(__self__, "metadata", metadata)
if preemption_policy is not None:
pulumi.set(__self__, "preemption_policy", preemption_policy)
@property
@pulumi.getter
def value(self) -> int:
"""
value represents the integer value of this priority class. This is the actual priority that pods receive when they have the name of this class in their pod spec.
"""
return pulumi.get(self, "value")
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[str]:
"""
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
"""
return pulumi.get(self, "api_version")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
description is an arbitrary string that usually provides guidelines on when this priority class should be used.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="globalDefault")
def global_default(self) -> Optional[bool]:
"""
globalDefault specifies whether this PriorityClass should be considered as the default priority for pods that do not have any priority class. Only one PriorityClass can be marked as `globalDefault`. However, if more than one PriorityClasses exists with their `globalDefault` field set to true, the smallest value of such global default PriorityClasses will be used as the default priority.
"""
return pulumi.get(self, "global_default")
@property
@pulumi.getter
def METHOD_NAME(self) -> Optional[str]:
"""
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def metadata(self) -> Optional['_meta.v1.outputs.ObjectMeta']:
"""
Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
"""
return pulumi.get(self, "metadata")
@property
@pulumi.getter(name="preemptionPolicy")
def preemption_policy(self) -> Optional[str]:
"""
preemptionPolicy is the Policy for preempting pods with lower priority. One of Never, PreemptLowerPriority. Defaults to PreemptLowerPriority if unset.
"""
return pulumi.get(self, "preemption_policy")
|
4,827 | default config | """
svd
===
Autogenerated DPF operator classes.
"""
from warnings import warn
from ansys.dpf.core.dpf_operator import Operator
from ansys.dpf.core.inputs import Input, _Inputs
from ansys.dpf.core.outputs import Output, _Outputs
from ansys.dpf.core.operators.specification import PinSpecification, Specification
class svd(Operator):
"""Computes the matrix singular value decomposition (SVD) for each field
in the given fields container.
Parameters
----------
fields_container : FieldsContainer
Fields_container
Examples
--------
>>> from ansys.dpf import core as dpf
>>> # Instantiate operator
>>> op = dpf.operators.math.svd()
>>> # Make input connections
>>> my_fields_container = dpf.FieldsContainer()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> # Instantiate operator and connect inputs in one line
>>> op = dpf.operators.math.svd(
... fields_container=my_fields_container,
... )
>>> # Get output data
>>> result_s_svd = op.outputs.s_svd()
>>> result_u_svd = op.outputs.u_svd()
>>> result_vt_svd = op.outputs.vt_svd()
"""
def __init__(self, fields_container=None, config=None, server=None):
super().__init__(name="svdOp", config=config, server=server)
self._inputs = InputsSvd(self)
self._outputs = OutputsSvd(self)
if fields_container is not None:
self.inputs.fields_container.connect(fields_container)
@staticmethod
def _spec():
description = """Computes the matrix singular value decomposition (SVD) for each field
in the given fields container."""
spec = Specification(
description=description,
map_input_pin_spec={
0: PinSpecification(
name="fields_container",
type_names=["fields_container"],
optional=False,
document="""Fields_container""",
),
},
map_output_pin_spec={
0: PinSpecification(
name="s_svd",
type_names=["fields_container"],
optional=False,
document="""Singular values of the input data, where
a=u.s.vt""",
),
1: PinSpecification(
name="u_svd",
type_names=["fields_container"],
optional=False,
document="""U of the input data, where a=u.s.vt""",
),
2: PinSpecification(
name="vt_svd",
type_names=["fields_container"],
optional=False,
document="""Vt of the input data, where a=u.s.vt""",
),
},
)
return spec
@staticmethod
def METHOD_NAME(server=None):
"""Returns the default config of the operator.
This config can then be changed to the user's needs and used to
instantiate the operator. The Configuration allows you to customize
how the operation will be processed by the operator.
Parameters
----------
server : server.DPFServer, optional
Server with channel connected to the remote or local instance. When
``None``, attempts to use the global server.
"""
return Operator.METHOD_NAME(name="svdOp", server=server)
@property
def inputs(self):
"""Enables to connect inputs to the operator
Returns
--------
inputs : InputsSvd
"""
return super().inputs
@property
def outputs(self):
"""Enables to get outputs of the operator by evaluating it
Returns
--------
outputs : OutputsSvd
"""
return super().outputs
class InputsSvd(_Inputs):
"""Intermediate class used to connect user inputs to
svd operator.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.svd()
>>> my_fields_container = dpf.FieldsContainer()
>>> op.inputs.fields_container.connect(my_fields_container)
"""
def __init__(self, op: Operator):
super().__init__(svd._spec().inputs, op)
self._fields_container = Input(svd._spec().input_pin(0), 0, op, -1)
self._inputs.append(self._fields_container)
@property
def fields_container(self):
"""Allows to connect fields_container input to the operator.
Fields_container
Parameters
----------
my_fields_container : FieldsContainer
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.svd()
>>> op.inputs.fields_container.connect(my_fields_container)
>>> # or
>>> op.inputs.fields_container(my_fields_container)
"""
return self._fields_container
class OutputsSvd(_Outputs):
"""Intermediate class used to get outputs from
svd operator.
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.svd()
>>> # Connect inputs : op.inputs. ...
>>> result_s_svd = op.outputs.s_svd()
>>> result_u_svd = op.outputs.u_svd()
>>> result_vt_svd = op.outputs.vt_svd()
"""
def __init__(self, op: Operator):
super().__init__(svd._spec().outputs, op)
self._s_svd = Output(svd._spec().output_pin(0), 0, op)
self._outputs.append(self._s_svd)
self._u_svd = Output(svd._spec().output_pin(1), 1, op)
self._outputs.append(self._u_svd)
self._vt_svd = Output(svd._spec().output_pin(2), 2, op)
self._outputs.append(self._vt_svd)
@property
def s_svd(self):
"""Allows to get s_svd output of the operator
Returns
----------
my_s_svd : FieldsContainer
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.svd()
>>> # Connect inputs : op.inputs. ...
>>> result_s_svd = op.outputs.s_svd()
""" # noqa: E501
return self._s_svd
@property
def u_svd(self):
"""Allows to get u_svd output of the operator
Returns
----------
my_u_svd : FieldsContainer
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.svd()
>>> # Connect inputs : op.inputs. ...
>>> result_u_svd = op.outputs.u_svd()
""" # noqa: E501
return self._u_svd
@property
def vt_svd(self):
"""Allows to get vt_svd output of the operator
Returns
----------
my_vt_svd : FieldsContainer
Examples
--------
>>> from ansys.dpf import core as dpf
>>> op = dpf.operators.math.svd()
>>> # Connect inputs : op.inputs. ...
>>> result_vt_svd = op.outputs.vt_svd()
""" # noqa: E501
return self._vt_svd |
4,828 | infer | # Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import importlib
from functools import reduce
import os
import numpy as np
import math
import paddle
from paddlenlp import Taskflow
from ..base import ModelBaseOp
from ppcv.ops.base import create_operators, BaseOp
from ppcv.core.workspace import register
from .postprocess import *
@register
class SentimentAnalysisOp(BaseOp):
def __init__(self, model_cfg, env_cfg):
super(SentimentAnalysisOp, self).__init__(model_cfg, env_cfg)
mod = importlib.import_module(__name__)
env_cfg["batch_size"] = model_cfg.get("batch_size", 1)
self.batch_size = env_cfg["batch_size"]
self.name = model_cfg["name"]
self.frame = -1
keys = self.get_output_keys()
self.output_keys = [self.name + '.' + key for key in keys]
self.postprocessor = create_operators(model_cfg["PostProcess"], mod)
self._init_task(model_cfg)
def _init_task(self, model_cfg):
task = model_cfg.get('task', 'sentiment_analysis')
self.nlp = Taskflow(task)
@classmethod
def get_output_keys(cls):
return ["label"]
def postprocess(self, inputs, result):
outputs = result
for idx, ops in enumerate(self.postprocessor):
if idx == len(self.postprocessor) - 1:
outputs = ops(outputs, self.output_keys)
else:
outputs = ops(outputs)
return outputs
@classmethod
def type(cls):
return 'MODEL'
def METHOD_NAME(self, image_list):
inputs = []
batch_loop_cnt = math.ceil(float(len(image_list)) / self.batch_size)
results = []
for i in range(batch_loop_cnt):
start_index = i * self.batch_size
end_index = min((i + 1) * self.batch_size, len(image_list))
batch_image_list = image_list[start_index:end_index]
# preprocess
# model inference
result = self.nlp(batch_image_list)
# postprocess
result = self.postprocess(inputs, result)
results.extend(result)
# results = self.merge_batch_result(results)
return results
def __call__(self, inputs):
"""
step1: parse inputs
step2: run
step3: merge results
input: a list of dict
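hypothetical example, assuming input_keys == ["text"]:
    inputs = [{"text": ["good product", "bad product"]}, {"text": "okay"}]
list values are flattened for batching and re-grouped per input
dict in the returned pipe_outputs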
"""
key = self.input_keys[0]
is_list = False
if isinstance(inputs[0][key], (list, tuple)):
inputs = [input[key] for input in inputs]
is_list = True
else:
inputs = [[input[key]] for input in inputs]
sub_index_list = [len(input) for input in inputs]
inputs = reduce(lambda x, y: x.extend(y) or x, inputs)
# step2: run
outputs = self.METHOD_NAME(inputs)
# step3: merge
curr_offsef_id = 0
pipe_outputs = []
for idx in range(len(sub_index_list)):
sub_start_idx = curr_offsef_id
sub_end_idx = curr_offsef_id + sub_index_list[idx]
output = outputs[sub_start_idx:sub_end_idx]
output = {k: [o[k] for o in output] for k in output[0]}
if is_list is not True:
output = {k: output[k][0] for k in output}
pipe_outputs.append(output)
curr_offsef_id = sub_end_idx
return pipe_outputs
@register
class InformationExtractionOp(SentimentAnalysisOp):
def __init__(self, model_cfg, env_cfg):
super(InformationExtractionOp, self).__init__(model_cfg, env_cfg)
self._init_task(model_cfg)
def _init_task(self, model_cfg):
task = model_cfg.get('task', 'information_extraction')
schema = model_cfg.get('schema', ['时间', '地点', '人物'])
self.nlp = Taskflow(task, schema=schema)
@classmethod
def get_output_keys(cls):
return ["text", "type"] |
4,829 | test execute query succeeded no retry | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, unused-argument, import-outside-toplevel
from contextlib import nullcontext, suppress
from typing import Optional, Union
import pandas as pd
import pytest
from pytest_mock import MockFixture
from superset.reports.commands.exceptions import AlertQueryError
from superset.reports.models import ReportCreationMethod, ReportScheduleType
from superset.tasks.types import ExecutorType
from superset.utils.database import get_example_database
from tests.integration_tests.test_app import app
@pytest.mark.parametrize(
"owner_names,creator_name,config,expected_result",
[
(["gamma"], None, [ExecutorType.SELENIUM], "admin"),
(["gamma"], None, [ExecutorType.OWNER], "gamma"),
(
["alpha", "gamma"],
"gamma",
[ExecutorType.CREATOR_OWNER],
"gamma",
),
(
["alpha", "gamma"],
"alpha",
[ExecutorType.CREATOR_OWNER],
"alpha",
),
(
["alpha", "gamma"],
"admin",
[ExecutorType.CREATOR_OWNER],
AlertQueryError(),
),
(["gamma"], None, [ExecutorType.CURRENT_USER], AlertQueryError()),
],
)
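# Reading the cases above: given the schedule's owners/creator and the
# ALERT_REPORTS_EXECUTE_AS executor-type list, the alert query should run as
# the expected username, or raise AlertQueryError when no configured executor
# type resolves to a user.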
def test_execute_query_as_report_executor(
owner_names: list[str],
creator_name: Optional[str],
config: list[ExecutorType],
expected_result: Union[tuple[ExecutorType, str], Exception],
mocker: MockFixture,
app_context: None,
get_user,
) -> None:
from superset.reports.commands.alert import AlertCommand
from superset.reports.models import ReportSchedule
with app.app_context():
original_config = app.config["ALERT_REPORTS_EXECUTE_AS"]
app.config["ALERT_REPORTS_EXECUTE_AS"] = config
owners = [get_user(owner_name) for owner_name in owner_names]
report_schedule = ReportSchedule(
created_by=get_user(creator_name) if creator_name else None,
owners=owners,
type=ReportScheduleType.ALERT,
description="description",
crontab="0 9 * * *",
creation_method=ReportCreationMethod.ALERTS_REPORTS,
sql="SELECT 1",
grace_period=14400,
working_timeout=3600,
database=get_example_database(),
validator_config_json='{"op": "==", "threshold": 1}',
)
command = AlertCommand(report_schedule=report_schedule)
override_user_mock = mocker.patch(
"superset.reports.commands.alert.override_user"
)
cm = (
pytest.raises(type(expected_result))
if isinstance(expected_result, Exception)
else nullcontext()
)
with cm:
command.run()
assert override_user_mock.call_args[0][0].username == expected_result
app.config["ALERT_REPORTS_EXECUTE_AS"] = original_config
def METHOD_NAME(
mocker: MockFixture, app_context: None
) -> None:
from superset.reports.commands.alert import AlertCommand
execute_query_mock = mocker.patch(
"superset.reports.commands.alert.AlertCommand._execute_query",
side_effect=lambda: pd.DataFrame([{"sample_col": 0}]),
)
command = AlertCommand(report_schedule=mocker.Mock())
command.validate()
assert execute_query_mock.call_count == 1
def test_execute_query_succeeded_with_retries(
mocker: MockFixture, app_context: None
) -> None:
from superset.reports.commands.alert import AlertCommand, AlertQueryError
execute_query_mock = mocker.patch(
"superset.reports.commands.alert.AlertCommand._execute_query"
)
query_executed_count = 0
# Should match the value defined in superset_test_config.py
expected_max_retries = 3
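# (assumption: the test config sets something like
# ALERT_REPORTS_QUERY_EXECUTION_MAX_TRIES = 3)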
def _mocked_execute_query() -> pd.DataFrame:
nonlocal query_executed_count
query_executed_count += 1
if query_executed_count < expected_max_retries:
raise AlertQueryError()
else:
return pd.DataFrame([{"sample_col": 0}])
execute_query_mock.side_effect = _mocked_execute_query
execute_query_mock.__name__ = "mocked_execute_query"
command = AlertCommand(report_schedule=mocker.Mock())
command.validate()
assert execute_query_mock.call_count == expected_max_retries
def test_execute_query_failed_no_retry(mocker: MockFixture, app_context: None) -> None:
from superset.reports.commands.alert import AlertCommand, AlertQueryTimeout
execute_query_mock = mocker.patch(
"superset.reports.commands.alert.AlertCommand._execute_query"
)
def _mocked_execute_query() -> None:
raise AlertQueryTimeout
execute_query_mock.side_effect = _mocked_execute_query
execute_query_mock.__name__ = "mocked_execute_query"
command = AlertCommand(report_schedule=mocker.Mock())
with suppress(AlertQueryTimeout):
command.validate()
assert execute_query_mock.call_count == 1
def test_execute_query_failed_max_retries(
mocker: MockFixture, app_context: None
) -> None:
from superset.reports.commands.alert import AlertCommand, AlertQueryError
execute_query_mock = mocker.patch(
"superset.reports.commands.alert.AlertCommand._execute_query"
)
def _mocked_execute_query() -> None:
raise AlertQueryError
execute_query_mock.side_effect = _mocked_execute_query
execute_query_mock.__name__ = "mocked_execute_query"
command = AlertCommand(report_schedule=mocker.Mock())
with suppress(AlertQueryError):
command.validate()
# Should match the value defined in superset_test_config.py
assert execute_query_mock.call_count == 3 |
4,830 | teardown method | # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/pylint-dev/pylint/blob/main/LICENSE
# Copyright (c) https://github.com/pylint-dev/pylint/blob/main/CONTRIBUTORS.txt
import os
import re
import sys
from pathlib import Path
import astroid
from pylint.checkers import variables
from pylint.interfaces import HIGH
from pylint.testutils import CheckerTestCase, MessageTest, linter, set_config
from pylint.testutils.reporter_for_tests import GenericTestReporter
REGR_DATA_DIR = str(Path(__file__).parent / ".." / "regrtest_data")
class TestVariablesChecker(CheckerTestCase):
CHECKER_CLASS = variables.VariablesChecker
def test_all_elements_without_parent(self) -> None:
node = astroid.extract_node("__all__ = []")
node.value.elts.append(astroid.Const("test"))
root = node.root()
with self.assertNoMessages():
self.checker.visit_module(root)
self.checker.leave_module(root)
class TestVariablesCheckerWithTearDown(CheckerTestCase):
CHECKER_CLASS = variables.VariablesChecker
def setup_method(self) -> None:
super().setup_method()
self._to_consume_backup = self.checker._to_consume
self.checker._to_consume = []
def METHOD_NAME(self) -> None:
self.checker._to_consume = self._to_consume_backup
@set_config(callbacks=("callback_", "_callback"))
def test_custom_callback_string(self) -> None:
"""Test the --callbacks option works."""
node = astroid.extract_node(
"""
def callback_one(abc):
''' should not emit unused-argument. '''
"""
)
with self.assertNoMessages():
self.checker.visit_functiondef(node)
self.checker.leave_functiondef(node)
node = astroid.extract_node(
"""
def two_callback(abc, defg):
''' should not emit unused-argument. '''
"""
)
with self.assertNoMessages():
self.checker.visit_functiondef(node)
self.checker.leave_functiondef(node)
node = astroid.extract_node(
"""
def normal_func(abc):
''' should emit unused-argument. '''
"""
)
with self.assertAddsMessages(
MessageTest(
"unused-argument",
node=node["abc"],
args="abc",
confidence=HIGH,
line=2,
col_offset=16,
end_line=2,
end_col_offset=19,
)
):
self.checker.visit_functiondef(node)
self.checker.leave_functiondef(node)
node = astroid.extract_node(
"""
def cb_func(abc):
''' Previous callbacks are overridden. '''
"""
)
with self.assertAddsMessages(
MessageTest(
"unused-argument",
node=node["abc"],
args="abc",
confidence=HIGH,
line=2,
col_offset=12,
end_line=2,
end_col_offset=15,
)
):
self.checker.visit_functiondef(node)
self.checker.leave_functiondef(node)
@set_config(redefining_builtins_modules=("os",))
def test_redefined_builtin_modname_not_ignored(self) -> None:
node = astroid.parse(
"""
from future.builtins import open
"""
)
with self.assertAddsMessages(
MessageTest(
"redefined-builtin",
node=node.body[0],
args="open",
line=2,
col_offset=0,
end_line=2,
end_col_offset=32,
)
):
self.checker.visit_module(node)
@set_config(redefining_builtins_modules=("os",))
def test_redefined_builtin_in_function(self) -> None:
node = astroid.extract_node(
"""
def test():
from os import open
"""
)
with self.assertNoMessages():
self.checker.visit_module(node.root())
self.checker.visit_functiondef(node)
def test_import_as_underscore(self) -> None:
node = astroid.parse(
"""
import math as _
"""
)
with self.assertNoMessages():
self.walk(node)
def test_lambda_in_classdef(self) -> None:
# Make sure a lambda in a class def doesn't raise
# undefined-variable
# Issue 1824
# https://github.com/pylint-dev/pylint/issues/1824
node = astroid.parse(
"""
class MyObject(object):
method1 = lambda func: func()
method2 = lambda function: function()
"""
)
with self.assertNoMessages():
self.walk(node)
def test_nested_lambda(self) -> None:
"""Make sure variables from parent lambdas
aren't noted as undefined
https://github.com/pylint-dev/pylint/issues/760
"""
node = astroid.parse(
"""
lambda x: lambda: x + 1
"""
)
with self.assertNoMessages():
self.walk(node)
@set_config(ignored_argument_names=re.compile("arg"))
def test_ignored_argument_names_no_message(self) -> None:
"""Make sure is_ignored_argument_names properly ignores
function arguments
"""
node = astroid.parse(
"""
def fooby(arg):
pass
"""
)
with self.assertNoMessages():
self.walk(node)
@set_config(ignored_argument_names=re.compile("args|kwargs"))
def test_ignored_argument_names_starred_args(self) -> None:
node = astroid.parse(
"""
def fooby(*args, **kwargs):
pass
"""
)
with self.assertNoMessages():
self.walk(node)
class TestMissingSubmodule(CheckerTestCase):
CHECKER_CLASS = variables.VariablesChecker
@staticmethod
def test_package_all() -> None:
sys.path.insert(0, REGR_DATA_DIR)
try:
linter.check([os.path.join(REGR_DATA_DIR, "package_all")])
assert isinstance(linter.reporter, GenericTestReporter)
got = linter.reporter.finalize().strip()
assert got == "E: 3: Undefined variable name 'missing' in __all__"
finally:
sys.path.pop(0) |
4,831 | setup user in baggage and spans | import functools
import inspect
import os
import typing
from abc import ABCMeta
from operator import attrgetter
from typing import List, Optional, Union
from opentelemetry import baggage, context
from opentelemetry.context import Context, attach, detach, set_value
from opentelemetry.sdk.trace import Span
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.trace import Status, StatusCode, Tracer, get_current_span
def disable_instrumentation(wrapped_function):
@functools.wraps(wrapped_function)
def _wrapper(*args, **kwargs):
token = attach(set_value("suppress_instrumentation", True))
result = wrapped_function(*args, **kwargs)
detach(token)
return result
return _wrapper
# Attrs don't include the module name, to keep them short and easy to read,
# so we add the baserow. prefix manually.
BASEROW_OTEL_TRACE_ATTR_PREFIX = "baserow."
class BatchBaggageSpanProcessor(BatchSpanProcessor):
def on_start(
self, span: Span, parent_context: typing.Optional[Context] = None
) -> None:
super().on_start(span, parent_context)
get_all = baggage.get_all(context=parent_context)
for name, value in get_all.items():
span.set_attribute(name, value)
def METHOD_NAME(user, request):
if otel_is_enabled():
span = get_current_span()
def _set(name, attr, source, set_baggage=False):
try:
value = attrgetter(attr)(source)
except AttributeError:
value = None
if value:
span.set_attribute(name, value)
if set_baggage:
context.attach(baggage.set_baggage(name, value))
_set("user.id", "id", user, set_baggage=True)
_set("user.untrusted_client_session_id", "untrusted_client_session_id", user)
_set("user.token_id", "user_token.id", request)
def _baserow_trace_func(wrapped_func, tracer: Tracer):
@functools.wraps(wrapped_func)
def _wrapper(*args, **kwargs):
with tracer.start_as_current_span(
wrapped_func.__module__ + "." + wrapped_func.__qualname__
) as span:
try:
result = wrapped_func(*args, **kwargs)
except Exception as ex:
span.set_status(Status(StatusCode.ERROR))
span.record_exception(ex)
raise ex
return result
return _wrapper
def baserow_trace_methods(
tracer: Tracer,
only: Optional[Union[str, List[str]]] = None,
exclude: Optional[Union[str, List[str]]] = None,
abc: bool = False,
):
"""
Automatically traces all public methods, or specific methods of a class depending
on the arguments.
You need to use this if you want to, say, trace every implementation of an abstract
method, as decorating the method itself will get overridden by the subclasses, whereas
this metaclass will wrap the method when the subclass itself is created (the class,
not the instances!)
If you want to use both this metaclass and abc.ABC, set abc=True.
Using a metaclass is the recommended Python way of automatically decorating
all/some functions in a class.
:param tracer: An otel Tracer, add `tracer = trace.get_tracer(__name__)` to the top
of your file to get one.
:param only: The name of the only function you want to trace or a list of names.
:param exclude: The name of the function you do not want to trace or a list of
names.
:param abc: Whether this class should also be an abstract base class.
"""
if only and not isinstance(only, list):
only = [only]
if exclude and not isinstance(exclude, list):
exclude = [exclude]
super_class = ABCMeta if abc else type
class TraceMethodsMetaClass(super_class):
def __new__(cls, name, bases, local):
for attr in local:
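# note: despite its name, _should_trace_attr returns True for attributes
# to *skip* (private, not in `only`, or listed in `exclude`)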
if cls._should_trace_attr(attr):
continue
value = local[attr]
if inspect.isfunction(value):
local[attr] = _baserow_trace_func(value, tracer)
return super().__new__(cls, name, bases, local)
@staticmethod
def _should_trace_attr(attr):
return (
attr.startswith("_")
or (only and attr not in only)
or (exclude and attr in exclude)
)
return TraceMethodsMetaClass
def baserow_trace(tracer):
"""
Decorates a function to send a span of its execution. This will let you see how
long the function took in your telemetry platform.
:param tracer: An otel Tracer, add `tracer = trace.get_tracer(__name__)` to the top
of your file to get one.
"""
if not isinstance(tracer, Tracer):
raise Exception(
f"Must provider a tracer to baserow_trace, instead you gave me a "
f"{type(tracer)}. Get "
"one using "
"`tracer = trace.get_tracer(__name__)`."
)
def inner(wrapped_function_or_cls):
return _baserow_trace_func(wrapped_function_or_cls, tracer)
return inner
def add_baserow_trace_attrs(**kwargs):
"""
Simple helper function for quickly adding attributes to the current span. The
attribute names will be prefixed with baserow. to namespace them properly.
:param kwargs: Key value pairs, the key will be the attr name prefixed with
baserow. and the value will be the span attribute value.
"""
span = get_current_span()
for key, value in kwargs.items():
span.set_attribute(f"{BASEROW_OTEL_TRACE_ATTR_PREFIX}{key}", value)
def otel_is_enabled():
env_var_set = bool(os.getenv("BASEROW_ENABLE_OTEL", False))
not_in_tests = (
os.getenv("DJANGO_SETTINGS_MODULE", "").strip()
!= "baserow.config.settings.test"
)
return env_var_set and not_in_tests |
4,832 | test encoding loop | # This file is part of Hypothesis, which may be found at
# https://github.com/HypothesisWorks/hypothesis/
#
# Copyright the Hypothesis Authors.
# Individual contributors are listed in AUTHORS.rst and the git log.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
import base64
import re
import zlib
import pytest
from hypothesis import (
Verbosity,
__version__,
example,
given,
reject,
reproduce_failure,
settings,
strategies as st,
)
from hypothesis.core import decode_failure, encode_failure
from hypothesis.errors import DidNotReproduce, InvalidArgument, UnsatisfiedAssumption
from tests.common.utils import capture_out, no_shrink
@example(bytes(20)) # shorter compressed
@example(bytes(3)) # shorter uncompressed
@given(st.binary() | st.binary(min_size=100))
def METHOD_NAME(b):
assert decode_failure(encode_failure(b)) == b
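# A sketch of the encoding (not the exact implementation): the buffer is
# base64-encoded either raw with a b"\0" prefix or zlib-compressed with a
# b"\1" prefix, whichever is shorter -- hence the "shorter compressed" /
# "shorter uncompressed" @example cases above.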
@example(base64.b64encode(b"\2\3\4"))
@example(b"\t")
@example(base64.b64encode(b"\1\0")) # zlib error
@given(st.binary())
def test_decoding_may_fail(t):
try:
decode_failure(t)
reject()
except UnsatisfiedAssumption:
raise # don't silence the reject()
except InvalidArgument:
pass
except Exception as e:
raise AssertionError("Expected an InvalidArgument exception") from e
def test_invalid_base_64_gives_invalid_argument():
with pytest.raises(InvalidArgument) as exc_info:
decode_failure(b"/")
assert "Invalid base64 encoded" in exc_info.value.args[0]
def test_reproduces_the_failure():
b = b"hello world"
n = len(b)
@reproduce_failure(__version__, encode_failure(b))
@given(st.binary(min_size=n, max_size=n))
def test_outer(x):
assert x != b
@given(st.binary(min_size=n, max_size=n))
@reproduce_failure(__version__, encode_failure(b))
def test_inner(x):
assert x != b
with pytest.raises(AssertionError):
test_outer()
with pytest.raises(AssertionError):
test_inner()
def test_errors_if_provided_example_does_not_reproduce_failure():
b = b"hello world"
n = len(b)
@reproduce_failure(__version__, encode_failure(b))
@given(st.binary(min_size=n, max_size=n))
def test(x):
assert x == b
with pytest.raises(DidNotReproduce):
test()
def test_errors_with_did_not_reproduce_if_the_shape_changes():
b = b"hello world"
n = len(b)
@reproduce_failure(__version__, encode_failure(b))
@given(st.binary(min_size=n + 1, max_size=n + 1))
def test(x):
assert x == b
with pytest.raises(DidNotReproduce):
test()
def test_errors_with_did_not_reproduce_if_rejected():
b = b"hello world"
n = len(b)
@reproduce_failure(__version__, encode_failure(b))
@given(st.binary(min_size=n, max_size=n))
def test(x):
reject()
with pytest.raises(DidNotReproduce):
test()
def test_prints_reproduction_if_requested():
failing_example = [None]
@settings(print_blob=True, database=None, max_examples=100)
@given(st.integers())
def test(i):
if failing_example[0] is None and i != 0:
failing_example[0] = i
assert i not in failing_example
with pytest.raises(AssertionError) as err:
test()
notes = "\n".join(err.value.__notes__)
assert "@reproduce_failure" in notes
exp = re.compile(r"reproduce_failure\(([^)]+)\)", re.MULTILINE)
extract = exp.search(notes)
reproduction = eval(extract.group(0))
test = reproduction(test)
with pytest.raises(AssertionError):
test()
def test_does_not_print_reproduction_for_simple_examples_by_default():
@settings(print_blob=False)
@given(st.integers())
def test(i):
raise AssertionError
with capture_out() as o:
with pytest.raises(AssertionError):
test()
assert "@reproduce_failure" not in o.getvalue()
def test_does_not_print_reproduction_for_simple_data_examples_by_default():
@settings(print_blob=False)
@given(st.data())
def test(data):
data.draw(st.integers())
raise AssertionError
with capture_out() as o:
with pytest.raises(AssertionError):
test()
assert "@reproduce_failure" not in o.getvalue()
def test_does_not_print_reproduction_for_large_data_examples_by_default():
@settings(phases=no_shrink, print_blob=False)
@given(st.data())
def test(data):
b = data.draw(st.binary(min_size=1000, max_size=1000))
if len(zlib.compress(b)) > 1000:
raise ValueError
with capture_out() as o:
with pytest.raises(ValueError):
test()
assert "@reproduce_failure" not in o.getvalue()
class Foo:
def __repr__(self):
return "not a valid python expression"
def test_does_not_print_reproduction_if_told_not_to():
@settings(print_blob=False)
@given(st.integers().map(lambda x: Foo()))
def test(i):
raise ValueError
with capture_out() as o:
with pytest.raises(ValueError):
test()
assert "@reproduce_failure" not in o.getvalue()
def test_raises_invalid_if_wrong_version():
b = b"hello world"
n = len(b)
@reproduce_failure("1.0.0", encode_failure(b))
@given(st.binary(min_size=n, max_size=n))
def test(x):
pass
with pytest.raises(InvalidArgument):
test()
def test_does_not_print_reproduction_if_verbosity_set_to_quiet():
@given(st.data())
@settings(verbosity=Verbosity.quiet, print_blob=False)
def test_always_fails(data):
assert data.draw(st.just(False))
with capture_out() as out:
with pytest.raises(AssertionError):
test_always_fails()
assert "@reproduce_failure" not in out.getvalue() |
4,833 | requested task | import json
from uuid import uuid4
import pytest
class TestRequestedTaskList:
url = "/requested-tasks/"
def _assert_requested_task(self, task, item):
assert set(item.keys()) == {
"_id",
"status",
"schedule_name",
"timestamp",
"config",
"requested_by",
"priority",
"worker",
}
assert item["_id"] == str(task["_id"])
assert item["status"] == task["status"]
assert item["schedule_name"] == task["schedule_name"]
@pytest.mark.parametrize(
"query_param", [{"matching_cpu": "-2"}, {"matching_memory": -1}]
)
def test_bad_request(self, client, query_param):
headers = {"Content-Type": "application/json"}
response = client.get(self.url, headers=headers, query_string=query_param)
assert response.status_code == 400
def test_list_requested_tasks(self, client, requested_tasks):
headers = {"Content-Type": "application/json"}
response = client.get(self.url, headers=headers)
assert response.status_code == 200
data = json.loads(response.data)
assert data["meta"]["limit"] == 20
assert data["meta"]["skip"] == 0
items = data["items"]
# items ordering is done by DB and not important to us
# but we need to match our requests with result to test resulting data
sorted_requested_tasks = list(
map(
lambda item: [
r for r in requested_tasks if str(r["_id"]) == item["_id"]
][-1],
items,
)
)
assert len(items) == len(sorted_requested_tasks)
# assert sorting
assert str(sorted_requested_tasks[0]["_id"]) == items[0]["_id"]
for index, task in enumerate(sorted_requested_tasks):
item = items[index]
self._assert_requested_task(task, item)
@pytest.mark.parametrize(
"matching, expected",
[
[{"cpu": 3, "memory": 1024, "disk": 1024}, 20],
[
{
"cpu": 3,
"memory": 1024,
"disk": 1024,
"offliners": ["mwoffliner", "phet", "gutenberg", "youtube"],
},
20,
],
[
{
"cpu": 2,
"memory": 1024,
"disk": 1024,
"offliners": ["mwoffliner", "phet", "gutenberg", "youtube"],
},
0,
],
[
{
"cpu": 3,
"memory": 1023,
"disk": 1024,
"offliners": ["mwoffliner", "phet", "gutenberg", "youtube"],
},
0,
],
[
{
"cpu": 3,
"memory": 1024,
"disk": 1023,
"offliners": ["mwoffliner", "phet", "gutenberg", "youtube"],
},
0,
],
[
{
"cpu": 3,
"memory": 1024,
"disk": 1024,
"offliners": ["mwoffliner", "phet", "gutenberg"],
},
0,
],
],
)
def test_list_matching(self, client, requested_tasks, matching, expected):
url = f"{self.url}?"
for key, value in matching.items():
if isinstance(value, list):
for lvalue in value:
url += f"matching_{key}={lvalue}&"
else:
url += f"matching_{key}={value}&"
headers = {"Content-Type": "application/json"}
response = client.get(url, headers=headers)
assert response.status_code == 200
data = json.loads(response.data)
items = data["items"]
assert len(items) == expected
def test_list_pagination(self, client, requested_tasks):
url = "/requested-tasks/?limit={}&skip={}".format(10, 5)
headers = {"Content-Type": "application/json"}
response = client.get(url, headers=headers)
assert response.status_code == 200
data = json.loads(response.data)
assert data["meta"]["limit"] == 10
assert data["meta"]["skip"] == 5
class TestRequestedTaskGet:
def test_not_found(self, client):
url = f"/requested-tasks/{uuid4()}"
headers = {"Content-Type": "application/json"}
response = client.get(url, headers=headers)
assert response.status_code == 404
def test_not_uuid(self, client):
url = "/requested-tasks/imnotauuid"
headers = {"Content-Type": "application/json"}
response = client.get(url, headers=headers)
assert response.status_code == 400
response_json = response.get_json()
assert "error" in response_json
def test_get(self, client, METHOD_NAME):
url = "/requested-tasks/{}".format(METHOD_NAME["_id"])
headers = {"Content-Type": "application/json"}
response = client.get(url, headers=headers)
assert response.status_code == 200
data = json.loads(response.data)
assert data["_id"] == str(METHOD_NAME["_id"])
assert data["status"] == METHOD_NAME["status"]
assert "schedule_name" in data
assert data["schedule_name"] == METHOD_NAME["schedule_name"]
assert "timestamp" in data
assert "events" in data
class TestRequestedTaskCreate:
@pytest.fixture()
def METHOD_NAME(self, make_requested_task):
METHOD_NAME = make_requested_task()
return METHOD_NAME
def test_create_from_schedule(self, client, access_token, schedule):
url = "/requested-tasks/"
headers = {"Authorization": access_token, "Content-Type": "application/json"}
response = client.post(
url,
headers=headers,
data=json.dumps({"schedule_names": [schedule["name"]]}),
)
assert response.status_code == 201
def test_create_with_wrong_schedule(self, client, access_token, schedule):
url = "/requested-tasks/"
headers = {"Authorization": access_token, "Content-Type": "application/json"}
response = client.post(
url, headers=headers, data=json.dumps({"schedule_names": ["hello"]})
)
assert response.status_code == 404 |
4,834 | test user cannot reply if status is | from unittest.mock import ANY
import pytest
from grants.models import Grant
pytestmark = pytest.mark.django_db
def _send_grant_reply(graphql_client, grant, *, status, message=""):
document = """
mutation sendGrantReply ($input: SendGrantReplyInput!) {
sendGrantReply(input: $input) {
__typename
...on Grant {
id
}
...on SendGrantReplyError {
message
}
}
}
"""
variables = {
"status": status,
"message": message,
"instance": grant.id,
}
return graphql_client.query(document, variables={"input": variables})
def test_user_is_not_the_owner(graphql_client, user, grant_factory, user_factory):
graphql_client.force_login(user)
other_user = user_factory()
grant = grant_factory(user_id=other_user.id)
response = _send_grant_reply(graphql_client, grant, status="refused")
assert response["data"]["sendGrantReply"]["__typename"] == "SendGrantReplyError"
assert (
response["data"]["sendGrantReply"]["message"]
== "You cannot reply to this grant"
)
def METHOD_NAME(graphql_client, user, grant_factory):
graphql_client.force_login(user)
grant = grant_factory(user_id=user.id, status=Grant.Status.pending)
response = _send_grant_reply(graphql_client, grant, status="refused")
assert response["data"]["sendGrantReply"]["__typename"] == "SendGrantReplyError"
assert (
response["data"]["sendGrantReply"]["message"]
== "You cannot reply to this grant"
)
def test_user_cannot_reply_if_status_is_rejected(graphql_client, user, grant_factory):
graphql_client.force_login(user)
grant = grant_factory(user_id=user.id, status=Grant.Status.rejected)
response = _send_grant_reply(graphql_client, grant, status="refused")
assert response["data"]["sendGrantReply"]["__typename"] == "SendGrantReplyError"
assert (
response["data"]["sendGrantReply"]["message"]
== "You cannot reply to this grant"
)
def test_status_is_not_updated_when_the_reply_is_need_info(
graphql_client, user, grant_factory
):
graphql_client.force_login(user)
grant = grant_factory(user_id=user.id, status=Grant.Status.waiting_for_confirmation)
response = _send_grant_reply(graphql_client, grant, status="need_info")
assert response["data"]["sendGrantReply"]["__typename"] == "Grant"
grant.refresh_from_db()
assert grant.status == Grant.Status.waiting_for_confirmation
def test_status_is_updated_when_reply_is_confirmed(graphql_client, user, grant_factory):
graphql_client.force_login(user)
grant = grant_factory(user_id=user.id, status=Grant.Status.waiting_for_confirmation)
response = _send_grant_reply(graphql_client, grant, status="confirmed")
assert response["data"]["sendGrantReply"]["__typename"] == "Grant"
grant.refresh_from_db()
assert grant.status == Grant.Status.confirmed
def test_status_is_updated_when_reply_is_refused(graphql_client, user, grant_factory):
graphql_client.force_login(user)
grant = grant_factory(user_id=user.id, status=Grant.Status.waiting_for_confirmation)
response = _send_grant_reply(graphql_client, grant, status="refused")
assert response["data"]["sendGrantReply"]["__typename"] == "Grant"
grant.refresh_from_db()
assert grant.status == Grant.Status.refused
def test_send_plain_when_user_send_a_message(
graphql_client, user, grant_factory, mocker
):
graphql_client.force_login(user)
grant = grant_factory(user_id=user.id, status=Grant.Status.waiting_for_confirmation)
mock_publisher = mocker.patch("api.grants.mutations.send_message_to_plain")
response = _send_grant_reply(
graphql_client, grant, status="need_info", message="wtf"
)
assert response["data"]["sendGrantReply"]["__typename"] == "Grant"
mock_publisher.assert_called_once_with(grant, "wtf")
def test_call_notify_new_grant_reply(rf, graphql_client, user, grant_factory, mocker):
graphql_client.force_login(user)
grant = grant_factory(user_id=user.id, status=Grant.Status.waiting_for_confirmation)
mock_publisher = mocker.patch("api.grants.mutations.notify_new_grant_reply")
response = _send_grant_reply(graphql_client, grant, status="refused", message="wtf")
assert response["data"]["sendGrantReply"]["__typename"] == "Grant"
mock_publisher.assert_called_once_with(grant, ANY) |
4,835 | test standalone | import shutil
import tempfile
import os.path
from nose.tools import eq_
from nose.tools import raises
from nose.tools import with_setup
from build_pack_utils import BuildPack
from build_pack_utils.runner import CalledProcessError
from common.integration import FileAssertHelper
from common.integration import ErrorHelper
from common.components import BuildPackAssertHelper
from common.components import HttpdAssertHelper
from common.components import NginxAssertHelper
from common.components import PhpAssertHelper
from common.components import NoWebServerAssertHelper
from common.components import DownloadAssertHelper
from common.base import BaseCompileApp
class TestCompileApp1(BaseCompileApp):
def __init__(self):
self.app_name = 'app-1'
def test_with_httpd(self):
# helpers to confirm the environment
bp = BuildPackAssertHelper()
httpd = HttpdAssertHelper()
php = PhpAssertHelper()
# set web server to httpd, since that's what we're expecting here
self.opts.set_web_server('httpd')
# run the compile step of the build pack
output = ErrorHelper().compile(self.bp)
# confirm downloads
DownloadAssertHelper(2, 2).assert_downloads_from_output(output)
# confirm start script
bp.assert_start_script_is_correct(self.build_dir)
httpd.assert_start_script_is_correct(self.build_dir)
php.assert_start_script_is_correct(self.build_dir)
# confirm bp utils installed
bp.assert_scripts_are_installed(self.build_dir)
bp.assert_config_options(self.build_dir)
# check env & proc files
httpd.assert_contents_of_procs_file(self.build_dir)
httpd.assert_contents_of_env_file(self.build_dir)
php.assert_contents_of_procs_file(self.build_dir)
php.assert_contents_of_env_file(self.build_dir)
# webdir exists
httpd.assert_web_dir_exists(self.build_dir, self.opts.get_webdir())
# check php & httpd installed
httpd.assert_files_installed(self.build_dir)
php.assert_files_installed(self.build_dir)
def test_with_nginx(self):
# helpers to confirm the environment
bp = BuildPackAssertHelper()
nginx = NginxAssertHelper()
php = PhpAssertHelper()
# set web server to httpd, since that's what we're expecting here
self.opts.set_web_server('nginx')
# run the compile step of the build pack
output = ErrorHelper().compile(self.bp)
# confirm downloads
DownloadAssertHelper(2, 2).assert_downloads_from_output(output)
# confirm start script
bp.assert_start_script_is_correct(self.build_dir)
nginx.assert_start_script_is_correct(self.build_dir)
php.assert_start_script_is_correct(self.build_dir)
# confirm bp utils installed
bp.assert_scripts_are_installed(self.build_dir)
bp.assert_config_options(self.build_dir)
# check env & proc files
nginx.assert_contents_of_procs_file(self.build_dir)
php.assert_contents_of_procs_file(self.build_dir)
php.assert_contents_of_env_file(self.build_dir)
# webdir exists
nginx.assert_web_dir_exists(self.build_dir, self.opts.get_webdir())
# check php & nginx installed
nginx.assert_files_installed(self.build_dir)
php.assert_files_installed(self.build_dir)
class TestCompileApp6(TestCompileApp1):
def __init__(self):
self.app_name = 'app-6'
def setUp(self):
TestCompileApp1.setUp(self)
self.opts.set_webdir('public')
def assert_app6_specifics(self):
fah = FileAssertHelper()
(fah.expect()
.root(self.build_dir)
.path('public') # noqa
.path('public', 'index.php')
.path('public', 'info.php')
.path('vendor')
.path('vendor', 'lib.php')
.path('.bp-config', 'options.json')
.exists())
def test_with_httpd(self):
TestCompileApp1.test_with_httpd(self)
# some app specific tests
self.assert_app6_specifics()
def test_with_nginx(self):
TestCompileApp1.test_with_nginx(self)
# some app specific tests
self.assert_app6_specifics()
class TestCompileApp5(BaseCompileApp):
def __init__(self):
self.app_name = 'app-5'
def METHOD_NAME(self):
# helpers to confirm the environment
bp = BuildPackAssertHelper()
php = PhpAssertHelper()
none = NoWebServerAssertHelper()
# no web server
self.opts.set_web_server('none')
# run the compile step of the build pack
output = ErrorHelper().compile(self.bp)
# confirm downloads
none.assert_downloads_from_output(output)
# confirm httpd and nginx are not installed
none.assert_no_web_server_is_installed(self.build_dir)
# confirm start script
bp.assert_start_script_is_correct(self.build_dir)
php.assert_start_script_is_correct(self.build_dir)
# confirm bp utils installed
bp.assert_scripts_are_installed(self.build_dir)
# check env & proc files
none.assert_contents_of_procs_file(self.build_dir)
php.assert_contents_of_env_file(self.build_dir)
# webdir exists
none.assert_no_web_dir(self.build_dir, self.opts.get_webdir())
# check php cli installed
none.assert_files_installed(self.build_dir)
class TestCompileWithProfileD(BaseCompileApp):
def __init__(self):
self.app_name = 'app-with-profile-d'
def testProfileDNotOverridden(self):
ErrorHelper().compile(self.bp)
fah = FileAssertHelper()
fah.expect().path(self.build_dir, '.profile.d',
'finalize_dontdelete.sh').exists()
class TestCompileWithInvalidJSON(BaseCompileApp):
def __init__(self):
self.app_name = 'app-invalid-json'
@raises(CalledProcessError)
def test_compile_with_invalid_json(self):
ErrorHelper().compile(self.bp)
|
4,836 | decorator | # Copyright (C) 2018-2022 Intel Corporation
#
# SPDX-License-Identifier: MIT
import http.client
from django.http import HttpResponseBadRequest, HttpResponse
from rules.contrib.views import permission_required, objectgetter
from django.conf import settings
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.request import Request
from rest_framework.decorators import api_view, permission_classes
from drf_spectacular.utils import extend_schema
from cvat.apps.engine.log import ServerLogManager
from cvat.apps.engine import models
from cvat.apps.dataset_repo.models import GitData
import contextlib
import cvat.apps.dataset_repo.dataset_repo as CVATGit
import django_rq
slogger = ServerLogManager(__name__)
def _legacy_api_view(allowed_method_names=None):
# Currently, the views in this file use the legacy permission-checking
# approach, so this decorator disables the default DRF permission classes.
# TODO: migrate to DRF permissions, make the views compatible with drf-spectacular,
# and remove this decorator.
def METHOD_NAME(view):
view = permission_classes([IsAuthenticated])(view)
view = api_view(allowed_method_names)(view)
view = extend_schema(exclude=True)(view)
return view
return METHOD_NAME
@_legacy_api_view()
def check_process(request, rq_id):
try:
queue = django_rq.get_queue(settings.CVAT_QUEUES.EXPORT_DATA.value)
rq_job = queue.fetch_job(rq_id)
if rq_job is not None:
if rq_job.is_queued or rq_job.is_started:
return Response({"status": rq_job.get_status()})
elif rq_job.is_finished:
return Response({"status": rq_job.get_status()})
else:
return Response({"status": rq_job.get_status(), "stderr": rq_job.exc_info})
else:
return Response({"status": "unknown"})
except Exception as ex:
slogger.glob.error("error occurred during checking repository request with rq id {}".format(rq_id), exc_info=True)
return HttpResponseBadRequest(str(ex))
@_legacy_api_view(['POST'])
@permission_required(perm=['engine.task.create'],
fn=objectgetter(models.Task, 'tid'), raise_exception=True)
def create(request: Request, tid):
try:
slogger.task[tid].info("create repository request")
body = request.data
path = body["path"]
export_format = body.get("format")
lfs = body["lfs"]
rq_id = "git.create.{}".format(tid)
queue = django_rq.get_queue(settings.CVAT_QUEUES.EXPORT_DATA.value)
queue.enqueue_call(func = CVATGit.initial_create, args = (tid, path, export_format, lfs, request.user), job_id = rq_id)
return Response({ "rq_id": rq_id })
except Exception as ex:
slogger.glob.error("error occurred during initial cloning repository request with rq id {}".format(rq_id), exc_info=True)
return HttpResponseBadRequest(str(ex))
@_legacy_api_view()
def push_repository(request: Request, tid):
try:
slogger.task[tid].info("push repository request")
rq_id = "git.push.{}".format(tid)
queue = django_rq.get_queue(settings.CVAT_QUEUES.EXPORT_DATA.value)
queue.enqueue_call(func = CVATGit.push, args = (tid, request.user, request.scheme, request.get_host()), job_id = rq_id)
return Response({ "rq_id": rq_id })
except Exception as ex:
with contextlib.suppress(Exception):
slogger.task[tid].error("error occurred during pushing repository request",
exc_info=True)
return HttpResponseBadRequest(str(ex))
@_legacy_api_view()
def get_repository(request: Request, tid):
try:
slogger.task[tid].info("get repository request")
return Response(CVATGit.get(tid, request.user))
except Exception as ex:
with contextlib.suppress(Exception):
slogger.task[tid].error("error occurred during getting repository info request",
exc_info=True)
return HttpResponseBadRequest(str(ex))
@_legacy_api_view(['PATCH'])
@permission_required(perm=['engine.task.access'],
fn=objectgetter(models.Task, 'tid'), raise_exception=True)
def update_git_repo(request: Request, tid):
try:
body = request.data
req_type = body["type"]
value = body["value"]
git_data_obj = GitData.objects.filter(task_id=tid)[0]
if req_type == "url":
git_data_obj.url = value
git_data_obj.save(update_fields=["url"])
elif req_type == "lfs":
git_data_obj.lfs = bool(value)
git_data_obj.save(update_fields=["lfs"])
elif req_type == "format":
git_data_obj.format = value
git_data_obj.save(update_fields=["format"])
slogger.task[tid].info("get repository request")
return HttpResponse(
status=http.HTTPStatus.OK,
)
except Exception as ex:
with contextlib.suppress(Exception):
slogger.task[tid].error("error occurred during changing repository request", exc_info=True)
return HttpResponseBadRequest(str(ex))
@_legacy_api_view()
def get_meta_info(request):
try:
db_git_records = GitData.objects.all()
response = {}
for db_git in db_git_records:
response[db_git.task_id] = db_git.status
return Response(response)
except Exception as ex:
slogger.glob.exception("error occurred during get meta request", exc_info = True)
return HttpResponseBadRequest(str(ex)) |
4,837 | test parameters to table | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import pytest
import numpy as np
from numpy.testing import assert_allclose
from gammapy.modeling import Parameter, Parameters
def test_parameter_init():
par = Parameter("spam", 42, "deg")
assert par.name == "spam"
assert par.factor == 42
assert isinstance(par.factor, float)
assert par.scale == 1
assert isinstance(par.scale, float)
assert par.value == 42
assert isinstance(par.value, float)
assert par.unit == "deg"
assert par.min is np.nan
assert par.max is np.nan
assert not par.frozen
par = Parameter("spam", "42 deg")
assert par.factor == 42
assert par.scale == 1
assert par.unit == "deg"
with pytest.raises(TypeError):
Parameter(1, 2)
def test_parameter_outside_limit(caplog):
par = Parameter("spam", 50, min=0, max=40)
par.check_limits()
assert "WARNING" in [_.levelname for _ in caplog.records]
message1 = "Value 50.0 is outside bounds [0.0, 40.0] for parameter 'spam'"
assert message1 in [_.message for _ in caplog.records]
def test_parameter_scale():
# Basic check how scale is used for value, min, max
par = Parameter("spam", 420, "deg", 10, 400, 500)
assert par.value == 420
assert par.min == 400
assert_allclose(par.factor_min, 40)
assert par.max == 500
assert_allclose(par.factor_max, 50)
par.value = 70
assert par.scale == 10
assert_allclose(par.factor, 7)
def test_parameter_quantity():
par = Parameter("spam", 420, "deg", 10)
quantity = par.quantity
assert quantity.unit == "deg"
assert quantity.value == 420
par.quantity = "70 deg"
assert_allclose(par.factor, 7)
assert par.scale == 10
assert par.unit == "deg"
def test_parameter_repr():
par = Parameter("spam", 42, "deg")
assert repr(par).startswith("Parameter(name=")
def test_parameter_to_dict():
par = Parameter("spam", 42, "deg")
d = par.to_dict()
assert isinstance(d["unit"], str)
@pytest.mark.parametrize(
"method,value,factor,scale",
[
# Check method="scale10" in detail
("scale10", 2e-10, 2, 1e-10),
("scale10", 2e10, 2, 1e10),
("scale10", -2e-10, -2, 1e-10),
("scale10", -2e10, -2, 1e10),
# Check that results are OK for very large numbers
# Regression test for https://github.com/gammapy/gammapy/issues/1883
("scale10", 9e35, 9, 1e35),
# Checks for the simpler method="factor1"
("factor1", 2e10, 1, 2e10),
("factor1", -2e10, 1, -2e10),
],
)
def test_parameter_autoscale(method, value, factor, scale):
par = Parameter("", value, scale_method=method)
par.autoscale()
assert_allclose(par.factor, factor)
assert_allclose(par.scale, scale)
assert isinstance(par.scale, float)
@pytest.fixture()
def pars():
return Parameters([Parameter("spam", 42, "deg"), Parameter("ham", 99, "TeV")])
def test_parameters_basics(pars):
# This applies a unit transformation
pars["ham"].error = "10000 GeV"
pars["spam"].error = 0.1
assert_allclose(pars["spam"].error, 0.1)
assert_allclose(pars[1].error, 10)
def test_parameters_copy(pars):
pars2 = pars.copy()
assert pars is not pars2
assert pars[0] is not pars2[0]
def test_parameters_from_stack():
a = Parameter("a", 1)
b = Parameter("b", 2)
c = Parameter("c", 3)
pars = Parameters([a, b]) + Parameters([]) + Parameters([c])
assert pars.names == ["a", "b", "c"]
def test_unique_parameters():
a = Parameter("a", 1)
b = Parameter("b", 2)
c = Parameter("c", 3)
parameters = Parameters([a, b, a, c])
assert parameters.names == ["a", "b", "a", "c"]
parameters_unique = parameters.unique_parameters
assert parameters_unique.names == ["a", "b", "c"]
def test_parameters_getitem(pars):
assert pars[1].name == "ham"
assert pars["ham"].name == "ham"
assert pars[pars[1]].name == "ham"
with pytest.raises(TypeError):
pars[42.3]
with pytest.raises(IndexError):
pars[3]
with pytest.raises(IndexError):
pars["lamb"]
with pytest.raises(ValueError):
pars[Parameter("bam!", 99)]
def METHOD_NAME(pars):
pars["ham"].error = 1e-10
pars["spam"]._link_label_io = "test"
table = pars.to_table()
assert len(table) == 2
assert len(table.columns) == 10
assert table["link"][0] == "test"
assert table["link"][1] == ""
def test_parameters_set_parameter_factors(pars):
pars.set_parameter_factors([77, 78])
assert_allclose(pars["spam"].factor, 77)
assert_allclose(pars["spam"].scale, 1)
assert_allclose(pars["ham"].factor, 78)
assert_allclose(pars["ham"].scale, 1)
def test_parameters_s():
pars = Parameters(
[
Parameter("", 20, scale_method="scale10"),
Parameter("", 20, scale_method=None),
]
)
pars_dict = pars.to_dict()
pars.autoscale()
assert_allclose(pars[0].factor, 2)
assert_allclose(pars[0].scale, 10)
assert pars_dict[0]["scale_method"] == "scale10"
assert pars_dict[1]["scale_method"] is None
pars = Parameters.from_dict(pars_dict)
pars.autoscale()
assert_allclose(pars[0].factor, 2)
assert_allclose(pars[0].scale, 10)
assert pars[1].scale_method is None
pars.autoscale()
assert_allclose(pars[1].factor, 20)
assert_allclose(pars[1].scale, 1)
def test_parameter_scan_values():
p = Parameter(name="test", value=0, error=1)
values = p.scan_values
assert len(values) == 11
assert_allclose(values[[0, -1]], [-2, 2])
assert_allclose(values[5], 0)
p.scan_n_sigma = 3
assert_allclose(p.scan_values[[0, -1]], [-3, 3])
p.scan_min = -2
p.scan_max = 3
assert_allclose(p.scan_values[[0, -1]], [-2, 3])
p.scan_n_values = 5
assert len(p.scan_values) == 5
p.interp = "log"
p.scan_n_values = 3
p.scan_min = 0.1
p.scan_max = 10
assert_allclose(p.scan_values, [0.1, 1, 10])
def test_update_from_dict():
par = Parameter(
"test",
value=1e-10,
min="nan",
max="nan",
frozen=False,
unit="TeV",
scale_method="scale10",
)
par.autoscale()
data = {
"model": "gc",
"type": "spectral",
"name": "test2",
"value": 3e-10,
"min": 0,
"max": np.nan,
"frozen": True,
"unit": "GeV",
}
par.update_from_dict(data)
assert par.name == "test"
assert_allclose(par.factor, 3)
assert_allclose(par.value, 3e-10)
assert par.unit == "GeV"
assert_allclose(par.min, 0)
assert par.max is np.nan
assert par.frozen
data = {
"model": "gc",
"type": "spectral",
"name": "test2",
"value": 3e-10,
"min": 0,
"max": np.nan,
"frozen": "True",
"unit": "GeV",
}
par.update_from_dict(data)
assert par.frozen |
4,838 | visit call | # Copyright 2022 Fuzz Introspector Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import os
import sys
import shutil
import importlib.util
class FuzzerVisitor(ast.NodeVisitor):
def __init__(self, ast_content):
print("Hello")
self.ast_content = ast_content
self.current_scope = []
self.fuzzer_entrypoint = None
self.fuzzer_imports = []
self.fuzzer_packages = []
def visit_Module(self, node):
print("Visiting module")
print(node)
self.generic_visit(node)
def visit_With(self, node):
print("In with")
print(node.body)
for elem in node.body:
print("Iterating %s" % (elem))
self.visit(elem)
def visit_Import(self, node):
print("Import")
for alias in node.names:
print("- %s" % (alias.name))
self.fuzzer_imports.append(alias.name)
def visit_ImportFrom(self, node):
print("From import")
# node.module is None for relative imports such as "from . import x"
mod = node.module or ""
for _import in node.names:
imported_module = mod + "." + _import.name if mod else _import.name
if imported_module.endswith(".*"):
imported_module = imported_module[:-2]
self.fuzzer_imports.append(imported_module)
def METHOD_NAME(self, node):
if len(self.current_scope) == 0:
scope = "global"
else:
scope = self.current_scope[-1]
print("call instruction: %s" % (ast.dump(node)))
print("Inside of call instruction -- %s" % (scope))
if isinstance(node.func, ast.Name):
print("- [N] %s" % (node.func.id))
if isinstance(node.func, ast.Attribute):
print("%s" % (node.func))
lhs = ""
lhs_obj = node.func
while isinstance(lhs_obj, ast.Attribute):
tmp = lhs_obj.value
lhs = "." + lhs_obj.attr + lhs
lhs_obj = tmp
if isinstance(tmp, ast.Name):
break
if isinstance(lhs_obj, ast.Call):
self.METHOD_NAME(lhs_obj)
lhs_obj = None
if lhs_obj is not None:
try:
lhs = lhs_obj.id + lhs
except AttributeError:
lhs = ""
print(" [C] %s" % (lhs))
# Check if we have atheris.Setup
if lhs == "atheris.Setup":
print("We have the set up function")
# Identify the second argument to the function
# Target function is the second argument
arg = node.args[1]
if isinstance(arg, ast.Name):
self.fuzzer_entrypoint = arg.id
for arg in node.args:
print("- arg: %s" % (arg))
def visit_FunctionDef(self, node):
print("Function definition: %s" % (node.name))
self.current_scope.append(node.name)
self.generic_visit(node)
self.current_scope = self.current_scope[:-1]
def analyze(self):
self.visit(self.ast_content)
def print_specifics(self):
print("#" * 50)
print("Fuzzer specification")
if self.fuzzer_entrypoint is None:
ep = "Found none"
else:
ep = self.fuzzer_entrypoint
print("- Fuzzer entrypoint: %s" % (ep))
print("- Fuzzer imports:")
for _import in self.fuzzer_imports:
print(" - %s" % (_import))
if _import.count(".") > 0:
_import = _import.split(".")[0]
print("Refining import to %s" % (_import))
# Let's try and see if these are searchable
try:
specs = importlib.util.find_spec(_import)
except ModuleNotFoundError:
continue
except ImportError:
continue
print("No error")
if specs is not None:
print("Spec:")
print(specs)
avoid = ['atheris', 'sys', 'os']
if _import not in avoid:
if specs.submodule_search_locations:
for elem in specs.submodule_search_locations:
print("Checking --- %s" % (elem))
if (
("/usr/local/lib/" in elem or "/usr/lib/" in elem)
and "site-packages" not in elem
):
# skip packages that are builtin packages
# Check if we can refine
if elem.count(".") > 1:
print("Has such a count")
continue
print("Adding --- %s" % (elem))
self.fuzzer_packages.append(elem)
else:
print("Spec is none")
print("Iterating")
for pkg in self.fuzzer_packages:
print("package: %s" % (pkg))
def get_package_paths(filename):
with open(filename, "r") as f:
content = f.read()
print("Fuzzer visitor")
fuzz_visitor = FuzzerVisitor(ast.parse(content))
fuzz_visitor.analyze()
fuzz_visitor.print_specifics()
return fuzz_visitor.fuzzer_packages
if __name__ == "__main__":
filename = sys.argv[1]
if len(sys.argv) > 2:
is_oss_fuzz = True
else:
is_oss_fuzz = False
fuzz_packages = get_package_paths(filename)
print("After main")
for fpkg in fuzz_packages:
print("- %s" % (fpkg))
with open("tmp-packages.txt", "w") as pkgf:
for fpkg in fuzz_packages:
print("- %s" % (fpkg))
pkgf.write(fpkg)
pkgf.write("\n")
if is_oss_fuzz:
if not os.path.isdir("/src/pyintro-pack-deps"):
os.mkdir("/src/pyintro-pack-deps")
for pkg in fuzz_packages:
dst_dir = "/src/pyintro-pack-deps/%s" % (os.path.basename(pkg))
if os.path.isdir(pkg) and not os.path.isdir(dst_dir):
shutil.copytree(pkg, dst_dir) |
4,839 | check | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# request.py
#
# Copyright 2017 Dominique Revuz <dr@univ-mlv.fr>
#
import json
import logging
import os
import requests
from django.conf import settings
from playexo.exception import SandboxUnavailable
from playexo.utils import tar_from_dic
from playexo.utils import make_data
from playexo.utils import get_sandboxerr_build
from playexo.utils import get_sandboxerr_eval
from playexo.utils import get_file_from_env
logger = logging.getLogger(__name__)
class SandboxBuild:
def __init__(self, dic, sandbox=None, test=False):
self.sandbox = settings.SANDBOX if sandbox is None else sandbox
self.dic = dict(dic)
self.test = test
def _build_env(self):
env = dict(self.dic['__files'])
tmp = self.dic
del tmp['__files']
env['pl.json'] = json.dumps(tmp)
if 'grader' in self.dic and 'grader.py' not in env:
env['grader.py'] = self.dic['grader']
if 'builder' in self.dic and 'builder.py' not in env:
env['builder.py'] = self.dic['builder']
for item in os.listdir(settings.DOCKER_DEFAULT_FILES):
s = os.path.join(settings.DOCKER_DEFAULT_FILES, item)
with open(s, "r") as f:
env[item] = f.read()
return env
def call(self, request_timeout=10):
env = self._build_env()
files = {'environment': tar_from_dic(env)}
commands = ['chmod +x clean.sh', './clean.sh', 'chmod +x builder.sh', './builder.sh']
data = make_data(commands, True, )
logger.info("Building on sandbox '" + self.sandbox + "'.")
url = os.path.join(self.sandbox, "execute/")
try:
response = requests.post(url, data=data, files=files, timeout=request_timeout)
response = json.loads(response.text)
response["id"] = response["environment"]
response['stderr'] = response['execution'][-1]['stderr']
response['sandboxerr'] = get_sandboxerr_build(response['status'], request_timeout)
stderr = get_file_from_env(requests, self.sandbox, "stderr.log", response["id"])
if stderr is not None:
response["stderr"] = stderr
if response['status'] != 0:
if "JSONDecodeError" in response['stderr']:
response['status'] = -1
return response
context = get_file_from_env(requests, self.sandbox, "processed.json", response["id"])
if context is not None:
response["context"] = json.loads(context)
else:
response['status'] = -1
del response["environment"]
del response['execution']
except json.decoder.JSONDecodeError: # pragma: no cover
msg = "Sandbox '" + url + "' returned a non JSON response\n"
logger.critical(msg)
raise SandboxUnavailable(msg)
except Exception:
msg = "Could not join the sandbox '" + url + "'."
logger.exception(msg)
raise SandboxUnavailable(msg)
return response
class SandboxEval:
def __init__(self, uuid, answers, sandbox=None):
self.uuid = uuid
self.sandbox = settings.SANDBOX if sandbox is None else sandbox
self.answers = answers
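# Probe the sandbox with a HEAD request; any 2xx status means the environment
# with this uuid still exists on the sandbox.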
def METHOD_NAME(self):
url = os.path.join(self.sandbox, "environments/%s/")
try:
r = requests.head(url % str(self.uuid), timeout=1)
return 200 <= r.status_code <= 299
except Exception:
msg = "Could not join the sandbox '" + url + "'."
logger.exception(msg)
raise SandboxUnavailable(msg)
def call(self, request_timeout=10):
logger.info("Evaluating on sandbox '" + self.sandbox + "'.")
files = {'environment': tar_from_dic({'answers.json': json.dumps(self.answers)})}
commands = ['chmod +x clean.sh', './clean.sh', 'chmod +x grader.sh', './grader.sh']
data = make_data(commands, True, environment=str(self.uuid))
url = os.path.join(self.sandbox, "execute/")
try:
response = requests.post(url, data=data, files=files, timeout=request_timeout)
response = json.loads(response.text)
response["id"] = response["environment"]
command = response['execution'][-1]
stderr = get_file_from_env(requests, self.sandbox, "stderr.log", response["id"])
if stderr is not None:
response["stderr"] = stderr
response["sandboxerr"] = get_sandboxerr_eval(response["status"], request_timeout)
feedback = get_file_from_env(requests, self.sandbox, "feedback.html", response["id"])
if feedback is not None:
if feedback == '\n':
feedback = ""
response["feedback"] = feedback
context = get_file_from_env(requests, self.sandbox, "processed.json", response["id"])
if context is not None:
response["context"] = json.loads(context)
else:
response["context"] = {}
try:
if not command["exit_code"]:
stdout = int(command["stdout"])
response["grade"] = stdout
else:
response["grade"] = -1
except ValueError:
response["grade"] = -1
response['status'] = -4
response["feedback"] = "Sandbox error:" + response["sandboxerr"]
del response["environment"]
del response['execution']
except json.decoder.JSONDecodeError: # pragma: no cover
msg = "Sandbox '" + url + "' returned a non JSON response\n"
logger.critical(msg)
raise SandboxUnavailable(msg)
except Exception:
msg = "Could not join the sandbox '" + url + "'."
logger.exception(msg)
raise SandboxUnavailable(msg)
return response |
4,840 | translate profile | # (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
import os
import click
import yaml
from ...console import CONTEXT_SETTINGS
from .constants import MIB_SOURCE_URL
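# fetch_mib downloads the named MIB from source_url over HTTP and compiles it
# into the pysnmp_mibs package directory so the MIB builder can load it.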
def fetch_mib(mib, source_url):
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
import pysnmp_mibs
from pysmi.codegen import PySnmpCodeGen
from pysmi.compiler import MibCompiler
from pysmi.parser import SmiStarParser
from pysmi.reader import HttpReader
from pysmi.writer import PyFileWriter
target_directory = os.path.dirname(pysnmp_mibs.__file__)
parsed_url = urlparse(source_url)
reader = HttpReader(parsed_url.netloc, 80, parsed_url.path)
mibCompiler = MibCompiler(SmiStarParser(), PySnmpCodeGen(), PyFileWriter(target_directory))
mibCompiler.addSources(reader)
mibCompiler.compile(mib)
@click.command(context_settings=CONTEXT_SETTINGS, short_help='Translate MIB name to OIDs in SNMP profiles')
@click.argument('profile_path')
@click.option(
'--mib_source_url',
default=MIB_SOURCE_URL,
help='Source url to fetch missing MIBS',
)
@click.pass_context
def METHOD_NAME(ctx, profile_path, mib_source_url):
"""
Do OID translation in an SNMP profile. This isn't a plain replacement, as it
doesn't preserve comments and indentation, but it should automate most of the
work.
You'll need to install pysnmp and pysnmp-mibs manually beforehand.
"""
# Leave imports in function to not add the dependencies
from pysnmp import hlapi
from pysnmp.smi import view
from pysnmp.smi.error import MibNotFoundError
snmp_engine = hlapi.SnmpEngine()
mib_builder = snmp_engine.getMibBuilder()
mib_view_controller = view.MibViewController(mib_builder)
with open(profile_path) as f:
data = yaml.safe_load(f.read())
output = []
metrics = data.get('metrics', [])
for metric in metrics:
mib = metric['MIB']
try:
mib_view_controller.mibBuilder.loadModule(mib)
except MibNotFoundError:
fetch_mib(mib, source_url=mib_source_url)
if 'table' in metric:
table = metric['table']
if not isinstance(table, str):
continue
node = mib_view_controller.mibBuilder.importSymbols(mib, table)[0]
value = '.'.join([str(i) for i in node.getName()])
table = {'name': table, 'OID': value}
symbols = []
for symbol in metric['symbols']:
if not isinstance(symbol, str):
continue
node = mib_view_controller.mibBuilder.importSymbols(mib, symbol)[0]
value = '.'.join([str(i) for i in node.getName()])
symbols.append({'name': symbol, 'OID': value})
tags = []
for tag in metric['metric_tags']:
if 'column' in tag:
tag_mib = tag.get('MIB', mib)
key = tag['column']
if not isinstance(key, str):
continue
node = mib_view_controller.mibBuilder.importSymbols(tag_mib, key)[0]
value = '.'.join([str(i) for i in node.getName()])
tag = tag.copy()
tag['column'] = {'name': key, 'OID': value}
tags.append(tag)
else:
tags.append(tag)
element = {'MIB': mib, 'table': table, 'symbols': symbols, 'metric_tags': tags}
if 'forced_type' in metric:
element['forced_type'] = metric['forced_type']
output.append(element)
elif 'symbol' in metric:
key = metric['symbol']
node = mib_view_controller.mibBuilder.importSymbols(mib, key)[0]
value = '.'.join([str(i) for i in node.getName()])
element = {'MIB': mib, 'symbol': {'name': key, 'OID': value}}
if 'forced_type' in metric:
element['forced_type'] = metric['forced_type']
output.append(element)
print(yaml.dump({'metrics': output})) |
4,841 | test parens in quotes are not counted | #############################################################################
# Copyright (c) 2015-2016 Balabit
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 as published
# by the Free Software Foundation, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# As an additional exemption you are allowed to compile & link against the
# OpenSSL libraries as published by the OpenSSL project. See the file
# COPYING for details.
#
#############################################################################
from __future__ import absolute_import, print_function
from .test_lexer import TestLexer
from .. import commandlinelexer
class TestCommandLineLexer(TestLexer):
def _construct_lexer(self):
return commandlinelexer.CommandLineLexer()
def test_lexer_returns_none_for_empty_string(self):
self._lexer.input("")
self._assert_next_token_is_none()
def test_single_quote_character_is_returned_as_a_partial_token(self):
for quote in ('"', "'"):
self._lexer.input(quote)
self._assert_next_token_is_partial()
def test_pair_of_quotes_is_returned_as_an_empty_string(self):
self._lexer.input("''")
self._next_token()
self._assert_current_token_is_not_partial()
self._assert_current_token_value_equals('')
def test_quoted_character_is_returned_as_the_character(self):
self._lexer.input("'a'")
self._next_token()
self._assert_current_token_is_not_partial()
self._assert_current_token_value_equals('a')
def test_quoted_string_is_returned_as_the_string(self):
self._lexer.input('"foo bar"')
self._assert_next_token_value_equals('foo bar')
def test_unquoted_string_is_returned_as_the_string(self):
self._lexer.input('foo')
self._assert_next_token_value_equals('foo')
def test_unquoted_prefix_and_then_a_quoted_string_is_concatenated(self):
self._lexer.input('foo"bar"')
self._assert_next_token_value_equals('foobar')
def test_double_quotes_allow_backslash_as_a_single_char_escape(self):
self._lexer.input(r'"foo\""')
self._assert_next_token_value_equals('foo"')
def test_white_space_separates_tokens(self):
self._lexer.input("""'foo' "bar" baz """)
self._assert_next_token_value_equals('foo')
self._assert_next_token_value_equals('bar')
self._assert_next_token_value_equals('baz')
self._assert_next_token_is_none()
def test_unclosed_string_is_returned_as_a_partial_token(self):
self._lexer.input("""'foo' "bar" 'baz""")
self._next_token()
self._assert_current_token_value_equals('foo')
self._assert_current_token_is_not_partial()
self._next_token()
self._assert_current_token_value_equals('bar')
self._assert_current_token_is_not_partial()
self._next_token()
self._assert_current_token_value_equals('baz')
self._assert_current_token_is_partial()
def test_single_opened_paren_is_returned_as_a_partial_token(self):
self._lexer.input("(")
self._next_token()
self._assert_current_token_value_equals('(')
self._assert_current_token_is_partial()
def test_pair_of_parens_is_returned_as_a_pair_of_parens(self):
self._lexer.input("()")
self._next_token()
self._assert_current_token_value_equals('()')
self._assert_current_token_is_not_partial()
def test_token_enclosed_in_parens_is_returned_as_a_token_in_parens(self):
self._lexer.input("(foo)")
self._next_token()
self._assert_current_token_value_equals('(foo)')
self._assert_current_token_is_not_partial()
def test_whitespace_in_parens_doesnt_terminate_the_token(self):
self._lexer.input("(foo bar)")
self._next_token()
self._assert_current_token_value_equals('(foo bar)')
self._assert_current_token_is_not_partial()
def test_closing_paren_terminates_the_token_only_if_properly_paired(self):
self._lexer.input("(foo bar (baz bax)) next-token")
self._next_token()
self._assert_current_token_value_equals('(foo bar (baz bax))')
self._assert_current_token_is_not_partial()
self._next_token()
self._assert_current_token_value_equals('next-token')
self._assert_current_token_is_not_partial()
def test_one_too_many_closing_parens_is_interpreted_as_a_separate_token(self):
self._lexer.input("(foo bar )) next-token")
self._next_token()
self._assert_current_token_value_equals('(foo bar )')
self._assert_current_token_is_not_partial()
self._next_token()
self._assert_current_token_value_equals(')')
self._assert_current_token_is_not_partial()
self._next_token()
self._assert_current_token_value_equals('next-token')
self._assert_current_token_is_not_partial()
def test_quotes_within_parens_are_left_intact_so_a_recursive_parsing_will_find_them(self):
self._lexer.input("(foo 'bar baz') next-token")
self._next_token()
self._assert_current_token_value_equals("(foo 'bar baz')")
self._assert_current_token_is_not_partial()
def METHOD_NAME(self):
self._lexer.input("(foo 'bar )') next-token")
self._next_token()
self._assert_current_token_value_equals("(foo 'bar )')")
self._assert_current_token_is_not_partial()
self._next_token()
self._assert_current_token_value_equals("next-token")
self._assert_current_token_is_not_partial() |
4,842 | add | from GangaCore.Utility.logging import getLogger
from GangaCore.Core.exceptions import GangaValueError
logger = getLogger()
class PluginManagerError(GangaValueError):
def __init__(self, x):
super(PluginManagerError, self).__init__(self, x)
# Simple Ganga Plugin Mechanism
#
# Any object may be registered (added) in the plugin manager provided that
# you are able to specify the name and the category to which it belongs.
#
# If you do not use category all plugins are registered in a flat list. Otherwise
# there is a list of names for each category separately.
class PluginManager(object):
__slots__ = ('all_dict', 'first', '_prev_found')
def __init__(self):
self.all_dict = {}
self.first = {}
self._prev_found = {}
def find(self, category, name):
"""
Return a plugin added with 'name' in the given 'category'.
If 'name' is None then the default plugin in the category is returned.
Typically the default plugin is the first added.
If plugin not found raise PluginManagerError.
"""
#logger.debug( "Attempting to Find Plugin: %s" % name )
#import traceback
# traceback.print_stack()
# Simple attempt to pre-load and cache Plugin lookups
key = str(category) + "_" + str(name)
if key in self._prev_found:
return self._prev_found[key]
try:
if name is not None:
if category in self.first:
# This is expected to work and is quite verbose when debugging turned on
#logger.debug("Returning based upon Category and Name")
#logger.debug("name: %s cat: %s" % (str(name), str(category)))
if name in self.all_dict[category]:
self._prev_found[key] = self.all_dict[category][name]
return self.all_dict[category][name]
if (name is None) and category is not None:
if (category in self.first):
# This is expected to work and is quite verbose when debugging turned on
#logger.debug("Returning based upon Category ONLY")
#logger.debug("name: %s cat: %s" % (str(name), str(category)))
self._prev_found[key] = self.first[category]
return self.first[category]
elif (name is not None) and (category is not None):
for category_i in self.all_dict:
for this_name in self.all_dict[category_i]:
if name == this_name:
message1 = "Category of %s, has likely changed between ganga versions!" % name
message2 = "Category Requested: %s, Category in which plugin was found: %s" % (
category, category_i)
message3 = "Attempting to use new category %s to load a stored object, this may fail!" % category_i
logger.debug(message1)
logger.debug(message2)
logger.debug(message3)
self._prev_found[key] = self.all_dict[category_i][name]
return self.all_dict[category_i][name]
except KeyError:
logger.debug("KeyError from Config system!")
except:
logger.error("Some Other unexpected ERROR!")
raise
if name is None:
s = "cannot find default plugin for category " + category
else:
s = "cannot find '%s' in a category '%s', or elsewhere" % (name, category)
if name is None and category is None:
s = "Serious Plugin Error has occured"
logger.debug(s)
raise PluginManagerError(s)
def METHOD_NAME(self, pluginobj, category, name):
""" Add a pluginobj to the plugin manager with the name and the category labels.
The first plugin is default unless changed explicitly.
"""
cat = self.all_dict.setdefault(category, {})
self.first.setdefault(category, pluginobj)
cat[name] = pluginobj
logger.debug('adding plugin %s (category "%s") ' % (name, category))
def setDefault(self, category, name):
""" Make the plugin 'name' be default in a given 'category'.
You must first add() the plugin object before calling this method. Otherwise
PluginManagerError is raised.
"""
assert(not name is None)
pluginobj = self.find(category, name)
self.first[category] = pluginobj
def allCategories(self):
return self.all_dict
def allClasses(self, category):
cat = self.all_dict.get(category)
if cat:
return cat
else:
return {}
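# Minimal usage sketch (illustrative only; the category and plugin names below
# are hypothetical, not part of Ganga):
#
#   pm = PluginManager()
#   pm.add(MyBackend, 'backends', 'MyBackend')   # first added becomes default
#   pm.find('backends', 'MyBackend')             # -> MyBackend
#   pm.find('backends', None)                    # -> default plugin of 'backends'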
allPlugins = PluginManager() |
4,843 | close | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2018 The gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# gpodder.minidb - A simple SQLite store for Python objects
# Thomas Perl, 2010-01-28
# based on: "ORM wie eine Kirchenmaus - a very poor ORM implementation
# by thp, 2009-11-29 (thp.io/about)"
# This module is also available separately at:
# http://thp.io/2010/minidb/
# sqlite3 ships with the standard library; fall back to pysqlite2 only on very old installations
try:
import sqlite3.dbapi2 as sqlite
except ImportError:
try:
from pysqlite2 import dbapi2 as sqlite
except ImportError:
raise Exception('Please install SQLite3 support.')
import threading
class Store(object):
def __init__(self, filename=':memory:'):
self.db = sqlite.connect(filename, check_same_thread=False)
self.lock = threading.RLock()
def _schema(self, class_):
return class_.__name__, list(sorted(class_.__slots__))
def _set(self, o, slot, value):
# Set a slot on the given object to value, doing a cast if
# necessary. The value None is special-cased and never cast.
cls = o.__class__.__slots__[slot]
if value is not None:
if isinstance(value, bytes):
value = value.decode('utf-8')
value = cls(value)
setattr(o, slot, value)
def commit(self):
with self.lock:
self.db.commit()
def METHOD_NAME(self):
with self.lock:
self.db.isolation_level = None
self.db.execute('VACUUM')
self.db.isolation_level = ''
self.db.METHOD_NAME()
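# _register creates the table for class_ on first use; if the table already
# exists, it adds TEXT columns for any slots that are missing (a minimal
# forward-only schema migration).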
def _register(self, class_):
with self.lock:
table, slots = self._schema(class_)
cur = self.db.execute('PRAGMA table_info(%s)' % table)
available = cur.fetchall()
if available:
available = [row[1] for row in available]
missing_slots = (s for s in slots if s not in available)
for slot in missing_slots:
self.db.execute('ALTER TABLE %s ADD COLUMN %s TEXT' % (table,
slot))
else:
self.db.execute('CREATE TABLE %s (%s)' % (table,
', '.join('%s TEXT' % s for s in slots)))
def convert(self, v):
if isinstance(v, str):
return v
elif isinstance(v, bytes):
return v.decode('utf-8')
else:
return str(v)
def update(self, o, **kwargs):
self.remove(o)
for k, v in list(kwargs.items()):
setattr(o, k, v)
self.save(o)
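# save() also accepts an iterable of objects; all items must be instances of
# the same class, which is enforced below with a ValueError.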
def save(self, o):
if hasattr(o, '__iter__'):
klass = None
for child in o:
if klass is None:
klass = child.__class__
self._register(klass)
table, slots = self._schema(klass)
if not isinstance(child, klass):
raise ValueError('Only one type of object allowed')
used = [s for s in slots if getattr(child, s, None) is not None]
values = [self.convert(getattr(child, slot)) for slot in used]
self.db.execute('INSERT INTO %s (%s) VALUES (%s)' % (table,
', '.join(used), ', '.join('?' * len(used))), values)
return
with self.lock:
self._register(o.__class__)
table, slots = self._schema(o.__class__)
values = [self.convert(getattr(o, slot)) for slot in slots]
self.db.execute('INSERT INTO %s (%s) VALUES (%s)' % (table,
', '.join(slots), ', '.join('?' * len(slots))), values)
def delete(self, class_, **kwargs):
with self.lock:
self._register(class_)
table, slots = self._schema(class_)
sql = 'DELETE FROM %s' % (table,)
if kwargs:
sql += ' WHERE %s' % (' AND '.join('%s=?' % k for k in kwargs))
try:
self.db.execute(sql, list(kwargs.values()))
return True
except Exception:
return False
def remove(self, o):
if hasattr(o, '__iter__'):
for child in o:
self.remove(child)
return
with self.lock:
self._register(o.__class__)
table, slots = self._schema(o.__class__)
# Use "None" as wildcard selector in remove actions
slots = [s for s in slots if getattr(o, s, None) is not None]
values = [self.convert(getattr(o, slot)) for slot in slots]
self.db.execute('DELETE FROM %s WHERE %s' % (table,
' AND '.join('%s=?' % s for s in slots)), values)
def load(self, class_, **kwargs):
with self.lock:
self._register(class_)
table, slots = self._schema(class_)
sql = 'SELECT %s FROM %s' % (', '.join(slots), table)
if kwargs:
sql += ' WHERE %s' % (' AND '.join('%s=?' % k for k in kwargs))
cur = self.db.execute(sql, list(kwargs.values()))
def apply(row):
o = class_.__new__(class_)
for attr, value in zip(slots, row):
try:
self._set(o, attr, value)
except ValueError:
return None
return o
return [x for x in [apply(row) for row in cur] if x is not None]
def get(self, class_, **kwargs):
result = self.load(class_, **kwargs)
if result:
return result[0]
else:
return None
if __name__ == '__main__':
class Person(object):
__slots__ = {'username': str, 'id': int}
def __init__(self, username, id):
self.username = username
self.id = id
def __repr__(self):
return '<Person "%s" (%d)>' % (self.username, self.id)
m = Store()
m.save(Person('User %d' % x, x * 20) for x in range(50))
p = m.get(Person, id=200)
print(p)
m.remove(p)
p = m.get(Person, id=200)
# Remove some persons again (deletion by value!)
m.remove(Person('User %d' % x, x * 20) for x in range(40))
class Person(object):
__slots__ = {'username': str, 'id': int, 'mail': str}
def __init__(self, username, id, mail):
self.username = username
self.id = id
self.mail = mail
def __repr__(self):
return '<Person "%s" (%s)>' % (self.username, self.mail)
# A schema update takes place here
m.save(Person('User %d' % x, x * 20, 'user@home.com') for x in range(50))
print(m.load(Person)) |
4,844 | get reports by table | import json
import requests
from django.conf import settings
from django.db import transaction
from django.db.models import Q
from django.http import HttpResponse
from django.template import loader
from django.utils.translation import gettext as _
from django.views.decorators.cache import cache_control
from django.views.decorators.http import require_GET, require_POST, \
require_http_methods
from ..permissions.permissions import PermissionTarget, PermissionTargetAction, \
check_permission_targets, check_table_permissions
from ..specify.api import obj_to_data, toJson, \
HttpResponseCreated, objs_to_data_, _obj_to_data
from ..specify.models import Spappresource, Spappresourcedir, Spreport, Spquery
from ..specify.views import login_maybe_required
from ..stored_queries.execution import run_ephemeral_query, models
from ..stored_queries.queryfield import QueryField
class ReportException(Exception):
pass
class ReportsPT(PermissionTarget):
resource = "/report"
execute = PermissionTargetAction()
@require_http_methods(['GET', 'HEAD'])
@cache_control(max_age=86400, private=True)
def get_status(request):
"Indicates whether a report runner server is available."
resp = {'available': settings.REPORT_RUNNER_HOST != ''}
return HttpResponse(toJson(resp), content_type="application/json")
@require_POST
@login_maybe_required
def run(request):
"""Executes the named 'report' using the given 'query' and 'parameters' as POST parameters.
Returns the result as a PDF.
"""
check_permission_targets(request.specify_collection.id, request.specify_user.id, [ReportsPT.execute])
if settings.REPORT_RUNNER_HOST == '':
raise ReportException(_("Report service is not configured."))
port = settings.REPORT_RUNNER_PORT
if port == '': port = 80
report_data = run_query(request.specify_collection, request.specify_user, request.POST['query'])
if len(report_data['rows']) < 1:
return HttpResponse(_("The report query returned no results."), content_type="text/plain")
r = requests.post("http://%s:%s/report" %
(settings.REPORT_RUNNER_HOST, port),
data={'report': request.POST['report'],
'parameters': request.POST['parameters'],
'data': toJson(report_data)})
if r.status_code == 200:
return HttpResponse(r.content, content_type="application/pdf")
else:
raise ReportException(r.text)
def get_reports_view(request):
return METHOD_NAME(request, table_id=None)
@require_GET
@login_maybe_required
def METHOD_NAME(request, table_id):
"Returns a list of available reports and labels."
reports = Spappresource.objects.filter(
mimetype__icontains="jrxml/",
spappresourcedir__discipline=request.specify_collection.discipline) \
.prefetch_related('spreports') \
.filter(
Q(spappresourcedir__collection=None) |
Q(spappresourcedir__collection=request.specify_collection)) \
.filter(
Q(spappresourcedir__specifyuser=request.specify_user) |
Q(spappresourcedir__ispersonal=False))
if table_id is not None:
reports = reports.filter(Q(spreports__query__contexttableid=table_id))
def to_entry(app_resource):
report = app_resource.spreports.first()
return dict(
app_resource=app_resource,
report=report,
query=None if report is None else report.query
)
response = [
to_entry(app_resource) for app_resource in reports
]
data = objs_to_data_(
response,
len(response),
lambda o: {
key: None if value is None else _obj_to_data(value, lambda x: None)
for key, value in o.items()
},
request.GET.get('offset', 0),
request.GET.get('limit', 0)
)
return HttpResponse(toJson(data), content_type="application/json")
@require_POST
@login_maybe_required
def create(request):
check_table_permissions(request.specify_collection, request.specify_user, Spreport, "create")
report = create_report(
request.specify_user.id,
request.specify_collection.discipline.id,
request.POST['queryid'],
request.POST['mimetype'],
request.POST['name'],
)
return HttpResponseCreated(toJson(obj_to_data(report)), content_type="application/json")
@transaction.atomic
def create_report(user_id, discipline_id, query_id, mimetype, name):
if mimetype not in ("jrxml/label", "jrxml/report"): raise AssertionError(
"Can not create report: mimetype not 'jrxml/label' or 'jrxml/report'",
{"localizationKey" : "invalidReportMimetype"})
query = Spquery.objects.get(id=query_id)
spappdirs_matched = Spappresourcedir.objects.filter(discipline_id=discipline_id, collection_id=None)
if len(spappdirs_matched) == 0:
spappdir = Spappresourcedir.objects.create(discipline_id=discipline_id)
else:
spappdir = spappdirs_matched[0]
appresource = spappdir.sppersistedappresources.create(
version=0,
mimetype=mimetype,
level=0,
name=name,
description=name,
specifyuser_id=user_id,
metadata="tableid=-1;reporttype=Report;",
)
appresource.spappresourcedatas.create(
version=0,
data=template_report_for_query(query_id, name),
)
return Spreport.objects.create(
version=0,
name=name,
appresource=appresource,
query_id=query_id,
specifyuser_id=user_id,
)
def template_report_for_query(query_id, name):
def field_element(field):
queryfield = QueryField.from_spqueryfield(field)
fieldspec = queryfield.fieldspec
field_type = fieldspec.get_field().type
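# Formatted, relationship, tree-rank, date-part and text/date-like fields are
# all rendered as plain strings in the report template.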
if field.formatName \
or field.isRelFld \
or fieldspec.tree_rank \
or fieldspec.date_part \
or field_type in ("java.sql.Timestamp", "java.util.Calendar", "java.util.Date", "text"):
field_type = 'java.lang.String'
return dict(stringid=field.stringId, field_type=field_type)
with models.session_context() as session:
sp_query = session.query(models.SpQuery).get(query_id)
field_els = [
field_element(field)
for field in sorted(sp_query.fields, key=lambda field: field.position)
if field.isDisplay
]
template = loader.get_template('report_template.xml')
return template.render({
'name': name,
'fields': field_els
})
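# run_query parses the posted query JSON, removes the row limit, and collects
# the string ids of the displayed fields in position order before executing.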
def run_query(collection, user, query_json):
try:
spquery = json.loads(query_json)
except ValueError as e:
raise ReportException(e)
spquery['limit'] = 0
report_fields = ['id'] + [
field['stringid']
for field in sorted(spquery['fields'], key=lambda f: f['position'])
if field['isdisplay']
]
query_result = run_ephemeral_query(collection, user, spquery)
return {'fields': report_fields, 'rows': query_result['results']} |
4,845 | reset queue manager process match | # (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import threading
from six import iteritems
from datadog_checks.base import AgentCheck
from datadog_checks.ibm_mq.collectors.stats_collector import StatsCollector
from datadog_checks.ibm_mq.metrics import COUNT, GAUGE
from . import connection, errors
from .collectors import ChannelMetricCollector, MetadataCollector, QueueMetricCollector
from .config import IBMMQConfig
from .process_matcher import QueueManagerProcessMatcher
try:
from typing import Any, Dict, List # noqa: F401
except ImportError:
pass
try:
import pymqi
except ImportError as e:
pymqiException = e
pymqi = None
class IbmMqCheck(AgentCheck):
MATCHER_CREATION_LOCK = threading.Lock()
process_matcher = None
SERVICE_CHECK = 'ibm_mq.can_connect'
def __init__(self, *args, **kwargs):
# type: (*Any, **Any) -> None
super(IbmMqCheck, self).__init__(*args, **kwargs)
if not pymqi:
self.log.error("You need to install pymqi: %s", pymqiException)
raise errors.PymqiException("You need to install pymqi: {}".format(pymqiException))
self._config = IBMMQConfig(self.instance, self.init_config)
self.queue_metric_collector = QueueMetricCollector(
self._config,
self.service_check,
self.warning,
self.send_metric,
self.send_metrics_from_properties,
self.log,
)
self.channel_metric_collector = ChannelMetricCollector(self._config, self.service_check, self.gauge, self.log)
self.metadata_collector = MetadataCollector(self._config, self.log)
self.stats_collector = StatsCollector(self._config, self.send_metrics_from_properties, self.log)
self.check_initializations.append(self.create_process_matcher)
def check(self, _):
if not self.check_queue_manager_process():
message = 'Process not found, skipping check run'
self.log.info(message)
for sc_name in (self.SERVICE_CHECK, QueueMetricCollector.QUEUE_MANAGER_SERVICE_CHECK):
self.service_check(
sc_name, self.UNKNOWN, self._config.tags, message=message, hostname=self._config.hostname
)
return
try:
queue_manager = connection.get_queue_manager_connection(self._config, self.log)
self.service_check(self.SERVICE_CHECK, AgentCheck.OK, self._config.tags, hostname=self._config.hostname)
except Exception as e:
message = 'cannot connect to queue manager: {}'.format(e)
self.warning(message)
self.service_check(
self.SERVICE_CHECK,
AgentCheck.CRITICAL,
self._config.tags,
message=message,
hostname=self._config.hostname,
)
self.service_check(
QueueMetricCollector.QUEUE_MANAGER_SERVICE_CHECK,
AgentCheck.CRITICAL,
self._config.tags,
message=message,
hostname=self._config.hostname,
)
self.METHOD_NAME()
raise
self._collect_metadata(queue_manager)
try:
self.channel_metric_collector.get_pcf_channel_metrics(queue_manager)
self.queue_metric_collector.collect_queue_metrics(queue_manager)
if self._config.collect_statistics_metrics:
self.stats_collector.collect(queue_manager)
finally:
queue_manager.disconnect()
def send_metric(self, metric_type, metric_name, metric_value, tags):
if metric_type in [GAUGE, COUNT]:
getattr(self, metric_type)(metric_name, metric_value, tags=tags, hostname=self._config.hostname)
else:
self.log.warning("Unknown metric type `%s` for metric `%s`", metric_type, metric_name)
@AgentCheck.metadata_entrypoint
def _collect_metadata(self, queue_manager):
try:
version = self.metadata_collector.collect_metadata(queue_manager)
if version:
raw_version = '{}.{}.{}.{}'.format(version["major"], version["minor"], version["mod"], version["fix"])
self.set_metadata('version', raw_version, scheme='parts', part_map=version)
self.log.debug('Found ibm_mq version: %s', raw_version)
else:
self.log.debug('Could not retrieve ibm_mq version info')
except Exception as e:
self.log.debug('Could not retrieve ibm_mq version info: %s', e)
def send_metrics_from_properties(self, properties, metrics_map, prefix, tags):
# type: (Dict, Dict, str, List[str]) -> None
for metric_name, (pymqi_type, metric_type) in iteritems(metrics_map):
metric_full_name = '{}.{}'.format(prefix, metric_name)
if pymqi_type not in properties:
self.log.debug("MQ type `%s` not found in properties for metric `%s` and tags `%s`", metric_name, tags)
continue
values_to_submit = []
value = properties[pymqi_type]
if isinstance(value, list):
# Some metrics are returned as a list of two values.
# Index 0 = Contains the value for non-persistent messages
# Index 1 = Contains the value for persistent messages
# https://www.ibm.com/support/knowledgecenter/en/SSFKSJ_7.5.0/com.ibm.mq.mon.doc/q037510_.htm#q037510___q037510_2
values_to_submit.append((tags + ['persistent:false'], value[0]))
values_to_submit.append((tags + ['persistent:true'], value[1]))
else:
values_to_submit.append((tags, value))
for new_tags, metric_value in values_to_submit:
try:
metric_value = int(metric_value)
except ValueError as e:
self.log.debug(
"Cannot convert `%s` to int for metric `%s` ang tags `%s`: %s",
properties[pymqi_type],
metric_name,
new_tags,
e,
)
return
self.send_metric(metric_type, metric_full_name, metric_value, new_tags)
def check_queue_manager_process(self):
if self._config.queue_manager_process_pattern is None:
return True
return self.process_matcher.check_condition(self.check_id, self._config.queue_manager_process_pattern, self.log)
def METHOD_NAME(self):
if self._config.queue_manager_process_pattern is not None:
self.log.debug('Resetting queue manager process match')
return self.process_matcher.remove(self.check_id)
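# The process matcher is shared by every check instance (class attribute), so
# its creation is guarded by a class-level lock to avoid races between instances.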
def create_process_matcher(self):
if self._config.queue_manager_process_pattern is not None:
with IbmMqCheck.MATCHER_CREATION_LOCK:
if IbmMqCheck.process_matcher is None:
limit = int(self.init_config.get('queue_manager_process_limit', 1))
IbmMqCheck.process_matcher = QueueManagerProcessMatcher(limit)
def cancel(self):
# This method is called when the check in unscheduled by the Agent.
self.METHOD_NAME() |
4,846 | set name | #
# Copyright (c) 2008--2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import os
#Base class for all modes.
class BaseMode:
def __init__(self):
self.state = False
self.name = ""
def on(self):
self.state = True
def off(self):
self.state = False
def is_on(self):
if self.state:
return True
else:
return False
def is_off(self):
if not self.state:
return True
else:
return False
def METHOD_NAME(self, name):
self.name = name
def get_name(self):
return self.name
#Contains the directory and file manipulation stuff
class PathHandler:
def __init__(self):
self.rhn_root = "/etc/sysconfig/rhn/allowed-actions/configfiles"
#Set the rhn_root variable.
def set_rhn_root(self, rhn_root):
self.rhn_root = rhn_root
#Creates the self.rhn_root directories if they don't already exist. This allows subclasses to implement modes in different locations.
def _create_rhnconfig_path(self):
if not os.path.exists(self.rhn_root):
os.makedirs(self.rhn_root, int('0770', 8))
#Create the file if it doesn't already exist.
def add_file(self, filename):
self._create_rhnconfig_path()
if not self.check_for_file(filename):
try:
f = open(os.path.join(self.rhn_root, filename), "w")
f.close()
except Exception:
raise
#remove the file if it's present.
def remove_file(self, filename):
self._create_rhnconfig_path()
if self.check_for_file(filename):
try:
os.remove(os.path.join(self.rhn_root, filename))
except Exception:
raise
#Returns True if filename exists in /etc/sysconfig/rhn/allowed-actions/configfiles
def check_for_file(self, filename):
self._create_rhnconfig_path()
return os.path.exists(os.path.join(self.rhn_root, filename))
#Stuff that's common to the Mode subclasses.
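#A mode is considered "on" exactly when its marker file exists under rhn_root.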
class ConfigFilesBaseMode(BaseMode):
def __init__(self):
BaseMode.__init__(self)
self.ph = PathHandler()
self.name = None #Must be set in subclass
def on(self):
self.ph.add_file(self.name)
self.state = True
def off(self):
self.ph.remove_file(self.name)
self.state = False
#Could probably just check the value of state...
def is_on(self):
return self.ph.check_for_file(self.name)
def is_off(self):
return not self.ph.check_for_file(self.name)
class RunMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "run"
self.ph.set_rhn_root("/etc/sysconfig/rhn/allowed-actions/script")
class RunAllMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "all"
self.ph.set_rhn_root("/etc/sysconfig/rhn/allowed-actions/script")
class AllMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "all"
class DeployMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "deploy"
class DiffMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "diff"
class UploadMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "upload"
class MTimeUploadMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "mtime_upload"
#Solaris Specific Modes
class SolarisRunMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "run"
self.ph.set_rhn_root("/opt/redhat/rhn/solaris/etc/sysconfig/rhn/allowed-actions/script")
class SolarisAllRunMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "all"
self.ph.set_rhn_root("/opt/redhat/rhn/solaris/etc/sysconfig/rhn/allowed-actions/script")
class SolarisAllMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "all"
self.ph.set_rhn_root("/opt/redhat/rhn/solaris/etc/sysconfig/rhn/allowed-actions/configfiles")
class SolarisDeployMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "deploy"
self.ph.set_rhn_root("/opt/redhat/rhn/solaris/etc/sysconfig/rhn/allowed-actions/configfiles")
class SolarisDiffMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "diff"
self.ph.set_rhn_root("/opt/redhat/rhn/solaris/etc/sysconfig/rhn/allowed-actions/configfiles")
class SolarisUploadMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "upload"
self.ph.set_rhn_root("/opt/redhat/rhn/solaris/etc/sysconfig/rhn/allowed-actions/configfiles")
class SolarisMTimeUploadMode(ConfigFilesBaseMode):
def __init__(self):
ConfigFilesBaseMode.__init__(self)
self.name = "mtime_upload"
self.ph.set_rhn_root("/opt/redhat/rhn/solaris/etc/sysconfig/rhn/allowed-actions/configfiles")
|
4,847 | call tir dyn | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
"""Relax vm primitives."""
from typing import Union
from . import _ffi_api
from ...expr import Expr, Call, PrimValue, DataTypeImm, Tuple, StringImm
from ...utils import args_converter
@args_converter.auto
def alloc_storage(
shape: Expr,
runtime_device_index: Union[int, Expr],
dtype: Union[str, Expr],
storage_scope: Union[str, StringImm] = "global",
) -> Call:
"""Construct a Call to allocate a storage with specific size,
runtime_device_index, and dtype.
Parameters
----------
shape : Expr
The shape of the storage to be allocated.
runtime_device_index : Union[int, Expr]
The device index indicating on which device the tensor is to
be allocated at runtime. Index -1 is reserved for the host device.
dtype : Union[str, Expr]
The datatype of the storage to be allocated.
storage_scope : Union[str, StringImm]
The storage scope of the storage to allocate. Default is global.
Returns
-------
result : Call
A relax Call, which gets the allocated storage.
"""
if isinstance(dtype, str):
dtype = DataTypeImm(dtype)
if isinstance(storage_scope, str):
storage_scope = StringImm(storage_scope)
if isinstance(runtime_device_index, int):
runtime_device_index = PrimValue(runtime_device_index)
return _ffi_api.alloc_storage(shape, runtime_device_index, dtype, storage_scope) # type: ignore
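# Example (a sketch; assumes shape is a relax ShapeExpr, e.g. relax.ShapeExpr([16])):
#   storage = alloc_storage(relax.ShapeExpr([16]), runtime_device_index=0, dtype="float32")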
@args_converter.auto
def alloc_tensor(
storage: Expr, offset: Union[int, Expr], shape: Expr, dtype: Union[str, Expr]
) -> Call:
"""Construct a Call to allocate a tensor on a certain storage starting from the given offset.
Parameters
----------
storage : Expr
The storage to allocate the tensor to.
offset : Union[int, Expr]
The storage offset to allocate the tensor.
shape : Expr
The shape of the tensor to be allocated.
dtype : Union[str, Expr]
The datatype of the tensor to be allocated.
Returns
-------
result : Call
A relax Call, which gets the allocated tensor.
"""
if isinstance(offset, int):
offset = PrimValue(offset)
if isinstance(dtype, str):
dtype = DataTypeImm(dtype)
return _ffi_api.alloc_tensor(storage, offset, shape, dtype) # type: ignore
def kill_object(obj: Expr) -> Call:
"""Construct a Call to set the register corresponding to the input object to
null at runtime, in order to kill the input object.
Parameters
----------
obj : Expr
The object to be killed.
Returns
-------
result : Call
CallNode that kills the input object.
"""
return _ffi_api.kill_object(obj) # type: ignore
@args_converter.auto
def METHOD_NAME(func: Expr, args: Tuple) -> Call:
"""Construct a Call to call_tir_dyn (invoke the given TIR PrimFunc)
consisting of the input tensors and the shape of the result.
Parameters
----------
func : Expr
An expression evaluating to a TIR PrimFunc.
args : Tuple
The input args, includes a list of tensors, and a ShapeExpr.
Returns
-------
result : Call
A relax Call to call_tir_dyn.
"""
if isinstance(args, (list, tuple)):
args = Tuple(args)
return _ffi_api.METHOD_NAME(func, args) # type: ignore |
4,848 | load usage markdown | import operator
from collections import defaultdict
from aiohttp_jinja2 import template
from app.service.auth_svc import check_authorization
from app.utility.base_world import BaseWorld
class CampaignPack(BaseWorld):
def __init__(self, services):
self.auth_svc = services.get('auth_svc')
self.app_svc = services.get('app_svc')
self.data_svc = services.get('data_svc')
self.rest_svc = services.get('rest_svc')
async def enable(self):
self.app_svc.application.router.add_route('GET', '/campaign/agents', self._section_agent)
self.app_svc.application.router.add_route('GET', '/campaign/abilities', self._section_abilities)
self.app_svc.application.router.add_route('GET', '/campaign/adversaries', self._section_profiles)
self.app_svc.application.router.add_route('GET', '/campaign/operations', self._section_operations)
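        # Illustrative only: once enabled, an authorized GET to any of these
        # paths (e.g. /campaign/agents) renders the matching template with the
        # context dict built by the corresponding handler below.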
@check_authorization
@template('agents.html')
async def _section_agent(self, request):
search = dict(access=tuple(await self.auth_svc.get_permissions(request)))
agents = [h.display for h in await self.data_svc.locate('agents', match=search)]
ability_ids = tuple(self.get_config(name='agents', prop='deployments'))
abilities = await self.data_svc.locate('abilities', match=dict(ability_id=ability_ids))
agent_config = self.get_config(name='agents')
return dict(agents=agents, abilities=self._rollup_abilities(abilities), agent_config=agent_config)
@check_authorization
@template('abilities.html')
async def _section_abilities(self, request):
access = dict(access=tuple(await self.auth_svc.get_permissions(request)))
abilities = await self.data_svc.locate('abilities', match=access)
payloads = list(await self.rest_svc.list_payloads())
platforms = dict()
for a in abilities:
for executor in a.executors:
if executor.platform in platforms:
platforms[executor.platform].add(executor.name)
else:
platforms[executor.platform] = set([executor.name])
for p in platforms:
platforms[p] = list(platforms[p])
return dict(platforms=platforms, payloads=payloads)
@check_authorization
@template('adversaries.html')
async def _section_profiles(self, request):
access = dict(access=tuple(await self.auth_svc.get_permissions(request)))
abilities = await self.data_svc.locate('abilities', match=access)
objs = await self.data_svc.locate('objectives', match=access)
platforms = dict()
for a in abilities:
for executor in a.executors:
if executor.platform in platforms:
platforms[executor.platform].add(executor.name)
else:
platforms[executor.platform] = set([executor.name])
for p in platforms:
platforms[p] = list(platforms[p])
tactics = sorted(list(set(a.tactic.lower() for a in abilities)))
payloads = list(await self.rest_svc.list_payloads())
adversaries = sorted([a.display for a in await self.data_svc.locate('adversaries', match=access)],
key=lambda a: a['name'])
exploits = sorted([a.display for a in abilities], key=operator.itemgetter('technique_id', 'name'))
objectives = sorted([a.display for a in objs], key=operator.itemgetter('id', 'name'))
return dict(adversaries=adversaries, exploits=exploits, payloads=payloads,
tactics=tactics, platforms=platforms, objectives=objectives)
@check_authorization
@template('operations.html')
async def _section_operations(self, request):
        def METHOD_NAME(header):
            """Collect the '* **Key**: value' bullets under the matching section."""
            markdown = []
            seen_header = False
            with open('plugins/fieldmanual/sphinx-docs/Basic-Usage.md', 'r') as f:
                for x in f:
                    # e.g. header='operations' matches the '## Operations' section
                    if not seen_header and '## %s' % header.capitalize() in x:
                        seen_header = True
                    elif seen_header and '## ' in x:
                        break
                    elif seen_header and '*' in x and ': ' in x:
                        key, val = x.split(': ', 1)
                        key_parts = key.split('*')
                        # '* **Key**: value' splits so that key_parts[3] == 'Key'
                        if len(key_parts) > 3 and key_parts[3] and val:
                            markdown.append({key_parts[3]: val.strip('\n')})
            return markdown
access = dict(access=tuple(await self.auth_svc.get_permissions(request)))
hosts = [h.display for h in await self.data_svc.locate('agents', match=access)]
groups = sorted(list(set(([h['group'] for h in hosts]))))
adversaries = sorted([a.display for a in await self.data_svc.locate('adversaries', match=access)],
key=lambda a: a['name'])
sources = [s.display for s in await self.data_svc.locate('sources', match=access)]
planners = sorted([p.display for p in await self.data_svc.locate('planners')],
key=lambda p: p['name'])
obfuscators = [o.display for o in await self.data_svc.locate('obfuscators')]
operations = [o.display for o in await self.data_svc.locate('operations', match=access)]
usage = METHOD_NAME('operations')
return dict(operations=operations, groups=groups, adversaries=adversaries, sources=sources, planners=planners,
obfuscators=obfuscators, usage=usage)
@staticmethod
def _rollup_abilities(abilities):
rolled = defaultdict(list)
for a in abilities:
rolled[a.ability_id].append(a.display)
return dict(rolled) |
4,849 | test quadratic form | """
Copyright 2013 Steven Diamond
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import warnings
import numpy as np
import cvxpy as cp
from cvxpy.atoms.affine.vstack import vstack
from cvxpy.atoms.elementwise.power import power
from cvxpy.expressions.variable import Variable
from cvxpy.tests.base_test import BaseTest
class TestExpressions(BaseTest):
""" Unit tests for the expression/expression module. """
def setUp(self) -> None:
pass
# Test elementwise power
def test_power(self) -> None:
x = Variable(3)
y = Variable(3)
self.assertFalse(x.is_constant())
self.assertTrue(x.is_affine())
self.assertTrue(x.is_quadratic())
with warnings.catch_warnings():
warnings.simplefilter("ignore")
s = power(x.T @ y, 0)
self.assertTrue(s.is_constant())
self.assertTrue(s.is_affine())
self.assertTrue(s.is_quadratic())
t = power(x-y, 1)
self.assertFalse(t.is_constant())
self.assertTrue(t.is_affine())
self.assertTrue(t.is_quadratic())
u = power(x+2*y, 2)
self.assertFalse(u.is_constant())
self.assertFalse(u.is_affine())
self.assertTrue(u.is_quadratic())
self.assertTrue(u.is_dcp())
w = (x+2*y)**2
self.assertFalse(w.is_constant())
self.assertFalse(w.is_affine())
self.assertTrue(w.is_quadratic())
self.assertTrue(w.is_dcp())
def test_matrix_multiplication(self) -> None:
x = Variable((3, 5))
y = Variable((3, 5))
self.assertFalse(x.is_constant())
self.assertTrue(x.is_affine())
self.assertTrue(x.is_quadratic())
with warnings.catch_warnings():
warnings.simplefilter("ignore")
s = x.T @ y
self.assertFalse(s.is_constant())
self.assertFalse(s.is_affine())
self.assertTrue(s.is_quadratic())
self.assertFalse(s.is_dcp())
def test_quad_over_lin(self) -> None:
x = Variable((3, 5))
y = Variable((3, 5))
z = Variable()
s = cp.quad_over_lin(x-y, z)
self.assertFalse(s.is_constant())
self.assertFalse(s.is_affine())
self.assertFalse(s.is_quadratic())
self.assertTrue(s.is_dcp())
t = cp.quad_over_lin(x+2*y, 5)
self.assertFalse(t.is_constant())
self.assertFalse(t.is_affine())
self.assertTrue(t.is_quadratic())
self.assertTrue(t.is_dcp())
def test_matrix_frac(self) -> None:
x = Variable(5)
M = np.eye(5)
P = M.T @ M
s = cp.matrix_frac(x, P)
self.assertFalse(s.is_constant())
self.assertFalse(s.is_affine())
self.assertTrue(s.is_quadratic())
self.assertTrue(s.is_dcp())
def METHOD_NAME(self) -> None:
x = Variable(5)
P = np.eye(5) - 2*np.ones((5, 5))
q = np.ones((5, 1))
with warnings.catch_warnings():
warnings.simplefilter("ignore")
s = x.T @ P @ x + q.T @ x
self.assertFalse(s.is_constant())
self.assertFalse(s.is_affine())
self.assertTrue(s.is_quadratic())
self.assertFalse(s.is_dcp())
def test_sum_squares(self) -> None:
X = Variable((5, 4))
P = np.ones((3, 5))
Q = np.ones((4, 7))
M = np.ones((3, 7))
y = P @ X @ Q + M
self.assertFalse(y.is_constant())
self.assertTrue(y.is_affine())
self.assertTrue(y.is_quadratic())
self.assertTrue(y.is_dcp())
s = cp.sum_squares(y)
self.assertFalse(s.is_constant())
self.assertFalse(s.is_affine())
self.assertTrue(s.is_quadratic())
self.assertTrue(s.is_dcp())
# Frobenius norm squared is indeed quadratic
# but can't show quadraticity using recursive rules
t = cp.norm(y, 'fro')**2
self.assertFalse(t.is_constant())
self.assertFalse(t.is_affine())
self.assertFalse(t.is_quadratic())
self.assertTrue(t.is_dcp())
def test_indefinite_quadratic(self) -> None:
x = Variable()
y = Variable()
z = Variable()
with warnings.catch_warnings():
warnings.simplefilter("ignore")
s = y*z
self.assertTrue(s.is_quadratic())
self.assertFalse(s.is_dcp())
t = (x+y)**2 - s - z*z
self.assertTrue(t.is_quadratic())
self.assertFalse(t.is_dcp())
def test_non_quadratic(self) -> None:
x = Variable()
y = Variable()
z = Variable()
s = cp.max(vstack([x, y, z]))**2
self.assertFalse(s.is_quadratic())
t = cp.max(vstack([x**2, power(y, 2), z]))
self.assertFalse(t.is_quadratic())
def test_affine_prod(self) -> None:
x = Variable((3, 5))
y = Variable((5, 4))
with warnings.catch_warnings():
warnings.simplefilter("ignore")
s = x @ y
self.assertFalse(s.is_constant())
self.assertFalse(s.is_affine())
self.assertTrue(s.is_quadratic())
self.assertFalse(s.is_dcp())
def test_has_quadratic(self) -> None:
"""Test the has_quadratic_term function."""
x = Variable()
assert not x.has_quadratic_term()
assert not (3 + 3*x).has_quadratic_term()
assert (x**2).has_quadratic_term()
assert (x**2/2).has_quadratic_term()
assert (x**2 + x**3).has_quadratic_term()
assert (2*x**2 + x**3).has_quadratic_term()
assert cp.conj(x**2).has_quadratic_term()
assert not cp.pos(x**2).has_quadratic_term()
assert cp.square(x**2).has_quadratic_term()
assert cp.huber(x**3).has_quadratic_term()
assert cp.power(x**2, 1).has_quadratic_term()
assert cp.quad_over_lin(x**3, 1).has_quadratic_term()
assert not cp.quad_over_lin(x**3, x).has_quadratic_term()
y = cp.Variable(2)
P = np.eye(2)
assert cp.matrix_frac(y**3, P).has_quadratic_term()
P = cp.Parameter((2, 2), PSD=True)
assert cp.matrix_frac(y**3, P).has_quadratic_term() |
4,850 | encode | # Copyright 2021 The OpenAI Team Authors.
# Copyright 2021-2022 The Alibaba Fundamental Vision Team Authors. All rights reserved.
#
# The implementation here is modified based on OpenAI CLIP,
# originally MIT License, Copyright (c) 2021 OpenAI,
# and publicly available at https://github.com/openai/CLIP/.
""" CLIP Tokenizer."""
import gzip
import html
import os
from functools import lru_cache
import ftfy
import regex as re
import torch
@lru_cache()
def default_bpe():
return os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'bpe_simple_vocab_16e6.txt.gz')
@lru_cache()
def bytes_to_unicode():
"""
    Returns a mapping from utf-8 bytes to corresponding unicode strings.
    The reversible bpe codes work on unicode strings.
    This means you need a large # of unicode characters in your vocab if you want to avoid UNKs.
    When you're at something like a 10B token dataset you end up needing around 5K for decent coverage.
    This is a significant percentage of your normal, say, 32K bpe vocab.
    To avoid that, we want lookup tables between utf-8 bytes and unicode strings,
    ones that avoid mapping to whitespace/control characters the bpe code barfs on.
"""
bs = list(range(ord('!'),
ord('~') + 1)) + list(range(
ord('¡'),
ord('¬') + 1)) + list(range(ord('®'),
ord('ÿ') + 1))
cs = bs[:]
n = 0
for b in range(2**8):
if b not in bs:
bs.append(b)
cs.append(2**8 + n)
n += 1
cs = [chr(n) for n in cs]
return dict(zip(bs, cs))
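# A small round-trip sketch (ours): the mapping is a bijection over all 256
# byte values, so decoding recovers the original bytes exactly:
#
#   b2u = bytes_to_unicode()
#   u2b = {v: k for k, v in b2u.items()}
#   data = 'café'.encode('utf-8')
#   assert bytes(u2b[ch] for ch in ''.join(b2u[b] for b in data)) == data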
def get_pairs(word):
"""Return set of symbol pairs in a word.
Word is represented as tuple of symbols (symbols being variable-length strings).
"""
pairs = set()
prev_char = word[0]
for char in word[1:]:
pairs.add((prev_char, char))
prev_char = char
return pairs
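# For example (ours): get_pairs(('h', 'e', 'l', 'l', 'o')) returns
# {('h', 'e'), ('e', 'l'), ('l', 'l'), ('l', 'o')}.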
def basic_clean(text):
text = ftfy.fix_text(text)
text = html.unescape(html.unescape(text))
return text.strip()
def whitespace_clean(text):
text = re.sub(r'\s+', ' ', text)
text = text.strip()
return text
class SimpleTokenizer(object):
def __init__(self, bpe_path: str = default_bpe()):
self.byte_encoder = bytes_to_unicode()
self.byte_decoder = {v: k for k, v in self.byte_encoder.items()}
merges = gzip.open(bpe_path).read().decode('utf-8').split('\n')
merges = merges[1:49152 - 256 - 2 + 1]
merges = [tuple(merge.split()) for merge in merges]
vocab = list(bytes_to_unicode().values())
vocab = vocab + [v + '</w>' for v in vocab]
for merge in merges:
vocab.append(''.join(merge))
vocab.extend(['<|startoftext|>', '<|endoftext|>'])
self.encoder = dict(zip(vocab, range(len(vocab))))
self.decoder = {v: k for k, v in self.encoder.items()}
self.bpe_ranks = dict(zip(merges, range(len(merges))))
self.cache = {
'<|startoftext|>': '<|startoftext|>',
'<|endoftext|>': '<|endoftext|>'
}
self.pat = re.compile(
r"""<\|startoftext\|>|<\|endoftext\|>|'s|'t|'re|'ve|'m|'ll|'d|[\p{L}]+|[\p{N}]|[^\s\p{L}\p{N}]+""",
re.IGNORECASE)
def bpe(self, token):
if token in self.cache:
return self.cache[token]
word = tuple(token[:-1]) + (token[-1] + '</w>', )
pairs = get_pairs(word)
if not pairs:
return token + '</w>'
        error_list = []
        while True:
            # repeatedly apply the highest-priority (lowest-rank) merge until
            # no adjacent pair in the word remains in the merge table
            bigram = min(
                pairs, key=lambda pair: self.bpe_ranks.get(pair, float('inf')))
            if bigram not in self.bpe_ranks:
                break
first, second = bigram
new_word = []
i = 0
while i < len(word):
try:
j = word.index(first, i)
new_word.extend(word[i:j])
i = j
                except ValueError as err:
                    # `first` does not occur again in the word; keep the
                    # remaining symbols as-is and stop scanning
                    error_list.append(err)
                    new_word.extend(word[i:])
                    break
if word[i] == first and i < len(word) - 1 and word[
i + 1] == second:
new_word.append(first + second)
i += 2
else:
new_word.append(word[i])
i += 1
new_word = tuple(new_word)
word = new_word
if len(word) == 1:
break
else:
pairs = get_pairs(word)
if len(error_list) > 100:
print(error_list[-1])
word = ' '.join(word)
self.cache[token] = word
return word
def METHOD_NAME(self, text):
bpe_tokens = []
text = whitespace_clean(basic_clean(text)).lower()
for token in re.findall(self.pat, text):
token = ''.join(self.byte_encoder[b]
for b in token.METHOD_NAME('utf-8'))
bpe_tokens.extend(self.encoder[bpe_token]
for bpe_token in self.bpe(token).split(' '))
return bpe_tokens
def decode(self, tokens):
text = ''.join([self.decoder[token] for token in tokens])
text = bytearray([self.byte_decoder[c] for c in text]).decode(
'utf-8', errors='replace').replace('</w>', ' ')
return text
def clip_tokenize(tokenizer, texts, context_length=77, truncate=True):
"""
    Returns the tokenized representation of the given input string(s)
    Parameters
    ----------
    tokenizer : SimpleTokenizer
        The tokenizer used to encode the input string(s)
    texts : Union[str, List[str]]
        An input string or a list of input strings to tokenize
    context_length : int
        The context length to use; all CLIP models use 77 as the context length
    truncate : bool
        Whether to truncate the text in case its encoding is longer than the context length
    Returns
    -------
    A two-dimensional IntTensor containing the resulting tokens,
    shape = [number of input strings, context_length].
"""
if isinstance(texts, str):
texts = [texts]
sot_token = tokenizer.encoder['<|startoftext|>']
eot_token = tokenizer.encoder['<|endoftext|>']
all_tokens = [[sot_token] + tokenizer.METHOD_NAME(text) + [eot_token]
for text in texts]
result = torch.zeros(len(all_tokens), context_length, dtype=torch.int)
for i, tokens in enumerate(all_tokens):
if len(tokens) > context_length:
if truncate:
tokens = tokens[:context_length]
tokens[-1] = eot_token
else:
raise RuntimeError(
f'Input {texts[i]} is too long for context length {context_length}'
)
result[i, :len(tokens)] = torch.tensor(tokens)
return result |
4,851 | test struct abi | import pytest
from vyper.compiler import compile_code
from vyper.compiler.output import build_abi_output
from vyper.compiler.phases import CompilerData
source_codes = [
"""
x: int128
@external
def __init__():
self.x = 1
""",
"""
x: int128
@external
def __init__():
pass
""",
]
@pytest.mark.parametrize("source_code", source_codes)
def test_only_init_function(source_code):
empty_sig = [
{"outputs": [], "inputs": [], "stateMutability": "nonpayable", "type": "constructor"}
]
data = CompilerData(source_code)
assert build_abi_output(data) == empty_sig
def test_default_abi():
default_code = """
@payable
@external
def __default__():
pass
"""
data = CompilerData(default_code)
assert build_abi_output(data) == [{"stateMutability": "payable", "type": "fallback"}]
def test_method_identifiers():
code = """
x: public(int128)
@external
def foo(y: uint256) -> Bytes[100]:
return b"hello"
"""
out = compile_code(code, output_formats=["method_identifiers"])
assert out["method_identifiers"] == {"foo(uint256)": "0x2fbebd38", "x()": "0xc55699c"}
def METHOD_NAME():
code = """
struct MyStruct:
a: address
b: uint256
@external
@view
def foo(s: MyStruct) -> MyStruct:
return s
"""
data = CompilerData(code)
abi = build_abi_output(data)
func_abi = abi[0]
assert func_abi["name"] == "foo"
expected_output = [
{
"type": "tuple",
"name": "",
"components": [{"type": "address", "name": "a"}, {"type": "uint256", "name": "b"}],
}
]
assert func_abi["outputs"] == expected_output
expected_input = {
"type": "tuple",
"name": "s",
"components": [{"type": "address", "name": "a"}, {"type": "uint256", "name": "b"}],
}
assert func_abi["inputs"][0] == expected_input
@pytest.mark.parametrize(
"type,abi_type", [("DynArray[NestedStruct, 2]", "tuple[]"), ("NestedStruct[2]", "tuple[2]")]
)
def test_nested_struct(type, abi_type):
code = f"""
struct MyStruct:
a: address
b: bytes32
struct NestedStruct:
t: MyStruct
foo: uint256
@view
@external
def getStructList() -> {type}:
return [
NestedStruct({{t: MyStruct({{a: msg.sender, b: block.prevhash}}), foo: 1}}),
NestedStruct({{t: MyStruct({{a: msg.sender, b: block.prevhash}}), foo: 2}})
]
"""
out = compile_code(code, output_formats=["abi"])
assert out["abi"] == [
{
"inputs": [],
"name": "getStructList",
"outputs": [
{
"components": [
{
"components": [
{"name": "a", "type": "address"},
{"name": "b", "type": "bytes32"},
],
"name": "t",
"type": "tuple",
},
{"name": "foo", "type": "uint256"},
],
"name": "",
"type": f"{abi_type}",
}
],
"stateMutability": "view",
"type": "function",
}
]
@pytest.mark.parametrize(
"type,abi_type", [("DynArray[DynArray[Foo, 2], 2]", "tuple[][]"), ("Foo[2][2]", "tuple[2][2]")]
)
def test_2d_list_of_struct(type, abi_type):
code = f"""
struct Foo:
a: uint256
b: uint256
@view
@external
def bar(x: {type}):
pass
"""
out = compile_code(code, output_formats=["abi"])
assert out["abi"] == [
{
"inputs": [
{
"components": [
{"name": "a", "type": "uint256"},
{"name": "b", "type": "uint256"},
],
"name": "x",
"type": f"{abi_type}",
}
],
"name": "bar",
"outputs": [],
"stateMutability": "view",
"type": "function",
}
] |
4,852 | test bad content | #!/usr/bin/env python3
"""Test fctl FAUCET CLI utility."""
# Copyright (C) 2015 Brad Cowie, Christopher Lorier and Joe Stringer.
# Copyright (C) 2015 Research and Innovation Advanced Network New Zealand Ltd.
# Copyright (C) 2015--2019 The Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import copy
import shutil
import subprocess
import tempfile
import unittest
from faucet import fctl
class FctlTestCaseBase(unittest.TestCase): # pytype: disable=module-attr
"""Base class for fctl tests."""
DEFAULT_VALUES = {
"dp_id": "0xb827eb608918",
"mac_addr": "a4:5e:60:c5:5c:ed",
"metrics": "learned_macs",
"n": 3,
"port": "17",
"vlan": "2004",
"value": 180725257428205.0,
}
SRC_DIR = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "../../../faucet"
)
FCTL_BASE_ARGS = [
"--metrics={metrics}".format(**DEFAULT_VALUES),
"--labels=dp_id:{dp_id}".format(**DEFAULT_VALUES),
]
FCTL = os.path.join(SRC_DIR, "fctl.py")
tmpdir = None
prom_input_file_name = None
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
self.prom_input_file_name = os.path.join(self.tmpdir, "prom_input.txt")
def tearDown(self):
shutil.rmtree(self.tmpdir)
def fctl_args(self, extra_args=None):
"""generate argument list for fctl"""
result = copy.copy(self.FCTL_BASE_ARGS)
result += ["--endpoints=file:%s" % self.prom_input_file_name]
if extra_args is not None:
result += extra_args
return result
def learned_macs_prom(self, overwrite_labels=None):
"""generate prometheus formated data"""
labels = copy.copy(self.DEFAULT_VALUES)
if overwrite_labels is not None:
labels.update(overwrite_labels)
result = """
{metrics}{{dp_id="{dp_id}",n="{n}",port="{port}",vlan="{vlan}"}}\t{value}"""
return result.format(**labels).strip()
def learned_macs_result(self, overwrite_labels=None):
"""generate expected output data"""
labels = copy.copy(self.DEFAULT_VALUES)
if overwrite_labels is not None:
labels.update(overwrite_labels)
result = """
{metrics}\t[('dp_id', '{dp_id}'), ('n', '{n}'), ('port', '{port}'), ('vlan', '{vlan}')]\t{mac_addr}
"""
return result.format(**labels).strip()
class FctlTestCase(FctlTestCaseBase):
"""Drive fctl from shell."""
def run_fctl(self, prom_input, expected_output, extra_args=None):
"""Ensure fctl succeeds and returns expected output."""
with open(self.prom_input_file_name, "w", encoding="utf-8") as prom_input_file:
prom_input_file.write(prom_input)
fctl_cli = " ".join(["python3", self.FCTL] + self.fctl_args(extra_args))
retcode, output = subprocess.getstatusoutput(
fctl_cli
) # pytype: disable=module-attr
self.assertEqual(0, retcode, msg="%s returned %d" % (fctl_cli, retcode))
output = output.strip()
self.assertEqual(output, expected_output)
def test_macs(self):
"""Test can parse learned MACs from Prometheus data."""
self.run_fctl(self.learned_macs_prom(), self.learned_macs_result())
def test_display_labels(self):
"""Test can filter by display labels."""
expected_output = """
learned_macs\t[('dp_id', '{dp_id}')]\t{mac_addr}
""".format(
**self.DEFAULT_VALUES
).strip()
self.run_fctl(
self.learned_macs_prom(),
expected_output,
extra_args=["--display-labels=dp_id"],
)
class FctlClassTestCase(FctlTestCaseBase):
"""Test fctl internal methods."""
def test_http_fail(self):
"""Test HTTP scrape handled."""
with open(os.devnull, "w", encoding="utf-8") as err_output_file:
self.assertEqual(
None,
fctl.scrape_prometheus(
["http://127.0.0.1:23"], err_output_file=err_output_file
),
)
def test_bad_url(self):
"""Test unparseable URL."""
with open(os.devnull, "w", encoding="utf-8") as err_output_file:
self.assertEqual(
None,
fctl.scrape_prometheus(
["not/a$#@/valid_URL"], err_output_file=err_output_file
),
)
def METHOD_NAME(self):
"""Test bad content."""
bad_input_file_name = os.path.join(self.tmpdir, "bad_content.txt")
with open(bad_input_file_name, "w", encoding="utf-8") as bad_input_file:
bad_input_file.write("NOT/_prometheus_data")
with open(os.devnull, "w", encoding="utf-8") as err_output_file:
self.assertEqual(
None,
fctl.scrape_prometheus(
["file://%s" % bad_input_file_name], err_output_file=err_output_file
),
)
def write_prom_input_file(self, input_data):
with open(self.prom_input_file_name, "w", encoding="utf-8") as prom_input_file:
prom_input_file.write(input_data)
def test_macs(self):
"""Test reporting of learned MACs."""
self.write_prom_input_file(self.learned_macs_prom())
(endpoints, report_metrics, label_matches, nonzero_only, _) = fctl.parse_args(
self.fctl_args()
)
metrics = fctl.scrape_prometheus(endpoints)
report_out = fctl.report_label_match_metrics(
report_metrics=report_metrics,
metrics=metrics,
label_matches=label_matches,
nonzero_only=nonzero_only,
)
self.assertEqual(report_out, self.learned_macs_result())
def test_get_samples(self):
"""Test querying with get_samples"""
self.write_prom_input_file(self.learned_macs_prom())
samples = fctl.get_samples(
["file://" + self.prom_input_file_name], "learned_macs", {}
)
self.assertEqual(samples[0].value, self.DEFAULT_VALUES["value"])
if __name__ == "__main__":
unittest.main() # pytype: disable=module-attr |
4,853 | awooify | # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~# CatUserBot #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
# Copyright (C) 2020-2023 by TgCatUB@Github.
# This file is part of: https://github.com/TgCatUB/catuserbot
# and is released under the "GNU v3.0 License Agreement".
# Please see: https://github.com/TgCatUB/catuserbot/blob/master/LICENSE
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
import requests
from PIL import Image, ImageDraw, ImageFont
from validators.url import url
async def fakegs(search, result):
imgurl = "https://i.imgur.com/wNFr5X2.jpg"
with open("./temp/temp.jpg", "wb") as f:
f.write(requests.get(imgurl).content)
img = Image.open("./temp/temp.jpg")
drawing = ImageDraw.Draw(img)
blue = (0, 0, 255)
black = (0, 0, 0)
font1 = ImageFont.truetype("userbot/helpers/styles/ProductSans-BoldItalic.ttf", 20)
font2 = ImageFont.truetype("userbot/helpers/styles/ProductSans-Light.ttf", 23)
drawing.text((450, 258), result, fill=blue, font=font1)
drawing.text((270, 37), search, fill=black, font=font2)
img.save("./temp/temp.jpg")
return "./temp/temp.jpg"
async def trumptweet(text):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=trumptweet&text={text}"
).json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
async def changemymind(text):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=changemymind&text={text}"
).json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.jpg", "jpeg")
return "temp.jpg"
async def kannagen(text):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=kannagen&text={text}"
).json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
async def moditweet(text):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=tweet&text={text}&username=narendramodi"
).json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
async def tweets(text1, text2):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=tweet&text={text1}&username={text2}"
).json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.webp", "webp")
return "temp.webp"
async def iphonex(text):
r = requests.get(f"https://nekobot.xyz/api/imagegen?type=iphonex&url={text}").json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.jpg", "jpeg")
return "temp.jpg"
async def baguette(text):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=baguette&url={text}"
).json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png").convert("RGB")
img.save("temp.jpg", "jpeg")
return "temp.jpg"
async def threats(text):
r = requests.get(f"https://nekobot.xyz/api/imagegen?type=threats&url={text}").json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png")
if img.mode != "RGB":
img = img.convert("RGB")
img.save("temp.jpg", "jpeg")
return "temp.jpg"
async def lolice(text):
r = requests.get(f"https://nekobot.xyz/api/imagegen?type=lolice&url={text}").json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png")
if img.mode != "RGB":
img = img.convert("RGB")
img.save("temp.jpg", "jpeg")
return "temp.jpg"
async def trash(text):
r = requests.get(f"https://nekobot.xyz/api/imagegen?type=trash&url={text}").json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png")
if img.mode != "RGB":
img = img.convert("RGB")
img.save("temp.jpg", "jpeg")
return "temp.jpg"
async def METHOD_NAME(text):
r = requests.get(f"https://nekobot.xyz/api/imagegen?type=awooify&url={text}").json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png")
if img.mode != "RGB":
img = img.convert("RGB")
img.save("temp.jpg", "jpeg")
return "temp.jpg"
async def trap(text1, text2, text3):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=trap&name={text1}&author={text2}&image={text3}"
).json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png")
if img.mode != "RGB":
img = img.convert("RGB")
img.save("temp.jpg", "jpeg")
return "temp.jpg"
async def phcomment(text1, text2, text3):
r = requests.get(
f"https://nekobot.xyz/api/imagegen?type=phcomment&image={text1}&text={text2}&username={text3}"
).json()
sandy = r.get("message")
caturl = url(sandy)
if not caturl:
return "check syntax once more"
with open("temp.png", "wb") as f:
f.write(requests.get(sandy).content)
img = Image.open("temp.png")
if img.mode != "RGB":
img = img.convert("RGB")
img.save("temp.jpg", "jpeg")
return "temp.jpg" |
4,854 | js e local global negative paired | import itertools
import numpy as np
import torch
import torch.nn.functional as F
import torch.nn as nn
def JSE_loss(zs, zs_n=None, batch=None, sigma=None, neg_by_crpt=False, **kwargs):
'''The Jensen-Shannon Estimator of Mutual Information used in contrastive learning. The
implementation follows the paper `Learning deep representations by mutual information
estimation and maximization <https://arxiv.org/abs/1808.06670>`_.
.. note::
The JSE loss implementation can produce negative values because a :obj:`-2log2` shift is
        added to the computation of JSE, for the sake of consistency with other f-divergence
losses.
Args:
zs (list, optional): List of tensors of shape [batch_size, z_dim].
zs_n (list, optional): List of tensors of shape [nodes, z_dim].
batch (Tensor, optional): Required when both :obj:`zs` and :obj:`zs_n` are given.
sigma (ndarray, optional): A 2D-array of shape [n_views, n_views] with boolean values,
indicating contrast between which two views are computed. Only required
when number of views is greater than 2. If :obj:`sigma[i][j]` = :obj:`True`,
JSE between :math:`view_i` and :math:`view_j` will be computed.
neg_by_crpt (bool, optional): The mode to obtain negative samples in JSE. If True,
obtain negative samples by performing corruption. Otherwise, consider pairs of
different graph samples as negative pairs.
:rtype: :class:`Tensor`
'''
if zs_n is not None:
assert len(zs_n) == len(zs)
assert batch is not None
jse = (METHOD_NAME
if neg_by_crpt else JSE_local_global)
if len(zs) == 1:
return jse(zs[0], zs_n[0], batch)
elif len(zs) == 2:
return (jse(zs[0], zs_n[1], batch) +
jse(zs[1], zs_n[0], batch))
else:
assert len(zs) == len(sigma)
loss = 0
for (i, j) in itertools.combinations(range(len(zs)), 2):
if sigma[i][j]:
loss += (jse(zs[i], zs_n[j], batch) +
jse(zs[j], zs_n[i], batch))
return loss
else:
jse = JSE_global_global
if len(zs) == 2:
return jse(zs[0], zs[1])
elif len(zs) > 2:
assert len(zs) == len(sigma)
loss = 0
for (i, j) in itertools.combinations(range(len(zs)), 2):
if sigma[i][j]:
loss += jse(zs[i], zs[j])
return loss
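# A minimal call sketch (ours; shapes illustrative). With two graph-level
# views and no node embeddings, the global-global estimator is used:
#
#   z1 = torch.randn(32, 128)  # view 1, [batch_size, z_dim]
#   z2 = torch.randn(32, 128)  # view 2
#   loss = JSE_loss([z1, z2])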
def METHOD_NAME(z_g, z_n, batch):
'''
Args:
z_g: of size [2*n_batch, dim]
z_n: of size [2*n_batch*nodes_per_batch, dim]
'''
device = z_g.device
    num_graphs = int(z_g.shape[0]/2)  # e.g. 4; the second half of z_g is discarded
    num_nodes = int(z_n.shape[0]/2)  # e.g. 4*2000; the second half of z_n is corrupted
z_g, _ = torch.split(z_g, num_graphs)
z_n, z_n_crpt = torch.split(z_n, num_nodes)
num_sample_nodes = int(num_nodes / num_graphs)
z_n = torch.split(z_n, num_sample_nodes)
z_n_crpt = torch.split(z_n_crpt, num_sample_nodes)
    d_pos = torch.cat([torch.matmul(z_g[i], z_n[i].t()) for i in range(num_graphs)])  # [8000]
    d_neg = torch.cat([torch.matmul(z_g[i], z_n_crpt[i].t()) for i in range(num_graphs)])  # [8000]
logit = torch.unsqueeze(torch.cat((d_pos, d_neg)), 0) # [1, 16000]
lb_pos = torch.ones((1, num_nodes)).to(device) # [1, 8000]
lb_neg = torch.zeros((1, num_nodes)).to(device) # [1, 8000]
lb = torch.cat((lb_pos, lb_neg), 1)
b_xent = nn.BCEWithLogitsLoss()
loss = b_xent(logit, lb) * 0.5 # following mvgrl-node
return loss
def JSE_local_global(z_g, z_n, batch):
'''
Args:
z_g: Tensor of shape [n_graphs, z_dim].
z_n: Tensor of shape [n_nodes, z_dim].
batch: Tensor of shape [n_graphs].
'''
device = z_g.device
num_graphs = z_g.shape[0]
num_nodes = z_n.shape[0]
pos_mask = torch.zeros((num_nodes, num_graphs)).to(device)
neg_mask = torch.ones((num_nodes, num_graphs)).to(device)
for nodeidx, graphidx in enumerate(batch):
pos_mask[nodeidx][graphidx] = 1.
neg_mask[nodeidx][graphidx] = 0.
d_prime = torch.matmul(z_n, z_g.t())
E_pos = get_expectation(d_prime * pos_mask, positive=True).sum()
E_pos = E_pos / num_nodes
E_neg = get_expectation(d_prime * neg_mask, positive=False).sum()
E_neg = E_neg / (num_nodes * (num_graphs - 1))
return E_neg - E_pos
def JSE_global_global(z1, z2):
'''
Args:
z1, z2: Tensor of shape [batch_size, z_dim].
'''
device = z1.device
num_graphs = z1.shape[0]
pos_mask = torch.zeros((num_graphs, num_graphs)).to(device)
neg_mask = torch.ones((num_graphs, num_graphs)).to(device)
for graphidx in range(num_graphs):
pos_mask[graphidx][graphidx] = 1.
neg_mask[graphidx][graphidx] = 0.
d_prime = torch.matmul(z1, z2.t())
E_pos = get_expectation(d_prime * pos_mask, positive=True).sum()
E_pos = E_pos / num_graphs
E_neg = get_expectation(d_prime * neg_mask, positive=False).sum()
E_neg = E_neg / (num_graphs * (num_graphs - 1))
return E_neg - E_pos
def get_expectation(masked_d_prime, positive=True):
'''
Args:
masked_d_prime: Tensor of shape [n_graphs, n_graphs] for global_global,
tensor of shape [n_nodes, n_graphs] for local_global.
positive (bool): Set True if the d_prime is masked for positive pairs,
set False for negative pairs.
'''
log_2 = np.log(2.)
if positive:
score = log_2 - F.softplus(-masked_d_prime)
else:
score = F.softplus(-masked_d_prime) + masked_d_prime - log_2
return score
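# For reference: with softplus sp(x) = log(1 + e^x), the positive score is
# log 2 - sp(-d); since sp(-d) + d = sp(d), the negative score equals
# sp(d) - log 2 -- the mirror image -- so both vanish at d = 0.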
|
4,855 | grouper | #!/usr/bin/env python
# coding utf-8
from asyncio import BoundedSemaphore, Queue, gather, get_event_loop
from concurrent.futures import ProcessPoolExecutor
from logging import getLogger
from types import GeneratorType
from typing import Any, Callable, Dict, Optional
from aioitertools import enumerate
from tqdm.auto import tqdm
from maggma.utils import primed
logger = getLogger("MultiProcessor")
class BackPressure:
"""
Wrapper for an iterator to provide
async access with backpressure
"""
def __init__(self, iterator, n):
self.iterator = iter(iterator)
self.back_pressure = BoundedSemaphore(n)
def __aiter__(self):
return self
async def __anext__(self):
await self.back_pressure.acquire()
try:
return next(self.iterator)
except StopIteration:
raise StopAsyncIteration
async def release(self, async_iterator):
"""
        wrap a downstream async iterator, releasing one unit of backpressure
        for each item that flows through it
"""
async for item in async_iterator:
try:
self.back_pressure.release()
except ValueError:
pass
yield item
class AsyncUnorderedMap:
"""
Async iterator that maps a function to an async iterator
using an executor and returns items as they are done
This does not guarantee order
"""
def __init__(self, func, async_iterator, executor):
self.iterator = async_iterator
self.func = func
self.executor = executor
loop = get_event_loop()
self.fill_task = loop.create_task(self.get_from_iterator())
self.done_sentinel = object()
self.results = Queue()
self.tasks = {}
async def process_and_release(self, idx):
future = self.tasks[idx]
try:
item = await future
self.results.put_nowait(item)
except Exception:
pass
finally:
self.tasks.pop(idx)
async def get_from_iterator(self):
loop = get_event_loop()
async for idx, item in enumerate(self.iterator):
future = loop.run_in_executor(self.executor, safe_dispatch, (self.func, item))
self.tasks[idx] = future
loop.create_task(self.process_and_release(idx))
await gather(*self.tasks.values())
self.results.put_nowait(self.done_sentinel)
def __aiter__(self):
return self
async def __anext__(self):
item = await self.results.get()
if item == self.done_sentinel:
raise StopAsyncIteration
return item
async def atqdm(async_iterator, *args, **kwargs):
"""
Wrapper around tqdm for async generators
"""
_tqdm = tqdm(*args, **kwargs)
async for item in async_iterator:
_tqdm.update()
yield item
_tqdm.close()
async def METHOD_NAME(async_iterator, n: int):
"""
    Collect data from an async iterator into fixed-length chunks or blocks;
    a final short chunk is yielded when the total is not a multiple of n.
    For example, grouping 'ABCDEFG' by 3 yields ['A', 'B', 'C'], ['D', 'E', 'F'], ['G'].
    Updated from:
    https://stackoverflow.com/questions/31164731/python-chunking-csv-file-multiproccessing/31170795#31170795
    Modified for async.
"""
chunk = []
async for item in async_iterator:
chunk.append(item)
if len(chunk) >= n:
yield chunk
chunk.clear()
if chunk != []:
yield chunk
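# Illustrative use (ours):
#
#   async def numbers():
#       for i in range(7):
#           yield i
#
#   async for chunk in METHOD_NAME(numbers(), 3):
#       print(chunk)  # [0, 1, 2] then [3, 4, 5] then [6]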
def safe_dispatch(val):
func, item = val
try:
return func(item)
except Exception as e:
logger.error(e)
return None
async def multi(
builder,
num_processes,
no_bars=False,
heartbeat_func: Optional[Callable[..., Any]] = None,
heartbeat_func_kwargs: Optional[Dict[Any, Any]] = None,
):
builder.connect()
cursor = builder.get_items()
executor = ProcessPoolExecutor(num_processes)
# Gets the total number of items to process by priming
# the cursor
total = None
if isinstance(cursor, GeneratorType):
try:
cursor = primed(cursor)
if hasattr(builder, "total"):
total = builder.total
except StopIteration:
pass
elif hasattr(cursor, "__len__"):
total = len(cursor)
elif hasattr(cursor, "count"):
total = cursor.count()
logger.info(
f"Starting multiprocessing: {builder.__class__.__name__}",
extra={
"maggma": {
"event": "BUILD_STARTED",
"total": total,
"builder": builder.__class__.__name__,
"sources": [source.name for source in builder.sources],
"targets": [target.name for target in builder.targets],
}
},
)
back_pressured_get = BackPressure(
iterator=tqdm(cursor, desc="Get", total=total, disable=no_bars),
n=builder.chunk_size,
)
processed_items = atqdm(
async_iterator=AsyncUnorderedMap(
func=builder.process_item,
async_iterator=back_pressured_get,
executor=executor,
),
total=total,
desc="Process Items",
disable=no_bars,
)
if not heartbeat_func_kwargs:
heartbeat_func_kwargs = {}
if heartbeat_func:
heartbeat_func(**heartbeat_func_kwargs)
back_pressure_relief = back_pressured_get.release(processed_items)
update_items = tqdm(total=total, desc="Update Targets", disable=no_bars)
async for chunk in METHOD_NAME(back_pressure_relief, n=builder.chunk_size):
logger.info(
f"Processed batch of {builder.chunk_size} items",
extra={
"maggma": {
"event": "UPDATE",
"items": len(chunk),
"builder": builder.__class__.__name__,
"sources": [source.name for source in builder.sources],
"targets": [target.name for target in builder.targets],
}
},
)
        # use a fresh name to avoid rebinding the `processed_items` generator above
        valid_items = [item for item in chunk if item is not None]
        builder.update_targets(valid_items)
        update_items.update(len(valid_items))
logger.info(
f"Ended multiprocessing: {builder.__class__.__name__}",
extra={
"maggma": {
"event": "BUILD_ENDED",
"builder": builder.__class__.__name__,
"sources": [source.name for source in builder.sources],
"targets": [target.name for target in builder.targets],
}
},
)
update_items.close()
builder.finalize() |
4,856 | init num samples | # Copyright 2023 The DLRover Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
from typing import Dict, Iterator, Optional, TypeVar
import torch
import torch.distributed as dist
from torch.utils.data import Dataset, DistributedSampler
from dlrover.python.common.log import default_logger as logger
T_co = TypeVar("T_co", covariant=True)
class ElasticDistributedSampler(DistributedSampler):
"""ElasticDistributedSampler can checkpoint unused sample indices
and restore sample indices from the checkpoint to support
fault-tolerance.
Example::
>>> dataset = torchvision.datasets.ImageFolder(
... root=args.training_data,
... transform=transforms.ToTensor(),
... )
>>> sampler = ElasticDistributedSampler(dataset=dataset)
>>> dataloader = DataLoader(
... dataset=train_data,
... batch_size=args.batch_size,
... num_workers=2,
... sampler=sampler,
... )
>>> for epoch in range(start_epoch, n_epochs):
... sampler.set_epoch(epoch)
... train(dataloader)
"""
def __init__(
self,
dataset: Dataset,
num_replicas: Optional[int] = None,
rank: Optional[int] = None,
shuffle: bool = True,
seed: int = 0,
drop_last: bool = False,
) -> None:
if not dist.is_initialized():
rank = 0 if not rank else rank
num_replicas = 1 if not num_replicas else num_replicas
super(ElasticDistributedSampler, self).__init__(
dataset,
num_replicas,
rank,
shuffle,
seed,
drop_last,
)
self._epoch_checkpoint: Dict[int, int] = {}
def __iter__(self) -> Iterator[T_co]:
indices = [] # type: ignore
if self.shuffle:
# deterministically shuffle based on epoch and seed
g = torch.Generator()
g.manual_seed(self.seed + self.epoch)
indices = torch.randperm(len(self.dataset), generator=g).tolist()
else:
indices = list(range(len(self.dataset)))
if not self.drop_last:
# add extra samples to make it evenly divisible
padding_size = self.total_size - len(indices)
if padding_size <= len(indices):
indices += indices[:padding_size]
else:
indices += (indices * math.ceil(padding_size / len(indices)))[
:padding_size
]
else:
# remove tail of data to make it evenly divisible.
indices = indices[: self.total_size]
assert len(indices) == self.total_size
# subsample
completed_num = self._epoch_checkpoint.get(self.epoch, 0)
start_iter = self.rank + completed_num
# fmt: off
indices = indices[start_iter:self.total_size:self.num_replicas]
# fmt: on
if self.epoch not in self._epoch_checkpoint:
self.METHOD_NAME()
assert len(indices) == self.num_samples
return iter(indices)
def METHOD_NAME(self):
if self.drop_last and len(self.dataset) % self.num_replicas != 0:
# Split to nearest available length that is evenly divisible.
# This is to ensure each rank receives the same amount of data when
# using this Sampler.
self.num_samples = math.ceil(
(len(self.dataset) - self.num_replicas) / self.num_replicas
)
else:
self.num_samples = math.ceil(len(self.dataset) / self.num_replicas)
def state_dict(self, iter_step, micro_batch_size):
"""Checkpoint the index of the last completed sample.
        In DDP training, the number of samples completed in each
        step is micro_batch_size * num_replicas.
"""
completed_num = iter_step * micro_batch_size * self.num_replicas
state = {
"completed_num": completed_num,
"epoch": self.epoch,
}
return state
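    # Illustrative checkpoint round trip (variable names are ours):
    #
    #   state = sampler.state_dict(iter_step=step, micro_batch_size=batch_size)
    #   torch.save(state, "sampler.ckpt")
    #   ...
    #   sampler.load_state_dict(torch.load("sampler.ckpt"))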
def load_state_dict(self, state: Dict[str, int]):
"""
Restore the uncompleted shards from a checkpoint. The shard
client will send uncompleted shards to the DLRover job master.
The master will assign those shards to workers to restore training.
"""
self.epoch = int(state.get("epoch", 0))
        completed_num = int(state.get("completed_num", 0))
        if completed_num > self.total_size:
            # wrap first so that num_samples below cannot go negative
            completed_num = completed_num % self.total_size
        self.num_samples = int(
            (self.total_size - completed_num) / self.num_replicas
        )
        self._epoch_checkpoint[self.epoch] = completed_num
logger.info(
"Load epoch = %s, completed num = %s, num_samples = %s",
self.epoch,
completed_num,
self.num_samples,
) |
4,857 | is local | """Interface to the compiler's internal symbol tables"""
import _symtable
from _symtable import (USE, DEF_GLOBAL, DEF_LOCAL, DEF_PARAM,
DEF_IMPORT, DEF_BOUND, DEF_ANNOT, SCOPE_OFF, SCOPE_MASK, FREE,
LOCAL, GLOBAL_IMPLICIT, GLOBAL_EXPLICIT, CELL)
import weakref
__all__ = ["symtable", "SymbolTable", "Class", "Function", "Symbol"]
def symtable(code, filename, compile_type):
top = _symtable.symtable(code, filename, compile_type)
return _newSymbolTable(top, filename)
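# Illustrative use (ours):
#
#   st = symtable("def f(x):\n    return x + 1\n", "<example>", "exec")
#   f_table = st.get_children()[0]
#   assert f_table.lookup("x").is_parameter()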
class SymbolTableFactory:
def __init__(self):
self.__memo = weakref.WeakValueDictionary()
def new(self, table, filename):
if table.type == _symtable.TYPE_FUNCTION:
return Function(table, filename)
if table.type == _symtable.TYPE_CLASS:
return Class(table, filename)
return SymbolTable(table, filename)
def __call__(self, table, filename):
key = table, filename
obj = self.__memo.get(key, None)
if obj is None:
obj = self.__memo[key] = self.new(table, filename)
return obj
_newSymbolTable = SymbolTableFactory()
class SymbolTable(object):
def __init__(self, raw_table, filename):
self._table = raw_table
self._filename = filename
self._symbols = {}
def __repr__(self):
if self.__class__ == SymbolTable:
kind = ""
else:
kind = "%s " % self.__class__.__name__
if self._table.name == "global":
return "<{0}SymbolTable for module {1}>".format(kind, self._filename)
else:
return "<{0}SymbolTable for {1} in {2}>".format(kind,
self._table.name,
self._filename)
def get_type(self):
if self._table.type == _symtable.TYPE_MODULE:
return "module"
if self._table.type == _symtable.TYPE_FUNCTION:
return "function"
if self._table.type == _symtable.TYPE_CLASS:
return "class"
assert self._table.type in (1, 2, 3), \
"unexpected type: {0}".format(self._table.type)
def get_id(self):
return self._table.id
def get_name(self):
return self._table.name
def get_lineno(self):
return self._table.lineno
def is_optimized(self):
return bool(self._table.type == _symtable.TYPE_FUNCTION)
def is_nested(self):
return bool(self._table.nested)
def has_children(self):
return bool(self._table.children)
def has_exec(self):
"""Return true if the scope uses exec. Deprecated method."""
return False
def get_identifiers(self):
return self._table.symbols.keys()
def lookup(self, name):
sym = self._symbols.get(name)
if sym is None:
flags = self._table.symbols[name]
namespaces = self.__check_children(name)
sym = self._symbols[name] = Symbol(name, flags, namespaces)
return sym
def get_symbols(self):
return [self.lookup(ident) for ident in self.get_identifiers()]
def __check_children(self, name):
return [_newSymbolTable(st, self._filename)
for st in self._table.children
if st.name == name]
def get_children(self):
return [_newSymbolTable(st, self._filename)
for st in self._table.children]
class Function(SymbolTable):
# Default values for instance variables
__params = None
__locals = None
__frees = None
__globals = None
def __idents_matching(self, test_func):
return tuple([ident for ident in self.get_identifiers()
if test_func(self._table.symbols[ident])])
def get_parameters(self):
if self.__params is None:
self.__params = self.__idents_matching(lambda x:x & DEF_PARAM)
return self.__params
def get_locals(self):
if self.__locals is None:
locs = (LOCAL, CELL)
test = lambda x: ((x >> SCOPE_OFF) & SCOPE_MASK) in locs
self.__locals = self.__idents_matching(test)
return self.__locals
def get_globals(self):
if self.__globals is None:
glob = (GLOBAL_IMPLICIT, GLOBAL_EXPLICIT)
test = lambda x:((x >> SCOPE_OFF) & SCOPE_MASK) in glob
self.__globals = self.__idents_matching(test)
return self.__globals
def get_frees(self):
if self.__frees is None:
is_free = lambda x:((x >> SCOPE_OFF) & SCOPE_MASK) == FREE
self.__frees = self.__idents_matching(is_free)
return self.__frees
class Class(SymbolTable):
__methods = None
def get_methods(self):
if self.__methods is None:
d = {}
for st in self._table.children:
d[st.name] = 1
self.__methods = tuple(d)
return self.__methods
class Symbol(object):
def __init__(self, name, flags, namespaces=None):
self.__name = name
self.__flags = flags
self.__scope = (flags >> SCOPE_OFF) & SCOPE_MASK # like PyST_GetScope()
self.__namespaces = namespaces or ()
def __repr__(self):
return "<symbol {0!r}>".format(self.__name)
def get_name(self):
return self.__name
def is_referenced(self):
return bool(self.__flags & _symtable.USE)
def is_parameter(self):
return bool(self.__flags & DEF_PARAM)
def is_global(self):
return bool(self.__scope in (GLOBAL_IMPLICIT, GLOBAL_EXPLICIT))
def is_declared_global(self):
return bool(self.__scope == GLOBAL_EXPLICIT)
def METHOD_NAME(self):
return bool(self.__flags & DEF_BOUND)
def is_annotated(self):
return bool(self.__flags & DEF_ANNOT)
def is_free(self):
return bool(self.__scope == FREE)
def is_imported(self):
return bool(self.__flags & DEF_IMPORT)
def is_assigned(self):
return bool(self.__flags & DEF_LOCAL)
def is_namespace(self):
"""Returns true if name binding introduces new namespace.
If the name is used as the target of a function or class
statement, this will be true.
Note that a single name can be bound to multiple objects. If
is_namespace() is true, the name may also be bound to other
objects, like an int or list, that does not introduce a new
namespace.
"""
return bool(self.__namespaces)
def get_namespaces(self):
"""Return a list of namespaces bound to this name"""
return self.__namespaces
def get_namespace(self):
"""Returns the single namespace bound to this name.
Raises ValueError if the name is bound to multiple namespaces.
"""
if len(self.__namespaces) != 1:
raise ValueError("name is bound to multiple namespaces")
return self.__namespaces[0]
if __name__ == "__main__":
import os, sys
with open(sys.argv[0]) as f:
src = f.read()
mod = symtable(src, os.path.split(sys.argv[0])[1], "exec")
for ident in mod.get_identifiers():
info = mod.lookup(ident)
print(info, info.METHOD_NAME(), info.is_namespace()) |
4,858 | clear | '''
Reusable tree structure, including a variant with a fast indexed lookup.
'''
from __future__ import annotations
from collections import deque
from typing import Callable, Deque, Dict, Generic, Iterable, List, Optional, TypeVar, Union
try:
from IPython.lib.pretty import PrettyPrinter # type: ignore
support_pretty = True
except ImportError:
support_pretty = False
__all__ = [
'Node',
'IndexedTree',
]
T = TypeVar('T')
MISSING = object()
class Node(Generic[T]):
def __init__(self, data: T, parent: Optional[Node[T]] = None):
self._data: T = data
self._parent: Optional[Node[T]] = parent
self._nodes: List[Node[T]] = list()
@property
def data(self) -> T:
return self._data
@property
def parent(self) -> Optional[Node[T]]:
return self._parent
@property
def parent_data(self) -> Optional[T]:
return self._parent.data if self._parent else None
@property
def nodes(self) -> List[Node[T]]:
return self._nodes
def walk_iterator(self, skip_self=True, breadth_first=False) -> Iterable[Node[T]]:
q: Deque[Node[T]] = deque()
q.append(self)
while q:
node: Node[T] = q.popleft()
if skip_self:
skip_self = False
else:
yield node
if breadth_first:
q.extend(node.nodes)
else:
q.extendleft(reversed(node.nodes))
def walk(self, fn: Callable[[Node[T]], Optional[bool]]) -> Optional[bool]:
'''
Call the given function for every node below this one, depth-first.
Return `False` from `fn` to stop.
Returns `False` if `fn` ever returned it, else `None`.
'''
if fn(self) is False:
return False
for node in self.nodes:
if node.walk(fn) is False:
return False
return None
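    # Illustrative early stop (ours): returning False from `fn` halts the walk.
    #
    #   root = Node('a'); root.add('b'); root.add('c')
    #   root.walk(lambda n: False if n.data == 'b' else None)  # -> False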
def add(self, data: Union[T, Node[T]]) -> Node[T]:
node: Node[T] = data if isinstance(data, Node) else Node[T](data)
node._parent = self # pylint: disable=protected-access # it's our own class
self._nodes.append(node)
return node
def __contains__(self, data: Union[T, Node[T]]):
if isinstance(data, Node):
return data in self._nodes
return any(node.data is data for node in self._nodes)
def __repr__(self):
return f'{self.__class__.__name__}({self._data!r})'
if support_pretty:
def _repr_pretty_(self, p: PrettyPrinter, cycle: bool):
if cycle:
p.text(self.__class__.__name__ + '(<cyclic>)')
return
p.pretty(self._data)
with p.group(4, '', ''):
for node in self._nodes:
p.break_()
p.pretty(node)
class IndexedTree(Generic[T]):
_key_fn: Optional[Callable[[T], str]]
_lookup: Dict[str, Node[T]]
root: Node[T]
def __init__(self, root: T, key_fn: Optional[Callable[[T], str]] = None):
self._key_fn = key_fn
self._root_data = root
self.METHOD_NAME()
def METHOD_NAME(self):
self._lookup = dict()
self.root = Node[T](self._root_data)
self._register(self.root)
def add(self, parent: Union[str, Node[T]], data: Union[T, Node[T]]) -> Node[T]:
parent_node = self._handle_parent_arg(parent)
if not isinstance(data, Node):
data = Node[T](data)
self._register(data)
parent_node.add(data)
return data
def insert_segment(self, parent: Union[str, Node[T]], partial_tree: Node[T]):
parent_node = self._handle_parent_arg(parent)
partial_tree.walk(self._register)
parent_node.add(partial_tree)
def keys(self) -> Iterable[str]:
yield from self._lookup.keys()
def __getitem__(self, key: str) -> Node[T]:
return self._lookup[key]
def __contains__(self, key: str) -> bool:
return key in self._lookup
def get(self, key: str, fallback=MISSING) -> Node[T]:
if fallback is MISSING:
return self._lookup[key]
return self._lookup.get(key, fallback)
def ingest_list(self, src: List[T], parent_fn: Callable[[T], Optional[T]]):
'''
Add multiple items from a list.
Each list item must have a parent that is discoverable using the supplied function.
'''
for item in src:
self._ingest(item, parent_fn)
def _ingest(self, item: T, parent_fn: Callable[[T], Optional[T]]):
current: T = item
assert current
segment: Optional[Node[T]] = None
key: str = self._key_fn(current) if self._key_fn else current # type: ignore
if key in self:
return
while True:
old_segment = segment
segment = Node(current)
if old_segment:
segment.add(old_segment)
parent = parent_fn(current)
if parent is None:
anchor_point = self.root
else:
parent_key: str = self._key_fn(parent) if self._key_fn else parent # type: ignore
anchor_point = self.get(parent_key, None)
if anchor_point:
self.insert_segment(anchor_point, segment)
return
current = parent_fn(current) or self.root.data
def _register(self, node: Node[T]):
key: str = self._key_fn(node.data) if self._key_fn else node.data # type: ignore
if key in self._lookup:
raise KeyError(f'Key already present: {key}')
self._lookup[key] = node
def _handle_parent_arg(self, parent: Union[str, Node[T]]) -> Node[T]:
parent_node: Node[T]
if isinstance(parent, str):
parent_node = self[parent]
elif isinstance(parent, Node):
parent_node = parent
else:
raise TypeError("Parent must be a key or a node")
return parent_node
if support_pretty:
def _repr_pretty_(self, p: PrettyPrinter, cycle: bool):
if cycle:
p.text(self.__class__.__name__ + '(<cyclic>)')
return
p.text('Tree ')
p.pretty(self.root) |
4,859 | configure | """
Copyright (c) 2018-2023 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .format_converter import FileBasedAnnotationConverter, ConverterReturn
from ..representation import ClassificationAnnotation, ContainerAnnotation
from ..utils import read_xml, check_file_existence
from ..config import StringField, PathField, ConfigError
class CVATAttributesRecognitionConverter(FileBasedAnnotationConverter):
__provider__ = 'cvat_attributes_recognition'
annotation_types = (ClassificationAnnotation, )
@classmethod
def parameters(cls):
configuration_parameters = super().parameters()
configuration_parameters.update({
'label': StringField(description='specific label for attribute collection'),
'images_dir': PathField(
is_directory=True, optional=True,
description='path to dataset images, used only for content existence check'
)
})
return configuration_parameters
def METHOD_NAME(self):
super().METHOD_NAME()
self.label = self.get_value_from_config('label')
self.images_dir = self.get_value_from_config('images_dir') or self.annotation_file.parent
def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs):
annotation = read_xml(self.annotation_file)
meta = annotation.find('meta')
size = int(meta.find('task').find('size').text)
attribute_values_mapping = {}
label = self.select_label(meta)
for attribute in label.iter('attribute'):
label_to_id = {
label: idx for idx, label in enumerate(attribute.find('values').text.split('\n'))
}
attribute_values_mapping[attribute.find('name').text] = label_to_id
annotations = []
content_errors = None if not check_content else []
for image_id, image in enumerate(annotation.iter('image')):
identifier = image.attrib['name'].split('/')[-1]
if check_content:
if not check_file_existence(self.images_dir / identifier):
content_errors.append('{}: does not exist'.format(self.images_dir / identifier))
for bbox in image:
if 'label' not in bbox.attrib.keys() or bbox.attrib['label'] != self.label:
continue
annotation_dict = {}
bbox_rect = [
float(bbox.attrib['xtl']), float(bbox.attrib['ytl']),
float(bbox.attrib['xbr']), float(bbox.attrib['ybr'])
]
for attribute in bbox.iter('attribute'):
attribute_name = attribute.attrib['name']
attribute_label = attribute_values_mapping[attribute_name][attribute.text]
attribute_annotation = ClassificationAnnotation(identifier, attribute_label)
attribute_annotation.metadata['rect'] = bbox_rect
annotation_dict[attribute_name] = attribute_annotation
if len(annotation_dict) == 1:
annotations.append(next(iter(annotation_dict.values())))
else:
annotations.append(ContainerAnnotation(annotation_dict))
if progress_callback is not None and image_id % progress_interval == 0:
progress_callback(image_id * 100 / size)
return ConverterReturn(annotations, self.generate_meta(attribute_values_mapping), content_errors)
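# Sketch of the CVAT XML layout this converter consumes, inferred from the
# parsing above rather than from the official CVAT schema (attribute values
# are newline-separated inside <values>):
#
#   <annotations>
#     <meta>
#       <task><size>1</size></task>
#       <label>
#         <name>person</name>
#         <attribute><name>pose</name><values>front\nback</values></attribute>
#       </label>
#     </meta>
#     <image name="dir/img_0.png">
#       <box label="person" xtl="0" ytl="0" xbr="10" ybr="10">
#         <attribute name="pose">front</attribute>
#       </box>
#     </image>
#   </annotations>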
@staticmethod
def generate_meta(attribute_values_mapping):
if len(attribute_values_mapping) == 1:
reversed_label_map = next(iter(attribute_values_mapping.values()))
return {'label_map': {value: key for key, value in reversed_label_map.items()}}
meta = {}
for key, reversed_label_map in attribute_values_mapping.items():
meta['{}_label_map'.format(key)] = {value: key for key, value in reversed_label_map.items()}
return meta
def select_label(self, meta):
label = [label for label in meta.iter('label') if label.find('name').text == self.label]
if not label:
raise ConfigError('{} is not present in annotation'.format(self.label))
return label[0]
def get_meta(self):
annotation = read_xml(self.annotation_file)
meta = annotation.find('meta')
attribute_values_mapping = {}
label = self.select_label(meta)
for attribute in label.iter('attribute'):
label_to_id = {
label: idx for idx, label in enumerate(attribute.find('values').text.split('\n'))
}
attribute_values_mapping[attribute.find('name').text] = label_to_id
return self.generate_meta(attribute_values_mapping) |
4,860 | test conv2d transpose dilation | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for convolutional transpose layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.platform import test
@keras_parameterized.run_all_keras_modes
class Conv2DTransposeTest(keras_parameterized.TestCase):
def _run_test(self, kwargs):
num_samples = 2
stack_size = 3
num_row = 7
num_col = 6
with self.cached_session(use_gpu=True):
testing_utils.layer_test(
keras.layers.Conv2DTranspose,
kwargs=kwargs,
input_shape=(num_samples, num_row, num_col, stack_size))
@parameterized.named_parameters(
('padding_valid', {'padding': 'valid'}),
('padding_same', {'padding': 'same'}),
('strides', {'strides': (2, 2)}),
# Only runs on GPU with CUDA, channels_first is not supported on CPU.
# TODO(b/62340061): Support channels_first on CPU.
('data_format', {'data_format': 'channels_first'}),
('strides_output_padding', {'strides': (2, 2), 'output_padding': (1, 1)}),
)
def test_conv2d_transpose(self, kwargs):
kwargs['filters'] = 2
kwargs['kernel_size'] = (3, 3)
if 'data_format' not in kwargs or test.is_gpu_available(cuda_only=True):
self._run_test(kwargs)
def test_conv2d_transpose_regularizers(self):
kwargs = {
'filters': 3,
'kernel_size': 3,
'padding': 'valid',
'kernel_regularizer': 'l2',
'bias_regularizer': 'l2',
'activity_regularizer': 'l2',
'strides': 1
}
with self.cached_session(use_gpu=True):
layer = keras.layers.Conv2DTranspose(**kwargs)
layer.build((None, 5, 5, 2))
self.assertEqual(len(layer.losses), 2)
layer(keras.backend.variable(np.ones((1, 5, 5, 2))))
self.assertEqual(len(layer.losses), 3)
def test_conv2d_transpose_constraints(self):
k_constraint = lambda x: x
b_constraint = lambda x: x
kwargs = {
'filters': 3,
'kernel_size': 3,
'padding': 'valid',
'kernel_constraint': k_constraint,
'bias_constraint': b_constraint,
'strides': 1
}
with self.cached_session(use_gpu=True):
layer = keras.layers.Conv2DTranspose(**kwargs)
layer.build((None, 5, 5, 2))
self.assertEqual(layer.kernel.constraint, k_constraint)
self.assertEqual(layer.bias.constraint, b_constraint)
def METHOD_NAME(self):
testing_utils.layer_test(keras.layers.Conv2DTranspose,
kwargs={'filters': 2,
'kernel_size': 3,
'padding': 'same',
'data_format': 'channels_last',
'dilation_rate': (2, 2)},
input_shape=(2, 5, 6, 3))
input_data = np.arange(48).reshape((1, 4, 4, 3)).astype(np.float32)
expected_output = np.float32([[192, 228, 192, 228],
[336, 372, 336, 372],
[192, 228, 192, 228],
[336, 372, 336, 372]]).reshape((1, 4, 4, 1))
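# Why these numbers: with an all-ones 3x3 kernel, dilation 2 and 'same'
# padding, each output value is the sum over the 3 input channels of the
# inputs at the dilated tap offsets {-2, 0, +2} that land inside the 4x4
# image; because the taps step by 2, the result repeats with period 2 in
# both dimensions, producing the 2x2-tiled pattern above.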
testing_utils.layer_test(keras.layers.Conv2DTranspose,
input_data=input_data,
kwargs={'filters': 1,
'kernel_size': 3,
'padding': 'same',
'data_format': 'channels_last',
'dilation_rate': (2, 2),
'kernel_initializer': 'ones'},
expected_output=expected_output)
@keras_parameterized.run_all_keras_modes
class Conv3DTransposeTest(keras_parameterized.TestCase):
def _run_test(self, kwargs):
num_samples = 2
stack_size = 3
num_row = 7
num_col = 6
depth = 5
with self.cached_session(use_gpu=True):
testing_utils.layer_test(
keras.layers.Conv3DTranspose,
kwargs=kwargs,
input_shape=(num_samples, depth, num_row, num_col, stack_size))
@parameterized.named_parameters(
('padding_valid', {'padding': 'valid'}),
('padding_same', {'padding': 'same'}),
('strides', {'strides': (2, 2, 2)}),
# Only runs on GPU with CUDA, channels_first is not supported on CPU.
# TODO(b/62340061): Support channels_first on CPU.
('data_format', {'data_format': 'channels_first'}),
('strides_output_padding', {'strides': (2, 2, 2),
'output_padding': (1, 1, 1)}),
)
def test_conv3d_transpose(self, kwargs):
kwargs['filters'] = 2
kwargs['kernel_size'] = (3, 3, 3)
if 'data_format' not in kwargs or test.is_gpu_available(cuda_only=True):
self._run_test(kwargs)
def test_conv3d_transpose_regularizers(self):
kwargs = {
'filters': 3,
'kernel_size': 3,
'padding': 'valid',
'kernel_regularizer': 'l2',
'bias_regularizer': 'l2',
'activity_regularizer': 'l2',
'strides': 1
}
with self.cached_session(use_gpu=True):
layer = keras.layers.Conv3DTranspose(**kwargs)
layer.build((None, 5, 5, 5, 2))
self.assertEqual(len(layer.losses), 2)
layer(keras.backend.variable(np.ones((1, 5, 5, 5, 2))))
self.assertEqual(len(layer.losses), 3)
def test_conv3d_transpose_constraints(self):
k_constraint = lambda x: x
b_constraint = lambda x: x
kwargs = {
'filters': 3,
'kernel_size': 3,
'padding': 'valid',
'kernel_constraint': k_constraint,
'bias_constraint': b_constraint,
'strides': 1
}
with self.cached_session(use_gpu=True):
layer = keras.layers.Conv3DTranspose(**kwargs)
layer.build((None, 5, 5, 5, 2))
self.assertEqual(layer.kernel.constraint, k_constraint)
self.assertEqual(layer.bias.constraint, b_constraint)
def test_conv3d_transpose_dynamic_shape(self):
input_data = np.random.random((1, 3, 3, 3, 3)).astype(np.float32)
with self.cached_session(use_gpu=True):
# Won't raise error here.
testing_utils.layer_test(
keras.layers.Conv3DTranspose,
kwargs={
'data_format': 'channels_last',
'filters': 3,
'kernel_size': 3
},
input_shape=(None, None, None, None, 3),
input_data=input_data)
if test.is_gpu_available(cuda_only=True):
testing_utils.layer_test(
keras.layers.Conv3DTranspose,
kwargs={
'data_format': 'channels_first',
'filters': 3,
'kernel_size': 3
},
input_shape=(None, 3, None, None, None),
input_data=input_data) |
4,861 | test linux webapp quick create kube | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
import requests
from azure.cli.testsdk.scenario_tests import AllowLargeResponse, live_only
from azure.cli.testsdk import (ScenarioTest, ResourceGroupPreparer, JMESPathCheck)
TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
# TODO
class AppserviceKubernetesScenarioTest(ScenarioTest):
pass
# not lima-specific
class WebappBasicE2EKubeTest(ScenarioTest):
@ResourceGroupPreparer(location='canadacentral')
@live_only()
def METHOD_NAME(self, resource_group):
webapp_name = self.create_random_name(
prefix='webapp-quick-linux', length=24)
plan = self.create_random_name(prefix='plan-quick-linux', length=24)
self.cmd(
'appservice plan create -g {} -n {} --is-linux'.format(resource_group, plan))
self.cmd('webapp create -g {} -n {} --plan {} -i patle/ruby-hello'.format(
resource_group, webapp_name, plan))
r = requests.get(
'http://{}.azurewebsites.net'.format(webapp_name), timeout=240)
# verify the web page
self.assertTrue('Ruby on Rails in Web Apps on Linux' in str(r.content))
# verify app settings
self.cmd('webapp config appsettings list -g {} -n {}'.format(resource_group, webapp_name), checks=[
JMESPathCheck('[0].name', 'WEBSITES_ENABLE_APP_SERVICE_STORAGE'),
JMESPathCheck('[0].value', 'false'),
])
self.cmd('webapp update -g {} -n {} --https-only true'.format(resource_group, webapp_name), checks=[JMESPathCheck("httpsOnly", True)])
self.cmd('webapp update -g {} -n {} --https-only false'.format(resource_group, webapp_name), checks=[JMESPathCheck("httpsOnly", False)])
@ResourceGroupPreparer(location="eastus")
def test_win_webapp_quick_create_kube(self, resource_group):
webapp_name = self.create_random_name(prefix='webapp-quick', length=24)
plan = self.create_random_name(prefix='plan-quick', length=24)
self.cmd('appservice plan create -g {} -n {}'.format(resource_group, plan))
r = self.cmd('webapp create -g {} -n {} --plan {} --deployment-local-git'.format(
resource_group, webapp_name, plan)).get_output_in_json()
self.assertTrue(r['ftpPublishingUrl'].startswith('ftps://'))
self.cmd('webapp config appsettings list -g {} -n {}'.format(resource_group, webapp_name), checks=[
JMESPathCheck('[0].name', 'WEBSITE_NODE_DEFAULT_VERSION'),
JMESPathCheck('[0].value', '~14'),
])
self.cmd('webapp update -g {} -n {} --https-only true'.format(resource_group, webapp_name), checks=[JMESPathCheck("httpsOnly", True)])
self.cmd('webapp update -g {} -n {} --https-only false'.format(resource_group, webapp_name), checks=[JMESPathCheck("httpsOnly", False)])
@ResourceGroupPreparer(name_prefix="clitest", random_name_length=24, location="eastus")
def test_win_webapp_quick_create_runtime_kube(self, resource_group):
webapp_name = self.create_random_name(prefix='webapp-quick', length=24)
webapp_name_2 = self.create_random_name(prefix='webapp-quick', length=24)
plan = self.create_random_name(prefix='plan-quick', length=24)
self.cmd('appservice plan create -g {} -n {}'.format(resource_group, plan))
r = self.cmd('webapp create -g {} -n {} --plan {} --deployment-local-git -r "node|14LTS"'.format(
resource_group, webapp_name, plan)).get_output_in_json()
self.assertTrue(r['ftpPublishingUrl'].startswith('ftps://'))
self.cmd('webapp config appsettings list -g {} -n {}'.format(resource_group, webapp_name), checks=[
JMESPathCheck('[0].name', 'WEBSITE_NODE_DEFAULT_VERSION'),
JMESPathCheck('[0].value', '~14'),
])
r = self.cmd('webapp create -g {} -n {} --plan {} --deployment-local-git -r "dotnet:7"'.format(
resource_group, webapp_name_2, plan)).get_output_in_json()
self.assertTrue(r['ftpPublishingUrl'].startswith('ftps://'))
self.cmd('webapp update -g {} -n {} --https-only true'.format(resource_group, webapp_name), checks=[JMESPathCheck("httpsOnly", True)])
self.cmd('webapp update -g {} -n {} --https-only false'.format(resource_group, webapp_name), checks=[JMESPathCheck("httpsOnly", False)]) |
4,862 | test principal filter | from tests import testmodels
from tortoise.contrib import test
from tortoise.exceptions import IntegrityError, OperationalError
from tortoise.queryset import QuerySet
class TestOneToOneFieldWithUnique(test.TestCase):
async def test_principal__empty(self):
with self.assertRaises(IntegrityError):
await testmodels.Principal.create()
async def test_principal__create_by_id(self):
school = await testmodels.School.create(id=1024, name="School1")
principal = await testmodels.Principal.create(name="Sang-Heon Jeon", school_id=school.id)
self.assertEqual(principal.school_id, school.id)
self.assertEqual(await school.principal, principal)
async def test_principal__create_by_name(self):
school = await testmodels.School.create(id=1024, name="School1")
principal = await testmodels.Principal.create(name="Sang-Heon Jeon", school=school)
await principal.fetch_related("school")
self.assertEqual(principal.school, school)
self.assertEqual(await school.principal, principal)
async def test_principal__by_name__created_prefetched(self):
school = await testmodels.School.create(id=1024, name="School1")
principal = await testmodels.Principal.create(name="Sang-Heon Jeon", school=school)
self.assertEqual(principal.school, school)
self.assertEqual(await school.principal, principal)
async def test_principal__by_name__unfetched(self):
school = await testmodels.School.create(id=1024, name="School1")
principal = await testmodels.Principal.create(name="Sang-Heon Jeon", school=school)
principal = await testmodels.Principal.get(id=principal.id)
self.assertIsInstance(principal.school, QuerySet)
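# Until the relation is fetched via fetch_related() or awaited, the
# one-to-one accessor is still a lazy QuerySet rather than a School
# instance -- which is exactly what this test pins down.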
async def test_principal__by_name__re_awaited(self):
school = await testmodels.School.create(id=1024, name="School1")
principal = await testmodels.Principal.create(name="Sang-Heon Jeon", school=school)
await principal.fetch_related("school")
self.assertEqual(principal.school, school)
self.assertEqual(await principal.school, school)
async def test_principal__by_name__awaited(self):
school = await testmodels.School.create(id=1024, name="School1")
principal = await testmodels.Principal.create(name="Sang-Heon Jeon", school=school)
principal = await testmodels.Principal.get(id=principal.id)
self.assertEqual(await principal.school, school)
self.assertEqual(await school.principal, principal)
async def test_update_by_name(self):
school = await testmodels.School.create(id=1024, name="School1")
school2 = await testmodels.School.create(id=2048, name="School2")
principal0 = await testmodels.Principal.create(name="Sang-Heon Jeon", school=school)
await testmodels.Principal.filter(id=principal0.id).update(school=school2)
principal = await testmodels.Principal.get(id=principal0.id)
await principal.fetch_related("school")
self.assertEqual(principal.school, school2)
self.assertEqual(await school.principal, None)
self.assertEqual(await school2.principal, principal)
async def test_update_by_id(self):
school = await testmodels.School.create(id=1024, name="School1")
school2 = await testmodels.School.create(id=2048, name="School2")
principal0 = await testmodels.Principal.create(name="Sang-Heon Jeon", school_id=school.id)
await testmodels.Principal.filter(id=principal0.id).update(school_id=school2.id)
principal = await testmodels.Principal.get(id=principal0.id)
self.assertEqual(principal.school_id, school2.id)
self.assertEqual(await school.principal, None)
self.assertEqual(await school2.principal, principal)
async def test_delete_by_name(self):
school = await testmodels.School.create(id=1024, name="School1")
principal = await testmodels.Principal.create(name="Sang-Heon Jeon", school=school)
del principal.school
with self.assertRaises(IntegrityError):
await principal.save()
async def test_principal__uninstantiated_create(self):
school = await testmodels.School(id=1024, name="School1")
with self.assertRaisesRegex(OperationalError, "You should first call .save()"):
await testmodels.Principal.create(name="Sang-Heon Jeon", school=school)
async def test_principal__instantiated_create(self):
school = await testmodels.School.create(id=1024, name="School1")
await testmodels.Principal.create(name="Sang-Heon Jeon", school=school)
async def test_principal__fetched_bool(self):
school = await testmodels.School.create(id=1024, name="School1")
await school.fetch_related("principal")
self.assertFalse(bool(school.principal))
await testmodels.Principal.create(name="Sang-Heon Jeon", school=school)
await school.fetch_related("principal")
self.assertTrue(bool(school.principal))
async def METHOD_NAME(self):
school = await testmodels.School.create(id=1024, name="School1")
principal = await testmodels.Principal.create(name="Sang-Heon Jeon1", school=school)
self.assertEqual(await school.principal.filter(name="Sang-Heon Jeon1"), principal)
self.assertEqual(await school.principal.filter(name="Sang-Heon Jeon2"), None) |
4,863 | test op translation to value error | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test runtime error handling"""
import functools
import platform
import subprocess
import traceback
import pytest
import tvm
import tvm.testing
def test_op_translation_to_not_implemented():
ferror = tvm.testing.test_raise_error_callback("OpNotImplemented: myop")
try:
ferror()
assert False
except tvm.error.OpNotImplemented as e:
msg = str(e)
assert isinstance(e, NotImplementedError)
assert msg.find("ffi_testing.cc") != -1
def test_op_translation_to_internal_error():
fchk_eq = tvm.testing.test_check_eq_callback("InternalError: myop")
try:
fchk_eq(0, 1)
assert False
except tvm.error.InternalError as e:
msg = str(e)
assert msg.find("ffi_testing.cc") != -1
def METHOD_NAME():
try:
tvm.testing.ErrorTest(0, 1)
assert False
except ValueError as e:
msg = str(e)
assert msg.find("ffi_testing.cc") != -1
def test_deep_callback():
"""Propagate python errors through API calls
If a Python exception is raised, and that exception is caught in
Python, the original exception should be propagated so that the
traceback contains all intermediate python frames.
Stack
- test_deep_callback
- test
"""
def error_callback():
raise ValueError("callback error")
wrap1 = tvm.testing.test_wrap_callback(error_callback)
def flevel2():
wrap1()
wrap2 = tvm.testing.test_wrap_callback(flevel2)
def flevel3():
wrap2()
wrap3 = tvm.testing.test_wrap_callback(flevel3)
try:
wrap3()
assert False
except ValueError as err:
frames = traceback.extract_tb(err.__traceback__)
local_frames = [frame.name for frame in frames if frame.filename == __file__]
assert local_frames == ["test_deep_callback", "flevel3", "flevel2", "error_callback"]
@functools.lru_cache()
def _has_debug_symbols():
lib = tvm._ffi.base._LIB
headers = subprocess.check_output(["objdump", "--section-headers", lib._name], encoding="utf-8")
return ".debug" in headers
@pytest.mark.skipif(
not _has_debug_symbols() or platform.machine() != "x86_64",
reason="C++ stack frames require debug symbols, only implemented for x86",
)
def test_cpp_frames_in_stack_trace_from_python_error():
"""A python exception crossing C++ boundaries should have C++ stack frames"""
def error_callback():
raise ValueError("callback error")
wrapped = tvm.testing.test_wrap_callback(error_callback)
try:
wrapped()
assert False
except ValueError as err:
frames = traceback.extract_tb(err.__traceback__)
cpp_frames = [
frame
for frame in frames
if frame.filename.endswith(".cc") or frame.filename.endswith(".c")
]
assert len(cpp_frames) >= 1, (
f"Traceback through files '{[frame.filename for frame in frames]}'"
f" expected to contain C/C++ frames, "
f" but instead caught exception {err}"
)
@pytest.mark.skipif(
not _has_debug_symbols() or platform.machine() != "x86_64",
reason="C++ stack frames require debug symbols, only implemented for x86",
)
def test_stack_trace_from_cpp_error():
"""A python exception originating in C++ should have C++ stack frames"""
try:
tvm.testing.ErrorTest(0, 1)
assert False
except ValueError as err:
frames = traceback.extract_tb(err.__traceback__)
cpp_frames = [
frame
for frame in frames
if frame.filename.endswith(".cc") or frame.filename.endswith(".c")
]
assert len(cpp_frames) >= 1, (
f"Traceback through files '{[frame.filename for frame in frames]}'"
f" expected to contain C/C++ frames, "
f" but instead caught exception {err}"
)
if __name__ == "__main__":
tvm.testing.main() |
4,864 | test composing v1 | # Copyright (C) 2018-2019 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Unit tests for the compose message schemas."""
from bodhi.messages.schemas.compose import (
ComposeComposingV1,
ComposeStartV1,
ComposeCompleteV1,
ComposeSyncWaitV1,
ComposeSyncDoneV1,
RepoDoneV1,
)
from .utils import check_message
class TestComposeMessage:
"""A set of unit tests for classes in :py:mod:`bodhi_messages.schemas.compose`"""
def METHOD_NAME(self):
expected = {
"topic": "bodhi.compose.composing",
"summary": "bodhi composer started composing test_repo",
"__str__": "bodhi composer started composing test_repo",
"app_icon": "https://apps.fedoraproject.org/img/icons/bodhi.png",
"app_name": "bodhi",
"url": None,
"usernames": ['mohanboddu'],
"packages": [],
'repo': 'test_repo',
'agent_name': 'mohanboddu'
}
msg = ComposeComposingV1(
body={
'agent': 'mohanboddu',
'repo': 'test_repo',
'updates': ['monitorix-3.11.0-1.el6'],
}
)
check_message(msg, expected)
def test_start_v1(self):
expected = {
"topic": "bodhi.compose.start",
"summary": "bodhi composer started a push",
"__str__": "bodhi composer started a push",
"app_icon": "https://apps.fedoraproject.org/img/icons/bodhi.png",
"app_name": "bodhi",
"url": None,
"usernames": ['mohanboddu'],
"packages": [],
'agent_name': 'mohanboddu'
}
msg = ComposeStartV1(body={'agent': 'mohanboddu'})
check_message(msg, expected)
def test_complete_v1_failed(self):
"""Test the ComposeCompleteV1 Message with a failed compose."""
expected = {
"topic": "bodhi.compose.complete",
"summary": "bodhi composer failed to compose test_repo",
"__str__": "bodhi composer failed to compose test_repo",
"app_icon": "https://apps.fedoraproject.org/img/icons/bodhi.png",
"app_name": "bodhi",
"url": None,
"usernames": ['mohanboddu'],
"packages": [],
'agent_name': 'mohanboddu',
'repo': 'test_repo',
'success': False,
'ctype': 'container',
}
msg = ComposeCompleteV1(
body={
'agent': 'mohanboddu',
'success': False,
'repo': 'test_repo',
'ctype': 'container',
}
)
check_message(msg, expected)
def test_complete_v1_success(self):
"""Test the ComposeCompleteV1 Message with a successful compose."""
expected = {
"topic": "bodhi.compose.complete",
"summary": "bodhi composer successfully composed test_repo",
"__str__": "bodhi composer successfully composed test_repo",
"app_icon": "https://apps.fedoraproject.org/img/icons/bodhi.png",
"app_name": "bodhi",
"url": None,
"usernames": ['mohanboddu'],
"packages": [],
'agent_name': 'mohanboddu',
'repo': 'test_repo',
'success': True,
'ctype': 'container',
}
msg = ComposeCompleteV1(
body={
'agent': 'mohanboddu',
'success': True,
'repo': 'test_repo',
'ctype': 'container',
}
)
check_message(msg, expected)
def test_repo_done_v1(self):
expected = {
"topic": "bodhi.repo.done",
"summary": "bodhi composer is finished building test_repo",
"__str__": "bodhi composer is finished building test_repo",
"app_icon": "https://apps.fedoraproject.org/img/icons/bodhi.png",
"app_name": "bodhi",
"url": None,
"usernames": ['mohanboddu'],
"packages": [],
'agent_name': 'mohanboddu',
'repo': 'test_repo'
}
msg = RepoDoneV1(
body={'agent': 'mohanboddu', 'repo': 'test_repo', 'path': '/some/path'}
)
check_message(msg, expected)
def test_sync_wait_v1(self):
expected = {
"topic": "bodhi.compose.sync.wait",
"summary": (
"bodhi composer is waiting for test_repo "
"to hit the master mirror"
),
"__str__": (
"bodhi composer is waiting for test_repo "
"to hit the master mirror"
),
"app_icon": "https://apps.fedoraproject.org/img/icons/bodhi.png",
"app_name": "bodhi",
"url": None,
"usernames": ['mohanboddu'],
"packages": [],
'agent_name': 'mohanboddu',
'repo': 'test_repo'
}
msg = ComposeSyncWaitV1(
body={'agent': 'mohanboddu', 'repo': 'test_repo'}
)
check_message(msg, expected)
def test_sync_done_v1(self):
expected = {
"topic": "bodhi.compose.sync.done",
"summary": (
"bodhi composer finished waiting for test_repo "
"to hit the master mirror"
),
"__str__": (
"bodhi composer finished waiting for test_repo "
"to hit the master mirror"
),
"app_icon": "https://apps.fedoraproject.org/img/icons/bodhi.png",
"app_name": "bodhi",
"url": None,
"usernames": ['mohanboddu'],
"packages": [],
'agent_name': 'mohanboddu',
'repo': 'test_repo'
}
msg = ComposeSyncDoneV1(
body={'agent': 'mohanboddu', 'repo': 'test_repo'}
)
check_message(msg, expected) |
4,865 | test format results without events | import elastalert.eql as eql
from elastalert import ElasticSearchClient
from unittest import mock
def test_format_request_without_eql():
assert eql.format_request({}) is None
assert eql.format_request({'query': {}}) is None
assert eql.format_request({'query': {'bool': {}}}) is None
assert eql.format_request({'query': {'bool': {'filter': {}}}}) is None
assert eql.format_request({'query': {'bool': {'filter': {'bool': {}}}}}) is None
assert eql.format_request({'query': {'bool': {'filter': {'bool': {'must': []}}}}}) is None
assert eql.format_request({'query': {'bool': {'filter': {'bool': {'must': [{'foo': 'bar'}]}}}}}) is None
def test_format_request_with_eql():
body = eql_body()
expected_body = {'filter': {'bool': {'must': [{'other': 'other filter'}]}}, 'query': 'test query'}
assert eql.format_request(body) == expected_body
def eql_body():
body = {
'query': {
'bool': {
'filter': {
'bool': {
'must': [
{'eql': 'test query'},
{'other': 'other filter'},
]
}
}
}
}
}
return body
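# format_request (as exercised above) pulls the single 'eql' clause out of
# query.bool.filter.bool.must and promotes it to the top-level 'query'
# string expected by the _eql/search endpoint, keeping the remaining
# clauses under 'filter'; with several 'eql' clauses the last one wins,
# as the "excessive" test below shows.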
def test_format_request_with_excessive_eql():
body = eql_body()
body['query']['bool']['filter']['bool']['must'].append({'eql': 'newer query'})
expected_body = {'filter': {'bool': {'must': [{'other': 'other filter'}]}}, 'query': 'newer query'}
assert eql.format_request(body) == expected_body
def METHOD_NAME():
expected_results = {'hits': {'hits': []}}
results = expected_results
assert eql.format_results(results) == expected_results
def test_format_results_with_events():
expected_results = {'hits': {'hits': [{'foo': 'bar'}]}, 'eql': True}
results = {'hits': {'events': [{'foo': 'bar'}]}}
assert eql.format_results(results) == expected_results
def init_client():
conn = {
'es_host': '',
'es_hosts': [],
'es_port': 123,
'es_url_prefix': '',
'use_ssl': False,
'verify_certs': False,
'ca_certs': [],
'ssl_show_warn': False,
'http_auth': '',
'headers': [],
'es_conn_timeout': 0,
'send_get_body_as': '',
'client_cert': '',
'client_key': ''
}
return ElasticSearchClient(conn)
def test_search_without_eql():
es_client = init_client()
expected_params = {'from': True, 'size': 12, 'scroll': True, '_source_includes': True}
expected_headers = {}
expected_body = {}
results = {}
es_client.transport = mock.Mock()
es_client.transport.perform_request.return_value = results
body = {}
params = {'from_': True, 'size': 12, 'scroll': True, '_source_includes': True}
es_client.search(body=body, index='test', params=params)
es_client.transport.perform_request.assert_called_with('POST', '/test/_search',
params=expected_params,
headers=expected_headers,
body=expected_body)
def test_search_with_eql():
es_client = init_client()
expected_params = {'from': True}
expected_headers = {}
expected_body = {'filter': {'bool': {'must': [{'other': 'other filter'}]}}, 'query': 'test query', 'size': 12}
results = {}
es_client.transport = mock.Mock()
es_client.transport.perform_request.return_value = results
body = eql_body()
params = {'from_': True, 'size': 12, 'scroll': True, '_source_includes': True}
results = es_client.search(body=body, index='test', params=params)
es_client.transport.perform_request.assert_called_with('POST', '/test/_eql/search',
params=expected_params,
headers=expected_headers,
body=expected_body) |
4,866 | xml | #!/usr/bin/env python
"""
_RESTFormatter_
A basic REST formatter. The formatter takes the data from the API call, turns it into the
appropriate format and sets the CherryPy header appropriately.
Could add YAML via http://pyyaml.org/
"""
from builtins import str
import json
from types import GeneratorType
from cherrypy import response, HTTPError, request
from WMCore.WebTools.Page import TemplatedPage, _setCherryPyHeaders
from WMCore.Wrappers.JsonWrapper.JSONThunker import JSONThunker
from Utils.Utilities import decodeBytesToUnicodeConditional
from Utils.PythonVersion import PY3
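# A minimal usage sketch (hypothetical wiring; in WMCore the REST server
# normally negotiates the Accept header and calls format() itself):
#
#   fmt = RESTFormatter(config)
#   body = fmt.format({'result': [1, 2, 3]}, 'application/json', expires=300)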
class RESTFormatter(TemplatedPage):
def __init__(self, config):
self.supporttypes = {
'text/json': self.json,
'application/xml': self.METHOD_NAME,
'application/atom+xml': self.atom,
'text/x-json': self.json,
'application/json': self.json,
'text/html': self.to_string,
'text/plain': self.to_string,
'*/*': self.to_string}
TemplatedPage.__init__(self, config)
def genstreamer(self, data):
yield "[\n"
firstItem = True
for item in data:
if not firstItem:
yield "\n,\n"
else:
firstItem = False
yield json.dumps(item)
yield "\n]"
def json(self, data):
if isinstance(data, GeneratorType):
out = ''.join([r for r in self.genstreamer(data)])
return out
thunker = JSONThunker()
data = thunker.thunk(data)
return json.dumps(data)
def METHOD_NAME(self, data):
if isinstance(data, GeneratorType):
data = [i for i in data]
return self.templatepage('XML', data = data,
config = self.config,
path = request.path_info)
def atom(self, data):
if isinstance(data, GeneratorType):
data = [i for i in data]
return self.templatepage('Atom', data = data,
config = self.config,
path = request.path_info)
def to_string(self, data):
"""
Since json.dumps returns unicode in py3 and bytes in py2 (it behaves
returning "native str" in both versions), then we do the same with all
the data that is not GeneratorType, dict nor list by calling str(data)
Moreover, we need to properly decode bytes.
:returns: "native str" (unicode in py3, bytes in py2)
"""
if isinstance(data, GeneratorType):
return self.json(data)
if isinstance(data, dict) or isinstance(data, list):
return json.dumps(data)
if not isinstance(data, bytes):
return str(data)
return decodeBytesToUnicodeConditional(data, condition=PY3)
def format(self, data, datatype, expires):
    response_data = ''
    # Fall back to the plain-text formatter so an unsupported accept header
    # produces a 406 response instead of an unhandled KeyError.
    func = self.supporttypes.get(datatype, self.supporttypes['text/plain'])
    if datatype not in self.supporttypes:
        response.status = 406
        expires = 0
        response_data = func({
            'exception': 406,
            'type': 'HTTPError',
            'message': '%s is not supported. Valid accept headers are: %s'
                       % (datatype, list(self.supporttypes))})
    else:
        try:
            response_data = func(data)
        except HTTPError as h:
            # This won't be triggered with a default formatter, but could be by a subclass
            response.status = h.args[0]
            expires = 0
            rec = {'exception': h.args[0], 'type': 'HTTPError', 'message': h.args[1]}
            response_data = func(rec)
        except Exception as e:
            response.status = 500
            expires = 0
            rec = {'exception': 500, 'type': e.__class__.__name__, 'message': 'Server Error'}
            response_data = func(rec)
    _setCherryPyHeaders(response_data, datatype, expires)
    return response_data |
4,867 | check health status | # Copyright 2023 The DLRover Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import threading
import time
import ray
from dlrover.python.common.constants import NodeStatus
from dlrover.python.common.log import default_logger as logger
from dlrover.python.common.node import NodeGroupResource, NodeResource
from dlrover.python.master.stats.stats_backend import LocalFileStateBackend
from dlrover.python.scheduler.job import ElasticJob, JobArgs, NodeArgs
from dlrover.python.scheduler.kubernetes import (
convert_cpu_to_decimal,
convert_memory_to_mb,
)
from dlrover.python.util.actor_util.parse_actor import (
parse_type_id_from_actor_name,
)
from dlrover.python.util.state.store_mananger import StoreManager
def parse_bool(s: str):
return s.lower() in ["true", "yes", "t", "y"]
@ray.remote
class RayWorker: # pragma: no cover
def __init__(self):
pass
def exec_module(self):
pass
def get_node_service_addr(self):
return None
class RayClient(object):
_instance_lock = threading.Lock()
def __init__(self, namespace, jobname):
ray.init()
self.store_manager = StoreManager(
jobname=jobname, namespace=namespace
).build_store_manager()
self.store = self.store_manager.build_store()
self.actor_handle = {}
def create_pg(self, resource):
pass
def create_actor(self, actor_args):
# Obtain the worker executor via reflection
executor = actor_args.get("executor")
args = actor_args.get("args", [])
kwargs = actor_args.get("kargs", {})
actor_name = actor_args.get("actor_name", "actor")
actor_handle = (
ray.remote(executor)
.options(name=actor_name)
.remote(*args, **kwargs)
)
time.sleep(3)
actor_type, actor_id = parse_type_id_from_actor_name(actor_name)
self.store.add_actor_name(actor_type, actor_id, actor_name)
self.actor_handle[actor_name] = actor_handle
return actor_handle
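# Example (hypothetical; `executor` must be an undecorated class or function,
# because create_actor applies ray.remote itself, and the actor name must be
# parseable into a (type, id) pair by parse_type_id_from_actor_name):
#
#   client = RayClient.singleton_instance("default", "job-0")
#   handle = client.create_actor({
#       "executor": SomeWorkerClass,
#       "args": [],
#       "actor_name": "worker-0",
#   })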
def delete_actor(self, actor_name):
actor_handle = self.get_actor_handle(actor_name)
if actor_handle is None:
logger.warning("actor exited before killing")
else:
ray.kill(actor_handle, no_restart=True)
logger.info("kill actor %s successfully." % actor_name)
self.store.remove_actor_name(actor_name)
return
def list_actor(self):
actor_names = self.store.get("actor_names", {})
logger.info("actor stored in backend are {}".format(actor_names))
for actor_type, actor_id_name in actor_names.items():
if actor_id_name is not None:
for id, name in actor_id_name.items():
status = self.METHOD_NAME(name)
yield name, status
def get_actor_status(self, actor_name):
"""
Check actor status from the Ray dashboard (currently a stub that always reports RUNNING).
"""
return "RUNNING"
def remote_call_actor(self, actor_handle, func, args=[], kargs={}):
res = ray.get(getattr(actor_handle, func).remote(*args, **kargs))
return res
def METHOD_NAME(self, actor_name):
"""
When the actor is in RUNNING status, check whether the estimator has been initialized.
"""
# TODO: use dlrover.python's ...
status = None
res = None
actor_handle = self.get_actor_handle(actor_name)
if actor_handle is None:
status = NodeStatus.UNKNOWN
else:
res = self.remote_call_actor(actor_handle, "health_check", [], {})
if res is not None:
status = NodeStatus.RUNNING
else:
status = NodeStatus.UNKNOWN
return status
def get_actor_handle(self, actor_name):
actor_handle = None
try:
actor_handle = ray.get_actor(actor_name)
except Exception as e:
logger.warning(str(e))
return actor_handle
@classmethod
def singleton_instance(cls, *args, **kwargs):
if not hasattr(RayClient, "_instance"):
with RayClient._instance_lock:
if not hasattr(RayClient, "_instance"):
RayClient._instance = RayClient(*args, **kwargs)
return RayClient._instance
class RayElasticJob(ElasticJob):
def __init__(self, job_name, namespace):
"""
RayElasticJob manages a job's actors through the Ray client; it is the
Ray counterpart of the Kubernetes-based ElasticJob.
Args:
    job_name: Job name, should be unique in the namespace.
    namespace: The name of the namespace in which the job's actors
        are created.
"""
self._ray_client = RayClient.singleton_instance(namespace, job_name)
self._namespace = namespace
self._job_name = job_name
def get_node_name(self, type, id):
return "pod-name"
def get_node_service_addr(self, type, id):
return ""
class RayJobArgs(JobArgs):
def __init__(self, platform, namespace, job_name):
super(RayJobArgs, self).__init__(platform, namespace, job_name)
self.file_path = "{}.json".format(job_name)
folder_path = os.path.dirname(os.path.dirname(__file__))
self.file_path = os.path.join(folder_path, "tests/test.json")
self.stats_backend = LocalFileStateBackend(self.file_path)
def initilize(self):
job = self.stats_backend.load()
for replica, spec in job["spec"]["replicaSpecs"].items():
num = int(spec.get("replicas", 0))
requests = spec.get("resources", {})
cpu = convert_cpu_to_decimal(requests.get("cpu", 0))
if "memory" in requests:
memory = convert_memory_to_mb(requests["memory"])
else:
memory = 0
gpu_type = None
gpu_num = 0
for k, v in requests.items():
if "nvidia.com" in k:
gpu_type = k
gpu_num = int(v)
group_resource = NodeGroupResource(
num,
NodeResource(cpu, memory, gpu_type, gpu_num),
)
restart_count = int(spec.get("restartCount", 3))
auto_scale = parse_bool(spec.get("autoScale", "True"))
restart_timeout = int(spec.get("restartTimeout", 0))
critical_nodes = spec.get("criticalNodes", "")
self.node_args[replica] = NodeArgs(
group_resource,
auto_scale,
restart_count,
restart_timeout,
critical_nodes,
) |
4,868 | test manufacture delivery missing disposition | from expungeservice.models.disposition import DispositionCreator
from expungeservice.models.expungement_result import EligibilityStatus
from expungeservice.record_merger import RecordMerger
from tests.factories.charge_factory import ChargeFactory
from tests.models.test_charge import Dispositions
def test_manufacture_delivery_dismissed():
charges = ChargeFactory.create_ambiguous_charge(
name="Manufacture/Delivery", statute="4759922b", level="Felony Class A", disposition=Dispositions.DISMISSED
)
type_eligibility = RecordMerger.merge_type_eligibilities(charges)
assert type_eligibility.status is EligibilityStatus.ELIGIBLE
assert (
type_eligibility.reason == "Dismissed Criminal Charge – Dismissals are generally eligible under 137.225(1)(d)"
)
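# For ambiguous charges, RecordMerger.merge_type_eligibilities resolves the
# per-interpretation results: when the interpretations disagree it reports
# NEEDS_MORE_ANALYSIS and joins their reasons with " OR ", as the assertions
# in the tests below exercise.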
def METHOD_NAME():
charges = ChargeFactory.create_ambiguous_charge(
name="Manufacture/Delivery", statute="4759922b", level="Felony Class A", disposition=DispositionCreator.empty()
)
type_eligibility = RecordMerger.merge_type_eligibilities(charges)
assert type_eligibility.status is EligibilityStatus.NEEDS_MORE_ANALYSIS
assert (
type_eligibility.reason
== "Marijuana Manufacture Delivery – Always eligible under 137.226 (for convictions) or 137.225(1)(d) (for dismissals) OR Felony Class A – Disposition not found. Needs further analysis"
)
def test_manufacture_delivery_unrecognized_disposition():
charges = ChargeFactory.create_ambiguous_charge(
name="Manufacture/Delivery",
statute="4759922b",
level="Felony Class B",
disposition=Dispositions.UNRECOGNIZED_DISPOSITION,
)
type_eligibility = RecordMerger.merge_type_eligibilities(charges)
assert type_eligibility.status is EligibilityStatus.NEEDS_MORE_ANALYSIS
assert (
type_eligibility.reason
== "Marijuana Manufacture Delivery – Always eligible under 137.226 (for convictions) or 137.225(1)(d) (for dismissals) OR Felony Class B – Disposition not recognized. Needs further analysis"
)
def test_manufacture_delivery_manudel():
charges = ChargeFactory.create_ambiguous_charge(
name="Manu/Del Cntrld Sub-SC 1", statute="4759921B", level="Felony Class A", disposition=Dispositions.CONVICTED
)
type_eligibility = RecordMerger.merge_type_eligibilities(charges)
assert type_eligibility.status is EligibilityStatus.NEEDS_MORE_ANALYSIS
assert (
type_eligibility.reason
== "Marijuana Manufacture Delivery – Eligible under 137.226 OR Felony Class A – Ineligible by omission from statute"
)
def test_manufacture_delivery_manudel_felony_unclassified():
charges = ChargeFactory.create_ambiguous_charge(
name="Manu/Del Cntrld Sub-SC 1",
statute="4759921B",
level="Felony Unclassified",
disposition=Dispositions.CONVICTED,
)
type_eligibility = RecordMerger.merge_type_eligibilities(charges)
assert type_eligibility.status is EligibilityStatus.NEEDS_MORE_ANALYSIS
assert (
type_eligibility.reason
== "Marijuana Manufacture Delivery – Eligible under 137.226 OR Felony Class A – Ineligible by omission from statute OR Felony Class B – Convictions that fulfill the conditions of 137.225(1)(b) are eligible OR Felony Class C – Eligible under 137.225(1)(b)"
)
def test_manufacture_delivery_manudel_felony_c():
charges = ChargeFactory.create_ambiguous_charge(
name="Manu/Del Cntrld Sub-SC 1", statute="4759921B", level="Felony Class C", disposition=Dispositions.CONVICTED
)
type_eligibility = RecordMerger.merge_type_eligibilities(charges)
assert type_eligibility.status is EligibilityStatus.ELIGIBLE
assert type_eligibility.reason == "Felony Class C – Eligible under 137.225(1)(b)"
def test_manufacture_delivery_manufacturing_name():
charges = ChargeFactory.create_ambiguous_charge(
name="MANUFACTURING CONTROLLED SUB",
statute="4759921A",
level="Felony Unclassified",
disposition=Dispositions.CONVICTED,
)
type_eligibility = RecordMerger.merge_type_eligibilities(charges)
assert type_eligibility.status is EligibilityStatus.NEEDS_MORE_ANALYSIS
assert (
type_eligibility.reason
== "Marijuana Manufacture Delivery – Eligible under 137.226 OR Felony Class A – Ineligible by omission from statute OR Felony Class B – Convictions that fulfill the conditions of 137.225(1)(b) are eligible OR Felony Class C – Eligible under 137.225(1)(b)"
)
def test_manufacture_delivery_2():
charges = ChargeFactory.create_ambiguous_charge(
name="MANUFACTURING CONTROLLED SUB 2",
statute="4759921A",
level="Felony Unclassified",
disposition=Dispositions.CONVICTED,
)
type_eligibility = RecordMerger.merge_type_eligibilities(charges)
assert type_eligibility.status is EligibilityStatus.NEEDS_MORE_ANALYSIS
assert (
type_eligibility.reason
== "Felony Class A – Ineligible by omission from statute OR Felony Class B – Convictions that fulfill the conditions of 137.225(1)(b) are eligible"
)
def test_manufacture_delivery_heroin():
charges = ChargeFactory.create_ambiguous_charge(
name="MANUFACTURING CONTROLLED SUB HEROIN",
statute="4759921A",
level="Felony Unclassified",
disposition=Dispositions.CONVICTED,
)
type_eligibility = RecordMerger.merge_type_eligibilities(charges)
assert type_eligibility.status is EligibilityStatus.NEEDS_MORE_ANALYSIS
assert (
type_eligibility.reason
== "Felony Class A – Ineligible by omission from statute OR Felony Class B – Convictions that fulfill the conditions of 137.225(1)(b) are eligible"
)
def test_pcs():
charges = ChargeFactory.create_ambiguous_charge(
name="PCS",
statute="4759924A",
level="Felony Class B",
disposition=Dispositions.CONVICTED,
)
type_eligibility = RecordMerger.merge_type_eligibilities(charges)
assert type_eligibility.status is EligibilityStatus.ELIGIBLE
assert (
type_eligibility.reason
== "Marijuana Manufacture Delivery – Eligible under 137.226 OR Felony Class B – Convictions that fulfill the conditions of 137.225(1)(b) are eligible"
)
def test_pcs_heroin():
charges = ChargeFactory.create_ambiguous_charge(
name="POSS CONTROLLED SUB HEROIN",
statute="4757521A",
level="Felony Unclassified",
disposition=Dispositions.CONVICTED,
)
type_eligibility = RecordMerger.merge_type_eligibilities(charges)
assert type_eligibility.status is EligibilityStatus.ELIGIBLE
assert (
type_eligibility.reason
== "Felony Class B – Convictions that fulfill the conditions of 137.225(1)(b) are eligible OR Felony Class C – Eligible under 137.225(1)(b)"
)
def test_pcs_class_c():
charges = ChargeFactory.create_ambiguous_charge(
name="PCS",
statute="4759924A",
level="Felony Class C",
disposition=Dispositions.CONVICTED,
)
type_eligibility = RecordMerger.merge_type_eligibilities(charges)
assert type_eligibility.status is EligibilityStatus.ELIGIBLE
assert type_eligibility.reason == "Felony Class C – Eligible under 137.225(1)(b)" |
4,869 | method | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"dataprotection backup-policy delete",
is_experimental=True,
confirmation="Are you sure you want to perform this operation?",
)
class Delete(AAZCommand):
"""Deletes a backup policy belonging to a backup vault.
:example: Delete Backup Policy
az dataprotection backup-policy delete --name "OSSDBPolicy" --resource-group "000pikumar" --vault-name "PrivatePreviewVault"
"""
_aaz_info = {
"version": "2023-05-01",
"resources": [
["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.dataprotection/backupvaults/{}/backuppolicies/{}", "2023-05-01"],
]
}
def _handler(self, command_args):
super()._handler(command_args)
self._execute_operations()
return None
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.backup_policy_name = AAZStrArg(
options=["-n", "--name", "--backup-policy-name"],
help="Name of the policy",
required=True,
id_part="child_name_1",
)
_args_schema.resource_group = AAZResourceGroupNameArg(
required=True,
)
_args_schema.vault_name = AAZStrArg(
options=["--vault-name"],
help="The name of the backup vault.",
required=True,
id_part="name",
)
return cls._args_schema
def _execute_operations(self):
self.pre_operations()
self.BackupPoliciesDelete(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
class BackupPoliciesDelete(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
if session.http_response.status_code in [204]:
return self.on_204(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataProtection/backupVaults/{vaultName}/backupPolicies/{backupPolicyName}",
**self.url_parameters
)
@property
def METHOD_NAME(self):
return "DELETE"
@property
def error_format(self):
return "MgmtErrorFormat"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"backupPolicyName", self.ctx.args.backup_policy_name,
required=True,
),
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
**self.serialize_url_param(
"vaultName", self.ctx.args.vault_name,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2023-05-01",
required=True,
),
}
return parameters
def on_200(self, session):
pass
def on_204(self, session):
pass
class _DeleteHelper:
"""Helper class for Delete"""
__all__ = ["Delete"] |
4,870 | test decimal format | # Copyright 2021 The NetKet Authors - All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import pytest
import jax
import jax.numpy as jnp
from functools import partial
import netket as nk
from netket.stats import statistics
from scipy.optimize import curve_fit
from .. import common
pytestmark = common.skipif_mpi
WEIGHT_SEED = 3
@partial(jax.jit, static_argnums=0)
@partial(jax.vmap, in_axes=(None, None, 0, 0, 0), out_axes=(0))
def local_value_kernel(logpsi, pars, σ, σp, mel):
return jnp.sum(mel * jnp.exp(logpsi(pars, σp) - logpsi(pars, σ)))
def local_values(logpsi, variables, Ô, σ):
σp, mels = Ô.get_conn_padded(σ.reshape((-1, σ.shape[-1])))
loc_vals = local_value_kernel(
logpsi, variables, σ.reshape((-1, σ.shape[-1])), σp, mels
)
return loc_vals.reshape(σ.shape[:-1])
def _setup():
g = nk.graph.Hypercube(3, 2)
hi = nk.hilbert.Spin(0.5, N=g.n_nodes)
ham = nk.operator.Heisenberg(hi, graph=g)
ma = nk.models.RBM(alpha=2, param_dtype=np.complex64)
return hi, ham, ma
def _test_stats_mean_std(hi, ham, ma, n_chains):
w = ma.init(jax.random.PRNGKey(WEIGHT_SEED * n_chains), jnp.zeros((1, hi.size)))
sampler = nk.sampler.MetropolisLocal(hi, n_chains=n_chains)
n_samples = 16000
num_samples_per_chain = n_samples // n_chains
# Discard a few samples
_, state = sampler.sample(ma, w, chain_length=1000)
samples, state = sampler.sample(
ma, w, chain_length=num_samples_per_chain, state=state
)
assert samples.shape == (n_chains, num_samples_per_chain, hi.size)
eloc = local_values(ma.apply, w, ham, samples)
assert eloc.shape == (n_chains, num_samples_per_chain)
stats = statistics(eloc.T)
assert stats.mean == pytest.approx(np.mean(eloc))
if n_chains > 1:
# variance == average sample variance over chains
assert stats.variance == pytest.approx(np.var(eloc))
@common.skipif_mpi
def test_stats_mean_std():
hi, ham, ma = _setup()
for bs in (1, 2, 16, 32):
_test_stats_mean_std(hi, ham, ma, bs)
def _gen_data(n_samples, log_f, dx, seed_val):
np.random.seed(seed_val)
# Generates data with a simple markov chain
x = np.empty(n_samples)
x_old = np.random.normal()
for i in range(n_samples):
x_new = x_old + np.random.normal(scale=dx, loc=0.0)
if np.exp(log_f(x_new) - log_f(x_old)) > np.random.uniform(0, 1):
x[i] = x_new
else:
x[i] = x_old
x_old = x[i]
return x
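# The loop above is a plain Metropolis sampler: a proposal
# x_new = x_old + N(0, dx) is accepted when exp(log_f(x_new) - log_f(x_old))
# beats a uniform draw, i.e. with probability min(1, f(x_new)/f(x_old));
# a small dx yields the strongly correlated chains these autocorrelation
# tests need.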
@pytest.mark.parametrize("batch_size", [1, 2, 4, 8, 16, 32])
@pytest.mark.parametrize("sig_corr", [0.5])
def test_tau_corr_fft_logic(batch_size, sig_corr):
def next_pow_two(n):
i = 1
while i < n:
i = i << 1
return i
def autocorr_func_1d(x, norm=True):
x = np.atleast_1d(x)
if len(x.shape) != 1:
raise ValueError("invalid dimensions for 1D autocorrelation function")
n = next_pow_two(len(x))
# Compute the FFT and then (from that) the auto-correlation function
f = np.fft.fft(x - np.mean(x), n=2 * n)
acf = np.fft.ifft(f * np.conjugate(f))[: len(x)].real
acf /= 4 * n
# Optionally normalize
if norm:
acf /= acf[0]
return acf
def log_f(x):
return -(x**2.0) / 2.0
def func_corr(x, tau):
return np.exp(-x / (tau))
n_samples = 2**20 // batch_size
data = np.empty((batch_size, n_samples))
tau_fit = np.empty(batch_size)
for i in range(batch_size):
data[i] = _gen_data(n_samples, log_f, sig_corr, i + batch_size)
autoc = autocorr_func_1d(data[i])
popt, pcov = curve_fit(func_corr, np.arange(40), autoc[0:40])
tau_fit[i] = popt[0]
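# For an exponential autocorrelation rho(t) = exp(-t/tau), the integrated
# autocorrelation time is tau_int = 1 + 2 * sum_{t>=1} rho(t), which is
# approximately 1 + 2*tau for tau >> 1 -- hence the conversion below.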
with common.netket_experimental_fft_autocorrelation(True):
tau_fit_mean = 1 + 2 * tau_fit.mean()
tau_fit_max = 1 + 2 * tau_fit.max()
stats = statistics(data)
assert np.mean(data) == pytest.approx(stats.mean)
assert np.var(data) == pytest.approx(stats.variance)
assert tau_fit_mean == pytest.approx(stats.tau_corr, rel=0.5, abs=0.5)
assert tau_fit_max == pytest.approx(stats.tau_corr_max, rel=0.5, abs=0.5)
eom_fit = np.sqrt(np.var(data) * tau_fit_mean / float(n_samples * batch_size))
assert eom_fit == pytest.approx(stats.error_of_mean, rel=0.5)
with common.netket_experimental_fft_autocorrelation(False):
tau_fit_m = tau_fit.mean()
stats = statistics(data)
assert np.mean(data) == pytest.approx(stats.mean)
assert np.var(data) == pytest.approx(stats.variance)
assert tau_fit_m == pytest.approx(stats.tau_corr, rel=1, abs=3)
eom_fit = np.sqrt(np.var(data) * tau_fit_m / float(n_samples * batch_size))
assert eom_fit == pytest.approx(stats.error_of_mean, rel=0.6)
def METHOD_NAME():
from netket.stats import Stats
assert str(Stats(1.0, 1e-3)) == "1.0000 ± 0.0010 [σ²=nan]"
assert str(Stats(1.0, 1e-6)) == "1.0000000 ± 0.0000010 [σ²=nan]"
assert str(Stats(1.0, 1e-7)) == "1.000e+00 ± 1.000e-07 [σ²=nan]"
assert str(Stats(float("nan"), float("inf"))) == "nan ± inf [σ²=nan]"
assert str(Stats(1.0, float("nan"))) == "1.000e+00 ± nan [σ²=nan]"
assert str(Stats(1.0, float("inf"))) == "1.000e+00 ± inf [σ²=nan]"
assert str(Stats(float("inf"), 0.0)) == "inf ± 0.000e+00 [σ²=nan]"
assert str(Stats(1.0, 0.0)) == "1.000e+00 ± 0.000e+00 [σ²=nan]"
assert str(Stats(1.0, 0.12, 0.5)) == "1.00 ± 0.12 [σ²=0.50]"
assert str(Stats(1.0, 0.12, 0.5, R_hat=1.01)) == "1.00 ± 0.12 [σ²=0.50, R̂=1.0100]"
@common.skipif_mpi
def test_R_hat():
# detect disagreeing chains
x = np.array(
[
[1.0, 1.0, 1.0],
[1.1, 1.1, 1.1],
]
)
assert statistics(x).R_hat > 1.01
# detect non-stationary chains
x = np.array(
[
[1.0, 1.5, 2.0],
[2.0, 1.5, 1.0],
]
)
assert statistics(x).R_hat > 1.01
# detect "stuck" chains
x = np.array(
[
np.random.normal(size=1000),
np.random.normal(size=1000),
]
)
# not stuck -> good R_hat:
assert statistics(x).R_hat <= 1.01
# stuck -> bad R_hat:
x[1, 100:] = 1.0
assert statistics(x).R_hat > 1.01 |
4,871 | teller3 | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append('../')
from auto_scan_test import AutoScanTest, IgnoreReasons
from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place
import unittest
import hypothesis
from hypothesis import given, settings, seed, example, assume
import hypothesis.strategies as st
import argparse
import numpy as np
from functools import partial
from functools import reduce
class TestReshapeOp(AutoScanTest):
def __init__(self, *args, **kwargs):
AutoScanTest.__init__(self, *args, **kwargs)
self.enable_testing_on_place(
TargetType.Host,
PrecisionType.FP32,
DataLayoutType.NCHW,
thread=[1, 2])
opencl_places = [
Place(TargetType.OpenCL, PrecisionType.FP16,
DataLayoutType.ImageDefault), Place(
TargetType.OpenCL, PrecisionType.FP16,
DataLayoutType.ImageFolder),
Place(TargetType.OpenCL, PrecisionType.FP32, DataLayoutType.NCHW),
Place(TargetType.OpenCL, PrecisionType.Any,
DataLayoutType.ImageDefault), Place(
TargetType.OpenCL, PrecisionType.Any,
DataLayoutType.ImageFolder),
Place(TargetType.OpenCL, PrecisionType.Any, DataLayoutType.NCHW),
Place(TargetType.Host, PrecisionType.FP32)
]
self.enable_testing_on_place(places=opencl_places)
self.enable_testing_on_place(TargetType.NNAdapter, PrecisionType.FP32)
self.enable_devices_on_nnadapter(device_names=[
"kunlunxin_xtcl", "cambricon_mlu", "nvidia_tensorrt",
"intel_openvino"
])
def is_program_valid(self,
program_config: ProgramConfig,
predictor_config: CxxConfig) -> bool:
return True
def sample_program_configs(self, draw):
in_shape = draw(
st.lists(
st.integers(
min_value=5, max_value=10), min_size=4, max_size=4))
attr_shape = draw(
st.lists(
st.integers(
min_value=1, max_value=max(in_shape)),
min_size=1,
max_size=len(in_shape)))
shape_tensor = []
for i in range(len(attr_shape) - 1, -1, -1):
shape_tensor.append(attr_shape[i])
assume(
reduce(lambda x, y: x * y, attr_shape) == reduce(
lambda x, y: x * y, in_shape))
in_shape = draw(st.sampled_from([in_shape, []]))
if in_shape == []:
attr_shape = [1]
shape_tensor = [1, 1]
# The parameter shape in ReshapeOp must be set
with_shape_attr = draw(st.sampled_from([True]))
with_shape_tensor = draw(st.sampled_from([True, False]))
def generate_input(*args, **kwargs):
return np.random.random(in_shape).astype(np.float32)
def generate_shape(*args, **kwargs):
return np.asarray(shape_tensor).astype(np.int32)
if (with_shape_attr and with_shape_tensor):
build_ops = OpConfig(
type="reshape",
inputs={"X": ["input_data"],
"Shape": ["input_shape"]},
outputs={"Out": ["output_data"], },
attrs={"shape": attr_shape, })
program_config = ProgramConfig(
ops=[build_ops],
weights={},
inputs={
"input_data":
TensorConfig(data_gen=partial(generate_input)),
"input_shape":
TensorConfig(data_gen=partial(generate_shape)),
},
outputs=["output_data"])
elif (with_shape_attr):
build_ops = OpConfig(
type="reshape",
inputs={"X": ["input_data"]},
outputs={"Out": ["output_data"], },
attrs={"shape": attr_shape, })
program_config = ProgramConfig(
ops=[build_ops],
weights={},
inputs={
"input_data":
TensorConfig(data_gen=partial(generate_input)),
},
outputs=["output_data"])
elif (with_shape_tensor):
build_ops = OpConfig(
type="reshape",
inputs={"X": ["input_data"],
"Shape": ["input_shape"]},
outputs={"Out": ["output_data"], },
attrs={})
program_config = ProgramConfig(
ops=[build_ops],
weights={},
inputs={
"input_data":
TensorConfig(data_gen=partial(generate_input)),
"input_shape":
TensorConfig(data_gen=partial(generate_shape)),
},
outputs=["output_data"])
else:
build_ops = OpConfig(
type="reshape",
inputs={"X": ["input_data"]},
outputs={"Out": ["output_data"], },
attrs={})
program_config = ProgramConfig(
ops=[build_ops],
weights={},
inputs={
"input_data":
TensorConfig(data_gen=partial(generate_input)),
},
outputs=["output_data"])
return program_config
def sample_predictor_configs(self):
return self.get_predictor_configs(), ["reshape"], (1e-5, 1e-5)
def add_ignore_pass_case(self):
def teller1(program_config, predictor_config):
if self.get_nnadapter_device_name() == "nvidia_tensorrt":
in_shape = program_config.inputs["input_data"].shape
shape = program_config.ops[0].attrs["shape"]
if in_shape[0] != shape[0]:
return True
self.add_ignore_check_case(
teller1, IgnoreReasons.PADDLELITE_NOT_SUPPORT,
"Lite does not support change batch on nvidia_tensorrt.")
def _teller2(program_config, predictor_config):
target_type = predictor_config.target()
in_x_shape = list(program_config.inputs["input_data"].shape)
if target_type != TargetType.Host and target_type != TargetType.OpenCL:
if len(in_x_shape) == 0:
return True
self.add_ignore_check_case(
_teller2, IgnoreReasons.PADDLELITE_NOT_SUPPORT,
"0D-tensor is not supported on this target now.")
def METHOD_NAME(program_config, predictor_config):
if self.get_nnadapter_device_name() == "intel_openvino":
return True
self.add_ignore_check_case(METHOD_NAME,
IgnoreReasons.PADDLELITE_NOT_SUPPORT,
"intel_openvino report error.")
def test(self, *args, **kwargs):
self.run_and_statis(quant=False, max_examples=500)
if __name__ == "__main__":
unittest.main(argv=['']) |
4,872 | api update alias | import logging
import re
import xml
from urllib.parse import quote
from xml.dom.minidom import parseString
import aiohttp
from scriptworker.exceptions import ScriptWorkerTaskException
from scriptworker.utils import retry_async
from bouncerscript.constants import BOUNCER_LOCATION_PLATFORMS, NIGHTLY_VERSION_REGEX
log = logging.getLogger(__name__)
async def api_call(context, route, data, retry_config=None):
"""Generic api_call method that's to be used as underlying method by
all the functions working with the bouncer api"""
retry_async_kwargs = dict(retry_exceptions=(aiohttp.ClientError, aiohttp.ServerTimeoutError))
if retry_config:
retry_async_kwargs.update(retry_config)
log.info("Calling {} with data: {}".format(route, data))
return await retry_async(_do_api_call, args=(context, route, data), **retry_async_kwargs)
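# Usage sketch (hypothetical route): every helper below funnels through this
# retry wrapper, e.g.
#   result = await api_call(context, "product_show?product=Firefox", data={})
# Retry behavior can be tuned via `retry_config`, assuming its keys match
# scriptworker's retry_async keyword arguments.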
async def _do_api_call(context, route, data, method="GET", session=None):
"""Effective function doing the API call to the bouncer API endpoint"""
session = session or context.session
bouncer_config = context.config["bouncer_config"][context.server]
credentials = (bouncer_config["username"], bouncer_config["password"])
api_root = bouncer_config["api_root"]
api_url = "%s/%s" % (api_root, route)
auth = aiohttp.BasicAuth(*credentials)
kwargs = {"timeout": 60}
if data:
kwargs["data"] = data
method = "POST"
try:
log.info("Performing a {} request to {} with kwargs {}".format(method, api_url, kwargs))
async with session.request(method, api_url, auth=auth, **kwargs) as resp:
result = await resp.text()
log.info("Server response: {}".format(result))
return result
except aiohttp.ServerTimeoutError as e:
log.warning("Timed out accessing %s: %s" % (api_url, e))
raise
except aiohttp.ClientError as e:
log.warning("Cannot access %s: %s" % (api_url, e))
raise
async def api_show_product(context, product_name):
"""Function to query the API for a specific product information"""
data = {}
return await api_call(context, "product_show?product=%s" % quote(product_name), data=data)
async def api_add_product(context, product_name, add_locales, ssl_only=False):
"""Function to add a specific product to Bouncer, along with its corresponding
list of locales"""
data = {"product": product_name}
if add_locales:
data["languages"] = context.task["payload"]["locales"]
if ssl_only:
# Send "true" as a string
data["ssl_only"] = "true"
return await api_call(context, "product_add/", data)
async def api_add_location(context, product_name, bouncer_platform, path):
"""Function to add locations per platform for a specific product"""
data = {"product": product_name, "os": bouncer_platform, "path": path}
return await api_call(context, "location_add/", data)
async def api_modify_location(context, product_name, bouncer_platform, path):
"""Function to modify locations per platform for a specific product"""
data = {"product": product_name, "os": bouncer_platform, "path": path}
return await api_call(context, "location_modify/", data)
async def api_show_location(context, product_name):
"""Function to query the API for specific locations of a product"""
data = {}
return await api_call(context, "location_show?product=%s" % quote(product_name), data=data)
async def METHOD_NAME(context, alias, product_name):
"""Function to update an aliases to a specific product"""
data = {"alias": alias, "related_product": product_name}
return await api_call(context, "create_update_alias", data)
async def does_product_exist(context, product_name):
"""Function to check if a specific product exists in bouncer already by
parsing the XML returned by the API endpoint."""
res = await api_show_product(context, product_name)
try:
xml_doc = parseString(res)
# bouncer API returns <products/> if the product doesn't exist
products_found = len(xml_doc.getElementsByTagName("product"))
log.info("Products found: {}".format(products_found))
return bool(products_found)
except (xml.parsers.expat.ExpatError, UnicodeDecodeError, ValueError) as e:
log.warning("Error parsing XML: {}".format(e))
log.warning("Assuming {} does not exist".format(product_name))
# ignore XML parsing errors
return False
async def does_location_path_exist(context, product_name, platform, path):
existing_info = await get_locations_info(context, product_name)
for info in existing_info:
if path == info["path"] and platform == info["os"]:
return True
return False
async def get_locations_info(context, product_name):
"""Function to query for location information within bouncer by parsing the
XML returned by the API endpoint"""
res = await api_show_location(context, product_name)
try:
xml_doc = parseString(res)
# bouncer API returns <locations/> if the product doesn't exist
locations_info = process_locations_from_xml(xml_doc.getElementsByTagName("location"))
log.debug("Locations info: {}".format(locations_info))
return locations_info
except (xml.parsers.expat.ExpatError, UnicodeDecodeError, ValueError) as e:
log.warning("Error parsing XML: {}".format(e))
raise ScriptWorkerTaskException("Not suitable XML received")
def process_locations_from_xml(locations_found):
"""Function to process the XML returned by bouncer for the location tags"""
info = []
for location in locations_found:
os = location.getAttribute("os")
if os not in BOUNCER_LOCATION_PLATFORMS:
err_msg = "Unexpected os found in bouncer. Found {} while " "expected {}.".format(os, BOUNCER_LOCATION_PLATFORMS)
raise ScriptWorkerTaskException(err_msg)
id_ = location.getAttribute("id")
path = location.firstChild.data
info.append({"os": os, "id": id_, "path": path})
return info
def get_nightly_version(product_name, path):
"""Function to return the version string of the given nightly location
Input:
* "firefox-nightly-latest"
* "/firefox/nightly/latest-mozilla-central-l10n/firefox-63.0a1.:lang.linux-i686.tar.bz"
Output: "63.0a1"
"""
match = re.search(NIGHTLY_VERSION_REGEX, path)
if not match:
err_msg = "Couldn't find valid nightly version within path {}".format(path)
raise ScriptWorkerTaskException(err_msg)
return match.group(0)
def get_version_bumped_path(path, current_version, bumped_version):
"""Function to return the version after bumping it up by one
Input: "/firefox/nightly/latest-mozilla-central-l10n/firefox-63.0a1.:lang.linux-i686.tar.bz"
Output: "/firefox/nightly/latest-mozilla-central-l10n/firefox-64.0a1.:lang.linux-i686.tar.bz"
"""
return path.replace(current_version, bumped_version) |
4,873 | target | from bayes_opt import BayesianOptimization
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from matplotlib import mlab
from matplotlib import gridspec
# https://github.com/fmfn/BayesianOptimization/issues/18
def unique_rows(a):
"""
A function to trim repeated rows that may appear when optimizing.
This is necessary to avoid the sklearn GP object from breaking
:param a: array to trim repeated rows from
:return: mask of unique rows
"""
# Sort array and keep track of where things should go back to
order = np.lexsort(a.T)
reorder = np.argsort(order)
a = a[order]
diff = np.diff(a, axis=0)
ui = np.ones(len(a), 'bool')
ui[1:] = (diff != 0).any(axis=1)
return ui[reorder]
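# Usage sketch: keep only the first occurrence of each row, e.g.
#   a = np.array([[1, 2], [1, 2], [3, 4]])
#   a[unique_rows(a)]  # -> array([[1, 2], [3, 4]])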
def METHOD_NAME(x, y):
a = np.exp(-( (x - 2)**2/0.7 + (y - 4)**2/1.2) + (x - 2)*(y - 4)/1.6 )
b = np.exp(-( (x - 4)**2/3 + (y - 2)**2/2.) )
c = np.exp(-( (x - 4)**2/0.5 + (y - 4)**2/0.5) + (x - 4)*(y - 4)/0.5 )
d = np.sin(3.1415 * x)
e = np.exp(-( (x - 5.5)**2/0.5 + (y - 5.5)**2/.5) )
return 2*a + b - c + 0.17 * d + 2*e
n = 1e5
x = np.linspace(0, 6, 300)
y = np.linspace(0, 8, 300)
X, Y = np.meshgrid(x, y)
x = X.ravel()
y = Y.ravel()
X = np.vstack([x, y]).T[:, [1, 0]]
z = METHOD_NAME(x, y)
print(X, X.shape)
print((max(z)))
print((min(z)))
fig, axis = plt.subplots(1, 1, figsize=(14, 10))
gridsize=150
im = axis.hexbin(x, y, C=z, gridsize=gridsize, cmap=cm.jet, bins=None, vmin=-0.9, vmax=2.1)
axis.axis([x.min(), x.max(), y.min(), y.max()])
cb = fig.colorbar(im, )
cb.set_label('Value')
def posterior(bo, X):
ur = unique_rows(bo.X)
bo.gp.fit(bo.X[ur], bo.Y[ur])
mu, sigma2 = bo.gp.predict(X, return_std=True)
return mu, np.sqrt(sigma2), bo.util.utility(X, bo.gp, bo.Y.max())
def plot_2d(name=None):
mu, s, ut = posterior(bo, X)
fig, ax = plt.subplots(2, 2, figsize=(14, 10))
gridsize=150
# fig.suptitle('Bayesian Optimization in Action', fontdict={'size':30})
# GP regression output
ax[0][0].set_title('Gaussian Process Predicted Mean', fontdict={'size':15})
im00 = ax[0][0].hexbin(x, y, C=mu, gridsize=gridsize, cmap=cm.jet, bins=None, vmin=-0.9, vmax=2.1)
ax[0][0].axis([x.min(), x.max(), y.min(), y.max()])
ax[0][0].plot(bo.X[:, 1], bo.X[:, 0], 'D', markersize=4, color='k', label='Observations')
ax[0][1].set_title('Target Function', fontdict={'size':15})
im10 = ax[0][1].hexbin(x, y, C=z, gridsize=gridsize, cmap=cm.jet, bins=None, vmin=-0.9, vmax=2.1)
ax[0][1].axis([x.min(), x.max(), y.min(), y.max()])
ax[0][1].plot(bo.X[:, 1], bo.X[:, 0], 'D', markersize=4, color='k')
ax[1][0].set_title('Gaussian Process Variance', fontdict={'size':15})
im01 = ax[1][0].hexbin(x, y, C=s, gridsize=gridsize, cmap=cm.jet, bins=None, vmin=0, vmax=1)
ax[1][0].axis([x.min(), x.max(), y.min(), y.max()])
ax[1][1].set_title('Acquisition Function', fontdict={'size':15})
im11 = ax[1][1].hexbin(x, y, C=ut, gridsize=gridsize, cmap=cm.jet, bins=None, vmin=0, vmax=8)
np.where(ut.reshape((300, 300)) == ut.max())[0]
np.where(ut.reshape((300, 300)) == ut.max())[1]
ax[1][1].plot([np.where(ut.reshape((300, 300)) == ut.max())[1]/50.,
np.where(ut.reshape((300, 300)) == ut.max())[1]/50.],
[0, 6],
'k-', lw=2, color='k')
ax[1][1].plot([0, 6],
[np.where(ut.reshape((300, 300)) == ut.max())[0]/50.,
np.where(ut.reshape((300, 300)) == ut.max())[0]/50.],
'k-', lw=2, color='k')
ax[1][1].axis([x.min(), x.max(), y.min(), y.max()])
for im, axis in zip([im00, im10, im01, im11], ax.flatten()):
cb = fig.colorbar(im, ax=axis)
# cb.set_label('Value')
if name is None:
name = '_'
plt.tight_layout()
# Save or show figure?
# fig.savefig('bo_eg_' + name + '.png')
plt.show()
plt.close(fig)
bo = BayesianOptimization(METHOD_NAME, {'x': (0, 6), 'y': (0, 8)})
# gp_params = {'corr': 'absolute_exponential'}#, 'nugget': 1e-9}
bo.maximize(init_points=5, n_iter=0, acq='ucb', kappa=10)
plot_2d("{:03}".format(len(bo.X)))
# Turn interactive plotting off
plt.ioff()
for i in range(50):
bo.maximize(init_points=0, n_iter=1, acq='ucb', kappa=10)  # gp_params is commented out above, so it is not passed here
plot_2d("{:03}".format(len(bo.X))) |
4,874 | user update | from FreeTAKServer.model.FTSModel.fts_protocol_object import FTSProtocolObject
#######################################################
#
# Contact.py
# Python implementation of the Class Contact
# Generated by Enterprise Architect
# Created on: 11-Feb-2020 11:08:08 AM
# Original author: Corvo
#
#######################################################
from FreeTAKServer.model.FTSModelVariables.ContactVariables import ContactVariables as vars
class Contact(FTSProtocolObject):
def __init__(self):
self.callsign = None
self.endpoint = None
self.iconsetpath = None
self.uid = None
self.name = None
self.emailAddress = None
self.xmppUsername = None
self.sipAddress = None
# phone is accessed via setphone()/getphone() but was never initialized; default to None
self.phone = None
@staticmethod
def drop_point(CALLSIGN = vars.drop_point().CALLSIGN):
contact = Contact()
contact.setcallsign(callsign=CALLSIGN)
return contact
@staticmethod
def connection(CALLSIGN = vars.connection().CALLSIGN, ENDPOINT = vars.connection().ENDPOINT,
ICONSETPATH = vars.connection().ICONSETPATH,UID = vars.connection().UID,
NAME = vars.connection().NAME, PHONE = vars.connection().PHONE,
EMAILADDRESS = vars.connection().EMAILADDRESS, XMPPUSERNAME = vars.connection().XMPPUSERNAME,
SIPADDRESS = vars.connection().SIPADDRESS):
contact = Contact()
contact.setcallsign(CALLSIGN)
contact.setname(NAME)
contact.setuid(UID)
contact.setendpoint(ENDPOINT)
contact.seticonsetpath(ICONSETPATH)
contact.setphone(PHONE)
contact.setemailAddress(EMAILADDRESS)
contact.setxmppUsername(XMPPUSERNAME)
contact.setsipAddress(SIPADDRESS)
return contact
@staticmethod
def geochat(CALLSIGN=vars.geochat().CALLSIGN, ENDPOINT=vars.geochat().ENDPOINT,
ICONSETPATH=vars.geochat().ICONSETPATH, UID=vars.geochat().UID,
NAME=vars.geochat().NAME, EMAILADDRESS = vars.connection().EMAILADDRESS,
XMPPUSERNAME = vars.connection().XMPPUSERNAME, SIPADDRESS = vars.connection().SIPADDRESS):
contact = Contact()
contact.setcallsign(CALLSIGN)
contact.setname(NAME)
contact.setuid(UID)
contact.setendpoint(ENDPOINT)
contact.seticonsetpath(ICONSETPATH)
contact.setemailAddress(EMAILADDRESS)
contact.setxmppUsername(XMPPUSERNAME)
contact.setsipAddress(SIPADDRESS)
return contact
@staticmethod
def emergency_on(CALLSIGN=vars.emergency_on().CALLSIGN, ENDPOINT=vars.emergency_on().ENDPOINT,
ICONSETPATH=vars.emergency_on().ICONSETPATH, UID=vars.emergency_on().UID,
NAME=vars.emergency_on().NAME, EMAILADDRESS = vars.connection().EMAILADDRESS,
XMPPUSERNAME = vars.connection().XMPPUSERNAME, SIPADDRESS = vars.connection().SIPADDRESS):
contact = Contact()
contact.setcallsign(CALLSIGN)
contact.setname(NAME)
contact.setuid(UID)
contact.setendpoint(ENDPOINT)
contact.seticonsetpath(ICONSETPATH)
contact.setemailAddress(EMAILADDRESS)
contact.setxmppUsername(XMPPUSERNAME)
contact.setsipAddress(SIPADDRESS)
return contact
@staticmethod
def METHOD_NAME(ENDPOINT=vars.METHOD_NAME().ENDPOINT, PHONE=vars.METHOD_NAME().PHONE,
CALLSIGN=vars.METHOD_NAME().CALLSIGN, EMAILADDRESS = vars.connection().EMAILADDRESS,
XMPPUSERNAME = vars.connection().XMPPUSERNAME, SIPADDRESS = vars.connection().SIPADDRESS):
contact = Contact()
contact.setendpoint(ENDPOINT)
contact.setphone(PHONE)
contact.setcallsign(CALLSIGN)
contact.setemailAddress(EMAILADDRESS)
contact.setxmppUsername(XMPPUSERNAME)
contact.setsipAddress(SIPADDRESS)
return contact
@staticmethod
def SimpleCoT(CALLSIGN = vars.SimpleCoT().callsign):
contact = Contact()
contact.setcallsign(CALLSIGN)
return contact
@staticmethod
def Presence(CALLSIGN = vars.Presence().callsign):
contact = Contact()
contact.setcallsign(CALLSIGN)
return contact
@staticmethod
def Route(CALLSIGN = vars.Presence().callsign):
contact = Contact()
contact.setcallsign(CALLSIGN)
return contact
@staticmethod
def VideoStream(CALLSIGN = vars.VideoStream().callsign):
contact = Contact()
contact.setcallsign(CALLSIGN)
return contact
@staticmethod
def DroneSensor(CALLSIGN = vars.DroneSensor().callsign):
contact = Contact()
contact.setcallsign(CALLSIGN)
return contact
@staticmethod
def SPISensor(CALLSIGN = vars.SPISensor().callsign):
contact = Contact()
contact.setcallsign(CALLSIGN)
return contact
@staticmethod
def BitsImageryVideo(CALLSIGN = vars.BitsImageryVideo().callsign):
contact = Contact()
contact.setcallsign(CALLSIGN)
return contact
# iconsetpath getter
def geticonsetpath(self):
return self.iconsetpath
# iconsetpath setter
def seticonsetpath(self, iconsetpath=None):
self.iconsetpath=iconsetpath
# sipAddress getter
def getsipAddress(self):
return self.sipAddress
# sipAddress setter
def setsipAddress(self, sipAddress=None):
self.sipAddress=sipAddress
# emailAddress getter
def getemailAddress(self):
return self.emailAddress
# emailAddress setter
def setemailAddress(self, emailAddress=None):
self.emailAddress=emailAddress
# xmppUsername getter
def getxmppUsername(self):
return self.xmppUsername
# xmppUsername setter
def setxmppUsername(self, xmppUsername=None):
self.xmppUsername=xmppUsername
# callsign getter
def getcallsign(self):
return self.callsign
# callsign setter
def setcallsign(self, callsign=None):
self.callsign=callsign
# endpoint getter
def getendpoint(self):
return self.endpoint
# endpoint setter
def setendpoint(self, endpoint=None):
self.endpoint=endpoint
def getuid(self):
return self.uid
# uid setter
def setuid(self, uid=None):
self.uid = uid
def getname(self):
return self.name
# name setter
def setname(self, name=None):
self.name = name
def getphone(self):
return self.phone
def setphone(self, phone=None):
self.phone = phone |
4,875 | keys | # External compatible license
# taken from https://github.com/slezica/python-frozendict on March 14th 2014 (commit ID b27053e4d1)
#
# Copyright (c) 2012 Santiago Lezica
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions
# of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
# TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE
"""
frozendict is an immutable wrapper around dictionaries that implements the complete mapping interface.
It can be used as a drop-in replacement for dictionaries where immutability is desired.
"""
import operator
from functools import reduce
from easybuild.base import fancylogger
from easybuild.tools.py2vs3 import Mapping
# minor adjustments:
# * renamed to FrozenDict
class FrozenDict(Mapping):
def __init__(self, *args, **kwargs):
self.__dict = dict(*args, **kwargs)
self.__hash = None
def __getitem__(self, key):
return self.__dict[key]
def copy(self, **add_or_replace):
return FrozenDict(self, **add_or_replace)
def __iter__(self):
return iter(self.__dict)
def __len__(self):
return len(self.__dict)
def __repr__(self):
return '<FrozenDict %s>' % repr(self.__dict)
def __hash__(self):
if self.__hash is None:
self.__hash = reduce(operator.xor, map(hash, self.items()), 0)  # items() exists on Mapping in both Python 2 and 3; iteritems() does not
return self.__hash
# minor adjustment: define missing keys() method
def METHOD_NAME(self):
return self.__dict.METHOD_NAME()
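# Usage sketch: a FrozenDict behaves like a read-only dict and is hashable,
# so instances can serve as dict keys or set members, e.g.
#   fd = FrozenDict({'a': 1})
#   fd['a']     # -> 1
#   {fd: 'ok'}  # works because __hash__ is defined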
class FrozenDictKnownKeys(FrozenDict):
"""A frozen dictionary only allowing known keys."""
# list of known keys
KNOWN_KEYS = []
def __init__(self, *args, **kwargs):
"""Constructor, only way to define the contents."""
self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)
# support ignoring of unknown keys
ignore_unknown_keys = kwargs.pop('ignore_unknown_keys', False)
# handle unknown keys: either ignore them or raise an exception
tmpdict = dict(*args, **kwargs)
unknown_keys = [key for key in tmpdict.METHOD_NAME() if key not in self.KNOWN_KEYS]
if unknown_keys:
if ignore_unknown_keys:
for key in unknown_keys:
self.log.debug("Ignoring unknown key '%s' (value '%s')" % (key, args[0][key]))
# filter key out of dictionary before creating instance
del tmpdict[key]
else:
msg = "Encountered unknown keys %s (known keys: %s)" % (unknown_keys, self.KNOWN_KEYS)
self.log.raiseException(msg, exception=KeyError)
super(FrozenDictKnownKeys, self).__init__(tmpdict)
# pylint: disable=arguments-differ
def __getitem__(self, key, *args, **kwargs):
"""Redefine __getitem__ to provide a better KeyError message."""
try:
return super(FrozenDictKnownKeys, self).__getitem__(key, *args, **kwargs)
except KeyError as err:
if key in self.KNOWN_KEYS:
raise KeyError(err)
else:
tup = (key, self.__class__.__name__, self.KNOWN_KEYS)
raise KeyError("Unknown key '%s' for %s instance (known keys: %s)" % tup) |
4,876 | get single google app publish config | import logging
log = logging.getLogger(__name__)
def _google_should_do_dry_run(task):
# Don't commit anything by default. Committed APKs can't be unpublished,
# unless you push a newer set of APKs.
return not task.get("commit", False)
def _handle_legacy_google_track(google_track):
if google_track == "rollout":
log.warning(
'Using "rollout" as the Google Play track is deprecated; please specify the '
"target track that you would like to roll out to instead. Assuming you meant "
'"production" for this task.'
)
return "production"
return google_track
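# Example behavior (sketch): _handle_legacy_google_track("rollout") logs a
# deprecation warning and returns "production"; any other track name
# (e.g. "beta") is returned unchanged.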
def METHOD_NAME(product_config, task):
publish_config = product_config["app"]
rollout_percentage = task.get("rollout_percentage")
google_track = task["channel"]
google_track = _handle_legacy_google_track(google_track)
return {
"target_store": "google",
"dry_run": _google_should_do_dry_run(task),
"certificate_alias": publish_config.get("certificate_alias"),
"username": publish_config["service_account"],
"secret": publish_config["credentials_file"],
"package_names": publish_config["package_names"],
"google_track": google_track,
"google_rollout_percentage": rollout_percentage,
}
def _get_google_app_by_scope_publish_config(product_config, task, scope_product):
publish_config = product_config["apps"][scope_product]
rollout_percentage = task.get("rollout_percentage")
google_track = task.get("google_play_track", publish_config["default_track"])
google_track = _handle_legacy_google_track(google_track)
return {
"target_store": "google",
"dry_run": _google_should_do_dry_run(task),
"certificate_alias": publish_config.get("certificate_alias"),
"username": publish_config["service_account"],
"secret": publish_config["credentials_file"],
"package_names": publish_config["package_names"],
"google_track": google_track,
"google_rollout_percentage": rollout_percentage,
}
def _get_channel_publish_config(product_config, task):
publish_config = product_config["apps"][task["channel"]]
target_store = task.get("target_store")
# Determine the target store. If "target_store" isn't provided on the task payload,
# attempt to automatically determine it by checking if the channel only supports a single
# target - if so, then use that target store.
if target_store:
if not publish_config.get(target_store):
raise ValueError('Task had `target_store` set to "{}", but the "{}" channel does not support ' "that target".format(target_store, task["channel"]))
elif publish_config.get("google"):
target_store = "google"
else:
raise ValueError("Unknown target store")
store_config = publish_config[target_store]
rollout_percentage = task.get("rollout_percentage")
google_track = task.get("google_play_track", store_config["default_track"])
google_track = _handle_legacy_google_track(google_track)
return {
"target_store": target_store,
"dry_run": _google_should_do_dry_run(task),
"certificate_alias": publish_config.get("certificate_alias"),
"username": store_config["service_account"],
"secret": store_config["credentials_file"],
"package_names": publish_config["package_names"],
"google_track": google_track,
"google_rollout_percentage": rollout_percentage,
}
def get_publish_config(product_config, task, scope_product):
override_channel_model = product_config.get("override_channel_model")
if override_channel_model == "single_google_app":
# reference-browser uses a single Google app - with `channel` referring to the Google default track -
# rather than a separate app-per-channel. So, reference-browser is configured with "single_google_app"
return METHOD_NAME(product_config, task)
elif override_channel_model == "choose_google_app_with_scope":
# Fennec only targets google, but doesn't provide the channel in the payload. We need
# to leverage the legacy strategy of inferring the channel from the scope, then choosing
# the Google app accordingly
return _get_google_app_by_scope_publish_config(product_config, task, scope_product)
else:
# The common configuration will have "channel" specified in the payload, which is used
# to choose the app to deploy to.
return _get_channel_publish_config(product_config, task) |
4,877 | test agents details scanner id typeerror | '''
test agents
'''
import pytest
from tenable.errors import UnexpectedValueError
from ..checker import check
@pytest.mark.vcr()
def test_agents_list_scanner_id_typeerror(api):
'''
test to raise the exception when type of scanner_id is not as defined
'''
with pytest.raises(TypeError):
api.agents.list(scanner_id='nope')
@pytest.mark.vcr()
def test_agents_list_offset_typeerror(api):
'''
test to raise the exception when type of offset is not as defined
'''
with pytest.raises(TypeError):
api.agents.list(offset='nope')
@pytest.mark.vcr()
def test_agents_list_limit_typeerror(api):
'''
test to raise the exception when type of limit is not as defined
'''
with pytest.raises(TypeError):
api.agents.list(limit='nope')
@pytest.mark.vcr()
def test_agents_list_sort_field_typeerror(api):
'''
test to raise the exception when type of sort field is not as defined
'''
with pytest.raises(TypeError):
api.agents.list(sort=((1, 'asc'),))
@pytest.mark.vcr()
def test_agents_list_sort_direction_typeerror(api):
'''
test to raise the exception when type of sort direction is not as defined
'''
with pytest.raises(TypeError):
api.agents.list(sort=(('uuid', 1),))
@pytest.mark.vcr()
def test_agents_list_sort_direction_unexpectedvalue(api):
'''
test to raise the exception when value of sort direction is not as defined
'''
with pytest.raises(UnexpectedValueError):
api.agents.list(sort=(('uuid', 'nope'),))
@pytest.mark.vcr()
def test_agents_list_filter_name_typeerror(api):
'''
test to raise the exception when type of filter name is not as defined
'''
with pytest.raises(TypeError):
api.agents.list((1, 'match', 'win'))
@pytest.mark.vcr()
def test_agents_list_filter_operator_typeerror(api):
'''
test to raise the exception when type of filter operator is not as defined
'''
with pytest.raises(TypeError):
api.agents.list(('distro', 1, 'win'))
@pytest.mark.vcr()
def test_agents_list_filter_value_typeerror(api):
'''
test to raise the exception when type of filter value is not as defined
'''
with pytest.raises(TypeError):
api.agents.list(('distro', 'match', 1))
@pytest.mark.vcr()
def test_agents_list_filter_type_typeerror(api):
'''
test to raise the exception when type of filter type is not as defined
'''
with pytest.raises(TypeError):
api.agents.list(filter_type=1)
@pytest.mark.vcr()
def test_agents_list_wildcard_typeerror(api):
'''
test to raise the exception when type of wildcard is not as defined
'''
with pytest.raises(TypeError):
api.agents.list(wildcard=1)
@pytest.mark.vcr()
def test_agents_list_wildcard_fields_typeerror(api):
'''
test to raise the exception when type of wildcard fields is not as defined
'''
with pytest.raises(TypeError):
api.agents.list(wildcard_fields='nope')
@pytest.mark.vcr()
def test_agents_list(api):
'''
test to get the agents list
'''
count = 0
agents = api.agents.list()
for agent in agents:
count += 1
check(agent, 'distro', str)
check(agent, 'id', int)
check(agent, 'ip', str)
check(agent, 'linked_on', int)
check(agent, 'name', str)
check(agent, 'platform', str)
check(agent, 'status', str)
check(agent, 'uuid', 'uuid')
assert count == agents.total
@pytest.mark.vcr()
def METHOD_NAME(api):
'''
test to raise the exception when type of scanner_id is not as defined
'''
with pytest.raises(TypeError):
api.agents.details(scanner_id='nope')
@pytest.mark.vcr()
def test_agents_details_agent_id_typeerror(api):
'''
test to raise the exception when type of agent_id is not as defined
'''
with pytest.raises(TypeError):
api.agents.details('nope')
@pytest.mark.vcr()
def test_agents_details_agent_details(api, agent):
'''
test to get the agent details
'''
resp = api.agents.details(agent['id'])
check(resp, 'distro', str)
check(resp, 'id', int)
check(resp, 'ip', str)
check(resp, 'linked_on', int)
check(resp, 'name', str)
check(resp, 'platform', str)
check(resp, 'status', str)
check(resp, 'uuid', 'uuid')
assert resp['id'] == agent['id']
# Add tests for singular & bulk agent deletion.
# Add tests for task_status.
@pytest.mark.vcr()
def test_agents_list_fields(api):
'''
test to get the agent list
'''
count = 0
agents = api.agents.list(
filter_type='or',
limit=45,
offset=5,
wildcard='match',
wildcard_fields=['name'])
for agent in agents:
count += 1
check(agent, 'distro', str)
check(agent, 'id', int)
check(agent, 'ip', str)
check(agent, 'linked_on', int)
check(agent, 'name', str)
check(agent, 'platform', str)
check(agent, 'status', str)
check(agent, 'uuid', 'uuid')
assert count == agents.total
@pytest.mark.vcr()
def test_agents_unlink_agent_id_typeerror(api):
'''
test to raise the exception when type of agent_id is not as defined
'''
with pytest.raises(TypeError) as type_error:
api.agents.unlink('nope')
assert len(type_error.value.args) == 1, "Test-case should raise only one validation error."
assert type_error.value.args[0] == "agent_id is of type str. Expected int", \
"Invalid type validation error for agent_id parameter is not raised by test-case."
@pytest.mark.vcr()
def test_agents_unlink_multiple_agent_id_typeerror(api):
'''
test to raise the exception when types of multiple agent_ids are not as defined
'''
with pytest.raises(TypeError) as type_error:
api.agents.unlink('nope', 'test', scanner_id=11)
assert len(type_error.value.args) == 1, "Test-case should raise only one validation error."
assert type_error.value.args[0] == "agent_ids is of type str. Expected int", \
"Invalid type validation error for agent_ids parameter is not raised by test-case."
@pytest.mark.vcr()
def test_agents_task_status_scanner_id_typeerror(api):
'''
test to raise the exception when type of scanner_id is not as defined
'''
with pytest.raises(TypeError) as type_error:
api.agents.task_status(scanner_id='nope')
assert len(type_error.value.args) == 1, "Test-case should raise only one validation error."
assert "task_status() missing 1 required positional argument: 'task_uuid'" in type_error.value.args[0], \
"Missing value of required scanner_id parameter error is not raised by test-case."
@pytest.mark.vcr()
def test_agents_task_status_task_uuid_typeerror(api):
'''
test to raise the exception when type of task_uuid is not as defined
'''
with pytest.raises(TypeError) as type_error:
api.agents.task_status(task_uuid=11, scanner_id=11)
assert len(type_error.value.args) == 1, "Test-case should raise only one validation error."
assert "task_uuid is of type int. Expected str" in type_error.value.args[0], \
"Missing value of required task_uuid parameter error is not raised by test-case." |
4,878 | certificate | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import annotations
import typing
from cryptography import x509
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives._serialization import PBES as PBES
from cryptography.hazmat.primitives.asymmetric import (
dsa,
ec,
ed448,
ed25519,
rsa,
)
from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
__all__ = [
"PBES",
"PKCS12PrivateKeyTypes",
"PKCS12Certificate",
"PKCS12KeyAndCertificates",
"load_key_and_certificates",
"load_pkcs12",
"serialize_key_and_certificates",
]
PKCS12PrivateKeyTypes = typing.Union[
rsa.RSAPrivateKey,
dsa.DSAPrivateKey,
ec.EllipticCurvePrivateKey,
ed25519.Ed25519PrivateKey,
ed448.Ed448PrivateKey,
]
class PKCS12Certificate:
def __init__(
self,
cert: x509.Certificate,
friendly_name: bytes | None,
):
if not isinstance(cert, x509.Certificate):
raise TypeError("Expecting x509.Certificate object")
if friendly_name is not None and not isinstance(friendly_name, bytes):
raise TypeError("friendly_name must be bytes or None")
self._cert = cert
self._friendly_name = friendly_name
@property
def friendly_name(self) -> bytes | None:
return self._friendly_name
@property
def METHOD_NAME(self) -> x509.Certificate:
return self._cert
def __eq__(self, other: object) -> bool:
if not isinstance(other, PKCS12Certificate):
return NotImplemented
return (
self.METHOD_NAME == other.METHOD_NAME
and self.friendly_name == other.friendly_name
)
def __hash__(self) -> int:
return hash((self.METHOD_NAME, self.friendly_name))
def __repr__(self) -> str:
return "<PKCS12Certificate({}, friendly_name={!r})>".format(
self.METHOD_NAME, self.friendly_name
)
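# Usage sketch (pem_bytes is an assumed, already-read PEM blob):
#   cert = x509.load_pem_x509_certificate(pem_bytes)
#   p12_cert = PKCS12Certificate(cert, b"server-cert")
#   p12_cert.friendly_name  # -> b"server-cert"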
class PKCS12KeyAndCertificates:
def __init__(
self,
key: PrivateKeyTypes | None,
cert: PKCS12Certificate | None,
additional_certs: list[PKCS12Certificate],
):
if key is not None and not isinstance(
key,
(
rsa.RSAPrivateKey,
dsa.DSAPrivateKey,
ec.EllipticCurvePrivateKey,
ed25519.Ed25519PrivateKey,
ed448.Ed448PrivateKey,
),
):
raise TypeError(
"Key must be RSA, DSA, EllipticCurve, ED25519, or ED448"
" private key, or None."
)
if cert is not None and not isinstance(cert, PKCS12Certificate):
raise TypeError("cert must be a PKCS12Certificate object or None")
if not all(
isinstance(add_cert, PKCS12Certificate)
for add_cert in additional_certs
):
raise TypeError(
"all values in additional_certs must be PKCS12Certificate"
" objects"
)
self._key = key
self._cert = cert
self._additional_certs = additional_certs
@property
def key(self) -> PrivateKeyTypes | None:
return self._key
@property
def cert(self) -> PKCS12Certificate | None:
return self._cert
@property
def additional_certs(self) -> list[PKCS12Certificate]:
return self._additional_certs
def __eq__(self, other: object) -> bool:
if not isinstance(other, PKCS12KeyAndCertificates):
return NotImplemented
return (
self.key == other.key
and self.cert == other.cert
and self.additional_certs == other.additional_certs
)
def __hash__(self) -> int:
return hash((self.key, self.cert, tuple(self.additional_certs)))
def __repr__(self) -> str:
fmt = (
"<PKCS12KeyAndCertificates(key={}, cert={}, additional_certs={})>"
)
return fmt.format(self.key, self.cert, self.additional_certs)
def load_key_and_certificates(
data: bytes,
password: bytes | None,
backend: typing.Any = None,
) -> tuple[
PrivateKeyTypes | None,
x509.Certificate | None,
list[x509.Certificate],
]:
from cryptography.hazmat.backends.openssl.backend import backend as ossl
return ossl.load_key_and_certificates_from_pkcs12(data, password)
def load_pkcs12(
data: bytes,
password: bytes | None,
backend: typing.Any = None,
) -> PKCS12KeyAndCertificates:
from cryptography.hazmat.backends.openssl.backend import backend as ossl
return ossl.load_pkcs12(data, password)
_PKCS12CATypes = typing.Union[
x509.Certificate,
PKCS12Certificate,
]
def serialize_key_and_certificates(
name: bytes | None,
key: PKCS12PrivateKeyTypes | None,
cert: x509.Certificate | None,
cas: typing.Iterable[_PKCS12CATypes] | None,
encryption_algorithm: serialization.KeySerializationEncryption,
) -> bytes:
if key is not None and not isinstance(
key,
(
rsa.RSAPrivateKey,
dsa.DSAPrivateKey,
ec.EllipticCurvePrivateKey,
ed25519.Ed25519PrivateKey,
ed448.Ed448PrivateKey,
),
):
raise TypeError(
"Key must be RSA, DSA, EllipticCurve, ED25519, or ED448"
" private key, or None."
)
if cert is not None and not isinstance(cert, x509.Certificate):
raise TypeError("cert must be a certificate or None")
if cas is not None:
cas = list(cas)
if not all(
isinstance(
val,
(
x509.Certificate,
PKCS12Certificate,
),
)
for val in cas
):
raise TypeError("all values in cas must be certificates")
if not isinstance(
encryption_algorithm, serialization.KeySerializationEncryption
):
raise TypeError(
"Key encryption algorithm must be a "
"KeySerializationEncryption instance"
)
if key is None and cert is None and not cas:
raise ValueError("You must supply at least one of key, cert, or cas")
from cryptography.hazmat.backends.openssl.backend import backend
return backend.serialize_key_and_certificates_to_pkcs12(
name, key, cert, cas, encryption_algorithm
) |
4,879 | send data | #!/usr/bin/python3
import importlib
from typing import TYPE_CHECKING, Any, Dict, Generator, Optional, Tuple
import rospy
from serial import SerialException
from mil_usb_to_can.sub8.board import (
USBtoCANBoard,
)
# relative import causes import error with rosrun - GH-731
from mil_usb_to_can.sub8.utils import USB2CANException
if TYPE_CHECKING:
from .device import CANDeviceHandle
class USBtoCANDriver:
"""
ROS driver which implements the USB to CAN board. Allows users to specify a dictionary of
device handle classes to be loaded at runtime to handle communication with
specific devices.
Attributes:
board (USBtoCANBoard): The board the driver is implementing.
handles (dict[int, CANDeviceHandle]): The handles served by the driver. Each key represents
a unique device ID, and each corresponding value represents an instance of
a child class inheriting from :class:`CANDeviceHandle`. Upon initialization,
each class is constructed after being parsed from dynamic reconfigure.
timer (rospy.Timer): The timer controlling when buffers are processed.
"""
def __init__(self):
port = rospy.get_param("~port", "/dev/tty0")
baud = rospy.get_param("~baudrate", 115200)
can_id = rospy.get_param("~can_id", 0)
simulation = rospy.get_param("/is_simulation", False)
# If simulation mode, load simulated devices
if simulation:
rospy.logwarn(
"CAN2USB driver in simulation! Will not talk to real hardware.",
)
devices = dict(
list(
self.parse_module_dictionary(rospy.get_param("~simulated_devices")),
),
)
self.board = USBtoCANBoard(
port=port,
baud=baud,
simulated=simulation,
devices=devices,
can_id=can_id,
)
else:
self.board = USBtoCANBoard(port=port, baud=baud, simulated=simulation)
# Add device handles from the modules specified in ROS params
self.handles: Dict[int, CANDeviceHandle] = {
device_id: cls(self, device_id)
for device_id, cls in self.parse_module_dictionary(
rospy.get_param("~device_handles"),
)
}
self.timer = rospy.Timer(rospy.Duration(1.0 / 20.0), self.process_in_buffer)
def read_packet(self) -> bool:
"""
Attempt to read a packet from the board. If the packet has an appropriate device
handler, then the packet is passed to the ``on_data`` method of that handler.
Returns:
bool: The success in reading a packet.
"""
try:
packet = self.board.read_packet()
except (SerialException, USB2CANException) as e:
rospy.logerr(f"Error reading packet: {e}")
return False
if packet is None:
return False
if packet.device in self.handles:
self.handles[packet.device].on_data(packet.data)
else:
rospy.logwarn(
f"Message received for device {packet.device}, but no handle registered",
)
return True
def process_in_buffer(self, *args) -> None:
"""
Read all available packets in the board's in-buffer.
"""
while self.read_packet():
pass
def METHOD_NAME(self, *args, **kwargs) -> Optional[Exception]:
"""
Sends data using the :meth:`USBtoCANBoard.send_data` method.
Returns:
Optional[Exception]: If data was sent successfully, nothing is returned.
Otherwise, the exception that was raised in sending is returned.
"""
try:
self.board.METHOD_NAME(*args, **kwargs)
return None
except (SerialException, USB2CANException) as e:
rospy.logerr(f"Error writing packet: {e}")
return e
@staticmethod
def parse_module_dictionary(
d: Dict[str, Any],
) -> Generator[Tuple[int, Any], None, None]:
"""
Generator to load classes from module strings specified in a dictionary.
Imports all found classes.
Yields:
Generator[Tuple[int, Any], None, None]: Yields tuples containing the device
ID and the associated class.
"""
for device_id, module_name in d.items():
device_id = int(device_id)
# Split module from class name
module_name, cls = module_name.rsplit(".", 1)
# import module
module = importlib.import_module(module_name)
# Yield a tuple (device_id, imported_class)
yield device_id, getattr(module, cls)
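# Usage sketch (hypothetical module path): ROS params map stringified device
# IDs to fully-qualified handle classes, e.g.
#   d = {"18": "my_pkg.handles.ExampleDeviceHandle"}
#   handles = dict(USBtoCANDriver.parse_module_dictionary(d))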
if __name__ == "__main__":
rospy.init_node("usb_to_can_driver")
driver = USBtoCANDriver()
rospy.spin() |
4,880 | get | # -*- coding: utf-8 -*-
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import sys
import email
class SvDocstring(object):
"""
A class that encapsulates parsing of Sverchok's node docstrings.
As a standard, RFC822-style syntax is to be used. The docstring should
start with headers:
Triggers: This should be very short (two or three words, not much more) to be used in the Ctrl-Space search menu.
Tooltip: Longer description to be presented as a tooltip in the UI.
More detailed description with technical information or historical notes goes after an empty line.
This is not shown anywhere in the UI.
Other headers can possibly be introduced later. Unknown headers are just ignored.
For compatibility reasons, the old docstring syntax is also supported:
Triggers description /// Longer description
If we can't parse Triggers and Tooltip from docstring, then:
* The whole docstring will be used as tooltip
* The node will not have shorthand for search.
"""
def __init__(self, docstring):
self.docstring = docstring
if docstring:
self.message = email.message_from_string(SvDocstring.trim(docstring))
else:
self.message = {}
@staticmethod
def trim(docstring):
"""
Trim docstring indentation and extra spaces.
This is just copy-pasted from PEP-0257.
"""
if not docstring:
return ''
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
indent = sys.maxsize
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
if indent < sys.maxsize:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed)
def METHOD_NAME(self, header, default=None):
"""Obtain any header from docstring."""
return self.message.METHOD_NAME(header, default)
def __getitem__(self, header):
return self.message[header]
def get_shorthand(self, fallback=True):
"""
Get shorthand to be used in search menu.
If fallback == True, then whole docstring
will be returned for case when we can't
find valid shorthand specification.
"""
if 'Triggers' in self.message:
return self.message['Triggers']
elif not self.docstring:
return ""
elif '///' in self.docstring:
return self.docstring.strip().split('///')[0]
elif fallback:
return self.docstring
else:
return None
def has_shorthand(self):
return self.get_shorthand() is not None
def get_tooltip(self):
"""Get tooltip"""
if 'Tooltip' in self.message:
return self.message['Tooltip'].strip()
elif not self.docstring:
return ""
elif '///' in self.docstring:
return self.docstring.strip().split('///')[1].strip()
else:
return self.docstring.strip()
|
4,881 | run const expr | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Consist of utilities and methods for lowering QNN into mainline relay."""
from typing import Callable
import numpy as np
import tvm
from tvm import relay
def METHOD_NAME(expr: "relay.Expr") -> np.ndarray:
"""Evaluate a const expression, receiving result as np array.
If a number of passes are disabled in the current Pass Context, then there is no need to disable
these passes for const expression evaluation as well. That's why we use empty list
"disabled_pass=[]", all other arguments are inherited from the current Pass Context.
"""
curr_pass_ctx = tvm.ir.transform.PassContext.current()
with tvm.ir.transform.PassContext(
opt_level=curr_pass_ctx.opt_level,
required_pass=curr_pass_ctx.required_pass,
disabled_pass=[],
instruments=curr_pass_ctx.instruments,
config=curr_pass_ctx.config,
):
mod = tvm.IRModule.from_expr(expr)
vm_exe = relay.create_executor("vm", mod=mod)
output = vm_exe.evaluate()().asnumpy()
return output
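# Usage sketch: fold a small constant expression down to a numpy value, e.g.
#   expr = relay.add(relay.const(1.0), relay.const(2.0))
#   METHOD_NAME(expr)  # -> array(3., dtype=float32)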
def create_integer_lookup_table(
floating_point_func: Callable[[np.ndarray], np.ndarray],
input_scale: "relay.Expr",
input_zero_point: "relay.Expr",
output_scale: "relay.Expr",
output_zero_point: "relay.Expr",
in_axis: int = -1,
out_axis: int = -1,
in_dtype: str = "uint8",
out_dtype: str = "uint8",
) -> np.ndarray:
"""
Return a table where each input indexes to the output quantizing the given function.
Note this also supports mapping unsigned and signed integers to each other.
Args:
floating_point_func: The numpy function which this table is to approximate
input_scale: The scale of the quantized input tensor.
input_zero_point: The zero point of the quantized input tensor.
output_scale: The scale of the quantized output tensor.
output_zero_point: The zero point of the quantized output tensor.
in_axis: The axis for multi-channel quantization of the input if applicable.
out_axis: The axis for multi-channel quantization of the output if applicable.
in_dtype: The dtype of the input tensor.
out_dtype: The wanted dtype of the output tensor.
Returns:
A numpy array where values in quantized space will index to the output in quantized space
approximating the given function.
"""
if not np.issubdtype(np.dtype(in_dtype), np.integer) or not np.issubdtype(
np.dtype(out_dtype), np.integer
):
raise ValueError(
f"Only integer dtypes allowed got {in_dtype} and {out_dtype} for in and out dtypes."
)
dtype_info = np.iinfo(in_dtype)
num_bits = dtype_info.bits
# Use TVM's quantization methods via relay to be consistent
# inputs_quantized = np.array(range(dtype_info.min, dtype_info.max + 1)).astype(in_dtype)
# First generate a list of all num_bit integer patterns
inputs_quantized = np.array(range(0, 2**num_bits), dtype=f"uint{num_bits}")
# Reinterpret bits as the real datatype
# Note what we are doing here is a bit tricky, the canonical view of our lookup table
# is using the uintX version. When we run the lookup in the relay graph, we cast the
# bit pattern back into this form.
inputs_quantized = inputs_quantized.view(in_dtype)
inputs_quantized = relay.const(inputs_quantized, dtype=in_dtype)
inputs_dequantized = METHOD_NAME(
relay.qnn.op.dequantize(
inputs_quantized,
input_scale=input_scale,
input_zero_point=input_zero_point,
axis=in_axis,
)
)
output_dequantized = relay.const(floating_point_func(inputs_dequantized))
output_quantized = METHOD_NAME(
relay.qnn.op.quantize(
output_dequantized, output_scale, output_zero_point, out_axis, out_dtype
)
)
return output_quantized
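# Usage sketch (hypothetical quantization parameters): a 256-entry uint8
# table approximating tanh in quantized space.
#   table = create_integer_lookup_table(
#       np.tanh,
#       relay.const(0.02), relay.const(0, dtype="int32"),      # input qparams
#       relay.const(1.0 / 128), relay.const(0, dtype="int32"), # output qparams
#   )
#   # table[i] approximates quantize(tanh(dequantize(i)))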
def create_integer_lookup_op(
input_arg: "relay.Expr",
floating_point_func: Callable[[np.array], np.array],
in_scale: "relay.Expr",
in_zero_point: "relay.Expr",
out_scale: "relay.Expr",
out_zero_point: "relay.Expr",
in_axis: int = -1,
out_axis: int = -1,
in_dtype: str = "uint8",
out_dtype: str = "uint8",
) -> "relay.Expr":
"""
Create a quantized version of the given floating point unary operation using table lookup.
Args:
input_arg: The quantized input to the final function.
floating_point_func: The numpy function which this table is to approximate
in_scale: The scale of the quantized input tensor.
in_zero_point: The zero point of the quantized input tensor.
out_scale: The scale of the quantized output tensor.
out_zero_point: The zero point of the quantized output tensor.
in_axis: The axis for multi-channel quantization of the input if applicable.
out_axis: The axis for multi-channel quantization of the output if applicable.
in_dtype: The dtype of the input tensor.
out_dtype: The wanted dtype of the output tensor.
Returns:
A Relay expression representing a quantized version of the given function.
"""
# TODO: handle multi-channel q, below will fail with multi-channel q
in_scale = in_scale.data.numpy().item()
in_zero_point = in_zero_point.data.numpy().item()
out_scale = out_scale.data.numpy().item()
out_zero_point = out_zero_point.data.numpy().item()
lookup_table = create_integer_lookup_table(
floating_point_func,
relay.const(in_scale),
relay.const(in_zero_point, dtype="int32"),
relay.const(out_scale),
relay.const(out_zero_point, dtype="int32"),
in_axis=in_axis,
in_dtype=in_dtype,
out_axis=out_axis,
out_dtype=out_dtype,
)
in_dtype_info = np.iinfo(in_dtype)
in_dtype_num_bits = in_dtype_info.bits
lookup_table = relay.const(lookup_table)
index_tensor = relay.reinterpret(input_arg, f"uint{in_dtype_num_bits}")
result = relay.take(lookup_table, index_tensor, axis=0, mode="fast")
return result |
4,882 | get enum includes | # ===============================================================================
# NAME: Port.py
#
# DESCRIPTION: This is a Port meta-model sort of class. It is
# contained within a Component class as a list
# of Port instances and is visited by the code
# generators.
#
# USAGE:
#
# AUTHOR: reder
# EMAIL: reder@jpl.nasa.gov
# DATE CREATED : Feb. 11, 2013
#
# Copyright 2013, California Institute of Technology.
# ALL RIGHTS RESERVED. U.S. Government Sponsorship acknowledged.
# ===============================================================================
#
# Python standard modules
#
import logging
#
# Python extension modules and custom interfaces
#
#
# Universal globals used within module go here.
# (DO NOT USE MANY!)
#
# Global logger init. below.
PRINT = logging.getLogger("output")
DEBUG = logging.getLogger("debug")
#
class Port:
"""
This is a very simple component meta-model class.
Note: associations to Arg instances as needed.
"""
def __init__(
self,
pname,
ptype,
direction,
sync=None,
priority=None,
full=None,
comment=None,
xml_filename=None,
max_number=None,
role=None,
):
"""
Constructor
@param pname: Name of port instance (each instance must be unique)
@param ptype: Type of port (must have supporting include xml)
@param direction: Direction of data flow (must be input or output)
@param sync: Kind of port (must be one of: async, sync, or guarded)
@param comment: A single or multi-line comment describing the port
"""
self.__pname = pname
self.__ptype = ptype
self.__direction = direction
self.__sync = sync
self.__priority = priority
self.__full = full
self.__comment = comment
self.__role = role
self.__namespace = ""
self.__arg_obj_list = []
self.__incl_list = []
self.__if_comment = ""
self.__xml_filename = xml_filename
#
self.__target_comp = None
self.__target_port = None
self.__target_type = None
self.__target_direction = None
self.__target_num = None
self.__source_num = None
#
self.__max_number = max_number
#
self.__return_type = None
self.__return_modifier = None
def set(
self,
namespace,
arg_obj_list,
incl_list,
incl_serial_list,
incl_enum_list,
if_comment,
):
"""
Set type specific stuff for port type here.
Not instance specific stuff.
@param namespace: Namespace for port type.
@param arg_obj_list: List of Arg objects, one per argument.
@param incl_list: List of Include objects for each data type.
@param if_comment: Interface comment.
"""
self.__namespace = namespace
self.__arg_obj_list = arg_obj_list
self.__incl_list = incl_list
self.__incl_serial_list = incl_serial_list
self.__incl_enum_list = incl_enum_list
self.__if_comment = if_comment
def set_return(self, t, m):
"""
Set the optional return type if one is specified here.
"""
self.__return_type = t
self.__return_modifier = m
def get_return(self):
"""
        Return a tuple of (type, modifier), or None if both are None.
"""
if (self.__return_modifier is None) and (self.__return_type is None):
return None
else:
return (self.__return_type, self.__return_modifier)
def set_role(self, role):
"""
Sets the role
"""
self.__role = role
def get_role(self):
"""
Returns role
"""
return self.__role
def get_xml_filename(self):
"""
Returns the original xml filename
"""
return self.__xml_filename
def get_namespace(self):
"""
Returns the port's namespace
"""
return self.__namespace
def get_name(self):
"""
Returns the port's instance name.
"""
return self.__pname
def get_direction(self):
"""
Return the direction for port.
"""
return self.__direction
def get_type(self):
"""
Return the port's type.
"""
return self.__ptype
def get_priority(self):
"""
Return the port's priority.
"""
return self.__priority
def get_full(self):
"""
Return the port's queue full behavior.
"""
return self.__full
def get_sync(self):
"""
Returns type of port.
"""
return self.__sync
def get_comment(self):
"""
Returns comment for the port.
"""
return self.__comment
def get_ifcomment(self):
"""
Return interface comment for the port.
"""
return self.__if_comment
def get_args(self):
"""
Return a list of Arg objects for each argument.
"""
return self.__arg_obj_list
def get_includes(self):
"""
Return a list of include files for each port type.
These support data types within each Arg object.
"""
return self.__incl_list
def get_serial_includes(self):
""""""
return self.__incl_serial_list
def METHOD_NAME(self):
""""""
return self.__incl_enum_list
def get_target_comp(self):
return self.__target_comp
def get_target_port(self):
return self.__target_port
def get_target_type(self):
return self.__target_type
def get_target_direction(self):
return self.__target_direction
def set_target_comp(self, comp):
self.__target_comp = comp
def set_target_port(self, port):
self.__target_port = port
def set_target_type(self, type):
self.__target_type = type
def set_target_direction(self, direction):
self.__target_direction = direction
def set_direction(self, direction):
self.__direction = direction
def get_max_number(self):
return self.__max_number
def get_target_num(self):
return self.__target_num
def set_target_num(self, n):
self.__target_num = n
def get_source_num(self):
return self.__source_num
def set_source_num(self, n):
self.__source_num = n |
4,883 | brace expand words | #!/usr/bin/env python
"""
braces.py
NOTE: bash implements brace expansion in the braces.c file (835 lines). It
uses goto!
Possible optimization flags for CompoundWord:
- has Lit_LBrace, Lit_RBrace -- set during word_parse phase
- if it has both, then do _BraceDetect
- has BracedAltPart -- set during _BraceDetect
- if it does, then do the expansion
- has Lit_Star, ?, [ ] -- globbing?
- but after expansion do you still have those flags?
"""
from __future__ import print_function
import sys
from osh.meta import Id
from osh.meta import ast
word_part_e = ast.word_part_e
word_e = ast.word_e
class _StackFrame(object):
def __init__(self, cur_parts):
self.cur_parts = cur_parts
self.alt_part = ast.BracedAltPart()
self.saw_comma = False
def _BraceDetect(w):
"""
Args:
CompoundWord
Returns:
CompoundWord or None?
Another option:
Grammar:
# an alternative is a literal, possibly empty, or another brace_expr
part = <any part except LiteralPart>
alt = part* | brace_expr
# a brace_expr is group of at least 2 braced and comma-separated
# alternatives, with optional prefix and suffix.
brace_expr = part* '{' alt ',' alt (',' alt)* '}' part*
  Problem with this grammar: it's not LL(1)
Is it indirect left-recursive?
What's the best way to handle it? LR(1) parser?
Iterative algorithm:
Parse it with a stack?
It's a stack that asserts there is at least one , in between {}
Yeah just go through and when you see {, push another list.
When you get , append to list
  When you get } and at least one ',', append to list
When you get } without, then pop
If there is no matching }, then abort with error
if not balanced, return error too?
"""
# Errors:
# }a{ - stack depth dips below 0
# {a,b}{ - Stack depth doesn't end at 0
  # {a} - no comma, and also not a numeric range
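  # Illustrative trace of the loop below for the word a{b,c}d:
  #   'a' -> cur_parts = [a]
  #   '{' -> push _StackFrame([a]); cur_parts = []
  #   'b' -> cur_parts = [b]
  #   ',' -> alt_part.words += CompoundWord([b]); cur_parts = []
  #   'c' -> cur_parts = [c]
  #   '}' -> alt_part.words += CompoundWord([c]); pop; cur_parts = [a, alt_part]
  #   'd' -> cur_parts = [a, alt_part, d]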
cur_parts = []
stack = []
found = False
for i, part in enumerate(w.parts):
append = True
if part.tag == word_part_e.LiteralPart:
id_ = part.token.id
if id_ == Id.Lit_LBrace:
# Save prefix parts. Start new parts list.
new_frame = _StackFrame(cur_parts)
stack.append(new_frame)
cur_parts = []
append = False
found = True # assume found, but can early exit with None later
elif id_ == Id.Lit_Comma:
# Append a new alternative.
#print('*** Appending after COMMA', cur_parts)
# NOTE: Should we allow this:
# ,{a,b}
# or force this:
# \,{a,b}
# ? We're forcing braces right now but not commas.
if stack:
stack[-1].saw_comma = True
stack[-1].alt_part.words.append(ast.CompoundWord(cur_parts))
cur_parts = [] # clear
append = False
elif id_ == Id.Lit_RBrace:
# TODO:
# - Detect lack of , -- abort the whole thing
# - Detect {1..10} and {1..10..2}
# - bash and zsh only -- this is NOT implemented by mksh
# - Use a regex on the middle part:
# - digit+ '..' digit+ ( '..' digit+ )?
# - Char ranges are bash only!
#
# ast.BracedIntRangePart()
# ast.CharRangePart()
        if not stack:  # e.g. echo } -- unbalanced {
return None
if not stack[-1].saw_comma: # {foo} is not a real alternative
return None
stack[-1].alt_part.words.append(ast.CompoundWord(cur_parts))
frame = stack.pop()
cur_parts = frame.cur_parts
cur_parts.append(frame.alt_part)
append = False
if append:
cur_parts.append(part)
if len(stack) != 0:
return None
if found:
return ast.BracedWordTree(cur_parts)
else:
return None
def BraceDetectAll(words):
out = []
for w in words:
#print(w)
brace_tree = _BraceDetect(w)
if brace_tree:
out.append(brace_tree)
else:
out.append(w)
return out
# Possible optimization for later:
def _TreeCount(tree_word):
"""Count output size for allocation purposes.
We can count the number of words expanded into, and the max number of parts
in a word.
Every word can have a different number of parts, e.g. -{'a'b,c}- expands into
words of 4 parts, then 3 parts.
"""
# TODO: Copy the structure of _BraceExpand and _BraceExpandOne.
for part in tree_word.parts:
if part.tag == word_part_e.BracedAltPart:
for word in part.words:
pass
num_results = 2
max_parts = 5
return num_results, max_parts
def _BraceExpandOne(parts, first_alt_index, suffixes):
"""Helper for _BraceExpand.
Args:
parts: input parts
first_alt_index: index of the first BracedAltPart
suffixes: List of suffixes to append.
"""
out = []
# Need to call _BraceExpand on each of the inner words too!
first_alt = parts[first_alt_index]
expanded_alts = []
for w in first_alt.words:
expanded_alts.extend(_BraceExpand(w.parts))
prefix = parts[ : first_alt_index]
for alt_parts in expanded_alts:
for suffix in suffixes:
out_parts = []
out_parts.extend(prefix)
out_parts.extend(alt_parts)
out_parts.extend(suffix)
# TODO: Do we need to preserve flags?
out.append(out_parts)
return out
def _BraceExpand(parts):
num_alts = 0
first_alt_index = -1
for i, part in enumerate(parts):
if part.tag == word_part_e.BracedAltPart:
num_alts += 1
if num_alts == 1:
first_alt_index = i
elif num_alts == 2:
break # don't need to count anymore
# NOTE: There are TWO recursive calls here, not just one -- one for
# nested {}, and one for adjacent {}. Thus it's hard to do iteratively.
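  # Sketch: for {a,b}{c,d} the tail {c,d} is expanded first into the suffixes
  # [c] and [d]; _BraceExpandOne then crosses {a,b} with them, yielding
  # ac, ad, bc and bd.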
if num_alts == 0:
return [parts]
elif num_alts == 1:
suffix = parts[first_alt_index+1 : ]
return _BraceExpandOne(parts, first_alt_index, [suffix])
else:
# Now call it on the tail
tail_parts = parts[first_alt_index+1 : ]
suffixes = _BraceExpand(tail_parts) # recursive call
return _BraceExpandOne(parts, first_alt_index, suffixes)
def METHOD_NAME(words):
out = []
for w in words:
if w.tag == word_e.BracedWordTree:
parts_list = _BraceExpand(w.parts)
out.extend(ast.CompoundWord(p) for p in parts_list)
else:
out.append(w)
return out
def _Cartesian(tuples):
if len(tuples) == 1:
for x in tuples[0]:
yield (x,)
else:
for x in tuples[0]:
for y in _Cartesian(tuples[1:]):
yield (x,) + y # join tuples
def main(argv):
for t in _Cartesian([('a', 'b')]):
print(t)
print('--')
for t in _Cartesian([('a', 'b'), ('c', 'd', 'e'), ('f', 'g')]):
print(t)
if __name__ == '__main__':
try:
main(sys.argv)
except RuntimeError as e:
print('FATAL: %s' % e, file=sys.stderr)
sys.exit(1) |
4,884 | tags | # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Classes and functions to manage package tags"""
import collections
import copy
from collections.abc import Mapping
import spack.environment
import spack.error
import spack.repo
import spack.util.spack_json as sjson
def _get_installed_package_names():
"""Returns names of packages installed in the active environment."""
specs = spack.environment.installed_specs()
return [spec.name for spec in specs]
def packages_with_tags(METHOD_NAME, installed, skip_empty):
"""
Returns a dict, indexed by tag, containing lists of names of packages
containing the tag or, if no tags, for all available tags.
Arguments:
tags (list or None): list of tags of interest or None for all
installed (bool): True if want names of packages that are installed;
otherwise, False if want all packages with the tag
skip_empty (bool): True if exclude tags with no associated packages;
otherwise, False if want entries for all tags even when no such
tagged packages
"""
    tag_pkgs = collections.defaultdict(list)
spec_names = _get_installed_package_names() if installed else []
keys = spack.repo.PATH.tag_index if METHOD_NAME is None else METHOD_NAME
for tag in keys:
packages = [
name for name in spack.repo.PATH.tag_index[tag] if not installed or name in spec_names
]
if packages or not skip_empty:
tag_pkgs[tag] = packages
return tag_pkgs
class TagIndex(Mapping):
"""Maps tags to list of packages."""
def __init__(self, repository):
self._tag_dict = collections.defaultdict(list)
self.repository = repository
@property
def METHOD_NAME(self):
return self._tag_dict
def to_json(self, stream):
sjson.dump({"tags": self._tag_dict}, stream)
@staticmethod
def from_json(stream, repository):
d = sjson.load(stream)
if not isinstance(d, dict):
raise TagIndexError("TagIndex data was not a dict.")
if "tags" not in d:
raise TagIndexError("TagIndex data does not start with 'tags'")
r = TagIndex(repository=repository)
for tag, packages in d["tags"].items():
r[tag].extend(packages)
return r
def __getitem__(self, item):
return self._tag_dict[item]
def __iter__(self):
return iter(self._tag_dict)
def __len__(self):
return len(self._tag_dict)
def copy(self):
"""Return a deep copy of this index."""
clone = TagIndex(repository=self.repository)
clone._tag_dict = copy.deepcopy(self._tag_dict)
return clone
def get_packages(self, tag):
"""Returns all packages associated with the tag."""
return self.METHOD_NAME[tag] if tag in self.METHOD_NAME else []
def merge(self, other):
"""Merge another tag index into this one.
Args:
other (TagIndex): tag index to be merged
"""
other = other.copy() # defensive copy.
for tag in other.METHOD_NAME:
if tag not in self.METHOD_NAME:
self.METHOD_NAME[tag] = other.METHOD_NAME[tag]
continue
spkgs, opkgs = self.METHOD_NAME[tag], other.METHOD_NAME[tag]
self.METHOD_NAME[tag] = sorted(list(set(spkgs + opkgs)))
def update_package(self, pkg_name):
"""Updates a package in the tag index.
Args:
pkg_name (str): name of the package to be removed from the index
"""
pkg_cls = self.repository.get_pkg_class(pkg_name)
# Remove the package from the list of packages, if present
for pkg_list in self._tag_dict.values():
if pkg_name in pkg_list:
pkg_list.remove(pkg_name)
# Add it again under the appropriate tags
for tag in getattr(pkg_cls, "tags", []):
tag = tag.lower()
self._tag_dict[tag].append(pkg_cls.name)
class TagIndexError(spack.error.SpackError):
"""Raised when there is a problem with a TagIndex.""" |
4,885 | convert | import os
import core.exceptions as ex
from utilities.converters import convert_size
from env import Env
from utilities.subsystems.zfs import Dataset
from core.objects.svcdict import KEYS
from core.capabilities import capabilities
from .. import KWS_POOLING
KEYWORDS = KWS_POOLING + [
{
"section": "fs",
"rtype": "zfs",
"keyword": "size",
"required": False,
"convert": "size",
"at": True,
"text": "Used by default as the refquota of the provisioned dataset. The quota, refquota, reservation and refreservation values can be expressed as a multiplier of size (example: quota=x2).",
"provisioning": True
},
{
"section": "fs",
"rtype": "zfs",
"keyword": "refquota",
"required": False,
"default": "x1",
"at": True,
"text": "The dataset 'refquota' property value to set on provision. The value can be 'none', or a size expression, or a multiplier of the size keyword value (ex: x2).",
"provisioning": True
},
{
"section": "fs",
"rtype": "zfs",
"keyword": "quota",
"required": False,
"at": True,
"text": "The dataset 'quota' property value to set on provision. The value can be 'none', or a size expression, or a multiplier of the size keyword value (ex: x2).",
"provisioning": True
},
{
"section": "fs",
"rtype": "zfs",
"keyword": "refreservation",
"required": False,
"at": True,
"text": "The dataset 'refreservation' property value to set on provision. The value can be 'none', or a size expression, or a multiplier of the size keyword value (ex: x2).",
"provisioning": True
},
{
"section": "fs",
"rtype": "zfs",
"keyword": "reservation",
"required": False,
"at": True,
"text": "The dataset 'reservation' property value to set on provision. The value can be 'none', or a size expression, or a multiplier of the size keyword value (ex: x2).",
"provisioning": True
},
]
KEYS.register_driver(
"fs",
"zfs",
name=__name__,
keywords=KEYWORDS,
)
def driver_capabilities(node=None):
from utilities.proc import which
data = []
if which("zfs"):
data.append("fs.zfs")
return data
class FsZfsMixin():
@property
def poolname(self):
return self.device.split("/")[0]
def unprovisioner(self):
if "node.x.zfs" not in capabilities:
self.log.error("zfs command not found")
raise ex.Error
import core.status
need_stop = None
for r in self.svc.get_resources(["volume", "disk.zfs"]):
if r.device != self.poolname:
continue
if r.status() not in (core.status.UP, core.status.STDBY_UP):
r.start()
need_stop = r
dataset = Dataset(self.device, log=self.log)
if dataset.exists():
dataset.destroy(["-r"])
if os.path.exists(self.mount_point) and os.path.isdir(self.mount_point):
try:
os.rmdir(self.mount_point)
self.log.info("rmdir %s", self.mount_point)
except OSError as exc:
self.log.warning("failed to rmdir %s: %s", self.mount_point, exc)
if need_stop:
need_stop.stop()
def provisioner(self):
if "node.x.zfs" not in capabilities:
self.log.error("zfs command not found")
raise ex.Error
dataset = Dataset(self.device, log=self.log)
mkfs_opt = ["-p"]
mkfs_opt += self.oget("mkfs_opt")
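        # Default the mountpoint and canmount properties unless mkfs_opt
        # already sets them; canmount=noauto leaves mounting under the
        # service's control instead of ZFS auto-mounting at pool import.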
        if not any(e.startswith("mountpoint=") for e in mkfs_opt):
            mkfs_opt += ['-o', 'mountpoint=' + self.mount_point]
        if not any(e.startswith("canmount=") for e in mkfs_opt):
            mkfs_opt += ['-o', 'canmount=noauto']
        if not dataset.exists():
            dataset.create(mkfs_opt)
def METHOD_NAME(x, size):
val = self.oget(x)
if val in (None, "none", ""):
return
if val[0] == "x":
if not size:
return
try:
m = float(val[1:])
except Exception:
raise ex.Error("%s set to a multiplier of size, but invalid: %s" % (x, val))
return int(size * m)
return convert_size(val, _to="m")
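        # Example (sketch): with size=2048 (MB), refquota="x2" yields 4096,
        # while refquota="1g" goes through convert_size("1g", _to="m"),
        # i.e. roughly 1024.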
nv_list = dict()
size = self.oget("size")
if size:
size = convert_size(size, _to="m")
for prop in ("refquota", "quota", "reservation", "refreservation"):
val = METHOD_NAME(prop, size)
if val:
nv_list[prop] = "%dM" % val
if not nv_list:
return
dataset.verify_prop(nv_list)
def provisioned(self):
dataset = Dataset(self.device, log=self.log)
return dataset.exists() |
4,886 | test image create | from datetime import datetime
from plone.app.textfield import RichTextValue
from plone.namedfile.file import NamedFile
from plone.namedfile.file import NamedImage
from Products.CMFPlone.tests import dummy
from Products.CMFPlone.tests import PloneTestCase
AddPortalTopics = "Add portal topics"
class TestContentTypeScripts(PloneTestCase.PloneTestCase):
def afterSetUp(self):
perms = self.getPermissionsOfRole("Member")
self.setPermissions(perms + [AddPortalTopics], "Member")
self.request = self.app.REQUEST
def getPermissionsOfRole(self, role):
perms = self.portal.permissionsOfRole(role)
return [p["name"] for p in perms if p["selected"]]
def testDocumentCreate(self):
self.folder.invokeFactory("Document", id="doc", text=RichTextValue("data"))
self.assertEqual(self.folder.doc.text.raw, "data")
self.assertEqual(self.folder.doc.Format(), "text/html")
def testEventCreate(self):
self.folder.invokeFactory(
"Event",
id="event",
title="Foo",
start=datetime(year=2003, month=9, day=18),
end=datetime(year=2003, month=9, day=19),
)
self.assertEqual(self.folder.event.Title(), "Foo")
self.assertTrue(
self.folder.event.start.isoformat().startswith("2003-09-18T00:00:00")
)
self.assertTrue(
self.folder.event.end.isoformat().startswith("2003-09-19T00:00:00")
)
def testFileCreate(self):
self.folder.invokeFactory("File", id="file", file=NamedFile(dummy.File()))
self.assertEqual(self.folder.file.file.data, dummy.TEXT)
def METHOD_NAME(self):
self.folder.invokeFactory("Image", id="image", image=NamedImage(dummy.Image()))
self.assertEqual(self.folder.image.image.data, dummy.GIF)
def testFolderCreate(self):
self.folder.invokeFactory("Folder", id="folder", title="Foo", description="Bar")
self.assertEqual(self.folder.folder.Title(), "Foo")
self.assertEqual(self.folder.folder.Description(), "Bar")
def testLinkCreate(self):
self.folder.invokeFactory(
"Link", id="link", remoteUrl="http://foo.com", title="Foo"
)
self.assertEqual(self.folder.link.Title(), "Foo")
self.assertEqual(self.folder.link.remoteUrl, "http://foo.com")
def testNewsItemCreate(self):
self.folder.invokeFactory(
"News Item", id="newsitem", text=RichTextValue("data"), title="Foo"
)
self.assertEqual(self.folder.newsitem.text.raw, "data")
self.assertEqual(self.folder.newsitem.Title(), "Foo")
# Bug tests
def test_listMetaTypes(self):
self.folder.invokeFactory("Document", id="doc")
tool = self.portal.plone_utils
doc = self.folder.doc
doc.setTitle("title")
tool.listMetaTags(doc)
        # TODO: atm it checks only whether the script can be called w/o an error
class TestFileURL(PloneTestCase.PloneTestCase):
# Tests covering http://dev.plone.org/plone/ticket/3296
# file:// URLs should contain correct number of slashes
# NOTABUG: This is how urlparse.urlparse() works.
def testFileURLWithHost(self):
self.folder.invokeFactory("Link", id="link", remoteUrl="file://foo.com/baz.txt")
self.assertEqual(self.folder.link.remoteUrl, "file://foo.com/baz.txt")
def testFileURLNoHost(self):
self.folder.invokeFactory("Link", id="link", remoteUrl="file:///foo.txt")
self.assertEqual(self.folder.link.remoteUrl, "file:///foo.txt")
# DX does not pass url through urlparse/urlunparse like setRemoteUrl does.
# def testFileURLFourSlash(self):
# self.folder.invokeFactory('Link', id='link',
# remoteUrl='file:////foo.com/baz.txt')
# # See urlparse.urlparse()
# self.assertEqual(self.folder.link.remoteUrl,
# 'file://foo.com/baz.txt')
# def testFileURLFiveSlash(self):
# self.folder.invokeFactory('Link', id='link',
# remoteUrl='file://///foo.com/baz.txt')
# # See urlparse.urlparse()
# self.assertEqual(self.folder.link.remoteUrl,
# 'file:///foo.com/baz.txt')
# def testFileURLSixSlash(self):
# self.folder.invokeFactory('Link', id='link',
# remoteUrl='file://////foo.com/baz.txt')
# # See urlparse.urlparse()
# self.assertEqual(self.folder.link.remoteUrl,
# 'file:////foo.com/baz.txt')
class TestImageProps(PloneTestCase.PloneTestCase):
def testImageComputedProps(self):
from OFS.Image import Image
tag = Image.tag
kw = {"_title": "some title", "_alt": "alt tag", "height": 100, "width": 100}
# Wrap object so that ComputedAttribute gets executed.
self.ob = dummy.ImageComputedProps(**kw).__of__(self.folder)
endswith = 'alt="alt tag" title="some title" ' 'height="100" width="100" />'
self.assertEqual(tag(self.ob)[-len(endswith) :], endswith) |
4,887 | load config | # encoding: utf-8
# type: ignore
from __future__ import print_function
import sys
from typing import Any, Optional, cast
import click
import paste.script # type: ignore
import routes # type: ignore
from paste.registry import Registry # type: ignore
from urllib.parse import urlparse
from ckan.config.middleware import make_app
from ckan.cli import METHOD_NAME as _get_config
import ckan.logic as logic
import ckan.model as model
from ckan.common import config
import ckan.lib.maintain as maintain
# This is a test Flask request context to be used internally.
# Do not use it!
_cli_test_request_context: Any = None
# NB No CKAN imports are allowed until after the config file is loaded.
# i.e. do the imports in methods, after _load_config is called.
# Otherwise loggers get disabled.
@maintain.deprecated('Use @maintain.deprecated instead', since="2.9.0")
def deprecation_warning(message: Optional[str] = None):
'''
DEPRECATED
Print a deprecation warning to STDERR.
If ``message`` is given it is also printed to STDERR.
'''
sys.stderr.write(u'WARNING: This function is deprecated.')
if message:
sys.stderr.write(u' ' + message.strip())
sys.stderr.write(u'\n')
@maintain.deprecated(since='2.9.0')
def error(msg: str):
'''
DEPRECATED
Print an error message to STDOUT and exit with return code 1.
'''
sys.stderr.write(msg)
if not msg.endswith('\n'):
sys.stderr.write('\n')
sys.exit(1)
@maintain.deprecated('Use model.parse_db_config directly instead',
since='2.9.0')
def _parse_db_config(config_key: str = u'sqlalchemy.url'): # type: ignore
'''Deprecated'''
db_config = model.parse_db_config(config_key)
if not db_config:
raise Exception(
u'Could not extract db details from url: %r' % config[config_key]
)
return db_config
## from http://code.activestate.com/recipes/577058/ MIT licence.
## Written by Trent Mick
@maintain.deprecated('Instead you can probably use click.confirm()',
since='2.9.0')
def query_yes_no(question: str, default: str = "yes"):
"""DEPRECATED
Ask a yes/no question via input() and return their answer.
"question" is a string that is presented to the user.
"default" is the presumed answer if the user just hits <Enter>.
It must be "yes" (the default), "no" or None (meaning
an answer is required of the user).
The "answer" return value is one of "yes" or "no".
"""
valid = {"yes": "yes", "y": "yes", "ye": "yes",
"no": "no", "n": "no"}
if default is None:
prompt = " [y/n] "
elif default == "yes":
prompt = " [Y/n] "
elif default == "no":
prompt = " [y/N] "
else:
raise ValueError("invalid default answer: '%s'" % default)
while 1:
sys.stdout.write(question + prompt)
choice = input().strip().lower()
if default is not None and choice == '':
return default
elif choice in valid.keys():
return valid[choice]
else:
sys.stdout.write("Please respond with 'yes' or 'no' "
"(or 'y' or 'n').\n")
def METHOD_NAME(config: Any, load_site_user: bool = True):
conf = _get_config(config)
assert 'ckan' not in dir() # otherwise loggers would be disabled
# We have now loaded the config. Now we can import ckan for the
# first time.
from ckan.config.environment import load_environment
load_environment(conf)
# Set this internal test request context with the configured environment so
# it can be used when calling url_for from the CLI.
global _cli_test_request_context
app = make_app(conf)
flask_app = app.apps['flask_app']._wsgi_app
_cli_test_request_context = flask_app.test_request_context()
registry = Registry()
registry.prepare()
site_user = None
if model.user_table.exists() and load_site_user:
site_user = logic.get_action('get_site_user')({'ignore_auth': True}, {})
## give routes enough information to run url_for
parsed = urlparse(
cast(str, conf.get('ckan.site_url', 'http://0.0.0.0')))
request_config = routes.request_config()
request_config.host = parsed.netloc + parsed.path
request_config.protocol = parsed.scheme
return site_user
@maintain.deprecated('Instead use ckan.cli.cli.CkanCommand or extensions '
'should use IClick', since='2.9.0')
def paster_click_group(summary: str):
'''DEPRECATED
Return a paster command click.Group for paster subcommands
:param command: the paster command linked to this function from
setup.py, used in help text (e.g. "datastore")
:param summary: summary text used in paster's help/command listings
(e.g. "Perform commands to set up the datastore")
'''
class PasterClickGroup(click.Group):
'''A click.Group that may be called like a paster command'''
def __call__(self, ignored_command: str):
sys.argv.remove(ignored_command)
return super(PasterClickGroup, self).__call__(
prog_name=u'paster ' + ignored_command,
help_option_names=[u'-h', u'--help'],
obj={})
@click.group(cls=PasterClickGroup)
@click.option(
'--plugin',
metavar='ckan',
help='paster plugin (when run outside ckan directory)')
@click_config_option
@click.pass_context
def cli(ctx: Any, plugin: str, config: Any):
ctx.obj['config'] = config
# type_ignore_reason: custom attributes
cli.summary = summary # type: ignore
cli.group_name = u'ckan' # type: ignore
return cli
# common definition for paster ... --config
click_config_option = click.option(
'-c',
'--config',
default=None,
metavar='CONFIG',
help=u'Config file to use (default: development.ini)')
class CkanCommand(paste.script.command.Command): # type: ignore
'''DEPRECATED - Instead use ckan.cli.cli.CkanCommand or extensions
should use IClick.
Base class for classes that implement CKAN paster commands to
inherit.'''
parser = paste.script.command.Command.standard_parser(verbose=True)
parser.add_option('-c', '--config', dest='config',
help='Config file to use.')
parser.add_option('-f', '--file',
action='store',
dest='file_path',
help="File to dump results to (if needed)")
default_verbosity = 1
group_name = 'ckan'
def _load_config(self, load_site_user: bool = True):
self.site_user = METHOD_NAME(self.options.config, load_site_user) |
4,888 | test command pipe alert text | import json
import subprocess
import logging
import pytest
from unittest import mock
from elastalert.alerters.command import CommandAlerter
from elastalert.alerts import BasicMatchString
from elastalert.util import EAException
from tests.alerts_test import mock_rule
def test_command_getinfo():
# Test command as list with a formatted arg
rule = {'command': ['/bin/test/', '--arg', '%(somefield)s']}
alert = CommandAlerter(rule)
match = {'@timestamp': '2014-01-01T00:00:00',
'somefield': 'foobarbaz',
'nested': {'field': 1}}
with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
alert.alert([match])
assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False)
expected_data = {
'type': 'command',
'command': '/bin/test/ --arg foobarbaz'
}
actual_data = alert.get_info()
assert expected_data == actual_data
def test_command_old_style_string_format1(caplog):
caplog.set_level(logging.INFO)
# Test command as string with formatted arg (old-style string format)
rule = {'command': '/bin/test/ --arg %(somefield)s'}
match = {'@timestamp': '2014-01-01T00:00:00',
'somefield': 'foobarbaz',
'nested': {'field': 1}}
alert = CommandAlerter(rule)
with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
alert.alert([match])
assert mock_popen.called_with('/bin/test --arg foobarbaz', stdin=subprocess.PIPE, shell=False)
assert ('elastalert', logging.WARNING, 'Warning! You could be vulnerable to shell injection!') == caplog.record_tuples[0]
assert ('elastalert', logging.INFO, 'Alert sent to Command') == caplog.record_tuples[1]
def test_command_old_style_string_format2():
# Test command as string without formatted arg (old-style string format)
rule = {'command': '/bin/test/foo.sh'}
match = {'@timestamp': '2014-01-01T00:00:00',
'somefield': 'foobarbaz',
'nested': {'field': 1}}
alert = CommandAlerter(rule)
with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
alert.alert([match])
assert mock_popen.called_with('/bin/test/foo.sh', stdin=subprocess.PIPE, shell=True)
def test_command_pipe_match_json():
rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'],
'pipe_match_json': True}
alert = CommandAlerter(rule)
match = {'@timestamp': '2014-01-01T00:00:00',
'somefield': 'foobarbaz'}
with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
mock_subprocess = mock.Mock()
mock_popen.return_value = mock_subprocess
mock_subprocess.communicate.return_value = (None, None)
alert.alert([match])
assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False)
assert mock_subprocess.communicate.called_with(input=json.dumps(match))
def METHOD_NAME():
rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'],
'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'}
alert = CommandAlerter(rule)
match = {'@timestamp': '2014-01-01T00:00:00',
'somefield': 'foobarbaz'}
alert_text = str(BasicMatchString(rule, match))
with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
mock_subprocess = mock.Mock()
mock_popen.return_value = mock_subprocess
mock_subprocess.communicate.return_value = (None, None)
alert.alert([match])
assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False)
assert mock_subprocess.communicate.called_with(input=alert_text.encode())
def test_command_fail_on_non_zero_exit():
rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'],
'fail_on_non_zero_exit': True}
alert = CommandAlerter(rule)
match = {'@timestamp': '2014-01-01T00:00:00',
'somefield': 'foobarbaz'}
with pytest.raises(Exception) as exception:
with mock.patch("elastalert.alerters.command.subprocess.Popen") as mock_popen:
mock_subprocess = mock.Mock()
mock_popen.return_value = mock_subprocess
mock_subprocess.wait.return_value = 1
alert.alert([match])
assert mock_popen.called_with(['/bin/test', '--arg', 'foobarbaz'], stdin=subprocess.PIPE, shell=False)
assert "Non-zero exit code while running command" in str(exception)
def test_command_os_error():
rule = {'command': ['/bin/test/', '--arg', '%(somefield)s'],
'pipe_alert_text': True, 'type': mock_rule(), 'name': 'Test'}
alert = CommandAlerter(rule)
match = {'@timestamp': '2014-01-01T00:00:00',
'somefield': 'foobarbaz'}
with pytest.raises(EAException) as ea:
mock_run = mock.MagicMock(side_effect=OSError)
with mock.patch("elastalert.alerters.command.subprocess.Popen", mock_run), pytest.raises(OSError) as mock_popen:
mock_subprocess = mock.Mock()
mock_popen.return_value = mock_subprocess
mock_subprocess.communicate.return_value = (None, None)
alert.alert([match])
assert 'Error while running command /bin/test/ --arg foobarbaz: ' in str(ea)
def test_command_key_error():
with pytest.raises(EAException) as ea:
rule = {}
alert = CommandAlerter(rule)
match = {'@timestamp': '2014-01-01T00:00:00',
'somefield': 'foobarbaz',
'nested': {'field': 1}}
with mock.patch("elastalert.alerters.command.subprocess.Popen"):
alert.alert([match])
assert 'Error formatting command:' in str(ea) |
4,889 | test kaleido fulljson | import plotly.io as pio
import plotly.io.kaleido
from contextlib import contextmanager
from io import BytesIO
from pathlib import Path
from unittest.mock import Mock
fig = {"layout": {"title": {"text": "figure title"}}}
def make_writeable_mocks():
"""Produce some mocks which we will use for testing the `write_image()` function.
These mocks should be passed as the `file=` argument to `write_image()`.
The tests should verify that the method specified in the `active_write_function`
attribute is called once, and that scope.transform is called with the `format=`
argument specified by the `.expected_format` attribute.
In total we provide two mocks: one for a writable file descriptor, and other for a
pathlib.Path object.
"""
# Part 1: A mock for a file descriptor
# ------------------------------------
mock_file_descriptor = Mock()
# A file descriptor has no write_bytes method, unlike a pathlib Path.
del mock_file_descriptor.write_bytes
# The expected write method for a file descriptor is .write
mock_file_descriptor.active_write_function = mock_file_descriptor.write
# Since there is no filename, there should be no format detected.
mock_file_descriptor.expected_format = None
# Part 2: A mock for a pathlib path
# ---------------------------------
mock_pathlib_path = Mock(spec=Path)
# A pathlib Path object has no write method, unlike a file descriptor.
del mock_pathlib_path.write
# The expected write method for a pathlib Path is .write_bytes
mock_pathlib_path.active_write_function = mock_pathlib_path.write_bytes
# Mock a path with PNG suffix
mock_pathlib_path.suffix = ".png"
mock_pathlib_path.expected_format = "png"
return mock_file_descriptor, mock_pathlib_path
@contextmanager
def mocked_scope():
    # Temporarily swap the module-level Kaleido scope for a mock; the
    # original scope is restored in the finally block below.
scope_mock = Mock()
original_scope = pio._kaleido.scope
pio._kaleido.scope = scope_mock
try:
yield scope_mock
finally:
pio._kaleido.scope = original_scope
def test_kaleido_engine_to_image_returns_bytes():
result = pio.to_image(fig, format="svg", engine="kaleido", validate=False)
assert result.startswith(b"<svg")
def METHOD_NAME():
empty_fig = dict(data=[], layout={})
result = pio.full_figure_for_development(empty_fig, warn=False, as_dict=True)
assert result["layout"]["calendar"] == "gregorian"
def test_kaleido_engine_to_image():
with mocked_scope() as scope:
pio.to_image(fig, engine="kaleido", validate=False)
scope.transform.assert_called_with(
fig, format=None, width=None, height=None, scale=None
)
def test_kaleido_engine_write_image():
for writeable_mock in make_writeable_mocks():
with mocked_scope() as scope:
pio.write_image(fig, writeable_mock, engine="kaleido", validate=False)
scope.transform.assert_called_with(
fig,
format=writeable_mock.expected_format,
width=None,
height=None,
scale=None,
)
assert writeable_mock.active_write_function.call_count == 1
def test_kaleido_engine_to_image_kwargs():
with mocked_scope() as scope:
pio.to_image(
fig,
format="pdf",
width=700,
height=600,
scale=2,
engine="kaleido",
validate=False,
)
scope.transform.assert_called_with(
fig, format="pdf", width=700, height=600, scale=2
)
def test_kaleido_engine_write_image_kwargs():
for writeable_mock in make_writeable_mocks():
with mocked_scope() as scope:
pio.write_image(
fig,
writeable_mock,
format="jpg",
width=700,
height=600,
scale=2,
engine="kaleido",
validate=False,
)
scope.transform.assert_called_with(
fig, format="jpg", width=700, height=600, scale=2
)
assert writeable_mock.active_write_function.call_count == 1
def test_image_renderer():
with mocked_scope() as scope:
pio.show(fig, renderer="svg", engine="kaleido", validate=False)
renderer = pio.renderers["svg"]
scope.transform.assert_called_with(
fig,
format="svg",
width=None,
height=None,
scale=renderer.scale,
)
def test_bytesio():
"""Verify that writing to a BytesIO object contains the same data as to_image().
The goal of this test is to ensure that Plotly correctly handles a writable buffer
which doesn't correspond to a filesystem path.
"""
bio = BytesIO()
pio.write_image(fig, bio, format="jpg", engine="kaleido", validate=False)
bio.seek(0) # Rewind to the beginning of the buffer, otherwise read() returns b''.
bio_bytes = bio.read()
to_image_bytes = pio.to_image(fig, format="jpg", engine="kaleido", validate=False)
assert bio_bytes == to_image_bytes |
4,890 | get tracked plugin assets and readings | # -*- coding: utf-8 -*-
# FLEDGE_BEGIN
# See: http://fledge-iot.readthedocs.io/
# FLEDGE_END
from functools import lru_cache
from aiohttp import web
from fledge.common.service_record import ServiceRecord
from fledge.common.storage_client.payload_builder import PayloadBuilder
from fledge.services.core.service_registry.service_registry import ServiceRegistry
from fledge.services.core.service_registry.exceptions import DoesNotExist
from fledge.services.core import connect
from fledge.common.configuration_manager import ConfigurationManager
from fledge.common.plugin_discovery import PluginDiscovery
__author__ = "Praveen Garg"
__copyright__ = "Copyright (c) 2018 OSIsoft, LLC"
__license__ = "Apache 2.0"
__version__ = "${VERSION}"
_help = """
-------------------------------------------------------------------------------
| GET | /fledge/south |
-------------------------------------------------------------------------------
"""
async def _get_schedule_status(storage_client, svc_name):
payload = PayloadBuilder().SELECT("enabled").WHERE(['schedule_name', '=', svc_name]).payload()
result = await storage_client.query_tbl_with_payload('schedules', payload)
return True if result['rows'][0]['enabled'] == 't' else False
@lru_cache(maxsize=1024)
def _get_installed_plugins():
return PluginDiscovery.get_plugins_installed("south", False)
async def _services_with_assets(storage_client, cf_mgr, south_services):
sr_list = list()
try:
try:
services_from_registry = ServiceRegistry.get(s_type="Southbound")
except DoesNotExist:
services_from_registry = []
def is_svc_in_service_registry(name):
return next((svc for svc in services_from_registry if svc._name == name), None)
installed_plugins = _get_installed_plugins()
for s_record in services_from_registry:
plugin, assets = await METHOD_NAME(storage_client, cf_mgr, s_record._name)
plugin_version = ''
for p in installed_plugins:
if p["name"] == plugin:
plugin_version = p["version"]
break
            # Services running on another machine have no scheduler entry
sched_enable = 'unknown'
try:
sched_enable = await _get_schedule_status(storage_client, s_record._name)
except:
pass
sr_list.append(
{
'name': s_record._name,
'address': s_record._address,
'management_port': s_record._management_port,
'service_port': s_record._port,
'protocol': s_record._protocol,
'status': ServiceRecord.Status(int(s_record._status)).name.lower(),
'assets': assets,
'plugin': {'name': plugin, 'version': plugin_version},
'schedule_enabled': sched_enable
})
for s_name in south_services:
south_svc = is_svc_in_service_registry(s_name)
if not south_svc:
plugin, assets = await METHOD_NAME(storage_client, cf_mgr, s_name)
plugin_version = ''
for p in installed_plugins:
if p["name"] == plugin:
plugin_version = p["version"]
break
# Handle schedule status when there is no schedule entry matching a South child category name
sch_status = 'unknown'
try:
sch_status = await _get_schedule_status(storage_client, s_name)
except:
pass
sr_list.append(
{
'name': s_name,
'address': '',
'management_port': '',
'service_port': '',
'protocol': '',
'status': '',
'assets': assets,
'plugin': {'name': plugin, 'version': plugin_version},
'schedule_enabled': sch_status
})
except:
raise
else:
return sr_list
async def METHOD_NAME(storage_client, cf_mgr, svc_name):
asset_json = []
plugin_value = await cf_mgr.get_category_item(svc_name, 'plugin')
plugin = plugin_value['value'] if plugin_value is not None else ''
payload = PayloadBuilder().SELECT(["asset", "plugin"]).WHERE(['service', '=', svc_name]).AND_WHERE(
['event', '=', 'Ingest']).AND_WHERE(['plugin', '=', plugin]).AND_WHERE(['deprecated_ts', 'isnull']).payload()
try:
result = await storage_client.query_tbl_with_payload('asset_tracker', payload)
# TODO: FOGL-2549
        # an old asset tracker entry may still appear for the combination of service name + plugin name + event name, if one exists
asset_records = result['rows']
assets = [ar["asset"].upper()for ar in asset_records]
if len(assets):
def map_original_asset_name(asset_stats_key):
# asset name are being recorded in uppercase as key in statistics table
for ar in asset_records:
if ar["asset"].upper() == asset_stats_key:
return ar["asset"]
return None
payload = PayloadBuilder().SELECT(["key", "value"]).WHERE(["key", "in", assets]).payload()
results = await storage_client.query_tbl_with_payload("statistics", payload)
for _r in results['rows']:
asset_json.append({"count": _r['value'], "asset": map_original_asset_name(_r['key'])})
except:
raise
else:
return plugin, asset_json
async def get_south_services(request):
"""
Args:
request:
Returns:
list of all south services with tracked assets and readings count
:Example:
curl -X GET http://localhost:8081/fledge/south
"""
if 'cached' in request.query and request.query['cached'].lower() == 'false':
_get_installed_plugins.cache_clear()
storage_client = connect.get_storage_async()
cf_mgr = ConfigurationManager(storage_client)
try:
south_cat = await cf_mgr.get_category_child("South")
south_categories = [nc["key"] for nc in south_cat]
except:
return web.json_response({'services': []})
response = await _services_with_assets(storage_client, cf_mgr, south_categories)
return web.json_response({'services': response}) |
4,891 | test dataset move | """
Tests for FeaturizedSamples class
"""
import os
import tempfile
import shutil
import deepchem as dc
def test_unlabelled():
current_dir = os.path.dirname(os.path.abspath(__file__))
input_file = os.path.join(current_dir, "../../data/tests/no_labels.csv")
featurizer = dc.feat.CircularFingerprint(size=1024)
loader = dc.data.CSVLoader(tasks=[],
feature_field="smiles",
featurizer=featurizer)
dataset = loader.create_dataset(input_file)
assert len(dataset.X)
def test_scaffold_test_train_valid_test_split():
"""Test of singletask RF ECFP regression API."""
current_dir = os.path.dirname(os.path.abspath(__file__))
tasks = ["log-solubility"]
input_file = os.path.join(current_dir,
"../../models/tests/assets/example.csv")
featurizer = dc.feat.CircularFingerprint(size=1024)
input_file = os.path.join(current_dir, input_file)
loader = dc.data.CSVLoader(tasks=tasks,
feature_field="smiles",
featurizer=featurizer)
dataset = loader.create_dataset(input_file)
# Splits featurized samples into train/test
splitter = dc.splits.ScaffoldSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(
dataset)
assert len(train_dataset) == 8
assert len(valid_dataset) == 1
assert len(test_dataset) == 1
def test_scaffold_test_train_test_split():
"""Test of singletask RF ECFP regression API."""
current_dir = os.path.dirname(os.path.abspath(__file__))
tasks = ["log-solubility"]
input_file = os.path.join(current_dir,
"../../models/tests/assets/example.csv")
featurizer = dc.feat.CircularFingerprint(size=1024)
input_file = os.path.join(current_dir, input_file)
loader = dc.data.CSVLoader(tasks=tasks,
feature_field="smiles",
featurizer=featurizer)
dataset = loader.create_dataset(input_file)
# Splits featurized samples into train/test
splitter = dc.splits.ScaffoldSplitter()
train_dataset, test_dataset = splitter.train_test_split(dataset)
assert len(train_dataset) == 8
assert len(test_dataset) == 2
def test_random_test_train_valid_test_split():
"""Test of singletask RF ECFP regression API."""
current_dir = os.path.dirname(os.path.abspath(__file__))
tasks = ["log-solubility"]
input_file = os.path.join(current_dir,
"../../models/tests/assets/example.csv")
featurizer = dc.feat.CircularFingerprint(size=1024)
input_file = os.path.join(current_dir, input_file)
loader = dc.data.CSVLoader(tasks=tasks,
feature_field="smiles",
featurizer=featurizer)
dataset = loader.create_dataset(input_file)
# Splits featurized samples into train/test
splitter = dc.splits.RandomSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(
dataset)
assert len(train_dataset) == 8
assert len(valid_dataset) == 1
assert len(test_dataset) == 1
def test_random_test_train_test_split():
"""Test of singletask RF ECFP regression API."""
current_dir = os.path.dirname(os.path.abspath(__file__))
tasks = ["log-solubility"]
input_file = os.path.join(current_dir,
"../../models/tests/assets/example.csv")
featurizer = dc.feat.CircularFingerprint(size=1024)
loader = dc.data.CSVLoader(tasks=tasks,
feature_field="smiles",
featurizer=featurizer)
dataset = loader.create_dataset(input_file)
# Splits featurized samples into train/test
splitter = dc.splits.RandomSplitter()
train_dataset, test_dataset = splitter.train_test_split(dataset)
assert len(train_dataset) == 8
assert len(test_dataset) == 2
def test_log_solubility_dataset():
"""Test of loading for simple log-solubility dataset."""
current_dir = os.path.dirname(os.path.realpath(__file__))
input_file = "../../models/tests/assets/example.csv"
input_file = os.path.join(current_dir, input_file)
tasks = ["log-solubility"]
loader = dc.data.CSVLoader(
tasks=tasks,
feature_field="smiles",
featurizer=dc.feat.CircularFingerprint(size=1024))
dataset = loader.create_dataset(input_file)
assert len(dataset) == 10
def METHOD_NAME():
"""Test that dataset can be moved and reloaded."""
current_dir = os.path.dirname(os.path.abspath(__file__))
base_dir = tempfile.mkdtemp()
data_dir = os.path.join(base_dir, "data")
moved_data_dir = os.path.join(base_dir, "moved_data")
dataset_file = os.path.join(current_dir,
"../../models/tests/assets/example.csv")
featurizer = dc.feat.CircularFingerprint(size=1024)
tasks = ["log-solubility"]
loader = dc.data.CSVLoader(tasks=tasks,
feature_field="smiles",
featurizer=featurizer)
featurized_dataset = loader.create_dataset(dataset_file, data_dir)
n_dataset = len(featurized_dataset)
# Now perform move
shutil.move(data_dir, moved_data_dir)
moved_featurized_dataset = dc.data.DiskDataset(moved_data_dir)
assert len(moved_featurized_dataset) == n_dataset |
4,892 | test comment many as artist | import os
from tests.base import ApiDBTestCase
from zou.app.utils import events
from zou.app.services import projects_service, tasks_service
from PIL import Image
class RouteTaskChangeTestCase(ApiDBTestCase):
def setUp(self):
super(RouteTaskChangeTestCase, self).setUp()
self.generate_fixture_project_status()
self.generate_fixture_project()
self.generate_fixture_asset_type()
self.generate_fixture_asset()
self.generate_fixture_sequence()
self.generate_fixture_shot()
self.generate_fixture_department()
self.generate_fixture_task_type()
self.generate_fixture_task_status()
self.generate_fixture_task_status_wip()
self.generate_fixture_task_status_retake()
self.generate_fixture_task_status_done()
self.generate_fixture_task_status_todo()
self.generate_fixture_person()
self.generate_fixture_assigner()
self.generate_fixture_task()
self.open_status_id = str(self.task_status.id)
self.wip_status_id = str(self.task_status_wip.id)
self.retake_status_id = str(self.task_status_retake.id)
self.done_status_id = str(self.task_status_done.id)
self.is_event_fired = False
events.unregister_all()
def handle_event(self, data):
self.is_event_fired = True
self.assertEqual(data["previous_task_status_id"], self.open_status_id)
def assert_event_is_fired(self):
self.assertTrue(self.is_event_fired)
def test_retake_count(self):
task_id = str(self.task.id)
self.post(
"/actions/tasks/%s/comment" % task_id,
{"task_status_id": self.retake_status_id, "comment": "retake 1"},
)
task = self.get("data/tasks/%s" % task_id)
self.assertEqual(task["retake_count"], 1)
self.post(
"/actions/tasks/%s/comment" % task_id,
{"task_status_id": self.wip_status_id, "comment": "wip 1"},
)
comment = self.post(
"/actions/tasks/%s/comment" % task_id,
{"task_status_id": self.retake_status_id, "comment": "retake 2"},
)
task = self.get("data/tasks/%s" % task_id)
self.assertEqual(task["retake_count"], 2)
comment = self.delete(
"/data/tasks/%s/comments/%s" % (task_id, comment["id"])
)
task = self.get("data/tasks/%s" % task_id)
self.assertEqual(task["retake_count"], 1)
comment = self.post(
"/actions/tasks/%s/comment" % task_id,
{"task_status_id": self.retake_status_id, "comment": "retake 2"},
)
task = self.get("data/tasks/%s" % task_id)
self.assertEqual(task["retake_count"], 2)
def test_retake_cap(self):
task_id = str(self.task.id)
asset_id = self.asset.id
self.project.update({"max_retakes": 1})
self.post(
"/actions/tasks/%s/comment" % task_id,
{"task_status_id": self.retake_status_id, "comment": "retake 1"},
)
self.post(
"/actions/tasks/%s/comment" % task_id,
{"task_status_id": self.wip_status_id, "comment": "wip"},
)
self.post(
"/actions/tasks/%s/comment" % task_id,
{"task_status_id": self.retake_status_id, "comment": "retake 2"},
400,
)
self.put("/data/entities/%s" % asset_id, {"data": {"max_retakes": 2}})
entity = self.get("/data/entities/%s" % asset_id)
self.post(
"/actions/tasks/%s/comment" % task_id,
{"task_status_id": self.retake_status_id, "comment": "retake 2"},
)
self.post(
"/actions/tasks/%s/comment" % task_id,
{"task_status_id": self.wip_status_id, "comment": "wip"},
)
self.post(
"/actions/tasks/%s/comment" % task_id,
{"task_status_id": self.retake_status_id, "comment": "retake 3"},
400,
)
def test_comment_many(self):
project_id = str(self.project.id)
task_id = str(self.task.id)
self.generate_fixture_task(name="second_task")
task2_id = str(self.task.id)
path = "/actions/projects/%s/tasks/comment-many" % project_id
self.post(
path,
[
{
"task_status_id": self.retake_status_id,
"comment": "retake 1",
"object_id": task_id,
},
{
"task_status_id": self.retake_status_id,
"comment": "retake 1",
"object_id": task2_id,
},
],
)
self.get("data/tasks/%s" % task_id)
task = self.get("data/tasks/%s" % task_id)
self.assertEqual(task["retake_count"], 1)
comments = self.get("data/tasks/%s/comments" % task_id)
self.assertEqual(len(comments), 1)
comments = self.get("data/tasks/%s/comments" % task2_id)
self.assertEqual(len(comments), 1)
def METHOD_NAME(self):
project_id = str(self.project.id)
task_id = str(self.task.id)
self.generate_fixture_task(name="second_task")
task2_id = str(self.task.id)
self.generate_fixture_user_cg_artist()
self.log_in_cg_artist()
new_comments = [
{
"task_status_id": self.retake_status_id,
"comment": "retake 1",
"object_id": task_id,
},
{
"task_status_id": self.retake_status_id,
"comment": "retake 1",
"object_id": task2_id,
},
]
path = "/actions/projects/%s/tasks/comment-many" % project_id
self.assign_task_to_artist(task_id)
self.post(path, new_comments)
self.log_in_admin()
self.get("data/tasks/%s" % task_id)
task = self.get("data/tasks/%s" % task_id)
self.assertEqual(task["retake_count"], 1)
comments = self.get("data/tasks/%s/comments" % task_id)
self.assertEqual(len(comments), 1)
comments = self.get("data/tasks/%s/comments" % task2_id)
self.assertEqual(len(comments), 0)
def test_attachments(self):
self.delete_test_folder()
self.create_test_folder()
task_id = str(self.task.id)
project_id = str(self.project.id)
self.upload_file(
"/actions/tasks/%s/comment" % task_id,
self.get_fixture_file_path(os.path.join("thumbnails", "th01.png")),
extra_fields={
"task_status_id": self.retake_status_id,
"comment": "retake 1",
},
)
attachment = self.get("data/attachment-files")[0]
attachment = self.get("data/attachment-files/%s" % attachment["id"])
path = "/data/attachment-files/%s/file/th01.png" % attachment["id"]
result_file_path = self.get_file_path("th01.png")
self.download_file(path, result_file_path)
result_image = Image.open(result_file_path)
self.assertEqual(result_image.size, (180, 101))
self.generate_fixture_user_vendor()
self.log_in_vendor()
self.get("data/attachment-files/%s" % attachment["id"], 403)
projects_service.add_team_member(project_id, self.user_vendor["id"])
tasks_service.assign_task(task_id, self.user_vendor["id"])
self.get("data/attachment-files/%s" % attachment["id"])
self.delete_test_folder() |
4,893 | set up | from grass.gunittest.case import TestCase
from grass.gunittest.main import test
from grass.gunittest.gmodules import SimpleModule
class TestVsurfrst(TestCase):
elevation = "elevation"
elevation_attrib = "elevation_attrib"
elevation_threads = "elevation_threads"
slope = "slope"
aspect = "aspect"
pcurvature = "pcurvature"
tcurvature = "tcurvature"
mcurvature = "mcurvature"
deviations = "deviations"
cvdev = "cvdev"
treeseg = "treeseg"
overwin = "overwin"
@classmethod
def setUpClass(cls):
cls.use_temp_region()
cls.runModule("g.region", vector="elev_lid792_randpts", res=1)
cls.runModule(
"v.to.3d",
input="elev_lid792_randpts",
type="point",
output="elev_points3d",
column="value",
overwrite=True,
)
@classmethod
def tearDownClass(cls):
cls.del_temp_region()
cls.runModule(
"g.remove",
type=["raster", "vector"],
name=[
"elev_points3d",
cls.elevation,
cls.elevation_threads,
cls.elevation_attrib,
cls.slope,
cls.aspect,
cls.pcurvature,
cls.tcurvature,
cls.mcurvature,
cls.deviations,
cls.cvdev,
cls.treeseg,
cls.overwin,
],
flags="f",
)
def METHOD_NAME(self):
self.vsurfrst = SimpleModule(
"v.surf.rst",
input="elev_points3d",
npmin=100,
elevation=self.elevation,
overwrite=True,
)
def test_more_threads(self):
self.assertModule(self.vsurfrst)
try:
self.vsurfrst.inputs["nprocs"].value = 4
self.vsurfrst.outputs.elevation = self.elevation_threads
self.assertModule(self.vsurfrst)
self.assertRastersNoDifference(
self.elevation, self.elevation_threads, precision=1e-8
)
except KeyError:
# original version of v.surf.rst without parallel processing
return
def test_run_outputs(self):
self.vsurfrst.outputs.slope = self.slope
self.vsurfrst.outputs.aspect = self.aspect
self.vsurfrst.outputs.pcurvature = self.pcurvature
self.vsurfrst.outputs.tcurvature = self.tcurvature
self.vsurfrst.outputs.mcurvature = self.mcurvature
self.vsurfrst.outputs.deviations = self.deviations
self.vsurfrst.outputs.treeseg = self.treeseg
self.vsurfrst.outputs.overwin = self.overwin
self.assertModule(self.vsurfrst)
self.assertRasterExists(name=self.elevation)
self.assertRasterExists(name=self.slope)
self.assertRasterExists(name=self.aspect)
self.assertRasterExists(name=self.pcurvature)
self.assertRasterExists(name=self.tcurvature)
self.assertRasterExists(name=self.mcurvature)
self.assertVectorExists(name=self.deviations)
self.assertVectorExists(name=self.treeseg)
self.assertVectorExists(name=self.overwin)
values = "min=103.973861694336\nmax=131.529937744141\nmean=120.774013407641"
self.assertRasterFitsUnivar(
raster=self.elevation, reference=values, precision=1e-8
)
# slope
values = "min=0.00417369091883302\nmax=15.4391813278198\nmean=3.32303673469512"
self.assertRasterFitsUnivar(raster=self.slope, reference=values, precision=1e-8)
# aspect
values = "min=0\nmax=360\nmean=212.026580596575"
self.assertRasterFitsUnivar(
raster=self.aspect, reference=values, precision=1e-8
)
# pcurvature
values = (
"min=-0.0507194809615612\nmax=0.0395903363823891\nmean=0.00013527328666273"
)
self.assertRasterFitsUnivar(
raster=self.pcurvature, reference=values, precision=1e-8
)
# tcurvature
values = "min=-0.0455724261701107\nmax=0.0380486063659191\nmean=-0.000136686790876467"
self.assertRasterFitsUnivar(
raster=self.tcurvature, reference=values, precision=1e-8
)
# mcurvature
values = (
"min=-0.0437114611268044\nmax=0.032054178416729\nmean=-6.78450785489373e-07"
)
self.assertRasterFitsUnivar(
raster=self.mcurvature, reference=values, precision=1e-8
)
# deviations
values = "min=-0.035444\nmax=0.048801\nmean=4.21945e-05"
self.assertVectorFitsUnivar(
map=self.deviations, column="flt1", reference=values, precision=1e-8
)
# treeseg
topology = dict(primitives=256)
self.assertVectorFitsTopoInfo(vector=self.treeseg, reference=topology)
# overwin
topology = dict(primitives=256)
self.assertVectorFitsTopoInfo(vector=self.overwin, reference=topology)
# test 3D versus attribute
self.vsurfrst.outputs.elevation = self.elevation_attrib
self.vsurfrst.inputs.column = "value"
self.assertModule(self.vsurfrst)
self.assertRastersNoDifference(
self.elevation, self.elevation_attrib, precision=1e-8
)
if __name__ == "__main__":
test() |
4,894 | cb device ready | """
Copyright (c) 2018-2019 Arm Limited and affiliates.
SPDX-License-Identifier: Apache-2.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import collections
import threading
from mbed_host_tests import BaseHostTest
TestCaseData = collections.namedtuple('TestCaseData', ['index', 'data_to_send'])
DEFAULT_SYNC_DELAY = 4.0
MAX_HB_PERIOD = 2.5 # [s] Max expected heartbeat period.
MSG_VALUE_DUMMY = '0'
CASE_DATA_INVALID = 0xffffffff
CASE_DATA_PHASE2_OK = 0xfffffffe
CASE_DATA_INSUFF_HB = 0x0
MSG_KEY_SYNC = '__sync'
MSG_KEY_DEVICE_READY = 'ready'
MSG_KEY_START_CASE = 'start_case'
MSG_KEY_DEVICE_RESET = 'dev_reset'
MSG_KEY_HEARTBEAT = 'hb'
class WatchdogReset(BaseHostTest):
"""Host side test that handles device reset.
Given a device with a watchdog timer started.
When the device notifies the host about an incoming reset.
Then the host:
* keeps track of the test case index of the current test suite,
* performs a dev-host handshake.
"""
def __init__(self):
super(WatchdogReset, self).__init__()
self.current_case = TestCaseData(0, CASE_DATA_INVALID)
self.__handshake_timer = None
self.sync_delay = DEFAULT_SYNC_DELAY
self.drop_heartbeat_messages = True
self.hb_timestamps_us = []
def handshake_timer_start(self, seconds=1.0, pre_sync_fun=None):
"""Start a new handshake timer."""
def timer_handler():
"""Perform a dev-host handshake by sending a sync message."""
if pre_sync_fun is not None:
pre_sync_fun()
self.send_kv(MSG_KEY_SYNC, MSG_VALUE_DUMMY)
self.__handshake_timer = threading.Timer(seconds, timer_handler)
self.__handshake_timer.start()
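# Assumed flow (inferred from the callbacks below): when the device announces
# a reset, the host arms this timer; once the delay elapses, timer_handler
# sends MSG_KEY_SYNC so the freshly restarted device can re-synchronise.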
def handshake_timer_cancel(self):
"""Cancel the current handshake timer."""
try:
self.__handshake_timer.cancel()
except AttributeError:
pass
finally:
self.__handshake_timer = None
def heartbeat_timeout_handler(self):
"""Handler for the heartbeat timeout.
Compute the time span of the last heartbeat sequence.
Set self.current_case.data_to_send to CASE_DATA_INVALID if no heartbeat was received.
Set self.current_case.data_to_send to CASE_DATA_INSUFF_HB if only one heartbeat was
received.
"""
self.drop_heartbeat_messages = True
dev_data = CASE_DATA_INVALID
if len(self.hb_timestamps_us) == 1:
dev_data = CASE_DATA_INSUFF_HB
self.log('Not enough heartbeats received.')
elif len(self.hb_timestamps_us) >= 2:
dev_data = int(round(0.001 * (self.hb_timestamps_us[-1] - self.hb_timestamps_us[0])))
self.log('Heartbeat time span was {} ms.'.format(dev_data))
self.current_case = TestCaseData(self.current_case.index, dev_data)
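# Worked example (hypothetical timestamps): hb_timestamps_us = [1_000_000,
# 3_500_000] gives dev_data = int(round(0.001 * 2_500_000)) == 2500 ms.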
def setup(self):
sync_delay = self.get_config_item('forced_reset_timeout')
self.sync_delay = sync_delay if sync_delay is not None else DEFAULT_SYNC_DELAY
self.register_callback(MSG_KEY_DEVICE_READY, self.METHOD_NAME)
self.register_callback(MSG_KEY_DEVICE_RESET, self.cb_device_reset)
self.register_callback(MSG_KEY_HEARTBEAT, self.cb_heartbeat)
def teardown(self):
self.handshake_timer_cancel()
def METHOD_NAME(self, key, value, timestamp):
"""Advance the device test suite to a proper test case.
Additionally, send test case data to the device.
"""
self.handshake_timer_cancel()
msg_value = '{0.index:02x},{0.data_to_send:08x}'.format(self.current_case)
self.send_kv(MSG_KEY_START_CASE, msg_value)
self.drop_heartbeat_messages = False
self.hb_timestamps_us = []
def cb_device_reset(self, key, value, timestamp):
"""Keep track of the test case number.
Also set a new handshake timeout, so when the device gets
restarted by the watchdog, the communication will be restored
by the __handshake_timer.
"""
self.handshake_timer_cancel()
case_num, dev_reset_delay_ms = (int(i, base=16) for i in value.split(','))
self.current_case = TestCaseData(case_num, CASE_DATA_PHASE2_OK)
self.handshake_timer_start(self.sync_delay + dev_reset_delay_ms / 1000.0)
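# The dev_reset payload is "<case_num_hex>,<delay_ms_hex>"; for example
# (hypothetical values) "03,000007d0" means case 3 with a 2000 ms reset
# delay, so the next handshake fires after sync_delay + 2.0 s.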
def cb_heartbeat(self, key, value, timestamp):
"""Save the timestamp of a heartbeat message.
Additionally, keep track of the test case number.
Also each heartbeat sets a new timeout, so when the device gets
restarted by the watchdog, the communication will be restored
by the __handshake_timer.
"""
if self.drop_heartbeat_messages:
return
self.handshake_timer_cancel()
case_num, timestamp_us = (int(i, base=16) for i in value.split(','))
self.current_case = TestCaseData(case_num, CASE_DATA_INVALID)
self.hb_timestamps_us.append(timestamp_us)
self.handshake_timer_start(
seconds=(MAX_HB_PERIOD + self.sync_delay),
pre_sync_fun=self.heartbeat_timeout_handler) |
4,895 | upgrade config | #!/usr/bin/python3.6
#
# Copyright (c) 2020 Seagate Technology LLC and/or its Affiliates
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For any questions about this software or licensing,
# please email opensource@seagate.com or cortx-questions@seagate.com.
#
# IMP : for preupgrade and postupgrade cmd,
# merge_configs() is imported from the merge_pre_post.py
from s3confstore.cortx_s3_confstore import S3CortxConfStore
import os.path
import sys
import logging
def METHOD_NAME(configFile:str, oldSampleFile:str, newSampleFile:str, unsafeAttributesFile:str, filetype:str):
"""
Core logic for updating config files during upgrade using conf store.
The merge algorithm is as follows:
Iterate over all parameters in the sample.new file; for every parameter:
- if it is marked as 'unsafe' in the attributes file, skip it
- if it is marked as 'safe' in the attributes file:
  - diff the value in the config file and sample.old; if the user changed it, skip it
  - if it was not changed, overwrite the value in the config file from sample.new
- if it does not exist in the config file, add the value from sample.new
- all arrays in yaml are always overwritten
"""
# If the config file is not present, abort the merge.
if not os.path.isfile(configFile):
logger.error(f'config file {configFile} does not exist')
raise Exception(f'ERROR: config file {configFile} does not exist')
logger.info(f'config file {str(configFile)} upgrade started.')
# old sample file
conf_old_sample = filetype + oldSampleFile
cs_conf_old_sample = S3CortxConfStore(config=conf_old_sample, index=conf_old_sample)
# new sample file
conf_new_sample = filetype + newSampleFile
cs_conf_new_sample = S3CortxConfStore(config=conf_new_sample, index=conf_new_sample)
conf_new_sample_keys = cs_conf_new_sample.get_all_keys()
# unsafe attribute file
conf_unsafe_file = filetype + unsafeAttributesFile
cs_conf_unsafe_file = S3CortxConfStore(config=conf_unsafe_file, index=conf_unsafe_file)
conf_unsafe_file_keys = cs_conf_unsafe_file.get_all_keys()
# active config file
conf_file = filetype + configFile
cs_conf_file = S3CortxConfStore(config=conf_file, index=conf_file)
conf_file_keys = cs_conf_file.get_all_keys()
# Logic to determine which keys to merge.
keys_to_overwrite = []
for key in conf_new_sample_keys:
# If the key is marked unsafe, do not modify/overwrite it.
if key in conf_unsafe_file_keys:
continue
# If the key is not present in the active config file, add it
# (this also re-adds keys that were removed from the config file,
# effectively overwriting them with the values from the new sample file).
if key not in conf_file_keys:
keys_to_overwrite.append(key)
# If the key is not unsafe and its value was not changed by the user, overwrite it.
elif cs_conf_file.get_config(key) == cs_conf_old_sample.get_config(key):
keys_to_overwrite.append(key)
# If the user changed the value of the key, skip it.
else:
continue
cs_conf_file.merge_config(source_index=conf_new_sample, keys_to_include=keys_to_overwrite)
cs_conf_file.save_config()
logger.info(f'config file {str(configFile)} upgrade completed')
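# Example invocation (paths are illustrative; they mirror the 's3' entry in
# g_upgrade_items below):
#   METHOD_NAME("/etc/cortx/s3/conf/s3config.yaml",
#               "/etc/cortx/s3/tmp/s3config.yaml.sample.old",
#               "/etc/cortx/s3/conf/s3config.yaml.sample",
#               "/etc/cortx/s3/conf/s3config_unsafe_attributes.yaml",
#               "yaml://")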
def merge_configs(config_file_path: str, s3_tmp_dir):
"""
Merge all S3 config files during upgrade.
This is the entry point to be used from outside this file to run the config upgrade.
"""
# Use the existing s3-deployment-logger or set up a new console logger
setup_logger()
g_upgrade_items = {
's3' : {
'configFile' : os.path.join(config_file_path, "s3/conf/s3config.yaml"),
'oldSampleFile' : os.path.join(s3_tmp_dir, "s3config.yaml.sample.old"),
'newSampleFile' : os.path.join(config_file_path, "s3/conf/s3config.yaml.sample"),
'unsafeAttributesFile' : os.path.join(config_file_path, "s3/conf/s3config_unsafe_attributes.yaml"),
'fileType' : 'yaml://'
},
'auth' : {
'configFile' : os.path.join(config_file_path, "auth/resources/authserver.properties"),
'oldSampleFile' : os.path.join(s3_tmp_dir, "authserver.properties.sample.old"),
'newSampleFile' : os.path.join(config_file_path, "auth/resources/authserver.properties.sample"),
'unsafeAttributesFile' : os.path.join(config_file_path, "auth/resources/authserver_unsafe_attributes.properties"),
'fileType' : 'properties://'
},
'keystore' : {
'configFile' : os.path.join(config_file_path, "auth/resources/keystore.properties"),
'oldSampleFile' : os.path.join(s3_tmp_dir,"keystore.properties.sample.old"),
'newSampleFile' : os.path.join(config_file_path, "auth/resources/keystore.properties.sample"),
'unsafeAttributesFile' : os.path.join(config_file_path, "auth/resources/keystore_unsafe_attributes.properties"),
'fileType' : 'properties://'
},
'bgdelete' : {
'configFile' : os.path.join(config_file_path, "s3/s3backgrounddelete/config.yaml"),
'oldSampleFile' : os.path.join(s3_tmp_dir, "config.yaml.sample.old"),
'newSampleFile' : os.path.join(config_file_path, "s3/s3backgrounddelete/config.yaml.sample"),
'unsafeAttributesFile' : os.path.join(config_file_path, "s3/s3backgrounddelete/s3backgrounddelete_unsafe_attributes.yaml"),
'fileType' : 'yaml://'
},
'cluster' : {
'configFile' : os.path.join(config_file_path, "s3/s3backgrounddelete/s3_cluster.yaml"),
'oldSampleFile' : os.path.join(s3_tmp_dir, "s3_cluster.yaml.sample.old"),
'newSampleFile' : os.path.join(config_file_path, "s3/s3backgrounddelete/s3_cluster.yaml.sample"),
'unsafeAttributesFile' : os.path.join(config_file_path, "s3/s3backgrounddelete/s3_cluster_unsafe_attributes.yaml"),
'fileType' : 'yaml://'
}
}
for upgrade_item in g_upgrade_items:
METHOD_NAME(g_upgrade_items[upgrade_item]['configFile'],
g_upgrade_items[upgrade_item]['oldSampleFile'],
g_upgrade_items[upgrade_item]['newSampleFile'],
g_upgrade_items[upgrade_item]['unsafeAttributesFile'],
g_upgrade_items[upgrade_item]['fileType'])
def setup_logger():
"""
Use the existing s3-deployment-logger as-is if it is available;
otherwise log to the console.
"""
global logger
logger = logging.getLogger("s3-deployment-logger")
if logger.hasHandlers():
logger.info("Logger has valid handler")
else:
logger.setLevel(logging.DEBUG)
# create console handler with a higher log level
chandler = logging.StreamHandler(sys.stdout)
chandler.setLevel(logging.DEBUG)
s3deployment_log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
formatter = logging.Formatter(s3deployment_log_format)
# create formatter and add it to the handlers
chandler.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(chandler)
if __name__ == "__main__":
config_file_path = "/etc/cortx"
s3_tmp_dir = os.path.join(config_file_path, "s3/tmp")
merge_configs(config_file_path, s3_tmp_dir) |
4,896 | description | #!/usr/bin/env python3
#
# This file is part of OpenMediaVault.
#
# @license http://www.gnu.org/licenses/gpl.html GPL Version 3
# @author Volker Theile <volker.theile@openmediavault.org>
# @copyright Copyright (c) 2009-2023 Volker Theile
#
# OpenMediaVault is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# OpenMediaVault is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OpenMediaVault. If not, see <http://www.gnu.org/licenses/>.
import shutil
import socket
import sys
import tempfile
import time
import dialog
import openmediavault.firstaid
import openmediavault.procutils
import openmediavault.systemd
import pyudev
class Module(openmediavault.firstaid.IModule):
@property
def METHOD_NAME(self):
return "Submit diagnostic report to administrator"
def execute(self):
# Check if postfix is running.
try:
manager = openmediavault.systemd.Manager()
unit = manager.get_unit("postfix.service")
active = unit.active
except Exception: # pylint: disable=broad-except
active = False
if not active:
d = dialog.Dialog(dialog="dialog")
code = d.msgbox(
"Failed to submit the system diagnostic "
"report to the administrator account via email because "
"the email notification service is disabled.",
backtitle=self.METHOD_NAME,
height=7,
width=56,
)
if code != d.OK:
return 0
code = d.yesno(
"Do you want to copy the system diagnostic "
"report onto an USB device?",
backtitle=self.METHOD_NAME,
height=6,
width=45,
)
if code != d.OK:
return 0
d.infobox(
"Please connect the USB device now.",
backtitle=self.METHOD_NAME,
height=3,
width=38,
)
# Wait until USB device is plugged in.
context = pyudev.Context()
monitor = pyudev.Monitor.from_netlink(context)
monitor.filter_by(subsystem="block", device_type="partition")
monitor.start()
for device in iter(monitor.poll, None):
# Only process 'add' events.
if device.action != "add":
continue
# Only process partitions with a file system.
if "ID_FS_TYPE" not in device:
continue
break
d.infobox(
"USB device {} detected. Please wait ...".format(
device.get("DEVNAME")
),
backtitle=self.METHOD_NAME,
height=3,
width=50,
)
try:
mntdir = tempfile.mkdtemp()
outfile = "{}/sysinfo-{}-{}.txt".format(
mntdir, socket.gethostname(), time.strftime("%Y%m%d%H%M")
)
openmediavault.procutils.check_call(
["mount", device.get("DEVNAME"), mntdir]
)
with open(outfile, "w") as out:
openmediavault.procutils.check_call(
["omv-sysinfo"], stdout=out
)
except: # pylint: disable=try-except-raise
raise
finally:
openmediavault.procutils.check_call(
["umount", device.get("DEVNAME")]
)
shutil.rmtree(mntdir)
d.infobox(
"You can disconnect the USB device now.",
backtitle=self.METHOD_NAME,
height=3,
width=42,
)
else:
print(
"Submitting system diagnostic report to the "
"administrator account. Please check your email "
"mailbox ..."
)
# A shell is required for the pipe; in an argument list, "|" would be
# passed to omv-sysinfo as a literal argument rather than piping its
# output to mail (assumes check_call forwards shell= to subprocess).
openmediavault.procutils.check_call(
"omv-sysinfo | mail -s 'System diagnostic report' root",
shell=True,
)
return 0
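# Minimal standalone sketch of the USB-detection wait used in execute()
# (assumes a Linux host with pyudev; the printed device name is illustrative):
#
#   import pyudev
#   context = pyudev.Context()
#   monitor = pyudev.Monitor.from_netlink(context)
#   monitor.filter_by(subsystem="block", device_type="partition")
#   monitor.start()
#   for device in iter(monitor.poll, None):
#       if device.action == "add" and "ID_FS_TYPE" in device:
#           print(device.get("DEVNAME"))  # e.g. /dev/sdb1
#           break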
if __name__ == "__main__":
module = Module()
sys.exit(module.execute()) |
4,897 | attach volume | from cli.exceptions import OperationFailedError, ResourceNotFoundError
from utility.log import Log
log = Log(__name__)
class Node:
"""Interface to perform node operations"""
def __init__(self, name, cloud):
"""Initialize instance with provided details
Args:
name (str): Node name
cloud (CloudProvider): CloudProvider object
"""
self._cloud, self._name = cloud, name
@property
def cloud(self):
"""Cloud provider object"""
return self._cloud
@property
def name(self):
"""Node name"""
return self._name
@property
def id(self):
"""Node ID"""
return self.cloud.get_node_id(self.name)
@property
def state(self):
"""Node state"""
return self.cloud.get_node_state_by_name(self.name)
@property
def public_ips(self):
"""Public IPs attached to node"""
return self.cloud.get_node_public_ips(self.name)
@property
def private_ips(self):
"""Private IPs attached to node"""
return self.cloud.get_node_private_ips(self.name)
@property
def volumes(self):
"""Volume names attached to node"""
return self.cloud.get_node_volumes(self.name)
def _get_available_network(self):
"""Get available network on cloud"""
networks = self.cloud.get_networks()
for n in networks:
subnets = self.cloud.get_subnets_by_network_name(n)
for subnet in subnets:
free_ips = subnet.get("total_ips", 0) - subnet.get("used_ips", 0)
if free_ips > 3:
return n
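# Heuristic sketch: a subnet reporting, say, total_ips=16 and used_ips=10
# leaves 6 free addresses (> 3), so its parent network is returned; if no
# subnet qualifies, the method implicitly returns None.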
def create(self, image, size, cloud_data, network=None, timeout=300, interval=10):
"""Create node on cloud
Args:
image (str): Image to be used for node
size (int): Node root volume size
cloud_data (dict): Configuration steps after deployment
network (str): Network to be attached to node
timeout (int): Operation waiting time in sec
interval (int): Operation retry time in sec
"""
if self.id:
msg = f"Node with name '{self.name}' already exists"
log.error(msg)
raise OperationFailedError(msg)
# Get cloud image
_image = self.cloud.get_image_by_name(image)
if not _image:
msg = f"Image '{image}' not available on cloud"
log.error(msg)
raise ResourceNotFoundError(msg)
# Get cloud vm flavor
_size = self.cloud.get_flavor_by_name(size)
if not _size:
msg = f"VM size '{size}' not available on cloud"
log.error(msg)
raise ResourceNotFoundError(msg)
# Get network object
if not network:
network = self._get_available_network()
_network = self.cloud.get_network_by_name(network)
if not _network:
msg = f"Network '{network}' not available on cloud"
log.error(msg)
raise ResourceNotFoundError(msg)
log.info(f"Attaching network '{network}' to node '{self.name}'")
# Create vm on cloud
self.cloud.create_node(
self.name, _image, _size, cloud_data, [_network], timeout, interval
)
# Wait until private ips attached to node
self.cloud.wait_for_node_private_ips(self.name, timeout, interval)
return True
def METHOD_NAME(self, volume):
"""Attach node to volunme
Args
name (str|list|tuple): Volume name(s)
"""
# Get node object using id
node = self.cloud.get_node_by_id(self.id)
# Attach volumes to node
# Wrap a single volume name in a tuple; a bare string would otherwise be
# iterated character by character below.
volumes = volume if isinstance(volume, (list, tuple)) else (volume,)
for v in volumes:
self.cloud.METHOD_NAME(node, self.cloud.get_volume_by_name(v))
return True
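# Example usage (hypothetical names):
#   node = Node("test-node-1", cloud)
#   node.METHOD_NAME(["vol-data-1", "vol-data-2"])  # or a single name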
def delete(self, timeout=300, interval=10):
"""Delete node from cloud
Args:
timeout (int): Operation waiting time in sec
interval (int): Operation retry time in sec
"""
if not self.id:
msg = f"Node with name '{self.name}' doesn't exists"
log.error(msg)
raise OperationFailedError(msg)
if self.private_ips:
log.info(
f"Dettaching private IPs {', '.join(self.private_ips)} assigned to node '{self.name}'"
)
self.cloud.detach_node_private_ips(self.name, self.private_ips)
if self.public_ips:
log.info(
f"Dettaching public IPs {', '.join(self.public_ips)} assigned to node '{self.name}'"
)
self.cloud.detach_node_public_ips(self.name, self.public_ips)
self.cloud.delete_node(self.name, timeout, interval)
return True |
4,898 | fct delta c9 constant | import flavio
import numpy as np
from flavio.classes import AuxiliaryQuantity, Implementation
from flavio.physics.bdecays.common import meson_quark
from flavio.physics.bdecays.wilsoncoefficients import wctot_dict
from flavio.physics.common import conjugate_par, conjugate_wc, add_dict
from flavio.config import config
# auxiliary function to construct transversity_amps_deltaC7, transversity_amps_deltaC9
def _transversity_amps_deltaC(q2, deltaC, C_name, par):
scale = flavio.config['renormalization scale']['lambdab']
mLb = par['m_Lambdab']
mL = par['m_Lambda']
mb = flavio.physics.running.running.get_mb(par, scale)
ff = flavio.physics.bdecays.lambdablambdall.get_ff(q2, par)
N = flavio.physics.bdecays.lambdablambdall.prefactor(q2, par, scale)
ha = flavio.physics.bdecays.lambdablambdall.helicity_amps(q2, mLb, mL, ff)
wc = {'7': 0, '7p': 0, 'v': 0, 'a': 0, 's': 0, 'p': 0, 't': 0,'vp': 0, 'ap': 0, 'sp': 0, 'pp': 0, 'tp': 0, }
wc[C_name] = deltaC
return flavio.physics.bdecays.lambdablambdall.transverity_amps(ha, q2, mLb, mL, mb, 0, wc, N)
def transversity_amps_deltaC7(q2, deltaC7_dict, par):
r"""A function returning a contribution to the transversity amplitudes in
$\Lambda_b\to\Lambda\ell^+\ell^-$ coming from an effective transversity-dependent shift of
the Wilson coefficient $C_7(\mu_b)$. This can be used to parametrize
residual uncertainties due to subleading non-factorizable hadronic effects.
The input dictionary `deltaC7_dict` should be of the form
`{ 'perp0': deltaC7_perp0, 'para0': deltaC7_para0, 'perp1': deltaC7_perp1, 'para1': deltaC7_para1}`
"""
ta = {}
for amp in ['perp0', 'para0', 'perp1', 'para1']:
for X in ['L', 'R']:
ta[(amp, X)] = _transversity_amps_deltaC(q2, deltaC7_dict[amp], '7', par)[(amp, X)]
return ta
def transversity_amps_deltaC9(q2, deltaC9_dict, par):
r"""A function returning a contribution to the transversity amplitudes in
$\Lambda_b\to\Lambda\ell^+\ell^-$ coming from an effective transversity-dependent shift of
the Wilson coefficient $C_9(\mu_b)$. This can be used to parametrize
residual uncertainties due to subleading non-factorizable hadronic effects.
The input dictionary `deltaC9_dict` should be of the form
`{ 'perp0': deltaC9_perp0, 'para0': deltaC9_para0, 'perp1': deltaC9_perp1, 'para1': deltaC9_para1}`
"""
ta = {}
for amp in ['perp0', 'para0', 'perp1', 'para1']:
for X in ['L', 'R']:
ta[(amp, X)] = _transversity_amps_deltaC(q2, deltaC9_dict[amp], 'v', par)[(amp, X)]
return ta
# One possibility is to parametrize the effective shift in C7 or C9 as a simple
# polynomial in q2.
def transversity_amps_deltaC7_polynomial(q2, par):
deltaC7_dict = {}
for amp in ['perp0', 'para0', 'perp1', 'para1']:
deltaC7_dict[amp] = ( par['Lambdab->Lambda deltaC7 a_' + amp + ' Re']
+ par['Lambdab->Lambda deltaC7 b_' + amp + ' Re'] *q2
+ 1j*par['Lambdab->Lambda deltaC7 a_' + amp + ' Im']
+ 1j*par['Lambdab->Lambda deltaC7 b_' + amp + ' Im'] *q2)
return transversity_amps_deltaC7(q2, deltaC7_dict, par)
# a constant shift, e.g. for high q^2
def transversity_amps_deltaC9_constant(q2, par):
deltaC9_dict = {}
for amp in ['perp0', 'para0', 'perp1', 'para1']:
deltaC9_dict[amp] = ( par['Lambdab->Lambda deltaC9 c_' + amp + ' Re']
+ 1j*par['Lambdab->Lambda deltaC9 c_' + amp + ' Im'])
return transversity_amps_deltaC9(q2, deltaC9_dict, par)
def fct_deltaC7_polynomial(wc_obj, par_dict, q2, cp_conjugate):
par = par_dict.copy()
if cp_conjugate:
par = conjugate_par(par)
return transversity_amps_deltaC7_polynomial(q2, par)
def METHOD_NAME(wc_obj, par_dict, q2, cp_conjugate):
par = par_dict.copy()
if cp_conjugate:
par = conjugate_par(par)
return transversity_amps_deltaC9_constant(q2, par)  # use the (possibly CP-conjugated) copy
# AuxiliaryQuantity & Implementatation: subleading effects at LOW q^2
quantity = 'Lambdab->Lambdall subleading effects at low q2'
a = AuxiliaryQuantity(name=quantity,
arguments=['q2', 'cp_conjugate'])
a.description = (r'Contribution to $\Lambda_b\to \Lambda \ell^+\ell^-$ transversity amplitudes from'
r' subleading hadronic effects (i.e. all effects not included'
r' elsewhere) at $q^2$ below the charmonium resonances')
# Implementation: C7-polynomial
iname = 'Lambdab->Lambdall deltaC7 polynomial'
i = Implementation(name=iname, quantity=quantity,
function=fct_deltaC7_polynomial)
i.set_description(r"Effective shift in the Wilson coefficient $C_7(\mu_b)$"
r" as a first-order polynomial in $q^2$.")
# AuxiliaryQuantity & Implementatation: subleading effects at HIGH q^2
quantity = 'Lambdab->Lambdall subleading effects at high q2'
a = AuxiliaryQuantity(name=quantity, arguments=['q2', 'cp_conjugate'])
a.description = (r'Contribution to $\Lambda_b\to \Lambda \ell^+\ell^-$ transversity amplitudes from'
r' subleading hadronic effects (i.e. all effects not included'
r' elsewhere) at $q^2$ above the charmonium resonances')
# Implementation: C9 constant shift
iname = 'Lambdab->Lambdall deltaC9 shift'
i = Implementation(name=iname, quantity=quantity,
function=METHOD_NAME)
i.set_description(r"Effective constant shift in the Wilson coefficient $C_9(\mu_b)$.") |
4,899 | generator | """ Unit test for while and for loops"""
import unittest
class SimpleLoopTests(unittest.TestCase):
def test_for_in_range(self):
a = 0
b = 0
for i in range(5):
b = i
a +=1
self.assertEqual(a,5)
self.assertEqual(b, 4)
y = 0
for t in range(1,4):
y += t
self.assertEqual(y, 6)
# test using the step argument
n = 0
for x in range(0,10,2):
n +=1
self.assertEqual(n,5)
x = [0]*10
for i in range(10):
x[i] += i
x[i] += i*2
self.assertEqual(x, [0, 3, 6, 9, 12, 15, 18, 21, 24, 27])
def foo(x):
for i in x:
break
self.assertRaises(TypeError, foo, 2)
def test_for_in_list(self):
z = 0
for x in [1,2,3]:
z += x
self.assertEqual(z,6)
def test_for_in_dict(self):
a = []
for k in {"OK":0}: a.append(k)
self.assertEqual(a, ["OK"])
def test_for_in_string(self):
a = []
for i in "skulpt": a.append(i)
self.assertEqual(a, ["s","k","u","l","p","t"])
def test_for_in_tuple(self):
z = []
a = (1,2,3)
b = ('a', 'b', 'c')
for x in a+b:
z.append(x)
self.assertEqual(z, [1,2,3,'a', 'b', 'c'])
def test_while(self):
x = 1
t = 0
while x <=5:
t = t+x
x = x+1
self.assertEqual(x,6)
self.assertEqual(t,15)
def test_break(self):
x = 1
while x < 3:
break
x = x + 1
self.assertEqual(x,1)
def f():
for i in 1,2,3,4,5:
if i == 3: break
yield i
self.assertEqual(list(f()), [1, 2])
def test_continue(self):
x = 1
n = 0
while x < 10:
x = x + 1
if n == 2:
continue
n = n + 1
self.assertEqual(n,2)
def f():
for i in 1,2,3,4,5:
if i % 2 == 0: continue
yield i
self.assertEqual(list(f()), [1, 3, 5])
def test_list_comprehension(self):
x = [v*v for v in range(0,5)]
self.assertEqual(x[3], 9)
t = [[y*10+x for x in range(0,10)] for y in range(0,10)]
self.assertEqual(t[2][3], 23)
a = [c for c in "asdf"]
self.assertEqual(a, ['a', 's', 'd', 'f'])
def test_yield(self):
def f(n):
i = 0
yield i
i += 1
j = i
yield i
yield j
j *= 100
i += j
yield j
yield i
yield n + i
a = []
for i in f(10):  # loop variable i deliberately shadows i inside the generator
j = 999
a.append(i)
self.assertEqual(a, [0, 1, 1, 100, 101, 111])
def f(n):
i = 0
while i < n:
yield i
yield i * 10
i += 1
a = []
for i in f(10):
a.append(i)
self.assertEqual(a, [0, 0, 1, 10, 2, 20, 3, 30, 4, 40, 5, 50, 6, 60, 7, 70, 8, 80, 9, 90])
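# Each next() resumes the generator exactly where it was suspended, so the
# two yields per loop iteration interleave i with i * 10.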
def f(n):
i = 0
while i < n:
yield i
i = 100
yield i
i += 1
a = []
for i in f(50):
a.append(i)
self.assertEqual(a, [0, 100])
def f():
y = 0
while y == 0:
y += 1
yield y
a = []
for i in f():
a.append(i)
self.assertEqual(a, [1])
def yrange(n):
for i in range(n):
yield i
self.assertEqual([0, 1, 2, 3, 4],list(yrange(5)))
def yrange(n):
for i in range(n):
yield i
def zrange(n):
for y in yrange(n):
yield y
self.assertEqual(list(zrange(5)), [0, 1, 2, 3, 4])
def f(n):
yield 1
a, b = n, n + 1
yield 2
yield a
yield b
a = 9999
b = 9999
z = []
for i in f(20):
z.append(i)
self.assertEqual(z, [1,2,20,21])
def f():
for i in 1,2,3,4,5:
if i == 4: return
yield i
self.assertEqual([1, 2, 3], list(f()))
def foo(value = None):
for i in [-1,0,1,2,3,4]:
if i < 0:
continue
elif i == 0:
yield 0
elif i == 1:
yield 1
yield value
yield 2
else:
yield i
self.assertEqual(list(foo()), [0, 1, None, 2, 2, 3, 4])
def f():
if 1 == 2:
yield -1
elif 1 == 1:
yield 3
else:
yield -1
self.assertEqual(list(f()),[3])
class GeneratorClass:
test = "hi"
def __init__(self):
pass
def METHOD_NAME(self):
for i in range(10):
yield i
gen = GeneratorClass()
a = []
for g in gen.METHOD_NAME():
a.append(g)
self.assertEqual(a, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
def test_generator(self):
a = (1 for x in range(3))
self.assertEqual(str(a)[:17], "<generator object")
b = []
for i in a:
b.append(a)
self.assertNotEqual(b, [1,1,1])
z = []
for i in (1 for x in range(3)):
z.append(i)
self.assertEqual(z, [1,1,1])
c = []
for i in (i*2 for i in range(3)):
c.append(i)
self.assertEqual(c, [0,2,4])
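# Note: generator expressions such as (i*2 for i in range(3)) are single-use
# iterators; once exhausted, iterating them again yields nothing.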
if __name__ == '__main__':
unittest.main()
|