code stringlengths 281 23.7M |
|---|
class OptionSeriesSolidgaugeSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Accessors for the solid-gauge sonification tremolo mapping sub-options.

    NOTE(review): in upstream generated option wrappers accessors like these are
    usually @property-decorated; decorators appear stripped in this copy — confirm
    against the original generated source.
    """

    def depth(self) -> 'OptionSeriesSolidgaugeSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        """Return the sub-configuration object for the tremolo `depth` mapping."""
        return self._config_sub_data('depth', OptionSeriesSolidgaugeSonificationDefaultinstrumentoptionsMappingTremoloDepth)

    def speed(self) -> 'OptionSeriesSolidgaugeSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        """Return the sub-configuration object for the tremolo `speed` mapping."""
        return self._config_sub_data('speed', OptionSeriesSolidgaugeSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
class _coconut_SupportsMinus(_coconut.typing.Protocol):
    """Structural protocol for objects supporting binary and unary minus.

    Exists only so `(-)` can be used in typing contexts; the stub methods are
    never meant to execute and raise if called at runtime.
    """
    def __sub__(self, other):
        # Typing-only stub: never callable at runtime.
        raise NotImplementedError('Protocol methods cannot be called at runtime ((-) in a typing context is a Protocol)')
    def __neg__(self):
        # Typing-only stub: never callable at runtime.
        raise NotImplementedError('Protocol methods cannot be called at runtime ((-) in a typing context is a Protocol)')
class OptionPlotoptionsArcdiagramLevelsStatesSelect(Options):
    """Configuration for the `select` state of arc-diagram levels.

    BUG FIX: the getter and setter for borderColor/color/enabled were plain
    methods with the same name, so each setter definition silently shadowed its
    getter and the getters were unreachable. Restored as @property/@x.setter
    pairs, which is the interface these getter/setter pairs imply.
    """

    def animation(self) -> 'OptionPlotoptionsArcdiagramLevelsStatesSelectAnimation':
        """Return the sub-configuration for the select-state animation."""
        return self._config_sub_data('animation', OptionPlotoptionsArcdiagramLevelsStatesSelectAnimation)

    @property
    def borderColor(self):
        """Border colour of the selected point (default '#000000')."""
        return self._config_get('#000000')

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Fill colour of the selected point (default '#cccccc')."""
        return self._config_get('#cccccc')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether the select state is enabled (default True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
@mock.patch('aea.cli.utils.package_utils._compute_fingerprint', return_value={'correct': 'fingerprint'})
class IsFingerprintCorrectTestCase(TestCase):
    """Tests for is_fingerprint_correct with _compute_fingerprint mocked out.

    BUG FIX: the `@mock.patch` class decorator had been stripped to a bare
    tuple expression, leaving the patch inactive even though every test method
    accepts the injected `*mocks`. Restored the decorator.
    """

    def test_is_fingerprint_correct_positive(self, *mocks):
        """A config whose fingerprint matches the computed one passes."""
        item_config = mock.Mock()
        item_config.fingerprint = {'correct': 'fingerprint'}
        item_config.fingerprint_ignore_patterns = []
        result = is_fingerprint_correct('package_path', item_config)
        self.assertTrue(result)

    def test_is_fingerprint_correct_negative(self, *mocks):
        """A config whose fingerprint differs from the computed one fails."""
        item_config = mock.Mock()
        item_config.fingerprint = {'incorrect': 'fingerprint'}
        item_config.fingerprint_ignore_patterns = []
        package_path = 'package_dir'
        result = is_fingerprint_correct(package_path, item_config)
        self.assertFalse(result)
class Bluetooth(IntervalModule):
    """Status-bar module showing paired/connected bluetooth devices via DBus.

    Scroll/click actions cycle through the known devices.
    """

    interval = 1
    settings = (('format', 'formatp string'), ('color', 'Text color'), ('connected_color', 'Connected device color'), ('show_disconnected', 'Show disconnected but paired devices'))
    format = '{name}: {dev_addr}'
    color = '#ffffff'
    connected_color = '#00ff00'
    on_leftclick = 'next_device'
    on_rightclick = 'prev_device'
    on_upscroll = 'next_device'
    on_downscroll = 'prev_device'
    num_devices = 0
    dev_index = 0
    devices = []
    show_disconnected = True

    def run(self):
        """Refresh the device list and render the currently selected device."""
        try:
            self.devices = get_bluetooth_device_list(self.show_disconnected)
            if (len(self.devices) < 1):
                if hasattr(self, 'data'):
                    del self.data
                self.output = None
                # BUG FIX: keep the cached count in sync so next/prev_device
                # cannot use a stale non-zero count for an empty list.
                self.num_devices = 0
                return
            # Re-wrap the index in case the device list shrank.
            self.dev_index = (self.dev_index % len(self.devices))
            self.num_devices = len(self.devices)
            fdict = {'name': self.devices[self.dev_index]['name'], 'dev_addr': self.devices[self.dev_index]['dev_addr']}
            self.data = fdict
            color = self.color
            if self.devices[self.dev_index]['connected']:
                color = self.connected_color
            self.output = {'full_text': formatp(self.format, **fdict).strip(), 'color': color}
            return
        except dbus.exceptions.DBusException as e:
            self.output = {'full_text': ('DBus error: ' + e.get_dbus_message()), 'color': '#ff0000'}
            if hasattr(self, 'data'):
                del self.data
            return

    def next_device(self):
        """Select the next device; no-op when none are known.

        BUG FIX: `% self.num_devices` raised ZeroDivisionError when the click
        handler fired before any device had ever been listed.
        """
        if self.num_devices:
            self.dev_index = ((self.dev_index + 1) % self.num_devices)

    def prev_device(self):
        """Select the previous device; no-op when none are known (see next_device)."""
        if self.num_devices:
            self.dev_index = ((self.dev_index - 1) % self.num_devices)
class Command(DanubeCloudCommand):
    """Management command that serves the application with Gunicorn."""

    default_config_file = DanubeCloudCommand._path(DanubeCloudCommand.PROJECT_DIR, 'core', 'gunicorn-sio.py')
    help = 'Runs a production server (Gunicorn).'
    options = (
        CommandOption('-c', '--config', action='store', dest='config',
                      default=default_config_file,
                      help=('The Gunicorn config file. [%s]' % default_config_file)),
    )

    def handle(self, *args, **options):
        """Launch gunicorn from the project directory with the selected config file."""
        gunicorn_cmd = ('gunicorn -c %s core.wsgi:application' % options['config'])
        with lcd(self.PROJECT_DIR):
            self.local(gunicorn_cmd, echo_command=True)
def set_capacities_edge_communicability(topology, capacities, capacity_unit='Mbps'):
    """Assign link capacities proportionally to edge communicability centrality."""
    communicability = nx.communicability(topology)
    edge_centrality = {}
    for src, dst in topology.edges():
        edge_centrality[(src, dst)] = communicability[src][dst]
    _set_capacities_proportionally(topology, capacities, edge_centrality, capacity_unit=capacity_unit)
def extractRaspommeTumblrCom(item):
    """Build a release message for a raspomme.tumblr.com feed item.

    Returns None for previews/untagged chapters, False when no tag matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestNameGenerator(BaseEvenniaTest):
    """Tests for the namegen contrib: return shapes, styles and error handling."""

    def test_fantasy_name(self):
        """fantasy_name returns str/list as requested and rejects bad num/style."""
        single_name = namegen.fantasy_name()
        self.assertEqual(type(single_name), str)
        fluid_name = namegen.fantasy_name(style='fluid')
        self.assertEqual(type(fluid_name), str)
        three_names = namegen.fantasy_name(num=3)
        self.assertEqual(type(three_names), list)
        self.assertEqual(len(three_names), 3)
        # return_list=True forces a list even for a single name.
        single_list = namegen.fantasy_name(return_list=True)
        self.assertEqual(type(single_list), list)
        self.assertEqual(len(single_list), 1)
        with self.assertRaises(ValueError):
            namegen.fantasy_name(num=(- 1))
        with self.assertRaises(ValueError):
            namegen.fantasy_name(style='dummy')

    def test_structure_validation(self):
        """Malformed custom styles raise the documented exception types."""
        with self.assertRaises(KeyError):
            namegen.fantasy_name(style='missing_keys')
        with self.assertRaises(TypeError):
            namegen.fantasy_name(style='invalid_vowels')
        with self.assertRaises(ValueError):
            namegen.fantasy_name(style='invalid_length')

    def test_first_name(self):
        """first_name returns str/list, supports gender, rejects bad gender/num."""
        single_name = namegen.first_name()
        self.assertEqual(type(single_name), str)
        three_names = namegen.first_name(num=3)
        self.assertEqual(type(three_names), list)
        self.assertEqual(len(three_names), 3)
        gendered_name = namegen.first_name(gender='f')
        self.assertEqual(type(gendered_name), str)
        single_list = namegen.first_name(return_list=True)
        self.assertEqual(type(single_list), list)
        self.assertEqual(len(single_list), 1)
        with self.assertRaises(ValueError):
            namegen.first_name(gender='x')
        with self.assertRaises(ValueError):
            namegen.first_name(num=(- 1))

    def test_last_name(self):
        """last_name returns str/list and rejects a negative count."""
        single_name = namegen.last_name()
        self.assertEqual(type(single_name), str)
        three_names = namegen.last_name(num=3)
        self.assertEqual(type(three_names), list)
        self.assertEqual(len(three_names), 3)
        single_list = namegen.last_name(return_list=True)
        self.assertEqual(type(single_list), list)
        self.assertEqual(len(single_list), 1)
        with self.assertRaises(ValueError):
            namegen.last_name(num=(- 1))

    def test_full_name(self):
        """full_name returns str/list, honors gender/parts, rejects bad args."""
        single_name = namegen.full_name()
        self.assertEqual(type(single_name), str)
        three_names = namegen.full_name(num=3)
        self.assertEqual(type(three_names), list)
        self.assertEqual(len(three_names), 3)
        gendered_name = namegen.full_name(gender='f')
        self.assertEqual(type(gendered_name), str)
        single_list = namegen.full_name(return_list=True)
        self.assertEqual(type(single_list), list)
        self.assertEqual(len(single_list), 1)
        # parts=4 may collapse duplicates, so only a lower bound is asserted.
        parts_name = namegen.full_name(parts=4)
        parts = parts_name.split(' ')
        self.assertGreaterEqual(len(parts), 3)
        with self.assertRaises(ValueError):
            namegen.full_name(parts=1)
        with self.assertRaises(ValueError):
            namegen.full_name(num=(- 1))
@pytest.mark.skipif((sys.platform == 'win32'), reason='Cannot (yet) use expandvars on Windows')
def test_user_2():
    """Saving to ~/.climetlab and re-loading via $HOME expansion yields 2 fields.

    BUG FIX: the `@pytest.mark` prefix of the skipif decorator had been
    stripped, leaving a syntax-invalid bare `.skipif(...)` line. Restored.
    """
    s = load_source('file', climetlab_file('docs/examples/test.grib'))
    home_file = os.path.expanduser('~/.climetlab/test.grib')
    try:
        s.save(home_file)
        s = load_source('file', '$HOME/.climetlab/test.grib', expand_vars=True)
        assert (len(s) == 2)
    finally:
        # Best-effort cleanup; log but do not fail the test on unlink errors.
        try:
            os.unlink(home_file)
        except OSError:
            LOG.exception('unlink(%s)', home_file)
def generate_numeric_anomalies_training_and_validation_files(rows_count_per_day=200):
    """Write training and validation CSVs of per-row numeric column metrics.

    Training rows model "normal" behaviour; validation rows use shifted ranges
    and zero-rates so numeric anomaly detectors should flag them.
    """
    def get_training_row(date, row_index, rows_count):
        # Normal profile: ~3% of rows get zero_count=0, ~20% get zero_percent=0,
        # tight average/stddev/variance around 100.
        return {'updated_at': date.strftime(DATE_FORMAT), 'occurred_at': (date - timedelta(hours=1)).strftime(DATE_FORMAT), 'min': random.randint(100, 200), 'max': random.randint(100, 200), 'zero_count': (0 if (row_index < ((3 / 100) * rows_count)) else random.randint(100, 200)), 'zero_percent': (0 if (random.randint(1, rows_count) <= ((20 / 100) * rows_count)) else random.randint(100, 200)), 'average': random.randint(99, 101), 'standard_deviation': random.randint(99, 101), 'variance': random.randint(99, 101), 'sum': random.randint(100, 200)}
    def get_validation_row(date, row_index, rows_count):
        # Anomalous profile: wider ranges, much higher zero-rates, shifted sums.
        row_index += (- (rows_count / 2))
        return {'updated_at': date.strftime(DATE_FORMAT), 'occurred_at': (date - timedelta(hours=7)).strftime(DATE_FORMAT), 'min': random.randint(10, 200), 'max': random.randint(100, 300), 'zero_count': (0 if (row_index < ((80 / 100) * rows_count)) else random.randint(100, 200)), 'zero_percent': (0 if (random.randint(1, rows_count) <= ((60 / 100) * rows_count)) else random.randint(100, 200)), 'average': random.randint(101, 110), 'standard_deviation': random.randint(80, 120), 'variance': random.randint(80, 120), 'sum': random.randint(300, 400)}
    numeric_columns = ['updated_at', 'occurred_at', 'min', 'max', 'zero_count', 'zero_percent', 'average', 'standard_deviation', 'variance', 'sum']
    # Training data: rows for each day up to two days before EPOCH.
    dates = generate_rows_timestamps(base_date=(EPOCH - timedelta(days=2)))
    training_rows = generate_rows(rows_count_per_day, dates, get_training_row)
    write_rows_to_csv(os.path.join(FILE_DIR, 'data', 'training', 'numeric_column_anomalies_training.csv'), training_rows, numeric_columns)
    # Validation data: a single anomalous day just before EPOCH.
    validation_date = (EPOCH - timedelta(days=1))
    validation_rows = generate_rows(rows_count_per_day, [validation_date], get_validation_row)
    write_rows_to_csv(os.path.join(FILE_DIR, 'data', 'validation', 'numeric_column_anomalies_validation.csv'), validation_rows, numeric_columns)
def parse_rocketfuel_isp_map(path):
    """Parse a RocketFuel ISP map file into a DirectedTopology.

    Lines starting with '-' describe external nodes ("-<id> ... =<addr> r<n>");
    all other non-empty, non-comment lines describe internal nodes with
    <internal> and {external} neighbor lists. Raises ValueError when a node
    line does not match the expected format.
    """
    topology = DirectedTopology(type='rocket_fuel')
    comment_char = '#'
    with open(path, 'r') as f:
        for line in f.readlines():
            if (comment_char in line):
                # Drop the trailing comment, keep the payload before it.
                (line, _) = line.split(comment_char, 1)
            line = line.strip()
            if (len(line) == 0):
                continue
            if line.startswith('-'):
                # External node: negative id, '=' address, trailing 'r<digit>'.
                try:
                    node = int(re.findall('-\\d+', line)[0])
                    address = re.findall('=\\S+', line)[0][1:]
                    r = int(re.findall('r\\d$', line)[0][1:])
                except IndexError:
                    raise ValueError('Invalid input file. Parsing failed while trying to parse an external node')
                topology.add_node(node, type='external', address=address, r=r)
            else:
                # Internal node: id, location (with '+' markers removed),
                # radius, address, then neighbor lists and optional 'bb' flag.
                try:
                    node = int(re.findall('\\d+', line)[0])
                    node_location = re.findall('\\S*', line)[0]
                    node_location = re.sub('[\\+]', '', node_location)
                    r = int(re.findall('r\\d$', line)[0][1:])
                    address = re.findall('=\\S+', line)[0][1:]
                except IndexError:
                    raise ValueError('Invalid input file. Parsing failed while trying to parse an internal node')
                internal_links = re.findall('<(\\d+)>', line)
                external_links = re.findall('{(-?\\d+)}', line)
                backbone = (len(re.findall('\\sbb\\s', line)) > 0)
                topology.add_node(node, type='internal', location=node_location, address=address, r=r, backbone=backbone)
                # Self-loops are skipped for both link classes.
                for link in internal_links:
                    link = int(link)
                    if (node != link):
                        topology.add_edge(node, link, type='internal')
                for link in external_links:
                    link = int(link)
                    if (node != link):
                        topology.add_edge(node, link, type='external')
    return topology
class BypassNormExtension(Extension):
    """Markdown extension bracketing the pipeline with whitespace-normalization passes."""

    def __init__(self, *args, **kwargs):
        """Initialize with no inline-highlight entries and an empty config."""
        self.inlinehilite = []
        self.config = {}
        super().__init__(*args, **kwargs)

    def extendMarkdown(self, md):
        """Register the pre- (priority 35) and post- (priority 29.9) normalizers."""
        pre_pass = PreNormalizePreprocessor(md)
        post_pass = PostNormalizePreprocessor(md)
        md.preprocessors.register(pre_pass, 'pymdownx-pre-norm-ws', 35)
        md.preprocessors.register(post_pass, 'pymdownx-post-norm-ws', 29.9)
class CustomFormTranslateListPost(ResourceList):
    """POST-only endpoint creating custom-form translations."""

    def before_post(self, args, kwargs, data=None):
        """Validate the custom_form relationship and co-organizer access.

        BUG FIX: the hook was declared as `before_post(data)`, so when invoked
        as a bound method `data` received `self`. Restored the
        flask-rest-jsonapi hook signature (self, args, kwargs, data).
        """
        require_relationship(['custom_form'], data)
        if (not has_access('is_coorganizer', custom_form=data['custom_form'])):
            raise ObjectNotFound({'parameter': 'custom_form'}, f"Custom Form: {data['custom_form']} not found")

    schema = CustomFormTranslateSchema
    methods = ['POST']
    data_layer = {'session': db.session, 'model': CustomFormTranslates}
def test_reward_clipping_wrapper():
    """RewardClippingWrapper must keep step rewards inside [min_val, max_val]."""
    obs_conv = ObservationConversion()
    env = DummyEnvironment(
        core_env=DummyCoreEnvironment(obs_conv.space()),
        action_conversion=[DictActionConversion()],
        observation_conversion=[obs_conv],
    )
    env.reset()
    action = env.action_space.sample()
    # Seed before wrapping and before stepping for reproducible rewards.
    np.random.seed(1234)
    clipped_env = RewardClippingWrapper(env, min_val=(- 0.1), max_val=0.1)
    np.random.seed(1234)
    reward = clipped_env.step(action)[1]
    assert (- 0.1) <= reward <= 0.1
class RobotArmMm(_RobotArm):
    """Robot arm segment joining two (possibly different-size) servos.

    NOTE(review): assumes self.servo / self.servo2 expose height, length,
    axle_pos and top/bottom/front drawing callbacks — confirm against
    _RobotArm and the servo classes.
    """

    def __call__(self, length, move=None):
        """Draw all walls of one arm segment of the given length."""
        t = self.thickness
        w = self.servo.height
        w2 = self.servo2.height
        # Segment must be long enough for both servos or the requested length
        # plus axle offsets, whichever is larger.
        l = max((self.servo.length * 2), (length + (2 * self.servo.axle_pos)))
        th = max(((2 * self.thickness) + l), (((w + w2) + (4 * t)) + self.spacing))
        tw = (5 * ((max(w, w2) + (2 * self.thickness)) + self.spacing))
        if self.move(tw, th, move, True):
            # Bounding-box-only pass; nothing to draw.
            return
        # Two long side walls carrying servo 1, two carrying servo 2.
        self.rectangularWall(w2, l, 'FfFf', callback=[(lambda : self.servo.top((w2 / 2)))], move='right')
        self.rectangularWall(w2, l, 'FfFf', callback=[(lambda : self.servo.bottom((w2 / 2)))], move='right')
        self.rectangularWall(w, l, 'FFFF', callback=[None, None, (lambda : self.servo2.top((w / 2)))], move='right')
        self.rectangularWall(w, l, 'FFFF', callback=[None, None, (lambda : self.servo2.bottom((w / 2)))], move='right')
        # End caps with the servo front cut-outs.
        self.rectangularWall(w2, w, 'ffff', callback=[(lambda : self.servo.front((w2 / 2)))], move='up')
        self.rectangularWall(w, w2, 'ffff', callback=[(lambda : self.servo2.front((w / 2)))], move='')
        self.move(tw, th, move)
def forward(model: Model, X: Any, is_train: bool) -> Tuple[(Any, Callable)]:
    """Run the wrapped MXNet shim forward and return (Y, backprop)."""
    to_mxnet = model.attrs['convert_inputs']
    from_mxnet = model.attrs['convert_outputs']
    Xmxnet, get_dX = to_mxnet(model, X, is_train)
    Ymxnet, mxnet_backprop = model.shims[0](Xmxnet, is_train)
    Y, get_dYmxnet = from_mxnet(model, (X, Ymxnet), is_train)

    def backprop(dY: Any) -> Any:
        """Convert the gradient to MXNet, backprop through the shim, convert back."""
        return get_dX(mxnet_backprop(get_dYmxnet(dY)))

    return (Y, backprop)
def library_paths():
    """Yield candidate (libwand, libmagick) path pairs for loading ImageMagick.

    Candidates are generated in priority order: explicit MAGICK_HOME locations
    first (per-platform filename variants), then ctypes.util.find_library
    lookups. Suffix candidates combine version tags and HDRI options, with any
    WAND_MAGICK_LIBRARY_SUFFIX entries tried first. The order of yielded pairs
    is significant — callers try them until one loads.
    """
    libwand = None
    libmagick = None
    versions = ('', '-7', '-7.Q8', '-7.Q16', '-6', '-Q16', '-Q8', '-6.Q16')
    options = ('', 'HDRI', 'HDRI-2')
    system = platform.system()
    magick_home = os.environ.get('MAGICK_HOME')
    magick_suffix = os.environ.get('WAND_MAGICK_LIBRARY_SUFFIX')
    if (system == 'Windows'):
        # Prefer the install location recorded in the registry; extend PATH so
        # dependent DLLs (coders/filters) resolve.
        try:
            with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\ImageMagick\\Current') as reg_key:
                libPath = winreg.QueryValueEx(reg_key, 'LibPath')
                coderPath = winreg.QueryValueEx(reg_key, 'CoderModulesPath')
                filterPath = winreg.QueryValueEx(reg_key, 'FilterModulesPath')
                magick_home = libPath[0]
                os.environ['PATH'] += str((((((';' + libPath[0]) + ';') + coderPath[0]) + ';') + filterPath[0]))
        except OSError:
            # No registry entry; fall through to the generic search.
            pass
    def magick_path(path):
        # Join path components under the resolved MAGICK_HOME.
        return os.path.join(magick_home, *path)
    combinations = itertools.product(versions, options)
    suffixes = list()
    if magick_suffix:
        # User-specified suffixes take priority over generated ones.
        suffixes = str(magick_suffix).split(';')
    suffixes.extend(list(((version + option) for (version, option) in combinations)))
    if magick_home:
        for suffix in suffixes:
            if (system == 'Windows'):
                # Try legacy CORE_RL names, newer CORE_RL Magick* names, then lib*.dll.
                libwand = ('CORE_RL_wand_{0}.dll'.format(suffix),)
                libmagick = ('CORE_RL_magick_{0}.dll'.format(suffix),)
                (yield (magick_path(libwand), magick_path(libmagick)))
                libwand = ('CORE_RL_MagickWand_{0}.dll'.format(suffix),)
                libmagick = ('CORE_RL_MagickCore_{0}.dll'.format(suffix),)
                (yield (magick_path(libwand), magick_path(libmagick)))
                libwand = ('libMagickWand{0}.dll'.format(suffix),)
                libmagick = ('libMagickCore{0}.dll'.format(suffix),)
                (yield (magick_path(libwand), magick_path(libmagick)))
            elif (system == 'Darwin'):
                # On macOS the wand dylib provides both interfaces.
                libwand = ('lib', 'libMagickWand{0}.dylib'.format(suffix))
                (yield (magick_path(libwand), magick_path(libwand)))
            else:
                # Linux/other: bare .so first, then common SONAME versions.
                libwand = ('lib', 'libMagickWand{0}.so'.format(suffix))
                libmagick = ('lib', 'libMagickCore{0}.so'.format(suffix))
                (yield (magick_path(libwand), magick_path(libmagick)))
                libwand = ('lib', 'libMagickWand{0}.so.9'.format(suffix))
                libmagick = ('lib', 'libMagickCore{0}.so.9'.format(suffix))
                (yield (magick_path(libwand), magick_path(libmagick)))
                libwand = ('lib', 'libMagickWand{0}.so.6'.format(suffix))
                libmagick = ('lib', 'libMagickCore{0}.so.6'.format(suffix))
                (yield (magick_path(libwand), magick_path(libmagick)))
    # Fall back to the system library search.
    for suffix in suffixes:
        if (system == 'Windows'):
            libwand = ctypes.util.find_library(('CORE_RL_wand_' + suffix))
            libmagick = ctypes.util.find_library(('CORE_RL_magick_' + suffix))
            (yield (libwand, libmagick))
            libwand = ctypes.util.find_library(('CORE_RL_MagickWand_' + suffix))
            libmagick = ctypes.util.find_library(('CORE_RL_MagickCore_' + suffix))
            (yield (libwand, libmagick))
            libwand = ctypes.util.find_library(('libMagickWand' + suffix))
            libmagick = ctypes.util.find_library(('libMagickCore' + suffix))
            (yield (libwand, libmagick))
        else:
            libmagick = ctypes.util.find_library(('MagickCore' + suffix))
            libwand = ctypes.util.find_library(('MagickWand' + suffix))
            if (libmagick is not None):
                (yield (libwand, libmagick))
            # Last resort: wand library standing in for both.
            (yield (libwand, libwand))
def conditional_process() -> None:
    """Run every frame processor's pre-check, then process the target image/video."""
    conditional_append_reference_faces()
    # Abort the whole run if any processor's pre-check fails.
    for processor_module in get_frame_processors_modules(facefusion.globals.frame_processors):
        if not processor_module.pre_process('output'):
            return
    if is_image(facefusion.globals.target_path):
        process_image()
    if is_video(facefusion.globals.target_path):
        process_video()
class AmbiguousBaseFilterTestCase(unittest.TestCase):
    """Tests for quality_filter.AmbiguousBaseFilter in 'drop' and 'truncate' modes."""

    def setUp(self):
        """Five records with varying amounts of ambiguous ('N') bases."""
        sequences = ['ACGT', 'NNNN', 'NACT', 'ACGTN', 'GGNTTACT']
        self.records = [SeqRecord(Seq(s)) for s in sequences]

    def test_drop(self):
        """'drop' keeps only the record containing no N bases."""
        filt = quality_filter.AmbiguousBaseFilter('drop')
        kept = list(filt.filter_records(self.records))
        self.assertEqual(1, len(kept))
        self.assertEqual(1, filt.passed)
        self.assertEqual(4, filt.failed)
        self.assertEqual(self.records[0].seq, kept[0].seq)

    def test_truncate(self):
        """'truncate' cuts each sequence at its first N and passes every record."""
        filt = quality_filter.AmbiguousBaseFilter('truncate')
        kept = list(filt.filter_records(self.records))
        self.assertEqual(5, len(kept))
        self.assertEqual(0, filt.failed)
        self.assertEqual(5, filt.passed)
        self.assertEqual(['ACGT', '', '', 'ACGT', 'GG'], [str(record.seq) for record in kept])

    def test_invalid_action(self):
        """Any action other than drop/truncate raises ValueError."""
        self.assertRaises(ValueError, quality_filter.AmbiguousBaseFilter, 'other')
@st.composite
def queue_options(draw, systems):
    """Hypothesis strategy: [system, option] or [system, option, value].

    BUG FIX: the `draw`-first signature only works under `@st.composite`,
    which had been stripped from this definition. Restored.
    """
    queue_system = draw(systems)
    name = draw(st.sampled_from(valid_queue_options(queue_system)))
    # Randomly decide whether to also draw a concrete option value.
    if draw(booleans):
        return [queue_system, name, draw(valid_queue_values(name, queue_system))]
    return [queue_system, name]
class BugzillaProcessor(InlineProcessor):
    """Inline processor that turns recognized bug-tracker references into links."""

    def handleMatch(self, m: 're.Match', data: str) -> typing.Tuple[('xml.etree.ElementTree.Element', int, int)]:
        """Return an <a> element for a known tracker reference, or the literal text."""
        tracker = markdown.util.AtomicString(m.group(1))
        idx = markdown.util.AtomicString(m.group(2))
        url = bug_url(tracker, idx[1:])
        if url is None:
            # Unknown tracker: emit the matched text unchanged.
            return (f'{tracker}{idx}', m.start(0), m.end(0))
        link = etree.Element('a')
        link.set('href', url)
        link.text = idx
        return (link, m.start(0), m.end(0))
# NOTE(review): the bare parenthesized expression below looks like the argument
# list of a stripped decorator (the original presumably wrapped or patched
# _dill._locate_function) — restore against the original source.
(_dill._locate_function)
def by_value_locator(obj, pickler=None, og_locator=_dill._locate_function):
    """Locator override forcing by-value pickling for configured modules.

    Returns False (i.e. "do not locate by reference") when obj's module, or
    its top-level package, is registered in _MODULES/_PACKAGES; otherwise
    defers to the original locator.
    """
    module_name = getattr(obj, '__module__', None)
    if (module_name is not None):
        if (module_name in _MODULES):
            return False
        # Only the top-level package name matters for the package check.
        (package_name, *_) = module_name.partition('.')
        if (package_name in _PACKAGES):
            return False
    og_result = og_locator(obj, pickler)
    return og_result
def path_distance(path: List[int], distance_matrix: List[List[float]]):
    """Return the total length of a tour over the given distance matrix.

    When the path does not already revisit its start (length != n + 1 for an
    n-city matrix), the start city is appended to close the tour.
    """
    stops = list(path)
    if len(stops) != (len(distance_matrix[0]) + 1):
        stops.append(stops[0])
    # Sum the leg lengths between consecutive stops.
    return sum(distance_matrix[a][b] for a, b in zip(stops, stops[1:]))
def distance_mask(data_coordinates, maxdist, coordinates=None, grid=None, projection=None):
    """Mask grid points farther than *maxdist* from the nearest data point.

    Either explicit *coordinates* or a *grid* supplies the target locations;
    only the first two coordinate arrays are used. When *projection* is given
    it is applied to both data and target coordinates before the distance
    query. Returns a boolean mask, or ``grid.where(mask)`` when a grid was
    supplied.
    """
    (coordinates, shape) = _get_grid_coordinates(coordinates, grid)
    if (projection is not None):
        # Put both data and target coordinates in the same projected system.
        data_coordinates = projection(*n_1d_arrays(data_coordinates, 2))
        coordinates = projection(*n_1d_arrays(coordinates, 2))
    # Nearest-neighbour distance from each target point to the data points.
    tree = kdtree(data_coordinates[:2])
    distance = tree.query(np.transpose(n_1d_arrays(coordinates, 2)))[0].reshape(shape)
    mask = (distance <= maxdist)
    if (grid is not None):
        return grid.where(mask)
    return mask
class UserManagerMock(BaseTestUserManager[models.UP]):
    """Typing shell for a test user manager whose hooks are MagicMock attributes.

    Declares every overridable lookup and lifecycle hook as a MagicMock so
    tests can assert calls; presumably the mocks are installed by a fixture —
    confirm against the test setup.
    """
    # Lookup / verification / password-reset entry points.
    get_by_email: MagicMock
    request_verify: MagicMock
    verify: MagicMock
    forgot_password: MagicMock
    reset_password: MagicMock
    # Lifecycle hooks fired around user state changes.
    on_after_register: MagicMock
    on_after_request_verify: MagicMock
    on_after_verify: MagicMock
    on_after_forgot_password: MagicMock
    on_after_reset_password: MagicMock
    on_after_update: MagicMock
    on_before_delete: MagicMock
    on_after_delete: MagicMock
    on_after_login: MagicMock
    # Internal update helper.
    _update: MagicMock
def env_list(name: str, separator: str=',', required: bool=False, default: Union[(Type[empty], List[Any])]=empty) -> List[Any]:
    """Read an environment variable and split it into stripped, non-empty items."""
    raw = get_env_value(name, required=required, default=default)
    if raw is empty:
        return []
    pieces = (piece.strip() for piece in raw.split(separator))
    return [piece for piece in pieces if piece]
class TestLoadApp():
    """Tests for inspect_app.load_app success and failure modes.

    BUG FIX: the `@pytest.mark` prefixes of the parametrize decorators had
    been stripped, leaving syntax-invalid bare `.parametrize(...)` lines.
    Restored.
    """

    @pytest.mark.parametrize('name', ('_APP', 'make_app'))
    def test_load_app(self, name):
        """Both an App instance and a factory callable can be loaded."""
        parser = inspect_app.make_parser()
        args = Namespace(app_module='{}:{}'.format(_MODULE, name), route_only=False, verbose=False)
        app = inspect_app.load_app(parser, args)
        assert isinstance(app, App)
        assert (app._router.find('/test') is not None)

    @pytest.mark.parametrize('name', ('foo', '_MODULE', 'DummyResource'))
    def test_load_app_error(self, name):
        """Missing or non-app attributes cause a SystemExit."""
        parser = inspect_app.make_parser()
        args = Namespace(app_module='{}:{}'.format(_MODULE, name), route_only=False, verbose=False)
        with pytest.raises(SystemExit):
            inspect_app.load_app(parser, args)

    def test_load_app_module_error(self):
        """An app_module string without ':attr' causes a SystemExit."""
        parser = inspect_app.make_parser()
        args = Namespace(app_module='foo', route_only=False, verbose=False)
        with pytest.raises(SystemExit):
            inspect_app.load_app(parser, args)
class OptionPlotoptionsWaterfallStates(Options):
    """Accessors for the waterfall series `states` sub-options.

    NOTE(review): in upstream generated option wrappers accessors like these
    are usually @property-decorated; decorators appear stripped in this copy —
    confirm against the original generated source.
    """

    def hover(self) -> 'OptionPlotoptionsWaterfallStatesHover':
        """Return the sub-configuration for the hover state."""
        return self._config_sub_data('hover', OptionPlotoptionsWaterfallStatesHover)

    def inactive(self) -> 'OptionPlotoptionsWaterfallStatesInactive':
        """Return the sub-configuration for the inactive state."""
        return self._config_sub_data('inactive', OptionPlotoptionsWaterfallStatesInactive)

    def normal(self) -> 'OptionPlotoptionsWaterfallStatesNormal':
        """Return the sub-configuration for the normal state."""
        return self._config_sub_data('normal', OptionPlotoptionsWaterfallStatesNormal)

    def select(self) -> 'OptionPlotoptionsWaterfallStatesSelect':
        """Return the sub-configuration for the select state."""
        return self._config_sub_data('select', OptionPlotoptionsWaterfallStatesSelect)
class WafTagsResponse(ModelComposed):
    """Generated composed model for a WAF tags list response (links/meta/data/included).

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines
    below look like stripped decorators (in similar generated OpenAPI clients
    these are `@cached_property`, `@convert_js_args_to_python_args`, and a
    `@classmethod` on `_from_openapi_data`) — restore against the original
    generated source; as written these lines are NameError/no-op expressions.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for properties not declared in the schema.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared property names mapped to their allowed (tuple of) types.
        lazy_import()
        return {'links': (PaginationLinks,), 'meta': (PaginationMeta,), 'data': ([WafTagsResponseDataItem],), 'included': ([WafRule],)}
    _property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None
    attribute_map = {'links': 'links', 'meta': 'meta', 'data': 'data', 'included': 'included'}
    read_only_vars = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from deserialized API data, honoring composition rules."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        # Distribute kwargs across the composed (allOf) component instances.
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                # Unknown key and the configuration says to drop it.
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Instantiate from user-supplied kwargs; read-only attributes rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    _property
    def _composed_schemas():
        # Composition layout: this model is an allOf of pagination + payload.
        lazy_import()
        return {'anyOf': [], 'allOf': [Pagination, WafTagsResponseAllOf], 'oneOf': []}
@pytest.mark.gpu
@pytest.mark.skipif((not has_torch_cuda_gpu), reason='needs GPU & CUDA')
def test_torch_dtype():
    """torch_dtype accepts explicit dtypes and 'auto', and rejects invalid names.

    BUG FIX: the `@pytest.mark` prefixes of the `gpu`/`skipif` decorators had
    been stripped, leaving syntax-invalid bare `.gpu` / `.skipif(...)` lines.
    Restored.
    """
    nlp = spacy.blank('en')
    cfg = {**_PIPE_CFG, **{'model': {'_models': 'spacy.Dolly.v1', 'name': 'dolly-v2-3b'}}}
    # Explicit dtype string is resolved to the torch dtype object.
    cfg['model']['config_init'] = {'torch_dtype': 'float16'}
    llm = nlp.add_pipe('llm', name='llm1', config=cfg)
    assert (llm._model._config_init['torch_dtype'] == torch.float16)
    # 'auto' is passed through without error.
    cfg['model']['config_init'] = {'torch_dtype': 'auto'}
    nlp.add_pipe('llm', name='llm2', config=cfg)
    # Unknown dtype names are rejected.
    cfg['model']['config_init'] = {'torch_dtype': 'float999'}
    with pytest.raises(ValueError, match='Invalid value float999'):
        nlp.add_pipe('llm', name='llm3', config=cfg)
    torch.cuda.empty_cache()
def test_expiring_value_caching():
    """ExpiringValue caches its factory result for max_age seconds, then refreshes."""
    with freeze_time() as clock:
        ev = cache.ExpiringValue(random.random, max_age=300)
        first = ev.value()
        # Inside the 300s window: cached value is returned.
        clock.tick(delta=timedelta(seconds=60))
        assert ev.value() == first, 'value was different, should have been cached'
        # 301s total: past max_age, a fresh value must be produced.
        clock.tick(delta=timedelta(seconds=241))
        second = ev.value()
        assert second != first, 'value was the same, should have expired'
        # The fresh value is cached again.
        clock.tick(delta=timedelta(seconds=60))
        assert ev.value() == second, 'value was different, should have been cached'
def test_recurse_check_structure_valid():
    """A value matching the sample structure (extra list items allowed) passes."""
    sample = {
        'string': 'Foobar',
        'list': ['Foo', 'Bar'],
        'dict': {'foo': 'Bar'},
        'none': None,
        'true': True,
        'false': False,
    }
    to_check = {
        'string': 'Foobar',
        'list': ['Foo', 'Bar', 'Bas'],
        'dict': {'foo': 'Bar'},
        'none': None,
        'true': True,
        'false': False,
    }
    # Must not raise.
    recurse_check_structure(sample, to_check)
def extractGameOfScanlation(item):
    """Build a release message for a gameofscanlation feed item.

    Returns None for previews/untagged chapters, False when no tag matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
def postgres_example_test_dataset_config_skipped_login_collection(read_connection_config: ConnectionConfig, db: Session, example_datasets: List[Dict]) -> Generator:
    """Fixture: a postgres DatasetConfig whose 'login' collection is skipped.

    Yields the DatasetConfig, then deletes it and its backing CtlDataset on
    teardown.
    """
    postgres_dataset = example_datasets[0].copy()
    fides_key = postgres_dataset['fides_key']
    # Mark the 'login' collection so traversal skips it.
    skipped_collection = next((col for col in postgres_dataset['collections'] if (col['name'] == 'login')))
    skipped_collection['fides_meta'] = {}
    skipped_collection['fides_meta']['skip_processing'] = True
    ctl_dataset = CtlDataset.create_from_dataset_dict(db, postgres_dataset)
    dataset = DatasetConfig.create(db=db, data={'connection_config_id': read_connection_config.id, 'fides_key': fides_key, 'ctl_dataset_id': ctl_dataset.id})
    (yield dataset)
    # Teardown: drop the config before the ctl dataset it references.
    dataset.delete(db=db)
    ctl_dataset.delete(db=db)
def action_handler(action_name, button_name=None, pad_n=None, pad_ij=None, encoder_name=None):
    """Decorator factory registering a handler for an action.

    When a specific button/pad/encoder is given together with a matching
    action family, the handler is registered under the per-control action name
    instead of the generic one.
    """
    def wrapper(func):
        resolved = action_name
        is_button_action = action_name in [ACTION_BUTTON_PRESSED, ACTION_BUTTON_RELEASED]
        if is_button_action and button_name is not None:
            resolved = get_individual_button_action_name(action_name, button_name)
        is_pad_action = action_name in [ACTION_PAD_PRESSED, ACTION_PAD_RELEASED, ACTION_PAD_AFTERTOUCH]
        if is_pad_action and (pad_n is not None or pad_ij is not None):
            resolved = get_individual_pad_action_name(action_name, pad_n=pad_n, pad_ij=pad_ij)
        is_encoder_action = action_name in [ACTION_ENCODER_ROTATED, ACTION_ENCODER_TOUCHED, ACTION_ENCODER_RELEASED]
        if is_encoder_action and encoder_name is not None:
            resolved = get_individual_encoder_action_name(action_name, encoder_name=encoder_name)
        logging.debug('Registered handler {0} for action {1}'.format(func, resolved))
        action_handler_registry[resolved].append(func)
        return func
    return wrapper
# NOTE(review): the line below looks like a stripped FastAPI route decorator,
# e.g. `@<router-or-app>.put('/models/{model_slug}', response_model=Model)` —
# the decorator target object is not visible here; restore against the
# original source.
('/models/{model_slug}', response_model=Model)
def update_models(model_slug: str, model_params: dict, connector: Connector=Depends(get_connection)):
    """Update the model identified by *model_slug*, mapping connector errors to HTTP errors."""
    try:
        updated_model = connector.update_model(model_slug, model_params)
    except exc.SlugDoesNotExist as ex:
        # Unknown slug -> 404.
        raise HTTPException(status_code=404, detail=ex.message)
    except exc.CannotSaveModel as ex:
        raise HTTPException(status_code=500, detail=ex.message)
    except exc.CannotConnectToDatabase as ex:
        raise HTTPException(status_code=500, detail=ex.message)
    return updated_model
class DecisionId():
    """Identifier for a decision: a (target, event id) pair usable as a dict/set key.

    NOTE(review): bare annotated attributes with no __init__ strongly suggest
    this was a @dataclass in the original source (decorator stripped) —
    confirm and restore.
    """
    decision_target: DecisionTarget
    decision_event_id: int

    def __str__(self):
        return f'{self.decision_target}:{self.decision_event_id}'

    def __hash__(self):
        # Hash the stable string form, keeping hash consistent with __eq__.
        return hash(self.__str__())

    def __eq__(self, other: object):
        if (not isinstance(other, DecisionId)):
            return False
        else:
            return ((self.decision_target == other.decision_target) and (self.decision_event_id == other.decision_event_id))
def upgrade():
    """Alembic upgrade: make CFP privacy/timezone NOT NULL and add event thumbnails."""
    op.alter_column('call_for_papers', 'privacy', existing_type=sa.VARCHAR(), nullable=False)
    op.alter_column('call_for_papers', 'timezone', existing_type=sa.VARCHAR(), nullable=False)
    op.add_column('events', sa.Column('thumbnail', sa.String(), nullable=True))
    # The versioning table mirrors the new events column.
    op.add_column('events_version', sa.Column('thumbnail', sa.String(), autoincrement=False, nullable=True))
class PlanePoiseuilleFlow_w2(PlaneBase):
    """w (z-velocity) component of plane Poiseuille flow on an oriented plane."""

    def __init__(self, plane_theta=0.0, plane_phi=old_div(math.pi, 2.0), v_theta=old_div(math.pi, 2.0), v_phi=None, v_norm=1.0, mu=1.0, grad_p=1.0, L=None):
        # BUG FIX: `L=[1.0, 1.0, 1.0]` was a mutable default argument shared
        # across all instances; use a None sentinel and build a fresh list.
        if L is None:
            L = [1.0, 1.0, 1.0]
        PlaneBase.__init__(self, plane_theta, plane_phi, v_theta, v_phi, v_norm, mu, grad_p, L)

    def uOfX(self, x):
        """Return the z component of the velocity field at point x."""
        return (self.U(x) * self.v_n[2])
class RuleVisitorTests(TestCase):
    """Tests for the grammar RuleVisitor: round-trip, undefined labels, optionals."""

    def test_round_trip(self):
        """A visited grammar's default rule parses text matching that rule."""
        tree = rule_grammar.parse('number = ~"[0-9]+"\n')
        (rules, default_rule) = RuleVisitor().visit(tree)
        text = '98'
        self.assertEqual(default_rule.parse(text), Node(default_rule, text, 0, 2))

    def test_undefined_rule(self):
        """Referencing a rule that is never defined raises UndefinedLabel."""
        tree = rule_grammar.parse('boy = howdy\n')
        self.assertRaises(UndefinedLabel, RuleVisitor().visit, tree)

    def test_optional(self):
        """A '?' quantifier produces a node wrapping the optional literal match."""
        tree = rule_grammar.parse('boy = "howdy"?\n')
        (rules, default_rule) = RuleVisitor().visit(tree)
        howdy = 'howdy'
        self.assertEqual(default_rule.parse(howdy), Node(default_rule, howdy, 0, 5, children=[Node(Literal('howdy'), howdy, 0, 5)]))
def main():
    """Command-line entry point for checking/operating on SOLiD run directories.

    Parses the command line, locates the run directories to operate on, and
    dispatches to the reporting/layout/rsync/copy/gzip/md5/verify actions.
    Exits with a non-zero status on bad input or failed verification.

    BUG FIX: when multiple run directories were given on the command line the
    original code assigned the whole argparse ``Namespace`` to ``solid_dirs``
    (``solid_dirs = args``) instead of the list of directories, which broke
    every downstream loop over ``solid_dirs``.
    """
    p = argparse.ArgumentParser(description='Utility for performing various checks and operations on SOLiD run directories. If a single solid_run_dir is specified then %(prog)s automatically finds and operates on all associated directories from the same instrument and with the same timestamp.')
    p.add_argument('--version', action='version', version=('%(prog)s ' + get_version()))
    p.add_argument('--only', action='store_true', dest='only', help="only operate on the specified solid_run_dir, don't locate associated run directories")
    p.add_argument('--report', action='store_true', dest='report', help='print a report of the SOLiD run')
    p.add_argument('--report-paths', action='store_true', dest='report_paths', default=False, help='in report mode, also print full paths to primary data files')
    p.add_argument('--xls', action='store_true', dest='xls', help='write report to Excel spreadsheet')
    p.add_argument('--verify', action='store_true', dest='verify', help='do verification checks on SOLiD run directories')
    p.add_argument('--layout', action='store_true', dest='layout', help='generate script for laying out analysis directories')
    p.add_argument('--rsync', action='store_true', dest='rsync', help='generate script for rsyncing data')
    p.add_argument('--copy', action='append', dest='copy_pattern', default=[], help="copy primary data files to pwd from specific library where names match COPY_PATTERN, which should be of the form '<sample>/<library>'")
    p.add_argument('--gzip', action='append', dest='gzip_pattern', default=[], help="make gzipped copies of primary data files in pwd from specific libraries where names match GZIP_PATTERN, which should be of the form '<sample>/<library>'")
    p.add_argument('--md5', action='append', dest='md5_pattern', default=[], help="calculate md5sums for primary data files from specific libraries where names match MD5_PATTERN, which should be of the form '<sample>/<library>'")
    p.add_argument('--md5sum', action='store_true', dest='md5sum', help='calculate md5sums for all primary data files (equivalent to --md5=*/*)')
    p.add_argument('--no-warnings', action='store_true', dest='no_warnings', help='suppress warning messages')
    p.add_argument('--debug', action='store_true', dest='debug', help='turn on debugging output (nb overrides --no-warnings)')
    p.add_argument('solid_run_dirs', metavar='solid_run_dir', nargs='+', help='SOLiD run directory to operate on')
    args = p.parse_args()
    # --debug wins over --no-warnings, per the help text.
    if args.debug:
        logging.getLogger().setLevel(logging.DEBUG)
    elif args.no_warnings:
        logging.getLogger().setLevel(logging.ERROR)
    # Validate every supplied path before doing any work.
    for arg in args.solid_run_dirs:
        if (not os.path.isdir(arg)):
            logging.error(("'%s' not found or not a directory" % arg))
            sys.exit(1)
    if (len(args.solid_run_dirs) == 1):
        if args.only:
            solid_dirs = [args.solid_run_dirs[0]]
        else:
            # Expand a single directory to all associated run directories.
            solid_dirs = list_run_directories(args.solid_run_dirs[0])
    else:
        # FIX: was `solid_dirs = args`, which assigned the Namespace object.
        solid_dirs = args.solid_run_dirs
    if args.xls:
        spreadsheet = (os.path.splitext(os.path.basename(solid_dirs[0]))[0] + '.xls')
        print(('Writing spreadsheet %s' % spreadsheet))
    report = args.report
    # With no action flags at all, default to printing a report.
    if (not (args.report or args.layout or args.xls or args.verify or args.rsync or args.md5sum or args.copy_pattern or args.gzip_pattern or args.md5_pattern)):
        report = True
    solid_runs = []
    for solid_dir in solid_dirs:
        run = SolidRun(solid_dir)
        if (not run):
            logging.error(('Error extracting run data for %s' % solid_dir))
            sys.exit(1)
        else:
            solid_runs.append(run)
    if report:
        report_run(solid_runs, args.report_paths)
    if args.xls:
        try:
            # Imported lazily so the spreadsheet dependency is optional.
            import bcftbx.Spreadsheet as Spreadsheet
            write_spreadsheet(solid_runs, spreadsheet)
        except ImportError as ex:
            logging.error(('Unable to write spreadsheet: %s' % ex))
    if args.layout:
        suggest_analysis_layout(solid_runs)
    if args.rsync:
        suggest_rsync_command(solid_runs)
    if args.copy_pattern:
        copy_data(solid_runs, args.copy_pattern)
    if args.gzip_pattern:
        gzip_data(solid_runs, args.gzip_pattern)
    if (args.md5_pattern or args.md5sum):
        if args.md5sum:
            # --md5sum is shorthand for matching every sample/library.
            md5_pattern = ['*/*']
        else:
            md5_pattern = args.md5_pattern
        md5_checksums(solid_runs, md5_pattern)
    if args.verify:
        status = verify_runs(solid_dirs)
        sys.exit(status)
class TestIssues(TestCase):
    """Regression tests for reported aenum issues (auto values, kwds, extend_enum)."""

    def test_auto_multi_int(self):
        """AddValueEnum + MultiValueEnum: members get both an auto int and '' as values."""
        class Measurement(int, MultiValueEnum, AddValueEnum):
            _order_ = 'one two three'
            _start_ = 0
            one = ''
            two = ''
            three = ''
        # Primary values come from AddValueEnum counting up from _start_.
        self.assertEqual([m.value for m in Measurement], [0, 1, 2])
        self.assertEqual([m.name for m in Measurement], ['one', 'two', 'three'])
        # Lookup by the alias value '' — NOTE(review): all three asserts below
        # resolve Measurement('') against different members; presumably aenum
        # resolves '' to the first matching member in each context — confirm.
        self.assertIs(Measurement(''), Measurement.one)
        self.assertIs(Measurement(0), Measurement.one)
        self.assertIs(Measurement(''), Measurement.two)
        self.assertIs(Measurement(1), Measurement.two)
        self.assertIs(Measurement(''), Measurement.three)
        self.assertIs(Measurement(2), Measurement.three)

    def test_auto_kwds(self):
        """auto(**kwds) forwards keyword arguments into a custom __new__."""
        class Item(Enum):
            _order_ = 'A B'
            A = auto(size=100, requirements={})
            # `A` here is still the auto() placeholder, resolved in __new__.
            B = auto(size=200, requirements={A: 1})
            def __new__(cls, value, size, requirements):
                obj = object.__new__(cls)
                obj._value_ = value
                obj.size = size
                new_requirements = {}
                for (k, v) in requirements.items():
                    if isinstance(k, auto):
                        # Replace the auto placeholder with the real member.
                        k = k.enum_member
                    new_requirements[k] = v
                obj.requirements = new_requirements
                return obj
        self.assertEqual((Item.A.value, Item.A.size, Item.A.requirements), (1, 100, {}))
        self.assertEqual((Item.B.value, Item.B.size, Item.B.requirements), (2, 200, {Item.A: 1}))

    def test_auto_kwds_and_gnv(self):
        """auto(**kwds) combined with a custom _generate_next_value_ (name-valued)."""
        class Item(Enum):
            _order_ = 'A B'
            def _generate_next_value_(name, start, count, values, *args, **kwds):
                # Members take their own name as value instead of an int.
                return name
            A = auto(size=100, requirements={})
            B = auto(size=200, requirements={A: 1})
            def __new__(cls, value, size, requirements):
                obj = object.__new__(cls)
                obj._value_ = value
                obj.size = size
                new_requirements = {}
                for (k, v) in requirements.items():
                    if isinstance(k, auto):
                        k = k.enum_member
                    new_requirements[k] = v
                obj.requirements = new_requirements
                return obj
        self.assertEqual((Item.A.value, Item.A.size, Item.A.requirements), ('A', 100, {}))
        self.assertEqual((Item.B.value, Item.B.size, Item.B.requirements), ('B', 200, {Item.A: 1}))

    def test_extend_flag(self):
        """extend_enum on a Flag adds a member usable in bitwise combinations."""
        class FlagTest(Flag):
            NONE = 0
            LOW = 1
            MID = 2
        extend_enum(FlagTest, 'HIGH', 4)
        self.assertEqual((FlagTest.LOW | FlagTest.HIGH), FlagTest(5))
        self.assertEqual((FlagTest.LOW | FlagTest.HIGH).value, 5)

    def test_extend_unhashable(self):
        """extend_enum accepts unhashable (dict) values."""
        class TestEnum(Enum):
            ABC = {'id': 0, 'value': 'abc'}
            DEF = {'id': 1, 'value': 'def'}
        # Random member name avoids collisions across test runs.
        rand = uuid.uuid4().hex
        new_value = {'id': 99, 'value': 'new'}
        extend_enum(TestEnum, rand, new_value)
.parametrize('telescope', ['ut1', 'ut2', 'ut3', 'antu', 'kueyen', 'melipal'])
.parametrize('with_spiders', [True, False])
def test_vlt_ut_123_aperture(telescope, with_spiders):
name = 'vlt/pupil_ut123'
name += ('_without_spiders' if (not with_spiders) else '')
check_aperture(make_vlt_aperture, 8.0, name, check_normalization=True, check_segmentation=True, telescope=telescope, with_spiders=with_spiders) |
class AnalysisPlugin(YaraBasePlugin):
    """FACT analysis plugin: rule-based detection of known vulnerabilities.

    Combines three sources of findings: yara matches on the binary, rule
    evaluation over previous analysis results, and a Ghidra-based check for
    the NetUSB CVE-2021-45608.
    """
    NAME = 'known_vulnerabilities'
    DESCRIPTION = 'Rule based detection of known vulnerabilities like Heartbleed'
    DEPENDENCIES = ['file_hashes', 'software_components']
    VERSION = '0.2.1'
    FILE = __file__

    def process_object(self, file_object):
        """Run all vulnerability checks on *file_object* and record the results."""
        file_object = super().process_object(file_object)
        # Remove the raw yara output; it is re-added below in processed form.
        yara_results = file_object.processed_analysis.pop(self.NAME)
        file_object.processed_analysis[self.NAME] = {}
        binary_vulnerabilities = self._post_process_yara_results(yara_results)
        matched_vulnerabilities = self._check_vulnerabilities(file_object.processed_analysis)
        # The expensive Ghidra check only runs when NetUSB was detected.
        if ('NetUSB' in file_object.processed_analysis.get('software_components', {}).get('result', {})):
            matched_vulnerabilities.extend(self._check_netusb_vulnerability(file_object.binary))
        for (name, vulnerability) in (binary_vulnerabilities + matched_vulnerabilities):
            file_object.processed_analysis[self.NAME][name] = vulnerability
        file_object.processed_analysis[self.NAME]['summary'] = [name for (name, _) in (binary_vulnerabilities + matched_vulnerabilities)]
        self.add_tags(file_object, (binary_vulnerabilities + matched_vulnerabilities))
        return file_object

    def add_tags(self, file_object, vulnerability_list):
        """Attach colored tags for each scored finding (red = high, orange = other)."""
        for (name, details) in vulnerability_list:
            if (details['score'] == 'none'):
                continue
            if (details['score'] == 'high'):
                # High-severity findings propagate to parent firmware objects.
                propagate = True
                tag_color = TagColor.RED
            else:
                propagate = False
                tag_color = TagColor.ORANGE
            self.add_analysis_tag(file_object=file_object, tag_name=name, value=name.replace('_', ' '), color=tag_color, propagate=propagate)

    # NOTE(review): no `self` parameter but called as `self._post_process_yara_results(...)`
    # above — a `@staticmethod` decorator appears to have been lost; confirm.
    def _post_process_yara_results(yara_results):
        """Convert raw yara results into (rule_name, metadata) pairs."""
        yara_results.pop('summary')
        new_results = []
        for result in yara_results:
            meta = yara_results[result]['meta']
            new_results.append((result, meta))
        return new_results

    # NOTE(review): same missing-`self`/`@staticmethod` situation as above — confirm.
    def _check_vulnerabilities(processed_analysis):
        """Evaluate the static VULNERABILITIES rules against prior analysis results."""
        matched_vulnerabilities = []
        for vulnerability in VULNERABILITIES:
            if evaluate(processed_analysis, vulnerability.rule):
                vulnerability_data = vulnerability.get_dict()
                name = vulnerability_data.pop('short_name')
                matched_vulnerabilities.append((name, vulnerability_data))
        return matched_vulnerabilities

    def _check_netusb_vulnerability(self, input_file_data: bytes):
        """Run the Ghidra docker check for CVE-2021-45608 on *input_file_data*.

        Returns a one-element finding list, or an empty list when the docker
        run produced no parseable result.
        """
        with TemporaryDirectory(prefix='known_vulns_', dir=config.backend.docker_mount_base_dir) as tmp_dir:
            tmp_dir_path = Path(tmp_dir)
            ghidra_input_file = (tmp_dir_path / 'ghidra_input')
            ghidra_input_file.write_bytes(input_file_data)
            # Best effort: a failed/timed-out container leaves no result.json,
            # which is handled by the except clause below.
            with suppress(DockerException, TimeoutError):
                run_docker_container('fact/known-vulnerabilities', logging_label=self.NAME, timeout=60, mounts=[Mount('/io', tmp_dir, type='bind')])
            try:
                ghidra_results = json.loads((tmp_dir_path / 'result.json').read_text())
                # NOTE(review): the 'link' entry below is corrupted (the URL
                # appears to have been stripped from the literal) — restore it.
                return [('CVE-2021-45608', {'description': 'CVE-2021-45608: vulnerability in KCodes NetUSB kernel module', 'score': ('high' if (ghidra_results['is_vulnerable'] is True) else 'none'), 'reliability': 90, 'link': ' 'short_name': 'CVE-2021-45608', 'additional_data': ghidra_results})]
            except (json.JSONDecodeError, FileNotFoundError):
                return []
def get_earliest_load_date(keys, default=None):
    """Return the earliest load date found for *keys*.

    Keys with no (falsy) load date are skipped. When none of the keys has a
    date, a warning is logged and *default* is returned.
    """
    found_dates = [d for d in (get_last_load_date(key) for key in keys) if d]
    if not found_dates:
        logger.warning(f'No earliest load date could be calculated because no dates for keys `{keys}` were found!')
        return default
    return min(found_dates)
class AsyncMultiplexer(Runnable, WithLogger):
    """Asyncio-based multiplexer routing envelopes between multiple connections.

    Owns a set of `Connection` objects, a shared inbound queue, and an
    outbound queue consumed by a send loop. Envelope routing precedence is
    implemented in `_get_connection_id_from_envelope`.

    NOTE(review): several no-argument accessors below (`default_connection`,
    `in_queue`, `out_queue`, `connections`, `is_connected`, `default_routing`,
    `connection_status`) read like properties, and the bare `_routing.setter`
    line is clearly a damaged decorator remnant — the `@property` decorators
    appear to have been stripped from this copy; confirm against upstream.
    """
    # Timeouts (seconds) for the individual lifecycle operations.
    DISCONNECT_TIMEOUT = 5
    CONNECT_TIMEOUT = 60
    SEND_TIMEOUT = 60
    # Guards connect/disconnect so they cannot interleave.
    _lock: asyncio.Lock

    def __init__(self, connections: Optional[Sequence[Connection]]=None, default_connection_index: int=0, loop: Optional[AbstractEventLoop]=None, exception_policy: ExceptionPolicyEnum=ExceptionPolicyEnum.propagate, threaded: bool=False, agent_name: str='standalone', default_routing: Optional[Dict[(PublicId, PublicId)]]=None, default_connection: Optional[PublicId]=None, protocols: Optional[List[Union[(Protocol, Message)]]]=None) -> None:
        """Initialize the multiplexer.

        :param connections: connections to manage (may be empty).
        :param default_connection_index: index used to pick the default
            connection when `default_connection` is not given.
        :param loop: event loop to use; a new one is created when None.
        :param exception_policy: how send-loop exceptions are handled.
        :param threaded: run in a separate thread (via Runnable).
        :param agent_name: name used for logger namespacing.
        :param default_routing: protocol-id -> connection-id routing table.
        :param default_connection: explicit default connection public id.
        :param protocols: protocols used to resolve specification ids.
        """
        self._exception_policy: ExceptionPolicyEnum = exception_policy
        logger = get_logger(__name__, agent_name)
        WithLogger.__init__(self, logger=logger)
        Runnable.__init__(self, loop=loop, threaded=threaded)
        self._connections: List[Connection] = []
        self._id_to_connection: Dict[(PublicId, Connection)] = {}
        self._default_connection: Optional[Connection] = None
        connections = (connections or [])
        if ((not default_connection) and connections):
            # Fall back to the indexed connection when no explicit default.
            enforce(((len(connections) - 1) >= default_connection_index), 'default_connection_index os out of connections range!')
            default_connection = connections[default_connection_index].connection_id
        if default_connection:
            enforce(bool([i.connection_id.same_prefix(default_connection) for i in connections]), f'Default connection {default_connection} does not present in connections list!')
        self._default_routing = {}
        self._setup((connections or []), default_routing, default_connection)
        self._connection_status = MultiplexerStatus()
        # Maps protocol specification ids to protocol ids for envelope routing.
        self._specification_id_to_protocol_id = {p.protocol_specification_id: p.protocol_id for p in (protocols or [])}
        # Learned sender-address -> connection routing (see _update_routing_helper).
        self._routing_helper: Dict[(Address, PublicId)] = {}
        self._in_queue = AsyncFriendlyQueue()
        self._out_queue = None
        self._recv_loop_task = None
        self._send_loop_task = None
        self._loop: asyncio.AbstractEventLoop = (loop if (loop is not None) else asyncio.new_event_loop())
        self.set_loop(self._loop)

    def default_connection(self) -> Optional[Connection]:
        """Return the default connection, if any."""
        return self._default_connection

    def in_queue(self) -> AsyncFriendlyQueue:
        """Return the queue of received envelopes."""
        return self._in_queue

    def out_queue(self) -> asyncio.Queue:
        """Return the outgoing envelope queue; raises if the loop never started."""
        if (self._out_queue is None):
            raise ValueError('Accessing out queue before loop is started.')
        return self._out_queue

    def connections(self) -> Tuple[(Connection, ...)]:
        """Return the managed connections as an immutable tuple."""
        return tuple(self._connections)

    def is_connected(self) -> bool:
        """Return True when the multiplexer is in the connected state."""
        return self.connection_status.is_connected

    def default_routing(self) -> Dict[(PublicId, PublicId)]:
        """Return the protocol-id -> connection-id routing table."""
        return self._default_routing

    # NOTE(review): damaged decorator remnant — presumably `@default_routing.setter`.
    _routing.setter
    def default_routing(self, default_routing: Dict[(PublicId, PublicId)]) -> None:
        """Replace the default routing table."""
        self._default_routing = default_routing

    def connection_status(self) -> MultiplexerStatus:
        """Return the connection status object."""
        return self._connection_status

    async def run(self) -> None:
        """Connect, run receive/send loops to completion, then disconnect."""
        self.set_loop(asyncio.get_event_loop())
        try:
            (await self.connect())
            if ((not self._recv_loop_task) or (not self._send_loop_task)):
                raise ValueError('Multiplexer is not connected properly.')
            (await asyncio.gather(self._recv_loop_task, self._send_loop_task))
        finally:
            (await self.disconnect())

    def _get_protocol_id_for_envelope(self, envelope: Envelope) -> PublicId:
        """Resolve the protocol id of *envelope*, from its message or spec id."""
        if isinstance(envelope.message, Message):
            return cast(Message, envelope.message).protocol_id
        protocol_id = self._specification_id_to_protocol_id.get(envelope.protocol_specification_id)
        if (not protocol_id):
            raise ValueError(f'Can not resolve protocol id for {envelope}, pass protocols supported to multipelxer instance {self._specification_id_to_protocol_id}')
        return protocol_id

    def set_loop(self, loop: AbstractEventLoop) -> None:
        """Bind the multiplexer to *loop* and recreate the lock on it."""
        self._loop = loop
        self._lock = asyncio.Lock()

    def _handle_exception(self, fn: Callable, exc: Exception) -> None:
        """Apply the configured exception policy to *exc* raised in *fn*."""
        if (self._exception_policy == ExceptionPolicyEnum.just_log):
            self.logger.exception(f'Exception raised in {fn}')
        elif (self._exception_policy == ExceptionPolicyEnum.propagate):
            raise exc
        elif (self._exception_policy == ExceptionPolicyEnum.stop_and_exit):
            # Schedule a disconnect instead of raising.
            self._loop.create_task(AsyncMultiplexer.disconnect(self))
        else:
            raise ValueError(f'Unknown exception policy: {self._exception_policy}')

    def add_connection(self, connection: Connection, is_default: bool=False) -> None:
        """Register *connection*; optionally make it the default."""
        if (connection.connection_id in self._id_to_connection):
            self.logger.warning(f'A connection with id {connection.connection_id} was already added. Replacing it...')
        self._connections.append(connection)
        self._id_to_connection[connection.connection_id] = connection
        if is_default:
            self._default_connection = connection

    def _connection_consistency_checks(self) -> None:
        """Enforce that connection ids are unique (empty list only logs)."""
        if (len(self.connections) == 0):
            self.logger.debug('List of connections is empty.')
        enforce((len(set((c.connection_id for c in self.connections))) == len(self.connections)), 'Connection names must be unique.')

    def _set_default_connection_if_none(self) -> None:
        """Default to the first connection when none was chosen explicitly."""
        if ((self._default_connection is None) and bool(self.connections)):
            self._default_connection = self.connections[0]

    async def connect(self) -> None:
        """Connect all connections and start the receive/send loops."""
        self._loop = asyncio.get_event_loop()
        self.logger.debug('Multiplexer connecting...')
        self._connection_consistency_checks()
        self._set_default_connection_if_none()
        self._out_queue = asyncio.Queue()
        async with self._lock:
            if self.connection_status.is_connected:
                self.logger.debug('Multiplexer already connected.')
                return
            try:
                self.connection_status.set(ConnectionStates.connecting)
                (await self._connect_all())
                if all((c.is_connected for c in self._connections)):
                    self.connection_status.set(ConnectionStates.connected)
                else:
                    raise AEAConnectionError('Failed to connect the multiplexer.')
                self._recv_loop_task = self._loop.create_task(self._receiving_loop())
                self._send_loop_task = self._loop.create_task(self._send_loop())
                self.logger.debug('Multiplexer connected and running.')
            except (CancelledError, asyncio.CancelledError):
                (await self._stop())
                raise asyncio.CancelledError()
            except AEAConnectionError:
                (await self._stop())
                raise
            except Exception as e:
                self.logger.exception('Exception on connect:')
                (await self._stop())
                raise AEAConnectionError(f'Failed to connect the multiplexer: Error: {repr(e)}') from e

    async def disconnect(self) -> None:
        """Stop the loops and disconnect all connections."""
        self.logger.debug('Multiplexer disconnecting...')
        async with self._lock:
            if self.connection_status.is_disconnected:
                self.logger.debug('Multiplexer already disconnected.')
                return
            try:
                self.connection_status.set(ConnectionStates.disconnecting)
                (await asyncio.wait_for(self._stop(), timeout=60))
                self.logger.debug('Multiplexer disconnected.')
            except CancelledError:
                self.logger.debug('Multiplexer.disconnect cancellation!')
                raise
            except Exception as e:
                self.logger.exception('Exception on disconnect:')
                raise AEAConnectionError(f'Failed to disconnect the multiplexer: Error: {repr(e)}') from e

    async def _stop_receive_send_loops(self) -> None:
        """Cancel and await the receive loop, then the send loop."""
        self.logger.debug('Stopping receive loop...')
        if self._recv_loop_task:
            self._recv_loop_task.cancel()
            with suppress(Exception, asyncio.CancelledError):
                (await self._recv_loop_task)
        self._recv_loop_task = None
        self.logger.debug('Receive loop stopped.')
        self.logger.debug('Stopping send loop...')
        if self._send_loop_task:
            # Poison-pill (None) makes the send loop exit cleanly if possible.
            (await self.out_queue.put(None))
            self._send_loop_task.cancel()
            with suppress(Exception, asyncio.CancelledError):
                (await self._send_loop_task)
            self._send_loop_task = None
            self.logger.debug('Send loop stopped.')

    def _check_and_set_disconnected_state(self) -> None:
        """Mark the multiplexer disconnected, or raise naming the stragglers."""
        if all((c.is_disconnected for c in self.connections)):
            self.connection_status.set(ConnectionStates.disconnected)
        else:
            connections_left = [str(c.connection_id) for c in self.connections if (not c.is_disconnected)]
            raise AEAConnectionError(f"Failed to disconnect multiplexer, some connections are not disconnected within timeout: {', '.join(connections_left)}")

    async def _stop(self) -> None:
        """Stop loops, disconnect everything, and verify the final state."""
        self.logger.debug('Stopping multiplexer...')
        (await asyncio.wait_for(self._stop_receive_send_loops(), timeout=60))
        (await asyncio.wait_for(self._disconnect_all(), timeout=60))
        self._check_and_set_disconnected_state()
        self.logger.debug('Multiplexer stopped.')

    async def _connect_all(self) -> None:
        """Connect each registered connection, aborting on the first failure."""
        self.logger.debug('Starting multiplexer connections.')
        connected = []
        for (connection_id, connection) in self._id_to_connection.items():
            try:
                (await asyncio.wait_for(self._connect_one(connection_id), timeout=self.CONNECT_TIMEOUT))
                connected.append(connection_id)
            except Exception as e:
                if (not isinstance(e, (asyncio.CancelledError, CancelledError))):
                    self.logger.exception('Error while connecting {}: {}'.format(str(type(connection)), repr(e)))
                raise
        self.logger.debug('Multiplexer connections are set.')

    async def _connect_one(self, connection_id: PublicId) -> None:
        """Connect a single connection, skipping it when already connected."""
        connection = self._id_to_connection[connection_id]
        self.logger.debug('Processing connection {}'.format(connection.connection_id))
        if connection.is_connected:
            self.logger.debug('Connection {} already established.'.format(connection.connection_id))
        else:
            (await connection.connect())
            self.logger.debug('Connection {} has been set up successfully.'.format(connection.connection_id))

    async def _disconnect_all(self) -> None:
        """Disconnect every connection, logging (not raising) per-connection errors."""
        self.logger.debug('Tear the multiplexer connections down.')
        for (connection_id, connection) in self._id_to_connection.items():
            try:
                (await asyncio.wait_for(self._disconnect_one(connection_id), timeout=self.DISCONNECT_TIMEOUT))
            except FuturesTimeoutError:
                self.logger.debug(f'Disconnection of `{connection_id}` timed out.')
            except Exception as e:
                self.logger.exception('Error while disconnecting {}: {}'.format(str(type(connection)), str(e)))

    async def _disconnect_one(self, connection_id: PublicId) -> None:
        """Disconnect a single connection, skipping it when already down."""
        connection = self._id_to_connection[connection_id]
        self.logger.debug('Processing connection {}'.format(connection.connection_id))
        if (not connection.is_connected):
            self.logger.debug('Connection {} already disconnected.'.format(connection.connection_id))
        else:
            (await connection.disconnect())
            self.logger.debug('Connection {} has been disconnected successfully.'.format(connection.connection_id))

    async def _send_loop(self) -> None:
        """Consume the out queue and send each envelope; None terminates."""
        if (not self.is_connected):
            self.logger.debug('Sending loop not started. The multiplexer is not connected.')
            return
        try:
            while self.is_connected:
                self.logger.debug('Waiting for outgoing envelopes...')
                envelope = (await self.out_queue.get())
                if (envelope is None):
                    self.logger.debug('Received empty envelope. Quitting the sending loop...')
                    return None
                self.logger.debug('Sending envelope {}'.format(str(envelope)))
                (await self._send(envelope))
        except asyncio.CancelledError:
            self.logger.debug('Sending loop cancelled.')
            raise
        except Exception as e:
            self.logger.exception('Error in the sending loop: {}'.format(str(e)))
            raise

    async def _receiving_loop(self) -> None:
        """Await receives from all connections and feed envelopes into in_queue."""
        self.logger.debug('Starting receving loop...')
        # One pending receive task per connection; replenished as they complete.
        task_to_connection = {asyncio.ensure_future(conn.receive()): conn for conn in self.connections}
        try:
            while (self.connection_status.is_connected and (len(task_to_connection) > 0)):
                (done, _pending) = (await asyncio.wait(task_to_connection.keys(), return_when=asyncio.FIRST_COMPLETED))
                for task in done:
                    connection = task_to_connection.pop(task)
                    envelope = task.result()
                    if (envelope is not None):
                        self._update_routing_helper(envelope, connection)
                        self.in_queue.put_nowait(envelope)
                    if connection.is_connected:
                        # Re-arm a receive for this connection.
                        new_task = asyncio.ensure_future(connection.receive())
                        task_to_connection[new_task] = connection
        except asyncio.CancelledError:
            self.logger.debug('Receiving loop cancelled.')
            raise
        except Exception as e:
            self.logger.exception('Error in the receiving loop: {}'.format(str(e)))
            raise
        finally:
            # Cancel any still-pending receive tasks on the way out.
            for t in task_to_connection.keys():
                t.cancel()
            self.logger.debug('Receiving loop terminated.')

    async def _send(self, envelope: Envelope) -> None:
        """Route *envelope* to a connection and send it with a timeout."""
        envelope_protocol_id = self._get_protocol_id_for_envelope(envelope)
        connection_id = self._get_connection_id_from_envelope(envelope, envelope_protocol_id)
        connection = (self._get_connection(connection_id) if (connection_id is not None) else None)
        if (connection is None):
            self.logger.warning(f'Dropping envelope, no connection available for sending: {envelope}')
            return
        if (not self._is_connection_supported_protocol(connection, envelope_protocol_id)):
            return
        try:
            (await asyncio.wait_for(connection.send(envelope), timeout=self.SEND_TIMEOUT))
        except Exception as e:
            self._handle_exception(self._send, e)

    def _get_connection_id_from_envelope(self, envelope: Envelope, envelope_protocol_id: PublicId) -> Optional[PublicId]:
        """Pick a connection id for *envelope*.

        Precedence: component-to-component `to` field, envelope context,
        learned routing helper, default routing table, default connection.
        """
        self.logger.debug(f'Routing envelope: {envelope}')
        if envelope.is_component_to_component_message:
            connection_id = envelope.to_as_public_id
            self.logger.debug('Using envelope `to` field as connection_id: {}'.format(connection_id))
            enforce((connection_id is not None), 'Connection id cannot be None by envelope construction.')
            return connection_id
        if ((envelope.context is not None) and (envelope.context.connection_id is not None)):
            connection_id = envelope.context.connection_id
            self.logger.debug('Using envelope context connection_id: {}'.format(connection_id))
            return connection_id
        if (envelope.to in self._routing_helper):
            connection_id = self._routing_helper[envelope.to]
            self.logger.debug('Using routing helper with connection_id: {}'.format(connection_id))
            return connection_id
        if (envelope_protocol_id in self.default_routing):
            connection_id = self.default_routing[envelope_protocol_id]
            self.logger.debug('Using default routing: {}'.format(connection_id))
            return connection_id
        connection_id = (self.default_connection.connection_id if (self.default_connection is not None) else None)
        self.logger.debug('Using default connection: {}'.format(connection_id))
        return connection_id

    def _get_connection(self, connection_id: PublicId) -> Optional[Connection]:
        """Look up a connection by id, falling back to a same-prefix match."""
        conn_ = self._id_to_connection.get(connection_id, None)
        if (conn_ is not None):
            return conn_
        for (id_, conn_) in self._id_to_connection.items():
            if id_.same_prefix(connection_id):
                return conn_
        self.logger.error(f'No connection registered with id: {connection_id}')
        return None

    def _is_connection_supported_protocol(self, connection: Connection, protocol_id: PublicId) -> bool:
        """Return False (with a warning) when *connection* excludes or restricts *protocol_id*."""
        if (protocol_id in connection.excluded_protocols):
            self.logger.warning(f'Connection {connection.connection_id} does not support protocol {protocol_id}. It is explicitly excluded.')
            return False
        if (connection.restricted_to_protocols and (protocol_id not in connection.restricted_to_protocols)):
            self.logger.warning(f'Connection {connection.connection_id} does not support protocol {protocol_id}. The connection is restricted to protocols in {connection.restricted_to_protocols}.')
            return False
        return True

    def get(self, block: bool=False, timeout: Optional[float]=None) -> Optional[Envelope]:
        """Synchronously get an inbound envelope; raises Empty when none available."""
        try:
            return self.in_queue.get(block=block, timeout=timeout)
        except queue.Empty:
            raise Empty

    async def async_get(self) -> Envelope:
        """Asynchronously get an inbound envelope; raises Empty when none available."""
        try:
            return (await self.in_queue.async_get())
        except queue.Empty:
            raise Empty

    async def async_wait(self) -> None:
        """Wait until an inbound envelope is available."""
        return (await self.in_queue.async_wait())

    async def _put(self, envelope: Envelope) -> None:
        """Enqueue *envelope* for sending (async variant)."""
        (await self.out_queue.put(envelope))

    def put(self, envelope: Envelope) -> None:
        """Enqueue *envelope* for sending, thread-safely when running threaded."""
        if self._threaded:
            self._loop.call_soon_threadsafe(self.out_queue.put_nowait, envelope)
        else:
            self.out_queue.put_nowait(envelope)

    def _setup(self, connections: Collection[Connection], default_routing: Optional[Dict[(PublicId, PublicId)]]=None, default_connection: Optional[PublicId]=None) -> None:
        """Reset routing/connection state and register *connections*."""
        self.default_routing = (default_routing or {})
        self._connections = []
        self._id_to_connection = {}
        for c in connections:
            self.add_connection(c, (c.public_id == default_connection))

    def _update_routing_helper(self, envelope: Envelope, connection: Connection) -> None:
        """Remember which connection a sender address arrived on for later routing."""
        if envelope.is_component_to_component_message:
            return
        self._routing_helper[envelope.sender] = connection.public_id
# NOTE(review): the lines below are click decorator remnants — the leading
# `@click.command`/`@click.argument`/`@click.option`/`@click.pass_context`
# prefixes appear to have been stripped from this copy; confirm upstream.
()
('operation')
('params', nargs=(- 1))
('--path', type=click.Path(exists=True, dir_okay=False))
('--format', type=FORMAT_SCHEMA_CHOICES)
('--encoding', type=ENCODING_CHOICES)
('--verbose', '-v', is_flag=True, default=False)
_context
def request(ctx, operation, params, path, format, encoding, verbose):
    """Perform an API *operation* against the schema at *path*.

    Loads the schema, builds a client, issues the request with key=value
    *params*, and prints the JSON result. Exits 1 with a human-readable
    message on schema parse errors, client errors, or error responses.
    """
    options = {'schema': {'path': path, 'format': format, 'encoding': encoding}}
    # Command-line options are merged with any stored configuration.
    config = _load_config(options, verbose=verbose)
    path = config['schema']['path']
    format = config['schema']['format']
    encoding = config['schema']['encoding']
    with open(path, 'rb') as schema_file:
        schema = schema_file.read()
    # Parse "key=value" parameter strings into a dict.
    params = [param.partition('=') for param in params]
    params = dict([(key, value) for (key, sep, value) in params])
    session = ctx.obj
    if verbose:
        session = DebugSession(session)
    try:
        client = Client(schema, format=format, encoding=encoding, session=session)
    except (typesystem.ParseError, typesystem.ValidationError) as exc:
        # Tailor the error summary to the failing layer (encoding vs format).
        if isinstance(exc, typesystem.ParseError):
            summary = {'json': 'Invalid JSON.', 'yaml': 'Invalid YAML.', None: 'Parse error.'}[encoding]
        else:
            summary = {'config': 'Invalid APIStar config.', 'jsonschema': 'Invalid JSONSchema document.', 'openapi': 'Invalid OpenAPI schema.', 'swagger': 'Invalid Swagger schema.', None: 'Invalid schema.'}[format]
        _echo_error(exc, schema, summary=summary, verbose=verbose)
        sys.exit(1)
    try:
        result = client.request(operation, **params)
    except ClientError as exc:
        for message in exc.messages:
            if (message.code == 'invalid_property'):
                text = ('* Invalid parameter "%s".' % message.index[0])
            elif (message.code == 'required'):
                text = ('* Missing required parameter "%s".' % message.index[0])
            else:
                text = ('* %s' % message.text)
            click.echo(text)
        click.echo((click.style(' ', fg='red') + 'Client error'))
        sys.exit(1)
    except ErrorResponse as exc:
        click.echo(json.dumps(exc.content, indent=4))
        click.echo((click.style(' ', fg='red') + exc.title))
        sys.exit(1)
    click.echo(json.dumps(result, indent=4))
class TestDockerProvisioner():
    """Tests for provisioner.DockerProvisioner's config vars and compose rendering.

    NOTE(review): the bare `('uuid.uuid4')` lines before each test look like
    stripped `@mock.patch(...)` decorators — confirm upstream. The dict
    literals below also contain corrupted entries (`' '39200'`) where a key
    appears to have been redacted.
    """
    ('uuid.uuid4')
    def test_provisioning_with_defaults(self, uuid4):
        """Default car variables should yield the expected config and compose file."""
        # Pin the uuid so the data directory path is deterministic.
        uuid4.return_value = '9dbc682e-d32a-4669-8fbe-56fb77120dd4'
        node_ip = '10.17.22.33'
        node_root_dir = tempfile.gettempdir()
        log_dir = os.path.join(node_root_dir, 'logs', 'server')
        heap_dump_dir = os.path.join(node_root_dir, 'heapdump')
        data_dir = os.path.join(node_root_dir, 'data', '9dbc682e-d32a-4669-8fbe-56fb77120dd4')
        rally_root = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, 'esrally'))
        c = team.Car('unit-test-car', None, '/tmp', variables={'docker_image': 'docker.elastic.co/elasticsearch/elasticsearch-oss'})
        docker = provisioner.DockerProvisioner(car=c, node_name='rally-node-0', cluster_name='rally-benchmark', ip=node_ip, node_root_dir=node_root_dir, distribution_version='6.3.0', rally_root=rally_root)
        assert (docker.config_vars == {'cluster_name': 'rally-benchmark', 'node_name': 'rally-node-0', 'install_root_path': '/usr/share/elasticsearch', 'data_paths': ['/usr/share/elasticsearch/data'], 'log_path': '/var/log/elasticsearch', 'heap_dump_path': '/usr/share/elasticsearch/heapdump', 'discovery_type': 'single-node', 'network_host': '0.0.0.0', ' '39200', 'transport_port': '39300', 'cluster_settings': {}, 'docker_image': 'docker.elastic.co/elasticsearch/elasticsearch-oss'})
        assert (docker.docker_vars(mounts={}) == {'es_data_dir': data_dir, 'es_log_dir': log_dir, 'es_heap_dump_dir': heap_dump_dir, 'es_version': '6.3.0', 'docker_image': 'docker.elastic.co/elasticsearch/elasticsearch-oss', ' 39200, 'node_ip': node_ip, 'mounts': {}})
        docker_cfg = docker._render_template_from_file(docker.docker_vars(mounts={}))
        assert (docker_cfg == f'''version: '2.2'
 services:
   elasticsearch1:
     cap_add:
       - IPC_LOCK
     image: "docker.elastic.co/elasticsearch/elasticsearch-oss:6.3.0"
     labels:
       io.rally.description: "elasticsearch-rally"
     ports:
       - 39200:39200
       - 9300
     ulimits:
       memlock:
         soft: -1
         hard: -1
     volumes:
       - {data_dir}:/usr/share/elasticsearch/data
       - {log_dir}:/var/log/elasticsearch
       - {heap_dump_dir}:/usr/share/elasticsearch/heapdump
     healthcheck:
       test: nc -z 127.0.0.1 39200
       interval: 5s
       timeout: 2s
       retries: 10
     networks:
       - rally-es
 networks:
   rally-es:
     driver_opts:
       com.docker.network.bridge.host_binding_ipv4: "{node_ip}"''')
    ('uuid.uuid4')
    def test_provisioning_with_variables(self, uuid4):
        """Extra car variables (mem limit, cpu count) should appear in the compose file."""
        uuid4.return_value = '86f42ae0-5840-4b5b-918d-41e7907cb644'
        node_root_dir = tempfile.gettempdir()
        node_ip = '10.17.22.33'
        log_dir = os.path.join(node_root_dir, 'logs', 'server')
        heap_dump_dir = os.path.join(node_root_dir, 'heapdump')
        data_dir = os.path.join(node_root_dir, 'data', '86f42ae0-5840-4b5b-918d-41e7907cb644')
        rally_root = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, 'esrally'))
        c = team.Car('unit-test-car', None, '/tmp', variables={'docker_image': 'docker.elastic.co/elasticsearch/elasticsearch', 'docker_mem_limit': '256m', 'docker_cpu_count': 2})
        docker = provisioner.DockerProvisioner(car=c, node_name='rally-node-0', cluster_name='rally-benchmark', ip=node_ip, node_root_dir=node_root_dir, distribution_version='6.3.0', rally_root=rally_root)
        docker_cfg = docker._render_template_from_file(docker.docker_vars(mounts={}))
        assert (docker_cfg == f'''version: '2.2'
 services:
   elasticsearch1:
     cap_add:
       - IPC_LOCK
     image: "docker.elastic.co/elasticsearch/elasticsearch:6.3.0"
     labels:
       io.rally.description: "elasticsearch-rally"
     cpu_count: 2
     mem_limit: 256m
     ports:
       - 39200:39200
       - 9300
     ulimits:
       memlock:
         soft: -1
         hard: -1
     volumes:
       - {data_dir}:/usr/share/elasticsearch/data
       - {log_dir}:/var/log/elasticsearch
       - {heap_dump_dir}:/usr/share/elasticsearch/heapdump
     healthcheck:
       test: nc -z 127.0.0.1 39200
       interval: 5s
       timeout: 2s
       retries: 10
     networks:
       - rally-es
 networks:
   rally-es:
     driver_opts:
       com.docker.network.bridge.host_binding_ipv4: "{node_ip}"''')
class OptionSeriesGaugeSonificationTracksMappingLowpassResonance(Options):
    """Generated option wrapper for the
    `series.gauge.sonification.tracks.mapping.lowpass.resonance` option group.

    NOTE(review): each getter/setter pair below shares a single name; in the
    generator's output these are `@property` / `@<name>.setter` pairs whose
    decorators appear to have been stripped during extraction (as written the
    later `def` shadows the earlier one) -- confirm against the generated
    original.
    """
    def mapFunction(self):
        # Getter; None = no explicit default configured.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter; js_type=False stores the value as plain data, not JS code.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def _start():
    """Open the EDF file named in the [playback] config section and prime the
    module-level playback state (channel names, sampling rate, block counters).

    NOTE(review): this function communicates entirely through module globals
    (declared below); callers are expected to have initialised `patch` and
    `monitor` before invoking it -- confirm against the surrounding script.
    """
    global patch, name, path, monitor
    global filename, f, chanindx, channels, channelz, fSample, nSamples, replace, i, s, z, blocksize, begsample, endsample, block
    filename = patch.getstring('playback', 'file')
    monitor.info(('Reading data from ' + filename))
    f = EDF.EDFReader()
    f.open(filename)
    monitor.info(('NSignals = ' + str(f.getNSignals())))
    monitor.info(('SignalFreqs = ' + str(f.getSignalFreqs())))
    monitor.info(('NSamples = ' + str(f.getNSamples())))
    monitor.info(('SignalTextLabels = ' + str(f.getSignalTextLabels())))
    # Playback assumes a homogeneous recording: every signal must match the
    # first signal's sampling frequency and sample count.
    for chanindx in range(f.getNSignals()):
        if (f.getSignalFreqs()[chanindx] != f.getSignalFreqs()[0]):
            raise AssertionError('unequal SignalFreqs')
        if (f.getNSamples()[chanindx] != f.getNSamples()[0]):
            raise AssertionError('unequal NSamples')
    channels = f.getSignalTextLabels()
    channelz = f.getSignalTextLabels()
    fSample = f.getSignalFreqs()[0]
    nSamples = f.getNSamples()[0]
    # Apply the [replace] config section as substring substitutions on the
    # control-value names (channelz), keeping the original labels in channels.
    for replace in patch.config.items('replace'):
        monitor.debug(replace)
        for i in range(len(channelz)):
            channelz[i] = channelz[i].replace(replace[0], replace[1])
    for (s, z) in zip(channels, channelz):
        monitor.info(((('Writing channel ' + s) + ' as control value ') + z))
    # Start playback one sample at a time from the beginning of the file.
    blocksize = 1
    begsample = 0
    endsample = (blocksize - 1)
    block = 0
    # Debug aid: with every name declared global, locals() should stay empty.
    if len(locals()):
        print(('LOCALS: ' + ', '.join(locals().keys())))
def search_by_query(query: str, platform: Platforms=None) -> Iterator[dict]:
    """Yield search hits for *query*, fetched page by page.

    Only entries whose top-level category is ``GAMES`` are yielded; when
    *platform* is given, hits for other platforms are skipped as well.
    Iteration stops after the first page that comes back short.
    """
    page_size = 50
    search_options = {'hitsPerPage': page_size}
    current_page = -1
    while True:
        current_page += 1
        search_options['page'] = current_page
        hits = _search_index(query, **search_options)
        for hit in hits:
            # Skip non-game entries and, optionally, other platforms.
            if hit['topLevelCategoryCode'] != 'GAMES':
                continue
            if platform and (hit['platform'] != platform):
                continue
            yield hit
        if len(hits) < page_size:
            # A short page means the index has no further results.
            break
_each  # NOTE(review): residue of a stripped hook decorator (e.g. @hooks.before_each) -- confirm against the original.
def before_each(transaction):
    """Dredd hook: reset the database schema before every transaction and
    attach a (cached) JWT Authorization header to the outgoing request."""
    with stash['app'].app_context():
        # Drop and recreate the public schema for a clean slate, then restore
        # the citext extension and rebuild all tables plus the admin user.
        db.engine.execute('drop schema if exists public cascade')
        db.engine.execute('create schema public')
        db.engine.execute('create extension if not exists citext')
        db.create_all()
        create_super_admin(api_username, api_password)
    # Obtain a token once and reuse it across all transactions.
    if ('token' in stash):
        print('adding a token')
    else:
        stash['token'] = obtain_token()
    transaction['request']['headers']['Authorization'] = ('JWT ' + stash['token'])
def get_documents(case_id, bucket):
    """Load all documents for *case_id* and, when *bucket* is given, upload
    each PDF file image to S3.

    Returns a list of document dicts. Rows without a stored file image are
    logged and omitted from the result (matching the original behaviour).
    S3 upload failures are logged but never abort document collection.
    """
    documents = []
    with db.engine.connect() as conn:
        rs = conn.execute(CASE_DOCUMENTS, case_id)
        for row in rs:
            document = {'document_id': row['document_id'], 'category': row['category'], 'description': row['description'], 'length': row['length'], 'text': row['ocrtext'], 'document_date': row['document_date'], 'doc_order_id': row['doc_order_id']}
            if (not row['fileimage']):
                logger.error('Error uploading document ID {0} for {1} %{2}: No file image'.format(row['document_id'], row['case_type'], row['case_no']))
                # No image: nothing to link or upload for this row.
                continue
            pdf_key = 'legal/{0}/{1}/{2}'.format(get_es_type(row['case_type']), row['case_no'], row['filename'].replace(' ', '-'))
            document['url'] = ('/files/' + pdf_key)
            documents.append(document)
            # BUGFIX: the upload previously ran outside this branch, so a row
            # without a file image referenced an unbound (NameError, silently
            # swallowed) or stale pdf_key from the prior iteration. Upload only
            # when this row produced a key, and log failures instead of
            # discarding them with a bare `pass`.
            if bucket:
                try:
                    logger.debug('S3: Uploading {}'.format(pdf_key))
                    bucket.put_object(Key=pdf_key, Body=bytes(row['fileimage']), ContentType='application/pdf', ACL='public-read')
                except Exception:
                    logger.exception('S3: Upload failed for {}'.format(pdf_key))
    return documents
class OptionPlotoptionsLineStatesHoverMarker(Options):
    """Generated option wrapper for `plotOptions.line.states.hover.marker`.

    NOTE(review): each getter/setter pair below shares a single name; in the
    generator's output these are `@property` / `@<name>.setter` pairs whose
    decorators appear to have been stripped during extraction (as written the
    later `def` shadows the earlier one) -- confirm against the generated
    original.
    """
    def enabled(self):
        return self._config_get(None)
    def enabled(self, flag: bool):
        # js_type=False stores the value as plain data, not JS code.
        self._config(flag, js_type=False)
    def enabledThreshold(self):
        # Default supplied to _config_get: 2.
        return self._config_get(2)
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)
    def fillColor(self):
        return self._config_get(None)
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def height(self):
        return self._config_get(None)
    def height(self, num: float):
        self._config(num, js_type=False)
    def lineColor(self):
        # Default supplied to _config_get: '#ffffff'.
        return self._config_get('#ffffff')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        # Default supplied to _config_get: 0.
        return self._config_get(0)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        # Default supplied to _config_get: 4.
        return self._config_get(4)
    def radius(self, num: float):
        self._config(num, js_type=False)
    def width(self):
        return self._config_get(None)
    def width(self, num: float):
        self._config(num, js_type=False)
class windowsof(_coconut_has_iter):
    """Lazily yield sliding windows (tuples) of length `size` over an
    iterable, advancing `step` items between windows; when `fillvalue` is
    given, one final padded window covers any leftover items.

    Coconut-runtime generated class -- left byte-identical apart from docs.
    """
    __slots__ = ('size', 'fillvalue', 'step')
    def __new__(cls, size, iterable, fillvalue=_coconut_sentinel, step=1):
        self = _coconut.super(_coconut_windowsof, cls).__new__(cls, iterable)
        # operator.index rejects non-integral size/step up front.
        self.size = _coconut.operator.index(size)
        if (self.size < 1):
            raise _coconut.ValueError(('windowsof: size must be >= 1; not %r' % (self.size,)))
        self.fillvalue = fillvalue
        self.step = _coconut.operator.index(step)
        if (self.step < 1):
            raise _coconut.ValueError(('windowsof: step must be >= 1; not %r' % (self.step,)))
        return self
    def __reduce__(self):
        # Pickling support: rebuild from the constructor arguments.
        return (self.__class__, (self.size, self.iter, self.fillvalue, self.step))
    def __copy__(self):
        return self.__class__(self.size, self.get_new_iter(), self.fillvalue, self.step)
    def __repr__(self):
        # Only show fillvalue/step when they differ from the defaults.
        return (((((('windowsof(' + _coconut.repr(self.size)) + ', ') + _coconut.repr(self.iter)) + ((', fillvalue=' + _coconut.repr(self.fillvalue)) if (self.fillvalue is not _coconut_sentinel) else '')) + ((', step=' + _coconut.repr(self.step)) if (self.step != 1) else '')) + ')')
    def __iter__(self):
        # Maintain a deque of the last `size` items; emit it whenever full,
        # then drop `step` items to slide the window forward.
        cache = _coconut.collections.deque()
        i = 0
        for x in self.iter:
            i += 1
            cache.append(x)
            if (_coconut.len(cache) == self.size):
                (yield _coconut.tuple(cache))
                for _ in _coconut.range(self.step):
                    cache.popleft()
        # With a fillvalue, pad and emit one trailing window when the input
        # was shorter than `size` or did not end exactly on a step boundary.
        if ((self.fillvalue is not _coconut_sentinel) and ((i < self.size) or ((i % self.step) != 0))):
            while (_coconut.len(cache) < self.size):
                cache.append(self.fillvalue)
            (yield _coconut.tuple(cache))
    def __len__(self):
        # Window count for sized inputs; mirrors the __iter__ logic above
        # (the final term accounts for the optional padded window).
        if (not _coconut.isinstance(self.iter, _coconut.abc.Sized)):
            return _coconut.NotImplemented
        if (_coconut.len(self.iter) < self.size):
            return (0 if (self.fillvalue is _coconut_sentinel) else 1)
        return ((((_coconut.len(self.iter) - self.size) + self.step) // self.step) + _coconut.int((((_coconut.len(self.iter) % self.step) != 0) if (self.fillvalue is not _coconut_sentinel) else 0)))
def fortios_log_fortiguard(data, fos):
    """Dispatch the log.fortiguard filter task and translate the FortiOS
    response into Ansible's (failed, changed, result, diff) tuple."""
    fos.do_member_operation('log.fortiguard', 'filter')
    if not data['log_fortiguard_filter']:
        fos._module.fail_json(msg=('missing task body: %s' % 'log_fortiguard_filter'))
    else:
        resp = log_fortiguard_filter(data, fos)
    # Changed only when the call succeeded; an absent revision_changed key
    # defaults to "changed".
    has_changed = is_successful_status(resp) and resp.get('revision_changed', True)
    return ((not is_successful_status(resp)), has_changed, resp, {})
class TestMPCService(IsolatedAsyncioTestCase):
    """Unit tests for MPCService command-argument construction."""
    def setUp(self):
        """Patch the container and MPC game services and build the service
        under test; patches are undone automatically via addCleanup."""
        cspatcher = patch('fbpcp.service.container.ContainerService')
        gspatcher = patch('fbpcs.private_computation.service.mpc.mpc_game.MPCGameService')
        container_svc = cspatcher.start()
        mpc_game_svc = gspatcher.start()
        for patcher in (cspatcher, gspatcher):
            self.addCleanup(patcher.stop)
        self.mpc_service = MPCService(container_svc, 'test_task_definition', mpc_game_svc)
    def test_convert_cmd_args_list(self) -> None:
        """convert_cmd_args_list splits the built onedocker args into a binary
        name plus a one-element argument list, delegating argument building to
        the (mocked) game service with the expected keyword arguments."""
        built_onedocker_args = ('private_lift/lift', 'test one docker arguments')
        self.mpc_service.mpc_game_svc.build_onedocker_args = MagicMock(return_value=built_onedocker_args)
        (binary_name, cmd_args_list) = self.mpc_service.convert_cmd_args_list(game_name=TEST_GAME_NAME, game_args=GAME_ARGS, mpc_party=TEST_MPC_ROLE)
        self.assertEqual(binary_name, built_onedocker_args[0])
        self.assertEqual(cmd_args_list, [built_onedocker_args[1]])
        self.mpc_service.mpc_game_svc.build_onedocker_args.assert_called_once_with(game_name=TEST_GAME_NAME, mpc_party=TEST_MPC_ROLE, server_ip=None, input_filenames=TEST_INPUT_ARGS, input_directory=TEST_INPUT_DIRECTORY, output_filenames=TEST_OUTPUT_ARGS, output_directory=TEST_OUTPUT_DIRECTORY, concurrency=TEST_CONCURRENCY_ARGS)
_type(ofproto.OFPPSPT_OPTICAL)  # NOTE(review): residue of a stripped registration decorator (likely @OFPPortStatsProp.register_type) -- confirm against the original.
class OFPPortStatsPropOptical(OFPPortStatsProp):
    """Optical transceiver port statistics property (type OFPPSPT_OPTICAL)."""
    def __init__(self, type_=None, length=None, flags=None, tx_freq_lmda=None, tx_offset=None, tx_grid_span=None, rx_freq_lmda=None, rx_offset=None, rx_grid_span=None, tx_pwr=None, rx_pwr=None, bias_current=None, temperature=None):
        # Fields mirror the wire layout unpacked in parser(); all default to
        # None so an empty instance can be built and filled by parsing.
        self.type = type_
        self.length = length
        self.flags = flags
        self.tx_freq_lmda = tx_freq_lmda
        self.tx_offset = tx_offset
        self.tx_grid_span = tx_grid_span
        self.rx_freq_lmda = rx_freq_lmda
        self.rx_offset = rx_offset
        self.rx_grid_span = rx_grid_span
        self.tx_pwr = tx_pwr
        self.rx_pwr = rx_pwr
        self.bias_current = bias_current
        self.temperature = temperature
    def parser(cls, buf):
        """Unpack one optical-stats property from the start of *buf*.

        NOTE(review): takes `cls` and instantiates via `cls()`; presumably a
        @classmethod whose decorator was stripped by extraction -- confirm.
        """
        optical = cls()
        (optical.type, optical.length, optical.flags, optical.tx_freq_lmda, optical.tx_offset, optical.tx_grid_span, optical.rx_freq_lmda, optical.rx_offset, optical.rx_grid_span, optical.tx_pwr, optical.rx_pwr, optical.bias_current, optical.temperature) = struct.unpack_from(ofproto.OFP_PORT_STATS_PROP_OPTICAL_PACK_STR, buf, 0)
        return optical
(LOOK_DEV_TYPES)  # NOTE(review): residue of a stripped decorator (likely a publish-check registration over LOOK_DEV_TYPES) -- confirm against the original.
def check_multiple_connections_for_textures(progress_controller=None):
    """Publish check for Arnold look-dev scenes: fail if any render-related
    texture/utility node feeds more than one (non-Redshift) material.

    Raises PublishError listing the offending nodes (which are also left
    selected). Completes silently for non-Arnold renderers and for
    character/animation/previs tasks.
    """
    if (progress_controller is None):
        progress_controller = ProgressControllerBase()
    # Some shading networks need these plugins loaded before their node types
    # are visible to pm.ls below.
    plugins = ['matrixNodes', 'quatNodes']
    for plugin in plugins:
        if (not pm.pluginInfo(plugin, q=1, l=1)):
            pm.loadPlugin(plugin)
    v = staging.get('version')
    if (not v):
        progress_controller.complete()
        return
    # This check is only meaningful for Arnold scenes.
    current_renderer = pm.PyNode('defaultRenderGlobals').currentRenderer.get()
    if (current_renderer != 'arnold'):
        progress_controller.complete()
        return
    # Character/animation/previs task types are exempt from this check.
    skip_types = ['character', 'animation', 'previs']
    for t in v.naming_parents:
        for st in skip_types:
            if (t.type and t.type.name.lower().startswith(st)):
                progress_controller.complete()
                return
    from anima.dcc.mayaEnv import repr_tools
    nodes_with_multiple_materials = []
    # Bookkeeping/utility node types (and valid Redshift materials) never
    # count as offending connections.
    types_to_ignore = (['hyperLayout', 'shadingEngine', 'materialInfo', 'time', 'unitConversion', 'hyperView'] + VALID_MATERIALS['redshift'])
    nodes_to_ignore = [node for node in pm.ls() if (node.type() in types_to_ignore)]
    nodes_to_ignore += pm.ls('lambert1', r=1)
    nodes_to_ignore += pm.ls('defaultShaderList*', r=1)
    nodes_to_ignore += pm.ls('defaultTextureList*', r=1)
    nodes_to_ignore += pm.ls('defaultRenderUtilityList*', r=1)
    nodes_to_ignore += pm.ls('hyperShadePrimaryNodeEditorSavedTabsInfo*', r=1)
    nodes_to_ignore += pm.ls('MayaNodeEditorSavedTabsInfo*', r=1)
    all_nodes = [node for node in pm.ls() if (node.type() in repr_tools.RENDER_RELATED_NODE_TYPES)]
    for node in nodes_to_ignore:
        if (node in all_nodes):
            all_nodes.remove(node)
    progress_controller.maximum = len(all_nodes)
    for node in all_nodes:
        # Materials downstream of this node, excluding valid Redshift ones
        # and the node itself.
        materials_connected_to_this_node = pm.ls(node.listHistory(future=True), mat=True)
        new_materials_connected_to_this_node = []
        for mat in materials_connected_to_this_node:
            if (mat.type() not in VALID_MATERIALS['redshift']):
                new_materials_connected_to_this_node.append(mat)
        materials_connected_to_this_node = new_materials_connected_to_this_node
        if (node in materials_connected_to_this_node):
            materials_connected_to_this_node.remove(node)
        if (len(materials_connected_to_this_node) > 1):
            nodes_with_multiple_materials.append(node)
        else:
            # Zero/one material downstream: the node still offends if it fans
            # out to more than one distinct non-ignored node.
            connections_out_of_this_node = node.outputs()
            connections_out_of_this_node = [h for h in connections_out_of_this_node if (h not in nodes_to_ignore)]
            if (len(set(connections_out_of_this_node)) > 1):
                nodes_with_multiple_materials.append(node)
        progress_controller.increment()
    if (len(nodes_with_multiple_materials) > 0):
        # Leave the offending nodes selected to help the artist fix them.
        pm.select(nodes_with_multiple_materials)
        progress_controller.complete()
        raise PublishError(('Please update the scene so the following nodes are connected <br>to only <b>one material</b> (duplicate them):<br><br>%s<br><br>' % '<br>'.join(map((lambda x: x.name()), nodes_with_multiple_materials))))
    progress_controller.complete()
def upgrade():
    """Add TCF preference columns (purpose / vendor / system, each split into
    consent vs legitimate-interests) to the current-preference, last-served,
    and history tables, plus uniqueness constraints and lookup indexes.

    Purpose columns are Integer (TCF purpose IDs); vendor/system columns are
    String identifiers. History tables get columns and indexes only (no
    uniqueness constraints -- they are append-only).
    """
    # --- currentprivacypreference: new columns ---
    op.add_column('currentprivacypreference', sa.Column('purpose_consent', sa.Integer(), nullable=True))
    op.add_column('currentprivacypreference', sa.Column('purpose_legitimate_interests', sa.Integer(), nullable=True))
    op.add_column('currentprivacypreference', sa.Column('vendor_consent', sa.String(), nullable=True))
    op.add_column('currentprivacypreference', sa.Column('vendor_legitimate_interests', sa.String(), nullable=True))
    op.add_column('currentprivacypreference', sa.Column('system_consent', sa.String(), nullable=True))
    op.add_column('currentprivacypreference', sa.Column('system_legitimate_interests', sa.String(), nullable=True))
    # One current preference per (device identity, TCF attribute) ...
    op.create_unique_constraint('fides_user_device_identity_purpose_consent', 'currentprivacypreference', ['fides_user_device_provided_identity_id', 'purpose_consent'])
    op.create_unique_constraint('fides_user_device_identity_purpose_leg_interests', 'currentprivacypreference', ['fides_user_device_provided_identity_id', 'purpose_legitimate_interests'])
    op.create_unique_constraint('fides_user_device_identity_system_consent', 'currentprivacypreference', ['fides_user_device_provided_identity_id', 'system_consent'])
    op.create_unique_constraint('fides_user_device_identity_system_leg_interests', 'currentprivacypreference', ['fides_user_device_provided_identity_id', 'system_legitimate_interests'])
    op.create_unique_constraint('fides_user_device_identity_vendor_consent', 'currentprivacypreference', ['fides_user_device_provided_identity_id', 'vendor_consent'])
    op.create_unique_constraint('fides_user_device_identity_vendor_leg_interests', 'currentprivacypreference', ['fides_user_device_provided_identity_id', 'vendor_legitimate_interests'])
    # ... and per (provided identity, TCF attribute).
    op.create_unique_constraint('identity_purpose_consent', 'currentprivacypreference', ['provided_identity_id', 'purpose_consent'])
    op.create_unique_constraint('identity_purpose_leg_interests', 'currentprivacypreference', ['provided_identity_id', 'purpose_legitimate_interests'])
    op.create_unique_constraint('identity_system_consent', 'currentprivacypreference', ['provided_identity_id', 'system_consent'])
    op.create_unique_constraint('identity_system_leg_interests', 'currentprivacypreference', ['provided_identity_id', 'system_legitimate_interests'])
    op.create_unique_constraint('identity_vendor_consent', 'currentprivacypreference', ['provided_identity_id', 'vendor_consent'])
    op.create_unique_constraint('identity_vendor_leg_interests', 'currentprivacypreference', ['provided_identity_id', 'vendor_legitimate_interests'])
    # Lookup indexes over the new columns.
    op.create_index(op.f('ix_currentprivacypreference_purpose_consent'), 'currentprivacypreference', ['purpose_consent'], unique=False)
    op.create_index(op.f('ix_currentprivacypreference_purpose_legitimate_interests'), 'currentprivacypreference', ['purpose_legitimate_interests'], unique=False)
    op.create_index(op.f('ix_currentprivacypreference_system_consent'), 'currentprivacypreference', ['system_consent'], unique=False)
    op.create_index(op.f('ix_currentprivacypreference_system_legitimate_interests'), 'currentprivacypreference', ['system_legitimate_interests'], unique=False)
    op.create_index(op.f('ix_currentprivacypreference_vendor_consent'), 'currentprivacypreference', ['vendor_consent'], unique=False)
    op.create_index(op.f('ix_currentprivacypreference_vendor_legitimate_interests'), 'currentprivacypreference', ['vendor_legitimate_interests'], unique=False)
    # --- lastservednotice: same columns, indexes, and both constraint sets ---
    op.add_column('lastservednotice', sa.Column('purpose_consent', sa.Integer(), nullable=True))
    op.add_column('lastservednotice', sa.Column('purpose_legitimate_interests', sa.Integer(), nullable=True))
    op.add_column('lastservednotice', sa.Column('vendor_consent', sa.String(), nullable=True))
    op.add_column('lastservednotice', sa.Column('vendor_legitimate_interests', sa.String(), nullable=True))
    op.add_column('lastservednotice', sa.Column('system_consent', sa.String(), nullable=True))
    op.add_column('lastservednotice', sa.Column('system_legitimate_interests', sa.String(), nullable=True))
    op.create_index(op.f('ix_lastservednotice_purpose_consent'), 'lastservednotice', ['purpose_consent'], unique=False)
    op.create_index(op.f('ix_lastservednotice_purpose_legitimate_interests'), 'lastservednotice', ['purpose_legitimate_interests'], unique=False)
    op.create_index(op.f('ix_lastservednotice_system_consent'), 'lastservednotice', ['system_consent'], unique=False)
    op.create_index(op.f('ix_lastservednotice_system_legitimate_interests'), 'lastservednotice', ['system_legitimate_interests'], unique=False)
    op.create_index(op.f('ix_lastservednotice_vendor_consent'), 'lastservednotice', ['vendor_consent'], unique=False)
    op.create_index(op.f('ix_lastservednotice_vendor_legitimate_interests'), 'lastservednotice', ['vendor_legitimate_interests'], unique=False)
    op.create_unique_constraint('last_served_fides_user_device_identity_purpose_consent', 'lastservednotice', ['fides_user_device_provided_identity_id', 'purpose_consent'])
    op.create_unique_constraint('last_served_fides_user_device_identity_purpose_leg_interests', 'lastservednotice', ['fides_user_device_provided_identity_id', 'purpose_legitimate_interests'])
    op.create_unique_constraint('last_served_fides_user_device_identity_system_consent', 'lastservednotice', ['fides_user_device_provided_identity_id', 'system_consent'])
    op.create_unique_constraint('last_served_fides_user_device_identity_system_leg_interests', 'lastservednotice', ['fides_user_device_provided_identity_id', 'system_legitimate_interests'])
    op.create_unique_constraint('last_served_fides_user_device_identity_vendor_consent', 'lastservednotice', ['fides_user_device_provided_identity_id', 'vendor_consent'])
    op.create_unique_constraint('last_served_fides_user_device_identity_vendor_leg_interests', 'lastservednotice', ['fides_user_device_provided_identity_id', 'vendor_legitimate_interests'])
    op.create_unique_constraint('last_served_identity_purpose_consent', 'lastservednotice', ['provided_identity_id', 'purpose_consent'])
    op.create_unique_constraint('last_served_identity_purpose_legitimate_interests', 'lastservednotice', ['provided_identity_id', 'purpose_legitimate_interests'])
    op.create_unique_constraint('last_served_identity_system_consent', 'lastservednotice', ['provided_identity_id', 'system_consent'])
    op.create_unique_constraint('last_served_identity_system_leg_interests', 'lastservednotice', ['provided_identity_id', 'system_legitimate_interests'])
    op.create_unique_constraint('last_served_identity_vendor_consent', 'lastservednotice', ['provided_identity_id', 'vendor_consent'])
    op.create_unique_constraint('last_served_identity_vendor_leg_interests', 'lastservednotice', ['provided_identity_id', 'vendor_legitimate_interests'])
    # --- privacypreferencehistory: append-only, columns + indexes only ---
    op.add_column('privacypreferencehistory', sa.Column('purpose_consent', sa.Integer(), nullable=True))
    op.add_column('privacypreferencehistory', sa.Column('purpose_legitimate_interests', sa.Integer(), nullable=True))
    op.add_column('privacypreferencehistory', sa.Column('vendor_consent', sa.String(), nullable=True))
    op.add_column('privacypreferencehistory', sa.Column('vendor_legitimate_interests', sa.String(), nullable=True))
    op.add_column('privacypreferencehistory', sa.Column('system_consent', sa.String(), nullable=True))
    op.add_column('privacypreferencehistory', sa.Column('system_legitimate_interests', sa.String(), nullable=True))
    op.create_index(op.f('ix_privacypreferencehistory_purpose_consent'), 'privacypreferencehistory', ['purpose_consent'], unique=False)
    op.create_index(op.f('ix_privacypreferencehistory_purpose_legitimate_interests'), 'privacypreferencehistory', ['purpose_legitimate_interests'], unique=False)
    op.create_index(op.f('ix_privacypreferencehistory_system_consent'), 'privacypreferencehistory', ['system_consent'], unique=False)
    op.create_index(op.f('ix_privacypreferencehistory_system_legitimate_interests'), 'privacypreferencehistory', ['system_legitimate_interests'], unique=False)
    op.create_index(op.f('ix_privacypreferencehistory_vendor_consent'), 'privacypreferencehistory', ['vendor_consent'], unique=False)
    op.create_index(op.f('ix_privacypreferencehistory_vendor_legitimate_interests'), 'privacypreferencehistory', ['vendor_legitimate_interests'], unique=False)
    # --- servednoticehistory: append-only, columns + indexes only ---
    op.add_column('servednoticehistory', sa.Column('purpose_consent', sa.Integer(), nullable=True))
    op.add_column('servednoticehistory', sa.Column('purpose_legitimate_interests', sa.Integer(), nullable=True))
    op.add_column('servednoticehistory', sa.Column('vendor_consent', sa.String(), nullable=True))
    op.add_column('servednoticehistory', sa.Column('vendor_legitimate_interests', sa.String(), nullable=True))
    op.add_column('servednoticehistory', sa.Column('system_consent', sa.String(), nullable=True))
    op.add_column('servednoticehistory', sa.Column('system_legitimate_interests', sa.String(), nullable=True))
    op.create_index(op.f('ix_servednoticehistory_purpose_consent'), 'servednoticehistory', ['purpose_consent'], unique=False)
    op.create_index(op.f('ix_servednoticehistory_purpose_legitimate_interests'), 'servednoticehistory', ['purpose_legitimate_interests'], unique=False)
    op.create_index(op.f('ix_servednoticehistory_system_consent'), 'servednoticehistory', ['system_consent'], unique=False)
    op.create_index(op.f('ix_servednoticehistory_system_legitimate_interests'), 'servednoticehistory', ['system_legitimate_interests'], unique=False)
    op.create_index(op.f('ix_servednoticehistory_vendor_consent'), 'servednoticehistory', ['vendor_consent'], unique=False)
    op.create_index(op.f('ix_servednoticehistory_vendor_legitimate_interests'), 'servednoticehistory', ['vendor_legitimate_interests'], unique=False)
def test_custom_model_composer():
    """CustomModelComposer should build policies plus shared / delta / step
    critics from config dicts, expose a DistributionMapper, and dump model
    graphs to PDF when pygraphviz is available."""
    env = build_dummy_structured_env()
    # Two sub-step policy networks, no separate per-agent nets.
    policies = {'_target_': 'maze.perception.models.policies.ProbabilisticPolicyComposer', 'networks': [{'_target_': 'maze.test.shared_test_utils.dummy_models.actor_model.DummyPolicyNet', 'non_lin': 'torch.nn.SELU'}, {'_target_': 'maze.test.shared_test_utils.dummy_models.actor_model.DummyPolicyNet', 'non_lin': 'torch.nn.SELU'}], 'substeps_with_separate_agent_nets': []}
    CustomModelComposer.check_model_config({'policy': policies})
    # No critic configured: composer.critic must stay None.
    composer = CustomModelComposer(action_spaces_dict=env.action_spaces_dict, observation_spaces_dict=env.observation_spaces_dict, agent_counts_dict=env.agent_counts_dict, distribution_mapper_config=[], policy=policies, critic=None)
    assert isinstance(composer.distribution_mapper, DistributionMapper)
    assert (composer.critic is None)
    # Shared critic: one network shared across all sub-steps.
    shared_critic = {'_target_': 'maze.perception.models.critics.SharedStateCriticComposer', 'networks': [{'_target_': 'maze.test.shared_test_utils.dummy_models.critic_model.DummyValueNet', 'non_lin': 'torch.nn.SELU'}], 'stack_observations': False}
    CustomModelComposer.check_model_config({'critic': shared_critic})
    composer = CustomModelComposer(action_spaces_dict=env.action_spaces_dict, observation_spaces_dict=env.observation_spaces_dict, agent_counts_dict=env.agent_counts_dict, distribution_mapper_config=[], policy=policies, critic=shared_critic)
    assert isinstance(composer.distribution_mapper, DistributionMapper)
    assert isinstance(composer.critic, TorchSharedStateCritic)
    assert isinstance(composer.critic.networks, dict)
    assert isinstance(composer.critic.networks[0], DummyValueNet)
    # Delta critic: step 1's network additionally consumes step 0's value.
    step_critic = {'_target_': 'maze.perception.models.critics.DeltaStateCriticComposer', 'networks': [{'_target_': 'maze.test.shared_test_utils.dummy_models.critic_model.DummyValueNet', 'non_lin': 'torch.nn.SELU'}, {'_target_': 'maze.test.shared_test_utils.dummy_models.critic_model.DummyValueNet', 'non_lin': 'torch.nn.SELU'}]}
    CustomModelComposer.check_model_config({'critic': step_critic})
    composer = CustomModelComposer(action_spaces_dict=env.action_spaces_dict, observation_spaces_dict=env.observation_spaces_dict, agent_counts_dict=env.agent_counts_dict, distribution_mapper_config=[], policy=policies, critic=step_critic)
    assert isinstance(composer.distribution_mapper, DistributionMapper)
    assert isinstance(composer.critic, TorchDeltaStateCritic)
    assert isinstance(composer.critic.networks, dict)
    assert isinstance(composer.critic.networks[0], DummyValueNet)
    assert isinstance(composer.critic.networks[1], DummyValueNet)
    # Smoke-test a forward pass through both delta-critic networks.
    value_0 = composer.critic.networks[0](convert_to_torch(env.observation_spaces_dict[0].sample(), device=None, cast=None, in_place=True))
    _ = composer.critic.networks[1]({**convert_to_torch(env.observation_spaces_dict[1].sample(), device=None, cast=None, in_place=True), DeltaStateCriticComposer.prev_value_key: value_0['value']})
    composer.save_models()
    # Step critic: one independent network per sub-step.
    step_critic = {'_target_': 'maze.perception.models.critics.StepStateCriticComposer', 'networks': [{'_target_': 'maze.test.shared_test_utils.dummy_models.critic_model.DummyValueNet', 'non_lin': 'torch.nn.SELU'}, {'_target_': 'maze.test.shared_test_utils.dummy_models.critic_model.DummyValueNet', 'non_lin': 'torch.nn.SELU'}]}
    CustomModelComposer.check_model_config({'critic': step_critic})
    composer = CustomModelComposer(action_spaces_dict=env.action_spaces_dict, observation_spaces_dict=env.observation_spaces_dict, agent_counts_dict=env.agent_counts_dict, distribution_mapper_config=[], policy=policies, critic=step_critic)
    assert isinstance(composer.distribution_mapper, DistributionMapper)
    assert isinstance(composer.critic, TorchStepStateCritic)
    assert isinstance(composer.critic.networks, dict)
    assert isinstance(composer.critic.networks[0], DummyValueNet)
    assert isinstance(composer.critic.networks[1], DummyValueNet)
    composer.save_models()
    # save_models renders graph PDFs only when pygraphviz is installed; clean
    # them up so repeated runs start fresh.
    try:
        import pygraphviz
        for model_file in ['critic_0.pdf', 'critic_1.pdf', 'policy_0.pdf', 'policy_1.pdf']:
            file_path = os.path.join(os.getcwd(), model_file)
            assert os.path.exists(file_path)
            os.remove(file_path)
    except ImportError:
        pass
def get_cat_name(cat_id: int, is_jp: bool, cc: str) -> str:
    """Return the localized name of cat *cat_id* for country code *cc*.

    Downloads the unit-explanation CSV (1-based file numbering) and returns
    its first cell; returns '' (after reporting the error) when the file
    cannot be fetched.
    """
    raw = game_data_getter.get_file_latest('resLocal', f'Unit_Explanation{cat_id + 1}_{cc}.csv', is_jp)
    if raw is None:
        helper.error_text('Failed to get cat names')
        return ''
    # NB: 'delimeter' (sic) is the parameter name of the external parse_csv API.
    rows = csv_handler.parse_csv(raw.decode('utf-8'), delimeter=helper.get_text_splitter(is_jp))
    return rows[0][0]
class TestScheduleDView(ApiBaseTest):
    """API tests for the Schedule D (debts and obligations) endpoint."""
    def test_fields(self):
        """Response rows expose exactly the ScheduleDSchema fields."""
        [factories.ScheduleDViewFactory()]
        results = self._results(api.url_for(ScheduleDView))
        assert (len(results) == 1)
        assert (results[0].keys() == ScheduleDSchema().fields.keys())
    def test_filters(self):
        """Each simple equality filter narrows results to the matching row."""
        # NOTE(review): the committee_id fixture values ['C', 'C'] are
        # identical, so both rows would match that filter -- this looks like
        # garbled fixture data; confirm against the original test.
        filters = [('image_number', ScheduleD.image_number, ['123', '456']), ('committee_id', ScheduleD.committee_id, ['C', 'C']), ('report_year', ScheduleD.report_year, [2023, 2019]), ('report_type', ScheduleD.report_type, ['60D', 'Q3']), ('filing_form', ScheduleD.filing_form, ['F3P', 'F3X']), ('committee_type', ScheduleD.committee_type, ['P', 'H'])]
        for (label, column, values) in filters:
            [factories.ScheduleDViewFactory(**{column.key: value}) for value in values]
            results = self._results(api.url_for(ScheduleDView, **{label: values[0]}))
            assert (len(results) == 1)
            assert (results[0][column.key] == values[0])
    def test_filter_fulltext_field(self):
        """Full-text filter matches any record containing the search term."""
        names = ['OFFICE MAX', 'MAX AND ERMAS', 'OFFICE MAX CONSUMER CREDIT CARD']
        [factories.ScheduleDViewFactory(creditor_debtor_name=name) for name in names]
        results = self._results(api.url_for(ScheduleDView, creditor_debtor_name='OFFICE'))
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['creditor_debtor_name'], 'OFFICE MAX')
    def test_filter_match_field(self):
        """Exact-match filter returns only the identical value."""
        names = ['DUES', 'PRINTING', 'ENTERTAINMENT']
        [factories.ScheduleDViewFactory(nature_of_debt=name) for name in names]
        results = self._results(api.url_for(ScheduleDView, nature_of_debt='ENTERTAINMENT'))
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]['nature_of_debt'], 'ENTERTAINMENT')
    def test_filter_range(self):
        """Min/max filters on dates and amounts bound the returned rows."""
        [factories.ScheduleDViewFactory(amount_incurred_period=1, outstanding_balance_beginning_of_period=1, outstanding_balance_close_of_period=1, coverage_start_date=datetime.date(2011, 1, 1), coverage_end_date=datetime.date(2011, 8, 27)), factories.ScheduleDViewFactory(amount_incurred_period=2, outstanding_balance_beginning_of_period=2, outstanding_balance_close_of_period=2, coverage_start_date=datetime.date(2012, 1, 1), coverage_end_date=datetime.date(2012, 2, 27)), factories.ScheduleDViewFactory(amount_incurred_period=3, outstanding_balance_beginning_of_period=3, outstanding_balance_close_of_period=3, coverage_start_date=datetime.date(2013, 1, 1), coverage_end_date=datetime.date(2013, 5, 5)), factories.ScheduleDViewFactory(amount_incurred_period=4, outstanding_balance_beginning_of_period=3, outstanding_balance_close_of_period=4, coverage_start_date=datetime.date(2014, 1, 1), coverage_end_date=datetime.date(2014, 6, 6))]
        min_date = datetime.date(2013, 1, 1)
        results = self._results(api.url_for(ScheduleDView, min_coverage_start_date=min_date))
        self.assertTrue(all(((each['coverage_start_date'] >= min_date.isoformat()) for each in results)))
        results = self._results(api.url_for(ScheduleDView, min_coverage_end_date=min_date))
        self.assertTrue(all(((each['coverage_end_date'] >= min_date.isoformat()) for each in results)))
        max_date = datetime.date(2014, 1, 1)
        # BUGFIX: these two queries previously passed min_date as the max_*
        # filter while asserting against max_date, making the assertions
        # vacuous; filter on max_date as intended.
        results = self._results(api.url_for(ScheduleDView, max_coverage_start_date=max_date))
        self.assertTrue(all(((each['coverage_start_date'] <= max_date.isoformat()) for each in results)))
        results = self._results(api.url_for(ScheduleDView, max_coverage_end_date=max_date))
        self.assertTrue(all(((each['coverage_end_date'] <= max_date.isoformat()) for each in results)))
        results = self._results(api.url_for(ScheduleDView, min_coverage_end_date=min_date, max_coverage_end_date=max_date))
        self.assertTrue(all(((min_date.isoformat() <= each['coverage_end_date'] <= max_date.isoformat()) for each in results)))
        results = self._results(api.url_for(ScheduleDView, min_coverage_start_date=min_date, max_coverage_start_date=max_date))
        self.assertTrue(all(((min_date.isoformat() <= each['coverage_start_date'] <= max_date.isoformat()) for each in results)))
        min_amount = 2
        max_amount = 3
        filters = [('amount_incurred_period', 'min_amount_incurred', 'max_amount_incurred'), ('outstanding_balance_beginning_of_period', 'min_amount_outstanding_beginning', 'max_amount_outstanding_beginning'), ('outstanding_balance_close_of_period', 'min_amount_outstanding_close', 'max_amount_outstanding_close')]
        for (output_field, min_filter, max_filter) in filters:
            results = self._results(api.url_for(ScheduleDView, **{min_filter: min_amount}))
            self.assertTrue(all(((each[output_field] >= min_amount) for each in results)))
            results = self._results(api.url_for(ScheduleDView, **{max_filter: max_amount}))
            self.assertTrue(all(((each[output_field] <= max_amount) for each in results)))
            results = self._results(api.url_for(ScheduleDView, **{min_filter: min_amount, max_filter: max_amount}))
            self.assertTrue(all(((min_amount <= each[output_field] <= max_amount) for each in results)))
    def test_sort_ascending(self):
        [factories.ScheduleDViewFactory(sub_id=1, coverage_end_date='2017-01-02'), factories.ScheduleDViewFactory(sub_id=2, coverage_end_date='2017-01-01')]
        results = self._results(api.url_for(ScheduleDView, sort=['coverage_end_date']))
        self.assertEqual(results[0]['coverage_end_date'], '2017-01-01')
        self.assertEqual(results[1]['coverage_end_date'], '2017-01-02')
    def test_sort_descending(self):
        [factories.ScheduleDViewFactory(sub_id=1), factories.ScheduleDViewFactory(sub_id=2)]
        results = self._results(api.url_for(ScheduleDView, sort=['-sub_id']))
        self.assertEqual(results[0]['sub_id'], '2')
        self.assertEqual(results[1]['sub_id'], '1')
    def test_schedule_d_filter_form_line_number(self):
        """form_line_number accepts FORM-LINE tokens (with '-' prefix for
        exclusion) and rejects malformed values with a 400."""
        [factories.ScheduleDViewFactory(line_number='9', filing_form='F3X'), factories.ScheduleDViewFactory(line_number='10', filing_form='F3X'), factories.ScheduleDViewFactory(line_number='12', filing_form='F3'), factories.ScheduleDViewFactory(line_number='9', filing_form='F3')]
        results = self._results(api.url_for(ScheduleDView, form_line_number='f3X-9'))
        self.assertEqual(len(results), 1)
        results = self._results(api.url_for(ScheduleDView, form_line_number=('f3x-9', 'f3X-10')))
        self.assertEqual(len(results), 2)
        results = self._results(api.url_for(ScheduleDView, form_line_number='-F3x-10'))
        self.assertEqual(len(results), 3)
        response = self.app.get(api.url_for(ScheduleDView, form_line_number='f3x10'))
        self.assertEqual(response.status_code, 400)
        self.assertIn(b'Invalid form_line_number', response.data)
def commit_prediction_drift_metrics_to_db(drift_report_metrics: Dict, timestamp: float, db_uri: Text) -> None:
    """Persist one prediction-drift report row, keyed by timestamp.

    Args:
        drift_report_metrics: Column values for ``PredictionDriftTable`` (expanded as kwargs).
        timestamp: Report timestamp used as the upsert key.
        db_uri: SQLAlchemy database URI to connect to.
    """
    engine = create_engine(db_uri)
    session = open_sqa_session(engine)
    try:
        data_drift_prediction = PredictionDriftTable(**drift_report_metrics, timestamp=timestamp)
        add_or_update_by_ts(session, data_drift_prediction)
        session.commit()
    finally:
        # Always release the connection, even if the insert or commit raises;
        # the original leaked the session on failure.
        session.close()
class JobFS(types.SimpleNamespace):
    """Filesystem layout for a single job: its request, work, and result dirs.

    Must be subclassed; subclasses set CONTEXT and may override _custom_init().
    NOTE(review): several members below use `cls` or look like computed
    accessors — presumably @classmethod/@property decorators were lost from
    this extraction; confirm against the original source.
    """
    # Context label for this layout; validated in __init__.
    CONTEXT: str = ''
    # Back-reference to the owning JobsFS (set only by from_jobsfs()).
    _jobs: 'JobsFS'

    # NOTE(review): uses `cls` — presumably an @classmethod in the original.
    def from_jobsfs(cls, jobsfs: 'JobsFS', reqid: requests.ToRequestIDType) -> 'JobFS':
        """Build a JobFS for `reqid` rooted under the given JobsFS tree."""
        requestfs = JobRequestFS.from_raw(f'{jobsfs.requests}/{reqid}')
        resultfs = JobResultFS.from_raw(f'{jobsfs.results}/{reqid}')
        workfs = JobWorkFS.from_raw(f'{jobsfs.work}/{reqid}')
        self = cls(request=requestfs, result=resultfs, work=workfs, reqid=reqid)
        self._jobs = jobsfs
        return self

    def __init__(self, request: Union[(str, JobRequestFS)], result: Union[(str, JobResultFS)], work: Union[(str, JobWorkFS)], reqid: Optional[requests.ToRequestIDType]=None):
        # Abstract base: direct instantiation is forbidden.
        if (type(self) is JobFS):
            raise TypeError('JobFS must be subclassed')
        validate_context(self.CONTEXT)
        request_fs = JobRequestFS.from_raw(request)
        result_fs = JobResultFS.from_raw(result)
        work_fs = JobWorkFS.from_raw(work)
        if (not reqid):
            # Derive the request ID from the request path's last component.
            reqid = os.path.basename(request)
            reqid_obj = requests.RequestID.from_raw(reqid)
            if (not reqid_obj):
                raise ValueError('missing reqid')
        else:
            # Keep the caller's raw value for the error message.
            orig = reqid
            reqid_obj = requests.RequestID.from_raw(reqid)
            if (not reqid_obj):
                raise ValueError(f'unsupported reqid {orig!r}')
        super().__init__(reqid=reqid, request=request_fs, work=work_fs, result=result_fs)
        self._custom_init()
        # Let the job kind attach its own filesystem attributes per context.
        jobkind = resolve_job_kind(reqid_obj.kind)
        jobkind.set_request_fs(request_fs, self.CONTEXT)
        jobkind.set_work_fs(work_fs, self.CONTEXT)
        jobkind.set_result_fs(result_fs, self.CONTEXT)

    def _custom_init(self) -> None:
        # Hook for subclasses; intentionally a no-op here.
        pass

    def __str__(self):
        return str(self.request)

    def __fspath__(self):
        # os.PathLike support: the request dir stands in for the job.
        return str(self.request)

    # NOTE(review): the accessors below look like @property methods in the
    # original; the decorators are not visible here.
    def context(self):
        return self.CONTEXT

    def requestsroot(self) -> str:
        try:
            return self.request.requestsroot
        except AttributeError:
            # Fall back to the parent of the request root.
            return os.path.dirname(self.request.root)

    def resultsroot(self) -> str:
        try:
            return self.result.resultsroot
        except AttributeError:
            return os.path.dirname(self.result.root)

    def job_script(self) -> str:
        return self.work.job_script

    def pidfile(self) -> str:
        return self.work.pidfile

    def logfile(self) -> str:
        return self.work.logfile

    def jobs(self) -> 'JobsFS':
        # Only available on instances created via from_jobsfs().
        try:
            return self._jobs
        except AttributeError:
            raise NotImplementedError

    def look_up(self, name: str, subname: Optional[str]=None) -> Any:
        """Fetch attribute `name` (and optionally `subname` on it) dynamically."""
        value = getattr(self, name)
        if subname:
            value = getattr(value, subname)
        return value

    def copy(self) -> 'JobFS':
        """Return a fresh instance of the same concrete type and paths."""
        return type(self)(str(self.request), str(self.result), str(self.work), self.reqid)
class LocalDeclarationGenerator(BaseAstDataflowObjectVisitor):
    """Walk a decompiled AST, collect local variables, and emit C declarations."""

    def __init__(self, vars_per_line: int = 1):
        # Variables gathered while visiting; declared grouped by type.
        self._variables: Set[Variable] = set()
        self._vars_per_line: int = vars_per_line

    # Restored @classmethod: the body uses `cls` as an alternate constructor;
    # without the decorator, the first positional argument would bind as `cls`.
    @classmethod
    def from_task(cls, task: DecompilerTask):
        """Generate the declaration text for all locals of `task`."""
        param_names = [param.name for param in task.function_parameters]
        generator = cls(task.options.getint('code-generator.variable_declarations_per_line', fallback=1))
        generator.visit_ast(task.syntax_tree)
        return '\n'.join(generator.generate(param_names))

    def visit_assignment(self, instruction: Assignment):
        # Every assignment target is a candidate local.
        self._variables.update(instruction.definitions)

    def visit_loop_node(self, node: LoopNode):
        # For-loop declarations introduce locals too.
        if isinstance(node, ForLoopNode) and isinstance(node.declaration, Assignment):
            if isinstance(node.declaration.destination, Operation):
                self._variables.add(node.declaration.destination[0])
            else:
                self._variables.add(node.declaration.destination)

    def visit_unary_operation(self, unary: UnaryOperation):
        # Member access and address/dereference operations reveal locals.
        if isinstance(unary, MemberAccess):
            self._variables.add(unary.struct_variable)
        if (unary.operation == OperationType.address) or (unary.operation == OperationType.dereference):
            if isinstance(unary.operand, Variable):
                self._variables.add(unary.operand)
            elif isinstance(unary.operand, BinaryOperation):
                if isinstance(unary.operand.left, Variable):
                    self._variables.add(unary.operand.left)
                else:
                    self.visit(unary.operand.left)

    def generate(self, param_names: list[str] | None = None) -> Iterator[str]:
        """Yield one declaration line per chunk of same-typed variables.

        `param_names` (default: none) lists names to exclude because they are
        already declared as function parameters.
        """
        # None instead of a mutable [] default avoids the shared-default pitfall.
        if param_names is None:
            param_names = []
        variable_type_mapping = defaultdict(list)
        for variable in sorted(self._variables, key=lambda x: str(x)):
            # Globals and parameters are declared elsewhere.
            if not isinstance(variable, GlobalVariable) and variable.name not in param_names:
                variable_type_mapping[variable.type].append(variable)
        for variable_type, variables in sorted(variable_type_mapping.items(), key=lambda x: str(x)):
            for chunked_variables in self._chunks(variables, self._vars_per_line):
                yield CExpressionGenerator.format_variables_declaration(variable_type, [var.name for var in chunked_variables]) + ';'

    # Restored @staticmethod: callers invoke self._chunks(lst, n); as a plain
    # method the implicit `self` would make the call arity wrong.
    @staticmethod
    def _chunks(lst: List, n: int) -> Iterator[List]:
        """Yield successive n-sized chunks of `lst`."""
        for i in range(0, len(lst), n):
            yield lst[i:(i + n)]
def test_string():
    """Exercise String validation: type/blank/null checks, lengths, patterns, whitespace."""
    # Happy path: the result object is truthy and exposes .value / .error.
    outcome = String().validate_or_error('abc')
    assert outcome
    assert outcome.value == 'abc'
    assert outcome.error is None
    # Blank, null and non-string inputs are rejected by default.
    result, err = String().validate_or_error('')
    assert err == ValidationError(text='Must not be blank.', code='blank')
    result, err = String().validate_or_error(None)
    assert err == ValidationError(text='May not be null.', code='null')
    result, err = String().validate_or_error(123)
    assert err == ValidationError(text='Must be a string.', code='type')
    # Length bounds.
    result, err = String(max_length=10).validate_or_error('abc' * 10)
    assert err == ValidationError(text='Must have no more than 10 characters.', code='max_length')
    result, err = String(min_length=3).validate_or_error('a')
    assert err == ValidationError(text='Must have at least 3 characters.', code='min_length')
    # allow_blank toggles acceptance of '' (and coerces None to '').
    result, err = String(allow_blank=False).validate_or_error('')
    assert err == ValidationError(text='Must not be blank.', code='blank')
    result, err = String(allow_blank=True).validate_or_error('')
    assert result == ''
    result, err = String(allow_blank=True).validate_or_error(None)
    assert result == ''
    # allow_null accepts None and normalizes blank-ish input to None.
    result, err = String(allow_null=True).validate_or_error(None)
    assert result is None
    assert err is None
    result, err = String(allow_null=True).validate_or_error('')
    assert result is None
    assert err is None
    result, err = String(allow_null=True).validate_or_error(' ')
    assert result is None
    assert err is None
    # Pattern matching: plain strings and precompiled regexes both work.
    result, err = String(pattern='^[abc]*$').validate_or_error('cba')
    assert result == 'cba'
    result, err = String(pattern='^[abc]*$').validate_or_error('cbxa')
    assert err == ValidationError(text='Must match the pattern /^[abc]*$/.', code='pattern')
    result, err = String(pattern=re.compile('ABC', re.IGNORECASE)).validate_or_error('abc')
    assert result == 'abc'
    # Whitespace is trimmed by default; trim_whitespace=False preserves it.
    result, err = String().validate_or_error(' ')
    assert err == ValidationError(text='Must not be blank.', code='blank')
    result, err = String().validate_or_error(' test ')
    assert result == 'test'
    result, err = String(trim_whitespace=False).validate_or_error(' ')
    assert result == ' '
    result, err = String(trim_whitespace=False).validate_or_error(' test ')
    assert result == ' test '
class SaveCommentSchema(CSRFProtectedSchema, colander.MappingSchema):
    """Schema for saving an update comment, with karma and feedback lists."""

    def deserialize(self, cstruct):
        # Unflatten dotted form keys into nested dicts before the normal
        # colander deserialization pass.
        nested = SaveCommentSchema().unflatten(cstruct)
        return super().deserialize(nested)

    update = colander.SchemaNode(colander.String())
    text = colander.SchemaNode(colander.String(), missing='')
    # Karma values are constrained to -1, 0, or +1.
    karma = colander.SchemaNode(colander.Integer(), validator=colander.Range(min=-1, max=1), missing=0)
    karma_critpath = colander.SchemaNode(colander.Integer(), validator=colander.Range(min=-1, max=1), missing=0)
    bug_feedback = BugFeedbacks(missing=[])
    testcase_feedback = TestcaseFeedbacks(missing=[])
def test_get_authenticator_basic():
    """Credentials are mandatory; all three auth modes yield a client-credentials authenticator."""
    # Missing client id/secret is a hard error.
    with pytest.raises(ValueError, match='Client ID and Client SECRET both are required'):
        get_authenticator(PlatformConfig(auth_mode=AuthType.BASIC), None)
    # BASIC, CLIENT_CREDENTIALS and CLIENTSECRET all resolve to the same
    # authenticator type when credentials are supplied.
    for mode in (AuthType.BASIC, AuthType.CLIENT_CREDENTIALS, AuthType.CLIENTSECRET):
        cfg = PlatformConfig(auth_mode=mode, client_credentials_secret='xyz', client_id='id')
        authenticator = get_authenticator(cfg, get_client_config())
        assert authenticator
        assert isinstance(authenticator, ClientCredentialsAuthenticator)
def do_braid(maze_path, maze_verts, braid_amount=1.0):
    """Remove dead ends from a maze by linking them back into the passage graph.

    With probability `braid_amount`, each dead-end vertex gains one extra edge,
    preferring to join another dead end. Returns the augmented edge list.
    """
    dead_ends = [v for v in maze_verts if len(maze_nghbrs(v, maze_path)) == 1]
    random.shuffle(dead_ends)
    links = maze_path[:]
    for vert in dead_ends:
        # Skip vertices already braided by an earlier link, then roll the dice.
        if len(maze_nghbrs(vert, links)) != 1 or random.random() >= braid_amount:
            continue
        candidates = [edge for edge in vert.link_edges
                      if edge not in maze_path and edge.other_vert(vert) in maze_verts]
        # Prefer connecting to another dead end so both get fixed at once.
        preferred = [edge for edge in candidates
                     if len(maze_nghbrs(edge.other_vert(vert), links)) == 1]
        links.append(random.choice(preferred or candidates))
    return links
class SliceScatterLargeInputsTestCase(unittest.TestCase):
    """Verify that many dynamic_slice ops feeding one concatenate get fused
    into a bounded number of slice_scatter ops, and that results match torch."""

    def __init__(self, *args, **kwargs):
        super(SliceScatterLargeInputsTestCase, self).__init__(*args, **kwargs)
        # Counter used to give each compiled module a unique name.
        self.test_count = 1

    # NOTE(review): uses `cls` — presumably decorated @classmethod in the
    # original unittest source; the decorator is not visible here.
    def setUpClass(cls) -> None:
        torch.manual_seed(0)

    def _test_slice_scatter(self, input_shape, start_indices, end_indices, concat_dim, dtype):
        """Compile slice+concat over `num_slices` inputs and compare to PyTorch."""
        num_slices = 140
        slice_outputs = [ops.dynamic_slice()(Tensor(shape=input_shape, dtype=dtype, name=f'input{idx}', is_input=True), start_indices=start_indices, end_indices=end_indices) for idx in range(num_slices)]
        Y = ops.concatenate()(slice_outputs, concat_dim)
        Y._attrs['name'] = 'y'
        Y._attrs['is_output'] = True
        target = detect_target()
        dll_name = f'test_{self.test_count}.so'
        test_name = f'slice_scatter_large_inputs_{self.test_count}'
        module = compile_model(Y, target, './tmp', test_name, dll_name=dll_name)
        # The 140 slices should be fused into exactly 5 slice_scatter ops.
        Y_src_ops = list(Y._attrs['src_ops'])
        self.assertEqual(len(Y_src_ops), 5)
        self.assertTrue(all(((op._attrs['op'] == 'slice_scatter') for op in Y_src_ops)))
        # Reference computation with eager PyTorch slicing + cat.
        input_pt = [get_random_torch_tensor(input_shape, dtype) for _ in range(num_slices)]
        slice_indices = [slice(i, j) for (i, j) in zip(start_indices, end_indices)]
        slice_outputs_pt = [input_i[slice_indices] for input_i in input_pt]
        y_pt = torch.cat(slice_outputs_pt, concat_dim)
        inputs = {f'input{idx}': input_pt[idx] for idx in range(num_slices)}
        y = get_torch_empty_tensor(y_pt.size(), dtype)
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(y_pt, y, atol=0.01, rtol=0.01))
        self.test_count += 1

    def test_slice_scatter_float(self):
        # Same slice over every concat dim; NOTE(review): the last case uses
        # dtype='float16' despite the method name — confirm intentional.
        self._test_slice_scatter(input_shape=[3, 7, 10], start_indices=[0, 0, 0], end_indices=[2, 1, 4], concat_dim=0, dtype='float')
        self._test_slice_scatter(input_shape=[3, 7, 10], start_indices=[0, 0, 0], end_indices=[2, 1, 4], concat_dim=1, dtype='float')
        self._test_slice_scatter(input_shape=[3, 7, 10], start_indices=[0, 0, 0], end_indices=[2, 1, 4], concat_dim=2, dtype='float')
        self._test_slice_scatter(input_shape=[3, 7, 10], start_indices=[0, 0, 0], end_indices=[2, 1, 4], concat_dim=1, dtype='float16')
# NOTE(review): bare expression — presumably the decorator
# `@_server.peripheral_model` applied to the class below; the '@' appears to
# have been lost from this extraction.
_server.peripheral_model
class UARTPublisher(object):
    """Emulated UART peripheral: buffers received bytes per UART id and
    publishes writes through the server. All state is class-level."""
    # One receive buffer (deque of chars) per UART id.
    rx_buffers = defaultdict(deque)

    # NOTE(review): bare expression — presumably `@_server.tx_msg` on write().
    _server.tx_msg
    def write(cls, uart_id, chars):
        """Publish `chars` as a TX message for `uart_id`.
        NOTE(review): uses `cls` — presumably @classmethod in the original."""
        log.info(('Writing: %s' % chars))
        msg = {'id': uart_id, 'chars': chars}
        return msg

    def read(cls, uart_id, count=1, block=False):
        """Pop up to `count` chars from the RX buffer; optionally busy-wait."""
        log.debug(('In: UARTPublisher.read id:%s count:%i, block:%s' % (hex(uart_id), count, str(block))))
        # Busy-wait until enough bytes arrive (burns CPU; relies on rx_data
        # filling the buffer from another thread).
        while (block and (len(cls.rx_buffers[uart_id]) < count)):
            pass
        log.debug('Done Blocking: UARTPublisher.read')
        buffer = cls.rx_buffers[uart_id]
        chars_available = len(buffer)
        if (chars_available >= count):
            chars = [buffer.popleft() for _ in range(count)]
            chars = ''.join(chars).encode('utf-8')
        else:
            # Drain whatever is available when fewer than `count` bytes exist.
            chars = [buffer.popleft() for _ in range(chars_available)]
            chars = ''.join(chars).encode('utf-8')
        log.info(('Reading %s' % chars))
        return chars

    def read_line(cls, uart_id, count=1, block=False):
        """Like read(), but the blocking wait also ends at a newline."""
        log.debug(('In: UARTPublisher.read id:%s count:%i, block:%s' % (hex(uart_id), count, str(block))))
        while (block and (len(cls.rx_buffers[uart_id]) < count)):
            if (len(cls.rx_buffers[uart_id]) > 0):
                # Stop waiting as soon as a full line has arrived.
                if (cls.rx_buffers[uart_id][(- 1)] == '\n'):
                    break
        log.debug('Done Blocking: UARTPublisher.read')
        log.debug(('rx_buffers %s' % cls.rx_buffers[uart_id]))
        buffer = cls.rx_buffers[uart_id]
        chars_available = len(buffer)
        if (chars_available >= count):
            chars = [buffer.popleft() for _ in range(count)]
            chars = ''.join(chars).encode('utf-8')
        else:
            chars = [buffer.popleft() for _ in range(chars_available)]
            chars = ''.join(chars).encode('utf-8')
        log.info(('Reading %s' % chars))
        return chars

    # NOTE(review): bare expression — presumably `@_server.reg_rx_handler`.
    _server.reg_rx_handler
    def rx_data(cls, msg):
        """Server callback: append incoming chars to the UART's RX buffer."""
        log.debug(('rx_data got message: %s' % str(msg)))
        uart_id = msg['id']
        data = msg['chars']
        cls.rx_buffers[uart_id].extend(data)
def test_piggybacking_resource_post_and_delete(app_client):
    """POST seven items (hooks applied), delete one by id, then delete all."""
    for number in range(1, 8):
        created = app_client.simulate_post('/items', json={'number': number})
        assert created.status_code == 201
        assert created.headers['X-Hook-Applied'] == '1'
        # The collection grows by one per POST.
        assert len(app_client.simulate_get('/items').json) == number
    # Delete the last-created item; resource and hook headers are both set.
    deleted = app_client.simulate_delete('/items/{}'.format(number))
    assert deleted.status_code == 204
    assert deleted.headers['X-Fish-Trait'] == 'wet'
    assert deleted.headers['X-Hook-Applied'] == '1'
    assert len(app_client.simulate_get('/items').json) == 6
    # Deleting the collection empties it entirely.
    deleted = app_client.simulate_delete('/items')
    assert deleted.status_code == 204
    assert deleted.headers['X-Hook-Applied'] == '3'
    assert app_client.simulate_get('/items').json == []
def load_config():
    """Read the INI config, clamp every value into a safe range, and return it.

    Validates per-power-source update rates, power limits and trip
    temperatures, normalizes undervolt/IccMax sections, and emits warnings for
    suspicious values. Exits via fatal() when Update_Rate_s is missing.
    """
    config = configparser.ConfigParser()
    config.read(args.config)
    for power_source in ('AC', 'BATTERY'):
        # Rates, TDPs and durations must be strictly positive.
        for option in ('Update_Rate_s', 'PL1_Tdp_W', 'PL1_Duration_s', 'PL2_Tdp_W', 'PL2_Duration_S'):
            value = config.getfloat(power_source, option, fallback=None)
            if (value is not None):
                # config stores strings; clamp to at least 0.001.
                # (Dropped the original's `value = config.set(...)` — set()
                # returns None, so the assignment was a misleading no-op.)
                config.set(power_source, option, str(max(0.001, value)))
            elif (option == 'Update_Rate_s'):
                fatal('The mandatory "Update_Rate_s" parameter is missing.')
        # Trip temperature is clamped into the supported hardware range.
        trip_temp = config.getfloat(power_source, 'Trip_Temp_C', fallback=None)
        if (trip_temp is not None):
            valid_trip_temp = min(TRIP_TEMP_RANGE[1], max(TRIP_TEMP_RANGE[0], trip_temp))
            if (trip_temp != valid_trip_temp):
                config.set(power_source, 'Trip_Temp_C', str(valid_trip_temp))
                log('[!] Overriding invalid "Trip_Temp_C" value in "{:s}": {:.1f} -> {:.1f}'.format(power_source, trip_temp, valid_trip_temp))
    # Undervolt offsets must be <= 0 (a positive offset would overvolt).
    for key in UNDERVOLT_KEYS:
        for plane in VOLTAGE_PLANES:
            if (key in config):
                value = config.getfloat(key, plane)
                valid_value = min(0, value)
                if (value != valid_value):
                    config.set(key, plane, str(valid_value))
                    log('[!] Overriding invalid "{:s}" value in "{:s}" voltage plane: {:.0f} -> {:.0f}'.format(key, plane, value, valid_value))
    # If any per-source undervolt section exists, make sure they all do and
    # every plane has an explicit value.
    if any(((key in config) for key in UNDERVOLT_KEYS[1:])):
        for key in UNDERVOLT_KEYS[1:]:
            if (key not in config):
                config.add_section(key)
            for plane in VOLTAGE_PLANES:
                value = config.getfloat(key, plane, fallback=0.0)
                config.set(key, plane, str(value))
    # On Skylake+ CORE and CACHE undervolt should be identical; warn once.
    for key in UNDERVOLT_KEYS:
        if (key in config):
            if (config.getfloat(key, 'CORE', fallback=0) != config.getfloat(key, 'CACHE', fallback=0)):
                warning('On Skylake and newer CPUs CORE and CACHE values should match!')
                break
    # IccMax must lie in (0, 1023); invalid entries are dropped with a warning.
    iccmax_enabled = False
    for key in ICCMAX_KEYS:
        for plane in CURRENT_PLANES:
            if (key in config):
                try:
                    value = config.getfloat(key, plane)
                    if ((value <= 0) or (value >= 1023)):
                        raise ValueError
                    iccmax_enabled = True
                except ValueError:
                    warning('Invalid value for {:s} in {:s}'.format(plane, key), oneshot=False)
                    config.remove_option(key, plane)
                except configparser.NoOptionError:
                    pass
    if iccmax_enabled:
        warning('Warning! Raising IccMax above design limits can damage your system!')
    return config
class OptionPlotoptionsVectorSonificationContexttracksMappingGapbetweennotes(Options):
    """Generated Highcharts option wrapper: gapBetweenNotes mapping for vector
    sonification context tracks.

    NOTE(review): every option is a getter/setter pair sharing one name — the
    later def overrides the earlier at class-creation time, so presumably
    @property/@<name>.setter decorators were lost from this generated code;
    confirm against the original wrapper.
    """

    def mapFunction(self):
        # Getter: mapping function (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a plain (non-JavaScript) value.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property to map to (default None).
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range (default None).
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range (default None).
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: unit to map within (default None).
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def fetch_consumption(zone_key: ZoneKey=ZoneKey('KR'), session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> list[dict]:
    """Scrape live electricity consumption for South Korea from the KPX
    real-time page and return it as a one-event consumption list.

    Raises ParserException for historical requests (not supported).
    """
    session = (session or Session())
    if target_datetime:
        raise ParserException('KPX.py', 'This parser is not yet able to parse past dates', zone_key)
    logger.debug(f'Fetching consumption data from {REAL_TIME_URL}')
    # NOTE(review): verify=False disables TLS certificate verification — the
    # endpoint's certificate is presumably broken; confirm this is intended.
    response = session.get(REAL_TIME_URL, verify=False)
    # NOTE(review): assert is stripped under `python -O`; an explicit raise
    # would be safer for validating the HTTP status.
    assert (response.status_code == 200)
    soup = BeautifulSoup(response.text, 'html.parser')
    # NOTE(review): the pattern '\s*\s*' matches anything (including empty
    # strings); the original label text (likely Korean) appears to have been
    # lost from this regex — confirm against upstream.
    consumption_title = soup.find('th', string=re.compile('\\s*\\s*'))
    # Value like "70,123 MW" -> 70123.0
    consumption_val = float(consumption_title.find_next_sibling().text.split()[0].replace(',', ''))
    # Timestamp like "2023.01.01(Sun) 12:00" -> localized datetime.
    consumption_date_list = soup.find('p', {'class': 'info_top'}).text.split(' ')[:2]
    consumption_date_list[0] = consumption_date_list[0].replace('.', '-').split('(')[0]
    consumption_date = datetime.strptime(' '.join(consumption_date_list), '%Y-%m-%d %H:%M').replace(tzinfo=TIMEZONE)
    consumption_list = TotalConsumptionList(logger)
    consumption_list.append(zoneKey=zone_key, datetime=consumption_date, source=KR_SOURCE, consumption=consumption_val)
    return consumption_list.to_list()
def test_mutate_grow(tree, r2_growable):
    """Growing a leaf into a split adds two nodes; the reverse direction is rejected."""
    initial_size = tree.num_nodes()
    left_leaf = LeafNode(depth=3, composite_rules=CompositeRules(all_dims=[0], all_split_rules=[SplitRule(grow_dim=0, grow_val=3, operator=Operator.le)]), val=15)
    right_leaf = LeafNode(depth=3, composite_rules=CompositeRules(all_dims=[0], all_split_rules=[SplitRule(grow_dim=0, grow_val=1.5, operator=Operator.gt)]), val=15)
    grown_split = SplitNode(depth=2, left_child=left_leaf, right_child=right_leaf, composite_rules=CompositeRules(all_dims=[0], all_split_rules=[SplitRule(grow_dim=0, grow_val=1.5, operator=Operator.gt)]))
    # Old node must be a leaf: split -> leaf is not a valid grow.
    with pytest.raises(GrowError):
        _ = GrowMutation(old_node=grown_split, new_node=r2_growable)
    tree.mutate(GrowMutation(old_node=r2_growable, new_node=grown_split))
    # The split replaces the leaf and contributes two child nodes.
    assert tree.num_nodes() == initial_size + 2
def test_get_accounts_for_deletion_returns(computation, canonical_address_a, canonical_address_b):
    """Registered self-destruct accounts are all reported by get_accounts_for_deletion."""
    computation.register_account_for_deletion(canonical_address_a)
    assert computation.get_accounts_for_deletion() == ((canonical_address_a, canonical_address_a),)
    # Register a second account under a different storage address.
    computation.msg.storage_address = canonical_address_b
    computation.register_account_for_deletion(canonical_address_b)
    registered = sorted(computation.get_accounts_for_deletion(), key=lambda entry: entry[0])
    assert [entry[0] for entry in registered] == [canonical_address_a, canonical_address_b]
class TestWorkflowOps(unittest.TestCase):
    """Tests for ops.upload_workflows against a mocked scheduler client.

    NOTE(review): the bare string expressions before each test look like
    stripped `@mock.patch('ai_flow...')` decorators (each test takes a
    mock_scheudler_client argument) — confirm against the original source.
    """

    def setUp(self) -> None:
        # Workflow definition file that lives next to this test module.
        self.workflow_file_path = os.path.join(os.path.dirname(__file__), 'workflow_for_unittest.py')

    ('ai_flow.ops.workflow_ops.get_scheduler_client')
    def test_upload_new_workflows(self, mock_scheudler_client):
        # Unknown workflows are added; one result per workflow in the file.
        mock_scheudler_client.return_value.get_workflow.return_value = None
        mock_workflow = mock.MagicMock()
        mock_scheudler_client.return_value.add_workflow.return_value = mock_workflow
        self.assertEqual([mock_workflow, mock_workflow], ops.upload_workflows(self.workflow_file_path))

    ('ai_flow.ops.workflow_ops.get_scheduler_client')
    def test_upload_existed_workflows(self, mock_scheudler_client):
        # Already-known workflows are updated instead of added.
        mock_workflow = mock.MagicMock()
        mock_scheudler_client.return_value.get_workflow.return_value = mock_workflow
        new_workflow = mock.MagicMock()
        mock_scheudler_client.return_value.update_workflow.return_value = new_workflow
        self.assertEqual([new_workflow, new_workflow], ops.upload_workflows(self.workflow_file_path))
class TestCreatePrivacyRequestAuthenticated():
    """API tests for the authenticated privacy-request creation endpoint.

    NOTE(review): many bare expressions below — `(scope='function')`,
    `('fides...delay')`, `.usefixtures(...)` — look like stripped
    `@pytest.fixture`, `@mock.patch` and `@pytest.mark.usefixtures`
    decorators (the tests accept mock/fixture arguments that match);
    confirm against the original source. Identity emails/phones appear
    redacted to empty strings.
    """

    (scope='function')
    def url(self) -> str:
        # Fixture: fully-qualified endpoint URL.
        return f'{V1_URL_PREFIX}{PRIVACY_REQUEST_AUTHENTICATED}'

    def verification_config(self, db):
        # Fixture: temporarily force subject identity verification on,
        # restoring the original setting after the test.
        original = CONFIG.execution.subject_identity_verification_required
        CONFIG.execution.subject_identity_verification_required = True
        ApplicationConfig.update_config_set(db, CONFIG)
        (yield)
        CONFIG.execution.subject_identity_verification_required = original
        ApplicationConfig.update_config_set(db, CONFIG)

    ('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    def test_create_privacy_request(self, run_access_request_mock, url, generate_auth_header, api_client: TestClient, policy):
        # Happy path: request is accepted and execution is kicked off.
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert (resp.status_code == 200)
        response_data = resp.json()['succeeded']
        assert (len(response_data) == 1)
        assert run_access_request_mock.called

    .usefixtures('verification_config')
    ('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    def test_create_privacy_request_bypass_verification(self, run_access_request_mock, url, generate_auth_header, api_client: TestClient, policy):
        # Authenticated creation skips identity verification even when enabled.
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert (resp.status_code == 200)
        response_data = resp.json()['succeeded']
        assert (len(response_data) == 1)
        assert run_access_request_mock.called

    def test_create_privacy_requests_unauthenticated(self, api_client: TestClient, url, policy):
        # No auth header -> 401.
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
        response = api_client.post(url, json=data, headers={})
        assert (401 == response.status_code)

    def test_create_privacy_requests_wrong_scope(self, api_client: TestClient, generate_auth_header, url, policy):
        # Wrong scope -> 403.
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
        auth_header = generate_auth_header(scopes=[STORAGE_CREATE_OR_UPDATE])
        response = api_client.post(url, json=data, headers=auth_header)
        assert (403 == response.status_code)

    ('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    def test_create_privacy_request_stores_identities(self, run_access_request_mock, url, db, generate_auth_header, api_client: TestClient, policy):
        # Submitted identities must be persisted on the privacy request.
        TEST_EMAIL = ''
        TEST_PHONE_NUMBER = '+'
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': TEST_EMAIL, 'phone_number': TEST_PHONE_NUMBER}}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert (resp.status_code == 200)
        response_data = resp.json()['succeeded']
        assert (len(response_data) == 1)
        pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
        persisted_identity = pr.get_persisted_identity()
        assert (persisted_identity.email == TEST_EMAIL)
        assert (persisted_identity.phone_number == TEST_PHONE_NUMBER)
        assert run_access_request_mock.called

    .usefixtures('require_manual_request_approval')
    ('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    def test_create_privacy_request_require_manual_approval(self, run_access_request_mock, url, generate_auth_header, api_client: TestClient, policy):
        # With manual approval required, the request stays pending and never runs.
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert (resp.status_code == 200)
        response_data = resp.json()['succeeded']
        assert (len(response_data) == 1)
        assert (response_data[0]['status'] == 'pending')
        assert (not run_access_request_mock.called)

    ('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    def test_create_privacy_request_with_masking_configuration(self, run_access_request_mock, url, generate_auth_header, api_client: TestClient, erasure_policy_string_rewrite):
        # Erasure policies with masking configuration are accepted too.
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': erasure_policy_string_rewrite.key, 'identity': {'email': ''}}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert (resp.status_code == 200)
        response_data = resp.json()['succeeded']
        assert (len(response_data) == 1)

    ('fides.api.service.privacy_request.request_runner_service.run_access_request')
    def test_create_privacy_request_limit_exceeded(self, _, url, generate_auth_header, api_client: TestClient, policy):
        # 51 requests exceeds the 50-item batch limit -> 422.
        payload = []
        for _ in range(0, 51):
            # NOTE(review): 'ftest{i}' is a literal, not an f-string — every
            # generated email is identical; presumably f'test{i}' was intended.
            payload.append({'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': 'ftest{i}'}})
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        response = api_client.post(url, json=payload, headers=auth_header)
        assert (422 == response.status_code)
        assert (json.loads(response.text)['detail'][0]['msg'] == 'ensure this value has at most 50 items')

    ('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    def test_create_privacy_request_starts_processing(self, run_privacy_request_mock, url, generate_auth_header, api_client: TestClient, policy):
        # Creation immediately enqueues the execution task.
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert run_privacy_request_mock.called
        assert (resp.status_code == 200)

    ('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    def test_create_privacy_request_with_external_id(self, run_access_request_mock, url, db, generate_auth_header, api_client: TestClient, policy):
        # A caller-supplied external_id is echoed back and persisted.
        external_id = 'ext_some-uuid-here-1234'
        data = [{'external_id': external_id, 'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert (resp.status_code == 200)
        response_data = resp.json()['succeeded']
        assert (len(response_data) == 1)
        assert (response_data[0]['external_id'] == external_id)
        pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
        assert (pr.external_id == external_id)
        assert run_access_request_mock.called

    ('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    def test_create_privacy_request_caches_identity(self, run_access_request_mock, url, db, generate_auth_header, api_client: TestClient, policy, cache):
        # Identities are written to the cache keyed by request id + attribute.
        identity = {'email': ''}
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': identity}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert (resp.status_code == 200)
        response_data = resp.json()['succeeded']
        assert (len(response_data) == 1)
        pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
        key = get_identity_cache_key(privacy_request_id=pr.id, identity_attribute=list(identity.keys())[0])
        assert (cache.get(key) == list(identity.values())[0])
        assert run_access_request_mock.called

    ('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    def test_create_privacy_request_caches_masking_secrets(self, run_erasure_request_mock, url, db, generate_auth_header, api_client: TestClient, erasure_policy_aes, cache):
        # AES erasure policies cache their generated masking secrets.
        identity = {'email': ''}
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': erasure_policy_aes.key, 'identity': identity}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert (resp.status_code == 200)
        response_data = resp.json()['succeeded']
        assert (len(response_data) == 1)
        pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
        secret_key = get_masking_secret_cache_key(privacy_request_id=pr.id, masking_strategy='aes_encrypt', secret_type=SecretType.key)
        assert (cache.get_encoded_by_key(secret_key) is not None)
        assert run_erasure_request_mock.called

    def test_create_privacy_request_invalid_encryption_values(self, url, generate_auth_header, api_client: TestClient, policy):
        # Encryption keys shorter than 16 bytes are rejected with a 422.
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}, 'encryption_key': 'test'}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert (resp.status_code == 422)
        assert (resp.json()['detail'][0]['msg'] == 'Encryption key must be 16 bytes long')

    ('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    def test_create_privacy_request_caches_encryption_keys(self, run_access_request_mock, url, db, generate_auth_header, api_client: TestClient, policy, cache):
        # Valid encryption keys are cached for the request's lifetime.
        identity = {'email': ''}
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': identity, 'encryption_key': 'test--encryption'}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert (resp.status_code == 200)
        response_data = resp.json()['succeeded']
        assert (len(response_data) == 1)
        pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
        encryption_key = get_encryption_cache_key(privacy_request_id=pr.id, encryption_attr='key')
        assert (cache.get(encryption_key) == 'test--encryption')
        assert run_access_request_mock.called

    def test_create_privacy_request_no_identities(self, url, generate_auth_header, api_client: TestClient, policy):
        # Requests without any identity are reported under 'failed'.
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {}}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert (resp.status_code == 200)
        response_data = resp.json()['succeeded']
        assert (len(response_data) == 0)
        response_data = resp.json()['failed']
        assert (len(response_data) == 1)

    def test_create_privacy_request_registers_async_task(self, db, url, generate_auth_header, api_client, policy):
        # Creation registers an async execution task and caches its id.
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        assert (resp.status_code == 200)
        response_data = resp.json()['succeeded']
        assert (len(response_data) == 1)
        pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
        assert (pr.get_cached_task_id() is not None)
        assert (pr.get_async_execution_task() is not None)

    ('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    def test_create_privacy_request_creates_system_audit_log(self, run_access_request_mock, url, db, generate_auth_header, api_client: TestClient, policy):
        # Auto-approved requests get a system-attributed 'approved' audit log.
        data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
        auth_header = generate_auth_header(scopes=[PRIVACY_REQUEST_CREATE])
        resp = api_client.post(url, json=data, headers=auth_header)
        response_data = resp.json()['succeeded'][0]
        approval_audit_log: AuditLog = AuditLog.filter(db=db, conditions=((AuditLog.privacy_request_id == response_data['id']) & (AuditLog.action == AuditLogAction.approved))).first()
        assert (approval_audit_log is not None)
        assert (approval_audit_log.user_id == 'system')
        assert run_access_request_mock.called
def test_ubq_to_dict():
    """UpdateByQuery.to_dict serializes the query, extra args and call-time kwargs."""
    empty = UpdateByQuery()
    assert empty.to_dict() == {}

    queried = empty.query('match', f=42)
    assert queried.to_dict() == {'query': {'match': {'f': 42}}}
    # Keyword arguments to to_dict are merged into the serialized body.
    assert queried.to_dict(size=10) == {'query': {'match': {'f': 42}}, 'size': 10}

    assert UpdateByQuery(extra={'size': 5}).to_dict() == {'size': 5}

    with_extra_q = UpdateByQuery(extra={'extra_q': Q('term', category='conference')})
    assert with_extra_q.to_dict() == {'extra_q': {'term': {'category': 'conference'}}}
def _add_apikey_handler(security, user_datastore):
    """Register a request loader that authenticates requests by API key.

    Restored the decorator syntax on ``_manager.request_loader``: as a bare
    expression it was a no-op and the nested loader was never registered.

    The loader reads the raw ``Authorization`` header value and looks it up
    as an API key in *user_datastore*; returns the matching user, or None
    when there is no key or no match (i.e. unauthenticated).
    """
    @_manager.request_loader
    def load_user_from_request(request):
        api_key = request.headers.get('Authorization')
        if api_key:
            user = user_datastore.find_user(api_key=api_key)
            if user:
                return user
        # No key supplied, or no user holds this key.
        return None
class Solution():
    def verticalTraversal(self, root: TreeNode) -> List[List[int]]:
        """Return node values grouped by vertical column (left to right).

        Within a column, values are ordered by depth, with ties broken by
        the value itself (sorting (depth, value) pairs).
        """
        columns = {}

        def walk(node, col, depth):
            # Record (depth, value) under this node's column, then recurse.
            if node is None:
                return
            columns.setdefault(col, []).append([depth, node.val])
            walk(node.left, col - 1, depth + 1)
            walk(node.right, col + 1, depth + 1)

        walk(root, 0, 0)
        return [
            [entry[1] for entry in sorted(columns[col])]
            for col in sorted(columns)
        ]
@_models('spacy.GPT-3-5.v2')
def openai_gpt_3_5_v2(config: Dict[(Any, Any)]=SimpleFrozenDict(temperature=_DEFAULT_TEMPERATURE), name: Literal[('gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo-0613', 'gpt-3.5-turbo-0613-16k', 'gpt-3.5-turbo-instruct')]='gpt-3.5-turbo', strict: bool=OpenAI.DEFAULT_STRICT, max_tries: int=OpenAI.DEFAULT_MAX_TRIES, interval: float=OpenAI.DEFAULT_INTERVAL, max_request_time: float=OpenAI.DEFAULT_MAX_REQUEST_TIME, endpoint: Optional[str]=None) -> Callable[([Iterable[str]], Iterable[str])]:
    """Build an OpenAI GPT-3.5 model callable registered as 'spacy.GPT-3-5.v2'.

    Restored the ``@`` on the registration decorator: as a bare call it
    executed the registration factory without wrapping this function.
    """
    # 'gpt-3.5-turbo-instruct' is a completion-style model, so it must use the
    # non-chat endpoint; everything else defaults to the chat endpoint unless
    # the caller supplies an explicit one.
    return OpenAI(name=name, endpoint=((endpoint or Endpoints.CHAT.value) if (name != 'gpt-3.5-turbo-instruct') else Endpoints.NON_CHAT.value), config=config, strict=strict, max_tries=max_tries, interval=interval, max_request_time=max_request_time)
class Command(BaseCommand):
    """Download monthly prescribing-metadata CSVs listed in a page's JSON-LD.

    NOTE(review): the source URL literal in ``handle`` has been truncated in
    this copy of the file and must be restored before this module can parse.
    """

    def add_arguments(self, parser):
        # Which file family to fetch; maps to a filename fragment below.
        parser.add_argument('dataset', choices=['addresses', 'chemicals'])

    def handle(self, *args, **kwargs):
        # NOTE(review): unterminated string literal -- the URL was stripped
        # from this copy; restore the original page URL here.
        url = '
        rsp = requests.get(url)
        doc = BeautifulSoup(rsp.content, 'html.parser')
        # The page embeds its dataset description as JSON-LD in a <script> tag.
        tag = doc.find('script', type='application/ld+json')
        metadata = json.loads(list(tag.descendants)[0])
        # URL-encoded fragment identifying the wanted file family.
        filename_fragment = {'addresses': 'ADDR%20BNFT', 'chemicals': 'CHEM%20SUBS'}[kwargs['dataset']]
        # Filenames look like T<YYYY><MM><fragment>.CSV; groups capture year/month.
        pattern = (('T(\\d{4})(\\d{2})' + filename_fragment) + '.CSV')
        urls = [record['contentUrl'] for record in metadata['distribution'] if (filename_fragment in record['contentUrl'])]
        # Walk newest-first and stop at the first month already on disk.
        for url in sorted(urls, key=(lambda url: url.split('/')[(- 1)]), reverse=True):
            filename = url.split('/')[(- 1)]
            tmp_filename = (filename + '.tmp')
            match = re.match(pattern, filename, re.I)
            year_and_month = '_'.join(match.groups())
            dir_path = os.path.join(settings.PIPELINE_DATA_BASEDIR, 'prescribing_metadata', year_and_month)
            if os.path.exists(os.path.join(dir_path, filename)):
                break
            # Some earlier downloads stored '%20' as '+'; treat those as present too.
            if os.path.exists(os.path.join(dir_path, filename.replace('%20', '+'))):
                break
            mkdir_p(dir_path)
            rsp = requests.get(url)
            assert rsp.ok
            # Write to a temp name first, then rename so readers never see a
            # partially written file.
            with open(os.path.join(dir_path, tmp_filename), 'w') as f:
                f.write(rsp.text)
            os.rename(os.path.join(dir_path, tmp_filename), os.path.join(dir_path, filename))
class ApexRest(commons.BaseRequest):
    """Request wrapper targeting a Salesforce Apex REST endpoint."""

    def __init__(self, session_id, instance_url, action, request_params, request_body, **kwargs):
        # Fold any query parameters into the action path before building the
        # service URL.
        if request_params is not None:
            action = '%s?%s' % (action, urlencode(request_params))
        super().__init__(session_id, instance_url, request_body=request_body, **kwargs)
        self.service = '/services/apexrest/%s' % action
class TestImportDmd2(TestCase):
def setUpTestData(cls):
for (bnf_code, name) in [('0203020C0AAAAAA', 'Adenosine_I/V Inf 3mg/ml 2ml Vl'), ('1003020U0AAAIAI', 'Diclofenac Sod_Gel 2.32%'), ('1003020U0BBADAI', 'Voltarol 12 Hour Emulgel P_Gel 2.32%'), ('1305020C0AAFVFV', 'Coal Tar 10%/Salic Acid 5%/Aq_Crm'), ('1106000X0AAAIAI', 'Piloc HCl_Eye Dps 6%'), ('BBHCA0', 'Nutrison Pack_Stnd')]:
Presentation.objects.create(bnf_code=bnf_code, name=name)
shutil.copytree('dmd/tests/data/dmd/1', 'pipeline/test-data/data/dmd/2019_07_01/nhsbsa_dmd_7.4.0_')
mkdir_p('pipeline/test-data/data/bnf_snomed_mapping/2019_07_01')
shutil.copyfile('dmd/tests/data/bnf_snomed_mapping/mapping.xlsx', 'pipeline/test-data/data/bnf_snomed_mapping/2019_07_01/mapping.xlsx')
with patch('dmd.management.commands.import_dmd.Command.upload_to_bq'):
call_command('import_dmd')
shutil.copytree('dmd/tests/data/dmd/2', 'pipeline/test-data/data/dmd/2019_07_08/nhsbsa_dmd_7.4.0_')
def tearDownClass(cls):
shutil.rmtree('pipeline/test-data/data/dmd')
shutil.rmtree('pipeline/test-data/data/bnf_snomed_mapping')
super(TestImportDmd2, cls).tearDownClass()
def test_objects_created(self):
self.assertEqual(VMP.objects.count(), 7)
self.assertEqual(VMPP.objects.count(), 14)
self.assertEqual(AMP.objects.count(), 15)
self.assertEqual(AMPP.objects.count(), 26)
vmp = VMP.objects.get(id=)
self.assertEqual(vmp.nm, 'Diclofenac 2.32% gel')
self.assertEqual(vmp.pres_stat.descr, 'Valid as a prescribable product')
self.assertEqual(vmp.vmpp_set.count(), 3)
self.assertEqual(vmp.amp_set.count(), 3)
self.assertEqual(vmp.bnf_code, '1003020U0AAAIAI')
vmpp = VMPP.objects.get(id=)
self.assertEqual(vmpp.nm, 'Diclofenac 2.32% gel 30 gram')
self.assertEqual(vmpp.vmp, vmp)
self.assertEqual(vmpp.qty_uom.descr, 'gram')
self.assertEqual(vmpp.ampp_set.count(), 3)
self.assertEqual(vmpp.bnf_code, '1003020U0AAAIAI')
amp = AMP.objects.get(id=)
self.assertEqual(amp.nm, 'Diclofenac 2.32% gel')
self.assertEqual(amp.descr, 'Diclofenac 2.32% gel (Colorama Pharmaceuticals Ltd)')
self.assertEqual(amp.vmp, vmp)
self.assertEqual(amp.supp.descr, 'Colorama Pharmaceuticals Ltd')
self.assertEqual(amp.ampp_set.count(), 2)
self.assertIsNone(amp.bnf_code)
ampp = AMPP.objects.get(id=)
self.assertEqual(ampp.nm, 'Diclofenac 2.32% gel (Colorama Pharmaceuticals Ltd) 30 gram')
self.assertEqual(ampp.vmpp, vmpp)
self.assertEqual(ampp.amp, amp)
self.assertEqual(ampp.legal_cat.descr, 'P')
self.assertIsNone(amp.bnf_code)
self.assertEqual(AMP.objects.get(id=).bnf_code, '1003020U0BBADAI')
self.assertEqual(AMPP.objects.get(id=).bnf_code, '1003020U0BBADAI')
ampp_with_gtins = AMPP.objects.get(id=)
self.assertEqual(ampp_with_gtins.gtin_set.count(), 2)
gtin = ampp_with_gtins.gtin_set.get(gtin=)
self.assertEqual(gtin.startdt, date(2010, 2, 1))
self.assertEqual(gtin.enddt, date(2013, 7, 21))
def test_vmp_bnf_codes_set(self):
self.assertEqual(VMP.objects.get(id=).bnf_code, '0203020C0AAAAAA')
def test_dmd_names(self):
def _assert_dmd_name(bnf_code, exp_dmd_name):
self.assertEqual(Presentation.objects.get(bnf_code=bnf_code).dmd_name, exp_dmd_name)
_assert_dmd_name('1003020U0AAAIAI', 'Diclofenac 2.32% gel')
_assert_dmd_name('1003020U0BBADAI', 'Voltarol 12 Hour Emulgel P 2.32% gel')
_assert_dmd_name('1106000X0AAAIAI', 'Pilocarpine hydrochloride 6% eye drops')
_assert_dmd_name('1305020C0AAFVFV', None)
_assert_dmd_name('BBHCA0', 'Nutrison liquid (Nutricia Ltd)')
def test_logs(self):
path = 'pipeline/test-data/data/dmd/logs/7.4.0_/summary.csv'
with open(path) as f:
summary = list(csv.reader(f))
exp_summary = [['VMP', '7'], ['AMP', '15'], ['VMPP', '14'], ['AMPP', '26'], ['dmd-objs-present-in-mapping-only', '0'], ['vmps-with-inferred-bnf-code', '0'], ['vmps-with-no-bnf-code', '1'], ['bnf-codes-with-multiple-dmd-objs', '2'], ['bnf-codes-with-multiple-dmd-objs-and-no-inferred-name', '1'], ['vmpps-with-different-bnf-code-to-vmp', '0'], ['ampps-with-different-bnf-code-to-amp', '3']]
self.assertEqual(summary, exp_summary)
def test_another_import(self):
vmpp = VMPP.objects.get(id=)
concession = NCSOConcession.objects.create(date='2019-06-01', vmpp=vmpp, drug=vmpp.nm, pack_size=vmpp.qtyval, price_pence=123)
vmpp.delete()
with patch('dmd.management.commands.import_dmd.Command.upload_to_bq'):
call_command('import_dmd')
self.assertIsNone(VMP.objects.filter(id=).first())
vmp = VMP.objects.get(id=12345)
self.assertEqual(vmp.vpidprev, )
vmpp = VMPP.objects.get(id=)
self.assertEqual(vmpp.vmp, vmp)
amp = AMP.objects.get(id=)
self.assertEqual(amp.vmp, vmp)
concession.refresh_from_db()
self.assertEqual(concession.vmpp, vmpp)
def test_notify_slack(self):
with patch('dmd.management.commands.import_dmd.Command.upload_to_bq'):
with patch('dmd.management.commands.import_dmd.notify_slack') as ns:
call_command('import_dmd')
ns.assert_called()
def test_already_imported(self):
ImportLog.objects.create(category='dmd', filename='7.4.0_', current_at='2019-07-08')
with patch('dmd.management.commands.import_dmd.Command.upload_to_bq'):
with patch('dmd.management.commands.import_dmd.notify_slack') as ns:
call_command('import_dmd')
ns.assert_not_called() |
class SnmpUSMSecurityModel(AbstractSecurityModel):
    """User-Based Security Model (USM, RFC 3414) for SNMPv3 messages."""

    # Security model identifier: 3 == USM.
    SECURITY_MODEL_ID = 3

    # Authentication services keyed by their service (protocol) identifiers.
    AUTH_SERVICES = {hmacmd5.HmacMd5.SERVICE_ID: hmacmd5.HmacMd5(), hmacsha.HmacSha.SERVICE_ID: hmacsha.HmacSha(), hmacsha2.HmacSha2.SHA224_SERVICE_ID: hmacsha2.HmacSha2(hmacsha2.HmacSha2.SHA224_SERVICE_ID), hmacsha2.HmacSha2.SHA256_SERVICE_ID: hmacsha2.HmacSha2(hmacsha2.HmacSha2.SHA256_SERVICE_ID), hmacsha2.HmacSha2.SHA384_SERVICE_ID: hmacsha2.HmacSha2(hmacsha2.HmacSha2.SHA384_SERVICE_ID), hmacsha2.HmacSha2.SHA512_SERVICE_ID: hmacsha2.HmacSha2(hmacsha2.HmacSha2.SHA512_SERVICE_ID), noauth.NoAuth.SERVICE_ID: noauth.NoAuth()}

    # Privacy (encryption) services keyed by their service identifiers.
    PRIV_SERVICES = {des.Des.SERVICE_ID: des.Des(), des3.Des3.SERVICE_ID: des3.Des3(), aes.Aes.SERVICE_ID: aes.Aes(), aes192.AesBlumenthal192.SERVICE_ID: aes192.AesBlumenthal192(), aes256.AesBlumenthal256.SERVICE_ID: aes256.AesBlumenthal256(), aes192.Aes192.SERVICE_ID: aes192.Aes192(), aes256.Aes256.SERVICE_ID: aes256.Aes256(), nopriv.NoPriv.SERVICE_ID: nopriv.NoPriv()}

    # Empty engine ID used for user entries that should match any engine.
    WILDCARD_SECURITY_ENGINE_ID = pMod.OctetString(hexValue='')

    def __init__(self):
        AbstractSecurityModel.__init__(self)
        # ASN.1 spec object reused for encoding/decoding msgSecurityParameters.
        self._securityParametersSpec = UsmSecurityParameters()
        # securityEngineID -> (boots, time, latestReceivedEngineTime,
        # latestUpdateTimestamp); see its use in _generateRequestOrResponseMsg.
        self._timeline = {}
        # Buckets of timeline entries scheduled for expiration.
        self._timelineExpQueue = {}
        self._expirationTimer = 0
        # MIB branch version last used to build the security-to-user map;
        # -1 forces a rebuild on first use (see _sec2usr).
        self._paramsBranchId = (- 1)
def _sec2usr(self, snmpEngine, securityName, securityEngineID=None):
    """Map (securityEngineID, securityName) to a USM userName.

    Rebuilds a cached map from the SNMP-USER-BASED-SM-MIB usmUser table
    whenever the MIB branch version has changed.  Defaults *securityEngineID*
    to the local engine ID.  Raises NoSuchInstanceError when no matching
    user entry exists.
    """
    mibBuilder = snmpEngine.msgAndPduDsp.mibInstrumController.mibBuilder
    (usmUserEngineID,) = mibBuilder.importSymbols('SNMP-USER-BASED-SM-MIB', 'usmUserEngineID')
    # Only rebuild the map when the usmUser MIB branch has changed.
    if (self._paramsBranchId != usmUserEngineID.branchVersionId):
        (usmUserName, usmUserSecurityName) = mibBuilder.importSymbols('SNMP-USER-BASED-SM-MIB', 'usmUserName', 'usmUserSecurityName')
        self._securityToUserMap = {}
        nextMibNode = usmUserEngineID
        while True:
            try:
                nextMibNode = usmUserEngineID.getNextNode(nextMibNode.name)
            except NoSuchInstanceError:
                # Walked past the last table row: record the branch version
                # so the map is not rebuilt until the table changes.
                self._paramsBranchId = usmUserEngineID.branchVersionId
                ((debug.logger & debug.FLAG_SM) and debug.logger(('_sec2usr: built snmpEngineId + securityName to userName map, version %s: %r' % (self._paramsBranchId, self._securityToUserMap))))
                break
            # Instance index of this row, used to address sibling columns.
            instId = nextMibNode.name[len(usmUserSecurityName.name):]
            _engineID = usmUserEngineID.getNode((usmUserEngineID.name + instId)).syntax
            _userName = usmUserName.getNode((usmUserName.name + instId)).syntax
            _securityName = usmUserSecurityName.getNode((usmUserSecurityName.name + instId)).syntax
            k = (_engineID, _securityName)
            # First matching row wins; later duplicates are ignored.
            if (k not in self._securityToUserMap):
                self._securityToUserMap[k] = _userName
    if (securityEngineID is None):
        # No engine given: resolve against the local engine ID.
        (snmpEngineID,) = mibBuilder.importSymbols('__SNMP-FRAMEWORK-MIB', 'snmpEngineID')
        securityEngineID = snmpEngineID.syntax
    try:
        userName = self._securityToUserMap[(securityEngineID, securityName)]
    except KeyError:
        ((debug.logger & debug.FLAG_SM) and debug.logger(('_sec2usr: no entry exists for snmpEngineId %r, securityName %r' % (securityEngineID, securityName))))
        raise NoSuchInstanceError()
    ((debug.logger & debug.FLAG_SM) and debug.logger(('_sec2usr: using userName %r for snmpEngineId %r, securityName %r' % (userName, securityEngineID, securityName))))
    return userName
@staticmethod
def _getUserInfo(mibInstrumController, securityEngineID, userName):
    """Fetch a USM user's security settings and localized keys from the MIB.

    Restored the ``@staticmethod`` decorator: the method takes no ``self``
    yet is invoked as ``self._getUserInfo(mibInstrumController, ...)``
    elsewhere in this class; without the decorator the bound call shifts
    every argument by one and raises TypeError.

    Returns (userName, securityName, authProtocol, localizedAuthKey,
    privProtocol, localizedPrivKey).  Raises NoSuchInstanceError if the
    user row does not exist.
    """
    mibBuilder = mibInstrumController.mibBuilder
    (usmUserEntry,) = mibBuilder.importSymbols('SNMP-USER-BASED-SM-MIB', 'usmUserEntry')
    # Row index for the (engineID, userName) pair in the usmUser table.
    tblIdx = usmUserEntry.getInstIdFromIndices(securityEngineID, userName)
    # Columns 2/3/5/8: user name, security name, auth protocol, priv protocol.
    usmUserName = usmUserEntry.getNode(((usmUserEntry.name + (2,)) + tblIdx)).syntax
    usmUserSecurityName = usmUserEntry.getNode(((usmUserEntry.name + (3,)) + tblIdx)).syntax
    usmUserAuthProtocol = usmUserEntry.getNode(((usmUserEntry.name + (5,)) + tblIdx)).syntax
    usmUserPrivProtocol = usmUserEntry.getNode(((usmUserEntry.name + (8,)) + tblIdx)).syntax
    # Localized keys live in the vendor PYSNMP-USM-MIB key table (columns 1/2).
    (pysnmpUsmKeyEntry,) = mibBuilder.importSymbols('PYSNMP-USM-MIB', 'pysnmpUsmKeyEntry')
    pysnmpUsmKeyAuthLocalized = pysnmpUsmKeyEntry.getNode(((pysnmpUsmKeyEntry.name + (1,)) + tblIdx)).syntax
    pysnmpUsmKeyPrivLocalized = pysnmpUsmKeyEntry.getNode(((pysnmpUsmKeyEntry.name + (2,)) + tblIdx)).syntax
    return (usmUserName, usmUserSecurityName, usmUserAuthProtocol, pysnmpUsmKeyAuthLocalized, usmUserPrivProtocol, pysnmpUsmKeyPrivLocalized)
def _cloneUserInfo(self, snmpEngine, securityEngineID, userName):
    """Clone the local USM user row for *userName* under a newly discovered
    remote *securityEngineID*, localizing its auth/priv keys for that engine.

    Returns the same 6-tuple shape as ``_getUserInfo``.  Raises
    StatusInformation for unsupported auth/priv protocols.
    """
    mibInstrumController = snmpEngine.msgAndPduDsp.mibInstrumController
    mibBuilder = mibInstrumController.mibBuilder
    (snmpEngineID,) = mibBuilder.importSymbols('__SNMP-FRAMEWORK-MIB', 'snmpEngineID')
    # Source row: the user as configured against the local engine ID.
    (usmUserEntry,) = mibBuilder.importSymbols('SNMP-USER-BASED-SM-MIB', 'usmUserEntry')
    tblIdx1 = usmUserEntry.getInstIdFromIndices(snmpEngineID.syntax, userName)
    usmUserName = usmUserEntry.getNode(((usmUserEntry.name + (2,)) + tblIdx1))
    usmUserSecurityName = usmUserEntry.getNode(((usmUserEntry.name + (3,)) + tblIdx1))
    usmUserCloneFrom = usmUserEntry.getNode(((usmUserEntry.name + (4,)) + tblIdx1))
    usmUserAuthProtocol = usmUserEntry.getNode(((usmUserEntry.name + (5,)) + tblIdx1))
    usmUserPrivProtocol = usmUserEntry.getNode(((usmUserEntry.name + (8,)) + tblIdx1))
    # Master (non-localized) keys from the vendor key table (columns 3/4).
    (pysnmpUsmKeyEntry,) = mibBuilder.importSymbols('PYSNMP-USM-MIB', 'pysnmpUsmKeyEntry')
    pysnmpUsmKeyAuth = pysnmpUsmKeyEntry.getNode(((pysnmpUsmKeyEntry.name + (3,)) + tblIdx1))
    pysnmpUsmKeyPriv = pysnmpUsmKeyEntry.getNode(((pysnmpUsmKeyEntry.name + (4,)) + tblIdx1))
    # Destination row: same user, keyed by the remote engine ID.  Column 13
    # holds the row status: write 5 first, copy the columns, then write 1.
    tblIdx2 = usmUserEntry.getInstIdFromIndices(securityEngineID, userName)
    mibInstrumController.writeMibObjects((((usmUserEntry.name + (13,)) + tblIdx2), 5), snmpEngine=snmpEngine)
    usmUserEntry.getNode(((usmUserEntry.name + (2,)) + tblIdx2)).syntax = usmUserName.syntax
    usmUserEntry.getNode(((usmUserEntry.name + (3,)) + tblIdx2)).syntax = usmUserSecurityName.syntax
    usmUserEntry.getNode(((usmUserEntry.name + (4,)) + tblIdx2)).syntax = usmUserCloneFrom.syntax.clone(tblIdx1)
    usmUserEntry.getNode(((usmUserEntry.name + (5,)) + tblIdx2)).syntax = usmUserAuthProtocol.syntax
    usmUserEntry.getNode(((usmUserEntry.name + (8,)) + tblIdx2)).syntax = usmUserPrivProtocol.syntax
    mibInstrumController.writeMibObjects((((usmUserEntry.name + (13,)) + tblIdx2), 1), snmpEngine=snmpEngine)
    (pysnmpUsmKeyEntry,) = mibBuilder.importSymbols('PYSNMP-USM-MIB', 'pysnmpUsmKeyEntry')
    pysnmpUsmKeyAuthLocalized = pysnmpUsmKeyEntry.getNode(((pysnmpUsmKeyEntry.name + (1,)) + tblIdx2))
    # Localize the auth key for the remote engine via the user's auth service.
    if (usmUserAuthProtocol.syntax in self.AUTH_SERVICES):
        localizeKey = self.AUTH_SERVICES[usmUserAuthProtocol.syntax].localizeKey
        localAuthKey = localizeKey(pysnmpUsmKeyAuth.syntax, securityEngineID)
    else:
        raise error.StatusInformation(errorIndication=errind.unsupportedAuthProtocol)
    if (localAuthKey is not None):
        pysnmpUsmKeyAuthLocalized.syntax = pysnmpUsmKeyAuthLocalized.syntax.clone(localAuthKey)
    pysnmpUsmKeyPrivLocalized = pysnmpUsmKeyEntry.getNode(((pysnmpUsmKeyEntry.name + (2,)) + tblIdx2))
    # Priv key localization additionally needs the auth protocol (note the
    # extra first argument compared to the auth localizeKey above).
    if (usmUserPrivProtocol.syntax in self.PRIV_SERVICES):
        localizeKey = self.PRIV_SERVICES[usmUserPrivProtocol.syntax].localizeKey
        localPrivKey = localizeKey(usmUserAuthProtocol.syntax, pysnmpUsmKeyPriv.syntax, securityEngineID)
    else:
        raise error.StatusInformation(errorIndication=errind.unsupportedPrivProtocol)
    if (localPrivKey is not None):
        pysnmpUsmKeyPrivLocalized.syntax = pysnmpUsmKeyPrivLocalized.syntax.clone(localPrivKey)
    return (usmUserName.syntax, usmUserSecurityName.syntax, usmUserAuthProtocol.syntax, pysnmpUsmKeyAuthLocalized.syntax, usmUserPrivProtocol.syntax, pysnmpUsmKeyPrivLocalized.syntax)
def _generateRequestOrResponseMsg(self, snmpEngine, messageProcessingModel, globalData, maxMessageSize, securityModel, securityEngineID, securityName, securityLevel, scopedPDU, securityStateReference):
    """Serialize an outgoing SNMPv3 message, applying USM auth/privacy.

    Resolves USM credentials from (in order) cached security state
    (response path), the LCD user tables (request path), or blank
    credentials for peer engine ID discovery; then encrypts and/or
    authenticates according to *securityLevel* (1=noAuthNoPriv,
    2=authNoPriv, 3=authPriv).  Returns a tuple of (encoded
    securityParameters component, whole encoded message).
    """
    mibBuilder = snmpEngine.msgAndPduDsp.mibInstrumController.mibBuilder
    snmpEngineID = mibBuilder.importSymbols('__SNMP-FRAMEWORK-MIB', 'snmpEngineID')[0].syntax
    msg = globalData
    # --- response path: reuse security data cached when the request came in
    if (securityStateReference is not None):
        cachedSecurityData = self._cache.pop(securityStateReference)
        usmUserName = cachedSecurityData['msgUserName']
        if ('usmUserSecurityName' in cachedSecurityData):
            usmUserSecurityName = cachedSecurityData['usmUserSecurityName']
        else:
            usmUserSecurityName = usmUserName
        if ('usmUserAuthProtocol' in cachedSecurityData):
            usmUserAuthProtocol = cachedSecurityData['usmUserAuthProtocol']
        else:
            usmUserAuthProtocol = noauth.NoAuth.SERVICE_ID
        if ('usmUserAuthKeyLocalized' in cachedSecurityData):
            usmUserAuthKeyLocalized = cachedSecurityData['usmUserAuthKeyLocalized']
        else:
            usmUserAuthKeyLocalized = None
        if ('usmUserPrivProtocol' in cachedSecurityData):
            usmUserPrivProtocol = cachedSecurityData['usmUserPrivProtocol']
        else:
            usmUserPrivProtocol = nopriv.NoPriv.SERVICE_ID
        if ('usmUserPrivKeyLocalized' in cachedSecurityData):
            usmUserPrivKeyLocalized = cachedSecurityData['usmUserPrivKeyLocalized']
        else:
            usmUserPrivKeyLocalized = None
        # Responses are authoritative: use the local engine ID.
        securityEngineID = snmpEngineID
        ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: using cached USM user entry usmUserName "%s" usmUserSecurityName "%s" usmUserAuthProtocol "%s" usmUserAuthKeyLocalized "%s" usmUserPrivProtocol "%s" usmUserPrivKeyLocalized "%s" for securityEngineID "%s" and securityName "%s" found by securityStateReference "%s" ' % (usmUserName, usmUserSecurityName, usmUserAuthProtocol, (usmUserAuthKeyLocalized and usmUserAuthKeyLocalized.prettyPrint()), usmUserPrivProtocol, (usmUserPrivKeyLocalized and usmUserPrivKeyLocalized.prettyPrint()), securityEngineID.prettyPrint(), securityName, securityStateReference))))
    # --- request path: look the user up (falling back to wildcard engine ID)
    elif securityEngineID:
        try:
            try:
                (usmUserName, usmUserSecurityName, usmUserAuthProtocol, usmUserAuthKeyLocalized, usmUserPrivProtocol, usmUserPrivKeyLocalized) = self._getUserInfo(snmpEngine.msgAndPduDsp.mibInstrumController, securityEngineID, self._sec2usr(snmpEngine, securityName, securityEngineID))
            except NoSuchInstanceError:
                (usmUserName, usmUserSecurityName, usmUserAuthProtocol, usmUserAuthKeyLocalized, usmUserPrivProtocol, usmUserPrivKeyLocalized) = self._getUserInfo(snmpEngine.msgAndPduDsp.mibInstrumController, self.WILDCARD_SECURITY_ENGINE_ID, self._sec2usr(snmpEngine, securityName, self.WILDCARD_SECURITY_ENGINE_ID))
            ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: found USM user entry usmUserName "%s" usmUserSecurityName "%s" usmUserAuthProtocol "%s" usmUserAuthKeyLocalized "%s" usmUserPrivProtocol "%s" usmUserPrivKeyLocalized "%s" by securityEngineID "%s" and securityName "%s"' % (usmUserName, usmUserSecurityName, usmUserAuthProtocol, (usmUserAuthKeyLocalized and usmUserAuthKeyLocalized.prettyPrint()), usmUserPrivProtocol, (usmUserPrivKeyLocalized and usmUserPrivKeyLocalized.prettyPrint()), securityEngineID.prettyPrint(), securityName))))
        except NoSuchInstanceError:
            # Unknown user: optionally clone a local user entry for the
            # remote engine if USM discovery is enabled.
            (pysnmpUsmDiscovery,) = mibBuilder.importSymbols('__PYSNMP-USM-MIB', 'pysnmpUsmDiscovery')
            reportUnknownName = (not pysnmpUsmDiscovery.syntax)
            if (not reportUnknownName):
                try:
                    (usmUserName, usmUserSecurityName, usmUserAuthProtocol, usmUserAuthKeyLocalized, usmUserPrivProtocol, usmUserPrivKeyLocalized) = self._cloneUserInfo(snmpEngine, securityEngineID, self._sec2usr(snmpEngine, securityName))
                    ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: cloned USM user entry usmUserName "%s" usmUserSecurityName "%s" usmUserAuthProtocol "%s" usmUserAuthKeyLocalized "%s" usmUserPrivProtocol "%s" usmUserPrivKeyLocalized "%s" for securityEngineID "%s" and securityName "%s"' % (usmUserName, usmUserSecurityName, usmUserAuthProtocol, (usmUserAuthKeyLocalized and usmUserAuthKeyLocalized.prettyPrint()), usmUserPrivProtocol, (usmUserPrivKeyLocalized and usmUserPrivKeyLocalized.prettyPrint()), securityEngineID.prettyPrint(), securityName))))
                except NoSuchInstanceError:
                    ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: failed to clone USM user for securityEngineID "%s" securityName "%s"' % (securityEngineID, securityName))))
                    reportUnknownName = True
            if reportUnknownName:
                raise error.StatusInformation(errorIndication=errind.unknownSecurityName)
            ((debug.logger & debug.FLAG_SM) and debug.logger('__generateRequestOrResponseMsg: clone user info'))
        except PyAsn1Error as exc:
            ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: %s' % exc)))
            (snmpInGenErrs,) = mibBuilder.importSymbols('__SNMPv2-MIB', 'snmpInGenErrs')
            snmpInGenErrs.syntax += 1
            raise error.StatusInformation(errorIndication=errind.invalidMsg)
    # --- discovery path: no engine ID known; send a blank noAuthNoPriv probe
    else:
        securityEngineID = securityName = null
        securityLevel = 1
        scopedPDU.setComponentByPosition(0, null, verifyConstraints=False, matchTags=False, matchConstraints=False)
        headerData = msg.getComponentByPosition(1)
        headerData.setComponentByPosition(2, univ.OctetString(hexValue='04'), verifyConstraints=False, matchTags=False, matchConstraints=False)
        # Reset the PDU to defaults for the discovery probe.
        emptyPdu = scopedPDU.getComponentByPosition(2).getComponent()
        emptyPdu = emptyPdu.clone()
        pMod.apiPDU.setDefaults(emptyPdu)
        scopedPDU.getComponentByPosition(2).setComponentByType(emptyPdu.tagSet, emptyPdu, verifyConstraints=False, matchTags=False, matchConstraints=False)
        usmUserName = usmUserSecurityName = null
        usmUserAuthProtocol = noauth.NoAuth.SERVICE_ID
        usmUserPrivProtocol = nopriv.NoPriv.SERVICE_ID
        usmUserAuthKeyLocalized = usmUserPrivKeyLocalized = None
        ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: using blank USM info for peer SNMP engine ID discovery usmUserName "%s" usmUserSecurityName "%s" usmUserAuthProtocol "%s" usmUserAuthKeyLocalized "%s" usmUserPrivProtocol "%s" usmUserPrivKeyLocalized "%s" for securityEngineID "%s" and securityName "%s"' % (usmUserName, usmUserSecurityName, usmUserAuthProtocol, usmUserAuthKeyLocalized, usmUserPrivProtocol, usmUserPrivKeyLocalized, (securityEngineID and securityEngineID.prettyPrint()), securityName))))
    # --- sanity-check that the user supports the requested security level
    if (securityLevel == 3):
        if ((usmUserAuthProtocol == noauth.NoAuth.SERVICE_ID) or (usmUserPrivProtocol == nopriv.NoPriv.SERVICE_ID)):
            raise error.StatusInformation(errorIndication=errind.unsupportedSecurityLevel)
    if ((securityLevel == 3) or (securityLevel == 2)):
        if (usmUserAuthProtocol == noauth.NoAuth.SERVICE_ID):
            raise error.StatusInformation(errorIndication=errind.unsupportedSecurityLevel)
    securityParameters = self._securityParametersSpec
    scopedPDUData = msg.setComponentByPosition(3).getComponentByPosition(3)
    scopedPDUData.setComponentByPosition(0, scopedPDU, verifyConstraints=False, matchTags=False, matchConstraints=False)
    # --- pick snmpEngineBoots/Time: local LCD for unconfirmed PDUs, the
    # per-engine timeline otherwise, zeros when nothing is known yet
    snmpEngineBoots = snmpEngineTime = 0
    if (securityLevel in (2, 3)):
        pdu = scopedPDU.getComponentByPosition(2).getComponent()
        if (pdu.tagSet in rfc3411.UNCONFIRMED_CLASS_PDUS):
            (snmpEngineBoots, snmpEngineTime) = mibBuilder.importSymbols('__SNMP-FRAMEWORK-MIB', 'snmpEngineBoots', 'snmpEngineTime')
            snmpEngineBoots = snmpEngineBoots.syntax
            snmpEngineTime = snmpEngineTime.syntax.clone()
            ((debug.logger & debug.FLAG_SM) and debug.logger('__generateRequestOrResponseMsg: read snmpEngineBoots, snmpEngineTime from LCD'))
        elif (securityEngineID in self._timeline):
            (snmpEngineBoots, snmpEngineTime, latestReceivedEngineTime, latestUpdateTimestamp) = self._timeline[securityEngineID]
            ((debug.logger & debug.FLAG_SM) and debug.logger('__generateRequestOrResponseMsg: read snmpEngineBoots, snmpEngineTime from timeline'))
        else:
            ((debug.logger & debug.FLAG_SM) and debug.logger('__generateRequestOrResponseMsg: assuming zero snmpEngineBoots, snmpEngineTime'))
        ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: use snmpEngineBoots %s snmpEngineTime %s for securityEngineID %r' % (snmpEngineBoots, snmpEngineTime, securityEngineID))))
    # --- privacy: encrypt the scoped PDU when authPriv is requested
    if (securityLevel == 3):
        if (usmUserPrivProtocol in self.PRIV_SERVICES):
            privHandler = self.PRIV_SERVICES[usmUserPrivProtocol]
        else:
            raise error.StatusInformation(errorIndication=errind.encryptionError)
        ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: scopedPDU %s' % scopedPDU.prettyPrint())))
        try:
            dataToEncrypt = encoder.encode(scopedPDU)
        except PyAsn1Error as exc:
            ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: scopedPDU serialization error: %s' % exc)))
            raise error.StatusInformation(errorIndication=errind.serializationError)
        ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: scopedPDU encoded into %s' % debug.hexdump(dataToEncrypt))))
        (encryptedData, privParameters) = privHandler.encryptData(usmUserPrivKeyLocalized, (snmpEngineBoots, snmpEngineTime, None), dataToEncrypt)
        securityParameters.setComponentByPosition(5, privParameters, verifyConstraints=False, matchTags=False, matchConstraints=False)
        scopedPDUData.setComponentByPosition(1, encryptedData, verifyConstraints=False, matchTags=False, matchConstraints=False)
        ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: scopedPDU ciphered into %s' % debug.hexdump(encryptedData))))
    elif ((securityLevel == 1) or (securityLevel == 2)):
        # No privacy: empty msgPrivacyParameters.
        securityParameters.setComponentByPosition(5, '')
    ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: %s' % scopedPDUData.prettyPrint())))
    securityParameters.setComponentByPosition(0, securityEngineID, verifyConstraints=False, matchTags=False, matchConstraints=False)
    securityParameters.setComponentByPosition(1, snmpEngineBoots, verifyConstraints=False, matchTags=False, matchConstraints=False)
    securityParameters.setComponentByPosition(2, snmpEngineTime, verifyConstraints=False, matchTags=False, matchConstraints=False)
    securityParameters.setComponentByPosition(3, usmUserName, verifyConstraints=False, matchTags=False, matchConstraints=False)
    # --- authentication: placeholder digest first, then authenticate the
    # serialized whole message (the handler fills the digest in)
    if ((securityLevel == 3) or (securityLevel == 2)):
        if (usmUserAuthProtocol in self.AUTH_SERVICES):
            authHandler = self.AUTH_SERVICES[usmUserAuthProtocol]
        else:
            raise error.StatusInformation(errorIndication=errind.authenticationFailure)
        securityParameters.setComponentByPosition(4, ('\x00' * authHandler.digestLength))
        ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: %s' % (securityParameters.prettyPrint(),))))
        try:
            msg.setComponentByPosition(2, encoder.encode(securityParameters), verifyConstraints=False)
        except PyAsn1Error as exc:
            ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: securityParameters serialization error: %s' % exc)))
            raise error.StatusInformation(errorIndication=errind.serializationError)
        ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: auth outgoing msg: %s' % msg.prettyPrint())))
        try:
            wholeMsg = encoder.encode(msg)
        except PyAsn1Error as exc:
            ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: msg serialization error: %s' % exc)))
            raise error.StatusInformation(errorIndication=errind.serializationError)
        authenticatedWholeMsg = authHandler.authenticateOutgoingMsg(usmUserAuthKeyLocalized, wholeMsg)
    else:
        # noAuthNoPriv: empty digest, serialize as-is.
        securityParameters.setComponentByPosition(4, '', verifyConstraints=False, matchTags=False, matchConstraints=False)
        ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: %s' % (securityParameters.prettyPrint(),))))
        try:
            msg.setComponentByPosition(2, encoder.encode(securityParameters), verifyConstraints=False, matchTags=False, matchConstraints=False)
        except PyAsn1Error as exc:
            ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: securityParameters serialization error: %s' % exc)))
            raise error.StatusInformation(errorIndication=errind.serializationError)
        try:
            ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: plain outgoing msg: %s' % msg.prettyPrint())))
            authenticatedWholeMsg = encoder.encode(msg)
        except PyAsn1Error as exc:
            ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: msg serialization error: %s' % exc)))
            raise error.StatusInformation(errorIndication=errind.serializationError)
    ((debug.logger & debug.FLAG_SM) and debug.logger(('__generateRequestOrResponseMsg: %s outgoing msg: %s' % ((((securityLevel > 1) and 'authenticated') or 'plain'), debug.hexdump(authenticatedWholeMsg)))))
    return (msg.getComponentByPosition(2), authenticatedWholeMsg)
def generateRequestMsg(self, snmpEngine, messageProcessingModel, globalData, maxMessageSize, securityModel, securityEngineID, securityName, securityLevel, scopedPDU):
    """Build an outgoing request message (no cached security state)."""
    return self._generateRequestOrResponseMsg(
        snmpEngine, messageProcessingModel, globalData, maxMessageSize,
        securityModel, securityEngineID, securityName, securityLevel,
        scopedPDU, None,
    )
def generateResponseMsg(self, snmpEngine, messageProcessingModel, globalData, maxMessageSize, securityModel, securityEngineID, securityName, securityLevel, scopedPDU, securityStateReference):
    """Build an outgoing response using the security state cached for the request."""
    return self._generateRequestOrResponseMsg(
        snmpEngine, messageProcessingModel, globalData, maxMessageSize,
        securityModel, securityEngineID, securityName, securityLevel,
        scopedPDU, securityStateReference,
    )
def processIncomingMsg(self, snmpEngine, messageProcessingModel, maxMessageSize, securityParameters, securityModel, securityLevel, wholeMsg, msg):
    """Process an incoming SNMPv3 message per RFC 3414 section 3.2.

    Deserializes the USM security parameters, resolves the user in the
    LCD, enforces the requested security level, authenticates the message,
    verifies the time window and decrypts the scoped PDU when required.

    Returns a (securityEngineId, securityName, scopedPDU,
    maxSizeResponseScopedPDU, securityStateReference) tuple, or raises
    error.StatusInformation carrying the relevant USM error-counter OID.

    FIX: the two time-window conditions below had lost their right-hand
    operand (`snmpEngineBoots == `); restored the RFC 3414 maximum
    snmpEngineBoots value 2147483647, at which an engine is considered
    permanently out of its time window.
    """
    mibBuilder = snmpEngine.msgAndPduDsp.mibInstrumController.mibBuilder

    # 3.2.9 -- computed up-front so error reports can carry it;
    # 48 octets is the assumed maximum SNMPv3 header overhead.
    maxSizeResponseScopedPDU = int(maxMessageSize) - len(securityParameters) - 48

    debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: securityParameters %s' % debug.hexdump(securityParameters))

    # 3.2.1 -- deserialize the UsmSecurityParameters blob
    securityParameters, rest = decoder.decode(securityParameters, asn1Spec=self._securityParametersSpec)

    debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: %s' % (securityParameters.prettyPrint(),))

    if eoo.endOfOctets.isSameTypeWith(securityParameters):
        raise error.StatusInformation(errorIndication=errind.parseError)

    # 3.2.2
    msgAuthoritativeEngineId = securityParameters.getComponentByPosition(0)

    # 3.2.3 -- cache security data early so error reports can reference it
    securityStateReference = self._cache.push(msgUserName=securityParameters.getComponentByPosition(3))

    debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: cache write securityStateReference %s by msgUserName %s' % (securityStateReference, securityParameters.getComponentByPosition(3)))

    scopedPduData = msg.getComponentByPosition(3)

    # Defaults used when an error must be reported before the scoped PDU
    # has been parsed.
    contextEngineId = mibBuilder.importSymbols('__SNMP-FRAMEWORK-MIB', 'snmpEngineID')[0].syntax
    contextName = null

    snmpEngineID = mibBuilder.importSymbols('__SNMP-FRAMEWORK-MIB', 'snmpEngineID')[0].syntax

    # 3.2.3 -- handle messages from engines we are not synchronized with
    if msgAuthoritativeEngineId != snmpEngineID and msgAuthoritativeEngineId not in self._timeline:
        if msgAuthoritativeEngineId and 4 < len(msgAuthoritativeEngineId) < 33:
            # 3.2.3a -- plausible engine ID, just not synchronized yet
            debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: non-synchronized securityEngineID %r' % (msgAuthoritativeEngineId,))
        else:
            # 3.2.3b -- empty/malformed engine ID: peer requests discovery
            debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: peer requested snmpEngineID discovery')
            usmStatsUnknownEngineIDs, = mibBuilder.importSymbols('__SNMP-USER-BASED-SM-MIB', 'usmStatsUnknownEngineIDs')
            usmStatsUnknownEngineIDs.syntax += 1
            debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: null or malformed msgAuthoritativeEngineId')

            pysnmpUsmDiscoverable, = mibBuilder.importSymbols('__PYSNMP-USM-MIB', 'pysnmpUsmDiscoverable')
            if pysnmpUsmDiscoverable.syntax:
                debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: starting snmpEngineID discovery procedure')

                # Discovery traffic must arrive unencrypted
                if scopedPduData.getName() != 'plaintext':
                    debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: scopedPduData not plaintext %s' % scopedPduData.prettyPrint())
                    raise error.StatusInformation(errorIndication=errind.unknownEngineID)

                # Report back using the context of the unauthenticated PDU
                scopedPdu = scopedPduData.getComponent()
                contextEngineId = scopedPdu.getComponentByPosition(0)
                contextName = scopedPdu.getComponentByPosition(1)

                raise error.StatusInformation(errorIndication=errind.unknownEngineID, oid=usmStatsUnknownEngineIDs.name, val=usmStatsUnknownEngineIDs.syntax, securityStateReference=securityStateReference, securityLevel=securityLevel, contextEngineId=contextEngineId, contextName=contextName, scopedPDU=scopedPdu, maxSizeResponseScopedPDU=maxSizeResponseScopedPDU)
            else:
                debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: will not discover EngineID')
                raise error.StatusInformation(errorIndication=errind.unknownEngineID)

    msgUserName = securityParameters.getComponentByPosition(3)

    debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: read from securityParams msgAuthoritativeEngineId %r msgUserName %r' % (msgAuthoritativeEngineId, msgUserName))

    if msgUserName:
        # 3.2.4 -- resolve the user in the LCD, falling back to a
        # wildcard-engine entry before declaring the user unknown.
        try:
            (usmUserName, usmUserSecurityName, usmUserAuthProtocol, usmUserAuthKeyLocalized, usmUserPrivProtocol, usmUserPrivKeyLocalized) = self._getUserInfo(snmpEngine.msgAndPduDsp.mibInstrumController, msgAuthoritativeEngineId, msgUserName)
            debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: read user info from LCD')
        except NoSuchInstanceError:
            try:
                (usmUserName, usmUserSecurityName, usmUserAuthProtocol, usmUserAuthKeyLocalized, usmUserPrivProtocol, usmUserPrivKeyLocalized) = self._getUserInfo(snmpEngine.msgAndPduDsp.mibInstrumController, self.WILDCARD_SECURITY_ENGINE_ID, msgUserName)
                debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: read wildcard user info from LCD')
            except NoSuchInstanceError:
                debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: unknown securityEngineID %r msgUserName %r' % (msgAuthoritativeEngineId, msgUserName))
                usmStatsUnknownUserNames, = mibBuilder.importSymbols('__SNMP-USER-BASED-SM-MIB', 'usmStatsUnknownUserNames')
                usmStatsUnknownUserNames.syntax += 1
                raise error.StatusInformation(errorIndication=errind.unknownSecurityName, oid=usmStatsUnknownUserNames.name, val=usmStatsUnknownUserNames.syntax, securityStateReference=securityStateReference, securityLevel=securityLevel, contextEngineId=contextEngineId, contextName=contextName, msgUserName=msgUserName, maxSizeResponseScopedPDU=maxSizeResponseScopedPDU)
        except PyAsn1Error as exc:
            debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: %s' % exc)
            snmpInGenErrs, = mibBuilder.importSymbols('__SNMPv2-MIB', 'snmpInGenErrs')
            snmpInGenErrs.syntax += 1
            raise error.StatusInformation(errorIndication=errind.invalidMsg)
    else:
        # Empty username: engine ID discovery traffic, no credentials apply
        usmUserName = usmUserSecurityName = null
        usmUserAuthProtocol = noauth.NoAuth.SERVICE_ID
        usmUserPrivProtocol = nopriv.NoPriv.SERVICE_ID
        usmUserAuthKeyLocalized = usmUserPrivKeyLocalized = None

    debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: now have usmUserName %r usmUserSecurityName %r usmUserAuthProtocol %r usmUserPrivProtocol %r for msgUserName %r' % (usmUserName, usmUserSecurityName, usmUserAuthProtocol, usmUserPrivProtocol, msgUserName))

    # Re-cache, now including the resolved user credentials
    self._cache.pop(securityStateReference)
    securityStateReference = self._cache.push(msgUserName=securityParameters.getComponentByPosition(3), usmUserSecurityName=usmUserSecurityName, usmUserAuthProtocol=usmUserAuthProtocol, usmUserAuthKeyLocalized=usmUserAuthKeyLocalized, usmUserPrivProtocol=usmUserPrivProtocol, usmUserPrivKeyLocalized=usmUserPrivKeyLocalized)

    msgAuthoritativeEngineBoots = securityParameters.getComponentByPosition(1)
    msgAuthoritativeEngineTime = securityParameters.getComponentByPosition(2)

    snmpEngine.observer.storeExecutionContext(snmpEngine, 'rfc3414.processIncomingMsg', dict(securityEngineId=msgAuthoritativeEngineId, snmpEngineBoots=msgAuthoritativeEngineBoots, snmpEngineTime=msgAuthoritativeEngineTime, userName=usmUserName, securityName=usmUserSecurityName, authProtocol=usmUserAuthProtocol, authKey=usmUserAuthKeyLocalized, privProtocol=usmUserPrivProtocol, privKey=usmUserPrivKeyLocalized))
    snmpEngine.observer.clearExecutionContext(snmpEngine, 'rfc3414.processIncomingMsg')

    if msgAuthoritativeEngineId == snmpEngineID:
        # 3.2.5 -- we are authoritative: the requested security level must
        # match the auth/priv protocols configured for the user.
        badSecIndication = None
        if securityLevel == 3:  # authPriv
            if usmUserAuthProtocol == noauth.NoAuth.SERVICE_ID:
                badSecIndication = 'authPriv wanted while auth not expected'
            if usmUserPrivProtocol == nopriv.NoPriv.SERVICE_ID:
                badSecIndication = 'authPriv wanted while priv not expected'
        elif securityLevel == 2:  # authNoPriv
            if usmUserAuthProtocol == noauth.NoAuth.SERVICE_ID:
                badSecIndication = 'authNoPriv wanted while auth not expected'
            if usmUserPrivProtocol != nopriv.NoPriv.SERVICE_ID:
                # Discovery phase carries zero boots/time; tolerate it
                if msgAuthoritativeEngineBoots or msgAuthoritativeEngineTime:
                    badSecIndication = 'authNoPriv wanted while priv expected'
        elif securityLevel == 1:  # noAuthNoPriv
            if usmUserAuthProtocol != noauth.NoAuth.SERVICE_ID:
                badSecIndication = 'noAuthNoPriv wanted while auth expected'
            if usmUserPrivProtocol != nopriv.NoPriv.SERVICE_ID:
                badSecIndication = 'noAuthNoPriv wanted while priv expected'
        if badSecIndication:
            usmStatsUnsupportedSecLevels, = mibBuilder.importSymbols('__SNMP-USER-BASED-SM-MIB', 'usmStatsUnsupportedSecLevels')
            usmStatsUnsupportedSecLevels.syntax += 1
            debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: reporting inappropriate security level for user %s: %s' % (msgUserName, badSecIndication))
            raise error.StatusInformation(errorIndication=errind.unsupportedSecurityLevel, oid=usmStatsUnsupportedSecLevels.name, val=usmStatsUnsupportedSecLevels.syntax, securityStateReference=securityStateReference, securityLevel=securityLevel, contextEngineId=contextEngineId, contextName=contextName, msgUserName=msgUserName, maxSizeResponseScopedPDU=maxSizeResponseScopedPDU)

    # 3.2.6 -- verify the message digest at authenticated security levels
    if securityLevel == 3 or securityLevel == 2:
        if usmUserAuthProtocol in self.AUTH_SERVICES:
            authHandler = self.AUTH_SERVICES[usmUserAuthProtocol]
        else:
            raise error.StatusInformation(errorIndication=errind.authenticationFailure)

        try:
            authHandler.authenticateIncomingMsg(usmUserAuthKeyLocalized, securityParameters.getComponentByPosition(4), wholeMsg)
        except error.StatusInformation:
            usmStatsWrongDigests, = mibBuilder.importSymbols('__SNMP-USER-BASED-SM-MIB', 'usmStatsWrongDigests')
            usmStatsWrongDigests.syntax += 1
            raise error.StatusInformation(errorIndication=errind.authenticationFailure, oid=usmStatsWrongDigests.name, val=usmStatsWrongDigests.syntax, securityStateReference=securityStateReference, securityLevel=securityLevel, contextEngineId=contextEngineId, contextName=contextName, msgUserName=msgUserName, maxSizeResponseScopedPDU=maxSizeResponseScopedPDU)

        debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: incoming msg authenticated')

        # Message is authentic -- adopt the peer's notion of its clock
        self._timeline[msgAuthoritativeEngineId] = (securityParameters.getComponentByPosition(1), securityParameters.getComponentByPosition(2), securityParameters.getComponentByPosition(2), int(time.time()))

        timerResolution = snmpEngine.transportDispatcher is None and 1.0 or snmpEngine.transportDispatcher.getTimerResolution()
        expireAt = int(self._expirationTimer + 300 / timerResolution)
        if expireAt not in self._timelineExpQueue:
            self._timelineExpQueue[expireAt] = []
        self._timelineExpQueue[expireAt].append(msgAuthoritativeEngineId)

        debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: store timeline for securityEngineID %r' % (msgAuthoritativeEngineId,))

    # 3.2.7 -- time window verification
    if securityLevel == 3 or securityLevel == 2:
        if msgAuthoritativeEngineId == snmpEngineID:
            # Authoritative engine: compare against our own boots/time
            snmpEngineBoots, snmpEngineTime = mibBuilder.importSymbols('__SNMP-FRAMEWORK-MIB', 'snmpEngineBoots', 'snmpEngineTime')
            snmpEngineBoots = snmpEngineBoots.syntax
            snmpEngineTime = snmpEngineTime.syntax.clone()
            idleTime = 0
            debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: read snmpEngineBoots (%s), snmpEngineTime (%s) from LCD' % (snmpEngineBoots, snmpEngineTime))
        elif msgAuthoritativeEngineId in self._timeline:
            # Non-authoritative: use our cached estimate of the peer clock
            (snmpEngineBoots, snmpEngineTime, latestReceivedEngineTime, latestUpdateTimestamp) = self._timeline[msgAuthoritativeEngineId]
            idleTime = int(time.time()) - latestUpdateTimestamp
            debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: read timeline snmpEngineBoots %s snmpEngineTime %s for msgAuthoritativeEngineId %r, idle time %s secs' % (snmpEngineBoots, snmpEngineTime, msgAuthoritativeEngineId, idleTime))
        else:
            raise error.ProtocolError('Peer SNMP engine info missing')

        if msgAuthoritativeEngineId == snmpEngineID:
            # 3.2.7a -- authoritative check: 2147483647 boots means the
            # engine is permanently out of its time window (RFC 3414);
            # otherwise boots must match and time drift stay within 150s.
            if (snmpEngineBoots == 2147483647 or snmpEngineBoots != msgAuthoritativeEngineBoots or abs(idleTime + int(snmpEngineTime) - int(msgAuthoritativeEngineTime)) > 150):
                usmStatsNotInTimeWindows, = mibBuilder.importSymbols('__SNMP-USER-BASED-SM-MIB', 'usmStatsNotInTimeWindows')
                usmStatsNotInTimeWindows.syntax += 1
                # notInTimeWindow reports go out authNoPriv (securityLevel=2)
                raise error.StatusInformation(errorIndication=errind.notInTimeWindow, oid=usmStatsNotInTimeWindows.name, val=usmStatsNotInTimeWindows.syntax, securityStateReference=securityStateReference, securityLevel=2, contextEngineId=contextEngineId, contextName=contextName, msgUserName=msgUserName, maxSizeResponseScopedPDU=maxSizeResponseScopedPDU)
        else:
            # 3.2.7b.1 -- advance our cached peer clock if it moved forward
            if (msgAuthoritativeEngineBoots > snmpEngineBoots or (msgAuthoritativeEngineBoots == snmpEngineBoots and msgAuthoritativeEngineTime > latestReceivedEngineTime)):
                self._timeline[msgAuthoritativeEngineId] = (msgAuthoritativeEngineBoots, msgAuthoritativeEngineTime, msgAuthoritativeEngineTime, int(time.time()))
                timerResolution = snmpEngine.transportDispatcher is None and 1.0 or snmpEngine.transportDispatcher.getTimerResolution()
                expireAt = int(self._expirationTimer + 300 / timerResolution)
                if expireAt not in self._timelineExpQueue:
                    self._timelineExpQueue[expireAt] = []
                self._timelineExpQueue[expireAt].append(msgAuthoritativeEngineId)
                debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: stored timeline msgAuthoritativeEngineBoots %s msgAuthoritativeEngineTime %s for msgAuthoritativeEngineId %r' % (msgAuthoritativeEngineBoots, msgAuthoritativeEngineTime, msgAuthoritativeEngineId))

            # 3.2.7b.2 -- reject messages outside the peer's time window
            if (snmpEngineBoots == 2147483647 or msgAuthoritativeEngineBoots < snmpEngineBoots or (msgAuthoritativeEngineBoots == snmpEngineBoots and abs(idleTime + int(snmpEngineTime) - int(msgAuthoritativeEngineTime)) > 150)):
                raise error.StatusInformation(errorIndication=errind.notInTimeWindow, msgUserName=msgUserName)

    if securityLevel == 3:
        # 3.2.8a -- decrypt the scoped PDU
        if usmUserPrivProtocol in self.PRIV_SERVICES:
            privHandler = self.PRIV_SERVICES[usmUserPrivProtocol]
        else:
            raise error.StatusInformation(errorIndication=errind.decryptionError, msgUserName=msgUserName)

        encryptedPDU = scopedPduData.getComponentByPosition(1)
        if encryptedPDU is None:  # no ciphertext where one is required
            raise error.StatusInformation(errorIndication=errind.decryptionError, msgUserName=msgUserName)

        try:
            decryptedData = privHandler.decryptData(usmUserPrivKeyLocalized, (securityParameters.getComponentByPosition(1), securityParameters.getComponentByPosition(2), securityParameters.getComponentByPosition(5)), encryptedPDU)
            debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: PDU deciphered into %s' % debug.hexdump(decryptedData))
        except error.StatusInformation:
            usmStatsDecryptionErrors, = mibBuilder.importSymbols('__SNMP-USER-BASED-SM-MIB', 'usmStatsDecryptionErrors')
            usmStatsDecryptionErrors.syntax += 1
            raise error.StatusInformation(errorIndication=errind.decryptionError, oid=usmStatsDecryptionErrors.name, val=usmStatsDecryptionErrors.syntax, securityStateReference=securityStateReference, securityLevel=securityLevel, contextEngineId=contextEngineId, contextName=contextName, msgUserName=msgUserName, maxSizeResponseScopedPDU=maxSizeResponseScopedPDU)

        scopedPduSpec = scopedPduData.setComponentByPosition(0).getComponentByPosition(0)
        try:
            scopedPDU, rest = decoder.decode(decryptedData, asn1Spec=scopedPduSpec)
        except PyAsn1Error as exc:
            debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: scopedPDU decoder failed %s' % exc)
            raise error.StatusInformation(errorIndication=errind.decryptionError, msgUserName=msgUserName)

        if eoo.endOfOctets.isSameTypeWith(scopedPDU):
            raise error.StatusInformation(errorIndication=errind.decryptionError, msgUserName=msgUserName)
    else:
        # 3.2.8b -- plaintext scoped PDU
        scopedPDU = scopedPduData.getComponentByPosition(0)
        if scopedPDU is None:
            raise error.StatusInformation(errorIndication=errind.decryptionError, msgUserName=msgUserName)

    debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: scopedPDU decoded %s' % scopedPDU.prettyPrint())

    # 3.2.10
    securityName = usmUserSecurityName

    debug.logger & debug.FLAG_SM and debug.logger('processIncomingMsg: cached msgUserName %s info by securityStateReference %s' % (msgUserName, securityStateReference))

    # Password-less discovery probe: report unknownSecurityName with the PDU
    if not msgUserName and not msgAuthoritativeEngineId:
        usmStatsUnknownUserNames, = mibBuilder.importSymbols('__SNMP-USER-BASED-SM-MIB', 'usmStatsUnknownUserNames')
        usmStatsUnknownUserNames.syntax += 1
        raise error.StatusInformation(errorIndication=errind.unknownSecurityName, oid=usmStatsUnknownUserNames.name, val=usmStatsUnknownUserNames.syntax, securityStateReference=securityStateReference, securityEngineID=msgAuthoritativeEngineId, securityLevel=securityLevel, contextEngineId=contextEngineId, contextName=contextName, msgUserName=msgUserName, maxSizeResponseScopedPDU=maxSizeResponseScopedPDU, PDU=scopedPDU)

    # 3.2.12
    return (msgAuthoritativeEngineId, securityName, scopedPDU, maxSizeResponseScopedPDU, securityStateReference)
def _expireTimelineInfo(self):
    """Purge peer time-sync entries whose expiration slot is due, then
    advance the internal tick counter by one."""
    dueSlot = self._expirationTimer
    if dueSlot in self._timelineExpQueue:
        for staleEngineId in self._timelineExpQueue[dueSlot]:
            # An engine may have been re-synchronized (and re-queued)
            # since this slot was scheduled, so check membership first.
            if staleEngineId in self._timeline:
                del self._timeline[staleEngineId]
                debug.logger & debug.FLAG_SM and debug.logger('__expireTimelineInfo: expiring %r' % (staleEngineId,))
        del self._timelineExpQueue[dueSlot]
    self._expirationTimer += 1
def receiveTimerTick(self, snmpEngine, timeNow):
    # Periodic dispatcher hook: sweep expired peer timeline entries.
    # snmpEngine and timeNow are accepted for interface compatibility
    # but are not used here.
    self._expireTimelineInfo()
def test_clock_decision_context_from_replay(version_decision_context):
    """During replay, get_version() for an unrecorded change ID must raise."""
    with pytest.raises(Exception) as exc_info:
        version_decision_context.workflow_clock.set_replaying(True)
        version_decision_context.workflow_clock.get_version('abc', 1, 5)
    expected = 'Version -1 of changeID abc is not supported. Supported version is between 1 and 5'
    assert expected in str(exc_info.value)
def create_admin(username, password, email, job_title, silent=False):
    """Create the admin user unless a user with *username* already exists.

    Prints a status message unless *silent* is true (the "already added"
    notice is always printed, matching the original behaviour).
    """
    existing = FlicketUser.query.filter_by(username=username)
    if existing.count() != 0:
        print('Admin user is already added.')
        return
    admin = FlicketUser(
        username=username,
        name=username,
        password=hash_password(password),
        email=email,
        job_title=job_title,
        date_added=datetime.datetime.now(),
    )
    db.session.add(admin)
    # NOTE(review): no db.session.commit() here -- presumably the caller
    # commits the session; verify.
    if not silent:
        print('Admin user added.')
def process_new_build_pypi(copr, add_view, url_on_success):
    """Validate and submit a new PyPI-sourced build for *copr*.

    Builds the PyPI form for the project's active chroots and hands a
    build-creating callback to the generic new-build processor.
    """
    form = forms.BuildFormPyPIFactory(copr.active_chroots)()

    def factory(**build_options):
        # Create the build from the validated form fields on submission.
        BuildsLogic.create_new_from_pypi(
            flask.g.user, copr,
            form.pypi_package_name.data,
            form.pypi_package_version.data,
            form.spec_generator.data,
            form.spec_template.data,
            form.python_versions.data,
            form.selected_chroots,
            **build_options)

    return process_new_build(copr, form, factory, render_add_build_pypi, add_view, url_on_success)
class OptionSeriesPictorialStatesInactive(Options):
    """Options for the 'inactive' state of a pictorial series.

    NOTE(review): each option appears as a getter/setter pair sharing one
    method name with no @property / @<name>.setter decorators visible;
    presumably the decorators were lost in extraction -- as written, the
    later def shadows the earlier. Confirm against the generated original.
    """

    def animation(self) -> 'OptionSeriesPictorialStatesInactiveAnimation':
        """Sub-options tree for the inactive-state animation."""
        return self._config_sub_data('animation', OptionSeriesPictorialStatesInactiveAnimation)

    def enabled(self):
        """Getter for the enabled flag (literal default passed: True)."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        """Setter for the enabled flag."""
        self._config(flag, js_type=False)

    def opacity(self):
        """Getter for the inactive opacity (literal default passed: 0.2)."""
        return self._config_get(0.2)

    def opacity(self, num: float):
        """Setter for the inactive opacity."""
        self._config(num, js_type=False)
def extractSecretcouncildcWordpressCom(item):
    """Parse a secretcouncildc.wordpress.com feed item into a release message.

    Returns None for previews or items with neither chapter nor volume,
    a release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    mappings = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series_name, tl_type in mappings:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class RunTest(TestCase):
    """Exercise run listing, latest-run lookup and run deletion against an
    in-memory database.

    setUp seeds three runs unless the currently running test's unittest
    "short description" (the first line of its docstring) is exactly
    'No setUp'.
    """

    def setUp(self) -> None:
        # Fresh in-memory DB with schema plus a fake-object factory per test.
        self.db = DB(DBType.MEMORY)
        create_models(self.db)
        self.fakes = FakeObjectGenerator()
        # Tests opt out of seeding via a 'No setUp' docstring.
        if self.shortDescription() == 'No setUp':
            return
        run1 = self.fakes.run()
        run2 = self.fakes.run()
        run3 = self.fakes.run()
        with self.db.make_session() as session:
            session.add(run1)
            session.add(run2)
            session.add(run3)
            session.commit()

    def testRuns(self) -> None:
        # runs() lists every run, newest (highest run_id) first.
        with self.db.make_session() as session:
            allruns = runs(session)
            self.assertEqual(len(allruns), 3)
            self.assertEqual(int(allruns[0].run_id), 3)
            self.assertEqual(int(allruns[1].run_id), 2)
            self.assertEqual(int(allruns[2].run_id), 1)

    def testLatestRun(self) -> None:
        with self.db.make_session() as session:
            latest_id = latest(session)
            self.assertEqual(int(latest_id), 3)

    def testLatestRunWithNoRuns(self) -> None:
        """No setUp"""
        # FIX: this docstring is required -- setUp checks
        # self.shortDescription() == 'No setUp' to skip seeding; without it
        # three runs are created and latest() would not be None.
        with self.db.make_session() as session:
            latest_id = latest(session)
            self.assertEqual(str(latest_id), 'None')

    def testDeleteRun(self) -> None:
        with self.db.make_session() as session:
            delete_run(session, '1')
            remaining_runs = runs(session)
            remaining_ids = [int(r.run_id) for r in remaining_runs]
            self.assertEqual(len(remaining_runs), 2)
            self.assertNotIn(1, remaining_ids)

    def testDeleteNonExistentRun(self) -> None:
        # Deleting a missing run must raise and leave existing runs intact.
        with self.db.make_session() as session:
            self.assertRaises(EmptyDeletionError, delete_run, session, 10)
            allruns = runs(session)
            self.assertEqual(len(allruns), 3)
def set_font_size(window, size):
    """Apply a *size*-point copy of *window*'s font to it and, recursively,
    to every child window, re-laying-out and refreshing along the way."""
    resized_font = clone_font(window.GetFont())
    resized_font.SetPointSize(size)
    window.SetFont(resized_font)
    # Re-run layout only when the window actually owns a sizer.
    owning_sizer = window.GetSizer()
    if owning_sizer is not None:
        owning_sizer.Layout()
    window.Refresh()
    for child in window.GetChildren():
        set_font_size(child, size)
class OptionSeriesVectorDragdrop(Options):
    """Drag-and-drop options for a vector series.

    NOTE(review): each option appears as a getter/setter pair sharing one
    method name with no @property / @<name>.setter decorators visible;
    presumably the decorators were lost in extraction -- as written, the
    later def shadows the earlier. Confirm against the generated original.
    """

    def draggableX(self):
        """Getter for horizontal draggability (literal default passed: None)."""
        return self._config_get(None)

    def draggableX(self, flag: bool):
        """Setter for horizontal draggability."""
        self._config(flag, js_type=False)

    def draggableY(self):
        """Getter for vertical draggability (literal default passed: None)."""
        return self._config_get(None)

    def draggableY(self, flag: bool):
        """Setter for vertical draggability."""
        self._config(flag, js_type=False)

    def dragHandle(self) -> 'OptionSeriesVectorDragdropDraghandle':
        """Sub-options tree for the drag handle."""
        return self._config_sub_data('dragHandle', OptionSeriesVectorDragdropDraghandle)

    def dragMaxX(self):
        """Getter for the maximum X drag bound (literal default passed: None)."""
        return self._config_get(None)

    def dragMaxX(self, num: float):
        """Setter for the maximum X drag bound."""
        self._config(num, js_type=False)

    def dragMaxY(self):
        """Getter for the maximum Y drag bound (literal default passed: None)."""
        return self._config_get(None)

    def dragMaxY(self, num: float):
        """Setter for the maximum Y drag bound."""
        self._config(num, js_type=False)

    def dragMinX(self):
        """Getter for the minimum X drag bound (literal default passed: None)."""
        return self._config_get(None)

    def dragMinX(self, num: float):
        """Setter for the minimum X drag bound."""
        self._config(num, js_type=False)

    def dragMinY(self):
        """Getter for the minimum Y drag bound (literal default passed: None)."""
        return self._config_get(None)

    def dragMinY(self, num: float):
        """Setter for the minimum Y drag bound."""
        self._config(num, js_type=False)

    def dragPrecisionX(self):
        """Getter for X drag precision (literal default passed: 0)."""
        return self._config_get(0)

    def dragPrecisionX(self, num: float):
        """Setter for X drag precision."""
        self._config(num, js_type=False)

    def dragPrecisionY(self):
        """Getter for Y drag precision (literal default passed: 0)."""
        return self._config_get(0)

    def dragPrecisionY(self, num: float):
        """Setter for Y drag precision."""
        self._config(num, js_type=False)

    def dragSensitivity(self):
        """Getter for drag sensitivity (literal default passed: 2)."""
        return self._config_get(2)

    def dragSensitivity(self, num: float):
        """Setter for drag sensitivity."""
        self._config(num, js_type=False)

    def groupBy(self):
        """Getter for the group-by property name (literal default passed: None)."""
        return self._config_get(None)

    def groupBy(self, text: str):
        """Setter for the group-by property name."""
        self._config(text, js_type=False)

    def guideBox(self) -> 'OptionSeriesVectorDragdropGuidebox':
        """Sub-options tree for the drag guide box."""
        return self._config_sub_data('guideBox', OptionSeriesVectorDragdropGuidebox)

    def liveRedraw(self):
        """Getter for live-redraw-while-dragging (literal default passed: True)."""
        return self._config_get(True)

    def liveRedraw(self, flag: bool):
        """Setter for live-redraw-while-dragging."""
        self._config(flag, js_type=False)
def add_IsolateControllerServicer_to_server(servicer, server):
    """Register every IsolateController RPC handler from *servicer* on *server*."""
    # (method name, request message, response message) specs; the two spec
    # groups differ only in which gRPC handler factory wraps them.
    streaming_specs = (
        ('Run', controller__pb2.HostedRun, controller__pb2.HostedRunResult),
        ('Map', controller__pb2.HostedMap, controller__pb2.HostedRunResult),
        ('RegisterApplication', controller__pb2.RegisterApplicationRequest, controller__pb2.RegisterApplicationResult),
    )
    unary_specs = (
        ('CreateUserKey', controller__pb2.CreateUserKeyRequest, controller__pb2.CreateUserKeyResponse),
        ('ListUserKeys', controller__pb2.ListUserKeysRequest, controller__pb2.ListUserKeysResponse),
        ('RevokeUserKey', controller__pb2.RevokeUserKeyRequest, controller__pb2.RevokeUserKeyResponse),
        ('UpdateApplication', controller__pb2.UpdateApplicationRequest, controller__pb2.UpdateApplicationResult),
        ('SetAlias', controller__pb2.SetAliasRequest, controller__pb2.SetAliasResult),
        ('DeleteAlias', controller__pb2.DeleteAliasRequest, controller__pb2.DeleteAliasResult),
        ('ListAliases', controller__pb2.ListAliasesRequest, controller__pb2.ListAliasesResult),
        ('SetSecret', controller__pb2.SetSecretRequest, controller__pb2.SetSecretResponse),
        ('ListSecrets', controller__pb2.ListSecretsRequest, controller__pb2.ListSecretsResponse),
    )
    rpc_method_handlers = {}
    for name, request_type, response_type in streaming_specs:
        rpc_method_handlers[name] = grpc.unary_stream_rpc_method_handler(
            getattr(servicer, name),
            request_deserializer=request_type.FromString,
            response_serializer=response_type.SerializeToString)
    for name, request_type, response_type in unary_specs:
        rpc_method_handlers[name] = grpc.unary_unary_rpc_method_handler(
            getattr(servicer, name),
            request_deserializer=request_type.FromString,
            response_serializer=response_type.SerializeToString)
    generic_handler = grpc.method_handlers_generic_handler('controller.IsolateController', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
@pytest.mark.django_db
def test_category_county_subawards(geo_test_data):
    """Subaward spending grouped by county returns per-county buckets with
    the expected totals, codes and pagination metadata.

    FIX: the decorator line had been mangled to a bare `.django_db`
    (a syntax error); restored `@pytest.mark.django_db`, which this
    database-backed test requires.
    """
    test_payload = {'category': 'county', 'subawards': True, 'page': 1, 'limit': 50}
    spending_by_category_logic = CountyViewSet().perform_search(test_payload, {})
    expected_response = {'category': 'county', 'limit': 50, 'page_metadata': {'page': 1, 'next': None, 'previous': None, 'hasNext': False, 'hasPrevious': False}, 'results': [{'amount': 1100, 'code': '001', 'id': None, 'name': 'SOMEWHEREVILLE'}, {'amount': 11, 'code': '004', 'id': None, 'name': 'COUNTYSVILLE'}], 'messages': [get_time_period_message()]}
    assert expected_response == spending_by_category_logic
def get_first_matching_media_type(accept_media_types: Sequence[str], available_media_types: Sequence[str]) -> Optional[str]:
    """Pick the media type to serve, honouring the client's preference order.

    Returns None when nothing is available, the first available type when
    the client expressed no preference (or sent the wildcard), the first
    acceptable-and-available type otherwise, and None on no match.
    """
    if not available_media_types:
        return None
    if not accept_media_types:
        return available_media_types[0]
    for wanted in accept_media_types:
        if wanted == WILDCARD_MEDIA_TYPE:
            return available_media_types[0]
        # Sequence membership uses ==, matching the original pairwise scan.
        if wanted in available_media_types:
            return wanted
    return None
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.