code stringlengths 281 23.7M |
|---|
class RequestController():
    """Routes incoming requests to a Dispatcher instance."""

    def __init__(self) -> None:
        # Each controller owns its own dispatcher.
        self.dispatcher = Dispatcher()

    def dispatch_request(self, request: Any) -> None:
        """Forward *request* to the dispatcher; reject non-Request objects."""
        if not isinstance(request, Request):
            print('request must be a Request object')
            return
        self.dispatcher.dispatch(request)
def main():
    """Drive joint 4 through a wave pattern at two speeds, then return home."""
    mc.send_angles([-90, 0, 0, 0, 0, 0, 40], 60)
    time.sleep(2.5)
    # Fast oscillation: four swings at speed 95, 1.5 s apart.
    for _ in range(4):
        mc.send_angle(4, 45, 95)
        time.sleep(1.5)
        mc.send_angle(4, -45, 95)
        time.sleep(1.5)
    # Slow oscillation: four swings at speed 20, 2.5 s apart.
    for _ in range(4):
        mc.send_angle(4, 45, 20)
        time.sleep(2.5)
        mc.send_angle(4, -45, 20)
        time.sleep(2.5)
    # Return to the starting pose at a gentler speed.
    mc.send_angles([-90, 0, 0, 0, 0, 0, 40], 50)
    time.sleep(2.5)
def test_dtcwt2(size, J, no_grad=False, dev='cuda'):
    """Run a J-level quad DTCWT forward transform on random input.

    Args:
        size: shape of the random input tensor.
        J: number of decomposition levels. Bug fix: this parameter was
           previously ignored — the loop was hard-coded to ``range(3)``.
        no_grad: when True, the input tensor does not require gradients.
        dev: torch device for the input and prepared filters.

    Returns:
        (lowpass, highpasses): the final lowpass tensor and the list of the
        J highpass outputs, one per level.
    """
    x = torch.randn(*size, requires_grad=not no_grad).to(dev)
    # Only the analysis halves of the 'farras' level-1 filters are used here.
    h0a, h0b, _, _, h1a, h1b, _, _ = level1('farras')
    cols, rows = lowlevel2.prep_filt_quad_afb2d(h0a, h1a, h0b, h1b, device=dev)
    yh = []
    for _ in range(J):
        x, y = lowlevel2.quad_afb2d(x, cols, rows, mode='zero')
        yh.append(y)
    return x, yh
class Solution():
    """Evaluates arithmetic expressions containing +, -, *, / and spaces."""

    def calculate(self, s: str) -> int:
        """Return the integer value of *s*, honoring * and / precedence."""
        # Pass 1: tokenize into numbers (as digit lists) and operators.
        digit_groups = [[]]
        operators = []
        for ch in s:
            if ch == ' ':
                continue
            if ch in set('+-*/') and digit_groups[0]:
                operators.append(ch)
                digit_groups[-1] = int(''.join(digit_groups[-1]))
                digit_groups.append([])
            else:
                digit_groups[-1].append(ch)
        digit_groups[-1] = int(''.join(digit_groups[-1]))
        # Pass 2: collapse * and / immediately; defer + and - for later.
        reduced_nums = []
        reduced_ops = []
        for idx, num in enumerate(digit_groups):
            if idx == 0:
                reduced_nums.append(num)
                continue
            op = operators[idx - 1]
            if op in ('+', '-'):
                reduced_ops.append(op)
                reduced_nums.append(num)
            elif op == '*':
                reduced_nums[-1] = reduced_nums[-1] * num
            else:
                # Floor division, matching the original implementation.
                reduced_nums[-1] = reduced_nums[-1] // num
        # Pass 3: left-to-right fold of the remaining +/- operations.
        total = None
        for idx, num in enumerate(reduced_nums):
            if idx == 0:
                total = num
            elif reduced_ops[idx - 1] == '+':
                total = total + num
            else:
                total = total - num
        return total
# NOTE(review): the two lines below look like a mangled decorator stack,
# presumably "@member_required" and a last-modified decorator taking
# "last_modified_func=last_modified_document" — confirm against the original.
_member_required
(last_modified_func=last_modified_document)
def view_document(request, uuid):
    """Render the detail page for the Document identified by *uuid*.

    Raises Http404 when no matching document exists.  Best-effort: also
    collects back-references from the ``backrefs_fts`` table when present.
    """
    uuid = UUID(uuid)
    try:
        doc = Document.objects.all().get(uuid=uuid)
    except Document.DoesNotExist:
        raise Http404('Document with this uuid does not exist.')
    backrefs = None
    try:
        with connections['bibliothecula'].cursor() as cursor:
            # NOTE(review): doc.uuid.hex is interpolated straight into SQL.
            # It comes from a validated UUID so injection is unlikely, but a
            # parameterized query would be safer — flagging for review.
            cursor.execute(f"SELECT referrer FROM backrefs_fts WHERE target = '{doc.uuid.hex}'")
            backrefs = cursor.fetchall()
            print('backrefs: ', backrefs)
    except db.OperationalError:
        # The FTS table may not exist in this database; proceed without refs.
        pass
    if (backrefs is not None):
        objs = []
        for ref in backrefs:
            try:
                _doc = Document.objects.all().get(binary_metadata__metadata__uuid=ref[0])
                met = BinaryMetadata.objects.all().get(uuid=ref[0])
                objs.append((_doc, met))
            except Document.DoesNotExist:
                # Referring document no longer exists; skip it.
                continue
        backrefs = objs
    context = {'title': doc.title, 'document': doc, 'EMBEDDED_SUBMIT_VALUE': EMBEDDED_SUBMIT_VALUE, 'LINK_SUBMIT_VALUE': LINK_SUBMIT_VALUE, 'add_link_form': DocumentAddLinkStorage(), 'add_embedded_form': DocumentAddEmbeddedStorage(), 'add_document_form': AddDocument(), 'add_tag': DocumentSetTag(), 'backrefs': (None if (backrefs == []) else backrefs)}
    return render(request, 'document.html', context)
class FromReader(UtilsFromReader):
    """Reader that zero-fills the given instruction and pointer regions."""

    def __init__(self, ffrom, bw, bl, ldr, ldr_w, data_pointers, code_pointers, bw_blocks, bl_blocks, ldr_blocks, ldr_w_blocks, data_pointers_blocks, code_pointers_blocks):
        super().__init__(ffrom)
        # Mandatory regions are always cleared, in this fixed order.
        for blocks, values in ((bw_blocks, bw), (bl_blocks, bl), (ldr_blocks, ldr), (ldr_w_blocks, ldr_w)):
            self._write_zeros_to_from(blocks, values)
        # Pointer regions are optional and only cleared when provided.
        if data_pointers_blocks is not None:
            self._write_zeros_to_from(data_pointers_blocks, data_pointers)
        if code_pointers_blocks is not None:
            self._write_zeros_to_from(code_pointers_blocks, code_pointers)
def test_raw_pool_custom_cleanup_ok():
    """A user-supplied cleanup hook runs once per connection returned to the pool."""
    cleanup_spy = tests.mock.Mock()
    pool = db_pool.RawConnectionPool(DummyDBModule(), cleanup=cleanup_spy)
    # Explicit get/put triggers the hook once.
    connection = pool.get()
    pool.put(connection)
    assert cleanup_spy.call_count == 1
    # The context-manager form triggers it again on exit.
    with pool.item() as connection:
        pass
    assert cleanup_spy.call_count == 2
class TestLogger():
    """Tests for the structured JSON logger: format, severities, and keys."""

    @staticmethod
    def _capture_json(capsys, stream='out'):
        """Parse the captured stdout (or stderr) as a JSON object."""
        captured = capsys.readouterr()
        return json.loads(captured.out if stream == 'out' else captured.err)

    def test_format_should_be_valid_json(self, capsys: pytest.CaptureFixture[str]):
        logger.log(foo='bar')
        raw_log_output = capsys.readouterr().out
        try:
            json.loads(raw_log_output)
        except json.JSONDecodeError:
            pytest.fail('Log output was not valid JSON.')

    def test_log_should_have_severity(self, capsys: pytest.CaptureFixture[str]):
        logger.log(foo='bar')
        assert 'severity' in self._capture_json(capsys)

    def test_severity_should_be_debug(self, capsys: pytest.CaptureFixture[str]):
        logger.debug(foo='bar')
        assert self._capture_json(capsys)['severity'] == 'DEBUG'

    def test_severity_should_be_notice(self, capsys: pytest.CaptureFixture[str]):
        # Plain log() maps to NOTICE severity.
        logger.log(foo='bar')
        assert self._capture_json(capsys)['severity'] == 'NOTICE'

    def test_severity_should_be_info(self, capsys: pytest.CaptureFixture[str]):
        logger.info(foo='bar')
        assert self._capture_json(capsys)['severity'] == 'INFO'

    def test_severity_should_be_warning(self, capsys: pytest.CaptureFixture[str]):
        logger.warn(foo='bar')
        assert self._capture_json(capsys)['severity'] == 'WARNING'

    def test_severity_should_be_error(self, capsys: pytest.CaptureFixture[str]):
        # Errors are emitted on stderr, unlike every other severity.
        logger.error(foo='bar')
        assert self._capture_json(capsys, stream='err')['severity'] == 'ERROR'

    def test_log_should_have_message(self, capsys: pytest.CaptureFixture[str]):
        logger.log('bar')
        assert 'message' in self._capture_json(capsys)

    def test_log_should_have_other_keys(self, capsys: pytest.CaptureFixture[str]):
        logger.log(foo='bar')
        assert 'foo' in self._capture_json(capsys)

    def test_message_should_be_space_separated(self, capsys: pytest.CaptureFixture[str]):
        logger.log('bar', 'qux')
        assert self._capture_json(capsys)['message'] == 'bar qux'
# NOTE(review): the line below looks like the tail of a decorator, presumably
# a "@pytest.mark.requires_roxar"-style marker — confirm against the original.
.requires_roxar
def test_rox_surfaces_clipboard_general2d_data(roxar_project, roxinstance):
    """Round-trip a surface through RMS clipboard and general2d_data storage."""
    rox = xtgeo.RoxUtils(roxar_project)
    project = rox.project
    surf = xtgeo.surface_from_roxar(project, 'TopReek', SURFCAT1)
    # Clipboard storage must preserve the data (compared by mean here).
    surf.to_roxar(project, 'mycase', 'myfolder', stype='clipboard')
    surf2 = xtgeo.surface_from_roxar(project, 'mycase', 'myfolder', stype='clipboard')
    assert (surf2.values.mean() == surf.values.mean())
    if (not roxinstance.version_required('1.6')):
        # Older RMS APIs must raise rather than silently ignore this stype.
        with pytest.raises(NotImplementedError, match='API Support for general2d_data is missing'):
            surf.to_roxar(project, 'mycase', 'myfolder', stype='general2d_data')
        logger.info('This version of RMS does not support this feature')
    else:
        # Newer RMS: full value-exact round trip through general2d_data.
        surf.to_roxar(project, 'mycase', 'myfolder', stype='general2d_data')
        surf2 = xtgeo.surface_from_roxar(project, 'mycase', 'myfolder', stype='general2d_data')
        assert (surf2.values.tolist() == surf.values.tolist())
    rox.safe_close()
class FedLARSSyncAggregator(SyncAggregatorWithOptimizer):
    """Synchronous aggregator configured via FedLARSSyncAggregatorConfig."""

    def __init__(self, *, global_model: IFLModel, channel: Optional[IdentityChannel]=None, **kwargs) -> None:
        # Populate this instance's config from kwargs before base-class init.
        init_self_cfg(self, component_class=__class__, config_class=FedLARSSyncAggregatorConfig, **kwargs)
        super().__init__(global_model=global_model, channel=channel, **kwargs)

    # NOTE(review): this takes *cls* but carries no @classmethod decorator, so
    # as written it is an instance method — presumably the decorator was lost
    # in extraction; confirm against the original source.
    def _set_defaults_in_cfg(cls, cfg):
        pass
class FsspecFile(RepositoryFile):
    """RepositoryFile backed by a path on an fsspec filesystem."""

    def __init__(self, fs: AbstractFileSystem, path: str, fsspec_args: Optional[FsspecArgs]=None):
        super().__init__()
        self._fs = fs
        self._path = path
        # Default to empty fsspec arguments when none are supplied.
        if fsspec_args is None:
            fsspec_args = FsspecArgs()
        self._fsspec_args = fsspec_args

    def open(self, mode: str='rb', encoding: Optional[str]=None) -> IO:
        """Open the underlying path, translating fsspec failures into OSError."""
        needs_existing = ('r' in mode) or ('+' in mode)
        if needs_existing and not self.exists():
            raise FileNotFoundError(f'Cannot find fsspec path {self._fs.unstrip_protocol(self._path)}')
        try:
            return self._fs.open(self._path, mode=mode, encoding=encoding, **self._fsspec_args.kwargs)
        except Exception as e:
            raise OSError(f'Cannot open fsspec path {self._fs.unstrip_protocol(self._path)}') from e

    def path(self) -> Optional[str]:
        # Remote objects expose no local filesystem path.
        return None

    def exists(self) -> bool:
        """Return True when the path exists on the backing filesystem."""
        return self._fs.exists(self._path, **self._fsspec_args.kwargs)
class Shrink():
    """Curator action: shrink indices onto one node and into fewer shards.

    For each index in the index list, the action routes all shards to a
    chosen shrink node, blocks writes, calls the shrink API into a
    prefixed/suffixed target index, then optionally applies post-shrink
    allocation rules, copies aliases, and deletes the source index.
    """

    def __init__(self, ilo, shrink_node='DETERMINISTIC', node_filters=None, number_of_shards=1, number_of_replicas=1, shrink_prefix='', shrink_suffix='-shrink', copy_aliases=False, delete_after=True, post_allocation=None, wait_for_active_shards=1, wait_for_rebalance=True, extra_settings=None, wait_for_completion=True, wait_interval=9, max_wait=(- 1)):
        # Resolve None defaults here to avoid shared mutable defaults.
        if node_filters is None:
            node_filters = {}
        if post_allocation is None:
            post_allocation = {}
        if extra_settings is None:
            extra_settings = {}
        self.loggit = logging.getLogger('curator.actions.shrink')
        verify_index_list(ilo)
        if 'permit_masters' not in node_filters:
            node_filters['permit_masters'] = False
        self.index_list = ilo
        self.client = ilo.client
        self.shrink_node = shrink_node
        self.node_filters = node_filters
        self.shrink_prefix = shrink_prefix
        self.shrink_suffix = shrink_suffix
        self.copy_aliases = copy_aliases
        self.delete_after = delete_after
        self.post_allocation = post_allocation
        self.wait_for_rebalance = wait_for_rebalance
        self.wfc = wait_for_completion
        self.wait_interval = wait_interval
        self.max_wait = max_wait
        self.number_of_shards = number_of_shards
        self.wait_for_active_shards = wait_for_active_shards
        # Filled in later by qualify_single_node()/most_available_node().
        self.shrink_node_name = None
        self.shrink_node_avail = None
        self.shrink_node_id = None
        self.settings = {'index.number_of_shards': number_of_shards, 'index.number_of_replicas': number_of_replicas}
        if extra_settings:
            self._merge_extra_settings(extra_settings)
        # Ensure routing/write-block keys are cleared on the shrunken index.
        self._merge_extra_settings({'settings': {'index.routing.allocation.require._name': None, 'index.blocks.write': None}})

    def _merge_extra_settings(self, extra_settings):
        """Merge user-provided settings into the shrink request body."""
        self.loggit.debug('Adding extra_settings to shrink body: %s', extra_settings)
        if 'settings' in extra_settings:
            settings = extra_settings.pop('settings')
            try:
                self.settings.update(settings)
            except Exception as exc:
                raise ConfigurationError(f"""Unable to apply extra settings "{{'settings':settings}}" to shrink body. Exception: {exc}""") from exc
        if extra_settings:
            try:
                self.settings.update(extra_settings)
            except Exception as exc:
                raise ConfigurationError(f'Unable to apply extra settings "{extra_settings}" to shrink body. Exception: {exc}') from exc

    def _data_node(self, node_id):
        """Return True when *node_id* is a usable data node per node_filters."""
        roles = node_roles(self.client, node_id)
        name = node_id_to_name(self.client, node_id)
        if 'data' not in roles:
            self.loggit.info('Skipping node "%s": non-data node', name)
            return False
        if ('master' in roles) and (not self.node_filters['permit_masters']):
            self.loggit.info('Skipping node "%s": master node', name)
            return False
        elif ('master' in roles) and self.node_filters['permit_masters']:
            msg = f'Not skipping node "{name}" which is a master node (not recommended), but permit_masters is True'
            self.loggit.warning(msg)
            return True
        else:
            return True

    def _exclude_node(self, name):
        """Return True when *name* appears in the exclude_nodes filter."""
        if 'exclude_nodes' in self.node_filters:
            if name in self.node_filters['exclude_nodes']:
                self.loggit.info('Excluding node "%s" due to node_filters', name)
                return True
        return False

    def _shrink_target(self, name):
        """Build the target index name from the prefix/suffix settings."""
        return f'{self.shrink_prefix}{name}{self.shrink_suffix}'

    def qualify_single_node(self):
        """Validate the explicitly-named shrink node and record its free space."""
        node_id = name_to_node_id(self.client, self.shrink_node)
        if node_id:
            self.shrink_node_id = node_id
            self.shrink_node_name = self.shrink_node
        else:
            raise ConfigurationError(f'Unable to find node named: "{self.shrink_node}"')
        if self._exclude_node(self.shrink_node):
            raise ConfigurationError(f'Node "{self.shrink_node}" listed for exclusion')
        if not self._data_node(node_id):
            raise ActionError(f'Node "{self.shrink_node}" is not usable as a shrink node')
        self.shrink_node_avail = self.client.nodes.stats()['nodes'][node_id]['fs']['total']['available_in_bytes']

    def most_available_node(self):
        """Pick the eligible data node with the most available disk space."""
        mvn_avail = 0
        mvn_name = None
        mvn_id = None
        nodes = self.client.nodes.stats()['nodes']
        for node_id in nodes:
            name = nodes[node_id]['name']
            if self._exclude_node(name):
                self.loggit.debug('Node "%s" excluded by node filters', name)
                continue
            if not self._data_node(node_id):
                self.loggit.debug('Node "%s" is not a data node', name)
                continue
            value = nodes[node_id]['fs']['total']['available_in_bytes']
            if value > mvn_avail:
                mvn_name = name
                mvn_id = node_id
                mvn_avail = value
        self.shrink_node_name = mvn_name
        self.shrink_node_id = mvn_id
        self.shrink_node_avail = mvn_avail

    def route_index(self, idx, allocation_type, key, value):
        """Apply an allocation routing rule to *idx* and wait for it to settle."""
        bkey = f'index.routing.allocation.{allocation_type}.{key}'
        routing = {bkey: value}
        try:
            self.client.indices.put_settings(index=idx, settings=routing)
            if self.wait_for_rebalance:
                wait_for_it(self.client, 'allocation', wait_interval=self.wait_interval, max_wait=self.max_wait)
            else:
                wait_for_it(self.client, 'relocate', index=idx, wait_interval=self.wait_interval, max_wait=self.max_wait)
        except Exception as err:
            report_failure(err)

    def __log_action(self, error_msg, dry_run=False):
        """Raise ActionError for real runs; only warn during dry runs."""
        if not dry_run:
            raise ActionError(error_msg)
        else:
            self.loggit.warning('DRY-RUN: %s', error_msg)

    def _block_writes(self, idx):
        """Block writes on *idx* (required before shrinking)."""
        block = {'index.blocks.write': True}
        self.client.indices.put_settings(index=idx, settings=block)

    def _unblock_writes(self, idx):
        """Re-enable writes on *idx*."""
        unblock = {'index.blocks.write': False}
        self.client.indices.put_settings(index=idx, settings=unblock)

    def _check_space(self, idx, dry_run=False):
        """Require free space for 2x the primary size of *idx* (plus padding)."""
        size = index_size(self.client, idx, value='primaries')
        padded = (size * 2) + (32 * 1024)
        if padded < self.shrink_node_avail:
            msg = f'Sufficient space available for 2x the size of index "{idx}". Required: {padded}, available: {self.shrink_node_avail}'
            self.loggit.debug(msg)
        else:
            error_msg = f'Insufficient space available for 2x the size of index "{idx}", shrinking will exceed space available. Required: {padded}, available: {self.shrink_node_avail}'
            self.__log_action(error_msg, dry_run)

    def _check_node(self):
        """Select the shrink node: the configured one, or the most spacious."""
        if self.shrink_node != 'DETERMINISTIC':
            if not self.shrink_node_name:
                self.qualify_single_node()
        else:
            self.most_available_node()

    def _check_target_exists(self, idx, dry_run=False):
        """Fail when the shrink target index already exists."""
        target = self._shrink_target(idx)
        if self.client.indices.exists(index=target):
            error_msg = f'Target index "{target}" already exists'
            self.__log_action(error_msg, dry_run)

    def _check_doc_count(self, idx, dry_run=False):
        """Fail when documents would exceed the per-shard document ceiling.

        Bug fix: ``max_docs`` previously had no value assigned (a syntax
        error); it is now set to Lucene's hard per-shard document limit.
        """
        max_docs = 2147483519
        doc_count = self.client.indices.stats(index=idx)['indices'][idx]['primaries']['docs']['count']
        if doc_count > (max_docs * self.number_of_shards):
            error_msg = f'Too many documents ({doc_count}) to fit in {self.number_of_shards} shard(s). Maximum number of docs per shard is {max_docs}'
            self.__log_action(error_msg, dry_run)

    def _check_shard_count(self, idx, src_shards, dry_run=False):
        """Fail unless the target shard count is below the source count."""
        if self.number_of_shards >= src_shards:
            error_msg = f'Target number of shards ({self.number_of_shards}) must be less than current number of shards ({src_shards}) in index "{idx}"'
            self.__log_action(error_msg, dry_run)

    def _check_shard_factor(self, idx, src_shards, dry_run=False):
        """Fail unless the target shard count is a proper factor of the source."""
        factors = [x for x in range(1, src_shards + 1) if (src_shards % x) == 0]
        # src_shards itself is not a valid target; drop it.
        factors.pop()
        if self.number_of_shards not in factors:
            error_msg = f'"{self.number_of_shards}" is not a valid factor of {src_shards} shards of index {idx}. Valid values are {factors}'
            self.__log_action(error_msg, dry_run)

    def _check_all_shards(self, idx):
        """Verify every shard of *idx* now resides on the shrink node."""
        shards = self.client.cluster.state(index=idx)['routing_table']['indices'][idx]['shards']
        found = []
        for shardnum in shards:
            for shard_idx in range(0, len(shards[shardnum])):
                if shards[shardnum][shard_idx]['node'] == self.shrink_node_id:
                    found.append({'shard': shardnum, 'primary': shards[shardnum][shard_idx]['primary']})
        if len(shards) != len(found):
            self.loggit.debug('Found these shards on node "%s": %s', self.shrink_node_name, found)
            raise ActionError(f'Unable to shrink index "{idx}" as not all shards were found on the designated shrink node ({self.shrink_node_name}): {found}')

    def pre_shrink_check(self, idx, dry_run=False):
        """Run every pre-condition check for shrinking *idx*."""
        self.loggit.debug('BEGIN PRE_SHRINK_CHECK')
        self.loggit.debug('Check that target exists')
        self._check_target_exists(idx, dry_run)
        self.loggit.debug('Check doc count constraints')
        self._check_doc_count(idx, dry_run)
        self.loggit.debug('Check shard count')
        src_shards = int(self.client.indices.get(index=idx)[idx]['settings']['index']['number_of_shards'])
        self._check_shard_count(idx, src_shards, dry_run)
        self.loggit.debug('Check shard factor')
        self._check_shard_factor(idx, src_shards, dry_run)
        self.loggit.debug('Check node availability')
        self._check_node()
        self.loggit.debug('Check available disk space')
        self._check_space(idx, dry_run)
        self.loggit.debug('FINISH PRE_SHRINK_CHECK')

    def do_copy_aliases(self, source_idx, target_idx):
        """Move every alias from *source_idx* onto *target_idx*."""
        alias_actions = []
        aliases = self.client.indices.get_alias(index=source_idx)
        for alias in aliases[source_idx]['aliases']:
            self.loggit.debug('alias: %s', alias)
            alias_actions.append({'remove': {'index': source_idx, 'alias': alias}})
            alias_actions.append({'add': {'index': target_idx, 'alias': alias}})
        if alias_actions:
            self.loggit.info('Copy alias actions: %s', alias_actions)
            self.client.indices.update_aliases(actions=alias_actions)

    def do_dry_run(self):
        """Log what do_action() would do, without changing the cluster."""
        self.index_list.filter_closed()
        self.index_list.filter_by_shards(number_of_shards=self.number_of_shards)
        self.index_list.empty_list_check()
        try:
            index_lists = chunk_index_list(self.index_list.indices)
            for lst in index_lists:
                for idx in lst:
                    target = self._shrink_target(idx)
                    self.pre_shrink_check(idx, dry_run=True)
                    self.loggit.info('DRY-RUN: Moving shards to shrink node: "%s"', self.shrink_node_name)
                    msg = f'DRY-RUN: Shrinking index "{idx}" to "{target}" with settings: {self.settings}, wait_for_active_shards={self.wait_for_active_shards}'
                    self.loggit.info(msg)
                    if self.post_allocation:
                        submsg = f"index.routing.allocation.{self.post_allocation['allocation_type']}.{self.post_allocation['key']}:{self.post_allocation['value']}"
                        msg = f'DRY-RUN: Applying post-shrink allocation rule "{submsg}" to index "{target}"'
                        self.loggit.info(msg)
                    if self.copy_aliases:
                        msg = f'DRY-RUN: Copy source index aliases "{self.client.indices.get_alias(index=idx)}"'
                        self.loggit.info(msg)
                    if self.delete_after:
                        self.loggit.info('DRY-RUN: Deleting source index "%s"', idx)
        except Exception as err:
            report_failure(err)

    def do_action(self):
        """Shrink every selected index, with cleanup on failure."""
        self.index_list.filter_closed()
        self.index_list.filter_by_shards(number_of_shards=self.number_of_shards)
        self.index_list.empty_list_check()
        msg = f'Shrinking {len(self.index_list.indices)} selected indices: {self.index_list.indices}'
        self.loggit.info(msg)
        try:
            index_lists = chunk_index_list(self.index_list.indices)
            for lst in index_lists:
                for idx in lst:
                    target = self._shrink_target(idx)
                    self.loggit.info('Source index: %s -- Target index: %s', idx, target)
                    self.pre_shrink_check(idx)
                    self.loggit.info('Moving shards to shrink node: "%s"', self.shrink_node_name)
                    self.route_index(idx, 'require', '_name', self.shrink_node_name)
                    self._check_all_shards(idx)
                    self._block_writes(idx)
                    if not health_check(self.client, status='green'):
                        raise ActionError('Unable to proceed with shrink action. Cluster health is not "green"')
                    msg = f'Shrinking index "{idx}" to "{target}" with settings: {self.settings}, wait_for_active_shards={self.wait_for_active_shards}'
                    self.loggit.info(msg)
                    try:
                        self.client.indices.shrink(index=idx, target=target, settings=self.settings, wait_for_active_shards=self.wait_for_active_shards)
                        if self.wfc:
                            self.loggit.debug('Wait for shards to complete allocation for index: %s', target)
                            if self.wait_for_rebalance:
                                wait_for_it(self.client, 'shrink', wait_interval=self.wait_interval, max_wait=self.max_wait)
                            else:
                                wait_for_it(self.client, 'relocate', index=target, wait_interval=self.wait_interval, max_wait=self.max_wait)
                    except Exception as exc:
                        # Shrink failed mid-way: remove a partial target index.
                        if self.client.indices.exists(index=target):
                            msg = f'Deleting target index "{target}" due to failure to complete shrink'
                            self.loggit.error(msg)
                            self.client.indices.delete(index=target)
                        raise ActionError(f'Unable to shrink index "{idx}" -- Error: {exc}') from exc
                    self.loggit.info('Index "%s" successfully shrunk to "%s"', idx, target)
                    self._unblock_writes(idx)
                    if self.post_allocation:
                        submsg = f"index.routing.allocation.{self.post_allocation['allocation_type']}.{self.post_allocation['key']}:{self.post_allocation['value']}"
                        msg = f'Applying post-shrink allocation rule "{submsg}" to index "{target}"'
                        self.loggit.info(msg)
                        self.route_index(target, self.post_allocation['allocation_type'], self.post_allocation['key'], self.post_allocation['value'])
                    if self.copy_aliases:
                        self.loggit.info('Copy source index aliases "%s"', idx)
                        self.do_copy_aliases(idx, target)
                    if self.delete_after:
                        self.loggit.info('Deleting source index "%s"', idx)
                        self.client.indices.delete(index=idx)
                    else:
                        self.loggit.info('Unassigning routing for source index: "%s"', idx)
                        self.route_index(idx, 'require', '_name', '')
        except Exception as err:
            # NOTE(review): idx may be unbound if a failure occurs before the
            # loop starts — preserved from the original; confirm intent.
            self._unblock_writes(idx)
            report_failure(err)
# NOTE(review): the line below looks like the tail of a decorator, presumably
# "@pytest.mark.skip(...)" — confirm against the original module.
.skip(reason='Need to update to ethpm v3')
def test_ethpm_already_installed():
    """Installing an ethpm package whose version folder already exists must fail."""
    path = _get_data_folder().joinpath('packages/zeppelin.snakecharmers.eth')
    path.mkdir()
    # Pre-create the version folder the installer would try to write.
    path.joinpath('.0.0').mkdir()
    with pytest.raises(FileExistsError):
        install_package('ethpm://zeppelin.snakecharmers.eth:1/.0.0')
# NOTE(review): the line below looks like the tail of a decorator, presumably
# "@pytest.mark.parametrize(...)" — confirm against the original module.
.parametrize('value', (((ADDRESS_A, 'balance', 1), (ADDRESS_A, 'balance', 2)), ((ADDRESS_A, 'storage', 1, 12345), (ADDRESS_A, 'storage', 1, 54321))))
def test_normalize_state_detects_duplicates(value):
    """normalize_state must reject state where the same item is set twice."""
    with pytest.raises(ValidationError, match='Some state item is defined multiple times'):
        normalize_state(value)
class FakeInfoWatcherWithTimer(core.InfoWatcher):
    """InfoWatcher stub whose reported job state advances with wall-clock time."""

    def __init__(self, delay_s: int=60, time_change: float=0.02):
        super().__init__(delay_s)
        # Reference instant used by get_state to compute elapsed time.
        self.start_timer = time.time()
        self.time_change = time_change

    def get_state(self, job_id: str, mode: str='standard') -> str:
        """Return pending -> running -> failed/done based on elapsed time."""
        elapsed = time.time() - self.start_timer
        if elapsed < self.time_change:
            return 'pending'
        if self.time_change < elapsed < (2 * self.time_change):
            return 'running'
        # After two periods the job settles into its final state.
        return 'failed' if job_id == 'failed' else 'done'
class OptionSeriesParetoSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Generated option wrapper for sonification tremoloDepth mapping settings.

    NOTE(review): every accessor below is defined twice under the same name.
    In generated wrappers like this, such pairs are normally decorated with
    @property / @<name>.setter; those decorators appear to have been lost in
    extraction, so as written each second (setter-style) definition silently
    replaces the first. Confirm against the generator's original output.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class GrpcNotifierFactory(object):
    """Builds GrpcNotifier services and registers them on a gRPC server."""

    def __init__(self, config):
        # Service configuration forwarded to every created notifier.
        self.config = config

    def create_and_register_service(self, server):
        """Create a GrpcNotifier, attach it to *server*, and return it."""
        grpc_service = GrpcNotifier(notifier_api=notifier, service_config=self.config)
        notifier_pb2_grpc.add_NotifierServicer_to_server(grpc_service, server)
        LOGGER.info('Service %s created and registered', grpc_service)
        return grpc_service
class Migration(migrations.Migration):
    """Initial migration: creates the Blog and BlogPost tables."""

    # First migration for this app, so there is nothing to depend on.
    initial = True
    dependencies = []
    # BlogPost.blog is a cascading FK to Blog; both models get an auto-set
    # date_added timestamp and a BigAutoField primary key.
    operations = [migrations.CreateModel(name='Blog', fields=[('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=200)), ('date_added', models.DateTimeField(auto_now_add=True))]), migrations.CreateModel(name='BlogPost', fields=[('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=500)), ('body', models.TextField()), ('date_added', models.DateTimeField(auto_now_add=True)), ('blog', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogs.blog'))])]
def test_commands_can_return_md(dummy_execute_and_send):
    """A command returning markdown gets its args concatenated into the reply body."""
    dummy, incoming = dummy_execute_and_send
    dummy._execute_and_send(cmd='return_args_as_md', args=['foo', 'bar'], match=None, msg=incoming, template_name=dummy.return_args_as_md._err_command_template)
    reply = dummy.pop_message()
    assert 'foobar' == reply.body
class InvalidForm(BadRequest):
    """BadRequest raised for a failed form validation; message lists field errors."""

    def __init__(self, form):
        super().__init__()
        self.form = form
        # Pre-render the human-readable error summary.
        self.message = self._message()

    def _message(self):
        """Return one 'field: error' line per validation error."""
        lines = []
        for field, errors in self.form.errors.items():
            for err in errors:
                lines.append('{0}: {1}'.format(field, err))
        return '\n'.join(lines)
def deploy_modal():
    """Run `modal deploy app` via the Modal CLI, reporting progress to the console."""
    deploy_cmd = ['modal', 'deploy', 'app']
    try:
        console.print(f" [bold cyan]Running: {' '.join(deploy_cmd)}[/bold cyan]")
        # check=True raises CalledProcessError on a non-zero exit code.
        subprocess.run(deploy_cmd, check=True)
        console.print(" [bold green]'modal deploy' executed successfully.[/bold green]")
    except subprocess.CalledProcessError as e:
        console.print(f' [bold red]An error occurred: {e}[/bold red]')
    except FileNotFoundError:
        # The modal binary itself is missing from PATH.
        console.print(" [bold red]'modal' command not found. Please ensure Modal CLI is installed and in your PATH.[/bold red]")
class TransactionCoverage(db.Model):
    """Model over the ofec_agg_coverage_date_mv materialized view."""

    __tablename__ = 'ofec_agg_coverage_date_mv'
    # Surrogate primary key supplied by the materialized view.
    idx = db.Column(db.Integer, primary_key=True)
    committee_id = db.Column('committee_id', db.String, doc=docs.COMMITTEE_ID)
    # DB column is named 'fec_election_yr'; exposed here as fec_election_year.
    fec_election_year = db.Column('fec_election_yr', db.Integer)
    transaction_coverage_date = db.Column(db.Date, doc=docs.TRANSACTION_COVERAGE_DATE)
def _is_declaration(Line: str) -> bool:
comment = False
for i in range(0, len(Line)):
if (Line[i] == '#'):
comment = True
if ((Line[i] == '=') and (not comment)):
if (('[' in Line[i]) and (']' in Line[i])):
l_bracket = Line[i].find('[')
new_array_string = '['
array = Line[(l_bracket + 1):].split(';')
for a in array:
new_array_string += (('(' + a) + '),')
r_bracket = new_array_string.find(']')
new_array_string = ((new_array_string[:r_bracket] + ')') + new_array_string[r_bracket:(- 1)])
Line[i] = (Line[i][:l_bracket] + new_array_string)
return True
return False |
class BaseModifyPacketTest(base_tests.SimpleDataPlane):
    """Base class for dataplane tests that apply packet-modifying actions."""

    def verify_modify(self, actions, pkt, exp_pkt):
        """Install a flow applying *actions* to *pkt* and expect *exp_pkt* out.

        The given actions are extended with an output action to a second
        port; the packet is injected on the first port and the (modified)
        packet is verified on the output port.
        """
        (in_port, out_port) = openflow_ports(2)
        actions = (actions + [ofp.action.output(out_port)])
        logging.info('Running actions test for %s', pp(actions))
        delete_all_flows(self.controller)
        logging.info('Inserting flow')
        request = ofp.message.flow_add(table_id=test_param_get('table', 0), match=packet_to_flow_match(self, pkt), instructions=[ofp.instruction.apply_actions(actions)], buffer_id=ofp.OFP_NO_BUFFER, priority=1000)
        self.controller.message_send(request)
        # Barrier ensures the flow is installed before we send the packet.
        do_barrier(self.controller)
        logging.info('Sending packet, expecting output to port %d', out_port)
        # NOTE(review): str(pkt) is Python-2-era packet serialization; on
        # Python 3 this may not yield raw bytes — confirm whether bytes(pkt)
        # is required here.
        self.dataplane.send(in_port, str(pkt))
        verify_packets(self, str(exp_pkt), [out_port])
# NOTE(review): the line below looks like a mangled metaclass decorator,
# presumably "@six.add_metaclass(abc.ABCMeta)" — confirm against the original.
_metaclass(abc.ABCMeta)
class tlv(stringify.StringifyMixin):
    """Abstract base for TLV (type-length-value) protocol elements."""

    # Byte widths of the type and length fields in the encoded form.
    _TYPE_LEN = 1
    _LENGTH_LEN = 2
    # Fields rendered as ASCII MAC strings by StringifyMixin.
    _TYPE = {'ascii': ['egress_id_mac', 'last_egress_id_mac', 'next_egress_id_mac', 'mac_address']}

    def __init__(self, length):
        # Length of the value portion only; header sizes are added in __len__.
        self.length = length

    # NOTE(review): takes *cls* but has no @classmethod decorator — presumably
    # lost in extraction; confirm against the original source.
    def parser(cls, buf):
        pass

    def serialize(self):
        pass

    def __len__(self):
        # Total encoded size: value length plus the type and length headers.
        return ((self.length + self._TYPE_LEN) + self._LENGTH_LEN)
class UAVEnv(object):
    """Simulation environment for UAV-assisted mobile-edge-computing offloading.

    State is the concatenation of [battery, UAV x/y, remaining workload,
    UE positions, UE task sizes, UE blockage flags].
    """

    # Geometry: flight height and ground dimensions share one value.
    height = ground_length = ground_width = 100
    # Total remaining computation workload (bits).
    # NOTE(review): reset_env() resets this to 100 * 1048576, not 60 —
    # confirm which initial workload is intended.
    sum_task_size = (60 * 1048576)
    loc_uav = [50, 50]
    bandwidth_nums = 1
    # Channel bandwidth (Hz).
    B = (bandwidth_nums * (10 ** 6))
    # Noise power for line-of-sight / non-line-of-sight links.
    p_noisy_los = (10 ** (- 13))
    p_noisy_nlos = (10 ** (- 11))
    flight_speed = 50.0
    # NOTE(review): f_ue and f_uav are literally 0.0 here, which makes the
    # divisions in step() and com_delay() raise ZeroDivisionError — the real
    # CPU frequencies were presumably lost in extraction; confirm values.
    f_ue = .0
    f_uav = .0
    # Effective switched-capacitance coefficient for compute energy.
    r = (10 ** (- 27))
    # CPU cycles required per bit.
    s = 1000
    # Uplink transmit power (W).
    p_uplink = 0.1
    # Channel gain at reference distance.
    alpha0 = 1e-05
    # Episode duration (s) and slot length (s).
    T = 200
    delta_t = 5
    slot_num = int((T / delta_t))
    # UAV mass (kg) and battery energy budget.
    m_uav = 9.65
    e_battery_uav = 500000
    # Number of user equipments (UEs) and their random initial properties.
    M = 4
    block_flag_list = np.random.randint(0, 2, M)
    loc_ue_list = np.random.randint(0, 101, size=[M, 2])
    task_list = np.random.randint(1572864, 2097153, M)
    # Per-UE movement transition probabilities (stay/up/right/down/left).
    loc_ue_trans_pro = np.array([[0.6, 0.1, 0.1, 0.1, 0.1], [0.6, 0.1, 0.1, 0.1, 0.1], [0.6, 0.1, 0.1, 0.1, 0.1], [0.6, 0.1, 0.1, 0.1, 0.1]])
    action_bound = [(- 1), 1]
    action_dim = 4
    state_dim = (4 + (M * 4))

    def __init__(self):
        # Build the initial observation vector piece by piece.
        self.start_state = np.append(self.e_battery_uav, self.loc_uav)
        self.start_state = np.append(self.start_state, self.sum_task_size)
        self.start_state = np.append(self.start_state, np.ravel(self.loc_ue_list))
        self.start_state = np.append(self.start_state, self.task_list)
        self.start_state = np.append(self.start_state, self.block_flag_list)
        self.state = self.start_state

    def reset(self):
        """Reset the environment and return the initial observation."""
        self.reset_env()
        self.state = np.append(self.e_battery_uav, self.loc_uav)
        self.state = np.append(self.state, self.sum_task_size)
        self.state = np.append(self.state, np.ravel(self.loc_ue_list))
        self.state = np.append(self.state, self.task_list)
        self.state = np.append(self.state, self.block_flag_list)
        return self._get_obs()

    def reset_env(self):
        """Restore workload, battery, UAV position, and UE positions."""
        self.sum_task_size = (100 * 1048576)
        self.e_battery_uav = 500000
        self.loc_uav = [50, 50]
        self.loc_ue_list = np.random.randint(0, 101, size=[self.M, 2])
        self.reset_step()

    def reset_step(self):
        """Draw fresh per-UE task sizes and blockage flags for the next slot."""
        self.task_list = np.random.randint(2621440, 3145729, self.M)
        self.block_flag_list = np.random.randint(0, 2, self.M)

    def _get_obs(self):
        """Rebuild and return the flat observation vector from current fields."""
        self.state = np.append(self.e_battery_uav, self.loc_uav)
        self.state = np.append(self.state, self.sum_task_size)
        self.state = np.append(self.state, np.ravel(self.loc_ue_list))
        self.state = np.append(self.state, self.task_list)
        self.state = np.append(self.state, self.block_flag_list)
        return self.state

    def step(self):
        """Advance one slot with a fixed (no-fly, full-offload) action.

        Returns (reward, is_terminal, step_redo).
        """
        step_redo = False
        is_terminal = False
        # A random UE offloads this slot; the UAV stays put (dis_fly = 0).
        ue_id = np.random.randint(0, self.M)
        theta = 0
        offloading_ratio = 1
        task_size = self.task_list[ue_id]
        block_flag = self.block_flag_list[ue_id]
        dis_fly = 0
        # Flight energy for the first half of the slot (zero when not flying).
        e_fly = (((((dis_fly / (self.delta_t * 0.5)) ** 2) * self.m_uav) * (self.delta_t * 0.5)) * 0.5)
        dx_uav = (dis_fly * math.cos(theta))
        dy_uav = (dis_fly * math.sin(theta))
        loc_uav_after_fly_x = (self.loc_uav[0] + dx_uav)
        loc_uav_after_fly_y = (self.loc_uav[1] + dy_uav)
        # Edge compute time and energy for the offloaded share.
        # NOTE(review): divides by f_uav / s — ZeroDivisionError if f_uav is 0.
        t_server = ((offloading_ratio * task_size) / (self.f_uav / self.s))
        e_server = ((self.r * (self.f_uav ** 3)) * t_server)
        if (self.sum_task_size == 0):
            # All work done: terminal state.
            is_terminal = True
            reward = 0
        elif ((self.sum_task_size - self.task_list[ue_id]) < 0):
            # Remaining work is smaller than the drawn task: shrink and redo.
            self.task_list = (np.ones(self.M) * self.sum_task_size)
            reward = 0
            step_redo = True
        elif ((loc_uav_after_fly_x < 0) or (loc_uav_after_fly_x > self.ground_width) or (loc_uav_after_fly_y < 0) or (loc_uav_after_fly_y > self.ground_length)):
            # UAV would leave the area: penalize and redo.
            reward = (- 100)
            step_redo = True
        elif (self.e_battery_uav < e_fly):
            # Not enough battery to fly.
            reward = (- 100)
        elif ((self.e_battery_uav - e_fly) < e_server):
            # Not enough battery left to compute.
            reward = (- 100)
        else:
            # Feasible step: reward is the offloading delay, battery drains,
            # and the workload shrinks by the completed task.
            delay = self.com_delay(self.loc_ue_list[ue_id], np.array([loc_uav_after_fly_x, loc_uav_after_fly_y]), offloading_ratio, task_size, block_flag)
            reward = delay
            self.e_battery_uav = ((self.e_battery_uav - e_fly) - e_server)
            self.sum_task_size -= self.task_list[ue_id]
            # Random-walk each UE by one unit in a random direction.
            for i in range(self.M):
                tmp = np.random.rand()
                if (0.6 < tmp <= 0.7):
                    self.loc_ue_list[i] += [0, 1]
                elif (0.7 < tmp <= 0.8):
                    self.loc_ue_list[i] += [1, 0]
                elif (0.8 < tmp <= 0.9):
                    self.loc_ue_list[i] += [0, (- 1)]
                else:
                    self.loc_ue_list[i] += [(- 1), 0]
                # NOTE(review): np.clip returns a new array and the result is
                # discarded here, so positions are NOT actually clamped —
                # presumably an assignment was intended; confirm.
                np.clip(self.loc_ue_list[i], 0, 100)
            self.reset_step()
        return (reward, is_terminal, step_redo)

    def com_delay(self, loc_ue, loc_uav, offloading_ratio, task_size, block_flag):
        """Total task delay: max of (uplink + edge compute) and local compute."""
        dx = (loc_uav[0] - loc_ue[0])
        dy = (loc_uav[1] - loc_ue[1])
        dh = self.height
        dist_uav_ue = np.sqrt((((dx * dx) + (dy * dy)) + (dh * dh)))
        # Blocked links suffer the higher NLoS noise power.
        p_noise = self.p_noisy_los
        if (block_flag == 1):
            p_noise = self.p_noisy_nlos
        g_uav_ue = abs((self.alpha0 / (dist_uav_ue ** 2)))
        # Shannon-capacity uplink rate.
        trans_rate = (self.B * math.log2((1 + ((self.p_uplink * g_uav_ue) / p_noise))))
        t_tr = ((offloading_ratio * task_size) / trans_rate)
        # NOTE(review): divides by f_uav / s and f_ue / s — ZeroDivisionError
        # if those class constants remain 0.0 (see note at the top).
        t_edge_com = ((offloading_ratio * task_size) / (self.f_uav / self.s))
        t_local_com = (((1 - offloading_ratio) * task_size) / (self.f_ue / self.s))
        return max([(t_tr + t_edge_com), t_local_com])
def poker_game(ws: WebSocket, connection_channel: str):
    """Bridge a player's WebSocket to a poker5 game server over redis channels.

    Validates the session, connects the player to a server-side channel, then
    pumps messages in both directions until either side disconnects; both
    channels are closed on the way out.
    """
    client_channel = ChannelWebSocket(ws)
    # Reject sockets that arrive without an established player session.
    if ('player-id' not in session):
        client_channel.send_message({'message_type': 'error', 'error': 'Unrecognized user'})
        client_channel.close()
        return
    session_id = str(uuid.uuid4())
    player_id = session['player-id']
    player_name = session['player-name']
    player_money = session['player-money']
    room_id = session['room-id']
    player_connector = PlayerClientConnector(redis, connection_channel, app.logger)
    try:
        server_channel = player_connector.connect(player=Player(id=player_id, name=player_name, money=player_money), session_id=session_id, room_id=room_id)
    except (ChannelError, MessageFormatError, MessageTimeout) as e:
        app.logger.error('Unable to connect player {} to a poker5 server: {}'.format(player_id, e.args[0]))
    else:
        # Handshake succeeded: forward the server's connection message first.
        client_channel.send_message(server_channel.connection_message)
        def message_handler(channel1, channel2):
            # One-way pump: relay until a disconnect message or channel error.
            try:
                while True:
                    message = channel1.recv_message()
                    if (('message_type' in message) and (message['message_type'] == 'disconnect')):
                        raise ChannelError
                    channel2.send_message(message)
            except (ChannelError, MessageFormatError):
                pass
        # Two greenlets pump client->server and server->client concurrently.
        greenlets = [gevent.spawn(message_handler, client_channel, server_channel), gevent.spawn(message_handler, server_channel, client_channel)]
        def closing_handler(*args, **kwargs):
            # When either pump dies, kill both so the join below returns.
            gevent.killall(greenlets, ChannelError)
        greenlets[0].link(closing_handler)
        greenlets[1].link(closing_handler)
        gevent.joinall(greenlets)
        # Best-effort disconnect notifications; always close both channels.
        try:
            client_channel.send_message({'message_type': 'disconnect'})
        except:
            pass
        finally:
            client_channel.close()
        try:
            server_channel.send_message({'message_type': 'disconnect'})
        except:
            pass
        finally:
            server_channel.close()
        app.logger.info('player {} connection closed'.format(player_id))
# NOTE(review): the decorator prefix was stripped in this copy; restored as
# @pytest.mark.parametrize (pytest is presumably imported at file top).
@pytest.mark.parametrize('analysis_config', [AnalysisConfig(), AnalysisConfig.from_dict({})])
def test_analysis_config_iter_config_default_initialisation(analysis_config):
    """Defaults are 4 iterations / 4 retries, and num_iterations is settable."""
    assert (analysis_config.num_iterations == 4)
    assert (analysis_config.num_retries_per_iter == 4)
    analysis_config.set_num_iterations(42)
    assert (analysis_config.num_iterations == 42)
class OptionPlotoptionsXrangeSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Highcharts `plotOptions.xrange.sonification.defaultSpeechOptions.mapping.playDelay` options.

    NOTE(review): the duplicate method names in the original were
    getter/setter pairs whose @property decorators were stripped; restored
    here following the file's Options accessor pattern.
    """

    @property
    def mapFunction(self):
        """Mapping function for the audio parameter (None if unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property to map to (None if unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped value (None if unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped value (None if unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Data scope the mapping is computed within (None if unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class Schur_Qp(SchurPrecon):
    """Schur-complement preconditioner approximating the Schur block with a
    scaled pressure mass matrix Qp.

    NOTE(review): the scaling 1/nu suggests nu is a (kinematic) viscosity —
    confirm against the PDE coefficients' definition.
    """

    def __init__(self, L, prefix=None, bdyNullSpace=False):
        # Delegate the generic Schur setup, then build the mass matrix Q.
        SchurPrecon.__init__(self, L, prefix, bdyNullSpace)
        self.operator_constructor = SchurOperatorConstructor(self)
        self.Q = self.operator_constructor.initializeQ()

    def setUp(self, global_ksp, newton_its=None):
        # Refresh Q and extract its pressure-pressure sub-block via the
        # pressure index set (isp).
        self.operator_constructor.updateQ()
        self.Qp = self.Q.createSubMatrix(self.operator_constructor.linear_smoother.isp, self.operator_constructor.linear_smoother.isp)
        # Scale the mass matrix by 1/nu.
        self.Qp.scale((1.0 / self.L.pde.coefficients.nu))
        L_sizes = self.Qp.size
        # Wrap an inverse-of-Qp action in a PETSc 'python'-type shell matrix.
        self.QpInv_shell = p4pyPETSc.Mat().create()
        self.QpInv_shell.setSizes(L_sizes)
        self.QpInv_shell.setType('python')
        self.matcontext_inv = MatrixInvShell(self.Qp)
        self.QpInv_shell.setPythonContext(self.matcontext_inv)
        self.QpInv_shell.setUp()
        # Install the Schur approximation on the KSP and enable logging.
        self._setSchurApproximation(global_ksp)
        self._setSchurlog(global_ksp)
class _SCUtil():
    """Helpers for parsing `scutil --dns` output on macOS.

    NOTE(review): the methods take no self and were clearly meant to be
    static (they are called as _SCUtil.<name>); the stripped @staticmethod
    decorators are restored here.
    """

    # Matches e.g. "nameserver[0] : 8.8.8.8" and captures the IPv4 address.
    PROG_NAMESERVER = re.compile('.*nameserver\\[\\d+\\] : ({}).*'.format(RE_IPV4_ADDRESS))

    @staticmethod
    def _parse_resolver(lines, istart):
        """Collect nameserver IPs of one resolver section starting at istart.

        Returns [ips, next_line_index]; a blank line ends the section.
        """
        ips = []
        iline = istart
        for iline in range(istart, len(lines)):
            matches = _SCUtil.PROG_NAMESERVER.match(lines[iline])
            if matches:
                ips.append(matches.group(1))
                continue
            if (len(lines[iline].strip()) == 0):
                break
        return [ips, (iline + 1)]

    @staticmethod
    def _parse_resolvers(lines):
        """Parse every 'resolver #N' section and return its IPs as ip_address objects."""
        ips = []
        iline = 0
        while (iline < len(lines)):
            if ('resolver #' not in lines[iline]):
                iline += 1
                continue
            (new_ips, iline) = _SCUtil._parse_resolver(lines, (iline + 1))
            ips += new_ips
        return [ipaddress.ip_address(ip) for ip in ips]

    @staticmethod
    def dns_servers_in_priority_order():
        """Return the scoped-query DNS servers, deduplicated, in priority order.

        BUG FIX: the original returned list(set(...)), which discards the
        ordering the function name promises; dict.fromkeys deduplicates while
        preserving first-seen (priority) order.
        """
        lines = check_subprocess(['scutil', '--dns'])[0].splitlines()
        istart = None
        for iline in range(0, len(lines)):
            if ('DNS configuration (for scoped queries)' in lines[iline]):
                istart = (iline + 1)
                break
        # If the marker is absent, istart stays None and lines[None:] scans all.
        return list(dict.fromkeys(_SCUtil._parse_resolvers(lines[istart:])))
class JSONChecker(Checker):
    """Update checker that evaluates jq-style queries against JSON endpoints.

    Supports plain ExternalData (version/url queries) and ExternalGitRepo
    (tag/commit queries); each query may fetch its own document via a
    matching `*-data-url` expression.

    NOTE(review): `get_json_schema` takes `cls` and `_read_q_seq` takes no
    self — their stripped @classmethod/@staticmethod decorators are restored.
    """

    CHECKER_DATA_TYPE = 'json'
    CHECKER_DATA_SCHEMA = {'type': 'object', 'properties': {'url': {'type': 'string', 'format': 'uri'}, 'tag-query': {'type': 'string'}, 'tag-data-url': {'type': 'string'}, 'commit-query': {'type': 'string'}, 'commit-data-url': {'type': 'string'}, 'version-query': {'type': 'string'}, 'version-data-url': {'type': 'string'}, 'url-query': {'type': 'string'}, 'url-data-url': {'type': 'string'}, 'timestamp-query': {'type': 'string'}, 'timestamp-data-url': {'type': 'string'}}}
    SUPPORTED_DATA_CLASSES = [ExternalData, ExternalGitRepo]

    @classmethod
    def get_json_schema(cls, data_class: t.Type[ExternalBase]):
        """Schema for checker data: git sources need tag or commit queries;
        plain data sources require both version and url queries."""
        schema = super().get_json_schema(data_class).copy()
        if issubclass(data_class, ExternalGitRepo):
            schema['anyOf'] = (schema.get('anyOf', []) + [{'required': ['tag-query']}, {'required': ['commit-query']}])
        else:
            schema['required'] = (schema.get('required', []) + ['version-query', 'url-query'])
        return schema

    async def _get_json(self, url: t.Union[(str, URL)], headers: t.Optional[t.Dict[(str, str)]]=None) -> JSONType:
        """Fetch JSON from url, adding a GitHub token header when available."""
        url = URL(url)
        headers = (headers.copy() if headers else {})
        if (url.host == 'api.github.com'):
            github_token = os.environ.get('GITHUB_TOKEN')
            if github_token:
                headers['Authorization'] = f'token {github_token}'
        return (await super()._get_json(url, headers))

    async def _query_sequence(self, queries: t.Iterable[_Query], json_vars: t.Dict[(str, JSONType)], init_json_data: JSONType=None) -> t.Dict[(str, str)]:
        """Run queries in order; earlier results become variables for later ones."""
        results: t.Dict[(str, str)] = {}
        for query in queries:
            _vars = (json_vars | results)
            if query.url_expr:
                # The query supplies its own document URL (itself a jq expression).
                url = (await _jq(query.url_expr, init_json_data, _vars))
                json_data = (await self._get_json(url))
            else:
                json_data = init_json_data
            results[query.name] = (await _jq(query.value_expr, json_data, _vars))
        return results

    @staticmethod
    def _read_q_seq(checker_data: t.Mapping, sequence: t.List[str]) -> t.Iterable[_Query]:
        """Yield a _Query for each name in sequence that is configured."""
        for query_name in sequence:
            q_prop = f'{query_name}-query'
            if (q_prop not in checker_data):
                continue
            url_prop = f'{query_name}-data-url'
            (yield _Query(name=query_name, value_expr=checker_data[q_prop], url_expr=checker_data.get(url_prop)))

    async def check(self, external_data: ExternalBase):
        """Entry point: fetch the base document and dispatch by data class."""
        assert self.should_check(external_data)
        json_url = external_data.checker_data.get('url')
        json_data = ((await self._get_json(json_url)) if json_url else None)
        json_vars: t.Dict[(str, JSONType)] = {}
        if external_data.parent:
            assert isinstance(external_data.parent, ExternalBase)
            # Expose the parent's current/new versions to the queries.
            parent_data: t.Dict[(str, t.Optional[t.Dict[(str, JSONType)]])]
            parent_data = {'current': external_data.parent.current_version.json, 'new': None}
            if external_data.parent.new_version:
                parent_data['new'] = external_data.parent.new_version.json
            json_vars['parent'] = parent_data
        if isinstance(external_data, ExternalGitRepo):
            return (await self._check_git(json_data, json_vars, external_data))
        else:
            assert isinstance(external_data, ExternalData)
            return (await self._check_data(json_data, json_vars, external_data))

    async def _check_data(self, json_data: JSONType, json_vars: t.Dict[(str, JSONType)], external_data: ExternalData):
        """Resolve version/url (and optional timestamp) and apply the update."""
        checker_data = external_data.checker_data
        results = (await self._query_sequence(self._read_q_seq(checker_data, ['tag', 'commit', 'version', 'url', 'timestamp']), json_vars, json_data))
        latest_version = results['version']
        latest_url = results['url']
        latest_timestamp = parse_timestamp(results.get('timestamp'))
        (await self._update_version(external_data, latest_version, latest_url, follow_redirects=False, timestamp=latest_timestamp))

    async def _check_git(self, json_data: JSONType, json_vars: t.Dict[(str, JSONType)], external_data: ExternalGitRepo):
        """Resolve tag/commit/version for a git source and set the new ref."""
        checker_data = external_data.checker_data
        results = (await self._query_sequence(self._read_q_seq(checker_data, ['tag', 'commit', 'version', 'timestamp']), json_vars, json_data))
        new_version = ExternalGitRef(url=external_data.current_version.url, commit=results.get('commit'), tag=results.get('tag'), branch=None, version=results.get('version'), timestamp=parse_timestamp(results.get('timestamp')))
        if (new_version.commit is None):
            # Only a tag was resolved — look up its commit on the remote.
            new_version = (await new_version.fetch_remote())
        external_data.set_new_version(new_version)
def run_migrations_online():
    """Run Alembic migrations in 'online' mode against a live connection."""
    cfg = config.get_section(config.config_ini_section)
    cfg['sqlalchemy.url'] = get_url()
    engine = engine_from_config(cfg, prefix='sqlalchemy.', poolclass=pool.NullPool)
    with engine.connect() as connection:
        # render_as_batch enables batch ALTER emulation (useful for SQLite).
        context.configure(connection=connection, target_metadata=target_metadata, render_as_batch=True)
        with context.begin_transaction():
            context.run_migrations()
def test_deepcopy_args():
    """deepcopy of a List provider must deep-copy its positional args."""
    original = providers.List()
    factory_a = providers.Factory(list)
    factory_b = providers.Factory(dict)
    original.add_args(factory_a, factory_b)

    clone = providers.deepcopy(original)
    copy_a, copy_b = clone.args

    # The copied args are new provider objects wrapping the same classes.
    assert original.args != clone.args
    assert factory_a.cls is copy_a.cls
    assert factory_a is not copy_a
    assert factory_b.cls is copy_b.cls
    assert factory_b is not copy_b
def main(timeout=0):
    """Register a BLE advertisement with BlueZ and run the GLib main loop.

    Args:
        timeout: seconds to advertise before shutting down; 0 advertises
            until the main loop is stopped externally.
    """
    global mainloop
    dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
    bus = dbus.SystemBus()
    adapter = find_adapter(bus)
    if (not adapter):
        print('LEAdvertisingManager1 interface not found')
        return
    # Power the adapter on before registering the advertisement.
    adapter_props = dbus.Interface(bus.get_object(BLUEZ_SERVICE_NAME, adapter), 'org.freedesktop.DBus.Properties')
    adapter_props.Set('org.bluez.Adapter1', 'Powered', dbus.Boolean(1))
    ad_manager = dbus.Interface(bus.get_object(BLUEZ_SERVICE_NAME, adapter), LE_ADVERTISING_MANAGER_IFACE)
    test_advertisement = TestAdvertisement(bus, 0)
    mainloop = GObject.MainLoop()
    # Registration is asynchronous; success/failure arrives via the callbacks.
    ad_manager.RegisterAdvertisement(test_advertisement.get_path(), {}, reply_handler=register_ad_cb, error_handler=register_ad_error_cb)
    if (timeout > 0):
        # Background timer quits the main loop after `timeout` seconds.
        threading.Thread(target=shutdown, args=(timeout,), daemon=True).start()
    else:
        print('Advertising forever ...')
    mainloop.run()
    # Clean up once the loop exits.
    ad_manager.UnregisterAdvertisement(test_advertisement)
    print('Advertisement unregistered')  # fixed typo: was 'Adevertisment'
    dbus.service.Object.remove_from_connection(test_advertisement)
class TestMacsXLSForMacs2010_(unittest.TestCase):
    """Checks MacsXLS parsing and sorting against canned MACS 2.0.10 output."""

    def test_load_macs2_xls_file(self):
        """Header metadata, column names and row order are read correctly."""
        xls = MacsXLS(fp=io.StringIO(MACS2010__data))
        self.assertEqual(xls.macs_version, '2.0.10.')
        self.assertEqual(xls.name, 'Gli1_ChIP_vs_input_36bp_bowtie_mm10_BASE_mfold5,50_Pe-5_Q0.05_bw250_MACS2')
        self.assertEqual(xls.command_line, None)
        self.assertFalse(xls.with_broad_option)
        self.assertEqual(xls.columns, ['order', 'chr', 'start', 'end', 'length', 'abs_summit', 'pileup', '-log10(pvalue)', 'fold_enrichment', '-log10(qvalue)', 'name'])
        self.assertEqual(len(xls.data), 5)
        # 'order' is expected to be 1-based and sequential.
        for index, row in enumerate(xls.data, start=1):
            self.assertEqual(row['order'], index)

    def test_sort_on_columns(self):
        """sort_on() reorders rows descending by the chosen column."""
        xls = MacsXLS(fp=io.StringIO(MACS2010__data))
        unsorted_pileups = (7.0, 12.0, 8.0, 9.0, 9.0)
        for row, expected in zip(xls.data, unsorted_pileups):
            self.assertEqual(row['pileup'], expected)
        xls.sort_on('pileup')
        sorted_pileups = (12.0, 9.0, 9.0, 8.0, 7.0)
        for row, expected in zip(xls.data, sorted_pileups):
            self.assertEqual(row['pileup'], expected)
class Labish():
    # Mixin describing Lab-like color spaces (channel names and indexes).
    # NOTE(review): the bare string literals below look like stripped decorator
    # arguments — likely `@deprecated(...)` wrappers on the labish_* aliases;
    # confirm against version control. The first one also currently serves as
    # the class docstring, so it must stay the first statement.
    ("Please use 'names' instead.")
    def labish_names(self) -> tuple[(str, ...)]:
        # Deprecated alias for names().
        return self.names()
    ("Please use 'indexes' instead.")
    def labish_indexes(self) -> list[int]:
        # Deprecated alias for indexes().
        return self.indexes()
    def names(self) -> tuple[(str, ...)]:
        # All channels except the last — presumably alpha; TODO confirm.
        return self.channels[:(- 1)]
    def indexes(self) -> list[int]:
        # Numeric channel indexes corresponding to names().
        return [self.get_channel_index(name) for name in self.names()]
class OptionPlotoptionsFunnel3dSonificationTracksMappingTremoloSpeed(Options):
    """Highcharts `plotOptions.funnel3d.sonification.tracks.mapping.tremoloSpeed` options.

    NOTE(review): the duplicate method names in the original were
    getter/setter pairs whose @property decorators were stripped; restored
    here following the file's Options accessor pattern.
    """

    @property
    def mapFunction(self):
        """Mapping function for the audio parameter (None if unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property to map to (None if unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped value (None if unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped value (None if unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Data scope the mapping is computed within (None if unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestFocalLoss(unittest.TestCase):
    """Tests sigmoid_focal_loss against binary cross-entropy references."""

    def setUp(self) -> None:
        super().setUp()
        np.random.seed(42)

    def test_focal_loss_equals_ce_loss(self) -> None:
        """With gamma=0 and alpha=-1, focal loss reduces to BCE (value and grad)."""
        inputs = logit(torch.tensor([[[0.95], [0.9], [0.98], [0.99]]], dtype=torch.float32))
        targets = torch.tensor([[[1], [1], [1], [1]]], dtype=torch.float32)
        inputs_fl = inputs.clone().requires_grad_()
        targets_fl = targets.clone()
        inputs_ce = inputs.clone().requires_grad_()
        targets_ce = targets.clone()
        focal_loss = sigmoid_focal_loss(inputs_fl, targets_fl, gamma=0, alpha=-1, reduction='mean')
        ce_loss = F.binary_cross_entropy_with_logits(inputs_ce, targets_ce, reduction='mean')
        self.assertEqual(ce_loss, focal_loss.data)
        focal_loss.backward()
        ce_loss.backward()
        self.assertTrue(torch.allclose(inputs_fl.grad.data, inputs_ce.grad.data))

    def test_easy_ex_focal_loss_less_than_ce_loss(self) -> None:
        """gamma=2 downweights each example by exactly (1 - p_t)^2."""
        N = 5
        inputs = logit(torch.rand(N))
        targets = torch.randint(0, 2, (N,)).float()
        focal_loss = sigmoid_focal_loss(inputs, targets, gamma=2, alpha=-1)
        ce_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduction='none')
        loss_ratio = (ce_loss / focal_loss).squeeze()
        prob = torch.sigmoid(inputs)
        p_t = ((prob * targets) + ((1 - prob) * (1 - targets)))
        correct_ratio = (1.0 / ((1.0 - p_t) ** 2))
        self.assertTrue(np.allclose(loss_ratio, correct_ratio))

    def test_easy_ex_focal_loss_weighted_less_than_ce_loss(self) -> None:
        """alpha=0.5 halves positives, so the CE/FL ratio is 2/(1-p)^2."""
        inputs = logit(torch.tensor([[[0.95], [0.9], [0.6], [0.3]]], dtype=torch.float64))
        targets = torch.tensor([[[1], [1], [1], [1]]], dtype=torch.float64)
        focal_loss = sigmoid_focal_loss(inputs, targets, gamma=2, alpha=0.5)
        ce_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduction='none')
        loss_ratio = (ce_loss / focal_loss).squeeze()
        correct_ratio = (2.0 / ((1.0 - inputs.squeeze().sigmoid()) ** 2))
        self.assertTrue(np.allclose(loss_ratio, correct_ratio))

    def test_hard_ex_focal_loss_similar_to_ce_loss(self) -> None:
        """Hard positives (low p) keep the same (1-p)^-2 relationship."""
        inputs = logit(torch.tensor([0.05, 0.12, 0.09, 0.17], dtype=torch.float32))
        targets = torch.tensor([1, 1, 1, 1], dtype=torch.float32)
        focal_loss = sigmoid_focal_loss(inputs, targets, gamma=2, alpha=-1)
        ce_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduction='none')
        loss_ratio = (ce_loss / focal_loss).squeeze()
        correct_ratio = (1.0 / ((1.0 - inputs.sigmoid()) ** 2))
        self.assertTrue(np.allclose(loss_ratio, correct_ratio))

    def test_negatives_ignored_focal_loss(self) -> None:
        """alpha=1 zeroes out the loss on negative targets."""
        inputs = logit(torch.tensor([[[0.05], [0.12], [0.89], [0.79]]], dtype=torch.float32))
        targets = torch.tensor([[[1], [1], [0], [0]]], dtype=torch.float32)
        focal_loss = sigmoid_focal_loss(inputs, targets, gamma=2, alpha=1).squeeze().numpy()
        ce_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduction='none').squeeze().numpy()
        targets = targets.squeeze().numpy()
        self.assertTrue(np.all((ce_loss[(targets == 0)] > 0)))
        self.assertTrue(np.all((focal_loss[(targets == 0)] == 0)))

    def test_positives_ignored_focal_loss(self) -> None:
        """alpha=0 zeroes out the loss on positive targets."""
        inputs = logit(torch.tensor([[[0.05], [0.12], [0.89], [0.79]]], dtype=torch.float32))
        targets = torch.tensor([[[1], [1], [0], [0]]], dtype=torch.float32)
        focal_loss = sigmoid_focal_loss(inputs, targets, gamma=2, alpha=0).squeeze().numpy()
        ce_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduction='none').squeeze().numpy()
        targets = targets.squeeze().numpy()
        self.assertTrue(np.all((ce_loss[(targets == 1)] > 0)))
        self.assertTrue(np.all((focal_loss[(targets == 1)] == 0)))

    def test_mean_focal_loss_equals_ce_loss(self) -> None:
        """'mean' reduction matches BCE when gamma=0, alpha=-1."""
        inputs = logit(torch.tensor([[0.05, 0.9], [0.52, 0.45], [0.89, 0.8], [0.39, 0.5]], dtype=torch.float32))
        targets = torch.tensor([[1, 0], [1, 0], [1, 1], [0, 1]], dtype=torch.float32)
        focal_loss = sigmoid_focal_loss(inputs, targets, gamma=0, alpha=-1, reduction='mean')
        ce_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduction='mean')
        self.assertEqual(ce_loss, focal_loss)

    def test_sum_focal_loss_equals_ce_loss(self) -> None:
        """'sum' reduction matches BCE when gamma=0, alpha=-1."""
        inputs = logit(torch.tensor([[[0.05], [0.12], [0.89], [0.79]]], dtype=torch.float32))
        targets = torch.tensor([[[1], [1], [0], [0]]], dtype=torch.float32)
        focal_loss = sigmoid_focal_loss(inputs, targets, gamma=0, alpha=-1, reduction='sum')
        ce_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduction='sum')
        self.assertEqual(ce_loss, focal_loss)

    def test_focal_loss_equals_ce_loss_multi_class(self) -> None:
        """The equivalence holds elementwise in the multi-class (one-hot) case."""
        inputs = logit(torch.tensor([[[0.95, 0.55, 0.12, 0.05], [0.09, 0.95, 0.36, 0.11], [0.06, 0.12, 0.56, 0.07], [0.09, 0.15, 0.25, 0.45]]], dtype=torch.float32))
        targets = torch.tensor([[[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]]], dtype=torch.float32)
        focal_loss = sigmoid_focal_loss(inputs, targets, gamma=0, alpha=-1, reduction='mean')
        ce_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduction='mean')
        self.assertEqual(ce_loss, focal_loss)

    # NOTE(review): decorator restored — it was stripped to a bare tuple in
    # this copy of the file.
    @unittest.skipIf((not torch.cuda.is_available()), 'CUDA unavailable')
    def test_focal_loss_equals_ce_loss_jit(self) -> None:
        """TorchScript version matches BCE on CUDA when gamma=0, alpha=-1."""
        device = torch.device('cuda:0')
        N = 5
        inputs = logit(torch.rand(N)).to(device)
        targets = torch.randint(0, 2, (N,)).float().to(device)
        focal_loss = sigmoid_focal_loss_jit(inputs, targets, gamma=0, alpha=-1)
        ce_loss = F.binary_cross_entropy_with_logits(inputs.cpu(), targets.cpu(), reduction='none')
        self.assertTrue(np.allclose(ce_loss, focal_loss.cpu()))
def focal_loss_with_init(N: int, alpha: float=(- 1)) -> typing.Callable[([], None)]:
    """Build a CUDA benchmark closure for sigmoid_focal_loss forward+backward.

    Args:
        N: number of random examples to generate.
        alpha: alpha passed through to the loss (-1 disables class weighting).

    Returns:
        A zero-argument callable suitable for a benchmark harness.
    """
    device = torch.device('cuda:0')
    inputs: torch.Tensor = logit(torch.rand(N)).to(device).requires_grad_()
    targets: torch.Tensor = torch.randint(0, 2, (N,)).float().to(device).requires_grad_()
    # Ensure setup-time transfers do not leak into the timed region.
    torch.cuda.synchronize()
    def run_focal_loss() -> None:
        fl = sigmoid_focal_loss(inputs, targets, gamma=0, alpha=alpha, reduction='mean')
        fl.backward()
        # Synchronize so the timing covers the full kernel execution.
        torch.cuda.synchronize()
    return run_focal_loss
def focal_loss_jit_with_init(N: int, alpha: float=(- 1)) -> typing.Callable[([], None)]:
    """Build a CUDA benchmark closure for the TorchScript focal loss.

    Same shape as focal_loss_with_init but exercising sigmoid_focal_loss_jit.

    Args:
        N: number of random examples to generate.
        alpha: alpha passed through to the loss (-1 disables class weighting).

    Returns:
        A zero-argument callable suitable for a benchmark harness.
    """
    device = torch.device('cuda:0')
    inputs: torch.Tensor = logit(torch.rand(N)).to(device).requires_grad_()
    targets: torch.Tensor = torch.randint(0, 2, (N,)).float().to(device).requires_grad_()
    # Ensure setup-time transfers do not leak into the timed region.
    torch.cuda.synchronize()
    def run_focal_loss_jit() -> None:
        fl = sigmoid_focal_loss_jit(inputs, targets, gamma=0, alpha=alpha, reduction='mean')
        fl.backward()
        # Synchronize so the timing covers the full kernel execution.
        torch.cuda.synchronize()
    return run_focal_loss_jit
class LocalFileAdminTests(Base.FileAdminTests):
    """FileAdmin test suite backed by the local filesystem."""

    def fileadmin_class(self):
        """Admin view class under test."""
        return fileadmin.FileAdmin

    def fileadmin_args(self):
        """(args, kwargs) used to construct the view."""
        return ((self._test_files_root, '/files'), {})

    def test_fileadmin_sort_bogus_url_param(self):
        """An unknown ?sort= value falls back to the default ordering."""
        view_cls = self.fileadmin_class()
        view_args, view_kwargs = self.fileadmin_args()
        app, admin = setup()

        class MyFileAdmin(view_cls):
            editable_extensions = ('txt',)

        view_kwargs = dict(view_kwargs)
        view_kwargs.setdefault('name', 'Files')
        admin.add_view(MyFileAdmin(*view_args, **view_kwargs))
        client = app.test_client()

        # Create a second file so default (date) and name orders differ.
        with open(op.join(self._test_files_root, 'dummy2.txt'), 'w') as fp:
            fp.write('test')

        rv = client.get('/admin/myfileadmin/?sort=bogus')
        assert rv.status_code == 200
        body = rv.data.decode('utf-8')
        assert body.find('path=dummy2.txt') < body.find('path=dummy.txt')

        rv = client.get('/admin/myfileadmin/?sort=name')
        assert rv.status_code == 200
        body = rv.data.decode('utf-8')
        assert body.find('path=dummy.txt') < body.find('path=dummy2.txt')

        # Best-effort cleanup of the temporary file.
        try:
            os.remove(op.join(self._test_files_root, 'dummy2.txt'))
        except (IOError, OSError):
            pass
class TestPluginsNorthCommon(object):
    """Unit tests for plugin_common conversion/type helpers.

    NOTE(review): the `@pytest.mark.` decorator prefixes were stripped in this
    copy of the file; restored below (pytest is presumably imported at file top).
    """

    @pytest.mark.parametrize('value, expected', [('xxx', 'xxx'), ('1xx', '1xx'), ('x1x', 'x1x'), ('xx1', 'xx1'), ('26/04/2018 11:14', '26/04/2018 11:14'), (-180.2, -180.2), (0.0, 0.0), (180.0, 180.0), (180.2, 180.2), ('-180.2', -180.2), ('180.2', 180.2), ('180.0', 180.0), ('180.', 180.0), (-10, -10), (0, 0), (10, 10), ('-10', -10), ('0', 0), ('10', 10)])
    def test_convert_to_type_good(self, value, expected):
        """Numeric-looking strings are parsed; everything else passes through."""
        assert (plugin_common.convert_to_type(value) == expected)

    @pytest.mark.parametrize('value, expected', [('111', '111'), ('26/04/2018 11:14', '26/04/2018 11:00'), ('-180.2', 180.2), (-10, 10)])
    def test_convert_to_type_bad(self, value, expected):
        """Mismatched conversions must not compare equal."""
        assert (plugin_common.convert_to_type(value) != expected)

    @pytest.mark.parametrize('value, expected', [('String 1', 'string'), (-999, 'integer'), (-1, 'integer'), (0, 'integer'), (-999, 'integer'), (-999.0, 'number'), (-1.2, 'number'), (0.0, 'number'), (1.2, 'number'), (999.0, 'number'), ('-1.2', 'number'), ('-1.0', 'number'), ('.0', 'number'), ('1.0', 'number'), ('1.2', 'number'), ('-1', 'integer'), ('0', 'integer'), ('1', 'integer'), (90774.998, 'number'), (41, 'integer'), (-2, 'integer'), (-159, 'integer'), ('up', 'string'), ('tock', 'string')])
    def test_evaluate_type(self, value, expected):
        """Values are classified as string / integer / number."""
        assert (plugin_common.evaluate_type(value) == expected)

    @pytest.mark.parametrize('value, expected', [([{'asset_code': 'temperature0', 'reading': 20}, {'asset_code': 'temperature1', 'reading': 21}, {'asset_code': 'temperature2', 'reading': 22}], [{'asset_code': 'temperature0', 'asset_data': 20}, {'asset_code': 'temperature1', 'asset_data': 21}, {'asset_code': 'temperature2', 'asset_data': 22}]), ([{'asset_code': 'temperature0', 'reading': 20}, {'asset_code': 'temperature1', 'reading': 21}, {'asset_code': 'temperature0', 'reading': 22}], [{'asset_code': 'temperature0', 'asset_data': 20}, {'asset_code': 'temperature1', 'asset_data': 21}]), ([{'asset_code': 'temperature1', 'reading': 10}, {'asset_code': 'temperature2', 'reading': 20}, {'asset_code': 'temperature1', 'reading': 11}, {'asset_code': 'temperature2', 'reading': 21}, {'asset_code': 'temperature3', 'reading': 30}], [{'asset_code': 'temperature1', 'asset_data': 10}, {'asset_code': 'temperature2', 'asset_data': 20}, {'asset_code': 'temperature3', 'asset_data': 30}])])
    def test_identify_unique_asset_codes(self, value, expected):
        """Duplicate asset codes collapse to their first occurrence."""
        assert (plugin_common.identify_unique_asset_codes(value) == expected)
class CustomSystemRoleSchema(Schema):
    """JSON:API serialization schema for custom system roles."""

    class Meta():
        # JSON:API resource type and self-link routing configuration.
        type_ = 'custom-system-role'
        self_view = 'v1.custom_system_role_detail'
        self_view_kwargs = {'id': '<id>'}
        # dasherize converts snake_case field names to dashed JSON keys.
        inflect = dasherize
    # Server-assigned identifier; never accepted from the client.
    id = fields.Str(dump_only=True)
    name = fields.Str(required=True)
    # To-many relationship to the panel permissions granted to this role.
    panel_permissions = Relationship(self_view='v1.custom_system_roles_panel_permissions', self_view_kwargs={'id': '<id>'}, related_view='v1.panel_permission_list', related_view_kwargs={'custom_system_role_id': '<id>'}, schema='PanelPermissionSchema', many=True, type_='panel-permission')
# NOTE(review): the bare string below looks like a stripped decorator —
# probably `@registry.reg('cuda.bmm_rcr_n1.func_decl')` (backend codegen
# registration); confirm against version control.
('cuda.bmm_rcr_n1.func_decl')
def gen_function_decl(func_attrs):
    """Render the function declaration for a bmm_rcr_n1 CUDA kernel.

    Args:
        func_attrs: op attribute dict; uses 'name' plus the dtypes of the
            first input and first output tensors.

    Returns:
        The declaration string rendered from FUNC_DECL_TEMPLATE.
    """
    func_name = func_attrs['name']
    backend_spec = CUDASpec()
    # Map framework dtypes to the backend's C++ element types.
    elem_input_type = backend_spec.dtype_to_lib_type(func_attrs['inputs'][0]._attrs['dtype'])
    elem_output_type = backend_spec.dtype_to_lib_type(func_attrs['outputs'][0]._attrs['dtype'])
    return FUNC_DECL_TEMPLATE.render(func_name=func_name, elem_input_type=elem_input_type, elem_output_type=elem_output_type)
def flatten_diff(diff):
    """Flatten a nested config diff into one instruction per root key.

    Each leaf of *diff* is an ``(old, new, action)`` tuple where *action* is
    one of KEEP/REMOVE/ADD/UPDATE; nested dicts group leaves under a root key.

    Returns:
        Dict mapping each root key to a single aggregate instruction: KEEP
        (nothing changed), UPDATE (only additions/updates), REMOVE (everything
        removed), or REPLACE (a mix that includes removals).

    Raises:
        RuntimeError: if a leaf is not a valid (old, new, action) tuple.
    """
    valid_instructions = ('KEEP', 'REMOVE', 'ADD', 'UPDATE')
    def _get_all_nested_diff_instructions(diffpart):
        # Collect the action component of every leaf tuple below diffpart.
        out = []
        typ = type(diffpart)
        if ((typ == tuple) and (len(diffpart) == 3) and (diffpart[2] in valid_instructions)):
            out = [diffpart[2]]
        elif (typ == dict):
            for val in diffpart.values():
                out.extend(_get_all_nested_diff_instructions(val))
        else:
            # BUG FIX: the placeholder is named, so format() must receive it as
            # a keyword argument — the old positional call raised KeyError
            # instead of the intended message. `_` is presumably the file's
            # gettext translator.
            raise RuntimeError(_('Diff contains non-dicts that are not on the form (old, new, action_to_take): {diffpart}').format(diffpart=diffpart))
        return out
    flat_diff = {}
    for (rootkey, diffpart) in diff.items():
        insts = _get_all_nested_diff_instructions(diffpart)
        if all((inst == 'KEEP') for inst in insts):
            rootinst = 'KEEP'
        elif all((inst in ('ADD', 'UPDATE')) for inst in insts):
            rootinst = 'UPDATE'
        elif all((inst == 'REMOVE') for inst in insts):
            rootinst = 'REMOVE'
        elif ('REMOVE' in insts):
            # Mixed removals and other changes: the subtree must be replaced.
            rootinst = 'REPLACE'
        else:
            rootinst = 'UPDATE'
        flat_diff[rootkey] = rootinst
    return flat_diff
class table_desc_stats_request(stats_request):
    """OpenFlow v6 (1.5) table_desc multipart/stats request message.

    Wire format: version/type/length/xid header, then stats_type, flags and
    4 pad bytes. Generated-style message class; layout must not change.
    """
    version = 6
    type = 18
    stats_type = 14

    def __init__(self, xid=None, flags=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        return

    def pack(self):
        """Serialize the message; the length field is patched in afterwards."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))
        length = sum([len(x) for x in packed])
        # Back-patch the placeholder length now that the total is known.
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    # NOTE(review): decorator restored — unpack takes a reader, not self,
    # and is invoked as a factory.
    @staticmethod
    def unpack(reader):
        """Deserialize from a buffer reader, validating the fixed fields."""
        obj = table_desc_stats_request()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 18)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this message's declared length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 14)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer q."""
        q.text('table_desc_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REQ_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
            q.breakable()
        q.text('}')
class CategoricalDirichletModel():
    """Dirichlet-categorical model: category probabilities drawn from a
    Dirichlet prior with concentration parameters alpha."""

    def __init__(self, alpha: Tensor) -> None:
        # Concentration parameters of the Dirichlet prior.
        self.alpha_ = alpha
    # NOTE(review): the bare `_variable` lines below look like stripped
    # decorators (e.g. a probabilistic-programming `@...random_variable`
    # marker); confirm against version control — as written they raise
    # NameError at class-body execution.
    _variable
    def dirichlet(self) -> dist.Distribution:
        return dist.Dirichlet(self.alpha_)
    _variable
    def categorical(self) -> dist.Distribution:
        # Categorical parameterized by the dirichlet() random variable.
        return dist.Categorical(self.dirichlet())
def window_function(name, window_type, length, fft_mode, linewrap):
    """Generate wrapped C source declaring a window-function lookup table.

    Emits an array of `length` window coefficients plus a companion
    `<name>_length` constant, with output lines wrapped at `linewrap` columns.
    """
    import scipy.signal
    coefficients = scipy.signal.get_window(window_type, length, fftbins=fft_mode)
    declarations = [
        cgen.array_declare(name, length, values=coefficients),
        cgen.constant_declare((name + '_length'), val=length),
    ]
    source = '\n'.join(declarations)
    # Re-wrap the generated text so no line exceeds the requested width.
    return '\n'.join(textwrap.wrap(source, linewrap))
# NOTE(review): the three @unittest.skipIf decorators in this class were
# stripped to bare tuples in this copy; restored below.
@unittest.skipIf((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
class VarTestCase(unittest.TestCase):
    """Tests the AITemplate `var` op against torch.var."""

    def __init__(self, *args, **kwargs):
        super(VarTestCase, self).__init__(*args, **kwargs)
        # Gives each compiled test module a unique name.
        self.test_count = 0

    def _run_var(self, *, dim, unbiased, input_shape, keepdim=False, input_type='float16', output_type=None, copy_op=False, atol=0.01, rtol=0.01):
        """Compile var over `dim` for `input_shape` and compare with torch.var."""
        torch.manual_seed(0)
        logging.info('Test input_shape={input_shape}, reduction_axes={dim}'.format(input_shape=input_shape, dim=dim))
        target = detect_target()
        X = Tensor(shape=input_shape, dtype=input_type, name='input_0', is_input=True)
        op = ops.var(dim=dim, unbiased=unbiased, keepdim=keepdim, dtype=output_type)
        if copy_op:
            # Exercise op reconstruction from its own attributes.
            op = ops.var(**op._get_op_attributes())
        Y = op(X)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        y_shape = [var._attrs['values'][0] for var in Y._attrs['shape']]
        y_dtype = Y._attrs['dtype']
        logging.info('AITemplate output_shape: {}'.format(y_shape))
        logging.info('AITemplate output_type: {}'.format(y_dtype))
        module = compile_model(Y, target, './tmp', f'var_{self.test_count}')
        X_pt = get_random_torch_tensor(input_shape, input_type)
        if (output_type is None):
            torch_dtype = None
        else:
            torch_dtype = string_to_torch_dtype(output_type)
        Y_pt = torch.var(X_pt.to(dtype=torch_dtype), dim=dim, unbiased=unbiased, keepdim=keepdim)
        y = torch.empty_like(Y_pt)
        module.run_with_tensors([X_pt], [y])
        np.testing.assert_equal(y_shape, Y_pt.size())
        np.testing.assert_equal(string_to_torch_dtype(y_dtype), Y_pt.dtype)
        self.assertTrue(torch.allclose(Y_pt, y, atol=atol, rtol=rtol, equal_nan=True))
        self.test_count += 1

    def test_var_float16(self):
        self._run_var(dim=(- 1), unbiased=True, input_shape=[1, 1], keepdim=False)
        self._run_var(dim=(- 1), unbiased=False, input_shape=[1, 1], keepdim=False)
        self._run_var(dim=(- 1), unbiased=True, input_shape=[1, 5], keepdim=False)
        self._run_var(dim=(- 1), unbiased=False, input_shape=[2, 8], keepdim=False)
        self._run_var(dim=(- 1), unbiased=False, input_shape=[3, 2, 2050], keepdim=False)
        self._run_var(dim=(- 1), unbiased=True, input_shape=[3, 2, 2050], keepdim=False)
        self._run_var(dim=1, unbiased=True, input_shape=[3, 2050, 2], keepdim=True)
        self._run_var(dim=0, unbiased=True, input_shape=[3001, 4, 2], keepdim=True)
        self._run_var(dim=(- 1), unbiased=True, input_shape=[1, 1000000, 6], keepdim=False)
        self._run_var(dim=0, unbiased=True, input_shape=[3001, 4, 2], keepdim=True, copy_op=True)
        self._run_var(dim=(- 1), unbiased=True, input_shape=[1, 1000000, 6], keepdim=False, copy_op=True)

    def _run_batched_var(self, *, dim, unbiased, keepdim=False, input_type='float16', output_type=None, test_name='batched_var'):
        """Compile once with a dynamic batch dim, then run several batch sizes."""
        torch.manual_seed(0)
        logging.info('Test batched_var with reduction_axes={dim}'.format(dim=dim))
        target = detect_target()
        M = 4
        N = 32
        X = Tensor(shape=[IntImm(M), IntVar(name='input_batch', values=[1, 2048]), IntImm(N)], dtype=input_type, name='input_0', is_input=True)
        op = ops.var(dim=dim, unbiased=unbiased, keepdim=keepdim, dtype=output_type)
        Y = op(X)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        y_dtype = Y._attrs['dtype']
        logging.info('AITemplate output_type: {}'.format(y_dtype))
        module = compile_model(Y, target, './tmp', test_name)
        for B in [5, 128, 1024, 1237, 2002]:
            input_shape = [M, B, N]
            logging.info('Testing input_shape={}'.format(input_shape))
            X_pt = get_random_torch_tensor(input_shape, input_type)
            Y_pt = torch.var(X_pt, dim=dim, unbiased=unbiased, keepdim=keepdim)
            y = torch.empty_like(Y_pt)
            module.run_with_tensors([X_pt], [y])
            np.testing.assert_equal(string_to_torch_dtype(y_dtype), Y_pt.dtype)
            self.assertTrue(torch.allclose(Y_pt, y, atol=0.01, rtol=0.01))
        self.test_count += 1

    def test_batched_var(self):
        self._run_batched_var(dim=0, unbiased=False, keepdim=True, test_name='batched_var_0')
        self._run_batched_var(dim=1, unbiased=True, keepdim=False, test_name='batched_var_1')
        self._run_batched_var(dim=1, unbiased=False, keepdim=True, test_name='batched_var_2')
        self._run_batched_var(dim=2, unbiased=True, keepdim=False, test_name='batched_var_3')

    @unittest.skipIf((detect_target().name() == 'rocm'), 'fp32 not supported in ROCm')
    def test_var_float32(self):
        self._run_var(dim=(- 1), unbiased=False, input_shape=[2, 8], keepdim=False, input_type='float32', atol=1.3e-06, rtol=1e-05)
        self._run_var(dim=(- 1), unbiased=False, input_shape=[2, 8], keepdim=False, input_type='float16', output_type='float32', atol=1.3e-06, rtol=1e-05)
        self._run_var(dim=(- 1), unbiased=False, input_shape=[3, 2, 2050], keepdim=False, input_type='float32', atol=1.3e-06, rtol=1e-05)
        self._run_var(dim=(- 1), unbiased=False, input_shape=[3, 2, 2050], keepdim=False, input_type='float16', output_type='float32', atol=1.3e-06, rtol=1e-05)
        self._run_var(dim=1, unbiased=True, input_shape=[1025, 2047], keepdim=True, input_type='float32', atol=1.3e-06, rtol=1e-05)
        self._run_var(dim=1, unbiased=True, input_shape=[1025, 2047], keepdim=True, input_type='float16', output_type='float32', atol=1.3e-06, rtol=1e-05)

    @unittest.skipIf((detect_target().name() == 'rocm'), 'bf16 not supported in ROCm')
    def test_var_bfloat16(self):
        self._run_var(dim=(- 1), unbiased=False, input_shape=[2, 8], keepdim=False, input_type='bfloat16', atol=0.1, rtol=0.1)
        self._run_var(dim=(- 1), unbiased=False, input_shape=[3, 2, 2050], keepdim=False, input_type='bfloat16', atol=0.1, rtol=0.1)
        self._run_var(dim=1, unbiased=True, input_shape=[1025, 2047], keepdim=True, input_type='bfloat16', atol=0.1, rtol=0.1)
class TornadoTest(unittest.TestCase, BaseTest):
    # Engine capability matrix and injection payloads for the Tornado
    # template plugin (tplmap-style test suite).
    # NOTE(review): `url` and `url_blind` below are truncated to a lone quote
    # — the target URLs appear to have been stripped from this copy of the
    # file (as written these lines are a syntax error); restore them from
    # version control before running.
    # Capabilities expected when injected output is reflected in the response.
    expected_data = {'language': 'python', 'engine': 'tornado', 'evaluate': 'python', 'execute': True, 'read': True, 'write': True, 'prefix': '', 'suffix': '', 'trailer': '{{%(trailer)s}}', 'header': '{{%(header)s}}', 'render': '{{%(code)s}}', 'bind_shell': True, 'reverse_shell': True}
    # Capabilities expected in blind mode (no output reflected).
    expected_data_blind = {'language': 'python', 'engine': 'tornado', 'evaluate_blind': 'python', 'execute_blind': True, 'write': True, 'blind': True, 'prefix': '', 'suffix': '', 'bind_shell': True, 'reverse_shell': True}
    url = '
    url_blind = '
    plugin = Tornado
    # Blind-detection cases: (level, clue, payload template, channel overrides).
    blind_tests = [(0, 0, 'AAA%sAAA', {}), (1, 2, '{%% for a in %s %%}\n{%% end %%}', {'prefix': '"1"%}', 'suffix': ''})]
    # Reflection/context-escape cases covering strings, tuples, lists, dicts,
    # statements and comments at increasing nesting levels.
    reflection_tests = [(0, 0, '%s', {}), (0, 0, 'AAA%sAAA', {}), (1, 1, '{{%s}}', {'prefix': '1}}', 'suffix': ''}), (1, 1, "{{ '%s' }}", {'prefix': "1'}}", 'suffix': ''}), (1, 1, '{{ "%s" }}', {'prefix': '1"}}', 'suffix': ''}), (1, 3, '{{ """%s""" }}', {'prefix': '1"""}}', 'suffix': ''}), (1, 4, '{{[%s]}}', {'prefix': '1]}}', 'suffix': ''}), (1, 3, "{{ ['%s'] }}", {'prefix': "1']}}", 'suffix': ''}), (1, 3, '{{ ["%s"] }}', {'prefix': '1"]}}', 'suffix': ''}), (1, 3, '{{ ["""%s"""] }}', {'prefix': '1"""]}}', 'suffix': ''}), (1, 1, '{%% if %s: %%}\n{%% end %%}', {'prefix': '1%}', 'suffix': ''}), (1, 2, '{%% for a in %s: %%}\n{%% end %%}', {'prefix': '"1"%}', 'suffix': ''}), (1, 1, '{%% if %s==1 %%}\n{%% end %%}', {'prefix': '1%}', 'suffix': ''}), (1, 1, "{%% if '%s'==1 %%}\n{%% end %%}", {'prefix': "1'%}", 'suffix': ''}), (1, 1, '{%% if "%s"==1 %%}\n{%% end %%}', {'prefix': '1"%}', 'suffix': ''}), (1, 3, '{%% if """%s"""==1 %%}\n{%% end %%}', {'prefix': '1"""%}', 'suffix': ''}), (1, 2, '{%% if (1, %s)==1 %%}\n{%% end %%}', {'prefix': '1)%}', 'suffix': ''}), (1, 2, "{%% if (1, '%s')==1 %%}\n{%% end %%}", {'prefix': "1')%}", 'suffix': ''}), (1, 2, '{%% if (1, "%s")==1 %%}\n{%% end %%}', {'prefix': '1")%}', 'suffix': ''}), (1, 3, '{%% if (1, """%s""")==1 %%}\n{%% end %%}', {'prefix': '1""")%}', 'suffix': ''}), (1, 3, '{%% if [%s]==1 %%}\n{%% end %%}', {'prefix': '1]%}', 'suffix': ''}), (1, 3, "{%% if ['%s']==1 %%}\n{%% end %%}", {'prefix': "1']%}", 'suffix': ''}), (1, 3, '{%% if ["%s"]==1 %%}\n{%% end %%}', {'prefix': '1"]%}', 'suffix': ''}), (1, 3, '{%% if ["""%s"""]==1 %%}\n{%% end %%}', {'prefix': '1"""]%}', 'suffix': ''}), (1, 5, '{%% if (1, [%s])==1 %%}\n{%% end %%}', {'prefix': '1])%}', 'suffix': ''}), (1, 5, "{%% if (1, ['%s'])==1 %%}\n{%% end %%}", {'prefix': "1'])%}", 'suffix': ''}), (1, 5, '{%% if (1, ["%s"])==1 %%}\n{%% end %%}', {'prefix': '1"])%}', 'suffix': ''}), (1, 5, '{%% if (1, ["""%s"""])==1 %%}\n{%% end %%}', {'prefix': '1"""])%}', 'suffix': ''}), (1, 3, 
    '{%% for a in {%s} %%}\n{%% end %%}', {'prefix': '1}%}', 'suffix': ''}), (1, 3, '{%% if {%s:1}==1 %%}\n{%% end %%}', {'prefix': '1}%}', 'suffix': ''}), (1, 3, "{%% if {'%s':1}==1 %%}\n{%% end %%}", {'prefix': "1'}%}", 'suffix': ''}), (1, 3, '{%% if {"%s":1}==1 %%}\n{%% end %%}', {'prefix': '1"}%}', 'suffix': ''}), (1, 3, '{%% if {"""%s""":1}==1 %%}\n{%% end %%}', {'prefix': '1"""}%}', 'suffix': ''}), (1, 3, '{%% if {1:%s}==1 %%}\n{%% end %%}', {'prefix': '1}%}', 'suffix': ''}), (1, 3, "{%% if {1:'%s'}==1 %%}\n{%% end %%}", {'prefix': "1'}%}", 'suffix': ''}), (1, 3, '{%% if {1:"%s"}==1 %%}\n{%% end %%}', {'prefix': '1"}%}', 'suffix': ''}), (1, 3, '{%% if {1:"""%s"""}==1 %%}\n{%% end %%}', {'prefix': '1"""}%}', 'suffix': ''}), (5, 1, '{# %s #}', {'prefix': '#}', 'suffix': '{#'})]
def test_inactivate_thin_cells(tmpdir):
    """Convert the Emerald grid to a hybrid grid, deactivate very thin cells,
    and write the result to a ROFF file under the pytest tmpdir."""
    grid = xtgeo.grid_from_file(EMEGFILE)
    region = xtgeo.gridproperty_from_file(EMERFILE, name='REGION')
    grid.convert_to_hybrid(nhdiv=40, toplevel=1650, bottomlevel=1690, region=region, region_number=1)
    # Cells thinner than 1 mm become inactive.
    grid.inactivate_by_dz(0.001)
    grid.to_file(join(tmpdir, 'test_hybridgrid2_inact_thin.roff'))
class GreetingWorkflowImpl(GreetingWorkflow):
    """Workflow implementation whose activity stub does not retry, so a
    failing activity surfaces on the first attempt; the caught exception is
    published through the module-level ``caught_exception`` for inspection."""

    def __init__(self):
        # maximum_attempts=1 disables retries on the activity stub.
        self.greeting_activities: GreetingActivities = Workflow.new_activity_stub(
            GreetingActivities, retry_parameters=RetryParameters(maximum_attempts=1))

    async def get_greeting(self):
        try:
            greeting = await self.greeting_activities.compose_greeting()
        except Exception as ex:
            # Record the failure for the test harness; implicitly returns None.
            global caught_exception
            caught_exception = ex
        else:
            return greeting
def f_parse_saves(threads=None, save_path=None, game_name_prefix='') -> None:
    """Scan ``save_path`` for save files and feed every parsed gamestate to a
    TimelineExtractor.

    :param threads: optional override for the configured worker-thread count.
    :param save_path: directory to monitor; defaults to the configured path.
    :param game_name_prefix: only games whose name starts with this prefix.
    """
    if threads is not None:
        config.CONFIG.threads = threads
    if save_path is None:
        save_path = config.CONFIG.save_file_path
    monitor = save_parser.BatchSavePathMonitor(save_path, game_name_prefix=game_name_prefix)
    extractor = timeline.TimelineExtractor()
    for game_name, state_dict in monitor.get_gamestates_and_check_for_new_files():
        if state_dict is None:
            continue
        extractor.process_gamestate(game_name, state_dict)
        # Gamestate dicts are large; drop the reference before the next parse.
        del state_dict
class FaucetUntaggedIPv4ControlPlaneFuzzTest(FaucetUntaggedTest):
    """Stress tests of FAUCET's IPv4 control plane: fragmented pings, scapy
    fuzzing of ICMP/ARP toward the controller VIP, and port flapping."""
    # Single untagged VLAN whose VIP (10.0.0.254/24) is answered by FAUCET itself.
    CONFIG_GLOBAL = '\nvlans:\n 100:\n description: "untagged"\n faucet_vips: ["10.0.0.254/24"]\n'
    CONFIG = ('\n max_resolve_backoff_time: 1\n' + CONFIG_BOILER_UNTAGGED)
    def test_ping_fragment_controller(self):
        """Controller VIP must still answer after receiving fragmented pings."""
        first_host = self.hosts_name_ordered()[0]
        # -s 1476 forces IP fragmentation on a standard 1500-byte MTU link.
        first_host.cmd(('ping -s 1476 -c 3 %s' % self.FAUCET_VIPV4.ip))
        self.one_ipv4_controller_ping(first_host)
    def test_fuzz_controller(self):
        """Blast fuzzed ICMP echo request/reply and ARP at the VIP; the
        controller must keep answering pings afterwards."""
        first_host = self.hosts_name_ordered()[0]
        self.one_ipv4_controller_ping(first_host)
        packets = 1000
        fuzz_template = 'python3 -c "from scapy.all import * ; scapy.all.send(%s, count=%u)"'
        # Three campaigns: fuzzed ICMP type 0, fuzzed ICMP type 8, fuzzed ARP.
        for fuzz_cmd in ((fuzz_template % (("IP(dst='%s')/fuzz(%s(type=0))" % (self.FAUCET_VIPV4.ip, 'ICMP')), packets)), (fuzz_template % (("IP(dst='%s')/fuzz(%s(type=8))" % (self.FAUCET_VIPV4.ip, 'ICMP')), packets)), (fuzz_template % (("fuzz(%s(pdst='%s'))" % ('ARP', self.FAUCET_VIPV4.ip)), packets))):
            # Bound each scapy run to 180s and verify it actually sent everything.
            fuzz_out = first_host.cmd(mininet_test_util.timeout_cmd(fuzz_cmd, 180))
            self.assertTrue(re.search(('Sent %u packets' % packets), fuzz_out), msg=('%s: %s' % (fuzz_cmd, fuzz_out)))
        self.one_ipv4_controller_ping(first_host)
    def test_flap_ping_controller(self):
        """Host-to-host and host-to-controller pings must survive repeated
        flapping of every switch port."""
        (first_host, second_host) = self.hosts_name_ordered()[0:2]
        for _ in range(5):
            self.one_ipv4_ping(first_host, second_host.IP())
            for host in (first_host, second_host):
                self.one_ipv4_controller_ping(host)
            self.flap_all_switch_ports()
class bBitMinHash(object):
    """Memory-efficient b-bit MinHash (Li & Koenig, "b-Bit Minwise Hashing").

    Keeps only the lowest ``b`` bits of every hash value of a full MinHash,
    shrinking storage and serialized size at the cost of estimator accuracy.
    ``jaccard`` applies the paper's bias correction to recover an estimate of
    the true Jaccard similarity.  ``r`` is the ratio used by that correction;
    ``r == 0.0`` falls back to the uniform-collision term ``1 / 2**b``.
    """

    __slots__ = ('seed', 'b', 'r', 'hashvalues')
    # Serialized header: seed (int64), b (uint8), r (float64), num_perm (int32).
    _serial_fmt_params = '<qBdi'
    # Hash values are bit-packed, high slot first, into unsigned 64-bit blocks.
    _serial_fmt_block = 'Q'

    def __init__(self, minhash, b=1, r=0.0):
        """Build from ``minhash`` by masking each hash value down to ``b`` bits.

        Args:
            minhash: MinHash-like object exposing ``hashvalues`` and ``seed``.
            b (int): number of low bits to keep, in [0, 32].
            r (float): bias-correction ratio in [0.0, 1.0].

        Raises:
            ValueError: if ``b`` or ``r`` is out of range.
        """
        b = int(b)
        r = float(r)
        if b > 32 or b < 0:
            raise ValueError('b must be an integer in [0, 32]')
        if r > 1.0:
            raise ValueError('r must be a float in [0.0, 1.0]')
        bmask = (1 << b) - 1
        self.hashvalues = np.bitwise_and(minhash.hashvalues, bmask).astype(np.uint32)
        self.seed = minhash.seed
        self.b = b
        self.r = r

    def __eq__(self, other):
        """Exact equality: same type, seed, b, r and truncated hash values."""
        return (type(self) is type(other) and self.seed == other.seed
                and self.b == other.b and self.r == other.r
                and np.array_equal(self.hashvalues, other.hashvalues))

    def jaccard(self, other):
        """Estimate Jaccard similarity with ``other`` (same ``b`` and seed).

        Raises:
            ValueError: if ``b`` or ``seed`` differ between the two sketches.
        """
        if self.b != other.b:
            raise ValueError('Cannot compare two b-bit MinHashes with different b values')
        if self.seed != other.seed:
            raise ValueError('Cannot compare two b-bit MinHashes with different set of permutations')
        # Raw collision rate of the truncated values.
        intersection = np.count_nonzero(self.hashvalues == other.hashvalues)
        raw_est = float(intersection) / float(self.hashvalues.size)
        # Bias correction (Li & Koenig): est = (raw - C1) / (1 - C2).
        a1 = self._calc_a(self.r, self.b)
        a2 = self._calc_a(other.r, other.b)
        c1, c2 = self._calc_c(a1, a2, self.r, other.r)
        return (raw_est - c1) / (1 - c2)

    def bytesize(self):
        """Total number of bytes of the serialized representation."""
        return self._bytesize()[-1]

    def __getstate__(self):
        """Serialize header + bit-packed hash values into a bytearray."""
        slot_size, n, num_blocks, total = self._bytesize()
        buf = bytearray(total)
        blocks = np.zeros((num_blocks,), dtype=np.uint64)
        for i in range(num_blocks):
            start = i * n
            hvs = self.hashvalues[start:(start + n)]
            for j, hv in enumerate(hvs):
                # BUGFIX: shift in uint64 space. Shifting the uint32 value
                # directly overflows for wide slots (b > 16) and under
                # NumPy 2's promotion rules, corrupting the packed blocks.
                blocks[i] |= np.uint64(hv) << np.uint64((n - 1 - j) * slot_size)
        fmt = self._serial_fmt_params + ('%d%s' % (num_blocks, self._serial_fmt_block))
        struct.pack_into(fmt, buf, 0, self.seed, self.b, self.r,
                         self.hashvalues.size, *blocks)
        return buf

    def __setstate__(self, buf):
        """Restore state from the packed form produced by ``__getstate__``."""
        try:
            self.seed, self.b, self.r, num_perm = struct.unpack_from(
                self._serial_fmt_params, buf, 0)
        except TypeError:
            # Python 2 fallback: wrap in a buffer() when struct rejects the type.
            self.seed, self.b, self.r, num_perm = struct.unpack_from(
                self._serial_fmt_params, buffer(buf), 0)
        offset = struct.calcsize(self._serial_fmt_params)
        self.hashvalues = np.zeros((num_perm,), dtype=np.uint32)
        slot_size, n, num_blocks, total = self._bytesize()
        fmt = '%d%s' % (num_blocks, self._serial_fmt_block)
        try:
            blocks = struct.unpack_from(fmt, buf, offset)
        except TypeError:
            blocks = struct.unpack_from(fmt, buffer(buf), offset)
        mask = (1 << slot_size) - 1
        for i in range(num_blocks):
            start = i * n
            for j, _ in enumerate(self.hashvalues[start:(start + n)]):
                hv = (blocks[i] >> ((n - 1 - j) * slot_size)) & mask
                self.hashvalues[start + j] = np.uint32(hv)

    def _calc_a(self, r, b):
        """Collision-probability term A(r, b) from Li & Koenig (2010)."""
        if r == 0.0:
            # Limit as r -> 0: uniform chance of a b-bit collision.
            return 1.0 / (1 << b)
        # BUGFIX: the denominator exponent is 2**b (as in the numerator's
        # 2**b - 1), not 2*b, per A = r(1-r)^(2^b - 1) / (1 - (1-r)^(2^b)).
        return (r * ((1 - r) ** ((2 ** b) - 1))) / (1 - ((1 - r) ** (2 ** b)))

    def _calc_c(self, a1, a2, r1, r2):
        """Correction terms (C1, C2); reduces to (a1, a2) when both r are 0."""
        if r1 == 0.0 and r2 == 0.0:
            return (a1, a2)
        div = 1 / (r1 + r2)
        c1 = ((a1 * r2) + (a2 * r1)) * div
        c2 = ((a1 * r1) + (a2 * r2)) * div
        return (c1, c2)

    def _find_slot_size(self, b):
        """Bit-width of the packing slot for b-bit values: 1, 2, 4, 8, 16 or 32."""
        if b == 1:
            return 1
        if b == 2:
            return 2
        if b <= 4:
            return 4
        if b <= 8:
            return 8
        if b <= 16:
            return 16
        if b <= 32:
            return 32
        raise ValueError('Incorrect value of b')

    def _bytesize(self):
        """Return (slot_size_bits, slots_per_block, num_blocks, total_bytes)."""
        block_size = struct.calcsize(self._serial_fmt_block)
        slot_size = self._find_slot_size(self.b)
        num_slots_per_block = int((block_size * 8) / slot_size)
        num_blocks = int(np.ceil(float(self.hashvalues.size) / num_slots_per_block))
        total = struct.calcsize(self._serial_fmt_params + ('%d%s' % (num_blocks, self._serial_fmt_block)))
        return (slot_size, num_slots_per_block, num_blocks, total)
('new')
def cancel_sell(uid, entry_id):
    """Withdraw the user's exchange listing and return the item to their backpack.

    Raises ItemNotFound when the entry does not exist or is not owned by ``uid``.
    """
    session = current_session()
    listing = session.query(Exchange).filter(
        (Exchange.id == entry_id), (Exchange.seller_id == uid)).first()
    if listing is None:
        raise exceptions.ItemNotFound
    # Ensure the seller can actually hold the item before handing it back.
    helpers.require_free_backpack_slot(session, uid)
    returned_item = listing.item
    returned_item.owner_id = uid
    returned_item.status = 'backpack'
    # Audit trail: remember the asking price of the cancelled listing.
    session.add(ItemActivity(uid=uid, action='cancel_sell', item_id=listing.item.id, extra=json.dumps({'price': listing.price}), created=datetime.datetime.now()))
    session.delete(listing)
def test_read_json_file():
    """read_json should parse a JSON document from a file on disk."""
    contents = '{\n "hello": "world"\n}'
    with make_tempdir({'tmp.json': contents}) as workdir:
        json_path = workdir / 'tmp.json'
        assert json_path.exists()
        parsed = read_json(json_path)
        assert len(parsed) == 1
        assert parsed['hello'] == 'world'
def test_vector_bilinear_exterior_facet_integral():
    """Assembling a vector mass matrix over the exterior facets of an interval
    must produce ones exactly on the boundary DOF diagonal entries."""
    mesh = IntervalMesh(5, 5)
    space = VectorFunctionSpace(mesh, 'CG', 1, dim=2)
    form = inner(TrialFunction(space), TestFunction(space)) * ds
    matrix = assemble(form)
    vals = matrix.M.values
    # Boundary DOFs: the two components at each end of the interval.
    boundary_entries = ((0, 0), (1, 1), (-2, -2), (-1, -1))
    for row, col in boundary_entries:
        assert np.allclose(vals[row, col], 1.0)
        vals[row, col] = 0.0
    # With the boundary diagonal zeroed, nothing else may be nonzero.
    assert np.allclose(vals, 0.0)
class ColorSet(object):
    """A palette of five Color objects kept consistent under a colour-harmony
    rule (analogous, monochromatic, triad, tetrad, pentad, complementary,
    shades, custom).

    Index 0 acts as the principal colour; indices 1-4 are derived from it by
    the *_create/*_modify methods.  ``dep_wtp`` selects the hue wheel used for
    hue arithmetic — when truthy, hues are mapped through
    Color.sys_rgb2ryb/sys_ryb2rgb (presumably the RYB "artist" wheel —
    confirm against the Color class).  ``synchronization`` selects one of
    several linked-editing modes in _sync_modify.
    """
    def __init__(self, h_range, s_range, v_range, overflow='cutoff', dep_wtp=1):
        """Validate the HSV ranges, build five black colours and randomize them."""
        self.synchronization = 0
        self.dep_wtp = dep_wtp
        self.set_hsv_ranges(h_range, s_range, v_range)
        # Hue in [0, 360], saturation and value in [0, 1], lower <= upper.
        assert (0.0 <= self._h_range[0] <= 360.0)
        assert (0.0 <= self._h_range[1] <= 360.0)
        assert (self._h_range[0] <= self._h_range[1])
        assert (0.0 <= self._s_range[0] <= 1.0)
        assert (0.0 <= self._s_range[1] <= 1.0)
        assert (self._s_range[0] <= self._s_range[1])
        assert (0.0 <= self._v_range[0] <= 1.0)
        assert (0.0 <= self._v_range[1] <= 1.0)
        assert (self._v_range[0] <= self._v_range[1])
        self._color_set = [Color((0, 0, 0), tp='rgb', overflow=overflow), Color((0, 0, 0), tp='rgb', overflow=overflow), Color((0, 0, 0), tp='rgb', overflow=overflow), Color((0, 0, 0), tp='rgb', overflow=overflow), Color((0, 0, 0), tp='rgb', overflow=overflow)]
        self.initialize()
    def set_hsv_ranges(self, h_range, s_range, v_range):
        """Store the (lower, upper) HSV sampling ranges as float tuples."""
        if (isinstance(h_range, (tuple, list)) and isinstance(s_range, (tuple, list)) and isinstance(v_range, (tuple, list))):
            self._h_range = tuple([float(i) for i in h_range[:2]])
            self._s_range = tuple([float(i) for i in s_range[:2]])
            self._v_range = tuple([float(i) for i in v_range[:2]])
        else:
            raise ValueError('expect h, s, v ranges in tuple or list type: {}, {}, {}.'.format(h_range, s_range, v_range))
    def set_color_system(self, dep_wtp):
        """Switch the hue wheel used for subsequent hue arithmetic."""
        self.dep_wtp = dep_wtp
    def set_overflow(self, overflow):
        """Propagate an overflow policy to all five colours."""
        for color in self._color_set:
            color.set_overflow(overflow)
    def get_overflow(self):
        """Return the shared overflow policy (asserts all five agree)."""
        assert (self._color_set[1].get_overflow() == self._color_set[0].get_overflow())
        assert (self._color_set[2].get_overflow() == self._color_set[0].get_overflow())
        assert (self._color_set[3].get_overflow() == self._color_set[0].get_overflow())
        assert (self._color_set[4].get_overflow() == self._color_set[0].get_overflow())
        return self._color_set[0].get_overflow()
    def __str__(self):
        return 'ColorSet({}, {}, {}, {}, {})'.format(*self._color_set)
    def __repr__(self):
        return 'ColorSet({}, {}, {}, {}, {})'.format(*self._color_set)
    def __getitem__(self, idx):
        """Index directly into the five-colour list."""
        return self._color_set[idx]
    def __len__(self):
        # The palette always holds exactly five colours.
        return 5
    def backup(self):
        """Return an independent copy (tuple) of the current five colours."""
        color_set = (Color(self._color_set[0], tp='color', overflow=self.get_overflow()), Color(self._color_set[1], tp='color', overflow=self.get_overflow()), Color(self._color_set[2], tp='color', overflow=self.get_overflow()), Color(self._color_set[3], tp='color', overflow=self.get_overflow()), Color(self._color_set[4], tp='color', overflow=self.get_overflow()))
        return color_set
    def recover(self, color_set):
        """Replace the palette with copies of a previously backed-up set."""
        self._color_set = [Color(color_set[0], tp='color', overflow=self.get_overflow()), Color(color_set[1], tp='color', overflow=self.get_overflow()), Color(color_set[2], tp='color', overflow=self.get_overflow()), Color(color_set[3], tp='color', overflow=self.get_overflow()), Color(color_set[4], tp='color', overflow=self.get_overflow())]
    def initialize(self):
        """Randomize all five colours uniformly within the configured ranges."""
        for i in range(5):
            h = (self._h_range[0] + ((self._h_range[1] - self._h_range[0]) * random.random()))
            s = (self._s_range[0] + ((self._s_range[1] - self._s_range[0]) * random.random()))
            v = (self._v_range[0] + ((self._v_range[1] - self._v_range[0]) * random.random()))
            if self.dep_wtp:
                # Sampled hue is on the alternate wheel; map it back to RGB hue.
                h = Color.sys_ryb2rgb(h)
            self._color_set[i].hsv = (h, s, v)
    def create(self, harmony_rule):
        """Re-derive colours 1-4 from colour 0 under ``harmony_rule``."""
        if (harmony_rule == 'custom'):
            # Custom palettes are user-driven; nothing to derive.
            return
        methods = {'analogous': self._analogous_create, 'monochromatic': self._monochromatic_create, 'triad': self._triad_create, 'tetrad': self._tetrad_create, 'pentad': self._pentad_create, 'complementary': self._complementary_create, 'shades': self._shades_create}
        if (harmony_rule in methods):
            methods[harmony_rule]()
        else:
            raise ValueError("expect harmony rule in list 'analogous', 'monochromatic', etc.: {}.".format(harmony_rule))
    def modify(self, harmony_rule, idx, color, do_sync=True):
        """Set colour ``idx`` to ``color`` and propagate per the rule (or the
        active synchronization mode when ``do_sync`` and it is nonzero)."""
        if (do_sync and self.synchronization):
            self._sync_modify(idx, color)
        else:
            methods = {'analogous': self._analogous_modify, 'monochromatic': self._monochromatic_modify, 'triad': self._multiple_modify, 'tetrad': self._tetrad_modify, 'pentad': self._multiple_modify, 'complementary': self._multiple_modify, 'shades': self._shades_modify, 'custom': self._custom_modify}
            if (harmony_rule in methods):
                methods[harmony_rule](idx, color)
            else:
                raise ValueError('unexpect harmony rule name for modify: {}.'.format(harmony_rule))
    def _rotate(self, delta_h, delta_s, delta_v):
        """Shift all five colours by the given HSV deltas (hue on the active wheel)."""
        for idx in range(5):
            if self.dep_wtp:
                self._color_set[idx].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[idx].h) + delta_h))
            else:
                self._color_set[idx].h = (self._color_set[idx].h + delta_h)
            self._color_set[idx].s = (self._color_set[idx].s + delta_s)
            self._color_set[idx].v = (self._color_set[idx].v + delta_v)
    def _analogous_create(self):
        """Spread hues symmetrically around colour 0 at +-angle and +-2*angle."""
        if self.dep_wtp:
            angle = ((Color.sys_rgb2ryb(self._color_set[3].h) - Color.sys_rgb2ryb(self._color_set[1].h)) / 2)
        else:
            angle = ((self._color_set[3].h - self._color_set[1].h) / 2)
        # Damp the step until it is within +-30 degrees.
        while ((angle > 30.0) or (angle < (- 30.0))):
            angle = (angle / 1.5)
        if self.dep_wtp:
            self._color_set[1].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) - angle))
            self._color_set[2].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) - (angle * 2)))
            self._color_set[3].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) + angle))
            self._color_set[4].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) + (angle * 2)))
        else:
            self._color_set[1].h = (self._color_set[0].h - angle)
            self._color_set[2].h = (self._color_set[0].h - (angle * 2))
            self._color_set[3].h = (self._color_set[0].h + angle)
            self._color_set[4].h = (self._color_set[0].h + (angle * 2))
    def _analogous_modify(self, idx, pr_color):
        """Apply an edit under the analogous rule: moving colour 0 rotates the
        whole set; moving 1-4 re-derives the +-angle fan from the new position."""
        if (idx == 0):
            if self.dep_wtp:
                delta_h = (Color.sys_rgb2ryb(pr_color.h) - Color.sys_rgb2ryb(self._color_set[0].h))
            else:
                delta_h = (pr_color.h - self._color_set[0].h)
            delta_s = (pr_color.s - self._color_set[0].s)
            delta_v = (pr_color.v - self._color_set[0].v)
            self._rotate(delta_h, delta_s, delta_v)
        else:
            # Colours 1/2 sit on the negative side of the fan, 3/4 on the positive.
            if (idx in (1, 2)):
                if self.dep_wtp:
                    angle = (Color.sys_rgb2ryb(self._color_set[0].h) - Color.sys_rgb2ryb(pr_color.h))
                else:
                    angle = (self._color_set[0].h - pr_color.h)
            elif self.dep_wtp:
                angle = (Color.sys_rgb2ryb(pr_color.h) - Color.sys_rgb2ryb(self._color_set[0].h))
            else:
                angle = (pr_color.h - self._color_set[0].h)
            if (idx in (2, 4)):
                # Outer colours sit at 2*angle: normalize to (-180, 180] then halve.
                angle = ((angle - 360) if (angle > 180) else angle)
                angle = ((angle + 360) if (angle < (- 180)) else angle)
                angle /= 2
            if self.dep_wtp:
                self._color_set[1].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) - angle))
                self._color_set[2].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) - (angle * 2)))
                self._color_set[3].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) + angle))
                self._color_set[4].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) + (angle * 2)))
            else:
                self._color_set[1].h = (self._color_set[0].h - angle)
                self._color_set[2].h = (self._color_set[0].h - (angle * 2))
                self._color_set[3].h = (self._color_set[0].h + angle)
                self._color_set[4].h = (self._color_set[0].h + (angle * 2))
            self._color_set[idx] = pr_color
    def _monochromatic_create(self):
        """Same hue for all five; vary saturation/value around colour 0."""
        self._color_set[1].h = self._color_set[0].h
        self._color_set[2].h = self._color_set[0].h
        self._color_set[3].h = self._color_set[0].h
        self._color_set[4].h = self._color_set[0].h
        # Push s away from colour 0 (up if low, down if high) by random amounts.
        us_random = self._color_set[0].s
        ls_random = self._color_set[0].s
        if (us_random < 0.5):
            us_random = ((us_random + 0.2) + (0.25 * random.random()))
            ls_random = (ls_random + (0.15 * random.random()))
        else:
            us_random = ((us_random - 0.2) - (0.25 * random.random()))
            ls_random = (ls_random - (0.15 * random.random()))
        # Same scheme for value.
        uv_random = self._color_set[0].v
        lv_random = self._color_set[0].v
        if (uv_random < 0.5):
            uv_random = ((uv_random + 0.2) + (0.25 * random.random()))
            lv_random = (lv_random + (0.15 * random.random()))
        else:
            uv_random = ((uv_random - 0.2) - (0.25 * random.random()))
            lv_random = (lv_random - (0.15 * random.random()))
        self._color_set[1].s = us_random
        self._color_set[2].s = us_random
        self._color_set[3].s = ls_random
        self._color_set[4].s = ls_random
        self._color_set[1].v = uv_random
        self._color_set[2].v = lv_random
        self._color_set[3].v = uv_random
        self._color_set[4].v = lv_random
    def _monochromatic_modify(self, idx, pr_color):
        """Edit under monochromatic rule: all colours adopt the new hue."""
        for i in range(5):
            self._color_set[i].h = pr_color.h
        self._color_set[idx] = pr_color
    def _triad_create(self):
        """Hue triangle: pairs (1,2) at -120 degrees and (3,4) at +120 degrees."""
        if self.dep_wtp:
            self._color_set[1].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) - 120.0))
            self._color_set[2].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) - 120.0))
            self._color_set[3].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) + 120.0))
            self._color_set[4].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) + 120.0))
        else:
            self._color_set[1].h = (self._color_set[0].h - 120.0)
            self._color_set[2].h = (self._color_set[0].h - 120.0)
            self._color_set[3].h = (self._color_set[0].h + 120.0)
            self._color_set[4].h = (self._color_set[0].h + 120.0)
    def _multiple_modify(self, idx, pr_color):
        """Shared edit handler (triad/pentad/complementary): editing colour 0
        rotates the whole set in HSV; editing 1-4 rotates hues only."""
        if self.dep_wtp:
            delta_h = (Color.sys_rgb2ryb(pr_color.h) - Color.sys_rgb2ryb(self._color_set[idx].h))
        else:
            delta_h = (pr_color.h - self._color_set[idx].h)
        if (idx == 0):
            delta_s = (pr_color.s - self._color_set[idx].s)
            delta_v = (pr_color.v - self._color_set[idx].v)
            self._rotate(delta_h, delta_s, delta_v)
        elif self.dep_wtp:
            for i in range(5):
                self._color_set[i].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[i].h) + delta_h))
        else:
            for i in range(5):
                self._color_set[i].h = (self._color_set[i].h + delta_h)
        self._color_set[idx] = pr_color
    def _tetrad_create(self):
        """Hue square: colours at -90, -180, +90, +180 degrees from colour 0."""
        if self.dep_wtp:
            self._color_set[1].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) - 90))
            self._color_set[2].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) - 180))
            self._color_set[3].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) + 90))
            self._color_set[4].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) + 180))
        else:
            self._color_set[1].h = (self._color_set[0].h - 90)
            self._color_set[2].h = (self._color_set[0].h - 180)
            self._color_set[3].h = (self._color_set[0].h + 90)
            self._color_set[4].h = (self._color_set[0].h + 180)
    def _tetrad_modify(self, idx, pr_color):
        """Edit under tetrad rule: colour 0 rotates everything; otherwise only
        the diagonal pair containing ``idx`` ((0,2,4) or (1,3)) shifts hue."""
        if self.dep_wtp:
            delta_h = (Color.sys_rgb2ryb(pr_color.h) - Color.sys_rgb2ryb(self._color_set[idx].h))
        else:
            delta_h = (pr_color.h - self._color_set[idx].h)
        if (idx == 0):
            delta_s = (pr_color.s - self._color_set[idx].s)
            delta_v = (pr_color.v - self._color_set[idx].v)
            self._rotate(delta_h, delta_s, delta_v)
        elif (idx in (2, 4)):
            self._color_set[0].h += delta_h
            self._color_set[2].h += delta_h
            self._color_set[4].h += delta_h
        else:
            self._color_set[1].h += delta_h
            self._color_set[3].h += delta_h
        self._color_set[idx] = pr_color
    def _pentad_create(self):
        """Hue pentagon: colours at -72, -144, +72, +144 degrees from colour 0."""
        if self.dep_wtp:
            self._color_set[1].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) - 72))
            self._color_set[2].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) - 144))
            self._color_set[3].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) + 72))
            self._color_set[4].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) + 144))
        else:
            self._color_set[1].h = (self._color_set[0].h - 72)
            self._color_set[2].h = (self._color_set[0].h - 144)
            self._color_set[3].h = (self._color_set[0].h + 72)
            self._color_set[4].h = (self._color_set[0].h + 144)
    def _complementary_create(self):
        """Two hue groups 180 degrees apart ((0,1,3) vs (2,4)) with randomized s/v."""
        self._color_set[1].h = self._color_set[0].h
        self._color_set[3].h = self._color_set[0].h
        if self.dep_wtp:
            self._color_set[2].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) + 180.0))
            self._color_set[4].h = Color.sys_ryb2rgb((Color.sys_rgb2ryb(self._color_set[0].h) + 180.0))
        else:
            self._color_set[2].h = (self._color_set[0].h + 180.0)
            self._color_set[4].h = (self._color_set[0].h + 180.0)
        # Same s/v spreading scheme as _monochromatic_create.
        us_random = self._color_set[0].s
        ls_random = self._color_set[0].s
        if (us_random < 0.5):
            us_random = ((us_random + 0.2) + (0.25 * random.random()))
            ls_random = (ls_random + (0.15 * random.random()))
        else:
            us_random = ((us_random - 0.2) - (0.25 * random.random()))
            ls_random = (ls_random - (0.15 * random.random()))
        uv_random = self._color_set[0].v
        lv_random = self._color_set[0].v
        if (uv_random < 0.5):
            uv_random = ((uv_random + 0.2) + (0.25 * random.random()))
            lv_random = (lv_random + (0.15 * random.random()))
        else:
            uv_random = ((uv_random - 0.2) - (0.25 * random.random()))
            lv_random = (lv_random - (0.15 * random.random()))
        self._color_set[1].s = us_random
        self._color_set[2].s = us_random
        self._color_set[3].s = ls_random
        self._color_set[4].s = ls_random
        self._color_set[1].v = uv_random
        self._color_set[2].v = lv_random
        self._color_set[3].v = uv_random
        self._color_set[4].v = lv_random
    def _shades_create(self):
        """Same hue/saturation as colour 0 with fixed value steps."""
        self._color_set[1].hsv = self._color_set[0].hsv
        self._color_set[2].hsv = self._color_set[0].hsv
        self._color_set[3].hsv = self._color_set[0].hsv
        self._color_set[4].hsv = self._color_set[0].hsv
        self._color_set[1].v = 0.15
        self._color_set[2].v = 0.4
        self._color_set[3].v = 0.65
        self._color_set[4].v = 0.9
    def _shades_modify(self, idx, pr_color):
        """Edit under shades rule: all colours adopt the new hue and saturation."""
        for i in range(5):
            self._color_set[i].h = pr_color.h
            self._color_set[i].s = pr_color.s
        self._color_set[idx] = pr_color
    def _custom_modify(self, idx, pr_color):
        """Edit under custom rule: only the targeted colour changes."""
        if (idx in range(5)):
            self._color_set[idx].hsv = pr_color.hsv
        else:
            raise ValueError('expect idx in range 0 ~ 4: {}.'.format(idx))
    def _sync_modify(self, idx, pr_color):
        """Linked editing dispatched on ``self.synchronization``:
        1: lock hue, shift all s/v; 2: shift hue only; 3: shift full HSV;
        4: shift hue and copy s/v from the edit to every colour;
        5: mirror/average s/v around colour 0 depending on which index moved;
        6: pair s/v of (1,3) and (2,4).  A final _rotate applies the deltas."""
        if self.dep_wtp:
            delta_h = (Color.sys_rgb2ryb(pr_color.h) - Color.sys_rgb2ryb(self._color_set[idx].h))
        else:
            delta_h = (pr_color.h - self._color_set[idx].h)
        if (self.synchronization == 1):
            delta_h = 0
            delta_s = (pr_color.s - self._color_set[idx].s)
            delta_v = (pr_color.v - self._color_set[idx].v)
        elif (self.synchronization == 2):
            delta_s = delta_v = 0
        elif (self.synchronization == 3):
            delta_s = (pr_color.s - self._color_set[idx].s)
            delta_v = (pr_color.v - self._color_set[idx].v)
        elif (self.synchronization == 4):
            for i in range(5):
                self._color_set[i].s = pr_color.s
                self._color_set[i].v = pr_color.v
            delta_s = 0
            delta_v = 0
        elif (self.synchronization == 5):
            if (idx == 1):
                self._color_set[1].s = pr_color.s
                self._color_set[2].s = ((self._color_set[1].s * 2) - self._color_set[0].s)
                self._color_set[4].s = ((self._color_set[3].s * 2) - self._color_set[0].s)
                self._color_set[1].v = pr_color.v
                self._color_set[2].v = ((self._color_set[1].v * 2) - self._color_set[0].v)
                self._color_set[4].v = ((self._color_set[3].v * 2) - self._color_set[0].v)
            elif (idx == 3):
                self._color_set[3].s = pr_color.s
                self._color_set[2].s = ((self._color_set[1].s * 2) - self._color_set[0].s)
                self._color_set[4].s = ((self._color_set[3].s * 2) - self._color_set[0].s)
                self._color_set[3].v = pr_color.v
                self._color_set[2].v = ((self._color_set[1].v * 2) - self._color_set[0].v)
                self._color_set[4].v = ((self._color_set[3].v * 2) - self._color_set[0].v)
            elif (idx == 2):
                self._color_set[2].s = pr_color.s
                self._color_set[1].s = ((self._color_set[2].s + self._color_set[0].s) / 2)
                self._color_set[3].s = ((self._color_set[4].s + self._color_set[0].s) / 2)
                self._color_set[2].v = pr_color.v
                self._color_set[1].v = ((self._color_set[2].v + self._color_set[0].v) / 2)
                self._color_set[3].v = ((self._color_set[4].v + self._color_set[0].v) / 2)
            elif (idx == 4):
                self._color_set[4].s = pr_color.s
                self._color_set[1].s = ((self._color_set[2].s + self._color_set[0].s) / 2)
                self._color_set[3].s = ((self._color_set[4].s + self._color_set[0].s) / 2)
                self._color_set[4].v = pr_color.v
                self._color_set[1].v = ((self._color_set[2].v + self._color_set[0].v) / 2)
                self._color_set[3].v = ((self._color_set[4].v + self._color_set[0].v) / 2)
            else:
                self._color_set[0].s = pr_color.s
                self._color_set[1].s = ((self._color_set[2].s + self._color_set[0].s) / 2)
                self._color_set[3].s = ((self._color_set[4].s + self._color_set[0].s) / 2)
                self._color_set[0].v = pr_color.v
                self._color_set[1].v = ((self._color_set[2].v + self._color_set[0].v) / 2)
                self._color_set[3].v = ((self._color_set[4].v + self._color_set[0].v) / 2)
            delta_s = 0
            delta_v = 0
        elif (self.synchronization == 6):
            self._color_set[idx].s = pr_color.s
            self._color_set[idx].v = pr_color.v
            if (idx in (1, 2)):
                self._color_set[3].s = self._color_set[1].s
                self._color_set[4].s = self._color_set[2].s
                self._color_set[3].v = self._color_set[1].v
                self._color_set[4].v = self._color_set[2].v
            elif (idx in (3, 4)):
                self._color_set[1].s = self._color_set[3].s
                self._color_set[2].s = self._color_set[4].s
                self._color_set[1].v = self._color_set[3].v
                self._color_set[2].v = self._color_set[4].v
            else:
                self._color_set[1].s = ((self._color_set[1].s + self._color_set[3].s) / 2)
                self._color_set[2].s = ((self._color_set[2].s + self._color_set[4].s) / 2)
                self._color_set[3].s = self._color_set[1].s
                self._color_set[4].s = self._color_set[2].s
                self._color_set[1].v = ((self._color_set[1].v + self._color_set[3].v) / 2)
                self._color_set[2].v = ((self._color_set[2].v + self._color_set[4].v) / 2)
                self._color_set[3].v = self._color_set[1].v
                self._color_set[4].v = self._color_set[2].v
            delta_s = 0
            delta_v = 0
        else:
            raise ValueError('unexpect synchronization code for special rule: {}.'.format(self.synchronization))
        self._rotate(delta_h, delta_s, delta_v)
class OptionPlotoptionsAreasplinerangeStatesSelectMarker(Options):
    """Marker options for the "select" state of a Highcharts areasplinerange
    series.

    Each option is a property backed by the Options config store: the getter
    reads the configured value (falling back to the Highcharts default) and
    the setter records a new value.  The original code declared getter and
    setter with the same name but no decorators, so each setter silently
    shadowed its getter and the read path was unreachable; the
    ``@property``/``@<name>.setter`` pairs restore the evident intent.
    """

    @property
    def enabled(self):
        """Whether the point marker is enabled (None = Highcharts default)."""
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabledThreshold(self):
        """Inter-point pixel distance below which markers are hidden (default 2)."""
        return self._config_get(2)

    @enabledThreshold.setter
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def fillColor(self):
        """Marker fill color (None = inherit series color)."""
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def height(self):
        """Explicit marker height in pixels (None = derived from radius)."""
        return self._config_get(None)

    @height.setter
    def height(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineColor(self):
        """Marker border color (default white)."""
        return self._config_get('#ffffff')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Marker border width in pixels (default 0)."""
        return self._config_get(0)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        """Marker radius in pixels (default 4)."""
        return self._config_get(4)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def width(self):
        """Explicit marker width in pixels (None = derived from radius)."""
        return self._config_get(None)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
def make_local_storage(path, env=None):
    """Create the local cache folder at ``path`` (or verify it is writable).

    :param path: folder to create/check; converted to ``str``.
    :param env: optional environment-variable name mentioned in the error
        message so users know how to point the cache elsewhere.
    :raises PermissionError: when the folder cannot be created or written to.
    """
    path = str(path)
    try:
        if os.path.exists(path):
            action = 'write to'
            # Folder already exists: prove we can create files inside it.
            with tempfile.NamedTemporaryFile(dir=path):
                pass
        else:
            action = 'create'
            os.makedirs(path, exist_ok=True)
    except PermissionError as error:
        message = [str(error), f"| Pooch could not {action} data cache folder '{path}'.", 'Will not be able to download data files.']
        if env is not None:
            message.append(f"Use environment variable '{env}' to specify a different location.")
        raise PermissionError(' '.join(message)) from error
def print_type(gtype, nonnull=True, except_types=()):
    """Render a Python-side type as its GraphQL type literal.

    :param gtype: the type to render (native, generic, or schema type).
    :param nonnull: append '!' for non-nullable positions.
    :param except_types: types that are forbidden in this position.
    :raises ValidationError: for excepted, unsupported, or unknown types.
    """
    if isinstance(gtype, except_types):
        raise ValidationError(f'{gtype} is not a valid type')
    if is_union(gtype):
        # Only Optional[X] (Union[X, None]) is supported; it renders X as nullable.
        if not is_optional(gtype):
            raise ValidationError(f'Native Union type is not supported except Optional')
        return print_type(gtype.__args__[0], nonnull=False, except_types=except_types)
    if is_list(gtype):
        rendered = f'[{print_type(gtype.__args__[0], except_types=except_types)}]'
    elif isinstance(gtype, types.ObjectType):
        rendered = f'{gtype.__name__}'
    elif gtype in VALID_BASIC_TYPES:
        rendered = VALID_BASIC_TYPES[gtype]
    elif gtype is None or gtype == type(None):
        # The null literal is never marked non-null.
        return 'null'
    elif isinstance(gtype, (types.UnionType, types.EnumType, types.InputType, types.InterfaceType)):
        # All named schema types render as their bare class name.
        rendered = f'{gtype.__name__}'
    else:
        raise ValidationError(f'Can not convert type {gtype} to GraphQL type')
    return (rendered + '!') if nonnull else rendered
.integrationtest
.skipif((pymongo.version_tuple < (3, 0)), reason='New in 3.0')
def test_collection_insert_one(instrument, elasticapm_client, mongo_database):
    """insert_one on a pymongo collection must be captured as a mongodb
    'query' span named after database.collection.operation."""
    blogpost = {'author': 'Tom', 'text': 'Foo', 'date': datetime.datetime.utcnow()}
    elasticapm_client.begin_transaction('transaction.test')
    r = mongo_database.blogposts.insert_one(blogpost)
    assert (r.inserted_id is not None)
    elasticapm_client.end_transaction('transaction.test')
    transactions = elasticapm_client.events[TRANSACTION]
    # Pull the single pymongo span recorded for the transaction just ended.
    span = _get_pymongo_span(elasticapm_client.spans_for_transaction(transactions[0]))
    assert (span['type'] == 'db')
    assert (span['subtype'] == 'mongodb')
    assert (span['action'] == 'query')
    assert (span['name'] == 'elasticapm_test.blogposts.insert_one')
class TestStackMin(unittest.TestCase):
    """Exercise StackMin: the running minimum must track pushes and pops."""

    def test_stack_min(self):
        print('Test: Push on empty stack, non-empty stack')
        stack = StackMin()
        # Push each value and check both the top and the running minimum.
        for value, expected_min in ((5, 5), (1, 1), (3, 1), (0, 0)):
            stack.push(value)
            self.assertEqual(stack.peek(), value)
            self.assertEqual(stack.minimum(), expected_min)
        print('Test: Pop on non-empty stack')
        # Pop everything back off; the minimum must rewind accordingly.
        for expected_value, min_after_pop in ((0, 1), (3, 1), (1, 5), (5, sys.maxsize)):
            self.assertEqual(stack.pop(), expected_value)
            self.assertEqual(stack.minimum(), min_after_pop)
        print('Test: Pop empty stack')
        self.assertEqual(stack.pop(), None)
        print('Success: test_stack_min')
def aggregate(conf, fedavg_models, client_models, criterion, metrics, flatten_local_models, fa_val_perf, distillation_sampler, distillation_data_loader, val_data_loader, test_data_loader):
    """Master-side aggregation round: recover the clients' local models,
    optionally evaluate them (individually and as an ensemble), then distill
    SWAG-sampled teachers into the FedAvg model via knowledge transfer.

    Returns a dict mapping architecture name -> distilled student model (CPU).
    Most knobs are read from ``conf.fl_aggregate`` with inline defaults.
    """
    fl_aggregate = conf.fl_aggregate
    # Rebuild concrete local models from the flattened client parameters.
    (_, local_models) = agg_utils.recover_models(conf, client_models, flatten_local_models, use_cuda=conf.graph.on_cuda)
    # Optional diagnostic: test each recovered local model individually.
    if (('eval_local' in fl_aggregate) and fl_aggregate['eval_local']):
        perfs = []
        for (idx, local_model) in enumerate(local_models.values()):
            conf.logger.log(f'Evaluate the local model-{idx}.')
            perf = master_utils.validate(conf, coordinator=None, model=local_model, criterion=criterion, metrics=metrics, data_loader=test_data_loader, label=None, display=False)
            perfs.append(perf['top1'])
        conf.logger.log(f'The averaged test performance of the local models: {(sum(perfs) / len(perfs))}; the details of the local performance: {perfs}.')
    # Optional diagnostic: test the local models as one ensemble.
    if (('eval_ensemble' in fl_aggregate) and fl_aggregate['eval_ensemble']):
        master_utils.ensembled_validate(conf, coordinator=None, models=list(local_models.values()), criterion=criterion, metrics=metrics, data_loader=test_data_loader, label='ensemble_test_loader', ensemble_scheme=(None if ('update_student_scheme' not in fl_aggregate) else fl_aggregate['update_student_scheme']))
    _client_models = {}
    for (arch, fedavg_model) in fedavg_models.items():
        conf.logger.log(f'Master: we have {len(local_models)} local models for noise distillation (use {arch} for the distillation).')
        # Only a single shared architecture is supported in this path.
        assert (len(fedavg_models) == 1), 'right now, we only support homo-arch case.'
        # Draw teacher models around the FedAvg solution (SWAG sampling).
        sampled_models = sample_from_swag(conf, fedavg_model, local_models, loader=val_data_loader)
        # Distill the sampled teachers into the FedAvg student; nearly every
        # hyperparameter falls back to a default when absent from fl_aggregate.
        kt = SWAKTSolver(conf=conf, teacher_models=list(sampled_models.values()), student_model=fedavg_model, criterion=criterion, metrics=metrics, batch_size=(128 if ('batch_size' not in fl_aggregate) else int(fl_aggregate['batch_size'])), total_n_server_pseudo_batches=(250 if ('total_n_server_pseudo_batches' not in fl_aggregate) else int(fl_aggregate['total_n_server_pseudo_batches'])), val_data_loader=val_data_loader, distillation_sampler=distillation_sampler, distillation_data_loader=get_unlabeled_data(fl_aggregate, distillation_data_loader), student_learning_rate=(0.001 if ('student_learning_rate' not in fl_aggregate) else fl_aggregate['student_learning_rate']), AT_beta=(0 if ('AT_beta' not in fl_aggregate) else fl_aggregate['AT_beta']), KL_temperature=(1 if ('temperature' not in fl_aggregate) else fl_aggregate['temperature']), log_fn=conf.logger.log, eval_batches_freq=(100 if ('eval_batches_freq' not in fl_aggregate) else int(fl_aggregate['eval_batches_freq'])), update_student_scheme='avg_logits', server_teaching_scheme=(None if ('server_teaching_scheme' not in fl_aggregate) else fl_aggregate['server_teaching_scheme']), optimizer=('sgd' if ('optimizer' not in fl_aggregate) else fl_aggregate['optimizer']))
        # Run the configured distillation method (default: 'distillation').
        getattr(kt, ('distillation' if ('noise_kt_scheme' not in fl_aggregate) else fl_aggregate['noise_kt_scheme']))()
        _client_models[arch] = kt.server_student.cpu()
    # Free GPU memory held by the intermediate models before returning.
    del local_models, sampled_models, kt
    torch.cuda.empty_cache()
    return _client_models
class Heapdump(TelemetryDevice):
    """Telemetry device that captures a JVM heap dump when a node detaches."""

    internal = False
    command = 'heapdump'
    human_name = 'Heap Dump'
    help = 'Captures a heap dump.'

    def __init__(self, log_root):
        super().__init__()
        # Directory that receives the .hprof files.
        self.log_root = log_root

    def detach_from_node(self, node, running):
        """Dump the heap of a still-running node via jmap; log on failure."""
        if not running:
            return
        io.ensure_dir(self.log_root)
        dump_path = os.path.join(self.log_root, f'heap_at_exit_{node.pid}.hprof')
        console.info(f'{self.human_name}: Writing heap dump to [{dump_path}]', logger=self.logger)
        jmap_cmd = f'jmap -dump:format=b,file={dump_path} {node.pid}'
        # Nonzero exit status from jmap means the dump was not written.
        if process.run_subprocess_with_logging(jmap_cmd):
            self.logger.warning('Could not write heap dump to [%s]', dump_path)
def test_hover_parameter_double_sf():
    """Hovering a DOUBLE PRECISION parameter must report its declaration."""
    request = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    target_file = test_dir / 'hover' / 'parameters.f90'
    request += hover_req(target_file, 7, 55)
    errcode, results = run_request(request, fortls_args=['--sort_keywords'])
    assert errcode == 0
    expected = ['```fortran90\nDOUBLE PRECISION, PARAMETER :: some = 1e-19\n```']
    validate_hover(results, expected)
class ClassBSpartlines(GrpCls.ClassHtml):
    # HTML wrapper for sparkline chart components.
    # NOTE(review): `css` and `css_class` look like they were `@property`s in
    # the original source (decorators appear stripped) — confirm before use.
    def css(self) -> AttrClsChart.AttrSkarkline:
        # Lazily build and cache the sparkline CSS attribute struct.
        if (self._css_struct is None):
            self._css_struct = AttrClsChart.AttrSkarkline(self.component)
        return self._css_struct
    def css_class(self) -> Classes.CatalogDiv.CatalogDiv:
        # Lazily build and cache the CSS class catalog, applying vertical margins.
        if (self._css_class is None):
            self._css_class = Classes.CatalogDiv.CatalogDiv(self.component.page, self.classList['main'], component=self.component).margin_vertical()
        return self._css_class
class Use(models.Model):
    """A user's stay ("use") of a resource at a location.

    Tracks the booking lifecycle (``status``) and the accounting scheme
    used to pay for the stay (``accounted_by``).
    """
    # Booking lifecycle states.
    PENDING = 'pending'
    APPROVED = 'approved'
    CONFIRMED = 'confirmed'
    HOUSE_DECLINED = 'house declined'
    USER_DECLINED = 'user declined'
    CANCELED = 'canceled'
    USE_STATUSES = ((PENDING, 'Pending'), (APPROVED, 'Approved'), (CONFIRMED, 'Confirmed'), (HOUSE_DECLINED, 'House Declined'), (USER_DECLINED, 'User Declined'), (CANCELED, 'Canceled'))
    # Accounting schemes.
    FIAT = 'fiat'
    FIATDRFT = 'fiatdrft'
    DRFT = 'drft'
    BACKING = 'backing'
    USE_ACCOUNTING = ((FIAT, 'Fiat'), (FIATDRFT, 'Fiat & DRFT'), (DRFT, 'DRFT'), (BACKING, 'Backing'))
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    location = models.ForeignKey(Location, related_name='uses', null=True)
    status = models.CharField(max_length=200, choices=USE_STATUSES, default=PENDING, blank=True)
    user = models.ForeignKey(User, related_name='uses')
    arrive = models.DateField(verbose_name='Arrival Date')
    depart = models.DateField(verbose_name='Departure Date')
    arrival_time = models.CharField(help_text='Optional, if known', max_length=200, blank=True, null=True)
    resource = models.ForeignKey(Resource, null=True)
    purpose = models.TextField(verbose_name='Tell us a bit about the reason for your trip/stay')
    last_msg = models.DateTimeField(blank=True, null=True)
    accounted_by = models.CharField(max_length=200, choices=USE_ACCOUNTING, default=FIAT, blank=True)
    objects = UseManager()

    def __str__(self):
        return '%d' % self.id

    def total_nights(self):
        """Number of nights between arrival and departure."""
        return (self.depart - self.arrive).days
    total_nights.short_description = 'Nights'

    def drft_value(self):
        """DRFT cost of this use: one DRFT per night."""
        return self.total_nights()

    def nights_between(self, start, end):
        """Number of nights of this use falling within [start, end]."""
        nights = 0
        if self.arrive >= start and self.depart <= end:
            # Use fully contained in the window.
            nights = (self.depart - self.arrive).days
        elif self.arrive <= start and self.depart >= end:
            # Window fully contained in the use.
            nights = (end - start).days
        elif self.arrive < start:
            # Use starts before the window and ends inside it.
            nights = (self.depart - start).days
        elif self.depart > end:
            # Use starts inside the window and ends after it.
            nights = (end - self.arrive).days
        return nights

    def suggest_drft(self):
        """True when this use could be paid in DRFT: the resource is
        DRFT-able for the whole stay and the user can afford it."""
        try:
            return (self.resource.drftable_between(self.arrive, self.depart)
                    and self.user.profile.drft_spending_balance() >= self.total_nights())
        except Exception:
            # BUG FIX: was a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt. Any lookup failure (missing
            # resource, missing profile, ...) simply means DRFT can't be
            # suggested.
            return False
class OptionPlotoptionsSeriesMarkerStatesSelect(Options):
    """Wrapper for the Highcharts `plotOptions.series.marker.states.select`
    options.

    NOTE(review): each name below is defined twice (getter form, then setter
    form). The original source almost certainly used `@property` /
    `@<name>.setter` decorators that were stripped during extraction — as
    written here the second definition shadows the first. Confirm against the
    original before editing.
    """
    def enabled(self):
        # Default: True.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def fillColor(self):
        # Default fill color of the selected marker.
        return self._config_get('#cccccc')
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        # Default line (border) color of the selected marker.
        return self._config_get('#000000')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        # Default border width in pixels.
        return self._config_get(2)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        # No default radius (inherits from the marker itself).
        return self._config_get(None)
    def radius(self, num: float):
        self._config(num, js_type=False)
def in_range(accessing_obj, accessed_obj, *args, **kwargs):
    """Lock function: is ``accessing_obj`` within a combat range band?

    Args:
        accessing_obj: Object attempting access; live entities are expected
            to expose ``nattributes`` / ``ndb``.
        accessed_obj: Target of the lock check (unused here).
        *args: Optional; ``args[0]`` is the range band name (default 'MELEE').
        **kwargs: Unused.

    Returns:
        bool: True when the object is in an active combat and within the
        requested range band, False otherwise.
    """
    # Renamed from `range` to avoid shadowing the builtin.
    range_band = args[0] if args else 'MELEE'
    if hasattr(accessing_obj, 'nattributes'):
        # Not currently in combat -> fail the lock.
        if not (combat := accessing_obj.ndb.combat):
            return False
        return combat.any_in_range(accessing_obj, range_band)
    else:
        # BUG FIX: original returned the undefined name `false` (NameError).
        return False
class UCS(Space):
    """CIE 1960 UCS color space, defined relative to XYZ D65."""
    BASE = 'xyz-d65'
    NAME = 'ucs'
    SERIALIZE = ('--ucs',)
    # u/v/w channels, each nominally in [0, 1].
    CHANNELS = (Channel('u', 0.0, 1.0), Channel('v', 0.0, 1.0), Channel('w', 0.0, 1.0))
    WHITE = WHITES['2deg']['D65']
    def to_base(self, coords: Vector) -> Vector:
        """Convert UCS coordinates to the base XYZ D65 space."""
        return ucs_to_xyz(coords)
    def from_base(self, coords: Vector) -> Vector:
        """Convert XYZ D65 coordinates into UCS."""
        return xyz_to_ucs(coords)
class FalServerlessHost(Host):
    """Host implementation that registers and runs functions on fal
    serverless infrastructure over gRPC.

    NOTE(review): the bare ``_handle_grpc_error()`` lines before ``register``
    and ``run`` look like stripped decorators — confirm against the original
    source.
    """
    # Host option keys this backend understands.
    _SUPPORTED_KEYS = frozenset({'machine_type', 'keep_alive', 'max_concurrency', 'max_multiplexing', 'setup_function', 'metadata', '_base_image', '_scheduler', '_scheduler_options'})
    url: str = FAL_SERVERLESS_DEFAULT_URL
    credentials: Credentials = field(default_factory=get_default_credentials)
    _lock: threading.Lock = field(default_factory=threading.Lock)
    _log_printer = IsolateLogPrinter(debug=flags.DEBUG)
    def __setstate__(self, state: dict[(str, Any)]) -> None:
        # Re-resolve agent credentials after unpickling (credentials may not
        # survive serialization across processes).
        self.__dict__.update(state)
        self.credentials = _get_agent_credentials(self.credentials)
    def _connection(self) -> FalServerlessConnection:
        # Build a fresh client connection under the lock.
        # NOTE(review): call sites below use `self._connection.define_environment(...)`
        # without calling this method, so it was presumably a @property in the
        # original source — confirm.
        with self._lock:
            client = FalServerlessClient(self.url, self.credentials)
            return client.connect()
    _handle_grpc_error()
    def register(self, func: Callable[(ArgsT, ReturnT)], options: Options, application_name: (str | None)=None, application_auth_mode: (Literal[('public', 'shared', 'private')] | None)=None, metadata: (dict[(str, Any)] | None)=None) -> (str | None):
        """Register *func* as a serverless application.

        Streams registration logs and returns the application id on success,
        or None if no result was produced.
        """
        environment_options = options.environment.copy()
        environment_options.setdefault('python_version', active_python())
        environments = [self._connection.define_environment(**environment_options)]
        # Machine/runtime requirements, falling back to defaults when unset.
        machine_type = options.host.get('machine_type', FAL_SERVERLESS_DEFAULT_MACHINE_TYPE)
        keep_alive = options.host.get('keep_alive', FAL_SERVERLESS_DEFAULT_KEEP_ALIVE)
        max_concurrency = options.host.get('max_concurrency')
        max_multiplexing = options.host.get('max_multiplexing')
        base_image = options.host.get('_base_image', None)
        scheduler = options.host.get('_scheduler', None)
        scheduler_options = options.host.get('_scheduler_options', None)
        exposed_port = options.get_exposed_port()
        machine_requirements = MachineRequirements(machine_type=machine_type, keep_alive=keep_alive, base_image=base_image, exposed_port=exposed_port, scheduler=scheduler, scheduler_options=scheduler_options, max_multiplexing=max_multiplexing, max_concurrency=max_concurrency)
        partial_func = _prepare_partial_func(func)
        if (metadata is None):
            metadata = {}
        if isinstance(func, ServeWrapper):
            # Attach the OpenAPI spec for served functions; failures here are
            # non-fatal (registration proceeds without the spec).
            try:
                metadata['openapi'] = func.openapi()
            except Exception as e:
                print(f'[warning] Failed to generate OpenAPI metadata for function: {e}')
        for partial_result in self._connection.register(partial_func, environments, application_name=application_name, application_auth_mode=application_auth_mode, machine_requirements=machine_requirements, metadata=metadata):
            for log in partial_result.logs:
                self._log_printer.print(log)
            if partial_result.result:
                return partial_result.result.application_id
    _handle_grpc_error()
    def run(self, func: Callable[(..., ReturnT)], options: Options, args: tuple[(Any, ...)], kwargs: dict[(str, Any)]) -> ReturnT:
        """Run *func* remotely with *args*/*kwargs* and return its result.

        Streams logs while the run is in progress.

        Raises:
            InternalFalServerlessError: On internal failure, or when the run
                finishes without producing a return value.
            NotImplementedError: On an unrecognized terminal run state.
        """
        environment_options = options.environment.copy()
        environment_options.setdefault('python_version', active_python())
        environments = [self._connection.define_environment(**environment_options)]
        machine_type = options.host.get('machine_type', FAL_SERVERLESS_DEFAULT_MACHINE_TYPE)
        keep_alive = options.host.get('keep_alive', FAL_SERVERLESS_DEFAULT_KEEP_ALIVE)
        max_concurrency = options.host.get('max_concurrency')
        max_multiplexing = options.host.get('max_multiplexing')
        base_image = options.host.get('_base_image', None)
        scheduler = options.host.get('_scheduler', None)
        scheduler_options = options.host.get('_scheduler_options', None)
        exposed_port = options.get_exposed_port()
        setup_function = options.host.get('setup_function', None)
        machine_requirements = MachineRequirements(machine_type=machine_type, keep_alive=keep_alive, base_image=base_image, exposed_port=exposed_port, scheduler=scheduler, scheduler_options=scheduler_options, max_multiplexing=max_multiplexing, max_concurrency=max_concurrency)
        # Sentinel distinguishes "no result yet" from a legitimate None result.
        return_value = _UNSET
        partial_func = _prepare_partial_func(func, *args, **kwargs)
        for partial_result in self._connection.run(partial_func, environments, machine_requirements=machine_requirements, setup_function=setup_function):
            for log in partial_result.logs:
                self._log_printer.print(log)
            if (partial_result.status.state is not HostedRunState.IN_PROGRESS):
                state = partial_result.status.state
                if (state is HostedRunState.INTERNAL_FAILURE):
                    raise InternalFalServerlessError('An internal failure occurred while performing this run.')
                elif (state is HostedRunState.SUCCESS):
                    return_value = partial_result.result
                else:
                    raise NotImplementedError('Unknown state: ', state)
        if (return_value is _UNSET):
            raise InternalFalServerlessError('The input function did not return any value.')
        return cast(ReturnT, return_value)
def main():
    """CLI entry point: create a Spinnaker application from arguments."""
    parser = argparse.ArgumentParser()
    add_debug(parser)
    add_app(parser)
    add_properties(parser)
    add_provider(parser)
    parser.add_argument('--email', help='Email address to associate with application', default='PS-')
    parser.add_argument('--project', help='Git project to associate with application', default='None')
    parser.add_argument('--repo', help='Git repo to associate with application', default='None')
    parser.add_argument('--git', help='Git URI', default=None)
    args = parser.parse_args()
    logging.basicConfig(format=LOGGING_FORMAT)
    logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
    # Prefer deriving project/repo from the git URI when one was supplied.
    if args.git and args.git != 'None':
        parsed = foremastutils.Parser(args.git).parse_url()
        generated = foremastutils.Generator(*parsed, formats=APP_FORMATS)
        project, repo = generated.project, generated.repo
    else:
        project, repo = args.project, args.repo
    pipeline_config = get_properties(args.properties, 'pipeline')
    spinnaker_app = SpinnakerApp(provider=args.provider, app=args.app, email=args.email, project=project, repo=repo, pipeline_config=pipeline_config)
    spinnaker_app.create()
class TestPaymentACK(unittest.TestCase):
    """Serialization and JSON round-trip checks for paymentrequest.PaymentACK."""
    def test_dict_optional_fields_unused(self):
        # An ACK without a memo must not serialize a 'memo' key.
        pmt = paymentrequest.Payment('merchant_data', 'transaction_hex')
        ack = paymentrequest.PaymentACK(pmt)
        payload = ack.to_dict()
        self.assertTrue('payment' in payload)
        self.assertFalse('memo' in payload)
    def test_dict_optional_fields_used(self):
        # An ACK with a memo serializes both keys.
        pmt = paymentrequest.Payment('merchant_data', 'transaction_hex', 'memo')
        ack = paymentrequest.PaymentACK(pmt, 'memo')
        payload = ack.to_dict()
        self.assertTrue('payment' in payload)
        self.assertTrue('memo' in payload)
    def test_json_restoration_all(self):
        # Round-trip through JSON with all payment fields populated.
        pmt = paymentrequest.Payment('merchant_data', 'transaction_hex', 'memo')
        original = paymentrequest.PaymentACK(pmt)
        restored = paymentrequest.PaymentACK.from_json(original.to_json())
        self.assertEqual(original.payment.merchant_data, restored.payment.merchant_data)
        self.assertEqual(original.payment.transaction_hex, restored.payment.transaction_hex)
        self.assertEqual(original.payment.memo, restored.payment.memo)
        self.assertEqual(original.memo, restored.memo)
    def test_json_restoration_required(self):
        # Round-trip with only the required fields.
        pmt = paymentrequest.Payment({}, 'transaction_hex')
        original = paymentrequest.PaymentACK(pmt)
        restored = paymentrequest.PaymentACK.from_json(original.to_json())
        self.assertEqual(original.memo, restored.memo)
def cut_silences(audio_file, output_file, silence_time=400, threshold=(- 65)) -> Path:
    """Remove silent stretches from an audio file and export it as mp3.

    Args:
        audio_file: Input path (any format the audio backend can read).
        output_file: Destination path for the mp3 output.
        silence_time: Minimum silence length (ms) to cut.
        threshold: Silence threshold in dBFS.

    Returns:
        Path to the exported file.
    """
    source = AudioSegment.from_file(audio_file)
    voiced_chunks = silence.split_on_silence(source, min_silence_len=silence_time, silence_thresh=threshold)
    # Concatenate the non-silent chunks back together.
    trimmed = sum(voiced_chunks, AudioSegment.empty())
    trimmed.export(output_file, format='mp3')
    return Path(output_file)
def serialize_classification_report(cr):
    """Parse the text output of sklearn's classification_report into a dict.

    Returns {class_label: {metric_name: float_or_None}}; rows with fewer
    columns than the header (e.g. 'accuracy') are padded with None.
    """
    rows = []
    for line in cr.split('\n'):
        cells = [cell.strip() for cell in line.split(' ') if cell.strip()]
        if cells:
            rows.append(cells)
    measures = rows[0]
    report = defaultdict(dict)
    for cells in rows[1:]:
        label = cells[0].strip()
        # Pad short rows with None so every row aligns with the header.
        while len(cells) < len(measures) + 1:
            cells.insert(1, None)
        for idx, measure in enumerate(measures):
            raw = cells[idx + 1]
            report[label][measure.strip()] = float(raw.strip()) if raw is not None else None
    return report
def flatten_site_pins(tile, site, site_pins, site_pin_node_to_wires):
    """Map each site pin name (with the "<site>/" prefix stripped) to its
    wire info dict, or to None when the pin's node has no wires."""
    prefix = site + '/'
    def pin_entries():
        for pin in site_pins:
            wires = tuple(site_pin_node_to_wires(tile, pin['node']))
            name = check_and_strip_prefix(pin['site_pin'], prefix)
            if not wires:
                yield (name, None)
                continue
            # A site pin is expected to resolve to exactly one wire.
            assert len(wires) == 1, repr(wires)
            yield (name, {'wire': wires[0], 'speed_model_index': pin['speed_model_index']})
    return dict(pin_entries())
def main(page: ft.Page):
    """Build a demo DataTable with sorting, selection and heavy styling."""
    columns = [
        ft.DataColumn(ft.Text('Column 1'), on_sort=(lambda e: print(f'{e.column_index}, {e.ascending}'))),
        ft.DataColumn(ft.Text('Column 2'), tooltip='This is a second column', numeric=True, on_sort=(lambda e: print(f'{e.column_index}, {e.ascending}'))),
    ]
    rows = [
        ft.DataRow([ft.DataCell(ft.Text('A')), ft.DataCell(ft.Text('1'))], selected=True, on_select_changed=(lambda e: print(f'row select changed: {e.data}'))),
        ft.DataRow([ft.DataCell(ft.Text('B')), ft.DataCell(ft.Text('2'))]),
    ]
    table = ft.DataTable(
        width=700,
        bgcolor='yellow',
        border=ft.border.all(2, 'red'),
        border_radius=10,
        vertical_lines=ft.border.BorderSide(3, 'blue'),
        horizontal_lines=ft.border.BorderSide(1, 'green'),
        sort_column_index=0,
        sort_ascending=True,
        heading_row_color=ft.colors.BLACK12,
        heading_row_height=100,
        data_row_color={'hovered': '0x30FF0000'},
        show_checkbox_column=True,
        divider_thickness=0,
        column_spacing=200,
        columns=columns,
        rows=rows,
    )
    page.add(table)
def test_client_register_task_definition_without_optional_values(client):
    """register_task_definition without runtime_platform/cpu/memory must
    forward only the core fields to the boto client."""
    container_defs = [{u'name': u'foo'}]
    volume_defs = [{u'foo': u'bar'}]
    role_arn = 'arn:test:role'
    execution_role_arn = 'arn:test:role'
    runtime_platform = {u'cpuArchitecture': u'X86_64', u'operatingSystemFamily': u'LINUX'}
    definition = EcsTaskDefinition(containerDefinitions=container_defs, volumes=volume_defs, family=u'family', revision=1, taskRoleArn=role_arn, executionRoleArn=execution_role_arn, tags={'Name': 'test_client_register_task_definition'}, status='active', taskDefinitionArn='arn:task', requiresAttributes={})
    client.register_task_definition(family=definition.family, containers=definition.containers, volumes=definition.volumes, role_arn=definition.role_arn, execution_role_arn=execution_role_arn, tags=definition.tags, additional_properties=definition.additional_properties, runtime_platform=None, cpu=None, memory=None)
    client.boto.register_task_definition.assert_called_once_with(family=u'family', containerDefinitions=container_defs, volumes=volume_defs, taskRoleArn=role_arn, executionRoleArn=execution_role_arn, tags=definition.tags)
def test_envvar_expansion():
    """A mapping service referencing an env var must be expanded on load."""
    os.environ['TEST_SERVICE'] = 'foo'
    config = Config()
    fetcher = ResourceFetcher(logger, config)
    # `yaml` is the module-level fixture string defined elsewhere in this file.
    fetcher.parse_yaml(yaml)
    config.load_all(fetcher.sorted())
    test_mapping = config.config['mappings']['test_mapping']
    assert test_mapping.service == 'foo:9999'
def _shape(a: (ArrayLike | float), s: Shape) -> Shape:
if (not isinstance(a, Sequence)):
return s
size = len(a)
if (not size):
return (size,)
first = _shape(a[0], s)
for r in range(1, size):
if (_shape(a[r], s) != first):
raise ValueError('Ragged lists are not supported')
return ((size,) + first) |
def train(model, train_loader, val_loader, optimizer, init_lr=0.002, checkpoint_dir=None, checkpoint_interval=None, nepochs=None, clip_thresh=1.0):
    """Train *model* with a combined search + reconstruction loss.

    Each step randomly picks either the positive or negative example stream,
    computes a classification ("search") loss plus a reconstruction loss,
    and back-propagates their sum. Progress is appended to the module-level
    ``logfile_name`` file and logged via ``log_value``. Relies on the
    module-level globals ``use_cuda``, ``use_multigpu``, ``global_step``,
    ``global_epoch``, ``logfile_name`` and ``ph_ids``.
    """
    model.train()
    if use_cuda:
        model = model.cuda()
    # NOTE(review): `linear_dim` is assigned but never used below — confirm
    # whether it can be dropped.
    linear_dim = model.linear_dim
    criterion = nn.CrossEntropyLoss()
    # Baseline validation pass before any training.
    validate(val_loader)
    global global_step, global_epoch
    while (global_epoch < nepochs):
        with autograd.detect_anomaly():
            h = open(logfile_name, 'a')
            running_loss = 0.0
            for (step, (x, pos, neg)) in tqdm(enumerate(train_loader)):
                # Decay the learning rate per global step.
                current_lr = learning_rate_decay(init_lr, global_step)
                for param_group in optimizer.param_groups:
                    param_group['lr'] = current_lr
                optimizer.zero_grad()
                (x, pos, neg) = (Variable(x), Variable(pos), Variable(neg))
                if use_cuda:
                    (x, pos, neg) = (x.cuda(), pos.cuda(), neg.cuda())
                # Label 1 for positives, 0 for negatives.
                positive_labels = (pos.new(pos.shape[0]).zero_() + 1)
                negative_labels = pos.new(neg.shape[0]).zero_()
                if use_multigpu:
                    # NOTE(review): `mel` is not defined in this scope — this
                    # branch would raise NameError if use_multigpu were True.
                    (outputs, r_, o_) = data_parallel_workaround(model, (x, mel))
                    (mel_outputs, linear_outputs, attn) = (outputs[0], outputs[1], outputs[2])
                else:
                    # Randomly alternate between the positive and negative stream.
                    inps = [pos, neg]
                    labels = [positive_labels, negative_labels]
                    choice = random.choice([0, 1])
                    choice_inputs = inps[choice]
                    choice_labels = labels[choice]
                    (logits, x_reconstructed) = model(choice_inputs.long(), x)
                    # Binary classification loss on the chosen stream.
                    loss_search = criterion(logits.contiguous().view((- 1), 2), choice_labels.long())
                    # Reconstruction loss over the phoneme-id vocabulary (+1 for padding).
                    loss_reconstruction = criterion(x_reconstructed.contiguous().view((- 1), (1 + len(ph_ids))), x.long().contiguous().view((- 1)))
                    loss = (loss_search + loss_reconstruction)
                if ((global_step > 0) and ((global_step % checkpoint_interval) == 0)):
                    save_checkpoint(model, optimizer, global_step, checkpoint_dir, global_epoch)
                loss.backward(retain_graph=False)
                grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), clip_thresh)
                optimizer.step()
                log_value('loss', float(loss.item()), global_step)
                log_value('search loss', float(loss_search.item()), global_step)
                log_value('reconstruction loss', float(loss_reconstruction.item()), global_step)
                log_value('gradient norm', grad_norm, global_step)
                log_value('learning rate', current_lr, global_step)
                global_step += 1
                running_loss += loss.item()
            averaged_loss = (running_loss / len(train_loader))
            log_value('loss (per epoch)', averaged_loss, global_epoch)
            h.write((((('Loss after epoch ' + str(global_epoch)) + ': ') + format((running_loss / len(train_loader)))) + '\n'))
            h.close()
            global_epoch += 1
            # Validate at the end of every epoch.
            validate(val_loader)
class TestGetActiveTasks(unittest.TestCase):
    """get_active_tasks() must return only tasks that are neither done nor cancelled."""
    def test_get_active_tasks(self):
        def fake_task(done, cancelled):
            # Build a Task mock with fixed done()/cancelled() answers.
            task = Mock(spec=asyncio.Task)
            task.done = (lambda done=done: done)
            task.cancelled = (lambda cancelled=cancelled: cancelled)
            return task
        active_task = fake_task(False, False)
        finished_task = fake_task(True, False)
        cancelled_task = fake_task(False, True)
        def get_all_tasks():
            return [active_task, finished_task, cancelled_task]
        with patch('testslide.get_all_tasks', new=get_all_tasks):
            self.assertEqual(get_active_tasks(), [active_task])
def check_telemetry():
    """Ensure settings carry a UID and a telemetry opt-in flag, initialising
    both on first run and persisting any change."""
    settings = sublime.load_settings('Emmet.sublime-settings')
    dirty = False
    if not settings.get('uid'):
        # First install: mint an anonymous id and report the install event.
        uid = str(uuid.uuid4())
        settings.set('uid', uid)
        send_tracking_action('Init', 'install')
        dirty = True
    if settings.get('telemetry', None) is None:
        # Telemetry preference not yet recorded: ask the user once.
        allow_telemetry = ask_for_telemetry()
        send_tracking_action('Init', 'Enable Telemetry', str(allow_telemetry))
        settings.set('telemetry', bool(allow_telemetry))
        dirty = True
    if dirty:
        sublime.save_settings('Emmet.sublime-settings')
class TestListFieldLengthLimit(FieldValues):
    """FieldValues case: a ListField with min_length=3 / max_length=4 must
    reject too-short and too-long inputs with the matching error messages."""
    valid_inputs = ()
    invalid_inputs = [((0, 1), ['Ensure this field has at least 3 elements.']), ((0, 1, 2, 3, 4, 5), ['Ensure this field has no more than 4 elements.'])]
    outputs = ()
    field = serializers.ListField(child=serializers.IntegerField(), min_length=3, max_length=4)
class PlotLimits():
    """Container of min/max limit pairs for the various plot axes
    (value, index, count, density, depth, date)."""
    value_minimum = limit_property('value_minimum', (float, int))
    value_maximum = limit_property('value_maximum', (float, int))
    value_limits = limits_property('value_minimum', 'value_maximum')
    index_minimum = limit_property('index_minimum', int, minimum=0)
    index_maximum = limit_property('index_maximum', int, minimum=0)
    index_limits = limits_property('index_minimum', 'index_maximum')
    count_minimum = limit_property('count_minimum', int, minimum=0)
    count_maximum = limit_property('count_maximum', int, minimum=0)
    count_limits = limits_property('count_minimum', 'count_maximum')
    density_minimum = limit_property('density_minimum', (float, int), minimum=0.0)
    density_maximum = limit_property('density_maximum', (float, int), minimum=0.0)
    density_limits = limits_property('density_minimum', 'density_maximum')
    depth_minimum = limit_property('depth_minimum', (float, int), minimum=0.0)
    depth_maximum = limit_property('depth_maximum', (float, int), minimum=0.0)
    depth_limits = limits_property('depth_minimum', 'depth_maximum')
    date_minimum = limit_property('date_minimum', (datetime.date, datetime.datetime))
    date_maximum = limit_property('date_maximum', (datetime.date, datetime.datetime))
    date_limits = limits_property('date_minimum', 'date_maximum')
    def __eq__(self, other: object) -> bool:
        """Two PlotLimits compare equal when every limit pair matches."""
        if not isinstance(other, PlotLimits):
            return False
        return all(
            getattr(self, name) == getattr(other, name)
            for name in ('value_limits', 'index_limits', 'count_limits',
                         'depth_limits', 'date_limits', 'density_limits')
        )
    def copyLimitsFrom(self, other: 'PlotLimits'):
        """Copy every limit pair from *other* into this instance."""
        for name in ('value_limits', 'density_limits', 'depth_limits',
                     'index_limits', 'date_limits', 'count_limits'):
            setattr(self, name, getattr(other, name))
class ERC20TransferBenchmark(BaseERC20Benchmark):
    """Benchmark that repeatedly issues ERC20 ``transfer`` transactions."""
    def __init__(self) -> None:
        super().__init__()
        # Nonce for the next generated transaction; None until setup runs.
        self._next_nonce = None
    def name(self) -> str:
        return 'ERC20 Transfer'
    def _setup_benchmark(self, chain: MiningChain) -> None:
        """Deploy the token contract and record its address via the callback."""
        self._next_nonce = None
        (txn, callback) = self._deploy_simple_token(chain)
        (_, receipts, computations) = chain.mine_all([txn])
        assert (len(receipts) == 1)
        assert (len(computations) == 1)
        callback(receipts[0], computations[0])
    def _next_transaction(self, chain: MiningChain):
        """Build the next transfer transaction, advancing the tracked nonce.

        BUG FIX: the return annotation was ``-> None`` although the method
        returns the transaction info tuple from ``_erc_transfer``.
        """
        txn_info = self._erc_transfer(self.addr1, chain, self._next_nonce)
        txn = txn_info[0]
        self._next_nonce = (txn.nonce + 1)
        return txn_info
def extractNishao10Com(item):
    """Parse a nishao10.com feed item into a release message.

    Returns None for previews or items without chapter/volume info, a
    release message for recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class udf_data(bsn_tlv):
    # OpenFlow BSN TLV carrying a 32-bit UDF (user-defined field) value.
    # NOTE(review): this is Python-2-era generated code — pack() joins
    # struct.pack results with a text ''.join, which only works when
    # struct.pack returns str (py2). Also, `unpack` takes `reader` as its
    # first argument, so it was presumably decorated @staticmethod in the
    # original generated source — confirm.
    type = 207
    def __init__(self, value=None):
        # Default the payload to 0 when not provided.
        if (value != None):
            self.value = value
        else:
            self.value = 0
        return
    def pack(self):
        # TLV wire format: type (u16), length (u16, patched below), value (u32).
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.value))
        # Back-patch the length field once the total size is known.
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        # Parse a udf_data TLV from `reader`; asserts the type tag matches.
        obj = udf_data()
        _type = reader.read('!H')[0]
        assert (_type == 207)
        _length = reader.read('!H')[0]
        # Restrict the reader to this TLV's payload (length minus 4 header bytes).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.value = reader.read('!L')[0]
        return obj
    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.value != other.value):
            return False
        return True
    def pretty_print(self, q):
        # Render via the pretty-printer protocol used by the loxi bindings.
        q.text('udf_data {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('value = ')
                q.text(('%#x' % self.value))
            q.breakable()
        q.text('}')
()
('--path', help='Root directory of source to be checked', required=True, type=str)
('--fixit', default=False, help='Fix missing header', required=False, type=bool)
def check_header(path, fixit):
    """Scan *path* for files missing the Meta header, optionally fixing them.

    Exits with status 0 when everything is compliant, 1 otherwise.
    """
    offenders = dfs(path)
    if not offenders:
        sys.exit(0)
    print('Need to add Meta header to the following files.')
    print('File List')
    for line in offenders:
        print(line)
    print('')
    if fixit:
        fix_header(offenders)
    sys.exit(1)
def add_UnityToExternalProtoServicer_to_server(servicer, server):
    """Register the UnityToExternalProto servicer's handlers on *server*.

    gRPC-generated boilerplate: wires the unary-unary `Exchange` method to
    the UnityMessageProto deserializer/serializer pair.
    """
    rpc_method_handlers = {'Exchange': grpc.unary_unary_rpc_method_handler(servicer.Exchange, request_deserializer=pyrcareworld_dot_communicator__objects_dot_unity__message__pb2.UnityMessageProto.FromString, response_serializer=pyrcareworld_dot_communicator__objects_dot_unity__message__pb2.UnityMessageProto.SerializeToString)}
    generic_handler = grpc.method_handlers_generic_handler('communicator_objects.UnityToExternalProto', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
def create_cfg_from_cli(config_file: str, overwrites: Optional[List[str]], runner_class: Union[(None, str, Type[BaseRunner], Type[DefaultTask])]) -> CfgNode:
    """Load a frozen CfgNode from a config file plus CLI overwrites.

    The runner class (optionally given as an import path) decides whether
    the plain full-config loader or the runner's default-config merge path
    is used.
    """
    config_file = reroute_config_path(config_file)
    with PathManager.open(config_file, 'r') as f:
        print('Loaded config file {}:\n{}'.format(config_file, f.read()))
    if isinstance(runner_class, str):
        # Resolve "module.Class"-style runner references.
        print(f'Importing runner: {runner_class} ...')
        runner_class = import_runner(runner_class)
    if runner_class is None or issubclass(runner_class, RunnerV2Mixin):
        cfg = load_full_config_from_file(config_file)
    else:
        cfg = runner_class.get_default_cfg()
        cfg.merge_from_file(config_file)
    cfg.merge_from_list(overwrites or [])
    cfg.freeze()
    return cfg
class RegisterViewTests(TestCase):
    """End-to-end tests for the registration view: form rendering,
    validation error messages, successful sign-up and method restrictions.

    CONSISTENCY FIX: one reverse() call used '{{cookiecutter.project_slug }}'
    (missing space after '{{'); normalized to match every other occurrence.
    """
    urlpatterns = [path('', include('{{ cookiecutter.project_slug }}.auth.urls'))]

    def test_endpoint(self):
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        self.assertEqual(url, '/register/')

    def test_get_response_status_code(self):
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        response = self.client.get(url)
        self.assertEqual(response.status_code, HTTPStatus.OK)

    def test_get_response_context(self):
        # The form must expose all expected fields as required and enabled.
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        response = self.client.get(url)
        self.assertIn('form', response.context)
        form = response.context['form']
        self.assertIn('name', form.fields)
        self.assertIn('email', form.fields)
        self.assertIn('password', form.fields)
        self.assertIn('terms', form.fields)
        name_field = form.fields['name']
        self.assertTrue(name_field.required)
        self.assertFalse(name_field.disabled)
        email_field = form.fields['email']
        self.assertTrue(email_field.required)
        self.assertFalse(email_field.disabled)
        password_field = form.fields['password']
        self.assertTrue(password_field.required)
        self.assertFalse(password_field.disabled)
        country_field = form.fields['country']
        self.assertTrue(country_field.required)
        self.assertFalse(country_field.disabled)
        terms_field = form.fields['terms']
        self.assertTrue(terms_field.required)
        self.assertFalse(terms_field.disabled)

    def test_post_invalid_email_displays_error_message(self):
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        data = {'name': 'Marie C', 'email': '', 'password': 'safsdf678hg', 'country': 'ES', 'terms': 'on'}
        response = self.client.post(url, data=data, follow=True)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertContains(response, 'Este correo electronico es invalido. Asegurate de que tenga un formato como este: ', html=True)

    def test_post_missing_name_displays_error_message(self):
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        data = {'email': '', 'password': 'fdsjgkhdfgs', 'country': 'ES', 'terms': 'on'}
        response = self.client.post(url, data=data, follow=True)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertContains(response, 'Es necesario que indiques tu nombre.', html=True)

    def test_post_missing_password_displays_error_message(self):
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        data = {'name': 'John Smith', 'email': '', 'country': 'ES', 'terms': 'on'}
        response = self.client.post(url, data=data, follow=True)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertContains(response, 'Es necesario que indiques tu password.', html=True)

    def test_post_password_with_less_than_8_characters_displays_error_message(self):
        # FIX: was '{{cookiecutter.project_slug }}' — normalized spacing.
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        data = {'name': 'Ernesto Gonzalez', 'email': '', 'password': 'shd72!s', 'country': 'ES', 'terms': 'on'}
        response = self.client.post(url, data=data, follow=True)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertContains(response, 'Tu password debe tener al menos 8 caracteres.', html=True)

    def test_post_terms_off_displays_error_message(self):
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        data = {'name': 'John Doe', 'email': '', 'password': 'shd72!s', 'country': 'ES'}
        response = self.client.post(url, data=data, follow=True)
        self.assertEqual(response.status_code, HTTPStatus.OK)
        self.assertContains(response, 'Debes aceptar los terminos y condiciones para poder empezar.', html=True)

    def test_post_success_authenticates_request_user(self):
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        data = {'name': 'John Doe', 'email': '', 'password': 'fdg7dsg8sdfg78', 'country': 'ES', 'terms': 'on'}
        self.client.post(url, data=data, follow=True)
        self.assertTrue(get_user(self.client).is_authenticated)

    def test_post_success_redirects_to_index(self):
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        data = {'name': 'John Doe', 'email': '', 'password': 'fdg7dsg8sdfg78', 'country': 'ES', 'terms': 'on'}
        response = self.client.post(url, data=data, follow=False)
        self.assertRedirects(response, reverse('index'), status_code=HTTPStatus.FOUND, target_status_code=HTTPStatus.FOUND, fetch_redirect_response=True)

    def test_put_is_not_allowed(self):
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        response = self.client.put(url, data={}, follow=True)
        self.assertEqual(response.status_code, HTTPStatus.METHOD_NOT_ALLOWED)

    def test_patch_is_not_allowed(self):
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        response = self.client.patch(url, data={}, follow=True)
        self.assertEqual(response.status_code, HTTPStatus.METHOD_NOT_ALLOWED)

    def test_delete_is_not_allowed(self):
        url = reverse('{{ cookiecutter.project_slug }}-auth:register')
        response = self.client.delete(url, data={}, follow=True)
        self.assertEqual(response.status_code, HTTPStatus.METHOD_NOT_ALLOWED)
def init():
    """Make subproject sources importable relative to the current git checkout.

    Inserts the relevant source directories onto sys.path based on which
    subdirectory of the repository we are running from.
    """
    root = subprocess.check_output(['git', 'rev-parse', '--show-toplevel']).decode('utf-8').strip()
    sys.path.insert(0, os.path.join(root, 'common'))
    sys.path.insert(0, os.path.join(root, 'python'))
    subdir = subprocess.check_output(['git', 'rev-parse', '--show-prefix']).decode('utf-8').strip()
    # Simple subprojects keep their sources at the subproject root.
    for nice_subdir in ['backend', 'dist-git', 'rpmbuild']:
        if subdir.startswith(nice_subdir):
            sys.path.insert(0, os.path.join(root, nice_subdir))
    # Frontend and keygen have nested source layouts.
    if subdir.startswith('frontend'):
        sys.path.insert(0, os.path.join(root, 'frontend', 'coprs_frontend'))
        sys.path.insert(0, os.path.join(root, 'frontend', 'coprs_frontend', 'run'))
    if subdir.startswith('keygen'):
        sys.path.insert(0, os.path.join(root, 'keygen', 'src'))
def analyze_call_sites(ghidra_analysis, func, index, prev):
    """Analyze every unconditional call site of *func*.

    Walks all references to the function's entry point, decompiles each
    calling function, and for each CALL pcode op whose target is *func*
    gathers the relevant sources and runs the per-call-site analysis.

    Args:
        ghidra_analysis: Wrapper exposing the Ghidra program, flat API and monitor.
        func: The Ghidra Function whose call sites are inspected.
        index: Forwarded to analyze_function_call_site (presumably an
            argument index of interest — confirm against that helper).
        prev: Forwarded to analyze_function_call_site (prior analysis state).

    Returns:
        list: Accumulated results from analyze_function_call_site.
    """
    result = []
    references_to = ghidra_analysis.current_program.getReferenceManager().getReferencesTo(func.getEntryPoint())
    for reference in references_to:
        from_address = reference.getFromAddress()
        calling_func = ghidra_analysis.flat_api.getFunctionContaining(from_address)
        # Reference may come from data or an unowned address — skip those.
        if (calling_func is None):
            continue
        if (reference.getReferenceType() == RefType.UNCONDITIONAL_CALL):
            # Ignore direct recursion.
            if (calling_func == func):
                continue
            high_func = decompile_function(ghidra_analysis, calling_func)
            pcode_ops = high_func.getPcodeOps(from_address.getPhysicalAddress())
            for pcode_op in iter_array(pcode_ops, ghidra_analysis.monitor):
                if (pcode_op.getOpcode() == PcodeOp.CALL):
                    target_func = ghidra_analysis.flat_api.getFunctionAt(pcode_op.getInput(0).getAddress())
                    # Only analyze CALL ops that actually target *func*.
                    if (target_func == func):
                        call_site_address = pcode_op.getSeqnum().getTarget()
                        (_, sources_pcode_ops) = get_call_site_pcode_ops(ghidra_analysis, func)
                        relevant_sources = get_relevant_sources(ghidra_analysis, func, call_site_address, sources_pcode_ops)
                        result.extend(analyze_function_call_site(ghidra_analysis, calling_func, index, pcode_op, relevant_sources, prev))
    return result
def annotation_to_gff(annotation):
    """Convert one annotation record into a list of GFF attribute strings.

    :param annotation: 10-tuple of
        (query_name, best_hit_name, best_hit_evalue, best_hit_score,
         annotations, (og_name, og_cat, og_desc), max_annot_lvl,
         match_nog_names, all_orthologies, annot_orthologs)
        where *annotations* maps a category name to an iterable of values
        (or None when the category has no values).
    :return: list of ``em_<tag>=<value>`` attribute strings
    """
    attrs = []
    (query_name, best_hit_name, best_hit_evalue, best_hit_score, annotations,
     (og_name, og_cat, og_desc), max_annot_lvl, match_nog_names,
     all_orthologies, annot_orthologs) = annotation
    match_nog_names = ','.join(match_nog_names)
    attrs.append(f'em_OGs={match_nog_names}')
    attrs.append(f'em_COG_cat={og_cat}')
    attrs.append(f'em_desc={og_desc}')
    attrs.append(f'em_max_annot_lvl={max_annot_lvl}')
    for (k, v) in annotations.items():
        # GFF attribute tags must not contain spaces.
        tag = f"em_{k.replace(' ', '_')}"
        if v is not None:
            # sorted() accepts any iterable directly (the intermediate
            # list() copy was redundant); sorting keeps output deterministic.
            value = ','.join(sorted(v))
            attrs.append(f'{tag}={value}')
    return attrs
# NOTE(review): this decorator line appears truncated -- presumably
# '@pytest.mark' was dropped in front of '.parametrize'; verify against
# the original file.
.parametrize('oper, expected', [('add', [10, 10, 10, 12]), ('sub', [10, 10, 10, 8]), ('div', [10, 10, 10, 5]), ('mul', [10, 10, 10, 20]), ('set', [10, 10, 10, 2])])
def test_oper_points_inside_overlapping_polygon_v2(oper, expected):
    """Apply arithmetic operation *oper* with value 2 to points vs. polygons.

    The expected lists show that only the fourth point (6.5, 6.5) is
    modified (10 op 2); with inside=False the operation targets points
    outside the polygons, so the first three points keep their z of 10.
    """
    pol = Polygons((SMALL_POLY_INNER + SMALL_POLY_OVERLAP_INNER))
    # Four points with z=10; x/y chosen to straddle the polygon boundary.
    poi = Points([(3.5, 3.5, 10.0), (4.5, 4.5, 10.0), (5.5, 5.5, 10.0), (6.5, 6.5, 10.0)])
    # version=2 exercises the newer operation implementation.
    poi.operation_polygons(pol, value=2, opname=oper, inside=False, version=2)
    assert (list(poi.dataframe[poi.zname].values) == expected)
def main():
    """Ansible module entry point for the FortiOS antivirus_exempt_list resource.

    Builds the module argument spec from the versioned schema, connects to
    FortiOS through the httpapi connection plugin, applies the requested
    state, and terminates via module.exit_json()/fail_json().
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # Primary key identifying an entry in the antivirus exempt-list table.
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'antivirus_exempt_list': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy every schema-derived option into the module spec; the primary key
    # becomes mandatory since it identifies the object being managed.
    for attribute_name in module_spec['options']:
        fields['antivirus_exempt_list']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['antivirus_exempt_list']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    # _socket_path is set when the play runs over a persistent (httpapi)
    # connection; without it the module cannot reach the device.
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Logging is opt-in; default to disabled.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Detect playbook options unsupported by the connected FortiOS
        # version; surfaced as a warning below rather than a hard failure.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'antivirus_exempt_list')
        (is_error, has_changed, result, diff) = fortios_antivirus(module.params, fos)
    else:
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            # Success, but include the version-mismatch details in the result.
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
def wrap_stub(elf_file):
    """Extract the flasher-stub sections of an ELF image into a dict.

    :param elf_file: path of the stub ELF file to wrap
    :return: dict with 'entry', 'text', 'text_start' and, when the
        corresponding sections exist, 'data'/'data_start' and 'bss_start'
    """
    print(('Wrapping ELF file %s...' % elf_file))
    e = esptool.bin_image.ELFFile(elf_file)
    text_section = e.get_section('.text')
    stub = {'entry': e.entrypoint, 'text': text_section.data, 'text_start': text_section.addr}
    try:
        data_section = e.get_section('.data')
        stub['data'] = data_section.data
        stub['data_start'] = data_section.addr
    except ValueError:
        # A stub without a .data section is valid; simply omit those keys.
        pass
    for s in e.nobits_sections:
        if (s.name == '.bss'):
            stub['bss_start'] = s.addr
    # Pad .text to a 4-byte boundary. Section data is bytes, so the pad must
    # be bytes too -- concatenating the str '\x00' (as the original code did)
    # raises TypeError on Python 3.
    if ((len(stub['text']) % 4) != 0):
        stub['text'] += ((4 - (len(stub['text']) % 4)) * b'\x00')
    print(('Stub text: %d 0x%08x, data: %d 0x%08x, entry 0x%x' % (len(stub['text']), stub['text_start'], len(stub.get('data', b'')), stub.get('data_start', 0), stub['entry'])), file=sys.stderr)
    return stub
def oklab_to_okhsl(lab: Vector, lms_to_rgb: Matrix, ok_coeff: list[Matrix]) -> Vector:
    """Convert an Oklab color to Okhsl.

    :param lab: [L, a, b] Oklab coordinates
    :param lms_to_rgb: LMS -> RGB matrix passed through to get_cs()
    :param ok_coeff: coefficient matrices passed through to get_cs()
    :return: [h, s, l] with hue in degrees; s stays 0.0 for achromatic
        colors (c == 0) and at the lightness extremes (l == 0 or 1)
    """
    L = lab[0]
    s = 0.0
    # Lightness mapped through the toe() helper (presumably the Okhsl
    # perceptual-lightness estimate -- confirm against toe()'s definition).
    l = toe(L)
    # Chroma is the magnitude in the a/b plane; negating both atan2 inputs
    # rotates the angle by pi, so adding 0.5 after dividing by tau lands
    # the hue fraction in [0, 1).
    c = math.sqrt(((lab[1] ** 2) + (lab[2] ** 2)))
    h = (0.5 + (math.atan2((- lab[2]), (- lab[1])) / math.tau))
    if ((l != 0.0) and (l != 1.0) and (c != 0)):
        # Unit direction of the chroma vector in the a/b plane.
        a_ = (lab[1] / c)
        b_ = (lab[2] / c)
        # Characteristic chroma values for this hue/lightness slice
        # (c_0 < c_mid < c_max per get_cs()).
        (c_0, c_mid, c_max) = get_cs([L, a_, b_], lms_to_rgb, ok_coeff)
        mid = 0.8
        mid_inv = 1.25
        if (c < c_mid):
            # Invert the lower [0, mid] segment of the saturation curve.
            k_1 = (mid * c_0)
            k_2 = (1.0 - (k_1 / c_mid))
            t = (c / (k_1 + (k_2 * c)))
            s = (t * mid)
        else:
            # Invert the upper [mid, 1] segment of the saturation curve.
            k_0 = c_mid
            k_1 = (((0.2 * (c_mid ** 2)) * (mid_inv ** 2)) / c_0)
            k_2 = (1.0 - (k_1 / (c_max - c_mid)))
            t = ((c - k_0) / (k_1 + (k_2 * (c - k_0))))
            s = (mid + (0.2 * t))
    return [util.constrain_hue((h * 360)), s, l]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.