code stringlengths 281 23.7M |
|---|
def _copy_scaled_por(src, dst, x_attr, y_attr, scale_x, scale_y):
    """Copy one point-of-regard (X, Y) pair from src to dst.

    An X value equal to ELInvalidValue marks the whole pair invalid and is
    propagated unscaled; otherwise both coordinates are scaled from the
    tracker's raw resolution into display coordinates.
    """
    x = getattr(src, x_attr)
    if x == ELInvalidValue:
        setattr(dst, x_attr, ELInvalidValue)
        setattr(dst, y_attr, ELInvalidValue)
    else:
        setattr(dst, x_attr, x * scale_x)
        setattr(dst, y_attr, getattr(src, y_attr) * scale_y)


def gazeSampleCallback(sample=POINTER(ELGazeSample)):
    """Eye-tracker callback: copy the incoming C gaze sample into g_api.

    Scales every point-of-regard coordinate from the tracker's raw
    resolution to the display size, stores the copy as g_api.lastSample,
    and forwards a snapshot to the logging queue while recording is active.

    NOTE(review): the default ``POINTER(ELGazeSample)`` is the ctypes
    *type*, not an instance -- presumably it only documents the callback
    prototype and the tracker always passes a real pointer; confirm.
    """
    if g_api is None:
        return
    # Fix: the original acquire()/release() pair leaked the lock if any
    # attribute copy raised; `with` guarantees release on every path.
    with g_api.sampleLock:
        scale_x = g_api.dispsize[0] / g_api.rawResolution[0]
        scale_y = g_api.dispsize[1] / g_api.rawResolution[1]
        src = sample.contents
        dst = ELGazeSample()
        g_api.lastSample = dst
        dst.timestampMicroSec = src.timestampMicroSec
        dst.index = src.index
        # Each POR pair is either propagated as "invalid" or scaled to
        # display coordinates; ELInvalidValue must never be scaled.
        for x_attr, y_attr in (('porRawX', 'porRawY'),
                               ('porFilteredX', 'porFilteredY'),
                               ('porLeftX', 'porLeftY'),
                               ('porRightX', 'porRightY')):
            _copy_scaled_por(src, dst, x_attr, y_attr, scale_x, scale_y)
        dst.eyePositionLeftX = src.eyePositionLeftX
        dst.eyePositionLeftY = src.eyePositionLeftY
        dst.eyePositionLeftZ = src.eyePositionLeftZ
        dst.pupilRadiusLeft = src.pupilRadiusLeft
        dst.eyePositionRightX = src.eyePositionRightX
        dst.eyePositionRightY = src.eyePositionRightY
        dst.eyePositionRightZ = src.eyePositionRightZ
        dst.pupilRadiusRight = src.pupilRadiusRight
        # Snapshot under the lock so the queued copy is consistent even if
        # another sample arrives immediately afterwards.
        gs = copy.copy(g_api.lastSample)
    if g_api._recording.is_set():
        g_api._logging_queue.put(gs)
class ssh_config_loading():
    """Spec-style tests for how Config discovers and loads SSH config files.

    NOTE(review): the bare tuples such as ``(Config, '_load_ssh_file')``
    preceding several tests look like remnants of stripped
    ``@patch.object(Config, '_load_ssh_file')`` decorators -- the ``method``
    parameters they would supply are otherwise unbound.  ``support`` is
    presumably a module-level path to the test fixture directory; confirm.
    """
    # Fixture SSH config files under the support directory.
    _system_path = join(support, 'ssh_config', 'system.conf')
    _user_path = join(support, 'ssh_config', 'user.conf')
    _runtime_path = join(support, 'ssh_config', 'runtime.conf')
    # Nonexistent paths, used to suppress loading of the real default files.
    _empty_kwargs = dict(system_ssh_path='nope/nope/nope', user_ssh_path='nope/noway/nuhuh')

    def defaults_to_empty_sshconfig_obj_if_no_files_found(self):
        # With no readable files, base_ssh_config is an empty SSHConfig.
        c = Config(**self._empty_kwargs)
        assert (type(c.base_ssh_config) is SSHConfig)
        assert (c.base_ssh_config._config == [])

    def object_can_be_given_explicitly_via_ssh_config_kwarg(self):
        sc = SSHConfig()
        assert (Config(ssh_config=sc).base_ssh_config is sc)

    (Config, '_load_ssh_file')
    def when_config_obj_given_default_paths_are_not_sought(self, method):
        sc = SSHConfig()
        Config(ssh_config=sc)
        assert (not method.called)

    (Config, '_load_ssh_file')
    def config_obj_prevents_loading_runtime_path_too(self, method):
        sc = SSHConfig()
        Config(ssh_config=sc, runtime_ssh_path=self._system_path)
        assert (not method.called)

    (Config, '_load_ssh_file')
    def when_runtime_path_given_other_paths_are_not_sought(self, method):
        Config(runtime_ssh_path=self._runtime_path)
        method.assert_called_once_with(self._runtime_path)

    (Config, '_load_ssh_file')
    def runtime_path_can_be_given_via_config_itself(self, method):
        Config(overrides={'ssh_config_path': self._runtime_path})
        method.assert_called_once_with(self._runtime_path)

    def runtime_path_does_not_die_silently(self):
        # A bad explicit runtime path must surface as FileNotFoundError.
        try:
            Config(runtime_ssh_path='sure/thing/boss/whatever/you/say')
        except FileNotFoundError as e:
            assert ('No such file or directory' in str(e))
            assert (e.errno == errno.ENOENT)
            assert (e.filename == 'sure/thing/boss/whatever/you/say')
        else:
            assert False, "Bad runtime path didn't raise error!"

    (Config, '_load_ssh_file')
    def default_file_paths_match_openssh(self, method):
        # User config is consulted first, then the system-wide file.
        Config()
        method.assert_has_calls([call(expanduser('~/.ssh/config')), call('/etc/ssh/ssh_config')])

    def system_path_loads_ok(self):
        c = Config(**dict(self._empty_kwargs, system_ssh_path=self._system_path))
        names = c.base_ssh_config.get_hostnames()
        assert (names == {'system', 'shared', '*'})

    def user_path_loads_ok(self):
        c = Config(**dict(self._empty_kwargs, user_ssh_path=self._user_path))
        names = c.base_ssh_config.get_hostnames()
        assert (names == {'user', 'shared', '*'})

    def both_paths_loaded_if_both_exist_with_user_winning(self):
        c = Config(user_ssh_path=self._user_path, system_ssh_path=self._system_path)
        names = c.base_ssh_config.get_hostnames()
        expected = {'user', 'system', 'shared', '*'}
        assert (names == expected)
        # 'shared' appears in both files; the user-file value must win.
        assert (c.base_ssh_config.lookup('shared')['port'] == '321')

    (Config, '_load_ssh_file')
    ('fabric.config.os.path.exists', (lambda x: True))
    def runtime_path_subject_to_user_expansion(self, method):
        # NOTE(review): the string/lambda tuple above is presumably a
        # stripped @patch(...) decorator faking path existence; confirm.
        tilded = '~/probably/not/real/tho'
        Config(runtime_ssh_path=tilded)
        method.assert_called_once_with(expanduser(tilded))

    (Config, '_load_ssh_file')
    def user_path_subject_to_user_expansion(self, method):
        tilded = '~/probably/not/real/tho'
        Config(user_ssh_path=tilded)
        method.assert_any_call(expanduser(tilded))

    class core_ssh_load_option_allows_skipping_ssh_config_loading():
        (Config, '_load_ssh_file')
        def skips_default_paths(self, method):
            Config(overrides={'load_ssh_configs': False})
            assert (not method.called)

        (Config, '_load_ssh_file')
        def does_not_affect_explicit_object(self, method):
            sc = SSHConfig()
            c = Config(ssh_config=sc, overrides={'load_ssh_configs': False})
            assert (not method.called)
            assert (c.base_ssh_config is sc)

        (Config, '_load_ssh_file')
        def does_not_skip_loading_runtime_path(self, method):
            # load_ssh_configs=False only disables the *default* paths.
            Config(runtime_ssh_path=self._runtime_path, overrides={'load_ssh_configs': False})
            method.assert_called_once_with(self._runtime_path)

    class lazy_loading_and_explicit_methods():
        (Config, '_load_ssh_file')
        def may_use_lazy_plus_explicit_methods_to_control_flow(self, method):
            c = Config(lazy=True)
            assert (not method.called)
            c.set_runtime_ssh_path(self._runtime_path)
            c.load_ssh_config()
            method.assert_called_once_with(self._runtime_path)
def test():
    """Exercise check for a spaCy tutorial step: the learner must have
    processed text with the English ``nlp`` object and printed ``doc.text``.

    Relies on grader-injected globals: ``nlp``, ``doc``, ``__solution__``
    (the submitted source code as a string) and ``__msg__`` (the feedback
    channel).  Assertion messages are in Spanish for the learner.
    """
    import spacy.tokens
    import spacy.lang.en
    assert isinstance(nlp, spacy.lang.en.English), 'El objeto nlp deberia ser un instance de la clase de ingles.'
    assert isinstance(doc, spacy.tokens.Doc), 'Procesaste el texto con el objeto nlp para crear un doc?'
    assert ('print(doc.text)' in __solution__), 'Imprimiste en pantalla el doc.text?'
    __msg__.good('Well done!')
class RelationshipsForTlsPrivateKey(ModelComposed):
    """Generated OpenAPI composed model for TLS private-key relationships.

    NOTE(review): the bare names ``_property`` and
    ``_js_args_to_python_args`` before several defs look like stripped
    decorators (presumably ``@cached_property`` and
    ``@convert_js_args_to_python_args`` from the generated-client runtime);
    ``_from_openapi_data`` takes ``cls`` and is presumably also missing a
    ``@classmethod`` decorator -- confirm against the generator template.
    """
    # No enum restrictions or extra validations on this model.
    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        # Any undeclared property may hold any primitive/container type.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False

    _property
    def openapi_types():
        # Declared properties and the tuple of types each accepts.
        lazy_import()
        return {'tls_activations': (RelationshipTlsActivationTlsActivation,), 'tls_domains': (RelationshipTlsDomainsTlsDomains,)}

    _property
    def discriminator():
        return None

    # JSON attribute name mapping (identity mapping here).
    attribute_map = {'tls_activations': 'tls_activations', 'tls_domains': 'tls_domains'}
    read_only_vars = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from raw server data (deserialization path).

        Pops the framework bookkeeping kwargs, validates the composed-schema
        layout, then assigns the remaining kwargs as model properties.
        Raises ApiTypeError if positional arguments are supplied.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            # Positional arguments are never valid for generated models.
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track visited classes to break composed-schema cycles.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so.
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes that must never be treated as model properties.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Build an instance from user-supplied kwargs.

        Same flow as _from_openapi_data, but additionally rejects attempts
        to set read-only attributes.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')

    _property
    def _composed_schemas():
        # anyOf composition: the relationships object or any primitive.
        lazy_import()
        return {'anyOf': [RelationshipTlsActivations, bool, date, datetime, dict, float, int, list, str, none_type], 'allOf': [], 'oneOf': []}
def test_metadata(hound, cve):
    """Validate the metadata attached to the rule for *cve*.

    Checks the required metadata tags, then (for non-.grep rules) scans the
    rule file for CVE identifiers, requiring at least one and that every
    one found matches *cve*.  Finally checks kernel_cves.json metadata.
    """
    metadata = hound.get_rule_metadata(cve)
    required_tags = (
        ('files', 'no "Files:" tag in the rule'),
        ('fix', 'no "Fix:" tag in the rule'),
        ('fixes', 'no "Fixes:" or "Detect-To:" tag in the rule'),
    )
    for tag, message in required_tags:
        assert tag in metadata, message
    rule_path = hound.get_rule(cve)
    if rule_path.endswith('.grep'):
        # .grep rules are not expected to embed a CVE id.
        return
    pattern = re.compile('CVE-\\d{4}-\\d{4,7}')
    seen_cve_id = False
    with open(rule_path, 'rt') as rule_file:
        for line in rule_file:
            match = pattern.search(line)
            if not match:
                continue
            assert match.group(0) == cve, 'wrong CVE-id in the rule'
            seen_cve_id = True
    assert seen_cve_id, 'no CVE-id in the rule'
    assert hound.get_cve_metadata(cve), 'no metadata in kernel_cves.json'
class OptionPlotoptionsScatterClusterMarker(Options):
    """Option accessors for Highcharts ``plotOptions.scatter.cluster.marker``.

    NOTE(review): every option name is defined twice -- a zero-argument
    getter followed by a one-argument setter.  As written the second
    ``def`` silently replaces the first; presumably ``@property`` and
    ``@<name>.setter`` decorators were stripped during extraction --
    confirm against the original source.
    """

    def enabled(self):
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Default: white border.
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        # Default: no border line.
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        # Default cluster-marker radius.
        return self._config_get(15)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def symbol(self):
        # Default symbol name.
        return self._config_get('cluster')

    def symbol(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
class ConstantVelocityGaussian2D():
    """Gaussian bump advected at constant velocity on the periodic unit square."""

    def __init__(self, sigma=(1.0 / 8.0), b=None, xc=0.25, yc=0.5):
        """Set up the initial condition.

        sigma: Gaussian width.
        b: advection velocity [bx, by]; defaults to [1.0, 0.0].
        xc, yc: initial center of the bump.
        """
        self.sigma = sigma
        self.xc = xc
        self.yc = yc
        # Fix: the original used a mutable default argument (b=[1.0, 0.0]),
        # so all default-constructed instances shared one velocity list.
        self.b = [1.0, 0.0] if b is None else b

    def uOfXT(self, x, t):
        """Evaluate the solution at position x = (x0, x1) and time t.

        The center is advected by b*t and wrapped back into [0, 1) in each
        coordinate (periodic domain).
        """
        centerX = (self.xc + (self.b[0] * t)) % 1.0
        centerY = (self.yc + (self.b[1] * t)) % 1.0
        d2 = ((x[0] - centerX) ** 2) + ((x[1] - centerY) ** 2)
        return exp(((- 0.5) * d2) / (self.sigma ** 2))
def write_mode_tags(segmk, ps, site):
    """Emit one-hot site tags for the RAM write-mode parameters.

    For each of WRITE_MODE_A/WRITE_MODE_B, tags the site with whether the
    configured mode equals READ_FIRST and whether it equals NO_CHANGE.
    """
    for param in ('WRITE_MODE_A', 'WRITE_MODE_B'):
        mode = verilog.unquote(ps[param])
        for suffix in ('READ_FIRST', 'NO_CHANGE'):
            segmk.add_site_tag(site, ('%s_%s' % (param, suffix)), (mode == suffix))
def extractNightowlwalkerWordpressCom(item):
    """Build a release message for a nightowlwalker.wordpress.com item.

    Returns None for previews or items without a chapter/volume number,
    a release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (series name, translation type)
    known_groups = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (name, tl_type) in known_groups.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _handle_binary(p: Pattern, s: str) -> PatternRule:
    # Build a rewrite rule for assignments whose value is a binary
    # operation matching pattern `p`: the assignment
    #   target = lhs <op> rhs
    # becomes
    #   target = bmg.handle_function(operator.<s>, [lhs, rhs])
    # `op` is the AST for the attribute lookup `operator.<s>` (e.g.
    # operator.add when s == 'add').
    op = ast.Attribute(value=ast.Name('operator', ctx=ast.Load()), attr=s, ctx=ast.Load())
    # NOTE(review): `assign`, `binop`, `_make_bmg_call` and PatternRule are
    # project helpers; presumably assign(value=binop(op=p)) matches an
    # ast.Assign whose value is a BinOp with operator pattern `p` --
    # confirm against the pattern-matching module.
    return PatternRule(assign(value=binop(op=p)), (lambda a: ast.Assign(a.targets, _make_bmg_call('handle_function', [op, ast.List(elts=[a.value.left, a.value.right], ctx=ast.Load())]))))
class sFlowV5FlowRecord(object):
    """One sFlow v5 flow record: (enterprise, format) header plus flow data."""
    # Header: two unsigned 32-bit big-endian ints
    # (enterprise/format word, then data length).
    _PACK_STR = '!II'
    MIN_LEN = struct.calcsize(_PACK_STR)

    def __init__(self, enterprise, flow_data_format, flow_data_length, flow_data):
        super(sFlowV5FlowRecord, self).__init__()
        self.enterprise = enterprise
        self.flow_data_format = flow_data_format
        self.flow_data_length = flow_data_length
        self.flow_data = flow_data

    def parser(cls, buf, offset):
        # NOTE(review): takes `cls` and builds an instance via cls(...) --
        # presumably a stripped @classmethod decorator; confirm against
        # the original source.
        (flowdata_format, flow_data_length) = struct.unpack_from(cls._PACK_STR, buf, offset)
        # The first 32-bit word packs the enterprise number (high 20 bits)
        # and the data format (low 12 bits).
        format_mask = 4095  # 0xFFF
        enterprise_shiftbit = 12
        flow_data_format = (flowdata_format & format_mask)
        enterprise = (flowdata_format >> enterprise_shiftbit)
        offset += cls.MIN_LEN
        if (flow_data_format == 1):
            # Format 1: raw packet header sample.
            flow_data = sFlowV5RawPacketHeader.parser(buf, offset)
        elif (flow_data_format == 1001):
            # Format 1001: extended switch data.
            flow_data = sFlowV5ExtendedSwitchData.parser(buf, offset)
        else:
            # Unknown formats are kept as an opaque tuple of raw bytes.
            LOG.info('Unknown format. sFlowV5FlowRecord.flow_data_format=%d', flow_data_format)
            pack_str = ('!%sc' % flow_data_length)
            flow_data = struct.unpack_from(pack_str, buf, offset)
        msg = cls(enterprise, flow_data_format, flow_data_length, flow_data)
        return msg
class MetaData():
    """In-memory catalogue of database metadata, keyed by table name.

    Tables are reachable by subscript (md['users']) or attribute access
    (md.users); the mutation helpers mirror common schema operations.
    """

    def __init__(self):
        # name -> MetaTable
        self.tables: Dict[(str, MetaTable)] = {}

    def __getitem__(self, key):
        return self.tables[key]

    def __getattr__(self, name):
        # Fix: the dict's KeyError used to escape from attribute access,
        # which breaks hasattr(), getattr(obj, name, default), copy and
        # pickle.  Attribute lookup failures must raise AttributeError.
        try:
            return self.tables[name]
        except KeyError:
            raise AttributeError(name) from None

    def where(self, *args, **kwargs) -> MetaDataSet:
        """Build a MetaDataSet query over this metadata."""
        return MetaDataSet(*args, **kwargs)

    def __call__(self, *args, **kwargs):
        # Calling the metadata object is shorthand for .where(...).
        return self.where(*args, **kwargs)

    def create_table(self, name: str, columns: List[Column], primary_keys: List[str], **kw: Any):
        self.tables[name] = MetaTable(name, columns, primary_keys, **kw)

    def drop_table(self, name: str):
        del self.tables[name]

    def add_column(self, table: str, column: Column):
        self.tables[table][column.name] = column

    def drop_column(self, table: str, column: str):
        del self.tables[table][column]

    def change_column(self, table_name: str, column_name: str, changes: Dict[(str, Any)]):
        self.tables[table_name][column_name].update(**changes)

    def create_index(self, table_name: str, index_name: str, fields: List[str], expressions: List[str], unique: bool, **kw: Any):
        self.tables[table_name].indexes[index_name] = MetaIndex(table_name, index_name, fields, expressions, unique, **kw)

    def drop_index(self, table_name: str, index_name: str):
        del self.tables[table_name].indexes[index_name]

    def create_foreign_key_constraint(self, table_name: str, constraint_name: str, column_names: List[str], foreign_table_name: str, foreign_keys: List[str], on_delete: str):
        self.tables[table_name].foreign_keys[constraint_name] = MetaForeignKey(table_name, constraint_name, column_names, foreign_table_name, foreign_keys, on_delete)

    def drop_foreign_key_constraint(self, table_name: str, constraint_name: str):
        del self.tables[table_name].foreign_keys[constraint_name]
def check_not_null(tensor: Tensor, tensor_idx: Optional[int]=None, skip_if_lower_bound_is_zero: bool=False) -> str:
    """Generate a C++ snippet that throws if the tensor's pointer is null.

    tensor_idx, when given, makes the check read ``params_[idx].ptr``
    instead of the tensor's own name.  If any dimension's lower bound is 0
    the check additionally requires the runtime element count to be
    nonzero (a zero-size tensor may legitimately have a null pointer);
    with skip_if_lower_bound_is_zero the check is dropped entirely in
    that case.  Returns the snippet, or '' when the check is skipped.
    """
    name = tensor._attrs['name']
    if (tensor_idx is None):
        check = name
    else:
        check = f'params_[{tensor_idx}].ptr'
    # Size product factors; seeded with '1' so '*'.join() is never empty.
    shape = ['1']
    lower_bound_is_zero = False
    for dim in tensor._attrs['shape']:
        lower_bound_is_zero |= (dim.lower_bound() == 0)
        # NOTE(review): this early return fires as soon as any dim seen so
        # far has a zero lower bound, before the remaining dims are
        # scanned -- confirm that is intended rather than a post-loop check.
        if (skip_if_lower_bound_is_zero and lower_bound_is_zero):
            return ''
        if isinstance(dim, IntImm):
            # Static dimension: embed its literal value.
            shape.append(str(dim._attrs['values'][0]))
        else:
            # Dynamic dimension: reference its runtime variable by name.
            shape.append(dim._attrs['name'])
    nullptr_check = f'{check} == nullptr'
    condition = (nullptr_check if (not lower_bound_is_zero) else f"{nullptr_check} && {'*'.join(shape)} != 0")
    return f'''
    if ({condition}) {{
      throw std::runtime_error("Constant {name} was not set! Set the value with set_constant.");
    }}
    '''
def extractArvelhurstWordpressCom(item):
    """Build a release message for an arvelhurst.wordpress.com item.

    Returns None for previews or items with no chapter/volume number,
    False when no recognized tag is present, otherwise the release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_number = chp or vol
    if not has_number or 'preview' in item['title'].lower():
        return None
    if 'WATTT' not in item['tags']:
        return False
    return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
def mock_get_info(monkeypatch, set_api_key):
    """Register a mocked 200 GET response for the task-detail endpoint."""
    detail = {
        'taskId': TASK_ID,
        'taskName': TASK_NAME,
        'createdAt': CREATED_AT,
        'realFlexUnit': FLEX_UNIT,
        'estFlexUnit': EST_FLEX_UNIT,
        'taskType': TaskType.FDTD.name,
        'metadataStatus': 'processed',
        'status': 'success',
        's3Storage': 1.0,
    }
    url = f'{Env.current.web_api_endpoint}/tidy3d/tasks/{TASK_ID}/detail'
    responses.add(responses.GET, url, json={'data': detail}, status=200)
class ETHPeer(BaseChainPeer):
    """Peer speaking the ETH wire sub-protocol (v63-v65), optionally witness."""
    max_headers_fetch = MAX_HEADERS_FETCH
    supported_sub_protocols: Tuple[(Type[BaseETHProtocol], ...)] = (ETHProtocolV63, ETHProtocolV64, ETHProtocolV65)
    sub_proto: BaseETHProtocol = None
    # Bound during _pre_run() based on the negotiated protocol version.
    eth_api: AnyETHAPI
    wit_api: WitnessAPI

    def get_behaviors(self) -> Tuple[(BehaviorAPI, ...)]:
        # Register one behavior per supported ETH version plus the witness
        # API; which one becomes active depends on negotiation.
        return (super().get_behaviors() + (ETHV63API().as_behavior(), ETHV64API().as_behavior(), ETHV65API().as_behavior(), WitnessAPI().as_behavior()))

    def _pre_run(self) -> None:
        super()._pre_run()
        # Witness support is optional; wit_api stays unset without it.
        if self.connection.has_protocol(WitnessProtocol):
            self.wit_api = self.connection.get_logic(WitnessAPI.name, WitnessAPI)
        # Bind eth_api to the logic matching the negotiated ETH version.
        if self.connection.has_protocol(ETHProtocolV63):
            self.eth_api = self.connection.get_logic(ETHV63API.name, ETHV63API)
        elif self.connection.has_protocol(ETHProtocolV64):
            self.eth_api = self.connection.get_logic(ETHV64API.name, ETHV64API)
        elif self.connection.has_protocol(ETHProtocolV65):
            self.eth_api = self.connection.get_logic(ETHV65API.name, ETHV65API)
        else:
            # supported_sub_protocols guarantees one of the above matched.
            raise Exception('Unreachable code')

    def get_extra_stats(self) -> Tuple[(str, ...)]:
        basic_stats = super().get_extra_stats()
        eth_stats = self.eth_api.get_extra_stats()
        # Witness stats only when the witness logic was negotiated.
        if self.connection.has_logic(WitnessProtocol.name):
            wit_stats = self.wit_api.get_extra_stats()
        else:
            wit_stats = ()
        return ((basic_stats + eth_stats) + wit_stats)
# NOTE(review): the two lines below look like stripped task decorators
# (presumably ``@cq.task(name='api.node.vm.tasks.harvest_vm_cb',
# base=MgmtCallbackTask, bind=True)`` plus a second bare decorator);
# as written the first line is not valid Python -- restore the decorators.
(name='api.node.vm.tasks.harvest_vm_cb', base=MgmtCallbackTask, bind=True)
()
def harvest_vm_cb(result, task_id, node_uuid=None):
    """Callback for harvest_vm: import VMs discovered on a compute node.

    Parses the '||||'-separated JSON documents from the task stdout,
    creates a Vm object for each previously unknown uuid, logs per-VM
    success/failure, and raises TaskException on fatal problems (nonzero
    return code, unparsable/missing JSON, or nothing loaded at all).
    """
    node = Node.objects.get(uuid=node_uuid)
    dc = Dc.objects.get_by_id(dc_id_from_task_id(task_id))
    err = result.pop('stderr', None)
    vms = []  # hostnames of successfully imported VMs
    vms_err = []  # uuids that failed to load
    jsons = []  # parsed JSON documents from stdout
    if ((result.pop('returncode', None) != 0) or err):
        logger.error('Found nonzero returncode in result from harvest_vm(%s). Error: %s', node, err)
        # NOTE(review): 'returncode' was popped from result just above, so
        # result['returncode'] here raises KeyError -- confirm and fix.
        raise TaskException(result, ('Got bad return code (%s). Error: %s' % (result['returncode'], err)))
    # stdout carries multiple JSON documents separated by '||||'.
    for json in result.pop('stdout', '').split('||||'):
        json = json.strip()
        if json:
            try:
                jsons.append(PickleDict.load(json))
            except Exception as e:
                logger.error('Could not parse json output from harvest_vm(%s). Error: %s', node, e)
                raise TaskException(result, 'Could not parse json output')
    if (not jsons):
        raise TaskException(result, 'Missing json output')
    request = get_dummy_request(dc, method='POST', system_user=True)
    for json in jsons:
        vm_uuid = json.get('uuid', None)
        if vm_uuid:
            # Skip VMs that already exist in the database.
            if Vm.objects.filter(uuid=vm_uuid).exists():
                logger.warning('Ignoring VM %s found in harvest_vm(%s)', vm_uuid, node)
                continue
        try:
            vm = vm_from_json(request, task_id, json, dc, template=True, save=True, update_ips=True, update_dns=True)
        except Exception as e:
            logger.exception(e)
            logger.error('Could not load VM from json:\n"""%s"""', json)
            err_msg = ('Could not load server %s. Error: %s' % (vm_uuid, e))
            task_log_cb_error({'message': err_msg}, task_id, obj=node, **result['meta'])
            vms_err.append(vm_uuid)
        else:
            logger.info('Successfully saved new VM %s after harvest_vm(%s)', vm, node)
            vms.append(vm.hostname)
            vm_deployed.send(task_id, vm=vm)
            # If the harvested definition differs from the node's, push an
            # update; failures here are logged but non-fatal.
            if vm.json_changed():
                try:
                    _vm_update(vm)
                except Exception as e:
                    logger.exception(e)
    # Success if anything was imported, or if nothing failed either.
    if (vms or (not vms_err)):
        if vms:
            result['message'] = ('Successfully harvested %s server(s) (%s)' % (len(vms), ','.join(vms)))
        else:
            result['message'] = 'No new server found'
        task_log_cb_success(result, task_id, obj=node, **result['meta'])
        return result
    else:
        raise TaskException(result, 'Could not find or load any server')
class QueryRewrite(BaseChat):
    """Chat scene that rewrites the user's query into `nums` variants."""
    # NOTE(review): `.value()` here vs `.value` in chat_type() below is
    # inconsistent -- one of the two is presumably wrong; confirm whether
    # ChatScene exposes value as a method or as an enum attribute.
    chat_scene: str = ChatScene.QueryRewrite.value()

    def __init__(self, chat_param: Dict):
        """chat_param keys used here: 'select_param' (number of rewrites)
        and 'current_user_input' (the original query)."""
        chat_param['chat_mode'] = ChatScene.QueryRewrite
        super().__init__(chat_param=chat_param)
        self.nums = chat_param['select_param']
        self.current_user_input = chat_param['current_user_input']

    async def generate_input_values(self):
        # Values substituted into the rewrite prompt template.
        input_values = {'nums': self.nums, 'original_query': self.current_user_input}
        return input_values

    def chat_type(self) -> str:
        return ChatScene.QueryRewrite.value
def transaction_search_1():
    """Seed a TransactionSearch fixture set.

    Creates one FY2021 period-7 submission window (with submissions for two
    toptier agencies), two awarding agencies, five awards (one per award
    category), and seven transactions: ids 1-5 cover contract / idv /
    grant / loan / direct-payment for agency 001 in FY2021 (the loan row
    carries a zero pragmatic obligation), id 6 is a prior-fiscal-year IDV,
    and id 7 is an FY2021 IDV awarded by agency 002.
    """
    # Submission window: FY2021, month 7 / quarter 3, monthly submission.
    dsws = baker.make('submissions.DABSSubmissionWindowSchedule', submission_reveal_date='2021-04-09', submission_fiscal_year=2021, submission_fiscal_month=7, submission_fiscal_quarter=3, is_quarter=False, period_start_date='2021-03-01', period_end_date='2021-04-01')
    baker.make('submissions.SubmissionAttributes', toptier_code='001', submission_window=dsws)
    baker.make('submissions.SubmissionAttributes', toptier_code='002', submission_window=dsws)
    toptier_agency_1 = baker.make('references.ToptierAgency', toptier_code='001')
    toptier_agency_2 = baker.make('references.ToptierAgency', toptier_code='002')
    awarding_agency_1 = baker.make('references.Agency', toptier_agency=toptier_agency_1, toptier_flag=True, _fill_optional=True)
    awarding_agency_2 = baker.make('references.Agency', toptier_agency=toptier_agency_2, toptier_flag=True, _fill_optional=True)
    # One award per category so category filters each have a match.
    award_contract = baker.make('search.AwardSearch', award_id=1, category='contract')
    award_idv = baker.make('search.AwardSearch', award_id=2, category='idv')
    award_grant = baker.make('search.AwardSearch', award_id=3, category='grant')
    award_loan = baker.make('search.AwardSearch', award_id=4, category='loans')
    award_dp = baker.make('search.AwardSearch', award_id=5, category='direct payment')
    baker.make(TransactionSearch, transaction_id=1, award=award_contract, federal_action_obligation=101, generated_pragmatic_obligation=101, action_date='2021-04-01', fiscal_action_date='2021-07-01', awarding_agency_id=awarding_agency_1.id, type='A', awarding_agency_code='001', awarding_toptier_agency_name=toptier_agency_1.name)
    baker.make(TransactionSearch, transaction_id=2, award=award_idv, federal_action_obligation=102, generated_pragmatic_obligation=102, action_date='2021-04-01', fiscal_action_date='2021-07-01', awarding_agency_id=awarding_agency_1.id, type='IDV_A', awarding_agency_code='001', awarding_toptier_agency_name=toptier_agency_1.name)
    baker.make(TransactionSearch, transaction_id=3, award=award_grant, federal_action_obligation=103, generated_pragmatic_obligation=103, action_date='2021-04-01', fiscal_action_date='2021-07-01', awarding_agency_id=awarding_agency_1.id, type='02', awarding_agency_code='001', awarding_toptier_agency_name=toptier_agency_1.name)
    # Loan row: nonzero face value but zero pragmatic obligation.
    baker.make(TransactionSearch, transaction_id=4, award=award_loan, federal_action_obligation=104, generated_pragmatic_obligation=0, action_date='2021-04-01', fiscal_action_date='2021-07-01', awarding_agency_id=awarding_agency_1.id, type='08', awarding_agency_code='001', awarding_toptier_agency_name=toptier_agency_1.name)
    baker.make(TransactionSearch, transaction_id=5, award=award_dp, federal_action_obligation=105, generated_pragmatic_obligation=105, action_date='2021-04-01', fiscal_action_date='2021-07-01', awarding_agency_id=awarding_agency_1.id, type='10', awarding_agency_code='001', awarding_toptier_agency_name=toptier_agency_1.name)
    # Prior fiscal year (action date 2020) for the same IDV award.
    baker.make(TransactionSearch, transaction_id=6, award=award_idv, federal_action_obligation=300, generated_pragmatic_obligation=300, action_date='2020-04-01', fiscal_action_date='2020-07-01', awarding_agency_id=awarding_agency_1.id, type='IDV_A', awarding_agency_code='001', awarding_toptier_agency_name=toptier_agency_1.name)
    # Same fiscal year but awarded by the second agency.
    baker.make(TransactionSearch, transaction_id=7, award=award_idv, federal_action_obligation=400, generated_pragmatic_obligation=400, action_date='2021-04-01', fiscal_action_date='2021-07-01', awarding_agency_id=awarding_agency_2.id, type='IDV_C', awarding_agency_code='002', awarding_toptier_agency_name=toptier_agency_2.name)
# NOTE(review): the line below looks like a stripped router decorator
# (presumably ``@router.patch(CONSENT_REQUEST_PREFERENCES_WITH_ID, ...)``);
# as written it is not valid Python -- restore the decorator.
(CONSENT_REQUEST_PREFERENCES_WITH_ID, status_code=HTTP_200_OK, response_model=ConsentPreferences)
def set_consent_preferences(*, consent_request_id: str, db: Session=Depends(get_db), data: ConsentPreferencesWithVerificationCode) -> ConsentPreferences:
    """Verify a consent request and persist the submitted preferences.

    Looks up the consent request and its provided identity using the
    verification code, stores the preference list on the request, upserts
    one Consent row per data use for the identity, and optionally queues a
    privacy request to propagate the consent downstream.
    Raises HTTPException 404 when the identity has no hashed value and
    400 when a Consent row cannot be created.
    """
    (consent_request, provided_identity) = _get_consent_request_and_provided_identity(db=db, consent_request_id=consent_request_id, verification_code=data.code)
    consent_request.preferences = [schema.dict() for schema in data.consent]
    consent_request.save(db=db)
    if (not provided_identity.hashed_value):
        raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail='Provided identity missing')
    # Upsert one Consent row per (identity, data_use) pair.
    for preference in data.consent:
        current_preference = Consent.filter(db=db, conditions=((Consent.provided_identity_id == provided_identity.id) & (Consent.data_use == preference.data_use))).first()
        if current_preference:
            current_preference.update(db, data=dict(preference))
        else:
            preference_dict = dict(preference)
            preference_dict['provided_identity_id'] = provided_identity.id
            try:
                Consent.create(db, data=preference_dict)
            except IntegrityError as exc:
                # Wrap the DB error; Pii masks identifying details.
                raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail=Pii(str(exc)))
    consent_preferences: ConsentPreferences = _prepare_consent_preferences(db, provided_identity)
    # Optionally propagate consent via a privacy request (old workflow).
    privacy_request_creation_results: Optional[BulkPostPrivacyRequests] = queue_privacy_request_to_propagate_consent_old_workflow(db, provided_identity, (data.policy_key or DEFAULT_CONSENT_POLICY), consent_preferences, data.executable_options, data.browser_identity)
    if privacy_request_creation_results:
        consent_request.privacy_request_id = privacy_request_creation_results.succeeded[0].id
        consent_request.save(db=db)
    return consent_preferences
class OptionSeriesPolygonSonificationTracksMappingPan(Options):
    """Option accessors for Highcharts polygon sonification track pan mapping.

    NOTE(review): every option name is defined twice -- a zero-argument
    getter followed by a one-argument setter.  As written the second
    ``def`` silently replaces the first; presumably ``@property`` and
    ``@<name>.setter`` decorators were stripped during extraction --
    confirm against the original source.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def extractInchoateOeuvre(item):
    """Build a release message for an Inchoate Oeuvre feed item.

    Returns None when the title is a preview or carries no chapter/volume
    number; otherwise a release message for the WATTT tag, or False when
    the tag is absent.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    numberless = not (chp or vol)
    if numberless or 'preview' in item['title'].lower():
        return None
    if 'WATTT' in item['tags']:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
class LESProxyPeer(BaseProxyPeer):
    """Proxy peer for the LES protocol, delegating requests over the event bus."""

    def __init__(self, session: SessionAPI, event_bus: EndpointAPI, les_api: ProxyLESAPI):
        super().__init__(session, event_bus)
        self.les_api = les_api

    def from_session(cls, session: SessionAPI, event_bus: EndpointAPI, broadcast_config: BroadcastConfig) -> 'LESProxyPeer':
        # NOTE(review): takes `cls` and builds an instance -- presumably a
        # stripped @classmethod decorator; confirm against the original.
        return cls(session, event_bus, ProxyLESAPI(session, event_bus, broadcast_config))
def _get_element_type(element_property: typing.Dict[(str, str)]) -> Type:
element_type = ([e_property['type'] for e_property in element_property['anyOf']] if element_property.get('anyOf') else element_property['type'])
element_format = (element_property['format'] if ('format' in element_property) else None)
if (type(element_type) == list):
return typing.Optional[_get_element_type({'type': element_type[0]})]
if (element_type == 'string'):
return str
elif (element_type == 'integer'):
return int
elif (element_type == 'boolean'):
return bool
elif (element_type == 'number'):
if (element_format == 'integer'):
return int
else:
return float
return str |
class GlobalGenerator(nn.Module):
    """Global generator (pix2pixHD style): a reflection-padded 7x7 stem,
    `n_downsampling` strided-conv downsamples with channel width capped at
    ngf*max_mult, `n_blocks` ResNet blocks at the bottleneck, mirrored
    transposed-conv upsamples, and a 7x7 conv + Sigmoid output head.

    NOTE(review): `bottleneck` is accepted but unused in the code visible
    here, and `vaeLike` is stored but never read in this class --
    presumably consumed by subclasses or kept for interface compatibility;
    confirm.
    """

    def __init__(self, input_nc, output_nc, ngf=64, bottleneck='2d', n_downsampling=3, n_blocks=9, max_mult=16, norm_layer=nn.BatchNorm2d, padding_type='reflect', vaeLike=False):
        assert (n_blocks >= 0)
        super(GlobalGenerator, self).__init__()
        self.vaeLike = vaeLike
        activation = nn.ReLU(True)
        # Stem: 7x7 conv at full resolution, reflection-padded.
        model = [nn.ReflectionPad2d(3), nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0), norm_layer(ngf), activation]
        for i in range(n_downsampling):
            mult = (2 ** i)
            if (mult >= max_mult):
                # Width already at the cap: downsample without widening.
                model += [nn.Conv2d((ngf * max_mult), (ngf * max_mult), kernel_size=3, stride=2, padding=1), norm_layer((ngf * max_mult)), activation]
            else:
                # Halve resolution, double channels.
                model += [nn.Conv2d((ngf * mult), ((ngf * mult) * 2), kernel_size=3, stride=2, padding=1), norm_layer(((ngf * mult) * 2)), activation]
        # Bottleneck channel multiplier (capped at max_mult).
        mult = min((2 ** n_downsampling), max_mult)
        for i in range(n_blocks):
            model += [ResnetBlock((ngf * mult), padding_type=padding_type, activation=activation, norm_layer=norm_layer)]
        for i in range(n_downsampling):
            mult = (2 ** (n_downsampling - i))
            if (mult > max_mult):
                # Still above the cap: upsample without narrowing.
                model += [nn.ConvTranspose2d((ngf * max_mult), (ngf * max_mult), kernel_size=3, stride=2, padding=1, output_padding=1), norm_layer((ngf * max_mult)), activation]
            else:
                # Double resolution, halve channels.
                model += [nn.ConvTranspose2d((ngf * mult), int(((ngf * mult) / 2)), kernel_size=3, stride=2, padding=1, output_padding=1), norm_layer(int(((ngf * mult) / 2))), activation]
        # Output head: 7x7 conv back to output_nc channels, Sigmoid range.
        model += [nn.ReflectionPad2d(3), nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0), nn.Sigmoid()]
        self.model = nn.Sequential(*model)

    def forward(self, input):
        return self.model(input)
def acSubmit(board, dom):
    """Handle the add-contact submit: validate the name, store the contact,
    and refresh the display state."""
    field_values = dom.getValues(FIELDS)
    # Guard clause: an empty name aborts the submit with an alert.
    if not field_values['Name'].strip():
        dom.alert('The name field can not be empty!')
        return
    board.state = State.DISPLAY
    contacts.append(field_values)
    displayContact(None, dom)
    displayContacts(dom)
    updateOutfit(board, dom)
def set_brightness(brightness):
    """Set the global LED brightness.

    brightness: int or float in the inclusive range [0.0, 1.0].
    Raises ValueError for a wrong type or an out-of-range value.
    """
    global _brightness
    # Fix: validate BEFORE calling setup() so invalid input has no side
    # effects; also use the tuple form of isinstance and a chained compare.
    if not isinstance(brightness, (int, float)):
        raise ValueError('Brightness should be an int or float')
    if not (0.0 <= brightness <= 1.0):
        raise ValueError('Brightness should be between 0.0 and 1.0')
    setup()
    _brightness = brightness
def test_index_file_path_redirected(finder_static_files, finder_server):
    """Requesting the index file by its full path must 302-redirect to the
    containing directory URL."""
    index_path = finder_static_files.index_path
    directory_path = index_path.rpartition('/')[0] + '/'
    response = finder_server.get(settings.STATIC_URL + index_path, allow_redirects=False)
    redirect_target = get_url_path(response.url, response.headers['Location'])
    assert response.status_code == 302
    assert redirect_target == settings.STATIC_URL + directory_path
class MetadataIndex(McapRecord):
    """MCAP MetadataIndex record: locates a Metadata record in the file."""
    # Byte offset of the target Metadata record.
    offset: int
    # Byte length of the target Metadata record.
    length: int
    # Name of the metadata entry.
    name: str

    def write(self, stream: RecordBuilder) -> None:
        """Serialize: opcode header, two 8-byte ints, then the prefixed name."""
        stream.start_record(Opcode.METADATA_INDEX)
        stream.write8(self.offset)
        stream.write8(self.length)
        stream.write_prefixed_string(self.name)
        stream.finish_record()

    def read(stream: ReadDataStream):
        # NOTE(review): no self/cls parameter -- presumably a stripped
        # @staticmethod decorator; confirm against the original source.
        # Field order must mirror write() above.
        offset = stream.read8()
        length = stream.read8()
        name = stream.read_prefixed_string()
        return MetadataIndex(offset=offset, length=length, name=name)
_bad_request
def all_england(request):
    """Render the All-England dashboard, or handle a bookmark POST to the same URL.

    Aggregates savings/spending figures across every CCG or practice in
    England, selected via the 'entity_type' query parameter (default 'CCG').
    """
    if (request.method == 'POST'):
        # Bookmark submissions are posted back to this view.
        return _handle_bookmark_post(request, OrgBookmark)
    form = _build_bookmark_form(OrgBookmark, {})
    tag_filter = _get_measure_tag_filter(request.GET)
    entity_type = request.GET.get('entity_type', 'CCG')
    date = _specified_or_last_date(request, 'dashboard_data')
    ppu_savings = get_total_savings_for_org(str(date), 'all_standard_practices', None)
    # Expensive aggregate queries are memoised through cached().
    measure_savings = cached(all_england_measure_savings, entity_type, date)
    low_priority_savings = cached(all_england_low_priority_savings, entity_type, date)
    low_priority_total = cached(all_england_low_priority_total, entity_type, date)
    ncso_spending = first_or_none(ncso_spending_for_entity(None, 'all_england', num_months=1))
    # Build a querystring that flips the page to the other entity type while
    # preserving the remaining parameters.
    other_entity_type = ('practice' if (entity_type == 'CCG') else 'CCG')
    other_entity_query = request.GET.copy()
    other_entity_query['entity_type'] = other_entity_type
    # Configuration blob consumed by the client-side measures charts.
    measure_options = {'aggregate': True, 'chartTitleUrlTemplate': _url_template('measure_for_all_ccgs'), 'globalMeasuresUrl': _build_global_measures_url(tags=tag_filter['tags']), 'measureUrlTemplate': _url_template('measure_for_all_ccgs'), 'measureDefinitionUrlTemplate': _url_template('measure_definition'), 'oneEntityUrlTemplate': _url_template('measure_for_all_england'), 'orgName': 'All {}s in England'.format(entity_type), 'orgType': entity_type.lower(), 'orgTypeHuman': _entity_type_human(entity_type.lower()), 'panelMeasuresUrl': _build_panel_measures_url(entity_type.lower(), tags=tag_filter['tags'], aggregate=True), 'rollUpBy': 'measure_id', 'tags': ','.join(tag_filter['tags']), 'tagsFocusUrlTemplate': reverse('all_england')}
    context = {'tag_filter': tag_filter, 'entity_type': entity_type, 'other_entity_type': other_entity_type, 'other_entity_url': ('?' + other_entity_query.urlencode()), 'ppu_savings': ppu_savings, 'measure_savings': measure_savings, 'low_priority_savings': low_priority_savings, 'low_priority_total': low_priority_total, 'ncso_spending': ncso_spending, 'date': date, 'measure_options': measure_options, 'form': form}
    return render(request, 'all_england.html', context)
def _get_members(field_prefix: str, definitions: Dict, skip: Set[str]) -> Dict[str, Member]:
    """Build Member descriptors for every non-$ref property in *definitions*.

    Recurses into nested 'object' and 'array' properties, prefixing nested
    field names with '<parent>.'; names listed in *skip* are omitted.
    """
    members = {}
    required_props = definitions.get('required', [])
    for name, spec in definitions.get('properties', {}).items():
        if '$ref' in spec:
            continue
        qualified = field_prefix + name
        if qualified in skip:
            continue
        members[qualified] = Member(
            description=spec.get('_description', ''),
            format=spec.get('_format'),
            legal_values=_get_field_enum_values(spec),
            name=qualified,
            typ=_get_field_type(spec),
            required=(name in required_props),
        )
        nested_type = spec.get('type')
        if nested_type == 'object':
            members.update(_get_members(qualified + '.', spec, skip))
        elif nested_type == 'array':
            members.update(_get_members(qualified + '.', spec['items'], skip))
    return members
def sanity_test():
    """Stand-alone debug entry point: register a single file/directory and
    dump the resulting per-directory configuration tree to stdout."""
    from argparse import ArgumentParser
    import traceback
    ap = ArgumentParser()
    ap.add_argument('item', metavar='FILE|DIR')
    ap.add_argument('--no-tb', action='store_true', default=False, help='Do not show debug-style backtrace')
    options = ap.parse_args()
    # Message handler configured for deterministic, uncoloured debug output.
    mh = Message_Handler('debug')
    mh.sort_messages = False
    mh.colour = False
    try:
        register_item(mh, options.item, options)
    except Error:
        # Ordinary tool error: traceback only when not suppressed.
        if (not options.no_tb):
            traceback.print_exc()
    except ICE as ice:
        # Internal error: traceback (unless suppressed) plus the ICE reason.
        if (not options.no_tb):
            traceback.print_exc()
        print('ICE:', ice.reason)
    # 'tree' is a module-level registry populated by register_item.
    for dirname in sorted(tree):
        print(('Showing config for %s' % dirname))
        node = tree[dirname]
        print((' Root: %s' % node.project_root))
        print((' File: %s' % ', '.join(node.config_files)))
        cfg = node.config
        if (cfg is None):
            print(' No config attached')
            continue
        print((' Enabled = %s' % cfg.enabled))
        print((' Octave  = %s' % cfg.language))
        print((' Rules   = %u' % len(cfg.style_rules)))
        print((' SConf   = %s' % cfg.style_config))
        print((' Metrics = %u' % len(cfg.enabled_metrics)))
        print((' Limits  = %s' % cfg.metric_limits))
class OptionPlotoptionsBellcurveSonificationPointgrouping(Options):
    """Point-grouping options for bell-curve series sonification.

    Defect fixed: each getter/setter pair shared one name with no decorators,
    so the setter definition silently shadowed its getter. Restored the
    @property / @<name>.setter pairs this accessor style implies.
    """

    @property
    def algorithm(self):
        """Grouping algorithm (falls back to 'minmax')."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether point grouping is enabled (falls back to True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """Timespan covered by each group (falls back to 15)."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Point property to group on (falls back to 'y')."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class OptionPlotoptionsWindbarbSonificationTracksMappingPitch(Options):
    """Pitch-mapping options for windbarb sonification tracks.

    Defect fixed: the getter/setter pairs had lost their @property /
    @<name>.setter decorators, so each setter silently replaced its getter.
    """

    @property
    def mapFunction(self):
        """Mapping function for the pitch value (no default configured)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property mapped to pitch (falls back to 'y')."""
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper pitch bound (falls back to 'c6'). Name mirrors the JS API."""
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        """Lower pitch bound (falls back to 'c2')."""
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        """Musical scale constraint (no default configured)."""
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        """Range reference the mapping operates within (falls back to 'yAxis')."""
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
def render_create_module(copr, form, profiles=2):
    """Render the module-creation page, listing each successful last build per
    package together with the RPMs that build produced."""
    rpm_components = []
    package_builds = []
    successful_builds = (pkg.last_build(successful=True) for pkg in copr.packages)
    for build in filter(None, successful_builds):
        rpm_components.append((build.package.name, build))
        for line in build.built_packages.split('\n'):
            # First whitespace-separated token on each line is the package name.
            package_builds.append((line.split()[0], build))
    return flask.render_template(
        'coprs/create_module.html',
        copr=copr,
        form=form,
        profiles=profiles,
        built_packages=package_builds,
        components_rpms=rpm_components,
    )
def random_proxy(func):
    """Decorator for request generators: attach a randomly chosen, checked
    proxy URL to each yielded request's meta (when any proxy is available).

    Defect fixed: the original contained a bare '(func)' expression statement —
    a no-op left over from a lost decorator application. Restored
    functools.wraps so the wrapper preserves the wrapped generator's metadata.
    """
    from functools import wraps

    @wraps(func)
    def wrapper(*args, **kwargs):
        for request in func(*args, **kwargs):
            proxy_df = get_checked_proxy()
            if proxy_df is not None:
                # Pick a random row from the checked-proxy dataframe.
                request._meta['proxy'] = proxy_df.at[random.choice(proxy_df.index), 'url']
            yield request

    return wrapper
def downgrade():
    """Recreate the connectiontype enum without the values added by this
    revision's upgrade, rebinding connectionconfig.connection_type to it.

    Defect fixed: the CREATE TYPE statement contained a stray "' '" that broke
    the 'snowflake' literal, producing invalid SQL.
    """
    op.execute('alter type connectiontype rename to connectiontype_old')
    op.execute(
        "create type connectiontype as enum('postgres', 'mongodb', 'mysql', "
        "'snowflake', 'redshift', 'mssql', 'mariadb', 'bigquery', 'saas', "
        "'manual', 'email', 'manual_webhook', 'timescale')"
    )
    op.execute('alter table connectionconfig alter column connection_type type connectiontype using connection_type::text::connectiontype')
    op.execute('drop type connectiontype_old')
class Migration(migrations.Migration):
    """Rename the theme 'env' marker fields and add environment colour/favicon
    options: env -> env_name, env_visible -> env_visible_in_header, plus new
    env_color and env_visible_in_favicon fields."""

    dependencies = [('admin_interface', '0010_add_localization')]
    # Rename/alter happen before the related AddField operations so the new
    # field names exist when the additions are applied.
    operations = [migrations.RenameField(model_name='theme', old_name='env', new_name='env_name'), migrations.AlterField(model_name='theme', name='env_name', field=models.CharField(blank=True, max_length=50, verbose_name='name')), migrations.AddField(model_name='theme', name='env_color', field=colorfield.fields.ColorField(blank=True, default='#E74C3C', help_text='(red: #E74C3C, orange: #E67E22, yellow: #F1C40F, green: #2ECC71, blue: #3498DB)', max_length=10, verbose_name='color')), migrations.RenameField(model_name='theme', old_name='env_visible', new_name='env_visible_in_header'), migrations.AlterField(model_name='theme', name='env_visible_in_header', field=models.BooleanField(default=True, verbose_name='visible in header (marker and name)')), migrations.AddField(model_name='theme', name='env_visible_in_favicon', field=models.BooleanField(default=True, verbose_name='visible in favicon (marker)'))]
def run_sample(target_fname, antitarget_fname, ref_probes, diploid_parx_genome):
    """Run the fix -> segment -> call pipeline on one sample's coverage files.

    Returns:
        Tuple of (copy ratios, CBS segments, called segments).
    """
    target_cov = cnvlib.read(target_fname)
    antitarget_cov = cnvlib.read(antitarget_fname)
    ratios = commands.do_fix(target_cov, antitarget_cov, ref_probes, diploid_parx_genome=diploid_parx_genome)
    segments = commands.do_segmentation(ratios, method='cbs', diploid_parx_genome=diploid_parx_genome, threshold=0.001)
    calls = commands.do_call(segments, diploid_parx_genome=diploid_parx_genome)
    return ratios, segments, calls
class MegaFiles():
    """Accessor for the 'files' MongoDB collection.

    NOTE(review): the methods are declared async but invoke the (blocking)
    pymongo driver synchronously — confirm this is acceptable for the bot's
    event loop, or that a motor-style async driver is substituted elsewhere.
    """

    def __init__(self):
        # Collection handle; MegaDB() presumably owns the client connection.
        self.files_collection = MegaDB().db['files']

    async def insert_new_files(self, file_name: str, msg_id: int, chat_id: int, url: str, file_type: str):
        """Store one indexed file document."""
        self.files_collection.insert_one({'file_name': file_name, 'msg_id': msg_id, 'chat_id': chat_id, 'url': url, 'file_type': file_type})

    async def count_files_by_url(self, url: str):
        """Count stored files with this URL.

        NOTE(review): Cursor/collection .count() is deprecated/removed in
        newer pymongo (count_documents is the replacement) — verify the
        pinned driver version still supports it.
        """
        return self.files_collection.count({'url': url})

    async def get_file_by_url(self, url: str):
        """Return a cursor over all documents matching *url*."""
        return self.files_collection.find({'url': url})

    async def get_file_by_file_id(self, file_id: str):
        """Return the single document with this ObjectId string, or None."""
        return self.files_collection.find_one({'_id': ObjectId(file_id)})

    async def get_file_by_name(self, file_name: str, row_limit: int):
        """Case-insensitive substring search on file_name, capped at *row_limit*."""
        return self.files_collection.find({'file_name': re.compile(file_name, re.IGNORECASE)}).limit(row_limit)
def test_basic_create_transaction(chain, basic_transaction):
    """chain.create_transaction must round-trip every field of a transaction."""
    field_names = ('nonce', 'gas_price', 'gas', 'to', 'value', 'data', 'v', 'r', 's')
    field_values = {name: getattr(basic_transaction, name) for name in field_names}
    rebuilt = chain.create_transaction(**field_values)
    assert rebuilt == basic_transaction
class MacOSVPNApplication(DesktopVPNApplication):
    """macOS VPN application wrapper that can infer VPN DNS servers by diffing
    the system DNS configuration before and after connecting."""

    def __init__(self, app_path, device, config):
        super().__init__(app_path, device, config)
        # Snapshot the DNS servers prior to connecting so we can diff later.
        self._dns_servers_before_connect = device['dns_tool'].known_servers()

    def dns_server_ips(self):
        """Return the VPN's DNS server IPs.

        Preference order: VPN info from the system, then (unless 'strict') the
        set difference of DNS servers after vs. before connect, then the
        superclass fallback."""
        info = self._vpn_info()
        if info is not None and info.dns_server_ips:
            return info.dns_server_ips
        if not self._config.get('strict', False):
            current_servers = set(self._device['dns_tool'].known_servers())
            L.debug('Inferring VPN DNS servers. DNS before connect: {}, DNS after connect: {}'.format(self._dns_servers_before_connect, current_servers))
            # Whatever appeared only after connecting is attributed to the VPN.
            current_servers.difference_update(self._dns_servers_before_connect)
            if current_servers:
                L.warning("Inferring VPN DNS server IPs from System Configuration. This is likely correct, but can be prevented by specifying the 'strict' keyword in the VPN configuration.")
                return list(current_servers)
        L.warning("Couldn't find DNS servers by inspecting system.")
        return super().dns_server_ips()
def test_cache_does_not_close_session_before_a_call_when_multithreading():
    """With a 1-entry session cache and concurrent calls to distinct URIs,
    evicted sessions must not be closed out from under in-flight threads."""
    # Save module-level state so it can be restored afterwards.
    session_cache_default = request._session_cache
    timeout_default = request.DEFAULT_TIMEOUT
    request._session_cache = SimpleCache(1)
    _timeout_for_testing = 0.01
    request.DEFAULT_TIMEOUT = _timeout_for_testing
    with ThreadPoolExecutor(max_workers=len(UNIQUE_URIS)) as exc:
        all_sessions = [exc.submit(_simulate_call, uri) for uri in UNIQUE_URIS]
    # After the executor drains, only the most recent session may remain cached.
    cache_data = request._session_cache._data
    assert (len(cache_data) == 1)
    (_key, cached_session) = cache_data.popitem()
    assert (cached_session == all_sessions[(- 1)].result())
    cached_session.close()
    # Restore the module-level defaults for subsequent tests.
    request._session_cache = session_cache_default
    request.DEFAULT_TIMEOUT = timeout_default
class OptionPlotoptionsColumnpyramidSonificationContexttracksMappingLowpass(Options):
    """Lowpass-filter mapping options for columnpyramid sonification context tracks."""

    # NOTE(review): sibling option classes expose accessors as properties; the
    # @property decorators appear to have been stripped from this class — confirm.
    def frequency(self) -> 'OptionPlotoptionsColumnpyramidSonificationContexttracksMappingLowpassFrequency':
        """Sub-options object for the filter frequency mapping."""
        return self._config_sub_data('frequency', OptionPlotoptionsColumnpyramidSonificationContexttracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsColumnpyramidSonificationContexttracksMappingLowpassResonance':
        """Sub-options object for the filter resonance mapping."""
        return self._config_sub_data('resonance', OptionPlotoptionsColumnpyramidSonificationContexttracksMappingLowpassResonance)
def get_arguments_for_inspection(inspection, kwargs):
    """Normalize caller-supplied kwargs to the inspection's canonical argument
    names (underscore-transformed primary names and aliases), dropping any
    entries that do not map to a valid argument."""
    name_map = {}
    for arg_obj in inspection.arguments.values():
        name_map[transform_name(arg_obj.name, to_char='_')] = arg_obj.arg
    # Aliases are applied second, so an alias wins over a colliding primary name.
    for arg_obj in inspection.arguments.values():
        for alias in arg_obj.extra_names:
            name_map[transform_name(alias, to_char='_')] = arg_obj.arg
    valid_args = {arg_obj.arg for arg_obj in inspection.arguments.values()}
    return {
        name_map.get(name, name): value
        for name, value in kwargs.items()
        if name_map.get(name, name) in valid_args
    }
class OptionPlotoptionsSplineSonificationTracksMappingHighpass(Options):
    """Highpass-filter mapping options for spline sonification tracks."""

    # NOTE(review): sibling option classes expose accessors as properties; the
    # @property decorators appear to have been stripped from this class — confirm.
    def frequency(self) -> 'OptionPlotoptionsSplineSonificationTracksMappingHighpassFrequency':
        """Sub-options object for the filter frequency mapping."""
        return self._config_sub_data('frequency', OptionPlotoptionsSplineSonificationTracksMappingHighpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsSplineSonificationTracksMappingHighpassResonance':
        """Sub-options object for the filter resonance mapping."""
        return self._config_sub_data('resonance', OptionPlotoptionsSplineSonificationTracksMappingHighpassResonance)
class ArtistInfoPane(GObject.GObject):
    """Paned side panel that shows artist/album information for the selection.

    Hosts a Gtk.Stack of info views (artist / album / echoartist), persists
    the paned divider position per browser view in GSettings, and interprets
    single/double clicks on the paned handle as open/close gestures.
    """

    # Emitted when an artist (and optionally an album title) is selected.
    __gsignals__ = {'selected': (GObject.SIGNAL_RUN_LAST, None, (GObject.TYPE_STRING, GObject.TYPE_STRING))}
    # repr()'d list of "<view_name>:<child_width>[:open_type]" strings.
    paned_pos = GObject.property(type=str)
    # Child widths at or below this threshold are treated as "closed".
    min_paned_pos = 100

    def __init__(self, button_box, stack, info_paned, source):
        """Wire the pane into the browser source, build views, caches, signals."""
        GObject.GObject.__init__(self)
        self.ds = {}
        self.view = {}
        self.source = source
        self.plugin = source.plugin
        self.shell = source.shell
        self.info_paned = info_paned
        self.current_artist = None
        self.current_album_title = None
        self.current = 'artist'
        # 0 = idle, 1 = single click on handle, 2 = double click.
        self._from_paned_handle = 0
        self.stack = stack
        self.stack.set_transition_type(Gtk.StackTransitionType.SLIDE_LEFT_RIGHT)
        stack_switcher = Gtk.StackSwitcher()
        stack_switcher.set_stack(self.stack)
        self.stack.connect('notify::visible-child-name', self.change_stack)
        button_box.pack_start(stack_switcher, False, False, 0)
        button_box.show_all()
        # URL caches: info refreshed every 30 days (discard after 180);
        # ranking refreshed weekly (lifetime 30 days).
        self.info_cache = rb.URLCache(name='info', path=os.path.join('coverart_browser', 'info'), refresh=30, discard=180)
        self.ranking_cache = rb.URLCache(name='ranking', path=os.path.join('coverart_browser', 'ranking'), refresh=7, lifetime=30)
        self.info_cache.clean()
        self.ranking_cache.clean()
        self.ds['link'] = LinksDataSource()
        self.ds['artist'] = ArtistDataSource(self.info_cache, self.ranking_cache)
        self.view['artist'] = ArtistInfoView()
        self.view['artist'].initialise(self.source, self.shell, self.plugin, self.stack, self.ds['artist'], self.ds['link'])
        self.ds['album'] = AlbumDataSource(self.info_cache, self.ranking_cache)
        self.view['album'] = AlbumInfoView()
        self.view['album'].initialise(self.source, self.shell, self.plugin, self.stack, self.ds['album'])
        self.ds['echoartist'] = EchoArtistDataSource(self.info_cache, self.ranking_cache)
        self.view['echoartist'] = EchoArtistInfoView()
        self.view['echoartist'].initialise(self.source, self.shell, self.plugin, self.stack, self.ds['echoartist'], self.ds['link'])
        self.gs = GSetting()
        self.connect_properties()
        self.connect_signals()
        # Defer initial divider placement until the UI has been allocated.
        Gdk.threads_add_timeout(GLib.PRIORITY_DEFAULT_IDLE, 50, self._change_paned_pos, self.source.viewmgr.view_name)
        self.view[self.current].activate()

    def connect_properties(self):
        """Bind the persisted paned position GSetting to the paned-pos property."""
        setting = self.gs.get_setting(self.gs.Path.PLUGIN)
        setting.bind(self.gs.PluginKey.ARTIST_INFO_PANED_POSITION, self, 'paned-pos', Gio.SettingsBindFlags.DEFAULT)

    def connect_signals(self):
        """Hook selection, paned-handle clicks, and view-change notifications."""
        self.tab_cb_ids = []
        self.connect('selected', self.select_artist)
        self.info_paned.connect('button_press_event', self.paned_button_press_callback)
        self.info_paned.connect('button-release-event', self.paned_button_release_callback)
        self.source.viewmgr.connect('new-view', self.on_view_changed)

    def on_view_changed(self, widget, view_name):
        """Re-apply the stored divider position when the browser view changes."""
        self._change_paned_pos(view_name)

    def _change_paned_pos(self, view_name):
        """Restore the saved divider position for *view_name*, if any."""
        print(self.paned_pos)
        # NOTE(review): eval() of a stored preference string — data comes from
        # trusted GSettings, but repr/ast.literal_eval would be safer.
        paned_positions = eval(self.paned_pos)
        found = None
        for viewpos in paned_positions:
            if (view_name in viewpos):
                found = viewpos
                break
        if (not found):
            return
        # Entry format: "<view_name>:<child_width>[:open_type]".
        values = found.split(':')
        child_width = int(values[1])
        open_type = 'closed'
        if (len(values) > 2):
            open_type = values[2]
        elif (child_width > 0):
            open_type = 'opened'
        if (open_type == 'closed'):
            child_width = 0
        # Paned position is measured from the left, so subtract the child width.
        calc_pos = (self.source.page.get_allocated_width() - child_width)
        self.info_paned.set_position(calc_pos)
        self.info_paned.set_visible(True)

    def _get_child_width(self):
        """Current allocated width of the info (right-hand) paned child."""
        child = self.info_paned.get_child2()
        return child.get_allocated_width()

    def paned_button_press_callback(self, widget, event):
        """Record whether the handle got a single (1) or double (2) click."""
        print('paned_button_press_callback')
        self._from_paned_handle = 1
        if (event.type == Gdk.EventType._2BUTTON_PRESS):
            self._from_paned_handle = 2
        

    def paned_button_release_callback(self, *args):
        """Apply open/close behaviour after a handle drag or double-click and
        persist the resulting position for the current view."""
        if (self._from_paned_handle == 0):
            return False
        print('paned_button_release_callback')
        # NOTE(review): eval() of a stored preference — see _change_paned_pos.
        paned_positions = eval(self.paned_pos)
        found = None
        for viewpos in paned_positions:
            if (self.source.viewmgr.view_name in viewpos):
                found = viewpos
                break
        if (not found):
            print('cannot find')
            return True
        values = found.split(':')
        child_width = (self.source.page.get_allocated_width() - self.info_paned.get_position())
        print(child_width)
        open_type = 'closed'
        print(values)
        if (len(values) > 2):
            open_type = values[2]
        # Single click while already closed: snap fully shut and keep state.
        if ((child_width <= self.min_paned_pos) and (self._from_paned_handle == 1) and (open_type == 'closed')):
            print('we are closed')
            calc_pos = self.source.page.get_allocated_width()
            self.info_paned.set_position(calc_pos)
            return False
        open_type = 'closed'
        paned_positions.remove(found)
        if (self._from_paned_handle == 2):
            # Double click toggles: reopen to the stored (or minimum) width,
            # or collapse to zero.
            new_width = child_width
            if (new_width <= self.min_paned_pos):
                if (int(values[1]) == 0):
                    new_width = (self.min_paned_pos + 1)
                else:
                    new_width = int(values[1])
                open_type = 'opened'
                child_width = new_width
            else:
                new_width = 0
            calc_pos = (self.source.page.get_allocated_width() - new_width)
            self.info_paned.set_position(calc_pos)
        if ((child_width <= self.min_paned_pos) and (self._from_paned_handle == 1)):
            # Dragged below the threshold: reopen if it was stored closed,
            # otherwise treat as closing.
            if (int(values[1]) == 0):
                child_width = (self.min_paned_pos + 1)
                open_type = 'opened'
            else:
                child_width = 0
            calc_pos = (self.source.page.get_allocated_width() - child_width)
            self.info_paned.set_position(calc_pos)
        if ((self._from_paned_handle == 1) and (child_width != 0)):
            open_type = 'opened'
        # Persist "<view>:<width>:<open_type>" back into the repr'd list.
        paned_positions.append(((((self.source.viewmgr.view_name + ':') + str(child_width)) + ':') + open_type))
        self.paned_pos = repr(paned_positions)
        self._from_paned_handle = 0
        print('End artist_info_paned_button_release_callback')

    def select_artist(self, widget, artist, album_title):
        """Load (or blank, when collapsed) the current view for the selection."""
        print(('artist %s title %s' % (artist, album_title)))
        if (self._get_child_width() > self.min_paned_pos):
            self.view[self.current].reload(artist, album_title)
        else:
            self.view[self.current].blank_view()
        self.current_album_title = album_title
        self.current_artist = artist

    def change_stack(self, widget, value):
        """Switch the active info view when the visible stack child changes."""
        child_name = self.stack.get_visible_child_name()
        if (child_name and (self.current != child_name)):
            self.view[self.current].deactivate()
            if (self._get_child_width() > self.min_paned_pos):
                self.view[child_name].activate(self.current_artist, self.current_album_title)
            else:
                self.view[child_name].blank_view()
            self.current = child_name
@pytest.mark.parametrize(
    'wlogs, expected_output',
    [
        ({}, {'wlogtypes': {}, 'wlogrecords': {}}),
        ({'X_UTME': ('CONT', None)}, {'wlogtypes': {'X_UTME': 'CONT'}, 'wlogrecords': {'X_UTME': None}}),
        ({'ZONELOG': ('DISC', {'0': 'ZONE00'})}, {'wlogtypes': {'ZONELOG': 'DISC'}, 'wlogrecords': {'ZONELOG': {'0': 'ZONE00'}}}),
    ],
)
def test_import_wlogs(wlogs, expected_output):
    """import_wlogs must split (type, record) tuples into parallel type/record dicts.

    Defect fixed: the decorator line had lost its '@pytest.mark' prefix,
    leaving a bare expression and an unparametrized test function.
    """
    assert import_wlogs(wlogs) == expected_output
def extractGaochaoTranslations(item):
    """Map a Gaochao Translations feed item onto a release message.

    Returns None for previews/unnumbered posts, a release message for known
    series, and False for anything unrecognized."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_numbering = chp or vol or frag
    if not has_numbering or 'preview' in item['title'].lower():
        return None
    if 'Otherworldly Evil Monarch' in item['tags']:
        return buildReleaseMessageWithType(item, 'Otherworldly Evil Monarch', vol, chp, frag=frag, postfix=postfix)
    return False
def _getPatternTemplate(pattern, key=None):
    """Return a date template for *pattern*, creating and caching it on miss.

    The cache key defaults to the pattern itself (upper-cased when it contains
    no '%' strftime directives). Keys containing 'EPOCH' map to DateEpoch
    variants, TAI64N keys to DateTai64n, anything else to DatePatternRegex.
    """
    if (key is None):
        key = pattern
        # NOTE(review): indentation was lost in extraction; this nesting
        # (upper-case only when the key defaulted from the pattern) follows
        # the surrounding logic — confirm against upstream.
        if ('%' not in pattern):
            key = pattern.upper()
    template = DD_patternCache.get(key)
    if (not template):
        if ('EPOCH' in key):
            if RE_EPOCH_PATTERN.search(pattern):
                # Custom epoch pattern; 'LEPOCH' selects the long form.
                template = DateEpoch(pattern=pattern, longFrm=('LEPOCH' in key))
            elif (key in ('EPOCH', '{^LN-BEG}EPOCH', '^EPOCH')):
                # Anchored variants restrict matching to line begin.
                template = DateEpoch(lineBeginOnly=(key != 'EPOCH'))
            elif (key in ('LEPOCH', '{^LN-BEG}LEPOCH', '^LEPOCH')):
                template = DateEpoch(lineBeginOnly=(key != 'LEPOCH'), longFrm=True)
        if (template is None):
            if (key in ('TAI64N', '{^LN-BEG}TAI64N', '^TAI64N')):
                template = DateTai64n(wordBegin=('start' if (key != 'TAI64N') else False))
            else:
                template = DatePatternRegex(pattern)
        DD_patternCache.set(key, template)
    return template
def init_state_resets(ns, state_inputs, trigger, scheduler, tp_scheduler, node):
    """Build the reactive pipeline that resets every state input when *trigger* fires.

    Returns an observable emitting a single regrouped dict once all state
    channels report done/reset (or an immediate empty dict when there are no
    state inputs), then staying silent (merged with rx.never()).
    """
    if (len(state_inputs) > 0):
        channels = []
        for s in state_inputs:
            # Latch the done flag: once True it stays True.
            d = s['done'].pipe(ops.scan((lambda acc, x: (x if x else acc)), False))
            # Convert inbound state msgs, number them (seed index -1), pair with
            # the latched done flag, and drop the seed unless already done.
            c = s['msg'].pipe(convert(s['space'], s['processor'], s['name'], 'states', node, direction='in'), ops.share(), ops.scan((lambda acc, x: ((acc[0] + 1), x)), ((- 1), None)), ops.start_with(((- 1), None)), ops.combine_latest(d), ops.filter((lambda x: ((x[0][0] >= 0) or x[1]))), remap_state(s['name'], node.sync, node.real_time_factor))
            # On the single trigger emission, split into done vs. needs-reset.
            (done, reset) = trigger.pipe(with_latest_from(c), ops.take(1), ops.merge(rx.never()), ops.map((lambda x: x[1])), ops.partition((lambda x: x.info.done)))
            # Run the actual reset on the thread-pool scheduler, then hop back.
            reset = reset.pipe(ops.observe_on(tp_scheduler), call_state_reset(s['state']), ops.observe_on(scheduler))
            rs = rx.merge(done.pipe(spy(('done [%s]' % s['name'].split('/')[(- 1)][:12].ljust(4)), node)), reset.pipe(spy(('reset [%s]' % s['name'].split('/')[(- 1)][:12].ljust(4)), node)))
            channels.append(rs)
        # Wait for every state channel, then regroup into one output dict.
        return rx.zip(*channels).pipe(regroup_inputs(node, is_input=False), ops.merge(rx.never()))
    else:
        return rx.never().pipe(ops.start_with(dict()))
def repr_message(msg):
    """Build a short single-line human-readable description of a chat message."""
    from ..api.chats import Group
    text = str(msg.text or '').replace('\n', ' ')
    if text:
        text += ' '
    if msg.sender == msg.bot.self:
        template = ' {self.receiver.name}'
    elif isinstance(msg.chat, Group) and msg.member != msg.receiver:
        # Group message from another member: show both sender and member.
        template = '{self.sender.name} {self.member.name}'
    else:
        template = '{self.sender.name}'
    template += ' : {text}({self.type})'
    return template.format(self=msg, text=text)
class TD3BCTest(absltest.TestCase):
    """Smoke test: the TD3-BC learner should complete one SGD step on fake data."""

    def test_td3(self):
        env = fakes.ContinuousEnvironment(action_dim=2, observation_dim=3, episode_length=10, bounded=True)
        env_spec = specs.make_environment_spec(env)
        networks = td3.make_networks(env_spec, (10,), (10,))
        iterator = fakes.transition_dataset(env).batch(10).as_numpy_iterator()
        learner = td3_bc.TD3BCLearner(
            policy_network=networks['policy'],
            critic_network=networks['critic'],
            random_key=jax.random.PRNGKey(0),
            iterator=iterator,
        )
        learner.step()
@_trigger_tests.post(schema=bodhi.server.schemas.TriggerTestsSchema(), validators=(colander_body_validator, validate_update_id, validate_qa_acls), permission='edit', renderer='json', error_handler=bodhi.server.services.errors.json_handler)
def trigger_tests(request):
    """Re-trigger CI tests for an update that is currently in testing.

    Defect fixed: the service registration was a bare expression statement —
    the decorator's '@' had been lost, so the returned decorator was never
    applied and the view was never wired to the _trigger_tests service.
    """
    update = request.validated['update']
    if (update.status != UpdateStatus.testing):
        log.error("Can't trigger tests for update: Update is not in testing status")
        request.errors.add('body', 'request', 'Update is not in testing status')
    elif (update.content_type == ContentType.rpm):
        # Only RPM updates publish a ready-for-testing re-trigger message.
        message = update_schemas.UpdateReadyForTestingV3.from_dict(message=update._build_group_test_message(agent=request.identity.name, retrigger=True))
        notifications.publish(message)
    return dict(update=update)
class BaseTestExecTimeout(TestCase):
    """Shared behaviour tests for ExecTimeout implementations.

    Subclasses override EXEC_TIMEOUT_CLASS; the base class itself is skipped.

    Defect fixed: setUpClass and slow_function take ``cls`` but had lost their
    @classmethod decorators — unittest invokes setUpClass() with no arguments,
    which raises TypeError on an undecorated function.
    """

    EXEC_TIMEOUT_CLASS = BaseExecTimeout

    @classmethod
    def setUpClass(cls):
        # Only run these tests on concrete subclasses.
        if (cls is BaseTestExecTimeout):
            raise unittest.SkipTest("Skip BaseTest tests, it's a base class")

    def test_cancel_by_timeout(self):
        """A workload slower than the timeout is cancelled within [timeout, workload)."""
        slow_function_time = 0.4
        timeout = 0.1
        assert (timeout < slow_function_time)
        with timeit_context() as timeit_result:
            with pytest.raises(TimeoutException):
                with self.EXEC_TIMEOUT_CLASS(timeout) as exec_timeout:
                    self.slow_function(slow_function_time)
        assert exec_timeout.is_cancelled_by_timeout()
        assert ((timeit_result.time_passed >= timeout) and (timeit_result.time_passed < slow_function_time))

    def test_limit_is_0_do_not_limit_execution(self):
        """A timeout of 0 disables the limit entirely."""
        slow_function_time = 0.1
        timeout = 0
        assert (timeout < slow_function_time)
        with timeit_context() as timeit_result:
            with self.EXEC_TIMEOUT_CLASS(timeout) as exec_timeout:
                self.slow_function(slow_function_time)
        assert (not exec_timeout.is_cancelled_by_timeout())
        assert (timeit_result.time_passed >= slow_function_time)

    def test_timeout_bigger_than_execution_time(self):
        """A generous timeout lets the workload finish uncancelled."""
        slow_function_time = 0.1
        timeout = 1
        assert (timeout > slow_function_time)
        with timeit_context() as timeit_result:
            with self.EXEC_TIMEOUT_CLASS(timeout) as exec_timeout:
                self.slow_function(slow_function_time)
        assert (not exec_timeout.is_cancelled_by_timeout())
        assert ((timeit_result.time_passed <= timeout) and (timeit_result.time_passed >= slow_function_time))

    @classmethod
    def slow_function(cls, sleep):
        """Sleep helper used as the cancellable workload."""
        time.sleep(sleep)
class EventLoop():
    """Bridge event loop: pumps outbound payloads to the connection, routes
    inbound responses to waiting requesters, dispatches callbacks, and
    manages worker task threads.

    NOTE(review): these are class-level attributes shared by every instance —
    the design appears to assume a single EventLoop singleton; confirm.
    """

    active = True
    queue = Queue()                       # wake-up tokens: 'send' / 'exit'
    freeable = []                         # foreign refs queued for a batched free
    callbackExecutor = EventExecutorThread()
    callbacks = WeakValueDictionary()     # active callbacks, weakly held
    threads = []                          # [TaskState, handler, Thread] triples
    outbound = []                         # payloads awaiting a write
    requests = {}                         # request id -> [Event, timeout]
    responses = {}                        # request id -> (payload, Barrier)

    def __init__(self):
        connection.start()
        self.callbackExecutor.start()
        self.pyi = pyi.PyInterface(self, config.executor)

    def stop(self):
        """Stop the underlying connection (the loop flag is left untouched)."""
        connection.stop()

    def newTaskThread(self, handler, *args):
        """Create (but do not start) a daemon thread running *handler*."""
        state = TaskState()
        t = threading.Thread(target=handler, args=(state, *args), daemon=True)
        self.threads.append([state, handler, t])
        return t

    def startThread(self, method):
        """Start the registered thread for *method*, creating one on demand."""
        for (state, handler, thread) in self.threads:
            if (method == handler):
                thread.start()
                return
        t = self.newTaskThread(method)
        t.start()

    def stopThread(self, method):
        """Politely ask the thread(s) running *method* to stop via their state."""
        for (state, handler, thread) in self.threads:
            if (method == handler):
                state.stopping = True

    def abortThread(self, method, killAfter=0.5):
        """Request a stop, then forcibly terminate if the thread stays alive.

        NOTE(review): 'time.time() < killTime' terminates *during* the grace
        period rather than after it expires — this looks inverted ('>'); confirm.
        NOTE(review): threading.Thread has no terminate(); presumably a custom
        thread subclass is expected here — verify.
        """
        for (state, handler, thread) in self.threads:
            if (handler == method):
                state.stopping = True
                killTime = (time.time() + killAfter)
                while thread.is_alive():
                    time.sleep(0.2)
                    if (time.time() < killTime):
                        thread.terminate()
        self.threads = [x for x in self.threads if (x[1] != method)]

    def terminateThread(self, method):
        """Forcibly terminate the thread(s) running *method* (see abortThread note)."""
        for (state, handler, thread) in self.threads:
            if (handler == method):
                thread.terminate()
        self.threads = [x for x in self.threads if (x[1] != method)]

    def queue_request(self, request_id, payload, timeout=None):
        """Queue *payload* and return an Event set once the response arrives."""
        self.outbound.append(payload)
        lock = threading.Event()
        self.requests[request_id] = [lock, timeout]
        self.queue.put('send')
        return lock

    def queue_payload(self, payload):
        """Queue *payload* for sending with no response expected."""
        self.outbound.append(payload)
        self.queue.put('send')

    def await_response(self, request_id, timeout=None):
        """Register interest in *request_id* and return the Event to wait on."""
        lock = threading.Event()
        self.requests[request_id] = [lock, timeout]
        self.queue.put('send')
        return lock

    def on_exit(self):
        """Drain active callbacks, then stop the executor and signal exit."""
        if len(self.callbacks):
            config.debug('cannot exit because active callback', self.callbacks)
        while (len(self.callbacks) and connection.is_alive()):
            time.sleep(0.4)
        time.sleep(0.4)
        self.callbackExecutor.running = False
        self.queue.put('exit')

    def loop(self):
        """Main pump: write queued payloads, read inbound, dispatch replies."""
        while self.active:
            # Block until something requests a send (or exit).
            self.queue.get(block=True)
            self.queue.empty()
            connection.writeAll(self.outbound)
            self.outbound = []
            # Drop bookkeeping for finished task threads.
            self.threads = [x for x in self.threads if x[2].is_alive()]
            if (len(self.freeable) > 40):
                # NOTE(review): 'r' is only bound by a previous iteration of the
                # inbound loop below — it is unbound (NameError) if the freeable
                # threshold is reached before any inbound message; confirm intent.
                self.queue_payload({'r': r, 'action': 'free', 'ffid': '', 'args': self.freeable})
                self.freeable = []
            inbounds = connection.readAll()
            for inbound in inbounds:
                r = inbound['r']
                cbid = (inbound['cb'] if ('cb' in inbound) else None)
                if (('c' in inbound) and (inbound['c'] == 'pyi')):
                    j = inbound
                    self.callbackExecutor.add_job(r, cbid, self.pyi.inbound, inbound)
                if (r in self.requests):
                    (lock, timeout) = self.requests[r]
                    # Rendezvous with the waiter so it reads the response before
                    # the loop proceeds; 5s guards against an abandoned waiter.
                    barrier = threading.Barrier(2, timeout=5)
                    self.responses[r] = (inbound, barrier)
                    del self.requests[r]
                    lock.set()
                    barrier.wait()
def output(outputable):
    """Emit *outputable* using the globally selected output format
    (html/htmlembedded, json, text, or xml as the fallback)."""
    selected = format.get_selected()
    if selected in ('html', 'htmlembedded'):
        outputable.output_html()
    elif selected == 'json':
        outputable.output_json()
    elif selected == 'text':
        outputable.output_text()
    else:
        outputable.output_xml()
class _ComputeDisksRepository(repository_mixins.AggregatedListQueryMixin, repository_mixins.ListQueryMixin, _base_repository.GCPRepository):
    """Repository for Compute Engine disks, supporting both aggregated and
    per-zone list queries."""

    def __init__(self, **kwargs):
        """Forward construction to GCPRepository with component='disks'."""
        # Modernized: zero-argument super() (the file targets Python 3 —
        # f-strings are used elsewhere) instead of the Python-2-style form.
        super().__init__(component='disks', **kwargs)

    def list(self, resource, zone, **kwargs):
        """List disks in *zone*, delegating explicitly to the non-aggregated
        ListQueryMixin (bypassing the aggregated mixin earlier in the MRO)."""
        kwargs['zone'] = zone
        return repository_mixins.ListQueryMixin.list(self, resource, **kwargs)
@pytest.mark.asyncio
@pytest.mark.workspace_host
class TestGetClient():
    """API tests for GET /clients/{id}.

    NOTE(review): the original decorators ('.asyncio', '.workspace_host',
    '.authenticated_admin') and the parameter annotations (e.g.
    'test_client_api: test_data: TestData', which is a syntax error) were
    mangled during extraction. They are reconstructed here following the
    surrounding conventions — confirm fixture names/types against conftest.
    """

    async def test_unauthorized(self, unauthorized_api_assertions: HTTPXResponseAssertion, test_client_api: httpx.AsyncClient, test_data: TestData):
        client = test_data['clients']['default_tenant']
        response = (await test_client_api.get(f'/clients/{client.id}'))
        unauthorized_api_assertions(response)

    @pytest.mark.authenticated_admin
    async def test_not_existing(self, test_client_api: httpx.AsyncClient, not_existing_uuid: uuid.UUID):
        response = (await test_client_api.get(f'/clients/{not_existing_uuid}'))
        assert (response.status_code == status.HTTP_404_NOT_FOUND)

    @pytest.mark.authenticated_admin
    async def test_valid(self, test_client_api: httpx.AsyncClient, test_data: TestData):
        client = test_data['clients']['default_tenant']
        response = (await test_client_api.get(f'/clients/{client.id}'))
        assert (response.status_code == status.HTTP_200_OK)
        json = response.json()
        # The encryption key must never be exposed in the API response.
        assert (json['encrypt_jwk'] in [None, ''])
        assert ('authorization_code_lifetime_seconds' in json)
        assert ('access_id_token_lifetime_seconds' in json)
        assert ('refresh_token_lifetime_seconds' in json)
def test_oef_serialization_query():
    """An OEF search message carrying a Query must survive an encode/decode round-trip."""
    search_query = Query([Constraint('foo', ConstraintType('==', 'bar'))], model=None)
    original = OefSearchMessage(
        performative=OefSearchMessage.Performative.SEARCH_SERVICES,
        dialogue_reference=(str(1), ''),
        query=search_query,
    )
    encoded = OefSearchMessage.serializer.encode(original)
    assert len(encoded) > 0
    decoded = OefSearchMessage.serializer.decode(encoded)
    assert decoded == original
# NOTE(review): the decorator line was mangled to '_os(*metadata.platforms)';
# restored to the conventional platform-guard form — confirm against the
# project's RTA common helpers.
@common.requires_os(*metadata.platforms)
def main():
    """Clear the security, application and system Windows event logs via wevtutil."""
    common.log('Clearing Windows Event Logs')
    common.log('WARNING - About to clear logs from Windows Event Viewer', log_type='!')
    time.sleep(3)  # brief pause so an operator can abort
    wevtutil = 'wevtutil.exe'
    for log in ['security', 'application', 'system']:
        common.execute([wevtutil, 'cl', log])
@pytest.mark.parametrize('_input_vars', [['var1', 'var2', 'var2', 'var3'], [0, 1, 1, 2]])
def test_raises_error_when_duplicated_var_names(_input_vars):
    """_check_variables_input_value must reject lists containing duplicates.

    Defect fixed: the decorator line had lost its '@pytest.mark' prefix,
    leaving a bare expression and an unparametrized test function.
    """
    with pytest.raises(ValueError) as record:
        assert _check_variables_input_value(_input_vars)
    msg = 'The list entered in `variables` contains duplicated variable names.'
    assert (str(record.value) == msg)
class OptionPlotoptionsPackedbubbleStatesSelectHalo(Options):
    """Halo options for selected packed-bubble points.

    Defect fixed: getter/setter pairs shared names with no decorators, so each
    setter silently shadowed its getter; restored @property / @<name>.setter.
    """

    @property
    def attributes(self):
        """SVG attributes for the halo (no default configured)."""
        return self._config_get(None)

    @attributes.setter
    def attributes(self, value: Any):
        self._config(value, js_type=False)

    @property
    def opacity(self):
        """Halo opacity (falls back to 0.25)."""
        return self._config_get(0.25)

    @opacity.setter
    def opacity(self, num: float):
        self._config(num, js_type=False)

    @property
    def size(self):
        """Halo size in pixels beyond the point (falls back to 10)."""
        return self._config_get(10)

    @size.setter
    def size(self, num: float):
        self._config(num, js_type=False)
def pre_process_repo_url(chroot, repo_url):
    """Expand a repository URL for one concrete chroot.

    'copr://owner/project' URLs are rewritten to the backend results URL;
    otherwise any 'priority' query parameter is stripped. Finally the
    '$chroot' and '$distname' placeholders are substituted.
    """
    parsed = urlparse(repo_url)
    query = parse_qs(parsed.query)
    if parsed.scheme == 'copr':
        owner = parsed.netloc
        project = parsed.path.split('/')[1]
        backend_base = flask.current_app.config['BACKEND_BASE_URL']
        repo_url = '/'.join([backend_base, 'results', owner, project, chroot]) + '/'
    elif 'priority' in query:
        query.pop('priority')
        parsed = parsed._replace(query=urlencode(query, doseq=True))
        repo_url = urlunparse(parsed)
    repo_url = repo_url.replace('$chroot', chroot)
    # Distname is the chroot with its version and arch suffixes removed.
    repo_url = repo_url.replace('$distname', chroot.rsplit('-', 2)[0])
    return repo_url
def run_migrations_online():
    """Run Alembic migrations in 'online' mode against a live DB connection."""
    engine = create_engine(get_url())
    # Idiom: Engine.connect() is a context manager; it closes the connection
    # even if migrations raise, replacing the original try/finally block.
    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata, version_table='alembic_ziggurat_foundations_version', transaction_per_migration=True)
        with context.begin_transaction():
            context.run_migrations()
class OptunaSweeper(Sweeper):
    """Hydra sweeper plugin backed by Optuna; a thin facade over OptunaSweeperImpl."""

    def __init__(self, sampler: SamplerConfig, direction: Any, storage: Optional[Any], study_name: Optional[str], n_trials: int, n_jobs: int, max_failure_rate: float, search_space: Optional[DictConfig], custom_search_space: Optional[str], params: Optional[DictConfig]) -> None:
        # Deferred import — presumably so plugin discovery does not require
        # the implementation's dependencies; confirm against the plugin docs.
        from ._impl import OptunaSweeperImpl
        self.sweeper = OptunaSweeperImpl(sampler, direction, storage, study_name, n_trials, n_jobs, max_failure_rate, search_space, custom_search_space, params)

    def setup(self, *, hydra_context: HydraContext, task_function: TaskFunction, config: DictConfig) -> None:
        """Forward the Hydra context, task function and config to the implementation."""
        self.sweeper.setup(hydra_context=hydra_context, task_function=task_function, config=config)

    def sweep(self, arguments: List[str]) -> None:
        """Run the sweep over the given CLI override arguments."""
        return self.sweeper.sweep(arguments)
class OptionPlotoptionsBarLabelStyle(Options):
    """Options wrapper for `plotOptions.bar.label.style`.

    NOTE(review): each name is defined twice (getter-style, then
    setter-style); the later ``def`` shadows the earlier one.  The
    ``@property`` / setter decorators were presumably stripped from this
    generated source -- confirm against the original module.
    """
    def fontSize(self):
        # Default font size: '0.8em'.
        return self._config_get('0.8em')
    def fontSize(self, num: float):
        self._config(num, js_type=False)
    def fontWeight(self):
        # Default font weight: 'bold'.
        return self._config_get('bold')
    def fontWeight(self, text: str):
        self._config(text, js_type=False)
class TestPrecisionTopK(BaseTopkRecsysTest):
    """Test suite for the Precision@k recommender-system metric."""
    # Display name of the metric under test.
    name: ClassVar = 'Precision (top-k)'
    # Column header expected in the rendered report.
    header: str = 'Precision'
    def get_metric(self, k, min_rel_score, no_feedback_users) -> BaseTopKRecsysType:
        """Build the metric instance exercised by the shared base-class tests."""
        return PrecisionTopKMetric(k=k, min_rel_score=min_rel_score, no_feedback_users=no_feedback_users)
def convert_to_jstree_node(node: FileTreeNode):
    """Translate a FileTreeNode into its jsTree dictionary representation.

    Virtual nodes render as directories, un-analyzed nodes get their own
    representation, everything else is a regular file.  Children are
    attached recursively when present.
    """
    if node.virtual:
        build = _get_directory_jstree_node
    elif node.not_analyzed:
        build = _get_not_analyzed_jstree_node
    else:
        build = _get_file_jstree_node
    result = build(node)
    if node.has_children:
        result['children'] = _get_jstree_child_nodes(node)
    return result
def usage_doc_files() -> List[str]:
    """Return the file names of all usage docs (``*.asciidoc``) if present.

    Looks in ``docs/fields/usage`` relative to this module; returns an
    empty list when the directory does not exist (e.g. an installed
    package without the docs tree).
    """
    usage_docs_dir: str = os.path.join(os.path.dirname(__file__), '../../docs/fields/usage')
    # Annotation widened from PosixPath: Path() yields WindowsPath on Windows.
    usage_docs_path: pathlib.Path = pathlib.Path(usage_docs_dir)
    if usage_docs_path.is_dir():
        return [x.name for x in usage_docs_path.glob('*.asciidoc') if x.is_file()]
    return []
# NOTE(review): the line below looks like the argument list of a stripped
# decorator (presumably ``@pytest.fixture(autouse=True, scope='function')``);
# as written it is not valid Python -- confirm against the original.
(autouse=True, scope='function')
def privacy_request_complete_email_notification_disabled(db):
    """Fixture: disable request-completion email notifications for one test.

    Saves the current flag, persists the disabled config, yields to the
    test, then restores and re-persists the original value.
    """
    original_value = CONFIG.notifications.send_request_completion_notification
    CONFIG.notifications.send_request_completion_notification = False
    ApplicationConfig.update_config_set(db, CONFIG)
    db.commit()
    (yield)
    # Teardown: restore the pre-test setting.
    CONFIG.notifications.send_request_completion_notification = original_value
    ApplicationConfig.update_config_set(db, CONFIG)
    db.commit()
class bsn_vlan_counter_stats_reply(bsn_stats_reply):
    """Auto-generated LOXI message: Big Switch experimenter stats reply
    carrying per-VLAN counter entries (OpenFlow 1.4 wire protocol).

    NOTE(review): ``pack`` pads with ``('\\x00' * 4)`` (a ``str``) and joins
    with ``''.join`` -- Python 2 byte-string idioms; under Python 3
    ``struct.pack`` returns ``bytes`` and this would fail.  ``unpack`` takes
    no ``self`` and appears to have lost its ``@staticmethod`` decorator
    during extraction -- confirm against the generated original.
    """
    version = 5             # OpenFlow 1.4 wire version
    type = 19               # message type: stats reply
    stats_type = 65535      # experimenter stats
    experimenter = 6035143  # Big Switch Networks experimenter id
    subtype = 9
    def __init__(self, xid=None, flags=None, entries=None):
        # Default each field explicitly so instances are fully populated
        # even when constructed with no arguments.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return
    def pack(self):
        """Serialize to wire format; the 16-bit length is back-patched."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for total length
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # pad
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)  # patch real length into slot 2
        return ''.join(packed)
    def unpack(reader):
        """Deserialize from ``reader``; asserts every fixed header field."""
        obj = bsn_vlan_counter_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this message's declared length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 9)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.bsn_vlan_counter_stats_entry.unpack)
        return obj
    def __eq__(self, other):
        # Field-wise equality; exact class identity required.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer ``q``."""
        q.text('bsn_vlan_counter_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
def concatenate_fastq_files(merged_fastq, fastq_files, bufsize=10240, overwrite=False, verbose=True):
    """Concatenate a list of fastq files into a single output file.

    Whether the output is gzipped is decided by ``is_gzipped_file`` on the
    output name; each input may independently be gzipped or plain and is
    transparently re/de-compressed as needed.  Data is first written to
    ``<merged_fastq>.part`` and renamed into place only on success.

    Arguments:
      merged_fastq: path of the output file.
      fastq_files: ordered list of input fastq file paths.
      bufsize: chunk size in bytes for the copy loop (bug fix: this
        argument was previously ignored and 10240 was hard-coded).
      overwrite: allow replacing an existing output file.
      verbose: print progress messages.

    Raises:
      OSError: if the target exists (and not ``overwrite``) or an input
        file is missing.
    """
    if verbose:
        print(("Creating merged fastq file '%s'" % merged_fastq))
    if (os.path.exists(merged_fastq) and (not overwrite)):
        raise OSError(("Target file '%s' already exists, stopping" % merged_fastq))
    merged_fastq_part = (merged_fastq + '.part')
    if is_gzipped_file(merged_fastq):
        if is_gzipped_file(fastq_files[0]):
            # gzip -> gzip: byte-copy the first file, then append further
            # gzip members (multi-member gzip streams are valid).
            if verbose:
                print(('Copying %s' % fastq_files[0]))
            shutil.copy(fastq_files[0], merged_fastq_part)
            first_file = 1
            fq_merged = gzip.GzipFile(merged_fastq_part, 'ab')
        else:
            # Plain first input: compress everything from scratch.
            first_file = 0
            fq_merged = gzip.GzipFile(merged_fastq_part, 'wb')
    elif (not is_gzipped_file(fastq_files[0])):
        # plain -> plain: byte-copy the first file, then append raw data.
        if verbose:
            print(('Copying %s' % fastq_files[0]))
        shutil.copy(fastq_files[0], merged_fastq_part)
        first_file = 1
        fq_merged = io.open(merged_fastq_part, 'ab')
    else:
        # Gzipped first input but plain output: the first file must go
        # through the decompressing copy loop as well.  Bug fix: the
        # original set first_file = 1 here, silently dropping the first
        # file's records from the merged output.
        first_file = 0
        fq_merged = io.open(merged_fastq_part, 'wb')
    for fastq in fastq_files[first_file:]:
        if verbose:
            print(('Adding records from %s' % fastq))
        if (not os.path.exists(fastq)):
            raise OSError(("'%s' not found, stopping" % fastq))
        # Open transparently: gzipped inputs are decompressed on the fly.
        if (not is_gzipped_file(fastq)):
            fq = io.open(fastq, 'rb')
        else:
            fq = gzip.GzipFile(fastq, 'rb')
        while True:
            data = fq.read(bufsize)  # honour the caller-supplied buffer size
            if (not data):
                break
            fq_merged.write(data)
        fq.close()
    fq_merged.close()
    # Move the completed .part file into place (atomic on POSIX).
    os.rename(merged_fastq_part, merged_fastq)
class TaskOutputMetaData():
    """Describes one task output: its position, label, and type."""
    def __init__(self, output_sequence, output_label, output_type: elmdpenum.TaskOutputMetaDataTypes):
        # Attribute names map 1:1 onto the keys emitted by to_dict().
        self.output_sequence = output_sequence
        self.output_label = output_label
        self.output_type = output_type
    def to_dict(self):
        """Serialize to a plain dict (keys mirror the attribute names)."""
        keys = ('output_sequence', 'output_label', 'output_type')
        return {key: getattr(self, key) for key in keys}
class LifeEvent(AbstractCrudObject):
    """Graph API 'LifeEvent' node wrapper (auto-generated SDK style).

    NOTE(review): ``_get_field_enum_info`` takes ``cls`` but carries no
    ``@classmethod`` decorator -- presumably stripped during extraction;
    confirm against the generated original.
    """
    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isLifeEvent = True
        super(LifeEvent, self).__init__(fbid, parent_id, api)
    class Field(AbstractObject.Field):
        # Graph API field names exposed by this node.
        description = 'description'
        end_time = 'end_time'
        field_from = 'from'  # 'from' is a Python keyword, hence the alias
        id = 'id'
        is_hidden = 'is_hidden'
        start_time = 'start_time'
        title = 'title'
        updated_time = 'updated_time'
    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET this node.

        Returns the FacebookRequest itself when batched or pending,
        otherwise executes the request and returns the response.
        """
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=LifeEvent, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
    def get_likes(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET the /likes edge of this node, yielding Profile objects.

        Same batch/pending/execute semantics as ``api_get``.
        """
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.profile import Profile
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/likes', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Profile, api_type='EDGE', response_parser=ObjectParser(target_class=Profile, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
    # Graph API type of each field, consumed by the SDK's type checker.
    _field_types = {'description': 'string', 'end_time': 'datetime', 'from': 'Page', 'id': 'string', 'is_hidden': 'bool', 'start_time': 'datetime', 'title': 'string', 'updated_time': 'datetime'}
    def _get_field_enum_info(cls):
        """Return per-field enum metadata (this node declares none)."""
        field_enum_info = {}
        return field_enum_info
def f2p_word(word, max_word_size=15, cutoff=3):
    """Convert one Finglish word to Persian candidates.

    Returns a list of (persian, confidence) pairs sorted best-first, at
    most ``cutoff`` long.  An exact dictionary hit short-circuits with
    confidence 1.0; empty input yields []; words longer than
    ``max_word_size`` are returned unchanged.
    """
    original = word
    lowered = word.lower()
    exact = dictionary.get(lowered)
    if exact:
        return [(exact, 1.0)]
    if lowered == '':
        return []
    if len(lowered) > max_word_size:
        return [(original, 1.0)]
    # Gather candidates from every spelling variation of the word.
    candidates = [
        pair
        for variant in variations(lowered)
        for pair in f2p_word_internal(variant, original)
    ]
    candidates.sort(key=lambda item: item[1], reverse=True)
    return candidates[:cutoff]
# NOTE(review): the bare call below looks like a stripped registry decorator
# (presumably something like ``@DATASETS.register_module()``) -- confirm.
_module()
class NaiveVOCODERDataset(NaiveDataset):
    """Vocoder training dataset yielding (audio, pitches) pairs with
    optional random pitch/loudness augmentation and fixed-length cropping."""
    # Pipelines consumed by the Naive* dataset machinery.
    processing_pipeline = [dict(type='PickKeys', keys=['path', 'audio', 'pitches', 'sampling_rate'])]
    collating_pipeline = [dict(type='ListToDict'), dict(type='PadStack', keys=[('audio', (- 1)), ('pitches', (- 1))])]
    def __init__(self, path='dataset', segment_size: Optional[int]=16384, hop_length: int=512, sampling_rate: int=44100, pitch_shift: Optional[list[int]]=None, loudness_shift: Optional[list[int]]=None):
        super().__init__(path)
        self.segment_length = segment_size
        self.hop_length = hop_length
        self.sampling_rate = sampling_rate
        # [low, high] augmentation ranges; None disables the augmentation.
        self.pitch_shift = pitch_shift
        self.loudness_shift = loudness_shift
    def __getitem__(self, idx):
        x = super().__getitem__(idx)
        assert (x['sampling_rate'] == self.sampling_rate)
        y = x['audio']
        pitches = x['pitches']
        if (self.pitch_shift is not None):
            # Pitch-shift via resampling: a shift of `shift` semitones
            # scales duration by 2**(shift/12); pretend the audio was at
            # `orig_sr` and resample back down to the nominal rate.
            shift = ((np.random.random() * (self.pitch_shift[1] - self.pitch_shift[0])) + self.pitch_shift[0])
            duration_shift = (2 ** (shift / 12))
            orig_sr = round((self.sampling_rate * duration_shift))
            orig_sr = (orig_sr - (orig_sr % 100))  # snap to a multiple of 100
            y = torchaudio.functional.resample(torch.from_numpy(y).float(), orig_freq=orig_sr, new_freq=self.sampling_rate).numpy()
            pitches *= (2 ** (shift / 12))
            # Stretch the pitch track to match the resampled audio length.
            pitches = np.interp(np.linspace(0, 1, y.shape[(- 1)]), np.linspace(0, 1, len(pitches)), pitches)
        if ((self.segment_length is not None) and (y.shape[(- 1)] > self.segment_length)):
            # Random fixed-length crop applied to audio and pitches alike.
            # NOTE(review): the same sample-index window is used for both,
            # which assumes one pitch value per audio sample -- confirm the
            # upstream pitch-track resolution.
            start = np.random.randint(0, ((y.shape[(- 1)] - self.segment_length) + 1))
            y = y[start:(start + self.segment_length)]
            pitches = pitches[start:(start + self.segment_length)]
        if (self.loudness_shift is not None):
            # Rescale so the peak amplitude lands in the requested range.
            new_amplitude = ((np.random.random() * (self.loudness_shift[1] - self.loudness_shift[0])) + self.loudness_shift[0])
            max_amplitude = np.max(np.abs(y))
            y = ((y / (max_amplitude + 1e-08)) * new_amplitude)
        # Leading channel dimension added for collation.
        return {'audio': y[None], 'pitches': pitches[None]}
# NOTE(review): the two attribute-style lines below look like stripped
# pytest markers (presumably ``@pytest.mark.integration_saas`` and
# ``@pytest.mark.integration_mailchimp_transactional``); as written they
# are not valid Python -- confirm against the original.
.integration_saas
.integration_mailchimp_transactional
def test_build_consent_dataset_graph(postgres_example_test_dataset_config_read_access, mysql_example_test_dataset_config, mailchimp_transactional_dataset_config):
    """Of the three configured datasets, only the Mailchimp Transactional
    one should contribute a node to the consent dataset graph."""
    dataset_graph: DatasetGraph = build_consent_dataset_graph([postgres_example_test_dataset_config_read_access, mysql_example_test_dataset_config, mailchimp_transactional_dataset_config])
    assert (len(dataset_graph.nodes.keys()) == 1)
    assert ([col_addr.value for col_addr in dataset_graph.nodes.keys()] == ['mailchimp_transactional_instance:mailchimp_transactional_instance'])
class OptionPlotoptionsBoxplotSonificationContexttracksMappingVolume(Options):
    """Options wrapper for
    `plotOptions.boxplot.sonification.contextTracks.mapping.volume`.

    NOTE(review): every name is defined twice (getter-style, then
    setter-style); the ``@property`` / setter decorators were presumably
    stripped from this generated source -- the later ``def`` shadows the
    earlier one.  All defaults are None (unset).
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class AmdGpuSensor(BaseSensor):
    """Sensor reporting the amdgpu driver's GPU busy percentage."""
    name = 'amdgpu'
    # NOTE(review): garbled wording kept verbatim -- it is a gettext msgid,
    # and rewording it would orphan existing translations.
    desc = _('If CPU isnot AMD, this your eGPU')
    def get_value(self, sensor):
        """Return 'NN%' for the 'amdgpu' sensor; None for any other name."""
        if (sensor == 'amdgpu'):
            return '{:02.0f}%'.format(self._fetch_gpu())
    def _fetch_gpu(self):
        """Read GPU utilisation from sysfs and return it as an int.

        Fix: reads the file directly instead of spawning a ``cat``
        subprocess on every poll (int() tolerates the trailing newline).
        """
        with open('/sys/class/drm/card0/device/gpu_busy_percent') as f:
            return int(f.read())
class TestMultiHeadAttentionConverter(AITTestCase):
    """Converter tests for torch.nn.MultiheadAttention kept as a leaf module.

    NOTE(review): 'attenytion' in the first test name is a typo; kept
    verbatim so existing test IDs remain stable.
    """
    def test_multihead_attention_cross_attenytion(self):
        """Cross-attention: a single-token query attends over the full,
        layer-normalized sequence."""
        class TestModule(torch.nn.Module):
            def __init__(self, dim, nheads):
                super().__init__()
                self.attn = torch.nn.modules.activation.MultiheadAttention(embed_dim=dim, num_heads=nheads, batch_first=True)
            def forward(self, x):
                # `dim` is closed over from the enclosing test function.
                layer_norm = torch.nn.functional.layer_norm(x, (dim,), eps=1e-05)
                getitem = layer_norm[(slice(None, None, None), 0)]  # first token
                unsqueeze = torch.unsqueeze(getitem, dim=1)
                return self.attn(query=unsqueeze, key=layer_norm, value=layer_norm)
        (seq_len_q, dim, nheads) = (4, 256, 16)
        model = TestModule(dim, nheads).half().cuda()
        input_q = torch.randn(128, seq_len_q, dim).cuda().half()
        self.run_test(model, [input_q], expected_ops={torch.nn.modules.activation.MultiheadAttention, acc_ops.layer_norm, acc_ops.unsqueeze, acc_ops.getitem}, leaf_module=torch.nn.MultiheadAttention)
    def test_multihead_attention(self):
        """Plain self-attention with query = key = value."""
        class TestModule(torch.nn.Module):
            def __init__(self, dim, nheads):
                super().__init__()
                self.attn = torch.nn.MultiheadAttention(embed_dim=dim, num_heads=nheads, batch_first=True)
            def forward(self, x):
                return self.attn(query=x, key=x, value=x)
        batch_size = 2
        seqlen = 4
        dim = 512
        num_heads = 8
        x = torch.ones(batch_size, seqlen, dim).cuda().half()
        model = TestModule(dim, num_heads).eval().half().cuda()
        self.run_test(model, [x], expected_ops={torch.nn.MultiheadAttention}, leaf_module=torch.nn.MultiheadAttention)
class BaseClassConditionAwareRefinement():
    """Shared helpers for AST refinements that reason about conditions which
    compare an expression with constants (switch-candidate detection)."""
    def __init__(self, asforest: AbstractSyntaxForest, options: RestructuringOptions):
        self.asforest: AbstractSyntaxForest = asforest
        self.condition_handler: ConditionHandler = asforest.condition_handler
        self.options: RestructuringOptions = options
    def _get_constant_equality_check_expressions_and_conditions(self, condition: LogicCondition) -> Iterator[Tuple[(ExpressionUsages, LogicCondition)]]:
        """Yield (expression, sub-condition) pairs where the sub-condition is
        a disjunction of equality checks of that one expression against
        constants.  For a conjunction each operand is inspected separately;
        otherwise the condition itself is inspected."""
        if condition.is_conjunction:
            for disjunction in condition.operands:
                if (expression := self._get_const_eq_check_expression_of_disjunction(disjunction)):
                    (yield (expression, disjunction))
        elif (expression := self._get_const_eq_check_expression_of_disjunction(condition)):
            (yield (expression, condition))
    def _get_const_eq_check_expression_of_disjunction(self, condition: LogicCondition) -> Optional[ExpressionUsages]:
        """Return the single expression every literal of the disjunction
        compares against a constant, or None if there is no such unique
        expression (or the condition is not a disjunction of literals)."""
        if condition.is_literal:
            return self._get_expression_compared_with_constant(condition)
        operands = condition.operands
        if ((not condition.is_disjunction) or any(((not literal.is_literal) for literal in operands))):
            return None
        compared_expressions = [self._get_expression_compared_with_constant(literal) for literal in operands]
        # All literals must compare the same (non-None) expression.
        if ((len(set(compared_expressions)) != 1) or (compared_expressions[0] is None)):
            return None
        return compared_expressions[0]
    def _get_expression_compared_with_constant(self, reaching_condition: LogicCondition) -> Optional[ExpressionUsages]:
        """Expression side of a potential switch condition, if any."""
        return self.asforest.switch_node_handler.get_potential_switch_expression(reaching_condition)
    def _get_constant_compared_with_expression(self, reaching_condition: LogicCondition) -> Optional[Constant]:
        """Constant side of a potential switch condition, if any."""
        return self.asforest.switch_node_handler.get_potential_switch_constant(reaching_condition)
    def _convert_to_z3_condition(self, condition: LogicCondition) -> PseudoLogicCondition:
        """Lift a symbolic condition into the z3 world via the condition map."""
        return PseudoLogicCondition.initialize_from_formula(condition, self.condition_handler.get_z3_condition_map())
    def _z3_condition_of_literal(self, literal: LogicCondition) -> PseudoLogicCondition:
        """z3 condition of a literal; negative literals are handled by
        negating the z3 condition of their negation."""
        assert literal.is_literal, f'The input must be a literal, but it is {literal}'
        if literal.is_symbol:
            return self.condition_handler.get_z3_condition_of(literal)
        return (~ self.condition_handler.get_z3_condition_of((~ literal)))
    def _condition_is_redundant_for_switch_node(self, switch_node: AbstractSyntaxTreeNode, condition: LogicCondition) -> bool:
        """True iff every case of a default-less switch already implies
        ``condition`` (so guarding the switch with it adds nothing)."""
        if ((not isinstance(switch_node, SwitchNode)) or switch_node.default):
            return False
        cmp_condition = PseudoLogicCondition.initialize_from_formula(condition, self.condition_handler.get_z3_condition_map())
        for child in switch_node.children:
            case_condition = PseudoLogicCondition.initialize_from_condition(Condition(OperationType.equal, [switch_node.expression, child.constant]), self.condition_handler.logic_context)
            if (not case_condition.does_imply(cmp_condition)):
                return False
        return True
    def _contains_no_violating_loop_break(self, ast_node: AbstractSyntaxTreeNode) -> bool:
        """True when the node's breaks cannot violate loop structure under
        the configured loop-break strategy."""
        return (((not ast_node.is_break_node) and (self.options.loop_break_strategy == LoopBreakOptions.structural_variable)) or (not ast_node._has_descendant_code_node_breaking_ancestor_loop()))
class MultiLinear(Chain):
    """An MLP chain: (num_layers - 1) Linear+ReLU blocks, then a final Linear.

    The first Linear maps input_dim -> inner_dim, hidden ones map
    inner_dim -> inner_dim, and the last maps inner_dim -> output_dim.
    """
    def __init__(self, input_dim: int, output_dim: int, inner_dim: int, num_layers: int, device: ((Device | str) | None)=None, dtype: (DType | None)=None) -> None:
        modules: list[Module] = []
        for index in range(num_layers - 1):
            in_features = input_dim if index == 0 else inner_dim
            modules.append(Linear(in_features, inner_dim, device=device, dtype=dtype))
            modules.append(ReLU())
        modules.append(Linear(inner_dim, output_dim, device=device, dtype=dtype))
        super().__init__(modules)
class TestEjectCommandCliConfigNotAvailable(AEATestCaseEmpty):
    """`aea eject` must fail cleanly when the CLI config is missing."""
    IS_EMPTY = True
    def setup_class(cls):
        # NOTE(review): takes ``cls`` but has no @classmethod decorator --
        # presumably stripped during extraction; confirm.
        super().setup_class()
        cls.add_item('protocol', str(DefaultMessage.protocol_id))
    # NOTE(review): the tuple expression below looks like the argument list
    # of a stripped ``@mock.patch(...)`` decorator (it would feed *_mocks);
    # as written it is not valid Python -- confirm against the original.
    ('aea.cli.utils.config.get_or_create_cli_config', return_value={})
    def test_error(self, *_mocks):
        """Ejecting with an empty CLI config raises ClickException."""
        with pytest.raises(click.ClickException, match='The AEA configurations are not initialized. Use `aea init` before continuing.'):
            self.invoke('eject', '--quiet', 'protocol', str(DefaultMessage.protocol_id))
def propagate_deletions(db, batch, path):
    """After deleting the entry at ``path``, walk its branch nodes
    bottom-up, pruning branches left with too few children and re-hashing
    the remaining ancestors.

    NOTE(review): ``sister_leaf`` (used in the two-nonzero case) is never
    assigned in this function, so that branch raises NameError as written
    -- presumably the code that loads the surviving sibling leaf is
    missing; confirm against the original.  The ``print`` calls look like
    leftover debug output.
    """
    for i in reversed(range(len(path))):
        current_node = deserialize(db_get(db, path[:i]))
        print(path[:i], current_node)
        assert isinstance(current_node, BranchNode)
        if (current_node.values.count(ZERO) == 255):
            # Only the deleted child remained: drop this branch node and
            # keep walking upward.
            print('one nonzero; continuing')
            db_put(batch, path[:i], None)
        elif (current_node.values.count(ZERO) == 254):
            # Exactly one sibling survives: collapse the branch into a leaf
            # and re-hash along the path, then stop.
            print('two nonzeroes; replacing with leaf')
            db_put(batch, path[:i], sister_leaf.serialize())
            propagate_along_path(db, batch, path[:i], hash_node(sister_leaf))
            return
        else:
            # Several children remain: clear the deleted slot, store the
            # updated branch, re-hash upward, and stop.
            print('3+ nonzeroes; removing')
            current_node.values[path[i]] = ZERO
            db_put(batch, path[:i], current_node.serialize())
            propagate_along_path(db, batch, path[:i], hash_node(current_node))
            return
def forward(model: Model[(InT, OutT)], Xp: InT, is_train: bool) -> Tuple[(OutT, Callable[([OutT], InT)])]:
    """Convert a Padded batch to a list of arrays.

    The returned backprop callback re-pads incoming list gradients so the
    upstream layer receives the Padded layout it produced.
    """
    outputs = cast(OutT, model.ops.padded2list(Xp))
    def backprop(dYs: OutT) -> InT:
        # Gradients arrive as a list; convert back to the Padded layout.
        d_padded = model.ops.list2padded(dYs)
        assert isinstance(d_padded, Padded)
        return d_padded
    return (outputs, backprop)
def build_srpm(srcdir, destdir):
    """Build a source RPM from the single .spec file found in ``srcdir``.

    All rpmbuild working directories are pointed at ``srcdir``; the
    resulting src.rpm is written to ``destdir``.  Raises RuntimeError when
    zero or more than one spec file is present.
    """
    specfiles = glob.glob(os.path.join(srcdir, '*.spec'))
    if not specfiles:
        raise RuntimeError('no spec file available')
    if len(specfiles) > 1:
        raise RuntimeError('too many specfiles: {0}'.format(', '.join(specfiles)))
    macros = {
        '_sourcedir': srcdir,
        '_rpmdir': srcdir,
        '_builddir': srcdir,
        '_specdir': srcdir,
        '_srcrpmdir': destdir,
    }
    cmd = ['rpmbuild', '-bs']
    for macro, value in macros.items():
        cmd += ['--define', macro + ' ' + value]
    cmd.append(specfiles[0])
    run_cmd(cmd)
class SourceUnit(AstNode):
    """Root AST node of one source unit (a file of top-level declarations).

    NOTE(review): this block is not plain Python.  The ``-> (X nodes)``
    return annotations, the attribute-set parameter annotations, the bare
    ``_cache(None)`` line (presumably a stripped decorator) and the
    repeated ``def _`` methods are the syntax of an attribute-grammar style
    DSL layered on Python -- code left byte-for-byte as found; confirm
    against the original before editing.
    """
    nodes: List[TopLevelNode]
    # Synthesized attributes aggregated from the contracts in this unit.
    stage1_context = synthesized()
    stage2_context = synthesized()
    _cache(None)
    def ast_nodes_by_id(self):
        # Map every descendant node's id to the node itself.
        return {d.id: d for d in self.descendants() if (d is not None)}
    def push_cfgs(self, nodes: ContractDefinition.contract_cfg_unlinked) -> (ContractDefinition.cfgs_unlinked nodes):
        # Collect the unlinked CFGs of all contract definitions.
        return [c.contract_cfg_unlinked for c in of_type[ContractDefinition](self.nodes)]
    def stage1_context(self, nodes: {ContractDefinition.cfg_local_state_init, ContractDefinition.contract_modifier_cfgs}):
        # Per-contract local-state initializers plus all modifier CFGs.
        contracts = list(of_type[ContractDefinition](nodes))
        return Stage1Context({c.id: c.cfg_local_state_init for c in contracts}, {i: m for c in contracts for (i, m) in c.contract_modifier_cfgs.items()})
    def stage2_context(self, nodes: {ContractDefinition.contract_function_cfgs}):
        # All function CFGs keyed by id.
        contracts = list(of_type[ContractDefinition](nodes))
        return Stage2Context({i: m for c in contracts for (i, m) in c.contract_function_cfgs.items()})
    def _(self) -> (AstNode.stage1_context nodes):
        return self.stage1_context
    def _(self) -> (AstNode.stage2_context nodes):
        return self.stage2_context
    def _(self: ListElement[(SourceUnit, 'nodes')]) -> (TopLevelNode.cfgs_constructors next):
        # Accumulate constructor CFGs while walking the node list.
        if isinstance(self, ContractDefinition):
            return {**self.cfgs_constructors, self.id: self.cfg_constructor_chain}
        return self.cfgs_constructors
    def cfg(self, nodes: AstNode.cfg):
        # Assemble the unit-level IR, skipping contracts whose CFG is undefined.
        return ir.SourceUnit(self, [node.cfg for node in of_type[ContractDefinition](nodes) if (not isinstance(node.cfg, UndefinedAttribute))])
class Command(BaseCommand):
    """Management command that clones File B and C records per DEF code."""
    help = 'Deterministically generate File B and C DEF code records for the fiscal year and period provided.'
    # Runtime state populated from CLI options in set_state().
    fiscal_year = None
    fiscal_period = None
    allow_rds = False
    vacuum = False
    # (DEF code, adjustment ratio, divisor) triples fed to the clone SQL.
    clone_factors = [('I', 0.2, 13), ('F', 0.1, 11), ('L', 0.3, 10), ('M', 0.25, 8), ('N', 0.4, 7), ('O', 0.15, 5)]
    def add_arguments(self, parser):
        """Register CLI options via the shared helper module."""
        helper.add_argument_fiscal_year(parser)
        helper.add_argument_fiscal_period(parser)
        helper.add_argument_rds(parser)
        helper.add_argument_vacuum(parser)
        helper.add_argument_warning_epilog(parser)
    def handle(self, *args, **options):
        """Validate inputs and clone records inside one transaction."""
        self.set_state(options)
        self.perform_validations()
        with ScriptTimer(f'Generate DEF code File B and C records for FY{self.fiscal_year}P{self.fiscal_period}'):
            with transaction.atomic():
                helper.record_base_submission_ids()
                self.clone_for_defc()
                t = ScriptTimer('Commit transaction')
            t.log_success_message()
            if self.vacuum:
                helper.vacuum_tables()
    def set_state(self, options):
        """Copy parsed CLI options onto the instance.

        NOTE(review): the 'fiscal year' / 'fiscal period' keys contain
        spaces; this must match the dests registered by
        helper.add_argument_* -- confirm they are not underscore-separated.
        """
        self.fiscal_year = options['fiscal year']
        self.fiscal_period = options['fiscal period']
        self.allow_rds = options['yes_i_know_its_rds']
        self.vacuum = options['vacuum']
    def perform_validations(self):
        """Refuse to run on RDS (unless allowed) or without source data."""
        helper.validate_not_rds(self.allow_rds)
        helper.validate_period_has_submissions(self.fiscal_year, self.fiscal_period)
        helper.validate_disaster_emergency_fund_code_table_has_data()
    def clone_for_defc(self):
        """Run the clone SQL once per DEF code with its clone factors."""
        sql = helper.read_sql_file('clone_for_defc.sql')
        for p in self.clone_factors:
            helper.run_sqls(helper.split_sql(sql.format(disaster_emergency_fund_code=p[0], filter_fiscal_year=self.fiscal_year, filter_fiscal_period=self.fiscal_period, adjustment_ratio=p[1], divisor=p[2])))
class Manager():
    """Manages etesync-dav user accounts: the htpasswd file protecting the
    DAV endpoint and the stored credentials/sessions for each user."""
    def __init__(self, config_dir=DATA_DIR, htpasswd_file=HTPASSWD_FILE, creds_file=CREDS_FILE):
        if (not os.path.exists(config_dir)):
            if os.path.exists(LEGACY_CONFIG_DIR):
                # One-time migration from the legacy config location.
                import shutil
                shutil.move(LEGACY_CONFIG_DIR, DATA_DIR)
            else:
                os.makedirs(config_dir, mode=448)  # 0o700: private to the user
        self.htpasswd = Htpasswd(htpasswd_file)
        self.creds = Credentials(creds_file)
        if (not os.path.exists(htpasswd_file)):
            self.htpasswd.save()
    def _generate_pasword(self):
        """Generate a random 16-character alphanumeric password.

        Fix: uses the OS CSPRNG via ``random.SystemRandom`` -- the previous
        implementation used the default Mersenne Twister, which is not
        suitable for generating an access credential.  (Method name typo
        'pasword' kept for compatibility with existing callers.)
        """
        rng = random.SystemRandom()
        alphabet = ((string.ascii_uppercase + string.ascii_lowercase) + string.digits)
        return ''.join([rng.choice(alphabet) for i in range(16)])
    def validate_username(self, username):
        """Validate the username and return True iff it already exists."""
        if (username is None):
            raise RuntimeError('Username is required')
        if (':' in username):
            # htpasswd uses ':' as its field separator.
            raise RuntimeError("Username can't include a colon.")
        return (self.htpasswd.get(username) is not None)
    def refresh_token(self, username, login_password):
        """Re-authenticate an existing user and persist the fresh token."""
        server_url = self.creds.get_server_url(username)
        stored_session = self.creds.get_etebase(username)
        if (stored_session is not None):
            # Etebase account: refresh the saved session token.
            etebase = local_cache.Etebase(username, stored_session, server_url).etebase
            etebase.fetch_token()
            self.creds.set_etebase(username, etebase.save(None), server_url)
        else:
            # Legacy EteSync account: fetch a new auth token, keeping the
            # existing cipher key.
            (_, cipher_key) = self.creds.get(username)
            if (cipher_key is None):
                raise RuntimeError('User not found in etesync-dav')
            auth_token = api.Authenticator(server_url).get_auth_token(username, login_password)
            self.creds.set(username, auth_token, cipher_key, server_url)
        self.creds.save()
    def add(self, username, login_password, encryption_password, remote_url=LEGACY_ETESYNC_URL):
        """Add a legacy EteSync user and create default collections.

        On any failure during initialization the user is rolled back out of
        both the htpasswd and credential stores before re-raising.
        Returns the generated DAV password.
        """
        exists = self.validate_username(username)
        if exists:
            raise RuntimeError("User already exists. Delete first if you'd like to override settings.")
        print('Fetching auth token')
        auth_token = api.Authenticator(remote_url).get_auth_token(username, login_password)
        print('Deriving password')
        etesync = api.EteSync(username, auth_token, remote=remote_url, db_path=':memory:')
        cipher_key = etesync.derive_key(encryption_password)
        print('Saving config')
        generated_password = self._generate_pasword()
        self.htpasswd.set(username, generated_password)
        self.creds.set(username, auth_token, cipher_key, remote_url)
        self.htpasswd.save()
        self.creds.save()
        print('Initializing account')
        try:
            with etesync_for_user(username) as (etesync, _):
                etesync.get_or_create_user_info(force_fetch=True)
                etesync.sync_journal_list()
                if (not list(etesync.list())):
                    # Fresh account: seed one calendar, task list and
                    # address book, each with a random journal name.
                    collection_info = {'displayName': 'My Calendar', 'description': ''}
                    collection_name = hashlib.sha256(str(time.time()).encode()).hexdigest()
                    inst = api.Calendar.create(etesync, collection_name, collection_info)
                    inst.save()
                    collection_info = {'displayName': 'My Tasks', 'description': ''}
                    collection_name = hashlib.sha256(str(time.time()).encode()).hexdigest()
                    inst = api.TaskList.create(etesync, collection_name, collection_info)
                    inst.save()
                    collection_info = {'displayName': 'My Contacts', 'description': ''}
                    collection_name = hashlib.sha256(str(time.time()).encode()).hexdigest()
                    inst = api.AddressBook.create(etesync, collection_name, collection_info)
                    inst.save()
                    etesync.sync_journal_list()
        except Exception as e:
            # Roll the partially-created user back before re-raising.
            self.htpasswd.delete(username)
            self.creds.delete(username)
            self.htpasswd.save()
            self.creds.save()
            raise e
        return self.get(username)
    def add_etebase(self, username, password, remote_url=ETESYNC_URL):
        """Add an Etebase (EteSync 2.0) user and create default collections.

        Rolls back on initialization failure; returns the generated DAV
        password.
        """
        exists = self.validate_username(username)
        if exists:
            raise RuntimeError("User already exists. Delete first if you'd like to override settings.")
        print('Logging in')
        client = Etebase.Client('etesync-dav', remote_url)
        etebase = Etebase.Account.login(client, username, password)
        print('Saving config')
        generated_password = self._generate_pasword()
        self.htpasswd.set(username, generated_password)
        self.creds.set_etebase(username, etebase.save(None), remote_url)
        self.htpasswd.save()
        self.creds.save()
        print('Initializing account')
        try:
            col_mgr = etebase.get_collection_manager()
            fetch_options = Etebase.FetchOptions().limit(1)
            collections = col_mgr.list(local_cache.COL_TYPES, fetch_options)
            if (len(list(collections.data)) == 0):
                # Fresh account: seed default collections; failures here are
                # non-fatal (the account itself is already usable).
                wanted = [['etebase.vcard', 'My Contacts'], ['etebase.vevent', 'My Calendar'], ['etebase.vtodo', 'My Tasks']]
                try:
                    for [col_type, name] in wanted:
                        meta = {'name': name, 'mtime': local_cache.get_millis()}
                        col = col_mgr.create(col_type, meta, b'')
                        col_mgr.upload(col)
                except Exception as e:
                    print('Failed creating default collections (skipping). Reason:', e)
                    pass
        except Exception as e:
            # Roll the partially-created user back before re-raising.
            self.htpasswd.delete(username)
            self.creds.delete(username)
            self.htpasswd.save()
            self.creds.save()
            raise e
        return self.get(username)
    def delete(self, username):
        """Remove a user's cache, htpasswd entry, and stored credentials.

        Cache cleanup is best-effort: errors there are printed, not raised.
        """
        exists = self.validate_username(username)
        if (not exists):
            raise RuntimeError('User not found')
        try:
            with etesync_for_user(username) as (etesync, _):
                if hasattr(etesync, 'clear_user'):
                    etesync.clear_user()
                else:
                    # Older cache layout: delete journals/entries manually.
                    user = etesync.user
                    for col in user.journals:
                        for item in col.entries:
                            item.delete_instance()
                        col.delete_instance()
                    user.user_info.delete_instance()
                    user.delete()
                    user = None
        except Exception as e:
            print('Failed removing user cache', e)
        self.htpasswd.delete(username)
        self.creds.delete(username)
        self.htpasswd.save()
        self.creds.save()
    def get(self, username):
        """Return the stored DAV password for an existing user."""
        exists = self.validate_username(username)
        if (not exists):
            raise RuntimeError('User not found')
        return self.htpasswd.get(username)
    def list(self):
        """Yield all known usernames."""
        for user in self.htpasswd.list():
            (yield user)
class AIFlowConsole(Console):
    """Rich console with helpers to render data as JSON, YAML, or a table."""
    def print_as_json(self, data: Dict):
        """Render ``data`` as syntax-highlighted JSON."""
        self.print(Syntax(json.dumps(data), 'json', theme='ansi_dark'), soft_wrap=True)
    def print_as_yaml(self, data: Dict):
        """Render ``data`` as syntax-highlighted YAML."""
        self.print(Syntax(yaml.dump(data), 'yaml', theme='ansi_dark'), soft_wrap=True)
    def print_as_table(self, data: List[Dict]):
        """Render a list of homogeneous dicts as a table (keys of the first
        row become the columns)."""
        if not data:
            self.print('No data found')
            return
        table = SimpleTable(show_header=True)
        for column in data[0].keys():
            table.add_column(column)
        for record in data:
            table.add_row(*map(str, record.values()))
        self.print(table)
    def _normalize_data(self, value: Any, output: str) -> Optional[Union[(list, str, dict)]]:
        """Recursively stringify ``value``; table output flattens sequences
        to comma-joined strings and leaves dicts to str()."""
        if isinstance(value, (tuple, list)):
            normalized = (self._normalize_data(item, output) for item in value)
            if output == 'table':
                return ','.join(normalized)
            return list(normalized)
        if isinstance(value, dict) and output != 'table':
            return {key: self._normalize_data(item, output) for (key, item) in value.items()}
        if value is None:
            return None
        return str(value)
    def print_as(self, data: List[Union[(Dict, Any)]], output: str, mapper: Optional[Callable]=None):
        """Render ``data`` with the formatter named by ``output``.

        Non-dict rows require ``mapper`` to convert each row into a dict.
        Raises ValueError for unknown formats or unmappable data.
        """
        output_to_renderer = {'json': self.print_as_json, 'yaml': self.print_as_yaml, 'table': self.print_as_table}
        renderer = output_to_renderer.get(output)
        if not renderer:
            raise ValueError(f'Unknown formatter: {output}. Allowed options: {list(output_to_renderer.keys())}')
        if not all(isinstance(d, dict) for d in data) and not mapper:
            raise ValueError('To tabulate non-dictionary data you need to provide `mapper` function')
        rows: List[Dict] = [mapper(d) for d in data] if mapper else data
        rows = [{k: self._normalize_data(v, output) for (k, v) in row.items()} for row in rows]
        renderer(rows)
def _log_calibration(calibration):
    """Log the voltage bases and per-channel GAIN/AMPL/COMP calibration rows
    as width-5 formatted columns."""
    volt_bases = ' '.join(format(v / 10, '5') for v in VOLT_RANGES)
    _log('# VOLTBASE: %s', volt_bases)
    for section, label in enumerate(('GAIN', 'AMPL', 'COMP')):
        for channel in range(CHANNELS):
            row = ' '.join(format(v, '5') for v in calibration[section][channel])
            _log('# %s CH%s: %s', label, channel + 1, row)
class table_feature_prop_table_sync_from(table_feature_prop):
    """Auto-generated LOXI table-features property (type 16) listing the
    tables this table synchronizes from.

    NOTE(review): ``pack`` joins ``str`` pieces with ``''.join`` -- Python 2
    byte-string idiom; ``unpack`` takes no ``self`` and appears to have
    lost its ``@staticmethod`` decorator during extraction -- confirm.
    """
    type = 16  # property type on the wire
    def __init__(self, table_ids=None):
        if (table_ids != None):
            self.table_ids = table_ids
        else:
            self.table_ids = []
        return
    def pack(self):
        """Serialize; the 16-bit length at slot 1 is back-patched."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for length
        packed.append(loxi.generic_util.pack_list(self.table_ids))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Deserialize from ``reader``; asserts the property type."""
        obj = table_feature_prop_table_sync_from()
        _type = reader.read('!H')[0]
        assert (_type == 16)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this property's declared length.
        reader = orig_reader.slice(_length, 4)
        obj.table_ids = loxi.generic_util.unpack_list(reader, ofp.common.uint8.unpack)
        return obj
    def __eq__(self, other):
        # Field-wise equality; exact class identity required.
        if (type(self) != type(other)):
            return False
        if (self.table_ids != other.table_ids):
            return False
        return True
    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer ``q``."""
        q.text('table_feature_prop_table_sync_from {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('table_ids = ')
                q.pp(self.table_ids)
            q.breakable()
        q.text('}')
def fortios_firewall(data, fos, check_mode):
    """Dispatch the firewall/vipgrp64 task and translate its response into
    the tuple shape the Ansible module expects.

    Returns the raw response in check mode, otherwise
    (failed, changed, response, diff).
    """
    fos.do_member_operation('firewall', 'vipgrp64')
    if data['firewall_vipgrp64']:
        resp = firewall_vipgrp64(data, fos, check_mode)
    else:
        # fail_json raises, so resp is never read on this path.
        fos._module.fail_json(msg=('missing task body: %s' % 'firewall_vipgrp64'))
    if check_mode:
        return resp
    # changed: successful AND either no revision info or revision changed.
    return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {})
def extractNanotranslationsWordpressCom(item):
    """Parse a release item from nanotranslations.wordpress.com.

    Returns None for previews or items without a volume/chapter, a release
    message for recognised tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (series name, translation type)
    releases = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in releases.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_default_bytes_serialization():
    """Round-trip a BYTES-performative DefaultMessage through its serializer."""
    expected_msg = DefaultMessage(dialogue_reference=('', ''), message_id=1, target=0, performative=DefaultMessage.Performative.BYTES, content=b'hello')
    msg_bytes = DefaultMessage.serializer.encode(expected_msg)
    actual_msg = DefaultMessage.serializer.decode(msg_bytes)
    # Decoding an encoded message must reproduce the original exactly.
    assert (expected_msg == actual_msg)
class CorefClusterer(nn.Module):
    """Coreference scorer: LSTM-contextualized word features feed a cheap
    rough scorer that keeps the top-k candidate antecedents per word, which
    the finer AnaphoricityScorer then re-scores in batches."""
    def __init__(self, dim: int, dist_emb_size: int, hidden_size: int, n_layers: int, dropout: float, rough_k: int, batch_size: int):
        super().__init__()
        self.dropout = torch.nn.Dropout(dropout)
        # Chunk size for the fine-scoring loop in forward().
        self.batch_size = batch_size
        # Distance-based pairwise features appended to each candidate pair.
        self.pairwise = DistancePairwiseEncoder(dist_emb_size, dropout)
        # Pair embedding width: 3*dim plus the pairwise feature width
        # (presumably [mention; antecedent; product] -- confirm in
        # AnaphoricityScorer).
        pair_emb = ((dim * 3) + self.pairwise.shape)
        self.ana_scorer = AnaphoricityScorer(pair_emb, hidden_size, n_layers, dropout)
        self.lstm = torch.nn.LSTM(input_size=dim, hidden_size=dim, batch_first=True)
        self.rough_scorer = RoughScorer(dim, dropout, rough_k)
    def forward(self, word_features: torch.Tensor) -> Tuple[(torch.Tensor, torch.Tensor)]:
        """Score candidate antecedents for every word.

        word_features: assumed (n_words, dim) -- a batch axis is added for
        the LSTM and squeezed away afterwards; TODO confirm upstream shape.
        Returns (coref_scores, top_indices).
        """
        self.lstm.flatten_parameters()
        word_features = torch.unsqueeze(word_features, dim=0)
        (words, _) = self.lstm(word_features)
        words = words.squeeze()
        words = self.dropout(words)
        # Cheap pruning: keep only the rough_k best candidates per word.
        (top_rough_scores, top_indices) = self.rough_scorer(words)
        pairwise = self.pairwise(top_indices)
        batch_size = self.batch_size
        a_scores_lst: List[torch.Tensor] = []
        # Fine scoring in chunks to bound peak memory.
        for i in range(0, len(words), batch_size):
            pairwise_batch = pairwise[i:(i + batch_size)]
            words_batch = words[i:(i + batch_size)]
            top_indices_batch = top_indices[i:(i + batch_size)]
            top_rough_scores_batch = top_rough_scores[i:(i + batch_size)]
            a_scores_batch = self.ana_scorer(all_mentions=words, mentions_batch=words_batch, pairwise_batch=pairwise_batch, top_indices_batch=top_indices_batch, top_rough_scores_batch=top_rough_scores_batch)
            a_scores_lst.append(a_scores_batch)
        coref_scores = torch.cat(a_scores_lst, dim=0)
        return (coref_scores, top_indices)
# The decorator line was truncated to ".parametrize" — restore the standard
# "@pytest.mark" prefix so the module is syntactically valid again.
@pytest.mark.parametrize(
    'function, value',
    (
        ('reflect', Decimal('12.8')),
        ('reflect', Decimal(2 ** 256 - 1) / 10),
        ('reflect', Decimal('-0.1')),
        ('reflect', Decimal('-12.8')),
        ('reflect', Decimal(2 ** 256 - 1) / 10 ** 80),
        ('reflect', Decimal(1) / 10 ** 80),
        ('reflect_short_u', 0),
        ('reflect_short_u', Decimal('25.5')),
    ),
)
def test_reflect_fixed_value(fixed_reflector_contract, function, value):
    """Fixed-point values sent to the reflector contract come back unchanged."""
    contract_func = fixed_reflector_contract.functions[function]
    reflected = contract_func(value).call({'gas': 420000})
    assert reflected == value
def deploy(w3, Factory, from_address, args=None):
    """Deploy a contract and return an instance bound to its address.

    :param w3: web3 instance used for the transaction.
    :param Factory: contract factory callable.
    :param from_address: account the deployment transaction is sent from.
    :param args: optional constructor arguments (defaults to none).
    """
    factory = Factory(w3)
    constructor_args = args or []
    txn_hash = factory.constructor(*constructor_args).transact({'from': from_address})
    receipt = w3.eth.wait_for_transaction_receipt(txn_hash)
    assert receipt is not None
    return factory(address=receipt['contractAddress'])
def test_regression_ignore_format(df_enc_numeric):
    """DecisionTreeEncoder in regression mode encodes numeric columns when
    ignore_format=True, producing the expected tree-leaf means."""
    rng = np.random.RandomState(42)
    y = rng.normal(0, 0.1, len(df_enc_numeric))

    cols = ['var_A', 'var_B']
    encoder = DecisionTreeEncoder(regression=True, random_state=rng, ignore_format=True)
    encoder.fit(df_enc_numeric[cols], y)
    X = encoder.transform(df_enc_numeric[cols])

    # Expected encodings: per-leaf target means for each original category run.
    expected = df_enc_numeric.copy()
    expected['var_A'] = [0.034348] * 6 + [-0.024679] * 10 + [-0.075473] * 4
    expected['var_B'] = [0.044806] * 10 + [-0.079066] * 10
    pd.testing.assert_frame_equal(X.round(6), expected[cols])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.