code stringlengths 281 23.7M |
|---|
class GivReg(RegBase):
    """Register operand addressed as a base register plus an index ("given" register)."""

    # Class-wide count of GivReg instances recorded via stat().
    total = 0

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # 'binary' is required; a missing key is a caller error.
        self.binary = kwargs['binary']

    def __repr__(self):
        return f'(GivReg {self.base_register}.{self.index})'

    def __str__(self):
        return self.__repr__()

    def add_pc(self, pc):
        """Record one more program counter that references this register."""
        self.pcs.add(pc)

    def str_noindex(self):
        """Render just the base register, without the index component."""
        return self.base_register

    def stat(self):
        """Bump both the per-class and the RegBase-wide 'giv' counters."""
        super().stat()
        GivReg.total += 1
        RegBase.giv += 1
class FieldTextArea(Field):
    """Form field built around a textarea input widget."""

    name = 'Field Textarea'

    def __init__(self, page: primitives.PageModel, value, label, placeholder, icon, width, height, html_code, helper, options, profile):
        # Prefer any value cached on the page for this html_code, else the given default.
        initial_value = page.inputs.get(html_code, value)
        widget = page.ui.inputs.textarea(initial_value, width=(100, '%'), placeholder=placeholder, options=options)
        super().__init__(page, widget, label, icon, width, height, html_code, helper, options, profile)
class File(BaseAligo):
    """Core wrappers around the file list / batch-get / walk / scan endpoints."""

    def _core_get_file_list(self, body: GetFileListRequest) -> Iterator[BaseFile]:
        """Yield the files in a folder, restricting returned fields via a jsonmask."""
        params = {'jsonmask': 'next_marker,items(name,file_id,drive_id,type,size,created_at,updated_at,category,file_extension,parent_file_id,mime_type,starred,thumbnail,url,streams_info,content_hash,user_tags,user_meta,trashed,video_media_metadata,video_preview_metadata,sync_meta,sync_device_flag,sync_flag,punish_flag'}
        yield from self._list_file(ADRIVE_V3_FILE_LIST, body, GetFileListResponse, params=params)

    def _core_batch_get_files(self, body: BatchGetFileRequest) -> Iterator[BatchSubResponse]:
        """Yield one sub-response per file id, via a single batch request."""
        if body.drive_id is None:
            body.drive_id = self.default_drive_id
        sub_requests = [
            BatchSubRequest(
                id=file_id,
                url='/file/get',
                body=GetFileRequest(drive_id=body.drive_id, file_id=file_id),
            )
            for file_id in body.file_id_list
        ]
        yield from self.batch_request(BatchRequest(requests=sub_requests), GetFileRequest)

    def _core_walk_file(self, parent_file_id: str = 'root', drive_id: str = None, type_: BaseFileType = None, url_expire_sec: int = 86400, limit: int = 1000) -> Iterator[BaseFile]:
        """Yield files reachable from *parent_file_id* via the walk endpoint."""
        payload = {
            'parent_file_id': parent_file_id,
            'drive_id': drive_id or self.default_drive_id,
            'type': type_,
            'url_expire_sec': url_expire_sec,
            'limit': limit,
        }
        yield from self._list_file(V2_FILE_WALK, payload, GetFileListResponse)

    def _core_scan_file(self, drive_id: str = None, category: BaseFileCategory = None, limit: int = 1000) -> Iterator[BaseFile]:
        """Yield files found by scanning the drive, optionally filtered by category."""
        payload = {
            'drive_id': drive_id or self.default_drive_id,
            'category': category,
            'limit': limit,
        }
        yield from self._list_file(V2_FILE_SCAN, payload, GetFileListResponse)
def get_data():
    """Dashboard configuration for the encounter doctype.

    Declares the link fieldname, per-doctype overrides for non-standard link
    fieldnames, the grouped transaction sections, and which linked doctypes
    hide their create buttons.
    """
    return {
        'fieldname': 'encounter',
        'non_standard_fieldnames': {
            'Patient Medical Record': 'reference_name',
            'Inpatient Medication Order': 'patient_encounter',
            'Nursing Task': 'reference_name',
            'Service Request': 'order_group',
            'Medication Request': 'order_group',
        },
        'transactions': [
            {'label': _('Records'), 'items': ['Vital Signs', 'Patient Medical Record']},
            {'label': _('Orders'), 'items': ['Inpatient Medication Order', 'Nursing Task', 'Service Request', 'Medication Request']},
        ],
        'disable_create_buttons': ['Inpatient Medication Order'],
    }
def gen_profiler(sorted_graph: "List[Tensor]", workdir: str, dynamic_profiling_strategy) -> list:
    """Generate profiler artifacts for every source op that supports profiling.

    Parameters
    ----------
    sorted_graph : topologically sorted tensors whose source ops are scanned.
    workdir : directory each op writes its profiler into.
    dynamic_profiling_strategy : forwarded verbatim to each op's ``gen_profiler``.

    Returns
    -------
    list of the per-op ``gen_profiler`` results, in graph order.
    """
    # dict.get replaces the original double lookup
    # (`'has_profiler' in attrs and attrs['has_profiler']`) with one access.
    return [
        func.gen_profiler(workdir, dynamic_profiling_strategy)
        for node in sorted_graph
        for func in node.src_ops()
        if func._attrs.get('has_profiler')
    ]
class Migration(migrations.Migration):
    """Retype Broker created_at/updated_at columns to NaiveTimestampField."""

    dependencies = [('transactions', '0001_initial')]

    # Both source tables get the same pair of column alterations; build them
    # from a spec table instead of spelling out four near-identical calls.
    operations = [
        migrations.AlterField(
            model_name=model_name,
            name=column_name,
            field=usaspending_api.common.custom_django_fields.NaiveTimestampField(
                blank=True, db_index=True, help_text=help_text, null=True
            ),
        )
        for (model_name, column_name, help_text) in [
            ('sourceassistancetransaction', 'created_at', 'record creation datetime in Broker'),
            ('sourceassistancetransaction', 'updated_at', 'record last update datetime in Broker'),
            ('sourceprocurementtransaction', 'created_at', 'record creation datetime in Broker'),
            ('sourceprocurementtransaction', 'updated_at', 'record last update datetime in Broker'),
        ]
    ]
def extractWwwDanoveltranslationsCom(item):
    """Build a release message for a danoveltranslations.com feed item.

    Returns None when the title carries no volume/chapter info or is a
    preview, False when no known tag matches, otherwise the built message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesDumbbellSonificationTracksMappingFrequency(Options):
    """Accessors for the sonification `frequency` mapping sub-options.

    NOTE(review): the original block defined every accessor twice — a getter
    immediately shadowed by a same-named setter — so the getters were dead
    code. The `@property` / `@<name>.setter` decorators had evidently been
    stripped; they are restored here, matching the getter/setter pattern of
    the surrounding Options classes. Confirm against the upstream source.
    """

    @property
    def mapFunction(self):
        """Mapping function applied to the value (None until configured)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property the frequency is mapped to (None until configured)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range (None until configured)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range (None until configured)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping operates within (None until configured)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the decorator arrived garbled (leading '@pytest.mark.parametrize'
# stripped, leaving a bare tuple); restored below.
@pytest.mark.parametrize('overrides,expected', [(['--cfg', 'job', '-p', 'baud_rate'], '19200'), (['--cfg', 'hydra', '-p', 'hydra.job.name'], 'frozen'), (['--cfg', 'job', '--resolve', '-p', 'baud_rate'], '19200'), (['--cfg', 'hydra', '--resolve', '-p', 'hydra.job.name'], 'frozen'), (['--info', 'config'], 'baud_rate: 19200'), (['--hydra-help'], '== Flags =='), (['--help'], 'frozen is powered by Hydra.')])
def test_frozen_primary_config(tmpdir: Path, overrides: List[str], expected: str) -> None:
    """Run the frozen-config example app with each override set and check its output."""
    cmd = ['examples/patterns/write_protect_config_node/frozen.py', f'hydra.run.dir={tmpdir}', 'hydra.job.chdir=True']
    cmd.extend(overrides)
    ret, _err = run_python_script(cmd)
    assert expected in ret
class MTextField(HasTraits):
    """Toolkit-independent text-field mixin.

    Keeps the ``value``/``placeholder``/``echo``/``read_only`` traits in sync
    with a toolkit control. Concrete backends implement the
    ``_get_control_*`` / ``_set_control_*`` / ``_observe_control_*`` hooks.
    """

    # Current text of the field.
    value = Str()
    # When the control's text is pushed into `value`: continuously ('auto')
    # or only once editing finishes ('editing_finished').
    update_text = Enum('auto', 'editing_finished')
    # Hint text shown while the field is empty.
    placeholder = Str()
    # Whether typed characters are displayed or masked.
    echo = Enum('normal', 'password')
    # Whether user edits are rejected.
    read_only = Bool()

    def _initialize_control(self):
        """Push the current trait state onto a freshly created control."""
        super()._initialize_control()
        self._set_control_echo(self.echo)
        self._set_control_placeholder(self.placeholder)
        self._set_control_read_only(self.read_only)

    def _add_event_listeners(self):
        """Register trait observers and configure control-side notification."""
        super()._add_event_listeners()
        self.observe(self._update_text_updated, 'update_text', dispatch='ui')
        self.observe(self._placeholder_updated, 'placeholder', dispatch='ui')
        self.observe(self._echo_updated, 'echo', dispatch='ui')
        self.observe(self._read_only_updated, 'read_only', dispatch='ui')
        if (self.control is not None):
            if (self.update_text == 'editing_finished'):
                # Swap per-change value notification for a single
                # editing-finished notification.
                self._observe_control_value(remove=True)
                self._observe_control_editing_finished()

    def _remove_event_listeners(self):
        """Undo _add_event_listeners, restoring value observation if swapped."""
        if (self.control is not None):
            if (self.update_text == 'editing_finished'):
                self._observe_control_editing_finished(remove=True)
                self._observe_control_value()
        self.observe(self._update_text_updated, 'update_text', dispatch='ui', remove=True)
        self.observe(self._placeholder_updated, 'placeholder', dispatch='ui', remove=True)
        self.observe(self._echo_updated, 'echo', dispatch='ui', remove=True)
        self.observe(self._read_only_updated, 'read_only', dispatch='ui', remove=True)
        super()._remove_event_listeners()

    def _editing_finished(self):
        """Pull the control's text into `value` when editing completes."""
        if (self.control is not None):
            value = self._get_control_value()
            self._update_value(value)

    # --- toolkit hooks: backends must override each of these -------------

    def _get_control_placeholder(self):
        raise NotImplementedError()

    def _set_control_placeholder(self, placeholder):
        raise NotImplementedError()

    def _get_control_echo(self):
        raise NotImplementedError()

    def _set_control_echo(self, echo):
        raise NotImplementedError()

    def _get_control_read_only(self):
        raise NotImplementedError()

    def _set_control_read_only(self, read_only):
        raise NotImplementedError()

    def _observe_control_editing_finished(self, remove=False):
        raise NotImplementedError()

    # --- trait-change handlers: forward new trait values to the control --

    def _placeholder_updated(self, event):
        if (self.control is not None):
            self._set_control_placeholder(self.placeholder)

    def _echo_updated(self, event):
        if (self.control is not None):
            self._set_control_echo(self.echo)

    def _read_only_updated(self, event):
        if (self.control is not None):
            self._set_control_read_only(self.read_only)

    def _update_text_updated(self, event):
        """Re-wire control notifications when `update_text` changes mode."""
        if (self.control is not None):
            if (event.new == 'editing_finished'):
                self._observe_control_value(remove=True)
                self._observe_control_editing_finished()
            else:
                self._observe_control_editing_finished(remove=True)
                self._observe_control_value()
# NOTE(review): the fixture decorator arrived garbled (bare '()'); restored.
@pytest.fixture()
def create_simple_saddle_point_problem(request):
    """Assemble small PETSc matrices and vectors for a saddle-point test problem.

    Yields an Output_Storage bundle holding the five operator blocks
    (F, D, B, Bt, C), two work vectors sized to the pressure unknowns, and
    the pressure/velocity unknown counts.
    """
    class Output_Storage(object):
        """Plain record of the assembled operators and vectors."""

        def __init__(self, petsc_matF, petsc_matD, petsc_matB, petsc_matBt, petsc_matC, x_vec, y_vec, num_p_unkwn, num_v_unkwn):
            self.petsc_matF = petsc_matF
            self.petsc_matD = petsc_matD
            self.petsc_matB = petsc_matB
            self.petsc_matBt = petsc_matBt
            self.petsc_matC = petsc_matC
            self.x_vec = x_vec
            self.y_vec = y_vec
            self.num_p_unkwn = num_p_unkwn
            self.num_v_unkwn = num_v_unkwn

    # CSR triplets (values, column indices, row pointers) for each block.
    vals_F = [3.2, 1.1, 6.3, 1.0, -5.1]
    col_idx_F = [0, 1, 0, 2, 0]
    row_idx_F = [0, 2, 4, 5]
    vals_D = [5.5, 7.1, 1.0]
    col_idx_D = [0, 1, 2]
    row_idx_D = [0, 1, 2, 3]
    vals_B = [1.1, 6.3, 7.3, 3.6, 6.3]
    col_idx_B = [0, 2, 0, 1, 2]
    row_idx_B = [0, 2, 5]
    vals_Bt = [1.1, 7.3, 3.6, 6.3, 6.3]
    col_idx_Bt = [0, 1, 1, 0, 1]
    row_idx_Bt = [0, 2, 3, 5]
    vals_C = [1.2, 2.1, 3.3]
    col_idx_C = [0, 1, 1]
    row_idx_C = [0, 2, 3]
    # len(row pointer) - 1 equals the number of matrix rows (unknowns).
    num_p_unkwn = len(row_idx_B) - 1
    num_v_unkwn = len(row_idx_F) - 1
    petsc_matF = LAT.csr_2_petsc(size=(num_v_unkwn, num_v_unkwn), csr=(row_idx_F, col_idx_F, vals_F))
    petsc_matD = LAT.csr_2_petsc(size=(num_v_unkwn, num_v_unkwn), csr=(row_idx_D, col_idx_D, vals_D))
    petsc_matB = LAT.csr_2_petsc(size=(num_p_unkwn, num_v_unkwn), csr=(row_idx_B, col_idx_B, vals_B))
    petsc_matBt = LAT.csr_2_petsc(size=(num_v_unkwn, num_p_unkwn), csr=(row_idx_Bt, col_idx_Bt, vals_Bt))
    petsc_matC = LAT.csr_2_petsc(size=(num_p_unkwn, num_p_unkwn), csr=(row_idx_C, col_idx_C, vals_C))
    x_vec = np.ones(num_p_unkwn)
    y_vec = np.zeros(num_p_unkwn)
    x_PETSc_vec = PETSc.Vec().createWithArray(x_vec)
    y_PETSc_vec = PETSc.Vec().createWithArray(y_vec)
    output_data = Output_Storage(petsc_matF, petsc_matD, petsc_matB, petsc_matBt, petsc_matC, x_PETSc_vec, y_PETSc_vec, num_p_unkwn, num_v_unkwn)
    yield output_data
def fetch_mixed(ctx: Context, public_id: PublicId, alias: Optional[str] = None, target_dir: Optional[str] = None) -> None:
    """Fetch an agent, trying the local registry first and falling back to remote.

    A ClickException from the local fetch is logged at debug level, then the
    remote registry is queried with the same arguments.
    """
    try:
        fetch_agent_locally(ctx, public_id, alias=alias, target_dir=target_dir)
    except click.ClickException as e:
        logger.debug(f'Fetch from local registry failed (reason={str(e)}), trying remote registry...')
        fetch_agent(ctx, public_id, alias=alias, target_dir=target_dir)
# NOTE(review): the marker arrived garbled (leading '@pytest.mark' stripped); restored.
@pytest.mark.usefixtures('use_tmpdir')
def test_that_substitution_happens_in_workflow():
    """Workflow parsing should substitute <A>/<B> magic strings into job arguments."""
    with open('workflow', 'w', encoding='utf-8') as f:
        f.write('JOB <A> <B>\n')
    substlist = SubstitutionList()
    substlist['<A>'] = 'a'
    substlist['<B>'] = 'b'
    job = WorkflowJob(name='JOB', internal=False, min_args=None, max_args=None, arg_types=[], executable='echo', script=None)
    wf = Workflow.from_file('workflow', substlist, {'JOB': job})
    assert wf.cmd_list == [(job, ['a', 'b'])]
# NOTE(review): the marker arrived garbled (leading '@pytest.mark' stripped); restored.
@pytest.mark.skip
def test_pin_and_get(dep_project):
    """A created manifest should round-trip identically through process/get."""
    package_config = ETHPM_CONFIG.copy()
    package_config['settings']['include_dependencies'] = False
    manifest, uri = ethpm.create_manifest(dep_project._path, package_config, True)
    process = ethpm.process_manifest(manifest, uri)
    get = ethpm.get_manifest(uri)
    # Compare top-level string entries directly and nested mappings key by key.
    for key in list(process) + list(get):
        if isinstance(process[key], str):
            assert process[key] == get[key]
            continue
        for k in list(process[key]) + list(get[key]):
            assert process[key][k] == get[key][k]
class OefSearchHandler(Handler):
    """Handle oef_search protocol messages for this skill."""

    SUPPORTED_PROTOCOL = OefSearchMessage.protocol_id

    def setup(self) -> None:
        """Set up the handler; nothing to initialize."""

    def handle(self, message: Message) -> None:
        """Dispatch an incoming oef_search message to the matching sub-handler."""
        oef_search_msg = cast(OefSearchMessage, message)
        oef_search_dialogues = cast(OefSearchDialogues, self.context.oef_search_dialogues)
        # Attach the message to its dialogue; None means it belongs to no known dialogue.
        oef_search_dialogue = cast(Optional[OefSearchDialogue], oef_search_dialogues.update(oef_search_msg))
        if (oef_search_dialogue is None):
            self._handle_unidentified_dialogue(oef_search_msg)
            return
        if (oef_search_msg.performative == OefSearchMessage.Performative.SUCCESS):
            self._handle_success(oef_search_msg, oef_search_dialogue)
        elif (oef_search_msg.performative == OefSearchMessage.Performative.OEF_ERROR):
            self._handle_error(oef_search_msg, oef_search_dialogue)
        else:
            self._handle_invalid(oef_search_msg, oef_search_dialogue)

    def teardown(self) -> None:
        """Tear down the handler; nothing to clean up."""

    def _handle_unidentified_dialogue(self, oef_search_msg: OefSearchMessage) -> None:
        """Log a message that could not be matched to any dialogue."""
        self.context.logger.info('received invalid oef_search message={}, unidentified dialogue.'.format(oef_search_msg))

    def _handle_success(self, oef_search_success_msg: OefSearchMessage, oef_search_dialogue: OefSearchDialogue) -> None:
        """Advance the SOEF registration chain (service -> genus -> classification)
        when a REGISTER_SERVICE step succeeds."""
        self.context.logger.info('received oef_search success message={} in dialogue={}.'.format(oef_search_success_msg, oef_search_dialogue))
        # Look up the request this SUCCESS replies to.
        target_message = cast(OefSearchMessage, oef_search_dialogue.get_message_by_id(oef_search_success_msg.target))
        if (target_message.performative == OefSearchMessage.Performative.REGISTER_SERVICE):
            description = target_message.service_description
            data_model_name = description.data_model.name
            registration_behaviour = cast(ServiceRegistrationBehaviour, self.context.behaviours.service)
            # Which registration just completed determines the next step to trigger.
            if ('location_agent' in data_model_name):
                registration_behaviour.register_service()
            elif ('set_service_key' in data_model_name):
                registration_behaviour.register_genus()
            elif (('personality_agent' in data_model_name) and (description.values['piece'] == 'genus')):
                registration_behaviour.register_classification()
            elif (('personality_agent' in data_model_name) and (description.values['piece'] == 'classification')):
                self.context.logger.info('the agent, with its genus and classification, and its service are successfully registered on the SOEF.')
            else:
                self.context.logger.warning(f'received soef SUCCESS message as a reply to the following unexpected message: {target_message}')

    def _handle_error(self, oef_search_error_msg: OefSearchMessage, oef_search_dialogue: OefSearchDialogue) -> None:
        """Record a failed REGISTER_SERVICE request so the behaviour can retry it."""
        self.context.logger.info('received oef_search error message={} in dialogue={}.'.format(oef_search_error_msg, oef_search_dialogue))
        target_message = cast(OefSearchMessage, oef_search_dialogue.get_message_by_id(oef_search_error_msg.target))
        if (target_message.performative == OefSearchMessage.Performative.REGISTER_SERVICE):
            registration_behaviour = cast(ServiceRegistrationBehaviour, self.context.behaviours.service)
            registration_behaviour.failed_registration_msg = target_message

    def _handle_invalid(self, oef_search_msg: OefSearchMessage, oef_search_dialogue: OefSearchDialogue) -> None:
        """Log a message with an unhandled performative."""
        self.context.logger.warning('cannot handle oef_search message of performative={} in dialogue={}.'.format(oef_search_msg.performative, oef_search_dialogue))
def upgrade():
    """Remove 'processing' from the ck_update_status enum, adding 'pending'.

    Uses the standard Postgres enum-swap recipe: rename the old type, create
    the replacement (which drops 'processing' and includes 'pending'), retype
    the column, then drop the old type. Rows still in 'processing' are moved
    to 'pending' first.
    """
    # End the currently open transaction — presumably so the ALTER TYPE /
    # BDR statements below run outside it; confirm against alembic usage.
    op.execute('COMMIT')
    try:
        # On BDR clusters DDL locking must be explicitly permitted; on plain
        # Postgres the SHOW raises ProgrammingError and the SET is skipped.
        op.execute('SHOW bdr.permit_ddl_locking')
        op.execute('SET LOCAL bdr.permit_ddl_locking = true')
    except exc.ProgrammingError:
        pass
    # Migrate in-flight rows before the new enum (which lacks 'processing') applies.
    op.execute("UPDATE updates SET status = 'pending' WHERE status = 'processing'")
    op.execute('ALTER TYPE ck_update_status RENAME TO ck_update_status_old')
    op.execute("CREATE TYPE ck_update_status AS ENUM('testing', 'side_tag_active', 'side_tag_expired', 'obsolete', 'stable', 'unpushed', 'pending')")
    op.execute('ALTER TABLE updates ALTER COLUMN status TYPE ck_update_status USING status::text::ck_update_status')
    op.execute('DROP TYPE ck_update_status_old')
class AuthView(APIView):
    """Session-auth endpoints: GET returns the current user, POST logs in.

    NOTE(review): the original had bare `_decorator(...)` calls where method
    decorators belong — evidently `@method_decorator(...)` with the leading
    `@method` stripped. Restored; confirm `method_decorator` is imported at
    the top of the file.
    """

    @method_decorator(ensure_csrf_cookie)
    def get(self, request):
        """Return the logged-in username (or None), setting the CSRF cookie."""
        if request.user.is_authenticated:
            username = request.user.username
        else:
            username = None
        return Response({'username': username})

    @method_decorator(csrf_protect)
    def post(self, request):
        """Authenticate the posted credentials; log in and redirect on success."""
        username = request.data['username']
        password = request.data['password']
        user = authenticate(username=username, password=password)
        if user is None:
            return Response({'error': 'incorrect credentials'})
        login(request, user)
        return redirect('/auth/')
def test_error_when_nan_introduced_during_transform():
    """Values outside the bin limits should warn (errors='ignore') or raise (errors='raise')."""
    rng = default_rng()
    # Strongly left-skewed sample shifted to start at zero, so test data drawn
    # from a standard normal will fall outside var_b's bin limits.
    skewed = skewnorm.rvs(a=-50, loc=4, size=100)
    skewed = skewed - min(skewed)
    train = pd.concat([pd.Series(rng.standard_normal(100)), pd.Series(skewed)], axis=1)
    train.columns = ['var_a', 'var_b']
    test = pd.concat([pd.Series(rng.standard_normal(100)), pd.Series(rng.standard_normal(100))], axis=1)
    test.columns = ['var_a', 'var_b']
    msg = 'During the discretisation, NaN values were introduced in the feature(s) var_b.'
    limits_dict = {'var_a': [-5, -2, 0, 2, 5], 'var_b': [0, 2, 5]}
    with pytest.warns(UserWarning) as record:
        transformer = ArbitraryDiscretiser(binning_dict=limits_dict, errors='ignore')
        transformer.fit(train)
        transformer.transform(test)
    assert len(record) == 1
    assert record[0].message.args[0] == msg
    with pytest.raises(ValueError) as record:
        transformer = ArbitraryDiscretiser(binning_dict=limits_dict, errors='raise')
        transformer.fit(train)
        transformer.transform(test)
    assert str(record.value) == msg
# NOTE(review): the fixture decorator arrived garbled (bare '()'); restored.
@pytest.fixture()
def sqs_client_and_queue():
    """Create an SQS queue against the LocalStack endpoint, yield (client, url),
    and delete the queue on teardown."""
    sqs = boto3.client('sqs', endpoint_url=LOCALSTACK_ENDPOINT)
    response = sqs.create_queue(QueueName='myqueue', Attributes={'MessageRetentionPeriod': '86400'})
    queue_url = response['QueueUrl']
    yield (sqs, queue_url)
    sqs.delete_queue(QueueUrl=queue_url)
def extractTraetranslationsWordpressCom(item):
    """Build a release message for a traetranslations.wordpress.com feed item.

    Returns None when the title carries no volume/chapter info or is a
    preview, False when no known tag matches, otherwise the built message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class flow_modify(flow_mod):
    """OpenFlow flow-mod message with the 'modify' command (generated-style code).

    NOTE(review): ``unpack`` takes ``reader`` as its first argument and never
    uses ``self`` — in comparable generated code it is a @staticmethod, so the
    decorator may have been lost here; confirm before calling
    ``flow_modify.unpack(reader)`` on an instance.
    """

    # Wire constants: protocol version 1, message type 14, flow-mod command 1.
    version = 1
    type = 14
    _command = 1

    def __init__(self, xid=None, match=None, cookie=None, idle_timeout=None, hard_timeout=None, priority=None, buffer_id=None, out_port=None, flags=None, actions=None):
        """Store each field, substituting defaults for None
        (match -> empty ofp.match(), numeric fields -> 0, actions -> [])."""
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (match != None):
            self.match = match
        else:
            self.match = ofp.match()
        if (cookie != None):
            self.cookie = cookie
        else:
            self.cookie = 0
        if (idle_timeout != None):
            self.idle_timeout = idle_timeout
        else:
            self.idle_timeout = 0
        if (hard_timeout != None):
            self.hard_timeout = hard_timeout
        else:
            self.hard_timeout = 0
        if (priority != None):
            self.priority = priority
        else:
            self.priority = 0
        if (buffer_id != None):
            self.buffer_id = buffer_id
        else:
            self.buffer_id = 0
        if (out_port != None):
            self.out_port = out_port
        else:
            self.out_port = 0
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (actions != None):
            self.actions = actions
        else:
            self.actions = []
        return

    def pack(self):
        """Serialize the message to its wire format.

        The length field (index 2) is packed as a zero placeholder first and
        backpatched once the total length of all parts is known.
        """
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, fixed up below
        packed.append(struct.pack('!L', self.xid))
        packed.append(self.match.pack())
        packed.append(struct.pack('!Q', self.cookie))
        packed.append(util.pack_fm_cmd(self._command))
        packed.append(struct.pack('!H', self.idle_timeout))
        packed.append(struct.pack('!H', self.hard_timeout))
        packed.append(struct.pack('!H', self.priority))
        packed.append(struct.pack('!L', self.buffer_id))
        packed.append(util.pack_port_no(self.out_port))
        packed.append(struct.pack('!H', self.flags))
        packed.append(loxi.generic_util.pack_list(self.actions))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a flow_modify from *reader*, asserting the fixed header fields."""
        obj = flow_modify()
        _version = reader.read('!B')[0]
        assert (_version == 1)
        _type = reader.read('!B')[0]
        assert (_type == 14)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length
        # (4 bytes of the header were already consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.match = ofp.match.unpack(reader)
        obj.cookie = reader.read('!Q')[0]
        __command = util.unpack_fm_cmd(reader)
        assert (__command == 1)
        obj.idle_timeout = reader.read('!H')[0]
        obj.hard_timeout = reader.read('!H')[0]
        obj.priority = reader.read('!H')[0]
        obj.buffer_id = reader.read('!L')[0]
        obj.out_port = util.unpack_port_no(reader)
        obj.flags = reader.read('!H')[0]
        obj.actions = loxi.generic_util.unpack_list(reader, ofp.action.action.unpack)
        return obj

    def __eq__(self, other):
        """Field-by-field equality against another flow_modify."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.match != other.match):
            return False
        if (self.cookie != other.cookie):
            return False
        if (self.idle_timeout != other.idle_timeout):
            return False
        if (self.hard_timeout != other.hard_timeout):
            return False
        if (self.priority != other.priority):
            return False
        if (self.buffer_id != other.buffer_id):
            return False
        if (self.out_port != other.out_port):
            return False
        if (self.flags != other.flags):
            return False
        if (self.actions != other.actions):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump of every field into pretty-printer *q*."""
        q.text('flow_modify {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('match = ')
                q.pp(self.match)
                q.text(',')
                q.breakable()
                q.text('cookie = ')
                q.text(('%#x' % self.cookie))
                q.text(',')
                q.breakable()
                q.text('idle_timeout = ')
                q.text(('%#x' % self.idle_timeout))
                q.text(',')
                q.breakable()
                q.text('hard_timeout = ')
                q.text(('%#x' % self.hard_timeout))
                q.text(',')
                q.breakable()
                q.text('priority = ')
                q.text(('%#x' % self.priority))
                q.text(',')
                q.breakable()
                q.text('buffer_id = ')
                q.text(('%#x' % self.buffer_id))
                q.text(',')
                q.breakable()
                q.text('out_port = ')
                q.text(util.pretty_port(self.out_port))
                q.text(',')
                q.breakable()
                q.text('flags = ')
                # Decode the flags bitmask into symbolic names for display.
                value_name_map = {1: 'OFPFF_SEND_FLOW_REM', 2: 'OFPFF_CHECK_OVERLAP', 4: 'OFPFF_EMERG'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('actions = ')
                q.pp(self.actions)
            q.breakable()
        q.text('}')
def get_registry_load_or_rename_extra_details(metadata, event, extra_detail_io, details_info):
    """Fill event details for RegLoadKey / RegRenameKey from the extra-detail buffer.

    RegLoadKey records the path as 'Hive Path'; RegRenameKey is recategorized
    as 'Write' and records it as 'New Name'.
    """
    new_path = read_detail_string(extra_detail_io, details_info['new_path_info'])
    operation = event.operation
    if operation == RegistryOperation.RegLoadKey.name:
        event.details['Hive Path'] = new_path
    elif operation == RegistryOperation.RegRenameKey.name:
        event.category = 'Write'
        event.details['New Name'] = new_path
class TestMultipleChoiceFieldHTMLFormRenderer(TestCase):
    """Rendering a MultipleChoiceField should mark only the matching option as selected."""

    def setUp(self):
        self.renderer = HTMLFormRenderer()

    def _render_with_selection(self, choices):
        # Build a one-field serializer, validate a payload selecting '12', render it.
        class TestSerializer(serializers.Serializer):
            test_field = serializers.MultipleChoiceField(choices=choices)

        serializer = TestSerializer(data={'test_field': ['12']})
        serializer.is_valid()
        return self.renderer.render(serializer.data)

    def test_render_selected_option_with_string_option_ids(self):
        result = self._render_with_selection((('1', 'Option1'), ('2', 'Option2'), ('12', 'Option12'), ('}', 'OptionBrace')))
        self.assertIsInstance(result, SafeText)
        # '12' is selected; '1' and '2' must not be (no substring confusion).
        self.assertInHTML('<option value="12" selected>Option12</option>', result)
        self.assertInHTML('<option value="1">Option1</option>', result)
        self.assertInHTML('<option value="2">Option2</option>', result)
        self.assertInHTML('<option value="}">OptionBrace</option>', result)

    def test_render_selected_option_with_integer_option_ids(self):
        result = self._render_with_selection(((1, 'Option1'), (2, 'Option2'), (12, 'Option12')))
        self.assertIsInstance(result, SafeText)
        self.assertInHTML('<option value="12" selected>Option12</option>', result)
        self.assertInHTML('<option value="1">Option1</option>', result)
        self.assertInHTML('<option value="2">Option2</option>', result)
# NOTE(review): the class and method decorators in this block arrived garbled
# (leading '@pytest.mark' / '@mock.patch' fragments stripped); restored below.
# Confirm `mock` (unittest.mock) is imported at module level.
@pytest.mark.unit_saas
class TestSaasConnector:
    """Unit tests for SaaSConnector error handling, unwrapping and traversal."""

    def test_handle_errored_response_ignore_errors(self):
        """An errored response with ignore_errors=True becomes an empty JSON body."""
        fake_request: SaaSRequest = SaaSRequest(path='test/path', method=HTTPMethod.GET, ignore_errors=True)
        fake_errored_response: Response = Response()
        fake_errored_response.status_code = HTTP_404_NOT_FOUND
        # Response._content holds raw bytes; the original assigned a str here,
        # inconsistent with the sibling tests below.
        fake_errored_response._content = b'an ugly plaintext error message'
        cleaned_response = SaaSConnector._handle_errored_response(fake_request, fake_errored_response)
        assert {} == cleaned_response.json()

    def test_handle_errored_response(self):
        """With ignore_errors=False the unparseable body surfaces as a JSON error."""
        fake_request: SaaSRequest = SaaSRequest(path='test/path', method=HTTPMethod.GET, ignore_errors=False)
        fake_errored_response: Response = Response()
        fake_errored_response.status_code = HTTP_404_NOT_FOUND
        fake_errored_response._content = b'an ugly plaintext error message'
        with pytest.raises(json.JSONDecodeError):
            cleaned_response = SaaSConnector._handle_errored_response(fake_request, fake_errored_response)
            cleaned_response.json()

    def test_handle_errored_response_good_response(self):
        """A 200 response passes through untouched even with ignore_errors=True."""
        fake_request: SaaSRequest = SaaSRequest(path='test/path', method=HTTPMethod.GET, ignore_errors=True)
        nested_field_key = 'nested_field'
        response_body = {'flat_field': 'foo', nested_field_key: {'nested_field1': 'nested_value1'}, 'array_field': ['array_value1', 'array_value2']}
        fake_errored_response: Response = Response()
        fake_errored_response.status_code = HTTP_200_OK
        fake_errored_response._content = str.encode(json.dumps(response_body))
        cleaned_response = SaaSConnector._handle_errored_response(fake_request, fake_errored_response)
        assert response_body == cleaned_response.json()

    def test_unwrap_response_data_with_data_path(self):
        """data_path extracts only the addressed sub-object from the body."""
        nested_field_key = 'nested_field'
        fake_request: SaaSRequest = SaaSRequest(path='test/path', method=HTTPMethod.GET, ignore_errors=True, data_path=nested_field_key)
        response_body = {'flat_field': 'foo', nested_field_key: {'nested_field1': 'nested_value1'}, 'array_field': ['array_value1', 'array_value2']}
        fake_response: Response = Response()
        fake_response.status_code = HTTP_200_OK
        fake_response._content = str.encode(json.dumps(response_body))
        unwrapped = SaaSConnector._unwrap_response_data(fake_request, fake_response)
        assert response_body[nested_field_key] == unwrapped

    def test_unwrap_response_data_no_data_path(self):
        """Without a data_path the whole body is returned."""
        fake_request: SaaSRequest = SaaSRequest(path='test/path', method=HTTPMethod.GET, ignore_errors=True)
        nested_field_key = 'nested_field'
        response_body = {'flat_field': 'foo', nested_field_key: {'nested_field1': 'nested_value1'}, 'array_field': ['array_value1', 'array_value2']}
        fake_response: Response = Response()
        fake_response.status_code = HTTP_200_OK
        fake_response._content = str.encode(json.dumps(response_body))
        unwrapped = SaaSConnector._unwrap_response_data(fake_request, fake_response)
        assert response_body == unwrapped

    def test_delete_only_endpoint(self, saas_example_config, saas_example_connection_config):
        """A collection with no read request yields a single empty row."""
        saas_config = SaaSConfig(**saas_example_config)
        graph = saas_config.get_graph(saas_example_connection_config.secrets)
        node = Node(graph, next((collection for collection in graph.collections if (collection.name == 'people'))))
        traversal_node = TraversalNode(node)
        connector: SaaSConnector = get_connector(saas_example_connection_config)
        assert connector.retrieve_data(traversal_node, Policy(), PrivacyRequest(id='123'), {}) == [{}]

    @mock.patch('fides.api.service.connectors.saas_connector.AuthenticatedClient.send')
    def test_input_values(self, mock_send: Mock, saas_example_config, saas_example_connection_config):
        """retrieve_data uses provided input values to fetch rows."""
        mock_send().json.return_value = {'conversation_messages': [{'id': '123', 'from_email': ''}]}
        saas_config = SaaSConfig(**saas_example_config)
        graph = saas_config.get_graph(saas_example_connection_config.secrets)
        node = Node(graph, next((collection for collection in graph.collections if (collection.name == 'messages'))))
        traversal_node = TraversalNode(node)
        connector: SaaSConnector = get_connector(saas_example_connection_config)
        privacy_request = PrivacyRequest(id='123')
        privacy_request.cache_identity(Identity(email=''))
        assert connector.retrieve_data(traversal_node, Policy(), privacy_request, {'fidesops_grouped_inputs': [], 'conversation_id': ['456']}) == [{'id': '123', 'from_email': ''}]

    def test_missing_input_values(self, saas_example_config, saas_example_connection_config):
        """Missing required input values yield no rows."""
        saas_config = SaaSConfig(**saas_example_config)
        graph = saas_config.get_graph(saas_example_connection_config.secrets)
        node = Node(graph, next((collection for collection in graph.collections if (collection.name == 'messages'))))
        traversal_node = TraversalNode(node)
        connector: SaaSConnector = get_connector(saas_example_connection_config)
        assert connector.retrieve_data(traversal_node, Policy(), PrivacyRequest(id='123'), {}) == []

    @mock.patch('fides.api.service.connectors.saas_connector.AuthenticatedClient.send')
    def test_grouped_input_values(self, mock_send: Mock, saas_example_config, saas_example_connection_config):
        """retrieve_data consumes fidesops_grouped_inputs when present."""
        mock_send().json.return_value = {'id': '123'}
        saas_config = SaaSConfig(**saas_example_config)
        graph = saas_config.get_graph(saas_example_connection_config.secrets)
        node = Node(graph, next((collection for collection in graph.collections if (collection.name == 'users'))))
        traversal_node = TraversalNode(node)
        connector: SaaSConnector = get_connector(saas_example_connection_config)
        assert connector.retrieve_data(traversal_node, Policy(), PrivacyRequest(id='123'), {'fidesops_grouped_inputs': [{'organization_slug': 'abc', 'project_slug': '123', 'query': ''}]}) == [{'id': '123'}]

    def test_missing_grouped_inputs_input_values(self, saas_example_config, saas_example_connection_config):
        """Missing grouped inputs yield no rows."""
        saas_config = SaaSConfig(**saas_example_config)
        graph = saas_config.get_graph(saas_example_connection_config.secrets)
        node = Node(graph, next((collection for collection in graph.collections if (collection.name == 'users'))))
        traversal_node = TraversalNode(node)
        connector: SaaSConnector = get_connector(saas_example_connection_config)
        assert connector.retrieve_data(traversal_node, Policy(), PrivacyRequest(id='123'), {}) == []

    @mock.patch('fides.api.service.connectors.saas_connector.AuthenticatedClient.send')
    def test_skip_missing_param_values_masking(self, mock_send: Mock, saas_example_config, saas_example_connection_config):
        """mask_data raises on unresolvable placeholders unless skip_missing_param_values is set."""
        mock_send().json.return_value = 1
        saas_config = SaaSConfig(**saas_example_config)
        graph = saas_config.get_graph(saas_example_connection_config.secrets)
        node = Node(graph, next((collection for collection in graph.collections if (collection.name == 'data_management'))))
        traversal_node = TraversalNode(node)
        connector: SaaSConnector = get_connector(saas_example_connection_config)
        assert connector.mask_data(traversal_node, Policy(), PrivacyRequest(id='123'), {'customer_id': 1}, {'phone_number': ''}) == 1
        # Inject a body referencing an unavailable placeholder (<test_val>).
        connector.endpoints['data_management'].requests.update.body = '{\n "unique_id": "<privacy_request_id>", "email": "<test_val>"\n}\n'
        with pytest.raises(ValueError):
            connector.mask_data(traversal_node, Policy(), PrivacyRequest(id='123'), {'customer_id': 1}, {'phone_number': ''})
        # With skip_missing_param_values the request is skipped instead of failing.
        connector.endpoints['data_management'].requests.update.skip_missing_param_values = True
        assert connector.mask_data(traversal_node, Policy(), PrivacyRequest(id='123'), {'customer_id': 1}, {'phone_number': ''}) == 0
class Command(BaseCommand):
    """Backfill empty awarding/funding agency links on transactions and their
    awards by re-resolving subtier/toptier agency code mappings."""

    help = 'Updates empty awarding and funding agency fields on transactions and awards due to subtier/toptier mapping'

    def update_awarding_funding_agency(self, fiscal_year=None, file_type=None, page=1, limit=500000):
        """Re-resolve awarding/funding agencies for one batch of transactions.

        Args:
            fiscal_year: fiscal year whose transactions are processed.
            file_type: 'D1' (contracts/FPDS) or 'D2' (assistance/FABS).
            page: 1-based batch number.
            limit: batch size.

        NOTE(review): this method previously lacked ``self`` although it is
        invoked as ``self.update_awarding_funding_agency(...)`` from
        ``handle``, which shifted every argument by one position.
        """
        offset = (page - 1) * limit
        range_low = offset
        range_high = offset + limit
        # D1 and D2 expose identically named columns; only the model differs.
        if file_type == 'D1':
            model = TransactionFPDS
        elif file_type == 'D2':
            model = TransactionFABS
        else:
            raise ValueError("file_type must be 'D1' or 'D2'")
        records = model.objects.filter(transaction__fiscal_year=fiscal_year).values(
            'transaction_id', 'awarding_agency_code', 'funding_agency_code',
            'awarding_sub_tier_agency_c', 'funding_sub_tier_agency_co')[range_low:range_high]
        transaction_cgac_subtier_map = [
            {
                'transaction_id': rec['transaction_id'],
                'awarding_toptier_code': rec['awarding_agency_code'],
                'funding_toptier_code': rec['funding_agency_code'],
                'awarding_subtier_code': rec['awarding_sub_tier_agency_c'],
                'funding_subtier_code': rec['funding_sub_tier_agency_co'],
            }
            for rec in records
        ]
        total_rows = len(transaction_cgac_subtier_map)
        logger.info('Processing ' + str(total_rows) + ' rows of transaction data')
        logger.info('Rows range from {} to {}'.format(range_low, range_high))
        index = 1
        start_time = datetime.now()
        for row in transaction_cgac_subtier_map:
            if not (index % 100):
                logger.info('Updating agencies: Loading row {} of {} ({})'.format(str(index), str(total_rows), datetime.now() - start_time))
            index += 1
            transaction = TransactionNormalized.objects.filter(id=row['transaction_id']).first()
            if transaction is None:
                logger.error('Unable to find Transaction {}'.format(str(row['transaction_id'])))
                continue
            # Try the exact (toptier, subtier) pair first, then fall back to
            # the toptier-only mapping.
            awarding_agency = agency_no_sub_map.get((row['awarding_toptier_code'], row['awarding_subtier_code']))
            if awarding_agency is None:
                awarding_agency = agency_cgac_only_map.get(row['awarding_toptier_code'])
            funding_agency = agency_no_sub_map.get((row['funding_toptier_code'], row['funding_subtier_code']))
            if funding_agency is None:
                funding_agency = agency_cgac_only_map.get(row['funding_toptier_code'])
            if awarding_agency is None and funding_agency is None:
                # Fix: the funding half of this message previously reused the
                # awarding subtier code.
                logger.error('Unable to find awarding agency CGAC {} Subtier {} and funding agency CGAC {} Subtier {}'.format(row['awarding_toptier_code'], row['awarding_subtier_code'], row['funding_toptier_code'], row['funding_subtier_code']))
                continue
            if awarding_agency is None:
                logger.error('Unable to find awarding agency for CGAC {} Subtier {}'.format(row['awarding_toptier_code'], row['awarding_subtier_code']))
            elif funding_agency is None:
                # Fix: this branch previously passed silently.
                logger.error('Unable to find funding agency for CGAC {} Subtier {}'.format(row['funding_toptier_code'], row['funding_subtier_code']))
            transaction.awarding_agency = awarding_agency
            transaction.funding_agency = funding_agency
            award = AwardSearch.objects.filter(award_id=transaction.award.id).first()
            if award is None:
                logger.error('Unable to find Award {}'.format(str(transaction.award.id)))
                continue
            # Fix: guard the ``.id`` access — previously an AttributeError
            # whenever only one of the two agencies resolved.
            if awarding_agency is not None:
                award.awarding_agency_id = awarding_agency.id
            if funding_agency is not None:
                award.funding_agency_id = funding_agency.id
            try:
                transaction.save()
                award.save()
            except Exception as e:
                logger.error('Unable to save Transaction {} and Award {}:{}'.format(str(transaction.id), str(award.id), str(e)))

    def add_arguments(self, parser):
        """Register CLI options for batching and data-type selection."""
        parser.add_argument('--fiscal_year', dest='fiscal_year', nargs='+', type=int, help='Year for which to run awarding agency clean up on')
        parser.add_argument('--assistance', action='store_true', dest='assistance', default=False, help='Runs the award only for Award Financial Assistance (Assistance) data')
        parser.add_argument('--contracts', action='store_true', dest='contracts', default=False, help='Runs the historical loader only for Award Procurement (Contract) data')
        parser.add_argument('--page', dest='page', nargs='+', type=int, help='Page for batching and parallelization')
        parser.add_argument('--limit', dest='limit', nargs='+', type=int, help='Limit for batching and parallelization')

    def handle(self, *args, **options):
        """Entry point: dispatch one D1/D2 batch to the updater."""
        logger.info('Starting updating awarding agencies...')
        fiscal_years = options.get('fiscal_year')
        if not fiscal_years:
            # Fix: previously a TypeError when --fiscal_year was omitted.
            logger.error('--fiscal_year is required')
            return
        fiscal_year = fiscal_years[0]
        page = options.get('page')
        limit = options.get('limit')
        page = page[0] if page else 1
        limit = limit[0] if limit else 500000
        if options.get('contracts', None):
            with timer('D1 (contracts/FPDS) awarding/funding agencies updates', logger.info):
                self.update_awarding_funding_agency(fiscal_year, 'D1', page=page, limit=limit)
        elif options.get('assistance', None):
            with timer('D2 (assistance/FABS) awarding/funding agencies updates', logger.info):
                self.update_awarding_funding_agency(fiscal_year, 'D2', page=page, limit=limit)
        else:
            logger.error('Not a valid data type: --assistance,--contracts')
        logger.info('Finished')
class InstructionLengthHandler(PipelineStage):
    """Pipeline stage that shortens overly complex instructions by splitting
    them into simpler temporary instructions, based on configured bounds."""

    name = 'instruction-length-handler'

    def __init__(self):
        # Complexity bounds are read from the task options in run().
        self._bounds: Optional[ComplexityBounds] = None

    def run(self, task: DecompilerTask):
        """Generate simplification targets from the task's syntax tree and
        rewrite each one in place."""
        self._bounds = ComplexityBounds.from_options(task.options)
        target_generator = TargetGenerator(task.syntax_tree, self._bounds)
        target_simplifier = TargetSimplifier()
        for target in target_generator.generate():
            target_simplifier.start_simplification(target)
            self.substitute_in_node(target)

    def substitute_in_node(self, target: Target):
        """Insert the temporary instructions produced for *target* before the
        original instruction inside its code node.

        Raises:
            Exception: if the target's node type is not supported.

        Fix: this method was missing ``self`` although it is invoked as
        ``self.substitute_in_node(target)`` from run().
        """
        if isinstance(target.node, CodeNode):
            target.node.insert_instruction_list_before(target.get_tmp_instructions(), target.instruction)
        else:
            raise Exception(f'inserting into {type(target.node)} is not implemented')
class ResourceSpecSchema(StrictSchema):
    """Marshmallow schema validating a resource specification.

    NOTE(review): field semantics below are inferred from the field names —
    confirm against the framework that consumes this schema.
    """

    # Name identifying the resource spec (required).
    name = ma.fields.String(required=True)
    # Operator type used to create the resource (required).
    create_operator_type = ma.fields.String(required=True)
    # Optional operator type used to tear the resource down.
    destroy_operator_type = ma.fields.String()
    # Argument names this resource provides to dependents (required).
    provides_args = ma.fields.List(ma.fields.String(), required=True)
    # Optional flag; presumably disables the sentinel node for this resource.
    disable_sentinel_node = ma.fields.Boolean()
def import_petromod(mfile, **_):
    """Import a Petromod binary surface file.

    Reads the header on a first pass to learn grid geometry and the undefined
    marker, then reads the value array on a second pass.

    Args:
        mfile: file wrapper providing ``get_cfhandle()``/``cfclose()``.

    Returns:
        dict of surface construction arguments (ncol, nrow, xori, yori,
        xinc, yinc, rotation, values).
    """
    cfhandle = mfile.get_cfhandle()
    logger.info('Enter function %s', __name__)
    # First pass (mode 0): fetch only the header description string.
    dsc, _dummy = _cxtgeo.surf_import_petromod_bin(cfhandle, 0, 0.0, 0, 0, 0)

    args = {}
    rota_xori = 0
    rota_yori = 0
    undef = 999999.0  # default "undefined" marker if header omits it

    # Header keys mapped to surface argument names, by value type.
    int_keys = {'GridNoX': 'ncol', 'GridNoY': 'nrow'}
    float_keys = {
        'OriginX': 'xori',
        'OriginY': 'yori',
        'GridStepX': 'xinc',
        'GridStepY': 'yinc',
        'RotationAngle': 'rotation',
    }
    for field in dsc.split(','):
        key, value = field.split('=')
        if key in int_keys:
            args[int_keys[key]] = int(value)
        elif key in float_keys:
            args[float_keys[key]] = float(value)
        elif key == 'RotationOriginX':
            rota_xori = float(value)
        elif key == 'RotationOriginY':
            rota_yori = float(value)
        elif key == 'Undefined':
            undef = float(value)
        # Unknown header keys are ignored, as before.

    if args['rotation'] != 0.0 and (rota_xori != args['xori'] or rota_yori != args['yori']):
        # Fix: message previously said the origins "do match" although this
        # branch fires exactly when they differ.
        xtg.warnuser('Rotation origin and data origin do not match')

    # Second pass (mode 1): read the actual value array.
    dsc, values = _cxtgeo.surf_import_petromod_bin(cfhandle, 1, undef, args['ncol'], args['nrow'], args['ncol'] * args['nrow'])
    values = np.ma.masked_greater(values, xtgeo.UNDEF_LIMIT)
    args['values'] = values.reshape(args['ncol'], args['nrow'])
    mfile.cfclose()
    return args
class Migration(migrations.Migration):
    """Initial schema for the reversion test app: a family of models
    exercising versioning (inlines, natural keys, through-tables, generic
    inlines, inheritance, unique constraints)."""

    initial = True

    # Requires the squashed reversion migrations and contenttypes (for the
    # generic-inline content_type FK).
    dependencies = [('reversion', '0001_squashed_0004_auto__1202'), ('contenttypes', '0002_remove_content_type_name')]

    operations = [
        # Basic versioned model.
        migrations.CreateModel(name='TestModel', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(default='v1', max_length=191))]),
        # Model whose primary key is a string needing URL-escaping in tests.
        migrations.CreateModel(name='TestModelEscapePK', fields=[('name', models.CharField(max_length=191, primary_key=True, serialize=False))]),
        # Inline related to TestModel.
        migrations.CreateModel(name='TestModelInline', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('inline_name', models.CharField(default='v1', max_length=191)), ('test_model', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='test_app.testmodel'))]),
        migrations.CreateModel(name='TestModelRelated', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(default='v1', max_length=191))]),
        migrations.CreateModel(name='TestModelWithNaturalKey', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(default='v1', max_length=191))]),
        # Multi-table inheritance child of TestModel.
        migrations.CreateModel(name='TestModelParent', fields=[('testmodel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='test_app.testmodel')), ('parent_name', models.CharField(default='parent v1', max_length=191))], bases=('test_app.testmodel',)),
        # Explicit through-table for the M2M below.
        migrations.CreateModel(name='TestModelThrough', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(default='v1', max_length=191)), ('test_model', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='test_app.testmodel')), ('test_model_related', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='test_app.testmodelrelated'))]),
        migrations.CreateModel(name='TestModelNestedInline', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('nested_inline_name', models.CharField(default='v1', max_length=191)), ('test_model_inline', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='test_app.testmodelinline'))]),
        migrations.CreateModel(name='TestModelInlineByNaturalKey', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('test_model', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='test_app.testmodelwithnaturalkey'))]),
        # Generic (contenttypes) inline.
        migrations.CreateModel(name='TestModelGenericInline', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('object_id', models.IntegerField()), ('inline_name', models.CharField(default='v1', max_length=191)), ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype'))]),
        # M2M fields added after both ends exist.
        migrations.AddField(model_name='testmodel', name='related', field=models.ManyToManyField(blank=True, related_name='_testmodel_related_+', to='test_app.TestModelRelated')),
        migrations.AddField(model_name='testmodel', name='related_through', field=models.ManyToManyField(blank=True, related_name='_testmodel_related_through_+', through='test_app.TestModelThrough', to='test_app.TestModelRelated')),
        # Model attached directly to a reversion Revision (revision metadata).
        migrations.CreateModel(name='TestMeta', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=191)), ('revision', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='reversion.revision'))]),
        migrations.CreateModel(name='TestModelWithUniqueConstraint', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=191, unique=True))]),
    ]
def is_string(value, min=None, max=None):
    """Validate that *value* is a string, optionally bounded in length.

    Args:
        value: the value to check.
        min: minimum allowed length (inclusive), or None.
        max: maximum allowed length (inclusive), or None.

    Returns:
        The value unchanged, when valid.

    Raises:
        VdtTypeError: if value is not a str.
        VdtValueTooShortError / VdtValueTooLongError: on length violations.
    """
    if not isinstance(value, str):
        raise VdtTypeError(value)
    min_len, max_len = _is_num_param(('min', 'max'), (min, max))
    # Fix: the previous try/except TypeError around len() was dead code —
    # value is already proven to be a str, which always supports len().
    num_members = len(value)
    if min_len is not None and num_members < min_len:
        raise VdtValueTooShortError(value)
    if max_len is not None and num_members > max_len:
        raise VdtValueTooLongError(value)
    return value
class TestLabTest(FrappeTestCase):
    """Integration tests for the Lab Test doctype, its template, sample
    collection, and creation from invoices/encounters."""

    def test_lab_test_item(self):
        """Creating a template creates a linked Item with an Item Price at the
        template's rate; disabling the template disables the Item."""
        lab_template = create_lab_test_template()
        self.assertTrue(frappe.db.exists('Item', lab_template.item))
        self.assertEqual(frappe.db.get_value('Item Price', {'item_code': lab_template.item}, 'price_list_rate'), lab_template.lab_test_rate)
        lab_template.disabled = 1
        lab_template.save()
        self.assertEqual(frappe.db.get_value('Item', lab_template.item, 'disabled'), 1)
        # Re-enable so later tests can reuse the same template.
        lab_template.reload()
        lab_template.disabled = 0
        lab_template.save()

    def test_descriptive_lab_test(self):
        """Submitting with an incomplete descriptive result (item 1 left
        blank) must fail validation."""
        lab_template = create_lab_test_template()
        lab_test = create_lab_test(lab_template)
        lab_test.descriptive_test_items[0].result_value = 12
        lab_test.descriptive_test_items[2].result_value = 1
        lab_test.save()
        self.assertRaises(frappe.ValidationError, lab_test.submit)

    def test_sample_collection(self):
        """A Sample Collection is created only when the Healthcare Settings
        flag is on; with the flag off, the lab test has no sample."""
        frappe.db.set_value('Healthcare Settings', 'Healthcare Settings', 'create_sample_collection_for_lab_test', 1)
        lab_template = create_lab_test_template()
        lab_test = create_lab_test(lab_template)
        lab_test.descriptive_test_items[0].result_value = 12
        lab_test.descriptive_test_items[1].result_value = 1
        lab_test.descriptive_test_items[2].result_value = 2.3
        lab_test.save()
        self.assertTrue(frappe.db.exists('Sample Collection', {'sample': lab_template.sample}))
        # Flag off: no sample attached to a newly created lab test.
        frappe.db.set_value('Healthcare Settings', 'Healthcare Settings', 'create_sample_collection_for_lab_test', 0)
        lab_test = create_lab_test(lab_template)
        lab_test.descriptive_test_items[0].result_value = 12
        lab_test.descriptive_test_items[1].result_value = 1
        lab_test.descriptive_test_items[2].result_value = 2.3
        lab_test.save()
        lab_test.reload()
        self.assertEqual(lab_test.sample, None)

    def test_create_lab_tests_from_sales_invoice(self):
        """create_multiple() backfills reference_dn on invoice items."""
        sales_invoice = create_sales_invoice()
        create_multiple('Sales Invoice', sales_invoice.name)
        sales_invoice.reload()
        self.assertIsNotNone(sales_invoice.items[0].reference_dn)
        self.assertIsNotNone(sales_invoice.items[1].reference_dn)

    def test_create_lab_tests_from_patient_encounter(self):
        """Every open lab-test Service Request of the encounter gets a
        corresponding Lab Test after create_multiple()."""
        patient_encounter = create_patient_encounter()
        create_multiple('Patient Encounter', patient_encounter.name)
        patient_encounter.reload()
        service_requests = frappe.db.get_list('Service Request', filters={'order_group': patient_encounter.name, 'status': ['!=', 'Completed'], 'template_dt': 'Lab Test Template'}, fields=['name'])
        # NOTE(review): if no service requests exist this test passes
        # vacuously — confirm the fixture always creates at least one.
        if service_requests:
            for service_request in service_requests:
                self.assertTrue(frappe.db.exists('Lab Test', {'service_request': service_request.get('name')}))
class HostType(object):
    """Identifies a host OS as an (ostype, distro, distrovers) triple.

    When constructed with ``ostype=None`` the host is auto-detected from
    ``sys.platform`` (and ``get_linux_type()`` on Linux).
    """

    def __init__(self, ostype=None, distro=None, distrovers=None) -> None:
        if ostype is None:
            # Auto-detect: any caller-supplied distro info is discarded.
            distro = None
            distrovers = None
            if sys.platform.startswith('linux'):
                (ostype, distro, distrovers) = get_linux_type()
            elif sys.platform.startswith('darwin'):
                ostype = 'darwin'
            elif is_windows():
                ostype = 'windows'
                distrovers = str(sys.getwindowsversion().major)
            elif sys.platform.startswith('freebsd'):
                ostype = 'freebsd'
            else:
                ostype = sys.platform
        self.ostype = ostype
        self.distro = distro
        self.distrovers = distrovers
        machine = platform.machine().lower()
        self.isarm = ('arm' in machine) or ('aarch' in machine)

    def is_windows(self):
        return self.ostype == 'windows'

    def is_arm(self):
        return self.isarm

    def is_darwin(self):
        return self.ostype == 'darwin'

    def is_linux(self):
        return self.ostype == 'linux'

    def is_freebsd(self):
        return self.ostype == 'freebsd'

    def as_tuple_string(self) -> str:
        """Serialize as 'ostype-distro-distrovers'; missing parts -> 'none'."""
        return '%s-%s-%s' % (self.ostype, self.distro or 'none', self.distrovers or 'none')

    def get_package_manager(self):
        """Return the native package manager name for this host, or None."""
        if not self.is_linux() and not self.is_darwin():
            return None
        if self.is_darwin():
            return 'homebrew'
        # Fix: guard against a Linux host with unknown distro (previously an
        # AttributeError on None.startswith).
        if self.distro is None:
            return None
        if self.distro in ('fedora', 'centos', 'centos_stream'):
            return 'rpm'
        if self.distro.startswith(('debian', 'ubuntu')):
            return 'deb'
        return None

    @staticmethod
    def from_tuple_string(s) -> 'HostType':
        """Inverse of as_tuple_string().

        Fix: was missing @staticmethod, so calls on an instance would break.
        """
        (ostype, distro, distrovers) = s.split('-')
        return HostType(ostype=ostype, distro=distro, distrovers=distrovers)

    def __eq__(self, b):
        # Fix: return NotImplemented for foreign types instead of raising
        # AttributeError on attribute access.
        if not isinstance(b, HostType):
            return NotImplemented
        return (self.ostype == b.ostype) and (self.distro == b.distro) and (self.distrovers == b.distrovers)
def main(args_=None):
    """Entry point: scan source paths for imports, compare them against the
    declared dependencies, and report the unused ones.

    Returns 1 when unused dependencies are found (or, with the
    fail-excluded-and-not-installed feature enabled, when excluded
    dependencies are missing from the venv); otherwise 0.
    """
    args, default_config = parse_args(args_)
    formatters.configure_logger(verbose=args.verbose, format_=args.format)

    # Debug breadcrumbs for reproducing a run.
    logger.debug(f'Creosote version: {__version__}')
    logger.debug(f"Command: creosote {' '.join(sys.argv[1:])}")
    logger.debug(f'Default configuration (may have loaded pyproject.toml): {default_config}')
    logger.debug(f'Arguments: {args}')

    if args.features:
        logger.info(f"Feature(s) enabled: {', '.join(args.features)}")

    imports = parsers.get_module_names_from_code(args.paths)
    reader = parsers.DependencyReader(deps_file=args.deps_file, sections=args.sections, exclude_deps=args.exclude_deps)
    declared = reader.read()
    excluded_but_missing = parsers.get_excluded_deps_which_are_not_installed(excluded_deps=args.exclude_deps, venvs=args.venvs)

    # Excluded deps never count as "unused".
    candidates = list(set(declared) - set(args.exclude_deps))
    resolver = resolvers.DepsResolver(imports=imports, dependency_names=candidates, venvs=args.venvs)
    unused = resolver.resolve_unused_dependency_names()
    formatters.print_results(unused_dependency_names=unused, format_=args.format)

    if unused:
        return 1
    if excluded_but_missing and (Features.FAIL_EXCLUDED_AND_NOT_INSTALLED.value in args.features):
        return 1
    return 0
class Handler(HasPrivateTraits):
    """Provides access to and control over the state of a traits-based user
    interface.  Subclasses override these hooks to customize UI lifecycle,
    button handling, and docking behavior."""

    def init_info(self, info):
        """Informs the handler what the UIInfo object for a View will be.
        Called before the UI's controls are built."""
        pass

    def init(self, info: UIInfo) -> bool:
        """Initializes the controls of a user interface.

        Returns True to allow UI creation to proceed.
        """
        return True

    def position(self, info):
        """Positions a dialog-based user interface on the display."""
        toolkit().position(info.ui)

    def close(self, info, is_ok):
        """Handles the user attempting to close a window.

        Returns True to allow the window to close.
        """
        return True

    def closed(self, info, is_ok):
        """Handles a window being closed (after the UI is destroyed)."""
        return

    def revert(self, info):
        """Handles the **Revert** button being clicked."""
        return

    def apply(self, info):
        """Handles the **Apply** button being clicked."""
        return

    def show_help(self, info, control=None):
        """Shows help for the view, anchored at *control* (defaults to the
        UI's root control)."""
        if control is None:
            control = info.ui.control
        on_help_call()(info, control)

    def perform(self, info, action, event):
        """Performs *action*: dispatch to the first matching handler method
        found on self or the UI context objects, else to action.perform()."""
        if action.action != '':
            method_name = action.action
        else:
            method_name = '_{}_clicked'.format(action.name.lower())
        for handler_obj in self.get_perform_handlers(info):
            method = getattr(handler_obj, method_name, None)
            if method is not None:
                # A method taking only ``self`` is called without ``info``.
                specification = getfullargspec(method)
                if len(specification.args) == 1:
                    method()
                else:
                    method(info)
                return
        # No handler method found anywhere: delegate to the action itself.
        specification = getfullargspec(action.perform)
        if len(specification.args) == 1:
            action.perform()
        else:
            action.perform(event)

    def get_perform_handlers(self, info):
        """Returns the objects (self plus the 'object'/'model' context
        members, when present) that may handle an action."""
        handlers = [self]
        if info is not None:
            additional_objects = ['object', 'model']
            handlers += [info.ui.context[name] for name in additional_objects if name in info.ui.context]
        return handlers

    def setattr(self, info, object, name, value):
        """Sets *name* on *object*; override to customize editor writes."""
        setattr(object, name, value)

    def trait_view_for(self, info, view, object, object_name, trait_name):
        """Resolves the View to use for *trait_name* of *object*, trying
        progressively less specific view names before falling back to the
        object's own default view."""
        # An actual ViewElement needs no lookup.
        if isinstance(view, ViewElement):
            return view
        klass = object.__class__.__name__
        cname = '%s_%s' % (object_name, trait_name)
        aview = ''
        if view:
            aview = '_' + view
        # Most specific name first.
        names = ['%s_%s%s' % (cname, klass, aview), '%s%s' % (cname, aview), '%s%s' % (klass, aview)]
        if view:
            names.append(view)
        for name in names:
            result = self.trait_view(name)
            if result is not None:
                return result
            # Allow a dynamic trait_view_for_<name>(info, object) factory.
            method = getattr(self, 'trait_view_for_%s' % name, None)
            if callable(method):
                result = method(info, object)
                if result is not None:
                    return result
        return object.trait_view(view) or object.trait_view()

    def can_drop(self, info, object):
        """Returns whether *object* may be dropped onto the view."""
        from pyface.dock.api import DockControl

        if isinstance(object, DockControl):
            return self.can_import(info, object.export)
        drop_class = info.ui.view.drop_class
        return (drop_class is not None) and isinstance(object, drop_class)

    def can_import(self, info, category):
        """Returns whether the view accepts imports of *category*."""
        return category in info.ui.view.imports

    def dock_control_for(self, info, parent, object):
        """Returns a DockControl wrapping *object* for docking into *parent*."""
        from pyface.dock.api import IDockable, DockControl
        from .dockable_view_element import DockableViewElement

        # Derive a display name; fall back to the class-based user name.
        # Narrowed from bare except: attribute access is what can fail here.
        try:
            name = object.name
        except Exception:
            try:
                name = object.label
            except Exception:
                name = ''
        if len(name) == 0:
            name = user_name_for(object.__class__.__name__)
        image = None
        export = ''
        if isinstance(object, DockControl):
            dock_control = object
            image = dock_control.image
            export = dock_control.export
            dockable = dock_control.dockable
            close = dockable.dockable_should_close()
            if close:
                dock_control.close(force=True)
            control = dockable.dockable_get_control(parent)
            # If the DockControl was closed, rebind it to the new control.
            if close:
                dock_control.trait_set(control=control, style=parent.owner.style)
                dockable.dockable_init_dockcontrol(dock_control)
            return dock_control
        elif isinstance(object, IDockable):
            dockable = object
            control = dockable.dockable_get_control(parent)
        else:
            ui = object.get_dockable_ui(parent)
            dockable = DockableViewElement(ui=ui)
            export = ui.view.export
            control = ui.control
        dc = DockControl(control=control, name=name, export=export, style=parent.owner.style, image=image, closeable=True)
        dockable.dockable_init_dockcontrol(dc)
        return dc

    def open_view_for(self, control, use_mouse=True):
        """Opens *control* in a new floating DockWindowShell."""
        from pyface.dock.api import DockWindowShell

        DockWindowShell(control, use_mouse=use_mouse)

    def dock_window_empty(self, dock_window):
        """Handles a DockWindow becoming empty: destroy its parent when it
        is marked auto-close."""
        if dock_window.auto_close:
            # Fix: GetParent is a method; it previously was not called, so
            # Destroy() was invoked on the bound method object.
            dock_window.control.GetParent().Destroy()

    def edit_traits(self, view=None, parent=None, kind=None, context=None, handler=None, id='', scrollable=None, **args):
        """Edits the object's traits, defaulting context and handler to self."""
        if context is None:
            context = self
        if handler is None:
            handler = self
        return self.trait_view(view).ui(context, parent, kind, self.trait_view_elements(), handler, id, scrollable, args)

    def configure_traits(self, filename=None, view=None, kind=None, edit=True, context=None, handler=None, id='', scrollable=None, **args):
        """Configures the object's traits, defaulting the handler to self."""
        return super().configure_traits(filename, view, kind, edit, context, handler or self, id, scrollable, **args)

    def _on_undo(self, info):
        """Handles an Undo request via the UI's history."""
        if info.ui.history is not None:
            info.ui.history.undo()

    def _on_redo(self, info):
        """Handles a Redo request via the UI's history."""
        if info.ui.history is not None:
            info.ui.history.redo()

    def _on_revert(self, info):
        """Handles a Revert request: roll back history, then notify."""
        if info.ui.history is not None:
            info.ui.history.revert()
        self.revert(info)

    def _on_close(self, info):
        """Handles a Close request, honoring the close() veto hook."""
        if (info.ui.owner is not None) and self.close(info, True):
            info.ui.owner.close()
def test_zero_encoding_for_new_categories():
    """Categories unseen during fit are encoded as 0 rather than NaN."""
    train = pd.DataFrame({'col1': ['a', 'a', 'b', 'a', 'c'], 'col2': ['1', '2', '3', '1', '2']})
    unseen = pd.DataFrame({'col1': ['a', 'd', 'b', 'a', 'c'], 'col2': ['1', '2', '3', '1', '4']})

    encoder = CountFrequencyEncoder(unseen='encode').fit(train)
    transformed = encoder.transform(unseen)

    # No missing values: unseen categories must be filled with zeros.
    assert pd.isnull(transformed).sum().sum() == 0
    expected = pd.DataFrame({'col1': [3, 0, 1, 3, 1], 'col2': [2, 2, 1, 2, 0]})
    pd.testing.assert_frame_equal(transformed, expected, check_dtype=False)
class Test_Worker:
    """Unit tests for :class:`faust.Worker`.

    NOTE(review): the pytest decorators in this class had been mangled into
    bare ``.asyncio``/``.parametrize(...)`` expressions (syntax errors) and
    the ``worker`` fixture had lost its ``@pytest.fixture`` decorator; they
    are restored here.
    """

    @pytest.fixture
    def worker(self, app):
        """A fresh Worker bound to the test app."""
        return Worker(app)

    def test_constructor(self, app):
        w = Worker(app)
        assert w.app is app
        assert w.sensors == set()
        assert w.workdir == Path.cwd()
        assert isinstance(w.spinner, terminal.Spinner)
        w2 = Worker(app, redirect_stdouts=False)
        assert not w2.redirect_stdouts
        w3 = Worker(app, redirect_stdouts_level='DEBUG')
        assert w3.redirect_stdouts_level == 10
        w4 = Worker(app, logging_config={'foo': 1})
        assert w4.logging_config == {'foo': 1}

    def test_set_sensors(self, app):
        assert Worker(app, sensors=[1, 2]).sensors == {1, 2}

    def test_set_workdir(self, app):
        assert Worker(app, workdir='/foo').workdir == Path('/foo')

    @pytest.mark.asyncio
    async def test_on_start(self, worker):
        await worker.on_start()

    @pytest.mark.asyncio
    async def test_on_siginit(self, worker):
        with warnings.catch_warnings():
            with patch('asyncio.ensure_future') as ensure_future:
                worker._on_sigint()
                assert worker._shutdown_immediately
                assert worker.spinner.stopped
                ensure_future.assert_called_with(CoroEq(worker._stop_on_signal), loop=worker.loop)
                # Cancel the scheduled coroutine so it does not leak.
                coro = ensure_future.call_args[0][0]
                asyncio.ensure_future(coro).cancel()

    @pytest.mark.asyncio
    async def test_on_siginit__no_spinner(self, worker, loop):
        worker.spinner = None
        with patch('asyncio.ensure_future') as ensure_future:
            worker._on_sigint()
            coro = ensure_future.call_args[0][0]
            asyncio.ensure_future(coro, loop=loop).cancel()

    @pytest.mark.asyncio
    async def test__on_sigterm(self, worker):
        with patch('asyncio.ensure_future') as ensure_future:
            worker._on_sigterm()
            assert worker._shutdown_immediately
            assert worker.spinner.stopped
            ensure_future.assert_called_with(CoroEq(worker._stop_on_signal), loop=worker.loop)
            coro = ensure_future.call_args[0][0]
            asyncio.ensure_future(coro).cancel()

    @pytest.mark.asyncio
    async def test_on_startup_finished__shutdown_requested(self, worker):
        worker._shutdown_immediately = True
        worker._on_shutdown_immediately = Mock(name='on_shutdown_immediately')
        await worker.on_startup_finished()
        worker._on_shutdown_immediately.assert_called_once_with()

    @pytest.mark.asyncio
    async def test_on_startup_finished(self, worker):
        worker.maybe_start_blockdetection = AsyncMock(name='maybe_start_block')
        worker._on_startup_end_spinner = Mock(name='on_startup_end_spinner')
        await worker.on_startup_finished()
        worker.maybe_start_blockdetection.assert_called_once_with()
        worker._on_startup_end_spinner.assert_called_once_with()

    def test_on_startup_end_spinner(self, worker):
        spinner = worker.spinner = Mock(name='spinner', autospec=terminal.Spinner)
        spinner.file.isatty.return_value = True
        worker.say = Mock(name='say')
        worker._on_startup_end_spinner()
        spinner.finish.assert_called_once_with()
        worker.say.assert_called_once_with(' ')

    def test_on_startup_end_spinner__no_spinner(self, worker):
        worker.spinner = None
        worker.log = Mock(name='log', spec=CompositeLogger)
        worker._on_startup_end_spinner()
        worker.log.info.assert_called_once_with('Ready')

    def test_on_startup_end_spinner__notatty(self, worker):
        spinner = worker.spinner = Mock(name='spinner', autospec=terminal.Spinner)
        spinner.file.isatty.return_value = False
        worker.say = Mock(name='say')
        worker._on_startup_end_spinner()
        spinner.finish.assert_called_once_with()
        worker.say.assert_called_once_with(' OK ^')

    def test_on_shutdown_immediately(self, worker):
        worker.say = Mock(name='say')
        worker._on_shutdown_immediately()
        worker.say.assert_called_once_with('')

    def test_on_init_dependencies(self, worker, app):
        app.beacon = Mock(name='app.beacon', autospec=Node)
        deps = worker.on_init_dependencies()
        assert list(deps) == (list(worker.services) + [app])
        app.beacon.reattach.assert_called_once_with(worker.beacon)
        assert app.on_startup_finished == worker.on_startup_finished

    def test_on_init_dependencies__sensors_to_app(self, worker, app):
        s1 = Mock(name='S1', autospec=Sensor)
        s2 = Mock(name='S2', autospec=Sensor)
        worker.sensors = {s1, s2}
        worker.on_init_dependencies()
        assert app.sensors._sensors.issubset(worker.sensors)

    @pytest.mark.asyncio
    async def test_on_first_start(self, worker):
        worker.change_workdir = Mock(name='change_workdir')
        worker.autodiscover = Mock(name='autodiscover')
        worker.default_on_first_start = AsyncMock(name='on_first_start')
        await worker.on_first_start()
        worker.change_workdir.assert_called_once_with(worker.workdir)
        worker.default_on_first_start.assert_called_once_with()
        worker.autodiscover.assert_called_once_with()

    def test_change_workdir(self, worker):
        with patch('os.chdir') as chdir:
            p = Path('baz')
            worker.change_workdir(p)
            chdir.assert_called_once_with(p.absolute())

    def test_change_workdir__already_cwd(self, worker):
        with patch('os.chdir') as chdir:
            p = Path.cwd()
            worker.change_workdir(p)
            chdir.assert_not_called()

    def test_autodiscover(self, worker):
        worker.app.conf.autodiscover = True
        worker.app.discover = Mock(name='discover')
        worker.autodiscover()
        worker.app.discover.assert_called_once_with()

    def test_autodiscover__disabled(self, worker):
        worker.app.conf.autodiscover = False
        worker.app.discover = Mock(name='discover')
        worker.autodiscover()
        worker.app.discover.assert_not_called()

    def test_setproctitle(self, worker, app):
        with patch('faust.worker.setproctitle') as setproctitle:
            worker._setproctitle('foo')
            setproctitle.assert_called_with(f'[Faust:Worker] -foo- testid -p {app.conf.web_port} {app.conf.datadir.absolute()}')

    def test_proc_ident(self, worker, app):
        assert worker._proc_ident() == f'testid -p {app.conf.web_port} {app.conf.datadir.absolute()}'

    def test_proc_web_ident__unix(self, worker, app):
        worker.app.conf.web_transport = URL('unix:')
        assert worker._proc_web_ident() == str(URL('unix:'))

    def test_proc_web_ident__tcp(self, worker):
        worker.app.conf.web_transport_scheme = 'tcp'
        assert worker._proc_web_ident() == '-p 6066'

    def test_on_worker_shutdown(self, worker):
        worker.spinner = None
        worker._say = Mock(name='say')
        worker.on_worker_shutdown()
        worker.spinner = Mock(name='spinner')
        worker.on_worker_shutdown()
        worker.spinner.reset.assert_called_once_with()

    @pytest.mark.asyncio
    async def test_on_execute(self, worker):
        worker._setproctitle = Mock(name='setproctitle')
        worker.spinner = Mock(name='spinner', autospec=terminal.Spinner)
        worker._say = Mock(name='say')
        await worker.on_execute()
        worker._setproctitle.assert_called_with('init')
        worker._say.assert_called_with('starting ', end='', flush=True)
        # Also exercise the no-spinner path.
        worker.spinner = None
        await worker.on_execute()

    def test_on_setup_root_logger(self, worker):
        worker._disable_spinner_if_level_below_WARN = Mock(name='dd')
        worker._setup_spinner_handler = Mock(name='ss')
        logger = Mock(name='logger', autospec=logging.Logger)
        worker.on_setup_root_logger(logger, logging.INFO)
        worker._disable_spinner_if_level_below_WARN.assert_called_with(logging.INFO)
        worker._setup_spinner_handler.assert_called_with(logger, logging.INFO)

    @pytest.mark.parametrize('loglevel,expected', [
        (None, True),
        (logging.CRITICAL, True),
        (logging.ERROR, True),
        (logging.WARN, True),
        (logging.INFO, False),
        (logging.DEBUG, False),
    ])
    def test_disable_spinner_if_level_below_WARN(self, loglevel, expected, worker):
        worker._disable_spinner_if_level_below_WARN(loglevel)
        if expected:
            assert worker.spinner
        else:
            assert worker.spinner is None

    def test_setup_spinner_handler(self, worker):
        logger = Mock(name='logger', autospec=logging.Logger)
        logger.handlers = [Mock(name='handler', autospec=logging.Handler)]
        with patch('faust.utils.terminal.SpinnerHandler') as SpinnerHandler:
            worker._setup_spinner_handler(logger, logging.INFO)
            logger.handlers[0].setLevel.assert_called_with(logging.INFO)
            SpinnerHandler.assert_called_once_with(worker.spinner, level=logging.DEBUG)
            logger.addHandler.assert_called_once_with(SpinnerHandler())
            logger.setLevel.assert_called_once_with(logging.DEBUG)

    def test_setup_spinner_handler__when_no_spinner(self, worker):
        worker.spinner = None
        worker._setup_spinner_handler(Mock(name='logger', autospec=logging.Logger), logging.INFO)
class OptionPlotoptionsPieOnpoint(Options):
    """Highcharts ``plotOptions.pie.onPoint`` configuration options.

    NOTE(review): the ``@property``/``@id.setter`` decorators had been
    stripped, leaving two plain ``def id`` definitions where the second
    silently shadowed the first; restored here.
    """

    @property
    def connectorOptions(self) -> 'OptionPlotoptionsPieOnpointConnectoroptions':
        """Sub-options for the connector between chart and point."""
        return self._config_sub_data('connectorOptions', OptionPlotoptionsPieOnpointConnectoroptions)

    @property
    def id(self):
        """The id of the point to connect to (default: None)."""
        return self._config_get(None)

    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)

    @property
    def position(self) -> 'OptionPlotoptionsPieOnpointPosition':
        """Sub-options for the series position on the point."""
        return self._config_sub_data('position', OptionPlotoptionsPieOnpointPosition)
def call_with_error(error_type):
    """Decorator factory: collect messages reported through an injected
    ``error`` callback and raise ``error_type`` with all of them afterwards.

    The wrapped function is invoked with an extra ``error`` keyword argument;
    calling ``error(msg)`` records a message.  If any messages were recorded,
    ``error_type`` is raised with the messages joined by newlines; otherwise
    the function's result is returned unchanged.

    NOTE(review): the original body was corrupted — a bare ``(f)`` where the
    ``@wraps(f)`` decorator belonged, and ``error=error(log)`` which called a
    two-argument function with one argument instead of passing a callable.
    """
    from functools import wraps

    def _call_with_error(f):
        @wraps(f)
        def wrapped(*args, **kwargs):
            log = []
            # Pass a one-argument recorder bound to this call's log.
            result = f(*args, error=log.append, **kwargs)
            if len(log) > 0:
                raise error_type('\n'.join(log))
            return result
        return wrapped
    return _call_with_error
class ExpressionPropagationMemory(ExpressionPropagationBase):
    """Expression propagation that additionally tracks memory/pointer
    effects, postponing aliased-variable propagation until it is proven
    safe."""

    name = 'expression-propagation-memory'

    def __init__(self):
        ExpressionPropagationBase.__init__(self)

    def run(self, task: DecompilerTask):
        """Collect pointer information from the CFG, then run the base
        propagation."""
        self._initialize_pointers(task.graph)
        super().run(task)

    def perform(self, graph, iteration) -> bool:
        """One propagation pass plus a follow-up pass for the aliased
        definitions that were postponed during it."""
        is_changed = super().perform(graph, iteration)
        self._propagate_postponed_aliased_definitions()
        return is_changed

    def _definition_can_be_propagated_into_target(self, definition: Assignment, target: Instruction):
        """True when *definition* may be propagated into *target*: none of the
        blocking conditions (phi/call defs, address-of-dereference, unknown or
        global expressions, aliased postponement, or possible memory writes
        between definition and target) applies."""
        return (isinstance(definition, Assignment) and (not (self._is_phi(definition) or self._is_call_assignment(definition) or self._is_address_into_dereference(definition, target) or self._defines_unknown_expression(definition) or self._contains_global_variable(definition) or self._operation_is_propagated_in_phi(target, definition) or self._is_invalid_propagation_into_address_operation(target, definition) or self._is_aliased_postponed_for_propagation(target, definition) or self._definition_value_could_be_modified_via_memory_access_between_definition_and_target(definition, target) or self._pointer_value_used_in_definition_could_be_modified_via_memory_access_between_definition_and_target(definition, target))))

    def _initialize_pointers(self, cfg: ControlFlowGraph):
        """Build the pointer aliasing info for the whole CFG."""
        self._pointers_info = Pointers().from_cfg(cfg)

    def _update_block_map(self, old_instr_str: str, new_instr_str: str, basic_block: BasicBlock, index: int):
        """Re-key the (block, index) location from the old instruction string
        to the new one after a substitution."""
        self._blocks_map[new_instr_str].add((basic_block, index))
        if ((basic_block, index) in self._blocks_map[old_instr_str]):
            self._blocks_map[old_instr_str].remove((basic_block, index))

    def _update_use_map(self, variable: Variable, instruction: Instruction):
        """Drop the stale use of *variable* if substitution removed it, then
        re-register the instruction's current requirements."""
        if (variable not in instruction.requirements):
            self._use_map.remove_use(variable, instruction)
        self._use_map.add(instruction)

    def _propagate_postponed_aliased_definitions(self):
        """Propagate each postponed aliased definition into its single use,
        provided no intervening memory access could change its value."""
        self._initialize_maps(self._cfg)
        for var in self._postponed_aliased:
            uses = self._use_map.get(var)
            definition = self._def_map.get(var)
            # Only safe when the aliased variable has exactly one use.
            if (len(uses) == 1):
                instruction = uses.pop()
                if self._is_aliased_postponed_for_propagation(instruction, definition):
                    if (self._definition_value_could_be_modified_via_memory_access_between_definition_and_target(definition, instruction) or self._pointer_value_used_in_definition_could_be_modified_via_memory_access_between_definition_and_target(definition, instruction)):
                        continue
                    old_instr = str(instruction)
                    (block, index) = self._blocks_map.get(old_instr).pop()
                    instruction.substitute(var, definition.value.copy())
                    # Keep the bookkeeping maps consistent with the rewrite.
                    self._update_use_map(var, instruction)
                    self._update_block_map(old_instr, str(instruction), block, index)
class RedisCli(Redis):
    """Async Redis client preconfigured from the application settings."""

    def __init__(self):
        super().__init__(
            host=settings.REDIS_HOST,
            port=settings.REDIS_PORT,
            password=settings.REDIS_PASSWORD,
            db=settings.REDIS_DATABASE,
            socket_timeout=settings.REDIS_TIMEOUT,
            decode_responses=True,
        )

    async def open(self):
        """Ping the server once; terminate the process if the connection fails."""
        try:
            await self.ping()
        except TimeoutError:
            log.error(' redis ')
            sys.exit()
        except AuthenticationError:
            log.error(' redis ')
            sys.exit()
        except Exception as e:
            log.error(' redis {}', e)
            sys.exit()

    async def delete_prefix(self, prefix: str, exclude: (str | list) = None):
        """Delete every key matching ``prefix*`` except the excluded key(s)."""
        doomed = []
        async for key in self.scan_iter(match=f'{prefix}*'):
            if isinstance(exclude, str):
                skip = key == exclude
            elif isinstance(exclude, list):
                skip = key in exclude
            else:
                skip = False
            if not skip:
                doomed.append(key)
        for key in doomed:
            await self.delete(key)
class Event():
    """Wraps a server-sent event whose payload is a JSON document."""

    def __init__(self, sse_event):
        # Keep the raw event and eagerly decode its JSON payload once.
        self._raw = sse_event
        self._payload = json.loads(sse_event.data)

    def data(self):
        """The 'data' entry of the decoded JSON payload."""
        return self._payload['data']

    def path(self):
        """The 'path' entry of the decoded JSON payload."""
        return self._payload['path']

    def event_type(self):
        """The event type reported by the underlying SSE event."""
        return self._raw.event_type
def test_encoding_unknown_performative():
    """Encoding must reject a message whose performative is not recognised.

    Patching Performative.__eq__ to always return False makes every
    performative comparison in the serializer fail, so no case matches
    and it raises ValueError.
    """
    msg = LedgerApiMessage(performative=LedgerApiMessage.Performative.GET_BALANCE, ledger_id='some_ledger_id', address='some_address')
    with pytest.raises(ValueError, match='Performative not valid:'):
        with mock.patch.object(LedgerApiMessage.Performative, '__eq__', return_value=False):
            LedgerApiMessage.serializer.encode(msg)
class Sync(Options):
    """Chart synchronization option group.

    NOTE(review): the paired defs below look like @property getter/setter
    pairs whose decorators were stripped during extraction; as written
    each second def shadows the first — confirm against the original file.
    """

    def enabled(self):
        """Return the configured flag (default True)."""
        return self._config_get(True)

    def enabled(self, flag: bool):
        """Store the enabled flag."""
        self._config(flag)

    def group(self):
        """Return the configured group number (default 1)."""
        return self._config_get(1)

    def group(self, num: int):
        """Store the group number."""
        self._config(num)

    def suppressTooltips(self):
        """Return the configured flag (default True)."""
        return self._config_get(True)

    def suppressTooltips(self, flag: bool):
        """Store the suppress-tooltips flag."""
        self._config(flag)
class ABIType():
    """Base class for a parsed ABI type node.

    NOTE(review): several zero-argument methods here (``is_array``,
    ``is_dynamic``, ``item_type``) read like @property accessors whose
    decorators may have been lost upstream — confirm against callers.
    """

    __slots__ = ('arrlist', 'node')

    def __init__(self, arrlist=None, node=None):
        # arrlist: sequence of array dimensions (an empty dim marks a
        # dynamic array), or None for a non-array type.
        # node: source parse-tree node used for error reporting.
        self.arrlist = arrlist
        self.node = node

    def __repr__(self):
        return f'<{type(self).__qualname__} {repr(self.to_type_str())}>'

    def __eq__(self, other):
        # Equality is canonical-string equality between same-class types.
        # NOTE: defining __eq__ without __hash__ leaves instances unhashable.
        return (type(self) is type(other)) and (self.to_type_str() == other.to_type_str())

    def to_type_str(self):
        """Return the canonical type string; subclasses must implement."""
        raise NotImplementedError('Must implement `to_type_str`')

    def item_type(self):
        """Return the element type of an array type; subclasses must implement."""
        raise NotImplementedError('Must implement `item_type`')

    def validate(self):
        """Validate this type node; subclasses must implement."""
        raise NotImplementedError('Must implement `validate`')

    def invalidate(self, error_msg):
        """Raise ABITypeError pointing at this node's source location."""
        node = self.node
        raise ABITypeError(f"For '{node.text}' type at column {(node.start + 1)} in '{node.full_text}': {error_msg}")

    def is_array(self):
        """True when this type carries an array-dimension list."""
        return self.arrlist is not None

    def is_dynamic(self):
        """True when the encoded size is data-dependent; subclasses must implement."""
        raise NotImplementedError('Must implement `is_dynamic`')

    def _has_dynamic_arrlist(self):
        # Bug fix: `self.is_array` (the bound method object) was always
        # truthy, so a non-array type (arrlist=None) crashed iterating
        # None.  Call the method instead.
        return self.is_array() and any(len(dim) == 0 for dim in self.arrlist)
class BuildProgramTest(unittest.TestCase):
    """Tests for the buck build helpers in utils.build_program."""

    def setUp(self):
        # fake_file is never created; actual_file is created empty so
        # that the build-success check sees a real artifact on disk.
        self.fake_file = os.path.join(gettempdir(), 'aibenchtest1', 'test')
        self.actual_file = os.path.join(gettempdir(), 'aibenchtest2', 'program')
        _setUpTempDirectory(self.actual_file)
        with open(self.actual_file, 'a'):
            os.utime(self.actual_file, None)

    def testBuckBuild(self):
        """buildUsingBuck fails when processRun reports errors, succeeds otherwise."""
        with patch('utils.subprocess_with_logger.processRun', return_value=('Build was unsuccessful', [Exception()])):
            self.assertFalse(buildUsingBuck(self.fake_file, 'android', 'buck'))
        with patch('utils.subprocess_with_logger.processRun', return_value=('Build was successful', [])), patch('utils.build_program._setUpTempDirectory'):
            self.assertTrue(buildUsingBuck(self.actual_file, 'ios', 'buck'))

    def testisBuildSuccessful(self):
        """_isBuildSuccessful is True only when the built file exists."""
        self.assertFalse(_isBuildSuccessful(self.fake_file, 'ios', 'buck build aibench:run'))
        self.assertTrue(_isBuildSuccessful(self.actual_file, 'oculus', 'buck build aibench:run'))
def parse_argv(tokens: Tokens, options: List[Option], options_first: bool=False, more_magic: bool=False) -> List[Pattern]:
    """Translate an argv token stream into Option/Argument patterns.

    A literal '--' (or, when `options_first` is set, the first
    positional) turns every remaining token into a positional Argument.
    Tokens starting with '--' or '-' are delegated to the long/short
    option parsers; a lone '-' and negative numbers count as positionals.
    """

    def _is_number(text):
        # A token that parses as a float (e.g. '-3') is not an option.
        try:
            float(text)
        except ValueError:
            return False
        return True

    collected: List[Pattern] = []
    token = tokens.current()
    while token is not None:
        if token == '--':
            return collected + [Argument(None, leftover) for leftover in tokens]
        if token.startswith('--'):
            collected += parse_longer(tokens, options, argv=True, more_magic=more_magic)
        elif token.startswith('-') and token != '-' and not _is_number(token):
            collected += parse_shorts(tokens, options, more_magic=more_magic)
        elif options_first:
            return collected + [Argument(None, leftover) for leftover in tokens]
        else:
            collected.append(Argument(None, tokens.move()))
        token = tokens.current()
    return collected
def read_process_stats() -> ProcessStats:
    """Snapshot process and thread counts for the main trinity process tree.

    Counts the main process plus all recursive children, and sums thread
    counts across the whole tree.
    """
    main_proc = get_main_trinity_process()
    children = main_proc.children(recursive=True)
    total_threads = main_proc.num_threads() + sum(collect_thread_counts_for_processes(children))
    return ProcessStats(process_count=len(children) + 1, thread_count=total_threads)
class ClassifierOnlineTest(object):
    """Sliding-window action classifier for one tracked human.

    Loads a pickled model, accumulates skeleton frames into window
    features, and predicts an action label with simple score smoothing.
    """

    def __init__(self, model_path, action_labels, window_size, human_id=0):
        self.human_id = human_id
        # NOTE(review): unpickling arbitrary files is unsafe on untrusted
        # input — model_path must come from a trusted location.
        with open(model_path, 'rb') as f:
            self.model = pickle.load(f)
        if self.model is None:
            print('my Error: failed to load model')
            assert False
        self.action_labels = action_labels
        # Predictions scoring below this are reported as "unknown".
        self.THRESHOLD_SCORE_FOR_DISP = 0.5
        self.feature_generator = FeatureGenerator(window_size)
        self.reset()

    def reset(self):
        """Clear the feature window and the score-smoothing history."""
        self.feature_generator.reset()
        self.scores_hist = deque()
        self.scores = None

    def predict(self, skeleton):
        """Return the predicted action label for one skeleton frame.

        Returns '' while the feature window is not yet filled or when the
        best smoothed score is below the display threshold.
        """
        LABEL_UNKNOWN = ''
        (is_features_good, features) = self.feature_generator.add_cur_skeleton(skeleton)
        if is_features_good:
            features = features.reshape((- 1), features.shape[0])
            # NOTE(review): calls the model's private _predict_proba —
            # confirm this is intended over the public predict_proba.
            curr_scores = self.model._predict_proba(features)[0]
            self.scores = self.smooth_scores(curr_scores)
            if (self.scores.max() < self.THRESHOLD_SCORE_FOR_DISP):
                prediced_label = LABEL_UNKNOWN
            else:
                predicted_idx = self.scores.argmax()
                prediced_label = self.action_labels[predicted_idx]
        else:
            prediced_label = LABEL_UNKNOWN
        return prediced_label

    def smooth_scores(self, curr_scores):
        """Average scores over the last DEQUE_MAX_SIZE frames.

        Bug fix: the original held an unreachable multiplicative-smoothing
        `else` branch behind a constant `if 1:` guard; the dead code has
        been removed (behavior unchanged).
        """
        self.scores_hist.append(curr_scores)
        DEQUE_MAX_SIZE = 2
        if len(self.scores_hist) > DEQUE_MAX_SIZE:
            self.scores_hist.popleft()
        score_sums = np.zeros((len(self.action_labels),))
        for score in self.scores_hist:
            score_sums += score
        score_sums /= len(self.scores_hist)
        print('\nMean score:\n', score_sums)
        return score_sums

    def draw_scores_onto_image(self, img_disp):
        """Draw a 'P<id>:' header plus one 'label: score' line per action."""
        if self.scores is None:
            return
        # i == -1 draws the header row; 0..N-1 draw the per-label rows.
        for i in range((- 1), len(self.action_labels)):
            FONT_SIZE = 0.7
            TXT_X = 20
            TXT_Y = (150 + (i * 30))
            COLOR_INTENSITY = 255
            if (i == (- 1)):
                s = 'P{}:'.format(self.human_id)
            else:
                label = self.action_labels[i]
                s = '{:<5}: {:.2f}'.format(label, self.scores[i])
                # Brighter red for higher-scoring labels.
                COLOR_INTENSITY *= ((0.0 + (1.0 * self.scores[i])) ** 0.5)
            cv2.putText(img_disp, text=s, org=(TXT_X, TXT_Y), fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=FONT_SIZE, color=(0, 0, int(COLOR_INTENSITY)), thickness=2)
class TraitCoerceType(TraitHandler):
def __init__(self, aType):
if (not isinstance(aType, type)):
aType = type(aType)
self.aType = aType
try:
self.fast_validate = CoercableTypes[aType]
except:
self.fast_validate = (ValidateTrait.coerce, aType)
def validate(self, object, name, value):
fv = self.fast_validate
tv = type(value)
if (tv is fv[1]):
return value
for typei in fv[2:]:
if (tv is typei):
return fv[1](value)
self.error(object, name, value)
def info(self):
return ('a value of %s' % str(self.aType)[1:(- 1)])
def get_editor(self, trait):
if (self.aType is bool):
if (self.editor is None):
from traitsui.api import BooleanEditor
self.editor = BooleanEditor()
return self.editor
auto_set = trait.auto_set
if (auto_set is None):
auto_set = True
from traitsui.api import TextEditor
return TextEditor(auto_set=auto_set, enter_set=(trait.enter_set or False), evaluate=self.fast_validate[1]) |
class ReversedList(object):
    """Presents an existing list in reverse order, mutating the original."""

    def __init__(self, list):
        self.list = list

    def insert(self, index, value):
        """Insert so that `value` appears at `index` in the reversed view."""
        return self.list.insert(self._index(index - 1), value)

    def index(self, value):
        """Position of `value` within the reversed view."""
        mirror = self.list[:]
        mirror.reverse()
        return mirror.index(value)

    def __len__(self):
        return len(self.list)

    def __getitem__(self, index):
        return self.list[self._index(index)]

    def __setslice__(self, i, j, values):
        # Python 2 slice-assignment protocol; never invoked on Python 3
        # but retained for interface compatibility.
        return self.list.__setslice__(self._index(i), self._index(j), values)

    def __delitem__(self, index):
        return self.list.__delitem__(self._index(index))

    def _index(self, index):
        """Map a reversed-view index onto the underlying list."""
        if index < 0:
            return -1 - index
        mapped = len(self.list) - index - 1
        return mapped if mapped >= 0 else index
def extractNocturnetlsNet(item):
    """Map a nocturnetls.net feed item to a release message.

    Returns None for previews or items with no volume/chapter info, a
    release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tag_table = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tag_table:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class github_issue_0038_test_case(unittest.TestCase):
def get_dict_generator():
(yield from enumerate('abcd'))
def test_init_with_generator(self):
b = benedict(self.get_dict_generator())
self.assertEqual(b, {0: 'a', 1: 'b', 2: 'c', 3: 'd'})
self.assertEqual(b.to_json(), '{"0": "a", "1": "b", "2": "c", "3": "d"}')
b = benedict(self.get_dict_generator())
d = dict(b)
b = benedict(d)
self.assertEqual(b, {0: 'a', 1: 'b', 2: 'c', 3: 'd'})
self.assertEqual(b.to_json(), '{"0": "a", "1": "b", "2": "c", "3": "d"}') |
class StrWrapper(Wrapper):
    """Wraps a plain string so it can be converted to domain objects."""

    def __init__(self, data):
        self.data = data

    def to_bounding_box(self):
        """Interpret the string as a named domain and return its area."""
        from climetlab.utils.domains import domain_to_area
        return domain_to_area(self.data)

    def to_datetime(self):
        """Parse the string as a single date."""
        return parse_date(self.data)

    def to_datetime_list(self):
        """Expand the string into a list of dates.

        Supports a whole month (YYYY-MM), MARS-style 'a/to/b' and
        'a/to/b/by/step' ranges, and plain '/'-separated date lists.
        """
        from climetlab.utils.dates import mars_like_date_list
        if MONTH_DATE.match(self.data):
            first = parse(self.data)
            year, month = first.year, first.month
            last_day = calendar.monthrange(year, month)[1]
            start = parse_date(f'{year}-{month:02}-01')
            end = parse_date(f'{year}-{month:02}-{last_day}')
            return mars_like_date_list(start, end, 1)
        parts = self.data.split('/')
        if len(parts) == 3 and parts[1].lower() == 'to':
            return mars_like_date_list(parse_date(parts[0]), parse_date(parts[2]), 1)
        if len(parts) == 5 and parts[1].lower() == 'to' and parts[3].lower() == 'by':
            return mars_like_date_list(parse_date(parts[0]), parse_date(parts[2]), int(parts[4]))
        return [parse_date(p) for p in parts]
# NOTE(review): '.end_to_end' looks like a pytest mark decorator whose
# '@pytest.mark' prefix was lost during extraction — confirm upstream.
.end_to_end
def test_cpa_on_dpa_v2():
    """End-to-end CPA attack on a DPA-contest-v2 trace subset must
    recover the expected last-round AES key."""
    ths = traces.read_ths_from_ets_file('tests/end_to_end/dpa_v2_sub.ets')
    expected_key = aes.key_schedule(key=ths[0].key)[(- 1)]
    sf = aes.selection_functions.encrypt.DeltaRLastRounds()
    container = scared.Container(ths)
    att = scared.CPAAttack(selection_function=sf, model=scared.HammingWeight(), discriminant=scared.maxabs)
    att.run(container)
    # Pick the best-scoring guess for each key byte.
    last_key = np.argmax(att.scores, axis=0)
    assert np.array_equal(expected_key, last_key)
class TestWrongRepeatedClass():
    """Config-builder test: a YAML referencing an incorrect repeated class must fail."""

    def test_class_unknown(self, monkeypatch):
        """Building from the bad YAML raises ValueError."""
        with monkeypatch.context() as m:
            m.setattr(sys, 'argv', ['', '--config', './tests/conf/yaml/test_incorrect_repeated_class.yaml'])
            with pytest.raises(ValueError):
                ConfigArgBuilder(*all_configs, desc='Test Builder')
class InstructionDataset(Dataset):
    """Alpaca-style instruction dataset producing masked LM examples.

    Each item is the tokenized prompt+output; label positions covering
    the prompt are set to IGNORE_INDEX so the loss only covers the answer.
    """

    def __init__(self, dataset_config, tokenizer, partition='train'):
        # Bug fix: the file handle from json.load(open(...)) was never
        # closed; use a context manager.
        with open(dataset_config.data_path) as f:
            self.ann = json.load(f)
        if partition == 'train':
            # The first 200 samples are held out for evaluation.
            self.ann = self.ann[200:]
        else:
            self.ann = self.ann[:200]
        self.tokenizer = tokenizer

    def __len__(self):
        return len(self.ann)

    def __getitem__(self, index):
        IGNORE_INDEX = (- 100)  # sentinel ignored by the loss function
        ann = self.ann[index]
        if ann.get('input', '') == '':
            prompt = PROMPT_DICT['prompt_no_input'].format_map(ann)
        else:
            prompt = PROMPT_DICT['prompt_input'].format_map(ann)
        example = (prompt + ann['output'])
        prompt = torch.tensor(self.tokenizer.encode(prompt), dtype=torch.int64)
        example = self.tokenizer.encode(example)
        example.append(self.tokenizer.eos_token_id)
        example = torch.tensor(example, dtype=torch.int64)
        labels = copy.deepcopy(example)
        # Mask the prompt region so only the answer contributes to loss.
        labels[:len(prompt)] = (- 1)
        example_mask = example.ge(0)
        label_mask = labels.ge(0)
        example[(~ example_mask)] = 0
        labels[(~ label_mask)] = IGNORE_INDEX
        return {'input_ids': example.tolist(), 'labels': labels.tolist(), 'attention_mask': example_mask.tolist()}
class OptionPlotoptionsSplineSonificationContexttracksMappingNoteduration(Options):
    """Highcharts sonification note-duration mapping option group.

    NOTE(review): the paired defs below look like @property getter/setter
    pairs whose decorators were stripped during extraction; as written
    each second def shadows the first — confirm against the original file.
    """

    def mapFunction(self):
        """Return the configured mapping function (default None)."""
        return self._config_get(None)

    def mapFunction(self, value: Any):
        """Store the mapping function value."""
        self._config(value, js_type=False)

    def mapTo(self):
        """Return the configured mapping target (default None)."""
        return self._config_get(None)

    def mapTo(self, text: str):
        """Store the mapping target."""
        self._config(text, js_type=False)

    def max(self):
        """Return the configured maximum (default None)."""
        return self._config_get(None)

    def max(self, num: float):
        """Store the maximum value."""
        self._config(num, js_type=False)

    def min(self):
        """Return the configured minimum (default None)."""
        return self._config_get(None)

    def min(self, num: float):
        """Store the minimum value."""
        self._config(num, js_type=False)

    def within(self):
        """Return the configured 'within' value (default None)."""
        return self._config_get(None)

    def within(self, value: Any):
        """Store the 'within' value."""
        self._config(value, js_type=False)
# NOTE(review): '.parametrize' looks like a @pytest.mark.parametrize
# decorator whose '@pytest.mark' prefix was lost during extraction.
.parametrize('use_weak_dirichlet', [False, True])
def test_load_vector_use_weak(use_weak_dirichlet):
    """Solve a 2D heterogeneous Poisson problem and check that the load
    vector matches the residual computed at a zero solution vector."""
    from . import poisson_het_2d_p
    reload(poisson_het_2d_p)
    from . import poisson_het_2d_c0pk_n
    pList = [poisson_het_2d_p]
    nList = [poisson_het_2d_c0pk_n]
    reload(default_so)
    so = default_so
    so.name = pList[0].name = (('poisson_2d_c0p1' + 'pe') + repr(comm.size()))
    reload(default_s)
    so.sList = [default_s]
    # Verbose logging / profiling flags for the solution run.
    opts.logLevel = 7
    opts.verbose = True
    opts.profile = True
    opts.gatherArchive = True
    nList[0].femSpaces[0] = default_n.C0_AffineLinearOnSimplexWithNodalBasis
    nList[0].linearSolver = default_n.LU
    nList[0].multilevelLinearSolver = default_n.LU
    if use_weak_dirichlet:
        # Weak Dirichlet variant switches the numerical flux type.
        nList[0].linearSolver = default_n.LU
        nList[0].multilevelLinearSolver = default_n.LU
        nList[0].numericalFluxType = default_n.Advection_DiagonalUpwind_Diffusion_SIPG_exterior
    # Configure PETSc solver options for this run.
    OptDB = nList[0].OptDB
    soln_name = so.name
    OptDB.clear()
    OptDB.setValue('ksp_type', 'bcgsl')
    OptDB.setValue('pc_type', 'asm')
    OptDB.setValue('pc_asm_type', 'basic')
    OptDB.setValue('pc_asm_overlap', 2)
    OptDB.setValue('sub_ksp_type', 'preonly')
    OptDB.setValue('sub_pc_type', 'lu')
    OptDB.setValue('sub_pc_factor_type', 'superlu')
    ns = NumericalSolution.NS_base(so, pList, nList, so.sList, opts)
    ns.calculateSolution('poisson_2d_c0p1')
    finest_model = ns.modelList[0].levelModelList[(- 1)]
    nr = finest_model.u[0].femSpace.dim
    r = np.zeros((nr,), 'd')
    f = np.zeros((nr,), 'd')
    utmp = np.zeros((nr,), 'd')
    # With utmp == 0 the residual is compared against the load vector.
    finest_model.getResidual(utmp, r)
    finest_model.getLoadVector(f)
    del ns
    np.testing.assert_almost_equal(r, f)
class ToolBar():
    """Abstract toolbar bound to a plot; concrete UIs implement the handlers.

    Bug fix: every handler raised the ``NotImplemented`` singleton, which
    in Python 3 produces ``TypeError: exceptions must derive from
    BaseException`` — use ``NotImplementedError`` instead.
    """

    def __init__(self, plot):
        self.plot = plot

    def _get_subplot_dropdown_value(self) -> str:
        """Return the current dropdown selection; implemented by subclasses."""
        raise NotImplementedError

    # 'Subplot' is quoted as a forward reference (the type is imported
    # elsewhere in the module).
    def current_subplot(self) -> 'Subplot':
        """Return the subplot selected in the dropdown, or the plot itself
        when it has no subplots."""
        if hasattr(self.plot, '_subplots'):
            current = self._get_subplot_dropdown_value()
            # A "(row, col)" label is parsed into an index tuple.  The two
            # original branches returned the identical expression, so they
            # are collapsed here.
            if current[0] == '(':
                current = tuple(int(i) for i in current.strip('()').split(','))
            return self.plot[current]
        return self.plot

    def panzoom_handler(self, ev):
        raise NotImplementedError

    def maintain_aspect_handler(self, ev):
        raise NotImplementedError

    def y_direction_handler(self, ev):
        raise NotImplementedError

    def auto_scale_handler(self, ev):
        raise NotImplementedError

    def center_scene_handler(self, ev):
        raise NotImplementedError

    def record_handler(self, ev):
        raise NotImplementedError

    def add_polygon(self, ev):
        raise NotImplementedError
class DirectOutput(object):
    """ctypes wrapper around the Saitek DirectOutput DLL (Windows only).

    Each method logs its call and forwards to the corresponding
    DirectOutput_* entry point, returning the DLL's result code.
    """

    def __init__(self, dll_path):
        """Load the DirectOutput DLL from `dll_path`."""
        logging.debug('DirectOutput.__init__')
        self.DirectOutputDLL = ctypes.WinDLL(dll_path)

    def Initialize(self, application_name):
        """Initialize the library under the given application name."""
        logging.debug('DirectOutput.Initialize')
        return self.DirectOutputDLL.DirectOutput_Initialize(ctypes.wintypes.LPWSTR(application_name))

    def Deinitialize(self):
        """Release the library."""
        logging.debug('DirectOutput.Deinitialize')
        return self.DirectOutputDLL.DirectOutput_Deinitialize()

    def RegisterDeviceCallback(self, function):
        """Register a device add/remove callback (context argument fixed to 0)."""
        logging.debug('DirectOutput.RegisterDeviceCallback')
        return self.DirectOutputDLL.DirectOutput_RegisterDeviceCallback(function, 0)

    def Enumerate(self, function):
        """Enumerate attached devices via `function`."""
        logging.debug('DirectOutput.Enumerate')
        return self.DirectOutputDLL.DirectOutput_Enumerate(function, 0)

    def RegisterSoftButtonCallback(self, device_handle, function):
        """Register a soft-button callback for one device."""
        logging.debug('DirectOutput.RegisterSoftButtonCallback({}, {})'.format(device_handle, function))
        return self.DirectOutputDLL.DirectOutput_RegisterSoftButtonCallback(ctypes.wintypes.HANDLE(device_handle), function, 0)

    def RegisterPageCallback(self, device_handle, function):
        """Register a page-change callback for one device."""
        logging.debug('DirectOutput.RegisterPageCallback({}, {})'.format(device_handle, function))
        return self.DirectOutputDLL.DirectOutput_RegisterPageCallback(ctypes.wintypes.HANDLE(device_handle), function, 0)

    def SetProfile(self, device_handle, profile):
        """Set (or clear, when `profile` is falsy) the device profile path."""
        logging.debug('DirectOutput.SetProfile({}, {})'.format(device_handle, profile))
        if profile:
            return self.DirectOutputDLL.DirectOutput_SetProfile(ctypes.wintypes.HANDLE(device_handle), len(profile), ctypes.wintypes.LPWSTR(profile))
        else:
            return self.DirectOutputDLL.DirectOutput_SetProfile(ctypes.wintypes.HANDLE(device_handle), 0, 0)

    def AddPage(self, device_handle, page, name, active):
        """Add a page to the device.

        NOTE(review): `name` is logged but never passed to the DLL call —
        confirm against the DirectOutput_AddPage signature in use.
        """
        logging.debug('DirectOutput.AddPage({}, {}, {}, {})'.format(device_handle, page, name, active))
        return self.DirectOutputDLL.DirectOutput_AddPage(ctypes.wintypes.HANDLE(device_handle), page, active)

    def RemovePage(self, device_handle, page):
        """Remove a page from the device."""
        logging.debug('DirectOutput.RemovePage({}, {})'.format(device_handle, page))
        return self.DirectOutputDLL.DirectOutput_RemovePage(ctypes.wintypes.HANDLE(device_handle), page)

    def SetLed(self, device_handle, page, led, value):
        """Set an LED state on the given page."""
        logging.debug('DirectOutput.SetLed({}, {}, {}, {})'.format(device_handle, page, led, value))
        return self.DirectOutputDLL.DirectOutput_SetLed(ctypes.wintypes.HANDLE(device_handle), page, led, value)

    def SetString(self, device_handle, page, line, string):
        """Write a text line on the given page."""
        logging.debug('DirectOutput.SetString({}, {}, {}, {})'.format(device_handle, page, line, string))
        return self.DirectOutputDLL.DirectOutput_SetString(ctypes.wintypes.HANDLE(device_handle), page, line, len(string), ctypes.wintypes.LPWSTR(string))
class _TestGlobmatch(unittest.TestCase):
def mktemp(self, *parts):
filename = self.norm(*parts)
(base, file) = os.path.split(filename)
if (not os.path.exists(base)):
retry = 3
while retry:
try:
os.makedirs(base)
retry = 0
except Exception:
retry -= 1
create_empty_file(filename)
def force_err(self):
raise TypeError
def norm(self, *parts):
tempdir = (os.fsencode(self.tempdir) if isinstance(parts[0], bytes) else self.tempdir)
return os.path.join(tempdir, *parts)
def norm_list(self, files):
return sorted([self.norm(os.path.normpath(x)) for x in files])
def setUp(self):
self.tempdir = (TESTFN + '_dir')
self.default_flags = (glob.G | glob.P)
def tearDown(self):
retry = 3
while retry:
try:
shutil.rmtree(self.tempdir)
while os.path.exists(self.tempdir):
pass
retry = 0
except Exception:
retry -= 1 |
class OptionSeriesVariwideMarker(Options):
    """Highcharts variwide series marker option group.

    NOTE(review): the paired defs below look like @property getter/setter
    pairs whose decorators were stripped during extraction; as written
    each second def shadows the first — confirm against the original file.
    """

    def enabled(self):
        """Return the configured flag (default None)."""
        return self._config_get(None)

    def enabled(self, flag: bool):
        """Store the enabled flag."""
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        """Return the configured threshold (default 2)."""
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        """Store the enabled threshold."""
        self._config(num, js_type=False)

    def fillColor(self):
        """Return the configured fill color (default None)."""
        return self._config_get(None)

    def fillColor(self, text: str):
        """Store the fill color."""
        self._config(text, js_type=False)

    def height(self):
        """Return the configured height (default None)."""
        return self._config_get(None)

    def height(self, num: float):
        """Store the height."""
        self._config(num, js_type=False)

    def lineColor(self):
        """Return the configured line color (default '#ffffff')."""
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        """Store the line color."""
        self._config(text, js_type=False)

    def lineWidth(self):
        """Return the configured line width (default 0)."""
        return self._config_get(0)

    def lineWidth(self, num: float):
        """Store the line width."""
        self._config(num, js_type=False)

    def radius(self):
        """Return the configured radius (default 4)."""
        return self._config_get(4)

    def radius(self, num: float):
        """Store the radius."""
        self._config(num, js_type=False)

    def states(self) -> 'OptionSeriesVariwideMarkerStates':
        """Sub-option group for marker states."""
        return self._config_sub_data('states', OptionSeriesVariwideMarkerStates)

    def symbol(self):
        """Return the configured symbol (default None)."""
        return self._config_get(None)

    def symbol(self, text: str):
        """Store the symbol name."""
        self._config(text, js_type=False)

    def width(self):
        """Return the configured width (default None)."""
        return self._config_get(None)

    def width(self, num: float):
        """Store the width."""
        self._config(num, js_type=False)
class _ExtendedCommunity(StringifyMixin, TypeDisp, _Value):
    """Base class for BGP Extended Community path-attribute values.

    Holds the (type, subtype) registry constants and the shared
    pack/unpack logic; concrete communities register themselves via the
    TypeDisp machinery and supply parse_value/serialize_value.

    NOTE(review): `parse_subtype` and `parse` take `cls` — they look like
    @classmethod defs whose decorators were stripped during extraction.
    """

    _PACK_STR = '!B7s'  # 1-byte type + 7-byte value
    _PACK_STR_SIZE = struct.calcsize(_PACK_STR)
    _SUBTYPE_PACK_STR = '!B'  # first value byte is the subtype

    # Flag bits carried in the high-order type octet.
    IANA_AUTHORITY = 128
    TRANSITIVE = 64
    _TYPE_HIGH_MASK = (~ TRANSITIVE)

    # Low-order type values.
    TWO_OCTET_AS_SPECIFIC = 0
    IPV4_ADDRESS_SPECIFIC = 1
    FOUR_OCTET_AS_SPECIFIC = 2
    OPAQUE = 3
    SUBTYPE_ENCAPSULATION = 12
    ENCAPSULATION = (OPAQUE, SUBTYPE_ENCAPSULATION)

    # EVPN communities.
    EVPN = 6
    SUBTYPE_EVPN_MAC_MOBILITY = 0
    SUBTYPE_EVPN_ESI_LABEL = 1
    SUBTYPE_EVPN_ES_IMPORT_RT = 2
    EVPN_MAC_MOBILITY = (EVPN, SUBTYPE_EVPN_MAC_MOBILITY)
    EVPN_ESI_LABEL = (EVPN, SUBTYPE_EVPN_ESI_LABEL)
    EVPN_ES_IMPORT_RT = (EVPN, SUBTYPE_EVPN_ES_IMPORT_RT)

    # Flow Specification communities.
    FLOWSPEC = 128
    FLOWSPEC_L2VPN = 8
    SUBTYPE_FLOWSPEC_TRAFFIC_RATE = 6
    SUBTYPE_FLOWSPEC_TRAFFIC_ACTION = 7
    SUBTYPE_FLOWSPEC_REDIRECT = 8
    SUBTYPE_FLOWSPEC_TRAFFIC_REMARKING = 9
    SUBTYPE_FLOWSPEC_VLAN_ACTION = 10
    SUBTYPE_FLOWSPEC_TPID_ACTION = 11
    FLOWSPEC_TRAFFIC_RATE = (FLOWSPEC, SUBTYPE_FLOWSPEC_TRAFFIC_RATE)
    FLOWSPEC_TRAFFIC_ACTION = (FLOWSPEC, SUBTYPE_FLOWSPEC_TRAFFIC_ACTION)
    FLOWSPEC_REDIRECT = (FLOWSPEC, SUBTYPE_FLOWSPEC_REDIRECT)
    FLOWSPEC_TRAFFIC_REMARKING = (FLOWSPEC, SUBTYPE_FLOWSPEC_TRAFFIC_REMARKING)
    FLOWSPEC_VLAN_ACTION = (FLOWSPEC_L2VPN, SUBTYPE_FLOWSPEC_VLAN_ACTION)
    FLOWSPEC_TPID_ACTION = (FLOWSPEC_L2VPN, SUBTYPE_FLOWSPEC_TPID_ACTION)

    def __init__(self, type_=None):
        # When no explicit type is given, recover it from the registry
        # entry under which this subclass was registered.
        if (type_ is None):
            type_ = self._rev_lookup_type(self.__class__)
            if isinstance(type_, (tuple, list)):
                type_ = type_[0]
        self.type = type_

    def parse_subtype(cls, buf):
        """Extract the subtype from the first byte of the value field."""
        (subtype,) = struct.unpack_from(cls._SUBTYPE_PACK_STR, buf)
        return subtype

    def parse(cls, buf):
        """Parse one extended community, dispatching on (type, subtype).

        Falls back to a type-only lookup when the (type, subtype) pair is
        unknown.  Returns (instance, remaining_bytes).
        """
        (type_, value) = struct.unpack_from(cls._PACK_STR, buf)
        rest = buf[cls._PACK_STR_SIZE:]
        type_low = (type_ & cls._TYPE_HIGH_MASK)
        subtype = cls.parse_subtype(value)
        subcls = cls._lookup_type((type_low, subtype))
        if (subcls == cls._UNKNOWN_TYPE):
            subcls = cls._lookup_type(type_low)
        return (subcls(type_=type_, **subcls.parse_value(value)), rest)

    def serialize(self):
        """Pack the 1-byte type followed by the 7-byte serialized value."""
        return struct.pack(self._PACK_STR, self.type, self.serialize_value())
class TestActionFileReindex(CuratorTestCase):
def test_reindex_manual(self):
source = 'my_source'
dest = 'my_dest'
expected = 3
self.create_index(source)
self.add_docs(source)
self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
self.write_config(self.args['actionfile'], testvars.reindex.format(WAIT_INTERVAL, MAX_WAIT, source, dest))
self.invoke_runner()
assert (expected == self.client.count(index=dest)['count'])
def test_reindex_selected(self):
source = 'my_source'
dest = 'my_dest'
expected = 3
self.create_index(source)
self.add_docs(source)
self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
self.write_config(self.args['actionfile'], testvars.reindex.format(WAIT_INTERVAL, MAX_WAIT, 'REINDEX_SELECTION', dest))
self.invoke_runner()
assert (expected == self.client.count(index=dest)['count'])
def test_reindex_empty_list(self):
source = 'my_source'
dest = 'my_dest'
expected = []
self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
self.write_config(self.args['actionfile'], testvars.reindex.format(WAIT_INTERVAL, MAX_WAIT, source, dest))
self.invoke_runner()
assert (expected == get_indices(self.client))
def test_reindex_selected_many_to_one(self):
source1 = 'my_source1'
source2 = 'my_source2'
dest = 'my_dest'
expected = 6
self.create_index(source1)
self.add_docs(source1)
self.create_index(source2)
for i in ['4', '5', '6']:
self.client.create(index=source2, id=i, document={('doc' + i): 'TEST DOCUMENT'})
self.client.indices.flush(index=source2, force=True)
self.client.indices.refresh(index=source2)
self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
self.write_config(self.args['actionfile'], testvars.reindex.format(WAIT_INTERVAL, MAX_WAIT, 'REINDEX_SELECTION', dest))
self.invoke_runner()
self.client.indices.refresh(index=dest)
assert (expected == self.client.count(index=dest)['count'])
def test_reindex_selected_empty_list_fail(self):
source1 = 'my_source1'
source2 = 'my_source2'
dest = 'my_dest'
self.create_index(source1)
self.add_docs(source1)
self.create_index(source2)
for i in ['4', '5', '6']:
self.client.create(index=source2, id=i, document={('doc' + i): 'TEST DOCUMENT'})
self.client.indices.flush(index=source2, force=True)
self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
self.write_config(self.args['actionfile'], testvars.reindex_empty_list.format('false', WAIT_INTERVAL, MAX_WAIT, dest))
self.invoke_runner()
assert (1 == self.result.exit_code)
def test_reindex_selected_empty_list_pass(self):
source1 = 'my_source1'
source2 = 'my_source2'
dest = 'my_dest'
self.create_index(source1)
self.add_docs(source1)
self.create_index(source2)
for i in ['4', '5', '6']:
self.client.create(index=source2, id=i, document={('doc' + i): 'TEST DOCUMENT'})
self.client.indices.flush(index=source2, force=True)
self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
self.write_config(self.args['actionfile'], testvars.reindex_empty_list.format('true', WAIT_INTERVAL, MAX_WAIT, dest))
self.invoke_runner()
assert (0 == self.result.exit_code)
.skipif((RHOST == UNDEF), reason='REMOTE_ES_SERVER is not defined')
def test_reindex_from_remote(self):
diff_wait = 6
source1 = 'my_source1'
source2 = 'my_source2'
prefix = 'my_'
dest = 'my_dest'
expected = 6
try:
remote_args = ClientArgs()
remote_args.hosts = RHOST
remote_config = {'elasticsearch': {'client': remote_args.asdict()}}
builder = Builder(configdict=remote_config, version_min=(5, 0, 0))
builder.connect()
rclient = builder.client
rclient.info()
except Exception as exc:
raise SkipTest(f'Unable to connect to host at {RHOST}') from exc
counter = 0
rclient.indices.delete(index=f'{source1},{source2}', ignore_unavailable=True)
for rindex in [source1, source2]:
rclient.indices.create(index=rindex)
for i in range(0, 3):
rclient.create(index=rindex, id=str((counter + 1)), document={('doc' + str(i)): 'TEST DOCUMENT'})
counter += 1
rclient.indices.flush(index=rindex, force=True)
rclient.indices.refresh(index=rindex)
self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
self.write_config(self.args['actionfile'], testvars.remote_reindex.format(WAIT_INTERVAL, diff_wait, RHOST, 'REINDEX_SELECTION', dest, prefix))
self.invoke_runner()
rclient.indices.delete(index=f'{source1},{source2}', ignore_unavailable=True)
assert (expected == self.client.count(index=dest)['count'])
.skipif((RHOST == UNDEF), reason='REMOTE_ES_SERVER is not defined')
def test_reindex_migrate_from_remote(self):
source1 = 'my_source1'
source2 = 'my_source2'
prefix = 'my_'
dest = 'MIGRATION'
expected = 3
try:
remote_args = ClientArgs()
remote_args.hosts = RHOST
remote_config = {'elasticsearch': {'client': remote_args.asdict()}}
builder = Builder(configdict=remote_config, version_min=(5, 0, 0))
builder.connect()
rclient = builder.client
rclient.info()
except Exception as exc:
raise SkipTest(f'Unable to connect to host at {RHOST}') from exc
counter = 0
rclient.indices.delete(index=f'{source1},{source2}', ignore_unavailable=True)
for rindex in [source1, source2]:
rclient.indices.create(index=rindex)
for i in range(0, 3):
rclient.create(index=rindex, id=str((counter + 1)), document={('doc' + str(i)): 'TEST DOCUMENT'})
counter += 1
rclient.indices.flush(index=rindex, force=True)
rclient.indices.refresh(index=rindex)
self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
self.write_config(self.args['actionfile'], testvars.remote_reindex.format(WAIT_INTERVAL, MAX_WAIT, RHOST, 'REINDEX_SELECTION', dest, prefix))
self.invoke_runner()
rclient.indices.delete(index=f'{source1},{source2}', ignore_unavailable=True)
assert (expected == self.client.count(index=source1)['count'])
assert (expected == self.client.count(index=source2)['count'])
.skipif((RHOST == UNDEF), reason='REMOTE_ES_SERVER is not defined')
def test_reindex_migrate_from_remote_with_pre_suf_fixes(self):
source1 = 'my_source1'
source2 = 'my_source2'
prefix = 'my_'
dest = 'MIGRATION'
expected = 3
mpfx = 'pre-'
msfx = '-fix'
try:
remote_args = ClientArgs()
remote_args.hosts = RHOST
remote_config = {'elasticsearch': {'client': remote_args.asdict()}}
builder = Builder(configdict=remote_config, version_min=(5, 0, 0))
builder.connect()
rclient = builder.client
rclient.info()
except Exception as exc:
raise SkipTest(f'Unable to connect to host at {RHOST}') from exc
counter = 0
rclient.indices.delete(index=f'{source1},{source2}', ignore_unavailable=True)
for rindex in [source1, source2]:
rclient.indices.create(index=rindex)
for i in range(0, 3):
rclient.create(index=rindex, id=str((counter + 1)), document={('doc' + str(i)): 'TEST DOCUMENT'})
counter += 1
rclient.indices.flush(index=rindex, force=True)
rclient.indices.refresh(index=rindex)
self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
self.write_config(self.args['actionfile'], testvars.migration_reindex.format(WAIT_INTERVAL, MAX_WAIT, mpfx, msfx, RHOST, 'REINDEX_SELECTION', dest, prefix))
self.invoke_runner()
rclient.indices.delete(index=f'{source1},{source2}')
assert (expected == self.client.count(index=f'{mpfx}{source1}{msfx}')['count'])
def test_reindex_from_remote_no_connection(self):
dest = 'my_dest'
bad_remote = '
expected = 1
self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
self.write_config(self.args['actionfile'], testvars.remote_reindex.format(WAIT_INTERVAL, MAX_WAIT, bad_remote, 'REINDEX_SELECTION', dest, 'my_'))
self.invoke_runner()
assert (expected == self.result.exit_code)
.skipif((RHOST == UNDEF), reason='REMOTE_ES_SERVER is not defined')
def test_reindex_from_remote_no_indices(self):
source1 = 'wrong1'
source2 = 'wrong2'
prefix = 'my_'
dest = 'my_dest'
expected = 1
try:
remote_args = ClientArgs()
remote_args.hosts = RHOST
remote_config = {'elasticsearch': {'client': remote_args.asdict()}}
builder = Builder(configdict=remote_config, version_min=(5, 0, 0))
builder.connect()
rclient = builder.client
rclient.info()
except Exception as exc:
raise SkipTest(f'Unable to connect to host at {RHOST}') from exc
counter = 0
rclient.indices.delete(index=f"{'my_source1'},{'my_source2'}", ignore_unavailable=True)
rclient.indices.delete(index=f'{source1},{source2}', ignore_unavailable=True)
for rindex in [source1, source2]:
rclient.indices.create(index=rindex)
for i in range(0, 3):
rclient.create(index=rindex, id=str((counter + 1)), document={('doc' + str(i)): 'TEST DOCUMENT'})
counter += 1
rclient.indices.flush(index=rindex, force=True)
self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
self.write_config(self.args['actionfile'], testvars.remote_reindex.format(WAIT_INTERVAL, MAX_WAIT, f'{RHOST}', 'REINDEX_SELECTION', dest, prefix))
self.invoke_runner()
rclient.indices.delete(index=f'{source1},{source2}')
assert (expected == self.result.exit_code)
def test_reindex_into_alias(self):
    """Reindex into an alias target and verify every document arrives."""
    source = 'my_source'
    dest = 'my_dest'
    expected_count = 3
    # Point the destination alias at a throwaway backing index.
    self.client.indices.create(index='dummy', aliases={dest: {}})
    self.add_docs(source)
    for cfg_key, cfg_body in (
        ('configfile', testvars.client_config.format(HOST)),
        ('actionfile', testvars.reindex.format(WAIT_INTERVAL, MAX_WAIT, source, dest)),
    ):
        self.write_config(self.args[cfg_key], cfg_body)
    self.invoke_runner()
    assert self.client.count(index=dest)['count'] == expected_count
def test_reindex_manual_date_math(self):
    """Reindex using manual date-math names for both source and destination."""
    source = '<source-{now/d}>'
    dest = '<target-{now/d}>'
    doc_count = 3
    self.create_index(source)
    self.add_docs(source)
    self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
    action_body = testvars.reindex.format(WAIT_INTERVAL, MAX_WAIT, source, dest)
    self.write_config(self.args['actionfile'], action_body)
    self.invoke_runner()
    # All seeded documents should land in the date-math destination index.
    assert doc_count == self.client.count(index=dest)['count']
def test_reindex_bad_mapping(self):
    """Reindexing into an index whose mapping conflicts with the source docs must fail."""
    source, dest = 'my_source', 'my_dest'
    failure_exit_code = 1
    index_settings = {'number_of_shards': 1, 'number_of_replicas': 0}
    # Source stores doc1 as keyword; destination demands integer, so the
    # reindex operation errors out.
    self.client.indices.create(
        index=source,
        settings=index_settings,
        mappings={'properties': {'doc1': {'type': 'keyword'}}},
    )
    self.add_docs(source)
    self.client.indices.create(
        index=dest,
        settings=index_settings,
        mappings={'properties': {'doc1': {'type': 'integer'}}},
    )
    self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
    self.write_config(self.args['actionfile'], testvars.reindex.format(WAIT_INTERVAL, MAX_WAIT, source, dest))
    self.invoke_runner()
    assert failure_exit_code == self.result.exit_code
def get_solar_capacities() -> pd.DataFrame:
    """Fetch yearly installed solar capacity (MW) from an OData API.

    Returns a DataFrame indexed by datetime with a 'capacity (MW)' column;
    returns the empty frame on any fetch/parse failure.
    """
    # NOTE(review): this string literal looks truncated (unterminated quote) —
    # it presumably held the OData base URL; restore from VCS history.
    solar_capacity_base_url = '
    START_YEAR = 2010  # first year for which capacity data is requested
    end_year = arrow.now().year
    years = list(range(START_YEAR, (end_year + 1)))
    url_solar_capacity = copy(solar_capacity_base_url)
    # Append one OData filter clause per year; the final clause closes the filter.
    for (i, year) in enumerate(years):
        if (i == (len(years) - 1)):
            url_solar_capacity += f'(Periods+eq+%27{year}JJ00%27))'
        else:
            url_solar_capacity += f'(Periods+eq+%27{year}JJ00%27)+or+'
    solar_capacity_df = pd.DataFrame(columns=['datetime', 'capacity (MW)'])
    try:
        r = get(url_solar_capacity)
        per_year_capacity = r.json()['value']
    except Exception as e:
        # Best-effort: log the failure and return the empty frame instead of raising.
        Logger.error(f'Error fetching solar capacities: {e}')
        return solar_capacity_df
    for yearly_row in per_year_capacity:
        capacity = float(yearly_row['ElectricalCapacityEndOfYear_8'])
        # 'Periods' values look like '2010JJ00'; the part before 'JJ' is the year.
        datetime = arrow.get(yearly_row['Periods'].split('JJ')[0]).format()
        # NOTE(review): DataFrame.append was removed in pandas 2.0 — migrate to
        # pd.concat (or accumulate rows in a list and build the frame once).
        solar_capacity_df = solar_capacity_df.append({'datetime': datetime, 'capacity (MW)': capacity}, ignore_index=True)
    solar_capacity_df.datetime = pd.to_datetime(solar_capacity_df.datetime)
    solar_capacity_df = solar_capacity_df.set_index('datetime')
    return solar_capacity_df
class ReadWriteSerializerMixin(object):
    """ViewSet mixin that uses different serializers for read vs. write actions.

    Set ``read_serializer_class`` and ``write_serializer_class`` on the
    ViewSet (or override the corresponding getter methods).
    """
    read_serializer_class = None
    write_serializer_class = None

    # Actions that mutate data and therefore use the write serializer.
    _WRITE_ACTIONS = ('create', 'update', 'partial_update', 'destroy')

    def get_serializer_class(self):
        """Return the write serializer for mutating actions, else the read serializer."""
        if self.action in self._WRITE_ACTIONS:
            return self.get_write_serializer_class()
        return self.get_read_serializer_class()

    def get_read_serializer_class(self):
        """Return ``read_serializer_class``; fail loudly if it was not configured."""
        # Fixed message typo: missing space after the comma ("attribute,or").
        assert self.read_serializer_class is not None, (
            "'%s' should either include a `read_serializer_class` attribute, "
            "or override the `get_read_serializer_class()` method."
            % self.__class__.__name__
        )
        return self.read_serializer_class

    def get_write_serializer_class(self):
        """Return ``write_serializer_class``; fail loudly if it was not configured."""
        assert self.write_serializer_class is not None, (
            "'%s' should either include a `write_serializer_class` attribute, "
            "or override the `get_write_serializer_class()` method."
            % self.__class__.__name__
        )
        return self.write_serializer_class
def test_deposit_sets_start_dynasty(concise_casper, funded_account, validation_key, deposit_amount, deposit_validator):
    """A freshly deposited validator starts two dynasties after the current one."""
    index = deposit_validator(funded_account, validation_key, deposit_amount)
    # Deposits only activate after the standard two-dynasty delay.
    assert concise_casper.validators__start_dynasty(index) == concise_casper.dynasty() + 2
class AutoReconnectTest(unittest.TestCase):
    """Exercises AMI login together with the AutoReconnect helper."""
    client = None
    auto_reconnect = None
    response = None

    def setUp(self):
        self.client = ami.AMIClient(**connection)
        self.auto_reconnect = ami.AutoReconnect(
            self.client, on_reconnect=debug, on_disconnect=debug
        )

    def tearDown(self):
        # Log off first (waiting for the server's reply), then drop the socket.
        logoff_future = self.client.logoff()
        if logoff_future is not None:
            logoff_future.get_response()
        self.client.disconnect()

    def callback_response(self, response):
        # Stash the login response so the test can assert on it later.
        self.response = response

    def test_login(self):
        self.assertIsNone(self.response)
        login_future = self.client.login(callback=self.callback_response, **login)
        login_future.get_response()
        # The callback must have fired with a non-None response.
        self.assertIsNotNone(self.response)
def _save_json(name, json_blob):
    """Serialize *json_blob* as pretty-printed JSON into the sibling ``protocols`` dir.

    Parameters
    ----------
    name : str
        File name to create inside ``<package parent>/protocols``.
    json_blob : object
        Any JSON-serializable structure.
    """
    folder = os.path.split(__file__)[0]
    protocol_file_path = os.path.abspath(os.path.join(folder, '..', 'protocols', name))
    with open(protocol_file_path, 'w') as fp:
        # json.dump streams straight to the file — no intermediate string.
        json.dump(json_blob, fp, indent=4)
class TransposeTest(unittest.TestCase):
    """Checks BMG graph construction and error reporting for transpose operations."""

    # Expected dot output shared by the three well-formed matrix transposes.
    dot_from_normal = '\ndigraph "graph" {\n N00[label=0.0];\n N01[label=1.0];\n N02[label=Normal];\n N03[label=Sample];\n N04[label=Sample];\n N05[label=Sample];\n N06[label=Sample];\n N07[label=2];\n N08[label=ToMatrix];\n N09[label=Transpose];\n N10[label=Query];\n N00 -> N02;\n N01 -> N02;\n N02 -> N03;\n N02 -> N04;\n N02 -> N05;\n N02 -> N06;\n N03 -> N08;\n N04 -> N08;\n N05 -> N08;\n N06 -> N08;\n N07 -> N08;\n N07 -> N08;\n N08 -> N09;\n N09 -> N10;\n}\n'.strip()

    def _dot_for(self, query_fn):
        # Compile a single query (no observations) and return its dot rendering.
        return BMGInference().to_dot([query_fn()], {})

    def _assert_normal_graph(self, query_fn):
        self.assertEqual(self._dot_for(query_fn).strip(), self.dot_from_normal)

    def _assert_bad_dims(self, query_fn, expected):
        # Inference must reject the transpose with a dimension error.
        with self.assertRaises(ValueError) as ex:
            BMGInference().infer([query_fn()], {}, 1)
        self.assertEqual(expected.strip(), str(ex.exception).strip())

    def test_transpose_1(self) -> None:
        self._assert_normal_graph(transpose_1)

    def test_transpose_2(self) -> None:
        self._assert_normal_graph(transpose_2)

    def test_transpose_3(self) -> None:
        self._assert_normal_graph(transpose_3)

    def test_unsupported_transpose_1(self) -> None:
        self._assert_bad_dims(unsupported_transpose_1, '\nUnsupported dimension arguments for transpose: 3 and 2\n            ')

    def test_unsupported_transpose_2(self) -> None:
        self._assert_bad_dims(unsupported_transpose_2, '\nUnsupported dimension arguments for transpose: 3 and 1\n            ')

    def test_unsupported_transpose_3(self) -> None:
        self._assert_bad_dims(unsupported_transpose_3, '\nUnsupported dimension arguments for transpose: 3.2 and 1\n            ')

    def test_scalar_transpose(self) -> None:
        # Transposing a scalar is a no-op: the graph contains no Transpose node.
        expected = '\ndigraph "graph" {\n N0[label=0.0];\n N1[label=1.0];\n N2[label=Normal];\n N3[label=Sample];\n N4[label=Query];\n N0 -> N2;\n N1 -> N2;\n N2 -> N3;\n N3 -> N4;\n}\n        '
        self.assertEqual(self._dot_for(scalar_transpose).strip(), expected.strip())

    def test_1x1_transpose(self) -> None:
        # A 1x1 matrix transpose likewise collapses to the plain sample graph.
        expected = '\ndigraph "graph" {\n N0[label=0.0];\n N1[label=1.0];\n N2[label=Normal];\n N3[label=Sample];\n N4[label=Query];\n N0 -> N2;\n N1 -> N2;\n N2 -> N3;\n N3 -> N4;\n}\n        '
        self.assertEqual(self._dot_for(scalar_transpose_2).strip(), expected.strip())
def add_args(subparsers):
    """Attach the `publish` sub-command and all of its options to *subparsers*.

    Registers `handler=run` as the default so the CLI dispatcher can invoke it.
    """
    pub = subparsers.add_parser('publish', formatter_class=argparse.ArgumentDefaultsHelpFormatter, description=__doc__, help='Publish a stream of messages')
    # Connection / identity options.
    pub.add_argument('-c', '--clientid', default=('beem.loadr-%s-%d' % (socket.gethostname(), os.getpid())), help='Set the client id of the publisher, can be useful for acls.\n Default includes host and pid information, unless a keyfile was\n specified, in which case the "user/identity" part is used as the\n client id. The clientid is also used in the default topics.\n ')
    pub.add_argument('-H', '--host', default='localhost', help='MQTT host to connect to')
    pub.add_argument('-p', '--port', type=int, default=1883, help='Port for remote MQTT host')
    pub.add_argument('-q', '--qos', type=int, choices=[0, 1, 2], help='set the mqtt qos for messages published', default=1)
    # Payload shaping options.
    pub.add_argument('-n', '--msg_count', type=int, default=10, help='How many messages to send')
    pub.add_argument('-s', '--msg_size', type=int, default=100, help='Size of messages to send. This will be gaussian at (x, x/20)')
    pub.add_argument('-t', '--timing', action='store_true', help='Message bodies will contain timing information instead of\n random hex characters. This can be combined with --msg-size option')
    pub.add_argument('-T', '--msgs_per_second', type=float, default=0, help='Each publisher should target sending this many msgs per second,\n useful for simulating real devices.')
    pub.add_argument('--jitter', type=float, default=0.1, help='Percentage jitter to use when rate limiting via --msgs_per_sec,\n Can/may help avoid processes sawtoothing and becoming synchronized')
    # Parallelism / topology options.
    pub.add_argument('-P', '--processes', type=int, default=1, help='How many separate processes to spin up (multiprocessing)')
    pub.add_argument('--thread_ratio', type=int, default=1, help='Threads per process (bridged multiprocessing) WARNING! VERY ALPHA!')
    pub.add_argument('-b', '--bridge', action='store_true', help='Instead of connecting directly to the target, fire up a\n separate mosquitto instance configured to bridge to the target')
    pub.add_argument('--psk_file', type=argparse.FileType('r'), help="A file of psk 'identity:key' pairs, as you would pass to\nmosquitto's psk_file configuration option. Each process will use a single\nline from the file. Only as many processes will be made as there are keys")
    pub.add_argument('--json', type=str, default=None, help='Dump the collected stats into the given JSON file.')
    pub.set_defaults(handler=run)
def test_dont_raise_vector():
    """f.at() outside the domain raises by default but yields None with dont_raise."""
    mesh = UnitIntervalMesh(1)
    V = VectorFunctionSpace(mesh, 'CG', 1, dim=2)
    x = SpatialCoordinate(mesh)
    f = Function(V).interpolate(as_vector((x[0], 2.0 * x[0])))
    # The point -1 lies outside the unit interval [0, 1].
    with pytest.raises(PointNotInDomainError):
        f.at(-1)
    assert f.at(-1, dont_raise=True) is None
    outside, inside = f.at([-1, 1], dont_raise=True)
    assert outside is None
    assert np.allclose([1.0, 2.0], inside)
class IPv6ProtoUDP(MatchTest):
    """Match IPv6 ethertype + UDP ip_proto: UDP packets hit; TCP/ICMPv6 miss."""

    def runTest(self):
        # 34525 == 0x86dd (IPv6 ethertype); 17 is the UDP protocol number.
        udp_match = ofp.match([ofp.oxm.eth_type(34525), ofp.oxm.ip_proto(17)])
        self.verify_match(
            udp_match,
            {'udp': simple_udpv6_packet()},
            {'tcp': simple_tcpv6_packet(), 'icmp': simple_icmpv6_packet()},
        )
# NOTE(review): decorator looks garbled — presumably
# `@pytest.mark.usefixtures(...)`; confirm against VCS history.
.usefixtures('_run_around_tests')
def test_transform_output_write_to_folder(tmp_path_factory):
    """Transform outputs are mirrored into the configured transforms_output_folder.

    Only outputs actually written to (output1) should produce files on disk;
    untouched outputs (output2) must not create a folder.
    """
    transforms_output_folder = pathlib.Path(tmp_path_factory.mktemp('transforms_output_folder'))
    with PatchConfig(initial_config_overwrite={'transforms_output_folder': os.fspath(transforms_output_folder)}):
        # NOTE(review): this line looks like a garbled transform decorator —
        # presumably `@transform(output1=Output(...), output2=Output(...))`;
        # confirm against VCS history.
        (output1=Output('/output/to/dataset'), output2=Output('/output/to/dataset2'))
        def transform_me(output1, output2):
            assert isinstance(output1, TransformOutput)
            # Round-trip a small payload through the output filesystem.
            with output1.filesystem().open('output.json', 'w') as f:
                f.write('test')
            with output1.filesystem().open('output.json', 'r') as f:
                content = f.read()
            assert (content == 'test')
            assert (output1.path == '/output/to/dataset')
            assert (output1.branch is not None)
            # NOTE(review): duplicated assertion — likely one of these was meant
            # to check a different attribute.
            assert (output1.rid is not None)
            assert (output1.rid is not None)
        result = transform_me.compute()
        assert ('output1' in result)
        assert ('output2' in result)
        # The written payload must appear under the mirror folder for output1 ...
        with transforms_output_folder.joinpath('output1', 'output.json').open(encoding='UTF-8') as f:
            assert (f.read() == 'test')
        # ... while the untouched output2 must not create a directory at all.
        assert (pathlib.Path((transforms_output_folder / 'output2')).is_dir() is False)
class HasLoggerMeta(type):
    """Metaclass that injects a ``logger`` attribute named after the class qualname."""
    # Logger subclass to instantiate; swap via replace_logger_class().
    logger_class = logging.Logger

    def __new__(mcls: Type[THasLoggerMeta], name: str, bases: Tuple[Type[Any]], namespace: Dict[(str, Any)]) -> THasLoggerMeta:
        # Respect an explicitly supplied logger on the class being created.
        if ('logger' in namespace):
            return super().__new__(mcls, name, bases, namespace)
        if ('__qualname__' not in namespace):
            raise AttributeError('Missing __qualname__')
        # Create the logger while the desired Logger subclass is installed
        # as logging's logger class.
        with _use_logger_class(mcls.logger_class):
            logger = logging.getLogger(namespace['__qualname__'])
        return super().__new__(mcls, name, bases, assoc(namespace, 'logger', logger))

    # NOTE(review): the two methods below take `mcls` but carry no @classmethod
    # decorator here. On a metaclass that may be intentional (accessing them via
    # a produced class binds that class as the first argument) — confirm the
    # intended calling convention against VCS history before changing.
    def replace_logger_class(mcls: Type[THasLoggerMeta], value: Type[logging.Logger]) -> Type[THasLoggerMeta]:
        # Derive a new metaclass whose injected loggers are instances of `value`.
        return type(mcls.__name__, (mcls,), {'logger_class': value})

    def meta_compat(mcls: Type[THasLoggerMeta], other: Type[type]) -> Type[THasLoggerMeta]:
        # Combine with another metaclass to resolve metaclass conflicts.
        return type(mcls.__name__, (mcls, other), {})
def test_list_of_serializable_decoding_rlp_caching(rlp_obj):
    """Decoding with recursive_cache=True caches each element's RLP encoding."""
    encoded_obj = encode(rlp_obj, cache=False)
    encoded_list = encode([rlp_obj, rlp_obj], cache=False)
    pair_sedes = List((type(rlp_obj), type(rlp_obj)))
    decoded = decode(encoded_list, sedes=pair_sedes, recursive_cache=True)
    # Both decoded elements should carry the original object's cached RLP.
    assert decoded[0]._cached_rlp == encoded_obj
    assert decoded[1]._cached_rlp == encoded_obj
def register(registry):
    """Register UI-tester interactions and nested-UI solvers for the enum editors."""
    _IndexedCustomEditor.register(registry)

    def _click_simple(wrapper, _):
        # Click the button backing the simple editor, honoring the tester delay.
        return mouse_click_button(control=wrapper._target._button, delay=wrapper.delay)

    registry.register_interaction(target_class=SimpleEditor, interaction_class=MouseClick, handler=_click_simple)
    register_traitsui_ui_solvers(registry, SimpleEditor, _get_nested_ui_simple)
    registry.register_interaction(target_class=CustomEditor, interaction_class=SelectedText, handler=_get_choice_text)
    register_traitsui_ui_solvers(registry, CustomEditor, _get_nested_ui_custom)
def connect_input(source_node, source_attr_name, target_node, target_attr_name):
    """Mirror an attribute onto the target: forward its incoming connection if any,
    otherwise copy its current static value."""
    upstream = source_node.attr(source_attr_name).inputs(p=1)
    target_attr = target_node.attr(target_attr_name)
    if not upstream:
        # No incoming connection — copy the plain value instead.
        target_attr.set(source_node.attr(source_attr_name).get())
    else:
        upstream[0] >> target_attr
def test_insert_forward_orelse():
    """Forwarding across an insertion in the if-branch must still resolve
    statements in the untouched else-branch."""
    # NOTE: do not reformat example_old — _find_stmt locates statements by their
    # source text, so cosmetic changes would break the lookups below.
    def example_old():
        x: f32
        if (1 < 2):
            x = 1.0
        else:
            x = 2.0
    x1_old = _find_stmt(example_old, 'x = 1.0')
    x2_old = _find_stmt(example_old, 'x = 2.0')
    # Insert a Pass immediately after the if-branch assignment.
    gap = x1_old.after()
    stmt = [LoopIR.Pass(None, x1_old._node.srcinfo)]
    (example_new, fwd) = gap._insert(stmt)
    # The else-branch statement must forward to its counterpart in the new proc.
    x2_new = _find_stmt(example_new, 'x = 2.0')
    assert (fwd(x2_old) == x2_new)
def indexed_event_contract(w3, wait_for_block, wait_for_transaction, address_conversion_func):
    """Deploy the indexed-event contract and return an instance bound to its address."""
    wait_for_block(w3)
    factory = w3.eth.contract(**INDEXED_EVENT_CONTRACT_DATA)
    txn_hash = factory.constructor().transact({'from': w3.eth.coinbase, 'gas': 1000000})
    receipt = wait_for_transaction(w3, txn_hash)
    address = address_conversion_func(receipt['contractAddress'])
    # Sanity-check that the deployed runtime bytecode matches the factory's.
    assert w3.eth.get_code(address) == factory.bytecode_runtime
    contract = factory(address=address)
    assert contract.address == address
    return contract
def _sort_sweep(sweep: Union[(ChoiceSweep, RangeSweep)], reverse: bool) -> Union[(ChoiceSweep, RangeSweep)]:
    """Return a copy of the sweep ordered ascending (or descending if *reverse*)."""
    sweep = copy(sweep)
    if isinstance(sweep, ChoiceSweep):
        sweep.list = sorted(sweep.list, reverse=reverse)
        return sweep
    if isinstance(sweep, RangeSweep):
        assert sweep.start is not None
        assert sweep.stop is not None
        # Flip the range only when it currently runs the wrong way.
        wrong_order = (sweep.start > sweep.stop) if not reverse else (sweep.start < sweep.stop)
        if wrong_order:
            # Swap the endpoints (shifted by one step so the same values are
            # produced) and negate the step to walk the other direction.
            offset = abs(sweep.step) if not reverse else -abs(sweep.step)
            sweep.start, sweep.stop = sweep.stop + offset, sweep.start + offset
            sweep.step = -sweep.step
        return sweep
    assert False
class Atom(Expr):
    """A named propositional atom — a leaf of the expression tree."""
    __slots__ = ('name',)
    # Reuse Expr's hash so equal atoms hash alike despite the custom __eq__.
    __hash__ = Expr.__hash__

    def __init__(self, name):
        # Copy-construct from another Atom by unwrapping its name.
        if isinstance(name, Atom):
            name = name.name
        assert isinstance(name, str), name
        self.name = name

    def __repr__(self):
        return (((self.__class__.__name__ + '("') + self.name) + '")')

    def __str__(self):
        return self.name

    def __eq__(self, other):
        # Exact-class equality: subclasses never compare equal to Atom.
        return (isinstance(other, self.__class__) and (self.name == other.name))

    def substitute_elements(self, subs, **kwargs):
        # Atoms have no sub-elements; nothing to substitute below this node.
        return self

    # NOTE(review): bare `_coconut_tco` looks like a stripped `@_coconut_tco`
    # decorator (Coconut tail-call optimization); confirm against the original
    # .coco source.
    _coconut_tco
    def substitute(self, subs, **kwargs):
        """Substitute this atom via the *subs* mapping; True/False map to top/bot."""
        if (not can_sub(kwargs)):
            return self
        # Coconut-compiled pattern match: look up `self` in the mapping `subs`.
        _coconut_match_to_0 = subs
        _coconut_match_check_0 = False
        _coconut_match_set_name_sub = _coconut_sentinel
        if _coconut.isinstance(_coconut_match_to_0, _coconut.abc.Mapping):
            _coconut_match_temp_0 = _coconut_match_to_0.get(self, _coconut_sentinel)
            if (_coconut_match_temp_0 is not _coconut_sentinel):
                _coconut_match_set_name_sub = _coconut_match_temp_0
                _coconut_match_check_0 = True
        if _coconut_match_check_0:
            if (_coconut_match_set_name_sub is not _coconut_sentinel):
                sub = _coconut_match_set_name_sub
        if _coconut_match_check_0:
            # A substitution for this atom exists: record it and validate the value.
            do_sub(kwargs)
            if wff(sub):
                return sub
            elif (sub is True):
                return top
            elif (sub is False):
                return bot
            else:
                raise TypeError(((('cannot perform substitution ' + str(self)) + ' => ') + str(sub)))
        else:
            # Not in the mapping: recurse into elements (a no-op for atoms).
            return _coconut_tail_call(self.substitute_elements, subs, **kwargs)
class QGradientEditor(QtGui.QMainWindow):
    """Main window hosting a QGradientEditorWidget with File/Help menus."""

    def __init__(self, vtk_table, on_change_color_table=None, colors=None):
        super(QGradientEditor, self).__init__()
        self.setWindowTitle('Color Gradient Editor')
        self.widget = QGradientEditorWidget(master=self, vtk_table=vtk_table, on_change_color_table=on_change_color_table, colors=colors)
        self.setCentralWidget(self.widget)
        self.resize(300, 500)
        self.statusBar()
        menu_bar = self.menuBar()
        # File menu: save/load the color transfer function, quit the app.
        file_menu = menu_bar.addMenu('&File')
        for text, tip, slot in (
            ('&Save', 'Save CTF', self.widget.on_save),
            ('&Load', 'Load CTF', self.widget.on_load),
            ('&Quit', 'Quit application', QtGui.QApplication.instance().quit),
        ):
            self._append_action(file_menu, text, tip, slot)
        # Help menu.
        help_menu = menu_bar.addMenu('&Help')
        self._append_action(help_menu, '&Help', 'Help', self.on_help)
        self._append_action(help_menu, '&About', 'About application', self.on_about)

    def _append_action(self, menu, text, status_tip, slot):
        # Build a QAction wired to *slot* and append it to *menu*.
        action = QtGui.QAction(text, self)
        action.setStatusTip(status_tip)
        action.triggered.connect(slot)
        menu.addAction(action)

    def on_help(self, event=None):
        message = 'Right click to add control points. Left click to move control points'
        QtGui.QMessageBox.information(self, 'Help', message)

    def on_about(self, event=None):
        message = 'tk Gradient Editor for MayaVi1: Gerald Knizia (cgk.)\nwxPython port: Pete Schmitt ()\nQt port: Prabhu Ramachandran\nEnhanced for Mayavi2: Prabhu Ramachandran'
        QtGui.QMessageBox.information(self, 'About gradient editor', message)
class SparkAPI():
    """Builds HMAC-authenticated websocket URLs for an iFlytek Spark endpoint."""

    def __init__(self, appid: str, api_key: str, api_secret: str, spark_url: str) -> None:
        self.appid = appid
        self.api_key = api_key
        self.api_secret = api_secret
        parsed = urlparse(spark_url)
        self.host = parsed.netloc
        self.path = parsed.path
        self.spark_url = spark_url

    def gen_url(self):
        """Return the Spark URL carrying RFC-1123 date, host, and HMAC-SHA256
        authorization as query parameters."""
        rfc1123_date = format_date_time(mktime(datetime.now().timetuple()))
        # The signature covers host, date, and the request line, newline-separated.
        raw_signature = '\n'.join([
            'host: ' + self.host,
            'date: ' + rfc1123_date,
            'GET ' + self.path + ' HTTP/1.1',
        ])
        digest = hmac.new(self.api_secret.encode('utf-8'), raw_signature.encode('utf-8'), digestmod=hashlib.sha256).digest()
        signature_b64 = base64.b64encode(digest).decode(encoding='utf-8')
        auth_origin = f'api_key="{self.api_key}", algorithm="hmac-sha256", headers="host date request-line", signature="{signature_b64}"'
        auth_b64 = base64.b64encode(auth_origin.encode('utf-8')).decode(encoding='utf-8')
        query = {'authorization': auth_b64, 'date': rfc1123_date, 'host': self.host}
        return self.spark_url + '?' + urlencode(query)
def req_with_pagination(f):
    """Decorator: parse the ``page`` query arg (default 1) and pass it as ``page=``.

    Raises ObjectNotFound when the value is not a valid integer.
    """
    # NOTE(review): this bare expression looks like a garbled decorator —
    # presumably `@functools.wraps(f)` on the wrapper; confirm against VCS history.
    (f)
    def wrapper(*args, **kwargs):
        try:
            page = flask.request.args.get('page', 1)
            page = int(page)
        except ValueError as err:
            # Surface a domain error (404) instead of a 500 on e.g. ?page=abc.
            raise ObjectNotFound('Invalid pagination format') from err
        return f(*args, page=page, **kwargs)
    return wrapper
def assemble_data(data_list):
    """Aggregate per-run metric dicts into mean ('-m') and std ('-s') series.

    Parameters
    ----------
    data_list : list of (name, dict)
        Each entry maps metric keys to one value for one run; runs whose names
        differ only in trailing dotted components are grouped together.

    Returns
    -------
    dict
        Maps '<metric>-m' / '<metric>-s' to {group_name: mean/std}; metric keys
        whose groups all stayed empty are dropped.
    """
    data = _dict()
    tmp = _dict()
    partitions = _get_partitions(data_list)
    # First pass: bucket raw values by derived group name and metric key.
    for (name, data_dict) in data_list:
        if (not data_dict):
            continue
        (prefix, name) = _split(name, '/', a_s='/')
        splitted = name.split('.')
        if partitions:
            # Drop the last dotted component, then re-join with '--' separators.
            name = tuple(splitted[:(- 1)])
            if (len(name) == 3):
                name = ('%s.%s--%s' % name)
            else:
                name = ('%s--%s' % name)
        else:
            # Without partitions, drop the last two dotted components.
            name = '.'.join(splitted[:(- 2)])
        name = ('%s%s' % (prefix, name))
        if (name not in tmp):
            # First time this group is seen: pre-create accumulators for its keys.
            tmp[name] = _dict()
            for k in data_dict.keys():
                tmp[name][k] = list()
                if (('%s-m' % k) not in data):
                    data[('%s-m' % k)] = _dict()
                    data[('%s-s' % k)] = _dict()
                data[('%s-m' % k)][name] = list()
                data[('%s-s' % k)][name] = list()
        for (k, v) in data_dict.items():
            tmp[name][k].append(v)
    # Second pass: collapse each bucket to mean/std, ignoring None entries.
    for (name, data_dict) in tmp.items():
        for (k, v) in data_dict.items():
            if (not v):
                continue
            try:
                v = [i for i in v if (i is not None)]
                if v:
                    data[('%s-m' % k)][name] = np.mean(v)
                    data[('%s-s' % k)][name] = np.std(v)
            except Exception as exc:
                # Aggregation is best-effort; surface failures as warnings only.
                warnings.warn(('Aggregating data for %s failed. Raw data:\n%r\nDetails: %r' % (k, v, exc)), MetricWarning)
    # Finally, discard metric keys whose every group stayed empty.
    discard = list()
    for (key, data_dict) in data.items():
        empty = True
        for val in data_dict.values():
            if (val or (val == 0)):
                empty = False
        if empty:
            discard.append(key)
    for key in discard:
        data.pop(key)
    return data
# NOTE(review): this bare parenthesized expression looks like a stripped
# docstring-substitution decorator (e.g. `@Substitution(...)`); confirm
# against VCS history.
(ignore_format=_ignore_format_docstring, variables=_variables_categorical_docstring, unseen=_unseen_docstring, variables_=_variables_attribute_docstring, feature_names_in_=_feature_names_in_docstring, n_features_in_=_n_features_in_docstring, fit_transform=_fit_transform_docstring, transform=_transform_encoders_docstring, inverse_transform=_inverse_transform_docstring)
class WoEEncoder(CategoricalInitMixin, CategoricalMethodsMixin, WoE):
    """Encode categorical variables by their weight of evidence (WoE) against a binary target."""

    def __init__(self, variables: Union[(None, int, str, List[Union[(str, int)]])]=None, ignore_format: bool=False, unseen: str='ignore', fill_value: Union[(int, float, None)]=None) -> None:
        super().__init__(variables, ignore_format)
        check_parameter_unseen(unseen, ['ignore', 'raise'])
        if ((fill_value is not None) and (not isinstance(fill_value, (int, float)))):
            raise ValueError(f'fill_value takes None, integer or float. Got {fill_value} instead.')
        self.unseen = unseen
        self.fill_value = fill_value

    def fit(self, X: pd.DataFrame, y: pd.Series):
        """Learn the per-category WoE mapping for each selected variable."""
        (X, y) = self._check_fit_input(X, y)
        variables_ = self._check_or_select_variables(X)
        _check_contains_na(X, variables_)
        encoder_dict_ = {}
        vars_that_fail = []
        for var in variables_:
            try:
                (_, _, woe) = self._calculate_woe(X, y, var, self.fill_value)
                encoder_dict_[var] = woe.to_dict()
            except ValueError:
                # A zero numerator/denominator makes WoE undefined; collect all
                # offending variables and report them together below.
                vars_that_fail.append(var)
        if (len(vars_that_fail) > 0):
            vars_that_fail_str = (', '.join(vars_that_fail) if (len(vars_that_fail) > 1) else vars_that_fail[0])
            raise ValueError(f"During the WoE calculation, some of the categories in the following features contained 0 in the denominator or numerator, and hence the WoE can't be calculated: {vars_that_fail_str}.")
        self.encoder_dict_ = encoder_dict_
        self.variables_ = variables_
        self._get_feature_names_in(X)
        return self

    def transform(self, X: pd.DataFrame) -> pd.DataFrame:
        """Replace categories with their learned WoE values."""
        X = self._check_transform_input_and_state(X)
        _check_contains_na(X, self.variables_)
        X = self._encode(X)
        return X

    def _more_tags(self):
        # sklearn estimator-check tags: needs y, operates on categoricals,
        # and opts out of the common estimator checks.
        tags_dict = _return_tags()
        tags_dict['variables'] = 'categorical'
        tags_dict['requires_y'] = True
        tags_dict['_skip_test'] = True
        return tags_dict
def mock_password_expiration_warning_is_configured_pass(self, cmd):
    """Mock a successful command runner for password-expiration checks.

    Returns a SimpleNamespace mimicking a completed process: PASS_WARN_AGE
    queries report 7 days; /etc/shadow queries report both users set to 7.
    Unrecognized commands return empty output (previously this raised
    UnboundLocalError because stdout was never assigned).
    """
    returncode = 0
    stderr = ['']
    stdout = ['']  # default for commands the mock does not recognize
    if 'PASS_WARN_AGE' in cmd:
        stdout = ['PASS_WARN_AGE 7']
    elif 'shadow' in cmd:
        stdout = ['root:7', 'vagrant:7']
    return SimpleNamespace(returncode=returncode, stderr=stderr, stdout=stdout)
def test_subset_prune_gdef_markglyphsetsdef():
    """Subsetting must renumber/prune GDEF MarkGlyphSetsDef entries consistently
    with the UseMarkFilteringSet lookups that reference them."""
    # Build a minimal TTF with three mark-filtering lookups, one per comb mark.
    fb = FontBuilder(unitsPerEm=1000, isTTF=True)
    glyph_order = ['.notdef', 'A', 'Aacute', 'Acircumflex', 'Adieresis', 'a', 'aacute', 'acircumflex', 'adieresis', 'dieresiscomb', 'acutecomb', 'circumflexcomb']
    fb.setupGlyphOrder(glyph_order)
    fb.setupGlyf({g: TTGlyphPen(None).glyph() for g in glyph_order})
    fb.setupHorizontalMetrics({g: (500, 0) for g in glyph_order})
    fb.setupHorizontalHeader()
    fb.setupPost()
    fb.setupNameTable({'familyName': 'TestGDEFMarkGlyphSetsDef', 'styleName': 'Regular'})
    fb.addOpenTypeFeatures('\n        feature ccmp {\n            lookup ccmp_1 {\n                lookupflag UseMarkFilteringSet [acutecomb];\n                sub a acutecomb by aacute;\n                sub A acutecomb by Aacute;\n            } ccmp_1;\n            lookup ccmp_2 {\n                lookupflag UseMarkFilteringSet [circumflexcomb];\n                sub a circumflexcomb by acircumflex;\n                sub A circumflexcomb by Acircumflex;\n            } ccmp_2;\n            lookup ccmp_3 {\n                lookupflag UseMarkFilteringSet [dieresiscomb];\n                sub a dieresiscomb by adieresis;\n                sub A dieresiscomb by Adieresis;\n                sub A acutecomb by Aacute;\n            } ccmp_3;\n        } ccmp;\n    ')
    buf = io.BytesIO()
    fb.save(buf)
    buf.seek(0)
    font = TTFont(buf)
    # Sanity checks on the un-subset font: 3 filtering lookups, 3 mark sets.
    features = font['GSUB'].table.FeatureList.FeatureRecord
    assert (features[0].FeatureTag == 'ccmp')
    lookups = font['GSUB'].table.LookupList.Lookup
    assert (lookups[0].LookupFlag == 16)
    assert (lookups[0].MarkFilteringSet == 0)
    assert (lookups[1].LookupFlag == 16)
    assert (lookups[1].MarkFilteringSet == 1)
    assert (lookups[2].LookupFlag == 16)
    assert (lookups[2].MarkFilteringSet == 2)
    marksets = font['GDEF'].table.MarkGlyphSetsDef.Coverage
    assert (marksets[0].glyphs == ['acutecomb'])
    assert (marksets[1].glyphs == ['circumflexcomb'])
    assert (marksets[2].glyphs == ['dieresiscomb'])
    # Subset away circumflexcomb: its lookup and mark set must be dropped,
    # and the remaining MarkFilteringSet indices renumbered.
    options = subset.Options(layout_features=['*'])
    subsetter = subset.Subsetter(options)
    subsetter.populate(glyphs=['A', 'a', 'acutecomb', 'dieresiscomb'])
    subsetter.subset(font)
    features = font['GSUB'].table.FeatureList.FeatureRecord
    assert (features[0].FeatureTag == 'ccmp')
    lookups = font['GSUB'].table.LookupList.Lookup
    assert (lookups[0].LookupFlag == 16)
    assert (lookups[0].MarkFilteringSet == 0)
    assert (lookups[1].LookupFlag == 16)
    assert (lookups[1].MarkFilteringSet == 1)
    marksets = font['GDEF'].table.MarkGlyphSetsDef.Coverage
    assert (marksets[0].glyphs == ['acutecomb'])
    assert (marksets[1].glyphs == ['dieresiscomb'])
    # Rebuild a fresh font and subset without closure: lookup 2 loses all mark
    # filtering (its set becomes empty), so the flag and index are cleared.
    buf = io.BytesIO()
    fb.save(buf)
    buf.seek(0)
    font = TTFont(buf)
    options = subset.Options(layout_features=['*'], layout_closure=False)
    subsetter = subset.Subsetter(options)
    subsetter.populate(glyphs=['A', 'acutecomb', 'Aacute'])
    subsetter.subset(font)
    features = font['GSUB'].table.FeatureList.FeatureRecord
    assert (features[0].FeatureTag == 'ccmp')
    lookups = font['GSUB'].table.LookupList.Lookup
    assert (lookups[0].LookupFlag == 16)
    assert (lookups[0].MarkFilteringSet == 0)
    assert (lookups[1].LookupFlag == 0)
    # Fixed: compare against None with `is`, not `==` (PEP 8 / E711).
    assert (lookups[1].MarkFilteringSet is None)
    marksets = font['GDEF'].table.MarkGlyphSetsDef.Coverage
    assert (marksets[0].glyphs == ['acutecomb'])
def test_justification_and_finalization(casper, concise_casper, funded_accounts, validation_keys, deposit_amount, new_epoch, induct_validators, send_vote, mk_suggested_vote):
    """Ten epochs of unanimous voting must justify and finalize each checkpoint
    and advance the dynasty by one per epoch."""
    deposits = [deposit_amount] * len(funded_accounts)
    validator_indexes = induct_validators(funded_accounts, validation_keys, deposits)
    assert concise_casper.total_curdyn_deposits_in_wei() == sum(deposits)
    expected_dynasty = concise_casper.dynasty()
    for _ in range(10):
        # Every validator casts the suggested vote for this epoch.
        for key, validator_index in zip(validation_keys, validator_indexes):
            send_vote(mk_suggested_vote(validator_index, key))
        assert concise_casper.main_hash_justified()
        assert concise_casper.checkpoints__is_finalized(concise_casper.recommended_source_epoch())
        new_epoch()
        expected_dynasty += 1
        assert concise_casper.dynasty() == expected_dynasty
# NOTE(review): decorator looks garbled — presumably a registry decorator such
# as `@registry.llm_models('spacy.Text-Babbage.v1')`; confirm against VCS history.
_models('spacy.Text-Babbage.v1')
def openai_text_babbage(config: Dict[(Any, Any)]=SimpleFrozenDict(), name: Literal['text-babbage-001']='text-babbage-001', strict: bool=OpenAI.DEFAULT_STRICT, max_tries: int=OpenAI.DEFAULT_MAX_TRIES, interval: float=OpenAI.DEFAULT_INTERVAL, max_request_time: float=OpenAI.DEFAULT_MAX_REQUEST_TIME, endpoint: Optional[str]=None) -> Callable[([Iterable[str]], Iterable[str])]:
    """Return an OpenAI 'text-babbage-001' model callable using the non-chat completions endpoint."""
    return OpenAI(name=name, endpoint=(endpoint or Endpoints.NON_CHAT.value), config=config, strict=strict, max_tries=max_tries, interval=interval, max_request_time=max_request_time)
class CheckBox(BaseButton):
    """A checkbox widget (PyScript — this code is transpiled to run in the browser)."""

    def _create_dom(self):
        global window
        # Wrap the <input type="checkbox"> in a <label> so clicking the label
        # text also toggles the box (via the for/id pairing below).
        outernode = window.document.createElement('label')
        node = window.document.createElement('input')
        outernode.appendChild(node)
        node.setAttribute('type', 'checkbox')
        node.setAttribute('id', self.id)
        outernode.setAttribute('for', self.id)
        self._addEventListener(node, 'click', self._check_changed_from_dom, 0)
        return (outernode, node)

    def _render_dom(self):
        return [self.node, self.text]

    # NOTE(review): the bare ('checked') looks like a stripped reaction
    # decorator (e.g. `@event.reaction('checked')`); confirm against VCS history.
    ('checked')
    def __check_changed(self, *events):
        # Python -> DOM: push the `checked` property into the input node.
        self.node.checked = self.checked

    def _check_changed_from_dom(self, ev):
        # DOM -> Python: propagate a user click as a user_checked event.
        self.user_checked(self.node.checked)
class TestEnum():
    """Unit tests for the enum model type (fixtures supply Color instances)."""

    def test_declaration(self, color):
        """The rendered declaration lists members with explicit values."""
        assert (color.declaration() == 'enum Color {\n\tred = 0,\n\tgreen = 1,\n\tblue = 2\n}')

    def test_str(self, color):
        assert (str(color) == 'Color')

    def test_copy(self, color):
        """A copy must be an equal but distinct object."""
        new_color = color.copy()
        assert (new_color == color)
        # Fixed: the original compared `id(new_color)` (an int) to `color`
        # (an object), which is vacuously true and never tested distinctness.
        assert (new_color is not color)

    def test_add_members(self, empty_color, color, red, green, blue):
        """Adding all members to an empty enum reproduces the full enum."""
        empty_color.add_member(red)
        empty_color.add_member(green)
        empty_color.add_member(blue)
        assert (empty_color == color)

    def test_get_complex_type_name(self, color):
        assert (color.complex_type_name == ComplexTypeName(0, 'Color'))
class CustomSharedMemorySize(ElyraProperty):
    """Elyra pipeline property: custom shared memory (/dev/shm) size for a node's pod."""
    applies_to_generic = True  # applies to generic (script/notebook) nodes
    applies_to_custom = True  # applies to custom component nodes
    property_id = KUBERNETES_SHARED_MEM_SIZE
    property_display_name = 'Shared Memory Size'
    property_description = 'Configure a custom shared memory size in\n    gigabytes (10^9 bytes) for the pod that executes a node. A custom\n    value is assigned if the size property value is a number greater than zero.'
    property_attributes = [PropertyAttribute(attribute_id='size', display_name='Memory Size (GB)', allowed_input_types=[PropertyInputType(base_type='int', placeholder=0)], hidden=False, required=False), PropertyAttribute(attribute_id='units', display_name='Units', allowed_input_types=[PropertyInputType(base_type='str')], hidden=True, required=False)]
    default_units = 'G'

    def __init__(self, size: str, units: str, **kwargs):
        self.size = size
        # Fall back to the class default when no units were supplied.
        self.units = (units or CustomSharedMemorySize.default_units)

    # NOTE(review): takes `cls` but carries no @classmethod decorator here —
    # likely stripped in extraction; confirm against VCS history.
    def get_schema(cls) -> Dict[(str, Any)]:
        """Extend the base schema with a non-negative constraint on size."""
        schema = super().get_schema()
        schema['properties']['size']['minimum'] = 0
        return schema

    def to_dict(self) -> Dict[(str, Any)]:
        """Return {attribute_id: value} for every declared property attribute."""
        dict_repr = {attr.id: getattr(self, attr.id, None) for attr in self.property_attributes}
        return dict_repr

    def get_value_for_display(self) -> Dict[(str, Any)]:
        return self.to_dict()

    def get_all_validation_errors(self) -> List[str]:
        """Validate size (positive number) and units (only 'G' supported)."""
        validation_errors = []
        try:
            if self.size:
                size = float(self.size)
                if (size < 0):
                    raise ValueError()
        except ValueError:
            validation_errors.append(f"Shared memory size '{self.size}' must be a positive number.")
        if (self.units not in ['G']):
            validation_errors.append(f"Shared memory size units '{self.units}' must be 'G'.")
        return validation_errors

    def add_to_execution_object(self, runtime_processor: RuntimePipelineProcessor, execution_object: Any, **kwargs):
        """Delegate to the runtime processor to apply the shared memory setting."""
        runtime_processor.add_custom_shared_memory_size(instance=self, execution_object=execution_object, **kwargs)

    def should_discard(self) -> bool:
        # Discard the property entirely when no size was configured.
        return (not self.size)
class DesktopVPNApplication(VPNApplication):
    """VPN application driver for desktop platforms.

    Augments the base class with protocol detectors (OpenVPN/L2TP/NE) and a
    routing-table heuristic for discovering the VPN server IP; falls back to
    the base-class methods whenever detection fails.
    """

    def __init__(self, app_path, device, config):
        super().__init__(device, config)
        self._app_path = app_path
        # Detectors are tried in order; first non-None result wins.
        self._vpn_detectors = [OpenVPNDetector(self._device), L2TPDetector(self._device), NEDetector(self._device)]
        # Snapshot of the v4 routing table taken just before connect().
        self._routes_before_connect = None

    def _vpn_server_ip_from_route(self):
        """Heuristic: the VPN server IP is the one new public host route added
        after connecting. Returns None if there is no unique candidate."""
        L.debug('Attempting to get VPN server IP from routing table')
        routes = self._device['route'].get_v4_routes()
        if ((not routes) or (not self._routes_before_connect)):
            return None
        potential_routes = []
        for route in routes:
            # Ignore routes that existed before the VPN connected.
            if (route in self._routes_before_connect):
                continue
            dest_ip = route.destination_ip()
            # Keep only gateway routes to public destinations.
            if (dest_ip and route.gateway_ip() and (not dest_ip.is_private)):
                potential_routes.append(route)
        if (len(potential_routes) == 1):
            ip = potential_routes[0].destination_ip()
            L.debug('Got VPN server IP {} from routing table'.format(ip))
            return ip
        elif (len(potential_routes) >= 1):
            L.warning('Tried to get VPN server from route table but found more than one candidate:\n{}'.format(potential_routes))
        else:
            L.warning("Couldn't find any routes which look like they're for the VPN server")
        return None

    def open(self):
        """Open the VPN app by explicit path if configured, else defer to base."""
        if self._app_path:
            L.info('Opening VPN application {} (it might take a moment to appear)'.format(self._config['name']))
            self._device.open_app(self._app_path)
        else:
            super().open()

    def close(self):
        """Close the VPN app by explicit path if configured, else defer to base."""
        if self._app_path:
            L.info('Closing VPN app: {}'.format(self._app_path))
            self._device.close_app(self._app_path)
        else:
            super().close()

    def connect(self):
        # Snapshot the routing table so _vpn_server_ip_from_route can diff it.
        self._routes_before_connect = self._device['route'].get_v4_routes()
        super().connect()

    def disconnect(self):
        super().disconnect()
        self._routes_before_connect = None

    def _vpn_info(self):
        """Return the first detector result that identifies the VPN, or None."""
        for detector in self._vpn_detectors:
            info = detector.detect()
            if (info is not None):
                return info
        return None

    def protocol(self):
        """Return the detected VPN protocol, falling back to the base method."""
        info = self._vpn_info()
        if ((info is not None) and info.protocol):
            return info.protocol
        L.warning("Couldn't determine the VPN protocol. Will fallback to default method")
        return super().protocol()

    def vpn_processes(self):
        """Return detected VPN processes, falling back to the base method."""
        info = self._vpn_info()
        if ((info is not None) and info.vpn_processes):
            return info.vpn_processes
        L.warning("Couldn't find VPN processes. Will fallback to default method")
        return super().vpn_processes()

    def vpn_server_ip(self):
        """Return the VPN server IP from detectors, then the routing-table
        heuristic, then the base method."""
        info = self._vpn_info()
        if ((info is not None) and info.vpn_server_ip):
            return info.vpn_server_ip
        ip = self._vpn_server_ip_from_route()
        if ip:
            return ip
        L.warning("Couldn't find VPN server IP. Will fallback to default method")
        return super().vpn_server_ip()

    def dns_server_ips(self):
        """Return detected DNS server IPs, falling back to the base method."""
        info = self._vpn_info()
        if ((info is not None) and info.dns_server_ips):
            return info.dns_server_ips
        L.warning("Couldn't find VPN DNS server IPs. Will fallback to default method")
        return super().dns_server_ips()
def run_test(quadrilateral):
    """Flux-reconstruction consistency test for upwind DG0 advection on a sphere.

    Advects a scalar field with a solid-body-rotation velocity, reconstructs
    the inter-element flux in a Raviart-Thomas space, and checks that the
    divergence of the reconstructed flux reproduces the DG advection operator
    applied to the field (to round-off).

    NOTE(review): relies on Firedrake/UFL names imported at module level
    (FunctionSpace, FiniteElement, solve, ...) — not visible in this chunk.
    """
    # Choose mesh/element family: cubed sphere with RTCF quads, or
    # icosahedral sphere with RT triangles.
    if quadrilateral:
        mesh = UnitCubedSphereMesh(refinement_level=2)
        RT_elt = FiniteElement('RTCF', 'quadrilateral', 1)
    else:
        mesh = UnitIcosahedralSphereMesh(refinement_level=2)
        RT_elt = FiniteElement('RT', 'triangle', 1)
    x = SpatialCoordinate(mesh)
    # Outward radial unit vector x/|x|, used to orient the sphere's cells.
    global_normal = as_vector(((x[0] / sqrt((((x[0] * x[0]) + (x[1] * x[1])) + (x[2] * x[2])))), (x[1] / sqrt((((x[0] * x[0]) + (x[1] * x[1])) + (x[2] * x[2])))), (x[2] / sqrt((((x[0] * x[0]) + (x[1] * x[1])) + (x[2] * x[2]))))))
    mesh.init_cell_orientations(global_normal)
    V_dg = FunctionSpace(mesh, 'DG', 0)
    M = FunctionSpace(mesh, RT_elt)
    # Solid-body rotation about the z-axis: u0 = (-y, x, 0).
    u0 = as_vector(((- x[1]), x[0], 0))
    u = Function(M).project(u0)
    n = FacetNormal(mesh)
    # Upwind switch: positive part of u.n, i.e. 0.5*(u.n + |u.n|).
    un = (0.5 * (dot(u, n) + abs(dot(u, n))))
    phi = TestFunction(V_dg)
    D = TrialFunction(V_dg)
    a_mass = (inner(D, phi) * dx)  # DG mass matrix
    # Interior advection term and upwind facet flux of the DG discretisation.
    a_int = (inner(((- u) * D), grad(phi)) * dx)
    a_flux = (inner(((un('+') * D('+')) - (un('-') * D('-'))), jump(phi)) * dS)
    arhs = (a_int + a_flux)
    D1 = Function(V_dg)
    # Initial condition: smooth bump exp(-z^2 - y^2).
    D0 = exp(((- pow(x[2], 2)) - pow(x[1], 2)))
    D = Function(V_dg).interpolate(D0)
    # D1 = M^{-1} applied to the advection operator acting on D.
    D1problem = LinearVariationalProblem(a_mass, action(arhs, D), D1)
    D1solver = LinearVariationalSolver(D1problem)
    D1solver.solve()
    # Reconstruct a flux field Fs in the RT space whose facet normal
    # components match (per side) 2*un*D, tested against n.w on each facet.
    V1 = FunctionSpace(mesh, RT_elt)
    w = TestFunction(V1)
    Ft = TrialFunction(V1)
    Fs = Function(V1)
    aFs = (((inner(n('+'), w('+')) * inner(Ft('+'), n('+'))) + (inner(n('-'), w('-')) * inner(Ft('-'), n('-')))) * dS)
    LFs = ((2.0 * (((inner(n('+'), w('+')) * un('+')) * D('+')) + ((inner(n('-'), w('-')) * un('-')) * D('-')))) * dS)
    Fsproblem = LinearVariationalProblem(aFs, LFs, Fs)
    # 'preonly' so the facet system is solved by the preconditioner alone.
    Fssolver = LinearVariationalSolver(Fsproblem, solver_parameters={'ksp_type': 'preonly'})
    Fssolver.solve()
    # Project div(Fs) into DG0 and compare with D1; the two should agree to
    # round-off if the flux reconstruction is consistent.
    divFs = Function(V_dg)
    solve((a_mass == (inner(div(Fs), phi) * dx)), divFs)
    assert (errornorm(divFs, D1, degree_rise=0) < 1e-12)
class CoreStickBallCollision(ABC):
    """Base class for cue-stick/ball collision models.

    Subclasses implement solve(); resolve() handles the copy-vs-inplace
    plumbing so a solver may always mutate the objects it receives.
    """

    def resolve(self, cue: Cue, ball: Ball, inplace: bool=False) -> Tuple[(Cue, Ball)]:
        """Resolve the stick-ball collision.

        :param cue: the striking cue.
        :param ball: the struck ball.
        :param inplace: when False (default), the cue and ball are copied
            first so the caller's objects are left untouched.
        :return: the (possibly copied) cue and ball after the collision.
        """
        if not inplace:
            cue = cue.copy()
            ball = ball.copy()
        return self.solve(cue, ball)

    def solve(self, cue: Cue, ball: Ball) -> Tuple[(Cue, Ball)]:
        """Apply the collision physics; must be overridden by subclasses.

        Fix: the original body was a bare `pass`, so resolve() silently
        returned None for any subclass that forgot to override solve().
        Raise loudly instead.
        """
        raise NotImplementedError('subclasses of CoreStickBallCollision must implement solve()')
# NOTE(review): the decorator lines were garbled in the original source
# (bare tuples with keyword arguments — a SyntaxError). Reconstructed from
# the option specs below; confirm against the original CLI definition.
@click.command()
@click.option('-i', 'infile', type=click.File('r'), default='-', help='Input file (Default: stdin)')
@click.option('-v', 'verbose', is_flag=True, default=False, help='Verbose output')
@click.option('--json', 'json_output', is_flag=True, default=False, help='JSON output')
@click.argument('field', type=click.STRING)
def cmd_data_select(infile, json_output, verbose, field):
    """Select FIELD from each item of the JSON input and print the values.

    Reads a JSON document (object or list of objects) from `infile`,
    collects the value of `field` from every item that has it, and prints
    the results — as a JSON array when --json is given, otherwise one
    value per line (list values are printed sorted, one entry per line).
    Returns False on invalid JSON input, True for JSON output, and None
    otherwise.
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')
    try:
        data = json.loads(infile.read())
    except ValueError:
        # Fix: typo in the user-facing message ('Whe expect' -> 'We expect').
        click.echo('Invalid input data. We expect JSON here.', err=True)
        return False
    # Normalise a single object into a one-element list.
    if not isinstance(data, list):
        data = [data]
    result = [item[field] for item in data if field in item]
    if json_output:
        print(json.dumps(result, indent=4))
        return True
    for r in result:
        if isinstance(r, list):
            print('\n'.join(sorted(r)))
        else:
            print(r)
def convert_timestamp(zone_key: str, timestamp_string: str, logger: Logger=getLogger(__name__)):
    """Normalise whitespace in a US-format timestamp string and parse it.

    :param zone_key: zone identifier attached to the log record's `extra`.
    :param timestamp_string: timestamp like '01/31/2024 11:59:59 PM'.
    :param logger: destination logger (module logger by default).
    :return: timezone-aware datetime, localised to the module-level
        `timezone_name` (defined elsewhere in the file).
    """
    # Collapse runs of whitespace so the fixed-format parse succeeds.
    timestamp_string = re.sub('\\s+', ' ', timestamp_string)
    # Fix: parse once. The original parsed the string twice — once inside an
    # always-evaluated f-string for the debug message and again for the
    # return value.
    parsed = arrow.get(timestamp_string, 'MM/DD/YYYY HH:mm:ss A', tzinfo=timezone_name)
    logger.debug(f'PARSED TIMESTAMP {parsed}', extra={'key': zone_key})
    return parsed.datetime
def draw_pdf(filename, size=(800, 600)):
    """Render the plot into a PDF file, drawing it on two pages.

    :param filename: destination PDF path.
    :param size: (width, height) outer bounds applied to the plot container.
    """
    from chaco.pdf_graphics_context import PdfPlotGraphicsContext
    container = create_plot()
    container.outer_bounds = list(size)
    container.do_layout(force=True)
    pdf_context = PdfPlotGraphicsContext(filename=filename, dest_box=(0.5, 0.5, 5.0, 5.0))
    # Render the same component onto two consecutive pages.
    for _ in range(2):
        pdf_context.render_component(container)
        pdf_context.add_page()
    pdf_context.save()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.