code stringlengths 281 23.7M |
|---|
def test_lifecycle_hooks():
    """Lifecycle hooks are absent by default and rendered verbatim when configured."""
    # Default chart values: the container spec must not carry a lifecycle section.
    rendered = helm_template('')
    container = rendered['statefulset'][name]['spec']['template']['spec']['containers'][0]
    assert 'lifecycle' not in container

    # With a preStop hook configured, it must surface unchanged on the container.
    hook_values = '\n lifecycle:\n preStop:\n exec:\n command: ["/bin/bash","/preStop"]\n '
    rendered = helm_template(hook_values)
    container = rendered['statefulset'][name]['spec']['template']['spec']['containers'][0]
    assert container['lifecycle']['preStop']['exec']['command'] == ['/bin/bash', '/preStop']
class TestBurnBitCommands(EfuseTestCase):
    """Exercise ``espefuse.py burn_bit`` for each chip family's eFuse block layout.

    The ``@pytest.mark.skipif`` decorators were garbled to bare ``.skipif`` lines
    (a syntax error) in this dump; they are restored here.
    """

    @pytest.mark.skipif(arg_chip != 'esp32', reason='ESP32-only')
    def test_burn_bit_for_chips_with_3_key_blocks(self):
        """Burn individual bits into BLOCK3 and verify the resulting byte pattern."""
        self.espefuse_py('burn_bit -h')
        self.espefuse_py('burn_bit BLOCK3 0 1 2 4 8 16 32 64 96 128 160 192 224 255')
        self.espefuse_py('summary', check_msg='17 01 01 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 80')
        self.espefuse_py('burn_bit BLOCK3 3 5 6 7 9 10 11 12 13 14 15 31 63 95 127 159 191 223 254')
        self.espefuse_py('summary', check_msg='ff ff 01 80 01 00 00 80 01 00 00 80 01 00 00 80 01 00 00 80 01 00 00 80 01 00 00 80 01 00 00 c0')

    @pytest.mark.skipif(arg_chip != 'esp32c2', reason='ESP32-C2-only')
    def test_burn_bit_for_chips_with_1_key_block(self):
        """On a 1-key-block chip, re-burning a key block must be rejected (RS coding)."""
        self.espefuse_py('burn_bit -h')
        self.espefuse_py('burn_bit BLOCK3 0 1 2 4 8 16 32 64 96 128 160 192 224 255')
        self.espefuse_py('summary', check_msg='17 01 01 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 80')
        self.espefuse_py('burn_bit BLOCK3 100', check_msg='Burn into BLOCK_KEY0 is forbidden (RS coding scheme does not allow this)', ret_code=2)
        self.espefuse_py('burn_bit BLOCK0 0 1 2')
        self.espefuse_py('summary', check_msg='[0 ] read_regs: ')

    @pytest.mark.skipif(arg_chip not in ['esp32s2', 'esp32s3', 'esp32s3beta1', 'esp32c3', 'esp32h2beta1', 'esp32c6', 'esp32h2', 'esp32p4'], reason='Only chip with 6 keys')
    def test_burn_bit_for_chips_with_6_key_blocks(self):
        """On 6-key-block chips, re-burning user data is rejected but BLOCK0 works."""
        self.espefuse_py('burn_bit -h')
        self.espefuse_py('burn_bit BLOCK3 0 1 2 4 8 16 32 64 96 128 160 192 224 255')
        self.espefuse_py('summary', check_msg='17 01 01 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 80')
        self.espefuse_py('burn_bit BLOCK3 100', check_msg='Burn into BLOCK_USR_DATA is forbidden (RS coding scheme does not allow this)', ret_code=2)
        self.espefuse_py('burn_bit BLOCK0 13')
        self.espefuse_py('summary', check_msg='[0 ] read_regs: ')
        self.espefuse_py('burn_bit BLOCK0 24')
        self.espefuse_py('summary', check_msg='[0 ] read_regs: ')

    @pytest.mark.skipif(arg_chip != 'esp32', reason='3/4 coding scheme is only in esp32')
    def test_burn_bit_with_34_coding_scheme(self):
        """Under the 3/4 coding scheme, a second burn into BLOCK3 must be rejected."""
        self._set_34_coding_scheme()
        self.espefuse_py('burn_bit BLOCK3 0 1 2 4 8 16 32 64 96 128 160 191')
        self.espefuse_py('summary', check_msg='17 01 01 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 80')
        self.espefuse_py('burn_bit BLOCK3 17', check_msg='Burn into BLOCK3 is forbidden (3/4 coding scheme does not allow this).', ret_code=2)
class TestTempDir(unittest.TestCase):
    """Verify that ``utils.TempDir`` removes its directory even when the body raises."""

    def tearDown(self):
        # Safety net: if the context manager failed to clean up, do it here so
        # one failing test does not leak directories into later tests.
        if os.path.exists(self.path):
            shutil.rmtree(self.path)

    def test_temp_dir_removed_when_exception_occurs(self):
        """The temp directory must be gone after an exception escapes the with-block."""
        try:
            with utils.TempDir() as temp:
                self.path = temp
                assert os.path.exists(self.path)
                raise Exception()
        except Exception:
            # The exception is deliberate; only the cleanup below matters.
            # (Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
            # are not swallowed.)
            pass
        assert not os.path.exists(self.path)
class QGradientEditorWidget(QtGui.QWidget, GradientEditorWidget):
    """Qt widget hosting a gradient preview strip plus one function-editor row per channel."""

    def __init__(self, master, vtk_table, on_change_color_table=None, colors=None):
        # Forward all construction arguments to the GradientEditorWidget mixin
        # via cooperative super().__init__.
        kw = dict(master=master, vtk_table=vtk_table, on_change_color_table=on_change_color_table, colors=colors)
        super().__init__(**kw)
        # Dimensions and editor state come from the mixin after initialization.
        gradient_preview_width = self.gradient_preview_width
        gradient_preview_height = self.gradient_preview_height
        channel_function_width = self.channel_function_width
        channel_function_height = self.channel_function_height
        # Two-column grid: labels in column 0 (fixed), controls in column 1 (stretch).
        grid = QtGui.QGridLayout()
        grid.setColumnStretch(0, 0)
        grid.setColumnStretch(1, 1)
        self.gradient_control = QGradientControl(self, self.gradient_table, gradient_preview_width, gradient_preview_height)
        self.setToolTip('Right click for menu')
        grid.addWidget(QtGui.QLabel('', self), 0, 0)
        grid.addWidget(self.gradient_control, 0, 1)
        # Wire the gradient strip's context menu to the save/load menu below.
        gc = self.gradient_control
        gc.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        gc.customContextMenuRequested.connect(self.contextMenuEventOnGradient)
        function_controls = self.function_controls
        editor_data = self.editor_data
        # One editor row per channel; editor_data[color] provides
        # (tooltip prefix, label text) for that channel.
        row = 1
        for color in self.colors:
            data = editor_data[color]
            control = QFunctionControl(self, self.gradient_table, color, channel_function_width, channel_function_height)
            txt = (data[0] + self.tooltip_text)
            control.setToolTip(txt)
            grid.addWidget(QtGui.QLabel(data[1], self), row, 0)
            grid.addWidget(control, row, 1)
            function_controls.append(control)
            row += 1
        # Status line spanning both columns at the bottom.
        self.text = QtGui.QLabel('status', self)
        grid.addWidget(self.text, row, 0, 1, 2)
        self.setLayout(grid)
        self.show()

    def set_status_text(self, msg):
        """Show *msg* in the status label at the bottom of the widget."""
        self.text.setText(msg)

    def contextMenuEventOnGradient(self, pos):
        """Pop up the Save/Load context menu at *pos* (gradient-strip coordinates)."""
        menu = QtGui.QMenu(self)
        saveAction = menu.addAction('Save as')
        loadAction = menu.addAction('Load')
        action = menu.exec_(self.mapToGlobal(pos))
        if (action == saveAction):
            self.on_save()
        elif (action == loadAction):
            self.on_load()

    def on_save(self, event=None):
        """Prompt for a filename and save the current gradient table."""
        wildcard = 'Gradient Files (*.grad);;All Files (*.*)'
        (filename, filter) = QtGui.QFileDialog.getSaveFileName(self, 'Save LUT to...', '', wildcard)
        if filename:
            self.save(filename)

    def on_load(self, event=None):
        """Prompt for a gradient file and load it."""
        wildcard = 'Gradient Files (*.grad);;All Files (*.*)'
        (filename, filter) = QtGui.QFileDialog.getOpenFileName(self, 'Open gradient file...', '', wildcard)
        if filename:
            self.load(filename)
def plot_timeline(title: str, events: pd.DataFrame, ranks: Optional[List[int]]=None) -> None:
    """Render a plotly timeline of *events*, one row per task, colored by label.

    Requires the calibrated start/end, task, and label columns; raises
    ValueError otherwise. When *ranks* is None it is derived from the
    dataframe's 'rank' column (empty if absent) and only affects figure height.
    """
    started = perf_counter()
    must_have_columns: List[str] = ['calibrated_start_global', 'calibrated_end_global', 'task', 'label']
    if not set(must_have_columns).issubset(set(events.columns)):
        raise ValueError(f"the events dataframe doesn't contain all required columns {must_have_columns}")
    if ranks is None:
        ranks = sorted(events['rank'].unique()) if 'rank' in events.columns else []
    # Extra columns shown on hover, limited to those actually present.
    addition_columns: List[str] = ['rank', 'stream', 'iteration', 's_cat', 'cat', 's_name', 'name', 'dur']
    hover_data = sorted(list(set(events.columns).intersection(set(addition_columns))))
    sorted_tasks = sorted(events['task'].unique())
    figure = px.timeline(
        events,
        x_start='calibrated_start_global',
        x_end='calibrated_end_global',
        y='task',
        hover_data=hover_data,
        category_orders={'task': sorted_tasks},
        color='label',
        color_discrete_sequence=px.colors.qualitative.D3,
        width=1600,
        height=(200 + (120 * len(ranks))),
        title=title,
    )
    figure.show()
    finished = perf_counter()
    logger.debug(f'Plotted timeline in {(finished - started):.2f} seconds')
def test_exclude_columns():
    """Columns named in column_exclude_list must not appear in the list view."""
    app, db, admin = setup()
    with app.app_context():
        Model1, Model2 = create_models(db)
        excluded = ['test2', 'test4', 'enum_field', 'date_field', 'time_field', 'datetime_field', 'sqla_utils_choice', 'sqla_utils_enum', 'sqla_utils_arrow', 'sqla_utils_uuid', 'sqla_utils_url', 'sqla_utils_ip_address', 'sqla_utils_currency', 'sqla_utils_color']
        view = CustomModelView(Model1, db.session, column_exclude_list=excluded)
        admin.add_view(view)
        # Only the non-excluded columns survive, in declaration order.
        expected = [('test1', 'Test1'), ('test3', 'Test3'), ('bool_field', 'Bool Field'), ('email_field', 'Email Field'), ('choice_field', 'Choice Field')]
        assert view._list_columns == expected
        # The rendered page must likewise show kept columns and hide excluded ones.
        client = app.test_client()
        response = client.get('/admin/model1/')
        body = response.data.decode('utf-8')
        assert 'Test1' in body
        assert 'Test2' not in body
def downgrade():
    """Revert the migration: restore the web_pages composite index, then drop nu_outbound_wrappers."""
    op.create_index('web_pages_state_netloc_idx', 'web_pages', ['state', 'netloc'], unique=False)
    # Drop the table's indexes first, then the table itself.
    for index_name in ('ix_nu_outbound_wrappers_link_url', 'ix_nu_outbound_wrappers_container_page'):
        op.drop_index(op.f(index_name), table_name='nu_outbound_wrappers')
    op.drop_table('nu_outbound_wrappers')
def test_rename_intrinsic():
    """Renaming an intrinsic must be refused with an error message and a null result."""
    rename_dir = test_dir / 'rename'
    payload = write_rpc_request(1, 'initialize', {'rootPath': str(rename_dir)})
    payload += rename_request('bar', rename_dir / 'test_rename_nested.f90', 8, 27)
    errcode, results = run_request(payload, ['-n', '1'])
    assert errcode == 0
    # The server reports the refusal as a post message, not a protocol error.
    check_post_msg(results[1], 'Rename failed: Cannot rename intrinsics', 2)
    assert results[2] is None
@pytest.fixture(scope='module')
def sample_image(ref_path: Path) -> Image.Image:
    """Module-scoped fixture: load the 512x512 macaw reference image.

    Skips the whole module when the reference file is missing. The
    ``@pytest.fixture`` decorator was garbled to a bare ``(scope='module')``
    line in this dump; it is restored here.
    """
    test_image = ref_path / 'macaw.png'
    if not test_image.is_file():
        warn(f'could not reference image at {test_image}, skipping')
        pytest.skip(allow_module_level=True)
    img = Image.open(test_image)
    assert img.size == (512, 512)
    return img
class DiplomacyDictProcessor(AbstractGamestateDataProcessor):
    """Collects each country's diplomatic relations and the set of active truces."""

    ID = 'diplomacy'
    DEPENDENCIES = [CountryProcessor.ID]

    # Diplomacy category -> relation attribute whose value 'yes' marks it.
    # NOTE: 'communations' is a typo preserved from the original output schema.
    _RELATION_FLAGS = {
        'rivalries': 'is_rival',
        'defensive_pacts': 'defensive_pact',
        'federations': 'alliance',
        'non_aggression_pacts': 'non_aggression_pledge',
        'closed_borders': 'closed_borders',
        'communations': 'communications',
        'migration_treaties': 'migration_access',
        'commercial_pacts': 'commercial_pact',
        'neighbors': 'borders',
        'research_agreements': 'research_agreement',
        'embassies': 'embassy',
    }

    def __init__(self):
        super().__init__()
        self.diplomacy_dict = None
        self.truce_countries = None

    def initialize_data(self):
        """Reset the per-run accumulators."""
        self.diplomacy_dict = {}
        self.truce_countries = collections.defaultdict(set)

    def data(self):
        """Expose the collected diplomacy and truce data to downstream consumers."""
        return dict(diplomacy=self.diplomacy_dict, truce_countries=self.truce_countries)

    def extract_data_from_gamestate(self, dependencies):
        """Walk each country's relations_manager and bucket relations by category."""
        countries_dict = dependencies[CountryProcessor.ID]
        self.diplomacy_dict = {}
        for country_id in countries_dict:
            self.diplomacy_dict[country_id] = {category: set() for category in self._RELATION_FLAGS}
            country_data = self._gamestate_dict['country'][country_id]
            relations_manager = country_data.get('relations_manager', [])
            # Malformed saves may store something other than a dict here.
            if not isinstance(relations_manager, dict):
                continue
            relations = relations_manager.get('relation', [])
            if not isinstance(relations, list):
                relations = [relations]
            for relation in relations:
                if not isinstance(relation, dict):
                    continue
                target = relation.get('country')
                for category, flag in self._RELATION_FLAGS.items():
                    if relation.get(flag) == 'yes':
                        self.diplomacy_dict[country_id][category].add(target)
                # A truce entry links both parties under the truce id.
                if 'truce' in relation:
                    self.truce_countries[relation['truce']].add(country_id)
                    self.truce_countries[relation['truce']].add(target)
def sync():
    """Sync applicants from jobs.csv into Frappe, updating existing records matched by email.

    Existing applicants are found by email and updated; new ones are inserted.
    A 'Rejected' status is never overwritten.
    """
    print('logging in...')
    # NOTE(review): the original line was `FrappeClient(' 'xxx', 'xxx')` — a
    # syntax error left by redacting the server URL. FrappeClient takes
    # (url, username, password); fill in real values before use.
    client = FrappeClient('xxx', 'xxx', 'xxx')
    # newline='' is the csv-module-documented way to open input files; the old
    # 'rU' mode was removed in Python 3.11.
    with open('jobs.csv', 'r', newline='') as jobsfile:
        reader = csv.reader(jobsfile, dialect='excel')
        for row in reader:
            # Skip the spreadsheet header row.
            if row[0] == 'Timestamp':
                continue
            print('finding ' + row[EMAIL])
            name = client.get_value('Job Applicant', 'name', {'email_id': row[EMAIL]})
            if name:
                doc = client.get_doc('Job Applicant', name['name'])
            else:
                doc = {'doctype': 'Job Applicant'}
            doc['applicant_name'] = row[NAME]
            doc['email_id'] = row[EMAIL]
            doc['introduction'] = row[INTRODUCTION]
            doc['thoughts_on_company'] = row[THOUGHTS_ON_COMPANY]
            doc['likes'] = row[LIKES]
            doc['links'] = row[LINKS]
            doc['phone_number'] = row[PHONE]
            # Never resurrect a rejected applicant.
            if doc.get('status') != 'Rejected':
                doc['status'] = 'Filled Form'
            if name:
                client.update(doc)
                print('Updated ' + row[EMAIL])
            else:
                client.insert(doc)
                print('Inserted ' + row[EMAIL])
@pytest.mark.usefixtures('use_tmpdir')
def test_that_non_existant_job_directory_gives_config_validation_error():
    """A missing INSTALL_JOB_DIRECTORY path must raise ConfigValidationError.

    (The ``@pytest.mark`` prefix was garbled to a bare ``.usefixtures`` line in
    this dump; restored here.)
    """
    test_config_file_base = 'test'
    test_config_file_name = f'{test_config_file_base}.ert'
    test_config_contents = dedent('\n NUM_REALIZATIONS 1\n DEFINE <STORAGE> storage/<CONFIG_FILE_BASE>-<DATE>\n RUNPATH <STORAGE>/runpath/realization-<IENS>/iter-<ITER>\n ENSPATH <STORAGE>/ensemble\n INSTALL_JOB_DIRECTORY does_not_exist\n ')
    with open(test_config_file_name, 'w', encoding='utf-8') as fh:
        fh.write(test_config_contents)
    with pytest.raises(expected_exception=ConfigValidationError, match='Unable to locate job directory'):
        ErtConfig.from_file(test_config_file_name)
class FBPrintAccessibilityIdentifiers(fb.FBCommand):
    """Chisel command (``pa11yi``) that dumps accessibility identifiers for a view tree."""

    def name(self):
        """Command name typed in the lldb console."""
        return 'pa11yi'

    def description(self):
        """One-line help text shown in the command listing."""
        return 'Print accessibility identifiers of all views in hierarchy of <aView>'

    def args(self):
        """Single optional argument; defaults to the application's key window."""
        root_view = fb.FBCommandArgument(arg='aView', type='UIView*', help='The view to print the hierarchy of.', default='(id)[[UIApplication sharedApplication] keyWindow]')
        return [root_view]

    def run(self, arguments, option):
        """Start the accessibility server, then walk and print the hierarchy."""
        forceStartAccessibilityServer()
        printAccessibilityIdentifiersHierarchy(arguments[0])
def gen_list_setup_check(out, cls, version):
    """Emit C source for a pair of helpers: <cls>_<ver>_populate and <cls>_<ver>_check.

    The populate function fills a list of type *cls* with one entry per
    concrete subclass of its entry type; the check function re-reads the list
    and validates every entry, iteration, and the dup functions. Output is
    written to *out* (a file-like object).
    """
    # --- populate function header ---
    out.write(("\n/**\n * Populate a list of type %(cls)s with two of each type of subclass\n * list Pointer to the list to be populated\n * value The seed value to use in populating the list\n * The value after increments for this object's values\n */\nint\n%(cls)s_%(v_name)s_populate(\n %(cls)s_t *list, int value)\n{\n" % dict(cls=cls, v_name=loxi_utils.version_to_name(version))))
    base_type = loxi_utils.list_to_entry_type(cls)
    # Recursion guard: nested lists of the same type bail out immediately.
    # (The % dict(...) is a no-op here; the template has no placeholders.)
    out.write(('\n of_object_t elt;\n int cur_len = 0;\n static int recursion;\n (void) elt;\n (void) cur_len;\n\n if (recursion > 0) {\n return value;\n }\n\n recursion++;\n' % dict(cls=cls, base_type=base_type)))
    # Only concrete (non-virtual) subclasses get instantiated.
    sub_classes = type_maps.sub_class_map(base_type, version)
    sub_classes = [(instance, subcls) for (instance, subcls) in sub_classes if (not type_maps.class_is_virtual(subcls))]
    v_name = loxi_utils.version_to_name(version)
    if (not type_maps.class_is_virtual(base_type)):
        # Concrete entry type: one pointer aliasing the stack element.
        out.write((' /* No subclasses for %s */\n' % base_type))
        out.write((' %s_t *elt_p;\n' % base_type))
        out.write('\n elt_p = &elt;\n')
    else:
        # Virtual entry type: one alias pointer per concrete subclass.
        out.write(' /* Declare pointers for each subclass */\n')
        for (instance, subcls) in sub_classes:
            out.write((' %s_t *%s;\n' % (subcls, instance)))
        out.write('\n /* Instantiate pointers for each subclass */\n')
        for (instance, subcls) in sub_classes:
            out.write((' %s = &elt;\n' % instance))
    # Emit the per-entry setup code.
    if (not type_maps.class_is_virtual(base_type)):
        setup_instance(out, cls, base_type, 'elt_p', v_name, version)
    else:
        for (instance, subcls) in sub_classes:
            setup_instance(out, cls, subcls, instance, v_name, version)
    out.write('\n recursion--;\n return value;\n}\n')
    # --- check function header ---
    out.write(("\n/**\n * Check a list of type %(cls)s generated by\n * %(cls)s_%(v_name)s_populate\n * list Pointer to the list that was populated\n * value Starting value for checking\n * The value after increments for this object's values\n */\nint\n%(cls)s_%(v_name)s_check(\n %(cls)s_t *list, int value)\n{\n" % dict(cls=cls, v_name=loxi_utils.version_to_name(version))))
    base_type = loxi_utils.list_to_entry_type(cls)
    # Same recursion guard as in populate (again a placeholder-free template).
    out.write(('\n of_object_t elt;\n int count = 0;\n int rv;\n static int recursion;\n\n if (recursion > 0) {\n return value;\n }\n\n recursion++;\n' % dict(cls=cls, base_type=base_type)))
    sub_classes = type_maps.sub_class_map(base_type, version)
    sub_classes = [(instance, subcls) for (instance, subcls) in sub_classes if (not type_maps.class_is_virtual(subcls))]
    v_name = loxi_utils.version_to_name(version)
    if (not type_maps.class_is_virtual(base_type)):
        # entry_count mirrors how many entries populate created.
        entry_count = 1
        out.write((' /* No subclasses for %s */\n' % base_type))
        out.write((' %s_t *elt_p;\n' % base_type))
        out.write('\n elt_p = &elt;\n')
    else:
        entry_count = len(sub_classes)
        out.write(' /* Declare pointers for each subclass */\n')
        for (instance, subcls) in sub_classes:
            out.write((' %s_t *%s;\n' % (subcls, instance)))
        out.write('\n /* Instantiate pointers for each subclass */\n')
        for (instance, subcls) in sub_classes:
            out.write((' %s = &elt;\n' % instance))
    # Position at the first element only if the list is non-empty by construction.
    if ((not type_maps.class_is_virtual(base_type)) or sub_classes):
        out.write((' TEST_OK(%(cls)s_first(list, &elt));\n' % dict(cls=cls)))
    if (not type_maps.class_is_virtual(base_type)):
        check_instance(out, cls, base_type, 'elt_p', v_name, version, True)
    else:
        # The last instance gets last=True so the generated code stops iterating.
        count = 0
        for (instance, subcls) in sub_classes:
            count += 1
            check_instance(out, cls, subcls, instance, v_name, version, (count == len(sub_classes)))
    out.write(('\n' % dict(base_type=base_type)))
    # Iterator pass, entry-count assertion, and dup/of_object_dup round-trips.
    out.write(('\n /* Do an iterate to test the iterator */\n %(u_cls)s_ITER(list, &elt, rv) {\n count += 1;\n }\n\n TEST_ASSERT(rv == OF_ERROR_RANGE);\n TEST_ASSERT(count == %(entry_count)d);\n\n /* We shoehorn a test of the dup functions here */\n {\n %(cls)s_t *dup;\n\n TEST_ASSERT((dup = %(cls)s_dup(list)) != NULL);\n TEST_ASSERT(dup->length == list->length);\n TEST_ASSERT(dup->object_id == list->object_id);\n TEST_ASSERT(dup->version == list->version);\n TEST_ASSERT(MEMCMP(OF_OBJECT_BUFFER_INDEX(dup, 0),\n OF_OBJECT_BUFFER_INDEX(list, 0), list->length) == 0);\n of_object_delete((of_object_t *)dup);\n\n /* And now for the generic dup function */\n TEST_ASSERT((dup = (%(cls)s_t *)\n of_object_dup(list)) != NULL);\n TEST_ASSERT(dup->length == list->length);\n TEST_ASSERT(dup->object_id == list->object_id);\n TEST_ASSERT(dup->version == list->version);\n TEST_ASSERT(MEMCMP(OF_OBJECT_BUFFER_INDEX(dup, 0),\n OF_OBJECT_BUFFER_INDEX(list, 0), list->length) == 0);\n of_object_delete((of_object_t *)dup);\n }\n\n recursion--;\n return value;\n}\n' % dict(cls=cls, u_cls=cls.upper(), entry_count=entry_count)))
def audit_organizations(url: str, headers: Dict[(str, str)], include_keys: Optional[List]=None) -> None:
    """Fetch organization resources from the server and report completeness findings.

    When *include_keys* is given, only those organizations are audited;
    otherwise every organization on the server is listed and parsed.
    """
    organization_resources: Optional[List[FidesModel]]
    if include_keys:
        organization_resources = get_server_resources(url, 'organization', include_keys, headers)
    else:
        raw_resources = list_server_resources(url, headers, 'organization', exclude_keys=[])
        if raw_resources:
            organization_resources = [parse_dict(resource=org, resource_type='organization', from_server=True) for org in raw_resources]
        else:
            organization_resources = []
    if not organization_resources:
        print('No organization resources were found.')
        return
    print(f'Found {len(organization_resources)} Organization resource(s) to audit...')
    total_findings = 0
    for organization in organization_resources:
        print(f'Auditing Organization: {(organization.name or organization.fides_key)}')
        assert isinstance(organization, Organization)
        total_findings += audit_organization_attributes(organization)
    if total_findings > 0:
        print(f'{total_findings} issue(s) were detected in auditing organization completeness.')
    else:
        echo_green('All audited organization resource(s) compliant!')
@mock.patch('flytekit.configuration.plugin.FlyteRemote', spec=FlyteRemote)
@mock.patch('flytekit.clients.friendly.SynchronousFlyteClient', spec=SynchronousFlyteClient)
def test_non_fast_register_require_version(mock_client, mock_remote):
    """``pyflyte register --non-fast`` without ``--version`` must fail.

    The ``@mock.patch`` decorator names were garbled to bare call lines in
    this dump; restored here (bottom decorator binds the first parameter).
    """
    mock_remote._client = mock_client
    mock_remote.return_value._version_from_hash.return_value = 'dummy_version_from_hash'
    mock_remote.return_value.upload_file.return_value = ('dummy_md5_bytes', 'dummy_native_url')
    runner = CliRunner()
    context_manager.FlyteEntities.entities.clear()
    with runner.isolated_filesystem():
        # The register command inspects the git repo for versioning context.
        out = subprocess.run(['git', 'init'], capture_output=True)
        assert out.returncode == 0
        os.makedirs('core3', exist_ok=True)
        with open(os.path.join('core3', 'sample.py'), 'w') as f:
            f.write(sample_file_contents)
            f.close()
        result = runner.invoke(pyflyte.main, ['register', '--non-fast', 'core3'])
        assert result.exit_code == 1
        assert str(result.exception) == 'Version is a required parameter in case --non-fast is specified.'
        shutil.rmtree('core3')
def extractWwwOtakubuCom(item):
    """Map a www.otakubu.com feed item to a release message.

    Returns None for items with no chapter/volume info or preview posts,
    a release message for known tags, and False for unrecognized tags.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (tag to look for, series name, translation type)
    tagmap = [
        ('Hyaku Ma No Omo', 'Hyaku Ma No Omo', 'translated'),
        ('Hyaku Ma No Aruji', 'Hyaku Ma No Aruji', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    match = next(((name, tl_type) for tagname, name, tl_type in tagmap if tagname in item['tags']), None)
    if match is None:
        return False
    name, tl_type = match
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
class FeederBroker(Broker):
    """MQTT broker preconfigured with petnet listeners, TLS, auth, and topic checks."""

    def __init__(self, config_overrides=None):
        # Base configuration; any truthy overrides dict is merged on top.
        config = {'listeners': {'default': {'max-connections': 50000, 'type': 'tcp'}, 'tcp-1': {'bind': f'0.0.0.0:{settings.mqtt_port}'}, 'tcp-ssl-1': {'bind': f'0.0.0.0:{settings.mqtts_port}', 'ssl': True, 'cafile': abspath(settings.mqtts_public_key), 'certfile': abspath(settings.mqtts_public_key), 'keyfile': abspath(settings.mqtts_private_key)}}, 'auth': {'plugins': ['auth_petnet']}, 'topic-check': {'enabled': True, 'plugins': ['topic_petnet']}}
        # The original normalized None to {} first; merging only when truthy
        # has the identical effect.
        if config_overrides:
            config.update(config_overrides)
        super().__init__(config=config)
class ServerLogFormatter(logging.Formatter):
    """Log formatter with optional ANSI coloring and banner-wrapped tracebacks."""

    def __init__(self, use_color=True):
        super().__init__()
        self.use_color = use_color
        # Level name -> escape_codes key used to color the prefix.
        self.color_mapping = {'CRITICAL': 'bold_red', 'ERROR': 'red', 'WARNING': 'yellow', 'INFO': 'green', 'DEBUG': 'blue'}

    def format(self, rec):
        """Format the record; records with exc_info get a banner-framed traceback."""
        if not rec.exc_info:
            return self._format(rec)
        import traceback
        top_banner = '>>>>>>' + ('-' * 74)
        bottom_banner = '<<<<<<' + ('-' * 74)
        trace_text = ''.join(traceback.format_exception(*rec.exc_info)).strip()
        return '\n'.join([top_banner, self._format(rec), trace_text, bottom_banner])

    def _format(self, rec):
        """Build '[<L> <time> <module>:<line> <greenlet>] <message>'."""
        import time
        current = gevent.getcurrent()
        gr_name = getattr(current, 'gr_name', None)
        if not gr_name:
            # Unnamed greenlets fall back to repr; raw (hub) contexts to <RAW>.
            gr_name = repr(current) if isinstance(current, gevent.Greenlet) else '<RAW>'
        rec.message = rec.getMessage()
        lvl = rec.levelname
        prefix = '[{} {} {}:{} {}]'.format(lvl[0], time.strftime('%y%m%d %H:%M:%S'), rec.module, rec.lineno, gr_name)
        if self.use_color:
            codes = escape_codes
            prefix = f"{codes[self.color_mapping[lvl]]}{prefix}{codes['reset']}"
        return f'{prefix} {rec.message}'
def unconfirm_multiple():
    """Unconfirm every posted form id owned by the email stored in the session.

    Returns a 401 error page when no unconfirm session is present, otherwise
    a 200 success page after committing the changes.
    """
    unconfirming_for_email = session.get('unconfirming')
    if not unconfirming_for_email:
        return (render_template('error.html', title='Forbidden', text="You're not allowed to unconfirm these forms."), 401)
    for form_id in request.form.getlist('form_ids'):
        form = Form.query.get(form_id)
        # Guard against unknown ids: query.get returns None for a missing row,
        # and the original code would crash on form.email. Ownership is still
        # enforced so a user cannot unconfirm someone else's forms.
        if form is not None and form.email == unconfirming_for_email:
            form.confirmed = False
            DB.session.add(form)
    DB.session.commit()
    return (render_template('info.html', title='Success', text='The selected forms were unconfirmed successfully.'), 200)
class OptionSeriesWordcloudSonificationContexttracksActivewhen(Options):
    """Accessors for the sonification context-track ``activeWhen`` options.

    Each option is a getter/setter pair. The ``@property`` / ``@<name>.setter``
    decorators were stripped in this dump, so every second ``def`` silently
    shadowed its getter; they are restored here.
    """

    @property
    def crossingDown(self):
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
@pytest.mark.parametrize('call_deployed_contract', (True, False))
@pytest.mark.parametrize('api_style', ('v4', 'build_filter'))
def test_on_filter_using_get_entries_interface(w3, emitter, emitter_contract_factory, wait_for_transaction, emitter_contract_event_ids, call_deployed_contract, api_style, create_filter):
    """get_new_entries must deliver a matching log exactly once.

    (The ``@pytest.mark`` prefixes were garbled to bare ``.parametrize`` lines
    in this dump; restored here.)
    """
    if call_deployed_contract:
        contract = emitter
    else:
        contract = emitter_contract_factory
    if api_style == 'build_filter':
        event_filter = contract.events.LogNoArguments.build_filter().deploy(w3)
    else:
        event_filter = create_filter(emitter, ['LogNoArguments', {}])
    txn_hash = emitter.functions.logNoArgs(emitter_contract_event_ids.LogNoArguments).transact()
    wait_for_transaction(w3, txn_hash)
    log_entries = event_filter.get_new_entries()
    assert len(log_entries) == 1
    assert log_entries[0]['transactionHash'] == txn_hash
    # A second poll must be empty: entries are only delivered once.
    new_entries = event_filter.get_new_entries()
    assert len(new_entries) == 0
def decompile_function(ghidra_analysis, func):
    """Return the HighFunction for *func*, decompiling and caching on first request.

    Results are memoized by function name in ghidra_analysis.high_funcs.
    """
    cache = ghidra_analysis.high_funcs
    if func.name not in cache:
        decompiler = ghidra_analysis.decompiler
        timeout = decompiler.getOptions().getDefaultTimeout()
        result = decompiler.decompileFunction(func, timeout, ghidra_analysis.monitor)
        cache[func.name] = result.getHighFunction()
    return cache[func.name]
def test_loop_to_sequence_rule_not_possible_break():
    """SequenceRule must not fire on an endless loop whose body has a conditional break.

    Builds an AST for:
        while (true) { c += 5; if (a) break; c += 10; break; }
    The break inside the condition node means the loop cannot be flattened
    into a plain sequence.
    """
    ast = AbstractSyntaxForest(condition_handler=condition_handler1(LogicCondition.generate_new_context()))
    root = ast.factory.create_endless_loop_node()
    body = ast.factory.create_seq_node()
    # Body: assignment, condition node guarding a break, trailing code with a break.
    children = [ast.factory.create_code_node(stmts=[assignment_c_plus_5.copy()]), ast.factory.create_condition_node(condition=logic_cond('a', ast.factory.logic_context)), ast.factory.create_code_node(stmts=[assignment_c_plus_10.copy(), Break()])]
    true_branch = ast.factory.create_true_node()
    true_branch_child = ast.factory.create_code_node(stmts=[Break()])
    ast._add_nodes_from([root, body, children[0], children[1], children[2], true_branch, true_branch_child])
    ast._add_edges_from([(root, body), (body, children[0]), (body, children[1]), (body, children[2]), (children[1], true_branch), (true_branch, true_branch_child)])
    # Reachability order: children[0] -> true-branch break -> children[2].
    ast._code_node_reachability_graph.add_reachability_from(((children[0], true_branch_child), (children[0], children[2]), (true_branch_child, children[2])))
    body.sort_children()
    assert (SequenceRule.can_be_applied(root) is False)
@pytest.fixture(scope='function')
def storage_config(db: Session) -> Generator:
    """Yield an S3 StorageConfig with test secrets; delete it on teardown.

    (The ``@pytest.fixture`` decorator was garbled to a bare
    ``(scope='function')`` line in this dump; restored here.)
    """
    name = str(uuid4())
    storage_config = StorageConfig.create(db=db, data={'name': name, 'type': StorageType.s3, 'details': {StorageDetails.AUTH_METHOD.value: S3AuthMethod.SECRET_KEYS.value, StorageDetails.NAMING.value: FileNaming.request_id.value, StorageDetails.BUCKET.value: 'test_bucket'}, 'key': 'my_test_config', 'format': ResponseFormat.json})
    storage_config.set_secrets(db=db, storage_secrets={StorageSecrets.AWS_ACCESS_KEY_ID.value: '1234', StorageSecrets.AWS_SECRET_ACCESS_KEY.value: '5678'})
    yield storage_config
    storage_config.delete(db)
class OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Accessors for the bar-sonification lowpass ``resonance`` mapping options.

    Each option is a getter/setter pair. The ``@property`` / ``@<name>.setter``
    decorators were stripped in this dump, so every second ``def`` silently
    shadowed its getter; they are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class SectionStructure(Structure):
    """A PE section header plus helpers for RVA/offset mapping, data access, and hashing."""

    def __init__(self, *argl, **argd):
        # Keep a back-reference to the owning PE object (needed by the
        # alignment-adjustment helpers) before Structure.__init__ runs.
        if ('pe' in argd):
            self.pe = argd['pe']
            del argd['pe']
        self.PointerToRawData = None
        self.VirtualAddress = None
        self.SizeOfRawData = None
        self.Misc_VirtualSize = None
        Structure.__init__(self, *argl, **argd)
        # Lazily-computed, cached adjusted addresses and RVA bounds.
        self.PointerToRawData_adj = None
        self.VirtualAddress_adj = None
        self.section_min_addr = None
        self.section_max_addr = None

    def get_PointerToRawData_adj(self):
        """Return PointerToRawData adjusted for FileAlignment (cached)."""
        if (self.PointerToRawData_adj is None):
            if (self.PointerToRawData is not None):
                self.PointerToRawData_adj = self.pe.adjust_FileAlignment(self.PointerToRawData, self.pe.OPTIONAL_HEADER.FileAlignment)
        return self.PointerToRawData_adj

    def get_VirtualAddress_adj(self):
        """Return VirtualAddress adjusted for SectionAlignment (cached)."""
        if (self.VirtualAddress_adj is None):
            if (self.VirtualAddress is not None):
                self.VirtualAddress_adj = self.pe.adjust_SectionAlignment(self.VirtualAddress, self.pe.OPTIONAL_HEADER.SectionAlignment, self.pe.OPTIONAL_HEADER.FileAlignment)
        return self.VirtualAddress_adj

    def get_data(self, start=None, length=None, ignore_padding=False):
        """Return this section's raw bytes.

        *start* is an RVA (defaults to the section start); *length* caps the
        slice. With ignore_padding, data is clipped to the virtual size so
        file-alignment padding is excluded.
        """
        if (start is None):
            offset = self.get_PointerToRawData_adj()
        else:
            # Translate the RVA into a file offset within this section.
            offset = ((start - self.get_VirtualAddress_adj()) + self.get_PointerToRawData_adj())
        if (length is not None):
            end = (offset + length)
        elif (self.SizeOfRawData is not None):
            end = (offset + self.SizeOfRawData)
        else:
            end = offset
        if (ignore_padding and (end is not None) and (offset is not None)):
            end = min(end, (offset + self.Misc_VirtualSize))
        # Never read past the section's raw data in the file.
        if ((self.PointerToRawData is not None) and (self.SizeOfRawData is not None)):
            if (end > (self.PointerToRawData + self.SizeOfRawData)):
                end = (self.PointerToRawData + self.SizeOfRawData)
        return self.pe.__data__[offset:end]

    def __setattr__(self, name, val):
        # Writing Characteristics re-derives the individual IMAGE_SCN_* flag
        # attributes; writing a single flag updates the packed value.
        if (name == 'Characteristics'):
            section_flags = retrieve_flags(SECTION_CHARACTERISTICS, 'IMAGE_SCN_')
            set_flags(self, val, section_flags)
        elif (('IMAGE_SCN_' in name) and hasattr(self, name)):
            if val:
                self.__dict__['Characteristics'] |= SECTION_CHARACTERISTICS[name]
            else:
                # XOR clears the bit when it is currently set (toggles
                # otherwise) — NOTE(review): matches the original behavior.
                self.__dict__['Characteristics'] ^= SECTION_CHARACTERISTICS[name]
        self.__dict__[name] = val

    def get_rva_from_offset(self, offset):
        """Map a file offset within this section to its RVA."""
        return ((offset - self.get_PointerToRawData_adj()) + self.get_VirtualAddress_adj())

    def get_offset_from_rva(self, rva):
        """Map an RVA within this section to its file offset."""
        return ((rva - self.get_VirtualAddress_adj()) + self.get_PointerToRawData_adj())

    def contains_offset(self, offset):
        """True if *offset* falls inside this section's raw data range."""
        if (self.PointerToRawData is None):
            # Bss-style sections have no raw data and contain no offsets.
            return False
        PointerToRawData_adj = self.get_PointerToRawData_adj()
        return (PointerToRawData_adj <= offset < (PointerToRawData_adj + self.SizeOfRawData))

    def contains_rva(self, rva):
        """True if *rva* falls inside this section's virtual address range."""
        # Fast path: bounds were already computed on a previous call.
        if ((self.section_min_addr is not None) and (self.section_max_addr is not None)):
            return (self.section_min_addr <= rva < self.section_max_addr)
        VirtualAddress_adj = self.get_VirtualAddress_adj()
        # If the file is truncated relative to SizeOfRawData, trust the
        # virtual size; otherwise use the larger of the two sizes.
        if ((len(self.pe.__data__) - self.get_PointerToRawData_adj()) < self.SizeOfRawData):
            size = self.Misc_VirtualSize
        else:
            size = max(self.SizeOfRawData, self.Misc_VirtualSize)
        # Clip to the next section's start so ranges never overlap.
        if ((self.next_section_virtual_address is not None) and (self.next_section_virtual_address > self.VirtualAddress) and ((VirtualAddress_adj + size) > self.next_section_virtual_address)):
            size = (self.next_section_virtual_address - VirtualAddress_adj)
        self.section_min_addr = VirtualAddress_adj
        self.section_max_addr = (VirtualAddress_adj + size)
        return (VirtualAddress_adj <= rva < (VirtualAddress_adj + size))

    def contains(self, rva):
        """Alias for contains_rva."""
        return self.contains_rva(rva)

    def get_entropy(self):
        """Shannon entropy (bits/byte) of the section's data."""
        return self.entropy_H(self.get_data())

    def get_hash_sha1(self):
        # Hash helpers return None when the algorithm is unavailable.
        if (sha1 is not None):
            return sha1(self.get_data()).hexdigest()

    def get_hash_sha256(self):
        if (sha256 is not None):
            return sha256(self.get_data()).hexdigest()

    def get_hash_sha512(self):
        if (sha512 is not None):
            return sha512(self.get_data()).hexdigest()

    def get_hash_md5(self):
        if (md5 is not None):
            return md5(self.get_data()).hexdigest()

    def entropy_H(self, data):
        """Return the Shannon entropy of *data* in bits per byte (0.0 for empty input)."""
        if (not data):
            return 0.0
        occurences = Counter(bytearray(data))
        entropy = 0
        for x in occurences.values():
            p_x = (float(x) / len(data))
            entropy -= (p_x * math.log(p_x, 2))
        return entropy
class ConcurrencyMetricsReporter(FLMetricsReporter):
    """FL metrics reporter that tracks eval accuracy plus per-round concurrency rates."""

    ACCURACY = 'Accuracy'

    def __init__(self, channels: List[Channel]) -> None:
        # Concurrency_Rate values collected from training rounds, and the
        # global round numbers at which evaluations happened.
        self.concurrency_metrics = []
        self.eval_rounds = []
        super().__init__(channels)

    def compare_metrics(self, eval_metrics, best_metrics):
        """Return True when *eval_metrics* (accuracy %) beats the best so far."""
        print(f'Current eval accuracy: {eval_metrics}%, Best so far: {best_metrics}%')
        if (best_metrics is None):
            return True
        return (eval_metrics > best_metrics)

    def compute_scores(self) -> Dict[(str, Any)]:
        """Compute overall accuracy (%) from the accumulated prediction/target batches."""
        correct = torch.Tensor([0])
        for i in range(len(self.predictions_list)):
            all_preds = self.predictions_list[i]
            # argmax over the class dimension.
            pred = all_preds.data.max(1, keepdim=True)[1]
            assert (pred.device == self.targets_list[i].device), f'Pred and targets moved to different devices: pred >> {pred.device} vs. targets >> {self.targets_list[i].device}'
            if (i == 0):
                # Move the accumulator onto the predictions' device once.
                correct = correct.to(pred.device)
            correct += pred.eq(self.targets_list[i].data.view_as(pred)).sum()
        total = sum((len(batch_targets) for batch_targets in self.targets_list))
        accuracy = ((100.0 * correct.item()) / total)
        return {self.ACCURACY: accuracy}

    def create_eval_metrics(self, scores: Dict[(str, Any)], total_loss: float, **kwargs) -> Any:
        """The eval metric is the accuracy score alone; loss is ignored."""
        return scores[self.ACCURACY]

    def report_metrics(self, reset: bool, stage, extra_metrics=None, **kwargs):
        """Record Concurrency_Rate during training, eval rounds during evaluation."""
        if (stage != TrainingStage.EVAL):
            assert (extra_metrics is not None), 'Async Trainer metrics reporting should have extra metrics'
            metrics = [m for m in extra_metrics if (m.name == 'Concurrency_Rate')]
            assert (len(metrics) == 1), 'Concurrency rate should be one of the extra metrics'
            concurrency_rate = metrics[0]
            self.concurrency_metrics.append(concurrency_rate.value)
        else:
            # Default to round 1 when no timeline was provided.
            timeline: Timeline = kwargs.get('timeline', Timeline(global_round=1))
            self.eval_rounds.append(timeline.global_round)
        return super().report_metrics(reset, stage, extra_metrics, **kwargs)
class AsciiCardCollection():
    """Render a collection of playing cards as 9-row ASCII art.

    NOTE(review): the border and suit strings below are empty in this
    source — presumably unicode box-drawing/suit characters were lost;
    confirm against upstream before relying on rendered output.
    """

    def __init__(self, *cards, hide_cards: bool=False, term: Terminal=None):
        """Store *cards* (objects exposing ``rank`` and ``suit``) and pre-render.

        :param hide_cards: when True, render face-down placeholders
        :param term: terminal used by callers for display
        """
        self.term = term
        self.cards = cards
        self.update(hide_cards)

    def __str__(self):
        return '\n'.join(self.lines)

    def update(self, hide_cards: bool):
        """Re-render the collection; hidden cards are passed as None."""
        if hide_cards:
            cards = [None for _ in self.cards]
        else:
            cards = self.cards
        (self.lines, self.width, self.height) = self._create_card_collection_str(*cards, return_string=False)

    def _create_card_collection_str(self, *cards, return_string=True) -> Tuple[(str, int, int)]:
        """Join per-card rows horizontally.

        Returns (string-or-row-list, width, height); ``None`` entries render
        with face-down placeholder rows.
        """
        hide_lines = [''] * 9  # one placeholder entry per row
        all_lines = ['' for _ in range(9)]
        for card in cards:
            if card is None:
                card_lines = hide_lines
            else:
                card_lines = self._ascii_card(card, return_string=False)
            all_lines = [(x + y) for (x, y) in zip(all_lines, card_lines)]
        if return_string:
            card_str = '\n'.join(all_lines)
        else:
            card_str = all_lines
        width = len(all_lines[0])
        height = len(all_lines)
        return (card_str, width, height)

    def _ascii_card(self, *cards, return_string=True):
        """Render one or more cards as 9 rows of text.

        BUG FIX: the original signature omitted ``self``
        (``def _ascii_card(*cards, ...)``), so ``self._ascii_card(card)``
        passed the instance itself in as the first "card" and crashed on
        ``card.rank``.
        """
        name_to_symbol = {'spades': '', 'diamonds': '', 'hearts': '', 'clubs': ''}
        lines = [[] for _ in range(9)]
        for (index, card) in enumerate(cards):
            # '10' is the only two-character rank; all others get a pad space.
            if (card.rank == '10'):
                rank = card.rank
                space = ''
            else:
                rank = card.rank[0]
                space = ' '
            suit = name_to_symbol[card.suit.lower()]
            rank = rank.upper()
            lines[0].append('')
            lines[1].append('{}{} '.format(rank, space))
            lines[2].append(' ')
            lines[3].append(' ')
            lines[4].append(' {} '.format(suit))
            lines[5].append(' ')
            lines[6].append(' ')
            lines[7].append(' {}{}'.format(space, rank))
            lines[8].append('')
        result = [''.join(line) for line in lines]
        if return_string:
            return '\n'.join(result)
        else:
            return result
def extractSnowTimeTranslations(item):
    """Build a release message from a Snow Time Translations feed item."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and titles with no parsable chapter/volume/fragment.
    if 'preview' in item['title'].lower() or not (chp or vol or frag):
        return None
    if 'In Different World With Naruto System' in item['tags']:
        return buildReleaseMessageWithType(item, 'In Different World With Naruto System', vol, chp, frag=frag, postfix=postfix)
    return False
class OptionPlotoptionsWaterfallSonificationContexttracksMappingFrequency(Options):
    """Accessors for the waterfall sonification ``mapping.frequency`` options.

    NOTE(review): each name is defined twice (getter-style, then
    setter-style). The original presumably decorated these with
    ``@property`` / ``@<name>.setter`` which appears to have been stripped;
    as written, the second definition silently replaces the first.
    Confirm against upstream before relying on attribute access.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class TestGFMEncoded(util.MdCase):
    """TOC slugification via ``slugs.gfm_encoded`` (deprecated; warns)."""

    extension = ['markdown.extensions.toc']
    extension_configs = {'markdown.extensions.toc': {'slugify': slugs.gfm_encoded}}

    def test_slug(self):
        """The header id is slugified and percent-encoded; a DeprecationWarning fires."""
        source = '# Testing GFM unicode-slugs_headers I with encoding'
        expected = '<h1 id="testing-gfm-unicode-slugs_headers-%CE%A9%E2%84%91-with-encoding">Testing GFM unicode-slugs_headers I with encoding</h1>'
        with pytest.warns(DeprecationWarning):
            self.check_markdown(source, expected)
class OptionPlotoptionsAreasplineSonificationContexttracksActivewhen(Options):
    """Accessors for the areaspline sonification ``activeWhen`` options.

    NOTE(review): each name is defined twice (getter-style, then
    setter-style). The original presumably used ``@property`` /
    ``@<name>.setter`` decorators that were stripped; as written the second
    definition replaces the first. Confirm against upstream.
    """

    def crossingDown(self):
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
class DatabaseManager():
    """Owns a SQLAlchemy engine, a scoped-session factory, and a declarative base.

    NOTE(review): ``Model``, ``metadata``, ``engine`` and ``is_initialized``
    read like ``@property`` accessors and ``session`` like a
    ``@contextmanager`` generator (it yields), yet no decorators appear in
    this source — presumably stripped during extraction; confirm against
    upstream. As written, ``if (not self.is_initialized)`` inside
    ``session`` tests the bound method object (always truthy), so the
    RuntimeError branch can never fire, and ``self.Model.metadata`` would
    access an attribute of a method object.
    """
    # Default query class attached to models created through this manager.
    Query = BaseQuery

    def __init__(self):
        self._db_url = None
        self._base: DeclarativeMeta = self._make_declarative_base(_Model)
        self._engine: Optional[Engine] = None
        self._session: Optional[scoped_session] = None

    def Model(self) -> _Model:
        """Declarative base used to define mapped classes."""
        return self._base

    def metadata(self) -> MetaData:
        """MetaData of the declarative base."""
        return self.Model.metadata

    def engine(self):
        """Underlying Engine, or None before init_db()."""
        return self._engine

    def is_initialized(self) -> bool:
        """True once both the engine and the session factory exist."""
        return ((self._engine is not None) and (self._session is not None))

    def session(self, commit: Optional[bool]=True) -> Session:
        """Yield a session; commit on success (when *commit*), roll back on error, always close."""
        if (not self.is_initialized):
            raise RuntimeError('The database manager is not initialized.')
        session = self._session()
        try:
            (yield session)
            if commit:
                session.commit()
        except:
            # Roll back whatever failed, then re-raise to the caller.
            session.rollback()
            raise
        finally:
            session.close()

    def _make_declarative_base(self, model: Union[(Type[DeclarativeMeta], Type[_Model])]) -> DeclarativeMeta:
        """Wrap *model* in a declarative base if needed; attach Query and manager."""
        if (not isinstance(model, DeclarativeMeta)):
            model = declarative_base(cls=model, name='Model')
        if (not getattr(model, 'query_class', None)):
            model.query_class = self.Query
        model.__db_manager__ = self
        return model

    def init_db(self, db_url: Union[(str, URL)], engine_args: Optional[Dict]=None, base: Optional[DeclarativeMeta]=None, query_class=BaseQuery, override_query_class: Optional[bool]=False, session_options: Optional[Dict]=None):
        """Create the engine and scoped-session factory for *db_url*.

        :param engine_args: extra keyword arguments for ``create_engine``
        :param base: externally created declarative base to adopt
        :param query_class: query class for sessions and models
        :param override_query_class: replace an existing query_class/manager on *base*
        :param session_options: extra keyword arguments for ``sessionmaker``
        """
        if (session_options is None):
            session_options = {}
        self._db_url = db_url
        if (query_class is not None):
            self.Query = query_class
        if (base is not None):
            self._base = base
            if ((not getattr(base, 'query_class', None)) or override_query_class):
                base.query_class = self.Query
            if ((not hasattr(base, '__db_manager__')) or override_query_class):
                base.__db_manager__ = self
        self._engine = create_engine(db_url, **(engine_args or {}))
        session_options.setdefault('class_', Session)
        session_options.setdefault('query_cls', self.Query)
        session_factory = sessionmaker(bind=self._engine, **session_options)
        self._session = scoped_session(session_factory)
        # Bind the metadata so table-level operations can run without an
        # explicit engine argument.
        self._base.metadata.bind = self._engine

    def init_default_db(self, sqlite_path: str, engine_args: Optional[Dict]=None, base: Optional[DeclarativeMeta]=None):
        """Initialize a pooled SQLite database located at *sqlite_path*."""
        if (not engine_args):
            engine_args = {}
        engine_args['poolclass'] = QueuePool
        engine_args['pool_size'] = 10
        engine_args['max_overflow'] = 20
        engine_args['pool_timeout'] = 30
        engine_args['pool_recycle'] = 3600
        engine_args['pool_pre_ping'] = True
        self.init_db(f'sqlite:///{sqlite_path}', engine_args, base)

    def create_all(self):
        """Create every table known to the declarative base's metadata."""
        self.Model.metadata.create_all(self._engine)
def build_from(db_url_or_db: Union[(str, URL, DatabaseManager)], engine_args: Optional[Dict]=None, base: Optional[DeclarativeMeta]=None, query_class=BaseQuery, override_query_class: Optional[bool]=False) -> DatabaseManager:
    """Return a DatabaseManager for *db_url_or_db*.

    An existing manager is returned unchanged; a URL (str or URL object)
    produces a freshly initialized manager. Anything else raises ValueError.
    """
    if isinstance(db_url_or_db, DatabaseManager):
        return db_url_or_db
    if isinstance(db_url_or_db, (str, URL)):
        manager = DatabaseManager()
        manager.init_db(db_url_or_db, engine_args, base, query_class, override_query_class)
        return manager
    raise ValueError(f'db_url_or_db should be either url or a DatabaseManager, got {type(db_url_or_db)}')
class GptsMessagesDao(BaseDao):
    """Data-access helpers for GptsMessagesEntity rows."""

    def append(self, entity: dict):
        """Insert one message row built from a plain dict; return the new id."""
        session = self.get_raw_session()
        record = GptsMessagesEntity(
            conv_id=entity.get('conv_id'),
            sender=entity.get('sender'),
            receiver=entity.get('receiver'),
            content=entity.get('content'),
            role=entity.get('role', None),
            model_name=entity.get('model_name', None),
            context=entity.get('context', None),
            rounds=entity.get('rounds', None),
            current_gogal=entity.get('current_gogal', None),
            review_info=entity.get('review_info', None),
            action_report=entity.get('action_report', None),
        )
        session.add(record)
        session.commit()
        new_id = record.id
        session.close()
        return new_id

    def get_by_agent(self, conv_id: str, agent: str) -> Optional[List[GptsMessagesEntity]]:
        """Messages of *conv_id* where *agent* is sender or receiver, by round."""
        session = self.get_raw_session()
        query = session.query(GptsMessagesEntity)
        if agent:
            query = query.filter(GptsMessagesEntity.conv_id == conv_id).filter(
                or_(GptsMessagesEntity.sender == agent, GptsMessagesEntity.receiver == agent))
        rows = query.order_by(GptsMessagesEntity.rounds).all()
        session.close()
        return rows

    def get_by_conv_id(self, conv_id: str) -> Optional[List[GptsMessagesEntity]]:
        """All messages of a conversation, ordered by round."""
        session = self.get_raw_session()
        query = session.query(GptsMessagesEntity)
        if conv_id:
            query = query.filter(GptsMessagesEntity.conv_id == conv_id)
        rows = query.order_by(GptsMessagesEntity.rounds).all()
        session.close()
        return rows

    def get_between_agents(self, conv_id: str, agent1: str, agent2: str, current_gogal: Optional[str]=None) -> Optional[List[GptsMessagesEntity]]:
        """Messages exchanged in either direction between two agents, by round."""
        session = self.get_raw_session()
        query = session.query(GptsMessagesEntity)
        if agent1 and agent2:
            query = query.filter(GptsMessagesEntity.conv_id == conv_id).filter(or_(
                and_(GptsMessagesEntity.sender == agent1, GptsMessagesEntity.receiver == agent2),
                and_(GptsMessagesEntity.sender == agent2, GptsMessagesEntity.receiver == agent1)))
        if current_gogal:
            query = query.filter(GptsMessagesEntity.current_gogal == current_gogal)
        rows = query.order_by(GptsMessagesEntity.rounds).all()
        session.close()
        return rows

    def get_last_message(self, conv_id: str) -> Optional[GptsMessagesEntity]:
        """Most recent message of a conversation (highest round), or None."""
        session = self.get_raw_session()
        query = session.query(GptsMessagesEntity)
        if conv_id:
            query = query.filter(GptsMessagesEntity.conv_id == conv_id).order_by(desc(GptsMessagesEntity.rounds))
        row = query.first()
        session.close()
        return row
def test_beacon_from_bytes(beacon_x86_file):
    """Parse a known x86 beacon payload and verify its decoded configuration."""
    data = beacon_x86_file.read()
    bconfig = beacon.BeaconConfig.from_bytes(data)
    assert len(bconfig.domains)
    assert bconfig.xorencoded
    assert (bconfig.architecture == 'x86')
    # NOTE(review): the expected literals on the next two lines are missing
    # from this source (syntax error as written) — presumably numeric
    # constants lost in extraction; restore them from upstream.
    assert (bconfig.watermark == )
    assert (bconfig.pe_export_stamp == )
    assert (bconfig.version == 'Cobalt Strike 4.2 (Nov 06, 2020)')
    assert (bconfig.max_setting_enum == 58)
    # Garbage input must be rejected with a clear error.
    with pytest.raises(ValueError, match='No valid Beacon configuration found'):
        beacon.BeaconConfig.from_bytes(b'no bacon for you')
# NOTE(review): in Ryu these registration helpers are normally applied as
# class decorators (@_set_stats_type / @_set_msg_type on the request class
# below); here they appear as bare calls — presumably stripped decorators,
# confirm against upstream.
_set_stats_type(ofproto.OFPMP_PORT_DESC, OFPPort)
_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPPortDescStatsRequest(OFPMultipartRequest):
    """Port description multipart request (OFPMP_PORT_DESC).

    :param datapath: datapath the message is associated with
    :param flags: multipart request flags
    :param port_no: port to query (default ofproto.OFPP_ANY)
    :param type_: accepted for parser-interface symmetry; not used here
    """

    def __init__(self, datapath, flags=0, port_no=ofproto.OFPP_ANY, type_=None):
        super(OFPPortDescStatsRequest, self).__init__(datapath, flags)
        self.port_no = port_no

    def _serialize_stats_body(self):
        # Pack the requested port number into the multipart request body.
        msg_pack_into(ofproto.OFP_PORT_MULTIPART_REQUEST_PACK_STR, self.buf, ofproto.OFP_MULTIPART_REQUEST_SIZE, self.port_no)
def run_optics_clustering(features, names, args):
    """Cluster *features* with OPTICS; return {label: [(name, score), ...]}.

    Each member is scored against its cluster's centroid. *args* is accepted
    for interface parity with sibling clustering runners and is unused here.
    """
    labels = OPTICS(min_samples=10).fit(features).labels_
    centroids = calculate_cluster_centroids(features, labels)
    clusters = defaultdict(list)
    for idx, label in enumerate(labels):
        member_score = calculate_score(centroids[label], features[idx])
        clusters[label].append((names[idx], member_score))
    return clusters
# NOTE(review): the bare string expressions below are presumably stripped
# @patch('...') mock decorators for the test function (their order matches
# the mock parameters, innermost last) — confirm against upstream.
('llama_recipes.finetuning.train')
('llama_recipes.finetuning.LlamaForCausalLM.from_pretrained')
('llama_recipes.finetuning.LlamaTokenizer.from_pretrained')
('llama_recipes.finetuning.optim.AdamW')
('llama_recipes.finetuning.StepLR')
def test_unknown_dataset_error(step_lr, optimizer, tokenizer, get_model, train, mocker):
    """main() must raise AttributeError for an unknown custom dataset getter."""
    from llama_recipes.finetuning import main
    # Fake tokenizer: token ids / attention mask sized to the input length.
    tokenizer.return_value = mocker.MagicMock(side_effect=(lambda x: {'input_ids': [(len(x) * [0])], 'attention_mask': [(len(x) * [0])]}))
    kwargs = {'dataset': 'custom_dataset', 'custom_dataset.file': 'examples/custom_dataset.py:get_unknown_dataset', 'batch_size_training': 1, 'use_peft': False}
    with pytest.raises(AttributeError):
        main(**kwargs)
def _test_correct_response_for_recipient_location_county_without_geo_filters(client):
    """County-level recipient_location aggregation without geo filters."""
    request_payload = {
        'scope': 'recipient_location',
        'geo_layer': 'county',
        'filters': {'time_period': [{'start_date': '2018-10-01', 'end_date': '2020-09-30'}]},
    }
    resp = client.post('/api/v2/search/spending_by_geography', content_type='application/json', data=json.dumps(request_payload))
    expected_response = {
        'scope': 'recipient_location',
        'geo_layer': 'county',
        'results': [
            {'aggregated_amount': 5000550.0, 'display_name': 'Charleston', 'per_capita': 5000550.0, 'population': 1, 'shape_code': '45001'},
            {'aggregated_amount': 500000.0, 'display_name': 'Test Name', 'per_capita': 50000.0, 'population': 10, 'shape_code': '45005'},
            {'aggregated_amount': 55000.0, 'display_name': 'Test Name', 'per_capita': 550.0, 'population': 100, 'shape_code': '53005'},
        ],
        'messages': [get_time_period_message()],
    }
    assert resp.status_code == status.HTTP_200_OK, 'Failed to return 200 Response'
    resp_json = resp.json()
    # Order-insensitive comparison: sort results by shape code first.
    resp_json['results'].sort(key=_get_shape_code_for_sort)
    assert resp_json == expected_response
class StrainTest(unittest.TestCase):
    """Exercises the keep() and discard() filtering helpers."""

    def test_empty_sequence(self):
        self.assertEqual(keep([], lambda n: n % 2 == 0), [])

    def test_empty_keep(self):
        evens = [2, 4, 6, 8, 10]
        self.assertEqual(keep(evens, lambda n: n % 2 == 1), [])

    def test_empty_discard(self):
        evens = [2, 4, 6, 8, 10]
        self.assertEqual(discard(evens, lambda n: n % 2 == 0), [])

    def test_keep_everything(self):
        evens = [2, 4, 6, 8, 10]
        self.assertEqual(keep(evens, lambda n: n % 2 == 0), evens)

    def test_discard_endswith(self):
        words = ['dough', 'cash', 'plough', 'though', 'through', 'enough']
        self.assertEqual(discard(words, lambda w: str.endswith(w, 'ough')), ['cash'])

    def test_keep_z(self):
        words = ['zebra', 'arizona', 'apple', 'google', 'mozilla']
        self.assertEqual(keep(words, lambda w: 'z' in w), ['zebra', 'arizona', 'mozilla'])

    def test_keep_discard(self):
        words = ['1,2,3', 'one', 'almost!', 'love']
        # Discarding by the same predicate used to keep must yield nothing.
        self.assertEqual(discard(keep(words, str.isalpha), str.isalpha), [])

    def test_keep_plus_discard(self):
        words = ['1,2,3', 'one', 'almost!', 'love']
        expected = ['one', 'love', '1,2,3', 'almost!']
        # keep + discard partitions the input (kept items first).
        self.assertEqual(keep(words, str.isalpha) + discard(words, str.isalpha), expected)
class DailyLink(Base):
    """Association row linking a Daily to a Link, with an ordering rank.

    NOTE(review): the bare ``('link')`` / ``('daily')`` expressions before
    the validators are presumably stripped ``@validates('link')`` /
    ``@validates('daily')`` SQLAlchemy decorators — confirm upstream; as
    written the validators are never invoked by the ORM.
    """
    __tablename__ = 'Daily_Links'
    daily_id = Column(Integer, ForeignKey('Dailies.id'), primary_key=True)
    daily = relationship(Daily, back_populates='link_relations', primaryjoin='DailyLink.daily_id==Daily.daily_id')
    link_id = Column(Integer, ForeignKey('Links.id'), primary_key=True)
    link = relationship(Link, primaryjoin='DailyLink.link_id==Link.link_id', doc='stalker.models.link.Link instances related to the Daily\n    instance.\n\n    Attach the same :class:`.Link` s that are linked as an output to a\n    certain :class:`.Version` s instance to this attribute.\n\n    This attribute is an **association_proxy** so and the real attribute\n    that the data is related to is the :attr:`.link_relations` attribute.\n\n    You can use the :attr:`.link_relations` attribute to change the\n    ``rank`` attribute of the :class:`.DailyLink` instance (which is the\n    returned data), thus change the order of the ``Links``.\n\n    This is done in that way to be able to store the order of the links in\n    this Daily instance.\n    ')
    # Position of this link within the Daily's ordered link list.
    rank = Column(Integer, default=0)

    def __init__(self, daily=None, link=None, rank=0):
        super(DailyLink, self).__init__()
        self.daily = daily
        self.link = link
        self.rank = rank

    ('link')
    def _validate_link(self, key, link):
        """Reject non-Link values with a TypeError."""
        from stalker import Link
        if (link is not None):
            if (not isinstance(link, Link)):
                raise TypeError(('%(class)s.link should be an instance of stalker.models.link.Link instance, not %(link_class)s' % {'class': self.__class__.__name__, 'link_class': link.__class__.__name__}))
        return link

    ('daily')
    def _validate_daily(self, key, daily):
        """Reject non-Daily values with a TypeError."""
        if (daily is not None):
            if (not isinstance(daily, Daily)):
                raise TypeError(('%(class)s.daily should be an instance of stalker.models.review.Daily instance, not %(daily_class)s' % {'class': self.__class__.__name__, 'daily_class': daily.__class__.__name__}))
        return daily
class JsMaths():
    """Builds JavaScript ``Math`` expressions wrapped as JsNumber objects."""

    def E(self) -> JsNumber.JsNumber:
        """Euler's number e."""
        return JsNumber.JsNumber('Math.E', is_py_data=False)

    def LN2(self):
        """Natural log of 2."""
        return JsNumber.JsNumber('Math.LN2', is_py_data=False)

    def LN10(self):
        """Natural log of 10."""
        return JsNumber.JsNumber('Math.LN10', is_py_data=False)

    def LOG2E(self):
        """Base-2 log of e."""
        return JsNumber.JsNumber('Math.LOG2E', is_py_data=False)

    def SQRT1_2(self):
        """Square root of 1/2."""
        return JsNumber.JsNumber('Math.SQRT1_2', is_py_data=False)

    def SQRT2(self) -> JsNumber:
        """Square root of 2."""
        return JsNumber.JsNumber('Math.SQRT2', is_py_data=False)

    def PI(self):
        """The constant pi."""
        return JsNumber.JsNumber('Math.PI', is_py_data=False)

    def random(self, min_val: Union[(int, primitives.JsDataModel)]=0, max_val: Union[(int, primitives.JsDataModel)]=1):
        """Random number; plain ``Math.random()`` for the default [0, 1) range.

        NOTE(review): for a custom range the emitted expression uses the
        integer-style ``* (max - min + 1) + min`` form, which can produce
        values up to max + 1 — presumably intentional for integer use;
        confirm against upstream.
        """
        if ((min_val == 0) and (max_val == 1)):
            return JsNumber.JsNumber('Math.random()', is_py_data=False)
        min_val = JsUtils.jsConvertData(min_val, None)
        max_val = JsUtils.jsConvertData(max_val, None)
        return JsNumber.JsNumber(('Math.random() * (%(max)s - %(min)s + 1) + %(min)s' % {'min': min_val, 'max': max_val}))

    def min(self, *args):
        """Minimum of the given values."""
        js_args = [JsUtils.jsConvertData(a, None) for a in args]
        return JsNumber.JsNumber(('Math.min(%s)' % ','.join([str(jsa) for jsa in js_args])), is_py_data=False)

    def max(self, *args):
        """Maximum of the given values; spreads a single Array argument."""
        js_args = [JsUtils.jsConvertData(a, None) for a in args]
        if ((len(js_args) == 1) and (getattr(js_args[0], '_jsClass', None) == 'Array')):
            js_args[0] = ('...%s' % js_args[0])
        return JsNumber.JsNumber(('Math.max(%s)' % ','.join([str(jsa) for jsa in js_args])), is_py_data=False)

    def floor(self, number: Union[(float, primitives.JsDataModel)]):
        """Round *number* down to the nearest integer."""
        number = JsUtils.jsConvertData(number, None)
        return JsNumber.JsNumber(('Math.floor(%s)' % number), is_py_data=False)

    def trunc(self, number: Union[(float, primitives.JsDataModel)]):
        """Integer part of *number*."""
        number = JsUtils.jsConvertData(number, None)
        return JsNumber.JsNumber(('Math.trunc(%s)' % number), is_py_data=False)

    def abs(self, number: Union[(float, primitives.JsDataModel)]):
        """Absolute value."""
        number = JsUtils.jsConvertData(number, None)
        return JsNumber.JsNumber(('Math.abs(%s)' % number), is_py_data=False)

    def cos(self, number: Union[(float, primitives.JsDataModel)]):
        """Cosine (radians)."""
        number = JsUtils.jsConvertData(number, None)
        return JsNumber.JsNumber(('Math.cos(%s)' % number), is_py_data=False)

    def sin(self, number: Union[(float, primitives.JsDataModel)]):
        """Sine (radians)."""
        number = JsUtils.jsConvertData(number, None)
        return JsNumber.JsNumber(('Math.sin(%s)' % number), is_py_data=False)

    def log(self, number: Union[(float, primitives.JsDataModel)]):
        """Natural logarithm."""
        number = JsUtils.jsConvertData(number, None)
        return JsNumber.JsNumber(('Math.log(%s)' % number), is_py_data=False)

    def exp(self, number: Union[(float, primitives.JsDataModel)]):
        """e raised to *number*."""
        number = JsUtils.jsConvertData(number, None)
        return JsNumber.JsNumber(('Math.exp(%s)' % number), is_py_data=False)

    def round(self, number: Union[(float, primitives.JsDataModel)]):
        """Round to the nearest integer."""
        number = JsUtils.jsConvertData(number, None)
        return JsNumber.JsNumber(('Math.round(%s)' % number), is_py_data=False)

    def sqrt(self, number: Union[(float, primitives.JsDataModel)]):
        """Square root."""
        number = JsUtils.jsConvertData(number, None)
        return JsNumber.JsNumber(('Math.sqrt(%s)' % number), is_py_data=False)

    def ceil(self, number: Union[(float, primitives.JsDataModel)]):
        """Round *number* up to the nearest integer."""
        number = JsUtils.jsConvertData(number, None)
        return JsNumber.JsNumber(('Math.ceil(%s)' % number), is_py_data=False)

    def pow(self, number: Union[(primitives.JsDataModel, float)], power: Union[(primitives.JsDataModel, int)]):
        """*number* raised to *power*.

        BUG FIX: the original signature omitted ``self``, so instance calls
        like ``maths.pow(2, 3)`` bound the JsMaths instance to ``number``
        and dropped an argument.
        """
        number = JsUtils.jsConvertData(number, None)
        power = JsUtils.jsConvertData(power, None)
        return JsNumber.JsNumber(('Math.pow(%s, %s)' % (number, power)), is_py_data=False)
def test_serialize_exception_without_traceback():
    """Round-trip a traceback-less exception through Failure and its string form."""
    exc: Exception = Exception()
    assert exc.__traceback__ is None
    failure: Failure = serialize_exception(exc)
    assert isinstance(failure, Failure)
    encoded = failure_to_str(failure)
    assert isinstance(encoded, str)
    decoded: Failure = str_to_failure(encoded)
    assert isinstance(decoded, Failure)
    # The string round-trip must preserve the failure contents exactly.
    assert failure.to_dict() == decoded.to_dict()
    restored = deserialize_exception(decoded)
    assert isinstance(restored, Exception)
    assert restored.__traceback__ is None
def extractMdanmeitranslationWordpressCom(item):
    """Build a release message for mdanmeitranslation.wordpress.com feed items."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and titles with no parsable chapter/volume.
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the leading ``.parametrize(...)`` is presumably a stripped
# ``@pytest.mark.parametrize`` decorator — as written this line is a syntax
# error; confirm against upstream.
.parametrize('gridder', [KNeighbors(), KNeighbors(k=1), KNeighbors(k=2), KNeighbors(k=10), KNeighbors(k=1, reduction=np.median)], ids=['k=default', 'k=1', 'k=2', 'k=10', 'median'])
def test_neighbors(gridder):
    """A fitted KNeighbors gridder should reproduce a CheckerBoard field (atol=100)."""
    region = (1000, 5000, (- 8000), (- 6000))
    synth = CheckerBoard(region=region)
    # Fit on a 100x100 grid, predict on a slightly offset 95x95 grid.
    data_coords = grid_coordinates(region, shape=(100, 100))
    data = synth.predict(data_coords)
    coords = grid_coordinates(region, shape=(95, 95))
    true_data = synth.predict(coords)
    gridder.fit(data_coords, data)
    npt.assert_allclose(gridder.predict(coords), true_data, rtol=0, atol=100)
class Solution():
    """LeetCode 946: validate push/pop sequences of a stack."""

    def validateStackSequences(self, pushed: List[int], popped: List[int]) -> bool:
        """Return True if *popped* could result from pushing *pushed* in order.

        Direct stack simulation: push each value, then greedily pop while
        the stack top matches the next expected pop. Runs in O(n) time,
        replacing the original's O(n^2) repeated ``pushed.index`` scans.
        Values are assumed distinct (problem guarantee).
        """
        stack = []
        pop_idx = 0
        for value in pushed:
            stack.append(value)
            while stack and pop_idx < len(popped) and stack[-1] == popped[pop_idx]:
                stack.pop()
                pop_idx += 1
        # Valid only when everything pushed was eventually popped.
        return not stack and pop_idx == len(popped)
class Migration(migrations.Migration):
    """Add indirect-federal-sharing decimal fields to Award and TransactionNormalized."""
    dependencies = [('awards', '0094_delete_covidfinancialaccountmatview')]
    operations = [migrations.AddField(model_name='award', name='total_indirect_federal_sharing', field=models.DecimalField(blank=True, decimal_places=2, help_text='The total of the indirect_federal_sharing from associated transactions', max_digits=23, null=True)), migrations.AddField(model_name='transactionnormalized', name='indirect_federal_sharing', field=models.DecimalField(blank=True, decimal_places=2, help_text='The indirect_federal_sharing for this transaction', max_digits=23, null=True))]
# NOTE(review): this bare call is presumably a stripped registration
# decorator (@_trainer('omnimatte') above the class) — confirm upstream.
_trainer('omnimatte')
class OmnimatteTrainer(Trainer):
    """Trainer for Omnimatte-style foreground/background decomposition."""

    def __init__(self, img_batch_size: int, output: str, pbar_update_frequency: int=0, writer_path: Optional[str]=None, ray_batch_size: int=0, fg_batch_size_fg: int=0, fg_batch_size_bg: int=0, use_writer: bool=True, fg_indexing_strategy: (str | None)=None, num_workers: int=0):
        """Configure batch sizes; ray_batch_size == 0 renders full frames."""
        super().__init__(True, False, True, output, pbar_update_frequency, writer_path, use_writer, fg_indexing_strategy=fg_indexing_strategy, num_workers=num_workers)
        self.img_batch_size = img_batch_size
        self.ray_batch_size = ray_batch_size
        self.fg_batch_size_fg = fg_batch_size_fg
        self.fg_batch_size_bg = fg_batch_size_bg

    def _post_step(self, batch):
        # Give the foreground model its per-step hook.
        self.fg.post_training_step(self.global_step)

    def _render_one_image(self, frame: Dict[(str, Any)]) -> Dict[(str, Tensor)]:
        # Render a single frame by round-tripping through batch form (eval mode).
        return batch_to_frame(self._render_fg(frame_to_batch(frame), None, False), 0)

    def _train_fg(self, batch: Dict[(str, Tensor)]) -> Dict[(str, Tensor)]:
        """One foreground training render, optionally over a ray subset."""
        (B, L, H, W) = batch['masks'].shape
        if (self.ray_batch_size == 0):
            indices = None
        else:
            indices = self._get_fg_indices(batch['background_mask'], self.ray_batch_size, self.fg_batch_size_fg, self.fg_batch_size_bg)
        return self._render_fg(batch, indices, True)

    def _render_fg(self, batch: Dict[(str, Tensor)], indices: Optional[Tensor], is_train: bool):
        """Render the foreground for *batch*, zeroing flow confidence on background."""
        (B, L, H, W) = batch['masks'].shape
        dataset = self.dataset
        homography = dataset.global_data['homography']
        homography_size = dataset.global_data['homography_size']
        bg_flow = []
        for i in range(B):
            # Per-sample background flow; data_idx + 1 presumably selects the
            # next frame's global homography — confirm against the dataset layout.
            bg_flow.append(get_background_flow(batch['homography'][i], homography[(int(batch['data_idx'][i]) + 1)], homography_size[1], homography_size[0], W, H).view(2, (- 1)))
        batch['bg_flow'] = torch.stack(bg_flow, dim=0)
        batch['flow_confidence'][batch['background_mask']] = 0
        return render_omnimatte_fg(dataset, batch, indices, self.fg.forward, self._get_fg_context, self.coords, self.global_step, is_train)
# NOTE(review): the bare string expressions below are presumably stripped
# Flask route decorators (@app.route('/swagger.json'), ...) — confirm
# against upstream.
('/swagger.json')
('/allure-docker-service/swagger.json', strict_slashes=False)
def swagger_json_endpoint():
    """Serve the OpenAPI spec, rewriting the server URL when URL_PREFIX is set."""
    try:
        specification_file = 'swagger.json'
        if ENABLE_SECURITY_LOGIN:
            specification_file = 'swagger_security.json'
        if URL_PREFIX:
            spec = get_file_as_string('{}/swagger/{}'.format(STATIC_CONTENT, specification_file))
            # SECURITY: eval() executes arbitrary code if the spec file is
            # ever attacker-controlled; json.loads would be safer (verify the
            # file is strict JSON before switching).
            spec_json = eval(spec)
            server_url = spec_json['servers'][0]['url']
            spec_json['servers'][0]['url'] = '{}{}'.format(URL_PREFIX, server_url)
            return jsonify(spec_json)
        return send_file('{}/swagger/{}'.format(STATIC_CONTENT, specification_file), mimetype='application/json')
    except Exception as ex:
        # Any failure is reported as a 400 with the error message in the body.
        body = {'meta_data': {'message': str(ex)}}
        resp = jsonify(body)
        resp.status_code = 400
        return resp
def split_run_name_full(dirname):
    """Break an Illumina run directory name into its components.

    Expected form ``<datestamp>_<instrument>_<run_number>_<flowcell>``,
    where the date stamp is 6 (YYMMDD) or 8 (YYYYMMDD) digits and the flow
    cell ID may carry a leading 'A'/'B' position prefix.

    Returns (date_stamp, instrument_name, run_number, flow_cell_prefix,
    flow_cell). Raises IlluminaDataError when a component cannot be
    extracted.
    """
    fields = os.path.basename(dirname).split('_')
    if len(fields) > 3 and fields[0].isdigit() and len(fields[0]) in (6, 8):
        date_stamp = fields[0]
    else:
        raise IlluminaDataError(("Unable to extract date stamp from '%s'" % dirname))
    if len(fields) >= 2:
        instrument_name = fields[1]
    else:
        raise IlluminaDataError(("Unable to extract instrument name from '%s'" % dirname))
    # BUG FIX: the original tested ``fields[2].isdigit`` (the bound method,
    # always truthy) instead of calling it, so non-numeric run numbers were
    # silently accepted.
    if len(fields) >= 3 and fields[2].isdigit():
        run_number = fields[2]
    else:
        raise IlluminaDataError(("Unable to extract run number from '%s'" % dirname))
    if len(fields) > 3:
        flow_cell = fields[3]
    else:
        raise IlluminaDataError(("Unable to extract flow cell ID from '%s'" % dirname))
    # Split off an optional flow cell position prefix ('A' or 'B').
    if flow_cell[0] in ('A', 'B'):
        flow_cell_prefix = flow_cell[0]
        flow_cell = flow_cell[1:]
    else:
        flow_cell_prefix = ''
    return (date_stamp, instrument_name, run_number, flow_cell_prefix, flow_cell)
class TestRangeEditor(HasTraits):
    """Traits view exercising RangeEditor with name-bound and static bounds.

    NOTE(review): both low_name/high_name and static low/high are given;
    presumably the name-bound traits take precedence — confirm against the
    traitsui RangeEditor documentation.
    """
    # Value edited through the range editor.
    x = Float()
    # Bound traits referenced via low_name/high_name below.
    low = Float(123.123)
    high = Float(1123.123)
    list = List(Float(editor=RangeEditor(low_name='low', high_name='high', low=100.0, high=10000.123)))
    view = View(Item(name='x', editor=RangeEditor(low_name='low', high_name='high', low=100.0, high=10000.123)), Item('list'), resizable=True)
def test_list_features():
    """list_features() yields non-empty tuples (len 3 or 4) and a nested dict form."""
    methods = list_features()
    assert len(methods) != 0
    for entry in methods:
        assert entry
        assert len(entry) in (3, 4)
        if len(entry) == 4:
            # 4-tuples only occur for a known set of features.
            assert any(tag in entry[2] for tag in ('face_recognition', 'search', 'automl_classification'))
        for field in entry:
            assert field
    features_by_provider = list_features(as_dict=True)
    assert len(features_by_provider) != 0
    for provider in features_by_provider:
        assert provider
        for feature in features_by_provider[provider]:
            assert feature
            for subfeature in features_by_provider[provider][feature]:
                assert subfeature
                assert features_by_provider[provider][feature][subfeature]
class K8sServiceAccountTokenAuthenticator(AbstractAuthenticator):
    """Authenticates via the pod's mounted Kubernetes service-account token."""

    _type = SupportedAuthProviders.KUBERNETES_SERVICE_ACCOUNT_TOKEN

    def authenticate(self, kf_endpoint: str, runtime_config_name: str) -> ServiceAccountTokenVolumeCredentials:
        """Verify the token file exists and is non-empty; return its path."""
        request_history = []
        # Environment variable overrides the default mount path.
        service_account_token_path = os.environ.get(KF_PIPELINES_SA_TOKEN_ENV, KF_PIPELINES_SA_TOKEN_PATH)
        try:
            with open(service_account_token_path, 'r') as token_file:
                token = token_file.read()
            if not token:
                raise AuthenticationError(f'Kubernetes service account token file {service_account_token_path} is empty.', provider=self._type, request_history=request_history)
        except AuthenticationError:
            raise
        except Exception as ex:
            raise AuthenticationError(f'Kubernetes service account token could not be read from {service_account_token_path}: {ex}.', provider=self._type, request_history=request_history)
        return ServiceAccountTokenVolumeCredentials(path=service_account_token_path)
class GenericLedgerApiHandler(Handler):
    """Handles ledger_api messages: balances, transaction receipts, errors."""
    SUPPORTED_PROTOCOL = LedgerApiMessage.protocol_id

    def setup(self) -> None:
        """Implement the setup (nothing to do)."""

    def handle(self, message: Message) -> None:
        """Dispatch an incoming ledger_api message by performative."""
        ledger_api_msg = cast(LedgerApiMessage, message)
        ledger_api_dialogues = cast(LedgerApiDialogues, self.context.ledger_api_dialogues)
        ledger_api_dialogue = cast(Optional[LedgerApiDialogue], ledger_api_dialogues.update(ledger_api_msg))
        if (ledger_api_dialogue is None):
            self._handle_unidentified_dialogue(ledger_api_msg)
            return
        if (ledger_api_msg.performative is LedgerApiMessage.Performative.BALANCE):
            self._handle_balance(ledger_api_msg)
        elif (ledger_api_msg.performative is LedgerApiMessage.Performative.TRANSACTION_RECEIPT):
            self._handle_transaction_receipt(ledger_api_msg, ledger_api_dialogue)
        elif (ledger_api_msg.performative == LedgerApiMessage.Performative.ERROR):
            self._handle_error(ledger_api_msg, ledger_api_dialogue)
        else:
            self._handle_invalid(ledger_api_msg, ledger_api_dialogue)

    def teardown(self) -> None:
        """Implement the handler teardown (nothing to do)."""

    def _handle_unidentified_dialogue(self, ledger_api_msg: LedgerApiMessage) -> None:
        """Log a message that could not be matched to a dialogue."""
        self.context.logger.info('received invalid ledger_api message={}, unidentified dialogue.'.format(ledger_api_msg))

    def _handle_balance(self, ledger_api_msg: LedgerApiMessage) -> None:
        """Log the reported balance."""
        self.context.logger.info('starting balance on {} ledger={}.'.format(ledger_api_msg.ledger_id, ledger_api_msg.balance))

    def _handle_transaction_receipt(self, ledger_api_msg: LedgerApiMessage, ledger_api_dialogue: LedgerApiDialogue) -> None:
        """On a settled and valid receipt, send the sold data to the buyer."""
        fipa_dialogue = ledger_api_dialogue.associated_fipa_dialogue
        is_settled = LedgerApis.is_transaction_settled(fipa_dialogue.terms.ledger_id, ledger_api_msg.transaction_receipt.receipt)
        is_valid = LedgerApis.is_transaction_valid(fipa_dialogue.terms.ledger_id, ledger_api_msg.transaction_receipt.transaction, fipa_dialogue.terms.sender_address, fipa_dialogue.terms.counterparty_address, fipa_dialogue.terms.nonce, fipa_dialogue.terms.counterparty_payable_amount)
        if (is_settled and is_valid):
            last_message = cast(Optional[FipaMessage], fipa_dialogue.last_incoming_message)
            if (last_message is None):
                raise ValueError('Cannot retrieve last fipa message.')
            # Reply with the data for sale and record a successful end state.
            inform_msg = fipa_dialogue.reply(performative=FipaMessage.Performative.INFORM, target_message=last_message, info=fipa_dialogue.data_for_sale)
            self.context.outbox.put_message(message=inform_msg)
            fipa_dialogues = cast(FipaDialogues, self.context.fipa_dialogues)
            fipa_dialogues.dialogue_stats.add_dialogue_endstate(FipaDialogue.EndState.SUCCESSFUL, fipa_dialogue.is_self_initiated)
            self.context.logger.info('transaction confirmed, sending data={} to buyer={}.'.format(fipa_dialogue.data_for_sale, last_message.sender[(- 5):]))
        else:
            self.context.logger.info('transaction_receipt={} not settled or not valid, aborting'.format(ledger_api_msg.transaction_receipt))

    def _handle_error(self, ledger_api_msg: LedgerApiMessage, ledger_api_dialogue: LedgerApiDialogue) -> None:
        """Log a ledger_api error message."""
        self.context.logger.info('received ledger_api error message={} in dialogue={}.'.format(ledger_api_msg, ledger_api_dialogue))

    def _handle_invalid(self, ledger_api_msg: LedgerApiMessage, ledger_api_dialogue: LedgerApiDialogue) -> None:
        """Warn about an unhandled performative."""
        self.context.logger.warning('cannot handle ledger_api message of performative={} in dialogue={}.'.format(ledger_api_msg.performative, ledger_api_dialogue))
class KhalEvent(CalendarEvent):
    """Adapter exposing a khal event through the CalendarEvent interface.

    NOTE(review): ``calendar`` reads like a stripped ``@property`` —
    confirm against upstream.
    """

    def __init__(self, khal_event):
        # Copy the fields we need off the khal event object.
        self.id = khal_event.uid
        self.start = khal_event.start_local
        self.end = khal_event.end_local
        self.title = khal_event.summary
        self.recurring = khal_event.recurring
        self._calendar = khal_event.calendar

    def calendar(self):
        """The calendar this event came from."""
        return self._calendar
def test_minimal_integration_2d_gps():
    """End-to-end fit of a BlockMean -> vector Trend -> vector Spline chain on CA GPS."""
    data = fetch_california_gps()
    proj_coords = projection(data.longitude.values, data.latitude.values)
    spacing = 12 / 60
    weights = (1 / data.std_east ** 2, 1 / data.std_north ** 2)
    train, test = train_test_split(coordinates=proj_coords, data=(data.velocity_east, data.velocity_north), weights=weights, random_state=1)
    steps = [
        ('mean', BlockMean(spacing=spacing * 111000.0, uncertainty=True)),
        ('trend', Vector([Trend(1), Trend(1)])),
        ('spline', Vector([Spline(damping=1e-10), Spline(damping=1e-10)])),
    ]
    chain = Chain(steps)
    chain.fit(*train)
    # The chain should explain ~99% of the held-out variance.
    npt.assert_allclose(0.99, chain.score(*test), atol=0.01)
    region = get_region((data.longitude, data.latitude))
    grid = chain.grid(region=region, spacing=spacing, projection=projection, dims=['latitude', 'longitude'])
    grid = distance_mask((data.longitude, data.latitude), maxdist=spacing * 2 * 111000.0, grid=grid, projection=projection)
class BaseConnector(ABC):
    """Abstract base for database connectors (MySQL-flavoured defaults).

    Subclasses implement connection management and the metadata queries.
    NOTE(review): ``charset`` and ``*args``/``**kwargs`` are accepted but
    not stored here — presumably consumed by subclasses; confirm.
    """

    def __init__(self, host='127.0.0.1', port=3306, user=None, passwd=None, db=None, charset='utf8', *args, **kwargs):
        """Store connection parameters; no connection is opened here."""
        self._host = host
        self._port = port
        self._user = user
        self._passwd = passwd
        self._db = db
        self._conn = None
        self._cursor = None

    def __del__(self):
        # Best-effort cleanup of any open cursor/connection.
        if self._cursor:
            self._cursor.close()
        if self._conn:
            self._conn.close()

    def get_connect(self):
        """Open and return the underlying connection."""
        pass

    def get_cursor(self, cursor=None):
        """Return a cursor (optionally of a specific cursor class)."""
        pass

    def select_db(self, db):
        """Switch the active database."""
        pass

    def get_all_tables(self, args=None):
        """Return all table names of the active database."""
        pass

    def execute(self, sql, args=None):
        """Execute *sql* with optional parameters."""
        pass

    def get_version(self, args=None):
        """Return the server version."""
        pass

    def get_all_table_metadata(self, args=None):
        """Return metadata for every table."""
        pass

    def get_table_metadata(self, db, table, args=None):
        """Return metadata for one table."""
        pass

    def get_table_field_metadata(self, db, table, args=None):
        """Return per-field metadata for one table."""
        pass
class MF522():
    """Driver for an NXP MFRC522 RFID reader/writer attached over SPI.

    Register constants are pre-shifted one bit left, matching the MFRC522
    SPI address-byte layout (the register address occupies bits 6..1).
    """

    # --- MFRC522 register addresses (pre-shifted << 1 for SPI framing) ---
    CommandReg = (1 << 1)
    ComIEnReg = (2 << 1)
    DivIEnReg = (3 << 1)
    ComIrqReg = (4 << 1)
    DivIrqReg = (5 << 1)
    ErrorReg = (6 << 1)
    Status1Reg = (7 << 1)
    Status2Reg = (8 << 1)
    FIFODataReg = (9 << 1)
    FIFOLevelReg = (10 << 1)
    WaterLevelReg = (11 << 1)
    ControlReg = (12 << 1)
    BitFramingReg = (13 << 1)
    CollReg = (14 << 1)
    ModeReg = (17 << 1)
    TxModeReg = (18 << 1)
    RxModeReg = (19 << 1)
    TxControlReg = (20 << 1)
    TxASKReg = (21 << 1)
    TxSelReg = (22 << 1)
    RxSelReg = (23 << 1)
    RxThresholdReg = (24 << 1)
    DemodReg = (25 << 1)
    MfTxReg = (28 << 1)
    MfRxReg = (29 << 1)
    SerialSpeedReg = (31 << 1)
    CRCResultRegH = (33 << 1)
    CRCResultRegL = (34 << 1)
    ModWidthReg = (36 << 1)
    RFCfgReg = (38 << 1)
    GsNReg = (39 << 1)
    CWGsPReg = (40 << 1)
    ModGsPReg = (41 << 1)
    TModeReg = (42 << 1)
    TPrescalerReg = (43 << 1)
    TReloadRegH = (44 << 1)
    TReloadRegL = (45 << 1)
    TCounterValueRegH = (46 << 1)
    TCounterValueRegL = (47 << 1)
    TestSel1Reg = (49 << 1)
    TestSel2Reg = (50 << 1)
    TestPinEnReg = (51 << 1)
    TestPinValueReg = (52 << 1)
    TestBusReg = (53 << 1)
    AutoTestReg = (54 << 1)
    VersionReg = (55 << 1)
    AnalogTestReg = (56 << 1)
    TestDAC1Reg = (57 << 1)
    TestDAC2Reg = (58 << 1)
    TestADCReg = (59 << 1)
    # --- PCD (reader) commands written to CommandReg ---
    PCD_Idle = 0
    PCD_Mem = 1
    PCD_GenerateRandomID = 2
    PCD_CalcCRC = 3
    PCD_Transmit = 4
    PCD_NoCmdChange = 7
    PCD_Receive = 8
    PCD_Transceive = 12
    PCD_MFAuthent = 14
    PCD_SoftReset = 15
    # --- receiver gain settings (upper nibble of RFCfgReg) ---
    RxGain_18dB = (0 << 4)
    RxGain_23dB = (1 << 4)
    RxGain_18dB_2 = (2 << 4)
    RxGain_23dB_2 = (3 << 4)
    RxGain_33dB = (4 << 4)
    RxGain_38dB = (5 << 4)
    RxGain_43dB = (6 << 4)
    RxGain_48dB = (7 << 4)
    RxGain_min = (0 << 4)
    RxGain_avg = (4 << 4)
    RxGain_max = (7 << 4)
    # --- PICC (card-side) command bytes ---
    PICC_CMD_REQA = 38
    PICC_CMD_WUPA = 82
    PICC_CMD_CT = 136
    PICC_CMD_SEL_CL1 = 147
    PICC_CMD_SEL_CL2 = 149
    PICC_CMD_SEL_CL3 = 151
    PICC_CMD_HLTA = 80
    PICC_CMD_MF_AUTH_KEY_A = 96
    PICC_CMD_MF_AUTH_KEY_B = 97
    PICC_CMD_MF_READ = 48
    PICC_CMD_MF_WRITE = 160
    PICC_CMD_MF_DECREMENT = 192
    PICC_CMD_MF_INCREMENT = 193
    PICC_CMD_MF_RESTORE = 194
    PICC_CMD_MF_TRANSFER = 176
    NRSTPD = 22
    # Maximum number of bytes read back from the FIFO in one transaction.
    MAX_LEN = 16
    # Status codes returned by driver methods.
    MI_OK = 0
    MI_NOTAGERR = 1
    MI_ERR = 2
    # Legacy aliases kept for backwards compatibility with older call sites.
    PCD_CALCCRC = 3
    PICC_REQIDL = 38
    PICC_REQALL = 82
    PICC_ANTICOLL = 147
    PICC_SElECTTAG = 147
    PICC_AUTHENT1A = 96
    PICC_AUTHENT1B = 97
    PICC_READ = 48
    PICC_WRITE = 160
    PICC_DECREMENT = 192
    PICC_INCREMENT = 193
    PICC_RESTORE = 194
    PICC_TRANSFER = 176
    PICC_HALT = 80
    PICC_CMD_UL_WRITE = 162
    MF_ACK = 10
    MF_KEY_SIZE = 6

    def __init__(self, I, cs='CS1'):
        """Initialise the reader attached to interface *I* on chip-select *cs*.

        Sets ``self.connected`` to False and returns early if the soft reset
        does not complete within its timeout.
        """
        self.cs = cs
        self.I = I
        self.I.SPI.set_parameters(2, 1, 1, 0)
        if (not self.reset()):
            self.connected = False
            return None
        # Timer and modulation setup (values taken over from the original
        # init sequence; see the MFRC522 datasheet for register meanings).
        self.write(self.TModeReg, 128)
        self.write(self.TPrescalerReg, 169)
        self.write(self.TReloadRegH, 3)
        self.write(self.TReloadRegL, 232)
        self.write(self.TxASKReg, 64)
        self.write(self.ModeReg, 61)
        self.enableAntenna()
        self.connected = True

    def enableAntenna(self):
        """Switch the TX antenna drivers on if they are not already enabled."""
        val = self.read(self.TxControlReg)
        if ((val & 3) != 3):
            self.write(self.TxControlReg, (val | 3))

    def reset(self):
        """Issue a soft reset; return True on completion, False on timeout (0.5s)."""
        self.write(self.CommandReg, self.PCD_SoftReset)
        s = time.time()
        # Bit 4 of CommandReg stays set while the chip is still resetting.
        while (self.read(self.CommandReg) & (1 << 4)):
            print('wait')
            time.sleep(0.1)
            if ((time.time() - s) > 0.5):
                return False
        return True

    def write(self, register, val):
        """Write one byte *val* to *register*; return the byte clocked back."""
        self.I.SPI.set_cs(self.cs, 0)
        # Write frame: MSB of the address byte cleared, data byte follows.
        ret = self.I.SPI.send16((((register & 127) << 8) | val))
        self.I.SPI.set_cs(self.cs, 1)
        return (ret & 255)

    def read(self, register):
        """Read and return one byte from *register*."""
        self.I.SPI.set_cs(self.cs, 0)
        # Read frame: MSB of the address byte set.
        ret = self.I.SPI.send16(((register | 128) << 8))
        self.I.SPI.set_cs(self.cs, 1)
        return (ret & 255)

    def readMany(self, register, total):
        """Read *total* consecutive bytes from *register* and return them as a list."""
        # NOTE(review): unlike read(), this sends the raw register address
        # without setting the MSB read bit — confirm against the hardware.
        self.I.SPI.set_cs(self.cs, 0)
        self.I.SPI.send8(register)
        vals = []
        for a in range((total - 1)):
            # Fix: the original referenced the bare name `I` here, raising
            # NameError at runtime; the SPI bus lives on self.I.
            vals.append(self.I.SPI.send8(register))
        vals.append(self.I.SPI.send8(0))
        self.I.SPI.set_cs(self.cs, 1)
        return vals

    def getStatus(self):
        """Return the raw contents of Status1Reg."""
        return self.read(self.Status1Reg)

    def getVersion(self):
        """Read, print, and return the chip version byte."""
        ver = self.read(self.VersionReg)
        if (ver == 136):
            print('Cloned version: Fudan Semiconductors')
        elif (ver == 144):
            print('version 1.0')
        elif (ver == 145):
            print('version 1.0')
        elif (ver == 146):
            print('version 2.0')
        else:
            print('Unknown version ', ver)
        return ver

    def SetBitMask(self, reg, mask):
        """Set the bits of *mask* in register *reg* (read-modify-write)."""
        tmp = self.read(reg)
        self.write(reg, (tmp | mask))

    def ClearBitMask(self, reg, mask):
        """Clear the bits of *mask* in register *reg* (read-modify-write)."""
        tmp = self.read(reg)
        self.write(reg, (tmp & (~ mask)))

    def MFRC522_ToCard(self, command, sendData):
        """Execute *command* with payload *sendData* against the card.

        :returns: (status, returnedData, backLen) where status is one of
            MI_OK / MI_NOTAGERR / MI_ERR and backLen is the number of bits
            received (only meaningful for PCD_Transceive).
        """
        returnedData = []
        backLen = 0
        status = self.MI_ERR
        irqEn = 0
        waitIRq = 0
        lastBits = None
        n = 0
        i = 0
        if (command == self.PCD_MFAuthent):
            irqEn = 18
            waitIRq = 16
        if (command == self.PCD_Transceive):
            irqEn = 119
            waitIRq = 48
        self.write(self.ComIEnReg, (irqEn | 128))
        self.ClearBitMask(self.ComIrqReg, 128)
        self.SetBitMask(self.FIFOLevelReg, 128)  # flush the FIFO
        self.write(self.CommandReg, self.PCD_Idle)
        for a in sendData:
            self.write(self.FIFODataReg, a)
        self.write(self.CommandReg, command)
        if (command == self.PCD_Transceive):
            self.SetBitMask(self.BitFramingReg, 128)  # StartSend
        i = 2000
        # Poll the IRQ register until a wait-IRQ fires or the loop budget
        # runs out. The bitwise-~ condition is intentionally preserved from
        # the widely-used MFRC522 Python port; do not "simplify" it.
        while True:
            n = self.read(self.ComIrqReg)
            i = (i - 1)
            if (~ ((i != 0) and (~ (n & 1)) and (~ (n & waitIRq)))):
                break
        self.ClearBitMask(self.BitFramingReg, 128)
        if (i != 0):
            if ((self.read(self.ErrorReg) & 27) == 0):
                status = self.MI_OK
                if ((n & irqEn) & 1):
                    status = self.MI_NOTAGERR
                if (command == self.PCD_Transceive):
                    n = self.read(self.FIFOLevelReg)
                    lastBits = (self.read(self.ControlReg) & 7)
                    if (lastBits != 0):
                        backLen = (((n - 1) * 8) + lastBits)
                    else:
                        backLen = (n * 8)
                    if (n == 0):
                        n = 1
                    if (n > self.MAX_LEN):
                        n = self.MAX_LEN
                    i = 0
                    while (i < n):
                        returnedData.append(self.read(self.FIFODataReg))
                        i = (i + 1)
            else:
                status = self.MI_ERR
        return (status, returnedData, backLen)

    def MFRC522_Request(self, reqMode):
        """Probe for cards in the field with *reqMode* (REQA/WUPA).

        :returns: (status, backBits)
        """
        status = None
        backBits = None
        TagType = []
        self.write(self.BitFramingReg, 7)  # short frame: transmit 7 bits
        TagType.append(reqMode)
        (status, returnedData, backBits) = self.MFRC522_ToCard(self.PCD_Transceive, TagType)
        if ((status != self.MI_OK) | (backBits != 16)):
            status = self.MI_ERR
        return (status, backBits)

    def MFRC522_Anticoll(self):
        """Run the anti-collision loop; return (status, 5-byte serial number).

        The fifth byte is a XOR checksum over the first four and is verified
        here.
        """
        returnedData = []
        serNumCheck = 0
        serNum = []
        self.write(self.BitFramingReg, 0)
        serNum.append(self.PICC_ANTICOLL)
        serNum.append(32)
        (status, returnedData, backBits) = self.MFRC522_ToCard(self.PCD_Transceive, serNum)
        if (status == self.MI_OK):
            i = 0
            if (len(returnedData) == 5):
                while (i < 4):
                    serNumCheck = (serNumCheck ^ returnedData[i])
                    i = (i + 1)
                if (serNumCheck != returnedData[i]):
                    status = self.MI_ERR
            else:
                status = self.MI_ERR
        return (status, returnedData)

    def CalulateCRC(self, pIndata):
        """Compute the CRC-A of *pIndata* using the chip's coprocessor.

        :returns: [crc_low, crc_high]
        """
        self.ClearBitMask(self.DivIrqReg, 4)
        self.SetBitMask(self.FIFOLevelReg, 128)
        for a in pIndata:
            self.write(self.FIFODataReg, a)
        self.write(self.CommandReg, self.PCD_CALCCRC)
        # Bounded poll for the CRCIRq bit.
        for i in range(255):
            n = self.read(self.DivIrqReg)
            if (n & 4):
                break
        pOutData = []
        pOutData.append(self.read(self.CRCResultRegL))
        pOutData.append(self.read(self.CRCResultRegH))
        return pOutData

    def MFRC522_SelectTag(self, serNum):
        """SELECT the tag with serial *serNum*; return its SAK byte, or 0 on failure."""
        returnedData = []
        buf = []
        buf.append(self.PICC_SElECTTAG)
        buf.append(112)
        i = 0
        while (i < 5):
            buf.append(serNum[i])
            i = (i + 1)
        pOut = self.CalulateCRC(buf)
        buf.append(pOut[0])
        buf.append(pOut[1])
        (status, returnedData, backLen) = self.MFRC522_ToCard(self.PCD_Transceive, buf)
        if ((status == self.MI_OK) and (backLen == 24)):
            return returnedData[0]
        else:
            return 0

    def MFRC522_Auth(self, authMode, BlockAddr, Sectorkey, serNum):
        """Authenticate *BlockAddr* with key A/B (*authMode*) and *Sectorkey*.

        :returns: MI_OK on success; prints diagnostics on failure.
        """
        buff = []
        buff.append(authMode)
        buff.append(BlockAddr)
        i = 0
        while (i < len(Sectorkey)):
            buff.append(Sectorkey[i])
            i = (i + 1)
        i = 0
        while (i < 4):
            buff.append(serNum[i])
            i = (i + 1)
        (status, returnedData, backLen) = self.MFRC522_ToCard(self.PCD_MFAuthent, buff)
        if (not (status == self.MI_OK)):
            print('AUTH ERROR!!')
        if (not ((self.read(self.Status2Reg) & 8) != 0)):
            print('AUTH ERROR(status2reg & 0x08) != 0')
        return status

    def MFRC522_StopCrypto1(self):
        """Leave the authenticated (Crypto1) state."""
        self.ClearBitMask(self.Status2Reg, 8)
        self.SetBitMask(self.CommandReg, 16)

    def MFRC522_Read(self, blockAddr):
        """Read one 16-byte block at *blockAddr*; return the received bytes."""
        recvData = []
        recvData.append(self.PICC_READ)
        recvData.append(blockAddr)
        pOut = self.CalulateCRC(recvData)
        recvData.append(pOut[0])
        recvData.append(pOut[1])
        (status, returnedData, backLen) = self.MFRC522_ToCard(self.PCD_Transceive, recvData)
        if (not (status == self.MI_OK)):
            print('Error while reading!')
        i = 0
        return returnedData

    def MFRC522_Write(self, blockAddr, writeData):
        """Write the 16 bytes of *writeData* to block *blockAddr*.

        The write is a two-step exchange: first the WRITE command for the
        block, then (after an ACK) the 16 data bytes.
        """
        buff = []
        buff.append(self.PICC_WRITE)
        buff.append(blockAddr)
        crc = self.CalulateCRC(buff)
        buff.append(crc[0])
        buff.append(crc[1])
        (status, returnedData, backLen) = self.MFRC522_ToCard(self.PCD_Transceive, buff)
        if ((not (status == self.MI_OK)) or (not (backLen == 4)) or (not ((returnedData[0] & 15) == 10))):
            status = self.MI_ERR
            print(((str(backLen) + ' returnedData &0x0F == 0x0A ') + str((returnedData[0] & 15))))
        if (status == self.MI_OK):
            i = 0
            buf = []
            while (i < 16):
                buf.append(writeData[i])
                i = (i + 1)
            crc = self.CalulateCRC(buf)
            buf.append(crc[0])
            buf.append(crc[1])
            (status, returnedData, backLen) = self.MFRC522_ToCard(self.PCD_Transceive, buf)
            if ((not (status == self.MI_OK)) or (not (backLen == 4)) or (not ((returnedData[0] & 15) == 10))):
                print('Error while writing')
            if (status == self.MI_OK):
                print('Data written')

    def MFRC522_DumpClassic1K(self, key, uid):
        """Authenticate and read all 64 blocks of a MIFARE Classic 1K card."""
        i = 0
        while (i < 64):
            status = self.MFRC522_Auth(self.PICC_AUTHENT1A, i, key, uid)
            if (status == self.MI_OK):
                self.MFRC522_Read(i)
            else:
                print('Authentication error')
            i = (i + 1)
def set_status(status):
    """Record the state of each worker in the 'workers_state' Prometheus gauge.

    Fix: previously the gauge was created lazily *inside* the loop, and the
    worker that triggered creation (and, on the first call, all workers) was
    never actually recorded. Create the gauge first, then increment.

    :param status: mapping of worker name -> dict containing a 'state' key
    """
    global metrics
    if ('workers_state' not in metrics):
        metrics['workers_state'] = Gauge('workers_state', 'State of workers', ['worker', 'state'])
    for worker in status:
        metrics['workers_state'].labels(worker=worker, state=status[worker]['state']).inc()
class AmountViewSet(AwardTypeMixin, FabaOutlayMixin, DisasterBase):
    """Disaster-spending award amount endpoint.

    Sums obligations and outlays (and face value of loans, for loan award
    types) over nested financial_accounts_by_award documents in the
    AccountSearch Elasticsearch index.
    """
    endpoint_doc = 'usaspending_api/api_contracts/contracts/v2/disaster/award/amount.md'
    # When True (set by a subclass), respond with only the award count.
    count_only = False
    # NOTE(review): this bare call looks like a decorator whose "@" and name
    # were stripped during extraction (e.g. "@cache_response()") — confirm
    # against version control.
    _response()

    def post(self, request):
        """Validate the optional award_type filter and return summed amounts."""
        # Extra TinyShield rule for the optional filter|award_type enum.
        additional_models = [{'key': 'filter|award_type', 'name': 'award_type', 'type': 'enum', 'enum_values': ('assistance', 'procurement'), 'allow_nulls': False, 'optional': True}]
        f = TinyShield(additional_models).block(self.request.data).get('filter')
        if f:
            self.filters['award_type'] = f.get('award_type')
        # The two award-type filters are mutually exclusive.
        if all(((x in self.filters) for x in ['award_type_codes', 'award_type'])):
            raise UnprocessableEntityException("Cannot provide both 'award_type_codes' and 'award_type'")
        self.nonzero_fields = ['obligated_sum', 'outlay_sum']
        # Only include loan face value when every requested code is a loan type.
        if (self.award_type_codes and (set(self.award_type_codes) <= set(loan_type_mapping.keys()))):
            self.nonzero_fields.append('total_loan_value')
        search = self.build_elasticsearch_search()
        result = self.build_result(search)
        if self.count_only:
            return Response({'count': result['award_count']})
        else:
            return Response(result)

    def build_elasticsearch_search(self) -> AccountSearch:
        """Build the AccountSearch query with nested sum/count aggregations."""
        # With explicit award type codes we can count award ids directly;
        # otherwise fall back to the distinct award key on the account docs.
        if self.award_type_codes:
            count_field = 'award_id'
        else:
            count_field = 'financial_account_distinct_award_key'
        filter_query = self._build_elasticsearch_query()
        search = AccountSearch().filter(filter_query)
        count_agg = create_count_aggregation(count_field)
        financial_accounts_agg = A('nested', path='financial_accounts_by_award')
        # Restrict the nested docs to the requested DEF codes.
        filter_agg_query = ES_Q('terms', **{'financial_accounts_by_award.disaster_emergency_fund_code': self.filters.get('def_codes')})
        filtered_aggs = A('filter', filter_agg_query)
        # Outlay = gross outlay + two downward adjustments, only for the
        # final balances of the fiscal year; scaled by 100 to sum in cents.
        outlay_sum_agg = A('sum', script="doc['financial_accounts_by_award.is_final_balances_for_fy'].value ? (\n (doc['financial_accounts_by_award.gross_outlay_amount_by_award_cpe'].size() > 0 ? doc['financial_accounts_by_award.gross_outlay_amount_by_award_cpe'].value : 0)\n + (doc['financial_accounts_by_award.ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe'].size() > 0 ? doc['financial_accounts_by_award.ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe'].value : 0)\n + (doc['financial_accounts_by_award.ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe'].size() > 0 ? doc['financial_accounts_by_award.ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe'].value : 0)\n ) * 100 : 0")
        obligation_sum_agg = A('sum', field='financial_accounts_by_award.transaction_obligated_amount', script='_value * 100')
        # Step back out of the nested context for award-level metrics.
        reverse_nested_agg = A('reverse_nested', **{})
        face_value_of_loan_sum_agg = A('sum', field='total_loan_value', script='_value * 100')
        search.aggs.bucket('nested_agg', financial_accounts_agg).bucket('filter_agg', filtered_aggs).metric('obligation_sum_agg', obligation_sum_agg).metric('outlay_sum_agg', outlay_sum_agg).bucket('reverse_nested_agg', reverse_nested_agg).metric('count_agg', count_agg).metric('face_value_of_loan_sum_agg', face_value_of_loan_sum_agg)
        return search

    def _build_elasticsearch_query(self) -> ES_Q:
        """Translate self.filters into an ES_Q, handling the award_type split."""
        filters = deepcopy(self.filters)
        award_type_filter = filters.pop('award_type', None)
        filters['nonzero_fields'] = self.nonzero_fields
        if filters.get('def_codes'):
            filters['nested_def_codes'] = filters.pop('def_codes')
        filter_query = QueryWithFilters.generate_accounts_elasticsearch_query(filters)
        if award_type_filter:
            # Procurement awards have a piid; assistance awards do not.
            is_procurement = (award_type_filter == 'procurement')
            exists_query = ES_Q('exists', field='financial_accounts_by_award.piid')
            nested_query = ES_Q('nested', path='financial_accounts_by_award', query=ES_Q('bool', **{f"must{('' if is_procurement else '_not')}": exists_query}))
            filter_query.must.append(nested_query)
        return filter_query

    def build_result(self, search: AccountSearch) -> dict:
        """Execute *search* and unpack the aggregations into the response dict."""
        response = search.handle_execute()
        response = response.aggs.to_dict()
        filter_agg = response.get('nested_agg', {}).get('filter_agg', {})
        reverse_nested_agg = filter_agg.get('reverse_nested_agg', {})
        result = {'award_count': reverse_nested_agg.get('count_agg', {'value': 0})['value'], 'obligation': get_summed_value_as_float(filter_agg, 'obligation_sum_agg'), 'outlay': get_summed_value_as_float(filter_agg, 'outlay_sum_agg')}
        if ('total_loan_value' in self.nonzero_fields):
            result['face_value_of_loan'] = get_summed_value_as_float(reverse_nested_agg, 'face_value_of_loan_sum_agg')
        return result
def _compute_padding_flops(tensor: Tensor, shapes: List[IntVar], padding_idx: int) -> int:
    """Estimate the flops needed to pad *tensor* along dimension *padding_idx*.

    Returns 0 when the dimension is already even or the tensor can be
    constant-folded; otherwise the estimate is proportional to the matrix
    size (reduced by the padding fraction for strided tensors).
    """
    dim_value = shapes[padding_idx].value()
    if (dim_value % 2) == 0:
        return 0
    if can_be_constant_folded(tensor):
        return 0
    total = _matrix_shape_prod(shapes)
    if _is_strided_tensor(tensor):
        return (total * get_padding_length(dim_value, tensor.dtype())) / dim_value
    return total
def main(args=None):
    """Entry point for the EQL command-line interface.

    Builds the argument parser with build/optimize/query/shell sub-commands
    and dispatches to the selected handler (defaulting to the shell).

    :param args: argv-style list; None means sys.argv[1:]
    """
    import eql
    parser = argparse.ArgumentParser(description='Event Query Language')
    parser.add_argument('--version', '-V', action='version', version=('%s %s' % (eql.__name__, eql.__version__)))
    subparsers = parser.add_subparsers(help='Sub Command Help')
    build_parser = subparsers.add_parser('build', help='Build an EQL engine in a target language')
    build_parser.set_defaults(func=build)
    build_parser.add_argument('input_file', help='Input analytics file(s) (.yml or .json)')
    build_parser.add_argument('output_file', help='Output analytics engine file')
    build_parser.add_argument('--engine_type', help='Engine type. Autodetected from output extension if not provided')
    build_parser.add_argument('--analytics-only', action='store_true', help='Skips core engine when building target')
    query_parser = subparsers.add_parser('optimize', help='Optimize an EQL expression')
    query_parser.set_defaults(func=optimize_expression)
    query_parser.add_argument('expression', help='EQL expression to optimize')
    # NOTE(review): query_parser is rebound below, so the 'optimize'
    # subparser never receives the shared --config/--file options added in
    # the loops at the bottom — confirm whether that is intended.
    query_parser = subparsers.add_parser('query', help='Run an EQL query over stdin or a data file')
    query_parser.set_defaults(func=query)
    query_parser.add_argument('query', help='The EQL query to run over the log file')
    query_parser.add_argument('--encoding', '-e', help='Encoding of input file', default='utf8')
    query_parser.add_argument('--format', help='', choices=['json', 'jsonl', 'json.gz', 'jsonl.gz'])
    shell_parser = subparsers.add_parser('shell', help='Run an EQL query over stdin or a data file')
    shell_parser.set_defaults(func=shell_main)
    # Shared options attached to several parsers at once.
    for p in (parser, build_parser, query_parser, shell_parser):
        p.add_argument('--config', '-c', help='Engine configuration')
    for p in (parser, query_parser, shell_parser):
        p.add_argument('--file', '-f', help='Target file(s) to query with EQL')
    parsed = parser.parse_args(args)
    try:
        # No sub-command given: fall back to the interactive shell.
        if hasattr(parsed, 'func'):
            return parsed.func(parsed)
        else:
            return shell_main(parsed)
    except KeyboardInterrupt:
        pass
def _merge_grouper(items, group_size):
FileMergeGroup = namedtuple('FileMergeGroup', ['part', 'file_list'])
if (len(items) <= group_size):
(yield FileMergeGroup(None, items))
return
group_generator = (items[i:(i + group_size)] for i in range(0, len(items), group_size))
for (i, group) in enumerate(group_generator, start=1):
(yield FileMergeGroup(i, group)) |
# NOTE(review): the two bare tuples below look like click decorators whose
# "@command.command(" / "@click.option(" heads were stripped during
# extraction — confirm against version control.
('active', cls=FandoghCommand)
('--name', '-n', 'name', prompt='Namespace name', help='Namespace name that should be default', default=None)
def active(name):
    """Set *name* as the active (default) cluster namespace, if it exists."""
    namespaces = list_namespaces()
    if (name in map((lambda n: n['name']), namespaces)):
        click.echo('Setting the active namespace to {}'.format(name))
        set_cluster_namespace(name)
    else:
        click.echo(format_text('Namespace not found', TextStyle.FAIL))
class TestS3StorageService(unittest.TestCase):
    """Unit tests for S3StorageService.copy/delete/exists/list with a mocked
    S3Gateway.

    NOTE(review): the S3_* constants below are truncated (unterminated
    strings) and the bare "('fbpcp.gateway.s3.S3Gateway')" lines look like
    stripped "@patch(...)" decorators — the extraction appears to have
    removed URLs and decorator heads; confirm against version control.
    """
    LOCAL_FILE = '/usr/test_file'
    LOCAL_FOLDER = '/foo'
    S3_FILE = '
    S3_FILE_COPY = '
    S3_FOLDER = '
    S3_FOLDER_COPY = '
    S3_FILE_WITH_SUBFOLDER = '
    # Fake os.walk() output: /foo contains dir "bar" and file "baz"...
    LOCAL_DIR = [('/foo', ('bar',), ('baz',)), ('/foo/baz', (), ('a', 'b'))]
    # Fake list_object2() output for the S3 "directory" fixtures.
    S3_DIR = ['test_folder/bar/', 'test_folder/baz/', 'test_folder/baz/a', 'test_folder/baz/b']

    ('fbpcp.gateway.s3.S3Gateway')
    def test_copy_local_to_s3(self, MockS3Gateway):
        """Local file -> S3 delegates to upload_file."""
        service = S3StorageService('us-west-1')
        service.s3_gateway = MockS3Gateway()
        service.s3_gateway.upload_file = MagicMock(return_value=None)
        service.copy(self.LOCAL_FILE, self.S3_FILE)
        service.s3_gateway.upload_file.assert_called_with(str(self.LOCAL_FILE), 'bucket', 'test_file')

    def test_copy_local_dir_to_s3_recursive_false(self):
        """Copying a local directory without recursive=True raises."""
        service = S3StorageService('us-west-1')
        with patch('os.path.isdir', return_value=True):
            self.assertRaises(ValueError, service.copy, self.LOCAL_FOLDER, self.S3_FOLDER, False)

    ('fbpcp.gateway.s3.S3Gateway')
    def test_copy_local_dir_to_s3_recursive_true(self, MockS3Gateway):
        """Recursive local dir -> S3 creates folder keys and uploads files."""
        service = S3StorageService('us-west-1')
        service.s3_gateway = MockS3Gateway()
        service.s3_gateway.put_object = MagicMock(return_value=None)
        service.s3_gateway.upload_file = MagicMock(return_value=None)
        with patch('os.path.isdir', return_value=True):
            with patch('os.walk', return_value=self.LOCAL_DIR):
                service.copy(self.LOCAL_FOLDER, self.S3_FOLDER, True)
                service.s3_gateway.put_object.assert_called_with('bucket', 'test_folder/bar/', '')
                service.s3_gateway.upload_file.assert_has_calls([call('/foo/baz/a', 'bucket', 'test_folder/baz/a'), call('/foo/baz/b', 'bucket', 'test_folder/baz/b')], any_order=True)

    ('fbpcp.gateway.s3.S3Gateway')
    def test_copy_s3_to_local(self, MockS3Gateway):
        """S3 file -> local delegates to download_file."""
        service = S3StorageService('us-west-1')
        service.s3_gateway = MockS3Gateway()
        service.s3_gateway.download_file = MagicMock(return_value=None)
        service.copy(self.S3_FILE, self.LOCAL_FILE)
        service.s3_gateway.download_file.assert_called_with('bucket', 'test_file', str(self.LOCAL_FILE))

    def test_copy_s3_dir_to_local_recursive_false(self):
        """Copying an S3 directory without recursive=True raises."""
        service = S3StorageService('us-west-1')
        self.assertRaises(ValueError, service.copy, self.S3_FOLDER, self.LOCAL_FOLDER, False)

    ('fbpcp.gateway.s3.S3Gateway')
    def test_copy_s3_dir_to_local_source_does_not_exist(self, MockS3Gateway):
        """Missing S3 source raises ValueError."""
        service = S3StorageService('us-west-1')
        service.s3_gateway = MockS3Gateway()
        service.s3_gateway.object_exists = MagicMock(return_value=False)
        self.assertRaises(ValueError, service.copy, self.S3_FOLDER, self.LOCAL_FOLDER, False)

    ('os.makedirs')
    ('fbpcp.gateway.s3.S3Gateway')
    def test_copy_s3_dir_to_local_ok(self, MockS3Gateway, os_makedirs):
        """Recursive S3 dir -> local recreates folders and downloads files."""
        service = S3StorageService('us-west-1')
        service.s3_gateway = MockS3Gateway()
        service.s3_gateway.object_exists = MagicMock(return_value=True)
        service.s3_gateway.list_object2 = MagicMock(return_value=self.S3_DIR)
        service.s3_gateway.download_file = MagicMock(return_value=None)
        service.copy(self.S3_FOLDER, self.LOCAL_FOLDER, True)
        os.makedirs.assert_has_calls([call('/foo/bar'), call('/foo/baz')], any_order=True)
        service.s3_gateway.download_file.assert_has_calls([call('bucket', 'test_folder/baz/a', '/foo/baz/a'), call('bucket', 'test_folder/baz/b', '/foo/baz/b')], any_order=True)

    ('fbpcp.gateway.s3.S3Gateway')
    def test_copy_local_to_local(self, MockS3Gateway):
        """Local -> local is unsupported and raises."""
        service = S3StorageService('us-west-1')
        service.s3_gateway = MockS3Gateway()
        self.assertRaises(ValueError, service.copy, self.LOCAL_FILE, self.LOCAL_FILE)

    ('fbpcp.gateway.s3.S3Gateway')
    def test_copy_s3_to_s3(self, MockS3Gateway):
        """S3 file -> S3 file delegates to gateway copy."""
        service = S3StorageService('us-west-1')
        service.s3_gateway = MockS3Gateway()
        service.copy(self.S3_FILE, self.S3_FILE_COPY)
        service.s3_gateway.copy.assert_called_with('bucket', 'test_file', 'bucket', 'test_file_copy')

    def test_copy_s3_dir_to_s3_recursive_false(self):
        """Copying an S3 directory to S3 without recursive=True raises."""
        service = S3StorageService('us-west-1')
        self.assertRaises(ValueError, service.copy, self.S3_FOLDER, self.S3_FOLDER_COPY, False)

    def test_copy_s3_dir_to_s3_source_and_dest_are_the_same(self):
        """Source and destination must differ."""
        service = S3StorageService('us-west-1')
        self.assertRaises(ValueError, service.copy, self.S3_FOLDER, self.S3_FOLDER, True)

    ('fbpcp.gateway.s3.S3Gateway')
    def test_copy_s3_dir_to_s3_source_does_not_exist(self, MockS3Gateway):
        """Missing S3 source raises ValueError."""
        service = S3StorageService('us-west-1')
        service.s3_gateway = MockS3Gateway()
        service.s3_gateway.object_exists = MagicMock(return_value=False)
        self.assertRaises(ValueError, service.copy, self.S3_FOLDER, self.S3_FOLDER_COPY, False)

    ('os.makedirs')
    ('fbpcp.gateway.s3.S3Gateway')
    def test_copy_s3_dir_to_s3_ok(self, MockS3Gateway, os_makedirs):
        """Recursive S3 dir -> S3 dir recreates folder keys and copies files."""
        service = S3StorageService('us-west-1')
        service.s3_gateway = MockS3Gateway()
        service.s3_gateway.object_exists = MagicMock(return_value=True)
        service.s3_gateway.list_object2 = MagicMock(return_value=self.S3_DIR)
        service.s3_gateway.put_object = MagicMock(return_value=None)
        service.s3_gateway.copy = MagicMock(return_value=None)
        service.copy(self.S3_FOLDER, self.S3_FOLDER_COPY, True)
        service.s3_gateway.put_object.assert_has_calls([call('bucket', 'test_folder_copy/bar/', ''), call('bucket', 'test_folder_copy/baz/', '')], any_order=True)
        service.s3_gateway.copy.assert_has_calls([call('bucket', 'test_folder/baz/a', 'bucket', 'test_folder_copy/baz/a'), call('bucket', 'test_folder/baz/b', 'bucket', 'test_folder_copy/baz/b')], any_order=True)

    ('fbpcp.gateway.s3.S3Gateway')
    def test_delete_s3(self, MockS3Gateway):
        """delete() delegates to delete_object."""
        service = S3StorageService('us-west-1')
        service.s3_gateway = MockS3Gateway()
        service.delete(self.S3_FILE)
        service.s3_gateway.delete_object.assert_called_with('bucket', 'test_file')

    ('fbpcp.gateway.s3.S3Gateway')
    def test_file_exists(self, MockS3Gateway):
        """file_exists() delegates to object_exists."""
        service = S3StorageService('us-west-1')
        service.s3_gateway = MockS3Gateway()
        service.file_exists(self.S3_FILE)
        service.s3_gateway.object_exists.assert_called_with('bucket', 'test_file')

    ('fbpcp.gateway.s3.S3Gateway')
    def test_list_files(self, MockS3Gateway):
        """list_files() delegates to list_object2."""
        service = S3StorageService('us-west-1')
        service.s3_gateway = MockS3Gateway()
        service.list_files(self.S3_FOLDER)
        service.s3_gateway.list_object2.assert_called_with('bucket', 'test_folder')
def scan(pkt):
    """Packet callback: watch for the 1/3/6/10 length-delta signature.

    Appends the current packet's length delta to the sliding 4-entry queue;
    when the queue matches the signature, stops scanning and records the
    sender (addr3) as the target.
    """
    global offset
    global scanning
    global queue
    global target_addr
    signature = [1, 3, 6, 10]
    queue.append(len(pkt) - offset)
    # Keep the window at most 4 entries long.
    if len(queue) > 4:
        queue.pop(0)
    if equal(queue, signature):
        scanning = False
        queue.clear()
        target_addr = pkt.addr3
        print('[+] Found target: ' + target_addr)
        print('[+] Decoding ...')
def drop_noncanonical_contigs(accessible, targets, verbose=True):
    """Remove rows of *accessible* on chromosomes that are absent from *targets*.

    If the targets use canonical contig names, only non-canonical untargeted
    contigs are dropped; otherwise any untargeted contig with a name longer
    than the longest target name is dropped.
    """
    access_chroms, target_chroms = compare_chrom_names(accessible, targets)
    untargeted = access_chroms - target_chroms
    if any(is_canonical_contig_name(c) for c in target_chroms):
        to_skip = [c for c in untargeted if not is_canonical_contig_name(c)]
    else:
        # No canonical names among targets: use name length as the heuristic.
        longest_target_name = max(map(len, target_chroms))
        to_skip = [c for c in untargeted if len(c) > longest_target_name]
    if to_skip:
        logging.info('Skipping untargeted chromosomes %s', ' '.join(sorted(to_skip)))
        accessible = accessible[~accessible.chromosome.isin(to_skip)]
    return accessible
class TestBuildrootOverrideMessage():
    """Tests for bodhi buildroot-override tag/untag message schemas.

    NOTE(review): the expected dicts below are mangled — values such as
    'app_icon', 'url' and 'agent_avatar' read like URLs that were stripped
    during extraction, leaving unbalanced quotes; confirm against version
    control.
    """

    def test_tag_v1(self):
        """BuildrootOverrideTagV1 renders the expected fields."""
        expected = {'topic': 'bodhi.buildroot_override.tag', 'summary': 'lmacken submitted a buildroot override for libxcrypt-4.4.4-2.fc28', '__str__': 'lmacken submitted a buildroot override for libxcrypt-4.4.4-2.fc28', 'app_icon': ' 'app_name': 'bodhi', 'url': ' 'agent_avatar': ' 'usernames': ['lmacken'], 'packages': ['libxcrypt'], 'build': base.BuildV1('libxcrypt-4.4.4-2.fc28'), 'submitter': base.UserV1('lmacken'), 'agent_name': 'lmacken'}
        msg = BuildrootOverrideTagV1(body={'override': {'nvr': 'libxcrypt-4.4.4-2.fc28', 'submitter': {'name': 'lmacken'}}})
        check_message(msg, expected)

    def test_untag_v1(self):
        """BuildrootOverrideUntagV1 renders the expected fields."""
        expected = {'topic': 'bodhi.buildroot_override.untag', 'summary': 'lmacken expired a buildroot override for libxcrypt-4.4.4-2.fc28', 'app_icon': ' 'app_name': 'bodhi', 'url': ' 'agent_avatar': ' 'usernames': ['lmacken'], 'packages': ['libxcrypt'], 'build': base.BuildV1('libxcrypt-4.4.4-2.fc28'), 'submitter': base.UserV1('lmacken'), 'agent_name': 'lmacken'}
        msg = BuildrootOverrideUntagV1(body={'override': {'nvr': 'libxcrypt-4.4.4-2.fc28', 'submitter': {'name': 'lmacken'}}})
        check_message(msg, expected)
def get_input_addr_calculator(func_attrs):
    """Render the INPUT_ADDR_CALCULATOR template for a batched gemm kernel.

    Strides and offsets default to dense row-major layouts ('M * K' / 'K'
    for A, 'K * N' / 'N' for B) and are overridden from the tensor accessors
    when the input comes from a strided tensor.
    """
    a_batch_stride, a_stride_k, a_offset = 'M * K', 'K', 0
    b_batch_stride, b_stride_k, b_offset = 'K * N', 'N', 0
    if 'input_accessors' in func_attrs:
        a_accessor = func_attrs['input_accessors'][0]
        b_accessor = func_attrs['input_accessors'][1]
        if a_accessor.is_from_strided_tensor:
            a_offset = a_accessor.offset
            # Stride along K is the stride of the second-to-last dimension.
            a_stride_k = a_accessor.stride(len(a_accessor.original_shapes) - 2)
        if b_accessor.is_from_strided_tensor:
            b_offset = b_accessor.offset
            b_stride_k = b_accessor.stride(len(b_accessor.original_shapes) - 2)
    return common.INPUT_ADDR_CALCULATOR.render(
        input_a_batch_stride_dim=a_batch_stride,
        input_a_stride_dim=a_stride_k,
        input_a_offset_val=a_offset,
        input_b_batch_stride_dim=b_batch_stride,
        input_b_stride_dim=b_stride_k,
        input_b_offset_val=b_offset,
    )
def test_scan_structure_type(thr, exclusive):
    """Check that Scan works on a structured (record) dtype.

    Builds a structured array with nested fields, computes the reference
    prefix-scan field-by-field on the host, then runs a Scan with a custom
    predicate that adds the fields element-wise on the device.
    """
    shape = (100, 100)
    dtype = dtypes.align(numpy.dtype([('i1', numpy.uint32), ('nested', numpy.dtype([('v', numpy.uint64)])), ('i2', numpy.uint32)]))
    a = get_test_array(shape, dtype)
    a_dev = thr.to_device(a)
    # Reference result: scan each field independently along axis 0.
    b_ref = numpy.empty(shape, dtype)
    b_ref['i1'] = ref_scan(a['i1'], axes=0, exclusive=exclusive)
    b_ref['nested']['v'] = ref_scan(a['nested']['v'], axes=0, exclusive=exclusive)
    b_ref['i2'] = ref_scan(a['i2'], axes=0, exclusive=exclusive)
    # Custom associative operation: field-wise addition of the struct;
    # the identity element is an all-zeros struct.
    predicate = Predicate(Snippet.create((lambda v1, v2: '\n    ${ctype} result = ${v1};\n    result.i1 += ${v2}.i1;\n    result.nested.v += ${v2}.nested.v;\n    result.i2 += ${v2}.i2;\n    return result;\n    '), render_kwds=dict(ctype=dtypes.ctype_module(dtype))), numpy.zeros(1, dtype)[0])
    scan = Scan(a_dev, predicate, axes=(0,), exclusive=exclusive)
    b_dev = thr.empty_like(scan.parameter.output)
    scanc = scan.compile(thr)
    scanc(b_dev, a_dev)
    b_res = b_dev.get()
    assert diff_is_negligible(b_res, b_ref)
def extractThejourneytotheskyWordpressCom(item):
    """Build a release message for a thejourneytothesky.wordpress.com post.

    Returns None for previews / items without a chapter or volume, a release
    message when a tag or title pattern matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or ('preview' in item['title'].lower()):
        return None
    # First try matching on post tags.
    tagmap = [('Doro Doro', 'Doro Doro Obake Ouji-sama', 'translated')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Title-based matching only applies to uncategorized posts.
    if item['tags'] != ['Uncategorized']:
        return False
    title_lower = item['title'].lower()
    titlemap = [
        ('[IRM2TM7H]', 'In Regards to My 2nd Trip and My 7 Husbands', 'translated'),
        ('Doro Doro Obake Ouji-sama', 'Doro Doro Obake Ouji-sama', 'translated'),
        ('Watashi wa Teki ni Narimasen! ', 'Watashi wa Teki ni Narimasen! ', 'translated'),
        ('[Arasaa]', 'I am the Newly Born Woman of Around Thirty', 'translated'),
        ('The Dukes daughter ch', "The Duke's Daughter Is the Knight Captain's (62) Young Wife", 'translated'),
    ]
    for titlecomponent, name, tl_type in titlemap:
        if titlecomponent.lower() in title_lower:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesLineAccessibilityPoint(Options):
    """Wrappers for Highcharts ``series.line.accessibility.point`` options.

    NOTE(review): every option appears twice (a getter followed by a setter
    with the same name); the originals were presumably decorated with
    @property / @<name>.setter and the decorators were stripped during
    extraction — confirm against version control, since as written each
    later def shadows the earlier one.
    """

    def dateFormat(self):
        """Return the configured date format (default None)."""
        return self._config_get(None)

    def dateFormat(self, text: str):
        """Set the date format string."""
        self._config(text, js_type=False)

    def dateFormatter(self):
        """Return the configured date formatter callback (default None)."""
        return self._config_get(None)

    def dateFormatter(self, value: Any):
        """Set the date formatter callback."""
        self._config(value, js_type=False)

    def describeNull(self):
        """Return whether null points are described (default True)."""
        return self._config_get(True)

    def describeNull(self, flag: bool):
        """Set whether null points are described."""
        self._config(flag, js_type=False)

    def descriptionFormat(self):
        """Return the configured description format (default None)."""
        return self._config_get(None)

    def descriptionFormat(self, text: str):
        """Set the description format string."""
        self._config(text, js_type=False)

    def descriptionFormatter(self):
        """Return the configured description formatter (default None)."""
        return self._config_get(None)

    def descriptionFormatter(self, value: Any):
        """Set the description formatter callback."""
        self._config(value, js_type=False)

    def valueDecimals(self):
        """Return the number of decimals for values (default None)."""
        return self._config_get(None)

    def valueDecimals(self, num: float):
        """Set the number of decimals for values."""
        self._config(num, js_type=False)

    def valueDescriptionFormat(self):
        """Return the value description format (with its documented default)."""
        return self._config_get('{xDescription}{separator}{value}.')

    def valueDescriptionFormat(self, text: str):
        """Set the value description format string."""
        self._config(text, js_type=False)

    def valuePrefix(self):
        """Return the value prefix (default None)."""
        return self._config_get(None)

    def valuePrefix(self, text: str):
        """Set the value prefix string."""
        self._config(text, js_type=False)

    def valueSuffix(self):
        """Return the value suffix (default None)."""
        return self._config_get(None)

    def valueSuffix(self, text: str):
        """Set the value suffix string."""
        self._config(text, js_type=False)
def attrs_help(input_classes: Union[(list, tuple)], module_name: str, extract_fnc: Callable, max_indent: int) -> None:
    """Emit help for *input_classes*, then for any enums they left over.

    :param input_classes: classes to document
    :param module_name: module the classes come from
    :param extract_fnc: callable used to extract the documentation
    :param max_indent: maximum indentation level of the rendered help
    """
    remaining = handle_help_main(input_classes, module_name, extract_fnc, max_indent)
    handle_help_enums(
        other_list=remaining,
        module_name=module_name,
        extract_fnc=extract_fnc,
        max_indent=max_indent,
    )
class Procs(Module):
    """Weevely-style module that lists remote processes (a `ps` equivalent)
    by walking /proc on the target via an injected PHP payload.

    NOTE(review): several PHP function names inside the payload appear to
    have been stripped during extraction — e.g. `$lines = (PHP_EOL, ...)`
    was presumably `explode(PHP_EOL, ...)`, `$stime = (join(...))` likely
    `filectime(join(...))`, `$pids = ($procpath)` likely `scandir($procpath)`,
    and `_get_contents` / `_replace` likely `file_get_contents` /
    `str_replace`. Confirm against version control before relying on the
    payload as written.
    """
    aliases = ['ps']

    def init(self):
        """Register module metadata."""
        self.register_info({'author': ['paddlesteamer'], 'license': 'GPLv3'})

    def run(self, **kwargs):
        """Execute the PHP process-listing payload on the target and return its output."""
        return PhpCode('\n    class UIDMap {\n    private $map = array();\n\n    public function __construct() {\n    $lines = (PHP_EOL, file_get_contents(\'/etc/passwd\'));\n\n    if (!$lines) return;\n\n    foreach ($lines as $line) {\n    $els = explode(\':\', $line);\n\n    $uname = $els[0];\n\n    if (strlen($uname) > 8) $uname = substr($uname, 0, 7) . \'+\';\n\n    $this->map[$els[2]] = $uname;\n    }\n    }\n\n    public function getUserName($uid) {\n    $uname = $this->map[$uid];\n\n    if (!$uname) return $uid;\n\n    return $uname;\n    }\n    }\n\n    function getTtyName($ttynr) {\n    $major = ($ttynr >> 8) & 0xffffffff ;\n    $minor = $ttynr & 0xff;\n\n    if ($major === 4) {\n    if ($minor < 64) return \'tty\'.$minor;\n\n    return \'ttyS\'.(255 - $minor);\n    } else if ($major >= 136 && $major <=143) {\n    return \'pts/\'.$minor;\n    }\n\n    // unsupported tty\n    return \'?\';\n    }\n\n\n    function getProcInfo($procpath, $pid) {\n    global $uidmap;\n\n    $info = array(\n    \'UID\' => \'?\',\n    \'PID\' => \'?\',\n    \'PPID\' => \'?\',\n    \'STIME\' => \'?\',\n    \'TTY\' => \'?\',\n    \'TIME\' => \'?\',\n    \'CMD\' => \'?\'\n    );\n\n    $content = _get_contents(join(DIRECTORY_SEPARATOR, array($procpath, $pid, \'stat\')));\n\n    if (!$content) return $info;\n\n    $stats = explode(\' \', $content);\n\n    $info[\'PID\'] = $stats[0];\n    $info[\'PPID\'] = $stats[3];\n\n    // calculate stime and time\n    // since there is no way to call\n    // sysconf(_SC_CLK_TCK), let\'s use\n    // a workaround with filectime\n    $curtime = time();\n    $stime = (join(DIRECTORY_SEPARATOR, array($procpath, $pid)));\n    if (date(\'j\', $curtime) === date(\'j\', $stime)) {\n    $info[\'STIME\'] = date(\'H:i\', $stime);\n    } else {\n    $info[\'STIME\'] = date(\'Md\', $stime);\n    }\n    $time = $curtime - $stime;\n    $hours = floor($time / 3600);\n    $minutes = floor(($time % 3600) / 60);\n    $seconds = $time % 60;\n    $info[\'TIME\'] = sprintf("%\'.02d:%\'.02d:%\'.02d", $hours, $minutes, $seconds);\n\n    $info[\'TTY\'] = getTtyName($stats[6]);\n\n    // get cmd\n    $cmd = _get_contents(join(DIRECTORY_SEPARATOR, array($procpath, $pid, \'cmdline\')));\n\n    if ($cmd && strlen($cmd) > 0) {\n    $cmd = _replace("\x00", \' \', $cmd);\n    } else {\n    $cmd = _replace(\'(\', \'[\', str_replace(\')\', \']\', $stats[1]));\n    }\n    $info[\'CMD\'] = $cmd;\n\n    // get user\n    $content = (PHP_EOL, file_get_contents(join(DIRECTORY_SEPARATOR, array($procpath, $pid, \'status\'))));\n    foreach ($content as $line) {\n    $els = explode("\t", $line);\n    if ($els[0] !== \'Uid:\') continue;\n\n    $info[\'UID\'] = $uidmap->getUserName($els[1]);\n    break;\n    }\n\n    return $info;\n    }\n\n\n    function main() {\n    global $uidmap;\n\n    // check proc\n    $procpath = \'/proc\';\n    if (!file_exists(\'/proc\')) {\n    $lines = (PHP_EOL, file_get_contents(\'/etc/mtab\'));\n\n    if (!$lines) {\n    print(\'Unable to list processes.\' . PHP_EOL);\n    return;\n    }\n\n    foreach ($lines as $line) {\n    $els = explode(\' \', $line);\n\n    if ($els[0] !== \'proc\') continue;\n\n    $procpath = $els[1];\n    }\n\n    if ($procpath === \'/proc\') {\n    print(\'Unable to list processes.\' . PHP_EOL);\n    return;\n    }\n    }\n\n    // init uidmap\n    $uidmap = new UIDMap();\n\n    $pids = ($procpath);\n\n    $format = \'%-8s %5s %5s %5s %-8s %10s %s\' . PHP_EOL;\n    printf($format, \'UID\', \'PID\', \'PPID\', \'STIME\', \'TTY\', \'TIME\', \'CMD\');\n    foreach ($pids as $pid) {\n    if (!is_numeric($pid)) continue;\n\n    $proc = getProcInfo($procpath, $pid);\n    printf($format, $proc[\'UID\'], $proc[\'PID\'], $proc[\'PPID\'], $proc[\'STIME\'], $proc[\'TTY\'], $proc[\'TIME\'], $proc[\'CMD\']);\n    }\n\n    }\n\n    main();\n    ').run()
class Terminal(object):
    """Terminal wrapper that resolves terminfo capabilities as attributes.

    Wraps an output *stream* (default ``sys.__stdout__``) and looks up
    formatting capabilities (colors, cursor movement, fullscreen, ...)
    through curses' terminfo database.  When the stream is not a tty and
    styling is not forced, resolved capabilities become no-op strings.

    NOTE(review): this matches the ``blessings`` library's ``Terminal``;
    ``does_styling``/``is_a_tty``/``height``/``width``/``color``/
    ``on_color``/``number_of_colors``/``_foreground_color``/
    ``_background_color`` read like ``@property`` definitions, and
    ``location``/``fullscreen``/``hidden_cursor`` contain a bare ``yield``
    like ``@contextlib.contextmanager`` bodies -- the decorators appear to
    have been stripped from this copy; confirm against the original source.
    """
    def __init__(self, kind=None, stream=None, force_styling=False):
        """Initialize against *stream*, probing it for tty-ness.

        :param kind: terminal type name; falls back to ``$TERM`` or 'dumb'
        :param stream: output stream; defaults to ``sys.__stdout__``
        :param force_styling: truthy forces styling on non-ttys;
            ``None`` explicitly disables styling even on a tty
        """
        if (stream is None):
            # Use the real stdout, not sys.stdout, so user-level
            # redirection does not break tty detection.
            stream = sys.__stdout__
        try:
            stream_descriptor = (stream.fileno() if (hasattr(stream, 'fileno') and callable(stream.fileno)) else None)
        except IOUnsupportedOperation:
            # File-like objects (e.g. StringIO) may raise rather than
            # simply lack fileno().
            stream_descriptor = None
        self._is_a_tty = ((stream_descriptor is not None) and isatty(stream_descriptor))
        # force_styling=None wins over everything and turns styling off.
        self._does_styling = ((self.is_a_tty or force_styling) and (force_styling is not None))
        # Fall back to stdout's descriptor for terminfo setup and
        # window-size ioctls when the stream has no descriptor.
        self._init_descriptor = (sys.__stdout__.fileno() if (stream_descriptor is None) else stream_descriptor)
        if self.does_styling:
            try:
                setupterm((kind or environ.get('TERM', 'dumb') or 'dumb'), self._init_descriptor)
            except curses.error:
                # Unknown terminal type: degrade gracefully to no styling.
                self._does_styling = False
        self.stream = stream
    # Friendly attribute aliases mapped to raw terminfo capability names.
    _sugar = dict(save='sc', restore='rc', clear_eol='el', clear_bol='el1', clear_eos='ed', position='cup', enter_fullscreen='smcup', exit_fullscreen='rmcup', move='cup', move_x='hpa', move_y='vpa', move_left='cub1', move_right='cuf1', move_up='cuu1', move_down='cud1', hide_cursor='civis', normal_cursor='cnorm', reset_colors='op', normal='sgr0', reverse='rev', italic='sitm', no_italic='ritm', shadow='sshm', no_shadow='rshm', standout='smso', no_standout='rmso', subscript='ssubm', no_subscript='rsubm', superscript='ssupm', no_superscript='rsupm', underline='smul', no_underline='rmul')
    def __getattr__(self, attr):
        """Resolve *attr* as a formatting capability and memoize it.

        setattr caches the result on the instance so __getattr__ only
        fires once per capability name.
        """
        resolution = (self._resolve_formatter(attr) if self.does_styling else NullCallableString())
        setattr(self, attr, resolution)
        return resolution
    def does_styling(self):
        """Whether capabilities resolve to real escape sequences."""
        return self._does_styling
    def is_a_tty(self):
        """Whether the wrapped stream's descriptor is a tty."""
        return self._is_a_tty
    def height(self):
        """Terminal height in character rows (or None if unknown)."""
        return self._height_and_width()[0]
    def width(self):
        """Terminal width in character columns (or None if unknown)."""
        return self._height_and_width()[1]
    def _height_and_width(self):
        """Return (rows, cols), probing ioctl first, then the environment."""
        # Try the stream's descriptor, then real stdout.
        for descriptor in (self._init_descriptor, sys.__stdout__):
            try:
                # TIOCGWINSZ fills a struct winsize: rows, cols, xpix, ypix.
                return struct.unpack('hhhh', ioctl(descriptor, TIOCGWINSZ, ('\x00' * 8)))[0:2]
            except IOError:
                pass
        try:
            # Fall back to $LINES/$COLUMNS; int(None) raises TypeError.
            return (int(environ.get('LINES')), int(environ.get('COLUMNS')))
        except TypeError:
            return (None, None)
    def location(self, x=None, y=None):
        """Context manager: move the cursor, then restore it on exit."""
        # Save cursor position before moving anywhere.
        self.stream.write(self.save)
        if ((x is not None) and (y is not None)):
            self.stream.write(self.move(y, x))
        elif (x is not None):
            self.stream.write(self.move_x(x))
        elif (y is not None):
            self.stream.write(self.move_y(y))
        try:
            (yield)
        finally:
            # Always restore, even if the body raised.
            self.stream.write(self.restore)
    def fullscreen(self):
        """Context manager: enter the alternate screen, restore on exit."""
        self.stream.write(self.enter_fullscreen)
        try:
            (yield)
        finally:
            self.stream.write(self.exit_fullscreen)
    def hidden_cursor(self):
        """Context manager: hide the cursor, re-show it on exit."""
        self.stream.write(self.hide_cursor)
        try:
            (yield)
        finally:
            self.stream.write(self.normal_cursor)
    def color(self):
        """Callable producing foreground-color formatting strings."""
        return ParametrizingString(self._foreground_color, self.normal)
    def on_color(self):
        """Callable producing background-color formatting strings."""
        return ParametrizingString(self._background_color, self.normal)
    def number_of_colors(self):
        """Number of colors the terminal supports (0 when not styling)."""
        if (not self._does_styling):
            return 0
        # tigetnum returns -2/-1 for unsupported/missing; clamp to 0.
        colors = tigetnum('colors')
        return (colors if (colors >= 0) else 0)
    def _resolve_formatter(self, attr):
        """Turn an attribute name into a color, capability, or compound."""
        if (attr in COLORS):
            return self._resolve_color(attr)
        elif (attr in COMPOUNDABLES):
            return self._formatting_string(self._resolve_capability(attr))
        else:
            # e.g. 'bold_red_on_white' splits into compoundable pieces.
            formatters = split_into_formatters(attr)
            if all(((f in COMPOUNDABLES) for f in formatters)):
                return self._formatting_string(u''.join((self._resolve_formatter(s) for s in formatters)))
            else:
                return ParametrizingString(self._resolve_capability(attr))
    def _resolve_capability(self, atom):
        """Look up a terminfo string capability, '' when undefined."""
        code = tigetstr(self._sugar.get(atom, atom))
        if code:
            # terminfo bytes decode losslessly as latin1.
            return code.decode('latin1')
        return u''
    def _resolve_color(self, color):
        """Resolve names like 'red', 'bright_green', 'on_blue'."""
        color_cap = (self._background_color if ('on_' in color) else self._foreground_color)
        # 'bright_' colors live 8 slots above the base curses color.
        offset = (8 if ('bright_' in color) else 0)
        base_color = color.rsplit('_', 1)[(- 1)]
        return self._formatting_string(color_cap((getattr(curses, ('COLOR_' + base_color.upper())) + offset)))
    def _foreground_color(self):
        """ANSI setaf when available, legacy setf otherwise."""
        return (self.setaf or self.setf)
    def _background_color(self):
        """ANSI setab when available, legacy setb otherwise."""
        return (self.setab or self.setb)
    def _formatting_string(self, formatting):
        """Wrap an escape sequence so it resets to 'normal' afterwards."""
        return FormattingString(formatting, self.normal)
# NOTE(review): the bare expressions below read like stripped CLI
# decorators (e.g. Click-style ``@_user.command()`` / ``@click.option``
# lines and a ``@pass_context``) -- confirm against the original source.
_user.command()
_context
('--first-name', help=MODULE_OPTIONS['first_name']['help'])
('--last-name', help=MODULE_OPTIONS['last_name']['help'])
('--email', help=MODULE_OPTIONS['email']['help'])
('--login', help=MODULE_OPTIONS['login']['help'])
('--group-ids', help=MODULE_OPTIONS['group_ids']['help'])
def set(ctx, **kwargs):
    """Forward the collected CLI option values to the user module.

    :param ctx: CLI context object passed through to the module
    :param kwargs: option name -> value mapping from the command line
    """
    MODULE.set_options(ctx, kwargs)
class TestCoprActionsGeneration(CoprsTestCase):
    """Tests that Copr web/API operations enqueue the expected actions.

    NOTE(review): the bare ``('u1')`` expressions and ``.usefixtures(...)``
    lines below read like stripped pytest decorators (likely a
    transaction/user decorator and ``@pytest.mark.usefixtures``) --
    confirm against the original source.
    """
    ('u1')
    def test_createrepo_priority(self, f_users, f_mock_chroots, f_db):
        """New project creates a highest-priority createrepo action;
        a manual createrepo request creates a default-priority one."""
        self.test_client.post('/coprs/{0}/new/'.format(self.u1.name), data={'name': 'foo', 'chroots': ['fedora-rawhide-i386'], 'arches': ['i386']})
        copr = CoprsLogic.get(self.u1.username, 'foo').one()
        actions = ActionsLogic.get_many(ActionTypeEnum('createrepo')).all()
        assert (len(actions) == 1)
        assert (actions[0].priority == ActionPriorityEnum('highest'))
        # Explicit createrepo request on the same project.
        self.test_client.post('/coprs/id/{0}/createrepo/'.format(copr.id), data={})
        actions = ActionsLogic.get_many(ActionTypeEnum('createrepo')).all()
        assert (len(actions) == 2)
        # Manually requested createrepo gets the default (0) priority.
        assert (actions[1].priority == 0)
    ('u1')
    .usefixtures('f_users', 'f_users_api', 'f_mock_chroots', 'f_db')
    def test_createrepo_on_reenable(self):
        """Re-enabling a previously-enabled chroot triggers createrepo
        only for chroots whose state actually changed."""
        self.api3.new_project('test', ['fedora-rawhide-i386', 'fedora-17-x86_64'])
        # Drop f17, add f18 ...
        self.api3.modify_project('test', chroots=['fedora-rawhide-i386', 'fedora-18-x86_64'])
        # ... then re-enable f17 while keeping the rest.
        self.api3.modify_project('test', chroots=['fedora-rawhide-i386', 'fedora-17-x86_64', 'fedora-18-x86_64'])
        actions = self.models.Action.query.all()
        assert ([ActionTypeEnum(a) for a in ['createrepo', 'gen_gpg_key', 'createrepo', 'createrepo']] == [a.action_type for a in actions])
        # Ignore the gen_gpg_key action; only createrepo payloads matter here.
        actions.pop(1)
        template = {'ownername': 'user1', 'projectname': 'test', 'project_dirnames': ['test'], 'appstream': False, 'devel': False}
        def _expected(action, chroots):
            # Compare each action's JSON payload against the shared template.
            template['chroots'] = chroots
            assert (json.loads(action.data) == template)
        _expected(actions[0], ['fedora-17-x86_64', 'fedora-rawhide-i386'])
        _expected(actions[1], ['fedora-18-x86_64'])
        _expected(actions[2], ['fedora-17-x86_64'])
    .usefixtures('f_u1_ts_client', 'f_mock_chroots', 'f_db')
    def test_fedora_review_project(self):
        """The new-fedora-review shortcut creates a preconfigured project
        and rejects a duplicate name with a form error."""
        create_chroot_function(['fedora-rawhide-x86_64'])
        route = '/coprs/{0}/new-fedora-review/'.format(self.transaction_username)
        resp = self.test_client.post(route, data={'name': 'test-fedora-review'}, follow_redirects=False)
        # Successful creation redirects straight to the add-build page.
        assert ('user1/test-fedora-review/add_build' in resp.headers['Location'])
        copr = self.models.Copr.query.get(1)
        assert (copr.full_name == 'user1/test-fedora-review')
        assert (len(copr.active_chroots) == 1)
        assert (copr.active_chroots[0].name == 'fedora-rawhide-x86_64')
        assert ('Fedora Review tool' in copr.description)
        assert ('You should ask the project owner' in copr.instructions)
        assert copr.fedora_review
        assert copr.unlisted_on_hp
        # Creating the same project again must fail with a form error.
        resp = self.test_client.post(route, data={'name': 'test-fedora-review'}, follow_redirects=True)
        assert (resp.status_code == 200)
        error = parse_web_form_error(resp.data, variant='b')
        assert (error == 'Error in project config')
class EasyBike(BaseSystem):
    """pybikes adapter for EasyBike (Brainbox Technology / Smoove SAS).

    Fetches a JSON station feed for one city and converts each entry
    into a BikeShareStation, optionally filtered by a bounding box.
    """
    # Flags consumed by the pybikes framework -- presumably marking this
    # system as synchronously updatable with a single unified feed;
    # TODO confirm against BaseSystem's documentation.
    sync = True
    unifeed = True
    meta = {'system': 'EasyBike', 'company': ['Brainbox Technology', 'Smoove SAS']}
    # NOTE(review): the feed URL literal below is truncated (unterminated
    # string) -- the endpoint was lost in this copy and must be restored
    # from the original source before this class can run.
    feed_url = '
    def __init__(self, tag, meta, city_uid, bbox=None):
        """Bind the class-level feed URL template to one city.

        :param city_uid: substituted into the feed URL template
        :param bbox: optional bounding box used to filter stations
        """
        super(EasyBike, self).__init__(tag, meta)
        self.feed_url = EasyBike.feed_url.format(city_uid=city_uid)
        self.bbox = bbox
    def update(self, scraper=None):
        """Download the feed and refresh ``self.stations``."""
        scraper = (scraper or utils.PyBikesScraper())
        stations = []
        data = json.loads(scraper.request(self.feed_url))
        stations = self.get_stations(data)
        if self.bbox:
            # Drop stations outside the configured bounding box.
            stations = utils.filter_bounds(stations, None, self.bbox)
        self.stations = list(stations)
    def get_stations(self, data):
        """Yield a BikeShareStation per entry in the parsed feed."""
        for station in data['stations']:
            name = station['description']
            longitude = float(station['lng'])
            latitude = float(station['lat'])
            bikes = int(station['free_bikes'])
            free = int(station['free_spaces'])
            extra = {'slots': int(station['total_spaces'])}
            station = BikeShareStation(name, latitude, longitude, bikes, free, extra)
            (yield station)
class OptionPlotoptionsVariablepieSonificationDefaultinstrumentoptionsMappingHighpassResonance(Options):
    """Generated Highcharts option wrapper for the sonification
    highpass-resonance mapping of variable-pie default instrument options.

    NOTE(review): each option below is defined twice with the same name
    (a getter returning ``self._config_get(None)`` and a setter calling
    ``self._config(...)``); these read like stripped ``@property`` /
    ``@name.setter`` pairs -- as written, the later (setter) definition
    shadows the getter. Confirm against the original generated source.
    """
    def mapFunction(self):
        # Getter: no explicit default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stored as a non-JS value.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: no explicit default.
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter: stored as a non-JS value.
        self._config(text, js_type=False)
    def max(self):
        # Getter: no explicit default.
        return self._config_get(None)
    def max(self, num: float):
        # Setter: stored as a non-JS value.
        self._config(num, js_type=False)
    def min(self):
        # Getter: no explicit default.
        return self._config_get(None)
    def min(self, num: float):
        # Setter: stored as a non-JS value.
        self._config(num, js_type=False)
    def within(self):
        # Getter: no explicit default.
        return self._config_get(None)
    def within(self, value: Any):
        # Setter: stored as a non-JS value.
        self._config(value, js_type=False)
def shuffle_queue():
    """Randomize the review queue order.

    Gives every prioritized note (priority set and > 0) a random
    ``lastscheduled`` timestamp within the past year, then rebuilds the
    priority queue so the new ordering takes effect.
    """
    reference = datetime.now()
    connection = _get_connection()
    rows = connection.execute('select id from notes where priority is not NULL and priority > 0').fetchall()
    # Push each note's lastscheduled 1..365 days into the past.
    params = []
    for row in rows:
        shifted = reference - timedelta(days=random.randint(1, 365))
        params.append((shifted.strftime('%Y-%m-%d-%H-%M-%S'), row[0]))
    connection.executemany('update notes set lastscheduled = ? where id = ?', params)
    connection.commit()
    connection.close()
    # Re-derive queue ordering from the updated timestamps.
    recalculate_priority_queue()
class ConnectTo():
    """Context manager that opens a connection via an interface factory.

    ``connected_interface`` is a callable taking the parsed config and
    returning a live connection object, which becomes the value bound by
    the ``with`` statement.
    """
    def __init__(self, connected_interface, config: ConfigParser):
        """Store the factory and configuration; nothing is opened yet."""
        self.interface = connected_interface
        self.config = config
        self.connection = None

    def __enter__(self):
        """Create the connection lazily and hand it to the caller."""
        connection = self.interface(self.config)
        self.connection = connection
        return connection

    def __exit__(self, *args):
        """No teardown is performed.

        NOTE(review): the connection is never closed here -- presumably
        the interface manages its own lifetime; confirm with callers.
        """
        pass
def extractNinjacrosstranslationBlogspotCom(item):
    """Parse a feed *item* from ninjacrosstranslation.blogspot.com.

    Returns a release message for recognized tags, ``None`` for preview
    posts or items without chapter/volume info, and ``False`` when no
    known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no usable chapter/volume, and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (feed tag, series name, translation type)
    mappings = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for feed_tag, series_name, tl_type in mappings:
        if feed_tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_MeshAdaptRestart_adaptiveTime_BackwardEuler(verbose=0):
    """Smoke test: run the dambreak case with PUMI mesh adaptation and
    adaptive time stepping; check_call raises on a non-zero exit code."""
    here = os.path.dirname(os.path.abspath(__file__))
    command = 'cd ' + here + '; parun -C "gen_mesh=False usePUMI=True adapt=1 fixedTimeStep=False" -D "adapt_0" dambreak_Colagrossi_so.py;'
    # Any solver failure surfaces as CalledProcessError from check_call.
    subprocess.check_call(command, shell=True)
    assert True
class OptionSeriesWordcloudSonificationTracksMappingHighpassFrequency(Options):
    """Generated Highcharts option wrapper for the sonification
    highpass-frequency mapping of wordcloud series tracks.

    NOTE(review): each option below is defined twice with the same name
    (a getter returning ``self._config_get(None)`` and a setter calling
    ``self._config(...)``); these read like stripped ``@property`` /
    ``@name.setter`` pairs -- as written, the later (setter) definition
    shadows the getter. Confirm against the original generated source.
    """
    def mapFunction(self):
        # Getter: no explicit default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stored as a non-JS value.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: no explicit default.
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter: stored as a non-JS value.
        self._config(text, js_type=False)
    def max(self):
        # Getter: no explicit default.
        return self._config_get(None)
    def max(self, num: float):
        # Setter: stored as a non-JS value.
        self._config(num, js_type=False)
    def min(self):
        # Getter: no explicit default.
        return self._config_get(None)
    def min(self, num: float):
        # Setter: stored as a non-JS value.
        self._config(num, js_type=False)
    def within(self):
        # Getter: no explicit default.
        return self._config_get(None)
    def within(self, value: Any):
        # Setter: stored as a non-JS value.
        self._config(value, js_type=False)
class SimulationContext():
    """Drives one batch of ERT forward-model simulations.

    On construction it prepares run paths for the masked realizations,
    runs pre-simulation workflows, and starts the simulations on a
    background thread via either the legacy ``JobQueue`` or the newer
    ``Scheduler`` (behind the 'scheduler' feature toggle).  Most query
    methods branch on which of the two queue implementations is in use.
    """
    def __init__(self, ert: 'EnKFMain', sim_fs: EnsembleAccessor, mask: npt.NDArray[np.bool_], itr: int, case_data: List[Tuple[(Any, Any)]]):
        self._ert = ert
        self._mask = mask
        if FeatureToggling.is_enabled('scheduler'):
            # The new Scheduler path currently only supports the LOCAL queue.
            if (ert.ert_config.queue_config.queue_system != QueueSystem.LOCAL):
                raise NotImplementedError()
            driver = create_driver(ert.ert_config.queue_config)
            self._job_queue = Scheduler(driver, max_running=ert.ert_config.queue_config.max_running)
        else:
            self._job_queue = JobQueue(ert.ert_config.queue_config)
        global_substitutions = ert.ert_config.substitution_list
        global_substitutions['<CASE_NAME>'] = _slug(sim_fs.name)
        # Expose each active realization's geo id as a magic string
        # for run-path templating.
        for (sim_id, (geo_id, _)) in enumerate(case_data):
            if mask[sim_id]:
                global_substitutions[f'<GEO_ID_{sim_id}_{itr}>'] = str(geo_id)
        self._run_context = RunContext(sim_fs=sim_fs, runpaths=Runpaths(jobname_format=ert.ert_config.model_config.jobname_format_string, runpath_format=ert.ert_config.model_config.runpath_format_string, filename=str(ert.ert_config.runpath_file), substitute=global_substitutions.substitute_real_iter), initial_mask=mask, iteration=itr)
        for realization_nr in self._run_context.active_realizations:
            self._run_context.sim_fs.state_map[realization_nr] = RealizationStorageState.INITIALIZED
        create_run_path(self._run_context, global_substitutions, self._ert.ert_config)
        self._ert.runWorkflows(HookRuntime.PRE_SIMULATION, None, self._run_context.sim_fs)
        self._sim_thread = self._run_simulations_simple_step()
        # Busy-wait until the queue has picked up jobs (or the thread died),
        # so callers immediately observe a consistent running state.
        while (self.isRunning() and (not self._job_queue.is_active())):
            sleep(0.1)
    def get_run_args(self, iens: int) -> 'RunArg':
        """Return the RunArg for realization *iens*; raise KeyError if absent."""
        for run_arg in iter(self._run_context):
            if ((run_arg is not None) and (run_arg.iens == iens)):
                return run_arg
        raise KeyError(f'No such realization: {iens}')
    def _run_simulations_simple_step(self) -> Thread:
        """Start the forward model on a daemon-less background thread."""
        sim_thread = Thread(target=(lambda : _run_forward_model(self._ert, self._job_queue, self._run_context)))
        sim_thread.start()
        return sim_thread
    def __len__(self) -> int:
        """Total number of realizations in the mask (active or not)."""
        return len(self._mask)
    def isRunning(self) -> bool:
        """True while the worker thread or the queue is still active."""
        return (self._sim_thread.is_alive() or self._job_queue.is_active())
    def getNumPending(self) -> int:
        """Count of realizations currently pending in the queue."""
        if isinstance(self._job_queue, JobQueue):
            return self._job_queue.count_status(JobStatus.PENDING)
        return self._job_queue.count_states()[JobState.PENDING]
    def getNumRunning(self) -> int:
        """Count of realizations currently running."""
        if isinstance(self._job_queue, JobQueue):
            return self._job_queue.count_status(JobStatus.RUNNING)
        return self._job_queue.count_states()[JobState.RUNNING]
    def getNumSuccess(self) -> int:
        """Count of realizations that finished successfully."""
        if isinstance(self._job_queue, JobQueue):
            return self._job_queue.count_status(JobStatus.SUCCESS)
        return self._job_queue.count_states()[JobState.COMPLETED]
    def getNumFailed(self) -> int:
        """Count of realizations that failed."""
        if isinstance(self._job_queue, JobQueue):
            return self._job_queue.count_status(JobStatus.FAILED)
        return self._job_queue.count_states()[JobState.FAILED]
    def getNumWaiting(self) -> int:
        """Count of realizations waiting to be submitted."""
        if isinstance(self._job_queue, JobQueue):
            return self._job_queue.count_status(JobStatus.WAITING)
        return self._job_queue.count_states()[JobState.WAITING]
    def didRealizationSucceed(self, iens: int) -> bool:
        """Whether realization *iens* completed successfully."""
        if isinstance(self._job_queue, JobQueue):
            queue_index = self.get_run_args(iens).queue_index
            if (queue_index is None):
                raise ValueError('Queue index not set')
            return (self._job_queue.job_list[queue_index].queue_status == JobStatus.SUCCESS)
        # Scheduler path: unknown realizations count as not-succeeded.
        if (iens in self._job_queue._jobs):
            return (self._job_queue._jobs[iens].state == JobState.COMPLETED)
        return False
    def didRealizationFail(self, iens: int) -> bool:
        """Inverse of didRealizationSucceed (unknown == failed)."""
        return (not self.didRealizationSucceed(iens))
    def isRealizationFinished(self, iens: int) -> bool:
        """Whether realization *iens* has reached a terminal state."""
        if isinstance(self._job_queue, JobQueue):
            run_arg = self.get_run_args(iens)
            queue_index = run_arg.queue_index
            if (queue_index is not None):
                # Finished == neither running nor still waiting.
                return (not (self._job_queue.job_list[queue_index].is_running() or (self._job_queue.job_list[queue_index].queue_status == JobStatus.WAITING)))
            else:
                # Not yet submitted to the queue.
                return False
        if (iens not in self._job_queue._jobs):
            return False
        # Explicit terminal/non-terminal classification per scheduler state.
        state_to_finished_or_not = {JobState.WAITING: False, JobState.SUBMITTING: False, JobState.PENDING: False, JobState.RUNNING: False, JobState.ABORTING: False, JobState.COMPLETED: True, JobState.FAILED: True, JobState.ABORTED: True}
        return state_to_finished_or_not[self._job_queue._jobs[iens].state]
    def __repr__(self) -> str:
        # Only implemented for the legacy JobQueue.
        if (not isinstance(self._job_queue, JobQueue)):
            raise NotImplementedError
        running = ('running' if self.isRunning() else 'not running')
        numRunn = self.getNumRunning()
        numSucc = self.getNumSuccess()
        numFail = self.getNumFailed()
        numWait = self.getNumWaiting()
        return f'SimulationContext({running}, #running = {numRunn}, #success = {numSucc}, #failed = {numFail}, #waiting = {numWait})'
    def get_sim_fs(self) -> EnsembleAccessor:
        """Storage accessor the simulations write into."""
        return self._run_context.sim_fs
    def stop(self) -> None:
        """Kill all queued/running jobs and join the worker thread."""
        self._job_queue.kill_all_jobs()
        self._sim_thread.join()
    def job_progress(self, iens: int) -> Optional[ForwardModelStatus]:
        """Forward-model status for *iens*, or None if not started yet."""
        run_arg = self.get_run_args(iens)
        if isinstance(self._job_queue, JobQueue):
            queue_index = run_arg.queue_index
            if (queue_index is None):
                # Job not submitted yet; no status file to load.
                return None
            if (self._job_queue.job_list[queue_index].queue_status == JobStatus.WAITING):
                return None
        elif ((iens not in self._job_queue._jobs) or (self._job_queue._jobs[iens].state == JobState.WAITING)):
            return None
        return ForwardModelStatus.load(run_arg.runpath)
    def run_path(self, iens: int) -> str:
        """Filesystem run path for realization *iens*."""
        return self.get_run_args(iens).runpath
    def job_status(self, iens: int) -> Optional['JobStatus']:
        """Legacy JobStatus for *iens* (scheduler states are translated)."""
        if isinstance(self._job_queue, JobQueue):
            run_arg = self.get_run_args(iens)
            queue_index = run_arg.queue_index
            if (queue_index is None):
                return None
            int_status = self._job_queue.job_list[queue_index].queue_status
            return JobStatus(int_status)
        # Map scheduler JobState values onto the legacy JobStatus enum.
        state_to_legacy = {JobState.WAITING: JobStatus.WAITING, JobState.SUBMITTING: JobStatus.SUBMITTED, JobState.PENDING: JobStatus.PENDING, JobState.RUNNING: JobStatus.RUNNING, JobState.ABORTING: JobStatus.DO_KILL, JobState.COMPLETED: JobStatus.SUCCESS, JobState.FAILED: JobStatus.FAILED, JobState.ABORTED: JobStatus.IS_KILLED}
        return state_to_legacy[self._job_queue._jobs[iens].state]
def get_mouse_target(duration: float=INFINITE, position: POSITION_T=RANDOM, width: int=50, height: int=50, brush: QtGui.QColor=QtCore.Qt.cyan, pen: QtGui.QColor=QtCore.Qt.transparent, start_at: float=BEGINNING, hover_time: float=0.0) -> typing.List[Stimulus]:
    """Build a hoverable rectangular mouse-target stimulus.

    The rectangle is placed at *position* (resolved via get_position,
    RANDOM by default), filled with *brush* and outlined with *pen*.
    Hover-enter after *hover_time* triggers the deferred 'create_end_timer'
    callback and hover-leave cancels it -- presumably ending the trial on
    a sustained hover; confirm against the QGraphicsRectItemWithHover
    implementation.

    NOTE(review): the return annotation says List[Stimulus] but a single
    QtStimulus is returned -- confirm which the callers expect.
    """
    # Resolve RANDOM/relative positions against the target's size.
    position = get_position(position, (width, height))
    return QtStimulus(start_at=start_at, duration=duration, qt_type='QGraphicsRectItemWithHover', callbacks=[('setRect', (position, width, height)), ('setBrush', brush), ('setPen', pen), ('setHoverEnterCallAt', hover_time), ('setHoverEnterCallback', Deferred('create_end_timer')), ('setHoverLeaveCallback', Deferred('cancel_end_timer'))])
def add_metaclass(metaclass):
    """Class decorator that re-creates the decorated class under *metaclass*.

    Python-2/3 compatible alternative to the ``metaclass=`` keyword: the
    original class body is copied and a brand-new class with the same
    name and bases is built by calling *metaclass* directly.
    """
    def wrapper(cls):
        body = dict(cls.__dict__)
        slots = body.get('__slots__')
        if slots is not None:
            # A bare string means a single slot name.
            if isinstance(slots, text_type):
                slots = [slots]
            # Slot descriptors belong to the old class; the new class
            # re-creates them from __slots__ itself.
            for slot_name in slots:
                body.pop(slot_name)
        # These are auto-created per class and must not be inherited
        # into the rebuilt namespace.
        body.pop('__dict__', None)
        body.pop('__weakref__', None)
        if hasattr(cls, '__qualname__'):
            body['__qualname__'] = cls.__qualname__
        return metaclass(cls.__name__, cls.__bases__, body)
    return wrapper
def downgrade():
    """Revert the migration: scale the affected epoch columns back up by
    1000 (seconds -> milliseconds, presumably -- confirm against the
    matching upgrade())."""
    statements = (
        'UPDATE feed_event SET start_time = start_time * 1000, end_time = end_time * 1000, timestamp = timestamp * 1000',
        'UPDATE kronos_device SET discoveredAt = discoveredAt * 1000, lastPingedAt = lastPingedAt * 1000',
        'UPDATE kronos_device_sensors SET timestamp = timestamp * 1000',
        'UPDATE hopper_references SET timestamp = timestamp * 1000',
        'UPDATE kronos_gateway SET discoveredAt = discoveredAt * 1000',
        'UPDATE pets SET birthday = birthday * 1000',
    )
    for statement in statements:
        op.execute(statement)
# NOTE(review): the bare expression below reads like a stripped
# refetch-frequency decorator (``@refetch_frequency(timedelta(days=1))``)
# -- confirm against the original parser source.
_frequency(timedelta(days=1))
def fetch_exchange(zone_key1: str, zone_key2: str, session: Session=Session(), target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> list:
    """Fetch cross-border exchange flows involving Ireland from EirGrid.

    Returns a list of data points with netFlow, sortedZoneKeys, datetime
    and source keys.  Raises ParserException for the unsupported
    GB-NIR<->IE interconnection.

    NOTE(review): the mutable ``session=Session()`` default is shared
    across calls, and ``assert`` is used for input validation (stripped
    under ``python -O``) -- both worth confirming/refactoring upstream.
    """
    if (target_datetime is None):
        target_datetime = datetime.now().replace(tzinfo=IE_TZ)
    # Canonical 'A->B' key, alphabetically ordered.
    sortedZoneKeys = '->'.join(sorted([zone_key1, zone_key2]))
    if (sortedZoneKeys == 'GB-NIR->IE'):
        raise ParserException(parser='IE.py', message='the GB-NIR_IE interconnection is unsupported.')
    exchange_data = fetch_data(target_datetime=target_datetime, zone_key=zone_key2, kind='exchange', session=session)
    assert (len(exchange_data) > 0)
    # Keep only the rows for this specific interconnector field.
    filtered_exchanges = [item for item in exchange_data if (item['FieldName'] == ZONE_MAPPING[zone_key2]['exchange'])]
    exchange = []
    for item in filtered_exchanges:
        data_point = {'netFlow': item['Value'], 'sortedZoneKeys': sortedZoneKeys, 'datetime': datetime.strptime(item['EffectiveTime'], '%d-%b-%Y %H:%M:%S').replace(tzinfo=IE_TZ), 'source': 'eirgridgroup.com'}
        exchange += [data_point]
    return exchange
class ICommandStack(Interface):
    """Interface for an undo/redo command stack.

    NOTE(review): the method definitions below had empty bodies in this
    copy (interface methods typically carry only a docstring); docstrings
    have been restored here as placeholders -- confirm the original
    wording against the upstream interface definition.
    """
    # True when the stack is back at its last-saved position -- TODO confirm.
    clean = Bool()
    # Name describing the command that would be redone -- TODO confirm.
    redo_name = Str()
    # The undo manager coordinating multiple stacks.
    undo_manager = Instance(IUndoManager)
    # Name describing the command that would be undone -- TODO confirm.
    undo_name = Str()
    def begin_macro(self, name):
        """Begin recording a macro of commands under *name*."""
    def clear(self):
        """Discard all commands on the stack."""
    def end_macro(self):
        """Finish recording the current macro."""
    def push(self, command):
        """Execute *command* and place it on the stack."""
    def redo(self, sequence_nr=0):
        """Redo commands (up to *sequence_nr* when given)."""
    def undo(self, sequence_nr=0):
        """Undo commands (back to *sequence_nr* when given)."""
def unpack_function(file_path, tmp_dir):
    """Extract an ARJ archive into *tmp_dir* and return the tool output.

    The input file is symlinked into a temporary staging directory under
    a '.arj' name because the arj tool selects its handler by extension.
    """
    with TemporaryDirectory() as staging_dir:
        staged_name = '{}.arj'.format(Path(file_path).name)
        staged_path = str(Path(staging_dir) / staged_name)
        symlink(file_path, staged_path)
        # x = extract with paths, -r recurse, -y assume yes to prompts.
        command = 'arj x -r -y {} {}'.format(staged_path, tmp_dir)
        output = execute_shell_command(command, timeout=600)
        return {'output': output}
def centralize(context, udim_tile, column, row):
    """Center each selected UV island on the UDIM tile at (column, row).

    Works on the active object's edit-mesh UV layer; islands are moved
    by whole-unit offsets so their internal layout is preserved.
    """
    # Remember the user's UV selection mode; restored at the end.
    selection_mode = bpy.context.scene.tool_settings.uv_select_mode
    bm = bmesh.from_edit_mesh(bpy.context.active_object.data)
    uv_layers = bm.loops.layers.uv.verify()
    islands = utilities_uv.getSelectionIslands(bm, uv_layers, extend_selection_to_islands=True)
    for island in islands:
        island_loops = {loop for face in island for loop in face.loops}
        # Loops whose radial neighbour lies outside the island (or on a
        # mesh boundary) form the island's outline; its bbox is enough
        # to find the island center.
        boundary_loops = {loop for loop in island_loops if ((loop.link_loop_radial_next not in island_loops) or loop.edge.is_boundary)}
        bounds = utilities_uv.get_BBOX(boundary_loops, bm, uv_layers, are_loops=True)
        center = bounds['center']
        # round(-c + 0.5) snaps the island center onto the unit tile at
        # the origin; column/row then shift it to the requested tile.
        utilities_uv.move_island(island, (round(((- center.x) + 0.5)) + column), (round(((- center.y) + 0.5)) + row))
    bpy.ops.uv.select_mode(type='VERTEX')
    # Restore the original selection mode.
    bpy.context.scene.tool_settings.uv_select_mode = selection_mode
# NOTE(review): the bare ``.django_db`` expression reads like a stripped
# ``@pytest.mark.django_db`` decorator -- confirm against the original.
.django_db
def test_basic_data_set(client, monkeypatch, helpers, defc_codes, basic_ref_data, early_gtas, basic_faba):
    """Overview endpoint returns GTAS-derived totals for the early period
    with no award obligations/outlays and no 'additional' block."""
    # Pin 'now' inside the early submission window so DABS period
    # calculations resolve deterministically.
    helpers.patch_datetime_now(monkeypatch, EARLY_YEAR, EARLY_MONTH, 25)
    helpers.reset_dabs_cache()
    resp = client.get(OVERVIEW_URL)
    assert (resp.data == {'funding': [{'amount': EARLY_GTAS_CALCULATIONS['total_budgetary_resources'], 'def_code': 'M'}], 'total_budget_authority': EARLY_GTAS_CALCULATIONS['total_budgetary_resources'], 'spending': {'award_obligations': Decimal('0.0'), 'award_outlays': Decimal('0'), 'total_obligations': EARLY_GTAS_CALCULATIONS['total_obligations'], 'total_outlays': EARLY_GTAS_CALCULATIONS['total_outlays']}, 'additional': None})
def test_delete_oidc_provider_config(sample_tenant):
    """Deleting an OIDC provider config makes subsequent lookups fail
    with ConfigurationNotFoundError."""
    tenant_client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id)
    config = _create_oidc_provider_config(tenant_client)
    # Delete, then verify the config is really gone.
    tenant_client.delete_oidc_provider_config(config.provider_id)
    with pytest.raises(auth.ConfigurationNotFoundError):
        tenant_client.get_oidc_provider_config(config.provider_id)
class Module():
    """A renderable code module: a template plus its render keywords."""
    def __init__(self, template_src, render_kwds=None):
        """Wrap *template_src* as a template; copy *render_kwds* defensively.

        :param template_src: anything accepted by template_from()
        :param render_kwds: optional mapping of render-time keywords
        """
        self.template = template_from(template_src)
        # Copy so later caller-side mutation cannot leak into this module.
        self.render_kwds = ({} if (render_kwds is None) else dict(render_kwds))
    # NOTE(review): reads like a stripped @classmethod (first parameter
    # is ``cls`` and it returns ``cls(...)``) -- confirm against the
    # original source.
    def create(cls, func_or_str, render_kwds=None):
        """Alternate constructor from a function or code string.

        A 'prefix' parameter is injected as the default signature when
        *func_or_str* does not define one.
        """
        (signature, code) = extract_signature_and_value(func_or_str, default_parameters=['prefix'])
        return cls(template_def(signature, code), render_kwds=render_kwds)
class bsn_virtual_port_remove_request(bsn_header):
    """OpenFlow BSN experimenter message: virtual-port remove request.

    Auto-generated-style message class (loxigen pattern): fixed
    version/type/experimenter/subtype header fields plus the vport_no
    payload, with pack/unpack wire (de)serialization.

    NOTE(review): ``unpack`` takes ``reader`` as its first parameter and
    builds a fresh instance -- it reads like a stripped ``@staticmethod``;
    confirm against the generated original.
    """
    version = 6
    type = 4
    experimenter = 6035143
    subtype = 17
    def __init__(self, xid=None, vport_no=None):
        """Initialize with an optional transaction id and vport number."""
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (vport_no != None):
            self.vport_no = vport_no
        else:
            self.vport_no = 0
        return
    def pack(self):
        """Serialize to wire format, back-patching the length field."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder length; patched below once total size is known.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(struct.pack('!L', self.vport_no))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Deserialize from *reader*, asserting all fixed header fields."""
        obj = bsn_virtual_port_remove_request()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length
        # (4 bytes of the header were already consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 17)
        obj.vport_no = reader.read('!L')[0]
        return obj
    def __eq__(self, other):
        """Equality on type plus the two variable fields (xid, vport_no)."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.vport_no != other.vport_no):
            return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump via pretty-printer *q*."""
        q.text('bsn_virtual_port_remove_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('vport_no = ')
                q.text(('%#x' % self.vport_no))
        q.breakable()
        q.text('}')
def ddram_io():
    """Return the DDR3 SDRAM I/O constraint list for this FPGA board.

    Single 'ddram_sstl15' resource: address/bank/control pins on SSTL15,
    data and strobe pins with split-40 input termination, differential
    SSTL15 for clock/strobes, fast slew on all pins.  Pin assignments
    are board-specific -- confirm against the board schematic.
    """
    return [('ddram_sstl15', 0, Subsignal('a', Pins('R2 M6 N4 T1 N6 R7 V6 U7', 'R8 V7 R6 U6 T6 T8'), IOStandard('SSTL15')), Subsignal('ba', Pins('R1 P4 P2'), IOStandard('SSTL15')), Subsignal('ras_n', Pins('P3'), IOStandard('SSTL15')), Subsignal('cas_n', Pins('M4'), IOStandard('SSTL15')), Subsignal('we_n', Pins('P5'), IOStandard('SSTL15')), Subsignal('cs_n', Pins('U8'), IOStandard('SSTL15')), Subsignal('dm', Pins('L1 U1'), IOStandard('SSTL15')), Subsignal('dq', Pins('K5 L3 K3 L6 M3 M1 L4 M2', 'V4 T5 U4 V5 V1 T3 U3 R3'), IOStandard('SSTL15'), Misc('IN_TERM=UNTUNED_SPLIT_40')), Subsignal('dqs_p', Pins('N2 U2'), IOStandard('DIFF_SSTL15'), Misc('IN_TERM=UNTUNED_SPLIT_40')), Subsignal('dqs_n', Pins('N1 V2'), IOStandard('DIFF_SSTL15'), Misc('IN_TERM=UNTUNED_SPLIT_40')), Subsignal('clk_p', Pins('U9'), IOStandard('DIFF_SSTL15')), Subsignal('clk_n', Pins('V9'), IOStandard('DIFF_SSTL15')), Subsignal('cke', Pins('N5'), IOStandard('SSTL15')), Subsignal('odt', Pins('R5'), IOStandard('SSTL15')), Subsignal('reset_n', Pins('K6'), IOStandard('SSTL15')), Misc('SLEW=FAST'))]
def query_from_string(s, input_variables=None, is_async=True, output_writer=None, **extra_args):
    """Compile an LMQL query from the source string *s*.

    Writes *s* to a temporary '.lmql' file, loads it as a module, and
    configures the resulting query function's signature from
    *input_variables* (positional-or-keyword parameters in order).

    :param s: LMQL source code
    :param input_variables: names exposed as query parameters ([] default)
    :param is_async: whether the query runs asynchronously
    :param output_writer: writer for compiler/runtime output (silent default)
    :param extra_args: extra keyword arguments attached to the query
    :return: the configured query function object
    """
    if (input_variables is None):
        input_variables = []
    import inspect
    # Fix: tempfile.mktemp() is deprecated and race-prone (the name can be
    # claimed between mktemp() and open()); NamedTemporaryFile creates the
    # file atomically. delete=False keeps it on disk for load(), matching
    # the original behavior (the temp file was never removed).
    with tempfile.NamedTemporaryFile(mode='w', suffix='.lmql', encoding='utf-8', delete=False) as f:
        f.write(s)
        temp_lmql_file = f.name
    module = load(temp_lmql_file, output_writer=(output_writer or silent))
    scope = EmptyVariableScope()
    compiled_query_fct_args = inspect.getfullargspec(module.query.fct).args
    # Build a signature exposing exactly the declared input variables.
    fct_signature = inspect.Signature(parameters=[inspect.Parameter(name, inspect.Parameter.POSITIONAL_OR_KEYWORD) for name in input_variables])
    module.query.function_context = FunctionContext(fct_signature, compiled_query_fct_args, scope)
    module.query.is_async = is_async
    module.query.output_writer = output_writer
    module.query.extra_args = extra_args
    return module.query
class CommonNotification(models.Model):
    """Django model for a management-plane notification record."""
    # UUID primary key, auto-generated by default.
    # NOTE(review): editable=True on a primary key is unusual -- confirm
    # it is intentional (editable=False is the common pattern).
    notificationId = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=True)
    # One of the NotificationType choices.
    notificationType = models.CharField(max_length=255, choices=NotificationType)
    # Set to the current time on every save (auto_now).
    eventTime = models.DateTimeField(auto_now=True)
    # Distinguished name of the reporting system; optional.
    systemDN = models.CharField(max_length=255, null=True, blank=True)
    # Managed-object class, one of the MOIType choices.
    objectClass = models.CharField(max_length=255, choices=MOIType)
    # Free-form details about the affected object instance(s).
    objectInstanceInfos = models.TextField()
    additionalText = models.TextField()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.