# NOTE(review): dataset-extraction artifact — the original first two lines were
# a markdown table header ("code stringlengths 281 23.7M |" / "|---|"), not
# Python. Converted to comments so the module can parse.
class SceneTester(UnitTestDBBase):
    """Tests for stalker's ``Scene`` class, backed by a test database."""

    def setUp(self):
        """Create a repository, two projects and one committed Scene fixture."""
        super(SceneTester, self).setUp()
        from stalker import Type
        self.project_type = Type(name='Test Project Type', code='test', target_entity_type='Project')
        self.repository_type = Type(name='Test Type', code='test', target_entity_type='Repository')
        from stalker import Repository
        self.test_repository = Repository(name='Test Repository', code='TR', type=self.repository_type)
        from stalker import Project
        self.test_project = Project(name='Test Project 1', code='tp1', type=self.project_type, repository=self.test_repository)
        self.test_project2 = Project(name='Test Project 2', code='tp2', type=self.project_type, repository=self.test_repository)
        self.kwargs = {'name': 'Test Scene', 'code': 'tsce', 'description': 'A test scene', 'project': self.test_project}
        self.test_scene = Scene(**self.kwargs)
        from stalker.db.session import DBSession
        DBSession.add(self.test_scene)
        DBSession.commit()

    def test___auto_name__class_attribute_is_set_to_False(self):
        """Scenes must be explicitly named (no automatic naming)."""
        assert (Scene.__auto_name__ is False)

    def test_shots_attribute_defaults_to_empty_list(self):
        """A fresh Scene starts with no shots."""
        new_scene = Scene(**self.kwargs)
        assert (new_scene.shots == [])

    def test_shots_attribute_is_set_None(self):
        """Assigning None to ``shots`` raises a collection TypeError."""
        with pytest.raises(TypeError) as cm:
            self.test_scene.shots = None
        assert (str(cm.value) == 'Incompatible collection type: None is not list-like')

    def test_shots_attribute_is_set_to_other_than_a_list(self):
        """Assigning a list of non-Shot objects raises TypeError."""
        test_value = [1, 1.2, 'a string']
        with pytest.raises(TypeError) as cm:
            self.test_scene.shots = test_value
        assert (str(cm.value) == 'Scene.shots needs to be all stalker.models.shot.Shot instances, not int')

    def test_shots_attribute_is_a_list_of_other_objects(self):
        # NOTE(review): this body is byte-identical to
        # test_shots_attribute_is_set_to_other_than_a_list above — probably
        # one of them was meant to use append/extend; confirm upstream.
        test_value = [1, 1.2, 'a string']
        with pytest.raises(TypeError) as cm:
            self.test_scene.shots = test_value
        assert (str(cm.value) == 'Scene.shots needs to be all stalker.models.shot.Shot instances, not int')

    def test_shots_attribute_elements_tried_to_be_set_to_non_Shot_object(self):
        """Appending a non-Shot element raises TypeError."""
        with pytest.raises(TypeError) as cm:
            self.test_scene.shots.append('a string')
        assert (str(cm.value) == 'Scene.shots needs to be all stalker.models.shot.Shot instances, not str')

    def test_equality(self):
        """Scenes compare equal by attributes, never equal to a plain Entity."""
        new_seq1 = Scene(**self.kwargs)
        new_seq2 = Scene(**self.kwargs)
        from stalker import Entity
        new_entity = Entity(**self.kwargs)
        self.kwargs['name'] = 'a different scene'
        new_seq3 = Scene(**self.kwargs)
        assert (new_seq1 == new_seq2)
        assert (not (new_seq1 == new_seq3))
        assert (not (new_seq1 == new_entity))

    def test_inequality(self):
        """Inverse of test_equality for the ``!=`` operator."""
        new_seq1 = Scene(**self.kwargs)
        new_seq2 = Scene(**self.kwargs)
        from stalker import Entity
        new_entity = Entity(**self.kwargs)
        self.kwargs['name'] = 'a different scene'
        new_seq3 = Scene(**self.kwargs)
        assert (not (new_seq1 != new_seq2))
        assert (new_seq1 != new_seq3)
        assert (new_seq1 != new_entity)

    def test_ProjectMixin_initialization(self):
        """The ``project`` argument is wired through ProjectMixin."""
        from stalker import Type
        project_type = Type(name='Commercial', code='comm', target_entity_type='Project')
        from stalker import Project
        new_project = Project(name='Test Project', code='tp', type=project_type, repository=self.test_repository)
        self.kwargs['project'] = new_project
        new_scene = Scene(**self.kwargs)
        assert (new_scene.project == new_project)

    def test___strictly_typed___is_False(self):
        """Scenes do not require a Type instance."""
        # NOTE(review): removed a stray trailing '|' extraction artifact here.
        assert (Scene.__strictly_typed__ is False)
def extractThenoobtranslatorXyz(item):
    """Parse a thenoobtranslator.xyz feed item into a release message.

    Returns ``None`` for previews or items without a chapter/volume number,
    a release message when the title prefix or tags match a known series,
    and ``False`` when nothing matches.
    (Removed a stray trailing '|' extraction artifact from the last line.)
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    # Untagged posts: fall back to matching a known title prefix.
    if (item['tags'] == ['Uncategorized']):
        titlemap = [('PS Chapter ', 'Perfect Superstar', 'translated'), ('HDLL Chapter ', "House Dad's Literary Life", 'translated')]
        for (titlecomponent, name, tl_type) in titlemap:
            if (titlecomponent.lower() in item['title'].lower()):
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Tagged posts: match any known tag spelling to its canonical series name.
    tagmap = [('Perfect Superstar', 'Perfect Superstar', 'translated'), ("House Dad's Literary Life", "House Dad's Literary Life", 'translated'), ('House Dad Literary Life', "House Dad's Literary Life", 'translated')]
    for (tagname, name, tl_type) in tagmap:
        if (tagname in item['tags']):
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestUpdateBuilds(CoprsTestCase):
    """Tests for the copr backend ``/backend/update/`` and cancel endpoints.

    NOTE(review): the epoch timestamps in the JSON fixtures and dicts were
    lost from the original source (blank values after started_on/ended_on —
    a syntax/JSON error).  They are restored with the self-consistent
    placeholder 1390866440 (and 1390866441 where a second distinct value is
    needed).  The assertions below only compare against the same constants,
    so the exact epoch chosen does not matter — confirm against upstream
    history if exact values are required.
    """

    built_packages = """
{
  "packages":[
    {
      "name":"example",
      "epoch":0,
      "version":"1.0.14",
      "release":"1.fc30",
      "arch":"x86_64"
    }
  ]
}"""

    # Build 1 started.
    data1 = """
{
  "builds":[
    {
      "id": 1,
      "copr_id": 2,
      "result_dir": "bar",
      "started_on": 1390866440
    }
  ]
}"""

    # Build 1 ended successfully with one built package.
    data2 = """
{
  "builds":[
    {
      "id": 1,
      "copr_id": 2,
      "status": 1,
      "chroot": "fedora-18-x86_64",
      "result_dir": "bar",
      "results": {
        "packages":[
          {
            "name":"example",
            "epoch":0,
            "version":"1.0.14",
            "release":"1.fc30",
            "arch":"x86_64"
          }
        ]
      },
      "ended_on": 1390866440
    }
  ]
}"""

    # Two existing builds (1 started, 2 ended) and two non-existing ids.
    data3 = """
{
  "builds":[
    {
      "id": 1,
      "copr_id": 2,
      "chroot": "fedora-18-x86_64",
      "status": 6,
      "result_dir": "bar",
      "started_on": 1390866440
    },
    {
      "id": 2,
      "copr_id": 1,
      "status": 0,
      "chroot": "fedora-18-x86_64",
      "result_dir": "bar",
      "results": {"packages": []},
      "ended_on": 1390866440
    },
    {
      "id": 123321,
      "copr_id": 1,
      "status": 0,
      "chroot": "fedora-18-x86_64",
      "result_dir": "bar",
      "results": {"packages": []},
      "ended_on": 1390866440
    },
    {
      "id": 1234321,
      "copr_id": 2,
      "chroot": "fedora-18-x86_64",
      "result_dir": "bar",
      "started_on": 1390866440
    }
  ]
}"""

    import_data1 = """
{
  "build_id": 2,
  "branch_commits": {
    "f28": "4dcc0ef1aacc6f345b674d4f40a026b8"
  },
  "reponame": "test/foo"
}
"""

    def test_updating_requires_password(self, f_users, f_coprs, f_builds, f_db):
        """An update without auth headers must be rejected."""
        r = self.tc.post('/backend/update/', content_type='application/json', data='')
        assert (b'You have to provide the correct password' in r.data)

    def test_update_build_ended(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        """data2 marks build 1's single chroot as ended with status 1."""
        self.db.session.commit()
        r = self.tc.post('/backend/update/', content_type='application/json', headers=self.auth_header, data=self.data2)
        assert (json.loads(r.data.decode('utf-8'))['updated_builds_ids'] == [1])
        assert (json.loads(r.data.decode('utf-8'))['non_existing_builds_ids'] == [])
        updated = self.models.Build.query.filter((self.models.Build.id == 1)).one()
        assert (len(updated.build_chroots) == 1)
        assert (updated.build_chroots[0].status == 1)
        assert (updated.status == 1)
        assert (updated.chroots_ended_on == {'fedora-18-x86_64': 1390866440})

    def test_update_state_from_dict(self, f_users, f_fork_prepare):
        """update_state_from_dict applies timestamp fields to the build."""
        upd_dict = {'build_id': 6, 'chroot': 'srpm-builds', 'destdir': '/var/lib/copr/public_html/results', 'enable_net': False, 'ended_on': 1390866440, 'id': 6, 'source_type': 0, 'status': 0, 'submitter': 'user1', 'task_id': '6', 'timeout': 3600}
        BuildsLogic.update_state_from_dict(self.b6, upd_dict)
        updated = self.models.Build.query.filter((self.models.Build.id == 6)).one()
        # NOTE(review): ended_on is compared against started_on on purpose in
        # the original — presumably how update_state_from_dict maps srpm-build
        # timestamps; confirm upstream.
        assert (upd_dict['ended_on'] == updated.started_on)
        upd_dict['started_on'] = 1390866441
        BuildsLogic.update_state_from_dict(self.b6, upd_dict)
        updated = self.models.Build.query.filter((self.models.Build.id == 6)).one()
        assert (upd_dict['started_on'] == updated.started_on)

    def test_update_more_existent_and_non_existent_builds(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        """data3 updates builds 1 and 2 and reports the two unknown ids."""
        self.db.session.add_all([self.b1, self.b2])
        self.db.session.commit()
        r = self.tc.post('/backend/import-completed/', content_type='application/json', headers=self.auth_header, data=self.import_data1)
        assert (r.status_code == 200)
        r = self.tc.post('/backend/update/', content_type='application/json', headers=self.auth_header, data=self.data3)
        assert (sorted(json.loads(r.data.decode('utf-8'))['updated_builds_ids']) == [1, 2])
        assert (sorted(json.loads(r.data.decode('utf-8'))['non_existing_builds_ids']) == [123321, 1234321])
        started = self.models.Build.query.filter((self.models.Build.id == 1)).first()
        assert (started.chroots_started_on == {'fedora-18-x86_64': 1390866440})
        ended = self.models.Build.query.filter((self.models.Build.id == 2)).first()
        assert (ended.status == 0)
        assert (ended.result_dir == '')
        assert (ended.chroots_ended_on == {'fedora-18-x86_64': 1390866440})

    def test_build_task_canceled_waiting_build(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        """Cancel acknowledgement (False) on a waiting build cancels its source."""
        self.db.session.add(self.b3)
        self.db.session.commit()
        r = self.tc.post('/backend/build-tasks/canceled/{}/'.format(self.b3.id), content_type='application/json', headers=self.auth_header, data=json.dumps(False))
        assert (r.status_code == 200)
        assert (json.loads(r.data.decode('utf-8')) == 'success')
        build = self.models.Build.query.filter((self.models.Build.id == 3)).one()
        assert (build.source_status == StatusEnum('canceled'))

    def test_build_task_canceled_running_build(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        """Cancel acknowledgement (True) on a running build leaves it running."""
        self.b4.build_chroots.pop()
        self.b4.build_chroots[0].status = StatusEnum('running')
        self.db.session.add(self.b4)
        self.db.session.commit()
        r = self.tc.post('/backend/build-tasks/canceled/{}/'.format(self.b4.id), content_type='application/json', headers=self.auth_header, data=json.dumps(True))
        assert (r.status_code == 200)
        assert (json.loads(r.data.decode('utf-8')) == 'success')
        build = self.models.Build.query.filter((self.models.Build.id == 4)).one()
        assert (build.canceled == False)
        assert (build.build_chroots[0].status == StatusEnum('running'))

    # NOTE(review): the original line read ".usefixtures(...)" — the
    # "@pytest.mark" prefix was stripped by extraction; restored here.
    @pytest.mark.usefixtures('f_users', 'f_coprs', 'f_mock_chroots', 'f_builds', 'f_db')
    def test_build_task_canceled_deleted_build(self):
        """Cancelling a deleted build succeeds and leaves no cancel request."""
        self.models.Build.query.filter_by(id=self.b3.id).delete()
        self.db.session.commit()
        r = self.tc.post('/backend/build-tasks/canceled/{}/'.format(self.b3.id), content_type='application/json', headers=self.auth_header, data=json.dumps(False))
        assert (r.status_code == 200)
        assert (json.loads(r.data.decode('utf-8')) == 'success')
        cancel_request_table = self.models.CancelRequest.query.all()
        assert (len(cancel_request_table) == 0)
def run_one_element_advection():
    """Advect a scalar with DG1 upwinding on an extruded periodic mesh.

    A cos(2*pi*x)*cos(pi*z) profile is advected with unit velocity in x
    over a unit-length periodic direction for T=1 (one full period), using
    a three-stage SSP Runge-Kutta (Shu-Osher) stepper.  Asserts the final
    L2 difference from the initial condition is below 5e-3.
    (Removed a stray trailing '|' extraction artifact from the last line.)
    """
    nx = 20
    # Base mesh: 1 cell in y, periodic; extruded to 20 layers in z.
    m = PeriodicRectangleMesh(nx, 1, 1.0, 1.0, quadrilateral=True)
    nlayers = 20
    mesh = ExtrudedMesh(m, nlayers, (1.0 / nlayers))
    x = SpatialCoordinate(mesh)
    fe_dg = FiniteElement('DQ', mesh.ufl_cell(), 1, variant='equispaced')
    Vdg = FunctionSpace(mesh, fe_dg)
    Vu = VectorFunctionSpace(mesh, fe_dg)
    q0 = Function(Vdg).interpolate((cos(((2 * pi) * x[0])) * cos((pi * x[2]))))
    q_init = Function(Vdg).assign(q0)
    dq1 = Function(Vdg)
    q1 = Function(Vdg)
    Dt = 0.01
    dt = Constant(Dt)
    n = FacetNormal(mesh)
    # Constant unit velocity in the x direction.
    u0 = Function(Vu).interpolate(Constant((1.0, 0.0, 0.0)))
    # Upwind value of u.n: positive part only.
    un = (0.5 * (dot(u0, n) + abs(dot(u0, n))))
    q = TrialFunction(Vdg)
    p = TestFunction(Vdg)
    a_mass = (inner(q, p) * dx)
    # Advection term integrated by parts; upwind flux on vertical and
    # horizontal interior facets of the extruded mesh.
    a_int = (inner(((- u0) * q), grad(p)) * dx)
    a_flux = (inner(((un('+') * q('+')) - (un('-') * q('-'))), jump(p)) * (dS_v + dS_h))
    arhs = (a_mass - (dt * (a_int + a_flux)))
    # Each stage solves M*dq1 = rhs(q1).
    q_problem = LinearVariationalProblem(a_mass, action(arhs, q1), dq1)
    q_solver = LinearVariationalSolver(q_problem, solver_parameters={'ksp_type': 'preonly', 'pc_type': 'bjacobi', 'sub_pc_type': 'lu'})
    t = 0.0
    T = 1.0
    while (t < (T - (Dt / 2))):
        # SSPRK3 (Shu-Osher form): three forward-Euler stages combined with
        # weights (1, 1/4, 2/3).
        q1.assign(q0)
        q_solver.solve()
        q1.assign(dq1)
        q_solver.solve()
        q1.assign(((0.75 * q0) + (0.25 * dq1)))
        q_solver.solve()
        q0.assign(((q0 / 3) + ((2 * dq1) / 3)))
        t += Dt
    assert ((assemble((inner((q0 - q_init), (q0 - q_init)) * dx)) ** 0.5) < 0.005)
class ConductorCompiler():
    """Compiles the per-topic-partition message callback for a Conductor."""

    def build(self, conductor: 'Conductor', tp: TP, channels: MutableSet[_Topic]) -> ConsumerCallback:
        """Return an async ``on_message`` callback that fans messages from
        topic-partition *tp* out to every channel in *channels*.

        Frequently used attributes are bound to locals up front so the
        per-message hot path does cheap local-name lookups.
        (Removed a stray trailing '|' extraction artifact from the last line;
        the code is otherwise unchanged.)
        """
        (topic, partition) = tp
        app = conductor.app
        len_: Callable[([Any], int)] = len
        consumer_on_buffer_full = app.consumer.on_buffer_full
        consumer_on_buffer_drop = app.consumer.on_buffer_drop
        acquire_flow_control: Callable = app.flow_control.acquire
        wait_until_producer_ebb = app.producer.buffer.wait_until_ebb
        on_topic_buffer_full = app.sensors.on_topic_buffer_full

        def on_pressure_high() -> None:
            # A channel queue hit its high-water mark: notify sensors and
            # the consumer.
            on_topic_buffer_full(tp)
            consumer_on_buffer_full(tp)

        def on_pressure_drop() -> None:
            # Queue pressure relieved: notify the consumer.
            consumer_on_buffer_drop(tp)

        async def on_message(message: Message) -> None:
            # Back-pressure: wait for flow control and for the producer
            # buffer to ebb before processing the next message.
            (await acquire_flow_control())
            (await wait_until_producer_ebb())
            channels_n = len_(channels)
            if channels_n:
                # One reference per destination channel.
                message.incref(channels_n)
                event: Optional[EventT] = None
                event_keyid: Optional[Tuple[(K, V)]] = None
                delivered: Set[_Topic] = set()
                full: typing.List[Tuple[(EventT, _Topic)]] = []
                try:
                    for chan in channels:
                        keyid = (chan.key_type, chan.value_type)
                        if (event is None):
                            # First channel: decode once; the decoded event is
                            # reused below for channels with matching types.
                            event = (await chan.decode(message, propagate=True))
                            event_keyid = keyid
                            queue = chan.queue
                            if queue.full():
                                full.append((event, chan))
                                continue
                            queue.put_nowait_enhanced(event, on_pressure_high=on_pressure_high, on_pressure_drop=on_pressure_drop)
                        else:
                            dest_event: EventT
                            if (keyid == event_keyid):
                                dest_event = event
                            else:
                                # Different key/value types: re-decode for
                                # this channel.
                                dest_event = (await chan.decode(message, propagate=True))
                            queue = chan.queue
                            if queue.full():
                                full.append((dest_event, chan))
                                continue
                            queue.put_nowait_enhanced(dest_event, on_pressure_high=on_pressure_high, on_pressure_drop=on_pressure_drop)
                        delivered.add(chan)
                    if full:
                        # Saturated channels: report and fall back to a
                        # blocking put on each.
                        for (_, dest_chan) in full:
                            on_topic_buffer_full(dest_chan)
                        (await asyncio.wait([asyncio.ensure_future(dest_chan.put(dest_event)) for (dest_event, dest_chan) in full], return_when=asyncio.ALL_COMPLETED))
                except KeyDecodeError as exc:
                    # Ack the undelivered references so the offset can
                    # advance, then surface the error on each channel.
                    remaining = (channels - delivered)
                    message.ack(app.consumer, n=len(remaining))
                    for channel in remaining:
                        (await channel.on_key_decode_error(exc, message))
                        delivered.add(channel)
                except ValueDecodeError as exc:
                    remaining = (channels - delivered)
                    message.ack(app.consumer, n=len(remaining))
                    for channel in remaining:
                        (await channel.on_value_decode_error(exc, message))
                        delivered.add(channel)
        return on_message
def run_cmd():
    """Attach the ``run`` command ('Run a served model') to the CLI group.

    NOTE(review): in the original source the '@' prefixes of these decorator
    lines were stripped by extraction (they appeared as bare tuples /
    expressions, which are no-ops); the ``@_cli.command`` / ``@click.option``
    decorators are restored here.  The odd ``'-t/'`` short-flag spelling is
    preserved as found — confirm upstream.
    """
    @_cli.command(short_help='Run a served model', help='Run a served model')
    @click.option('-i', '--input', 'input', required=True, type=click.STRING)
    @click.option('-o', '--output', 'output', required=False, default=None, type=click.STRING)
    @click.option('-b', '--batch_size', 'batch_size', required=False, default=100, type=click.INT)
    @click.option('-t/', '--track_run/--no_track_run', 'track_run', required=False, default=False)
    @click.option('--standard', is_flag=True, default=False, help='Assume that the run is standard and, therefore, do not do so many checks.')
    def run(input, output, batch_size, track_run, standard):
        """Run the currently served model on the given input."""
        session = Session(config_json=None)
        model_id = session.current_model_id()
        service_class = session.current_service_class()
        if (model_id is None):
            echo("No model seems to be served. Please run 'ersilia serve ...' before.", fg='red')
            return
        mdl = ErsiliaModel(model_id, service_class=service_class, config_json=None, track_runs=track_run)
        result = mdl.run(input=input, output=output, batch_size=batch_size, track_run=track_run, try_standard=standard)
        # Generator results are streamed item by item as pretty-printed JSON.
        if isinstance(result, types.GeneratorType):
            for result in mdl.run(input=input, output=output, batch_size=batch_size):
                if (result is not None):
                    echo(json.dumps(result, indent=4))
                else:
                    echo('Something went wrong', fg='red')
        else:
            echo(result)
class OptionSeriesBulletSonificationContexttracksMappingTremoloSpeed(Options):
    """Generated accessor options for the tremolo-speed mapping config.

    NOTE(review): the original source had each getter/setter pair as two
    bare ``def``s with the same name, so the setter silently shadowed the
    getter.  The ``@property`` / ``@<name>.setter`` decorators are restored
    here to match the accessor pattern these pairs clearly implement.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_normalize_norm_range_h5_cool_equal(capsys):
    """``hicNormalize --normalize norm_range`` must produce identical data
    for .cool and .h5 inputs, each matching its stored reference matrix.

    (Removed a stray trailing '|' extraction artifact from the last line.)
    """
    # Output files; delete=False so hicNormalize can reopen them by name.
    outfile_one = NamedTemporaryFile(suffix='.cool', delete=False)
    outfile_one.close()
    outfile_two = NamedTemporaryFile(suffix='.h5', delete=False)
    outfile_two.close()
    args = '--matrices {} --normalize norm_range -o {}'.format(matrix_one_cool, outfile_one.name).split()
    compute(hicNormalize.main, args, 5)
    args = '--matrices {} --normalize norm_range -o {}'.format(matrix_one_h5, outfile_two.name).split()
    compute(hicNormalize.main, args, 5)
    # Reference matrices checked into the test data directory.
    test_one = hm.hiCMatrix((ROOT + '/norm_range_one.cool'))
    test_two = hm.hiCMatrix((ROOT + '/norm_range_one.h5'))
    new_one = hm.hiCMatrix(outfile_one.name)
    new_two = hm.hiCMatrix(outfile_two.name)
    nt.assert_equal(test_one.matrix.data, new_one.matrix.data)
    nt.assert_equal(test_one.cut_intervals, new_one.cut_intervals)
    nt.assert_equal(test_two.matrix.data, new_two.matrix.data)
    nt.assert_equal(test_two.cut_intervals, new_two.cut_intervals)
    # Cross-format check: both formats yield the same normalized data.
    nt.assert_equal(new_one.matrix.data, new_two.matrix.data)
    nt.assert_equal(len(new_one.cut_intervals), len(new_two.cut_intervals))
    os.unlink(outfile_one.name)
    os.unlink(outfile_two.name)
# NOTE(review): throughout this class the "@pytest.mark" prefix of every
# decorator was stripped by extraction (lines began with ".integration" /
# ".parametrize", which is a syntax error); the decorators are restored here.
@pytest.mark.integration
class TestCrud():
    """CRUD round-trip tests for every fides resource endpoint, including
    scope (403) and validation (422) failure paths."""

    @pytest.mark.parametrize('endpoint', model_list)
    def test_api_create(self, generate_auth_header, test_config: FidesConfig, resources_dict: Dict, endpoint: str) -> None:
        """Creating a resource with the matching create scope returns 201."""
        manifest = resources_dict[endpoint]
        print(manifest.json(exclude_none=True))
        token_scopes: List[str] = [f'{CLI_SCOPE_PREFIX_MAPPING[endpoint]}:{CREATE}']
        auth_header = generate_auth_header(scopes=token_scopes)
        result = _api.create(url=test_config.cli.server_url, resource_type=endpoint, json_resource=manifest.json(exclude_none=True), headers=auth_header)
        print(result.text)
        assert (result.status_code == 201)

    @pytest.mark.parametrize('endpoint', model_list)
    def test_api_create_wrong_scope(self, generate_auth_header, test_config: FidesConfig, resources_dict: Dict, endpoint: str) -> None:
        """An unrelated scope must be rejected with 403."""
        manifest = resources_dict[endpoint]
        token_scopes: List[str] = [PRIVACY_REQUEST_CREATE]
        auth_header = generate_auth_header(scopes=token_scopes)
        result = _api.create(url=test_config.cli.server_url, resource_type=endpoint, json_resource=manifest.json(exclude_none=True), headers=auth_header)
        assert (result.status_code == 403)

    async def test_create_dataset_data_categories_validated(self, test_config: FidesConfig, resources_dict: Dict):
        """An unknown data category is rejected with 422 on create."""
        endpoint = 'dataset'
        manifest: Dataset = resources_dict[endpoint]
        manifest.collections[0].data_categories = ['bad_category']
        result = _api.create(url=test_config.cli.server_url, headers=test_config.user.auth_header, json_resource=manifest.json(exclude_none=True), resource_type=endpoint)
        assert (result.status_code == 422)
        assert (result.json()['detail'][0]['msg'] == 'The data category bad_category is not supported.')

    @pytest.mark.parametrize('endpoint', model_list)
    def test_api_ls(self, test_config: FidesConfig, endpoint: str, generate_auth_header) -> None:
        """Listing with the read scope returns 200."""
        token_scopes: List[str] = [f'{CLI_SCOPE_PREFIX_MAPPING[endpoint]}:{READ}']
        auth_header = generate_auth_header(scopes=token_scopes)
        result = _api.ls(url=test_config.cli.server_url, resource_type=endpoint, headers=auth_header)
        print(result.text)
        assert (result.status_code == 200)

    @pytest.mark.parametrize('endpoint', model_list)
    def test_api_ls_wrong_scope(self, test_config: FidesConfig, endpoint: str, generate_auth_header) -> None:
        """Listing with an unrelated scope returns 403."""
        token_scopes: List[str] = [PRIVACY_REQUEST_READ]
        auth_header = generate_auth_header(scopes=token_scopes)
        result = _api.ls(url=test_config.cli.server_url, resource_type=endpoint, headers=auth_header)
        assert (result.status_code == 403)

    @pytest.mark.parametrize('endpoint', model_list)
    def test_api_get(self, test_config: FidesConfig, endpoint: str, generate_auth_header) -> None:
        """Fetching an existing resource with the read scope returns 200."""
        token_scopes: List[str] = [f'{CLI_SCOPE_PREFIX_MAPPING[endpoint]}:{READ}']
        auth_header = generate_auth_header(scopes=token_scopes)
        existing_id = get_existing_key(test_config, endpoint)
        result = _api.get(url=test_config.cli.server_url, headers=auth_header, resource_type=endpoint, resource_id=existing_id)
        print(result.text)
        assert (result.status_code == 200)

    @pytest.mark.parametrize('endpoint', model_list)
    def test_api_get_wrong_scope(self, test_config: FidesConfig, endpoint: str, generate_auth_header) -> None:
        """Fetching with an unrelated scope returns 403."""
        token_scopes: List[str] = [PRIVACY_REQUEST_READ]
        auth_header = generate_auth_header(scopes=token_scopes)
        existing_id = get_existing_key(test_config, endpoint)
        result = _api.get(url=test_config.cli.server_url, headers=auth_header, resource_type=endpoint, resource_id=existing_id)
        assert (result.status_code == 403)

    @pytest.mark.parametrize('endpoint', model_list)
    def test_sent_is_received(self, test_config: FidesConfig, resources_dict: Dict, endpoint: str) -> None:
        """A created resource round-trips unchanged through GET."""
        manifest = resources_dict[endpoint]
        # 'user' resources are keyed by userName rather than fides_key.
        resource_key = (manifest.fides_key if (endpoint != 'user') else manifest.userName)
        print(manifest.json(exclude_none=True))
        result = _api.get(url=test_config.cli.server_url, headers=test_config.user.auth_header, resource_type=endpoint, resource_id=resource_key)
        print(result.text)
        assert (result.status_code == 200)
        parsed_result = parse.parse_dict(endpoint, result.json())
        assert (parsed_result == manifest)

    @pytest.mark.parametrize('endpoint', model_list)
    def test_api_update(self, test_config: FidesConfig, resources_dict: Dict, endpoint: str, generate_auth_header) -> None:
        """Updating with the update scope returns 200."""
        token_scopes: List[str] = [f'{CLI_SCOPE_PREFIX_MAPPING[endpoint]}:{UPDATE}']
        auth_header = generate_auth_header(scopes=token_scopes)
        manifest = resources_dict[endpoint]
        result = _api.update(url=test_config.cli.server_url, headers=auth_header, resource_type=endpoint, json_resource=manifest.json(exclude_none=True))
        print(result.text)
        assert (result.status_code == 200)

    @pytest.mark.parametrize('endpoint', model_list)
    def test_api_update_wrong_scope(self, test_config: FidesConfig, resources_dict: Dict, endpoint: str, generate_auth_header) -> None:
        """Updating with an unrelated scope returns 403."""
        token_scopes: List[str] = [POLICY_CREATE_OR_UPDATE]
        auth_header = generate_auth_header(scopes=token_scopes)
        manifest = resources_dict[endpoint]
        result = _api.update(url=test_config.cli.server_url, headers=auth_header, resource_type=endpoint, json_resource=manifest.json(exclude_none=True))
        assert (result.status_code == 403)

    async def test_update_dataset_data_categories_validated(self, test_config: FidesConfig, resources_dict: Dict):
        """An unknown data category is rejected with 422 on update."""
        endpoint = 'dataset'
        manifest: Dataset = resources_dict[endpoint]
        manifest.collections[0].data_categories = ['bad_category']
        result = _api.update(url=test_config.cli.server_url, headers=test_config.user.auth_header, resource_type=endpoint, json_resource=manifest.json(exclude_none=True))
        assert (result.status_code == 422)
        assert (result.json()['detail'][0]['msg'] == 'The data category bad_category is not supported.')

    @pytest.mark.parametrize('endpoint', model_list)
    def test_api_upsert(self, test_config: FidesConfig, resources_dict: Dict, endpoint: str, generate_auth_header) -> None:
        """Upsert requires both update and create scopes; returns 200."""
        token_scopes: List[str] = [f'{CLI_SCOPE_PREFIX_MAPPING[endpoint]}:{UPDATE}', f'{CLI_SCOPE_PREFIX_MAPPING[endpoint]}:{CREATE}']
        auth_header = generate_auth_header(scopes=token_scopes)
        manifest = resources_dict[endpoint]
        result = _api.upsert(url=test_config.cli.server_url, headers=auth_header, resource_type=endpoint, resources=[loads(manifest.json())])
        assert (result.status_code == 200)

    @pytest.mark.parametrize('endpoint', model_list)
    def test_api_upsert_wrong_scope(self, test_config: FidesConfig, resources_dict: Dict, endpoint: str, generate_auth_header) -> None:
        """Upsert with only the create scope returns 403."""
        token_scopes: List[str] = [f'{CLI_SCOPE_PREFIX_MAPPING[endpoint]}:{CREATE}']
        auth_header = generate_auth_header(scopes=token_scopes)
        manifest = resources_dict[endpoint]
        result = _api.upsert(url=test_config.cli.server_url, headers=auth_header, resource_type=endpoint, resources=[loads(manifest.json())])
        assert (result.status_code == 403)

    async def test_upsert_validates_resources_against_pydantic_model(self, test_config: FidesConfig, resources_dict: Dict, async_session):
        """A missing organization_fides_key is filled with the default."""
        endpoint = 'dataset'
        manifest: Dataset = resources_dict[endpoint]
        dict_manifest = manifest.dict()
        del dict_manifest['organization_fides_key']
        result = _api.upsert(url=test_config.cli.server_url, headers=test_config.user.auth_header, resource_type=endpoint, resources=[dict_manifest])
        assert (result.status_code == 200)
        resource = (await get_resource(Dataset, manifest.fides_key, async_session))
        assert (resource.organization_fides_key == 'default_organization')

    async def test_upsert_dataset_data_categories_validated(self, test_config: FidesConfig, resources_dict: Dict):
        """An unknown data category is rejected with 422 on upsert."""
        endpoint = 'dataset'
        manifest: Dataset = resources_dict[endpoint]
        dict_manifest = manifest.dict()
        dict_manifest['collections'][0]['data_categories'] = ['bad_category']
        result = _api.upsert(url=test_config.cli.server_url, headers=test_config.user.auth_header, resource_type=endpoint, resources=[dict_manifest])
        assert (result.status_code == 422)
        assert (result.json()['detail'][0]['msg'] == 'The data category bad_category is not supported.')

    @pytest.mark.parametrize('endpoint', model_list)
    def test_api_delete_wrong_scope(self, test_config: FidesConfig, resources_dict: Dict, endpoint: str, generate_auth_header) -> None:
        """Deleting with an unrelated scope returns 403."""
        token_scopes: List[str] = [PRIVACY_REQUEST_DELETE]
        auth_header = generate_auth_header(scopes=token_scopes)
        manifest = resources_dict[endpoint]
        resource_key = (manifest.fides_key if (endpoint != 'user') else manifest.userName)
        result = _api.delete(url=test_config.cli.server_url, resource_type=endpoint, resource_id=resource_key, headers=auth_header)
        assert (result.status_code == 403)

    @pytest.mark.parametrize('endpoint', model_list)
    def test_api_delete(self, test_config: FidesConfig, resources_dict: Dict, endpoint: str, generate_auth_header) -> None:
        """Deleting with the delete scope returns 200 and echoes the key."""
        token_scopes: List[str] = [f'{CLI_SCOPE_PREFIX_MAPPING[endpoint]}:{DELETE}']
        auth_header = generate_auth_header(scopes=token_scopes)
        manifest = resources_dict[endpoint]
        resource_key = (manifest.fides_key if (endpoint != 'user') else manifest.userName)
        result = _api.delete(url=test_config.cli.server_url, resource_type=endpoint, resource_id=resource_key, headers=auth_header)
        print(result.text)
        assert (result.status_code == 200)
        resp = result.json()
        assert (resp['message'] == 'resource deleted')
        assert (resp['resource']['fides_key'] == manifest.fides_key)
def uses_tables(*names):
    """Decorator for test methods that create the listed tables.

    On backends with an isolated/ephemeral database the wrapped test runs
    untouched.  On MySQL/BigQuery (shared schema) all existing tables are
    dropped before the test — sorted so the listed tables go last — and the
    listed tables are dropped again afterwards, asserting none remain.
    (Removed a stray trailing '|' extraction artifact from the last line.)
    """
    names = list(names)

    def decorator(decorated):
        def wrapper(self):
            if (self.uri not in (MYSQL_URI, BIGQUERY_URI)):
                # Isolated database: no shared state, nothing to clean up.
                return decorated(self)
            p = self.Preql()

            def _key(t):
                # Listed tables sort by their position in *names*;
                # everything else sorts first (-1).
                try:
                    return names.index(t.lower())
                except ValueError:
                    return (- 1)
            tables = sorted(p._interp.list_tables(), key=_key)
            _drop_tables(p._interp.state, *tables)
            try:
                return decorated(self)
            finally:
                p = self.preql
                if (p._interp.state.db.target in (mysql, bigquery)):
                    _drop_tables(p._interp.state, *map(Id, names))
                    tables = p._interp.list_tables()
                    # The test must not leak any tables beyond those listed.
                    assert (not tables), tables
        return wrapper
    return decorator
def delete_user(username):
    """Delete *username*; requires a valid, unexpired token of an admin user.

    Returns 200 on success, 404 when the target user does not exist, and
    401 for expired/invalid tokens or non-admin callers.
    """
    # token_validator returns an error string ('... expired' / 'Invalid
    # token ...') or the authenticated caller's username.
    resp = token_validator(request.headers.get('Authorization'))
    if ('expired' in resp) or ('Invalid token' in resp):
        return Response(error_message_helper(resp), 401, mimetype='application/json')
    user = User.query.filter_by(username=resp).first()
    # Guard user is None (valid token for a since-deleted account) — was an
    # unhandled AttributeError (HTTP 500) in the original.
    if (user is None) or (not user.admin):
        return Response(error_message_helper('Only Admins may delete users!'), 401, mimetype='application/json')
    if bool(User.delete_user(username)):
        responseObject = {'status': 'success', 'message': 'User deleted.'}
        return Response(json.dumps(responseObject), 200, mimetype='application/json')
    return Response(error_message_helper('User not found!'), 404, mimetype='application/json')
def extractAPurpleBlob(item):
    """Parse an 'A Purple Blob' feed item into a release message.

    Returns ``None`` for previews or items without chapter/volume/fragment
    numbers, a release message for 'Nirvana in Fire' chapters, and ``False``
    otherwise.  (Removed a stray trailing '|' extraction artifact.)
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol or frag)) or ('preview' in item['title'].lower())):
        return None
    if re.match('^Nirvana in Fire Chapter \\d+', item['title'], re.IGNORECASE):
        return buildReleaseMessageWithType(item, 'Nirvana in Fire', vol, chp, frag=frag, postfix=postfix)
    return False
def _auth_multi_factor_info_from_token_data(token_data: dict[(str, _typing.Any)]):
    """Build an ``AuthMultiFactorInfo`` from decoded token data.

    ``enrollment_time`` is parsed from ISO-8601 when present; a present
    ``phone_number`` forces ``factor_id`` to 'phone'.
    (Removed a stray trailing '|' extraction artifact from the last line.)
    """
    # Imported lazily, matching the original (presumably to avoid an import
    # cycle — confirm).
    from firebase_functions.identity_fn import AuthMultiFactorInfo
    enrollment_time = token_data.get('enrollment_time')
    if enrollment_time:
        enrollment_time = _dt.datetime.fromisoformat(enrollment_time)
    factor_id = (token_data['factor_id'] if (not token_data.get('phone_number')) else 'phone')
    return AuthMultiFactorInfo(uid=token_data['uid'], factor_id=factor_id, display_name=token_data.get('display_name'), enrollment_time=enrollment_time, phone_number=token_data.get('phone_number'))
class TestMultiAssets():
def test_multiple_assets_with_restart(self, remove_and_add_pkgs, reset_fledge, start_north, read_data_from_pi_web_api, skip_verify_north_interface, fledge_url, num_assets, wait_time, retries, pi_host, pi_port, pi_admin, pi_passwd, pi_db):
total_benchmark_services = 6
num_assets_per_service = (num_assets // total_benchmark_services)
total_assets = (num_assets_per_service * total_benchmark_services)
for count in range(total_benchmark_services):
service_name = (BENCHMARK_SOUTH_SVC_NAME + '{}'.format((count + 1)))
add_benchmark(fledge_url, service_name, (count + 1), num_assets_per_service)
verify_service_added(fledge_url, service_name)
time.sleep((wait_time * 3))
verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
verify_asset(fledge_url, total_assets, (num_assets // 100), wait_time)
put_url = '/fledge/restart'
utils.put_request(fledge_url, urllib.parse.quote(put_url))
verify_restart(fledge_url, retries)
verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
verify_asset(fledge_url, total_assets, (num_assets // 100), wait_time)
verify_asset_tracking_details(fledge_url, total_assets, total_benchmark_services, num_assets_per_service)
old_ping_result = verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
time.sleep((wait_time * 3))
new_ping_result = verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
assert (old_ping_result['dataRead'] < new_ping_result['dataRead'])
if (not skip_verify_north_interface):
assert (old_ping_result['dataSent'] < new_ping_result['dataSent'])
_verify_egress(read_data_from_pi_web_api, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries, total_benchmark_services, num_assets_per_service)
time.sleep((wait_time * 2))
def test_add_multiple_assets_before_after_restart(self, reset_fledge, start_north, read_data_from_pi_web_api, skip_verify_north_interface, fledge_url, num_assets, wait_time, retries, pi_host, pi_port, pi_admin, pi_passwd, pi_db):
total_benchmark_services = 3
num_assets_per_service = (num_assets // (total_benchmark_services * 2))
total_assets = (num_assets_per_service * total_benchmark_services)
for count in range(total_benchmark_services):
service_name = (BENCHMARK_SOUTH_SVC_NAME + '{}'.format((count + 1)))
add_benchmark(fledge_url, service_name, (count + 1), num_assets_per_service)
verify_service_added(fledge_url, service_name)
time.sleep((wait_time * 3))
verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
verify_asset(fledge_url, total_assets, (num_assets // 100), wait_time)
verify_asset_tracking_details(fledge_url, total_assets, total_benchmark_services, num_assets_per_service)
put_url = '/fledge/restart'
utils.put_request(fledge_url, urllib.parse.quote(put_url))
verify_restart(fledge_url, retries)
total_assets = (total_assets * 2)
for count in range(total_benchmark_services):
service_name = (BENCHMARK_SOUTH_SVC_NAME + '{}'.format((count + 4)))
add_benchmark(fledge_url, service_name, (count + 4), num_assets_per_service)
verify_service_added(fledge_url, service_name)
verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
verify_asset(fledge_url, total_assets, (num_assets // 100), wait_time)
verify_asset_tracking_details(fledge_url, total_assets, (total_benchmark_services * 2), num_assets_per_service)
old_ping_result = verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
time.sleep((wait_time * 3))
new_ping_result = verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
assert (old_ping_result['dataRead'] < new_ping_result['dataRead'])
if (not skip_verify_north_interface):
assert (old_ping_result['dataSent'] < new_ping_result['dataSent'])
_verify_egress(read_data_from_pi_web_api, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries, (total_benchmark_services * 2), num_assets_per_service)
    def test_multiple_assets_with_reconfig(self, reset_fledge, start_north, read_data_from_pi_web_api, skip_verify_north_interface, fledge_url, num_assets, wait_time, retries, pi_host, pi_port, pi_admin, pi_passwd, pi_db):
        """Add benchmark south services, then reconfigure each to double its
        asset count, and verify ingest (and optionally PI egress) grows.
        """
        total_benchmark_services = 3
        # Start with half the budget; the reconfig below doubles it.
        num_assets_per_service = (num_assets // (total_benchmark_services * 2))
        total_assets = (num_assets_per_service * total_benchmark_services)
        for count in range(total_benchmark_services):
            service_name = (BENCHMARK_SOUTH_SVC_NAME + '{}'.format((count + 1)))
            add_benchmark(fledge_url, service_name, (count + 1), num_assets_per_service)
            verify_service_added(fledge_url, service_name)
        # Allow ingest to accumulate before checking statistics.
        time.sleep((wait_time * 3))
        verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
        verify_asset(fledge_url, total_assets, (num_assets // 100), wait_time)
        verify_asset_tracking_details(fledge_url, total_assets, total_benchmark_services, num_assets_per_service)
        # Reconfigure every service to produce twice as many assets.
        num_assets_per_service = (2 * num_assets_per_service)
        payload = {'numAssets': '{}'.format(num_assets_per_service)}
        for count in range(total_benchmark_services):
            service_name = (BENCHMARK_SOUTH_SVC_NAME + '{}'.format((count + 1)))
            put_url = '/fledge/category/{}'.format(service_name)
            utils.put_request(fledge_url, urllib.parse.quote(put_url), payload)
        total_assets = (total_assets * 2)
        verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
        verify_asset(fledge_url, total_assets, (num_assets // 100), wait_time)
        verify_asset_tracking_details(fledge_url, total_assets, total_benchmark_services, num_assets_per_service)
        # Reads must still be increasing after the reconfig; sends too when
        # the north interface is being verified.
        old_ping_result = verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
        time.sleep((wait_time * 3))
        new_ping_result = verify_ping(fledge_url, skip_verify_north_interface, wait_time, retries)
        assert (old_ping_result['dataRead'] < new_ping_result['dataRead'])
        if (not skip_verify_north_interface):
            assert (old_ping_result['dataSent'] < new_ping_result['dataSent'])
            _verify_egress(read_data_from_pi_web_api, pi_host, pi_admin, pi_passwd, pi_db, wait_time, retries, total_benchmark_services, num_assets_per_service)
class TestTachoMotorSpeedValue(ptc.ParameterizedTestCase):
    """Parameterized checks of the tacho motor's ``speed`` value."""

    def test_speed_value_is_read_only(self):
        """Assigning to ``speed`` must raise AttributeError."""
        motor = self._param['motor']
        with self.assertRaises(AttributeError):
            motor.speed = 1

    def test_speed_value_after_reset(self):
        """After issuing the ``reset`` command the reported speed is zero."""
        motor = self._param['motor']
        motor.command = 'reset'
        self.assertEqual(motor.speed, 0)
def load_special_case_sites(override=False):
    """Reload the special-case URL handling ruleset when needed.

    The ruleset is (re)loaded when the cache is empty, when running in debug
    mode, when ``override`` is True, or when the reload interval has elapsed
    since the last load.

    Args:
        override: force a reload regardless of cache state.

    Returns:
        The (possibly refreshed) special-case ruleset cache.
    """
    global last_special_load
    now = time.time()
    # Evaluate all trigger conditions before touching last_special_load so
    # the diagnostic below reports the state that actually caused the reload.
    # (The original updated last_special_load first, so the printed expiry
    # check was always False.)
    cache_missing = (flags.SPECIAL_CASE_CACHE is None)
    debug_mode = ('debug' in sys.argv)
    cache_expired = ((last_special_load + reload_interval) < now)
    if (cache_missing or debug_mode or override or cache_expired):
        print(('Need to load special-url handling ruleset (%s, %s, %s)' % (cache_missing, debug_mode, cache_expired)))
        last_special_load = now
        (rules, specials) = get_rules()
        flags.RULE_CACHE = rules
        flags.SPECIAL_CASE_CACHE = specials
    return flags.SPECIAL_CASE_CACHE
def test_upload_multipart(client):
    """POST a multipart form and verify every part is echoed back as JSON."""
    # Boundary string must match the one embedded in the EXAMPLE1 fixture body.
    resp = client.simulate_post('/submit', headers={'Content-Type': 'multipart/form-data; boundary=5b11af82ab65407ba8cdccf37d2a9c4f'}, body=EXAMPLE1)
    assert (resp.status_code == 200)
    # Parts arrive in order: a plain text field, a JSON document (its `text`
    # is None because the part is not text/plain), and an uploaded text file.
    assert (resp.json == [{'content_type': 'text/plain', 'data': 'world', 'filename': None, 'name': 'hello', 'secure_filename': None, 'text': 'world'}, {'content_type': 'application/json', 'data': '{"debug": true, "message": "Hello, world!", "score": 7}', 'filename': None, 'name': 'document', 'secure_filename': None, 'text': None}, {'content_type': 'text/plain', 'data': 'Hello, world!\n', 'filename': 'test.txt', 'name': 'file1', 'secure_filename': 'test.txt', 'text': 'Hello, world!\n'}])
class GreenThreadWrapper(ProcessBase):
    """Subprocess tests checking that eventlet's monkey-patched ``threading``
    exposes green threads through the standard thread API.

    Each test writes a small program (prologue + test-specific code +
    epilogue) to a temp module, runs it in a subprocess, and inspects its
    stdout lines. ``t`` inside the generated program is the current (green)
    thread as seen by the patched ``threading`` module.
    """
    prologue = 'import eventlet\neventlet.monkey_patch()\nimport threading\ndef test():\n    t = threading.currentThread()\n'
    epilogue = '\nt = eventlet.spawn(test)\nt.wait()\n'

    def test_join(self):
        # A green thread captured via currentThread() should be joinable and
        # repr as a _GreenThread.
        self.write_to_tempfile('newmod', (((self.prologue + '\n    def test2():\n        global t2\n        t2 = threading.currentThread()\n    eventlet.spawn(test2)\n') + self.epilogue) + '\nprint(repr(t2))\nt2.join()\n'))
        (output, lines) = self.launch_subprocess('newmod.py')
        self.assertEqual(len(lines), 2, '\n'.join(lines))
        assert lines[0].startswith('<_GreenThread'), lines[0]

    def test_name(self):
        # name / getName / get_name should agree, and both the attribute and
        # the setter methods should rename the thread.
        self.write_to_tempfile('newmod', ((self.prologue + "\n    print(t.name)\n    print(t.getName())\n    print(t.get_name())\n    t.name = 'foo'\n    print(t.name)\n    print(t.getName())\n    print(t.get_name())\n    t.setName('bar')\n    print(t.name)\n    print(t.getName())\n    print(t.get_name())\n") + self.epilogue))
        (output, lines) = self.launch_subprocess('newmod.py')
        self.assertEqual(len(lines), 10, '\n'.join(lines))
        for i in range(0, 3):
            self.assertEqual(lines[i], 'GreenThread-1', lines[i])
        for i in range(3, 6):
            self.assertEqual(lines[i], 'foo', lines[i])
        for i in range(6, 9):
            self.assertEqual(lines[i], 'bar', lines[i])

    def test_ident(self):
        # The thread ident should be the id of the underlying greenlet.
        self.write_to_tempfile('newmod', ((self.prologue + '\n    print(id(t._g))\n    print(t.ident)\n') + self.epilogue))
        (output, lines) = self.launch_subprocess('newmod.py')
        self.assertEqual(len(lines), 3, '\n'.join(lines))
        self.assertEqual(lines[0], lines[1])

    def test_is_alive(self):
        # A running green thread reports alive via both API spellings.
        self.write_to_tempfile('newmod', ((self.prologue + '\n    print(t.is_alive())\n    print(t.isAlive())\n') + self.epilogue))
        (output, lines) = self.launch_subprocess('newmod.py')
        self.assertEqual(len(lines), 3, '\n'.join(lines))
        self.assertEqual(lines[0], 'True', lines[0])
        self.assertEqual(lines[1], 'True', lines[1])

    def test_is_daemon(self):
        # Green threads present themselves as daemon threads.
        self.write_to_tempfile('newmod', ((self.prologue + '\n    print(t.is_daemon())\n    print(t.isDaemon())\n') + self.epilogue))
        (output, lines) = self.launch_subprocess('newmod.py')
        self.assertEqual(len(lines), 3, '\n'.join(lines))
        self.assertEqual(lines[0], 'True', lines[0])
        self.assertEqual(lines[1], 'True', lines[1])
class CliProgresser(object):
    """Progress reporter for CLI runs.

    Prints each event to stdout as it happens and remembers warnings and
    errors so a final summary can be printed via ``get_summary``.
    """

    def __init__(self):
        self.errors = []
        self.warnings = []

    def _report(self, label, payload, bucket=None):
        # Print one event line; optionally remember the payload for the summary.
        print('{}: {}'.format(label, payload))
        if bucket is not None:
            bucket.append(payload)

    def on_new_object(self, resource):
        # Informational only; new objects are not recorded.
        self._report('found new object', resource)

    def on_warning(self, warning):
        self._report('warning', warning, bucket=self.warnings)

    def on_error(self, error):
        self._report('error', error, bucket=self.errors)

    def get_summary(self):
        print('Errors: {}, Warnings: {}'.format(len(self.errors), len(self.warnings)))
class OptionSeriesArearangeMarker(Options):
    """Highcharts ``series.arearange.marker`` option group.

    NOTE(review): every option appears twice -- a getter returning the
    Highcharts default followed by a setter with the same name. In plain
    Python the second ``def`` shadows the first, so only the setters survive;
    this looks like lost ``@property`` / ``@<name>.setter`` decorators from
    the original generated source -- confirm before relying on the getters.
    The values shown in each getter are the Highcharts defaults.
    """

    def enabled(self):
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def states(self) -> 'OptionSeriesArearangeMarkerStates':
        # Sub-options object for marker hover/select states.
        return self._config_sub_data('states', OptionSeriesArearangeMarkerStates)

    def symbol(self):
        return self._config_get(None)

    def symbol(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
def run():
    """Convert an ORCA ``.hess`` + log file pair into a pysisyphus HDF5 Hessian.

    Reads the Hessian and geometry from the ORCA .hess file, pulls the final
    single point energy and the multiplicity from the ORCA log, and writes
    everything to the HDF5 file given on the command line.
    """
    args = parse_args(sys.argv[1:])
    log_fn = args.log
    hess_fn = args.hess
    h5_fn = args.out
    parsed = ORCA.parse_hess_file(hess_fn)
    print(f"Read '{hess_fn}'.")
    # make_sym_mat presumably expands the stored data into a symmetric
    # Cartesian Hessian -- name-based assumption, confirm against its def.
    cart_hessian = make_sym_mat(parsed['hessian'])
    # NOTE(review): the first two entries of parsed['atoms'] are skipped;
    # assumes they are header entries of the $atoms block -- confirm with the
    # ORCA parser.
    (atoms, _, coords3d) = zip(*parsed['atoms'][2:])
    coords3d = np.array([c.asList() for c in coords3d])
    geom = Geometry(atoms, coords3d)
    with open(log_fn) as handle:
        log_text = handle.read()
    print(f"Read '{log_fn}'")
    # Several single point energies may appear (e.g. multi-step jobs); the
    # last one belongs to the final geometry.
    energies = re.findall('FINAL SINGLE POINT ENERGY\\s+([\\d\\-\\.]+)', log_text)
    energy = float(energies[(- 1)])
    print(f'''Found {len(energies)} energies in '{log_fn}'.
Using last one: {energy:.6f} au.''')
    mult_re = re.compile('Multiplicity\\s+Mult\\s+\\.{4}\\s+(\\d+)')
    mult = int(mult_re.search(log_text).group(1))
    print(f'Multiplicity: {mult}')
    save_hessian(h5_fn, geom, cart_hessian=cart_hessian, energy=energy, mult=mult)
    print(f"Wrote pysisyphus HDF5 Hessian to '{h5_fn}'")
def _build_argument_parser():
description = '\nLoads the data from each file ("some/path/filename.xxx") in INPUT_FILES\nand exposes it as the variable "filename". It then loads the Jinja2\ntemplate TEMPLATE_FILE and dumps the rendered result to OUTPUT.\n\nExample:\nGiven an input file my_input.json:\n\n{\n my_variable: my_value\n}\n\nAnd a template file tmpl.jinja:\n\nThis is written in my file together with {{my_input.my_variable}}\n\nThis job will produce an output file:\n\nThis is written in my file together with my_value\n'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--output_file', '-o', required=True, help='the output file')
parser.add_argument('--template_file', '-t', required=True, help='the jinja2 template file')
parser.add_argument('--input_files', '-i', nargs='+', help='list of json and yaml input files')
return parser |
def printf_chk_call(string: Union[(str, Variable)], variable_1: Variable, variable_2: Variable, memory: int) -> Call:
    """Build an IR Call to ``__printf_chk(1, fmt, variable_1, variable_2)``.

    A plain ``str`` format argument is wrapped into a char-pointer Constant;
    a Variable is passed through unchanged. Returns a void-pointer Call tied
    to the given memory version.
    """
    if isinstance(string, str):
        fmt_arg = Constant(string, Pointer(Integer(8, False), 32))
    else:
        fmt_arg = string
    arguments = [Constant(1, Integer(32, True)), fmt_arg, variable_1, variable_2]
    return Call(imp_function_symbol('__printf_chk'), arguments, Pointer(CustomType('void', 0), 32), memory)
class OptionSeriesFunnel3dDataDragdropGuideboxDefault(Options):
    """Highcharts ``series.funnel3d.data.dragDrop.guideBox.default`` options.

    NOTE(review): each option appears twice -- a getter returning the
    Highcharts default followed by a setter with the same name; in plain
    Python the setter shadows the getter. This matches the lost
    ``@property``/``@<name>.setter`` decorator pattern seen elsewhere in this
    file -- confirm against the original generated source. Getter values are
    the Highcharts defaults.
    """

    def className(self):
        return self._config_get('highcharts-drag-box-default')

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        return self._config_get('rgba(0, 0, 0, 0.1)')

    def color(self, text: str):
        self._config(text, js_type=False)

    def cursor(self):
        return self._config_get('move')

    def cursor(self, text: str):
        self._config(text, js_type=False)

    def lineColor(self):
        return self._config_get('#888')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(1)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        return self._config_get(900)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): the four lines below look like the residue of stripped click
# decorators (a @click.command(help=...), @click.pass_context, a
# @click.option('--mnemonic_language', ...) and a shared keys-arguments
# decorator) -- confirm against the original module before editing.
(help=load_text(['arg_new_mnemonic', 'help'], func='new_mnemonic'))
_context
_option(callback=captive_prompt_callback((lambda mnemonic_language: fuzzy_reverse_dict_lookup(mnemonic_language, MNEMONIC_LANG_OPTIONS)), choice_prompt_func((lambda : load_text(['arg_mnemonic_language', 'prompt'], func='new_mnemonic')), languages)), default=(lambda : load_text(['arg_mnemonic_language', 'default'], func='new_mnemonic')), help=(lambda : load_text(['arg_mnemonic_language', 'help'], func='new_mnemonic')), param_decls='--mnemonic_language', prompt=choice_prompt_func((lambda : load_text(['arg_mnemonic_language', 'prompt'], func='new_mnemonic')), languages))
_keys_arguments_decorator
def new_mnemonic(ctx: click.Context, mnemonic_language: str, **kwargs: Any) -> None:
    """Generate a new mnemonic, require the user to retype it correctly,
    then forward to the key-generation command with the mnemonic in context.
    """
    mnemonic = get_mnemonic(language=mnemonic_language, words_path=WORD_LISTS_PATH)
    test_mnemonic = ''
    # Loop until the retyped mnemonic reconstructs to the generated one.
    while (mnemonic != reconstruct_mnemonic(test_mnemonic, WORD_LISTS_PATH)):
        click.clear()
        click.echo(load_text(['msg_mnemonic_presentation']))
        click.echo(('\n\n%s\n\n' % mnemonic))
        click.pause(load_text(['msg_press_any_key']))
        click.clear()
        test_mnemonic = click.prompt((load_text(['msg_mnemonic_retype_prompt']) + '\n\n'))
    click.clear()
    # Hand the confirmed mnemonic (no password) to generate_keys, starting
    # validator indexing at 0.
    ctx.obj = {'mnemonic': mnemonic, 'mnemonic_password': ''}
    ctx.params['validator_start_index'] = 0
    ctx.forward(generate_keys)
def make_gaussian_pokes(grid, mu, sigma, cutoff=5):
    """Create sparse Gaussian 'poke' functions centered at the points of mu.

    Parameters
    ----------
    grid : grid object or None
        Target grid; when None the raw evaluation closures are returned
        instead of their supersampled evaluations.
    mu : point collection exposing ``.points`` and ``.size``
        Gaussian centers.
    sigma : scalar or array-like
        Gaussian width(s); a scalar is broadcast to one width per center.
    cutoff : float or None
        Radius in units of sigma beyond which the response is zeroed
        (and the profile is shifted so it reaches zero there).
    """
    # Broadcast a scalar sigma to one width per center.
    sigma = (np.ones(mu.size) * sigma)

    def poke(m, s):
        # Bind one center m and width s into an evaluation closure.
        def eval_func(func_grid):
            # Squared distance from the center in the grid's native frame.
            if func_grid.is_('cartesian'):
                r2 = (((func_grid.x - m[0]) ** 2) + ((func_grid.y - m[1]) ** 2))
            else:
                r2 = (func_grid.shifted((- m)).as_('polar').r ** 2)
            res = np.exp((((- 0.5) * r2) / (s ** 2)))
            if (cutoff is not None):
                # Shift so the profile hits zero exactly at the cutoff radius,
                # then hard-zero everything beyond it.
                res -= np.exp(((- 0.5) * (cutoff ** 2)))
                res[(r2 > ((cutoff * s) ** 2))] = 0
            # Store sparsely; zeros introduced above are dropped.
            res = csr_matrix(res)
            res.eliminate_zeros()
            return res
        return eval_func
    pokes = [poke(m, s) for (m, s) in zip(mu.points, sigma)]
    if (grid is None):
        return pokes
    else:
        return evaluate_supersampled(pokes, grid, 1, make_sparse=True)
class VolumeBars(Op):
    # Aggregates a tick stream into constant-volume bars; the public
    # docstring is taken from Tickfilter.volumebars below.
    __slots__ = ('_volume', 'bars')
    __doc__ = Tickfilter.volumebars.__doc__
    # Accumulated bars (project BarList type).
    bars: BarList

    def __init__(self, volume, source=None):
        # volume: traded size threshold that completes a bar;
        # source: optional upstream op passed to the Op base.
        Op.__init__(self, source)
        self._volume = volume
        self.bars = BarList()

    def on_source(self, time, price, size):
        """Fold one tick into the bar list and emit the updated list."""
        if ((not self.bars) or (self.bars[(- 1)].volume >= self._volume)):
            # No bar yet, or the last bar already reached the volume
            # threshold: open a fresh single-tick bar (OHLC all at price).
            bar = Bar(time, price, price, price, price, size, 1)
            self.bars.append(bar)
        else:
            # Extend the current (incomplete) bar with this tick.
            bar = self.bars[(- 1)]
            bar.high = max(bar.high, price)
            bar.low = min(bar.low, price)
            bar.close = price
            bar.volume += size
            bar.count += 1
            if (bar.volume >= self._volume):
                # The bar just completed; notify bar-list listeners.
                self.bars.updateEvent.emit(self.bars, True)
        self.emit(self.bars)
class VegaView():
    """Python-side proxy for a Vega view attached to an HTML component.

    The data-mutation and run hooks are currently stubs; only the
    constructor stores state.
    """

    def __init__(self, component: primitives.HtmlModel, selector: str, js_code: str=None, set_var: bool=None, page: primitives.PageModel=None):
        # BUG FIX: the original performed two tuple assignments in which the
        # second overwrote self.component with `page`; the page is now stored
        # under its own attribute and self.component keeps the component.
        self.component = component
        self._selector = selector
        self.varName = js_code
        self._js = []
        self.setVar = set_var
        self.page = page

    def change(self, name, changeset):
        pass

    def insert(self, name, tuples):
        # BUG FIX: body was missing entirely (a SyntaxError); stubbed like
        # the other hooks.
        pass

    def remove(self, name, tuples):
        pass

    def run(self, encode=None, prerun=None, postrun=None):
        pass

    def runAfter(self, callback):
        pass

    def runAsync(self, encode=None, prerun=None, postrun=None):
        pass
def test_functional_block_multi_arg():
    """FunctionalBlock with two input keys concatenating along the last dim."""
    in_dict = build_multi_input_dict(dims=[[100, 64, 1], [100, 64, 1]])

    def my_func(in_key_0, in_key_1):
        # Concatenate the two inputs along their trailing dimension.
        return torch.cat((in_key_0, in_key_1), dim=(- 1))
    net: FunctionalBlock = FunctionalBlock(in_keys=['in_key_0', 'in_key_1'], out_keys='out_key', in_shapes=[(100, 64, 1), (100, 64, 1)], func=my_func)
    # Smoke-check that the block's repr works.
    str(net)
    out_dict = net(in_dict)
    assert isinstance(out_dict, Dict)
    assert set(net.out_keys).issubset(set(out_dict.keys()))
    # Two (100, 64, 1) inputs concatenated -> (100, 64, 2).
    assert (out_dict[net.out_keys[0]].shape == (100, 64, 2))
def assert_listener_item_equal(test_case, item1, item2, msg=None):
    """Assert that two listener items are equal field by field.

    Compares each scalar field with ``test_case.assertEqual`` and compares
    ``next`` only when the two objects are not the very same reference
    (identity short-circuit avoids a redundant deep comparison).
    """

    def get_msg(name, msg):
        # Prefix the field name; append the caller-supplied message when given.
        suffix = '' if (msg is None) else msg
        return '{name} mismatched. {msg}'.format(name=name, msg=suffix)

    scalar_fields = (
        'name',
        'metadata_name',
        'metadata_defined',
        'is_anytrait',
        'dispatch',
        'notify',
        'is_list_handler',
        'type',
    )
    for field in scalar_fields:
        test_case.assertEqual(
            getattr(item1, field),
            getattr(item2, field),
            msg=get_msg(field, msg),
        )
    if item1.next is not item2.next:
        test_case.assertEqual(item1.next, item2.next, msg=get_msg('next', msg))
# NOTE(review): the line below appears to be a stripped pytest decorator
# (@pytest.mark.parametrize(...)) parameterizing the tvh_manager fixture
# with a frozen date/time of 2021-11-26 19:05 -- confirm.
.parametrize('tvh_manager', [(2021, 11, 26, 19, 5)], indirect=True)
def test_tvh_widget_popup(tvh_manager):
    """Clicking the bar toggles the TVHeadend widget popup: the first click
    shows the upcoming-recordings text, the second click dismisses it."""
    tvh_manager.c.bar['top'].fake_button_press(0, 'top', 0, 0, 1)
    (_, text) = tvh_manager.c.widget['tvhwidget'].eval('self.popup.text')
    assert (text == 'Upcoming recordings:\nFri 26 Nov 18:55: TVH Widget Test 1\nFri 26 Nov 19:05: TVH Widget Test 2')
    tvh_manager.c.bar['top'].fake_button_press(0, 'top', 0, 0, 1)
    (_, result) = tvh_manager.c.widget['tvhwidget'].eval('self.popup is None')
    assert (result == 'True')
class Element():
    """A named element backed by a lazily-built data source.

    The source is built on first access from the owning dataset (or source
    provider) and cached for subsequent item lookups.
    """

    def __init__(self, string, dataset_or_source, options):
        self.dataset_or_source = dataset_or_source
        self._source = None  # built lazily on first access to `source`
        self._init_string = string
        self.options = options
        self.name = string
        assert (self.name is not None)

    @property
    def source(self):
        # BUG FIX: restored the @property decorator. `get_item` indexes
        # `self.source[i]`, which only works when `source` is a property;
        # as a plain method it would index the bound-method object and raise
        # TypeError. This matches the stripped-decorator pattern seen
        # elsewhere in this file.
        if (self._source is None):
            self._source = self.dataset_or_source.build_source_for_element(self)
        return self._source

    def get_item(self, i):
        """Return item ``i`` from the (lazily built) source."""
        return self.source[i]

    def mutate(self):
        # Elements are immutable by default; subclasses may override.
        return self

    def func(self):
        # NOTE(review): likely also intended as a @property in the original
        # source; left as a method absent in-file evidence.
        return as_numpy_func(self.source, self.options)
class WebSocket(HTTPConnection):
    """ASGI WebSocket connection wrapper.

    Tracks the handshake state of both directions independently
    (``client_state`` for messages received from the client,
    ``application_state`` for messages sent by the application) and enforces
    the legal ASGI websocket message ordering, raising RuntimeError on
    protocol violations.
    """

    def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:
        super().__init__(scope)
        assert (scope['type'] == 'websocket')
        self._receive = receive
        self._send = send
        # Both sides start in the CONNECTING (pre-handshake) state.
        self.client_state = WebSocketState.CONNECTING
        self.application_state = WebSocketState.CONNECTING

    async def receive(self) -> Message:
        """Receive one ASGI message, validating it against the client state."""
        if (self.client_state == WebSocketState.CONNECTING):
            # The very first message must be the connect handshake.
            message = (await self._receive())
            message_type = message['type']
            if (message_type != 'websocket.connect'):
                raise RuntimeError(f'Expected ASGI message "websocket.connect", but got {message_type!r}')
            self.client_state = WebSocketState.CONNECTED
            return message
        elif (self.client_state == WebSocketState.CONNECTED):
            message = (await self._receive())
            message_type = message['type']
            if (message_type not in {'websocket.receive', 'websocket.disconnect'}):
                raise RuntimeError(f'Expected ASGI message "websocket.receive" or "websocket.disconnect", but got {message_type!r}')
            if (message_type == 'websocket.disconnect'):
                self.client_state = WebSocketState.DISCONNECTED
            return message
        else:
            # DISCONNECTED: no further receives are legal.
            raise RuntimeError('Cannot call "receive" once a disconnect message has been received.')

    async def send(self, message: Message) -> None:
        """Send one ASGI message, validating it against the application state."""
        if (self.application_state == WebSocketState.CONNECTING):
            # Before accepting, only accept/close are legal responses.
            message_type = message['type']
            if (message_type not in {'websocket.accept', 'websocket.close'}):
                raise RuntimeError(f'Expected ASGI message "websocket.accept" or "websocket.close", but got {message_type!r}')
            if (message_type == 'websocket.close'):
                self.application_state = WebSocketState.DISCONNECTED
            else:
                self.application_state = WebSocketState.CONNECTED
            (await self._send(message))
        elif (self.application_state == WebSocketState.CONNECTED):
            message_type = message['type']
            if (message_type not in {'websocket.send', 'websocket.close'}):
                raise RuntimeError(f'Expected ASGI message "websocket.send" or "websocket.close", but got {message_type!r}')
            if (message_type == 'websocket.close'):
                self.application_state = WebSocketState.DISCONNECTED
            (await self._send(message))
        else:
            raise RuntimeError('Cannot call "send" once a close message has been sent.')

    async def accept(self, subprotocol: typing.Optional[str]=None, headers: typing.Optional[typing.Iterable[typing.Tuple[(bytes, bytes)]]]=None) -> None:
        """Accept the connection, consuming the connect message if pending."""
        headers = (headers or [])
        if (self.client_state == WebSocketState.CONNECTING):
            # Haven't seen the 'websocket.connect' message yet: wait for it.
            (await self.receive())
        (await self.send({'type': 'websocket.accept', 'subprotocol': subprotocol, 'headers': headers}))

    def _raise_on_disconnect(self, message: Message) -> None:
        # Translate a disconnect message into a WebSocketDisconnect exception.
        if (message['type'] == 'websocket.disconnect'):
            raise WebSocketDisconnect(message['code'], message.get('reason'))

    async def receive_text(self) -> str:
        """Receive one text frame; raises WebSocketDisconnect on disconnect."""
        if (self.application_state != WebSocketState.CONNECTED):
            raise RuntimeError('WebSocket is not connected. Need to call "accept" first.')
        message = (await self.receive())
        self._raise_on_disconnect(message)
        return typing.cast(str, message['text'])

    async def receive_bytes(self) -> bytes:
        """Receive one binary frame; raises WebSocketDisconnect on disconnect."""
        if (self.application_state != WebSocketState.CONNECTED):
            raise RuntimeError('WebSocket is not connected. Need to call "accept" first.')
        message = (await self.receive())
        self._raise_on_disconnect(message)
        return typing.cast(bytes, message['bytes'])

    async def receive_json(self, mode: str='text') -> typing.Any:
        """Receive one frame and decode it as JSON (text or binary mode)."""
        if (mode not in {'text', 'binary'}):
            raise RuntimeError('The "mode" argument should be "text" or "binary".')
        if (self.application_state != WebSocketState.CONNECTED):
            raise RuntimeError('WebSocket is not connected. Need to call "accept" first.')
        message = (await self.receive())
        self._raise_on_disconnect(message)
        if (mode == 'text'):
            text = message['text']
        else:
            text = message['bytes'].decode('utf-8')
        return json.loads(text)

    async def iter_text(self) -> typing.AsyncIterator[str]:
        """Yield text frames until the client disconnects."""
        try:
            while True:
                (yield (await self.receive_text()))
        except WebSocketDisconnect:
            pass

    async def iter_bytes(self) -> typing.AsyncIterator[bytes]:
        """Yield binary frames until the client disconnects."""
        try:
            while True:
                (yield (await self.receive_bytes()))
        except WebSocketDisconnect:
            pass

    async def iter_json(self) -> typing.AsyncIterator[typing.Any]:
        """Yield decoded JSON frames until the client disconnects."""
        try:
            while True:
                (yield (await self.receive_json()))
        except WebSocketDisconnect:
            pass

    async def send_text(self, data: str) -> None:
        """Send one text frame."""
        (await self.send({'type': 'websocket.send', 'text': data}))

    async def send_bytes(self, data: bytes) -> None:
        """Send one binary frame."""
        (await self.send({'type': 'websocket.send', 'bytes': data}))

    async def send_json(self, data: typing.Any, mode: str='text') -> None:
        """Send ``data`` as compact JSON, as a text or binary frame."""
        if (mode not in {'text', 'binary'}):
            raise RuntimeError('The "mode" argument should be "text" or "binary".')
        text = json.dumps(data, separators=(',', ':'), ensure_ascii=False)
        if (mode == 'text'):
            (await self.send({'type': 'websocket.send', 'text': text}))
        else:
            (await self.send({'type': 'websocket.send', 'bytes': text.encode('utf-8')}))

    async def close(self, code: int=1000, reason: typing.Optional[str]=None) -> None:
        """Close the connection with the given code and optional reason."""
        (await self.send({'type': 'websocket.close', 'code': code, 'reason': (reason or '')}))
class Access():
    """One batched modbus access: contiguous values read from or written to
    a device in a single request.

    ``modbus_type`` selects the modbus function family: 'c'/'C' coils,
    'd' discrete inputs, 'h'/'H' holding registers, 'i' input registers.
    Each value has an address, a struct pack format, and optional name,
    presenter spec, and value-to-write.
    """

    def __init__(self, modbus_type, addresses, pack_types, values=None, names=None, presenters=None, byte_order='be', silent=False):
        self.modbus_type = modbus_type
        # None per address means "read"; any non-None entry makes this a write.
        self.values_to_write = (values or ([None] * len(addresses)))
        self.addresses = addresses
        self.pack_types = pack_types
        self.names = (names or ([None] * len(addresses)))
        self.presenters = (presenters or ([None] * len(addresses)))
        self.byte_order = byte_order
        self.silent = silent

    def address(self):
        """Base (first) address of the access."""
        return self.addresses[0]

    def pack_type(self):
        """struct pack format of the first value."""
        return self.pack_types[0]

    def presenter(self):
        """Presenter spec of the first value (may be None)."""
        return self.presenters[0]

    def endianness(self):
        """Leading byte-order character of the first pack format."""
        return self.pack_type()[0]

    @property
    def write(self):
        """True when at least one value is scheduled to be written.

        BUG FIX: restored as a property. Every use site reads ``self.write``
        as an attribute (``operations``, ``append``, ``perform``,
        ``__str__``); as a plain method the bound-method object was always
        truthy, so pure reads were dispatched down the write path.
        """
        return any(((x is not None) for x in self.values_to_write))

    def size(self):
        """Total size in registers (register types) or bytes (coil types)."""
        total = 0
        for p in self.pack_types:
            size = struct.calcsize(p)
            if (self.modbus_type in ('h', 'H', 'i')):
                # Register-based types: struct sizes must map to whole
                # 16-bit registers.
                assert ((size % 2) == 0)
                size //= 2
            total += size
        return total

    def operations(self):
        """Pack formats (reads) or (format, value) pairs (writes)."""
        if self.write:
            return zip(self.pack_types, self.values_to_write)
        else:
            return self.pack_types

    def append(self, other):
        """Merge another Access's values into this one (batching)."""
        self.names.extend(other.names)
        self.pack_types.extend(other.pack_types)
        self.addresses.extend(other.addresses)
        self.presenters.extend(other.presenters)
        if self.write:
            self.values_to_write.extend(other.values_to_write)

    def labels(self):
        """Display label per value: the name when given, else the address."""
        return ((name or address) for (name, address) in zip(self.names, self.addresses))

    def print_values(self, definitions=None):
        """Log the values read by ``perform`` (sets ``self.values``)."""
        for (label, value, presenter) in zip(self.labels(), self.values, self.presenters):
            if (len(value) == 1):
                # Unwrap single-element unpack tuples.
                value = value[0]
            if self.silent:
                logging.info('{}'.format(value))
            else:
                logging.info('{}: {} {}'.format(label, value, self.present_value(value, presenter, definitions)))

    def present_value(self, value, presenter, definitions):
        """Human-readable rendering of an integer value (hex + presenter)."""
        if (type(value) != int):
            return ''
        presentation = [hex(value)]
        if presenter:
            if (presenter[0] == ':'):
                # Enumeration presenter: look up the value's name.
                presentation.append(definitions.presenters[presenter][value])
            elif (presenter[0] == '|'):
                # Bitmask presenter: list the names of the set bits.
                names = []
                for (bit, name) in definitions.presenters[presenter].items():
                    if (value & (1 << bit)):
                        names.append(name)
                presentation.append(' | '.join(names))
        return ' '.join(presentation)

    def perform(self, modbus):
        """Execute the access (send + receive) over the given connection."""
        if self.write:
            self.write_registers_send(modbus)
            self.write_registers_receive(modbus)
        else:
            self.read_registers_send(modbus)
            self.read_registers_receive(modbus)

    def read_registers_send(self, modbus):
        """Issue the appropriate modbus read request for this access."""
        if (self.modbus_type in 'cCd'):
            # Coils/discrete inputs: one bit entity per packed byte.
            n_registers = 0
            for pack_type in self.pack_types:
                n_registers += struct.calcsize(pack_type)
        else:
            # Register types: two bytes per 16-bit register.
            n_bytes = 0
            for pack_type in self.pack_types:
                n_bytes += struct.calcsize(pack_type)
            assert ((n_bytes % 2) == 0)
            n_registers = (n_bytes // 2)
        reader = {'c': 'read_coils', 'C': 'read_coils', 'd': 'read_discrete_inputs', 'h': 'read_holding_registers', 'H': 'read_holding_registers', 'i': 'read_input_registers'}[self.modbus_type]
        self.request = getattr(modbus.protocol, reader)(modbus.slave_id, self.address(), n_registers)
        logging.debug(' < %s >', dump(self.request))
        modbus.send(self.request)

    def read_registers_receive(self, modbus):
        """Receive and unpack the response to a read request into .values."""
        try:
            words = modbus.receive(self.request)
        except umodbus.exceptions.IllegalDataAddressError:
            self.values = ('Invalid address',)
            return
        except umodbus.exceptions.IllegalFunctionError:
            self.values = ('Invalid modbus type',)
            return
        logging.debug('    %s', words)
        if (self.modbus_type in 'cd'):
            # Bit types come back as one value per entity already.
            self.values = [(w,) for w in words]
        else:
            # Repack the 16-bit words into a byte string, then cut it into
            # per-value chunks according to each pack format.
            if (self.byte_order == 'mixed'):
                repack_byte_order = '<'
            else:
                repack_byte_order = '>'
            packed = struct.pack('{}{}H'.format(repack_byte_order, len(words)), *words)
            self.values = []
            for pack in self.pack_types:
                size = struct.calcsize(pack)
                self.values.append(struct.unpack(pack, packed[:size]))
                packed = packed[size:]

    def write_registers_send(self, modbus):
        """Issue the appropriate modbus write request for this access."""
        if (self.modbus_type == 'c'):
            # Single-coil function when possible, multi-coil otherwise.
            if (len(self.values_to_write) == 1):
                message = modbus.protocol.write_single_coil(modbus.slave_id, self.address(), int(self.values_to_write[0]))
            else:
                message = modbus.protocol.write_multiple_coils(modbus.slave_id, self.address(), [int(v) for v in self.values_to_write])
        elif (self.modbus_type == 'C'):
            message = modbus.protocol.write_multiple_coils(modbus.slave_id, self.address(), [int(v) for v in self.values_to_write])
        else:
            # Register write: pack each value and split into 16-bit words.
            words = []
            if (self.byte_order == 'mixed'):
                register_fmt = '<H'
            else:
                register_fmt = '>H'
            for (pack_type, value) in zip(self.pack_types, self.values_to_write):
                n_bytes = struct.calcsize(pack_type)
                assert ((n_bytes % 2) == 0)
                if (('f' in pack_type) or ('d' in pack_type)):
                    value = float(value)
                else:
                    # Base 0 accepts decimal, hex (0x...), octal, binary.
                    value = int(value, 0)
                words.extend([struct.unpack(register_fmt, bytes(byte_pair))[0] for byte_pair in grouper(struct.pack(pack_type, value), 2)])
            if ((self.modbus_type == 'h') and (len(words) == 1)):
                message = modbus.protocol.write_single_register(modbus.slave_id, self.address(), words[0])
            else:
                message = modbus.protocol.write_multiple_registers(modbus.slave_id, self.address(), words)
        logging.debug(' < %s >', dump(message))
        self.request = message
        return modbus.send(message)

    def write_registers_receive(self, modbus):
        """Consume the (ignored) response to a write request."""
        modbus.receive(self.request)

    def __str__(self):
        return '{}{}/{}{}'.format(self.modbus_type, self.address(), self.pack_types, ('={}'.format(self.values_to_write) if self.write else ''))

    def __repr__(self):
        return 'Access({!r}, {!r}, {!r}, {!r}, {!r})'.format(self.modbus_type, self.addresses, self.pack_types, self.values_to_write, self.names)
def upgrade():
    """Normalize the events.type string column into an event_types table.

    Creates the event_types lookup table, renames events.type (and the
    version-table copy) to event_type_id, backfills event_types from the
    distinct legacy string values (with a slug derived from the name),
    rewrites both columns to integer ids, and adds the foreign key.
    """
    op.create_table('event_types', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(), nullable=False), sa.Column('slug', sa.String(), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('id'), sa.UniqueConstraint('slug'))
    op.alter_column('events', 'type', new_column_name='event_type_id')
    op.alter_column('events_version', 'type', new_column_name='event_type_id')
    # At this point event_type_id still holds the legacy type NAME strings;
    # insert each distinct name with a slugified form (lowercased, '&'/','
    # stripped, spaces dashed).
    op.execute("INSERT INTO event_types(name, slug) SELECT DISTINCT event_type_id, lower(replace(regexp_replace(event_type_id, '& |,', '', 'g'), ' ', '-')) FROM events where not exists (SELECT 1 FROM event_types where event_types.name=events.event_type_id) and event_type_id is not null;")
    # Replace the name strings with the lookup ids, then retype the column.
    op.execute('UPDATE events SET event_type_id = (SELECT id FROM event_types WHERE event_types.name=events.event_type_id)')
    op.execute('ALTER TABLE events ALTER COLUMN event_type_id TYPE integer USING event_type_id::integer')
    op.create_foreign_key(None, 'events', 'event_types', ['event_type_id'], ['id'], ondelete='CASCADE')
    # Same rewrite for the versioning table (no FK there).
    op.execute('UPDATE events_version SET event_type_id = (SELECT id FROM event_types WHERE event_types.name=events_version.event_type_id)')
    op.execute('ALTER TABLE events_version ALTER COLUMN event_type_id TYPE integer USING event_type_id::integer')
def prepare_runtime_extensions_package_release() -> None:
    """Build source trees and distributions for the standalone runtime packages.

    For each runtime package (kfp-notebook, airflow-notebook): copy the
    setup.py template into a fresh working directory, substitute the
    name/version/requirements placeholders, copy the matching elyra
    sub-package sources, and build wheel + sdist distributions.
    """
    print('')
    print(' Preparing Individual Packages ')
    print('')
    # package name -> extra install_requires entries for its setup.py
    packages = {'kfp-notebook': ['kfp>=1.6.3'], 'airflow-notebook': ['pygithub', 'black']}
    # package name -> elyra sub-package directory to bundle
    packages_source = {'kfp-notebook': 'kfp', 'airflow-notebook': 'airflow'}
    for package in packages:
        package_source_dir = os.path.join(config.work_dir, package)
        print(f'Preparing package : {package} at {package_source_dir}')
        if os.path.exists(package_source_dir):
            # BUG FIX: the message previously printed config.source_dir, but
            # the directory actually removed is the package working directory.
            print(f'Removing working directory: {package_source_dir}')
            shutil.rmtree(package_source_dir)
        check_run(['mkdir', '-p', package_source_dir], cwd=config.work_dir)
        print(f"Copying : {_source('etc/templates/setup.py')} to {package_source_dir}")
        check_run(['cp', _source('etc/templates/setup.py'), package_source_dir], cwd=config.work_dir)
        setup_file = os.path.join(package_source_dir, 'setup.py')
        sed(setup_file, '{{package-name}}', package)
        sed(setup_file, '{{version}}', config.new_version)
        # NOTE(review): the placeholder tokens below contain spaces
        # ('{{data - files}}', '{{install - requires}}'); they must match the
        # template verbatim -- confirm against etc/templates/setup.py.
        sed(setup_file, '{{data - files}}', '')
        requires = ''.join((f"'{dependency}'," for dependency in packages[package]))
        sed(setup_file, '{{install - requires}}', requires)
        source_dir = os.path.join(config.source_dir, 'elyra', packages_source[package])
        dest_dir = os.path.join(package_source_dir, 'elyra', packages_source[package])
        print(f'Copying package source from {source_dir} to {dest_dir}')
        Path(os.path.join(package_source_dir, 'elyra')).mkdir(parents=True, exist_ok=True)
        shutil.copytree(source_dir, dest_dir)
        check_run(['python', 'setup.py', 'bdist_wheel', 'sdist'], cwd=package_source_dir)
    print('')
def fetch_data(zone_key: ZoneKey, session: (Session | None)=None, target_datetime: (datetime | None)=None) -> tuple[(list, str, str)]:
    """Fetch raw production/exchange records for a French territory.

    Args:
        zone_key: the electricitymaps zone to fetch.
        session: optional requests Session to reuse.
        target_datetime: when given, fetch a historical 3-day window ending
            at this datetime; otherwise fetch the live dataset.

    Returns:
        (records, datetime_key, source_domain) where datetime_key is the
        field name holding the timestamp in each record.

    Raises:
        ParserException: on missing live support, empty data, rate limiting,
        malformed queries, or unexpected response shapes.
    """
    ses = (session or Session())
    if ((target_datetime is None) and (zone_key not in LIVE_DATASETS.keys())):
        raise ParserException('FR_O.py', f'Live data not implemented for {zone_key} in this parser.', zone_key)
    target_date = (target_datetime.strftime('%Y-%m-%d') if target_datetime else None)
    # Historical queries cover a 3-day window ending at the target date.
    past_date = ((target_datetime - timedelta(days=3)).strftime('%Y-%m-%d') if target_datetime else None)
    URL_QUERIES: dict[(str, (str | None))] = ({'timezone': 'UTC', 'order_by': 'date_heure', 'where': f"date_heure >= date'{past_date}' AND date_heure <= date'{target_date}'", 'refine': f'territoire:{HISTORICAL_MAPPING[zone_key]}'} if target_datetime else {'timezone': 'UTC', 'order_by': 'date'})
    url = generate_url(zone_key, target_datetime)
    response: Response = ses.get(url, params=URL_QUERIES)
    data: ((dict | list) | None) = response.json()
    if (data == []):
        raise ParserException('FR_O.py', (f"No data available for {zone_key} for {target_datetime.strftime('%Y')}" if target_datetime else f'No live data available for {zone_key}.'), zone_key)
    elif isinstance(data, dict):
        # A dict response is an API error payload, not data.
        if (data.get('errorcode') == '10002'):
            raise ParserException('FR_O.py', f"Rate limit exceeded. Please try again later after: {data.get('reset_time')}")
        elif (data.get('error_code') == 'ODSQLError'):
            raise ParserException('FR_O.py', 'Query malformed. Please check the parameters. If this was previously working there has likely been a change in the API.')
    if (not isinstance(data, list)):
        raise ParserException('FR_O.py', (f'Unexpected data format for {zone_key} for {target_datetime}' if target_datetime else f'Unexpected data format for {zone_key}.'), zone_key)
    # Source attribution is the API host name.
    source = url.split('//')[1].split('/')[0]
    return (data, ('date_heure' if target_datetime else 'date'), source)
def lindh_style_guess(geom, ks, rhos):
    """Lindh-type model Hessian guess.

    Builds force constants as the base constant for the coordinate type
    (looked up by the number of atoms involved) scaled by the product of
    pairwise rho factors along consecutive atom pairs, and feeds them to
    ``improved_guess`` for bonds, bends and dihedrals alike.
    """

    def k_func(indices):
        rho_product = 1
        # Consecutive atom pairs: (i0,i1), (i1,i2), ...
        for i1, i2 in zip(indices, indices[1:]):
            rho_product *= rhos[(i1, i2)]
        return ks[len(indices)] * rho_product

    return improved_guess(geom, bond_func=k_func, bend_func=k_func, dihedral_func=k_func)
class OptionSeriesBarSonificationDefaultspeechoptionsMappingVolume(Options):
    """Highcharts ``series.bar.sonification.defaultSpeechOptions.mapping.volume``
    options.

    NOTE(review): each option appears twice -- a getter followed by a setter
    with the same name; in plain Python the setter shadows the getter. This
    matches the lost ``@property``/``@<name>.setter`` decorator pattern seen
    elsewhere in this file -- confirm against the original generated source.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the two bare lines below look like stripped Allure decorators
# (likely '@...feature' / '@...story') — restore and verify against upstream.
.feature('unit')
.story('common', 'storage_client')
class TestStorageClientExceptions():
    """Unit tests for the storage-client exception hierarchy.

    Each test exercises an exception class twice — once through
    ``pytest.raises`` and once through a plain try/except — and pins its
    concrete class, its base class, and its ``(code, message)`` payload.
    """
    def test_init_StorageClientException(self):
        """StorageClientException requires 'code'; omitting it raises TypeError."""
        with pytest.raises(Exception) as excinfo:
            raise StorageClientException()
        assert (excinfo.type is TypeError)
        assert ("__init__() missing 1 required positional argument: 'code'" == str(excinfo.value))
    def test_default_init_StorageClientException(self):
        """Only 'code' given: message defaults to None."""
        with pytest.raises(Exception) as excinfo:
            raise StorageClientException(40)
        assert (excinfo.type is StorageClientException)
        assert issubclass(excinfo.type, Exception)
        try:
            raise StorageClientException(40)
        except Exception as ex:
            assert (ex.__class__ is StorageClientException)
            assert issubclass(ex.__class__, Exception)
            assert (40 == ex.code)
            assert (ex.message is None)
    def test_init_args_StorageClientException(self):
        """Keyword code/message are stored and str() shows the message."""
        with pytest.raises(Exception) as excinfo:
            raise StorageClientException(code=11, message='foo')
        assert (excinfo.type is StorageClientException)
        assert issubclass(excinfo.type, Exception)
        assert ('foo' == str(excinfo.value))
        try:
            raise StorageClientException(code=11, message='foo')
        except Exception as ex:
            assert (ex.__class__ is StorageClientException)
            assert issubclass(ex.__class__, Exception)
            assert (11 == ex.code)
            assert ('foo' == ex.message)
    def test_BadRequest(self):
        """BadRequest defaults to HTTP-style 400 / 'Bad request'."""
        with pytest.raises(Exception) as excinfo:
            raise BadRequest()
        assert (excinfo.type is BadRequest)
        assert issubclass(excinfo.type, StorageClientException)
        try:
            raise BadRequest()
        except Exception as ex:
            assert (ex.__class__ is BadRequest)
            assert issubclass(ex.__class__, StorageClientException)
            assert (400 == ex.code)
            assert ('Bad request' == ex.message)
    def test_StorageServiceUnavailable(self):
        """StorageServiceUnavailable defaults to 503."""
        with pytest.raises(Exception) as excinfo:
            raise StorageServiceUnavailable()
        assert (excinfo.type is StorageServiceUnavailable)
        assert issubclass(excinfo.type, StorageClientException)
        try:
            raise StorageServiceUnavailable()
        except Exception as ex:
            assert (ex.__class__ is StorageServiceUnavailable)
            assert issubclass(ex.__class__, StorageClientException)
            assert (503 == ex.code)
            assert ('Storage service is unavailable' == ex.message)
    def test_InvalidServiceInstance(self):
        """InvalidServiceInstance defaults to 502."""
        with pytest.raises(Exception) as excinfo:
            raise InvalidServiceInstance()
        assert (excinfo.type is InvalidServiceInstance)
        assert issubclass(excinfo.type, StorageClientException)
        try:
            raise InvalidServiceInstance()
        except Exception as ex:
            assert (ex.__class__ is InvalidServiceInstance)
            assert issubclass(ex.__class__, StorageClientException)
            assert (502 == ex.code)
            assert ('Storage client needs a valid *Fledge storage* micro-service instance' == ex.message)
    def test_InvalidReadingsPurgeFlagParameters(self):
        """Purge-flag validation error is a BadRequest (400)."""
        with pytest.raises(Exception) as excinfo:
            raise InvalidReadingsPurgeFlagParameters()
        assert (excinfo.type is InvalidReadingsPurgeFlagParameters)
        assert issubclass(excinfo.type, BadRequest)
        try:
            raise InvalidReadingsPurgeFlagParameters()
        except Exception as ex:
            assert (ex.__class__ is InvalidReadingsPurgeFlagParameters)
            assert issubclass(ex.__class__, BadRequest)
            assert (400 == ex.code)
            assert ('Purge flag valid options are retain or purge only' == ex.message)
    def test_PurgeOneOfAgeAssetAndSize(self):
        """Missing purge criterion error is a BadRequest (400)."""
        with pytest.raises(Exception) as excinfo:
            raise PurgeOneOfAgeAssetAndSize()
        assert (excinfo.type is PurgeOneOfAgeAssetAndSize)
        assert issubclass(excinfo.type, BadRequest)
        try:
            raise PurgeOneOfAgeAssetAndSize()
        except Exception as ex:
            assert (ex.__class__ is PurgeOneOfAgeAssetAndSize)
            assert issubclass(ex.__class__, BadRequest)
            assert (400 == ex.code)
            assert ('Purge must specify one of age, size or asset' == ex.message)
    def test_PurgeOnlyOneOfAgeAndSize(self):
        """Conflicting purge criteria error is a BadRequest (400)."""
        with pytest.raises(Exception) as excinfo:
            raise PurgeOnlyOneOfAgeAndSize()
        assert (excinfo.type is PurgeOnlyOneOfAgeAndSize)
        assert issubclass(excinfo.type, BadRequest)
        try:
            raise PurgeOnlyOneOfAgeAndSize()
        except Exception as ex:
            assert (ex.__class__ is PurgeOnlyOneOfAgeAndSize)
            assert issubclass(ex.__class__, BadRequest)
            assert (400 == ex.code)
            assert ('Purge must specify only one of age or size' == ex.message)
    def test_StorageServerError(self):
        """StorageServerError carries code, reason and the raw error payload."""
        with pytest.raises(Exception) as excinfo:
            raise StorageServerError(code=400, reason='blah', error={'k': 'v'})
        assert (excinfo.type is StorageServerError)
        assert issubclass(excinfo.type, Exception)
        try:
            raise StorageServerError(code=400, reason='blah', error={'k': 'v'})
        except Exception as ex:
            assert (ex.__class__ is StorageServerError)
            assert issubclass(ex.__class__, Exception)
            assert (400 == ex.code)
            assert ('blah' == ex.reason)
            assert ({'k': 'v'} == ex.error)
def enable_tls():
    """Fixture-style factory returning a helper that switches the Fledge REST
    API from HTTP to HTTPS and restarts the service.

    The returned helper:
      1. PUTs ``{"enableHttp": "false"}`` to ``/fledge/category/rest_api``,
      2. waits ``wait_time`` seconds,
      3. PUTs ``/fledge/restart`` and waits ``2 * wait_time`` for the restart.

    ``auth`` selects the authorization header: 'password' uses PASSWORD_TOKEN,
    'certificate' uses CERT_TOKEN, anything else sends no auth header.
    """
    def _enable_tls(fledge_url, wait_time, auth):
        import http.client
        # NOTE(review): the original source had two bare "conn =" lines with
        # the right-hand side lost; http.client.HTTPConnection(fledge_url) is
        # the reconstruction (HTTP is still enabled at this point) — confirm
        # against the upstream test suite.
        conn = http.client.HTTPConnection(fledge_url)
        headers = None
        if (auth == 'password'):
            headers = {'authorization': PASSWORD_TOKEN}
        elif (auth == 'certificate'):
            headers = {'authorization': CERT_TOKEN}
        if (headers is None):
            conn.request('PUT', '/fledge/category/rest_api', json.dumps({'enableHttp': 'false'}))
        else:
            conn.request('PUT', '/fledge/category/rest_api', json.dumps({'enableHttp': 'false'}), headers=headers)
        r = conn.getresponse()
        assert (200 == r.status)
        time.sleep(wait_time)
        # Fresh connection for the restart request; the old one may be stale.
        conn = http.client.HTTPConnection(fledge_url)
        if (headers is None):
            conn.request('PUT', '/fledge/restart', json.dumps({}))
        else:
            conn.request('PUT', '/fledge/restart', json.dumps({}), headers=headers)
        r = conn.getresponse()
        assert (200 == r.status)
        # Restart takes longer than a config change; wait twice as long.
        time.sleep((wait_time * 2))
    return _enable_tls
class LiteEthPHYGMII(LiteXModule):
    """LiteEth GMII PHY: clock/reset generation plus TX and RX datapaths,
    with an optional MDIO management interface when the pads expose ``mdc``.
    """
    dw = 8  # datapath width in bits (GMII is byte-wide)
    # NOTE(review): upstream LiteX typically sets these to the actual GMII
    # clock frequencies (125e6); the '.0' placeholders here may be extraction
    # damage — confirm.
    tx_clk_freq = .0
    rx_clk_freq = .0
    def __init__(self, clock_pads, pads, with_hw_init_reset=True, model=False):
        self.model = model
        # Clock/reset generator driving the eth_tx/eth_rx clock domains.
        self.crg = LiteEthPHYGMIICRG(clock_pads, pads, with_hw_init_reset, model=model)
        # TX/RX datapaths, each renamed into its own clock domain.
        self.tx = ClockDomainsRenamer('eth_tx')(LiteEthPHYGMIITX(pads))
        self.rx = ClockDomainsRenamer('eth_rx')(LiteEthPHYGMIIRX(pads))
        (self.sink, self.source) = (self.tx.sink, self.rx.source)
        # MDIO is optional: only instantiated when the pads provide mdc.
        if hasattr(pads, 'mdc'):
            self.mdio = LiteEthPHYMDIO(pads)
class Notifier(hass.Hass):
    """AppDaemon app that fans notifications out to Telegram and Alexa TTS.

    Alexa announcements are rate limited: if the previous announcement was
    less than __WAIT_TIME__ seconds ago, the next one is deferred by
    __WAIT_TIME__ seconds via run_in.
    """
    def initialize(self):
        # Handles of scheduled callbacks so terminate() can cancel them.
        self.timer_handle_list = []
        self.alexa_tts = self.args['alexa_tts']
        self.alexa_media_player = self.args['alexa_media_player'].split(',')
        self.app_switch_alexa = self.args['app_switch_alexa']
        self.last_alexa_notification_time = None
    def notify(self, notify_name, message, useAlexa=True, useTelegram=True):
        """Send *message* via Telegram and/or Alexa (if the Alexa app switch is on)."""
        if useTelegram:
            self.log('Notifying via Telegram')
            self.call_service((__NOTIFY__ + notify_name), message=message)
        if (useAlexa and (self.get_state(self.app_switch_alexa) == 'on')):
            self.log('Notifying via Alexa')
            if ((self.last_alexa_notification_time is not None) and ((datetime.datetime.now() - self.last_alexa_notification_time) < datetime.timedelta(seconds=__WAIT_TIME__))):
                # Too soon after the last announcement: defer by __WAIT_TIME__.
                self.timer_handle_list.append(self.run_in(self.notify_callback, __WAIT_TIME__, message=message))
            else:
                # Fix: track this handle too; previously it was dropped, so
                # terminate() could not cancel a still-pending immediate callback.
                self.timer_handle_list.append(self.run_in(self.notify_callback, 0, message=message))
    def notify_callback(self, kwargs):
        """Deferred worker: record the announcement time and speak the message."""
        self.last_alexa_notification_time = datetime.datetime.now()
        self.call_service((__NOTIFY__ + self.alexa_tts), data={'type': 'announce', 'method': 'speak'}, target=self.alexa_media_player, message=kwargs['message'])
    def getAlexaDeviceForUserLocation(self, notify_name):
        """Map *notify_name* to an Alexa device via the user's location sensor.

        Group notifications go to the 'Wohnzimmer' device; unknown names or
        unmapped locations yield None.
        """
        if (notify_name == __GROUP_NOTIFICATIONS__):
            return self.args['alexa_to_location_mapping']['Wohnzimmer']
        elif (notify_name.lower() in self.args['user_location_sensors']):
            location = self.get_state(self.args['user_location_sensors'][notify_name.lower()])
            if (location in self.args['alexa_to_location_mapping']):
                return self.args['alexa_to_location_mapping'][location]
            else:
                return None
        else:
            self.log('Unknown notify_name: {}'.format(notify_name))
            return None
    def terminate(self):
        # Cancel every outstanding scheduled callback on app shutdown/reload.
        for timer_handle in self.timer_handle_list:
            self.cancel_timer(timer_handle)
def render_template(out, name, path, context, prefix=None):
    """Render the tenjin template *name* (searched on *path*) with *context*
    and write the result to the file-like *out*.

    *prefix* is forwarded to PrefixedLinePreprocessor when given.
    """
    if prefix:
        preprocessors = [tenjin.PrefixedLinePreprocessor(prefix=prefix)]
    else:
        preprocessors = [tenjin.PrefixedLinePreprocessor()]
    engine = TemplateEngine(path=path, pp=preprocessors)
    rendered = engine.render(name, context, {'to_str': str, 'escape': str})
    out.write(rendered)
def test_hexary_trie_avoid_over_pruning():
    """Insert 129 identical values under distinct keys into a pruning trie,
    then re-read every key and verify the reference counts stayed consistent
    (i.e. pruning did not drop nodes that are still referenced).
    """
    trie = HexaryTrie({}, prune=True)

    def _store(index, value):
        # Keys are RLP-encoded big-endian ints so they share prefixes heavily.
        encoded = rlp.encode(index, sedes=rlp.sedes.big_endian_int)
        trie[encoded] = value
        return encoded

    stored_keys = [_store(index, b'\x00' * 32) for index in range(129)]
    for stored_key in stored_keys:
        trie.get(stored_key)
    verify_ref_count(trie)
def test():
    """Interactive smoke test for the GCR/UCR colour-separation helpers.

    Runs every bundled 'img' asset through the function forms (gcr, ucr),
    the processor forms (BasicGCR, BasicUCR) and a plain CMYK conversion,
    displaying each result, then spot-checks that all outputs are CMYK.
    Not an automated test: it opens image windows via result.show().
    """
    from instakit.utils.static import asset
    from itertools import chain
    from os.path import relpath
    from pprint import pprint
    # Base path used only to shorten printed paths via relpath().
    start = '/usr/local/Cellar/python/3.7.2_2/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages'
    image_paths = list(map((lambda image_file: asset.path('img', image_file)), asset.listfiles('img')))
    image_inputs = list(map((lambda image_path: Mode.RGB.open(image_path)), image_paths))
    print('\t')
    print()
    # One callable per image: processors first, then the bare functions —
    # zip() pairs them with the images positionally.
    functions = (gcr, ucr)
    processors = (BasicGCR(), BasicUCR(), Mode.CMYK)
    callables = chain((processor.process for processor in processors), functions)
    image_components = zip(image_paths, image_inputs, callables)
    for (path, image, process_functor) in image_components:
        print(('TESTING: %s' % relpath(path, start=start)))
        print()
        tup = (image.size + (image.mode,))
        print((' Input: %sx%s %s' % tup))
        print(' Calling functor on image...')
        result = process_functor(image)
        tup = (result.size + (result.mode,))
        print((' Output: %sx%s %s' % tup))
        print(' Displaying...')
        print()
        result.show()
        print('SUCCESS!')
        print()
    print('TESTING: MANUAL CALLABLES')
    # Consume remaining images one by one, asserting each pipeline yields CMYK.
    if len(image_inputs):
        image = image_inputs.pop()
        gcred = gcr(image)
        assert (gcred.mode == Mode.CMYK.value.mode)
        assert (Mode.of(gcred) is Mode.CMYK)
        image.close()
    if len(image_inputs):
        image = image_inputs.pop()
        ucred = ucr(image)
        assert (ucred.mode == Mode.CMYK.value.mode)
        assert (Mode.of(ucred) is Mode.CMYK)
        image.close()
    if len(image_inputs):
        image = image_inputs.pop()
        gcr_processor = BasicGCR()
        gcred = gcr_processor.process(image)
        assert (gcred.mode == Mode.CMYK.value.mode)
        assert (Mode.of(gcred) is Mode.CMYK)
        image.close()
    if len(image_inputs):
        image = image_inputs.pop()
        ucr_processor = BasicUCR()
        ucred = ucr_processor.process(image)
        assert (ucred.mode == Mode.CMYK.value.mode)
        assert (Mode.of(ucred) is Mode.CMYK)
        image.close()
    print('SUCCESS!')
    print()
    print('\t')
    print()
    pprint(list((relpath(path, start=start) for path in image_paths)))
    print()
    print('TESTING: DemoUCR ALGORITHM-STAGE TRACE PRINTER')
    print()
    # DemoUCR accepts both a hex colour string and a raw integer colour value.
    print(DemoUCR('#BB2F53'))
    print()
    print(DemoUCR(7282745))
    print()
def can_use_total_obligation_enum(amount_obj):
    """Return True when every bound pair in *amount_obj* exactly matches one
    of the WEBSITE_AWARD_BINS entries; False otherwise.

    Best-effort: any error while inspecting the input (wrong type, missing
    attributes, ...) simply disqualifies it and yields False.
    """
    try:
        return all(
            any(
                (entry.get('lower_bound') == bounds['lower'])
                and (entry.get('upper_bound') == bounds['upper'])
                for bounds in WEBSITE_AWARD_BINS.values()
            )
            for entry in amount_obj
        )
    except Exception:
        return False
# NOTE(review): the original line read a bare "(scope='module')" — almost
# certainly a pytest decorator whose '@pytest.fixture' prefix was stripped;
# restored below, confirm against the upstream test file.
@pytest.fixture(scope='module')
def hierarchy():
    """Module-scoped fixture building a deliberately non-nested two-level
    mesh hierarchy.

    The coarse 2x2 ('left' diagonal) and fine 5x5 ('right' diagonal) meshes
    do not nest, so the tile() calls create all-to-all coarse<->fine cell
    maps: every coarse cell lists all fine cells and vice versa.
    """
    distribution_parameters = {'partition': True, 'overlap_type': (DistributedMeshOverlapType.VERTEX, 10)}
    mesh = RectangleMesh(2, 2, 1, 1, diagonal='left', distribution_parameters=distribution_parameters)
    mesh2 = RectangleMesh(5, 5, 1, 1, diagonal='right', distribution_parameters=distribution_parameters)
    mesh.init()
    mesh2.init()
    coarse_to_fine = numpy.tile(numpy.arange(mesh2.num_cells(), dtype=IntType), (mesh.num_cells(), 1))
    fine_to_coarse = numpy.tile(numpy.arange(mesh.num_cells(), dtype=IntType), (mesh2.num_cells(), 1))
    # fine_to_coarse list has a leading None: the coarsest level has no parent.
    hierarchy = HierarchyBase((mesh, mesh2), [coarse_to_fine], [None, fine_to_coarse], nested=False)
    return hierarchy
def run_rpc(interface_dict):
    """Serve *interface_dict* over MpRPC on 0.0.0.0:4315; blocks forever.

    SIGINT is wired (via gevent) to a shutdown handler built around the
    server instance so Ctrl-C stops serve_forever() cleanly.
    """
    print('MpRPC server Started.')
    server_instance = FetchInterfaceClass(interface_dict, 'MpRPC')
    mprpc_server = StreamServer(('0.0.0.0', 4315), server_instance)
    gevent.signal_handler(signal.SIGINT, build_mprpc_handler(mprpc_server))
    mprpc_server.serve_forever()
class Transforms(types.Singleton):
    """Abstract base for an ordered sequence of transformation chains mapping
    ``fromdims``-dimensional space into ``todims``-dimensional space.

    Concrete subclasses implement ``__len__``, integer ``__getitem__`` and
    ``index_with_tail``; this base supplies slice/array indexing, search,
    iteration, chaining and derivation of child/edge transform sequences.
    """
    def __init__(self, todims: Integral, fromdims: Integral):
        assert isinstance(todims, Integral), f'todims={todims!r}'
        assert isinstance(fromdims, Integral), f'fromdims={fromdims!r}'
        if (not (0 <= fromdims <= todims)):
            raise ValueError('invalid dimensions')
        self.todims = todims
        self.fromdims = fromdims
        super().__init__()
    def __len__(self):
        # Number of transform chains; subclasses must implement.
        raise NotImplementedError
    def __getitem__(self, index):
        """Select a single chain (int; subclass duty) or a sub-sequence
        (slice, integer array or boolean mask), returning ``self`` whenever
        the selection is provably the identity."""
        if numeric.isint(index):
            raise NotImplementedError
        elif isinstance(index, slice):
            # Normalize the slice against len(self); full range is a no-op.
            index = range(len(self))[index]
            if (index == range(len(self))):
                return self
            if (index.step < 0):
                raise NotImplementedError('reordering the sequence is not yet implemented')
            return MaskedTransforms(self, types.arraydata(numpy.arange(index.start, index.stop, index.step)))
        elif numeric.isintarray(index):
            if (index.ndim != 1):
                raise IndexError('invalid index')
            if (numpy.any(numpy.less(index, 0)) or numpy.any(numpy.greater_equal(index, len(self)))):
                raise IndexError('index out of range')
            # dindex > 0 everywhere means strictly increasing selection.
            dindex = numpy.diff(index)
            if ((len(index) == len(self)) and ((len(self) == 0) or ((index[0] == 0) and numpy.all(numpy.equal(dindex, 1))))):
                # Contiguous 0..len-1 selection: identity.
                return self
            if numpy.any(numpy.equal(dindex, 0)):
                raise ValueError('repeating an element is not allowed')
            if (not numpy.all(numpy.greater(dindex, 0))):
                # Unsorted selection: mask with the sorted indices, then wrap
                # in a reordering view that restores the requested order.
                s = numpy.argsort(index)
                return ReorderedTransforms(self[index[s]], types.arraydata(numpy.argsort(s)))
            if (len(index) == 0):
                return EmptyTransforms(self.todims, self.fromdims)
            if (len(index) == len(self)):
                return self
            return MaskedTransforms(self, types.arraydata(index))
        elif numeric.isboolarray(index):
            if (index.shape != (len(self),)):
                raise IndexError('mask has invalid shape')
            if (not numpy.any(index)):
                return EmptyTransforms(self.todims, self.fromdims)
            if numpy.all(index):
                return self
            # Convert the mask to sorted integer positions.
            (index,) = numpy.where(index)
            return MaskedTransforms(self, types.arraydata(index))
        else:
            raise IndexError('invalid index')
    def index_with_tail(self, trans):
        # Return (index, tail) splitting trans into a member chain plus
        # remainder; subclasses must implement.
        raise NotImplementedError
    def __iter__(self):
        for i in range(len(self)):
            (yield self[i])
    def index(self, trans):
        """Exact-match index of *trans*; raises ValueError if it only matches
        with a non-empty tail or not at all."""
        (index, tail) = self.index_with_tail(trans)
        if tail:
            raise ValueError('{!r} not in sequence of transforms'.format(trans))
        return index
    def contains(self, trans):
        """True iff *trans* matches a member exactly (no tail)."""
        try:
            self.index(trans)
        except ValueError:
            return False
        else:
            return True
    __contains__ = contains
    def contains_with_tail(self, trans):
        """True iff *trans* starts with a member chain (tail allowed)."""
        try:
            self.index_with_tail(trans)
        except ValueError:
            return False
        else:
            return True
    def refined(self, references):
        """Sequence of child transforms of self under *references*."""
        # Uniform references allow the cheaper single-reference derivation.
        if references.isuniform:
            return UniformDerivedTransforms(self, references[0], 'child_transforms', self.fromdims)
        else:
            return DerivedTransforms(self, references, 'child_transforms', self.fromdims)
    def edges(self, references):
        """Sequence of edge transforms (dimension fromdims-1) of self."""
        if references.isuniform:
            return UniformDerivedTransforms(self, references[0], 'edge_transforms', (self.fromdims - 1))
        else:
            return DerivedTransforms(self, references, 'edge_transforms', (self.fromdims - 1))
    def __add__(self, other):
        # Concatenation requires matching fromdims; todims taken from self.
        if ((not isinstance(other, Transforms)) or (self.fromdims != other.fromdims)):
            return NotImplemented
        return chain((self, other), self.todims, self.fromdims)
    def unchain(self):
        # Base sequences are atomic; chained subclasses yield their parts.
        (yield self)
def csvr_closure(sigma, dof, dt, tau=100, rng=None):
    """Create a CSVR (canonical sampling through velocity rescaling,
    Bussi–Donadio–Parrinello) kinetic-energy resampling closure.

    sigma: target kinetic energy; dof: degrees of freedom; dt: timestep;
    tau: thermostat time constant; rng: numpy-style Generator (defaults to
    the module-level RNG).

    NOTE(review): Bussi's reference implementation defines taut = tau/dt and
    uses factor = exp(-1/taut) = exp(-dt/tau). Here tau_t = dt/tau combined
    with exp(-1.0/tau_t) inverts that ratio (and the tau_t > 0.1 guard then
    fires for dt > 0.1*tau) — verify against the reference before relying on
    the thermostat's coupling strength.
    """
    tau_t = (dt / tau)
    if (tau_t > 0.1):
        factor = exp(((- 1.0) / tau_t))
    else:
        # Memory factor is negligible: resample the kinetic energy fully.
        factor = 0.0
    if (rng is None):
        rng = RNG
    def resample_kin(cur_kinetic_energy):
        # Stochastic rescaling: deterministic relaxation toward sigma plus
        # noise from one Gaussian rr and the sum of (dof-1) squared Gaussians.
        rr = rng.normal()
        new_kinetic_energy = ((cur_kinetic_energy + ((1.0 - factor) * (((sigma * (sum_noises((dof - 1)) + (rr ** 2))) / dof) - cur_kinetic_energy))) + ((2.0 * rr) * sqrt(((((cur_kinetic_energy * sigma) / dof) * (1.0 - factor)) * factor))))
        # Velocity scaling factor corresponding to the new kinetic energy.
        alpha = sqrt((new_kinetic_energy / cur_kinetic_energy))
        return alpha
    return resample_kin
class ModelExportMethod(ABC):
    """Interface for a model export/load strategy plus a round-trip self-test.

    NOTE(review): the first parameter of every method is ``cls`` and the
    export/load bodies are ``pass`` — these were presumably ``@classmethod``
    / ``@abstractmethod`` declarations whose decorators were lost in
    extraction; confirm against the original.
    """
    def export(cls, model, input_args, save_path, export_method, **export_kwargs):
        # Abstract: export *model* to *save_path*; must return a JSON-able
        # dict of kwargs that load() needs to restore it.
        pass
    def load(cls, save_path, **load_kwargs):
        # Abstract: restore and return an nn.Module from *save_path*.
        pass
    def test_export_and_load(cls, model, input_args, export_method, export_kwargs, output_checker):
        """Round-trip check: export *model*, reload it, and compare outputs
        via *output_checker* (new_output, original_output)."""
        with make_temp_directory('test_export_and_load') as save_path:
            assert isinstance(model, nn.Module), model
            assert isinstance(input_args, (list, tuple)), input_args
            original_output = model(*input_args)
            model.eval()
            load_kwargs = cls.export(model, input_args, save_path, export_method, **export_kwargs)
            # The export contract: kwargs must be a JSON-serializable dict.
            assert isinstance(load_kwargs, dict), load_kwargs
            assert json.dumps(load_kwargs), load_kwargs
            loaded_model = cls.load(save_path, **load_kwargs)
            assert isinstance(loaded_model, nn.Module), loaded_model
            new_output = loaded_model(*input_args)
            output_checker(new_output, original_output)
# NOTE(review): the line below looks like a mangled Telegram-bot decorator
# (likely '@bot.message_handler(commands=['start'])') — restore and verify.
_handler(commands=['start'])
def start(message):
    """Handle /start: greet private chats (and register the user); ask group
    chats to move to a private conversation."""
    if (message.chat.type == 'private'):
        bot.send_message(message.chat.id, (config.text_messages['start'].format(message.from_user.first_name) + msg.repo()), parse_mode='Markdown', disable_web_page_preview=True, reply_markup=markup.faqButton())
        # Record the user so the bot knows who has started it.
        mysql.start_bot(message.chat.id)
    else:
        bot.reply_to(message, "Please send me a PM if you'd like to talk to the Support Team.")
class OptionPlotoptionsPolygonSonificationTracksMappingTremolo(Options):
    """Generated Highcharts option wrapper:
    ``plotOptions.polygon.sonification.tracks.mapping.tremolo``.

    Both accessors lazily create and return nested sub-option objects via
    the shared ``_config_sub_data`` machinery.
    """
    def depth(self) -> 'OptionPlotoptionsPolygonSonificationTracksMappingTremoloDepth':
        return self._config_sub_data('depth', OptionPlotoptionsPolygonSonificationTracksMappingTremoloDepth)
    def speed(self) -> 'OptionPlotoptionsPolygonSonificationTracksMappingTremoloSpeed':
        return self._config_sub_data('speed', OptionPlotoptionsPolygonSonificationTracksMappingTremoloSpeed)
def test_decimal_scale_is_a_positive_int():
    """A negative decimal ``scale`` must be rejected at schema-parse time.

    NOTE(review): the original docstring was reduced to a lone quote
    character (extraction damage, a syntax error as written); this
    replacement restates the test's intent.
    """
    schema = {'type': 'record', 'name': 'test_scale_is_an_int', 'fields': [{'name': 'field', 'type': {'logicalType': 'decimal', 'precision': 5, 'scale': (- 2), 'type': 'bytes'}}]}
    with pytest.raises(SchemaParseException, match='decimal scale must be a positive integer'):
        parse_schema(schema)
class OptionSeriesWordcloudSonificationContexttracks(Options):
    """Generated Highcharts option wrapper:
    ``series.wordcloud.sonification.contextTracks``.

    Scalar options are getter/setter pairs delegating to ``_config_get`` /
    ``_config``; structured options return nested sub-option objects via
    ``_config_sub_data``.

    NOTE(review): the scalar accessors are each defined twice under the same
    name; the ``@property`` / ``@<name>.setter`` decorators were presumably
    lost in extraction, so each setter ``def`` shadows its getter — confirm
    against the code generator's output.
    """
    def activeWhen(self) -> 'OptionSeriesWordcloudSonificationContexttracksActivewhen':
        return self._config_sub_data('activeWhen', OptionSeriesWordcloudSonificationContexttracksActivewhen)
    def instrument(self):
        # Default instrument is 'piano'.
        return self._config_get('piano')
    def instrument(self, text: str):
        self._config(text, js_type=False)
    def mapping(self) -> 'OptionSeriesWordcloudSonificationContexttracksMapping':
        return self._config_sub_data('mapping', OptionSeriesWordcloudSonificationContexttracksMapping)
    def midiName(self):
        return self._config_get(None)
    def midiName(self, text: str):
        self._config(text, js_type=False)
    def pointGrouping(self) -> 'OptionSeriesWordcloudSonificationContexttracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionSeriesWordcloudSonificationContexttracksPointgrouping)
    def roundToMusicalNotes(self):
        return self._config_get(True)
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)
    def showPlayMarker(self):
        return self._config_get(True)
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)
    def timeInterval(self):
        return self._config_get(None)
    def timeInterval(self, num: float):
        self._config(num, js_type=False)
    def type(self):
        return self._config_get('instrument')
    def type(self, text: str):
        self._config(text, js_type=False)
    def valueInterval(self):
        return self._config_get(None)
    def valueInterval(self, num: float):
        self._config(num, js_type=False)
    def valueMapFunction(self):
        return self._config_get('linear')
    def valueMapFunction(self, value: Any):
        self._config(value, js_type=False)
    def valueProp(self):
        # Default is the literal JS string '"x"'.
        return self._config_get('"x"')
    def valueProp(self, text: str):
        self._config(text, js_type=False)
def run_ciftify_dlabel_report(arguments, tmpdir):
    """Build a cluster report for a dscalar input: clusterize it, label the
    clusters, measure per-cluster surface area and atlas overlap, and write
    a CSV report (optionally also a peak table).

    arguments: docopt-style dict of CLI options; tmpdir: scratch directory
    for intermediate files.
    """
    dscalar_in = NibInput(arguments['<func.dscalar.nii>'])
    surf_distance = arguments['--surface-distance']
    outputbase = arguments['--outputbase']
    dont_output_clusters = arguments['--no-cluster-dlabel']
    output_peaktable = arguments['--output-peaks']
    surf_settings = ciftify.report.CombinedSurfaceSettings(arguments, tmpdir)
    atlas_settings = ciftify.report.define_atlas_settings()
    # Default outputbase: alongside the input file, named after it.
    if (not outputbase):
        outputbase = os.path.join(os.path.dirname(dscalar_in.path), dscalar_in.base)
    ciftify.utils.check_output_writable(outputbase, exit_on_error=True)
    clusters_dscalar = clusterise_dscalar_input(dscalar_in.path, arguments, surf_settings, tmpdir)
    # When the user suppressed the cluster dlabel output, still create it —
    # just inside tmpdir so it is discarded afterwards.
    if dont_output_clusters:
        cluster_dlabel = os.path.join(tmpdir, 'clust.dlabel.nii')
    else:
        cluster_dlabel = '{}_clust.dlabel.nii'.format(outputbase)
    # wb_command requires a label list file; an empty one auto-numbers labels.
    empty_labels = os.path.join(tmpdir, 'empty_labels.txt')
    ciftify.utils.run('touch {}'.format(empty_labels))
    ciftify.utils.run(['wb_command', '-cifti-label-import', clusters_dscalar, empty_labels, cluster_dlabel])
    (label_data, label_dict) = ciftify.niio.load_LR_label(cluster_dlabel, map_number=1)
    outputcsv = '{}_statclust_report.csv'.format(outputbase)
    logger.info('Output table: {}'.format(outputcsv))
    surf_va_LR = load_LR_vertex_areas(surf_settings)
    # Sanity check: label map and vertex-area map must cover the same vertices.
    if (not (label_data.shape[0] == surf_va_LR.shape[0])):
        logger.error('label file vertices {} not equal to vertex areas {}'.format(label_data.shape[0], surf_va_LR.shape[0]))
        sys.exit(1)
    df = pd.DataFrame.from_dict(label_dict, orient='index')
    df['label_idx'] = df.index
    df = df.rename(index=str, columns={0: 'label_name'})
    # -999 sentinel marks areas not yet computed.
    df['area'] = (- 999)
    for pd_idx in df.index.get_values():
        df.loc[(pd_idx, 'area')] = ciftify.report.calc_cluster_area(pd_idx, label_data, surf_va_LR)
    for atlas in atlas_settings.values():
        df = report_atlas_overlap(df, label_data, atlas, surf_va_LR, min_percent_overlap=5)
    df.to_csv(outputcsv)
    if output_peaktable:
        write_statclust_peaktable(dscalar_in.path, clusters_dscalar, outputbase, arguments, surf_settings, atlas_settings)
class StridedGroupGemmTestCase(unittest.TestCase):
    """AITemplate tests: group_gemm_rcr(+bias) fused with a concatenate op,
    validated against a PyTorch reference (linear + cat).

    NOTE(review): two decorator lines in the original were reduced to bare
    tuples ``((detect_target().name() == 'rocm'), 'Not supported by ROCM.')``
    — restored below as ``@unittest.skipIf(...)``; confirm against upstream.
    """
    def _test_strided_group_gemm(self, M, N1, K1, N2, K2, N3, test_name, dtype='float16'):
        """Compile [gemm1, gemm2] -> concat with extra input X3 and compare
        the fused output against torch.linear + torch.cat."""
        target = detect_target()
        # Grouped GEMM kernels require SM80 (Ampere) or newer.
        if (int(target._arch) < 80):
            _LOGGER.warning('Group Gemm need SM80 HW')
            return
        M1 = M
        M2 = M
        M3 = M
        dim = 1
        X1 = Tensor(shape=[IntImm(M1), IntImm(K1)], dtype=dtype, name='x1', is_input=True)
        W1 = Tensor(shape=[N1, K1], dtype=dtype, name='w1', is_input=True)
        X2 = Tensor(shape=[IntImm(M2), IntImm(K2)], dtype=dtype, name='x2', is_input=True)
        W2 = Tensor(shape=[N2, K2], dtype=dtype, name='w2', is_input=True)
        X3 = Tensor(shape=[M3, N3], dtype=dtype, name='x3', is_input=True)
        group_gemm_op = ops.group_gemm_rcr()
        (Y1, Y2) = group_gemm_op(operand_groups=[[X1, W1], [X2, W2]])
        Y1._attrs['name'] = 'y1'
        Y2._attrs['name'] = 'y2'
        concat_op = ops.concatenate()
        Y = concat_op([Y1, Y2, X3], dim=dim)
        Y._attrs['name'] = 'y'
        Y._attrs['is_output'] = True
        module = compile_model([Y], target, './tmp', test_name)
        # After fusion the output must come from exactly the gemm + concat ops.
        Y_src_ops = Y._attrs['src_ops']
        np.testing.assert_equal(len(Y_src_ops), 2)
        np.testing.assert_equal(Y_src_ops, StableSet({group_gemm_op, concat_op}))
        expected_inputs_group_gemm_op = [X1, W1, X2, W2]
        np.testing.assert_equal(expected_inputs_group_gemm_op, group_gemm_op._attrs['inputs'])
        X1_pt = get_random_torch_tensor([M1, K1], dtype)
        W1_pt = get_random_torch_tensor([N1, K1], dtype)
        X2_pt = get_random_torch_tensor([M2, K2], dtype)
        W2_pt = get_random_torch_tensor([N2, K2], dtype)
        X3_pt = get_random_torch_tensor([M3, N3], dtype)
        Y1_pt = torch.nn.functional.linear(X1_pt, W1_pt)
        Y2_pt = torch.nn.functional.linear(X2_pt, W2_pt)
        Y_pt = torch.cat([Y1_pt, Y2_pt, X3_pt], dim=dim)
        y_shape = [var._attrs['values'][0] for var in Y._attrs['shape']]
        _LOGGER.info('AITemplate y_shape: {}'.format(y_shape))
        np.testing.assert_equal(y_shape, Y_pt.size())
        inputs = {'x1': X1_pt, 'w1': W1_pt, 'x2': X2_pt, 'w2': W2_pt, 'x3': X3_pt}
        y = get_torch_empty_tensor(y_shape, dtype)
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(Y_pt, y, atol=0.1, rtol=0.1))
    def test_strided_group_gemm(self):
        self._test_strided_group_gemm(M=128, N1=32, K1=32, N2=64, K2=16, N3=8, test_name='strided_group_gemm_rcr_cat1')
        self._test_strided_group_gemm(M=8, N1=32, K1=32, N2=4, K2=4, N3=3, test_name='strided_group_gemm_rcr_cat2')
    def _test_strided_group_gemm_bias(self, M, N1, K1, N2, K2, N3, test_name, input_first, dtype='float16'):
        """Same as _test_strided_group_gemm but with per-gemm bias; input_first
        controls whether X3 is concatenated before or after the gemm outputs."""
        target = detect_target()
        if (int(target._arch) < 80):
            _LOGGER.warning('Group Gemm need SM80 HW')
            return
        M1 = M
        M2 = M
        M3 = M
        dim = 1
        X1 = Tensor(shape=[IntImm(M1), IntImm(K1)], dtype=dtype, name='x1', is_input=True)
        W1 = Tensor(shape=[N1, K1], dtype=dtype, name='w1', is_input=True)
        B1 = Tensor(shape=[N1], dtype=dtype, name='b1', is_input=True)
        X2 = Tensor(shape=[IntImm(M2), IntImm(K2)], dtype=dtype, name='x2', is_input=True)
        W2 = Tensor(shape=[N2, K2], dtype=dtype, name='w2', is_input=True)
        B2 = Tensor(shape=[N2], dtype=dtype, name='b2', is_input=True)
        X3 = Tensor(shape=[M3, N3], dtype=dtype, name='x3', is_input=True)
        group_gemm_op = ops.group_gemm_rcr_bias()
        (Y1, Y2) = group_gemm_op(operand_groups=[[X1, W1, B1], [X2, W2, B2]])
        Y1._attrs['name'] = 'y1'
        Y2._attrs['name'] = 'y2'
        concat_op = ops.concatenate()
        if input_first:
            Y = concat_op([X3, Y1, Y2], dim=dim)
        else:
            Y = concat_op([Y1, Y2, X3], dim=dim)
        Y._attrs['name'] = 'y'
        Y._attrs['is_output'] = True
        module = compile_model([Y], target, './tmp', test_name)
        Y_src_ops = Y._attrs['src_ops']
        np.testing.assert_equal(len(Y_src_ops), 2)
        np.testing.assert_equal(Y_src_ops, StableSet({group_gemm_op, concat_op}))
        expected_inputs_group_gemm_op = [X1, W1, B1, X2, W2, B2]
        np.testing.assert_equal(expected_inputs_group_gemm_op, group_gemm_op._attrs['inputs'])
        X1_pt = get_random_torch_tensor([M1, K1], dtype)
        W1_pt = get_random_torch_tensor([N1, K1], dtype)
        B1_pt = get_random_torch_tensor([N1], dtype)
        X2_pt = get_random_torch_tensor([M2, K2], dtype)
        W2_pt = get_random_torch_tensor([N2, K2], dtype)
        B2_pt = get_random_torch_tensor([N2], dtype)
        X3_pt = get_random_torch_tensor([M3, N3], dtype)
        Y1_pt = torch.nn.functional.linear(X1_pt, W1_pt, bias=B1_pt)
        Y2_pt = torch.nn.functional.linear(X2_pt, W2_pt, bias=B2_pt)
        if input_first:
            Y_pt = torch.cat([X3_pt, Y1_pt, Y2_pt], dim=dim)
        else:
            Y_pt = torch.cat([Y1_pt, Y2_pt, X3_pt], dim=dim)
        y_shape = [var._attrs['values'][0] for var in Y._attrs['shape']]
        _LOGGER.info('AITemplate y_shape: {}'.format(y_shape))
        np.testing.assert_equal(y_shape, Y_pt.size())
        inputs = {'x1': X1_pt, 'w1': W1_pt, 'b1': B1_pt, 'x2': X2_pt, 'w2': W2_pt, 'b2': B2_pt, 'x3': X3_pt}
        y = get_torch_empty_tensor(y_shape, dtype)
        module.run_with_tensors(inputs, [y])
        self.assertTrue(torch.allclose(Y_pt, y, atol=0.1, rtol=0.1))
    def test_strided_group_gemm_bias(self):
        self._test_strided_group_gemm_bias(M=128, N1=32, K1=32, N2=64, K2=16, N3=8, test_name='strided_group_gemm_rcr_bias_cat1', input_first=False)
        self._test_strided_group_gemm_bias(M=8, N1=32, K1=32, N2=4, K2=4, N3=3, test_name='strided_group_gemm_rcr_bias_cat2', input_first=False)
    @unittest.skipIf(detect_target().name() == 'rocm', 'Not supported by ROCM.')
    def test_strided_group_gemm_float(self):
        self._test_strided_group_gemm(M=8, N1=32, K1=32, N2=4, K2=4, N3=3, test_name='strided_group_gemm_rcr_cat_float2', dtype='float')
        self._test_strided_group_gemm_bias(M=128, N1=32, K1=32, N2=64, K2=16, N3=8, test_name='strided_group_gemm_rcr_bias_cat_float1', input_first=False, dtype='float')
    def _test_strided_group_gemm_epilogue_alignment(self, dtype='float16'):
        """Odd N3 widths stress epilogue alignment; force profiling in CI so
        the kernels are actually generated, restoring FORCE_PROFILE after."""
        target = detect_target()
        old_force_ci = os.environ.get('FORCE_PROFILE', None)
        if target.in_ci_env():
            os.environ['FORCE_PROFILE'] = '1'
        self._test_strided_group_gemm_bias(M=18, N1=24, K1=32, N2=62, K2=16, N3=2, test_name=f'strided_group_gemm_rcr_epilogue_alignment_{dtype}_1', input_first=True, dtype=dtype)
        self._test_strided_group_gemm_bias(M=18, N1=24, K1=32, N2=62, K2=16, N3=4, test_name=f'strided_group_gemm_rcr_epilogue_alignment_{dtype}_2', input_first=True, dtype=dtype)
        if target.in_ci_env():
            # Restore the environment exactly as it was.
            if (old_force_ci is None):
                del os.environ['FORCE_PROFILE']
            else:
                os.environ['FORCE_PROFILE'] = old_force_ci
    def test_strided_group_gemm_epilogue_alignment(self):
        self._test_strided_group_gemm_epilogue_alignment()
    @unittest.skipIf(detect_target().name() == 'rocm', 'Not supported by ROCM.')
    def test_strided_group_gemm_epilogue_alignment_float(self):
        self._test_strided_group_gemm_epilogue_alignment(dtype='float')
def MiniPlayerWindow(playing_status, settings, title: str, artist: str, album_art_data: bytes, track_length: (float | int), track_position: (float | int)):
    """Build the PySimpleGUI layout for the mini player window.

    Returns a single-row layout: [album art, info/controls column] — the art
    column is omitted when 'show_album_art' is disabled in *settings*.
    """
    album_art = Sg.Column([[Sg.Image(data=album_art_data, key='artwork', pad=(0, 0))]], element_justification='left', pad=(0, 0))
    music_controls = MusicControls(settings, playing_status, prev_button_pad=(10, 5, None))
    progress_bar_layout = ProgressBar(settings, track_position, track_length, playing_status)
    # Shorten overly long titles so the fixed-size Text element fits.
    title = truncate_title(title)
    right_side = Sg.Column([[Sg.Text(title, font=FONT_TITLE, key='title', pad=((10, 0), 0), size=(28, 1))], [Sg.Text(artist, font=FONT_MED, key='artist', pad=((10, 0), 0), size=(28, 2))], music_controls, progress_bar_layout], pad=(0, 0))
    return [([album_art, right_side] if settings['show_album_art'] else [right_side])]
class OptionSeriesParetoSonificationTracksMappingTremoloSpeed(Options):
    """Generated Highcharts option wrapper:
    ``series.pareto.sonification.tracks.mapping.tremolo.speed``.

    NOTE(review): every accessor is defined twice under the same name; the
    ``@property`` / ``@<name>.setter`` decorators were presumably lost in
    extraction, so each later ``def`` shadows the earlier one — confirm
    against the code generator's output.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the original had bare '.integration' / '.ledger' lines —
# almost certainly pytest markers whose '@pytest.mark' prefix was stripped;
# restored below, confirm against the upstream test file.
@pytest.mark.integration
@pytest.mark.ledger
def test_construct_init_transaction():
    """A CosmWasm contract-init transaction built by FetchAIApi must be a
    two-entry dict whose first message is a MsgInstantiateContract."""
    account = FetchAICrypto()
    fetchai_api = FetchAIApi(**FETCHAI_TESTNET_CONFIG)
    init_transaction = fetchai_api._get_init_transaction(deployer_address=account.address, denom='atestfet', chain_id='cosmoshub-3', account_number=1, sequence=1, amount=0, code_id=200, init_msg={}, label='something', tx_fee_denom='stake')
    assert (isinstance(init_transaction, dict) and (len(init_transaction) == 2)), 'Incorrect transfer_transaction constructed.'
    # NOTE(review): the original indexed messages[0][''] — an empty key.
    # Cosmos-SDK JSON uses the '@type' discriminator, so '' was presumably a
    # mangled '@type'; verify against the aea-ledger-fetchai test suite.
    assert (init_transaction['tx']['body']['messages'][0]['@type'] == '/cosmwasm.wasm.v1.MsgInstantiateContract')
class OptionPlotoptionsSankeySonificationContexttracksMapping(Options):
def frequency(self) -> 'OptionPlotoptionsSankeySonificationContexttracksMappingFrequency':
return self._config_sub_data('frequency', OptionPlotoptionsSankeySonificationContexttracksMappingFrequency)
def gapBetweenNotes(self) -> 'OptionPlotoptionsSankeySonificationContexttracksMappingGapbetweennotes':
return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsSankeySonificationContexttracksMappingGapbetweennotes)
def highpass(self) -> 'OptionPlotoptionsSankeySonificationContexttracksMappingHighpass':
return self._config_sub_data('highpass', OptionPlotoptionsSankeySonificationContexttracksMappingHighpass)
def lowpass(self) -> 'OptionPlotoptionsSankeySonificationContexttracksMappingLowpass':
return self._config_sub_data('lowpass', OptionPlotoptionsSankeySonificationContexttracksMappingLowpass)
def noteDuration(self) -> 'OptionPlotoptionsSankeySonificationContexttracksMappingNoteduration':
return self._config_sub_data('noteDuration', OptionPlotoptionsSankeySonificationContexttracksMappingNoteduration)
def pan(self) -> 'OptionPlotoptionsSankeySonificationContexttracksMappingPan':
return self._config_sub_data('pan', OptionPlotoptionsSankeySonificationContexttracksMappingPan)
def pitch(self) -> 'OptionPlotoptionsSankeySonificationContexttracksMappingPitch':
return self._config_sub_data('pitch', OptionPlotoptionsSankeySonificationContexttracksMappingPitch)
def playDelay(self) -> 'OptionPlotoptionsSankeySonificationContexttracksMappingPlaydelay':
return self._config_sub_data('playDelay', OptionPlotoptionsSankeySonificationContexttracksMappingPlaydelay)
def rate(self) -> 'OptionPlotoptionsSankeySonificationContexttracksMappingRate':
return self._config_sub_data('rate', OptionPlotoptionsSankeySonificationContexttracksMappingRate)
def text(self):
return self._config_get(None)
def text(self, text: str):
self._config(text, js_type=False)
def time(self) -> 'OptionPlotoptionsSankeySonificationContexttracksMappingTime':
return self._config_sub_data('time', OptionPlotoptionsSankeySonificationContexttracksMappingTime)
def tremolo(self) -> 'OptionPlotoptionsSankeySonificationContexttracksMappingTremolo':
    # Lazily-created sub-configuration for the "tremolo" effect mapping.
    return self._config_sub_data('tremolo', OptionPlotoptionsSankeySonificationContexttracksMappingTremolo)
def volume(self) -> 'OptionPlotoptionsSankeySonificationContexttracksMappingVolume':
    # Lazily-created sub-configuration for the "volume" audio mapping.
    return self._config_sub_data('volume', OptionPlotoptionsSankeySonificationContexttracksMappingVolume)
class OptionSeriesArearangeSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    # Generated wrapper for the Highcharts
    # series.arearange.sonification.defaultInstrumentOptions.mapping.lowpass.frequency
    # option group. Each option is a getter/setter pair sharing one name -
    # NOTE(review): property decorators appear to have been stripped during
    # extraction; confirm against the generator output.
    def mapFunction(self):
        # Getter: mapping function, defaults to None when unset.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store the mapping function as a plain (non-JS) value.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property to map to, defaults to None.
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter: name of the data property driving this parameter.
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range, defaults to None.
        return self._config_get(None)
    def max(self, num: float):
        # Setter: upper bound of the mapped range.
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range, defaults to None.
        return self._config_get(None)
    def min(self, num: float):
        # Setter: lower bound of the mapped range.
        self._config(num, js_type=False)
    def within(self):
        # Getter: scope the mapping is computed within, defaults to None.
        return self._config_get(None)
    def within(self, value: Any):
        # Setter: scope the mapping is computed within.
        self._config(value, js_type=False)
def test_parse_arg_outformat_csv(caplog):
    """`--outformat csv` must select the csv outputter (observed via debug log)."""
    argv = [path.relpath(__file__), '--debug', '--outformat', 'csv']
    cis_audit.parse_arguments(argv=argv)
    assert any(
        record.msg == 'Going to use "csv" outputter'
        for record in caplog.records
    )
def get_historical_prod_data(zone_key: ZoneKey=ZoneKey('KR'), session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> ProductionBreakdownList:
    """Fetch South Korea's production breakdown for the day of `target_datetime`.

    The historical chart endpoint needs a CSRF token, obtained by a priming
    GET request whose cookie is echoed back in the POST payload.

    Raises:
        ValueError: if `target_datetime` is None or the POST does not return 200.
    """
    if target_datetime is None:
        # The endpoint is queried per-day, so an explicit date is required.
        # Previously a missing date crashed with an opaque AttributeError
        # on `.strftime`.
        raise ValueError('get_historical_prod_data requires a target_datetime')
    session = (session or Session())
    target_datetime_formatted_daily = target_datetime.strftime('%Y-%m-%d')
    logger.debug(f'Fetching CSRF token to access production data from {HISTORICAL_PRODUCTION_URL}')
    # NOTE(review): TLS verification is disabled - presumably the endpoint's
    # certificate chain is broken; confirm before keeping verify=False.
    session.get(HISTORICAL_PRODUCTION_URL, verify=False)
    cookies_dict = session.cookies.get_dict()
    payload = {'mid': 'a', 'device': 'chart', 'view_sdate': target_datetime_formatted_daily, 'view_edate': target_datetime_formatted_daily, '_csrf': cookies_dict.get('XSRF-TOKEN', None)}
    logger.debug(f'Fetching production data from {HISTORICAL_PRODUCTION_URL}')
    res = session.post(HISTORICAL_PRODUCTION_URL, payload)
    if res.status_code != 200:
        # Explicit raise instead of a bare `assert`, which is stripped
        # when Python runs with -O.
        raise ValueError(f'{HISTORICAL_PRODUCTION_URL} returned HTTP {res.status_code}')
    return parse_chart_prod_data(res.text, zone_key, logger)
def test_disable_user(new_user_with_params):
    """Disabling a user while clearing optional fields via DELETE_ATTRIBUTE."""
    updated = auth.update_user(
        new_user_with_params.uid,
        display_name=auth.DELETE_ATTRIBUTE,
        photo_url=auth.DELETE_ATTRIBUTE,
        phone_number=auth.DELETE_ATTRIBUTE,
        disabled=True,
    )
    assert updated.uid == new_user_with_params.uid
    assert updated.email == new_user_with_params.email
    # Every field passed as DELETE_ATTRIBUTE must now be unset.
    assert updated.display_name is None
    assert updated.phone_number is None
    assert updated.photo_url is None
    assert updated.email_verified is True
    assert updated.disabled is True
    assert len(updated.provider_data) == 1
def get_matching_count(connection_name, table_name, ids):
    # Count rows in `table_name` whose detached_award_procurement_id is in `ids`.
    # NOTE(review): `table_name` is interpolated directly into the SQL string;
    # that is only safe if it always comes from trusted internal config -
    # verify callers never pass user input here.
    # `ids` is bound as a single parameter for `in %s`, which requires the
    # driver to adapt it (psycopg2 does for tuples) - assumes `ids` is a
    # tuple; TODO confirm.
    with connections[connection_name].cursor() as cursor:
        cursor.execute(f'''
            select count(*)
            from "{table_name}"
            where detached_award_procurement_id in %s
            ''', [ids])
        # Single row, single column: the count.
        return cursor.fetchall()[0][0]
def get_authenticated_request_headers_data(method: str, client: Client) -> tuple[(dict[(str, str)], dict[(str, str)])]:
    """Build the (headers, form-data) pair carrying client credentials.

    `client_secret_basic` puts credentials in the Authorization header;
    `client_secret_post` puts them in the request body; any other method
    yields two empty dicts.
    """
    headers: dict[(str, str)] = {}
    data: dict[(str, str)] = {}
    if method == 'client_secret_basic':
        basic = get_basic_authorization_header(client.client_id, client.client_secret)
        headers['Authorization'] = basic
    elif method == 'client_secret_post':
        data['client_id'] = client.client_id
        data['client_secret'] = client.client_secret
    return (headers, data)
_routes.route('/verify-email', methods=['POST'])
def verify_email():
    """Flask endpoint: validate an email-verification token and mark the user verified.

    NOTE(review): the bare `_routes.route(...)` call above reads like a
    stripped `@_routes.route` decorator - confirm against the original file.
    All failure modes deliberately collapse into the same generic
    'Invalid Token' response so nothing is leaked about why a token failed.
    """
    try:
        # Token arrives base64-encoded in the JSON body.
        token = base64.b64decode(request.json['data']['token'])
    except base64.binascii.Error:
        logging.error('Invalid Token')
        raise BadRequestError({'source': ''}, 'Invalid Token')
    s = get_serializer()
    try:
        # Serializer verifies the signature and decodes the payload.
        data = s.loads(token)
    except Exception:
        logging.error('Invalid Token')
        raise BadRequestError({'source': ''}, 'Invalid Token')
    try:
        # Assumes the payload is a sequence whose first element is the email
        # address - TODO confirm the serializer's dump format.
        user = User.query.filter_by(email=data[0]).one()
    except Exception:
        logging.error('Invalid Token')
        raise BadRequestError({'source': ''}, 'Invalid Token')
    else:
        user.is_verified = True
        save_to_db(user)
        logging.info('Email Verified')
    return make_response(jsonify(message='Email Verified'), 200)
_renderer(wrap_type=TestNumberOfColumns)
class TestNumberOfColumnsRenderer(TestRenderer):
    # Renders the number-of-columns test result as HTML.
    # NOTE(review): the bare `_renderer(...)` call above reads like a
    # stripped class decorator - confirm against the original file.
    def render_html(self, obj: TestNumberOfColumns) -> TestHtmlInfo:
        """Extend the base HTML with a per-column dtype table.

        The table always shows current dtypes; when a reference dataset is
        present, its dtypes are added as an extra column.
        """
        info = super().render_html(obj)
        columns = ['column name', 'current dtype']
        dict_curr = obj.metric.get_result().current.columns_type
        dict_ref = None
        reference_stats = obj.metric.get_result().reference
        if (reference_stats is not None):
            dict_ref = reference_stats.columns_type
            columns = (columns + ['reference dtype'])
        additional_plots = plot_dicts_to_table(dict_curr, dict_ref, columns, 'number_of_column', 'diff')
        info.details = additional_plots
        return info
class SchemaWriter(typing.Generic[T]):
    """Writes frames with a fixed column schema to `to_path` in format `fmt`."""

    def __init__(self, to_path: str, cols: typing.Optional[typing.Dict[(str, type)]], fmt: SchemaFormat):
        self._to_path = to_path
        self._fmt = fmt
        self._columns = cols
        # Pre-create an ordered file-name generator so successive writes
        # produce lexically sortable file names.
        self._file_name_gen = generate_ordered_files(Path(self._to_path), 1024)

    def to_path(self) -> str:
        """Destination path for written files."""
        return self._to_path

    def column_names(self) -> typing.Optional[typing.List[str]]:
        """Schema column names, or None when no (non-empty) schema was given."""
        if not self._columns:
            return None
        return list(self._columns)

    def write(self, *dfs, **kwargs):
        """Write the given frames; concrete subclasses implement this."""
        ...
def auto_batch(item_dims):
    """Decorator factory: let a batched tensor method also accept a single item.

    `item_dims` is the dimensionality of an *unbatched* input. When the
    wrapped method receives a tensor with exactly `item_dims` dims, a batch
    dimension is prepended before the call and stripped from the result;
    already-batched inputs pass through untouched.

    Fix: the original body contained a bare `(orig_func)` expression where a
    decorator was evidently stripped; restored here as functools.wraps so the
    wrapper keeps the wrapped method's name/docstring.
    """
    import functools

    def wrapper(orig_func):
        @functools.wraps(orig_func)
        def expand_and_do(self, x: Tensor):
            nonlocal orig_func, item_dims
            is_item = (x.dim() == item_dims)
            if is_item:
                # Promote the single item to a batch of one.
                x = x.unsqueeze(0)
            x = orig_func(self, x)
            # Only strip the batch dimension we added ourselves.
            if is_item:
                return x.squeeze(0)
            else:
                return x
        return expand_and_do
    return wrapper
def bind_extension_point(obj, trait_name, extension_point_id, extension_registry):
    """Bind `obj.trait_name` to an extension point and record the binding.

    The binding is appended to the class-level registry keyed by `obj`, so
    it stays alive (and discoverable) for the object's lifetime.
    """
    binding = ExtensionPointBinding(
        obj=obj,
        trait_name=trait_name,
        extension_point_id=extension_point_id,
        extension_registry=extension_registry,
    )
    ExtensionPointBinding._bindings.setdefault(obj, []).append(binding)
    return binding
()
('-v', 'verbose', is_flag=True, default=False, help='Verbose output')
('-s', 'screenshot', is_flag=True, default=False, help='Take a screenshot for each website')
('-b', 'browser', default=None, type=click.Choice(['firefox', 'chromium-browser']), help='Browser to use for screenshot.')
('input_file', type=click.File('rb'), default='-')
def cmd_web_report(input_file, verbose, browser, screenshot):
    """Generate report/index.html with response headers (and optional
    screenshots) for each URL read from `input_file`.

    NOTE(review): the bare tuples above read like stripped click
    `@click.option` / `@click.argument` decorators - confirm against the
    original file.
    """
    urls = input_file.read().decode().strip().split('\n')
    urls = [url.strip() for url in urls if url.strip()]
    report_dir = Path('report')
    try:
        report_dir.mkdir()
    except Exception:
        # Directory may already exist; reuse it.
        pass
    report_file = (report_dir / 'index.html')
    with report_file.open('w') as outfile:
        outfile.write('<!doctype html>\n')
        outfile.write('<html lang=en-us>\n')
        outfile.write('<meta charset=utf-8>\n')
        outfile.write('<title>habu.web.report</title>\n')
        outfile.write('<body>\n')
        outfile.write('<table border=1 style="max-width: 100%">\n')
        for (i, url) in enumerate(sorted(urls)):
            error = False
            print(i, url, file=sys.stderr)
            outfile.write('<tr>\n')
            outfile.write('<td style="vertical-align:top;max-width:30%">\n')
            # URL text is escaped before being embedded in the HTML.
            outfile.write((('<p><strong>' + html.escape(url)) + '</strong></p>\n'))
            try:
                # HEAD request only; TLS verification is intentionally off
                # since targets are arbitrary recon hosts.
                response = requests.head(url, verify=False, timeout=3)
                headers = 'Status Code: {}\n'.format(response.status_code)
                for (name, value) in response.headers.items():
                    headers += '{}: {}\n'.format(name, value)
                outfile.write((('<pre style="white-space: pre-wrap;">' + html.escape(headers)) + '</pre>\n'))
            except Exception as e:
                outfile.write((('<pre>ERROR: ' + html.escape(str(e))) + '</pre>\n'))
                error = True
            outfile.write('</td><td>')
            if (screenshot and (not error)):
                # Screenshot file name matches the row index.
                web_screenshot(url, (report_dir / '{}.png'.format(i)), browser=browser)
                outfile.write('<img src={}.png style="max-width: 100%" />\n'.format(i))
            outfile.write('</td>\n')
            outfile.write('</tr>\n')
        outfile.write('</table>\n')
        outfile.write('</body>\n')
        outfile.write('</html>\n')
def test_get_ball_ids():
    """IDs from multiple ball positions are merged into a single set."""
    anchor = (0.5, 0.5)
    jump = Jump.UP(1)
    positions = [
        BallPos(loc=jump, relative_to=anchor, ids={'1', '2'}),
        BallPos(loc=jump, relative_to=anchor, ids={'1', '3', '4'}),
    ]
    # Overlapping id '1' must appear only once in the union.
    assert _get_ball_ids(positions) == {'1', '2', '3', '4'}
class InferenceConfig():
    """Base class for inference configuration sections.

    `to_dict` renders ``{configuration_type: {attr: value, ...}}`` where the
    name key itself and None-valued attributes are omitted, and any attribute
    exposing `to_dict` is serialized recursively.
    """

    def __init__(self, *, configuration_type: str):
        self.name = configuration_type

    def to_dict(self) -> t.Dict[(str, t.Any)]:
        body = {}
        for key, value in self.__dict__.items():
            if value is None or key == 'name':
                continue
            body[key] = value.to_dict() if hasattr(value, 'to_dict') else value
        return {self.name: body}
def main(path: Path):
    """Interactively delete the directory `path` after a y/N confirmation.

    Does nothing when `path` is not a directory or the user declines;
    deletion failures are printed instead of raised.
    """
    if path.is_dir():
        answer = input(f'Are you sure you want to reset {path} (y)')
        if (answer.lower().strip() == 'y'):
            try:
                shutil.rmtree(path)
                msg.good(f'Deleted directory {path}')
            except Exception as e:
                # Best-effort: report and continue instead of crashing the CLI.
                print(e)
def extractBakaPervert(item):
    """Map a Baka Pervert feed item title to a release message.

    Returns None for non-release posts (previews, fanfics, 'antihero'
    entries without a prefix match are handled below), a release message
    for recognized series prefixes, and False when nothing matches.
    The prefix checks are order-sensitive: more specific prefixes (e.g.
    'arifureta chapter' + 'finished') are tested before generic ones.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    ltitle = item['title'].lower()
    # Skip items with no chapter/volume info and preview posts.
    if ((not (chp or vol or frag)) or ('preview' in ltitle)):
        return None
    if ('fanfic' in ltitle):
        return None
    if ('antihero' in ltitle):
        return buildReleaseMessageWithType(item, 'Ultimate Antihero', vol, chp, frag=frag, postfix=postfix)
    if ltitle.startswith('hxh'):
        return buildReleaseMessageWithType(item, 'Hybrid x Heart Magis Academy Ataraxia', vol, chp, frag=frag, postfix=postfix)
    if ltitle.startswith('magika vol'):
        return buildReleaseMessageWithType(item, 'Magika No Kenshi To Shoukan Maou', vol, chp, frag=frag, postfix=postfix)
    if (ltitle.startswith('arifureta chapter') and ('finished' in ltitle)):
        return buildReleaseMessageWithType(item, 'Arifureta Shokugyou de Sekai Saikyou', vol, chp, frag=frag, postfix=postfix)
    if ltitle.startswith('arifureta '):
        return buildReleaseMessageWithType(item, 'Arifureta Shokugyou de Sekai Saikyou', vol, chp, frag=frag, postfix=postfix)
    if (ltitle.startswith('bahamut ') and ('finished' in ltitle)):
        return buildReleaseMessageWithType(item, 'Saijaku Muhai no Bahamut', vol, chp, frag=frag, postfix=postfix)
    if (ltitle.startswith('seigensou ') and ('finished' in ltitle)):
        return buildReleaseMessageWithType(item, 'Seirei Gensouki', vol, chp, frag=frag, postfix=postfix)
    if (ltitle.startswith('sevens ') and ('finished' in ltitle)):
        return buildReleaseMessageWithType(item, 'Sevens', vol, chp, frag=frag, postfix=postfix)
    if (ltitle.startswith('campiones ') and ('finished' in ltitle)):
        return buildReleaseMessageWithType(item, 'Campione', vol, chp, frag=frag, postfix=postfix)
    if (ltitle.startswith('otomege ') and ('finished' in ltitle)):
        return buildReleaseMessageWithType(item, 'Otomege Sekai wa Mob ni Kibishii Sekai desu', vol, chp, frag=frag, postfix=postfix)
    if (ltitle.startswith('maou gakuen ') and ('finished' in ltitle)):
        return buildReleaseMessageWithType(item, 'Maou Gakuen no Hangyakusha ~Jinrui Hatsu no Maou Kouhou, Kenzoku Shoujo to Ouza wo Mezashite Nariagaru~', vol, chp, frag=frag, postfix=postfix)
    # No known series prefix matched (False here, None above - kept as-is).
    return False
class Transaction(BaseSpan):
    """A single APM transaction: the root unit of work that spans are
    recorded against.

    NOTE(review): several bare expressions in this class read like
    decorators whose '@' was lost in extraction (`_sampled.setter`, and
    `is_sampled`/`tracer` which read like properties) - confirm against the
    original module before relying on attribute-access semantics.
    """
    def __init__(self, tracer: 'Tracer', transaction_type: str='custom', trace_parent: Optional[TraceParent]=None, is_sampled: bool=True, start: Optional[float]=None, sample_rate: Optional[float]=None, links: Optional[Sequence[TraceParent]]=None) -> None:
        # Unique id of this transaction within its trace.
        self.id = self.get_dist_tracing_id()
        if (not trace_parent):
            # No upstream context: start a new trace rooted here.
            trace_parent = TraceParent.new(self.id, is_sampled)
        self.pause_sampling = False
        self.trace_parent: TraceParent = trace_parent
        # Wall-clock start; `start` overrides for delayed/replayed work.
        self.timestamp = (start if (start is not None) else time.time())
        self.name: Optional[str] = None
        self.result: Optional[str] = None
        self.transaction_type = transaction_type
        self._tracer = tracer
        self.transaction = self
        # Snapshot config values at start so mid-flight config changes do
        # not affect an in-progress transaction.
        self.config_span_compression_enabled = tracer.config.span_compression_enabled
        self.config_span_compression_exact_match_max_duration = tracer.config.span_compression_exact_match_max_duration
        self.config_span_compression_same_kind_max_duration = tracer.config.span_compression_same_kind_max_duration
        self.config_exit_span_min_duration = tracer.config.exit_span_min_duration
        self.config_transaction_max_spans = tracer.config.transaction_max_spans
        self.dropped_spans: int = 0
        self.context: Dict[(str, Any)] = {}
        self._is_sampled = is_sampled
        self.sample_rate = sample_rate
        self._span_counter: int = 0
        # Accumulated self-time per (span_type, span_subtype) for breakdown metrics.
        self._span_timers: Dict[(Tuple[(str, str)], Timer)] = defaultdict(Timer)
        self._span_timers_lock = threading.Lock()
        self._dropped_span_statistics = defaultdict((lambda : {'count': 0, 'duration.sum.us': 0}))
        try:
            self._breakdown = self.tracer._agent.metrics.get_metricset('elasticapm.metrics.sets.breakdown.BreakdownMetricSet')
        except (LookupError, AttributeError):
            # Breakdown metrics are optional; disable when unavailable.
            self._breakdown = None
        super().__init__(start=start)
        if links:
            for trace_parent in links:
                self.add_link(trace_parent)
    def end(self, skip_frames: int=0, duration: Optional[timedelta]=None) -> None:
        """Finish the transaction and flush per-span-type breakdown timers."""
        super().end(skip_frames, duration)
        if self._breakdown:
            for ((span_type, span_subtype), timer) in self._span_timers.items():
                labels = {'span.type': span_type, 'transaction.name': self.name, 'transaction.type': self.transaction_type}
                if span_subtype:
                    labels['span.subtype'] = span_subtype
                val = timer.val
                self._breakdown.timer('span.self_time', reset_on_collect=True, unit='us', **labels).update(val[0], val[1])
            if self.is_sampled:
                # 'app' self-time = total duration minus time spent in child spans.
                self._breakdown.timer('span.self_time', reset_on_collect=True, unit='us', **{'span.type': 'app', 'transaction.name': self.name, 'transaction.type': self.transaction_type}).update(((self.duration - self._child_durations.duration).total_seconds() * 1000000))
    def _begin_span(self, name, span_type, context=None, leaf=False, labels=None, parent_span_id=None, span_subtype=None, span_action=None, sync=None, start=None, auto_activate=True, links: Optional[Sequence[TraceParent]]=None):
        """Create (or drop) a child span and optionally make it current."""
        parent_span = execution_context.get_span()
        tracer = self.tracer
        if (parent_span and parent_span.leaf):
            # Children of leaf spans are never recorded.
            span = DroppedSpan(parent_span, leaf=True)
        elif (self.config_transaction_max_spans and (self._span_counter > (self.config_transaction_max_spans - 1))):
            # Span budget exhausted: count it as dropped instead.
            self.dropped_spans += 1
            span = DroppedSpan(parent_span, context=context)
        else:
            span = Span(transaction=self, name=name, span_type=(span_type or 'code.custom'), context=context, leaf=leaf, labels=labels, parent=parent_span, parent_span_id=parent_span_id, span_subtype=span_subtype, span_action=span_action, sync=sync, start=start, links=links)
            span.frames = tracer.frames_collector_func()
            self._span_counter += 1
        if auto_activate:
            execution_context.set_span(span)
        return span
    def begin_span(self, name, span_type, context=None, leaf=False, labels=None, span_subtype=None, span_action=None, sync=None, start=None, auto_activate=True, links: Optional[Sequence[TraceParent]]=None):
        """Public wrapper around _begin_span (parent span id always implicit)."""
        return self._begin_span(name, span_type, context=context, leaf=leaf, labels=labels, parent_span_id=None, span_subtype=span_subtype, span_action=span_action, sync=sync, start=start, auto_activate=auto_activate, links=links)
    def end_span(self, skip_frames: int=0, duration: Optional[float]=None, outcome: str='unknown'):
        """End the currently active span; raises LookupError when none is active."""
        span = execution_context.get_span()
        if (span is None):
            raise LookupError()
        # Only apply the provided outcome if none was set explicitly.
        if ((not span.outcome) or (span.outcome == 'unknown')):
            span.outcome = outcome
        span.end(skip_frames=skip_frames, duration=duration)
        return span
    def ensure_parent_id(self) -> str:
        """Ensure a distinct parent span id exists (used for RUM correlation)."""
        if (self.trace_parent.span_id == self.id):
            self.trace_parent.span_id = ('%016x' % random.getrandbits(64))
            logger.debug('Set parent id to generated %s', self.trace_parent.span_id)
        return self.trace_parent.span_id
    def to_dict(self) -> dict:
        """Serialize the transaction into the intake-API event dict."""
        context = self.context.copy()
        context['tags'] = self.labels
        result = {'id': self.id, 'trace_id': self.trace_parent.trace_id, 'name': encoding.keyword_field((self.name or '')), 'type': encoding.keyword_field(self.transaction_type), 'duration': ((self.duration.total_seconds() * 1000) if self.duration else None), 'result': encoding.keyword_field(str(self.result)), 'timestamp': int((self.timestamp * 1000000)), 'outcome': self.outcome, 'sampled': self.is_sampled, 'span_count': {'started': self._span_counter, 'dropped': self.dropped_spans}}
        if self._dropped_span_statistics:
            result['dropped_spans_stats'] = [{'destination_service_resource': resource, 'service_target_type': target_type, 'service_target_name': target_name, 'outcome': outcome, 'duration': {'count': v['count'], 'sum': {'us': int(v['duration.sum.us'])}}} for ((resource, outcome, target_type, target_name), v) in self._dropped_span_statistics.items()]
        if (self.sample_rate is not None):
            result['sample_rate'] = float(self.sample_rate)
        if self.trace_parent:
            result['trace_id'] = self.trace_parent.trace_id
            if (self.trace_parent.span_id and (self.trace_parent.span_id != self.id)):
                result['parent_id'] = self.trace_parent.span_id
        if self.links:
            result['links'] = self.links
        if ('faas' in context):
            result['faas'] = context.pop('faas')
        if ('otel_spankind' in context):
            result['otel'] = {'span_kind': context.pop('otel_spankind')}
        # Servers >= 7.16 accept otel attributes natively; older servers get
        # them flattened into tags.
        if (elasticapm.get_client() and elasticapm.get_client().check_server_version(gte=(7, 16))):
            if ('otel_attributes' in context):
                if ('otel' not in result):
                    result['otel'] = {'attributes': context.pop('otel_attributes')}
                else:
                    result['otel']['attributes'] = context.pop('otel_attributes')
        else:
            attributes = context.pop('otel_attributes', {})
            for (key, value) in attributes.items():
                # NOTE(review): result['context'] is only assigned below (and
                # only when sampled) - this line looks like it would raise
                # KeyError when attributes exist on an old server; confirm.
                result['context']['tags'][key] = value
        if self.is_sampled:
            result['context'] = context
        return result
    def track_span_duration(self, span_type, span_subtype, self_duration) -> None:
        """Accumulate a child span's self-time (in us) for breakdown metrics."""
        with self._span_timers_lock:
            self._span_timers[(span_type, span_subtype)].update((self_duration.total_seconds() * 1000000))
    def is_sampled(self) -> bool:
        # Reads like a stripped @property - see class-level NOTE.
        return self._is_sampled
    # NOTE(review): `_sampled.setter` below reads like a stripped
    # `@is_sampled.setter` decorator; kept byte-identical.
    _sampled.setter
    def is_sampled(self, is_sampled: bool) -> None:
        self._is_sampled = is_sampled
        if (not is_sampled):
            # Propagate an explicit zero sample rate downstream.
            if self.sample_rate:
                self.sample_rate = '0'
                self.trace_parent.add_tracestate(constants.TRACESTATE.SAMPLE_RATE, self.sample_rate)
    def tracer(self) -> 'Tracer':
        # Reads like a stripped @property - see class-level NOTE.
        return self._tracer
    def track_dropped_span(self, span: SpanType) -> None:
        """Aggregate statistics for a span that was dropped (fast-exit/budget)."""
        with self._span_timers_lock:
            try:
                resource = span.context['destination']['service']['resource']
                target_type = nested_key(span.context, 'service', 'target', 'type')
                target_name = nested_key(span.context, 'service', 'target', 'name')
                stats = self._dropped_span_statistics[(resource, span.outcome, target_type, target_name)]
                stats['count'] += 1
                stats['duration.sum.us'] += int((span.duration.total_seconds() * 1000000))
            except KeyError:
                # Spans without destination context are not aggregated.
                pass
def test_mark(dataset, source):
    """The mark recipe exposes the expected components for a text view."""
    view_id = 'text'
    recipe = mark(dataset, source, view_id)
    examples = list(recipe['stream'])
    assert recipe['view_id'] == view_id
    assert recipe['dataset'] == dataset
    assert len(examples) == 2
    # All lifecycle hooks must be callable.
    for hook in ('update', 'on_load', 'on_exit'):
        assert hasattr(recipe[hook], '__call__')
def test_matcher_match_zero(matcher, en_vocab):
    """'!'-operator patterns must reject quoted spans containing punctuation."""
    short_words = 'He said , " some words " ...'.split()
    long_words = 'He said , " some three words " ...'.split()
    # Two non-punctuation tokens between quotes.
    negated_pattern = [{'ORTH': '"'}, {'OP': '!', 'IS_PUNCT': True}, {'OP': '!', 'IS_PUNCT': True}, {'ORTH': '"'}]
    # Three punctuation tokens between quotes.
    punct_pattern = [{'ORTH': '"'}, {'IS_PUNCT': True}, {'IS_PUNCT': True}, {'IS_PUNCT': True}, {'ORTH': '"'}]
    matcher.add('Quote', [negated_pattern])
    doc = Doc(en_vocab, words=short_words)
    assert len(matcher(doc)) == 1
    doc = Doc(en_vocab, words=long_words)
    assert len(matcher(doc)) == 0
    # Adding the punctuation pattern still yields no match on the long text.
    matcher.add('Quote', [punct_pattern])
    assert len(matcher(doc)) == 0
def hash(x):
    """Domain hash: sha256 for bytes, compressed form for blst points, and
    the hash of concatenated element encodings for sequences.

    Note: intentionally shadows the builtin ``hash``. Sequence elements
    that are neither bytes, int, nor blst.P1 are silently skipped.
    """
    if isinstance(x, bytes):
        return hashlib.sha256(x).digest()
    if isinstance(x, blst.P1):
        # Hash the point's compressed serialization.
        return hash(x.compress())
    # Otherwise treat x as a sequence and hash the concatenated encodings.
    encoded = b''
    for element in x:
        if isinstance(element, bytes):
            encoded += element
        elif isinstance(element, int):
            encoded += element.to_bytes(32, 'little')
        elif isinstance(element, blst.P1):
            encoded += hash(element.compress())
    return hash(encoded)
()
def sync(center, record_type, start_date=None, end_date=None):
    """Dispatch a background sync job for one Zenoti record type.

    NOTE(review): the bare `()` above reads like a stripped
    `@frappe.whitelist()` decorator - confirm against the original file.
    Invoice syncs are validated first (date ordering, max 7-day window);
    every job is enqueued with a 10000-second timeout.
    """
    if (record_type == 'Sales Invoice'):
        if (get_datetime(end_date) < get_datetime(start_date)):
            frappe.throw(_('To Date must be greater than From Date'))
        if (date_diff(end_date, start_date) > 7):
            frappe.throw(_('Difference between From Date and To Date cannot be more than 7.'))
        frappe.enqueue('ecommerce_integrations.zenoti.doctype.zenoti_settings.zenoti_settings.sync_invoices', center_id=center, start_date=start_date, end_date=end_date, timeout=10000)
    elif (record_type == 'Employees'):
        frappe.enqueue('ecommerce_integrations.zenoti.doctype.zenoti_center.zenoti_center.sync_employees_', center_id=center, timeout=10000)
    elif (record_type == 'Customers'):
        frappe.enqueue('ecommerce_integrations.zenoti.doctype.zenoti_center.zenoti_center.sync_customers_', center_id=center, timeout=10000)
    elif (record_type == 'Items'):
        frappe.enqueue('ecommerce_integrations.zenoti.doctype.zenoti_center.zenoti_center.sync_items_', center_id=center, timeout=10000)
    elif (record_type == 'Categories'):
        frappe.enqueue('ecommerce_integrations.zenoti.doctype.zenoti_center.zenoti_center.sync_category_', center_id=center, timeout=10000)
    elif (record_type == 'Stock Reconciliation'):
        frappe.enqueue('ecommerce_integrations.zenoti.doctype.zenoti_settings.zenoti_settings.sync_stocks', center=center, date=start_date, timeout=10000)
class LyricSearchMethod():
    """Base class for lyric providers; supplies shared HTML-scrubbing helpers."""

    def find_lyrics(self, track):
        """Look up lyrics for `track`; concrete providers must override."""
        raise NotImplementedError

    def _set_manager(self, manager):
        # Back-reference to the owning search manager.
        self.manager = manager

    def remove_script(self, data):
        """Strip <script>...</script> blocks (greedy, single-line) from `data`."""
        p = re.compile('<script.*/script>')
        return p.sub('', data)

    def remove_div(self, data):
        """Strip <div>...</div> blocks (greedy, single-line) from `data`."""
        p = re.compile('<div.*/div>')
        return p.sub('', data)

    def remove_html_tags(self, data):
        """Turn <br/> into newlines, then drop remaining tags and comments."""
        data = data.replace('<br/>', '\n')
        p = re.compile('<[^<]*?/?>')
        data = p.sub('', data)
        # Bug fix: the comment pattern was '/<!--.*?-->/' - JavaScript-style
        # regex delimiters left in a Python regex, so HTML comments that
        # contain a '<' (which the tag pattern above cannot consume) were
        # never removed. Drop the stray slashes.
        p = re.compile('<!--.*?-->')
        return p.sub('', data)
def parse_message(msg: IncomingMessage) -> dict:
    """Decode this object's payload from `msg`, extending the base attributes.

    A leading boolean flag announces optional base64-encoded light/depth
    buffers; when present both are read, in that order.
    """
    data = attr.base_attr.parse_message(msg)
    if msg.read_bool() is True:
        data['light'] = base64.b64decode(msg.read_string())
        data['depth'] = base64.b64decode(msg.read_string())
    return data
class role_reply(message):
    """OpenFlow v5 (1.4) OFPT_ROLE_REPLY message.

    Loxi-style generated message class. NOTE(review): `unpack` takes no
    `self` and reads like a stripped `@staticmethod` - confirm against the
    generator output.
    """
    version = 5
    type = 25

    def __init__(self, xid=None, role=None, generation_id=None):
        # Normalize missing fields to their wire defaults (xid stays None
        # until assigned by the connection layer).
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (role != None):
            self.role = role
        else:
            self.role = 0
        if (generation_id != None):
            self.generation_id = generation_id
        else:
            self.generation_id = 0
        return

    def pack(self):
        """Serialize to wire format and return the packed bytes.

        Bug fix: the 4-byte padding and the final join previously used text
        strings ('\\x00' * 4 and ''.join), which raises TypeError on
        Python 3 when mixed with the bytes that struct.pack produces; both
        are bytes now (also valid on Python 2).
        """
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.role))
        packed.append((b'\x00' * 4))  # pad
        packed.append(struct.pack('!Q', self.generation_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return b''.join(packed)

    def unpack(reader):
        """Deserialize a role_reply from a loxi buffer reader."""
        obj = role_reply()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 25)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this message's declared length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.role = reader.read('!L')[0]
        reader.skip(4)  # pad
        obj.generation_id = reader.read('!Q')[0]
        return obj

    def __eq__(self, other):
        # Field-wise equality over xid, role and generation_id.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.role != other.role):
            return False
        if (self.generation_id != other.generation_id):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering of this message to printer `q`."""
        q.text('role_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('role = ')
                # Show the symbolic role name when the value is known.
                value_name_map = {0: 'OFPCR_ROLE_NOCHANGE', 1: 'OFPCR_ROLE_EQUAL', 2: 'OFPCR_ROLE_MASTER', 3: 'OFPCR_ROLE_SLAVE'}
                if (self.role in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.role], self.role)))
                else:
                    q.text(('%#x' % self.role))
                q.text(',')
                q.breakable()
                q.text('generation_id = ')
                q.text(('%#x' % self.generation_id))
                q.breakable()
        q.text('}')
def test_dict_to_vec_conversion():
    """Round-trip dict -> flat vector -> dict through an elementwise map."""
    sample = {'a': torch.ones((2, 5)), 'b': torch.rand(5), 'c': torch.tensor(3.0)}
    converter = DictToVecConverter(example_dict=sample)
    flat = converter.to_vec(sample)
    # 2*5 + 5 + 1 scalar entries in total.
    assert len(flat) == 16
    recovered = converter.to_dict(torch.exp(flat))
    for key in sample:
        assert torch.allclose(torch.exp(sample[key]), recovered[key])
class Collaborator(models.Model):
    """Event collaborator profile: what an EventUser can help with and when."""
    objects = EventUserManager()
    created_at = models.DateTimeField(_('Created At'), auto_now_add=True)
    updated_at = models.DateTimeField(_('Updated At'), auto_now=True)
    # Link to the underlying event/user pair; optional at the DB level.
    event_user = models.ForeignKey(EventUser, verbose_name=_('Event User'), blank=True, null=True)
    assignation = models.CharField(_('Assignation'), max_length=200, blank=True, null=True, help_text=_('Anything you can help with (i.e. Talks, Coffee...)'))
    time_availability = models.CharField(_('Time Availability'), max_length=200, blank=True, null=True, help_text=_('Time period in which you can help during the event. i.e. "All the event", "Morning", "Afternoon", ...'))
    phone = models.CharField(_('Phone'), max_length=200, blank=True, null=True)
    address = models.CharField(_('Address'), max_length=200, blank=True, null=True)
    additional_info = models.CharField(_('Additional Info'), max_length=200, blank=True, null=True, help_text=_('Additional info you consider relevant'))

    class Meta():
        verbose_name = _('Collaborator')
        verbose_name_plural = _('Collaborators')

    def __str__(self):
        # Delegate to the linked EventUser's string form ('None' when unset).
        return str(self.event_user)
def expand_interest_traces(interest_traces, trace_to_dependent_traces):
    """Return the transitive closure of `interest_traces` under the
    dependent-trace relation.

    `trace_to_dependent_traces` must map every reachable trace to its
    (possibly empty) collection of dependents; a missing key raises
    KeyError. Cycles are handled.
    """
    closure = set()
    stack = list(interest_traces)
    while stack:
        trace = stack.pop()
        if trace in closure:
            continue
        closure.add(trace)
        stack.extend(trace_to_dependent_traces[trace])
    return closure
class SomeStrategy(PostProcessorStrategy):
    """Example post-processor strategy wired to SomeStrategyConfiguration."""
    name = 'some postprocessor strategy'
    configuration_model = SomeStrategyConfiguration

    def __init__(self, configuration: SomeStrategyConfiguration):
        # Only `some_key` from the validated configuration is retained.
        self.some_config = configuration.some_key

    def process(self, data: Any, identity_data: Dict[(str, Any)]=None) -> Union[(List[Dict[(str, Any)]], Dict[(str, Any)])]:
        # Intentionally a no-op placeholder in this example strategy.
        pass
class TestTemplateExtras(unittest.TestCase):
    """Unit tests for the template helper functions in module `t`."""
    def test_roundpound_less_than_10(self):
        # Values up to 10 are returned (rounded) without bucketing.
        self.assertEqual(t.roundpound(0.1), '0')
        self.assertEqual(t.roundpound(1), '1')
        self.assertEqual(t.roundpound(9), '9')
        self.assertEqual(t.roundpound(10), '10')
    def test_roundpound_more_than_10(self):
        # Larger values are rounded to one significant figure and
        # thousands-formatted.
        self.assertEqual(t.roundpound(11), '10')
        self.assertEqual(t.roundpound(56), '60')
        self.assertEqual(t.roundpound(236), '200')
        self.assertEqual(t.roundpound(4999), '5,000')
    def test_deltawords_positive_all_sizes(self):
        self.assertEqual(t.deltawords(0, 100), 'massively')
        self.assertEqual(t.deltawords(0, 29), 'considerably')
        self.assertEqual(t.deltawords(0, 19), 'moderately')
        self.assertEqual(t.deltawords(0, 1), 'slightly')
        self.assertEqual(t.deltawords(0, 0), 'not at all')
    def test_deltawords_negative(self):
        # Direction does not matter, only magnitude.
        self.assertEqual(t.deltawords(29, 0), 'considerably')
    # NOTE(review): the bare call below reads like a stripped
    # `@mock.patch(...)` decorator for the following test - confirm.
    ('{.__name__}.timezone.now'.format(t))
    def test_current_time(self, timezone_now):
        timezone_now.return_value = datetime.date(2018, 1, 3)
        self.assertEqual(t.current_time('%Y-%m-%d'), '2018-01-03')
    def test_fancy_join(self):
        # Oxford-free joining: 'and' before the final element only.
        self.assertEqual(t.fancy_join(['a', 'b', 'c']), 'a, b and c')
        self.assertEqual(t.fancy_join(['a', 'b']), 'a and b')
        self.assertEqual(t.fancy_join(['a']), 'a')
        self.assertEqual(t.fancy_join([]), '')
def test_config(tmpdir, monkeypatch):
    """End-to-end test of Config: .env file parsing, casts, defaults,
    secrets, error cases, Path input, and environ fallback."""
    path = os.path.join(tmpdir, '.env')
    with open(path, 'w') as file:
        file.write('# Do not commit to source control\n')
        file.write('DATABASE_URL=postgres://user:/dbname\n')
        file.write('REQUEST_HOSTNAME=example.com\n')
        file.write('SECRET_KEY=12345\n')
        file.write('BOOL_AS_INT=0\n')
        file.write('\n')
        file.write('\n')
    # Explicit environ takes part in lookups alongside the file.
    config = Config(path, environ={'DEBUG': 'true'})
    def cast_to_int(v) -> int:
        return int(v)
    DEBUG = config('DEBUG', cast=bool)
    DATABASE_URL = config('DATABASE_URL', cast=URL)
    REQUEST_TIMEOUT = config('REQUEST_TIMEOUT', cast=int, default=10)
    REQUEST_HOSTNAME = config('REQUEST_HOSTNAME')
    MAIL_HOSTNAME = config('MAIL_HOSTNAME', default=None)
    SECRET_KEY = config('SECRET_KEY', cast=Secret)
    UNSET_SECRET = config('UNSET_SECRET', cast=Secret, default=None)
    EMPTY_SECRET = config('EMPTY_SECRET', cast=Secret, default='')
    assert (config('BOOL_AS_INT', cast=bool) is False)
    assert (config('BOOL_AS_INT', cast=cast_to_int) == 0)
    assert (config('DEFAULTED_BOOL', cast=cast_to_int, default=True) == 1)
    assert (DEBUG is True)
    assert (DATABASE_URL.path == '/dbname')
    # NOTE(review): the .env line above writes 'postgres://user:/dbname'
    # (no password), yet this asserts password == 'pass' - the fixture
    # string looks corrupted (extraction damage?); verify upstream.
    assert (DATABASE_URL.password == 'pass')
    assert (DATABASE_URL.username == 'user')
    assert (REQUEST_TIMEOUT == 10)
    assert (REQUEST_HOSTNAME == 'example.com')
    assert (MAIL_HOSTNAME is None)
    # Secrets never leak their value through repr.
    assert (repr(SECRET_KEY) == "Secret('')")
    assert (str(SECRET_KEY) == '12345')
    assert bool(SECRET_KEY)
    assert (not bool(EMPTY_SECRET))
    assert (not bool(UNSET_SECRET))
    with pytest.raises(KeyError):
        config.get('MISSING')
    with pytest.raises(ValueError):
        config.get('DEBUG', cast=int)
    with pytest.raises(ValueError):
        config.get('REQUEST_HOSTNAME', cast=bool)
    # Config also accepts a pathlib.Path.
    config = Config(Path(path))
    REQUEST_HOSTNAME = config('REQUEST_HOSTNAME')
    assert (REQUEST_HOSTNAME == 'example.com')
    # Without a file, values come from the process environment.
    config = Config()
    monkeypatch.setenv('STARLETTE_EXAMPLE_TEST', '123')
    monkeypatch.setenv('BOOL_AS_INT', '1')
    assert (config.get('STARLETTE_EXAMPLE_TEST', cast=int) == 123)
    assert (config.get('BOOL_AS_INT', cast=bool) is True)
    # Only '0'/'1'-style values are valid for bool casts.
    monkeypatch.setenv('BOOL_AS_INT', '2')
    with pytest.raises(ValueError):
        config.get('BOOL_AS_INT', cast=bool)
class OptionPlotoptionsArearangeLowmarkerStatesSelect(Options):
    # Generated wrapper for Highcharts
    # plotOptions.arearange.lowMarker.states.select. Each option is a
    # getter/setter pair sharing one name - NOTE(review): property
    # decorators appear stripped during extraction; confirm.
    def enabled(self):
        # Getter: whether the select state is enabled (default True).
        return self._config_get(True)
    def enabled(self, flag: bool):
        # Setter: enable/disable the select state.
        self._config(flag, js_type=False)
    def fillColor(self):
        # Getter: marker fill color (default '#cccccc').
        return self._config_get('#cccccc')
    def fillColor(self, text: str):
        # Setter: marker fill color.
        self._config(text, js_type=False)
    def lineColor(self):
        # Getter: marker border color (default '#000000').
        return self._config_get('#000000')
    def lineColor(self, text: str):
        # Setter: marker border color.
        self._config(text, js_type=False)
    def lineWidth(self):
        # Getter: marker border width in px (default 2).
        return self._config_get(2)
    def lineWidth(self, num: float):
        # Setter: marker border width in px.
        self._config(num, js_type=False)
    def radius(self):
        # Getter: marker radius; None means inherit.
        return self._config_get(None)
    def radius(self, num: float):
        # Setter: marker radius in px.
        self._config(num, js_type=False)
class AleaTracker(BaseEyeTracker):
def __init__(self, display, logfile=settings.LOGFILE, alea_key=settings.ALEAKEY, animated_calibration=settings.ALEAANIMATEDCALIBRATION, alea_logging=settings.ALEALOGGING, eventdetection=settings.EVENTDETECTION, saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, blink_threshold=settings.BLINKTHRESH, **args):
    """Initialize the Alea eye tracker wrapper: set up display feedback,
    event-detection thresholds, the underlying OGAleaTracker connection,
    and log the configuration."""
    try:
        # Pull docstrings from the abstract base onto this class; purely
        # cosmetic, so failures are ignored.
        copy_docstr(BaseEyeTracker, AleaTracker)
    except:
        pass
    self.disp = display
    self.screen = Screen()
    self.dispsize = self.disp.dispsize
    self.screensize = settings.SCREENSIZE
    self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
    self.errorbeep = Sound(osc='saw', freq=100, length=100)
    # Show a waiting message while the tracker connects.
    self.screen.clear()
    self.screen.draw_text(text='Initialising the eye tracker, please wait...', fontsize=20)
    self.disp.fill(self.screen)
    self.disp.show()
    self.alea_logging = alea_logging
    self.outputfile = (logfile + '.tsv')
    self.animated_calibration = (animated_calibration == True)
    self.connected = False
    self.recording = False
    # Accuracy/error bounds: degrees and the pixel equivalent.
    self.errdist = 2
    self.pxerrdist = 30
    self.maxtries = 100
    self.prevsample = ((- 1), (- 1))
    self.prevps = (- 1)
    # Event-detection thresholds (degrees, ms, deg/s, deg/s^2).
    self.fixtresh = 1.5
    self.fixtimetresh = 100
    self.spdtresh = saccade_velocity_threshold
    self.accthresh = saccade_acceleration_threshold
    self.blinkthresh = blink_threshold
    self.eventdetection = eventdetection
    self.set_detection_type(self.eventdetection)
    self.weightdist = 10
    # Open the connection to the IntelliGaze/Alea device.
    self.alea = OGAleaTracker(alea_key, alea_logging=self.alea_logging, file_path=self.outputfile)
    # Fixed 60 Hz sampling - assumed device rate; TODO confirm per model.
    self.samplerate = 60.0
    self.sampletime = (1000.0 / self.samplerate)
    self.log('pygaze initiation report start')
    self.log('display resolution: {}x{}'.format(self.dispsize[0], self.dispsize[1]))
    self.log('display size in cm: {}x{}'.format(self.screensize[0], self.screensize[1]))
    self.log('samplerate: {} Hz'.format(self.samplerate))
    self.log('sampletime: {} ms'.format(self.sampletime))
    self.log('fixation threshold: {} degrees'.format(self.fixtresh))
    self.log('speed threshold: {} degrees/second'.format(self.spdtresh))
    self.log('acceleration threshold: {} degrees/second**2'.format(self.accthresh))
    self.log('pygaze initiation report end')
def calibrate(self, animated=None, skip_bad_points=False):
    """Run the IntelliGaze calibration, then a noise calibration.

    animated -- use the animated calibration target; None falls back to
        the value chosen at construction time
    skip_bad_points -- forwarded to the Alea calibration routine
    Returns True on success, False when the user quits with 'q'.
    """
    if (animated is None):
        animated = self.animated_calibration
    # The Alea API selects the animated target via a magic image name.
    if animated:
        img = 'ANIMATION:PARROT'
    else:
        img = ''
    self.screen.clear()
    self.screen.draw_text(text='Running calibration in the foreground...', fontsize=20)
    self.disp.fill(self.screen)
    self.disp.show()
    quited = False
    calibration_approved = False
    # Keep calibrating until the user accepts the result (space or q).
    while (not calibration_approved):
        (status, improve) = self.alea.calibrate(image=img, skip_bad_points=skip_bad_points)
        # Status 0 means the tracker reported a successful calibration.
        if (status == 0):
            calib_str = 'Calibration completed!'
        else:
            calib_str = 'Calibration failed!'
        if improve:
            calib_str += '\n\nWARNING: IntelliGaze recommends repeating the calibration to improve accuracy.'
        calib_str += '\n\n\nPress R to retry, or Space to continue.'
        self.screen.clear()
        self.screen.draw_text(text=calib_str, fontsize=20)
        self.disp.fill(self.screen)
        self.disp.show()
        # Wait for an explicit choice: r = retry, space = accept, q = quit.
        key = None
        while (key not in ['r', 'Space', 'space', 'q']):
            (key, keytime) = self.kb.get_key(keylist=['q', 'r', 'space'], timeout=None, flush=True)
        if (key in ['q', 'Space', 'space']):
            calibration_approved = True
            if (key == 'q'):
                quited = True
    # Calibration aborted by the user.
    if quited:
        return False
    # --- Noise calibration: estimate accuracy and RMS noise. ---
    self.screen.clear()
    self.screen.draw_text(text='Noise calibration. Please look at the dot, and press any key to start.', fontsize=20, pos=(int((self.dispsize[0] / 2)), int((self.dispsize[1] * 0.3))))
    self.screen.draw_fixation(fixtype='dot')
    self.disp.fill(self.screen)
    self.disp.show()
    (key, keytime) = self.kb.get_key(keylist=None, timeout=None, flush=True)
    # Per-eye horizontal/vertical error and variance accumulators.
    # NOTE(review): the same monocular sample is stored for both the
    # 'L' and 'R' keys below — confirm this is intended for this device.
    err = {'LX': [], 'LY': [], 'RX': [], 'RY': []}
    var = {'LX': [], 'LY': [], 'RX': [], 'RY': []}
    self.start_recording()
    self.log('noise_calibration_start')
    # Central fixation target.
    x = int((float(self.dispsize[0]) / 2.0))
    y = int((float(self.dispsize[1]) / 2.0))
    self.screen.clear()
    self.screen.draw_fixation(fixtype='dot', pos=(x, y))
    self.disp.fill(self.screen)
    t0 = self.disp.show()
    # Collect at least 10 valid samples over at least one second.
    i = 0
    while ((i < 10) or ((clock.get_time() - t0) < 1000)):
        (gx, gy) = self.sample()
        if ((gx > 0) and (gy > 0)):
            i += 1
            err['LX'].append(abs((float(x) - float(gx))))
            err['LY'].append(abs((float(y) - float(gy))))
            err['RX'].append(abs((float(x) - float(gx))))
            err['RY'].append(abs((float(y) - float(gy))))
            for k in var.keys():
                var[k].append((err[k][(- 1)] ** 2))
        clock.pause(int(self.sampletime))
    self.log('noise_calibration_stop')
    self.stop_recording()
    # RMS noise per axis, averaged over both (duplicated) eyes.
    xnoise = ((math.sqrt((sum(var['LX']) / float(len(var['LX'])))) + math.sqrt((sum(var['RX']) / float(len(var['RX']))))) / 2.0)
    ynoise = ((math.sqrt((sum(var['LY']) / float(len(var['LY'])))) + math.sqrt((sum(var['RY']) / float(len(var['RY']))))) / 2.0)
    self.pxdsttresh = (xnoise, ynoise)
    # Convert the degree-based thresholds to pixels for this setup.
    pixpercm = (((self.dispsize[0] / float(self.screensize[0])) + (self.dispsize[1] / float(self.screensize[1]))) / 2)
    screendist = settings.SCREENDIST
    self.accuracy = ((pix2deg(screendist, (sum(err['LX']) / float(len(err['LX']))), pixpercm), pix2deg(screendist, (sum(err['LY']) / float(len(err['LY']))), pixpercm)), (pix2deg(screendist, (sum(err['RX']) / float(len(err['RX']))), pixpercm), pix2deg(screendist, (sum(err['RY']) / float(len(err['RY']))), pixpercm)))
    self.pxerrdist = deg2pix(screendist, self.errdist, pixpercm)
    self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm)
    self.pxaccuracy = (((sum(err['LX']) / float(len(err['LX']))), (sum(err['LY']) / float(len(err['LY'])))), ((sum(err['RX']) / float(len(err['RX']))), (sum(err['RY']) / float(len(err['RY'])))))
    # Speed/acceleration thresholds converted to per-millisecond units.
    self.pxspdtresh = deg2pix(screendist, (self.spdtresh / 1000.0), pixpercm)
    self.pxacctresh = deg2pix(screendist, (self.accthresh / 1000.0), pixpercm)
    self.log('pygaze calibration report start')
    self.log('accuracy (degrees): LX={}, LY={}, RX={}, RY={}'.format(self.accuracy[0][0], self.accuracy[0][1], self.accuracy[1][0], self.accuracy[1][1]))
    self.log('accuracy (in pixels): LX={}, LY={}, RX={}, RY={}'.format(self.pxaccuracy[0][0], self.pxaccuracy[0][1], self.pxaccuracy[1][0], self.pxaccuracy[1][1]))
    self.log('precision (RMS noise in pixels): X={}, Y={}'.format(self.pxdsttresh[0], self.pxdsttresh[1]))
    self.log('distance between participant and display: {} cm'.format(screendist))
    self.log('fixation threshold: {} pixels'.format(self.pxfixtresh))
    self.log('speed threshold: {} pixels/ms'.format(self.pxspdtresh))
    self.log('acceleration threshold: {} pixels/ms**2'.format(self.pxacctresh))
    self.log('pygaze calibration report end')
    return True
def close(self):
    """Shut down the tracker connection and clear the connected flag."""
    # Close the underlying device handle first; only then record that
    # this wrapper is no longer connected.
    self.alea.close()
    self.connected = False
def connected(self):
    # NOTE(review): __init__ assigns the instance attribute
    # self.connected (a bool), which shadows this method on every
    # instance, so tracker.connected() would raise "'bool' object is
    # not callable" while self.connected reads the flag directly —
    # confirm the intended API before changing either name.
    return self.connected
def drift_correction(self, pos=None, fix_triggered=False):
    """Perform a one-point drift check at pos (default: display centre).

    pos -- (x, y) position of the target, or None for the centre
    fix_triggered -- when True, delegate to the fixation-triggered
        variant instead of waiting for a key press
    Returns True when gaze was within self.pxerrdist pixels of the
    target at the moment a key was pressed, False otherwise; pressing
    escape/q restarts calibration and returns its result.
    """
    # Identity check, not equality: pos may legitimately be any tuple.
    # (Original used `pos == None`.)
    if pos is None:
        pos = (int(self.dispsize[0] / 2), int(self.dispsize[1] / 2))
    if fix_triggered:
        return self.fix_triggered_drift_correction(pos)
    self.draw_drift_correction_target(pos[0], pos[1])
    pressed = False
    while not pressed:
        pressed, presstime = self.kb.get_key()
        if pressed:
            # Escape hatch: abandon the check and recalibrate.
            if pressed in ['Escape', 'escape', 'q']:
                print("libalea.AleaTracker.drift_correction: 'q' or 'escape' pressed")
                return self.calibrate()
            gazepos = self.sample()
            # Euclidean distance between gaze and target, in pixels.
            if ((gazepos[0] - pos[0]) ** 2 + (gazepos[1] - pos[1]) ** 2) ** 0.5 < self.pxerrdist:
                return True
            else:
                self.errorbeep.play()
    # Key pressed but gaze was too far from the target.
    return False
def draw_drift_correction_target(self, x, y):
    """Draw a dot-style fixation target at (x, y) and flip it to screen."""
    self.screen.clear()
    # pw=0 draws the dot without an outline; fixed 12 px diameter.
    self.screen.draw_fixation(pos=(x, y), fixtype='dot', colour=settings.FGC, diameter=12, pw=0)
    self.disp.fill(self.screen)
    self.disp.show()
def draw_calibration_target(self, x, y):
    """Calibration targets reuse the drift-correction dot at (x, y)."""
    self.draw_drift_correction_target(x, y)
def fix_triggered_drift_correction(self, pos=None, min_samples=4, max_dev=120, timeout=10000):
    """Drift check that succeeds once gaze dwells near the target.

    pos -- (x, y) target position, or None for the display centre
    min_samples -- consecutive close-enough samples required
    max_dev -- maximal deviation from pos, in pixels
    timeout -- give up after this many milliseconds and recalibrate
    Returns True on success; on timeout or escape/q it restarts
    calibration and returns that result.
    """
    if pos is None:
        pos = (int(self.dispsize[0] / 2), int(self.dispsize[1] / 2))
    self.draw_drift_correction_target(pos[0], pos[1])
    t0 = clock.get_time()
    consecutive_count = 0
    while consecutive_count < min_samples:
        # Check abort conditions before sampling so that a stream of
        # invalid samples can never bypass the timeout (the original
        # `continue` skipped these checks).
        if (clock.get_time() - t0) > timeout:
            print('libalea.AleaTracker.fix_triggered_drift_correction: timeout during fixation-triggered drift check')
            return self.calibrate()
        if self.kb.get_key()[0] in ['Escape', 'escape', 'q']:
            print("libalea.AleaTracker.fix_triggered_drift_correction: 'q' or 'escape' pressed")
            return self.calibrate()
        x, y = self.sample()
        # self.sample() marks lost tracking with (-1, -1), never None;
        # use the shared validity helper instead of a None test. An
        # invalid sample breaks the dwell streak, as before.
        if not self.is_valid_sample((x, y)):
            consecutive_count = 0
            continue
        d = ((x - pos[0]) ** 2 + (y - pos[1]) ** 2) ** 0.5
        if d <= max_dev:
            consecutive_count += 1
        else:
            consecutive_count = 0
    return True
def get_eyetracker_clock_async(self):
    """Unsupported on the Alea backend; prints a notice, returns None."""
    print('get_eyetracker_clock_async function not supported for AleaTracker')
def log(self, msg):
    """Write msg as an event marker into the Alea data/log stream."""
    self.alea.log(msg)
def prepare_drift_correction(self, pos):
    """Unsupported on the Alea backend; prints a notice, returns None."""
    print('prepare_drift_correction function not supported for AleaTracker')
def pupil_size(self):
    """Return the newest pupil size, or -1 when it is unavailable.

    A size of 0 from the tracker means "no valid measurement"; the last
    non-zero value is cached so repeated calls between tracker updates
    keep returning the most recent measurement.
    """
    timestamp, xpos, ypos, size = self.alea.sample()
    if size == 0:
        return -1
    if size != self.prevps:
        self.prevps = copy.copy(size)
    return self.prevps
def sample(self):
    """Return the newest gaze position as (x, y) ints, or (-1, -1).

    The tracker reports (0, 0) when it has no gaze; that is mapped to
    the (-1, -1) sentinel. The last valid position is cached in
    self.prevsample.
    """
    timestamp, xpos, ypos, size = self.alea.sample()
    if xpos == 0 and ypos == 0:
        return (-1, -1)
    newest = (int(xpos), int(ypos))
    if newest != self.prevsample:
        self.prevsample = copy.copy(newest)
    return self.prevsample
def send_command(self, cmd):
    """Unsupported on the Alea backend; prints a notice, returns None."""
    print('send_command function not supported for AleaTracker')
def start_recording(self):
    """Start streaming gaze data to the log file and flag the state."""
    self.alea.start_recording()
    self.recording = True
def status_msg(self, msg):
    """Unsupported on the Alea backend; prints a notice, returns None."""
    print('status_msg function not supported for AleaTracker')
def stop_recording(self):
    """Stop streaming gaze data and flag the state."""
    self.alea.stop_recording()
    self.recording = False
def set_detection_type(self, eventdetection):
    """Select the event-detection implementation.

    Only 'pygaze' and 'native' are accepted; anything else leaves the
    current setting untouched. Always reports that PyGaze detection is
    used for saccades, fixations and blinks.
    """
    if eventdetection in ('pygaze', 'native'):
        self.eventdetection = eventdetection
    return ('pygaze', 'pygaze', 'pygaze')
def wait_for_event(self, event):
    """Dispatch to the blocking wait routine for an event code.

    Codes: 3 = blink start, 4 = blink end, 5 = saccade start,
    6 = saccade end, 7 = fixation start, 8 = fixation end.
    Raises Exception for any other code.
    """
    dispatch = {
        3: self.wait_for_blink_start,
        4: self.wait_for_blink_end,
        5: self.wait_for_saccade_start,
        6: self.wait_for_saccade_end,
        7: self.wait_for_fixation_start,
        8: self.wait_for_fixation_end,
    }
    if event not in dispatch:
        raise Exception('Error in libalea.AleaTracker.wait_for_event: eventcode {} is not supported'.format(event))
    return dispatch[event]()
def wait_for_blink_end(self):
    """Block until the current blink ends; returns the end timestamp."""
    if self.eventdetection == 'native':
        print("WARNING! 'native' event detection not implemented")
    # A blink is over as soon as a valid gaze sample comes in again.
    while not self.is_valid_sample(self.sample()):
        pass
    return clock.get_time()
def wait_for_blink_start(self):
    """Block until a blink starts; returns the blink onset timestamp.

    Signal loss only counts as a blink once it has lasted for at least
    self.blinkthresh milliseconds.
    """
    if self.eventdetection == 'native':
        print("WARNING! 'native' event detection not implemented")
    while True:
        if not self.is_valid_sample(self.sample()):
            # Candidate onset: wait to see whether the signal stays
            # gone long enough to qualify as a blink.
            onset = clock.get_time()
            while not self.is_valid_sample(self.sample()):
                if (clock.get_time() - onset) >= self.blinkthresh:
                    return onset
            # Signal came back too soon — not a blink; keep looking.
def wait_for_fixation_end(self):
    """Block until the current fixation ends.

    Returns (endtime, fixpos): the time at which gaze left the fixation
    and the position where the fixation was detected.
    """
    if self.eventdetection == 'native':
        print("WARNING! 'native' event detection not implemented")
    # First wait for a fixation to begin; its position anchors the test.
    stime, fixpos = self.wait_for_fixation_start()
    while True:
        gaze = self.sample()
        if self.is_valid_sample(gaze):
            dx = gaze[0] - fixpos[0]
            dy = gaze[1] - fixpos[1]
            # Fixation ends once gaze strays beyond the pixel threshold.
            if (dx * dx + dy * dy) > (self.pxfixtresh ** 2):
                break
    return (clock.get_time(), fixpos)
def wait_for_fixation_start(self):
    """Block until a fixation starts; returns (starttime, position).

    A fixation starts once gaze stays within self.pxfixtresh pixels of
    an anchor sample for at least self.fixtimetresh milliseconds.
    """
    if self.eventdetection == 'native':
        print("WARNING! 'native' event detection not implemented")
    # The first valid sample anchors the candidate fixation.
    anchor = self.sample()
    while not self.is_valid_sample(anchor):
        anchor = self.sample()
    t0 = clock.get_time()
    while True:
        gaze = self.sample()
        if not self.is_valid_sample(gaze):
            continue
        if ((gaze[0] - anchor[0]) ** 2 + (gaze[1] - anchor[1]) ** 2) > (self.pxfixtresh ** 2):
            # Gaze moved away: restart with a new anchor and timer.
            anchor = copy.copy(gaze)
            t0 = clock.get_time()
        else:
            now = clock.get_time()
            if (now - t0) >= self.fixtimetresh:
                return (now, anchor)
def wait_for_saccade_end(self):
    """Block until the ongoing saccade ends.

    Returns (endtime, startpos, endpos): the end timestamp, the saccade
    start position (from wait_for_saccade_start) and the position at
    which speed dropped below threshold while decelerating.
    """
    if (self.eventdetection == 'native'):
        # Native detection is unavailable; the PyGaze algorithm below
        # is used instead.
        print("WARNING! 'native' event detection not implemented")
    # Wait for saccade onset, then take the first valid sample after it
    # as the reference for the initial speed estimate.
    (t0, spos) = self.wait_for_saccade_start()
    prevpos = self.sample()
    while (not self.is_valid_sample(prevpos)):
        prevpos = self.sample()
    t1 = clock.get_time()
    # Displacement since onset (pixels) and initial speed (pixels/ms).
    s = ((((prevpos[0] - spos[0]) ** 2) + ((prevpos[1] - spos[1]) ** 2)) ** 0.5)
    # NOTE(review): divides by (t1 - t0); if two clock reads can return
    # the same timestamp this raises ZeroDivisionError — confirm the
    # clock's resolution rules that out.
    v0 = (s / (t1 - t0))
    saccadic = True
    while saccadic:
        newpos = self.sample()
        t1 = clock.get_time()
        if (self.is_valid_sample(newpos) and (newpos != prevpos)):
            # Inter-sample speed and acceleration.
            s = ((((newpos[0] - prevpos[0]) ** 2) + ((newpos[1] - prevpos[1]) ** 2)) ** 0.5)
            v1 = (s / (t1 - t0))
            a = ((v1 - v0) / (t1 - t0))
            # Saccade ends when speed is below threshold while the eye
            # is decelerating (a negative, but above -pxacctresh).
            if ((v1 < self.pxspdtresh) and ((a > ((- 1) * self.pxacctresh)) and (a < 0))):
                saccadic = False
                epos = newpos[:]
                etime = clock.get_time()
            t0 = copy.copy(t1)
            v0 = copy.copy(v1)
            prevpos = newpos[:]
    return (etime, spos, epos)
def wait_for_saccade_start(self):
    """Block until a saccade starts; returns (starttime, startpos).

    Onset is detected when inter-sample speed exceeds self.pxspdtresh
    or acceleration exceeds self.pxacctresh, after the displacement
    (weighted by the calibrated noise level) passes self.weightdist.
    """
    if (self.eventdetection == 'native'):
        # Native detection is unavailable; the PyGaze algorithm below
        # is used instead.
        print("WARNING! 'native' event detection not implemented")
    # Wait for the first valid sample to seed the algorithm.
    newpos = self.sample()
    while (not self.is_valid_sample(newpos)):
        newpos = self.sample()
    t0 = clock.get_time()
    prevpos = newpos[:]
    s = 0
    v0 = 0
    saccadic = False
    while (not saccadic):
        newpos = self.sample()
        t1 = clock.get_time()
        if (self.is_valid_sample(newpos) and (newpos != prevpos)):
            sx = (newpos[0] - prevpos[0])
            sy = (newpos[1] - prevpos[1])
            # Only consider movements clearly larger than the measured
            # RMS noise (pxdsttresh), scaled by weightdist.
            if ((((sx / self.pxdsttresh[0]) ** 2) + ((sy / self.pxdsttresh[1]) ** 2)) > self.weightdist):
                # Speed (pixels/ms) and acceleration (pixels/ms**2).
                # NOTE(review): divides by (t1 - t0); confirm two clock
                # reads can never return the same timestamp.
                s = (((sx ** 2) + (sy ** 2)) ** 0.5)
                v1 = (s / (t1 - t0))
                a = ((v1 - v0) / (t1 - t0))
                if ((v1 > self.pxspdtresh) or (a > self.pxacctresh)):
                    saccadic = True
                    spos = prevpos[:]
                    stime = clock.get_time()
                t0 = copy.copy(t1)
                v0 = copy.copy(v1)
            prevpos = newpos[:]
    return (stime, spos)
def is_valid_sample(self, gazepos):
    """Return False for sentinel positions that mark missing gaze data.

    The invalid markers are (None, None), (-1, -1) and (0, 0); any
    other coordinate pair counts as a valid gaze position.
    """
    return gazepos not in [(None, None), (-1, -1), (0, 0)]
def parse_defaults(defaults_str):
    """Parse the defaults section of an RTTTL string.

    defaults_str -- the text between the two colons of an RTTTL string,
        of the form 'd=<duration>,o=<octave>,b=<bpm>' (in that order).
    Returns a dict with keys 'duration', 'octave' and 'bpm'. Any empty,
    malformed, or invalid section yields the RTTTL fallback defaults
    (duration 4, octave 6, bpm 63).
    """
    # Single source of truth for the fallback (the original repeated
    # this literal in three places).
    fallback = {'duration': 4, 'octave': 6, 'bpm': 63}
    if defaults_str == '':
        return fallback
    # The section must match d=NN,o=N,b=NNN exactly (order fixed).
    if not re.match(r'^(d=\d{1,2},o=\d,b=\d{1,3})?$', defaults_str):
        return fallback
    try:
        defaults = dict(d.split('=') for d in defaults_str.split(','))
        return {'duration': parse_duration(defaults['d']), 'octave': parse_octave(defaults['o']), 'bpm': parse_bpm(defaults['b'])}
    except InvalidElementError:
        # One of the element parsers rejected its value.
        return fallback
def gen_tilegrid_masks(tiles):
    """Yield (frame_start, frame_end, word_start, word_end) spans.

    tiles -- tilegrid dict mapping tile names to tile entries; each
        entry's 'bits' dict maps a block type to a dict with
        'baseaddr' (numeric string), 'frames', 'offset' and 'words'.
    One span is yielded per block of every tile.
    """
    for tile in tiles.values():
        for block in tile['bits'].values():
            # base 0 lets int() honour the '0x' prefix in baseaddr.
            base = int(block['baseaddr'], 0)
            start = block['offset']
            yield (base, base + block['frames'], start, start + block['words'])
def test_to_datetime_2():
    """to_datetime parses ISO-ish strings, defaults empty/missing input,
    honours a trailing 'Z' as UTC, and treats ints as day offsets."""
    midnight = datetime.datetime(1851, 6, 25)
    six_am = datetime.datetime(1851, 6, 25, 6)
    assert (to_datetime('1851-06-25T00:00') == midnight)
    assert (to_datetime('1851-06-25T06:00') == six_am)
    assert (to_datetime('1851-06-25') == midnight)
    assert (to_datetime('') == midnight)
    assert (to_datetime() == midnight)
    assert (to_datetime('1851-06-25 06:00:00') == six_am)
    assert (to_datetime('1851-06-25T06:00:00') == six_am)
    assert (to_datetime('1851-06-25T06:00:00Z') == six_am.replace(tzinfo=datetime.timezone.utc))
    assert (to_datetime(-2) == (to_datetime(0) - datetime.timedelta(days=2)))
def test_vectorized_rollout():
    """Rollouts on a vectorized env must batch every record across all
    concurrent worker envs."""
    n_envs = 3
    vec_env = SequentialVectorEnv([build_dummy_structured_env] * n_envs)
    generator = RolloutGenerator(env=vec_env)
    policy = DistributedRandomPolicy(vec_env.action_spaces_dict, concurrency=n_envs)
    trajectory = generator.rollout(policy, n_steps=10)
    assert len(trajectory) == 10
    step_keys = vec_env.action_spaces_dict.keys()
    for record in trajectory.step_records:
        # Every sub-step must appear in each per-step dict.
        assert step_keys == record.actions_dict.keys()
        assert step_keys == record.observations_dict.keys()
        assert step_keys == record.rewards_dict.keys()
        assert record.batch_shape == [n_envs]
        # Observations carry the concurrency dimension first.
        any_sub_step_obs = list(record.observations_dict.values())[0]
        any_obs_value = list(any_sub_step_obs.values())[0]
        assert any_obs_value.shape[0] == n_envs
class TraceCalls(object):
    """Decorator that traces nested calls, indenting by call depth.

    Writes one line per call (function name and arguments) to *stream*
    and, when *show_ret* is true, the return value as well. The current
    indent level is shared across all instances through the class
    attribute ``cur_indent``.
    """

    def __init__(self, stream=sys.stdout, indent_step=2, show_ret=False):
        # stream: where trace lines are written.
        self.stream = stream
        # indent_step: spaces added per nesting level.
        self.indent_step = indent_step
        # show_ret: also log return values?
        self.show_ret = show_ret
        # Shared depth counter; creating any tracer resets it.
        TraceCalls.cur_indent = 0

    def __call__(self, fn):
        from functools import wraps

        # The original source had a bare `(fn)` expression here — the
        # residue of a stripped `@functools.wraps(fn)` decorator.
        # Restore it so the wrapper keeps fn's __name__ and __doc__.
        @wraps(fn)
        def wrapper(*args, **kwargs):
            indent = ' ' * TraceCalls.cur_indent
            argstr = ', '.join([self._argrepr(a) for a in args] +
                               ['%s=%s' % (a, repr(b)) for (a, b) in kwargs.items()])
            self.stream.write('%s%s(%s)\n' % (indent, fn.__name__, argstr))
            TraceCalls.cur_indent += self.indent_step
            # try/finally so an exception in fn cannot leave the shared
            # indent counter permanently inflated (the original leaked).
            try:
                ret = fn(*args, **kwargs)
            finally:
                TraceCalls.cur_indent -= self.indent_step
            if self.show_ret:
                self.stream.write('%s--> %s\n' % (indent, ret))
            return ret
        return wrapper

    def _argrepr(self, arg):
        """repr() hook for positional arguments; override to customise."""
        return repr(arg)
# NOTE(review): the bare string expression below looks like the residue of a
# stripped registry decorator (presumably
# `@registry.reg('cuda.gemm_rrr_permute.config')` in the original source) —
# confirm against upstream before relying on this file as-is.
('cuda.gemm_rrr_permute.config')
def gemm_rrr_permute_config(func_attrs, dtype='float16'):
    """Populate func_attrs['op_instance'] with CUTLASS op configs for a
    row/row/row-layout GEMM followed by a permute.

    func_attrs -- op attribute dict; reads 'inputs', 'epilogue' and
        'layout', writes 'op_instance'.
    dtype -- unused here; the dtype is taken from func_attrs['inputs'][0].
    """
    def fproc(op):
        # Imported lazily so the module loads without cutlass_lib present.
        import cutlass_lib
        # All three operands use row-major layout; epilogue and permute
        # layout come straight from the op's attributes.
        return common.default_fproc(op=op, a_layout=cutlass_lib.library.LayoutType.RowMajor, b_layout=cutlass_lib.library.LayoutType.RowMajor, c_layout=cutlass_lib.library.LayoutType.RowMajor, dtype=func_attrs['inputs'][0].dtype(), epilogue_name=func_attrs['epilogue'], permute_layout=func_attrs['layout'])
    func_attrs['op_instance'] = common_permute.extract_config(fproc, func_attrs)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.