def insert_dataset(session, data, tbl):
values = sqlalchemy.select([sqlalchemy.func.unnest(data.value), sqlalchemy.func.unnest(data.timestamp), sqlalchemy.func.ST_MakePoint(sqlalchemy.func.unnest(data.longitude), sqlalchemy.func.unnest(data.latitude))])
query = sqlalchemy.insert(tbl).from_select(tbl.columns, values)
session.execute(query) |
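# Usage sketch for insert_dataset above - a hedged example, not the original
# project's calling code. Assumes a SQLAlchemy 1.4-era API (the legacy
# select([...]) form used inside the helper), a PostGIS-enabled database, and
# hypothetical tables 'staging' (array columns value/timestamp/longitude/
# latitude) and 'points' (the insert target, matching the SELECT column order).
import sqlalchemy
from sqlalchemy.orm import Session

engine = sqlalchemy.create_engine('postgresql://user:pass@localhost/sensors')
metadata = sqlalchemy.MetaData()
staging = sqlalchemy.Table('staging', metadata, autoload_with=engine)
points = sqlalchemy.Table('points', metadata, autoload_with=engine)

with Session(engine) as session:
    # the .c collection exposes .value/.timestamp/... as insert_dataset expects
    insert_dataset(session, sqlalchemy.select(staging).subquery().c, points)
    session.commit()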
def _iter_avro_blocks(decoder, header, codec, writer_schema, named_schemas, reader_schema, options):
sync_marker = header['sync']
read_block = BLOCK_READERS.get(codec)
if (not read_block):
raise ValueError(f'Unrecognized codec: {codec}')
while True:
offset = decoder.fo.tell()
try:
num_block_records = decoder.read_long()
except EOFError:
return
block_bytes = read_block(decoder)
skip_sync(decoder.fo, sync_marker)
size = (decoder.fo.tell() - offset)
(yield Block(block_bytes, num_block_records, codec, reader_schema, writer_schema, named_schemas, offset, size, options)) |
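# The generator above is fastavro's internal block loop; the public entry
# point is fastavro.block_reader, sketched here on a hypothetical file:
from fastavro import block_reader

with open('events.avro', 'rb') as fo:
    for block in block_reader(fo):
        print(block.num_records, block.offset, block.size)
        for record in block:
            pass  # records decode lazily, one dict per record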
def get_data_after_aku() -> list[dict[(str, int)]]:
data_1: list[dict[(str, int)]] = []
val_6 = next_int_len(2)
data_1.append(val_6)
val_7 = next_int_len(2)
data_1.append(val_7)
for _ in range(val_6['Value']):
val_7 = next_int_len(2)
data_1.append(val_7)
for _ in range(val_7['Value']):
data_1.append(next_int_len(2))
val_7 = next_int_len(2)
data_1.append(val_7)
val_4c = val_7
for _ in range(val_4c['Value']):
data_1.append(next_int_len(2))
data_1.append(next_int_len(8))
val_5 = next_int_len(2)
data_1.append(val_5)
for _ in range(val_5['Value']):
data_1.append(next_int_len(2))
data_1.append(next_int_len(8))
data_1.append(next_int_len(1))
return data_1 |
class Network():
    @staticmethod
    def get_fields():
return {'hash_id': NO_DATA, 'mac': 0, 'ipv4_address': 0, 'ipv6_address': 0, 'ipv4_subnet': NO_DATA, 'ipv6_subnet': NO_DATA, 'tenant': 0, 'segment': 0, 'port': 0, 'state': NO_DATA, 'ignore': 'False', 'top_role': NO_DATA, 'top_confidence': 0, 'ipv4_os': NO_DATA, 'ipv6_os': NO_DATA, 'ipv4_rdns': NO_DATA, 'ipv6_rdns': NO_DATA, 'ether_vendor': NO_DATA, 'controller_type': NO_DATA, 'acls': NO_DATA}
    @staticmethod
    def field_mapping():
return {'hash_id': 'ID', 'mac': 'MAC Address', 'segment': 'Switch', 'port': 'Port', 'tenant': 'VLAN', 'ipv4_address': 'IPv4', 'ipv4_subnet': 'IPv4 Subnet', 'ipv6_subnet': 'IPv6 Subnet', 'ipv6_address': 'IPv6', 'ignore': 'Ignored', 'state': 'State', 'next_state': 'Next State', 'prev_state': 'Previous State', 'ipv4_os': 'IPv4 OS (p0f)', 'ipv6_os': 'IPv6 OS (p0f)', 'top_role': 'Role (NetworkML)', 'top_confidence': 'Role Confidence (NetworkML)', 'ipv4_rdns': 'IPv4 rDNS', 'ipv6_rdns': 'IPv6 rDNS', 'ether_vendor': 'Ethernet Vendor', 'controller_type': 'SDN Controller Type', 'acls': 'ACL History'}
    @staticmethod
    def get_dataset():
fields = Network.get_fields()
n = Nodes(fields)
n.build_nodes()
return n.nodes
    @staticmethod
    def get_configuration():
configuration = {'fields': []}
for field in Network.get_fields():
configuration['fields'].append({'path': [field], 'displayName': Network.field_mapping()[field], 'groupable': 'true'})
return configuration
    @staticmethod
    def on_get(_req, resp):
network = {}
dataset = Network.get_dataset()
configuration = Network.get_configuration()
network['dataset'] = dataset
network['configuration'] = configuration
resp.text = json.dumps(network, indent=2)
resp.content_type = falcon.MEDIA_JSON
resp.status = falcon.HTTP_200 |
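# Mounting sketch: with the staticmethod responder above, the class plugs
# straight into Falcon as a resource; the route path here is an assumption.
import falcon

app = falcon.App()
app.add_route('/v1/network', Network())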
class CanonicalBallPocket():
def resolve(self, ball: Ball, pocket: Pocket, inplace: bool=False) -> Tuple[(Ball, Pocket)]:
if (not inplace):
ball = ball.copy()
pocket = pocket.copy()
rvw = np.array([[pocket.a, pocket.b, (- pocket.depth)], [0, 0, 0], [0, 0, 0]])
ball.state = BallState(rvw, const.pocketed)
pocket.add(ball.id)
return (ball, pocket) |
class OptionSeriesBarLabelStyle(Options):
    @property
    def fontSize(self):
        return self._config_get('0.8em')
    @fontSize.setter
    def fontSize(self, num: float):
        self._config(num, js_type=False)
    @property
    def fontWeight(self):
        return self._config_get('bold')
    @fontWeight.setter
    def fontWeight(self, text: str):
        self._config(text, js_type=False) |
def test_strip_protocol():
assert (FoundryFileSystem._strip_protocol('foundry://ri.foundry.main.dataset.fee62053-77ed-4617-bd01-fc2538366c3f/folder/test.csv') == 'folder/test.csv')
assert (FoundryFileSystem._strip_protocol('foundry://ri.foundry.main.dataset.fee62053-77ed-4617-bd01-fc2538366c3f/test.csv') == 'test.csv')
assert (not FoundryFileSystem._strip_protocol('foundry://ri.foundry.main.dataset.fee62053-77ed-4617-bd01-fc2538366c3f/'))
assert (not FoundryFileSystem._strip_protocol('foundry://ri.foundry.main.dataset.fee62053-77ed-4617-bd01-fc2538366c3f')) |
class TestReadCommEnvVars(unittest.TestCase):
def test_read_comm_env_vars(self):
os.environ['WORLD_SIZE'] = '16'
os.environ['LOCAL_SIZE'] = '8'
os.environ['RANK'] = '4'
os.environ['LOCAL_RANK'] = '0'
comm_env_vars = comms_utils.read_comms_env_vars()
self.assertEqual(4, len(comm_env_vars))
self.assertEqual(16, comm_env_vars['world_size'])
self.assertEqual(8, comm_env_vars['local_size'])
self.assertEqual(4, comm_env_vars['global_rank'])
self.assertEqual(0, comm_env_vars['local_rank']) |
def crypt(pw, salt):
iobuf = []
block = []
for c in pw:
for j in range(7):
block.append(((c >> (6 - j)) & 1))
block.append(0)
block += ([0] * (64 - len(block)))
__setkey(block)
if (len(salt) != 2):
raise Exception('Salt must be two bytes')
for i in range(2):
x = salt[i]
if ((not (ord('a') <= x <= ord('z'))) and (not (ord('A') <= x <= ord('Z'))) and (not (ord('0') <= x <= ord('9'))) and (not (x == ord('.'))) and (not (x == ord('/')))):
raise Exception('Salt bytes must be in the set of ascii [a-zA-Z0-9./]')
iobuf.append(chr(x))
c = salt[i]
if (c > ord('Z')):
c -= 6
if (c > ord('9')):
c -= 7
c -= ord('.')
for j in range(6):
if ((c >> j) & 1):
(E[((6 * i) + j)], E[(((6 * i) + j) + 24)]) = (E[(((6 * i) + j) + 24)], E[((6 * i) + j)])
block = ([0] * 66)
for _ in range(25):
block = __encrypt(block)
for i in range(11):
c = 0
for j in range(6):
c <<= 1
c |= block[((6 * i) + j)]
c += ord('.')
if (c > ord('9')):
c += 7
if (c > ord('Z')):
c += 6
iobuf.append(chr(c))
return ''.join(iobuf) |
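# Usage sketch for the pure-Python crypt(3) above (it relies on module-level
# DES helpers __setkey/__encrypt and the E table, assumed to be defined
# elsewhere in the file): a two-byte salt plus password yields the classic
# 13-character DES hash whose first two characters echo the salt.
hashed = crypt(b'password', b'ab')
assert len(hashed) == 13 and hashed.startswith('ab')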
class op(bpy.types.Operator):
bl_idname = 'uv.textools_meshtex_create'
bl_label = 'UV Mesh'
bl_description = 'Create a new Mesh from the selected UVs of the active Object'
bl_options = {'REGISTER', 'UNDO'}
    @classmethod
    def poll(cls, context):
if (not bpy.context.active_object):
return False
if (bpy.context.active_object.type != 'MESH'):
return False
if (not bpy.context.object.data.uv_layers):
return False
return True
def execute(self, context):
return create_uv_mesh(self, context, bpy.context.active_object) |
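# Registration sketch: standard Blender add-on boilerplate for the operator
# above (create_uv_mesh is assumed to be defined elsewhere in the add-on).
def register():
    bpy.utils.register_class(op)

def unregister():
    bpy.utils.unregister_class(op)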
class TestCreatePrivacyRequest():
    @pytest.fixture(scope='function')
def url(self, oauth_client: ClientDetail, policy) -> str:
return (V1_URL_PREFIX + PRIVACY_REQUESTS)
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
    @mock.patch('fides.api.api.v1.endpoints.privacy_request_endpoints.dispatch_message_task.apply_async')
def test_create_privacy_request(self, mock_dispatch_message, run_access_request_mock, url, db, api_client: TestClient, policy):
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
resp = api_client.post(url, json=data)
assert (resp.status_code == 200)
print(resp.json())
response_data = resp.json()['succeeded']
assert (len(response_data) == 1)
pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
pr.delete(db=db)
assert run_access_request_mock.called
assert (not mock_dispatch_message.called)
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
def test_create_privacy_request_stores_identities(self, run_access_request_mock, url, db, api_client: TestClient, policy):
TEST_EMAIL = ''
TEST_PHONE_NUMBER = '+'
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': TEST_EMAIL, 'phone_number': TEST_PHONE_NUMBER}}]
resp = api_client.post(url, json=data)
assert (resp.status_code == 200)
response_data = resp.json()['succeeded']
assert (len(response_data) == 1)
pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
persisted_identity = pr.get_persisted_identity()
assert (persisted_identity.email == TEST_EMAIL)
assert (persisted_identity.phone_number == TEST_PHONE_NUMBER)
pr.delete(db=db)
assert run_access_request_mock.called
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
def test_create_privacy_request_stores_custom_fields(self, run_access_request_mock, url, db, api_client: TestClient, policy, allow_custom_privacy_request_field_collection_enabled):
TEST_EMAIL = ''
TEST_CUSTOM_FIELDS = {'first_name': {'label': 'First name', 'value': 'John'}, 'last_name': {'label': 'Last name', 'value': 'Doe'}}
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': TEST_EMAIL}, 'custom_privacy_request_fields': TEST_CUSTOM_FIELDS}]
resp = api_client.post(url, json=data)
assert (resp.status_code == 200)
response_data = resp.json()['succeeded']
assert (len(response_data) == 1)
pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
persisted_identity = pr.get_persisted_identity()
assert (persisted_identity.email == TEST_EMAIL)
persisted_custom_privacy_request_fields = pr.get_persisted_custom_privacy_request_fields()
assert (persisted_custom_privacy_request_fields == TEST_CUSTOM_FIELDS)
pr.delete(db=db)
assert run_access_request_mock.called
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
def test_create_privacy_request_require_manual_approval(self, run_access_request_mock, url, db, api_client: TestClient, policy, require_manual_request_approval):
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
resp = api_client.post(url, json=data)
assert (resp.status_code == 200)
response_data = resp.json()['succeeded']
assert (len(response_data) == 1)
assert (response_data[0]['status'] == 'pending')
pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
pr.delete(db=db)
assert (not run_access_request_mock.called)
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
def test_create_privacy_request_with_masking_configuration(self, run_access_request_mock, url, db, api_client: TestClient, erasure_policy_string_rewrite):
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': erasure_policy_string_rewrite.key, 'identity': {'email': ''}}]
resp = api_client.post(url, json=data)
assert (resp.status_code == 200)
response_data = resp.json()['succeeded']
assert (len(response_data) == 1)
pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
pr.delete(db=db)
assert run_access_request_mock.called
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_access_request')
def test_create_privacy_request_limit_exceeded(self, _, url, db, api_client: TestClient, policy):
payload = []
for i in range(0, 51):
            payload.append({'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': f'test{i}'}})
response = api_client.post(url, json=payload)
assert (422 == response.status_code)
assert (json.loads(response.text)['detail'][0]['msg'] == 'ensure this value has at most 50 items')
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
def test_create_privacy_request_starts_processing(self, run_privacy_request_mock, url, api_client: TestClient, db, policy):
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
resp = api_client.post(url, json=data)
assert run_privacy_request_mock.called
assert (resp.status_code == 200)
response_data = resp.json()['succeeded']
pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
pr.delete(db=db)
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
def test_create_privacy_request_with_external_id(self, run_access_request_mock, url, db, api_client: TestClient, policy):
external_id = 'ext_some-uuid-here-1234'
data = [{'external_id': external_id, 'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
resp = api_client.post(url, json=data)
assert (resp.status_code == 200)
response_data = resp.json()['succeeded']
assert (len(response_data) == 1)
assert (response_data[0]['external_id'] == external_id)
pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
assert (pr.external_id == external_id)
pr.delete(db=db)
assert run_access_request_mock.called
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
def test_create_privacy_request_caches_identity(self, run_access_request_mock, url, db, api_client: TestClient, policy, cache):
identity = {'email': ''}
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': identity}]
resp = api_client.post(url, json=data)
assert (resp.status_code == 200)
response_data = resp.json()['succeeded']
assert (len(response_data) == 1)
pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
key = get_identity_cache_key(privacy_request_id=pr.id, identity_attribute=list(identity.keys())[0])
assert (cache.get(key) == list(identity.values())[0])
pr.delete(db=db)
assert run_access_request_mock.called
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
def test_create_privacy_request_caches_masking_secrets(self, run_erasure_request_mock, url, db, api_client: TestClient, erasure_policy_aes, cache):
identity = {'email': ''}
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': erasure_policy_aes.key, 'identity': identity}]
resp = api_client.post(url, json=data)
assert (resp.status_code == 200)
response_data = resp.json()['succeeded']
assert (len(response_data) == 1)
pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
secret_key = get_masking_secret_cache_key(privacy_request_id=pr.id, masking_strategy='aes_encrypt', secret_type=SecretType.key)
assert (cache.get_encoded_by_key(secret_key) is not None)
pr.delete(db=db)
assert run_erasure_request_mock.called
def test_create_privacy_request_invalid_encryption_values(self, url, db, api_client: TestClient, policy, cache):
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}, 'encryption_key': 'test'}]
resp = api_client.post(url, json=data)
assert (resp.status_code == 422)
assert (resp.json()['detail'][0]['msg'] == 'Encryption key must be 16 bytes long')
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
def test_create_privacy_request_caches_encryption_keys(self, run_access_request_mock, url, db, api_client: TestClient, policy, cache):
identity = {'email': ''}
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': identity, 'encryption_key': 'test--encryption'}]
resp = api_client.post(url, json=data)
assert (resp.status_code == 200)
response_data = resp.json()['succeeded']
assert (len(response_data) == 1)
pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
encryption_key = get_encryption_cache_key(privacy_request_id=pr.id, encryption_attr='key')
assert (cache.get(encryption_key) == 'test--encryption')
pr.delete(db=db)
assert run_access_request_mock.called
def test_create_privacy_request_no_identities(self, url, api_client: TestClient, policy):
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {}}]
resp = api_client.post(url, json=data)
assert (resp.status_code == 200)
response_data = resp.json()['succeeded']
assert (len(response_data) == 0)
response_data = resp.json()['failed']
assert (len(response_data) == 1)
def test_create_privacy_request_registers_async_task(self, db, url, api_client, policy):
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
resp = api_client.post(url, json=data)
assert (resp.status_code == 200)
response_data = resp.json()['succeeded']
assert (len(response_data) == 1)
pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
assert (pr.get_cached_task_id() is not None)
assert (pr.get_async_execution_task() is not None)
pr.delete(db=db)
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
def test_create_privacy_request_creates_system_audit_log(self, run_access_request_mock, url, db, api_client: TestClient, policy):
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': ''}}]
resp = api_client.post(url, json=data)
response_data = resp.json()['succeeded'][0]
approval_audit_log: AuditLog = AuditLog.filter(db=db, conditions=((AuditLog.privacy_request_id == response_data['id']) & (AuditLog.action == AuditLogAction.approved))).first()
assert (approval_audit_log is not None)
assert (approval_audit_log.user_id == 'system')
approval_audit_log.delete(db=db)
pr = PrivacyRequest.get(db=db, object_id=response_data['id'])
pr.delete(db=db)
    @pytest.mark.usefixtures('messaging_config')
    @mock.patch('fides.api.service.messaging.message_dispatch_service._mailgun_dispatcher')
    @mock.patch('fides.api.service.privacy_request.request_runner_service.run_privacy_request.delay')
def test_create_privacy_request_error_notification(self, mailgun_dispatcher_mock, run_access_request_mock, url, db, api_client: TestClient, policy):
TEST_EMAIL = ''
TEST_PHONE_NUMBER = '+'
data = [{'requested_at': '2021-08-30T16:09:37.359Z', 'policy_key': policy.key, 'identity': {'email': TEST_EMAIL, 'phone_number': TEST_PHONE_NUMBER}}]
PrivacyRequestNotifications.create(db=db, data={'email': ', ', 'notify_after_failures': 1})
        privacy_request = PrivacyRequest.create(db=db, data={'external_id': f'ext-{str(uuid4())}', 'started_processing_at': datetime(2021, 1, 1), 'finished_processing_at': datetime(2021, 1, 1), 'requested_at': datetime(2021, 1, 1), 'status': PrivacyRequestStatus.error, 'origin': '', 'policy_id': policy.id, 'client_id': policy.client_id})
privacy_request.error_processing(db)
resp = api_client.post(url, json=data)
assert (resp.status_code == 200)
response_data = resp.json()['succeeded']
assert (len(response_data) == 1)
pr = PrivacyRequest.get(db=db, object_id=response_data[0]['id'])
persisted_identity = pr.get_persisted_identity()
assert (persisted_identity.email == TEST_EMAIL)
assert (persisted_identity.phone_number == TEST_PHONE_NUMBER)
sent_errors = PrivacyRequestError.filter(db=db, conditions=PrivacyRequestError.message_sent.is_(True)).all()
assert (len(sent_errors) == 1)
assert run_access_request_mock.called
assert mailgun_dispatcher_mock.called |
class AnyDBConnector(BaseConnector):
    def __init__(self, db_type: str='mysql', host: str='127.0.0.1', port: int=3306, user: Optional[str]=None, passwd: Optional[str]=None, db: Optional[str]=None, charset: str='utf8', *args, **kwargs) -> None:
        super().__init__(db_type, host, port, user, passwd, db, charset, *args, **kwargs)
if (self.db_type == 'mysql'):
self.connector = MySQLConnector(host=self.host, port=self.port, user=self.user, passwd=self.passwd)
        # TODO: postgres, bigquery, etc.
def __del__(self) -> Any:
super().__del__()
def get_connect(self) -> Any:
return self.connector.get_connect()
def get_cursor(self, cursor=None):
return self.connector.get_cursor()
def select_db(self, db):
        return self.connector.select_db(db)
def get_all_tables(self, args=None):
return self.connector.get_all_tables(args)
def execute(self, sql, args=None):
return self.connector.execute(sql, args)
def get_version(self, args=None):
        return self.connector.get_version(args)
def get_all_table_metadata(self, args=None):
return self.connector.get_all_table_metadata(args)
def get_table_metadata(self, db, table, args=None):
return self.connector.get_table_metadata(db, table, args)
def get_table_field_metadata(self, db, table, args=None):
return self.connector.get_table_field_metadata(db, table, args) |
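# Usage sketch: only the mysql branch is wired up above. Credentials are
# placeholders and a reachable MySQL server is assumed.
conn = AnyDBConnector(db_type='mysql', host='127.0.0.1', port=3306, user='root', passwd='secret', db='test')
print(conn.get_all_tables())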
def generic_nice_representation(i):
if isinstance(i, struct_time):
return strftime('%Y-%m-%d - %H:%M:%S', i)
if isinstance(i, list):
return list_group(i)
if isinstance(i, dict):
return nice_dict(i)
if isinstance(i, (float, int)):
return nice_number_filter(i)
if isinstance(i, str):
return replace_underscore_filter(i)
if isinstance(i, bytes):
return bytes_to_str_filter(i)
return i |
class TestPipeline(Module):
def __init__(self, platform, pipeline):
clk = platform.request('clk100')
leds = platform.request_all('user_led')
btn = platform.request_all('user_btn')
btn_sync = Signal(len(btn))
for i in range(len(btn)):
self.specials += MultiReg(btn[i], btn_sync[i])
sw = platform.request_all('user_sw')
sw_sync = Signal(len(sw))
for i in range(len(sw)):
self.specials += MultiReg(sw[i], sw_sync[i])
crg = CRG(clk)
self.submodules.crg = crg
cnt = Signal(32)
compute = Compute(pipeline)
self.submodules += compute
self.sync += cnt.eq((cnt + 1))
self.comb += [compute.input1.eq(cnt), compute.input2.eq(btn_sync), compute.input1_valid.eq(1), compute.input2_valid.eq(1), leds.eq(compute.out)] |
class OriginInspectorHistoricalData(ModelNormal):
allowed_values = {}
validations = {}
    @cached_property
def additional_properties_type():
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type)
_nullable = False
    @cached_property
def openapi_types():
lazy_import()
return {'dimensions': (OriginInspectorDimensions,), 'values': (OriginInspectorEntryValues,)}
    @cached_property
def discriminator():
return None
attribute_map = {'dimensions': 'dimensions', 'values': 'values'}
read_only_vars = {}
_composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
for (var_name, var_value) in kwargs.items():
if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
continue
setattr(self, var_name, var_value)
return self
required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    @convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
for (var_name, var_value) in kwargs.items():
if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
continue
setattr(self, var_name, var_value)
if (var_name in self.read_only_vars):
raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.') |
class DataMonitoringReport(DataMonitoring):
def __init__(self, config: Config, tracking: Tracking, selector_filter: FiltersSchema=FiltersSchema(), force_update_dbt_package: bool=False, disable_samples: bool=False):
super().__init__(config, tracking, force_update_dbt_package, disable_samples, selector_filter)
self.report_api = ReportAPI(self.internal_dbt_runner)
self.s3_client = S3Client.create_client(self.config, tracking=self.tracking)
self.gcs_client = GCSClient.create_client(self.config, tracking=self.tracking)
self.azure_client = AzureClient.create_client(self.config, tracking=self.tracking)
self.slack_client = SlackClient.create_client(self.config, tracking=self.tracking)
def generate_report(self, days_back: int=7, test_runs_amount: int=720, file_path: Optional[str]=None, disable_passed_test_metrics: bool=False, should_open_browser: bool=True, exclude_elementary_models: bool=False, project_name: Optional[str]=None) -> Tuple[(bool, str)]:
html_path = self._get_report_file_path(file_path)
output_data = self.get_report_data(days_back=days_back, test_runs_amount=test_runs_amount, disable_passed_test_metrics=disable_passed_test_metrics, exclude_elementary_models=exclude_elementary_models, project_name=project_name)
template_html_path = pkg_resources.resource_filename(__name__, 'index.html')
with open(template_html_path, 'r', encoding='utf-8') as template_html_file:
template_html_code = template_html_file.read()
dumped_output_data = json.dumps(output_data)
encoded_output_data = base64.b64encode(dumped_output_data.encode('utf-8'))
compiled_output_html = f"<script>window.elementaryData = JSON.parse(atob('{encoded_output_data.decode('utf-8')}'));</script>{template_html_code}"
with open(html_path, 'w', encoding='utf-8') as html_file:
html_file.write(compiled_output_html)
with open(os.path.join(self.config.target_dir, 'elementary_output.json'), 'w', encoding='utf-8') as elementary_output_json_file:
elementary_output_json_file.write(dumped_output_data)
if should_open_browser:
try:
webbrowser.open_new_tab(('file://' + html_path))
except webbrowser.Error:
logger.error('Unable to open the web browser.')
self.execution_properties['report_end'] = True
self.execution_properties['success'] = self.success
return (self.success, html_path)
def get_report_data(self, days_back: int=7, test_runs_amount: int=720, disable_passed_test_metrics: bool=False, exclude_elementary_models: bool=False, project_name: Optional[str]=None):
report_api = ReportAPI(self.internal_dbt_runner)
(report_data, error) = report_api.get_report_data(days_back=days_back, test_runs_amount=test_runs_amount, disable_passed_test_metrics=disable_passed_test_metrics, exclude_elementary_models=exclude_elementary_models, disable_samples=self.disable_samples, project_name=(project_name or self.project_name), filter=self.selector_filter.to_selector_filter_schema(), env=self.config.env, warehouse_type=(self.warehouse_info.type if self.warehouse_info else None))
self._add_report_tracking(report_data, error)
if error:
logger.exception(f'''Could not generate the report - Error: {error}
Please reach out to our community for help with this issue.''')
self.success = False
report_data_dict = report_data.dict()
return report_data_dict
def validate_report_selector(self):
self.selector_filter.validate_report_selector()
def _add_report_tracking(self, report_data: ReportDataSchema, error: Optional[Exception]=None):
if error:
if self.tracking:
self.tracking.record_internal_exception(error)
return
test_metadatas = []
for tests in report_data.test_results.values():
for test in tests:
test_metadatas.append(test.get('metadata'))
self.execution_properties['elementary_test_count'] = len([test_metadata for test_metadata in test_metadatas if (test_metadata.get('test_type') != 'dbt_test')])
self.execution_properties['test_result_count'] = len(test_metadatas)
if (self.config.anonymous_tracking_enabled and isinstance(self.tracking, AnonymousTracking)):
report_data.tracking = dict(posthog_api_key=self.tracking.POSTHOG_PROJECT_API_KEY, report_generator_anonymous_user_id=self.tracking.anonymous_user_id, anonymous_warehouse_id=(self.warehouse_info.id if self.warehouse_info else None))
def send_report(self, days_back: int=7, test_runs_amount: int=720, file_path: Optional[str]=None, disable_passed_test_metrics: bool=False, should_open_browser: bool=False, exclude_elementary_models: bool=False, project_name: Optional[str]=None, remote_file_path: Optional[str]=None, disable_html_attachment: bool=False, include_description: bool=False):
(generated_report_successfully, local_html_path) = self.generate_report(days_back=days_back, test_runs_amount=test_runs_amount, disable_passed_test_metrics=disable_passed_test_metrics, file_path=file_path, should_open_browser=should_open_browser, exclude_elementary_models=exclude_elementary_models, project_name=project_name)
if (not generated_report_successfully):
self.success = False
self.execution_properties['success'] = self.success
return self.success
bucket_website_url = None
upload_succeeded = False
if (self.s3_client or self.gcs_client or self.azure_client):
self.validate_report_selector()
(upload_succeeded, bucket_website_url) = self.upload_report(local_html_path=local_html_path, remote_file_path=remote_file_path)
should_send_report_over_slack = True
if ((upload_succeeded and (bucket_website_url is not None)) or disable_html_attachment):
should_send_report_over_slack = False
if self.slack_client:
self.send_test_results_summary(days_back=days_back, test_runs_amount=test_runs_amount, disable_passed_test_metrics=disable_passed_test_metrics, bucket_website_url=bucket_website_url, include_description=include_description)
if should_send_report_over_slack:
self.validate_report_selector()
self.send_report_attachment(local_html_path=local_html_path)
return self.success
def send_report_attachment(self, local_html_path: str) -> bool:
if self.slack_client:
send_succeeded = self.slack_client.send_report(self.config.slack_channel_name, local_html_path)
self.execution_properties['sent_to_slack_successfully'] = send_succeeded
if (not send_succeeded):
self.success = False
self.execution_properties['success'] = self.success
return self.success
def upload_report(self, local_html_path: str, remote_file_path: Optional[str]=None) -> Tuple[(bool, Optional[str])]:
if self.gcs_client:
(send_succeeded, bucket_website_url) = self.gcs_client.send_report(local_html_path, remote_bucket_file_path=remote_file_path)
self.execution_properties['sent_to_gcs_successfully'] = send_succeeded
if (not send_succeeded):
self.success = False
if self.s3_client:
(send_succeeded, bucket_website_url) = self.s3_client.send_report(local_html_path, remote_bucket_file_path=remote_file_path)
self.execution_properties['sent_to_s3_successfully'] = send_succeeded
if (not send_succeeded):
self.success = False
if self.azure_client:
(send_succeeded, bucket_website_url) = self.azure_client.send_report(local_html_path, remote_bucket_file_path=remote_file_path)
self.execution_properties['sent_to_azure_successfully'] = send_succeeded
if (not send_succeeded):
self.success = False
self.execution_properties['success'] = self.success
return (self.success, bucket_website_url)
def send_test_results_summary(self, days_back: int, test_runs_amount: int, disable_passed_test_metrics: bool=False, bucket_website_url: Optional[str]=None, include_description: bool=False) -> bool:
tests_api = TestsAPI(dbt_runner=self.internal_dbt_runner, days_back=days_back, invocations_per_test=test_runs_amount, disable_passed_test_metrics=disable_passed_test_metrics)
summary_test_results = tests_api.get_test_results_summary(filter=self.selector_filter.to_selector_filter_schema())
if self.slack_client:
send_succeeded = self.slack_client.send_message(channel_name=self.config.slack_channel_name, message=SlackReportSummaryMessageBuilder().get_slack_message(test_results=summary_test_results, bucket_website_url=bucket_website_url, include_description=include_description, filter=self.selector_filter.to_selector_filter_schema(), days_back=days_back))
else:
send_succeeded = False
self.execution_properties['sent_test_results_summary_successfully'] = send_succeeded
self.success = send_succeeded
if send_succeeded:
logger.info('Sent test results summary to Slack')
self.execution_properties['success'] = self.success
return self.success
def _get_report_file_path(self, file_path: Optional[str]=None) -> str:
if file_path:
if (file_path.endswith('.htm') or file_path.endswith('.html')):
return os.path.abspath(file_path)
raise ValueError('Report file path must end with .html')
return os.path.abspath(os.path.join(self.config.target_dir, 'elementary_report.html')) |
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--path', type=str, default='replays/demo_1.json.gz')
parser.add_argument('--output-prefix', type=str, default='demo')
parser.add_argument('--num-episodes', type=int, default=10000)
parser.add_argument('--append-instruction', dest='append_instruction', action='store_true')
parser.add_argument('--max-steps', type=int, default=5000)
parser.add_argument('--save-videos', dest='save_videos', action='store_true')
parser.add_argument('--save-step-image', dest='save_step_image', action='store_true')
args = parser.parse_args()
cfg = config
cfg.defrost()
cfg.DATASET.DATA_PATH = args.path
cfg.DATASET.MAX_EPISODE_STEPS = args.max_steps
cfg.ENVIRONMENT.MAX_EPISODE_STEPS = args.max_steps
cfg.freeze()
run_reference_replay(cfg, num_episodes=args.num_episodes, output_prefix=args.output_prefix, append_instruction=args.append_instruction, save_videos=args.save_videos, save_step_image=args.save_step_image) |
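# Invocation sketch (script name is hypothetical):
#   python replay_demo.py --path replays/demo_1.json.gz --num-episodes 10 --save-videos
if __name__ == '__main__':
    main()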
class MandelbrotCompiledFormula():
def __init__(self, compiler):
formula = compiler.get_formula(FORMULA_FILE, FORMULA_NAME)
coloring_formula_0 = compiler.get_formula(COLORING_FORMULA_0_FILE, COLORING_FORMULA_0_NAME, 'cf0')
coloring_formula_1 = compiler.get_formula(COLORING_FORMULA_1_FILE, COLORING_FORMULA_1_NAME, 'cf1')
self.__formula_params = ((formula.symbols.default_params() + coloring_formula_0.symbols.default_params()) + coloring_formula_1.symbols.default_params())
self.__library_path = compiler.compile_all(formula, coloring_formula_0, coloring_formula_1, [])
def get_formula_params(self):
return self.__formula_params
def get_library_path(self):
return self.__library_path |
class NNAgent(Agent):
def __init__(self, model=None, n_actions=None):
super().__init__()
self.model = model
self.n_actions = n_actions
self.z_size = self.model.initial_state(1).size()[1]
def update(self, state_dict):
self.model.load_state_dict(state_dict)
def __call__(self, state, observation, agent_info=None, history=None):
initial_state = observation['initial_state']
B = observation.n_elems()
if (agent_info is None):
agent_info = DictTensor({'stochastic': torch.tensor([True]).repeat(B)})
model_initial_state = self.model.initial_state(B)
agent_state = None
agent_step = None
if (state is None):
assert initial_state.all()
agent_state = model_initial_state
agent_step = torch.zeros(B).long()
else:
_is = initial_state.float().unsqueeze((- 1)).repeat(1, model_initial_state.size()[1])
agent_state = ((_is * model_initial_state) + ((1 - _is) * state['agent_state']))
agent_step = ((initial_state.float() * torch.zeros(B)) + ((1 - initial_state.float()) * state['agent_step'])).long()
(score_action, value, next_state) = self.model(agent_state, observation['frame'], observation['last_action'])
action_proba = torch.softmax(score_action, dim=1)
dist = torch.distributions.Categorical(action_proba)
action_sampled = dist.sample()
action_max = action_proba.max(1)[1]
smask = agent_info['stochastic'].float()
action = ((action_sampled * smask) + ((1 - smask) * action_max)).long()
new_state = DictTensor({'agent_state': next_state, 'agent_step': (agent_step + 1)})
agent_do = DictTensor({'action': action, 'action_probabilities': action_proba})
state = DictTensor({'agent_state': agent_state, 'agent_step': agent_step})
return (state, agent_do, new_state) |
@refetch_frequency(timedelta(days=1))
def fetch_production(zone_key: ZoneKey=ZoneKey('KR'), session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> list[dict]:
session = (session or Session())
first_available_date = arrow.get(2021, 12, 22, 0, 0, 0, tzinfo=TIMEZONE)
if ((target_datetime is not None) and (target_datetime < first_available_date)):
raise ParserException('KPX.py', 'This parser is not able to parse dates before 2021-12-22.', zone_key)
if (target_datetime is None):
production_list = get_real_time_prod_data(zone_key=zone_key, session=session, logger=logger)
else:
production_list = get_historical_prod_data(zone_key=zone_key, session=session, target_datetime=target_datetime, logger=logger)
return production_list.to_list() |
class group_stats_request(stats_request):
version = 2
type = 18
stats_type = 6
def __init__(self, xid=None, flags=None, group_id=None):
if (xid != None):
self.xid = xid
else:
self.xid = None
if (flags != None):
self.flags = flags
else:
self.flags = 0
if (group_id != None):
self.group_id = group_id
else:
self.group_id = 0
return
def pack(self):
packed = []
packed.append(struct.pack('!B', self.version))
packed.append(struct.pack('!B', self.type))
packed.append(struct.pack('!H', 0))
packed.append(struct.pack('!L', self.xid))
packed.append(struct.pack('!H', self.stats_type))
packed.append(struct.pack('!H', self.flags))
        packed.append(b'\x00' * 4)
        packed.append(struct.pack('!L', self.group_id))
        packed.append(b'\x00' * 4)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return b''.join(packed)
    @staticmethod
    def unpack(reader):
obj = group_stats_request()
_version = reader.read('!B')[0]
assert (_version == 2)
_type = reader.read('!B')[0]
assert (_type == 18)
_length = reader.read('!H')[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.xid = reader.read('!L')[0]
_stats_type = reader.read('!H')[0]
assert (_stats_type == 6)
obj.flags = reader.read('!H')[0]
reader.skip(4)
obj.group_id = reader.read('!L')[0]
reader.skip(4)
return obj
def __eq__(self, other):
if (type(self) != type(other)):
return False
if (self.xid != other.xid):
return False
if (self.flags != other.flags):
return False
if (self.group_id != other.group_id):
return False
return True
def pretty_print(self, q):
q.text('group_stats_request {')
with q.group():
with q.indent(2):
q.breakable()
q.text('xid = ')
if (self.xid != None):
q.text(('%#x' % self.xid))
else:
q.text('None')
q.text(',')
q.breakable()
q.text('flags = ')
value_name_map = {}
q.text(util.pretty_flags(self.flags, value_name_map.values()))
q.text(',')
q.breakable()
q.text('group_id = ')
q.text(('%#x' % self.group_id))
q.breakable()
q.text('}') |
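# Round-trip sketch for the message class above; field values are illustrative.
req = group_stats_request(xid=0x1234, group_id=5)
wire = req.pack()
assert len(wire) == 24  # fixed-size OpenFlow 1.1 group_stats_request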
@instr(_gemm_config_matmul + _gemm_matmul)
def matmul(N: size, M: size, K: size, A: [R][N, 16] @ GEMM_SCRATCH, B: [R][K, 16] @ GEMM_SCRATCH, C: [R][N, 16] @ GEMM_ACCUM):
assert (N <= 16)
assert (M <= 16)
assert (K <= 16)
for i in seq(0, N):
for j in seq(0, M):
for k in seq(0, K):
C[(i, j)] += (A[(i, k)] * B[(k, j)]) |
class NetworkDialog(QDialog):
network_updated_signal = pyqtSignal()
def __init__(self, network: Network, config) -> None:
super().__init__(flags=((Qt.WindowSystemMenuHint | Qt.WindowTitleHint) | Qt.WindowCloseButtonHint))
self.setWindowTitle(_('Network'))
self.setMinimumSize(500, 200)
self.resize(560, 400)
self._nlayout = NetworkChoiceLayout(network, config)
buttons_layout = Buttons(CloseButton(self))
buttons_layout.add_left_button(HelpDialogButton(self, 'misc', 'network-dialog'))
vbox = QVBoxLayout(self)
vbox.setSizeConstraint(QVBoxLayout.SetFixedSize)
vbox.addLayout(self._nlayout.layout())
vbox.addLayout(buttons_layout)
self.network_updated_signal.connect(self.on_update)
network.register_callback(self.on_network, ['updated', 'sessions'])
def on_network(self, event, *args):
self.network_updated_signal.emit()
def on_update(self):
self._nlayout.update() |
class PosixGoPacketCapturer():
def __init__(self, device, interface):
self._device = device
self._connector_helper = ConnectorHelper(self._device)
self._interface = interface
self._capture_file = 'capture.{}.json'.format(self._interface)
self._pid_file = None
def _binary_location(self):
here_relative = os.path.relpath(os.path.dirname(os.path.realpath(__file__)), tools_root())
return os.path.join(self._device.tools_root(), here_relative, '..', 'bin', 'xv_packet_capture')
def _get_packets(self):
src = os.path.join(self._device.temp_directory(), self._capture_file)
(file_, dst) = tempfile.mkstemp(prefix='xv_leak_test_', suffix='_{}'.format(self._capture_file))
os.close(file_)
os.remove(dst)
timeup = TimeUp(5)
while (not timeup):
try:
self._device.connector().pull(src, dst)
break
except XVEx:
L.warning('Waiting for capture file to be written: {}'.format(src))
time.sleep(1)
if (not os.path.exists(dst)):
raise XVEx("Couldn't get capture file from capture device")
packets = object_from_json_file(dst, 'attribute')
return packets['data']
    @staticmethod
    def _random_pid_file():
return 'xv_packet_capture_{}.pid'.format(''.join((random.choice(string.ascii_uppercase) for _ in range(10))))
def start(self):
L.debug('Starting packet capture on interface {}'.format(self._interface))
if self._pid_file:
raise XVEx('Packet capture already started!')
self._pid_file = os.path.join(self._device.temp_directory(), PosixGoPacketCapturer._random_pid_file())
cmd = ['/usr/local/bin/daemon', '-o', os.path.join(self._device.temp_directory(), 'daemon.out'), '--', self._binary_location(), '-i', self._interface, '-o', self._device.temp_directory(), '-f', 'capture.{}.json'.format(self._interface), '--preserve', '--debug']
self._connector_helper.check_command(cmd, root=True)
def stop(self):
L.debug('Stopping packet capture on interface {} and getting packets'.format(self._interface))
if (not self._pid_file):
raise XVEx('Packet capture not started!')
cmd = ['killall', '-SIGINT', 'xv_packet_capture']
self._connector_helper.check_command(cmd, root=True)
self._pid_file = None
return self._get_packets() |
def import_coconut_header():
try:
from coconut import __coconut__
return __coconut__
except ImportError:
try:
from coconut.coconut import __coconut__
except ImportError:
__coconut__ = None
if (__coconut__ is not None):
return __coconut__
else:
raise |
class ProcessExecutor(ThreadExecutor):
def _set_executor_pool(self) -> None:
self._executor_pool = ProcessPoolExecutor(max_workers=len(self._tasks))
def _start_task(self, task: AbstractExecutorTask) -> TaskAwaitable:
(fn, args) = task.start()
return cast(TaskAwaitable, self._loop.run_in_executor(self._executor_pool, fn, *args)) |
class PrimTypes(OrderedEnum):
BOND = 0
AUX_BOND = 1
HYDROGEN_BOND = 2
INTERFRAG_BOND = 3
AUX_INTERFRAG_BOND = 4
BEND = 5
LINEAR_BEND = 6
LINEAR_BEND_COMPLEMENT = 7
PROPER_DIHEDRAL = 8
IMPROPER_DIHEDRAL = 9
OUT_OF_PLANE = 10
LINEAR_DISPLACEMENT = 11
LINEAR_DISPLACEMENT_COMPLEMENT = 12
TRANSLATION = 13
TRANSLATION_X = 14
TRANSLATION_Y = 15
TRANSLATION_Z = 16
ROTATION = 17
ROTATION_A = 18
ROTATION_B = 19
ROTATION_C = 20
CARTESIAN = 21
CARTESIAN_X = 22
CARTESIAN_Y = 23
CARTESIAN_Z = 24
BONDED_FRAGMENT = 25
DUMMY_TORSION = 26
DISTANCE_FUNCTION = 27
BEND2 = 28
PROPER_DIHEDRAL2 = 29
DUMMY_IMPROPER = 30
ROBUST_TORSION1 = 31
ROBUST_TORSION2 = 32 |
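# PrimTypes derives from OrderedEnum, presumably the recipe from the enum
# docs that adds value-based comparisons (a minimal sketch shown here), so
# members sort by their numeric precedence:
from enum import Enum

class OrderedEnum(Enum):
    def __lt__(self, other):
        if self.__class__ is other.__class__:
            return self.value < other.value
        return NotImplemented

# e.g. PrimTypes.BOND < PrimTypes.BEND holds, and sorting a list of primitive
# types orders bonds before bends before dihedrals.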
def test_pydantic_validator() -> None:
class ToyModel(BaseModel):
name: str
age: int
foo: Optional[str] = None
foo2: Optional[str] = Field(default=None, description='some field')
        @validator('age')
def age_must_be_positive(cls, v: int) -> int:
if (v < 0):
raise ValueError('age must be positive')
return v
validator = PydanticValidator(ToyModel, None)
assert (validator.clean_data({'name': 'Eugene', 'age': 5}) == (ToyModel(name='Eugene', age=5), []))
(clean_data, exceptions) = validator.clean_data({'name': 'Eugene', 'age': (- 1)})
assert (clean_data is None)
assert (len(exceptions) == 1)
assert isinstance(exceptions[0], ValidationError) |
@mock.patch('builtins.open', new_callable=mock.mock_open)
@mock.patch('elasticsearch.Elasticsearch')
def test_extract(client, mo):
doc = {'field1': 'stuff', 'field2': 'things'}
doc_data = serialize_doc(doc)
client.count.return_value = {'count': 1001}
client.options.return_value = client
client.search.return_value = {'_scroll_id': 'uohialjrknf', '_shards': {'successful': 1, 'total': 1, 'skipped': 0}, 'hits': {'hits': [{'_index': 'test', '_id': '0', '_score': 0, '_source': doc}]}}
def set_corp_size(*args, **kwargs):
path = args[0]
mockstat = mock.Mock()
if ('.bz2' in path):
mockstat.st_size = 500
else:
mockstat.st_size = 1000
return mockstat
client.scroll.return_value = {}
index = 'test'
outdir = '/abs/outpath/to/tracks/'
with mock.patch('os.stat') as osstat:
osstat.side_effect = set_corp_size
res = corpus.extract(client, outdir, index)
assert (mo.call_count == 4)
mo.assert_has_calls([call('/abs/outpath/to/tracks/test-documents.json', 'wb'), call('/abs/outpath/to/tracks/test-documents.json.bz2', 'wb'), call('/abs/outpath/to/tracks/test-documents-1k.json', 'wb'), call('/abs/outpath/to/tracks/test-documents-1k.json.bz2', 'wb')], any_order=True)
assert (res == {'filename': 'test-documents.json.bz2', 'path': '/abs/outpath/to/tracks/test-documents.json.bz2', 'compressed_bytes': 500, 'index_name': 'test', 'doc_count': 1001, 'uncompressed_bytes': 1000})
file_mock = mo.return_value
file_mock.assert_has_calls([call.write(doc_data)]) |
@pytest.mark.parametrize('section_message', ['first', 'second', 'third'])
def test_create_text_section_block(section_message):
text_section_block = SlackMessageBuilder.create_text_section_block(section_message)
assert (json.dumps(text_section_block, sort_keys=True) == json.dumps({'type': 'section', 'text': {'type': 'mrkdwn', 'text': section_message}}, sort_keys=True)) |
class IOSPlatform(PlatformBase):
def __init__(self, tempdir, platform_util, args, platform_meta, usb_controller=None):
super(IOSPlatform, self).__init__(tempdir, args.ios_dir, platform_util, args.hash_platform_mapping, args.device_name_mapping)
self.setPlatformHash(platform_util.device)
if self.platform:
self.platform_model = (re.findall('(.*)-[0-9.]+', self.platform) or [self.platform])[0]
else:
self.platform_model = platform_meta.get('model')
self.platform_os_version = platform_meta.get('os_version')
self.platform_abi = platform_meta.get('abi')
self.usb_controller = usb_controller
self.type = 'ios'
self.app = None
self.use_xcrun = (not (int(self.platform_os_version.split('.')[0]) < 17))
def getKind(self):
if (self.platform_model and self.platform_os_version):
return '{}-{}'.format(self.platform_model, self.platform_os_version)
return self.platform
def getOS(self):
if self.platform_os_version:
return 'iOS {}'.format(self.platform_os_version)
return 'iOS'
def preprocess(self, *args, **kwargs):
assert ('programs' in kwargs), 'Must have programs specified'
programs = kwargs['programs']
assert ('program' in programs), 'program is not specified'
program = programs['program']
assert program.endswith('.ipa'), 'IOS program must be an ipa file'
processRun(['unzip', '-o', '-d', self.tempdir, program])
app_dir = os.path.join(self.tempdir, 'Payload')
dirs = [f for f in os.listdir(app_dir) if os.path.isdir(os.path.join(app_dir, f))]
assert (len(dirs) == 1), f'Payload must contain exactly 1 app, found {len(dirs)}'
app_name = dirs[0]
self.app = os.path.join(app_dir, app_name)
(base_name, _) = os.path.splitext(app_name)
self.dsym = os.path.join(self.app, (base_name + '.dSYM'))
del programs['program']
(bundle_id, _) = processRun(['osascript', '-e', (('id of app "' + self.app) + '"')])
assert (len(bundle_id) > 0), 'bundle id cannot be found'
self.util.setBundleId(bundle_id[0].strip())
success = getRunStatus()
self.util.uninstallApp((self.util.bundle_id if self.use_xcrun else self.app))
if self.use_xcrun:
self.util.run(['install', 'app', self.app, '--device', self.util.device])
setRunStatus(success, overwrite=True)
def postprocess(self, *args, **kwargs):
success = getRunStatus()
setRunStatus(success, overwrite=True)
def runBenchmark(self, cmd, *args, **kwargs):
if (not isinstance(cmd, list)):
cmd = shlex.split(cmd)
assert (self.util.bundle_id is not None), 'Bundle id is not specified'
arguments = self.getPairedArguments(cmd)
argument_filename = os.path.join(self.tempdir, 'benchmark.json')
arguments_json = json.dumps(arguments, indent=2, sort_keys=True)
with open(argument_filename, 'w') as f:
f.write(arguments_json)
tgt_argument_filename = os.path.join(self.tgt_dir, 'benchmark.json')
self.util.push(argument_filename, tgt_argument_filename)
logfile = os.path.join(self.tempdir, '__app_stdout.json')
run_cmd = (['process', 'launch', '--no-activate', '--terminate-existing', '--user', 'mobile', '--verbose', '--json-output', logfile, '--device', self.util.device, self.util.bundle_id] if self.use_xcrun else ['--bundle', self.app, '--noninteractive', '--noinstall', '--unbuffered'])
platform_args = {}
if ('platform_args' in kwargs):
platform_args = kwargs['platform_args']
if (('power' in platform_args) and platform_args['power'] and (not self.use_xcrun)):
platform_args['timeout'] = 10
run_cmd += ['--justlaunch']
if platform_args.get('profiling_args', {}).get('enabled', False):
try:
profiling_types = platform_args['profiling_args']['types']
options = platform_args['profiling_args']['options']
args = ' '.join([((('--' + x) + ' ') + arguments[x]) for x in arguments])
xctrace = getProfilerByUsage('ios', None, platform=self, model_name=platform_args.get('model_name', None), args=args, types=profiling_types, options=options)
if xctrace:
f = xctrace.start()
(output, meta) = f.result()
if ((not output) or (not meta)):
raise RuntimeError('No data returned from XCTrace profiler.')
return (output, meta)
except Exception:
getLogger().exception(f'An error occurred when running XCTrace profiler on device {self.platform} {self.platform_hash}.')
meta = {}
if arguments:
if (not self.use_xcrun):
run_cmd += ['--args', ' '.join([((('--' + x) + ' ') + arguments[x]) for x in arguments])]
else:
for x in arguments:
run_cmd += [f'--{x}']
run_cmd += [f'{arguments[x]}']
log_screen = self.util.run(run_cmd, **platform_args)
if os.path.isfile(logfile):
with open(logfile, 'r') as f:
getLogger().info(f.read())
return (log_screen, meta)
def rebootDevice(self):
success = self.util.reboot()
if success:
time.sleep(180)
def killProgram(self, program):
pass
def currentPower(self):
result = self.util.batteryLevel()
return result
def powerInfo(self):
return {'unit': 'percentage', 'metric': 'batteryLevel'} |
def get_build_vm(srvdir, provider=None):
abssrvdir = abspath(srvdir)
if provider:
if (provider == 'libvirt'):
logging.debug("build vm provider 'libvirt' selected")
return LibvirtBuildVm(abssrvdir)
elif (provider == 'virtualbox'):
logging.debug("build vm provider 'virtualbox' selected")
return VirtualboxBuildVm(abssrvdir)
else:
logging.warning("build vm provider not supported: '%s'", provider)
kvm_installed = (shutil.which('kvm') is not None)
kvm_installed |= (shutil.which('qemu') is not None)
kvm_installed |= (shutil.which('qemu-kvm') is not None)
vbox_installed = (shutil.which('VBoxHeadless') is not None)
if (kvm_installed and vbox_installed):
logging.debug('both kvm and vbox are installed.')
elif kvm_installed:
logging.debug("libvirt is the sole installed and supported vagrant provider, selecting 'libvirt'")
return LibvirtBuildVm(abssrvdir)
elif vbox_installed:
logging.debug("virtualbox is the sole installed and supported vagrant provider, selecting 'virtualbox'")
return VirtualboxBuildVm(abssrvdir)
else:
logging.debug('could not confirm that either virtualbox or kvm/libvirt are installed')
vagrant_libvirt_path = os.path.join(abssrvdir, '.vagrant', 'machines', 'default', 'libvirt')
has_libvirt_machine = (isdir(vagrant_libvirt_path) and (len(os.listdir(vagrant_libvirt_path)) > 0))
vagrant_virtualbox_path = os.path.join(abssrvdir, '.vagrant', 'machines', 'default', 'virtualbox')
has_vbox_machine = (isdir(vagrant_virtualbox_path) and (len(os.listdir(vagrant_virtualbox_path)) > 0))
if (has_libvirt_machine and has_vbox_machine):
logging.info("build vm provider lookup found virtualbox and libvirt, defaulting to 'virtualbox'")
return VirtualboxBuildVm(abssrvdir)
elif has_libvirt_machine:
logging.debug("build vm provider lookup found 'libvirt'")
return LibvirtBuildVm(abssrvdir)
elif has_vbox_machine:
logging.debug("build vm provider lookup found 'virtualbox'")
return VirtualboxBuildVm(abssrvdir)
available_boxes = []
import vagrant
boxes = vagrant.Vagrant().box_list()
for box in boxes:
if (box.name == 'buildserver'):
available_boxes.append(box.provider)
if (('libvirt' in available_boxes) and ('virtualbox' in available_boxes)):
logging.info("basebox lookup found virtualbox and libvirt boxes, defaulting to 'virtualbox'")
return VirtualboxBuildVm(abssrvdir)
elif ('libvirt' in available_boxes):
logging.info("'libvirt' buildserver box available, using that")
return LibvirtBuildVm(abssrvdir)
elif ('virtualbox' in available_boxes):
logging.info("'virtualbox' buildserver box available, using that")
return VirtualboxBuildVm(abssrvdir)
else:
logging.error("No available 'buildserver' box. Cannot proceed")
os._exit(1) |
class OptionSeriesScatter3dStatesInactive(Options):
    @property
    def animation(self) -> 'OptionSeriesScatter3dStatesInactiveAnimation':
        return self._config_sub_data('animation', OptionSeriesScatter3dStatesInactiveAnimation)
    @property
    def enabled(self):
        return self._config_get(True)
    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    @property
    def opacity(self):
        return self._config_get(0.2)
    @opacity.setter
    def opacity(self, num: float):
        self._config(num, js_type=False) |
def replace_docstring_sections(replacements):
def wrapper(func):
docstring = func.__doc__
for replacement in ([replacements] if isinstance(replacements, str) else replacements):
docstring = docs_add_section(docstring, replacement)
func.__doc__ = docstring
return func
return wrapper |
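# Usage sketch: docs_add_section (not shown above) is assumed to splice a
# section into a docstring; the stub below stands in for it so the decorator
# can be demonstrated end to end.
def docs_add_section(docstring, section):  # hypothetical stand-in
    return (docstring or '') + '\n\n' + section

@replace_docstring_sections('Parameters\n----------\nx : int')
def double(x):
    """Double x."""
    return 2 * x

print(double.__doc__)  # original docstring plus the injected section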
class CssSelectOptionItems(CssStyle.Style):
_attrs = {'padding': '1px 15px 1px 5px'}
classname = 'dropdown-item'
_focus = {'outline': 0}
def customize(self):
self.css({'z-index': 210, 'line-height': ('%spx' % Defaults_html.LINE_HEIGHT), 'color': (self.page.theme.white if self.page.theme.dark else self.page.theme.black)}, important=True)
self.hover.css({'color': self.page.theme.colors[0], 'background-color': self.page.theme.colors[(- 1)]}, important=True)
self.active.css({'background-color': self.page.theme.colors[(- 2)], 'color': self.page.theme.greys[0]}, important=True) |
class TestInspectApp():
def test_empty_app(self, asgi):
ai = inspect.inspect_app(get_app(asgi, False))
assert (ai.routes == [])
assert (ai.middleware.middleware_tree.request == [])
assert (ai.middleware.middleware_tree.resource == [])
assert (ai.middleware.middleware_tree.response == [])
assert (ai.middleware.middleware_classes == [])
assert (ai.middleware.independent is True)
assert (ai.static_routes == [])
assert (ai.sinks == [])
        assert (len(ai.error_handlers) == (4 if asgi else 3))
assert (ai.asgi is asgi)
def test_dependent_middleware(self, asgi):
app = get_app(asgi, cors=False, independent_middleware=False)
ai = inspect.inspect_app(app)
assert (ai.middleware.independent is False)
def test_app(self, asgi):
ai = inspect.inspect_app((make_app_async() if asgi else make_app()))
assert (len(ai.routes) == 3)
assert (len(ai.middleware.middleware_tree.request) == 2)
assert (len(ai.middleware.middleware_tree.resource) == 1)
assert (len(ai.middleware.middleware_tree.response) == 3)
assert (len(ai.middleware.middleware_classes) == 3)
assert (len(ai.static_routes) == 2)
assert (len(ai.sinks) == 2)
        assert (len(ai.error_handlers) == (5 if asgi else 4))
assert (ai.asgi is asgi)
def check_route(self, asgi, r, p, cn, ml, fnt):
assert isinstance(r, inspect.RouteInfo)
assert (r.path == p)
if asgi:
cn += 'Async'
assert (r.class_name == cn)
assert ('_inspect_fixture.py' in r.source_info)
for m in r.methods:
assert isinstance(m, inspect.RouteMethodInfo)
internal = ('_inspect_fixture.py' not in m.source_info)
assert (m.internal is internal)
if (not internal):
assert (m.method in ml)
assert ('_inspect_fixture.py' in m.source_info)
assert (m.function_name == fnt.format(m.method).lower())
def test_routes(self, asgi):
routes = inspect.inspect_routes((make_app_async() if asgi else make_app()))
self.check_route(asgi, routes[0], '/foo', 'MyResponder', ['GET', 'POST', 'DELETE'], 'on_{}')
self.check_route(asgi, routes[1], '/foo/{id}', 'MyResponder', ['GET', 'PUT', 'DELETE'], 'on_{}_id')
self.check_route(asgi, routes[2], '/bar', 'OtherResponder', ['POST'], 'on_{}_id')
def test_routes_empty_paths(self, asgi):
app = get_app(asgi)
r = (i_f.MyResponderAsync() if asgi else i_f.MyResponder())
app.add_route('/foo/bar/baz', r)
routes = inspect.inspect_routes(app)
assert (len(routes) == 1)
self.check_route(asgi, routes[0], '/foo/bar/baz', 'MyResponder', ['GET', 'POST', 'DELETE'], 'on_{}')
def test_static_routes(self, asgi):
routes = inspect.inspect_static_routes((make_app_async() if asgi else make_app()))
assert all((isinstance(sr, inspect.StaticRouteInfo) for sr in routes))
assert (routes[(- 1)].prefix == '/fal/')
assert (routes[(- 1)].directory == os.path.abspath('falcon'))
assert (routes[(- 1)].fallback_filename is None)
assert (routes[(- 2)].prefix == '/tes/')
assert (routes[(- 2)].directory == os.path.abspath('tests'))
assert routes[(- 2)].fallback_filename.endswith('conftest.py')
def test_sink(self, asgi):
sinks = inspect.inspect_sinks((make_app_async() if asgi else make_app()))
assert all((isinstance(s, inspect.SinkInfo) for s in sinks))
assert (sinks[(- 1)].prefix == '/sink_fn')
assert (sinks[(- 1)].name == 'sinkFn')
if (not asgi):
assert ('_inspect_fixture.py' in sinks[(- 1)].source_info)
assert (sinks[(- 2)].prefix == '/sink_cls')
assert (sinks[(- 2)].name == 'SinkClass')
if (not asgi):
assert ('_inspect_fixture.py' in sinks[(- 2)].source_info)
@pytest.mark.skipif((sys.version_info < (3, 6)), reason='dict order is not stable')
def test_error_handler(self, asgi):
errors = inspect.inspect_error_handlers((make_app_async() if asgi else make_app()))
assert all((isinstance(e, inspect.ErrorHandlerInfo) for e in errors))
assert (errors[(- 1)].error == 'RuntimeError')
assert (errors[(- 1)].name == ('my_error_handler_async' if asgi else 'my_error_handler'))
assert ('_inspect_fixture.py' in errors[(- 1)].source_info)
assert (errors[(- 1)].internal is False)
for eh in errors[:(- 1)]:
assert eh.internal
assert (eh.error in ('WebSocketDisconnected', 'Exception', 'HTTPStatus', 'HTTPError'))
def test_middleware(self, asgi):
mi = inspect.inspect_middleware((make_app_async() if asgi else make_app()))
def test(m, cn, ml, inte):
assert isinstance(m, inspect.MiddlewareClassInfo)
assert (m.name == cn)
if inte:
assert ('_inspect_fixture.py' not in m.source_info)
else:
assert ('_inspect_fixture.py' in m.source_info)
for mm in m.methods:
assert isinstance(mm, inspect.MiddlewareMethodInfo)
if inte:
assert ('_inspect_fixture.py' not in mm.source_info)
else:
assert ('_inspect_fixture.py' in mm.source_info)
assert (mm.function_name in ml)
test(mi.middleware_classes[0], 'CORSMiddleware', (['process_response_async'] if asgi else ['process_response']), True)
test(mi.middleware_classes[1], ('MyMiddlewareAsync' if asgi else 'MyMiddleware'), ['process_request', 'process_resource', 'process_response'], False)
test(mi.middleware_classes[2], ('OtherMiddlewareAsync' if asgi else 'OtherMiddleware'), ['process_request', 'process_resource', 'process_response'], False)
def test_middleware_tree(self, asgi):
mi = inspect.inspect_middleware((make_app_async() if asgi else make_app()))
def test(tl, names, cls):
for (t, n, c) in zip(tl, names, cls):
assert isinstance(t, inspect.MiddlewareTreeItemInfo)
assert (t.name == n)
assert (t.class_name == c)
assert isinstance(mi.middleware_tree, inspect.MiddlewareTreeInfo)
test(mi.middleware_tree.request, (['process_request'] * 2), [((n + 'Async') if asgi else n) for n in ['MyMiddleware', 'OtherMiddleware']])
test(mi.middleware_tree.resource, ['process_resource'], [('MyMiddlewareAsync' if asgi else 'MyMiddleware')])
test(mi.middleware_tree.response, ['process_response', 'process_response', ('process_response_async' if asgi else 'process_response')], [('OtherMiddlewareAsync' if asgi else 'OtherMiddleware'), ('MyMiddlewareAsync' if asgi else 'MyMiddleware'), 'CORSMiddleware']) |
@ui_meta(definition.SinsackCard)
class SinsackCard():
name = ''
illustrator = ''
cv = 'VV/'
tag = 'sinsack'
description = ',,,:<style=Desc.Li>1-8,3,</style><style=Desc.Li>,</style><style=Desc.Li>,<style=B></style>,<style=Card.Name></style></style>'
def is_action_valid(self, c, tl):
return (True, '!')
def sound_effect(self, act):
return 'thb-cv-card_sinsack' |
def extractKahoim(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
if ('Soshite Shoujo wa Akujo no Karada o Te ni Ireru' in item['tags']):
return buildReleaseMessageWithType(item, 'Soshite Shoujo wa Akujo no Karada o Te ni Ireru', vol, chp, frag=frag, postfix=postfix)
return False |
class OptionPlotoptionsHeatmapSonificationContexttracksMappingPlaydelay(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._config(text, js_type=False)
def max(self):
return self._config_get(None)
def max(self, num: float):
self._config(num, js_type=False)
def min(self):
return self._config_get(None)
def min(self, num: float):
self._config(num, js_type=False)
def within(self):
return self._config_get(None)
def within(self, value: Any):
self._config(value, js_type=False) |
def fortios_extender(data, fos):
fos.do_member_operation('extender', 'modem-status')
if data['extender_modem_status']:
resp = extender_modem_status(data, fos)
else:
fos._module.fail_json(msg=('missing task body: %s' % 'extender_modem_status'))
return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {}) |
@common.requires_os(*metadata.platforms)
def main():
proc = 'C:\\Users\\Public\\proc.exe'
path = 'C:\\Users\\Public\\AppData\\Roaming\\Mozilla\\Test\\Profiles\\AdefaultA'
file = (path + '\\extensions.json')
common.copy_file(EXE_FILE, proc)
Path(path).mkdir(parents=True, exist_ok=True)
common.execute([proc, '/c', f'Copy-Item {EXE_FILE} {file}'], timeout=10)
common.remove_files(proc, file) |
class Xpub(DerivablePaths):
def __init__(self) -> None:
self.xpub: Optional[str] = None
self._child_xpubs: Dict[(Sequence[int], str)] = {}
def get_master_public_key(self) -> Optional[str]:
return self.xpub
def get_fingerprint(self) -> bytes:
return bip32_key_from_string(self.xpub).fingerprint()
def derive_pubkey(self, derivation_path: Sequence[int]) -> PublicKey:
parent_path = derivation_path[:(- 1)]
xpub = self._child_xpubs.get(parent_path)
if (xpub is None):
xpubkey = bip32_key_from_string(self.xpub)
for n in parent_path:
xpubkey = xpubkey.child_safe(n)
xpub = xpubkey.to_extended_key_string()
self._child_xpubs[parent_path] = xpub
return self.get_pubkey_from_xpub(xpub, derivation_path[(- 1):])
def get_pubkey_from_xpub(self, xpub: str, sequence: Sequence[int]) -> PublicKey:
pubkey = bip32_key_from_string(xpub)
for n in sequence:
pubkey = pubkey.child_safe(n)
return pubkey
def get_xpubkey(self, derivation_path: Sequence[int]) -> XPublicKey:
return XPublicKey(bip32_xpub=self.xpub, derivation_path=derivation_path)
def is_signature_candidate(self, x_pubkey: XPublicKey) -> bool:
if (x_pubkey.kind() == XPublicKeyType.BIP32):
return (self.xpub == x_pubkey.bip32_extended_key())
return False |
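# Hypothetical usage sketch for Xpub (bip32_key_from_string is assumed from the
# surrounding bitcoinx-based module; the xpub value below is a placeholder, so
# the calls are shown commented out):
# wallet = Xpub()
# wallet.xpub = 'xpub...'                # master public key (placeholder)
# pk0 = wallet.derive_pubkey((0, 0))     # derives path 0/0
# pk1 = wallet.derive_pubkey((0, 1))     # reuses the cached parent xpub for (0,) |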
class Solution():
def trap(self, heights: List[int]) -> int:
stack = []
area = 0
for i in range(len(heights)):
height = heights[i]
if (stack and (heights[stack[(- 1)]] > height)):
continue
if stack:
width = ((i - stack[(- 1)]) - 1)
height = heights[stack[(- 1)]]
tarea = (width * height)
for j in range((stack[(- 1)] + 1), i):
tarea -= heights[j]
area += tarea
stack.append(i)
if (not stack):
return area
index = stack[(- 1)]
stack = []
for i in range((len(heights) - 1), (index - 1), (- 1)):
height = heights[i]
if (stack and (heights[stack[(- 1)]] > height)):
continue
if stack:
width = ((stack[(- 1)] - i) - 1)
height = heights[stack[(- 1)]]
tarea = (width * height)
for j in range((i + 1), stack[(- 1)]):
tarea -= heights[j]
area += tarea
stack.append(i)
return area |
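# Worked example for the two-pass stack solution above: the classic
# LeetCode 42 input traps 6 units of water.
assert (Solution().trap([0, 1, 0, 2, 1, 0, 1, 3, 2, 1, 2, 1]) == 6) |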
class RandomizedSkipRows():
def __init__(self, ratio: float, random_seed: int):
self.random = random.Random(random_seed)
self.ratio = ratio
self.selected_rows = self._select()
def skiprows(self, row_index: int):
if (row_index == 0):
return False
if ((row_index % CHUNK_SIZE) == 0):
self.selected_rows = self._select()
idx = (row_index - (int((row_index / CHUNK_SIZE)) * CHUNK_SIZE))
return self.selected_rows[idx]
def _select(self):
return [bool((self.random.random() < self.ratio)) for _ in range(1000)] |
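# A usage sketch (not from the source): pandas accepts a callable for skiprows,
# so this keeps a reproducible ~10% sample of rows (skiprows() returning True
# means "skip this row"). It assumes CHUNK_SIZE == 1000, matching the 1000
# flags that _select() produces; 'data.csv' is a placeholder path.
import pandas as pd

sampler = RandomizedSkipRows(ratio=0.9, random_seed=42)
df = pd.read_csv('data.csv', skiprows=sampler.skiprows) |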
def test_headerdb_canonical_head_updates_to_longest_chain(headerdb, genesis_header):
headerdb.persist_header(genesis_header)
chain_a = mk_header_chain(genesis_header, 7)
chain_b = mk_header_chain(genesis_header, 5)
chain_c = mk_header_chain(genesis_header, 9)
for (idx, header) in enumerate(chain_a, 1):
headerdb.persist_header(header)
assert_is_canonical_chain(headerdb, chain_a[:idx])
for header in chain_b:
headerdb.persist_header(header)
assert_is_canonical_chain(headerdb, chain_a)
for (idx, header) in enumerate(chain_c, 1):
headerdb.persist_header(header)
if (idx <= 7):
assert_is_canonical_chain(headerdb, chain_a)
else:
assert_is_canonical_chain(headerdb, chain_c[:idx])
assert_is_canonical_chain(headerdb, chain_c) |
class ForumIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
categorys = indexes.CharField(model_attr='category')
def get_model(self):
return Forum
def index_queryset(self, using=None):
return self.get_model().objects.filter(hidden=False) |
class LyricsManager(providers.ProviderHandler):
def __init__(self):
providers.ProviderHandler.__init__(self, 'lyrics')
self.preferred_order = settings.get_option('lyrics/preferred_order', [])
self.cache = LyricsCache(os.path.join(xdg.get_cache_dir(), 'lyrics.cache'))
event.add_callback(self.on_track_tags_changed, 'track_tags_changed')
def __get_cache_key(self, track: Track, provider) -> str:
return (((track.get_loc_for_io() + provider.display_name) + track.get_tag_display('artist')) + track.get_tag_display('title'))
def set_preferred_order(self, order):
if (not (type(order) in (list, tuple))):
raise AttributeError('order must be a list or tuple')
self.preferred_order = order
settings.set_option('lyrics/preferred_order', list(order))
def find_lyrics(self, track, refresh=False):
lyrics = None
source = None
url = None
for method in self.get_providers():
try:
(lyrics, source, url) = self._find_cached_lyrics(method, track, refresh)
except LyricsNotFoundException:
continue
break
else:
raise LyricsNotFoundException()
lyrics = lyrics.strip()
return (lyrics, source, url)
def find_all_lyrics(self, track, refresh=False):
lyrics_found = []
for method in self.get_providers():
lyrics = None
source = None
url = None
try:
(lyrics, source, url) = self._find_cached_lyrics(method, track, refresh)
except LyricsNotFoundException:
continue
lyrics = lyrics.strip()
lyrics_found.append((method.display_name, lyrics, source, url))
if (not lyrics_found):
raise LyricsNotFoundException()
return lyrics_found
def _find_cached_lyrics(self, method, track, refresh=False):
lyrics = None
source = None
url = None
cache_time = settings.get_option('lyrics/cache_time', 720)
key = self.__get_cache_key(track, method)
if (key in self.cache):
(lyrics, source, url, time) = self.cache[key]
now = datetime.now()
if (((now - time) < timedelta(hours=cache_time)) and (not refresh)):
try:
lyrics = zlib.decompress(lyrics)
except zlib.error as e:
raise LyricsNotFoundException(e)
return (lyrics.decode('utf-8', errors='replace'), source, url)
(lyrics, source, url) = method.find_lyrics(track)
assert isinstance(lyrics, str), (method, track)
time = datetime.now()
self.cache[key] = (zlib.compress(lyrics.encode('utf-8')), source, url, time)
return (lyrics, source, url)
def on_provider_removed(self, provider):
try:
self.preferred_order.remove(provider.name)
except (ValueError, AttributeError):
pass
def on_track_tags_changed(self, e, track, tags):
if ('lyrics' in tags):
local_provider = self.get_provider('__local')
if (local_provider is None):
return
key = self.__get_cache_key(track, local_provider)
try:
del self.cache[key]
except KeyError:
pass |
class NodeMechanicActor(actor.RallyActor):
def __init__(self):
super().__init__()
self.mechanic = None
self.host = None
def receiveMsg_StartNodes(self, msg, sender):
try:
self.host = msg.ip
if msg.external:
self.logger.info('Connecting to externally provisioned nodes on [%s].', msg.ip)
else:
self.logger.info('Starting node(s) %s on [%s].', msg.node_ids, msg.ip)
cfg = config.auto_load_local_config(msg.cfg, additional_sections=['track', 'mechanic', 'client', 'telemetry', 'race', 'source'])
cfg.add(config.Scope.application, 'node', 'rally.root', paths.rally_root())
if (not msg.external):
cfg.add(config.Scope.benchmark, 'provisioning', 'node.ids', msg.node_ids)
cls = metrics.metrics_store_class(cfg)
metrics_store = cls(cfg)
metrics_store.open(ctx=msg.open_metrics_context)
self.mechanic = create(cfg, metrics_store, msg.ip, msg.port, msg.all_node_ips, msg.all_node_ids, msg.sources, msg.distribution, msg.external, msg.docker)
self.mechanic.start_engine()
self.wakeupAfter(METRIC_FLUSH_INTERVAL_SECONDS)
self.send(getattr(msg, 'reply_to', sender), NodesStarted())
except Exception:
self.logger.exception('Cannot process message [%s]', msg)
(_, ex_value, _) = sys.exc_info()
self.send(getattr(msg, 'reply_to', sender), actor.BenchmarkFailure(ex_value, traceback.format_exc()))
def receiveMsg_PoisonMessage(self, msg, sender):
if (sender != self.myAddress):
self.send(sender, actor.BenchmarkFailure(msg.details))
def receiveMsg_BenchmarkFailure(self, msg, sender):
self.send(getattr(msg, 'reply_to', sender), msg)
def receiveUnrecognizedMessage(self, msg, sender):
try:
self.logger.debug('NodeMechanicActor#receiveMessage(msg = [%s] sender = [%s])', str(type(msg)), str(sender))
if (isinstance(msg, ResetRelativeTime) and self.mechanic):
self.mechanic.reset_relative_time()
elif (isinstance(msg, thespian.actors.WakeupMessage) and self.mechanic):
self.mechanic.flush_metrics()
self.wakeupAfter(METRIC_FLUSH_INTERVAL_SECONDS)
elif isinstance(msg, StopNodes):
self.mechanic.stop_engine()
self.send(sender, NodesStopped())
self.mechanic = None
elif isinstance(msg, thespian.actors.ActorExitRequest):
if self.mechanic:
self.mechanic.stop_engine()
self.mechanic = None
except BaseException as e:
self.logger.exception('Cannot process message [%s]', msg)
self.send(getattr(msg, 'reply_to', sender), actor.BenchmarkFailure(('Error on host %s' % str(self.host)), e)) |
def get_plugin(name, plugin_path):
search_dirs = [PluginFolder]
if plugin_path:
search_dirs = ([plugin_path] + search_dirs)
for dir in search_dirs:
location = os.path.join(dir, name)
if ((not os.path.isdir(location)) or (not ((MainModule + '.py') in os.listdir(location)))):
continue
spec = importlib.machinery.PathFinder.find_spec(MainModule, [location])
return {'name': name, 'spec': spec, 'path': location}
raise Exception(('Could not find plugin with name ' + name)) |
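# A minimal sketch of consuming get_plugin's result with the standard library
# (the plugin name is hypothetical; PluginFolder and MainModule come from the
# surrounding module):
import importlib.util

plugin = get_plugin('my_plugin', plugin_path=None)
module = importlib.util.module_from_spec(plugin['spec'])
plugin['spec'].loader.exec_module(module) |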
class SkipApi(CRUDApi):
def __init__(self, config):
super(SkipApi, self).__init__(config, object_type='skip', endpoint=EndpointFactory('skips'))
def delete(self, api_objects, **kwargs):
raise NotImplementedError('Cannot delete Skip objects')
def update(self, api_objects, **kwargs):
raise NotImplementedError('Cannot update Skip objects') |
def downgrade():
op.execute("delete from connectionconfig where connection_type in ('mssql')")
op.execute('alter type connectiontype rename to connectiontype_old')
op.execute("create type connectiontype as enum('postgres', 'mongodb', 'mysql', ' 'snowflake', 'redshift')")
op.execute('alter table connectionconfig alter column connection_type type connectiontype using connection_type::text::connectiontype')
op.execute('drop type connectiontype_old') |
@use_defaults()
class StationSchema(Schema):
class Meta():
type_ = 'station'
self_view = 'v1.station_detail'
self_view_kwargs = {'id': '<id>'}
inflect = dasherize
id = fields.Integer(dump_only=True)
station_name = fields.String(required=True, validate=validate.Length(min=1))
station_type = fields.String(required=True, validate=validate.OneOf(choices=STATION_CHOICES))
microlocation_id = fields.Function((lambda obj: obj.microlocation.id))
room = fields.Function((lambda obj: obj.microlocation.room))
event = Relationship(self_view='v1.station_event', self_view_kwargs={'id': '<id>'}, related_view='v1.event_detail', related_view_kwargs={'id': '<id>'}, schema='EventSchemaPublic', type_='event')
microlocation = Relationship(self_view='v1.station_microlocation', self_view_kwargs={'id': '<id>'}, related_view='v1.microlocation_detail', related_view_kwargs={'id': '<microlocation_id>'}, schema='MicrolocationSchema', type_='microlocation') |
def test_list_images(client):
doc = {'images': [{'href': '/images/1eaf6ef1-7f2d-4ecc-a8d5-6e8adba7cc0e.png'}]}
response = client.simulate_get('/images')
result_doc = msgpack.unpackb(response.content, raw=False)
assert (result_doc == doc)
assert (response.status == falcon.HTTP_OK) |
def extractPwilliamrossWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
class BaseModelBuilder(ABC):
def __init__(self, modality_config: Dict[(str, Union[(str, Dict[(str, Any)])])], observation_modality_mapping: Dict[(str, str)], shared_embedding_keys: Optional[Union[(List[str], Dict[(str, List[str])])]]):
self.modality_config = modality_config
self.observation_modality_mapping = observation_modality_mapping
self.shared_embedding_keys = shared_embedding_keys
self.use_shared_embedding = None
def init_shared_embedding_keys(self, step_keys: Iterable[StepKeyType]) -> None:
self.shared_embedding_keys = (list() if (self.shared_embedding_keys is None) else self.shared_embedding_keys)
if isinstance(self.shared_embedding_keys, (list, ListConfig)):
self.shared_embedding_keys = {step_key: list(self.shared_embedding_keys) for step_key in step_keys}
else:
assert isinstance(self.shared_embedding_keys, (dict, DictConfig)), f'type: {type(self.shared_embedding_keys)}'
self.shared_embedding_keys = {step_key: list(shared_keys) for (step_key, shared_keys) in self.shared_embedding_keys.items()}
self.use_shared_embedding: Dict[(StepKeyType, bool)] = {step_key: (len(shared_keys) > 0) for (step_key, shared_keys) in self.shared_embedding_keys.items()}
def to_recurrent_gym_space(cls, observation_space: spaces.Dict, rnn_steps: int) -> spaces.Dict:
assert (rnn_steps > 1)
rnn_dict = dict()
for (key, space) in observation_space.spaces.items():
assert isinstance(space, spaces.Box)
rnn_low = np.repeat(space.low[np.newaxis], axis=0, repeats=rnn_steps)
rnn_high = np.repeat(space.high[np.newaxis], axis=0, repeats=rnn_steps)
rnn_dict[key] = spaces.Box(low=rnn_low, high=rnn_high, dtype=space.dtype)
return spaces.Dict(rnn_dict)
def from_observation_space(self, observation_space: spaces.Dict) -> InferenceBlock: |
def test_deserialize_part_media(client):
data = b'--BOUNDARY\r\nContent-Disposition: form-data; name="factorials"\r\nContent-Type: application/json\r\n\r\n{"count": 6, "numbers": [1, 2, 6, 24, 120, 720]}\r\n--BOUNDARY\r\nContent-Disposition: form-data; name="person"\r\nContent-Type: application/x-www-form-urlencoded\r\n\r\nname=Jane&surname=Doe&fruit=%F0%9F%8D%8F\r\n--BOUNDARY--\r\n'
resp = client.simulate_post('/media', headers={'Content-Type': 'multipart/form-data; boundary=BOUNDARY'}, body=data)
assert (resp.status_code == 200)
assert (resp.json == [{'count': 6, 'numbers': [1, 2, 6, 24, 120, 720]}, {'fruit': '🍏', 'name': 'Jane', 'surname': 'Doe'}]) |
def test_parameter_order():
@task()
def task1(a: int, b: float, c: str) -> str:
return f'{a} - {b} - {c}'
@task()
def task2(b: float, c: str, a: int) -> str:
return f'{a} - {b} - {c}'
@task()
def task3(c: str, a: int, b: float) -> str:
return f'{a} - {b} - {c}'
param_a = [1, 2, 3]
param_b = [0.1, 0.2, 0.3]
param_c = 'c'
m1 = array_node_map_task(functools.partial(task1, c=param_c))(a=param_a, b=param_b)
m2 = array_node_map_task(functools.partial(task2, c=param_c))(a=param_a, b=param_b)
m3 = array_node_map_task(functools.partial(task3, c=param_c))(a=param_a, b=param_b)
assert (m1 == m2 == m3 == ['1 - 0.1 - c', '2 - 0.2 - c', '3 - 0.3 - c']) |
class MacsXLS():
def __init__(self, filen=None, fp=None, name=None):
self.__filen = filen
self.__name = name
self.__macs_version = None
self.__command_line = None
self.__header = []
self.__data = None
if (fp is None):
fp = io.open(filen, 'rt')
else:
filen = None
for line in fp:
line = line.strip()
if (line.startswith('#') or (line == '')):
self.__header.append(line)
if line.startswith('# This file is generated by MACS version '):
self.__macs_version = line.split()[8]
elif ((self.__name is None) and line.startswith('# name = ')):
self.__name = line[len('# name = '):]
elif line.startswith('# Command line: '):
self.__command_line = line[16:]
elif (self.__data is None):
columns = line.split('\t')
columns.insert(0, 'order')
self.__data = TabFile(column_names=columns)
else:
self.__data.append(tabdata=('\t%s' % line))
if (filen is not None):
fp.close()
if (self.macs_version is None):
raise Exception('Failed to extract MACS version, not a MACS output file?')
self.update_order()
def filen(self):
return self.__filen
def name(self):
return self.__name
def macs_version(self):
return self.__macs_version
def command_line(self):
return self.__command_line
def columns(self):
return self.__data.header()
def columns_as_xls_header(self):
return ([('#' + self.columns[0])] + self.columns[1:])
def header(self):
return self.__header
def data(self):
return self.__data
def with_broad_option(self):
if self.macs_version.startswith('1.'):
return False
try:
return ('--broad' in self.command_line.split())
except AttributeError:
return ('abs_summit' not in self.columns)
def sort_on(self, column, reverse=True):
self.__data.sort((lambda line: line[column]), reverse=reverse)
self.update_order()
def update_order(self):
for i in range(0, len(self.__data)):
self.__data[i]['order'] = (i + 1) |
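# Hypothetical usage sketch for MacsXLS (the file name and sort column are
# placeholders; TabFile comes from the surrounding package):
# macs = MacsXLS('peaks.xls')
# macs.sort_on('length')   # sort peaks by the 'length' column, renumbering 'order' |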
class OptionPlotoptionsXrangeSonificationDefaultspeechoptionsPointgrouping(Options):
def algorithm(self):
return self._config_get('last')
def algorithm(self, text: str):
self._config(text, js_type=False)
def enabled(self):
return self._config_get(True)
def enabled(self, flag: bool):
self._config(flag, js_type=False)
def groupTimespan(self):
return self._config_get(15)
def groupTimespan(self, num: float):
self._config(num, js_type=False)
def prop(self):
return self._config_get('y')
def prop(self, text: str):
self._config(text, js_type=False) |
class HeartRateControlPointChrc(Characteristic):
HR_CTRL_PT_UUID = '00002a39-0000-1000-8000-00805f9b34fb'
def __init__(self, bus, index, service):
Characteristic.__init__(self, bus, index, self.HR_CTRL_PT_UUID, ['write'], service)
def WriteValue(self, value, options):
print('Heart Rate Control Point WriteValue called')
if (len(value) != 1):
raise InvalidValueLengthException()
byte = value[0]
print(('Control Point value: ' + repr(byte)))
if (byte != 1):
raise FailedException('0x80')
print('Energy Expended field reset!')
self.service.energy_expended = 0 |
class OptionSeriesColumnrangeSonificationDefaultspeechoptionsMappingTime(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
self._config(text, js_type=False)
def max(self):
return self._config_get(None)
def max(self, num: float):
self._config(num, js_type=False)
def min(self):
return self._config_get(None)
def min(self, num: float):
self._config(num, js_type=False)
def within(self):
return self._config_get(None)
def within(self, value: Any):
self._config(value, js_type=False) |
class XTGeoDialog():
def __init__(self) -> None:
self._callclass = None
self._caller = None
self._rootlogger = logging.getLogger()
self._lformat = None
self._lformatlevel = 1
self._logginglevel = 'CRITICAL'
self._logginglevel_fromenv = None
self._loggingname = ''
self._test_env = True
self._testpath = os.environ.get('XTG_TESTPATH', '../xtgeo-testdata')
self._showrtwarnings = True
self._logginglevel_fromenv = os.environ.get('XTG_LOGGING_LEVEL', None)
loggingformat = os.environ.get('XTG_LOGGING_FORMAT')
_printdebug('Logging format is', loggingformat)
if self._logginglevel_fromenv:
self.logginglevel = self._logginglevel_fromenv
if (loggingformat is not None):
self._lformatlevel = int(loggingformat)
def testpathobj(self) -> pathlib.Path:
return pathlib.Path(self._testpath)
def testpath(self) -> str:
return self._testpath
def testpath(self, newtestpath: str) -> None:
if (not os.path.isdir(newtestpath)):
raise RuntimeError(f'Proposed test path is not valid: {newtestpath}')
self._testpath = newtestpath
def logginglevel(self) -> str:
return self._logginglevel
def logginglevel(self, level: str) -> None:
validlevels = ('INFO', 'WARNING', 'DEBUG', 'CRITICAL')
if (level in validlevels):
self._logginglevel = level
else:
raise ValueError(f"Invalid level given, must be one of: {', '.join(validlevels)}")
def numericallogginglevel(self) -> int:
llo = logging.CRITICAL
if (self._logginglevel == 'INFO'):
llo = logging.INFO
elif (self._logginglevel == 'WARNING'):
llo = logging.WARNING
elif (self._logginglevel == 'DEBUG'):
llo = logging.DEBUG
return llo
def loggingformatlevel(self) -> int:
return self._lformatlevel
def loggingformat(self) -> (str | None):
_printdebug('Logging format is', self._lformatlevel)
if (self._lformatlevel <= 1):
fmt = logging.Formatter(fmt='%(levelname)8s: (%(relative)ss) \t%(message)s')
elif (self._lformatlevel == 2):
fmt = _Formatter(fmt='%(levelname)8s (%(relative)ss) %(pathname)44s [%(funcName)40s()] %(lineno)4d >> \t%(message)s')
else:
fmt = logging.Formatter(fmt='%(asctime)s Line: %(lineno)4d %(name)44s (Delta=%(relative)ss) [%(funcName)40s()]%(levelname)8s:\t%(message)s')
log = self._rootlogger
for h in log.handlers:
h.addFilter(_TimeFilter())
h.setFormatter(fmt)
self._lformat = fmt._fmt
return self._lformat
def get_xtgeo_info(variant: Literal['clibinfo']='clibinfo') -> str:
if (variant == 'clibinfo'):
return f'XTGeo version {xtgeo.__version__} (Python {platform.python_version()} on {platform.system()})'
return 'Invalid'
def print_xtgeo_header(appname: str, appversion: (str | None), info: (str | None)=None) -> None:
cur_version = (('Python ' + str(sys.version_info[0])) + '.')
cur_version += ((str(sys.version_info[1]) + '.') + str(sys.version_info[2]))
app = ((appname + ', version ') + str(appversion))
if info:
app = (((app + ' (') + info) + ')')
print('')
print(HEADER)
print(('#' * 79))
print(f'#{app.center(77)}#')
print(('#' * 79))
nowtime = dtime.now().strftime('%Y-%m-%d %H:%M:%S')
ver = ('Using XTGeo version ' + xtgeo.__version__)
cur_version += f' {nowtime} on {platform.node()} by {getpass.getuser()}'
print(f'#{ver.center(77)}#')
print(f'#{cur_version.center(77)}#')
print(('#' * 79))
print(ENDC)
print('')
def basiclogger(self, name: str, logginglevel: (str | None)=None, loggingformat: (int | None)=None, info: bool=False) -> logging.Logger:
if ((logginglevel is not None) and (self._logginglevel_fromenv is None)):
self.logginglevel = logginglevel
if ((loggingformat is not None) and isinstance(loggingformat, int)):
self._lformatlevel = loggingformat
logging.basicConfig(stream=sys.stdout)
fmt = self.loggingformat
self._loggingname = name
if info:
print(f'Logginglevel is {self.logginglevel}, formatlevel is {self._lformatlevel}, and format is {fmt}')
self._rootlogger.setLevel(self.numericallogginglevel)
logging.captureWarnings(True)
return logging.getLogger(self._loggingname)
def functionlogger(name: str) -> logging.Logger:
warnings.warn('functionlogger is deprecated and will be removed in a future version. Use null_logger instead.', DeprecationWarning)
return null_logger(name)
def testsetup(self) -> bool:
tstpath = os.environ.get('XTG_TESTPATH', '../xtgeo-testdata')
if (not os.path.isdir(tstpath)):
raise RuntimeError(f'Test path is not valid: {tstpath}')
self._test_env = True
self._testpath = tstpath
return True
def timer(*args: float) -> float:
time1 = timeit.default_timer()
if args:
return (time1 - args[0])
return time1
def show_runtimewarnings(self, flag: bool=True) -> None:
self._showrtwarnings = flag
def insane(self, string: str) -> None:
level = 4
idx = 0
caller = sys._getframe(1).f_code.co_name
frame = inspect.stack()[1][0]
self.get_callerinfo(caller, frame)
self._output(idx, level, string)
def trace(self, string: str) -> None:
level = 3
idx = 0
caller = sys._getframe(1).f_code.co_name
frame = inspect.stack()[1][0]
self.get_callerinfo(caller, frame)
self._output(idx, level, string)
def debug(self, string: str) -> None:
level = 2
idx = 0
caller = sys._getframe(1).f_code.co_name
frame = inspect.stack()[1][0]
self.get_callerinfo(caller, frame)
self._output(idx, level, string)
def speak(self, string: str) -> None:
level = 1
idx = 1
caller = sys._getframe(1).f_code.co_name
frame = inspect.stack()[1][0]
self.get_callerinfo(caller, frame)
self._output(idx, level, string)
info = speak
def say(self, string: str) -> None:
level = (- 5)
idx = 3
caller = sys._getframe(1).f_code.co_name
frame = inspect.stack()[1][0]
self.get_callerinfo(caller, frame)
self._output(idx, level, string)
def warn(self, string: str) -> None:
level = 0
idx = 6
if self._showrtwarnings:
caller = sys._getframe(1).f_code.co_name
frame = inspect.stack()[1][0]
self.get_callerinfo(caller, frame)
self._output(idx, level, string)
warning = warn
def warndeprecated(string: str) -> None:
warnings.simplefilter('default', DeprecationWarning)
warnings.warn(string, DeprecationWarning, stacklevel=2)
def warnuser(string: str) -> None:
warnings.simplefilter('default', UserWarning)
warnings.warn(string, UserWarning, stacklevel=2)
def error(self, string: str) -> None:
level = (- 8)
idx = 8
caller = sys._getframe(1).f_code.co_name
frame = inspect.stack()[1][0]
self.get_callerinfo(caller, frame)
self._output(idx, level, string)
def critical(self, string: str) -> None:
level = (- 9)
idx = 9
caller = sys._getframe(1).f_code.co_name
frame = inspect.stack()[1][0]
self.get_callerinfo(caller, frame)
self._output(idx, level, string)
def get_callerinfo(self, caller: Any, frame: Any) -> tuple[(Any, str)]:
the_class = self._get_class_from_frame(frame)
x = str(the_class).split('.')
the_class = x[(- 1)]
self._caller = caller
self._callclass = the_class
return (self._caller, self._callclass)
def _get_class_from_frame(fr: Any) -> Any:
(args, _, _, value_dict) = inspect.getargvalues(fr)
if (args and (args[0] == 'self')):
instance = value_dict.get('self', None)
if instance:
return getattr(instance, '__class__', None)
return None
def _output(self, idx: int, level: int, string: str) -> None:
prefix = ''
endfix = ''
if (idx == 0):
prefix = '++'
elif (idx == 1):
prefix = '**'
elif (idx == 3):
prefix = '>>'
elif (idx == 6):
prefix = (WARN + '##')
endfix = ENDC
elif (idx == 8):
prefix = (ERROR + '!#')
endfix = ENDC
elif (idx == 9):
prefix = (CRITICAL + '!!')
endfix = ENDC
ulevel = str(level)
if (level == (- 5)):
ulevel = 'M'
if (level == (- 8)):
ulevel = 'E'
if (level == (- 9)):
ulevel = 'W'
print(f'{prefix} <{ulevel}> [{self._callclass:23s}-> {self._caller:>33s}] {string}{endfix}') |
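# A brief usage sketch for the logging helpers above (the logger name passed
# to basiclogger is arbitrary):
xtg = XTGeoDialog()
logger = xtg.basiclogger('myapp', logginglevel='INFO')
logger.info('XTGeo dialog configured') |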
class OptionPlotoptionsArearangeTooltip(Options):
def clusterFormat(self):
return self._config_get('Clustered points: {point.clusterPointsAmount}')
def clusterFormat(self, text: str):
self._config(text, js_type=False)
def dateTimeLabelFormats(self) -> 'OptionPlotoptionsArearangeTooltipDatetimelabelformats':
return self._config_sub_data('dateTimeLabelFormats', OptionPlotoptionsArearangeTooltipDatetimelabelformats)
def distance(self):
return self._config_get(16)
def distance(self, num: float):
self._config(num, js_type=False)
def followPointer(self):
return self._config_get(False)
def followPointer(self, flag: bool):
self._config(flag, js_type=False)
def followTouchMove(self):
return self._config_get(True)
def followTouchMove(self, flag: bool):
self._config(flag, js_type=False)
def footerFormat(self):
return self._config_get('')
def footerFormat(self, text: str):
self._config(text, js_type=False)
def format(self):
return self._config_get('undefined')
def format(self, text: str):
self._config(text, js_type=False)
def headerFormat(self):
return self._config_get(None)
def headerFormat(self, text: str):
self._config(text, js_type=False)
def nullFormat(self):
return self._config_get(None)
def nullFormat(self, text: str):
self._config(text, js_type=False)
def nullFormatter(self):
return self._config_get(None)
def nullFormatter(self, value: Any):
self._config(value, js_type=False)
def pointFormat(self):
return self._config_get('<span style="color:{series.color}"></span> {series.name}: <b>{point.low}</b> - <b>{point.high}</b><br/>')
def pointFormat(self, text: str):
self._config(text, js_type=False)
def pointFormatter(self):
return self._config_get(None)
def pointFormatter(self, value: Any):
self._config(value, js_type=False)
def valueDecimals(self):
return self._config_get(None)
def valueDecimals(self, num: float):
self._config(num, js_type=False)
def valuePrefix(self):
return self._config_get(None)
def valuePrefix(self, text: str):
self._config(text, js_type=False)
def valueSuffix(self):
return self._config_get(None)
def valueSuffix(self, text: str):
self._config(text, js_type=False)
def xDateFormat(self):
return self._config_get(None)
def xDateFormat(self, text: str):
self._config(text, js_type=False) |
def check_if_from_block_match(block_number, _type, from_block):
if ((from_block is None) or (from_block == 'latest')):
return (_type == 'mined')
elif (from_block in {'earliest', 'pending'}):
return (_type == 'pending')
elif is_integer(from_block):
return (is_integer(block_number) and (block_number >= from_block))
else:
raise ValueError(f'Unrecognized from_block format: {from_block}') |
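# Quick sanity checks for the dispatch above (is_integer comes from the
# surrounding module, e.g. eth-utils):
assert (check_if_from_block_match(5, 'mined', None) is True)
assert (check_if_from_block_match(5, 'pending', 'earliest') is True)
assert (check_if_from_block_match(5, 'mined', 3) is True)
assert (check_if_from_block_match(2, 'mined', 3) is False) |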
def test_dict_neg_logprob():
dist = get_dict_distribution()
actions = dist.sample()
log_probs = dist.neg_log_prob(actions=actions)
assert (log_probs['action_0'].numpy().ndim == 1)
assert (log_probs['action_0'].numpy().shape == (100,))
assert (log_probs['action_1'].numpy().ndim == 2)
assert (log_probs['action_1'].numpy().shape == (100, 5)) |
def at(hours: list[time], path: str, action: Callable):
def assert_integrity(persisted: PersistedDict, index):
if ((not (index in persisted)) or (not isinstance(persisted[index], list))):
persisted[index] = []
current_date = date.today()
previous_day = (current_date - timedelta(days=1)).strftime('%Y-%m-%d')
try:
del persisted[previous_day]
except KeyError:
pass
def custom_serializer(obj):
if isinstance(obj, time):
time_str = obj.strftime('%H:%M:%S')
return {'__time__': True, 'value': time_str}
return obj
def custom_object_hook(obj):
if ('__time__' in obj):
time_str = obj['value']
(hour, minute, second) = map(int, time_str.split(':'))
time_obj = time(hour, minute, second)
return time_obj
return obj
persisted = PersistedDict(path, serializer=custom_serializer, custom_object_hook=custom_object_hook)
result = None
async def wrapper(*args, **kwargs):
nonlocal result, persisted
current_date = datetime.now().date().strftime('%Y-%m-%d')
assert_integrity(persisted, current_date)
print(persisted)
for hour in hours:
hour_as_datetime = datetime.combine(date.today(), hour)
if ((datetime.now() >= hour_as_datetime) and (hour not in persisted[current_date])):
persisted[current_date] = (persisted[current_date] + [hour])
result = (await action(*args, **kwargs))
return result
return wrapper |
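# Hypothetical usage sketch for the scheduler above (PersistedDict comes from
# the surrounding module; the state path is a placeholder). The returned
# wrapper is meant to be awaited periodically; each listed time fires at most
# once per day, with progress persisted across restarts.
from datetime import time

async def send_report():
    print('report sent')

scheduled = at([time(9, 0), time(17, 0)], 'at_state.json', send_report)
# await scheduled() from a periodic loop, e.g. once a minute |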
def test_main_starts_normcap(monkeypatch):
called = False
def mocked_app_main():
nonlocal called
called = True
monkeypatch.setattr(app, 'run', mocked_app_main)
main_module = str(((Path(__file__).parent.parent / 'normcap') / '__main__.py').absolute())
loader = SourceFileLoader('__main__', main_module)
spec = spec_from_loader(loader.name, loader)
assert spec
loader.exec_module(module_from_spec(spec))
assert called |
class TestOverfittingMethods(unittest.TestCase):
def setUp(self):
finlab_crypto.setup()
def test_overfitting(self):
nstrategy = 10
nreturns = 4001
returns = pd.DataFrame({('s' + str(i)): np.random.normal(0, 0.02, size=nreturns) for i in range(nstrategy)})
returns['s1'] += 0.02
cscv = overfitting.CSCV()
cscv.add_daily_returns(returns)
results = cscv.estimate_overfitting(plot=True)
self.assertEqual(results['pbo_test'], 0)
returns = pd.DataFrame({('s' + str(i)): np.random.normal(0, 0.02, size=nreturns) for i in range(nstrategy)})
cscv = overfitting.CSCV()
cscv.add_daily_returns(returns)
results = cscv.estimate_overfitting(plot=True)
self.assertEqual((results['pbo_test'] > 0), True) |
def build_ait_module_bmm_rrr(*, bs, m, n, k, split_k, test_name):
target = detect_target(use_fp16_acc=True)
input_params = {'dtype': 'float16', 'is_input': True}
batch_dim = shape_utils.gen_int_var_min_max(bs, 'batch_dim')
batch_a = Tensor(shape=[batch_dim, m, k], name='batch_a', **input_params)
batch_b = Tensor(shape=[batch_dim, k, n], name='batch_b', **input_params)
OP = ops.bmm_rrr()
OP._attrs['split_k_hints'] = (split_k,)
output = OP(batch_a, batch_b)
output._attrs['name'] = 'output'
output._attrs['is_output'] = True
return compile_model(output, target, './tmp', test_name=test_name) |
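# A usage sketch, assuming AITemplate's compile_model returns a Model whose
# run_with_tensors maps named torch inputs to a list of outputs (shapes are
# placeholders; requires a CUDA device):
import torch

module = build_ait_module_bmm_rrr(bs=[1], m=8, n=8, k=8, split_k=1, test_name='bmm_rrr_sketch')
a = torch.randn(1, 8, 8, dtype=torch.float16, device='cuda')
b = torch.randn(1, 8, 8, dtype=torch.float16, device='cuda')
y = torch.empty(1, 8, 8, dtype=torch.float16, device='cuda')
module.run_with_tensors({'batch_a': a, 'batch_b': b}, [y]) |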
def _start():
global patch, name, path, monitor
global MININT16, MAXINT16, MININT32, MAXINT32, delay, filename, fileformat, filenumber, recording, adjust, maxabs
MININT16 = (- np.power(2.0, 15))
MAXINT16 = (np.power(2.0, 15) - 1)
MININT32 = (- np.power(2.0, 31))
MAXINT32 = (np.power(2.0, 31) - 1)
delay = patch.getfloat('general', 'delay')
filename = patch.getstring('recording', 'file')
fileformat = patch.getstring('recording', 'format')
if (fileformat is None):
(name, ext) = os.path.splitext(filename)
fileformat = ext[1:]
filenumber = 0
recording = False
adjust = 1
maxabs = 0
if len(locals()):
print(('LOCALS: ' + ', '.join(locals().keys()))) |
def test_missing_envs_required(config, json_config_file_3):
with open(json_config_file_3, 'w') as file:
file.write(json.dumps({'section': {'undefined': '${UNDEFINED}'}}))
with raises(ValueError, match='Missing required environment variable "UNDEFINED"'):
config.from_json(json_config_file_3, envs_required=True) |
def main(cfg: CfgNode, output_dir: str, runner_class: Union[(str, Type[DefaultTask])], eval_only: bool=False) -> Union[(TrainNetOutput, TestNetOutput)]:
task_cls: Type[DefaultTask] = setup_after_launch(cfg, output_dir, runner_class)
task = task_cls.from_config(cfg, eval_only)
trainer_params = get_trainer_params(cfg)
last_checkpoint = os.path.join(cfg.OUTPUT_DIR, 'last.ckpt')
if PathManager.exists(last_checkpoint):
trainer_params['resume_from_checkpoint'] = last_checkpoint
logger.info(f'Resuming training from checkpoint: {last_checkpoint}.')
trainer = pl.Trainer(**trainer_params)
if eval_only:
_do_test(trainer, task)
return TestNetOutput(tensorboard_log_dir=trainer_params['logger'].log_dir, accuracy=task.eval_res, metrics=task.eval_res)
else:
model_configs = _do_train(cfg, trainer, task)
return TrainNetOutput(tensorboard_log_dir=trainer_params['logger'].log_dir, accuracy=task.eval_res, metrics=task.eval_res, model_configs=model_configs) |
class CriticalStrikeHandler(THBEventHandler):
interested = ['action_apply', 'action_before', 'action_after', 'action_shootdown', 'action_stage_action']
execute_after = ['AttackCardHandler', 'FrozenFrogHandler', 'ElementalReactorHandler', 'ReversedScalesHandler']
execute_before = ['MomijiShieldHandler', 'WineHandler']
def handle(self, evt_type, act):
if ((evt_type == 'action_before') and isinstance(act, PrepareStage)):
ttags(act.target)['flan_targets'] = []
elif ((evt_type == 'action_before') and isinstance(act, FinalizeStage)):
if ttags(act.target)['flan_cs']:
g = self.game
tgt = act.target
g.process_action(CriticalStrikeDropAction(tgt, tgt))
elif ((evt_type == 'action_apply') and isinstance(act, BaseAttack)):
(src, tgt) = (act.source, act.target)
if (not self.in_critical_strike(src)):
return act
if isinstance(act, BaseAttack):
g = self.game
g.process_action(CriticalStrikeAction(src, tgt))
ttags(src)['flan_targets'].append(tgt)
act.damage += 1
elif ((evt_type == 'action_before') and isinstance(act, Damage)):
g = self.game
pact = g.action_stack[(- 1)]
if (not isinstance(pact, BaseDuel)):
return act
(src, tgt) = (act.source, act.target)
if (not self.in_critical_strike(src)):
return act
g.process_action(CriticalStrikeAction(src, tgt))
act.amount += 1
elif ((evt_type == 'action_after') and isinstance(act, Damage)):
(src, tgt) = (act.source, act.target)
if (not self.in_critical_strike(src)):
return act
ttags(src)['flan_cs'] = True
elif ((evt_type == 'action_before') and isinstance(act, ActionStageLaunchCard)):
src = act.source
if (not self.in_critical_strike(src)):
return act
if act.card.is_card(AttackCard):
act._[self.__class__] = 'vitality-consumed'
ttags(src)['vitality'] -= 1
elif (evt_type == 'action_shootdown'):
if (not isinstance(act, ActionStageLaunchCard)):
return act
c = act.card
src = act.source
if (not self.in_critical_strike(src)):
return act
if (not c.is_card(AttackCard)):
return act
if src.has_skill(ElementalReactorSkill):
return act
if (ttags(src)['vitality'] > 0):
return act
if act._[self.__class__]:
return act
if (set(act.target_list) & set(ttags(src)['flan_targets'])):
raise CriticalStrikeLimit
return act
elif (evt_type == 'action_stage_action'):
tgt = act
if (not self.in_critical_strike(tgt)):
return act
AttackCardVitalityHandler.disable(tgt)
return act
def in_critical_strike(self, p):
g = self.game
try:
current = PlayerTurn.get_current(g).target
except IndexError:
return False
return ((current is p) and p.has_skill(CriticalStrike)) |
class TestMergeKeysValues():
yaml_str = dedent(' - &mx\n a: x1\n b: x2\n c: x3\n - &my\n a: y1\n b: y2 # masked by the one in &mx\n d: y4\n -\n a: 1\n <<: [*mx, *my]\n m: 6\n ')
def test_merge_for(self):
from srsly.ruamel_yaml import safe_load
d = safe_load(self.yaml_str)
data = round_trip_load(self.yaml_str)
count = 0
for x in data[2]:
count += 1
print(count, x)
assert (count == len(d[2]))
def test_merge_keys(self):
from srsly.ruamel_yaml import safe_load
d = safe_load(self.yaml_str)
data = round_trip_load(self.yaml_str)
count = 0
for x in data[2].keys():
count += 1
print(count, x)
assert (count == len(d[2]))
def test_merge_values(self):
from srsly.ruamel_yaml import safe_load
d = safe_load(self.yaml_str)
data = round_trip_load(self.yaml_str)
count = 0
for x in data[2].values():
count += 1
print(count, x)
assert (count == len(d[2]))
def test_merge_items(self):
from srsly.ruamel_yaml import safe_load
d = safe_load(self.yaml_str)
data = round_trip_load(self.yaml_str)
count = 0
for x in data[2].items():
count += 1
print(count, x)
assert (count == len(d[2]))
def test_len_items_delete(self):
from srsly.ruamel_yaml import safe_load
from srsly.ruamel_yaml.compat import PY3
d = safe_load(self.yaml_str)
data = round_trip_load(self.yaml_str)
x = data[2].items()
print('d2 items', d[2].items(), len(d[2].items()), x, len(x))
ref = len(d[2].items())
print('ref', ref)
assert (len(x) == ref)
del data[2]['m']
if PY3:
ref -= 1
assert (len(x) == ref)
del data[2]['d']
if PY3:
ref -= 1
assert (len(x) == ref)
del data[2]['a']
if PY3:
ref -= 1
assert (len(x) == ref)
def test_issue_196_cast_of_dict(self, capsys):
from srsly.ruamel_yaml import YAML
yaml = YAML()
mapping = yaml.load(' anchored: &anchor\n a : 1\n\n mapping:\n <<: *anchor\n b: 2\n ')['mapping']
for k in mapping:
print('k', k)
for k in mapping.copy():
print('kc', k)
print('v', list(mapping.keys()))
print('v', list(mapping.values()))
print('v', list(mapping.items()))
print(len(mapping))
print('-----')
assert ('a' in mapping)
x = {}
for k in mapping:
x[k] = mapping[k]
assert ('a' in x)
assert ('a' in mapping.keys())
assert (mapping['a'] == 1)
assert (mapping.__getitem__('a') == 1)
assert ('a' in dict(mapping))
assert ('a' in dict(mapping.items()))
def test_values_of_merged(self):
from srsly.ruamel_yaml import YAML
yaml = YAML()
data = yaml.load(dedent(self.yaml_str))
assert (list(data[2].values()) == [1, 6, 'x2', 'x3', 'y4'])
def test_issue_213_copy_of_merge(self):
from srsly.ruamel_yaml import YAML
yaml = YAML()
d = yaml.load(' foo: &foo\n a: a\n foo2:\n <<: *foo\n b: b\n ')['foo2']
assert (d['a'] == 'a')
d2 = d.copy()
assert (d2['a'] == 'a')
print('d', d)
del d['a']
assert ('a' not in d)
assert ('a' in d2) |
('SENTENCES')
class Sentences(Node):
def forward(self, v, **kwargs):
sentences = tuple(self.split(v, separator=['.']))
return self.strip(sentences)
def strip(self, sentences):
if (len(sentences) == 0):
return sentences
elif (sentences[(- 1)] == ()):
return tuple(sentences[:(- 1)])
else:
return tuple(sentences)
def add_end(self, stc, end):
if (len(stc) == 0):
return stc
return (stc + end)
def split(self, v, separator):
result = ()
current = ''
for c in v:
if (c in separator):
result += ((current + c),)
current = ''
else:
current += c
if (len(current) > 0):
result += (current,)
if (len(result) == 0):
return ('',)
return result
def follow(self, x, **kwargs):
v = strip_next_token(x)
has_next_token = (v != x)
sentences = tuple(self.split(v, separator=['.']))
if has_next_token:
if ((len(sentences) > 0) and sentences[(- 1)].endswith('.')):
return fmap(('eos', self.strip(sentences)), ('*', (sentences + (NextToken,))))
else:
return fmap(('eos', self.strip(sentences)), ('*', tuple((sentences[:(- 1)] + ((sentences[(- 1)] + NextToken),)))))
else:
return fmap(('*', sentences))
def final(self, x, operands=None, result=None, **kwargs):
return x[0] |
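# Worked example for the sentence splitter above (assuming Node's constructor
# needs no arguments): text after the last period is kept as an unterminated
# sentence, and leading spaces are preserved.
assert (Sentences().forward('One. Two. Three') == ('One.', ' Two.', ' Three')) |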
class DiscoveryService(Service):
_stats_interval: int = 30
_max_neighbours_per_packet_cache = None
_max_pending_enrs: int = 20
_local_enr_refresh_interval: int = 60
def __init__(self, privkey: datatypes.PrivateKey, udp_port: int, tcp_port: int, bootstrap_nodes: Sequence[NodeAPI], event_bus: EndpointAPI, socket: trio.socket.SocketType, enr_db: ENRDatabaseAPI, enr_field_providers: Sequence[ENR_FieldProvider]=tuple()) -> None:
self.logger = get_logger('p2p.discovery.DiscoveryService')
self.privkey = privkey
self._event_bus = event_bus
self.enr_response_channels = ExpectedResponseChannels[Tuple[(ENRAPI, Hash32)]]()
self.pong_channels = ExpectedResponseChannels[Tuple[(Hash32, int)]]()
self.neighbours_channels = ExpectedResponseChannels[List[NodeAPI]]()
self.ping_channels = ExpectedResponseChannels[None]()
self.enr_field_providers = enr_field_providers
self.enr_db = enr_db
self._last_pong_at = LRU(2048)
self._local_enr_next_refresh: float = time.monotonic()
self._local_enr_lock = trio.Lock()
self._lookup_lock = trio.Lock()
self.parity_pong_tokens: Dict[(Hash32, Hash32)] = {}
if (socket.family != trio.socket.AF_INET):
raise ValueError('Invalid socket family')
elif (socket.type != trio.socket.SOCK_DGRAM):
raise ValueError('Invalid socket type')
self.socket = socket
(self.pending_enrs_producer, self.pending_enrs_consumer) = trio.open_memory_channel[Tuple[(NodeID, int)]](self._max_pending_enrs)
self._bootstrap_node_ids: List[NodeID] = []
for node in bootstrap_nodes:
self._bootstrap_node_ids.append(node.id)
try:
self.enr_db.get_enr(node.id)
except KeyError:
self.enr_db.set_enr(node.enr)
if (len(set(self._bootstrap_node_ids)) != len(self._bootstrap_node_ids)):
raise ValueError(f'Multiple bootnodes with the same ID are not allowed: {self._bootstrap_node_ids}')
self.this_node: NodeAPI = Node.from_pubkey_and_addr(self.pubkey, Address('127.0.0.1', udp_port, tcp_port))
self.routing = KademliaRoutingTable(self.this_node.id, constants.KADEMLIA_BUCKET_SIZE)
async def _init(self) -> None:
try:
enr = self.enr_db.get_enr(self.this_node.id)
except KeyError:
pass
else:
self.this_node = Node(enr)
(await self.maybe_update_local_enr(get_external_ipaddress(self.logger)))
@to_tuple
def bootstrap_nodes(self) -> Iterable[NodeAPI]:
for node_id in self._bootstrap_node_ids:
try:
enr = self.enr_db.get_enr(node_id)
except KeyError:
self.logger.exception('Bootnode not found in our DB')
else:
(yield Node(enr))
def is_bond_valid_with(self, node_id: NodeID) -> bool:
try:
pong_time = self._last_pong_at[node_id]
except KeyError:
return False
return (pong_time > (time.monotonic() - constants.KADEMLIA_BOND_EXPIRATION))
async def consume_datagrams(self) -> None:
while self.manager.is_running:
(await self.consume_datagram())
async def handle_new_upnp_mapping(self) -> None:
from trinity.components.builtin.upnp.events import UPnPMapping
async for event in self._event_bus.stream(UPnPMapping):
external_ip = event.ip
self.logger.debug('Got new external IP address via UPnP mapping: %s', external_ip)
(await self.maybe_update_local_enr(ipaddress.ip_address(external_ip)))
def get_peer_candidates(self, should_skip_fn: Callable[([NodeAPI], bool)], max_candidates: int) -> Tuple[(NodeAPI, ...)]:
candidates = []
skip_count = 0
for candidate in self.iter_nodes():
if should_skip_fn(candidate):
skip_count += 1
continue
candidates.append(candidate)
if (len(candidates) == max_candidates):
break
else:
log_msg = "Not enough nodes in routing table passed PeerCandidatesRequest's filter, "
if self._lookup_lock.locked():
log_msg += 'but not triggering random lookup as there is one in progress already'
else:
log_msg += 'triggering a random lookup in the background.'
self.manager.run_task(self.lookup_random)
self.logger.debug(log_msg)
self.logger.debug('Found %d peer candidates, skipped %d', len(candidates), skip_count)
return tuple(candidates)
async def handle_get_peer_candidates_requests(self) -> None:
async for event in self._event_bus.stream(PeerCandidatesRequest):
candidates = self.get_peer_candidates(event.should_skip_fn, event.max_candidates)
self.logger.debug('Broadcasting %d peer candidates', len(candidates))
(await self._event_bus.broadcast(event.expected_response_type()(candidates), event.broadcast_config()))
async def handle_get_random_bootnode_requests(self) -> None:
async for event in self._event_bus.stream(RandomBootnodeRequest):
nodes = tuple(self.get_random_bootnode())
self.logger.debug('Broadcasting random boot node: %s', nodes)
(await self._event_bus.broadcast(event.expected_response_type()(nodes), event.broadcast_config()))
async def run(self) -> None:
(await self._init())
self.logger.info('Running on %s', self.this_node.uri())
self.run_daemons_and_bootstrap()
(await self.manager.wait_finished())
def run_daemons_and_bootstrap(self) -> None:
self.manager.run_daemon_task(self.handle_new_upnp_mapping)
self.manager.run_daemon_task(self.handle_get_peer_candidates_requests)
self.manager.run_daemon_task(self.handle_get_random_bootnode_requests)
self.manager.run_daemon_task(self.report_stats)
self.manager.run_daemon_task(self.fetch_enrs)
self.manager.run_daemon_task(self.consume_datagrams)
self.manager.run_task(self.bootstrap)
async def fetch_enrs(self) -> None:
async with self.pending_enrs_consumer:
async with trio.open_nursery() as nursery:
async for (remote_id, enr_seq) in self.pending_enrs_consumer:
self.logger.debug2('Received request to fetch ENR for %s', encode_hex(remote_id))
nursery.start_soon(self._ensure_enr, remote_id, enr_seq)
async def _ensure_enr(self, node_id: NodeID, enr_seq: int) -> None:
if (not self.is_bond_valid_with(node_id)):
self.logger.debug('No valid bond with %s, cannot fetch its ENR', encode_hex(node_id))
return
try:
enr = self.enr_db.get_enr(node_id)
except KeyError:
self.logger.warning('Attempted to fetch ENR for Node (%s) not in our DB', encode_hex(node_id))
return
if (enr.sequence_number >= enr_seq):
self.logger.debug2('Already got latest ENR for %s', encode_hex(node_id))
return
node = Node(enr)
try:
(await self.request_enr(node))
except CouldNotRetrieveENR as e:
self.logger.debug('Failed to retrieve ENR for %s: %s', node, e)
async def report_stats(self) -> None:
async for _ in trio_utils.every(self._stats_interval):
self.logger.debug(' Stats ')
full_buckets = [bucket for bucket in self.routing.buckets if (len(bucket) >= self.routing.bucket_size)]
total_nodes = sum([len(bucket) for bucket in self.routing.buckets])
nodes_in_replacement_cache = sum([len(replacement_cache) for replacement_cache in self.routing.replacement_caches])
self.logger.debug('Routing table has %s nodes in %s buckets (%s of which are full), and %s nodes are in the replacement cache', total_nodes, len(self.routing.buckets), len(full_buckets), nodes_in_replacement_cache)
self.logger.debug('')
def update_routing_table(self, node: NodeAPI) -> None:
if (not self.is_bond_valid_with(node.id)):
self.logger.warning("Attempted to add node to RT when we haven't bonded before")
eviction_candidate = self.routing.update(node.id)
if eviction_candidate:
self.logger.debug2("Routing table's bucket is full, couldn't add %s. Checking if %s is still responding, will evict if not", node, eviction_candidate)
self.manager.run_task(self.bond, eviction_candidate)
try:
self.enr_db.set_enr(node.enr)
except OldSequenceNumber:
self.logger.exception('Attempted to overwrite ENR of %s with a previous version', node, stack_info=True)
async def bond(self, node_id: NodeID) -> bool:
if (node_id == self.this_node.id):
self.logger.warning("Attempted to bond with self; this shouldn't happen")
return False
try:
node = Node(self.enr_db.get_enr(node_id))
except KeyError:
self.logger.exception("Attempted to bond with node that doesn't exist in our DB")
return False
self.logger.debug2('Starting bond process with %s', node)
if self.is_bond_valid_with(node_id):
self.logger.debug2('Bond with %s is still valid, not doing it again', node)
return True
token = (await self.send_ping_v4(node))
(send_chan, recv_chan) = trio.open_memory_channel[Tuple[(Hash32, int)]](1)
try:
with trio.fail_after(constants.KADEMLIA_REQUEST_TIMEOUT):
(received_token, enr_seq) = (await self.pong_channels.receive_one(node, send_chan, recv_chan))
except AlreadyWaitingDiscoveryResponse:
self.logger.debug('Bonding failed, already waiting pong from %s', node)
return False
except trio.TooSlowError:
self.logger.debug2('Bonding with %s timed out', node)
return False
if (received_token != token):
self.logger.info('Bonding with %s failed, expected pong with token %s, but got %s', node, token, received_token)
self.routing.remove(node.id)
return False
(ping_send_chan, ping_recv_chan) = trio.open_memory_channel[None](1)
try:
with trio.move_on_after(constants.KADEMLIA_REQUEST_TIMEOUT):
(await self.ping_channels.receive_one(node, ping_send_chan, ping_recv_chan))
except AlreadyWaitingDiscoveryResponse:
self.logger.debug('bonding failed, already waiting for ping')
return False
self.logger.debug2('bonding completed successfully with %s', node)
if (enr_seq is not None):
self.schedule_enr_retrieval(node.id, enr_seq)
return True
def schedule_enr_retrieval(self, node_id: NodeID, enr_seq: int) -> None:
self.logger.debug2('scheduling ENR retrieval from %s', encode_hex(node_id))
try:
self.pending_enrs_producer.send_nowait((node_id, enr_seq))
except trio.WouldBlock:
self.logger.warning('Failed to schedule ENR retrieval; channel buffer is full')
async def request_enr(self, remote: NodeAPI) -> ENRAPI:
(await self.bond(remote.id))
token = (await self.send_enr_request(remote))
(send_chan, recv_chan) = trio.open_memory_channel[Tuple[(ENRAPI, Hash32)]](1)
try:
with trio.fail_after(constants.KADEMLIA_REQUEST_TIMEOUT):
(enr, received_token) = (await self.enr_response_channels.receive_one(remote, send_chan, recv_chan))
except trio.TooSlowError:
raise CouldNotRetrieveENR(f'Timed out waiting for ENR from {remote}')
except AlreadyWaitingDiscoveryResponse:
raise CouldNotRetrieveENR(f'Already waiting for ENR from {remote}')
if (received_token != token):
raise CouldNotRetrieveENR(f'Got ENR from {remote} with token {received_token!r} but expected {token!r}')
return enr
async def _generate_local_enr(self, sequence_number: int, ip_address: Optional[ipaddress.IPv4Address]=None) -> ENRAPI:
if (ip_address is None):
ip_address = ipaddress.ip_address(self.this_node.address.ip)
kv_pairs = {IDENTITY_SCHEME_ENR_KEY: V4IdentityScheme.id, V4IdentityScheme.public_key_enr_key: self.pubkey.to_compressed_bytes(), IP_V4_ADDRESS_ENR_KEY: ip_address.packed, UDP_PORT_ENR_KEY: self.this_node.address.udp_port, TCP_PORT_ENR_KEY: self.this_node.address.tcp_port}
for field_provider in self.enr_field_providers:
(key, value) = (await field_provider())
if (key in kv_pairs):
raise AssertionError('ENR field provider attempted to override already used key: %s' % key)
kv_pairs[key] = value
unsigned_enr = UnsignedENR(sequence_number, kv_pairs)
return unsigned_enr.to_signed_enr(self.privkey.to_bytes())
async def get_local_enr(self) -> ENRAPI:
if (self._local_enr_next_refresh <= time.monotonic()):
(await self.maybe_update_local_enr())
return self.this_node.enr
async def maybe_update_local_enr(self, ip_address: Optional[ipaddress.IPv4Address]=None) -> None:
async with self._local_enr_lock:
(await self._maybe_update_local_enr(ip_address))
async def _maybe_update_local_enr(self, ip_address: Optional[ipaddress.IPv4Address]=None) -> None:
self._local_enr_next_refresh = (time.monotonic() + self._local_enr_refresh_interval)
current_enr = (await self._generate_local_enr(self.this_node.enr.sequence_number, ip_address))
if (current_enr == self.this_node.enr):
return
enr = (await self._generate_local_enr((self.this_node.enr.sequence_number + 1), ip_address))
self.this_node = Node(enr)
self.logger.info('Node details changed, generated new local ENR with sequence number %d', enr.sequence_number)
self.enr_db.set_enr(enr)
async def get_local_enr_seq(self) -> int:
enr = (await self.get_local_enr())
return enr.sequence_number
async def wait_neighbours(self, remote: NodeAPI) -> Tuple[(NodeAPI, ...)]:
neighbours: List[NodeAPI] = []
(send_chan, recv_chan) = trio.open_memory_channel[List[NodeAPI]](1)
with trio.move_on_after(constants.KADEMLIA_REQUEST_TIMEOUT) as cancel_scope:
gen = self.neighbours_channels.receive(remote, send_chan, recv_chan)
async with aclosing(gen):
async for batch in gen:
self.logger.debug2('got expected neighbours response from %s: %s', remote, batch)
neighbours.extend(batch)
if (len(neighbours) >= constants.KADEMLIA_BUCKET_SIZE):
break
self.logger.debug2('got expected neighbours response from %s', remote)
if cancel_scope.cancelled_caught:
self.logger.debug2('timed out waiting for %d neighbours from %s, got only %d', constants.KADEMLIA_BUCKET_SIZE, remote, len(neighbours))
return tuple((n for n in neighbours if (n != self.this_node)))
async def lookup(self, target_key: bytes) -> Tuple[(NodeAPI, ...)]:
async with self._lookup_lock:
return (await self._lookup(target_key))
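# Iterative Kademlia lookup: repeatedly query the closest not-yet-asked nodes
# (up to KADEMLIA_FIND_CONCURRENCY per round) for neighbours of the target,
# merge and re-sort the results by distance, and stop once no closer unasked
# node remains.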
async def _lookup(self, target_key: bytes) -> Tuple[(NodeAPI, ...)]:
if (len(target_key) != (constants.KADEMLIA_PUBLIC_KEY_SIZE // 8)):
raise ValueError(f'Invalid lookup target ({target_key!r}). Length is not 64')
target_id = NodeID(keccak(target_key))
nodes_asked: Set[NodeAPI] = set()
nodes_seen: Set[NodeAPI] = set()
async def _find_node(target: bytes, remote: NodeAPI) -> Tuple[(NodeAPI, ...)]:
(await self.send_find_node_v4(remote, target))
candidates = (await self.wait_neighbours(remote))
if (not candidates):
self.logger.debug2('got no neighbors from %s, returning', remote)
return tuple()
all_candidates = tuple((c for c in candidates if (c not in nodes_seen)))
candidates = tuple((c for c in all_candidates if ((not self.ping_channels.already_waiting_for(c)) and (not self.pong_channels.already_waiting_for(c)))))
self.logger.debug2('got %s new neighbors', len(candidates))
self._ensure_nodes_are_in_db(candidates)
nodes_seen.update(candidates)
bonded = (await trio_utils.gather(*((self.bond, c.id) for c in candidates)))
self.logger.debug2('bonded with %s neighbors', bonded.count(True))
return tuple((c for c in candidates if bonded[candidates.index(c)]))
def _exclude_if_asked(nodes: Iterable[NodeAPI]) -> List[NodeAPI]:
nodes_to_ask = list(set(nodes).difference(nodes_asked))
return sort_by_distance(nodes_to_ask, target_id)[:constants.KADEMLIA_FIND_CONCURRENCY]
closest = list(self.get_neighbours(target_id))
self.logger.debug('starting lookup; initial neighbours: %s', closest)
nodes_to_ask = _exclude_if_asked(closest)
if (not nodes_to_ask):
self.logger.warning("No nodes found in routing table, can't perform lookup")
return tuple()
while nodes_to_ask:
self.logger.debug2('node lookup; querying %s', nodes_to_ask)
nodes_asked.update(nodes_to_ask)
next_find_node_queries = ((_find_node, target_key, n) for n in nodes_to_ask if (not self.neighbours_channels.already_waiting_for(n)))
results = (await trio_utils.gather(*next_find_node_queries))
for candidates in results:
closest.extend(candidates)
closest = sort_by_distance(eth_utils.toolz.unique(closest), target_id)[:constants.KADEMLIA_BUCKET_SIZE]
nodes_to_ask = _exclude_if_asked(closest)
self.logger.debug('lookup finished for target %s; closest neighbours: %s', to_hex(target_id), closest)
return tuple(closest)
async def lookup_random(self) -> Tuple[(NodeAPI, ...)]:
target_key = int_to_big_endian(secrets.randbits(constants.KADEMLIA_PUBLIC_KEY_SIZE)).rjust((constants.KADEMLIA_PUBLIC_KEY_SIZE // 8), b'\x00')
return (await self.lookup(target_key))
def _ensure_nodes_are_in_db(self, nodes: Tuple[(NodeAPI, ...)]) -> None:
for node in nodes:
try:
self.enr_db.set_enr(node.enr)
except OldSequenceNumber:
self.logger.debug2('DB entry for %s has a more recent ENR, keeping that', node)
def get_random_bootnode(self) -> Iterator[NodeAPI]:
if self.bootstrap_nodes:
(yield random.choice(self.bootstrap_nodes))
else:
self.logger.warning('No bootnodes available')
def iter_nodes(self) -> Iterator[NodeAPI]:
for node_id in self.routing.iter_all_random():
try:
(yield Node(self.enr_db.get_enr(node_id)))
except KeyError:
self.logger.exception('Node with ID %s is in routing table but not in node DB', encode_hex(node_id))
@property
def pubkey(self) -> datatypes.PublicKey:
return self.privkey.public_key
def _get_handler(self, cmd: DiscoveryCommand) -> V4_HANDLER_TYPE:
if (cmd == CMD_PING):
return self.recv_ping_v4
elif (cmd == CMD_PONG):
return self.recv_pong_v4
elif (cmd == CMD_FIND_NODE):
return self.recv_find_node_v4
elif (cmd == CMD_NEIGHBOURS):
return self.recv_neighbours_v4
elif (cmd == CMD_ENR_REQUEST):
return self.recv_enr_request
elif (cmd == CMD_ENR_RESPONSE):
return self.recv_enr_response
else:
raise ValueError(f'Unknown command: {cmd}')
@classmethod
def _get_max_neighbours_per_packet(cls) -> int:
if (cls._max_neighbours_per_packet_cache is not None):
return cls._max_neighbours_per_packet_cache
cls._max_neighbours_per_packet_cache = _get_max_neighbours_per_packet()
return cls._max_neighbours_per_packet_cache
def invalidate_bond(self, node_id: NodeID) -> None:
try:
del self._last_pong_at[node_id]
except KeyError:
pass
async def bootstrap(self) -> None:
bonding_queries = []
for node in self.bootstrap_nodes:
uri = node.uri()
(pubkey, _, uri_tail) = uri.partition('@')
pubkey_head = pubkey[:16]
pubkey_tail = pubkey[(- 8):]
self.logger.debug('full-bootnode: %s', uri)
self.logger.debug('bootnode: %s...%s@%s', pubkey_head, pubkey_tail, uri_tail)
self.invalidate_bond(node.id)
bonding_queries.append((self.bond, node.id))
bonded = (await trio_utils.gather(*bonding_queries))
successful_bonds = len([item for item in bonded if (item is True)])
if (not successful_bonds):
self.logger.warning('Failed to bond with any bootstrap nodes %s', self.bootstrap_nodes)
return
else:
self.logger.info('Bonded with %d bootstrap nodes, performing initial lookup', successful_bonds)
(await self.lookup_random())
async def send(self, node: NodeAPI, msg_type: DiscoveryCommand, payload: Sequence[Any]) -> bytes:
message = _pack_v4(msg_type.id, payload, self.privkey)
try:
(await self.socket.sendto(message, (node.address.ip, node.address.udp_port)))
except OSError:
self.logger.exception('Unexpected error sending msg to %s', node.address)
return message
async def consume_datagram(self) -> None:
(datagram, (ip_address, port)) = (await self.socket.recvfrom(constants.DISCOVERY_DATAGRAM_BUFFER_SIZE))
address = Address(ip_address, port, port)
self.logger.debug2('Received datagram from %s', address)
self.manager.run_task(self.handle_msg, address, datagram)
async def handle_msg(self, address: AddressAPI, message: bytes) -> None:
try:
(remote_pubkey, cmd_id, payload, message_hash) = _unpack_v4(message)
except DefectiveMessage as e:
self.logger.error('error unpacking message (%s) from %s: %s', message, address, e)
return
try:
cmd = CMD_ID_MAP[cmd_id]
except KeyError:
self.logger.warning('Ignoring unknown msg type: %s; payload=%s', cmd_id, payload)
return
node = Node(self.lookup_and_maybe_update_enr(remote_pubkey, address))
self.logger.debug2('Received %s from %s with payload: %s', cmd.name, node, payload)
handler = self._get_handler(cmd)
(await handler(node, payload, message_hash))
def lookup_and_maybe_update_enr(self, pubkey: datatypes.PublicKey, address: AddressAPI) -> ENRAPI:
try:
enr = self.enr_db.get_enr(node_id_from_pubkey(pubkey))
except KeyError:
enr = create_stub_enr(pubkey, address)
self.enr_db.set_enr(enr)
else:
node = Node(enr)
if (node.address != address):
self.logger.debug('Received msg from %s, using an address (%s) different than what we have stored in our DB (%s). Overwriting DB record with new one.', node, address, node.address)
self.enr_db.delete_enr(enr.node_id)
enr = create_stub_enr(pubkey, address)
self.enr_db.set_enr(enr)
return enr
def _is_msg_expired(self, rlp_expiration: bytes) -> bool:
expiration = rlp.sedes.big_endian_int.deserialize(rlp_expiration)
if (time.time() > expiration):
self.logger.debug('Received message already expired')
return True
return False
async def recv_pong_v4(self, node: NodeAPI, payload: Sequence[Any], _: Hash32) -> None:
if (len(payload) < 3):
self.logger.warning('Ignoring PONG msg with invalid payload: %s', payload)
return
elif (len(payload) == 3):
(_, token, expiration) = payload[:3]
enr_seq = None
else:
(_, token, expiration, enr_seq) = payload[:4]
enr_seq = big_endian_to_int(enr_seq)
if self._is_msg_expired(expiration):
return
self.logger.debug2('<<< pong (v4) from %s (token == %s)', node, encode_hex(token))
(await self.process_pong_v4(node, token, enr_seq))
async def recv_neighbours_v4(self, remote: NodeAPI, payload: Sequence[Any], _: Hash32) -> None:
try:
channel = self.neighbours_channels.get_channel(remote)
except KeyError:
self.logger.debug('Unexpected NEIGHBOURS from %s, probably came too late', remote)
return
if (len(payload) < 2):
self.logger.warning('Ignoring NEIGHBOURS msg with invalid payload: %s', payload)
return
(nodes, expiration) = payload[:2]
if self._is_msg_expired(expiration):
return
try:
neighbours = _extract_nodes_from_payload(remote.address, nodes, self.logger)
except ValueError:
self.logger.debug('Malformed NEIGHBOURS packet from %s: %s', remote, nodes)
return
self.logger.debug2('<<< neighbours from %s: %s', remote, neighbours)
try:
(await channel.send(neighbours))
except trio.BrokenResourceError:
pass
async def recv_ping_v4(self, remote: NodeAPI, payload: Sequence[Any], message_hash: Hash32) -> None:
if (remote.id == self.this_node.id):
self.logger.info('Invariant: received ping from this_node: %s', remote)
return
if (len(payload) < 4):
self.logger.warning('Ignoring PING msg with invalid payload: %s', payload)
return
elif (len(payload) == 4):
(_, _, _, expiration) = payload[:4]
enr_seq = None
else:
(_, _, _, expiration, enr_seq) = payload[:5]
enr_seq = big_endian_to_int(enr_seq)
self.logger.debug2('<<< ping(v4) from %s, enr_seq=%s', remote, enr_seq)
if self._is_msg_expired(expiration):
return
try:
channel = self.ping_channels.get_channel(remote)
except KeyError:
pass
else:
try:
(await channel.send(None))
except trio.BrokenResourceError:
pass
(await self.send_pong_v4(remote, message_hash))
if (not self.is_bond_valid_with(remote.id)):
self.manager.run_task(self.bond, remote.id)
async def recv_find_node_v4(self, node: NodeAPI, payload: Sequence[Any], _: Hash32) -> None:
if (len(payload) < 2):
self.logger.warning('Ignoring FIND_NODE msg with invalid payload: %s', payload)
return
(target, expiration) = payload[:2]
self.logger.debug2('<<< find_node from %s', node)
if self._is_msg_expired(expiration):
return
if (not self.is_bond_valid_with(node.id)):
self.logger.debug("Ignoring find_node request from node (%s) we haven't bonded with", node)
return
target_id = NodeID(keccak(target))
(await self.send_neighbours_v4(node, self.get_neighbours(target_id)))
async def recv_enr_request(self, node: NodeAPI, payload: Sequence[Any], msg_hash: Hash32) -> None:
if (len(payload) < 1):
self.logger.warning('Ignoring ENR_REQUEST msg with invalid payload: %s', payload)
return
expiration = payload[0]
if self._is_msg_expired(expiration):
return
if (not self.is_bond_valid_with(node.id)):
self.logger.debug("Ignoring ENR_REQUEST from node (%s) we haven't bonded with", node)
return
enr = (await self.get_local_enr())
self.logger.debug('Sending local ENR to %s: %s', node, enr)
payload = (msg_hash, ENR.serialize(enr))
(await self.send(node, CMD_ENR_RESPONSE, payload))
async def recv_enr_response(self, node: NodeAPI, payload: Sequence[Any], _: Hash32) -> None:
if (len(payload) < 2):
self.logger.warning('Ignoring ENR_RESPONSE msg with invalid payload: %s', payload)
return
(token, serialized_enr) = payload[:2]
try:
enr = ENR.deserialize(serialized_enr)
except DeserializationError as error:
self.logger.info('Ignoring improperly encoded ENR_RESPONSE: %s', error)
return
except ValidationError as error:
self.logger.info('Ignoring ENR_RESPONSE with invalid ENR: %s', error)
return
try:
enr.validate_signature()
except (ValidationError, eth_keys.exceptions.ValidationError) as error:
self.logger.info('Ignoring ENR_RESPONSE with invalid ENR signature: %s', error)
return
try:
channel = self.enr_response_channels.get_channel(node)
except KeyError:
self.logger.debug('Unexpected ENR_RESPONSE from %s', node)
return
self.logger.debug2('Received ENR %s (%s) with expected response token: %s', enr, enr.items(), encode_hex(token))
try:
existing_enr = self.enr_db.get_enr(enr.node_id)
except KeyError:
self.logger.warning("No existing ENR for %s, this shouldn't happen", node)
else:
if (enr.sequence_number < existing_enr.sequence_number):
self.logger.warning('Remote %s sent us an ENR with seq number (%d) prior to the one we already have (%d). Ignoring it', encode_hex(enr.node_id), enr.sequence_number, existing_enr.sequence_number)
return
new_node = Node(enr)
if (new_node.address is None):
self.logger.debug('Received ENR with no endpoint info from %s, removing from DB/RT', node)
self.routing.remove(node.id)
try:
self.enr_db.delete_enr(node.id)
except KeyError:
pass
else:
self.update_routing_table(new_node)
try:
(await channel.send((enr, token)))
except trio.BrokenResourceError:
pass
async def send_enr_request(self, node: NodeAPI) -> Hash32:
message = (await self.send(node, CMD_ENR_REQUEST, [_get_msg_expiration()]))
token = Hash32(message[:MAC_SIZE])
self.logger.debug('Sending ENR request with token: %s', encode_hex(token))
return token
async def send_ping_v4(self, node: NodeAPI) -> Hash32:
version = rlp.sedes.big_endian_int.serialize(PROTO_VERSION)
expiration = _get_msg_expiration()
local_enr_seq = (await self.get_local_enr_seq())
payload = (version, self.this_node.address.to_endpoint(), node.address.to_endpoint(), expiration, int_to_big_endian(local_enr_seq))
message = (await self.send(node, CMD_PING, payload))
token = Hash32(message[:MAC_SIZE])
self.logger.debug2('>>> ping (v4) %s (token == %s)', node, encode_hex(token))
parity_token = keccak(message[(HEAD_SIZE + 1):])
self.parity_pong_tokens[parity_token] = token
return token
async def send_find_node_v4(self, node: NodeAPI, target_key: bytes) -> None:
if (len(target_key) != (constants.KADEMLIA_PUBLIC_KEY_SIZE // 8)):
raise ValueError(f'Invalid FIND_NODE target ({target_key!r}). Length is not 64')
expiration = _get_msg_expiration()
self.logger.debug2('>>> find_node to %s', node)
(await self.send(node, CMD_FIND_NODE, (target_key, expiration)))
async def send_pong_v4(self, node: NodeAPI, token: Hash32) -> None:
expiration = _get_msg_expiration()
self.logger.debug2('>>> pong %s', node)
local_enr_seq = (await self.get_local_enr_seq())
payload = (node.address.to_endpoint(), token, expiration, int_to_big_endian(local_enr_seq))
(await self.send(node, CMD_PONG, payload))
async def send_neighbours_v4(self, node: NodeAPI, neighbours: Tuple[(NodeAPI, ...)]) -> None:
nodes = []
sorted_neighbours: List[NodeAPI] = sorted(neighbours)
for n in sorted_neighbours:
nodes.append((n.address.to_endpoint() + [n.pubkey.to_bytes()]))
expiration = _get_msg_expiration()
max_neighbours = self._get_max_neighbours_per_packet()
for i in range(0, len(nodes), max_neighbours):
self.logger.debug2('>>> neighbours to %s: %s', node, sorted_neighbours[i:(i + max_neighbours)])
payload = NeighboursPacket(neighbours=nodes[i:(i + max_neighbours)], expiration=expiration)
(await self.send(node, CMD_NEIGHBOURS, payload))
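# Note on parity_pong_tokens (filled in by send_ping_v4): some clients
# (old Parity versions, reportedly) echo a pong token computed over only
# part of the ping packet, so send_ping_v4 records that alternative token
# and process_pong_v4 maps it back to the real one before matching.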
async def process_pong_v4(self, remote: NodeAPI, token: Hash32, enr_seq: int) -> None:
if (token in self.parity_pong_tokens):
token = self.parity_pong_tokens.pop(token)
else:
self.parity_pong_tokens = eth_utils.toolz.valfilter((lambda val: (val != token)), self.parity_pong_tokens)
try:
channel = self.pong_channels.get_channel(remote)
except KeyError:
self.logger.debug(f'Unexpected pong from {remote} with token {encode_hex(token)}')
return
self._last_pong_at[remote.id] = time.monotonic()
self.update_routing_table(remote)
try:
(await channel.send((token, enr_seq)))
except trio.BrokenResourceError:
pass
@to_tuple
def get_neighbours(self, target_id: NodeID) -> Iterator[NodeAPI]:
count = 0
for node_id in self.routing.iter_nodes_around(target_id):
try:
(yield Node(self.enr_db.get_enr(node_id)))
count += 1
except KeyError:
self.logger.exception('Node with ID %s is in routing table but not in node DB', encode_hex(node_id))
if (count == constants.NEIGHBOURS_RESPONSE_ITEMS):
break |
class Command(BaseCommand):
help = 'Calculate recommendations and similarities based on ratings'
def add_arguments(self, parser):
parser.add_argument('--verbose', action='store_true', dest='verbose', default=False, help='verbose mode')
def handle(self, *args, **options):
verbosity = int(options.get('verbosity', 0))
if options['verbose']:
warnings.warn('The `--verbose` option is being deprecated and it will be removed in the next release. Use `--verbosity` instead.', PendingDeprecationWarning)
verbosity = 1
if (verbosity == 0):
recommends_precompute()
else:
self.stdout.write('\nCalculation Started.\n')
start_time = datetime.now()
results = recommends_precompute()
end_time = datetime.now()
if (verbosity > 1):
for r in results:
self.stdout.write(('%d similarities and %d recommendations saved.\n' % (r['similar_count'], r['recommend_count'])))
rd = dateutil.relativedelta.relativedelta(end_time, start_time)
self.stdout.write(('Calculation finished in %d years, %d months, %d days, %d hours, %d minutes and %d seconds\n' % (rd.years, rd.months, rd.days, rd.hours, rd.minutes, rd.seconds))) |
def test_that_values_are_correctly_shaped():
content = dedent('roff-asc\n #ROFF file#\n #Creator: Ert#\n tag dimensions\n int nX 2\n int nY 2\n int nZ 2\n endtag\n tag parameter\n char name "parameter"\n array float data 8\n 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0\n endtag\n tag eof\n endtag\n ')
assert (import_roff(StringIO(content), 'parameter').tolist() == [[[2.0, 1.0], [4.0, 3.0]], [[6.0, 5.0], [8.0, 7.0]]]) |
def test_contract_types(tp_path):
(manifest, _) = ethpm.create_manifest(tp_path, ETHPM_CONFIG)
with tp_path.joinpath('build/contracts/EVMTester.json').open() as fp:
build = json.load(fp)
assert ('EVMTester' in manifest['contract_types'])
assert (manifest['contract_types']['EVMTester'] == {'contract_name': 'EVMTester', 'source_path': './EVMTester.sol', 'deployment_bytecode': {'bytecode': f"0x{build['bytecode']}"}, 'runtime_bytecode': {'bytecode': f"0x{build['deployedBytecode']}"}, 'abi': build['abi'], 'compiler': {'name': 'solc', 'version': build['compiler']['version'], 'settings': {'optimizer': build['compiler']['optimizer'], 'evmVersion': build['compiler']['evm_version']}}}) |
@pytest.mark.parametrize('text1,text2', [("You're happy", 'You are happy'), ("I'm happy", 'I am happy'), ("he's happy", "he's happy")])
def test_en_tagger_lemma_issue717(NLP, text1, text2):
doc1 = NLP(text1)
doc2 = NLP(text2)
assert (doc1[1].lemma_ == doc2[1].lemma_)
assert (doc1[1].lemma == doc2[1].lemma) |
def extractAxxelTranslationsBlogspotCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
def test_get_mitigation_stats(stats_updater, backend_db):
assert (stats_updater.get_exploit_mitigations_stats() == {'exploit_mitigations': []})
mitigation_plugin_summaries = [[['RELRO disabled', 'NX disabled', 'CANARY disabled', 'PIE disabled', 'FORTIFY_SOURCE disabled'], ['RELRO disabled', 'NX enabled', 'CANARY enabled', 'PIE disabled', 'FORTIFY_SOURCE disabled']]]
_add_objects_with_summary('exploit_mitigations', mitigation_plugin_summaries, backend_db)
stats = stats_updater.get_exploit_mitigations_stats().get('exploit_mitigations')
expected = [('NX enabled', 1, 0.5), ('NX disabled', 1, 0.5), ('Canary enabled', 1, 0.5), ('Canary disabled', 1, 0.5), ('RELRO disabled', 2, 1.0), ('PIE disabled', 2, 1.0), ('FORTIFY_SOURCE disabled', 2, 1.0)]
assert (stats == expected) |
class TupleVariation(object):
def __init__(self, axes, coordinates):
self.axes = axes.copy()
self.coordinates = list(coordinates)
def __repr__(self):
axes = ','.join(sorted([('%s=%s' % (name, value)) for (name, value) in self.axes.items()]))
return ('<TupleVariation %s %s>' % (axes, self.coordinates))
def __eq__(self, other):
return ((self.coordinates == other.coordinates) and (self.axes == other.axes))
def getUsedPoints(self):
if (None not in self.coordinates):
return frozenset()
used = frozenset([i for (i, p) in enumerate(self.coordinates) if (p is not None)])
return (used if used else None)
def hasImpact(self):
return any(((c is not None) for c in self.coordinates))
def toXML(self, writer, axisTags):
writer.begintag('tuple')
writer.newline()
for axis in axisTags:
value = self.axes.get(axis)
if (value is not None):
(minValue, value, maxValue) = value
defaultMinValue = min(value, 0.0)
defaultMaxValue = max(value, 0.0)
if ((minValue == defaultMinValue) and (maxValue == defaultMaxValue)):
writer.simpletag('coord', axis=axis, value=fl2str(value, 14))
else:
attrs = [('axis', axis), ('min', fl2str(minValue, 14)), ('value', fl2str(value, 14)), ('max', fl2str(maxValue, 14))]
writer.simpletag('coord', attrs)
writer.newline()
wrote_any_deltas = False
for (i, delta) in enumerate(self.coordinates):
if ((type(delta) == tuple) and (len(delta) == 2)):
writer.simpletag('delta', pt=i, x=delta[0], y=delta[1])
writer.newline()
wrote_any_deltas = True
elif (type(delta) == int):
writer.simpletag('delta', cvt=i, value=delta)
writer.newline()
wrote_any_deltas = True
elif (delta is not None):
log.error('bad delta format')
writer.comment(('bad delta #%d' % i))
writer.newline()
wrote_any_deltas = True
if (not wrote_any_deltas):
writer.comment('no deltas')
writer.newline()
writer.endtag('tuple')
writer.newline()
def fromXML(self, name, attrs, _content):
if (name == 'coord'):
axis = attrs['axis']
value = str2fl(attrs['value'], 14)
defaultMinValue = min(value, 0.0)
defaultMaxValue = max(value, 0.0)
minValue = str2fl(attrs.get('min', defaultMinValue), 14)
maxValue = str2fl(attrs.get('max', defaultMaxValue), 14)
self.axes[axis] = (minValue, value, maxValue)
elif (name == 'delta'):
if ('pt' in attrs):
point = safeEval(attrs['pt'])
x = safeEval(attrs['x'])
y = safeEval(attrs['y'])
self.coordinates[point] = (x, y)
elif ('cvt' in attrs):
cvt = safeEval(attrs['cvt'])
value = safeEval(attrs['value'])
self.coordinates[cvt] = value
else:
log.warning(('bad delta format: %s' % ', '.join(sorted(attrs.keys()))))
def compile(self, axisTags, sharedCoordIndices={}, pointData=None):
assert (set(self.axes.keys()) <= set(axisTags)), ('Unknown axis tag found.', self.axes.keys(), axisTags)
tupleData = []
auxData = []
if (pointData is None):
usedPoints = self.getUsedPoints()
if (usedPoints is None):
return (b'', b'')
pointData = self.compilePoints(usedPoints)
coord = self.compileCoord(axisTags)
flags = sharedCoordIndices.get(coord)
if (flags is None):
flags = EMBEDDED_PEAK_TUPLE
tupleData.append(coord)
intermediateCoord = self.compileIntermediateCoord(axisTags)
if (intermediateCoord is not None):
flags |= INTERMEDIATE_REGION
tupleData.append(intermediateCoord)
if pointData:
flags |= PRIVATE_POINT_NUMBERS
auxData.append(pointData)
auxData.append(self.compileDeltas())
auxData = b''.join(auxData)
tupleData.insert(0, struct.pack('>HH', len(auxData), flags))
return (b''.join(tupleData), auxData)
def compileCoord(self, axisTags):
result = []
axes = self.axes
for axis in axisTags:
triple = axes.get(axis)
if (triple is None):
result.append(b'\x00\x00')
else:
result.append(struct.pack('>h', fl2fi(triple[1], 14)))
return b''.join(result)
def compileIntermediateCoord(self, axisTags):
needed = False
for axis in axisTags:
(minValue, value, maxValue) = self.axes.get(axis, (0.0, 0.0, 0.0))
defaultMinValue = min(value, 0.0)
defaultMaxValue = max(value, 0.0)
if ((minValue != defaultMinValue) or (maxValue != defaultMaxValue)):
needed = True
break
if (not needed):
return None
minCoords = []
maxCoords = []
for axis in axisTags:
(minValue, value, maxValue) = self.axes.get(axis, (0.0, 0.0, 0.0))
minCoords.append(struct.pack('>h', fl2fi(minValue, 14)))
maxCoords.append(struct.pack('>h', fl2fi(maxValue, 14)))
return b''.join((minCoords + maxCoords))
@staticmethod
def decompileCoord_(axisTags, data, offset):
coord = {}
pos = offset
for axis in axisTags:
coord[axis] = fi2fl(struct.unpack('>h', data[pos:(pos + 2)])[0], 14)
pos += 2
return (coord, pos)
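# Point numbers are stored as a count followed by delta-encoded runs, each
# run prefixed with a header byte (run length - 1, with POINTS_ARE_WORDS set
# for 16-bit deltas). For example, points {2, 5, 6} compile to
# 03 02 02 03 01: count 3, byte-run header 02 (3 values), deltas 2, 3, 1.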
@staticmethod
def compilePoints(points):
if (not points):
return b'\x00'
points = list(points)
points.sort()
numPoints = len(points)
result = bytearray()
if (numPoints < 128):
result.append(numPoints)
else:
result.append(((numPoints >> 8) | 128))
result.append((numPoints & 255))
MAX_RUN_LENGTH = 127
pos = 0
lastValue = 0
while (pos < numPoints):
runLength = 0
headerPos = len(result)
result.append(0)
useByteEncoding = None
while ((pos < numPoints) and (runLength <= MAX_RUN_LENGTH)):
curValue = points[pos]
delta = (curValue - lastValue)
if (useByteEncoding is None):
useByteEncoding = (0 <= delta <= 255)
if (useByteEncoding and ((delta > 255) or (delta < 0))):
break
if useByteEncoding:
result.append(delta)
else:
result.append((delta >> 8))
result.append((delta & 255))
lastValue = curValue
pos += 1
runLength += 1
if useByteEncoding:
result[headerPos] = (runLength - 1)
else:
result[headerPos] = ((runLength - 1) | POINTS_ARE_WORDS)
return result
@staticmethod
def decompilePoints_(numPoints, data, offset, tableTag):
assert (tableTag in ('cvar', 'gvar'))
pos = offset
numPointsInData = data[pos]
pos += 1
if ((numPointsInData & POINTS_ARE_WORDS) != 0):
numPointsInData = (((numPointsInData & POINT_RUN_COUNT_MASK) << 8) | data[pos])
pos += 1
if (numPointsInData == 0):
return (range(numPoints), pos)
result = []
while (len(result) < numPointsInData):
runHeader = data[pos]
pos += 1
numPointsInRun = ((runHeader & POINT_RUN_COUNT_MASK) + 1)
point = 0
if ((runHeader & POINTS_ARE_WORDS) != 0):
points = array.array('H')
pointsSize = (numPointsInRun * 2)
else:
points = array.array('B')
pointsSize = numPointsInRun
points.frombytes(data[pos:(pos + pointsSize)])
if (sys.byteorder != 'big'):
points.byteswap()
assert (len(points) == numPointsInRun)
pos += pointsSize
result.extend(points)
absolute = []
current = 0
for delta in result:
current += delta
absolute.append(current)
result = absolute
del absolute
badPoints = {str(p) for p in result if ((p < 0) or (p >= numPoints))}
if badPoints:
log.warning(("point %s out of range in '%s' table" % (','.join(sorted(badPoints)), tableTag)))
return (result, pos)
def compileDeltas(self):
deltaX = []
deltaY = []
if (self.getCoordWidth() == 2):
for c in self.coordinates:
if (c is None):
continue
deltaX.append(c[0])
deltaY.append(c[1])
else:
for c in self.coordinates:
if (c is None):
continue
deltaX.append(c)
bytearr = bytearray()
self.compileDeltaValues_(deltaX, bytearr)
self.compileDeltaValues_(deltaY, bytearr)
return bytearr
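# Deltas are packed as runs of zeroes, signed bytes, or signed 16-bit words,
# each run prefixed by a header byte (flags | run length - 1). For example,
# [0, 0, 0, 5, -3, 300] compiles to 82 01 05 FD 40 01 2C: a 3-zero run
# (0x80|2), a 2-byte run (header 01) with 05 FD, and a 1-word run (0x40|0)
# with 012C.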
@staticmethod
def compileDeltaValues_(deltas, bytearr=None):
if (bytearr is None):
bytearr = bytearray()
pos = 0
numDeltas = len(deltas)
while (pos < numDeltas):
value = deltas[pos]
if (value == 0):
pos = TupleVariation.encodeDeltaRunAsZeroes_(deltas, pos, bytearr)
elif ((- 128) <= value <= 127):
pos = TupleVariation.encodeDeltaRunAsBytes_(deltas, pos, bytearr)
else:
pos = TupleVariation.encodeDeltaRunAsWords_(deltas, pos, bytearr)
return bytearr
@staticmethod
def encodeDeltaRunAsZeroes_(deltas, offset, bytearr):
pos = offset
numDeltas = len(deltas)
while ((pos < numDeltas) and (deltas[pos] == 0)):
pos += 1
runLength = (pos - offset)
while (runLength >= 64):
bytearr.append((DELTAS_ARE_ZERO | 63))
runLength -= 64
if runLength:
bytearr.append((DELTAS_ARE_ZERO | (runLength - 1)))
return pos
@staticmethod
def encodeDeltaRunAsBytes_(deltas, offset, bytearr):
pos = offset
numDeltas = len(deltas)
while (pos < numDeltas):
value = deltas[pos]
if (not ((- 128) <= value <= 127)):
break
if ((value == 0) and ((pos + 1) < numDeltas) and (deltas[(pos + 1)] == 0)):
break
pos += 1
runLength = (pos - offset)
while (runLength >= 64):
bytearr.append(63)
bytearr.extend(array.array('b', deltas[offset:(offset + 64)]))
offset += 64
runLength -= 64
if runLength:
bytearr.append((runLength - 1))
bytearr.extend(array.array('b', deltas[offset:pos]))
return pos
@staticmethod
def encodeDeltaRunAsWords_(deltas, offset, bytearr):
pos = offset
numDeltas = len(deltas)
while (pos < numDeltas):
value = deltas[pos]
if (value == 0):
break
if (((- 128) <= value <= 127) and ((pos + 1) < numDeltas) and ((- 128) <= deltas[(pos + 1)] <= 127)):
break
pos += 1
runLength = (pos - offset)
while (runLength >= 64):
bytearr.append((DELTAS_ARE_WORDS | 63))
a = array.array('h', deltas[offset:(offset + 64)])
if (sys.byteorder != 'big'):
a.byteswap()
bytearr.extend(a)
offset += 64
runLength -= 64
if runLength:
bytearr.append((DELTAS_ARE_WORDS | (runLength - 1)))
a = array.array('h', deltas[offset:pos])
if (sys.byteorder != 'big'):
a.byteswap()
bytearr.extend(a)
return pos
@staticmethod
def decompileDeltas_(numDeltas, data, offset):
result = []
pos = offset
while (len(result) < numDeltas):
runHeader = data[pos]
pos += 1
numDeltasInRun = ((runHeader & DELTA_RUN_COUNT_MASK) + 1)
if ((runHeader & DELTAS_ARE_ZERO) != 0):
result.extend(([0] * numDeltasInRun))
else:
if ((runHeader & DELTAS_ARE_WORDS) != 0):
deltas = array.array('h')
deltasSize = (numDeltasInRun * 2)
else:
deltas = array.array('b')
deltasSize = numDeltasInRun
deltas.frombytes(data[pos:(pos + deltasSize)])
if (sys.byteorder != 'big'):
deltas.byteswap()
assert (len(deltas) == numDeltasInRun)
pos += deltasSize
result.extend(deltas)
assert (len(result) == numDeltas)
return (result, pos)
@staticmethod
def getTupleSize_(flags, axisCount):
size = 4
if ((flags & EMBEDDED_PEAK_TUPLE) != 0):
size += (axisCount * 2)
if ((flags & INTERMEDIATE_REGION) != 0):
size += (axisCount * 4)
return size
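# Coord width distinguishes the two users of TupleVariation: 'gvar' deltas
# are (x, y) tuples (width 2) while 'cvar' deltas are plain numbers
# (width 1); an all-None variation has no determinable width (0).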
def getCoordWidth(self):
firstDelta = next((c for c in self.coordinates if (c is not None)), None)
if (firstDelta is None):
return 0
if (type(firstDelta) in (int, float)):
return 1
if ((type(firstDelta) is tuple) and (len(firstDelta) == 2)):
return 2
raise TypeError(('invalid type of delta; expected (int or float) number, or Tuple[number, number]: %r' % firstDelta))
def scaleDeltas(self, scalar):
if (scalar == 1.0):
return
coordWidth = self.getCoordWidth()
self.coordinates = [(None if (d is None) else ((d * scalar) if (coordWidth == 1) else ((d[0] * scalar), (d[1] * scalar)))) for d in self.coordinates]
def roundDeltas(self):
coordWidth = self.getCoordWidth()
self.coordinates = [(None if (d is None) else (otRound(d) if (coordWidth == 1) else (otRound(d[0]), otRound(d[1])))) for d in self.coordinates]
def calcInferredDeltas(self, origCoords, endPts):
from fontTools.varLib.iup import iup_delta
if (self.getCoordWidth() == 1):
raise TypeError("Only 'gvar' TupleVariation can have inferred deltas")
if (None in self.coordinates):
if (len(self.coordinates) != len(origCoords)):
raise ValueError(('Expected len(origCoords) == %d; found %d' % (len(self.coordinates), len(origCoords))))
self.coordinates = iup_delta(self.coordinates, origCoords, endPts)
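# Optimization drops deltas that IUP interpolation can reconstruct within
# `tolerance`, then keeps the sparse variant only if its compiled form is
# actually smaller than the dense one.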
def optimize(self, origCoords, endPts, tolerance=0.5, isComposite=False):
from fontTools.varLib.iup import iup_delta_optimize
if (None in self.coordinates):
return
deltaOpt = iup_delta_optimize(self.coordinates, origCoords, endPts, tolerance=tolerance)
if (None in deltaOpt):
if (isComposite and all(((d is None) for d in deltaOpt))):
deltaOpt = ([(0, 0)] + ([None] * (len(deltaOpt) - 1)))
varOpt = TupleVariation(self.axes, deltaOpt)
axisTags = sorted(self.axes.keys())
(tupleData, auxData) = self.compile(axisTags)
unoptimizedLength = (len(tupleData) + len(auxData))
(tupleData, auxData) = varOpt.compile(axisTags)
optimizedLength = (len(tupleData) + len(auxData))
if (optimizedLength < unoptimizedLength):
self.coordinates = varOpt.coordinates
def __imul__(self, scalar):
self.scaleDeltas(scalar)
return self
def __iadd__(self, other):
if (not isinstance(other, TupleVariation)):
return NotImplemented
deltas1 = self.coordinates
length = len(deltas1)
deltas2 = other.coordinates
if (len(deltas2) != length):
raise ValueError('cannot sum TupleVariation deltas with different lengths')
if (self.getCoordWidth() == 2):
for (i, d2) in zip(range(length), deltas2):
d1 = deltas1[i]
try:
deltas1[i] = ((d1[0] + d2[0]), (d1[1] + d2[1]))
except TypeError:
raise ValueError('cannot sum gvar deltas with inferred points')
else:
for (i, d2) in zip(range(length), deltas2):
d1 = deltas1[i]
if ((d1 is not None) and (d2 is not None)):
deltas1[i] = (d1 + d2)
elif ((d1 is None) and (d2 is not None)):
deltas1[i] = d2
return self |
def proc_run_args(args=()):
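# Normalize a run-args list: default to --quiet unless --verbose/--quiet was
# given explicitly, then append each entry of coconut_base_run_args whose
# flag (the text before any '=') is not already present.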
args = list(args)
if (('--verbose' not in args) and ('--quiet' not in args)):
args.append('--quiet')
for run_arg in coconut_base_run_args:
run_arg_name = run_arg.split('=', 1)[0]
if (not any((arg.startswith(run_arg_name) for arg in args))):
args.append(run_arg)
return args |
def main(n_hidden: int=256, dropout: float=0.2, n_iter: int=10, batch_size: int=128):
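# A thinc training loop (assumes thinc.api's chain/Relu/Softmax/Adam/Model,
# ml_datasets, tqdm and wasabi's `msg` are in scope): a two-layer ReLU MLP
# with softmax output is trained on MNIST, backpropagating Yh - Y, and dev
# accuracy is printed per epoch.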
model: Model = chain(Relu(nO=n_hidden, dropout=dropout), Relu(nO=n_hidden, dropout=dropout), Softmax())
((train_X, train_Y), (dev_X, dev_Y)) = ml_datasets.mnist()
model.initialize(X=train_X[:5], Y=train_Y[:5])
train_data = model.ops.multibatch(batch_size, train_X, train_Y, shuffle=True)
dev_data = model.ops.multibatch(batch_size, dev_X, dev_Y)
optimizer = Adam(0.001)
for i in range(n_iter):
for (X, Y) in tqdm(train_data, leave=False):
(Yh, backprop) = model.begin_update(X)
backprop((Yh - Y))
model.finish_update(optimizer)
correct = 0
total = 0
for (X, Y) in dev_data:
Yh = model.predict(X)
correct += (Yh.argmax(axis=1) == Y.argmax(axis=1)).sum()
total += Yh.shape[0]
score = (correct / total)
msg.row((i, f'{score:.3f}'), widths=(3, 5)) |
class OptionSeriesWordcloudSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
@property
def mapFunction(self):
return self._config_get(None)
@mapFunction.setter
def mapFunction(self, value: Any):
self._config(value, js_type=False)
@property
def mapTo(self):
return self._config_get(None)
@mapTo.setter
def mapTo(self, text: str):
self._config(text, js_type=False)
@property
def max(self):
return self._config_get(None)
@max.setter
def max(self, num: float):
self._config(num, js_type=False)
@property
def min(self):
return self._config_get(None)
@min.setter
def min(self, num: float):
self._config(num, js_type=False)
@property
def within(self):
return self._config_get(None)
@within.setter
def within(self, value: Any):
self._config(value, js_type=False) |
class SchemaT(Generic[(KT, VT)]):
key_type: Optional[_ModelArg] = None
value_type: Optional[_ModelArg] = None
key_serializer: Optional[CodecArg] = None
value_serializer: Optional[CodecArg] = None
allow_empty: bool = False
def __init__(self, *, key_type: _ModelArg=None, value_type: _ModelArg=None, key_serializer: CodecArg=None, value_serializer: CodecArg=None, allow_empty: Optional[bool]=None) -> None:
...
def update(self, *, key_type: _ModelArg=None, value_type: _ModelArg=None, key_serializer: CodecArg=None, value_serializer: CodecArg=None, allow_empty: Optional[bool]=None) -> None:
...
def loads_key(self, app: _AppT, message: _Message, *, loads: Optional[Callable]=None, serializer: CodecArg=None) -> KT:
...
def loads_value(self, app: _AppT, message: _Message, *, loads: Optional[Callable]=None, serializer: CodecArg=None) -> VT:
...
def dumps_key(self, app: _AppT, key: K, *, serializer: CodecArg=None, headers: OpenHeadersArg) -> Tuple[(Any, OpenHeadersArg)]:
...
def dumps_value(self, app: _AppT, value: V, *, serializer: CodecArg=None, headers: OpenHeadersArg) -> Tuple[(Any, OpenHeadersArg)]:
...
def on_dumps_key_prepare_headers(self, key: V, headers: OpenHeadersArg) -> OpenHeadersArg:
...
def on_dumps_value_prepare_headers(self, value: V, headers: OpenHeadersArg) -> OpenHeadersArg:
... |
class HalfRateGENSDRPHY(Module):
def __init__(self, pads, sys_clk_freq=0.0, cl=None):
pads = PHYPadsCombiner(pads)
addressbits = len(pads.a)
bankbits = len(pads.ba)
nranks = (1 if (not hasattr(pads, 'cs_n')) else len(pads.cs_n))
databits = len(pads.dq)
nphases = 2
cl = (get_default_cl(memtype='SDR', tck=(1 / sys_clk_freq)) if (cl is None) else cl)
full_rate_phy = GENSDRPHY(pads, (2 * sys_clk_freq), cl)
self.submodules += ClockDomainsRenamer('sys2x')(full_rate_phy)
phase_sel = Signal()
phase_sys = Signal()
phase_sys2x = Signal()
self.sync += phase_sys.eq(phase_sys2x)
self.sync.sys2x += phase_sys2x.eq((~ phase_sel))
self.sync.sys2x += phase_sel.eq(((~ phase_sel) & (phase_sys2x ^ phase_sys)))
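# phase_sel identifies which of the two sys2x half-cycles of the current sys
# cycle is active; comparing sys- and sys2x-domain samples of the same flag
# (phase_sys vs phase_sys2x) keeps the toggle aligned with the sys clock
# after reset.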
self.settings = PhySettings(phytype='HalfRateGENSDRPHY', memtype='SDR', databits=databits, dfi_databits=databits, nranks=nranks, nphases=nphases, rdphase=0, wrphase=0, cl=cl, read_latency=((full_rate_phy.settings.read_latency // 2) + 1), write_latency=0)
self.dfi = dfi = Interface(addressbits, bankbits, nranks, databits, nphases)
self.comb += Case(phase_sel, {0: dfi.phases[0].connect(full_rate_phy.dfi.phases[0], omit={'rddata', 'rddata_valid', 'wrdata_en'}), 1: dfi.phases[1].connect(full_rate_phy.dfi.phases[0], omit={'rddata', 'rddata_valid', 'wrdata_en'})})
wr_data_en = (dfi.phases[self.settings.wrphase].wrdata_en & (phase_sel == 0))
wr_data_en_d = Signal()
self.sync.sys2x += wr_data_en_d.eq(wr_data_en)
self.comb += full_rate_phy.dfi.phases[0].wrdata_en.eq((wr_data_en | wr_data_en_d))
rddata_d = Signal(databits)
self.sync.sys2x += rddata_d.eq(full_rate_phy.dfi.phases[0].rddata)
self.comb += [dfi.phases[0].rddata.eq(rddata_d), dfi.phases[0].rddata_valid.eq(full_rate_phy.dfi.phases[0].rddata_valid), dfi.phases[1].rddata.eq(full_rate_phy.dfi.phases[0].rddata), dfi.phases[1].rddata_valid.eq(full_rate_phy.dfi.phases[0].rddata_valid)] |
@frappe.whitelist(allow_guest=True)
def submit_feedback(name, feedback, rating, email=None, feedback_index=None):
email = validate_email_address(email)
if (feedback_name := frappe.db.get_value('Wiki Feedback', {'wiki_page': name})):
doc = frappe.get_doc('Wiki Feedback', feedback_name)
if feedback_index:
feedback_index = int(feedback_index)
doc.response[(feedback_index - 1)].rating = rating
doc.response[(feedback_index - 1)].feedback = feedback
doc.response[(feedback_index - 1)].email_id = email
else:
doc.append('response', {'rating': rating, 'feedback': feedback, 'email_id': email})
doc.save()
return (feedback_index if feedback_index else len(doc.response))
else:
doc = frappe.get_doc({'doctype': 'Wiki Feedback', 'wiki_page': name})
doc.append('response', {'rating': rating, 'feedback': feedback, 'email_id': email})
doc.insert()
return 1 |
def add_client(*client_type, add_to_ctx=True, add_func_arg=True):
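# Decorator factory that injects authenticated clients into a click command:
# the relevant client CLI options are appended to the wrapped function, an
# Elasticsearch and/or Kibana client is built (or reused if one was passed
# and still responds), and optionally stored on ctx.obj.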
from elasticsearch import Elasticsearch
from elasticsearch.exceptions import AuthenticationException
from kibana import Kibana
def _wrapper(func):
client_ops_dict = {}
client_ops_keys = {}
for c_type in client_type:
ops = client_options.get(c_type)
client_ops_dict.update(ops)
client_ops_keys[c_type] = list(ops)
if (not client_ops_dict):
raise ValueError(f'Unknown client: {client_type} in {func.__name__}')
client_ops = list(client_ops_dict.values())
@wraps(func)
@add_params(*client_ops)
def _wrapped(*args, **kwargs):
ctx: click.Context = next((a for a in args if isinstance(a, click.Context)), None)
es_client_args = {k: kwargs.pop(k, None) for k in client_ops_keys.get('elasticsearch', [])}
kibana_client_args = {k: kwargs.pop(k, es_client_args.get(k)) for k in client_ops_keys.get('kibana', [])}
if ('elasticsearch' in client_type):
elasticsearch_client: Elasticsearch = kwargs.get('elasticsearch_client')
try:
if (elasticsearch_client and isinstance(elasticsearch_client, Elasticsearch) and elasticsearch_client.info()):
pass
else:
elasticsearch_client = get_elasticsearch_client(**es_client_args)
except AuthenticationException:
elasticsearch_client = get_elasticsearch_client(**es_client_args)
if add_func_arg:
kwargs['elasticsearch_client'] = elasticsearch_client
if (ctx and add_to_ctx):
ctx.obj['es'] = elasticsearch_client
if ('kibana' in client_type):
kibana_client: Kibana = kwargs.get('kibana_client')
try:
with kibana_client:
if (kibana_client and isinstance(kibana_client, Kibana) and kibana_client.version):
pass
else:
kibana_client = get_kibana_client(**kibana_client_args)
except (requests.HTTPError, AttributeError):
kibana_client = get_kibana_client(**kibana_client_args)
if add_func_arg:
kwargs['kibana_client'] = kibana_client
if (ctx and add_to_ctx):
ctx.obj['kibana'] = kibana_client
return func(*args, **kwargs)
return _wrapped
return _wrapper |
def extend_imports(extension: dict):
global CSS_IMPORTS, JS_IMPORTS
for (alias, mod) in extension.items():
(css, js) = ({'website': mod.get('website', ''), 'modules': []}, {'website': mod.get('website', ''), 'modules': []})
if ('register' in mod):
js['register'] = mod['register']
if ('req' in mod):
(css['req'], js['req']) = ([], [])
for req in mod['req']:
if (req['alias'] in CSS_IMPORTS):
css['req'].append(req)
if (req['alias'] in JS_IMPORTS):
js['req'].append(req)
if ('modules' in mod):
for module in mod['modules']:
if ('cdnjs' not in module):
module['cdnjs'] = CDNJS_REPO
module['version'] = mod['version']
if module['script'].endswith('.js'):
js['modules'].append(module)
elif module['script'].endswith('.css'):
css['modules'].append(module)
if css['modules']:
CSS_IMPORTS[alias] = css
if js['modules']:
JS_IMPORTS[alias] = js
if ('services' in mod):
for service in mod['services']:
service['pmts'] = ';'.join([('%s=%s' % (k, v)) for (k, v) in service['values'].items()])
if (alias not in _SERVICES):
_SERVICES[alias] = {}
_SERVICES[alias].setdefault(service['type'], []).append(('%(url)s?%(pmts)s' % service)) |
def takes_stream(name, mode):
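# Decorator factory: if the argument named `name` was passed as a str or
# Path, open it with `mode` and call the function with the file object
# instead; otherwise assume it is already a stream and pass it through.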
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
kwargs = inspect.getcallargs(func, *args, **kwargs)
if ((name in kwargs) and isinstance(kwargs[name], (str, Path))):
with open(kwargs[name], mode) as f:
kwargs[name] = f
return func(**kwargs)
else:
return func(**kwargs)
return wrapper
return decorator |
def get_playing_data(play_data_map):
try:
playing_file = xbmc.Player().getPlayingFile()
except Exception as e:
log.error('get_playing_data : getPlayingFile() : {0}', e)
return None
log.debug('get_playing_data : getPlayingFile() : {0}', playing_file)
if (playing_file not in play_data_map):
infolabel_path_and_file = xbmc.getInfoLabel('Player.Filenameandpath')
log.debug('get_playing_data : Filenameandpath : {0}', infolabel_path_and_file)
if (infolabel_path_and_file not in play_data_map):
log.debug('get_playing_data : play data not found')
return None
else:
playing_file = infolabel_path_and_file
return play_data_map.get(playing_file) |
class dbus_service_method_deprecated():
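# Class-based decorator that records deprecated method names per D-Bus
# interface in the shared `deprecated` dict while leaving the wrapped call's
# behaviour unchanged.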
deprecated = {}
def __init__(self, interface=None):
self.interface = interface
if self.interface:
if (self.interface not in self.deprecated):
self.deprecated[self.interface] = set()
def __call__(self, func):
if self.interface:
self.deprecated[self.interface].add(func.__name__)
@wraps(func)
def _impl(*args, **kwargs):
return func(*args, **kwargs)
return _impl |
class RFileOp(FileOpBase):
def execute(self) -> None:
r_script = os.path.basename(self.filepath)
r_script_name = r_script.replace('.r', '')
r_script_output = (r_script_name + '.log')
try:
OpUtil.log_operation_info(f"executing R script using 'Rscript {r_script}' to '{r_script_output}'")
t0 = time.time()
with open(r_script_output, 'w') as log_file:
subprocess.run(['Rscript', r_script], stdout=log_file, stderr=subprocess.STDOUT, check=True)
duration = (time.time() - t0)
OpUtil.log_operation_info('R script execution completed', duration)
self.put_file_to_object_storage(r_script_output, r_script_output)
self.process_outputs()
except Exception as ex:
logger.error(f'Unexpected error: {sys.exc_info()[0]}')
logger.error(f'Error details: {ex}')
self.put_file_to_object_storage(r_script_output, r_script_output)
raise ex |
def test_qt_log_wrapper(qtlog, caplog):
logger = logging.getLogger(__name__).root
logger.setLevel('DEBUG')
QtCore.qDebug('should_show_in_qtlog_only')
QtCore.qInstallMessageHandler(utils.qt_log_wrapper)
QtCore.qDebug('should_show_in_logger_only')
qt_log_entries = [m.message.strip() for m in qtlog.records]
assert (len(qt_log_entries) == 1)
assert (qt_log_entries[0] == 'should_show_in_qtlog_only')
assert ('should_show_in_qtlog_only' not in caplog.text)
assert ('should_show_in_logger_only' in caplog.text)
assert ('[QT]' in caplog.text)
assert ('debug' in caplog.text) |
def create_property_layout(prop, name, color, pos, active, column):
class Layout(_LayoutNodeProperty):
def __init__(self, prop=prop, name=name, pos=pos, column=column, color=color, *args, **kwargs):
super().__init__(*args, prop=prop, name=name, pos=pos, column=column, color=color, **kwargs)
self.active = active
def __name__(self):
return layout_name
layout_name = ('Layout' + cased_name(name))
Layout.__name__ = layout_name
globals()[layout_name] = Layout
return Layout |
@click.command()
@click.option('--transcription-path', default=preprocess_text_config.transcription_path, type=click.Path(exists=True, file_okay=True, dir_okay=False))
@click.option('--cleaned-path', default=preprocess_text_config.cleaned_path)
@click.option('--train-path', default=preprocess_text_config.train_path)
@click.option('--val-path', default=preprocess_text_config.val_path)
@click.option('--config-path', default=preprocess_text_config.config_path, type=click.Path(exists=True, file_okay=True, dir_okay=False))
@click.option('--val-per-lang', default=preprocess_text_config.val_per_lang)
@click.option('--max-val-total', default=preprocess_text_config.max_val_total)
@click.option('--clean/--no-clean', default=preprocess_text_config.clean)
@click.option('-y', '--yml_config')
def preprocess(transcription_path: str, cleaned_path: Optional[str], train_path: str, val_path: str, config_path: str, val_per_lang: int, max_val_total: int, clean: bool, yml_config: str):
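# Pipeline: optionally clean the raw transcription file (normalizing each
# line's text into phones/tones/word2ph), drop duplicate or missing audio
# entries, split the remainder into train/val lists per language, and record
# speaker ids and file paths back into the JSON config.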
if ((cleaned_path == '') or (cleaned_path is None)):
cleaned_path = (transcription_path + '.cleaned')
if clean:
with open(cleaned_path, 'w', encoding='utf-8') as out_file:
with open(transcription_path, 'r', encoding='utf-8') as trans_file:
lines = trans_file.readlines()
if (len(lines) != 0):
for line in tqdm(lines):
try:
(utt, spk, language, text) = line.strip().split('|')
(norm_text, phones, tones, word2ph) = clean_text(text, language)
out_file.write('{}|{}|{}|{}|{}|{}|{}\n'.format(utt, spk, language, norm_text, ' '.join(phones), ' '.join([str(i) for i in tones]), ' '.join([str(i) for i in word2ph])))
except Exception as e:
print(line)
print(f'Failed to clean this line, skipping it. Error: {e}')
transcription_path = cleaned_path
spk_utt_map = defaultdict(list)
spk_id_map = {}
current_sid = 0
with open(transcription_path, 'r', encoding='utf-8') as f:
audioPaths = set()
countSame = 0
countNotFound = 0
for line in f.readlines():
(utt, spk, language, text, phones, tones, word2ph) = line.strip().split('|')
if (utt in audioPaths):
print(f'Duplicate audio path, skipping line: {line}')
countSame += 1
continue
if (not os.path.isfile(utt)):
print(f'Audio file not found, skipping: {utt}')
countNotFound += 1
continue
audioPaths.add(utt)
spk_utt_map[language].append(line)
if (spk not in spk_id_map.keys()):
spk_id_map[spk] = current_sid
current_sid += 1
print(f'Skipped duplicates: {countSame}, skipped missing files: {countNotFound}')
train_list = []
val_list = []
for (spk, utts) in spk_utt_map.items():
shuffle(utts)
val_list += utts[:val_per_lang]
train_list += utts[val_per_lang:]
shuffle(val_list)
if (len(val_list) > max_val_total):
train_list += val_list[max_val_total:]
val_list = val_list[:max_val_total]
with open(train_path, 'w', encoding='utf-8') as f:
for line in train_list:
f.write(line)
with open(val_path, 'w', encoding='utf-8') as f:
for line in val_list:
f.write(line)
json_config = json.load(open(config_path, encoding='utf-8'))
json_config['data']['spk2id'] = spk_id_map
json_config['data']['n_speakers'] = len(spk_id_map)
json_config['version'] = latest_version
json_config['data']['training_files'] = os.path.normpath(train_path).replace('\\', '/')
json_config['data']['validation_files'] = os.path.normpath(val_path).replace('\\', '/')
with open(config_path, 'w', encoding='utf-8') as f:
json.dump(json_config, f, indent=2, ensure_ascii=False)
print('Done!') |
def test_measurement_empty(tmpdir, merge_files_oneLR):
path = os.path.join(str(tmpdir), 'measurement-empty.dlis')
content = ['data/chap4-7/eflr/envelope.dlis.part', 'data/chap4-7/eflr/ndattrs/set/calibration-measurement.dlis.part', 'data/chap4-7/eflr/ndattrs/template/measurement.dlis.part', 'data/chap4-7/eflr/ndattrs/template/reference.dlis.part', 'data/chap4-7/eflr/ndattrs/template/dimension.dlis.part', 'data/chap4-7/eflr/ndattrs/object.dlis.part', 'data/chap4-7/eflr/ndattrs/objattr/empty-INT.dlis.part', 'data/chap4-7/eflr/ndattrs/objattr/empty-INT.dlis.part', 'data/chap4-7/eflr/ndattrs/objattr/empty-INT.dlis.part']
merge_files_oneLR(path, content)
with dlis.load(path) as (f, *_):
m = f.object('CALIBRATION-MEASUREMENT', 'OBJECT', 10, 0)
assert (m.samples.size == 0)
assert (m.reference.size == 0)
assert (m.std_deviation.size == 0)
assert (m.dimension == [])
assert (m.axis == []) |
class Harness(Unicorefuzz):
def __init__(self, config) -> None:
super().__init__(config)
self.fetched_regs = None
self.is_afl_child = False
def harness(self, input_file: str, wait: bool, debug: bool, trace: bool) -> None:
exit_func = (os._exit if (not os.getenv('UCF_DEBUG_CLEAN_SHUTDOWN')) else exit)
init_sleep = os.getenv('UCF_DEBUG_SLEEP_BEFORE_INIT')
if init_sleep:
print('[d] Sleeping. Unicorn init will start in {} seconds.'.format(init_sleep))
time.sleep(float(init_sleep))
if (debug or trace):
global CHILD_SHOULD_PRINT
CHILD_SHOULD_PRINT = True
(uc, entry, exits) = self.uc_init(input_file, wait, trace, verbose=(debug or trace))
if debug:
self.uc_debug(uc, input_file, exits)
print('[*] Debugger finished :)')
elif self.uc_fuzz(uc, input_file, exits):
print('[*] Done fuzzing. Cya.')
else:
print('[*] Finished one run (without AFL).')
def uc_init(self, input_file, wait: bool=False, trace: bool=False, verbose: bool=False) -> Tuple[(Uc, int, List[int])]:
config = self.config
uc = Uc(self.arch.unicorn_arch, self.arch.unicorn_mode)
if trace:
print('[+] Setting trace hooks')
uc.hook_add(UC_HOOK_BLOCK, unicorn_debug_block)
uc.hook_add(UC_HOOK_CODE, unicorn_debug_instruction, self)
uc.hook_add(((UC_HOOK_MEM_WRITE | UC_HOOK_MEM_READ) | UC_HOOK_MEM_FETCH), unicorn_debug_mem_access)
if wait:
self.wait_for_probe_wrapper()
if verbose:
print('[*] Reading from file {}'.format(input_file))
self.uc_load_registers(uc)
config.init_func(self, uc)
pc = self.uc_read_pc(uc)
self.map_known_mem(uc)
exits = self.calculate_exits(pc)
if (not exits):
raise ValueError('No exits found. Would run forever... Please set an exit address in config.py.')
uc.hook_add(UC_HOOK_MEM_UNMAPPED, unicorn_debug_mem_invalid_access, self)
if os.getenv('UCF_DEBUG_MEMORY'):
from pympler import muppy, summary
all_objects = muppy.get_objects()
sum1 = summary.summarize(all_objects)
summary.print_(sum1)
fork_sleep = os.getenv('UCF_DEBUG_SLEEP_BEFORE_FORK')
if fork_sleep:
print('[d] Sleeping. Forkserver will start in {} seconds.'.format(fork_sleep))
time.sleep(float(fork_sleep))
return (uc, pc, exits)
def uc_debug(self, uc: Uc, input_file: str, exits: List[int]) -> None:
print('[*] Loading debugger...')
from udbg import UnicornDbg
udbg = UnicornDbg()
if (uc.afl_forkserver_start(exits) != UC_AFL_RET_NO_AFL):
raise Exception('Debugger cannot run in AFL! Did you mean -t instead of -d?')
with open(input_file, 'rb') as f:
input = f.read()
try:
self.config.place_input(self, uc, input)
except Exception as ex:
raise Exception('[!] Error setting testcase for input {}: {}'.format(input, ex))
entry_point = self.uc_read_pc(uc)
exit_point = exits[0]
mappings = [(hex(start), start, ((end - start) + 1)) for (start, end, perms) in uc.mem_regions()]
udbg.initialize(emu_instance=uc, entry_point=entry_point, exit_point=exit_point, hide_binary_loader=True, mappings=mappings)
def dbg_except(x, y):
raise Exception(y)
os.kill = dbg_except
udbg.start()
print('[*] Done.')
def uc_fuzz(self, uc: Uc, input_file: str, exits: List[int]) -> bool:
def input_callback(uc: Uc, input: bytes, persistent_round: int, data: Harness):
self.config.place_input(data, uc, input)
try:
return uc.afl_fuzz(input_file=input_file, place_input_callback=input_callback, exits=exits, validate_crash_callback=None, persistent_iters=1, data=self)
except UcError as e:
print('[!] Execution failed with error: {} at address {:x}'.format(e, self.uc_read_pc(uc)))
def map_known_mem(self, uc: Uc):
for filename in os.listdir(self.statedir):
if ((not filename.endswith(REJECTED_ENDING)) and (filename not in self.fetched_regs)):
try:
address = int(filename, 16)
self.map_page(uc, address)
except Exception:
pass
def _raise_if_reject(self, base_address: int, dump_file_name: str) -> None:
if os.path.isfile((dump_file_name + REJECTED_ENDING)):
with open((dump_file_name + REJECTED_ENDING), 'r') as f:
err = ''.join(f.readlines()).strip()
raise Exception('Page at 0x{:016x} was rejected by target: {}'.format(base_address, err))
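# Page-fetch protocol (as implemented here): touching a file named after the
# requested address in `requestdir` asks the attached probe to dump that
# page into `statedir`; we then poll until the full PAGE_SIZE arrives,
# raising if a matching REJECTED_ENDING marker shows up instead.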
def fetch_page_blocking(self, address: int) -> Tuple[(int, bytes)]:
base_address = self.get_base(address)
input_file_name = os.path.join(self.requestdir, '{0:016x}'.format(address))
dump_file_name = os.path.join(self.statedir, '{0:016x}'.format(base_address))
if (base_address in self._mapped_page_cache.keys()):
return (base_address, self._mapped_page_cache[base_address])
else:
self._raise_if_reject(base_address, dump_file_name)
if (not os.path.isfile(dump_file_name)):
open(input_file_name, 'a').close()
if self.should_log:
print('Requesting page 0x{:016x} from `ucf attach`'.format(base_address))
while 1:
self._raise_if_reject(base_address, dump_file_name)
try:
with open(dump_file_name, 'rb') as f:
content = f.read()
if (len(content) < self.config.PAGE_SIZE):
time.sleep(0.001)
continue
self._mapped_page_cache[base_address] = content
return (base_address, content)
except IOError:
pass
def _fetch_register(self, name: str) -> int:
with open(os.path.join(self.statedir, name), 'r') as f:
return int(f.read())
def uc_load_registers(self, uc: Uc) -> None:
regs = self.fetch_all_regs()
for (key, value) in regs.items():
if (key in self.arch.ignored_regs):
continue
try:
uc.reg_write(uc_reg_const(self.arch, key), value)
except Exception as ex:
print('[d] Failed to load reg: {} ({})'.format(key, ex))
pass
def uc_reg_const(self, reg_name: str) -> int:
return uc_reg_const(self.arch, reg_name)
def uc_reg_read(self, uc: Uc, reg_name: str) -> int:
reg_name = reg_name.lower()
return uc.reg_read(self.uc_reg_const(reg_name))
def uc_reg_write(self, uc: Uc, reg_name: str, val: int) -> int:
reg_name = reg_name.lower()
return uc.reg_write(self.uc_reg_const(reg_name), val)
def uc_read_page(self, uc: Uc, addr: int) -> Tuple[(int, bytes)]:
base_addr = self.get_base(addr)
return (base_addr, uc.mem_read(base_addr, self.config.PAGE_SIZE))
def fetch_all_regs(self, refetch: bool=False) -> Dict[(str, int)]:
if (refetch or (self.fetched_regs is None)):
self.fetched_regs = {}
for reg_name in self.arch.reg_names:
try:
self.fetched_regs[reg_name] = self._fetch_register(reg_name)
except Exception as ex:
pass
return self.fetched_regs
def uc_read_pc(self, uc) -> int:
return uc.reg_read(uc_reg_const(self.arch, self.arch.pc_name))
def uc_write_pc(self, uc, val) -> int:
return uc.reg_write(uc_reg_const(self.arch, self.arch.pc_name), val) |