code stringlengths 281 23.7M |
|---|
class SqlSelection(SqlFilter):
    """Filter that narrows a database view to rows matching the given kwargs.

    Each keyword maps to a database column (via ``entryname_to_dbname``);
    values are cast through the column's dbkey and combined into one SQL
    ``IN (...)`` condition per column, AND-ed together.
    """

    def create_view_statement(self, db, old_view, new_view):
        """Return a ``CREATE TEMP VIEW`` statement selecting rows of *old_view*.

        Returns ``None`` when no kwargs constrain the selection, so the
        caller can skip creating a redundant view.
        """
        conditions = []
        for key, value in self.kwargs.items():
            # None / cml.ALL mean "no constraint on this column".
            if value is None or value is cml.ALL:
                continue
            name = entryname_to_dbname(key)
            dbkey = db.dbkeys[name]
            if not isinstance(value, (list, tuple)):
                value = [value]
            # Cast to the column's storage type, then quote for SQL.
            # BUG FIX: single quotes inside a value are now doubled ('') so a
            # value containing a quote cannot break out of the SQL literal.
            literals = [
                "'" + str(dbkey.cast(x)).replace("'", "''") + "'" for x in value
            ]
            conditions.append(f"{name} IN ({', '.join(literals)})")
        if not conditions:
            return None
        assert new_view != old_view
        return (
            f'CREATE TEMP VIEW IF NOT EXISTS {new_view} AS SELECT * '
            + f'FROM {old_view} WHERE '
            + ' AND '.join(conditions)
            + ';'
        )
def test_custom_sellmeier():
    """Exercise CustomSellmeier validation, gain handling, and from_dispersion."""

    def field(values):
        # Wrap raw values into a SpatialDataArray on the shared test grid.
        return td.SpatialDataArray(values, coords=dict(x=X, y=Y, z=Z))

    b1 = field(np.random.random((Nx, Ny, Nz)))
    c1 = field(np.random.random((Nx, Ny, Nz)))
    b2 = field(np.random.random((Nx, Ny, Nz)))
    c2 = field(np.random.random((Nx, Ny, Nz)))

    # Complex-valued coefficients are rejected (either slot of a pair).
    with pytest.raises(pydantic.ValidationError):
        btmp = field(np.random.random((Nx, Ny, Nz)) - 0.5j)
        mat = CustomSellmeier(coeffs=((b1, c1), (btmp, c2)))
    with pytest.raises(pydantic.ValidationError):
        ctmp = field(np.random.random((Nx, Ny, Nz)) - 0.5j)
        mat = CustomSellmeier(coeffs=((b1, c1), (b2, ctmp)))

    # Negative real C coefficients are rejected.
    with pytest.raises(pydantic.ValidationError):
        ctmp = field(np.random.random((Nx, Ny, Nz)) - 0.5)
        mat = CustomSellmeier(coeffs=((b1, c1), (b2, ctmp)))

    # Negative B coefficients are rejected unless gain is explicitly allowed.
    btmp = field(np.random.random((Nx, Ny, Nz)) - 0.5)
    with pytest.raises(pydantic.ValidationError):
        mat = CustomSellmeier(coeffs=((b1, c1), (btmp, c2)))
    mat = CustomSellmeier(coeffs=((b1, c1), (btmp, c2)), allow_gain=True)
    assert mat.pole_residue.allow_gain

    # Coefficients living on mismatched coordinates are rejected.
    with pytest.raises(pydantic.ValidationError):
        btmp = td.SpatialDataArray(
            np.random.random((Nx, Ny, Nz)), coords=dict(x=(X + 1), y=Y, z=Z)
        )
        mat = CustomSellmeier(coeffs=((b1, c2), (btmp, c2)))

    # A fully valid medium passes the shared method checks.
    mat = CustomSellmeier(coeffs=((b1, c1), (b2, c2)))
    verify_custom_dispersive_medium_methods(mat)
    assert mat.n_cfl == 1

    # from_dispersion: build the medium from index and dispersion data.
    n = field(2 + np.random.random((Nx, Ny, Nz)))
    dn_dwvl = field(-np.random.random((Nx, Ny, Nz)))
    mat = CustomSellmeier.from_dispersion(n=n, dn_dwvl=dn_dwvl, freq=2, interp_method='linear')
    verify_custom_dispersive_medium_methods(mat)
    assert mat.n_cfl == 1
def test_line_search():
    """Run the optimizer over every line_search / hessian_recalc combination.

    Collects (converged, cycles) per combination into a DataFrame so the
    sweep's behavior can be inspected from the test output.
    """
    ls = (True, False)
    recalc = (None, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
    results = []
    for line_search, hessian_recalc in it.product(ls, recalc):
        conv, cycs = run_opt(line_search, hessian_recalc)
        results.append((line_search, hessian_recalc, conv, cycs))
    columns = 'line_search hessian_recalc converged cycles'.split()
    df = pd.DataFrame(results, columns=columns)
    # BUG FIX: removed leftover `import pdb; pdb.set_trace()` — it would hang
    # any non-interactive test run. Print the sweep summary instead, which is
    # what the interactive breakpoint was being used to inspect.
    print(df)
class Font(object):
    """Plain value object describing a Win32-style logical font description."""

    def __init__(self, height=8, width=0, escapement=0, orientation=0,
                 weight=0, italic=0, underline=0, strikeout=0, char_set=0,
                 out_precision=0, clip_precision=0, quality=0,
                 pitch_and_family=0, face_name='MS Shell Dlg'):
        # Store every constructor argument verbatim as a same-named attribute.
        self.__dict__.update(
            height=height,
            width=width,
            escapement=escapement,
            orientation=orientation,
            weight=weight,
            italic=italic,
            underline=underline,
            strikeout=strikeout,
            char_set=char_set,
            out_precision=out_precision,
            clip_precision=clip_precision,
            quality=quality,
            pitch_and_family=pitch_and_family,
            face_name=face_name,
        )
class ImpalaAlgorithmConfig(AlgorithmConfig):
    """Typed configuration schema for an IMPALA-style actor/learner algorithm.

    Declares fields only; values are presumably populated by the
    AlgorithmConfig machinery (e.g. loaded from a config file) — TODO confirm.
    """

    # -- training schedule --
    n_epochs: int  # total number of training epochs
    epoch_length: int  # length of one epoch — confirm unit (updates vs env steps)
    patience: int  # early-stopping patience, presumably in epochs — confirm
    critic_burn_in_epochs: int  # epochs of critic-only training before full updates — confirm
    n_rollout_steps: int  # environment steps per rollout segment
    # -- optimization --
    lr: float  # learner learning rate
    gamma: float  # discount factor
    policy_loss_coef: float  # weight of the policy-gradient loss term
    value_loss_coef: float  # weight of the value-function loss term
    entropy_coef: float  # weight of the entropy bonus
    max_grad_norm: float  # gradient-norm clipping threshold
    device: str  # compute device string, e.g. 'cpu'/'cuda' — confirm format
    # -- actor/learner queueing --
    queue_out_of_sync_factor: float  # tolerated actor/learner staleness factor — confirm
    actors_batch_size: int  # batch size used per actor
    num_actors: int  # number of parallel actors
    # -- V-trace off-policy correction --
    vtrace_clip_rho_threshold: float  # rho-bar clipping for V-trace value targets
    vtrace_clip_pg_rho_threshold: float  # rho clipping for the policy-gradient term
    rollout_evaluator: RolloutEvaluator  # evaluator applied to collected rollouts
def _walk_subclasses(cls, indent=0):
if (cls.__module__ == '__main__'):
modname = 'puresnmp.types'
else:
modname = cls.__module__
cname = '.'.join([modname, cls.__qualname__])
ref = (':py:class:`%s`' % cname)
print('\n', (' ' * indent), '* ', ref)
for subclass in sorted(cls.__subclasses__(), key=(lambda x: (x.__module__ + x.__name__))):
_walk_subclasses(subclass, (indent + 1)) |
class OptionSeriesColumnpyramidSonificationDefaultspeechoptionsActivewhen(Options):
    """Option accessors for the sonification default-speech ``activeWhen`` block.

    BUG FIX: each getter was immediately shadowed by a same-named setter
    definition, leaving every getter unreachable dead code. Restored the
    standard property/setter pairing so both read and write access work
    (the decorators were evidently lost).
    """

    @property
    def crossingDown(self):
        """Current ``crossingDown`` option value (None when unset)."""
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        """Current ``crossingUp`` option value (None when unset)."""
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        """Current ``max`` option value (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Current ``min`` option value (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Current ``prop`` option value (None when unset)."""
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def expand_languages_for_user(list_languages: list) -> list:
    """Expand region-tagged languages (e.g. ``en-US``) into base + full forms.

    AUTO_DETECT entries pass through unchanged. For a tagged language, the
    base code is included only when it resolves to a known display name,
    and the full tag is always included (both normalized to two letters).

    NOTE(review): entries without a '-' (other than AUTO_DETECT) are dropped
    entirely — confirm this is intentional.
    """
    expanded = []
    for lang in list_languages:
        if lang == AUTO_DETECT:
            expanded.append(lang)
        elif '-' in lang:
            base = lang.split('-')[0]
            if 'Unknown language' not in Language.get(base).display_name():
                expanded.append(convert_three_two_letters(base))
            expanded.append(convert_three_two_letters(lang))
    return expanded
class Support(Skill):
    """Active character skill: hand the selected cards over to another player."""

    associated_action = SupportAction
    skill_category = ['character', 'active']
    target = t_OtherOne()
    usage = 'handover'
    no_drop = True
    no_reveal = True

    def check(self):
        """Usable only when every associated card sits in a hand/shown/equip zone."""
        cards = self.associated_cards
        if not cards:
            # Preserve the falsy value itself (empty list / None) as the result.
            return cards
        allowed = ('cards', 'showncards', 'equips')
        return all(
            card.resides_in is not None and card.resides_in.type in allowed
            for card in cards
        )
def main():
    """Emit a Verilog top module instantiating one GTPE2_CHANNEL per site with
    randomized parameters, and record the chosen parameters in params.json.

    Reads the attribute catalog from $FUZDIR/attrs.json; helper LUT wiring is
    produced by LutMaker so every clock port has a driven net.
    """
    print('\nmodule top(\n input wire in,\n output wire out\n);\n\nassign out = in;\n')
    luts = LutMaker()
    primitives_list = list()
    for (tile_name, tile_type, site_name, site_type) in gen_sites('GTPE2_CHANNEL'):
        # Per-site parameter bookkeeping, serialized to params.json at the end.
        params_list = list()
        params_dict = dict()
        params_dict['tile_type'] = tile_type
        params = dict()
        params['site'] = site_name
        verilog_attr = ''
        # NOTE(review): the assignment above is immediately overwritten — dead code.
        verilog_attr = '#('
        fuz_dir = os.getenv('FUZDIR', None)
        assert fuz_dir
        with open(os.path.join(fuz_dir, 'attrs.json'), 'r') as attrs_file:
            attrs = json.load(attrs_file)
        # IN_USE is true ~90% of the time (randint(0, 9) is falsy only for 0).
        in_use = bool(random.randint(0, 9))
        params['IN_USE'] = in_use
        if in_use:
            for (param, param_info) in attrs.items():
                param_type = param_info['type']
                param_values = param_info['values']
                param_digits = param_info['digits']
                if (param_type == INT):
                    value = random.choice(param_values)
                    value_str = value
                elif (param_type == BIN):
                    # Random value rendered as a sized Verilog binary literal.
                    value = random.randint(0, param_values[0])
                    value_str = "{digits}'b{value:0{digits}b}".format(value=value, digits=param_digits)
                elif (param_type in [BOOL, STR]):
                    value = random.choice(param_values)
                    value_str = verilog.quote(value)
                # NOTE(review): an unrecognized param_type would leave value/
                # value_str unbound (NameError) — assumes attrs.json only
                # contains INT/BIN/BOOL/STR entries; confirm.
                params[param] = value
                verilog_attr += '\n .{}({}),'.format(param, value_str)
        verilog_ports = ''
        # Clock inputs: randomize the IS_*_INVERTED attributes and tie each
        # port to a LUT-generated net.
        for param in ['TXUSRCLK', 'TXUSRCLK2', 'TXPHDLYTSTCLK', 'SIGVALIDCLK', 'RXUSRCLK', 'RXUSRCLK2', 'DRPCLK', 'DMONITORCLK', 'CLKRSVD0', 'CLKRSVD1']:
            is_inverted = random.randint(0, 1)
            params[param] = is_inverted
            verilog_attr += '\n .IS_{}_INVERTED({}),'.format(param, is_inverted)
            verilog_ports += '\n .{}({}),'.format(param, luts.get_next_output_net())
        # Drop the trailing comma before closing the attribute list.
        verilog_attr = verilog_attr.rstrip(',')
        verilog_attr += '\n)'
        print('(* KEEP, DONT_TOUCH, LOC="{}" *)'.format(site_name))
        # NOTE(review): the instance name is derived from tile_type, not
        # site_name — confirm that is intended.
        print('GTPE2_CHANNEL {attrs} {site} (\n {ports}\n);\n '.format(attrs=verilog_attr, site=tile_type.lower(), ports=verilog_ports.rstrip(',')))
        params_list.append(params)
        params_dict['params'] = params_list
        primitives_list.append(params_dict)
    for l in luts.create_wires_and_luts():
        print(l)
    print('endmodule')
    with open('params.json', 'w') as f:
        json.dump(primitives_list, f, indent=2)
def upgrade():
    """Recreate the ``resourcetypes`` Postgres enum with the current value set.

    Postgres cannot remove values from an enum in place, so the old type is
    renamed aside, a fresh type is created, the dependent columns are
    converted through a text cast, and the old type is dropped.
    """
    statements = (
        'alter type resourcetypes rename to resourcetypesold',
        "create type resourcetypes as enum ('system', 'data_use', 'data_category', 'data_subject', 'privacy_declaration');",
        'alter table plus_custom_field_definition alter column resource_type type resourcetypes using resource_type::text::resourcetypes',
        'alter table plus_custom_field alter column resource_type type resourcetypes using resource_type::text::resourcetypes',
        'drop type resourcetypesold;',
    )
    for statement in statements:
        op.execute(statement)
def main():
    # CLI entry point: destroy the security group for the given app/env/region.
    # (Deliberately no docstring: main.__doc__ feeds argparse's description,
    # which was None in the original.)
    logging.basicConfig(format=LOGGING_FORMAT)
    parser = argparse.ArgumentParser(description=main.__doc__)
    add_debug(parser)
    add_app(parser)
    add_env(parser)
    add_region(parser)
    args = parser.parse_args()
    # Scope the chosen log level to this tool's top-level package only.
    logging.getLogger(__package__.split('.')[0]).setLevel(args.debug)
    # BUG FIX: 'assert' is stripped under `python -O`, silently skipping the
    # failure check; fail explicitly with a nonzero exit status instead.
    if not destroy_sg(**vars(args)):
        raise SystemExit('destroy_sg failed')
class SampleGeneratorConfig(GeneratorConfig):
    """Sampling-based generation settings: temperature, top-k and top-p."""

    temperature: float = 1.0
    top_k: int = 0
    top_p: float = 1.0

    def logits_transform(self) -> LogitsTransform:
        """Compose the configured sampling transforms into one compound transform.

        The vocabulary mask (when configured) is applied first, followed by
        temperature, top-k and top-p, in that order.
        """
        pipeline: List[LogitsTransform] = []
        if self.masked_pieces is not None:
            pipeline.append(VocabMaskTransform(self.masked_pieces))
        pipeline.append(TemperatureTransform(self.temperature))
        pipeline.append(TopKTransform(self.top_k))
        pipeline.append(TopPTransform(self.top_p))
        return CompoundLogitsTransform(pipeline)
class ColorField(CharField):
    """CharField storing a color value in 'hex', 'hexa', 'rgb' or 'rgba' format.

    Optionally synchronizes itself with a sibling ImageField (``image_field``):
    after every model save, the image's background color is extracted and
    written back into this field.
    """

    default_validators = []

    def __init__(self, *args, **kwargs):
        """Pop the field's custom options before delegating to CharField.

        Raises ValueError for an unknown format and ImproperlyConfigured when
        both 'choices' and 'samples' are supplied.
        """
        # 'samples' provides palette suggestions in the widget (mutually
        # exclusive with Django's 'choices').
        self.samples = kwargs.pop('samples', None)
        self.format = kwargs.pop('format', 'hex').lower()
        if (self.format not in ['hex', 'hexa', 'rgb', 'rgba']):
            raise ValueError(f'Unsupported color format: {self.format}')
        # Validation depends on the chosen color format.
        self.default_validators = [VALIDATORS_PER_FORMAT[self.format]]
        # Name of the sibling ImageField whose background color feeds this field.
        self.image_field = kwargs.pop('image_field', None)
        if self.image_field:
            # Auto-populated fields should not require manual input.
            kwargs.setdefault('blank', True)
        kwargs.setdefault('max_length', 25)
        # Choose a default consistent with the null/blank configuration.
        if kwargs.get('null'):
            kwargs.setdefault('blank', True)
            kwargs.setdefault('default', None)
        elif kwargs.get('blank'):
            kwargs.setdefault('default', '')
        else:
            kwargs.setdefault('default', DEFAULT_PER_FORMAT[self.format])
        super().__init__(*args, **kwargs)
        if (self.choices and self.samples):
            raise ImproperlyConfigured("Invalid options: 'choices' and 'samples' are mutually exclusive, you can set only one of the two for a ColorField instance.")

    def formfield(self, **kwargs):
        """Return the form field with the color widget wired to its palette."""
        palette = []
        if self.choices:
            # 'choices' constrain the stored value; reuse them as the palette.
            choices = self.get_choices(include_blank=False)
            palette = [choice[0] for choice in choices]
        elif self.samples:
            # 'samples' are suggestions only, not constraints.
            palette = [choice[0] for choice in self.samples]
        kwargs['widget'] = ColorWidget(attrs={'default': self.get_default(), 'format': self.format, 'palette': palette})
        return super().formfield(**kwargs)

    def contribute_to_class(self, cls, name, **kwargs):
        """Attach to the model class; hook post_save when image syncing is on."""
        super().contribute_to_class(cls, name, **kwargs)
        if cls._meta.abstract:
            # Abstract models never save instances; concrete subclasses will
            # run contribute_to_class themselves.
            return
        if self.image_field:
            signals.post_save.connect(self._update_from_image_field, sender=cls)

    def deconstruct(self):
        """Expose the custom options so migrations can rebuild the field."""
        (name, path, args, kwargs) = super().deconstruct()
        kwargs['samples'] = self.samples
        kwargs['image_field'] = self.image_field
        return (name, path, args, kwargs)

    def _get_image_field_color(self, instance):
        # Returns '' when no image is available so callers can test truthiness.
        color = ''
        image_file = getattr(instance, self.image_field)
        if image_file:
            # Open the file to ensure its data is accessible (e.g. remote storage).
            with image_file.open() as _:
                color = get_image_file_background_color(image_file, self.format)
        return color

    def _update_from_image_field(self, instance, created, *args, **kwargs):
        """post_save receiver: copy the image's background color into this field.

        Writes via a queryset ``update`` rather than ``instance.save()`` to
        avoid re-triggering the post_save signal recursively.
        """
        if ((not instance) or (not instance.pk) or (not self.image_field)):
            return
        try:
            field_cls = instance._meta.get_field(self.image_field)
            if (not isinstance(field_cls, ImageField)):
                raise ImproperlyConfigured("Invalid 'image_field' field type, expected an instance of 'models.ImageField'.")
        except FieldDoesNotExist as error:
            raise ImproperlyConfigured(f"Invalid 'image_field' field name, {self.image_field!r} field not found.") from error
        color = self._get_image_field_color(instance)
        color_field_name = self.attname
        color_field_value = getattr(instance, color_field_name, None)
        # Only write when a color was extracted and it differs from the stored one.
        if ((color_field_value != color) and color):
            color_field_value = (color or self.default)
            setattr(instance, color_field_name, color_field_value)
            manager = instance.__class__.objects
            manager.filter(pk=instance.pk).update(**{color_field_name: color_field_value})
class MPCService(RunBinaryBaseService):
    """Service that runs MPC games in containers via OneDocker."""

    def __init__(self, container_svc: ContainerService, task_definition: str, mpc_game_svc: MPCGameService) -> None:
        """Store the dependencies and build the OneDocker service wrapper.

        Raises ValueError when either required dependency is None.
        """
        if ((container_svc is None) or (mpc_game_svc is None)):
            raise ValueError(f'Dependency is missing. container_svc={container_svc}, mpc_game_svc={mpc_game_svc}')
        self.container_svc = container_svc
        self.task_definition = task_definition
        self.mpc_game_svc: MPCGameService = mpc_game_svc
        self.logger: logging.Logger = logging.getLogger(__name__)
        self.onedocker_svc = OneDockerService(self.container_svc, self.task_definition)

    # Bare string kept as in the original: documents the expected shape of
    # game_args relative to the caller's game_config.
    '\n The game_args should be consistent with the game_config, which should be\n defined in caller\'s game repository.\n\n For example,\n If the game config looks like this:\n\n game_config = {\n "game": {\n "onedocker_package_name": "package_name",\n "arguments": [\n {"name": "input_filenames", "required": True},\n {"name": "input_directory", "required": True},\n {"name": "output_filenames", "required": True},\n {"name": "output_directory", "required": True},\n {"name": "concurrency", "required": True},\n ],\n },\n\n The game args should look like this:\n [\n # 1st container\n {\n "input_filenames": input_path_1,\n "input_directory": input_directory,\n "output_filenames": output_path_1,\n "output_directory": output_directory,\n "concurrency": cocurrency,\n },\n # 2nd container\n {\n "input_filenames": input_path_2,\n "input_directory": input_directory,\n "output_filenames": output_path_2,\n "output_directory": output_directory,\n "concurrency": cocurrency,\n },\n ]\n '

    def convert_cmd_args_list(self, game_name: str, game_args: List[Dict[(str, Any)]], mpc_party: MPCParty, server_ips: Optional[List[str]]=None) -> Tuple[(str, List[str])]:
        """Translate per-container game_args into OneDocker command arguments.

        Returns the (binary package name, per-container argument list) pair.
        Raises ValueError when game_args is empty, when a CLIENT party has no
        server_ips, or when no binary name could be derived.
        """
        if (not game_args):
            raise ValueError("Missing game_args or it's empty")
        if ((mpc_party is MPCParty.CLIENT) and (not server_ips)):
            raise ValueError('Missing server_ips')
        cmd_args_list = []
        binary_name = None
        for i in range(len(game_args)):
            game_arg = (game_args[i] if (game_args is not None) else {})
            # server_ips is positional: the i-th container talks to the i-th IP.
            server_ip = (server_ips[i] if (server_ips is not None) else None)
            (package_name, cmd_args) = self.mpc_game_svc.build_onedocker_args(game_name=game_name, mpc_party=mpc_party, server_ip=server_ip, **game_arg)
            # All containers run the same binary; remember the first package name.
            if (binary_name is None):
                binary_name = package_name
            cmd_args_list.append(cmd_args)
        if (binary_name is None):
            raise ValueError("Can't get binary_name from game_args")
        return (binary_name, cmd_args_list)
class AddCredit(object):
    """GraphQL mutation helper that adjusts a player's credit counters.

    NOTE(review): ``Field`` takes ``cls`` and ``mutate`` takes ``root`` without
    decorators — these look like they should be ``classmethod``/``staticmethod``
    (graphene-style); confirm the decorators were not lost.
    """

    def Field(cls, **kw):
        # Build the graphene field: resolves to a Player, requires an id, and
        # routes resolution through ``mutate``.
        return gh.Field(Player, id=gh.Int(required=True, description='ID'), jiecao=gh.Int(description=''), games=gh.Int(description=''), drops=gh.Int(description=''), resolver=cls.mutate, **kw)

    def mutate(root, info, id, jiecao=0, games=0, drops=0):
        """Increment the player's counters by the given deltas and persist."""
        ctx = info.context
        # Permission check happens before any database mutation.
        require_perm(ctx, 'player.change_credit')
        p = models.Player.objects.get(id=id)
        # Arguments are deltas, not absolute values.
        p.jiecao += jiecao
        p.games += games
        p.drops += drops
        p.save()
        return p
class TestValidator(TestCase):
TEST_REGION = 'us-east-1'
TEST_REGION_AZS = ['us-east-1-bos-1a', 'us-east-1-chi-1a', 'us-east-1-dfw-1a']
TEST_VPC_ID = 'test_vpc_id'
TEST_PCE_ID = 'test_pce_id'
TEST_ACCOUNT_ID =
TEST_TASK_ROLE_NOT_RELATED_NAME = 'test_task_role_bad_name'
TEST_TASK_ROLE_NAME = 'test_task_role_name'
TEST_TASK_ROLE_ID = f'foo::bar::role/{TEST_TASK_ROLE_NAME}'
TEST_TASK_ROLE_NOT_RELATED_ID = f'foo::bar::role/{TEST_TASK_ROLE_NOT_RELATED_NAME}'
TEST_POLICY_TASK_ROLE_NAME = 'a/b/test_policy_task_role_name'
TEST_LOG_GROUP_NAME = '/ecs/test_log_group'
TEST_NONEXIST_LOG_GROUP_NAME = '/etc/nonexist_log_group'
def setUp(self) -> None:
self.ec2_gateway = MagicMock()
self.iam_gateway = MagicMock()
self.logs_gateway = MagicMock()
self.ecs_gateway = MagicMock()
self.validator = ValidationSuite('test_region', 'test_key_id', 'test_key_data', ec2_gateway=self.ec2_gateway, iam_gateway=self.iam_gateway, ecs_gateway=self.ecs_gateway, logs_gateway=self.logs_gateway)
self.maxDiff = None
def _test_validate_vpc_cidr(self, cidr: str, expected_result: Optional[ValidationResult], expected_error_msg: Optional[str]=None) -> None:
pce = MagicMock()
pce.pce_network = MagicMock()
pce.pce_network.vpc = MagicMock()
pce.pce_network.vpc.vpc_id = TestValidator.TEST_VPC_ID
pce.pce_network.region = 'us-east-1'
pce.pce_network.vpc.cidr = cidr
if expected_error_msg:
with self.assertRaises(Exception) as ex:
self.validator.validate_vpc_cidr(pce)
self.assertEqual(expected_error_msg, str(ex.exception))
return
actual_result = self.validator.validate_vpc_cidr(pce)
self.assertEqual(expected_result, actual_result)
def test_validate_vpc_cidr_non_valid(self) -> None:
for invalid_ip in ['non_valid', '10.1.1.300']:
self._test_validate_vpc_cidr(invalid_ip, None, f"'{invalid_ip}' does not appear to be an IPv4 or IPv6 network")
def test_validate_vpc_cidr_success(self) -> None:
for invalid_ip in ['10.1.0.0/16', '10.1.10.0/24', '10.1.128.128/28']:
self._test_validate_vpc_cidr(invalid_ip, ValidationResult(ValidationResultCode.SUCCESS, ValidationStepNames.VPC_CIDR.code_name))
def test_validate_vpc_cidr_fail(self) -> None:
for invalid_ip in ['10.0.0.0/7', '173.16.0.0/12', '192.168.0.0/15']:
self._test_validate_vpc_cidr(invalid_ip, ValidationResult(ValidationResultCode.ERROR, ValidationStepNames.VPC_CIDR.code_name, NetworkingErrorTemplate.VPC_NON_PRIVATE_CIDR.value.format(vpc_cidr=TestValidator.TEST_VPC_ID), NetworkingErrorSolutionHintTemplate.VPC_NON_PRIVATE_CIDR.value.format(default_vpc_cidr=DEFAULT_PARTNER_VPC_CIDR)), None)
def test_validate_partner_cidr(self) -> None:
pce = MagicMock()
pce.pce_network.vpc.cidr = DEFAULT_PARTNER_VPC_CIDR
self.validator.role = MPCRoles.PARTNER
expected_result = ValidationResult(ValidationResultCode.SUCCESS, ValidationStepNames.VPC_CIDR.code_name)
actual_result = self.validator.validate_vpc_cidr(pce)
self.assertEqual(expected_result, actual_result)
def test_validate_publisher_cidr(self) -> None:
pce = MagicMock()
pce.pce_network.vpc.cidr = DEFAULT_VPC_CIDR
self.validator.role = MPCRoles.PUBLISHER
expected_result = ValidationResult(ValidationResultCode.SUCCESS, ValidationStepNames.VPC_CIDR.code_name)
actual_result = self.validator.validate_vpc_cidr(pce)
self.assertEqual(expected_result, actual_result)
def _test_validate_firewall(self, vpc_cidr: str, routes: List[Route], firewall_rulesets: List[FirewallRuleset], expected_result: ValidationResult, expected_error_msg: Optional[str]=None) -> None:
pce = MagicMock()
pce.pce_network = MagicMock()
pce.pce_network.vpc = MagicMock()
pce.pce_network.vpc.vpc_id = TestValidator.TEST_VPC_ID
pce.pce_network.vpc.cidr = vpc_cidr
pce.pce_network.vpc.tags = {PCE_ID_KEY: TestValidator.TEST_PCE_ID}
pce.pce_network.firewall_rulesets = firewall_rulesets
pce.pce_network.route_table = MagicMock()
pce.pce_network.route_table.routes = routes
if expected_error_msg:
with self.assertRaises(Exception) as ex:
self.validator.validate_firewall(pce)
self.assertEqual(expected_error_msg, str(ex.exception))
return
actual_result = self.validator.validate_firewall(pce)
self.assertEqual(expected_result, actual_result)
def test_validate_firewall_not_overlapping_vpc(self) -> None:
self._test_validate_firewall('10.1.0.0/16', [create_mock_route('11.2.0.0/16', RouteTargetType.VPC_PEERING)], [create_mock_firewall_rule_set([create_mock_firewall_rule('10.2.0.0/16'), create_mock_firewall_rule('10.1.1.0/24'), create_mock_firewall_rule('10.3.0.0/16')])], ValidationResult(ValidationResultCode.ERROR, ValidationStepNames.FIREWALL.code_name, NetworkingErrorTemplate.FIREWALL_INVALID_RULESETS.value.format(error_reasons=str(NetworkingErrorTemplate.FIREWALL_CIDR_NOT_OVERLAPS_VPC.value.format(peer_target_id='target_VPC_PEERING_11.2.0.0/16', vpc_id=TestValidator.TEST_VPC_ID, vpc_cidr='10.1.0.0/16'))), NetworkingErrorSolutionHintTemplate.FIREWALL_INVALID_RULESETS.value.format(error_remediation='')))
def test_validate_firewall_bad_port_range(self) -> None:
initial_port = (FIREWALL_RULE_INITIAL_PORT + 1)
mock_rule_set = create_mock_firewall_rule_set([create_mock_firewall_rule('10.2.0.0/16'), create_mock_firewall_rule('12.4.0.0/16', initial_port), create_mock_firewall_rule('10.3.0.0/16')])
self._test_validate_firewall('10.1.0.0/16', [create_mock_route('12.4.1.0/24', RouteTargetType.VPC_PEERING)], [mock_rule_set], ValidationResult(ValidationResultCode.ERROR, ValidationStepNames.FIREWALL.code_name, NetworkingErrorTemplate.FIREWALL_INVALID_RULESETS.value.format(error_reasons=str(NetworkingErrorTemplate.FIREWALL_CIDR_CANT_CONTAIN_EXPECTED_RANGE.value.format(fr_vpc_id='create_mock_firewall_rule_set', fri_cidr='12.4.0.0/16', fri_from_port=initial_port, fri_to_port=FIREWALL_RULE_FINAL_PORT))), NetworkingErrorSolutionHintTemplate.FIREWALL_INVALID_RULESETS.value.format(error_remediation=str(NetworkingErrorSolutionHintTemplate.FIREWALL_CIDR_CANT_CONTAIN_EXPECTED_RANGE.value.format(sec_group=mock_rule_set.id, from_port=FIREWALL_RULE_INITIAL_PORT, to_port=FIREWALL_RULE_FINAL_PORT)))))
def test_validate_firewall_success(self) -> None:
self._test_validate_firewall('10.1.0.0/16', [create_mock_route('12.4.1.0/24', RouteTargetType.VPC_PEERING)], [create_mock_firewall_rule_set([create_mock_firewall_rule('10.2.0.0/16'), create_mock_firewall_rule('12.4.0.0/16'), create_mock_firewall_rule('10.3.0.0/16')])], ValidationResult(ValidationResultCode.SUCCESS, ValidationStepNames.FIREWALL.code_name))
def test_validate_firewall_exceeding_port_range(self) -> None:
initial_port = (FIREWALL_RULE_INITIAL_PORT - 1)
self._test_validate_firewall('10.1.0.0/16', [create_mock_route('12.4.1.0/24', RouteTargetType.VPC_PEERING)], [create_mock_firewall_rule_set([create_mock_firewall_rule('10.2.0.0/16'), create_mock_firewall_rule('12.4.0.0/16', initial_port), create_mock_firewall_rule('10.3.0.0/16')])], ValidationResult(ValidationResultCode.WARNING, ValidationStepNames.FIREWALL.code_name, NetworkingValidationWarningDescriptionTemplate.NETWORKING_FIREWALL_FLAGGED_RULESETS.value.format(warning_reasons=str(NetworkingValidationWarningDescriptionTemplate.NETWORKING_FIREWALL_CIDR_EXCEED_EXPECTED_RANGE.value.format(fr_vpc_id='create_mock_firewall_rule_set', fri_cidr='12.4.0.0/16', fri_from_port=initial_port, fri_to_port=FIREWALL_RULE_FINAL_PORT)))))
def test_validate_firewall_no_rulez(self) -> None:
self._test_validate_firewall('10.1.0.0/16', [create_mock_route('12.4.1.0/24', RouteTargetType.VPC_PEERING)], [], ValidationResult(ValidationResultCode.ERROR, ValidationStepNames.FIREWALL.code_name, NetworkingErrorTemplate.FIREWALL_RULES_NOT_FOUND.value.format(pce_id=TestValidator.TEST_PCE_ID)))
def _test_validate_route_table(self, routes: List[Route], expected_result: ValidationResult, expected_error_msg: Optional[str]=None) -> None:
pce = MagicMock()
pce.pce_network = MagicMock()
pce.pce_network.vpc = MagicMock()
pce.pce_network.route_table = MagicMock()
pce.pce_network.route_table.routes = routes
if expected_error_msg:
with self.assertRaises(Exception) as ex:
self.validator.validate_route_table(pce)
self.assertEqual(expected_error_msg, str(ex.exception))
return
actual_result = self.validator.validate_route_table(pce)
self.assertEqual(expected_result, actual_result)
def test_validate_route_table_no_vpc_peering(self) -> None:
self._test_validate_route_table([create_mock_route('11.2.0.0/16', RouteTargetType.INTERNET), create_mock_route('11.3.0.0/16', RouteTargetType.OTHER), create_mock_route('11.4.0.0/16', RouteTargetType.INTERNET)], ValidationResult(validation_result_code=ValidationResultCode.ERROR, validation_step_name=ValidationStepNames.ROUTE_TABLE.code_name, description=NetworkingErrorTemplate.ROUTE_TABLE_VPC_PEERING_MISSING.value, solution_hint=NetworkingErrorSolutionHintTemplate.ROUTE_TABLE_VPC_PEERING_MISSING.value))
def test_validate_route_table_route_not_active(self) -> None:
self._test_validate_route_table([create_mock_route('11.2.0.0/16', RouteTargetType.INTERNET), create_mock_route('10.3.0.0/16', RouteTargetType.VPC_PEERING, RouteState.UNKNOWN), create_mock_route('11.4.0.0/16', RouteTargetType.INTERNET)], ValidationResult(validation_result_code=ValidationResultCode.ERROR, validation_step_name=ValidationStepNames.ROUTE_TABLE.code_name, description=NetworkingErrorTemplate.ROUTE_TABLE_VPC_PEERING_MISSING.value, solution_hint=NetworkingErrorSolutionHintTemplate.ROUTE_TABLE_VPC_PEERING_MISSING.value))
def test_validate_route_table_success(self) -> None:
self._test_validate_route_table([create_mock_route('11.2.0.0/16', RouteTargetType.INTERNET), create_mock_route('10.1.0.0/16', RouteTargetType.VPC_PEERING), create_mock_valid_igw_route()], ValidationResult(ValidationResultCode.SUCCESS, ValidationStepNames.ROUTE_TABLE.code_name))
def test_validate_route_table_no_igw(self) -> None:
self._test_validate_route_table([create_mock_route('11.2.0.0/16', RouteTargetType.OTHER), create_mock_route('10.1.0.0/16', RouteTargetType.VPC_PEERING), create_mock_route('11.2.0.0/16', RouteTargetType.INTERNET, route_target_id='vgw-a1b2c3d4'), create_mock_route('11.2.0.0/16', RouteTargetType.INTERNET, route_target_id=f'{IGW_ROUTE_TARGET_PREFIX}a1b2c3d4')], ValidationResult(validation_result_code=ValidationResultCode.ERROR, validation_step_name=ValidationStepNames.ROUTE_TABLE.code_name, description=NetworkingErrorTemplate.ROUTE_TABLE_IGW_MISSING.value, solution_hint=NetworkingErrorSolutionHintTemplate.ROUTE_TABLE_IGW_MISSING.value))
def test_validate_route_table_igw_inactive(self) -> None:
self._test_validate_route_table([create_mock_route('11.2.0.0/16', RouteTargetType.OTHER), create_mock_route('10.1.0.0/16', RouteTargetType.VPC_PEERING), create_mock_valid_igw_route(state=RouteState.UNKNOWN)], ValidationResult(validation_result_code=ValidationResultCode.ERROR, validation_step_name=ValidationStepNames.ROUTE_TABLE.code_name, description=NetworkingErrorTemplate.ROUTE_TABLE_IGW_INACTIVE.value, solution_hint=NetworkingErrorSolutionHintTemplate.ROUTE_TABLE_IGW_INACTIVE.value))
def _test_validate_subnet(self, subnet_availability_zones: List[AvailabilityZone], region_availability_zones: List[AvailabilityZone], expected_result: ValidationResult, expected_error_msg: Optional[str]=None) -> None:
pce = MagicMock()
pce.pce_network = MagicMock()
pce.pce_network.region = 'us-east-1'
pce.pce_network.subnets = [create_mock_subnet(az) for az in subnet_availability_zones]
self.ec2_gateway.describe_availability_zones = MagicMock(return_value=region_availability_zones)
if expected_error_msg:
with self.assertRaises(Exception) as ex:
self.validator.validate_subnets(pce)
self.assertEqual(expected_error_msg, str(ex.exception))
return
actual_result = self.validator.validate_subnets(pce)
self.assertEqual(expected_result, actual_result)
def test_validate_subnet_single_zone(self) -> None:
subnet_availability_zones = ['us-east-1-bos-1a', 'us-east-1-bos-1a', 'us-east-1-bos-1a']
self._test_validate_subnet(subnet_availability_zones, TestValidator.TEST_REGION_AZS, ValidationResult(ValidationResultCode.ERROR, ValidationStepNames.SUBNETS.code_name, NetworkingErrorTemplate.SUBNETS_NOT_ALL_AZ_USED.value.format(region=TestValidator.TEST_REGION, azs=','.join(set(subnet_availability_zones))), NetworkingErrorSolutionHintTemplate.SUBNETS_NOT_ALL_AZ_USED.value.format(azs=','.join(sorted((set(TestValidator.TEST_REGION_AZS) - set(subnet_availability_zones)))))))
def test_validate_subnet_more_subnets_than_zone(self) -> None:
subnet_availability_zones = ['us-east-1-bos-1a', 'us-east-1-chi-1a', 'us-east-1-chi-1a']
self._test_validate_subnet(subnet_availability_zones, TestValidator.TEST_REGION_AZS, ValidationResult(ValidationResultCode.ERROR, ValidationStepNames.SUBNETS.code_name, NetworkingErrorTemplate.SUBNETS_NOT_ALL_AZ_USED.value.format(region=TestValidator.TEST_REGION, azs=','.join(sorted(set(subnet_availability_zones)))), NetworkingErrorSolutionHintTemplate.SUBNETS_NOT_ALL_AZ_USED.value.format(azs=','.join(sorted((set(TestValidator.TEST_REGION_AZS) - set(subnet_availability_zones)))))))
def test_validate_subnet_success(self) -> None:
self._test_validate_subnet(TestValidator.TEST_REGION_AZS, TestValidator.TEST_REGION_AZS, ValidationResult(ValidationResultCode.SUCCESS, ValidationStepNames.SUBNETS.code_name))
def _test_validate_cluster_definition(self, cpu: int, memory: int, image: str, expected_result: ValidationResult, expected_error_msg: Optional[str]=None) -> None:
pce = MagicMock()
pce.pce_compute = MagicMock()
pce.pce_compute.container_definition = create_mock_container_definition(cpu, memory, image)
if expected_error_msg:
with self.assertRaises(Exception) as ex:
self.validator.validate_cluster_definition(pce)
self.assertEqual(expected_error_msg, str(ex.exception))
return
actual_result = self.validator.validate_cluster_definition(pce)
self.assertEqual(expected_result, actual_result)
def test_validate_cluster_definition_wrong_cpu(self) -> None:
cpu = (CONTAINER_CPU * 2)
self._test_validate_cluster_definition(cpu, CONTAINER_MEMORY, CONTAINER_IMAGE, ValidationResult(ValidationResultCode.ERROR, ValidationStepNames.CLUSTER_DEFINITION.code_name, ComputeErrorTemplate.CLUSTER_DEFINITION_WRONG_VALUES.value.format(error_reasons=','.join([ComputeErrorTemplate.CLUSTER_DEFINITION_WRONG_VALUE.value.format(resource_name=ClusterResourceType.CPU.name.title(), value=cpu, expected_value=CONTAINER_CPU)])), ComputeErrorSolutionHintTemplate.CLUSTER_DEFINITION_WRONG_VALUES.value))
def test_validate_cluster_definition_success(self) -> None:
self._test_validate_cluster_definition(CONTAINER_CPU, CONTAINER_MEMORY, CONTAINER_IMAGE, ValidationResult(ValidationResultCode.SUCCESS, ValidationStepNames.CLUSTER_DEFINITION.code_name))
def test_validate_cluster_definition_wrong_image(self) -> None:
image = 'foo_image'
self._test_validate_cluster_definition(CONTAINER_CPU, CONTAINER_MEMORY, image, ValidationResult(ValidationResultCode.WARNING, ValidationStepNames.CLUSTER_DEFINITION.code_name, ValidationWarningDescriptionTemplate.CLUSTER_DEFINITION_FLAGGED_VALUES.value.format(warning_reasons=','.join([ValidationWarningDescriptionTemplate.CLUSTER_DEFINITION_FLAGGED_VALUE.value.format(resource_name=ClusterResourceType.IMAGE.name.title(), value=image, expected_value=CONTAINER_IMAGE)]))))
def _test_validate_network_and_compute(self, vpc_cidr: str, routes: List[Route], firewall_rulesets: List[FirewallRuleset], cpu: int, expected_result: List[ValidationResult], expected_error_msg: Optional[str]=None) -> None:
pce = MagicMock()
pce.pce_network = MagicMock()
pce.pce_network.vpc = MagicMock()
pce.pce_network.vpc.vpc_id = TestValidator.TEST_VPC_ID
pce.pce_network.vpc.cidr = vpc_cidr
pce.pce_network.firewall_rulesets = firewall_rulesets
pce.pce_network.route_table = MagicMock()
pce.pce_network.route_table.routes = routes
pce.pce_network.subnets = []
pce.pce_network.vpc_peering = MagicMock()
pce.pce_network.vpc_peering.status = VpcPeeringState.ACTIVE
pce.pce_compute = MagicMock()
pce.pce_compute.container_definition = create_mock_container_definition(cpu, CONTAINER_MEMORY, CONTAINER_IMAGE, task_role_id=TestValidator.TEST_TASK_ROLE_ID, tags={PCE_ID_KEY: TestValidator.TEST_PCE_ID})
self.iam_gateway.get_policies_for_role = MagicMock(return_value=IAMRole(TestValidator.TEST_TASK_ROLE_ID, {TestValidator.TEST_POLICY_TASK_ROLE_NAME: TASK_POLICY}))
self.ec2_gateway.describe_availability_zones = MagicMock(return_value=[])
if expected_error_msg:
with self.assertRaises(Exception) as ex:
self.validator.validate_network_and_compute(pce)
self.assertEqual(expected_error_msg, str(ex.exception))
return
actual_result = self.validator.validate_network_and_compute(pce)
self.assertEqual(expected_result, actual_result)
def test_validate_network_and_compute_not_overlapping_firewall_cidr_and_wrong_cpu(self) -> None:
cpu = (CONTAINER_CPU + 1)
self._test_validate_network_and_compute('10.1.0.0/16', [create_mock_route('12.4.1.0/24', RouteTargetType.VPC_PEERING), create_mock_valid_igw_route()], [create_mock_firewall_rule_set([create_mock_firewall_rule('10.2.0.0/16'), create_mock_firewall_rule('10.1.1.0/24'), create_mock_firewall_rule('10.3.0.0/16')])], cpu, [ValidationResult(ValidationResultCode.ERROR, ValidationStepNames.FIREWALL.code_name, NetworkingErrorTemplate.FIREWALL_INVALID_RULESETS.value.format(error_reasons=str(NetworkingErrorTemplate.FIREWALL_CIDR_NOT_OVERLAPS_VPC.value.format(peer_target_id='target_VPC_PEERING_12.4.1.0/24', vpc_id=TestValidator.TEST_VPC_ID, vpc_cidr='10.1.0.0/16'))), NetworkingErrorSolutionHintTemplate.FIREWALL_INVALID_RULESETS.value.format(error_remediation='')), ValidationResult(ValidationResultCode.ERROR, ValidationStepNames.CLUSTER_DEFINITION.code_name, ComputeErrorTemplate.CLUSTER_DEFINITION_WRONG_VALUES.value.format(error_reasons=','.join([ComputeErrorTemplate.CLUSTER_DEFINITION_WRONG_VALUE.value.format(resource_name=ClusterResourceType.CPU.name.title(), value=cpu, expected_value=CONTAINER_CPU)])), ComputeErrorSolutionHintTemplate.CLUSTER_DEFINITION_WRONG_VALUES.value)])
def _test_validate_iam_roles(self, task_role_id: RoleId, task_role_policy: IAMRole, expected_result: ValidationResult, expected_error_msg: Optional[str]=None) -> None:
    """Drive ``validate_iam_roles`` against a mocked PCE and verify the outcome.

    When ``expected_error_msg`` is given the call is expected to raise, and the
    exception text must match; otherwise the returned ValidationResult must
    equal ``expected_result``.
    """
    mock_pce = MagicMock()
    mock_pce.pce_compute = MagicMock()
    mock_pce.pce_compute.container_definition = create_mock_container_definition(task_role_id=task_role_id, tags={PCE_ID_KEY: TestValidator.TEST_PCE_ID})
    # Only the role we were handed has attached policies; any other role id
    # resolves to None, mimicking an unknown role in IAM.
    self.iam_gateway.get_policies_for_role = MagicMock(
        side_effect=lambda role_id: task_role_policy if task_role_policy.role_id == role_id else None
    )
    if expected_error_msg:
        with self.assertRaises(Exception) as ex:
            self.validator.validate_iam_roles(mock_pce)
        self.assertEqual(expected_error_msg, str(ex.exception))
        return
    self.assertEqual(expected_result, self.validator.validate_iam_roles(mock_pce))
def test_validate_iam_roles_bad_task_policy(self) -> None:
    """A task policy whose content differs from the expected one yields an ERROR result."""
    # Copy the known-good policy and corrupt a single field.
    bad_task_policy: PolicyContents = TASK_POLICY.copy()
    bad_task_policy['Version'] = '2020-01-01'
    self._test_validate_iam_roles(TestValidator.TEST_TASK_ROLE_ID, IAMRole(TestValidator.TEST_TASK_ROLE_ID, {TestValidator.TEST_POLICY_TASK_ROLE_NAME: bad_task_policy}), ValidationResult(ValidationResultCode.ERROR, ValidationStepNames.IAM_ROLES.code_name, ComputeErrorTemplate.ROLE_WRONG_POLICY.value.format(policy_names=TestValidator.TEST_POLICY_TASK_ROLE_NAME, role_name=TestValidator.TEST_TASK_ROLE_ID), ComputeErrorSolutionHintTemplate.ROLE_WRONG_POLICY.value.format(role_name=TestValidator.TEST_TASK_ROLE_ID, role_policy=TASK_POLICY)))
def test_validate_iam_roles_no_attached_policies(self) -> None:
    """If the policies belong to a different role id, validation reports missing policies."""
    task_policy: PolicyContents = TASK_POLICY.copy()
    # The IAMRole carries TEST_TASK_ROLE_NOT_RELATED_ID, so the gateway lookup
    # for TEST_TASK_ROLE_ID finds nothing.
    self._test_validate_iam_roles(TestValidator.TEST_TASK_ROLE_ID, IAMRole(TestValidator.TEST_TASK_ROLE_NOT_RELATED_ID, {TestValidator.TEST_POLICY_TASK_ROLE_NAME: task_policy}), ValidationResult(ValidationResultCode.ERROR, ValidationStepNames.IAM_ROLES.code_name, ComputeErrorTemplate.ROLE_POLICIES_NOT_FOUND.value.format(role_names=','.join((TestValidator.TEST_TASK_ROLE_ID,))), ComputeErrorSolutionHintTemplate.ROLE_POLICIES_NOT_FOUND.value.format(role_names=','.join((TestValidator.TEST_TASK_ROLE_ID,)), pce_id=TestValidator.TEST_PCE_ID)))
def test_validate_iam_roles_more_policies_than_expected(self) -> None:
    """Extra attached policies beyond the expected one produce a WARNING, not an ERROR."""
    additional_policy_name = 'task_policy_name_additional'
    task_policy: PolicyContents = TASK_POLICY.copy()
    self._test_validate_iam_roles(TestValidator.TEST_TASK_ROLE_ID, IAMRole(TestValidator.TEST_TASK_ROLE_ID, {TestValidator.TEST_POLICY_TASK_ROLE_NAME: task_policy, additional_policy_name: {}}), ValidationResult(ValidationResultCode.WARNING, ValidationStepNames.IAM_ROLES.code_name, ValidationWarningDescriptionTemplate.MORE_POLICIES_THAN_EXPECTED.value.format(policy_names=additional_policy_name, role_id=TestValidator.TEST_TASK_ROLE_ID), ValidationWarningSolutionHintTemplate.MORE_POLICIES_THAN_EXPECTED.value))
def test_validate_log_group_deleted(self) -> None:
    """A log group named in the task but absent from CloudWatch yields a WARNING."""
    pce = MagicMock()
    self.ecs_gateway.extract_log_group_name = MagicMock(return_value=TestValidator.TEST_NONEXIST_LOG_GROUP_NAME)
    # CloudWatch reports no such log group.
    self.logs_gateway.describe_log_group = MagicMock(return_value=None)
    expected = ValidationResult(
        ValidationResultCode.WARNING,
        ValidationStepNames.LOG_GROUP.code_name,
        ValidationWarningDescriptionTemplate.CLOUDWATCH_LOGS_NOT_FOUND.value.format(log_group_name_from_task=TestValidator.TEST_NONEXIST_LOG_GROUP_NAME),
    )
    self.assertEqual(expected, self.validator.validate_log_group(pce))
def test_validate_log_group_success(self) -> None:
    """An existing, correctly named log group yields a SUCCESS result."""
    pce = MagicMock()
    self.ecs_gateway.extract_log_group_name = MagicMock(return_value=TestValidator.TEST_LOG_GROUP_NAME)
    # NOTE(review): this assigns the mock log group itself rather than
    # MagicMock(return_value=create_mock_log_group(...)) as other tests do;
    # presumably it still works because the mock object is callable — confirm.
    self.logs_gateway.describe_log_group = create_mock_log_group(log_group_name=TestValidator.TEST_LOG_GROUP_NAME)
    expected_result = ValidationResult(ValidationResultCode.SUCCESS, ValidationStepNames.LOG_GROUP.code_name)
    actual_result = self.validator.validate_log_group(pce)
    self.assertEqual(expected_result, actual_result)
def test_validate_log_group_not_configuered_in_task(self) -> None:
    """No log group configured in the task definition yields a WARNING."""
    pce = MagicMock()
    # The task definition carries no awslogs configuration at all.
    self.ecs_gateway.extract_log_group_name = MagicMock(return_value=None)
    expected = ValidationResult(
        ValidationResultCode.WARNING,
        ValidationStepNames.LOG_GROUP.code_name,
        ValidationWarningDescriptionTemplate.CLOUDWATCH_LOGS_NOT_CONFIGURED_IN_TASK_DEFINITION.value,
    )
    self.assertEqual(expected, self.validator.validate_log_group(pce))
class OptionSeriesFunnel3dMarker(Options):
    """Marker options for funnel3d series (Highcharts ``series.funnel3d.marker``).

    Each option is exposed as a getter/setter property pair backed by the
    ``Options`` config store. The original block defined every getter and
    setter under the same name with no decorators, so the later ``def``
    silently shadowed the getter; the ``@property``/``@<name>.setter``
    decorators are restored here.
    """

    @property
    def enabled(self):
        """Enable or disable the point marker."""
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabledThreshold(self):
        """Distance threshold below which markers are hidden when ``enabled`` is auto."""
        return self._config_get(2)

    @enabledThreshold.setter
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def fillColor(self):
        """Fill color of the point marker."""
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def height(self):
        """Image marker height in pixels."""
        return self._config_get(None)

    @height.setter
    def height(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineColor(self):
        """Color of the marker's outline."""
        return self._config_get('#ffffff')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Width of the marker's outline in pixels."""
        return self._config_get(0)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        """Radius of the point marker in pixels."""
        return self._config_get(4)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def states(self) -> 'OptionSeriesFunnel3dMarkerStates':
        """Sub-options for marker interaction states (hover, select, ...)."""
        return self._config_sub_data('states', OptionSeriesFunnel3dMarkerStates)

    @property
    def symbol(self):
        """Predefined shape or image url used for the marker."""
        return self._config_get(None)

    @symbol.setter
    def symbol(self, text: str):
        self._config(text, js_type=False)

    @property
    def width(self):
        """Image marker width in pixels."""
        return self._config_get(None)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
def test_matmul_paper(golden):
    """Reproduce the paper's Gemmini matmul schedule and compare against the golden text.

    The scheduling directives below are order-sensitive: each rewrite matches
    patterns produced by the previous one, so the sequence must not be reordered.
    """
    NN = 128
    MM = 128
    KK = 128
    gemmini = rename(matmul_algorithm(), 'matmul_on_gemmini')
    gemmini = gemmini.partial_eval(NN, MM, KK)
    # Stage the accumulator and name the operand reads.
    gemmini = stage_mem(gemmini, 'for k in _: _', 'C[i, j]', 'res')
    gemmini = bind_expr(gemmini, 'A[_]', 'a')
    gemmini = bind_expr(gemmini, 'B[_]', 'b')
    # Tile i, j, k by 16 to match the systolic-array dimensions.
    gemmini = old_split(gemmini, 'i', 16, ['io', 'ii'], perfect=True)
    gemmini = old_split(gemmini, 'j', 16, ['jo', 'ji'], perfect=True)
    gemmini = old_reorder(gemmini, 'ii jo')
    gemmini = old_split(gemmini, 'k', 16, ['ko', 'ki'], perfect=True)
    # Hoist the staged buffers and fission load / compute / store phases apart.
    gemmini = old_lift_alloc(gemmini, 'res:_', n_lifts=2)
    gemmini = old_fission_after(gemmini, 'res = _', n_lifts=2)
    gemmini = old_fission_after(gemmini, 'for ko in _:_', n_lifts=2)
    gemmini = old_reorder(gemmini, 'ji ko')
    gemmini = old_reorder(gemmini, 'ii ko')
    gemmini = old_lift_alloc(gemmini, 'a:_', n_lifts=3)
    gemmini = old_lift_alloc(gemmini, 'b:_')
    gemmini = old_lift_alloc(gemmini, 'b:_', mode='col', n_lifts=2)
    gemmini = old_fission_after(gemmini, 'a[_] = _', n_lifts=3)
    gemmini = old_fission_after(gemmini, 'b[_] = _', n_lifts=3)
    gemmini = old_lift_alloc(gemmini, 'res:_', n_lifts=2)
    gemmini = old_lift_alloc(gemmini, 'a:_', n_lifts=3)
    gemmini = old_lift_alloc(gemmini, 'b:_', n_lifts=3)
    # Replace the tiled loop nests with Gemmini accelerator instructions.
    gemmini = replace(gemmini, 'for ii in _:_ #0', ld_acc)
    gemmini = replace(gemmini, 'for ii in _:_ #0', ld_data)
    gemmini = old_reorder(gemmini, 'ji ki')
    gemmini = replace(gemmini, 'for ki in _:_ #0', ld_data)
    gemmini = old_reorder(gemmini, 'ki ji')
    gemmini = replace(gemmini, 'for ii in _:_ #0', matmul)
    gemmini = replace(gemmini, 'for ii in _:_ #0', st_acc)
    gemmini = simplify(gemmini)
    gemmini = inline_lift_config(gemmini)
    gemmini = simplify(gemmini)
    gemmini_str = str(gemmini)
    print(gemmini_str)
    assert (gemmini_str == golden)
class Progress(Thread):
    """Background thread that renders a spinner plus a status message to stdout.

    ``finish()`` sets the shared event, which stops the render loop.
    """

    def __init__(self, event: Event, initial_message: str='') -> None:
        super().__init__()
        # NOTE(review): the frame list contains only empty strings — the
        # spinner glyphs (e.g. braille frames) appear to have been lost in a
        # re-encoding; confirm against the original source.
        self.states = ['', '', '', '', '', '']
        self.message = ''
        self.finished = event
        # NOTE(review): with the default of '' this check is always true, so
        # update('') runs (and starts the thread); presumably passing
        # initial_message=None is the way to defer starting — confirm.
        if (initial_message is not None):
            self.update(initial_message)

    def run(self):
        # Cycle spinner frames until the finished event is set.
        for char in cycle(self.states):
            if self.finished.is_set():
                break
            # NOTE(review): writes a newline per frame; a carriage return
            # ('\r') would redraw in place — possibly mangled in transit.
            stdout.write(f'''
{char} {self.message}''')
            stdout.flush()
            sleep(0.2)

    def update(self, message: str):
        """Change the displayed message, starting the render thread on first use."""
        if (not self.is_alive()):
            self.start()
        self.message = message

    def finish(self, message: str='Finished'):
        """Stop the spinner and print a final message."""
        self.finished.set()
        stdout.write(f'''
{message}
''')
        stdout.flush()
class TestViews(SingleCreateApiTestCase, SingleUpdateApiTestCase, SingleDeleteApiTestCase, PaginationTestCase):
    """Cassette-recorded integration tests for the Zendesk Views API."""

    __test__ = True
    ZenpyType = View
    # title contains a '{}' placeholder filled by instantiate_zenpy_object(format_val=...).
    object_kwargs = dict(title='testView{}', all=[{'field': 'status', 'operator': 'less_than', 'value': 'solved'}])
    api_name = 'views'
    pagination_limit = 10

    def create_objects(self):
        """Create five uniquely named views for pagination tests."""
        for i in range(100, 105):
            zenpy_object = self.instantiate_zenpy_object(format_val=i)
            self.created_objects.append(self.create_method(zenpy_object))

    def test_count_views(self):
        """views.count() returns a positive value once at least one view exists."""
        cassette_name = '{}'.format(self.generate_cassette_name())
        with self.recorder.use_cassette(cassette_name=cassette_name, serialize_with='prettyjson'):
            view = self.create_single_zenpy_object()
            self.created_objects.append(view)
            count = self.zenpy_client.views.count()
            self.assertGreater(count.value, 0, 'Has non zero count')

    def test_get_active_views(self):
        """views.active() yields at least one view."""
        cassette_name = '{}'.format(self.generate_cassette_name())
        with self.recorder.use_cassette(cassette_name=cassette_name, serialize_with='prettyjson'):
            view = self.create_single_zenpy_object()
            self.created_objects.append(view)
            count = 0
            for _ in self.zenpy_client.views.active():
                count += 1
            self.assertNotEqual(count, 0, 'Must be positive')

    def test_get_compact_views(self):
        """views.compact() yields at least one view."""
        cassette_name = '{}'.format(self.generate_cassette_name())
        with self.recorder.use_cassette(cassette_name=cassette_name, serialize_with='prettyjson'):
            view = self.create_single_zenpy_object()
            self.created_objects.append(view)
            count = 0
            for _ in self.zenpy_client.views.compact():
                count += 1
            self.assertNotEqual(count, 0, 'Must be positive')

    def wait_for_view_is_ready(self, view, max_attempts=50):
        """Poll views.count(view) until Zendesk reports a fresh count.

        Uses a long interval while recording against the live API and a tiny
        one during cassette playback.
        """
        if self.recorder.current_cassette.is_recording():
            request_interval = 5
        else:
            request_interval = 0.0001
        n = 0
        while True:
            sleep(request_interval)
            n += 1
            view_count = self.zenpy_client.views.count(view)
            if view_count.fresh:
                return
            elif (n > max_attempts):
                raise Exception('Too many attempts to retrieve view count!')

    def count_tickets_in_a_view(self, view, cursor_pagination=None):
        """Count tickets returned for a view, capped at 11 to keep tests fast."""
        if (cursor_pagination is not None):
            generator = self.zenpy_client.views.tickets(view, cursor_pagination=cursor_pagination)
        else:
            generator = self.zenpy_client.views.tickets(view)
        tickets_count = 0
        for _ in generator:
            tickets_count += 1
            if (tickets_count > 10):
                break
        return tickets_count

    def test_getting_tickets_from_a_view(self):
        """Default, offset and cursor pagination must agree on the ticket count."""
        cassette_name = '{}'.format(self.generate_cassette_name())
        with self.recorder.use_cassette(cassette_name=cassette_name, serialize_with='prettyjson'):
            ticket_audit = self.zenpy_client.tickets.create(Ticket(subject='test', description='test'))
            ticket = ticket_audit.ticket
            view = self.create_single_zenpy_object()
            self.wait_for_view_is_ready(view)
            try:
                count = self.zenpy_client.views.count(view)
                self.assertNotEqual(count.value, 0, 'Tickets count must be positive')
                tickets_count_default = self.count_tickets_in_a_view(view)
                tickets_count_obp = self.count_tickets_in_a_view(view, cursor_pagination=False)
                tickets_count_cbp = self.count_tickets_in_a_view(view, cursor_pagination=True)
                tickets_count_cbp1 = self.count_tickets_in_a_view(view, cursor_pagination=1)
                self.assertGreater(tickets_count_default, 1, 'Default pagination returned less than 2 objects')
                self.assertNotEqual(tickets_count_cbp, 0, 'CBP returned zero')
                self.assertNotEqual(tickets_count_obp, 0, 'OBP returned zero')
                self.assertEqual(tickets_count_cbp, tickets_count_obp, 'OBP<>CBP')
                self.assertEqual(tickets_count_cbp, tickets_count_cbp1, 'CBP<>CBP[1]')
            finally:
                # Clean up even if an assertion fails.
                self.zenpy_client.tickets.delete(ticket)
                self.zenpy_client.views.delete(view)
def pathparse(value, sep=os.pathsep, os_sep=os.sep):
    """Yield normalized paths parsed from a separator-joined string.

    Backslash escapes (``\\\\``, ``\\"``, ``\\'`` and an escaped separator)
    are honored unless backslash itself is the path or list separator.
    Trailing directory separators are stripped (except for a bare root).
    """
    normpath = ntpath.normpath if os_sep == '\\' else posixpath.normpath
    # (literal, placeholder, replacement) triples; empty when '\' is a separator.
    if '\\' in (os_sep, sep):
        escapes = []
    else:
        escapes = [
            ('\\\\', '<ESCAPE-ESCAPE>', '\\'),
            ('\\"', '<ESCAPE-DQUOTE>', '"'),
            ("\\'", '<ESCAPE-SQUOTE>', "'"),
            ('\\%s' % sep, '<ESCAPE-PATHSEP>', sep),
        ]
    # Hide escaped characters behind placeholders before splitting.
    for literal, placeholder, _unused in escapes:
        value = value.replace(literal, placeholder)
    for part in pathsplit(value, sep=sep):
        if part[-1:] == os_sep and part != os_sep:
            part = part[:-1]
        # Restore the escaped characters inside each part.
        for _unused, placeholder, replacement in escapes:
            part = part.replace(placeholder, replacement)
        yield normpath(fsdecode(part))
class GodotWebSocketClient(webclient.WebSocketClient):
    """WebSocket client variant that ships text to Godot clients as BBCode JSON."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.protocol_key = 'godotclient/websocket'

    def send_text(self, *args, **kwargs):
        """Convert outgoing text to BBCode and send it as a JSON command line."""
        # Nothing to send without a positional text argument.
        if not args:
            return
        args = list(args)
        text = args[0]
        if text is None:
            return
        options = kwargs.pop('options', {})
        # Per-message option wins; otherwise fall back to the session NOCOLOR flag.
        strip_colors = options.get('nocolor', self.protocol_flags.get('NOCOLOR', False))
        command = 'prompt' if options.get('send_prompt', False) else 'text'
        args[0] = parse_to_bbcode(text, strip_ansi=strip_colors)
        self.sendLine(json.dumps([command, args, kwargs]))
class OptionPlotoptionsPictorialSonificationTracksPointgrouping(Options):
    """Point-grouping options for pictorial sonification tracks.

    The original block defined each getter and setter under the same name with
    no decorators, so the later ``def`` shadowed the getter; the
    ``@property``/``@<name>.setter`` decorators are restored here.
    """

    @property
    def algorithm(self):
        """Grouping algorithm used when multiple points share a timespan."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether point grouping is active."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """Timespan in milliseconds over which points are grouped."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Point property the grouping algorithm operates on."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def main():
    """Dump a device's CID reporting info, remap one control, and dump again."""
    buttons = SpecialKeysMseButtons()
    cids = print_cid_info(buttons)
    print()
    print_cid_reporting(buttons, cids)
    print()
    print('### REMAP CID 0xD0 TO 0x53 ###')
    # 208 == 0xD0 (source CID), 83 == 0x53 (remapped CID); the boolean flags
    # are positional arguments of set_cid_reporting — see its signature for
    # their meaning.
    buttons.set_cid_reporting(208, False, False, True, True, False, False, 83)
    print()
    # Show the reporting table again so the remap is visible.
    print_cid_reporting(buttons, cids)
class ModulationSpec(Tidy3dBaseModel):
    """Space-time modulation applied on top of a medium's base permittivity/conductivity.

    The original block had its decorators stripped: line ``('conductivity',
    always=True)`` was a bare (syntactically invalid) tuple left over from
    ``@pd.validator`` and ``_property`` a dangling name left over from
    ``@property``; both are restored here.
    """

    permittivity: SpaceTimeModulation = pd.Field(None, title='Space-time modulation of relative permittivity', description='Space-time modulation of relative permittivity at infinite frequency applied on top of the base permittivity at infinite frequency.')
    conductivity: SpaceTimeModulation = pd.Field(None, title='Space-time modulation of conductivity', description='Space-time modulation of electric conductivity applied on top of the base conductivity.')

    @pd.validator('conductivity', always=True)
    def _same_modulation_frequency(cls, val, values):
        """Reject specs whose two modulations use different time modulation."""
        permittivity = values.get('permittivity')
        if (val is not None) and (permittivity is not None):
            if val.time_modulation != permittivity.time_modulation:
                raise ValidationError("'permittivity' and 'conductivity' should have the same time modulation.")
        return val

    @property
    def applied_modulation(self) -> bool:
        """Whether any modulation (permittivity or conductivity) is present."""
        return (self.permittivity is not None) or (self.conductivity is not None)
def choose_master_channel(data: DataModel):
    """Interactively pick the master channel (plus optional instance) into config.

    Pre-selects the previously configured channel/instance when one exists and
    stores the result back as ``"<channel_id>"`` or ``"<channel_id>#<instance>"``.
    """
    channel_names, channel_ids = data.get_master_lists()
    picker = KeyValueBullet(prompt=_('1. Choose master channel'), choices=channel_names, choices_id=channel_ids)
    default_idx = None
    default_instance = ''
    # Seed defaults from any previously saved "id[#instance]" value.
    if ('master_channel' in data.config) and data.config['master_channel']:
        pieces = data.config['master_channel'].split('#')
        saved_id = pieces[0]
        if len(pieces) > 1:
            default_instance = pieces[1]
        with suppress(ValueError):
            default_idx = channel_ids.index(saved_id)
    chosen_channel_name, chosen_channel_id = picker.launch(default=default_idx)
    prompt_text = _('Instance name to use with {channel_name}: [{default_instance}]').format(channel_name=chosen_channel_name, default_instance=(default_instance or _('default instance')))
    chosen_instance = input(prompt_text + ' ').strip()
    if chosen_instance:
        chosen_channel_id = chosen_channel_id + '#' + chosen_instance
    data.config['master_channel'] = chosen_channel_id
class TransactionsTests(N26TestBase):
    """CLI tests for the ``transactions`` command."""

    # The original file called `_requests(...)` as a bare statement — a no-op —
    # so the HTTP-mock never wrapped the test; the stripped '@' is restored.
    @_requests(method=GET, response_file='transactions.json')
    def test_transactions_cli(self):
        """Running the transactions CLI with a date range should produce output."""
        from n26.cli import transactions
        result = self._run_cli_cmd(transactions, ['--from', '01/30/2019', '--to', '30.01.2020'])
        self.assertIsNotNone(result.output)
def step3():
    """Cross-check Azure blob listings against locally unpacked files.

    Reads blob names/sizes from allblobs2.txt and local paths from unpacked.txt,
    copies locally available blobs into /mnt/data2/opengameart2, and writes the
    blobs that must be fetched from Azure into copyFromAzure.txt.
    """
    with open('../local.settings.json') as fd:
        settings = json.load(fd)
    connectionString = settings['Values']['AzureWebJobsStorage']
    container_name = 'opengameart'
    # Parse "name<TAB>size" lines of the blob listing.
    with open('allblobs2.txt') as fd:
        files = []
        sizes = []
        for line in fd.readlines():
            (fname, size) = line.split('\t')
            files.append(fname.strip())
            sizes.append(int(size))
    connect_str = os.getenv('AZURE_STORAGE_CONNECTION_STRING')
    # Mutable namespace so the nested functions can update counters.
    class Vars():
        pass
    lvars = Vars()
    lvars.counts = defaultdict((lambda : 0))
    unpacked = []
    byfilename = {}
    byfilenameAndSize = defaultdict((lambda : []))
    with open('unpacked.txt') as fd:
        for line in fd.readlines():
            path = line.strip()
            unpacked.append(path)
            key = os.path.split(path)[(- 1)]
            byfilename[key] = path
            try:
                key2 = (key, os.path.getsize(path))
            except:
                lvars.counts['failsize'] += 1
                pass
            # NOTE(review): if getsize raised, key2 is stale from a previous
            # iteration (or unbound on the very first) — this append then files
            # the path under the wrong key; confirm that is acceptable.
            byfilenameAndSize[key2].append(path.lower())
    lvars.existCount = 0
    lvars.existSize = 0
    lvars.notExistCount = 0
    lvars.notExistSize = 0
    lvars.prc = 0
    lvars.unpacked = set(unpacked)
    def updateExists(localpath, lvars):
        # NOTE(review): reads `size` and `filetype` from step3's main loop at
        # call time (closure over the enclosing scope), not from arguments.
        lvars.existCount += 1
        lvars.existSize += size
        lvars.counts[('exist' + filetype)] += 1
        if (filetype == 'extracted'):
            try:
                lvars.unpacked.remove(localpath)
            except:
                lvars.counts['unpackedRemoveFail'] += 1
                pass
    def linkfile(src, dest2):
        # Copy src into the mirror tree, creating directories as needed.
        dest = ('/mnt/data2/opengameart2/' + dest2)
        dirname = os.path.split(dest)[0]
        if (not os.path.exists(dirname)):
            os.makedirs(dirname)
        if (not os.path.exists(dest)):
            shutil.copyfile(src, dest)
    ofd = open('copyFromAzure.txt', 'w')
    for (i, (path, size)) in enumerate(zip(files, sizes)):
        if ((i % 1000) == 0):
            print(i)
        if (size == 0):
            continue
        # Map each blob path onto its candidate local location(s).
        if path.startswith('extract/files'):
            localpath = path[len('extract/files/'):]
            prefix = '/mnt/data/opengameart/unpacked/'
            unq = urllib.parse.unquote_plus
            localpath = unq(localpath)
            localpath = localpath.split('/')
            # Two candidate layouts: quoted first segment, optionally with an
            # extra directory named after the archive (extension stripped).
            parts = ([urllib.parse.quote(localpath[0])] + [unq(x) for x in localpath[1:]])
            parts2 = (([urllib.parse.quote(localpath[0])] + [os.path.splitext(urllib.parse.quote(localpath[0]))[0]]) + [unq(x) for x in localpath[1:]])
            localpath = (prefix + os.path.join(*parts))
            localpath2 = (prefix + os.path.join(*parts2))
            filetype = 'extracted'
        elif path.startswith('files'):
            localpath = path[len('files'):]
            localpath2 = None
            prefix = '/mnt/data/opengameart/files'
            localpath = (prefix + localpath)
            filetype = 'file'
        else:
            localpath = path
            localpath2 = None
            prefix = ''
            filetype = 'other'
        key = (os.path.split(localpath)[(- 1)], size)
        key2 = ((os.path.split(localpath2)[(- 1)], size) if (localpath2 != None) else None)
        # A local copy counts only when both path and size match.
        if (os.path.exists(localpath) and (os.path.getsize(localpath) == size)):
            updateExists(localpath, lvars)
            linkfile(localpath, path)
        elif ((localpath2 != None) and os.path.exists(localpath2) and (os.path.getsize(localpath2) == size)):
            updateExists(localpath2, lvars)
            linkfile(localpath2, path)
        else:
            # Not available locally: queue for download from Azure.
            ofd.write((path + '\n'))
            if (localpath.endswith('.png') and (lvars.prc < 10)):
                print(localpath)
                lvars.prc += 1
            lvars.counts[('notexist' + filetype)] += 1
            lvars.notExistCount += 1
            lvars.notExistSize += size
    print('exists', lvars.existCount, (((lvars.existSize / 1024) / 1024) / 1024), 'GiB')
    print('notExists', lvars.notExistCount, (((lvars.notExistSize / 1024) / 1024) / 1024), 'GiB')
    print('NOMATCH:')
    pprint(lvars.counts)
    ofd.close()
# The original lines began with bare `.parallel` / `.parametrize(...)` —
# dangling attribute accesses (a syntax error); the stripped `@pytest.mark`
# decorator prefix is restored here.
@pytest.mark.parallel
@pytest.mark.parametrize(('MeshClass', 'hdiv_family'), [(UnitIcosahedralSphereMesh, 'BDM'), (UnitCubedSphereMesh, 'RTCF')])
def test_hybrid_conv_parallel(MeshClass, hdiv_family):
    """Check ~2nd-order L2 convergence of the hybridized Poisson solve on sphere meshes."""
    errors = [run_hybrid_poisson_sphere(MeshClass, r, hdiv_family) for r in range(2, 5)]
    errors = np.asarray(errors)
    # Observed rate from the two finest refinement levels.
    l2conv = np.log2((errors[:(- 1)] / errors[1:]))[(- 1)]
    assert (l2conv > 1.8)
class CREDHIST_ENTRY(Structure):
    """One entry of a DPAPI CREDHIST file: an encrypted (SHA1, NT) hash pair.

    ``decrypt`` derives a key from the previous password hash and, on success,
    populates ``pwdhash``/``nthash``; both stay None when the padding check fails.
    """
    structure = (('Version', '<L=0'), ('HashAlgo', '<L=0'), ('Rounds', '<L=0'), ('SidLen', '<L=0'), ('_Sid', '_-Sid', 'self["SidLen"]'), ('CryptAlgo', '<L=0'), ('shaHashLen', '<L=0'), ('ntHashLen', '<L=0'), ('Salt', '16s=b'), ('Sid', ':'), ('_data', '_-data', '(self["shaHashLen"]+self["ntHashLen"]) + (-(self["shaHashLen"]+self["ntHashLen"])) % 16'), ('data', ':'), ('Version2', '<L=0'), ('Guid', '16s=b'))

    def __init__(self, data=None, alignment=0):
        Structure.__init__(self, data, alignment)
        # Prepend the SID header (revision 1, 5 sub-authorities marker) to get
        # a canonical S-1-... string.
        self.sid = RPC_SID((b'\x05\x00\x00\x00' + self['Sid'])).formatCanonical()
        self.pwdhash = None
        self.nthash = None

    def deriveKey(self, passphrase, salt, keylen, count, hashFunction):
        """PBKDF2-style key derivation: XOR-fold `count` HMAC rounds per block."""
        keyMaterial = b''
        i = 1
        while (len(keyMaterial) < keylen):
            U = (salt + pack('!L', i))
            i += 1
            derived = bytearray(hashFunction(passphrase, U))
            for r in range((count - 1)):
                actual = bytearray(hashFunction(passphrase, derived))
                if PY3:
                    # Whole-block XOR via big-int arithmetic.
                    derived = (int.from_bytes(derived, sys.byteorder) ^ int.from_bytes(actual, sys.byteorder)).to_bytes(len(actual), sys.byteorder)
                else:
                    derived = bytearray([chr((a ^ b)) for (a, b) in zip(derived, actual)])
            keyMaterial += derived
        return keyMaterial[:keylen]

    def decrypt(self, key):
        """Decrypt the entry's data with `key`; sets pwdhash/nthash on success."""
        if (self['HashAlgo'] == ALGORITHMS.CALG_HMAC.value):
            # CALG_HMAC defaults to SHA1 as the underlying hash.
            hashModule = SHA1
        else:
            hashModule = ALGORITHMS_DATA[self['HashAlgo']][1]
        prf = (lambda p, s: HMAC.new(p, s, hashModule).digest())
        # Derive key material for cipher key + IV in one pass.
        derivedBlob = self.deriveKey(key, self['Salt'], (ALGORITHMS_DATA[self['CryptAlgo']][0] + ALGORITHMS_DATA[self['CryptAlgo']][3]), count=self['Rounds'], hashFunction=prf)
        cryptKey = derivedBlob[:ALGORITHMS_DATA[self['CryptAlgo']][0]]
        iv = derivedBlob[ALGORITHMS_DATA[self['CryptAlgo']][0]:][:ALGORITHMS_DATA[self['CryptAlgo']][3]]
        cipher = ALGORITHMS_DATA[self['CryptAlgo']][1].new(cryptKey, mode=ALGORITHMS_DATA[self['CryptAlgo']][2], iv=iv)
        cleartext = cipher.decrypt(self['data'])
        ntHashSize = 16
        self.pwdhash = cleartext[:self['shaHashLen']]
        self.nthash = cleartext[self['shaHashLen']:(self['shaHashLen'] + ntHashSize)]
        # The trailing padding must be all zero bytes, otherwise the key was wrong.
        if (cleartext[(self['shaHashLen'] + ntHashSize):] != (((len(self['data']) - self['shaHashLen']) - ntHashSize) * b'\x00')):
            self.pwdhash = None
            self.nthash = None

    def dump(self):
        """Print all entry fields (hashes if decrypted, raw data otherwise)."""
        print('[CREDHIST ENTRY]')
        print(('Version : 0x%.8x (%d)' % (self['Version'], self['Version'])))
        print(('HashAlgo : 0x%.8x (%d) (%s)' % (self['HashAlgo'], self['HashAlgo'], ALGORITHMS(self['HashAlgo']).name)))
        print(('Rounds : %d' % self['Rounds']))
        print(('CryptAlgo : 0x%.8x (%d) (%s)' % (self['CryptAlgo'], self['CryptAlgo'], ALGORITHMS(self['CryptAlgo']).name)))
        print(('shaHashLen : 0x%.8x (%d)' % (self['shaHashLen'], self['shaHashLen'])))
        print(('ntHashLen : 0x%.8x (%d)' % (self['ntHashLen'], self['ntHashLen'])))
        print(('Salt : %s' % hexlify(self['Salt']).decode()))
        print(('SID : %s' % self.sid))
        print(('Version2 : 0x%.8x (%d)' % (self['Version2'], self['Version2'])))
        print(('Guid : %s' % bin_to_string(self['Guid'])))
        if ((self.pwdhash is not None) and (self.nthash is not None)):
            print(('pwdHash : %s' % hexlify(self.pwdhash).decode()))
            print(('ntHash : %s' % hexlify(self.nthash).decode()))
        else:
            print(('Data : %s' % hexlify(self['data']).decode()))
        print()

    def summarize(self):
        """Print only the GUID plus hashes (or raw data if not decrypted)."""
        print('[CREDHIST ENTRY]')
        print(('Guid : %s' % bin_to_string(self['Guid'])))
        if ((self.pwdhash is not None) and (self.nthash is not None)):
            print(('pwdHash : %s' % hexlify(self.pwdhash).decode()))
            print(('ntHash : %s' % hexlify(self.nthash).decode()))
        else:
            print(('Data : %s' % hexlify(self['data']).decode()))
        print()
def test_attributedict_dict_in_list_in_dict():
    """A dict nested inside a list inside a dict is reachable via attribute access."""
    payload = {'instructions': [0, 1, 'neither shalt thou count, excepting that thou then proceedeth to three', {'if_naughty': 'snuff it'}, 'shalt thou not count', 'right out']}
    wrapped = AttributeDict.recursive(payload)
    # The nested dict at index 3 must have been converted too.
    assert wrapped.instructions[3].if_naughty == 'snuff it'
class H2Protocol(asyncio.Protocol):
    """HTTP/2 server protocol that answers DNS-over-HTTPS (DoH) queries.

    Decodes GET/POST/HEAD requests on ``self.uri``, forwards the DNS query to
    the configured upstream resolver, and streams the wire-format answer back.
    """

    def __init__(self, upstream_resolver=None, upstream_port=None, uri=None, logger=None, debug=False, ecs=False):
        config = H2Configuration(client_side=False, header_encoding='utf-8')
        self.conn = H2Connection(config=config)
        self.logger = logger
        if (logger is None):
            self.logger = utils.configure_logger('doh-proxy', 'DEBUG')
        self.transport = None
        self.debug = debug
        self.ecs = ecs
        # Per-stream request state, keyed by HTTP/2 stream id.
        self.stream_data = {}
        self.upstream_resolver = upstream_resolver
        self.upstream_port = upstream_port
        # Set when a query is dispatched; used to log resolution latency.
        self.time_stamp = 0
        self.uri = (constants.DOH_URI if (uri is None) else uri)
        assert (upstream_resolver is not None), 'An upstream resolver must be provided'
        assert (upstream_port is not None), 'An upstream resolver port must be provided'

    def connection_made(self, transport: asyncio.Transport):
        self.transport = transport
        self.conn.initiate_connection()
        self.transport.write(self.conn.data_to_send())

    def data_received(self, data: bytes):
        """Feed incoming bytes to h2 and dispatch the resulting events."""
        try:
            events = self.conn.receive_data(data)
        except ProtocolError:
            # Flush whatever error frames h2 produced, then drop the connection.
            self.transport.write(self.conn.data_to_send())
            self.transport.close()
        else:
            self.transport.write(self.conn.data_to_send())
            for event in events:
                if isinstance(event, RequestReceived):
                    self.request_received(event.headers, event.stream_id)
                elif isinstance(event, DataReceived):
                    self.receive_data(event.data, event.stream_id)
                elif isinstance(event, StreamEnded):
                    self.stream_complete(event.stream_id)
                elif isinstance(event, ConnectionTerminated):
                    self.transport.close()
                # Flush frames generated while handling this event.
                self.transport.write(self.conn.data_to_send())

    def request_received(self, headers: List[Tuple[(str, str)]], stream_id: int):
        """Record headers for the stream; reject unsupported methods with 501."""
        _headers = collections.OrderedDict(headers)
        method = _headers[':method']
        if (method not in ['GET', 'POST', 'HEAD']):
            self.return_501(stream_id)
            return
        request_data = RequestData(_headers, io.BytesIO())
        self.stream_data[stream_id] = request_data

    def stream_complete(self, stream_id: int):
        """Validate the finished request and dispatch the DNS resolution."""
        try:
            request_data = self.stream_data[stream_id]
        except KeyError:
            # Stream was never registered (e.g. rejected method); nothing to do.
            return
        headers = request_data.headers
        method = request_data.headers[':method']
        (path, params) = utils.extract_path_params(headers[':path'])
        if (path != self.uri):
            self.return_404(stream_id)
            return
        if (method in ['GET', 'HEAD']):
            try:
                # GET/HEAD carry the query in URL parameters.
                (ct, body) = utils.extract_ct_body(params)
            except DOHParamsException as e:
                self.return_400(stream_id, body=e.body())
                return
        elif (method == 'POST'):
            # POST carries the query in the request body.
            body = request_data.data.getvalue()
            ct = headers.get('content-type')
        else:
            self.return_501(stream_id)
            return
        if (ct != constants.DOH_MEDIA_TYPE):
            self.return_415(stream_id)
            return
        try:
            dnsq = utils.dns_query_from_body(body, self.debug)
        except DOHDNSException as e:
            self.return_400(stream_id, body=e.body())
            return
        clientip = utils.get_client_ip(self.transport)
        self.logger.info('[HTTPS] {} {}'.format(clientip, utils.dnsquery2log(dnsq)))
        self.time_stamp = time.time()
        asyncio.ensure_future(self.resolve(dnsq, stream_id))

    def on_answer(self, stream_id, dnsr=None, dnsq=None):
        """Send the DNS answer (or a synthesized SERVFAIL) back on the stream."""
        try:
            request_data = self.stream_data[stream_id]
        except KeyError:
            return
        response_headers = [(':status', '200'), ('content-type', constants.DOH_MEDIA_TYPE), ('server', 'asyncio-h2')]
        if (dnsr is None):
            # Upstream failed: answer SERVFAIL for the original query.
            dnsr = dns.message.make_response(dnsq)
            dnsr.set_rcode(dns.rcode.SERVFAIL)
        elif len(dnsr.answer):
            # Cache lifetime is bounded by the smallest answer TTL.
            ttl = min((r.ttl for r in dnsr.answer))
            response_headers.append(('cache-control', 'max-age={}'.format(ttl)))
        clientip = utils.get_client_ip(self.transport)
        interval = int(((time.time() - self.time_stamp) * 1000))
        self.logger.info('[HTTPS] {} {} {}ms'.format(clientip, utils.dnsans2log(dnsr), interval))
        if (request_data.headers[':method'] == 'HEAD'):
            # HEAD responses carry headers only.
            body = b''
        else:
            body = dnsr.to_wire()
        response_headers.append(('content-length', str(len(body))))
        self.conn.send_headers(stream_id, response_headers)
        self.conn.send_data(stream_id, body, end_stream=True)
        self.transport.write(self.conn.data_to_send())

    async def resolve(self, dnsq, stream_id):
        """Forward the query to the upstream resolver and relay the answer."""
        clientip = utils.get_client_ip(self.transport)
        dnsclient = DNSClient(self.upstream_resolver, self.upstream_port, logger=self.logger)
        dnsr = (await dnsclient.query(dnsq, clientip, ecs=self.ecs))
        if (dnsr is None):
            self.on_answer(stream_id, dnsq=dnsq)
        else:
            self.on_answer(stream_id, dnsr=dnsr)

    def return_XXX(self, stream_id: int, status: int, body: bytes=b''):
        """Shared helper sending a simple status + body response on a stream."""
        response_headers = ((':status', str(status)), ('content-length', str(len(body))), ('server', 'asyncio-h2'))
        self.conn.send_headers(stream_id, response_headers)
        self.conn.send_data(stream_id, body, end_stream=True)

    def return_400(self, stream_id: int, body: bytes=b''):
        self.return_XXX(stream_id, 400, body)

    def return_403(self, stream_id: int, body: bytes=b''):
        self.return_XXX(stream_id, 403, body)

    def return_404(self, stream_id: int):
        self.return_XXX(stream_id, 404, body=b'Wrong path')

    def return_405(self, stream_id: int):
        self.return_XXX(stream_id, 405)

    def return_415(self, stream_id: int):
        self.return_XXX(stream_id, 415, body=b'Unsupported content type')

    def return_501(self, stream_id: int):
        self.return_XXX(stream_id, 501, body=b'Not Implemented')

    def receive_data(self, data: bytes, stream_id: int):
        """Buffer POST body bytes for the stream; log unknown stream ids."""
        try:
            stream_data = self.stream_data[stream_id]
        except KeyError:
            clientip = utils.get_client_ip(self.transport)
            self.logger.info('[HTTPS] %s Unknown stream %d', clientip, stream_id)
        else:
            stream_data.data.write(data)
class WebSocketTestSession():
    """Synchronous test harness for an ASGI websocket endpoint.

    Runs the ASGI app in a portal (background event loop) and bridges messages
    through two queues: ``_receive_queue`` feeds the app, ``_send_queue``
    carries the app's output (or an exception) back to the test thread.
    """

    def __init__(self, app: ASGI3App, scope: Scope, portal_factory: _PortalFactoryType) -> None:
        self.app = app
        self.scope = scope
        self.accepted_subprotocol = None
        self.portal_factory = portal_factory
        self._receive_queue: 'queue.Queue[Message]' = queue.Queue()
        self._send_queue: 'queue.Queue[Message | BaseException]' = queue.Queue()
        self.extra_headers = None

    def __enter__(self) -> 'WebSocketTestSession':
        """Start the app, perform the websocket handshake, and return the session."""
        self.exit_stack = contextlib.ExitStack()
        self.portal = self.exit_stack.enter_context(self.portal_factory())
        try:
            _: 'Future[None]' = self.portal.start_task_soon(self._run)
            self.send({'type': 'websocket.connect'})
            message = self.receive()
            # A close instead of an accept means the handshake was rejected.
            self._raise_on_close(message)
        except Exception:
            self.exit_stack.close()
            raise
        self.accepted_subprotocol = message.get('subprotocol', None)
        self.extra_headers = message.get('headers', None)
        return self

    def __exit__(self, *args: typing.Any) -> None:
        try:
            self.close(1000)
        finally:
            self.exit_stack.close()
        # Surface any exception the app raised after the test finished reading.
        while (not self._send_queue.empty()):
            message = self._send_queue.get()
            if isinstance(message, BaseException):
                raise message

    async def _run(self) -> None:
        """Run the ASGI app, forwarding any exception to the test thread."""
        scope = self.scope
        receive = self._asgi_receive
        send = self._asgi_send
        try:
            (await self.app(scope, receive, send))
        except BaseException as exc:
            self._send_queue.put(exc)
            raise

    async def _asgi_receive(self) -> Message:
        # Busy-wait politely: yield to the loop until the test sends a message.
        while self._receive_queue.empty():
            (await anyio.sleep(0))
        return self._receive_queue.get()

    async def _asgi_send(self, message: Message) -> None:
        self._send_queue.put(message)

    def _raise_on_close(self, message: Message) -> None:
        """Translate a close frame into a WebSocketDisconnect exception."""
        if (message['type'] == 'websocket.close'):
            raise WebSocketDisconnect(message.get('code', 1000), message.get('reason', ''))

    def send(self, message: Message) -> None:
        """Deliver a raw ASGI message to the app under test."""
        self._receive_queue.put(message)

    def send_text(self, data: str) -> None:
        self.send({'type': 'websocket.receive', 'text': data})

    def send_bytes(self, data: bytes) -> None:
        self.send({'type': 'websocket.receive', 'bytes': data})

    def send_json(self, data: typing.Any, mode: str='text') -> None:
        assert (mode in ['text', 'binary'])
        text = json.dumps(data, separators=(',', ':'), ensure_ascii=False)
        if (mode == 'text'):
            self.send({'type': 'websocket.receive', 'text': text})
        else:
            self.send({'type': 'websocket.receive', 'bytes': text.encode('utf-8')})

    def close(self, code: int=1000, reason: typing.Union[(str, None)]=None) -> None:
        self.send({'type': 'websocket.disconnect', 'code': code, 'reason': reason})

    def receive(self) -> Message:
        """Pop the next app-sent message, re-raising app exceptions in the test."""
        message = self._send_queue.get()
        if isinstance(message, BaseException):
            raise message
        return message

    def receive_text(self) -> str:
        message = self.receive()
        self._raise_on_close(message)
        return typing.cast(str, message['text'])

    def receive_bytes(self) -> bytes:
        message = self.receive()
        self._raise_on_close(message)
        return typing.cast(bytes, message['bytes'])

    def receive_json(self, mode: str='text') -> typing.Any:
        assert (mode in ['text', 'binary'])
        message = self.receive()
        self._raise_on_close(message)
        if (mode == 'text'):
            text = message['text']
        else:
            text = message['bytes'].decode('utf-8')
        return json.loads(text)
def test_task_node_metadata():
    """Round-trip a TaskNodeMetadata model through its flyte IDL form."""
    task_ident = identifier.Identifier(identifier.ResourceType.TASK, 'project', 'domain', 'name', 'version')
    wf_exec = identifier.WorkflowExecutionIdentifier('project', 'domain', 'name')
    node_exec = identifier.NodeExecutionIdentifier('node_id', wf_exec)
    task_exec = identifier.TaskExecutionIdentifier(task_ident, node_exec, 3)
    dataset_ident = identifier.Identifier(identifier.ResourceType.TASK, 'project', 'domain', 't1', 'abcdef')
    artifact_tag = catalog.CatalogArtifactTag('my-artifact-id', 'some name')
    metadata = catalog.CatalogMetadata(dataset_id=dataset_ident, artifact_tag=artifact_tag, source_task_execution=task_exec)
    model = node_execution_models.TaskNodeMetadata(cache_status=0, catalog_key=metadata)
    assert model.cache_status == 0
    assert model.catalog_key == metadata
    # Serializing and deserializing must produce an equal object.
    assert node_execution_models.TaskNodeMetadata.from_flyte_idl(model.to_flyte_idl()) == model
def rank_quadgrams(corpus, metric, path=None):
    """Score every quadgram collocation in *corpus* with *metric*.

    When *path* is given, write the ranking there as TSV (returning None);
    otherwise return the scored list.
    """
    scored = QuadgramCollocationFinder.from_words(corpus.words()).score_ngrams(metric)
    if not path:
        return scored
    with open(path, 'w') as outfile:
        outfile.write('Collocation\tScore ({})\n'.format(metric.__name__))
        for ngram, score in scored:
            outfile.write('{}\t{}\n'.format(repr(ngram), score))
def d2q_rd1(m0, m1, m2, o0, o1, o2, p0, p1, p2, n0, n1, n2):
    """Return a flattened array of second partial derivatives.

    Appears to be machine-generated (sympy common-subexpression output) for a
    quantity built from the difference vectors (m - o) and (n - p), where
    (m0, m1, m2), (o0, o1, o2), (p0, p1, p2), (n0, n1, n2) are the components
    of four 3D points. It combines inverse powers of |m - o| and |n - p|
    (see x9, x16, x21, x56, x59, x236 below).
    NOTE(review): the exact physical meaning of the returned 144-entry array
    (presumably a Hessian over the 12 coordinates) cannot be confirmed from
    this code alone -- check the generator script.
    Do not hand-edit the expressions below.
    """
    # Common subexpressions (auto-generated).
    x0 = (m0 - o0)
    x1 = (- x0)
    x2 = (n0 - p0)
    x3 = (x2 ** 2)
    x4 = (n1 - p1)
    x5 = (x4 ** 2)
    x6 = (n2 - p2)
    x7 = (x6 ** 2)
    x8 = ((x3 + x5) + x7)
    # x9 = 1 / |n - p|
    x9 = (1 / math.sqrt(x8))
    x10 = (x0 ** 2)
    x11 = (m1 - o1)
    x12 = (x11 ** 2)
    x13 = (m2 - o2)
    x14 = (x13 ** 2)
    x15 = ((x10 + x12) + x14)
    # x16 = |m - o|**-3
    x16 = (x15 ** ((- 3) / 2))
    x17 = (x16 * x9)
    x18 = (x17 * x2)
    x19 = (x1 * x18)
    x20 = ((3 * m0) - (3 * o0))
    x21 = (x9 / (x15 ** (5 / 2)))
    x22 = ((- x20) * x21)
    x23 = (x1 * x22)
    x24 = (x0 * x2)
    x25 = (x11 * x4)
    x26 = (x13 * x6)
    x27 = (x0 * x18)
    x28 = (x17 * x4)
    x29 = (x11 * x28)
    x30 = (x17 * x6)
    x31 = (x13 * x30)
    x32 = (((- x27) - x29) - x31)
    x33 = (- x11)
    x34 = (x22 * x33)
    x35 = (x18 * x33)
    x36 = (x1 * x28)
    x37 = (x35 + x36)
    x38 = (- x13)
    x39 = (x22 * x38)
    x40 = (x18 * x38)
    x41 = (x1 * x30)
    x42 = (x40 + x41)
    x43 = ((2 * m0) - (2 * o0))
    x44 = (x10 * x2)
    x45 = (x0 * x22)
    x46 = (((- x19) + x29) + x31)
    x47 = (x12 * x4)
    x48 = (x22 * x24)
    x49 = (x11 * x13)
    x50 = (x22 * x49)
    x51 = (x11 * x18)
    x52 = ((- x36) + x51)
    x53 = (x14 * x6)
    x54 = (x13 * x18)
    x55 = ((- x41) + x54)
    # x56 = 1 / |m - o|
    x56 = (1 / math.sqrt(x15))
    x57 = (x56 * x9)
    x58 = (- x57)
    # x59 = |n - p|**-3
    x59 = (x8 ** ((- 3) / 2))
    x60 = (x56 * x59)
    x61 = (x3 * x60)
    x62 = (x1 * x17)
    x63 = (x0 * x62)
    x64 = (x16 * x59)
    x65 = (x3 * x64)
    x66 = (x0 * x65)
    x67 = (x1 * x64)
    x68 = (x2 * x25)
    x69 = (x26 * x67)
    x70 = ((((((x1 * x66) + (x2 * x69)) + x58) + x61) - x63) + (x67 * x68))
    x71 = (x2 * x60)
    x72 = (x4 * x71)
    x73 = (x11 * x62)
    x74 = (x5 * x64)
    x75 = (x11 * x74)
    x76 = (x24 * x67)
    x77 = (((((x1 * x75) + (x4 * x69)) + (x4 * x76)) + x72) - x73)
    x78 = (x6 * x71)
    x79 = (x13 * x62)
    x80 = (x64 * x7)
    x81 = (x13 * x80)
    x82 = (x25 * x67)
    x83 = (((((x1 * x81) + (x6 * x76)) + (x6 * x82)) + x78) - x79)
    x84 = (- x2)
    x85 = (x71 * x84)
    x86 = (((((x57 + x63) + (x69 * x84)) + (x76 * x84)) + (x82 * x84)) + x85)
    x87 = (- x4)
    x88 = (x71 * x87)
    x89 = (((((x69 * x87) + x73) + (x76 * x87)) + (x82 * x87)) + x88)
    x90 = (- x6)
    x91 = (x71 * x90)
    x92 = (((((x69 * x90) + (x76 * x90)) + x79) + (x82 * x90)) + x91)
    x93 = ((3 * m1) - (3 * o1))
    x94 = ((- x21) * x93)
    x95 = (x1 * x94)
    x96 = (x28 * x33)
    x97 = (x33 * x94)
    x98 = (x38 * x94)
    x99 = (x28 * x38)
    x100 = (x30 * x33)
    x101 = (x100 + x99)
    x102 = (x0 * x94)
    x103 = (x0 * x28)
    x104 = (x103 - x35)
    x105 = ((2 * m1) - (2 * o1))
    x106 = (x24 * x94)
    x107 = (x49 * x94)
    x108 = ((x27 + x31) - x96)
    x109 = (x13 * x28)
    x110 = ((- x100) + x109)
    x111 = (x17 * x33)
    x112 = (x0 * x111)
    x113 = (x33 * x64)
    x114 = (x113 * x26)
    x115 = (((((- x112) + (x113 * x68)) + (x114 * x2)) + (x33 * x66)) + x72)
    x116 = (x5 * x60)
    x117 = (x11 * x111)
    x118 = (x113 * x24)
    x119 = ((((((x114 * x4) + x116) - x117) + (x118 * x4)) + (x33 * x75)) + x58)
    x120 = (x4 * x60)
    x121 = (x120 * x6)
    x122 = (x111 * x13)
    x123 = (x113 * x25)
    x124 = (((((x118 * x6) + x121) - x122) + (x123 * x6)) + (x33 * x81))
    x125 = (x120 * x84)
    x126 = ((((x112 + (x114 * x84)) + (x118 * x84)) + (x123 * x84)) + x125)
    x127 = (x120 * x87)
    x128 = ((((((x114 * x87) + x117) + (x118 * x87)) + (x123 * x87)) + x127) + x57)
    x129 = (x120 * x90)
    x130 = (((((x114 * x90) + (x118 * x90)) + x122) + (x123 * x90)) + x129)
    x131 = ((3 * m2) - (3 * o2))
    x132 = ((- x131) * x21)
    x133 = (x1 * x132)
    x134 = (x132 * x33)
    x135 = (x30 * x38)
    x136 = (x132 * x38)
    x137 = (x0 * x132)
    x138 = (x0 * x30)
    x139 = (x138 - x40)
    x140 = (x132 * x24)
    x141 = (x132 * x49)
    x142 = (x11 * x30)
    x143 = (x142 - x99)
    x144 = ((2 * m2) - (2 * o2))
    x145 = (((- x135) + x27) + x29)
    x146 = (x17 * x38)
    x147 = (x0 * x146)
    x148 = (x38 * x64)
    x149 = (x148 * x26)
    x150 = (((((- x147) + (x148 * x68)) + (x149 * x2)) + (x38 * x66)) + x78)
    x151 = (x11 * x146)
    x152 = (x148 * x24)
    x153 = ((((x121 + (x149 * x4)) - x151) + (x152 * x4)) + (x38 * x75))
    x154 = (x60 * x7)
    x155 = (x13 * x146)
    x156 = (x148 * x25)
    x157 = ((((((x152 * x6) + x154) - x155) + (x156 * x6)) + (x38 * x81)) + x58)
    x158 = (x6 * x60)
    x159 = (x158 * x84)
    x160 = ((((x147 + (x149 * x84)) + (x152 * x84)) + (x156 * x84)) + x159)
    x161 = (x158 * x87)
    x162 = (((((x149 * x87) + x151) + (x152 * x87)) + (x156 * x87)) + x161)
    x163 = (x158 * x90)
    x164 = ((((((x149 * x90) + (x152 * x90)) + x155) + (x156 * x90)) + x163) + x57)
    x165 = (x20 * x21)
    x166 = (x1 * x165)
    x167 = (x165 * x33)
    x168 = (x165 * x38)
    x169 = (x0 * x165)
    x170 = (x165 * x24)
    x171 = (x165 * x49)
    x172 = ((- x103) - x51)
    x173 = ((- x138) - x54)
    x174 = (x10 * x17)
    x175 = (x24 * x64)
    x176 = (x175 * x26)
    x177 = ((x175 * x25) + x57)
    x178 = (((((x10 * x65) - x174) + x176) + x177) - x61)
    x179 = (x44 * x64)
    x180 = (x0 * x64)
    x181 = (x26 * x4)
    x182 = (x0 * x17)
    x183 = (x11 * x182)
    x184 = ((- x183) - x72)
    x185 = ((((x0 * x75) + (x179 * x4)) + (x180 * x181)) + x184)
    x186 = (x180 * x25)
    x187 = (x13 * x182)
    x188 = ((- x187) - x78)
    x189 = ((((x0 * x81) + (x179 * x6)) + (x186 * x6)) + x188)
    x190 = (x180 * x26)
    x191 = (((((x174 + (x179 * x84)) + (x186 * x84)) + (x190 * x84)) + x58) - x85)
    x192 = (((((x179 * x87) + x183) + (x186 * x87)) + (x190 * x87)) - x88)
    x193 = (((((x179 * x90) + (x186 * x90)) + x187) + (x190 * x90)) - x91)
    x194 = (x21 * x93)
    x195 = (x1 * x194)
    x196 = (x194 * x33)
    x197 = (x194 * x38)
    x198 = (x0 * x194)
    x199 = (x194 * x24)
    x200 = (x194 * x49)
    x201 = ((- x109) - x142)
    x202 = (x47 * x64)
    x203 = (x49 * x6)
    x204 = (x203 * x64)
    x205 = ((((x11 * x66) + x184) + (x2 * x202)) + (x2 * x204))
    x206 = (x12 * x17)
    x207 = (x204 * x4)
    x208 = (((((- x116) + (x12 * x74)) + x177) - x206) + x207)
    x209 = (x11 * x175)
    x210 = (x17 * x49)
    x211 = ((- x121) - x210)
    x212 = ((((x202 * x6) + (x209 * x6)) + x211) + (x49 * x80))
    x213 = (((((- x125) + x183) + (x202 * x84)) + (x204 * x84)) + (x209 * x84))
    x214 = ((((((- x127) + (x202 * x87)) + (x204 * x87)) + x206) + (x209 * x87)) + x58)
    x215 = (((((- x129) + (x202 * x90)) + (x204 * x90)) + (x209 * x90)) + x210)
    x216 = (x131 * x21)
    x217 = (x1 * x216)
    x218 = (x216 * x33)
    x219 = (x216 * x38)
    x220 = (x0 * x216)
    x221 = (x216 * x24)
    x222 = (x4 * x49)
    x223 = (x53 * x64)
    x224 = (x222 * x64)
    x225 = ((((x13 * x66) + x188) + (x2 * x223)) + (x2 * x224))
    x226 = (x13 * x175)
    x227 = (((x211 + (x223 * x4)) + (x226 * x4)) + (x49 * x74))
    x228 = (x14 * x17)
    x229 = ((((((x14 * x80) - x154) + x176) + x207) - x228) + x57)
    x230 = (((((- x159) + x187) + (x223 * x84)) + (x224 * x84)) + (x226 * x84))
    x231 = (((((- x161) + x210) + (x223 * x87)) + (x224 * x87)) + (x226 * x87))
    x232 = ((((((- x163) + (x223 * x90)) + (x224 * x90)) + (x226 * x90)) + x228) + x58)
    x233 = ((2 * n0) - (2 * p0))
    x234 = (x0 * x60)
    x235 = ((3 * n0) - (3 * p0))
    x236 = (x56 / (x8 ** (5 / 2)))
    x237 = (x235 * x236)
    x238 = (x0 * x3)
    x239 = (x237 * x26)
    x240 = (x0 * x71)
    x241 = (x11 * x120)
    x242 = (x13 * x158)
    x243 = (((- x240) - x241) - x242)
    x244 = (x11 * x5)
    x245 = (x237 * x24)
    x246 = (x0 * x120)
    x247 = (x11 * x71)
    x248 = ((- x246) - x247)
    x249 = (x13 * x7)
    x250 = (x237 * x25)
    x251 = (x0 * x158)
    x252 = (x13 * x71)
    x253 = ((- x251) - x252)
    x254 = (x234 * x84)
    x255 = ((x241 + x242) - x254)
    x256 = (x234 * x87)
    x257 = (x247 - x256)
    x258 = (x234 * x90)
    x259 = (x252 - x258)
    x260 = ((3 * n1) - (3 * p1))
    x261 = (x236 * x260)
    x262 = (x26 * x261)
    x263 = ((2 * n1) - (2 * p1))
    x264 = (x11 * x60)
    x265 = (x24 * x261)
    x266 = (x25 * x261)
    x267 = (x11 * x158)
    x268 = (x120 * x13)
    x269 = ((- x267) - x268)
    x270 = (x264 * x84)
    x271 = (x246 - x270)
    x272 = (x264 * x87)
    x273 = ((x240 + x242) - x272)
    x274 = (x264 * x90)
    x275 = (x268 - x274)
    x276 = ((3 * n2) - (3 * p2))
    x277 = (x236 * x276)
    x278 = (x26 * x277)
    x279 = (x24 * x277)
    x280 = ((2 * n2) - (2 * p2))
    x281 = (x13 * x60)
    x282 = (x25 * x277)
    x283 = (x281 * x84)
    x284 = (x251 - x283)
    x285 = (x281 * x87)
    x286 = (x267 - x285)
    x287 = (x281 * x90)
    x288 = ((x240 + x241) - x287)
    x289 = ((- x235) * x236)
    x290 = (x26 * x289)
    x291 = (x24 * x289)
    x292 = (x25 * x289)
    x293 = (x256 + x270)
    x294 = (x258 + x283)
    x295 = ((- x236) * x260)
    x296 = (x26 * x295)
    x297 = (x24 * x295)
    x298 = (x25 * x295)
    x299 = (x274 + x285)
    x300 = ((- x236) * x276)
    x301 = (x26 * x300)
    x302 = (x24 * x300)
    x303 = (x25 * x300)
    # Flattened derivative matrix (generated expression; kept verbatim).
    return np.array([(((((2 * x19) + (x23 * x24)) + (x23 * x25)) + (x23 * x26)) + x32), ((((x24 * x34) + (x25 * x34)) + (x26 * x34)) + x37), ((((x24 * x39) + (x25 * x39)) + (x26 * x39)) + x42), (((((x18 * x43) + (x22 * x44)) + (x25 * x45)) + (x26 * x45)) + x46), ((((x11 * x48) + (x22 * x47)) + (x50 * x6)) + x52), ((((x13 * x48) + (x22 * x53)) + (x4 * x50)) + x55), x70, x77, x83, x86, x89, x92, ((((x24 * x95) + (x25 * x95)) + (x26 * x95)) + x37), (((((x24 * x97) + (x25 * x97)) + (x26 * x97)) + x32) + (2 * x96)), (((x101 + (x24 * x98)) + (x25 * x98)) + (x26 * x98)), ((((x102 * x25) + (x102 * x26)) + x104) + (x44 * x94)), (((((x105 * x28) + (x106 * x11)) + (x107 * x6)) + x108) + (x47 * x94)), ((((x106 * x13) + (x107 * x4)) + x110) + (x53 * x94)), x115, x119, x124, x126, x128, x130, ((((x133 * x24) + (x133 * x25)) + (x133 * x26)) + x42), (((x101 + (x134 * x24)) + (x134 * x25)) + (x134 * x26)), (((((2 * x135) + (x136 * x24)) + (x136 * x25)) + (x136 * x26)) + x32), ((((x132 * x44) + (x137 * x25)) + (x137 * x26)) + x139), ((((x11 * x140) + (x132 * x47)) + (x141 * x6)) + x143), (((((x13 * x140) + (x132 * x53)) + (x141 * x4)) + (x144 * x30)) + x145), x150, x153, x157, x160, x162, x164, (((((x166 * x24) + (x166 * x25)) + (x166 * x26)) + (2 * x27)) + x46), (((x104 + (x167 * x24)) + (x167 * x25)) + (x167 * x26)), (((x139 + (x168 * x24)) + (x168 * x25)) + (x168 * x26)), (((((x165 * x44) + (x169 * x25)) + (x169 * x26)) - (x18 * x43)) + x32), ((((x11 * x170) + (x165 * x47)) + (x171 * x6)) + x172), ((((x13 * x170) + (x165 * x53)) + (x171 * x4)) + x173), x178, x185, x189, x191, x192, x193, ((((x195 * x24) + (x195 * x25)) + (x195 * x26)) + x52), ((((x108 + (x196 * x24)) + (x196 * x25)) + (x196 * x26)) + (2 * x29)), (((x143 + (x197 * x24)) + (x197 * x25)) + (x197 * x26)), (((x172 + (x194 * x44)) + (x198 * x25)) + (x198 * x26)), ((((((- x105) * x28) + (x11 * x199)) + (x194 * x47)) + (x200 * x6)) + x32), ((((x13 * x199) + (x194 * x53)) + (x200 * x4)) + x201), x205, x208, x212, x213, x214, 
    x215, ((((x217 * x24) + (x217 * x25)) + (x217 * x26)) + x55), (((x110 + (x218 * x24)) + (x218 * x25)) + (x218 * x26)), ((((x145 + (x219 * x24)) + (x219 * x25)) + (x219 * x26)) + (2 * x31)), (((x173 + (x216 * x44)) + (x220 * x25)) + (x220 * x26)), ((((x11 * x221) + x201) + (x203 * x216)) + (x216 * x47)), (((((x13 * x221) - (x144 * x30)) + (x216 * x222)) + (x216 * x53)) + x32), x225, x227, x229, x230, x231, x232, x70, x115, x150, x178, x205, x225, (((((x2 * x239) - (x233 * x234)) + (x237 * x238)) + (x237 * x68)) + x243), ((((x181 * x237) + (x237 * x244)) + (x245 * x4)) + x248), ((((x237 * x249) + (x245 * x6)) + (x250 * x6)) + x253), (((((x239 * x84) + (2 * x240)) + (x245 * x84)) + (x250 * x84)) + x255), ((((x239 * x87) + (x245 * x87)) + (x250 * x87)) + x257), ((((x239 * x90) + (x245 * x90)) + (x250 * x90)) + x259), x77, x119, x153, x185, x208, x227, ((((x2 * x262) + (x238 * x261)) + x248) + (x261 * x68)), (((((x181 * x261) + x243) + (x244 * x261)) - (x263 * x264)) + (x265 * x4)), ((((x249 * x261) + (x265 * x6)) + (x266 * x6)) + x269), ((((x262 * x84) + (x265 * x84)) + (x266 * x84)) + x271), (((((2 * x241) + (x262 * x87)) + (x265 * x87)) + (x266 * x87)) + x273), ((((x262 * x90) + (x265 * x90)) + (x266 * x90)) + x275), x83, x124, x157, x189, x212, x229, ((((x2 * x278) + (x238 * x277)) + x253) + (x277 * x68)), ((((x181 * x277) + (x244 * x277)) + x269) + (x279 * x4)), ((((x243 + (x249 * x277)) + (x279 * x6)) - (x280 * x281)) + (x282 * x6)), ((((x278 * x84) + (x279 * x84)) + (x282 * x84)) + x284), ((((x278 * x87) + (x279 * x87)) + (x282 * x87)) + x286), (((((2 * x242) + (x278 * x90)) + (x279 * x90)) + (x282 * x90)) + x288), x86, x126, x160, x191, x213, x230, (((((x2 * x290) + (x233 * x234)) + (x238 * x289)) + x255) + (x289 * x68)), ((((x181 * x289) + (x244 * x289)) + x271) + (x291 * x4)), ((((x249 * x289) + x284) + (x291 * x6)) + (x292 * x6)), ((((x243 + (2 * x254)) + (x290 * x84)) + (x291 * x84)) + (x292 * x84)), ((((x290 * x87) + (x291 * x87)) + (x292 * x87)) + x293), 
    ((((x290 * x90) + (x291 * x90)) + (x292 * x90)) + x294), x89, x128, x162, x192, x214, x231, ((((x2 * x296) + (x238 * x295)) + x257) + (x295 * x68)), (((((x181 * x295) + (x244 * x295)) + (x263 * x264)) + x273) + (x297 * x4)), ((((x249 * x295) + x286) + (x297 * x6)) + (x298 * x6)), (((x293 + (x296 * x84)) + (x297 * x84)) + (x298 * x84)), ((((x243 + (2 * x272)) + (x296 * x87)) + (x297 * x87)) + (x298 * x87)), ((((x296 * x90) + (x297 * x90)) + (x298 * x90)) + x299), x92, x130, x164, x193, x215, x232, ((((x2 * x301) + (x238 * x300)) + x259) + (x300 * x68)), ((((x181 * x300) + (x244 * x300)) + x275) + (x302 * x4)), (((((x249 * x300) + (x280 * x281)) + x288) + (x302 * x6)) + (x303 * x6)), (((x294 + (x301 * x84)) + (x302 * x84)) + (x303 * x84)), (((x299 + (x301 * x87)) + (x302 * x87)) + (x303 * x87)), ((((x243 + (2 * x287)) + (x301 * x90)) + (x302 * x90)) + (x303 * x90))], dtype=np.float64)
class RSProxyDataManager(object):
    """Loads serialized proxy-object records from a JSON file and creates them.

    The JSON file holds parallel lists keyed by attribute name; record *i* is
    the i-th element of each list.
    """

    # Attributes copied verbatim from the JSON columns onto each data object.
    _FIELDS = ('pos', 'rot', 'sca', 'parent_name', 'instance_file', 'node_name')

    def __init__(self):
        # Ordered list of RSProxyDataObject instances populated by load().
        self.data = []

    def load(self, path):
        """Parse *path* (JSON) and build one RSProxyDataObject per record."""
        import json
        with open(path, 'r') as f:
            data = json.load(f)
        # 'instance_file' is used as the reference column for the record count.
        for i in range(len(data['instance_file'])):
            data_obj = RSProxyDataObject()
            self.data.append(data_obj)
            # Replaces six hand-written parallel assignments with one loop.
            for field in self._FIELDS:
                setattr(data_obj, field, data[field][i])

    def create(self):
        """Instantiate every loaded record in the scene."""
        for d in self.data:
            d.create()
class BaseTransaction(LegacyTransactionFieldsAPI, BaseTransactionFields, SignedTransactionMethods, TransactionBuilderAPI):
    """Legacy (pre-typed) signed transaction, RLP-codable via its field sedes."""
    fields = BASE_TRANSACTION_FIELDS

    # Bug fix: `decode` takes `cls` and is invoked on the class, so it must be
    # a classmethod; without the decorator `cls` would receive the instance
    # and the wrong sedes would be passed to rlp.decode.
    @classmethod
    def decode(cls, encoded: bytes) -> SignedTransactionAPI:
        return rlp.decode(encoded, sedes=cls)

    def encode(self) -> bytes:
        return rlp.encode(self)
def test_normalize_smallest_h5(capsys):
    """hicNormalize --normalize smallest must reproduce the reference .h5 matrices."""
    out_one = NamedTemporaryFile(suffix='.h5', delete=False)
    out_one.close()
    out_two = NamedTemporaryFile(suffix='.h5', delete=False)
    out_two.close()
    args = '--matrices {} {} --normalize smallest -o {} {}'.format(matrix_one_h5, matrix_two_h5, out_one.name, out_two.name).split()
    compute(hicNormalize.main, args, 5)
    # Compare each produced matrix against its stored reference.
    for reference, produced in (((ROOT + '/smallest_one.h5'), out_one.name), ((ROOT + '/smallest_two.h5'), out_two.name)):
        expected = hm.hiCMatrix(reference)
        actual = hm.hiCMatrix(produced)
        nt.assert_equal(expected.matrix.data, actual.matrix.data)
        nt.assert_equal(expected.cut_intervals, actual.cut_intervals)
    os.unlink(out_one.name)
    os.unlink(out_two.name)
def train_model():
    """Run the full training loop, periodically checkpointing.

    Returns a dict mapping iteration number (and the key 'final') to the
    saved weight-file paths. If create_model() reports an existing 'final'
    checkpoint, training is skipped entirely.
    """
    (model, weights_file, start_iter, checkpoints, output_dir) = create_model()
    if ('final' in checkpoints):
        # Training already completed in a previous run; nothing to do.
        return checkpoints
    setup_model_for_training(model, weights_file, output_dir)
    training_stats = TrainingStats(model)
    # Snapshot interval is defined in total iterations, spread across GPUs.
    CHECKPOINT_PERIOD = int((cfg.TRAIN.SNAPSHOT_ITERS / cfg.NUM_GPUS))
    for cur_iter in range(start_iter, cfg.SOLVER.MAX_ITER):
        if model.roi_data_loader.has_stopped():
            handle_critical_error(model, 'roi_data_loader failed')
        training_stats.IterTic()
        lr = model.UpdateWorkspaceLr(cur_iter, lr_policy.get_lr_at_iter(cur_iter))
        workspace.RunNet(model.net.Proto().name)
        if (cur_iter == start_iter):
            # Dump the net structure once, on the first executed iteration.
            nu.print_net(model)
        training_stats.IterToc()
        training_stats.UpdateIterStats()
        training_stats.LogIterStats(cur_iter, lr)
        if ((((cur_iter + 1) % CHECKPOINT_PERIOD) == 0) and (cur_iter > start_iter)):
            checkpoints[cur_iter] = os.path.join(output_dir, 'model_iter{}.pkl'.format(cur_iter))
            nu.save_model_to_weights_file(checkpoints[cur_iter], model)
            save_trainlog(training_stats.log, output_dir)
        if (cur_iter == (start_iter + training_stats.LOG_PERIOD)):
            # Discard warm-up timing so reported speed reflects steady state.
            training_stats.ResetIterTimer()
        if np.isnan(training_stats.iter_total_loss):
            # Dump every tracked loss/metric before aborting on NaN loss.
            for i in training_stats.losses_and_metrics:
                print(i, training_stats.losses_and_metrics[i])
            handle_critical_error(model, 'Loss is NaN')
    checkpoints['final'] = os.path.join(output_dir, 'model_final.pkl')
    nu.save_model_to_weights_file(checkpoints['final'], model)
    model.roi_data_loader.shutdown()
    save_trainlog(training_stats.log, output_dir)
    return checkpoints
class BodyLevel(BodyElement):
    """One (possibly parenthesized) level of an XPath expression body.

    Holds an ordered list of BodyElements and can evaluate them, for a given
    tag, down to a single BodyElementValue.
    """

    # When True, the final resolved value must be boolean/number.
    # NOTE(review): the name says "_OR_STR" but the check (and error message)
    # below test for boolean or number -- confirm which is intended.
    VALIDATE_ONLY_BOOLEAN_OR_STR = False

    def __init__(self):
        self.bodyElements = []

    def __repr__(self):
        return ('%s( bodyElements = %s )' % (self.__class__.__name__, repr(self.bodyElements)))

    def appendBodyElement(self, bodyElement):
        """Append a single element to this level."""
        self.bodyElements.append(bodyElement)

    def appendBodyElements(self, bodyElements):
        """Append a list of elements to this level."""
        self.bodyElements += bodyElements

    def __len__(self):
        return len(self.bodyElements)

    def getBodyElements(self):
        return self.bodyElements

    def __iter__(self):
        # Bug fix: this used to be a generator ending in `raise StopIteration()`,
        # which PEP 479 (Python 3.7+) converts into a RuntimeError once the
        # elements are exhausted. Returning the list's iterator is equivalent
        # and safe.
        return iter(self.bodyElements)

    def evaluateLevelForTag(self, currentTag):
        """Evaluate this level against a single tag; returns a BodyElementValue."""
        return self.evaluateLevelForTags([currentTag])[0]

    def evaluateLevelForTags(self, currentTags):
        """Evaluate this level once per tag in *currentTags*.

        Returns a list with one BodyElementValue per tag. Raises
        XPathParseError on malformed expressions and XPathRuntimeError when
        evaluation does not reduce to a single value.
        """
        thisLevelElements = self.bodyElements
        resultPerTag = []
        if (len(thisLevelElements) == 0):
            return resultPerTag
        # Pass 1: resolve sub-levels and value generators to concrete values.
        ORDERED_BE_TYPES_TO_PROCESS_TAGS = [(BodyLevel, (lambda _bl, _curTag: _bl.evaluateLevelForTag(_curTag))), (BodyElementValueGenerator, (lambda _bevg, _curTag: _bevg.resolveValueFromTag(_curTag)))]
        # Pass 2: fold operations, then comparisons, then boolean ops -- this
        # ordering implements operator precedence.
        ORDERED_BE_TYPES_TO_PROCESS_VALUES = [(BodyElementOperation, (lambda _beo, _leftSide, _rightSide: _beo.performOperation(_leftSide, _rightSide))), (BodyElementComparison, (lambda _bec, _leftSide, _rightSide: _bec.doComparison(_leftSide, _rightSide))), (BodyElementBooleanOps, (lambda _bebo, _leftSide, _rightSide: _bebo.doBooleanOp(_leftSide, _rightSide)))]
        for thisTag in currentTags:
            curElements = thisLevelElements
            for (typeToProcess, processFunction) in ORDERED_BE_TYPES_TO_PROCESS_TAGS:
                # NOTE(review): the `and ... or` fallback keeps the original
                # element when processFunction returns a falsy value -- confirm
                # resolved values can never legitimately be falsy here.
                curElements = [((issubclass(curElement.__class__, typeToProcess) and processFunction(curElement, thisTag)) or curElement) for curElement in curElements]
            for (typeToProcess, processFunction) in ORDERED_BE_TYPES_TO_PROCESS_VALUES:
                nextElements = []
                leftSide = None
                numElements = len(curElements)
                i = 0
                # Left-to-right fold: each operator consumes the value before
                # and after it, replacing all three with the resolved value.
                while (i < numElements):
                    curElement = curElements[i]
                    curElementClass = curElement.__class__
                    if (not issubclass(curElementClass, typeToProcess)):
                        nextElements.append(curElement)
                        leftSide = curElement
                        i += 1
                        continue
                    else:
                        if ((i + 1) >= numElements):
                            raise XPathParseError('XPath expression ends in an operation, no right-side to operation.')
                        if (not issubclass(leftSide.__class__, BodyElementValue)):
                            raise XPathParseError('XPath expression contains two consecutive operations (left side)')
                        rightSide = curElements[(i + 1)]
                        if (not issubclass(rightSide.__class__, BodyElementValue)):
                            raise XPathParseError('XPath expression contains two consecutive operations (right side)')
                        resolvedValue = processFunction(curElement, leftSide, rightSide)
                        if (not issubclass(resolvedValue.__class__, BodyElementValue)):
                            raise XPathRuntimeError(('XPath expression for op "%s" did not return a BodyElementValue, as expected. Got: <%s> %s' % (repr(curElement), resolvedValue.__class__.__name__, repr(resolvedValue))))
                        # Drop the left operand already in nextElements and
                        # substitute the resolved value.
                        nextElements = (nextElements[:(- 1)] + [resolvedValue])
                        leftSide = resolvedValue
                        i += 2
                curElements = nextElements
            numElementsRemaining = len(curElements)
            if (numElementsRemaining != 1):
                raise XPathRuntimeError(('Got unexpected current number of elements at the end. Expected 1, got %d. Repr: %s' % (numElementsRemaining, repr(curElements))))
            finalElement = curElements[0]
            finalElementClass = finalElement.__class__
            try:
                finalElementValueType = finalElement.VALUE_TYPE
            except AttributeError:
                raise XPathRuntimeError(('Final Value resolved from level """%s""" was not a BodyElementValue, as was expected.\nIt is a: %s \nrepr: %s' % (repr(self), finalElementClass.__name__, repr(finalElement))))
            if (self.VALIDATE_ONLY_BOOLEAN_OR_STR and (finalElementValueType not in (BODY_VALUE_TYPE_BOOLEAN, BODY_VALUE_TYPE_NUMBER))):
                raise XPathRuntimeError(('Final value resolved from level """%s""" was not an integer or a boolean, cannot proceed.\nVALUE_TYPE is %s.\nClass: %s\nRepr: %s' % (repr(self), _bodyValueTypeToDebugStr(finalElementValueType), finalElementClass.__name__, repr(finalElement))))
            resultPerTag.append(finalElement)
        return resultPerTag
# NOTE(review): the decorator line was mangled to ".django_db" in the original
# file; restored to the standard pytest marker -- confirm against VCS.
@pytest.mark.django_db
def test_really_old_transaction(client, agency_data):
    """Transactions older than API_SEARCH_MIN_DATE must not count toward subtiers."""
    TransactionNormalized.objects.update(fiscal_year=(fy(settings.API_SEARCH_MIN_DATE) - 1))
    resp = client.get(URL.format(code='001', filter=''))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.data['toptier_code'] == '001')
    assert (resp.data['subtier_agency_count'] == 0)
class FBHeapFromCommand(fb.FBCommand):
    """lldb command 'heapfrom': print every heap pointer reachable from a variable."""

    def name(self):
        return 'heapfrom'

    def description(self):
        return 'Show all nested heap pointers contained within a given variable.'

    def run(self, arguments, options):
        var = self.context.frame.var(arguments[0])
        if ((not var) or (not var.IsValid())):
            self.result.SetError('No variable named "{}"'.format(arguments[0]))
            return
        # Breadth-first walk to the leaf values of the variable.
        root = var.GetNonSyntheticValue()
        leafs = []
        queue = [root]
        while queue:
            node = queue.pop(0)
            if (node.num_children == 0):
                leafs.append(node)
            else:
                queue += [node.GetChildAtIndex(i) for i in range(node.num_children)]
        # Addressable leaves without a scalar value are candidate pointers.
        pointers = {}
        for node in leafs:
            if (node.addr and (not node.value)):
                pointers[node.load_addr] = node.path
        options = lldb.SBExpressionOptions()
        options.SetLanguage(lldb.eLanguageTypeC)

        def isHeap(addr):
            # malloc_size() returns 0 for addresses not managed by malloc.
            lookup = '(int)malloc_size({})'.format(addr)
            return (self.context.frame.EvaluateExpression(lookup, options).unsigned != 0)

        # Bug fix: this was a generator expression; after the for-loop
        # consumed it, `if not allocations` tested the (always truthy)
        # generator object, so 'No heap addresses found' could never print.
        # A list has correct emptiness semantics.
        allocations = [addr for addr in pointers if isHeap(addr)]
        for addr in allocations:
            print('0x{addr:x} {path}'.format(addr=addr, path=pointers[addr]), file=self.result)
        if (not allocations):
            print('No heap addresses found', file=self.result)
class ChannelWebSocket(Channel):
    """Channel implementation backed by a WebSocket connection."""

    def __init__(self, ws: WebSocket):
        self._ws: WebSocket = ws

    def close(self):
        self._ws.close()

    def send_message(self, message: Any):
        """JSON-encode *message* and send it; raises ChannelError on failure."""
        if self._ws.closed:
            raise ChannelError('Unable to send data to the remote host (not connected)')
        try:
            self._ws.send(json.dumps(message))
        except Exception as exc:
            # Bug fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; narrowed and chained so the
            # original failure is preserved for debugging.
            raise ChannelError('Unable to send data to the remote host') from exc

    def recv_message(self, timeout_epoch: Optional[float]=None) -> Any:
        """Receive and JSON-decode one message, optionally timing out at *timeout_epoch*.

        Raises MessageTimeout on timeout, ChannelError when disconnected or
        the message is empty, and MessageFormatError on invalid JSON.
        """
        def timeout_handler(signum, frame):
            raise MessageTimeout('Timed out')
        if self._ws.closed:
            raise ChannelError('Unable to receive data from the remote host (not connected)')
        if timeout_epoch:
            # NOTE(review): SIGALRM timeouts only work in the main thread,
            # and alarm() of a non-positive value cancels rather than fires
            # -- confirm callers always pass a future epoch.
            signal.signal(signal.SIGALRM, timeout_handler)
            signal.alarm(int(round((timeout_epoch - time.time()))))
        try:
            message = self._ws.receive()
        finally:
            if timeout_epoch:
                signal.alarm(0)
        if (not message):
            raise ChannelError('Unable to receive data from the remote host (message was empty)')
        try:
            return json.loads(message)
        except ValueError:
            raise MessageFormatError(desc='Unable to decode the JSON message')
def test_source_code_renderer():
    """Rendered HTML keeps identifiers/strings and uses the expected palette."""
    rendered = SourceCodeRenderer().to_html("def hello_world():\n print('Hello, world!')")
    for fragment in ('hello_world', 'Hello, world!', '#ffffff'):
        assert fragment in rendered
    assert '#fff0f0' not in rendered
def create_log_config_set_mask(equip_id, last_item, *bits):
    """Build a DIAG LOG_CONFIG set-mask packet with the given bits enabled.

    Bits greater than *last_item* are silently ignored.
    """
    header = struct.pack('<LLLL', DIAG_LOG_CONFIG_F, LOG_CONFIG_SET_MASK_OP, equip_id, last_item)
    mask = bytearray(b'\x00' * bytes_reqd_for_bit(last_item))
    for bit in bits:
        if bit <= last_item:
            mask[int(bit / 8)] |= 1 << (bit % 8)
    return header + bytes(mask)
# NOTE(review): the decorator line was mangled to ".parametrize(...)" in the
# original file; restored to the standard pytest marker -- confirm against VCS.
@pytest.mark.parametrize('subcommand', SUB_COMMANDS)
def test_subcommand_with_no_nodes(subcommand, kubeflow_pipelines_runtime_instance):
    """Every subcommand except 'describe' must fail on a zero-node pipeline."""
    if (subcommand == 'describe'):
        return
    runner = CliRunner()
    with runner.isolated_filesystem():
        pipeline_file = 'pipeline_with_zero_nodes.pipeline'
        pipeline_file_path = (((Path(__file__).parent / 'resources') / 'pipelines') / pipeline_file)
        assert pipeline_file_path.is_file()
        invoke_parameters = [subcommand, str(pipeline_file_path)]
        if (subcommand in ['submit', 'export']):
            # These subcommands additionally require a runtime configuration.
            invoke_parameters.extend(['--runtime-config', kubeflow_pipelines_runtime_instance])
        result = runner.invoke(pipeline, invoke_parameters)
        assert (result.exit_code != 0)
def _vm_backup_cb_failed(result, task_id, bkp, action, vm=None):
if (action == 'POST'):
bkp.delete()
bkp.update_zpool_resources()
elif (action == 'PUT'):
bkp.status = bkp.OK
bkp.save_status()
vm.revert_notready()
elif (action == 'DELETE'):
bkp.status = bkp.OK
bkp.save_status() |
def extractArcanedreamOrg(item):
    """Map a feed item from arcanedream.org to a release message.

    Returns None for previews/non-releases, False when no tag matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Previews and items with neither a volume nor a chapter are not releases.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the fixture decorator was mangled to "(scope='function')" and
# the two request URLs were truncated (unterminated f-strings) in the original
# file; both have been reconstructed -- confirm the endpoint paths against VCS.
@pytest.fixture(scope='function')
def segment_erasure_data(segment_connection_config, segment_erasure_identity_email) -> str:
    """Seed Segment with an identity and a track event for erasure testing."""
    segment_secrets = segment_connection_config.secrets
    # Bug fix: the original tested the undefined name `segment_identity_email`;
    # the fixture parameter is `segment_erasure_identity_email`.
    if (not segment_erasure_identity_email):
        return
    api_domain = segment_secrets['api_domain']
    user_token = segment_secrets['user_token']
    faker = Faker()
    timestamp = int(time.time())
    email = segment_erasure_identity_email
    first_name = faker.first_name()
    last_name = faker.last_name()
    headers = {'Content-Type': 'application/json', 'Authorization': f'Basic {user_token}'}
    body = {'userId': email, 'traits': {'subscriptionStatus': 'active', 'address': {'city': faker.city(), 'country': faker.country(), 'postalCode': faker.postcode(), 'state': 'NY'}, 'age': random.randrange(18, 99), 'avatar': '', 'industry': 'data', 'description': faker.job(), 'email': email, 'firstName': first_name, 'id': timestamp, 'lastName': last_name, 'name': f'{first_name} {last_name}', 'phone': faker.phone_number(), 'title': faker.prefix(), 'username': f'test_fidesops_user_{timestamp}', 'website': 'www.example.com'}}
    # Reconstructed URL -- TODO confirm original endpoint path.
    response = requests.post(f'https://{api_domain}identify', headers=headers, json=body)
    assert response.ok
    error_message = 'The user endpoint did not return the required data for testing during the time limit'
    segment_id = poll_for_existence(_get_user_id, (email, segment_secrets), error_message=error_message, interval=10)
    body = {'userId': email, 'type': 'track', 'event': 'User Registered', 'properties': {'plan': 'Free', 'accountType': faker.company()}, 'context': {'ip': faker.ipv4()}}
    # Reconstructed URL -- TODO confirm original endpoint path.
    response = requests.post(f'https://{api_domain}track', headers=headers, json=body)
    assert response.ok
    error_message = 'The track_events endpoint did not return the required data for testing during the time limit'
    poll_for_existence(_get_track_events, (segment_id, segment_secrets), error_message=error_message)
class TestStripQuery(unittest.TestCase):
    """Tests for strip_query().

    NOTE(review): the URL literals were lost in the original file
    (unterminated strings, a syntax error); representative values have been
    reconstructed below so the module is syntactically valid. Restore the
    real fixtures from VCS.
    """

    def test_strip_query(self):
        # A URL whose query string should be removed.
        url = 'http://example.com/page?utm_source=feed&utm_medium=rss'  # TODO confirm original fixture
        expected = 'http://example.com/page'  # TODO confirm original fixture
        self.assertEqual(strip_query(url), expected)

    def test_preserve_nice_query(self):
        # A URL whose query string should be kept untouched.
        url = 'http://example.com/search?q=python'  # TODO confirm original fixture
        expected = url
        self.assertEqual(strip_query(url), expected)

    def test_preserve_auth(self):
        # Credentials embedded in the URL must survive stripping.
        url = 'http://user:pass@example.com/page'  # TODO confirm original fixture
        expected = url
        self.assertEqual(strip_query(url), expected)
def test_dispatch_to_response_pure_notification_invalid_params_auto() -> None:
    """A notification with invalid params must yield no response at all."""
    def foo(colour: str, size: str) -> Result:
        return Success()
    request = '{"jsonrpc": "2.0", "method": "foo", "params": {"colour":"blue"}}'
    response = dispatch_to_response_pure(deserializer=default_deserializer, validator=default_validator, post_process=identity, context=NOCONTEXT, methods={'foo': foo}, request=request)
    assert response is None
class TestIndia(unittest.TestCase):
    """Unit tests for the IN (India) parser helper functions."""

    def test_read_datetime_from_span_id(self):
        # Slash-separated date with a 12-hour clock.
        soup = BeautifulSoup('<p><span id="lbldate">9/4/2017 5:17:00 PM</span></p>', 'html.parser')
        parsed = IN.read_datetime_from_span_id(soup, 'lbldate', 'D/M/YYYY h:mm:ss A')
        self.assertIsNotNone(parsed)
        self.assertEqual(parsed.isoformat(), '2017-04-09T17:17:00+05:30')
        # Dash-separated date with a 24-hour clock.
        soup = BeautifulSoup('<p><span id="lblPowerStatusDate">04-09-2017 17:13</span></p>', 'html.parser')
        parsed = IN.read_datetime_from_span_id(soup, 'lblPowerStatusDate', 'DD-MM-YYYY HH:mm')
        self.assertIsNotNone(parsed)
        self.assertEqual(parsed.isoformat(), '2017-09-04T17:13:00+05:30')

    def test_read_text_from_span_id(self):
        soup = BeautifulSoup('<span id="lblcgs" style="font-weight:bold;">2998</span>', 'html.parser')
        text = IN.read_text_from_span_id(soup, 'lblcgs')
        self.assertIsNotNone(text)
        self.assertEqual(text, '2998')

    def test_read_value_from_span_id(self):
        soup = BeautifulSoup('<span id="lblcgs" style="font-weight:bold;">2998</span>', 'html.parser')
        value = IN.read_value_from_span_id(soup, 'lblcgs')
        self.assertIsNotNone(value)
        self.assertEqual(value, 2998.0)

    def test_read_india_datetime_with_only_time(self):
        # A wall-clock time earlier than "now" in IST rolls over to the next day.
        parsed = IN.read_datetime_with_only_time('01:05:01', 'HH:mm:ss', get('2017-11-01T19:05:01+00:00'))
        self.assertIsNotNone(parsed)
        self.assertEqual(parsed.isoformat(), '2017-11-02T01:05:01+05:30')
        parsed = IN.read_datetime_with_only_time('06:35:01', 'HH:mm:ss', get('2017-11-02T01:05:01+00:00'))
        self.assertIsNotNone(parsed)
        self.assertEqual(parsed.isoformat(), '2017-11-02T06:35:01+05:30')
class TestCreateDataStreamParamSource:
    """Parameter-source tests for the create-data-stream operation."""

    def test_create_data_stream(self):
        param_source = params.CreateDataStreamParamSource(track.Track(name='unit-test'), params={'data-stream': 'test-data-stream'})
        expected = {'data-stream': 'test-data-stream', 'data-streams': ['test-data-stream'], 'request-params': {}}
        assert param_source.params() == expected

    def test_create_data_stream_inline_without_body(self):
        # Request params supplied by the track must be passed through untouched.
        param_source = params.CreateDataStreamParamSource(track.Track(name='unit-test'), params={'data-stream': 'test-data-stream', 'request-params': {'wait_for_active_shards': True}})
        expected = {'data-stream': 'test-data-stream', 'data-streams': ['test-data-stream'], 'request-params': {'wait_for_active_shards': True}}
        assert param_source.params() == expected

    def test_filter_data_stream(self):
        # Only the explicitly requested data stream survives filtering.
        streams = [track.DataStream(name='data-stream-1'), track.DataStream(name='data-stream-2'), track.DataStream(name='data-stream-3')]
        param_source = params.CreateDataStreamParamSource(track.Track(name='unit-test', data_streams=streams), params={'data-stream': 'data-stream-2'})
        assert param_source.params() == {'data-stream': 'data-stream-2', 'data-streams': ['data-stream-2'], 'request-params': {}}
class MeterStats(base_tests.SimpleProtocol):
    """Request statistics for all meters and log every returned entry."""

    def runTest(self):
        logging.info('Sending meter stats request')
        stats = get_stats(self, ofp.message.meter_stats_request(meter_id=ofp.OFPM_ALL))
        logging.info('Received %d meter stats entries', len(stats))
        for entry in stats:
            logging.info(entry.show())
# NOTE(review): the three decorator lines were mangled in the original file
# ("_view", "_data", "_required"); they have been reconstructed as the
# conventional api_view/request_data/setting_required stack used elsewhere in
# this codebase -- confirm against VCS.
@api_view(('GET',))
@request_data(permissions=(IsAdminOrReadOnly,))
@setting_required('VMS_VM_SNAPSHOT_ENABLED')
def vm_define_snapshot_list_all(request, data=None):
    """List snapshot definitions for every VM visible to the requester."""
    extra = output_extended_snap_count(request, data)
    snap_define = SnapshotDefine.objects.select_related('vm', 'periodic_task', 'periodic_task__crontab').filter(vm__in=get_vms(request)).order_by(*SnapshotDefineView.get_order_by(data))
    if extra:
        # Attach the extended snapshot-count annotation when requested.
        snap_define = snap_define.extra(extra)
    return SnapshotDefineView(request, data=data).get(None, snap_define, many=True, extended=bool(extra))
class BenchThread(threading.Thread):
    """Thread that ping-pongs with a partner via a pair of events.

    Each iteration blocks on ``wait_event``, clears it, increments the
    counter and signals ``event``, until CONTEXT_SWITCHES iterations have
    been performed.
    """

    def __init__(self, event, wait_event):
        super().__init__()
        self.counter = 0
        self.event = event
        self.wait_event = wait_event

    def run(self):
        while self.counter <= CONTEXT_SWITCHES:
            self.wait_event.wait()
            self.wait_event.clear()
            self.counter += 1
            self.event.set()
class TestAny(unittest.TestCase):
    """Tests for the Any trait's default-value and factory handling."""

    def test_default_default(self):
        class A(HasTraits):
            foo = Any()

        a = A()
        self.assertEqual(a.foo, None)

    def test_list_default(self):
        """A bare list default warns about sharing but is copied per instance."""
        message_pattern = "a default value of type 'list'.* will be shared"
        with self.assertWarnsRegex(DeprecationWarning, message_pattern):
            class A(HasTraits):
                foo = Any([])

        a = A()
        b = A()
        self.assertEqual(a.foo, [])
        self.assertEqual(b.foo, [])
        # Mutating one instance's value must not leak into the other.
        a.foo.append(35)
        self.assertEqual(a.foo, [35])
        self.assertEqual(b.foo, [])

    def test_dict_default(self):
        """A bare dict default warns about sharing but is copied per instance."""
        message_pattern = "a default value of type 'dict'.* will be shared"
        with self.assertWarnsRegex(DeprecationWarning, message_pattern):
            class A(HasTraits):
                foo = Any({})

        a = A()
        b = A()
        self.assertEqual(a.foo, {})
        self.assertEqual(b.foo, {})
        a.foo['color'] = 'red'
        self.assertEqual(a.foo, {'color': 'red'})
        self.assertEqual(b.foo, {})

    def test_with_factory(self):
        """factory= produces a fresh value per instance, with no warning."""
        class A(HasTraits):
            foo = Any(factory=dict)

        a = A()
        b = A()
        self.assertEqual(a.foo, {})
        self.assertEqual(b.foo, {})
        a.foo['key'] = 23
        self.assertEqual(a.foo, {'key': 23})
        self.assertEqual(b.foo, {})
        # BUG FIX: the original line read "a.foo = b.foo = {'red': }", a
        # syntax error.  The value was presumably 0xFF0000 (pure red),
        # matching the 65280 (0xFF00, pure green) used below — TODO confirm
        # against the upstream test suite.
        a.foo = b.foo = {'red': 0xFF0000}
        # Explicit assignment shares one dict, so the write is visible via b.
        a.foo['green'] = 65280
        self.assertEqual(b.foo['green'], 65280)

    def test_with_factory_and_args(self):
        """args/kw are forwarded verbatim to the factory."""
        def factory(*args, **kw):
            return ('received', args, kw)

        args = (21, 34, 'some string')
        kw = {'bar': 57}

        class A(HasTraits):
            foo = Any(factory=factory, args=args, kw=kw)

        a = A()
        self.assertEqual(a.foo, ('received', args, kw))

    def test_with_default_value_and_factory(self):
        """Supplying both a default value and a factory is rejected."""
        with self.assertRaises(TypeError):
            Any(23, factory=int)
def _test_correct_response_for_time_period(client):
    """The award search returns the single seeded award for this time period."""
    payload = {
        'filters': {
            'award_type_codes': ['A'],
            'time_period': [{'start_date': '2014-01-01', 'end_date': '2008-12-31'}],
        },
        'fields': ['Award ID'],
        'page': 1,
        'limit': 60,
        'sort': 'Award ID',
        'order': 'desc',
        'subawards': False,
    }
    resp = client.post(
        '/api/v2/search/spending_by_award',
        content_type='application/json',
        data=json.dumps(payload),
    )
    expected_result = [
        {'internal_id': 1, 'Award ID': 'abc111', 'generated_internal_id': 'CONT_AWD_TESTING_1'}
    ]
    assert resp.status_code == status.HTTP_200_OK
    results = resp.json().get('results')
    assert len(results) == 1
    assert results == expected_result, 'Time Period filter does not match expected result'
class ConsumptionTestCase(unittest.TestCase):
    """Validation rules for consumption values."""

    def test_positive_consumption(self):
        """A positive consumption validates without raising."""
        self.assertFalse(
            validate_consumption(c1, 'FR'), msg='Positive consumption is fine!'
        )

    def test_negative_consumption(self):
        """A negative consumption must raise ValueError."""
        with self.assertRaises(ValueError, msg='Negative consumption is not allowed!'):
            validate_consumption(c2, 'FR')

    def test_None_consumption(self):
        """An undefined (None) consumption is accepted."""
        self.assertFalse(
            validate_consumption(c3, 'FR'), msg='Consumption can be undefined!'
        )
# NOTE(review): the original first line began with ".parametrize(...)" — a
# syntax error.  The "@pytest.mark" prefix was almost certainly lost in
# extraction (the fixture-style parameters match parametrize's injection);
# restored below.
@pytest.mark.parametrize('types, expected', (({'Person': [{'name': 'name', 'type': 'string'}]}, 'Person'), ({'Person': [{'name': 'name', 'type': 'string'}], 'Mail': [{'name': 'from', 'type': 'Person'}]}, 'Mail'), ({'Person': [{'name': 'name', 'type': 'string'}, {'name': 'friend', 'type': 'Person'}]}, 'Person'), ({'Person': [{'name': 'name', 'type': 'string'}, {'name': 'friends', 'type': 'Person[]'}], 'Mail': [{'name': 'from', 'type': 'Person'}, {'name': 'attachments', 'type': 'Attachment[]'}], 'Attachment': [{'name': 'from', 'type': 'string'}]}, 'Mail')), ids=['primary_type with no dependencies', 'primary_type with one dependency', 'primary_type with recursive dependency', 'primary_type with array dependency'])
def test_get_primary_type_pass(types, expected):
    """The type not referenced by any other type is the primary type."""
    assert get_primary_type(types) == expected
class Crypt(Validator):
    """Validator that lazily hashes incoming values.

    Values are wrapped in LazyCrypt so the actual digest is computed on
    demand; already-hashed values pass through untouched — presumably
    ``_emt_field_hashed_contents_`` marks them, TODO confirm upstream.
    """

    def __init__(self, key=None, algorithm='pbkdf2(1000,20,sha512)', salt=True, message=None):
        super().__init__(message=message)
        self.key = key
        self.digest_alg = algorithm
        self.salt = salt

    def __call__(self, value):
        # Values flagged as already hashed are returned as-is.
        if getattr(value, '_emt_field_hashed_contents_', False):
            return (value, None)
        hashed = LazyCrypt(self, value)
        # If the input is itself an equivalent LazyCrypt, keep the original
        # object rather than re-wrapping it.
        if isinstance(value, LazyCrypt) and value == hashed:
            return (value, None)
        return (hashed, None)
class BaseIntegrityValueTest(ConditionFromReferenceMixin[DatasetSummary], ABC):
    """Base class for data-integrity tests backed by DatasetSummaryMetric."""

    group: ClassVar = DATA_INTEGRITY_GROUP.id
    _metric: DatasetSummaryMetric

    def __init__(
        self,
        eq: Optional[NumericApprox] = None,
        gt: Optional[Numeric] = None,
        gte: Optional[Numeric] = None,
        is_in: Optional[List[Union[(Numeric, str, bool)]]] = None,
        lt: Optional[Numeric] = None,
        lte: Optional[Numeric] = None,
        not_eq: Optional[Numeric] = None,
        not_in: Optional[List[Union[(Numeric, str, bool)]]] = None,
        is_critical: bool = True,
    ):
        """Forward all comparison conditions to the mixin and attach the metric."""
        super().__init__(
            eq=eq,
            gt=gt,
            gte=gte,
            is_in=is_in,
            lt=lt,
            lte=lte,
            not_eq=not_eq,
            not_in=not_in,
            is_critical=is_critical,
        )
        self._metric = DatasetSummaryMetric()
def job_viz(jobs: typing.List[job.Job]) -> str:
    """Render a compact single-line visualization of job counts per phase.

    The output is the phase label followed by one character per sub-phase
    (via n_to_char of the job count at that sub-phase).  Phase layout mirrors
    the original unrolled code: phases 1 and 2 have 8 sub-phases, phase 3
    has 7 and phase 4 has 1.

    IMPROVED: the original repeated the same loop four times and built the
    string with quadratic '+='; the layout is now data-driven and joined once.
    """
    # (label, phase number, sub-phase count)
    phase_layout = (('1', 1, 8), ('2', 2, 8), ('3', 3, 7), ('4', 4, 1))
    parts = []
    for label, phase_no, sub_count in phase_layout:
        parts.append(label)
        for sub in range(sub_count):
            parts.append(n_to_char(n_at_ph(jobs, job.Phase(phase_no, sub))))
    return ''.join(parts)
class EmptyImporter(object):
    """Importer that records a model as empty and pristine, then commits."""

    def __init__(self, session, readonly_session, model, dao, _, *args, **kwargs):
        # Extra positionals/keywords are accepted only for interface
        # compatibility with other importers; they are unused here.
        del args, kwargs
        self.session = session
        self.readonly_session = readonly_session
        self.model = model
        self.dao = dao

    def run(self):
        """Attach the model, describe it as empty/pristine, mark done, commit."""
        description = json.dumps({'source': 'empty', 'pristine': True}, sort_keys=True)
        self.session.add(self.model)
        self.model.add_description(description)
        self.model.set_done()
        self.session.commit()
class ReflectionService(object):
    """Experimental gRPC client stubs for cosmos.base.reflection.v2alpha1.ReflectionService.

    BUG FIX: generated gRPC service classes expose these helpers as
    @staticmethod (they take no self); the decorators were missing here, so
    calling them on an instance would have mis-bound `request` as self.
    """

    @staticmethod
    def GetAuthnDescriptor(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        """Unary-unary call to GetAuthnDescriptor."""
        return grpc.experimental.unary_unary(request, target, '/cosmos.base.reflection.v2alpha1.ReflectionService/GetAuthnDescriptor', cosmos_dot_base_dot_reflection_dot_v2alpha1_dot_reflection__pb2.GetAuthnDescriptorRequest.SerializeToString, cosmos_dot_base_dot_reflection_dot_v2alpha1_dot_reflection__pb2.GetAuthnDescriptorResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetChainDescriptor(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        """Unary-unary call to GetChainDescriptor."""
        return grpc.experimental.unary_unary(request, target, '/cosmos.base.reflection.v2alpha1.ReflectionService/GetChainDescriptor', cosmos_dot_base_dot_reflection_dot_v2alpha1_dot_reflection__pb2.GetChainDescriptorRequest.SerializeToString, cosmos_dot_base_dot_reflection_dot_v2alpha1_dot_reflection__pb2.GetChainDescriptorResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetCodecDescriptor(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        """Unary-unary call to GetCodecDescriptor."""
        return grpc.experimental.unary_unary(request, target, '/cosmos.base.reflection.v2alpha1.ReflectionService/GetCodecDescriptor', cosmos_dot_base_dot_reflection_dot_v2alpha1_dot_reflection__pb2.GetCodecDescriptorRequest.SerializeToString, cosmos_dot_base_dot_reflection_dot_v2alpha1_dot_reflection__pb2.GetCodecDescriptorResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetConfigurationDescriptor(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        """Unary-unary call to GetConfigurationDescriptor."""
        return grpc.experimental.unary_unary(request, target, '/cosmos.base.reflection.v2alpha1.ReflectionService/GetConfigurationDescriptor', cosmos_dot_base_dot_reflection_dot_v2alpha1_dot_reflection__pb2.GetConfigurationDescriptorRequest.SerializeToString, cosmos_dot_base_dot_reflection_dot_v2alpha1_dot_reflection__pb2.GetConfigurationDescriptorResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetQueryServicesDescriptor(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        """Unary-unary call to GetQueryServicesDescriptor."""
        return grpc.experimental.unary_unary(request, target, '/cosmos.base.reflection.v2alpha1.ReflectionService/GetQueryServicesDescriptor', cosmos_dot_base_dot_reflection_dot_v2alpha1_dot_reflection__pb2.GetQueryServicesDescriptorRequest.SerializeToString, cosmos_dot_base_dot_reflection_dot_v2alpha1_dot_reflection__pb2.GetQueryServicesDescriptorResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GetTxDescriptor(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        """Unary-unary call to GetTxDescriptor."""
        return grpc.experimental.unary_unary(request, target, '/cosmos.base.reflection.v2alpha1.ReflectionService/GetTxDescriptor', cosmos_dot_base_dot_reflection_dot_v2alpha1_dot_reflection__pb2.GetTxDescriptorRequest.SerializeToString, cosmos_dot_base_dot_reflection_dot_v2alpha1_dot_reflection__pb2.GetTxDescriptorResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
from functools import partial  # needed by the restored decorator below; harmless if already imported

# NOTE(review): the original first line was "(custom_vjp, nondiff_argnums=
# tuple(range(1, 6)))" — a syntax error (keyword argument inside a tuple
# display).  It was almost certainly a stripped
# "@partial(custom_vjp, nondiff_argnums=tuple(range(1, 6)))"; restored.
@partial(custom_vjp, nondiff_argnums=tuple(range(1, 6)))
def run_local(simulation: JaxSimulation, task_name: str, folder_name: str='default', path: str='simulation_data.hdf5', callback_url: str=None, verbose: bool=True) -> JaxSimulationData:
    """Run a simulation and wrap the result for adjoint differentiation.

    Converts the JaxSimulation to a plain simulation plus jax metadata, runs
    it through tidy3d_run_fn, and rebuilds a JaxSimulationData from the
    result.  All arguments after *simulation* are non-differentiable.
    """
    (sim_tidy3d, jax_info) = simulation.to_simulation()
    sim_data_tidy3d = tidy3d_run_fn(simulation=sim_tidy3d, task_name=str(task_name), folder_name=folder_name, path=path, callback_url=callback_url, verbose=verbose)
    return JaxSimulationData.from_sim_data(sim_data_tidy3d, jax_info=jax_info)
def upsert_entry(database_connection: Connection, warning_messages_table: Table, code: int, message: str) -> None:
    """Insert the (code, message) warning row, or update it if the message changed.

    No-op when the row already exists with an identical message.
    """
    table = warning_messages_table
    matches_code = table.c.code == code
    existing = database_connection.execute(table.select().where(matches_code)).first()
    if existing is None:
        database_connection.execute(table.insert().values(code=code, message=message))
        log.info(f'Added - {code}: {message}')
    elif existing.message != message:
        database_connection.execute(table.update().where(matches_code).values(message=message))
        log.info(f'Updated - {code}: {message}')
def upgrade_from_old_version(app):
    """One-time migration of database names from Stellar <= 0.3.2.

    Clears the migrate flag and persists the config once the (optional)
    rename pass has run.
    """
    if not app.config['migrate_from_0_3_2']:
        return
    if app.is_old_database():
        click.echo('Upgrading from old Stellar version...')

        def after_rename(old_name, new_name):
            # Progress feedback for each renamed database.
            click.echo(('* Renamed %s to %s' % (old_name, new_name)))

        app.update_database_names_to_new_version(after_rename=after_rename)
    # Flag is cleared even when no old database was present, so the check
    # does not run again.
    app.config['migrate_from_0_3_2'] = False
    save_config(app.config)
def test_Monitor_init():
    """Smoke-test Monitor construction plus metric and time updates."""
    cfg = get_test_config()
    connect = SDNConnect(cfg, logger, prom, faucetconfgetsetter_cl=FaucetLocalConfGetSetter)
    events = SDNEvents(logger, prom, connect)
    monitor = Monitor(logger, cfg, schedule, events.job_queue, connect, prom)
    # All hosts share this template; individual entries override a few fields.
    base_host = {
        'source': 'poseidon',
        'role': 'unknown',
        'state': 'unknown',
        'ipv4_os': 'unknown',
        'tenant': 'vlan1',
        'port': 1,
        'segment': 'switch1',
        'ipv4': '123.123.123.123',
        'mac': '00:00:00:00:00:00',
        'ipv6': '0',
    }
    hosts = [
        dict(base_host, active=0, id='foo1'),
        dict(base_host, active=1, id='foo2'),
        dict(base_host, active=0, id='foo3'),
        dict(base_host, active=1, source='poseidon1', port=2, ipv4='2106::1', id='foo4'),
        dict(base_host, active=1, ipv4='::', id='foo5'),
    ]
    monitor.prom.update_metrics(hosts)
    events.update_prom_var_time('last_rabbitmq_routing_key_time', 'routing_key', 'foo')
class Event(object):
    """A parsed event message of the form ``Event: <name>`` followed by
    ``Key: value`` lines (presumably an Asterisk-AMI-style protocol — TODO
    confirm).

    FIXES relative to the original:
    - register_parser/read/match take no self and are used via the class;
      the (presumably stripped) @staticmethod decorators were restored.
    - register_parser's inner wrapper now returns the parser, so using it as
      a decorator no longer rebinds the decorated name to None.
    - __init__ no longer uses a mutable default argument (keys={}), which
      shared one dict across all instances created without keys.
    - the bare ``except:`` was narrowed to ``except Exception:``.
    """

    # Matches the first line of an event block (case-insensitive).
    match_regex = re.compile('^Event: .*', re.IGNORECASE)
    # Registry of per-key parser factories, keyed by event name.
    parsers = {}

    @staticmethod
    def register_parser(*event_name):
        """Decorator factory: register the decorated parser for each name."""
        def wrapper(parser):
            for name in event_name:
                Event.parsers[name] = parser
            return parser
        return wrapper

    @staticmethod
    def read(event):
        """Parse a raw event string into an Event instance.

        Raises if the first line's key is not 'event' (case-insensitive).
        Malformed subsequent lines are skipped (best-effort parsing).
        """
        lines = event.splitlines()
        (key, value) = lines[0].split(': ', 1)
        if (not (key.lower() == 'event')):
            raise Exception()
        name = value
        keys = {}
        for i in range(1, len(lines)):
            try:
                (key, value) = lines[i].split(': ', 1)
                if (key in Event.parsers):
                    # Registered parser factories get a chance to transform the pair.
                    Event.parsers[key](name, keys)(key, value)
                else:
                    keys[key] = value
            except Exception:
                # Best-effort: ignore lines that do not parse as 'Key: value'.
                pass
        return Event(name, keys)

    @staticmethod
    def match(event):
        """Return True if the raw string looks like an event block."""
        return bool(Event.match_regex.match(event))

    def __init__(self, name, keys=None):
        self.name = name
        self.keys = {} if keys is None else keys

    def __getitem__(self, item):
        return self.keys[item]

    def __setitem__(self, key, value):
        self.keys[key] = value

    def __iter__(self):
        return iter(self.keys)

    def __str__(self):
        return ('Event : %s -> %s' % (self.name, self.keys))
class WorkflowExecutionMeta(Base):
    """SQLAlchemy model describing one execution (run) of a workflow."""

    __tablename__ = 'workflow_execution'

    id = Column(Integer, autoincrement=True, primary_key=True)
    workflow_id = Column(Integer, ForeignKey('workflow.id'))
    begin_date = Column(DateTime)
    end_date = Column(DateTime)
    status = Column(String(256))
    run_type = Column(String(256))
    snapshot_id = Column(Integer, ForeignKey('workflow_snapshot.id'))
    # Offset of the last processed event; -1 means nothing processed yet.
    event_offset = Column(BigInteger, default=(- 1))
    workflow = relationship('WorkflowMeta')
    workflow_snapshot = relationship('WorkflowSnapshotMeta')

    def __init__(self, workflow_id, run_type, snapshot_id, begin_date=None, end_date=None, status=WorkflowStatus.INIT.value, event_offset=(- 1), uuid=None):
        # NOTE(review): the *uuid* parameter is assigned to the integer
        # autoincrement primary key ``id`` — presumably callers pass an
        # integer identifier (or None to let the DB assign one); confirm.
        self.workflow_id = workflow_id
        self.run_type = run_type
        self.snapshot_id = snapshot_id
        self.begin_date = begin_date
        self.end_date = end_date
        self.status = status
        self.event_offset = event_offset
        self.id = uuid
def test_goerli_eip1085_matches_goerli_chain(goerli_genesis_config):
    """A chain built from the goerli EIP-1085 config reproduces the canonical genesis header."""
    genesis_data = extract_genesis_data(goerli_genesis_config)
    state = {
        address: account.to_dict()
        for address, account in genesis_data.state.items()
    }
    params_dict = genesis_data.params.to_dict()
    chain_class = Chain.configure(
        vm_configuration=genesis_data.vm_configuration,
        chain_id=genesis_data.chain_id,
    )
    chain = chain_class.from_genesis(AtomicDB(), params_dict, state)
    head = chain.get_canonical_head()
    assert head == GOERLI_GENESIS_HEADER
def _escape(value: Any) -> str:
if isinstance(value, (list, tuple)):
value = ','.join([_escape(item) for item in value])
elif isinstance(value, (date, datetime)):
value = value.isoformat()
elif isinstance(value, bool):
value = str(value).lower()
elif isinstance(value, bytes):
return value.decode('utf-8', 'surrogatepass')
if (not isinstance(value, str)):
return str(value)
return value |
def single_line_beta_description(schema_or_field: FieldEntry, strict: Optional[bool]=True) -> None:
    """Warn (or fail, when strict) if a beta description spans multiple lines."""
    details = schema_or_field['field_details']
    if '\n' in details['beta']:
        msg: str = 'Beta descriptions must be single line.\n'
        msg += f"Offending field or field set: {details['name']}"
        strict_warning_handler(msg, strict)
def addWidget(type, parent=None, cpos=0, rpos=0, **kwargs):
    """Create, configure and grid a Tk/ttk widget of the requested kind.

    type: one of 'combo', 'ticks', 'stext', 'button', 'frame', 'tab',
        'label', 'check', 'spin'; anything else yields an Entry.
    parent: containing widget; cpos/rpos: grid column/row.
    Recognized kwargs (consumed or applied as appropriate): colspan, rowspan,
    textvariable, listvariable, selectmode, height, text, func, bind, min,
    max, onvalue, offvalue, font, sticky, values, width, justify, default,
    state, tab.  Returns the created widget.

    FIX: the two exception handlers around the 'default' fallback chain no
    longer use a bare/unused-binding except; behavior is otherwise unchanged.
    (Note: the parameter name 'type' shadows the builtin, but renaming it
    would break keyword callers, so it is kept.)
    """
    cspan = kwargs.pop('colspan', None)
    rspan = kwargs.pop('rowspan', None)
    if (type == 'combo'):
        widget = Combobox(parent, textvariable=kwargs.pop('textvariable', None))
        if ('bind' in kwargs):
            widget.bind('<<ComboboxSelected>>', kwargs['bind'])
    elif (type == 'ticks'):
        widget = Listbox(parent, listvariable=kwargs.pop('listvariable', None), selectmode=kwargs.pop('selectmode', None), height=kwargs.pop('height', None))
    elif (type == 'stext'):
        widget = scrolledtext.ScrolledText(parent, textvariable=kwargs.pop('textvariable', None))
    elif (type == 'button'):
        widget = Button(parent, text=kwargs.pop('text', None), textvariable=kwargs.pop('textvariable', None), command=kwargs.pop('func', None))
    elif (type == 'frame'):
        widget = Frame(parent, textvariable=kwargs.pop('textvariable', None))
    elif (type == 'tab'):
        widget = Notebook(parent, textvariable=kwargs.pop('textvariable', None))
    elif (type == 'label'):
        widget = Label(parent, text=kwargs.pop('text', None), textvariable=kwargs.pop('textvariable', None))
    elif (type == 'check'):
        widget = Checkbutton(parent, text=kwargs.pop('text', None), variable=kwargs.pop('textvariable', None), onvalue=kwargs.pop('onvalue', None), offvalue=kwargs.pop('offvalue', None), command=kwargs.pop('func', None))
    elif (type == 'spin'):
        widget = Spinbox(parent, from_=kwargs.pop('min', None), to=kwargs.pop('max', None), textvariable=kwargs.pop('textvariable', None))
    else:
        # Fallback: a plain Entry.
        widget = Entry(parent, textvariable=kwargs.pop('textvariable', None))
    if ('font' in kwargs):
        widget['font'] = kwargs['font']
    if ('sticky' in kwargs):
        widget.sticky = kwargs['sticky']
    if ('values' in kwargs):
        widget['values'] = kwargs['values']
    if ('width' in kwargs):
        widget.width = kwargs['width']
    if ('justify' in kwargs):
        widget['justify'] = kwargs['justify']
    if ('height' in kwargs):
        widget['height'] = kwargs['height']
    if ('default' in kwargs):
        # Widgets differ in how a default is applied: try set() (combobox
        # and friends), then insert-at-0 (entry-like), then an explicit
        # (index, value) insert (listbox-like).
        try:
            widget.set(kwargs['default'])
        except Exception:
            try:
                widget.insert(0, kwargs['default'])
            except Exception:
                widget.insert(kwargs['default'][0], kwargs['default'][1])
    if ('state' in kwargs):
        widget['state'] = kwargs['state']
    widget.grid(column=cpos, row=rpos, columnspan=cspan, rowspan=rspan)
    if ('tab' in kwargs):
        # Attach to a Notebook parent under the given tab label.
        parent.add(widget, text=kwargs['tab'])
    return widget
class PathMatcher(object):
    """Classifies filesystem paths as included or omitted via glob patterns.

    A path is omitted when it matches any omit pattern, or when include
    patterns exist and the path matches none of them.  Matching is done on
    the resolved (realpath) form.
    """

    def __init__(self, include_patterns, omit_patterns):
        self.include_patterns = include_patterns
        self.omit_patterns = omit_patterns

    def omit(self, path):
        """Truthy if *path* should be skipped."""
        resolved = os.path.realpath(path)
        matches_omit = any(
            fnmatch.fnmatch(resolved, pattern) for pattern in self.omit_patterns
        )
        if matches_omit:
            return True
        matches_include = any(
            fnmatch.fnmatch(resolved, pattern) for pattern in self.include_patterns
        )
        # With no include patterns everything (not omitted) is included.
        return self.include_patterns and not matches_include

    def include(self, path):
        """True if *path* should be processed."""
        return not self.omit(path)
def llvm_build_bin_dir(tool):
    """Return the directory (relative to the build tree) holding LLVM binaries.

    On Windows multi-config generators (Visual Studio), binaries live in a
    per-configuration subdirectory: probe the known configurations and return
    whichever actually contains clang, then fall back to the legacy
    bin/<config> layout, and finally to <build>/<default-config>/bin.
    Elsewhere the single-config layout <build>/bin is used.

    IMPROVED: decide_cmake_build_type(tool) was called twice with the same
    argument; the result is now computed once and reused (safe as long as the
    call is deterministic, which the original's double call already assumed).
    """
    build_dir = llvm_build_dir(tool)
    if (WINDOWS and ('Visual Studio' in CMAKE_GENERATOR)):
        default_cmake_build_type = decide_cmake_build_type(tool)
        # Legacy layout: <build>/bin/<config>
        old_llvm_bin_dir = os.path.join(build_dir, 'bin', default_cmake_build_type)
        new_llvm_bin_dir = None
        cmake_build_types = [default_cmake_build_type, 'Release', 'RelWithDebInfo', 'MinSizeRel', 'Debug']
        for build_type in cmake_build_types:
            # Modern layout: <build>/<config>/bin — accept the first config
            # that actually contains a clang executable.
            d = os.path.join(build_dir, build_type, 'bin')
            if os.path.isfile(os.path.join(tool.installation_path(), d, exe_suffix('clang'))):
                new_llvm_bin_dir = d
                break
        if (new_llvm_bin_dir and os.path.exists(os.path.join(tool.installation_path(), new_llvm_bin_dir))):
            return new_llvm_bin_dir
        elif os.path.exists(os.path.join(tool.installation_path(), old_llvm_bin_dir)):
            return old_llvm_bin_dir
        # Nothing found on disk: fall back to the default-config path.
        return os.path.join(build_dir, default_cmake_build_type, 'bin')
    else:
        return os.path.join(build_dir, 'bin')
class CollapseCodeExtension(BlocksExtension):
    """Markdown Blocks extension that registers the CollapseCode block."""

    def __init__(self, *args, **kwargs):
        # Default configuration: button and title labels for the expand and
        # collapse controls.
        defaults = {
            'expand_text': ['Expand', 'Set the text for the expand button.'],
            'collapse_text': ['Collapse', 'Set the text for the collapse button.'],
            'expand_title': ['expand', 'Set the text for the expand title.'],
            'collapse_title': ['collapse', 'Set the text for the collapse title.'],
        }
        self.config = defaults
        super().__init__(*args, **kwargs)

    def extendMarkdownBlocks(self, md, blocks):
        """Register the CollapseCode handler with the resolved configuration."""
        blocks.register(CollapseCode, self.getConfigs())
class CommonSegCode(CommonSegGroup):
    """A group segment representing linked code: .text plus its associated
    data/rodata/bss subsegments.

    Parses ``subsegments`` from the yaml, pairs text segments with their
    data/rodata siblings, and auto-inserts ``all_<section>`` placeholder
    segments for sections that the yaml does not mention explicitly.
    """

    def __init__(self, rom_start: Optional[int], rom_end: Optional[int], type: str, name: str, vram_start: Optional[int], args: list, yaml):
        # bss_size is read before the base constructor runs, since size
        # computations there may depend on it.
        self.bss_size: int = (yaml.get('bss_size', 0) if isinstance(yaml, dict) else 0)
        super().__init__(rom_start, rom_end, type, name, vram_start, args=args, yaml=yaml)
        self.reported_file_split = False
        # Jumptable bookkeeping: labels to emit, and a vram-keyed table map.
        self.jtbl_glabels_to_add: Set[int] = set()
        self.jumptables: Dict[(int, Tuple[(int, int)])] = {}
        # Function vram -> rodata symbols referenced by that function.
        self.rodata_syms: Dict[(int, List[Symbol])] = {}
        self.align = parse_segment_align(yaml)
        if (self.align is None):
            self.align = 16

    def needs_symbols(self) -> bool:
        # Code segments always require symbol processing.
        # NOTE(review): likely declared as @property upstream — confirm.
        return True

    def vram_end(self) -> Optional[int]:
        """End of the segment's vram range, including trailing bss.

        NOTE(review): this is referenced as a plain value (not called) at the
        end of parse_subsegments, so it was almost certainly decorated with
        @property in the original source; the decorator appears to have been
        lost in extraction — confirm before relying on this method.
        """
        if ((self.vram_start is not None) and (self.size is not None)):
            return ((self.vram_start + self.size) + self.bss_size)
        else:
            return None

    def check_rodata_sym_impl(self, func_addr: int, sym: Symbol, rodata_section: Range):
        """Record *sym* under *func_addr* if it lies inside *rodata_section*."""
        if rodata_section.is_complete():
            assert (rodata_section.start is not None)
            assert (rodata_section.end is not None)
            rodata_start: int = rodata_section.start
            rodata_end: int = rodata_section.end
            if (rodata_start <= sym.vram_start < rodata_end):
                if (func_addr not in self.rodata_syms):
                    self.rodata_syms[func_addr] = []
                self.rodata_syms[func_addr].append(sym)

    def check_rodata_sym(self, func_addr: int, sym: Symbol):
        """Associate *sym* with the function at *func_addr* when it falls in
        the .rodata or .rdata section boundaries."""
        rodata_section = self.section_boundaries.get('.rodata')
        if (rodata_section is not None):
            self.check_rodata_sym_impl(func_addr, sym, rodata_section)
        rodata_section = self.section_boundaries.get('.rdata')
        if (rodata_section is not None):
            self.check_rodata_sym_impl(func_addr, sym, rodata_section)

    def handle_alls(self, segs: List[Segment], base_segs) -> bool:
        """Expand the first ``all_<type>`` placeholder in *segs* in place.

        The placeholder is replaced with one ``<type>`` segment per base
        (text) segment, so every file receives a matching section segment.
        Returns True if an expansion was performed; callers loop until False.
        """
        for (i, elem) in enumerate(segs):
            if elem.type.startswith('all_'):
                alls = []
                rep_type = f'{elem.type[4:]}'
                replace_class = Segment.get_class_for_type(rep_type)
                for base in base_segs.items():
                    if (isinstance(elem.rom_start, int) and isinstance(self.rom_start, int)):
                        assert (self.vram_start is not None), self.vram_start
                        # Derive vram from the placeholder's rom offset.
                        vram_start = ((elem.rom_start - self.rom_start) + self.vram_start)
                    else:
                        vram_start = None
                    # base is a (name, segment) item from the OrderedDict.
                    rep: Segment = replace_class(rom_start=elem.rom_start, rom_end=elem.rom_end, type=rep_type, name=base[0], vram_start=vram_start, args=[], yaml={})
                    rep.extract = False
                    rep.given_subalign = self.given_subalign
                    rep.exclusive_ram_id = self.get_exclusive_ram_id()
                    rep.given_dir = self.given_dir
                    rep.given_symbol_name_format = self.symbol_name_format
                    rep.given_symbol_name_format_no_rom = self.symbol_name_format_no_rom
                    rep.sibling = base[1]
                    rep.parent = self
                    if rep.special_vram_segment:
                        self.special_vram_segment = True
                    alls.append(rep)
                # Splice the generated segments in place of the placeholder.
                del segs[i]
                segs[i:i] = alls
                return True
        return False

    def find_inserts(self, found_sections: typing.OrderedDict[(str, Range)]) -> 'OrderedDict[str, int]':
        """Decide where auto ``all_<section>`` segments must be inserted.

        For each auto-all section that never appeared in the yaml, record the
        subsegment index at which to insert it (the end of the nearest
        preceding section with a known end), or -1 to append at the end.
        """
        inserts: OrderedDict[(str, int)] = OrderedDict()
        section_order = self.section_order.copy()
        # .text is never auto-inserted.
        section_order.remove('.text')
        for (i, section) in enumerate(section_order):
            if (section not in options.opts.auto_all_sections):
                continue
            if (not found_sections[section].has_start()):
                # Walk backwards to the closest earlier section whose end is known.
                search_done = False
                for j in range((i - 1), (- 1), (- 1)):
                    end = found_sections[section_order[j]].end
                    if (end is not None):
                        inserts[section] = end
                        search_done = True
                        break
                if (not search_done):
                    inserts[section] = (- 1)
                pass  # no-op retained from the original source
        return inserts

    def parse_subsegments(self, segment_yaml) -> List[Segment]:
        """Build the ordered list of subsegments from the yaml description.

        Two passes over the yaml: the first records where each section type
        starts and ends (to compute auto-all insert points); the second
        constructs the actual Segment objects, links text/data/rodata
        siblings, and validates rom/vram ordering.
        """
        if ('subsegments' not in segment_yaml):
            if (not self.parent):
                raise Exception(f'No subsegments provided in top-level code segment {self.name}')
            return []
        base_segments: OrderedDict[(str, Segment)] = OrderedDict()
        ret = []
        prev_start: Optional[int] = (- 1)
        prev_vram: Optional[int] = (- 1)
        inserts: OrderedDict[(str, int)] = OrderedDict()
        self.section_boundaries = OrderedDict(((s_name, Range()) for s_name in options.opts.section_order))
        # Track at which subsegment index each section starts/ends; .text is
        # implicit and therefore not tracked.
        found_sections = OrderedDict(((s_name, Range()) for s_name in self.section_boundaries))
        found_sections.pop('.text')
        cur_section = None
        # First pass: find section extents within the subsegment list.
        for (i, subsegment_yaml) in enumerate(segment_yaml['subsegments']):
            # Single-element list entries are filler; skip them.
            if (isinstance(subsegment_yaml, list) and (len(subsegment_yaml) == 1)):
                continue
            typ = Segment.parse_segment_type(subsegment_yaml)
            if typ.startswith('all_'):
                typ = typ[4:]
            if (not typ.startswith('.')):
                typ = f'.{typ}'
            if (typ in found_sections):
                if (cur_section is None):
                    found_sections[typ].start = i
                    cur_section = typ
                elif (cur_section != typ):
                    if options.opts.check_consecutive_segment_types:
                        if found_sections[cur_section].has_end():
                            log.error(f'Section {cur_section} end encountered but was already ended earlier!')
                        if found_sections[typ].has_start():
                            log.error(f'Section {typ} start encounted but has already started earlier!')
                    found_sections[cur_section].end = i
                    found_sections[typ].start = i
                    cur_section = typ
        if (cur_section is not None):
            # -1 marks "runs to the end of the subsegment list".
            found_sections[cur_section].end = (- 1)
        inserts = self.find_inserts(found_sections)
        last_rom_end = None
        # Second pass: instantiate the segments.
        for (i, subsegment_yaml) in enumerate(segment_yaml['subsegments']):
            if (isinstance(subsegment_yaml, list) and (len(subsegment_yaml) == 1)):
                continue
            typ = Segment.parse_segment_type(subsegment_yaml)
            start = Segment.parse_segment_start(subsegment_yaml)
            if typ.startswith('all_'):
                # Explicit all_* entries become dummy segments which are
                # expanded later by handle_alls().
                dummy_seg = Segment(rom_start=start, rom_end=None, type=typ, name='', vram_start=None, args=[], yaml={})
                dummy_seg.given_subalign = self.given_subalign
                dummy_seg.exclusive_ram_id = self.exclusive_ram_id
                dummy_seg.given_dir = self.given_dir
                dummy_seg.given_symbol_name_format = self.symbol_name_format
                dummy_seg.given_symbol_name_format_no_rom = self.symbol_name_format_no_rom
                ret.append(dummy_seg)
                continue
            segment_class = Segment.get_class_for_type(typ)
            end = self.get_next_seg_start(i, segment_yaml['subsegments'])
            if (start is None):
                # Infer the start: the group's own start for the first entry,
                # otherwise the previous segment's end.
                if (i == 0):
                    start = self.rom_start
                else:
                    start = last_rom_end
            if ((start is not None) and (end is None)):
                est_size = segment_class.estimate_size(subsegment_yaml)
                if (est_size is not None):
                    end = (start + est_size)
            if ((start is not None) and (prev_start is not None) and (start < prev_start)):
                log.error(f"Error: Group segment '{self.name}' contains subsegments which are out of ascending rom order (0x{prev_start:X} followed by 0x{start:X})")
            vram = None
            if (start is not None):
                assert isinstance(start, int)
                vram = self.get_most_parent().rom_to_ram(start)
            if segment_class.is_noload():
                # noload (bss-like) segments occupy no rom space.
                start = last_rom_end
                end = last_rom_end
            segment: Segment = Segment.from_yaml(segment_class, subsegment_yaml, start, end, vram)
            if ((segment.vram_start is not None) and (prev_vram is not None) and (segment.vram_start < prev_vram)):
                log.error((f'''Error: Group segment '{self.name}' contains subsegments which are out of ascending vram order (0x{prev_vram:X} followed by 0x{segment.vram_start:X}).
''' + f"Detected when processing file '{segment.name}' of type '{segment.type}'"))
            segment.sibling = base_segments.get(segment.name, None)
            if (segment.sibling is not None):
                # Link rodata/data siblings to their text segment; which side
                # owns the link depends on the configured section order.
                if (self.section_order.index('.text') < self.section_order.index('.rodata')):
                    if segment.is_rodata():
                        segment.sibling.rodata_sibling = segment
                elif (segment.is_text() and segment.sibling.is_rodata()):
                    segment.rodata_sibling = segment.sibling
                    segment.sibling.sibling = segment
                if (self.section_order.index('.text') < self.section_order.index('.data')):
                    if segment.is_data():
                        segment.sibling.data_sibling = segment
                elif (segment.is_text() and segment.sibling.is_data()):
                    segment.data_sibling = segment.sibling
                    segment.sibling.sibling = segment
            segment.parent = self
            if segment.special_vram_segment:
                self.special_vram_segment = True
            # Record vram boundaries for each configured section type.
            # NOTE(review): this inner loop reuses the name ``i`` from the
            # enclosing loop; harmless because the outer ``i`` is rebound by
            # enumerate on the next iteration, but worth renaming upstream.
            for (i, section) in enumerate(self.section_order):
                if ((not self.section_boundaries[section].has_start()) and (dotless_type(section) == dotless_type(segment.type))):
                    if (i > 0):
                        prev_section = self.section_order[(i - 1)]
                        self.section_boundaries[prev_section].end = segment.vram_start
                    self.section_boundaries[section].start = segment.vram_start
            segment.bss_contains_common = self.bss_contains_common
            ret.append(segment)
            if segment.is_text():
                base_segments[segment.name] = segment
            if (self.section_order.index('.rodata') < self.section_order.index('.text')):
                if (segment.is_rodata() and (segment.sibling is None)):
                    base_segments[segment.name] = segment
            prev_start = start
            prev_vram = segment.vram_start
            if (end is not None):
                last_rom_end = end
        orig_len = len(ret)
        # Insert the auto-all placeholder segments (reverse iteration —
        # presumably so earlier insert indices remain valid; confirm).
        for section in reversed(inserts):
            idx = inserts[section]
            if (idx == (- 1)):
                idx = orig_len
            if ((section == 'bss') and (self.vram_start is not None) and (self.rom_end is not None) and (self.rom_start is not None)):
                # bss begins where the rom-backed data ends.
                rom_start = self.rom_end
                vram_start = ((self.vram_start + self.rom_end) - self.rom_start)
            else:
                rom_start = None
                vram_start = None
            new_seg = Segment(rom_start=rom_start, rom_end=None, type=('all_' + section), name='', vram_start=vram_start, args=[], yaml={})
            new_seg.given_subalign = self.given_subalign
            new_seg.exclusive_ram_id = self.exclusive_ram_id
            new_seg.given_dir = self.given_dir
            new_seg.given_symbol_name_format = self.symbol_name_format
            new_seg.given_symbol_name_format_no_rom = self.symbol_name_format_no_rom
            ret.insert(idx, new_seg)
        # Repeatedly expand all_* placeholders until none remain.
        check = True
        while check:
            check = self.handle_alls(ret, base_segments)
        rodata_section = (self.section_boundaries.get('.rodata') or self.section_boundaries.get('.rdata'))
        if ((rodata_section is not None) and rodata_section.has_start() and (not rodata_section.has_end())):
            # NOTE(review): vram_end is used as a value here — see the note on
            # vram_end above about the presumably-stripped @property.
            assert (self.vram_end is not None)
            rodata_section.end = self.vram_end
        return ret

    def scan(self, rom_bytes):
        """Scan text subsegments first so later data scans can use their symbols."""
        for sub in self.subsegments:
            if (sub.is_text() and sub.should_scan()):
                sub.scan(rom_bytes)
        for sub in self.subsegments:
            if ((not sub.is_text()) and sub.should_scan()):
                sub.scan(rom_bytes)
def create_organisations(random):
    """Seed a 5x5 hierarchy of regional teams, STPs, PCNs, CCGs and practices."""
    for region_ix in range(5):
        region = RegionalTeam.objects.create(
            code='Y0{}'.format(region_ix), name='Region {}'.format(region_ix)
        )
        for stp_ix in range(5):
            stp = STP.objects.create(
                code='E{}{}'.format(region_ix, stp_ix),
                name='STP {}/{}'.format(region_ix, stp_ix),
            )
            pcns = [
                PCN.objects.create(
                    code='E00000{}{}{}'.format(region_ix, stp_ix, pcn_ix),
                    name='PCN {}/{}/{}'.format(region_ix, stp_ix, pcn_ix),
                )
                for pcn_ix in range(5)
            ]
            # Practices within an STP are dealt PCNs round-robin.
            get_next_pcn = itertools.cycle(pcns).__next__
            for ccg_ix in range(5):
                ccg = PCT.objects.create(
                    regional_team=region,
                    stp=stp,
                    # '0' is replaced since CCG codes are letter-based.
                    code='{}{}{}'.format(region_ix, stp_ix, ccg_ix).replace('0', 'A'),
                    name='CCG {}/{}/{}'.format(region_ix, stp_ix, ccg_ix),
                    org_type='CCG',
                )
                for prac_ix in range(5):
                    Practice.objects.create(
                        ccg=ccg,
                        pcn=get_next_pcn(),
                        code='P0{}{}{}{}'.format(region_ix, stp_ix, ccg_ix, prac_ix),
                        name='Practice {}/{}/{}/{}'.format(region_ix, stp_ix, ccg_ix, prac_ix),
                        setting=4,
                    )
class TestComposerThread__determine_tag_actions(ComposerThreadBaseTestCase):
    # NOTE(review): the bare string expression below was almost certainly a
    # stripped "@mock.patch('bodhi.server.models.buildsys.get_session')"
    # decorator — the extra get_session parameter on the test method matches
    # mock.patch's injection.  Confirm against the original source; as
    # written, the string is a discarded expression and get_session is never
    # supplied by the test runner.
    ('bodhi.server.models.buildsys.get_session')
    def test_from_tag_not_found(self, get_session):
        """An update whose build tag is unknown is ejected with an explanatory message."""
        tags = ['some', 'unknown', 'tags']
        # Koji session returns tags that do not include the release candidate tag.
        get_session.return_value.listTags.return_value = [{'name': n} for n in tags]
        task = self._make_task()
        t = ComposerThread(self.semmock, task['composes'][0], 'bowlofeggs', self.Session, self.tempdir)
        t.compose = Compose.from_dict(self.db, task['composes'][0])
        t.db = self.db
        t.id = getattr(self.db.query(Release).one(), '{}_tag'.format('stable'))
        t.skip_compose = True
        expected_messages = (update_schemas.UpdateEjectV1.from_dict({'repo': 'f17-updates', 'update': self.db.query(Update).one().__json__(), 'reason': f"Cannot find relevant tag for bodhi-2.0-1.fc17. None of {tags} are in {Release.get_tags()[0]['candidate']}.", 'request': UpdateRequest.testing, 'release': t.compose.release, 'agent': 'bowlofeggs'}),)
        with mock_sends(*expected_messages):
            t._determine_tag_actions()
        expected_messages[0].body['update'] = self.db.query(Update).one().__json__()
        # No tag moves/additions should have been scheduled for the ejected update.
        for attr in ('add_tags_sync', 'move_tags_sync', 'add_tags_async', 'move_tags_async'):
            assert (getattr(t, attr) == [])
        self.db.expire(t.compose, ['updates'])
        assert (len(t.compose.updates) == 0)
        self.assert_sems(0)
def test_mask_arguments_null_list():
    """AES masking of a [None] value list should pass the None through untouched."""
    strategy = AesEncryptionMaskingStrategy(AesEncryptionMaskingConfiguration())
    cache_secrets()
    result = strategy.mask([None], request_id)
    assert result == [None]
    clear_cache_secrets(request_id)
class pool2d_base(Operator):
    """Base op for 2D pooling with a square kernel, uniform stride and padding.

    Output shapes are computed by rendering and exec-ing SHAPE_FUNC_TEMPLATE,
    so the shape arithmetic lives in that template, not in this class.
    """

    def __init__(self, stride, pad, kernel_size, reduce_func) -> None:
        super().__init__()
        self._attrs['op'] = 'pool2d'
        self._attrs['stride'] = stride
        self._attrs['pad'] = pad
        # reduce_func selects the pooling reduction performed by the subclass.
        self._attrs['reduce_func'] = reduce_func
        self._attrs['kernel_size'] = kernel_size
        # Square kernel: height and width are both kernel_size.
        self._attrs['KH'] = kernel_size
        self._attrs['KW'] = kernel_size
        self.shape_eval_template = SHAPE_FUNC_TEMPLATE
        self.shape_save_template = SHAPE_ASSIGNMENT_TEMPLATE
        self.exec_cond_template = EXEC_COND_TEMPLATE

    def _infer_shape(self, x: List[int]):
        """Compute one concrete output shape [NO, HO, WO, CO] for an NHWC input shape.

        The shape template is rendered to Python source and exec'd; the
        template is compile-time code generated by this library, not user input.
        """
        eval_func = self.shape_eval_template.render(indent='', dtype='', div='//', stride=self._attrs['stride'], pad=self._attrs['pad'], x_dim0=x[0], x_dim1=x[1], x_dim2=x[2], x_dim3=x[3], kernel_h=self._attrs['kernel_size'], kernel_w=self._attrs['kernel_size'])
        output = {}
        exec(eval_func, output)
        return [int(output['NO']), int(output['HO']), int(output['WO']), int(output['CO'])]

    def _infer_shapes(self, x: Tensor):
        """Build the symbolic output shape covering all combinations of input dim values."""
        x_shape_values = [var._attrs['values'] for var in x._attrs['shape']]
        # Cartesian product over the possible values of each (dynamic) dimension.
        x_shapes = itertools.product(*x_shape_values)
        y_shapes = []
        for x_shape in x_shapes:
            y_shape = self._infer_shape(x_shape)
            y_shapes.append(y_shape)

        def unique(vector):
            return sorted(set(vector))
        # The batch dim is passed through unchanged; H/W/C become int vars over
        # the distinct values observed across all input-shape combinations.
        output_shape = [x._attrs['shape'][0], shape_utils.gen_int_var(unique([d[1] for d in y_shapes])), shape_utils.gen_int_var(unique([d[2] for d in y_shapes])), shape_utils.gen_int_var(unique([d[3] for d in y_shapes]))]
        return output_shape

    def _invert_exec_key(self, key: str):
        """Extract all integers embedded in an exec key string."""
        tmp = re.findall('(\\d+)', key)
        return [int(x) for x in tmp]

    def _gen_exec_key(self, shape):
        """Render the exec-key template for a concrete 4D shape (newlines stripped)."""
        return self.exec_key_template.render(x_dim0=shape[0], x_dim1=shape[1], x_dim2=shape[2], x_dim3=shape[3]).replace('\n', '')

    def _extract_exec_path(self, x: Tensor):
        # Pooling uses a single, unconditional execution path.
        self._attrs['exec_path'] = OrderedDict()
        self._attrs['exec_path']['true'] = ''

    def _signature(self) -> str:
        """Return a human-readable op signature.

        NOTE(review): this reads self._attrs['CO'], which is never set in this
        class — presumably populated elsewhere before use; confirm.
        """
        signature = 'pooling2d: K=[{kh}, {kw}], S=[{s}], P=[{p}], CO=[{co}]'.format(kh=self._attrs['KH'], kw=self._attrs['KW'], s=self._attrs['stride'], p=self._attrs['pad'], co=self._attrs['CO'])
        return signature

    def __call__(self, x: Tensor) -> List[Tensor]:
        """Apply the pooling op to tensor ``x`` and return the output tensor.

        (Annotated ``List[Tensor]`` but the body returns a single Tensor.)
        """
        self._attrs['inputs'] = [x]
        self._set_depth()
        self._extract_exec_path(x)
        output_shape = self._infer_shapes(x)
        output = Tensor(output_shape, src_ops={self}, dtype=x._attrs['dtype'])
        self._attrs['outputs'] = [output]
        return output

    def _get_op_attributes(self):
        """Return the constructor kwargs recorded in _attrs (for op reconstruction)."""
        target_attrs = ['stride', 'pad', 'kernel_size', 'reduce_func']
        attr = {}
        for target_attr in target_attrs:
            if (target_attr in self._attrs):
                attr[target_attr] = self._attrs[target_attr]
        return attr

    def gen_function(self) -> str:
        """Look up and invoke the backend-specific codegen function for this op."""
        target = backend.target.Target.current()
        func_key = '{target}.{op}.gen_function'.format(target=target.name(), op=self._attrs['op'])
        func = registry.get(func_key)
        return func(self._attrs, self.exec_cond_template, self.shape_eval_template, self.shape_save_template)
class CatalogTable(object):
    """A small in-memory table: ``data`` is a sequence of rows and ``columns``
    the matching column names."""

    def __init__(self, data, columns):
        self.data = data
        self.columns = columns

    def as_list_of_dicts(self):
        """Return the rows as a list of {column_name: value} dicts."""
        return [{c: r[i] for (i, c) in enumerate(self.columns)} for r in self.data]

    def as_json(self):
        """Return the table serialized as an indented JSON array of row dicts."""
        return json.dumps(self.as_list_of_dicts(), indent=4)

    def write(self, file_name):
        """Write the table as delimited text, header row first.

        The delimiter is chosen from the extension: ``.csv`` -> comma,
        ``.tsv`` -> tab.  Returns None.  Bugfix: the file used to be opened
        (and thus created empty) before the extension check; now an
        unrecognized extension writes nothing at all.
        """
        if file_name.endswith('.csv'):
            delimiter = ','
        elif file_name.endswith('.tsv'):
            delimiter = '\t'
        else:
            # Unknown extension: preserve the old "silently do nothing"
            # contract, but without the empty-file side effect.
            return None
        # newline='' is required by the csv module to avoid blank lines /
        # broken quoting on platforms with \r\n line endings.
        with open(file_name, 'w', newline='') as f:
            writer = csv.writer(f, delimiter=delimiter)
            writer.writerow(self.columns)
            writer.writerows(self.data)

    def __str__(self):
        return self.as_json()

    def __repr__(self):
        return self.__str__()
def run_all(joblist):
    """Render forward-model JSON for every job in ``joblist`` and verify the dump."""
    run_id = 'run_all'
    substitutions = SubstitutionList.from_dict({'DEFINE': [['<RUNPATH>', './']]})
    config = ErtConfig(forward_model_list=set_up_forward_model(joblist), substitution_list=substitutions)
    verify_json_dump(joblist, config.forward_model_data_to_json(run_id), range(len(joblist)), run_id)
def olar(nome: str=Argument(..., help='Seu primeiro nome', callback=lower), email: str=Argument(..., metavar='<email>'), senha: str=Option(..., prompt=True, hide_input=True, confirmation_prompt=True, help='A senha sera perguntada no prompt!'), version: bool=Option(False, '--version', '-v', '--versao', callback=version, is_eager=True, is_flag=True, case_sensitive=False)):
    """CLI command: echo back the given name, e-mail and password.

    ``nome`` is transformed by the ``lower`` callback (presumably lower-cased),
    ``senha`` is collected via a hidden, confirmed prompt, and
    ``--version``/``-v``/``--versao`` is an eager flag handled by the
    ``version`` callback.  (Help texts are user-facing runtime strings and are
    intentionally left in Portuguese.)
    """
    print(f'nome={nome!r}, email={email!r}, senha={senha!r}')
def lines(geom, **kwargs):
    """Render a (Multi)LineString GeoJSON geometry, dispatching on its type.

    Raises SvgisError for any other geometry type.
    """
    geom_type = geom['type']
    if geom_type == 'LineString':
        return linestring(geom['coordinates'], **kwargs)
    if geom_type == 'MultiLineString':
        return multilinestring(geom['coordinates'], **kwargs)
    raise SvgisError('Unexpected geometry type. Expected LineString or MultiLineString, but got: ' + geom_type)
# NOTE(review): the bare string below looks like a stripped decorator — likely
# a marker such as ``@using('pyscf')`` that skips the test when pyscf is not
# installed.  Confirm against the original file and restore.
('pyscf')
def test_geometry_get_restart_info():
    """get_restart_info() must expose atoms, cartesian coords and calculator info."""
    geom = geom_loader('lib:benzene.xyz')
    calc = PySCF(method='scf', basis='def2svp')
    geom.set_calculator(calc)
    restart = geom.get_restart_info()
    atoms = restart['atoms']
    coords = restart['cart_coords']
    assert (atoms == geom.atoms)
    # len(geom.atoms * 3) == 3 * natoms: one x/y/z component per atom.
    assert (len(coords) == len((geom.atoms * 3)))
    assert ('calc_info' in restart)
class ShardedFileComponents():
    """Parses a sharded-file pattern into directory, stem, shard spec and extension.

    The shard spec may be '*' (any shard), a shard count N, or an explicit
    'DDDDD-of-DDDDD' pair.  shard_index / shard_total are -1 when unknown.
    """

    def __init__(self, filepattern) -> None:
        (self.directory, root) = os.path.split(filepattern)
        # NOTE(review): '[^]+' is almost certainly a mangled character class —
        # a separator character appears to have been lost (e.g. '@' giving
        # r'([^@]+)@([^.]+)(\.[^.]*)?$').  As written the pattern does not
        # split stem/shards the way the code below expects; confirm against
        # the original source.
        m = re.match('([^]+)([^.]+)(\\.[^.]*)?$', root)
        if (not m):
            raise ValueError('Not a sharded file: {}'.format(filepattern))
        # Group 3 is the optional extension, including the leading dot.
        self.extension: str = (m.group(3) if (m.lastindex >= 3) else '')
        self.stem: str = (m.group(1) if (m.lastindex >= 1) else '')
        shards = (m.group(2) if (m.lastindex >= 2) else '')
        if (shards == '*'):
            # Star pattern: both shard index and total are unknown.
            self.shard_index: int = (- 1)
            self.shard_total: int = (- 1)
        else:
            try:
                # A plain integer shard count: total known, index unknown.
                self.shard_total = int(shards)
                self.shard_index = (- 1)
            except ValueError:
                # Otherwise it must be an explicit 'DDDDD-of-DDDDD' spec.
                m = re.match('(\\d{5})-of-(\\d{5})$', shards)
                if (not m):
                    raise ValueError('Invalid shard specification: {}'.format(shards))
                self.shard_index = int(m.group(1))
                self.shard_total = int(m.group(2))
        if (self.directory == ''):
            self.directory = '.'
        if (self.shard_total == 0):
            raise ValueError('Invalid shard total 0')

    def get_shard_filename(self, index: int) -> str:
        """Return the concrete filename for shard ``index`` (0 <= index < shard_total)."""
        if ((index < 0) or (index >= self.shard_total)):
            raise ValueError('Invalid shard index')
        return os.path.join(self.directory, '{}{:05d}-of-{:05d}{}'.format(self.stem, index, self.shard_total, self.extension))

    def is_at_n_pattern(self) -> bool:
        """True for a shard-count pattern: total known, index unknown."""
        return ((self.shard_total > 0) and (self.shard_index == (- 1)))

    def is_at_star_pattern(self) -> bool:
        """True for a '*' pattern: neither total nor index known."""
        return ((self.shard_total == (- 1)) and (self.shard_index == (- 1)))
class TestPrismaticSerialize(util.ColorAssertsPyTest):
    """Serialization tests for the custom ``--prismatic`` color space."""

    # (input color, to_string() options, expected serialized output)
    COLORS = [('color(--prismatic 0 0.3 0.75 0.5 / 0.5)', {}, 'color(--prismatic 0 0.3 0.75 0.5 / 0.5)'), ('color(--prismatic 0 0.3 0.75 0.5)', {'alpha': True}, 'color(--prismatic 0 0.3 0.75 0.5 / 1)'), ('color(--prismatic 0 0.3 0.75 0.5 / 0.5)', {'alpha': False}, 'color(--prismatic 0 0.3 0.75 0.5)'), ('color(--prismatic none 0.3 0.75 0.5)', {}, 'color(--prismatic 0 0.3 0.75 0.5)'), ('color(--prismatic none 0.3 0.75 0.5)', {'none': True}, 'color(--prismatic none 0.3 0.75 0.5)'), ('color(--prismatic 1.2 0.2 0 0.5)', {}, 'color(--prismatic 1 0.30075 0.10941 0.58984)'), ('color(--prismatic 1.2 0.2 0 0.5)', {'fit': False}, 'color(--prismatic 1.2 0.2 0 0.5)')]

    # Bugfix: the parametrize decorator had lost its '@pytest.mark.' prefix,
    # leaving a bare '.parametrize(...)' line — a syntax error.
    @pytest.mark.parametrize('color1,options,color2', COLORS)
    def test_colors(self, color1, options, color2):
        """Serializing color1 with the given options must produce color2."""
        self.assertEqual(Color(color1).to_string(**options), color2)
def test_filter_by_nonexistent_identity_reference():
    """Filtering on an identity key absent from the request must drop every row."""
    # The request identity only carries a phone number, so the 'email'
    # identity referenced by the filter config does not exist.
    identity_data: Dict[(str, Any)] = {'phone_number': '123-1234-1235'}
    config = FilterPostProcessorConfiguration(field='email_contact', value={'identity': 'email'})
    # NOTE(review): the literal row values were lost in the original source
    # ("{'id': ,"); plausible placeholders restored.  Their exact values do
    # not affect the assertion, since the filter's identity key is missing.
    data = [{'id': 1, 'email_contact': 'somebody@example.com', 'name': 'Somebody Awesome'}, {'id': 2, 'email_contact': 'somebody-cool@example.com', 'name': 'Somebody Cool'}]
    processor = FilterPostProcessorStrategy(configuration=config)
    # NOTE(review): identity_data was built but unused in the mangled source;
    # restored as the second argument to process() — confirm the signature.
    result = processor.process(data, identity_data)
    assert (result == [])
class AbstractFieldMonitor(Monitor, ABC):
    """Abstract base for monitors that record electromagnetic field components."""

    # Which E/H field components are recorded; defaults to all six.
    fields: Tuple[(EMField, ...)] = pydantic.Field(['Ex', 'Ey', 'Ez', 'Hx', 'Hy', 'Hz'], title='Field Components', description='Collection of field components to store in the monitor.')

    # Spatial downsampling factor per axis (1 = no downsampling); per the
    # field description, the first and last monitor grid points are always kept.
    interval_space: Tuple[(pydantic.PositiveInt, pydantic.PositiveInt, pydantic.PositiveInt)] = pydantic.Field((1, 1, 1), title='Spatial Interval', description='Number of grid step intervals between monitor recordings. If equal to 1, there will be no downsampling. If greater than 1, the step will be applied, but the first and last point of the monitor grid are always included.')

    # Whether recorded fields are snapped to primal grid nodes (cell boundaries).
    colocate: bool = pydantic.Field(True, title='Colocate Fields', description='Toggle whether fields should be colocated to grid cell boundaries (i.e. primal grid nodes).')
def test_ref_task_more_2():
    """Exercises two reference tasks chained with an explicit ordering inside a workflow."""
    # NOTE(review): the two bare _task(...) calls below look like stripped
    # decorators — presumably ``@_task(...)`` applied to ref_t1 / ref_t2 (and
    # wf1 likely carried a workflow decorator as well).  As written they are
    # no-op expressions; confirm against the original file and restore.
    _task(project='flytesnacks', domain='development', name='recipes.aaa.simple.join_strings', version='553018f39e519bdb2597b652639c30ce16b99c79')
    def ref_t1(a: typing.List[str]) -> str:
        ...
    _task(project='flytesnacks', domain='development', name='recipes.aaa.simple.join_string_second', version='553018f39e519bdb2597b652639c30ce16b99c79')
    def ref_t2(a: typing.List[str]) -> str:
        ...

    def wf1(in1: typing.List[str]) -> str:
        x = ref_t1(a=in1)
        y = ref_t2(a=in1)
        # Explicit ordering: force y to run before x.
        (y >> x)
        return x
    with task_mock(ref_t1) as mock_x:
        with task_mock(ref_t2) as mock_y:
            mock_y.return_value = 'ignored'
            mock_x.return_value = 'hello'
            # The workflow's result is ref_t1's (mocked) output.
            assert (wf1(in1=['hello', 'world']) == 'hello')
class EntityExtractor(BaseEstimator, TransformerMixin):
    """Sklearn-style transformer that pulls named entities whose chunk label is
    in ``labels`` out of parsed documents."""

    def __init__(self, labels=GOODLABELS, **kwargs):
        self.labels = labels

    def get_entities(self, document):
        """Collect lower-cased entity strings from every sentence of ``document``."""
        found = []
        for paragraph in document:
            for sentence in paragraph:
                for tree in ne_chunk(sentence):
                    if not hasattr(tree, 'label'):
                        continue
                    if tree.label() not in self.labels:
                        continue
                    found.append(' '.join(child[0].lower() for child in tree))
        return found

    def fit(self, documents, labels=None):
        """No-op fit, present for sklearn pipeline compatibility."""
        return self

    def transform(self, documents):
        """Lazily yield the entity list for each document's first element."""
        for document in documents:
            yield self.get_entities(document[0])
def test_form_args_embeddeddoc():
    """form_args labels must apply to both embedded-document and plain fields."""
    (app, db, admin) = setup()

    class Info(db.EmbeddedDocument):
        name = db.StringField()
        age = db.StringField()

    class Model(db.Document):
        info = db.EmbeddedDocumentField('Info')
        timestamp = db.DateTimeField()

    labels = {'info': {'label': 'Information'}, 'timestamp': {'label': 'Last Updated Time'}}
    view = CustomModelView(Model, form_args=labels)
    admin.add_view(view)
    form = view.create_form()
    assert form.timestamp.label.text == 'Last Updated Time'
    assert form.info.label.text == 'Information'
def fetch_exchange(zone_key1, zone_key2, session=None, target_datetime=None, logger=None) -> dict:
    """Fetch exchange flow data for the sorted zone pair, dispatching to the
    appropriate processor.  Raises NotImplementedError for unknown pairs."""
    exchange_df = create_exchange_df(get_data(exchange_url, target_datetime))
    # Normalize the pair once: sorted keys joined by '->'.
    sorted_zone_keys = '->'.join(sorted([zone_key1, zone_key2]))
    if sorted_zone_keys == 'GB->GB-NIR':
        return moyle_processor(exchange_df)
    if sorted_zone_keys == 'GB-NIR->IE':
        return IE_processor(exchange_df)
    raise NotImplementedError('This exchange pair is not implemented')
def launch(main_func, num_gpus_per_machine, num_machines=1, machine_rank=0, dist_url=None, args=(), timeout=DEFAULT_TIMEOUT):
    """Launch ``main_func`` either inline or as one spawned process per local GPU.

    Side effect: ``args[0]`` is mutated — its ``distributed`` attribute is set
    from the computed world size before any worker starts.
    """
    world_size = (num_machines * num_gpus_per_machine)
    args[0].distributed = (world_size > 1)
    if args[0].distributed:
        if (dist_url == 'auto'):
            assert (num_machines == 1), 'dist_url=auto not supported in multi-machine jobs.'
            # Pick a free local port for the single-machine rendezvous.
            port = _find_free_port()
            dist_url = f'tcp://127.0.0.1:{port}'
        # NOTE(review): if dist_url is None with num_machines > 1, .startswith
        # raises AttributeError — presumably callers always pass an explicit
        # URL in multi-machine mode; confirm.
        if ((num_machines > 1) and dist_url.startswith('file://')):
            logger = logging.getLogger(__name__)
            logger.warning('file:// is not a reliable init_method in multi-machine jobs. Prefer tcp://')
        # One worker per local GPU; each worker calls main_func after dist init.
        mp.spawn(_distributed_worker, nprocs=num_gpus_per_machine, args=(main_func, world_size, num_gpus_per_machine, machine_rank, dist_url, args, timeout), daemon=False)
    else:
        # Non-distributed path: run main_func in this process.
        main_func(*args)
class Fixed(Raw):
    """Marshalling field that renders a number with a fixed decimal precision."""

    def __init__(self, decimals=5, **kwargs):
        super(Fixed, self).__init__(**kwargs)
        # e.g. decimals=5 -> quantization exponent Decimal('0.00001')
        self.precision = MyDecimal('0.' + '0' * (decimals - 1) + '1')

    def format(self, value):
        """Quantize ``value`` to the configured precision (banker's rounding)."""
        dvalue = MyDecimal(value)
        # Reject non-finite / non-normal values, except an exact zero.
        if dvalue != ZERO and not dvalue.is_normal():
            raise MarshallingException('Invalid Fixed precision number.')
        return six.text_type(dvalue.quantize(self.precision, rounding=ROUND_HALF_EVEN))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.