code stringlengths 281 23.7M |
|---|
class TelegramError(Exception):
    """Base exception for Telegram errors.

    Strips the server's noise prefixes from *message* before storing it.
    """
    __slots__ = ('message',)

    def __init__(self, message: str):
        super().__init__()
        # Peel off the known prefixes in the order the API prepends them.
        cleaned = message
        for prefix in ('Error: ', '[Error]: ', 'Bad Request: '):
            cleaned = _lstrip_str(cleaned, prefix)
        # Capitalize only when at least one prefix was actually removed.
        if cleaned != message:
            cleaned = cleaned.capitalize()
        self.message: str = cleaned

    def __str__(self) -> str:
        return self.message

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}('{self.message}')"

    def __reduce__(self) -> Tuple[(type, Tuple[str])]:
        # Pickle support: rebuild from the cleaned message.
        return (self.__class__, (self.message,))
class PynagError(Exception):
    """Base pynag exception carrying an optional code and raw error string."""

    def __init__(self, message, errorcode=None, errorstring=None, *args, **kwargs):
        self.message = message
        self.errorcode = errorcode
        self.errorstring = errorstring
        try:
            super(PynagError, self).__init__(message, *args, **kwargs)
        except TypeError:
            # Some bases reject extra keyword arguments; fall back to the
            # plain Exception initializer.
            Exception.__init__(self, message, *args, **kwargs)
def complex_email_validation(test):
    """Skip email tests exercising intentionally-unsupported validation."""
    if test.subject != 'email':
        return
    message = 'Complex email validation is (intentionally) unsupported.'
    descriptions = (
        'an invalid domain',
        'an invalid IPv4-address-literal',
        'dot after local part is not valid',
        'dot before local part is not valid',
        'two subsequent dots inside local part are not valid',
    )
    # Mirror the original or-chain: return the first truthy skip result,
    # or the last result when none is truthy.
    result = None
    for description in descriptions:
        result = skip(message=message, description=description)(test)
        if result:
            return result
    return result
_required
_POST
def user_permissions_manage(request, username):
    """Dispatch a POSTed permissions action for *username*.

    Checks the POST payload for the known action flags in a fixed priority
    order and delegates to the matching view; with no flag set, redirects
    back to the user's detail page.
    """
    actions = (
        ('user_block', user_block),
        ('user_unblock', user_unblock),
        ('user_trust', user_trust),
        ('user_untrust', user_untrust),
    )
    for field, handler in actions:
        if request.POST.get(field):
            return handler(request, username)
    return HttpResponseRedirect(reverse('user_details', args=[username]))
class IBNDenseUnit(nn.Module):
    """DenseNet unit with optional IBN in the 1x1 conv and optional dropout.

    Produces ``out_channels - in_channels`` new feature channels and
    concatenates them with the input (dense connectivity).
    """

    def __init__(self, in_channels, out_channels, dropout_rate, conv1_ibn):
        super(IBNDenseUnit, self).__init__()
        self.use_dropout = dropout_rate != 0.0
        bn_size = 4
        growth = out_channels - in_channels
        mid_channels = growth * bn_size
        self.conv1 = ibn_pre_conv1x1_block(in_channels=in_channels, out_channels=mid_channels, use_ibn=conv1_ibn)
        self.conv2 = pre_conv3x3_block(in_channels=mid_channels, out_channels=growth)
        if self.use_dropout:
            self.dropout = nn.Dropout(p=dropout_rate)

    def forward(self, x):
        skip = x
        x = self.conv1(x)
        x = self.conv2(x)
        if self.use_dropout:
            x = self.dropout(x)
        # Dense connection: stack the new features onto the input.
        return torch.cat((skip, x), dim=1)
class EventLoop(asyncio.SelectorEventLoop):
    """Selector event loop that records unhandled exceptions on
    EventLoopManager and shuts down on SIGTERM/SIGINT (POSIX only)."""

    def __init__(self, selector=None):
        super(EventLoop, self).__init__(selector)
        # Passed unbound on purpose: asyncio invokes handlers as
        # handler(loop, context), so the loop instance fills `self`.
        self.set_exception_handler(EventLoop.handleException)
        if (sys.platform != 'win32'):
            # add_signal_handler is not available for these signals on Windows.
            signals = (signal.SIGTERM, signal.SIGINT)
            for s in signals:
                # Default-arg binding (sig=s) freezes the current signal value
                # instead of the loop variable's final value.
                self.add_signal_handler(s, (lambda sig=s: asyncio.create_task(self.shutdown(sig))))

    async def shutdown(self, signal=None):
        """Cancel all outstanding tasks, await their completion, stop the loop."""
        if (signal is not None):
            Logger.LogLine(Logger.INFO, f'Received exit signal {signal.name}')
        # Everything except the task currently running shutdown() itself.
        tasks = [t for t in asyncio.all_tasks(self) if (t is not asyncio.current_task())]
        for task in tasks:
            task.cancel()
        # return_exceptions=True: swallow CancelledError from the cancelled tasks.
        (await asyncio.gather(*tasks, return_exceptions=True))
        self.stop()

    def handleException(self, context):
        # Collect raised exceptions for later inspection; error contexts
        # without an exception object are ignored.
        if ('exception' in context):
            with EventLoopManager.exceptionLock:
                EventLoopManager.exceptions.append(context['exception'])
class TestDataset(unittest.TestCase):
    """Tests for RoundRobinZipDatasets pairing of unequal-length datasets."""

    def setUp(self):
        # Silence expected log output during the tests.
        logging.disable(logging.CRITICAL)

    def tearDown(self):
        logging.disable(logging.NOTSET)

    def test_round_robin_zip_datasets(self):
        long_dataset = lang_pair_dataset([10, 9, 8, 11])
        short_dataset = lang_pair_dataset([11, 9])
        dataset = RoundRobinZipDatasets({'a': long_dataset, 'b': short_dataset})
        # ordered_indices() must run first; it establishes the pairing order.
        dataset.ordered_indices()
        assert (dataset.longest_dataset is long_dataset)
        # Expected sample(index, length) pairs come from the fixture's ordering.
        self.assertEqual(dict(dataset[0]), {'a': sample(2, 8), 'b': sample(1, 9)})
        self.assertEqual(dict(dataset[2]), {'a': sample(0, 10), 'b': sample(1, 9)})

    def test_round_robin_zip_datasets_filtered(self):
        long_dataset = lang_pair_dataset([10, 20, 8, 11, 1000, 7, 12])
        short_dataset = lang_pair_dataset([11, 20, 9, 1000])
        dataset = RoundRobinZipDatasets({'a': long_dataset, 'b': short_dataset})
        idx = dataset.ordered_indices()
        # Per-key size limits: 'a' capped at 19, 'b' at 900.
        (idx, _) = dataset.filter_indices_by_size(idx, {'a': 19, 'b': 900})
        self.assertEqual(list(idx), [0, 1, 2, 3, 4])
        self.assertEqual(dict(dataset[0]), {'a': sample(5, 7), 'b': sample(2, 9)})
        self.assertEqual(dict(dataset[2]), {'a': sample(0, 10), 'b': sample(1, 20)})
        self.assertEqual(dict(dataset[4]), {'a': sample(6, 12), 'b': sample(0, 11)})

    def test_round_robin_zip_datasets_filtered_with_tuple(self):
        long_dataset = lang_pair_dataset([10, 20, 8, 11, 1000, 7, 12])
        short_dataset = lang_pair_dataset([11, 20, 9, 1000])
        dataset = RoundRobinZipDatasets({'a': long_dataset, 'b': short_dataset})
        idx = dataset.ordered_indices()
        # A scalar limit applies the same max size to every sub-dataset.
        (idx, _) = dataset.filter_indices_by_size(idx, 19)
        self.assertEqual(list(idx), [0, 1, 2, 3, 4])
        self.assertEqual(dict(dataset[0]), {'a': sample(5, 7), 'b': sample(2, 9)})
        self.assertEqual(dict(dataset[2]), {'a': sample(0, 10), 'b': sample(2, 9)})
        self.assertEqual(dict(dataset[4]), {'a': sample(6, 12), 'b': sample(2, 9)})
def test_do_not_claim_an_almost_expiring_lock_if_a_payment_didnt_occur():
    """The mediator must not close the channel to claim a lock whose secret
    is revealed only once the expiration is already unsafely close."""
    amount = UNIT_TRANSFER_AMOUNT
    block_number = BlockNumber(1)
    pseudo_random_generator = random.Random()
    our_state = factories.NettingChannelEndStateProperties(balance=amount)
    partner_state = replace(our_state, address=UNIT_TRANSFER_SENDER)
    attacked_channel = factories.create(factories.NettingChannelStateProperties(our_state=our_state))
    target_attacker2 = attacked_channel.partner_state.address
    bc_channel = factories.create(factories.NettingChannelStateProperties(our_state=our_state, partner_state=partner_state))
    from_hop = factories.make_hop_from_channel(bc_channel)
    from_transfer = factories.make_signed_transfer_for(bc_channel, LockedTransferSignedStateProperties(initiator=HOP1, target=target_attacker2, canonical_identifier=factories.make_canonical_identifier(token_network_address=bc_channel.token_network_address)))
    channel_map = {bc_channel.identifier: bc_channel, attacked_channel.identifier: attacked_channel}
    addresses_to_channel = {(UNIT_TOKEN_NETWORK_ADDRESS, bc_channel.identifier): bc_channel, (UNIT_TOKEN_NETWORK_ADDRESS, attacked_channel.identifier): attacked_channel}
    init_state_change = ActionInitMediator(candidate_route_states=[RouteState(route=[our_state.address, attacked_channel.partner_state.address], address_to_metadata={})], from_hop=from_hop, from_transfer=from_transfer, balance_proof=from_transfer.balance_proof, sender=from_transfer.balance_proof.sender)
    iteration = mediator.state_transition(mediator_state=None, state_change=init_state_change, channelidentifiers_to_channels=channel_map, addresses_to_channel=addresses_to_channel, pseudo_random_generator=pseudo_random_generator, block_number=block_number, block_hash=factories.make_block_hash())
    # First block at which waiting for the lock is no longer safe.
    attack_block_number = (from_transfer.lock.expiration - attacked_channel.reveal_timeout)
    is_safe = mediator.is_safe_to_wait(from_transfer.lock.expiration, attacked_channel.reveal_timeout, attack_block_number)
    assert is_safe.fail
    new_iteration = iteration
    # Advance block-by-block up to the unsafe block; along the way only
    # unlock-failure events are acceptable.
    for new_block_number in range(block_number, (attack_block_number + 1)):
        block = Block(block_number=BlockNumber(new_block_number), gas_limit=BlockGasLimit(1), block_hash=Hash32(factories.make_transaction_hash()))
        new_iteration = mediator.state_transition(mediator_state=new_iteration.new_state, state_change=block, channelidentifiers_to_channels=channel_map, addresses_to_channel=addresses_to_channel, pseudo_random_generator=pseudo_random_generator, block_number=BlockNumber(new_block_number), block_hash=factories.make_block_hash())
    assert (not any((event for event in new_iteration.events if (not isinstance(event, EventUnlockFailed)))))
    # The secret arrives only at the unsafe block: the mediator must NOT
    # emit a channel close to claim the almost-expired lock.
    receive_secret = ReceiveSecretReveal(target_attacker2, UNIT_SECRET)
    attack_iteration = mediator.state_transition(mediator_state=new_iteration.new_state, state_change=receive_secret, channelidentifiers_to_channels=channel_map, addresses_to_channel=addresses_to_channel, pseudo_random_generator=pseudo_random_generator, block_number=attack_block_number, block_hash=factories.make_block_hash())
    assert (not any((isinstance(event, ContractSendChannelClose) for event in attack_iteration.events)))
    # Run past lock expiration; only unlock failures and secret reveals
    # may be produced.
    for new_block_number in range(block_number, (from_transfer.lock.expiration + 1)):
        new_block_hash = factories.make_block_hash()
        block = Block(block_number=BlockNumber(new_block_number), gas_limit=BlockGasLimit(1), block_hash=new_block_hash)
        new_iteration = mediator.state_transition(mediator_state=new_iteration.new_state, state_change=block, channelidentifiers_to_channels=channel_map, addresses_to_channel=addresses_to_channel, pseudo_random_generator=pseudo_random_generator, block_number=BlockNumber(new_block_number), block_hash=new_block_hash)
    assert (not any((event for event in new_iteration.events if (not isinstance(event, (EventUnlockFailed, ContractSendSecretReveal))))))
def test_mel_audio_dataset():
    """Smoke-test that the AudioMelNoiseDataset dataloader yields triples."""
    dataset_config = {
        'metadata_file': 'temp/metadata.txt',
        'hop_length': 256,
        'batch_mel_length': 64,
    }
    dataloader = create_dataloader(
        'AudioMelNoiseDataset',
        dataset_config=dataset_config,
        batch_size=4,
        shuffle=True,
        num_workers=4,
        drop_last=False,
    )
    # Unpacking each batch verifies that every item is a (wav, mel, noise) triple.
    for wavs, mels, noises in tqdm.tqdm(dataloader):
        pass
def _lw(solver, partInfo, subname, shape, retAll=False):
    """Dispatch *shape* to the matching geometry handler.

    Without a solver this only validates the shape: returns None when it is
    acceptable, otherwise a description of what is expected.
    """
    _ = retAll
    if not solver:
        acceptable = (utils.isLinearEdge(shape)
                      or utils.isPlanar(shape)
                      or utils.isCylindricalPlane(shape))
        if acceptable:
            return
        return 'a linear edge or edge/face with planar or cylindrical surface'
    if utils.isLinearEdge(shape):
        return _l(solver, partInfo, subname, shape, False)
    if utils.isCylindricalPlane(shape):
        return _n(solver, partInfo, subname, shape, False)
    return _wa(solver, partInfo, subname, shape)
class SSCDataset():
    """Semantic scene completion dataset over per-sequence voxel files.

    Walks ``<directory>/sequences/<seq>/voxels`` for every sequence in the
    requested split and builds one sorted file list per data type.
    """

    def __init__(self, directory, split='train'):
        self.files = {}      # data-type name -> sorted list of file paths
        self.filenames = []  # (sequence, basename) per sample
        for ext in SPLIT_FILES[split]:
            self.files[EXT_TO_NAME[ext]] = []
        for sequence in SPLIT_SEQUENCES[split]:
            complete_path = os.path.join(directory, 'sequences', sequence, 'voxels')
            if (not os.path.exists(complete_path)):
                raise RuntimeError(('Voxel directory missing: ' + complete_path))
            files = os.listdir(complete_path)
            for ext in SPLIT_FILES[split]:
                data = sorted([os.path.join(complete_path, f) for f in files if f.endswith(ext)])
                if (len(data) == 0):
                    raise RuntimeError(('Missing data for ' + EXT_TO_NAME[ext]))
                self.files[EXT_TO_NAME[ext]].extend(data)
            # Sample names are derived from the split's first extension.
            self.filenames.extend(sorted([(sequence, os.path.splitext(f)[0]) for f in files if f.endswith(SPLIT_FILES[split][0])]))
        self.num_files = len(self.filenames)
        # Sanity check: every data type must provide one file per sample.
        for (k, v) in self.files.items():
            print(k, len(v))
            assert (len(v) == self.num_files)

    def __len__(self):
        return self.num_files

    def __getitem__(self, t):
        """Return ((sequence, basename), {type: voxel array}) for sample *t*."""
        collection = {}
        for typ in self.files.keys():
            scan_data = None
            if (typ == 'label'):
                # Labels are raw uint16; all other types are bit-packed uint8.
                scan_data = np.fromfile(self.files[typ][t], dtype=np.uint16)
            else:
                scan_data = unpack(np.fromfile(self.files[typ][t], dtype=np.uint8))
            collection[typ] = scan_data.reshape(VOXEL_DIMS)
        return (self.filenames[t], collection)
def test_thread(testdir):
    """--timeout-method=thread must dump thread stacks and report the timeout."""
    testdir.makepyfile('\n import time\n\n def test_foo():\n time.sleep(2)\n ')
    result = testdir.runpytest('--timeout=1', '--timeout-method=thread')
    # The timeout banner brackets the main thread's stack dump on stderr.
    result.stderr.fnmatch_lines(['*++ Timeout ++*', '*~~ Stack of MainThread* ~~*', '*File *, line *, in *', '*++ Timeout ++*'])
    # The very last stderr line is the closing banner.
    assert ('++ Timeout ++' in result.stderr.lines[(- 1)])
.grid
def test_transformer_group__get_transform_crs():
    """The transformer count depends on the PROJ version and installed grids."""
    tg = TransformerGroup('epsg:4258', 'epsg:7415')
    if grids_available('nl_nsgi_nlgeo2018.tif', 'nl_nsgi_rdtrans2018.tif', check_all=True):
        # Both grids present: PROJ >= 9.1 yields fewer candidate pipelines.
        if PROJ_GTE_91:
            assert (len(tg.transformers) == 2)
        else:
            assert (len(tg.transformers) == 6)
    elif ((not PROJ_GTE_91) and grids_available('nl_nsgi_rdtrans2018.tif')):
        assert (len(tg.transformers) == 2)
    elif ((not PROJ_GTE_91) and grids_available('nl_nsgi_nlgeo2018.tif')):
        assert (len(tg.transformers) == 4)
    else:
        # No usable grids (or PROJ >= 9.1 partial grids): a single
        # transformer remains — presumably the grid-free fallback; confirm.
        assert (len(tg.transformers) == 1)
class LoginForm(Form):
    """Login form with cued email/password/stay-logged-in inputs, a
    'forgot password' link and a primary login button."""

    def __init__(self, view, event_channel_name, account_management_interface):
        super().__init__(view, event_channel_name)
        self.account_management_interface = account_management_interface
        # Surface a pending form exception as a warning alert.
        if self.exception:
            self.add_child(Alert(view, self.exception.as_user_message(), 'warning'))
        login_inputs = self.add_child(FieldSet(view, legend_text=_('Please specify'))).use_layout(FormLayout())
        email_cue = P(view, _('The email address you used to register here.'))
        login_inputs.layout.add_input(CueInput(TextInput(self, self.account_management_interface.fields.email), email_cue))
        password_cue = P(view, _('The secret password you supplied upon registration.'))
        password_cue_input = CueInput(PasswordInput(self, self.account_management_interface.fields.password), password_cue)
        # The reset-password link is rendered inside the password cue input.
        forgot_password_bookmark = self.user_interface.get_bookmark(relative_path='/resetPassword', description=_('Forgot your password?'))
        password_cue_input.add_child(A.from_bookmark(view, forgot_password_bookmark))
        login_inputs.layout.add_input(password_cue_input)
        stay_cue = P(view, _('If selected, you will stay logged in for longer.'))
        login_inputs.layout.add_input(CueInput(CheckboxInput(self, self.account_management_interface.fields.stay_logged_in), stay_cue))
        login_buttons = self.add_child(ActionButtonGroup(view))
        btn = login_buttons.add_child(Button(self, account_management_interface.events.login_event, style='primary'))
def main():
    """Validation entry point: build the test loader, restore a model
    checkpoint and run 3D pose validation, dumping results to a pickle."""
    args = parse_args()
    (logger, final_output_dir, tb_log_dir) = create_logger(config, args.cfg, 'validate')
    logger.info(pprint.pformat(args))
    logger.info(pprint.pformat(config))
    gpus = [i for i in range(args.gpus)]
    print('=> Using GPUs', gpus)
    print('=> Loading data ..')
    # Standard ImageNet normalization constants.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    # NOTE(review): eval() on config-derived strings — safe only when the
    # config file is trusted; an explicit registry lookup would be safer.
    test_dataset = eval(('dataset.' + config.DATASET.TEST_DATASET))(config, config.DATASET.TEST_SUBSET, False, config.DATASET.TEST_ROOT, transforms.Compose([transforms.ToTensor(), normalize]), args.data_path)
    test_loader = torch.utils.data.DataLoader(test_dataset, batch_size=(config.TEST.BATCH_SIZE * len(gpus)), shuffle=False, num_workers=config.WORKERS, pin_memory=True)
    cudnn.benchmark = config.CUDNN.BENCHMARK
    torch.backends.cudnn.deterministic = config.CUDNN.DETERMINISTIC
    torch.backends.cudnn.enabled = config.CUDNN.ENABLED
    print('=> Constructing models ..')
    model = eval((('models.' + config.MODEL) + '.get_multi_person_pose_net'))(config, is_train=True)
    with torch.no_grad():
        model = torch.nn.DataParallel(model, device_ids=gpus).cuda()
    # Checkpoint priority: explicit TEST.MODEL_FILE, then NETWORK.PRETRAINED;
    # anything else is a configuration error.
    test_model_file = os.path.join(final_output_dir, config.TEST.MODEL_FILE)
    if (config.TEST.MODEL_FILE and os.path.isfile(test_model_file)):
        logger.info('=> load models state {}'.format(test_model_file))
        model.module.load_state_dict(torch.load(test_model_file))
    elif config.NETWORK.PRETRAINED:
        pretrained_model_file = osp.join(args.data_path, config.NETWORK.PRETRAINED)
        logger.info('=> load models state {}'.format(pretrained_model_file))
        state_dict = torch.load(pretrained_model_file)
        # Filter out pose-branch weights before the non-strict load.
        new_state_dict = {k: v for (k, v) in state_dict.items() if ('backbone.pose_branch.' not in k)}
        model.module.load_state_dict(new_state_dict, strict=False)
    else:
        raise ValueError('Check the model file for testing!')
    if config.NETWORK.PRETRAINED_BACKBONE:
        model = load_backbone_validate(model, osp.join(args.cur_path, config.NETWORK.PRETRAINED_BACKBONE))
    writer_dict = {'writer': SummaryWriter(log_dir=tb_log_dir), 'train_global_steps': 0, 'valid_global_steps': 0}
    eval_list = validate_3d(config, model, test_loader, final_output_dir, writer_dict)
    pickle.dump(eval_list, open(osp.join(final_output_dir, 'results.pkl'), 'wb'))
class LastLevelP6P7(nn.Module):
    """Generate the extra FPN levels P6 and P7 from a C5 feature map.

    Both levels use stride-2 3x3 convolutions; P7 additionally applies a
    ReLU before its convolution.
    """

    def __init__(self, in_channels, out_channels, in_feature='res5'):
        super().__init__()
        self.num_levels = 2
        self.in_feature = in_feature
        self.p6 = nn.Conv2d(in_channels, out_channels, 3, 2, 1)
        self.p7 = nn.Conv2d(out_channels, out_channels, 3, 2, 1)
        for conv in (self.p6, self.p7):
            weight_init.c2_xavier_fill(conv)

    def forward(self, c5):
        p6 = self.p6(c5)
        p7 = self.p7(F.relu(p6))
        return [p6, p7]
def norm_sensor(name, value):
    """Normalize a sensor reading (or list of readings) into (-1, 1).

    Each channel has a fixed gain; the scaled value is squashed through
    tanh.  Lists are normalized element-wise.

    Args:
        name: sensor channel key, e.g. 'imu.accel'.
        value: a single numeric reading or a list of readings.

    Returns:
        A float, or a list of floats when *value* is a list.

    Raises:
        KeyError: if *name* is not a known channel.
    """
    conversions = {'imu.accel': 1, 'imu.gyro': 0.1, 'servo.current': 1, 'servo.command': 1, 'ap.heading_error': 0.2, 'imu.headingrate_lowpass': 0.1}
    gain = conversions[name]

    def norm_value(v):
        return math.tanh(gain * v)

    # isinstance instead of type(value) == type([]): idiomatic and accepts
    # list subclasses as well.
    if isinstance(value, list):
        return [norm_value(v) for v in value]
    return norm_value(value)
class IBNbResNet(nn.Module):
    """IBN(b)-ResNet classifier.

    Builds the ResNet stage layout from *channels*; the first unit of every
    non-initial stage downsamples, and the last unit of the first two stages
    enables instance normalization (the 'b' IBN variant).
    """

    def __init__(self, channels, init_block_channels, in_channels=3, in_size=(224, 224), num_classes=1000):
        super(IBNbResNet, self).__init__()
        self.in_size = in_size
        self.num_classes = num_classes
        self.features = nn.Sequential()
        self.features.add_module('init_block', IBNbResInitBlock(in_channels=in_channels, out_channels=init_block_channels))
        in_channels = init_block_channels
        for (i, channels_per_stage) in enumerate(channels):
            stage = nn.Sequential()
            for (j, out_channels) in enumerate(channels_per_stage):
                # Downsample at the first unit of every stage but the first.
                stride = (2 if ((j == 0) and (i != 0)) else 1)
                # Instance norm only on the last unit of the first two stages.
                use_inst_norm = ((i < 2) and (j == (len(channels_per_stage) - 1)))
                stage.add_module('unit{}'.format((j + 1)), IBNbResUnit(in_channels=in_channels, out_channels=out_channels, stride=stride, use_inst_norm=use_inst_norm))
                in_channels = out_channels
            self.features.add_module('stage{}'.format((i + 1)), stage)
        self.features.add_module('final_pool', nn.AvgPool2d(kernel_size=7, stride=1))
        self.output = nn.Linear(in_features=in_channels, out_features=num_classes)
        self._init_params()

    def _init_params(self):
        # Kaiming-uniform conv weights; zero conv biases.
        for (name, module) in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if (module.bias is not None):
                    init.constant_(module.bias, 0)

    def forward(self, x):
        x = self.features(x)
        # Flatten to (batch, features) before the classifier head.
        x = x.view(x.size(0), (- 1))
        x = self.output(x)
        return x
_model
class pvt_v2_b4(PyramidVisionTransformerImpr):
    """PVTv2-B4 preset of the Pyramid Vision Transformer v2."""

    def __init__(self, **kwargs):
        # NOTE(review): extra **kwargs are accepted but not forwarded —
        # confirm callers do not expect to override these presets.
        super(pvt_v2_b4, self).__init__(
            patch_size=4,
            embed_dims=[64, 128, 320, 512],
            num_heads=[1, 2, 5, 8],
            mlp_ratios=[8, 8, 4, 4],
            qkv_bias=True,
            norm_layer=partial(nn.LayerNorm, eps=1e-06),
            depths=[3, 8, 27, 3],
            sr_ratios=[8, 4, 2, 1],
            drop_rate=0.0,
            drop_path_rate=0.1,
        )
def handle_napi_poll(event_info):
    """Record a napi_poll trace event on the per-CPU event list.

    Events for CPUs not tracked in ``net_rx_dic`` are silently dropped.

    Args:
        event_info: the trace tuple (name, context, cpu, time, pid, comm,
            napi, dev_name, work, budget).
    """
    (name, context, cpu, time, pid, comm, napi, dev_name, work, budget) = event_info
    # Membership test directly on the dict — `.keys()` was redundant.
    if cpu in net_rx_dic:
        net_rx_dic[cpu]['event_list'].append({
            'event_name': 'napi_poll',
            'dev': dev_name,
            'event_t': time,
            'work': work,
            'budget': budget,
        })
def assert_rank(tensor, expected_rank, name=None):
    """Raise ValueError unless *tensor* has one of the expected ranks.

    *expected_rank* may be a single integer or an iterable of integers.
    """
    if name is None:
        name = tensor.name
    # Build the set of allowed ranks (kept as a dict to match original form).
    if isinstance(expected_rank, six.integer_types):
        allowed_ranks = {expected_rank: True}
    else:
        allowed_ranks = {rank: True for rank in expected_rank}
    actual_rank = tensor.shape.ndims
    if actual_rank not in allowed_ranks:
        scope_name = tf.get_variable_scope().name
        raise ValueError(('For the tensor `%s` in scope `%s`, the actual rank `%d` (shape = %s) is not equal to the expected rank `%s`' % (name, scope_name, actual_rank, str(tensor.shape), str(expected_rank))))
def create_guid(bus: int, vendor: int, product: int, version: int, name: str, signature: int, data: int) -> str:
    """Build a device GUID string from byte-swapped identifier words.

    NOTE(review): *name*, *signature* and *data* are accepted but unused —
    confirm whether they should contribute to the GUID.
    """
    words = (_swap_le16(bus), _swap_le16(vendor), _swap_le16(product), _swap_le16(version))
    return ''.join(f'{word:04x}0000' for word in words)
def register_equilibrium_solver(name: str, overwrite: bool=False, reason_to_exclude: Optional[str]=None) -> Callable:
    """Return a decorator that registers an equilibrium solver under *name*."""
    return generic_register(
        name=name,
        registrator_name='Equilibrium solver',
        registry=EQUILIBRIUM_SOLVER_REGISTRY,
        signature=EQUILIBRIUM_SOLVER_SIGNATURE,
        overwrite=overwrite,
        reason_to_exclude=reason_to_exclude,
    )
class VoxelBackBone8x(nn.Module):
    """Sparse-convolution voxel backbone with 8x spatial downsampling.

    Four stages (strides 1/2/4/8) of submanifold/sparse conv blocks followed
    by a final sparse conv that compresses the first spatial axis and emits
    128 feature channels.
    """

    def __init__(self, model_cfg, input_channels, grid_size, **kwargs):
        super().__init__()
        self.model_cfg = model_cfg
        norm_fn = partial(nn.BatchNorm1d, eps=0.001, momentum=0.01)
        # Reversed grid order with one extra voxel on the first axis —
        # presumably (z, y, x) as sparse conv expects; confirm.
        self.sparse_shape = (grid_size[::(- 1)] + [1, 0, 0])
        self.input_channels = input_channels
        self.conv_input = spconv.SparseSequential(spconv.SubMConv3d(input_channels, 16, 3, padding=1, bias=False, indice_key='subm1'), norm_fn(16), nn.ReLU())
        block = post_act_block
        self.conv1 = spconv.SparseSequential(block(16, 16, 3, norm_fn=norm_fn, padding=1, indice_key='subm1'))
        self.conv2 = spconv.SparseSequential(block(16, 32, 3, norm_fn=norm_fn, stride=2, padding=1, indice_key='spconv2', conv_type='spconv'), block(32, 32, 3, norm_fn=norm_fn, padding=1, indice_key='subm2'), block(32, 32, 3, norm_fn=norm_fn, padding=1, indice_key='subm2'))
        self.conv3 = spconv.SparseSequential(block(32, 64, 3, norm_fn=norm_fn, stride=2, padding=1, indice_key='spconv3', conv_type='spconv'), block(64, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm3'), block(64, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm3'))
        self.conv4 = spconv.SparseSequential(block(64, 64, 3, norm_fn=norm_fn, stride=2, padding=(0, 1, 1), indice_key='spconv4', conv_type='spconv'), block(64, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm4'), block(64, 64, 3, norm_fn=norm_fn, padding=1, indice_key='subm4'))
        last_pad = 0
        last_pad = self.model_cfg.get('last_pad', last_pad)
        self.conv_out = spconv.SparseSequential(spconv.SparseConv3d(64, 128, (3, 1, 1), stride=(2, 1, 1), padding=last_pad, bias=False, indice_key='spconv_down2'), norm_fn(128), nn.ReLU())
        self.num_point_features = 128
        self.backbone_channels = {'x_conv1': 16, 'x_conv2': 32, 'x_conv3': 64, 'x_conv4': 64}

    def forward(self, batch_dict):
        """Run the sparse stages and attach multi-scale outputs to batch_dict."""
        (voxel_features, voxel_coords) = (batch_dict['voxel_features'], batch_dict['voxel_coords'])
        batch_size = batch_dict['batch_size']
        input_sp_tensor = spconv.SparseConvTensor(features=voxel_features, indices=voxel_coords.int(), spatial_shape=self.sparse_shape, batch_size=batch_size)
        # Warn (rank 0 only) on a feature-width mismatch instead of failing here.
        if ((voxel_features.shape[(- 1)] != self.input_channels) and (dist.get_rank() == 0)):
            warnings.warn(f'shape does not compatible: {voxel_features.shape}, {self.input_channels}')
        x = self.conv_input(input_sp_tensor)
        x_conv1 = self.conv1(x)
        x_conv2 = self.conv2(x_conv1)
        x_conv3 = self.conv3(x_conv2)
        x_conv4 = self.conv4(x_conv3)
        out = self.conv_out(x_conv4)
        batch_dict.update({'encoded_spconv_tensor': out, 'encoded_spconv_tensor_stride': 8})
        batch_dict.update({'multi_scale_3d_features': {'x_conv1': x_conv1, 'x_conv2': x_conv2, 'x_conv3': x_conv3, 'x_conv4': x_conv4}})
        batch_dict.update({'multi_scale_3d_strides': {'x_conv1': 1, 'x_conv2': 2, 'x_conv3': 4, 'x_conv4': 8}})
        return batch_dict
def _truncate(basis, contr_scheme, symb, split_name):
contr_b = []
b_index = 0
for (l, n_keep) in enumerate(contr_scheme):
n_saved = 0
if (n_keep > 0):
for segm in basis:
segm_l = segm[0]
if (segm_l == l):
segm_len = len(segm[1][1:])
n_save = min(segm_len, (n_keep - n_saved))
if (n_save > 0):
save_segm = [line[:(n_save + 1)] for line in segm[:][1:]]
contr_b.append(([l] + save_segm))
n_saved += n_save
assert (n_saved == n_keep), ('{} implies {} l={} function(s), but' + 'only {} in {}:{}').format(split_name[1], contr_scheme[l], l, n_saved, symb, split_name[0])
return contr_b |
class KiteParamCovariance(KiteParameterGroup):
    """Parameter-tree group exposing Scene.covariance settings.

    Wires editable parameters (sampling method, spatial bins/pairs, model
    function, adaptive subsampling) to the model's covariance object via
    closures over the local parameter widgets.
    """

    sigSamplingMethod = QtCore.pyqtSignal(str)
    sigSpatialBins = QtCore.pyqtSignal(int)
    sigSpatialPairs = QtCore.pyqtSignal(int)

    def __init__(self, model, **kwargs):
        kwargs['type'] = 'group'
        kwargs['name'] = 'Scene.covariance'
        self.sp = model
        # Read-only display values; callables format the covariance state.
        self.parameters = OrderedDict([('variance', None), ('covariance_model', (lambda c: ', '.join((('%g' % p) for p in c.covariance_model)))), ('covariance_model_rms', None), ('noise_patch_size_km2', None), ('noise_patch_coord', (lambda c: ', '.join([('%.2f' % f) for f in c.noise_coord.tolist()])))])
        model.sigCovarianceChanged.connect(self.updateValues)
        KiteParameterGroup.__init__(self, model=model, model_attr='covariance', **kwargs)

        # Each editable parameter gets a closure that pushes the widget's
        # current value into the covariance model when it changes.
        def changeSamplingMethod():
            model.covariance.setSamplingMethod(sampling_method.value())
        p = {'name': 'sampling_method', 'values': {'spatial random': 'spatial', 'spectral': 'spectral'}, 'value': model.covariance.config.sampling_method, 'tip': CovarianceConfig.sampling_method.help}
        sampling_method = pTypes.ListParameter(**p)
        sampling_method.sigValueChanged.connect(changeSamplingMethod)

        def changeSpatialBins():
            model.covariance.setSpatialBins(spatial_bins.value())
        p = {'name': 'spatial_bins', 'value': model.covariance.config.spatial_bins, 'type': 'int', 'limits': (1, 500), 'step': 5, 'editable': True, 'tip': CovarianceConfig.spatial_bins.help}
        spatial_bins = pTypes.SimpleParameter(**p)
        spatial_bins.sigValueChanged.connect(changeSpatialBins)

        def changeSpatialPairs():
            model.covariance.setSpatialPairs(spatial_pairs.value())
        p = {'name': 'spatial_pairs', 'value': model.covariance.config.spatial_pairs, 'type': 'int', 'limits': (1, 1000000), 'step': 50000, 'editable': True, 'tip': CovarianceConfig.spatial_pairs.help}
        spatial_pairs = pTypes.SimpleParameter(**p)
        spatial_pairs.sigValueChanged.connect(changeSpatialPairs)

        def changeModelFunction():
            model.covariance.setModelFunction(model_function.value())
        p = {'name': 'model_function', 'values': {'exponential': 'exponential', 'exp + cosine': 'exponential_cosine'}, 'value': model.covariance.config.model_function, 'tip': CovarianceConfig.model_function.help}
        model_function = pTypes.ListParameter(**p)
        model_function.sigValueChanged.connect(changeModelFunction)

        def toggle_adaptive_subsampling(param, checked):
            model.covariance.config.adaptive_subsampling = checked
        p = {'name': 'adaptive_subsampling', 'type': 'bool', 'value': model.covariance.config.adaptive_subsampling, 'tip': 'detrend the scene'}
        adaptive_subsampling = pTypes.SimpleParameter(**p)
        adaptive_subsampling.sigValueChanged.connect(toggle_adaptive_subsampling)

        # pushChild prepends, so the displayed order is the reverse of this.
        self.pushChild(adaptive_subsampling)
        self.pushChild(model_function)
        self.pushChild(spatial_bins)
        self.pushChild(spatial_pairs)
        self.pushChild(sampling_method)
class TAudioFormats(TestCase):
    """Every file-backed audio format must cope with a non-existing path."""

    def setUp(self):
        # The context exits immediately, so self.filename points at a path
        # that no longer exists — presumably intentional given the
        # "non-existing" tests below; confirm temp_filename's semantics.
        with temp_filename() as filename:
            self.filename = filename

    def test_load_non_exist(self):
        # Constructing a file-based type from a missing path must raise.
        for t in format_types:
            if (not t.is_file):
                continue
            self.assertRaises(AudioFileError, t, self.filename)

    def test_write_non_existing(self):
        for t in format_types:
            if (not t.is_file):
                continue
            # Bypass __init__ to get an instance bound to the missing path.
            instance = AudioFile.__new__(t)
            instance.sanitize(self.filename)
            try:
                instance.write()
            except AudioFileError:
                pass

    def test_reload_non_existing(self):
        for t in format_types:
            if (not t.is_file):
                continue
            instance = AudioFile.__new__(t)
            instance.sanitize(self.filename)
            try:
                instance.reload()
            except AudioFileError:
                pass
def test_logging_emit_error_supressed(pytester: Pytester) -> None:
    """With logging.raiseExceptions disabled, a malformed logging call
    (surplus args for the format string) must not fail the test."""
    pytester.makepyfile("\n import logging\n\n def test_bad_log(monkeypatch):\n monkeypatch.setattr(logging, 'raiseExceptions', False)\n logging.warning('oops', 'first', 2)\n ")
    result = pytester.runpytest()
    result.assert_outcomes(passed=1)
class DynamicState(State):
    """State that resolves its callbacks dynamically from the model.

    Instead of registering callbacks up front, it looks for
    ``on_enter_<name>`` / ``on_exit_<name>`` methods on the model at
    transition time and dispatches them through the machine.
    """

    def enter(self, event_data):
        """Run the model's dynamic on_enter callback, if present."""
        self._run_dynamic_callback(event_data, 'enter', 'Entering')

    def exit(self, event_data):
        """Run the model's dynamic on_exit callback, if present."""
        self._run_dynamic_callback(event_data, 'exit', 'Exiting')

    def _run_dynamic_callback(self, event_data, phase, verb):
        # Shared enter/exit implementation; the rendered log messages are
        # identical to the pre-refactor per-method versions.
        logger.debug('%s%s state %s. Processing callbacks...', event_data.machine.name, verb, self.name)
        callback_name = f'on_{phase}_{self.name}'
        if hasattr(event_data.model, callback_name):
            event_data.machine.callbacks([getattr(event_data.model, callback_name)], event_data)
        logger.info('%sFinished processing state %s %s callbacks.', event_data.machine.name, self.name, phase)
class GatherOpsTest(tf.test.TestCase):
    """Tests for np_box_list_ops.gather: index validation and field selection."""

    def setUp(self):
        # Three boxes with per-box scores and one-hot labels.
        boxes = np.array([[3.0, 4.0, 6.0, 8.0], [14.0, 14.0, 15.0, 15.0], [0.0, 0.0, 20.0, 20.0]], dtype=float)
        self.boxlist = np_box_list.BoxList(boxes)
        self.boxlist.add_field('scores', np.array([0.5, 0.7, 0.9], dtype=float))
        self.boxlist.add_field('labels', np.array([[0, 0, 0, 1, 0], [0, 1, 0, 0, 0], [0, 0, 0, 0, 1]], dtype=int))

    def test_gather_with_out_of_range_indices(self):
        # Index 3 exceeds the 3-box list.
        indices = np.array([3, 1], dtype=int)
        boxlist = self.boxlist
        with self.assertRaises(ValueError):
            np_box_list_ops.gather(boxlist, indices)

    def test_gather_with_invalid_multidimensional_indices(self):
        # Indices must be rank-1.
        indices = np.array([[0, 1], [1, 2]], dtype=int)
        boxlist = self.boxlist
        with self.assertRaises(ValueError):
            np_box_list_ops.gather(boxlist, indices)

    def test_gather_without_fields_specified(self):
        # With no field list, every field is gathered in index order.
        indices = np.array([2, 0, 1], dtype=int)
        boxlist = self.boxlist
        subboxlist = np_box_list_ops.gather(boxlist, indices)
        expected_scores = np.array([0.9, 0.5, 0.7], dtype=float)
        self.assertAllClose(expected_scores, subboxlist.get_field('scores'))
        expected_boxes = np.array([[0.0, 0.0, 20.0, 20.0], [3.0, 4.0, 6.0, 8.0], [14.0, 14.0, 15.0, 15.0]], dtype=float)
        self.assertAllClose(expected_boxes, subboxlist.get())
        expected_labels = np.array([[0, 0, 0, 0, 1], [0, 0, 0, 1, 0], [0, 1, 0, 0, 0]], dtype=int)
        self.assertAllClose(expected_labels, subboxlist.get_field('labels'))

    def test_gather_with_invalid_field_specified(self):
        indices = np.array([2, 0, 1], dtype=int)
        boxlist = self.boxlist
        # Fields must be passed as a list...
        with self.assertRaises(ValueError):
            np_box_list_ops.gather(boxlist, indices, 'labels')
        # ...and must exist on the boxlist.
        with self.assertRaises(ValueError):
            np_box_list_ops.gather(boxlist, indices, ['objectness'])

    def test_gather_with_fields_specified(self):
        # Only the requested fields survive the gather.
        indices = np.array([2, 0, 1], dtype=int)
        boxlist = self.boxlist
        subboxlist = np_box_list_ops.gather(boxlist, indices, ['labels'])
        self.assertFalse(subboxlist.has_field('scores'))
        expected_boxes = np.array([[0.0, 0.0, 20.0, 20.0], [3.0, 4.0, 6.0, 8.0], [14.0, 14.0, 15.0, 15.0]], dtype=float)
        self.assertAllClose(expected_boxes, subboxlist.get())
        expected_labels = np.array([[0, 0, 0, 0, 1], [0, 0, 0, 1, 0], [0, 1, 0, 0, 0]], dtype=int)
        self.assertAllClose(expected_labels, subboxlist.get_field('labels'))
class PackagesDistributionsPrebuiltTest(fixtures.ZipFixtures, unittest.TestCase):
    """packages_distributions() resolved against prebuilt wheel fixtures."""

    def test_packages_distributions_example(self):
        self._fixture_on_path('example-21.12-py3-none-any.whl')
        mapping = packages_distributions()
        assert mapping['example'] == ['example']

    def test_packages_distributions_example2(self):
        self._fixture_on_path('example2-1.0.0-py3-none-any.whl')
        mapping = packages_distributions()
        assert mapping['example2'] == ['example2']
class MultiHeadAttention(nn.Module):
    """Multi-head self-attention with a fused QKV projection.

    Args:
        input_dim: dimensionality of the incoming features.
        embed_dim: total attention embedding size, split across heads.
        num_heads: number of attention heads; must divide embed_dim.
    """

    def __init__(self, input_dim, embed_dim, num_heads):
        super().__init__()
        assert ((embed_dim % num_heads) == 0), 'Embedding dimension must be 0 modulo number of heads.'
        self.embed_dim = embed_dim
        self.num_heads = num_heads
        self.head_dim = (embed_dim // num_heads)
        # One projection produces Q, K and V stacked along the last dim.
        self.qkv_proj = nn.Linear(input_dim, (3 * embed_dim))
        self.o_proj = nn.Linear(embed_dim, embed_dim)
        self._reset_parameters()

    def _reset_parameters(self):
        # Xavier-uniform weights, zero biases.
        nn.init.xavier_uniform_(self.qkv_proj.weight)
        self.qkv_proj.bias.data.fill_(0)
        nn.init.xavier_uniform_(self.o_proj.weight)
        self.o_proj.bias.data.fill_(0)

    def forward(self, x, mask=None, return_attention=False):
        """Attend over x of shape (batch, seq, input_dim).

        Returns the output projection, plus the attention weights when
        return_attention is True.
        """
        (batch_size, seq_length, _) = x.size()
        qkv = self.qkv_proj(x)
        # (batch, seq, heads, 3*head_dim) -> (batch, heads, seq, 3*head_dim)
        qkv = qkv.reshape(batch_size, seq_length, self.num_heads, (3 * self.head_dim))
        qkv = qkv.permute(0, 2, 1, 3)
        (q, k, v) = qkv.chunk(3, dim=(- 1))
        (values, attention) = scaled_dot_product(q, k, v, mask=mask)
        values = values.permute(0, 2, 1, 3)
        # Fix: reshape with self.embed_dim rather than the input feature
        # size unpacked from x.size(), so input_dim != embed_dim works.
        values = values.reshape(batch_size, seq_length, self.embed_dim)
        o = self.o_proj(values)
        if return_attention:
            return (o, attention)
        else:
            return o
def test_api_error_formatter(testdir, xdist_args):
    """A conftest-installed file_error_formatter must shape mypy error output."""
    testdir.makepyfile(bad='\n def pyfunc(x: int) -> str:\n return x * 2\n ')
    testdir.makepyfile(conftest="\n def custom_file_error_formatter(item, results, errors):\n return '\\n'.join(\n '{path}:{error}'.format(\n path=item.fspath,\n error=error,\n )\n for error in errors\n )\n\n def pytest_configure(config):\n plugin = config.pluginmanager.getplugin('mypy')\n plugin.file_error_formatter = custom_file_error_formatter\n ")
    result = testdir.runpytest_subprocess('--mypy', *xdist_args)
    # Output must follow the custom "<path>:<error>" shape.
    result.stdout.fnmatch_lines(['*/bad.py:2: error: Incompatible return value*'])
    # The type error must still fail the run.
    assert (result.ret != 0)
class ProgressHooksTestCase(WithSeededRandomPipelineEngine, ZiplineTestCase):
    """Tests that ProgressHooks publishes monotonically-increasing progress
    updates (init -> loading/computing -> success/error) during pipeline runs."""
    ASSET_FINDER_COUNTRY_CODE = 'US'
    START_DATE = pd.Timestamp('2014-01-02', tz='UTC')
    END_DATE = pd.Timestamp('2014-01-31', tz='UTC')
    # Chunks ending after this date do NOT get PREPOPULATED_TERM precomputed,
    # so later chunks must load/compute it themselves.
    PREPOPULATED_TERM_CUTOFF = (END_DATE - pd.Timedelta('2 days'))
    # NOTE(review): takes ``cls`` but has no @classmethod decorator -- the
    # decorator appears to have been lost in extraction; confirm upstream.
    def make_seeded_random_populate_initial_workspace(cls):
        def populate(initial_workspace, root_mask_term, execution_plan, dates, assets):
            # Only prepopulate when the term is requested AND the chunk ends
            # on or before the cutoff.
            if (PREPOPULATED_TERM not in execution_plan):
                return initial_workspace
            elif (dates[(- 1)] > cls.PREPOPULATED_TERM_CUTOFF):
                return initial_workspace
            workspace = initial_workspace.copy()
            (_, dates) = execution_plan.mask_and_dates_for_term(PREPOPULATED_TERM, root_mask_term, workspace, dates)
            shape = (len(dates), len(assets))
            # A zero array is enough: the tests only care that the term is
            # present in the workspace, not about its values.
            workspace[PREPOPULATED_TERM] = np.zeros(shape, dtype=float)
            return workspace
        return populate
    # NOTE(review): likewise looks like a lost @classmethod decorator.
    def make_seeded_random_loader_columns(cls):
        return (TestingDataSet.columns | ShouldGetSkipped.columns)
    def test_progress_hooks(self):
        """A two-chunk run emits a full, ordered progress trace."""
        publisher = TestingProgressPublisher()
        hooks = [ProgressHooks.with_static_publisher(publisher)]
        pipeline = Pipeline({'bool_': TestingDataSet.bool_col.latest, 'factor_rank': TrivialFactor().rank().zscore(), 'prepopulated': PREPOPULATED_TERM}, domain=US_EQUITIES)
        (start_date, end_date) = self.trading_days[[(- 10), (- 1)]]
        # chunksize=5 over 10 days -> exactly two 5-day chunks.
        expected_chunks = [tuple(self.trading_days[[(- 10), (- 6)]]), tuple(self.trading_days[[(- 5), (- 1)]])]
        # The first chunk must end before the cutoff so it gets prepopulated.
        self.assertLess(expected_chunks[0][1], self.PREPOPULATED_TERM_CUTOFF)
        # NOTE(review): duplicated assertion -- the second one was probably
        # meant to check the *second* chunk relative to the cutoff; confirm.
        self.assertLess(expected_chunks[0][1], self.PREPOPULATED_TERM_CUTOFF)
        self.run_chunked_pipeline(pipeline=pipeline, start_date=start_date, end_date=end_date, chunksize=5, hooks=hooks)
        self.verify_trace(publisher.trace, pipeline_start_date=start_date, pipeline_end_date=end_date, expected_chunks=expected_chunks)
    def test_progress_hooks_empty_pipeline(self):
        """An empty pipeline still produces init/success updates per chunk."""
        publisher = TestingProgressPublisher()
        hooks = [ProgressHooks.with_static_publisher(publisher)]
        pipeline = Pipeline({}, domain=US_EQUITIES)
        (start_date, end_date) = self.trading_days[[(- 10), (- 1)]]
        expected_chunks = [tuple(self.trading_days[[(- 10), (- 6)]]), tuple(self.trading_days[[(- 5), (- 1)]])]
        self.run_chunked_pipeline(pipeline=pipeline, start_date=start_date, end_date=end_date, chunksize=5, hooks=hooks)
        self.verify_trace(publisher.trace, pipeline_start_date=start_date, pipeline_end_date=end_date, expected_chunks=expected_chunks, empty=True)
    def verify_trace(self, trace, pipeline_start_date, pipeline_end_date, expected_chunks, empty=False):
        """Check invariants of a recorded progress trace.

        Progress must never decrease; the first update is the 'init' state at
        0%, the last is 'success' at 100%, and intermediate updates describe
        loading/computing work (unless the pipeline was empty).
        """
        # Progress is monotonically non-decreasing across consecutive updates.
        for (before, after) in toolz.sliding_window(2, trace):
            self.assertGreaterEqual(after.percent_complete, before.percent_complete)
        first = trace[0]
        expected_first = TestingProgressPublisher.TraceState(state='init', percent_complete=0.0, execution_bounds=(pipeline_start_date, pipeline_end_date), current_chunk_bounds=expected_chunks[0], current_work=None)
        self.assertEqual(first, expected_first)
        last = trace[(- 1)]
        expected_last = TestingProgressPublisher.TraceState(state='success', percent_complete=100.0, execution_bounds=(pipeline_start_date, pipeline_end_date), current_chunk_bounds=expected_chunks[(- 1)], current_work=(None if empty else [instance_of(ComputableTerm)]))
        self.assertEqual(last, expected_last)
        middle = trace[1:(- 1)]
        for update in middle:
            # Empty pipelines only ever re-enter 'init' between chunks.
            if empty:
                self.assertEqual(update.state, 'init')
                self.assertIs(update.current_work, None)
                continue
            # Non-empty pipelines alternate between loading and computing,
            # each update carrying the list of terms being worked on.
            if (update.state in ('loading', 'computing')):
                self.assertIsInstance(update.current_work, list)
            if (update.state == 'loading'):
                for term in update.current_work:
                    self.assertIsInstance(term, (LoadableTerm, AssetExists))
            elif (update.state == 'computing'):
                for term in update.current_work:
                    self.assertIsInstance(term, ComputableTerm)
            else:
                raise AssertionError('Unexpected state: {}'.format(update.state))
        # Each chunk's final update must land exactly on the expected
        # day-weighted progress percentage.
        all_chunks = []
        grouped = itertools.groupby(middle, attrgetter('current_chunk_bounds'))
        for ((chunk_start, chunk_stop), chunk_trace) in grouped:
            all_chunks.append((chunk_start, chunk_stop))
            chunk_trace = list(chunk_trace)
            expected_end_progress = self.expected_chunk_progress(pipeline_start_date, pipeline_end_date, chunk_stop)
            end_progress = chunk_trace[(- 1)].percent_complete
            assert_almost_equal(end_progress, expected_end_progress)
        self.assertEqual(all_chunks, expected_chunks)
    # NOTE(review): this bare call looks like a mangled parameterization
    # decorator (e.g. @parameter_space(chunked=[True, False])) for the method
    # below; as written it is a plain expression statement -- confirm.
    _space(chunked=[True, False])
    def test_error_handling(self, chunked):
        """An exception inside a factor must surface AND leave the trace in
        the 'error' state, for both chunked and unchunked runs."""
        publisher = TestingProgressPublisher()
        hooks = [ProgressHooks.with_static_publisher(publisher)]
        class SomeError(Exception):
            pass
        class ExplodingFactor(CustomFactor):
            inputs = [TestingDataSet.float_col]
            window_length = 1
            def compute(self, *args, **kwargs):
                raise SomeError()
        pipeline = Pipeline({'boom': ExplodingFactor()}, domain=US_EQUITIES)
        (start_date, end_date) = self.trading_days[[(- 10), (- 1)]]
        with self.assertRaises(SomeError):
            if chunked:
                self.run_chunked_pipeline(pipeline=pipeline, start_date=start_date, end_date=end_date, chunksize=5, hooks=hooks)
            else:
                self.run_pipeline(pipeline=pipeline, start_date=start_date, end_date=end_date, hooks=hooks)
        final_update = publisher.trace[(- 1)]
        self.assertEqual(final_update.state, 'error')
    def expected_chunk_progress(self, pipeline_start, pipeline_end, chunk_end):
        """Percent complete after finishing the chunk ending at *chunk_end*,
        weighted by calendar days (inclusive on both ends)."""
        total_days = ((pipeline_end - pipeline_start).days + 1)
        days_complete = ((chunk_end - pipeline_start).days + 1)
        return round(((100.0 * days_complete) / total_days), 3)
def plot_entropy(myax, inclass, outclass, label, bins=np.logspace((- 8), 0.5, num=30), show_legend=False, show_xlabel=False, show_ylabel=False):
    """Draw overlaid log-x histograms of in-class vs. out-of-class entropy
    values on the given axes.

    The in-class distribution is drawn in solid red, the out-of-class one in
    translucent black, both over the same (logarithmic) bins.
    """
    myax.set_title(str(label), fontsize=12)
    myax.set_xscale('log')
    # In-class first, then out-of-class on top with alpha so both stay visible.
    for data, extra in ((inclass, {'color': 'red', 'label': 'In Class'}),
                        (outclass, {'color': 'black', 'alpha': 0.3, 'label': 'Out of Class'})):
        myax.hist(data, bins=bins, **extra)
    # Fixed y-range so panels in a grid are directly comparable.
    myax.set_ylim((0, 1000))
    if show_xlabel:
        myax.set_xlabel('Entropy')
    if show_ylabel:
        myax.set_ylabel('Count')
    if show_legend:
        myax.legend()
def load_model(models_path, model_name, epoch=0):
    """Reconstruct a saved network from its stored params and load weights.

    Parameters
    ----------
    models_path : str
        Root directory containing one sub-directory per model.
    model_name : str
        Name of the model's sub-directory; also used as a fallback hint for
        the architecture ('sdn'/'cnn' substring) when the saved params do
        not record one.
    epoch : int
        0 loads the 'untrained' snapshot, -1 the 'last' snapshot, any other
        value the snapshot saved under that epoch number.

    Returns
    -------
    tuple
        (model, model_params).

    Raises
    ------
    ValueError
        If the architecture / network_type combination is not recognized.
        (Previously this path crashed with an UnboundLocalError because
        ``model`` was never assigned.)
    """
    model_params = load_params(models_path, model_name, epoch)
    architecture = ('empty' if ('architecture' not in model_params) else model_params['architecture'])
    network_type = model_params['network_type']
    model = None
    if (architecture == 'sdn') or ('sdn' in model_name):
        # Shallow-Deep Network (SDN) variants with internal classifiers.
        if 'wideresnet' in network_type:
            model = WideResNet_SDN(model_params)
        elif 'resnet' in network_type:
            model = ResNet_SDN(model_params)
        elif 'vgg' in network_type:
            model = VGG_SDN(model_params)
        elif 'mobilenet' in network_type:
            model = MobileNet_SDN(model_params)
    elif (architecture == 'cnn') or ('cnn' in model_name):
        # Plain CNN counterparts.
        if 'wideresnet' in network_type:
            model = WideResNet(model_params)
        elif 'resnet' in network_type:
            model = ResNet(model_params)
        elif 'vgg' in network_type:
            model = VGG(model_params)
        elif 'mobilenet' in network_type:
            model = MobileNet(model_params)
    if model is None:
        raise ValueError(
            'Unrecognized model: architecture={!r}, network_type={!r}, '
            'model_name={!r}'.format(architecture, network_type, model_name))
    network_path = (models_path + '/') + model_name
    # Map the epoch argument onto the snapshot naming scheme.
    if epoch == 0:
        load_path = network_path + '/untrained'
    elif epoch == -1:
        load_path = network_path + '/last'
    else:
        load_path = (network_path + '/') + str(epoch)
    # strict=False tolerates missing/unexpected keys in the checkpoint.
    model.load_state_dict(torch.load(load_path), strict=False)
    return (model, model_params)
def load_image(file_or_name: str, *, size: Optional[Union[(int, Tuple[(int, int)])]], device: torch.device) -> torch.Tensor:
    """Resolve *file_or_name* to an image and read it as a tensor.

    Lookup order: an existing local file wins, otherwise the bundled demo
    images are consulted by name.  Raises ValueError (with a "did you mean"
    suggestion for bare names) when neither resolves.
    """
    file = os.path.abspath(os.path.expanduser(file_or_name))
    name = file_or_name
    images = demo.images()
    image: _Image
    if os.path.exists(file):
        image = LocalImage(file)
    elif name in images:
        image = images[name]
    else:
        # A bare word (no extension, no directory part) was most likely
        # intended as a demo-image name, so suggest close matches.
        path = pathlib.Path(file_or_name)
        looks_like_demo_name = (not path.suffix) and (path.parent == pathlib.Path('.'))
        if not looks_like_demo_name:
            raise ValueError(f'The file {file} does not exist.')
        raise ValueError(add_suggestion(f"Unknown demo image '{name}'.", word=name, possibilities=images._images.keys()))
    return image.read(size=size, device=device)
def parse_args(script, args=None):
    """Build and parse the command line for a few-shot learning script.

    Parameters
    ----------
    script : str
        One of 'train', 'save_features' or 'test'; selects which
        script-specific options are added on top of the shared ones.
    args : list[str] | None
        Argument list to parse.  None (the default) parses ``sys.argv``
        exactly as before; passing an explicit list makes the function
        usable programmatically and testable.

    Returns
    -------
    argparse.Namespace

    Raises
    ------
    ValueError
        If *script* is not a known script name.
    """
    parser = argparse.ArgumentParser(description=('few-shot script %s' % script))
    # Options shared by every script.
    parser.add_argument('--dataset', default='CUB', help='CUB/miniImagenet/cross/omniglot/cross_char')
    parser.add_argument('--model', default='Conv4', help='model: Conv{4|6} / ResNet{10|18|34|50|101}')
    parser.add_argument('--method', default='baseline', help='baseline/baseline++/protonet/matchingnet/relationnet{_softmax}/maml{_approx}')
    parser.add_argument('--train_n_way', default=5, type=int, help='class num to classify for training')
    parser.add_argument('--test_n_way', default=5, type=int, help='class num to classify for testing (validation) ')
    parser.add_argument('--n_shot', default=5, type=int, help='number of labeled data in each class, same as n_support')
    parser.add_argument('--train_aug', action='store_true', help='perform data augmentation or not during training ')
    if script == 'train':
        parser.add_argument('--num_classes', default=200, type=int, help='total number of classes in softmax, only used in baseline')
        parser.add_argument('--save_freq', default=50, type=int, help='Save frequency')
        parser.add_argument('--start_epoch', default=0, type=int, help='Starting epoch')
        parser.add_argument('--stop_epoch', default=(- 1), type=int, help='Stopping epoch')
        parser.add_argument('--resume', action='store_true', help='continue from previous trained model with largest epoch')
        parser.add_argument('--warmup', action='store_true', help='continue from baseline, neglected if resume is true')
    elif script == 'save_features':
        parser.add_argument('--split', default='novel', help='base/val/novel')
        parser.add_argument('--save_iter', default=(- 1), type=int, help='save feature from the model trained in x epoch, use the best model if x is -1')
    elif script == 'test':
        parser.add_argument('--split', default='novel', help='base/val/novel')
        parser.add_argument('--save_iter', default=(- 1), type=int, help='saved feature from the model trained in x epoch, use the best model if x is -1')
        parser.add_argument('--adaptation', action='store_true', help='further adaptation in test time or not')
    else:
        raise ValueError('Unknown script')
    return parser.parse_args(args)
def make_zipfile(output_filename, source_dir):
    """Zip *source_dir* into *output_filename* (DEFLATE compressed).

    The archive is rooted at the directory itself, i.e. entries are stored
    as '<basename-of-source_dir>/...'.  Directory entries are written too,
    so empty directories survive the round-trip.
    """
    # Paths are stored relative to source_dir's parent.
    relroot = os.path.abspath(os.path.join(source_dir, os.pardir))
    # Renamed from 'zip', which shadowed the builtin.
    with zipfile.ZipFile(output_filename, 'w', zipfile.ZIP_DEFLATED) as archive:
        for (root, dirs, files) in os.walk(source_dir):
            # Record the directory entry itself.
            archive.write(root, os.path.relpath(root, relroot))
            for file in files:
                filename = os.path.join(root, file)
                # Skip anything that is not a regular file (sockets,
                # dangling symlinks, ...).
                if os.path.isfile(filename):
                    arcname = os.path.join(os.path.relpath(root, relroot), file)
                    archive.write(filename, arcname)
class TestEuropeanCallOption(QiskitFinanceTestCase):
    """Amplitude-estimation tests for European call option pricing and delta."""
    def setUp(self):
        """Build the lognormal uncertainty model, the piecewise-linear payoff
        objective and the delta circuit shared by all tests."""
        super().setUp()
        num_uncertainty_qubits = 3
        # Black-Scholes style inputs: spot, volatility, risk-free rate,
        # time to maturity (40 days, as a year fraction).
        s_p = 2.0
        vol = 0.4
        r = 0.05
        t_m = (40 / 365)
        # Parameters of the lognormal terminal-price distribution.
        m_u = (((r - (0.5 * (vol ** 2))) * t_m) + np.log(s_p))
        sigma = (vol * np.sqrt(t_m))
        mean = np.exp((m_u + ((sigma ** 2) / 2)))
        variance = ((np.exp((sigma ** 2)) - 1) * np.exp(((2 * m_u) + (sigma ** 2))))
        stddev = np.sqrt(variance)
        # Truncate the discretized distribution to mean +/- 3 stddev
        # (clamped at 0 since prices cannot be negative).
        low = np.maximum(0, (mean - (3 * stddev)))
        high = (mean + (3 * stddev))
        warnings.filterwarnings('ignore', category=DeprecationWarning)
        uncertainty_model = LogNormalDistribution(num_uncertainty_qubits, mu=m_u, sigma=sigma, low=low, high=high)
        strike_price = 1.896
        # Approximation scaling for the piecewise-linear payoff rotation.
        c_approx = 0.1
        # Call payoff max(S - K, 0): slope 0 below the strike, 1 above.
        breakpoints = [uncertainty_model.low, strike_price]
        slopes = [0, 1]
        offsets = [0, 0]
        f_min = 0
        f_max = (uncertainty_model.high - strike_price)
        european_call_objective = PwlObjective(uncertainty_model.num_target_qubits, uncertainty_model.low, uncertainty_model.high, breakpoints, slopes, offsets, f_min, f_max, c_approx)
        self.european_call = UnivariateProblem(uncertainty_model, european_call_objective)
        self.european_call_delta = EuropeanCallDelta(uncertainty_model, strike_price=strike_price)
        warnings.filterwarnings('always', category=DeprecationWarning)
        # Fixed seeds for reproducible estimates on both simulators.
        self._statevector = QuantumInstance(backend=BasicAer.get_backend('statevector_simulator'), seed_simulator=2, seed_transpiler=2)
        self._qasm = QuantumInstance(backend=BasicAer.get_backend('qasm_simulator'), shots=100, seed_simulator=2, seed_transpiler=2)
    # NOTE(review): this bare list literal looks like a stripped
    # parameterization decorator (e.g. ddt's @idata) for the method below --
    # confirm; as written it is a no-op expression.
    ([['statevector', AmplitudeEstimation(3), {'estimation': 0., 'mle': 0.163316}], ['qasm', AmplitudeEstimation(4), {'estimation': 0., 'mle': 0.}], ['statevector', MaximumLikelihoodAmplitudeEstimation(5), {'estimation': 0.}], ['qasm', MaximumLikelihoodAmplitudeEstimation(3), {'estimation': 0.}]])
    def test_expected_value(self, simulator, a_e, expect):
        """Expected option value estimated by the given AE algorithm matches
        the reference values for each result field."""
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=DeprecationWarning)
            a_e.a_factory = self.european_call
            result = a_e.run((self._qasm if (simulator == 'qasm') else self._statevector))
        for (key, value) in expect.items():
            self.assertAlmostEqual(getattr(result, key), value, places=4, msg='estimate `{}` failed'.format(key))
    # NOTE(review): same stripped-decorator pattern as above -- confirm.
    ([['statevector', AmplitudeEstimation(3), {'estimation': 0.8535534, 'mle': 0.}], ['qasm', AmplitudeEstimation(4), {'estimation': 0.8535534, 'mle': 0.}], ['statevector', MaximumLikelihoodAmplitudeEstimation(5), {'estimation': 0.}], ['qasm', MaximumLikelihoodAmplitudeEstimation(6), {'estimation': 0.}]])
    def test_delta(self, simulator, a_e, expect):
        """Option delta estimated by the given AE algorithm matches the
        reference values for each result field."""
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore', category=DeprecationWarning)
            a_e.a_factory = self.european_call_delta
            result = a_e.run((self._qasm if (simulator == 'qasm') else self._statevector))
        for (key, value) in expect.items():
            self.assertAlmostEqual(getattr(result, key), value, places=4, msg='estimate `{}` failed'.format(key))
def test_sqliteio_write_updates_progress(tmpfile, view):
    """SQLiteIO.write must drive the worker's progress signals: one
    begin_processing with the item count, one progress per item, and a final
    finished with the filename and an empty error list."""
    worker = MagicMock(canceled=False)
    io = SQLiteIO(tmpfile, view.scene, create_new=True, worker=worker)
    # One item in the scene -> begin_processing(1) and progress(0).
    item = BeePixmapItem(QtGui.QImage())
    view.scene.addItem(item)
    io.write()
    worker.begin_processing.emit.assert_called_once_with(1)
    worker.progress.emit.assert_called_once_with(0)
    # [] means the write finished without errors.
    worker.finished.emit.assert_called_once_with(tmpfile, [])
class Track():
    """Lightweight probe that prints CUDA memory usage and elapsed time at
    named checkpoints.  Disabled by default; set ``enable_track`` to True to
    activate it."""

    def __init__(self):
        # Timestamp of the previous checkpoint (for elapsed-time reporting).
        self.log_point = time.time()
        # Tracking is opt-in.
        self.enable_track = False

    def track(self, mark):
        """Print memory/time stats for checkpoint *mark*, if enabled."""
        if not self.enable_track:
            return
        if torch.cuda.is_available():
            # Wait for in-flight kernels so the memory figure is accurate.
            torch.cuda.synchronize()
            mem_mb = torch.cuda.memory_allocated() / 1024 / 1024
            print('{} memory:'.format(mark), mem_mb, 'M')
        print('{} time cost:'.format(mark), time.time() - self.log_point)
        self.log_point = time.time()
def loads(s: str, *, parse_float: ParseFloat=float) -> Dict[(str, Any)]:
    """Parse a TOML document from a string into a plain dict.

    *parse_float* is called with the raw text of every TOML float, allowing
    e.g. ``decimal.Decimal`` for lossless parsing.
    """
    # TOML mandates \n line endings; normalize Windows CRLF up front.
    src = s.replace('\r\n', '\n')
    pos = 0
    out = Output(NestedDict(), Flags())
    # Key prefix of the table header currently in effect (() = root table).
    header: Key = ()
    # Main loop: consume one statement (key/value pair, table header,
    # comment or blank line) per iteration.
    while True:
        pos = skip_chars(src, pos, TOML_WS)
        try:
            char = src[pos]
        except IndexError:
            break  # end of document
        if (char == '\n'):
            pos += 1
            continue
        if (char in KEY_INITIAL_CHARS):
            # "key = value" under the current table header.
            pos = key_value_rule(src, pos, out, header, parse_float)
            pos = skip_chars(src, pos, TOML_WS)
        elif (char == '['):
            # '[[' opens an array-of-tables, '[' a plain table header.
            try:
                second_char: Optional[str] = src[(pos + 1)]
            except IndexError:
                second_char = None
            if (second_char == '['):
                (pos, header) = create_list_rule(src, pos, out)
            else:
                (pos, header) = create_dict_rule(src, pos, out)
            pos = skip_chars(src, pos, TOML_WS)
        elif (char != '#'):
            raise suffixed_err(src, pos, 'Invalid statement')
        # After a statement, only a comment and then a newline (or EOF)
        # may remain on the line.
        pos = skip_comment(src, pos)
        try:
            char = src[pos]
        except IndexError:
            break
        if (char != '\n'):
            raise suffixed_err(src, pos, 'Expected newline or end of document after a statement')
        pos += 1
    return out.data.dict
def check_corr(result: str, correct_solution: str, tol: float = 0.001) -> int:
    """Score a free-text answer against the reference solution.

    Returns 1 when the stripped strings match exactly, or when both parse
    as floats that agree within *tol*; returns 0 otherwise.

    Note: the original annotated both arguments as ``float`` but immediately
    called ``.strip()`` on them -- they are strings.
    """
    if result.strip() == correct_solution.strip():
        return 1
    try:
        # Fall back to a numeric comparison for answers like '1.50' vs '1.5'.
        delta = abs(float(result.strip()) - float(correct_solution.strip()))
        return int(delta < tol)
    except (TypeError, ValueError, AttributeError):
        # Not parseable as numbers (was a bare `except`, which also hid
        # unrelated errors) -> no match.
        return 0
class TCoverManagerBuiltin(TestCase):
    """Tests for CoverManager's built-in cover sources (embedded vs. external
    cover files) and the 'prefer_embedded' preference."""
    def setUp(self):
        """Create a temp tree with two cover PNGs of different sizes and two
        copies of a silent MP3 to attach/locate covers for."""
        config.init()
        self.main = mkdtemp()
        self.dir1 = mkdtemp(dir=self.main)
        self.dir2 = mkdtemp(dir=self.main)
        # cover1: 10x10 PNG.
        (h, self.cover1) = mkstemp('.png', dir=self.main)
        os.close(h)
        pb = GdkPixbuf.Pixbuf.new(GdkPixbuf.Colorspace.RGB, True, 8, 10, 10)
        pb.savev(self.cover1, 'png', [], [])
        # cover2: 20x20 PNG.
        (h, self.cover2) = mkstemp('.png', dir=self.main)
        os.close(h)
        pb = GdkPixbuf.Pixbuf.new(GdkPixbuf.Colorspace.RGB, True, 8, 20, 20)
        pb.savev(self.cover2, 'png', [], [])
        self.file1 = get_temp_copy(get_data_path('silence-44-s.mp3'))
        self.file2 = get_temp_copy(get_data_path('silence-44-s.mp3'))
        self.manager = CoverManager()
    def tearDown(self):
        shutil.rmtree(self.main)
        config.quit()
    def test_connect_cover_changed(self):
        """'cover-changed' subscribers receive the manager and the songs."""
        called_with = []
        def sig_handler(*args):
            called_with.extend(args)
        obj = object()
        self.manager.connect('cover-changed', sig_handler)
        self.manager.cover_changed([obj])
        self.assertEqual(called_with, [self.manager, [obj]])
    def test_get_primary_image(self):
        """A fresh MP3 copy has no embedded images."""
        self.assertFalse(MP3File(self.file1).has_images)
        # NOTE(review): duplicated assertion -- the second was perhaps meant
        # to check self.file2 instead; confirm.
        self.assertFalse(MP3File(self.file1).has_images)
    def test_manager(self):
        # The built-in setup registers exactly two cover sources.
        self.assertEqual(len(list(self.manager.sources)), 2)
    def test_get_cover_many_prefer_embedded(self):
        """get_cover_many honours the 'prefer_embedded' preference when some
        songs have embedded covers and others only an external cover file."""
        # song1 gets cover1 embedded; song2's directory gets cover2 as an
        # external 'cover.png'.
        MP3File(self.file1).set_image(EmbeddedImage.from_path(self.cover1))
        os.unlink(self.cover1)
        self.external_cover = os.path.join(self.dir2, 'cover.png')
        shutil.move(self.cover2, self.external_cover)
        shutil.move(self.file1, self.dir1)
        self.file1 = os.path.join(self.dir1, os.path.basename(self.file1))
        shutil.move(self.file2, self.dir2)
        self.file2 = os.path.join(self.dir2, os.path.basename(self.file2))
        song1 = MP3File(self.file1)
        song2 = MP3File(self.file2)
        # Sanity: single-song lookups resolve as expected.
        self.assertTrue(self.is_embedded(self.manager.get_cover(song1)))
        self.assertFalse(self.is_embedded(self.manager.get_cover(song2)))
        cover_for = self.manager.get_cover_many
        # With the pref on, the embedded cover wins regardless of order...
        config.set('albumart', 'prefer_embedded', True)
        self.assertTrue(self.is_embedded(cover_for([song1, song2])))
        self.assertTrue(self.is_embedded(cover_for([song2, song1])))
        # ...and with it off, the external cover wins.
        config.set('albumart', 'prefer_embedded', False)
        self.assertFalse(self.is_embedded(cover_for([song1, song2])))
        self.assertFalse(self.is_embedded(cover_for([song2, song1])))
    def is_embedded(self, fileobj):
        # A cover is "embedded" iff it is NOT the known external cover file.
        return (not path_equal(fileobj.name, self.external_cover, True))
    def test_acquire_prefer_embedded(self):
        """acquire_cover also honours 'prefer_embedded' for a song that has
        both an embedded and an external cover available."""
        MP3File(self.file1).set_image(EmbeddedImage.from_path(self.cover1))
        os.unlink(self.cover1)
        self.external_cover = os.path.join(self.dir1, 'cover.png')
        shutil.copy(self.cover2, self.external_cover)
        shutil.move(self.file1, self.dir1)
        self.file1 = os.path.join(self.dir1, os.path.basename(self.file1))
        both_song = MP3File(self.file1)
        results = []
        def acquire(song):
            def cb(source, result):
                results.append(result)
            self.manager.acquire_cover(cb, None, song)
        def result_was_embedded():
            return self.is_embedded(results.pop())
        config.set('albumart', 'prefer_embedded', True)
        acquire(both_song)
        self.assertTrue(result_was_embedded(), 'Embedded image expected due to prefs')
        config.set('albumart', 'prefer_embedded', False)
        acquire(both_song)
        self.assertFalse(result_was_embedded(), 'Got an embedded image despite prefs')
def qufpn_config(min_level, max_level, weight_method=None):
    """Build the fusion-node graph config for a quad-path FPN over feature
    levels [min_level, max_level].

    The graph consists of a top-down pass followed by a bottom-up pass, then
    a mirrored bottom-up-then-top-down pair, and finally one fusion node per
    level that combines the two paths' outputs with *quad_method*.

    Each node dict records its spatial ``reduction`` (stride 2**level), the
    ids of its input nodes (``inputs_offsets``) and the fusion
    ``weight_method``.  ``node_ids`` maps each level to the ordered list of
    node ids produced for it so later nodes can reference earlier ones.
    """
    p = OmegaConf.create()
    weight_method = (weight_method or 'fastattn')
    quad_method = 'fastattn'
    num_levels = ((max_level - min_level) + 1)
    # Seed each level with the id of its raw backbone feature (0..num_levels-1).
    node_ids = {(min_level + i): [i] for i in range(num_levels)}
    level_last_id = (lambda level: node_ids[level][(- 1)])
    level_all_ids = (lambda level: node_ids[level])
    level_first_id = (lambda level: node_ids[level][0])
    # Fresh node ids continue after the backbone-feature ids.
    id_cnt = itertools.count(num_levels)
    p.nodes = []
    # Pass 1 (top-down): from max_level-1 down to min_level, fuse each level
    # with the level above it.
    for i in range((max_level - 1), (min_level - 1), (- 1)):
        p.nodes.append({'reduction': (1 << i), 'inputs_offsets': [level_last_id(i), level_last_id((i + 1))], 'weight_method': weight_method})
        node_ids[i].append(next(id_cnt))
    # Duplicate max_level's last id so both passes see a node there.
    node_ids[max_level].append(node_ids[max_level][(- 1)])
    # Pass 2 (bottom-up): from min_level+1 up to max_level, fuse all of a
    # level's nodes with the level below.
    for i in range((min_level + 1), max_level):
        p.nodes.append({'reduction': (1 << i), 'inputs_offsets': (level_all_ids(i) + [level_last_id((i - 1))]), 'weight_method': weight_method})
        node_ids[i].append(next(id_cnt))
    i = max_level
    p.nodes.append({'reduction': (1 << i), 'inputs_offsets': ([level_first_id(i)] + [level_last_id((i - 1))]), 'weight_method': weight_method})
    node_ids[i].append(next(id_cnt))
    node_ids[min_level].append(node_ids[min_level][(- 1)])
    # Pass 3 (mirrored bottom-up): built from the levels' first (backbone) ids.
    for i in range((min_level + 1), (max_level + 1), 1):
        p.nodes.append({'reduction': (1 << i), 'inputs_offsets': [level_first_id(i), (level_last_id((i - 1)) if (i != (min_level + 1)) else level_first_id((i - 1)))], 'weight_method': weight_method})
        node_ids[i].append(next(id_cnt))
    node_ids[min_level].append(node_ids[min_level][(- 1)])
    # Pass 4 (mirrored top-down).
    for i in range((max_level - 1), min_level, (- 1)):
        p.nodes.append({'reduction': (1 << i), 'inputs_offsets': (([node_ids[i][0]] + [node_ids[i][(- 1)]]) + [level_last_id((i + 1))]), 'weight_method': weight_method})
        node_ids[i].append(next(id_cnt))
    i = min_level
    p.nodes.append({'reduction': (1 << i), 'inputs_offsets': ([node_ids[i][0]] + [level_last_id((i + 1))]), 'weight_method': weight_method})
    node_ids[i].append(next(id_cnt))
    node_ids[max_level].append(node_ids[max_level][(- 1)])
    # Final fusion: per level, combine the output of the first path pair
    # (index 2) with the output of the mirrored pair (index 4).
    for i in range(min_level, (max_level + 1)):
        p.nodes.append({'reduction': (1 << i), 'inputs_offsets': [node_ids[i][2], node_ids[i][4]], 'weight_method': quad_method})
        node_ids[i].append(next(id_cnt))
    return p
def pitch_each_chunk_with_crepe(directory: str) -> list[str]:
    """Pitch-detect every ``*.wav`` chunk in *directory* with CREPE and return
    one representative MIDI note name per chunk, in chunk order."""
    print(f"{ULTRASINGER_HEAD} Pitching each chunk with {blue_highlighted('crepe')}")
    midi_notes = []
    # Chunks are ordered by the integer between the first and second '_' of
    # the filename.  NOTE(review): assumes names like '<prefix>_<idx>_*.wav';
    # a name such as 'chunk_0.wav' would make int() raise -- confirm the
    # chunk-naming scheme used by the caller.
    for filename in sorted([f for f in os.listdir(directory) if f.endswith('.wav')], key=(lambda x: int(x.split('_')[1]))):
        filepath = os.path.join(directory, filename)
        pitched_data = get_pitch_with_crepe_file(filepath, settings.crepe_model_capacity, settings.crepe_step_size, settings.tensorflow_device)
        # Keep only frames CREPE is confident about, then map them to notes.
        conf_f = get_frequencies_with_high_confidence(pitched_data.frequencies, pitched_data.confidence)
        notes = convert_frequencies_to_notes(conf_f)
        # The chunk's single representative is its most frequent note.
        note = most_frequent(notes)[0][0]
        midi_notes.append(note)
    return midi_notes
class Glm(LossMixin, BaseGlm):
    """Penalized GLM estimator that performs a single fit (no CV tuning)."""
    def _is_tuner(self):
        # A plain Glm fits once; it is not a hyper-parameter tuner.
        return False
    def fit(self, X, y, sample_weight=None, offsets=None):
        """Fit the penalized GLM and return ``self``."""
        (pro_data, raw_data, pre_pro_out, configs, solver, init_data, inferencer) = self.setup_and_prefit(X=X, y=y, sample_weight=sample_weight, offsets=offsets)
        # Adaptive/mixed penalty flavors need data-dependent weights derived
        # from the initializer's estimate before the main fit.
        if (get_flavor_kind(configs['penalty']) in ['adaptive', 'mixed']):
            configs['penalty'] = set_adaptive_weights(penalty=configs['penalty'], init_data=init_data)
        self.inferencer_ = inferencer
        self._fit_from_configs(pro_data=pro_data, raw_data=raw_data, configs=configs, solver=solver, pre_pro_out=pre_pro_out, init_data=init_data)
        return self
    def get_unflavored_tunable(self):
        """Return a tunable counterpart of this estimator with the penalty
        flavor stripped and the default initializer.

        NOTE(review): this returns a GlmCV (cross-validated estimator) rather
        than a Glm -- presumably intended for tuning; confirm.
        """
        # Shallow params only; nested estimator params travel with the objects.
        params = deepcopy(self.get_params(deep=False))
        params['initializer'] = 'default'
        params['penalty'] = get_unflavored(params['penalty'])
        return GlmCV(**params)
class Scenario(VersionBase):
    """Top-level OpenSCENARIO document.

    Ties together the file header, parameter declarations, catalogs, road
    network, entities and storyboard, and can serialize itself to XML or be
    parsed back from an <OpenSCENARIO> element.
    """
    _XMLNS = XMLNS
    _XSI = XSI
    def __init__(self, name, author, parameters, entities, storyboard, roadnetwork, catalog, osc_minor_version=_MINOR_VERSION, license=None, creation_date=None, header_properties=None, variable_declaration=None):
        # Validate composite inputs early with clear error messages.
        if (not isinstance(entities, Entities)):
            raise TypeError('entities input is not of type Entities')
        if (not isinstance(storyboard, StoryBoard)):
            raise TypeError('storyboard input is not of type StoryBoard')
        if (not isinstance(roadnetwork, RoadNetwork)):
            raise TypeError('roadnetwork input is not of type RoadNetwork')
        if (not isinstance(catalog, Catalog)):
            # Fixed the garbled message (was 'StorCatalogyBoard').
            raise TypeError('catalog input is not of type Catalog')
        if (not isinstance(parameters, ParameterDeclarations)):
            raise TypeError('parameters input is not of type ParameterDeclarations')
        if (variable_declaration and (not isinstance(variable_declaration, VariableDeclarations))):
            raise TypeError('variable_declaration input is not of type VariableDeclarations')
        self.variable_declaration = variable_declaration
        self.entities = entities
        self.storyboard = storyboard
        self.roadnetwork = roadnetwork
        self.catalog = catalog
        self.parameters = parameters
        # NOTE: FileHeader takes (author, name) in that order.
        self.header = FileHeader(author, name, revMinor=osc_minor_version, license=license, creation_date=creation_date, properties=header_properties)
    def __eq__(self, other):
        if isinstance(other, Scenario):
            if ((self.entities == other.entities) and (self.storyboard == other.storyboard) and (self.roadnetwork == other.roadnetwork) and (self.catalog == other.catalog) and (self.header == other.header) and (self.parameters == other.parameters) and (self.variable_declaration == other.variable_declaration)):
                return True
        return False
    @staticmethod
    def parse(element):
        """Build a Scenario from an <OpenSCENARIO> XML element.

        (Decorated as a staticmethod to match how it is called; the original
        undecorated form only worked when invoked on the class.)
        """
        header = FileHeader.parse(element.find('FileHeader'))
        parameters = ParameterDeclarations()
        if (element.find('ParameterDeclarations') is not None):
            parameters = ParameterDeclarations.parse(element.find('ParameterDeclarations'))
        catalog = Catalog.parse(element.find('CatalogLocations'))
        storyboard = StoryBoard.parse(element.find('Storyboard'))
        entities = Entities.parse(element.find('Entities'))
        roadnetwork = RoadNetwork.parse(element.find('RoadNetwork'))
        variables = None
        if (element.find('VariableDeclarations') is not None):
            variables = VariableDeclarations.parse(element.find('VariableDeclarations'))
        # BUG FIX: pass trailing arguments by keyword.  Positionally,
        # header.properties landed in the ``license`` slot and ``variables``
        # in ``creation_date``.
        return Scenario(header.description, header.author, parameters, entities, storyboard, roadnetwork, catalog, osc_minor_version=header._revMinor, header_properties=header.properties, variable_declaration=variables)
    def get_element(self):
        """Return the <OpenSCENARIO> ElementTree element for this scenario."""
        element = ET.Element('OpenSCENARIO', attrib={'xmlns:xsi': self._XMLNS, 'xsi:noNamespaceSchemaLocation': self._XSI})
        element.append(self.header.get_element())
        # Skip an empty ParameterDeclarations element entirely.
        if self.parameters.get_element():
            element.append(self.parameters.get_element())
        element.append(self.catalog.get_element())
        element.append(self.roadnetwork.get_element())
        element.append(self.entities.get_element())
        element.append(self.storyboard.get_element())
        if self.variable_declaration:
            element.append(self.variable_declaration.get_element())
        return element
    def write_xml(self, filename, prettyprint=True, encoding='utf-8'):
        """Serialize the scenario to *filename* as (optionally pretty) XML."""
        printToFile(self.get_element(), filename, prettyprint, encoding)
class TestBetaIncGrad():
    """Gradient tests for pt.betainc against published reference values."""
    def test_stan_grad_partial(self):
        """Compare d/da, d/db and d/dz of betainc(a, b, z) against Stan's
        reference values, including extreme a/b magnitudes."""
        (a, b, z) = pt.scalars('a', 'b', 'z')
        betainc_out = pt.betainc(a, b, z)
        betainc_grad = pt.grad(betainc_out, [a, b, z])
        f_grad = function([a, b, z], betainc_grad)
        # float32 cannot resolve the reference values to 7 decimals.
        decimal_precision = (7 if (config.floatX == 'float64') else 3)
        for (test_a, test_b, test_z, expected_dda, expected_ddb, expected_ddz) in ((1.5, 1.25, 0.001, (- 0.), 4.e-05, 0.), (1.5, 1.25, 0.5, (- 0.), 0., 1.1905416), (1.5, 1.25, 0.6, (- 0.), 0., 1.), (1.5, 1.25, 0.999, (- 0.), 0., 0.), (15000, 1.25, 0.001, 0, 0, 0), (15000, 1.25, 0.5, 0, 0, 0), (15000, 1.25, 0.6, 0, 0, 0), (15000, 1.25, 0.999, (- 6.e-10), 2.e-06, 0.), (1.5, 12500, 0.001, (- 3.e-05), 1.e-09, 0.1848717), (1.5, 12500, 0.5, 0, 0, 0), (1.5, 12500, 0.6, 0, 0, 0), (1.5, 12500, 0.999, 0, 0, 0), (15000, 12500, 0.001, 0, 0, 0), (15000, 12500, 0.5, (- 8.e-53), 9.e-53, 5.e-48), (15000, 12500, 0.6, (- 4.085621e-14), (- 5.5067062e-14), 1.e-71), (15000, 12500, 0.999, 0, 0, 0)):
            np.testing.assert_almost_equal(f_grad(test_a, test_b, test_z), [expected_dda, expected_ddb, expected_ddz], decimal=decimal_precision)
    def test_boik_robison_cox(self):
        """Compare d/da and d/db against the Boik & Robison-Cox reference
        values (moderate and large symmetric a, b)."""
        (a, b, z) = pt.scalars('a', 'b', 'z')
        betainc_out = pt.betainc(a, b, z)
        betainc_grad = pt.grad(betainc_out, [a, b])
        f_grad = function([a, b, z], betainc_grad)
        decimal = (7 if (config.floatX == 'float64') else 5)
        for (test_a, test_b, test_z, expected_dda, expected_ddb) in ((1.5, 11.0, 0.001, (- 0.), 0.), (1.5, 11.0, 0.5, (- 0.), 0.), (1000.0, 1000.0, 0.5, (- 0.), 0.), (1000.0, 1000.0, 0.55, (- 3.6713108e-07), 4.0584118e-07)):
            np.testing.assert_almost_equal(f_grad(test_a, test_b, test_z), [expected_dda, expected_ddb], decimal=decimal)
    def test_beta_inc_stan_grad_combined(self):
        """Edge cases: at z == 1 the d/db gradient is NaN; interior points
        give finite gradients."""
        (a, b, z) = pt.scalars('a', 'b', 'z')
        betainc_out = pt.betainc(a, b, z)
        betainc_grad = pt.grad(betainc_out, [a, b])
        f_grad = function([a, b, z], betainc_grad)
        for (test_a, test_b, test_z, expected_dda, expected_ddb) in ((1.0, 1.0, 1.0, 0, np.nan), (1.0, 1.0, 0.4, (- 0.), 0.)):
            np.testing.assert_allclose(f_grad(test_a, test_b, test_z), [expected_dda, expected_ddb])
class DcardComment(Base, Timestamp):
    """SQLAlchemy model for a comment on a Dcard post."""
    __tablename__ = 'dcard_comments'
    # Comment ids can repeat across posts, hence the composite uniqueness.
    __table_args__ = (UniqueConstraint('id', 'post_id', name='pair_key'),)
    id = sa.Column(sa.String(64), primary_key=True)
    # Owning post (FK + bidirectional relationship).
    post_id = sa.Column(sa.Integer, sa.ForeignKey('dcard_posts.id'), nullable=False)
    post = relationship('DcardPost', back_populates='comments')
    # Whether the commenter posted anonymously / under a nickname.
    anonymous = sa.Column(sa.Boolean, nullable=False)
    with_nickname = sa.Column(sa.Boolean, nullable=False)
    # Position of the comment within the post's thread.
    floor = sa.Column(sa.Integer, nullable=False)
    # utf8mb4 so emoji and other 4-byte characters are storable in MySQL.
    content = sa.Column(MEDIUMTEXT(charset='utf8mb4', collation='utf8mb4_unicode_ci'))
    gender = sa.Column(sa.String(10), nullable=False)
    school = sa.Column(sa.String(64), nullable=False)
    # Presumably True when the commenter is the post's author -- TODO confirm.
    host = sa.Column(sa.Boolean, nullable=False)
    like_count = sa.Column(sa.Integer, nullable=False)
def _do_import_class(name, currmodule=None):
    """Resolve a dotted *name* to an astroid node, following import chains.

    Walks the attribute path piece by piece starting from *currmodule* (or
    from the first path component when no module is given).  Returns None
    when any step fails to resolve.
    """
    # Consume the dotted path right-to-left via pop().
    path_stack = list(reversed(name.split('.')))
    if (not currmodule):
        currmodule = path_stack.pop()
    try:
        target = astroid.MANAGER.ast_from_module_name(currmodule)
        while (target and path_stack):
            path_part = path_stack.pop()
            # getattr returns a list of matching nodes; take the first
            # (or None when the attribute is absent).
            target = (target.getattr(path_part) or (None,))[0]
            # If the attribute is itself an import statement, chase it into
            # the imported module and re-resolve the attribute there.  The
            # trailing `break` limits this to a single hop.
            while isinstance(target, (astroid.ImportFrom, astroid.Import)):
                try:
                    target = target.do_import_module(path_part)
                except astroid.AstroidImportError:
                    # Fall back to the statement's own default module.
                    target = target.do_import_module()
                target = (target.getattr(path_part) or (None,))[0]
                break
    except astroid.AstroidError:
        # Unresolvable module/attribute anywhere along the path.
        target = None
    return target
class F8_User(FC6_User):
    """Kickstart ``user`` command for Fedora 8: adds the ``--lock`` and
    ``--plaintext`` options on top of the FC6 command."""
    removedKeywords = FC6_User.removedKeywords
    removedAttrs = FC6_User.removedAttrs
    def _getParser(self):
        # Extend the FC6 parser with the options introduced in F8.
        op = FC6_User._getParser(self)
        op.add_argument('--lock', action='store_true', default=False, version=F8, help='\n If this is present, the new user account is locked by\n default. That is, the user will not be able to login\n from the console.')
        # Note: --plaintext stores False into 'isCrypted'.
        op.add_argument('--plaintext', dest='isCrypted', version=F8, action='store_false', help='\n If specified, consider the password provided by\n ``--password`` to be plain text.')
        return op
def canonicalize_list(smiles_list: Iterable[str], include_stereocenters=True) -> List[str]:
    """Canonicalize every SMILES string, drop the ones that failed to parse
    (None results), and return the survivors with duplicates removed."""
    canonical = (canonicalize(smiles, include_stereocenters) for smiles in smiles_list)
    valid = [smiles for smiles in canonical if smiles is not None]
    return remove_duplicates(valid)
class Menu(QtWidgets.QMenu):
    """QMenu subclass that assigns every action a unique dotted ``menuPath``
    so pyzo's key mapper can bind configurable shortcuts to each entry."""

    # Placeholder used by the key-map dialog for hidden entries.
    # NOTE(review): created at class-definition time, i.e. possibly before a
    # QApplication exists -- some Qt bindings warn about this; confirm.
    _dummyActionForHiddenEntryInKeyMapDialog = QtWidgets.QAction()

    def __init__(self, parent=None, name=None):
        QtWidgets.QMenu.__init__(self, parent)
        # A name is required; it may be a translation object carrying
        # ``tt`` (tooltip) and ``key`` (untranslated key) attributes.
        if name:
            self.setTitle(name)
        else:
            raise ValueError
        # Tooltips on menu entries are only available in newer Qt versions.
        try:
            self.setToolTipsVisible(True)
        except Exception:
            pass
        if hasattr(name, 'tt'):
            self.setStatusTip(name.tt)
            self.setToolTip(name.tt)
        # group name -> (QActionGroup, {value: action}) registries for
        # radio-button style option groups.
        self._groups = {}
        # Build this menu's dotted path from the parent chain.
        if hasattr(parent, 'menuPath'):
            self.menuPath = (parent.menuPath + '__')
        else:
            self.menuPath = ''  # top-level menu
        # Prefer the untranslated key so paths stay locale-independent.
        key = name
        if hasattr(name, 'key'):
            key = name.key
        self.menuPath += self._createMenuPathName(key)
        # Subclasses populate the menu here.
        self.build()

    def _createMenuPathName(self, name):
        """Sanitize *name* into a lowercase identifier-like path component."""
        # Strip parenthesized parts, turn spaces into underscores.
        name = re.sub('\\(.*\\)', '', name)
        name = name.replace(' ', '_')
        # NOTE(review): this doubles a leading underscore; intent unclear.
        if (name and (name[0] in '_')):
            name = ('_' + name)
        # Drop anything outside [A-Za-z0-9_].  BUG FIX: the original class
        # '[^a-zA-z_0-9]' (lowercase 'z' in 'A-z') also matched the ASCII
        # characters between 'Z' and 'a' ('[', '\\', ']', '^', '`').
        name = re.sub('[^a-zA-Z_0-9]', '', name)
        return name.lower()

    def _addAction(self, text, icon, selected=None):
        """Create an action, register its menuPath/shortcut, and return it."""
        if (icon is None):
            a = self.addAction(text)
        else:
            a = self.addAction(icon, text)
        if (selected is not None):
            a.setCheckable(True)
            a.setChecked(selected)
        if hasattr(text, 'tt'):
            a.setStatusTip(text.tt)
            a.setToolTip(text.tt)
        # Prefer the untranslated key for the path component.
        key = a.text()
        if hasattr(text, 'key'):
            key = text.key
        a.menuPath = ((self.menuPath + '__') + self._createMenuPathName(key))
        # Apply the configured shortcut now and whenever the keymap changes.
        pyzo.keyMapper.keyMappingChanged.connect((lambda : pyzo.keyMapper.setShortcut(a)))
        pyzo.keyMapper.setShortcut(a)
        return a

    def build(self):
        """Populate the menu; overridden by subclasses."""
        pass

    def popup(self, pos, action=None):
        # Remember where the menu was popped up (used by subclasses).
        self._pos = pos
        super().popup(pos, action)

    def addMenu(self, menu, icon=None):
        """Add a submenu, propagating its menuPath onto the wrapper action."""
        a = QtWidgets.QMenu.addMenu(self, menu)
        a.menuPath = menu.menuPath
        if (icon is not None):
            a.setIcon(icon)
        return menu

    def addItem(self, text, icon=None, callback=None, value=None):
        """Add a plain item; *callback* receives *value* (if given) on trigger."""
        a = self._addAction(text, icon)
        if callback:
            if (value is not None):
                # Bind value as a default argument to avoid late binding.
                a.triggered.connect((lambda b=None, v=value: callback(v)))
            else:
                a.triggered.connect((lambda b=None: callback()))
        return a

    def addGroupItem(self, text, icon=None, callback=None, value=None, group=None):
        """Add a checkable item belonging to a mutually-exclusive group."""
        a = self._addAction(text, icon)
        a.setCheckable(True)
        if callback:
            def doCallback(b, v):
                # Only fire when the action becomes checked, not unchecked.
                if b:
                    callback(v)
            a.toggled.connect((lambda b=None, v=value: doCallback(a.isChecked(), v)))
        if (group is None):
            group = 'default'
        if (group not in self._groups):
            self._groups[group] = (QtWidgets.QActionGroup(self), {})
        (actionGroup, actions) = self._groups[group]
        actionGroup.addAction(a)
        actions[value] = a
        return a

    def addCheckItem(self, text, icon=None, callback=None, value=None, selected=False):
        """Add an independent checkable item; callback gets the new state."""
        a = self._addAction(text, icon, selected)
        if callback:
            if (value is not None):
                a.triggered.connect((lambda b=None, v=value: callback(a.isChecked(), v)))
            else:
                a.triggered.connect((lambda b=None: callback(a.isChecked())))
        return a

    def setCheckedOption(self, group, value):
        """Check the group action registered under *value*, if any."""
        if (group is None):
            group = 'default'
        (actionGroup, actions) = self._groups[group]
        if (value in actions):
            actions[value].setChecked(True)
def test_ls_non_empty():
    """Listing the registry returns the names of all posted schemas, in order."""
    posted = {
        'test_schema1': StructType(fields=[IntType(bits=16)]),
        'test_schema2': StructType(fields=[IntType(bits=8)]),
    }
    headers = {'Content-Type': 'application/x-recap+json'}
    for schema_name, schema_type in posted.items():
        post_response = client.post(f'/registry/{schema_name}', json=to_dict(schema_type), headers=headers)
        assert post_response.status_code == 200
    list_response = client.get('/registry/')
    assert list_response.status_code == 200
    # Insertion order of the dict matches the original name1/name2 ordering.
    assert list_response.json() == list(posted)
class Window(QWidget):
    """A NumRows x NumColumns grid of textured GL widgets that rotate on a timer.

    Clicking a widget makes it the 'current' one; a 20 ms timer rotates the
    current widget one step at a time.
    """

    NumRows = 2
    NumColumns = 3

    def __init__(self):
        super(Window, self).__init__()
        self.glWidgets = []
        grid = QGridLayout()
        cell_count = Window.NumRows * Window.NumColumns
        for row_idx in range(Window.NumRows):
            row_widgets = []
            for col_idx in range(Window.NumColumns):
                # Spread hues evenly over the grid cells.
                cell = (row_idx * Window.NumColumns) + col_idx
                clear_color = QColor()
                clear_color.setHsv((cell * 255) / (cell_count - 1), 255, 63)
                gl_widget = GLWidget()
                gl_widget.setClearColor(clear_color)
                gl_widget.rotateBy(42 * 16, 42 * 16, (-21) * 16)
                grid.addWidget(gl_widget, row_idx, col_idx)
                gl_widget.clicked.connect(self.setCurrentGlWidget)
                row_widgets.append(gl_widget)
            self.glWidgets.append(row_widgets)
        self.setLayout(grid)
        self.currentGlWidget = self.glWidgets[0][0]
        timer = QTimer(self)
        timer.timeout.connect(self.rotateOneStep)
        timer.start(20)
        self.setWindowTitle('Textures')

    def setCurrentGlWidget(self):
        """Remember the widget that emitted the clicked signal."""
        self.currentGlWidget = self.sender()

    def rotateOneStep(self):
        """Advance the rotation of the currently selected widget by one tick."""
        if self.currentGlWidget:
            self.currentGlWidget.rotateBy(2 * 16, 2 * 16, (-1) * 16)
def calculate(file_list: List[str], gt_file_list: List[str], args: argparse.Namespace, mcd_dict: Dict):
    """Compute mel-cepstral distortion (MCD) between generated and ground-truth audio.

    For each generated file, the matching ground-truth file (identified by its
    basename appearing in the generated path) is loaded, resampled if needed,
    converted to mel-cepstra, DTW-aligned, and the per-utterance MCD is stored
    in *mcd_dict* keyed by the ground-truth basename.
    """
    for (i, gen_path) in enumerate(file_list):
        corresponding_list = list(filter((lambda gt_path: (_get_basename(gt_path) in gen_path)), gt_file_list))
        # Each generated file must match exactly one ground-truth file.
        assert (len(corresponding_list) == 1)
        gt_path = corresponding_list[0]
        gt_basename = _get_basename(gt_path)
        (gen_x, gen_fs) = sf.read(gen_path, dtype='int16')
        (gt_x, gt_fs) = sf.read(gt_path, dtype='int16')
        fs = gen_fs
        if (gen_fs != gt_fs):
            # BUG FIX: `np.float` was removed in NumPy 1.24 — use the builtin
            # `float`.  Keyword sr arguments are required by librosa >= 0.10.
            gt_x = librosa.resample(gt_x.astype(float), orig_sr=gt_fs, target_sr=gen_fs)
        gen_mcep = sptk_extract(x=gen_x, fs=fs, n_fft=args.n_fft, n_shift=args.n_shift, mcep_dim=args.mcep_dim, mcep_alpha=args.mcep_alpha)
        gt_mcep = sptk_extract(x=gt_x, fs=fs, n_fft=args.n_fft, n_shift=args.n_shift, mcep_dim=args.mcep_dim, mcep_alpha=args.mcep_alpha)
        # Align the two cepstral sequences with dynamic time warping.
        (_, path) = fastdtw(gen_mcep, gt_mcep, dist=spatial.distance.euclidean)
        twf = np.array(path).T
        gen_mcep_dtw = gen_mcep[twf[0]]
        gt_mcep_dtw = gt_mcep[twf[1]]
        # Standard MCD formula: (10 / ln 10) * sqrt(2 * sum of squared diffs).
        diff2sum = np.sum(((gen_mcep_dtw - gt_mcep_dtw) ** 2), 1)
        mcd = np.mean(((10.0 / np.log(10.0)) * np.sqrt((2 * diff2sum))), 0)
        logging.info(f'{gt_basename} {mcd:.4f}')
        mcd_dict[gt_basename] = mcd
def test_scan_while():
    """Scan with an `until` condition terminates before exhausting n_steps."""
    def power_of_2(previous_power, max_value):
        # Each step doubles the running power; `until` stops the scan once the
        # next power would exceed max_value.
        return ((previous_power * 2), until(((previous_power * 2) > max_value)))
    max_value = pt.scalar()
    (values, _) = scan(power_of_2, outputs_info=pt.constant(1.0), non_sequences=max_value, n_steps=1024)
    out_fg = FunctionGraph([max_value], [values])
    # 45 forces early termination: powers of two up to 64 > 45.
    test_input_vals = [np.array(45).astype(config.floatX)]
    compare_numba_and_py(out_fg, test_input_vals)
class ExportAssetsAsZipTests(TestCase):
    """Tests for the export_assets_as_zipfile admin action."""

    def setUp(self):
        # A sponsorship with one text asset (on the sponsorship) and one image
        # asset (on the sponsor); a Mock stands in for the ModelAdmin.
        self.request_factory = RequestFactory()
        self.request = self.request_factory.get('/')
        self.request.user = baker.make('users.User')
        self.sponsorship = baker.make(Sponsorship, sponsor__name='Sponsor Name')
        self.ModelAdmin = Mock()
        self.text_asset = TextAsset.objects.create(uuid=uuid4(), content_object=self.sponsorship, internal_name='text_input')
        self.img_asset = ImgAsset.objects.create(uuid=uuid4(), content_object=self.sponsorship.sponsor, internal_name='img_input')

    def test_display_same_page_with_warning_message_if_no_query(self):
        """An empty selection redirects back with a warning message."""
        queryset = GenericAsset.objects.none()
        response = export_assets_as_zipfile(self.ModelAdmin, self.request, queryset)
        self.assertEqual(302, response.status_code)
        self.assertEqual(self.request.path, response['Location'])
        msg = 'You have to select at least one asset to export.'
        self.ModelAdmin.message_user.assert_called_once_with(self.request, msg, messages.WARNING)

    def test_display_same_page_with_warning_message_if_any_asset_without_value(self):
        """Selections containing valueless assets redirect back with a warning."""
        # Only the text asset gets a value; the image asset remains empty.
        self.text_asset.value = 'Foo'
        self.text_asset.save()
        queryset = GenericAsset.objects.all()
        response = export_assets_as_zipfile(self.ModelAdmin, self.request, queryset)
        self.assertEqual(302, response.status_code)
        self.assertEqual(self.request.path, response['Location'])
        msg = "1 assets from the selection doesn't have data to export. Please review your selection!"
        self.ModelAdmin.message_user.assert_called_once_with(self.request, msg, messages.WARNING)

    def test_response_is_configured_to_be_zip_file(self):
        """A valid selection yields a zip attachment response."""
        self.text_asset.value = 'foo'
        self.img_asset.value = SimpleUploadedFile(name='test_image.jpg', content=b'content', content_type='image/jpeg')
        self.text_asset.save()
        self.img_asset.save()
        queryset = GenericAsset.objects.all()
        response = export_assets_as_zipfile(self.ModelAdmin, self.request, queryset)
        self.assertEqual('application/x-zip-compressed', response['Content-Type'])
        self.assertEqual('attachment; filename=assets.zip', response['Content-Disposition'])

    def test_zip_file_organize_assets_within_sponsors_directories(self):
        """Assets are laid out inside the zip under '<sponsor name>/' directories."""
        self.text_asset.value = 'foo'
        self.img_asset.value = get_static_image_file_as_upload('psf-logo.png')
        self.text_asset.save()
        self.img_asset.save()
        queryset = GenericAsset.objects.all()
        response = export_assets_as_zipfile(self.ModelAdmin, self.request, queryset)
        content = io.BytesIO(response.content)
        with zipfile.ZipFile(content, 'r') as zip_file:
            self.assertEqual(2, len(zip_file.infolist()))
            with zip_file.open('Sponsor Name/text_input.txt') as cur_file:
                self.assertEqual('foo', cur_file.read().decode())
            with zip_file.open('Sponsor Name/img_input.png') as cur_file:
                self.assertEqual(self.img_asset.value.read(), cur_file.read())
class RHEL7_LogVolData(F21_LogVolData):
    """RHEL7 logvol data: F21 behaviour plus the --mkfsoptions flag."""

    removedKeywords = F21_LogVolData.removedKeywords
    removedAttrs = F21_LogVolData.removedAttrs

    def __init__(self, *args, **kwargs):
        F21_LogVolData.__init__(self, *args, **kwargs)
        # Accept either spelling of the keyword; empty string when absent.
        self.mkfsopts = kwargs.get('mkfsoptions', '') or kwargs.get('mkfsopts', '')

    def _getArgsAsStr(self):
        args_str = F21_LogVolData._getArgsAsStr(self)
        if self.mkfsopts:
            args_str = args_str + (' --mkfsoptions="%s"' % self.mkfsopts)
        return args_str
def _reshape_4(arr, dim):
    """Wrap a flat two-electron integral array in the matching symmetry class.

    The length of *arr* determines the symmetry: 8-fold packed, 4-fold
    (npair x npair), or no symmetry (dim^4).  Unrecognized sizes are returned
    unchanged.
    """
    pair_count = (dim * (dim + 1)) // 2
    size = len(arr)
    if size == (pair_count * (pair_count + 1)) // 2:
        return S8Integrals(np.asarray(arr))
    if size == pair_count ** 2:
        return S4Integrals(np.asarray(arr).reshape(pair_count, pair_count))
    if size == dim ** 4:
        return S1Integrals(np.asarray(arr).reshape(dim, dim, dim, dim))
    return arr
def test_create_email_authorization_for_repo(get_monkeypatch):
    """The pre-OCI shim delegates straight to the data-model function."""
    delegate = Mock()
    get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo', delegate)
    pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name', 'email')
    delegate.assert_called_once_with('namespace_name', 'repository_name', 'email')
def getCommandFromKernelInfo(info, port):
    """Build the shell command line that launches a pyzo kernel on *port*."""
    info = KernelInfo(info)
    exe = info.exe or 'python'
    # Relative interpreter paths are resolved against the application's exe dir.
    if exe.startswith('.'):
        exe = os.path.abspath(os.path.join(EXE_DIR, exe))
    # Quote interpreters whose path contains spaces (unless already quoted).
    if exe.count(' ') and exe[0] != '"':
        exe = '"{}"'.format(exe)
    startScript = os.path.join(pyzo.pyzoDir, 'pyzokernel', 'start.py')
    startScript = '"{}"'.format(startScript)
    return ' '.join([exe, startScript, str(port)])
class Bottleneck(nn.Module):
    """ResNet bottleneck block (1x1 -> 3x3 -> 1x1) with gate layers.

    Gates after the first two conv/bn stages; an optional *gate* module is
    applied after the residual addition.
    """

    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None, gate=None):
        super(Bottleneck, self).__init__()
        width = planes
        self.conv1 = nn.Conv2d(inplanes, width, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(width)
        self.gate1 = GateLayer(width, width, [1, -1, 1, 1])
        self.relu1 = nn.ReLU(inplace=True)
        self.conv2 = nn.Conv2d(width, width, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(width)
        self.gate2 = GateLayer(width, width, [1, -1, 1, 1])
        self.relu2 = nn.ReLU(inplace=True)
        self.conv3 = nn.Conv2d(width, width * self.expansion, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(width * self.expansion)
        self.relu3 = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride
        self.gate = gate

    def forward(self, x):
        identity = x
        y = self.relu1(self.gate1(self.bn1(self.conv1(x))))
        y = self.relu2(self.gate2(self.bn2(self.conv2(y))))
        y = self.bn3(self.conv3(y))
        if self.downsample is not None:
            # Match shapes when stride/channels change across the block.
            identity = self.downsample(x)
        y = y + identity
        y = self.relu3(y)
        if self.gate is not None:
            y = self.gate(y)
        return y
def create_parser() -> Tuple[(argparse.ArgumentParser, argparse.ArgumentParser, argparse._SubParsersAction)]:
    """Build the CLI parser trio for the video-codec baselines tool.

    Returns the root parser, a parent parser carrying the options shared by
    every codec sub-command, and the (required) codec sub-parsers action.
    """
    root_parser = argparse.ArgumentParser(description='Video codec baselines.', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # Shared options live on a parent parser so each codec subcommand inherits them.
    shared = argparse.ArgumentParser(add_help=False)
    shared.add_argument('dataset', type=str, help='sequences directory')
    shared.add_argument('outputdir', type=str, help='output directory')
    shared.add_argument('-n', '--dry-run', action='store_true', help='dry run')
    shared.add_argument('-f', '--force', action='store_true', help='overwrite previous runs')
    shared.add_argument('-j', '--num-jobs', type=int, metavar='N', default=1, help='number of parallel jobs (default: %(default)s)')
    shared.add_argument('-q', '--qps', dest='qps', metavar='Q', default=[32], nargs='+', type=int, help='list of quality/quantization parameter (default: %(default)s)')
    shared.add_argument('--cuda', action='store_true', help='use cuda')
    codec_subparsers = root_parser.add_subparsers(dest='codec', help='video codec')
    codec_subparsers.required = True
    return (root_parser, shared, codec_subparsers)
class CoerceTestCase(unittest.TestCase):
    """Tests for Version.coerce: lenient parsing of almost-semver strings."""

    if (sys.version_info[0] <= 2):
        import contextlib

        # BUG FIX: the Python 2 subTest fallback was a bare generator function;
        # using it in a `with` statement would fail (generators have no
        # __enter__/__exit__).  Wrap it as a proper context manager.
        @contextlib.contextmanager
        def subTest(self, **kwargs):
            (yield)

    # Canonical version -> variants that must coerce to it.
    examples = {
        '0.0.0': ('0', '0.0', '0.0.0', '0.0.0+', '0-'),
        '0.1.0': ('0.1', '0.1+', '0.1-', '0.1.0', '0.01.0', '000.0001.'),
        '0.1.0+2': ('0.1.0+2', '0.1.0.2'),
        '0.1.0+2.3.4': ('0.1.0+2.3.4', '0.1.0+2+3+4', '0.1.0.2+3+4'),
        '0.1.0+2-3.4': ('0.1.0+2-3.4', '0.1.0+2-3+4', '0.1.0.2-3+4', '0.1.0.2_3+4'),
        '0.1.0-a2.3': ('0.1.0-a2.3', '0.1.0a2.3', '0.1.0_a2.3'),
        '0.1.0-a2.3+4.5-6': ('0.1.0-a2.3+4.5-6', '0.1.0a2.3+4.5-6', '0.1.0a2.3+4.5_6', '0.1.0a2.3+4+5/6'),
    }

    def test_coerce(self):
        """Every variant coerces to its canonical Version."""
        for (equivalent, samples) in self.examples.items():
            target = base.Version(equivalent)
            for sample in samples:
                with self.subTest(target=equivalent, sample=sample):
                    v_sample = base.Version.coerce(sample)
                    self.assertEqual(target, v_sample)

    def test_invalid(self):
        """Strings with a leading 'v' are rejected outright."""
        self.assertRaises(ValueError, base.Version.coerce, 'v1')
class AsyncSchemaTypeMixin():
    """Async counterparts of SchemaType's DDL hooks.

    Each method resolves the dialect-specific implementation of this type and,
    when that implementation is a different SchemaType subclass, forwards the
    call to its async variant.  Otherwise the call is a no-op.
    """

    async def create_async(self, bind=None, checkfirst=False):
        """Issue CREATE DDL for this type via the dialect implementation."""
        if (bind is None):
            bind = _bind_or_error(self)
        t = self.dialect_impl(bind.dialect)
        # Only delegate when the dialect swapped in a different SchemaType.
        if ((t.__class__ is not self.__class__) and isinstance(t, SchemaType)):
            (await t.create_async(bind=bind, checkfirst=checkfirst))

    async def drop_async(self, bind=None, checkfirst=False):
        """Issue DROP DDL for this type via the dialect implementation."""
        if (bind is None):
            bind = _bind_or_error(self)
        t = self.dialect_impl(bind.dialect)
        if ((t.__class__ is not self.__class__) and isinstance(t, SchemaType)):
            (await t.drop_async(bind=bind, checkfirst=checkfirst))

    async def _on_table_create_async(self, target, bind, **kw):
        """Table-create event hook, forwarded to the dialect implementation."""
        # Skip when this type is not the implementation for the active variant.
        if (not self._is_impl_for_variant(bind.dialect, kw)):
            return
        t = self.dialect_impl(bind.dialect)
        if ((t.__class__ is not self.__class__) and isinstance(t, SchemaType)):
            (await getattr(t, '_on_table_create_async')(target, bind, **kw))

    async def _on_table_drop_async(self, target, bind, **kw):
        """Table-drop event hook, forwarded to the dialect implementation."""
        if (not self._is_impl_for_variant(bind.dialect, kw)):
            return
        t = self.dialect_impl(bind.dialect)
        if ((t.__class__ is not self.__class__) and isinstance(t, SchemaType)):
            (await getattr(t, '_on_table_drop_async')(target, bind, **kw))

    async def _on_metadata_create_async(self, target, bind, **kw):
        """Metadata-create event hook, forwarded to the dialect implementation."""
        if (not self._is_impl_for_variant(bind.dialect, kw)):
            return
        t = self.dialect_impl(bind.dialect)
        if ((t.__class__ is not self.__class__) and isinstance(t, SchemaType)):
            (await getattr(t, '_on_metadata_create_async')(target, bind, **kw))

    async def _on_metadata_drop_async(self, target, bind, **kw):
        """Metadata-drop event hook, forwarded to the dialect implementation."""
        if (not self._is_impl_for_variant(bind.dialect, kw)):
            return
        t = self.dialect_impl(bind.dialect)
        if ((t.__class__ is not self.__class__) and isinstance(t, SchemaType)):
            (await getattr(t, '_on_metadata_drop_async')(target, bind, **kw))
class Item():
    """An Earley parser item: a rule with a dot position and a start column.

    The item is "complete" when the dot has reached the end of the rule's
    expansion.  Equality and hashing use the (s, start) pair, where `s` is the
    rule origin for complete items and the (rule, ptr) pair otherwise.
    """
    __slots__ = ('s', 'rule', 'ptr', 'start', 'is_complete', 'expect', 'previous', 'node', '_hash')

    def __init__(self, rule, ptr, start):
        self.rule = rule
        self.ptr = ptr
        self.start = start
        self.node = None
        expansion = rule.expansion
        self.is_complete = (ptr == len(expansion))
        # Symbol just before the dot, if any (None for empty expansions too).
        self.previous = expansion[ptr - 1] if (ptr > 0 and len(expansion)) else None
        if self.is_complete:
            self.s = rule.origin
            self.expect = None
        else:
            self.s = (rule, ptr)
            self.expect = expansion[ptr]
        self._hash = hash((self.s, self.start))

    def advance(self):
        """Return a new Item with the dot moved one symbol to the right."""
        return Item(self.rule, self.ptr + 1, self.start)

    def __eq__(self, other):
        if self is other:
            return True
        return (self.s == other.s) and (self.start == other.start)

    def __hash__(self):
        return self._hash

    def __repr__(self):
        before = [symbol.name for symbol in self.rule.expansion[:self.ptr]]
        after = [symbol.name for symbol in self.rule.expansion[self.ptr:]]
        symbol = '{} ::= {}* {}'.format(self.rule.origin.name, ' '.join(before), ' '.join(after))
        return ('%s (%d)' % (symbol, self.start))
def generate_prompt(data_point):
    """Render an Alpaca-style instruction prompt from a data point.

    Uses the "with input" template when data_point['input'] is non-empty,
    otherwise the input-less template.
    """
    instruction = data_point['instruction']
    output = data_point['output']
    context = data_point['input']
    if not context:
        return f'''Below is an instruction that describes a task. Write a response that appropriately completes the request.
### Instruction:
{instruction}
### Response:
{output}'''
    return f'''Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.
### Instruction:
{instruction}
### Input:
{context}
### Response:
{output}'''
class Decimation(Object):
    """A StationXML Decimation response stage (factor, rates, delay, correction)."""

    input_sample_rate = Frequency.T(xmltagname='InputSampleRate')
    factor = Int.T(xmltagname='Factor')
    offset = Int.T(xmltagname='Offset')
    delay = FloatWithUnit.T(xmltagname='Delay')
    correction = FloatWithUnit.T(xmltagname='Correction')

    def summary(self):
        """One-line human-readable description of this decimation stage."""
        rate_in = self.input_sample_rate.value
        return ('deci(%i, %g -> %g, %g)' % (self.factor, rate_in, (rate_in / self.factor), self.delay.value))

    def get_pyrocko_response(self):
        """Convert the stage's delay (if any) into a pyrocko DelayResponse."""
        if (not self.delay) or (self.delay.value == 0.0):
            return Delivery([])
        return Delivery([response.DelayResponse(delay=(-self.delay.value))])
class Factory(BaseFactory):
    """Poetry-aware factory.

    Extends the base factory with everything Poetry-the-tool needs on top of
    the core project model: the lock file, local ``poetry.toml`` configuration,
    the repository pool (with priority handling), and the plugin manager.
    """

    def create_poetry(self, cwd: (Path | None)=None, with_groups: bool=True, io: (IO | None)=None, disable_plugins: bool=False, disable_cache: bool=False) -> Poetry:
        """Build a fully configured Poetry instance rooted at *cwd*."""
        if (io is None):
            io = NullIO()
        base_poetry = super().create_poetry(cwd=cwd, with_groups=with_groups)
        poetry_file = base_poetry.pyproject_path
        locker = Locker((poetry_file.parent / 'poetry.lock'), base_poetry.local_config)
        config = Config.create()
        # A project-local poetry.toml overlays the global configuration.
        local_config_file = TOMLFile((poetry_file.parent / 'poetry.toml'))
        if local_config_file.exists():
            if io.is_debug():
                io.write_line(f'Loading configuration file {local_config_file.path}')
            config.merge(local_config_file.read())
        # Expose pyproject [[tool.poetry.source]] entries as repositories,
        # without clobbering repositories already defined in the config.
        repositories = {}
        existing_repositories = config.get('repositories', {})
        for source in base_poetry.pyproject.poetry_config.get('source', []):
            name = source.get('name')
            url = source.get('url')
            if (name and url and (name not in existing_repositories)):
                repositories[name] = {'url': url}
        config.merge({'repositories': repositories})
        poetry = Poetry(poetry_file, base_poetry.local_config, base_poetry.package, locker, config, disable_cache)
        poetry.set_pool(self.create_pool(config, poetry.local_config.get('source', []), io, disable_cache=disable_cache))
        plugin_manager = PluginManager(Plugin.group, disable_plugins=disable_plugins)
        plugin_manager.load_plugins()
        poetry.set_plugin_manager(plugin_manager)
        plugin_manager.activate(poetry, io)
        return poetry

    # NOTE(review): this and the following `cls` methods carry no @classmethod
    # decorator here -- decorators appear to have been stripped in extraction;
    # confirm against upstream poetry sources.
    def get_package(cls, name: str, version: str) -> ProjectPackage:
        """Return a ProjectPackage for *name* at *version*."""
        return ProjectPackage(name, version)

    def create_pool(cls, config: Config, sources: Iterable[dict[(str, Any)]]=(), io: (IO | None)=None, disable_cache: bool=False) -> RepositoryPool:
        """Assemble the repository pool from *sources*.

        Handles the deprecated 'default'/'secondary' flags, priority mapping,
        and the implicit PyPI repository (added unless explicitly configured
        or deactivated by a default source).
        """
        from poetry.repositories import RepositoryPool
        from poetry.repositories.repository_pool import Priority
        if (io is None):
            io = NullIO()
        if disable_cache:
            logger.debug('Disabling source caches')
        pool = RepositoryPool(config=config)
        explicit_pypi = False
        for source in sources:
            repository = cls.create_package_source(source, config, disable_cache=disable_cache)
            priority = Priority[source.get('priority', Priority.PRIMARY.name).upper()]
            # Legacy 'default'/'secondary' keys are honoured but deprecated.
            if (('default' in source) or ('secondary' in source)):
                warning = f"Found deprecated key 'default' or 'secondary' in pyproject.toml configuration for source {source.get('name')}. Please provide the key 'priority' instead. Accepted values are: {', '.join((repr(p.name.lower()) for p in Priority))}."
                io.write_error_line(f'<warning>Warning: {warning}</warning>')
                if source.get('default'):
                    priority = Priority.DEFAULT
                elif source.get('secondary'):
                    priority = Priority.SECONDARY
            if (priority is Priority.SECONDARY):
                allowed_prios = (p for p in Priority if (p is not Priority.SECONDARY))
                warning = f"Found deprecated priority 'secondary' for source '{source.get('name')}' in pyproject.toml. Consider changing the priority to one of the non-deprecated values: {', '.join((repr(p.name.lower()) for p in allowed_prios))}."
                io.write_error_line(f'<warning>Warning: {warning}</warning>')
            if io.is_debug():
                message = f'Adding repository {repository.name} ({repository.url})'
                if (priority is Priority.DEFAULT):
                    message += ' and setting it as the default one'
                else:
                    message += f' and setting it as {priority.name.lower()}'
                io.write_line(message)
            pool.add_repository(repository, priority=priority)
            if (repository.name.lower() == 'pypi'):
                explicit_pypi = True
        # Implicit-PyPI handling: skip when a default source exists, otherwise
        # add it (secondary if other primary sources are configured).
        if (not explicit_pypi):
            if pool.has_default():
                if io.is_debug():
                    io.write_line('Deactivating the PyPI repository')
            else:
                from poetry.repositories.pypi_repository import PyPiRepository
                if pool.has_primary_repositories():
                    io.write_error_line("<warning>Warning: In a future version of Poetry, PyPI will be disabled automatically if at least one custom primary source is configured. In order to avoid a breaking change and make your pyproject.toml forward compatible, add PyPI explicitly via 'poetry source add pypi'. By the way, this has the advantage that you can set the priority of PyPI as with any other source.</warning>")
                if pool.has_primary_repositories():
                    pypi_priority = Priority.SECONDARY
                else:
                    pypi_priority = Priority.DEFAULT
                pool.add_repository(PyPiRepository(disable_cache=disable_cache), priority=pypi_priority)
        if (not pool.repositories):
            raise PoetryException("At least one source must not be configured as 'explicit'.")
        return pool

    def create_package_source(cls, source: dict[(str, str)], config: Config, disable_cache: bool=False) -> HTTPRepository:
        """Create the repository object described by one source table."""
        from poetry.repositories.exceptions import InvalidSourceError
        from poetry.repositories.legacy_repository import LegacyRepository
        from poetry.repositories.pypi_repository import PyPiRepository
        from poetry.repositories.single_page_repository import SinglePageRepository
        try:
            name = source['name']
        except KeyError:
            raise InvalidSourceError('Missing [name] in source.')
        pool_size = config.installer_max_workers
        if (name.lower() == 'pypi'):
            if ('url' in source):
                raise InvalidSourceError('The PyPI repository cannot be configured with a custom url.')
            return PyPiRepository(disable_cache=disable_cache, pool_size=pool_size)
        try:
            url = source['url']
        except KeyError:
            raise InvalidSourceError(f'Missing [url] in source {name!r}.')
        repository_class = LegacyRepository
        # URLs pointing at a single HTML page get the single-page repository.
        if re.match('.*\\.(htm|html)$', url):
            repository_class = SinglePageRepository
        return repository_class(name, url, config=config, disable_cache=disable_cache, pool_size=pool_size)

    def create_pyproject_from_package(cls, package: Package) -> TOMLDocument:
        """Render *package* back into a tool.poetry pyproject TOML document."""
        import tomlkit
        from poetry.utils.dependency_specification import dependency_to_specification
        pyproject: dict[(str, Any)] = tomlkit.document()
        pyproject['tool'] = tomlkit.table(is_super_table=True)
        content: dict[(str, Any)] = tomlkit.table()
        pyproject['tool']['poetry'] = content
        content['name'] = package.name
        content['version'] = package.version.text
        content['description'] = package.description
        content['authors'] = package.authors
        content['license'] = (package.license.id if package.license else '')
        if package.classifiers:
            content['classifiers'] = package.classifiers
        # Optional metadata: only emitted when the package actually has it.
        for (key, attr) in {('documentation', 'documentation_url'), ('repository', 'repository_url'), ('homepage', 'homepage'), ('maintainers', 'maintainers'), ('keywords', 'keywords')}:
            value = getattr(package, attr, None)
            if value:
                content[key] = value
        readmes = []
        for readme in package.readmes:
            readme_posix_path = readme.as_posix()
            # Prefer paths relative to the project root when possible.
            with contextlib.suppress(ValueError):
                if package.root_dir:
                    readme_posix_path = readme.relative_to(package.root_dir).as_posix()
            readmes.append(readme_posix_path)
        if readmes:
            content['readme'] = readmes
        # Dependencies listed under an extra must be marked optional below.
        optional_dependencies = set()
        extras_section = None
        if package.extras:
            extras_section = tomlkit.table()
            for extra in package.extras:
                _dependencies = []
                for dependency in package.extras[extra]:
                    _dependencies.append(dependency.name)
                    optional_dependencies.add(dependency.name)
                extras_section[extra] = _dependencies
        # (already a set; this cast is a no-op)
        optional_dependencies = set(optional_dependencies)
        dependency_section = content['dependencies'] = tomlkit.table()
        dependency_section['python'] = package.python_versions
        for dep in package.all_requires:
            constraint: (DependencySpec | str) = dependency_to_specification(dep, tomlkit.inline_table())
            if (not isinstance(constraint, str)):
                if (dep.name in optional_dependencies):
                    constraint['optional'] = True
                # Collapse {'version': 'x'} to plain 'x'; empty specs become '*'.
                if ((len(constraint) == 1) and ('version' in constraint)):
                    assert isinstance(constraint['version'], str)
                    constraint = constraint['version']
                elif (not constraint):
                    constraint = '*'
            for group in dep.groups:
                if (group == MAIN_GROUP):
                    dependency_section[dep.name] = constraint
                else:
                    # Non-main groups live under [tool.poetry.group.<name>.dependencies].
                    if ('group' not in content):
                        content['group'] = tomlkit.table(is_super_table=True)
                    if (group not in content['group']):
                        content['group'][group] = tomlkit.table(is_super_table=True)
                    if ('dependencies' not in content['group'][group]):
                        content['group'][group]['dependencies'] = tomlkit.table()
                    content['group'][group]['dependencies'][dep.name] = constraint
        if extras_section:
            content['extras'] = extras_section
        pyproject = cast('TOMLDocument', pyproject)
        return pyproject

    def validate(cls, config: dict[(str, Any)], strict: bool=False) -> dict[(str, list[str])]:
        """Validate a tool.poetry config, adding poetry-specific checks."""
        results = super().validate(config, strict)
        results['errors'].extend(validate_object(config))
        # A project must not depend on itself (checked across all groups).
        dependencies = set(config.get('dependencies', {}).keys())
        dependencies.update(config.get('dev-dependencies', {}).keys())
        groups = config.get('group', {}).values()
        for group in groups:
            dependencies.update(group.get('dependencies', {}).keys())
        dependencies = {canonicalize_name(d) for d in dependencies}
        project_name = config.get('name')
        if ((project_name is not None) and (canonicalize_name(project_name) in dependencies)):
            results['errors'].append(f'Project name ({project_name}) is same as one of its dependencies')
        return results
class run_in_temp(object):
    """Context manager that chdirs into *path* for the duration of the block.

    When no path is given, a fresh temporary directory is created on entry
    and removed again on exit.  The previous working directory is always
    restored.
    """

    def __init__(self, path=None):
        self._must_delete = False
        self._path = path

    def __enter__(self):
        target = self._path
        if target is None:
            from tempfile import mkdtemp
            target = mkdtemp(prefix='pyrocko-test')
            self._path = target
            # Only directories we created ourselves are cleaned up on exit.
            self._must_delete = True
        self._oldwd = os.getcwd()
        os.chdir(target)

    def __exit__(self, *args):
        os.chdir(self._oldwd)
        if self._must_delete:
            shutil.rmtree(self._path)
_config
def test_tall_add_clients_after_current(manager):
    """New windows land in the secondary pane right after the focused client."""
    for window_name in ('one', 'two', 'three'):
        manager.test_window(window_name)
    manager.c.layout.previous()
    assert_focused(manager, 'two')
    manager.test_window('four')
    layout_info = manager.c.layout.info()
    assert layout_info['main'] == 'one'
    assert layout_info['secondary'] == ['two', 'four', 'three']
    assert_focused(manager, 'four')
class LogoPlacementConfigurationModelTests(TestCase):
    """Tests for the LogoPlacementConfiguration benefit-configuration model."""

    def setUp(self):
        self.config = baker.make(LogoPlacementConfiguration, publisher=PublisherChoices.FOUNDATION, logo_place=LogoPlacementChoices.FOOTER)

    def test_get_benefit_feature_respecting_configuration(self):
        """get_benefit_feature builds an unsaved LogoPlacement mirroring the config."""
        benefit_feature = self.config.get_benefit_feature()
        self.assertIsInstance(benefit_feature, LogoPlacement)
        self.assertEqual(benefit_feature.publisher, PublisherChoices.FOUNDATION)
        self.assertEqual(benefit_feature.logo_place, LogoPlacementChoices.FOOTER)
        # The feature is not persisted and not attached to a sponsor benefit.
        self.assertIsNone(benefit_feature.pk)
        self.assertIsNone(benefit_feature.sponsor_benefit_id)

    def test_display_modifier_returns_same_name(self):
        """display_modifier is the identity for this configuration type."""
        name = 'Benefit'
        self.assertEqual(name, self.config.display_modifier(name))

    def test_clone_configuration_for_new_sponsorship_benefit(self):
        """clone copies the config onto a new benefit; repeated clones are idempotent."""
        sp_benefit = baker.make(SponsorshipBenefit)
        (new_cfg, created) = self.config.clone(sp_benefit)
        self.assertTrue(created)
        self.assertEqual(2, LogoPlacementConfiguration.objects.count())
        self.assertEqual(PublisherChoices.FOUNDATION, new_cfg.publisher)
        self.assertEqual(LogoPlacementChoices.FOOTER, new_cfg.logo_place)
        self.assertEqual(sp_benefit, new_cfg.benefit)
        # Cloning again must return the already-created configuration.
        (repeated, created) = self.config.clone(sp_benefit)
        self.assertFalse(created)
        self.assertEqual(new_cfg.pk, repeated.pk)
def process_obj(obj_file):
    """Extract a signed distance field from *obj_file* and save it as an h5 file.

    Normalizes the mesh, runs the external SDF/marching-cubes tools (paths
    hard-coded below), and returns the path of the generated ``*_sdf.h5``.
    """
    sdfcommand = './preprocess/isosurface/computeDistanceField'
    mcube_cmd = 'preprocess/isosurface/computeMarchingCubes'
    lib_cmd = 'preprocess/isosurface/LIB_PATH'
    h5_file = obj_file.replace('.obj', '_sdf.h5')
    # SDF sampling parameters (see create_sdf_obj for their meaning).
    num_sample = (65 ** 3)
    bandwidth = 0.1
    sdf_res = 256
    expand_rate = 1.3
    iso_val = 0.003
    max_verts = 16384
    g = 0.0
    indx = 0
    ish5 = True
    norm = True
    # Renamed from `reduce` to avoid shadowing the builtin.
    reduce_factor = 4
    # The external binaries need these native libraries on the loader path.
    os.environ['LD_LIBRARY_PATH'] = '$LD_LIBRARY_PATH:./preprocess/isosurface/:./preprocess/isosurface/tbb/tbb2018_oss/lib/intel64/gcc4.7:/opt/intel/lib/intel64:/opt/intel/mkl/lib/intel64:/usr/local/lib64:/usr/local/lib:/usr/local/cuda/lib64'
    tmp_dir = 'tmp/for_sdf'
    norm_mesh_dir = f'{tmp_dir}/norm_mesh'
    sdf_dir = f'{tmp_dir}/sdf'
    os.makedirs(norm_mesh_dir, exist_ok=True)
    os.makedirs(sdf_dir, exist_ok=True)
    create_sdf_obj(sdfcommand, mcube_cmd, norm_mesh_dir, sdf_dir, obj_file, sdf_res, iso_val, expand_rate, indx, ish5, norm, num_sample, bandwidth, max_verts, g, reduce_factor, h5_file)
    print(f'[*] successfully extract sdf and save to: {h5_file}')
    return h5_file
def train_loop(dataloader, model, optimizer):
    """Run one training epoch over *dataloader*.

    For each (features, returns) batch, computes the model loss, backprops,
    and steps the optimizer.  Logs the loss every 10 batches.
    """
    # BUG FIX: previously iterated the global `train_dl` instead of the
    # `dataloader` parameter, ignoring the caller's data (or raising NameError).
    for (batch, (feat, ret)) in enumerate(dataloader):
        loss = model.run_model(feat, ret)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        if ((batch % 10) == 0):
            print(f'batch: {batch}, loss: {loss.item()}')
class TestOtherCertificate():
    """Certificates exercising unusual or broken encodings."""

    def test_unsupported_subject_public_key_info(self, backend):
        """Loading succeeds, but extracting an unsupported SPKI must raise."""
        pem_path = os.path.join('x509', 'custom', 'unsupported_subject_public_key_info.pem')
        cert = _load_cert(pem_path, x509.load_pem_x509_certificate)
        with pytest.raises(ValueError):
            cert.public_key()

    def test_bad_time_in_validity(self, backend):
        """A malformed notAfter time is rejected at load time."""
        bad_time_path = os.path.join('x509', 'badasn1time.pem')
        with pytest.raises(ValueError, match='Validity::not_after'):
            _load_cert(bad_time_path, x509.load_pem_x509_certificate)
def load_image_label_from_xml(img_name, voc12_root):
    """Build a multi-hot class-label vector from a VOC annotation XML file."""
    from xml.dom import minidom
    xml_path = os.path.join(voc12_root, ANNOT_FOLDER_NAME, (decode_int_filename(img_name) + '.xml'))
    name_elements = minidom.parse(xml_path).getElementsByTagName('name')
    labels = np.zeros(N_CAT, np.float32)
    for name_element in name_elements:
        category = name_element.firstChild.data
        # Ignore object names that are not part of the category list.
        if category in CAT_LIST:
            labels[CAT_NAME_TO_NUM[category]] = 1.0
    return labels
class _SocketType(SocketType):
    """Trio socket wrapper: adapts a non-blocking stdlib socket to async use."""

    def __init__(self, sock: _stdlib_socket.socket):
        # Exact-type check: subclasses (e.g. SSL-wrapped sockets) would break
        # the non-blocking assumptions made by the wrapper.
        if (type(sock) is not _stdlib_socket.socket):
            raise TypeError(f"expected object of type 'socket.socket', not '{type(sock).__name__}'")
        self._sock = sock
        # Every operation below relies on the socket being non-blocking.
        self._sock.setblocking(False)
        self._did_shutdown_SHUT_WR = False
    def detach(self) -> int:
        """Detach and return the underlying file descriptor (delegates)."""
        return self._sock.detach()

    def fileno(self) -> int:
        return self._sock.fileno()

    def getpeername(self) -> AddressFormat:
        return self._sock.getpeername()

    def getsockname(self) -> AddressFormat:
        return self._sock.getsockname()

    # NOTE(review): the two stub bodies below look like @overload signatures
    # whose decorators were stripped in extraction -- confirm against upstream.
    def getsockopt(self, /, level: int, optname: int) -> int:
        ...

    def getsockopt(self, /, level: int, optname: int, buflen: int) -> bytes:
        ...

    def getsockopt(self, /, level: int, optname: int, buflen: (int | None)=None) -> (int | bytes):
        """Delegate to stdlib getsockopt; *buflen* selects the bytes form."""
        if (buflen is None):
            return self._sock.getsockopt(level, optname)
        return self._sock.getsockopt(level, optname, buflen)
    # NOTE(review): likewise apparent @overload stubs for setsockopt.
    def setsockopt(self, /, level: int, optname: int, value: (int | Buffer)) -> None:
        ...

    def setsockopt(self, /, level: int, optname: int, value: None, optlen: int) -> None:
        ...

    def setsockopt(self, /, level: int, optname: int, value: ((int | Buffer) | None), optlen: (int | None)=None) -> None:
        """Delegate to stdlib setsockopt, validating the value/optlen pairing.

        Exactly one of the two forms is allowed: a non-None *value* without
        *optlen*, or value=None with an explicit *optlen*.
        """
        if (optlen is None):
            if (value is None):
                raise TypeError("invalid value for argument 'value', must not be None when specifying optlen")
            return self._sock.setsockopt(level, optname, value)
        if (value is not None):
            raise TypeError(f"invalid value for argument 'value': {value!r}, must be None when specifying optlen")
        return self._sock.setsockopt(level, optname, value, optlen)

    def listen(self, /, backlog: int=min(_stdlib_socket.SOMAXCONN, 128)) -> None:
        # Default backlog is capped at 128 even when SOMAXCONN is larger.
        return self._sock.listen(backlog)

    def get_inheritable(self) -> bool:
        return self._sock.get_inheritable()

    def set_inheritable(self, inheritable: bool) -> None:
        return self._sock.set_inheritable(inheritable)

    # share() only exists on Windows sockets.
    if ((sys.platform == 'win32') or ((not TYPE_CHECKING) and hasattr(_stdlib_socket.socket, 'share'))):

        def share(self, /, process_id: int) -> bytes:
            return self._sock.share(process_id)
    def __enter__(self) -> Self:
        return self

    def __exit__(self, exc_type: (type[BaseException] | None), exc_value: (BaseException | None), traceback: (TracebackType | None)) -> None:
        # Closing is delegated to the wrapped socket's context-manager exit.
        return self._sock.__exit__(exc_type, exc_value, traceback)

    # NOTE(review): family/type/proto/did_shutdown_SHUT_WR read like @property
    # accessors whose decorators were stripped in extraction -- confirm upstream.
    def family(self) -> AddressFamily:
        return self._sock.family

    def type(self) -> SocketKind:
        return self._sock.type

    def proto(self) -> int:
        return self._sock.proto

    def did_shutdown_SHUT_WR(self) -> bool:
        return self._did_shutdown_SHUT_WR

    def __repr__(self) -> str:
        return repr(self._sock).replace('socket.socket', 'trio.socket.socket')

    def dup(self) -> SocketType:
        """Duplicate the file descriptor and wrap it in a new trio socket."""
        return _SocketType(self._sock.dup())

    def close(self) -> None:
        # Notify the run loop before closing (skipped for already-detached
        # sockets, whose fileno is -1).
        if (self._sock.fileno() != (- 1)):
            trio.lowlevel.notify_closing(self._sock)
        self._sock.close()
    async def bind(self, address: AddressFormat) -> None:
        """Resolve *address* and bind the socket."""
        address = (await self._resolve_address_nocp(address, local=True))
        if (hasattr(_stdlib_socket, 'AF_UNIX') and (self.family == _stdlib_socket.AF_UNIX) and address[0]):
            # Binding a non-abstract AF_UNIX socket touches the filesystem,
            # which can block -- run it in a worker thread.
            return (await trio.to_thread.run_sync(self._sock.bind, address))
        else:
            # Other binds don't block; still execute a checkpoint first.
            (await trio.lowlevel.checkpoint())
            return self._sock.bind(address)

    def shutdown(self, flag: int) -> None:
        self._sock.shutdown(flag)
        # Record write-side shutdowns (exposed via did_shutdown_SHUT_WR).
        if (flag in [_stdlib_socket.SHUT_WR, _stdlib_socket.SHUT_RDWR]):
            self._did_shutdown_SHUT_WR = True

    def is_readable(self) -> bool:
        """Non-blocking readability probe (select on Windows, poll elsewhere)."""
        if (sys.platform == 'win32'):
            (rready, _, _) = select.select([self._sock], [], [], 0)
            return bool(rready)
        p = select.poll()
        p.register(self._sock, select.POLLIN)
        return bool(p.poll(0))
    async def wait_writable(self) -> None:
        """Block until the socket is writable."""
        (await _core.wait_writable(self._sock))

    async def _resolve_address_nocp(self, address: AddressFormat, *, local: bool) -> AddressFormat:
        """Resolve *address* for this socket's family/type ('nocp' = no checkpoint)."""
        if (self.family == _stdlib_socket.AF_INET6):
            # IPV6_V6ONLY influences how addresses are resolved for v6 sockets.
            ipv6_v6only = self._sock.getsockopt(_stdlib_socket.IPPROTO_IPV6, _stdlib_socket.IPV6_V6ONLY)
        else:
            ipv6_v6only = False
        return (await _resolve_address_nocp(self.type, self.family, self.proto, ipv6_v6only=ipv6_v6only, address=address, local=local))

    async def _nonblocking_helper(self, wait_fn: Callable[([_stdlib_socket.socket], Awaitable[None])], fn: Callable[(Concatenate[(_stdlib_socket.socket, P)], T)], *args: P.args, **kwargs: P.kwargs) -> T:
        """Run non-blocking *fn*, retrying after *wait_fn* whenever it would block."""
        # Fast path: try the call immediately (inside _try_sync's handling).
        async with _try_sync():
            return fn(self._sock, *args, **kwargs)
        # Slow path: wait for readiness and retry until it stops raising
        # BlockingIOError.
        while True:
            (await wait_fn(self._sock))
            try:
                return fn(self._sock, *args, **kwargs)
            except BlockingIOError:
                pass
# Async accept() built from the stdlib method plus wait_readable.
_accept = _make_simple_sock_method_wrapper(_stdlib_socket.socket.accept, _core.wait_readable)
async def accept(self) -> tuple[(SocketType, AddressFormat)]:
    """Accept a connection, returning (trio socket, peer address)."""
    (sock, addr) = (await self._accept())
    # Wrap the accepted stdlib socket before handing it to the caller.
    return (from_stdlib_socket(sock), addr)
async def connect(self, address: AddressFormat) -> None:
    """Connect to *address*, raising OSError on failure."""
    try:
        address = (await self._resolve_address_nocp(address, local=False))
        async with _try_sync():
            # Fast path: connect completed (or errored) synchronously.
            return self._sock.connect(address)
        # Connect is now in progress; it finishes when the socket
        # becomes writable.
        (await _core.wait_writable(self._sock))
    except trio.Cancelled:
        # There is no portable way to abort an in-flight connect, so the
        # only safe reaction to cancellation is to close the socket.
        self._sock.close()
        raise
    # Writability alone does not mean success; SO_ERROR holds the
    # definitive result of the connect attempt.
    err = self._sock.getsockopt(_stdlib_socket.SOL_SOCKET, _stdlib_socket.SO_ERROR)
    if (err != 0):
        raise OSError(err, f'Error connecting to {address!r}: {os.strerror(err)}')
# The wrappers below pair a blocking stdlib socket method with the trio
# readiness primitive it should wait on.  The `if TYPE_CHECKING:` stubs
# exist only to give type checkers precise signatures; at runtime the
# attribute is the generated wrapper.
if TYPE_CHECKING:
    def recv(__self, __buflen: int, __flags: int=0) -> Awaitable[bytes]:
        ...
recv = _make_simple_sock_method_wrapper(_stdlib_socket.socket.recv, _core.wait_readable)
if TYPE_CHECKING:
    def recv_into(__self, buffer: Buffer, nbytes: int=0, flags: int=0) -> Awaitable[int]:
        ...
recv_into = _make_simple_sock_method_wrapper(_stdlib_socket.socket.recv_into, _core.wait_readable)
if TYPE_CHECKING:
    def recvfrom(__self, __bufsize: int, __flags: int=0) -> Awaitable[tuple[(bytes, AddressFormat)]]:
        ...
recvfrom = _make_simple_sock_method_wrapper(_stdlib_socket.socket.recvfrom, _core.wait_readable)
if TYPE_CHECKING:
    def recvfrom_into(__self, buffer: Buffer, nbytes: int=0, flags: int=0) -> Awaitable[tuple[(int, AddressFormat)]]:
        ...
recvfrom_into = _make_simple_sock_method_wrapper(_stdlib_socket.socket.recvfrom_into, _core.wait_readable)
# recvmsg/recvmsg_into do not exist on Windows; only define the wrappers
# where the stdlib provides the underlying method.
if ((sys.platform != 'win32') or ((not TYPE_CHECKING) and hasattr(_stdlib_socket.socket, 'recvmsg'))):
    if TYPE_CHECKING:
        def recvmsg(__self, __bufsize: int, __ancbufsize: int=0, __flags: int=0) -> Awaitable[tuple[(bytes, list[tuple[(int, int, bytes)]], int, Any)]]:
            ...
    recvmsg = _make_simple_sock_method_wrapper(_stdlib_socket.socket.recvmsg, _core.wait_readable, maybe_avail=True)
if ((sys.platform != 'win32') or ((not TYPE_CHECKING) and hasattr(_stdlib_socket.socket, 'recvmsg_into'))):
    if TYPE_CHECKING:
        def recvmsg_into(__self, __buffers: Iterable[Buffer], __ancbufsize: int=0, __flags: int=0) -> Awaitable[tuple[(int, list[tuple[(int, int, bytes)]], int, Any)]]:
            ...
    recvmsg_into = _make_simple_sock_method_wrapper(_stdlib_socket.socket.recvmsg_into, _core.wait_readable, maybe_avail=True)
if TYPE_CHECKING:
    def send(__self, __bytes: Buffer, __flags: int=0) -> Awaitable[int]:
        ...
send = _make_simple_sock_method_wrapper(_stdlib_socket.socket.send, _core.wait_writable)
# NOTE(review): the two `...` stubs below look like @overload declarations
# whose decorators were stripped from this copy, and the bare _wraps(...)
# call was almost certainly a decorator on the real sendto -- confirm
# against upstream.
async def sendto(self, __data: Buffer, __address: ((tuple[(object, ...)] | str) | Buffer)) -> int:
    ...
async def sendto(self, __data: Buffer, __flags: int, __address: ((tuple[(object, ...)] | str) | Buffer)) -> int:
    ...
_wraps(_stdlib_socket.socket.sendto, assigned=(), updated=())
async def sendto(self, *args: Any) -> int:
    """Send *data* to *address*, resolving the address first."""
    args_list = list(args)
    # The address is always the last positional argument in both
    # stdlib sendto() signatures.
    args_list[(- 1)] = (await self._resolve_address_nocp(args[(- 1)], local=False))
    return (await self._nonblocking_helper(_core.wait_writable, _stdlib_socket.socket.sendto, *args_list))
# sendmsg is not available on Windows; define the wrapper only where the
# stdlib provides the underlying method.
if ((sys.platform != 'win32') or ((not TYPE_CHECKING) and hasattr(_stdlib_socket.socket, 'sendmsg'))):
    # NOTE(review): the bare _wraps(...) call was likely a decorator on
    # sendmsg in the original source -- confirm against upstream.
    _wraps(_stdlib_socket.socket.sendmsg, assigned=(), updated=())
    async def sendmsg(self, __buffers: Iterable[Buffer], __ancdata: Iterable[tuple[(int, int, Buffer)]]=(), __flags: int=0, __address: (AddressFormat | None)=None) -> int:
        """Scatter-gather send with optional ancillary data and address."""
        if (__address is not None):
            __address = (await self._resolve_address_nocp(__address, local=False))
        return (await self._nonblocking_helper(_core.wait_writable, _stdlib_socket.socket.sendmsg, __buffers, __ancdata, __flags, __address))
_register_func(cls=object, pipeable=True, dispatchable=True)
def make_names(names, unique: bool=True) -> Any:
    """Sanitize *names* into valid identifier-like strings.

    Each name is slugified with '_' separators (case preserved); names
    starting with a digit get a leading '_'.  With ``unique=True`` the
    result is additionally deduplicated via ``_repair_names``.

    Accepts a single string, an iterable of names, or a bare scalar
    (which is wrapped in a one-element list).

    Raises:
        ValueError: if the optional ``python-slugify`` dependency is
            missing.
    """
    try:
        from slugify import slugify
    except ImportError as imerr:
        # Fixed: the package to install is `python-slugify`, not `slugify`
        # (the latter is a different, incompatible PyPI project).
        raise ValueError('`make_names()` requires `python-slugify` package.\nTry: pip install -U python-slugify') from imerr
    if isinstance(names, str):
        # A lone string is a single name, not an iterable of characters.
        names = [names]
    try:
        iter(names)
    except TypeError:
        names = [names]
    names = [slugify(str(name), separator='_', lowercase=False) for name in names]
    # Identifiers cannot start with a digit; prefix those with '_'.
    # name[:1] (not name[0]) so an empty slug doesn't raise IndexError.
    names = [(f'_{name}' if name[:1].isdigit() else name) for name in names]
    if unique:
        return _repair_names(names, 'unique')
    return names
class KeyPressTransition(QSignalTransition):
    """State-machine transition triggered by ``receiver.keyPressed``.

    The transition fires only when the emitted key equals *key*; if
    *target* is given, the machine moves to that state.
    """

    def __init__(self, receiver, key, target=None):
        super(KeyPressTransition, self).__init__(receiver.keyPressed)
        self.m_key = key
        if (target is not None):
            self.setTargetState(target)

    def eventTest(self, e):
        # Let the base class reject events for other signals first.
        if (not super(KeyPressTransition, self).eventTest(e)):
            return False
        # First signal argument is the key that was pressed.
        return (e.arguments()[0] == self.m_key)
class CloudzillaTo(BaseAccount):
    # pyLoad account plugin for the cloudzilla.to file hoster.
    __name__ = 'CloudzillaTo'
    __type__ = 'account'
    __version__ = '0.09'
    __status__ = 'testing'
    __description__ = 'Cloudzilla.to account plugin'
    __license__ = 'GPLv3'
    __authors__ = [('Walter Purcaro', '')]
    # Marks the "Premium Account" badge on the account profile page.
    PREMIUM_PATTERN = '<h2>account type</h2>\\s*Premium Account'
    def grab_info(self, user, password, data):
        # NOTE(review): the load() URL literal appears to have been scrubbed
        # from this copy; the next line is syntactically broken as-is and
        # must be restored from the upstream plugin before use.
        html = self.load('
        premium = (re.search(self.PREMIUM_PATTERN, html) is not None)
        # -1 signals "unknown/unlimited" validity and traffic to pyLoad.
        return {'validuntil': (- 1), 'trafficleft': (- 1), 'premium': premium}
    def signin(self, user, password, data):
        # NOTE(review): login URL scrubbed here as well -- restore upstream URL.
        html = self.load(' post={'lusername': user, 'lpassword': password, 'w': 'dologin'})
        if ('ERROR' in html):
            self.fail_login()
class FakePipeWrapper:
    """File-like wrapper for one end of a real OS-level pipe.

    Reads and writes go straight to the real file descriptor ``fd``;
    if a ``mode`` is given, a buffered Python file object is opened
    over the fd and used instead of raw ``os.read``/``os.write``.
    ``filedes`` is the fake descriptor assigned by the fake filesystem.
    """

    def __init__(self, filesystem: 'FakeFilesystem', fd: int, can_write: bool, mode: str=''):
        self._filesystem = filesystem
        self.fd = fd  # the real file descriptor of the pipe end
        self.can_write = can_write
        self.file_object = None
        self.filedes: Optional[int] = None  # fake fd, set by the filesystem
        self.real_file = None
        if mode:
            self.real_file = open(fd, mode)

    def __enter__(self) -> 'FakePipeWrapper':
        return self

    def __exit__(self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]) -> None:
        self.close()

    def get_object(self):
        # Fixed: was annotated `-> None` although it returns the (possibly
        # None) associated file object.
        return self.file_object

    def fileno(self) -> int:
        """Return the fake file descriptor, or raise EBADF if unset."""
        if (self.filedes is not None):
            return self.filedes
        raise OSError(errno.EBADF, 'Invalid file descriptor')

    def read(self, numBytes: int=(- 1)) -> bytes:
        if self.real_file:
            return self.real_file.read(numBytes)
        return os.read(self.fd, numBytes)

    def flush(self) -> None:
        # Fixed: this method had no body (syntax error, presumably the
        # original docstring-only body was lost).  Raw fds need no flush;
        # flush the buffered file object when one exists.
        if self.real_file:
            self.real_file.flush()

    def write(self, contents: bytes) -> int:
        if self.real_file:
            return self.real_file.write(contents)
        return os.write(self.fd, contents)

    def close(self) -> None:
        """Deregister from the fake filesystem and close the real pipe end."""
        assert (self.filedes is not None)
        open_files = self._filesystem.open_files[self.filedes]
        assert (open_files is not None)
        open_files.remove(self)
        if self.real_file:
            self.real_file.close()
        else:
            os.close(self.fd)

    def readable(self) -> bool:
        # A pipe end is unidirectional: readable iff not the write end.
        return (not self.can_write)

    def writable(self) -> bool:
        return self.can_write

    def seekable(self) -> bool:
        # Pipes never support seeking.
        return False
class SignalBlocker(_AbstractSignalBlocker):
    """Blocks a Qt event loop until one of the connected signals fires.

    ``args`` holds the arguments of the emission that satisfied the wait;
    ``all_args`` accumulates every emission seen while a
    ``check_params_cb`` filter is installed.
    """
    def __init__(self, timeout=5000, raising=True, check_params_cb=None):
        super().__init__(timeout, raising=raising)
        self._signals = []          # signals we connected to (for cleanup)
        self.args = None            # args of the accepted emission
        self.all_args = []          # every emission's args (cb mode only)
        self.check_params_callback = check_params_cb
        self.signal_name = ''
    def connect(self, signal):
        """Hook *signal* up so an emission quits the blocked loop."""
        self.signal_name = self.determine_signal_name(potential_signal_tuple=signal)
        actual_signal = self.get_signal_from_potential_signal_tuple(signal)
        actual_signal.connect(self._quit_loop_by_signal)
        self._signals.append(actual_signal)
    def _quit_loop_by_signal(self, *args):
        # Slot invoked on every emission; decides whether the wait is over.
        if self.check_params_callback:
            self.all_args.append(args)
            if (not self.check_params_callback(*args)):
                # Filter rejected this emission; keep waiting.
                return
        try:
            self.signal_triggered = True
            self.args = list(args)
            self._cleanup()
        finally:
            # Always release the event loop, even if cleanup raised.
            self._loop.quit()
    def _cleanup(self):
        # Disconnect everything so a late emission can't re-enter us.
        super()._cleanup()
        for signal in self._signals:
            _silent_disconnect(signal, self._quit_loop_by_signal)
        self._signals = []
    def get_params_as_str(self):
        """Render collected emission args for the timeout error message."""
        if (not self.all_args):
            return ''
        if (len(self.all_args[0]) == 1):
            # Unwrap 1-tuples so single-argument signals read naturally.
            args_list = [arg[0] for arg in self.all_args]
        else:
            args_list = self.all_args
        return str(args_list)
    def _get_timeout_error_message(self):
        # Distinguish "signal never fired" from "fired but filter rejected".
        if (self.check_params_callback is not None):
            return 'Signal {signal_name} emitted with parameters {params} within {timeout} ms, but did not satisfy the {cb_name} callback'.format(signal_name=self.signal_name, params=self.get_params_as_str(), timeout=self.timeout, cb_name=self.get_callback_name(self.check_params_callback))
        else:
            return 'Signal {signal_name} not emitted after {timeout} ms'.format(signal_name=self.signal_name, timeout=self.timeout)
class NormalizedToImageCoordinatesTest(tf.test.TestCase):
    # Checks that ops.normalized_to_image_coordinates scales [0, 1]-range
    # boxes by the image's spatial size (here a 4x4 image).
    def test_normalized_to_image_coordinates(self):
        normalized_boxes = tf.placeholder(tf.float32, shape=(None, 1, 4))
        normalized_boxes_np = np.array([[[0.0, 0.0, 1.0, 1.0]], [[0.5, 0.5, 1.0, 1.0]]])
        # image_shape is [batch, height, width, channels].
        image_shape = tf.convert_to_tensor([1, 4, 4, 3], dtype=tf.int32)
        absolute_boxes = ops.normalized_to_image_coordinates(normalized_boxes, image_shape, parallel_iterations=2)
        # Expected: each coordinate multiplied by the 4-pixel extent.
        expected_boxes = np.array([[[0, 0, 4, 4]], [[2, 2, 4, 4]]])
        with self.test_session() as sess:
            absolute_boxes = sess.run(absolute_boxes, feed_dict={normalized_boxes: normalized_boxes_np})
        self.assertAllEqual(absolute_boxes, expected_boxes)
class ArgumentParser(cfargparse.ArgumentParser):
    """configargparse-based option parser with OpenNMT-style validation.

    NOTE(review): `defaults`, `update_model_opts`, `validate_model_opts`,
    `ckpt_model_opts`, `validate_train_opts`, `validate_translate_opts`
    and `validate_preprocess_args` take `cls`/no `self` and read like
    @classmethods whose decorators were stripped from this copy -- confirm
    against upstream before relying on call style.
    """
    def __init__(self, config_file_parser_class=cfargparse.YAMLConfigFileParser, formatter_class=cfargparse.ArgumentDefaultsHelpFormatter, **kwargs):
        super(ArgumentParser, self).__init__(config_file_parser_class=config_file_parser_class, formatter_class=formatter_class, **kwargs)
    def defaults(cls, *args):
        """Build a namespace of default values from opt-registration callbacks."""
        dummy_parser = cls()
        for callback in args:
            callback(dummy_parser)
        # Parse an empty argv so every option takes its default.
        defaults = dummy_parser.parse_known_args([])[0]
        return defaults
    def update_model_opts(cls, model_opt):
        # Expand "umbrella" options into the specific enc/dec options.
        if (model_opt.word_vec_size > 0):
            model_opt.src_word_vec_size = model_opt.word_vec_size
            model_opt.tgt_word_vec_size = model_opt.word_vec_size
        if (model_opt.layers > 0):
            model_opt.enc_layers = model_opt.layers
            model_opt.dec_layers = model_opt.layers
        if (model_opt.rnn_size > 0):
            model_opt.enc_rnn_size = model_opt.rnn_size
            model_opt.dec_rnn_size = model_opt.rnn_size
        model_opt.brnn = (model_opt.encoder_type == 'brnn')
        # Copy attention falls back to the global attention type.
        if (model_opt.copy_attn_type is None):
            model_opt.copy_attn_type = model_opt.global_attention
    def validate_model_opts(cls, model_opt):
        """Reject model-option combinations that are not supported."""
        assert (model_opt.model_type in ['text', 'img', 'audio', 'vec']), ('Unsupported model type %s' % model_opt.model_type)
        same_size = (model_opt.enc_rnn_size == model_opt.dec_rnn_size)
        assert ((model_opt.model_type == 'audio') or same_size), 'The encoder and decoder rnns must be the same size for now'
        assert ((model_opt.rnn_type != 'SRU') or model_opt.gpu_ranks), 'Using SRU requires -gpu_ranks set.'
        if model_opt.share_embeddings:
            if (model_opt.model_type != 'text'):
                raise AssertionError('--share_embeddings requires --model_type text.')
    def ckpt_model_opts(cls, ckpt_opt):
        """Rebuild model options from a checkpoint, filling new defaults.

        Starting from current defaults keeps options added after the
        checkpoint was saved at sensible values.
        """
        opt = cls.defaults(opts.model_opts)
        opt.__dict__.update(ckpt_opt.__dict__)
        return opt
    def validate_train_opts(cls, opt):
        """Reject deprecated or inconsistent training options."""
        if opt.epochs:
            raise AssertionError('-epochs is deprecated please use -train_steps.')
        if ((opt.truncated_decoder > 0) and (max(opt.accum_count) > 1)):
            raise AssertionError('BPTT is not compatible with -accum > 1')
        if opt.gpuid:
            raise AssertionError('gpuid is deprecated see world_size and gpu_ranks')
        if (torch.cuda.is_available() and (not opt.gpu_ranks)):
            logger.info('WARNING: You have a CUDA device, should run with -gpu_ranks')
        if (opt.world_size < len(opt.gpu_ranks)):
            raise AssertionError('parameter counts of -gpu_ranks must be less or equal than -world_size.')
        if ((opt.world_size == len(opt.gpu_ranks)) and (min(opt.gpu_ranks) > 0)):
            raise AssertionError('-gpu_ranks should have master(=0) rank unless -world_size is greater than len(gpu_ranks).')
        assert (len(opt.data_ids) == len(opt.data_weights)), 'Please check -data_ids and -data_weights options!'
        assert (len(opt.dropout) == len(opt.dropout_steps)), 'Number of dropout values must match accum_steps values'
        assert (len(opt.attention_dropout) == len(opt.dropout_steps)), 'Number of attention_dropout values must match accum_steps values'
    def validate_translate_opts(cls, opt):
        # Beam search and random sampling are mutually exclusive.
        if ((opt.beam_size != 1) and (opt.random_sampling_topk != 1)):
            raise ValueError('Can either do beam search OR random sampling.')
    def validate_preprocess_args(cls, opt):
        """Check preprocessing options and that all referenced files exist."""
        assert (opt.max_shard_size == 0), '-max_shard_size is deprecated. Please use -shard_size (number of examples) instead.'
        assert (opt.shuffle == 0), '-shuffle is not implemented. Please shuffle your data before pre-processing.'
        assert (len(opt.train_src) == len(opt.train_tgt)), 'Please provide same number of src and tgt train files!'
        assert (len(opt.train_src) == len(opt.train_ids)), 'Please provide proper -train_ids for your data!'
        for file in (opt.train_src + opt.train_tgt):
            assert os.path.isfile(file), ('Please check path of %s' % file)
        assert ((not opt.valid_src) or os.path.isfile(opt.valid_src)), 'Please check path of your valid src file!'
        assert ((not opt.valid_tgt) or os.path.isfile(opt.valid_tgt)), 'Please check path of your valid tgt file!'
        assert ((not opt.src_vocab) or os.path.isfile(opt.src_vocab)), 'Please check path of your src vocab!'
        assert ((not opt.tgt_vocab) or os.path.isfile(opt.tgt_vocab)), 'Please check path of your tgt vocab!'
def test_blob_mounting_with_empty_layers(manifest_protocol, pusher, puller, images_with_empty_layer, liveserver_session, app_reloader):
    """Push images with an empty layer, then cross-repo-mount their blobs.

    First populates devtable/simple, then pushes devtable/newrepo while
    mounting every blob (by sha256 digest) from devtable/simple instead of
    re-uploading.  Scopes grant push+pull on the target and pull on the
    source, as the mount API requires.
    """
    pusher.push(liveserver_session, 'devtable', 'simple', 'latest', images_with_empty_layer, credentials=('devtable', 'password'))
    options = ProtocolOptions()
    options.scopes = ['repository:devtable/newrepo:push,pull', ('repository:%s:pull' % 'devtable/simple')]
    # Map each layer digest to the repository it should be mounted from.
    options.mount_blobs = {('sha256:' + hashlib.sha256(image.bytes).hexdigest()): 'devtable/simple' for image in images_with_empty_layer}
    options.skip_head_checks = True
    manifest_protocol.push(liveserver_session, 'devtable', 'newrepo', 'latest', images_with_empty_layer, credentials=('devtable', 'password'), options=options)
class Highway(Layer):
    """Densely connected highway layer (deprecated Keras 1 layer).

    Computes ``output = act(x.W + b) * T + x * (1 - T)`` where the
    transform gate ``T = sigmoid(x.W_carry + b_carry)`` decides, per
    unit, how much of the transformed input passes through versus how
    much of the raw input is carried.
    """
    def __init__(self, init='glorot_uniform', activation=None, weights=None, W_regularizer=None, b_regularizer=None, activity_regularizer=None, W_constraint=None, b_constraint=None, bias=True, input_dim=None, **kwargs):
        warnings.warn('The `Highway` layer is deprecated and will be removed after 06/2017.')
        if ('transform_bias' in kwargs):
            # Silently drop the removed legacy argument (with a warning).
            kwargs.pop('transform_bias')
            warnings.warn('`transform_bias` argument is deprecated and has been removed.')
        self.init = initializers.get(init)
        self.activation = activations.get(activation)
        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)
        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)
        self.bias = bias
        self.initial_weights = weights
        # Highway layers only accept 2-D (batch, features) input.
        self.input_spec = InputSpec(ndim=2)
        self.input_dim = input_dim
        if self.input_dim:
            kwargs['input_shape'] = (self.input_dim,)
        super(Highway, self).__init__(**kwargs)
    def build(self, input_shape):
        # Input and output dims are equal -- required by the carry term.
        input_dim = input_shape[1]
        self.input_spec = InputSpec(dtype=K.floatx(), shape=(None, input_dim))
        self.W = self.add_weight((input_dim, input_dim), initializer=self.init, name='W', regularizer=self.W_regularizer, constraint=self.W_constraint)
        self.W_carry = self.add_weight((input_dim, input_dim), initializer=self.init, name='W_carry')
        if self.bias:
            self.b = self.add_weight((input_dim,), initializer='zero', name='b', regularizer=self.b_regularizer, constraint=self.b_constraint)
            # Carry bias initialized to one biases the gate toward carrying.
            self.b_carry = self.add_weight((input_dim,), initializer='one', name='b_carry')
        else:
            self.b_carry = None
        if (self.initial_weights is not None):
            self.set_weights(self.initial_weights)
            del self.initial_weights
        self.built = True
    def call(self, x):
        # Transform gate T = sigmoid(x.W_carry + b_carry).
        y = K.dot(x, self.W_carry)
        if self.bias:
            y += self.b_carry
        transform_weight = activations.sigmoid(y)
        # Candidate activation act = activation(x.W + b).
        y = K.dot(x, self.W)
        if self.bias:
            y += self.b
        act = self.activation(y)
        act *= transform_weight
        # Blend transformed and carried input: act*T + x*(1 - T).
        output = (act + ((1 - transform_weight) * x))
        return output
    def get_config(self):
        # Serialize every constructor argument so the layer round-trips.
        config = {'init': initializers.serialize(self.init), 'activation': activations.serialize(self.activation), 'W_regularizer': regularizers.serialize(self.W_regularizer), 'b_regularizer': regularizers.serialize(self.b_regularizer), 'activity_regularizer': regularizers.serialize(self.activity_regularizer), 'W_constraint': constraints.serialize(self.W_constraint), 'b_constraint': constraints.serialize(self.b_constraint), 'bias': self.bias, 'input_dim': self.input_dim}
        base_config = super(Highway, self).get_config()
        return dict((list(base_config.items()) + list(config.items())))
def custom_noise_schedule(timesteps, p):
    """Build a `timesteps`-long beta schedule, increasing over time.

    Betas interpolate linearly between beta_max**(1/p) and
    beta_min**(1/p) and are then raised to the p-th power; the sequence
    is reversed so it increases, and clipped into [0, 0.999].
    """
    scale = (1000 / timesteps)
    beta_min = (scale * 0.0001)
    beta_max = 1
    # Interpolate in "root space" (the 1/p-th power), then raise back.
    root_max = beta_max ** (1 / p)
    root_min = beta_min ** (1 / p)
    betas = [beta_max]
    betas += [
        (root_max + ((i / (timesteps - 1)) * (root_min - root_max))) ** p
        for i in range(1, timesteps)
    ]
    # Reverse so betas grow with the timestep index, and clamp.
    return np.clip(betas[::(- 1)], a_min=0, a_max=0.999)
class Effect6174(BaseEffect):
    # Passive hull role bonus: boosts optimal range and falloff of Medium
    # Projectile Turrets by the ship's roleBonusCBC attribute.
    type = 'passive'
    # NOTE(review): `handler` takes no `self`; upstream presumably declared
    # it @staticmethod and the decorator was stripped -- confirm.
    def handler(fit, ship, context, projectionRange, **kwargs):
        fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Medium Projectile Turret')), 'maxRange', ship.getModifiedItemAttr('roleBonusCBC'), **kwargs)
        fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Medium Projectile Turret')), 'falloff', ship.getModifiedItemAttr('roleBonusCBC'), **kwargs)
class ContextMeta(type):
    """Metaclass giving classes a thread-local context stack.

    Instances become usable as ``with obj:`` blocks that push/pop
    themselves on a per-thread stack, and the *class* can then be called
    like a context manager factory (``__call__`` enters the instance
    while running ``__init__``).

    NOTE(review): `get_context`, `get_contexts` and `context_class` read
    like @classmethod/@property definitions whose decorators were
    stripped from this copy -- confirm against upstream.
    """
    def __new__(cls, name, bases, dct, **kwargs):
        # Inject __enter__/__exit__ into every class built with this
        # metaclass so its instances manage the context stack themselves.
        def __enter__(self):
            self.__class__.context_class.get_contexts().append(self)
            self._config_context = None
            # Optionally apply pytensor config flags for the block's duration.
            if hasattr(self, '_pytensor_config'):
                self._config_context = pytensor.config.change_flags(**self._pytensor_config)
                self._config_context.__enter__()
            return self
        def __exit__(self, typ, value, traceback):
            self.__class__.context_class.get_contexts().pop()
            if self._config_context:
                self._config_context.__exit__(typ, value, traceback)
        dct[__enter__.__name__] = __enter__
        dct[__exit__.__name__] = __exit__
        return super().__new__(cls, name, bases, dct)
    def __init__(cls, name, bases, nmspc, context_class: Optional[Type]=None, **kwargs):
        # `class Foo(metaclass=ContextMeta, context_class=Bar)` support.
        if (context_class is not None):
            cls._context_class = context_class
        super().__init__(name, bases, nmspc)
    def get_context(cls, error_if_none=True, allow_block_model_access=False) -> Optional[T]:
        """Return the innermost context, or None/raise when the stack is empty."""
        try:
            candidate: Optional[T] = cls.get_contexts()[(- 1)]
        except IndexError:
            if error_if_none:
                raise TypeError(f'No {cls} on context stack')
            return None
        # BlockModelAccess sentinels forbid implicit model access.
        if (isinstance(candidate, BlockModelAccess) and (not allow_block_model_access)):
            raise BlockModelAccessError(candidate.error_msg_on_access)
        return candidate
    def get_contexts(cls) -> List[T]:
        """Return this thread's context stack, creating it lazily."""
        context_class = cls.context_class
        assert isinstance(context_class, type), f'Name of context class, {context_class} was not resolvable to a class'
        # The stack lives on the context class in a threading.local so
        # each thread sees its own independent stack.
        if (not hasattr(context_class, 'contexts')):
            context_class.contexts = threading.local()
        contexts = context_class.contexts
        if (not hasattr(contexts, 'stack')):
            contexts.stack = []
        return contexts.stack
    def context_class(cls) -> Type:
        """Resolve `_context_class` (possibly given as a string) to a type."""
        def resolve_type(c: Union[(Type, str)]) -> Type:
            if isinstance(c, str):
                # Look the name up in the defining module (forward refs).
                c = getattr(modules[cls.__module__], c)
            if isinstance(c, type):
                return c
            raise ValueError(f'Cannot resolve context class {c}')
        assert (cls is not None)
        if isinstance(cls._context_class, str):
            cls._context_class = resolve_type(cls._context_class)
        if (not isinstance(cls._context_class, (str, type))):
            raise ValueError(f'Context class for {cls.__name__}, {cls._context_class}, is not of the right type')
        return cls._context_class
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        cls.context_class = super().context_class
    def __call__(cls, *args, **kwargs):
        # Instantiate with the new object already pushed as a context so
        # __init__ can register things against it.
        instance: 'Model' = cls.__new__(cls, *args, **kwargs)
        with instance:
            instance.__init__(*args, **kwargs)
        return instance
def test_inspector_with_nested_exception() -> None:
    """Inspector exposes the chained cause of a nested RuntimeError."""
    try:
        nested_exception()
    except RuntimeError as e:
        inspector = Inspector(e)
        assert (inspector.exception == e)
        # The nested raise should surface as a previous exception.
        assert inspector.has_previous_exception()
        assert (inspector.previous_exception is not None)
        assert (inspector.exception_name == 'RuntimeError')
        assert (inspector.exception_message == 'Nested Exception')
        assert (len(inspector.frames) > 0)
        # Compacting the traceback collapses it to the single relevant frame.
        assert (len(inspector.frames.compact()) == 1)
def replace_modules_with_new_variants(manager: BuildManager, graph: dict[(str, State)], old_modules: dict[(str, (MypyFile | None))], new_modules: dict[(str, (MypyFile | None))]) -> None:
    """Graft freshly parsed module contents onto the preserved old trees.

    Merging into the old tree keeps node identities stable, so external
    references to the previous AST remain valid; the old tree object is
    then re-registered in the manager and the build graph.
    """
    for module_id, fresh_tree in new_modules.items():
        old_tree = old_modules.get(module_id)
        if (not (old_tree and (fresh_tree is not None))):
            continue
        merge_asts(old_tree, old_tree.names, fresh_tree, fresh_tree.names)
        manager.modules[module_id] = old_tree
        graph[module_id].tree = old_tree
class OnlineProductsDataset(H5PYDataset):
    """Fuel-style dataset over the Stanford Online Products HDF5 file."""
    _filename = 'online_products/online_products.hdf5'
    def __init__(self, which_sets, **kwargs):
        try:
            # NOTE(review): hard-coded absolute path; the surrounding
            # try/except is dead code, since a plain assignment can never
            # raise IOError (the file isn't opened until the super call).
            path = '/home/zwz/Desktop/DML/lib/datasets/data/online_products/online_products.hdf5'
        except IOError as e:
            msg = (str(e) + '.\n You need to download the dataset and convert it to hdf5 before.')
            raise IOError(msg)
        super(OnlineProductsDataset, self).__init__(file_or_path=path, which_sets=which_sets, **kwargs)
class Issue69_BadV1Year(TestCase):
    """Regression tests for ID3v1 tags with malformed/odd year fields."""
    def test_missing_year(self):
        # A tag truncated before the year field should still parse the title.
        tag = ParseID3v1(b'ABCTAGhello world\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff')
        self.failUnlessEqual(tag['TIT2'], 'hello world')
    def test_short_year(self):
        # A one-digit year should be zero-padded to four digits.
        data = b'XTAGhello world\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x001\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff'
        tag = ParseID3v1(data)
        self.failUnlessEqual(tag['TIT2'], 'hello world')
        self.failUnlessEqual(tag['TDRC'], '0001')
        (frames, offset) = find_id3v1(BytesIO(data))
        self.assertEqual(offset, (- 125))
        self.assertEqual(frames, tag)
    def test_none(self):
        # No TDRC at all -> no year frame after a round-trip.
        s = MakeID3v1(dict())
        self.failUnlessEqual(len(s), 128)
        tag = ParseID3v1(s)
        self.failIf(('TDRC' in tag))
    def test_empty(self):
        # An empty TDRC string is dropped, same as no year.
        s = MakeID3v1(dict(TDRC=''))
        self.failUnlessEqual(len(s), 128)
        tag = ParseID3v1(s)
        self.failIf(('TDRC' in tag))
    def test_short(self):
        # A short year round-trips zero-padded to four digits.
        s = MakeID3v1(dict(TDRC='1'))
        self.failUnlessEqual(len(s), 128)
        tag = ParseID3v1(s)
        self.failUnlessEqual(tag['TDRC'], '0001')
    def test_long(self):
        # Fixed: the input here had been emptied ('') although the assertion
        # expects '1234' -- an over-long year must be truncated to the
        # 4-character ID3v1 year field.
        s = MakeID3v1(dict(TDRC='12345678'))
        self.failUnlessEqual(len(s), 128)
        tag = ParseID3v1(s)
        self.failUnlessEqual(tag['TDRC'], '1234')
def _get_stream_params(command: str):
    """Split a shell-style *command* into base/audio/video stream params.

    Tokens are routed to the most recent section flag seen; tokens before
    any flag fall into 'base'.  Base params are then appended into both
    the audio and video sections and the base bucket is dropped.
    """
    arg_names = ['base', 'audio', 'video']
    command_args: Dict = {arg: [] for arg in arg_names}
    current_arg = arg_names[0]
    for part in shlex.split(command):
        # A token like '--audio' switches the current section.
        # NOTE(review): only part[2:] is checked, so any token whose tail
        # happens to equal a section name would also switch -- presumably
        # flags are always '--<name>'; confirm against callers.
        arg_name = part[2:]
        if (arg_name in arg_names):
            current_arg = arg_name
        else:
            command_args[current_arg].append(part)
    command_args = {command: _extract_stream_params(command_args[command]) for command in command_args}
    # Fold the shared 'base' params into each specific section.
    for arg in arg_names[1:]:
        for x in command_args[arg_names[0]]:
            command_args[arg][x] += command_args[arg_names[0]][x]
    del command_args[arg_names[0]]
    return command_args
class TFMobileViTASPPPooling(tf.keras.layers.Layer):
    """ASPP image-pooling branch: global average pool, 1x1 conv, upsample."""
    def __init__(self, config: MobileViTConfig, out_channels: int, **kwargs) -> None:
        super().__init__(**kwargs)
        # keepdims=True keeps a 1x1 spatial map so the conv can follow.
        self.global_pool = tf.keras.layers.GlobalAveragePooling2D(keepdims=True, name='global_pool')
        self.conv_1x1 = TFMobileViTConvLayer(config, out_channels=out_channels, kernel_size=1, stride=1, use_normalization=True, use_activation='relu', name='conv_1x1')
    def call(self, features: tf.Tensor, training: bool=False) -> tf.Tensor:
        # Remember the input's spatial size (assumes NHWC layout) so the
        # pooled features can be resized back to it.
        spatial_size = shape_list(features)[1:(- 1)]
        features = self.global_pool(features)
        features = self.conv_1x1(features, training=training)
        features = tf.image.resize(features, size=spatial_size, method='bilinear')
        return features
def load_fips_ecdsa_key_pair_vectors(vector_data):
    """Parse NIST FIPS ECDSA KeyPair test vectors.

    Returns a list of dicts with keys 'curve', 'd' (private scalar) and
    'x'/'y' (public point coordinates), all integers parsed from hex.
    """

    def hex_value(text):
        # Fields look like "d = <hex>"; everything after '=' is the hex value.
        return int(text.split('=')[1], 16)

    vectors = []
    key_data = None
    for raw_line in vector_data:
        line = raw_line.strip()
        if ((not line) or line.startswith('#')):
            continue  # skip blank lines and comments
        if (line[1:(- 1)] in _ECDSA_CURVE_NAMES):
            # "[P-256]"-style section header selects the active curve.
            curve_name = _ECDSA_CURVE_NAMES[line[1:(- 1)]]
        elif line.startswith('d = '):
            # A new private key starts the next vector; flush the previous.
            if (key_data is not None):
                vectors.append(key_data)
            key_data = {'curve': curve_name, 'd': hex_value(line)}
        elif (key_data is not None):
            if line.startswith('Qx = '):
                key_data['x'] = hex_value(line)
            elif line.startswith('Qy = '):
                key_data['y'] = hex_value(line)
    # The file must contain at least one vector; append the final one.
    assert (key_data is not None)
    vectors.append(key_data)
    return vectors
class PutExecutor(ActionExecutor):
    """Executes a 'put object onto/into subject' script step.

    `relation` is the spatial relation to establish (e.g. ON or INSIDE).
    """
    def __init__(self, relation: Relation):
        self.relation = relation
    def execute(self, script: Script, state: EnvironmentState, info: ExecutionInfo, char_index, modify=True, in_place=False):
        # Generator protocol: yields the successor state (or nothing on error).
        current_line = script[0]
        info.set_current_line(current_line)
        src_node = state.get_state_node(current_line.object())
        dest_node = state.get_state_node(current_line.subject())
        if ((src_node is None) or (dest_node is None)):
            # Report whichever of the two script objects is missing.
            info.script_object_found_error((current_line.object() if (src_node is None) else current_line.subject()))
        elif _check_puttable(state, src_node, dest_node, self.relation, info, char_index):
            if modify:
                # Release the object from either hand, mark character and
                # object as CLOSE to the destination, add the put relation,
                # and clear the stale GRAB bookkeeping for this object.
                (yield state.change_state([DeleteEdges(CharacterNode(char_index), [Relation.HOLDS_LH, Relation.HOLDS_RH], NodeInstance(src_node)), AddEdges(CharacterNode(char_index), Relation.CLOSE, NodeInstance(dest_node), add_reverse=True), AddEdges(NodeInstance(src_node), Relation.CLOSE, NodeInstance(dest_node), add_reverse=True), AddEdges(NodeInstance(src_node), self.relation, NodeInstance(dest_node)), ClearExecDataKey((Action.GRAB, src_node.id))], in_place=in_place))
            else:
                # Dry run: the precondition held, so yield the state unchanged.
                (yield state)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.