code stringlengths 281 23.7M |
|---|
class CustomEnv7DOF(mujoco_env.MujocoEnv, utils.EzPickle):
def __init__(self, xml_path, viewer_params):
utils.EzPickle.__init__(self)
self._params = viewer_params
mujoco_env.MujocoEnv.__init__(self, xml_path, viewer_params['speedup'])
self._image_width = viewer_params['image_width']
... |
class MyAccessor(Accessor):
value: Any
is_required: bool
def getter(self) -> Callable[([Any], Any)]:
    """Build a getter callable bound to this accessor.

    The returned function looks up the attribute named by ``self.value``
    on its argument; an ``AttributeError`` is re-raised as
    ``MyAccessError`` carrying the same args, so callers see a uniform
    accessor-specific error type.
    """

    def _fetch(target):
        # Read self.value lazily, at call time, so later changes to the
        # accessor are reflected in already-created getters.
        try:
            return getattr(target, self.value)
        except AttributeError as err:
            raise MyAccessError(*err.args)

    return _fetch
... |
class Video(ContentManageable):
post = models.ForeignKey(Post, related_name='related_%(class)s', editable=False, null=True, on_delete=models.CASCADE)
video_embed = models.TextField(blank=True)
video_data = models.FileField(upload_to='community/videos/', blank=True)
caption = models.TextField(blank=True)... |
def get_args():
parser = argparse.ArgumentParser('MixPath')
parser.add_argument('--exp_name', type=str, required=True, help='search model name')
parser.add_argument('--m', type=int, default=2, required=True, help='num of selected paths as most')
parser.add_argument('--shadow_bn', action='store_false', d... |
class FNMGLikelihood(FixedNoiseGaussianLikelihood):
def noise(self) -> Tensor:
return (self.noise_covar.noise * self.second_noise)
def _shaped_noise_covar(self, base_shape: torch.Size, *params: Any, **kwargs: Any):
if (len(params) > 0):
shape = None
else:
shape = ... |
class Solution(object):
def canPlaceFlowers(self, flowerbed, n):
count = 0
for i in range(len(flowerbed)):
curr = flowerbed[i]
if ((i - 1) >= 0):
curr += flowerbed[(i - 1)]
if ((i + 1) < len(flowerbed)):
curr += flowerbed[(i + 1)]
... |
class ImageEncoder(nn.Module):
def __init__(self, encoder_type, in_dim, **kwargs):
super(ImageEncoder, self).__init__()
if (encoder_type == 'default'):
self.module = Identity()
self.module.in_dim = in_dim
self.module.out_dim = in_dim
elif (encoder_type == ... |
class PexpectPeer(InteractionPeer):
def __init__(self, *args, **kwargs):
self.child = pexpect.spawn(*args, **kwargs)
self.pending_data = b''
self.done = False
def receive(self, timeout=None):
self.poll(timeout=timeout)
if self.pending_data:
ret = self.pending_... |
def get_majorana_operator(operator: Union[(PolynomialTensor, DiagonalCoulombHamiltonian, FermionOperator)]) -> MajoranaOperator:
if isinstance(operator, FermionOperator):
return _fermion_operator_to_majorana_operator(operator)
elif isinstance(operator, (PolynomialTensor, DiagonalCoulombHamiltonian)):
... |
class AssignedName(pynames.AssignedName):
def __init__(self, lineno=None, module=None, pyobject=None):
self.lineno = lineno
self.module = module
self.assignments = []
self.pyobject = _Inferred(self._get_inferred, pynames._get_concluded_data(module))
self.pyobject.set(pyobject... |
.skipif(PYPY, reason='garbage-collection differences make this flaky')
.filterwarnings('default::pytest.PytestUnraisableExceptionWarning')
def test_unraisable_in_setup(pytester: Pytester) -> None:
pytester.makepyfile(test_it='\n import pytest\n\n class BrokenDel:\n def __del__(self):\n ... |
class DummyVecEnv(VecEnv):
def __init__(self, env_fns):
self.envs = [fn() for fn in env_fns]
env = self.envs[0]
VecEnv.__init__(self, len(env_fns), env.observation_space, env.action_space)
obs_space = env.observation_space
(self.keys, shapes, dtypes) = obs_space_info(obs_spac... |
def _migrate_v8(preset: dict) -> dict:
migration = migration_data.get_raw_data(RandovaniaGame(preset['game']))
def _name_to_location(name: str):
(world_name, area_name) = name.split('/', 1)
return {'world_asset_id': migration['world_name_to_id'][world_name], 'area_asset_id': migration['area_name... |
def batch_intersection_union(output, target, nclass):
predict = torch.max(output, dim=1)[1]
mini = 1
maxi = (nclass - 1)
nbins = (nclass - 1)
predict = predict.cpu().numpy().astype('int64')
target = target.cpu().numpy().astype('int64')
predict = (predict * (target >= 0).astype(predict.dtype)... |
def test_inline(config, workspace, code_action_context):
document = create_document(workspace, 'simple_extract_method.py')
line = 6
start_col = end_col = document.lines[line].index('extracted_method')
selection = Range((line, start_col), (line, end_col))
response = plugin.pylsp_code_actions(config=c... |
class VTUNet(nn.Module):
def __init__(self, config, num_classes=3, zero_head=False, embed_dim=96, win_size=7):
super(VTUNet, self).__init__()
self.num_classes = num_classes
self.zero_head = zero_head
self.config = config
self.embed_dim = embed_dim
self.win_size = win_... |
def test_game_tab_created(skip_qtbot):
game = RandovaniaGame.METROID_PRIME_ECHOES
widget = GamesHelpWidget()
widget.set_main_window(MagicMock())
widget.on_options_changed(MagicMock())
skip_qtbot.addWidget(widget)
widget.set_current_game(game)
widget.showEvent(None)
assert (widget.current... |
class LossFunc(nn.Module):
def __init__(self):
super(LossFunc, self).__init__()
return
def forward(self, y_true_cls, y_pred_cls, y_true_geo, y_pred_geo, training_mask):
classification_loss = dice_coefficient(y_true_cls, y_pred_cls, training_mask)
classification_loss *= 0.01
... |
def ordinal(value: NumberOrString, gender: str='male') -> str:
try:
if (not math.isfinite(float(value))):
return _format_not_finite(float(value))
value = int(value)
except (TypeError, ValueError):
return str(value)
if (gender == 'male'):
t = (P_('0 (male)', 'th'),... |
('beeref.widgets.SceneToPixmapExporterDialog.exec')
('beeref.widgets.SceneToPixmapExporterDialog.value')
('PyQt6.QtWidgets.QFileDialog.getSaveFileName')
def test_on_action_export_scene_no_filename(file_mock, value_mock, exec_mock, view):
item = BeeTextItem('foo')
view.scene.addItem(item)
file_mock.return_va... |
def test_sharing_off(capfd, path_rgb_byte_tif):
with rasterio.Env() as env:
with rasterio.open(path_rgb_byte_tif, sharing=False) as srcx:
env._dump_open_datasets()
captured = capfd.readouterr()
assert ('1 N GTiff' in captured.err)
assert ('1 S GTiff' not in ca... |
class StrictFileObject(object):
def __init__(self, fileobj):
self._fileobj = fileobj
for m in ['close', 'tell', 'seek', 'write', 'name', 'flush', 'truncate']:
if hasattr(fileobj, m):
setattr(self, m, getattr(fileobj, m))
def read(self, size=(- 1)):
data = self... |
class QueueOrder(ShufflePlugin, OrderInOrder):
PLUGIN_ID = 'queue'
PLUGIN_NAME = _('Queue Only')
PLUGIN_ICON = Icons.VIEW_LIST
PLUGIN_DESC = _('Limits playing of songs to the queue.\n\nSelect this play order in the main window, then double-clicking any song will enqueue it instead of playing.')
disp... |
def compute_score(real, fake, k=1, sigma=1, sqrt=True):
Mxx = distance(real, real, False)
Mxy = distance(real, fake, False)
Myy = distance(fake, fake, False)
s = Score()
s.emd = wasserstein(Mxy, sqrt)
s.mmd = mmd(Mxx, Mxy, Myy, sigma)
s.knn = knn(Mxx, Mxy, Myy, k, sqrt)
s.printScore()
... |
def pretrain_stem(stem, train_x, train_y, loss_fn, lr, num_epochs, batch_size, **kwargs):
train_dataset = torch.utils.data.TensorDataset(train_x, train_y)
dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=batch_size)
model = torch.nn.Sequential(stem, torch.nn.Linear(stem.output_dim, train_y... |
class training_config(object):
def __init__(self):
self.gen_learning_rate = 0.01
self.gen_update_time = 1
self.dis_update_time_adv = 5
self.dis_update_epoch_adv = 3
self.dis_update_time_pre = 50
self.dis_update_epoch_pre = 3
self.pretrained_epoch_num = 20
... |
def test_repository_merge_base(project):
refs = [commit.id for commit in project.commits.list(all=True)]
commit = project.repository_merge_base(refs)
assert (commit['id'] in refs)
with pytest.raises(gitlab.GitlabGetError, match='Provide at least 2 refs'):
commit = project.repository_merge_base(r... |
class EditBookmarksPane(Gtk.VBox):
def __init__(self, library, song, close=False):
super().__init__(spacing=6)
hb = Gtk.HBox(spacing=12)
self.time = time = Gtk.Entry()
time.set_width_chars(5)
self.markname = name = Gtk.Entry()
add = qltk.Button(_('_Add'), Icons.LIST_A... |
class CSAM_Module(nn.Module):
def __init__(self, in_dim):
super(CSAM_Module, self).__init__()
self.chanel_in = in_dim
self.conv = nn.Conv3d(1, 1, 3, 1, 1)
self.gamma = nn.Parameter(torch.zeros(1))
self.sigmoid = nn.Sigmoid()
def forward(self, x):
(m_batchsize, C, ... |
class Appr(Inc_Learning_Appr):
def __init__(self, model, device, nepochs=100, lr=0.05, lr_min=0.0001, lr_factor=3, lr_patience=5, clipgrad=10000, momentum=0, wd=0, multi_softmax=False, wu_nepochs=0, wu_lr_factor=1, fix_bn=False, eval_on_train=False, logger=None, exemplars_dataset=None, lamb=5000, alpha=0.5, fi_samp... |
class GenericLoss(torch.nn.Module):
def __init__(self, opt):
super(GenericLoss, self).__init__()
self.crit = FastFocalLoss(opt=opt)
self.crit_reg = RegWeightedL1Loss()
if ('rot' in opt.heads):
self.crit_rot = BinRotLoss()
if ('nuscenes_att' in opt.heads):
... |
('/v1/organization/<orgname>/logs')
_param('orgname', 'The name of the organization')
_user_resource(UserLogs)
class OrgLogs(ApiResource):
('listOrgLogs')
_args()
_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
_param('endtime', 'Latest time for logs. Format: "%m/%d/%... |
class ColorJitter(object):
def __init__(self, brightness=None, contrast=None, saturation=None, *args, **kwargs):
if ((not (brightness is None)) and (brightness > 0)):
self.brightness = [max((1 - brightness), 0), (1 + brightness)]
if ((not (contrast is None)) and (contrast > 0)):
... |
class DataPaths():
def __init__(self):
pass
def load_splits(split_file):
dataset_path = PROCESSED_PATH
assert os.path.exists(dataset_path), f'the given dataset path {dataset_path} does not exist, please check if your training data are placed over there!'
(train, val) = DataPaths.... |
def onjava_test_deps(unit, *args):
assert (unit.get('MODULE_TYPE') is not None)
path = strip_roots(unit.path())
test_record = {'SOURCE-FOLDER-PATH': path, 'TEST-NAME': '-'.join([os.path.basename(os.path.dirname(path)), os.path.basename(path), 'dependencies']), 'SCRIPT-REL-PATH': 'java.dependency.test', 'TES... |
def trainingsetfromswapwiththreememqubit(N):
state1 = qt.basis(2, 0)
state2 = qt.basis(2, 0)
state3 = qt.basis(2, 0)
trainingset = [qt.tensor(qt.ket2dm(state1), qt.ket2dm(state2), qt.ket2dm(state3))]
for i in range(N):
state4 = randomstate(1)
trainingset.append([qt.ket2dm(state4), st... |
def test_completion_items(ac_app):
text = ''
line = 'choices --completion_items {}'.format(text)
endidx = len(line)
begidx = (endidx - len(text))
first_match = complete_tester(text, line, begidx, endidx, ac_app)
assert (first_match is not None)
assert (len(ac_app.completion_matches) == len(a... |
def vqa_calculate(batch_dict, vocab):
pred_answers = batch_dict['pred_answers']
ocr_tokens_enc = batch_dict['ocr_tokens']
gt_answers_enc = batch_dict['answers']
topkscores = batch_dict['topkscores']
answer_space_size = len(vocab)
predictions = []
for (idx, question_id) in enumerate([batch_di... |
def gen_attr_names() -> Iterable[str]:
lc = string.ascii_lowercase
has_underscore = False
for c in lc:
(yield (c if (not has_underscore) else ('_' + c)))
has_underscore = (not has_underscore)
for outer in lc:
for inner in lc:
res = (outer + inner)
if keywo... |
def load_svhn(data_dir, use_augmentation='base'):
test_transform = transforms.Compose([transforms.ToTensor()])
train_transform = test_transform
train_dataset = torchvision.datasets.SVHN(root=data_dir, split='train', download=True, transform=train_transform)
test_dataset = torchvision.datasets.SVHN(root=... |
.parametrize('spec, no_match_cases, match_cases', [(Spec('<1.6.0'), ['1.6.0', '1.6.1', '1.9.0', '100.5.2'], ['0.0.0', '1.5.99']), (Spec('<1.9.0'), ['1.9.0', '100.5.2'], ['0.0.0', '1.5.99', '1.6.0', '1.6.1']), (Spec('<1.6.0,>0.0.1'), ['1.6.0', '1.6.1', '1.9.0', '0.0.0'], ['1.5.99']), (Spec('>17.3.0'), ['17.3.0', '1.13.0... |
class PreResNet56Drop():
base = PreResNetDrop
args = list()
kwargs = {'depth': 56}
transform_train = transforms.Compose([transforms.Resize(32), transforms.RandomCrop(32, padding=4), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, ... |
class CustomExtensionTests(SphinxIntegrationTests):
build_path = 'tests/sphinx_custom_md'
def test_integration(self):
output = self.read_file('index.html')
self.assertIn('<table ', output)
self.assertIn('<th class="head">abc</th>', output)
self.assertIn('<th class="head">data</th... |
def model_from_biomodels(accession_no, force=False, cleanup=True, mirror='ebi', **kwargs):
logger = get_logger(__name__, log_level=kwargs.get('verbose'))
if (not BIOMODELS_REGEX.match(accession_no)):
try:
accession_no = 'BIOMD{:010d}'.format(int(accession_no))
except ValueError:
... |
class Solution():
def lastStoneWeight(self, stones: List[int]) -> int:
if (len(stones) <= 1):
return (stones[0] if (len(stones) == 1) else None)
temp = sorted(stones, reverse=True)
while (len(temp) > 2):
new_weight = abs((temp[0] - temp[1]))
temp = sorted(... |
class Effect6058(BaseEffect):
    """Passive effect: armor-HP boost for medium drones.

    Applies the ship's ``shipBonusGC2`` modified attribute as an
    ``armorHP`` item boost, scaled per level of the Gallente Cruiser
    skill, to every drone requiring Medium Drone Operation.
    """

    type = 'passive'

    def handler(fit, ship, context, projectionRange, **kwargs):
        # Only drones that require the Medium Drone Operation skill are
        # affected by this hull bonus.
        def affects_medium_drone(drone):
            return drone.item.requiresSkill('Medium Drone Operation')

        fit.drones.filteredItemBoost(
            affects_medium_drone,
            'armorHP',
            ship.getModifiedItemAttr('shipBonusGC2'),
            skill='Gallente Cruiser',
            **kwargs,
        )
class MobileNetV2ImageProcessingTester(unittest.TestCase):
def __init__(self, parent, batch_size=7, num_channels=3, image_size=18, min_resolution=30, max_resolution=400, do_resize=True, size=None, do_center_crop=True, crop_size=None):
size = (size if (size is not None) else {'shortest_edge': 20})
cr... |
class VanLargeKernelAttentionLayer(nn.Module):
def __init__(self, hidden_size: int):
super().__init__()
self.attention = VanLargeKernelAttention(hidden_size)
def forward(self, hidden_state: torch.Tensor) -> torch.Tensor:
attention = self.attention(hidden_state)
attended = (hidden... |
class MBConvBlock(nn.Module):
def __init__(self, block_args, global_params, image_size=None):
super().__init__()
self._block_args = block_args
self._bn_mom = (1 - global_params.batch_norm_momentum)
self._bn_eps = global_params.batch_norm_epsilon
self.has_se = ((self._block_ar... |
def NonOverlappingCropPatches_random(im, gt, patch_size=32, stride=32):
(w, h) = im.size
rnd_h = random.randint(0, max(0, (h - patch_size)))
rnd_w = random.randint(0, max(0, (w - patch_size)))
im_crop = im.crop((rnd_w, rnd_h, (rnd_w + patch_size), (rnd_h + patch_size)))
im_crop = np.asarray(im_crop)... |
def test_class_errors(c: Converter) -> None:
class C():
a: int
b: int = 0
c.register_structure_hook(C, make_dict_structure_fn(C, c, _cattrs_forbid_extra_keys=True))
try:
c.structure({'d': 1}, C)
except Exception as exc:
assert (transform_error(exc) == ['required field mis... |
class TestAHIGriddedLUTs(unittest.TestCase):
def mocked_ftp_dl(fname):
import os
import tarfile
import tempfile
with tarfile.open(fname, 'w:gz') as tar_handle:
for namer in AHI_LUT_NAMES:
tmpf = os.path.join(tempfile.tempdir, namer)
with op... |
def test_dynamic_property_values_update_in_instance(fake):
fake.fake_ctrl_values = (0, 33)
fake.fake_ctrl = 50
assert (fake.fake_ctrl == 33)
fake.fake_setting_values = (0, 33)
fake.fake_setting = 50
assert (fake.read() == '33')
fake.fake_measurement_values = {'X': 7}
fake.write('7')
... |
class Migration(migrations.Migration):
dependencies = [('sponsors', '0043_auto__1343')]
operations = [migrations.AlterField(model_name='sponsorship', name='level_name_old', field=models.CharField(blank=True, default='', help_text='DEPRECATED: shall be removed after manual data sanity check.', max_length=64, ver... |
def _orbital_basis(basis):
r = {}
basis = manip.make_general(basis, False, True)
basis = sort.sort_basis(basis, False)
electron_elements = [k for (k, v) in basis['elements'].items() if ('electron_shells' in v)]
reference_list = []
if electron_elements:
for z in electron_elements:
... |
def iter_files(path):
    """Yield the file paths reachable from *path*.

    A regular file yields just itself.  A directory is walked
    recursively and every contained file's full path is yielded.
    Anything else (missing path, special file) raises ``RuntimeError``.
    """
    if os.path.isfile(path):
        yield path
        return
    if not os.path.isdir(path):
        # Neither a file nor a directory: refuse rather than yield nothing.
        raise RuntimeError('Path %s is invalid' % path)
    for dirpath, _, filenames in os.walk(path):
        for name in filenames:
            yield os.path.join(dirpath, name)
class TinyModel(nn.Module):
def __init__(self):
super(TinyModel, self).__init__()
self.conv1 = nn.Conv2d(3, 32, kernel_size=2, stride=2, padding=2, bias=False)
self.bn1 = nn.BatchNorm2d(32)
self.relu1 = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=2, stride=2... |
def test_no_update_mixin():
class M(NoUpdateMixin):
pass
obj = M()
assert hasattr(obj, 'get')
assert hasattr(obj, 'list')
assert hasattr(obj, 'create')
assert (not hasattr(obj, 'update'))
assert hasattr(obj, 'delete')
assert isinstance(obj, ListMixin)
assert isinstance(obj, G... |
def parse_readme_frontmatter(dirname):
readme_filename = os.path.join(dirname, 'readme.md')
with open(readme_filename) as f:
lines = [line.strip() for line in f.readlines()]
top = lines.index('---')
bottom = lines.index('---', (top + 1))
frontmatter = yaml.load('\n'.join(lines[(top + 1):bott... |
class Wav2Vec2ProcessorTest(unittest.TestCase):
def setUp(self):
vocab = "<pad> <s> </s> <unk> | E T A O N I H S R D L U M W C F G Y P B V K ' X J Q Z".split(' ')
vocab_tokens = dict(zip(vocab, range(len(vocab))))
self.add_kwargs_tokens_map = {'pad_token': '<pad>', 'unk_token': '<unk>', 'bos... |
def test_smallest():
x = dvector()
y = dvector()
z = dvector()
f1 = inplace_func([x], smallest(x))
assert np.all(([1, 2, 3] == f1([1, 2, 3])))
f3 = inplace_func([x, y, z], smallest(x, y, z))
assert np.all(([1, 2, 3] == f3([1, 3, 9], [7, 7, 7], [8, 2, 3])))
(sx, sy) = (dscalar(), dscalar(... |
_module()
class ImageToTensor(object):
def __init__(self, keys):
self.keys = keys
def __call__(self, results):
for key in self.keys:
results[key] = to_tensor(results[key].transpose(2, 0, 1))
return results
def __repr__(self):
return f'{self.__class__.__name__}(key... |
class PerspectiveFivePointPlaneFitting():
def __init__(self, data):
self.method_name = 'perspective_five_point_plane_fitting'
print('running {}...'.format(self.method_name))
method_start = time.time()
(H, W) = data.mask.shape
(vv, uu) = np.meshgrid(range(W), range(H))
... |
(hasattr(socket, 'AF_UNIX'), 'this test requires Unix sockets')
class SecureUnixClientTests(unittest.TestCase):
def test_connection(self):
with temp_unix_socket_path() as path:
with run_unix_server(path, ssl_context=SERVER_CONTEXT):
with run_unix_client(path, ssl_context=CLIENT_C... |
def rtn___fprintf_chk(se: 'SymbolicExecutor', pstate: 'ProcessState'):
logger.debug('__fprintf_chk hooked')
arg0 = pstate.get_argument_value(0)
flag = pstate.get_argument_value(1)
arg1 = pstate.get_argument_value(2)
arg1f = pstate.get_format_string(arg1)
nbArgs = arg1f.count('{')
args = psta... |
class ConfigurationStore(Construct):
def __init__(self, scope: Construct, id_: str, environment: str, service_name: str, configuration_name: str) -> None:
super().__init__(scope, id_)
configuration_str = self._get_and_validate_configuration(environment)
self.app_name = f'{id_}{service_name}'... |
class MultiHeadAttention(nn.Module):
def __init__(self, d_model, d_k, d_v, h, dropout=0.1, identity_map_reordering=False, can_be_stateful=False, attention_module=None, attention_module_kwargs=None):
super(MultiHeadAttention, self).__init__()
self.identity_map_reordering = identity_map_reordering
... |
class PointnetSAModuleMSG(_PointnetSAModuleBase):
def __init__(self, npoint, radii, nsamples, mlps, bn=True, use_xyz=True):
super(PointnetSAModuleMSG, self).__init__()
assert (len(radii) == len(nsamples) == len(mlps))
self.npoint = npoint
self.groupers = nn.ModuleList()
self.... |
class ScipyGer(Ger):
def prepare_node(self, node, storage_map, compute_map, impl):
if (impl == 'py'):
node.tag.local_ger = _blas_ger_fns[np.dtype(node.inputs[0].type.dtype)]
def perform(self, node, inputs, output_storage):
(cA, calpha, cx, cy) = inputs
(cZ,) = output_storage
... |
def get_data_with_strict_range(ts_code: int, begin: str, end: str) -> List:
data = get_data(ts_code, begin, end)
first_record_date = to_datetime(data[0]['data'], 'pt')
period_start_date = to_datetime(begin, 'pt')
try:
is_out_of_range = (first_record_date < period_start_date)
if is_out_of... |
def test_cswap_cirq_decomp():
cswap = CSwap(3)
quregs = get_named_qubits(cswap.signature)
cswap_op = cswap.on_registers(**quregs)
circuit = cirq.Circuit(cswap_op, cirq.decompose_once(cswap_op))
cirq.testing.assert_has_diagram(circuit, '\nctrl: \n \nx0: (x)\n \... |
def _remove_dup_initializers_from_model(model, model_without_ext, ind_to_replace):
inits_with_data = [i for i in model.graph.initializer]
inits = [i for i in model_without_ext.graph.initializer]
for (i, ref_i) in ind_to_replace:
assert (inits_with_data[i].name == inits[i].name)
assert (inits... |
class EnumChoiceType(Choice):
def __init__(self, enum_type: EnumMeta, case_sensitive=True):
self._enum_type = enum_type
super().__init__([choice.value for choice in enum_type], case_sensitive=case_sensitive)
def convert(self, value, param, ctx):
try:
return self._enum_type(va... |
def _set_enable_recompute(mode: bool):
global _ENABLE_RECOMPUTE
original_mode = _ENABLE_RECOMPUTE
def cleanup():
global _ENABLE_RECOMPUTE
_ENABLE_RECOMPUTE = original_mode
try:
_ENABLE_RECOMPUTE = mode
return Handle(cleanup)
except Exception:
cleanup()
... |
def _test():
import torch
pretrained = False
models = [ibn_resnet50, ibn_resnet101, ibn_resnet152]
for model in models:
net = model(pretrained=pretrained)
net.eval()
weight_count = _calc_width(net)
print('m={}, {}'.format(model.__name__, weight_count))
assert ((mo... |
(reahl_system_fixture=ReahlSystemFixture)
class ValueScenarios(Fixture):
def single_value(self):
self.field = IntegerField(required=False, default=1, label='field')
self.field_on_query_string = '{field_name}=123'
self.field_value_marshalled = 123
self.field_value_as_string = '123'
... |
def fsdp_main(args):
(model, tokenizer) = setup_model(train_config.model_name)
local_rank = int(os.environ['LOCAL_RANK'])
rank = int(os.environ['RANK'])
world_size = int(os.environ['WORLD_SIZE'])
dataset = load_dataset('wikihow', 'all', data_dir='data/')
print(dataset.keys())
print('Size of ... |
class Locker():
_VERSION = '2.0'
_READ_VERSION_RANGE = '>=1,<3'
_legacy_keys: ClassVar[list[str]] = ['dependencies', 'source', 'extras', 'dev-dependencies']
_relevant_keys: ClassVar[list[str]] = [*_legacy_keys, 'group']
def __init__(self, lock: Path, local_config: dict[(str, Any)]) -> None:
... |
class PasswordDumpCommand(ops.cmd.DszCommand):
def __init__(self, plugin='passworddump', **optdict):
ops.cmd.DszCommand.__init__(self, plugin, **optdict)
def validateInput(self):
truecount = 0
for optkey in self.optdict:
optval = self.optdict[optkey]
if (type(optv... |
def test_set_pass_no_substitutions():
context = Context({'key1': 'value1', 'key2': 'value2', 'key3': 'value3', 'set': {'key2': 'value4', 'key4': 'value5'}})
pypyr.steps.set.run_step(context)
assert (context['key1'] == 'value1')
assert (context['key2'] == 'value4')
assert (context['key3'] == 'value3'... |
def _split_qobj_to_qobjs(qobj: QasmQobj, chunk_size: int) -> List[QasmQobj]:
qobjs = []
num_chunks = int(np.ceil((len(qobj.experiments) / chunk_size)))
if (num_chunks == 1):
qobjs = [qobj]
elif isinstance(qobj, QasmQobj):
qobj_template = QasmQobj(qobj_id=qobj.qobj_id, config=qobj.config,... |
class ReqChannel(BaseChannel):
def __init__(self, context, slot_base):
BaseChannel.__init__(self, context, slot_base, OBJECT)
self._request_items = {}
self._next_recheck_time = (time.time() + 0.2)
self._request_counter = 0
self._run_mode = 1
self.received.bind(self._p... |
class ZeroconfServiceTypes(ServiceListener):
def __init__(self) -> None:
self.found_services: Set[str] = set()
def add_service(self, zc: Zeroconf, type_: str, name: str) -> None:
self.found_services.add(name)
def update_service(self, zc: Zeroconf, type_: str, name: str) -> None:
def remo... |
class TestAudio():
def test_audio_amplify():
target_keys = ['audios', 'amplify_ratio']
with pytest.raises(TypeError):
AudioAmplify(1)
audio = np.random.rand(8)
results = dict(audios=audio)
amplifier = AudioAmplify(1.5)
results = amplifier(results)
... |
_rewriter([WaldRV])
def wald_from_normal_uniform(fgraph, node):
(rng, *other_inputs, mean, scale) = node.inputs
(next_rng, n) = normal.make_node(rng, *other_inputs, zeros_like(mean), ones_like(scale)).outputs
(next_rng, u) = uniform.make_node(next_rng, *other_inputs, zeros_like(mean), ones_like(scale)).outp... |
.parametrize('iso_too_big', [False, True])
('randovania.patching.patchers.gamecube.iso_packager.nod')
('randovania.patching.patchers.gamecube.iso_packager.validate_game_files_path', autospec=True)
def test_pack_iso(mock_validate_game_files_path: MagicMock, mock_nod: MagicMock, iso_too_big: bool):
iso = MagicMock()
... |
def compare_overall_changes_line_plot(before_module_weights_statistics, after_module_weights_statistics, tab_name, subplot_name):
if (not has_display()):
return
switch_backend()
(fig, ax1) = plt.subplots(1, figsize=(14, 12))
count_col = before_module_weights_statistics.shape[1]
output_channe... |
def write_obj_with_colors(obj_name, vertices, triangles, colors):
triangles = triangles.copy()
triangles += 1
if (obj_name.split('.')[(- 1)] != 'obj'):
obj_name = (obj_name + '.obj')
with open(obj_name, 'w') as f:
for i in range(vertices.shape[0]):
s = 'v {} {} {} {} {} {}\n'... |
def get_mat_and_func(config, n_features):
if (isinstance(config, NoPenalty) or (config is None)):
func_config = config
mat = csr_matrix((n_features, n_features))
elif isinstance(config, (Ridge, Lasso, GroupLasso, ExclusiveGroupLasso, MultiTaskLasso, NuclearNorm, ElasticNet, GroupElasticNet, Mult... |
def is_untyped_decorator(typ: (Type | None)) -> bool:
typ = get_proper_type(typ)
if (not typ):
return True
elif isinstance(typ, CallableType):
return (not is_typed_callable(typ))
elif isinstance(typ, Instance):
method = typ.type.get_method('__call__')
if method:
... |
(autouse=True, scope='session')
def check_parity_version_for_tests(blockchain_type):
if (blockchain_type != 'parity'):
return
(parity_version_string, _) = subprocess.Popen(['openethereum', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
(supported, _, our_version) = is_su... |
class Wavefront(Resource):
thumbnail_image = models.ImageField(_('Thumbnail'), help_text=_('Please upload an image that demonstrate this 3D Model'), blank=False, null=False, upload_to=WAVEFRONTS_STORAGE_PATH)
file = models.FileField(_('3D Model file'), help_text=_('A 3D model zip file. The zip file must contain... |
class CaptureFixture(Generic[AnyStr]):
def __init__(self, captureclass: Type[CaptureBase[AnyStr]], request: SubRequest, *, _ispytest: bool=False) -> None:
check_ispytest(_ispytest)
self.captureclass: Type[CaptureBase[AnyStr]] = captureclass
self.request = request
self._capture: Optio... |
class Solution(object):
def reverseVowels(self, s):
str_index = []
vowel = []
res = []
pos = (- 1)
for (index, value) in enumerate(s):
if (value in 'aeiouAEIOU'):
str_index.append((- 1))
vowel.append(value)
else:
... |
.parametrize('xs, ys, exp_rowcol', [([(101985.0 + 400.0)], [2826915.0], ([0], [1])), (array('d', [(101985.0 + 400.0)]), array('d', [2826915.0]), ([0], [1])), (numpy.array([(101985.0 + 400.0)]), numpy.array([2826915.0]), ([0], [1]))])
def test_rowcol_input(xs, ys, exp_rowcol):
with rasterio.open('tests/data/RGB.byte... |
class LSCPB(LocateSolver, BaseOutputMixin, BackupPercentageMixinMixin):
def __init__(self, name: str, problem: pulp.LpProblem, solver: pulp.LpSolver):
self.solver = solver
super().__init__(name, problem)
def __add_obj(self) -> None:
cov_vars = getattr(self, 'cli_vars')
self.probl... |
class PandaStickConfig(PandaDefaultConfig):
def __init__(self) -> None:
super().__init__()
self.urdf_path = '{PACKAGE_ASSET_DIR}/descriptions/panda_stick.urdf'
self.ee_link_name = 'panda_hand'
def controllers(self):
controller_configs = super().controllers
arm_pd_ee_delta... |
.parametrize('manifest_exists,test_tag,expected_status', [(True, '-INVALID-TAG-NAME', 400), (True, '.INVALID-TAG-NAME', 400), (True, 'INVALID-TAG_NAME-BECAUSE-THIS-IS-WAY-WAY-TOO-LOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOONG', 400), (False, 'newtag', 404), (True, 'generatemanife... |
def test_inherit_bag_tuple():
    """Run the ``append`` node over *messages* and check field inheritance.

    Writing tuples through the buffering context should produce the
    original 'message' field plus an auto-numbered ('0') output field
    holding the appended value ('!').
    """
    with BufferingNodeExecutionContext(append) as context:
        context.set_input_fields(['message'])
        context.write_sync(*messages)
    # NOTE(review): indentation was lost in this source; the asserts are
    # placed after the context exits on the assumption that the output
    # buffer is complete only once execution finishes — confirm against
    # the sibling tests in this module.
    assert (context.get_output_fields() == ('message', '0'))
    # Each input row is expected back with '!' appended as the new field.
    assert (context.get_buffer() == list(map((lambda x: (x + ('!',))), messages)))
def test_Vector_init():
v = pg.Vector(0, 1)
assert (v.z() == 0)
v = pg.Vector(0.0, 1.0)
assert (v.z() == 0)
v = pg.Vector(0, 1, 2)
assert (v.x() == 0)
assert (v.y() == 1)
assert (v.z() == 2)
v = pg.Vector(0.0, 1.0, 2.0)
assert (v.x() == 0)
assert (v.y() == 1)
assert (v.z(... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.