code stringlengths 281 23.7M |
|---|
def generate_voucher_codes(year):
for (benefit_id, code) in BENEFITS.items():
for sponsorbenefit in SponsorBenefit.objects.filter(sponsorship_benefit_id=benefit_id).filter(sponsorship__status='finalized').all():
try:
quantity = BenefitFeature.objects.instance_of(TieredBenefit).ge... |
def test_select_area_by_name(echoes_game_data, skip_qtbot):
window = DataEditorWindow(echoes_game_data, None, False, True)
skip_qtbot.addWidget(window)
window.focus_on_region_by_name('Torvus Bog')
assert (window.current_area.name != 'Forgotten Bridge')
window.focus_on_area_by_name('Forgotten Bridge'... |
def handle_new_cfp_submission(data):
from conferences.models import Conference
title = data['title']
elevator_pitch = data['elevator_pitch']
submission_type = data['submission_type']
admin_url = data['admin_url']
tags = data['tags']
speaker_id = data['speaker_id']
speaker = User.objects.... |
class ValidateLogger(object):
def __init__(self, tensorboard_logdir=None):
self._tensorboard_writer = None
if tensorboard_logdir:
self._tensorboard_writer = tf.summary.FileWriter(tensorboard_logdir)
def log_info(self, metric_results, epoch, step):
self._log_to_console(metric_... |
class MrpcProcessor(DataProcessor):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
warnings.warn(DEPRECATION_WARNING.format('processor'), FutureWarning)
def get_example_from_tensor_dict(self, tensor_dict):
return InputExample(tensor_dict['idx'].numpy(), tensor_dic... |
class FasterRcnnBoxCoder(object):
def __init__(self, scale_factors: Optional[List[float]]=None, eps: float=EPS):
self._scale_factors = scale_factors
if (scale_factors is not None):
assert (len(scale_factors) == 4)
for scalar in scale_factors:
assert (scalar > ... |
def test_matrix_variable_selection_include_none(hatch, helpers, temp_dir, config_file):
config_file.model.template.plugins['default']['tests'] = False
config_file.save()
project_name = 'My.App'
with temp_dir.as_cwd():
result = hatch('new', project_name)
assert (result.exit_code == 0), result... |
def test_foo_as_writer():
    """Elaborating Foo_wrap and running the sim pass must raise LeftoverPlaceholderError."""
    dut = Foo_wrap(32)
    dut.elaborate()
    try:
        simple_sim_pass(dut)
    except LeftoverPlaceholderError as err:
        # Expected failure path: report it and finish the test successfully.
        print('{} is thrown\n{}'.format(err.__class__.__name__, err))
        return
    # Reaching here means the pass did not complain about the placeholder.
    raise Exception("Should've thrown LeftoverPlaceholderError.")
class ModuleLoader(resources.SysPathSetup):
def setUp(self) -> None:
super().setUp()
self.module = resources.build_file('data/module.py', 'data.module')
self.module2 = resources.build_file('data/module2.py', 'data.module2')
self.nonregr = resources.build_file('data/nonregr.py', 'data... |
class comattribs():
vendor = PSPAttribute('vendor', config=True, displayas='Vendor')
product = PSPAttribute('product', config=True, displayas='Product')
version = PSPAttribute('version', config=True, displayas='Version')
installdate = PSPAttribute('installdate', config=True, displayas='Install Date')
... |
def requires_backends(obj, backends):
if (not isinstance(backends, (list, tuple))):
backends = [backends]
name = (obj.__name__ if hasattr(obj, '__name__') else obj.__class__.__name__)
if (('torch' in backends) and ('tf' not in backends) and (not is_torch_available()) and is_tf_available()):
... |
class RoadNet():
def __init__(self, roadnet_file):
self.roadnet_dict = json.load(open(roadnet_file, 'r'))
self.net_edge_dict = {}
self.net_node_dict = {}
self.net_lane_dict = {}
self.generate_node_dict()
self.generate_edge_dict()
self.generate_lane_dict()
... |
_tag()
def i18n_switcher():
string = ''
for (language, language_string) in settings.LANGUAGES:
url = reverse('i18n_switcher', args=[language])
if (language == translation.get_language()):
string += f'<li><a href="{url}"><u>{language_string}</u></a></li>'
else:
str... |
_specialize
_rewriter([mul, true_div])
def local_mul_exp_to_exp_add(fgraph, node):
exps = [n.owner.inputs[0] for n in node.inputs if (n.owner and hasattr(n.owner.op, 'scalar_op') and isinstance(n.owner.op.scalar_op, ps.Exp))]
if (len(exps) >= 2):
(orig_op, new_op) = (mul, add)
if isinstance(node... |
class TestEnhancerUserConfigs(_BaseCustomEnhancementConfigTests):
ENH_FN = 'test_sensor.yaml'
ENH_ENH_FN = os.path.join('enhancements', ENH_FN)
ENH_FN2 = 'test_sensor2.yaml'
ENH_ENH_FN2 = os.path.join('enhancements', ENH_FN2)
ENH_FN3 = 'test_empty.yaml'
TEST_CONFIGS = {ENH_FN: '\nenhancements:\n... |
def check_disjoint(sol, list_of_subsets):
n = len(list_of_subsets)
selected_subsets = []
for i in range(n):
if (sol[i] == 1):
selected_subsets.append(list_of_subsets[i])
tmplen = len(selected_subsets)
for i in range(tmplen):
for j in range(i):
L = selected_sub... |
def get_random_distorted_bottlenecks(sess, image_lists, how_many, category, image_dir, input_jpeg_tensor, distorted_image, resized_input_tensor, bottleneck_tensor):
class_count = len(image_lists.keys())
bottlenecks = []
ground_truths = []
for unused_i in range(how_many):
label_index = random.ran... |
_config
def test_fullscreen(manager):
manager.test_window('one')
manager.c.window.set_position_floating(50, 20)
manager.c.window.set_size_floating(1280, 720)
assert (manager.c.window.info()['width'] == 1280)
assert (manager.c.window.info()['height'] == 720)
assert (manager.c.window.info()['x'] =... |
class Migration(migrations.Migration):
dependencies = [('api', '0035_create_table_log_entry')]
operations = [migrations.AlterField(model_name='nomination', name='user', field=models.ForeignKey(help_text='The nominated user.', on_delete=django.db.models.deletion.CASCADE, related_name='nomination', to='api.User')... |
def auth_arg(arg: str) -> t.List[str]:
items = sorted(list(set((i.strip().lower() for i in arg.split(',')))))
if any(((i not in ('download', 'list', 'update', '.')) for i in items)):
raise ValueError('Invalid authentication option. Valid values are download, list, and update, or . (for no authentication... |
class Migration(migrations.Migration):
    """Alter signature.phone to a PhoneNumberField (blank allowed, max length 20)."""

    dependencies = [
        ('petition', '0015_petitiontemplate_has_share_buttons'),
    ]

    operations = [
        migrations.AlterField(
            model_name='signature',
            name='phone',
            field=phonenumber_field.modelfields.PhoneNumberField(
                blank=True,
                max_length=20,
                region=None,
                verbose_name='Phone number',
            ),
        ),
    ]
.skipif((not torch.cuda.is_available()), reason='requires CUDA support')
def test_nms_normal_bev():
np_boxes = np.array([[6.0, 3.0, 8.0, 7.0, 2.0], [3.0, 6.0, 9.0, 11.0, 1.0], [3.0, 7.0, 10.0, 12.0, 1.0], [1.0, 4.0, 13.0, 7.0, 3.0]], dtype=np.float32)
np_scores = np.array([0.6, 0.9, 0.7, 0.2], dtype=np.float32)... |
class TestCli():
def setup_class(cls):
cls.temp_file = tempfile.NamedTemporaryFile(delete=False)
cls.temp_file_name = cls.temp_file.name
def teardown_class(cls):
os.unlink(cls.temp_file_name)
('recap.commands.ls')
def test_ls_root(self, mock_ls):
mock_ls.return_value = ['... |
class SupervisedDataset(Dataset):
def __init__(self, tokenizer: transformers.PreTrainedTokenizer):
super(SupervisedDataset, self).__init__()
targets = [f"{example['output']}{tokenizer.eos_token}" for example in raw_dataset]
data_dict = preprocess(raw_dataset['input'], targets, tokenizer)
... |
class IsValidTypeSuite(unittest.TestCase):
def test_is_valid_type(self) -> None:
assert is_valid_type('int')
assert is_valid_type('str')
assert is_valid_type('Foo_Bar234')
assert is_valid_type('foo.bar')
assert is_valid_type('List[int]')
assert is_valid_type('Dict[str... |
def test_solvers():
cnf = CardEnc.atmost(lits=range(1, 6), bound=1, encoding=EncType.pairwise)
for name in solvers:
with Solver(name=name, bootstrap_with=cnf) as solver:
for l in range(1, 6):
(st, lits) = solver.propagate(assumptions=[l], phase_saving=1)
asser... |
def try_infer_format_from_ext(path: str):
if (not path):
return 'pipe'
for ext in PipelineDataFormat.SUPPORTED_FORMATS:
if path.endswith(ext):
return ext
raise Exception('Unable to determine file format from file extension {}. Please provide the format through --format {}'.format... |
class QtumRegtest(QtumTestnet):
SEGWIT_HRP = 'qcrt'
GENESIS = '665ed5b402ac0b44efc37de8a7278b7ee9a58fb972efadae943'
DEFAULT_SERVERS = read_json('servers_regtest.json', {})
CHECKPOINTS = {}
HEADERS_URL = None
POS_NO_RETARGET = True
POW_LIMIT =
POS_LIMIT =
QIP9_POS_LIMIT =
QIP5_... |
class LogFiles(object):
def __init__(self, maincfg=None):
main_config = pynag.Parsers.main.MainConfig(maincfg)
self.log_file = main_config.get('log_file')
self.log_archive_path = main_config.get('log_archive_path')
def get_log_entries(self, start_time=None, end_time=None, strict=True, se... |
class BitPackEnum(BitPackValue):
def bit_pack_encode(self, metadata: dict) -> Iterator[tuple[(int, int)]]:
assert isinstance(self, Enum)
cls = self.__class__
values = list(cls.__members__.values())
(yield from pack_array_element(self, values))
def bit_pack_unpack(cls, decoder: Bi... |
def loader(filenames, fileformat, cache, filename_attributes, show_progress=True, update_progress=None):
if show_progress_force_off:
show_progress = False
class Progress(object):
def __init__(self, label, n):
self._label = label
self._n = n
self._bar = None
... |
def _map_reduce_tree(iterables, map_func, reduce_func, constraints, tree, chunksize, shortcircuit_func, shortcircuit_callback, shortcircuit_callback_args, ordered, inflight_limit, map_kwargs, reduce_kwargs, progress_bar, _level=1):
total = fallback(try_len(*iterables), float('inf'))
branch = ((_level < tree.dep... |
class SurfaceForm(BaseModel):
def __init__(self, param, domain, options=None):
super().__init__(param, domain, options=options)
def get_coupled_variables(self, variables):
Domain = self.domain.capitalize()
x_n = pybamm.standard_spatial_vars.x_n
x_p = pybamm.standard_spatial_vars.... |
class MusicBrainzCover(CoverSourcePlugin, HTTPDownloadMixin):
PLUGIN_ID = 'musicbrainz-cover'
PLUGIN_NAME = _('MusicBrainz Cover Source')
PLUGIN_DESC = _("Downloads covers from MusicBrainz's cover art archive.")
_SIZES = {'original': 'front', '500x500': 'front-500', '500x500 (back)': 'back-500', 'origin... |
class AndFilter(Filter):
def __init__(self, base, other):
self.base = base
self.other = other
async def __call__(self, client: 'pyrogram.Client', update: Update):
if inspect.iscoroutinefunction(self.base.__call__):
x = (await self.base(client, update))
else:
... |
def locate_cuda():
if ('CUDA_PATH' in os.environ):
home = os.environ['CUDA_PATH']
nvcc = pjoin(home, 'bin', NVCC)
else:
default_path = pjoin(os.sep, 'usr', 'local', 'cuda', 'bin')
nvcc = find_in_path(NVCC, ((os.environ['PATH'] + os.pathsep) + default_path))
if (nvcc is No... |
class PrettyTable(object):
def __init__(self, field_names=None, **kwargs):
self.encoding = kwargs.get('encoding', 'UTF-8')
self._field_names = []
self._align = {}
self._valign = {}
self._max_width = {}
self._rows = []
if field_names:
self.field_nam... |
_env('PickYCBInReplicaCAD-v0', max_episode_steps=200, override=True)
class PickYCBInReplicaCAD(PickCubeEnv):
def _load_actors(self):
builder = self._scene.create_actor_builder()
model_dir = (ASSET_DIR / 'mani_skill2_ycb/models/011_banana')
scale = (self.cube_half_size / 0.)
collision... |
def mock_fastrcnn_outputs_inference(tensor_mode, check=True, box_predictor_type=FastRCNNOutputLayers):
with mock.patch.object(box_predictor_type, 'inference', autospec=True, side_effect=Caffe2FastRCNNOutputsInference(tensor_mode)) as mocked_func:
(yield)
if check:
assert (mocked_func.call_count ... |
.parametrize('dynamic', [False, True])
def test_control_preprocess_reply_property(dynamic):
class Fake(FakeBase):
x = CommonBase.control('', 'JUNK%d', '', preprocess_reply=(lambda v: v.replace('JUNK', '')), dynamic=dynamic, cast=int)
fake = Fake()
fake.x = 5
assert (fake.read() == 'JUNK5')
f... |
class ImageNetDataPipeline():
def get_val_dataloader() -> torch.utils.data.DataLoader:
data_loader = ImageNetDataLoader(DATASET_DIR, image_size=image_net_config.dataset['image_size'], batch_size=image_net_config.evaluation['batch_size'], is_training=False, num_workers=image_net_config.evaluation['num_worker... |
def is_mnist_cache_present():
mnist_cache_present = False
if ('DEPENDENCY_DATA_PATH' in os.environ):
logger.info('Dependency data path was set to %s', os.environ.get('DEPENDENCY_DATA_PATH'))
mnist_cache_folder = os.path.join(os.environ.get('DEPENDENCY_DATA_PATH'), 'MNIST', 'processed', 'training... |
def validate(args, trainer, task, epoch_itr, subsets):
if (args.fixed_validation_seed is not None):
utils.set_torch_seed(args.fixed_validation_seed)
valid_losses = []
for subset in subsets:
itr = task.get_batch_iterator(dataset=task.dataset(subset), max_tokens=args.max_tokens, max_sentences=... |
def get_lib_path(executable, name, required=True):
code = ['try:', ' import {}'.format(name), 'except ImportError as e:', ' print("ImportError: " + str(e))', 'else:', ' print("path: " + {}.__file__)'.format(name)]
output = run_py(executable, *code)
try:
(prefix, data) = output.split(': ')
... |
class Adam(Optimizer):
def __init__(self, params, lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False):
if (not (0.0 <= lr)):
raise ValueError('Invalid learning rate: {}'.format(lr))
if (not (0.0 <= eps)):
raise ValueError('Invalid epsilon value: {}'.format... |
def _test_initialize_ucx_nvlink(protocol):
if (protocol == 'ucx'):
ucp = pytest.importorskip('ucp')
elif (protocol == 'ucxx'):
ucp = pytest.importorskip('ucxx')
kwargs = {'enable_nvlink': True}
initialize(protocol=protocol, **kwargs)
with LocalCluster(protocol=protocol, dashboard_add... |
class _ConvBase(nn.Sequential):
def __init__(self, in_size, out_size, kernel_size, stride, padding, activation, bn, init, conv=None, batch_norm=None, bias=True, preact=False, name=''):
super().__init__()
bias = (bias and (not bn))
conv_unit = conv(in_size, out_size, kernel_size=kernel_size, ... |
def test_illegal_hosts_too_far():
class A(ComponentLevel3):
def construct(s):
s.out = OutPort(32)
def up_A_write():
s.out = 123
class AWrap(ComponentLevel3):
def construct(s):
s.out = OutPort(32)
s.A = A()
s.A.out //= s.... |
def test_widgetbox_mirror(manager_nospawn, minimal_conf_noscreen):
config = minimal_conf_noscreen
tbox = TextBox(text='Text Box')
config.screens = [libqtile.config.Screen(top=libqtile.bar.Bar([tbox, WidgetBox(widgets=[tbox])], 10))]
manager_nospawn.start(config)
manager_nospawn.c.widget['widgetbox']... |
def tuple_fallback(typ: TupleType) -> Instance:
from mypy.join import join_type_list
info = typ.partial_fallback.type
if (info.fullname != 'builtins.tuple'):
return typ.partial_fallback
items = []
for item in typ.items:
if isinstance(item, UnpackType):
unpacked_type = get... |
def main():
if (not ('debug' in args.save)):
from nasbench_analysis import eval_darts_one_shot_model_in_nasbench as naseval
if (args.search_space == '1'):
search_space = SearchSpace1()
elif (args.search_space == '2'):
search_space = SearchSpace2()
elif (args.search_space == '3'):... |
(SponsorshipBenefit)
class SponsorshipBenefitAdmin(PolymorphicInlineSupportMixin, OrderedModelAdmin):
change_form_template = 'sponsors/admin/sponsorshipbenefit_change_form.html'
inlines = [BenefitFeatureConfigurationInline]
ordering = ('-year', 'program', 'order')
list_display = ['program', 'year', 'sho... |
.parametrize('dependency, expected', [(Dependency('foo', '<2'), [Package('foo', '1')]), (Dependency('foo', '<2', extras=['bar']), [Package('foo', '1')]), (Dependency('foo', '>=1'), [Package('foo', '2'), Package('foo', '1')]), (Dependency('foo', '>=1a'), [Package('foo', '2'), Package('foo', '1')]), (Dependency('foo', '>... |
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument('--results_root', help='root directory with results from running learncurve experiments, where subdirectories are individuals from dataset, and each subdirectory contains results folders from a run of `vak learncurve`', default=BR_RESULTS... |
class SessionData(Base):
__tablename__ = 'sessiondata'
id = Column(Integer, primary_key=True)
discriminator = Column('row_type', String(40))
__mapper_args__ = {'polymorphic_identity': 'sessiondata', 'polymorphic_on': discriminator}
web_session_id = Column(Integer, ForeignKey('usersession.id', ondele... |
class DBPedia(AbsTaskRetrieval, BeIRTask):
def description(self):
return {'name': 'DBPedia', 'beir_name': 'dbpedia-entity', 'description': 'DBpedia-Entity is a standard test collection for entity search over the DBpedia knowledge base', 'reference': ' 'type': 'Retrieval', 'category': 's2p', 'eval_splits': [... |
def to_sgkit(mydata):
(variant_contig, variant_contig_names) = encode_array(mydata.V1.to_numpy())
variant_contig = variant_contig.astype('int16')
variant_contig_names = [str(contig) for contig in variant_contig_names]
variant_position = mydata.V3.to_numpy()
variant_id = mydata.V2.to_numpy()
vari... |
class EcmParameters():
def __init__(self):
self.cell_capacity = pybamm.Parameter('Cell capacity [A.h]')
self._set_current_parameters()
self._set_voltage_parameters()
self._set_thermal_parameters()
self._set_initial_condition_parameters()
self._set_compatibility_parame... |
def parse_args():
parser = argparse.ArgumentParser(description='train.py')
parser.add_argument('--n_embs', type=int, default=512, help='Embedding size')
parser.add_argument('--img_embs', type=int, default=2048, help='Image embedding size')
parser.add_argument('--dim_ff', type=int, default=512, help='Fee... |
def test_varyings_remove5():
code1 = '\n fn vs_main() -> Varyings {\n var varyings : Varyings;\n varyings.spam = vec3<f32>(\n 1.0, 2.0, 3.0\n );\n return varyings;\n }\n '
code2 = '\n struct Varyings {\n };\n\n fn vs_main() -> Varyings {\n var vary... |
.parametrize('function_', FUNCTIONS_HAVE_SAME_APERTURE)
.parametrize('aperture', ['MIN', 'MAX', 'DEF', 0.016])
def test_general_aperture(resetted_dmm6500, function_, aperture):
resetted_dmm6500.mode = function_
resetted_dmm6500.aperture = aperture
assert (len(resetted_dmm6500.check_errors()) == 0)
expec... |
def coord_map(fn):
if (fn.type_name in ['Convolution', 'Pooling', 'Im2col']):
(axis, stride, ks, pad) = conv_params(fn)
return (axis, (1 / stride), ((pad - ((ks - 1) / 2)) / stride))
elif (fn.type_name == 'Deconvolution'):
(axis, stride, ks, pad) = conv_params(fn)
return (axis, s... |
class EmptyParserTests(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp(prefix='qiime2-test-parse-temp-')
def tearDown(self):
shutil.rmtree(self.tempdir)
def test_get_parser(self):
parser = EmptyParser.get_parser(None)
self.assertIsInstance(parser, EmptyPa... |
def log_dir():
    """Build (and create if missing) the supervised-run log directory.

    The path is composed of FLAGS.base_log_dir, a 'sup-' prefix plus the
    second-to-last component of FLAGS.train_prefix, and a trailing
    model/model_size/learning-rate subdirectory.

    Returns:
        The log directory path string; the directory is guaranteed to
        exist on disk when this returns.
    """
    log_dir = ((FLAGS.base_log_dir + '/sup-') + FLAGS.train_prefix.split('/')[(- 2)])
    log_dir += '/{model:s}_{model_size:s}_{lr:0.4f}/'.format(model=FLAGS.model, model_size=FLAGS.model_size, lr=FLAGS.learning_rate)
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs
    # (two concurrent callers could both pass the exists() check and one
    # would then crash in makedirs).
    os.makedirs(log_dir, exist_ok=True)
    return log_dir
def test_string_list():
lst = m.StringList()
lst.push_back('Element 1')
lst.push_back('Element 2')
assert (m.print_opaque_list(lst) == 'Opaque list: [Element 1, Element 2]')
assert (lst.back() == 'Element 2')
for (i, k) in enumerate(lst, start=1):
assert (k == 'Element {}'.format(i))
... |
def evaluate_metrics_from_files(pred_file: Union[(Path, str)], ref_file: Union[(Path, str)]) -> Tuple[(Dict[(str, float)], Dict[(int, Dict[(str, float)])])]:
coco = COCO(str(ref_file))
cocoRes = coco.loadRes(str(pred_file))
cocoEval = COCOEvalCap(coco, cocoRes)
cocoEval.params['audio_id'] = cocoRes.getA... |
def get_mask_pallete(npimg, dataset='detail'):
dataset = dataset.lower()
if (dataset == 'pascal_voc'):
npimg[(npimg == 21)] = 255
out_img = Image.fromarray(npimg.squeeze().astype('uint8'))
if (dataset == 'ade20k'):
out_img.putpalette(adepallete)
elif (dataset == 'cityscapes'):
... |
def get_extension() -> type[Extension]:
use_setuptools = ('setuptools' in sys.modules)
extension_class: type[Extension]
if ((sys.version_info < (3, 12)) and (not use_setuptools)):
import distutils.core
extension_class = distutils.core.Extension
else:
if (not use_setuptools):
... |
def get_vocabulary(cfg: DatasetConfig) -> Tuple[(seqio.Vocabulary, seqio.Vocabulary)]:
if cfg.module:
warnings.warn('The use of `DatasetConfig.module` and `MIXTURE_OR_TASK_MODULE` is deprecated in favor of importing the module directly or via gin.', DeprecationWarning)
import_module(cfg.module)
... |
class AnnotationDial(QtWidgets.QDial):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.setRange(0, 360)
self.setSingleStep(45)
self.setWrapping(True)
self.setMaximumSize(45, 45)
self.setNotchesVisible(True)
def enterEvent(self, e):
... |
class EditAddressView(UrlBoundView):
    """URL-bound view that edits one Address record selected by id."""

    def assemble(self, address_id=None):
        # NOTE(review): CannotCreate presumably makes by_id fail rather than
        # auto-create a missing Address — confirm against Address.by_id.
        addr = Address.by_id(address_id, CannotCreate())
        self.title = 'Edit Address for %s' % addr.name
        self.set_slot('main', EditAddressForm.factory(addr))
        self.read_check = addr.can_be_edited
class BlockData(object):
def __init__(self, block_data_path=None, load_from_path=True, rank=None):
self.embed_data = dict()
self.meta_data = dict()
if (block_data_path is None):
args = get_args()
block_data_path = args.block_data_path
rank = args.rank
... |
class Migration(migrations.Migration):
dependencies = [('styles', '0005_style_modified_date')]
operations = [migrations.AddField(model_name='stylereview', name='require_action', field=models.BooleanField(db_index=True, default=False, help_text='Set to True if you require creator to update its style.', verbose_n... |
class Auxiliary_Classifier(nn.Module):
def __init__(self, T, feature_dim, input_size=32, width_mult=1.0, remove_avg=False):
super(Auxiliary_Classifier, self).__init__()
self.remove_avg = remove_avg
self.width_mult = width_mult
interverted_residual_setting1 = [[T, 32, 3, 2], [T, 64, 4... |
_tokenizers
class RobertaTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
tokenizer_class = RobertaTokenizer
rust_tokenizer_class = RobertaTokenizerFast
test_rust_tokenizer = True
from_pretrained_kwargs = {'cls_token': '<s>'}
def setUp(self):
super().setUp()
vocab = ['l', '... |
def sia_partitions(nodes, node_labels=None):
scheme = config.SYSTEM_PARTITION_TYPE
valid = ['DIRECTED_BI', 'DIRECTED_BI_CUT_ONE']
if (scheme not in valid):
raise ValueError(f'IIT 3.0 calculations must use one of the following system partition schemes: {valid}; got {scheme}')
return system_partit... |
class PartitionLocator(Locator, dict):
def of(stream_locator: Optional[StreamLocator], partition_values: Optional[List[Any]], partition_id: Optional[str]) -> PartitionLocator:
partition_locator = PartitionLocator()
partition_locator.stream_locator = stream_locator
partition_locator.partition... |
class BundledUpdate(Update):
def get_updates(self):
if self:
(yield (self.get_title(), self.get_body([update for updates in self.values() for update in updates if self.should_update(update.requirement, update.requirement_file)]), self.get_branch(), [update for updates in self.values() for update... |
def prepare_cross_domains_model_specific():
print('# preparing cross_domains_model_specific ...')
for model_patterns in model_sets:
sub_dir = f'{data_dir}/cross_domains_model_specific/model_{model_patterns[0]}'
os.makedirs(sub_dir, exist_ok=True)
_tmp = ' '.join(model_patterns)
p... |
def _test_connection(conn_number, rp):
print('Testing server started by: ', __conn_remote_cmds[conn_number])
conn = Globals.connections[conn_number]
if (conn is None):
sys.stderr.write('- Connection failed, server tests skipped\n')
return False
try:
remote_time = conn.Globals.get... |
def main():
args = cfg.parse_args()
log_path = os.path.join(args.demo_path, (args.demo_name + '/log'))
vid_path = os.path.join(args.demo_path, (args.demo_name + '/vids'))
if ((not os.path.exists(log_path)) and (not os.path.exists(vid_path))):
os.makedirs(log_path)
os.makedirs(vid_path)
... |
.parametrize('returncode', (None, 42))
def test_wrap_session_exit_sessionfinish(returncode: Optional[int], pytester: Pytester) -> None:
pytester.makeconftest('\n import pytest\n def pytest_sessionfinish():\n pytest.exit(reason="exit_pytest_sessionfinish", returncode={returncode})\n '.for... |
def get_text(task, line):
if (task in ['MNLI', 'MRPC', 'QNLI', 'QQP', 'RTE', 'SNLI', 'SST-2', 'STS-B', 'WNLI', 'CoLA']):
line = line.strip().split('\t')
if (task == 'CoLA'):
pass
elif (task == 'MNLI'):
pass
elif (task == 'MRPC'):
pass
elif ... |
def hans_convert_examples_to_features(examples: List[InputExample], label_list: List[str], max_length: int, tokenizer: PreTrainedTokenizer):
label_map = {label: i for (i, label) in enumerate(label_list)}
features = []
for (ex_index, example) in tqdm.tqdm(enumerate(examples), desc='convert examples to featur... |
class SegCrossEntropyLoss(nn.Module):
def __init__(self, ignore_index=(- 1), **kwargs):
super(SegCrossEntropyLoss, self).__init__()
self.task_loss = nn.CrossEntropyLoss(ignore_index=ignore_index)
def forward(self, inputs, targets):
(B, H, W) = targets.size()
inputs = F.interpolat... |
class AddImportTest(unittest.TestCase):
def setUp(self):
super().setUp()
self.project = testutils.sample_project()
self.mod1 = testutils.create_module(self.project, 'mod1')
self.mod2 = testutils.create_module(self.project, 'mod2')
self.pkg = testutils.create_package(self.proj... |
def test_lid_detailed_report():
with Simulation(MODEL_LIDS_PATH) as sim:
subLIDs = LidGroups(sim)
sub_2_lids = subLIDs['2']
first_LID_unit_on_sub_2 = sub_2_lids[0]
second_LID_unit_on_sub_2 = sub_2_lids[1]
assert (first_LID_unit_on_sub_2.number == 4)
assert (second_LID... |
class DistInfoDistribution(Distribution):
PKG_INFO = 'METADATA'
EQEQ = re.compile('([\\(,])\\s*(\\d.*?)\\s*([,\\)])')
def _parsed_pkg_info(self):
try:
return self._pkg_info
except AttributeError:
metadata = self.get_metadata(self.PKG_INFO)
self._pkg_info =... |
def open_specified_layers(model, open_layers):
if isinstance(model, nn.DataParallel):
model = model.module
if isinstance(open_layers, str):
open_layers = [open_layers]
for layer in open_layers:
assert hasattr(model, layer), '"{}" is not an attribute of the model, please provide the c... |
def create_file_dialog(dialog_type, directory, allow_multiple, save_filename, file_types, uid):
window = BrowserView.instances[uid]
if (not directory):
directory = os.environ['HOMEPATH']
try:
if (dialog_type == FOLDER_DIALOG):
dialog = WinForms.FolderBrowserDialog()
d... |
class APNXBuilder(object):
def write_apnx(self, mobi_file_path, apnx_path, page_count=0):
import uuid
apnx_meta = {'guid': str(uuid.uuid4()).replace('-', '')[:8], 'asin': '', 'cdetype': 'EBOK', 'format': 'MOBI_7', 'acr': ''}
try:
with open(mobi_file_path, 'rb') as mf:
... |
.parametrize('url, title, out', [(' 'Installing qutebrowser | qutebrowser', 'Installing qutebrowser _ qutebrowser.html'), (' 'Installing qutebrowser | qutebrowser.html', 'Installing qutebrowser _ qutebrowser.html'), (' 'Installing qutebrowser | qutebrowser', 'Installing qutebrowser _ qutebrowser.html'), (' 'Installing ... |
def test_to_2d_arg():
val = 0
actual = misc.to_2d_arg(val)
desired = (val, val)
assert (actual == desired)
val = (0, 0)
actual = misc.to_2d_arg(val)
desired = val
assert (actual == desired)
val = 0
actual = misc.to_2d_arg(([val] * 2))
desired = (val, val)
assert (actual =... |
def cats_loss(prediction, label, l_weight=[0.0, 0.0], device='cpu'):
(tex_factor, bdr_factor) = l_weight
balanced_w = 1.1
label = label.float()
prediction = prediction.float()
with torch.no_grad():
mask = label.clone()
num_positive = torch.sum((mask == 1).float()).float()
num... |
def main() -> None:
parser = argparse.ArgumentParser(description='Generate baseline stubs automatically for an installed pip package\n using stubgen. Also run Black and Ruff. If the name of\n the project is different from the runtime Python package name, you may\n ... |
class Normalize():
def __init__(self, mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)):
self.mean = mean
self.std = std
def __call__(self, img):
imgarr = np.asarray(img)
proc_img = np.empty_like(imgarr, np.float32)
proc_img[(..., 0)] = (((imgarr[(..., 0)] / 255.0) -... |
def preprocess_singleton_data(samples: List[EntityContext], tokenizer: PreTrainedTokenizer, max_seq_length=64, disable_tqdm=False):
raw_sentences = []
for ent_ctx in samples:
raw_sentences.extend([ent_ctx.left_context, ent_ctx.entity, ent_ctx.right_context])
tokenizer_output = tokenizer(raw_sentence... |
def test_scenario_all_steps(mocker):
background = mocker.MagicMock(all_steps=[])
precondition_scenario = mocker.MagicMock(all_steps=[])
scenario = Scenario(1, 'Scenario', 'I am a Scenario', 'foo.feature', 1, parent=None, tags=None, preconditions=[precondition_scenario], background=background)
scenario.s... |
_SEG_HEADS_REGISTRY.register()
class PerPixelBaselineHead(nn.Module):
_version = 2
def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs):
version = local_metadata.get('version', None)
if ((version is None) or (version < 2)):
... |
.gitlab_premium
def test_project_merge_request_approval_rules(group, project):
approval_rules = project.approvalrules.list(get_all=True)
assert (not approval_rules)
project.approvalrules.create({'name': 'approval-rule', 'approvals_required': 2, 'group_ids': [group.id]})
approval_rules = project.approval... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.