code stringlengths 281 23.7M |
|---|
.parametrize('seed, maker_op, numpy_res', [(3, RandomState, np.random.RandomState(3)), (3, default_rng, np.random.default_rng(3))])
def test_random_maker_op(seed, maker_op, numpy_res):
seed = pt.as_tensor_variable(seed)
z = function(inputs=[], outputs=[maker_op(seed)])()
aes_res = z[0]
assert maker_op.r... |
def define_classifier(input_nc, ncf, ninput_edges, nclasses, opt, gpu_ids, arch, init_type, init_gain):
net = None
norm_layer = get_norm_layer(norm_type=opt.norm, num_groups=opt.num_groups)
if (arch == 'mconvnet'):
net = MeshConvNet(norm_layer, input_nc, ncf, nclasses, ninput_edges, opt.pool_res, op... |
def eval_det_multiprocessing(pred_all, gt_all, ovthresh=0.25, use_07_metric=False, get_iou_func=get_iou):
pred = {}
gt = {}
for img_id in pred_all.keys():
for (classname, bbox, score) in pred_all[img_id]:
if (classname not in pred):
pred[classname] = {}
if (im... |
def test_nested() -> None:
    """Nested try/except statements yield one block per body, in source order."""
    source = '\n try:\n x = 0\n try:\n x = 10\n except KeyError:\n pass\n except Exception:\n pass\n '
    expected = [['x = 0'], ['x = 10'], ['pass'], ['pass'], []]
    # Build the CFG for the snippet and compare the extracted statement blocks.
    actual = _extract_blocks(build_cfg(source))
    assert actual == expected
class FeedForward(nn.Module):
def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.0):
super().__init__()
inner_dim = int((dim * mult))
dim_out = default(dim_out, dim)
project_in = (nn.Sequential(nn.Linear(dim, inner_dim), nn.GELU()) if (not glu) else GEGLU(dim, inner_d... |
def test_complete(queue, transaction_factory):
queue.put(['somenamespace', 'abc', 'def'], TEST_MESSAGE_1, available_after=(- 10))
now = datetime.utcnow()
count = queue.num_available_jobs_between((now - timedelta(seconds=60)), now, ['/somenamespace'])
assert (count == 1)
item = queue.get()
assert... |
class TxsETHSpider(scrapy.Spider):
TXS_API_URL = '
custom_settings = {'ITEM_PIPELINES': {'BlockchainSpider.pipelines.SubgraphTxsPipeline': 298, 'BlockchainSpider.pipelines.ImportancePipeline': 299}}
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.info = dict()
self.sour... |
class UseFunction():
def __init__(self, project, resource, offset):
self.project = project
self.offset = offset
this_pymodule = project.get_pymodule(resource)
pyname = evaluate.eval_location(this_pymodule, offset)
if (pyname is None):
raise exceptions.RefactoringE... |
class _XXXX(_Stabilizer):
def entangle(self) -> None:
syndrome = self.qubit_indices[0]
top_l = self.qubit_indices[1]
top_r = self.qubit_indices[2]
bot_l = self.qubit_indices[3]
bot_r = self.qubit_indices[4]
if ((top_r and (not top_l)) or (bot_r and (not bot_l))):
... |
(IBodyProducer)
class StringProducer(object):
def __init__(self, body):
self.body = bytes(body, 'utf-8')
self.length = len(body)
def startProducing(self, consumer):
consumer.write(self.body)
return defer.succeed(None)
def pauseProducing(self):
pass
def stopProduci... |
class HP8116A(Instrument):
def __init__(self, adapter, name='Hewlett-Packard 8116A', **kwargs):
kwargs.setdefault('read_termination', '\r\n')
kwargs.setdefault('write_termination', '\r\n')
kwargs.setdefault('send_end', True)
super().__init__(adapter, name, includeSCPI=False, **kwargs... |
def current_config_in_supported_kernels(current_dtype_bw: QuantDtypeBwInfo, supported_kernels: List) -> bool:
for supported_kernel_config in supported_kernels:
act_config = supported_kernel_config[ConfigDictKeys.ACTIVATION]
param_config = None
if (ConfigDictKeys.PARAM in supported_kernel_con... |
def load_init_param(opts):
if (opts.output_dir != ''):
sync_dir = Path(opts.output_dir).resolve()
sync_dir.mkdir(parents=True, exist_ok=True)
sync_file = f'{sync_dir}/.torch_distributed_sync'
else:
raise RuntimeError("Can't find any sync dir")
if (opts.world_size != (- 1)):
... |
def get_network_fn(name, num_classes, weight_decay=0.0, is_training=False):
if (name not in networks_map):
raise ValueError(('Name of network unknown %s' % name))
func = networks_map[name]
(func)
def network_fn(images, scope=None):
arg_scope = arg_scopes_map[name](weight_decay=weight_dec... |
def create_model(image_size, num_channels, num_res_blocks, channel_mult='', learn_sigma=False, class_cond=False, use_checkpoint=False, attention_resolutions='16', num_heads=1, num_head_channels=(- 1), num_heads_upsample=(- 1), use_scale_shift_norm=False, dropout=0, resblock_updown=False, use_fp16=False, use_new_attenti... |
def signal_handler(sig, frame):
print('\nCatched keyboard interrupt, exit programm!')
try:
print('Remove empty directories and files before leaving...')
if args.execute:
for directory in os.scandir(args.output):
if (os.path.isdir(directory) and (not os.listdir(directo... |
class QlProcFS():
def self_auxv(os: 'QlOsLinux') -> QlFsMappedObject:
nbytes = (os.ql.arch.bits // 8)
auxv_addr = os.ql.loader.auxv
null_entry = bytes((nbytes * 2))
auxv_data = bytearray()
while (not auxv_data.endswith(null_entry)):
auxv_data.extend(os.ql.mem.read... |
class MLP_swiglu(nn.Module):
def __init__(self, mlp_dim: int=1024) -> None:
super().__init__()
hidden_dim = (4 * mlp_dim)
scaled_hidden = int(((2 * hidden_dim) / 3))
rounded_hidden = find_multiple(scaled_hidden, 256)
self.in_proj = nn.Linear(mlp_dim, rounded_hidden, bias=Fals... |
class Effect6604(BaseEffect):
type = 'passive'
def handler(fit, src, context, projectionRange, **kwargs):
fit.fighters.filteredItemBoost((lambda mod: mod.item.requiresSkill('Fighters')), 'fighterAbilityMissilesDamageMultiplier', src.getModifiedItemAttr('shipBonusSupercarrierC1'), skill='Caldari Carrier'... |
class AnnotationArrayField(ArrayField):
def deserialize(self, value):
if (not value):
value = DirtyableList([])
elements = []
for annotation in value:
if isinstance(annotation, dict):
elements.append(Annotation(annotation['description'], annotation['en... |
class GuiChangeProjectedDroneMetasCommand(wx.Command):
def __init__(self, fitID, itemIDs, newItemID):
wx.Command.__init__(self, True, 'Change Projected Drone Metas')
self.internalHistory = InternalCommandHistory()
self.fitID = fitID
self.itemIDs = itemIDs
self.newItemID = new... |
class RegisterFile(Component):
def construct(s, Type, nregs=32, rd_ports=1, wr_ports=1, const_zero=False):
addr_type = mk_bits(max(1, clog2(nregs)))
s.raddr = [InPort(addr_type) for i in range(rd_ports)]
s.rdata = [OutPort(Type) for i in range(rd_ports)]
s.waddr = [InPort(addr_type) ... |
class DataDF(object):
def __init__(self, features, click_ts, pay_ts, sample_ts=None, labels=None, delay_labels=None, inw_labels=None, attr_win=None):
self.x = features.copy(deep=True)
self.click_ts = copy.deepcopy(click_ts)
self.pay_ts = copy.deepcopy(pay_ts)
self.delay_labels = dela... |
def test_animal_ATRW_dataset_compatibility():
dataset = 'AnimalATRWDataset'
dataset_class = DATASETS.get(dataset)
channel_cfg = dict(num_output_channels=15, dataset_joints=15, dataset_channel=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]], inference_channel=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 1... |
class _DefinitionGenerator():
unique_prefix = unique_prefix()
def __init__(self, project, pyfunction, body=None):
self.project = project
self.pyfunction = pyfunction
self.pymodule = pyfunction.get_module()
self.resource = self.pymodule.get_resource()
self.definition_info ... |
class ProjectJoinView(LoginRequiredMixin, RedirectViewMixin, TemplateView):
template_name = 'core/error.html'
def get(self, request, token):
try:
invite = Invite.objects.get(token=token)
if invite.is_expired:
error = _('Sorry, your invitation has been expired.')
... |
def kmeans(samples, num_clusters: int, num_iters: int=10):
(dim, dtype) = (samples.shape[(- 1)], samples.dtype)
means = sample_vectors(samples, num_clusters)
for _ in range(num_iters):
diffs = (rearrange(samples, 'n d -> n () d') - rearrange(means, 'c d -> () c d'))
dists = (- (diffs ** 2).s... |
class AbstractPasswordDialog(Factory.Popup):
def __init__(self, app: 'ElectrumWindow', *, check_password=None, on_success: Callable=None, on_failure: Callable=None, is_change: bool=False, is_password: bool=True, has_password: bool=False, message: str='', basename: str=''):
Factory.Popup.__init__(self)
... |
class Nadam(Optimizer):
def __init__(self, params, lr=0.002, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, schedule_decay=0.004):
defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay, schedule_decay=schedule_decay)
super(Nadam, self).__init__(params, defaults)
def step(self, c... |
class TransformerDecoderLayerMaskedBN(nn.Module):
def __init__(self, args, no_encoder_attn=False, add_bias_kv=False, add_zero_attn=False):
super().__init__()
self.embed_dim = args.decoder_embed_dim
self.cross_self_attention = getattr(args, 'cross_self_attention', False)
self.self_att... |
class JWNumberIndicesTest(unittest.TestCase):
def test_jw_sparse_index(self):
expected = [1, 2]
calculated_indices = jw_number_indices(1, 2)
self.assertEqual(expected, calculated_indices)
expected = [3]
calculated_indices = jw_number_indices(2, 2)
self.assertEqual(exp... |
def register_coco_panoptic_separated(name, metadata, image_root, panoptic_root, panoptic_json, sem_seg_root, instances_json):
panoptic_name = (name + '_separated')
DatasetCatalog.register(panoptic_name, (lambda : merge_to_panoptic(load_coco_json(instances_json, image_root, panoptic_name), load_sem_seg(sem_seg_r... |
class MsgPath():
def __init__(self):
self.type = 'line'
self.airspeed = 25
self.line_origin = np.array([[0.0, 0.0, 0.0]]).T
self.line_direction = np.array([[1.0, 0.0, 0.0]]).T
self.orbit_center = np.array([[0.0, 0.0, 0.0]]).T
self.orbit_radius = 50
self.orbit_... |
class VBehavioralTranslatorL2(VBehavioralTranslatorL1, BehavioralTranslatorL2):
def _get_rtlir2v_visitor(s):
return BehavioralRTLIRToVVisitorL2
def rtlir_tr_behavioral_tmpvars(s, tmpvars):
make_indent(tmpvars, 1)
return '\n'.join(tmpvars)
def rtlir_tr_behavioral_tmpvar(s, id_, upblk_... |
class FocalLoss(nn.Module):
def __init__(self, alpha=1, gamma=2, reduce=True, **kwargs):
super(FocalLoss, self).__init__()
self.alpha = alpha
self.gamma = gamma
self.reduce = reduce
def forward(self, inputs, targets):
ce = F.cross_entropy(inputs, targets, reduction='none'... |
def handle_images(args, image_ids, h5_file):
with open(args.images_json, 'r') as f:
images = json.load(f)
if image_ids:
image_ids = set(image_ids)
(image_heights, image_widths) = ([], [])
(image_ids_out, image_paths) = ([], [])
for image in images:
image_id = image['image_id'... |
def test_reg(do_test):
def tv_in(m, tv):
m.in_ = Bits32(tv[0])
def tv_out(m, tv):
if (tv[1] != '*'):
assert (m.out == Bits32(tv[1]))
class VReg(Component, VerilogPlaceholder):
def construct(s):
s.in_ = InPort(Bits32)
s.out = OutPort(Bits32)
... |
.parametrize('annotation_class', (t.Dict, t.Mapping, t.MutableMapping))
def test_dict(module: DataclassModule, annotation_class: type) -> None:
cls = type('A', (object,), {'__annotations__': {'y': annotation_class[(int, int)]}})
A = module.dataclass(cls)
schema = desert.schema_class(A)()
data = schema.l... |
class SquirrelCheck(Object):
entries = List.T(SquirrelCheckEntry.T(), help='')
def get_nproblems(self):
return sum((len(entry.problems) for entry in self.entries))
def get_summary(self):
nproblems = self.get_nproblems()
lines = []
lines.append(('%i potential problem%s discove... |
def nmc_LGM50_diffusivity_ORegan2022(sto, T):
a1 = (- 0.9231)
a2 = (- 0.4066)
a3 = (- 0.993)
b1 = 0.3216
b2 = 0.4532
b3 = 0.8098
c0 = (- 13.96)
c1 = 0.002534
c2 = 0.003926
c3 = 0.09924
d = 1449
D_ref = ((10 ** (((c0 + (a1 * np.exp(((- ((sto - b1) ** 2)) / c1)))) + (a2 * n... |
def extract_best_from_event_file(event_path, log_details=False):
(steps, valid_accs) = ([], [])
try:
for event in tf.train.summary_iterator(event_path):
step = event.step
for value in event.summary.value:
if (value.tag == 'mean valid acc'):
ste... |
def pretf_test_function(func: Callable) -> Callable:
(func)
def wrapped(self: Any, *args: tuple, **kwargs: dict) -> Any:
if hasattr(self.__class__, '_failed'):
if (func.__name__ not in self._always):
pytest.xfail(f'{self.__class__} failed')
cwd_before = os.getcwd()
... |
class Tto_int_be(TestCase):
def test_empty(self):
self.failUnlessEqual(to_int_be(b''), 0)
def test_0(self):
self.failUnlessEqual(to_int_be(b'\x00'), 0)
def test_1(self):
self.failUnlessEqual(to_int_be(b'\x01'), 1)
def test_256(self):
self.failUnlessEqual(to_int_be(b'\x01\... |
.parametrize('qc_spec', [pytest.param(QCOptions(program='rdkit', basis=None, method='UFF'), id='rdkit uff'), pytest.param(QCOptions(program='torchani', basis=None, method='ani2x'), id='ani2x'), pytest.param(QCOptions(program='psi4', basis='6-311G', method='b3lyp'), id='psi4 b3lyp'), pytest.param(QCOptions(program='open... |
_auth
def edit_group(request, pk):
group = Group.objects.get(id=pk)
if (request.method == 'POST'):
try:
group_name = request.POST.get('group_name')
users = request.POST.getlist('users')
group_role = request.POST.getlist('group_role')
group.name = group_nam... |
class EnumSelectView(discord.ui.View):
class EnumSelect(discord.ui.Select):
def __init__(self, setting_name: str, enum_cls: EnumMeta, update_callback: Callable):
super().__init__(options=[SelectOption(label=elem.name) for elem in enum_cls])
self.setting_name = setting_name
... |
def put_link(name: str, url: str=None, app: str=None, new_window: bool=False, scope: str=None, position: int=OutputPosition.BOTTOM) -> Output:
assert (bool((url is None)) != bool((app is None))), 'Must set `url` or `app` parameter but not both'
href = (('javascript:WebIO.openApp(%r, %d)' % (app, new_window)) if... |
def test_simulate_trotter_unsupported_trotter_step_raises_error():
qubits = cirq.LineQubit.range(2)
control = cirq.LineQubit((- 1))
hamiltonian = openfermion.random_diagonal_coulomb_hamiltonian(2, seed=0)
time = 1.0
class EmptyTrotterAlgorithm(TrotterAlgorithm):
supported_types = {openfermio... |
_test
def test_recursion():
a = Input(shape=(32,), name='input_a')
b = Input(shape=(32,), name='input_b')
dense = Dense(16, name='dense_1')
a_2 = dense(a)
b_2 = dense(b)
merged = layers.concatenate([a_2, b_2], name='merge')
c = Dense(64, name='dense_2')(merged)
d = Dense(5, name='dense_3... |
def parse_args():
parser = argparse.ArgumentParser(description='Train a segmentation model')
parser.add_argument('config', type=str, help='config file path')
parser.add_argument('--distribute', default=False, action='store_true')
parser.add_argument('--local_rank', type=int, default=0)
args = parser... |
def test_walk_model():
a = pt.vector('a')
b = uniform(0.0, a, name='b')
c = pt.log(b)
c.name = 'c'
d = pt.vector('d')
e = normal(c, d, name='e')
test_graph = pt.exp((e + 1))
with pytest.warns(FutureWarning):
res = list(walk_model((test_graph,)))
assert (a in res)
assert (... |
class RandomTransforms(object):
def __init__(self, transforms):
assert isinstance(transforms, (list, tuple))
self.transforms = transforms
def __call__(self, *args, **kwargs):
raise NotImplementedError()
def __repr__(self):
format_string = (self.__class__.__name__ + '(')
... |
def build_val(config, is_train=True):
data_list = []
if ('vggface2' in config.eval_data):
data_list.append(VGGFace2Dataset(isEval=True, K=config.K, image_size=config.image_size, scale=[config.scale_min, config.scale_max], trans_scale=config.trans_scale, isSingle=config.isSingle))
if ('now' in config... |
class TestWeightPadUtils():
def test_recompute_encodings_assertion_error(self):
bw_params = WeightPaddingParams(target_kernel_bw=4, simulated_bw=12)
quantizer = StaticGridPerTensorQuantizer(bitwidth=8, round_mode='nearest', quant_scheme=QuantScheme.post_training_tf, use_symmetric_encodings=False, en... |
(auto_attribs=True)
class ConfigSchema():
scenes: str = ''
scene_prefix: str = ''
scene_suffix: str = ''
direct_image_prompts: str = ''
init_image: str = ''
direct_init_weight: str = ''
semantic_init_weight: str = ''
image_model: str = field(default='Unlimited Palette')
vqgan_model: ... |
def step_or_stages(name, spec, inputs, parameters, state_provider, stageview):
dependencies = [stageview.dag.getNode(k.stepid) for k in inputs]
depstates = [d.task.state for d in set(dependencies) if d.task.state]
if ('step' in spec):
step_state = state_provider.new_state(name, depstates)
p ... |
class ServiceStateTableType(GeneratedsSuper):
__hash__ = GeneratedsSuper.__hash__
subclass = None
superclass = None
def __init__(self, stateVariable=None, gds_collector_=None, **kwargs_):
self.gds_collector_ = gds_collector_
self.gds_elementtree_node_ = None
self.original_tagname... |
def _find_coding(text):
if isinstance(text, str):
text = text.encode('utf-8')
coding = b'coding'
to_chr = chr
try:
start = (text.index(coding) + len(coding))
if (text[start] not in b'=:'):
return
start += 1
while ((start < len(text)) and to_chr(text[st... |
_label
def hash_map_loop(f, ht, index, w_acc, env, cont):
from pycket.interpreter import return_value
try:
(w_key, w_value) = ht.get_item(index)
except KeyError:
return hash_map_loop(f, ht, (index + 1), w_acc, env, cont)
except IndexError:
return return_value(w_acc, env, cont)
... |
class WignerDistribution(Distribution):
def __init__(self, rho=None, extent=[[(- 5), 5], [(- 5), 5]], steps=250):
self.xvecs = [np.linspace(extent[0][0], extent[0][1], steps), np.linspace(extent[1][0], extent[1][1], steps)]
self.xlabels = ['$\\rm{Re}(\\alpha)$', '$\\rm{Im}(\\alpha)$']
if rho... |
def test_debug_verbosity(hatch, temp_dir, helpers):
project_name = 'My.App'
with temp_dir.as_cwd():
result = hatch('new', project_name)
assert (result.exit_code == 0), result.output
path = (temp_dir / 'my-app')
with path.as_cwd():
result = hatch('-v', 'build', '-t', 'wheel:standa... |
def decode_stopping_sequences_where_needed(tokenizer: PreTrainedTokenizer, stopping_sequences: List[Union[(str, int, List[int])]]) -> List[str]:
if (not stopping_sequences):
return None
return [(decode_tokens(tokenizer, sequence) if (not isinstance(sequence, str)) else sequence) for sequence in stopping... |
class Recognizer():
def __init__(self, pm, am, lm, config):
self.pm = pm
self.am = am
self.lm = lm
self.config = config
def is_available(self, lang_id):
return self.lm.inventory.is_available(lang_id)
def recognize(self, filename, lang_id='ipa', topk=1, emit=1.0, times... |
('a style of type {style_type}')
def given_a_style_of_type(context, style_type):
document = Document(test_docx('sty-known-styles'))
name = {'WD_STYLE_TYPE.CHARACTER': 'Default Paragraph Font', 'WD_STYLE_TYPE.LIST': 'No List', 'WD_STYLE_TYPE.PARAGRAPH': 'Normal', 'WD_STYLE_TYPE.TABLE': 'Normal Table'}[style_type... |
(eq=False, hash=False, repr=False)
class _TaskStatus(TaskStatus[StatusT]):
_old_nursery: Nursery = attr.ib()
_new_nursery: Nursery = attr.ib()
_value: (StatusT | type[_NoStatus]) = attr.ib(default=_NoStatus)
def __repr__(self) -> str:
return f'<Task status object at {id(self):#x}>'
def start... |
class ChatGPTStreamResponse():
def __init__(self, response: Stream[ChatCompletionChunk]):
self.response = response
self.yielded = []
self.finish_reason = None
def __next__(self):
chunk = next(self.response)
self.finish_reason = chunk.choices[0].finish_reason
delta... |
class Repository(AbstractRepository):
def __init__(self, name: str, packages: (list[Package] | None)=None) -> None:
super().__init__(name)
self._packages: list[Package] = []
for package in (packages or []):
self.add_package(package)
def packages(self) -> list[Package]:
... |
class PassThrough(ComponentLevel5):
_port
def req(s, msg):
assert s.resp_rdy()
s.resp(msg)
_port
def req_rdy(s):
return s.resp_rdy()
def construct(s):
s.resp = CalleePort()
s.resp_rdy = CalleePort()
s.entry = None
s.add_constraints((M(s.req) ==... |
class SelfRemoveCommand(SelfCommand, RemoveCommand):
name = 'self remove'
description = "Remove additional packages from Poetry's runtime environment."
options = [o for o in RemoveCommand.options if (o.name in {'dry-run'})]
help = f'''The <c1>self remove</c1> command removes additional package's to Poet... |
def format_code(h: str) -> str:
a = h.splitlines()
r = []
i = 0
while (i < len(a)):
if (a[i].startswith(' ') or a[i].startswith('```')):
indent = a[i].startswith(' ')
if (not indent):
i += 1
r.append('<pre>')
while ((i < len(a... |
def test_custom_validator_class_can_pass_when_valid(run_line, tmp_path):
doc = (tmp_path / 'valid.json')
doc.write_text(json.dumps(VALID_DOC))
schema = (tmp_path / 'schema.json')
schema.write_text(json.dumps(SCHEMA))
result = run_line(['check-jsonschema', '--schemafile', str(schema), str(doc)])
... |
def _validate_semantics(quantsim_config: ConfigDictType):
default_op_configs = quantsim_config[ConfigDictKeys.DEFAULTS][ConfigDictKeys.OPS]
if (ConfigDictKeys.IS_INPUT_QUANTIZED in default_op_configs):
logger.error('Currently IS_INPUT_QUANTIZED setting in default configs is not supported')
raise... |
class SimplifiedScaledDotProductAttention(nn.Module):
def __init__(self, d_model, h, dropout=0.1):
super(SimplifiedScaledDotProductAttention, self).__init__()
self.d_model = d_model
self.d_k = (d_model // h)
self.d_v = (d_model // h)
self.h = h
self.fc_o = nn.Linear((... |
def _dump_1e_ints(hij: List[List[float]], mos: Union[(range, List[int])], outfile: TextIO, beta: bool=False) -> None:
idx_offset = (1 if (not beta) else (1 + len(mos)))
hij_elements = set()
for (i, j) in itertools.product(mos, repeat=2):
if (i == j):
_write_to_outfile(outfile, hij[i][j],... |
(config_path='../exp_config', config_name='config')
def main(global_cfg):
cfg = global_cfg.inference.ensemble
if cfg.selected_pt:
pts = cfg.selected_pt.strip().split(',')
else:
pts = get_product_types(cfg.emb_dir)
tune.run(run_mixed_inference_pt, config={'cfg': cfg, 'pt': tune.grid_searc... |
def test_if_uninferable() -> None:
(node1, node2) = builder.extract_node('\n def f1():\n x = None\n if x is not None:\n x #\n\n def f2():\n x = 1\n if x is not None:\n pass\n else:\n x #\n ')
inferred = node1.inferred()
assert (l... |
def main():
args = create_argparser().parse_args()
dist_util.setup_dist()
logger.configure()
logger.log('creating model and diffusion...')
(model, diffusion) = create_model_and_diffusion(**args_to_dict(args, model_and_diffusion_defaults().keys()))
model.load_state_dict(dist_util.load_state_dict(... |
def renamePyc(startDir):
printed = False
startDir = os.path.abspath(startDir)
for (path, dirs, files) in os.walk(startDir):
if ('__pycache__' in path):
continue
for f in files:
fileName = os.path.join(path, f)
(base, ext) = os.path.splitext(fileName)
... |
class TestClassAttributeChecker(TestNameCheckVisitorBase):
_passes()
def test_mangled_attributes(self):
class Capybara(object):
def __mangled(self):
pass
def other_method(self):
self.__mangled()
_passes()
def test_never_set(self):
c... |
class ProcessStarter(ABC):
env = None
timeout = 120
popen_kwargs = {}
max_read_lines = 50
terminate_on_interrupt = False
def __init__(self, control_dir, process):
self.control_dir = control_dir
self.process = process
def args(self):
def pattern(self):
def startup_chec... |
def test_default_attribute():
json_schema = '\n {\n "type": "object",\n "properties": {\n "a_string": {\n "type": "string",\n "default": "Default value"\n }\n }\n }\n '
Draft202012Validator.check_schema(loads(json_schema))
str... |
class AnthropicAPIWrapper(BaseAPIWrapper):
def _call_api(prompt: str, max_tokens: int, engine: str, stop_token: str, temperature: float, num_completions: int=1) -> dict:
prompt = f'{anthropic.HUMAN_PROMPT} {prompt}{anthropic.AI_PROMPT}'
response = client.completion(prompt=prompt, stop_sequences=[ant... |
def test_init():
init = OSC.Init()
TD = OSC.TransitionDynamics(OSC.DynamicsShapes.step, OSC.DynamicsDimension.rate, 1)
egospeed = OSC.AbsoluteSpeedAction(10, TD)
init.add_init_action('Ego', egospeed)
init.add_init_action('Ego', OSC.TeleportAction(OSC.WorldPosition(1, 2, 3, 0, 0, 0)))
init.add_in... |
class ExactGPModel(gpytorch.models.ExactGP):
def __init__(self, train_x, train_y, likelihood, mean, ard_flag=False):
super(ExactGPModel, self).__init__(train_x, train_y, likelihood)
self.mean_module = gpytorch.means.ConstantMean()
self.covar_module = mean()
if ard_flag:
s... |
def delete_team_permission(team_name, namespace_name, repository_name):
    """Remove the named team's permission entry on the given repository.

    Raises:
        DataModelException: if the team has no permission on the repository.
    """
    matches = list(__entity_permission_repo_query(team_name, Team, Team.name, namespace_name, repository_name))
    if not matches:
        raise DataModelException('Team does not have permission for repo.')
    # Only the first matching permission row is deleted, mirroring the
    # original behavior (the query is expected to yield at most one row —
    # TODO confirm against __entity_permission_repo_query).
    matches[0].delete_instance()
def _read_full_embeddings_process_fn(chunk_idxs: Tuple[(int, int)], h5_file_path: Path, hidden_dim: int, model_seq_len: int) -> np.ndarray:
num_embs = (chunk_idxs[1] - chunk_idxs[0])
embs = np.zeros(shape=(num_embs, model_seq_len, hidden_dim), dtype=np.float32)
emb = np.zeros((model_seq_len, hidden_dim), dt... |
def test_module_level_skip_error(pytester: Pytester) -> None:
pytester.makepyfile('\n import pytest\n pytest.skip("skip_module_level")\n\n def test_func():\n assert True\n ')
result = pytester.runpytest()
result.stdout.fnmatch_lines(['*Using pytest.skip outside of a test w... |
class LevelMapper():
def __init__(self, k_min, k_max, canonical_scale=224, canonical_level=3, eps=1e-06):
self.k_min = k_min
self.k_max = k_max
self.s0 = canonical_scale
self.lvl0 = canonical_level
self.eps = eps
def __call__(self, boxlists):
s = torch.sqrt(cat([b... |
class Solution():
def generate(self, numRows):
result = []
for i in range(numRows):
result.append(([0] * (i + 1)))
for i in range(numRows):
for j in range((i + 1)):
if ((j == 0) or (j == i)):
result[i][j] = 1
else:
... |
def test_trigger():
with expected_protocol(LeCroyT3DSO1204, [(b'CHDR OFF', None), (b'TRSE?', b'EDGE,SR,C1,HT,OFF'), (b'TRMD?', b'AUTO'), (b'C1:TRCP?', b'DC'), (b'C1:TRLV?', b'1.50E-01'), (b'C1:TRLV2?', b'1.50E-01'), (b'C1:TRSL?', b'POS')]) as instr:
assert (instr.trigger == {'mode': 'auto', 'trigger_type': ... |
.parametrize('pyfile_count', [1, 2])
def test_mypy_success(testdir, pyfile_count, xdist_args):
testdir.makepyfile(**{'pyfile_{0}'.format(pyfile_i): '\n def pyfunc(x: int) -> int:\n return x * 2\n ' for pyfile_i in range(pyfile_count)})
result = testdir.runpytest_subp... |
class BaseSelector(discord.ui.View):
message: discord.Message
def __init__(self, author_id, selector: discord.ui.Select, **kwargs):
self.author_id = author_id
self.custom_id = None
super().__init__(timeout=30.0)
self.add_item(selector(**kwargs))
async def interaction_check(se... |
def _build_key_size_numel_dictionaries(keys, data):
max_dim = _MAX_DATA_DIM
sizes = [0 for _ in range(max_dim) for _ in keys]
if (get_model_parallel_rank() == 0):
offset = 0
for key in keys:
assert (data[key].dim() < max_dim), 'you should increase MAX_DATA_DIM'
size =... |
def _postprocess_yml_value(value):
if ((value == '~') or (value.lower() == 'none')):
return None
if (value.lower() == 'true'):
return True
elif (value.lower() == 'false'):
return False
if value.startswith('!!float'):
return float(value.replace('!!float', ''))
if value... |
def _get_cosine_schedule_with_warmup_lr_lambda(current_step: int, *, num_warmup_steps: int, num_training_steps: int, num_cycles: float):
if (current_step < num_warmup_steps):
return (float(current_step) / float(max(1, num_warmup_steps)))
progress = (float((current_step - num_warmup_steps)) / float(max(1... |
def build_node_set(node, s=None):
if (s is None):
s = set()
if ((node is None) or ((node in s) and all(((n in s) for n in node.upstreams)) and all(((n in s) for n in node.downstreams)))):
return
new_nodes = {n for n in node.downstreams}
new_nodes.update(node.upstreams)
new_nodes.add(... |
def print_tree_deps_of(module, all_edges=None):
if (all_edges is None):
all_edges = create_reverse_dependency_tree()
tree = get_tree_starting_at(module, all_edges)
lines = [(tree[0], tree[0])]
for index in range(1, len(tree)):
edges = tree[index]
start_edges = {edge[0] for edge i... |
def get_encoder(encoding, input_dim=3, multires=6, degree=4, num_levels=16, level_dim=2, base_resolution=16, log2_hashmap_size=19, desired_resolution=2048, align_corners=False, **kwargs):
if (encoding == 'None'):
return ((lambda x, **kwargs: x), input_dim)
elif (encoding == 'frequency'):
from fr... |
_module()
class DistributionFocalLoss(nn.Module):
def __init__(self, reduction='mean', loss_weight=1.0):
super(DistributionFocalLoss, self).__init__()
self.reduction = reduction
self.loss_weight = loss_weight
def forward(self, pred, target, weight=None, avg_factor=None, reduction_overrid... |
class NonActiveWindowFocusTests(unittest.TestCase):
def setUp(self):
Timings.fast()
self.app = Application()
self.app.start(os.path.join(mfc_samples_folder, u'CmnCtrl3.exe'))
self.app2 = Application().start(_notepad_exe())
def tearDown(self):
self.app.kill()
self.... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.