code stringlengths 101 5.91M |
|---|
_utils.test()
def test_default_template_args_on_func():
    """A template-typed parameter default (123) should be used when the
    caller omits the argument.

    NOTE(review): the kernel/func decorators (presumably @ti.func on ``bar``
    and @ti.kernel on ``foo``) appear to have been stripped during
    extraction — confirm against the upstream taichi test suite.
    """
    def bar(a: ti.template()=123):
        # Returns whatever was passed — or the default 123.
        return a
    def foo() -> ti.i32:
        # Calls bar() with no argument, so the default applies.
        return bar()
    assert (foo() == 123)
def test_predict(create_X_y, create_pool_classifiers):
    """An Oracle fit on (X, y) must reproduce y exactly and score 1.0."""
    X, y = create_X_y
    oracle = Oracle(create_pool_classifiers)
    oracle.fit(X, y)
    predictions = oracle.predict(X, y)
    # Element-wise comparison; every predicted label must equal its target.
    assert np.equal(predictions, y).all()
    assert oracle.score(X, y) == 1.0
def cosine_rampdown(current, rampdown_length):
    """Cosine rampdown from https://arxiv.org/abs/1608.03983 (SGDR).

    Decays smoothly along half a cosine period: returns 1.0 at
    ``current == 0`` and 0.0 at ``current == rampdown_length``.

    Args:
        current: Current step; must lie in [0, rampdown_length].
        rampdown_length: Total length of the rampdown (> 0).

    Returns:
        float in [0.0, 1.0].
    """
    # Defect fixed: the original docstring string literal was unterminated
    # (a syntax error); restored as a proper docstring.
    assert (0 <= current <= rampdown_length)
    # max(0.0, ...) guards against tiny negative values from float rounding.
    return float(max(0.0, 0.5 * (np.cos(np.pi * current / rampdown_length) + 1)))
class ThroughputNormalizedByCostSumWithPerf(Policy):
def __init__(self, solver, num_threads=None):
self._name = 'ThroughputNormalizedByCostSum_Perf'
self._policy = ThroughputNormalizedByCostSumWithPerfSLOs(solver, num_threads=num_threads)
def get_allocation(self, unflattened_throughputs, scale_f... |
class PPM(nn.Module):
def __init__(self, num_class=150, fc_dim=4096, use_softmax=False, pool_scales=(1, 2, 3, 6)):
super(PPM, self).__init__()
self.use_softmax = use_softmax
self.ppm = []
for scale in pool_scales:
self.ppm.append(nn.Sequential(nn.AdaptiveAvgPool2d(scale),... |
class GANLoss(nn.Module):
def __init__(self, use_lsgan=True, target_real_label=1.0, target_fake_label=0.0):
super(GANLoss, self).__init__()
self.register_buffer('real_label', torch.tensor(target_real_label))
self.register_buffer('fake_label', torch.tensor(target_fake_label))
if use_l... |
def get_training_list(file_list):
    """Derive unique sentence ids from *file_list*, shuffle them, and return
    the training split.

    NOTE(review): the split fraction comes from a module-level ``ratio``
    not visible here — confirm it is defined where this function lives.
    """
    # "dir/spk_sent_chunk.ext" -> "spk_sent": basename minus its last '_' part.
    unique_sentences = {'_'.join(fn.split('/')[-1].split('_')[:-1]) for fn in file_list}
    shuffled = list(unique_sentences)
    random.shuffle(shuffled)
    cutoff = int(len(shuffled) * ratio)
    return shuffled[:cutoff]
def load_compression_model(model_path, cache_dir, device):
print('Loading compression Model')
tokenizer = AutoTokenizer.from_pretrained('facebook/bart-large', cache_dir=cache_dir, use_fast=False)
model = AutoModelForSeq2SeqLM.from_pretrained(model_path).to(device)
print('Done')
return (model, tokeni... |
def main():
    """CLI entry point: parse paths and hand them to parse_file."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('input_path', help='Path to a CMFGEN file')
    arg_parser.add_argument('output_path', help='Path to store converted TARDIS format files')
    parsed = arg_parser.parse_args()
    parse_file(parsed)
def e2h(omega: complex, dxes: dx_lists_t, mu: field_t=None) -> functional_matrix:
A2 = curl_e(dxes)
def e2h_1_1(e):
return [(y / ((- 1j) * omega)) for y in A2(e)]
def e2h_mu(e):
return [(y / (((- 1j) * omega) * m)) for (y, m) in zip(A2(e), mu)]
if np.any(np.equal(mu, None)):
retu... |
class AST_Matrix_Row(AST_Node):
def __init__(self, context, elements):
AST_Node.__init__(self, context)
self.elements = elements
if (not isinstance(self.elements, list)):
raise ValueError(('AST_Matrix_Row() expects a list of elements, got ' + str(type(self.elements))))
def pr... |
def test_nested_objects_same_name():
class ObjA():
def __init__(self, q) -> None:
self.q = np.full([20], q)
def __call__(self, A):
return (A + self.q)
class ObjB():
def __init__(self, q) -> None:
self.q = np.full([20], q)
self.obja = ObjA((... |
class BlobsQueueDBTest(test_util.TestCase):
def test_create_blobs_queue_db_string(self):
def add_blobs(queue, num_samples):
blob = core.BlobReference('blob')
status = core.BlobReference('blob_status')
for i in range(num_samples):
self._add_blob_to_queue(qu... |
def splder(tck, n=1):
if (n < 0):
return splantider(tck, (- n))
(t, c, k) = tck
if (n > k):
raise ValueError(('Order of derivative (n = %r) must be <= order of spline (k = %r)' % (n, tck[2])))
sh = ((slice(None),) + ((None,) * len(c.shape[1:])))
with np.errstate(invalid='raise', divi... |
class GoogleCalendarSearchEvents(VirtualFunctionTool):
name = 'GoogleCalendarSearchEvents'
summary = 'Search events by keywords, date range, or attendees. If certain arguments are not provided, the corresponding filters are not applied.'
parameters: List[ArgParameter] = [{'name': 'keywords', 'type': 'array'... |
class TestEventHandling(unittest.TestCase):
def test_odeint(self):
for reverse in (False, True):
for dtype in DTYPES:
for device in DEVICES:
for method in METHODS:
if (method == 'scipy_solver'):
continue
... |
def _load_dataset(dataroot, name, img_id2val, label2ans):
data_path = os.path.join(dataroot, (name + 'set.json'))
samples = json.load(open(data_path))
samples = sorted(samples, key=(lambda x: x['qid']))
answer_path = os.path.join(dataroot, 'cache', ('%s_openclose_target.pkl' % name))
answers = cPick... |
def load_from_nifti(parent_dir, percent_train, shuffle, channels_last=True, task='whole_tumor', **kwargs):
path = os.path.join(parent_dir)
subdirs = os.listdir(path)
subdirs.sort()
if (not subdirs):
raise SystemError(f'''{parent_dir} does not contain subdirectories.
Please make sure you have Bra... |
def create_local_explanation_layout(state) -> html.Div:
    """Build the two-column local-explanation view: control panel on the
    left (3 columns), explanation content on the right (9 columns)."""
    left_column = html.Div(
        id='left-column-local',
        className='three columns',
        children=[create_control_panel(state)],
    )
    right_column = html.Div(
        className='nine columns',
        children=create_right_column(state),
    )
    return html.Div(id='local_explanation_views', children=[left_column, right_column])
class SVNProjectCheckout(ProjectCheckout):
def __init__(self, name: str, version: str, url: str, revision: str, base_path: str):
super().__init__(url, base_path, name)
self.version = version
self.revision = revision
self.__base_checkout_dir = self.checkout_dir
self.checkout_d... |
class TestPreprocess(unittest.TestCase):
def setUp(self):
with open(klpt.get_data('data/default-options.json'), encoding='utf-8') as f:
self.options = json.load(f)
def tearDown(self):
pass
def testInsufficientArgs(self):
for dialect in self.options['dialects']:
... |
class SubwordTextEncoder(_BaseTextEncoder):
def __init__(self, spm):
if ((spm.pad_id() != 0) or (spm.eos_id() != 1) or (spm.unk_id() != 2)):
raise ValueError('Please train sentencepiece model with following argument:\n--pad_id=0 --eos_id=1 --unk_id=2 --bos_id=-1 --model_type=bpe --eos_piece=<eos... |
def put_cmd_to_list(cmds_list: list, buffer: bytearray, type: int):
assert (type in {EngineType.TPU, EngineType.GDMA, EngineType.SDMA, EngineType.HAU})
print('cmd_buffer_byary len : {}'.format(len(buffer)))
if (len(buffer) != 0):
cmd = BaseCmd(buffer.copy(), type)
cmds_list.append(cmd)
... |
def update_joint_plots(plot_data1: LivePlotData, plot_data2: LivePlotData, display_id: DisplayHandle, logging_steps: int=1, fig: Optional[(plt.Figure | plt.FigureBase)]=None):
if (not is_interactive()):
return
plot_obj1 = plot_data1.plot_obj
plot_obj2 = plot_data2.plot_obj
y1 = np.array(plot_dat... |
def is_dagshub_available():
    """Return True when both ``dagshub`` and ``mlflow`` are importable."""
    required_packages = ('dagshub', 'mlflow')
    return all(importlib.util.find_spec(pkg) is not None for pkg in required_packages)
_seed
.slow
def test_bayesian_optimizer_with_sgpr_finds_minima_of_scaled_branin() -> None:
_test_optimizer_finds_minimum(SparseGaussianProcessRegression, 9, EfficientGlobalOptimization[(SearchSpace, SparseGaussianProcessRegression)](), optimize_branin=True)
_test_optimizer_finds_minimum(SparseGaussianProcessReg... |
class FlashlightDecoderConfig(FairseqDataclass):
nbest: int = field(default=1, metadata={'help': 'Number of decodings to return'})
unitlm: bool = field(default=False, metadata={'help': 'If set, use unit language model'})
lmpath: str = field(default=MISSING, metadata={'help': 'Language model for KenLM decode... |
def list_datasets(folder: Union[(Path, str)]=dataset_dir):
files = sorted((file.stem for file in Path(folder).iterdir() if (file.suffix == '.pkl')))
dirs = sorted((d for d in Path(folder).iterdir() if (d.is_dir() and (not d.name.startswith('_')))))
for d in dirs:
if (d.is_dir() and (not d.stem.start... |
class LinearizationMode(enum.Enum):
    """Closed set of linearization modes; each value is its snake_case name."""

    STACKED_JACOBIAN = 'stacked_jacobian'
    FULL_LINEARIZATION = 'full_linearization'
def create_2d_box(box_2d):
    """Expand two opposite corners into the four corners of an axis-aligned
    rectangle.

    Order: first corner, (x_first, y_second), second corner, (x_second, y_first).
    """
    first, second = box_2d[0], box_2d[1]
    across_one = (first[0], second[1])
    across_two = (second[0], first[1])
    return (first, across_one, second, across_two)
_model_architecture('transformer_lm', 'transformer_lm_wiki103')
_model_architecture('transformer_lm', 'transformer_lm_baevski_wiki103')
def transformer_lm_baevski_wiki103(args):
args.decoder_layers = getattr(args, 'decoder_layers', 16)
args.decoder_attention_heads = getattr(args, 'decoder_attention_heads', 8)
... |
def main():
args = parse_args()
if (args is None):
exit()
with tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) as sess:
gan = MUNIT(sess, args)
gan.build_model()
show_all_variables()
if (args.phase == 'train'):
gan.train()
print(' ... |
def rand_split_log_normal(shape, loc, scale_1, scale_2, device='cpu', dtype=torch.float32):
n = torch.randn(shape, device=device, dtype=dtype).abs()
u = torch.rand(shape, device=device, dtype=dtype)
n_left = ((n * (- scale_1)) + loc)
n_right = ((n * scale_2) + loc)
ratio = (scale_1 / (scale_1 + scal... |
class triang_gen(rv_continuous):
def _rvs(self, c, size=None, random_state=None):
return random_state.triangular(0, c, 1, size)
def _argcheck(self, c):
return ((c >= 0) & (c <= 1))
def _shape_info(self):
return [_ShapeInfo('c', False, (0, 1.0), (True, True))]
def _pdf(self, x, c)... |
def discretize(xs):
    """Map each sequence in *xs* to a hashable value: multi-element
    sequences become tuples, single-element ones collapse to their item."""
    return [tuple(x) if len(x) > 1 else x[0] for x in xs]
def masked_softmax(matrix, mask=None, q_mask=None, dim=(- 1)):
NEG_INF = (- 1e-06)
TINY_FLOAT = 1e-06
if (q_mask is not None):
mask = (~ mask.byte()).float().unsqueeze((- 1))
q_mask = (~ q_mask.byte()).float().unsqueeze((- 1)).transpose(1, 2).contiguous()
mask = (~ torch.bmm(mask, q_... |
_module()
class ConcatDataset(_ConcatDataset):
def __init__(self, datasets, separate_eval=True):
super(ConcatDataset, self).__init__(datasets)
self.CLASSES = datasets[0].CLASSES
self.separate_eval = separate_eval
if (not separate_eval):
if any([isinstance(ds, CocoDataset)... |
class lib_wrapper():
__slots__ = ['_lib', '_fntab']
def __init__(self, lib):
self._lib = lib
self._fntab = {}
def __getattr__(self, name):
try:
return self._fntab[name]
except KeyError:
cfn = getattr(self._lib, name)
wrapped = _lib_fn_wrapp... |
_footprint
def dilation(image, footprint=None, out=None, shift_x=DEPRECATED, shift_y=DEPRECATED, *, mode='reflect', cval=0.0):
if (out is None):
out = np.empty_like(image)
if (mode not in _SUPPORTED_MODES):
raise ValueError(f'unsupported mode, got {mode!r}')
if (mode == 'ignore'):
mo... |
.parametrize('method', ['l-bfgs-b', 'tnc', 'Powell', 'Nelder-Mead'])
def test_minimize_with_scalar(method):
def f(x):
return np.sum((x ** 2))
res = optimize.minimize(f, 17, bounds=[((- 100), 100)], method=method)
assert res.success
assert_allclose(res.x, [0.0], atol=1e-05) |
class Net_orig(torch.nn.Module):
def __init__(self, dataset):
super(Net2, self).__init__()
self.conv1 = GCNConv(dataset.num_features, args.hidden)
self.conv2 = GCNConv(args.hidden, dataset.num_classes)
def reset_parameters(self):
self.conv1.reset_parameters()
self.conv2.r... |
def unicode_to_utf8(d):
    """Return a copy of *d* whose keys are UTF-8 encoded to bytes."""
    return {key.encode('UTF-8'): value for key, value in d.items()}
.parametrize('arr', [np.arange(2), np.matrix([0, 1]), np.matrix([[0, 1], [2, 5]])])
def test_outer_subclass_preserve(arr):
class foo(np.ndarray):
pass
actual = np.multiply.outer(arr.view(foo), arr.view(foo))
assert (actual.__class__.__name__ == 'foo') |
def register_Ns3CallbackImplBase_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
cls.add_method('GetTypeid', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
cls.add_method('IsEqual', 'bool', [param('ns3::Pt... |
def check_empty_target(targets):
    """Return True if any target in *targets* has an empty 'boxes' entry.

    Args:
        targets: iterable of dicts, each with a sized ``'boxes'`` value.

    Returns:
        bool: True on the first target whose boxes are empty, else False.
    """
    # Idiomatic any() replaces the manual loop-and-return; short-circuits
    # exactly like the original.
    return any(len(target['boxes']) < 1 for target in targets)
class PairwiseMetric(BaseMetric):
def __init__(self, func, name=None, **kwargs):
name = (func.__name__ if (name is None) else name)
self.func = func
super(PairwiseMetric, self).__init__(name=name, **kwargs)
def compute(self, y_true, y_pred):
mean = self.func(y_true, y_pred)
... |
def get_assignment_map_from_checkpoint(tvars, init_checkpoint):
assignment_map = {}
initialized_variable_names = {}
name_to_variable = collections.OrderedDict()
for var in tvars:
name = var.name
m = re.match('^(.*):\\d+$', name)
if (m is not None):
name = m.group(1)
... |
class Axmodel(nn.Module):
def __init__(self, opt):
super(Axmodel, self).__init__()
self.opt = opt
self.action_dim = self.opt.action_dim
self.state_dim = self.opt.state_dim
self.state_fc = nn.Sequential(nn.Linear(self.state_dim, 128), nn.ReLU())
self.action_fc = nn.Seq... |
class SegfaultDataset(Dataset):
    """Test-helper dataset whose __getitem__ deliberately crashes the process.

    ``ctypes.string_at(0)`` reads from a NULL pointer, which segfaults the
    interpreter — presumably used to exercise worker-crash handling in a
    data-loading pipeline (TODO confirm against callers).
    """
    def __init__(self, size):
        # Nominal item count; items are never actually readable.
        self.size = size
    def __getitem__(self, idx):
        # Intentional NULL-pointer dereference: crashes instead of returning.
        return ctypes.string_at(0)
    def __len__(self):
        return self.size
class ScilabElement(ExpectElement):
def __getitem__(self, n):
if isinstance(n, tuple):
index = str(n)[1:(- 1)]
else:
index = str(n)
return self.parent()(('%s(%s)' % (self._name, index)))
def __setitem__(self, n, value):
if isinstance(n, tuple):
... |
def extract_email_inbox(utterance):
    """Match *utterance* against EMAIL_INBOX_PATTERNS and return the
    captured groups keyed by the pattern's field names.

    Raises:
        ValueError: when no pattern matches.
    """
    for task, regex, keys in EMAIL_INBOX_PATTERNS:
        found = re.match(regex, utterance)
        if found is None:
            continue
        return dict(zip(keys, found.groups()))
    raise ValueError('Bad email-inbox utterance: {}'.format(utterance))
class A(FairseqDataclass):
    """Dataclass-style config: each field pairs a default with a CLI help
    string in ``metadata`` (fairseq convention)."""
    # Input data identifier.
    data: str = field(default='test', metadata={'help': 'the data input'})
    # Layer count for the model.
    num_layers: int = field(default=200, metadata={'help': 'more layers is better?'})
def find_tasklet_by_connector(sdfg: SDFG, name: str):
    """Return the first node in ``sdfg.start_state`` that has *name* among
    its input or output connectors.

    Raises:
        NodeNotFoundError: if no node carries such a connector.
    """
    for node, _ in sdfg.start_state.all_nodes_recursive():
        # The original if/elif branches both returned the node, so a single
        # combined membership test is equivalent and clearer.
        if name in node.in_connectors or name in node.out_connectors:
            return node
    raise NodeNotFoundError(f'Could not find connector "{name}"')
def _autograd_grad(outputs, inputs, grad_outputs=None, create_graph=False, retain_graph=None):
assert isinstance(outputs, tuple)
if (grad_outputs is None):
grad_outputs = ((None,) * len(outputs))
assert isinstance(grad_outputs, tuple)
assert (len(outputs) == len(grad_outputs))
new_outputs: T... |
class Sigma():
def __repr__(self):
return 'Function that adds up (k-th powers of) the divisors of n'
def __call__(self, n, k=1):
n = ZZ(n)
k = ZZ(k)
one = ZZ(1)
if (k == ZZ(0)):
return prod(((expt + one) for (p, expt) in factor(n)))
elif (k == one):
... |
def test_elements(default_test_case):
int0 = stmt.IntPrimitiveStatement(default_test_case, 3)
dummy = DummyCollectionStatement(default_test_case, default_test_case.test_cluster.type_system.convert_type_hint(list[int]), [int0.ret_val])
default_test_case.add_statements([int0, dummy])
assert (dummy.element... |
def test_rnn(helpers):
modules = [RNNEncoder(input_size=8, output_size=6, module='LSTM', hidden_size=[10, 10, 10], dropout=[0.1, 0.1, 0.1], layer_norm=[True, True, True], proj=[True, True, True], sample_rate=[1, 2, 1], sample_style='drop', bidirectional=True), RNNEncoder(input_size=8, output_size=6, module='LSTM', ... |
class ROIBoxHead(torch.nn.Module):
def __init__(self, cfg, in_channels):
super(ROIBoxHead, self).__init__()
self.feature_extractor = make_roi_box_feature_extractor(cfg, in_channels)
self.predictor = make_roi_box_predictor(cfg, self.feature_extractor.out_channels)
self.post_processor ... |
def fit_rbv2_super(key='rbv2_super', **kwargs):
tfms = {}
[tfms.update({k: ContTransformerRange}) for k in ['mmce', 'f1', 'auc', 'aknn.k', 'aknn.M', 'rpart.maxdepth', 'rpart.minsplit', 'rpart.minbucket', 'xgboost.max_depth']]
[tfms.update({k: partial(ContTransformerLogRange)}) for k in ['timetrain', 'timepr... |
def tree_shap_independent_200(model, data):
    """Build a TreeExplainer over a deterministic subsample of at most 200
    rows of *data* and return its bound ``shap_values`` method."""
    background_size = min(200, data.shape[0])
    background = sklearn.utils.resample(data, replace=False, n_samples=background_size, random_state=0)
    explainer = TreeExplainer(model, background, feature_dependence='independent')
    return explainer.shap_values
class ActionScores(object):
def __init__(self, d, state_value):
for v in d.values():
assert isinstance(v, Variable)
assert isinstance(state_value, Variable)
self._vars = d
self._floats = {action: v.data.cpu()[0] for (action, v) in self._vars.items()}
self._state_v... |
class _MSDataLoaderIter(_DataLoaderIter):
def __init__(self, loader):
self.dataset = loader.dataset
self.scale = loader.scale
self.collate_fn = loader.collate_fn
self.batch_sampler = loader.batch_sampler
self.num_workers = loader.num_workers
self.pin_memory = (loader.... |
def compute_error_general(model_file, data_loader, cuda_on=False, soft_decision=True, stochastic=False, breadth_first=False, fast=False, task='classification', name=''):
map_location = None
if (not cuda_on):
map_location = 'cpu'
tree_tmp = torch.load(model_file, map_location=map_location)
(tree_... |
def load_state_dict(state_dict):
    """Rebuild the module-level ``_aggregators`` registry from *state_dict*:
    one fresh MetersDict per named aggregator state."""
    for name, agg_state in state_dict.items():
        fresh = MetersDict()
        # Register first, then load — preserves the original's ordering.
        _aggregators[name] = fresh
        fresh.load_state_dict(agg_state)
class Window(object):
def __init__(self, window_width, window_height, samples=1, window_title='', monitor=1, show_at_center=True, offscreen=False):
self.window_title = window_title
assert glfw.Init(), 'Glfw Init failed!'
glfw.WindowHint(glfw.SAMPLES, samples)
if offscreen:
... |
class PyTestAssertionToAstVisitor(ass.AssertionVisitor):
def __init__(self, variable_names: ns.AbstractNamingScope, module_aliases: ns.AbstractNamingScope, common_modules: set[str], statement_node: ast.stmt):
self._common_modules = common_modules
self._module_aliases = module_aliases
self._v... |
def test_spectrum_section_config(tardis_config_verysimple):
tardis_config_verysimple['spectrum']['start'] = Quantity('2500 angstrom')
tardis_config_verysimple['spectrum']['stop'] = Quantity('500 angstrom')
with pytest.raises(ValueError):
conf = Configuration.from_config_dict(tardis_config_verysimple... |
def _impl(array, characters, highlevel, behavior, attrs):
from awkward._connect.pyarrow import import_pyarrow_compute
pc = import_pyarrow_compute('m')
with HighLevelContext(behavior=behavior, attrs=attrs) as ctx:
layout = ctx.unwrap(array)
out = ak._do.recursively_apply(layout, ak.operations.str... |
def parseException(exception_str, verbose=True):
split_exception = exception_str.replace('> (', '>(').split(',')
exception_name = ' '.join(split_exception[1:])
exception_split = exception_name.split('when executing')
exception_name = exception_split[0]
try:
instruction = exception_split[1][1... |
class SparseSmoothness(BaseSparse, SmoothnessFirstOrder):
def __init__(self, mesh, orientation='x', gradient_type='total', **kwargs):
if ('gradientType' in kwargs):
self.gradientType = kwargs.pop('gradientType')
else:
self.gradient_type = gradient_type
super().__init_... |
def load_pickle(path):
    """Unpickle and return the object stored at *path*.

    NOTE(review): pickle.load must only be used on trusted files.
    """
    with open(path, 'rb') as handle:
        return pickle.load(handle)
class TestLoss(TestCase):
def test_basic(self):
loss = Loss('rmse')
true = np.random.random(100)
pred = np.random.random(100)
self.assertEqual(rmse(true, pred), loss(true, pred)['rmse'])
def test_shortcut(self):
loss = get_loss('default')
self.assertEqual(loss.los... |
()
class DecisionTransformerConfig(TransformerConfig):
batch_size: int = 64
learning_rate: float = 0.0001
encoder_factory: EncoderFactory = make_encoder_field()
optim_factory: OptimizerFactory = make_optimizer_field()
num_heads: int = 1
num_layers: int = 3
attn_dropout: float = 0.1
resid... |
def dace_max(X_in: dace.float32[N], X_out: dace.float32[1]):
    # DaCe reduction program: folds X_in with element-wise max into X_out.
    # identity=-9999999 seeds the reduction; presumably all inputs exceed
    # that value — TODO confirm the expected input range.
    dace.reduce((lambda a, b: max(a, b)), X_in, X_out, identity=(- 9999999))
class Decoder(nn.Module):
def __init__(self, input_size, embedding_size, hidden_size, output_size, num_layers, p):
super(Decoder, self).__init__()
self.dropout = nn.Dropout(p)
self.hidden_size = hidden_size
self.num_layers = num_layers
self.embedding = nn.Embedding(input_size... |
class Deb03(Benchmark):
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self.change_dimensionality = True
self._bounds = list(zip(([0.0] * self.N), ([1.0] * self.N)))
self.global_optimum = [[0., 0.]]
self.fglob = (- 1.0)
def fun(self, x, *args):
... |
def load_proposals_into_dataset(dataset_dicts, proposal_file):
logger = logging.getLogger(__name__)
logger.info('Loading proposals from: {}'.format(proposal_file))
with PathManager.open(proposal_file, 'rb') as f:
proposals = pickle.load(f, encoding='latin1')
rename_keys = {'indexes': 'ids', 'sco... |
class TestFunHash(hu.HypothesisTestCase):
(n_out=st.integers(min_value=5, max_value=20), n_in=st.integers(min_value=10, max_value=20), n_data=st.integers(min_value=2, max_value=8), n_weight=st.integers(min_value=8, max_value=15), n_alpha=st.integers(min_value=3, max_value=8), sparsity=st.floats(min_value=0.1, max_v... |
.parametrize('observation_shape', [(100,)])
.parametrize('action_size', [2])
.parametrize('episode_length', [10])
def test_discrete_action_match_with_algos(observation_shape: Sequence[int], action_size: int, episode_length: int) -> None:
discrete_episode = create_episode(observation_shape, action_size, length=episo... |
class TodoistSearchTasks(VirtualFunctionTool):
name = 'TodoistSearchTasks'
summary = 'Searches tasks by keywords, due date, and priority.'
parameters: List[ArgParameter] = [{'name': 'keywords', 'type': 'string', 'description': 'The keywords to search in the task name and description.', 'required': False}, {... |
class SagePackageSystem(PackageSystem):
def __classcall__(cls):
return PackageSystem.__classcall__(cls, 'sage_spkg')
def _is_present(self):
from subprocess import run, DEVNULL, CalledProcessError
try:
run('sage -p', shell=True, stdout=DEVNULL, stderr=DEVNULL, check=True)
... |
def _run(args, path):
(partitions, metas) = load_data(path, args.data.meta.path, args.computation.num_workers, args.verbose, args.log_every)
pbar = sorted(list(partitions.keys()))
results = []
for k in pbar:
print('running partition {}/{}'.format(k, len(partitions.keys())))
partition = p... |
def test_raise_configuration_exception():
    """Sanity check: ConfigurationException can be constructed, raised, and
    is caught by pytest.raises."""
    with pytest.raises(ConfigurationException):
        raise ConfigurationException()
def _compute_reciprocal_rank(gold_labels, ranked_lines):
rr = 0.0
for (i, line_number) in enumerate(ranked_lines):
if (gold_labels[line_number] == 1):
rr += (1.0 / (i + 1))
break
return rr |
def img_collate(imgs):
w = imgs[0].width
h = imgs[0].height
tensor = torch.zeros((len(imgs), 3, h, w), dtype=torch.uint8).contiguous()
for (i, img) in enumerate(imgs):
nump_array = np.array(img, dtype=np.uint8)
if (nump_array.ndim < 3):
nump_array = np.expand_dims(nump_array,... |
def score(system_conllu_file, gold_conllu_file, verbose=True):
evaluation = ud_scores(gold_conllu_file, system_conllu_file)
el = evaluation['LAS']
p = el.precision
r = el.recall
f = el.f1
if verbose:
scores = [(evaluation[k].f1 * 100) for k in ['LAS', 'MLAS', 'BLEX']]
logger.info... |
def download_tagger(dirpath):
tagger_dir = 'stanford-tagger'
if os.path.exists(os.path.join(dirpath, tagger_dir)):
print('Found Stanford POS Tagger - skip')
return
url = '
filepath = download(url, dirpath)
zip_dir = ''
with zipfile.ZipFile(filepath) as zf:
zip_dir = zf.na... |
def main(argv):
env = Wahba()
device = (torch.device('cuda:0') if torch.cuda.is_available() else torch.device('cpu'))
policy_kwargs = dict(features_extractor_class=CustomCNN, features_extractor_kwargs=dict(features_dim=256))
if ('sac' in FLAGS.alg):
policy_kwargs['n_critics'] = 1
policy_... |
def val(args, val_loader, model, criterion):
model.eval()
iouEvalVal = iouEval(args.classes)
epoch_loss = []
total_batches = len(val_loader)
for (i, (input, target)) in enumerate(val_loader):
start_time = time.time()
if (args.onGPU == True):
input = input.cuda()
... |
def my_config():
TIMESTAMP_DIR = True
EX_NAME = 'undefined_name'
if TIMESTAMP_DIR:
SAVE_DIR = (((PBT_DATA_DIR + time.strftime('%Y_%m_%d-%H_%M_%S_')) + EX_NAME) + '/')
else:
SAVE_DIR = ((PBT_DATA_DIR + EX_NAME) + '/')
print('Saving data to ', SAVE_DIR)
RUN_TYPE = 'pbt'
LOCAL_T... |
def require_wandb(test_case):
    """Decorator: skip *test_case* unless wandb is available."""
    skip_unless_wandb = unittest.skipUnless(is_wandb_available(), 'test requires wandb')
    return skip_unless_wandb(test_case)
.skipif((sys.version_info.major < 3), reason='Python 2 scalars lack a buffer interface')
class TestScalarPEP3118(object):
.parametrize('scalar', scalars_only, ids=codes_only)
def test_scalar_match_array(self, scalar):
x = scalar()
a = np.array([], dtype=np.dtype(scalar))
mv_x = memoryvie... |
class _StdoutTextFold():
def __init__(self, name):
self.name = name
self.start_time = time.time()
if github_env:
if (not folds):
print(('::group::%s' % name))
if travis_env:
print(('travis_fold:start:%s' % name))
sys.stdout.flush()
... |
def register_Ns3DropTailQueue__Ns3Packet_methods(root_module, cls):
cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
cls.add_constructor([])
cls.add_method('Enqueue', 'bool', [param('ns3::Ptr< ns3::Packet >', 'item')], is_virtual=True)
cls.add_method('Dequeue', 'ns3::Ptr< ns3::Packet >', [... |
def filter_short_utterances(utterance_info, min_len_sec=1.0):
    """Return True when the utterance is strictly longer than *min_len_sec*
    seconds (i.e. it should be kept by the filter)."""
    duration = utterance_info['end_time'] - utterance_info['start_time']
    return duration > min_len_sec
def main():
opt = get_option()
torch.manual_seed(opt.seed)
module = importlib.import_module('model.{}'.format(opt.model.lower()))
if (not opt.test_only):
print(json.dumps(vars(opt), indent=4))
solver = Solver(module, opt)
if opt.test_only:
print('Evaluate {} (loaded from {})'.for... |
def try_infer_format_from_ext(path: str):
if (not path):
return 'pipe'
for ext in PipelineDataFormat.SUPPORTED_FORMATS:
if path.endswith(ext):
return ext
raise Exception(f'Unable to determine file format from file extension {path}. Please provide the format through --format {Pipe... |
class PathBuffer():
def __init__(self, capacity_in_transitions):
self._capacity = capacity_in_transitions
self._transitions_stored = 0
self._first_idx_of_next_path = 0
self._path_segments = collections.deque()
self._buffer = {}
def add_path(self, path):
for (key, ... |
def pgen_msa(msa, outpath, steps, device, model):
clean_flag = 'upper'
msa = parse_fasta(msa, clean=clean_flag)
gibbs_sampler = ESM_MSA_sampler(model_map[model](), device=device)
if (steps == None):
steps = len(msa[(- 1)])
(probs, toks) = gibbs_sampler.probs_single(msa, steps=steps, show_pro... |
def block_inception_a(input):
if (K.image_dim_ordering() == 'th'):
channel_axis = 1
else:
channel_axis = (- 1)
branch_0 = conv2d_bn(input, 96, 1, 1)
branch_1 = conv2d_bn(input, 64, 1, 1)
branch_1 = conv2d_bn(branch_1, 96, 3, 3)
branch_2 = conv2d_bn(input, 64, 1, 1)
branch_2 =... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.