code stringlengths 101 5.91M |
|---|
()
class CliSnapshotConfig():
request: FixtureRequest
replace_server_host: bool = True
replace_service_host: bool = True
replace_service_error_report: bool = True
replace_tmp_dir: bool = True
replace_duration: bool = True
replace_multi_worker_progress: (bool | str) = True
replace_statist... |
def bounding_box_union(subset_a: Subset, subset_b: Subset) -> Range:
if (subset_a.dims() != subset_b.dims()):
raise ValueError(('Dimension mismatch between %s and %s' % (str(subset_a), str(subset_b))))
symbolic_positive = Config.get('optimizer', 'symbolic_positive')
result = []
for (arb, brb, ar... |
def register_Ns3LteRrcSapPdschConfigDedicated_methods(root_module, cls):
    """Register Python bindings for ns3::LteRrcSap::PdschConfigDedicated.

    Adds the default constructor, the copy constructor, and the mutable
    ``pa`` (uint8_t) instance attribute.  ``root_module`` is unused here but
    kept for signature symmetry with the sibling register_* helpers.
    """
    cls.add_constructor([])
    # Copy constructor taking a const reference to another PdschConfigDedicated.
    cls.add_constructor([param('ns3::LteRrcSap::PdschConfigDedicated const &', 'arg0')])
    cls.add_instance_attribute('pa', 'uint8_t', is_const=False)
    return
def main():
args = parse_args()
if (args.device < 0):
device = torch.device('cpu')
else:
device = torch.device('cuda', args.device)
model = init_model(args.config, args.checkpoint, device=device)
video_interpolation_inference(model=model, input_dir=args.input_dir, start_idx=args.star... |
def binary_cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None, class_weight=None):
if (pred.dim() != label.dim()):
(label, weight) = _expand_onehot_labels(label, weight, pred.size((- 1)))
if (weight is not None):
weight = weight.float()
loss = F.binary_cross_entropy_wi... |
class CartanType(CartanType_standard, CartanType_simple):
def __init__(self, index_set):
super().__init__()
self.letter = 'A'
self.n = index_set
def _repr_(self, compact=False):
ret = ('%s_%s' if compact else "['%s', %s]")
return (ret % (self.letter, ('ZZ' if (self.n == Z... |
class Logger():
def __init__(self, logdir: str, config: str):
self.writer = SummaryWriter(logdir)
self._log_dir = logdir
self._tags_to_log_file = ('reward',)
print('Will copy file to ', logdir)
copy_exp_dir(logdir)
save_json_config(config, os.path.join(logdir, 'config... |
def numerical_bias_by_clustering(model_before, model_after, masc_words, fem_words, k):
(scores_before, scores_after) = ([], [])
all_biased = masc_words.union(fem_words)
for w in all_biased:
most_similar_before = model_before.most_similar(w, topn=k)
(most_similar_before, _) = zip(*most_simila... |
def test_indexedoptionarray():
content = ak.contents.NumpyArray(np.arange(((2 * 3) * 5)).reshape(5, 3, 2))
offsets = ak.index.Index64(np.array([0, 3, 3, 5], dtype=np.int64))
listarray = ak.contents.ListOffsetArray(offsets, content)
index = ak.index.Index64(np.array([2, (- 1), 2, 1, (- 1), 0], dtype=np.i... |
def hist(ax, lbp):
    """Draw a normalized histogram of LBP codes on `ax`, one bin per code value.

    `lbp` is assumed to hold non-negative integer-valued codes; the bin count
    is derived from its maximum value.
    """
    num_bins = int(lbp.max() + 1)
    flattened_codes = lbp.ravel()
    return ax.hist(
        flattened_codes,
        density=True,
        bins=num_bins,
        range=(0, num_bins),
        facecolor='0.5',
    )
class InfiniteDataLoader():
def __init__(self, dataset, weights, batch_size, num_workers):
super().__init__()
if (weights is not None):
sampler = torch.utils.data.WeightedRandomSampler(weights, replacement=True, num_samples=batch_size)
else:
sampler = torch.utils.data... |
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument('--game', type=str, default='breakout')
parser.add_argument('--seed', type=int, default=1)
parser.add_argument('--gpu', type=int)
args = parser.parse_args()
d3rlpy.seed(args.seed)
(dataset, env) = d3rlpy.datasets.get_... |
class Subversion(VersionControl):
name = 'svn'
dirname = '.svn'
repo_name = 'checkout'
schemes = ('svn', 'svn+ssh', 'svn+ 'svn+ 'svn+svn')
def should_add_vcs_url_prefix(cls, remote_url):
return True
def get_base_rev_args(rev):
return ['-r', rev]
def get_revision(cls, location... |
_metric
def fid50k_val(opts):
    """FID over 50k generated samples against the full, un-mirrored dataset.

    Mutates `opts.dataset_kwargs` to disable size capping and x-flips before
    delegating to `frechet_inception_distance.compute_fid`.
    """
    opts.dataset_kwargs.update(max_size=None, xflip=False)
    score = frechet_inception_distance.compute_fid(
        opts, max_real=None, num_gen=50000
    )
    return dict(fid50k_val=score)
def check_args(args):
if (not args.pred_src_languages):
setattr(args, 'pred_src_languages', [args.eval_src_languages])
if (not args.pred_tgt_languages):
setattr(args, 'pred_tgt_languages', [args.eval_tgt_languages])
if (args.is_hf_model and ((not args.pred_src_languages) or (not args.model) ... |
def _impl(array, max_splits, reverse, highlevel, behavior, attrs):
from awkward._connect.pyarrow import import_pyarrow_compute
pc = import_pyarrow_compute('ak.str.split_whitespace')
with HighLevelContext(behavior=behavior, attrs=attrs) as ctx:
layout = ctx.unwrap(array, allow_record=False, allow_unk... |
def main(N, family, sol=0, kind='G', alpha=0, beta=0):
if (sol == 0):
domain = ((- 1), 1)
ue = (sp.sin(((sp.pi * x) * 2)) * sp.exp(((- x) / 2)))
measure = (- 1)
elif (sol == 1):
domain = (0, 1.0)
ue = ((x ** 3) * ((1 - x) ** 3))
measure = (- sp.exp((- x)))
fe ... |
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3ApplicationContainer_methods(root_module, root_module['ns3::ApplicationContainer'])
register_Ns3AsciiFile_methods(root_module, root_module['ns3::AsciiFile'])
register_Ns3AsciiTraceHelper_... |
def get_constant_schedule_with_warmup(optimizer: Optimizer, num_warmup_steps: int, last_epoch: int=(- 1)):
def lr_lambda(current_step: int):
if (current_step < num_warmup_steps):
return (float(current_step) / float(max(1.0, num_warmup_steps)))
return 1.0
return LambdaLR(optimizer, lr... |
class TestControl(test_util.TestCase):
def test_disambiguate_grad_if_op_output(self):
workspace.FeedBlob('cond', np.array(True))
workspace.FeedBlob('then_grad', np.array(1))
workspace.FeedBlob('else_grad', np.array(2))
then_model = ModelHelper(name='then_test_model')
then_mod... |
class NodeFinder(TreeVisitor):
def __init__(self, node):
super(NodeFinder, self).__init__()
self.node = node
self.found = False
def visit_Node(self, node):
if self.found:
pass
elif (node is self.node):
self.found = True
else:
se... |
def _variable_with_weight_decay(name, shape, stddev, wd, use_xavier=True):
if use_xavier:
initializer = tf.contrib.layers.xavier_initializer()
else:
initializer = tf.truncated_normal_initializer(stddev=stddev)
var = _variable_on_cpu(name, shape, initializer)
if (wd is not None):
... |
def determine_filetype(filename):
filename = os.path.basename(filename)
if ('xfrac3d' in filename):
return 'xfrac'
elif ('n_all' in filename):
return 'density'
elif ('Temper' in filename):
return 'temper'
elif ('v_all' in filename):
return 'velocity'
elif ('.cbin'... |
def process_rel_candidate_for_drop_led(relnode_candidate, filtered_mod_pos, nodeset, simple_sentences, main_sent_dict, boxer_graph, opr_drop_rel):
simple_sentence = ' '.join(simple_sentences)
sentence_before_drop = boxer_graph.extract_main_sentence(nodeset, main_sent_dict, filtered_mod_pos)
edit_dist_before... |
def eval(configs):
print('Evaluate')
statistics_file = (BASE_PATH + '/stats.json')
if os.path.exists(statistics_file):
print('Statistics file already exists!')
import common.utils as utils
import pyrenderer
from volnet.inference import LoadedModel
from losses.lossbuilder import LossB... |
def compile_rules(environment):
e = re.escape
rules = [(len(environment.comment_start_string), TOKEN_COMMENT_BEGIN, e(environment.comment_start_string)), (len(environment.block_start_string), TOKEN_BLOCK_BEGIN, e(environment.block_start_string)), (len(environment.variable_start_string), TOKEN_VARIABLE_BEGIN, e(... |
def test_no_mixed_overloads():
from pybind11_tests import debug_enabled
with pytest.raises(RuntimeError) as excinfo:
m.ExampleMandA.add_mixed_overloads1()
assert (str(excinfo.value) == ('overloading a method with both static and instance methods is not supported; ' + ('compile in debug mode for more... |
class GTestTestFilterTest(gtest_test_utils.TestCase):
def testTestExecutionIsFiltered(self):
subprocess_env = os.environ.copy()
subprocess_env[TESTBRIDGE_NAME] = '*.TestThatSucceeds'
p = gtest_test_utils.Subprocess(COMMAND, env=subprocess_env)
self.assertEquals(0, p.exit_code)
... |
class LoadImage(object):
def __call__(self, results):
warnings.simplefilter('once')
warnings.warn('`LoadImage` is deprecated and will be removed in future releases. You may use `LoadImageFromWebcam` from `mmdet.datasets.pipelines.` instead.')
if isinstance(results['img'], str):
r... |
def violates_moderation(text):
url = '
headers = {'Content-Type': 'application/json', 'Authorization': ('Bearer ' + os.environ['OPENAI_API_KEY'])}
text = text.replace('\n', '')
data = ((('{' + '"input": ') + f'"{text}"') + '}')
data = data.encode('utf-8')
try:
ret = requests.post(url, he... |
def load_tf2_weights_in_pytorch_model(pt_model, tf_weights, allow_missing_keys=False):
try:
import tensorflow as tf
import torch
except ImportError:
logger.error('Loading a TensorFlow model in PyTorch, requires both PyTorch and TensorFlow to be installed. Please see and for installatio... |
def CreateDatatypes(*ds):
ds = _get_args(ds)
if z3_debug():
_z3_assert((len(ds) > 0), 'At least one Datatype must be specified')
_z3_assert(all([isinstance(d, Datatype) for d in ds]), 'Arguments must be Datatypes')
_z3_assert(all([(d.ctx == ds[0].ctx) for d in ds]), 'Context mismatch')
... |
def test_kswin_initialization():
with pytest.raises(ValueError):
KSWIN(alpha=(- 0.1))
with pytest.raises(ValueError):
KSWIN(alpha=1.1)
kswin = KSWIN(alpha=0.5)
assert (kswin.alpha == 0.5)
kswin = KSWIN(data='st')
assert isinstance(kswin.window, np.ndarray)
kswin = KSWIN(data=... |
def load_data_by_id(sub_set, id_path):
with open(id_path) as inf:
id_list = inf.readlines()
contents = []
for example_id in id_list:
example_id = example_id.strip()
label = example_id.split('_')[0]
file_path = os.path.join(FLAGS.raw_data_dir, sub_set, label, example_id[(len(l... |
class ImageAdditiveGaussianNoise(NoiseModel.NoiseModel):
def __init__(self, shape, probability=0, feature_importance=[], one_cell_flag=False, min_val=0.0, max_val=1.0, mu=0.0, sigma=1.0, scale=numpy.array([]), p_pixel=(- 1)):
super(ImageAdditiveGaussianNoise, self).__init__(shape, probability, feature_impor... |
def p_pass_statement(s, with_newline=0):
    """Parse a `pass` statement from scanner `s` and return a PassStatNode.

    When `with_newline` is truthy, a trailing newline is required
    (semicolons are ignored by the newline check).
    """
    stmt_pos = s.position()
    s.expect('pass')
    if with_newline:
        s.expect_newline('Expected a newline', ignore_semicolon=True)
    return Nodes.PassStatNode(stmt_pos)
def maybe_get(dictionary: Dict[(str, any)], key: Union[(str, List[str])], default=None):
if (len(key) == 1):
key = key[(- 1)]
if isinstance(key, str):
return (dictionary[key] if (key in dictionary.keys()) else default)
return (maybe_get(dictionary[key[0]], key[1:], default=default) if (key[0... |
def inference(config: DictConfig):
device = check_envirionment(config.eval.use_cuda)
model = load_test_model(config.eval, device)
if (config.eval.dataset == 'kspon'):
vocab = KsponSpeechVocabulary(f'../../../data/vocab/aihub_{config.eval.output_unit}_vocabs.csv', output_unit=config.eval.output_unit)... |
def test_mi_static_properties():
for d in (m.VanillaStaticMix1(), m.VanillaStaticMix2()):
assert (d.vanilla() == 'Vanilla')
assert (d.static_func1() == 'WithStatic1')
assert (d.static_func2() == 'WithStatic2')
assert (d.static_func() == d.__class__.__name__)
m.WithStatic1.sta... |
class RecordingDict(dict):
def __init__(self, *args, **kwds):
dict.__init__(self, *args, **kwds)
self.start()
def start(self):
self.set = set()
self.got = set()
def __getitem__(self, name):
if (name not in self.set):
self.got.add(name)
return dict.... |
class FocalCosineLoss(nn.Module):
def __init__(self, alpha: float=1, gamma: float=2, xent: float=0.1, reduction='mean'):
super(FocalCosineLoss, self).__init__()
self.alpha = alpha
self.gamma = gamma
self.xent = xent
self.reduction = reduction
def forward(self, input: Tens... |
def _convert_param_to_schematics(param):
(schema_type, schema_args) = _convert_type_to_schematics(param.annotation)
schema_kwargs = {}
if (param.default != param.empty):
schema_kwargs['default'] = param.default
else:
schema_kwargs['required'] = True
return schema_type(*schema_args, *... |
def translate_method(input_method):
    """Normalize a method name: lowercase it and strip '-', '_' and spaces."""
    normalized = input_method.lower()
    for separator in ('-', '_', ' '):
        normalized = normalized.replace(separator, '')
    return normalized
class PerformanceRecoder(object):
def __init__(self, dict, predix='_perf'):
self.dict = dict
self.predix = predix
self.recoder = defaultdict(list)
self.recoder[('all' + self.predix)] = []
for d in self.dict:
for k in d.keys():
self.recoder[(k + sel... |
def test_sympy_stencil():
D = FunctionSpace(8, 'L', bc='u(-1)=0&&u(1)=0', scaled=True)
(i, j) = sp.symbols('i,j', integer=True)
assert (str(D.sympy_stencil()) == 'KroneckerDelta(i, j)/sqrt(4*i + 6) - KroneckerDelta(j, i + 2)/sqrt(4*i + 6)')
assert (str(D.sympy_stencil(implicit='a')) == 'KroneckerDelta(i... |
_module
class S2D_RPN(RPN):
def __init__(self, layer_nums, ds_layer_strides, ds_num_filters, us_layer_strides, us_num_filters, num_input_features, norm_cfg=None, name='rpn', logger=None, **kwargs):
super(S2D_RPN, self).__init__(layer_nums, ds_layer_strides, ds_num_filters, us_layer_strides, us_num_filters, ... |
class SwishFn(torch.autograd.Function):
def forward(ctx, x, beta):
beta_sigm = torch.sigmoid((beta * x))
output = (x * beta_sigm)
ctx.save_for_backward(x, output, beta)
return (output / 1.1)
def backward(ctx, grad_output):
(x, output, beta) = ctx.saved_tensors
bet... |
def get_commit_message():
build_source_version_message = os.environ['BUILD_SOURCEVERSIONMESSAGE']
if (os.environ['BUILD_REASON'] == 'PullRequest'):
commit_id = build_source_version_message.split()[1]
git_cmd = ['git', 'log', commit_id, '-1', '--pretty=%B']
commit_message = subprocess.run... |
.parametrize('prior', SE_PRIORS)
def test_prior_BO_limit(prior):
    """In the BO limit, BO-derived columns must match the RS reference columns."""
    df = check_prior_BO_limit(prior)
    column_pairs = (
        ('A_BO', 'A_RS'),
        ('vx_BO', 'vx_RS'),
        ('mx_BO', 'mx_RS'),
        # NOTE(review): reproduced verbatim — compares mx_BO (again) against
        # qx_RS rather than qx_BO/qx_RS; confirm this asymmetry is intentional.
        ('mx_BO', 'qx_RS'),
    )
    for bo_col, rs_col in column_pairs:
        assert_allclose(df[bo_col], df[rs_col])
class RobertaEMOEncoder(FairseqDecoder):
def __init__(self, args, dictionary):
super().__init__(dictionary)
self.args = args
self.sentence_encoder = TransformerMultiEncoder(padding_idx=1, num_encoder_layers_cross=args.encoder_layers_cross, embedding_dim_text=args.encoder_embed_dim_t, embeddi... |
class UpCoXvD(nn.Module):
def __init__(self, in_channels, out_channels, blocks, residual=True, batch_norm=True, transpose=True, concat=True, use_att=False):
super(UpCoXvD, self).__init__()
self.concat = concat
self.residual = residual
self.batch_norm = batch_norm
self.bn = No... |
def Linear2d(in_channels: int, out_channels: int, stride: int=1, bias: bool=True) -> torch.nn.Module:
    """Per-pixel linear layer implemented as a 1x1 2-D convolution."""
    return nn.Conv2d(
        in_channels,
        out_channels,
        kernel_size=1,
        stride=stride,
        bias=bias,
    )
class MT5Model(metaclass=DummyObject):
    """Import-time stand-in for MT5Model used when PyTorch is unavailable."""
    # Backends this dummy requires; checked on any instantiation/use.
    _backends = ['torch']
    def __init__(self, *args, **kwargs):
        # Presumably raises an informative error naming the missing backend
        # (behavior of `requires_backends` is defined elsewhere).
        requires_backends(self, ['torch'])
class SaveCheckpoint(ModelCheckpoint):
def __init__(self, dirpath: (str | None)=None, filename: (str | None)=None, monitor: str='validation_loss', **kwargs):
if (dirpath is None):
dirpath = os.path.join(settings.logging_dir, datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
dirpath += f'... |
def sanity_check_proced_question():
legacy_results = load_json('entity_linking/grailqa_el.json')
new_results = load_json('tmp/tmp_el_results.json')
count = 0
for (qid, leg_res) in legacy_results.items():
if (qid not in new_results):
continue
new_res = new_results[str(qid)]
... |
def convert_examples_to_features(examples, label_list, max_seq_length, tokenizer, output_mode):
label_map = {label: i for (i, label) in enumerate(label_list)}
features = []
for (ex_index, example) in enumerate(examples):
if ((ex_index % 10000) == 0):
logger.info(('Writing example %d of %... |
class KerasMetricCallback(metaclass=DummyObject):
    """Import-time stand-in for KerasMetricCallback when TensorFlow is unavailable."""
    # Backends this dummy requires; checked on any instantiation/use.
    _backends = ['tf']
    def __init__(self, *args, **kwargs):
        # Presumably raises an informative error naming the missing backend
        # (behavior of `requires_backends` is defined elsewhere).
        requires_backends(self, ['tf'])
def write_gipuma_dmb(path, image):
image_shape = np.shape(image)
width = image_shape[1]
height = image_shape[0]
if (len(image_shape) == 3):
channels = image_shape[2]
else:
channels = 1
if (len(image_shape) == 3):
image = np.transpose(image, (2, 0, 1)).squeeze()
with o... |
def get_random_ce_pred(ce_ref, verbose=False):
pred = (['O'] * len(ce_ref))
(c_start, e_start) = np.random.choice(range(len(ce_ref)), 2, replace=False)
if (c_start < e_start):
c_end = random_choice_incr_probability((c_start + 1), e_start)
e_end = random_choice_incr_probability((e_start + 1),... |
class DBLDetectorParams(Config):
    """Declarative parameter set for the DBL detector.

    Fields are class-level defaults consumed by the `Config` base class
    (declared elsewhere); exact field semantics are defined by the detector.
    """
    # Detection threshold — NOTE(review): units/semantics defined by the detector.
    threshold: float = 0.0
    # Optional (start, end) pair fixing a period; None presumably means auto-detect.
    fixed_period: Tuple[(str, str)] = None
    # Training window length as a duration string — TODO confirm expected format.
    train_window: str = None
    # Window size; '1h' looks like a pandas-style offset string — verify.
    wind_sz: str = '1h'
    # Trend components to use; None = detector default.
    trends: List[str] = None
    # Extra keyword arguments passed through verbatim.
    # NOTE(review): class-level mutable default — safe only if Config copies it
    # per instance; confirm against the Config base class.
    kwargs: dict = {}
def read_yaml(path):
    """Parse the YAML file at `path` into a dict.

    On a YAML parse error the exception is printed and the process exits;
    other I/O errors propagate to the caller.
    """
    with open(path, 'r') as stream:
        try:
            parsed = yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
            exit()
    return parsed
def generate_class_mask(label, classes):
    """Binary mask of `label` pixels whose value appears in `classes`.

    Returns a (1, *label.shape) tensor counting, per pixel, how many entries
    of `classes` it matches (0 or 1 when `classes` has distinct values).
    """
    expanded_classes = classes.unsqueeze(1).unsqueeze(2)
    broadcast_label, broadcast_classes = torch.broadcast_tensors(label, expanded_classes)
    return broadcast_label.eq(broadcast_classes).sum(0, keepdims=True)
class DatasetCatalog(object):
DATA_DIR = 'datasets'
DATASETS = {'plabels_1m_cocostyle': {'img_dir': '', 'ann_file': 'plabels_coco_vg_sbu_clipemb.json'}, 'coco_zeroshot_train': {'img_dir': 'coco/images/train2017', 'ann_file': 'instances_train2017_seen_2_clipemb.json'}, 'coco_zeroshot_val': {'img_dir': 'coco/imag... |
class TestQuantizedConv(TestCase):
def _test_qconv_unpack_impl(self, qconv_prepack_fn, qconv_unpack_fn, inputs, strides, i_pads, o_pads, channelwise):
(X_data, W_data, bias_data, groups, transposed) = inputs
(X, (X_scale, X_zero_point, X_qtype)) = X_data
(W, (W_scale, W_zero_point, W_qtype))... |
def show_running(func):
(func)
def g(*args, **kargs):
x = WaitPrint(2, '{}({})... '.format(func.__name__, ', '.join(([repr(x) for x in args] + ['{}={}'.format(key, repr(value)) for (key, value) in kargs.items()]))))
x.start()
t = time.perf_counter()
r = func(*args, **kargs)
... |
class TestDFManipulations(unittest.TestCase):
def setUp(self):
self.extract_user_id_testcase1 = {'user_id': '10'}
self.extract_user_id_testcase2 = {'item_id': '10'}
self.price_testcase1 = [{'price': '1000'}, {'price': '5000'}]
self.price_testcase2 = [{'price': '0'}]
self.pric... |
def project_uv_on_boxes(proposals, resolution):
uv_anns = [[] for _ in range(8)]
for proposals_per_image in proposals:
if (len(proposals_per_image) == 0):
continue
targets = proposals_per_image.get_field('uv_target')
targets = targets.convert('xyxy')
proposals_per_ima... |
class RayBenchmarkActorPool():
def __init__(self, notification_address, world_size, object_size):
self.actors = []
for world_rank in range(world_size):
self.actors.append(RayBenchmarkWorker.remote(notification_address, world_size, world_rank, object_size))
def barrier(self):
... |
def translate_graph_dict(path):
graph_dict = utils.load_graph_dict(path)
abs_dir_path = os.path.dirname(os.path.abspath(__file__))
file_name = os.path.join(abs_dir_path, '../../resources/properties_data.json')
properties_data = utils.load_properties_data(file_name=file_name)
static_objects = ['bathr... |
class VI_Block():
def __init__(self, scope, X, S1, S2, config):
self.scope = scope
self.X = X
self.S1 = S1
self.S2 = S2
self.config = config
def conv2d_flipkernel(x, k, name=None):
return tf.nn.conv2d(x, flipkernel(k), name=name, strides=(1, 1, 1, 1), padding='SAM... |
def is_classifier(estimator):
    """Return True iff `estimator` declares itself a classifier.

    Follows the scikit-learn convention of tagging estimators with an
    `_estimator_type` attribute; missing attribute counts as "not a classifier".
    """
    estimator_type = getattr(estimator, '_estimator_type', None)
    return estimator_type == 'classifier'
def spalde(x, tck):
    """Evaluate all spline derivatives at `x` for a legacy (t, c, k) tuple.

    BSpline instances are rejected; only the FITPACK-style tck tuple is
    supported, and evaluation is delegated to the `_impl` backend.
    """
    if isinstance(tck, BSpline):
        raise TypeError('spalde does not accept BSpline instances.')
    return _impl.spalde(x, tck)
def write_sentence(setting, delim_left, delim_right, orig_style, opp_style, orig_text, rewritten_text=None, full_style_description=None):
if (full_style_description is not None):
orig_style = full_style_description[orig_style]
opp_style = full_style_description[opp_style]
if (setting == 'contras... |
def visualize_graph_executor(state, name_prefix, pb_graph, inline_graph):
if (state.autograd_fallback_graph is not None):
visualize(graph=state.autograd_fallback_graph, name_prefix=(name_prefix + 'autograd_fallback/'), pb_graph=pb_graph, executors_it=iter(state.autograd_fallback.executors()))
for (i, (a... |
def get_cos_difference(stft_1, stft_2):
    """Cosine of the element-wise phase difference between two STFTs.

    Phases are extracted by the module-level `get_angle` helper.
    """
    phase_delta = get_angle(stft_1) - get_angle(stft_2)
    return np.cos(phase_delta)
def block_stabilizer(G, B):
    """Group elements (as translates of B[0]) that map the block B into itself.

    With an empty block every element of G trivially stabilizes it.
    """
    if not B:
        return list(G)
    (identity, op, inv) = group_law(G)
    b0_inv = inv(B[0])
    candidates = (op(b, b0_inv) for b in B)
    # Keep a candidate g only when g maps every element of B back into B.
    return [g for g in candidates if all(op(g, c) in B for c in B)]
def partition_dict(pred, d):
    """Split `d` into (matching, non-matching) dicts by applying `pred` to each key.

    `pred` is invoked exactly once per key; insertion order is preserved.
    """
    matching, rest = {}, {}
    for key, value in d.items():
        target = matching if pred(key) else rest
        target[key] = value
    return (matching, rest)
def quote(string):
    """URL-quote `string`.

    Prefers `urllib.parse.quote`; falls back to the legacy
    `urllib.pathname2url` when `urllib.parse` cannot be imported.
    """
    try:
        from urllib.parse import quote as _quote
    except ModuleNotFoundError:
        from urllib import pathname2url
        return pathname2url(string)
    return _quote(string)
# NOTE(review): decorator garbled by extraction; presumably '@pytest.mark.parametrize'.
.parametrize('base_path', ('/v1', '/v1/'))
def test_base_path_suffix(swagger_20, base_path):
    """basePath should normalize to a trailing slash; schema metadata is stable."""
    swagger_20.raw_schema['basePath'] = base_path
    # Both '/v1' and '/v1/' normalize to '/v1/'.
    assert (swagger_20.base_path == '/v1/')
    assert (swagger_20.verbose_name == 'Swagger 2.0')
    assert (swagger_20.spec_version == '2.0')
_task('noisy_channel_translation')
class NoisyChannelTranslation(TranslationTask):
def add_args(parser):
TranslationTask.add_args(parser)
parser.add_argument('--channel-model', metavar='FILE', help='path to P(S|T) model. P(S|T) and P(T|S) must share source and target dictionaries.')
parser.a... |
def test_variable_type_runtime(type_system):
    """Naming scope seeded with a runtime return-type trace still names vars sequentially."""
    scope = ns.VariableTypeNamingScope(return_type_trace={0: type_system.convert_type_hint(int)})
    # Mocked variable with a None-typed hint at statement position 1.
    var = MagicMock(type=type_system.convert_type_hint(None))
    var.get_statement_position.return_value = 1
    # First registered variable gets index 0 regardless of the trace contents.
    assert (scope.get_name(var) == 'var_0')
def cerCalc(s1, s2):
    """Character-level edit distance between two strings, ignoring spaces.

    Spaces are stripped first; `Lev.distance` (presumably python-Levenshtein)
    does the actual computation.
    """
    stripped_1 = s1.replace(' ', '')
    stripped_2 = s2.replace(' ', '')
    return Lev.distance(stripped_1, stripped_2)
def get_staleness_for_stage(stage, scheduler: WorkScheduler, num_stages, num_batches, se) -> Dict[(int, Dict[(int, Any)])]:
s = get_fwd_bwd_string_for_stage(stage, scheduler, num_stages, num_batches)
d = {}
done_fwds = 0
done_bwds = 0
for c in s:
if (c == 'F'):
es = expected_stal... |
def register_Ns3DefaultDeleter__Ns3Dot11sDestinationAddressUnit_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::DefaultDeleter< ns3::dot11s::DestinationAddressUnit > const &', 'arg0')])
cls.add_method('Delete', 'void', [param('ns3::dot11s::DestinationAddressUnit *', 'obje... |
class KsponSpeechVocabulary(Vocabulary):
def __init__(self, vocab_path, output_unit: str='character', sp_model_path=None):
super(KsponSpeechVocabulary, self).__init__()
if (output_unit == 'subword'):
import sentencepiece as spm
self.sp = spm.SentencePieceProcessor()
... |
def save_memmap(data: np.ndarray, path, dtype=None, node_chunk_size=1000000, log=print):
dtype = (_judge_type(data) if (dtype is None) else dtype)
x = np.memmap(path, dtype=dtype, mode='w+', shape=data.shape)
for i in tqdm(range(0, data.shape[0], node_chunk_size)):
j = min((i + node_chunk_size), dat... |
.parametrize('seed', [313])
.parametrize('test', [True])
.parametrize('w_bias', [True, False])
.parametrize('graph_ref, graph_act, rm_func', [(small_bn_rm_resnet, small_bn_resnet, True), (small_bsf_resnet, small_bn_resnet, False), (bsf_lenet, lenet, False)])
def test_test_mode(seed, test, w_bias, graph_ref, graph_act, ... |
.parametrize('symbol, typ, result', [(sym, dict, dict[(Any, int)]) for sym in InferredSignature._DICT_VALUE_ATTRIBUTES])
def test_guess_generic_types_dict_value_from_elements(inferred_signature, symbol, typ, result):
config.configuration.test_creation.negate_type = 0.0
knowledge = UsageTraceNode('ROOT')
kno... |
.parametrize('ti_func,np_func', binary_func_table)
def test_python_scope_vector_binary(ti_func, np_func):
ti.init()
x = ti.Vector([2, 3])
y = ti.Vector([5, 4])
result = ti_func(x, y).to_numpy()
if (ti_func in [ops.eq, ops.ne, ops.lt, ops.le, ops.gt, ops.ge]):
result = result.astype(bool)
... |
def _step(state: State, action) -> State:
discard = ((action < 34) | (action == 68))
self_kan = ((34 <= action) & (action < 68))
action_ix = (action - 69)
return jax.lax.cond(discard, (lambda : _discard(state, action)), (lambda : jax.lax.cond(self_kan, (lambda : _selfkan(state, action)), (lambda : jax.l... |
def main(args, config):
utils.init_distributed_mode(args)
device = torch.device(args.device)
seed = (args.seed + utils.get_rank())
torch.manual_seed(seed)
np.random.seed(seed)
random.seed(seed)
cudnn.benchmark = True
start_epoch = 0
max_epoch = config['schedular']['epochs']
warmu... |
def resnext101(baseWidth, cardinality):
    """Build a ResNeXt-101 (stage depths [3, 4, 23, 3]) with 1000 output classes."""
    return ResNeXt(baseWidth, cardinality, [3, 4, 23, 3], 1000)
class Object():
def __init__(self, id, x, y, width, height, names, synsets):
self.id = id
self.x = x
self.y = y
self.width = width
self.height = height
self.names = names
self.synsets = synsets
def __str__(self):
name = (self.names[0] if (len(self.... |
def random_jitter(wave, max_jitter_steps):
    """Randomly time-shift `wave` by up to `max_jitter_steps` via pad-then-crop.

    Pads symmetrically along the time axis, then crops back to the original
    shape at a random offset (semantics of `F.pad`/`F.random_crop` come from
    the module-level `F` — presumably a framework functional API; confirm).
    """
    original_shape = wave.shape
    padded = F.pad(wave, (0, 0, max_jitter_steps, max_jitter_steps))
    return F.random_crop(padded, shape=original_shape)
def vectorize_ptr_out(tokens, out_vocab, unique_input_ids, relaxed_matching=False):
ptr_ids = []
for (i, token) in enumerate(tokens):
if (not out_vocab.is_unknown(token)):
ptr_ids.append(out_vocab.to_idx(token))
elif (token in unique_input_ids):
ptr_ids.append(unique_inpu... |
def _make_connect_four_dwg(dwg, state: ConnectFourState, config):
GRID_SIZE = config['GRID_SIZE']
BOARD_WIDTH = config['BOARD_WIDTH']
BOARD_HEIGHT = config['BOARD_HEIGHT']
color_set = config['COLOR_SET']
dwg.add(dwg.rect((0, 0), ((BOARD_WIDTH * GRID_SIZE), (BOARD_HEIGHT * GRID_SIZE)), fill=color_set... |
class Params(object):
def __init__(self, params):
self.params = params
def __eq__(self, other):
if (not isinstance(other, Params)):
logger.info('The params you compare is not an instance of Params. ({} != {})'.format(type(self), type(other)))
return False
this_fla... |
_model
def caformer_b36_in21ft1k(pretrained=False, **kwargs):
model = MetaFormer(depths=[3, 12, 18, 3], dims=[128, 256, 512, 768], token_mixers=[SepConv, SepConv, Attention, Attention], head_fn=MlpHead, **kwargs)
model.default_cfg = default_cfgs['caformer_b36_in21ft1k']
if pretrained:
state_dict = t... |
def main():
base_model_name_HF = HF_BASE_MODEL
base_model_name = base_model_name_HF.split('/')[(- 1)]
model_path = f'{MODEL_OUT_DIR}/bioclinical-longformer'
unpretrained_model_path = f'{MODEL_OUT_DIR}/{base_model_name}-{GLOBAL_MAX_POS}'
if (not os.path.exists(model_path)):
os.makedirs(model_... |
def test_Metrics():
reset_seed(0, check_cudnn=False)
instance = Metrics()
announce_msg('Testing {}'.format(instance))
cuda = 0
DEVICE = torch.device(('cuda:{}'.format(cuda) if torch.cuda.is_available() else 'cpu'))
if torch.cuda.is_available():
torch.cuda.set_device(int(cuda))
instan... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.