code stringlengths 101 5.91M |
|---|
def add_cross_entropy_loss(model, pred, label, loss, weight=None, cpg=None):
in_blob = [pred, label]
if cpg:
in_blob.append(cpg)
out_blob = [loss]
if weight:
in_blob.insert(2, weight)
model.net.WeightedCrossEntropyWithLogits(in_blob, out_blob)
else:
model.net.CrossEnt... |
def make_dataset(mode, maxSkip=0, cv_split=0):
items = []
aug_items = []
assert (mode in ['train', 'val', 'test', 'trainval'])
img_dir_name = 'images'
img_path = os.path.join(root, img_dir_name)
mask_path = os.path.join(root, 'labels')
mask_postfix = '_train_id.png'
if (mode == 'trainval... |
def get_depth_choices(nDepth, return_num):
if (nDepth == 2):
choices = (1, 2)
elif (nDepth == 3):
choices = (1, 2, 3)
elif (nDepth > 3):
choices = list(range(1, (nDepth + 1), 2))
if (choices[(- 1)] < nDepth):
choices.append(nDepth)
else:
raise ValueErr... |
def get_preprocessor(space: spaces.Space, mode: str=Mode.FLATTEN):
if (mode == Mode.FLATTEN):
if isinstance(space, spaces.Dict):
return DictFlattenPreprocessor
elif isinstance(space, spaces.Tuple):
return TupleFlattenPreprocessor
elif isinstance(space, spaces.Box):
... |
def makeVocabulary(filename, size):
vocab = onmt.Dict([onmt.Constants.PAD_WORD, onmt.Constants.UNK_WORD, onmt.Constants.BOS_WORD, onmt.Constants.EOS_WORD], lower=opt.lower, seq_len=opt.seq_length)
with codecs.open(filename, 'r', 'utf-8') as f:
for sent in f.readlines():
for word in sent.spli... |
class FitDataError(ValueError):
    """Raised when `data` holds values outside the support needed for MLE fitting."""

    def __init__(self, distr, lower, upper):
        # Let ValueError.__init__ set args=(message,) — equivalent to
        # assigning self.args directly, and what str(exc) reports.
        message = (
            f'Invalid values in `data`. Maximum likelihood estimation with '
            f'{distr!r} requires that {lower!r} < (x - loc)/scale < {upper!r} '
            f'for each x in `data`.'
        )
        super().__init__(message)
def eval(opt):
model = CycleGANModel(opt)
dataset = CDFdata.get_loader(opt)
(img_logs, weight_logs) = init_logs(opt)
model.load(weight_logs)
for (batch_id, data) in enumerate(dataset):
print('===> Epoch({}/{})'.format(batch_id, len(dataset)))
model.set_input(data)
model.test(... |
def test_save_xdmf_files_mixed(dir_path, rng, config_ocp, geometry):
config_ocp.set('Output', 'save_state', 'True')
config_ocp.set('Output', 'save_results', 'True')
config_ocp.set('Output', 'save_txt', 'True')
config_ocp.set('Output', 'save_adjoint', 'True')
config_ocp.set('Output', 'save_gradient',... |
def test_batch_meta_dataloader():
dataset = Sinusoid(10, num_tasks=1000, noise_std=None)
meta_dataloader = BatchMetaDataLoader(dataset, batch_size=4)
assert isinstance(meta_dataloader, DataLoader)
assert (len(meta_dataloader) == 250)
(inputs, targets) = next(iter(meta_dataloader))
assert isinsta... |
class PreNorm(nn.Module):
    """Wrap `fn` so its input is layer-normalized first (pre-norm transformer style)."""

    def __init__(self, dim, fn):
        super().__init__()
        self.norm = nn.LayerNorm(dim)
        self.fn = fn

    def forward(self, x, **kwargs):
        # Normalize, then pass the result (and any extra kwargs) through fn.
        normalized = self.norm(x)
        return self.fn(normalized, **kwargs)
class AutoModelForAudioClassification(_BaseAutoModelClass):
    """Auto class for audio-classification models.

    All resolution/loading behavior comes from `_BaseAutoModelClass`; this
    subclass only points it at the audio-classification model mapping.
    """
    # Config-class -> model-class mapping consumed by the base class.
    _model_mapping = MODEL_FOR_AUDIO_CLASSIFICATION_MAPPING
class ExhaustiveEnumerator(FromIteratorEnumerator):
    """Enumerator that walks every candidate produced by `ExhaustiveIterator`.

    Only adapts the iterator to the `FromIteratorEnumerator` interface;
    the actual enumeration order/strategy lives in `ExhaustiveIterator`.
    """

    def __init__(self, spec: TyrellSpec, max_depth: int):
        # Hand the base class a plain iterator over all programs of `spec`
        # up to `max_depth`.
        super().__init__(ExhaustiveIterator(spec, max_depth).iter())
def load_inferred(infer_history_path, normalized_gold_code):
inferred_all = [json.loads(line) for line in open(infer_history_path)]
exact_match_all = [((normalized_gold_code[i] == example['beams'][0]['inferred_code']) if example['beams'] else False) for (i, example) in enumerate(inferred_all)]
return (infer... |
.parametrize('ctx, func_name', ctxs)
.parametrize('seed', [313])
.parametrize('test', [True])
.parametrize('w_bias', [True])
.parametrize('channel_last', [True, False])
.parametrize('graph_ref, graph_act, opposite', [(resnet_ref, small_bn_resnet, False), (resnet_ref, small_bn_opp_resnet, True)])
.parametrize('dims', [1... |
def validate_args(args):
if (args.training_curriculum == 'random'):
args.bootstrapping_update_epochs = []
else:
assert (args.bootstrapping_start is not None)
assert (args.bootstrapping_start > 0)
if (args.bootstrapping_ticks is None):
bootstrapping_update_epochs = [ar... |
def main():
args = parse_args()
scriptfile = args.scriptfile
scriptargs = ([] if (args.args is None) else args.args)
scriptargs.insert(0, scriptfile)
cprofile_sortby = 'tottime'
cprofile_topk = 15
autograd_prof_sortby = 'cpu_time_total'
autograd_prof_topk = 15
redirect_argv(scriptarg... |
def mkdir(path):
    """Create directory `path` (including parents) if it does not exist.

    Idempotent: calling it on an existing directory is a no-op.
    """
    # exist_ok=True removes the TOCTOU race between the previous
    # os.path.exists() check and makedirs(), which the old code papered
    # over with an extra try/except FileExistsError.
    os.makedirs(path, exist_ok=True)
class McIdasImageFile(ImageFile.ImageFile):
format = 'MCIDAS'
format_description = 'McIdas area file'
def _open(self):
s = self.fp.read(256)
if ((not _accept(s)) or (len(s) != 256)):
raise SyntaxError('not an McIdas area file')
self.area_descriptor_raw = s
self.ar... |
def train_val_test():
set_random_seed()
model = get_model()
model_wrapper = torch.nn.DataParallel(model).cuda()
criterion = torch.nn.CrossEntropyLoss().cuda()
(train_loader, val_loader) = get_dataset()
if FLAGS.pretrained:
checkpoint = torch.load(FLAGS.pretrained)
if ((type(check... |
class SawyerBasketballV1Policy(Policy):
_fully_parsed
def _parse_obs(obs):
return {'hand_pos': obs[:3], 'ball_pos': obs[3:6], 'hoop_x': obs[(- 3)], 'unused_info': obs[[6, 7, 8, 10, 11]]}
def get_action(self, obs):
o_d = self._parse_obs(obs)
action = Action({'delta_pos': np.arange(3),... |
(scope='module')
# NOTE(review): the line above looks like a truncated decorator
# (presumably `@pytest.fixture(scope='module')`) — confirm against the
# original source before relying on this snippet.
def base_recs_pd():
    """Build a recommendations DataFrame from the module-level `base_recs_data`."""
    return pd.DataFrame(base_recs_data, columns=['uid', 'iid', 'scores'])
def generate_gallery(examples_dir, output_filename, doc_dir, rst_dir, thumbnails_dir, dir_map, n_col=3):
output(('generating %s...' % output_filename))
lines = [_gallery_head]
for (dirname, filenames) in ordered_iteritems(dir_map):
title = [(' %s' % dirname.title().replace('_', ' ')), ((' ' + (l... |
def createCorrect(outputFilename):
    """Write a one-column CSV of cluster assignments to `outputFilename`.

    Each of the NUM_SEQS iterations appends NUM_GARBAGE random ids drawn
    from GARBAGE_CLUSTERS followed by the fixed CLUSTER_SEQUENCE.
    """
    labels = []
    for _ in range(NUM_SEQS):
        labels.extend(np.random.choice(GARBAGE_CLUSTERS, NUM_GARBAGE).tolist())
        labels.extend(CLUSTER_SEQUENCE)
    np.savetxt(outputFilename, np.array(labels), delimiter=',', fmt='%d')
class Bukin06(Benchmark):
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = [((- 15.0), (- 5.0)), ((- 3.0), 3.0)]
self.global_optimum = [[(- 10.0), 1.0]]
self.fglob = 0.0
def fun(self, x, *args):
self.nfev += 1
return ((100 * sq... |
def tot() -> operations.GraphOfOperations:
operations_graph = operations.GraphOfOperations()
operations_graph.append_operation(operations.Generate(1, 20))
operations_graph.append_operation(operations.Score(1, False, utils.num_errors))
keep_best_1 = operations.KeepBestN(1, False)
operations_graph.app... |
def mul(g, self, other):
    """ONNX symbolic for elementwise multiply.

    Emits a single `Mul` node; `_if_scalar_type_as` coerces a scalar
    `other` to `self`'s type and `_broadcast_if_scalar` supplies the
    broadcast attributes when `other` is a scalar.
    """
    return g.op('Mul', self, _if_scalar_type_as(other, self), **_broadcast_if_scalar(other))
class Environment():
def __init__(self, vehicle, controller, trajectory, wind_profile=None, imu=None, mocap=None, world=None, estimator=None, sim_rate=100, safety_margin=0.25):
self.sim_rate = sim_rate
self.vehicle = vehicle
self.controller = controller
self.trajectory = trajectory
... |
class Bilinear(Module):
def __init__(self, in1_features, in2_features, out_features, bias=True):
super(Bilinear, self).__init__()
self.in1_features = in1_features
self.in2_features = in2_features
self.out_features = out_features
self.weight = Parameter(torch.Tensor(out_featur... |
class AzureCredentials(Credentials):
_appId: str
_tenant: str
_password: str
def __init__(self, appId: str, tenant: str, password: str, subscription_id: Optional[str]=None):
super().__init__()
self._appId = appId
self._tenant = tenant
self._password = password
sel... |
class CumulativeGainExplanation(ExplanationBase):
def __init__(self):
super().__init__()
self.explanations = {}
def add(self, gains: Dict, percentages: np.ndarray, num_samples: Dict):
self.explanations = {'gains': gains, 'percentages': percentages, 'num_samples': num_samples}
def get... |
class HuffmanCoder():
def __init__(self, root: 'HuffmanNode', bos='<s>', pad='<pad>', eos='</s>', unk='<unk>'):
self.root = root
self.table = root.code_table()
(self.bos_word, self.unk_word, self.pad_word, self.eos_word) = (bos, unk, pad, eos)
def _pad(self, a: bitarray) -> bitarray:
... |
def test_ssurgeon_become_mwt():
semgrex_pattern = "{word:It}=it . {word:/'s/}=s"
ssurgeon_edits = ["EditNode -node it -is_mwt true -is_first_mwt true -mwt_text It's", "EditNode -node s -is_mwt true -is_first_mwt false -mwt_text It's"]
doc = CoNLL.conll2doc(input_str=BECOME_MWT_DOC_INPUT)
ssurgeon_re... |
def is_image_file(filename: str) -> bool:
    """Return True if `filename` ends with one of IMG_EXTENSIONS (case-insensitive)."""
    lowered = filename.lower()
    for extension in IMG_EXTENSIONS:
        if lowered.endswith(extension):
            return True
    return False
def register_Ns3CallbackImplBase_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
cls.add_method('GetTypeid', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
cls.add_method('IsEqual', 'bool', [param('ns3::Pt... |
class conv3_cgen(nn.Module):
def __init__(self, z_dim, start_dim=8, out_channels=3, n_classes=10):
super(conv3_cgen, self).__init__()
self.label_emb = nn.Embedding(n_classes, n_classes)
self.linear = nn.Linear((z_dim + n_classes), (128 * (start_dim ** 2)))
self.flatten = View(((- 1),... |
class testmanager(ContextDecorator):
def __enter__(self):
self.dir_path = os.path.dirname(os.path.realpath(__file__))
for folder in ['predictions', 'preprocessed', 'processed', 'models']:
shutil.rmtree(os.path.join(self.dir_path, 'dump', folder), ignore_errors=True)
cmd = 'python... |
class QuaternionAlgebra_abstract(Algebra):
def _repr_(self):
return ('Quaternion Algebra with base ring %s' % self.base_ring())
def ngens(self):
return 3
_method
def basis(self):
(i, j, k) = self.gens()
return (self.one(), i, j, k)
_method
def inner_product_matrix... |
class MVTecDataset(Dataset):
def __init__(self, dataset_path, class_name='bottle', is_train=True, resize=256, cropsize=224, wild_ver=False):
assert (class_name in CLASS_NAMES), 'class_name: {}, should be in {}'.format(class_name, CLASS_NAMES)
self.dataset_path = dataset_path
self.class_name ... |
def resnet101_atrous(pretrained=True, os=16, **kwargs):
    """Build a ResNet-101 (Bottleneck, stage depths 3/4/23/3) with atrous rates [2, 2, 2].

    Args:
        pretrained: load pretrained weights when True.
        os: output stride passed through to `_resnet`.
            NOTE(review): this parameter shadows the `os` module inside this
            function — kept as-is since callers may pass it by keyword.
        **kwargs: forwarded to `_resnet` (accepted for interface
            compatibility; not explicitly used here).
    """
    return _resnet(arch='resnet101', block=Bottleneck, layers=[3, 4, 23, 3], atrous=[2, 2, 2], os=os, pretrained=pretrained, progress=True)
def load_txt_info(gt_file, img_info):
anno_info = []
for line in list_from_file(gt_file):
line = line.strip()
strs = line.split(',')
category_id = 1
assert (strs[28][0] == '#')
xy = [int(x) for x in strs[0:28]]
assert (len(xy) == 28)
coordinates = np.array... |
def _operator_to_node(shapes, op):
assert op.name, op
n = NodeDef()
n.name = op.name
n.input.extend(op.input)
n.op = op.type
n.device = _tf_device(op.device_option)
if shapes:
for output in op.output:
if (output not in shapes):
break
_add_tf_sh... |
def test_record_to_ndarray():
class Point(ak.Record):
def __getitem__(self, where):
return np.array([1, 2, 3])
array = ak.Array([[{'rho': 1, 'phi': 1.0}], [], [{'rho': 2, 'phi': 2.0}]], with_name='point', behavior={'point': Point})
assert (array.to_list() == [[{'rho': [1, 2, 3], 'phi': [... |
def bi_attention(config, is_train, h, u, h_mask=None, u_mask=None, scope=None, tensor_dict=None):
with tf.variable_scope((scope or 'bi_attention')):
JX = tf.shape(h)[2]
M = tf.shape(h)[1]
JQ = tf.shape(u)[1]
h_aug = tf.tile(tf.expand_dims(h, 3), [1, 1, 1, JQ, 1])
u_aug = tf.t... |
def _check_psd_eigenvalues(lambdas, enable_warnings=False):
lambdas = np.array(lambdas)
is_double_precision = (lambdas.dtype == np.float64)
significant_imag_ratio = 1e-05
significant_neg_ratio = (1e-05 if is_double_precision else 0.005)
significant_neg_value = (1e-10 if is_double_precision else 1e-0... |
def main():
parser = HfArgumentParser((ModelArguments, DataTrainingArguments, Seq2SeqTrainingArguments))
if ((len(sys.argv) == 2) and sys.argv[1].endswith('.json')):
(model_args, data_args, training_args) = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
else:
(model_args, dat... |
def test_tunable_mixin():
model_cls = DummyModel
manager = scvi.autotune.TunerManager(model_cls)
registry = manager._registry['tunables']
assert ('n_train' in registry)
assert ('n_val' in registry)
assert ('n_hidden' in registry)
assert ('n_latent' in registry)
assert ('lr' in registry)
... |
_utils.test(arch=ti.cpu)
# NOTE(review): the line above looks like a truncated decorator
# (presumably `@test_utils.test(arch=ti.cpu)`) — confirm against the
# original source; a `@ti.func` decorator on the inner def may also have
# been lost in extraction.
def test_func_bad_argument_annotation():
    """A bad (string) annotation on a func argument must raise TaichiSyntaxError."""
    with pytest.raises(ti.TaichiSyntaxError, match='annotation'):
        def func(x: 'foo'):
            print(x)
.skip('testing the overflow of 32 bit sparse indexing requires a large amount of memory')
def test_load_large_qid():
data = b'\n'.join(('3 qid:{0} 1:0.53 2:0.12\n2 qid:{0} 1:0.13 2:0.1'.format(i).encode() for i in range(1, ((40 * 1000) * 1000))))
(X, y, qid) = load_svmlight_file(BytesIO(data), query_id=True)
... |
def process_audio_files(queue):
while (not queue.empty()):
(assigned_anno, sample_rate, num_samples, split, shard, num_total_shards) = queue.get()
is_test = (split == 'test')
output_filename_format = ('{}-{:04d}-of-{:04d}.seq.tfrecord' if is_test else '{}-{:04d}-of-{:04d}.tfrecord')
... |
class TriangularModuleMorphism(ModuleMorphism):
def __init__(self, triangular='upper', unitriangular=False, key=None, inverse=None, inverse_on_support=identity, invertible=None):
if (key is not None):
self._key_kwds = {'key': key}
else:
self._key_kwds = {}
if (triangu... |
def check_fn(fn, loc):
try:
source = dedent(''.join(get_source_lines_and_file(fn)[0]))
except (TypeError, IOError):
return
if (source is None):
return
py_ast = ast.parse(source)
if ((len(py_ast.body) == 1) and isinstance(py_ast.body[0], ast.ClassDef)):
raise torch.jit... |
def TranslateXAbs(img, v):
    """Translate `img` horizontally by `v` pixels, flipping the sign with
    probability 0.5.  `v` must lie in [0, 10]."""
    assert (0 <= v <= 10)
    shift = (- v) if (random.random() > 0.5) else v
    # Affine matrix (1, 0, shift, 0, 1, 0) shifts along the x axis only.
    return img.transform(img.size, PIL.Image.AFFINE, (1, 0, shift, 0, 1, 0))
class BuildEnvironment(object):
def __init__(self):
self._temp_dir = TempDirectory(kind='build-env')
self._temp_dir.create()
def path(self):
return self._temp_dir.path
def __enter__(self):
self.save_path = os.environ.get('PATH', None)
self.save_pythonpath = os.environ... |
def create_function_list(function_spaces: List[fenics.FunctionSpace]) -> List[fenics.Function]:
    """Instantiate one `fenics.Function` per space, preserving input order."""
    return [fenics.Function(space) for space in function_spaces]
class PriorLatentPolicy(ExplorationPolicy):
def __init__(self, policy, prior, unconditional=False, steps_between_sampling=100):
self.policy = policy
self.prior = prior
self.unconditional = unconditional
self.steps_between_sampling = steps_between_sampling
self.fixed_latent = ... |
def _args_to_kwargs_xdist(args, kwargs, metric, func_name):
if (not args):
return kwargs
if (callable(metric) and (metric not in [braycurtis, canberra, chebyshev, cityblock, correlation, cosine, dice, euclidean, hamming, jaccard, jensenshannon, kulsinski, mahalanobis, matching, minkowski, rogerstanimoto... |
class UtilTest(tf.test.TestCase):
def test_pad_tensor_using_integer_input(self):
t1 = tf.constant([1], dtype=tf.int32)
pad_t1 = shape_utils.pad_tensor(t1, 2)
t2 = tf.constant([[0.1, 0.2]], dtype=tf.float32)
pad_t2 = shape_utils.pad_tensor(t2, 2)
self.assertEqual(2, pad_t1.get... |
def prepare_dirs_loggers(config, script=''):
logFormatter = logging.Formatter('%(message)s')
rootLogger = logging.getLogger()
rootLogger.setLevel(logging.DEBUG)
consoleHandler = logging.StreamHandler(sys.stdout)
consoleHandler.setLevel(logging.DEBUG)
consoleHandler.setFormatter(logFormatter)
... |
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--output-dir', required=True)
args = parser.parse_args()
runner = './rmse.py'
output_dir = args.output_dir
if (not os.path.exists(output_dir)):
os.makedirs(output_dir)
run_args = []
numa_queue = get_numa_queue(2)... |
class QuerySearcherHead(nn.Module):
def __init__(self, neural_ir_model: nn.Module, use_fp16=True):
super(QuerySearcherHead, self).__init__()
self.neural_ir_model = neural_ir_model
self.use_fp16 = use_fp16
def forward(self, seq: Dict[(str, torch.Tensor)], search_type='encode', document_en... |
def class_process(dir_path, class_name):
class_path = os.path.join(dir_path, class_name)
if (not os.path.isdir(class_path)):
return
for file_name in os.listdir(class_path):
video_dir_path = os.path.join(class_path, file_name)
image_indices = []
for image_file_name in os.listd... |
_function
def sub_reflexive_polygons():
result = []
def add_result(subpolygon, ambient):
if (not any((subpolygon.is_isomorphic(p[0]) for p in result))):
result.append((subpolygon, ambient))
for p in subpolygons_of_polar_P2():
add_result(p, polar_P2_polytope())
for p in subpol... |
def handler(event):
input_bucket = event.get('bucket').get('input')
output_bucket = event.get('bucket').get('output')
key = event.get('object').get('key')
download_path = '/tmp/{}-{}'.format(key, uuid.uuid4())
os.makedirs(download_path)
s3_download_begin = datetime.datetime.now()
client.down... |
class FBCacheBase():
PREFIX = 'cache'
FILENAME = 'base'
DATASET = 'base'
def __init__(self):
self.ready = False
self.update_count = 0
self.data = {}
def cache_filename(self):
return join(self.PREFIX, '{}-{}'.format(self.DATASET, self.FILENAME))
def load(self):
... |
class pAdicRingFixedMod(pAdicRingBaseGeneric, pAdicFixedModRingGeneric):
def __init__(self, p, prec, print_mode, names):
pAdicRingBaseGeneric.__init__(self, p, prec, print_mode, names, pAdicFixedModElement)
def _coerce_map_from_(self, R):
if (isinstance(R, pAdicRingFixedMod) and (R.prime() == se... |
class BlockSwap(TransformationBase):
def __init__(self, parser_path, language):
super(BlockSwap, self).__init__(parser_path=parser_path, language=language)
self.language = language
self.transformations = processor_function[language]
processor_map = {'java': self.get_tokens_with_node_... |
def read_json(filename: str) -> bool:
    """Parse `filename` as JSON and return True on success.

    The parsed content is discarded — this only validates that the file
    exists and is well-formed JSON; invalid input raises from json.load.
    """
    with open(filename) as handle:
        json.load(handle)
    return True
def test_all():
    """Discover every `*_test.py` under the `tests` directory and return the suite."""
    loader = unittest.TestLoader()
    return loader.discover('tests', pattern='*_test.py')
def _match_hostname(cert, asserted_hostname):
    """Check `cert` against `asserted_hostname`; log and re-raise on mismatch.

    On CertificateError the offending cert is attached to the exception as
    `_peer_cert` so callers up the stack can inspect what failed.
    """
    try:
        match_hostname(cert, asserted_hostname)
    except CertificateError as err:
        log.error(
            'Certificate did not match expected hostname: %s. Certificate: %s',
            asserted_hostname,
            cert,
        )
        err._peer_cert = cert
        raise
class GCN(torch.nn.Module):
def __init__(self, num_features, num_classes, dim=16, drop=0.5):
super(GCN, self).__init__()
self.conv1 = GCNConv(num_features, dim)
self.conv2 = GCNConv(dim, num_classes)
self.drop = torch.nn.Dropout(p=drop)
def forward(self, x, edge_index):
x... |
_BOX_TD_V3_FEATURE_EXTRACTORS.register('ResNet50Conv5ROIFeatureExtractor')
class ResNet50Conv5ROIFeatureExtractor(nn.Module):
def __init__(self, config, in_channels):
super(ResNet50Conv5ROIFeatureExtractor, self).__init__()
resolution = config.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION
scales = co... |
class ResNeSt(ResNetV1d):
arch_settings = {50: (Bottleneck, (3, 4, 6, 3)), 101: (Bottleneck, (3, 4, 23, 3)), 152: (Bottleneck, (3, 8, 36, 3)), 200: (Bottleneck, (3, 24, 36, 3))}
def __init__(self, groups=1, base_width=4, radix=2, reduction_factor=4, avg_down_stride=True, **kwargs):
self.groups = groups
... |
def linear_normalize(weights):
    """Clamp `weights` to be non-negative and normalize them to sum to 1.

    Returns an all-zero tensor when the clamped sum is numerically zero
    (<= 1e-08), avoiding division by ~0.
    """
    weights = torch.max(weights, torch.zeros_like(weights))
    # Hoisted: the original computed torch.sum(weights) twice.
    total = torch.sum(weights)
    if total > 1e-08:
        return weights / total
    return torch.zeros_like(weights)
class BaseFacade(Parent, UniqueRepresentation):
def __init__(self, ring):
Parent.__init__(self, facade=ring, category=Rings())
self._ring = _get_base_ring(ring)
self.register_embedding(self.Hom(self._ring, Sets())((lambda x: x)))
def __repr__(self):
return 'BaseFacade({})'.format... |
def open_api_2_user_form_with_file_parameters(open_api_2_user_form_parameters):
    """Return a new parameter list extended with a required `scan` file upload.

    The input list is not mutated; the extra entry is an OpenAPI 2
    `formData` parameter of type `file`.
    """
    file_param = {
        'in': 'formData',
        'name': 'scan',
        'required': True,
        'type': 'file',
    }
    return [*open_api_2_user_form_parameters, file_param]
def _format(val: Any, output_format: str='standard', errors: str='coarse') -> Any:
val = str(val)
result: Any = []
if (val in NULL_VALUES):
return [np.nan]
if (not validate_es_ccc(val)):
if (errors == 'raise'):
raise ValueError(f'Unable to parse value {val}')
error_re... |
class PegasusTokenizerFast(ReformerTokenizerFast):
offset = 103
vocab_files_names = VOCAB_FILES_NAMES
pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
slow_tokenizer_class = PegasusTokenizer
def _special_token_mask(self, seq):... |
class NotebookTrainingTracker(NotebookProgressBar):
def __init__(self, num_steps, column_names=None):
super().__init__(num_steps)
self.inner_table = (None if (column_names is None) else [column_names])
self.child_bar = None
def display(self):
self.html_code = html_progress_bar(se... |
class GetWeightAndActivation():
def __init__(self, model, layers):
self.model = model
self.hooks = {}
self.layers_names = layers
self.model.eval()
self._register_hooks()
def _get_layer(self, layer_name):
layer_ls = layer_name.split('/')
prev_module = self.... |
def calc_qoe(vid_bitrate, act_tiles, frame_nos, chunk_frames, width, height, nrow_tiles, ncol_tiles, player_width, player_height):
qoe = 0
prev_qoe_1 = 0
weight_1 = 1
weight_2 = 1
weight_3 = 1
tile_width = (width / ncol_tiles)
tile_height = (height / nrow_tiles)
for i in range(len(chunk_... |
def register_Ns3HtRateInfo_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::HtRateInfo const &', 'arg0')])
cls.add_instance_attribute('adjustedRetryCount', 'uint32_t', is_const=False)
cls.add_instance_attribute('attemptHist', 'uint64_t', is_const=False)
cls.add_ins... |
class NamedVideoStorage(NamedStorage):
def source(self, sc, streams):
return sc.sources.FrameColumn(table_name=[s._name for s in streams], column_name=['frame' for s in streams])
def sink(self, sc, op, streams):
return sc.sinks.FrameColumn(columns={'frame': op}, table_name=[s._name for s in stre... |
def pre_user_cohort_triplet(cad_prescription_taken_by_patient, cad_user_cohort_rx, cad_user_cohort_dx, save_cohort_outcome, cad_user_cohort_demo, out_file_root):
cohorts_size = dict()
for (drug, taken_by_patient) in tqdm(cad_user_cohort_rx.items()):
file_x = '{}/{}.pkl'.format(out_file_root, drug)
... |
def gen_docker_image(container_type):
    """Return (image path, job name) for `container_type`.

    The image path is `AWS_DOCKER_HOST/pytorch/<container_type>` and the
    job name is `docker-<container_type>`.
    """
    image_path = '/'.join([AWS_DOCKER_HOST, 'pytorch', container_type])
    job_name = f'docker-{container_type}'
    return (image_path, job_name)
class BertCoQA(BaseModel):
def __init__(self, vocab=None, bert_dir='', answer_verification=True):
super(BertCoQA, self).__init__(vocab)
self.bert_dir = bert_dir
self.activation = 'relu'
self.answer_verification = answer_verification
self.beta = 100
self.n_layers = 2
... |
class BartOnnxConfig(OnnxSeq2SeqConfigWithPast):
def inputs(self) -> Mapping[(str, Mapping[(int, str)])]:
if (self.task in ['default', 'seq2seq-lm']):
common_inputs = OrderedDict([('input_ids', {0: 'batch', 1: 'encoder_sequence'}), ('attention_mask', {0: 'batch', 1: 'encoder_sequence'})])
... |
def add_flops_mask(module, mask):
    """Attach `mask` as `__mask__` on every Conv2d submodule of `module`.

    Non-Conv2d submodules are left untouched; `module.apply` visits the
    module itself and all descendants.
    """
    def _attach(submodule):
        if isinstance(submodule, torch.nn.Conv2d):
            submodule.__mask__ = mask

    module.apply(_attach)
def _quantize_per_tensor(x, scale, zero_point, quant_min, quant_max):
return ((x / scale) + zero_point).round().clamp(quant_min, quant_max) |
def segm2json(dataset, results):
json_results = []
for idx in range(len(dataset)):
img_id = dataset.img_ids[idx]
(det, seg) = results[idx]
for label in range(len(det)):
bboxes = det[label]
segms = seg[label]
for i in range(bboxes.shape[0]):
... |
def create_parser():
parser = argparse.ArgumentParser()
parser.add_argument('--device', default='cuda', type=str, help='Name of device to use for tensor computations (cuda/cpu)')
parser.add_argument('--res_dir', default='./results', type=str)
parser.add_argument('--ex_name', default='Debug', type=str)
... |
class ColoredPermutations(Parent, UniqueRepresentation):
def __init__(self, m, n):
if (m <= 0):
raise ValueError('m must be a positive integer')
self._m = ZZ(m)
self._n = ZZ(n)
self._C = IntegerModRing(self._m)
self._P = Permutations(self._n)
if ((self._m ... |
def test():
train_dir = FLAGS.train_dir
if (not tf.gfile.IsDirectory(train_dir)):
tf.logging.info('Training directory %s not found.', train_dir)
return
g = tf.Graph()
with g.as_default():
network_fn = nets_factory.get_network_fn(FLAGS.model_name, num_classes=FLAGS.NUM_CLASSES, is... |
class ModelConverterBase(object):
def __init__(self, converters, use_mro=True):
self.use_mro = use_mro
if (not converters):
converters = {}
for name in dir(self):
obj = getattr(self, name)
if hasattr(obj, '_converter_for'):
for classname in... |
def partition_profiled_graph(graph, model, nparts, partitioning_method, node_weight_function, edge_weight_function, use_virtual_stages, use_layers_only_graph, METIS_opt, acyclic_opt, binpack_opt, mpipe_opt):
partitioning_method = partitioning_method.lower()
if (partitioning_method == 'metis'):
print('-I... |
def messages_path():
    """Return the directory for message catalogs.

    Prefers the `locale` directory next to this module; falls back to the
    system-wide /usr/share/locale when that directory does not exist.
    """
    here = os.path.dirname(os.path.abspath(__file__))
    candidate = os.path.join(here, 'locale')
    if os.path.exists(candidate):
        return candidate
    return '/usr/share/locale'
class GINConv(MessagePassing):
def __init__(self, emb_dim):
super(GINConv, self).__init__(aggr='add')
self.mlp = torch.nn.Sequential(torch.nn.Linear(emb_dim, emb_dim), torch.nn.BatchNorm1d(emb_dim), torch.nn.ReLU(), torch.nn.Linear(emb_dim, emb_dim))
self.eps = torch.nn.Parameter(torch.Tenso... |
def test_one2many_match_ic13():
gt_id = 0
recall_mat = np.array([[1, 0], [0, 0]])
precision_mat = np.array([[1, 0], [0, 0]])
recall_thr = 0.5
precision_thr = 0.5
gt_match_flag = [0, 0]
det_match_flag = [0, 0]
det_dont_care_index = []
with pytest.raises(AssertionError):
gt_id_... |
class SCVI(RNASeqMixin, VAEMixin, ArchesMixin, UnsupervisedTrainingMixin, BaseMinifiedModeModelClass):
_module_cls = VAE
def __init__(self, adata: AnnData, n_hidden: int=128, n_latent: int=10, n_layers: int=1, dropout_rate: float=0.1, dispersion: Literal[('gene', 'gene-batch', 'gene-label', 'gene-cell')]='gene'... |
def resnet100(use_se=False):
    """Build a 100-layer ResNet of IRBlocks with stage depths [3, 13, 30, 3].

    Args:
        use_se: forwarded to ResNet; presumably enables squeeze-and-excitation
            blocks — confirm against the ResNet definition.
    Returns:
        The constructed ResNet model.
    """
    model = ResNet(IRBlock, [3, 13, 30, 3], num_layers=100, use_se=use_se)
    return model
def test_download_missing_ner_model():
with tempfile.TemporaryDirectory(dir=TEST_WORKING_DIR) as test_dir:
stanza.download('en', model_dir=test_dir, processors='tokenize', package='combined', verbose=False)
pipe = stanza.Pipeline('en', model_dir=test_dir, processors='tokenize,ner', package={'ner': '... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.