code stringlengths 101 5.91M |
|---|
class ActionMap(gym.Wrapper):
def __init__(self, game, cfg: Config, bordered_observation=False):
if isinstance(game, str):
self.env = gym.make(game)
else:
self.env = game
self.env.unwrapped.adjust_param(cfg)
gym.Wrapper.__init__(self, self.env)
assert ... |
def exponential_integral_1(x, n=0):
if isinstance(x, Expression):
if x.is_trivial_zero():
return Infinity
else:
raise NotImplementedError(('Use the symbolic exponential integral ' + 'function: exp_integral_e1.'))
if (not x):
return Infinity
try:
prec =... |
class IBMCloudServer(Server):
def __init__(self, vpc_backend, region_tag, instance_id, vsi, log_dir=None):
super().__init__(region_tag, log_dir=log_dir)
assert (self.region_tag.split(':')[0] == 'ibmcloud')
self.vpc_backend = vpc_backend
self.instance_id = instance_id
self.vsi... |
class SimpleEstimator(BaseEstimator):
    """Minimal scikit-learn-style estimator that memorizes its training data."""

    def fit(self, X, y=None):
        """Store the training data and its feature count.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data to memorize.
        y : ignored
            Present for scikit-learn API compatibility.

        Returns
        -------
        self : SimpleEstimator
            The fitted estimator (sklearn convention).
        """
        self.X_ = X
        # Bug fix: the feature count is the second axis of X; X.shape[0]
        # is the number of samples, not features.
        self.n_features_ = X.shape[1]
        return self
def validate_and_save(args, trainer, task, epoch_itr, valid_subsets, end_of_epoch):
num_updates = trainer.get_num_updates()
max_update = (args.max_update or math.inf)
do_save = ((end_of_epoch and (args.save_interval > 0) and ((epoch_itr.epoch % args.save_interval) == 0)) or (num_updates >= max_update) or ((... |
class PairwiseAutoAnnotator(eval_annotators.PairwiseAnnotator):
def __init__(self, annotators_config: Union[(eval_utils.AnyPath, list[dict[(str, Any)]])]='annotator_pool_v0', input_keys: Sequence[str]=('instruction', 'input'), p_label_flip: Optional[float]=None, base_dir: eval_utils.AnyPath=ANNOTATORS_CONFIG_DIR, o... |
def load_data(dirname):
raw = [line[:(- 1)] for line in open(dirname, encoding='utf-8')]
data = []
for line in raw:
if (line == ''):
data.append([])
else:
data[(- 1)].append(line)
x = []
emo = []
act = []
action_list = {}
for session in data:
... |
def get_top8_classes_scannet():
    """Return a defaultdict mapping ScanNet class keys to the top-8 class names.

    Any key not explicitly listed maps to 'other' via the default factory.

    NOTE(review): every key below is the empty string — almost certainly
    extraction/garbling damage (the original presumably used ScanNet label
    IDs as keys). As written, each assignment overwrites the previous one,
    so the returned dict effectively contains only {'': 'bookshelf'}.
    Restore the real keys from the upstream source before relying on this.
    """
    top = collections.defaultdict((lambda : 'other'))
    top[''] = 'display'
    top[''] = 'table'
    top[''] = 'bathtub'
    top[''] = 'trashbin'
    top[''] = 'sofa'
    top[''] = 'chair'
    top[''] = 'cabinet'
    top[''] = 'bookshelf'
    return top
def winograd_convolution(dace_session, tf_node):
debugNodes = []
state = dace_session.state
add_cublas_cusolver(dace_session.graph)
dace_session.graph.add_constant('Btrans', bt)
dace_session.graph.add_constant('B', b)
bNode = 'B'
bTransposeNode = 'Btrans'
dace_session.graph.add_constant(... |
def train(model, device, train_loader, optimizer):
model.train()
for (batch_idx, (data, target)) in enumerate(train_loader):
(data, target) = (data.to(device), target.to(device))
optimizer.zero_grad()
output = model(data)
output = torch.nn.functional.log_softmax(output, dim=1)
... |
class unit_gcn(nn.Module):
def __init__(self, in_channels, out_channels, A, coff_embedding=4, adaptive=True, residual=True):
super(unit_gcn, self).__init__()
inter_channels = (out_channels // coff_embedding)
self.inter_c = inter_channels
self.out_c = out_channels
self.in_c = ... |
def assert_iterator_finished(iter):
    """Assert that the given iterator is exhausted.

    Parameters
    ----------
    iter : iterator
        Iterator expected to raise StopIteration on the next call.
        (Name kept for backward compatibility even though it shadows the
        builtin ``iter``.)

    Raises
    ------
    AssertionError
        If the iterator yields another value.
    """
    # Bug fix: the original used a bare `assert False`, which is silently
    # stripped when Python runs with -O, making the check a no-op. Raise
    # AssertionError explicitly so the check always fires.
    sentinel = object()
    if next(iter, sentinel) is not sentinel:
        raise AssertionError('iterator yielded a value; expected it to be exhausted')
class FreeSemigroup(UniqueRepresentation, Parent):
def __init__(self, alphabet=('a', 'b', 'c', 'd')):
self.alphabet = alphabet
Parent.__init__(self, category=Semigroups().FinitelyGenerated())
def _repr_(self):
return ('An example of a semigroup: the free semigroup generated by %s' % (sel... |
class Walker2DEnv(MujocoEnv, Serializable):
FILE = 'walker2d.xml'
('ctrl_cost_coeff', type=float, help='cost coefficient for controls')
def __init__(self, ctrl_cost_coeff=0.01, *args, **kwargs):
self.ctrl_cost_coeff = ctrl_cost_coeff
super(Walker2DEnv, self).__init__(*args, **kwargs)
... |
class CosineLrUpdaterHook(LrUpdaterHook):
def __init__(self, target_lr=0, **kwargs):
self.target_lr = target_lr
super(CosineLrUpdaterHook, self).__init__(**kwargs)
def get_lr(self, trainer, base_lr):
if self.by_epoch:
progress = trainer.epoch
max_progress = traine... |
class DummyObservationScaler(ObservationScaler):
def fit_with_transition_picker(self, episodes: Sequence[EpisodeBase], transition_picker: TransitionPickerProtocol) -> None:
pass
def fit_with_trajectory_slicer(self, episodes: Sequence[EpisodeBase], trajectory_slicer: TrajectorySlicerProtocol) -> None:
... |
def getTriples(input_file):
triples = []
lines = []
line_num = (- 1)
base_filename = os.path.basename(input_file)
tokens = []
node_indices = {}
flag_collect_nodes = False
flag_collect_edges = False
with codecs.open(input_file, 'r', 'utf-8') as infile:
for line in infile:
... |
class TestRankHotEncoder(TestCase):
def test_handleNaNvalue(self):
enc = encoders.RankHotEncoder(handle_unknown='value', cols=['none'])
enc.fit(X)
t_f = enc.transform(X)
inv_tf = enc.inverse_transform(t_f)
self.assertEqual((t_f.shape[1] - (X.shape[1] - 1)), len(X.none.unique(... |
def set_precision(precision):
    """Install *precision* as the global Keras mixed-precision policy.

    Logs the resulting compute and variable dtypes so the active policy is
    visible in the run log.
    """
    mp_policy = tf.keras.mixed_precision.Policy(precision)
    tf.keras.mixed_precision.set_global_policy(mp_policy)
    logging.info('Compute dtype: {}'.format(mp_policy.compute_dtype))
    logging.info('Variable dtype: {}'.format(mp_policy.variable_dtype))
def get_os_name():
import platform
basic = os.uname()[0].lower()
if (basic == 'linux'):
dist = platform.libc_ver()
if ((len(dist) == 2) and (len(dist[0]) > 0) and (len(dist[1]) > 0)):
return ('%s-%s' % (dist[0].lower(), dist[1].lower()))
else:
return basic
... |
class Controlling():
mean: np.ndarray
U: np.ndarray
eigenvalues: np.ndarray
__ratio: list
pwl: list
def __init__(self, path: str=None, model=None, device: str=None):
self.path = path
self.device = device
self.model = model
self.model = self.model.to(self.device)
... |
def udep_post_process(ds):
    """Cast the dataset's 'labels' column to a sequence of UDEP class labels."""
    label_feature = Sequence(ClassLabel(names=udep_labels))
    return ds.cast_column('labels', label_feature)
def swa(self, scaler, model, datasets, dataloaders, timer, lossLogger, performanceLogger, cfg):
swa_model = AveragedModel(model)
swa_optimizer = build_optimizer(cfg, model)
swa_scheduler = SWALR(swa_optimizer, swa_lr=cfg.SWA.LR, anneal_epochs=cfg.SWA.EPOCHS, anneal_strategy=cfg.SWA.NAME)
for epoch in ra... |
def dev_process_tx(frame):
    """Record a device-side transmit event.

    Appends the frame to the global ``dev_tx`` log, bumps the per-(DR, freq)
    'sent' counter under ``res_lock``, and prints a one-line trace.
    """
    global dev_tx
    dev_tx.append(frame)
    dr, freq = frame['dr'], frame['freq']
    # Counter table is shared with other threads — guard the increment.
    with res_lock:
        res[dr][freq]['sent'] += 1
    print((' Device: tx on %7.3f MHz, DR%d' % (freq, dr)))
def test_warn_if_metric_bool_data_no_bool():
pairwise_metric = 'rogerstanimoto'
X = np.random.randint(2, size=(5, 2), dtype=np.int32)
msg = f'Data will be converted to boolean for metric {pairwise_metric}'
with pytest.warns(DataConversionWarning, match=msg) as warn_record:
OPTICS(metric=pairwise... |
def parse_doc(doc: spacy.tokens.Doc, disable: Set[str]=None, keep_whitespace: bool=False):
disable = ({'ner', 'parser', 'tagger', 'lemmatizer'} if (disable is None) else disable)
for (position, sent) in enumerate(doc.sents):
parts = defaultdict(list)
for (i, token) in enumerate(sent):
... |
def _gather_clone_loss(clone, num_clones, regularization_losses):
sum_loss = None
clone_loss = None
regularization_loss = None
with tf.device(clone.device):
all_losses = []
clone_losses = tf.get_collection(tf.GraphKeys.LOSSES, clone.scope)
if clone_losses:
clone_loss ... |
def mk_install_tactic_cpp(cnames, path):
component_src_dirs = []
for cname in cnames:
print(('Component %s' % cname))
c = get_component(cname)
component_src_dirs.append(c.src_dir)
h_files_full_path = get_header_files_for_components(component_src_dirs)
generated_file = mk_genfile_... |
def augment(image, label):
    """Apply pad-and-crop plus brightness jitter augmentation to one image.

    Pads/crops to 40x40, random-crops back to 32x32x3, jitters brightness,
    and clips pixel values to [0, 1]. The label passes through unchanged.
    """
    padded = tf.image.resize_with_crop_or_pad(image, 40, 40)
    cropped = tf.image.random_crop(padded, size=[32, 32, 3])
    brightened = tf.image.random_brightness(cropped, max_delta=0.5)
    return (tf.clip_by_value(brightened, 0.0, 1.0), label)
def to_list(obj):
    """Parse a string like '(1.0, 2.0)' into a list of floats.

    Parentheses are stripped anywhere in the string, then the remainder is
    split on commas and each piece converted with float().
    """
    stripped = obj.replace('(', '').replace(')', '')
    return [float(token) for token in stripped.split(',')]
def main():
gui = ti.GUI('mgpcg', res=(N_gui, N_gui))
init()
sum_[None] = 0.0
reduce(r[0], r[0])
initial_rTr = sum_[None]
if use_multigrid:
apply_preconditioner()
else:
z[0].copy_from(r[0])
update_p()
sum_[None] = 0.0
reduce(z[0], r[0])
old_zTr = sum_[None]
... |
def perform_tests_on(fn, value, ngram='chicken', cat='food'):
assert (round(fn(ngram, cat), 4) == value)
assert (round(fn('xxx', cat), 4) == 0)
assert (round(fn('the xxx chicken', cat), 4) == 0)
assert (round(fn('', cat), 4) == 0)
assert (round(fn('', ''), 4) == 0)
with pytest.raises(pyss3.Inval... |
def test_validate_intervals():
annotations.validate_intervals(None)
with pytest.raises(ValueError):
annotations.validate_intervals(np.array([0, 2]))
with pytest.raises(ValueError):
annotations.validate_intervals(np.array([0, (- 2)]))
with pytest.raises(ValueError):
annotations.va... |
def fibonacci_py(v):
    """Naive doubly-recursive Fibonacci in pure Python.

    Deliberately unmemoized (exponential time) — presumably serves as a
    baseline for comparing against compiled/accelerated implementations.
    """
    if v in (0, 1):
        # Base cases: F(0) = 0, F(1) = 1.
        return v
    return fibonacci_py(v - 1) + fibonacci_py(v - 2)
(frozen=True, repr=False)
class Span():
i1 = attr.ib()
i2 = attr.ib()
id = attr.ib()
speaker = attr.ib()
genre = attr.ib()
si = attr.ib(default=None)
yi = attr.ib(default=None)
yi_idx = attr.ib(default=None)
def __len__(self):
return ((self.i2 - self.i1) + 1)
def __repr__... |
class Tokenizer():
def build(sp_model):
if ('sp.20k.model' in sp_model):
return SPTokenizer(sp_model)
elif ('bert' in sp_model):
return PretrainedTokenizer(sp_model)
elif ('split' in sp_model):
return SplitTokenizer()
else:
raise NotImp... |
def flatten(_input):
    """Flatten a tensor to 2-D (batch, features); pass 2-D tensors through.

    The leading (batch) dimension is preserved via -1; all remaining
    dimensions are collapsed into one.
    """
    shape = _input.shape.as_list()
    if len(shape) == 2:
        # Already flat — nothing to do.
        return _input
    flat_dim = np.prod(shape[1:])
    return tf.reshape(_input, [-1, flat_dim])
def DM_21_6_1():
from sage.rings.finite_rings.integer_mod_ring import IntegerModRing as AdditiveCyclic
M = [[8, 17, 20, 2], [9, 16, 4, 15], [11, 5, 10, 6], [14, 1, 3, 13], [18, 19, 12, 7]]
Mb = [[0, 0, 0, 0, 0, 0]]
for (a, b, c, d, e) in zip(*M):
Mb.append([a, b, c, d, e, 0])
Mb.append([... |
(rpacket_spec)
class RPacket(object):
def __init__(self, r, mu, nu, energy, seed, index=0):
self.r = r
self.mu = mu
self.nu = nu
self.energy = energy
self.current_shell_id = 0
self.status = PacketStatus.IN_PROCESS
self.seed = seed
self.index = index
... |
def get_question(problem_list, prob_index):
root = problem_list[prob_index]
if os.path.exists(os.path.join(root, 'question.txt')):
with open(os.path.join(root, 'question.txt')) as f:
question = f.readlines()
else:
print('question prompt not found')
question = ''
quest... |
def register_Ns3CallbackImpl__Void_Unsigned_short_Unsigned_short_Double_Double_Bool_Unsigned_char_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::CallbackImpl< void, unsigned short, unsigned short, double, double, bool, unsigned char, ns3::empty, ns... |
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3Angles_methods(root_module, root_module['ns3::Angles'])
register_Ns3AsciiTraceHelper_methods(root_module, root_module['ns3::AsciiTraceHelper'])
register_Ns3AsciiTraceHelperForDevice_metho... |
def get_model(height, width) -> None:
if check_file(f'../model/float_scale_0.5_{height}_{width}.onnx'):
return
inputs = torch.randn(1, 3, height, width)
net = UNet(3, 2)
net.load_state_dict(torch.load('../model/unet_carvana_scale0.5_epoch2.pth', map_location='cpu'))
torch.onnx.export(net, in... |
class TanhBijector(object):
def __init__(self, epsilon: float=1e-06):
super(TanhBijector, self).__init__()
self.epsilon = epsilon
def forward(x: torch.Tensor) -> torch.Tensor:
return torch.tanh(x)
def atanh(x: torch.Tensor) -> torch.Tensor:
return (0.5 * (x.log1p() - (- x).lo... |
def keras_evaluate(keras_model, eval_dataset_fn, save, keras_model_pkg, validation_metrics):
model_metrics = []
if hasattr(keras_model_pkg, 'eval_metrics_fn'):
metrics_functions = keras_model_pkg.eval_metrics_fn()
for (key, func) in metrics_functions.items():
func.__name__ = key
... |
def save_checkpoint(state, is_best, filename):
    """Serialize *state* to *filename* (via torch + dill).

    When *is_best* is set, also copy the checkpoint to the "best" path
    (filename + constants.BEST_APPEND).
    """
    ch.save(state, filename, pickle_module=dill)
    if is_best:
        best_path = filename + constants.BEST_APPEND
        shutil.copyfile(filename, best_path)
class _OSA_module(nn.Module):
def __init__(self, in_ch, stage_ch, concat_ch, layer_per_block, module_name, SE=False, identity=False, depthwise=False):
super(_OSA_module, self).__init__()
self.identity = identity
self.depthwise = depthwise
self.isReduced = False
self.layers = ... |
def load_model(model, ckpt, session, name):
    """Restore *model*'s variables from checkpoint *ckpt* into *session*.

    Also initializes TF lookup tables and logs how long the restore took.
    Returns the (now-restored) model.
    """
    start_time = time.time()
    model.saver.restore(session, ckpt)
    session.run(tf.tables_initializer())
    elapsed = time.time() - start_time
    utils.print_out((' loaded %s model parameters from %s, time %.2fs' % (name, ckpt, elapsed)))
    return model
def build_wiki_table_index(table_file, index_name='table_index_wikitable_train_jan_13'):
mappings = {'core_entity_n': Elastic.notanalyzed_field(), 'all_entity_n': Elastic.notanalyzed_field(), 'data': Elastic.analyzed_field(), 'caption': Elastic.analyzed_field(), 'secTitle': Elastic.analyzed_field(), 'headings_n': E... |
class SerializableObject(object):
json_obj = {}
typename = None
def __init__(self, json_obj={}, typename=None):
self.json_obj = json_obj
self.typename = typename
def to_json(self):
retval = self.json_obj
retval['dace_unregistered'] = True
return retval
def fro... |
def flops_estimate(fn, *args, **kwargs):
    """Estimate the FLOP count of *fn* applied to the given arguments.

    JIT-compiles *fn*, lowers it for the example inputs, and reads the
    'flops' entry from XLA's cost analysis.

    Parameters
    ----------
    fn : callable
        A JAX-traceable function.
    *args, **kwargs
        Example arguments used to trace/lower *fn*.

    Returns
    -------
    float
        Estimated floating-point operation count.
    """
    # Bug fix: **kwargs were accepted but silently dropped, so any function
    # called with keyword arguments failed to lower. Forward them.
    lowered = jax.jit(fn).lower(*args, **kwargs)
    return lowered.cost_analysis()['flops']
.parametrize('wtype', [torch.float32, torch.complex64])
.parametrize('itype', [torch.float32])
.parametrize('seqlen', [128])
.parametrize('is_variable_C', [False, True])
.parametrize('is_variable_B', [False, True])
def test_mamba_inner_fn(is_variable_B, is_variable_C, seqlen, itype, wtype):
device = 'cuda'
(rto... |
def build_model():
    """Construct the Wavenet model from the global CLI ``args``.

    All architecture hyperparameters come from ``args``; the output is
    fixed at 2 channels and the upsampling factors at [16, 16].
    """
    wavenet_kwargs = dict(
        out_channels=2,
        num_blocks=args.num_blocks,
        num_layers=args.num_layers,
        residual_channels=args.residual_channels,
        gate_channels=args.gate_channels,
        skip_channels=args.skip_channels,
        kernel_size=args.kernel_size,
        cin_channels=args.cin_channels,
        upsample_scales=[16, 16],
    )
    return Wavenet(**wavenet_kwargs)
def merge_required_resources(lhs, rhs):
if (not lhs):
return (dict() if (rhs is None) else rhs)
if (not rhs):
return (dict() if (lhs is None) else lhs)
merged_resources = dict()
if (lhs.get(NOISE, False) or rhs.get(NOISE, False)):
merged_resources[NOISE] = True
if (lhs.get(ST... |
class PeftType(str, enum.Enum):
    """Enumeration of supported parameter-efficient fine-tuning (PEFT) methods.

    Subclassing ``str`` makes each member compare equal to, and serialize
    as, its plain string value (e.g. for JSON configs).
    """
    PROMPT_TUNING = 'PROMPT_TUNING'
    P_TUNING = 'P_TUNING'
    PREFIX_TUNING = 'PREFIX_TUNING'
    LORA = 'LORA'
    ADALORA = 'ADALORA'
    ADAPTION_PROMPT = 'ADAPTION_PROMPT'
class ProgressBar():
def __init__(self, total: int, length: int=40, decimals: int=1, fill: str='='):
self.total = total
self.length = length
self.decimals = decimals
self.fill = fill
def print(self, iteration, prefix, suffix, end=''):
percent = (('{0:.' + str(self.decimal... |
class TestVsNumpyNorm():
_sparse_types = (scipy.sparse.bsr_matrix, scipy.sparse.coo_matrix, scipy.sparse.csc_matrix, scipy.sparse.csr_matrix, scipy.sparse.dia_matrix, scipy.sparse.dok_matrix, scipy.sparse.lil_matrix)
_test_matrices = ((np.arange(9) - 4).reshape((3, 3)), [[1, 2, 3], [(- 1), 1, 4]], [[1, 0, 3], [... |
def siamese_core(support_embeddings, query_embeddings, support_labels, query_labels, ways):
support_embeddings = _ordered_tensor(support_embeddings, support_labels, ways)
(support_embeddings, query_embeddings) = (support_embeddings.unsqueeze(0), query_embeddings.unsqueeze(1))
z = (support_embeddings * query... |
class CfgNode(_CfgNode):
def merge_from_file(self, cfg_filename: str, allow_unsafe: bool=True) -> None:
assert PathManager.isfile(cfg_filename), f"Config file '{cfg_filename}' does not exist!"
loaded_cfg = _CfgNode.load_yaml_with_base(cfg_filename, allow_unsafe=allow_unsafe)
loaded_cfg = typ... |
def main():
    """CLI entry point: preprocess NaturalQuestions train and dev splits.

    Expects a single positional argument, the data directory.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('data_dir', type=str)
    parsed = arg_parser.parse_args()
    for split in ('train', 'dev'):
        prepro_naturalquestions(parsed.data_dir, split)
class NameValidator(object):
defaultexcludelist = ['return', 'file', 'print']
defaultdeletechars = set("~!#$%^&*()-=+~\\|]}[{';: /?.>,<")
def __init__(self, excludelist=None, deletechars=None, case_sensitive=None, replace_space='_'):
if (excludelist is None):
excludelist = []
exc... |
def exp(solvers, penalty, single_target, n_samples=30000, max_iter=20, dataset='rcv1', n_jobs=1, skip_slow=False):
dtypes_mapping = {'float64': np.float64, 'float32': np.float32}
if (dataset == 'rcv1'):
rcv1 = fetch_rcv1()
lbin = LabelBinarizer()
lbin.fit(rcv1.target_names)
X = r... |
class RandomApply(RandomTransforms):
    """Apply a whole list of transforms with probability ``p``.

    With probability 1 - p the input image is returned untouched;
    otherwise every transform is applied in order.
    """

    def __init__(self, transforms, p=0.5):
        super(RandomApply, self).__init__(transforms)
        self.p = p

    def __call__(self, img):
        # Skip the entire pipeline (1 - p) of the time.
        if random.random() > self.p:
            return img
        result = img
        for transform in self.transforms:
            result = transform(result)
        return result
.parametrize('extension', ['.protobuf', '.h5'])
.parametrize('file_format', ['file_io', 'byte_io', 'str'])
def test_module_load_save_parameter_file_io(extension, file_format):
module_creator = ModuleCreator(TSTNetNormal(), [(4, 3, 32, 32), (4, 3, 32, 32)])
variable_inputs = module_creator.get_variable_inputs()
... |
class QiskitEncodingCircuit(EncodingCircuitBase):
def __init__(self, qiskit_circuit: Union[(BlueprintCircuit, Callable, QuantumCircuit)], mode: str='auto', decompose: bool=False, feature_label: str='x', parameter_label: str='p', **kwargs) -> None:
if callable(qiskit_circuit):
self._qiskit_circui... |
.spark
.parametrize('row_count', [3, 2, 1])
.parametrize('column_count', [11, 1, 5])
.usefixtures('interactions_spark')
def test_CSRConverter_user_column_counts_exception(row_count, column_count, interactions_spark):
with pytest.raises(ValueError):
CSRConverter(first_dim_column='user_id', second_dim_column=... |
def plot_radius(problem, state):
import matplotlib.pyplot as plt
from sfepy.postprocess.time_history import extract_time_history
(ths, ts) = extract_time_history('unit_ball.h5', 'p e 0')
p = ths['p'][0]
L = (1.0 + ts.times[:p.shape[0]])
L2 = (1.0 + nm.linspace(ts.times[0], ts.times[(- 1)], 1000)... |
def ray_gather(notification_address, world_size, object_size):
    """Benchmark a Ray gather: all actors prepare objects, actor 0 collects.

    Returns whatever the head actor's ``get_objects`` task produces.
    """
    pool = RayBenchmarkActorPool(notification_address, world_size, object_size)
    refs = pool.prepare_objects()
    head_actor = pool[0]
    return ray.get(head_actor.get_objects.remote(refs))
def test_bytestring():
    """Bytestring awkward arrays should round-trip to NumPy unchanged."""
    payload = [b'foo', b'bar', b'catastrophic', b'\x03\x07']
    source = ak.Array(payload)
    result = source.to_numpy(False)
    expected = np.array(payload)
    assert (result.dtype == expected.dtype)
    np.testing.assert_equal(result, expected)
def MatrixGroup(*gens, **kwds):
if isinstance(gens[(- 1)], dict):
kwds.update(gens[(- 1)])
gens = gens[:(- 1)]
check = kwds.get('check', True)
if (len(gens) == 1):
if isinstance(gens[0], (list, tuple)):
gens = list(gens[0])
else:
try:
g... |
class SphereObjective(OptimizationFunction):
def __init__(self, radius, r0):
self.radius = radius
self.r0 = r0
def calculate_gradient(self, param: Parametrization) -> np.array:
vec = param.get_structure()
return ((- 2) * (vec - self.r0))
def calculate_objective_function(self,... |
class DenoiseTest(ReBenchTestCase):
def setUp(self):
super(DenoiseTest, self).setUp()
self._set_path(__file__)
def test_minimize(self):
result = minimize_noise(False, self.ui, True)
self.assertIsInstance(result.succeeded, bool)
self.assertIsInstance(result.use_nice, bool)... |
def index_put(g, self, indices_list_value, values, accumulate):
if (sym_help._operator_export_type == torch.onnx.OperatorExportTypes.ONNX_ATEN_FALLBACK):
indices_list = sym_help._unpack_list(indices_list_value)
args = (([self] + indices_list) + [values, accumulate])
return g.op('ATen', *args... |
class NuSVR(RegressorMixin, BaseLibSVM):
_impl = 'nu_svr'
_parameter_constraints: dict = {**BaseLibSVM._parameter_constraints}
for unused_param in ['class_weight', 'epsilon', 'probability', 'random_state']:
_parameter_constraints.pop(unused_param)
def __init__(self, *, nu=0.5, C=1.0, kernel='rbf... |
def weighted_alpha_calc(classes, table, P, TOP, POP, weight):
p_e = 0
p_a = 0
population = list(POP.values())[0]
epsi = (1 / (2 * population))
try:
w_max = max(map((lambda x: max(x.values())), weight.values()))
for i in classes:
for j in classes:
v_i_j = (... |
class FunctionBasedPyroModel(PyroSviTrainMixin, PyroSampleMixin, BaseModelClass):
def __init__(self, adata: AnnData):
clear_param_store()
super().__init__(adata)
self.module = FunctionBasedPyroModule(n_input=adata.n_vars, n_hidden=32, n_latent=5, n_layers=1)
self._model_summary_strin... |
class AdjDivGradTerm(Term):
name = 'dw_adj_div_grad'
arg_types = ('material_1', 'material_2', 'virtual', 'parameter')
arg_shapes = {'material_1': '1, 1', 'material_2': '1, 1', 'virtual': ('D', None), 'parameter': 'D'}
function = staticmethod(terms.term_ns_asm_div_grad)
def get_fargs(self, mat1, mat2... |
def model_prediction(model, inputs):
    """Run the wrapped model on *inputs* and summarize the output.

    Returns a tuple of (raw probability array, argmax class index, and the
    probability array rendered as a single-line string).
    """
    probabilities = model.model.predict(inputs)
    top_class = np.argmax(probabilities)
    # Strip newlines so the array prints on one log line.
    flat_repr = np.array2string(probabilities).replace('\n', '')
    return (probabilities, top_class, flat_repr)
def get_weights_inferable_quantizer_kwargs(node_qc: NodeWeightsQuantizationConfig) -> Dict[(str, Any)]:
if (not isinstance(node_qc, NodeWeightsQuantizationConfig)):
Logger.error(f'Non-compatible node quantization config was given for quantization target Weights.')
quantization_method = node_qc.weights_q... |
class PartitioningTask(ABC):
def __init__(self, args) -> None:
pass
def batch_dim(self) -> int:
pass
def get_model(self, args) -> torch.nn.Module:
pass
def get_input(self, args, analysis=False):
pass
def register_functions(self):
def update_analysis_kwargs(self, a... |
def return_sdg_matrix(size):
rows = tf.random.normal(shape=[size[0]])
rows = tf.abs(rows)
rows = tf.sort(rows, direction='DESCENDING')
cols = ([0.0] * size[0])
cols[0] = rows[0].numpy()
(cols, rows)
operator_1 = LinearOperatorToeplitz(cols, rows)
operator_1 = operator_1.to_dense()
ut... |
def config():
parser = argparse.ArgumentParser()
parser.add_argument('--retrieval_stage', type=int, choices=(0, 1, 2), help='which retrieval stage to run for tldrstage 0: build retrieval indexstage 1: stage 1 retrieval that retrieves the bash commandstage 2: stage 2 retrieval that retrieves the paragraphs')
... |
def GetGraphPngSafe(func, *args, **kwargs):
try:
graph = func(*args, **kwargs)
if (not isinstance(graph, pydot.Dot)):
raise ValueError('func is expected to return pydot.Dot')
return graph.create_png()
except Exception as e:
logger.error('Failed to draw graph: {}'.form... |
def evaluate(data, hidden, args):
bdata = batchify(torch.LongTensor(data), test_batch_size, args)
(source, targets) = get_batch(bdata, 0, args, evaluation=True)
(loutput, lhidden) = model(source, hidden)
output_flat = loutput.view((- 1), ntokens)
total_loss = criterion(output_flat, targets).data
... |
def _lazy_new(cls, *args, **kwargs):
    """Construct a CUDA object, lazily initializing the CUDA state first."""
    # Make sure CUDA is initialized before the first allocation happens.
    _lazy_init()
    # NOTE(review): super() is spelled explicitly against _CudaBase —
    # presumably because this function is installed as __new__ on several
    # _CudaBase subclasses; confirm at the assignment site.
    return super(_CudaBase, cls).__new__(cls, *args, **kwargs)
class TFSegformerSelfOutput(tf.keras.layers.Layer):
def __init__(self, config: SegformerConfig, hidden_size: int, **kwargs):
super().__init__(**kwargs)
self.dense = tf.keras.layers.Dense(hidden_size, name='dense')
self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)
def cal... |
class XiangZhangDataset(TextClassification):
def load_train_data(self):
assert (self.dirname in self.root)
return self.load_data(os.path.join(self.root, 'train.csv'))
def load_test_data(self):
assert (self.dirname in self.root)
return self.load_data(os.path.join(self.root, 'test.... |
class ProductProjectiveSpaces_point_field(ProductProjectiveSpaces_point_ring):
def intersection_multiplicity(self, X):
from sage.schemes.product_projective.space import is_ProductProjectiveSpaces
if is_ProductProjectiveSpaces(self.codomain()):
raise TypeError('this point must be a point ... |
def test_strings():
assert ak.to_layout('hello').is_equal_to(ak.contents.NumpyArray(np.array([104, 101, 108, 108, 111], dtype=np.uint8), parameters={'__array__': 'char'}))
assert ak.to_layout('hello', string_policy='promote').is_equal_to(ak.contents.ListOffsetArray(ak.index.Index64([0, 5]), ak.contents.NumpyArr... |
def is_depth_k_lca_neighbor(dom_elem, potential_neighbor, k, cache=None):
if (not potential_neighbors(dom_elem, potential_neighbor)):
return False
key = (dom_elem.ref, potential_neighbor.ref)
if (cache is not None):
if (key in cache):
lca_depth = cache[key]
else:
... |
def main():
parser = ArgumentParser()
parser.add_argument('img', help='Image file')
parser.add_argument('config', help='Config file')
parser.add_argument('checkpoint', help='Checkpoint file')
parser.add_argument('--device', default='cuda:0', help='Device used for inference')
parser.add_argument(... |
def create_inputs(rng, x_shape, batch_axis, channel_axis, no_scale, no_bias, broadcast_affine_params):
x = np.array(rng.randn(*x_shape).astype(np.float32))
channel_axis += (len(x_shape) * (channel_axis < 0))
if broadcast_affine_params:
affine_param_shape = tuple([(x_shape[i] if (i in [channel_axis])... |
def try_run_setup(*, with_binary, with_cuda):
global _BUILD_ATTEMPTS
_BUILD_ATTEMPTS += 1
try:
print(f'Attempting to build SHAP: with_binary={with_binary!r}, with_cuda={with_cuda!r} (Attempt {_BUILD_ATTEMPTS})')
run_setup(with_binary=with_binary, with_cuda=with_cuda)
except Exception as ... |
def get_tokens(tokens):
index_of_separator = set([i for (i, x) in enumerate(tokens) if (x.text == '[DQSEP]')])
assert (len(index_of_separator) <= 1)
if (len(index_of_separator) == 0):
tokens = [tokens]
else:
index_of_separator = list(index_of_separator)[0]
tokens = [tokens[:index... |
class CalculateWindowedFFT(Benchmark):
def setup(self):
rng = np.random.default_rng(5678)
x = rng.standard_normal((2 ** 20))
y = rng.standard_normal((2 ** 20))
self.x = x
self.y = y
def time_welch(self):
signal.welch(self.x)
def time_csd(self):
signal.... |
def BgzfBlocks(handle):
    """Yield (start_offset, block_length, data_start, data_len) per BGZF block.

    ``start_offset``/``block_length`` describe the compressed block within
    the file; ``data_start``/``data_len`` describe the corresponding span of
    decompressed data. Iteration ends when ``_load_bgzf_block`` raises at
    end-of-file (presumably — confirm against its implementation).
    """
    data_start = 0
    while True:
        block_start = handle.tell()
        block_length, data = _load_bgzf_block(handle)
        uncompressed_len = len(data)
        yield (block_start, block_length, data_start, uncompressed_len)
        data_start += uncompressed_len
class Page(Base):
    """SQLAlchemy ORM model for rows of the ``page`` table."""
    __tablename__ = 'page'
    # Surrogate integer primary key.
    id = Column(Integer, primary_key=True)
    name = Column(String)
    # `doc` vs `raw`: presumably processed text vs. original source text —
    # TODO(review): confirm against the code that populates these columns.
    doc = Column(Text)
    raw = Column(Text)
class Max(Function):
node_type = 'goos.function.max'
def __init__(self, funs: List[Function]) -> None:
super().__init__(funs)
def eval(self, input_vals: List[goos.NumericFlow]) -> goos.NumericFlow:
arr = np.stack([node.array for node in input_vals], axis=0)
max_arr = np.max(arr, axis... |
class NoTreatmentPolicy(utils.Policy):
    """Baseline policy that always selects action 0 ("no treatment")."""
    def __init__(self):
        # NOTE(review): relies on a module-level `env` being defined at
        # construction time — confirm against the defining module.
        super(NoTreatmentPolicy, self).__init__(env)
    def sample_action(self, obs):
        # The observation is deliberately ignored.
        return 0
def get_name(sv, rules, model, enc_dim, dim, seed):
    """Build the experiment-name path for a run configuration.

    The integer ``seed`` encodes two values: data seed (seed % 5) and
    search seed (seed // 5). 'Monolithic' models get 2 * rules attention
    heads; all other models get 2.
    """
    s, d = divmod(seed, 5)
    heads = (2 * rules) if ('Monolithic' in model) else 2
    return f'Sequence_10/Search-Version_{sv}/Data-Seed_{d}/GT_Rules_{rules}/{model}_{enc_dim}_{dim}_{heads}_{rules}_{s}'
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.