code stringlengths 101 5.91M |
|---|
def get_constraint_permutation(tab1_sets_by_columns: List[Set], result2: List[Tuple]):
num_cols = len(result2[0])
perm_constraints = [{i for i in range(num_cols)} for _ in range(num_cols)]
if (num_cols <= 3):
return product(*perm_constraints)
for _ in range(20):
random_tab2_row = random.... |
def test_input_shap_values_type_2():
rs = np.random.RandomState(42)
emsg = 'When passing several Explanation objects, they must all have the same number of feature columns!'
with pytest.raises(DimensionError, match=emsg):
shap.plots.bar({'t1': shap.Explanation(values=rs.randn(40, 10), base_values=(n... |
class RobertaForMaskedLMwithLoss(RobertaForMaskedLM):
def __init__(self, config):
super().__init__(config)
def forward(self, input_ids=None, attention_mask=None, token_type_ids=None, position_ids=None, head_mask=None, masked_lm_labels=None):
assert (attention_mask is not None)
outputs = ... |
class L1BatchNorm(nn.Module):
axis: int = (- 1)
momentum: float = 0.99
epsilon: float = 1e-05
dtype: Dtype = jnp.float32
use_bias: bool = True
use_scale: bool = True
bias_init: Callable[([PRNGKey, Shape, Dtype], Array)] = initializers.zeros
scale_init: Callable[([PRNGKey, Shape, Dtype], ... |
class JobExecutor(ExecutorBase):
def __init__(self, n_workers: int, polling_frequency=0.5, verbose=False):
super().__init__(n_workers, verbose=verbose)
self._creation_time = time.time()
self._polling_delay = polling_frequency
self._executor = futures.ProcessPoolExecutor(n_workers)
... |
def main():
global args, cls_funcs
args = parse_args()
func = cls_funcs[args.cls]
func()
anet_detection = ActivityNetLocalization(args.gt, args.det_output, tiou_thresholds=np.linspace(0.5, 0.95, 10), verbose=True)
(mAP, average_mAP) = anet_detection.evaluate()
print(f'''[RESULTS] Performance... |
class SoftCrossEntropy(nn.Module):
def __init__(self):
super(SoftCrossEntropy, self).__init__()
self.criterion = torch.nn.KLDivLoss(reduction='none')
def forward(self, input, target_prob, target_lens):
assert (input.shape == target_prob.shape)
assert (input.shape[0] == target_len... |
class Float32FromUInt8(iaa.Augmenter):
def __init__(self, name=None, deterministic=False, random_state=None):
super(Float32FromUInt8, self).__init__(name=name, deterministic=deterministic, random_state=random_state)
def _augment_images(self, images, random_state, parents, hooks):
iadt.gate_dtype... |
class AutoModelWithLMHead(object):
def __init__(self):
raise EnvironmentError('AutoModelWithLMHead is designed to be instantiated using the `AutoModelWithLMHead.from_pretrained(pretrained_model_name_or_path)` method.')
def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
... |
def init_seed(seed):
    """Seed every RNG source used in training (torch CPU, torch CUDA,
    NumPy, stdlib ``random``) so runs are reproducible."""
    # torch.cuda.manual_seed_all is a safe no-op when CUDA is absent
    for seeder in (torch.manual_seed, torch.cuda.manual_seed_all,
                   np.random.seed, random.seed):
        seeder(seed)
_model
def resnet18d(pretrained=False, **kwargs):
    """Construct a ResNet-18-D: ResNet-18 with a deep 32-channel stem and
    average-pool downsampling in the shortcut branches.

    Extra keyword arguments are forwarded to the model factory; passing a
    key that is already set here raises TypeError, as before.
    """
    cfg = dict(
        block=BasicBlock,
        layers=[2, 2, 2, 2],
        stem_width=32,
        stem_type='deep',
        avg_down=True,
        **kwargs,
    )
    return _create_resnet('resnet18d', pretrained, **cfg)
class COMATrainer(Trainer):
def __init__(self, training_config: Dict[(str, Any)], critic_creator: Callable, policy_instance: Policy=None):
self.critic_creator = critic_creator
super().__init__(training_config, policy_instance)
def setup(self):
self.critic: torch.nn.Module = self.critic_c... |
def get_resume_file(checkpoint_dir):
filelist = glob.glob(os.path.join(checkpoint_dir, '*.tar'))
if (len(filelist) == 0):
return None
filelist = [x for x in filelist if (os.path.basename(x) != 'best_model.tar')]
epochs = np.array([int(os.path.splitext(os.path.basename(x))[0]) for x in filelist])... |
def _bashcomplete(cmd, prog_name, complete_var=None):
if (complete_var is None):
complete_var = '_{}_COMPLETE'.format(prog_name.replace('-', '_').upper())
complete_instr = os.environ.get(complete_var)
if (not complete_instr):
return
from ._bashcomplete import bashcomplete
if bashcomp... |
def generate_code(ninja_global=None, declarations_path=None, nn_path=None, install_dir=None, subset=None, disable_autograd=False, selected_op_list_path=None, selected_op_list=None, force_schema_registration=False):
root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.inse... |
class Dataset(object):
def __init__(self, data: dict) -> None:
self._data = data
self._keys = list(data.keys())
self._sampler = None
def size(self):
return len(self._data[self._keys[0]])
def retrieve(self, indices: np.ndarray):
indexed = {}
for key in self._ke... |
def conv_program(X_arr: dace.float32[(5, 3, 10, 10)], W_arr: dace.float32[(16, 3, 3, 3)]):
    """DaCe program: 2-D convolution of X_arr with W_arr via the ONNX Conv node.

    Output spatial size 4x4 follows from a 10x10 input, 3x3 kernel and
    stride 2 with no padding: (10 - 3) // 2 + 1 = 4.
    """
    # ONNXConv writes its result into the preallocated Y buffer in place.
    output = np.ndarray([5, 16, 4, 4], dtype=np.float32)
    donnx.ONNXConv(X=X_arr, W=W_arr, Y=output, strides=[2, 2])
    return output
def write_list(list_in, path_save):
    """Write each string in `list_in` to `path_save`, one per line.

    No trailing newline is written after the last item (matches the
    previous behavior). The file is truncated if it already exists.
    """
    # `with` guarantees the handle is flushed and closed; the original
    # leaked the file object and relied on interpreter shutdown to flush.
    with open(path_save, 'w') as fout:
        fout.write('\n'.join(list_in))
.node
class Gearbox(dace.sdfg.nodes.LibraryNode):
implementations = {'pure': ExpandGearbox}
default_implementation = 'pure'
size = dace.properties.SymbolicProperty(desc='Number of wide vectors to convert to/from narrow vectors.', default=0)
def __init__(self, size, name=None, schedule=None, **kwargs):
... |
def invG(p):
    """Quantile-style lookup: map p in [0, 1] to an entry of the table G.

    NOTE(review): relies on module-level `G` (a sequence) and `m` (its
    length, presumably) being defined elsewhere — confirm before reuse.
    """
    idx = int(np.ceil(p * m))
    if idx >= m:
        return G[m - 1]
    if idx == 0:
        return G[0]
    return G[idx - 1]
def register_Ns3FdTbfqFfMacScheduler_methods(root_module, cls):
cls.add_constructor([param('ns3::FdTbfqFfMacScheduler const &', 'arg0')])
cls.add_constructor([])
cls.add_method('DoDispose', 'void', [], is_virtual=True)
cls.add_method('GetFfMacCschedSapProvider', 'ns3::FfMacCschedSapProvider *', [], is_v... |
def ref_grad_binary_weight_convolution(x, w, wb, alpha, b, dy, base_axis, pad, stride, dilation, group, quantize_zero_to, **kw):
vx = nn.Variable(x.shape, need_grad=True)
vx.d = x
vx.grad.zero()
vw = nn.Variable(w.shape, need_grad=True)
vw.d = binarize_kernel(w, quantize_zero_to)
vw.grad.zero()
... |
(0.2)
def search_departuring_flight(entities, *argv, **kargs):
msg = "Let's start with your departing flight. "
msg += 'Here are the cheapest flights departing from {} to {} on {}:\n'.format(entities['origin'], entities['destination'], entities['start_date'])
msg += 'Oceanic 815, Depart at 4:16am, 800USD\n'... |
def run_with_tf_optimizations(do_eager_mode: bool, use_xla: bool):
def run_func(func):
(func)
def run_in_eager_mode(*args, **kwargs):
return func(*args, **kwargs)
(func)
(experimental_compile=use_xla)
def run_in_graph_mode(*args, **kwargs):
return func... |
def all_reduce_sum(tensor_list):
    """Sum-all-reduce each tensor in `tensor_list` in place across all
    distributed workers. No-op (returns early) in single-process runs."""
    if get_world_size() == 1:
        return
    for tensor in tensor_list:
        # `dist.ReduceOp.SUM` replaces the deprecated (and since removed)
        # `dist.reduce_op.SUM` alias; same semantics.
        dist.all_reduce(tensor, op=dist.ReduceOp.SUM)
class Config(ABC, LoggingBase):
_region: str
def __init__(self):
super().__init__()
def region(self) -> str:
return self._region
def credentials(self) -> Credentials:
pass
def resources(self) -> Resources:
pass
def deserialize(config: dict, cache: Cache, handlers:... |
class PointnetSAModuleMSG(_PointnetSAModuleBase):
def __init__(self, *, npoint: int, radii: List[float], nsamples: List[int], mlps: List[List[int]], bn: bool=True, use_xyz: bool=True, pool_method='max_pool', instance_norm=False):
super().__init__()
assert (len(radii) == len(nsamples) == len(mlps))
... |
class PrepareDataset(Dataset):
def __init__(self, csv_file, root_dir):
self.landmarks_frame = pd.read_csv(csv_file, sep='|', header=None)
self.root_dir = root_dir
def load_wav(self, filename):
return librosa.load(filename, sr=hp.sample_rate)
def __len__(self):
return len(self... |
def test_sample_from_conditions_with_batch_size():
data = pd.DataFrame({'column1': list(range(100)), 'column2': list(range(100)), 'column3': list(range(100))})
metadata = SingleTableMetadata()
metadata.add_column('column1', sdtype='numerical')
metadata.add_column('column2', sdtype='numerical')
metad... |
def _impl(array, axis, keepdims, mask_identity, highlevel, behavior, attrs):
axis = regularize_axis(axis)
with HighLevelContext(behavior=behavior, attrs=attrs) as ctx:
layout = ctx.unwrap(array, allow_record=False, primitive_policy='error')
reducer = ak._reducers.CountNonzero()
out = ak._do.redu... |
class CIFAR100(Dataset):
def __init__(self, path):
self.cifar100 = datasets.CIFAR100(root=path, download=True, train=True, transform=cifar10_transformer())
def __getitem__(self, index):
if isinstance(index, numpy.float64):
index = index.astype(numpy.int64)
(data, target) = se... |
def main(raw_args=None):
parser = argparse.ArgumentParser()
parser.add_argument('--model_name', type=str, default='roberta_large', help='model name e.g. roberta_large')
parser.add_argument('--cache_dir', type=str, default='./models/roberta.large', help='Directory containing pytorch model')
parser.add_ar... |
_test()
def test_hardware_add42_single():
N = dace.symbol('N')
M = dace.symbol('M')
N.set(32)
M.set(32)
a = np.random.randint(0, 100, N.get()).astype(np.int32)
b = np.zeros((N.get(),)).astype(np.int32)
sdfg = make_vadd_multi_sdfg(N, M)
sdfg.specialize(dict(N=N, M=M))
sdfg(A=a, B=b)
... |
class EDLDataset(data.Dataset):
def __init__(self, args, split, vocab, device, label_size=None):
if (split == 'train'):
loc_file_name = args.train_loc_file
self.data_dir = args.train_data_dir
elif (split == 'valid'):
loc_file_name = args.valid_loc_file
... |
class PPM(nn.Module):
def __init__(self, dim_in, ppm_dim=512, pool_scales=(1, 2, 3, 6), **kwargs):
super(PPM, self).__init__()
conv = kwargs.pop('conv', 'Conv2d')
norm = kwargs.pop('norm', 'BN')
act = kwargs.pop('act', 'ReLU')
self.dim_in = dim_in
self.ppm_dim = ppm_d... |
def proxyless_network(structure, genotypes, n_classes=1000, bn_param=(0.1, 0.001), dropout_rate=0):
net_config_json = json.load(open(network_path[structure], 'r'))
net_config_json['classifier']['out_features'] = n_classes
net_config_json['classifier']['dropout_rate'] = dropout_rate
if (len(genotypes) > ... |
class CleanAuthors():
def __init__(self, nlp):
self.nlp = nlp
def get_valid_names(self, author_list, blocklist):
if (not isinstance(author_list, list)):
author_list = [str(author_list)]
authors = set()
for doc in self.nlp.pipe(author_list, disable=['tagger', 'parser',... |
.parametrize('ctx, func_name', ctxs)
.parametrize('seed', [313])
def test_round_forward_backward(seed, ctx, func_name):
from nbla_test_utils import cap_ignore_region, function_tester
rng = np.random.RandomState(seed)
inputs = [(rng.randn(2, 3, 4).astype(np.float32) * 2)]
function_tester(rng, F.round, re... |
.parametrize('extra', ({}, {'content': {}}))
def test_response_conformance_openapi_no_media_types(openapi_30, extra, response_factory):
    """A `default` response without concrete media types (empty or missing
    `content`) must produce no media-type conformance checks."""
    responses = {'default': {'description': 'text', **extra}}
    assert_no_media_types(response_factory, openapi_30, {'responses': responses})
def get_category(text: str) -> Optional[str]:
    """Extract NAME from a '${NAME}' placeholder string.

    Returns the inner name (possibly empty) when `text` is wrapped in
    '${' ... '}', otherwise None.
    """
    is_placeholder = text.startswith('${') and text.endswith('}')
    return text[2:-1] if is_placeholder else None
def get_test_data_SR(rgb_dir):
    """Build the super-resolution test-set loader rooted at `rgb_dir`.

    The directory must already exist (asserted, as in the original).
    """
    assert os.path.exists(rgb_dir)
    return DataLoaderTestSR(rgb_dir, None)
class TestEntityLoading(TestCase):
def test_from_yaml_file(self):
entity = Entity.from_yaml(io.StringIO('\n# Location Entity\n---\ntype: entity\nname: location\nautomatically_extensible: no\nuse_synonyms: yes\nmatching_strictness: 0.5\nvalues:\n- [new york, big apple]\n- [paris, city of lights]\n- london\n ... |
def evaluate_file(file_path):
(predictions, ground_truth, questions) = ([], [], [])
with open(file_path, 'r', encoding='utf8') as f:
json_lines = f.readlines()
for (idx, line) in enumerate(json_lines):
try:
json_obj = json.loads(line)
if isinstance(jso... |
def parse_log_file(log_file_path, agent=None, env_list=None):
logs = {}
agent_keys = [CURRENT_EPISODIC_REWARD, AVERAGE_EPISODIC_REWARD, AVERAGE_BATCH_LOSS, ENVIRONMENT, TIME]
common_keys = [CONFIG]
keys = (agent_keys + common_keys)
for env in env_list:
logs[env] = {}
for key in keys:... |
def wilmes_algorithm(M):
if M.matrix_over_field().is_invertible():
L = deepcopy(M)
L = matrix(ZZ, L)
U = matrix(ZZ, [sum(i) for i in L]).smith_form()[2].transpose()
L = (U * M)
for k in range(1, (M.nrows() - 1)):
smith = matrix(ZZ, [i[(k - 1)] for i in L[k:]]).smi... |
def initialize_gpu_0_from_weights_file(model, weights_file):
ws_blobs = workspace.Blobs()
with open(weights_file, 'r') as f:
src_blobs = pickle.load(f)
if ('cfg' in src_blobs):
saved_cfg = yaml.load(src_blobs['cfg'])
configure_bbox_reg_weights(model, saved_cfg)
if ('blobs' in src... |
def _called_with_cfg(*args, **kwargs):
    """Return True when the call carries a config node, either as the first
    positional argument or as the `cfg=` keyword."""
    from omegaconf import DictConfig
    cfg_types = (_CfgNode, DictConfig)
    if args and isinstance(args[0], cfg_types):
        return True
    # popping mutates only our local **kwargs dict, never the caller's
    return isinstance(kwargs.pop('cfg', None), cfg_types)
class DeepFashionKeypointFaceEmbed(Loader):
def __init__(self, pickle_file, folder, is_train, shuffle=False, random_drop=0.0, test_size=0.005, test_split_random=8):
super().__init__(pickle_file, folder, shuffle)
self.random_drop = random_drop
self.df['num_keypoints'] = self.df.keypoints.map(... |
def zeros_nobroadcast(shape, dtype=theano.config.floatX):
    """Return a Theano zeros tensor with every dimension marked
    non-broadcastable (presumably to undo the default broadcastable flag
    on size-1 dims — confirm against Theano's T.zeros semantics)."""
    z = T.zeros(shape, dtype=dtype)
    return T.unbroadcast(z, *range(len(shape)))
class DCC():
def __init__(self, lag):
self.lag = lag
self.k = 2
check_acc(self.lag, self.k)
def make_vec(self, input_data, phyche_index=None, all_property=False, extra_phyche_index=None):
(sequence_list, phyche_value) = ready_acc(input_data, self.k, phyche_index, all_property, ex... |
def get_mixture_mse_accuracy(output_dim, num_mixes):
def mse_func(y_true, y_pred):
y_pred = tf.reshape(y_pred, [(- 1), (((2 * num_mixes) * output_dim) + num_mixes)], name='reshape_ypreds')
y_true = tf.reshape(y_true, [(- 1), output_dim], name='reshape_ytrue')
(out_mu, out_sigma, out_pi) = tf... |
def compute_auc(s_error, p_error, a_error):
assert (len(s_error) == 71)
assert (len(p_error) == 48)
assert (len(a_error) == 14)
s_error = np.array(s_error)
p_error = np.array(p_error)
a_error = np.array(a_error)
limit = 25
gs_error = np.zeros((limit + 1))
gp_error = np.zeros((limit +... |
class Type(ABC):
_name: str
def __init__(self, name: str):
self._name = name
def name(self) -> str:
return self._name
def is_enum(self) -> bool:
raise NotImplementedError
def is_value(self) -> bool:
raise NotImplementedError
def __str__(self) -> str:
retur... |
def rand_beta_backward(grad_inputs, inputs, input_shapes, outputs, output_shapes, alpha=0.5, beta=0.5, shape=None, seed=(- 1)):
    """Backward pass for a random-beta sampling op: sampling is not
    differentiable, so no gradient flows to any input.

    Returns one None per grad_input and per input. `alpha`, `beta`,
    `shape` and `seed` mirror the forward signature and are unused here.
    `shape` now defaults to None instead of a shared mutable list `[]`
    (the classic mutable-default pitfall); callers passing a list are
    unaffected since the value is never read.
    """
    return [None] * (len(grad_inputs) + len(inputs))
class MaxComputeDBWriter(BufferedDBWriter):
def __init__(self, conn, table_name, table_schema, buff_size):
super(MaxComputeDBWriter, self).__init__(conn, table_name, table_schema, buff_size)
from odps import tunnel
self.compress = tunnel.CompressOption.CompressAlgorithm.ODPS_ZLIB
def flu... |
def test_duplicate_object():
sdfg = dace.SDFG('shouldfail')
sdfg.add_array('A', [20], dace.float64)
state = sdfg.add_state()
a = state.add_read('A')
b = state.add_write('A')
memlet = dace.Memlet('A[0]')
state.add_nedge(a, b, memlet)
state.add_nedge(a, b, memlet)
with pytest.raises(In... |
def mae_ast_patch(refresh=False, *args, **kwargs):
kwargs['ckpt'] = '
return mae_ast_url(*args, refresh=refresh, **kwargs) |
def update_bias(net, amnt_new_classes):
bias_key = list(net.state_dict().keys())[(- 1)]
bias = net.state_dict()[bias_key].cpu().detach().numpy()
b_mean = np.mean(bias)
b_std = np.std(bias)
new_bias = np.zeros((len(bias) + amnt_new_classes), dtype='f')
new_bias[:len(bias)] = bias
for i in ran... |
_resource
def load_model():
    """Load the pretrained model named by the module-level
    `model_name_or_path`, cast to half precision, and move it to the
    module-level `device`. Returns the ready model."""
    net = AutoModel.from_pretrained(model_name_or_path, trust_remote_code=True)
    # .half() and .to() both convert in place and return the same module
    net = net.half().to(device)
    print('Model Load done!')
    return net
class MobileNetV1ImageProcessor(metaclass=DummyObject):
    """Placeholder class that stands in when the 'vision' backend is not
    installed; instantiating it presumably raises an informative
    missing-dependency error via `requires_backends` — confirm upstream.
    """
    # backend(s) whose absence this dummy represents
    _backends = ['vision']
    def __init__(self, *args, **kwargs):
        requires_backends(self, ['vision'])
def gen_implicit_args(func: LeanFunctionInfo, num_expl_args: int, name_sub: Dict[(str, int)]) -> str:
implicit_args = ' '.join(['{}'.format(name_with_sub(a, name_sub)) for a in func.arg_names[:func.num_implicit_args]])
if ((func.num_implicit_args > 0) and (num_expl_args > 0)):
implicit_args += ' '
r... |
def plot_mma(config, captions, mma_day, mma_night):
models = config['models_name']
n_models = len(models)
colors = np.array(brewer2mpl.get_map('Set2', 'qualitative', 8).mpl_colors)[:n_models]
linestyles = (['-'] * n_models)
plt_lim = [1, config['max_mma_threshold']]
plt_rng = np.arange(plt_lim[0... |
def contains_conj_strict(depSet, depTagSet):
    """Return True if `depSet` contains any English coordinating
    conjunction (FANBOYS plus 'yet'/'so').

    `depTagSet` is unused but kept for interface compatibility with
    sibling predicates.
    """
    # set + any() replaces the original seven-way chained `or` of `in` tests
    conjunctions = ('and', 'or', 'nor', 'but', 'yet', 'so', 'for')
    return any(conj in depSet for conj in conjunctions)
def _dev():
MODEL_PATH = 'C:\\Users\\saareliad\\workspace\\ViT-B_16.npz'
MODEL_PATH = pathlib.Path(MODEL_PATH)
def read_npz_checkpoint(path):
with np.load(path) as data:
lst = data.files
state_dict = {k: data[k] for k in lst}
dd = {k: data[k].shape for k in lst}
... |
class CoefRegion(CoefN):
def __init__(self, name, problem, kwargs):
CoefN.__init__(self, name, problem, kwargs)
self.corr_dim = len(list(kwargs['regions'].values())[0])
def get_variables(self, problem, ir, data):
corr = data[self.requires[(- 1)]]
(yield (self.variables[0], corr.s... |
class ReplayBuffer(object):
def __init__(self, state_dim, action_dim, device, max_size=int(1000000.0)):
self.max_size = max_size
self.ptr = 0
self.size = 0
self.state = np.zeros((max_size, state_dim))
self.action = np.zeros((max_size, action_dim))
self.next_state = np... |
def get_from_clause_tag(args):
    """Return the filename tag 'no_from.' when the FROM clause is omitted
    (per `args.omit_from_clause`), otherwise the empty string."""
    if args.omit_from_clause:
        return 'no_from.'
    return ''
def cli_main():
parser = get_parser()
args = parser.parse_args()
print(args)
assert ((args.sys == '-') or os.path.exists(args.sys)), 'System output file {} does not exist'.format(args.sys)
assert os.path.exists(args.ref), 'Reference file {} does not exist'.format(args.ref)
dict = dictionary.Dict... |
_criterion('sentence_ranking')
class SentenceRankingCriterion(FairseqCriterion):
def __init__(self, task, ranking_head_name, save_predictions, num_classes):
super().__init__(task)
self.ranking_head_name = ranking_head_name
if (save_predictions is not None):
self.prediction_h = op... |
def make_read_B(state):
(entry, exit) = state.add_map('read_B', {'n': '0:N/P', 'k': '0:K', 'm': '0:M'}, schedule=dace.ScheduleType.FPGA_Device)
mem = state.add_read('B_device')
pipe = state.add_write('B_pipe')
tasklet = state.add_tasklet('read_B', {'from_memory'}, {'to_kernel'}, 'to_kernel = from_memory... |
def getVal(kwargs, key, default):
    """Like dict.get, but also falls back to `default` when the stored
    value is explicitly None."""
    value = kwargs.get(key, default)
    return default if value is None else value
def main():
pop = toolbox.population(n=300)
(CXPB, MUTPB) = (0.8, 0.1)
print('Start of evolution')
fitnesses = list(map(toolbox.evaluate, pop))
for (ind, fit) in zip(pop, fitnesses):
ind.fitness.values = (fit,)
print((' Evaluated %i individuals' % len(pop)))
fits = [ind.fitness.valu... |
def test_implicit_casting():
z = m.get_implicit_casting()
assert (z['d'] == {'char*_i1': 'abc', 'char*_i2': 'abc', 'char*_e': 'abc', 'char*_p': 'abc', 'str_i1': 'str', 'str_i2': 'str1', 'str_e': 'str2', 'str_p': 'str3', 'int_i1': 42, 'int_i2': 42, 'int_e': 43, 'int_p': 44})
assert (z['l'] == [3, 6, 9, 12, 1... |
class ContinuousMap(Morphism):
def __init__(self, parent, coord_functions=None, name=None, latex_name=None, is_isomorphism=False, is_identity=False):
Morphism.__init__(self, parent)
domain = parent.domain()
codomain = parent.codomain()
self._domain = domain
self._codomain = c... |
def mnist2d_5class(data_dir):
transform = transforms.Compose([transforms.ToTensor()])
trainset = datasets.MNIST(data_dir, train=True, transform=transform, download=True)
testset = datasets.MNIST(data_dir, train=False, transform=transform, download=True)
(trainset, num_classes) = filter_class(trainset, [... |
class WordPaths_dyck(WordPaths_all):
def __init__(self, alphabet):
d = [(1, 1), (1, (- 1))]
super().__init__(alphabet, steps=d)
self._infinite_word_class = None
self._finite_word_class = FiniteWordPath_dyck
_attribute
def _element_classes(self):
return {'list': Finite... |
def _GetPseudoAAC2(ProteinSequence, lamda=10, weight=0.05):
rightpart = []
for i in range(lamda):
rightpart.append(_GetSequenceOrderCorrelationFactor(ProteinSequence, k=(i + 1)))
result = {}
temp = (1 + (weight * sum(rightpart)))
for index in range(20, (20 + lamda)):
result[('PAAC' +... |
class AccurateGELUActivation(nn.Module):
def __init__(self):
super().__init__()
self.precomputed_constant = math.sqrt((2 / math.pi))
def forward(self, input: Tensor) -> Tensor:
return ((0.5 * input) * (1 + torch.tanh((self.precomputed_constant * (input + (0.044715 * torch.pow(input, 3)))... |
def decoder_fc(z):
with tf.variable_scope('decoder') as scope:
Wd_fc1 = utils.weight_variable([FLAGS.z_dim, 50], name='Wd_fc1')
bd_fc1 = utils.bias_variable([50], name='bd_fc1')
hd_relu1 = activation_function((tf.matmul(z, Wd_fc1) + bd_fc1), name='hdfc_1')
Wd_fc2 = utils.weight_varia... |
def _run_function(python_udf):
try:
if isinstance(python_udf, AttributeError):
raise python_udf
result = python_udf.func(*python_udf.args, **python_udf.kwargs)
except Exception as e:
except_str = f'''On {_get_current_rpc_agent().get_worker_info()}:
{repr(e)}
{traceback.format... |
def write_shap_values(shap_values, conn, result_table, feature_column_names):
    """Persist SHAP values into `result_table` through a buffered DB writer
    (buffer size 100), one row per entry in `shap_values`.

    Values are coerced to native Python floats so the DB driver receives
    plain numbers rather than numpy scalars.
    """
    with db.buffered_db_writer(conn, result_table, feature_column_names, 100) as w:
        for row in shap_values:
            # the comprehension already builds a fresh list; the original's
            # extra list(...) copy was redundant
            w.write([float(c) for c in row])
class Function_sinh(GinacFunction):
    """Symbolic hyperbolic sine, backed by GiNaC's built-in sinh."""
    def __init__(self):
        # register under the name 'sinh' with its standard LaTeX macro
        GinacFunction.__init__(self, 'sinh', latex_name='\\sinh')
def surrogate(student, teacher, X, verbose=False):
    """Fit `student` to imitate `teacher` on inputs X (model distillation).

    The teacher's predictions are cast to integer class labels and used as
    the training targets. Returns the fitted student.
    """
    # `np.int` was deprecated in NumPy 1.20 and removed in 1.24; the
    # builtin `int` is the documented equivalent for astype().
    y = teacher(X).astype(int)
    if verbose:
        print('Sampled', len(y), 'data')
    student.fit(X, y)
    return student
class LipNormConv2d(nn.Conv2d):
def __init__(self, in_channels, out_channels, kernel_size, stride, padding, bias=True, coeff=0.97, domain=float('inf'), codomain=float('inf'), local_constraint=True, **unused_kwargs):
del unused_kwargs
super(LipNormConv2d, self).__init__(in_channels, out_channels, ker... |
def register_Ns3Mac16Address_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
cls.add_constructor([param('ns3::Mac16Address const &', 'arg0')])
cls.add_constructor... |
def batch_norm(x, is_training, gamma=None, beta=None, axes=[0], eps=1e-10, name='bn_out', decay=0.99, dtype=tf.float32):
n_out = x.get_shape()[(- 1)]
try:
n_out = int(n_out)
shape = [n_out]
except:
shape = None
emean = tf.get_variable('ema_mean', shape=shape, trainable=False, dty... |
def check_modification(input_path: str, output_images_path: str) -> bool:
original_images = [os.path.basename(f) for f in glob.glob(os.path.join(input_path, '*.jpg'))]
original_images.sort()
modified_images = [os.path.basename(f) for f in glob.glob(os.path.join(output_images_path, '*.jpg'))]
modified_im... |
def test_per_test_hooks(testdir, simple_openapi):
testdir.make_test('\nfrom hypothesis import strategies as st\n\ndef replacement(context, query):\n return {"id": "foobar"}\n\.apply(replacement, name="map_query")\()\(max_examples=1)\ndef test_a(case):\n assert case.query["id"] == "foobar"\n\()\.apply(replacem... |
class FmapRange(namedtuple('FmapRange', ['fp_beg', 'fp_end'])):
def __new__(cls, fp_beg, fp_end):
for (b, e) in zip(fp_beg, fp_end):
if (b > e):
raise ValueError('FmapRange: begin value > end value? beg: {}, end: {}'.format(fp_beg, fp_end))
ntp = super(FmapRange, cls).__n... |
def test_nonzero_offset_fromarrow_RecordArray_4_again():
content = ak.contents.NumpyArray(np.array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9, 10.1]))
offsets = ak.index.Index64(np.array([0, 3, 3, 5, 6, 10, 10]))
listoffsetarray = ak.contents.ListOffsetArray(offsets, content)
content1 = ak.conten... |
()
class PLASWithPerturbationConfig(PLASConfig):
    """Config for the PLAS-with-perturbation offline RL algorithm.

    NOTE(review): the bare `()` preceding this class and the fact that
    `get_type` takes no self/cls suggest decorators (likely a dataclass
    wrapper and @staticmethod) were stripped from this snippet — confirm
    against the original source before relying on it.
    """
    # presumably bounds the magnitude of the learned action perturbation —
    # TODO confirm against the algorithm's documentation
    action_flexibility: float = 0.05
    def create(self, device: DeviceArg=False) -> 'PLASWithPerturbation':
        """Instantiate the algorithm from this config on `device`."""
        return PLASWithPerturbation(self, device)
    def get_type() -> str:
        """Return the registry identifier for this algorithm."""
        return 'plas_with_perturbation'
class RevGATBlock(nn.Module):
def __init__(self, node_feats, edge_feats, edge_emb, out_feats, n_heads=1, attn_drop=0.0, edge_drop=0.0, negative_slope=0.2, residual=True, activation=None, use_attn_dst=True, allow_zero_in_degree=True, use_symmetric_norm=False):
super(RevGATBlock, self).__init__()
self... |
def hevc_compression(crf, framerate, src, dst, config):
dim = config['n_features_per_level']
for d in range(dim):
src_path = os.path.join(src, f'dim{d}', '%05d.png')
os.makedirs(os.path.join(dst, f'dim{d}'), exist_ok=True)
save_path = os.path.join(dst, f'dim{d}', f'{crf}_{framerate}.mp4'... |
def setup_config():
config_str = next((x for x in sys.argv if ('config_name' in x)), None)
if (config_str is not None):
config_name = config_str.split('=')[1]
sys.argv.remove(config_str)
os.environ['HYDRA_CONFIG_NAME'] = config_name
return config_name
elif ('HYDRA_CONFIG_NAME... |
class NllbTokenizerFast(metaclass=DummyObject):
    """Placeholder class that stands in when the 'tokenizers' backend is
    not installed; instantiating it presumably raises an informative
    missing-dependency error via `requires_backends` — confirm upstream.
    """
    # backend(s) whose absence this dummy represents
    _backends = ['tokenizers']
    def __init__(self, *args, **kwargs):
        requires_backends(self, ['tokenizers'])
class ImmutableSandboxedEnvironment(SandboxedEnvironment):
    """Sandboxed environment that additionally rejects attributes known to
    mutate built-in mutable types (list.append, dict.update, ...)."""

    def is_safe_attribute(self, obj, attr, value):
        """Safe only if the base sandbox allows it AND it is not a known
        mutating method."""
        base_ok = SandboxedEnvironment.is_safe_attribute(self, obj, attr, value)
        # `and` short-circuits, so the mutability check runs only when the
        # base sandbox already approved the attribute — as in the original
        return base_ok and (not modifies_known_mutable(obj, attr))
def load_inline(name, cpp_sources, cuda_sources=None, functions=None, extra_cflags=None, extra_cuda_cflags=None, extra_ldflags=None, extra_include_paths=None, build_directory=None, verbose=False, with_cuda=None, is_python_module=True, with_pytorch_error_handling=True, keep_intermediates=True):
build_directory = (bu... |
def _find_observable_paths(extra_files=None):
rv = set(((os.path.dirname(os.path.abspath(x)) if os.path.isfile(x) else os.path.abspath(x)) for x in sys.path))
for filename in (extra_files or ()):
rv.add(os.path.dirname(os.path.abspath(filename)))
for module in list(sys.modules.values()):
fn ... |
def test_record_int32_parameters():
    """Round-trip check: a parameterized record type survives
    str -> from_datashape -> str unchanged."""
    record = RecordType([NumpyType('int32')], None, parameters={'p': [123]})
    reparsed = ak.types.from_datashape(str(record), highlevel=False)
    assert str(reparsed) == str(record)
def compute_n_params(model, return_str=True):
tot = 0
for p in model.parameters():
w = 1
for x in p.shape:
w *= x
tot += w
if return_str:
if (tot >= 1000000.0):
return '{:.1f}M'.format((tot / 1000000.0))
else:
return '{:.1f}K'.forma... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.