code stringlengths 101 5.91M |
|---|
def main(args):
data = load_dataset(**DATASET_ARGS)
data_idxs = list(range(len(data)))
os.makedirs(args.cache_folder, exist_ok=True)
def gen(seeds):
r = random.Random((seeds[0] + 10))
cache = open(os.path.join(args.cache_folder, f'gpt-cache.{seeds[0]}.jsonl'), 'a')
i = 0
... |
class MBConvBlock(nn.Module):
def __init__(self, block_args, global_params):
super().__init__()
self._block_args = block_args
self._bn_mom = (1 - global_params.batch_norm_momentum)
self._bn_eps = global_params.batch_norm_epsilon
self.has_se = ((self._block_args.se_ratio is no... |
def split(a, n):
    """Partition sequence *a* into *n* contiguous chunks of near-equal size.

    The first ``len(a) % n`` chunks receive one extra element, so chunk
    lengths differ by at most one.
    """
    size, extra = divmod(len(a), n)
    chunks = []
    start = 0
    for idx in range(n):
        stop = start + size + (1 if idx < extra else 0)
        chunks.append(a[start:stop])
        start = stop
    return chunks
class MHist(Estimator):
def __init__(self, partitions, table):
super(MHist, self).__init__(table=table, bins=len(partitions))
self.partitions = partitions
self.column_bound_map = {}
for cid in range(self.table.col_num):
self.column_bound_map[cid] = {}
self.col... |
def lsqr(A, b, damp=0.0, atol=1e-06, btol=1e-06, conlim=.0, iter_lim=None, show=False, calc_var=False, x0=None):
A = aslinearoperator(A)
b = np.atleast_1d(b)
if (b.ndim > 1):
b = b.squeeze()
(m, n) = A.shape
if (iter_lim is None):
iter_lim = (2 * n)
var = np.zeros(n)
msg = ('... |
def bwa_mem(ref, reads, outfile, threads=1, bwa_options='-x pacbio', verbose=False, index=None):
samtools = external_progs.make_and_check_prog('samtools', verbose=verbose)
bwa = external_progs.make_and_check_prog('bwa', verbose=verbose)
unsorted_bam = (outfile + '.tmp.unsorted.bam')
tmp_index = (outfile... |
def main_channel_estimation(params_system, len_pilot, noise_power_db, Pt, location_user, Rician_factor, path):
(num_antenna_bs, num_elements_irs, num_user) = params_system
channel_true = sio.loadmat(path)
channels = (channel_true['channel_bs_user'], channel_true['channel_irs_user'], channel_true['channel_bs... |
def dynamic_range_compression(x, C=1, clip_val=1e-05):
    """Log-compress *x*: ``log(C * clip(x, clip_val, inf))``.

    Clipping at *clip_val* keeps the logarithm finite for zero or
    near-zero inputs; *C* is a linear gain applied before the log.
    """
    clipped = np.clip(x, a_min=clip_val, a_max=None)
    return np.log(clipped * C)
class BaseTower(object):
def __init__(self, params):
self.params = params
self.placeholders = {}
self.tensors = {}
self.variables_dict = {}
self.initializer = tf.truncated_normal_initializer(params.init_mean, params.init_std)
def initialize(self):
raise Exception(... |
def genpykernels():
print('Generating Python kernels')
prefix = '\nfrom numpy import uint8\nkMaxInt64 = \nkSliceNone = kMaxInt64 + 1\n'
tests_spec = os.path.join(CURRENT_DIR, '..', 'awkward-cpp', 'tests-spec')
if os.path.exists(tests_spec):
shutil.rmtree(tests_spec)
os.mkdir(tests_spec)
... |
# NOTE(review): this bare call immediately above a ``def`` looks like a
# decorator whose leading '@' was lost in extraction
# (i.e. ``@_function(resources=...)``) -- confirm against the original source.
_function(resources=dict(db=[3, 6, 9]))
def g_np(x: DataPoint, db: List[int]) -> int:
    """Return 0 when ``x[1]`` is a member of *db*, otherwise -1."""
    return (0 if (x[1] in db) else (- 1))
def LF_history_of(span, window=25):
i = span.get_word_start()
left = ' '.join(span.sentence.words[max(0, (i - window)):i])
text = f'{left} {span.text}'
accept_left_rgxs = ['\\b(h/o|hx|history of)\\b', '\\b(s/p|SP|status[- ]post)\\b', '\\b(recent|previous)\\b', '\\b(in the (distant )*past)\\b', '\\b([0-9... |
def process_tagger_prediction(split, datadir, tag_pred: str, threshold: float, summary_len=10, minimum_word=1, maximum_word=25, outfix='default', extsent=False, weight_sent=False, sent_separator=True):
data_pred = {}
cur_example = 0
local_index = Counter()
orig_data = {}
prefix = f'{datadir}/{split}... |
def neighbor_boxes(box1, box2, threshold=0.1):
if (math.abs((box1[0] - box2[0])) > threshold):
return False
if (math.abs((box1[1] - box2[1])) > threshold):
return False
if (math.abs((box1[2] - box2[2])) > threshold):
return False
if (math.abs((box1[3] - box2[3])) > threshold):
... |
class TLU(nn.Module):
def __init__(self, num_features):
super(TLU, self).__init__()
self.num_features = num_features
self.tau = Parameter(torch.Tensor(num_features))
self.reset_parameters()
def reset_parameters(self):
nn.init.zeros_(self.tau)
def extra_repr(self):
... |
_converter_regitstry('sAR')
def sAR_converter(context: 'SG2260Context', reg: sAR_reg):
(n, c, h, w) = (reg[f'res0_{d}'] for d in 'nchw')
opd0 = dict(address=reg.opd0_addr, dtype=(reg.opt_opd0_prec, reg.opt_opd0_sign), shape=(n, c, h, w), stride=tuple((reg[f'opd0_{d}_str'] for d in 'nchw')), layout=reg.short_opd... |
def test_chararray():
    """A NumpyArray of uint8 tagged ``__array__ == 'char'`` must serialize
    to a single JSON string."""
    payload = np.frombuffer(b'hellothere', 'u1')
    char_array = ak.contents.NumpyArray(payload, parameters={'__array__': 'char'})
    assert (ak.operations.to_json(char_array) == '"hellothere"')
def test_mutation_change_single_prim(test_case_chromosome_with_test):
(chromosome, test_case) = test_case_chromosome_with_test
int0 = IntPrimitiveStatement(test_case, 5)
int0.ret_val.distance = 5
test_case.add_statement(int0)
with mock.patch('pynguin.utils.randomness.next_float') as float_mock:
... |
def add_activation_counter_variable_or_reset(module):
    """(Re)initialize the activation bookkeeping counters on *module*.

    Only modules accepted by ``is_supported_instance_for_activation`` are
    touched; their ``__activation__`` and ``__num_conv__`` attributes are
    set (or reset) to zero.
    """
    if not is_supported_instance_for_activation(module):
        return
    module.__activation__ = 0
    module.__num_conv__ = 0
class CustomDataParallel(nn.DataParallel):
def __init__(self, module: nn.Module, device_ids: Optional[List[int]]=None, output_device: Optional[torch.device]=None, dim: Optional[int]=0):
super(CustomDataParallel, self).__init__(module, device_ids, output_device, dim)
try:
self.n_out = mod... |
# NOTE(review): the bare call below is almost certainly a decorator that
# lost its leading '@' in extraction (``@_numpy_output(check_dtype=True)``)
# -- confirm against the original source.
_numpy_output(check_dtype=True)
# DaCe ufunc test: elementwise np.left_shift applied to two float32
# vectors of length 10.
def test_ufunc_left_shift_ff(A: dace.float32[10], B: dace.float32[10]):
    return np.left_shift(A, B)
# NOTE(review): the bare string below is presumably a Flask-style route
# decorator whose ``@app.route`` prefix was lost in extraction -- confirm
# against the original source.
('/image/<id>')
def get_img(id):
    """Serve the image file recorded in the database for object *id*.

    Looks up the image path via ``queries.select_object_by_id`` and streams
    the file back with ``send_from_directory``.
    """
    img_path = read(queries.select_object_by_id.format(id=id))['image'][0]
    return send_from_directory(os.path.dirname(img_path), os.path.basename(img_path))
class MapVectorSpaceToNumberField(NumberFieldIsomorphism):
    """Isomorphism from a vector space V onto a number field K.

    A vector's coordinate list is interpreted as polynomial coefficients
    in K's defining polynomial ring, then wrapped as a field element.
    """
    def __init__(self, V, K):
        # Register this map in the homset Hom(V, K).
        NumberFieldIsomorphism.__init__(self, Hom(V, K))
    def _call_(self, v):
        K = self.codomain()
        # Coefficients of v become a polynomial over K's polynomial ring...
        f = K.polynomial_ring()(v.list())
        # ...which is then reduced into an actual element of K.
        return K._element_class(K, f)
def _harmonic_number(x):
one = x.new_ones([1])
return (torch.digamma((x + one)) - torch.digamma(one)) |
class BlockNode(object):
def generate_cached_builtins_decls(self, env, code):
entries = env.global_scope().undeclared_cached_builtins
for entry in entries:
code.globalstate.add_cached_builtin_decl(entry)
del entries[:]
def generate_lambda_definitions(self, env, code):
... |
def src_dot_dst(src_field, dst_field, out_field):
    """Build an edge UDF computing the dot product of a source-node field
    and a destination-node field.

    Returns a function mapping ``edges`` to ``{out_field: <per-edge dot>}``,
    where the dot is taken over the last dimension (kept as size 1).
    """
    def edge_dot(edges):
        product = edges.src[src_field] * edges.dst[dst_field]
        return {out_field: product.sum(-1, keepdim=True)}
    return edge_dot
class ServiceMerger(Merger):
def _createService(self) -> Service:
raise NotImplementedError('_createService not implemented')
def doMerge(self, objectA: Service, objectB: Service) -> Service:
assert (objectA.getName() == objectB.getName()), 'cannot merge different services.'
new_service ... |
def register_Ns3TcpHtcp_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::TcpHtcp const &', 'sock')])
cls.add_method('Fork', 'ns3::Ptr< ns3::TcpCongestionOps >', [], is_virtual=True)
cls.add_method('GetName', 'std::string', [], is_const=True, is_virtual=True)
cls.ad... |
def test():
print('Fibonacci recursion using consume (with chunks, custom condition)')
input = np.ndarray([1], np.int32)
output = np.ndarray([1], np.float32)
input[0] = 10
output[0] = 0
regression = 44
sdfg(iv=input, res=output)
diff = (output[0] - regression)
print('Difference:', di... |
def test_arrow_struct():
    """Round-trip a pyarrow struct array through awkward's arrow handler."""
    records = [{'x': 1, 'y': 1.1}, {'x': 2, 'y': 2.2}, {'x': 3, 'y': 3.3}]
    arrow_array = pyarrow.array(records)
    assert (to_list(ak._connect.pyarrow.handle_arrow(arrow_array)) == records)
def print_status(conf, status):
    """Print a timestamped status line for configuration *conf*."""
    stamp = time_only(datetime.datetime.now())
    print('[{}] {}: {}'.format(stamp, str(conf.to_string()), status))
def prepare_lstm_jit(bench_args):
    """Build a scripted LSTM benchmark model on CPU and return its
    (inputs, forward) pair."""
    built = lstm_creator(
        script=True,
        seqLength=bench_args.lstmSeqLength,
        numLayers=bench_args.lstmNumLayers,
        inputSize=bench_args.lstmInputSize,
        hiddenSize=bench_args.lstmHiddenSize,
        miniBatch=bench_args.lstmMiniBatch,
        device='cpu',
    )
    return (built.inputs, built.forward)
def test_enum_array():
from sys import byteorder
e = ('<' if (byteorder == 'little') else '>')
arr = m.create_enum_array(3)
dtype = arr.dtype
assert (dtype == np.dtype([('e1', (e + 'i8')), ('e2', 'u1')]))
assert (m.print_enum_array(arr) == ['e1=A,e2=X', 'e1=B,e2=Y', 'e1=A,e2=X'])
assert (arr... |
def load_stack(type_process, ite_stack):
stack_name = (((('stack_' + type_process) + '_pre_') + str(ite_stack)) + '.hdf5')
stack_path = os.path.join(dir_stack, stack_name)
pre_list = h5py.File(stack_path, 'r')['stack_pre'][:]
print('pre loaded.')
stack_name = (((('stack_' + type_process) + '_cmp_') ... |
class SwizzlingFunctor(enum.Enum):
    """Enumeration of threadblock-swizzling functor choices.

    NOTE(review): the names suggest identity swizzles with tile widths
    1/2/4/8 -- confirm against the corresponding C++ definitions.
    """
    Identity1 = enum_auto()
    Identity2 = enum_auto()
    Identity4 = enum_auto()
    Identity8 = enum_auto()
class RobotEnv(mujoco_env.MujocoEnv):
ROBOTS = {}
CALIBRATION_PATHS = {}
def __init__(self, model_path: str, robot: BaseRobot, frame_skip: int, camera_settings: Optional[Dict]=None):
self._robot = robot
self.desired_pose = np.zeros(self.n_jnt)
if (not model_path.startswith('/')):
... |
def fold(input, output_size, kernel_size, dilation=1, padding=0, stride=1):
if (input.dim() == 3):
msg = '{} must be int or 2-tuple for 3D input'
assert_int_or_pair(output_size, 'output_size', msg)
assert_int_or_pair(kernel_size, 'kernel_size', msg)
assert_int_or_pair(dilation, 'dila... |
class DatasetLoader(Dataset):
def __init__(self, dir, d_type):
self.x_path = os.path.join(dir, str(d_type), 'Images')
self.y_path = os.path.join(dir, str(d_type), 'Labels')
self.X = os.listdir(self.x_path)
self.Y = os.listdir(self.y_path)
self.length = len(self.X)
def __l... |
def callback(odom_msg):
q = np.array([odom_msg.pose.pose.orientation.x, odom_msg.pose.pose.orientation.y, odom_msg.pose.pose.orientation.z, odom_msg.pose.pose.orientation.w])
e = tfs.euler_from_quaternion(q, 'rzyx')
euler_msg = Vector3Stamped()
euler_msg.header = odom_msg.header
euler_msg.vector.z =... |
class RandomSampler(Sampler):
    """Sampler yielding the indices of *data_set* in a uniformly random order."""
    def __call__(self, data_set):
        # permutation(n) draws a shuffled arange(n); return it as a plain
        # Python list of indices.
        shuffled = np.random.permutation(len(data_set))
        return list(shuffled)
def mobilenetv3_large_075(pretrained=False, **kwargs):
    """MobileNetV3-Large with a 0.75 width multiplier."""
    return _gen_mobilenet_v3('mobilenetv3_large_075', 0.75, pretrained=pretrained, **kwargs)
def eval_all(model, trainloader, devloader, testloader):
train_results = evaluate(trainloader, model)
dev_results = evaluate(devloader, model)
test_results = evaluate(testloader, model)
print(('Final loss. Train: %.4f Dev: %.4f Test: %.4f' % (train_results['loss'], dev_results['loss'], test_results['los... |
def masked_mae_np(y_true, y_pred, null_val=np.nan):
    """Mean absolute error that ignores entries equal to *null_val*.

    The mask from ``mask_np`` is renormalized by its mean so valid entries
    are reweighted to compensate for the masked-out ones; NaNs produced by
    an all-zero mask are zeroed out via ``np.nan_to_num``.
    """
    weights = mask_np(y_true, null_val)
    weights /= weights.mean()
    abs_err = np.abs(y_true - y_pred)
    return np.mean(np.nan_to_num(weights * abs_err))
def cnn_model(vocab_length, embedding_dim=32, sequence_length=52, dropout_rate=0.5, num_filters=32, hidden_units=50, for_interpretation=False):
input_shape = (sequence_length,)
activation_function = tf.keras.activations.relu
if for_interpretation:
activation_function = tf.keras.activations.softplus
... |
def register_Ns3NetDeviceQueueInterface_methods(root_module, cls):
cls.add_constructor([param('ns3::NetDeviceQueueInterface const &', 'arg0')])
cls.add_constructor([])
cls.add_method('CreateTxQueues', 'void', [])
cls.add_method('GetLateTxQueuesCreation', 'bool', [], is_const=True)
cls.add_method('Ge... |
def test_compare_lt():
a_raw = torch.tensor([2.0, 2.0, 2.0])
b_raw = torch.tensor([1.0, 2.0, 3.0])
feature_dim = Dim(3)
a = Tensor(name='a', raw_tensor=a_raw, dims=[feature_dim], dtype='float32')
b = Tensor(name='b', raw_tensor=b_raw, dims=[feature_dim], dtype='float32')
result = (a < b)
res... |
class Config():
def __init__(self, args):
self.config = {}
self.args = args
registry.register('configuration', self)
user_config = self._build_opt_list(self.args.options)
config = OmegaConf.load(self.args.cfg_path)
runner_config = self.build_runner_config(config)
... |
def _FormattedValue(t, symbols, inferred_symbols):
    """Visit an ``ast.FormattedValue`` node (an f-string interpolation).

    Dispatches into the interpolated expression and, when a non-literal
    format spec is present, into the spec as well.
    """
    _dispatch(t.value, symbols, inferred_symbols)
    spec = t.format_spec
    if spec is not None and not isinstance(spec, ast.Str):
        _dispatch(spec, symbols, inferred_symbols)
def calculate_activation_statistics(files, model, batch_size=50, dims=2048, device='cpu', num_workers=8):
    """Compute the mean vector and covariance matrix of *model* activations
    over *files* -- the (mu, sigma) pair used by FID-style metrics."""
    activations = get_activations(files, model, batch_size, dims, device, num_workers)
    return (np.mean(activations, axis=0), np.cov(activations, rowvar=False))
class TFGPTJPreTrainedModel(metaclass=DummyObject):
    """Placeholder class emitted when TensorFlow is unavailable; any
    attempt to instantiate it raises via ``requires_backends``."""
    # Backends this dummy stands in for.
    _backends = ['tf']
    def __init__(self, *args, **kwargs):
        requires_backends(self, ['tf'])
def k_means_1d(x, k, max_iter=100):
sorted_x = sorted(list(set(x)))
x = np.array(x)
if (len(sorted_x) < k):
raise ValueError('too few buckets')
gap = (len(sorted_x) / k)
centroids = np.array([sorted_x[int((x * gap))] for x in range(k)])
assign = None
for i in range(max_iter):
... |
def optimize_acqf_sgld(acq_function: AcquisitionFunction, bounds: Tensor, q: int, num_restarts: int, raw_samples: Optional[int]=None, options: Optional[Dict[(str, Union[(bool, float, int, str)])]]=None, inequality_constraints: Optional[List[Tuple[(Tensor, Tensor, float)]]]=None, equality_constraints: Optional[List[Tupl... |
def create_lexicon(cfg: KaldiInitializerConfig, fst_dir: Path, unique_label: str, in_units_file: Path, out_words_file: Path) -> (Path, Path):
disambig_in_units_file = (fst_dir / f'kaldi_dict.{cfg.in_labels}_disambig.txt')
lexicon_file = (fst_dir / f'kaldi_lexicon.{unique_label}.txt')
disambig_lexicon_file =... |
class StringPrimitiveStatement(PrimitiveStatement[str]):
def __init__(self, test_case: tc.TestCase, value: (str | None)=None, constant_provider: (constants.ConstantProvider | None)=None) -> None:
super().__init__(test_case, Instance(test_case.test_cluster.type_system.to_type_info(str)), value, constant_prov... |
def check(variables, Ar, dim):
    """Yield, for each index in ``range(dim)``, the constraint that
    ``variables[e]`` equals ``Select(Ar, e)`` (presumably a z3 array
    lookup -- confirm against the surrounding solver code)."""
    for idx in range(dim):
        yield variables[idx] == Select(Ar, idx)
def call_api(prompt, image_path):
def encode_image(image_path):
with open(image_path, 'rb') as image_file:
return base64.b64encode(image_file.read()).decode('utf-8')
base64_image = encode_image(image_path)
headers = {'Content-Type': 'application/json', 'Authorization': f'Bearer {API_KEY}... |
def get_click_checkboxes_hard(metadata):
    """Sparse reward: 0.0 until the episode is done, then +1.0 for a raw
    reward of exactly 1.0 and -1.0 otherwise."""
    if not metadata['done']:
        return 0.0
    if metadata['raw_reward'] == 1.0:
        return 1.0
    return -1.0
def test_sanitize_case_custom_replacement(sanitized_case_factory_factory):
    """Sanitization must substitute the caller-supplied replacement string."""
    custom_replacement = '[Redacted]'
    sanitized = sanitized_case_factory_factory(
        path_parameters={'password': '1234'},
        default_replacement=custom_replacement,
    )
    assert (sanitized.path_parameters['password'] == custom_replacement)
def collect_configurations():
    """Build the cross product of config/fourier/importance settings,
    pairing each combination with its formatted output filename.
    """
    cfgs = []
    for (config, fourier, importance) in itertools.product(configX, fourierX, importanceX):
        filename = (FILENAME_PATTERN % (config[0], fourier[0], importance[0]))
        # NOTE(review): ``importance[2]`` breaks the [1]-index pattern used
        # for config and fourier -- possibly a typo for importance[1].
        # Confirm the shape of importanceX entries before changing.
        cfgs.append((config[1], fourier[1], importance[2], filename))
    return cfgs
def env_desc_gen(**config):
env_id = config['env_id']
assert (env_id in SCENARIO_CONFIGS), f'available env ids: {SCENARIO_CONFIGS.keys()}'
if ('scenario_configs' not in config):
config['scenario_configs'] = SCENARIO_CONFIGS[env_id]
else:
scenario_config = SCENARIO_CONFIGS[env_id].copy()
... |
class AmberApp(tk.Tk):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.geometry('800x600+500+100')
self.resizable(0, 0)
self.style = ttk.Style()
self.grid_columnconfigure(0, weight=1)
self.grid_rowconfigure(0, weight=1)
self.global_... |
def test_top_down_unary():
    """Round-trip check for the TOP_DOWN_UNARY transition scheme."""
    check_reproduce_tree(transition_scheme=TransitionScheme.TOP_DOWN_UNARY)
def convert_example_to_features(example, max_seq_length, tokenizer, mlm_loss):
tokens_a = example.tokens_a[:max_seq_length]
tokens_nl = example.tokens_nl[:max_seq_length]
tokens_sql = example.tokens_sql[:max_seq_length]
raw_label = example.raw_label
col_ids = [i for (i, x) in enumerate(tokens_a) if ... |
def register_Ns3ApInfo_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::ApInfo const &', 'arg0')])
cls.add_instance_attribute('m_activeProbing', 'bool', is_const=False)
cls.add_instance_attribute('m_apAddr', 'ns3::Mac48Address', is_const=False)
cls.add_instance_att... |
def to_edgelist(G_times, outfile):
    """Write a sequence of graphs as a temporal edge-list CSV.

    Each output line is ``t,u,v`` where ``t`` is the index of the graph in
    *G_times* and ``(u, v)`` is an edge of that graph.

    Args:
        G_times: iterable of graphs, each exposing an ``edges`` iterable of
            (u, v) pairs (e.g. networkx graphs).
        outfile: path of the CSV file to create/overwrite.
    """
    # ``with`` guarantees the handle is closed even if a write raises
    # (the original leaked the file on error); enumerate replaces the
    # manual tdx counter.
    with open(outfile, 'w') as handle:
        for tdx, G in enumerate(G_times):
            for (u, v) in G.edges:
                handle.write(f'{tdx},{u},{v}\n')
    print('write successful')
def main():
parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments))
if ((len(sys.argv) == 2) and sys.argv[1].endswith('.json')):
(model_args, data_args, training_args) = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
else:
(model_args, data_args,... |
def print_assistant_thoughts(ai_name: object, assistant_reply_json_valid: object, speak_mode: bool=False) -> None:
assistant_thoughts_reasoning = None
assistant_thoughts_plan = None
assistant_thoughts_speak = None
assistant_thoughts_criticism = None
assistant_thoughts = assistant_reply_json_valid.ge... |
def runcmd(cmd, shell=True):
if shell:
cmd = ' '.join(cmd)
_debug(cmd)
else:
_debug(' '.join(cmd))
try:
import subprocess
except ImportError:
returncode = os.system(cmd)
else:
returncode = subprocess.call(cmd, shell=shell)
if returncode:
sy... |
class RewardModelWrapper(gym.Wrapper):
def __init__(self, env, cfg):
self.env = env
super().__init__(env)
self.metric_keys = list(env.metrics.keys())
self.datapoints = []
self._last_changes = (- 1)
def step(self, action):
ret = self.env.step(action)
if (se... |
class ConvUser():
    """Conversation participant with a default display name and a fresh
    random UUID string as its identifier."""
    def __init__(self):
        # Independent assignments; a new UUID is drawn per instance.
        self.id = str(uuid4())
        self.first_name = 'Anonym'
class MiniImagenetClassDataset(ClassDataset):
folder = 'miniimagenet'
gdrive_id = '16V_ZlkW4SsnNDtnGmaBRq2OoPmUOc5mY'
gz_filename = 'mini-imagenet.tar.gz'
gz_md5 = 'b38f1eb4251fb9459ecc8e7febf9b2eb'
pkl_filename = 'mini-imagenet-cache-{0}.pkl'
filename = '{0}_data.hdf5'
filename_labels = '{0... |
.parametrize('nuclide_name', ['Ni-56', 'Fe-52', 'Cr-48'])
def test_inventories_dict(gamma_ray_simulation_state, nuclide_name):
nuclide = rd.Nuclide(nuclide_name)
isotopic_mass_fractions = gamma_ray_simulation_state.composition.isotopic_mass_fraction
composition = gamma_ray_simulation_state.composition
c... |
class FixedNormalizer(object):
def __init__(self, size, default_clip_range=np.inf, mean=0, std=1, eps=1e-08):
assert (std > 0)
std = (std + eps)
self.size = size
self.default_clip_range = default_clip_range
self.mean = (mean + np.zeros(self.size, np.float32))
self.std... |
def exp_train_mot17_final_net():
config['epoch_size'] = 664
config['mot_root'] = '/home/ssm/ssj/dataset/MOT17'
config['base_net_folder'] = '/home/ssm/ssj/weights/MOT17/vgg16_reducedfc.pth'
config['log_folder'] = '/home/ssm/ssj/weights/MOT17/0528-E120-M80-G30-log'
config['save_folder'] = '/home/ssm/s... |
class HyperbolicModelUHP(HyperbolicModel):
Element = HyperbolicPointUHP
_Geodesic = HyperbolicGeodesicUHP
_Isometry = HyperbolicIsometryUHP
def __init__(self, space):
HyperbolicModel.__init__(self, space, name='Upper Half Plane Model', short_name='UHP', bounded=True, conformal=True, dimension=2,... |
def read_CIFAR10(data_folder):
train_img = []
train_label = []
test_img = []
test_label = []
train_file_list = ['data_batch_1', 'data_batch_2', 'data_batch_3', 'data_batch_4', 'data_batch_5']
test_file_list = ['test_batch']
for i in xrange(len(train_file_list)):
tmp_dict = unpickle(o... |
class AGCode(AbstractLinearCode):
    """Algebraic-geometry code exposing the function field it is built over."""
    def base_function_field(self):
        """Return the function field underlying this AG code."""
        return self._function_field
def write_predictions(logger, all_examples, all_features, all_results, n_best_size, max_answer_length, do_lower_case, output_prediction_file, output_nbest_file, verbose_logging, write_prediction=True, return_prediction=False):
example_index_to_features = collections.defaultdict(list)
for feature in all_features... |
def get_dps_single_file(ext_type_hints: dict) -> Tuple[list]:
nlp_prep = NLPreprocessor()
vars_type_slots = []
params_type_slots = []
rets_type_slots = []
vars_type_hints = []
params_type_hints = []
rets_type_hints = []
ext_type_hints['variables_p'] = {}
for (m_v, m_v_o) in zip(ext_t... |
class MemoryEfficientFP16Optimizer(_MemoryEfficientFP16OptimizerMixin, optim.FairseqOptimizer):
def __init__(self, cfg: DictConfig, params, optimizer, **kwargs):
if (not optimizer.supports_memory_efficient_fp16):
raise ValueError('Unsupported optimizer: {}'.format(optimizer.__class__.__name__))
... |
def _neighbour(xy, size):
x = (xy // size)
y = (xy % size)
xs = jnp.array([x, (x + 1), (x - 1), (x + 1), (x - 1), x])
ys = jnp.array([(y - 1), (y - 1), y, y, (y + 1), (y + 1)])
on_board = ((((0 <= xs) & (xs < size)) & (0 <= ys)) & (ys < size))
return jnp.where(on_board, ((xs * size) + ys), (- 1)... |
def train(model=model):
for epoch in range(num_epochs):
if ((epoch % 10) == 0):
train_set = torchvision.datasets.CIFAR100(root='../CIFAR100', train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomCrop(32, 4), transforms.ToTensor(), normalize]), download=Tr... |
class CacheGenderizer():
def __init__(self, db_client, manual_cache_col, genderapi_cache_col, genderize_cache_col, firstname_cache_col):
self.manual_cache_col = db_client['genderCache'][manual_cache_col]
self.genderapi_cache_col = db_client['genderCache'][genderapi_cache_col]
self.genderize_... |
.parametrize(['log_level', 'specific_log_level'], [('Info', False), ('INFO', False), ('INFO', True), ('DEBUG', False), ('DEBUG', True), ('WARNING', True), ('ERROR', True), ('CRITICAL', True), ('NOTSET', False)])
class TestSimulationLogging():
def test_logging_config(self, atomic_data_fname, caplog, log_level, speci... |
def register_archive_format(name, function, extra_args=None, description=''):
if (extra_args is None):
extra_args = []
if (not isinstance(function, collections.Callable)):
raise TypeError(('The %s object is not callable' % function))
if (not isinstance(extra_args, (tuple, list))):
ra... |
class InceptionResnetV2Triplet(nn.Module):
def __init__(self, embedding_dimension=512, pretrained=False):
super(InceptionResnetV2Triplet, self).__init__()
if pretrained:
self.model = inceptionresnetv2(pretrained='imagenet')
else:
self.model = inceptionresnetv2(pretrai... |
def load_tf_conv2d(weights, layer, transpose=False):
if isinstance(weights, list):
if (len(weights) == 2):
layer.bias.data = torch.tensor(weights[1]).view(layer.bias.data.shape)
weights = weights[0]
if transpose:
dim_order = (3, 2, 1, 0)
else:
dim_order = (3, 2, 0... |
class CategoricalColumnWithIdentityTransformer(CategoricalColumnTransformer):
def __init__(self, key, num_buckets, default_value=None):
self.key = key
self.num_buckets = num_buckets
self.default_value = default_value
def _set_feature_column_names(self, names):
CategoricalColumnTr... |
def rmtree(path, ignore_errors=False):
    """Delete the directory tree at *path*, clearing the Windows read-only
    bit on entries that refuse deletion before retrying."""
    def _retry_writable(func, target, _exc_info):
        # On Windows a read-only file blocks deletion: make it writable and
        # retry. Elsewhere the retry simply re-raises the original failure.
        if os.name == 'nt':
            os.chmod(target, stat.S_IWRITE)
        func(target)
    return shutil.rmtree(path, ignore_errors=ignore_errors, onerror=_retry_writable)
class MINIBOONE():
class Data():
def __init__(self, data):
self.x = data.astype(np.float32)
self.N = self.x.shape[0]
def __init__(self):
file = (datasets.root + 'miniboone/data.npy')
(trn, val, tst) = load_data_normalised(file)
self.trn = self.Data(trn)
... |
def check_prior_RS_BN_high_dim(teacher, student, n_samples):
mx_hat_values = np.linspace(1, 3, 30)
df = simple_run_experiments(get_prior_RS_BN_instance, teacher=teacher, student=student, mx_hat=mx_hat_values, qx_hat=1, tx_hat=1, sample=np.arange(n_samples)).drop(columns=['student', 'teacher', 'sample'])
df ... |
def sample(train: list[Example], k: int):
    """Return *k* examples drawn from a deterministic shuffle of *train*.

    The shuffle is seeded by ``dsp.settings.branch_idx``, so repeated calls
    within the same branch produce the same order; each element is wrapped
    in a fresh ``dsp.Example``.
    """
    rng = random.Random(dsp.settings.branch_idx)
    copies = [dsp.Example(item) for item in train]
    rng.shuffle(copies)
    return copies[:k]
def one_step_diff(dat, axis):
    """Symbolic first-order difference along *axis*: element i minus
    element i+1 (note the sign -- leading slice minus trailing slice)."""
    leading = mx.sym.slice_axis(dat, axis=axis, begin=0, end=-1)
    trailing = mx.sym.slice_axis(dat, axis=axis, begin=1, end=None)
    return leading - trailing
def env_desc_gen(**config):
    """Instantiate a throwaway SC2Env to capture its agent/space metadata,
    then return a serializable environment description dict."""
    probe = SC2Env(**config)
    description = {
        'creator': SC2Env,
        'possible_agents': probe.possible_agents,
        'action_spaces': probe.action_spaces,
        'observation_spaces': probe.observation_spaces,
        'state_spaces': probe.state_spaces,
        'config': config,
    }
    probe.close()
    return description
def compute_hd(mask1, mask2):
if ((mask1.sum() > 0) and (mask2.sum() > 0)):
hausdorff_distance_filter = sitk.HausdorffDistanceImageFilter()
img1 = sitk.GetImageFromArray(mask1.astype(int))
img2 = sitk.GetImageFromArray(mask2.astype(int))
hausdorff_distance_filter.Execute(img1, img2)
... |
class KitchenMicrowaveKettleLightSliderV0Custom(KitchenBase):
TASK_ELEMENTS = ['microwave', 'kettle', 'light switch', 'slide cabinet']
def render(self, mode='human', width=None, height=None):
if ((width is None) or (height is None)):
return []
camera = engine.MovableCamera(self.sim, ... |
def test_get_predecessors(graph, node, second_node):
graph.add_node(node)
graph.add_node(second_node)
graph.add_edge(node, second_node)
result = graph.get_predecessors(second_node)
assert (result == {node}) |
_tokenizers
_vision
class VisionTextDualEncoderProcessorTest(unittest.TestCase):
def setUp(self):
self.tmpdirname = tempfile.mkdtemp()
vocab_tokens = ['[UNK]', '[CLS]', '[SEP]', '[PAD]', '[MASK]', 'want', '##want', '##ed', 'wa', 'un', 'runn', '##ing', ',', 'low', 'lowest']
self.vocab_file = ... |
class Bottleneck(nn.Module):
expansion: int = 4
def __init__(self, inplanes: int, planes: int, stride: int=1, downsample: Optional[nn.Module]=None, groups: int=1, base_width: int=64, dilation: int=1, norm_layer: Optional[Callable[(..., nn.Module)]]=None) -> None:
super(Bottleneck, self).__init__()
... |
def load_imf(log_path, config_fpath=None, ckpt_fpath=None, epoch=None, verbose=False, return_trainer=False, return_cfg=False):
if (config_fpath is None):
config_fpath = osp.join(log_path, 'config', 'config.yaml')
with open(config_fpath) as f:
cfg = dict2namespace(yaml.load(f, Loader=yaml.Loader)... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.