code (string, lengths 281 to 23.7M) |
|---|
def process_dicom_directory(dicom_directory, parent_sorting_field='PatientName', output_image_name_format='{parent_sorting_data}_{study_uid_index}_{Modality}_{image_desc}_{SeriesNumber}', output_structure_name_format='{parent_sorting_data}_{study_uid_index}_{Modality}_{structure_name}', output_dose_name_format='{parent... |
class Dataset():
def __init__(self, config, mode):
self.config = config
if (self.config.MODE == 'training'):
self.input_tensors = self.inputs_for_training(mode)
else:
(self.input_tensors, self.name_list) = self.inputs_for_testing()
self.feed = iter(self.input_... |
def conv3D_layer_bn(l0, name=None, filters=32, kernel_size=(3, 3, 3), strides=(1, 1, 1), padding=3, activation='relu', kernel_initializer='he_normal'):
l = Conv3D(filters=filters, name=name, kernel_size=kernel_size, strides=strides, padding=padding, activation=activation, kernel_initializer=kernel_initializer)(l0)
... |
class Policy(Parameterized):
def __init__(self, env_spec):
Parameterized.__init__(self)
self._env_spec = env_spec
def get_action(self, observation):
raise NotImplementedError
def reset(self):
pass
def observation_space(self):
return self._env_spec.observation_spac... |
@DatasetReader.register('nfh_orcale_reader')
class NFHReader(DatasetReader):
def __init__(self, token_indexers: Dict[(str, TokenIndexer)]=None, oracle_head: str='ref', lazy: bool=False) -> None:
super().__init__(lazy)
self._token_indexers = (token_indexers or {'tokens': SingleIdTokenIndexer()})
self._span_d = self... |
def add_one_qubit_gates(circ: QuantumCircuit, q_reg: QuantumRegister, params: List[float], u3: bool=True) -> QuantumCircuit:
for (i, qubit) in enumerate(q_reg):
if u3:
circ.u(params[(i * 3)], params[((i * 3) + 1)], params[((i * 3) + 2)], qubit)
else:
circ.rx(params[(i * 3)], ... |
@resource('/v1/user/robots/<robot_shortname>/permissions')
@path_param('robot_shortname', 'The short name for the robot, without any user or organization prefix')
class UserRobotPermissions(ApiResource):
    @require_user_admin()
    @nickname('getUserRobotPermissions')
def get(self, robot_shortname):
parent = get_authenticated_user()
... |
def main():
logging.basicConfig(level=logging.WARNING)
parser = argparse.ArgumentParser()
parser.add_argument('path', help='path to file(s) to reserialize')
parser.add_argument('-a', '--all', action='store_true', help='reserialize all JSON files under path')
args = parser.parse_args()
if args.al... |
class _TestNumericOpsBase(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
cls.base_add_df = ta.dataframe({'c': [0, 1, 3], 'd': [5, 5, 6], 'e': [1.0, 1, 7]})
cls.base_log_df = ta.dataframe({'int32': ta.column([1, 0, 4, None], dtype=dt.Int32(nullable=True)), 'int64': ta.column([1, 0, 4, None], dtype=dt.Int64... |
class GumbelTempScheduler(pl.callbacks.Callback):
def __init__(self, init_temp, final_temp):
self.init_temp = init_temp
self.final_temp = final_temp
def on_train_batch_start(self, trainer, pl_module, batch, batch_idx, dataloader_idx):
current_step = pl_module.global_step
max_step... |
class Junction(list):
def __init__(self, *args, **kwargs):
self.__dict__.update(kwargs)
list.__init__(self, *args)
def __str__(self):
layer_info = ['{}'.format(layer) for layer in self]
return '<Junction object \n\t{}\n\t{}>'.format(str(self.__dict__), '\n\t'.join(layer_info)) |
class TestTensorDictParams():
def _get_params(self):
module = nn.Sequential(nn.Linear(3, 4), nn.Linear(4, 4))
params = TensorDict.from_module(module)
params.lock_()
return params
class CustomModule(nn.Module):
def __init__(self, *params):
super().__init__()
... |
@stubclass(ReahlWSGIApplication)
class ReahlWSGIApplicationStub(ReahlWSGIApplication):
def add_reahl_static_files(self):
static_files = self.config.web.frontend_libraries.packaged_files()
static_files_no_js = [packaged_file for packaged_file in static_files if (not packaged_file.relative_name.endswith('.js'))... |
def test_model_policy_gradient_limited_iterations():
x0 = np.random.randn(10)
result = model_policy_gradient(sum_of_squares, x0, learning_rate=0.1, decay_rate=0.96, decay_steps=10, log_sigma_init=(- 6.0), batch_size=30, radius_coeff=3.0, warmup_steps=10, known_values=None, max_iterations=15)
assert isinstan... |
class OpenEditModel(torch.nn.Module):
def __init__(self, opt):
super().__init__()
self.opt = opt
self.FloatTensor = torch.cuda.FloatTensor
self.ByteTensor = torch.cuda.ByteTensor
self.perturbation = opt.perturbation
(self.netG, self.netD, self.netE) = self.initialize_... |
def risk_difference(a, b, c, d, alpha=0.05):
check_positivity_or_throw(a, b, c, d)
warn_if_normal_approximation_invalid(a, b, c, d)
zalpha = normal_ppf((1 - (alpha / 2)))
r1 = (a / (a + b))
r0 = (c / (c + d))
riskdiff = (r1 - r0)
sd = np.sqrt((((r1 * (1 - r1)) / (a + b)) + ((r0 * (1 - r0)) /... |
def cmd__py3(cmdline, bufsize=(- 1), cwd=None, timeout=60):
cmdline.insert(0, '-u')
cmdline.insert(0, sys.executable)
p = subprocess.Popen(cmdline, bufsize=bufsize, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
killed = True
try:
(out, err) = p.communicate(timeout=tim... |
@DETECTORS.register_module()
class CondInst(SingleStageDetector):
    'Implementation of `CondInst <https://arxiv.org/abs/2003.05664>`_'
def __init__(self, backbone, neck, bbox_head, mask_branch, mask_head, segm_head=None, train_cfg=None, test_cfg=None, pretrained=None, init_cfg=None):
super(CondInst, self).__init__(backbone, neck, bbox_head, train_cfg, test_cf... |
class CIFAR100(data.Dataset):
base_folder = 'cifar-100-python'
    url = 'https://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz'
filename = 'cifar-100-python.tar.gz'
tgz_md5 = 'eb9058c3a382ffc7106e4002c42a8d85'
train_list = [['train', '16019d7e3df5f24257cddd939b257f8d']]
test_list = [['test', 'f0ef6b0ae62326f3e7ffdfab6717acfc']]
def __init__(self, ... |
def trunc_normal_init(module: nn.Module, mean: float=0, std: float=1, a: float=(- 2), b: float=2, bias: float=0) -> None:
if (hasattr(module, 'weight') and (module.weight is not None)):
trunc_normal_(module.weight, mean, std, a, b)
if (hasattr(module, 'bias') and (module.bias is not None)):
nn.i... |
class DataAugmentationTransform(object):
def __init__(self, input_size):
self.input_size = input_size
def get_aug_policy_1(self):
dns_1 = A.Compose([A.Transpose(p=0.5), A.VerticalFlip(p=0.5), A.HorizontalFlip(p=0.5), A.RandomBrightness(limit=0.1, p=0.75), A.RandomContrast(limit=0.1, p=0.75), A.O... |
def test_equal_connections():
road1 = xodr.create_road(xodr.Line(100), 1, 2, 2)
road2 = xodr.create_road(xodr.Line(100), 2, 2, 2)
jc = xodr.CommonJunctionCreator(100, 'my junc')
jc.add_incoming_road_cartesian_geometry(road1, 0, 0, 0, 'successor')
jc.add_incoming_road_cartesian_geometry(road2, 20, 0,... |
def add_ngram(sequences, token_indice, ngram_range=2):
new_sequences = []
for input_list in sequences:
new_list = input_list[:]
for i in range(((len(new_list) - ngram_range) + 1)):
for ngram_value in range(2, (ngram_range + 1)):
ngram = tuple(new_list[i:(i + ngram_val... |
def get_pspnet(backbone, num_classes, aux=False, model_name=None, pretrained=False, root=os.path.join('~', '.torch', 'models'), **kwargs):
net = PSPNet(backbone=backbone, num_classes=num_classes, aux=aux, **kwargs)
if pretrained:
if ((model_name is None) or (not model_name)):
raise ValueErro... |
class Banking77Classification(AbsTaskClassification):
def description(self):
return {'name': 'Banking77Classification', 'hf_hub_name': 'mteb/banking77', 'description': 'Dataset composed of online banking queries annotated with their corresponding intents.', 'reference': ' 'category': 's2s', 'type': 'Classif... |
def ack_startup() -> None:
if (not is_worker()):
raise NotEinhornWorker
control_sock_name = os.environ['EINHORN_SOCK_PATH']
control_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
control_sock.connect(control_sock_name)
with contextlib.closing(control_sock):
control_sock.sen... |
def test_adblock_cache(config_stub, easylist_easyprivacy, caplog, ad_blocker):
config_stub.val.content.blocking.adblock.lists = easylist_easyprivacy
config_stub.val.content.blocking.enabled = True
for i in range(3):
print('At cache test iteration {}'.format(i))
with caplog.at_level(logging.I... |
def make_view(*args, **kwargs):
graph = (kwargs['graph'] if ('graph' in kwargs) else None)
if ((len(args) == 1) and isinstance(args[0], SubGraphView)):
return _check_graph(args[0], graph)
(ops, ts) = select.select_ops_and_ts(*args, **kwargs)
sgv = SubGraphView(ops, ts)
return _check_graph(sg... |
def consider_sys_version_info(expr: Expression, pyversion: tuple[(int, ...)]) -> int:
if (not isinstance(expr, ComparisonExpr)):
return TRUTH_VALUE_UNKNOWN
if (len(expr.operators) > 1):
return TRUTH_VALUE_UNKNOWN
op = expr.operators[0]
if (op not in ('==', '!=', '<=', '>=', '<', '>')):
... |
def _parse_paren(line):
global _parse_paren_calls
_parse_paren_calls += 1
if (('(' not in line) or (')' not in line)):
return [[[line]]]
level = 0
max_level = 0
ich = 0
paren_lst = []
for ch in line:
if (ch == '('):
level += 1
paren_lst.append([lev... |
def load_dataset(dataset, transform=None):
if (dataset.lower() in ['cora', 'citeseer', 'pubmed']):
path = os.path.join('.datasets', 'Plantoid')
dataset = Planetoid(path, dataset.lower(), transform=transform)
elif (dataset.lower() in ['cs', 'physics']):
path = os.path.join('.datasets', 'C... |
def main():
sys.stdout.write(banner())
version = '%(prog)s {version}'.format(version=udemy.__version__)
description = 'A cross-platform python based utility to download courses from udemy for personal offline use.'
parser = argparse.ArgumentParser(description=description, conflict_handler='resolve')
... |
class ShippingAddress(Resource):
schema = {'account_id': str, 'city': str, 'company': str, 'country': str, 'created_at': datetime, 'email': str, 'first_name': str, 'geo_code': str, 'id': str, 'last_name': str, 'nickname': str, 'object': str, 'phone': str, 'postal_code': str, 'region': str, 'street1': str, 'street2'... |
def test_classmethod_decorator():
profile = LineProfiler()
c_wrapped = profile(C.c)
assert (c_wrapped.__name__ == 'c')
assert (profile.enable_count == 0)
val = c_wrapped('test')
assert (profile.enable_count == 0)
assert (val == C.c('test'))
assert (profile.enable_count == 0) |
def particle_picking_main(p: PPRequest):
item = particlePickingPool.get(p.path, new_one=True)
result = picking(item.mrc_path, s1=p.sigma1, s2=(p.sigma1 * 1.1), t=3, find_maxima=False, partition_op=None, multiprocessing_process_num=0)
ppr = PPResponse()
ppr.pick_total = len(result)
ppr.uid = item.uid... |
def test_push_pull_same_blobs(pusher, puller, liveserver_session, app_reloader):
credentials = ('devtable', 'password')
layer_bytes = layer_bytes_for_contents(b'some contents')
images = [Image(id='parentid', bytes=layer_bytes, parent_id=None), Image(id='someid', bytes=layer_bytes, parent_id='parentid')]
... |
class ScheduledOptim():
def __init__(self, optimizer, learning_rate, minimum_learning_rate, epoch_decay_rate, grad_clip=2, n_warmup_steps=0, summarywriter=None):
self._optimizer = optimizer
self.n_current_steps = 0
self.lr = learning_rate
self.mlr = minimum_learning_rate
self... |
def normalize_string(s):
def remove_articles(text):
regex = re.compile('\\b(a|an|the)\\b', re.UNICODE)
return re.sub(regex, ' ', text)
def white_space_fix(text):
return ' '.join(text.split())
def remove_punc(text):
exclude = set(string.punctuation)
return ''.join((ch ... |
class DSpritesMapper(Dataset):
def __init__(self, dataset: DSprites, output_path: str):
self.dataset = dataset
self.output_path = output_path
self.training_set_ids = self._get_training_set_ids(ratio=0.8)
def __len__(self):
return len(self.dataset)
def __getitem__(self, idx: i... |
class SegmentronConfig(dict):
def __init__(self, *args, **kwargs):
super(SegmentronConfig, self).__init__(*args, **kwargs)
self.immutable = False
def __setattr__(self, key, value, create_if_not_exist=True):
if (key in ['immutable']):
self.__dict__[key] = value
ret... |
class FC6_MultiPath(KickstartCommand):
removedKeywords = KickstartCommand.removedKeywords
removedAttrs = KickstartCommand.removedAttrs
def __init__(self, writePriority=50, *args, **kwargs):
KickstartCommand.__init__(self, writePriority, *args, **kwargs)
self.op = self._getParser()
se... |
class Tsentrywrapper(TestCase):
def test_main(self):
sentry = get_sentry()
try:
raise Exception
except Exception:
exc_info = sys.exc_info()
try:
err = sentry.capture(exc_info)
except SentryError:
return
assert isinstance... |
def find_examples(query=None, negative_query=None, return_stems=False):
result = []
for example_path in chain(*(examples_dir.glob(x) for x in example_globs)):
example_code = example_path.read_text(encoding='UTF-8')
query_match = ((query is None) or (query in example_code))
negative_query... |
class TestDiscoverPackagesAndPyModules():
OPTIONS = {'explicit-src': {'package_dir': {'': 'src'}, 'packages': ['pkg']}, 'variation-lib': {'package_dir': {'': 'lib'}}, 'explicit-flat': {'packages': ['pkg']}, 'explicit-single_module': {'py_modules': ['pkg']}, 'explicit-namespace': {'packages': ['ns', 'ns.pkg']}, 'aut... |
class HoneypotFieldTest(TestCase):
def test_class_of_widget(self):
field = HoneypotField()
self.assertIsInstance(field.widget, HoneypotWidget)
def test_initial_and_value_in_EMPTY_VALUES(self):
field = HoneypotField(initial=None)
output = field.clean('')
self.assertEqual(o... |
def to_bytes(text, session=None):
if isinstance(text, bytes):
return text
if (not isinstance(text, str)):
try:
text = str(text)
except Exception:
text = repr(text)
default_encoding = (session.protocol_flags.get('ENCODING', 'utf-8') if session else 'utf-8')
... |
def _get_required_gas_estimate(gas_measurements: Dict[(str, int)], new_channels: int=0, opening_channels: int=0, opened_channels: int=0, closing_channels: int=0, closed_channels: int=0, settling_channels: int=0, settled_channels: int=0) -> int:
estimate = 0
estimate += (new_channels * gas_required_for_channel_l... |
def test_load_checkpoint_with_prefix():
class FooModule(nn.Module):
def __init__(self):
super().__init__()
self.linear = nn.Linear(1, 2)
self.conv2d = nn.Conv2d(3, 1, 3)
self.conv2d_2 = nn.Conv2d(3, 2, 3)
model = FooModule()
nn.init.constant_(model.lin... |
def format_registers(func_ir: FuncIR, names: dict[(Value, str)]) -> list[str]:
result = []
i = 0
regs = all_values_full(func_ir.arg_regs, func_ir.blocks)
while (i < len(regs)):
i0 = i
group = [names[regs[i0]]]
while (((i + 1) < len(regs)) and (regs[(i + 1)].type == regs[i0].type)... |
def assert_df_equal(df1, df2):
print(('-' * 100))
print('df1')
print(df1)
print('df2')
print(df2)
pd.options.mode.chained_assignment = None
if (('Strand' in df1) and ('Strand' in df2)):
sort_on = 'Chromosome Start End Strand'.split()
df1.Strand = df1.Strand.astype('object')
... |
class Encoder_Net(nn.Module):
def __init__(self, dims, cluster_num):
super(Encoder_Net, self).__init__()
self.layers1 = nn.Linear(dims[0], dims[1])
self.low = nn.Linear(dims[1], cluster_num)
def forward(self, x):
out1 = self.layers1(x)
out1 = F.normalize(out1, dim=1, p=2)... |
def test_singleband_calc_byindex(tmpdir, runner):
outfile = str(tmpdir.join('out.tif'))
result = runner.invoke(main_group, (['calc'] + ['(+ 125 (* 0.1 (read 1 1)))', 'tests/data/shade.tif', outfile]), catch_exceptions=False)
assert (result.exit_code == 0)
with rasterio.open(outfile) as src:
asse... |
def format_report_table_row(package_data: PackageData) -> str:
clear_install_time = f'{package_data.clear_elapsed_time:>3.0f}s'
if (package_data.sys_elapsed_time is not None):
sys_install_time = f'{package_data.sys_elapsed_time:>3.0f}s'
else:
sys_install_time = ''
row_string = f'{package... |
@pytest.mark.parametrize('method, paths, expected_result', [('ls', ['project1'], ('bigquery://', ['project1', None, None])), ('ls', [], ('bigquery://', [None, None, None])), ('ls', ['project1', 'dataset1'], ('bigquery://', ['project1', 'dataset1', None])), ('ls', ['project1', 'dataset1', 'table1'], ('bigquery://', ['project1', 'da... |
def test_preserve_unicode_metadata(monkeypatch, tmp_path):
monkeypatch.chdir(tmp_path)
egginfo = (tmp_path / 'dummy_dist.egg-info')
distinfo = (tmp_path / 'dummy_dist.dist-info')
egginfo.mkdir()
(egginfo / 'PKG-INFO').write_text(UTF8_PKG_INFO, encoding='utf-8')
(egginfo / 'dependency_links.txt')... |
def rotate_about_vector(coords_to_rotate, vector, theta, active=False):
unit_vector = (vector / np.linalg.norm(vector))
u_x = unit_vector[0]
u_y = unit_vector[1]
u_z = unit_vector[2]
s = np.sin(np.radians(theta))
c = np.cos(np.radians(theta))
rotation_matrix = np.array([[(c + ((u_x * u_x) * ... |
class traindataset(data.Dataset):
def __init__(self, root, transform=None, train=True, args=None):
self.root_dir = root
self.transform = transform
self.name = []
self.train = train
self.multitask = args.multitask
self.multiaug = args.multiaug
self.synthesis = ... |
def _convert_to_dict(x: Any, first_level: bool=True) -> (Any | dict[(Any, Any)]):
if isinstance(x, dict):
return {k: _convert_to_dict(v, False) for (k, v) in x.items()}
if (isinstance(x, Iterable) and (not isinstance(x, str))):
if first_level:
return {_Placeholder(): _convert_to_dict... |
def _create_meta_extra_kwargs(*, for_filter: bool) -> dict[(str, dict[(str, bool)])]:
extra_kwargs = {}
for field in SETTINGS_FIELDS:
field_args = ({'required': False, 'allow_null': True} if for_filter else {})
if (field in ALLOW_BLANK_SETTINGS):
field_args['allow_blank'] = True
... |
def plot_curve(log_dicts, args):
if (args.backend is not None):
plt.switch_backend(args.backend)
sns.set_style(args.style)
legend = args.legend
if (legend is None):
legend = []
for json_log in args.json_logs:
for metric in args.keys:
legend.append(f'{j... |
def validate(args):
setup_default_logging()
if args.amp:
if has_apex:
args.apex_amp = True
elif has_native_amp:
args.native_amp = True
assert ((not args.apex_amp) or (not args.native_amp)), 'Only one AMP mode should be set.'
args.pretrained = (args.pretrained or (... |
def model(model: str, **kwargs) -> Type[Model]:
if (model == 'rf'):
from molpal.models.sklmodels import RFModel
return RFModel(**kwargs)
if (model == 'gp'):
from molpal.models.sklmodels import GPModel
return GPModel(**kwargs)
if (model == 'nn'):
return nn(**kwargs)
... |
def getCoeffError(coeff):
if isinstance(coeff, (int, float)):
if (not (coeff > 0)):
return 'coefficient value needs to be strictly > 0.'
elif (not all(((i > 0) for i in coeff))):
return 'all coefficients need to be strictly > 0.'
return 'unknown error with coefficients.' |
def test_coordinates_straight_road(zarr_dataset: ChunkedDataset, cfg: dict) -> None:
render_context = RenderContext(np.asarray(cfg['raster_params']['raster_size']), np.asarray(cfg['raster_params']['pixel_size']), np.asarray(cfg['raster_params']['ego_center']), set_origin_to_bottom=cfg['raster_params']['set_origin_t... |
@functional_datapipe('round_robin_demux')
class RoundRobinDemultiplexerIterDataPipe(IterDataPipe):
def __new__(cls, datapipe: IterDataPipe, num_instances: int, buffer_size: int=1000):
if (num_instances < 1):
            raise ValueError(f'Expected `num_instances` larger than 0, but {num_instances} is found')
if... |
class FC3_SELinux(KickstartCommand):
removedKeywords = KickstartCommand.removedKeywords
removedAttrs = KickstartCommand.removedAttrs
def __init__(self, writePriority=0, *args, **kwargs):
KickstartCommand.__init__(self, writePriority, *args, **kwargs)
self.op = self._getParser()
self.... |
class CustomDataset(Pix2pixDataset):
    @staticmethod
    def modify_commandline_options(parser, is_train):
parser = Pix2pixDataset.modify_commandline_options(parser, is_train)
parser.set_defaults(preprocess_mode='resize_and_crop')
load_size = (286 if is_train else 256)
parser.set_defaults(load_size=load... |
class RegularDateTimeRule():
def __init__(self, year: int=None, month: int=None, day: int=None, weekday: int=None, hour: int=None, minute: int=None, second: int=None, microsecond: int=None):
self.trigger_time = RelativeDelta(year=year, month=month, day=day, weekday=weekday, hour=hour, minute=minute, second=... |
class DummyTrace(object):
def __init__(self, nut):
self.nut = nut
self.codes = nut.codes
self.meta = {}
def tmin(self):
return self.nut.tmin
def tmax(self):
return self.nut.tmax
def deltat(self):
return self.nut.deltat
def nslc_id(self):
return... |
class WSGIRequest(HttpRequest):
def __init__(self, environ):
script_name = get_script_name(environ)
path_info = (get_path_info(environ) or '/')
self.environ = environ
self.path_info = path_info
self.path = ('%s/%s' % (script_name.rstrip('/'), path_info.replace('/', '', 1)))
... |
class Effect3773(BaseEffect):
type = 'passive'
def handler(fit, module, context, projectionRange, **kwargs):
fit.ship.increaseItemAttr('turretSlotsLeft', module.getModifiedItemAttr('turretHardPointModifier'), **kwargs)
fit.ship.increaseItemAttr('launcherSlotsLeft', module.getModifiedItemAttr('la... |
def test_bulk_insert(queue, transaction_factory):
queue_items_locked.labels(queue._queue_name).set(0)
queue_items_available.labels(queue._queue_name).set(0)
queue_items_available_unlocked.labels(queue._queue_name).set(0)
with queue.batch_insert() as queue_put:
queue_put(['abc', 'def'], TEST_MESS... |
class Alibaba():
def __init__(self):
try:
key_id = os.getenv('ALIBABA_ID')
key_secret = os.getenv('ALIBABA_SECRET')
region_id = os.getenv('ALIBABA_REGION_ID')
self.compute_client = AcsClient(key_id, key_secret, region_id)
except Exception as e:
... |
class DilatedResnetBackbone(nn.Module):
def __init__(self, orig_resnet, dilate_scale=8, multi_grid=(1, 2, 4)):
super(DilatedResnetBackbone, self).__init__()
self.num_features = 2048
from functools import partial
if (dilate_scale == 8):
orig_resnet.layer3.apply(partial(sel... |
class BaseDataset(Dataset):
def __init__(self, cfg: DictConfig, split: str) -> None:
assert (split in ['train', 'test', 'val'])
self.cfg = cfg
self.split = split
self.split_dataset = ['train', 'test'][(self.split == 'test')]
if ((self.split_dataset == 'test') and cfg[__key__]... |
def training(config):
if (not os.path.exists(os.path.join(config.split_dir, 'splits.pkl'))):
create_splits(output_dir=config.split_dir, image_dir=config.data_dir)
if (config.saved_model_path is not None):
config.load_model = True
exp = MixExperiment(config=config, name=config.name, n_epochs=... |
class SemanticLossCircuitSolver(Solver):
def __init__(self):
self.sdd = None
def loss(self, *logits):
probs = [torch.softmax(logits[i], dim=(- 1)) for i in range(len(logits))]
if (self.sdd is None):
ys = [ConstShapedLazyTensor(i) for i in range(len(probs))]
slt = ... |
@patch('aimet_common.connected_graph.connectedgraph.ConnectedGraph.__abstractmethods__', set())
def test_serialize_products():
conn_graph = get_dummy_connected_graph()
(activations, params) = connectedgraph_utils._serialize_products(conn_graph)
assert (len(activations) == 5)
assert (len(params) == 3)
expe... |
def _conda_format(req):
def _sub(m):
name = m.group('name').lower()
if (name == 'numpy'):
return 'numpy x.x'
if (name == 'tables'):
name = 'pytables'
(comp, spec) = m.group('comp', 'spec')
if (comp and spec):
formatted = ('%s %s%s' % (name,... |
class SponsorshipsInline(admin.TabularInline):
model = Sponsorship
fields = ['link', 'status', 'year', 'applied_on', 'start_date', 'end_date']
readonly_fields = ['link', 'status', 'year', 'applied_on', 'start_date', 'end_date']
can_delete = False
extra = 0
def link(self, obj):
url = reve... |
def test_cluster():
global outstructs
global outstrings
print('=== Testing generation of point group clusters. This may take some time. ===')
from time import time
from spglib import get_symmetry_dataset
from pyxtal.symmetry import Group
from pyxtal.crystal import random_cluster
from pym... |
def interpolate_fn(x, xp, yp):
(N, K) = (x.shape[0], xp.shape[1])
all_x = torch.cat([x.unsqueeze(2), xp.unsqueeze(0).repeat((N, 1, 1))], dim=2)
(sorted_all_x, x_indices) = torch.sort(all_x, dim=2)
x_idx = torch.argmin(x_indices, dim=2)
cand_start_idx = (x_idx - 1)
start_idx = torch.where(torch.e... |
class LatexyzInstallBhSettings(sublime_plugin.TextCommand):
def run(self, edit, remove=False):
bh_core_settings = sublime.load_settings(bh_core_settings_file)
for (k, v) in bh_core_latex_settings.items():
bh_core_settings.set(k, self.merge(v, bh_core_latex_settings[k], remove))
s... |
class GroupEpicManager(CRUDMixin, RESTManager):
_path = '/groups/{group_id}/epics'
_obj_cls = GroupEpic
_from_parent_attrs = {'group_id': 'id'}
_list_filters = ('author_id', 'labels', 'order_by', 'sort', 'search')
_create_attrs = RequiredOptional(required=('title',), optional=('labels', 'description... |
class TreeCache(object):
STATE_LATENT = 0
STATE_STARTED = 1
STATE_CLOSED = 2
_STOP = object()
def __init__(self, client, path):
self._client = client
self._root = TreeNode.make_root(self, path)
self._state = self.STATE_LATENT
self._outstanding_ops = 0
self._is... |
def test_asynq_traceback_gets_glued_at_each_task_level():
traceback_to_verify = None
try:
async_function_whose_child_async_task_will_throw_an_error()
except ValueError:
traceback_to_verify = sys.exc_info()[2]
assert_is_not(None, traceback_to_verify)
traceback_printed = '\n'.join(trac... |
def init_logging():
loggers = (logging.getLogger(name) for name in logging.root.manager.loggerDict if name.startswith('uvicorn.'))
for uvicorn_logger in loggers:
uvicorn_logger.handlers = []
intercept_handler = InterceptHandler()
logging.getLogger('uvicorn').handlers = [intercept_handler]
lo... |
class TestGetWeightedAverageCacheLoadFactor(unittest.TestCase):
def test_get_avg_cache_load_factor_hbm(self) -> None:
cache_load_factors = [random.random() for _ in range(5)]
embedding_tables: List[ShardedEmbeddingTable] = [ShardedEmbeddingTable(num_embeddings=1000, embedding_dim=MagicMock(), fused_... |
class ShiftAugment(DeformableAugment):
def __init__(self, mask, vector_shift=(10, 10, 10), gaussian_smooth=5):
self.mask = mask
self.vector_shift = vector_shift
self.gaussian_smooth = gaussian_smooth
def augment(self):
(_, transform, dvf) = generate_field_shift(self.mask, self.ve... |
class ColorBar(HeatMapChartDecorator):
def __init__(self, key=None, **plot_settings):
super().__init__(key)
self._color_bar = None
self._plot_settings = plot_settings
def decorate(self, chart: 'HeatMapChart'):
self._color_bar = chart.axes.figure.colorbar(chart.color_mesh_, ax=cha... |
class CooccurGraph(object):
def __init__(self, stopword_ids, tokenizer):
self.stopword_ids = stopword_ids
self.tokenizer = tokenizer
self.word2id = self.tokenizer.get_vocab()
def update_node_dict(self, b, new_sentence):
token_ids = [item for item in new_sentence if (item not in s... |
def evaluate(dataloader, model, criterion, postprocessors, confusion, summary, config, args, epoch):
model.eval()
criterion.eval()
global_thresh = 0.3
logging.error('VALIDATION')
for (i, batch) in enumerate(tqdm(dataloader)):
(seq_images, targets, _) = batch
seq_images = seq_images.c... |
class FCI(GeoBenchmarks):
timeout = 600
region = 'eurol'
reader = 'fci_l1c_nc'
filenames: list[str] = []
def setup_cache(self, *args):
fns = self.get_filenames()
cnt = len(fns)
if (cnt > 40):
raise ValueError(f'Expected 41 files, found {cnt:d}')
if (cnt < ... |
def check_diff(spm_diff, tok_diff, slow, fast):
if (spm_diff == list(reversed(tok_diff))):
return True
elif ((len(spm_diff) == len(tok_diff)) and (fast.decode(spm_diff) == fast.decode(tok_diff))):
return True
spm_reencoded = slow.encode(slow.decode(spm_diff))
tok_reencoded = fast.encode(... |
def find_pruneable_heads_and_indices(heads: List[int], n_heads: int, head_size: int, already_pruned_heads: Set[int]) -> Tuple[(Set[int], torch.LongTensor)]:
mask = torch.ones(n_heads, head_size)
heads = (set(heads) - already_pruned_heads)
for head in heads:
head = (head - sum(((1 if (h < head) else ... |
def train_model(model, db_gen, optimizer, epoch, args, lr_scheduler, criterion, gpu):
model.train()
if args.use_clf_l:
criterion['clf_l'].train()
if args.use_metric_l:
criterion['metric_l'].train()
_loss = 0.0
if args.use_clf_l:
_loss_clf = 0.0
if args.use_metric_l:
... |
class EFI_MEMORY_TYPE(ENUM):
_members_ = ['EfiReservedMemoryType', 'EfiLoaderCode', 'EfiLoaderData', 'EfiBootServicesCode', 'EfiBootServicesData', 'EfiRuntimeServicesCode', 'EfiRuntimeServicesData', 'EfiConventionalMemory', 'EfiUnusableMemory', 'EfiACPIReclaimMemory', 'EfiACPIMemoryNVS', 'EfiMemoryMappedIO', 'EfiMe... |
def main():
assert (sys.version_info >= (3, 12))
root_dir = Path(__file__).resolve().parent.parent
tests_dir = (root_dir / 'tests')
assert tests_dir.is_dir()
test_groups = get_test_groups(root_dir)
test_cases = get_test_cases(test_groups, tests_dir)
os.chdir(tests_dir)
for type_checker i... |
class PeleeBranch1(nn.Module):
def __init__(self, in_channels, out_channels, mid_channels, stride=1):
super(PeleeBranch1, self).__init__()
self.conv1 = conv1x1_block(in_channels=in_channels, out_channels=mid_channels)
self.conv2 = conv3x3_block(in_channels=mid_channels, out_channels=out_chan... |
def writeDocOptions(docFile, options, default_options):
for option in sorted(options.keys()):
defaultOption = ''
defaultOptionType = ''
if (option in default_options):
defaultOptionType = default_options[option].__class__.__name__
if isinstance(default_options[option]... |