code stringlengths 281 23.7M |
|---|
class SingleSiblingPureFunction(PureFunction):
def __init__(self, fcn: Callable, fcntocall: Callable):
self.pfunc = get_pure_function(fcn)
super().__init__(fcntocall)
def _get_all_obj_params_init(self) -> List:
return self.pfunc._get_all_obj_params_init()
def _set_all_obj_params(self... |
class BlueZone(Object):
def from_dict(self):
self.circle_algorithm = self._data.get('circleAlgorithm')
self.land_ratio = self._data.get('landRatio')
self.phase_num = self._data.get('phaseNum')
self.poison_gas_dps = self._data.get('poisonGasDamagePerSecond')
self.radius_rate =... |
class PatchEmbedding(nn.Module):
def __init__(self, in_channels=1, patch_size_w=9, patch_size_h=25, emb_size=(9 * 25), img_size=(342 * 500)):
self.patch_size_w = patch_size_w
self.patch_size_h = patch_size_h
super().__init__()
self.projection = nn.Sequential(nn.Conv2d(in_channels, em... |
class DescribeCoreProperties():
def it_knows_the_string_property_values(self, text_prop_get_fixture):
(core_properties, prop_name, expected_value) = text_prop_get_fixture
actual_value = getattr(core_properties, prop_name)
assert (actual_value == expected_value)
def it_can_change_the_stri... |
def deserialize_privkey(key: str) -> Tuple[(str, bytes, bool)]:
if is_minikey(key):
return ('p2pkh', minikey_to_private_key(key), False)
txin_type = None
if (':' in key):
(txin_type, key) = key.split(sep=':', maxsplit=1)
if (txin_type not in WIF_SCRIPT_TYPES):
raise Bitco... |
def iff(*args):
if ((len(args) == 1) and isinstance(args[0], (tuple, list, set, frozenset, types.GeneratorType))):
args = tuple(args[0])
assert (len(args) >= 2)
res = manage_global_indirection(*args)
if (res is None):
return Iff(*args, meta=True)
res = [(v if (not isinstance(v, (tupl... |
def main():
pl_spec = os.environ.get(PlatSpec_EnvVar, '')
modspec = os.environ.get(ModulesSpec_EnvVar, '')
(with_prepare, pl_name, pdfium_ver, use_v8) = parse_pl_spec(pl_spec)
modnames = parse_modspec(modspec)
if ((ModuleRaw in modnames) and with_prepare and (pl_name != ExtPlats.none)):
prep... |
def get_model(input_shape, labels=2):
model = Sequential()
model.add(Conv3D(32, (3, 3, 3), activation='relu', input_shape=input_shape, padding='same', kernel_initializer='he_uniform'))
model.add(Conv3D(32, (3, 3, 3), activation='relu', padding='same', kernel_initializer='he_uniform'))
model.add(MaxPooli... |
class Constant():
def __init__(self, name, optional=False, requirements=(), doc=None):
self.name = name
if optional:
self_requirement = f'defined(LDAP_{self.name})'
requirements = (list(requirements) + [self_requirement])
self.requirements = requirements
self.... |
_auth
def reset_password(request, pk):
if (request.method == 'POST'):
try:
user = UserProfile.objects.get(id=pk)
reset_pass = (user.username.capitalize() + '')
user.password = make_password(reset_pass)
user.save()
return JsonResponse({'code': 200, ... |
def test_unpack_variables__invalid_gp_dims(shared_datadir, tmp_path):
    """unpack_variables must reject a dataset whose genotypes dim is not 2."""
    # Rechunk without packing so 'call_genotype_probability' keeps a
    # genotypes dimension that unpack_variables considers invalid.
    _, unpacked_ds, _ = _rechunk_bgen(shared_datadir, tmp_path, pack=False)
    expected_msg = "Expecting variable 'call_genotype_probability' to have genotypes dimension of size 2"
    with pytest.raises(ValueError, match=expected_msg):
        unpack_variables(unpacked_ds)
def instruction_format(data_info: Dict, data_type, data_name) -> List[Dict]:
instruction_data = list()
label_mappings = data_info.get('label_mappings')
data_list = data_info['data_list']
for instruction in [dataset2instruction]:
format_info = instruction[data_type]
instruction_processor ... |
class SimpleForm(Form):
def __init__(self, view):
super().__init__(view, 'simple_form')
self.use_layout(FormLayout())
if self.exception:
self.layout.add_alert_for_domain_exception(self.exception)
domain_object = self.get_or_create_domain_object()
link = self.add_c... |
class Blip2VisionConfig(PretrainedConfig):
model_type = 'blip_2_vision_model'
def __init__(self, hidden_size=1408, intermediate_size=6144, projection_dim=512, num_hidden_layers=39, num_attention_heads=16, num_channels=3, image_size=224, patch_size=14, hidden_act='gelu', layer_norm_eps=1e-05, dropout=0.0, attent... |
class Migration(migrations.Migration):
dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]
operations = [migrations.CreateModel(name='DetailKey', fields=[('id', models.AutoField(verbose_name='ID', auto_created=True, serialize=False, primary_key=True)), ('key', models.SlugField()), ('label... |
def test_validate_direction():
validate.direction(Direction.CAUSE)
validate.direction(Direction.EFFECT)
with pytest.raises(ValueError):
validate.direction('dogeeeee')
validate.direction(Direction.BIDIRECTIONAL, allow_bi=True)
with pytest.raises(ValueError):
validate.direction(Directi... |
class WebPage(Object):
def __init__(self, *, client: 'pyrogram.Client'=None, id: str, url: str, display_url: str, type: str=None, site_name: str=None, title: str=None, description: str=None, audio: 'types.Audio'=None, document: 'types.Document'=None, photo: 'types.Photo'=None, animation: 'types.Animation'=None, vid... |
def polygon_align_length(tile_1: Tile, tile_2: Tile):
assert (not tile_1.tile_poly.exterior.is_ccw)
assert (not tile_2.tile_poly.exterior.is_ccw)
trinagle_1_points = list(tile_1.tile_poly.exterior.coords)
trinagle_2_points = list(tile_2.tile_poly.exterior.coords)
total_overlap = 0.0
for i in ran... |
class TestDOTAKF(TestDOTA):
def eval(self):
txt_name = '{}.txt'.format(self.cfgs.VERSION)
real_test_img_list = self.get_test_image()
kf = build_whole_network.DetectionNetworkKF(cfgs=self.cfgs, is_training=False)
self.test_dota(det_net=kf, real_test_img_list=real_test_img_list, txt_na... |
def _add_container_methods(op_container_cls):
op_container_cls._get_image_or_guide = _get_image_or_guide
op_container_cls.get_target_guide = _get_target_guide
op_container_cls.get_target_image = _get_target_image
op_container_cls.get_input_guide = _get_input_guide
op_container_cls._set_image_or_guid... |
def test_session_factory_s3_kwargs():
pytest.importorskip('boto3')
sesh = Session.from_path('s3://lol/wut', aws_access_key_id='foo', aws_secret_access_key='bar')
assert isinstance(sesh, AWSSession)
assert (sesh._session.get_credentials().access_key == 'foo')
assert (sesh._session.get_credentials().s... |
def HANP_Miner(filename, mingap, maxgap, minsup):
read_file(filename)
cannum = 0
frenum = 0
compnum = 0
global S
global ww
global NumbS
global candidate
begin_time = time_now()
min_freItem()
f_level = 1
gen_candidate(f_level)
while (len(candidate) != 0):
for p... |
class LoginEncryptionRequest(Packet):
id = 1
to = 1
def __init__(self, public_key: bytes) -> None:
super().__init__()
self.public_key = public_key
self.verify_token = secrets.token_bytes(16)
def encode(self) -> bytes:
return ((((Buffer.pack_string((' ' * 20)) + Buffer.pac... |
def prune_heads(args, model, eval_dataloader, head_mask):
before_time = datetime.now()
(_, _, loss) = compute_heads_importance(args, model, eval_dataloader, compute_entropy=False, compute_importance=False, head_mask=head_mask)
score_masking = (1 / loss)
original_time = (datetime.now() - before_time)
... |
class PostProcessingBuilderTest(tf.test.TestCase):
def test_build_non_max_suppressor_with_correct_parameters(self):
post_processing_text_proto = '\n batch_non_max_suppression {\n score_threshold: 0.7\n iou_threshold: 0.6\n max_detections_per_class: 100\n max_total_detections... |
class SDFNetwork(nn.Module):
def __init__(self, encoding='hashgrid', num_layers=3, skips=[], hidden_dim=64, clip_sdf=None):
super().__init__()
self.num_layers = num_layers
self.skips = skips
self.hidden_dim = hidden_dim
self.clip_sdf = clip_sdf
assert (self.skips == [... |
.parametrize('index', [None, [0]])
def test_memmap_new(index):
t = torch.tensor([1])
m = MemmapTensor.from_tensor(t)
if (index is not None):
m1 = m[index]
else:
m1 = m
m2 = MemmapTensor.from_tensor(m1)
assert isinstance(m2, MemmapTensor)
assert (m2.filename == m1.filename)
... |
def temp_workspace_factory(workspace):
def fn(files):
def create_file(name, content):
fn = os.path.join(workspace.root_path, name)
with open(fn, 'w', encoding='utf-8') as f:
f.write(content)
workspace.put_document(uris.from_fs_path(fn), content)
fo... |
def test_lock_file_resolves_file_url_symlinks(root: ProjectPackage) -> None:
with tempfile.TemporaryDirectory() as d1:
symlink_path = Path(d1).joinpath('testsymlink')
with tempfile.TemporaryDirectory(dir=d1) as d2, tempfile.TemporaryDirectory(dir=d1) as d4, tempfile.TemporaryDirectory(dir=d2) as d3,... |
def test_range_dynamic_sum(start: int, end: int, step: int, result: wp.array(dtype=int)):
a = int(0)
for i in range(end):
a = (a + 1)
b = int(0)
for i in range(start, end):
b = (b + 1)
c = int(0)
for i in range(start, (end * step), step):
c = (c + 1)
d = int(0)
fo... |
def dicttoxml(obj, root=True, custom_root='root', xml_declaration=True, ids=False, attr_type=True, item_func=default_item_func, cdata=False, include_encoding=True, encoding='UTF-8', return_bytes=True):
LOG.info(('Inside dicttoxml(): type(obj) is: "%s", obj="%s"' % (type(obj).__name__, unicode_me(obj))))
output ... |
def train(models_path, untrained_models, sdn=False, ic_only_sdn=False, device='cpu', ds=False):
print('Training models...')
for base_model in untrained_models:
(trained_model, model_params) = arcs.load_model(models_path, base_model, 0)
dataset = af.get_dataset(model_params['task'])
learn... |
def create_job(batch_cli, body, namespace='default'):
try:
api_response = batch_cli.create_namespaced_job(body=body, namespace=namespace)
return api_response
except ApiException as api:
logging.warn(('Exception when calling BatchV1Api->create_job: %s' % api))
... |
def zz_circuit_execution() -> Tuple[(qiskit.result.Result, np.array, List[int], List[int], float, float)]:
num_of_gates = np.arange(0, 60, 10)
gate_time = 0.1
qubits = [0]
spectators = [1]
(circs, xdata, omega) = zz_circuits(num_of_gates, gate_time, qubits, spectators, nosc=2)
zz_value = 0.1
... |
def isNaN_or_Inf_or_None(x):
isNone = (x is None)
try:
isNaN = np.isnan(x)
isInf = np.isinf(x)
isStr = isinstance(x, str)
except Exception:
isNaN = False
isInf = False
isStr = False
if ((not isNaN) and (not isInf)):
try:
val = get_under... |
class Command(BaseCommand):
def handle(self, *args, **options):
for (name, permissions) in GROUPS:
(group, created) = Group.objects.get_or_create(name=name)
if created:
print(('Group "%s" created' % name))
else:
group.permissions.clear()
... |
_REGISTRY.register()
class VLCS(DatasetBase):
dataset_dir = 'VLCS'
domains = ['caltech', 'labelme', 'pascal', 'sun']
data_url = '
def __init__(self, cfg):
root = osp.abspath(osp.expanduser(cfg.DATASET.ROOT))
self.dataset_dir = osp.join(root, self.dataset_dir)
if (not osp.exists(s... |
class KnownValues(unittest.TestCase):
def test_mp2(self):
cell = build_cell()
mf = pbcscf.RHF(cell).density_fit()
mf.conv_tol = 1e-10
mf.kernel()
pt = pyscf.pbc.mp.mp2.RMP2(mf).run()
self.assertAlmostEqual(pt.e_corr, (- 0.), 7)
self.assertAlmostEqual(pt.e_corr... |
def _majorana_terms_commute(term_a, term_b):
intersection = 0
(i, j) = (0, 0)
while ((i < len(term_a)) and (j < len(term_b))):
if (term_a[i] < term_b[j]):
i += 1
elif (term_a[i] > term_b[j]):
j += 1
else:
intersection += 1
i += 1
... |
class PortfolioParameters():
def __init__(self, scale, variance_weight, mean_weight, max_dd_weight, skewness_weight):
self.scale = scale
self.variance_weight = variance_weight
self.mean_weight = mean_weight
self.max_dd_weight = max_dd_weight
self.skewness_weight = skewness_we... |
class TestUCASAODKL(TestUCASAOD):
def eval(self):
kl = build_whole_network.DetectionNetworkKL(cfgs=self.cfgs, is_training=False)
all_boxes_r = self.eval_with_plac(img_dir=self.args.img_dir, det_net=kl, image_ext=self.args.image_ext)
imgs = os.listdir(self.args.img_dir)
real_test_imgn... |
class SemanticTemplate(TypeTemplate):
public_proxy = ('field',)
def __init__(self, name, field_names, field_members, variant_of):
self.name = name
self.field_names = field_names
self.__field = {f: VariantField(name, f, field_members[f]) for f in self.field_names}
self.variant_of ... |
class MarkupLMFeatureExtractor(FeatureExtractionMixin):
def __init__(self, **kwargs):
requires_backends(self, ['bs4'])
super().__init__(**kwargs)
def xpath_soup(self, element):
xpath_tags = []
xpath_subscripts = []
child = (element if element.name else element.parent)
... |
class Molecule(object):
def __init__(self, name, verbose=True):
self.verbose = verbose
if isinstance(name, str):
self.name = name
filtername = re.sub('[\\(\\[].*?[\\)\\]]', '', name)
try:
self.id = get_molecule_identifier(filtername)
ex... |
class RandomSampling(object):
def __init__(self, num, interval=1, speed=[1.0, 1.0], seed=0):
assert (num > 0), 'at least sampling 1 frame'
self.num = num
self.interval = (interval if (type(interval) == list) else [interval])
self.speed = speed
self.rng = np.random.RandomState... |
def extract_node(code: str, module_name: str='') -> (nodes.NodeNG | list[nodes.NodeNG]):
def _extract(node: (nodes.NodeNG | None)) -> (nodes.NodeNG | None):
if isinstance(node, nodes.Expr):
return node.value
return node
requested_lines: list[int] = []
for (idx, line) in enumerate... |
class TestLayerOutputUtil():
def test_generate_layer_outputs(self):
(quantsim, layer_output_names, dummy_input) = get_quantsim_artifacts()
layer_output_names = [re.sub('\\W+', '_', name) for name in layer_output_names]
(dummy_dataset, dummy_data_loader, data_count) = get_dataset_artifacts()
... |
def _get_ngrams(sentence: Sequence[str], n_gram: int) -> Counter[str]:
if (n_gram not in [1, 2, 3, 4]):
raise ValueError(f'n_gram should be 1, 2, 3, or 4, got {n_gram}.')
ngram_counts = counter()
for n_val in range(1, (n_gram + 1)):
for i in range(0, ((len(sentence) - n_val) + 1)):
... |
class CommandRefactorExtractVariable(Command):
name = commands.COMMAND_REFACTOR_EXTRACT_VARIABLE
kind: CodeActionKind = 'refactor.extract'
document_uri: DocumentUri
range: typing.Range
similar: bool
global_: bool
def validate(self, info):
ast.parse(info.selected_text, mode='eval')
... |
def cosql_get_utterances(utterances: List[str], prefix: str, sep: str=' | ') -> str:
if (len(utterances) > 1):
reversed_utterance_head = (utterance.strip() for utterance in reversed(utterances[:(- 1)]))
serialized_reversed_utterance_head = (' || ' + sep.join(reversed_utterance_head))
else:
... |
class PreConvBlock1bit(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride, padding, dilation=1, bias=False, bn_affine=True, return_preact=False, activate=True, binarized=False):
super(PreConvBlock1bit, self).__init__()
self.return_preact = return_preact
self.activa... |
def _transfer_expired(initiator_app: RaidenService, target_app: RaidenService, token_address: TokenAddress, amount: PaymentAmount, identifier: PaymentID, timeout: Optional[float]=None) -> SecretHash:
assert (identifier is not None), 'The identifier must be provided'
assert isinstance(target_app.message_handler,... |
class CondorJob(cpi.job.Job):
def __init__(self, api, adaptor):
_cpi_base = super(CondorJob, self)
_cpi_base.__init__(api, adaptor)
_CALL
def init_instance(self, job_info):
self.jd = job_info['job_description']
self.js = job_info['job_service']
self._name = self.jd.na... |
class InceptionResNetV2(nn.Module):
def __init__(self, num_classes, loss={'xent'}, **kwargs):
super(InceptionResNetV2, self).__init__()
self.loss = loss
self.conv2d_1a = BasicConv2d(3, 32, kernel_size=3, stride=2)
self.conv2d_2a = BasicConv2d(32, 32, kernel_size=3, stride=1)
... |
def run_client_from_existing_scheduler(args: Namespace, config: Config):
if (args.scheduler_address is not None):
kwargs = {'address': args.scheduler_address}
elif (args.scheduler_file is not None):
kwargs = {'scheduler_file': args.scheduler_file}
else:
raise RuntimeError('Need to sp... |
class TextRCNN(Classifier):
def __init__(self, dataset, config):
super(TextRCNN, self).__init__(dataset, config)
self.rnn = RNN(config.embedding.dimension, config.TextRCNN.hidden_dimension, num_layers=config.TextRCNN.num_layers, batch_first=True, bidirectional=config.TextRCNN.bidirectional, rnn_type... |
class PerformNotificationAction():
    """Command payload that acts on a device notification.

    Attributes are declared as class-level annotations only; instances are
    expected to carry ``notification_id`` and ``is_positive`` before
    ``to_list`` is called (presumably a dataclass decorator was attached
    in the original source -- TODO confirm against the full file).
    """
    # Identifier of the notification being acted upon (packed as u32 LE).
    notification_id: int
    # True selects ActionID.Positive, False selects ActionID.Negative.
    is_positive: bool

    def to_list(self) -> List[int]:
        """Serialize the command into a list of byte values.

        Wire layout is ``<BIB`` (little-endian): command id (u8),
        notification id (u32), action id (u8).
        """
        action = ActionID.Positive if self.is_positive else ActionID.Negative
        # Fix: the format was written as f'<BIB' -- an f-string with no
        # placeholders (ruff F541); a plain literal is equivalent.
        msg = struct.pack('<BIB', CommandID.PerformNotificationAction,
                          self.notification_id, action)
        return list(msg)
def test_traceback_failure(pytester: Pytester) -> None:
p1 = pytester.makepyfile('\n def g():\n return 2\n def f(x):\n assert x == g()\n def test_onefails():\n f(3)\n ')
result = pytester.runpytest(p1, '--tb=long')
result.stdout.fnmatch_lines(['*test_... |
class TestMl():
def session(self):
db_config = MySQLConfig()
engine = sa.create_engine(f'mysql://{db_config.user}:{db_config.password}{db_config.host}:{db_config.port}/{db_config.database}?charset=utf8mb4')
session_factory = sessionmaker(engine)
session = scoped_session(session_facto... |
def gaussian_blur(blur, data=None, target=None):
if (not (data is None)):
if (data.shape[1] == 3):
if (blur > 0.5):
sigma = np.random.uniform(0.15, 1.15)
kernel_size_y = int(np.floor(((np.ceil((0.1 * data.shape[2])) - 0.5) + (np.ceil((0.1 * data.shape[2])) % 2))))... |
_api()
class sink(Sink):
def __init__(self, upstream, func, *args, **kwargs):
self.func = func
sig = set(inspect.signature(Stream).parameters)
stream_kwargs = {k: v for (k, v) in kwargs.items() if (k in sig)}
self.kwargs = {k: v for (k, v) in kwargs.items() if (k not in sig)}
... |
def test_missing_link(initialized_db):
with set_tag_expiration_policy('devtable', 0):
location_name = storage.preferred_locations[0]
location = database.ImageStorageLocation.get(name=location_name)
first_blob_sha = ('sha256:' + hashlib.sha256(b'FIRST').hexdigest())
model.blob.store_b... |
def get_example_files_by_name(name: str, relative_to: ((str | Path) | None)=SOURCE_DIR) -> list[Path]:
path = _get_root_example_path_by_name(name, relative_to)
if path.is_dir():
return [p for p in path.glob('*') if (not p.is_dir())]
else:
path = path.with_suffix('.py')
return ([path]... |
class AllowMoveStrategy(abc.ABC):
def start_new_component(self, initial_labels, attr, objective_func, comp_idx):
self.attr = attr
self.objective_func = objective_func
self.objective_val = self.objective_func(initial_labels, self.attr)
def __call__(self, moving_area, new_region, labels): |
def test_planck_cm(verbose=True, plot=True, *args, **kwargs):
if plot:
import matplotlib.pyplot as plt
plt.ion()
T = 287.2
eps = 0.78
s = sPlanck(wavenum_min=10, wavenum_max=3000, T=T, eps=eps, wstep=0.1)
w_nm = s.get_wavelength()
w_cm = s.get_wavenumber()
I_nm = s.get_radian... |
class WeightedSoftmaxClassificationLoss(Loss):
def __init__(self, anchorwise_output=False):
self._anchorwise_output = anchorwise_output
def _compute_loss(self, prediction_tensor, target_tensor, weights):
num_classes = prediction_tensor.get_shape().as_list()[(- 1)]
per_row_cross_ent = tf.... |
class Window(ABCWindow):
def __init__(self, name):
self.name = name
(major, minor) = ((gl.GLint * 1)(), (gl.GLint * 1)())
egl.eglInitialize(eglDpy, major, minor)
self.eglCfg = getEglCfg()
if (self.eglCfg is None):
raise WindowProviderException('Could not get EGL c... |
def test_step_name_is_cached():
step = parser.Step(name='step name', type='given', indent=8, line_number=3, keyword='Given')
assert (step.name == 'step name')
step._name = 'incorrect step name'
assert (step.name == 'step name')
step.name = 'new step name'
assert (step.name == 'new step name')
... |
class Operation():
def __init__(self, reason: (str | None)=None, priority: (int | float)=0) -> None:
self._reason = reason
self._skipped = False
self._skip_reason: (str | None) = None
self._priority = priority
def job_type(self) -> str:
raise NotImplementedError
def r... |
def test_ecb():
net = ECB(in_channels=2, out_channels=2, depth_multiplier=1, act_type='softplus', with_idt=False).cuda()
img = torch.rand((1, 2, 12, 12), dtype=torch.float32).cuda()
output = net(img)
assert (output.shape == (1, 2, 12, 12))
net = net.eval()
output_rep = net(img)
assert (outpu... |
def compute_opt_lr(grad_list, momentum, dataset_size):
var_diag_sum = 0
num_params = 0
if (grad_list[0]['num_models'] < 2):
print('No models stored yet')
return (None, None)
for grad_dict in grad_list:
first_moment_squared = ((grad_dict['sum'] / grad_dict['num_models']) ** 2)
... |
class TrainingDataHandler(TrainingData):
def __init__(self, train_batcher: ListBatcher, dev_batcher: ListBatcher, sample_filter: Optional[SampleFilter]=None, preprocessor: Optional[Preprocessor]=None, sample_train=None, sample_dev=None, sample_seed=18):
self.train_batcher = train_batcher
self.dev_ba... |
def test_add_package_with_extras_and_whitespace(tester: CommandTester) -> None:
command = tester.command
assert isinstance(command, InitCommand)
result = command._parse_requirements(['databases[postgresql, sqlite]'])
assert (result[0]['name'] == 'databases')
assert (len(result[0]['extras']) == 2)
... |
('pyinaturalist.v1.projects.put')
def test_delete_project_users(mock_put, requests_mock):
requests_mock.get(f'{API_V1}/projects/1234', json=SAMPLE_DATA['get_projects'], status_code=200)
delete_project_users(1234, [5678])
project_params = mock_put.call_args[1]['json']['project']
rules = project_params['p... |
class Tset_scan_dirs(TestCase):
def scan_dirs(self):
return config.get('settings', 'scan')
def test_set_scan_dirs_empty(self):
set_scan_dirs([])
self.assertEqual(self.scan_dirs, '')
def test_set_scan_dirs_single(self):
set_scan_dirs([STANDARD_PATH])
self.assertEqual(s... |
class CObject(Base_CObject):
option_spec = {'name': directives.unchanged}
def handle_func_like_macro(self, sig, signode):
global namespace
if (not (self.objtype == 'function')):
return False
m = c_funcptr_sig_re.match(sig)
if (m is None):
m = c_sig_re.matc... |
def build_model(cfg: FairseqDataclass, task):
model = None
model_type = (getattr(cfg, '_name', None) or getattr(cfg, 'arch', None))
if ((not model_type) and (len(cfg) == 1)):
model_type = next(iter(cfg))
if (model_type in MODEL_DATACLASS_REGISTRY):
cfg = cfg[model_type]
e... |
class TestEmbeddingShardingPlanner(unittest.TestCase):
def setUp(self) -> None:
compute_device = 'cuda'
self.topology = Topology(world_size=2, hbm_cap=((1024 * 1024) * 2), compute_device=compute_device)
self.planner = EmbeddingShardingPlanner(topology=self.topology)
def test_tw_solution(... |
_config
def test_mode_chord(manager):
manager.test_window('three')
manager.test_window('two')
manager.test_window('one')
assert (manager.c.get_groups()['a']['focus'] == 'one')
manager.c.simulate_keypress([], 'k')
assert (manager.c.get_groups()['a']['focus'] == 'two')
manager.c.simulate_keypr... |
class PoolFormerFeatureExtractor(FeatureExtractionMixin, ImageFeatureExtractionMixin):
model_input_names = ['pixel_values']
def __init__(self, do_resize_and_center_crop=True, size=224, resample=Image.BICUBIC, crop_pct=0.9, do_normalize=True, image_mean=None, image_std=None, **kwargs):
super().__init__(*... |
class StreamVideoInfo():
def __init__(self, width, height, sample_aspect_num, sample_aspect_den, frame_rate_num, frame_rate_den, codec_id):
self.width = width
self.height = height
self.sample_aspect_num = sample_aspect_num
self.sample_aspect_den = sample_aspect_den
self.frame... |
class Task():
__slots__ = ['_params', '_result', '_error']
def __init__(self, **params):
if (not params):
params = None
self._params = params
self._result = None
self._error = None
def process(self, proxy, **params):
pass
def _run(self, proxy):
... |
class MRFLoss(ComparisonLoss):
def __init__(self, encoder: enc.Encoder, patch_size: Union[(int, Sequence[int])], *, stride: Union[(int, Sequence[int])]=1, target_transforms: Optional[Iterable[nn.Module]]=None, input_guide: Optional[torch.Tensor]=None, target_image: Optional[torch.Tensor]=None, target_guide: Optiona... |
class DebuggerTest(unittest.TestCase):
def test_qdb_mips32el_hello(self):
rootfs = '../examples/rootfs/mips32el_linux'
path = (rootfs + '/bin/mips32el_hello')
ql = Qiling([path], rootfs)
ql.debugger = 'qdb::rr:qdb_scripts/mips32el.qdb'
ql.run()
del ql
def test_qdb... |
class QSniffer():
def __init__(self, parsed=None, filter=None, interface=None, analyzer_db=None):
self.current_ip = ifaddresses(interface)[AF_INET][0]['addr'].encode('utf-8')
self.current_mac = ifaddresses(interface)[AF_LINK][0]['addr'].encode('utf-8')
self.filter = filter
self.inter... |
def main():
os.chdir(os.path.dirname(__file__))
args = get_arguments()
constr_weight = get_constraint(args.weight_bits, 'weight')
constr_activation = get_constraint(args.activation_bits, 'activation')
if (args.dataset == 'cifar10'):
network = resnet20
dataloader = dataloader_cifar10
... |
def make_weights_for_balanced_classes(images, nclasses=6):
count = ([0] * nclasses)
for item in images:
count[item[1]] += 1
weight_per_class = ([0.0] * nclasses)
N = float(sum(count))
for i in range(nclasses):
weight_per_class[i] = ((N / float(count[i])) if (count[i] != 0) else N)
... |
class DataProcessor(object):
def get_PTB_train_examples(self, data_dir):
return self._create_examples(self._read_pkl(os.path.join(data_dir, 'PTB_train.pkl')), 'PTB_train')
def get_PTB_dev_examples(self, data_dir):
return self._create_examples(self._read_pkl(os.path.join(data_dir, 'PTB_dev.pkl'))... |
def parse_args():
parser = argparse.ArgumentParser(description='Generate training and val set of RCTW.')
parser.add_argument('root_path', help='Root dir path of RCTW')
parser.add_argument('--val-ratio', help='Split ratio for val set', default=0.0, type=float)
parser.add_argument('--nproc', default=1, ty... |
def merge_info(*allinfo):
def _info_to_list(infodata, field):
iterable = chain.from_iterable((info[field] for info in allinfo))
return list(iterable)
allinfo_copy = list(allinfo)
for (pos, info) in enumerate(allinfo_copy):
allinfo_copy[pos] = calculate_metrics(info)
result = dict... |
class TestDriverPsi4Extra(QiskitNatureTestCase):
((not _optionals.HAS_PSI4), 'psi4 not available.')
def setUp(self):
super().setUp()
def test_input_format_list(self):
driver = Psi4Driver(['molecule h2 {', ' 0 1', ' H 0.0 0.0 0.0', ' H 0.0 0.0 0.735', ' no_com', ' no_reorient', '}', ''... |
def test_anything_else_pickle() -> None:
fsm1 = Fsm(alphabet={Charclass('z'), (~ Charclass('z'))}, states={0, 1, 2}, initial=0, finals={1}, map={0: {Charclass('z'): 2, (~ Charclass('z')): 1}, 1: {Charclass('z'): 2, (~ Charclass('z')): 1}, 2: {Charclass('z'): 2, (~ Charclass('z')): 2}})
fsm1_unpickled = pickle.l... |
def _simple_send_tensors(tensor: Tensor, world_size: int, group: Optional[dist.ProcessGroup], rank: Optional[int]) -> Optional[List[Tensor]]:
gathered_result = None
local_rank = dist.get_rank(group=group)
if ((rank is None) or (local_rank == rank)):
stacked_result_sizes = ([world_size] + list(tensor... |
(parallel=True)
def _numba_equi_join_range_join(left_index, right_index, slice_starts, slice_ends, ge_arr1, ge_arr2, ge_strict, le_arr1, le_arr2, le_strict):
length = left_index.size
ends = np.empty(length, dtype=np.intp)
booleans = np.ones(length, dtype=np.bool_)
counts = 0
for num in prange(length... |
class FilterCheckButton(ConfigCheckButton):
__gsignals__ = {'preview': (GObject.SignalFlags.RUN_LAST, None, ())}
_tooltip = None
def __init__(self):
super().__init__(self._label, self._section, self._key, tooltip=self._tooltip)
try:
self.set_active(config.getboolean(self._section... |
class Migration(migrations.Migration):
dependencies = [('petition', '0008_auto__1805')]
operations = [migrations.AddField(model_name='petition', name='paper_signatures', field=models.IntegerField(default=0)), migrations.AddField(model_name='petition', name='paper_signatures_enabled', field=models.BooleanField(d... |
def scale_bb_by(rmin, rmax, cmin, cmax, im_height, im_width, h_scale, w_scale):
height = (rmax - rmin)
width = (cmax - cmin)
rmin -= ((h_scale * height) / 2)
rmax += ((h_scale * height) / 2)
cmin -= ((w_scale * width) / 2)
cmax += ((w_scale * width) / 2)
rmin = int(max(0, rmin))
rmax = i... |
class CallerIfcCL(NonBlockingIfc):
def construct(s, *, Type=None):
s.Type = Type
s.method = CallerPort(Type=Type)
s.rdy = CallerPort()
s.method._dsl.in_non_blocking_ifc = True
s.rdy._dsl.in_non_blocking_ifc = True
s.method._dsl.is_rdy = False
s.rdy._dsl.is_rdy... |
class MonadTall(_SimpleLayoutBase):
_left = 0
_right = 1
defaults = [('border_focus', '#ff0000', 'Border colour(s) for the focused window.'), ('border_normal', '#000000', 'Border colour(s) for un-focused windows.'), ('border_width', 2, 'Border width.'), ('single_border_width', None, 'Border width for single... |
def syllabify_orth_with_nltk(token, num_sylls=None):
global nltk_ssp
if (not nltk_ssp):
from nltk.tokenize import SyllableTokenizer
nltk_ssp = SyllableTokenizer()
tokenl = token.lower()
l = nltk_ssp.tokenize(tokenl)
if (tokenl != token):
o = []
i = 0
for x in ... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.