code stringlengths 281 23.7M |
|---|
class MetricCollectionToolkitTest(unittest.TestCase):
def _test_per_process_metric_collection_sync(input_tensor: torch.Tensor, metric_constructors: List[Callable[([], Metric)]]) -> None:
device = init_from_env()
if (device.type == 'cuda'):
torch.cuda.empty_cache()
rank = int(os.e... |
def generate_alexnet_asset(root, file='alexnet'):
model = models.alexnet(pretrained=True).features
modules = OrderedDict()
block = 1
for (idx, module) in model.named_children():
if isinstance(module, nn.Conv2d):
layer = f'conv{block}'
elif isinstance(module, nn.ReLU):
... |
class TestStochasticIPTW():
def test_error_no_model(self, sdata):
sipw = StochasticIPTW(sdata.dropna(), treatment='art', outcome='dead')
with pytest.raises(ValueError):
sipw.fit(p=0.8)
def test_error_p_oob(self, sdata):
sipw = StochasticIPTW(sdata.dropna(), treatment='art', o... |
class ThumbnailProcessor(Processor):
def __init__(self, width: int=None, height: int=None, ratio: float=None, ratio_precision: int=5, thumbnail_type: Type[Thumbnail]=Thumbnail):
self.width = width
self.height = height
self.ratio = ratio
self.ratio_precision = ratio_precision
... |
class Module():
def __init__(self, name):
self.name = name
self.kernels = {}
self.functions = []
self.constants = []
self.structs = []
self.dll = None
self.cuda = None
self.build_failed = False
self.options = {'max_unroll': 16, 'mode': warp.con... |
_fixtures(WebFixture)
def test_bookmarks_overrides(web_fixture):
fixture = web_fixture
user_interface = UserInterface(None, '/a_ui', {}, False, 'test_ui')
view = UrlBoundView(user_interface, '/aview', 'A View title')
bookmark = view.as_bookmark(description='different description', query_arguments={'arg1... |
class ProvidersBucket():
def __init__(self, providers: [str], qps: int):
self.providers = providers
self.qps = qps
self._last_get_time = [0 for _ in range(len(self.providers))]
self._get_interval = (1 / (len(self.providers) * qps))
self._lock = DeferredLock()
def get(self... |
.parametrize('enabled', [True, False])
def test_system_env_usersite(mocker: MockerFixture, enabled: bool) -> None:
    """SystemEnv.usersite must be populated exactly when user site is enabled.

    ``site.check_enableusersite`` is patched to the parametrized value, so
    the environment's ``usersite`` attribute must track it one-to-one.
    """
    mocker.patch('site.check_enableusersite', return_value=enabled)
    env = SystemEnv(Path(sys.prefix))
    # (A and B) or (not A and not B) is just A == B for booleans; state the
    # biconditional directly instead of spelling out both branches.
    assert (env.usersite is not None) == enabled
def send_message(blocks: List[dict], attachments: List[dict], *, token: str):
if (not token):
logger.info('Incoming webhook variable is not set,skipping slack notification')
return
response = post(url=token, json={'blocks': blocks, 'attachments': attachments})
if (response.status_code != 200... |
def Tvib12Tvib3Trot_NonLTEModel(factory, fittable_parameters, fixed_parameters={}) -> Spectrum:
fittable_parameters = fittable_parameters.copy()
T12 = fittable_parameters.pop('T12')
T3 = fittable_parameters.pop('T3')
Trot = fittable_parameters.pop('Trot')
kwargs = {'Tvib': (T12, T12, T3), 'Trot': Tr... |
def test_compute_recall_precision():
gt_polys = []
det_polys = []
with pytest.raises(AssertionError):
hmean_ic13.compute_recall_precision(1, 1)
box1 = [0, 0, 1, 0, 1, 1, 0, 1]
box2 = [0, 0, 10, 0, 10, 1, 0, 1]
gt_polys = [utils.points2polygon(box1)]
det_polys = [utils.points2polygon(... |
def test_render_debug_better_error_message_recursion_error_with_multiple_duplicated_frames() -> None:
def first() -> None:
def second() -> None:
first()
second()
io = BufferedIO()
io.set_verbosity(Verbosity.VERY_VERBOSE)
with pytest.raises(RecursionError) as e:
first(... |
class CosineSimilarityLoss(nn.Module):
def __init__(self, loss_fct=nn.MSELoss(), cos_score_transformation=nn.Identity()):
super(CosineSimilarityLoss, self).__init__()
self.loss_fct = loss_fct
self.cos_score_transformation = cos_score_transformation
def forward(self, rep_a, rep_b, label: ... |
def mslogo():
if (system == 'termux'):
sleep(0.2)
Mylogo()
print('\n\x1b[1;32m [\x1b[1;31m 1 \x1b[1;32m] \x1b[1;33mStart default server.\n\x1b[1;32m [\x1b[1;31m 2 \x1b[1;32m] \x1b[1;33mStart PHP web server.\n\x1b[1;32m [\x1b[1;31m 3 \x1b[1;32m] \x1b[1;33mStart Python web server.\n\x1b[... |
def test_add_pass_with_create_from_list_and_add():
    """add.run_step creates a set from a list, then extends it on a re-run."""
    context = Context(
        {'add': {'set': 'arbset', 'addMe': [1, 2], 'unpack': True}})

    # First run: 'arbset' does not exist yet, so it is created from [1, 2].
    add.run_step(context)

    # Second run: 'arbset' already exists; [3, 4] is unpacked into it.
    context['add']['addMe'] = [3, 4]
    add.run_step(context)

    assert context['arbset'] == {1, 2, 3, 4}
    # Only the 'add' input and the resulting 'arbset' live in the context.
    assert len(context) == 2
class VersionRange(VersionRangeConstraint):
def __init__(self, min: (Version | None)=None, max: (Version | None)=None, include_min: bool=False, include_max: bool=False) -> None:
self._max = max
self._min = min
self._include_min = include_min
self._include_max = include_max
def mi... |
class AlphaModelStrategy(AbstractStrategy):
def __init__(self, ts: TradingSession, model_tickers_dict: Dict[(AlphaModel, Sequence[Ticker])], use_stop_losses=True, max_open_positions: Optional[int]=None, time_in_force: Optional[TimeInForce]=TimeInForce.OPG):
super().__init__(ts)
all_future_tickers = ... |
def phi_by_grain(network, state):
list_of_phi = []
systems = utils.powerset(network.node_indices, nonempty=True)
for system in systems:
micro_subsystem = Subsystem(network, state, system)
phi = compute.phi(micro_subsystem)
list_of_phi.append([len(micro_subsystem), phi, system, None])... |
class TestConstraints(TestNameCheckVisitorBase):
_passes()
def test_assert_truthy(self):
def capybara(x):
if x:
y = True
else:
y = False
assert_is_value(y, MultiValuedValue([KnownValue(True), KnownValue(False)]))
assert y
... |
class struct__EFI_IFR_FORM_SET(ctypes.Structure):
    """ctypes mapping of the EFI_IFR_FORM_SET opcode structure.

    Packed layout (``_pack_`` is truthy, i.e. 1-byte alignment) with explicit
    PADDING members, matching the generated binding convention of this file.
    """

    _pack_ = True
    _functions_ = []
    _fields_ = [
        ('Header', EFI_IFR_OP_HEADER),
        ('PADDING_0', ctypes.c_ubyte * 2),
        ('Guid', EFI_GUID),
        ('FormSetTitle', ctypes.c_uint16),
        ('Help', ctypes.c_uint16),
        ('Flags', ctypes.c_ubyte),
        ('PADDING_1', ctypes.c_ubyte * 3),
    ]
def _build_offset(offset, kwargs, default):
kwargs = {k: v for (k, v) in six.iteritems(kwargs) if (v is not None)}
if (offset is None):
if (not kwargs):
return default
else:
return _td_check(datetime.timedelta(**kwargs))
elif kwargs:
raise ValueError('Cannot p... |
(frozen=True)
class User():
id: int
name: str
discord_id: (int | None) = None
def from_json(cls, data) -> User:
return cls(id=data['id'], name=data['name'], discord_id=data.get('discord_id'))
def as_json(self) -> dict:
return {'id': self.id, 'name': self.name, 'discord_id': self.disc... |
def main():
args = parse_args()
icdar_path = args.icdar_path
out_dir = (args.out_dir if args.out_dir else icdar_path)
mmcv.mkdir_or_exist(out_dir)
img_dir = osp.join(icdar_path, 'imgs')
gt_dir = osp.join(icdar_path, 'annotations')
set_name = {}
for split in args.split_list:
set_n... |
def _blackjax_stats_to_dict(sample_stats, potential_energy) -> Dict:
rename_key = {'is_divergent': 'diverging', 'energy': 'energy', 'num_trajectory_expansions': 'tree_depth', 'num_integration_steps': 'n_steps', 'acceptance_rate': 'acceptance_rate', 'acceptance_probability': 'acceptance_rate'}
converted_stats = ... |
def test_explicit_roadline():
line = xodr.ExplicitRoadLine()
prettyprint(line.get_element())
line = xodr.ExplicitRoadLine(1, 2, 5, 1, xodr.MarkRule.no_passing)
prettyprint(line.get_element())
line2 = xodr.ExplicitRoadLine(1, 2, 5, 1, xodr.MarkRule.no_passing)
line3 = xodr.RoadLine(1, 2, 5, 1, xo... |
class GroupsCommand(ops.cmd.DszCommand):
optgroups = {}
reqgroups = []
reqopts = []
defopts = {}
def __init__(self, plugin='groups', netmap_type=None, **optdict):
ops.cmd.DszCommand.__init__(self, plugin, **optdict)
def validateInput(self):
for opt in self.optdict:
if... |
class IsTicketOwner(BasePermission):
message = 'You are not allowed to update this ticket.'
def has_permission(self, source, info, **kwargs):
user = info.context.request.user
if (not user.is_authenticated):
return False
conference = Conference.objects.get(code=kwargs['confere... |
def update_dir(model_dir, log_dir, data_dir):
if model_dir:
config.OUTPUT_DIR = model_dir
if log_dir:
config.LOG_DIR = log_dir
if data_dir:
config.DATA_DIR = data_dir
if config.DATA_DIR:
config.DATASET.ROOT = os.path.join(config.DATA_DIR, config.DATASET.ROOT)
if c... |
def to_docker_command(params, docker_image, python_command='python', script='scripts/run_experiment.py', pre_commands=None, use_tty=False, post_commands=None, dry=False, use_gpu=False, env=None, local_code_dir=None):
log_dir = params.get('log_dir')
if (not dry):
mkdir_p(log_dir)
if use_gpu:
... |
class TestFakeIOStream():
def _write_func(self, text):
return text
def test_flush(self):
s = utils.FakeIOStream(self._write_func)
s.flush()
def test_isatty(self):
s = utils.FakeIOStream(self._write_func)
assert (not s.isatty())
def test_write(self):
s = ut... |
class TestTabboxApp(unittest.TestCase):
def setUpClass(cls):
import tabbox
cls.AppClass = tabbox.MyApp
def setUp(self):
self.AppClass.log_request = (lambda x, y: None)
def tearDown(self):
del self.AppClass.log_request
self.app.on_close()
def test_main(self):
... |
_required()
def token(request):
if (request.method == 'POST'):
try:
Token.objects.get(user=request.user).delete()
except Token.DoesNotExist:
pass
(token, created) = Token.objects.get_or_create(user=request.user)
return render(request, 'account/account_token.html', {'t... |
class Migration(migrations.Migration):
dependencies = [('domain', '0012_renaming')]
operations = [migrations.AlterModelManagers(name='attribute', managers=[('objects', django.db.models.manager.Manager())]), migrations.AlterModelManagers(name='attributeentity', managers=[('objects', django.db.models.manager.Mana... |
def find_version():
version_file = io.open(os.path.join(os.path.dirname(__file__), 'MCEvidence.py')).read()
version_match = re.search('^__version__ = [\'\\"]([^\'\\"]*)[\'\\"]', version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.... |
def strong_transform(param, data=None, target=None):
assert ((data is not None) or (target is not None))
(data, target) = one_mix(mask=param['mix'], data=data, target=target)
(data, target) = color_jitter(color_jitter=param['color_jitter'], s=param['color_jitter_s'], p=param['color_jitter_p'], mean=param['m... |
def test_while_exec_iteration_no_stop():
wd = WhileDecorator({'max': 3})
context = Context({})
mock = MagicMock()
assert (not wd.exec_iteration(2, context, mock))
assert (context['whileCounter'] == 2)
assert (wd.while_counter == 2)
assert (len(context) == 1)
mock.assert_called_once_with(... |
class LidGroup(object):
def __init__(self, model, subcatchmentid):
if (not model.fileLoaded):
raise PYSWMMException('SWMM Model Not Open')
if (subcatchmentid not in model.getObjectIDList(ObjectType.SUBCATCH.value)):
raise PYSWMMException('Subcatchment ID Does not Exist')
... |
def get_training_roidb(imdb):
if cfg.TRAIN.USE_FLIPPED:
print('Appending horizontally-flipped training examples...')
imdb.append_flipped_images()
print('done')
print('Preparing training data...')
if cfg.TRAIN.HAS_RPN:
rdl_roidb.prepare_roidb(imdb)
else:
rdl_roidb.... |
class nnUNetTrainerAdam1en3(nnUNetTrainerAdam):
def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool=True, device: torch.device=torch.device('cuda')):
super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
self.initial_lr... |
def test_paginate(bot, mock_req):
chains = inline_default(bot)
bot.api.call = call_overwrite
for i in range(0, 5):
update = update_inline(i, str((i * 10)))
for hook in chains['inline']:
result = hook.call(bot, update)
assert (result[0] == 'answerInlineQuery')
... |
def build_layer(in_channels, out_channels, type, **kwargs):
if (type == 'id'):
return nn.Identity()
elif (type == 'mlp'):
return MLP(input_dim=in_channels, embed_dim=out_channels)
elif (type == 'sep_conv'):
return DepthwiseSeparableConvModule(in_channels=in_channels, out_channels=out... |
def build_backbone(args):
position_embedding = build_position_encoding(args)
train_backbone = (args.lr_backbone > 0)
return_interm_layers = (args.masks or (args.num_feature_levels > 1))
backbone = Backbone(args.backbone, train_backbone, return_interm_layers, args.dilation)
model = Joiner(backbone, p... |
class CloudzillaToFolder(SimpleDecrypter):
__name__ = 'CloudzillaToFolder'
__type__ = 'decrypter'
__version__ = '0.11'
__status__ = 'testing'
__pattern__ = '
__config__ = [('enabled', 'bool', 'Activated', True), ('use_premium', 'bool', 'Use premium account if available', True), ('folder_per_pack... |
class SnapshotsPanel(qw.QFrame):
def __init__(self, viewer):
qw.QFrame.__init__(self)
layout = qw.QGridLayout()
self.setLayout(layout)
self.model = SnapshotsModel()
self.viewer = viewer
lv = SnapshotListView()
lv.sizePolicy().setVerticalPolicy(qw.QSizePolicy.E... |
def _add_xy_projected_coords_attrs(data_arr: xr.DataArray, x: str='x', y: str='y') -> xr.DataArray:
if (x in data_arr.coords):
data_arr[x].attrs['standard_name'] = 'projection_x_coordinate'
data_arr[x].attrs['units'] = 'm'
if (y in data_arr.coords):
data_arr[y].attrs['standard_name'] = '... |
class DualAverageAdaptation():
def __init__(self, initial_step, target, gamma, k, t0):
self._initial_step = initial_step
self._target = target
self._k = k
self._t0 = t0
self._gamma = gamma
self.reset()
def reset(self):
self._log_step = np.log(self._initial... |
class DZBlock(dict):
def read_dz(self, fid):
(self['dz_org_block_type'], self['dz_zip_type'], dz_reserved, self['dz_zip_parameter'], self['dz_org_data_length'], self['dz_data_length']) = _DZStruct.unpack(fid.read(24))
def decompress_data_block(block, zip_type, zip_parameter, org_data_length):
bl... |
def parse_args():
parser = argparse.ArgumentParser(description='Train a detector')
parser.add_argument('config', help='train config file path')
parser.add_argument('--work-dir', help='the dir to save logs and models')
parser.add_argument('--amp', action='store_true', default=False, help='enable automati... |
class Effect5319(BaseEffect):
    """Passive effect: boosts explosive damage of light-missile charges.

    The bonus amount is read from the ship's 'shipBonusMD1' attribute and is
    tied to the 'Minmatar Destroyer' skill.
    """

    type = 'passive'

    def handler(fit, ship, context, projectionRange, **kwargs):
        # Only charges that require the Light Missiles skill receive the boost.
        def uses_light_missiles(mod):
            return mod.charge.requiresSkill('Light Missiles')

        fit.modules.filteredChargeBoost(
            uses_light_missiles,
            'explosiveDamage',
            ship.getModifiedItemAttr('shipBonusMD1'),
            skill='Minmatar Destroyer',
            **kwargs)
def subprocess_Popen(command, **params):
startupinfo = None
if (os.name == 'nt'):
startupinfo = subprocess.STARTUPINFO()
try:
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
except AttributeError:
startupinfo.dwFlags |= subprocess._subprocess.STARTF_USESHOW... |
class Columns(Enum):
pt = {'start': 'Inicio dd/MM/aaaa', 'last': 'Ult. valor', 'code': 'Cod.', 'frequency': 'Per.', 'name': 'Nome completo', 'source': 'Fonte', 'unit': 'Unid.'}
en = {'start': 'Start dd/MM/yyyy', 'last': 'Last value', 'code': 'Code', 'frequency': 'Per.', 'name': 'Full name', 'source': 'Source'... |
def main(path: Path, function: Optional[str]=None, n_seeds: int=15, n_ensembles: int=3, ensemble_size: int=5, *, continue_: bool=False, force: bool=False):
path = lib.get_path(path)
if path.name.endswith(('-tuning', '-tuning.toml')):
assert (function is None)
tuning_output = path.with_suffix('')... |
def train(train_loader, model, lemniscate, criterion, optimizer, epoch, writer):
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
losses_ins = AverageMeter()
losses_rot = AverageMeter()
model.train()
end = time.time()
optimizer.zero_grad()
for (i, (input... |
.parametrize('data,num_controls', [pytest.param(data, num_controls, id=f'{num_controls}-data{idx}', marks=(pytest.mark.slow if ((num_controls == 2) and (idx == 2)) else ())) for (idx, data) in enumerate([[[1, 2, 3, 4, 5]], [[1, 2, 3], [4, 5, 10]], [[1], [2], [3], [4], [5], [6]]]) for num_controls in [0, 1, 2]])
def tes... |
def unload_dll(dll):
handle = dll._handle
del dll
import gc
gc.collect()
if (os.name == 'nt'):
max_attempts = 100
for i in range(max_attempts):
result = ctypes.windll.kernel32.FreeLibrary(ctypes.c_void_p(handle))
if (result != 0):
return
el... |
class Solution():
def productExceptSelf(self, nums: List[int]) -> List[int]:
ans = ([1] * len(nums))
left = 1
right = 1
for i in range(len(nums)):
ans[i] *= left
ans[((- 1) - i)] *= right
left *= nums[i]
right *= nums[((- 1) - i)]
... |
def _gen_efficientnetv2_s(variant, channel_multiplier=1.0, depth_multiplier=1.0, group_size=None, rw=False, pretrained=False, **kwargs):
arch_def = [['cn_r2_k3_s1_e1_c24_skip'], ['er_r4_k3_s2_e4_c48'], ['er_r4_k3_s2_e4_c64'], ['ir_r6_k3_s2_e4_c128_se0.25'], ['ir_r9_k3_s1_e6_c160_se0.25'], ['ir_r15_k3_s2_e6_c256_se0... |
def checkOptionsGivenByTheUser(args, operationsAllowed, checkAccount=True, allowHostsFile=False):
if (allowHostsFile == True):
if ((args['hostlist'] == None) and (ipOrNameServerHasBeenGiven(args) == False)):
return False
elif (ipOrNameServerHasBeenGiven(args) == False):
return False
... |
class TestBNReEstimation():
def test_reestimation_with_quantsim_model(self, gpu_sessions, bn_re_estimation_dataset, bn_num_batches, bn_momentum_names, bn_training_names):
(sess_sim, sess_fp32) = gpu_sessions
self._reestimate_and_compare_results(sess_sim, bn_re_estimation_dataset, bn_num_batches, ['i... |
class PdfTextPage(pdfium_i.AutoCloseable):
def __init__(self, raw, page):
self.raw = raw
self.page = page
super().__init__(pdfium_c.FPDFText_ClosePage)
def parent(self):
return self.page
def _get_active_text_range(self, c_start, c_end, l_passive=0, r_passive=0):
if (c... |
def _get_rerun_filter_regex(item, regex_name):
rerun_marker = _get_marker(item)
if ((rerun_marker is not None) and (regex_name in rerun_marker.kwargs)):
regex = rerun_marker.kwargs[regex_name]
if isinstance(regex, str):
regex = [regex]
else:
regex = getattr(item.session.c... |
class W_FileOutputPort(W_OutputPort):
errorname = 'output-port'
_immutable_fields_ = ['file', 'path']
_attrs_ = ['closed', 'file', 'stdout', 'path']
def __init__(self, f, path, stdout=False):
self.closed = False
self.file = f
self.stdout = stdout
self.path = path
def ... |
def node_game_index_fields(wizard, status=None):
if (not hasattr(wizard, 'game_index_listing')):
wizard.game_index_listing = settings.GAME_INDEX_LISTING
status_default = wizard.game_index_listing['game_status']
text = f'''
What is the status of your game?
- pre-alpha: a game in its very ... |
def agent_step(model, input_dict, vocab, prev_action, env, args, num_fails, obj_predictor):
with torch.no_grad():
m_out = model.step(input_dict, vocab, prev_action=prev_action)
m_pred = model_util.extract_action_preds(m_out, model.pad, vocab['action_low'], clean_special_tokens=False)[0]
action = m_p... |
('auditwheel.patcher._verify_patchelf')
('auditwheel.patcher.check_output')
('auditwheel.patcher.check_call')
class TestRepair():
def test_append_rpath(self, check_call, check_output, _):
patcher = Patchelf()
existing_rpath = b'$ORIGIN/.existinglibdir'
check_output.return_value = existing_rp... |
class CKCCClient(HardwareClientBase):
def __init__(self, plugin, handler, dev_path, *, is_simulator=False):
HardwareClientBase.__init__(self, plugin=plugin)
self.device = plugin.device
self.handler = handler
self._expected_device = None
if is_simulator:
self.dev =... |
def get_comp_refs(ref_logprobs, count_steps, step_history, grounding):
assert (len(grounding) == (count_steps - 1)), (grounding, count_steps)
refs = [idx for (rule, idx) in step_history if (rule == 'ref')]
assert (len(refs) == 2), refs
lhs = refs[1]
lhs_val_type = set([grnd.data_type for grnd in gro... |
def _find_and_run_interpolation(interpolation_functions, src_resolution, dst_resolution, args):
try:
interpolation_function = interpolation_functions[(src_resolution, dst_resolution)]
except KeyError:
error_message = 'Interpolation from {}m to {}m not implemented'.format(src_resolution, dst_reso... |
def main():
for pofilename in sorted(glob.glob((PO_DIR + '**/*/*.po'))):
po = polib.pofile(pofilename)
percent_translated = po.percent_translated()
if (percent_translated > 90):
pofilename = pofilename.replace((PO_DIR + os.sep), '')
print(f'{pofilename:<30} :: {percen... |
class Plot2D(object):
def __init__(self, scene, **kwargs):
self.evPlotChanged = Subject()
self._scene = scene
self._data = None
self.fig = None
self.ax = None
self._show_plt = False
self._colormap_symmetric = True
self.title = 'unnamed'
self._l... |
class CECT_dataset(Dataset):
def __init__(self, path=None):
self.path = path
self.classes = os.listdir(self.path)
self.class2id = {}
self.imgs = []
for each_class in self.classes:
if (not (each_class in self.class2id)):
self.class2id[each_class] = ... |
def _is_repl_code(content: str, threshold: int=3) -> bool:
log.trace(f'Checking if content is (I)Python REPL code using a threshold of {threshold}.')
repl_lines = 0
patterns = (_RE_PYTHON_REPL, _RE_IPYTHON_REPL)
for line in content.splitlines():
for pattern in patterns:
if pattern.ma... |
class WithFutureDailyBarData(WithAssetFinder, WithTradingCalendars):
FUTURE_DAILY_BAR_USE_FULL_CALENDAR = False
FUTURE_DAILY_BAR_START_DATE = alias('START_DATE')
FUTURE_DAILY_BAR_END_DATE = alias('END_DATE')
FUTURE_DAILY_BAR_SOURCE_FROM_MINUTE = None
def FUTURE_DAILY_BAR_LOOKBACK_DAYS(cls):
... |
def _migrate_v5(json_dict: dict) -> dict:
gate_mapping = {'Hive Access Tunnel': 'Temple Grounds/Hive Access Tunnel/Translator Gate', 'Meeting Grounds': 'Temple Grounds/Meeting Grounds/Translator Gate', 'Hive Transport Area': 'Temple Grounds/Hive Transport Area/Translator Gate', 'Industrial Site': 'Temple Grounds/In... |
(params=[{'game': RandovaniaGame.METROID_PRIME_ECHOES, 'encoded': b'\x00', 'pickups_state': {}}, {'game': RandovaniaGame.METROID_PRIME_ECHOES, 'encoded': b'\x8aH\x80', 'pickups_state': {'Missile Expansion': {'ammo_count': [10], 'pickup_count': 12}}}, {'game': RandovaniaGame.METROID_PRIME_ECHOES, 'encoded': b'\x8fH\x80'... |
class AddEdges(StateChanger):
def __init__(self, from_node: NodeEnumerator, relation: Relation, to_node: NodeEnumerator, add_reverse=False):
self.from_node = from_node
self.relation = relation
self.to_node = to_node
self.add_reverse = add_reverse
def apply_changes(self, state: En... |
.end_to_end()
def test_node_protocol_for_custom_nodes_with_paths(runner, tmp_path):
source = '\n from typing_extensions import Annotated\n from pytask import Product\n from pathlib import Path\n from attrs import define\n import pickle\n\n \n class PickleFile:\n name: str\n path: ... |
def _build_shebang(executable: str, forlauncher: bool) -> bytes:
executable_bytes = executable.encode('utf-8')
if forlauncher:
return (b'#!' + executable_bytes)
if _is_executable_simple(executable_bytes):
return (b'#!' + executable_bytes)
quoted = shlex.quote(executable).encode('utf-8')
... |
def read_label_file(dataset_dir, filename=LABELS_FILENAME):
labels_filename = os.path.join(dataset_dir, filename)
with tf.gfile.Open(labels_filename, 'r') as f:
lines = f.read().decode()
lines = lines.split('\n')
lines = filter(None, lines)
labels_to_class_names = {}
for line in lines:
... |
def _get_query_and_value_notation(rank, batch_dims):
chr_idx = string.ascii_lowercase
query_notation = chr_idx[:rank]
letter_offset = rank
value_notation = ''
for i in range(rank):
if ((i in batch_dims) or (i == (rank - 1))):
value_notation += query_notation[i]
else:
... |
def main(start_rank, end_rank, protocol='tcp'):
if (protocol == 'tcp'):
port_file = 'nmap_top_ports_tcp.txt'
elif (protocol == 'udp'):
port_file = 'nmap_top_ports_udp.txt'
else:
print('This should never be reached.')
exit()
port_list = []
with open(port_file, 'r') as ... |
class FileWriter(AsyncSearchHandler):
def __init__(self, l, f, headerStr='', footerStr=''):
AsyncSearchHandler.__init__(self, l)
self._f = f
self.headerStr = headerStr
self.footerStr = footerStr
def preProcessing(self):
self._f.write(self.headerStr)
def postProcessing... |
def create_temp_view_statements(table_file):
sql_statements = []
with open(table_file, 'r') as f:
for line in f:
item = json.loads(line.strip())
table_name = get_table_name(item['id'])
headers = ['`{}`'.format(h) for h in item['header']]
header_str = (('('... |
class _DAHead(nn.Module):
def __init__(self, in_channels, nclass, aux=True, norm_layer=nn.BatchNorm2d, norm_kwargs=None, **kwargs):
super(_DAHead, self).__init__()
self.aux = aux
inter_channels = (in_channels // 4)
self.conv_p1 = nn.Sequential(nn.Conv2d(in_channels, inter_channels, 3... |
def thc_objective_and_grad(xcur, norb, nthc, eri, verbose=False):
etaPp = xcur[:(norb * nthc)].reshape(nthc, norb)
MPQ = xcur[(norb * nthc):((norb * nthc) + (nthc * nthc))].reshape(nthc, nthc)
CprP = numpy.einsum('Pp,Pr->prP', etaPp, etaPp)
Iapprox = numpy.einsum('pqU,UV,rsV->pqrs', CprP, MPQ, CprP, opt... |
_attention('pooling')
class PoolingAttention(BaseAttention):
def __init__(self, decoder_hidden_state_dim, context_dim, **kwargs):
super().__init__(decoder_hidden_state_dim, context_dim)
self.pool_type = kwargs.get('pool_type', 'mean')
def forward(self, decoder_state, source_hids, src_lengths):
... |
class ProjectItem(ContextMenuSingle):
visibilitySetting = 'project'
def __init__(self):
self.mainFrame = gui.mainFrame.MainFrame.getInstance()
def display(self, callingWindow, srcContext, mainItem):
if ((srcContext not in ('marketItemGroup', 'marketItemMisc')) or (self.mainFrame.getActiveFit... |
def convert(path):
with open(path, 'r') as f:
text = f.read().strip()
tokens = text.split()
oldkey = parse(tokens)[0]['privkeys']['account']
k = oldkey['private-key']['dsa']
newkey = DSAKey((k['y'], k['g'], k['p'], k['q'], k['x']), private=True)
print(('Writing converted key for %s/%s to... |
class UNetMidBlock2DSimpleCrossAttn(nn.Module):
def __init__(self, in_channels: int, temb_channels: int, dropout: float=0.0, num_layers: int=1, resnet_eps: float=1e-06, resnet_time_scale_shift: str='default', resnet_act_fn: str='swish', resnet_groups: int=32, resnet_pre_norm: bool=True, attention_head_dim=1, output... |
class LabelSmoothing(nn.Module):
def __init__(self, smoothing=0.0):
super(LabelSmoothing, self).__init__()
self.confidence = (1.0 - smoothing)
self.smoothing = smoothing
def forward(self, x, target):
logprobs = torch.nn.functional.log_softmax(x, dim=(- 1))
nll_loss = (- l... |
def Xception65(num_classes=None, global_pool=True, keep_prob=0.5, output_stride=None, regularize_depthwise=False, multi_grid=None, scope='xception_65'):
blocks = [xception_block('entry_flow/block1', in_channels=64, depth_list=[128, 128, 128], skip_connection_type='conv', activation_fn_in_separable_conv=False, regul... |
def test_internal_server_error(mocker, table_name: str):
mock_dynamic_configuration(mocker, MOCKED_SCHEMA)
db_handler: DynamoDalHandler = DynamoDalHandler(table_name)
table = db_handler._get_db_handler(table_name)
with Stubber(table.meta.client) as stubber:
stubber.add_client_error(method='put_i... |
def calIoU(result, gt_i):
x1 = result[0]
y1 = result[1]
x2 = result[2]
y2 = result[3]
overmax = (- 1)
is_which = (- 1)
for (k, gt) in enumerate(gt_i):
gt_x1 = gt[0]
gt_y1 = gt[1]
gt_x2 = gt[2]
gt_y2 = gt[3]
inter_x1 = max(x1, gt_x1)
inter_y1 = ... |
class TAPEv2File(TestCase):
def setUp(self):
self.audio = APEv2File(os.path.join(DATA_DIR, 'click.mpc'))
def test_empty(self):
f = APEv2File(os.path.join(DATA_DIR, 'xing.mp3'))
self.assertFalse(f.items())
def test_add_tags(self):
self.failUnless((self.audio.tags is None))
... |
_info
def Censys_ip(Domain, page):
data = {'query': Domain, 'page': page, 'fields': ['ip']}
try:
res = requests.post(API_URL, data=json.dumps(data), auth=(API_ID, API_SECRET), headers=headers)
results = res.json()['results']
ips = []
for i in results:
ips.append(i['ip... |
def Dictionary_to_Matrix(dict_rep, ga):
lst_mat = []
for e_row in ga.basis:
lst_mat_row = (len(ga.basis) * [S.Zero])
element = dict_rep.get(e_row, S.Zero)
if isinstance(element, mv.Mv):
element = element.obj
for (coef, base) in metric.linear_expand_terms(element):
... |
_jit
def sliced_argmax(inp, slices, out=None):
    """Compute the argmax of ``inp`` within each consecutive slice.

    ``slices`` holds boundary offsets; segment ``i`` is
    ``inp[slices[i]:slices[i + 1]]``.  The returned index is relative to the
    start of its own segment, and empty segments yield -1.

    Args:
        inp: 1-D array of values.
        slices: monotone array of segment boundaries, length ``n + 1``.
        out: optional preallocated int64 result buffer of length ``n``;
            a fresh one filled with -1 is created when omitted.

    Returns:
        Array of per-segment argmax positions (or -1 for empty segments).
    """
    n_segments = len(slices) - 1
    if out is None:
        out = np.full(n_segments, -1, dtype=np.int64)
    for seg in range(n_segments):
        start, stop = slices[seg], slices[seg + 1]
        # Leave -1 in place for an empty segment.
        if start != stop:
            out[seg] = np.argmax(inp[start:stop])
    return out
class AssociationStrength(Predictor):
def predict(self, weight=None):
res = Scoresheet()
for (a, b) in self.likely_pairs():
w = (neighbourhood_intersection_size(self.G, a, b, weight) / (neighbourhood_size(self.G, a, weight) * neighbourhood_size(self.G, b, weight)))
if (w > 0)... |
class MMDRegularizer(Regularizer):
def __init__(self, l=1, beta=1.0):
self.uses_learning_phase = 1
self.l = l
self.beta = beta
def set_layer(self, layer):
self.layer = layer
def __call__(self, loss):
if (not hasattr(self, 'layer')):
raise Exception('Need t... |
class BatchItemBase(futures.FutureBase):
def __init__(self, batch):
super(BatchItemBase, self).__init__()
assert (not batch.is_flushed()), "can't add an item to the batch that is already flushed"
self.batch = batch
self.index = len(batch.items)
batch.items.append(self)
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.