code stringlengths 281 23.7M |
|---|
class _FileRenameCloser(_FileCloser):
def __init__(self, target_file, temp_file, delete_failures, parent, dry_run, is_binary):
self.target_file = target_file
self.dry_run = dry_run
self.is_binary = is_binary
super().__init__(temp_file, delete_failures, parent)
def _success(self):... |
class HobuneChannel():
id: str
name: str
date: Optional[int] = 0
removed_count: Optional[int] = 0
unlisted_count: Optional[int] = 0
videos: list = field(default_factory=list)
names: set = field(default_factory=set)
handles: set = field(default_factory=set)
username: Optional[str] = N... |
def maybe_process_conditional_comparison(self: IRBuilder, e: Expression, true: BasicBlock, false: BasicBlock) -> bool:
if ((not isinstance(e, ComparisonExpr)) or (len(e.operands) != 2)):
return False
ltype = self.node_type(e.operands[0])
rtype = self.node_type(e.operands[1])
if (not ((is_tagged(... |
class _TimeGoal():
def __init__(self, dt: (((timedelta | datetime) | int) | float)):
self.dt = (dt if isinstance(dt, (timedelta, datetime)) else timedelta(seconds=dt))
self.start_time = None
def __call__(self, _):
if isinstance(self.dt, timedelta):
if (self.start_time is None... |
def _perturb_vec(mean, cov, nsamps, perturb_diag=1e-10):
ndim = len(mean)
if (not np.allclose(cov, cov.T)):
raise ValueError('Covariance matrix is not symmetric.')
cov += (np.eye(ndim) * perturb_diag)
l_mat = np.linalg.cholesky(cov)
x_mat = np.random.normal(loc=0.0, scale=1.0, size=(ndim, ns... |
class Defaults(NamedTuple):
    """Bundle of the default settings groups.

    NOTE(review): ``ActionSettings`` / ``ValidationSettings`` are project
    types; each entry they contain is assumed to expose ``model_dump()`` —
    confirm against their definitions.
    """

    actions: ActionSettings
    validations: ValidationSettings

    def dict(self) -> dict[str, Any]:
        """Merge every entry's ``model_dump()`` output into one flat dict.

        Later entries overwrite earlier ones on key collision, mirroring
        dict-union semantics.
        """
        merged: dict[str, Any] = {}
        # Iterating a NamedTuple yields its fields in declaration order.
        for group in self:
            for entry in group.values():
                merged |= entry.model_dump()
        return merged
def model_with_global_max_pool2d():
inputs = tf.keras.Input(shape=(8, 8, 3))
x = tf.keras.layers.Conv2D(8, (2, 2))(inputs)
x = tf.keras.layers.GlobalMaxPool2D()(x)
x = tf.keras.layers.Flatten()(x)
outputs = tf.keras.layers.Dense(10, activation=tf.nn.softmax, name='model_with_global_max_pool2d')(x)
... |
def test_delay_suppresses_output(capsys, monkeypatch):
monkeypatch.setattr(pipx.animate, 'stderr_is_tty', True)
monkeypatch.setenv('COLUMNS', '80')
test_string = 'asdf'
with pipx.animate.animate(test_string, do_animation=True, delay=0.9):
time.sleep(0.5)
captured = capsys.readouterr()
as... |
def tiny_imagenet_parse(serialized_example):
feature_map = {'height': tf.compat.v1.FixedLenFeature((), tf.int64), 'width': tf.compat.v1.FixedLenFeature((), tf.int64), 'channel': tf.compat.v1.FixedLenFeature((), tf.int64), 'label': tf.compat.v1.FixedLenFeature((), tf.int64), 'image_raw': tf.compat.v1.FixedLenFeature... |
def _expand_prefix_paths(urls: List[S3Url], content_type_provider: Callable[([str], ContentType)], **s3_client_kwargs) -> Tuple[(Dict[(ContentType, List[str])], CachedFileMetadataProvider)]:
assert (len(urls) == 1), f'Expected 1 S3 prefix, found {len(urls)}.'
objects = list(filter_objects_by_prefix(urls[0].buck... |
class SegformerMixFFN(nn.Module):
def __init__(self, config, in_features, hidden_features=None, out_features=None):
super().__init__()
out_features = (out_features or in_features)
self.dense1 = nn.Linear(in_features, hidden_features)
self.dwconv = SegformerDWConv(hidden_features)
... |
class TestGraphicsExpose(EndianTest):
def setUp(self):
self.evt_args_0 = {'count': 49818, 'drawable': , 'height': 2892, 'major_event': 172, 'minor_event': 50267, 'sequence_number': 50375, 'type': 133, 'width': 38020, 'x': 54088, 'y': 17918}
self.evt_bin_0 = b'\x85\x00\xc7\xc4\xaaR\x0eVH\xd3\xfeE\x84... |
class MultiTextureSprite(pyglet.sprite.AdvancedSprite):
group_class = MultiTextureSpriteGroup
def __init__(self, imgs, x=0, y=0, z=0, blend_src=GL_SRC_ALPHA, blend_dest=GL_ONE_MINUS_SRC_ALPHA, batch=None, group=None, subpixel=False, program=None):
textures = {}
for (name, img) in imgs.items():
... |
def accum_slots(usr_act_turns):
inform_hist = {}
book_inform_hist = {}
output_str = []
for usr_act in usr_act_turns:
if (usr_act.act in [UserAct.INFORM_TYPE, UserAct.INFORM_TYPE_CHANGE]):
inform_hist.update({k: v for (k, v) in usr_act.parameters.items() if (v != dialog_config.I_DO_NO... |
def test_pype_no_skip_parse(mock_pipe):
context = Context({'pype': {'name': 'pipe name', 'pipeArg': 'argument here', 'useParentContext': False, 'skipParse': False, 'raiseError': True}})
with patch_logger('pypyr.steps.pype', logging.INFO) as mock_logger_info:
with get_arb_pipeline_scope(context):
... |
class Elongation():
def __init__(self, gdf):
self.gdf = gdf
bbox = shapely.minimum_rotated_rectangle(gdf.geometry)
a = bbox.area
p = bbox.length
cond1 = (p ** 2)
cond2 = (16 * a)
bigger = (cond1 >= cond2)
sqrt = np.empty(len(a))
sqrt[bigger] = ... |
def _subtree_from_traversal(traversal, tree):
is_frozen = isinstance(tree, flax.core.frozen_dict.FrozenDict)
flat_tree = {}
for (path, leaf) in zip(traversal.iterate(_tree_of_paths(tree)), traversal.iterate(tree)):
flat_tree[path] = leaf
new_tree = traverse_util.unflatten_dict({tuple(k.split('/'... |
_fixtures(PartyAccountFixture)
def test_migrate_password_hash_scheme(party_account_fixture):
fixture = party_account_fixture
system_account = fixture.system_account
md5_hash = passlib.hash.hex_md5.hash(system_account.password)
system_account.password_hash = md5_hash
system_account.authenticate(syste... |
def order_clothes_list(clothes_list):
ordered_clothes_list = clothes_list
for current_type in reversed(CLOTHING_TYPE_ORDER):
for clothes in clothes_list:
if clothes.db.clothing_type:
item_type = clothes.db.clothing_type
if (item_type == current_type):
... |
class Call(BaseCall):
def compute_msg_extra_gas(self, computation: ComputationAPI, gas: int, to: Address, value: int) -> int:
account_exists = computation.state.account_exists(to)
transfer_gas_fee = (constants.GAS_CALLVALUE if value else 0)
create_gas_fee = (constants.GAS_NEWACCOUNT if (not ... |
def train(epoch):
print(('\nEpoch: %d' % epoch))
net.train()
train_loss = 0
correct = 0
total = 0
for (batch_idx, (inputs, targets)) in enumerate(trainloader):
(inputs, targets) = (inputs.to(device), targets.to(device))
optimizer.zero_grad()
outputs = net(inputs)
... |
class ReadRegistersRequestBase(ModbusRequest):
_rtu_frame_size = 8
def __init__(self, address, count, slave=0, **kwargs):
super().__init__(slave, **kwargs)
self.address = address
self.count = count
def encode(self):
return struct.pack('>HH', self.address, self.count)
def ... |
class FontConfigPattern():
def __init__(self, fontconfig, pattern=None):
self._fontconfig = fontconfig
self._pattern = pattern
def is_valid(self):
return (self._fontconfig and self._pattern)
def _create(self):
assert (not self._pattern)
assert self._fontconfig
... |
_on_failure
.parametrize('privatekey_seed', ['test_token_registration:{}'])
.parametrize('number_of_nodes', [1])
.parametrize('channels_per_node', [0])
.parametrize('number_of_tokens', [1])
def test_register_token_insufficient_eth(raiden_network: List[RaidenService], retry_timeout, unregistered_token):
app1 = raide... |
def make_optimizer(cfg, model):
params = []
for (key, value) in model.named_parameters():
if (not value.requires_grad):
continue
lr = cfg.SOLVER.BASE_LR
if ('bias' in key):
lr = (cfg.SOLVER.BASE_LR * cfg.SOLVER.BIAS_LR_FACTOR)
params += [{'params': [value]... |
def command_double(command, args):
def setup(parser):
add_double_options(parser)
parser.set_defaults(plot_velocity=None)
parser.set_defaults(plot_everything=None)
(parser, opts, args) = cl_parse(command, args, setup=setup)
filename = verify_arguements('double', 1, args)
verify_op... |
class RoIAlign(nn.Module):
_api_warning({'out_size': 'output_size', 'sample_num': 'sampling_ratio'}, cls_name='RoIAlign')
def __init__(self, output_size, spatial_scale=1.0, sampling_ratio=0, pool_mode='avg', aligned=True, use_torchvision=False):
super(RoIAlign, self).__init__()
self.output_size ... |
def build_vocab(imgs, params):
count_thr = params['word_count_threshold']
counts = {}
for img in imgs:
for sent in img['sentences']:
for w in sent['tokens']:
counts[w] = (counts.get(w, 0) + 1)
cw = sorted([(count, w) for (w, count) in counts.items()], reverse=True)
... |
def analyze_dialogue(dialogue, maxlen):
d = dialogue
if ((len(d['log']) % 2) != 0):
print('odd # of turns')
return None
d_pp = {}
d_pp['goal'] = d['goal']
usr_turns = []
sys_turns = []
for i in range(len(d['log'])):
if (len(d['log'][i]['text'].split()) > maxlen):
... |
def calc_uncertainty(path, uncert_dict):
uc_sents = []
with open(path, 'r', encoding='utf-8') as file:
for line in file:
ws = line.strip('\n').split()
ucs = [(uncert_dict[w] if (w in uncert_dict.keys()) else 1e-06) for w in ws]
uc_sent = np.mean(ucs)
uc_se... |
def get_reprs_at_word_tokens(model: AutoModelForCausalLM, tok: AutoTokenizer, context_templates: List[str], words: List[str], layer: int, module_template: str, subtoken: str, track: str='in') -> torch.Tensor:
idxs = get_words_idxs_in_templates(tok, context_templates, words, subtoken)
return get_reprs_at_idxs(mo... |
class MaxPooling1D(_Pooling1D):
_pooling1d_support
def __init__(self, pool_size=2, strides=None, padding='valid', **kwargs):
super(MaxPooling1D, self).__init__(pool_size, strides, padding, **kwargs)
def _pooling_function(self, inputs, pool_size, strides, padding, data_format):
output = K.poo... |
class WorkerError(object):
def __init__(self, error_code, base_message=None):
self._error_code = error_code
self._base_message = base_message
self._error_handlers = {'io.quay.builder.buildpackissue': {'message': 'Could not load build package', 'is_internal': True}, 'io.quay.builder.gitfailur... |
.parametrize('vulns', itertools.permutations([VulnerabilityResult(id='PYSEC-0', description='fake', fix_versions=[Version('1.1.0')], aliases={'CVE-XXXX-YYYYY'}), VulnerabilityResult(id='FAKE-1', description='fake', fix_versions=[Version('1.1.0')], aliases={'CVE-XXXX-YYYYY'}), VulnerabilityResult(id='CVE-XXXX-YYYYY', de... |
def initialize(ql: Qiling, context: UefiContext, gST: int):
ql.loader.gST = gST
gBS = (gST + EFI_SYSTEM_TABLE.sizeof())
gRT = (gBS + EFI_BOOT_SERVICES.sizeof())
gDS = (gRT + EFI_RUNTIME_SERVICES.sizeof())
cfg = (gDS + ds.EFI_DXE_SERVICES.sizeof())
ql.log.info(f'Global tables:')
ql.log.info(f... |
def compute_aggregate_values(value_list):
from scipy import stats
import numpy as np
value_list = sorted(value_list)
results = []
n = len(value_list)
assert (n > 0)
results.append(n)
avg = (sum(value_list) / n)
results.append(avg)
if (n > 1):
variance = online_variance(va... |
class FC5_RaidData(FC4_RaidData):
removedKeywords = FC4_RaidData.removedKeywords
removedAttrs = FC4_RaidData.removedAttrs
def __init__(self, *args, **kwargs):
FC4_RaidData.__init__(self, *args, **kwargs)
self.bytesPerInode = kwargs.get('bytesPerInode', 4096)
def _getArgsAsStr(self):
... |
_functional
def _delete_bn_from_functional(model: tf.keras.Model, bn_layers_to_remove: List[tf.keras.layers.BatchNormalization]) -> tf.keras.Model:
def wrapped_bn_layer_in_bns_to_remove(layer: tf.keras.layers.Layer) -> bool:
return (isinstance(layer, QcQuantizeWrapper) and (layer._layer_to_wrap in bn_layers... |
class SamplerReport():
def __init__(self) -> None:
self._chain_warnings: Dict[(int, List[SamplerWarning])] = {}
self._global_warnings: List[SamplerWarning] = []
self._n_tune = None
self._n_draws = None
self._t_sampling = None
def _warnings(self):
chains = sum(self... |
class ButterworthNotch(CtrlNode):
nodeName = 'ButterworthNotchFilter'
uiTemplate = [('low_wPass', 'spin', {'value': 1000.0, 'step': 1, 'dec': True, 'bounds': [0.0, None], 'suffix': 'Hz', 'siPrefix': True}), ('low_wStop', 'spin', {'value': 2000.0, 'step': 1, 'dec': True, 'bounds': [0.0, None], 'suffix': 'Hz', 's... |
def test_get_users_autocomplete(requests_mock):
requests_mock.get(f'{API_V1}/users/autocomplete', json=SAMPLE_DATA['get_users_autocomplete'], status_code=200)
response = get_users_autocomplete(q='niconoe')
first_result = response['results'][0]
assert (len(response['results']) == response['total_results'... |
_get_vector_length.register(Subtensor)
def _get_vector_length_Subtensor(op, var):
try:
indices = pytensor.tensor.subtensor.get_idx_list(var.owner.inputs, var.owner.op.idx_list)
start = (None if (indices[0].start is None) else get_underlying_scalar_constant_value(indices[0].start))
stop = (No... |
class Bsp(Layout):
defaults = [('border_focus', '#881111', 'Border colour(s) for the focused window.'), ('border_normal', '#220000', 'Border colour(s) for un-focused windows.'), ('border_width', 2, 'Border width.'), ('border_on_single', False, 'Draw border when there is only one window.'), ('margin_on_single', None... |
class saturation_nonlinearity(DescribingFunctionNonlinearity):
def __init__(self, ub=1, lb=None):
super(saturation_nonlinearity, self).__init__()
if (lb == None):
(lb, ub) = ((- abs(ub)), abs(ub))
if ((lb > 0) or (ub < 0) or ((lb + ub) != 0)):
warn('asymmetric saturat... |
class DCNv2Function(Function):
def __init__(self, stride, padding, dilation=1, deformable_groups=1):
super(DCNv2Function, self).__init__()
self.stride = stride
self.padding = padding
self.dilation = dilation
self.deformable_groups = deformable_groups
def forward(self, inp... |
def update_args(args):
import os
from fairseq.checkpoint_utils import load_checkpoint_to_cpu
bart_large_cnn_path = os.path.join(os.path.dirname(os.path.dirname(args.pretrained_doc_model_path)), 'bart.large.cnn/model.pt')
state = load_checkpoint_to_cpu(bart_large_cnn_path)
new_args = state['args']
... |
_model
def efficientnet_b1_pruned(pretrained=False, **kwargs):
kwargs['bn_eps'] = BN_EPS_TF_DEFAULT
kwargs['pad_type'] = 'same'
variant = 'efficientnet_b1_pruned'
model = _gen_efficientnet(variant, channel_multiplier=1.0, depth_multiplier=1.1, pruned=True, pretrained=pretrained, **kwargs)
return mod... |
class StringFormatterChecker():
chk: mypy.checker.TypeChecker
msg: MessageBuilder
exprchk: mypy.checkexpr.ExpressionChecker
def __init__(self, exprchk: mypy.checkexpr.ExpressionChecker, chk: mypy.checker.TypeChecker, msg: MessageBuilder) -> None:
self.chk = chk
self.exprchk = exprchk
... |
class StringSpec(Spec):
def __init__(self, name, length, default=None):
if (default is None):
default = (u' ' * length)
super(StringSpec, self).__init__(name, default)
self.len = length
def read(s, header, frame, data):
chunk = data[:s.len]
try:
as... |
def evaluate(org_seq_path: Path, dec_seq_path: Path, bitstream_path: Path, cuda: bool=False) -> Dict[(str, Any)]:
org_seq = RawVideoSequence.from_file(str(org_seq_path))
dec_seq = RawVideoSequence.new_like(org_seq, str(dec_seq_path))
max_val = ((2 ** org_seq.bitdepth) - 1)
num_frames = len(org_seq)
... |
def test_try_int_or_force_to_lower_case():
str1 = '17'
assert (cu.try_int_or_force_to_lower_case(str1) == 17)
str1 = 'ABC'
assert (cu.try_int_or_force_to_lower_case(str1) == 'abc')
str1 = 'X19'
assert (cu.try_int_or_force_to_lower_case(str1) == 'x19')
str1 = ''
assert (cu.try_int_or_forc... |
class BasicConv2d(nn.Module):
def __init__(self, in_planes, out_planes, kernel_size, stride, padding=0):
super(BasicConv2d, self).__init__()
self.conv = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=padding, bias=False)
self.bn = nn.BatchNorm2d(out_planes, ... |
def test_readconfig():
bzapi = tests.mockbackend.make_bz(version='4.4.0', rhbz=True)
bzapi.url = 'example.com'
temp = tempfile.NamedTemporaryFile(mode='w')
def _check(user, password, api_key, cert):
assert (bzapi.user == user)
assert (bzapi.password == password)
assert (bzapi.api... |
def infer_shape(outs, inputs, input_shapes):
for (inp, inp_shp) in zip(inputs, input_shapes):
if ((inp_shp is not None) and (len(inp_shp) != inp.type.ndim)):
assert (len(inp_shp) == inp.type.ndim)
shape_feature = ShapeFeature()
shape_feature.on_attach(FunctionGraph([], []))
for (inp,... |
def mean_iou(results, gt_seg_maps, num_classes, ignore_index, nan_to_num=None, label_map=dict(), reduce_zero_label=False):
iou_result = eval_metrics(results=results, gt_seg_maps=gt_seg_maps, num_classes=num_classes, ignore_index=ignore_index, metrics=['mIoU'], nan_to_num=nan_to_num, label_map=label_map, reduce_zero... |
class Effect6793(BaseEffect):
type = 'passive'
def handler(fit, src, context, projectionRange, **kwargs):
fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Mining Foreman')), 'warfareBuff1Value', src.getModifiedItemAttr('shipBonusORECapital2'), skill='Capital Industrial Ships', **kwargs... |
def fornav(cols, rows, area_def, data_in, rows_per_scan=None, fill=None, out=None, weight_count=10000, weight_min=0.01, weight_distance_max=1.0, weight_delta_max=10.0, weight_sum_min=(- 1.0), maximum_weight_mode=False):
(data_in, convert_to_masked, fill) = _data_in_as_masked_arrays(data_in, fill)
if (out is not... |
def l1_ewta_loss_prob(prediction, target, k=6, eps=1e-06, mr=2.0):
num_mixtures = prediction.shape[1]
output_dim = target.shape[(- 1)]
target = target.unsqueeze(1).expand((- 1), num_mixtures, (- 1), (- 1))
xy_points = prediction.narrow((- 1), 0, output_dim)
probs = prediction.narrow((- 1), output_di... |
def build_optimizer_schedulers(config):
param_scheduler_config = copy.deepcopy(config.get('param_schedulers', {}))
for cfg in param_scheduler_config.values():
cfg['num_epochs'] = config['num_epochs']
param_schedulers = {param: build_param_scheduler(cfg) for (param, cfg) in param_scheduler_config.ite... |
def format(color, style=''):
_color = QColor()
if (type(color) is not str):
_color.setRgb(color[0], color[1], color[2])
else:
_color.setNamedColor(color)
_format = QTextCharFormat()
_format.setForeground(_color)
if ('bold' in style):
_format.setFontWeight(QFont.Weight.Bol... |
class Lookup():
def __init__(self, path: FastPath):
base = os.path.basename(path.root).lower()
base_is_egg = base.endswith('.egg')
self.infos = FreezableDefaultDict(list)
self.eggs = FreezableDefaultDict(list)
for child in path.children():
low = child.lower()
... |
def pblock_054(content):
stage_number = int(get1(content, b'04'))
cfs = sxml.Coefficients(cf_transfer_function_type=pcftype(get1(content, b'03')), input_units=sxml.Units(name=punit(get1(content, b'05'))), output_units=sxml.Units(name=punit(get1(content, b'06'))), numerator_list=list(map(pcfu, getn(content, b'08... |
class BSR(nn.Module):
def __init__(self, args, conv=common.default_conv):
super(BSR, self).__init__()
n_resblocks = args.n_resblocks
n_feats = args.n_feats
kernel_size = 3
self.scale_idx = 0
act = nn.ReLU(True)
self.DWT = common.DWT()
self.IWT = common... |
class TestBase(metaclass=ABCMeta):
encoder_type: TQubit = None
errors = ['x', 'z']
def get_logical_error_rate(self, readout_strings, correct_logical_value, logical_readout_type, err_prob=None):
total_count = 0
total_errors = 0
for (readout, count) in readout_strings.items():
... |
def _make_smarts(*center_smarts_list):
N = len(center_smarts_list)
if (N == 1):
return center_smarts_list[0]
if (N == 2):
(A, B) = center_smarts_list
A = A.replace(':2', ':1')
B = B.replace(':1', ':2')
return ((A + '.') + B)
if (N == 3):
(A, B, C) = center... |
def unescape_html(resp, show=False):
response = ''
if hasattr(resp, 'read'):
response = resp.read()
if hasattr(resp, 'content'):
response = resp.content
encoding = chardet.detect(response)['encoding']
if (not encoding):
encoding = 'utf-8'
if show:
logger.debug(f"d... |
.skipif((not PY_3_8_PLUS), reason='cached_property is 3.8+')
def test_slots_cached_properties_work_independently():
(slots=True)
class A():
x = attr.ib()
_property
def f_1(self):
return self.x
_property
def f_2(self):
return (self.x * 2)
obj = ... |
class PoolFormerFeatureExtractor(PoolFormerImageProcessor):
def __init__(self, *args, **kwargs) -> None:
warnings.warn('The class PoolFormerFeatureExtractor is deprecated and will be removed in version 5 of Transformers. Please use PoolFormerImageProcessor instead.', FutureWarning)
super().__init__(... |
class CalcRemoveProjectedFighterCommand(wx.Command):
def __init__(self, fitID, position):
wx.Command.__init__(self, True, 'Add Projected Fighter')
self.fitID = fitID
self.position = position
self.savedFighterInfo = None
def Do(self):
pyfalog.debug('Doing removal of projec... |
class BoundingBox(VersionBase):
def __init__(self, width, length, height, x_center, y_center, z_center):
self.boundingbox = Dimensions(width, length, height)
self.center = Center(x_center, y_center, z_center)
def __eq__(self, other):
if isinstance(other, BoundingBox):
if ((se... |
def test_guess_c_lexer():
code = '\n #include <stdio.h>\n #include <stdlib.h>\n\n int main(void);\n\n int main(void) {\n uint8_t x = 42;\n uint8_t y = x + 1;\n\n /* exit 1 for success! */\n return 1;\n }\n '
lexer = guess_lexer(code)
assert (lexer.__class__.__na... |
(u'user loads the data without providing a config file')
def step_impl_user_loads_no_config(context):
from datetime import datetime
from satpy import Scene, find_files_and_readers
os.chdir('/tmp/')
readers_files = find_files_and_readers(sensor='viirs', start_time=datetime(2015, 3, 11, 11, 20), end_time=... |
('/v1/find/repositories')
class ConductRepositorySearch(ApiResource):
_args()
_param('query', 'The search query.', type=str, default='')
_param('page', 'The page.', type=int, default=1)
_param('includeUsage', 'Whether to include usage metadata', type=truthy_bool, default=False)
('conductRepoSearch')... |
('pypyr.moduleloader.get_module')
(Step, 'invoke_step', side_effect=[None, ValueError('whoops')])
def test_while_error_kicks_loop(mock_invoke, mock_moduleloader):
step = Step({'name': 'step1', 'while': {'max': 3}})
context = get_test_context()
original_len = len(context)
with patch_logger('pypyr.dsl', l... |
def reserialize(file_):
    """Round-trip the JSON file at *file_* through the canonical serializer.

    Logs and re-raises ``ValueError`` when the file is not valid JSON;
    otherwise rewrites the file using the module-level ``JSON_FORMAT_KWARGS``
    and a trailing newline.
    """
    try:
        with open(file_) as handle:
            document = json.load(handle)
    except ValueError:
        # Message kept verbatim so existing log consumers still match it.
        logging.error('Json syntax error in file {}'.format(file_))
        raise
    with open(file_, 'w') as handle:
        json.dump(document, handle, **JSON_FORMAT_KWARGS)
        handle.write('\n')
def eval_IUEN(pred, label):
(lt1, pt1, cnt1) = eval_nested(pred['intersect'], label['intersect'])
(lt2, pt2, cnt2) = eval_nested(pred['except'], label['except'])
(lt3, pt3, cnt3) = eval_nested(pred['union'], label['union'])
label_total = ((lt1 + lt2) + lt3)
pred_total = ((pt1 + pt2) + pt3)
cnt =... |
class CmdForce(COMMAND_DEFAULT_CLASS):
key = 'force'
locks = 'cmd:perm(spawn) or perm(Builder)'
help_category = 'Building'
perm_used = 'edit'
def func(self):
if ((not self.lhs) or (not self.rhs)):
self.caller.msg('You must provide a target and a command string to execute.')
... |
class MCR(IntEnum):
HALT = (1 << 0)
SMPL_PT = (3 << 8)
CLR_RXF = (1 << 10)
CLR_TXF = (1 << 11)
DIS_RXF = (1 << 12)
DIS_TXF = (1 << 13)
MDIS = (1 << 14)
DOZE = (1 << 15)
PCSIS0 = (1 << 16)
PCSIS1 = (1 << 17)
PCSIS2 = (1 << 18)
PCSIS3 = (1 << 19)
PCSIS4 = (1 << 20)
... |
def validate_data(data_to_be_saved):
logging.debug('Validating an iCOM dataset.')
try:
delivery = pymedphys.Delivery.from_icom(data_to_be_saved)
logging.debug('iCOM dataset was found to be valid.')
except Exception as _:
logging.debug('Was not able to transform the iCOM dataset.')
... |
def is_same_graph(var1, var2, givens=None):
use_equal_computations = True
if (givens is None):
givens = {}
if (not isinstance(givens, dict)):
givens = dict(givens)
rval1 = is_same_graph_with_merge(var1=var1, var2=var2, givens=givens)
if givens:
ok = True
in_xs = []
... |
class DictWrapper(GetAttrData):
def __init__(self, dict):
self.__dict__['_data'] = dict
def __getitem__(self, key):
return self._data[key]
def __setitem__(self, key, value):
self._data[key] = value
def __delitem__(self, key):
del self._data[key]
def __setattr__(self, ... |
class ApplyXToLthQubit(UnaryIterationGate):
def __init__(self, selection_bitsize: int, target_bitsize: int, control_bitsize: int=1):
self._selection_bitsize = selection_bitsize
self._target_bitsize = target_bitsize
self._control_bitsize = control_bitsize
_property
def control_registe... |
class TestDemo(unittest.TestCase):
def setUp(self):
import tempfile
self.base_dir = tempfile.mkdtemp()
self.prev_dir = os.getcwd()
os.chdir(self.base_dir)
def tearDown(self):
os.chdir(self.prev_dir)
try:
import shutil
shutil.rmtree(self.bas... |
_REGISTRY.register()
def resnet101_ms_l1(pretrained=True, **kwargs):
    """Build a ResNet-101 backbone with MixStyle inserted after ``layer1``.

    When *pretrained* is true, ImageNet weights from ``model_urls`` are
    loaded into the network before it is returned.
    """
    from dassl.modeling.ops import MixStyle

    net = ResNet(
        block=Bottleneck,
        layers=[3, 4, 23, 3],
        ms_class=MixStyle,
        ms_layers=['layer1'],
    )
    if pretrained:
        init_pretrained_weights(net, model_urls['resnet101'])
    return net
def getArgFloat(name, args, min, max, main=True):
if main:
try:
arg = next(args)
except:
doError((name + ': no argument supplied'), True)
else:
arg = args
try:
val = float(arg)
except:
doError((name + ': non-numerical value given'), True)
... |
def postprocess_text(preds, references_s, metric_name):
preds = [pred.strip() for pred in preds]
references_s = [[reference.strip() for reference in references] for references in references_s]
if (metric_name in ['sacrebleu']):
ref_max_len = max([len(ref) for ref in references_s])
for ref in... |
def RenderGradientBar(windowColor, width, height, sFactor, eFactor, mFactor=None, fillRatio=2):
if ((sFactor == 0) and (eFactor == 0) and (mFactor is None)):
return DrawFilledBitmap(width, height, windowColor)
gStart = color.GetSuitable(windowColor, sFactor)
if mFactor:
gMid = color.GetSuita... |
def get_utilization(delta):
    """Convert raw CPU-time deltas into per-category percentages.

    *delta* is indexable with counters at positions 0-3 reported as
    (user, nice, system, idle); the last element is the divisor —
    presumably the total elapsed time (confirm against the caller).
    Returns a dict of percentages, or all zeros when the divisor is 0.
    """
    total = delta[-1]
    if total == 0:
        # Nothing elapsed: report zeros instead of dividing by zero.
        return {'user': 0, 'nice': 0, 'system': 0, 'idle': 0}
    categories = ('user', 'nice', 'system', 'idle')
    return {name: 100.0 * (delta[pos] / total) for pos, name in enumerate(categories)}
def demo_tracking_visualization(model_spec=ModelPreset.constant_acceleration_and_static_box_size_2d.value, num_steps: int=1000, num_objects: int=20):
gen = image_generator(num_steps=num_steps, num_objects=num_objects, max_omega=0.03, miss_prob=0.33, disappear_prob=0.0, det_err_sigma=3.33)
dt = (1 / 24)
trac... |
class TwoLayersModel(nn.Module):
def __init__(self, config):
super().__init__()
self.gpu = config.use_gpu
self.input_dim = config.input_dim
self.hidden1_dim = config.hidden1_dim
self.hidden2_dim = config.hidden2_dim
self.linear1 = nn.Linear(self.input_dim, self.hidden... |
class ExampleDataset(Dataset):
def __init__(self):
self.index = 0
self.eval_result = [0.1, 0.4, 0.3, 0.7, 0.2, 0.05, 0.4, 0.6]
def __getitem__(self, idx):
results = dict(imgs=torch.tensor([1]))
return results
def __len__(self):
return 1
_autospec
def evaluate(... |
def reduce_dict(input_dict, average=True):
world_size = get_world_size()
if (world_size < 2):
return input_dict
with torch.no_grad():
names = []
values = []
for k in sorted(input_dict.keys()):
names.append(k)
values.append(input_dict[k])
values... |
class DropBlock2d(nn.Module):
def __init__(self, drop_prob: float=0.1, block_size: int=7, gamma_scale: float=1.0, with_noise: bool=False, inplace: bool=False, batchwise: bool=False, fast: bool=True):
super(DropBlock2d, self).__init__()
self.drop_prob = drop_prob
self.gamma_scale = gamma_scal... |
class Migration(migrations.Migration):
    """Run ``convert_data`` and switch ``Keynote.slug`` to an I18n char field."""

    dependencies = [
        ('conferences', '0022_allow_multilanguage_keynote_info'),
    ]

    operations = [
        # Forward data migration; no reverse function is supplied.
        migrations.RunPython(convert_data),
        migrations.AlterField(
            model_name='keynote',
            name='slug',
            field=i18n.fields.I18nCharField(
                max_length=200,
                unique=True,
                verbose_name='slug',
            ),
        ),
    ]
.parametrize('comm_pairs, value', (([(b'*LANG SCPI', None), (b':READ?', b'9.900000E+37\n')], 9.9e+37), ([(b'*LANG SCPI', None), (b':READ?', b'9.900000E+37\n')], 9.9e+37)))
def test_resistance_getter(comm_pairs, value):
with expected_protocol(KeithleyDMM6500, comm_pairs) as inst:
assert (inst.resistance == v... |
def test_inputs():
assert (distance('10').value() == 10.0)
assert (distance(10).value() == 10.0)
assert (distance(10.0).value() == 10.0)
assert (distance(10.0, None).value() == 10.0)
assert (distance('1/2').value() == 0.5)
assert (distance('1 1/2').value() == 1.5)
assert (distance('11/2').va... |
(scope='module')
def purerpc_codegen_greeter_port(greeter_pb2, greeter_grpc):
class Servicer(greeter_grpc.GreeterServicer):
async def SayHello(self, message):
return greeter_pb2.HelloReply(message=('Hello, ' + message.name))
async def SayHelloGoodbye(self, message):
(yield gr... |
class SyncMaster(object):
def __init__(self, master_callback):
self._master_callback = master_callback
self._queue = queue.Queue()
self._registry = collections.OrderedDict()
self._activated = False
def __getstate__(self):
return {'master_callback': self._master_callback}
... |
class Effect1182(BaseEffect):
    """Passive effect: boosts remote capacitor transmitter range.

    Applies the ship's ``shipBonusAC`` attribute, scaled by the
    'Amarr Cruiser' skill, to ``maxRange`` of fitted modules in the
    'Remote Capacitor Transmitter' group.
    """

    type = 'passive'

    def handler(fit, ship, context, projectionRange, **kwargs):
        # Restrict the boost to remote capacitor transmitter modules only.
        is_remote_cap = lambda mod: mod.item.group.name == 'Remote Capacitor Transmitter'
        fit.modules.filteredItemBoost(
            is_remote_cap,
            'maxRange',
            ship.getModifiedItemAttr('shipBonusAC'),
            skill='Amarr Cruiser',
            **kwargs,
        )
class TestPostInfraction(unittest.IsolatedAsyncioTestCase):
def setUp(self):
self.bot = MockBot()
self.member = MockMember(id=1234)
self.user = MockUser(id=1234)
self.ctx = MockContext(bot=self.bot, author=self.member)
async def test_normal_post_infraction(self):
now = da... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.