code stringlengths 281 23.7M |
|---|
class BottleneckTransform(nn.Sequential):
def __init__(self, width_in: int, width_out: int, stride: int, norm_layer: Callable[(..., nn.Module)], activation_layer: Callable[(..., nn.Module)], group_width: int, bottleneck_multiplier: float, se_ratio: Optional[float]) -> None:
layers: OrderedDict[(str, nn.Modu... |
class Adam(torch.optim.Optimizer):
def __init__(self, params, lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False):
defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay, amsgrad=amsgrad)
super(Adam, self).__init__(params, defaults)
def supports_memory_efficie... |
def build_lr_scheduler(epoch, warmup_epoch, optimizer, n_iter_per_epoch):
num_steps = int((epoch * n_iter_per_epoch))
warmup_steps = int((warmup_epoch * n_iter_per_epoch))
scheduler = CosineLRScheduler(optimizer, t_initial=num_steps, t_mul=1.0, lr_min=0, warmup_lr_init=0, warmup_t=warmup_steps, cycle_limit=... |
class traindataset(data.Dataset):
def __init__(self, root, transform=None, train=True, args=None):
self.root_dir = root
self.transform = transform
self.name = []
self.train = train
self.multitask = args.multitask
self.multiaug = args.multiaug
self.synthesis = ... |
def deprecation_warning(func_name, additional_info=None):
logger = logging.getLogger(__name__)
logger.debug('func_name: %s, additional_info: %s', func_name, additional_info)
msg = '{} has been deprecated and will be removed from a future update.'.format(func_name)
if (additional_info is not None):
... |
class TestEnumTypes():
def test_enum_class(self):
for invalid_name in ('a', '_A', '0'):
try:
EnumList(invalid_name)
except AttributeError:
pass
else:
raise Exception('EnumList with invalid name should fail.')
try... |
class NitrobitNet(BaseAccount):
__name__ = 'NitrobitNet'
__type__ = 'account'
__version__ = '0.01'
__status__ = 'testing'
__description__ = 'Nitrobit.net account plugin'
__license__ = 'GPLv3'
__authors__ = [('GammaC0de', 'nitzo2001[AT]yahoo[DOT]com')]
def grab_info(self, user, password, ... |
class F33_TimesourceData(BaseData):
removedKeywords = BaseData.removedKeywords
removedAttrs = BaseData.removedAttrs
def __init__(self, *args, **kwargs):
BaseData.__init__(self, *args, **kwargs)
self.ntp_server = kwargs.get('ntp_server', '')
self.ntp_pool = kwargs.get('ntp_pool', '')
... |
def test_slots_super_property_get():
(slots=True)
class A():
x = attr.ib()
def f(self):
return self.x
(slots=True)
class B(A):
def f(self):
return (super().f ** 2)
(slots=True)
class C(A):
def f(self):
return (super(C, self).f *... |
class TestImplicitNamespacePackage():
(autouse=True, scope='class')
def built(self, builder):
builder('py3implicitnamespace')
def test_sibling_import_from_namespace(self, parse):
example_file = parse('_build/html/autoapi/namespace/example/index.html')
assert example_file.find(id='nam... |
.parametrize('function_', FUNCTIONS_WITH_RANGE)
def test_given_function_is_set_then_range_available(resetted_dmm6500, function_):
resetted_dmm6500.mode = function_
assert (len(resetted_dmm6500.check_errors()) == 0)
new = (function_ + ' range')
new = new.replace(' ', '_')
range_ = getattr(resetted_dm... |
class RPMSpecLexer(RegexLexer):
name = 'RPMSpec'
aliases = ['spec']
filenames = ['*.spec']
mimetypes = ['text/x-rpm-spec']
url = '
version_added = '1.6'
_directives = '(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files)'
tokens = {'root': [('#.*$', Comment... |
def train_meta(train_loader, validation_loader, model, vnet, optimizer_a, optimizer_c, epoch):
batch_time = AverageMeter()
losses = AverageMeter()
meta_losses = AverageMeter()
top1 = AverageMeter()
meta_top1 = AverageMeter()
model.train()
iter_validation_loader = iter(validation_loader)
... |
_on_py2
def test_invalid_self():
class NotPybindDerived(object):
pass
class BrokenTF1(m.TestFactory1):
def __init__(self, bad):
if (bad == 1):
a = m.TestFactory2(tag.pointer, 1)
m.TestFactory1.__init__(a, tag.pointer)
elif (bad == 2):
... |
class WeightedCrossEntropyLoss(nn.Module):
def __init__(self, thresholds, weight=None, LAMBDA=None):
super().__init__()
self._weight = weight
self._lambda = LAMBDA
self._thresholds = thresholds
def forward(self, inputs, targets):
inputs = inputs.permute((0, 2, 1, 3, 4))
... |
class ASPP(nn.Module):
def __init__(self, in_channels, atrous_rates, out_channels=256):
super(ASPP, self).__init__()
modules = []
modules.append(nn.Sequential(nn.Conv2d(in_channels, out_channels, 1, bias=False), nn.BatchNorm2d(out_channels), nn.ReLU()))
rates = tuple(atrous_rates)
... |
def get_score(a, b, c, target_len, bitext_score1, bitext_score2=None, lm_score=None, lenpen=None, src_len=None, tgt_len=None, bitext1_backwards=False, bitext2_backwards=False, normalize=False):
if bitext1_backwards:
bitext1_norm = src_len
else:
bitext1_norm = tgt_len
if (bitext_score2 is not... |
class Widar_LSTM(nn.Module):
def __init__(self, num_classes):
super(Widar_LSTM, self).__init__()
self.lstm = nn.LSTM(400, 64, num_layers=1)
self.fc = nn.Linear(64, num_classes)
def forward(self, x):
x = x.view((- 1), 22, 400)
x = x.permute(1, 0, 2)
(_, (ht, ct)) =... |
class ipf_filter_t(ctypes.Structure):
_fields_ = (('cookie', POINTER64), ('name', POINTER64), ('ipf_input', POINTER64), ('ipf_output', POINTER64), ('ipf_detach', POINTER64))
def __init__(self, ql, base):
self.ql = ql
self.base = base
def updateToMem(self):
self.ql.mem.write(self.base... |
class HTTPBearerAuth(AuthBase):
def __init__(self, password: str) -> None:
self.password = password
def __eq__(self, other: object) -> bool:
return (self.password == getattr(other, 'password', None))
def __ne__(self, other: object) -> bool:
return (not (self == other))
def __call... |
def formatValue(v):
if (isinstance(v, string_types) or isinstance(v, numbers.Number)):
return 'uniform {}'.format(v)
elif (isinstance(v, list) or isinstance(v, tuple)):
return 'uniform ({} {} {})'.format(v[0], v[1], v[2])
else:
raise Exception('Error: vector input {} is not string or... |
def to_configs(username='', password='', cookies='', quality='', output='', language=''):
configs = load_configs()
fname = '.udemy-dl.conf'
fmode = 'w'
if configs:
cfu = configs.get('username')
cfp = configs.get('password')
cfc = configs.get('cookies')
cfq = configs.get('... |
def test_mu0_against_analytics():
theta = ((rng.random() * np.pi) / 2)
phi = (rng.random() * np.pi)
s = np.sin(theta)
c = np.cos(theta)
mu = 0
cycles = 1
expt = kpz.KPZExperiment(cycles, mu, _TRIALS, theta, phi)
res = expt.run_experiment_amplitudes(_SAMPLER)
d_kur = res.jackknife_kur... |
('pytube.cli.YouTube')
('pytube.cli._ffmpeg_downloader')
def test_ffmpeg_process_audio_fallback_none_should_exit(_ffmpeg_downloader, youtube):
target = '/target'
streams = MagicMock()
youtube.streams = streams
stream = MagicMock()
streams.filter.return_value.order_by.return_value.last.side_effect = ... |
class MyStandardItem(QStandardItem):
    """Standard item whose sort order prefers the UserRole payload.

    When both items carry data under ``Qt.ItemDataRole.UserRole`` the
    comparison uses those payloads; otherwise it falls back to comparing
    the display text, matching QStandardItem's default behaviour.
    """

    def __lt__(self, other):
        # Bind both payloads once; a falsy value on either side means we
        # cannot order by UserRole and must compare the visible text.
        mine = self.data(Qt.ItemDataRole.UserRole)
        theirs = other.data(Qt.ItemDataRole.UserRole)
        if mine and theirs:
            return mine < theirs
        return self.text() < other.text()
class DataSetFamily(with_metaclass(DataSetFamilyMeta)):
_abstract = True
domain = GENERIC
slice_ndim = 2
_SliceType = DataSetFamilySlice
__call__
class extra_dims(object):
__isabstractmethod__ = True
def __get__(self, instance, owner):
return []
def _canonical_key... |
class ResolvePeer():
async def resolve_peer(self: 'pyrogram.Client', peer_id: Union[(int, str)]) -> Union[(raw.base.InputPeer, raw.base.InputUser, raw.base.InputChannel)]:
if (not self.is_connected):
raise ConnectionError('Client has not been started yet')
try:
return (await ... |
class BertEmbeddingPatternHandler():
def __init__(self):
patterns = [(['ResourceGather', 'Identity', 'branch', 'AddV2', 'AddV2', 'LayerNorm', 'Identity'], 4), (['ResourceGather', 'Identity', 'Tile', 'AddV2', 'AddV2', 'LayerNorm', 'Identity'], 4), (['ResourceGather', 'Identity', 'branch', 'Shape', 'StridedSl... |
class Gradient_Difference_Loss(nn.Module):
def __init__(self, alpha=1, chans=3, cuda=True):
super(Gradient_Difference_Loss, self).__init__()
self.alpha = alpha
self.chans = chans
Tensor = (torch.cuda.FloatTensor if cuda else torch.FloatTensor)
SobelX = [[1, 2, 1], [0, 0, 0], ... |
def usercache(initial_func=None, *, timeout=86400):
def inner(method):
(method)
def wrapped(self, *args, **kwargs):
return cached_context(prefix=('usercache_' + method.__name__), vary_on_user=True, timeout=timeout)((lambda user=None: method(self, *args, **kwargs)))(user=self)
ret... |
def read_customer_review_data(filename):
with open(filename) as f:
content = f.readlines()
content = [x.strip() for x in content]
list_of_meta_dict = []
for line in content:
if (('[t]' not in line) and ('[p]' not in line) and ('[cs]' not in line) and ('[cc]' not in line) and ('[s]' not i... |
class XPBDIntegrator():
def __init__(self, iterations, relaxation):
self.iterations = iterations
self.relaxation = relaxation
def simulate(self, model, state_in, state_out, dt):
with wp.ScopedTimer('simulate', False):
q_pred = wp.zeros_like(state_in.particle_q)
qd... |
def play(_request: WSGIRequest) -> None:
player.play()
try:
current_song = models.CurrentSong.objects.get()
now = timezone.now()
pause_duration = (now - current_song.last_paused).total_seconds()
current_song.created += datetime.timedelta(seconds=pause_duration)
current_so... |
class TwoWayBBlock(nn.Module):
def __init__(self):
super(TwoWayBBlock, self).__init__()
in_channels = 1152
self.branches = Concurrent()
self.branches.add_module('branch1', ConvSeqBranch(in_channels=in_channels, out_channels_list=(128, 160, 192), kernel_size_list=(1, (1, 7), (7, 1)), ... |
class ResponsePlotTestCase(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tempdir)
def fpath(self, fn):
return os.path.join(self.tempdir, fn)
def fpath_ref(self, fn):
try:
return common.test_data_f... |
_metaclass(AtspiMeta)
class AtspiWrapper(BaseWrapper):
_control_types = []
def __new__(cls, element_info):
return super(AtspiWrapper, cls)._create_wrapper(cls, element_info, AtspiWrapper)
def __init__(self, element_info):
BaseWrapper.__init__(self, element_info, backend.registry.backends['at... |
def test_sizes():
prev_area = 128
for (size, area, _) in SIZES:
assert ((size % 8) == 0)
assert ((size * size) == area)
assert ((1.7 * prev_area) <= area < (2.3 * prev_area))
prev_area = area
for i in range(1, len(SIZES)):
size1 = SIZES[(i - 1)][0]
size2 = SIZ... |
class daputouch(scan):
def __init__(self, job, timeout=60):
scan.__init__(self, job)
if (len(job) > 1):
self.port = job[0].split('|')[1]
self.scan_type = _whats_your_name()
self.timeout = timeout
def execute_scan(self, verbose):
redir_cmd = scan.gettunnel(self... |
def handle_dag_request(**kwargs) -> Any:
headers = kwargs['headers']
data = kwargs['data']
if ('data' in str(data)):
resp = json.dumps(1)
elif ('delete' in str(data)):
resp = json.dumps(1)
elif ('switch' in str(data)):
resp = '1'
else:
resp = [{'data_access_group_... |
def get_host_latency(host_url):
try:
return 0.25
global _latencies
if (host_url in _latencies):
return _latencies[host_url]
u = Url(host_url)
if u.host:
host = u.host
else:
host = 'localhost'
if u.port:
port = u.... |
def poly_learning_rate(optimizer, base_lr, curr_iter, max_iter, power=0.9, index_split=(- 1), scale_lr=10.0, warmup=False, warmup_step=500):
if (warmup and (curr_iter < warmup_step)):
lr = (base_lr * (0.1 + (0.9 * (curr_iter / warmup_step))))
else:
lr = (base_lr * ((1 - (float(curr_iter) / max_i... |
class BatchNorm2dReimpl(nn.Module):
def __init__(self, num_features, eps=1e-05, momentum=0.1):
super().__init__()
self.num_features = num_features
self.eps = eps
self.momentum = momentum
self.weight = nn.Parameter(torch.empty(num_features))
self.bias = nn.Parameter(to... |
class Notifiers():
def __init__(self, timer: Timer):
self.all_event_notifier = AllEventNotifier()
self.empty_queue_event_notifier = EmptyQueueEventNotifier(self.all_event_notifier)
self.end_trading_event_notifier = EndTradingEventNotifier(self.all_event_notifier)
self.scheduler = Sch... |
def without_uncommon_nodes(networks, eligible=None):
def items_outside(G, nbunch):
if (eligible is None):
return [n for n in G.nodes() if (n not in nbunch)]
return [n for n in G.nodes() if (G.nodes[n][eligible] and (n not in nbunch))]
common = set.intersection(*[set(G) for G in netwo... |
class Segment(object):
def __init__(self, uttid, spkr, stime, etime, text):
self.uttid = uttid
self.spkr = spkr
self.stime = round(stime, 2)
self.etime = round(etime, 2)
self.text = text
def change_stime(self, time):
self.stime = time
def change_etime(self, ti... |
_task('wsc')
class WSCTask(FairseqTask):
def add_args(parser):
parser.add_argument('data', metavar='DIR', help='path to data directory; we load <split>.jsonl')
parser.add_argument('--init-token', type=int, default=None, help='add token at the beginning of each batch item')
def __init__(self, arg... |
class DetectMethodCalls(DetectVarNames):
def enter(self, node, methods):
self.methods = methods
self.visit(node)
def visit_Call(self, node):
obj_name = self.get_full_name(node.func)
if (not obj_name):
return
pair = (obj_name, node)
self.methods.append(... |
class _PointnetSAModuleBase(nn.Module):
def __init__(self):
super().__init__()
self.npoint = None
self.groupers = None
self.mlps = None
def forward(self, xyz: torch.Tensor, features: torch.Tensor=None) -> (torch.Tensor, torch.Tensor):
new_features_list = []
xyz_fl... |
class TestViiL1bNCFileHandler(unittest.TestCase):
def setUp(self):
self.test_file_name = ((TEST_FILE + str(uuid.uuid1())) + '.nc')
with Dataset(self.test_file_name, 'w') as nc:
g1 = nc.createGroup('data')
g1.createDimension('num_chan_solar', 11)
g1.createDimension... |
def vgg_block(num_convs, in_channels, num_channels):
layers = []
for i in range(num_convs):
layers += [nn.Conv2d(in_channels=in_channels, out_channels=num_channels, kernel_size=3, padding=1)]
in_channels = num_channels
layers += [nn.ReLU()]
layers += [nn.MaxPool2d(kernel_size=2, stride=2... |
def check_pending_patches():
issue_name = 'pending-patches'
db = get_db()
problem_hosts = set()
for patch in db.patches.find({'pending_hosts': {'$not': {'$size': 0}}}):
for hostname in patch['pending_hosts']:
if (not client_exists(hostname)):
db.patches.update({'_id':... |
('make-struct-type', [values.W_Symbol, values.W_Object, values.W_Fixnum, values.W_Fixnum, default(values.W_Object, values.w_false), default(values.W_Object, values.w_null), default(values.W_Object, None), default(values.W_Object, values.w_false), default(values.W_List, values.w_null), default(values.W_Object, values.w_... |
.patch(PATCH_METHOD)
.patch('pynamodb.connection.base.uuid')
def test_signal_exception_pre_signal(mock_uuid, mock_req):
post_recorded = []
UUID = '123-abc'
def record_pre_dynamodb_send(sender, operation_name, table_name, req_uuid):
raise ValueError()
def record_post_dynamodb_send(sender, operati... |
def test_tensordictsequential_trace_consistency():
class Net(nn.Module):
def __init__(self, input_size=100, hidden_size=50, output_size=10):
super().__init__()
self.fc1 = nn.Linear(input_size, hidden_size)
self.fc2 = nn.Linear(hidden_size, output_size)
def forward... |
def test_can_edit_schedule(user, graphql_client):
    """A plain logged-in user must be reported as unable to edit the schedule."""
    graphql_client.force_login(user)
    query = '\n    {\n        me {\n            canEditSchedule\n        }\n    }\n    '
    resp = graphql_client.query(query)
    # The GraphQL request itself has to succeed before the payload is inspected.
    assert 'errors' not in resp
    assert resp['data']['me']['canEditSchedule'] is False
def test_error_checking():
with pytest.raises(ValueError):
pressure('A2995')
with pytest.raises(UnitsError):
pressure('1000', 'bars')
with pytest.raises(UnitsError):
pressure(pressure('30.00').value, 'psi')
with pytest.raises(UnitsError):
pressure(pressure('32.00').string... |
def _add_field_to_dataset(category: str, key: str, vcfzarr_key: str, variable_name: str, dims: List[str], field_def: Dict[(str, Any)], vcfzarr: zarr.Array, ds: xr.Dataset) -> None:
if ('ID' not in vcfzarr[vcfzarr_key].attrs):
return
vcf_number = field_def.get('Number', vcfzarr[vcfzarr_key].attrs['Number... |
class Effect6874(BaseEffect):
type = 'passive'
def handler(fit, src, context, projectionRange, **kwargs):
fit.modules.filteredChargeBoost((lambda mod: mod.charge.requiresSkill('Heavy Missiles')), 'explosionDelay', src.getModifiedItemAttr('shipBonusCC2'), skill='Caldari Cruiser', **kwargs)
fit.mo... |
class Effect5482(BaseEffect):
runTime = 'early'
type = 'passive'
def handler(fit, implant, context, projectionRange, **kwargs):
fit.appliedImplants.filteredItemMultiply((lambda mod: (mod.item.group.name == 'Special Edition Implant')), 'agilityBonus', implant.getModifiedItemAttr('implantSetChristmas'... |
def parse_text_complete(code):
if ('\n' in code):
try:
return (compile_command(code, '<input>', 'exec') is not None)
except Exception:
return True
elif (len(code.strip()) == 0):
return True
elif ((code[0] == '?') or (code[(- 1)] == '?')):
return True
... |
.parametrize('detector_bandwidth', [3, 20, 200])
def test_detector_bandwidth(resetted_hp34401a, detector_bandwidth):
resetted_hp34401a.function_ = 'FREQ'
resetted_hp34401a.detector_bandwidth = detector_bandwidth
assert (len(resetted_hp34401a.check_errors()) == 0)
assert (resetted_hp34401a.detector_bandw... |
def main(results_root, min_segment_dur_ini, csv_filename):
results_root = Path(results_root)
indiv_roots = sorted([subdir for subdir in results_root.iterdir() if subdir.is_dir()])
config = configparser.ConfigParser()
config.read(Path(min_segment_dur_ini).expanduser().resolve())
min_segment_durs = {k... |
def _get_info_for_reused_node(traced_model: torch.fx.GraphModule, node: torch.fx.Node, node_name_to_scope: Dict[(str, Tuple[(str, type)])]) -> Tuple[(torch.fx.GraphModule, str, str)]:
parent_module = traced_model
new_module_name = ('module_' + node.name)
new_module_qualified_name = new_module_name
if no... |
def test_rpcs_calculate_transform_pass_kwargs_to_transformer(caplog):
with rasterio.open('tests/data/RGB.byte.rpc.vrt') as src:
caplog.set_level(logging.DEBUG)
(_, width, height) = calculate_default_transform('EPSG:4326', 'EPSG:32610', width=7449, height=11522, rpcs=src.rpcs, RPC_HEIGHT=1000)
... |
class Dataset():
def __init__(self, args):
print('loading data')
random.seed(args.seed)
self.batch_size = args.batch_size
self.data_dir = args.data_dir
self.topic = (args.task == 'topic')
self.formality = (args.task == 'formality')
self.iambic = (args.task == ... |
class SolutionData():
def __init__(self, unscaled, scaled, n_nodes: list[(int, ...)]):
self.unscaled = unscaled
self.scaled = scaled
self.n_phases = len(self.unscaled)
self.n_nodes = n_nodes
def from_unscaled(ocp, unscaled: list, variable_type: str):
n_nodes = [nlp.n_stat... |
class TestAssertIsNot(TestCase):
def test_you(self):
self.assertIsNot(abc, 'xxx')
def test_me(self):
self.assertIsNot(123, (xxx + y))
self.assertIsNot(456, (aaa and bbb))
self.assertIsNot(789, (ccc or ddd))
self.assertIsNot(123, (True if You else False))
def test_ever... |
def calculate_MinDCF(scores, labels, p_target=0.01, c_miss=10, c_false_alarm=1):
if (len(scores) != len(labels)):
raise Exception('length between scores and labels is different')
elif (len(scores) == 0):
raise Exception("There's no elements in scores")
(fpr, tpr, _) = metrics.roc_curve(label... |
def delete_job(joblst, kubecli: KrknKubernetes):
for jobname in joblst:
try:
api_response = kubecli.get_job_status(jobname, namespace='default')
if (api_response.status.failed is not None):
pod_name = get_job_pods(api_response, kubecli)
pod_stat = kube... |
('/PenguinDome/v1/server_pipe/client/open', methods=('POST',))
('/penguindome/v1/server_pipe/client/open', methods=('POST',))
_signature
def pipe_open():
data = json.loads(request.form['data'])
uuid = data['pipe_id']
with pipes_lock:
if (uuid not in pipes):
log.error('Attempt to open non... |
class RelicSetSkillModel(Struct):
RelicSet: Dict[(str, Dict[(str, Union[(RelicSetStatusAdd, None)])])]
def from_json(cls, data: Dict):
return cls(RelicSet={str(k): {str(k2): (RelicSetStatusAdd(Property=v2['Property'], Value=v2['Value']) if v2 else None) for (k2, v2) in v.items()} for (k, v) in data.item... |
def __get_filter(image, d_0: int=80, high: float=1.5, low: float=0.25, c: int=1):
(h, w) = image.shape
(u, v) = np.meshgrid(np.arange(w), np.arange(h))
(median_u, median_v) = (np.floor((w / 2)), np.floor((h / 2)))
u = (u - median_u)
v = (v - median_v)
dist_matrix = ((u ** 2) + (v ** 2))
tmp ... |
def jordan_wigner_dual_basis_jellium(grid: Grid, spinless: bool=False, include_constant: bool=False) -> QubitOperator:
n_orbitals = grid.num_points
volume = grid.volume_scale()
if spinless:
n_qubits = n_orbitals
else:
n_qubits = (2 * n_orbitals)
hamiltonian = QubitOperator()
mome... |
def main():
parser = HfArgumentParser((ModelArguments, DataTrainingArguments, Seq2SeqTrainingArguments))
if ((len(sys.argv) == 2) and sys.argv[1].endswith('.json')):
(model_args, data_args, training_args) = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
else:
(model_args, dat... |
def _run_for_camera(camera, near, far, check_halfway):
pos_ndc1 = la.vec_transform((0, 0, (- near)), camera.projection_matrix)
pos_ndc2 = la.vec_transform((0, 0, ((- 0.5) * (near + far))), camera.projection_matrix)
pos_ndc3 = la.vec_transform((0, 0, (- far)), camera.projection_matrix)
print('------', ca... |
class AttnSum(nn.Module):
def __init__(self, x_size, y_size, identity=False):
super(AttnSum, self).__init__()
if (not identity):
self.linear = nn.Linear(y_size, x_size)
else:
self.linear = None
def forward(self, x, y, x_mask, candidate_aggre):
x_ans_mask =... |
def test_wheel_src_module(copy_sample):
td = copy_sample('module3')
make_wheel_in((td / 'pyproject.toml'), td)
whl_file = (td / 'module3-0.1-py2.py3-none-any.whl')
assert_isfile(whl_file)
with unpack(whl_file) as unpacked:
assert_isfile(Path(unpacked, 'module3.py'))
assert_isdir(Path... |
class GlShader():
def __init__(self, shader_type, source):
self.code_ = source
self.shader_type_ = shader_type
self.id_ = gl.glCreateShader(self.shader_type_)
gl.glShaderSource(self.id_, source)
gl.glCompileShader(self.id_)
success = gl.glGetShaderiv(self.id_, gl.GL_C... |
def validate_generation_args(args):
assert (args.unkpen == 0), "PyTorch Translate does not use fairseq's --unkpen flag. Use --unk-reward instead, and check the flag description regarding sign polarity meaning."
assert (args.lenpen == 1), 'Argument --lenpen is IGNORED by pytorch_translate. Use --length-penalty i... |
def extract_cnn_feature_classification(model, inputs, modules=None):
model.eval()
inputs = to_torch(inputs)
inputs = Variable(inputs).cuda()
with torch.no_grad():
if (modules is None):
outputs = model.extract_feat(inputs)
outputs = outputs.data.cpu()
return ou... |
class QuantizationSimModel():
def __init__(self, model: ModelProto, dummy_input: Dict[(str, np.ndarray)]=None, quant_scheme: QuantScheme=QuantScheme.post_training_tf_enhanced, rounding_mode: str='nearest', default_param_bw: int=8, default_activation_bw: int=8, use_symmetric_encodings: bool=False, use_cuda: bool=Tru... |
class Test_get_image_size(unittest.TestCase):
data = [{'path': 'lookmanodeps.png', 'width': 251, 'height': 208, 'file_size': 22228, 'type': 'PNG'}]
def setUp(self):
pass
def test_get_image_size_from_bytesio(self):
img = self.data[0]
p = img['path']
with io.open(p, 'rb') as fp... |
def master2model(model_params, master_params, flat_master: bool=False) -> None:
if flat_master:
for (model_group, master_group) in zip(model_params, master_params):
if (len(model_group) != 0):
for (model, master) in zip(model_group, _unflatten_dense_tensors(master_group[0].data, ... |
def test(test_loader, model, criterion, it, logger, writer):
model.eval()
losses = AverageMeter()
top1 = AverageMeter()
all_pred = []
time1 = time.time()
with torch.no_grad():
for (idx, (images, labels)) in enumerate(test_loader):
images = images.float().cuda()
la... |
class BottleNeck(nn.Module):
expansion = 4
def __init__(self, in_channel, channel, stride=1, downsample=None):
super().__init__()
self.conv1 = nn.Conv2d(in_channel, channel, kernel_size=1, stride=stride, bias=False)
self.bn1 = nn.BatchNorm2d(channel)
self.conv2 = nn.Conv2d(channe... |
def _distance_from_center_forward(var: tuple, center: tuple, p: Proj):
if (center is None):
center = (0, 0)
center_as_angle = p(*center, inverse=True, errcheck=True)
pole = 90
if (abs((abs(center_as_angle[1]) - pole)) < 0.001):
direction_of_poles = _sign(center_as_angle[1])
var =... |
class State(BaseState):
hints = [STATE_HINT_LVL1, STATE_HINT_LVL2, STATE_HINT_LVL3]
def character_enters(self, char):
self.cinematic(GREETING.format(name=char.key))
def init(self):
self.room.db.desc = ROOM_DESC
door = self.create_object(Door, key='door to the cabin', aliases=['door']... |
class History(QObject):
changed = pyqtSignal()
def __init__(self, *, history=None, parent=None):
super().__init__(parent)
self._tmphist = None
if (history is None):
self.history: MutableSequence[str] = []
else:
self.history = history
def __getitem__(se... |
def detect_clearsky_threshold_data():
data_file = (DATA_DIR / 'detect_clearsky_threshold_data.csv')
expected = pd.read_csv(data_file, index_col=0, parse_dates=True, comment='#')
expected = expected.tz_localize('UTC').tz_convert('Etc/GMT+7')
metadata = {}
with data_file.open() as f:
for line ... |
class _ChangeMoveOccurrencesHandle():
def __init__(self, new_name):
self.new_name = new_name
self.occurred = False
def occurred_inside_skip(self, change_collector, occurrence):
pass
def occurred_outside_skip(self, change_collector, occurrence):
(start, end) = occurrence.get_p... |
def test_voring_closed_and_user_is_not_authenticated(graphql_client, submission_factory, user):
submission = _submission(submission_factory, user, conference__active_voting=False)
data = _query(graphql_client, submission)
assert (data['submission']['title'] == submission.title.localize('en'))
assert (da... |
def schedule_hostgroup_host_downtime(hostgroup_name, start_time, end_time, fixed, trigger_id, duration, author, comment, command_file=None, timestamp=0):
return send_command('SCHEDULE_HOSTGROUP_HOST_DOWNTIME', command_file, timestamp, hostgroup_name, start_time, end_time, fixed, trigger_id, duration, author, commen... |
class TestWaiting():
()
def instr(self):
class Faked(Instrument):
def wait_for(self, query_delay=0):
self.waited = query_delay
return Faked(ProtocolAdapter(), name='faked')
def test_waiting(self):
instr = Instrument(ProtocolAdapter(), 'faked')
stop... |
class SongListPaned(RVPaned):
def __init__(self, song_scroller, qexpander):
super().__init__()
self.pack1(song_scroller, resize=True, shrink=False)
self.pack2(qexpander, resize=True, shrink=False)
self.set_relative(config.getfloat('memory', 'queue_position', 0.75))
self.conne... |
def process_url(url):
valid_moves = re.compile('(ATK|DEF|HUG)')
moves = []
for part in url.upper().split('/'):
if (('VIEORD' in part) or ('VIXORD' in part)):
moves = valid_moves.findall(part)[::(- 1)]
url = url.lower()
seed = 0
seed_str = url.split('/')[(- 1)]
if (('vieor... |
def build(preprocessor_step_config):
step_type = preprocessor_step_config.WhichOneof('preprocessing_step')
if (step_type in PREPROCESSING_FUNCTION_MAP):
preprocessing_function = PREPROCESSING_FUNCTION_MAP[step_type]
step_config = _get_step_config_from_proto(preprocessor_step_config, step_type)
... |
def region_or_label_to_mask(segmentation: np.ndarray, region_or_label: Union[(int, Tuple[(int, ...)])]) -> np.ndarray:
if np.isscalar(region_or_label):
return (segmentation == region_or_label)
else:
mask = np.zeros_like(segmentation, dtype=bool)
for r in region_or_label:
mask... |
class M2M100Tokenizer(PreTrainedTokenizer):
vocab_files_names = VOCAB_FILES_NAMES
max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
model_input_names = ['input_ids', 'attention_mask']
prefix_tokens: List[int] = []
suffix_tokens... |
def test_skip_from_fixture(pytester: Pytester) -> None:
pytester.makepyfile(**{'tests/test_1.py': '\n import pytest\n def test_pass(arg):\n pass\n \n def arg():\n condition = True\n if condition:\n pytest.skip("Fixture conditional skip")\n ... |
class TestReportsMethodKwargRenames(MethodRenamedBase):
api_class = apis.Reports
.parametrize('old, new', [('marketplaceids', 'marketplace_ids')])
def test_request_report_kwargs_renamed(self, api_instance, old, new):
required = ['report_type']
method = api_instance.request_report
sel... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.