code stringlengths 281 23.7M |
|---|
class TestLogitNormal(BaseTestDistributionRandom):
def logit_normal_rng_fn(self, rng, size, loc, scale):
return sp.expit(st.norm.rvs(loc=loc, scale=scale, size=size, random_state=rng))
pymc_dist = pm.LogitNormal
pymc_dist_params = {'mu': 5.0, 'sigma': 10.0}
expected_rv_op_params = {'mu': 5.0, 's... |
def load_checkpoint(model, checkpoint_path, strict=True):
state_dict = load_state_dict(checkpoint_path)
if (('positional_embedding' in state_dict) and (not hasattr(model, 'positional_embedding'))):
state_dict = convert_to_custom_text_state_dict(state_dict)
resize_pos_embed(state_dict, model)
inc... |
def test_install_suffix(pipx_temp_env, capsys):
name = 'pbr'
suffix = '_a'
assert (not run_pipx_cli(['install', PKG[name]['spec'], f'--suffix={suffix}']))
captured = capsys.readouterr()
name_a = app_name(f'{name}{suffix}')
assert (f'- {name_a}' in captured.out)
suffix = '_b'
assert (not ... |
def add_start_docstrings_to_callable(*docstr):
def docstring_decorator(fn):
class_name = ':class:`~transformers.{}`'.format(fn.__qualname__.split('.')[0])
intro = ' The {} forward method, overrides the :func:`__call__` special method.'.format(class_name)
note = '\n .. note::\n Al... |
_sentencepiece
_tokenizers
_pandas
class LayoutXLMTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
tokenizer_class = LayoutXLMTokenizer
rust_tokenizer_class = LayoutXLMTokenizerFast
test_rust_tokenizer = True
from_pretrained_filter = filter_non_english
test_seq2seq = False
test_sentenc... |
def system_bench(func, dims):
from qutip.random_objects import rand_ket
ratio = 0
ratio_old = 0
nnz_old = 0
for N in dims:
L = func(N).data
vec = rand_ket(L.shape[0], 0.25).full().ravel()
nnz = L.nnz
out = np.zeros_like(vec)
ser = _min_timer(_spmvpy, L.data, L... |
class MetaDataGenerator(object):
def __init__(self, num_samples_per_class):
self.num_samples_per_class = num_samples_per_class
self.num_unlabeled_samples = FLAGS.nb_ul_samples
self.num_classes = FLAGS.way_num
metatrain_labeled_folder = (((FLAGS.data_path + '/data/') + FLAGS.dataset) ... |
def main(args):
if (len(args) != 2):
sys.stderr.write('Usage: example.py <aggressiveness> <path to wav file>\n')
sys.exit(1)
(audio, sample_rate) = read_wave(args[1])
vad = webrtcvad.Vad(int(args[0]))
frames = frame_generator(30, audio, sample_rate)
frames = list(frames)
segments... |
_REGISTRY.register()
def build_res2net_bifpn_backbone(cfg, input_shape: ShapeSpec):
bottom_up = build_res2net_backbone(cfg, input_shape)
in_features = cfg.MODEL.FPN.IN_FEATURES
backbone = BiFPN(cfg=cfg, bottom_up=bottom_up, in_features=in_features, out_channels=cfg.MODEL.BIFPN.OUT_CHANNELS, norm=cfg.MODEL.B... |
def get_version():
version = '0.0.0'
pkg_info = ((CURRENT_DIR / 'pylegu.egg-info') / 'PKG-INFO')
git_dir = (CURRENT_DIR / '.git')
if git_dir.is_dir():
is_tagged = False
try:
is_tagged = check_if_tagged()
except Exception:
is_tagged = False
try:
... |
class Effect6702(BaseEffect):
    """Passive rig effect.

    Scales the 'rigDrawbackBonus' drawback attribute of all fitted
    'Rig Energy Weapon' modules linearly with the source skill level.
    """

    type = 'passive'

    def handler(fit, src, context, projectionRange, **kwargs):
        # Read the level first, then the modified attribute, matching the
        # original evaluation order.
        lvl = src.level
        total_drawback = src.getModifiedItemAttr('rigDrawbackBonus') * lvl
        fit.modules.filteredItemBoost(
            lambda mod: mod.item.group.name == 'Rig Energy Weapon',
            'drawback',
            total_drawback,
            **kwargs,
        )
class IDWriteFactory4(IDWriteFactory3, com.pIUnknown):
_methods_ = [('TranslateColorGlyphRun4', com.STDMETHOD(D2D_POINT_2F, POINTER(DWRITE_GLYPH_RUN), c_void_p, DWRITE_GLYPH_IMAGE_FORMATS, DWRITE_MEASURING_MODE, c_void_p, UINT32, POINTER(IDWriteColorGlyphRunEnumerator1))), ('ComputeGlyphOrigins_', com.STDMETHOD()),... |
class LeNet(PruningModule):
def __init__(self, mask=False):
    """Build a 784-300-100-10 fully-connected LeNet-style classifier.

    When ``mask`` is True every layer is a ``MaskedLinear`` (the prunable
    variant provided by the project); otherwise plain ``nn.Linear`` layers
    are used. The 784 input width matches a flattened 28x28 image.
    """
    super(LeNet, self).__init__()
    # Pick the layer constructor once so all three layers agree.
    linear = (MaskedLinear if mask else nn.Linear)
    self.fc1 = linear(784, 300)
    self.fc2 = linear(300, 100)
    self.fc3 = linear(100, 10)
def forward(self, x):
x = x.view((- 1), 784)... |
def conv2d_args_preprocessor(args, kwargs):
converted = []
if (len(args) > 4):
raise TypeError('Layer can receive at most 3 positional arguments.')
if (len(args) == 4):
if (isinstance(args[2], int) and isinstance(args[3], int)):
new_keywords = ['padding', 'strides', 'data_format'... |
def _cost_on_direction(cost_map, bbox, inter_region, inter_edge, mask, edgeness, corner_heatmap, direction, density_img, intra_map):
assert (0 <= direction < np.pi)
if (direction < (np.pi / 2)):
diag_end_1 = (0, bbox[3])
diag_end_2 = (bbox[2], 0)
diag_type = 'main'
else:
diag... |
class BoxListTest(tf.test.TestCase):
def test_num_boxes(self):
data = tf.constant([[0, 0, 1, 1], [1, 1, 2, 3], [3, 4, 5, 5]], tf.float32)
expected_num_boxes = 3
boxes = box_list.BoxList(data)
with self.test_session() as sess:
num_boxes_output = sess.run(boxes.num_boxes())... |
def create_s3_file_system(s3_client_kwargs: dict) -> s3fs.S3FileSystem:
if (not s3_client_kwargs):
return s3fs.S3FileSystem(anon=True)
config_kwargs = {}
if (s3_client_kwargs.get('config') is not None):
boto_config = s3_client_kwargs.pop('config')
for (key, val) in boto_config.__dict... |
class ResizeKeepRatio():
def __init__(self, size, longest=0.0, interpolation='bilinear', fill=0):
if isinstance(size, (list, tuple)):
self.size = tuple(size)
else:
self.size = (size, size)
self.interpolation = str_to_interp_mode(interpolation)
self.longest = f... |
def convert_date_to_fronting(thought):
if ('Today is 04/19/1969.' in thought):
return '04/19/1969 is the date today. 24 hours later, or one day after, would be the date 04/20/1969.'
if ('One day after 06/01/1943 is 06/02/1943,' in thought):
return "06/02/1943, that's the date one day after 06/01... |
def test_get_example_spectral_response():
sr = spectrum.get_example_spectral_response()
assert_equal(len(sr), 185)
assert_equal(np.sum(sr.index), 136900)
assert_approx_equal(np.sum(sr), 107.6116)
wavelength = [270, 850, 950, 1200, 4001]
expected = [0.0, 0.92778, 1.0, 0.0, 0.0]
sr = spectrum.... |
def test_gradient_cumulative_optimizer_hook():
class ToyModel(nn.Module):
def __init__(self, with_norm=False):
super().__init__()
self.fp16_enabled = False
self.fc = nn.Linear(3, 2)
nn.init.constant_(self.fc.weight, 1.0)
nn.init.constant_(self.fc.b... |
def train_one_epoch(model, optimizer, train_loader, model_func, lr_scheduler, accumulated_iter, optim_cfg, rank, tbar, total_it_each_epoch, dataloader_iter, tb_log=None, leave_pbar=False, dist_train=False, logger=None):
if (total_it_each_epoch == len(train_loader)):
dataloader_iter = iter(train_loader)
... |
def train_multi(args):
init_logger()
nb_gpu = args.world_size
mp = torch.multiprocessing.get_context('spawn')
error_queue = mp.SimpleQueue()
error_handler = ErrorHandler(error_queue)
procs = []
for i in range(nb_gpu):
device_id = i
procs.append(mp.Process(target=run, args=(ar... |
def load_packets():
packet_path = os.path.join('pymine', 'net', 'packets')
packet_dot_path = packet_path.replace('\\', '/').replace('/', '.')
packet_map = {}
packet_map_clientbound = {}
for state_name in os.listdir(packet_path):
state = STATES.encode(state_name)
packet_map[state] = {... |
class GitLabBuildTrigger(BuildTriggerHandler):
def service_name(cls):
return 'gitlab'
def _get_authorized_client(self):
auth_token = (self.auth_token or 'invalid')
api_version = self.config.get('API_VERSION', '4')
client = gitlab.Gitlab(gitlab_trigger.api_endpoint(), oauth_token=... |
def repartition(annotated_delta: DeltaAnnotated, destination_partition: Partition, repartition_type: RepartitionType, repartition_args: dict, max_records_per_output_file: int, enable_profiler: bool, metrics_config: Optional[MetricsConfig], read_kwargs_provider: Optional[ReadKwargsProvider], s3_table_writer_kwargs: Opti... |
def eval_soft_contacts(particle_x: wp.array(dtype=wp.vec3), particle_v: wp.array(dtype=wp.vec3), body_q: wp.array(dtype=wp.transform), body_qd: wp.array(dtype=wp.spatial_vector), body_com: wp.array(dtype=wp.vec3), ke: float, kd: float, kf: float, ka: float, mu: float, contact_count: wp.array(dtype=int), contact_particl... |
def render_to(template):
def decorator(func):
(func)
def wrapper(request, *args, **kwargs):
out = (func(request, *args, **kwargs) or {})
if isinstance(out, dict):
out = render(request, template, common_context(settings.AUTHENTICATION_BACKENDS, load_strategy(),... |
def ql_syscall_readlink(ql: Qiling, pathname: int, buf: int, bufsize: int):
    """Handle the readlink(2) syscall.

    Reads the NUL-terminated path at guest address ``pathname``, resolves it
    to an absolute virtual path, and delegates to ``__do_readlink`` —
    presumably writing the link target into ``buf`` (confirm in its
    definition). The result is traced and returned unchanged.
    """
    guest_path = ql.os.utils.read_cstring(pathname)
    abs_path = ql.os.path.virtual_abspath(guest_path)

    result = __do_readlink(ql, abs_path, buf)

    # Keep the trace format identical to the original implementation.
    ql.log.debug(f'readlink("{guest_path}", {buf:#x}, {bufsize:#x}) = {result}')
    return result
class Solution():
def wordPattern(self, pattern: str, str: str) -> bool:
if ((not pattern) or (not str)):
return False
str_list = str.split(' ')
if (len(pattern) != len(str_list)):
return False
if (len(set(pattern)) != len(set(str_list))):
return F... |
class MessageHandler(BaseHandler[(Update, CCT)]):
__slots__ = ('filters',)
def __init__(self, filters: Optional[filters_module.BaseFilter], callback: HandlerCallback[(Update, CCT, RT)], block: DVType[bool]=DEFAULT_TRUE):
super().__init__(callback, block=block)
self.filters: filters_module.BaseFi... |
class StatusBar(QWidget):
resized = pyqtSignal('QRect')
moved = pyqtSignal('QPoint')
STYLESHEET = _generate_stylesheet()
def __init__(self, *, win_id, private, parent=None):
super().__init__(parent)
self.setObjectName(self.__class__.__name__)
self.setAttribute(Qt.WidgetAttribute.... |
def xautolock_status(user, display):
procs = (p for p in process_dict_iter(('username', 'environ', 'exe', 'cmdline')) if (p['username'] == user))
procs = (p for p in procs if (p['environ'].get('DISPLAY', None) == display))
procs = (p for p in procs if p['exe'].endswith('/xautolock'))
for proc in procs:
... |
def smart_lower(value):
url_nc = re.compile(f'({RE_WEBURL_NC})')
if url_nc.search(value):
substrings = url_nc.split(value)
for (idx, substr) in enumerate(substrings):
if (not url_nc.match(substr)):
substrings[idx] = i18n_lower(substr)
return ''.join(substrings... |
def _rotate_basis(term, transformation_matrix):
n = transformation_matrix.shape[0]
rotated_op = MajoranaOperator()
for tup in itertools.product(range(n), repeat=len(term)):
coeff = 1.0
for (i, j) in zip(term, tup):
coeff *= transformation_matrix[(j, i)]
rotated_op += Majo... |
def test_keys_of_mixed_types() -> None:
OBJ = {0: {'0': 'foo', 1: 'bar'}, '1': 'baz'}
EXPECTED_MANIFEST = {'': DictEntry(keys=[0, '1']), '/0': DictEntry(keys=['0', 1])}
EXPECTED_FLATTENED = {'/0/0': 'foo', '/0/1': 'bar', '/1': 'baz'}
(manifest, flattened) = flatten(obj=OBJ, prefix='')
assert (manife... |
class TestNeuralNetwork(QiskitMachineLearningTestCase):
def _get_batch_size(input_data):
batch_size = 1
if (isinstance(input_data, list) and isinstance(input_data[0], list)):
batch_size = len(input_data)
return batch_size
(((0, 0, True, 1), None), ((0, 1, True, 1), None), ((0... |
def get_set():
checked_list = check_match()
match_list = pd.read_csv('match.csv')
match_id_list = match_list['id'].values
match_name_list = match_list['video'].values
success_count = 0
for (id, name) in zip(match_id_list, match_name_list):
if (id in checked_list):
success_cou... |
def test_pix_cen():
mc_hdu = moment_cube()
sc = SpectralCube.read(mc_hdu)
(s, y, x) = sc._pix_cen()
bytes_per_pix = 8
assert (find_base_nbytes(s) == (sc.shape[0] * bytes_per_pix))
assert (find_base_nbytes(y) == ((sc.shape[1] * sc.shape[2]) * bytes_per_pix))
assert (find_base_nbytes(x) == ((s... |
def upgrade(op, tables, tester):
op.create_table('userorganizationquota', sa.Column('id', sa.Integer, nullable=False), sa.Column('namespace_id', sa.Integer, nullable=False), sa.Column('limit_bytes', sa.BigInteger, nullable=False), sa.PrimaryKeyConstraint('id', name=op.f('pk_userorganizationquota')), sa.ForeignKeyCo... |
def _get_nonbonded_force(system: openmm.System, topology: Topology) -> openmm.NonbondedForce:
existing = [system.getForce(i) for i in range(system.getNumForces())]
existing = [f for f in existing if (type(f) == openmm.NonbondedForce)]
if (len(existing) == 0):
force = openmm.NonbondedForce()
... |
class Abstract3DUNet(nn.Module):
def __init__(self, in_channels, out_channels, final_sigmoid, basic_module, f_maps=64, layer_order='gcr', num_groups=8, num_levels=4, is_segmentation=False, testing=False, **kwargs):
super(Abstract3DUNet, self).__init__()
self.testing = testing
if isinstance(f... |
def save_seq_info_data(seq):
seq_name = basename(seq)
ss = seq_name.split('_')
obj_name = seq_name.split('_')[2]
(date, subj) = (ss[0], ss[1])
assert (obj_name in OBJ_NAMES), f'invalid object name {obj_name} found!'
config = f'../../calibs/{date}/config'
intrinsic = f'../../calibs/intrinsics... |
class Event(GetAttrData):
def __init__(self, binarydata=None, display=None, **keys):
if binarydata:
self._binary = binarydata
(self._data, data) = self._fields.parse_binary(binarydata, display, rawdict=True)
self._data['send_event'] = (not (not (self._data['type'] & 128))... |
def smoothed_softmax_cross_entropy_with_logits(**kwargs):
logits = kwargs.get('logits')
labels = kwargs.get('labels')
smoothing = (kwargs.get('smoothing') or 0.0)
normalize = kwargs.get('normalize')
scope = kwargs.get('scope')
if ((logits is None) or (labels is None)):
raise ValueError('... |
class SpatialNorm(nn.Module):
def __init__(self, divergence='kl'):
    """Configure the divergence criterion and normalization.

    Args:
        divergence: 'kl' selects ``nn.KLDivLoss``; any other value falls
            back to ``nn.MSELoss``.
    """
    # Bug fix: nn.Module.__init__ must run before any submodule is
    # assigned, otherwise PyTorch raises
    # "cannot assign module before Module.__init__() call".
    super(SpatialNorm, self).__init__()
    if (divergence == 'kl'):
        self.criterion = nn.KLDivLoss()
    else:
        self.criterion = nn.MSELoss()
    # Softmax over the last dimension, applied to both predictions.
    self.norm = nn.Softmax(dim=(- 1))
def forward(self, pred_S, pred_T):
norm_S = self.norm(pred_S)
... |
def test_assert_raises_on_assertthis_not_equals_floats():
context = Context({'assert': {'this': 123.45, 'equals': 5.432}})
with pytest.raises(AssertionError) as err_info:
assert_step.run_step(context)
assert (str(err_info.value) == "assert assert['this'] is of type float and does not equal assert['e... |
def write_game_description(game: GameDescription) -> dict:
return {'schema_version': game_migration.CURRENT_VERSION, 'game': game.game.value, 'resource_database': write_resource_database(game.resource_database), 'layers': frozen_lib.unwrap(game.layers), 'starting_location': game.starting_location.as_json, 'initial_... |
class TableCreator():
def __init__(self, cols: Sequence[Column], *, tab_width: int=4) -> None:
if (tab_width < 1):
raise ValueError('Tab width cannot be less than 1')
self.cols = copy.copy(cols)
self.tab_width = tab_width
for col in self.cols:
col.header = col... |
def test_alias_create_with_macro_name(base_app):
    """An alias may not reuse the name of an existing macro."""
    macro = 'my_macro'
    # First register a macro under the name, then try to shadow it.
    run_cmd(base_app, f'macro create {macro} help')
    out, err = run_cmd(base_app, f'alias create {macro} help')
    assert ('Alias cannot have the same name as a macro' in err[0])
    assert (base_app.last_result is False)
class SuperNetwork(nn.Module):
def __init__(self, shadow_bn, layers=12, classes=10):
super(SuperNetwork, self).__init__()
self.layers = layers
self.stem = nn.Sequential(nn.Conv2d(3, channel[0], kernel_size=3, stride=1, padding=1, bias=False), nn.BatchNorm2d(channel[0]), nn.ReLU6(inplace=True... |
_HEADS_REGISTRY.register()
class VLPLMROIHeads(StandardROIHeads):
def _init_box_head(self, cfg, input_shape):
ret = super()._init_box_head(cfg, input_shape)
del ret['box_predictor']
ret['box_predictor'] = VLPLMFastRCNNOutputLayers(cfg, ret['box_head'].output_shape)
return ret
_gr... |
class ModbusSimulatorContext():
start_time = int(datetime.now().timestamp())
def __init__(self, config: dict[(str, Any)], custom_actions: dict[(str, Callable)]) -> None:
self.registers: list[int] = []
self.fc_offset: dict[(int, int)] = {}
self.register_count = 0
self.type_excepti... |
class CelebADataset(dataset_mixin.DatasetMixin):
def __init__(self, resize=128):
self.resize = resize
self.image_files = glob('/home/yasin/sharedLocal/data/celeba/img_align_celeba/*.jpg')
print(len(self.image_files))
def __len__(self):
return len(self.image_files)
def get_exa... |
class Discriminator128(chainer.Chain):
def __init__(self, ch=512, wscale=0.02):
super(Discriminator128, self).__init__()
w = chainer.initializers.Normal(wscale)
with self.init_scope():
self.in_ = SNConvolution2D(3, (ch // 8), 1, 1, 0, initialW=w)
self.b4 = Discriminat... |
_factory
def Rename(**translations):
fields = None
translations = {v: k for (k, v) in translations.items()}
_context
_raw_input
def _Rename(context, bag):
nonlocal fields, translations
if (not fields):
fields = tuple((translations.get(field, field) for field in context.ge... |
class DummyEncoder(Encoder):
def trainable(self) -> bool:
    # The dummy encoder has no learnable parameters.
    return False
def embedding_size(self) -> int:
    # Fixed toy embedding width used by the tests.
    return 3
def forward(self, batch):
    # Identity transform: the batch is passed through unchanged.
    return batch
def save(self, output_path: str):
    # Nothing to persist for the dummy encoder.
    pass
def load(cls, input_path: str) -> Encoder:
    # No-op loader; returns None despite the annotated Encoder return.
    # NOTE(review): first parameter is named `cls` but no @classmethod
    # decorator is visible here — confirm how this is invoked.
    pass
def get_collate_f... |
class GuiMergeLocalDroneStacksCommand(wx.Command):
def __init__(self, fitID, srcPosition, dstPosition):
wx.Command.__init__(self, True, 'Merge Local Drone Stacks')
self.internalHistory = InternalCommandHistory()
self.fitID = fitID
self.srcPosition = srcPosition
self.dstPositi... |
class TestBaseLithiumIonModel(TestCase):
def test_incompatible_options(self):
    # 'uniform transverse' convection is not implemented for lithium-ion
    # base models, so construction must raise an OptionError mentioning it.
    with self.assertRaisesRegex(pybamm.OptionError, 'convection not implemented'):
        pybamm.lithium_ion.BaseModel({'convection': 'uniform transverse'})
def test_default_parameters(self):
model = pybamm.lithium_io... |
def test_version(monkeypatch, capsys):
mock_exit = mock.Mock(side_effect=ValueError('raised in test to exit early'))
with mock.patch.object(sys, 'exit', mock_exit), pytest.raises(ValueError, match='raised in test to exit early'):
assert (not run_pipx_cli(['--version']))
captured = capsys.readouterr(... |
def test_positional_only():
def f(__x, _f__x):
pass
class Y():
def f(self, __x):
pass
class X():
def f(self, __x, _Y__x):
pass
asc = Checker().arg_spec_cache
assert (asc.get_argspec(f) == Signature.make([SigParameter('__x', ParameterKind.PO... |
()
def validatetag(context):
result = context.run("git describe --exact-match --tags $(git log -n1 --pretty='%h')")
git_tag = result.stdout.rstrip()
ver_regex = re.compile('(\\d+)\\.(\\d+)\\.(\\d+)')
match = ver_regex.fullmatch(git_tag)
if (match is None):
print('Tag {!r} does not appear to ... |
class LogFormatterForFiles(logging.Formatter):
def formatTime(self, record, datefmt=None):
    """Render the log record's timestamp in UTC.

    Converts ``record.created`` (a POSIX timestamp) to an aware UTC
    datetime and formats it with ``datefmt``, defaulting to a compact
    ISO-like form with microseconds, e.g. ``20240131T235959.123456Z``.
    """
    date = datetime.datetime.fromtimestamp(record.created).astimezone(datetime.timezone.utc)
    if (not datefmt):
        datefmt = '%Y%m%dT%H%M%S.%fZ'
    return date.strftime(datefmt)
def format(self, r... |
.skipif((not tcp_libs_available), reason='TCP communication packages not installed')
def test_zmq_topic_filtering_works(caplog):
class ThreeEmitsProcedure(Procedure):
def execute(self):
self.emit('results', 'Data 1')
self.emit('progress', 33)
self.emit('results', 'Data 2'... |
def generate():
global MESSAGES
keys = sorted(MESSAGES.keys())
offsets = []
ids = strs = b''
for id in keys:
offsets.append((len(ids), len(id), len(strs), len(MESSAGES[id])))
ids += (id + b'\x00')
strs += (MESSAGES[id] + b'\x00')
output = ''
keystart = ((7 * 4) + (16 ... |
class MobilenetV3Encoder(Encoder):
def __init__(self, embedding_size: int):
    """Wrap a pretrained MobileNetV3-Small, replacing its classifier head
    with a single ``Linear(576 -> embedding_size)`` projection.

    NOTE(review): ``pretrained=True`` downloads ImageNet weights on first
    use; 576 is the feature width MobileNetV3-Small feeds its classifier.
    """
    super().__init__()
    self.encoder = torchvision.models.mobilenet_v3_small(pretrained=True)
    self.encoder.classifier = nn.Sequential(nn.Linear(576, embedding_size))
    self._embedding_size = embedding_size
def t... |
class DCUN_TFC_FiLM(DenseCUNet_FiLM):
def __init__(self, n_fft, input_channels, internal_channels, n_blocks, n_internal_layers, first_conv_activation, last_activation, t_down_layers, f_down_layers, kernel_size_t, kernel_size_f, tfc_activation, control_vector_type, control_input_dim, embedding_dim, control_type, con... |
class LineSegment(Geometry):
def __init__(self, start_pt, end_pt):
    """Create a segment from ``start_pt`` to ``end_pt``.

    Emits the module-level deprecation ``FutureWarning`` (``dep_msg`` is
    defined elsewhere in the file).
    """
    warnings.warn(dep_msg, FutureWarning, stacklevel=2)
    self._p1 = start_pt
    self._p2 = end_pt
    # Invalidate cached derived properties — presumably a Geometry helper;
    # confirm against the base class.
    self._reset_props()
def __str__(self):
    # e.g. "LineSegment(<p1>, <p2>)" using each endpoint's str().
    return (((('LineSegment(' + str(self._p1)) + ', ') + str(self._p2)) + ')')
... |
class Effect6858(BaseEffect):
    """Passive ship effect.

    Applies the ship's 'shipBonusForceAuxiliaryA1' bonus to the
    'powerTransferAmount' attribute of fitted 'Energy Nosferatu' modules,
    gated on the Amarr Carrier skill.
    """

    type = 'passive'

    def handler(fit, src, context, projectionRange, **kwargs):
        bonus = src.getModifiedItemAttr('shipBonusForceAuxiliaryA1')
        fit.modules.filteredItemBoost(
            lambda mod: mod.item.group.name == 'Energy Nosferatu',
            'powerTransferAmount',
            bonus,
            skill='Amarr Carrier',
            **kwargs,
        )
def test_threadpolltext_update_interval_none(minimal_conf_noscreen, manager_nospawn):
config = minimal_conf_noscreen
tpoll = PollingWidget('Not polled', update_interval=None)
config.screens = [libqtile.config.Screen(top=libqtile.bar.Bar([tpoll], 10))]
manager_nospawn.start(config)
widget = manager_n... |
def to_ising(quad_prog: QuadraticProgram) -> Tuple[(SparsePauliOp, float)]:
if (quad_prog.get_num_vars() > quad_prog.get_num_binary_vars()):
raise QiskitOptimizationError('The type of all variables must be binary. You can use `QuadraticProgramToQubo` converter to convert integer variables to binary variable... |
def preprocess(args):
train_dir = os.path.join(args.output, 'train')
test_dir = os.path.join(args.output, 'test')
os.makedirs(args.output, exist_ok=True)
os.makedirs(train_dir, exist_ok=True)
os.makedirs(test_dir, exist_ok=True)
os.makedirs(os.path.join(train_dir, 'audio'), exist_ok=True)
os... |
_fast
def test_multi_destroyers_through_views():
(x, y, z) = inputs()
e = dot(add(transpose_view(z), y), add(z, x))
g = create_fgraph([x, y, z], [e])
assert g.consistent()
fail = FailureWatch()
TopoSubstitutionNodeRewriter(add, add_in_place, fail).rewrite(g)
assert g.consistent()
assert ... |
class LineCountReporter(AbstractReporter):
def __init__(self, reports: Reports, output_dir: str) -> None:
    """Initialize the reporter; per-file counts accumulate in ``self.counts``."""
    super().__init__(reports, output_dir)
    # Maps a file/module name to a 4-tuple of line counts; the meaning of
    # the four fields is established where entries are written — confirm.
    self.counts: dict[(str, tuple[(int, int, int, int)])] = {}
def on_file(self, tree: MypyFile, modules: dict[(str, MypyFile)], type_map: dict[(Expression,... |
class TestInlineQueryResultAudioWithoutRequest(TestInlineQueryResultAudioBase):
def test_slot_behaviour(self, inline_query_result_audio):
inst = inline_query_result_audio
for attr in inst.__slots__:
assert (getattr(inst, attr, 'err') != 'err'), f"got extra slot '{attr}'"
assert (... |
class MultiTensorApply(object):
available = False
warned = False
def __init__(self, chunk_size):
try:
import fused_optim
MultiTensorApply.available = True
self.chunk_size = chunk_size
except ImportError as err:
MultiTensorApply.available = Fals... |
def logreg(hdf5, batch_size):
n = caffe.NetSpec()
(n.data, n.label) = L.HDF5Data(batch_size=batch_size, source=hdf5, ntop=2)
n.ip1 = L.InnerProduct(n.data, num_output=2, weight_filler=dict(type='xavier'))
n.accuracy = L.Accuracy(n.ip1, n.label)
n.loss = L.SoftmaxWithLoss(n.ip1, n.label)
return n... |
def run(config: Config, nursery: Nursery) -> None:
for (i, qty_of_rooms) in enumerate(batch_size(config.target_qty_of_chat_rooms, config.qty_of_new_rooms_per_iteration)):
log_file = os.path.join(config.logdir, str(i))
script_args: List[str] = [GENERATE_MESSAGES_SCRIPT, '--concurrent-messages', str(c... |
_model
def vip_s14(pretrained=False, **kwargs):
layers = [4, 3, 8, 3]
transitions = [False, False, False, False]
segment_dim = [16, 16, 16, 16]
mlp_ratios = [3, 3, 3, 3]
embed_dims = [384, 384, 384, 384]
model = VisionPermutator(layers, embed_dims=embed_dims, patch_size=14, transitions=transitio... |
def test_perform_indexing_api_request_failure_state(initialized_db, set_secscan_config):
secscan = V4SecurityScanner(application, instance_keys, storage)
secscan._secscan_api = mock.Mock()
secscan._secscan_api.state.side_effect = APIRequestFailure()
secscan._secscan_api.vulnerability_report.return_value... |
def parse_keybinding(obj):
assert isinstance(obj, (tuple, int, str))
if isinstance(obj, tuple):
for char in obj:
(yield char)
elif isinstance(obj, int):
(yield obj)
elif isinstance(obj, str):
in_brackets = False
bracket_content = []
for char in obj:
... |
def all_in(loss_vector):
(mean, var) = tf.nn.moments(loss_vector, axes=0, keep_dims=False)
return tf.logical_and(tf.not_equal(tf.shape(tf.reshape(tf.gather(params=loss_vector, indices=tf.where(tf.greater(loss_vector, ((mean + (3.0 * tf.sqrt(var))) * tf.ones(tf.shape(loss_vector), dtype=tf.float32))))), [(- 1)])... |
class TrainRunner(InferenceRunner):
def __init__(self, train_cfg, inference_cfg, base_cfg=None):
super().__init__(inference_cfg, base_cfg)
self.train_dataloader = self._build_dataloader(train_cfg['data']['train'])
if ('val' in train_cfg['data']):
self.val_dataloader = self._build... |
def _draw_chains(up_qubits: List[cirq.GridQubit], down_qubits: List[cirq.GridQubit], interactions: List[Tuple[(cirq.GridQubit, cirq.GridQubit)]], draw_grid_coords: bool) -> str:
def qubit_coords(qubit: cirq.GridQubit) -> Tuple[(int, int)]:
return ((qubit.col - min_col), (qubit.row - min_row))
diagram = ... |
def reestimate_bn_stats(model: tf.keras.Model, bn_re_estimation_dataset: tf.data.Dataset, bn_num_batches: int=100) -> Handle:
bn_layers = _get_bn_submodules(model)
bn_mean_ori = {layer.name: layer.moving_mean.numpy() for layer in bn_layers}
bn_var_ori = {layer.name: layer.moving_variance.numpy() for layer i... |
def check_transform(transform, domain, constructor=pt.scalar, test=0, rv_var=None):
x = constructor('x')
x.tag.test_value = test
if (rv_var is None):
rv_var = x
rv_inputs = (rv_var.owner.inputs if rv_var.owner else [])
forward_f = pytensor.function([x], transform.forward(x, *rv_inputs))
... |
class PandasModelBase(QtCore.QAbstractTableModel):
float_digits = 6
concat_axis = 0
def __init__(self, column_index=None, results_list=[], parent=None):
super().__init__(parent)
self.column_index = column_index
self._init_data(results_list)
def _init_data(self, results_list=None)... |
class PauliOp(PrimitiveOp):
def __init__(self, primitive: Union[Pauli], coeff: Union[(int, float, complex, ParameterExpression)]=1.0) -> None:
if (not isinstance(primitive, Pauli)):
raise TypeError('PauliOp can only be instantiated with Paulis, not {}'.format(type(primitive)))
super().__... |
def _get_image_or_guide(self: loss.Loss, attr: str, comparison_only: bool=False) -> torch.Tensor:
images_or_guides: List[torch.Tensor] = []
for op in self._losses():
if (comparison_only and (not isinstance(op, loss.ComparisonLoss))):
continue
try:
image_or_guide = getattr... |
class AutoScalingGroup():
def __init__(self, session):
self._session = session
self._asg = session.client('autoscaling')
self._ec2 = session.client('ec2')
def get_user_data(self, user_data_template, **kwargs):
if os.path.isabs(user_data_template):
user_data_path = use... |
class FaceswapControl():
def __init__(self, wrapper):
logger.debug('Initializing %s', self.__class__.__name__)
self.wrapper = wrapper
self.config = get_config()
self.statusbar = self.config.statusbar
self.command = None
self.args = None
self.process = None
... |
def calculate_pool_results(layout_configuration: BaseConfiguration, game: GameDescription) -> PoolResults:
base_results = PoolResults([], {}, [])
base_results.extend_with(add_standard_pickups(game.resource_database, layout_configuration.standard_pickup_configuration, layout_configuration.ammo_pickup_configurati... |
_datapipe('set_length')
class LengthSetterIterDataPipe(IterDataPipe[T_co]):
def __init__(self, source_datapipe: IterDataPipe[T_co], length: int) -> None:
    """Attach an explicit, non-negative ``length`` to ``source_datapipe``.

    Raises AssertionError if ``length`` is negative (and only when asserts
    are enabled — stripped under ``python -O``).
    """
    self.source_datapipe: IterDataPipe[T_co] = source_datapipe
    assert (length >= 0)
    self.length: int = length
def __iter__(self) -> Iterat... |
class TimeFixedGFormula():
def __init__(self, df, exposure, outcome, exposure_type='binary', outcome_type='binary', standardize='population', weights=None):
self.exposure = exposure
self.outcome = outcome
self._missing_indicator = '__missing_indicator__'
(self.gf, self._miss_flag, se... |
(autouse=True)
def _push_custom_request_context(request):
app = request.getfixturevalue('app')
options = request.node.get_closest_marker('request_context')
if (options is None):
return
ctx = app.test_request_context(*options.args, **options.kwargs)
ctx.push()
def teardown():
ctx.... |
class HandlerMask_TestCase(ParserTest):
def runTest(self):
for cmd in self.handler.commands:
self.assertIsNotNone(self.handler.commands[cmd])
lst = ['rootpw', 'user', 'group']
self.handler.maskAllExcept(lst)
for cmd in self.handler.commands:
if (cmd in lst):
... |
def mutate_spec(old_spec, mutation_rate=1.0):
while True:
new_matrix = copy.deepcopy(old_spec.original_matrix)
new_ops = copy.deepcopy(old_spec.original_ops)
edge_mutation_prob = (mutation_rate / NUM_VERTICES)
for src in range(0, (NUM_VERTICES - 1)):
for dst in range((src... |
def mutation(observed_archs, observed_errors, n_best=10, n_mutate=None, pool_size=250, allow_isomorphism=False, patience=50, benchmark='nasbench101', observed_archs_unpruned=None):
if (n_mutate is None):
n_mutate = int((0.5 * pool_size))
assert (pool_size >= n_mutate), ' pool_size must be larger or equa... |
class XLMRobertaBuilder(object):
def __init__(self, version, config, choice=None):
self.config = config
self.choice = {'embedding': ({'embedding', 'quantize'} & set(choice)), 'attention': ({'attention', 'linear', 'quantize'} & set(choice)), 'addNorm_sy': ({'addNorm', 'addNorm_sy', 'linear', 'quantiz... |
class TestParsing():
def testEmptyParse(self):
assert (list(parse_requirements('')) == [])
def testYielding(self):
for (inp, out) in [([], []), ('x', ['x']), ([[]], []), (' x\n y', ['x', 'y']), (['x\n\n', 'y'], ['x', 'y'])]:
assert (list(pkg_resources.yield_lines(inp)) == out)
de... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.