code stringlengths 281 23.7M |
|---|
class WindowCalendar(object):
def __init__(self, data_path=None, parent=None, date=None):
warnings.warn('Deprecated WindowCalendar class called', DeprecationWarning, stacklevel=2)
self.parent = parent
self.date = date
def run(self):
date = calendar_dialog(date=self.date)
... |
def save_class_stats(out_dir, sample_class_stats):
with open(osp.join(out_dir, 'sample_class_stats.json'), 'w') as of:
json.dump(sample_class_stats, of, indent=2)
sample_class_stats_dict = {}
for stats in sample_class_stats:
f = stats.pop('file')
sample_class_stats_dict[f] = stats
... |
def test_step_match(sentence, expected_step, expected_arguments, steps):
sys.stdout.write('{0} STEP "{1}" SHOULD MATCH {2} '.format(colorful.yellow('>>'), colorful.cyan(sentence), colorful.cyan(expected_step)))
result = match_step(sentence, steps)
if (not result):
output_failure(None, ["Expected ... |
class VideoRecord(object):
def __init__(self, video, feature_dir, annot_dir, label_name, test_mode=False):
self.video = video
self.feature_dir = feature_dir
self.annot_dir = annot_dir
self.label_name = label_name
if (self.label_name is not None):
self.label_name =... |
_module(force=True)
class DiceLoss(nn.Module):
def __init__(self, use_sigmoid=True, activate=True, reduction='mean', naive_dice=False, loss_weight=1.0, eps=0.001):
super(DiceLoss, self).__init__()
self.use_sigmoid = use_sigmoid
self.reduction = reduction
self.naive_dice = naive_dice
... |
def get_upgrade_config(_request: WSGIRequest) -> HttpResponse:
with open(os.path.join(settings.BASE_DIR, 'config/raveberry.yaml'), encoding='utf-8') as config_file:
config = config_file.read()
lines = config.splitlines()
lines = [line for line in lines if (not line.startswith('#'))]
return HttpR... |
class example_args(object):
__slots__ = ()
def read(self, iprot):
if ((iprot._fast_decode is not None) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None)):
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
... |
def clean_domainnet(dataset_root='/users/smart/Dataset/DG/domain_net',
                    duplicates_file='./domainbed/misc/domain_net_duplicates.txt'):
    """Delete known duplicate images from a local DomainNet copy.

    Each line of *duplicates_file* is a path relative to *dataset_root*;
    the corresponding file is removed if present.

    Args:
        dataset_root: Root directory of the DomainNet dataset. Defaults to
            the previously hard-coded location, so existing no-argument
            callers are unaffected.
        duplicates_file: Text file listing duplicate image paths, one per
            line. Defaults to the previously hard-coded location.
    """
    with open(duplicates_file, 'r') as f:
        # Iterate the file object directly; no need to materialize all
        # lines with readlines().
        for line in f:
            try:
                os.remove(os.path.join(dataset_root, line.strip()))
            except OSError:
                # Best-effort cleanup: entries already deleted (or never
                # downloaded) are silently skipped, as in the original.
                pass
class Repo2DirSetGlobals(SetGlobals):
def __init__(self, src_repo, dest_dir):
super().__init__(src_repo, dest_dir)
self.repo = src_repo
def __call__(self):
self.set_eas()
self.set_acls()
self.set_win_acls()
self.set_resource_forks()
self.set_carbonfile()
... |
def _custom_fromfile(*args, **kwargs):
from satpy.readers.ahi_hsd import _BASIC_INFO_TYPE, _CAL_INFO_TYPE, _DATA_INFO_TYPE, _ERROR_INFO_TYPE, _ERROR_LINE_INFO_TYPE, _INTER_CALIBRATION_INFO_TYPE, _IRCAL_INFO_TYPE, _NAV_INFO_TYPE, _NAVIGATION_CORRECTION_INFO_TYPE, _NAVIGATION_CORRECTION_SUBINFO_TYPE, _OBSERVATION_LIN... |
def identity_block(input, num_channel, kernel_size):
net = tf.contrib.layers.layer_norm(input, scale=True)
net = tf.nn.relu(net)
residual = slim.conv2d(activation_fn=None, inputs=net, num_outputs=num_channel, biases_initializer=None, kernel_size=[1, kernel_size], stride=[1, 1], padding='SAME')
residual ... |
def merge_to_panoptic(detection_dicts, sem_seg_dicts):
results = []
sem_seg_file_to_entry = {x['file_name']: x for x in sem_seg_dicts}
assert (len(sem_seg_file_to_entry) > 0)
for det_dict in detection_dicts:
dic = copy.copy(det_dict)
dic.update(sem_seg_file_to_entry[dic['file_name']])
... |
class Scenario(ScenarioGenerator):
def __init__(self):
super().__init__()
self.open_scenario_version = 2
def scenario(self, **kwargs):
catalog = xosc.Catalog()
catalog.add_catalog('VehicleCatalog', '../xosc/Catalogs/Vehicles')
road = xosc.RoadNetwork(roadfile='../xodr/e6m... |
class EncoderConv():
def __init__(self, name, is_training, latent_code_dim=128):
self.name = name
self.is_training = is_training
self.latent_code_dim = latent_code_dim
def __call__(self, point_cloud):
with tf.variable_scope(self.name):
num_point = point_cloud.get_shap... |
class TContainer(Container, QtWidgets.QWidget):
sigStretchChanged = QtCore.Signal()
def __init__(self, area):
QtWidgets.QWidget.__init__(self)
Container.__init__(self, area)
self.layout = QtWidgets.QGridLayout()
self.layout.setSpacing(0)
self.layout.setContentsMargins(0, ... |
def submit_run(submit_config: SubmitConfig, run_func_name: str, **run_func_kwargs) -> None:
submit_config = copy.copy(submit_config)
if (submit_config.user_name is None):
submit_config.user_name = get_user_name()
submit_config.run_func_name = run_func_name
submit_config.run_func_kwargs = run_fun... |
def meta_sync_data(check_expired, get_remote_and_cache):
def sync_data():
logging.basicConfig(level=logging.INFO)
if check_expired():
logging.info('trying to fetch data...')
get_remote_and_cache()
logging.info('done')
else:
logging.info('local ... |
def get_matching(coverages, coverage):
    """Return the candidates from *coverages* whose ``codes`` equal those of
    *coverage*, best match first.

    Ordering (after the final reversal): candidates whose ``deltat`` equals
    ``coverage.deltat`` come first, then candidates with a falsy ``deltat``
    rank behind those with a truthy one within each group.
    """
    same_codes = [cand for cand in coverages if cand.codes == coverage.codes]
    # Stable ascending sort on the tuple key, then a full reversal — kept as
    # two steps (not reverse=True) to preserve the original tie ordering.
    ordered = sorted(
        same_codes,
        key=lambda cand: ((coverage.deltat == cand.deltat), (not cand.deltat)),
    )
    ordered.reverse()
    return ordered
def exciton_bohr_radius(me, mh, eps):
    """Exciton Bohr radius for a quantum well.

    Args:
        me: Electron effective mass.
        mh: Hole effective mass.
        eps: Dielectric permittivity.

    Returns:
        The exciton Bohr radius computed from the reduced mass and the
        Coulomb screening factor (Chuang, Table 13.1).
    """
    science_reference('Definition of the exciton bohr radius for a quantum well.', 'S. L. Chuang, Physics of Optoelectonic Devices, Second Edition, p.554, Table 13.1')
    # Reduced mass of the electron-hole pair.
    reduced_mass = (me * mh) / (me + mh)
    # Coulomb factor: 4*pi*eps / q^2 (units follow the module's conventions).
    coulomb_factor = ((4 * np.pi) * eps) / (q ** 2)
    return ((hbar ** 2) / reduced_mass) * coulomb_factor
class MultiLayerLoss(nn.Module):
def __init__(self, score_weight=1.0):
super().__init__()
self.score_weight = score_weight
self._numel_target_encs = 0
def _target_enc_name(self, idx):
return f'_target_encs_{idx}'
def set_target_encs(self, target_encs):
self._numel_tar... |
def parse_args():
parser = argparse.ArgumentParser(description='Export Bart model + Beam Search to ONNX graph.')
parser.add_argument('--validation_file', type=str, default=None, help='A csv or a json file containing the validation data.')
parser.add_argument('--max_length', type=int, default=5, help='The ma... |
def plot_airline(y_true, mean, lb, ub, trainlen, n, r):
plt.plot(range(len(y_true)), y_true, 'b', label='Target')
plt.plot(range(len(y_true)), mean, 'r', label=((('ESN n=' + str(n)) + ', r=') + str(r)))
plt.fill_between(range(len(y_true)), lb, ub, facecolor='grey', alpha=0.3)
(lo, hi) = plt.ylim()
p... |
def get_psp(dataset='pascal_voc', backbone='resnet50', pretrained=False, root='~/.encoding/models', **kwargs):
acronyms = {'pascal_voc': 'voc', 'pascal_aug': 'voc', 'ade20k': 'ade'}
from ..datasets import datasets
model = PSP(datasets[dataset.lower()].NUM_CLASS, backbone=backbone, root=root, **kwargs)
i... |
.parametrize('validation_part', ['all', 'entries', 'none'])
def test_main_validate_record_all_pass(fancy_wheel, tmp_path, validation_part):
destdir = (tmp_path / 'dest')
main([str(fancy_wheel), '-d', str(destdir), '--validate-record', validation_part], 'python -m installer')
installed_py_files = destdir.rgl... |
def download_wheel(distribution, version_spec, for_py_version, search_dirs, app_data, to_folder, env):
to_download = f"{distribution}{(version_spec or '')}"
logging.debug('download wheel %s %s to %s', to_download, for_py_version, to_folder)
cmd = [sys.executable, '-m', 'pip', 'download', '--progress-bar', '... |
class ModelTraceScriptTest(unittest.TestCase):
def _set_up_qebc(self, sharding_type: str, quant_state_dict_split_scale_bias: bool) -> TestModelInfo:
local_device = torch.device('cuda:0')
model_info = TestModelInfo(sparse_device=local_device, dense_device=local_device, num_features=2, num_float_featu... |
def read_lexiconp(filename):
ans = []
found_empty_prons = False
found_large_pronprobs = False
with open(filename, 'r', encoding='latin-1') as f:
whitespace = re.compile('[ \t]+')
for line in f:
a = whitespace.split(line.strip(' \t\r\n'))
if (len(a) < 2):
... |
class TestMapRequest(EndianTest):
def setUp(self):
self.evt_args_0 = {'parent': , 'sequence_number': 63838, 'type': 157, 'window': }
self.evt_bin_0 = b'\x9d\x00^\xf9\xbd\xd9\xe3b\x1d\x02\xbd3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
def testPack0(self):
... |
class TestChatPhotoBase():
chatphoto_small_file_id = 'smallCgADAQADngIAAuyVeEez0xRovKi9VAI'
chatphoto_big_file_id = 'bigCgADAQADngIAAuyVeEez0xRovKi9VAI'
chatphoto_small_file_unique_id = 'smalladc3145fd2e84d95b64d68eaa22aa33e'
chatphoto_big_file_unique_id = 'bigadc3145fd2e84d95b64d68eaa22aa33e'
chatp... |
class AdaBIGGANLoss(nn.Module):
def __init__(self, perceptual_loss='vgg', scale_per=0.001, scale_emd=0.1, scale_reg=0.02, normalize_img=True, normalize_per=False, dist_per='l1'):
super(AdaBIGGANLoss, self).__init__()
if (perceptual_loss == 'vgg'):
self.perceptual_loss = Vgg16PerceptualLo... |
def setup_setuptools_cross_compile(tmp: Path, python_configuration: PythonConfiguration, python_libs_base: Path, env: MutableMapping[(str, str)]) -> None:
distutils_cfg = (tmp / 'extra-setup.cfg')
env['DIST_EXTRA_CONFIG'] = str(distutils_cfg)
log.notice(f'Setting DIST_EXTRA_CONFIG={distutils_cfg} for cross-... |
.filterwarnings('default::pytest.PytestUnhandledThreadExceptionWarning')
def test_unhandled_thread_exception_in_setup(pytester: Pytester) -> None:
pytester.makepyfile(test_it='\n import threading\n import pytest\n\n \n def threadexc():\n def oops():\n raise Valu... |
def channet_conv3x3(in_channels, out_channels, stride, padding=1, dilation=1, groups=1, bias=False, dropout_rate=0.0, activate=True):
return ChannetConv(in_channels=in_channels, out_channels=out_channels, kernel_size=3, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, dropout_rate=dropou... |
class TreeWidget(QtWidgets.QTreeWidget):
sigItemMoved = QtCore.Signal(object, object, object)
sigItemCheckStateChanged = QtCore.Signal(object, object)
sigItemTextChanged = QtCore.Signal(object, object)
sigColumnCountChanged = QtCore.Signal(object, object)
def __init__(self, parent=None):
QtW... |
def build_dataset(config, ues_word):
if ues_word:
tokenizer = (lambda x: x.split(' '))
else:
tokenizer = (lambda x: [y for y in x])
if os.path.exists(config.vocab_path):
vocab = pkl.load(open(config.vocab_path, 'rb'))
else:
vocab = build_vocab(config.train_path, tokenizer... |
def get_map(scope, bottleneck_nums, show_mxnettf=True, show_tfmxnet=True):
if scope.endswith('b'):
update_C1_resnet_v1_b()
elif scope.endswith('d'):
update_C1_resnet_v1_d()
update_C2345(scope, bottleneck_nums)
update_logitis()
mxnet_tf_map = {}
for (tf_name, mxnet_name) in tf_mxn... |
_torch
class CTRLModelLanguageGenerationTest(unittest.TestCase):
def test_lm_generate_ctrl(self):
model = CTRLLMHeadModel.from_pretrained('ctrl')
model.to(torch_device)
input_ids = torch.tensor([[11859, 0, 1611, 8]], dtype=torch.long, device=torch_device)
expected_output_ids = [11859... |
class CallbackContainer():
callbacks: List[Callback] = field(default_factory=list)
def append(self, callback):
self.callbacks.append(callback)
def set_params(self, params):
for callback in self.callbacks:
callback.set_params(params)
def set_trainer(self, trainer):
sel... |
def make_ode_k3_block_layers(input_size, activation='softplus', squeeze=False, last_activation=True, hidden_width=128, mode=0):
(channels, height, width) = input_size
activation = utils.select_activation(activation)
if (mode == 0):
layers = [ConcatConv2d(in_channels=channels, out_channels=hidden_wid... |
def test_locker_properly_loads_nested_extras(locker: Locker) -> None:
content = f'''# {GENERATED_COMMENT}
[[package]]
name = "a"
version = "1.0"
description = ""
optional = false
python-versions = "*"
files = []
[package.dependencies]
b = {{version = "^1.0", optional = true, extras = "c"}}
[package.extras]
b = ["b[... |
def test_rotate_items_by_ignore_first_redo(qapp):
item1 = BeePixmapItem(QtGui.QImage())
item1.setRotation(0)
item2 = BeePixmapItem(QtGui.QImage())
item2.setRotation(30)
item2.setPos(100, 100)
item2.do_flip()
command = commands.RotateItemsBy([item1, item2], (- 90), QtCore.QPointF(100, 100), i... |
class KnownValues(unittest.TestCase):
def setUpClass(self):
self.nmo = 100
self.nocc = 20
self.nvir = 80
self.naux = 400
np.random.seed(1)
def tearDownClass(self):
del self.nmo, self.nocc, self.nvir, self.naux
np.random.seed()
def test_c_ragf2(self):
... |
class _Relationship():
def __init__(self, rId: str, reltype, target, baseURI, external=False):
super(_Relationship, self).__init__()
self._rId = rId
self._reltype = reltype
self._target = target
self._baseURI = baseURI
self._is_external = bool(external)
def is_ext... |
class TripletsNet5g(ResNet):
def __init__(self, config):
super(TripletsNet5g, self).__init__()
self.trunk = ClusterNet5gTrunk(config)
self.head = TripletsNet5gHead(config)
self._initialize_weights()
def forward(self, x, kmeans_use_features=False):
x = self.trunk(x)
... |
def gammainc_grad(k, x):
dtype = upcast(k.type.dtype, x.type.dtype, 'float32')
def grad_approx(skip_loop):
precision = np.array(1e-10, dtype=config.floatX)
max_iters = switch(skip_loop, np.array(0, dtype='int32'), np.array(100000.0, dtype='int32'))
log_x = log(x)
log_gamma_k_plus... |
def move_out_8(library, session, space, offset, length, data, extended=False):
converted_buffer = (ViUInt8 * length)(*tuple(data))
if extended:
return library.viMoveOut8Ex(session, space, offset, length, converted_buffer)
else:
return library.viMoveOut8(session, space, offset, length, conver... |
class TDF_NET_Framework(Dense_UNET_Framework):
def __init__(self, target_name, n_fft, hop_length, num_frame, spec_type, spec_est_mode, optimizer, lr, dev_mode, train_loss, val_loss, layer_level_init_weight, unfreeze_stft_from, **kwargs):
valid_kwargs = inspect.signature(TDF_NET.__init__).parameters
... |
def createInstanceImage(annotation, encoding):
size = (annotation.imgWidth, annotation.imgHeight)
if (encoding == 'ids'):
backgroundId = name2label['unlabeled'].id
elif (encoding == 'trainIds'):
backgroundId = name2label['unlabeled'].trainId
else:
print("Unknown encoding '{}'".fo... |
def _parse_yaml_backends(name: str, node: Union[(None, str, _BackendDict)]) -> Sequence[usertypes.Backend]:
if (node is None):
return [usertypes.Backend.QtWebKit, usertypes.Backend.QtWebEngine]
elif (node == 'QtWebKit'):
return [usertypes.Backend.QtWebKit]
elif (node == 'QtWebEngine'):
... |
class ContractInfoLayout(QVBoxLayout):
def __init__(self, dialog, contract, callback):
QVBoxLayout.__init__(self)
if (not contract):
contract = {'name': '', 'interface': '', 'address': ''}
self.contract = contract
self.callback = callback
self.dialog = dialog
... |
class Solution(object):
def generateTrees(self, n):
if (n == 0):
return []
return self.get_trees(1, n)
def get_trees(self, start, end):
res = []
if (start > end):
res.append(None)
return res
for i in range(start, (end + 1)):
... |
def int_to_float_fn(inputs, out_dtype):
if (all(((input.type.numpy_dtype == np.dtype(out_dtype)) for input in inputs)) and isinstance(np.dtype(out_dtype), np.floating)):
_njit
def inputs_cast(x):
return x
elif any(((i.type.numpy_dtype.kind in 'ib') for i in inputs)):
args_dty... |
def test_nnet():
x = vector('x')
x.tag.test_value = np.r_[(1.0, 2.0)].astype(config.floatX)
out = sigmoid(x)
fgraph = FunctionGraph([x], [out])
compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs])
out = softplus(x)
fgraph = FunctionGraph([x], [out])
compare_jax_and_py(f... |
.unit()
class TestStdCapture():
captureclass = staticmethod(StdCapture)
def getcapture(self, **kw):
cap = self.__class__.captureclass(**kw)
cap.start_capturing()
try:
(yield cap)
finally:
cap.stop_capturing()
def test_capturing_done_simple(self):
... |
class TestHRParser(TestCase):
def test_precedences(self):
p = HRParser()
(a, b, c) = (Symbol(v) for v in 'abc')
(x, y) = (Symbol(v, REAL) for v in 'xy')
tests = []
tests.append(('a | b & c', Or(a, And(b, c))))
tests.append(('a & b | c', Or(And(a, b), c)))
f1 =... |
def catalyze(enzyme, e_site, substrate, s_site, product, klist):
_verify_sites(enzyme, e_site)
_verify_sites(substrate, s_site)
enzyme_free = enzyme({e_site: None})
if (s_site in substrate.site_conditions):
substrate_free = substrate()
s_state = (substrate.site_conditions[s_site], 1)
... |
class TestWebhookInfoWithoutRequest(TestWebhookInfoBase):
def test_slot_behaviour(self, webhook_info):
for attr in webhook_info.__slots__:
assert (getattr(webhook_info, attr, 'err') != 'err'), f"got extra slot '{attr}'"
assert (len(mro_slots(webhook_info)) == len(set(mro_slots(webhook_in... |
def test_write_read_events():
wal = new_wal(state_transition_noop)
event = EventPaymentSentFailed(make_token_network_registry_address(), make_address(), 1, make_address(), 'whatever')
with pytest.raises(sqlite3.IntegrityError):
unexisting_state_change_id = random.getrandbits((16 * 8)).to_bytes(16, '... |
def main(data_dir, client, c, config):
benchmark(read_tables, config, c)
query = "\n SELECT\n --wcs_user_sk,\n clicks_in_category,\n CASE WHEN cd_education_status IN ('Advanced Degree', 'College', '4 yr Degree', '2 yr Degree') \n THEN 1 ELSE 0 END AS college_ed... |
class PytitionUser(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='pytitionuser')
invitations = models.ManyToManyField('Organization', related_name='invited', blank=True)
default_template = models.ForeignKey('PetitionTemplate', blank=True, null=Tr... |
def _recreate_payload_schema(dest_client: QdrantBase, collection_name: str, payload_schema: Dict[(str, models.PayloadIndexInfo)]) -> None:
for (field_name, field_info) in payload_schema.items():
dest_client.create_payload_index(collection_name, field_name=field_name, field_schema=(field_info.data_type if (f... |
def update_xi_help(i, theta, use_voronoi):
K = theta['K']
N = theta['N']
result = 0
for k in range(K):
phis = get_trans_list(k=k, i=i, theta=theta)
temp_sum = 0
for (j, phi) in enumerate(phis):
if use_voronoi:
dphi = theta['voronoi'][i][k][j]
... |
def test_prepare_metadata_for_build_editable_no_fallback():
hooks = get_hooks('pkg2')
with TemporaryDirectory() as metadatadir:
with modified_env({'PYTHONPATH': BUILDSYS_PKGS}):
with pytest.raises(HookMissing) as exc_info:
hooks.prepare_metadata_for_build_editable(metadatadir... |
class KeyCode(IntEnum):
A = auto()
B = auto()
C = auto()
D = auto()
E = auto()
F = auto()
G = auto()
H = auto()
I = auto()
J = auto()
K = auto()
L = auto()
M = auto()
N = auto()
O = auto()
P = auto()
Q = auto()
R = auto()
S = auto()
T = aut... |
.parametrize('input, output', [('>1!2,<=2!3', VersionRange(Version.from_parts(2, 0, 0, epoch=1), Version.from_parts(3, 0, 0, epoch=2), include_min=False, include_max=True)), ('>=1!2,<2!3', VersionRange(Version.from_parts(2, 0, 0, epoch=1), Version.from_parts(3, 0, 0, epoch=2), include_min=True, include_max=False))])
de... |
class SingPhoneTokenizer(AbsTokenizer):
def __init__(self, phone_table='UniAudio/tools/tokenizer/Sing/dict_phone.txt'):
AbsTokenizer.__init__(self)
phone_dict = open(phone_table, encoding='utf-8').readlines()
phone_dict = [line.strip().split() for line in phone_dict]
phone_dict = {li... |
class NoSuchCommandError(Error):
def for_cmd(cls, cmd: str, all_commands: List[str]=None) -> 'NoSuchCommandError':
suffix = ''
if all_commands:
matches = difflib.get_close_matches(cmd, all_commands, n=1)
if matches:
suffix = f' (did you mean :{matches[0]}?)'
... |
class StartEndDataset_audio(Dataset):
Q_FEAT_TYPES = ['pooler_output', 'last_hidden_state']
def __init__(self, dset_name, data_path, v_feat_dirs, q_feat_dir, a_feat_dir=None, q_feat_type='last_hidden_state', max_q_l=32, max_v_l=75, data_ratio=1.0, ctx_mode='video', normalize_v=True, normalize_t=True, load_label... |
def get_version(verbose=False, add_git_number=True):
with open(os.path.join(getProjectRoot(), '__version__.txt')) as version_file:
version = version_file.read().strip()
if add_git_number:
import subprocess
import sys
cd = _chdir(os.path.dirname(__file__))
try:
... |
class Backbone(BackboneBase):
def __init__(self, name: str, train_backbone: bool, return_interm_layers: bool, dilation: bool):
if (name.startswith('resnet') or name.startswith('resnext')):
backbone = getattr(torchvision.models, name)(replace_stride_with_dilation=[False, False, dilation], pretrai... |
class float():
def __init__(self, x: object) -> None:
pass
def __add__(self, n: float) -> float:
pass
def __radd__(self, n: float) -> float:
pass
def __sub__(self, n: float) -> float:
pass
def __rsub__(self, n: float) -> float:
pass
def __mul__(self, n: fl... |
def show(source, with_bounds=True, contour=False, contour_label_kws=None, ax=None, title=None, transform=None, adjust=False, **kwargs):
plt = get_plt()
if isinstance(source, tuple):
arr = source[0].read(source[1])
if (len(arr.shape) >= 3):
arr = reshape_as_image(arr)
if with_... |
class Event(GeoLocalizedModel, TimeFramedModel, TimeStampedModel):
slug = I18nCharField(_('slug'), blank=False)
content = I18nTextField(_('content'), blank=False)
title = I18nCharField(_('title'), blank=False)
conference = models.ForeignKey('conferences.Conference', on_delete=models.CASCADE, verbose_nam... |
('PyQt6.QtWidgets.QGraphicsPixmapItem.keyPressEvent')
def test_key_press_event_escape(key_mock, qapp, item):
item.exit_crop_mode = MagicMock()
event = MagicMock()
event.key.return_value = Qt.Key.Key_Escape
item.keyPressEvent(event)
item.exit_crop_mode.assert_called_once_with(confirm=False)
key_m... |
class MyQListWidget(QtWidgets.QListWidget):
listEntryDragStart = QtCore.Signal()
listEntryDragEnd = QtCore.Signal()
middleButtonClicked = QtCore.Signal(QtCore.QPoint)
doubleClicked = QtCore.Signal(QtCore.QPoint)
keyPressed = QtCore.Signal(str)
def dragEnterEvent(self, event):
super().dra... |
class DFAState(object):
def __init__(self, nfaset, final):
assert isinstance(nfaset, dict)
assert isinstance(next(iter(nfaset)), NFAState)
assert isinstance(final, NFAState)
self.nfaset = nfaset
self.isfinal = (final in nfaset)
self.arcs = {}
def addarc(self, next... |
def start_server(applications, port=0, host='', cdn=True, reconnect_timeout=0, static_dir=None, remote_access=False, debug=False, allowed_origins=None, check_origin=None, auto_open_webbrowser=False, max_payload_size='200M', **uvicorn_settings):
app = asgi_app(applications, cdn=cdn, reconnect_timeout=reconnect_timeo... |
def _do_trash_songs(parent, songs, librarian):
dialog = TrashDialog.for_songs(parent, songs)
resp = dialog.run()
if (resp != TrashDialog.RESPONSE_TRASH):
return
window_title = _('Moving %(current)d/%(total)d.')
w = WaitLoadWindow(parent, len(songs), window_title)
w.show()
ok = []
... |
class TestJSONAttribute():
def test_quoted_json(self):
attr = JSONAttribute()
serialized = attr.serialize('\\t')
assert (attr.deserialize(serialized) == '\\t')
serialized = attr.serialize('"')
assert (attr.deserialize(serialized) == '"')
def test_json_attribute(self):
... |
('train')
def train(args, trainer, task, epoch_itr):
itr = epoch_itr.next_epoch_itr(fix_batches_to_gpus=args.fix_batches_to_gpus, shuffle=(epoch_itr.next_epoch_idx > args.curriculum))
update_freq = (args.update_freq[(epoch_itr.epoch - 1)] if (epoch_itr.epoch <= len(args.update_freq)) else args.update_freq[(- 1)... |
class SLAKE_VQA_Dataset(Dataset):
def __init__(self, csv_path, img_root_dir, image_res, is_train=True):
self.is_train = is_train
self.root_dir = img_root_dir
data_info = pd.read_csv(csv_path)
self.img_path_list = np.asarray(data_info['img_name'])
self.question_list = np.asarr... |
def test_send_reply_emails_waiting_list_maybe(rf, grant_factory, mocker):
mock_messages = mocker.patch('grants.admin.messages')
grant = grant_factory(status=Grant.Status.waiting_list_maybe)
request = rf.get('/')
mock_send = mocker.patch('grants.admin.send_grant_reply_waiting_list_email')
send_reply_... |
class OnPoll():
def on_poll(self=None, filters=None, group: int=0) -> Callable:
def decorator(func: Callable) -> Callable:
if isinstance(self, pyrogram.Client):
self.add_handler(pyrogram.handlers.PollHandler(func, filters), group)
elif (isinstance(self, Filter) or (se... |
class GroupingOperation(Function):
def forward(ctx, features: torch.Tensor, idx: torch.Tensor) -> torch.Tensor:
assert features.is_contiguous()
assert idx.is_contiguous()
(B, nfeatures, nsample) = idx.size()
(_, C, N) = features.size()
output = torch.cuda.FloatTensor(B, C, nf... |
def interpolate_video(cfg):
sample_directory = create_sample_directory(cfg, 'frames')
projector_path = get_model_path(cfg.image_name, (cfg.run_name + '_projector'))
projector_model = Diffusion.load_from_checkpoint(projector_path, training_target='noise', model=NextNet(depth=cfg.network_depth), noise_schedul... |
def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error, io=codecs):
required = {}
for linenum in xrange(clean_lines.NumLines()):
line = clean_lines.elided[linenum]
if ((not line) or (line[0] == '#')):
continue
matched = _RE_PATTERN_STRING.search(line)
... |
def generateFeature(opt, video_list, video_dict):
num_sample_start = opt['num_sample_start']
num_sample_end = opt['num_sample_end']
num_sample_action = opt['num_sample_action']
num_sample_interpld = opt['num_sample_interpld']
for video_name in video_list:
adf = pandas.read_csv((('./output/TE... |
def _create_ap_per_tolerance_graph(ap_data_frame: DataFrame, methods: List[str], ordered_class_names: List[str]) -> Figure:
tolerances = _extract_tolerances(ap_data_frame, methods)
active_tolerance_index = (len(tolerances) - 1)
active_tolerance = tolerances[active_tolerance_index]
active_tolerance_ap_da... |
def fid_score(r_imgs, g_imgs, batch_size=32, dims=2048, cuda=False, normalize=False, r_cache=None, verbose=0):
block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims]
model = InceptionV3([block_idx])
if (r_cache and (not r_cache.endswith('.npz'))):
r_cache = (r_cache + '.npz')
if (r_cache and os.path.e... |
class Effect6478(BaseEffect):
type = ('projected', 'active')
def handler(fit, container, context, projectionRange, **kwargs):
if ('projected' not in context):
return
if fit.ship.getModifiedItemAttr('disallowOffensiveModifiers'):
return
fit.ship.boostItemAttr('sign... |
class UserManager(CRUDMixin, RESTManager):
_path = '/users'
_obj_cls = User
_list_filters = ('active', 'blocked', 'username', 'extern_uid', 'provider', 'external', 'search', 'custom_attributes', 'status', 'two_factor')
_create_attrs = RequiredOptional(optional=('email', 'username', 'name', 'password', '... |
class TestSimulatedExecutor(unittest.TestCase):
def setUpClass(cls) -> None:
cls.example_ticker = BloombergTicker('Example Index')
cls.example_ticker_2 = BloombergTicker('Example2 Index')
cls.orders = [Order(ticker=cls.example_ticker, quantity=1000, execution_style=MarketOrder(), time_in_for... |
.skipif((not PY310_PLUS), reason='Match requires python 3.10')
class TestPatternMatching():
def test_assigned_stmts_match_mapping():
assign_stmts = extract_node('\n var = {1: "Hello", 2: "World"}\n match var:\n case {**rest}: #\n pass\n ')
match_mappin... |
class DatabaseRouter(BlockingRouter):
def _default_batch_size(self):
if hasattr(settings, 'DB_ROUTER_DEFAULT_BATCH_SIZE'):
return settings.DB_ROUTER_DEFAULT_BATCH_SIZE
return 200
def queue_message(self, direction, connections, text, fields=None):
from rapidsms.router.db.model... |
class SPM(BaseModel):
def __init__(self, options=None, name='Single Particle Model', build=True):
options = (options or {})
kinetics = options.get('intercalation kinetics')
surface_form = options.get('surface form')
if ((kinetics is not None) and (surface_form is None)):
... |
class UtilTest(unittest.TestCase):
(utils.logger)
def test_endpoint_address(self):
self.assertEqual(endpoint_address(1), 1)
self.assertEqual(endpoint_address(129), 1)
(utils.logger)
def test_endpoint_direction(self):
self.assertEqual(endpoint_direction(1), ENDPOINT_OUT)
s... |
def test_transfer_2step(fints_client):
with fints_client:
accounts = fints_client.get_sepa_accounts()
a = fints_client.simple_sepa_transfer(accounts[0], 'DE', 'GENODE23X42', 'Test Receiver', Decimal('2.34'), 'Test Sender', 'Test transfer 2step')
assert isinstance(a, NeedTANResponse)
... |
class TestCSVHook(HookTestBase):
def setUp(self) -> None:
self.base_dir = tempfile.mkdtemp()
def tearDown(self) -> None:
shutil.rmtree(self.base_dir)
def test_constructors(self) -> None:
folder = f'{self.base_dir}/constructor_test/'
os.makedirs(folder)
self.constructo... |
class TestMappingNotify(EndianTest):
def setUp(self):
self.evt_args_0 = {'count': 244, 'first_keycode': 224, 'request': 213, 'sequence_number': 22874, 'type': 251}
self.evt_bin_0 = b'\xfb\x00YZ\xd5\xe0\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0... |
class Effect11445(BaseEffect):
runTime = 'early'
type = ('projected', 'passive')
def handler(fit, beacon, context, projectionRange, **kwargs):
for sensor_type in ('Gravimetric', 'Ladar', 'Magnetometric', 'Radar'):
fit.ship.boostItemAttr(f'scan{sensor_type}Strength', beacon.getModifiedIte... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.