code stringlengths 281 23.7M |
|---|
def format_captured_exceptions(exceptions):
from io import StringIO
stream = StringIO()
stream.write('Exceptions caught in Qt event loop:\n')
sep = (('_' * 80) + '\n')
stream.write(sep)
for (exc_type, value, tback) in exceptions:
traceback.print_exception(exc_type, value, tback, file=str... |
def write_geojson(df, filename=None, geomtype='linestring', drop_na=True):
df['Name'] = df.index
records = json.loads(df.to_json(orient='records'))
features = []
for rec in records:
coordinates = rec['coords']
del rec['coords']
if drop_na:
rec = {k: v for (k, v) in re... |
def test_tar_archive_one_pass():
context = Context({'key1': 'value1', 'key2': 'value2', 'key3': 'value3', 'tar': {'archive': [{'in': 'path/to/dir', 'out': './blah.tar.xz'}]}})
with patch('tarfile.open') as mock_tarfile:
pypyr.steps.tar.run_step(context)
mock_tarfile.assert_called_once_with('./blah.t... |
class QueuedSong(models.Model):
id: int
index = models.IntegerField()
manually_requested = models.BooleanField()
votes = models.IntegerField(default=0)
internal_url = models.CharField(max_length=2000, blank=True, null=True)
external_url = models.CharField(max_length=2000)
stream_url = models... |
def test_no_expired_memberships():
with time_machine.travel('2020-10-10 10:00:00', tick=False):
membership_1 = MembershipFactory(status=MembershipStatus.ACTIVE)
membership_1.add_pretix_payment(organizer='python-italia', event='pycon-demo', order_code='XXYYZZ', total=1000, status=PaymentStatus.PAID, ... |
def test_update_matrix():
root = WorldObject()
root.local.position = (3, 6, 8)
root.local.scale = (1, 1.2, 1)
root.local.rotation = la.quat_from_euler(((pi / 2), 0, 0))
(pos, rot, scale) = la.mat_decompose(root.local.matrix)
assert np.allclose(pos, root.local.position)
assert np.allclose(rot... |
class DiamondHFTestGamma(unittest.TestCase):
def setUpClass(cls):
cls.cell = cell = Cell()
cell.atom = '\n C 0. 0. 0.\n C 1.67 1.68 1.69\n '
cell.basis = {'C': [[0, (0.8, 1.0)], [1, (1.0, 1.0)]]}
cell.pseudo = 'gth-pade'
cell.a = '\n 0., 3.... |
class TokenRematch():
def __init__(self):
self._do_lower_case = True
def stem(token):
if (token[:2] == '##'):
return token[2:]
else:
return token
def _is_control(ch):
return (unicodedata.category(ch) in ('Cc', 'Cf'))
def _is_special(ch):
re... |
class ExampleForm(Form):
def __init__(self, view, event_channel_name):
super().__init__(view, event_channel_name)
self.use_layout(FormLayout())
model_object = ModelObject()
self.layout.add_input(TextInput(self, model_object.fields.text_input_field))
self.layout.add_input(Chec... |
class GinoNullType(sqltypes.NullType):
def result_processor(self, dialect, coltype):
if (coltype == JSON_COLTYPE):
return JSON().result_processor(dialect, coltype)
if (coltype == JSONB_COLTYPE):
return JSONB().result_processor(dialect, coltype)
return super().result_p... |
class BasicBlockMtl(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None, last=False):
super(BasicBlockMtl, self).__init__()
self.conv1 = conv3x3mtl(inplanes, planes, stride)
self.bn1 = nn.BatchNorm2d(planes)
self.relu = nn.ReLU(inplace=True)
... |
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--checkpoint', required=True, help='Path to model checkpoint')
parser.add_argument('--reference_audio', required=True)
parser.add_argument('--output')
parser.add_argument('--hparams', default='', help='Hyperparameter overrides as a ... |
class OpDialog(QDialog):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.setWindowTitle('Optimization Inputs and Output')
self.gui_init()
def gui_init(self):
self.in_range = QLineEdit()
self.out_cell = QLineEdit()
row_1 = QHBoxLayout()
... |
def extract_feature_from_images(generator, inception, truncation, truncation_latent, batch_size, n_sample, device, loader, info_print=False):
with torch.no_grad():
generator.eval()
inception.eval()
n_batch = (n_sample // batch_size)
resid = (n_sample - ((n_batch - 1) * batch_size))
... |
def getFPDT(output):
rectype = {}
rectype[0] = 'Firmware Basic Boot Performance Record'
rectype[1] = 'S3 Performance Table Record'
prectype = {}
prectype[0] = 'Basic S3 Resume Performance Record'
prectype[1] = 'Basic S3 Suspend Performance Record'
sysvals.rootCheck(True)
if (not os.path.... |
class TPlaylistModel(TestCase):
def setUp(self):
self.pl = PlaylistModel()
self.pl.set(range(10))
do_events()
self.assertTrue((self.pl.current is None))
def test_current_recover(self):
self.pl.set(range(10))
self.pl.next()
self.assertEqual(self.pl.current,... |
class LayoutTranslatorRequirement(BitPackEnum, Enum):
long_name: str
VIOLET = 'violet'
AMBER = 'amber'
EMERALD = 'emerald'
COBALT = 'cobalt'
RANDOM = 'random'
REMOVED = 'removed'
RANDOM_WITH_REMOVED = 'random-removed'
def from_item_short_name(cls, name: str) -> Self:
for (key... |
(params=[_m(b'\x08', 5, 'Original connections'), _m(b'\x18', 5, 'Original connections', skip_final_bosses=True), _m(b'\xc1', 8, 'One-way, with cycles', mode='one-way-teleporter'), _m(b'\xc81d', 22, 'One-way, with cycles; excluded 1 elevators', mode='one-way-teleporter', excluded_teleporters=[_a('Temple Grounds', 'Templ... |
def shared(value, name=None, strict=False, allow_downcast=None):
if (not isinstance(value, (np.number, float, int, complex))):
raise TypeError()
try:
dtype = value.dtype
except AttributeError:
dtype = np.asarray(value).dtype
dtype = str(dtype)
value = getattr(np, dtype)(value... |
.parametrize('env_var', ['PROJ_CURL_CA_BUNDLE', 'CURL_CA_BUNDLE', 'SSL_CERT_FILE'])
('pyproj.network._set_ca_bundle_path')
def test_ca_bundle_path__env_var_skip(c_set_ca_bundle_path_mock, env_var):
with patch.dict('os.environ', {env_var: '/tmp/dummy/path/cacert.pem'}, clear=True):
set_ca_bundle_path()
c... |
class SpaceReader():
def __init__(self, space):
self.basedir = ''
self.space = space
def read(self, file):
if (not hasattr(file, 'read')):
self.basedir = os.path.dirname(file)
file = open(file, 'rt')
elif hasattr(file, 'name'):
self.basedir = o... |
class Bottleneck(nn.Module):
def forward(self, x):
shortcut = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if (se... |
def load(fnames, tag='', inst_id='', sim_multi_file_right=False, sim_multi_file_left=False, non_monotonic_index=False, non_unique_index=False, malformed_index=False, start_time=None, num_samples=86400, test_load_kwarg=None, max_latitude=90.0):
pysat.logger.info(''.join(('test_load_kwarg = ', str(test_load_kwarg))))... |
def weights_init(m):
if isinstance(m, nn.Conv2d):
init.kaiming_normal(m.weight, mode='fan_out')
if (m.bias is not None):
init.constant(m.bias, 0)
elif (isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d)):
init.constant(m.weight, 1)
init.constant(m.bias, 0)... |
def test_run_has_helpful_error_when_command_not_found(app_tester: ApplicationTester, env: MockEnv, capfd: pytest.CaptureFixture[str]) -> None:
nonexistent_command = 'nonexistent-command'
env._execute = True
app_tester.execute(f'run {nonexistent_command}')
assert (env.executed == [[nonexistent_command]])... |
class ModelWithFunctionalReLU(torch.nn.Module):
def __init__(self):
super(ModelWithFunctionalReLU, self).__init__()
self.conv1 = torch.nn.Conv2d(3, 6, 5)
self.conv2 = torch.nn.Conv2d(6, 16, 5)
self.fc1 = torch.nn.Linear(9216, 128)
self.fc2 = torch.nn.Linear(128, 10)
def f... |
class Transform(torch.nn.Module):
def __init__(self, image_size, mean, std):
super().__init__()
self.transforms = torch.nn.Sequential(Resize([image_size], interpolation=InterpolationMode.BICUBIC), CenterCrop(image_size), ConvertImageDtype(torch.float), Normalize(mean, std))
def forward(self, x: ... |
def test_create_tasks(db, settings):
Task.objects.all().delete()
xml_file = (((Path(settings.BASE_DIR) / 'xml') / 'elements') / 'tasks.xml')
root = read_xml_file(xml_file)
version = root.attrib.get('version')
elements = flat_xml_to_elements(root)
elements = convert_elements(elements, version)
... |
class SyncSubgraphTask(SubgraphTask):
def __init__(self, strategy, **kwargs):
super().__init__(strategy, **kwargs)
self._cache = list()
self._mux = 0
def wait(self):
if self.is_closed:
return
self._mux -= 1
def push(self, node, edges: list, **kwargs):
... |
('evennia.server.evennia_launcher.Popen', new=MagicMock())
class TestLauncher(TwistedTestCase):
def test_is_windows(self):
self.assertEqual(evennia_launcher._is_windows(), (os.name == 'nt'))
def test_file_compact(self):
self.assertEqual(evennia_launcher._file_names_compact('foo/bar/test1', 'foo/... |
def make_vdom_constructor(tag: str, allow_children: bool=True, import_source: (ImportSourceDict | None)=None) -> VdomDictConstructor:
def constructor(*attributes_and_children: Any, **kwargs: Any) -> VdomDict:
model = vdom(tag, *attributes_and_children, **kwargs)
if ((not allow_children) and ('childr... |
class DRN(nn.Module):
def __init__(self, channels, init_block_channels, dilations, bottlenecks, simplifieds, residuals, in_channels=3, in_size=(224, 224), num_classes=1000):
super(DRN, self).__init__()
self.in_size = in_size
self.num_classes = num_classes
self.features = nn.Sequentia... |
class GNMTGlobalScorer(object):
def __init__(self, alpha, length_penalty):
self.alpha = alpha
penalty_builder = penalties.PenaltyBuilder(length_penalty)
self.length_penalty = penalty_builder.length_penalty()
def score(self, beam, logprobs):
normalized_probs = self.length_penalty(... |
def _get_response_for_error(e: Exception, request_id: str):
logger.error(f'Request {request_id} failed with:', exc_info=e)
status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
if isinstance(e, HTTPException):
status_code = e.status_code
elif isinstance(e, OpenAIHTTPException):
status_code... |
def importEftCfg(shipname, lines, iportuser):
sMkt = Market.getInstance()
try:
sMkt.getItem(shipname)
except (KeyboardInterrupt, SystemExit):
raise
except:
return []
fits = []
fitIndices = []
for line in lines:
if ((line[:1] == '[') and (line[(- 1):] == ']')):... |
class MultinodeConstraintList(MultinodePenaltyList):
    """A multinode-penalty list specialized for constraints."""

    def add(self, multinode_constraint: Any, **extra_arguments: Any):
        """Register a multinode constraint in this list.

        Delegates to the parent ``add``, pinning the option type and the
        penalty-function enum to their constraint-specific classes and
        forwarding any remaining keyword arguments untouched.
        """
        # Zero-argument super() is equivalent to the explicit
        # super(MultinodeConstraintList, self) spelling.
        super().add(
            option_type=MultinodeConstraint,
            multinode_penalty=multinode_constraint,
            _multinode_penalty_fcn=MultinodeConstraintFcn,
            **extra_arguments,
        )
def get_parallax_corrected_lonlats(sat_lon, sat_lat, sat_alt, lon, lat, height):
elevation = _get_satellite_elevation(sat_lon, sat_lat, sat_alt, lon, lat)
parallax_distance = _calculate_slant_cloud_distance(height, elevation)
shifted_xyz = _get_parallax_shift_xyz(sat_lon, sat_lat, sat_alt, lon, lat, paralla... |
def test_nested_process_search_pq_over_max_char_limit(s1_product: SentinelOne):
list_o_terms = (['abcdefghijklmnopqrstuvwxyz'] * 251)
first_list = (('("' + '", "'.join((['abcdefghijklmnopqrstuvwxyz'] * 125))) + '")')
second_list = (('("' + '", "'.join((['abcdefghijklmnopqrstuvwxyz'] * 1))) + '")')
s1_pr... |
def bench_coroutines(loops: int) -> float:
    """Time *loops* full runs of the fibonacci coroutine.

    Each iteration creates a fresh ``fibonacci(25)`` coroutine and drives
    it until it raises StopIteration.  Returns the elapsed wall-clock
    time in seconds as measured by ``pyperf.perf_counter()``.
    """
    iterations = range(loops)  # built before the clock starts
    start = pyperf.perf_counter()
    for _ in iterations:
        gen = fibonacci(25)
        try:
            # Advance the coroutine to exhaustion; send(None) resumes a
            # coroutine that is not being fed a value.
            while True:
                gen.send(None)
        except StopIteration:
            pass
    return pyperf.perf_counter() - start
class StepLRScheduler(Scheduler):
def __init__(self, optimizer: torch.optim.Optimizer, decay_t: float, decay_rate: float=1.0, warmup_t=0, warmup_lr_init=0, warmup_prefix=True, t_in_epochs=True, noise_range_t=None, noise_pct=0.67, noise_std=1.0, noise_seed=42, initialize=True) -> None:
super().__init__(optim... |
class PortalLogObserver(log.FileLogObserver):
timeFormat = None
prefix = ' |Portal| '
def emit(self, eventDict):
text = log.textFromEventDict(eventDict)
if (text is None):
return
timeStr = timeformat(eventDict['time'])
fmtDict = {'text': text.replace('\n', '\n\t'... |
.parametrize('username,password', users)
def test_create(db, client, username, password):
client.login(username=username, password=password)
instances = Question.objects.all()
for instance in instances:
url = reverse(urlnames['list'])
data = {'uri_prefix': instance.uri_prefix, 'uri_path': f'... |
def test_filerewriter_with_dir_out_windows_slash(windows, fs):
fs.os = OSType.WINDOWS
fs.create_file('/arb/myfile')
tr = FakeRewriter('formatter', 'encin', 'encout')
tr.files_in_to_out('/arb/myfile', 'out/mydir/')
tr.in_to_out_mock.assert_called_once_with(in_path=Path('/arb/myfile'), out_path=Path('... |
class LithiumMetalSurfaceForm(LithiumMetalBaseModel):
def get_fundamental_variables(self):
delta_phi = pybamm.Variable('Lithium metal interface surface potential difference [V]', domain='current collector')
variables = {'Lithium metal interface surface potential difference [V]': delta_phi}
r... |
def _load_pretrained(model_name, model, progress):
if ((model_name not in _MODEL_URLS) or (_MODEL_URLS[model_name] is None)):
raise ValueError('No checkpoint is available for model type {}'.format(model_name))
checkpoint_url = _MODEL_URLS[model_name]
model.load_state_dict(load_state_dict_from_url(ch... |
def make_asr_data(src_file, tgt_file, tgt_dicts, tokenizer, max_src_length=64, max_tgt_length=64, add_bos=True, data_type='int64', num_workers=1, verbose=False, input_type='word', stride=1, concat=4, prev_context=0, fp16=False, reshape=True, asr_format='scp', output_format='raw', external_tokenizer=None, src_lang=None,... |
class IRCBotFactory(protocol.ReconnectingClientFactory):
initialDelay = 1
factor = 1.5
maxDelay = 60
def __init__(self, sessionhandler, uid=None, botname=None, channel=None, network=None, port=None, ssl=None):
self.sessionhandler = sessionhandler
self.uid = uid
self.nickname = st... |
class Redis():
def __init__(self, host='localhost', port=6379, db=0, expire_time=None):
self.redis = redis.StrictRedis(host=host, port=port, db=db)
self.expire_time = expire_time
def set(self, k, v):
r = self.redis.set(k, pickle.dumps(v, protocol=(- 1)))
if (self.expire_time is n... |
def test_checklist_show_hide():
p = pt.Parameter.create(name='checklist', type='checklist', limits=['a', 'b', 'c'])
pi = ChecklistParameterItem(p, 0)
pi.setHidden = MagicMock()
p.hide()
pi.setHidden.assert_called_with(True)
assert (not p.opts['visible'])
p.show()
pi.setHidden.assert_call... |
class CMlp(nn.Module):
def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.0):
super().__init__()
out_features = (out_features or in_features)
hidden_features = (hidden_features or in_features)
self.fc1 = nn.Conv2d(in_features, hidden_fe... |
class ModalPromptSession(PromptSession):
_spec_class = ModeSpec
_current_mode = None
_default_settings = {}
_specs = OrderedDict()
_inputhook = None
add_history = True
search_no_duplicates = False
def _check_args(self, kwargs):
if ('specs' in kwargs):
specs = kwargs['... |
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None, style='pytorch', with_cp=False, conv_cfg=None, norm_cfg=dict(type='BN'), dcn=None, gcb=None, gen_attention=None):
super(Bottleneck, self).__init__()
assert (style in ['pytorch',... |
class SRResNetYX2(nn.Module):
def __init__(self, min=0.0, max=1.0, tanh=True):
super(SRResNetYX2, self).__init__()
self.min = min
self.max = max
self.tanh = tanh
self.conv_input = nn.Conv2d(in_channels=1, out_channels=64, kernel_size=9, stride=1, padding=4, bias=False)
... |
def test_deferred_hook_checking(pytester: Pytester) -> None:
pytester.syspathinsert()
pytester.makepyfile(**{'plugin.py': '\n class Hooks(object):\n def pytest_my_hook(self, config):\n pass\n\n def pytest_configure(config):\n config.pluginmanager.add_hookspecs(... |
def mutation(mask_all, N, fitness, L):
(individual_mask, _) = roulette(mask_all, N, fitness)
idx = np.random.randint(0, L, 2)
(start_idx, end_idx) = (np.min(idx), np.max(idx))
individual_mask_copy = individual_mask.copy()
individual_mask_copy[start_idx:end_idx] = (np.ones((end_idx - start_idx)) - in... |
class TestFrequency(unittest.TestCase):
def test_frequency_with_valid_input(self) -> None:
input = torch.tensor([0.4826, 0.9517, 0.8967, 0.8995, 0.1584, 0.9445, 0.97])
torch.testing.assert_close(frequency_at_k(input, k=0.5), torch.tensor([1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0]))
torch.testing.as... |
class WorkerThread(Generic[RetT]):
def __init__(self, thread_cache: ThreadCache) -> None:
self._job: (tuple[(Callable[([], RetT)], Callable[([outcome.Outcome[RetT]], object)], (str | None))] | None) = None
self._thread_cache = thread_cache
self._worker_lock = Lock()
self._worker_lock... |
def test_realesrgan_paired_dataset():
with open('tests/data/test_realesrgan_paired_dataset.yml', mode='r') as f:
opt = yaml.load(f, Loader=yaml.FullLoader)
dataset = RealESRGANPairedDataset(opt)
assert (dataset.io_backend_opt['type'] == 'disk')
assert (len(dataset) == 2)
result = dataset.__g... |
class Profile(Object):
def __init__(self):
Object.__init__(self)
self.functions = {}
self.cycles = []
def add_function(self, function):
if (function.id in self.functions):
sys.stderr.write(('warning: overwriting function %s (id %s)\n' % (function.name, str(function.id... |
class TagModelUnicodeTest(TagTestManager, TestCase):
manage_models = [test_models.MixedTest]
def setUpExtra(self):
self.model = test_models.MixedTest
self.tag_model = test_models.MixedTestTagModel
self.o1 = self.create(self.model, name='Test', singletag='', tags='boy, nino, ')
def te... |
def npy2df(keywords, verbose=True):
database = keywords
dir_path = database[0][:(database[0].rindex('/') + 1)]
try:
if (verbose >= 2):
print('Loading iso...', end=' ')
iso = np.load((dir_path + 'iso.npy'))
if (verbose >= 2):
print('Done!')
if (verbose ... |
class TestHRSC2016GWD(TestHRSC2016):
def eval(self):
gwd = build_whole_network.DetectionNetworkGWD(cfgs=self.cfgs, is_training=False)
all_boxes_r = self.eval_with_plac(img_dir=self.args.img_dir, det_net=gwd, image_ext=self.args.image_ext)
imgs = os.listdir(self.args.img_dir)
real_tes... |
class kp_module(nn.Module):
def __init__(self, n, dims, modules, layer=residual, make_up_layer=make_layer, make_low_layer=make_layer, make_hg_layer=make_layer, make_hg_layer_revr=make_layer_revr, make_pool_layer=make_pool_layer, make_unpool_layer=make_unpool_layer, make_merge_layer=make_merge_layer, **kwargs):
... |
def hash_value(value: Any) -> (int | str):
if (value is None):
return
if isinstance(value, (tuple, list)):
value = ''.join((str(hash_value(i)) for i in value))
if isinstance(value, Path):
value = str(value)
if isinstance(value, str):
value = value.encode()
if isinsta... |
def run_test_check_json_rpc_geth():
(g1, client, v1) = is_supported_client('Geth/v1.7.3-unstable-e9295163/linux-amd64/go1.9.1')
(g2, _, v2) = is_supported_client('Geth/v1.7.2-unstable-e9295163/linux-amd64/go1.9.1')
(g3, _, v3) = is_supported_client('Geth/v1.8.2-unstable-e9295163/linux-amd64/go1.9.1')
(g... |
def test_archs_platform_native(platform, intercepted_build_args, monkeypatch):
monkeypatch.setenv('CIBW_ARCHS', 'native')
main()
options = intercepted_build_args.args[0]
if (platform in {'linux', 'macos'}):
assert (options.globals.architectures == {Architecture.x86_64})
elif (platform == 'wi... |
def build_text_embedding_lvis(categories, model):
templates = multiple_templates
with torch.no_grad():
all_text_embeddings = []
for category in tqdm(categories):
texts = [template.format(processed_name(category, rm_dot=True), article=article(category)) for template in templates]
... |
def slot_values_to_seq_sql(original_slot_values, single_answer=False):
sql_str = ''
tables = OrderedDict()
col_value = dict()
slot_values = {}
for (slot, value) in original_slot_values.items():
if (' ' in slot):
slot = slot.replace(' ', '_')
slot_values[slot] = value
... |
class Encoder(nn.Module):
def __init__(self, g, in_feats, n_hidden, activation):
super(Encoder, self).__init__()
self.g = g
self.conv = GCN(g, in_feats, n_hidden, activation)
def forward(self, features, corrupt=False):
if corrupt:
perm = torch.randperm(self.g.number_o... |
class VirtualEnv(venv.EnvBuilder):
def __init__(self, install_args: list[str], index_url: (str | None)=None, extra_index_urls: list[str]=[], state: AuditState=AuditState()):
super().__init__(with_pip=True)
self._install_args = install_args
self._index_url = index_url
self._extra_inde... |
class Producer():
def __init__(self, name: str, *, enabled: bool=True):
self.name = name
self.enabled = enabled
def __repr__(self) -> str:
return f'{type(self).__name__}({self.name!r}, enabled={self.enabled})'
def __call__(self, *a: Any, **k: Any) -> None:
if self.enabled:
... |
class SingleContextMultipleEncodingWeightedSoftmaxModel(MultipleContextModel):
def __init__(self, encoder: QuestionsAndParagraphsEncoder, word_embed: Optional[WordEmbedder], char_embed: Optional[CharWordEmbedder], embed_mapper: Optional[Union[(SequenceMapper, ElmoWrapper)]], sequence_multi_encoder: SequenceMultiEnc... |
class BaseTest():
test_flag = False
SRE_TYPE = type(re.match('', ''))
(autouse=True)
def _reset(self):
self.test_flag = False
async def response(self, application, update):
self.test_flag = False
async with application:
(await application.process_update(update))
... |
.parametrize('filename, default_filetype', [('foo.yaml', 'notarealfiletype'), ('foo.yml', 'notarealfiletype'), ('foo.yaml', 'json'), ('foo.yml', 'json'), ('foo.yaml', 'yaml'), ('foo.yml', 'yaml'), ('foo', 'yaml')])
def test_instanceloader_yaml_data(tmp_path, filename, default_filetype, open_wide):
f = (tmp_path / f... |
def buildVocActNet(vocListOri):
    """Build word<->index lookup tables from a vocabulary list.

    Duplicate entries are dropped while preserving first-seen order, so
    indices match the order in which tokens first appear.

    Args:
        vocListOri: iterable of hashable vocabulary tokens (may contain
            duplicates).

    Returns:
        A ``(word2idx, idx2word)`` tuple of dicts mapping token -> index
        and index -> token respectively.
    """
    # dict.fromkeys dedups in O(n) while keeping insertion order; the
    # original scanned a growing list per element, which is O(n^2).
    vocList = list(dict.fromkeys(vocListOri))
    word2idx = {ele: i for i, ele in enumerate(vocList)}
    idx2word = {i: ele for i, ele in enumerate(vocList)}
    return (word2idx, idx2word)
class TorrentView(object):
def __init__(self, engine, viewname, matcher=None):
self.engine = engine
self.viewname = (viewname or 'default')
self.matcher = matcher
self._items = None
def __iter__(self):
return self.items()
def _fetch_items(self):
if (self._item... |
def map_instance_to_supertype(instance: Instance, superclass: TypeInfo) -> Instance:
if (instance.type == superclass):
return instance
if ((superclass.fullname == 'builtins.tuple') and instance.type.tuple_type):
if has_type_vars(instance.type.tuple_type):
alias = instance.type.specia... |
class PluginExecutor(BaseModel):
name: str
description: str
spec_model: SpecModel
meta_info: Dict[(str, Any)]
endpoint2caller: Dict[(str, Callable)]
endpoint2output_model: Dict[(str, Callable)]
api_key: str = None
class Config():
extra = Extra.forbid
arbitrary_types_allow... |
(HAS_TV_TUPLE)
def test_type_var_tuple_begin(model_spec, gen_models_ns):
from typing import Unpack
WithTVTupleBegin = gen_models_ns.WithTVTupleBegin
assert_fields_types(WithTVTupleBegin, {'a': Tuple[Unpack[Tuple[(Any, ...)]]], 'b': Any})
assert_fields_types(WithTVTupleBegin[(int, str)], {'a': Tuple[int]... |
def configurable(init_func=None, *, from_config=None):
if (init_func is not None):
assert (inspect.isfunction(init_func) and (from_config is None) and (init_func.__name__ == '__init__')), 'Incorrect use of Check API documentation for examples.'
(init_func)
def wrapped(self, *args, **kwargs)... |
class RandIntRV(RandomVariable):
name = 'randint'
ndim_supp = 0
ndims_params = [0, 0]
dtype = 'int64'
_print_name = ('randint', '\\operatorname{randint}')
def __call__(self, low, high=None, size=None, **kwargs):
if (high is None):
(low, high) = (0, low)
return super()... |
def upgrade(saveddata_engine):
if (saveddata_engine.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='fighters'").scalar() == 'fighters'):
try:
saveddata_engine.execute('SELECT active FROM fighters LIMIT 1')
except sqlalchemy.exc.DatabaseError:
saveddata_en... |
def test_ecbsr():
net = ECBSR(num_in_ch=1, num_out_ch=1, num_block=1, num_channel=4, with_idt=False, act_type='prelu', scale=4).cuda()
img = torch.rand((1, 1, 12, 12), dtype=torch.float32).cuda()
output = net(img)
assert (output.shape == (1, 1, 48, 48))
net = ECBSR(num_in_ch=3, num_out_ch=3, num_blo... |
class TwitterListener(StreamListener):
__listeners = {}
__lock = threading.RLock()
__max_size = 100
def get_listener(cls, phrases, subscriber):
with cls.__lock:
phrases = frozenset(map(str, phrases))
listener = cls.__listeners.get(phrases, None)
if (listener i... |
def build_fmtstr(id: Optional[Union[(int, str)]]=None, align: Optional[str]=None, field_len: Optional[Union[(int, str)]]=None, precision: Optional[Union[(int, str)]]=None, type: Optional[str]=None) -> str:
fmtstr = '{'
if (id is not None):
fmtstr += str(id)
fmtstr += ':'
if (align is not None):
... |
def _convolve1d2o_gpu(inp, out, ker, mode):
d_inp = cp.asarray(inp)
d_kernel = cp.asarray(ker)
(threadsperblock, blockspergrid) = _get_tpb_bpg()
k_type = 'convolve1D2O'
_populate_kernel_cache(out.dtype, k_type)
kernel = _get_backend_kernel(out.dtype, blockspergrid, threadsperblock, k_type)
k... |
def open_signal_receiver(*signals: (signal.Signals | int)) -> Generator[(AsyncIterator[int], None, None)]:
if (not signals):
raise TypeError('No signals were provided')
if (not is_main_thread()):
raise RuntimeError("Sorry, open_signal_receiver is only possible when running in Python interpreter'... |
class _WrappedModel():
def __init__(self, model, timestep_map, rescale_timesteps, original_num_steps):
self.model = model
self.timestep_map = timestep_map
self.rescale_timesteps = rescale_timesteps
self.original_num_steps = original_num_steps
def __call__(self, x, ts, **kwargs):
... |
class CPreprocessorParser(preprocessor.PreprocessorParser):
def __init__(self, cparser, **kwargs):
self.cparser = cparser
preprocessor.PreprocessorParser.__init__(self, **kwargs)
def push_file(self, filename, data=None):
if (not self.cparser.handle_include(filename)):
return
... |
def fidelityMatrixRandomUnitary(qnnArch, numTrainingPairs):
kind = 'randomUnitary'
networkUnitary = randomQubitUnitary(qnnArch[(- 1)])
trainingData = randomTrainingData(networkUnitary, numTrainingPairs)
fidMatrix = np.identity(numTrainingPairs)
for i in range(0, numTrainingPairs):
for j in r... |
class Migration(migrations.Migration):
    """Rename two Nomination fields to clearer names."""

    # Applies on top of migration 0036 of the 'api' app.
    dependencies = [('api', '0036_alter_nominations_api')]

    operations = [
        # unnominate_reason -> end_reason
        migrations.RenameField(
            model_name='nomination',
            old_name='unnominate_reason',
            new_name='end_reason',
        ),
        # unwatched_at -> ended_at
        migrations.RenameField(
            model_name='nomination',
            old_name='unwatched_at',
            new_name='ended_at',
        ),
    ]
class AsmCmdLockMover(AsmCmdCheckable):
_id = 15
_menuText = QT_TRANSLATE_NOOP('asm3', 'Lock mover')
_tooltip = QT_TRANSLATE_NOOP('asm3', 'Lock mover for fixed part')
_iconName = 'Assembly_LockMover.svg'
_saveParam = True
def Activated(cls, checked):
super(AsmCmdLockMover, cls).Activated... |
class Info(MutableMapping):
__readable__ = True
__writeable__ = True
__updateable__ = True
__deleteable__ = True
def __init__(self, *args, **kwargs):
super(Info, self).__init__(*args, **kwargs)
def __getattr__(self, name):
return self.__getitem__(name)
def __setattr__(self, n... |
('hyperlink.text is the visible text of the hyperlink')
def then_hyperlink_text_is_the_visible_text_of_the_hyperlink(context: Context):
    """BDD step: verify the hyperlink's ``.text`` is the expected visible text."""
    expected_value = 'awesome hyperlink'
    actual_value = context.hyperlink.text
    assert actual_value == expected_value, f'expected: {expected_value}, got: {actual_value}'
def load_annoataion(txt_path):
(boxes, labels) = ([], [])
fr = codecs.open(txt_path, 'r', 'utf-8')
lines = fr.readlines()
for line in lines:
b = line.split('\n')[0].split('\t')[:8]
line = list(map(float, b))
boxes.append(line)
labels.append('car')
return (np.array(box... |
class InputParams():
namespace: typing.Annotated[(str, validation.min(1))] = field(metadata={'name': 'Namespace', 'description': 'Namespace of the pod to which filter need to be appliedfor details.'})
direction: typing.List[str] = field(default_factory=(lambda : ['ingress', 'egress']), metadata={'name': 'Direct... |
class Svd():
def _check_params_and_throw(kw_args, expected_params, not_expected_params):
for param in expected_params:
if (param not in kw_args):
raise ValueError('Expected param: {} is missing'.format(param))
for param in not_expected_params:
if (param in kw_... |
class KazooClient(object):
def __init__(self, hosts='127.0.0.1:2181', timeout=10.0, client_id=None, handler=None, default_acl=None, auth_data=None, sasl_options=None, read_only=None, randomize_hosts=True, connection_retry=None, command_retry=None, logger=None, keyfile=None, keyfile_password=None, certfile=None, ca=... |
def convert_float(value):
if isinstance(value, str):
if (value[0] == '$'):
return value
try:
float(value)
except ValueError:
raise ValueError((value + 'is not a valid type of float input to openscenario, if a string is used as a float value (parameter or e... |
class TFormats(TestCase):
def setUp(self):
config.init()
def tearDown(self):
config.quit()
def test_presence(self):
self.assertTrue(formats.aac)
self.assertTrue(formats.aiff)
self.assertTrue(formats.midi)
self.assertTrue(formats.mod)
self.assertTrue(fo... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.