code stringlengths 281 23.7M |
|---|
class _lazyclassproperty():
def __init__(self, fn):
self.fn = fn
self.__doc__ = fn.__doc__
self.__name__ = fn.__name__
def __get__(self, obj, cls):
if (cls is None):
cls = type(obj)
if ((not hasattr(cls, '_intern')) or any(((cls._intern is getattr(superclass, ... |
class QlArchPPC(QlArch):
type = QL_ARCH.PPC
bits = 32
_property
def uc(self) -> Uc:
return Uc(UC_ARCH_PPC, (UC_MODE_PPC32 + UC_MODE_BIG_ENDIAN))
_property
def regs(self) -> QlRegisterManager:
regs_map = dict(**ppc_const.reg_map, **ppc_const.reg_float_map)
pc_reg = 'pc'
... |
class CollectAllInnerTypesQuery(TypeQuery[List[Type]]):
def __init__(self) -> None:
super().__init__(self.combine_lists_strategy)
def query_types(self, types: Iterable[Type]) -> list[Type]:
return (self.strategy([t.accept(self) for t in types]) + list(types))
def combine_lists_strategy(cls, ... |
def _write_saved_model(saved_model_path, frozen_graph_def, inputs, outputs):
with tf.Graph().as_default():
with session.Session() as sess:
tf.import_graph_def(frozen_graph_def, name='')
builder = tf.saved_model.builder.SavedModelBuilder(saved_model_path)
tensor_info_input... |
_metaclass(ABCMeta)
class Client(object):
def setup_client(self, registry_host, verify_tls):
def populate_test_image(self, registry_host, namespace, name):
def print_version(self):
def login(self, registry_host, username, password):
def push(self, registry_host, namespace, name):
def pre_pull_cl... |
def build_dataset(cfg, transforms, dataset_catalog, is_train=True):
dataset_list = (cfg.DATASETS.TRAIN if is_train else cfg.DATASETS.TEST)
if (not isinstance(dataset_list, (list, tuple))):
raise RuntimeError('dataset_list should be a list of strings, got {}'.format(dataset_list))
datasets = []
f... |
class TestWilderness(EvenniaTest):
def setUp(self):
super().setUp()
self.char1 = create_object(DefaultCharacter, key='char1')
self.char2 = create_object(DefaultCharacter, key='char2')
def get_wilderness_script(self, name='default'):
w = wilderness.WildernessScript.objects.get('de... |
class _CubeCameraRenderer(WgpuRenderer):
def __init__(self, target, blend_mode='default'):
assert _is_cube_texture(target), 'target must be a cube texture'
super().__init__(target, blend_mode=blend_mode)
self._target_views = []
for layer in range(6):
self._target_views.ap... |
_model
def identityformer_m48(pretrained=False, **kwargs):
model = MetaFormer(depths=[8, 8, 24, 8], dims=[96, 192, 384, 768], token_mixers=nn.Identity, norm_layers=partial(LayerNormGeneral, normalized_dim=(1, 2, 3), eps=1e-06, bias=False), **kwargs)
model.default_cfg = default_cfgs['identityformer_m48']
if ... |
class CenterCrop():
def __init__(self, size_image):
self.image_size = size_image
def __call__(self, image):
image = self.center_crop(image)
return image
def center_crop(self, image):
(w1, h1) = image.size
(tw, th) = self.image_size
if ((w1 == tw) and (h1 == th... |
def _version_split(version: str) -> List[str]:
result: List[str] = []
(epoch, _, rest) = version.rpartition('!')
result.append((epoch or '0'))
for item in rest.split('.'):
match = _prefix_regex.search(item)
if match:
result.extend(match.groups())
else:
res... |
def build_text_embedding_coco(categories, model):
templates = multiple_templates
with torch.no_grad():
zeroshot_weights = []
attn12_weights = []
for category in categories:
texts = [template.format(processed_name(category, rm_dot=True), article=article(category)) for template... |
class LinearAverage(nn.Module):
def __init__(self, inputSize, outputSize, T=0.07, momentum=0.5):
super(LinearAverage, self).__init__()
stdv = (1 / math.sqrt(inputSize))
self.nLem = outputSize
self.register_buffer('params', torch.tensor([T, momentum]))
stdv = (1.0 / math.sqrt(... |
class BaseTestVariablePath(BaseTestSimplePath):
path_name = '/resource/{resource_id}'
path_parameter_name = 'resource_id'
def parameter(self):
return {'name': self.path_parameter_name, 'in': 'path'}
def parameters(self, parameter):
return [parameter]
def path(self, operations, parame... |
def list_organization_member_permissions(organization, limit_to_user=None):
query = RepositoryPermission.select(RepositoryPermission, Repository, User).join(Repository).switch(RepositoryPermission).join(User).where((Repository.namespace_user == organization))
if (limit_to_user is not None):
query = quer... |
def test_acquire_batch_top_m(acq, xs, Y_mean, Y_var):
    """The acquired batch must be exactly the batch_sizes[0] points of highest Y_mean."""
    m = acq.batch_sizes[0]
    batch_xs = acq.acquire_batch(xs, Y_mean, Y_var, {})
    # Indices of the m largest predicted means, highest first
    # (equivalent to the reversed-slice form np.argsort(Y_mean)[:-1-m:-1]).
    expected_idxs = np.argsort(Y_mean)[::-1][:m]
    expected_xs = np.array(xs)[expected_idxs]
    assert len(batch_xs) == len(expected_xs)
    assert set(batch_xs) == set(expected_xs)
class BaseDataset(Dataset):
def __init__(self, root_dir, file_format=None, annotation_path=None, annotation_meta=None, annotation_format='json', max_samples=(- 1), mirror=False, transform_kwargs=None):
self.root_dir = root_dir
self.dataset_name = os.path.splitext(os.path.basename(self.root_dir))[0]
... |
class JiebaPreTokenizer():
def __init__(self, vocab) -> None:
self.vocab = vocab
self.normalizers = normalizers.BertNormalizer(clean_text=False, handle_chinese_chars=True, strip_accents=False, lowercase=False)
try:
import rjieba
except ImportError:
raise Impor... |
class PresetPrimeQol(PresetTab, Ui_PresetPrimeQol):
def __init__(self, editor: PresetEditor, game_description: GameDescription, window_manager: WindowManager):
super().__init__(editor, game_description, window_manager)
self.setupUi(self)
self.description_label.setText(self.description_label.... |
def raise_window(window, alert=True):
window.setWindowState((window.windowState() & (~ Qt.WindowState.WindowMinimized)))
window.setWindowState((window.windowState() | Qt.WindowState.WindowActive))
window.raise_()
QCoreApplication.processEvents((QEventLoop.ProcessEventsFlag.ExcludeUserInputEvents | QEven... |
class IterativeDTWAligner(object):
def __init__(self, n_iter=3, dist=(lambda x, y: norm((x - y))), radius=1, max_iter_gmm=100, n_components_gmm=16, verbose=0):
self.n_iter = n_iter
self.dist = dist
self.radius = radius
self.max_iter_gmm = max_iter_gmm
self.n_components_gmm = ... |
class BuildMo(Command):
description = 'build message catalog files'
user_options = [('lang=', None, 'build mo for <lang>')]
def initialize_options(self):
self.build_base: (str | None) = None
self.lang = None
self.po_build_dir: (str | None) = None
def finalize_options(self):
... |
def register_hook_for_densenet(model, arch, gamma):
    """Attach a gamma-scaled backward hook to every ReLU module of a DenseNet.

    Modules whose name contains 'transition' are excluded.  ``arch`` is accepted
    for interface parity with sibling register functions but is not read here.
    """
    # gamma ** 0.5: square-root decay of the scaling factor before building the hook.
    hook = backward_hook(np.sqrt(gamma))
    for name, module in model.named_modules():
        if 'relu' in name and 'transition' not in name:
            module.register_backward_hook(hook)
class resnetv1(Network):
def __init__(self, opt, batch_size=1, num_layers=50):
Network.__init__(self, batch_size=batch_size)
self._num_layers = num_layers
self.rnn_encoder = RNNEncoder(vocab_size=opt['vocab_size'], word_embedding_size=opt['word_embedding_size'], word_vec_size=opt['word_vec_s... |
class AmbiguousIntermediateExpander():
def __init__(self, tree_class, node_builder):
self.node_builder = node_builder
self.tree_class = tree_class
def __call__(self, children):
def _is_iambig_tree(child):
return (hasattr(child, 'data') and (child.data == '_iambig'))
d... |
class TaskLogSuccessDelHandler(BaseHandler):
.authenticated
async def get(self, taskid):
user = self.current_user
async with self.db.transaction() as sql_session:
task = self.check_permission((await self.db.task.get(taskid, fields=('id', 'tplid', 'userid', 'disabled'), sql_session=sq... |
.overload(operator.add)
def ga_add(a, b):
if (isinstance(a, MultiVectorType) and isinstance(b, MultiVectorType)):
if (a.layout_type != b.layout_type):
raise numba.TypingError('MultiVector objects belong to different layouts')
def impl(a, b):
return a.layout.MultiVector((a.val... |
class TestAHIHSDNavigation(unittest.TestCase):
('satpy.readers.ahi_hsd.np2str')
('satpy.readers.ahi_hsd.np.fromfile')
def test_region(self, fromfile, np2str):
from pyproj import CRS
np2str.side_effect = (lambda x: x)
m = mock.mock_open()
with mock.patch('satpy.readers.ahi_hsd... |
class _HoldingScopeFinder():
def __init__(self, pymodule):
self.pymodule = pymodule
def get_indents(self, lineno):
return codeanalyze.count_line_indents(self.lines.get_line(lineno))
def _get_scope_indents(self, scope):
return self.get_indents(scope.get_start())
def get_holding_sc... |
_grad()
def validate_itm(model, val_loader, task, step):
print('start running ITM validation...')
val_loss = 0
tot_score = 0
n_ex = 0
all_scores = []
all_targets = []
for (i, batch) in enumerate(val_loader):
scores = model(batch, task=task, compute_loss=False)
targets = Varia... |
def load_txt_info(gt_file, img_info):
anno_info = []
with open(gt_file, 'r') as f:
lines = f.readlines()
for line in lines:
(xmin, ymin, xmax, ymax) = line.split(',')[0:4]
x = max(0, int(xmin))
y = max(0, int(ymin))
w = (int(xmax) - x)
... |
class SeparableConv2D(layers.SeparableConv2D):
__doc__ += layers.SeparableConv2D.__doc__
def call(self, inputs, params=None):
if (params[(self.name + '/depthwise_kernel:0')] is None):
return super(layers.SeparableConv2D, self).call(inputs)
else:
depthwise_kernel = params.... |
def _get_sat_altitude(data_arr, key_prefixes):
orb_params = data_arr.attrs['orbital_parameters']
alt_keys = [(prefix + 'altitude') for prefix in key_prefixes]
try:
alt = _get_first_available_item(orb_params, alt_keys)
except KeyError:
alt = orb_params['projection_altitude']
warni... |
def deduplicate(population):
    """Filter *population* to the first occurrence of each non-empty SMILES.

    Each item is a ``(smiles, _)`` pair.  Items with an empty SMILES string are
    dropped, later duplicates of an already-seen SMILES are skipped, and the
    input order of the surviving items is preserved.
    """
    seen = set()
    kept = []
    for entry in population:
        smiles, _ = entry
        # Guard clauses: skip empty strings and anything already collected.
        if len(smiles) == 0 or smiles in seen:
            continue
        seen.add(smiles)
        kept.append(entry)
    return kept
class ReactionNetworkAssertion(ModelAssertion):
def __init__(self, *args, **kwargs):
super(ReactionNetworkAssertion, self).__init__(*args, **kwargs)
def check(self, model, **kwargs):
super(ReactionNetworkAssertion, self).check(model)
if (not model.species):
raise ModelAsserti... |
class BlockList(BaseDbModel):
class Meta():
table = 'block_list'
id = fields.IntField(pk=True)
block_id = fields.BigIntField()
block_id_type = fields.IntEnumField(BlockIdType)
blocked_by = fields.BigIntField(null=True)
reason = fields.CharField(max_length=250, null=True)
timestamp = ... |
class InstanceValidator():
requires_context = True
def __init__(self, instance=None):
self.instance = instance
self.serializer = None
def __call__(self, data, serializer=None):
if (serializer is not None):
self.instance = serializer.instance
self.serializer = ... |
class SpecifyShape(COp):
view_map = {0: [0]}
__props__ = ()
_f16_ok = True
_output_type_depends_on_input_value = True
def make_node(self, x, *shape):
from pytensor.tensor.basic import get_underlying_scalar_constant_value
x = ptb.as_tensor_variable(x)
shape = tuple(((NoneConst... |
def mosaic_cubes(cubes, spectral_block_size=100, combine_header_kwargs={}, **kwargs):
cube1 = cubes[0]
header = cube1.header
for cu in cubes[1:]:
header = combine_headers(header, cu.header, **combine_header_kwargs)
shape_opt = (header['NAXIS3'], header['NAXIS2'], header['NAXIS1'])
final_arra... |
class FakeGlobUnitTest(fake_filesystem_unittest.TestCase):
def setUp(self):
self.setUpPyfakefs()
directory = './xyzzy'
self.fs.create_dir(directory)
self.fs.create_dir(('%s/subdir' % directory))
self.fs.create_dir(('%s/subdir2' % directory))
self.fs.create_file(('%s/s... |
def accuracy(pred, target, topk=1, thresh=None):
assert isinstance(topk, (int, tuple))
if isinstance(topk, int):
topk = (topk,)
return_single = True
else:
return_single = False
maxk = max(topk)
if (pred.size(0) == 0):
accu = [pred.new_tensor(0.0) for i in range(len(to... |
def test_shufflenetv2_backbone():
with pytest.raises(ValueError):
ShuffleNetV2(widen_factor=3.0)
with pytest.raises(ValueError):
ShuffleNetV2(widen_factor=1.0, frozen_stages=4)
with pytest.raises(ValueError):
ShuffleNetV2(widen_factor=1.0, out_indices=(4,))
with pytest.raises(Typ... |
class GDIGlyphRenderer(Win32GlyphRenderer):
def __del__(self):
try:
if self._dc:
gdi32.DeleteDC(self._dc)
if self._bitmap:
gdi32.DeleteObject(self._bitmap)
except:
pass
def render(self, text: str) -> pyglet.font.base.Glyph:
... |
class Periodic(nn.Module):
def __init__(self, n_features: int, options: PeriodicOptions) -> None:
super().__init__()
if (options.initialization == 'log-linear'):
coefficients = (options.sigma ** (torch.arange(options.n) / options.n))
coefficients = coefficients[None].repeat(n... |
def ServoCalibrationThread(calibration):
servo = calibration.servo
def console(*text):
c = ''
for t in text:
c += (t + ' ')
calibration.console.set(c)
if printconsole:
print(c)
def command(value):
if (self.fwd_fault and (value < 0)):
... |
def get_bip44_purpose(addrtype: 'AddressType') -> int:
if (addrtype == AddressType.LEGACY):
return 44
elif (addrtype == AddressType.SH_WIT):
return 49
elif (addrtype == AddressType.WIT):
return 84
elif (addrtype == AddressType.TAP):
return 86
else:
raise Value... |
def test_apply_tag_hook(pytester):
pytester.makeconftest('\n import pytest\n\n (tryfirst=True)\n def pytest_bdd_apply_tag(tag, function):\n if tag == \'todo\':\n marker = pytest.mark.skipif(True, reason="Not implemented yet")\n marker(function)\n ... |
class SolverWrapper(object):
def __init__(self, network, loader, output_dir, tbdir, pretrained_model=None):
self.net = network
self.loader = loader
self.output_dir = output_dir
self.tbdir = tbdir
self.tbvaldir = (tbdir + '_val')
if (not os.path.exists(self.tbvaldir)):... |
def lin_reg_var_from_rss_of_sel(X, y, coef, intercept=None, zero_tol=1e-06, sample_weight=None):
if (sample_weight is not None):
raise NotImplementedError
n_nonzero = count_support(coef, zero_tol=zero_tol)
y_hat = (X coef)
if (intercept is not None):
y_hat += intercept
RSS = ((y - y... |
class STARTUPINFOW(Structure):
_fields_ = [('cb', DWORD), ('lpReserved', LPWSTR), ('lpDesktop', LPWSTR), ('lpTitle', LPWSTR), ('dwX', DWORD), ('dwY', DWORD), ('dwXSize', DWORD), ('dwYSize', DWORD), ('dwXCountChars', DWORD), ('dwYCountChars', DWORD), ('dwFillAttribute', DWORD), ('dwFlags', DWORD), ('wShowWindow', WO... |
class PrometheusInstrumentationTests(unittest.TestCase):
def setUp(self):
span_inner = mock.Mock()
span_inner.__enter__ = mock.Mock(return_value=mock.Mock())
span_inner.__exit__ = mock.Mock(return_value=None)
self.span = mock.Mock()
self.span.make_child = (lambda trace_name: ... |
def _set_random_seed(seed):
if ((seed is not None) and (seed > 0)):
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
if (torch.cuda.device_count() > 0):
mpu.model_parallel_cuda_manual_seed(seed)
else:
raise ValueError('Seed ({}) should be a posit... |
class AddressBookUI(UserInterface):
def assemble(self):
home = self.define_view('/', title='Show')
add = self.define_view('/add', title='Add')
edit = self.define_view('/edit', view_class=EditView, address_id=IntegerField())
home.set_slot('main', AddressBookPanel.factory())
ad... |
class Extension():
def __init__(self, name, sources, include_dirs=None, define_macros=None, undef_macros=None, library_dirs=None, libraries=None, runtime_library_dirs=None, extra_objects=None, extra_compile_args=None, extra_link_args=None, export_symbols=None, swig_opts=None, depends=None, language=None, optional=N... |
.parametrize('arg_list', [['version', '--short'], ['--version']])
def test_cli_version_short(cli_runner, arg_list):
result = cli_runner.invoke(cli.run, arg_list)
version = result.output.rstrip()
expected_short_version = get_system_spec()['raiden']
assert (version == expected_short_version)
assert (r... |
class BaseSSVM(BaseEstimator):
def __init__(self, model, max_iter=100, C=1.0, verbose=0, n_jobs=1, show_loss_every=0, logger=None):
self.model = model
self.max_iter = max_iter
self.C = C
self.verbose = verbose
self.show_loss_every = show_loss_every
self.n_jobs = n_job... |
def _format_traceback(frame=None, limit=None, offset=None):
limit = (None if (not limit) else abs(limit))
offset = (1 if (not offset) else (abs(offset) + 1))
(etype, value, tb) = sys.exc_info()
try:
stack = []
exception = []
callstack = traceback.extract_stack(frame)[::(- 1)][off... |
def once(func):
lock = threading.Lock()
def new_func(*args, **kwargs):
if new_func.called:
return
with lock:
if new_func.called:
return
rv = func(*args, **kwargs)
new_func.called = True
return rv
new_func = update_wr... |
def test_find_MAP_warning_non_free_RVs():
with pm.Model() as m:
x = pm.Normal('x')
y = pm.Normal('y')
det = pm.Deterministic('det', (x + y))
pm.Normal('z', det, 1e-05, observed=100)
msg = 'Intermediate variables (such as Deterministic or Potential) were passed'
with p... |
.parametrize('order', [12, 14, 16, 18])
.parametrize('length', [256, 512, 1024, 2048, 4096])
def test_mfcc(order, length):
def __test(length, order):
np.random.seed(98765)
dummy_input = np.random.rand(length)
cc = pysptk.mfcc(dummy_input, order, czero=True, power=True)
assert np.all(... |
def test_tooltip_click(page: Page):
expect(page.get_by_text('"last_object_clicked_tooltip":NULL')).to_be_visible()
page.frame_locator('internal:attr=[title="streamlit_folium.st_folium"i]').get_by_role('img').nth(0).click()
expect(page.get_by_text('"last_object_clicked_tooltip":"Liberty Bell"')).to_be_visibl... |
class TestNNT():
def test_return_infinity(self, data_set):
nnt = NNT()
nnt.fit(data_set, exposure='exp', outcome='dis')
assert np.isinf(nnt.number_needed_to_treat[1])
def test_match_inverse_of_risk_difference(self):
df = ze.load_sample_data(False)
rd = RiskDifference()
... |
class ASPP(nn.Module):
def __init__(self, in_channels, out_channels, dilations=(1, 3, 6, 1)):
super().__init__()
assert (dilations[(- 1)] == 1)
self.aspp = nn.ModuleList()
for dilation in dilations:
kernel_size = (3 if (dilation > 1) else 1)
padding = (dilatio... |
def main(_):
prepare_dirs_and_logger(config)
if (not config.task.lower().startswith('tsp')):
raise Exception('[!] Task should starts with TSP')
if (config.max_enc_length is None):
config.max_enc_length = config.max_data_length
if (config.max_dec_length is None):
config.max_dec_le... |
def deprecated(version, *, thing=None, issue, instead):
def do_wrap(fn):
nonlocal thing
(fn)
def wrapper(*args, **kwargs):
warn_deprecated(thing, version, instead=instead, issue=issue)
return fn(*args, **kwargs)
if (thing is None):
thing = wrapper
... |
def create_conf_file(use_requests: bool=False, use_swagger: bool=False) -> Union[(Exception, str)]:
CONFIG_FILE = os.getenv(CONFIGMAP_FILE_ENVIRONMENT, None)
if (not CONFIG_FILE):
CONFIG_FILE = DEFAULT_CONFIGMAP_FILENAME
if os.path.exists(CONFIG_FILE):
raise FileExistsError("Config file alre... |
class Synchronizer():
def __init__(self, module, dummy_batch, *forward_args, enabled=True, debug=False, dgrid=None, **forward_kwargs):
self.module = module
self.dummy_batch = copy.deepcopy(dummy_batch)
self.enabled = enabled
self.forward_args = forward_args
self.forward_kwarg... |
def test_resize(qtbot):
label = TextBase()
qtbot.add_widget(label)
long_string = ('Hello world! ' * 20)
label.setText(long_string)
with qtbot.wait_exposed(label):
label.show()
text_1 = label._elided_text
label.resize(20, 50)
text_2 = label._elided_text
assert (text_1 != text_... |
class Predictor(object):
def __init__(self, config):
self.config = config
self.model_name = config.model_name
self.use_cuda = config.device.startswith('cuda')
self.dataset_name = 'ClassificationDataset'
self.collate_name = ('FastTextCollator' if (self.model_name == 'FastText'... |
def run_aiter(tracer, async_iterator):
    """Drain *async_iterator* through a TracingAsyncIterator, printing each item.

    The module-level tracer used by the observability metrics is patched to the
    supplied *tracer* for the duration of the run.
    """

    async def _consume():
        with TracingAsyncIterator('test', async_iterator) as wrapped:
            async for item in wrapped:
                print(item)

    with patch('rayllm.backend.observability.fn_call_metrics.tracer', tracer):
        asyncio.run(_consume())
class TestTensorKey():
def test_eq(self):
x = torch.tensor((0.0, 0.5, 1.0))
key = pystiche.TensorKey(x)
assert (key == key)
assert (key == pystiche.TensorKey(x.flip(0)))
def test_eq_precision(self):
x = torch.tensor(1.0)
y = torch.tensor(1.0001)
assert (py... |
class FormattedString(StringMixin, Raw):
def __init__(self, src_str, **kwargs):
super(FormattedString, self).__init__(**kwargs)
self.src_str = str(src_str)
def output(self, key, obj, **kwargs):
try:
data = to_marshallable_type(obj)
return self.src_str.format(**dat... |
class _Int(_PrimitiveTemplateBase):
_valid_predicates = {Range}
def is_element(self, value):
return ((value is not True) and (value is not False) and isinstance(value, numbers.Integral))
def is_symbol_subtype(self, other):
if (other.get_name() == 'Float'):
return True
ret... |
def main(args):
pruning_method = args.pruning_method
threshold = args.threshold
model_name_or_path = args.model_name_or_path.rstrip('/')
target_model_path = args.target_model_path
print(f'Load fine-pruned model from {model_name_or_path}')
model = torch.load(os.path.join(model_name_or_path, 'pyto... |
class RadixSoftmax(nn.Module):
def __init__(self, radix, cardinality):
super(RadixSoftmax, self).__init__()
self.radix = radix
self.cardinality = cardinality
def forward(self, x):
batch = x.size(0)
if (self.radix > 1):
x = x.view(batch, self.cardinality, self.... |
class SetChatPermissions():
async def set_chat_permissions(self: 'pyrogram.Client', chat_id: Union[(int, str)], permissions: 'types.ChatPermissions') -> 'types.Chat':
r = (await self.invoke(raw.functions.messages.EditChatDefaultBannedRights(peer=(await self.resolve_peer(chat_id)), banned_rights=raw.types.Ch... |
def build_lm(args, data_lower, vocab_str):
print('\nCreating ARPA file ...')
lm_path = os.path.join(args.output_dir, 'lm.arpa')
subargs = [os.path.join(args.kenlm_bins, 'lmplz'), '--order', str(args.arpa_order), '--temp_prefix', args.output_dir, '--memory', args.max_arpa_memory, '--text', data_lower, '--arp... |
class base_dataset_parser(base_parser):
def __init__(self, dataset_config_path):
self.parser = DefaultConfigParser()
parser = self.parser
config = {}
if (len(self.parser.read(dataset_config_path)) == 0):
raise ValueError('dataset_parser(): %s not found', dataset_config_pa... |
.parametrize('dynamic', [False, True])
def test_setting_process(dynamic):
class Fake(FakeBase):
x = CommonBase.setting('OUT %d', '', set_process=(lambda v: int(bool(v))), dynamic=dynamic)
fake = Fake()
fake.x = False
assert (fake.read() == 'OUT 0')
fake.x = 2
assert (fake.read() == 'OUT ... |
class TestTransformerEchos(unittest.TestCase):
def test_default(self):
tfm = new_transformer()
tfm.echos()
actual_args = tfm.effects
expected_args = ['echos', '0.800000', '0.900000', '60.000000', '0.400000']
self.assertEqual(expected_args, actual_args)
actual_log = tf... |
def test_entry_points(copy_sample):
td = copy_sample('entrypoints_valid')
make_wheel_in((td / 'pyproject.toml'), td)
assert_isfile((td / 'package1-0.1-py2.py3-none-any.whl'))
with unpack((td / 'package1-0.1-py2.py3-none-any.whl')) as td_unpack:
entry_points = Path(td_unpack, 'package1-0.1.dist-i... |
def main() -> None:
build_dir = 'build'
try:
os.mkdir(build_dir)
except FileExistsError:
pass
opt_level = os.getenv('MYPYC_OPT_LEVEL', '3')
debug_level = os.getenv('MYPYC_DEBUG_LEVEL', '1')
setup_file = os.path.join(build_dir, 'setup.py')
with open(setup_file, 'w') as f:
... |
def test_ast_invalid_slice2():
class Parametrized(ComponentLevel2):
def construct(s, nbits):
s.x = Wire((nbits * 2))
def upA():
print(s.x[1][1:2][1])
a = Parametrized(1)
try:
a.elaborate()
except TypeError as e:
print(e)
return
... |
def divide(n, iterable):
if (n < 1):
raise ValueError('n must be at least 1')
try:
iterable[:0]
except TypeError:
seq = tuple(iterable)
else:
seq = iterable
(q, r) = divmod(len(seq), n)
ret = []
stop = 0
for i in range(1, (n + 1)):
start = stop
... |
def parse_args():
parser = argparse.ArgumentParser(description='mmpose test model')
parser.add_argument('config', help='test config file path')
parser.add_argument('checkpoint', help='checkpoint file')
parser.add_argument('--out', help='output result file')
parser.add_argument('--work-dir', help='th... |
def gather_training_data(env, data, filename='demo_playback.mp4', render=None):
env = env.env
FPS = 30
render_skip = max(1, round((1.0 / ((FPS * env.sim.model.opt.timestep) * env.frame_skip))))
t0 = timer.time()
env.reset()
init_qpos = data['qpos'][0].copy()
init_qvel = data['qvel'][0].copy(... |
def test_forward_ref():
assert (get_dataclass_shape(FRParent) == Shape(input=InputShape(constructor=FRParent, kwargs=None, fields=(InputField(type=int, id='fr_field', default=NoDefault(), is_required=True, metadata=MappingProxyType({}), original=ANY),), params=(Param(field_id='fr_field', name='fr_field', kind=Param... |
class MMapIndexedDatasetBuilder():
def __init__(self, out_file, dtype=np.int64):
self._data_file = open(out_file, 'wb')
self._dtype = dtype
self._sizes = []
def add_item(self, tensor):
np_array = np.array(tensor.numpy(), dtype=self._dtype)
self._data_file.write(np_array.t... |
def align_and_filter_dataset(args, t):
temp_folder = f'{args.out}_imagefolder'
if primary():
os.makedirs(temp_folder, exist_ok=True)
os.makedirs(args.out, exist_ok=True)
dataset = MultiResolutionDataset(args.real_data_path, resolution=args.real_size, transform=None)
if (args.flow_scores ... |
def embeddings(idx):
embed = []
embed.append((f'cvt.encoder.stages.{idx}.embedding.convolution_embeddings.projection.weight', f'stage{idx}.patch_embed.proj.weight'))
embed.append((f'cvt.encoder.stages.{idx}.embedding.convolution_embeddings.projection.bias', f'stage{idx}.patch_embed.proj.bias'))
embed.ap... |
class HeapAllocator(object):
def __init__(self, start: Addr, end: Addr, memory: Memory):
self.start: Addr = start
self.end: Addr = end
self._curr_offset: Addr = self.start
self._memory = memory
self.alloc_pool = dict()
self.free_pool = dict()
def alloc(self, size:... |
class DmgPatternNameValidator(BaseValidator):
def __init__(self):
BaseValidator.__init__(self)
def Clone(self):
return DmgPatternNameValidator()
def Validate(self, win):
entityEditor = win.Parent.parent
textCtrl = self.GetWindow()
text = textCtrl.GetValue().strip()
... |
def test_scroll_zoom_toggler():
m = folium.Map([45.0, 3.0], zoom_start=4)
szt = plugins.ScrollZoomToggler()
m.add_child(szt)
out = normalize(m._parent.render())
tmpl = Template('\n <img id="{{this.get_name()}}" alt="scroll"\n src=" style="z-index: 999999"\n onclick="{{thi... |
def write_fst_with_silence(lexicon, sil_prob, sil_phone, sil_disambig, nonterminals=None, left_context_phones=None):
assert ((sil_prob > 0.0) and (sil_prob < 1.0))
sil_cost = (- math.log(sil_prob))
no_sil_cost = (- math.log((1.0 - sil_prob)))
start_state = 0
loop_state = 1
sil_state = 2
next... |
class TestRandomAccessFloatPairVectorReader(_TestRandomAccessReaders, unittest.TestCase, FloatPairVectorExampleMixin):
def checkRead(self, reader):
self.assertEqual([(1.0, 1.0)], reader['one'])
self.assertEqual([], reader['three'])
self.assertEqual([(2.0, 3.0), (4.0, 5.0)], reader['two'])
... |
class ScrimSlotSelector(discord.ui.Select):
def __init__(self, slots: List[AssignedSlot], *, placeholder: str, multiple=False):
_options = []
for slot in slots:
_options.append(discord.SelectOption(label=f'Slot {slot.num}', description=ts(slot.team_name, 22), emoji=emote.TextChannel, val... |
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, cbam=None, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, padding=1, stride=stride)
self.bn1 = nn.BatchNorm2d(planes)
self.relu ... |
def get_cifar10(mean=(0.4914, 0.4822, 0.4465), std=(0.2023, 0.1994, 0.201), padding=(4, 4), root='./data', download=False):
transform_train = transforms.Compose([transforms.RandomCrop(32, padding=tuple(padding)), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize(mean, std)])
transfo... |
class TestUCASAODGWD(TestUCASAOD):
def eval(self):
gwd = build_whole_network.DetectionNetworkGWD(cfgs=self.cfgs, is_training=False)
all_boxes_r = self.eval_with_plac(img_dir=self.args.img_dir, det_net=gwd, image_ext=self.args.image_ext)
imgs = os.listdir(self.args.img_dir)
real_test_... |
def test_polar_stereographic_a_operation():
aeaop = PolarStereographicAConversion(latitude_natural_origin=(- 90), longitude_natural_origin=2, false_easting=3, false_northing=4, scale_factor_natural_origin=0.5)
assert (aeaop.name == 'unknown')
assert (aeaop.method_name == 'Polar Stereographic (variant A)')
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.