code stringlengths 281 23.7M |
|---|
def _load_repl_repr_utt_configs(flags, model_parser):
assert flags.repl_utt_set_name
assert flags.repl_utt_repr_spec
assert flags.repl_utt_wspec
assert flags.repl_utt_list
assert flags.repl_utt_img_dir
assert flags.repl_utt_id_map
(exp_dir, set_name, model_conf, train_conf, dataset_conf) = _... |
def dataloader_cifar10(data_root, split='train', batch_size=128):
if (split == 'train'):
data_transform = transforms.Compose([transforms.RandomCrop(32, padding=4), transforms.RandomHorizontalFlip(), transforms.ToTensor(), transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.201))])
trai... |
class RunningLogger(BaseController):
def __init__(self, config=None):
config = (config or dict())
config.setdefault('priority', 90)
config.setdefault('every_n_iters', 1)
super().__init__(config)
self._log_order = config.get('log_order', None)
self._log_resources = con... |
.fast
def test_operations_inplace(verbose=True, *args, **kwargs):
from radis.phys.units import Unit
s = load_spec(getTestFile('CO_Tgas1500K_mole_fraction0.01.spec'), binary=True)
s = s.take('radiance_noslit')
I_max = s.get('radiance_noslit', Iunit='mW/cm2/sr/nm')[1].max()
s += (1 * Unit('mW/cm2/sr/n... |
class BasicStem(CNNBlockBase):
def __init__(self, in_channels=3, out_channels=64, norm='BN'):
super().__init__(in_channels, out_channels, 4)
self.in_channels = in_channels
self.conv1 = Conv2d(in_channels, out_channels, kernel_size=7, stride=2, padding=3, bias=False, norm=get_norm(norm, out_c... |
class DefacingInterface(BaseReviewInterface):
def __init__(self, fig, axes, issue_list=cfg.defacing_default_issue_list, next_button_callback=None, quit_button_callback=None, processing_choice_callback=None, map_key_to_callback=None):
super().__init__(fig, axes, next_button_callback, quit_button_callback)
... |
def write_config():
print('Writing config file...')
content = CONFIG_TEMPLATE.substitute(all_region_aws_key_names=json.dumps(ALL_REGION_AWS_KEY_NAMES, indent=4), all_subnet_info=json.dumps(ALL_SUBNET_INFO, indent=4), all_region_aws_security_group_ids=json.dumps(ALL_REGION_AWS_SECURITY_GROUP_IDS, indent=4), s3_b... |
def generate_interleaved_data(results_file_path_original: str, results_file_path_interleaved: str, expected_results_file_path: str):
rb_opts = {}
shots = 200
rb_opts['nseeds'] = 2
rb_opts['rb_pattern'] = [[0, 2], [1]]
rb_opts['length_vector'] = np.arange(1, 100, 10)
rb_opts['length_multiplier'] ... |
class LFU_EvictionPolicy(MCHEvictionPolicy):
def __init__(self, threshold_filtering_func: Optional[Callable[([torch.Tensor], Tuple[(torch.Tensor, Union[(float, torch.Tensor)])])]]=None) -> None:
super().__init__(metadata_info=[MCHEvictionPolicyMetadataInfo(metadata_name='counts', is_mch_metadata=True, is_hi... |
_LOSS.register_module()
class MultiLoss(nn.Module):
def __init__(self, loss_cfgs):
super().__init__()
assert isinstance(loss_cfgs, list)
self.num_losses = len(loss_cfgs)
losses = []
for loss_cfg in loss_cfgs:
losses.append(OPENOCC_LOSS.build(loss_cfg))
sel... |
def stream(stream):
    """Generator-based context helper that runs its body on a CUDA stream.

    NOTE(review): the original file almost certainly decorated this with
    ``@contextlib.contextmanager`` (decorators appear stripped in this
    extract) — confirm before relying on it as a ``with``-target.

    If *stream* is None, the body runs with no stream context at all.
    Otherwise the body runs inside ``torch.cuda.stream(stream)``, and — when
    cupy imported successfully (module-level ``_cupy_import_error`` is None)
    — additionally inside a cupy ``ExternalStream`` wrapping the same
    underlying CUDA stream, so cupy kernels are queued on it too.
    """
    if (stream is None):
        # No stream given: yield with no CUDA stream context.
        (yield)
        return
    with torch.cuda.stream(stream):
        if (_cupy_import_error is None):
            # Mirror the torch stream into cupy so both libraries enqueue
            # work on the same underlying CUDA stream handle.
            cupy_stream = cupy.cuda.ExternalStream(stream.cuda_stream)
            with cupy_stream:
                (yield)
        else:
            # cupy unavailable: torch stream context only.
            (yield)
class Center(VersionBase):
def __init__(self, x, y, z):
self.x = convert_float(x)
self.y = convert_float(y)
self.z = convert_float(z)
def parse(element):
x = convert_float(element.attrib['x'])
y = convert_float(element.attrib['y'])
z = convert_float(element.attrib... |
class RefInfoSuite(DataSuite):
required_out_section = True
files = ['ref-info.test']
def run_case(self, testcase: DataDrivenTestCase) -> None:
options = Options()
options.use_builtins_fixtures = True
options.show_traceback = True
options.export_ref_info = True
src = '... |
('/PenguinDome/v1/update', methods=('POST',))
('/penguindome/v1/update', methods=('POST',))
_signature
_deprecated_port
_werkzeug_hostname
def update():
db = get_db()
data = json.loads(request.form['data'])
hostname = data['hostname']
old_release = data['old_release']
releases = sorted((r for r in o... |
def _move_to_room(caller, raw_string, **kwargs):
room = kwargs['room']
room.msg_char(caller, f"Entering room |c'{room.name}'|n ...")
room.msg_room(caller, f'~You |c~were just tricked in here too!|n')
old_location = caller.location
caller.location = room
room.at_object_receive(caller, old_locatio... |
def export_per_layer_sensitivity_analysis_plot(layer_wise_eval_score_dict: Dict, results_dir: str, title: str) -> plotting.Figure:
layer_names = []
eval_scores = []
for (layer_name, eval_score) in layer_wise_eval_score_dict.items():
layer_names.append(layer_name)
eval_scores.append(eval_scor... |
class Or(object):
    """Logical disjunction of two conditions built from config dicts.

    Each argument is a single-entry dict mapping a class name (resolved in
    ``current_module``) to the keyword arguments for that class's
    constructor. Calling the instance evaluates the first condition and,
    only if it is falsy, the second.
    """

    def __init__(self, c1, c2):
        self.c1 = self._instantiate(c1)
        self.c2 = self._instantiate(c2)

    @staticmethod
    def _instantiate(spec):
        # The dict's first key names a condition class; its value holds the
        # constructor kwargs.
        cls_name = list(spec)[0]
        return current_module.__dict__[cls_name](**spec[cls_name])

    def __call__(self, variables):
        """Short-circuit OR of the two wrapped conditions."""
        return self.c1(variables) or self.c2(variables)
def _init_weight(m, n=''):
def _fan_in_out(w, groups=1):
dimensions = w.dim()
if (dimensions < 2):
raise ValueError('Fan in and fan out can not be computed for tensor with fewer than 2 dimensions')
num_input_fmaps = w.size(1)
num_output_fmaps = w.size(0)
receptive... |
class GaussianGRUPolicy(StochasticPolicy, LayersPowered, Serializable):
def __init__(self, name, env_spec, hidden_dim=32, feature_network=None, state_include_action=True, hidden_nonlinearity=tf.tanh, gru_layer_cls=L.GRULayer, learn_std=True, init_std=1.0, output_nonlinearity=None):
with tf.variable_scope(na... |
def import_func_from_string(func_string: str):
func = getattr(np, func_string, None)
if (func is not None):
return func
module = None
items = func_string.split('.')
for idx in range(1, len(items)):
try:
module = __import__('.'.join(items[:idx]))
except ImportError... |
def complex_phase_cmap():
cdict = {'blue': ((0.0, 0.0, 0.0), (0.25, 0.0, 0.0), (0.5, 1.0, 1.0), (0.75, 1.0, 1.0), (1.0, 0.0, 0.0)), 'green': ((0.0, 0.0, 0.0), (0.25, 1.0, 1.0), (0.5, 0.0, 0.0), (0.75, 1.0, 1.0), (1.0, 0.0, 0.0)), 'red': ((0.0, 1.0, 1.0), (0.25, 0.5, 0.5), (0.5, 0.0, 0.0), (0.75, 0.0, 0.0), (1.0, 1.... |
class DwsConvBlock(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride, padding, activate):
super(DwsConvBlock, self).__init__()
self.activate = activate
if self.activate:
self.activ = nn.ReLU(inplace=False)
self.conv = DwsConv(in_channels=in_cha... |
def output_data(d, r, target_name):
log('got {0}:{1}, length {2}', d.get_atom_name(r.property_type), r.format, len(r.value))
if (r.format == 8):
if (r.property_type == Xatom.STRING):
value = r.value.decode('ISO-8859-1')
elif (r.property_type == d.get_atom('UTF8_STRING')):
... |
class UnaryScalarOp(ScalarOp):
nin = 1
amd_float32: Optional[str] = None
amd_float64: Optional[str] = None
def c_code_contiguous(self, node, name, inputs, outputs, sub):
(x,) = inputs
(z,) = outputs
if ((not config.lib__amblibm) or (node.inputs[0].type != node.outputs[0].type)):
... |
class Migration(migrations.Migration):
dependencies = [('grants', '0011_alter_grant_user')]
operations = [migrations.AddField(model_name='grant', name='community_contribution', field=models.TextField(blank=True, verbose_name='Community contribution')), migrations.AddField(model_name='grant', name='github_handle... |
class TestSpiderDev0(unittest.TestCase):
(ONE_TEST_TIMEOUT)
def test_spider_dev(self):
split_name = 'dev'
i_query = 0
db_id = get_db_id(split_name, i_query)
(rdf_graph, schema) = get_graph_and_schema(split_name, db_id)
sql_query = get_sql_query(split_name, i_query)
... |
class ST_HexColor(BaseStringType):
def convert_from_xml(cls, str_value: str) -> (RGBColor | str):
if (str_value == 'auto'):
return ST_HexColorAuto.AUTO
return RGBColor.from_string(str_value)
def convert_to_xml(cls, value: RGBColor) -> str:
return ('%02X%02X%02X' % value)
... |
def _get_mangle(prefix, aliases, base_mangle=None):
def mangle(s):
if (s in aliases):
s = aliases[s]
elif (s[0] == '_'):
s = ('_%s__%s' % (prefix, s[1:]))
else:
s = ('%s__%s' % (prefix, s))
if (base_mangle is not None):
s = base_mangle(... |
class CharacterStyle(BaseStyle):
def base_style(self):
base_style = self._element.base_style
if (base_style is None):
return None
return StyleFactory(base_style)
_style.setter
def base_style(self, style):
style_id = (style.style_id if (style is not None) else None... |
.parametrize('constructor', [get_core_metadata_constructors()['2.2']])
class TestCoreMetadataV22():
def test_default(self, constructor, isolation, helpers):
metadata = ProjectMetadata(str(isolation), None, {'project': {'name': 'My.App', 'version': '0.1.0'}})
assert (constructor(metadata) == helpers.... |
class Vizio(VizioAsync):
def __init__(self, device_id: str, ip: str, name: str, auth_token: str='', device_type: str=DEFAULT_DEVICE_CLASS, timeout: int=DEFAULT_TIMEOUT) -> None:
super(Vizio, self).__init__(device_id, ip, name, auth_token, device_type, session=None, timeout=timeout)
def discovery_zerocon... |
(portal.IRealm)
class TestAuthRealm(object):
def __init__(self, template=BASIC_AUTH_PAGE):
self.template = template
def requestAvatar(self, avatarId, mind, *interfaces):
if (IResource in interfaces):
if (avatarId == checkers.ANONYMOUS):
return (IResource, TestHTTPUser... |
def train(train_queue, valid_queue, model, architect, criterion, optimizer, lr, epoch):
objs = utils.AvgrageMeter()
top1 = utils.AvgrageMeter()
top5 = utils.AvgrageMeter()
for (step, (input, target)) in enumerate(train_queue):
model.train()
n = input.size(0)
input = input.cuda()
... |
def mlp(input_, dim):
n_hidden1 = int((dim * 0.8))
n_hidden2 = int((n_hidden1 * 0.8))
n_out = int((n_hidden2 * 0.8))
with tf.variable_scope('mlp'):
h1 = tf.Variable(tf.random_normal([dim, n_hidden1]))
h2 = tf.Variable(tf.random_normal([n_hidden1, n_hidden2]))
hout = tf.Variable(t... |
class ParallelModel(KM.Model):
def __init__(self, keras_model, gpu_count):
self.inner_model = keras_model
self.gpu_count = gpu_count
merged_outputs = self.make_parallel()
super(ParallelModel, self).__init__(inputs=self.inner_model.inputs, outputs=merged_outputs)
def __getattribut... |
class TestReproducibility(unittest.TestCase):
def _test_reproducibility(self, name, extra_flags=None):
if (extra_flags is None):
extra_flags = []
with tempfile.TemporaryDirectory(name) as data_dir:
with contextlib.redirect_stdout(StringIO()):
test_binaries.cre... |
class EpisodeRunner():
def __init__(self, args, logger):
self.args = args
self.logger = logger
self.batch_size = self.args.batch_size_run
assert (self.batch_size == 1)
self.env = env_REGISTRY[self.args.env](**self.args.env_args)
self.episode_limit = self.env.episode_l... |
class TaskHandleTest(unittest.TestCase):
def test_trivial_case(self):
handle = rope.base.taskhandle.TaskHandle()
self.assertFalse(handle.is_stopped())
def test_stopping(self):
handle = rope.base.taskhandle.TaskHandle()
handle.stop()
self.assertTrue(handle.is_stopped())
... |
class Player():
def __init__(self):
self.x = 320
self.y = 240
self.speed = 4
def move_left(self):
self.x -= self.speed
def move_right(self):
self.x += self.speed
def move_up(self):
self.y -= self.speed
def move_down(self):
self.y += self.speed |
class TfPreprocessTransform():
def __init__(self, is_training=False, size=224, interpolation='bicubic'):
self.is_training = is_training
self.size = (size[0] if isinstance(size, tuple) else size)
self.interpolation = interpolation
self._image_bytes = None
self.process_image = ... |
_task('semisupervised_translation')
class SemisupervisedTranslationTask(MultilingualTranslationTask):
def add_args(parser):
MultilingualTranslationTask.add_args(parser)
parser.add_argument('--lambda-parallel-config', default='1.0', type=str, metavar='CONFIG', help='cross-entropy reconstruction coeff... |
class SimpleCNNMNIST(nn.Module):
def __init__(self, input_dim, hidden_dims, output_dim=10):
super(SimpleCNNMNIST, self).__init__()
self.conv1 = nn.Conv2d(1, 6, 5)
self.pool = nn.MaxPool2d(2, 2)
self.conv2 = nn.Conv2d(6, 16, 5)
self.fc1 = nn.Linear(input_dim, hidden_dims[0])
... |
class StackWidget(QtWidgets.QTreeWidget):
def __init__(self, parent=None):
QtWidgets.QTreeWidget.__init__(self, parent)
self.setAlternatingRowColors(True)
self.setHeaderHidden(True)
def selectedFrame(self):
sel = self.selectedItems()
if (len(sel) == 0):
return... |
def test():
print('Testing LAParser')
testcases = [('Scalar addition', 'a = b+c', 'a=(b+c)'), ('Vector addition', 'V3_a = V3_b + V3_c', 'vCopy(a,vAdd(b,c))'), ('Vector addition', 'V3_a=V3_b+V3_c', 'vCopy(a,vAdd(b,c))'), ('Matrix addition', 'M3_a = M3_b + M3_c', 'mCopy(a,mAdd(b,c))'), ('Matrix addition', 'M3_a=M... |
_filter('unique')
class UniqueFilter(BaseFilter, FileManagerAware):
def __init__(self, _):
self.unique = self.get_unique()
def __call__(self, fobj):
return (fobj in self.unique)
def __str__(self):
return '<Filter: unique>'
def get_unique(self):
unique = set()
for ... |
class InceptionA(nn.Module):
def __init__(self, in_channels):
super(InceptionA, self).__init__()
self.branch1_1 = nn.Conv2d(in_channels, 16, kernel_size=1)
self.branch5_5_1 = nn.Conv2d(in_channels, 16, kernel_size=1)
self.branch5_5_2 = nn.Conv2d(16, 24, kernel_size=5, padding=2)
... |
class AttributeViewSet(ModelViewSet):
permission_classes = ((HasModelPermission | HasObjectPermission),)
queryset = Attribute.objects.annotate(values_count=models.Count('values')).annotate(projects_count=models.Count('values__project', distinct=True)).prefetch_related('conditions', 'pages', 'questionsets', 'que... |
class NPM(Command):
description = 'install package.json dependencies using npm'
user_options = []
node_modules = join(node_root, 'node_modules')
targets = [join(here, 'qgrid', 'static', 'extension.js'), join(here, 'qgrid', 'static', 'index.js')]
def initialize_options(self):
pass
def fin... |
class LinformerTransformerEncoder(TransformerEncoder):
def __init__(self, args, dictionary, embed_tokens):
self.compress_layer = None
super().__init__(args, dictionary, embed_tokens)
def build_encoder_layer(self, args):
if ((self.args.shared_layer_kv_compressed == 1) and (self.compress_l... |
def _ldflags(ldflags_str, libs, flags, libs_dir, include_dir):
rval = []
if libs_dir:
found_dyn = False
dirs = [x[2:] for x in ldflags_str.split() if x.startswith('-L')]
l = _ldflags(ldflags_str=ldflags_str, libs=True, flags=False, libs_dir=False, include_dir=False)
for d in dirs... |
class MockMessageReference(CustomMockMixin, unittest.mock.MagicMock):
spec_set = message_reference_instance
def __init__(self, *, reference_author_is_bot: bool=False, **kwargs):
super().__init__(**kwargs)
referenced_msg_author = MockMember(name='bob', bot=reference_author_is_bot)
self.re... |
def test_write_two_disjoint_slices_no_reader():
class Top(ComponentLevel3):
def construct(s):
s.A = Wire(Bits32)
def up_wr_0_16():
s.A[0:16] = Bits16(255)
def up_wr_16_30():
s.A[16:30] = Bits14(255)
def up_rd_17_30():
... |
def with_defaults(**default_funcs):
def decorator(f):
(f)
def method(self, *args, **kwargs):
for (name, func) in iteritems(default_funcs):
if (name not in kwargs):
kwargs[name] = func(self)
return f(self, *args, **kwargs)
return met... |
def loading_scene_list(args):
scenes = []
for i in range(4):
if (args.phase == 'train'):
for j in range(20):
if (i == 0):
scenes.append(('FloorPlan' + str((j + 1))))
else:
scenes.append((('FloorPlan' + str((i + 1))) + ('... |
def main():
args = parse_args()
root_path = args.root_path
ratio = args.val_ratio
(trn_files, val_files) = collect_files(osp.join(root_path, 'imgs'), osp.join(root_path, 'annotations'), ratio)
trn_infos = collect_annotations(trn_files, nproc=args.nproc)
with mmcv.Timer(print_tmpl='It takes {}s t... |
class MyOp(DeepCopyOp):
nb_called = 0
def c_code_cache_version(self):
return ()
def c_code(self, node, name, inames, onames, sub):
MyOp.nb_called += 1
(iname,) = inames
(oname,) = onames
fail = sub['fail']
itype = node.inputs[0].type.__class__
if (ityp... |
class PersonTest(BaseTest):
def test_person_manager_with_one_result(self):
persons = Person.objects.search(' ')
self.assertEqual(len(persons), 1)
p = persons[0]
self.assertEqual(p.id, 351549)
self.assertEqual(p.name, ' ')
self.assertEqual(p.year_birth, 1919)
s... |
class DSRCNN_preFusion(nn.Module):
def __init__(self):
super(DSRCNN_preFusion, self).__init__()
self.conv_input = nn.Conv2d(in_channels=2, out_channels=64, kernel_size=9, stride=1, padding=4, bias=False)
self.relu = nn.LeakyReLU(0.2, inplace=True)
self.residual = self.make_layer(SRRe... |
def test_xlate_loc():
Metar.debug = True
report = Metar.Metar('METAR KEWR 111851Z VRB03G19KT 2SM R04R/3000VP6000FT TSRA BR FEW015 BKN040CB BKN065 OVC200 22/22 A2987 RMK AO2 PK WND 29028/1817 WSHFT 1812 TSB05RAB22 SLP114 FRQ LTGICCCCG TS OHD AND NW-N-E MOV NE P0013 T')
mstring = str(report)
assert (mstri... |
def test_resnest_bottleneck():
with pytest.raises(AssertionError):
BottleneckS(64, 64, radix=2, reduction_factor=4, style='tensorflow')
block = BottleneckS(64, 256, radix=2, reduction_factor=4, stride=2, style='pytorch')
assert (block.avd_layer.stride == 2)
assert (block.conv2.channels == 256)
... |
def distributed_main(i, main, args, kwargs):
args.device_id = i
if (torch.cuda.is_available() and (not args.cpu) and (not getattr(args, 'tpu', False))):
torch.cuda.set_device(args.device_id)
if (args.distributed_rank is None):
args.distributed_rank = (kwargs.pop('start_rank', 0) + i)
arg... |
def prepare_proxy(proxy):
    """Build proxy configuration usable by both `requests` and `urllib`.

    Parameters
    ----------
    proxy : str or None
        Proxy URL (e.g. ``'http://host:port'``). Any falsy value disables
        proxying.

    Returns
    -------
    Response
        Named tuple ``(for_requests, for_urllib)``: ``for_requests`` is the
        proxies mapping accepted by `requests`, ``for_urllib`` is a
        ``ProxyHandler`` built from it. Both are ``None`` when *proxy* is
        falsy.
    """
    Response = collections.namedtuple('Response', ['for_requests', 'for_urllib'])
    for_urllib = None
    for_requests = None
    if proxy:
        # BUG FIX: the original line was a corrupted/invalid dict literal
        # ("{' proxy, ' proxy}"); reconstructed as the standard requests
        # proxies mapping, routing both schemes through the same proxy.
        for_requests = {'http': proxy, 'https': proxy}
        for_urllib = ProxyHandler(for_requests)
    return Response(for_requests=for_requests, for_urllib=for_urllib)
.parametrize('parser', [('background-scenarioloop',)], indirect=['parser'])
def test_parse_background_for_scenario_loop(parser):
feature = parser.parse()
assert isinstance(feature.background, Background)
assert all(((s.background.sentence == feature.background.sentence) for s in feature.scenarios[0].scenari... |
def QVTKRenderWidgetConeExample(block=False):
from vtkmodules.vtkFiltersSources import vtkConeSource
from vtkmodules.vtkRenderingCore import vtkActor, vtkPolyDataMapper, vtkRenderer
import vtkmodules.vtkRenderingOpenGL2
import vtkmodules.vtkInteractionStyle
app = QApplication.instance()
if (not ... |
def stop_memory_tracing(memory_trace: Optional[MemoryTrace]=None, ignore_released_memory: bool=True) -> Optional[MemorySummary]:
global _is_memory_tracing_enabled
_is_memory_tracing_enabled = False
if ((memory_trace is not None) and (len(memory_trace) > 1)):
memory_diff_trace = []
memory_cur... |
class PluginVersion(models.Model):
plugin = models.ForeignKey(Plugin, on_delete=models.CASCADE)
created_on = models.DateTimeField(_('Created on'), auto_now_add=True, editable=False)
downloads = models.IntegerField(_('Downloads'), default=0, editable=False)
created_by = models.ForeignKey(User, verbose_na... |
def test_package() -> None:
poetry = Factory().create_poetry(project('complete'))
builder = SdistBuilder(poetry)
builder.build()
sdist = (((fixtures_dir / 'complete') / 'dist') / 'my_package-1.2.3.tar.gz')
assert sdist.exists()
with tarfile.open(str(sdist), 'r') as tar:
assert ('my_packa... |
class CodeTagFilter(Filter):
def __init__(self, **options):
Filter.__init__(self, **options)
tags = get_list_opt(options, 'codetags', ['XXX', 'TODO', 'FIXME', 'BUG', 'NOTE'])
self.tag_re = re.compile(('\\b(%s)\\b' % '|'.join([re.escape(tag) for tag in tags if tag])))
def filter(self, lex... |
def topdown_to_image(topdown_info: np.ndarray) -> np.ndarray:
top_down_map = topdown_info['map']
fog_of_war_mask = topdown_info['fog_of_war_mask']
top_down_map = maps.colorize_topdown_map(top_down_map, fog_of_war_mask)
map_agent_pos = topdown_info['agent_map_coord']
min_map_size = 200
if (top_do... |
def upgrade(op, tables, tester):
op.create_table('deletedrepository', sa.Column('id', sa.Integer(), nullable=False), sa.Column('repository_id', sa.Integer(), nullable=False), sa.Column('marked', sa.DateTime(), nullable=False), sa.Column('original_name', sa.String(length=255), nullable=False), sa.Column('queue_id', ... |
def load_checkpoint(weights, map_location=None, inplace=True, fuse=True):
LOGGER.info('Loading checkpoint from {}'.format(weights))
ckpt = torch.load(weights, map_location=map_location)
model = ckpt[('ema' if ckpt.get('ema') else 'model')].float()
if fuse:
LOGGER.info('\nFusing model...')
... |
class ExGaussian(Continuous):
rv_op = exgaussian
def dist(cls, mu=0.0, sigma=None, nu=None, *args, **kwargs):
mu = pt.as_tensor_variable(floatX(mu))
sigma = pt.as_tensor_variable(floatX(sigma))
nu = pt.as_tensor_variable(floatX(nu))
return super().dist([mu, sigma, nu], *args, **k... |
class System(ProxyType):
_typeID = '_SolverType'
_typeEnum = 'SolverType'
_propGroup = 'Solver'
_iconName = 'Assembly_Assembly_Tree.svg'
def setDefaultTypeID(mcs, obj, name=None):
if (not name):
info = mcs.getInfo()
idx = (1 if (len(info.TypeNames) > 1) else 0)
... |
class DBnew():
def __init__(self, path):
self.path = path
def new(self, userid, maindb):
try:
conn = sqlite3.connect(self.path)
c = conn.cursor()
c.execute('CREATE TABLE IF NOT EXISTS `user` (\n `id` INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT,\... |
class NetworkImageNet(nn.Module):
def __init__(self, C, num_classes, layers, auxiliary, genotype):
super(NetworkImageNet, self).__init__()
self._layers = layers
self._auxiliary = auxiliary
self.stem0 = nn.Sequential(nn.Conv2d(3, (C // 2), kernel_size=3, stride=2, padding=1, bias=Fals... |
def test_legacy_wheel_section_in_setup_cfg(temp_pkg):
temp_pkg.joinpath('setup.cfg').write_text('[wheel]\nuniversal=1', encoding='utf-8')
subprocess.check_call([sys.executable, 'setup.py', 'bdist_wheel'], cwd=str(temp_pkg))
dist_dir = temp_pkg.joinpath('dist')
assert dist_dir.is_dir()
wheels = list(... |
class OpenFileWithEncodingTest(FakeFileOpenTestBase):
def setUp(self):
super(OpenFileWithEncodingTest, self).setUp()
self.file_path = self.make_path('foo')
def test_write_str_read_bytes(self):
str_contents = ' '
with self.open(self.file_path, 'w', encoding='arabic') as f:
... |
class KeyboardHook(object):
ID_TO_KEY = {1: 'LButton', 2: 'RButton', 3: 'Cancel', 4: 'MButton', 5: 'XButton1', 6: 'XButton2', 7: 'Undefined1', 8: 'Back', 9: 'Tab', 10: 'Reserved1', 11: 'Reserved2', 12: 'Clear', 13: 'Return', 14: 'Undefined2', 15: 'Undefined3', 16: 'SHIFT', 17: 'CONTROL', 18: 'Menu', 19: 'Pause', 20... |
def test_missing_counts_query(initialized_db):
RepositoryActionCount.delete().execute()
yesterday = (datetime.utcnow() - timedelta(days=1))
found = list(model.repositoryactioncount.missing_counts_query(yesterday))
for repository in Repository.select():
assert (repository in found)
for reposi... |
class InceptionBUnit(nn.Module):
def __init__(self, in_channels, out_channels, mid_channels):
super(InceptionBUnit, self).__init__()
assert (in_channels == 768)
assert (out_channels == 768)
self.branches = Concurrent()
self.branches.add_module('branch1', Conv1x1Branch(in_chan... |
class Solution(object):
    """LeetCode 162 "Find Peak Element": binary search, O(log n)."""

    def findPeakElement(self, nums):
        """Return an index of a peak element of *nums*.

        A peak is strictly greater than its neighbours, with the virtual
        sentinels ``nums[-1] == nums[len(nums)] == -inf``; any one peak
        index is a valid answer.
        """
        lo, hi = 0, len(nums) - 1
        while lo < hi:
            # BUG FIX: the original used `/`, which is float division under
            # Python 3 and raises TypeError when used as a list index; use
            # integer floor division.
            mid = (lo + hi) // 2
            if nums[mid] < nums[mid + 1]:
                # Ascending slope: a peak must exist strictly to the right.
                lo = mid + 1
            else:
                # Descending slope (or plateau edge): peak at mid or left.
                hi = mid
        return lo
class GPSFilter(object):
def __init__(self, client):
self.client = client
self.gps_system_time_offset = 0
self.stale_count = 0
self.use3d = False
posSigma = 10
velSigma = 0.25
if self.use3d:
self.R = np.diag([posSigma, posSigma, (posSigma * 2), vel... |
_module()
class Collect():
def __init__(self, keys, meta_keys=('filename', 'label', 'original_shape', 'img_shape', 'pad_shape', 'flip_direction', 'img_norm_cfg'), meta_name='img_metas', nested=False):
self.keys = keys
self.meta_keys = meta_keys
self.meta_name = meta_name
self.nested ... |
class TOperonPrint(TOperonBase):
def test_print(self):
self.check_false(['print'], False, True)
(o, e) = self.check_true(['print', self.f], True, False)
self.assertEqual(o.splitlines()[0], 'piman, jzig - Quod Libet Test Data - 02/10 - Silence')
(o, e) = self.check_true(['print', '-p'... |
.parametrize('cast, expected', ((float, 5.5), (ureg.Quantity, ureg.Quantity(5.5)), (str, '5.5'), ((lambda v: int(float(v))), 5)))
def test_measurement_cast(cast, expected):
class Fake(CommonBaseTesting):
x = CommonBase.measurement('x', 'doc', cast=cast)
with expected_protocol(Fake, [('x', '5.5')]) as in... |
class TestFactory(unittest.TestCase):
def test_column_cast(self):
data = [1, 2, 3]
col_int64 = ta.column(data, device='cpu')
self.assertEqual(list(col_int64), data)
self.assertEqual(col_int64.dtype, dt.int64)
col_int32 = ta.column(col_int64, dtype=dt.int32, device='cpu')
... |
class UpConv2DBlockCBNCond(nn.Module):
def __init__(self, input_nc, output_nc, kernel_size=4, stride=2, padding=1, cond_dim=256, use_bias=False, use_bn=True, up_mode='upconv', use_dropout=False):
super(UpConv2DBlockCBNCond, self).__init__()
assert (up_mode in ('upconv', 'upsample'))
self.use... |
class StateField(models.Field):
default_error_messages = {'invalid': _('Choose a valid state.'), 'wrong_type': _('Please enter a valid value (got %r).'), 'wrong_workflow': _('Please enter a value from the right workflow (got %r).'), 'invalid_state': _('%s is not a valid state.')}
description = _('State')
DE... |
def test_PatternPrinter():
(r1, r2) = (MyVariable('1'), MyVariable('2'))
op1 = MyOp('op1')
o1 = op1(r1, r2)
o1.name = 'o1'
pprint = PPrinter()
pprint.assign(op1, PatternPrinter(('|%(0)s - %(1)s|', (- 1000))))
pprint.assign((lambda pstate, r: True), default_printer)
res = pprint(o1)
a... |
.skipif((not _has_torchrec), reason='torchrec not found.')
class TestKJT():
.parametrize('index', [[0, 2], torch.tensor([0, 2]), range(0, 3, 2)])
def test_kjt_indexing(self, index):
jag_tensor = _get_kjt()
j0 = jag_tensor['index_0']
j1 = jag_tensor['index_1']
j2 = jag_tensor['ind... |
def test_all(hatch, helpers, temp_dir_data, config_file):
config_file.model.template.plugins['default']['tests'] = False
config_file.save()
project_name = 'My.App'
with temp_dir_data.as_cwd():
result = hatch('new', project_name)
assert (result.exit_code == 0), result.output
project_path ... |
def bench_dumping(no_gc: bool):
if no_gc:
data = create_response(GetRepoIssuesResponse, Issue, Reactions, PullRequest, Label, SimpleUser)
else:
data = create_response(GetRepoIssuesResponseNoGC, IssueNoGC, ReactionsNoGC, PullRequestNoGC, LabelNoGC, SimpleUserNoGC)
return benchmark_plan(msgspe... |
class Migration(migrations.Migration):
dependencies = [('conferences', '0017_auto__1607'), ('schedule', '0020_auto__0327')]
operations = [migrations.AddField(model_name='scheduleitem', name='audience_level', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='conferen... |
def ql_syscall_chmod(ql: Qiling, filename: int, mode: int):
vpath = ql.os.utils.read_cstring(filename)
hpath = ql.os.path.virtual_to_host_path(vpath)
if (not ql.os.path.is_safe_host_path(hpath)):
raise PermissionError(f'unsafe path: {hpath}')
try:
os.chmod(hpath, mode)
except OSError... |
def test_atlas_alloc_resize_with_freeing():
def get_index_in_use():
for index in range(atlas._index_counter):
if (atlas.get_region(index).size > 0):
return index
atlas = GlyphAtlas(256, 24)
assert (atlas.allocated_area == 0)
assert (atlas.total_area == 576)
prev_a... |
def test_simplified_solis_precipitable_water():
expected = pd.DataFrame(np.array([[1001., 1107., 128.], [1001., 1107., 128.], [983., 1089., 129.], [, 1064., 129.], [872., 974., 125.]]), columns=['dni', 'ghi', 'dhi'])
expected = expected[['ghi', 'dni', 'dhi']]
out = clearsky.simplified_solis(80, precipitable... |
class Attention(nn.Module):
def __init__(self, m, n):
super(Attention, self).__init__()
self.m = m
self.n = n
self.proj_1 = Parameter(torch.Tensor(30, m))
self.proj_2 = Parameter(torch.Tensor(30, n))
self.reset_parameters()
def reset_parameters(self):
stdv... |
def move_bytes(fobj, dest: int, src: int, count: int, BUFFER_SIZE: int=_DEFAULT_BUFFER_SIZE) -> None:
if ((dest < 0) or (src < 0) or (count < 0)):
raise ValueError
fobj.seek(0, 2)
filesize = fobj.tell()
if ((max(dest, src) + count) > filesize):
raise ValueError('area outside of file')
... |
def download_manifest_entries(manifest: Manifest, token_holder: Optional[Dict[(str, Any)]]=None, table_type: TableType=TableType.PYARROW, max_parallelism: Optional[int]=1, column_names: Optional[List[str]]=None, include_columns: Optional[List[str]]=None, file_reader_kwargs_provider: Optional[ReadKwargsProvider]=None) -... |
class Matrix(pybamm.Array):
def __init__(self, entries, name=None, domain=None, auxiliary_domains=None, domains=None, entries_string=None):
if isinstance(entries, list):
entries = np.array(entries)
if (name is None):
name = f'Matrix {entries.shape!s}'
if issparse(... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.