code stringlengths 281 23.7M |
|---|
.parametrize('nested', (False, True))
def test_unvalued_ir_reversion(nested):
x_rv = pt.random.normal()
y_rv = pt.clip(x_rv, 0, 1)
if nested:
y_rv = (y_rv + 5)
z_rv = pt.random.normal(y_rv, 1, name='z')
z_vv = z_rv.clone()
rv_values = {z_rv: z_vv}
(z_fgraph, _, memo) = construct_ir_f... |
def _confidence_predictor_head(args: SharedArgs, head_name: str, num_classes: int, confidence_class_counts: Optional[ConfidenceClassCounts]) -> ConfidencePredictorHead:
num_chunk_frames = math.floor((args.frame_rate * args.chunk_duration))
batch_norm = bool(args.batch_norm)
confidence_loss = ConfidenceLoss(... |
def getPaths(data_dir):
exts = ['*.png', '*.PNG', '*.jpg', '*.JPG', '*.JPEG', '*.bmp']
image_paths = []
for pattern in exts:
for (d, s, fList) in os.walk(data_dir):
for filename in fList:
if fnmatch.fnmatch(filename, pattern):
fname_ = os.path.join(d, ... |
def _norm_to_dict(obj):
if isinstance(obj, NormTV):
return {'variance': obj.variance, 'limit': (_norm_to_dict(obj.limit.value) if isinstance(obj.limit, Bound) else [_norm_to_dict(el) for el in obj.limit.value]), 'source': obj.source}
if isinstance(obj, NormParamSpecMarker):
return {'origin': obj... |
def get_host_data(offset, dtype):
    """Return a prefix of the module-level ``data[TARGET]`` buffer viewed as *dtype*.

    Parameters
    ----------
    offset : int
        Byte offset into ``data[TARGET]``; the returned view covers the
        buffer up to (at most) this offset.
    dtype : numpy dtype or dtype-like
        The dtype to reinterpret the selected bytes as.

    Returns
    -------
    numpy.ndarray
        ``data[TARGET][:index]`` reinterpreted via ``.view(dtype)``, where
        ``index`` is aligned down so the byte count is a multiple of the
        requested dtype's item size.
    """
    # Removed leftover debug print() calls; the computation is unchanged.
    exemplar = np.array([1], dtype=dtype)
    # Convert the byte offset into an element index of the source buffer.
    index = (offset // data[TARGET].itemsize)
    # Align the index down to a multiple of the requested dtype's item size
    # so the .view() below gets a whole number of elements.
    # NOTE(review): this alignment is only byte-exact when
    # data[TARGET].itemsize == 1 — confirm against how `data` is built.
    index = (index - (index % exemplar.itemsize))
    return data[TARGET][0:index].view(dtype)
class ParameterDeclarations(VersionBase):
def __init__(self):
self.parameters = []
def parse(element):
parameter_declarations = ParameterDeclarations()
declarations = element.findall('ParameterDeclaration')
for declaration in declarations:
parameter_declaration = Para... |
.skipif((not HAVE_DEPS_FOR_RESOURCE_ESTIMATES), reason='pyscf and/or jax not installed.')
def test_sparse_int_obj():
mf = make_diamond_113_szv()
mymp = mp.KMP2(mf)
Luv = cholesky_from_df_ints(mymp)
for thresh in [0.001, 0.0001, 1e-05, 1e-06]:
abs_sum_coeffs = 0
helper = SparseFactorizati... |
class SspLexer(DelegatingLexer):
name = 'Scalate Server Page'
aliases = ['ssp']
filenames = ['*.ssp']
mimetypes = ['application/x-ssp']
url = '
version_added = '1.4'
def __init__(self, **options):
super().__init__(XmlLexer, JspRootLexer, **options)
def analyse_text(text):
... |
def custom_vcr():
dirname = os.path.dirname(__file__)
return VCR(decode_compressed_response=True, cassette_library_dir=os.path.join(dirname, 'fixtures/cassettes'), path_transformer=VCR.ensure_suffix('.yml'), filter_query_parameters=bad_fields, before_record_response=filter_payload, filter_post_data_parameters=b... |
def _create_densenet(variant, growth_rate, block_config, pretrained, **kwargs):
kwargs['growth_rate'] = growth_rate
kwargs['block_config'] = block_config
return build_model_with_cfg(DenseNet, variant, pretrained, default_cfg=default_cfgs[variant], feature_cfg=dict(flatten_sequential=True), pretrained_filter... |
class ModelWithFunctionalOps(nn.Module):
def __init__(self):
super(ModelWithFunctionalOps, self).__init__()
self.conv1 = nn.Conv2d(3, 8, kernel_size=2, stride=2, padding=2, bias=False)
self.bn1 = nn.BatchNorm2d(8)
self.relu1 = nn.ReLU(inplace=True)
self.fc = nn.Linear(2592, 1... |
class DSProject(Extension):
name = 'dsproject'
def augment_cli(self, parser):
parser.add_argument(self.flag, help=self.help_text, nargs=0, dest='extensions', action=include(NoSkeleton(), PreCommit(), self))
return self
def activate(self, actions: List[Action]) -> List[Action]:
action... |
def _add_spotting_graphs(html_file: TextIO, data_frame: DataFrame, class_names: List[str], video_id: str, debug_graphs: bool) -> None:
category_settings = create_category_settings(class_names)
convert_deltas_to_timestamps(data_frame)
add_detections_graph(html_file, data_frame, category_settings)
if debu... |
class FusedAdam(torch.optim.Optimizer):
def __init__(self, params, lr=0.001, bias_correction=True, betas=(0.9, 0.999), eps=1e-08, eps_inside_sqrt=False, weight_decay=0.0, max_grad_norm=0.0, amsgrad=False):
global fused_adam_cuda
import importlib
fused_adam_cuda = importlib.import_module('fus... |
def get_cache_dir() -> str:
cache_directory = os.path.expandvars('$XDG_CACHE_HOME')
if (cache_directory == '$XDG_CACHE_HOME'):
cache_directory = os.path.expanduser('~/.cache')
cache_directory = os.path.join(cache_directory, 'qtile')
if (not os.path.exists(cache_directory)):
os.makedirs(c... |
def test_run_update_uninstalls_after_removal_transient_dependency(installer: Installer, locker: Locker, repo: Repository, package: ProjectPackage, installed: CustomInstalledRepository) -> None:
locker.locked(True)
locker.mock_lock_data({'package': [{'name': 'A', 'version': '1.0', 'optional': False, 'platform': ... |
def rename_trivial_obj(df):
df = df.T
df = df.rename(index={'smiles_lstm': 'SMILES LSTM', 'smiles_ga': 'SMILES GA', 'graph_ga': 'Graph GA', 'best_from_chembl': 'Best from Data'})
df = df.reindex(columns=['CNS MPO', 'QED', 'C7H8N2O2', 'Pioglitazone MPO'], index=['Best from Data', 'SMILES LSTM', 'SMILES GA', ... |
class CNNEncoder4(nn.Module):
def __init__(self, n_channels, out_channels, patch_height, patch_width):
super(CNNEncoder4, self).__init__()
self.multi_cnn = nn.Sequential(nn.Conv2d(n_channels, 16, kernel_size=3, padding=1, bias=False), nn.BatchNorm2d(16), nn.ReLU(inplace=True), nn.MaxPool2d(2), nn.Co... |
def get_files(**kwargs):
metadata_directory = kwargs.get('metadata_directory', '')
files = []
for f in get_template_files(**kwargs):
if (str(f.path) == 'LICENSE.txt'):
files.append(File(Path(metadata_directory, 'licenses', f.path), f.contents))
if (f.path.parts[0] != kwargs['pack... |
class BahAttnDecoder(RnnDecoder):
def __init__(self, emb_dim, vocab_size, fc_emb_dim, attn_emb_dim, dropout, d_model, **kwargs):
super().__init__(emb_dim, vocab_size, fc_emb_dim, attn_emb_dim, dropout, d_model, **kwargs)
attn_size = kwargs.get('attn_size', self.d_model)
self.model = getattr(... |
class TxtProcessor(en.TxtProcessor):
def process(cls, txt, pre_align_args):
txt = cls.preprocess_text(txt)
phs = []
for p in txt.split(' '):
if (len(p) == 0):
continue
syl = SonoriPy(p)
if (len(syl) == 0):
phs += list(p)
... |
(unsafe_hash=True, init=False)
class Xref(R2Data):
XrefType = Literal[('NULL', 'CODE', 'CALL', 'DATA', 'STRN', 'UNKN')]
name: str
fromaddr: int
type: XrefType
perm: int
addr: int
refname: str
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.fromaddr = kwargs[... |
def test_inherited_robot_accounts_same_namespace_no_read_permission(get_monkeypatch, initialized_db, client):
patch_permissions(get_monkeypatch, False)
analyzer = TriggerAnalyzer(FakeHandler(), 'devtable', 'localhost:5000', {}, True)
result = analyzer.analyze_trigger()
assert (analyzer.analyze_trigger()... |
def process_rule_smiles_table(c, db_id):
(old_n,) = next(c.execute('SELECT count(*) from rule_smiles'))
c.execute('\nINSERT OR IGNORE INTO rule_smiles (smiles, num_heavies)\n SELECT smiles, num_heavies\n FROM old.rule_smiles\n ')
(new_n,) = next(c.execute('SELECT count(*) from rule_smiles'))
(old_n... |
class AddExtraFieldTest(tf.test.TestCase):
def setUp(self):
boxes = np.array([[3.0, 4.0, 6.0, 8.0], [14.0, 14.0, 15.0, 15.0], [0.0, 0.0, 20.0, 20.0]], dtype=float)
self.boxlist = np_box_list.BoxList(boxes)
def test_add_already_existed_field(self):
with self.assertRaises(ValueError):
... |
()
('--report-path', help='Store report at this location instead of a temp file.', type=ExpandablePath(dir_okay=False, writable=True, resolve_path=True))
('--debug', is_flag=True, help='Drop into pdb on errors.')
('--eth-client', type=EnumChoiceType(EthClient), default=EthClient.GETH.value, show_default=True, help='Whi... |
.parametrize('verbosity', ['', '-v', '-vv'])
def test_error_message_for_missing_steps(pytester, verbosity):
pytester.makefile('.feature', test=FEATURE)
pytester.makepyfile(textwrap.dedent(" from pytest_bdd import scenarios\n\n scenarios('.')\n "))
result = pytester.runpytest('--gherkin-... |
def parse_arguments():
description = 'Download packages of the Cityscapes Dataset.'
epilog = 'Requires an account that can be created via
parser = argparse.ArgumentParser(description=description, epilog=epilog)
parser.add_argument('-l', '--list_available', action='store_true', help='list available pack... |
class FlightsInline(FlightMixin, admin.TabularInline):
model = Flight
can_delete = False
fields = ('name', 'live', 'start_date', 'end_date', 'sold_clicks', 'sold_impressions', 'cpc', 'cpm', 'value_remaining', 'total_clicks', 'total_views', 'ctr', 'ecpm')
readonly_fields = fields
show_change_link = T... |
class EpochBatchIterating(object):
def __len__(self) -> int:
raise NotImplementedError
def next_epoch_idx(self):
raise NotImplementedError
def next_epoch_itr(self, shuffle=True, fix_batches_to_gpus=False):
raise NotImplementedError
def end_of_epoch(self) -> bool:
raise No... |
_BOX_FEATURE_EXTRACTORS.register('FPN2MLPFeatureExtractorNeighbor')
class FPN2MLPFeatureExtractorNeighbor(nn.Module):
def __init__(self, cfg):
super(FPN2MLPFeatureExtractorNeighbor, self).__init__()
resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION
scales = cfg.MODEL.ROI_BOX_HEAD.POOLER_... |
_module
class HRFPN(nn.Module):
def __init__(self, in_channels, out_channels, num_outs=5, pooling_type='AVG', conv_cfg=None, norm_cfg=None, with_cp=False):
super(HRFPN, self).__init__()
assert isinstance(in_channels, list)
self.in_channels = in_channels
self.out_channels = out_channe... |
def create_test_hmm():
srng = pt.random.RandomStream()
N_pt = pt.iscalar('N')
N_pt.tag.test_value = 10
M_pt = pt.iscalar('M')
M_pt.tag.test_value = 2
mus_pt = pt.matrix('mus')
mus_pt.tag.test_value = np.stack([np.arange(0.0, 10), np.arange(0.0, (- 10), (- 1))], axis=(- 1)).astype(pytensor.co... |
def test_audio_player_time(driver, player):
source = SilentTestSource(10.0)
audio_player = driver.create_audio_player(source, player)
try:
audio_player.play()
player.timer.start()
last_time = audio_player.get_time()
for _ in range(15):
player.wait(0.1)
... |
class Directory(FileSystemObject, Accumulator, Loadable):
is_directory = True
enterable = False
load_generator = None
cycle_list = None
loading = False
progressbar_supported = True
flat = 0
filenames = None
files = None
files_all = None
temporary_filter = None
narrow_filt... |
def load_txt_info(gt_file, img_info):
with open(gt_file, 'r', encoding='utf-8') as f:
anno_info = []
for line in f:
line = line.strip('\n')
ann = line.split(',')
bbox = ann[0:8]
word = line[(len(','.join(bbox)) + 1):]
bbox = [int(coord) for... |
def predict_density_with_size_and_computation(m, comp_time, P):
alpha = (4 * 0.000436)
beta = ((4 * 9e-06) * 0.001)
def _denseallreduce_model(P, m):
return (((2 * (P - 1)) * alpha) + ((((2 * (P - 1)) / P) * m) * beta))
def _sparseallreduce_model(P, m, rho=0.001):
return (np.log2(P) + (((... |
class ErrorTree():
_instance = _unset
def __init__(self, errors: Iterable[ValidationError]=()):
self.errors: MutableMapping[(str, ValidationError)] = {}
self._contents: Mapping[(str, ErrorTree)] = defaultdict(self.__class__)
for error in errors:
container = self
f... |
class SpatialSELayer3D(nn.Module):
def __init__(self, num_channels):
super(SpatialSELayer3D, self).__init__()
self.conv = nn.Conv3d(num_channels, 1, 1)
self.sigmoid = nn.Sigmoid()
def forward(self, x, weights=None):
(batch_size, channel, D, H, W) = x.size()
if weights:
... |
class Metadata(_MetadataBase):
def load(cls, filepath, column_types=None, column_missing_schemes=None, default_missing_scheme=DEFAULT_MISSING):
from .io import MetadataReader
return MetadataReader(filepath).read(into=cls, column_types=column_types, column_missing_schemes=column_missing_schemes, defa... |
def main():
    """Compile a Python source file and marshal its code object to disk.

    Expects exactly three CLI arguments after the program name:
    the source path with one trailing character to strip, the input
    filename to read, and the output filename for the marshalled code.
    """
    (srcpathx, in_fname, out_fname) = sys.argv[1:]
    # Drop the final character of the first argument to get the path
    # recorded in the compiled code object.
    srcpath = srcpathx[:(- 1)]
    with open(in_fname, 'r') as fh:
        text = fh.read()
    compiled = compile(text, srcpath, 'exec', dont_inherit=True)
    with open(out_fname, 'wb') as fh:
        marshal.dump(compiled, fh)
class TestBasicModels(TestCase):
def test_dfn_well_posed(self):
model = pybamm.lithium_ion.BasicDFN()
model.check_well_posedness()
copy = model.new_copy()
copy.check_well_posedness()
def test_spm_well_posed(self):
model = pybamm.lithium_ion.BasicSPM()
model.check_... |
class Element():
def __init__(self, grid_proportion=GridProportion.Eight):
self.grid_proportion = grid_proportion
def generate_html(self, document: Optional[Document]=None) -> str:
raise NotImplementedError()
def get_grid_proportion_css_class(self) -> str:
return str(self.grid_propor... |
def parse_pls(file) -> Collection[IRFile]:
data = {}
lines = file.read().decode('utf-8', 'replace').splitlines()
if ((not lines) or ('[playlist]' not in lines.pop(0))):
return []
for line in lines:
try:
(head, val) = line.strip().split('=', 1)
except (TypeError, Value... |
def test_find_union_pairs_input_four_sets_where_only_two_have_overlap():
    """Of the four fixture sets, only one pair overlaps; expect its union."""
    # Index the fixture sets 0..3, mirroring how the class consumes them.
    indexed_sets = dict(enumerate((SET_A, SET_B, SET_C, SET_D)))
    finder = GetNonOverlapUnionsBaseClass(indexed_sets)
    result = finder._find_union_pair(indexed_sets)
    expected_union = {2, 4, 6, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19}
    assert (result == ((1, 3), expected_union))
def actions(label: str='', buttons: List[Union[(Dict[(str, Any)], Tuple, List, str)]]=None, name: str=None, help_text: str=None):
assert (buttons is not None), 'Required `buttons` parameter in actions()'
(item_spec, valid_func, onchange_func) = _parse_args(locals())
item_spec['type'] = 'actions'
item_sp... |
('time.sleep')
def test_while_until_true_callable_with_max_attempts(mock_sleep):
decorate_me = MagicMock()
decorate_me.side_effect = [False, False, True, False]
assert poll.while_until_true(interval=arb_callable, max_attempts=3)(decorate_me)('arg1', 'arg2')
assert (decorate_me.mock_calls == [call(1, 'ar... |
_module()
class EncodeDecodeRecognizer(BaseRecognizer):
def __init__(self, preprocessor=None, backbone=None, encoder=None, decoder=None, loss=None, label_convertor=None, train_cfg=None, test_cfg=None, max_seq_len=40, pretrained=None, init_cfg=None):
super().__init__(init_cfg=init_cfg)
assert (label_... |
_voxelnet
class Shape_Grouping_Heads_lyft(VoxelNet):
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
assert (self._num_class == 9)
assert isinstance(self.rpn, rpn.RPNNoHead)
self.small_classes = ['bicycle', 'motorcycle']
self.tiny_classes = ['pedestrian', 'anim... |
def test_export_game_raises_with_wrong_dotnet_exit_code(mocker):
    """Exporter must raise UnableToExportError when dotnet exits non-zero."""
    dotnet_process = mocker.patch('subprocess.run')
    # Bug fix: set returncode on the mocked call's *result*, not on the
    # patched function object itself — the code under test reads
    # subprocess.run(...).returncode, which on the original was a fresh
    # (truthy) Mock rather than the intended failure code 1.
    dotnet_process.return_value.returncode = 1
    exporter = AM2RGameExporter()
    with pytest.raises(UnableToExportError):
        exporter._do_export_game(MagicMock(), MagicMock(), MagicMock())
def get_big_bloq_counts_graph_1(bloq: Bloq) -> Tuple[(nx.DiGraph, Dict[(Bloq, int)])]:
ss = SympySymbolAllocator()
n_c = ss.new_symbol('n_c')
def generalize(bloq: Bloq) -> Optional[Bloq]:
if isinstance(bloq, ArbitraryClifford):
return attrs.evolve(bloq, n=n_c)
return bloq
ret... |
class TSongLibrary(TLibrary):
Fake = FakeSong
Frange = staticmethod(FSrange)
Library = SongLibrary
def test_rename_dirty(self):
self.library.dirty = False
song = self.Fake(10)
self.library.add([song])
self.assertTrue(self.library.dirty)
self.library.dirty = False
... |
def ctrl(conn):
keycodeMapping = {}
def Op(key, op, ox, oy):
if (key == 4):
mouse.move(ox, oy)
elif (key == 1):
if (op == 100):
ag.mouseDown(button=ag.LEFT)
elif (op == 117):
ag.mouseUp(button=ag.LEFT)
elif (key == 2):
... |
def test_solver_dependency_cache_respects_source_type(root: ProjectPackage, provider: Provider, repo: Repository) -> None:
dependency_pypi = Factory.create_dependency('demo', '>=0.1.0')
dependency_git = Factory.create_dependency('demo', {'git': ' groups=['dev'])
root.add_dependency(dependency_pypi)
root... |
class EvForm(object):
def __init__(self, filename=None, cells=None, tables=None, form=None, **kwargs):
self.filename = filename
self.input_form_dict = form
self.cells_mapping = (dict(((to_str(key), value) for (key, value) in cells.items())) if cells else {})
self.tables_mapping = (di... |
def clean_platfiles():
deletables = [(ProjectDir / 'build'), (ModuleDir_Raw / BindingsFN), (ModuleDir_Raw / VersionFN)]
deletables += [(ModuleDir_Raw / fn) for fn in LibnameForSystem.values()]
for fp in deletables:
if fp.is_file():
fp.unlink()
elif fp.is_dir():
shutil... |
def main(data_dir, client, bc, config):
benchmark(read_tables, data_dir, bc, dask_profile=config['dask_profile'])
query_distinct = f'''
SELECT DISTINCT ss_item_sk, ss_ticket_number
FROM store_sales s, item i
WHERE s.ss_item_sk = i.i_item_sk
AND i.i_category_id IN ({q01_i_category... |
class TestJavaCasting():
.parametrize(['python_object', 'java_class'], [(r5py.TransportMode.AIR, com.conveyal.r5.api.util.TransitModes), (r5py.TransportMode.BUS, com.conveyal.r5.api.util.TransitModes), (r5py.TransportMode.CABLE_CAR, com.conveyal.r5.api.util.TransitModes), (r5py.TransportMode.FERRY, com.conveyal.r5.... |
def apply_tf_padding(features: torch.Tensor, conv_layer: nn.Conv2d) -> torch.Tensor:
in_height = int(features.shape[(- 2)])
in_width = int(features.shape[(- 1)])
(stride_height, stride_width) = conv_layer.stride
(kernel_height, kernel_width) = conv_layer.kernel_size
(dilation_height, dilation_width)... |
def extract_by_timeline(result_value_dfs, sampling_frequency, final_time):
sampling_time = 0
avg_list = []
while True:
sampling_time += sampling_frequency
acc_list = []
for df in result_value_dfs:
if df[0].gt(sampling_time).any():
latest_sample_row = df.il... |
def test_exp_transform_rv():
base_rv = pt.random.normal(0, 1, size=3, name='base_rv')
y_rv = pt.exp(base_rv)
y_rv.name = 'y'
y_vv = y_rv.clone()
logp_fn = pytensor.function([y_vv], logp(y_rv, y_vv))
logcdf_fn = pytensor.function([y_vv], logcdf(y_rv, y_vv))
icdf_fn = pytensor.function([y_vv],... |
def delete_matching_notifications(target, kind_name, **kwargs):
kind_ref = NotificationKind.get(name=kind_name)
notifications = Notification.select().where((Notification.target == target), (Notification.kind == kind_ref))
for notification in notifications:
matches = True
try:
met... |
def restart_app(app: RaidenService, restart_node: RestartNode) -> RaidenService:
new_transport = MatrixTransport(config=app.config.transport, environment=app.config.environment_type)
raiden_event_handler = RaidenEventHandler()
hold_handler = HoldRaidenEventHandler(raiden_event_handler)
app = RaidenServi... |
def test_client_tcp_connect():
with mock.patch.object(socket, 'create_connection') as mock_method:
_socket = mock.MagicMock()
mock_method.return_value = _socket
client = lib_client.ModbusTcpClient('127.0.0.1')
_socket.getsockname.return_value = ('dmmy', 1234)
assert client.co... |
_torch
class AlignTextModelTest(ModelTesterMixin, unittest.TestCase):
all_model_classes = ((AlignTextModel,) if is_torch_available() else ())
fx_compatible = False
test_pruning = False
test_head_masking = False
def setUp(self):
self.model_tester = AlignTextModelTester(self)
self.conf... |
class RSAKey(object):
def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
if ((n and (not e)) or (e and (not n))):
raise AssertionError()
self.n = n
self.e = e
self.d = d
self.p = p
self.q = q
self.dP = dP
self.dQ = dQ
... |
_mode()
def _predict_impl(state: State, predict_unit: TPredictUnit, callback_handler: CallbackHandler) -> None:
predict_state = none_throws(state.predict_state)
state._active_phase = ActivePhase.PREDICT
logger.info(f'Started predict with max_steps_per_epoch={predict_state.max_steps_per_epoch}')
tracked_... |
def score_poisson(est, X, y, sample_weight=None, level=1):
y_pred = est.predict(X)
out = {}
out['dsq'] = poisson_dsq_score(y_true=y, y_pred=y_pred, sample_weight=sample_weight)
if (level >= 1):
to_add = additional_regression_data(y_true=y, y_pred=y_pred, coef=est.coef_, sample_weight=sample_weig... |
.parametrize('addargs', (['--disable-formats', 'uri-reference', '--disable-formats', 'date-time'], ['--disable-formats', 'uri-reference,date-time']))
def test_disable_selected_formats(runner, mock_parse_result, addargs, in_tmp_dir, tmp_path):
touch_files(tmp_path, 'foo.json')
runner.invoke(cli_main, (['--schema... |
class MemMasterIfcFL(Interface):
def construct(s, ReqType=None, RespType=None):
s.read = CallerIfcFL()
s.write = CallerIfcFL()
s.amo = CallerIfcFL()
def __str__(s):
return f'r{s.read}|w{s.write}|a{s.amo}'
def connect(s, other, parent):
if isinstance(other, MemMinionIf... |
def read_power_status(data):
values = {}
power_type = data['type']
try:
for (name, path) in data.items():
if ('type' in name):
continue
elif ('status' in name):
values[name] = cat(path).strip()
continue
elif ('online... |
def random_graph(n, weight_range=10, edge_prob=0.3, negative_weight=True, savefile=None, seed=None):
assert (weight_range >= 0)
if seed:
aqua_globals.random_seed = seed
w = np.zeros((n, n))
m = 0
for i in range(n):
for j in range((i + 1), n):
if (aqua_globals.random.rando... |
_tasklist(margin=0)
def test_tasklist_click_task(tasklist_manager):
tasklist_manager.test_window('One')
tasklist_manager.test_window('Two')
assert (tasklist_manager.c.window.info()['name'] == 'Two')
tasklist_manager.c.bar['top'].fake_button_press(0, 'top', 0, 0, 1)
assert (tasklist_manager.c.window.... |
class HttpContext():
backend_name = ''
def request_obj(self):
pass
def request_method(self) -> str:
pass
def request_headers(self) -> Dict:
pass
def request_url_parameter(self, name, default=None) -> str:
pass
def request_body(self) -> bytes:
return b''
... |
def parse_args():
parser = argparse.ArgumentParser(description='Check correctness of QDMR annotation.')
parser.add_argument('--dev', action='store_true', help='if true, use dev, else use train')
parser.add_argument('--break_idx', type=int, help='index of spider example, use only for debugging')
parser.a... |
class MemoryBuffer():
def __init__(self, name, numel, dtype, track_usage):
if (torch.distributed.get_rank() == 0):
element_size = torch.tensor([], dtype=dtype).element_size()
print('> building the {} memory buffer with {} num elements and {} dtype ({:.1f} MB)...'.format(name, numel, ... |
def inference_on_dataset(model, data_loader, evaluator: Union[(DatasetEvaluator, List[DatasetEvaluator], None)]):
num_devices = get_world_size()
logger = logging.getLogger(__name__)
logger.info('Start inference on {} batches'.format(len(data_loader)))
total = len(data_loader)
if (evaluator is None):... |
class QGVersionZeroForcedField(models.CharField):
description = 'Field to store version strings ("a.b.c.d") in a way it is sortable and QGIS scheme compatible (x.y.z).'
def get_prep_value(self, value):
return vjust(value, fillchar='0', level=2, force_zero=True)
def to_python(self, value):
... |
class TestLogger(TestCase):
def test_logger(self):
logger = pybamm.logger
self.assertEqual(logger.level, 30)
pybamm.set_logging_level('INFO')
self.assertEqual(logger.level, 20)
pybamm.set_logging_level('ERROR')
self.assertEqual(logger.level, 40)
pybamm.set_log... |
class SequentialSampler(Sampler):
def __init__(self, data_source, checkpoint):
self.data_source = data_source
if ((checkpoint is not None) and (checkpoint['dataset_perm'] is not None)):
self.dataset_perm = checkpoint['dataset_perm']
self.perm = self.dataset_perm[(checkpoint['... |
class EnvironmentSettings():
def __init__(self):
root_path = '/home/sgn/Data1/yan/'
self.workspace_dir = (root_path + 'pytracking-models/')
self.tensorboard_dir = (self.workspace_dir + '/tensorboard/')
self.pretrained_networks = (self.workspace_dir + '/pretrained_networks/')
... |
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, in_planes, planes, stride=1, is_last=False):
super(Bottleneck, self).__init__()
self.is_last = is_last
self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self... |
def get_model(rnn_dim: int, use_elmo):
recurrent_layer = CudnnGru(rnn_dim, w_init=TruncatedNormal(stddev=0.05))
embed_mapper = SequenceMapperSeq(VariationalDropoutLayer(0.8), recurrent_layer, VariationalDropoutLayer(0.8))
elmo_model = None
if use_elmo:
print('Using Elmo!')
elmo_model = g... |
class QuantumAlgorithm(ABC):
def __init__(self, quantum_instance: Optional[Union[(QuantumInstance, Backend, BaseBackend, Backend)]]) -> None:
self._quantum_instance = None
if quantum_instance:
self.quantum_instance = quantum_instance
def random(self):
return aqua_globals.rand... |
_model
def visformer_tiny(pretrained=False, **kwargs):
model_cfg = dict(init_channels=16, embed_dim=192, depth=(7, 4, 4), num_heads=3, mlp_ratio=4.0, group=8, attn_stage='011', spatial_conv='100', norm_layer=nn.BatchNorm2d, conv_init=True, embed_norm=nn.BatchNorm2d, **kwargs)
model = _create_visformer('visforme... |
def mock_snakemake(rulename, **wildcards):
import os
import snakemake as sm
from pypsa.descriptors import Dict
from snakemake.script import Snakemake
script_dir = Path(__file__).parent.resolve()
assert (Path.cwd().resolve() == script_dir), f'mock_snakemake has to be run from the repository scrip... |
class DatasetDataFactory(factory.Factory):
class Meta():
model = dict
column_names = [six.u('Date'), six.u('column.1'), six.u('column.2'), six.u('column.3')]
data = [['2015-07-11', 444.3, 10, 3], ['2015-07-13', 433.3, 4, 3], ['2015-07-14', 437.5, 3, 3], ['2015-07-15', 440.0, 2, 3]]
start_date = ... |
def test_user_avatar_history(api, mock_req):
mock_req({'getUserProfilePhotos': {'ok': True, 'result': {'total_count': 3, 'photos': [[{'file_id': 'aaaaaa', 'width': 50, 'height': 50, 'file_size': 128}], [{'file_id': 'bbbbbb', 'width': 50, 'height': 50, 'file_size': 128}], [{'file_id': 'cccccc', 'width': 50, 'height'... |
def test_cof_dict_with_all_args():
with pytest.raises(Call) as err:
with patch_logger('blah', logging.INFO) as mock_logger_info:
cof_func(name='blah', instruction_type=Call, context=Context({'key': {'groups': ['b', 'c'], 'success': 'sg', 'failure': 'fg'}}), context_key='key')
cof = err.value... |
def test_ingivendirectory(tmp_path):
cwd = os.getcwd()
expected_path = os.path.join(str(tmp_path), 'foo')
with InGivenDirectory(expected_path) as path:
assert os.path.isdir(path)
assert os.path.samefile(path, os.getcwd())
assert os.path.samefile(path, expected_path)
assert os.pat... |
def safely_destruct_output_when_exp(content_param):
def decorator(func):
sig = inspect.signature(func)
(func)
def inner(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception:
params = ([content_param] if isinstance(conten... |
def postprocess_text(preds, labels):
    """Normalize predictions and labels for sentence-level scoring.

    Each entry is stringified and stripped, then split into sentences
    (via nltk) joined with newlines — the layout ROUGE-style metrics
    expect.

    Returns:
        tuple: (cleaned_preds, cleaned_labels), both lists of str.
    """
    def _clean(items):
        # Stringify/strip first, then put each sentence on its own line.
        stripped = [str(item).strip() for item in items]
        return ['\n'.join(nltk.sent_tokenize(text)) for text in stripped]
    return (_clean(preds), _clean(labels))
class UnetBasicBlock(nn.Module):
def __init__(self, spatial_dims: int, in_channels: int, out_channels: int, kernel_size: Union[(Sequence[int], int)], stride: Union[(Sequence[int], int)], norm_name: Union[(Tuple, str)], act_name: Union[(Tuple, str)]=('leakyrelu', {'inplace': True, 'negative_slope': 0.01}), dropout: ... |
_fixtures(WebFixture)
def test_check_csrf_token_match(web_fixture):
token = CSRFToken(value='hello world')
csrf_token = token.as_signed_string()
assert (len(csrf_token.split(':')) == 3)
reconstructed_token = CSRFToken.from_coded_string(csrf_token)
assert (reconstructed_token.value == 'hello world')
... |
def test_no_mixed_overloads():
from pybind11_tests import debug_enabled
with pytest.raises(RuntimeError) as excinfo:
m.ExampleMandA.add_mixed_overloads1()
assert (str(excinfo.value) == ('overloading a method with both static and instance methods is not supported; ' + ('compile.sh in debug mode for m... |
def test_unorderable_types(pytester: Pytester) -> None:
pytester.makepyfile('\n class TestJoinEmpty(object):\n pass\n\n def make_test():\n class Test(object):\n pass\n Test.__name__ = "TestFoo"\n return Test\n TestFoo = make_test()\n ... |
def test_bad_py_spec():
text = 'python2.3.4.5'
spec = PythonSpec.from_string_spec(text)
assert (text in repr(spec))
assert (spec.str_spec == text)
assert (spec.path == text)
content = vars(spec)
del content['str_spec']
del content['path']
assert all(((v is None) for v in content.valu... |
def apply_mixins():
    """Graft ``CustomResult`` into each base class and attach a widget factory.

    Mutates every ``(base_class, widget_class)`` pair from the module-level
    ``mixin_pairs``: adds ``reinteract.custom_result.CustomResult`` to the
    class's bases (if absent) and installs a ``create_widget`` method that
    builds the pair's widget.
    """
    for (base_class, widget_class) in mixin_pairs:
        if (reinteract.custom_result.CustomResult not in base_class.__bases__):
            base_class.__bases__ += (reinteract.custom_result.CustomResult,)
        # Bug fix: bind widget_class as a default argument. The original
        # `lambda self: widget_class(self)` captured the loop variable
        # late, so every patched class would construct the *last*
        # widget_class from the loop.
        base_class.create_widget = (lambda self, _widget_class=widget_class: _widget_class(self))
def deconv2D_layer_in(l0, name=None, filters=32, kernel_size=3, strides=2, padding='same', activation='relu', kernel_initializer='he_normal'):
l = Conv2DTranspose(filters=filters, name=name, kernel_size=kernel_size, strides=strides, padding=padding, activation=activation, kernel_initializer=kernel_initializer)(l0)
... |
class Pattern(FilterPredicate):
def __init__(self, pattern):
self.pattern = as_complex_pattern(pattern)
def __call__(self, component):
if isinstance(component, (Monomer, MonomerPattern, ComplexPattern)):
return match_complex_pattern(self.pattern, as_complex_pattern(component))
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.