code stringlengths 281 23.7M |
|---|
def estimate_density_map(img_root, gt_dmap_root, model_param_path, index):
device = torch.device('cuda')
model = CANNet().to(device)
model.load_state_dict(torch.load(model_param_path))
dataset = CrowdDataset(img_root, gt_dmap_root, 8, phase='test')
dataloader = torch.utils.data.DataLoader(dataset, b... |
def register_all_bdd_tracking(root='datasets'):
thing_classes = ['pedestrian', 'rider', 'car', 'truck', 'bus', 'train', 'motorcycle', 'bicycle']
thing_classes_3cls = ['vehicle', 'pedestrian', 'cyclist']
for DATASETS in [_PREDEFINED_SPLITS_BDDT]:
for (key, value) in DATASETS.items():
meta... |
class TestServer(socketserver.TCPServer):
allow_reuse_address = True
def write_test_patterns(self):
self.write_blank_lines(100)
self.write_complex_strings(10)
self.write_non_ascii(8)
sys.stderr.write('started\n')
self.write_long_output(100)
self.write_garbage_byte... |
class AddressBookPanel(Div):
def __init__(self, view):
super().__init__(view)
self.add_child(H(view, 1, text='Addresses'))
self.page_index = SequentialPageIndex(Address.all_addresses(), items_per_page=5)
self.address_list = AddressList(view, self.page_index)
self.page_menu = ... |
def prepare(val, signext=False, size=SIZE) -> z3.BitVecRef:
if z3.is_bv(val):
szdiff = (size - val.size())
if (szdiff == 0):
result = val
elif (szdiff > 0):
if signext:
result = z3.SignExt(szdiff, val)
else:
result = z3.Zero... |
class _ZVector(Bloq):
bit: bool
state: bool = True
n: int = 1
def __attrs_post_init__(self):
if (self.n != 1):
raise NotImplementedError('Come back later.')
_property
def signature(self) -> 'Signature':
return Signature([Register('q', bitsize=1, side=(Side.RIGHT if se... |
def test_multi_create_pickup_data_for_other(pickup_for_create_pickup_data):
solo = pickup_exporter.PickupExporterSolo(pickup_exporter.GenericAcquiredMemo(), RandovaniaGame.METROID_PRIME_ECHOES)
creator = pickup_exporter.PickupExporterMulti(solo, PlayersConfiguration(0, {0: 'You', 1: 'Someone'}))
data = crea... |
class SEBlock(nn.Module):
def __init__(self, inplanes, r=16):
super(SEBlock, self).__init__()
self.global_pool = nn.AdaptiveAvgPool2d((1, 1))
self.se = nn.Sequential(nn.Linear(inplanes, (inplanes // r)), nn.ReLU(inplace=True), nn.Linear((inplanes // r), inplanes), nn.Sigmoid())
def forwa... |
def test_register_module_hooks():
_module_hooks = [dict(type='GPUNormalize', hooked_module='backbone', hook_pos='forward_pre', input_format='NCHW', mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375])]
repo_dpath = osp.dirname(osp.dirname(osp.dirname(__file__)))
config_fpath = osp.join(repo_dpath, '... |
class BCDataStream(object):
def __init__(self):
self.input = None
self.read_cursor = 0
def clear(self):
self.input = None
self.read_cursor = 0
def write(self, _bytes: Union[(bytes, bytearray)]):
assert isinstance(_bytes, (bytes, bytearray))
if (self.input is N... |
def main():
prediction_dir = os.path.abspath('prediction')
print('Start predicting...')
run_validation_cases(validation_keys_file=config['validation_file'], model_file=config['model_file'], training_modalities=config['training_modalities'], labels=config['labels'], overlap=0, hdf5_file=config['data_file'], ... |
.parametrize('superrep_conversion', [to_super, to_choi, to_chi, to_kraus])
def test_process_fidelity_identical_channels(superrep_conversion):
num_qubits = 2
for k in range(10):
oper = rand_super_bcsz((num_qubits * [2]))
oper = superrep_conversion(oper)
f = process_fidelity(oper, oper)
... |
class TestUtilsGeometry(unittest.TestCase):
def setUp(self):
self.bm = bmesh.new()
def tearDown(self):
self.bm.free()
def clean_bmesh(self):
[self.bm.verts.remove(v) for v in self.bm.verts]
def test_cube(self):
btools.utils.cube(self.bm)
self.assertEquals(len(self... |
class PrintGraph(Graph):
def __init__(self, data=None, name='', file=None, **attr):
Graph.__init__(self, data=data, name=name, **attr)
if (file is None):
import sys
self.fh = sys.stdout
else:
self.fh = open(file, 'w')
def add_node(self, n, attr_dict=No... |
class TXingHeader(TestCase):
def test_valid_info_header(self):
data = b'Info\x00\x00\x00\x0f\x00\x00:>\x00\xed\xbd8\x00\x03\x05\x07\n\r\x0f\x12\x14\x17\x1a\x1c\x1e"$&)+.1359;=\\^acfikmqsux{}\x80\x82\x84\x87\x8a\x8c\x8e\x92\x94\x96\x99\x9c\x9e\xa1\xa3\xa5\xa9\xab\xad\xb0\xb3\xb5\xb8\xba\xbd\xc0\xc2\xc4\xc6\x... |
def conv(inputs, kernel_shape, bias_shape, strides, w_i, b_i=None, activation=tf.nn.relu):
weights = tf.get_variable('weights', shape=kernel_shape, initializer=w_i)
conv = tf.nn.conv2d(inputs, weights, strides=strides, padding='SAME')
if (bias_shape is not None):
biases = tf.get_variable('biases', s... |
def assert_shape(tensor, ref_shape):
if (tensor.ndim != len(ref_shape)):
raise AssertionError(f'Wrong number of dimensions: got {tensor.ndim}, expected {len(ref_shape)}')
for (idx, (size, ref_size)) in enumerate(zip(tensor.shape, ref_shape)):
if (ref_size is None):
pass
elif ... |
def test_get_or_create_manifest_invalid_image(initialized_db):
repository = get_repository('devtable', 'simple')
latest_tag = get_tag(repository, 'latest')
manifest_bytes = Bytes.for_string_or_unicode(latest_tag.manifest.manifest_bytes)
parsed = parse_manifest_from_bytes(manifest_bytes, latest_tag.manif... |
def rate_limit(wait_length):
last_time = 0
def decorate(f):
(f)
def rate_limited(*args, **kwargs):
nonlocal last_time
diff = (perf_counter() - last_time)
if (diff < wait_length):
sleep((wait_length - diff))
r = f(*args, **kwargs)
... |
class New_Section_TestCase(ParserTest):
def __init__(self, *args, **kwargs):
ParserTest.__init__(self, *args, **kwargs)
self.ks = '\n%raw\n1234\nabcd\n%end\n'
def runTest(self):
self.parser.registerSection(RawSection(self.parser.handler))
self.parser.readKickstartFromString(self.... |
def evaluate(embeddings, actual_issame, threshold, nrof_folds=10):
thresholds = np.arange(0, 10, (0.01 / 4))
embeddings1 = embeddings[0::2]
embeddings2 = embeddings[1::2]
(tpr, fpr, accuracy) = utils.calculate_roc(thresholds, embeddings1, embeddings2, np.asarray(actual_issame), nrof_folds=nrof_folds)
... |
def test_capture_badoutput_issue412(pytester: Pytester) -> None:
pytester.makepyfile('\n import os\n\n def test_func():\n omg = bytearray([1,129,1])\n os.write(1, omg)\n assert 0\n ')
result = pytester.runpytest('--capture=fd')
result.stdout.fnmatch_line... |
def main():
if args.white_box_attack:
print('pgd white-box attack')
model = SmallCNN().to(device)
model.load_state_dict(torch.load(args.model_path))
eval_adv_test_whitebox(model, device, test_loader)
else:
print('pgd black-box attack')
model_target = SmallCNN().to... |
.parametrize('username,password', users)
def test_delete(db, client, username, password):
client.login(username=username, password=password)
instances = Question.objects.all()
for instance in instances:
url = reverse(urlnames['detail'], args=[instance.pk])
response = client.delete(url)
... |
def showhelp(config: Config) -> None:
import textwrap
reporter: Optional[TerminalReporter] = config.pluginmanager.get_plugin('terminalreporter')
assert (reporter is not None)
tw = reporter._tw
tw.write(config._parser.optparser.format_help())
tw.line()
tw.line('[pytest] ini-options in the fir... |
class TestTrainingExtensionBnFoldToScale():
.parametrize('config', quantsim_config_map.keys())
.parametrize('seed', range(10))
def test_fold_resnet18(self, seed, config):
quantsim_config = quantsim_config_map[config]
torch.manual_seed(seed)
model = models.resnet18().eval()
_i... |
def test_frame_getargs() -> None:
def f1(x) -> FrameType:
return sys._getframe(0)
fr1 = Frame(f1('a'))
assert (fr1.getargs(var=True) == [('x', 'a')])
def f2(x, *y) -> FrameType:
return sys._getframe(0)
fr2 = Frame(f2('a', 'b', 'c'))
assert (fr2.getargs(var=True) == [('x', 'a'), (... |
class WaterfallChart(Chart):
def __init__(self, percentage: Optional[bool]=False):
super().__init__()
self.total_value = None
self.cumulative_sum = None
self.percentage = percentage
def plot(self, figsize: Tuple[(float, float)]=None) -> None:
self._setup_axes_if_necessary... |
def main():
dsz.ui.Echo('')
dsz.ui.Echo(' CODE ')
dsz.ui.Echo('')
found_persistence = True
path_to_check = check_code_reg()
if (path_to_check is None):
found_persistence = False
dsz.ui.Echo('It appears CODE is NOT installed', dsz.ERROR)
found_bootstrap = False
if found_pe... |
def get_bindings(callable: Callable) -> Dict[(str, type)]:
look_for_explicit_bindings = False
if (not hasattr(callable, '__bindings__')):
type_hints = get_type_hints(callable, include_extras=True)
has_injectable_parameters = any(((_is_specialization(v, Annotated) and (_inject_marker in v.__metad... |
def all_py_files_in_source_are_in_py_typed_dirs(source: (zipfile.ZipFile | tarfile.TarFile)) -> bool:
py_typed_dirs: list[Path] = []
all_python_files: list[Path] = []
py_file_suffixes = {'.py', '.pyi'}
if isinstance(source, zipfile.ZipFile):
path_iter = (Path(zip_info.filename) for zip_info in s... |
_fixtures(MismatchScenarios)
def test_exception_on_mismatch_of_signature(mismatch_scenarios):
fixture = mismatch_scenarios
with expected(ProgrammerError):
class ModelObject():
(read_check=fixture.read_check, write_check=fixture.write_check)
def do_something_with_arguments(self, a... |
def get_access_token(username: Optional[str]=None, password: Optional[str]=None, app_id: Optional[str]=None, app_secret: Optional[str]=None, jwt: bool=True, refresh: bool=False) -> str:
session = get_local_session()
response = _get_jwt(session, only_if_cached=True)
if (response.ok and (not refresh)):
... |
_state_transitions.register
def _handle_receive_withdraw_confirmation(action: ReceiveWithdrawConfirmation, channel_state: NettingChannelState, block_number: BlockNumber, block_hash: BlockHash, **kwargs: Optional[Dict[(Any, Any)]]) -> TransitionResult[NettingChannelState]:
is_valid = is_valid_withdraw_confirmation(c... |
class GuacTask(enum.Enum):
ARIPIPRAZOLE = 'Aripiprazole_similarity'
OSIMERTINIB = 'Osimertinib_MPO'
RANOLAZINE = 'Ranolazine_MPO'
ZALEPLON = 'Zaleplon_MPO'
VALSARTAN = 'Valsartan_SMARTS'
DECO = 'decoration_hop'
SCAFFOLD = 'scaffold_hop'
PERINDOPRIL = 'Perindopril_MPO'
AMLODIPINE = 'A... |
class Credentials(rcreds.Creds):
__slots__ = ()
def __new__(cls, base: t.Optional[rcreds.Creds]=None, token: t.Optional[bytes]=None, name: t.Optional[rnames.Name]=None, lifetime: t.Optional[int]=None, mechs: t.Optional[t.Iterable[roids.OID]]=None, usage: str='both', store: t.Optional[t.Dict[(t.Union[(bytes, str... |
def test_version_tag_only_push(mocked_git_push: MagicMock, runtime_context_with_no_tags: RuntimeContext, cli_runner: CliRunner) -> None:
head_before = runtime_context_with_no_tags.repo.head.commit
args = [version.name, '--tag', '--no-commit', '--skip-build', '--no-vcs-release']
resp = cli_runner.invoke(main... |
class BTOOLS_OT_add_stairs(bpy.types.Operator):
bl_idname = 'btools.add_stairs'
bl_label = 'Add Stairs'
bl_options = {'REGISTER', 'UNDO', 'PRESET'}
props: bpy.props.PointerProperty(type=StairsProperty)
def poll(cls, context):
return ((context.object is not None) and (context.mode == 'EDIT_ME... |
.parametrize('provider', providers)
def test_wildcard_reference_from_previous_statements(provider: MetaDataProvider):
sql = 'create table test_x as\n select a, b\n from (\n select *, row_number() over (partition by id) as rn\n from db.tbl_x t\n ) t1\n where rn = 1\n ;\n\n create tabl... |
class Frame(object):
def __init__(self, client: CDPSession, parentFrame: Optional['Frame'], frameId: str) -> None:
self._client = client
self._parentFrame = parentFrame
self._url = ''
self._detached = False
self._id = frameId
self._documentPromise: Optional[ElementHan... |
class ActionFrame(ttk.Frame):
def __init__(self, parent, selected_color, selected_mask_type, selected_scaling, config_tools, patch_callback, refresh_callback, tk_vars):
logger.debug('Initializing %s: (selected_color: %s, selected_mask_type: %s, selected_scaling: %s, config_tools, patch_callback: %s, refresh... |
.parametrize('schema_version', [1, 2, 'oci'])
def test_push_pull_manifest_list(v22_protocol, basic_images, different_images, liveserver_session, app_reloader, schema_version, data_model):
credentials = ('devtable', 'password')
options = ProtocolOptions()
blobs = {}
signed = v22_protocol.build_schema1('d... |
def create_stem(in_chs, out_chs, stem_type='', conv_layer=None, act_layer=None, preact_feature=True):
stem_stride = 2
stem_feature = dict(num_chs=out_chs, reduction=2, module='stem.conv')
stem = OrderedDict()
assert (stem_type in ('', 'deep', 'deep_tiered', 'deep_quad', '3x3', '7x7', 'deep_pool', '3x3_p... |
def test_PushNegatives_simple_matrix():
dm = skcriteria.mkdm(matrix=[[1, (- 2), 3], [(- 1), 5, 6]], objectives=[min, max, min], weights=[1, 2, (- 1)])
expected = skcriteria.mkdm(matrix=[[2, 0, 3], [0, 7, 6]], objectives=[min, max, min], weights=[1, 2, (- 1)])
scaler = PushNegatives(target='matrix')
resu... |
class Solution():
def maxSubArray(self, nums: List[int]) -> int:
if (not nums):
return
maxsum = currentsum = nums[0]
for i in range(1, len(nums)):
currentsum = max((currentsum + nums[i]), nums[i])
if (currentsum > maxsum):
maxsum = currents... |
def test_checker_invalid_schemafile_scheme(run_line, tmp_path):
foo = (tmp_path / 'foo.json')
bar = (tmp_path / 'bar.json')
foo.write_text('{"title": "foo"}')
bar.write_text('{}')
res = run_line(['check-jsonschema', '--schemafile', f'ftp://{foo}', str(bar)])
assert (res.exit_code == 1)
asser... |
def cosine_similarity(x1: torch.Tensor, x2: torch.Tensor, eps: float=1e-08, batched_input: Optional[bool]=None) -> torch.Tensor:
if (batched_input is None):
msg = 'The default value of batched_input has changed from False to True in version 1.0.0. To suppress this warning, pass the wanted behavior explicitl... |
def test_five_nested_while_loop() -> None:
number = 10
test_list = [10, 20, 30]
sum_so_far = 0
list_so_far = []
with AccumulationTable(['number', 'sum_so_far', 'list_so_far']) as table:
if True:
while (number in test_list):
sum_so_far = (sum_so_far + number)
... |
def test_prepare_t_costs():
num_bits_p = 6
eta = 10
num_atoms = 10
lambda_zeta = 10
num_bits_nuc_pos = 16
b_r = 8
num_bits_m = 15
num_bits_t = 16
cost = 0
prep_first_quant = PrepareFirstQuantization(num_bits_p, eta, num_atoms, lambda_zeta, m_param=(2 ** num_bits_m), num_bits_nuc_... |
class PlayVehicleMoveServerBound(Packet):
id = 22
to = 0
def __init__(self, x: float, y: float, z: float, yaw: float, pitch: float) -> None:
super().__init__()
(self.x, self.y, self.z) = (x, y, z)
self.yaw = yaw
self.pitch = pitch
def decode(cls, buf: Buffer) -> PlayVehic... |
class ClientTimer(Timer):
def __init__(self, args, global_num_iterations, local_num_iterations_dict, client_index=None):
super().__init__(args)
self.role = 'client'
if (client_index is None):
self.client_index = args.client_index
else:
self.client_index = clie... |
def get_channelstate_settling(chain_state: ChainState, token_network_registry_address: TokenNetworkRegistryAddress, token_address: TokenAddress) -> List[NettingChannelState]:
return get_channelstate_filter(chain_state, token_network_registry_address, token_address, (lambda channel_state: (channel.get_status(channel... |
.parametrize('host', ['.', ' ', ' .', '. ', '. .', '. . .', ' . '])
def test_whitespace_hosts(host):
template = '
url = QUrl(template.format(host))
assert (not url.isValid())
with pytest.raises(urlmatch.ParseError, match='Invalid host|Pattern without host'):
urlmatch.UrlPattern(template.format(h... |
class TestImport(TestNameCheckVisitorBase):
_passes()
def test_import(self):
import pyanalyze as P
def capybara() -> None:
import pyanalyze
import pyanalyze as py
import pyanalyze.extensions as E
assert_is_value(pyanalyze, KnownValue(P))
... |
class base_fhvae_model_parser(base_parser):
def __init__(self, model_config_path):
self.parser = DefaultConfigParser()
parser = self.parser
config = {}
if (len(parser.read(model_config_path)) == 0):
raise ValueError('base_fhvae_model_parser(): %s not found', model_config_... |
def test_get_username_keyring_key_error_logged(entered_username, monkeypatch, config, caplog):
class FailKeyring():
def get_credential(system, username):
_raise_home_key_error()
monkeypatch.setattr(auth, 'keyring', FailKeyring)
assert (auth.Resolver(config, auth.CredentialInput()).userna... |
def test_edge_to_image_size_vert_horz():
aspect_ratio = 2.0
edge_size = 2
actual = image_.edge_to_image_size(edge_size, aspect_ratio, edge='vert')
desired = (edge_size, round((edge_size * aspect_ratio)))
assert (actual == desired)
actual = image_.edge_to_image_size(edge_size, aspect_ratio, edge=... |
class ForEnumerate(ForGenerator):
def need_cleanup(self) -> bool:
return True
def init(self, index1: Lvalue, index2: Lvalue, expr: Expression) -> None:
self.index_gen = ForInfiniteCounter(self.builder, index1, self.body_block, self.loop_exit, self.line, nested=True)
self.index_gen.init()... |
def test_creating_simple_scenarioloop():
scenario = ScenarioLoop(1, 'Scenario Loop', 'Iterations', 'I am a Scenario Loop', 'foo.feature', 1, parent=None, tags=None, preconditions=None, background=None)
assert (scenario.id == 1)
assert (scenario.keyword == 'Scenario Loop')
assert (scenario.iterations_key... |
class PylintCommand(distutils.cmd.Command):
description = 'run Pylint on Python source files'
user_options = [('pylint-rcfile=', None, 'path to Pylint config file')]
def initialize_options(self):
self.pylint_rcfile = ''
def finalize_options(self):
if self.pylint_rcfile:
asser... |
def _isProgramFilesPath(path):
targetenvironmentvars = _GetTargetEnvirons()
pathsplit = path.split(os.path.sep)
programfiles = targetenvironmentvars.get('ProgramFiles', None)
if (programfiles is not None):
programfiles = os.path.split(programfiles)[1]
else:
programfiles = 'Program Fi... |
class ActionPypilot(Action):
def __init__(self, hat, name, pypilot_name, pypilot_value=None):
super(ActionPypilot, self).__init__(hat, name)
self.pypilot_name = pypilot_name
self.value = pypilot_value
def trigger(self, count):
if (self.hat.client and (not count)):
sel... |
def test_windowed_groupby_aggs_with_start_state(stream):
example = pd.DataFrame({'name': [], 'amount': []})
sdf = DataFrame(stream, example=example)
output0 = sdf.window(5, with_state=True, start=None).groupby(['name']).amount.sum().stream.gather().sink_to_list()
df = pd.DataFrame({'name': ['Alice', 'To... |
class GRUStepLayer(L.MergeLayer):
def __init__(self, incomings, gru_layer, name=None):
super(GRUStepLayer, self).__init__(incomings, name)
self._gru_layer = gru_layer
def get_params(self, **tags):
return self._gru_layer.get_params(**tags)
def get_output_shape_for(self, input_shapes):... |
def verify_one_vector(vector):
digest_algorithm = vector['digest_algorithm']
message = vector['message']
x = vector['x']
y = vector['y']
signature = encode_dss_signature(vector['r'], vector['s'])
numbers = ec.EllipticCurvePublicNumbers(x, y, ec.SECP256K1())
key = numbers.public_key()
ver... |
def get_mini_data(data_path, seq_len, batch_size=32, l=4000):
train_ds = CAL_Dataset(data_path, 'train', seq_len=seq_len, subset_len=l)
val_ds = CAL_Dataset(data_path, 'train', seq_len=seq_len, subset_len=l)
return (DataLoader(train_ds, batch_size=batch_size, num_workers=10), DataLoader(val_ds, batch_size=(... |
def add_eval_options(parser):
parser.add_argument('--batch_size', type=int, default=0, help='if > 0 then overrule, otherwise load from checkpoint.')
parser.add_argument('--num_images', type=int, default=(- 1), help='how many images to use when periodically evaluating the loss? (-1 = all)')
parser.add_argume... |
def files_in_path(path, mask):
    """Return the file names under *path* that match *mask*.

    Runs the framework's ``dir`` command with terminal echo suppressed and
    reads the recorded ``DirItem::FileItem::name`` entries from the result.

    Args:
        path: Directory to list; a trailing backslash is stripped before the
            path is embedded in the command line.
        mask: Filename mask (e.g. ``*.txt``) passed to ``dir -mask``.

    Returns:
        List of matching file names as strings (as returned by
        ``dsz.cmd.data.Get``).
    """
    cmd = 'dir -mask {0} -path "{1}"'.format(mask, path.rstrip('\\'))
    dsz.control.echo.Off()
    try:
        # Record the command output so the file names can be queried below.
        dsz.cmd.Run(cmd, dsz.RUN_FLAG_RECORD)
    finally:
        # Fix: restore echo even if Run raises, instead of leaving it
        # permanently disabled for the rest of the session.
        dsz.control.echo.On()
    return dsz.cmd.data.Get('DirItem::FileItem::name', dsz.TYPE_STRING)
def main():
parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments))
if ((len(sys.argv) == 2) and sys.argv[1].endswith('.json')):
(model_args, data_args, training_args) = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
else:
(model_args, data_args,... |
def good_repr(obj: object) -> str:
    """Return a readable repr of *obj*.

    Strings containing more than one newline are rendered as a
    triple-single-quoted block with real line breaks; everything else
    falls through to the builtin ``repr``.
    """
    if isinstance(obj, str) and obj.count('\n') > 1:
        # Prefixing '"' forces repr() to pick single-quote delimiters, so
        # [2:-1] uniformly drops the opening quote + '"' and the closing quote,
        # leaving the line's escaped body.
        escaped = [repr('"' + line)[2:-1] for line in obj.split('\n')]
        pieces = ["'''\\", *escaped]
        pieces[-1] += "'''"
        return '\n'.join(pieces)
    return repr(obj)
def convert_c2_detectron_names(weights):
logger = logging.getLogger(__name__)
logger.info('Renaming Caffe2 weights ......')
original_keys = sorted(weights.keys())
layer_keys = copy.deepcopy(original_keys)
layer_keys = convert_basic_c2_names(layer_keys)
layer_keys = [k.replace('conv.rpn.fpn2', 'p... |
def pytest_runtest_protocol(item, nextitem):
reruns = get_reruns_count(item)
if (reruns is None):
return
check_options(item.session.config)
delay = get_reruns_delay(item)
parallel = (not is_master(item.config))
db = item.session.config.failures_db
item.execution_count = db.get_test_f... |
def create_default_local_file():
path = os.path.join(os.path.dirname(__file__), 'local.py')
empty_str = "''"
default_settings = OrderedDict({'workspace_dir': empty_str, 'tensorboard_dir': "self.workspace_dir + '/tensorboard/'", 'pretrained_networks': "self.workspace_dir + '/pretrained_networks/'", 'lasot_di... |
class ExperimentSuite(object):
def __init__(self, city_name, task, weather, iters, scene):
self._city_name = city_name
self._task = task
self._weather = weather
self._iters = iters
self._scene = scene
self._experiments = self.build_experiments()
def calculate_time... |
.parametrize('hermitian_constructor', [real_hermitian, imaginary_hermitian, complex_hermitian])
.parametrize('n_levels', [2, 10])
def test_transformation_to_eigenbasis_is_reversible(hermitian_constructor, n_levels):
H1 = hermitian_constructor(n_levels)
(_, ekets) = H1.eigenstates()
Heb = H1.transform(ekets)... |
class DataLoaderConf():
_target_: str = 'torch.utils.data.dataloader.DataLoader'
dataset: Any = MISSING
batch_size: Any = 1
shuffle: Any = False
sampler: Any = None
batch_sampler: Any = None
num_workers: Any = 0
collate_fn: Any = None
pin_memory: Any = False
drop_last: Any = Fals... |
class ProjectSerializer(serializers.ModelSerializer):
snapshots = SnapshotSerializer(many=True)
values = serializers.SerializerMethodField()
catalog = serializers.CharField(source='catalog.uri', default=None, read_only=True)
tasks = serializers.SerializerMethodField()
views = serializers.SerializerM... |
class HTML():
def __init__(self, web_dir, title, reflesh=0):
self.title = title
self.web_dir = web_dir
self.img_dir = os.path.join(self.web_dir, 'images')
if (not os.path.exists(self.web_dir)):
os.makedirs(self.web_dir)
if (not os.path.exists(self.img_dir)):
... |
def describe_type(prop: dict) -> str:
prop_type = prop['type']
types = (prop_type if isinstance(prop_type, list) else [prop_type])
if ('null' in types):
types.remove('null')
if (len(types) == 1):
prop_type = types[0]
parts = [f'`{prop_type}`']
for option in types:
if (opt... |
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [migrations.CreateModel(name='Banner', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(help_text="Text to display in the banner's button"... |
def iload(paths, segment=None, format='detect', database=None, check=True, skip_unchanged=False, content=g_content_kinds, show_progress=True, update_selection=None):
from ..selection import Selection
n_db = 0
n_load = 0
selection = None
kind_ids = to_kind_ids(content)
if isinstance(paths, str):
... |
class Goal(object):
def __init__(self, mturker=True):
if mturker:
with open((data_dir + 'detailed_goals.pkl'), 'rb') as fh:
self.goal_pool = pkl.load(fh)
else:
with open((data_dir + 'detailed_goals_augmented.pkl'), 'rb') as fh:
self.goal_pool =... |
def read_aggregation(filename):
assert os.path.isfile(filename)
object_id_to_segs = {}
label_to_segs = {}
with open(filename) as f:
data = json.load(f)
num_objects = len(data['segGroups'])
for i in range(num_objects):
object_id = (data['segGroups'][i]['objectId'] + 1)... |
def plot(model_name, dataset, noise_ratio, lids, acc_train, acc_test):
fig = plt.figure()
xnew = np.arange(0, len(lids), 1)
lids = lids[xnew]
acc_train = acc_train[xnew]
acc_test = acc_test[xnew]
ax = fig.add_subplot(111)
ax.plot(xnew, lids, c='r', marker='o', markersize=3, linewidth=2, labe... |
_only
def copy_opt_file(opt_file, experiments_root):
import sys
import time
from shutil import copyfile
cmd = ' '.join(sys.argv)
filename = osp.join(experiments_root, osp.basename(opt_file))
copyfile(opt_file, filename)
with open(filename, 'r+') as f:
lines = f.readlines()
li... |
class SemEval_TD(data.Dataset):
def sort_key(ex):
return len(ex.text)
def __init__(self, text_field, left_text_field, right_text_field, sm_field, input_data, **kwargs):
text_field.preprocessing = data.Pipeline(clean_str)
left_text_field.preprocessing = data.Pipeline(clean_str)
le... |
('volume', args=1)
def _volume(app, value):
if (not value):
raise CommandError('invalid arg')
if (value[0] in ('+', '-')):
if (len(value) > 1):
try:
change = (float(value[1:]) / 100.0)
except ValueError:
return
else:
cha... |
def _get_labels_and_probs(y_pred: np.ndarray, task_type: TaskType, prediction_type: Optional[PredictionType]) -> Tuple[(np.ndarray, Optional[np.ndarray])]:
assert (task_type in (TaskType.BINCLASS, TaskType.MULTICLASS))
if (prediction_type is None):
return (y_pred, None)
if (prediction_type == Predic... |
class DatasetTransformsUtilTest(unittest.TestCase):
def get_test_image_dataset(self, sample_type):
return RandomImageBinaryClassDataset(crop_size=224, class_ratio=0.5, num_samples=100, seed=0, sample_type=sample_type)
def transform_checks(self, sample, transform, expected_sample, seed=0):
transf... |
def load_pytorch_checkpoint_in_tf2_model(tf_model, pytorch_checkpoint_path, tf_inputs=None, allow_missing_keys=False):
try:
import tensorflow as tf
import torch
except ImportError:
logger.error('Loading a PyTorch model in TensorFlow, requires both PyTorch and TensorFlow to be installed. ... |
def test_page_descendants(db):
instances = Page.objects.all()
for instance in instances:
descendant_ids = []
page_elements = sorted([*instance.page_questionsets.all(), *instance.page_questions.all()], key=(lambda e: e.order))
for page_element in page_elements:
element = page_... |
class SlurmSchedulerTest(unittest.TestCase):
def test_create_scheduler(self) -> None:
scheduler = create_scheduler('foo')
self.assertIsInstance(scheduler, SlurmScheduler)
def test_replica_request(self) -> None:
role = simple_role()
(sbatch, srun) = SlurmReplicaRequest.from_role('... |
def _action_set_choices_callable(self: argparse.Action, choices_callable: ChoicesCallable) -> None:
if (self.choices is not None):
err_msg = 'None of the following parameters can be used alongside a choices parameter:\nchoices_provider, completer'
raise TypeError(err_msg)
elif (self.nargs == 0):... |
def dump_al():
try:
from pyglet.media.drivers import openal
except:
print('OpenAL not available.')
return
print('Library:', openal.lib_openal._lib)
driver = openal.create_audio_driver()
print('Version: {}.{}'.format(*driver.get_version()))
print('Extensions:')
for ext... |
class Window(QWidget):
def __init__(self):
super(Window, self).__init__()
self.horizontalSliders = SlidersGroup(Qt.Horizontal, 'Horizontal')
self.verticalSliders = SlidersGroup(Qt.Vertical, 'Vertical')
self.stackedWidget = QStackedWidget()
self.stackedWidget.addWidget(self.ho... |
class Topology():
def __init__(self, world_size: int, compute_device: str, hbm_cap: Optional[int]=None, ddr_cap: Optional[int]=None, local_world_size: Optional[int]=None, hbm_mem_bw: float=HBM_MEM_BW, ddr_mem_bw: float=DDR_MEM_BW, intra_host_bw: float=INTRA_NODE_BANDWIDTH, inter_host_bw: float=CROSS_NODE_BANDWIDTH,... |
def test_create_project_generate_extension_files(tmpfolder, git_mock):
assert (not Path('proj/tests/extra.file').exists())
assert (not Path('proj/tests/another.file').exists())
def add_files(struct, opts):
struct = structure.ensure(struct, 'tests/extra.file', 'content')
struct = structure.me... |
def _classification_mask_report(report, mask, X, labels_dict):
report.features_correlation_matrix(mask=mask).plot()
report.features_correlation_matrix_by_class(mask=mask, labels_dict=labels_dict).plot()
report.efficiencies(features=X.columns[1:3], mask=mask, labels_dict=labels_dict).plot()
report.featur... |
def bundle_submissions_srgb_v1(submission_folder, session):
out_folder = os.path.join(submission_folder, session)
try:
os.mkdir(out_folder)
except:
pass
israw = False
eval_version = '1.0'
for i in range(50):
Idenoised = np.zeros((20,), dtype=np.object)
for bb in r... |
class TXMLFromPattern(_TPattern):
def test_markup_passthrough(self):
pat = XMLFromPattern('\\<b\\><<title>>\\</b\\>')
self.assertEqual(pat.format(self.a), '<b><Title5></b>')
self.assertEqual(pat.format(self.b), '<b><Title6></b>')
self.assertEqual(pat.format(self.c),... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.