code stringlengths 281 23.7M |
|---|
def convert_tf_checkpoint_to_pytorch(task, reset_position_index_per_cell, tf_checkpoint_path, tapas_config_file, pytorch_dump_path):
config = TapasConfig.from_json_file(tapas_config_file)
config.reset_position_index_per_cell = reset_position_index_per_cell
if (task == 'SQA'):
model = TapasForQuestio... |
def train_model(input_tensor, label, criterion=None, model=None):
emb_dim = 96
epochs = 2000
learning_rate = 0.0005
weight_decay = 0.0005
if (criterion is None):
criterion = SimLoss()
if (model is None):
model = MGFN(graph_num=7, node_num=180, output_dim=emb_dim)
optimizer = ... |
def train(args, train_loader, model, criterion, optimizer, epoch):
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
model.train()
for (i, (input, target)) in enumerate(train_loader):
input_var = torch.autograd.Variable(input).cuda()
target_var = torch.autograd.Vari... |
class _NumericOperand(_Operand):
def __add__(self, other):
return _Increment(self, self._to_operand(other))
def __radd__(self, other):
return _Increment(self._to_operand(other), self)
def __sub__(self, other):
return _Decrement(self, self._to_operand(other))
def __rsub__(self, ot... |
class LeakyReLUConv2d(nn.Module):
def __init__(self, n_in, n_out, kernel_size, stride, padding=0):
super(LeakyReLUConv2d, self).__init__()
model = []
model += [nn.Conv2d(n_in, n_out, kernel_size=kernel_size, stride=stride, padding=padding, bias=True)]
model += [nn.LeakyReLU(inplace=T... |
class TestRubinsRules():
def test_error_wrong_len(self):
rr_est = [1, 1, 3]
rr_std = [0.05, 0.05]
with pytest.raises(ValueError):
rubins_rules(rr_est, rr_std)
def test_match_sas1(self):
rr_est = [0.52, 0.31, (- 0.04)]
rr_var = [0.075, 0.083, 0.065]
est... |
class BasicConv2d(nn.Module):
def __init__(self, in_planes, out_planes, kernel_size, stride, padding=0):
super(BasicConv2d, self).__init__()
self.conv = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=padding, bias=False)
self.bn = nn.BatchNorm2d(out_planes, ... |
def select_sub_channel(*args):
smallest_seq1 =
smallest_seq2 =
first_channel = None
for channel in args:
if (not isinstance(channel, SubChannel)):
raise ValueError('select_sub_channel() only accepts SUB channels.')
(seq1, seq2) = channel._get_pending_sequence_numbers()
... |
.parametrize('platform', ['ios', 'android'])
def test_register_volunteer_device(graphql_client, mocker, settings, platform):
settings.VOLUNTEERS_PUSH_NOTIFICATIONS_IOS_ARN = 'arn::ios_arn'
settings.VOLUNTEERS_PUSH_NOTIFICATIONS_ANDROID_ARN = 'arn::android_arn'
boto3_mock = mocker.patch('api.volunteers_notif... |
def _process_legacy_keyword(kwargs, oldkey, newkey, newval):
if (kwargs.get(oldkey) is not None):
warnings.warn(f"keyword '{oldkey}' is deprecated; use '{newkey}'", DeprecationWarning)
if (newval is not None):
raise ControlArgument(f"duplicate keywords '{oldkey}' and '{newkey}'")
... |
class Net(nn.Module):
def __init__(self):
super().__init__()
self.flatten = nn.Flatten()
self.linear_relu_stack = nn.Sequential(nn.Linear((28 * 28), 512), nn.ReLU(), nn.Linear(512, 512), nn.ReLU(), nn.Linear(512, 10))
def forward(self, x):
x = self.flatten(x)
logits = sel... |
class OffsetProgressUpdate():
def __init__(self, status_update: ProgressUpdateCallable, offset: float, scale: float):
self.status_update = status_update
self.offset = offset
self.scale = scale
def __call__(self, message: str, percentage: float) -> None:
percentage = min(percentag... |
def test_get_controlled_terms(requests_mock):
requests_mock.get(f'{API_V1}/controlled_terms', json=SAMPLE_DATA['get_controlled_terms'], status_code=200)
response = get_controlled_terms()
assert (len(response['results']) == 4)
first_result = response['results'][0]
assert (first_result['id'] == 12)
... |
def _create_video_feature_info(input_paths: List[Path], relative_dir: Path, output_dir: Path, feature_name: str) -> VideoFeatureInfo:
    """Build a VideoFeatureInfo for one group of input video clips.

    The output filename is derived from the first character of the first
    input path's stem (presumably the game-half identifier -- TODO confirm
    against the caller) plus the feature name, and the .npy file is placed
    under output_dir/relative_dir.
    """
    half_prefix = input_paths[0].stem[0]
    target = output_dir / relative_dir / f'{half_prefix}_{feature_name}.npy'
    return VideoFeatureInfo(input_paths, target)
class SimpleForm(Form):
def __init__(self, view):
super().__init__(view, 'simple_form')
self.use_layout(FormLayout())
if self.exception:
self.layout.add_alert_for_domain_exception(self.exception)
self.add_child(P(view, text='Press Submit to cause an error'))
self.... |
def extract_distinct_and_content(s, keyword_exceptions=[], remove_stopwords=True):
has_distinct = any(((w in s.lower()) for w in keywords_distinct))
content_str = remove_stop_words(s, (keywords_distinct + stop_words_distinct))
text_tokens = word_tokenize(content_str)
if remove_stopwords:
tokens_... |
def get_rand_data_binary(num_updates: int, num_tasks: int, batch_size: int, device: Optional[torch.device]=None) -> Tuple[(torch.Tensor, torch.Tensor)]:
if (device is None):
device = torch.device('cpu')
shape = [num_updates, num_tasks, batch_size]
if ((num_tasks == 1) and (num_updates == 1)):
... |
class SimpleTemplate(BaseTemplate):
def prepare(self, escape_func=html_escape, noescape=False, syntax=None, **ka):
self.cache = {}
enc = self.encoding
self._str = (lambda x: touni(x, enc))
self._escape = (lambda x: escape_func(touni(x, enc)))
self.syntax = syntax
if n... |
class Bluetooth(base._TextBox, base.MarginMixin):
defaults = [('hide_unnamed_devices', False, 'Devices with no name will be hidden from scan results'), ('symbol_connected', '*', 'Symbol to indicate device is connected'), ('symbol_paired', '-', 'Symbol to indicate device is paired but unconnected'), ('symbol_unknown... |
def _get_visibility_techniques(filename):
groups_dict = {}
(visibility_techniques, name, platform, domain) = load_techniques(filename)
group_id = 'VISIBILITY'
groups_dict[group_id] = {}
groups_dict[group_id]['group_name'] = 'Visibility'
groups_dict[group_id]['techniques'] = set()
groups_dict... |
class DLC(BaseContainer):
__name__ = 'DLC'
__type__ = 'container'
__version__ = '0.34'
__status__ = 'testing'
__pattern__ = '(?:.+\\.(?:dlc|DLC)|[\\w\\+^_]+==[\\w\\+^_/]+==)$'
__config__ = [('enabled', 'bool', 'Activated', True), ('use_premium', 'bool', 'Use premium account if available', True),... |
def grpc_channel(port_fixture_name, channel_arg_name='channel'):
def decorator(func):
if (hasattr(func, '__parallelized__') and func.__parallelized__):
raise TypeError('Cannot pass gRPC channel to already parallelized test, grpc_client_parallelize should be the last decorator in chain')
... |
def on_train_result(info):
    """RLlib training callback: push the latest training progress into every env.

    Reads the total episode count and the learner stats from the training
    result dict and forwards them to each wrapped environment's
    ``set_phase`` hook, so the envs can react as training advances.
    """
    training_result = info['result']
    total_episodes = training_result['episodes_total']
    stats = training_result['info']['learner']

    def _update_env(env):
        # Each env wrapper exposes the underlying env as ``wrapped``.
        env.wrapped.set_phase(total_episodes, stats)

    def _update_worker(worker):
        worker.foreach_env(_update_env)

    info['trainer'].workers.foreach_worker(_update_worker)
def test_get_single_hud_text_all_standard_pickups(echoes_pickup_database, echoes_resource_database):
memo_data = default_database.default_prime2_memo_data()
for item in echoes_pickup_database.standard_pickups.values():
pickup = pickup_creator.create_standard_pickup(item, StandardPickupState(included_amm... |
def list_video(sequence):
video_clips = list()
tracks = sequence.videoTracks
for track in tracks:
print('Track :', (track.name or track.id))
clips = track.clips
for clip in clips:
print('\tName: {}'.format(clip.name))
print('\t- {:.<12}{}'.format('Path', (clip... |
def _get_decorator(parameters, returnvalue, fork_inst, mapper, mapper_kwargs):
def _decorator(decorated):
_validate_decoration(decorated, fork_inst)
args = [decorated, parameters, returnvalue, fork_inst, mapper, mapper_kwargs]
wrapper = (_get_async_wrapper(*args) if iscoroutinefunction(decor... |
def test_index_without_amd():
index = OCIIndex(Bytes.for_string_or_unicode(OCI_IMAGE_INDEX_MANIFEST_WITHOUT_AMD))
assert index.is_manifest_list
assert (index.digest == 'sha256:a0ed0f2b3949bcfaf4245f6872dc5bc98ee6ea5443f169')
assert (index.local_blob_digests == [])
assert (index.child_manifest_digest... |
class TestGeventClient(test_client.TestClient):
def setUp(self):
try:
import gevent
except ImportError:
pytest.skip('gevent not available.')
KazooTestCase.setUp(self)
def _makeOne(self, *args):
from kazoo.handlers.gevent import SequentialGeventHandler
... |
class WeaponRack(TutorialObject):
def at_object_creation(self):
self.cmdset.add_default(CmdSetWeaponRack, permanent=True)
self.db.rack_id = 'weaponrack_1'
self.db.get_weapon_msg = 'You find |c%s|n.'
self.db.no_more_weapons_msg = 'you find nothing else of use.'
self.db.availab... |
class g_net(nn.Module):
def __init__(self):
super(g_net, self).__init__()
self.encoder = nn.Sequential(nn.Conv2d(1, 64, 5, stride=1), nn.BatchNorm2d(64), nn.ReLU(True), nn.Conv2d(64, 128, 5, stride=1), nn.BatchNorm2d(128), nn.ReLU(True), nn.Conv2d(128, 256, 5, stride=1), nn.ReLU(True), nn.BatchNorm2... |
def test_group_search_types():
fixture_records = generate_fixtures(vectors_sizes=50)
vectors_config = models.VectorParams(size=50, distance=models.Distance.EUCLID)
searcher = TestGroupSearcher()
local_client = init_local()
init_client(local_client, fixture_records, vectors_config=vectors_config)
... |
class TestTexture3D(unittest.TestCase):
def create_image(self, width, height, color):
data = (colorbyte(color) * (width * height))
return ImageData(width, height, 'R', data)
def check_image(self, image, width, height, color):
self.assertTrue((image.width == width))
self.assertTru... |
_config
def test_spiral_right(manager):
manager.c.next_layout()
manager.c.next_layout()
manager.test_window('one')
assert_dimensions(manager, 0, 0, 798, 598)
manager.test_window('two')
assert_dimensions(manager, 0, 0, 398, 598)
manager.test_window('three')
assert_dimensions(manager, 0, 0... |
class GeodSharedMemoryBugTestIssue64(unittest.TestCase):
def setUp(self):
self.g = Geod(ellps='clrk66')
self.ga = self.g.a
self.mercury = Geod(a=2439700)
def test_not_shared_memory(self):
self.assertEqual(self.ga, self.g.a)
self.assertNotEqual(self.g.a, self.mercury.a)
... |
def test_context_not_connected(context):
assert (context.is_ready == False)
assert (context.is_failed == False)
assert (context.is_terminated == False)
assert (context.server == None)
assert isinstance(context.protocol_version, numbers.Integral)
assert (context.server_protocol_version == None)
... |
def extend_pandas():
from pandas.core.base import PandasObject as _po
_po.compsum = stats.compsum
_po.comp = stats.comp
_po.expected_return = stats.expected_return
_po.geometric_mean = stats.geometric_mean
_po.ghpr = stats.ghpr
_po.outliers = stats.outliers
_po.remove_outliers = stats.re... |
class ModelVar(Op):
def make_node(self, rv, *dims):
assert isinstance(rv, Variable)
dims = self._parse_dims(rv, *dims)
return Apply(self, [rv, *dims], [rv.type(name=rv.name)])
def _parse_dims(self, rv, *dims):
if dims:
dims = [pytensor.as_symbolic(dim) for dim in dims... |
class PathManager():
def open(path: str, mode: str='r', buffering: int=(- 1), encoding: Optional[str]=None, errors: Optional[str]=None, newline: Optional[str]=None):
if FVCorePathManager:
return FVCorePathManager.open(path=path, mode=mode, buffering=buffering, encoding=encoding, errors=errors, n... |
def sample_infinite_data(loader, seed=0):
rng = torch.Generator()
rng.manual_seed(seed)
BIG_NUMBER =
while True:
try:
shuffle_seed = torch.randint(0, BIG_NUMBER, (1,), generator=rng).item()
loader.sampler.set_epoch(shuffle_seed)
except AttributeError:
... |
_test
.skipif((K.backend() != 'tensorflow'), reason='Requires tensorflow backend')
def test_TensorBoard_multi_input_output(tmpdir):
np.random.seed(np.random.randint(1, .0))
filepath = str((tmpdir / 'logs'))
((X_train, y_train), (X_test, y_test)) = get_test_data(num_train=train_samples, num_test=test_samples... |
class TestSyntheticType(TestNameCheckVisitorBase):
_passes()
def test_functools(self):
import functools
import types
from pyanalyze.signature import ELLIPSIS_PARAM, Signature
sig = Signature.make([ELLIPSIS_PARAM], return_annotation=TypedValue(int))
def f() -> int:
... |
def main():
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('log_dir', type=path.Path, help='log dir')
args = parser.parse_args()
with open((args.log_dir / 'args.json')) as f:
args_dict = json.load(f)
model = contrib.models.Model(n... |
class TestPyAppVersion():
def test_default(self, isolation):
config = {'project': {'name': 'My.App', 'version': '0.1.0'}}
builder = AppBuilder(str(isolation), config=config)
assert (builder.config.pyapp_version == builder.config.pyapp_version == '')
def test_set(self, isolation):
... |
class Attribute(Generic[T]):
resources: ClassVar[Union[(List[Union[(Tuple[(constants.InterfaceType, str)], constants.EventType)]], Type[AllSessionTypes])]] = []
py_name: ClassVar[str] = 'To be specified'
visa_name: ClassVar[str] = 'To be specified'
visa_type: ClassVar[str] = ''
attribute_id: ClassVa... |
class RHEL5_PartData(FC4_PartData):
removedKeywords = FC4_PartData.removedKeywords
removedAttrs = FC4_PartData.removedAttrs
def __init__(self, *args, **kwargs):
FC4_PartData.__init__(self, *args, **kwargs)
self.encrypted = kwargs.get('encrypted', False)
self.passphrase = kwargs.get('... |
def LoadScene(sceneFile, project):
def addUuid(obj, uuid):
if (obj in project._ids):
return
project._ids[obj] = uuid
project._idMap[uuid] = obj
if (not Path(sceneFile).is_file()):
raise PyUnityException(f'The specified file does not exist: {sceneFile}')
data = Loa... |
def empty(path, try_trash=False, exist_ok=True):
if ((not exist_ok) and (not os.path.exists(path))):
raise OSError('Path not exists')
if os.path.isfile(path):
if try_trash:
origfile = (path + '.orig')
os.rename(path, origfile)
shutil.copy2(origfile, path)
... |
class Migration(migrations.Migration):
dependencies = [('adserver', '0072_data_region_topic')]
operations = [migrations.AddField(model_name='historicalpublisher', name='record_offer_details', field=models.BooleanField(default=False, help_text='Record additional offer details for this publisher')), migrations.Ad... |
class Appr(Inc_Learning_Appr):
def __init__(self, model, device, nepochs=160, lr=0.1, lr_min=0.0001, lr_factor=10, lr_patience=8, clipgrad=10000, momentum=0.9, wd=0.0005, multi_softmax=False, wu_nepochs=0, wu_lr_factor=1, fix_bn=False, eval_on_train=False, logger=None, exemplars_dataset=None, lamb=5.0, pod_flat_fac... |
def sort_by_hierarchy(tids, taxdump):
seq = [tids[0]]
pool = [x for x in tids[1:]]
while pool:
found = False
for (i, tid) in enumerate(pool):
if (taxdump[seq[(- 1)]]['parent'] == tid):
seq.append(tid)
found = True
elif (taxdump[tid]['pa... |
class MultiViewDataInjector():
    """Apply several transforms to one sample and stack the results.

    Calling an instance runs every transform in ``transform_list`` on the
    same input sample and concatenates the outputs along a new leading
    "view" dimension, producing a tensor of shape
    (num_views, *transform_output_shape).
    """

    def __init__(self, transform_list):
        # Each transform must return tensors of identical shape so they
        # can be concatenated.
        self.transform_list = transform_list

    def __call__(self, sample):
        views = []
        for transform in self.transform_list:
            views.append(transform(sample).unsqueeze(0))
        return torch.cat(views, dim=0)
def test_create_org_policy_nonexistent_method(initialized_db, app):
with client_with_identity('devtable', app) as cl:
response = conduct_api_call(cl, OrgAutoPrunePolicies, 'POST', {'orgname': 'sellnsmall'}, {'method': 'doesnotexist', 'value': '2w'}, expected_code=400).json
assert (response['error_me... |
def convert_image_to_fn(img_type, minsize, image, eps=0.02):
(width, height) = image.size
if (min(width, height) < minsize):
scale = ((minsize / min(width, height)) + eps)
image = image.resize((math.ceil((width * scale)), math.ceil((height * scale))))
if (image.mode != img_type):
ret... |
def build_text_embed(model_clip, caption):
run_on_gpu = torch.cuda.is_available()
with torch.no_grad():
texts = clip.tokenize(caption, truncate=True)
if run_on_gpu:
texts = texts.cuda()
model_clip = model_clip.cuda()
text_embeddings = model_clip.encode_text(texts)... |
class ImplicitInterfacesTest(unittest.TestCase):
def setUp(self):
super().setUp()
self.project = testutils.sample_project(validate_objectdb=True)
self.pycore = self.project.pycore
self.mod1 = testutils.create_module(self.project, 'mod1')
self.mod2 = testutils.create_module(se... |
class TimeMeter(Meter):
def __init__(self, init: int=0, n: int=0, round: Optional[int]=None):
self.round = round
self.reset(init, n)
def reset(self, init=0, n=0):
self.init = init
self.start = time.perf_counter()
self.n = n
self.i = 0
def update(self, val=1):
... |
def test_call_with_wraps():
mywrapper_called = False
myfunc_called = False
def myfunc(a, b):
nonlocal myfunc_called
myfunc_called = True
assert (a == 1)
assert (b == 2)
.wraps(myfunc)
def mywrapper(c):
nonlocal mywrapper_called
mywrapper_called = True
... |
class MenuComponent(ABC):
def add(self, menuComponent: MenuComponent) -> None:
raise UnsupportedOperationException
def remove(self, menuComponent: MenuComponent) -> None:
raise UnsupportedOperationException
def getChild(self, i: int) -> MenuComponent:
raise UnsupportedOperationExcept... |
class BasicValidation(Validation):
def __init__(self, dataframe: DataFrame=None):
super().__init__(dataframe)
def check(self) -> None:
self.validate_df_is_spark_df()
self.validate_column_ts()
self.validate_df_is_empty()
def validate_column_ts(self) -> None:
if (not se... |
class LiteHRModule(nn.Module):
def __init__(self, num_branches, num_blocks, in_channels, reduce_ratio, module_type, multiscale_output=False, with_fuse=True, conv_cfg=None, norm_cfg=dict(type='BN'), with_cp=False):
super().__init__()
self._check_branches(num_branches, in_channels)
self.in_cha... |
class VMSFSCollector(diamond.collector.Collector):
SYSFS = '/sys/fs/vmsfs'
VMSFS_STATS = {'resident': ('cur_resident', 4096), 'allocated': ('cur_allocated', 4096)}
def vmsfs_stats_read(self, filename):
stats = {}
stats_fd = None
try:
stats_fd = open(filename)
... |
class OptMPO_Valsartan(Molecule):
def _reward(self):
scorer = valsartan_smarts()
s_fn = scorer.wrapped_objective
molecule = Chem.MolFromSmiles(self._state)
if (molecule is None):
return 0.0
return (s_fn.score(self._state) * (self.discount_factor ** (self.max_steps... |
class TestErrors():
def test_subfactory_missing_funcarg(self, pytester: Pytester) -> None:
pytester.makepyfile('\n import pytest\n ()\n def gen(qwe123):\n return 1\n def test_something(gen):\n pass\n ')
result = pyteste... |
def make_Potsdam_dataloaders(config):
dataloaders = _create_dataloaders(config, potsdam.__dict__[config.dataset])
mapping_assignment_dataloader = _create_mapping_loader(config, potsdam.__dict__[config.dataset], partitions=config.mapping_assignment_partitions)
mapping_test_dataloader = _create_mapping_loader... |
class OnRawUpdate():
def on_raw_update(self=None, group: int=0) -> Callable:
def decorator(func: Callable) -> Callable:
if isinstance(self, pyrogram.Client):
self.add_handler(pyrogram.handlers.RawUpdateHandler(func), group)
else:
if (not hasattr(func, ... |
def test_pymtl3_list_interface_views():
    """The RTLIR of a list of interface views is an Array of InterfaceView."""
    dut = CaseBits32MsgRdyIfcOnly.DUT()
    dut.elaborate()
    assert rt.is_rtlir_convertible(dut.in_)
    msg_port = rt.Port('output', rdt.Vector(32))
    rdy_port = rt.Port('input', rdt.Vector(1))
    expected = rt.Array([5], rt.InterfaceView('Bits32MsgRdyIfc', {'msg': msg_port, 'rdy': rdy_port}))
    assert rtlir_getter.get_rtlir(dut.in_) == expected
def _git_str():
if (BASEDIR is None):
return None
if (not os.path.isdir(os.path.join(BASEDIR, '.git'))):
return None
try:
commit_hash = _call_git(BASEDIR, 'describe', '--match=NeVeRmAtCh', '--always', '--dirty')
date = _call_git(BASEDIR, 'show', '-s', '--format=%ci', 'HEAD')
... |
class TrackContext():
def __init__(self, root, label):
assert isinstance(root, Root)
assert isinstance(label, str)
self.root = root
self.label = label
def __enter__(self):
global global_context
global_lock.acquire()
global_context = self
self.root.... |
class TestClientPlan(ClientTestCase):
def setUp(self):
super(TestClientPlan, self).setUp()
self.base_url = '{}/plans'.format(self.base_url)
self.plan_id = 'plan_8kihN0YqhnF8a7'
def test_plan_fetch_all(self):
result = mock_file('plan_collection')
url = self.base_url
... |
def _tensor_with_entanglement(all_qubits, entangled, entangled_locations):
n_entangled = len(entangled.dims[0])
n_separable = (len(all_qubits) - n_entangled)
separable = all_qubits.copy()
for location in sorted(entangled_locations, reverse=True):
del separable[location]
out = qutip.tensor(*s... |
class Joystick(EventDispatcher):
def __init__(self, device):
self.device = device
self.x = 0
self.y = 0
self.z = 0
self.rx = 0
self.ry = 0
self.rz = 0
self.hat_x = 0
self.hat_y = 0
self.buttons = []
self.x_control = None
... |
class TestParseResultClass():
def assertNotTuples(self, parses):
assert all(((type(p) != tuple) for p in parses))
def assertAllTuples(self, parses):
assert all(((type(p) == tuple) for p in parses))
def test_namedtuples(self, morph):
self.assertNotTuples(morph.parse(''))
def test_... |
def register_attention(name):
def register_attention_cls(cls):
if (name in ATTENTION_REGISTRY):
raise ValueError('Cannot register duplicate attention ({})'.format(name))
if (not issubclass(cls, BaseAttention)):
raise ValueError('Attention ({} : {}) must extend BaseAttention'.... |
def test_mark_stacking(testdir):
testdir.makepyfile("\n import pytest\n ()\n def get_marks(request):\n return [(mark.args[0], node.name) for node, mark\n in request.node.iter_markers_with_node(name='my_mark')]\n\n .my_mark('foo')\n def describe_marks(... |
def critical_band(frequency):
if isinstance(frequency, np.ndarray):
center = frequency.copy()
center[(frequency < 50.0)] = 50.0
else:
center = (50.0 if (frequency < 50) else frequency)
bandwidth = (((center > 500.0) * (center * 0.2)) + ((center <= 500.0) * 100.0))
upper = (center... |
def test_set(base_app):
(out, err) = run_cmd(base_app, 'set quiet True')
expected = normalize('\nquiet - was: False\nnow: True\n')
assert (out == expected)
assert (base_app.last_result is True)
(out, err) = run_cmd(base_app, 'set quiet')
expected = normalize("\nName Value ... |
def _trace_tensordictmodule(td_module: TensorDictModule) -> TDGraphModule:
graph = fx.Tracer().trace(td_module.module)
new_graph = fx.Graph()
env = {}
td = fx.Proxy(new_graph.placeholder('tensordict'))
node_iter = iter(graph.nodes)
_parse_input_nodes(td_module.in_keys, node_iter, td, {}, env)
... |
def single_qubit_bitstrings(num_qubits: int) -> List[Generator]:
    """Enumerate the single-qubit bit-flip specs for each qubit index.

    For every qubit i this produces two triples: ('1', '0', (i,)) and
    ('0', '1', (i,)), ordered with all the ('1', '0') entries first,
    followed by all the ('0', '1') entries.

    Raises ValueError if the result does not contain exactly
    2 * num_qubits entries (defensive sanity check; cannot fire for
    non-negative num_qubits).
    """
    specs = [(src, dst, (qubit,))
             for (src, dst) in (('1', '0'), ('0', '1'))
             for qubit in range(num_qubits)]
    if len(specs) != 2 * num_qubits:
        raise ValueError('Should have gotten 2n qubits, got {}'.format(len(specs)))
    return specs
def separate_vocal_from_audio(basename_without_ext: str, cache_path: str, ultrastar_audio_input_path: str) -> str:
audio_separation_path = os.path.join(cache_path, 'separated', 'htdemucs', basename_without_ext)
if (settings.use_separated_vocal or settings.create_karaoke):
separate_audio(ultrastar_audio_... |
def init():
root_dir = (home / 'data/ycb_video/YCB_Video_Models')
if (not root_dir.exists()):
gdown.cached_download(url=' path=(root_dir + '.zip'), md5='054ba9d38a3f080572dcb3c', postprocess=gdown.extractall)
class_names = []
for model_dir in sorted(root_dir.listdir()):
class_name = str(... |
class SimpleCNN_header(nn.Module):
def __init__(self, input_dim, hidden_dims, output_dim=10):
super(SimpleCNN_header, self).__init__()
self.conv1 = nn.Conv2d(3, 6, 5)
self.relu = nn.ReLU()
self.pool = nn.MaxPool2d(2, 2)
self.conv2 = nn.Conv2d(6, 16, 5)
self.fc1 = nn.L... |
class HelpButton(QToolButton):
def __init__(self, text):
QToolButton.__init__(self)
self.setText('?')
self.help_text = text
self.setFocusPolicy(Qt.NoFocus)
self.setFixedWidth(round((2.2 * char_width_in_lineedit())))
self.clicked.connect(self.onclick)
def onclick(s... |
(slots=True)
class ReservationNumber(ValueObject):
_DATETIME_FORMAT: ClassVar[str] = '%y%m%d%H%M%S'
_RANDOM_STR_LENGTH: ClassVar[int] = 7
value: str
def generate(cls) -> ReservationNumber:
time_part: str = datetime.utcnow().strftime(cls._DATETIME_FORMAT)
random_strings: str = ''.join((ra... |
class MazeEnv(ProxyEnv, Serializable):
MODEL_CLASS = None
ORI_IND = None
MAZE_HEIGHT = None
MAZE_SIZE_SCALING = None
MAZE_MAKE_CONTACTS = False
MAZE_STRUCTURE = [[1, 1, 1, 1, 1], [1, 'r', 0, 0, 1], [1, 1, 1, 0, 1], [1, 'g', 0, 0, 1], [1, 1, 1, 1, 1]]
MANUAL_COLLISION = False
def __init__... |
_auth
def admin_play(request, pk):
    """Render the session-replay page for one admin session record.

    Looks up the AdminRecord by primary key and chooses the replay template
    by the recorded mode: 'ssh' sessions get the SSH player; any other mode
    falls through to the Guacamole player.

    NOTE(review): the template context is built from ``locals()``, so the
    local variable names (request, pk, record) are part of the template
    contract -- do not rename them.
    """
    # select_related avoids a second query when the template dereferences
    # record.admin_login_user.
    record = AdminRecord.objects.select_related('admin_login_user').get(id=pk)
    if (record.admin_record_mode == 'ssh'):
        return render(request, 'assets/ssh_play.html', locals())
    else:
        # Every non-ssh mode (presumably Guacamole-proxied RDP/VNC -- confirm
        # against the model's mode choices) ends up here.
        return render(request, 'assets/guacamole_play.html', locals())
def test_expand_alternatives_4(blank_resource_db):
    """Expanding one single-requirement set with another unions their lists."""
    db = blank_resource_db
    set_a = make_single_set(make_req_a(db))
    set_b = make_single_set(make_req_b(db))
    list_a = RequirementList([make_req_a(db)[1]])
    list_b = RequirementList([make_req_b(db)[1]])
    expected = RequirementSet([list_a, list_b])
    assert set_a.expand_alternatives(set_b) == expected
def _compute_starting_line(source_c, end_c, bbox, mask):
center_x = ((source_c[0] + end_c[0]) / 2)
center_y = ((source_c[1] + end_c[1]) / 2)
length = np.sqrt((((end_c[1] - source_c[1]) ** 2) + ((end_c[0] - source_c[0]) ** 2)))
norm_vec = (((- (end_c[1] - source_c[1])) / length), ((end_c[0] - source_c[0]... |
def check_if_pod_exists(name: str, namespace: str) -> bool:
namespace_exists = check_if_namespace_exists(namespace)
if namespace_exists:
pod_list = list_pods(namespace=namespace)
if (name in pod_list):
return True
else:
logging.error(("Namespace '%s' doesn't exist" % str(... |
def gradients(ys, xs, grad_ys=None, checkpoints='collection', **kwargs):
if (not isinstance(ys, list)):
ys = [ys]
if (not isinstance(xs, list)):
xs = [xs]
bwd_ops = ge.get_backward_walk_ops([y.op for y in ys], inclusive=True)
debug_print('bwd_ops: {}'.format(bwd_ops))
fwd_ops = ge.ge... |
def muti_bce_loss_fusion(d0, d1, d2, d3, d4, d5, d6, d7, labels_v):
loss0 = bce_ssim_loss(d0, labels_v)
loss1 = bce_ssim_loss(d1, labels_v)
loss2 = bce_ssim_loss(d2, labels_v)
loss3 = bce_ssim_loss(d3, labels_v)
loss4 = bce_ssim_loss(d4, labels_v)
loss5 = bce_ssim_loss(d5, labels_v)
loss6 = ... |
class Minutes(IObserver):
def __init__(self):
self.key = None
self.time = None
def notify(self, observable, *args, **kwargs):
self.key = observable.key
self.time = kwargs['time']
self.set_number_minute()
self.set_replace_minute()
return self.time
def s... |
class Mesh():
def __init__(self, vertices: List[Vec3], indices: List[int], compute_inertia=True):
self.vertices = vertices
self.indices = indices
if compute_inertia:
com = np.mean(vertices, 0)
num_tris = int((len(indices) / 3))
weight = 0.25
al... |
class CmdLookBridge(Command):
key = 'look'
aliases = ['l']
locks = 'cmd:all()'
help_category = 'TutorialWorld'
def func(self):
caller = self.caller
bridge_position = self.caller.db.tutorial_bridge_position
location = self.obj
message = ('|c%s|n\n%s\n%s' % (location.ke... |
def test_raise_unprintable_assertion_error(pytester: Pytester) -> None:
pytester.makepyfile("\n def test_raise_assertion_error():\n raise AssertionError('\\xff')\n ")
result = pytester.runpytest()
result.stdout.fnmatch_lines(["> raise AssertionError('\\xff')", 'E AssertionEr... |
def test_can_fail(testdir):
testdir.makepyfile('\n def describe_something():\n def fails():\n assert False\n def describe_nested():\n def fails_too():\n assert False\n ')
result = testdir.runpytest()
result.assert_outcomes(... |
def make_one_source_episode_pipeline(dataset_spec, use_dag_ontology, use_bilevel_ontology, split, episode_descr_config, pool=None, shuffle_buffer_size=None, read_buffer_size_bytes=None, num_prefetch=0, image_size=None, num_to_take=None):
use_all_classes = False
if (pool is not None):
if (not data.POOL_S... |
def sys_tags(*, warn: bool=False) -> Iterator[Tag]:
interp_name = interpreter_name()
if (interp_name == 'cp'):
(yield from cpython_tags(warn=warn))
else:
(yield from generic_tags())
if (interp_name == 'pp'):
interp = 'pp3'
elif (interp_name == 'cp'):
interp = ('cp' + ... |
def test_qmenu_leak_workaround():
pg.mkQApp()
topmenu = QtWidgets.QMenu()
submenu = QtWidgets.QMenu()
refcnt1 = sys.getrefcount(submenu)
topmenu.addMenu(submenu)
submenu.setParent(None)
refcnt2 = sys.getrefcount(submenu)
assert (refcnt2 == refcnt1)
del topmenu
assert pg.Qt.isQObj... |
class custom_logger(object):
def __init__(self, log_path='./log', formatter_str=None, debug=None):
print(('setting logger and file handler (%s)' % log_path))
self.log_path = log_path
if (not os.path.isdir(os.path.dirname(log_path))):
os.makedirs(os.path.dirname(log_path))
... |
def load_model(base_model, psnet_model, decoder, regressor_delta, args):
ckpt_path = args.ckpts
if (not os.path.exists(ckpt_path)):
raise NotImplementedError(('no checkpoint file from path %s...' % ckpt_path))
print(('Loading weights from %s...' % ckpt_path))
state_dict = torch.load(ckpt_path, m... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.