code — string column; entry lengths range from 281 characters to 23.7M characters
|---|
class TestConfig(DefaultConfig):
TESTING = True
SECRET_KEY = 'superdupersecret!!!1'
DATABASE_SECRET_KEY = 'anothercrazykey!'
BILLING_TYPE = 'FakeStripe'
TEST_DB_FILE = TEST_DB_FILE
DB_URI = os.environ.get('TEST_DATABASE_URI', 'sqlite:///{0}'.format(TEST_DB_FILE.name))
DB_CONNECTION_ARGS = {'... |
class DGroups():
def __init__(self, qtile, dgroups, key_binder=None, delay=1):
self.qtile = qtile
self.groups = dgroups
self.groups_map = {}
self.rules = []
self.rules_map = {}
self.last_rule_id = 0
for rule in getattr(qtile.config, 'dgroups_app_rules', []):
... |
class TestSqueezeCat():
def setup_method(self):
self.x = np.arange(10)
self.y = np.arange(10, 20)
def test_combine_false_squeeze_false(self):
expected = [self.x, self.y]
result = base._squeeze_cat([self.x, self.y], False, False)
npt.assert_equal(result, expected)
def ... |
def create_nettree():
global S
global ptn_len
nettree = [[] for i in range((ptn_len + 1))]
start = [0 for i in range((ptn_len + 1))]
for i in range(len(S)):
node0 = node(i)
if (S[i] == sub_ptn_list[0].start):
node0.toleave = True
nettree[0].append(node0)
... |
class TripletEvaluator(SentenceEvaluator):
def __init__(self, anchors: List[str], positives: List[str], negatives: List[str], main_distance_function: SimilarityFunction=None, name: str='', batch_size: int=16, show_progress_bar: bool=False, write_csv: bool=True):
self.anchors = anchors
self.positives... |
_2_unicode_compatible
class BaseNgramModel(object):
def __init__(self, ngram_counter):
self.ngram_counter = ngram_counter
self.ngrams = ngram_counter.ngrams[ngram_counter.order]
self._ngrams = ngram_counter.ngrams
self._order = ngram_counter.order
def _check_against_vocab(self, w... |
def multi_weight_inj(pfi, sdc_p=1e-05, function=_zero_rand_weight):
corrupt_idx = [[], [], [], [], []]
for layer_idx in range(pfi.get_total_layers()):
shape = list(pfi.get_weights_size(layer_idx))
dim_len = len(shape)
shape.extend([1 for i in range((4 - len(shape)))])
for k in ra... |
def get_model_classes() -> List[Type]:
    """Return every class defined in ``pyinaturalist.models`` that derives from BaseModel.

    Note: ``issubclass(BaseModel, BaseModel)`` is True, so BaseModel itself is
    included in the returned list, preserving the original behavior.
    """
    import pyinaturalist.models
    from pyinaturalist.models import BaseModel

    # getmembers yields (name, object) pairs in sorted-name order; keep only classes.
    return [
        member
        for _, member in getmembers(pyinaturalist.models)
        if isclass(member) and issubclass(member, BaseModel)
    ]
def _coverage_data_file(coverage_dir) -> str:
for root in (coverage_dir, os.getenv('TOXINIDIR', _PROJ_ROOT)):
path = Path(root)
if (list(path.glob('.coverage.*')) or (path / '.coverage').exists()):
return str((path / '.coverage'))
raise FileNotFoundError('.coverage (data_file to stor... |
def main():
args = parse_args()
assert (args.out or args.eval or args.format_only or args.show or args.show_dir), 'Please specify at least one operation (save/eval/format/show the results / save the results) with the argument "--out", "--eval", "--format-only", "--show" or "--show-dir"'
if (args.eval and ar... |
class Effect7231(BaseEffect):
    """Passive effect: boosts the 'armorDamageAmount' attribute of all fitted
    modules whose item requires the 'Repair Systems' skill, by the ship's
    'shipBonusGC3' attribute (passed with skill='Gallente Cruiser')."""

    type = 'passive'

    def handler(fit, ship, context, projectionRange, **kwargs):
        # Predicate selecting the modules this bonus applies to.
        def requires_repair_systems(mod):
            return mod.item.requiresSkill('Repair Systems')

        bonus = ship.getModifiedItemAttr('shipBonusGC3')
        fit.modules.filteredItemBoost(
            requires_repair_systems,
            'armorDamageAmount',
            bonus,
            skill='Gallente Cruiser',
            **kwargs,
        )
def main():
    """Entry point: validate raw argv, parse options, run early init, launch the app.

    Returns whatever ``app.run`` returns (typically an exit code).
    """
    _validate_untrusted_args(sys.argv)
    args = get_argparser().parse_args(sys.argv[1:])
    if args.json_args is not None:
        # Arguments were packed as JSON; expand them back into the namespace.
        args = _unpack_json_args(args)
    earlyinit.early_init(args)
    # NOTE(review): imported here rather than at module top — presumably so
    # early_init runs before app-level modules load; confirm before moving.
    from qutebrowser import app
    return app.run(args)
def mixture_model(random_seed=1234):
np.random.seed(1234)
size = 1000
w_true = np.array([0.35, 0.4, 0.25])
mu_true = np.array([0.0, 2.0, 5.0])
sigma = np.array([0.5, 0.5, 1.0])
component = np.random.choice(mu_true.size, size=size, p=w_true)
x = np.random.normal(mu_true[component], sigma[comp... |
def rb_circuit_execution_2(rb_opts: dict, shots: int):
backend = qiskit.Aer.get_backend('qasm_simulator')
basis_gates = ['u1', 'u2', 'u3', 'cx']
(rb_circs, xdata) = rb.randomized_benchmarking_seq(**rb_opts)
noise_model = NoiseModel()
t_1 = 100.0
t_2 = 80.0
gate1q = 0.1
gate2q = 0.5
n... |
class ResNetPreTrainedModel(PreTrainedModel):
config_class = ResNetConfig
base_model_prefix = 'resnet'
main_input_name = 'pixel_values'
supports_gradient_checkpointing = True
def _init_weights(self, module):
if isinstance(module, nn.Conv2d):
nn.init.kaiming_normal_(module.weight,... |
class AgentRule():
def __init__(self, config):
self.all_facet_list = config.all_facet_list
self.facet_dim_list = config.facet_dim_list
with open(config.utt_gen_dict_path, 'r') as f:
self.utt_gen_dict = json.load(f)
def build_facet_index_dict():
facet_index_dic... |
(trylast=True)
def pytest_runtest_setup(item: Item) -> None:
if (not isinstance(item, Function)):
return
if isinstance(item, TestCaseFunction):
return
func = item
call_optional(func.obj, 'setup', func.nodeid)
func.addfinalizer((lambda : call_optional(func.obj, 'teardown', func.nodeid... |
class CountingDuckFactory(AbstractDuckFactory):
def createMallardDuck(self) -> Quackable:
return QuackCounter(MallardDuck())
def createRedheadDuck(self) -> Quackable:
return QuackCounter(RedheadDuck())
def createDuckCall(self) -> Quackable:
return QuackCounter(DuckCall())
def cre... |
def test_builtin_discovery_class_preferred(mocker):
mocker.patch('virtualenv.run.plugin.discovery._get_default_discovery', return_value=['pluginA', 'pluginX', 'builtin', 'Aplugin', 'Xplugin'])
options = VirtualEnvOptions()
session_via_cli(['venv'], options=options)
assert (options.discovery == 'builtin'... |
class Migration(migrations.Migration):
initial = True
dependencies = [('auth', '0012_alter_user_first_name_max_length')]
operations = [migrations.CreateModel(name='CustomUser', fields=[('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('password', models.C... |
def flags(cls):
assert (cls.__bases__ == (object,))
d = dict(cls.__dict__)
new_type = type(cls.__name__, (int,), d)
new_type.__module__ = cls.__module__
map_ = {}
for (key, value) in d.items():
if ((key.upper() == key) and isinstance(value, int)):
value_instance = new_type(va... |
.parametrize('other_user', [False, True])
def test_admin_session_change_layout_description_invalid(mock_emit_session_update: MagicMock, clean_database, other_user, flask_app):
user1 = database.User.create(id=1234, name='The Name')
user2 = database.User.create(id=1235, name='Other')
session = database.Multip... |
def _test(args):
assert args.load
test_fname = args.eval_data
model = models.Model(args, constant.ANSWER_NUM_DICT[args.goal])
model.cuda()
model.eval()
if (args.goal == 'onto'):
saved_path = constant.EXP_ROOT_ONTO
else:
saved_path = constant.EXP_ROOT
model.load_state_dict... |
_fixtures(WebFixture)
def test_bookmarks_from_other_sources(web_fixture):
fixture = web_fixture
class UIWithRelativeView(UserInterface):
def assemble(self):
view_factory = self.define_view('/aview', title='A View title')
fixture.bookmark_from_view_factory = view_factory.as_bookma... |
class TreeModel(QAbstractItemModel):
def __init__(self, glb, params, parent=None):
super(TreeModel, self).__init__(parent)
self.glb = glb
self.params = params
self.root = self.GetRoot()
self.last_row_read = 0
def Item(self, parent):
if parent.isValid():
... |
def evolve(*args, **changes):
if args:
try:
(inst,) = args
except ValueError:
msg = f'evolve() takes 1 positional argument, but {len(args)} were given'
raise TypeError(msg) from None
else:
try:
inst = changes.pop('inst')
except KeyE... |
_dataframe_method
def limit_column_characters(df: pd.DataFrame, column_length: int, col_separator: str='_') -> pd.DataFrame:
check('column_length', column_length, [int])
check('col_separator', col_separator, [str])
col_names = df.columns
col_names = [col_name[:column_length] for col_name in col_names]
... |
def yolo_body(anchors, num_classes=20, score_threshold=0.01):
num_anchors = len(anchors)
num_anchors_per_layer = (num_anchors // 3)
image_input = Input(shape=(None, None, 3), name='image_input')
fm_13_input = Input(shape=(None, None, num_anchors_per_layer, (num_classes + 5)), name='fm_13_input')
fm_... |
def test_cmdstep_cmd_is_string():
    """A plain string 'cmd' in context parses into one non-shell, non-saving Command."""
    expected_context = Context({'cmd': 'blah'})
    step = CmdStep('blahname', Context({'cmd': 'blah'}))
    assert not step.is_shell
    assert step.logger.name == 'blahname'
    assert step.context == expected_context
    assert step.commands == [
        Command('blah', cwd=None, is_shell=False, is_save=False)
    ]
def get_metric_func(metric: str) -> Callable[([Union[(List[int], List[float])], List[float]], float)]:
if (metric == 'auc'):
return roc_auc_score
if (metric == 'prc-auc'):
return prc_auc
if (metric == 'rmse'):
return rmse
if (metric == 'mse'):
return mse
if (metric ==... |
def _create_file(set_path, always_delete=False):
if os.path.exists(set_path):
if (always_delete or (not os.path.isfile(set_path))):
_rmtree(set_path)
open_mode = set_path.get('open')
with open(set_path, ('w' + open_mode)) as fd:
size = set_path.get('size')
content = set_p... |
def main():
if (len(sys.argv) != 4):
((print >> sys.stderr), 'Usage: svn_version_gen.py <output file> <CXX compiler> <CXX flags>')
sys.exit(1)
cxx_compiler = sys.argv[2]
cxx_flags = sys.argv[3]
with open(sys.argv[1], 'w') as result:
((print >> result), '#pragma once\n')
(... |
def generate_stubs(options: Options) -> None:
mypy_opts = mypy_options(options)
(py_modules, pyc_modules, c_modules) = collect_build_targets(options, mypy_opts)
all_modules = ((py_modules + pyc_modules) + c_modules)
all_module_names = sorted((m.module for m in all_modules))
generate_asts_for_modules... |
def test_assert_attrs_equal():
class TestExample(object):
(a, b, c) = (1, ('wvi', 3), [4.5, 3.14])
def test_func(self):
return self.b
assert mmcv.assert_attrs_equal(TestExample, {'a': 1, 'b': ('wvi', 3), 'c': [4.5, 3.14]})
assert (not mmcv.assert_attrs_equal(TestExample, {'a': 1,... |
.parametrize('BaseDataset', (AnimalBaseDataset, BottomUpBaseDataset, FaceBaseDataset, FashionBaseDataset, HandBaseDataset, TopDownBaseDataset, Body3DBaseDataset))
def test_dataset_base_class(BaseDataset):
    """Subclassing (or instantiating) a removed base dataset class raises ImportError.

    Parametrized over the deprecated base classes; the `with` block covers both
    the class definition and the construction, so the test passes if either
    step raises ImportError.
    """
    with pytest.raises(ImportError):
        class Dataset(BaseDataset):
            pass
        # Only reached if defining the subclass did not already raise.
        _ = Dataset()
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('--n_stn', help='(int) number of stations to use in training', default=20, type=int)
parser.add_argument('--mean', help='(str) latent mean, options = "Constant", "LogRBF"', default='LogRBF')
parser.add_argument('--nomg', help='(int) num... |
class Shuffle(PlaylistPlugin):
PLUGIN_ID = 'Shuffle Playlist'
PLUGIN_NAME = _('Shuffle Playlist')
PLUGIN_DESC = _('Randomly shuffles a playlist.')
PLUGIN_ICON = Icons.MEDIA_PLAYLIST_SHUFFLE
def plugin_playlist(self, playlist):
playlist.shuffle()
return True
def plugin_handles(sel... |
def add_create_databases(sub_parsers):
parser: ArgumentParser = sub_parsers.add_parser('create-new-database', help='Creates initial databases for a recently created game. Automatically ran after add-new-game')
parser.add_argument('--game', type=str, required=True, choices=[game.value for game in RandovaniaGame.... |
class StaticFileResource(SubResource):
sub_regex = '(?P<filename>[^/]+)'
sub_path_template = '%(filename)s'
def get_url(self):
return self.get_url_for(self.unique_name, filename=self.file.name)
def __init__(self, view, unique_name, a_file):
super().__init__(view, unique_name)
sel... |
def test_show_func_column_formatting():
from line_profiler.line_profiler import show_func
import line_profiler
import io
func = line_profiler.line_profiler.show_text
start_lineno = func.__code__.co_firstlineno
filename = func.__code__.co_filename
func_name = func.__name__
def get_func_li... |
class TestInvalidResourceName(BaseTestCase):
def test_bad_syntax(self):
e = rname.InvalidResourceName.bad_syntax('syntax', 'resource')
assert (str(e) == "Could not parse 'resource'. The syntax is 'syntax'.")
e = rname.InvalidResourceName.bad_syntax('syntax', 'resource', 'ex')
assert ... |
def check_style(top_level, commit):
separator = ('-' * len(commit.title))
print(separator)
print(commit.title)
print(separator)
issues = 0
for checker in CommitChecker.checkers():
for issue in checker.check(commit, top_level):
print(('%s%s%s' % (Colours.fg(Colours.Yellow), is... |
def _get_cache_contents(cache):
cache_contents = set()
rel_keys = os.path.relpath(cache.keys, cache.path)
rel_data = os.path.relpath(cache.data, cache.path)
rel_pools = os.path.relpath(cache.pools, cache.path)
rel_cache = os.path.relpath(cache.path, cache.path)
for key in os.listdir(cache.keys):... |
class LKA3d(nn.Module):
def __init__(self, dim):
super().__init__()
self.conv0 = nn.Conv3d(dim, dim, 5, padding=2, groups=dim)
self.conv_spatial = nn.Conv3d(dim, dim, 7, stride=1, padding=9, groups=dim, dilation=3)
self.conv1 = nn.Conv3d(dim, dim, 1)
def forward(self, x):
... |
class W2lFairseqLMDecoder(W2lDecoder):
def __init__(self, args, tgt_dict):
super().__init__(args, tgt_dict)
self.unit_lm = getattr(args, 'unit_lm', False)
self.lexicon = (load_words(args.lexicon) if args.lexicon else None)
self.idx_to_wrd = {}
checkpoint = torch.load(args.ken... |
def main():
scene = SceneManager.AddScene('Scene')
scene.mainCamera.transform.localPosition = Vector3(0, 3, (- 10))
scene.mainCamera.transform.eulerAngles = Vector3(15, 0, 0)
cube = GameObject('Cube')
cube.transform.localPosition = Vector3(2, 0, 0)
renderer = cube.AddComponent(MeshRenderer)
... |
def get_answer_start(tokenized_answer, context):
answer = ''.join((ch for ch in tokenized_answer if (not ch.isspace())))
(context_strip, offsets) = zip(*[(ch, ptr) for (ptr, ch) in enumerate(context) if (not ch.isspace())])
idx = ''.join(context_strip).index(answer)
(answer_start, answer_end) = (offsets... |
class Model(object):
def __init__(self, policy, env, nsteps, dropoutpi_keep_prob, dropoutpi_keep_prob_value, dropoutvf_keep_prob, dropoutvf_keep_prob_value, isbnpitrainmode, isbnvftrainmode, l1regpi, l2regpi, l1regvf, l2regvf, wclippi, wclipvf, ent_coef=0.01, vf_coef=0.5, max_grad_norm=0.5, lr=0.0007, alpha=0.99, e... |
def test_unregistered_base_implementations():
a = m.RegisteredDerived()
a.do_nothing()
assert (a.rw_value == 42)
assert (a.ro_value == 1.25)
a.rw_value += 5
assert (a.sum() == 48.25)
a.increase_value()
assert (a.rw_value == 48)
assert (a.ro_value == 1.5)
assert (a.sum() == 49.5)
... |
class DoubleProjectAttention(nn.Module):
def __init__(self, image_feat_dim, txt_rnn_embeding_dim, hidden_size, dropout=0.2):
super(DoubleProjectAttention, self).__init__()
self.att1 = ProjectAttention(image_feat_dim, txt_rnn_embeding_dim, hidden_size, dropout)
self.att2 = ProjectAttention(im... |
class TestBuild(support.TempdirManager):
def test_formats(self):
dist = self.create_dist()[1]
cmd = bdist(dist)
cmd.formats = ['gztar']
cmd.ensure_finalized()
assert (cmd.formats == ['gztar'])
formats = ['bztar', 'gztar', 'rpm', 'tar', 'xztar', 'zip', 'ztar']
... |
def solar_from_int(g):
y = int((((10000 * g) + 14780) / 3652425))
d = (g - ((((365 * y) + int((y / 4))) - int((y / 100))) + int((y / 400))))
if (d < 0):
y -= 1
d = (g - ((((365 * y) + int((y / 4))) - int((y / 100))) + int((y / 400))))
mi = int((((100 * d) + 52) / 3060))
m = (((mi + 2... |
class Network(object):
def __init__(self, inputs, trainable=True):
self.inputs = []
self.layers = dict(inputs)
self.trainable = trainable
self.setup()
def setup(self):
raise NotImplementedError('Must be subclassed.')
def load(self, data_path, session, ignore_missing=F... |
def get_transfer_secret(chain_state: ChainState, secrethash: SecretHash) -> Optional[Secret]:
    """Return the secret for the transfer identified by ``secrethash``, if any.

    Looks up the payment task registered under the hash; when no task is
    tracked for it, returns None.
    """
    task = chain_state.payment_mapping.secrethashes_to_task.get(secrethash)
    if task is not None:
        return secret_from_transfer_task(transfer_task=task, secrethash=secrethash)
    return None
class NewVersion():
def __init__(self, filename, found_date, release_date, source) -> None:
self.filename = filename
self.found_date = found_date
self.release_date = release_date
self.source = source
def from_dict(cls, dictionary):
return cls(filename=dictionary['filename... |
def delete_dbs_with_failed_migrations(valid_db_names: List[Path]) -> None:
for db_path in valid_db_names:
file_version = get_file_version(db_path)
with get_file_lock(db_path):
db_version = get_db_version(db_path)
if (db_version == file_version):
continue
... |
def formatannotation(annotation):
if isinstance(annotation, typing._UnionGenericAlias):
return ' | '.join(map(formatannotation, annotation.__args__))
elif isinstance(annotation, type):
return annotation.__name__
elif isinstance(annotation, (typing._SpecialGenericAlias, typing._SpecialForm)):... |
class TestClientPayment(ClientTestCase):
def setUp(self):
super(TestClientPayment, self).setUp()
self.base_url = '{}/payments'.format(self.base_url)
def test_payment_all(self):
result = mock_file('payment_collection')
url = self.base_url
responses.add(responses.GET, url, ... |
class SliceBuilder():
def __init__(self, raw_dataset, label_dataset, weight_dataset, patch_shape, stride_shape, **kwargs):
patch_shape = tuple(patch_shape)
stride_shape = tuple(stride_shape)
skip_shape_check = kwargs.get('skip_shape_check', False)
if (not skip_shape_check):
... |
.usefixtures('legacy_plot_signature')
.filterwarnings('ignore:.*non-positive left xlim:UserWarning')
def test_superimpose():
plt.figure(1)
plt.clf()
ctrl.bode_plot(ctrl.tf([1], [1, 2, 1]))
ctrl.bode_plot(ctrl.tf([5], [1, 1]))
(len(plt.gcf().axes) == 2)
for ax in plt.gcf().axes:
assert (l... |
def test_lsp2lpc():
    """pysptk.lsp2lpc should reproduce the reference LPC data from the LSP file."""
    def load_frames(name):
        # Files are stored as float32; upcast to float64 as the reference did.
        raw = np.fromfile(join(DATA_DIR, name), np.float32)
        return raw.reshape(759, 26).astype(np.float64)

    expected_lpc = load_frames('test16k.lsp2lpc')
    lsp = load_frames('test16k.lsp')
    assert np.allclose(expected_lpc, pysptk.lsp2lpc(lsp), atol=0.0001)
class GenerativeNetwork(ABC):
def __init__(self):
super().__init__()
self._num_parameters = 0
self._num_qubits = 0
self._bounds = []
warn_package('aqua.components.neural_networks', 'qiskit_machine_learning.algorithms.distribution_learners.qgan', 'qiskit-machine-learning')
... |
def translate_opts(parser):
group = parser.add_argument_group('Model')
group.add('--model', '-model', dest='models', metavar='MODEL', nargs='+', type=str, default=[], required=True, help='Path to model .pt file(s). Multiple models can be specified, for ensemble decoding.')
group = parser.add_argument_group(... |
class _AnswerStrategy():
__slots__ = ('question', 'strategy_type', 'types', 'services')
def __init__(self, question: DNSQuestion, strategy_type: _int, types: List[str], services: List[ServiceInfo]) -> None:
self.question = question
self.strategy_type = strategy_type
self.types = types
... |
class Exponential1DSubMesh(SubMesh1D):
def __init__(self, lims, npts, side='symmetric', stretch=None):
(spatial_var, spatial_lims, tabs) = self.read_lims(lims)
a = spatial_lims['min']
b = spatial_lims['max']
npts = npts[spatial_var.name]
coord_sys = spatial_var.coord_sys
... |
_failure
.parametrize('start_raiden_apps', [False])
.parametrize('deposit', [0])
.parametrize('channels_per_node', [CHAIN])
.parametrize('number_of_nodes', [2])
def test_alarm_task_first_run_syncs_blockchain_events(raiden_network: List[RaidenService], blockchain_services):
(app0, _) = raiden_network
target_bloc... |
class TestManager():
def __init__(self, backend, debug_log):
self.backend = backend
self.log_level = (logging.DEBUG if debug_log else logging.INFO)
self.backend.manager = self
self.proc = None
self.c = None
self.testwindows = []
def __enter__(self):
self._... |
class TestUserSocialAuth(TestCase):
def setUp(self):
self.user_model = get_user_model()
self.user = self.user_model._default_manager.create_user(username='randomtester', email='')
self.usa = UserSocialAuth.objects.create(user=self.user, provider='my-provider', uid='1234')
def test_change... |
.parametrize('fixture', ('capfd', 'capsys'))
.xfail(reason='#10042')
def test_pdb_suspends_fixture_capturing(pytester: Pytester, fixture: str) -> None:
p1 = pytester.makepyfile('\n def test_inner({fixture}):\n import sys\n\n print("out_inner_before")\n sys.stderr.write("err_i... |
_network('Graphormer')
class GraphormerModel(torch.nn.Module):
def __init__(self, dim_in, dim_out):
super().__init__()
self.encoder = FeatureEncoder(dim_in)
dim_in = self.encoder.dim_in
if (cfg.gnn.layers_pre_mp > 0):
self.pre_mp = GNNPreMP(dim_in, cfg.gnn.dim_inner, cfg.... |
class ReqParseTest(object):
def test_api_shortcut(self, app):
api = Api(app)
parser = api.parser()
assert isinstance(parser, RequestParser)
def test_parse_model(self, app):
model = Model('Todo', {'task': fields.String(required=True)})
parser = RequestParser()
pars... |
class Volcano(object):
age = None
__fields__ = OrderedDict()
__slots__ = list(__fields__.keys())
def __init__(self, *args):
for ((attr, attr_type), value) in zip(self.__fields__.items(), args):
if (attr_type in (int, float)):
if ((not value) or (value == '?')):
... |
def Attack_Global(classifier, device, respace, t, eps=16, iter=10, name='attack_global', alpha=2, version='v1', skip=200):
pgd_conf = gen_pgd_confs(eps=eps, alpha=alpha, iter=iter, input_range=(0, 1))
save_path = f'vis/{name}_{version}/{classifier}_eps{eps}_iter{iter}_{respace}_t{t}/'
mp(save_path)
clas... |
def identify_geotiff_cmap(path, band=1):
try:
try:
import rasterio
from matplotlib.colors import ListedColormap
with rasterio.open(path) as tifffile:
c = tifffile.colormap(band)
colors = (np.array(list(c.values())) / 255)
bins = lis... |
def feature_func(sample, doc_tokend, query_tokend, vocab, vocab_tag, vocab_ner, is_train=True, dataset_name='squad'):
fea_dict = {}
fea_dict['uid'] = sample['uid']
fea_dict['context'] = sample['context']
fea_dict['query_tok'] = tok_func(query_tokend, vocab)
fea_dict['query_pos'] = postag_func(query_... |
def dsb_prediction_collate(batch):
error_msg = 'batch must contain tensors or str; found {}'
if isinstance(batch[0], torch.Tensor):
return torch.stack(batch, 0)
elif isinstance(batch[0], str):
return list(batch)
elif isinstance(batch[0], collections.Sequence):
transposed = zip(*b... |
def find_three_bit_multiplier_factors(product=[0, 0, 0, 0, 0, 0]):
x = Array.create('a', shape=3, vartype='BINARY')
y = Array.create('b', shape=3, vartype='BINARY')
H = ThreeBitMultiplier(x, y, product, 'mult')
model = H.compile()
(qubo, offset) = model.to_qubo()
sampler = neal.SimulatedAnnealin... |
class LmdbMaker():
def __init__(self, lmdb_path, map_size=(1024 ** 4), batch=5000, compress_level=1):
if (not lmdb_path.endswith('.lmdb')):
raise ValueError("lmdb_path must end with '.lmdb'.")
if osp.exists(lmdb_path):
print(f'Folder {lmdb_path} already exists. Exit.')
... |
def _check_ts_compatibility(ts0, ts1):
ts0 = _util.make_list_of_t(ts0)
ts1 = _util.make_list_of_t(ts1)
if (len(ts0) != len(ts1)):
raise ValueError('ts0 and ts1 have different sizes: {} != {}'.format(len(ts0), len(ts1)))
for (t0, t1) in zip(ts0, ts1):
(dtype0, dtype1) = (t0.dtype, t1.dtyp... |
_lr_scheduler('fixed')
class FixedSchedule(FairseqLRScheduler):
def __init__(self, args, optimizer):
super().__init__(args, optimizer)
args.warmup_updates = (getattr(args, 'warmup_updates', 0) or 0)
self.lr = args.lr[0]
if (args.warmup_updates > 0):
self.warmup_factor = (... |
def create_properties(properties: Properties, defaults: Properties=None) -> Properties:
    """Overlay ``properties`` onto a defaults layer and return the merged result.

    The base layer is a deep copy of the class-level DEFAULTS of
    ``properties``'s type (copied so the class constant is never mutated).
    When ``defaults`` is supplied it is merged over DEFAULTS first;
    ``properties`` always takes final precedence.
    """
    base = deepcopy(type(properties).DEFAULTS)
    if defaults is not None:
        base = _replace_properties(defaults, base)
    return _replace_properties(properties, base)
class TickTock():
def __init__(self):
self.time_pairs = []
self.current_time = None
def tick(self):
self.current_time = time.time()
def tock(self):
assert (self.current_time is not None), self.current_time
self.time_pairs.append([self.current_time, time.time()])
... |
.parametrize('max_num_attempts', [None, 2])
def test_driver_retry_fail(testdir, mocker, max_num_attempts):
mocker.patch('pytest_selenium.webdriver.firefox.webdriver.FirefoxRemoteConnection', side_effect=Exception('Connection Error'))
mocker.patch('pytest_selenium.webdriver.firefox.webdriver.Service')
mocker... |
def infer_domain(terms):
domains = {t.domain for t in terms}
num_domains = len(domains)
if (num_domains == 0):
return GENERIC
elif (num_domains == 1):
return domains.pop()
elif ((num_domains == 2) and (GENERIC in domains)):
domains.remove(GENERIC)
return domains.pop()... |
def test_limit_inference_result_amount() -> None:
code = "\n args = []\n\n if True:\n args += ['a']\n\n if True:\n args += ['b']\n\n if True:\n args += ['c']\n\n if True:\n args += ['d']\n\n args #\n "
result = extract_node(code).inferred()
assert (len(result... |
def save_state(filename, args, model_state_dict, criterion, optimizer, lr_scheduler, num_updates, optim_history=None, extra_state=None):
from fairseq import utils
if (optim_history is None):
optim_history = []
if (extra_state is None):
extra_state = {}
state_dict = {'args': args, 'model'... |
.parametrize('solve', [pytest.param(qutip.sesolve, id='sesolve'), pytest.param(functools.partial(qutip.mesolve, c_ops=[qutip.qzero(2)]), id='mesolve')])
def test_compatibility_with_solver(solve):
e_ops = [getattr(qutip, ('sigma' + x))() for x in 'xyzmp']
e_ops += [(lambda t, psi: np.sin(t))]
h = qutip.sigma... |
def test_both_cipher(A, B, size=(4 * 1024), repeat=16):
print('Testing', B.__name__, '...')
t1 = t2 = 0
for i in range(repeat):
assert ((A.KEY_LENGTH == B.KEY_LENGTH) and (A.IV_LENGTH == B.IV_LENGTH))
key = os.urandom(A.KEY_LENGTH)
iv = os.urandom(A.IV_LENGTH)
t = time.perf_c... |
def test_geth_request_pruned_data_raises_an_exception(deploy_client: JSONRPCClient, web3: Web3) -> None:
(contract_proxy, _) = deploy_rpc_test_contract(deploy_client, 'RpcWithStorageTest')
iterations = 1
def send_transaction() -> TransactionMined:
estimated_transaction = deploy_client.estimate_gas(c... |
def prepare_data(inp_lines, out_lines):
in_str = ''
for line in inp_lines:
in_str += line
out_str = ''
for line in out_lines:
out_str += line
(in_ls, out_ls) = (in_str.split('[SEP]')[:(- 1)], out_str.split('[SEP]')[:(- 1)])
in_ls = [ele[:(- 1)] for ele in in_ls]
out_ls = [ele... |
class Factory(object):
def make_ok(self, directive_text=''):
return Result(True, 1, 'This is a description.', Directive(directive_text))
def make_not_ok(self, directive_text=''):
return Result(False, 1, 'This is a description.', Directive(directive_text))
def make_bail(self, reason='Because ... |
def get_bloq_examples() -> List[BloqExample]:
reporoot = get_git_root()
bloqs_root = (reporoot / 'qualtran/bloqs')
paths = get_bloq_module_paths(bloqs_root)
bexamples: List[BloqExample] = []
for path in paths:
for (modname, name, be) in modpath_to_bloq_exs(path):
bexamples.append... |
def mass_law(freq, vol_density, thickness, theta=0, c=343, rho0=1.225):
rad_freq = ((2.0 * np.pi) * freq)
surface_density = (vol_density * thickness)
theta_rad = np.deg2rad(theta)
a = (((rad_freq * surface_density) * np.cos(theta_rad)) / ((2 * rho0) * c))
tl_theta = (10 * np.log10((1 + (a ** 2))))
... |
_start_docstrings('\n ResNet Model with an image classification head on top (a linear layer on top of the pooled features), e.g. for\n ImageNet.\n ', RESNET_START_DOCSTRING)
class ResNetForImageClassification(ResNetPreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.... |
def _get_module_instance(node: torch._C.Node, node_name_to_module: Dict[(str, torch.nn.Module)]) -> torch.nn.Module:
    """Resolve the nn.Module that a TorchScript attribute-access node refers to.

    The node's (single) input debug name keys the parent module in
    ``node_name_to_module``; the node's own attribute name selects the child
    submodule on that parent.
    """
    parent_key: str = node.input().debugName()
    attribute_info = _get_attribute_name(node)
    parent_module = node_name_to_module[parent_key]
    return getattr(parent_module, attribute_info['name'])
def cache_input_ans(lib: str, mode: str, num_procs: int=8, source_dir: Union[(str, Path)]='ds1000_data'):
check_cpu_count(num_procs)
source_path = ((Path(source_dir) / lib) / mode)
problems = sorted(os.listdir(source_path), key=(lambda x: int(x.split('q')[1])))
problem_paths = []
for problem in prob... |
class property():
def __init__(self, fget: Optional[Callable[([Any], Any)]]=..., fset: Optional[Callable[([Any, Any], None)]]=..., fdel: Optional[Callable[([Any], None)]]=..., doc: Optional[str]=...) -> None:
...
def getter(self, fget: Callable[([Any], Any)]) -> property:
...
def setter(self... |
def convert_trunk_to_classy_model(state_dict_trunk, depth):
assert (depth in BLOCK_CONFIG.keys()), f'depth {depth} conversion not supported'
layers = BLOCK_CONFIG[depth]
output_dict = {}
for (k, val) in state_dict_trunk.items():
if any(((x in k) for x in _SKIP_LAYERS_IN_TRUNK)):
cont... |
def load_weights(checkpoint, hf_model, config):
hf_model.apply_weight_norm()
hf_model.conv_pre.weight_g.data = checkpoint['input_conv.weight_g']
hf_model.conv_pre.weight_v.data = checkpoint['input_conv.weight_v']
hf_model.conv_pre.bias.data = checkpoint['input_conv.bias']
for i in range(len(config.u... |
class PretrainedCfg():
url: Optional[Union[(str, Tuple[(str, str)])]] = None
file: Optional[str] = None
hf_hub_id: Optional[str] = None
hf_hub_filename: Optional[str] = None
source: Optional[str] = None
architecture: Optional[str] = None
custom_load: bool = False
input_size: Tuple[(int, ... |
Subsets and Splits
No community queries yet.
The top public SQL queries from the community will appear here once they become available.