code stringlengths 281 23.7M |
|---|
def test_putup_with_update_dirty_workspace(cwd, putup):
    """``putup --update`` must refuse a dirty workspace unless ``--force`` is given."""
    run(f'{putup} myproj')
    with chdir('myproj'):
        # Dirty the workspace by overwriting a generated file.
        with open('setup.py', 'w') as setup_file:
            setup_file.write('DIRTY')
        # Plain update fails on the dirty tree...
        with pytest.raises(CalledProcessError):
            run(f'{putup} --update myproj')
        # ...while --force overrides the safety check.
        run(f'{putup} --update myproj --force')
_config('cfg_gt')
def set_cfg_gt(cfg):
cfg.gt = CN()
cfg.gt.layer_type = 'SANLayer'
cfg.gt.layers = 3
cfg.gt.n_heads = 8
cfg.gt.dim_hidden = 64
cfg.gt.full_graph = True
cfg.gt.gamma = 1e-05
cfg.gt.pna_degrees = []
cfg.gt.dropout = 0.0
cfg.gt.attn_dropout = 0.0
cfg.gt.layer_no... |
class TFLACBadDuplicateVorbisComment(TestCase):
def setUp(self):
self.filename = get_temp_copy(os.path.join(DATA_DIR, 'silence-44-s.flac'))
some_tags = VCFLACDict()
some_tags['DUPLICATE'] = ['SECOND']
f = FLAC(self.filename)
f.tags['DUPLICATE'] = ['FIRST']
assert (f.t... |
def preprocess_external(args, raw_datasets, tokenizer, logger):
logger.info('preprocessing datasets')
column_names = raw_datasets['train'].column_names
text_column_name = ('text' if ('text' in column_names) else column_names[0])
padding = 'max_length'
def tokenize_function(examples):
example... |
def test_registering_steps_via_object(stepregistry):
class MySteps(object):
def some_step(self):
def some_other_step(self):
steps_object = MySteps()
stepregistry.register_object(steps_object)
assert (len(stepregistry.steps) == 2)
assert (stepregistry.steps['When I call some step'] ==... |
class RemoteSettingsChanged(Event):
def __init__(self):
self.changed_settings = {}
def from_settings(cls, old_settings, new_settings):
e = cls()
for (setting, new_value) in new_settings.items():
setting = _setting_code_from_int(setting)
original_value = old_settin... |
class Pascal3D(data.Dataset):
def __init__(self, opt, split):
print('==> initializing pascal3d Star {} data.'.format(split))
annot = {}
tags = ['bbox', 'anchors', 'vis', 'dataset', 'class_id', 'imgname', 'viewpoint_azimuth', 'viewpoint_elevation', 'viewpoint_theta', 'anchors_3d', 'space_embe... |
class Corr3dMMGradInputs(BaseCorr3dMM):
_direction = 'backprop inputs'
def make_node(self, kern, topgrad, shape=None):
kern = as_tensor_variable(kern)
topgrad = as_tensor_variable(topgrad)
(kern, topgrad) = self.as_common_dtype(kern, topgrad)
if (kern.type.ndim != 5):
... |
class VNetOutSingleBlock(nn.Module):
def __init__(self, in_channels, classes):
super(VNetOutSingleBlock, self).__init__()
self.conv = nn.Conv2d(in_channels, classes, kernel_size=1)
self.bn_out = nn.BatchNorm2d(classes)
self.af_out = nn.PReLU(classes)
def forward(self, x):
... |
class TestResponses():
(scope='class')
def spec(self):
return {'200': mock.sentinel.response_200, '299': mock.sentinel.response_299, '2XX': mock.sentinel.response_2XX, 'default': mock.sentinel.response_default}
(scope='class')
def responses(self, spec):
return SchemaPath.from_dict(spec)
... |
def method2():
sys.stdout.write('Method 2:\n')
bus = QDBusConnection.sessionBus()
dbus_iface = QDBusInterface('org.freedesktop.DBus', '/org/freedesktop/DBus', 'org.freedesktop.DBus', bus)
names = dbus_iface.call('ListNames').arguments()[0]
sys.stdout.write(('QVariant(QStringList, ("%s") )\n' % '", "... |
.skipif(WINDOWS, reason='Only test linux shells')
def test_bash(mocker: MockerFixture) -> None:
mocker.patch('cleo.io.inputs.string_input.StringInput.script_name', new_callable=mocker.PropertyMock, return_value='/path/to/my/script')
mocker.patch('cleo.commands.completions_command.CompletionsCommand._generate_fu... |
class ConversationsGenerator(DatasetGenerator):
config: ConversationsGeneratorConfig
def __init__(self, config: ConversationsGeneratorConfig) -> None:
super().__init__(config)
def initialize_options_configs(self, options_config_keys: List[str]=OPTIONS_CONFIG_KEYS, generator_config_keys: List[str]=GE... |
def test_insert_replace(qtbot, database):
table = database.table('Foo', ['name', 'val', 'lucky'], constraints={'name': 'PRIMARY KEY'})
with qtbot.wait_signal(table.changed):
table.insert({'name': 'one', 'val': 1, 'lucky': False}, replace=True)
with qtbot.wait_signal(table.changed):
table.ins... |
class TestCompileForwardSampler():
def get_function_roots(function):
return [var for var in pytensor.graph.basic.graph_inputs(function.maker.fgraph.outputs) if var.name]
def get_function_inputs(function):
return {i for i in function.maker.fgraph.inputs if (not isinstance(i, SharedVariable))}
... |
def _evaluate_predictions_on_coco(coco_gt, coco_results, iou_type, kpt_oks_sigmas=None, use_fast_impl=True, img_ids=None, max_dets_per_image=None, evaluator=MetaGraspeval):
assert (len(coco_results) > 0)
if (iou_type == 'segm'):
coco_results = copy.deepcopy(coco_results)
for c in coco_results:
... |
def train_classifier(model, dataset, cfg, distributed=False, validate=False, logger=None):
if (logger is None):
logger = get_root_logger(cfg.log_level)
if distributed:
raise NotImplementedError
_dist_train(model, dataset, cfg, validate=validate, logger=logger)
else:
_non_dist... |
def test_ema_hook():
class DemoModel(nn.Module):
def __init__(self):
super().__init__()
self.conv = nn.Conv2d(in_channels=1, out_channels=2, kernel_size=1, padding=1, bias=True)
self._init_weight()
def _init_weight(self):
constant_(self.conv.weight, 0)... |
class ConversationStringBufferMemory(BaseMemory):
human_prefix: str = 'Human'
ai_prefix: str = 'AI'
buffer: str = ''
output_key: Optional[str] = None
input_key: Optional[str] = None
memory_key: str = 'history'
_validator()
def validate_chains(cls, values: Dict) -> Dict:
if values... |
def assert_key_has_value(obj, key, caller, parent=None):
assert_key_exists(obj, key, caller, parent)
if (obj[key] is None):
if parent:
msg = f'context[{parent!r}][{key!r}] must have a value for {caller}.'
else:
msg = f'context[{key!r}] must have a value for {caller}.'
... |
def versionCheck(versionStr: str):
version = StrictVersion(versionStr)
builtInVersion = StrictVersion(mcquic.__version__)
if (builtInVersion < version):
raise ValueError(f"Version too new. Given {version}, but I'm {builtInVersion} now.")
(major, minor, revision) = version.version
(bMajor, bM... |
def write_namespace_total(namespace_id: int, manifest_id: int, namespace_total: int, operation: str):
namespace_size = get_namespace_size(namespace_id)
namespace_size_exists = (namespace_size is not None)
if (namespace_size_exists and (not namespace_size.backfill_complete)):
return
if ((operatio... |
def mol2graph(smiles_batch: List[str], args: Namespace) -> BatchMolGraph:
mol_graphs = []
if isinstance(smiles_batch, str):
smiles_batch = [smiles_batch]
for smiles in smiles_batch:
if (smiles in SMILES_TO_GRAPH):
mol_graph = SMILES_TO_GRAPH[smiles]
else:
mol_... |
class FeatureExtraction(torch.nn.Module):
def __init__(self, train_fe=False, feature_extraction_cnn='vgg', normalization=True, last_layer='', use_cuda=True):
super(FeatureExtraction, self).__init__()
self.normalization = normalization
if (feature_extraction_cnn == 'vgg'):
self.mo... |
def _op_push(i: int) -> str:
if (i < opcodes.OP_PUSHDATA1):
return int_to_hex(i)
elif (i <= 255):
return (opcodes.OP_PUSHDATA1.hex() + int_to_hex(i, 1))
elif (i <= 65535):
return (opcodes.OP_PUSHDATA2.hex() + int_to_hex(i, 2))
else:
return (opcodes.OP_PUSHDATA4.hex() + in... |
def test_project_variables(project):
variable = project.variables.create({'key': 'key1', 'value': 'value1'})
assert (variable.value == 'value1')
assert (variable in project.variables.list())
variable.value = 'new_value1'
variable.save()
variable = project.variables.get(variable.key)
assert (... |
class Losses_triplet_nll(nn.Module):
def __init__(self):
super(Losses_triplet_nll, self).__init__()
self.loss = nn.functional.mse_loss
def forward(self, real_img, input1, input2):
posi_dist = self.loss(input2, real_img)
nega_dist = self.loss(input1, real_img)
Pt = (torch.... |
class StreamingStdOutCallbackHandler(BaseCallbackHandler):
def on_llm_start(self, serialized: Dict[(str, Any)], prompts: List[str], **kwargs: Any) -> None:
def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
sys.stdout.write(token)
sys.stdout.flush()
def on_llm_end(self, response:... |
def _calculate_shard_io_sizes(sharding_type: str, batch_sizes: List[int], world_size: int, local_world_size: int, input_lengths: List[float], emb_dim: int, shard_sizes: List[List[int]], input_data_type_size: int, output_data_type_size: int, num_poolings: List[float], is_pooled: bool) -> Tuple[(List[int], List[int])]:
... |
class GuiAddCommandFitsCommand(wx.Command):
def __init__(self, fitID, commandFitIDs):
wx.Command.__init__(self, True, 'Add Command Fits')
self.internalHistory = InternalCommandHistory()
self.fitID = fitID
self.commandFitIDs = commandFitIDs
def Do(self):
results = []
... |
class _ROIAlign(Function):
def forward(ctx, input, roi, output_size, spatial_scale, sampling_ratio):
ctx.save_for_backward(roi)
ctx.output_size = _pair(output_size)
ctx.spatial_scale = spatial_scale
ctx.sampling_ratio = sampling_ratio
ctx.input_shape = input.size()
ou... |
def fill_diagonal(w, val=1.0, wsp=False):
w_new = copy.deepcopy(w.sparse)
w_new = w_new.tolil()
if issubclass(type(val), np.ndarray):
if (w.n != val.shape[0]):
raise Exception('shape of w and diagonal do not match')
w_new.setdiag(val)
elif isinstance(val, numbers.Number):
... |
class MutatedMixin():
def isMutated(self):
return bool((self.baseItemID and self.mutaplasmidID))
def baseItem(self):
return self.__baseItem
def mutaplasmid(self):
return self.__mutaplasmid
def fullName(self):
if self.isMutated:
mutaShortName = self.mutaplasmid... |
class CommonEvalConfig(FairseqDataclass):
path: Optional[str] = field(default=None, metadata={'help': 'path(s) to model file(s), colon separated'})
post_process: Optional[str] = field(default=None, metadata={'help': 'post-process text by removing pre-processing such as BPE, letter segmentation, etc (valid optio... |
def test_fromstring():
for filename in ['a.py', 'a.b.py', 'b.json', 'c.yaml']:
cfg_file = osp.join(data_path, 'config', filename)
file_format = osp.splitext(filename)[(- 1)]
in_cfg = Config.fromfile(cfg_file)
out_cfg = Config.fromstring(in_cfg.pretty_text, '.py')
assert (in_c... |
class UpUnit(nn.Module):
def __init__(self, in_channels, out_channels_list, dilation=1):
super(UpUnit, self).__init__()
self.blocks = nn.Sequential()
for (i, out_channels) in enumerate(out_channels_list):
squeeze = ((dilation > 1) and (i == 0))
self.blocks.add_module(... |
class Effect5820(BaseEffect):
    """Passive effect: boosts ``speedFactor`` on modules requiring the
    Afterburner skill by the module's ``shipBonusCC2`` attribute, scaled
    by the 'Caldari Cruiser' skill."""

    type = 'passive'

    def handler(fit, module, context, projectionRange, **kwargs):
        # Only modules whose item requires the Afterburner skill are boosted.
        requires_afterburner = lambda mod: mod.item.requiresSkill('Afterburner')
        fit.modules.filteredItemBoost(
            requires_afterburner,
            'speedFactor',
            module.getModifiedItemAttr('shipBonusCC2'),
            skill='Caldari Cruiser',
            **kwargs,
        )
class ICManager(object):
def __init__(self, model, pruning_method='L2', pruning_ratio=0.5, pruning_step=0.05, total_epoch=20):
assert (1 > pruning_ratio >= pruning_step > 0)
assert ((total_epoch - 1) > (pruning_ratio / pruning_step))
self.model = model
self.pruning_method = pruning_m... |
def _RadioButtonTruncInfo(win):
lineFormat = win32defines.DT_SINGLELINE
if win.has_style(win32defines.BS_MULTILINE):
lineFormat = win32defines.DT_WORDBREAK
widthAdj = 19
if (win.has_style(win32defines.BS_BITMAP) or win.has_style(win32defines.BS_ICON)):
widthAdj = (- 9000)
lineFor... |
class FusedEmbeddingBagCollectionTest(unittest.TestCase):
(deadline=None)
(device=st.sampled_from(devices))
def test_unweighted(self, device: torch.device) -> None:
eb1_config = EmbeddingBagConfig(name='t1', embedding_dim=4, num_embeddings=10, feature_names=['f1'])
eb2_config = EmbeddingBagC... |
_module
class SSDHead(AnchorHead):
def __init__(self, input_size=300, num_classes=81, in_channels=(512, 1024, 512, 256, 256, 256), anchor_strides=(8, 16, 32, 64, 100, 300), basesize_ratio_range=(0.1, 0.9), anchor_ratios=([2], [2, 3], [2, 3], [2, 3], [2], [2]), target_means=(0.0, 0.0, 0.0, 0.0), target_stds=(1.0, 1.... |
class TestLogging(fake_filesystem_unittest.TestCase):
_config_file = ''
_default_log = 'ignis.log'
def setUp(self):
self.setUpPyfakefs()
super().setUp()
qiskit_dir = os.path.join(os.path.expanduser('~'), '.qiskit')
self._config_file = os.path.join(qiskit_dir, 'logging.yaml')
... |
def test_update_channel_reveal_timeout():
pseudo_random_generator = random.Random()
channel_state = factories.create(factories.NettingChannelStateProperties(settle_timeout=500, reveal_timeout=50))
invalid_reveal_timeout = 260
valid_reveal_timeout = 250
set_reveal_timeout = ActionChannelSetRevealTime... |
def main(args):
values = dict(np.load(args.input))
variables = {}
o_keys = old_keys()
n_keys = new_keys()
for (i, key) in enumerate(o_keys):
v = values[key]
variables[n_keys[i]] = v
with tf.Graph().as_default():
with tf.device('/cpu:0'):
tf_vars = [tf.get_vari... |
class UnaryScalarOpMixin(_GenericOpMixin):
shapes = [(x,) for x in shapes_unary()]
.parametrize('scalar', [pytest.param(0, id='zero'), pytest.param(4.5, id='real'), pytest.param(3j, id='complex')])
def test_mathematically_correct(self, op, data_m, scalar, out_type):
matrix = data_m()
expecte... |
.parametrize('manager', managers())
def test_managed_manager(manager):
length = 10000
dtype = cupy.uint8
data = cupy.array(np.arange(0, (length // cupy.dtype(dtype).type(0).itemsize), dtype=dtype))
compressor_instance = manager()
compressed = compressor_instance.compress(data)
manager = libnvcom... |
class PayPalJS(Library):
def __init__(self):
super().__init__('reahl-paypal')
self.egg_name = 'reahl-paypalsupport'
self.shipped_in_package = 'reahl.paypalsupport'
self.files = ['reahl-paypalbuttonspanel.js']
def inline_material(self, credentials, currency):
paypal_script... |
def dataflow(function=None, callback=None, maxworkers=3):
global PROCESSPOOL, THREADPOOL
def decorator(func):
def wrapper(f_func, *dargs, **dkwargs):
logging.debug('decorator: Calling decorator %s', f_func.__name__)
logging.debug('decorator: dargs %s', str(dargs))
def... |
def test_get_package_information_sets_appropriate_python_versions_if_wheels_only() -> None:
    """futures 3.2.0 (a wheels-only release) should report '>=2.6, <3'."""
    repo = MockRepository()
    pkg = repo.package('futures', Version.parse('3.2.0'))
    assert pkg.name == 'futures'
    assert pkg.version.text == '3.2.0'
    assert pkg.python_versions == '>=2.6, <3'
class DeepLabv3FinalBlock(nn.Module):
def __init__(self, in_channels, out_channels, bottleneck_factor=4):
super(DeepLabv3FinalBlock, self).__init__()
assert ((in_channels % bottleneck_factor) == 0)
mid_channels = (in_channels // bottleneck_factor)
self.conv1 = conv3x3_block(in_channe... |
class NewsArticleFactory(PageFactory):
    """factory_boy factory for ``NewsArticle`` pages.

    The ``body`` field is assembled lazily from the transient ``h2``/``p``
    params declared in ``Params`` (faker-generated text snippets).
    """
    class Meta():
        model = NewsArticle
    # Fixed excerpt for every generated article.
    excerpt = 'Test'
    # Build the rich-text body from the transient h2/p params below.
    body = factory.LazyAttribute((lambda o: RichText(f'<h2>{o.h2}</h2><p>{o.p}</p>')))
    class Params():
        # Transient inputs (not passed to the model) consumed by ``body``.
        h2 = factory.Faker('text', max_nb_chars=20)
        p = factory.Faker('text', max_nb_chars=300)
class F12_Fcoe(KickstartCommand):
removedKeywords = KickstartCommand.removedKeywords
removedAttrs = KickstartCommand.removedAttrs
def __init__(self, writePriority=71, *args, **kwargs):
KickstartCommand.__init__(self, writePriority, *args, **kwargs)
self.op = self._getParser()
self.fc... |
def save_dataset(ds: Dataset, store: Union[(PathType, MutableMapping[(str, bytes)])], storage_options: Optional[Dict[(str, str)]]=None, auto_rechunk: Optional[bool]=None, **kwargs: Any) -> None:
if isinstance(store, str):
storage_options = (storage_options or {})
store = fsspec.get_mapper(store, **s... |
def __leaf_08(ql: Qiling):
idx = ql.arch.regs.dl
if (not ql.os.fs_mapper.has_mapping(idx)):
ql.log.warning(f'Warning: No such disk: {idx:#x}')
ql.arch.regs.ah = DiskError.BadCommand.value
ql.os.set_cf()
return
disk = ql.os.fs_mapper.open(idx, None)
ql.arch.regs.dl = ql.os... |
def parse_args():
parser = argparse.ArgumentParser(description='build file list for HVU')
parser.add_argument('--input_csv', type=str, help='path of input csv file')
parser.add_argument('--src_dir', type=str, help='source video / frames directory')
parser.add_argument('--output', type=str, help='output ... |
def handle_lock_expired(payment_state: InitiatorPaymentState, state_change: ReceiveLockExpired, channelidentifiers_to_channels: Dict[(ChannelID, NettingChannelState)], block_number: BlockNumber) -> TransitionResult[InitiatorPaymentState]:
'Initiator also needs to handle LockExpired messages when refund transfers ar... |
class TestEncodingCommutationVerifier(QiskitOptimizationTestCase):
def check_problem_commutation(self, problem: QuadraticProgram, max_vars_per_qubit: int):
encoding = QuantumRandomAccessEncoding(max_vars_per_qubit=max_vars_per_qubit)
encoding.encode(problem)
estimator = Estimator()
v... |
class TestDownloadFile():
def test_main(self, tmpdir, test_image_url, test_image):
file = path.join(tmpdir, path.basename(test_image_url))
misc.download_file(test_image_url, file, md5='a858d33c424eaac1322cf3cab6d3d568')
actual = read_image(file)
desired = test_image
ptu.asser... |
class SparseInverseConvFunction(Function):
def forward(ctx, features, filters, indice_pairs, indice_pair_num, num_activate_out):
ctx.save_for_backward(indice_pairs, indice_pair_num, features, filters)
return ops.indice_conv(features, filters, indice_pairs, indice_pair_num, num_activate_out, True, Fa... |
class TestEuropeanCallExpectedValue(QiskitFinanceTestCase):
def setUp(self):
super().setUp()
self.seed = 457
aqua_globals.random_seed = self.seed
def test_ecev_circuit(self):
num_qubits = 3
rescaling_factor = 0.1
strike_price = 0.5
bounds = (0, 2)
... |
def mrr(qrels: Dict[(str, Dict[(str, int)])], results: Dict[(str, Dict[(str, float)])], k_values: List[int]) -> Tuple[Dict[(str, float)]]:
MRR = {}
for k in k_values:
MRR[f'{k}'] = 0.0
(k_max, top_hits) = (max(k_values), {})
logging.info('\n')
for (query_id, doc_scores) in results.items():
... |
class UpProject(nn.Module):
def __init__(self, in_channels, out_channels, batch_size):
super(UpProject, self).__init__()
self.batch_size = batch_size
self.conv1_1 = nn.Conv2d(in_channels, out_channels, 3)
self.conv1_2 = nn.Conv2d(in_channels, out_channels, (2, 3))
self.conv1_... |
_SEG_HEADS_REGISTRY.register()
class PerPixelBaselinePlusHead(PerPixelBaselineHead):
def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs):
version = local_metadata.get('version', None)
if ((version is None) or (version < 2)):
... |
def MakeMetaModel():
if (FLAGS.backbone_arch == 'resnet12'):
try:
from resnet12 import Models
except ImportError:
from models.resnet12 import Models
elif (FLAGS.backbone_arch == 'resnet18'):
try:
from resnet18 import Models
except ImportError:
... |
def replace_oovs(source_in, target_in, vocabulary, source_out, target_out):
def format_unk(pos):
return '<unk-{}>'.format(pos)
if (target_in is None):
target_in = []
for (seq_num, (source_seq, target_seq)) in enumerate(zip_longest(source_in, target_in)):
source_seq_out = []
t... |
def test_mws_xml_to_dotdict_method(simple_xml_response_str):
output = mws_xml_to_dotdict(simple_xml_response_str)
assert isinstance(output, DotDict)
assert isinstance(output, dict)
identifiers = output.ListMatchingProductsResult.Products.Product[0].Identifiers
assert (identifiers.MarketplaceASIN.Mar... |
class PythonHighlighter(QSyntaxHighlighter):
keywords = keyword.kwlist
def __init__(self, document, formats=None):
QSyntaxHighlighter.__init__(self, document)
self.styles = styles = dict(STYLES, **(formats or {}))
self.tri_single = (re.compile("'''"), 1, styles['string2'])
self.t... |
def accumulate_standing_stats(net, z, y, nclasses, num_accumulations=16):
    """Accumulate standing statistics for ``net`` over several forward passes.

    After ``initiate_standing_stats(net)`` prepares the network, this runs
    ``num_accumulations`` forward passes in train mode with freshly resampled
    noise and labels so the accumulated statistics average over many batches,
    then switches the network back to eval mode.

    Args:
        net: module called as ``net(z, net.shared(y))``.
        z: noise tensor, resampled in-place (``normal_``) each pass.
        y: label tensor, refilled in-place with ints in ``[0, nclasses)``.
        nclasses: number of classes used for label sampling.
        num_accumulations: number of forward passes to average over.
    """
    initiate_standing_stats(net)
    net.train()
    for _ in range(num_accumulations):
        with torch.no_grad():
            z.normal_()
            y.random_(0, nclasses)
            # The forward output is discarded on purpose — only the
            # statistics-accumulation side effect of the pass matters.
            net(z, net.shared(y))
    net.eval()
def grammar():
colon = Literal(':')
equal = Suppress('=')
slash = Suppress('/')
open_paren = Suppress('(')
close_paren = Suppress(')')
open_brace = Suppress('{')
close_brace = Suppress('}')
nspfx = Word(alphas)
local_name = Word(alphanums)
tagname = Combine(((nspfx + colon) + loc... |
_fixtures(WebFixture)
def test_resources(web_fixture):
fixture = web_fixture
(Resource)
class ResourceStub(Resource):
called = None
def handle_something(self, request):
self.called = True
return 'something'
def handle_anotherthing(self, request):
p... |
def huoqu(url, type, cookie):
time_1 = int(time())
time_2 = localtime(time_1)
file = strftime('%Y-%m-%d', time_2)
try:
mkdir((file + '/'))
except:
pass
headers = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 Safa... |
class MultiDatasetWrapper(nn.Module):
def __init__(self, opt):
super(MultiDatasetWrapper, self).__init__()
self.layer_set = {'-1': None}
self.opt = opt
def add_layer(self, specific_name, layertype, *args, **kwargs):
for dataset in self.opt['train_datasets']:
id_layer ... |
def plot_pr_curve(precisions, recalls, out_image, title):
plt.step(recalls, precisions, color='b', alpha=0.2, where='post')
plt.fill_between(recalls, precisions, step='post', alpha=0.2, color='b')
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.xlim([0.0, 1.05])
plt.ylim([0.0, 1.05])
plt.ti... |
def is_channel_registered(node_app: RaidenService, partner_app: RaidenService, canonical_identifier: CanonicalIdentifier) -> bool:
token_network = views.get_token_network_by_address(chain_state=views.state_from_raiden(node_app), token_network_address=canonical_identifier.token_network_address)
assert token_netw... |
def test(model, test_loader, criterion, num_classes=11, return_outputs=False, return_scale=False):
model.eval()
with torch.no_grad():
test_loss = 0
test_error = 0
I_tot = np.zeros(num_classes)
U_tot = np.zeros(num_classes)
if return_outputs:
output_list = []
... |
def eval_input_fn(features, labels, user_negative, test_neg):
data_path = os.path.join(DATA_PATH, 'test_data.npy')
if (not os.path.exists(data_path)):
dump_data(features, labels, user_negative, test_neg, False)
data = np.load(data_path).item()
print('Loading testing data finished!')
dataset ... |
class Dictionary(object):
def __init__(self, pad='<pad>', eos='</s>', unk='<unk>', bos='<s>', extra_special_symbols=None):
(self.unk_word, self.pad_word, self.eos_word) = (unk, pad, eos)
self.symbols = []
self.count = []
self.indices = {}
self.bos_index = self.add_symbol(bos)... |
class TestLoadAverageCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('LoadAverageCollector', {'interval': 10})
self.collector = LoadAverageCollector(config, None)
def test_import(self):
self.assertTrue(LoadAverageCollector)
('__builtin__.open')
('os.a... |
def test_prepare_metadata_for_build_wheel():
with TemporaryDirectory() as td, cwd(osp.join(samples_dir, 'pep517')):
dirname = buildapi.prepare_metadata_for_build_wheel(td)
assert dirname.endswith('.dist-info'), dirname
assert_isdir(osp.join(td, dirname))
assert_isfile(osp.join(td, di... |
def test_run():
r2p = r2pipe.open('test/tests/simplish', flags=['-2'])
r2p.cmd('s sym.check; aei; aeim; aer rdi=12605')
esilsolver = ESILSolver(r2p, debug=False, trace=False)
state = esilsolver.init_state()
state.set_symbolic_register('rdi')
rdi = state.registers['rdi']
esilsolver.run(target... |
def load_feature_shard(feat_dir, split, nshard, rank, percent):
feat_path = f'{feat_dir}/{split}_{rank}_{nshard}.npy'
leng_path = f'{feat_dir}/{split}_{rank}_{nshard}.len'
with open(leng_path, 'r') as f:
lengs = [int(line.rstrip()) for line in f]
offsets = ([0] + np.cumsum(lengs[:(- 1)]).tol... |
def compare(golden_events, predict_events, event_type):
total_num = 0
find_num = 0
correct_golden_num = 0
correct_predict_num = 0
golden_stastic_list = []
golden_list = []
predict_list = [[w[0], w[1].split(' ')[(- 1)]] for w in predict_events if (w[0] == event_type)]
find_num += len(pred... |
class AsyncTree():
def __init__(self, use_task_groups=False):
self.cache = {}
self.use_task_groups = use_task_groups
random.seed(RANDOM_SEED)
async def mock_io_call(self):
(await asyncio.sleep(IO_SLEEP_TIME))
async def workload_func(self):
raise NotImplementedError("T... |
def template(*args, **kwargs):
tpl = (args[0] if args else None)
adapter = kwargs.pop('template_adapter', SimpleTemplate)
lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
tplid = (id(lookup), tpl)
if ((tplid not in TEMPLATES) or DEBUG):
settings = kwargs.pop('template_settings', {})
... |
class FitbitOAuth1(BaseOAuth1):
name = 'fitbit'
AUTHORIZATION_URL = '
REQUEST_TOKEN_URL = '
ACCESS_TOKEN_URL = '
ID_KEY = 'encodedId'
EXTRA_DATA = [('encodedId', 'id'), ('displayName', 'username')]
def get_user_details(self, response):
return {'username': response.get('displayName'),... |
class loss(nn.Module):
def __init__(self):
super(loss, self).__init__()
self.bce_loss = nn.BCELoss()
def forward(self, x, y, z, label):
(alpha_1, alpha_2, alpha_3) = (0.3, 0.4, 0.3)
label = label.view((- 1), 1)
loss_1 = self.bce_loss(x, label)
loss_2 = self.bce_lo... |
def test_sre_performance_patch():
try:
import sre_parse
uniq = sre_parse._uniq
with sre_performance_patch():
assert (sre_parse._uniq(['5', '2', '3', '2', '5', '1']) == ['5', '2', '3', '1'])
assert (sre_parse._uniq == uniq)
except (ImportError, AttributeError):
... |
class LinearReplacementScheduler():
def __init__(self, bert_encoder: BertEncoder, base_replacing_rate, k):
self.bert_encoder = bert_encoder
self.base_replacing_rate = base_replacing_rate
self.step_counter = 0
self.k = k
self.bert_encoder.set_replacing_rate(base_replacing_rate... |
class SolarTerm24(IObserver):
def __init__(self):
self.key = None
self.time = None
def notify(self, observable, *args, **kwargs):
self.key = observable.key
self.time = kwargs['time']
self.set_24()
return self.time
def set_24(self):
rule = ''
ma... |
def _prune_unused_frames(vid_data_list, seq_infos, _print=print):
used_frame_idxs = [[(vd['frames'].shape[0] - 1)] for vd in vid_data_list]
for seq_info in seq_infos:
(vid_idx, seq_frame_idxs, seq_firstlast_idxs) = seq_info
used_frame_idxs[vid_idx] += [fi for fi in seq_frame_idxs if (fi is not N... |
def image_resize(img_width, img_height, width, height, em_width, max_width, preserve_ratio=1):
if width:
if width.isdigit():
width = (int(width) * em_width)
elif (width[(- 1)] == '%'):
width = int(((max_width * int(width[:(- 1)])) / 100))
elif (width[(- 2):] == 'px'):... |
def count_num_param(model):
num_param = (sum((p.numel() for p in model.parameters())) / 1000000.0)
if isinstance(model, nn.DataParallel):
model = model.module
if (hasattr(model, 'classifier') and isinstance(model.classifier, nn.Module)):
num_param -= (sum((p.numel() for p in model.classifier... |
class DelegatingHooks(implements(PipelineHooks)):
def __new__(cls, hooks):
if (len(hooks) == 0):
return NoHooks()
elif (len(hooks) == 1):
return hooks[0]
else:
self = super(DelegatingHooks, cls).__new__(cls)
self._hooks = hooks
retu... |
def validate_webhook_response(request: WebhookRequest, response: Response, spec: SchemaPath, base_url: Optional[str]=None, cls: Optional[WebhookResponseValidatorType]=None, **validator_kwargs: Any) -> None:
config = Config(server_base_url=base_url, webhook_response_validator_cls=(cls or _UNSET), **validator_kwargs)... |
def test_pylsp_format_line_length(config, unformatted_line_length, formatted_line_length):
config.update({'plugins': {'black': {'line_length': 79}}})
result = pylsp_format_document(config, unformatted_line_length)
assert (result == [{'range': {'start': {'line': 0, 'character': 0}, 'end': {'line': 3, 'charac... |
class AcoustidSearch(SongsMenuPlugin):
PLUGIN_ID = 'AcoustidSearch'
PLUGIN_NAME = _('Acoustic Fingerprint Lookup')
PLUGIN_DESC = _('Looks up song metadata through acoustic fingerprinting.')
PLUGIN_ICON = Icons.NETWORK_WORKGROUP
plugin_handles = each_song(is_finite, is_writable)
def plugin_songs(... |
def get_param_groups_and_shapes(named_model_params):
named_model_params = list(named_model_params)
scalar_vector_named_params = ([(n, p) for (n, p) in named_model_params if (p.ndim <= 1)], (- 1))
matrix_named_params = ([(n, p) for (n, p) in named_model_params if (p.ndim > 1)], (1, (- 1)))
return [scalar... |
def main(dataSetPath='./StrokeForecasting/data/datasetTest.csv'):
model_path = './StrokeForecasting/discreteFinal'
config = ast.literal_eval(open(f'{model_path}1/config', encoding='utf8').readline())
SAMPLES = 50
set_seed(config['seed_value'])
print(config['uniques_type'])
dataSet = pd.read_csv(... |
class TestStdCaptureFDinvalidFD():
def test_stdcapture_fd_invalid_fd(self, pytester: Pytester) -> None:
pytester.makepyfile('\n import os\n from fnmatch import fnmatch\n from _pytest import capture\n\n def StdCaptureFD(out=True, err=True, in_=True):\n ... |
def get_mol_embedding_func(feature):
if (feature == 'gin'):
embedding_func = (lambda smi: model(smi, device='cpu'))
elif (feature == 'fp_4096'):
embedding_func = (lambda smi: fp_embedding(smi, _nBits=4096))
elif (feature == 'fp_2048'):
embedding_func = (lambda smi: fp_embedding(smi, ... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.