code stringlengths 281 23.7M |
|---|
class TestConfigTypes():
def test_bool(self):
valids = {True: ['1', 1, True, 'true', 'True'], False: ['0', 0, False, 'false', 'False']}
param = configparser.BoolParam(None)
assert isinstance(param, configparser.ConfigParam)
assert (param.default is None)
for (outcome, inputs)... |
class SelectPauliLCU(SelectOracle, UnaryIterationGate):
selection_bitsize: int
target_bitsize: int
select_unitaries: Tuple[(cirq.DensePauliString, ...)] = attrs.field(converter=tuple)
control_val: Optional[int] = None
def __attrs_post_init__(self):
if any(((len(dps) != self.target_bitsize) f... |
class MaxxVitCfg():
embed_dim: Tuple[(int, ...)] = (96, 192, 384, 768)
depths: Tuple[(int, ...)] = (2, 3, 5, 2)
block_type: Tuple[(Union[(str, Tuple[(str, ...)])], ...)] = ('C', 'C', 'T', 'T')
stem_width: Union[(int, Tuple[(int, int)])] = 64
stem_bias: bool = False
conv_cfg: MaxxVitConvCfg = Max... |
class CustomProxy(QGraphicsProxyWidget):
def __init__(self, parent=None, wFlags=0):
super(CustomProxy, self).__init__(parent, wFlags)
self.popupShown = False
self.currentPopup = None
self.timeLine = QTimeLine(250, self)
self.timeLine.valueChanged.connect(self.updateStep)
... |
class ControlledAsymmetricLinearSwapNetworkTrotterStep(TrotterStep):
def trotter_step(self, qubits: Sequence[cirq.Qid], time: float, control_qubit: Optional[cirq.Qid]=None) -> cirq.OP_TREE:
n_qubits = len(qubits)
if (not isinstance(control_qubit, cirq.Qid)):
raise TypeError('Control qudi... |
class _ASTMatcher():
def __init__(self, body, pattern, does_match):
self.body = body
self.pattern = pattern
self.matches = None
self.ropevar = _RopeVariable()
self.matches_callback = does_match
def find_matches(self):
if (self.matches is None):
self.ma... |
def analysis():
search_space_3 = SearchSpace3()
search_space_3.sample(with_loose_ends=True)
cs = search_space_3.get_configuration_space()
nasbench = NasbenchWrapper('../nasbench_data/108_e/nasbench_full.tfrecord')
search_space_3.objective_function(nasbench, cs.sample_configuration())
test_error ... |
def conferences():
today = datetime.datetime.today()
day_before_yesterday = (today - datetime.timedelta(days=2))
yesterday = (today - datetime.timedelta(days=1))
closed_status = ConferenceStatus._CLOSED_CFP[0]
past = factories.create_conference(name='Past', start_date=day_before_yesterday, end_date=... |
def test_build_dataloader():
dataset = ToyDataset()
samples_per_gpu = 3
dataloader = build_dataloader(dataset, samples_per_gpu=samples_per_gpu, workers_per_gpu=2)
assert (dataloader.batch_size == samples_per_gpu)
assert (len(dataloader) == int(math.ceil((len(dataset) / samples_per_gpu))))
assert... |
def test_with_signature() -> None:
    """Verify that SignatureCommand exposes the expected metadata and definition."""
    cmd = SignatureCommand()

    # Metadata declared via the command's signature.
    assert cmd.name == 'signature:command'
    assert cmd.description == 'description'
    assert cmd.help == 'help'

    # The parsed definition carries two arguments and two options.
    assert len(cmd.definition.arguments) == 2
    assert len(cmd.definition.options) == 2
def solve(fake_feature, true_feature):
M = distance(fake_feature, true_feature, True)
emd = ot.emd([], [], M.numpy())
map = np.zeros(fake_feature.size(0))
for i in range(0, fake_feature.size(0)):
for j in range(0, true_feature.size(0)):
if (emd[i][j] > 0):
map[i] = j
... |
class OsaBlock(nn.Module):
def __init__(self, in_chs, mid_chs, out_chs, layer_per_block, residual=False, depthwise=False, attn='', norm_layer=BatchNormAct2d, act_layer=nn.ReLU, drop_path=None):
super(OsaBlock, self).__init__()
self.residual = residual
self.depthwise = depthwise
conv_... |
class Graph(BaseGraph):
def __init__(self, machine):
self.custom_styles = {}
self.reset_styling()
super(Graph, self).__init__(machine)
def set_previous_transition(self, src, dst):
self.custom_styles['edge'][src][dst] = 'previous'
self.set_node_style(src, 'previous')
d... |
class InternCloudGuruCourse(CloudGuruCourse, CloudGuru):
def __init__(self, course, session, keep_alive):
self._info = ''
self._course = course
self._session = session
self._keep_alive = keep_alive
super(InternCloudGuruCourse, self).__init__()
def _fetch_course(self):
... |
def get_composed_augmentations():
aug_params = cfg.INPUT.AUG_PARAMS
augmentations = []
for (aug, aug_param) in zip(aug_list, aug_params):
if (aug_param[0] > 0):
augmentations.append(aug(*aug_param))
logger.info('Using {} aug with params {}'.format(aug, aug_param))
return ... |
def test_wcs_slice_unmatched_celestial():
wcs = WCS(naxis=3)
wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN', 'FREQ']
wcs.wcs.crpix = [50.0, 45.0, 30.0]
with warnings.catch_warnings(record=True) as wrn:
wcs_new = drop_axis(wcs, 0)
assert ('is being removed' in str(wrn[(- 1)].message))
with warni... |
class juxt(object):
__slots__ = ['funcs']
def __init__(self, *funcs):
if ((len(funcs) == 1) and (not callable(funcs[0]))):
funcs = funcs[0]
self.funcs = tuple(funcs)
def __call__(self, *args, **kwargs):
return tuple((func(*args, **kwargs) for func in self.funcs))
def ... |
def InceptionTower(net, from_layer, tower_name, layer_params, **bn_param):
use_scale = False
for param in layer_params:
tower_layer = '{}/{}'.format(tower_name, param['name'])
del param['name']
if ('pool' in tower_layer):
net[tower_layer] = L.Pooling(net[from_layer], **param)... |
def check_recovery(kubeconfig_path, scenario, failed_post_scenarios, pre_action_output):
if failed_post_scenarios:
for failed_scenario in failed_post_scenarios:
post_action_output = run(kubeconfig_path, failed_scenario[0], failed_scenario[1])
if (post_action_output is not False):
... |
class TestOverallBehaviour():
PYPROJECT = ' [build-system]\n requires = ["setuptools"]\n build-backend = "setuptools.build_meta"\n\n [project]\n name = "mypkg"\n version = "3.14159"\n '
FLAT_LAYOUT = {'pyproject.toml': dedent(PYPROJECT), 'MANIFEST.in': EXAMPLE['M... |
class TMusepackWithID3(TestCase):
def setUp(self):
self.filename = get_temp_copy(os.path.join(DATA_DIR, 'click.mpc'))
def tearDown(self):
os.unlink(self.filename)
def test_ignore_id3(self):
id3 = ID3()
id3.add(TIT2(encoding=0, text='id3 title'))
id3.save(self.filename... |
class STM32F1xxDma(QlPeripheral):
class Type(ctypes.Structure):
_fields_ = [('ISR', ctypes.c_uint32), ('IFCR', ctypes.c_uint32), ('stream', (Stream * 8))]
def __init__(self, ql, label, stream0_intn=None, stream1_intn=None, stream2_intn=None, stream3_intn=None, stream4_intn=None, stream5_intn=None, strea... |
class GradCAM(BaseCAM):
def __init__(self, model, target_layers, use_cuda=False, reshape_transform=None):
super(GradCAM, self).__init__(model, target_layers, use_cuda, reshape_transform)
def get_cam_weights(self, input_tensor, target_layer, target_category, activations, grads):
return np.mean(gr... |
def is_subtype_helper(left: mypy.types.Type, right: mypy.types.Type) -> bool:
left = mypy.types.get_proper_type(left)
right = mypy.types.get_proper_type(right)
if (isinstance(left, mypy.types.LiteralType) and isinstance(left.value, int) and (left.value in (0, 1)) and mypy.types.is_named_instance(right, 'bui... |
def create_meta_expressions(data_root='data/ref-davis', output_root='data/ref-davis'):
train_img_path = os.path.join(output_root, 'train/JPEGImages')
val_img_path = os.path.join(output_root, 'valid/JPEGImages')
meta_train_path = os.path.join(output_root, 'meta_expressions/train')
meta_val_path = os.path... |
class Down(nn.Module):
def __init__(self, nn):
super(Down, self).__init__()
self.nn = nn
self.maxpool_with_argmax = torch.nn.MaxPool2d(kernel_size=2, stride=2, return_indices=True)
def forward(self, inputs):
down = self.nn(inputs)
unpooled_shape = down.size()
(out... |
def SiamFC_init(im, target_pos, target_sz, cfg):
state = {}
state['im_h'] = im.shape[0]
state['im_w'] = im.shape[1]
(target_pos, target_sz) = to_zero_indexed(target_pos, target_sz)
p = TrackerConfig()
p.update(cfg)
p.hann_window = np.outer(np.hanning(p.upscale_sz), np.hanning(p.upscale_sz))
... |
def test_CenitDistanceMatrixScaler_no_change_original_dm():
dm = skcriteria.mkdm(matrix=[[1, 0, 3], [0, 5, 6]], objectives=[min, max, min], weights=[1, 2, 0])
expected = dm.copy()
tfm = CenitDistanceMatrixScaler()
dmt = tfm.transform(dm)
assert (dm.equals(expected) and (not dmt.equals(expected)) and... |
class Effect6428(BaseEffect):
type = ('projected', 'active')
def handler(fit, module, context, projectionRange, **kwargs):
if ('projected' not in context):
return
if fit.ship.getModifiedItemAttr('disallowAssistance'):
return
rangeFactor = calculateRangeFactor(srcO... |
class AllProcessor(DataProcessor):
def get_train_examples(self, data_dir):
train_data_imdb = pd.read_csv(os.path.join('IMDB_data/', 'train.csv'), header=None, sep='\t').values
train_data_yelp_p = pd.read_csv(os.path.join('Yelp_p_data/yelp_polarity/', 'train.csv'), header=None, sep=',').values
... |
.parametrize('username,password', users)
def test_update(db, client, username, password):
client.login(username=username, password=password)
instances = Attribute.objects.order_by('-level')
for instance in instances:
url = reverse(urlnames['detail'], args=[instance.pk])
data = {'uri_prefix':... |
class SponsorContactFormTests(TestCase):
def test_ensure_model_form_configuration(self):
expected_fields = ['name', 'email', 'phone', 'primary', 'administrative', 'accounting']
meta = SponsorContactForm._meta
self.assertEqual(set(expected_fields), set(meta.fields))
self.assertEqual(S... |
def _run_queue_test(test_func):
def _run_sim(th, cmdline_opts, duts):
th.elaborate()
dut_objs = []
for dut in duts:
dut_objs.append(eval(f'th.{dut}'))
for obj in dut_objs:
obj.set_metadata(VerilogTranslationImportPass.enable, True)
th = VerilogTranslat... |
class Quant_Conv2d(Module):
def __init__(self, weight_bit, full_precision_flag=False):
super(Quant_Conv2d, self).__init__()
self.full_precision_flag = full_precision_flag
self.weight_bit = weight_bit
self.weight_function = AsymmetricQuantFunction.apply
def __repr__(self):
... |
def warmup_learning_rate(args, epoch, batch_id, total_batches, optimizer):
if (args.warm and (epoch <= args.warm_epochs)):
p = ((batch_id + ((epoch - 1) * total_batches)) / (args.warm_epochs * total_batches))
lr = (args.warmup_from + (p * (args.warmup_to - args.warmup_from)))
for param_group... |
class RagExampleArguments():
csv_path: str = field(default=str(((Path(__file__).parent / 'test_data') / 'my_knowledge_dataset.csv')), metadata={'help': "Path to a tab-separated csv file with columns 'title' and 'text'"})
question: Optional[str] = field(default=None, metadata={'help': "Question that is passed as... |
def test_get_pipeline_path_absolute_path():
abs_path = Path('tests/testpipelinewd.yaml').resolve()
str_abs_sans_yaml = str(abs_path.with_suffix(''))
path_found = fileloader.get_pipeline_path(str_abs_sans_yaml, None)
expected_path = cwd_tests.joinpath('testpipelinewd.yaml')
assert (path_found == expe... |
def pixel_values_check(imgs, interval, img_name):
    """Ensure every pixel of *imgs* lies within the inclusive *interval*.

    Parameters
    ----------
    imgs : array-like supporting elementwise comparison with ``.all()``
        (e.g. a numpy array of pixel values).
    interval : sequence
        Two-element lower/upper bound pair ``(low, high)``.
    img_name : str
        Label inserted into the error message.

    Raises
    ------
    ValueError
        If any pixel is below ``interval[0]`` or above ``interval[1]``.
    """
    lo, hi = interval[0], interval[1]
    # `not (...).all()` (rather than `(...).any()` on the inverse) keeps the
    # original semantics for non-comparable values such as NaN.
    if not (imgs >= lo).all():
        raise ValueError('Pixel values of {} are not >= {}'.format(img_name, lo))
    if not (imgs <= hi).all():
        raise ValueError('Pixel values of {} are not <= {}'.format(img_name, hi))
class GLTFModelDecoder(ModelDecoder):
def get_file_extensions(self):
return ['.gltf']
def decode(self, file, filename, batch):
if (not batch):
batch = pyglet.graphics.Batch()
vertex_lists = parse_gltf_file(file=file, filename=filename, batch=batch)
textures = {}
... |
class OpenFileEventFilter(QObject):
def __init__(self, windows):
self.windows = windows
super(OpenFileEventFilter, self).__init__()
def eventFilter(self, obj, event):
if (event.type() == QtCore.QEvent.FileOpen):
if (len(self.windows) >= 1):
self.windows[0].pay... |
def get_coder_layers0(model, type):
if (type == 'MultiLatentRPN'):
encoder_params = []
decoder_params = []
encoder_modules = []
decoder_modules = []
for idx in range(2, 5):
rpn = getattr(model.rpn_head, ('rpn' + str(idx)))
encoder_params += list(map(id... |
def quantitizer(base_function, handler_function=(lambda *args, **kwargs: 1.0)):
from .quantity import Quantity
def wrapped_function(*args, **kwargs):
handler_quantities = handler_function(*args, **kwargs)
args = list(args)
for i in range(len(args)):
if isinstance(args[i], Qua... |
def get_parser(parser=None):
if (parser is None):
parser = argparse.ArgumentParser()
model_arg = parser.add_argument_group('Model')
model_arg.add_argument('--num_layers', type=int, default=3, help='Number of LSTM layers')
model_arg.add_argument('--hidden', type=int, default=768, help='Hidden siz... |
class DBRef():
def __init__(self, is_sqlite3, dbname):
self.is_sqlite3 = is_sqlite3
self.dbname = dbname
self.TRUE = 'TRUE'
self.FALSE = 'FALSE'
if self.is_sqlite3:
self.TRUE = '1'
self.FALSE = '0'
def Open(self, connection_name):
dbname = ... |
class CustomCallbackSelect(discord.ui.Select):
def __init__(self, callback: Callable[([Interaction, discord.ui.Select], Coroutine[None])], *, custom_id: str=SELECT_MISSING, placeholder: (str | None)=None, min_values: int=1, max_values: int=1, options: list[SelectOption]=SELECT_MISSING, disabled: bool=False, row: (i... |
class TestMolecule(unittest.TestCase):
def test_get_orientations_in_wp(self):
m = pyxtal_molecule('Benzene')
g = Group(61)
self.assertTrue((len(m.get_orientations_in_wp(g[0])) == 1))
self.assertTrue((len(m.get_orientations_in_wp(g[1])) == 1))
self.assertTrue((len(m.get_orient... |
.parametrize('query', ['simple', 'public', 'repository'])
def test_search_query_count(query, app):
with client_with_identity('devtable', app) as cl:
params = {'query': query}
with assert_query_count(10):
result = conduct_api_call(cl, ConductSearch, 'GET', params, None, 200).json
... |
class Xskipper():
def __init__(self, sparkSession, uri, metadataStoreManagerClassName=None):
self.spark = sparkSession
self.uri = uri
if metadataStoreManagerClassName:
self.xskipper = self.spark._jvm.io.xskipper.Xskipper(self.spark._jsparkSession, uri, metadataStoreManagerClassNa... |
class DCSource(Seismosizer):
def setup(self):
self.set_name('Seismosizer: DCSource')
self.add_parameter(Param('Time', 'time', 0.0, (- 50.0), 50.0))
self.add_parameter(Param('North shift', 'north_km', 0.0, (- 50.0), 50.0))
self.add_parameter(Param('East shift', 'east_km', 0.0, (- 50.0... |
class VOC12ClassificationDatasetMSF(VOC12ClassificationDataset):
def __init__(self, img_name_list_path, voc12_root, img_normal=TorchvisionNormalize(), scales=(1.0,)):
self.scales = scales
super().__init__(img_name_list_path, voc12_root, img_normal=img_normal)
self.scales = scales
def __g... |
def test(model, test_loader, criterion):
device = torch.device(('cuda' if torch.cuda.is_available() else 'cpu'))
model.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for (data, target) in test_loader:
(data, target) = (data.to(device), target.to(device))
outpu... |
class GitRepo(object):
def __init__(self, directory, auto_init=True, author_name='Pynag User', author_email=None):
self.directory = directory
if ((author_name is None) or (author_name.strip() == '')):
author_name = 'Pynag User'
if ((author_email is None) or (author_email.strip() ... |
def test_scene_to_pixmap_exporter_export_when_file_not_writeable(view, tmpdir):
filename = os.path.join(tmpdir, 'foo.png')
with open(filename, 'w') as f:
f.write('foo')
os.chmod(filename, stat.S_IREAD)
item_img = QtGui.QImage(1000, 1200, QtGui.QImage.Format.Format_RGB32)
item = BeePixmapItem... |
class EvaluateTool(object):
def __init__(self, args):
self.args = args
def evaluate(self, preds, golds, section):
summary = {}
(gold_answers, predicted_answers) = ({}, {})
for (pred, gold) in zip(preds, golds):
qid = gold['id']
gold_answer = [item.strip() ... |
class StudentTOutput(DistributionOutput):
args_dim: Dict[(str, int)] = {'df': 1, 'loc': 1, 'scale': 1}
distribution_class: type = StudentT
def domain_map(cls, df: torch.Tensor, loc: torch.Tensor, scale: torch.Tensor):
scale = cls.squareplus(scale)
df = (2.0 + cls.squareplus(df))
retu... |
(cc=STDCALL, params={'hSCManager': SC_HANDLE, 'lpServiceName': LPCSTR, 'lpDisplayName': LPCSTR, 'dwDesiredAccess': DWORD, 'dwServiceType': DWORD, 'dwStartType': DWORD, 'dwErrorControl': DWORD, 'lpBinaryPathName': LPCSTR, 'lpLoadOrderGroup': LPCSTR, 'lpdwTagId': LPDWORD, 'lpDependencies': LPCSTR, 'lpServiceStartName': L... |
class DRNConv(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride, padding, dilation, activate):
super(DRNConv, self).__init__()
self.activate = activate
self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride... |
def test_migrate_to_version_data_too_new() -> None:
    """Migrating data stamped with a future schema version must raise UnsupportedVersion."""
    too_new = {'schema_version': 3}
    expected_message = 'Found version 3, but only up to 2 is supported. This file was created using a newer Randovania version.'
    with pytest.raises(migration_lib.UnsupportedVersion, match=expected_message):
        migration_lib.apply_migrations(too_new, [None])
def print_stats(var, name='', fmt='%.3g', cvt=(lambda x: x)):
var = np.asarray(var)
if name:
prefix = (name + ': ')
else:
prefix = ''
if (len(var) == 1):
print((('%sscalar: ' + fmt) % (prefix, cvt(var[0]))))
else:
fmt_str = ('mean: %s; std: %s; min: %s; max: %s' % (fm... |
class StrikerEnv(mujoco_env.MujocoEnv, utils.EzPickle):
def __init__(self):
utils.EzPickle.__init__(self)
self._striked = False
self._min_strike_dist = np.inf
self.strike_threshold = 0.2
mujoco_env.MujocoEnv.__init__(self, 'striker.xml', 5)
def _step(self, a):
vec... |
class distcheck(sdist):
def _check_manifest(self):
assert self.get_archive_files()
if (subprocess.call(['git', 'status'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) == 0):
included_files = self.filelist.files
assert included_files
process = subprocess.Popen([... |
def test_group():
cfg = {}
cfg['num_joints'] = 17
cfg['detection_threshold'] = 0.1
cfg['tag_threshold'] = 1
cfg['use_detection_val'] = True
cfg['ignore_too_much'] = False
cfg['nms_kernel'] = 5
cfg['nms_padding'] = 2
cfg['tag_per_joint'] = True
cfg['max_num_people'] = 1
parser... |
class logInF(object):
def __init__(self, logPre):
dirname = os.path.dirname(logPre)
if (not os.path.exists(dirname)):
os.makedirs(dirname)
logFile = ((logPre + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())) + 'log.txt')
self.prHandle = open(logFile, 'w')
def __... |
class DjangoCassandraModel(BaseModel, metaclass=DjangoCassandraModelMetaClass):
__queryset__ = DjangoCassandraQuerySet
__abstract__ = True
__table_name__ = None
__table_name_case_sensitive__ = False
__keyspace__ = None
__options__ = None
__discriminator_value__ = None
__compute_routing_k... |
def plot_perf_busy_with_sample(cpu_index):
file_name = 'cpu{:0>3}.csv'.format(cpu_index)
if os.path.exists(file_name):
output_png = ('cpu%03d_perf_busy_vs_samples.png' % cpu_index)
g_plot = common_all_gnuplot_settings(output_png)
g_plot('set y2range [0:200]')
g_plot('set y2tics 0... |
def test_fit_sandia_simple(get_test_iv_params, get_bad_iv_curves):
test_params = get_test_iv_params
test_params = dict(photocurrent=test_params['IL'], saturation_current=test_params['I0'], resistance_series=test_params['Rs'], resistance_shunt=test_params['Rsh'], nNsVth=test_params['nNsVth'])
testcurve = pvs... |
def get_txt(target):
    """Query and display TXT records for *target* (best-effort).

    Each record found is printed to stdout and, when the module-level
    ``outfile`` is set, echoed there as well.  Ordinary failures during
    lookup or output are silently ignored, matching the original
    best-effort intent.
    """
    out.verbose('Getting TXT records')
    try:
        res = lookup(target, 'TXT')
        if res:
            out.good('TXT records found')
            for txt in res:
                print(txt)
                if outfile:
                    print(txt, file=outfile)
            print('')
    except Exception:
        # Fix: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; `except Exception` keeps the best-effort
        # behavior for ordinary errors while letting those propagate.
        return
class GCN(BaseModel):
def __init__(self, num_classes, in_channels=3, pretrained=True, use_resnet_gcn=False, backbone='resnet50', use_deconv=False, num_filters=11, freeze_bn=False, **_):
super(GCN, self).__init__()
self.use_deconv = use_deconv
if use_resnet_gcn:
self.backbone = Re... |
class ResNet(nn.Module):
def __init__(self, block=BasicBlock, keep_prob=1.0, avg_pool=False, drop_rate=0.0, dropblock_size=5):
self.inplanes = 3
super(ResNet, self).__init__()
self.layer1 = self._make_layer(block, 64, stride=2, drop_rate=drop_rate)
self.layer2 = self._make_layer(bloc... |
class dataframe_cache(MutableMapping):
def __init__(self, path=None, lock=None, clean_on_failure=True, serialization='msgpack'):
self.path = (path if (path is not None) else mkdtemp())
self.lock = (lock if (lock is not None) else nop_context)
self.clean_on_failure = clean_on_failure
... |
_dataset('test_dataset')
class TestDataset(classy_dataset.ClassyDataset):
def __init__(self, samples, batchsize_per_replica=1, num_samples=None, shuffle=False, transform=None):
input_tensors = [sample['input'] for sample in samples]
target_tensors = [sample['target'] for sample in samples]
d... |
def _setattr(self, column_name, column, pos=False):
if (not len(self)):
return
isiterable = isinstance(column, (list, pd.Series, np.ndarray))
isdict = isinstance(column, dict)
if isiterable:
if (not (len(self) == len(column))):
raise Exception('DataFrame and column must be sa... |
class CoinCap(ExchangeBase):
async def get_rates(self, ccy):
json = (await self.get_json('api.coincap.io', '/v2/rates/qtum/'))
return {'USD': Decimal(json['data']['rateUsd'])}
def history_ccys(self):
return ['USD']
async def request_history(self, ccy):
history = (await self.g... |
class ToolbarTestCases(unittest.TestCase):
def setUp(self):
Timings.fast()
app = Application()
app.start(os.path.join(mfc_samples_folder, 'CmnCtrl1.exe'))
self.app = app
self.dlg = app.CommonControlsSample
self.dlg.SysTabControl.select(u'CToolBarCtrl')
self.ct... |
def connect_stations_same_station_id(lines, buses):
ac_freq = get_ac_frequency(lines)
station_id_list = buses.station_id.unique()
add_lines = []
from shapely.geometry import LineString
for s_id in station_id_list:
buses_station_id = buses[(buses.station_id == s_id)]
if (len(buses_sta... |
.xfail(reason='Relied on rewrite-case that is no longer supported by PyTensor')
def test_joint_logprob_subtensor():
size = 5
mu_base = np.power(10, np.arange(np.prod(size))).reshape(size)
mu = np.stack([mu_base, (- mu_base)])
sigma = 0.001
rng = pytensor.shared(np.random.RandomState(232), borrow=Tru... |
(1, 'lookfor')
def getCategory(lookfor, eager=None):
if isinstance(lookfor, int):
if (eager is None):
category = get_gamedata_session().query(Category).get(lookfor)
else:
category = get_gamedata_session().query(Category).options(*processEager(eager)).filter((Category.ID == lo... |
def set_window_focus_callback(window, cbfun):
window_addr = ctypes.cast(ctypes.pointer(window), ctypes.POINTER(ctypes.c_long)).contents.value
if (window_addr in _window_focus_callback_repository):
previous_callback = _window_focus_callback_repository[window_addr]
else:
previous_callback = No... |
class _override(contextlib.ContextDecorator):
def __init__(self, conf, **new_values):
self.conf = conf
self.new_values = new_values
self.initial_values = conf.snapshot()
def __enter__(self):
self.conf.load_dict(self.new_values)
def __exit__(self, *exc):
self.conf.load... |
def smooth(y, radius, mode='two_sided', valid_only=False):
assert (mode in ('two_sided', 'causal'))
if (len(y) < ((2 * radius) + 1)):
return (np.ones_like(y) * y.mean())
elif (mode == 'two_sided'):
convkernel = np.ones(((2 * radius) + 1))
out = (np.convolve(y, convkernel, mode='same'... |
def make_zone_file_from_dnsdb(zone):
zone_info = DnsdbApi.get_zone_info(zone)['data']
serial = zone_info['serial_num']
record_list = zone_info['records']
header = zone_info['header']
tmp_zonefile_path = _make_tmp_zone_filepath(zone)
make_zone_file(zone, tmp_zonefile_path, serial, header, record_... |
class Evaluator(object):
def __init__(self, num_class, ignore=False):
self.num_class = num_class
self.ignore = ignore
self.confusion_matrix = np.zeros(((self.num_class,) * 2))
def Precision_Recall(self):
precision = (np.diag(self.confusion_matrix) / (np.sum(self.confusion_matrix,... |
def default_list_deserializer(obj: list, cls: type=None, *, warn_on_fail: bool=False, tasks: int=1, task_type: type=Process, fork_inst: Type[StateHolder]=StateHolder, **kwargs) -> list:
cls_ = None
kwargs_ = {**kwargs}
cls_args = get_args(cls)
if cls_args:
cls_ = cls_args[0]
kwargs_['_in... |
def linguist(languageName):
locale = getLocale(languageName)
fname = 'pyzo_{}.tr'.format(locale.name())
filename = os.path.join(pyzo.pyzoDir, 'resources', 'translations', fname)
if (not os.path.isfile(filename)):
raise ValueError('Could not find {}'.format(filename))
qtcore_mod_name = pyzo.Q... |
def test_multiprocessing_write():
import numpy as np
import joblib
import time
def func():
n_jobs = 4
a = np.random.random((size, size))
def subprocess(i):
aa = a.copy()
time.sleep(2)
return aa[(i, i)]
results = joblib.Parallel(n_jobs=n... |
def _looks_like_parents_subscript(node: nodes.Subscript) -> bool:
if (not (isinstance(node.value, nodes.Attribute) and (node.value.attrname == 'parents'))):
return False
try:
value = next(node.value.infer())
except (InferenceError, StopIteration):
return False
return (isinstance(... |
def encode_task_experimental(task):
    """Serialize *task* (a dict) into a list of ``key:"value"`` strings.

    Works on a shallow copy (the caller's dict is untouched), joins a
    ``tags`` list into a comma-separated string, runs every value through
    ``encode_task_value``, and returns one line per field sorted by key.
    Falsy encoded values render as ``key:`` with no quoted payload.
    """
    encoded = dict(task)
    if 'tags' in encoded:
        encoded['tags'] = ','.join(encoded['tags'])
    encoded = {key: encode_task_value(key, value) for (key, value) in encoded.items()}
    lines = []
    for key, value in sorted(encoded.items(), key=itemgetter(0)):
        lines.append(('%s:"%s"' % (key, value)) if value else ('%s:' % (key,)))
    return lines
def test_run_stdin(pytester: Pytester) -> None:
with pytest.raises(pytester.TimeoutExpired):
pytester.run(sys.executable, '-c', 'import sys, time; time.sleep(1); print(sys.stdin.read())', stdin=subprocess.PIPE, timeout=0.1)
with pytest.raises(pytester.TimeoutExpired):
result = pytester.run(sys.e... |
class Test_pep440_old(unittest.TestCase, Testing_renderer_case_mixin):
style = 'pep440-old'
expected = {'tagged_0_commits_clean': 'v1.2.3', 'tagged_0_commits_dirty': 'v1.2.3.post0.dev0', 'tagged_1_commits_clean': 'v1.2.3.post1', 'tagged_1_commits_dirty': 'v1.2.3.post1.dev0', 'untagged_0_commits_clean': '0.post0... |
class QRDialog(Factory.Popup):
def __init__(self, title, data, show_text, *, failure_cb=None, text_for_clipboard=None, help_text=None):
Factory.Popup.__init__(self)
self.app = App.get_running_app()
self.title = title
self.data = data
self.help_text = ((data if show_text else ... |
class Normalization(nn.Module):
def __init__(self, embed_dim, normalization='batch'):
super(Normalization, self).__init__()
normalizer_class = {'batch': nn.BatchNorm1d, 'instance': nn.InstanceNorm1d}.get(normalization, None)
self.normalizer = normalizer_class(embed_dim, affine=True)
def ... |
def spectral_norm_fc(module, coeff: float, n_power_iterations: int=1, name: str='weight', eps: float=1e-12, dim: int=None):
if (dim is None):
if isinstance(module, (torch.nn.ConvTranspose1d, torch.nn.ConvTranspose2d, torch.nn.ConvTranspose3d)):
dim = 1
else:
dim = 0
Spect... |
def test_external_object():
ext_obj = OSC.ExternalObjectReference('my object')
ext_obj2 = OSC.ExternalObjectReference('my object')
ext_obj3 = OSC.ExternalObjectReference('my object 2')
assert (ext_obj == ext_obj2)
assert (ext_obj != ext_obj3)
ext_obj4 = OSC.ExternalObjectReference.parse(ext_obj.... |
def simplified_semver_version(version: ScmVersion) -> str:
if version.exact:
return guess_next_simple_semver(version, retain=SEMVER_LEN, increment=False)
elif ((version.branch is not None) and ('feature' in version.branch)):
return version.format_next_version(guess_next_simple_semver, retain=SEM... |
def InternalMirror(source_local, dest_local, src_dir, dest_dir, force=False):
src_root = rpath.RPath(Globals.local_connection, src_dir)
dest_root = rpath.RPath(Globals.local_connection, dest_dir)
dest_rbdir = dest_root.append('rdiff-backup-data')
InternalBackup(source_local, dest_local, src_dir, dest_di... |
def command_snuffle(args):
from pyrocko.gui.snuffler import snuffler
(parser, options, args) = cl_parse('map', args)
if (len(args) == 0):
args.append('.')
fn = get_scenario_yml(args[0])
if (not fn):
parser.print_help()
sys.exit(1)
project_dir = args[0]
gf_stores_path ... |
def avg_log10(depth1, depth2):
assert np.all((((np.isfinite(depth1) & np.isfinite(depth2)) & (depth1 >= 0)) & (depth2 >= 0)))
log_diff = (np.log10(depth1) - np.log10(depth2))
num_pixels = float(log_diff.size)
if (num_pixels == 0):
return np.nan
else:
return (np.sum(np.absolute(log_di... |
class ResNet_Cifar(nn.Module):
def __init__(self, block, num_blocks, pretrained=False, norm=False, Embed=True, feat_dim=2048, embed_dim=2048):
super(ResNet_Cifar, self).__init__()
self.in_planes = 64
self.layer0_conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
... |
class FPN_OAMP(nn.Module):
def __init__(self, A, lat_layers=3, contraction_factor=0.99, eps=0.01, max_depth=15, structure='ResNet', num_channels=64):
super(FPN_OAMP, self).__init__()
self.A = A.to(device)
self.W_pinv = torch.from_numpy(np.linalg.pinv(A)).to(device)
self.step = (self.... |
class CMakeBuild(build_ext):
def run(self):
try:
subprocess.check_output(['cmake', '--version'])
except OSError:
raise RuntimeError('CMake is not available.') from None
super().run()
def build_extension(self, ext):
if (ext.name != 'torchdata._torchdata'):
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.