code stringlengths 281 23.7M |
|---|
def test_logger_with_defaults():
with patch.object(logging, 'basicConfig') as mock_logger:
pypyr.log.logger.set_root_logger()
mock_logger.assert_called_once()
(args, kwargs) = mock_logger.call_args
assert (kwargs['format'] == '%(message)s')
assert (kwargs['datefmt'] == '%Y-%m-%d %H:%M:%S')
... |
class AssetCloseToDueDateNotificationToSponsorsTestCase(TestCase):
def setUp(self):
self.notification = notifications.AssetCloseToDueDateNotificationToSponsors()
self.user = baker.make(settings.AUTH_USER_MODEL, email='')
self.verified_email = baker.make(EmailAddress, verified=True)
s... |
class ControlledBloq(Bloq):
subbloq: Bloq = field(validator=_no_nesting_ctrls_yet)
def pretty_name(self) -> str:
return f'C[{self.subbloq.pretty_name()}]'
def short_name(self) -> str:
return f'C[{self.subbloq.short_name()}]'
def __str__(self) -> str:
return f'C[{self.subbloq}]'
... |
class Conv3d(_ConvBase):
def __init__(self, in_size: int, out_size: int, *, kernel_size: Tuple[(int, int, int)]=(1, 1, 1), stride: Tuple[(int, int, int)]=(1, 1, 1), padding: Tuple[(int, int, int)]=(0, 0, 0), activation=nn.ReLU(inplace=True), bn: bool=False, init=nn.init.kaiming_normal_, bias: bool=True, preact: boo... |
def make_model(vocab, dec_num):
is_eval = config.test
model = SEEK(vocab, decoder_number=dec_num, is_eval=is_eval, model_file_path=(config.model_path if is_eval else None))
model.to(config.device)
for (n, p) in model.named_parameters():
if ((p.dim() > 1) and ((n != 'embedding.lut.weight') and co... |
def test_exit_with_reason_works_ok(pytester: Pytester) -> None:
    """pytest.exit(reason=...) must surface the given reason in the run output."""
    # Inner test session: a single test that exits immediately with reason "foo".
    source = '\n    import pytest\n\n    def test_exit_reason_only():\n        pytest.exit(reason="foo")\n    '
    test_path = pytester.makepyfile(source)
    run_result = pytester.runpytest(test_path)
    # The Exit outcome and its reason should appear on stdout.
    run_result.stdout.fnmatch_lines('*_pytest.outcomes.Exit: foo*')
class Wav2VecFeatureReader(object):
def __init__(self, cp_file):
(model, cfg, task) = fairseq.checkpoint_utils.load_model_ensemble_and_task([cp_file])
model = model[0]
model.eval()
model.cuda()
self.model = model
self.task = task
def read_audio(self, fname):
... |
def print_iface(iface):
    """Print a human-readable summary of a network interface.

    Emits the interface's name/description, MAC address, every IP paired
    with the interface-wide subnet mask, and — when DHCP is enabled — the
    DHCP server address.
    """
    print('Name: %s - %s' % (iface.name, iface.description))
    print('MAC: %s' % iface.address)
    # Every address shares the single interface-level subnet mask.
    formatted_ips = ['%s/%s' % (addr.ip, iface.subnetmask) for addr in iface.ipaddress]
    print('IPs: %s' % ', '.join(formatted_ips))
    if iface.dhcpenabled:
        print('DHCP server: %s' % iface.dhcp.ip)
def _find_step_fixturedef(fixturemanager: FixtureManager, item: Function, step: Step) -> (Sequence[FixtureDef[Any]] | None):
with inject_fixturedefs_for_step(step=step, fixturemanager=fixturemanager, nodeid=item.nodeid):
bdd_name = get_step_fixture_name(step=step)
return fixturemanager.getfixturedef... |
class NVCtrlQueryBinaryDataReplyRequest(rq.ReplyRequest):
_request = rq.Struct(rq.Card8('opcode'), rq.Opcode(X_nvCtrlQueryBinaryData), rq.RequestLength(), rq.Card16('target_id'), rq.Card16('target_type'), rq.Card32('display_mask'), rq.Card32('attr'))
_reply = rq.Struct(rq.ReplyCode(), rq.Card8('pad0'), rq.Card1... |
class ProcessTable(object):
def __init__(self, stdscr, jetson):
self.stdscr = stdscr
self.jetson = jetson
self.line_sort = 8
self.type_reverse = True
def draw(self, pos_y, pos_x, width, height, key, mouse):
processes = self.jetson.processes
try:
self.s... |
class TestSvgplotApp(unittest.TestCase):
def setUpClass(cls):
import svgplot_app
cls.AppClass = svgplot_app.MyApp
def setUp(self):
self.AppClass.log_request = (lambda x, y: None)
def tearDown(self):
del self.AppClass.log_request
self.app.on_close()
def test_main(s... |
_change_dist_size.register(TruncatedRV)
def change_truncated_size(op, dist, new_size, expand):
(*rv_inputs, lower, upper, rng) = dist.owner.inputs
untruncated_rv = op.base_rv_op.make_node(rng, *rv_inputs).default_output()
if expand:
new_size = (to_tuple(new_size) + tuple(dist.shape))
return Trun... |
class WeirdBrokenOp(COp):
__props__ = ('behaviour',)
def __init__(self, behaviour):
super().__init__()
self.behaviour = behaviour
def make_node(self, a):
a_ = pt.as_tensor_variable(a)
r = Apply(self, [a_], [a_.type()])
return r
def perform(*args, **kwargs):
... |
def _assert_are_tokens_of_type(lexer, examples, expected_token_type):
for (test_number, example) in enumerate(examples.split(), 1):
token_count = 0
for (token_type, token_value) in lexer.get_tokens(example):
if (token_type != Whitespace):
token_count += 1
... |
.unit()
.parametrize(('path', 'ignored_paths', 'expected'), [(Path('example').resolve(), ['example'], True), (Path('example', 'file.py').resolve(), ['example'], False), (Path('example', 'file.py').resolve(), ['example/*'], True)])
def test_pytask_ignore_collect(path, ignored_paths, expected):
is_ignored = pytask_ig... |
def inference():
deep_punctuation.load_state_dict(torch.load(model_save_path))
deep_punctuation.eval()
with open(args.in_file, 'r', encoding='utf-8') as f:
text = f.read()
text = re.sub('[,:\\-.!;?]', '', text)
words_original_case = text.split()
words = text.lower().split()
word_pos ... |
def test_device_host_file_step_by_step(tmp_path):
tmpdir = (tmp_path / 'storage')
tmpdir.mkdir()
dhf = DeviceHostFile(device_memory_limit=(1024 * 16), memory_limit=(1024 * 16), worker_local_directory=tmpdir)
a = np.random.random(1000)
b = cupy.random.random(1000)
dhf['a1'] = a
assert (set(dh... |
class PassAvatarIdTerminalRealm(TerminalRealm):
noisy = False
def _getAvatar(self, avatarId):
comp = components.Componentized()
user = self.userFactory(comp, avatarId)
sess = self.sessionFactory(comp)
sess.transportFactory = self.transportFactory
sess.chainedProtocolFacto... |
def pipx_temp_env_helper(pipx_shared_dir, tmp_path, monkeypatch, request, utils_temp_dir, pypi):
home_dir = ((Path(tmp_path) / 'subdir') / 'pipxhome')
bin_dir = ((Path(tmp_path) / 'otherdir') / 'pipxbindir')
man_dir = ((Path(tmp_path) / 'otherdir') / 'pipxmandir')
monkeypatch.setattr(constants, 'PIPX_SH... |
def test_function_signatures(doc):
assert (doc(m.kw_func0) == 'kw_func0(arg0: int, arg1: int) -> str')
assert (doc(m.kw_func1) == 'kw_func1(x: int, y: int) -> str')
assert (doc(m.kw_func2) == 'kw_func2(x: int = 100, y: int = 200) -> str')
assert (doc(m.kw_func3) == "kw_func3(data: str = 'Hello world!') ... |
class Migration(migrations.Migration):
dependencies = [('domain', '0014_is_attribute')]
operations = [migrations.AlterModelOptions(name='attributeentity', options={'ordering': ('label',), 'verbose_name': 'AttributeEntity', 'verbose_name_plural': 'AttributeEntities'}), migrations.RenameField(model_name='attribut... |
class KiteNoisePlot(KitePlot):
class NoisePatchROI(pg.RectROI):
def _makePen(self):
if self.mouseHovering:
return pen_roi_highlight
else:
return self.pen
def __init__(self, model):
self.components_available = {'displacement': ['Displacement... |
.fast
def test_equilibrium_condition():
from radis.test.utils import getTestFile
from radis.tools.database import load_spec
s1 = load_spec(getTestFile('CO_Tgas1500K_mole_fraction0.01.spec'), binary=True)
s2 = s1.copy()
s2.conditions['thermal_equilibrium'] = False
assert (s1.conditions['thermal_e... |
_fixtures(WhereFixture.local)
def test_path(where):
command = Ngrok()
(Executable)
class NgrokStub(ExecutableStub):
path = ''
def execute(self, method, commandline_arguments, *args, **kwargs):
self.path = kwargs['env']['PATH']
ngrok = NgrokStub('ngrok')
with ngrok.inserte... |
class MaskedLinear(nn.Linear):
def __init__(self, in_features: int, out_features: int, bias: bool=True, mask_init: str='constant', mask_scale: float=0.0, pruning_method: str='topK'):
super(MaskedLinear, self).__init__(in_features=in_features, out_features=out_features, bias=bias)
assert (pruning_met... |
def test_util_convenience_methods_errors():
bb = BloqBuilder()
qs = np.asarray([bb.allocate(5), bb.allocate(5)])
with pytest.raises(ValueError, match='.*expects a single Soquet'):
qs = bb.split(qs)
qs = bb.allocate(5)
with pytest.raises(ValueError, match='.*expects a 1-d array'):
qs ... |
def start_apiserver(raiden_app: RaidenService, rest_api_port_number: Port) -> APIServer:
raiden_api = RaidenAPI(raiden_app)
rest_api = RestAPI(raiden_api)
api_server = APIServer(rest_api, config=RestApiConfig(host=Host('localhost'), port=rest_api_port_number))
api_server.flask_app.config['SERVER_NAME'] ... |
class TrainOptions(BaseOptions):
def initialize(self):
BaseOptions.initialize(self)
self.parser.add_argument('--display_freq', type=int, default=100, help='frequency of showing training results on screen')
self.parser.add_argument('--print_freq', type=int, default=100, help='frequency of sho... |
('/PenguinDome/v1/server_pipe/<peer_type>/send', methods=('POST',))
('/penguindome/v1/server_pipe/<peer_type>/send', methods=('POST',))
_content
def pipe_send(peer_type):
if (peer_type not in ('client', 'server')):
raise Exception('Invalid peer type "{}"'.format(peer_type))
data = json.loads(request.for... |
def load_plugin_from_script(path: str, script_name: str, plugin_class: type[T], plugin_id: str) -> type[T]:
import importlib
spec = importlib.util.spec_from_file_location(script_name, path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
plugin_finder = f'get_{plugin_i... |
class Panda(skrobot.models.Panda):
def __init__(self, *args, **kwargs):
root_dir = path.Path(safepicking.__file__).parent
urdf_file = (root_dir / '_pybullet/data/franka_panda/panda_drl.urdf')
super().__init__(urdf_file=urdf_file)
def rarm(self):
link_names = ['panda_link{}'.forma... |
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--save_path', required=True)
parser.add_argument('--load_path', default=None)
parser.add_argument('--n_mel_channels', type=int, default=80)
parser.add_argument('--ngf', type=int, default=32)
parser.add_argument('--n_residu... |
_REGISTRY.register()
def build_retinanet_mit_fpn_backbone(cfg, input_shape: ShapeSpec):
bottom_up = build_mit_backbone(cfg, input_shape)
in_features = cfg.MODEL.FPN.IN_FEATURES
out_channels = cfg.MODEL.FPN.OUT_CHANNELS
in_channels_top = out_channels
top_block = LastLevelP6P7(in_channels_top, out_cha... |
def test_repr_pyobjectsdef_pyclass(project, mod1):
code = 'class MyClass: pass'
mod = libutils.get_string_module(project, code, mod1)
obj = mod.get_attribute('MyClass').pyobject
assert isinstance(obj, pyobjectsdef.PyClass)
assert repr(obj).startswith('<rope.base.pyobjectsdef.PyClass "pkg1.mod1::MyCl... |
def process_nodes(watch_nodes, iteration, iter_track_time):
if watch_nodes:
watch_nodes_start_time = time.time()
(watch_nodes_status, failed_nodes) = monitor_nodes()
iter_track_time['watch_nodes'] = (time.time() - watch_nodes_start_time)
logging.info(('Iteration %s: Node status: %s' ... |
def meanIoU(y_pred, y_true):
iou = np.zeros(2)
y_pred = np.argmax(y_pred, axis=(- 1)).astype(bool)
y_true = np.argmax(y_true, axis=(- 1)).astype(bool)
al = y_pred.shape[1]
pos = np.sum((y_pred * y_true), axis=1)
neg = np.sum(((~ y_pred) * (~ y_true)), axis=1)
iou[0] = np.mean((neg / (al - po... |
def _test():
import torch
pretrained = False
models = [shufflenetv2b_wd2, shufflenetv2b_w1, shufflenetv2b_w3d2, shufflenetv2b_w2]
for model in models:
net = model(pretrained=pretrained)
net.eval()
weight_count = _calc_width(net)
print('m={}, {}'.format(model.__name__, wei... |
def main():
test_opts = TestOptions().parse()
os.makedirs(test_opts.exp_dir, exist_ok=True)
ckpt = torch.load(test_opts.checkpoint_path, map_location='cpu')
opts = ckpt['opts']
opts.update(vars(test_opts))
if ('learn_in_w' not in opts):
opts['learn_in_w'] = False
opts = Namespace(**o... |
.functions
def test_drop_duplicate_columns_for_second_duplicated_column(df_duplicated_columns):
clean_df = df_duplicated_columns.drop_duplicate_columns(column_name='a', nth_index=1)
expected_df = pd.DataFrame({'a': range(10), 'b': range(10), 'a*': range(20, 30)}).clean_names(remove_special=True)
assert (cle... |
def updateFunction(old, new, debug, depth=0, visited=None):
old.__code__ = new.__code__
old.__defaults__ = new.__defaults__
if hasattr(old, '__kwdefaults'):
old.__kwdefaults__ = new.__kwdefaults__
old.__doc__ = new.__doc__
if (visited is None):
visited = []
if (old in visited):
... |
class RPrimitive(RType):
primitive_map: ClassVar[dict[(str, RPrimitive)]] = {}
def __init__(self, name: str, *, is_unboxed: bool, is_refcounted: bool, is_native_int: bool=False, is_signed: bool=False, ctype: str='PyObject *', size: int=PLATFORM_SIZE, error_overlap: bool=False) -> None:
RPrimitive.primit... |
def _dump_1e_ints(hij: np.ndarray, mos: Union[(range, List[int])], outfile: TextIO, beta: bool=False) -> None:
idx_offset = (1 if (not beta) else (1 + len(mos)))
hij_elements = set()
for (i, j) in itertools.product(mos, repeat=2):
if (i == j):
_write_to_outfile(outfile, hij[i][j], ((i + ... |
def ql_afl_fuzz(ql: Qiling, input_file: str, place_input_callback: Callable[(['Qiling', bytes, int], bool)], exits: List[int], validate_crash_callback: Callable[(['Qiling', int, bytes, int], bool)]=None, always_validate: bool=False, persistent_iters: int=1):
def _dummy_fuzz_callback(_ql: 'Qiling'):
if isins... |
def test_net(args):
print('Tester start ... ')
(train_dataset, test_dataset) = builder.dataset_builder(args)
test_dataloader = torch.utils.data.DataLoader(test_dataset, batch_size=args.bs_test, shuffle=False, num_workers=int(args.workers), pin_memory=True)
(base_model, psnet_model, decoder, regressor_de... |
def test_print_packages_if_verbose(upload_settings, caplog):
dists_to_upload = {helpers.WHEEL_FIXTURE: '15.4 KB', helpers.NEW_WHEEL_FIXTURE: '21.9 KB', helpers.SDIST_FIXTURE: '20.8 KB', helpers.NEW_SDIST_FIXTURE: '26.1 KB'}
upload_settings.verbose = True
result = upload.upload(upload_settings, dists_to_uplo... |
def convert_config(model, is_finetuned):
config = SEWConfig()
if is_finetuned:
fs_config = model.w2v_encoder.w2v_model.cfg
else:
fs_config = model.cfg
config.conv_bias = fs_config.conv_bias
conv_layers = eval(fs_config.conv_feature_layers)
config.conv_dim = [x[0] for x in conv_la... |
def ImportCoco(path, path_to_images=None, name=None, encoding='utf-8'):
with open(path, encoding=encoding) as cocojson:
annotations_json = json.load(cocojson)
images = pd.json_normalize(annotations_json['images'])
images.columns = ('img_' + images.columns)
try:
images['img_folder']
e... |
def resnet101(pretrained=False, root='~/.encoding/models', **kwargs):
    """Construct a ResNet-101 network.

    Args:
        pretrained (bool): when True, load weights from the local model
            store located under *root*.
        root (str): directory where downloaded model files are kept.
        **kwargs: forwarded unchanged to the ``ResNet`` constructor.

    Returns:
        The constructed (optionally pretrained) ``ResNet`` instance.
    """
    net = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)
    if not pretrained:
        return net
    from ..models.model_store import get_model_file
    # strict=False tolerates checkpoint keys that do not match the model exactly.
    state = torch.load(get_model_file('resnet101', root=root))
    net.load_state_dict(state, strict=False)
    return net
class ThumbRating(EventPlugin, UserInterfacePlugin):
PLUGIN_ID = 'Thumb Rating'
PLUGIN_NAME = _('Thumb Rating')
PLUGIN_DESC_MARKUP = _('Adds a thumb-up / thumb-down scoring system which is converted to a rating value. Useful for keeping running vote totals and sorting by <b><tt>~#score</tt></b>.')
PLUGI... |
class SignalFilter(QObject):
BLACKLIST = {'cur_scroll_perc_changed', 'cur_progress', 'cur_link_hovered'}
def __init__(self, win_id, parent=None):
super().__init__(parent)
self._win_id = win_id
def create(self, signal, tab):
log_signal = (debug.signal_name(signal) not in self.BLACKLIS... |
def main():
args = parse_args()
cfg = Config.fromfile(args.config)
if (args.options is not None):
cfg.merge_from_dict(args.options)
if cfg.get('cudnn_benchmark', False):
torch.backends.cudnn.benchmark = True
if (args.work_dir is not None):
cfg.work_dir = args.work_dir
eli... |
class GraphEncoder(nn.Module):
def __init__(self, n_atom_feat, n_node_hidden, n_bond_feat, n_edge_hidden, n_layers):
super().__init__()
self.embedding = Embedding(n_atom_feat, n_node_hidden, n_bond_feat, n_edge_hidden)
self.mpnn = MPNN(n_node_hidden, n_edge_hidden, n_layers)
def forward(... |
def impuser(args):
if ((args['username'] == None) or (args['password'] == None)):
logging.error('username or password has to be given')
else:
printT('Try to impersonate via creds...')
imp = Impersonate()
status = imp.impersonateViaCreds(login=args['username'], password=args['pass... |
def init(args):
target_file = f'models/{args.dir_name}/{args.model}_best.pth.tar'
pretrain_dir = f'./models/{args.dir_name}/{args.model}/'
test_pred_out = f'data/{args.dir_name}/test_data_predict.csv'
train_file = f'data/{args.dir_name}/train.csv'
dev_file = f'data/{args.dir_name}/dev.csv'
test_... |
class SvoWidget(QWidget):
_last_info_msg = Info()
_publisher = None
_subscriber = None
_num_received_msgs = 0
_svo_namespace = None
def __init__(self, svo_namespace='svo'):
super(SvoWidget, self).__init__()
self.setObjectName('SvoWidget')
ui_file = os.path.join(rospkg.Ros... |
def tree_decomp(mol):
n_atoms = mol.GetNumAtoms()
if (n_atoms == 1):
return ([[0]], [])
cliques = []
for bond in mol.GetBonds():
a1 = bond.GetBeginAtom().GetIdx()
a2 = bond.GetEndAtom().GetIdx()
if (not bond.IsInRing()):
cliques.append([a1, a2])
ssr = [lis... |
def test_get_scene_dataset(dmg: LocalDataManager, tmp_path: Path, zarr_dataset: ChunkedDataset) -> None:
concat_count = 4
zarr_input_path = dmg.require('single_scene.zarr')
zarr_output_path = str((tmp_path / f'{uuid4()}.zarr'))
zarr_concat(([zarr_input_path] * concat_count), zarr_output_path)
zarr_c... |
class TestSLSTRReader(TestSLSTRL1B):
class FakeSpl():
def ev(foo_x, foo_y):
return np.zeros((3, 2))
('satpy.readers.slstr_l1b.xr')
('scipy.interpolate.RectBivariateSpline')
def test_instantiate(self, bvs_, xr_):
bvs_.return_value = self.FakeSpl
xr_.open_dataset.return... |
_vcs_handler('git', 'pieces_from_vcs')
def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
GITS = ['git']
if (sys.platform == 'win32'):
GITS = ['git.cmd', 'git.exe']
env = os.environ.copy()
env.pop('GIT_DIR', None)
runner = functools.partial(runner, env=env)
(_, rc) =... |
class CNFizer(DagWalker):
THEORY_PLACEHOLDER = '__Placeholder__'
TRUE_CNF = frozenset()
FALSE_CNF = frozenset([frozenset()])
def __init__(self, environment=None):
DagWalker.__init__(self, environment)
self.mgr = self.env.formula_manager
self._introduced_variables = {}
sel... |
def parse_args_and_arch(parser, input_args=None, parse_known=False):
(args, _) = parser.parse_known_args(input_args)
if hasattr(args, 'arch'):
model_specific_group = parser.add_argument_group('Model-specific configuration', argument_default=argparse.SUPPRESS)
ARCH_MODEL_REGISTRY[args.arch].add_a... |
class TestTransformSetInputFormat(unittest.TestCase):
def setUp(self):
self.tfm = new_transformer()
def test_defaults(self):
actual = self.tfm.input_format
expected = {}
self.assertEqual(expected, actual)
actual_args = self.tfm._input_format_args(self.tfm.input_format)
... |
def test_cli_job_artifacts(capsysbinary, gitlab_config, job_with_artifacts):
cmd = ['gitlab', '--config-file', gitlab_config, 'project-job', 'artifacts', '--id', str(job_with_artifacts.id), '--project-id', str(job_with_artifacts.pipeline['project_id'])]
with capsysbinary.disabled():
artifacts = subproce... |
def moving_statistic(values: da.Array, statistic: Callable[(..., ArrayLike)], size: int, step: int, dtype: DType, **kwargs: Any) -> da.Array:
length = values.shape[0]
chunks = values.chunks[0]
if (len(chunks) > 1):
min_chunksize = np.min(chunks[:(- 1)])
else:
min_chunksize = np.min(chunk... |
('--component', '-c', required=True, multiple=True, help='Which components? [name|CLUSTER]')
('--user', '-u', default='reanahub', help='DockerHub user name [reanahub]')
('--image-name', help='Should the component have a custom image name?')
('--registry', '-r', default='docker.io', help='Registry to use in the image ta... |
def change_value_transforms(model: Model, vars_to_transforms: Mapping[(ModelVariable, Union[(Transform, None)])]) -> Model:
vars_to_transforms = {parse_vars(model, var)[0]: transform for (var, transform) in vars_to_transforms.items()}
if (set(vars_to_transforms.keys()) - set(model.free_RVs)):
raise Valu... |
class Effect3861(BaseEffect):
type = 'passive'
def handler(fit, module, context, projectionRange, **kwargs):
fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Afterburner')), 'speedFactor', module.getModifiedItemAttr('subsystemBonusMinmatarPropulsion'), skill='Minmatar Propulsion System... |
class WarmupOptimizer(OptimizerWrapper):
def __init__(self, optimizer: KeyedOptimizer, stages: List[WarmupStage], lr: float=0.1, lr_param: str='lr', param_name: str='__warmup') -> None:
super().__init__(optimizer)
self._stages: List[WarmupStage] = _lr_stages(stages)
self._lr_param: str = lr_... |
def accuracy(output, target, topk=(1,)):
maxk = max(topk)
(_, pred) = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, (- 1)).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].view((- 1)).float().sum(0)
res.append(correct_k.mul_(100... |
def get_files(**kwargs):
metadata_directory = kwargs.get('metadata_directory', '')
package_paths = kwargs.get('package_paths', [])
files = [File(Path(metadata_directory, 'licenses', f.path), f.contents) for f in get_template_files(**kwargs) if (str(f.path) == 'LICENSE.txt')]
pth_file_name = f"_{kwargs['... |
class Float(AbstractParser):
min_value = (- 3.4028235e+38)
max_value = 3.4028235e+38
def __init__(self, min_value: float=min_value, max_value: float=max_value) -> None:
self.min_value = min_value
self.max_value = max_value
def parse(self, s: str) -> tuple:
section = s.split()[0]
... |
def get_named_beta_schedule(schedule_name, num_diffusion_timesteps):
if (schedule_name == 'linear'):
scale = (1000 / num_diffusion_timesteps)
beta_start = (scale * 0.0001)
beta_end = (scale * 0.02)
return np.linspace(beta_start, beta_end, num_diffusion_timesteps, dtype=np.float64)
... |
def convert_tf_weight_name_to_pt_weight_name(tf_name, start_prefix_to_remove='', tf_weight_shape=None):
tf_name = tf_name.replace(':0', '')
tf_name = re.sub('/[^/]*___([^/]*)/', '/\\1/', tf_name)
tf_name = tf_name.replace('_._', '/')
tf_name = re.sub('//+', '/', tf_name)
tf_name = tf_name.split('/')... |
class WarmStartGradientReverseLayer(nn.Module):
def __init__(self, alpha: Optional[float]=1.0, lo: Optional[float]=0.0, hi: Optional[float]=1.0, max_iters: Optional[int]=1000.0, auto_step: Optional[bool]=False):
super(WarmStartGradientReverseLayer, self).__init__()
self.alpha = alpha
self.lo... |
def add_CollectionsServicer_to_server(servicer, server):
rpc_method_handlers = {'Get': grpc.unary_unary_rpc_method_handler(servicer.Get, request_deserializer=collections__pb2.GetCollectionInfoRequest.FromString, response_serializer=collections__pb2.GetCollectionInfoResponse.SerializeToString), 'List': grpc.unary_un... |
class CmdStateNN(_COMMAND_DEFAULT_CLASS):
key = 'nn'
help_category = 'BatchProcess'
locks = 'cmd:perm(batchcommands)'
def func(self):
caller = self.caller
arg = self.args
if (arg and arg.isdigit()):
step = int(self.args)
else:
step = 1
step... |
def replace_orderByLimit1_to_subquery(sql_query, column_names):
schema_for_parse = SchemaFromSpider.build_from_schema(column_names)
sql_data = get_sql(schema_for_parse, sql_query)
try:
assert (('limit' in sql_data) and (sql_data['limit'] == 1) and ('orderBy' in sql_data))
sort_direction = sq... |
def check_image_dtype_and_shape(image):
if (not isinstance(image, np.ndarray)):
raise Exception(f'image is not np.ndarray!')
if isinstance(image.dtype, (np.uint8, np.uint16)):
raise Exception(f'Unsupported image dtype, only support uint8 and uint16, got {image.dtype}!')
if (image.ndim not in... |
_bp.route(MANIFEST_DIGEST_ROUTE, methods=['DELETE'])
_for_account_recovery_mode
_repository_name()
_registry_jwt_auth(scopes=['pull', 'push'])
_repo_write(allow_for_superuser=True, disallow_for_restricted_users=True)
_protect
_readonly
def delete_manifest_by_digest(namespace_name, repo_name, manifest_ref):
with db_... |
class BaseGameTabWidget(QtWidgets.QTabWidget):
tab_intro: QtWidgets.QWidget
tab_generate_game: GenerateGameWidget
quick_generate_button: QtWidgets.QPushButton
game_cover_label: (QtWidgets.QLabel | None) = None
intro_label: (QtWidgets.QLabel | None) = None
faq_label: (QtWidgets.QLabel | None) = N... |
def evaluate_skip_marks(item: Item) -> Optional[Skip]:
for mark in item.iter_markers(name='skipif'):
if ('condition' not in mark.kwargs):
conditions = mark.args
else:
conditions = (mark.kwargs['condition'],)
if (not conditions):
reason = mark.kwargs.get('r... |
def test_asyncio_strict_mode_module_level_skip(pytester: Pytester):
pytester.makepyfile(dedent(' import pytest\n\n pytest.skip("Skip all tests", allow_module_level=True)\n\n .asyncio\n async def test_is_skipped():\n pass\n '))... |
class TickerHandler(object):
ticker_pool_class = TickerPool
def __init__(self, save_name='ticker_storage'):
self.ticker_storage = {}
self.save_name = save_name
self.ticker_pool = self.ticker_pool_class()
def _get_callback(self, callback):
(outobj, outpath, outcallfunc) = (Non... |
class NYUDataModule(pl.LightningDataModule):
def __init__(self, root, preprocess_root, n_relations=4, batch_size=4, frustum_size=4, num_workers=6):
super().__init__()
self.n_relations = n_relations
self.preprocess_root = preprocess_root
self.root = root
self.batch_size = batc... |
def _check_if_missing_docker_releases() -> None:
remaining_docker_releases = []
for component in REPO_LIST_CLUSTER:
if (not is_component_dockerised(component)):
continue
if (not git_is_current_version_tagged(component)):
remaining_docker_releases.append(component)
if ... |
class Effect1634(BaseEffect):
type = 'passive'
def handler(fit, container, context, projectionRange, **kwargs):
level = (container.level if ('skill' in context) else 1)
fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Capital Shield Operation')), 'capacitorNeed', (container.get... |
class MonokaiStyle(Style):
name = 'monokai'
background_color = '#272822'
highlight_color = '#49483e'
styles = {Token: '#f8f8f2', Whitespace: '', Error: '#ed007e bg:#1e0010', Other: '', Comment: '#959077', Comment.Multiline: '', Comment.Preproc: '', Comment.Single: '', Comment.Special: '', Keyword: '#66d... |
def change_rule(repository, rule_type, rule_value):
validate_rule(rule_type, rule_value)
mirrorRule = get_root_rule(repository)
if (not mirrorRule):
raise ValidationError('validation failed: rule not found')
query = RepoMirrorRule.update(rule_value=rule_value).where((RepoMirrorRule.id == mirrorR... |
def loss_dis(y_disc_r_det, y_disc_gen_det, fmap_r_det, fmap_gen_det, y_df_hat_r, y_df_hat_g, fmap_f_r, fmap_f_g, y_ds_hat_r, y_ds_hat_g, fmap_s_r, fmap_s_g, global_step, args):
disc_factor = adopt_weight(args.LAMBDA_ADV, global_step, threshold=args.discriminator_iter_start)
d_loss = (disc_factor * criterion_d(y... |
class UnetUpBlock(nn.Module):
def __init__(self, inplanes, planes, kernel_size=3, dilation=1, act_type='relu'):
super(UnetUpBlock, self).__init__()
self.up = nn.ConvTranspose2d(inplanes, (inplanes // 2), kernel_size=4, stride=2, padding=1)
self.conv = UnetBottleneck(inplanes, planes, kernel_... |
class SawyerDrawerOpenV2Policy(Policy):
_fully_parsed
def _parse_obs(obs):
return {'hand_pos': obs[:3], 'gripper': obs[3], 'drwr_pos': obs[4:7], 'unused_info': obs[7:]}
def get_action(self, obs):
o_d = self._parse_obs(obs)
action = Action({'delta_pos': np.arange(3), 'grab_effort': 3}... |
_request_params(docs._access_token, docs._create_observation)
def create_observation(**params) -> ListResponse:
(photos, sounds, _, params, kwargs) = convert_observation_params(params)
response = post(url=f'{API_V0}/observations.json', json={'observation': params}, **kwargs)
response_json = response.json()
... |
class TestKSMCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('KSMCollector', {'interval': 10, 'ksm_path': (os.path.dirname(__file__) + '/fixtures/')})
self.collector = KSMCollector(config, None)
def test_import(self):
self.assertTrue(KSMCollector)
('os.ac... |
class RDPLoss(torch.nn.Module):
def __init__(self, random_projection_net, reduction='mean'):
super(RDPLoss, self).__init__()
self.rp_net = random_projection_net
self.mse = torch.nn.MSELoss(reduction=reduction)
self.reduction = reduction
def forward(self, rep, rep1, x, x1):
... |
def check_script(program_str, precond, graph_path, inp_graph_dict=None, modify_graph=True, id_mapping={}, info={}):
helper = utils.graph_dict_helper(max_nodes=max_nodes)
try:
script = read_script_from_list_string(program_str)
except ScriptParseException:
return (None, None, None, None, None,... |
_config
def test_ls(manager):
client = ipc.Client(manager.sockfile)
command = IPCCommandInterface(client)
sh = QSh(command)
assert (sh.do_ls(None) == 'bar/ group/ layout/ screen/ widget/ window/ core/ ')
assert (sh.do_ls('') == 'bar/ group/ layout/ screen/ widget/ window/ core/ ... |
def train_segmentor(model, dataset, cfg, distributed=False, validate=False, timestamp=None, meta=None):
logger = get_root_logger(cfg.log_level)
dataset = (dataset if isinstance(dataset, (list, tuple)) else [dataset])
data_loaders = [build_dataloader(ds, cfg.data.samples_per_gpu, cfg.data.workers_per_gpu, le... |
.parametrize('response_code,response_body1,response_body2,expected', [(200, valid_targets_with_delegation, valid_delegation, {'targets/devs': {'targets': valid_delegation['signed']['targets'], 'expiration': valid_delegation['signed']['expires']}}), (200, {'garbage': 'data'}, {'garbage': 'data'}, {'targets': None})])
de... |
_pipeline_test
class SummarizationPipelineTests(unittest.TestCase):
model_mapping = MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
tf_model_mapping = TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING
def get_test_pipeline(self, model, tokenizer, processor):
summarizer = SummarizationPipeline(model=model, tokenizer=... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.