code stringlengths 281 23.7M |
|---|
def test_local_hitran_co2(verbose=True, warnings=True, **kwargs):
df = hit2df(getTestFile('hitran_CO2_fragment.par'), cache='regen')
if verbose:
print('Read hitran_CO2_fragment.par')
print('')
print(df.head())
assert (list(df.loc[(0, ['v1u', 'v2u', 'l2u', 'v3u', 'v1l', 'v2l', 'l2l', ... |
def _csr_swap_in_row(row: sparse.csr_matrix, rng: np.random.Generator, p: float=0.1) -> sparse.csr_matrix:
assert (row.shape[0] == 1), f'Did not get a row!'
nonzero_idx = row.nonzero()[1]
shuffle_idx = np.arange(len(nonzero_idx))
n = int(round((len(shuffle_idx) * p)))
swap_idx = nonzero_idx[rng.choi... |
('pypyr.config.config.init')
def test_main_pass_with_defaults_context_positional(mock_config_init):
arg_list = ['blah', 'ctx string']
with patch('pypyr.pipelinerunner.run') as mock_pipeline_run:
with patch('pypyr.log.logger.set_root_logger') as mock_logger:
pypyr.cli.main(arg_list)
mock_... |
class TestExceptionSaver():
def test_exception_trapped(self):
with setuptools.sandbox.ExceptionSaver():
raise ValueError('details')
def test_exception_resumed(self):
with setuptools.sandbox.ExceptionSaver() as saved_exc:
raise ValueError('details')
with pytest.rai... |
class SquadQuestion(object):
def __init__(self, question_id: str, question: List[str], answers: Set[str], paragraph: SquadParagraph):
self.question_id = question_id
self.question = question
self.answers = answers
self.paragraph = paragraph
def __repr__(self) -> str:
retur... |
def Adjust_Initial_Num_Training_Step(adam_opt, step):
opt_dict = adam_opt.state_dict()
for param in opt_dict['param_groups'][0]['params']:
step_dict = {'step': step, 'exp_avg': torch.zeros(1), 'exp_avg_sq': torch.tensor(1)}
opt_dict['state'][param] = step_dict
adam_opt.load_state_dict(opt_di... |
class F32_Zipl(KickstartCommand):
removedKeywords = KickstartCommand.removedKeywords
removedAttrs = KickstartCommand.removedAttrs
def __init__(self, writePriority=10, *args, **kwargs):
KickstartCommand.__init__(self, writePriority, *args, **kwargs)
self.op = self._getParser()
self.se... |
def main():
parser = HfArgumentParser((ModelArguments, DataTrainingArguments, Seq2SeqTrainingArguments))
if ((len(sys.argv) == 2) and sys.argv[1].endswith('.json')):
(model_args, data_args, training_args) = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
else:
(model_args, dat... |
def solve_iteratively(batch: list[TypeVarId], graph: Graph, lowers: Bounds, uppers: Bounds) -> Solutions:
solutions = {}
s_batch = set(batch)
while s_batch:
for tv in sorted(s_batch, key=(lambda x: x.raw_id)):
if (lowers[tv] or uppers[tv]):
solvable_tv = tv
... |
def _decode_hook(d: Any) -> Any:
if ('tag' not in d):
if (('id' in d) and ('args' in d) and ('isEvent' in d) and ('timeout' in d)):
return Action(d['id'], d['args'], d['isEvent'], d['timeout'])
else:
return d
if (d['tag'] == 'RunResult'):
return RunResult(d['valid... |
class CiderScorer(object):
def copy(self):
new = CiderScorer(n=self.n)
new.ctest = copy.copy(self.ctest)
new.crefs = copy.copy(self.crefs)
return new
def __init__(self, df_mode='corpus', test=None, refs=None, n=4, sigma=6.0):
self.n = n
self.sigma = sigma
... |
def vector_copy_cont_get(src, src_start, src_end, dest, dest_start, i, env, cont, _vals):
from pycket.interpreter import check_one_val
val = check_one_val(_vals)
idx = (i + dest_start)
next = (i + 1)
return dest.vector_set(idx, val, env, goto_vector_copy_loop(src, src_start, src_end, dest, dest_star... |
class TestClassicOutputStyle():
def test_files(self, pytester: Pytester) -> None:
pytester.makepyfile(**{'test_one.py': 'def test_one(): pass', 'test_two.py': 'def test_two(): assert 0', 'sub/test_three.py': '\n def test_three_1(): pass\n def test_three_2(): assert 0\n ... |
class LinearSchedule(object):
def __init__(self, schedule_timesteps, final_p, initial_p=1.0):
self.schedule_timesteps = schedule_timesteps
self.final_p = final_p
self.initial_p = initial_p
def value(self, t):
fraction = min((float(t) / self.schedule_timesteps), 1.0)
retur... |
def generate_server_config(config):
config = (config or {})
tuf_server = config.get('TUF_SERVER', None)
tuf_host = config.get('TUF_HOST', None)
signing_enabled = config.get('FEATURE_SIGNING', False)
maximum_layer_size = config.get('MAXIMUM_LAYER_SIZE', '20G')
enable_rate_limits = config.get('FEA... |
def test_wheel_mode():
def build_wheel(extra_file_defs=None, **kwargs):
file_defs = {'setup.py': (DALS('\n # -*- coding: utf-8 -*-\n from setuptools import setup\n import setuptools\n setup(**%r)\n ') % kwargs).encode('utf-8')}
... |
class BaseModel(pl.LightningModule):
def __init__(self, cfg, *args, **kwargs):
super().__init__(*args, **kwargs)
self.cfg = CfgNode(nest_dict(cfg, sep='.'))
self.hparams = flatten_dict(cfg, sep='.')
self.loss = torch.nn.MSELoss()
self.criterion_psnr = mse2psnr
self.vo... |
def checkpoint_wrapper(m):
original_forward = m.forward
def _checkpointed_forward(*args, **kwargs):
(kwarg_keys, flat_args) = pack_kwargs(*args, **kwargs)
parent_ctx_dict = {}
output = CheckpointFunction.apply(original_forward, parent_ctx_dict, kwarg_keys, *flat_args)
if isinstan... |
class Annotate():
def __init__(self, image, alignments, original_roi=None):
logger.debug('Initializing %s: (alignments: %s, original_roi: %s)', self.__class__.__name__, alignments, original_roi)
self.image = image
self.alignments = alignments
self.roi = original_roi
self.colo... |
def test_FilterEQ():
dm = skc.mkdm(matrix=[[7, 5, 35], [5, 4, 26], [5, 6, 28], [1, 7, 30], [5, 8, 30]], objectives=[max, max, min], weights=[2, 4, 1], alternatives=['PE', 'JN', 'AA', 'MM', 'FN'], criteria=['ROE', 'CAP', 'RI'])
expected = skc.mkdm(matrix=[[5, 4, 26]], objectives=[max, max, min], weights=[2, 4, 1... |
def test_other_error(config: Config, pool: RepositoryPool, io: BufferedIO, mock_file_downloads: None, env: MockEnv, fixture_dir: FixtureDirGetter) -> None:
io.set_verbosity(Verbosity.NORMAL)
executor = Executor(env, pool, config, io)
package_name = 'simple-project'
package_version = '1.2.3'
director... |
class DeformConv3d(nn.Module):
def __init__(self, in_channels, groups, kernel_size=(3, 3), padding=1, stride=1, dilation=1, bias=True):
super(DeformConv3d, self).__init__()
self.offset_net = nn.Conv2d(in_channels=in_channels, out_channels=((2 * kernel_size[0]) * kernel_size[1]), kernel_size=3, paddi... |
_start_docstrings('The bare ResNet model outputting raw features without any specific head on top.', RESNET_START_DOCSTRING)
class ResNetModel(ResNetPreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.config = config
self.embedder = ResNetEmbeddings(config.num_channel... |
class Clock():
def __init__(self):
self._fps = 60
self._frameDuration = (1 / self._fps)
def fps(self):
return self._fps
def fps(self, value):
self._fps = value
if (value == 0):
self._frameDuration = 0
else:
self._frameDuration = (1 / se... |
class Generator(nn.Module):
def __init__(self, use_vgg=True):
super(Generator, self).__init__()
if use_vgg:
self.encoder = models.vgg16_bn(pretrained=True)
self.encoder = nn.Sequential(*(self.encoder.features[i] for i in (range(23) + range(24, 33))))
self.encoder[... |
def ffmpeg_get_packet_pts(file, packet):
    """Return the packet's timestamp rescaled into AV_TIME_BASE units.

    Uses the packet's decode timestamp (dts) when it is set; falls back
    to 0 when the dts equals AV_NOPTS_VALUE (no timestamp available).
    The value is rescaled from the owning stream's time base.
    """
    dts = packet.contents.dts
    # Prefer the real dts; AV_NOPTS_VALUE marks a missing timestamp.
    pts = dts if dts != AV_NOPTS_VALUE else 0
    stream = file.context.contents.streams[packet.contents.stream_index]
    return avutil.av_rescale_q(pts, stream.contents.time_base, AV_TIME_BASE_Q)
def get_args():
parser = argparse.ArgumentParser(description='RL')
parser.add_argument('--dataset_path', type=str, help='The path of the environments where we test')
parser.add_argument('--mode', type=str, default='full', choices=['simple', 'full', 'check', 'check_neurips'], help='Environment type')
par... |
def build_template(data):
template_ttm_order = []
template_annual_order = []
template_order = []
level_detail = []
def traverse(node, level):
if (level > 5):
return
template_ttm_order.append(f"trailing{node['key']}")
template_annual_order.append(f"annual{node['key... |
class DeleteEntityAction(_ActionType):
def __init__(self, entityref):
self.entityref = entityref
def __eq__(self, other):
if isinstance(other, DeleteEntityAction):
if (self.get_attributes() == other.get_attributes()):
return True
return False
def parse(ele... |
def sc_pioglitazone_mpo() -> GoalDirectedBenchmark:
specification = uniform_specification(1, 10, 100)
benchmark_object = pioglitazone_mpo()
sa_biased = ScoringFunctionSAWrapper(benchmark_object.objective, SCScoreModifier())
return GoalDirectedBenchmark(name='SC_pioglitazone', objective=sa_biased, contri... |
class XEncoder(nn.Module):
def __init__(self, d_model, hid_dim, out_dim, n_heads, win_size, dropout, gamma, bias, norm=None):
super(XEncoder, self).__init__()
self.n_heads = n_heads
self.win_size = win_size
self.self_attn = TCA(d_model, hid_dim, hid_dim, n_heads, norm)
self.l... |
def gen_srcs_dep_nottaken_test():
return [gen_br2_srcs_dep_test(5, 'bne', 1, 1, False), gen_br2_srcs_dep_test(4, 'bne', 2, 2, False), gen_br2_srcs_dep_test(3, 'bne', 3, 3, False), gen_br2_srcs_dep_test(2, 'bne', 4, 4, False), gen_br2_srcs_dep_test(1, 'bne', 5, 5, False), gen_br2_srcs_dep_test(0, 'bne', 6, 6, False)... |
class TestEventManager(TestCase):
def setUp(self):
self.em = EventManager()
self.event1 = Event(Always())
self.event2 = Event(Always())
def test_add_event(self):
self.em.add_event(self.event1)
self.assertEqual(len(self.em._events), 1)
def test_add_event_prepend(self):... |
def _python_executable_from_version(python_version: tuple[(int, int)]) -> str:
if (sys.version_info[:2] == python_version):
return sys.executable
str_ver = '.'.join(map(str, python_version))
try:
sys_exe = subprocess.check_output((python_executable_prefix(str_ver) + ['-c', 'import sys; print... |
def check_for_scan_jobs():
console_address = engine.config_data['console_address']
console_port = engine.config_data['console_port']
scan_engine = engine.config_data['scan_engine']
api_token = engine.config_data['api_token']
url = f'{console_address}:{console_port}/api/scheduled_scans'
ROOT_LOGG... |
class InterruptMode(IntEnum):
InterruptOrDMADisabled = 0
DMARisingEdge = 1
DMAFallingEdge = 2
DMAEitherEdge = 3
FlagRisingEdge = 5
FlagFallingEdge = 6
FlagEitherEdge = 7
InterruptLogicZero = 8
InterruptRisingEdge = 9
InterruptFallingEdge = 10
InterruptEitherEdge = 11
Inte... |
class Evaluator(Callback):
def __init__(self, nr_eval, input_names, output_names, get_player_fn):
self.eval_episode = nr_eval
self.input_names = input_names
self.output_names = output_names
self.get_player_fn = get_player_fn
def _setup_graph(self):
nr_proc = min((multipro... |
class GuiRemoveLocalDronesCommand(wx.Command):
def __init__(self, fitID, positions, amount):
wx.Command.__init__(self, True, 'Remove Local Drones')
self.internalHistory = InternalCommandHistory()
self.fitID = fitID
self.positions = positions
self.amount = amount
def Do(se... |
def _get_msmr_potential_model(parameter_values, param):
V_max = param.voltage_high_cut
V_min = param.voltage_low_cut
x_n = param.n.prim.x
x_p = param.p.prim.x
model = pybamm.BaseModel()
Un = pybamm.Variable('Un')
Up = pybamm.Variable('Up')
x = pybamm.InputParameter('x')
y = pybamm.In... |
def get_other_data(src_dir, build_dir, data_file):
other_data = 'Other info:\n'
other_data += (((indent + 'Build by: ') + get_user()) + '\n')
other_data += (((indent + 'Top src dir: ') + src_dir) + '\n')
other_data += (((indent + 'Top build dir: ') + build_dir) + '\n')
other_data += (((indent + 'Hos... |
def get_model_summary(model, *input_tensors, item_length=26, verbose=False):
summary = []
ModuleDetails = namedtuple('Layer', ['name', 'input_size', 'output_size', 'num_parameters', 'multiply_adds'])
hooks = []
layer_instances = {}
def add_hooks(module):
def hook(module, input, output):
... |
class Tiff(BaseImageHeader):
def content_type(self):
return MIME_TYPE.TIFF
def default_ext(self):
return 'tiff'
def from_stream(cls, stream):
parser = _TiffParser.parse(stream)
px_width = parser.px_width
px_height = parser.px_height
horz_dpi = parser.horz_dpi
... |
def test_mediator_accept_sha256_empty_hash():
channels = mediator_make_channel_pair()
from_transfer = factories.make_signed_transfer_for(channels[0], LockedTransferSignedStateProperties(initiator=HOP1, secret=Secret(EMPTY_HASH)), allow_invalid=True)
iteration = mediator.state_transition(mediator_state=None,... |
class QtileMigrations(SimpleDirectiveMixin, Directive):
required_arguments = 0
option_spec = {'summary': directives.flag, 'help': directives.flag}
def make_rst(self):
load_migrations()
context = {'migrations': [(m, len(m.ID)) for m in MIGRATIONS]}
if ('summary' in self.options):
... |
class Learner(object):
def __init__(self, args):
if (args.model == 'training_free'):
self.model = TFS3D(args)
elif (args.model == 'training'):
self.model = TFS3D_T(args)
if torch.cuda.is_available():
self.model.cuda()
if (args.model == 'training'):... |
def SetListenInfo(type, existingConfigLines, advanced=True):
configLines = list()
askForListenHours = True
askForListenPorts = True
if (type.lower() == 'level3'):
if advanced:
if dsz.ui.Prompt('Change the number of LISTEN LOOPS?', False):
loops = 0
whi... |
class BeamSearchDecoder(object):
def __init__(self, model, batcher, vocab, dqn=None):
self._model = model
self._model.build_graph()
self._batcher = batcher
self._vocab = vocab
self._saver = tf.train.Saver()
self._sess = tf.Session(config=util.get_config())
if ... |
class TestLogNormalDistribution(QiskitTestCase):
def assertDistributionIsCorrect(self, circuit, num_qubits, mu, sigma, bounds, upto_diag):
if (not isinstance(num_qubits, (list, np.ndarray))):
num_qubits = [num_qubits]
if (not isinstance(mu, (list, np.ndarray))):
mu = [mu]
... |
class ClientAgent(NetworkAgent):
def send_hello(self, engines: List[FuzzingEngineInfo], arch: Arch=None, platform: Platform=None) -> bool:
msg = HelloMsg()
arch = (get_local_architecture() if (arch is None) else arch)
if (arch is None):
logging.error(f'current architecture: {plat... |
def supported_params_wadl(service, site=g_default_site, url=g_url, majorversion=1, timeout=None, method='query'):
wadl = cached_wadl(service, site, url, majorversion, timeout)
if wadl:
url = fillurl(service, site, url, majorversion, method)
return set(wadl.supported_param_names(url))
else:
... |
def get_git_dict(fpath):
info = {}
if (not os.path.exists((fpath + '/.git/config'))):
return info
git_test = git_call(fpath, 'rev-parse HEAD').strip()
if ((not git_test) or (len(git_test) != 40)):
return info
info['rev'] = git_test
info['author'] = git_call(fpath, ('log -1 --form... |
class StanfordCars():
def __init__(self, root: str, split: str):
assert (split in {'train', 'test'})
self.root = root
self.split = split
self.annotations = self._open_annotations()
self.image_folder = os.path.join(self.root, f'cars_{split}')
self.class_names = self._g... |
def get_callback_data(bot, chat, name, data=None):
name = hashed_callback_name(name)
if (data is None):
data = ''
data = data.encode('utf-8')
if (len(data) > 32):
raise ValueError(('The provided data is too big (%s bytes), try to reduce it to 32 bytes' % len(data)))
signature = get_s... |
def compute_dense_reward(self, action, obs) -> float:
DISTANCE_WEIGHT = 1.0
GOAL_REACHED_REWARD = 100.0
ACTION_PENALTY = 0.1
distance = np.linalg.norm((self.robot.ee_position - self.obj1.position))
goal_diff = np.linalg.norm((self.obj1.position - self.goal_position))
action_penalty = (ACTION_PEN... |
class PyPyOfficialDistribution(Distribution):
_property
def version(self) -> Version:
from packaging.version import Version
(*_, remaining) = self.source.partition('/pypy/')
(_, version, *_) = remaining.split('-')
return Version(f'0!{version[1:]}')
_property
def python_pa... |
def get_config():
config = get_default_configs()
training = config.training
training.sde = 'vesde'
training.continuous = False
step_size = 6.2e-06
n_steps_each = 5
ckpt_id = 300000
final_only = True
noise_removal = False
sampling = config.sampling
sampling.method = 'pc'
s... |
def iter_catalog_hooks():
for name in SCHEMA_CATALOG:
config = dict(SCHEMA_CATALOG[name]['hook_config'])
config['schema_name'] = name
config['id'] = f'check-{name}'
config['description'] = (config.get('description') or f"{config['name']} against the schema provided by SchemaStore")
... |
def _compute_autoccz_distillation_error(l1_distance: int, l2_distance: int, physical_error_rate: float) -> float:
L0_distance = (l1_distance // 2)
L0_distillation_error = physical_error_rate
L0_topological_error = _total_topological_error(unit_cells=100, code_distance=L0_distance, gate_err=physical_error_ra... |
class SHD():
def __init__(self, truth: Graph, est: Graph):
truth_node_map = {node.get_name(): node_id for (node, node_id) in truth.node_map.items()}
est_node_map = {node.get_name(): node_id for (node, node_id) in est.node_map.items()}
assert (set(truth_node_map.keys()) == set(est_node_map.ke... |
.parametrize(('pyproject_text', 'expected_maintainers_meta_value'), (pytest.param(PEP621_EXAMPLE, 'Brett Cannon <>, "John X. Arcec" <.org>, <.org>', id='non-international-emails'), pytest.param(PEP621_INTERNATIONAL_EMAIL_EXAMPLE, ' <-.>', marks=pytest.mark.xfail(reason="CPython's `email.headerregistry.Address` only... |
def create_linux_zip(folder_to_pack: Path):
    """Pack *folder_to_pack* into the Linux release tarball.

    The archive is written to ``dist/<zip_folder>-linux.tar.gz`` below
    ``_ROOT_FOLDER``, with the folder stored under the ``zip_folder`` name.
    """
    output = _ROOT_FOLDER.joinpath(f'dist/{zip_folder}-linux.tar.gz')
    with tarfile.open(output, 'w:gz') as tar:
        print(f'Creating {output} from {folder_to_pack}.')
        tar.add(folder_to_pack, zip_folder)
        print('Finished.')
class UpConvBlock(nn.Module):
def __init__(self, conv_block, in_channels, skip_channels, out_channels, num_convs=2, stride=1, dilation=1, with_cp=False, conv_cfg=None, norm_cfg=dict(type='BN'), act_cfg=dict(type='ReLU'), upsample_cfg=dict(type='InterpConv'), dcn=None, plugins=None):
super().__init__()
... |
def test_mul_multiple_terms():
operator = QubitOperator(((1, 'X'), (3, 'Y'), (8, 'Z')), 0.5)
operator += QubitOperator(((1, 'Z'), (3, 'X'), (8, 'Z')), 1.2)
operator += QubitOperator(((1, 'Z'), (3, 'Y'), (9, 'Z')), 1.4j)
res = (operator * operator)
correct = QubitOperator((), (((0.5 ** 2) + (1.2 ** 2... |
class TestSentWebAppMessageWithoutRequest(TestSentWebAppMessageBase):
def test_slot_behaviour(self, sent_web_app_message):
inst = sent_web_app_message
for attr in inst.__slots__:
assert (getattr(inst, attr, 'err') != 'err'), f"got extra slot '{attr}'"
assert (len(mro_slots(inst))... |
class EffectiveSEModule(nn.Module):
def __init__(self, channels, gate_layer='hard_sigmoid'):
super(EffectiveSEModule, self).__init__()
self.fc = nn.Conv2d(channels, channels, kernel_size=1, padding=0)
self.gate = create_act_layer(gate_layer, inplace=True)
def forward(self, x):
x_... |
class ResourceRequirement(Requirement):
__slots__ = ('resource', 'amount', 'negate')
resource: ResourceInfo
amount: int
negate: bool
def __post_init__(self) -> None:
assert TypeError('No ResourceRequirement should be directly created')
def __copy__(self) -> ResourceRequirement:
r... |
def test_storage_initial_volume_table():
    """Initial volumes read from a table should match the fixture values."""
    filename = os.path.join(TEST_FOLDER, 'models', 'reservoir_initial_vol_from_table.json')
    model = Model.load(filename)
    # Expected initial volume per node, as defined in the JSON fixture.
    expected_volumes = {'supply1': 0.0, 'supply2': 35.0}
    for node_name, volume in expected_volumes.items():
        np.testing.assert_allclose(model.nodes[node_name].initial_volume, volume)
_fixtures(WebFixture, DataTableExampleFixture)
def test_pageable_table(web_fixture, data_table_example_fixture):
fixture = data_table_example_fixture
fixture.create_addresses()
browser = fixture.browser
browser.open('/')
assert (fixture.number_of_rows_in_table() == 10)
assert fixture.address_is_... |
class EquilibriumGraphRewriter(NodeProcessingGraphRewriter):
def __init__(self, rewriters: Sequence[Rewriter], failure_callback: Optional[FailureCallbackType]=None, ignore_newtrees: bool=True, tracks_on_change_inputs: bool=False, max_use_ratio: Optional[float]=None, final_rewriters: Optional[Sequence[GraphRewriter]... |
class CmdNewPassword(COMMAND_DEFAULT_CLASS):
key = 'userpassword'
locks = 'cmd:perm(newpassword) or perm(Admin)'
help_category = 'Admin'
def func(self):
caller = self.caller
if (not self.rhs):
self.msg('Usage: userpassword <user obj> = <new password>')
return
... |
def _validate_pep_references(line_num: int, line: str) -> MessageIterator:
line = line.removesuffix(',').rstrip()
if (line.count(', ') != line.count(',')):
(yield (line_num, "PEP references must be separated by comma-spaces (', ')"))
return
references = line.split(', ')
for reference in ... |
def get_dependencies_of_target(module_id: str, module_tree: MypyFile, target: Node, type_map: dict[(Expression, Type)], python_version: tuple[(int, int)]) -> dict[(str, set[str])]:
visitor = DependencyVisitor(type_map, python_version, module_tree.alias_deps)
with visitor.scope.module_scope(module_id):
i... |
.parametrize('version, expected', [('0', True), ('0.2', True), ('0.2.3', True), ('2!0.2.3', True), ('0.2.3+local', True), ('0.2.3.4', True), ('0.dev0', False), ('0.2dev0', False), ('0.2.3dev0', False), ('0.2.3.4dev0', False), ('0.post1', True), ('0.2.post1', True), ('0.2.3.post1', True), ('0.post1.dev0', False), ('0.2.... |
class ResidualBaseEncoder(nn.Module):
def __init__(self, channel, groups, alias=False):
super().__init__()
if alias:
self._net = nn.Sequential(ResidualBlockUnShuffle(3, channel), ResidualBlock(channel, channel, groups=groups), ResidualBlockUnShuffle(channel, channel, groups=groups), Atte... |
def register_token(token_network_registry_deploy_result: Callable[([], TokenNetworkRegistry)], token_deploy_result: Callable[([], Contract)]) -> TokenNetworkAddress:
token_network_registry_proxy = token_network_registry_deploy_result()
token_contract = token_deploy_result()
(_, token_network_address) = toke... |
def subst_vars(s, local_vars):
    """Perform variable substitution on *s* using format-map syntax.

    The lookup table is the current environment (``os.environ``) updated
    with *local_vars*, whose values are stringified.  ``check_environ()``
    is called first so the standard environment defaults are present.

    Raises ValueError (chained from the underlying KeyError) when *s*
    references a variable that is in neither the environment nor
    *local_vars*.
    """
    check_environ()
    lookup = dict(os.environ)
    lookup.update(((name, str(value)) for (name, value) in local_vars.items()))
    try:
        return _subst_compat(s).format_map(lookup)
    except KeyError as var:
        # Chain explicitly so the missing variable name is preserved as
        # the direct cause, not just as handler context.
        raise ValueError(f'invalid variable {var}') from var
def run_test(cfg, model, vis, eval_score_iou, eval_all_depths=True):
eval_types = ('detection',)
output_folders = ([None] * len(cfg.DATASETS.TEST))
dataset_names = cfg.DATASETS.TEST
if cfg.OUTPUT_DIR:
for (idx, dataset_name) in enumerate(dataset_names):
output_folder = os.path.join(c... |
class F33_TestCase(CommandTest):
command = 'timesource'
def runTest(self):
data1 = F33_TimesourceData()
data1.ntp_server = 'ntp.example.com'
data2 = F33_TimesourceData()
data2.ntp_server = 'ntp.example.com'
self.assertEqual(data1, data2)
self.assertFalse((data1 !=... |
class Model(tf.keras.Model):
def __init__(self, blocks_args=None, global_params=None):
super(Model, self).__init__()
if (not isinstance(blocks_args, list)):
raise ValueError('blocks_args should be a list.')
self._global_params = global_params
self._blocks_args = blocks_ar... |
class BlazeSplitAdjustedEstimatesLoader(BlazeEstimatesLoader):
def __init__(self, expr, columns, split_adjustments_loader, split_adjusted_column_names, split_adjusted_asof, **kwargs):
self._split_adjustments = split_adjustments_loader
self._split_adjusted_column_names = split_adjusted_column_names
... |
class _EncodingAnalyzer(Generic[_Statistics], abc.ABC):
observer_cls: Type[_Observer[_Statistics]]
def __init__(self, shape):
self.observer = self.observer_cls(shape)
_grad()
def update_stats(self, x: torch.Tensor) -> _Statistics:
new_stats = self.observer.collect_stats(x)
self.o... |
def test_zipimport_hook(pytester: Pytester) -> None:
zipapp = pytest.importorskip('zipapp')
pytester.path.joinpath('app').mkdir()
pytester.makepyfile(**{'app/foo.py': "\n import pytest\n def main():\n pytest.main(['--pyargs', 'foo'])\n "})
target = pytester.pa... |
def approve_signed_sponsorship_view(ModelAdmin, request, pk):
sponsorship = get_object_or_404(ModelAdmin.get_queryset(request), pk=pk)
initial = {'package': sponsorship.package, 'start_date': sponsorship.start_date, 'end_date': sponsorship.end_date, 'sponsorship_fee': sponsorship.sponsorship_fee}
form = Sig... |
def test_tps_preprocessor():
with pytest.raises(AssertionError):
TPSPreprocessor(num_fiducial=(- 1))
with pytest.raises(AssertionError):
TPSPreprocessor(img_size=32)
with pytest.raises(AssertionError):
TPSPreprocessor(rectified_img_size=100)
with pytest.raises(AssertionError):
... |
def getElementPos(obj):
vertex = getElementShape(obj, Part.Vertex)
if vertex:
return vertex.Point
face = getElementShape(obj, Part.Face)
if face:
surface = face.Surface
pln = face.findPlane()
if pln:
if (not face.countElement('Edge')):
return g... |
class Migration(migrations.Migration):
dependencies = [('questions', '0092_remove_verbose_name_plural')]
operations = [migrations.AlterField(model_name='catalog', name='help_lang1', field=models.TextField(blank=True, help_text='The help text for this catalog (in the primary language).', verbose_name='Help (prim... |
class Session(object):
def __init__(self):
self._headers = HEADERS
self._session = requests.sessions.Session()
def _set_auth_headers(self, access_token='', client_id=''):
self._headers['Authorization'] = 'Bearer {}'.format(access_token)
self._headers['X-Udemy-Authorization'] = 'B... |
class Anagram(commands.Cog):
def __init__(self, bot: Bot):
self.bot = bot
self.games: dict[(int, AnagramGame)] = {}
(name='anagram', aliases=('anag', 'gram', 'ag'))
async def anagram_command(self, ctx: commands.Context) -> None:
if self.games.get(ctx.channel.id):
(await c... |
def main(annotation_file, category):
assert (category in ['action', 'attribute', 'concept', 'event', 'object', 'scene'])
data = mmcv.load(annotation_file)
basename = osp.basename(annotation_file)
dirname = osp.dirname(annotation_file)
basename = basename.replace('hvu', f'hvu_{category}')
target_... |
def test_id_to_recap_alias_schema_default():
json_schema_str = '\n {\n "$id": " "type": "object",\n "properties": {\n "field": {\n "type": "string"\n }\n }\n }\n '
Draft202012Validator.check_schema(loads(json_schema_str))
recap_type =... |
def run_ruff(file: File, source: str) -> tuple[(bool, str)]:
import ruff
result = subprocess.run([sys.executable, '-m', 'ruff', 'check', '--fix', '--unsafe-fixes', '--output-format=text', '--stdin-filename', file.path, '-'], input=source, capture_output=True, encoding='utf8')
if (result.returncode != 0):
... |
(check_parser)
def do_check(args: argparse.Namespace) -> None:
t0 = time.time()
response = request(args.status_file, 'check', files=args.files, export_types=args.export_types)
t1 = time.time()
response['roundtrip_time'] = (t1 - t0)
check_output(response, args.verbose, args.junit_xml, args.perf_stats... |
class TestTfConnectedGraph(unittest.TestCase):
def test_multiple_transpose_conv2d(self):
tf.compat.v1.reset_default_graph()
with tf.device('/cpu:0'):
model = tf.keras.Sequential()
model.add(tf.keras.layers.Conv2DTranspose(1, (4, 4), input_shape=(28, 28, 3)))
model... |
class AppDefTest(unittest.TestCase):
def test_application(self) -> None:
trainer = Role('trainer', 'test_image', entrypoint='/bin/sleep', args=['10'], num_replicas=2)
app = AppDef(name='test_app', roles=[trainer])
self.assertEqual('test_app', app.name)
self.assertEqual(1, len(app.rol... |
('python_ta.tokenize.open', side_effect=UnicodeDecodeError('', b'', 0, 0, ''))
def test_pre_check_log_pylint_unicode_error(_, caplog) -> None:
expected_logs = ['python_ta could not check your code due to an invalid character. Please check the following lines in your file and all characters that are marked with a .'... |
class SubPile(TracesGroup):
def __init__(self, parent):
TracesGroup.__init__(self, parent)
self.files = []
self.empty()
def add_file(self, file):
self.files.append(file)
file.set_parent(self)
self.add(file)
def remove_file(self, file):
self.files.remov... |
class CocoDataset(Pix2pixDataset):
def modify_commandline_options(parser, is_train):
parser = Pix2pixDataset.modify_commandline_options(parser, is_train)
parser.set_defaults(preprocess_mode='resize_and_crop')
if is_train:
parser.set_defaults(load_size=286)
else:
... |
def test_video_inference_recognizer():
if torch.cuda.is_available():
device = 'cuda:0'
else:
device = 'cpu'
model = init_recognizer(video_config_file, None, device)
with pytest.raises(RuntimeError):
inference_recognizer(model, 'missing.mp4', label_path)
with pytest.raises(Run... |
_validate_coincident
def _relative_neighborhood(coordinates):
if (not HAS_NUMBA):
warnings.warn('The numba package is used extensively in this module to accelerate the computation of graphs. Without numba, these computations may become unduly slow on large data.', stacklevel=3)
(edges, dt) = _voronoi_ed... |
class LoadDyLib(LoadCommand):
def __init__(self, data):
super().__init__(data)
self.str_offset = unpack('<L', self.FR.read(4))[0]
self.time_stamp = unpack('<L', self.FR.read(4))[0]
self.current_version = unpack('<L', self.FR.read(4))[0]
self.compatibility_version = unpack('<L... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.