| code (string · lengths 281 – 23.7M) |
|---|
def interleave_blocks(types: Tuple[(str, str)], d, every: Union[(int, List[int])]=1, first: bool=False, **kwargs) -> Tuple[ByoBlockCfg]:
assert (len(types) == 2)
if isinstance(every, int):
every = list(range((0 if first else every), d, (every + 1)))
if (not every):
every = [(d - 1)]
... |
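A quick check of the `range()` indexing above — a minimal sketch with illustrative values for `d`, `every`, and `first`:

```python
# Positions the second block type occupies in a stack of depth d=6:
list(range(1, 6, 2))  # every=1, first=False -> [1, 3, 5]
list(range(0, 6, 2))  # every=1, first=True  -> [0, 2, 4]
```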
def plot_tps_meas_diff(displacement, meas_dose, monaco_dose, dosecheck_dose):
(fig, ax) = plt.subplots(1, 2, figsize=(10.5, 6), sharey=True)
ax[1].yaxis.set_tick_params(which='both', labelbottom=True)
ax_twin = list()
ax_twin.append(plot_one_axis(ax[0], displacement, meas_dose, monaco_dose))
ax_twin... |
def get_library() -> ctypes.CDLL:
lib = load_library(BTRACK_LIB_PATH)
lib.new_interface.restype = ctypes.c_void_p
lib.new_interface.argtypes = [ctypes.c_bool]
lib.del_interface.restype = None
lib.del_interface.argtypes = [ctypes.c_void_p]
lib.set_update_mode.restype = None
lib.set_update_mod... |
class NSDataset(object):
def __init__(self, case_id, Nx, Nz, dt, file_list, is_normalize=False):
super(NSDataset, self).__init__()
self.case_id = case_id
self.dataset_dir = f'{ROOT}/{case_id}'
self.interval = (Nx * Nz)
self.file_list = file_list
self.is_normalize = is... |
def single_gpu_training(args, rank, iterations, shared_results):
is_cuda = torch.cuda.is_available()
if is_cuda:
torch.cuda.set_device(rank)
(model, loss_fn, optimizer) = setup_model_loss_criterion(args, rank, is_cuda)
for _ in range(iterations):
input = torch.randn(1, args.input_size)
... |
class GDict():
def __init__(self, item=None, faster=False, **kwargs):
self.memory = (item if faster else self.to_item(item))
self.capacity = getattr(item, 'capacity', None)
    @classmethod
    def _is_final(cls, item):
        return (not isinstance(item, (list, dict)))
    @classmethod
    def to_item(cls, item):
if isinst... |
@pytest.mark.skipif((not numpyro_available), reason='Multinomial dispatch requires numpyro')
def test_multinomial():
rng = shared(np.random.RandomState(123))
n = np.array([10, 40])
p = np.array([[0.3, 0.7, 0.0], [0.1, 0.4, 0.5]])
g = pt.random.multinomial(n, p, size=(10000, 2), rng=rng)
g_fn = random_function([... |
def test_Stochastic():
nd = OSC.NormalDistribution(0, 1)
stoc = OSC.Stochastic(100, 1.234)
stoc.add_distribution('myparam1', nd)
stoc2 = OSC.Stochastic(100, 1.234)
stoc2.add_distribution('myparam1', nd)
stoc3 = OSC.Stochastic(100, 1.234)
stoc3.add_distribution('myparam1', nd)
stoc3.add_d... |
def main():
parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments))
if ((len(sys.argv) == 2) and sys.argv[1].endswith('.json')):
(model_args, data_args, training_args) = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
else:
(model_args, data_args,... |
class ReduceLROnPlateauWithWarmup(object):
def __init__(self, optimizer, mode='min', factor=0.1, patience=10, threshold=0.0001, threshold_mode='rel', cooldown=0, min_lr=0, eps=1e-08, verbose=False, warmup_lr=None, warmup=0):
if (factor >= 1.0):
raise ValueError('Factor should be < 1.0.')
... |
class ListableAPIResource(APIResource):
    @classmethod
    def all(cls, *args, **params):
        warnings.warn('The `all` class method is deprecated and will be removed in future versions. Please use the `list` class method instead', DeprecationWarning)
return cls.list(*args, **params)
def auto_paging_iter(self, *args, **... |
def resolve_path(schema, fragment):
fragment = fragment.lstrip('/')
parts = (unquote(fragment).split('/') if fragment else [])
for part in parts:
part = part.replace('~1', '/').replace('~0', '~')
if isinstance(schema, list):
schema = schema[int(part)]
elif (part in schema... |
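A usage sketch for the unescaping above (per RFC 6901, '~1' decodes to '/' and '~0' to '~'); the sample schema is hypothetical, assuming the function returns the sub-schema it lands on:

```python
schema = {'definitions': {'a~b': {'items': [{'type': 'string'}]}}}
# 'a~0b' -> key 'a~b'; '0' indexes into the list under 'items'
resolve_path(schema, '/definitions/a~0b/items/0')  # -> {'type': 'string'}
```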
def reduce(fn, sequences, outputs_info, non_sequences=None, go_backwards=False, mode=None, name=None):
rval = scan(fn=fn, sequences=sequences, outputs_info=outputs_info, non_sequences=non_sequences, go_backwards=go_backwards, truncate_gradient=(- 1), mode=mode, name=name)
if isinstance(rval[0], (list, tuple)):
... |
@require_tf
@require_retrieval
@require_sentencepiece
class TFRagTestMixin():
all_model_classes = ((TFRagModel, TFRagTokenForGeneration, TFRagSequenceForGeneration) if (is_tf_available() and is_datasets_available() and is_faiss_available()) else ())
all_generative_model_classes = ((TFRagTokenForGeneration, TFRagSequenceForGeneration) ... |
@pytest.mark.requires_user_action
class TextWindowEventsTest(WindowEventsTestCase):
number_of_checks = 10
text = '`-=~!#$%^&*()_+qwertyuiop[]\\QWERTYUIOP{}|asdfghjkl;\'ASDFGHJKL:"zxcvbnm,./ZXCVBNM<>?'
def setUp(self):
super(TextWindowEventsTest, self).setUp()
self.chosen_text = None
self.checks_... |
def main():
parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments))
if ((len(sys.argv) == 2) and sys.argv[1].endswith('.json')):
(model_args, data_args, training_args) = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
else:
(model_args, data_args,... |
class OUStrategy(ExplorationStrategy, Serializable):
def __init__(self, env_spec, mu=0, theta=0.15, sigma=0.3, **kwargs):
assert isinstance(env_spec.action_space, Box)
assert (len(env_spec.action_space.shape) == 1)
Serializable.quick_init(self, locals())
self.mu = mu
self.the... |
class Resample2dFunction(Function):
    @staticmethod
    def forward(ctx, input1, input2, kernel_size=1):
assert input1.is_contiguous()
assert input2.is_contiguous()
ctx.save_for_backward(input1, input2)
ctx.kernel_size = kernel_size
(_, d, _, _) = input1.size()
(b, _, h, w) = input2.size... |
def decodeguid(guid, key):
guid = guid.replace('-', '').replace('{', '').replace('}', '')
decryptleft = int(guid[0:16], 16)
decryptright = int(guid[16:32], 16)
leftkey = int(key[0:16], 16)
rightkey = int(key[16:32], 16)
return ('%016X%016X' % ((decryptleft ^ leftkey), (decryptright ^ rightkey))) |
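Since XOR with a fixed key is self-inverse, decodeguid is its own encoder; a round-trip check with hypothetical values:

```python
guid = '{00112233-4455-6677-8899-AABBCCDDEEFF}'
key = '0123456789ABCDEF0123456789ABCDEF'
once = decodeguid(guid, key)  # strips punctuation, XORs both 64-bit halves
assert decodeguid(once, key) == '00112233445566778899AABBCCDDEEFF'
```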
class CharacterCreateView(CharacterMixin, ObjectCreateView):
template_name = 'website/character_form.html'
def form_valid(self, form):
account = self.request.user
character = None
self.attributes = {k: form.cleaned_data[k] for k in form.cleaned_data.keys()}
charname = self.attrib... |
class NVCompCompressor(CudaCodec):
def __init__(self, device_ordinal: int=0):
self.device_ordinal = device_ordinal
def get_nvcomp_manager(self) -> kvikio.nvcomp.nvCompManager:
pass
def encode(self, buf: BufferLike) -> cupy.typing.NDArray:
buf = cupy.asarray(ensure_contiguous_ndarray_... |
class BackBtn(discord.ui.Button):
view: EmbedBuilder
def __init__(self, ctx: Context, scrim: Scrim, msg: discord.Message=None):
super().__init__(style=discord.ButtonStyle.red, label='Exit')
self.ctx = ctx
self.scrim = scrim
self.msg = msg
async def callback(self, interaction:... |
def get_best_bundles_by_category(country_list, category, config_bundles, tutorial, config_enable):
df_matches = pd.DataFrame(columns=['bundle_name', 'bundle_size', 'n_matched'])
for (bname, bvalue) in config_bundles.items():
if ((bvalue['category'] == category) and (bvalue.get('tutorial', False) == tuto... |
class BranchModel(TreeModel):
progress = Signal(object)
def __init__(self, glb, event_id, where_clause, parent=None):
super(BranchModel, self).__init__(glb, None, parent)
self.event_id = event_id
self.more = True
self.populated = 0
self.have_ipc = IsSelectable(glb.db, 'sa... |
def _gamestats():
fpage_account_limit = 4
recent_users = AccountDB.objects.get_recently_connected_accounts()[:fpage_account_limit]
nplyrs_conn_recent = (len(recent_users) or 'none')
nplyrs = (AccountDB.objects.num_total_accounts() or 'none')
nplyrs_reg_recent = (len(AccountDB.objects.get_recently_cr... |
class PyPyLogLexer(RegexLexer):
name = 'PyPy Log'
aliases = ['pypylog', 'pypy']
filenames = ['*.pypylog']
mimetypes = ['application/x-pypylog']
url = 'pypy.org'
version_added = '1.5'
tokens = {'root': [('\\[\\w+\\] \\{jit-log-.*?$', Keyword, 'jit-log'), ('\\[\\w+\\] \\{jit-backend-counts$', ... |
class DBEngine():
def __init__(self, fdb):
self.db = records.Database('sqlite:///{}'.format(fdb))
self.conn = self.db.get_connection()
def execute_query(self, table_id, query, *args, **kwargs):
return self.execute(table_id, query.sel_index, query.agg_index, query.conditions, *args, **kwa... |
class SetComp(ComprehensionScope):
_astroid_fields = ('elt', 'generators')
_other_other_fields = ('locals',)
elt: NodeNG
def __init__(self, lineno: int, col_offset: int, parent: NodeNG, *, end_lineno: (int | None), end_col_offset: (int | None)) -> None:
self.locals = {}
self.generators: ... |
def as_manager_hook(ctx: DynamicClassDefContext) -> None:
class_def = ClassDef(ctx.name, Block([]))
class_def.fullname = ctx.api.qualified_name(ctx.name)
info = TypeInfo(SymbolTable(), class_def, ctx.api.cur_mod_id)
class_def.info = info
assert isinstance(ctx.call.callee, MemberExpr)
assert isin... |
def main(unused_argv):
a1 = 0.2
f1 = 0.1
l1 = 0.67
pose1 = 11
a2 = 0.02
f2 = 0.1
l2 = 0.67
pose2 = 11
train_coc = 1
fix_pose = True
n_frames = 90
config = utils.load_config()
dataset = datasets.get_dataset('test', FLAGS.data_dir, config)
(model, init_variables) = ... |
class SingleContextMaxSentenceModel(MultipleContextModel):
def __init__(self, encoder: QuestionsAndParagraphsEncoder, word_embed: Optional[WordEmbedder], char_embed: Optional[CharWordEmbedder], embed_mapper: Optional[Union[(SequenceMapper, ElmoWrapper)]], sequence_encoder: SequenceEncoder, sentences_encoder: Senten... |
def save_all_the_current_info(exp_name, file_title, iter_count, var_Q_circuit, var_Q_bias, iter_reward):
file_title = ((((exp_name + '/') + file_title) + '_Iter_Count_') + str(iter_count))
with open(((file_title + '_var_Q_circuit') + '.txt'), 'wb') as fp:
pickle.dump(var_Q_circuit, fp)
with open(((f... |
def fit_ctmp_meas_mitigator(cal_data: Dict[(int, Dict[(int, int)])], num_qubits: int, generators: List[Generator]=None) -> CTMPExpvalMeasMitigator:
if (not isinstance(num_qubits, int)):
raise QiskitError('Number of qubits must be an int')
if (generators is None):
generators = standard_generator_... |
class TestPythonVersion():
def test_default_no_source(self, isolation):
config = {'project': {'name': 'My.App', 'version': '0.1.0'}}
builder = AppBuilder(str(isolation), config=config)
assert (builder.config.python_version == builder.config.python_version == builder.config.SUPPORTED_VERSIONS... |
class IDWriteFontFileStream(com.IUnknown):
_methods_ = [('ReadFileFragment', com.STDMETHOD(POINTER(c_void_p), UINT64, UINT64, POINTER(c_void_p))), ('ReleaseFileFragment', com.STDMETHOD(c_void_p)), ('GetFileSize', com.STDMETHOD(POINTER(UINT64))), ('GetLastWriteTime', com.STDMETHOD(POINTER(UINT64)))] |
def parse_args():
    special_args = [{'name': '--partition-size', 'default': '1 MiB', 'metavar': 'nbytes', 'type': parse_bytes, 'help': "Size of each partition (default '1 MiB')"}, {'name': '--in-parts', 'default': 100, 'metavar': 'n', 'type': int, 'help': "Number of input partitions (default '100')"}, {'name': ['-b', ... |
class Bottleneck(nn.Module):
def __init__(self, in_planes, out_planes, stride, groups, is_last=False):
super(Bottleneck, self).__init__()
self.is_last = is_last
self.stride = stride
mid_planes = int((out_planes / 4))
g = (1 if (in_planes == 24) else groups)
self.conv1... |
class ExtraOptsParser(unittest.TestCase):
def testExtraOptsParser(self):
os.chdir(tests_dir)
e = pynag.Parsers.ExtraOptsParser(section_name='main', config_file='dataset01/extraopts/other.ini')
self.assertEqual('other.ini', e.get('filename'))
try:
e.get('does not exist')
... |
def test_RankInvariantChecker_remove_one_alternative_forbidden():
dm = skc.datasets.load_simple_stock_selection()
dmaker = RemoveAlternativeDMaker(TOPSIS(), ['AA'], 1)
rrt1 = RankInvariantChecker(dmaker, random_state=42, allow_missing_alternatives=False)
with pytest.raises(ValueError):
rrt1.eval... |
def get_organizations(disabled=True, deleted=False):
query = User.select().where((User.organization == True), (User.robot == False))
if (not disabled):
query = query.where((User.enabled == True))
elif (not deleted):
query = query.where(User.id.not_in(DeletedNamespace.select(DeletedNamespace.... |
class GuiRemoveImplantsCommand(wx.Command):
def __init__(self, fitID, positions):
wx.Command.__init__(self, True, 'Remove Implants')
self.internalHistory = InternalCommandHistory()
self.fitID = fitID
self.positions = positions
def Do(self):
sMkt = Market.getInstance()
... |
class HoverXRefBaseDomain():
hoverxref_types = ('hoverxref', 'hoverxreftooltip', 'hoverxrefmodal')
def _inject_hoverxref_data(self, env, refnode, typ):
from .extension import CSS_CLASSES, CSS_DEFAULT_CLASS
classes = [CSS_DEFAULT_CLASS]
type_class = None
if (typ == 'hoverxreftoolt... |
def main():
data_provider = daily_data_provider
prices_tms = data_provider.get_price(DummyTicker('AAA'), PriceField.Close, start_date, end_date)
marker_props = {'alpha': 0.5}
stemline_props = {'linestyle': '-.', 'linewidth': 0.2}
baseline_props = {'visible': False}
color = 'red'
marker_props... |
@with_fixtures(WebFixture, ConstraintRenderingFixture)
def test_remote_constraints(web_fixture, constraint_rendering_fixture):
fixture = constraint_rendering_fixture
class MyRemoteConstraint(RemoteConstraint):
def validate_input(self, unparsed_input):
if (unparsed_input == 'failing_string_value')... |
@patch('python_ta.tokenize.open', side_effect=IndentationError)
def test_pre_check_log_indentation_error(_, caplog) -> None:
_verify_pre_check('', False)
assert ('python_ta could not check your code due to an indentation error at line' in caplog.text)
assert ('ERROR' == caplog.records[0].levelname) |
class Solution(object):
def connect(self, root):
if (root is None):
return
nodes = [root]
while (len(nodes) != 0):
next_step = []
last = None
for node in nodes:
if (last is not None):
last.next = node
... |
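The loop above threads `next` pointers left-to-right within each level; assuming the truncated tail appends each node's children to next_step and then advances to it, a minimal check with a hypothetical node type:

```python
class Node:  # hypothetical; the snippet's node class isn't shown
    def __init__(self, val):
        self.val, self.left, self.right, self.next = val, None, None, None

root, left, right = Node(1), Node(2), Node(3)
root.left, root.right = left, right
Solution().connect(root)
assert left.next is right and right.next is None
```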
class GeneralizedRCNN(nn.Module):
def __init__(self, backbone, rpn, roi_heads, transform):
super(GeneralizedRCNN, self).__init__()
self.transform = transform
self.backbone = backbone
self.rpn = rpn
self.roi_heads = roi_heads
self._has_warned = False
    @torch.jit.unused
de... |
def main(model, config):
set_seed(config.seed)
device = torch.device(config.device)
if (not os.path.exists(config.checkpoint_dir)):
os.mkdir(config.checkpoint_dir)
config.config_save = os.path.join(config.checkpoint_dir, ((model + config.experimental_stuff) + '_config.pt'))
config.model_save... |
def simpleTokenize(text):
splitPunctText = splitEdgePunct(text)
textLength = len(splitPunctText)
bads = []
badSpans = []
for match in Protected.finditer(splitPunctText):
if (match.start() != match.end()):
bads.append([splitPunctText[match.start():match.end()]])
badSpa... |
def parse1(f):
for line in f:
line = line.rstrip(b'\r\n')
m = re.match(b'\\s*(#(.+)|B(\\d\\d\\d)F(\\d\\d(-\\d\\d)?)\\s+(([^:]+):\\s*)?(.*))', line)
if m:
if m.group(2):
pass
elif m.group(3):
block = m.group(3)
field = m.... |
class DLCDecrypter(object):
KEY = b'cb99b5cbc24db398'
IV = b'9bc24cb995cb8db3'
API_URL = '
def __init__(self, plugin):
self.plugin = plugin
def decrypt(self, data):
if (not isinstance(data, bytes)):
raise TypeError('data must be bytes.')
data = data.strip()
... |
@dataclass
class ModelArguments():
model_name_or_path: str = field(metadata={'help': 'Path to pretrained model or model identifier from huggingface.co/models'})
config_name: Optional[str] = field(default=None, metadata={'help': 'Pretrained config name or path if not the same as model_name'})
tokenizer_name: Optional[s... |
class InputNormalize(ch.nn.Module):
def __init__(self, new_mean, new_std):
super(InputNormalize, self).__init__()
new_std = new_std[(..., None, None)]
new_mean = new_mean[(..., None, None)]
self.register_buffer('new_mean', new_mean)
self.register_buffer('new_std', new_std)
... |
def _get_sensor_angles(data_arr: xr.DataArray) -> tuple[(xr.DataArray, xr.DataArray)]:
preference = satpy.config.get('sensor_angles_position_preference', 'actual')
(sat_lon, sat_lat, sat_alt) = get_satpos(data_arr, preference=preference)
area_def = data_arr.attrs['area']
chunks = _geo_chunks_from_data_a... |
def parse(code, args, compile_opt):
(code, constants) = extract_constant(code)
names = re.findall('[0-9a-zA-Z_]+', code)
code = space_parts(code, names)
constants_names = [const[0] for const in constants]
new_code = []
ordered_constants = []
variables = []
typeCounts = defaultdict((lambd... |
def evaluation(model, eval_data_loaders, epoch, writer, device):
if (writer and (device == 0)):
for (eval_data_name, eval_data_loader) in eval_data_loaders:
handle_dict = {'job_dir': writer.get_logdir(), 'epoch': epoch, 'eval_data_name': eval_data_name}
(psnr, psnr_y, ssim, speed) = ... |
class ClassDispatcher(Generic[(K_co, V)]):
__slots__ = ('_mapping',)
def __init__(self, mapping: Optional[Mapping[(Type[K_co], V)]]=None):
self._mapping: Dict[(Type[K_co], V)] = ({} if (mapping is None) else dict(mapping))
def dispatch(self, key: Type[K_co]) -> V:
for parent in key.__mro__:
... |
class FIR2Data(Block):
_format = [E(2, 16, 'e15.8'), E(18, 32, 'e15.8'), E(34, 48, 'e15.8'), E(50, 64, 'e15.8'), E(66, 80, 'e15.8')]
factors = List.T(Float.T())
def values(self):
return (self.factors + ([None] * (5 - len(self.factors))))
    @classmethod
    def deserialize(cls, line, version_dialect):
facto... |
def test_cannot_update_schedule_if_submission_doesnt_have_a_matching_schedule(submission_factory, graphql_client, user, mocker):
mock_event = mocker.patch('api.schedule.mutations.send_new_schedule_invitation_answer')
graphql_client.force_login(user)
submission = submission_factory(speaker_id=user.id)
re... |
def _test():
import torch
pretrained = False
models = [seresnext50_32x4d, seresnext101_32x4d, seresnext101_64x4d]
for model in models:
net = model(pretrained=pretrained)
net.eval()
weight_count = _calc_width(net)
print('m={}, {}'.format(model.__name__, weight_count))
... |
class BaseRwEmbeddingSharding(EmbeddingSharding[(C, F, T, W)]):
def __init__(self, sharding_infos: List[EmbeddingShardingInfo], env: ShardingEnv, device: Optional[torch.device]=None, need_pos: bool=False, qcomm_codecs_registry: Optional[Dict[(str, QuantizedCommCodecs)]]=None) -> None:
super().__init__(qcomm... |
@patch('pypyr.venv.EnvBuilderWithExtraDeps')
def test_venv_dsl_mapping_list_of_str_error_on_create(mock_builder):
context = get_simple_context()
mocked_builder = mock_builder.return_value
mocked_builder.context = context
mocked_builder.create.side_effect = env_builder_create_mock
step = VenvCreatorStep.fr... |
def _lock_add(caller, lock, **kwargs):
locks = _caller_locks(caller)
try:
(locktype, lockdef) = lock.split(':', 1)
except ValueError:
return "Lockstring lacks ':'."
locktype = locktype.strip().lower()
if ('delete' in kwargs):
try:
ind = locks.index(lock)
... |
class Sectioned():
_sample = textwrap.dedent('\n [sec1]\n # comments ignored\n a = 1\n b = 2\n\n [sec2]\n a = 2\n ').lstrip()
    @classmethod
    def section_pairs(cls, text):
return (section._replace(value=Pair.parse(section.value)) for section in cls.read(text, filter_=cls... |
def get_extrap_val(xqextrap, y, extrap):
shape = (*y.shape[:(- 1)], xqextrap.shape[(- 1)])
dtype = xqextrap.dtype
device = xqextrap.device
if ((extrap is None) or (extrap == 'nan')):
return (torch.empty(shape, dtype=dtype, device=device) * float('nan'))
elif (isinstance(extrap, int) or isins... |
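For the None/'nan' branch above, the result is an all-NaN tensor shaped like y with its last axis replaced by xqextrap's; a minimal sketch with hypothetical tensors:

```python
import torch

y = torch.zeros(2, 3, 5)   # data sampled at 5 grid points
xq = torch.zeros(2, 3, 7)  # 7 query points outside the grid
out = get_extrap_val(xq, y, None)
assert out.shape == (2, 3, 7) and torch.isnan(out).all()
```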
class WxUDevMonitorObserver(MonitorObserver):
_action_event_map = {'add': DeviceAddedEvent, 'remove': DeviceRemovedEvent, 'change': DeviceChangedEvent, 'move': DeviceMovedEvent}
def __init__(self, monitor):
MonitorObserver.__init__(self, monitor)
import warnings
warnings.warn('Will be re... |
class DBMaterial():
def __init__(self, filename, interpolation_points=100, empty=False):
self.refractiveIndex = None
self.extinctionCoefficient = None
self.points = interpolation_points
if empty:
return
f = open(filename)
try:
material = yaml.s... |
def timeout_for_setup_and_call(item):
def report():
gevent.util.print_run_info()
raise RetryTestError(f'Setup and Call timeout >{item.timeout_setup_and_call}s')
def handler(signum, frame):
report()
signal.signal(signal.SIGALRM, handler)
item.remaining_timeout = item.timeout_setup... |
def FMNIST(train=False, batch_size=None, augm_flag=False, val_size=None):
if (batch_size == None):
if train:
batch_size = train_batch_size
else:
batch_size = test_batch_size
transform_base = [transforms.ToTensor()]
transform_train = transforms.Compose(([transforms.Ran... |
class ProjectedResidualLayer(Mapper):
def __init__(self, other: Union[(Mapper, SequenceMapper)]):
self.other = other
def apply(self, is_train, x, mask=None):
out = self.other.apply(is_train, x, mask)
w = tf.get_variable('project_w', (x.shape.as_list()[(- 1)], out.shape.as_list()[(- 1)]))... |
def get_model(p, pretrain_path=None):
if (p['backbone'] == 'resnet18'):
if (p['train_db_name'] in ['cifar-10', 'cifar-20']):
from models.resnet_cifar import resnet18
backbone = resnet18()
elif (p['train_db_name'] == 'stl-10'):
from models.resnet_stl import resnet1... |
def gather(data, dst=0, group=None, append=False):
if (get_world_size() == 1):
return [data]
if (group is None):
group = _get_global_gloo_group()
if (dist.get_world_size(group=group) == 1):
return [data]
rank = dist.get_rank(group=group)
tensor = _serialize_to_tensor(data, gr... |
@raise_on_failure
@pytest.mark.parametrize('number_of_nodes', [3])
@pytest.mark.parametrize('enable_rest_api', [True])
@pytest.mark.parametrize('number_of_tokens', [2])
def test_payment_events_endpoints(api_server_test_instance: APIServer, raiden_network: List[RaidenService], token_addresses, pfs_mock):
(app0, app1, app2) = raiden_network
token_address... |
def prune_state_dict(state_dict, args):
if ((not args) or (args.arch == 'ptt_transformer')):
return state_dict
encoder_layers_to_keep = (args.encoder_layers_to_keep if ('encoder_layers_to_keep' in vars(args)) else None)
decoder_layers_to_keep = (args.decoder_layers_to_keep if ('decoder_layers_to_kee... |
class MobilePandaDualArmDefaultConfig():
def __init__(self) -> None:
self.urdf_path = '{PACKAGE_ASSET_DIR}/descriptions/mobile_panda_dual_arm.urdf'
self.urdf_config = dict(_materials=dict(gripper=dict(static_friction=2.0, dynamic_friction=2.0, restitution=0.0)), link=dict(right_panda_leftfinger=dict... |
class _TestPolygons():
def test1(self):
lines = [[(0, 0), (4, 4), (5, 4), (1, 0), (0, 0)], [(1, 0), (5, 4), (6, 4), (2, 0), (1, 0)]]
shapes = []
for line in lines:
x = [v[0] for v in line]
y = [v[1] for v in line]
rec = {}
rec['BBOX Xmin'] = mi... |
@pytest.mark.parametrize('plink_in, fam_sep', [(example_dataset_1, '\t'), (example_dataset_2, ' '), (example_dataset_3, ' ')])
def test_zarr_to_plink(shared_datadir, tmp_path, plink_in, fam_sep):
zarr_path = (tmp_path / 'plink.zarr')
plink_to_zarr(path=(shared_datadir / plink_in), output=zarr_path, fam_sep=fam_sep)
pat... |
@dataclass(frozen=True)
class ExpectedRequest():
    verb: str
    path: str
    @classmethod
    def from_request(cls, request):
        return cls(request.verb, request.path)
def __eq__(self, other):
if isinstance(other, (Request, ExpectedRequest)):
return ((self.verb == other.verb) and (self.path == other.path))
... |
def test_fileread_binary_true():
context = Context({'fileRead': {'path': '/arb', 'key': 'out', 'binary': True}})
with patch('pypyr.steps.fileread.open', mock_open(read_data=b'12345')) as mocked_open:
fileread.run_step(context)
assert (context['out'] == b'12345')
mocked_open.assert_called_once_wi... |
class DenseNet(nn.Module):
def __init__(self, block, nblocks, growth_rate=12, reduction=0.5, num_classes=10):
super(DenseNet, self).__init__()
self.growth_rate = growth_rate
num_planes = (2 * growth_rate)
self.conv1 = nn.Conv2d(3, num_planes, kernel_size=3, padding=1, bias=False)
... |
def main():
parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments))
if ((len(sys.argv) == 2) and sys.argv[1].endswith('.json')):
(model_args, data_args, training_args) = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
else:
(model_args, data_args,... |
def make_wandb_name(cfg):
dataset_name = cfg.dataset.format
if dataset_name.startswith('OGB'):
dataset_name = dataset_name[3:]
if dataset_name.startswith('PyG-'):
dataset_name = dataset_name[4:]
if (dataset_name in ['GNNBenchmarkDataset', 'TUDataset']):
dataset_name = ''
if (... |
class QuantEmbeddingCollectionSharder(BaseQuantEmbeddingSharder[QuantEmbeddingCollection]):
def shard(self, module: QuantEmbeddingCollection, params: Dict[(str, ParameterSharding)], env: ShardingEnv, device: Optional[torch.device]=None) -> ShardedQuantEmbeddingCollection:
fused_params = (self.fused_params i... |
class ContextBlock(nn.Module):
def __init__(self, inplanes, ratio, pooling_type='att', fusion_types=('channel_add',)):
super(ContextBlock, self).__init__()
assert (pooling_type in ['avg', 'att'])
assert isinstance(fusion_types, (list, tuple))
valid_fusion_types = ['channel_add', 'cha... |
@pytest.mark.parametrize('username,password', users)
def test_list(db, client, username, password):
client.login(username=username, password=password)
url = reverse(urlnames['list'])
response = client.get(url)
assert (response.status_code == status_map['list'].get(username, status_map['list']['default'])), response... |
class ZeroOrOne(_BaseChildElement):
def populate_class_members(self, element_cls: MetaOxmlElement, prop_name: str) -> None:
super(ZeroOrOne, self).populate_class_members(element_cls, prop_name)
self._add_getter()
self._add_creator()
self._add_inserter()
self._add_adder()
... |
def validate(val_loader, model, step, count):
print('Step {}: start validation ...'.format(step))
model.eval()
start_time = time.time()
results = {}
with torch.no_grad():
for (task, loader) in val_loader.items():
if task.startswith('mlm'):
val_log = validate_mlm(m... |
def after_branch_increfs(label: BasicBlock, pre_live: AnalysisDict[Value], pre_borrow: AnalysisDict[Value], source_borrowed: set[Value], ordering: dict[(Value, int)]) -> tuple[(Value, ...)]:
target_pre_live = pre_live[(label, 0)]
target_borrowed = pre_borrow[(label, 0)]
incref = ((source_borrowed - target_b... |
def sample_sawyer_multiple_objects():
size = 0.1
low = np.array([(- size), (0.4 - size), 0])
high = np.array([size, (0.4 + size), 0.1])
env = MultiSawyerEnv(do_render=False, finger_sensors=False, num_objects=1, object_meshes=None, fix_z=True, fix_gripper=True, fix_rotation=True, cylinder_radius=0.03, ma... |
class _QCBase():
def to_dict(self) -> dict[(str, Any)]:
def filter_none(d: list[tuple[(str, Any)]]) -> dict[(str, Any)]:
return {k: v for (k, v) in d if (v is not None)}
return asdict(self, dict_factory=filter_none)
    @classmethod
    def from_dict(cls, data: dict[(str, Any)]) -> _QCBase:
retur... |
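The dict_factory hook in to_dict above strips None-valued fields from asdict's output; a sketch with a hypothetical dataclass subclass:

```python
from dataclasses import dataclass
from typing import Optional

@dataclass
class Job(_QCBase):  # hypothetical subclass for illustration
    name: str
    memory: Optional[int] = None

Job('opt').to_dict()  # -> {'name': 'opt'}; the None field is dropped
```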
def _get_expected_game_changes_text(rb_damage_mode: DreadRavenBeakDamageMode):
if (rb_damage_mode == DreadRavenBeakDamageMode.UNMODIFIED):
return ['Open Hanubia Shortcut, Easier Path to Itorash in Hanubia', 'Raven Beak Damage: Unmodified', 'Power Bomb Limitations']
elif (rb_damage_mode == DreadRavenBeak... |
@dataclass(frozen=True)
class ContractSendChannelUpdateTransfer(ContractSendExpirableEvent):
    balance_proof: BalanceProofSignedState
    @property
    def token_network_address(self) -> TokenNetworkAddress:
        return self.balance_proof.canonical_identifier.token_network_address
    @property
    def channel_identifier(self) -> ChannelID:
re... |
def assert_package_metadata(test_metadata, ref_metadata):
assert (test_metadata.package_version != '')
assert isinstance(test_metadata.apps, list)
assert isinstance(test_metadata.app_paths, list)
test_metadata_replaced = test_metadata._replace(apps=sorted(test_metadata.apps), app_paths=sorted(test_metad... |
def make_markdown_table(lines):
if ((lines is None) or (len(lines) == 0)):
return ''
col_widths = {key: len(str(key)) for key in lines[0].keys()}
for line in lines:
for (key, value) in line.items():
if (col_widths[key] < len(_maybe_round(value))):
col_widths[key] ... |
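The pass above widens each column to its longest rendered cell; assuming the snippet's _maybe_round helper renders these values verbatim, the state it computes for a small input:

```python
lines = [{'name': 'adam', 'lr': 0.001}, {'name': 'sgd', 'lr': 0.1}]
# After the width pass: col_widths == {'name': 4, 'lr': 5}
# ('name' keeps its header width 4; 'lr' grows to len('0.001') == 5)
```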
class GroupObj_TestCase(DevelPackagesBase):
def runTest(self):
self.assertLess(Group('A'), Group('B'))
self.assertLessEqual(Group('A'), Group('B'))
self.assertLessEqual(Group('A'), Group('A'))
self.assertEqual(Group('A'), Group('A'))
self.assertNotEqual(Group('A'), Group('B')... |
class CosineLRWithRestarts(object):
def __init__(self, optimizer, batch_size, epoch_size, restart_period=100, t_mult=2, last_epoch=(- 1), eta_threshold=1000, verbose=False):
if (not isinstance(optimizer, Optimizer)):
raise TypeError('{} is not an Optimizer'.format(type(optimizer).__name__))
... |
def scope_done(scope, flowview):
log.debug('checking scope %s on view with offset %s', scope, flowview.offset)
result = True
bookkeeper = jsonpointer.JsonPointer(scope).resolve(flowview.bookkeeper)
    for (k, v) in bookkeeper.items():
        if (k == '_meta'):... |
def test_activate_controller_action():
aca = OSC.ActivateControllerAction(True, True)
prettyprint(aca.get_element(), None)
aca2 = OSC.ActivateControllerAction(True, True)
aca3 = OSC.ActivateControllerAction(True, False)
assert (aca == aca2)
assert (aca != aca3)
aca4 = OSC.ActivateControllerA... |
def test_constant_doping():
from solcore import material, si
from solcore.structure import Junction, Layer
from solcore.sesame_drift_diffusion.process_structure import process_structure
from solcore.state import State
Si_n = material('Si')(Nd=1e+24, electron_minority_lifetime=1e-06, hole_minority_li... |
@hookimpl(trylast=True)
def pytask_collect_node(session: Session, path: Path, node_info: NodeInfo) -> PNode:
node = node_info.value
if isinstance(node, PythonNode):
node.node_info = node_info
if (not node.name):
node.name = create_name_of_python_node(node_info)
return node
if (isi... |