code stringlengths 281 23.7M |
|---|
def visit_path(path: Path, *, filter: Callable[([Path], bool)], recurse: Callable[([Path], bool)]) -> Iterator[Path]:
for (dirpath, dirnames, filenames) in os.walk(path):
dirnames[:] = [x for x in dirnames if recurse(Path(dirpath, x))]
for name in chain(dirnames, filenames):
p = Path(dir... |
class Point(object):
def __init__(self, index: int, token: str, offset: int=0):
self.point_index = index
self.token = token
self.offset = offset
def __str__(self):
return ('(%d:%s[%d])' % (self.point_index, self.token, self.offset))
def __repr__(self):
return str(self... |
def test_multi_truth_table():
state = [ZeroState(), OneState()]
eff = [ZeroEffect(), OneEffect()]
n = 4
rs = np.random.RandomState(52)
all_cvs = rs.choice([0, 1], size=(2, n))
ctrl_strings = rs.choice([0, 1], size=(10, n))
for cvs in all_cvs:
for ctrl_string in ctrl_strings:
... |
def main(args, config):
utils.init_distributed_mode(args)
device = torch.device(args.device)
world_size = utils.get_world_size()
seed = (args.seed + utils.get_rank())
torch.manual_seed(seed)
np.random.seed(seed)
random.seed(seed)
cudnn.benchmark = True
print('Creating model', flush=T... |
def evaluate(args, model, tokenizer, prefix=''):
(dataset, examples, features) = load_and_cache_examples(args, tokenizer, evaluate=True, output_examples=True)
if ((not os.path.exists(args.output_dir)) and (args.local_rank in [(- 1), 0])):
os.makedirs(args.output_dir)
args.eval_batch_size = (args.per... |
def test_slice_inference_in_for_loops() -> None:
node = extract_node('\n for a, (c, *b) in [(1, (2, 3, 4)), (4, (5, 6))]:\n b #\n ')
inferred = next(node.infer())
assert isinstance(inferred, nodes.List)
assert (inferred.as_string() == '[3, 4]')
node = extract_node('\n for a, *b in [(1... |
def test_star_schedule_item(submission_factory, graphql_client, user, schedule_item_factory, slot_factory, day_factory):
graphql_client.force_login(user)
submission = submission_factory()
schedule_item = schedule_item_factory(status=ScheduleItem.STATUS.confirmed, speaker_invitation_notes='notes', submission... |
def generate_code(human_input):
system_prompt_template = 'You are expert coder Jon Carmack. Use the provided design context to create idomatic HTML/CSS code as possible based on the user request.\n Everything must be inline in one file and your response must be directly renderable by the browser.'
human_prom... |
class Migration(migrations.Migration):
    """Add the optional ``tip`` foreign key (to ``frontend.Tip``) on the
    ``issue`` model in the ``frontend`` app."""

    # Must run after the latest frontend and digest schema states.
    dependencies = [
        ('frontend', '0002_auto__1801'),
        ('digest', '0020_auto__0554'),
    ]

    operations = [
        migrations.AddField(
            model_name='issue',
            name='tip',
            field=models.ForeignKey(
                verbose_name='',
                null=True,
                on_delete=models.CASCADE,
                blank=True,
                to='frontend.Tip',
            ),
        ),
    ]
def _scan_files(library_path: str, filecount: int) -> Tuple[(int, int)]:
last_update = time.time()
files_scanned = 0
files_added = 0
for (dirpath, _, filenames) in os.walk(library_path):
if (os.path.abspath(dirpath) == os.path.abspath(conf.SONGS_CACHE_DIR)):
continue
now = ti... |
def create(win_id: int, private: bool, parent: QWidget=None) -> 'AbstractTab':
mode_manager = modeman.instance(win_id)
if (objects.backend == usertypes.Backend.QtWebEngine):
from qutebrowser.browser.webengine import webenginetab
tab_class: Type[AbstractTab] = webenginetab.WebEngineTab
elif (... |
def canonical_form(xsys, form='reachable'):
if (form == 'reachable'):
return reachable_form(xsys)
elif (form == 'observable'):
return observable_form(xsys)
elif (form == 'modal'):
return modal_form(xsys)
else:
raise ControlNotImplemented(("Canonical form '%s' not yet impl... |
def print_clusters_to_file(clustering_output, filename):
num_clusters = len(dict(Counter(clustering_output.labels_)).keys())
clusters = clustering_output.labels_.tolist()
output = open(filename, 'w')
for k in range((- 1), num_clusters):
output_json = {}
indices = [i for (i, x) in enumera... |
def get_matches(cur_name, train_names):
matches = []
for (m, detect_name_list) in enumerate(train_names):
dist = 1
for detect_name in detect_name_list:
dist = min(dist, huristic_dist(cur_name, detect_name))
matches.append((dist, m, detect_name_list))
matches = sorted(matc... |
class NIMA(nn.Module):
def __init__(self, base_model, num_classes=10):
super(NIMA, self).__init__()
self.features = base_model.features
self.classifier = nn.Sequential(nn.Dropout(p=0.75), nn.Linear(in_features=25088, out_features=num_classes), nn.Softmax())
def forward(self, x):
... |
.pydicom
def test_pretty_patient_name_all_params():
ds = pydicom.Dataset()
ds.PatientName = 'last^first^middle^^hon'
pretty_name_param_expected_combos = (({'surname_first': True, 'capitalise_surname': True, 'include_honorific': True}, 'Hon. LAST, First Middle'), ({'surname_first': True, 'capitalise_surname'... |
class CuContext():
def __init__(self, device, context):
self._device = device
self._context = context
def setVerbosity(level):
pass
def Open(device_id=0, flags=0, verbose=None):
_device = c_void_p(456)
_context = c_void_p(123)
return CuContext(_device, _contex... |
def test_plugin_dependencies_unmet(hatch, helpers, temp_dir, config_file, mock_plugin_installation):
config_file.model.template.plugins['default']['tests'] = False
config_file.save()
project_name = 'My.App'
with temp_dir.as_cwd():
result = hatch('new', project_name)
assert (result.exit_code ... |
class TDirectoryTree(TestCase):
if (os.name == 'nt'):
ROOTS = [get_home_dir(), 'C:\\']
else:
ROOTS = [get_home_dir(), '/']
def setUp(self):
quodlibet.config.init()
def tearDown(self):
quodlibet.config.quit()
def test_initial(self):
if (os.name == 'nt'):
... |
class Slab():
def __init__(self, type: SlabType) -> None:
self.type = type
self.byte_ranges: List[Tuple[(int, int)]] = []
self.buffer_stagers: List[BufferStager] = []
self.location: str = os.path.join('batched', str(uuid.uuid4()))
self.sz_bytes: int = 0
def add_buffer_sta... |
class TestMdStatCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('MdStatCollector', {'interval': 10})
self.collector = MdStatCollector(config, None)
def test_import(self):
self.assertTrue(MdStatCollector)
('__builtin__.open')
('os.access', Mock(return_... |
def test_session_scope(fixture_path):
result = fixture_path.runpytest('-v')
result.assert_outcomes(passed=10, failed=0)
result.stdout.fnmatch_lines(['test_classes.py::Test1::test_one PASSED', 'test_classes.py::Test2::test_one PASSED', 'test_classes.py::test_one PASSED', 'test_functions1.py::test1_one PASSED... |
def main():
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--type', choices=['all', 'selected'], default='all', help='type')
args = parser.parse_args()
pybullet_planning.connect(use_gui=False)
if (args.type == 'selected'):
class_... |
def get_senet(blocks, model_name=None, pretrained=False, root=os.path.join('~', '.torch', 'models'), **kwargs):
if (blocks == 16):
layers = [1, 1, 1, 1]
cardinality = 32
elif (blocks == 28):
layers = [2, 2, 2, 2]
cardinality = 32
elif (blocks == 40):
layers = [3, 3, 3... |
class XlibContext(Context):
def __init__(self, config, share):
super().__init__(config, share)
self.x_display = config.canvas.display._display
self.glx_context = self._create_glx_context(share)
if (not self.glx_context):
raise gl.ContextException('Could not create GL cont... |
def copy_vison_model_and_projection(hf_model, pt_model):
hf_model.visual_projection.weight.data = pt_model.visual.proj.data.T
copy_linear(hf_model.vision_model.pre_layrnorm, pt_model.visual.ln_pre)
copy_linear(hf_model.vision_model.post_layernorm, pt_model.visual.ln_post)
hf_model.vision_model.embedding... |
def test_cli_async_map_then_apply(runner, reactor, server):
base_url = '
in_stream = ''.join((base_url.format(i) for i in ([1, 2, 3, 4, 5] * 9)))
args = ['-m', 'mario', '--max-concurrent', '100', 'async-map', 'await asks.get(x) ! x.json()', 'filter', 'x["id"] % 6 == 0', 'map', "x['id']", 'apply', 'max(x)']
... |
class Monitorable(sa.Attributes):
def __init__(self):
self._attr = super(Monitorable, self)
self._attr.__init__()
def _check(self):
if (not hasattr(self, '_adaptor')):
raise se.IncorrectState('object is not fully initialized')
('Monitorable')
(rus.list_of(str))
de... |
class GetNotificationAttributes():
id: int
get_positive_action: bool
get_negative_action: bool
def to_list(self) -> List[int]:
msg = struct.pack('<BIBBHBH', CommandID.GetNotificationAttributes, self.id, NotificationAttributeID.AppIdentifier, NotificationAttributeID.Title, USHORT_MAX, Notificatio... |
def parse_calendar_date_formats(data, calendar):
date_formats = data.setdefault('date_formats', {})
for format in calendar.findall('dateFormats'):
for elem in format:
if (elem.tag == 'dateFormatLength'):
type = elem.attrib.get('type')
if _should_skip_elem(elem... |
class ExchangeBase(Logger):
def __init__(self, on_quotes, on_history):
Logger.__init__(self)
self.history = {}
self.quotes = {}
self.on_quotes = on_quotes
self.on_history = on_history
async def get_raw(self, site, get_string):
url = ''.join([' site, get_string])
... |
def create_HLa(kaldi_root: Path, fst_dir: Path, unique_label: str, h_graph: Path, l_graph: Path, disambig_in_words_file_int: Path) -> Path:
hla_graph = (fst_dir / f'HLa.{unique_label}.fst')
if (not hla_graph.exists()):
logger.info(f'Creating {hla_graph}')
fsttablecompose = (kaldi_root / 'src/fst... |
class Migration(migrations.Migration):
dependencies = [('options', '0022_cascade_options')]
operations = [migrations.RunPython(run_data_migration), migrations.AlterField(model_name='option', name='uri_prefix', field=models.URLField(help_text='The prefix for the URI of this option.', max_length=256, verbose_name... |
class _Operators():
def __init__(self):
super().__init__()
def operator_random(self, start, end):
return random.randint(start, end)
def operator_mathop(self, operator, number):
ops = {'abs': abs, 'floor': math.floor, 'ceiling': math.ceil, 'sqrt': math.sqrt, 'sin': (lambda n: math.sin... |
()
def blank_pickup(echoes_pickup_database, default_generator_params) -> PickupEntry:
return PickupEntry(name='Blank Pickup', model=PickupModel(game=RandovaniaGame.METROID_PRIME_ECHOES, name='EnergyTransferModule'), pickup_category=echoes_pickup_database.pickup_categories['suit'], broad_category=echoes_pickup_datab... |
class MetaModel(nn.Module):
def __init__(self, args, dropout=0.2, mode='meta'):
super().__init__()
self.args = args
self.mode = mode
self.init_backbone()
self.base_learner = BaseLearner(args, self.z_dim)
self.update_lr = self.args.base_lr
self.update_step = se... |
def compute_recall_precision(gt_polys, pred_polys):
assert isinstance(gt_polys, list)
assert isinstance(pred_polys, list)
gt_num = len(gt_polys)
det_num = len(pred_polys)
sz = [gt_num, det_num]
recall = np.zeros(sz)
precision = np.zeros(sz)
for gt_id in range(gt_num):
for pred_id... |
class MessageLogger():
def __init__(self, opt, start_iter=1, tb_logger=None):
self.exp_name = opt['name']
self.interval = opt['logger']['print_freq']
self.start_iter = start_iter
self.max_iters = opt['train']['total_iter']
self.use_tb_logger = opt['logger']['use_tb_logger']
... |
def DFCASCI(mf_or_mol, ncas, nelecas, auxbasis=None, ncore=None):
from pyscf import gto
from pyscf import scf
if isinstance(mf_or_mol, gto.MoleBase):
mf = mf_or_mol.RHF().density_fit()
else:
mf = mf_or_mol
if isinstance(mf, scf.uhf.UHF):
mf = mf.to_rhf()
if mf.mol.symmetr... |
def filter_vqa_result(data, result):
dataset_items = ['vqa', 'caption', 'action']
data_dict = {'frame_num': [], 'input': [], 'instruction': [], 'output': [], 'route_descriptors': [], 'vehicle_descriptors': [], 'pedestrian_descriptors': [], 'ego_vehicle_descriptor': []}
for d in data:
if ('vqa' in da... |
class Deserializer():
def __init__(self, raw):
self._raw = raw
self._buffer = bytearray()
self._data = None
self._index = 0
self._length = len(raw)
self._setup()
def run(self):
containers = list()
self._run(containers)
return containers[0]
... |
class GDIPlusDecoder(ImageDecoder):
def get_file_extensions(self):
return ['.bmp', '.gif', '.jpg', '.jpeg', '.exif', '.png', '.tif', '.tiff']
def get_animation_file_extensions(self):
return ['.gif']
def _load_bitmap(self, filename, file):
data = file.read()
hglob = kernel32.G... |
def copy_vector(v, env, cont):
from pycket.interpreter import return_value
if isinstance(v, values_vector.W_Vector):
return return_value(v._make_copy(immutable=True), env, cont)
len = v.length()
if (not len):
vector = values_vector.W_Vector.fromelements([])
return return_value(ve... |
class AssetFinderMultipleCountries(WithTradingCalendars, ZiplineTestCase):
def write_assets(self, **kwargs):
self._asset_writer.write(**kwargs)
def init_instance_fixtures(self):
super(AssetFinderMultipleCountries, self).init_instance_fixtures()
conn = self.enter_instance_context(empty_as... |
class PixelDiscriminator(nn.Module):
def __init__(self, input_nc, ndf=64, norm_layer=nn.BatchNorm2d, use_sigmoid=False):
super(PixelDiscriminator, self).__init__()
if (type(norm_layer) == functools.partial):
use_bias = (norm_layer.func == nn.InstanceNorm2d)
else:
use_... |
def test_write_two_disjoint_slices():
class Top(ComponentLevel3):
def construct(s):
s.A = Wire(Bits32)
def up_wr_0_16():
s.A[0:16] = Bits16(255)
def up_wr_16_30():
s.A[16:30] = Bits14(255)
def up_rd_12_30():
asse... |
def load_state_dict(checkpoint_path: str, map_location='cpu'):
checkpoint = torch.load(checkpoint_path, map_location=map_location)
if (isinstance(checkpoint, dict) and ('state_dict' in checkpoint)):
state_dict = checkpoint['state_dict']
else:
state_dict = checkpoint
state_dict_new = stat... |
class _1_0SetScreenConfig(rq.ReplyRequest):
_request = rq.Struct(rq.Card8('opcode'), rq.Opcode(2), rq.RequestLength(), rq.Drawable('drawable'), rq.Card32('timestamp'), rq.Card32('config_timestamp'), rq.Card16('size_id'), rq.Card16('rotation'))
_reply = rq.Struct(rq.ReplyCode(), rq.Card8('status'), rq.Card16('se... |
def assert_version(repo: Repo, expected_revision: str) -> None:
version = PyProjectTOML(path=Path(repo.path).joinpath('pyproject.toml')).poetry_config['version']
revision = Git.get_revision(repo=repo)
assert (revision == expected_revision)
assert (revision in REVISION_TO_VERSION_MAP)
assert (version... |
def main():
assert (torch.cuda.device_count() == 1)
opts = parse_args()
mkdir2(opts.out_dir)
vis_out = bool(opts.vis_dir)
if vis_out:
mkdir2(opts.vis_dir)
db = COCO(opts.annot_path)
class_names = [c['name'] for c in db.dataset['categories']]
n_class = len(class_names)
coco_ma... |
class CustomHelpCommand(HelpCommand):
def __init__(self):
super().__init__(command_attrs={'help': 'Shows help for bot commands'})
_output(destination_channel=Channels.bot_commands, bypass_roles=STAFF_PARTNERS_COMMUNITY_ROLES)
async def command_callback(self, ctx: Context, *, command: (str | None)=No... |
def reset_usb(reset_names):
if (shutil.which('usbreset') is None):
current_dir = os.path.dirname(os.path.realpath(__file__))
res = subprocess.call(f'gcc {current_dir}/usbreset.c -o /usr/local/bin/usbreset')
if (not (res == 0)):
rospy.logerr(f'usbreset install exit code: {res}')
... |
def compare_records(res1: list, res2: list, rel_tol: float=0.0001, abs_tol: float=0) -> None:
assert (len(res1) == len(res2)), f'len(res1) = {len(res1)}, len(res2) = {len(res2)}'
for i in range(len(res2)):
res1_item = res1[i]
res2_item = res2[i]
if (isinstance(res1_item, list) and isinst... |
class WeeklyLogFile(logfile.DailyLogFile):
day_rotation = 7
def shouldRotate(self):
now = self.toDate()
then = self.lastDate
return ((now[0] > then[0]) or (now[1] > then[1]) or (now[2] > (then[2] + self.day_rotation)))
def suffix(self, tupledate):
try:
return '_'.... |
class Pre_Given_Nochroot_TestCase(ParserTest):
    """Check that a kickstart %pre section rejects the --nochroot flag."""

    def __init__(self, *args, **kwargs):
        ParserTest.__init__(self, *args, **kwargs)
        # Kickstart input under test: %pre with an option it does not accept.
        self.ks = '\n%pre --nochroot\nls /tmp\n%end\n'

    def runTest(self):
        # Parsing must fail with a KickstartParseError.
        with self.assertRaises(KickstartParseError):
            self.parser.readKickstartFromString(self.ks)
class PyFile():
def __init__(self, manager, id, url, name, size, status, error, pluginname, package, order):
self.m = self.manager = manager
self.m.cache[int(id)] = self
self.id = int(id)
self.url = url
self._name = None
self.name = name
self._size = None
... |
def make_dataset_test(dir):
images = []
assert os.path.isdir(dir), ('%s is not a valid directory' % dir)
f = dir.split('/')[(- 1)].split('_')[(- 1)]
for i in range(len([name for name in os.listdir(dir) if os.path.isfile(os.path.join(dir, name))])):
if ((f == 'label') or (f == 'labelref')):
... |
class ResNetBackbone(Backbone):
def __init__(self, backbone):
super(ResNetBackbone, self).__init__(backbone)
self.custom_objects.update(keras_resnet.custom_objects)
def retinanet(self, *args, **kwargs):
return resnet_retinanet(*args, backbone=self.backbone, **kwargs)
def fsaf(self, n... |
class K8sSetup():
def __init__(self, namespace_name, release_name, image_tag_details, runtime_props, image_script_dir, command):
self.namespace_name = namespace_name
self.release_name = release_name
self.image_tag_details = image_tag_details
self.runtime_props = (runtime_props or {})... |
class RequestFactory():
def __init__(self, baseplate: Baseplate):
self.baseplate = baseplate
def __call__(self, environ: Dict[(str, str)]) -> BaseplateRequest:
return BaseplateRequest(environ, context_config=self.baseplate._context_config)
def blank(self, path: str) -> BaseplateRequest:
... |
class LeNet(nn.Module):
def __init__(self):
super(LeNet, self).__init__()
self.conv1 = nn.Conv2d(3, 6, 5)
self.conv2 = nn.Conv2d(6, 16, 5)
self.fc1 = nn.Linear(((16 * 5) * 5), 120)
self.fc2 = nn.Linear(120, 84)
self.fc3 = nn.Linear(84, 10)
def forward(self, x):
... |
class ResNetStage(nn.Module):
def __init__(self, in_chs, out_chs, stride, dilation, depth, bottle_ratio=0.25, groups=1, block_dpr=None, block_fn=PreActBottleneck, act_layer=None, conv_layer=None, norm_layer=None, **block_kwargs):
super(ResNetStage, self).__init__()
first_dilation = (1 if (dilation i... |
def generate_triplets(binding_seq):
if (not isinstance(binding_seq, types.GeneratorType)):
binding_seq = (x for x in binding_seq)
prev = None
v = next(binding_seq, None)
nxt = next(binding_seq, None)
i = 0
(yield (prev, v, nxt, i))
while nxt:
prev = v
v = nxt
... |
def load_weights(G, D, M, state_dict, weights_root, experiment_name, name_suffix=None, G_ema=None, strict=True, load_optim=True):
root = '/data/users/yaxing/MineGAN_I2I/BigGAN'
if name_suffix:
print(('Loading %s weights from %s...' % (name_suffix, root)))
else:
print(('Loading weights from %... |
def _make_file_or_dir_dep(name: str, path: Path, base: (Path | None)=None, subdirectory: (str | None)=None, extras: (list[str] | None)=None) -> ((FileDependency | DirectoryDependency) | None):
from poetry.core.packages.directory_dependency import DirectoryDependency
from poetry.core.packages.file_dependency imp... |
def _check_inputs(node, storage_map, r_vals, dr_vals, active_nodes, clobber_dr_vals=True, perform=None, warn_input_not_reused=True):
destroyed_idx_list = []
destroy_map = node.op.destroy_map
for (o_pos, i_pos_list) in destroy_map.items():
destroyed_idx_list.extend(i_pos_list)
destroyed_res_list ... |
def events_for_onchain_secretreveal_if_closed(channelmap: Dict[(ChannelID, NettingChannelState)], transfers_pair: List[MediationPairState], secret: Secret, secrethash: SecretHash, block_hash: BlockHash) -> List[Event]:
events: List[Event] = []
all_payer_channels = []
for pair in transfers_pair:
chan... |
def time_wait(options):
print(options.describe())
listen0 = socket.socket()
listen0.bind(('127.0.0.1', 0))
sockaddr = listen0.getsockname()
listen0.close()
listen1 = socket.socket()
options.set('listen1_early', listen1)
listen1.bind(sockaddr)
listen1.listen(1)
options.set('listen... |
def get_optimizer_and_scheduler(training_args, model):
no_decay = ['bias', 'LayerNorm.weight']
optimizer_grouped_parameters = [{'params': [p for (n, p) in model.named_parameters() if (not any(((nd in n) for nd in no_decay)))], 'weight_decay': training_args.weight_decay}, {'params': [p for (n, p) in model.named_... |
class ScanDirIter():
def __init__(self, filesystem, path):
self.filesystem = filesystem
if isinstance(path, int):
if ((not use_scandir_package) and ((sys.version_info < (3, 7)) or self.filesystem.is_windows_fs)):
raise NotImplementedError('scandir does not support file de... |
.parametrize('value', [10, 65, 0, 1, 2, 134])
.parametrize('limits', [(5, 20, 500), (3, 50, 150)])
def test_encode_int_with_limits_round_trip(value: int, limits: tuple[(int, ...)]):
data = bitpacking._pack_encode_results(list(bitpacking.encode_int_with_limits(value, limits)))
decoded = bitpacking.decode_int_wit... |
()
('tab', value=cmdutils.Value.cur_tab)
('pid', completion=miscmodels.process)
('action', choices=['show', 'terminate', 'kill'])
def process(tab: apitypes.Tab, pid: int=None, action: str='show') -> None:
if (pid is None):
if (last_pid is None):
raise cmdutils.CommandError('No process executed y... |
class PasslibHash(BaseHash):
def __init__(self, algorithm):
super(PasslibHash, self).__init__(algorithm)
if (not PASSLIB_AVAILABLE):
raise Exception(("passlib must be installed and usable to hash with '%s'" % algorithm), orig_exc=PASSLIB_E)
try:
self.crypt_algo = geta... |
.end_to_end()
def test_use_data_catalog_with_different_name(runner, tmp_path):
source = '\n from pathlib import Path\n from typing_extensions import Annotated\n from pytask import DataCatalog\n\n data_catalog = DataCatalog(name="blob")\n\n def task_add_content() -> Annotated[str, data_catalog["new_co... |
()
def sys_dict():
sdict = {}
sdict['ss'] = ct.StateSpace([[(- 1)]], [[1]], [[1]], [[0]])
sdict['tf'] = ct.TransferFunction([1], [0.5, 1])
sdict['tfx'] = ct.TransferFunction([1, 1], [1])
sdict['frd'] = ct.frd([(10 + 0j), (9 + 1j), (8 + 2j), (7 + 3j)], [1, 2, 3, 4])
sdict['ios'] = ct.NonlinearIOS... |
class TrendStrengthSheet():
def __init__(self, settings: Settings, pdf_exporter: PDFExporter, price_provider: DataProvider, window_len=128):
self.settings = settings
self.pdf_exporter = pdf_exporter
self.price_provider = price_provider
self.window_len = window_len
self.start_... |
def compute_gradnorm(models, loss):
grad_norm = torch.tensor([]).cuda()
gradnorm = 0.0
models['backbone'].zero_grad()
loss.backward(retain_graph=True)
for param in models['backbone'].parameters():
if (param.grad is not None):
gradnorm = torch.norm(param.grad)
gradnorm... |
def setup_save_checkpoint_common_name(save_checkpoints_config, extra_name=None):
if (extra_name is not None):
checkpoint_common_name = ((('checkpoint-' + extra_name) + '-') + save_checkpoints_config['checkpoint_file_name_prefix'])
else:
checkpoint_common_name = ('checkpoint-' + save_checkpoints_... |
('/list/operation_log', methods=['GET'])
_params([dict(name='page', type=int, required=True, nullable=False), dict(name='page_size', type=int, required=True, nullable=False), dict(name='start_time', type=str, required=False), dict(name='end_time', type=str, required=False), dict(name='domain', type=str, required=False)... |
def test_envvar_auto_num_workers_warn(pytester: pytest.Pytester, monkeypatch: pytest.MonkeyPatch, monkeypatch_3_cpus) -> None:
from xdist.plugin import pytest_cmdline_main as check_options
monkeypatch.setenv('PYTEST_XDIST_AUTO_NUM_WORKERS', 'fourscore')
config = pytester.parseconfigure('-nauto')
with py... |
def test_collision_condition():
cond = OSC.CollisionCondition('Ego')
prettyprint(cond.get_element())
cond1 = OSC.CollisionCondition(OSC.ObjectType.pedestrian)
prettyprint(cond1.get_element())
cond2 = OSC.CollisionCondition('Ego')
assert (cond == cond2)
assert (cond != cond1)
cond3 = OSC.... |
class DataTrainingArguments():
max_seq_length: Optional[int] = field(default=196, metadata={'help': 'The maximum total input sequence length after tokenization. Sequences longer than this will be truncated, sequences shorter will be padded.'})
overwrite_cache: bool = field(default=False, metadata={'help': 'Over... |
def makemandel(mandelx, mandely, xbeg, xend, ybeg, yend, filename=None):
xa = (- 2.0)
xb = 1.0
ya = (- 1.5)
yb = 1.5
maxIt = 128
image = Image.new('RGB', ((xend - xbeg), (yend - ybeg)))
for y in range(ybeg, yend):
cy = (((y * (yb - ya)) / (mandely - 1)) + ya)
for x in range(x... |
def _visit(block: CFGBlock, graph: graphviz.Digraph, visited: Set[int], end: CFGBlock) -> None:
node_id = f'{graph.name}_{block.id}'
if (node_id in visited):
return
label = ('\n'.join([s.as_string() for s in block.statements]) + '\n')
label = label.replace('\\', '\\\\')
label = label.replace... |
def build_vectors(fips_vectors):
vectors = defaultdict(list)
for vector in fips_vectors:
vectors[vector['digest_algorithm']].append(vector['message'])
for (digest_algorithm, messages) in vectors.items():
if (digest_algorithm not in HASHLIB_HASH_TYPES):
continue
(yield '')... |
class IncrTestBench(Component):
def construct(s):
s.incr_in = b8(10)
s.incr_out = b8(0)
s.incr = IncrMethodModular()
_once
def upA():
s.incr.write(s.incr_in)
s.incr_in += 10
_once
def upC():
s.incr_out = s.incr.read()
de... |
def _weights2pairs():
return {'mbart50': _PAIRS_MBART50, 'mbart-large-50-many-to-many-mmt': _PAIRS_MBART50, 'facebook/mbart-large-50-many-to-many-mmt': _PAIRS_MBART50, 'm2m100': _PAIRS_M2M100, 'm2m100_418M': _PAIRS_M2M100, 'm2m100_1.2B': _PAIRS_M2M100, 'facebook/m2m100_418M': _PAIRS_M2M100, 'facebook/m2m100_1.2B': ... |
.parametrize(('path', 'result'), [(Path('/'), None), (Path('/ham/spam/other.py'), None), (Path('/ham/spam'), None), (Path('__init__.py'), None), (Path('/__init__.py'), None), (Path('spam/__init__.py'), Path('spam')), (Path('spam/__init__.pyc'), Path('spam')), (Path('spam/__init__.pyo'), Path('spam')), (Path('ham/spam/_... |
def test_pyproject_source_fix_no_deps(monkeypatch, req_file):
logger = pretend.stub(warning=pretend.call_recorder((lambda s: None)))
monkeypatch.setattr(pyproject, 'logger', logger)
source = _init_pyproject(req_file(), '\n[project]\n')
fix = ResolvedFixVersion(dep=ResolvedDependency(name='flask', versio... |
class MyEnumerator(com.COMObject):
_interfaces_ = [IDWriteFontFileEnumerator]
def __init__(self, factory, loader):
super().__init__()
self.factory = cast(factory, IDWriteFactory)
self.key = 'pyglet_dwrite'
self.size = len(self.key)
self.current_index = (- 1)
self.... |
.end_to_end()
def test_more_nested_pytree_and_python_node_as_return(runner, tmp_path):
source = '\n from pathlib import Path\n from typing import Any\n from typing_extensions import Annotated\n from pytask import PythonNode\n from typing import Dict\n\n nodes = [PythonNode(), (PythonNode(), Python... |
def webclient_options(session, *args, **kwargs):
account = session.account
clientoptions = account.db._saved_webclient_options
if (not clientoptions):
account.db._saved_webclient_options = settings.WEBCLIENT_OPTIONS.copy()
clientoptions = account.db._saved_webclient_options
try:
... |
class S3Store(Store):
base_url = '
def __init__(self, bucket: str, access_key: str, secret_key: str, region: str, max_age: int=DEFAULT_MAX_AGE, prefix: str=None, base_url: str=None, cdn_url: str=None, cdn_prefix_ignore: bool=False, acl: str='private'):
self.bucket = bucket
self.access_key = acce... |
def parse_visual_link(link):
origin = link.findall('origin')
assert ((len(origin) == 1) or (len(origin) == 0))
if len(origin):
xyz = str_to_tup(origin[0].attrib['xyz'])
rpy = str_to_tup(origin[0].attrib['rpy'])
else:
xyz = ([0.0] * 3)
rpy = ([0.0] * 3)
geometry = link... |
class TestDriverGaussianFromMat(QiskitChemistryTestCase, TestDriver):
def setUp(self):
super().setUp()
self.good_check = GaussianDriver._check_valid
GaussianDriver._check_valid = _check_valid
g16 = GaussianDriver()
matfile = self.get_resource_path('test_driver_gaussian_from_m... |
def initial_retrieval(encoder, workers, questions: List, k1: int, n1: int, safety_mult: int=1):
tokenized_qs = [tok_q.words() for tok_q in workers.imap(tokenize, [q['question'] for q in questions])]
q_search_encodings = encoder.encode_text_questions(tokenized_qs, return_search_vectors=True, show_progress=False)... |
class C4_head(nn.Module):
def __init__(self, in_channel=256, out_channel=512):
super(C4_head, self).__init__()
self.conv1 = nn.Conv3d(in_channel, out_channel, kernel_size=(3, 3, 3), stride=1, padding=0, bias=False)
self.bn1 = nn.BatchNorm3d(out_channel)
self.relu1 = nn.ReLU(inplace=T... |
def cancel_repository_build(build, build_queue):
from app import build_canceller
from buildman.jobutil.buildjob import BuildJobNotifier
cancel_builds = [create_cancel_build_in_queue(build.phase, build.queue_id, build_queue), create_cancel_build_in_manager(build.phase, build.uuid, build_canceller)]
for c... |
class MeanPool(Encoder):
def __init__(self, map_layer: Optional[Mapper]=None):
self.map_layer = map_layer
def apply(self, is_train, x, mask=None):
if (self.map_layer is not None):
x = self.map_layer.apply(is_train, x, mask)
rank = (len(x.shape) - 2)
if (mask is not No... |
def parse_args(description='I am lazy'):
import argparse
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--embedding', type=str, default='./data_qg/glove.6B.100d.txt', required=True)
parser.add_argument('--dict', type=str, required=True)
parser.add_argument('--output',... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.