code stringlengths 281 23.7M |
|---|
class KappaExporter(Exporter):
def export(self, dialect='kasim'):
kappa_str = ''
if self.docstring:
kappa_str += (('# ' + self.docstring.replace('\n', '\n# ')) + '\n')
gen = KappaGenerator(self.model, dialect=dialect)
kappa_str += gen.get_content()
return kappa_st... |
def dependencies_draft3(validator, dependencies, instance, schema):
if (not validator.is_type(instance, 'object')):
return
for (property, dependency) in dependencies.items():
if (property not in instance):
continue
if validator.is_type(dependency, 'object'):
(yiel... |
class TestSetup(TestCase):
def setUp(self) -> None:
assert (Item.objects.count() == 0)
Item.objects.create(name='Some item')
Item.objects.create(name='Some item again')
def test_count(self) -> None:
self.assertEqual(Item.objects.count(), 2)
assert (Item.objects.count() ==... |
class Field(object):
name = None
default = None
structcode = None
structvalues = 0
check_value = None
parse_value = None
keyword_args = False
def __init__(self):
pass
def parse_binary_value(self, data, display, length, format):
raise RuntimeError('Neither structcode o... |
def upload_table(table: Union[(LocalTable, DistributedDataset)], s3_base_url: str, s3_file_system: s3fs.S3FileSystem, s3_table_writer_func: Callable, s3_table_writer_kwargs: Optional[Dict[(str, Any)]], content_type: ContentType=ContentType.PARQUET, **s3_client_kwargs) -> ManifestEntryList:
if (s3_table_writer_kwarg... |
class PlayCollectItem(Packet):
id = 85
to = 1
def __init__(self, collected_eid: int, collector_eid: int, item_count: int) -> None:
super().__init__()
self.collected_eid = collected_eid
self.collector_eid = collector_eid
self.item_count = item_count
def encode(self) -> byt... |
class ObjectCollector():
def __init__(self, objects_stream: IO):
self.objects_stream = objects_stream
def collect(self, _frame: FrameType, timestamp: float) -> None:
sample_objects(timestamp, self.objects_stream)
self.objects_stream.flush()
def stop(self) -> None:
self.object... |
class TrieNode(object):
def __init__(self):
self.links = ([None] * 26)
self.isEnd = False
def containsKey(self, ch):
return (self.links[(ord(ch) - ord('a'))] != None)
def get(self, ch):
return self.links[(ord(ch) - ord('a'))]
def put(self, ch, node):
self.links[(o... |
class Net(nn.Module):
def __init__(self) -> None:
super(Net, self).__init__()
self.conv1 = nn.Conv2d(1, 32, 3, 1)
self.conv2 = nn.Conv2d(32, 64, 3, 1)
self.dropout1 = nn.Dropout(0.25)
self.dropout2 = nn.Dropout(0.5)
self.fc1 = nn.Linear(9216, 128)
self.fc2 = n... |
class MatrixGenerator(object):
_idx_start = 0
_idx_delim = '[]'
_base_printer = CodePrinter
_type_declar = 'double '
_line_contin = None
_comment_char = '#'
def __init__(self, arguments, matrices, cse=True):
required_args = set()
for matrix in matrices:
required_a... |
def decimal_strict_coercion_loader(data):
    """Strictly load a ``Decimal`` from a value that is exactly a str or a Decimal.

    The exact-type checks (``type(...) is``, not ``isinstance``) are
    deliberate: subclasses are rejected, matching the "strict coercion"
    contract of this loader.

    Raises:
        ValueLoadError: ``data`` is a str but not a valid decimal literal.
        TypeLoadError: ``data`` is neither exactly ``str`` nor exactly ``Decimal``.
    """
    data_type = type(data)
    if data_type is Decimal:
        return data
    if data_type is str:
        try:
            return Decimal(data)
        except InvalidOperation:
            raise ValueLoadError('Bad string format', data)
    raise TypeLoadError(Union[str, Decimal], data)
class AssignResult(object):
def __init__(self, num_gts, gt_inds, max_overlaps, labels=None):
self.num_gts = num_gts
self.gt_inds = gt_inds
self.max_overlaps = max_overlaps
self.labels = labels
def add_gt_(self, gt_labels):
self_inds = torch.arange(1, (len(gt_labels) + 1),... |
def fuse_scales(xs, name='', upsample='transpose'):
if (len(xs) == 1):
return xs
fusion_outs = []
for i in range(len(xs)):
to_be_fused = []
for j in range(len(xs)):
x = xs[j]
if (j > i):
if (upsample == 'transpose'):
x = Con... |
class Test_MoreTimeouts(unittest.TestCase):
def setUp(self):
self.s = serial.serial_for_url(PORT, do_not_open=True)
def tearDown(self):
self.s.reset_output_buffer()
self.s.flush()
self.s.close()
self.s.timeout = 1
self.s.xonxoff = False
self.s.open()
... |
class ROIData():
mask: str
color: Union[(str, List[int])]
number: int
name: str
frame_of_reference_uid: int
description: str = ''
use_pin_hole: bool = False
approximate_contours: bool = True
roi_generation_algorithm: Union[(str, int)] = 0
def __post_init__(self):
self.val... |
class DataParser(object):
def __init__(self, feat_dict):
self.feat_dict = feat_dict
def parse(self, infile=None, df=None, has_label=False):
assert (not ((infile is None) and (df is None))), 'infile or df at least one is set'
assert (not ((infile is not None) and (df is not None))), 'only... |
def get_result_one(data, k=10):
print(('K=%d' % k))
data_back = copy.deepcopy(data)
(result, eval_result) = mondrian(data, k, RELAX)
if (DATA_SELECT == 'a'):
result = covert_to_raw(result)
else:
for r in result:
r[(- 1)] = ','.join(r[(- 1)])
write_to_file(result)
... |
def test_api_component_edit():
fakebz = tests.mockbackend.make_bz(component_create_args='data/mockargs/test_api_component_create1.txt', component_create_return={}, component_update_args='data/mockargs/test_api_component_update1.txt', component_update_return={})
fakebz.addcomponent({'initialowner': '', 'initialq... |
.parametrize('show_plotter', [True, False])
def test_background_plotting_axes_scale(qtbot, show_plotter, plotting):
plotter = BackgroundPlotter(show=show_plotter, off_screen=False, title='Testing Window')
assert_hasattr(plotter, 'app_window', MainWindow)
window = plotter.app_window
qtbot.addWidget(windo... |
()
def default_filler_config() -> FillerConfiguration:
return FillerConfiguration(randomization_mode=RandomizationMode.FULL, minimum_random_starting_pickups=0, maximum_random_starting_pickups=0, indices_to_exclude=frozenset(), logical_resource_action=LayoutLogicalResourceAction.RANDOMLY, first_progression_must_be_l... |
_caches
def test_inherited_mice_cache_keeps_unaffected_mice(redis_cache):
s = examples.basic_subsystem()
mechanism = (1,)
mice = s.find_mice(Direction.CAUSE, mechanism)
assert (s._mice_cache.size() == 1)
assert (mice.purview == (2,))
cut = models.Cut((0, 1), (2,))
cut_s = Subsystem(s.network... |
(persist=eval(os.getenv('PERSISTENT')))
def extract_topics_lda(text_file_paths, num_topics=0, num_words=10):
list_of_list_of_tokens = []
for text_file_path in text_file_paths:
with open(text_file_path, 'r') as f:
text = f.read()
doc_words = text.replace('\n', ' ').split(' ')
... |
def construct_gold_set(ex, doc, cur_event, doc_key, args):
gold_set = set()
gold_canonical_set = set()
for arg in cur_event['arguments']:
argname = arg['role']
entity_id = arg['entity_id']
entity = get_entity(ex, entity_id)
span = (entity['start'], (entity['end'] - 1))
... |
def test_issue940_with_metaclass_class_context_property() -> None:
node = builder.extract_node("\n class BaseMeta(type):\n pass\n class Parent(metaclass=BaseMeta):\n \n def __members__(self):\n return ['a', 'property']\n class Derived(Parent):\n pass\n Derived.__me... |
class TestMatMul():
def setup_method(self):
self.rng = np.random.default_rng(utt.fetch_seed())
self.op = matmul
def _validate_output(self, a, b):
pytensor_sol = self.op(a, b).eval()
numpy_sol = np.matmul(a, b)
assert _allclose(numpy_sol, pytensor_sol)
.parametrize('x1... |
class FileDatabaseMethods():
def filecount(self, queue):
self.c.execute('SELECT COUNT(*) FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=?', (queue,))
return self.c.fetchone()[0]
def queuecount(self, queue):
self.c.execute('SELECT COUNT(*) FROM links as l INNER J... |
def test_service_browser_is_aware_of_port_changes():
zc = Zeroconf(interfaces=['127.0.0.1'])
type_ = '_hap._tcp.local.'
registration_name = ('xxxyyy.%s' % type_)
callbacks = []
def on_service_state_change(zeroconf, service_type, state_change, name):
nonlocal callbacks
if (name == reg... |
class TypeChecker(NodeVisitor[None], CheckerPluginInterface):
is_stub = False
errors: Errors
msg: MessageBuilder
_type_maps: list[dict[(Expression, Type)]]
binder: ConditionalTypeBinder
expr_checker: mypy.checkexpr.ExpressionChecker
pattern_checker: PatternChecker
tscope: Scope
scope... |
def slload(file, file_format=None, **kwargs):
if isinstance(file, Path):
file = str(file)
if ((file_format is None) and is_str(file)):
file_format = file.split('.')[(- 1)]
if (file_format not in file_handlers):
raise TypeError(f'Unsupported format: {file_format}')
handler = file_... |
_grad()
def forward_and_adapt(x, model, optimizer, args=None, actual_bz=None, n_clips=None):
outputs = model(x)
if (args.arch == 'tanet'):
outputs = outputs.reshape(actual_bz, (args.test_crops * n_clips), (- 1)).mean(1)
loss = softmax_entropy(outputs).mean(0)
loss.backward()
optimizer.step()... |
def get_new_cuda_buffer() -> Callable[([int], object)]:
global _new_cuda_buffer
if (_new_cuda_buffer is not None):
return _new_cuda_buffer
try:
import rmm
_new_cuda_buffer = (lambda n: rmm.DeviceBuffer(size=n))
return _new_cuda_buffer
except ImportError:
pass
... |
def test_gte():
x = Bits(4, 12)
y = Bits(4, 3)
z = Bits(4, 12)
assert (x.uint() >= y.uint())
assert (x.uint() >= 2)
assert (x.uint() >= z.uint())
assert (x.uint() >= 12)
assert (x >= y.uint())
assert (x >= 2)
assert (x >= z.uint())
assert (x >= 12)
assert (x >= y)
ass... |
class Correlation1dFunction(Function):
def __init__(self, pad_size=3, kernel_size=3, max_displacement=20, stride1=1, stride2=2, corr_multiply=1):
super(Correlation1dFunction, self).__init__()
self.pad_size = pad_size
self.kernel_size = kernel_size
self.max_displacement = max_displace... |
.parametrize('reported,expected', [('linux-x86_64', 'linux_i686'), ('linux-aarch64', 'linux_armv7l')])
def test_get_platform_linux32(reported, expected, monkeypatch):
monkeypatch.setattr(sysconfig, 'get_platform', return_factory(reported))
monkeypatch.setattr(struct, 'calcsize', (lambda x: 4))
assert (get_p... |
def js_splice(arr: list, start: int, delete_count=None, *items):
try:
if (start > len(arr)):
start = len(arr)
if (start < 0):
start = (len(arr) - start)
except TypeError:
start = 0
if ((not delete_count) or (delete_count >= (len(arr) - start))):
delete... |
_fixtures(WebFixture)
def test_bookmarks(web_fixture):
bookmark = Bookmark('/base_path', '/relative_path', 'description')
af_bookmark = Bookmark('/base_path', '/relative_path', 'description', locale='af')
assert (af_bookmark.locale == 'af')
assert (af_bookmark.href.path == '/af/base_path/relative_path')... |
def run(core_args, daemon=False, pid_file=''):
if pid_file:
pid = is_already_running(pid_file)
if pid:
sys.stderr.write(f'''pyLoad already running with pid {pid}
''')
if (os.name == 'nt'):
sys.exit(70)
else:
sys.exit(os.EX_SOFTWARE)... |
def parse_keras_history(logs):
if hasattr(logs, 'history'):
if (not hasattr(logs, 'epoch')):
return (None, [], dict())
logs.history['epoch'] = logs.epoch
logs = logs.history
else:
logs = {log_key: [single_dict[log_key] for single_dict in logs] for log_key in logs[0]}
... |
def test_remove_multiple(tester: CommandTester, venvs_in_cache_dirs: list[str], venv_name: str, venv_cache: Path) -> None:
expected = {''}
removed_envs = venvs_in_cache_dirs[0:2]
remaining_envs = venvs_in_cache_dirs[2:]
tester.execute(' '.join(removed_envs))
for name in removed_envs:
assert ... |
_REGISTRY.register()
class SRGANModel(SRModel):
def init_training_settings(self):
train_opt = self.opt['train']
self.ema_decay = train_opt.get('ema_decay', 0)
if (self.ema_decay > 0):
logger = get_root_logger()
logger.info(f'Use Exponential Moving Average with decay: ... |
class CaseGenerator():
def __init__(self, job_init, num_mas, opes_per_job_min, opes_per_job_max, nums_ope=None, path='./ ', flag_same_opes=True, flag_doc=False):
self.str_time = time.strftime('%Y%m%d_%H%M%S', time.localtime(time.time()))
if (nums_ope is None):
nums_ope = []
self... |
class AttrVI_ATTR_TERMCHAR_EN(BooleanAttribute):
resources = [(constants.InterfaceType.gpib, 'INSTR'), (constants.InterfaceType.gpib, 'INTFC'), (constants.InterfaceType.asrl, 'INSTR'), (constants.InterfaceType.tcpip, 'INSTR'), (constants.InterfaceType.tcpip, 'SOCKET'), (constants.InterfaceType.usb, 'INSTR'), (const... |
def fcn_8(n_classes, encoder=vanilla_encoder, input_height=224, input_width=224):
(img_input, levels) = encoder(input_height=input_height, input_width=input_width)
[f1, f2, f3, f4, f5] = levels
o = f5
o = Conv2D(4096, (7, 7), activation='relu', padding='same', data_format=IMAGE_ORDERING)(o)
o = Drop... |
def handle_net_dev_xmit(event_info):
global of_count_tx_xmit_list
(name, context, cpu, time, pid, comm, skbaddr, skblen, rc, dev_name) = event_info
if (rc == 0):
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if (skb['skbaddr'] == skbaddr):
skb['xm... |
class DocumentLabel(layout.TextLayout):
def __init__(self, document=None, x=0, y=0, z=0, width=None, height=None, anchor_x='left', anchor_y='baseline', rotation=0, multiline=False, dpi=None, batch=None, group=None, init_document=True):
super().__init__(document, x, y, z, width, height, anchor_x, anchor_y, r... |
class _MultiBatchNorm(nn.Module):
_version = 2
def __init__(self, num_features, num_classes, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True):
super(_MultiBatchNorm, self).__init__()
self.bns = nn.ModuleList([nn.BatchNorm2d(num_features, eps, momentum, affine, track_running_stats)... |
class Magic8ball(commands.Cog):
(name='8ball')
async def output_answer(self, ctx: commands.Context, *, question: str) -> None:
if (len(question.split()) >= 3):
answer = random.choice(ANSWERS)
(await ctx.send(answer))
else:
(await ctx.send('Usage: .8ball <quest... |
def generate_quick_linesample_arrays(source_area_def, target_area_def, nprocs=1):
from pyresample.grid import get_linesample
(lons, lats) = target_area_def.get_lonlats(nprocs)
(source_pixel_y, source_pixel_x) = get_linesample(lons, lats, source_area_def, nprocs=nprocs)
source_pixel_x = _downcast_index_a... |
.skipif((not torch.cuda.is_available()), reason='requires CUDA support')
def test_anchor_head_loss():
s = 256
img_metas = [{'img_shape': (s, s, 3), 'scale_factor': 1, 'pad_shape': (s, s, 3)}]
cfg = mmcv.Config(dict(assigner=dict(type='MaxIoUAssigner', pos_iou_thr=0.5, neg_iou_thr=0.4, min_pos_iou=0, ignore_... |
class DCUN_TFC_GPoCM_TDF(DenseCUNet_GPoCM):
def __init__(self, n_fft, n_blocks, input_channels, internal_channels, n_internal_layers, first_conv_activation, last_activation, t_down_layers, f_down_layers, kernel_size_t, kernel_size_f, bn_factor, min_bn_units, tfc_tdf_bias, tfc_tdf_activation, control_vector_type, co... |
class FakeModel(model.DetectionModel):
def __init__(self, add_detection_masks=False):
self._add_detection_masks = add_detection_masks
def preprocess(self, inputs):
return tf.identity(inputs)
def predict(self, preprocessed_inputs):
return {'image': tf.layers.conv2d(preprocessed_inputs... |
def test_pipeline(root_path):
(opt, _) = parse_options(root_path, is_train=False)
torch.backends.cudnn.benchmark = True
make_exp_dirs(opt)
log_file = osp.join(opt['path']['log'], f"test_{opt['name']}_{get_time_str()}.log")
logger = get_root_logger(logger_name='basicsr', log_level=logging.INFO, log_f... |
class CapAmountColumn(GraphColumn):
name = 'CapAmount'
stickPrefixToValue = True
def __init__(self, fittingView, params):
super().__init__(fittingView, 1668)
def _getValue(self, fit):
return (fit.ship.getModifiedItemAttr('capacitorCapacity'), 'GJ')
def _getFitTooltip(self):
r... |
_module
def test_init_decorator_init_false(module: str):
node = astroid.extract_node(f'''
from {module} import dataclass
from typing import List
(init=False)
class A:
x: int
y: str
z: List[bool]
A.__init__ #
''')
init = next(node.infer())
assert (init._proxied... |
.parametrize('username,password', users)
def test_create(db, client, username, password):
client.login(username=username, password=password)
instances = Attribute.objects.order_by('-level')
for instance in instances:
url = reverse(urlnames['list'])
data = {'uri_prefix': instance.uri_prefix, ... |
def try_open_zarr_array(dirpath, shape, chunks, dtype):
    """Open an existing zarr array read-only, returning it only if it matches.

    The stored array must have exactly the given ``shape`` and ``dtype``.
    A falsy ``chunks`` argument means "accept whatever chunking the stored
    array already has". Returns None when the array is missing or any
    property differs.
    """
    try:
        existing = zarr.open_array(dirpath, mode='r')
    except ArrayNotFoundError:
        return None
    expected_chunks = chunks or existing.chunks
    matches = (
        existing.shape == shape
        and existing.chunks == expected_chunks
        and existing.dtype == dtype
    )
    return existing if matches else None
class AnnCompoundReader(JSONReader):
VECTORS_FILE = 'vectors.npy'
QUERIES_FILE = 'tests.jsonl'
def read_vectors(self) -> Iterator[List[float]]:
vectors = np.load((self.path / self.VECTORS_FILE))
for vector in vectors:
if self.normalize:
vector = (vector / np.linal... |
class ProvidedFileAssetConfiguration(AssetConfigurationMixin, BaseProvidedFileAsset, BenefitFeatureConfiguration):
class Meta(BaseProvidedFileAsset.Meta, BenefitFeatureConfiguration.Meta):
verbose_name = 'Provided File Configuration'
verbose_name_plural = 'Provided File Configurations'
const... |
class SysvService(Service):
_property
def _service_command(self):
return self.find_command('service')
def is_running(self):
return (self.run_expect([0, 1, 3, 8], '%s %s status', self._service_command, self.name).rc == 0)
def is_enabled(self):
return bool(self.check_output('find -... |
class MaxOut(nn.Module):
def __init__(self, d, m, k):
super(MaxOut, self).__init__()
(self.d_in, self.d_out, self.pool_size) = (d, m, k)
self.lin = Linear(d, (m * k))
def forward(self, inputs):
original_size = inputs.size()
inputs = inputs.view((- 1), inputs.size((- 1)))
... |
class GuiChangeProjectedItemsProjectionRangeCommand(wx.Command):
def __init__(self, fitID, items, projectionRange):
wx.Command.__init__(self, True, 'Change Projected Items Projection Range')
self.internalHistory = InternalCommandHistory()
self.fitID = fitID
self.projectionRange = pro... |
.parametrize('constraint, versions, yanked_versions, expected', [('>=1', ['1', '2'], [], '2'), ('>=1', ['1', '2'], ['2'], '1'), ('>=1', ['1', '2', '3'], ['2'], '3'), ('>=1', ['1', '2', '3'], ['2', '3'], '1'), ('>1', ['1', '2'], ['2'], 'error'), ('>1', ['2'], ['2'], 'error'), ('>=2', ['2'], ['2'], 'error'), ('==2', ['2'... |
def train():
(gen, dis) = load_models()
(opt_g, opt_d) = (make_optimizer(gen), make_optimizer(dis))
train_loader = make_dataset()
z = torch.FloatTensor(opt.batch_size, opt.nz).cuda()
fixed_z = Variable(torch.FloatTensor((8 * 10), opt.nz).normal_(0, 1).cuda())
y_fake = torch.LongTensor(opt.batch_... |
class SparseToDense(Module):
def __init__(self, dimension, nPlanes):
Module.__init__(self)
self.dimension = dimension
self.nPlanes = nPlanes
def forward(self, input):
return SparseToDenseFunction.apply(input.features, input.metadata, input.spatial_size, self.dimension, self.nPlan... |
class TokenTextEncoder(TextEncoder):
def __init__(self, vocab_filename, reverse=False, vocab_list=None, replace_oov=None, num_reserved_ids=NUM_RESERVED_TOKENS):
super(TokenTextEncoder, self).__init__(num_reserved_ids=num_reserved_ids)
self._reverse = reverse
self._replace_oov = replace_oov
... |
_REGISTRY.register()
class GOPRODataset(data.Dataset):
def __init__(self, opt):
super(GOPRODataset, self).__init__()
self.opt = opt
(self.gt_root, self.lq_root) = (Path(opt['dataroot_gt']), Path(opt['dataroot_lq']))
self.num_frame = opt['num_frame']
self.num_half_frames = (op... |
class NotificationSettingsManager(GetWithoutIdMixin, UpdateMixin, RESTManager):
_path = '/notification_settings'
_obj_cls = NotificationSettings
_update_attrs = RequiredOptional(optional=('level', 'notification_email', 'new_note', 'new_issue', 'reopen_issue', 'close_issue', 'reassign_issue', 'new_merge_requ... |
class Effect4415(BaseEffect):
    """Passive hull effect: boosts 'explosionDelay' on loaded charges that
    require the Torpedoes skill, scaled by the ship's 'shipBonusMF'
    attribute per Minmatar Frigate skill level."""

    type = 'passive'

    def handler(fit, ship, context, projectionRange, **kwargs):
        # Only charges requiring the Torpedoes skill receive the bonus.
        def charge_filter(module):
            return module.charge.requiresSkill('Torpedoes')

        bonus = ship.getModifiedItemAttr('shipBonusMF')
        fit.modules.filteredChargeBoost(charge_filter, 'explosionDelay', bonus, skill='Minmatar Frigate', **kwargs)
def test_hour_angle():
longitude = (- 105.1786)
times = pd.DatetimeIndex(['2015-01-02 07:21:55.2132', '2015-01-02 16:47:42.9828', '2015-01-02 12:04:44.6340']).tz_localize('Etc/GMT+7')
eot = np.array([(- 3.935172), (- 4.117227), (- 4.026295)])
hours = solarposition.hour_angle(times, longitude, eot)
e... |
def decode_sequence(ix_to_word, seq):
(N, D) = seq.size()
out = []
for i in range(N):
txt = ''
for j in range(D):
ix = seq[(i, j)]
if (ix > 0):
if (j >= 1):
txt = (txt + ' ')
txt = (txt + ix_to_word[ix.item()])
... |
def test_filesystem_mount():
filename = 'images/test.mbr'
volumes = []
parser = ImageParser([fullpath(filename)])
for v in parser.init():
if ((v.flag == 'alloc') and (v.index != '4')):
assert (v.mountpoint is not None)
volumes.append(v)
parser.force_clean()
assert (le... |
class TestBernoulli(QiskitAquaTestCase):
def setUp(self):
super().setUp()
self._statevector = QuantumInstance(backend=BasicAer.get_backend('statevector_simulator'), seed_simulator=2, seed_transpiler=2)
self._unitary = QuantumInstance(backend=BasicAer.get_backend('unitary_simulator'), shots=1... |
def test_error_loading_external_extension():
extension = 'pyscaffoldext.fake.extension'
ex = str(ErrorLoadingExtension(extension))
assert ('an error loading' in ex)
assert ('fake' in ex)
fake = EntryPoint('fake', f'{extension}:Fake', 'pyscaffold.cli')
ex = str(ErrorLoadingExtension(entry_point=f... |
def _should_use_custom_op():
if (not enabled):
return False
if any((torch.__version__.startswith(x) for x in ['1.7.', '1.8.', '1.9'])):
return True
warnings.warn(f'grid_sample_gradfix not supported on PyTorch {torch.__version__}. Falling back to torch.nn.functional.grid_sample().')
retur... |
def test_goodbye_all_services():
zc = Zeroconf(interfaces=['127.0.0.1'])
out = zc.generate_unregister_all_services()
assert (out is None)
type_ = '_
registration_name = ('xxxyyy.%s' % type_)
desc = {'path': '/~paulsm/'}
info = r.ServiceInfo(type_, registration_name, 80, 0, 0, desc, 'ash-2.lo... |
def set_yaml_dv_comments(yaml_object):
yaml_object['comment'] = yaml_object.get('comment', '')
if (yaml_object['comment'] is None):
yaml_object['comment'] = ''
if ('score_logbook' in yaml_object):
for score_obj in yaml_object['score_logbook']:
score_obj['comment'] = score_obj.get... |
def test_git_local_info(source_url: str, remote_refs: FetchPackResult, remote_default_ref: bytes) -> None:
    """Git.info on a fresh clone reports the clone URL as its origin and the
    commit id of the remote default ref as its revision."""
    with Git.clone(url=source_url) as repo:
        info = Git.info(repo=repo)
        assert info.origin == source_url
        expected_revision = remote_refs.refs[remote_default_ref].decode('utf-8')
        assert info.revision == expected_revision
def test_dvclive_hook(tmp_path):
sys.modules['dvclive'] = MagicMock()
runner = _build_demo_runner()
(tmp_path / 'dvclive').mkdir()
hook = DvcliveLoggerHook(str((tmp_path / 'dvclive')))
loader = DataLoader(torch.ones((5, 2)))
runner.register_hook(hook)
runner.run([loader, loader], [('train', ... |
class Effect11430(BaseEffect):
    """Passive hull effect: boosts 'trackingSpeed' on fitted modules that
    require the Large Projectile Turret skill, scaled by the ship's
    'shipBonusMB' attribute per Minmatar Battleship skill level."""

    type = 'passive'

    def handler(fit, ship, context, projectionRange, **kwargs):
        # Only modules requiring Large Projectile Turret receive the bonus.
        def turret_filter(module):
            return module.item.requiresSkill('Large Projectile Turret')

        bonus = ship.getModifiedItemAttr('shipBonusMB')
        fit.modules.filteredItemBoost(turret_filter, 'trackingSpeed', bonus, skill='Minmatar Battleship', **kwargs)
def gh_labels(pr_number):
query = f'''
{{
repository(owner: "pytorch", name: "data") {{
pullRequest(number: {pr_number}) {{
labels(first: 10) {{
edges {{
node {{
name
}}
}}
}}
}}
}}
}}
'''... |
def _create_dict_items(values: Mapping[(Any, Any)], node: Dict) -> list[tuple[(SuccessfulInferenceResult, SuccessfulInferenceResult)]]:
elements: list[tuple[(SuccessfulInferenceResult, SuccessfulInferenceResult)]] = []
for (key, value) in values.items():
key_node = const_factory(key)
key_node.pa... |
def get_optimizer(p, parameters):
if (p['optimizer'] == 'sgd'):
optimizer = torch.optim.SGD(parameters, **p['optimizer_kwargs'])
elif (p['optimizer'] == 'adam'):
optimizer = torch.optim.Adam(parameters, **p['optimizer_kwargs'])
else:
raise ValueError('Invalid optimizer {}'.format(p['... |
def test_read_setup_cfg(tmp_path):
    """get_requires_python_str reads python_requires from [options] in setup.cfg."""
    # dedent normalizes the uniform indent away, so the written file contains
    # a plain INI body.
    config_body = dedent("""
        [options]
        python_requires = 1.234
        [metadata]
        something = other
        """)
    with open(tmp_path / 'setup.cfg', 'w') as cfg_file:
        cfg_file.write(config_body)
    assert get_requires_python_str(tmp_path) == '1.234'
def process_edge_index(edge_index_str):
res = []
edge_index_str = edge_index_str.strip()
edges = edge_index_str.split(',')
for edge in edges:
head = edge.split()[0].strip()
tail = edge.split()[1].strip()
res.append([int(head), int(tail)])
edge_index = torch.tensor(res, dtype=... |
def _get_ec_hash_alg(curve: ec.EllipticCurve) -> hashes.HashAlgorithm:
    """Return the SHA-2 digest conventionally paired with a NIST prime curve:
    P-256 -> SHA-256, P-384 -> SHA-384, P-521 -> SHA-512."""
    if isinstance(curve, ec.SECP256R1):
        return hashes.SHA256()
    if isinstance(curve, ec.SECP384R1):
        return hashes.SHA384()
    # Exhaustiveness guard: callers are expected to pass only the three
    # NIST prime curves handled here.
    assert isinstance(curve, ec.SECP521R1)
    return hashes.SHA512()
def test_signature():
ping = Ping(nonce=0, current_protocol_version=constants.PROTOCOL_VERSION, signature=constants.EMPTY_SIGNATURE)
ping.sign(signer)
assert (ping.sender == ADDRESS)
message_data = ping._data_to_sign()
signature = signer.sign(data=message_data, v=0)
assert (ADDRESS == recover(me... |
class FunnelTokenizer(PreTrainedTokenizer):
vocab_files_names = VOCAB_FILES_NAMES
pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION
max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
cls_token_type_id: int = 2
def __... |
class SpacedDiffusion(GaussianDiffusion):
def __init__(self, use_timesteps, conf=None, **kwargs):
self.use_timesteps = set(use_timesteps)
self.original_num_steps = len(kwargs['betas'])
self.conf = conf
base_diffusion = GaussianDiffusion(conf=conf, **kwargs)
if conf.respace_in... |
class ScriptLine(object):
def __init__(self, action: Action, parameters: List[ScriptObject], index: int):
self.action = action
self.parameters = parameters
self.index = index
def object(self):
return (self.parameters[0] if (len(self.parameters) > 0) else None)
def subject(sel... |
class EventSequenceFixture():
def __init__(self, event_loop):
self.event_loop = event_loop
self.listen_events = []
self.received_events = queue.Queue()
def create_window(self, **kwargs):
w = self.event_loop.create_window(**kwargs)
w.push_handlers(self)
return w
... |
def find_matched_molecular_pairs(index, fragment_reader, index_options=config.DEFAULT_INDEX_OPTIONS, environment_cache=EnvironmentCache(), min_radius=0, max_radius=5, reporter=None):
symmetric = index_options.symmetric
max_heavies_transf = index_options.max_heavies_transf
max_frac_trans = index_options.max_... |
def collate_fn(examples):
pixel_values = torch.stack([example['pixel_values'] for example in examples])
input_ids = torch.tensor([example['input_ids'] for example in examples], dtype=torch.long)
attention_mask = torch.tensor([example['attention_mask'] for example in examples], dtype=torch.long)
return {... |
(frozen=True)
class FileCache(Generic[T]):
path: Path
hash_type: str = 'sha256'
def __post_init__(self) -> None:
if (self.hash_type not in _HASHES):
raise ValueError(f"FileCache.hash_type is unknown value: '{self.hash_type}'.")
def get(self, key: str) -> (T | None):
return se... |
def create_quant_sim_model(sess: tf.Session, start_op_names: List[str], output_op_names: List[str], use_cuda: bool, evaluator: Callable[([tf.Session, Any], None)], logdir: str, encoding_filename: str=None) -> QuantizationSimModel:
copied_sess = save_and_load_graph(sess=sess, meta_path=logdir)
quant_scheme = Qua... |
class MyTransformer(MyTransformerChatGlmLMHeadModel, with_pl=True):
def __init__(self, *args, **kwargs):
lora_args: LoraArguments = kwargs.pop('lora_args', None)
super(MyTransformer, self).__init__(*args, **kwargs)
self.lora_args = lora_args
if ((lora_args is not None) and lora_args.... |
class IDDocumentSection(FieldSet):
def __init__(self, form, id_document):
super().__init__(form.view, legend_text='New investor information', css_id='id_document_section')
self.enable_refresh()
self.use_layout(FormLayout())
self.layout.add_input(SelectInput(form, id_document.fields.d... |
def set_clipboard(data: str, selection: bool=False) -> None:
global fake_clipboard
if (selection and (not supports_selection())):
raise SelectionUnsupportedError
if log_clipboard:
what = ('primary selection' if selection else 'clipboard')
log.misc.debug('Setting fake {}: {}'.format(w... |
class Effect6361(BaseEffect):
    """Passive hull effect: boosts 'explosiveDamage' on loaded charges that
    require the Rockets skill, scaled by the source's 'shipBonus3MF'
    attribute per Minmatar Frigate skill level."""

    type = 'passive'

    def handler(fit, src, context, projectionRange, **kwargs):
        # Only charges requiring the Rockets skill receive the bonus.
        def charge_filter(module):
            return module.charge.requiresSkill('Rockets')

        bonus = src.getModifiedItemAttr('shipBonus3MF')
        fit.modules.filteredChargeBoost(charge_filter, 'explosiveDamage', bonus, skill='Minmatar Frigate', **kwargs)
class SupportFuncUserSubclass1(SupportFuncProvider):
    """cmd2 command set whose ``user_sub1`` command tab-completes its
    ``state`` argument with the completer inherited from
    SupportFuncProvider.
    """

    parser = cmd2.Cmd2ArgumentParser()
    parser.add_argument('state', type=str, completer=SupportFuncProvider.complete_states)

    # Fix: the decorator line was garbled to a bare `.with_argparser(parser)`
    # (a syntax error); restore cmd2's argparse-binding decorator so
    # `do_user_sub1` receives a parsed Namespace.
    @cmd2.with_argparser(parser)
    def do_user_sub1(self, ns: argparse.Namespace):
        """Echo the parsed state back through the hosting cmd2 app."""
        self._cmd.poutput('something {}'.format(ns.state))
class ManagedConsole(QtCore.QCoreApplication):
def __init__(self, procedure_class, log_channel='', log_level=logging.INFO):
super().__init__([])
self.procedure_class = procedure_class
self.log_channel = log_channel
self.log = logging.getLogger(log_channel)
self.log_level = lo... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.